mirror of
https://github.com/simstudioai/sim.git
synced 2026-02-10 06:35:01 -05:00
Compare commits
14 Commits
feat/landi
...
improvemen
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
dd7111dc1a | ||
|
|
1e80633397 | ||
|
|
e1c57376c8 | ||
|
|
454127f001 | ||
|
|
f25505a636 | ||
|
|
34bdb4fa1c | ||
|
|
8b4b3af120 | ||
|
|
190f12fd77 | ||
|
|
e5d30494cb | ||
|
|
b3dbb4487f | ||
|
|
2ffdcb4e6c | ||
|
|
b32d4e4e48 | ||
|
|
606f0f3b8c | ||
|
|
089b8dfc93 |
@@ -5483,3 +5483,37 @@ export function AgentSkillsIcon(props: SVGProps<SVGSVGElement>) {
|
||||
</svg>
|
||||
)
|
||||
}
|
||||
|
||||
export function OnePasswordIcon(props: SVGProps<SVGSVGElement>) {
|
||||
return (
|
||||
<svg {...props} viewBox='0 0 48 48' xmlns='http://www.w3.org/2000/svg' fill='none'>
|
||||
<circle
|
||||
cx='24'
|
||||
cy='24'
|
||||
r='21.5'
|
||||
stroke='#000000'
|
||||
strokeLinecap='round'
|
||||
strokeLinejoin='round'
|
||||
/>
|
||||
<path
|
||||
d='M28.083,17.28a7.8633,7.8633,0,0,1,0,13.44'
|
||||
stroke='#000000'
|
||||
strokeLinecap='round'
|
||||
strokeLinejoin='round'
|
||||
/>
|
||||
<path
|
||||
d='M19.917,30.72a7.8633,7.8633,0,0,1,0-13.44'
|
||||
stroke='#000000'
|
||||
strokeLinecap='round'
|
||||
strokeLinejoin='round'
|
||||
/>
|
||||
<path
|
||||
d='M26.067,10.43H21.933a2.0172,2.0172,0,0,0-2.016,2.016v6.36c2.358,1.281,2.736,2.562,0,3.843V35.574a2.0169,2.0169,0,0,0,2.016,2.015h4.134a2.0169,2.0169,0,0,0,2.016-2.015V29.213c-2.358-1.281-2.736-2.562,0-3.842V12.446A2.0172,2.0172,0,0,0,26.067,10.43Z'
|
||||
fill='#000000'
|
||||
stroke='#000000'
|
||||
strokeLinecap='round'
|
||||
strokeLinejoin='round'
|
||||
/>
|
||||
</svg>
|
||||
)
|
||||
}
|
||||
|
||||
@@ -80,6 +80,7 @@ import {
|
||||
MySQLIcon,
|
||||
Neo4jIcon,
|
||||
NotionIcon,
|
||||
OnePasswordIcon,
|
||||
OpenAIIcon,
|
||||
OutlookIcon,
|
||||
PackageSearchIcon,
|
||||
@@ -214,6 +215,7 @@ export const blockTypeToIconMap: Record<string, IconComponent> = {
|
||||
neo4j: Neo4jIcon,
|
||||
notion_v2: NotionIcon,
|
||||
onedrive: MicrosoftOneDriveIcon,
|
||||
onepassword: OnePasswordIcon,
|
||||
openai: OpenAIIcon,
|
||||
outlook: OutlookIcon,
|
||||
parallel_ai: ParallelIcon,
|
||||
|
||||
@@ -56,7 +56,7 @@ Switch between modes using the mode selector at the bottom of the input area.
|
||||
Select your preferred AI model using the model selector at the bottom right of the input area.
|
||||
|
||||
**Available Models:**
|
||||
- Claude 4.5 Opus, Sonnet (default), Haiku
|
||||
- Claude 4.6 Opus (default), 4.5 Opus, Sonnet, Haiku
|
||||
- GPT 5.2 Codex, Pro
|
||||
- Gemini 3 Pro
|
||||
|
||||
@@ -190,3 +190,99 @@ Copilot usage is billed per token from the underlying LLM. If you reach your usa
|
||||
<Callout type="info">
|
||||
See the [Cost Calculation page](/execution/costs) for billing details.
|
||||
</Callout>
|
||||
## Copilot MCP
|
||||
|
||||
You can use Copilot as an MCP server in your favorite editor or AI client. This lets you build, test, deploy, and manage Sim workflows directly from tools like Cursor, Claude Code, Claude Desktop, and VS Code.
|
||||
|
||||
### Generating a Copilot API Key
|
||||
|
||||
To connect to the Copilot MCP server, you need a **Copilot API key**:
|
||||
|
||||
1. Go to [sim.ai](https://sim.ai) and sign in
|
||||
2. Navigate to **Settings** → **Copilot**
|
||||
3. Click **Generate API Key**
|
||||
4. Copy the key — it is only shown once
|
||||
|
||||
The key will look like `sk-sim-copilot-...`. You will use this in the configuration below.
|
||||
|
||||
### Cursor
|
||||
|
||||
Add the following to your `.cursor/mcp.json` (project-level) or global Cursor MCP settings:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"sim-copilot": {
|
||||
"url": "https://www.sim.ai/api/mcp/copilot",
|
||||
"headers": {
|
||||
"X-API-Key": "YOUR_COPILOT_API_KEY"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
Replace `YOUR_COPILOT_API_KEY` with the key you generated above.
|
||||
|
||||
### Claude Code
|
||||
|
||||
Run the following command to add the Copilot MCP server:
|
||||
|
||||
```bash
|
||||
claude mcp add sim-copilot \
|
||||
--transport http \
|
||||
https://www.sim.ai/api/mcp/copilot \
|
||||
--header "X-API-Key: YOUR_COPILOT_API_KEY"
|
||||
```
|
||||
|
||||
Replace `YOUR_COPILOT_API_KEY` with your key.
|
||||
|
||||
### Claude Desktop
|
||||
|
||||
Claude Desktop requires [`mcp-remote`](https://www.npmjs.com/package/mcp-remote) to connect to HTTP-based MCP servers. Add the following to your Claude Desktop config file (`~/Library/Application Support/Claude/claude_desktop_config.json` on macOS):
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"sim-copilot": {
|
||||
"command": "npx",
|
||||
"args": [
|
||||
"-y",
|
||||
"mcp-remote",
|
||||
"https://www.sim.ai/api/mcp/copilot",
|
||||
"--header",
|
||||
"X-API-Key: YOUR_COPILOT_API_KEY"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
Replace `YOUR_COPILOT_API_KEY` with your key.
|
||||
|
||||
### VS Code
|
||||
|
||||
Add the following to your VS Code `settings.json` or workspace `.vscode/settings.json`:
|
||||
|
||||
```json
|
||||
{
|
||||
"mcp": {
|
||||
"servers": {
|
||||
"sim-copilot": {
|
||||
"type": "http",
|
||||
"url": "https://www.sim.ai/api/mcp/copilot",
|
||||
"headers": {
|
||||
"X-API-Key": "YOUR_COPILOT_API_KEY"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
Replace `YOUR_COPILOT_API_KEY` with your key.
|
||||
|
||||
<Callout type="info">
|
||||
For self-hosted deployments, replace `https://www.sim.ai` with your self-hosted Sim URL.
|
||||
</Callout>
|
||||
|
||||
|
||||
@@ -25,6 +25,7 @@ With Airweave, you can:
|
||||
In Sim, the Airweave integration empowers your agents to search, summarize, and extract insights from all your organization’s data via a single tool. Use Airweave to drive rich, contextual knowledge retrieval within your workflows—whether answering questions, generating summaries, or supporting dynamic decision-making.
|
||||
{/* MANUAL-CONTENT-END */}
|
||||
|
||||
|
||||
## Usage Instructions
|
||||
|
||||
Search across your synced data sources using Airweave. Supports semantic search with hybrid, neural, or keyword retrieval strategies. Optionally generate AI-powered answers from search results.
|
||||
|
||||
@@ -43,7 +43,6 @@ Retrieve detailed information about a specific Jira issue
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `domain` | string | Yes | Your Jira domain \(e.g., yourcompany.atlassian.net\) |
|
||||
| `projectId` | string | No | Jira project key \(e.g., PROJ\). Optional when retrieving a single issue. |
|
||||
| `issueKey` | string | Yes | Jira issue key to retrieve \(e.g., PROJ-123\) |
|
||||
| `cloudId` | string | No | Jira Cloud ID for the instance. If not provided, it will be fetched using the domain. |
|
||||
|
||||
@@ -51,13 +50,184 @@ Retrieve detailed information about a specific Jira issue
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `ts` | string | Timestamp of the operation |
|
||||
| `issueKey` | string | Issue key \(e.g., PROJ-123\) |
|
||||
| `ts` | string | ISO 8601 timestamp of the operation |
|
||||
| `id` | string | Issue ID |
|
||||
| `key` | string | Issue key \(e.g., PROJ-123\) |
|
||||
| `self` | string | REST API URL for this issue |
|
||||
| `summary` | string | Issue summary |
|
||||
| `description` | json | Issue description content |
|
||||
| `created` | string | Issue creation timestamp |
|
||||
| `updated` | string | Issue last updated timestamp |
|
||||
| `issue` | json | Complete issue object with all fields |
|
||||
| `description` | string | Issue description text \(extracted from ADF\) |
|
||||
| `status` | object | Issue status |
|
||||
| ↳ `id` | string | Status ID |
|
||||
| ↳ `name` | string | Status name \(e.g., Open, In Progress, Done\) |
|
||||
| ↳ `description` | string | Status description |
|
||||
| ↳ `statusCategory` | object | Status category grouping |
|
||||
| ↳ `id` | number | Status category ID |
|
||||
| ↳ `key` | string | Status category key \(e.g., new, indeterminate, done\) |
|
||||
| ↳ `name` | string | Status category name \(e.g., To Do, In Progress, Done\) |
|
||||
| ↳ `colorName` | string | Status category color \(e.g., blue-gray, yellow, green\) |
|
||||
| `issuetype` | object | Issue type |
|
||||
| ↳ `id` | string | Issue type ID |
|
||||
| ↳ `name` | string | Issue type name \(e.g., Task, Bug, Story, Epic\) |
|
||||
| ↳ `description` | string | Issue type description |
|
||||
| ↳ `subtask` | boolean | Whether this is a subtask type |
|
||||
| ↳ `iconUrl` | string | URL to the issue type icon |
|
||||
| `project` | object | Project the issue belongs to |
|
||||
| ↳ `id` | string | Project ID |
|
||||
| ↳ `key` | string | Project key \(e.g., PROJ\) |
|
||||
| ↳ `name` | string | Project name |
|
||||
| ↳ `projectTypeKey` | string | Project type key \(e.g., software, business\) |
|
||||
| `priority` | object | Issue priority |
|
||||
| ↳ `id` | string | Priority ID |
|
||||
| ↳ `name` | string | Priority name \(e.g., Highest, High, Medium, Low, Lowest\) |
|
||||
| ↳ `iconUrl` | string | URL to the priority icon |
|
||||
| `assignee` | object | Assigned user |
|
||||
| ↳ `accountId` | string | Atlassian account ID of the user |
|
||||
| ↳ `displayName` | string | Display name of the user |
|
||||
| ↳ `active` | boolean | Whether the user account is active |
|
||||
| ↳ `emailAddress` | string | Email address of the user |
|
||||
| ↳ `accountType` | string | Type of account \(e.g., atlassian, app, customer\) |
|
||||
| ↳ `avatarUrl` | string | URL to the user avatar \(48x48\) |
|
||||
| ↳ `timeZone` | string | User timezone |
|
||||
| `reporter` | object | Reporter user |
|
||||
| ↳ `accountId` | string | Atlassian account ID of the user |
|
||||
| ↳ `displayName` | string | Display name of the user |
|
||||
| ↳ `active` | boolean | Whether the user account is active |
|
||||
| ↳ `emailAddress` | string | Email address of the user |
|
||||
| ↳ `accountType` | string | Type of account \(e.g., atlassian, app, customer\) |
|
||||
| ↳ `avatarUrl` | string | URL to the user avatar \(48x48\) |
|
||||
| ↳ `timeZone` | string | User timezone |
|
||||
| `creator` | object | Issue creator |
|
||||
| ↳ `accountId` | string | Atlassian account ID of the user |
|
||||
| ↳ `displayName` | string | Display name of the user |
|
||||
| ↳ `active` | boolean | Whether the user account is active |
|
||||
| ↳ `emailAddress` | string | Email address of the user |
|
||||
| ↳ `accountType` | string | Type of account \(e.g., atlassian, app, customer\) |
|
||||
| ↳ `avatarUrl` | string | URL to the user avatar \(48x48\) |
|
||||
| ↳ `timeZone` | string | User timezone |
|
||||
| `labels` | array | Issue labels |
|
||||
| `components` | array | Issue components |
|
||||
| ↳ `id` | string | Component ID |
|
||||
| ↳ `name` | string | Component name |
|
||||
| ↳ `description` | string | Component description |
|
||||
| `fixVersions` | array | Fix versions |
|
||||
| ↳ `id` | string | Version ID |
|
||||
| ↳ `name` | string | Version name |
|
||||
| ↳ `released` | boolean | Whether the version is released |
|
||||
| ↳ `releaseDate` | string | Release date \(YYYY-MM-DD\) |
|
||||
| `resolution` | object | Issue resolution |
|
||||
| ↳ `id` | string | Resolution ID |
|
||||
| ↳ `name` | string | Resolution name \(e.g., Fixed, Duplicate, Won't Fix\) |
|
||||
| ↳ `description` | string | Resolution description |
|
||||
| `duedate` | string | Due date \(YYYY-MM-DD\) |
|
||||
| `created` | string | ISO 8601 timestamp when the issue was created |
|
||||
| `updated` | string | ISO 8601 timestamp when the issue was last updated |
|
||||
| `resolutiondate` | string | ISO 8601 timestamp when the issue was resolved |
|
||||
| `timetracking` | object | Time tracking information |
|
||||
| ↳ `originalEstimate` | string | Original estimate in human-readable format \(e.g., 1w 2d\) |
|
||||
| ↳ `remainingEstimate` | string | Remaining estimate in human-readable format |
|
||||
| ↳ `timeSpent` | string | Time spent in human-readable format |
|
||||
| ↳ `originalEstimateSeconds` | number | Original estimate in seconds |
|
||||
| ↳ `remainingEstimateSeconds` | number | Remaining estimate in seconds |
|
||||
| ↳ `timeSpentSeconds` | number | Time spent in seconds |
|
||||
| `parent` | object | Parent issue \(for subtasks\) |
|
||||
| ↳ `id` | string | Parent issue ID |
|
||||
| ↳ `key` | string | Parent issue key |
|
||||
| ↳ `summary` | string | Parent issue summary |
|
||||
| `issuelinks` | array | Linked issues |
|
||||
| ↳ `id` | string | Issue link ID |
|
||||
| ↳ `type` | object | Link type information |
|
||||
| ↳ `id` | string | Link type ID |
|
||||
| ↳ `name` | string | Link type name \(e.g., Blocks, Relates\) |
|
||||
| ↳ `inward` | string | Inward description \(e.g., is blocked by\) |
|
||||
| ↳ `outward` | string | Outward description \(e.g., blocks\) |
|
||||
| ↳ `inwardIssue` | object | Inward linked issue |
|
||||
| ↳ `id` | string | Issue ID |
|
||||
| ↳ `key` | string | Issue key |
|
||||
| ↳ `statusName` | string | Issue status name |
|
||||
| ↳ `summary` | string | Issue summary |
|
||||
| ↳ `outwardIssue` | object | Outward linked issue |
|
||||
| ↳ `id` | string | Issue ID |
|
||||
| ↳ `key` | string | Issue key |
|
||||
| ↳ `statusName` | string | Issue status name |
|
||||
| ↳ `summary` | string | Issue summary |
|
||||
| `subtasks` | array | Subtask issues |
|
||||
| ↳ `id` | string | Subtask issue ID |
|
||||
| ↳ `key` | string | Subtask issue key |
|
||||
| ↳ `summary` | string | Subtask summary |
|
||||
| ↳ `statusName` | string | Subtask status name |
|
||||
| ↳ `issueTypeName` | string | Subtask issue type name |
|
||||
| `votes` | object | Vote information |
|
||||
| ↳ `votes` | number | Number of votes |
|
||||
| ↳ `hasVoted` | boolean | Whether the current user has voted |
|
||||
| `watches` | object | Watch information |
|
||||
| ↳ `watchCount` | number | Number of watchers |
|
||||
| ↳ `isWatching` | boolean | Whether the current user is watching |
|
||||
| `comments` | array | Issue comments \(fetched separately\) |
|
||||
| ↳ `id` | string | Comment ID |
|
||||
| ↳ `body` | string | Comment body text \(extracted from ADF\) |
|
||||
| ↳ `author` | object | Comment author |
|
||||
| ↳ `accountId` | string | Atlassian account ID of the user |
|
||||
| ↳ `displayName` | string | Display name of the user |
|
||||
| ↳ `active` | boolean | Whether the user account is active |
|
||||
| ↳ `emailAddress` | string | Email address of the user |
|
||||
| ↳ `accountType` | string | Type of account \(e.g., atlassian, app, customer\) |
|
||||
| ↳ `avatarUrl` | string | URL to the user avatar \(48x48\) |
|
||||
| ↳ `timeZone` | string | User timezone |
|
||||
| ↳ `updateAuthor` | object | User who last updated the comment |
|
||||
| ↳ `accountId` | string | Atlassian account ID of the user |
|
||||
| ↳ `displayName` | string | Display name of the user |
|
||||
| ↳ `active` | boolean | Whether the user account is active |
|
||||
| ↳ `emailAddress` | string | Email address of the user |
|
||||
| ↳ `accountType` | string | Type of account \(e.g., atlassian, app, customer\) |
|
||||
| ↳ `avatarUrl` | string | URL to the user avatar \(48x48\) |
|
||||
| ↳ `timeZone` | string | User timezone |
|
||||
| ↳ `created` | string | ISO 8601 timestamp when the comment was created |
|
||||
| ↳ `updated` | string | ISO 8601 timestamp when the comment was last updated |
|
||||
| ↳ `visibility` | object | Comment visibility restriction |
|
||||
| ↳ `type` | string | Restriction type \(e.g., role, group\) |
|
||||
| ↳ `value` | string | Restriction value \(e.g., Administrators\) |
|
||||
| `worklogs` | array | Issue worklogs \(fetched separately\) |
|
||||
| ↳ `id` | string | Worklog ID |
|
||||
| ↳ `author` | object | Worklog author |
|
||||
| ↳ `accountId` | string | Atlassian account ID of the user |
|
||||
| ↳ `displayName` | string | Display name of the user |
|
||||
| ↳ `active` | boolean | Whether the user account is active |
|
||||
| ↳ `emailAddress` | string | Email address of the user |
|
||||
| ↳ `accountType` | string | Type of account \(e.g., atlassian, app, customer\) |
|
||||
| ↳ `avatarUrl` | string | URL to the user avatar \(48x48\) |
|
||||
| ↳ `timeZone` | string | User timezone |
|
||||
| ↳ `updateAuthor` | object | User who last updated the worklog |
|
||||
| ↳ `accountId` | string | Atlassian account ID of the user |
|
||||
| ↳ `displayName` | string | Display name of the user |
|
||||
| ↳ `active` | boolean | Whether the user account is active |
|
||||
| ↳ `emailAddress` | string | Email address of the user |
|
||||
| ↳ `accountType` | string | Type of account \(e.g., atlassian, app, customer\) |
|
||||
| ↳ `avatarUrl` | string | URL to the user avatar \(48x48\) |
|
||||
| ↳ `timeZone` | string | User timezone |
|
||||
| ↳ `comment` | string | Worklog comment text |
|
||||
| ↳ `started` | string | ISO 8601 timestamp when the work started |
|
||||
| ↳ `timeSpent` | string | Time spent in human-readable format \(e.g., 3h 20m\) |
|
||||
| ↳ `timeSpentSeconds` | number | Time spent in seconds |
|
||||
| ↳ `created` | string | ISO 8601 timestamp when the worklog was created |
|
||||
| ↳ `updated` | string | ISO 8601 timestamp when the worklog was last updated |
|
||||
| `attachments` | array | Issue attachments |
|
||||
| ↳ `id` | string | Attachment ID |
|
||||
| ↳ `filename` | string | Attachment file name |
|
||||
| ↳ `mimeType` | string | MIME type of the attachment |
|
||||
| ↳ `size` | number | File size in bytes |
|
||||
| ↳ `content` | string | URL to download the attachment content |
|
||||
| ↳ `thumbnail` | string | URL to the attachment thumbnail |
|
||||
| ↳ `author` | object | Attachment author |
|
||||
| ↳ `accountId` | string | Atlassian account ID of the user |
|
||||
| ↳ `displayName` | string | Display name of the user |
|
||||
| ↳ `active` | boolean | Whether the user account is active |
|
||||
| ↳ `emailAddress` | string | Email address of the user |
|
||||
| ↳ `accountType` | string | Type of account \(e.g., atlassian, app, customer\) |
|
||||
| ↳ `avatarUrl` | string | URL to the user avatar \(48x48\) |
|
||||
| ↳ `timeZone` | string | User timezone |
|
||||
| ↳ `created` | string | ISO 8601 timestamp when the attachment was created |
|
||||
| `issueKey` | string | Issue key \(e.g., PROJ-123\) |
|
||||
| `issue` | json | Complete raw Jira issue object from the API |
|
||||
|
||||
### `jira_update`
|
||||
|
||||
@@ -68,26 +238,32 @@ Update a Jira issue
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `domain` | string | Yes | Your Jira domain \(e.g., yourcompany.atlassian.net\) |
|
||||
| `projectId` | string | No | Jira project key \(e.g., PROJ\). Optional when updating a single issue. |
|
||||
| `issueKey` | string | Yes | Jira issue key to update \(e.g., PROJ-123\) |
|
||||
| `summary` | string | No | New summary for the issue |
|
||||
| `description` | string | No | New description for the issue |
|
||||
| `status` | string | No | New status for the issue |
|
||||
| `priority` | string | No | New priority for the issue |
|
||||
| `assignee` | string | No | New assignee for the issue |
|
||||
| `priority` | string | No | New priority ID or name for the issue \(e.g., "High"\) |
|
||||
| `assignee` | string | No | New assignee account ID for the issue |
|
||||
| `labels` | json | No | Labels to set on the issue \(array of label name strings\) |
|
||||
| `components` | json | No | Components to set on the issue \(array of component name strings\) |
|
||||
| `duedate` | string | No | Due date for the issue \(format: YYYY-MM-DD\) |
|
||||
| `fixVersions` | json | No | Fix versions to set \(array of version name strings\) |
|
||||
| `environment` | string | No | Environment information for the issue |
|
||||
| `customFieldId` | string | No | Custom field ID to update \(e.g., customfield_10001\) |
|
||||
| `customFieldValue` | string | No | Value for the custom field |
|
||||
| `notifyUsers` | boolean | No | Whether to send email notifications about this update \(default: true\) |
|
||||
| `cloudId` | string | No | Jira Cloud ID for the instance. If not provided, it will be fetched using the domain. |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `ts` | string | Timestamp of the operation |
|
||||
| `ts` | string | ISO 8601 timestamp of the operation |
|
||||
| `issueKey` | string | Updated issue key \(e.g., PROJ-123\) |
|
||||
| `summary` | string | Issue summary after update |
|
||||
|
||||
### `jira_write`
|
||||
|
||||
Write a Jira issue
|
||||
Create a new Jira issue
|
||||
|
||||
#### Input
|
||||
|
||||
@@ -100,9 +276,12 @@ Write a Jira issue
|
||||
| `priority` | string | No | Priority ID or name for the issue \(e.g., "10000" or "High"\) |
|
||||
| `assignee` | string | No | Assignee account ID for the issue |
|
||||
| `cloudId` | string | No | Jira Cloud ID for the instance. If not provided, it will be fetched using the domain. |
|
||||
| `issueType` | string | Yes | Type of issue to create \(e.g., Task, Story\) |
|
||||
| `issueType` | string | Yes | Type of issue to create \(e.g., Task, Story, Bug, Epic, Sub-task\) |
|
||||
| `parent` | json | No | Parent issue key for creating subtasks \(e.g., \{ "key": "PROJ-123" \}\) |
|
||||
| `labels` | array | No | Labels for the issue \(array of label names\) |
|
||||
| `components` | array | No | Components for the issue \(array of component names\) |
|
||||
| `duedate` | string | No | Due date for the issue \(format: YYYY-MM-DD\) |
|
||||
| `fixVersions` | array | No | Fix versions for the issue \(array of version names\) |
|
||||
| `reporter` | string | No | Reporter account ID for the issue |
|
||||
| `environment` | string | No | Environment information for the issue |
|
||||
| `customFieldId` | string | No | Custom field ID \(e.g., customfield_10001\) |
|
||||
@@ -112,15 +291,17 @@ Write a Jira issue
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `ts` | string | Timestamp of the operation |
|
||||
| `ts` | string | ISO 8601 timestamp of the operation |
|
||||
| `id` | string | Created issue ID |
|
||||
| `issueKey` | string | Created issue key \(e.g., PROJ-123\) |
|
||||
| `self` | string | REST API URL for the created issue |
|
||||
| `summary` | string | Issue summary |
|
||||
| `url` | string | URL to the created issue |
|
||||
| `assigneeId` | string | Account ID of the assigned user \(if assigned\) |
|
||||
| `url` | string | URL to the created issue in Jira |
|
||||
| `assigneeId` | string | Account ID of the assigned user \(null if no assignee was set\) |
|
||||
|
||||
### `jira_bulk_read`
|
||||
|
||||
Retrieve multiple Jira issues in bulk
|
||||
Retrieve multiple Jira issues from a project in bulk
|
||||
|
||||
#### Input
|
||||
|
||||
@@ -134,7 +315,30 @@ Retrieve multiple Jira issues in bulk
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `issues` | array | Array of Jira issues with ts, summary, description, created, and updated timestamps |
|
||||
| `ts` | string | ISO 8601 timestamp of the operation |
|
||||
| `total` | number | Total number of issues in the project \(may not always be available\) |
|
||||
| `issues` | array | Array of Jira issues |
|
||||
| ↳ `id` | string | Issue ID |
|
||||
| ↳ `key` | string | Issue key \(e.g., PROJ-123\) |
|
||||
| ↳ `self` | string | REST API URL for this issue |
|
||||
| ↳ `summary` | string | Issue summary |
|
||||
| ↳ `description` | string | Issue description text |
|
||||
| ↳ `status` | object | Issue status |
|
||||
| ↳ `id` | string | Status ID |
|
||||
| ↳ `name` | string | Status name |
|
||||
| ↳ `issuetype` | object | Issue type |
|
||||
| ↳ `id` | string | Issue type ID |
|
||||
| ↳ `name` | string | Issue type name |
|
||||
| ↳ `priority` | object | Issue priority |
|
||||
| ↳ `id` | string | Priority ID |
|
||||
| ↳ `name` | string | Priority name |
|
||||
| ↳ `assignee` | object | Assigned user |
|
||||
| ↳ `accountId` | string | Atlassian account ID |
|
||||
| ↳ `displayName` | string | Display name |
|
||||
| ↳ `created` | string | ISO 8601 creation timestamp |
|
||||
| ↳ `updated` | string | ISO 8601 last updated timestamp |
|
||||
| `nextPageToken` | string | Cursor token for the next page. Null when no more results. |
|
||||
| `isLast` | boolean | Whether this is the last page of results |
|
||||
|
||||
### `jira_delete_issue`
|
||||
|
||||
@@ -153,7 +357,7 @@ Delete a Jira issue
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `ts` | string | Timestamp of the operation |
|
||||
| `ts` | string | ISO 8601 timestamp of the operation |
|
||||
| `issueKey` | string | Deleted issue key |
|
||||
|
||||
### `jira_assign_issue`
|
||||
@@ -173,9 +377,9 @@ Assign a Jira issue to a user
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `ts` | string | Timestamp of the operation |
|
||||
| `ts` | string | ISO 8601 timestamp of the operation |
|
||||
| `issueKey` | string | Issue key that was assigned |
|
||||
| `assigneeId` | string | Account ID of the assignee |
|
||||
| `assigneeId` | string | Account ID of the assignee \(use "-1" for auto-assign, null to unassign\) |
|
||||
|
||||
### `jira_transition_issue`
|
||||
|
||||
@@ -189,15 +393,20 @@ Move a Jira issue between workflow statuses (e.g., To Do -> In Progress)
|
||||
| `issueKey` | string | Yes | Jira issue key to transition \(e.g., PROJ-123\) |
|
||||
| `transitionId` | string | Yes | ID of the transition to execute \(e.g., "11" for "To Do", "21" for "In Progress"\) |
|
||||
| `comment` | string | No | Optional comment to add when transitioning the issue |
|
||||
| `resolution` | string | No | Resolution name to set during transition \(e.g., "Fixed", "Won\'t Fix"\) |
|
||||
| `cloudId` | string | No | Jira Cloud ID for the instance. If not provided, it will be fetched using the domain. |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `ts` | string | Timestamp of the operation |
|
||||
| `ts` | string | ISO 8601 timestamp of the operation |
|
||||
| `issueKey` | string | Issue key that was transitioned |
|
||||
| `transitionId` | string | Applied transition ID |
|
||||
| `transitionName` | string | Applied transition name |
|
||||
| `toStatus` | object | Target status after transition |
|
||||
| ↳ `id` | string | Status ID |
|
||||
| ↳ `name` | string | Status name |
|
||||
|
||||
### `jira_search_issues`
|
||||
|
||||
@@ -209,20 +418,77 @@ Search for Jira issues using JQL (Jira Query Language)
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `domain` | string | Yes | Your Jira domain \(e.g., yourcompany.atlassian.net\) |
|
||||
| `jql` | string | Yes | JQL query string to search for issues \(e.g., "project = PROJ AND status = Open"\) |
|
||||
| `startAt` | number | No | The index of the first result to return \(for pagination\) |
|
||||
| `maxResults` | number | No | Maximum number of results to return \(default: 50\) |
|
||||
| `fields` | array | No | Array of field names to return \(default: \['summary', 'status', 'assignee', 'created', 'updated'\]\) |
|
||||
| `nextPageToken` | string | No | Cursor token for the next page of results. Omit for the first page. |
|
||||
| `maxResults` | number | No | Maximum number of results to return per page \(default: 50\) |
|
||||
| `fields` | array | No | Array of field names to return \(default: all navigable\). Use "*all" for every field. |
|
||||
| `cloudId` | string | No | Jira Cloud ID for the instance. If not provided, it will be fetched using the domain. |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `ts` | string | Timestamp of the operation |
|
||||
| `total` | number | Total number of matching issues |
|
||||
| `startAt` | number | Pagination start index |
|
||||
| `maxResults` | number | Maximum results per page |
|
||||
| `issues` | array | Array of matching issues with key, summary, status, assignee, created, updated |
|
||||
| `ts` | string | ISO 8601 timestamp of the operation |
|
||||
| `issues` | array | Array of matching issues |
|
||||
| ↳ `id` | string | Issue ID |
|
||||
| ↳ `key` | string | Issue key \(e.g., PROJ-123\) |
|
||||
| ↳ `self` | string | REST API URL for this issue |
|
||||
| ↳ `summary` | string | Issue summary |
|
||||
| ↳ `description` | string | Issue description text \(extracted from ADF\) |
|
||||
| ↳ `status` | object | Issue status |
|
||||
| ↳ `id` | string | Status ID |
|
||||
| ↳ `name` | string | Status name \(e.g., Open, In Progress, Done\) |
|
||||
| ↳ `description` | string | Status description |
|
||||
| ↳ `statusCategory` | object | Status category grouping |
|
||||
| ↳ `id` | number | Status category ID |
|
||||
| ↳ `key` | string | Status category key \(e.g., new, indeterminate, done\) |
|
||||
| ↳ `name` | string | Status category name \(e.g., To Do, In Progress, Done\) |
|
||||
| ↳ `colorName` | string | Status category color \(e.g., blue-gray, yellow, green\) |
|
||||
| ↳ `issuetype` | object | Issue type |
|
||||
| ↳ `id` | string | Issue type ID |
|
||||
| ↳ `name` | string | Issue type name \(e.g., Task, Bug, Story, Epic\) |
|
||||
| ↳ `description` | string | Issue type description |
|
||||
| ↳ `subtask` | boolean | Whether this is a subtask type |
|
||||
| ↳ `iconUrl` | string | URL to the issue type icon |
|
||||
| ↳ `project` | object | Project the issue belongs to |
|
||||
| ↳ `id` | string | Project ID |
|
||||
| ↳ `key` | string | Project key \(e.g., PROJ\) |
|
||||
| ↳ `name` | string | Project name |
|
||||
| ↳ `projectTypeKey` | string | Project type key \(e.g., software, business\) |
|
||||
| ↳ `priority` | object | Issue priority |
|
||||
| ↳ `id` | string | Priority ID |
|
||||
| ↳ `name` | string | Priority name \(e.g., Highest, High, Medium, Low, Lowest\) |
|
||||
| ↳ `iconUrl` | string | URL to the priority icon |
|
||||
| ↳ `assignee` | object | Assigned user |
|
||||
| ↳ `accountId` | string | Atlassian account ID of the user |
|
||||
| ↳ `displayName` | string | Display name of the user |
|
||||
| ↳ `active` | boolean | Whether the user account is active |
|
||||
| ↳ `emailAddress` | string | Email address of the user |
|
||||
| ↳ `accountType` | string | Type of account \(e.g., atlassian, app, customer\) |
|
||||
| ↳ `avatarUrl` | string | URL to the user avatar \(48x48\) |
|
||||
| ↳ `timeZone` | string | User timezone |
|
||||
| ↳ `reporter` | object | Reporter user |
|
||||
| ↳ `accountId` | string | Atlassian account ID of the user |
|
||||
| ↳ `displayName` | string | Display name of the user |
|
||||
| ↳ `active` | boolean | Whether the user account is active |
|
||||
| ↳ `emailAddress` | string | Email address of the user |
|
||||
| ↳ `accountType` | string | Type of account \(e.g., atlassian, app, customer\) |
|
||||
| ↳ `avatarUrl` | string | URL to the user avatar \(48x48\) |
|
||||
| ↳ `timeZone` | string | User timezone |
|
||||
| ↳ `labels` | array | Issue labels |
|
||||
| ↳ `components` | array | Issue components |
|
||||
| ↳ `id` | string | Component ID |
|
||||
| ↳ `name` | string | Component name |
|
||||
| ↳ `description` | string | Component description |
|
||||
| ↳ `resolution` | object | Issue resolution |
|
||||
| ↳ `id` | string | Resolution ID |
|
||||
| ↳ `name` | string | Resolution name \(e.g., Fixed, Duplicate, Won't Fix\) |
|
||||
| ↳ `description` | string | Resolution description |
|
||||
| ↳ `duedate` | string | Due date \(YYYY-MM-DD\) |
|
||||
| ↳ `created` | string | ISO 8601 timestamp when the issue was created |
|
||||
| ↳ `updated` | string | ISO 8601 timestamp when the issue was last updated |
|
||||
| `nextPageToken` | string | Cursor token for the next page. Null when no more results. |
|
||||
| `isLast` | boolean | Whether this is the last page of results |
|
||||
| `total` | number | Total number of matching issues \(may not always be available\) |
|
||||
|
||||
### `jira_add_comment`
|
||||
|
||||
@@ -235,16 +501,27 @@ Add a comment to a Jira issue
|
||||
| `domain` | string | Yes | Your Jira domain \(e.g., yourcompany.atlassian.net\) |
|
||||
| `issueKey` | string | Yes | Jira issue key to add comment to \(e.g., PROJ-123\) |
|
||||
| `body` | string | Yes | Comment body text |
|
||||
| `visibility` | json | No | Restrict comment visibility. Object with "type" \("role" or "group"\) and "value" \(role/group name\). |
|
||||
| `cloudId` | string | No | Jira Cloud ID for the instance. If not provided, it will be fetched using the domain. |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `ts` | string | Timestamp of the operation |
|
||||
| `ts` | string | ISO 8601 timestamp of the operation |
|
||||
| `issueKey` | string | Issue key the comment was added to |
|
||||
| `commentId` | string | Created comment ID |
|
||||
| `body` | string | Comment text content |
|
||||
| `author` | object | Comment author |
|
||||
| ↳ `accountId` | string | Atlassian account ID of the user |
|
||||
| ↳ `displayName` | string | Display name of the user |
|
||||
| ↳ `active` | boolean | Whether the user account is active |
|
||||
| ↳ `emailAddress` | string | Email address of the user |
|
||||
| ↳ `accountType` | string | Type of account \(e.g., atlassian, app, customer\) |
|
||||
| ↳ `avatarUrl` | string | URL to the user avatar \(48x48\) |
|
||||
| ↳ `timeZone` | string | User timezone |
|
||||
| `created` | string | ISO 8601 timestamp when the comment was created |
|
||||
| `updated` | string | ISO 8601 timestamp when the comment was last updated |
|
||||
|
||||
### `jira_get_comments`
|
||||
|
||||
@@ -258,16 +535,42 @@ Get all comments from a Jira issue
|
||||
| `issueKey` | string | Yes | Jira issue key to get comments from \(e.g., PROJ-123\) |
|
||||
| `startAt` | number | No | Index of the first comment to return \(default: 0\) |
|
||||
| `maxResults` | number | No | Maximum number of comments to return \(default: 50\) |
|
||||
| `orderBy` | string | No | Sort order for comments: "-created" for newest first, "created" for oldest first |
|
||||
| `cloudId` | string | No | Jira Cloud ID for the instance. If not provided, it will be fetched using the domain. |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `ts` | string | Timestamp of the operation |
|
||||
| `ts` | string | ISO 8601 timestamp of the operation |
|
||||
| `issueKey` | string | Issue key |
|
||||
| `total` | number | Total number of comments |
|
||||
| `comments` | array | Array of comments with id, author, body, created, updated |
|
||||
| `startAt` | number | Pagination start index |
|
||||
| `maxResults` | number | Maximum results per page |
|
||||
| `comments` | array | Array of comments |
|
||||
| ↳ `id` | string | Comment ID |
|
||||
| ↳ `body` | string | Comment body text \(extracted from ADF\) |
|
||||
| ↳ `author` | object | Comment author |
|
||||
| ↳ `accountId` | string | Atlassian account ID of the user |
|
||||
| ↳ `displayName` | string | Display name of the user |
|
||||
| ↳ `active` | boolean | Whether the user account is active |
|
||||
| ↳ `emailAddress` | string | Email address of the user |
|
||||
| ↳ `accountType` | string | Type of account \(e.g., atlassian, app, customer\) |
|
||||
| ↳ `avatarUrl` | string | URL to the user avatar \(48x48\) |
|
||||
| ↳ `timeZone` | string | User timezone |
|
||||
| ↳ `updateAuthor` | object | User who last updated the comment |
|
||||
| ↳ `accountId` | string | Atlassian account ID of the user |
|
||||
| ↳ `displayName` | string | Display name of the user |
|
||||
| ↳ `active` | boolean | Whether the user account is active |
|
||||
| ↳ `emailAddress` | string | Email address of the user |
|
||||
| ↳ `accountType` | string | Type of account \(e.g., atlassian, app, customer\) |
|
||||
| ↳ `avatarUrl` | string | URL to the user avatar \(48x48\) |
|
||||
| ↳ `timeZone` | string | User timezone |
|
||||
| ↳ `created` | string | ISO 8601 timestamp when the comment was created |
|
||||
| ↳ `updated` | string | ISO 8601 timestamp when the comment was last updated |
|
||||
| ↳ `visibility` | object | Comment visibility restriction |
|
||||
| ↳ `type` | string | Restriction type \(e.g., role, group\) |
|
||||
| ↳ `value` | string | Restriction value \(e.g., Administrators\) |
|
||||
|
||||
### `jira_update_comment`
|
||||
|
||||
@@ -281,16 +584,27 @@ Update an existing comment on a Jira issue
|
||||
| `issueKey` | string | Yes | Jira issue key containing the comment \(e.g., PROJ-123\) |
|
||||
| `commentId` | string | Yes | ID of the comment to update |
|
||||
| `body` | string | Yes | Updated comment text |
|
||||
| `visibility` | json | No | Restrict comment visibility. Object with "type" \("role" or "group"\) and "value" \(role/group name\). |
|
||||
| `cloudId` | string | No | Jira Cloud ID for the instance. If not provided, it will be fetched using the domain. |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `ts` | string | Timestamp of the operation |
|
||||
| `ts` | string | ISO 8601 timestamp of the operation |
|
||||
| `issueKey` | string | Issue key |
|
||||
| `commentId` | string | Updated comment ID |
|
||||
| `body` | string | Updated comment text |
|
||||
| `author` | object | Comment author |
|
||||
| ↳ `accountId` | string | Atlassian account ID of the user |
|
||||
| ↳ `displayName` | string | Display name of the user |
|
||||
| ↳ `active` | boolean | Whether the user account is active |
|
||||
| ↳ `emailAddress` | string | Email address of the user |
|
||||
| ↳ `accountType` | string | Type of account \(e.g., atlassian, app, customer\) |
|
||||
| ↳ `avatarUrl` | string | URL to the user avatar \(48x48\) |
|
||||
| ↳ `timeZone` | string | User timezone |
|
||||
| `created` | string | ISO 8601 timestamp when the comment was created |
|
||||
| `updated` | string | ISO 8601 timestamp when the comment was last updated |
|
||||
|
||||
### `jira_delete_comment`
|
||||
|
||||
@@ -309,7 +623,7 @@ Delete a comment from a Jira issue
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `ts` | string | Timestamp of the operation |
|
||||
| `ts` | string | ISO 8601 timestamp of the operation |
|
||||
| `issueKey` | string | Issue key |
|
||||
| `commentId` | string | Deleted comment ID |
|
||||
|
||||
@@ -329,9 +643,24 @@ Get all attachments from a Jira issue
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `ts` | string | Timestamp of the operation |
|
||||
| `ts` | string | ISO 8601 timestamp of the operation |
|
||||
| `issueKey` | string | Issue key |
|
||||
| `attachments` | array | Array of attachments with id, filename, size, mimeType, created, author |
|
||||
| `attachments` | array | Array of attachments |
|
||||
| ↳ `id` | string | Attachment ID |
|
||||
| ↳ `filename` | string | Attachment file name |
|
||||
| ↳ `mimeType` | string | MIME type of the attachment |
|
||||
| ↳ `size` | number | File size in bytes |
|
||||
| ↳ `content` | string | URL to download the attachment content |
|
||||
| ↳ `thumbnail` | string | URL to the attachment thumbnail |
|
||||
| ↳ `author` | object | Attachment author |
|
||||
| ↳ `accountId` | string | Atlassian account ID of the user |
|
||||
| ↳ `displayName` | string | Display name of the user |
|
||||
| ↳ `active` | boolean | Whether the user account is active |
|
||||
| ↳ `emailAddress` | string | Email address of the user |
|
||||
| ↳ `accountType` | string | Type of account \(e.g., atlassian, app, customer\) |
|
||||
| ↳ `avatarUrl` | string | URL to the user avatar \(48x48\) |
|
||||
| ↳ `timeZone` | string | User timezone |
|
||||
| ↳ `created` | string | ISO 8601 timestamp when the attachment was created |
|
||||
|
||||
### `jira_add_attachment`
|
||||
|
||||
@@ -350,10 +679,19 @@ Add attachments to a Jira issue
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `ts` | string | Timestamp of the operation |
|
||||
| `ts` | string | ISO 8601 timestamp of the operation |
|
||||
| `issueKey` | string | Issue key |
|
||||
| `attachmentIds` | json | IDs of uploaded attachments |
|
||||
| `files` | file[] | Uploaded attachment files |
|
||||
| `attachments` | array | Uploaded attachments |
|
||||
| ↳ `id` | string | Attachment ID |
|
||||
| ↳ `filename` | string | Attachment file name |
|
||||
| ↳ `mimeType` | string | MIME type |
|
||||
| ↳ `size` | number | File size in bytes |
|
||||
| ↳ `content` | string | URL to download the attachment |
|
||||
| `attachmentIds` | array | Array of attachment IDs |
|
||||
| `files` | array | Uploaded file metadata |
|
||||
| ↳ `name` | string | File name |
|
||||
| ↳ `mimeType` | string | MIME type |
|
||||
| ↳ `size` | number | File size in bytes |
|
||||
|
||||
### `jira_delete_attachment`
|
||||
|
||||
@@ -371,7 +709,7 @@ Delete an attachment from a Jira issue
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `ts` | string | Timestamp of the operation |
|
||||
| `ts` | string | ISO 8601 timestamp of the operation |
|
||||
| `attachmentId` | string | Deleted attachment ID |
|
||||
|
||||
### `jira_add_worklog`
|
||||
@@ -387,16 +725,28 @@ Add a time tracking worklog entry to a Jira issue
|
||||
| `timeSpentSeconds` | number | Yes | Time spent in seconds |
|
||||
| `comment` | string | No | Optional comment for the worklog entry |
|
||||
| `started` | string | No | Optional start time in ISO format \(defaults to current time\) |
|
||||
| `visibility` | json | No | Restrict worklog visibility. Object with "type" \("role" or "group"\) and "value" \(role/group name\). |
|
||||
| `cloudId` | string | No | Jira Cloud ID for the instance. If not provided, it will be fetched using the domain. |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `ts` | string | Timestamp of the operation |
|
||||
| `ts` | string | ISO 8601 timestamp of the operation |
|
||||
| `issueKey` | string | Issue key the worklog was added to |
|
||||
| `worklogId` | string | Created worklog ID |
|
||||
| `timeSpent` | string | Time spent in human-readable format \(e.g., 3h 20m\) |
|
||||
| `timeSpentSeconds` | number | Time spent in seconds |
|
||||
| `author` | object | Worklog author |
|
||||
| ↳ `accountId` | string | Atlassian account ID of the user |
|
||||
| ↳ `displayName` | string | Display name of the user |
|
||||
| ↳ `active` | boolean | Whether the user account is active |
|
||||
| ↳ `emailAddress` | string | Email address of the user |
|
||||
| ↳ `accountType` | string | Type of account \(e.g., atlassian, app, customer\) |
|
||||
| ↳ `avatarUrl` | string | URL to the user avatar \(48x48\) |
|
||||
| ↳ `timeZone` | string | User timezone |
|
||||
| `started` | string | ISO 8601 timestamp when the work started |
|
||||
| `created` | string | ISO 8601 timestamp when the worklog was created |
|
||||
|
||||
### `jira_get_worklogs`
|
||||
|
||||
@@ -416,10 +766,35 @@ Get all worklog entries from a Jira issue
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `ts` | string | Timestamp of the operation |
|
||||
| `ts` | string | ISO 8601 timestamp of the operation |
|
||||
| `issueKey` | string | Issue key |
|
||||
| `total` | number | Total number of worklogs |
|
||||
| `worklogs` | array | Array of worklogs with id, author, timeSpentSeconds, timeSpent, comment, created, updated, started |
|
||||
| `startAt` | number | Pagination start index |
|
||||
| `maxResults` | number | Maximum results per page |
|
||||
| `worklogs` | array | Array of worklogs |
|
||||
| ↳ `id` | string | Worklog ID |
|
||||
| ↳ `author` | object | Worklog author |
|
||||
| ↳ `accountId` | string | Atlassian account ID of the user |
|
||||
| ↳ `displayName` | string | Display name of the user |
|
||||
| ↳ `active` | boolean | Whether the user account is active |
|
||||
| ↳ `emailAddress` | string | Email address of the user |
|
||||
| ↳ `accountType` | string | Type of account \(e.g., atlassian, app, customer\) |
|
||||
| ↳ `avatarUrl` | string | URL to the user avatar \(48x48\) |
|
||||
| ↳ `timeZone` | string | User timezone |
|
||||
| ↳ `updateAuthor` | object | User who last updated the worklog |
|
||||
| ↳ `accountId` | string | Atlassian account ID of the user |
|
||||
| ↳ `displayName` | string | Display name of the user |
|
||||
| ↳ `active` | boolean | Whether the user account is active |
|
||||
| ↳ `emailAddress` | string | Email address of the user |
|
||||
| ↳ `accountType` | string | Type of account \(e.g., atlassian, app, customer\) |
|
||||
| ↳ `avatarUrl` | string | URL to the user avatar \(48x48\) |
|
||||
| ↳ `timeZone` | string | User timezone |
|
||||
| ↳ `comment` | string | Worklog comment text |
|
||||
| ↳ `started` | string | ISO 8601 timestamp when the work started |
|
||||
| ↳ `timeSpent` | string | Time spent in human-readable format \(e.g., 3h 20m\) |
|
||||
| ↳ `timeSpentSeconds` | number | Time spent in seconds |
|
||||
| ↳ `created` | string | ISO 8601 timestamp when the worklog was created |
|
||||
| ↳ `updated` | string | ISO 8601 timestamp when the worklog was last updated |
|
||||
|
||||
### `jira_update_worklog`
|
||||
|
||||
@@ -435,15 +810,38 @@ Update an existing worklog entry on a Jira issue
|
||||
| `timeSpentSeconds` | number | No | Time spent in seconds |
|
||||
| `comment` | string | No | Optional comment for the worklog entry |
|
||||
| `started` | string | No | Optional start time in ISO format |
|
||||
| `visibility` | json | No | Restrict worklog visibility. Object with "type" \("role" or "group"\) and "value" \(role/group name\). |
|
||||
| `cloudId` | string | No | Jira Cloud ID for the instance. If not provided, it will be fetched using the domain. |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `ts` | string | Timestamp of the operation |
|
||||
| `ts` | string | ISO 8601 timestamp of the operation |
|
||||
| `issueKey` | string | Issue key |
|
||||
| `worklogId` | string | Updated worklog ID |
|
||||
| `timeSpent` | string | Time spent in human-readable format \(e.g., 3h 20m\) |
|
||||
| `timeSpentSeconds` | number | Time spent in seconds |
|
||||
| `comment` | string | Worklog comment text |
|
||||
| `author` | object | Worklog author |
|
||||
| ↳ `accountId` | string | Atlassian account ID of the user |
|
||||
| ↳ `displayName` | string | Display name of the user |
|
||||
| ↳ `active` | boolean | Whether the user account is active |
|
||||
| ↳ `emailAddress` | string | Email address of the user |
|
||||
| ↳ `accountType` | string | Type of account \(e.g., atlassian, app, customer\) |
|
||||
| ↳ `avatarUrl` | string | URL to the user avatar \(48x48\) |
|
||||
| ↳ `timeZone` | string | User timezone |
|
||||
| `updateAuthor` | object | User who last updated the worklog |
|
||||
| ↳ `accountId` | string | Atlassian account ID of the user |
|
||||
| ↳ `displayName` | string | Display name of the user |
|
||||
| ↳ `active` | boolean | Whether the user account is active |
|
||||
| ↳ `emailAddress` | string | Email address of the user |
|
||||
| ↳ `accountType` | string | Type of account \(e.g., atlassian, app, customer\) |
|
||||
| ↳ `avatarUrl` | string | URL to the user avatar \(48x48\) |
|
||||
| ↳ `timeZone` | string | User timezone |
|
||||
| `started` | string | ISO 8601 timestamp when the work started |
|
||||
| `created` | string | ISO 8601 timestamp when the worklog was created |
|
||||
| `updated` | string | ISO 8601 timestamp when the worklog was last updated |
|
||||
|
||||
### `jira_delete_worklog`
|
||||
|
||||
@@ -462,7 +860,7 @@ Delete a worklog entry from a Jira issue
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `ts` | string | Timestamp of the operation |
|
||||
| `ts` | string | ISO 8601 timestamp of the operation |
|
||||
| `issueKey` | string | Issue key |
|
||||
| `worklogId` | string | Deleted worklog ID |
|
||||
|
||||
@@ -485,7 +883,7 @@ Create a link relationship between two Jira issues
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `ts` | string | Timestamp of the operation |
|
||||
| `ts` | string | ISO 8601 timestamp of the operation |
|
||||
| `inwardIssue` | string | Inward issue key |
|
||||
| `outwardIssue` | string | Outward issue key |
|
||||
| `linkType` | string | Type of issue link |
|
||||
@@ -507,7 +905,7 @@ Delete a link between two Jira issues
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `ts` | string | Timestamp of the operation |
|
||||
| `ts` | string | ISO 8601 timestamp of the operation |
|
||||
| `linkId` | string | Deleted link ID |
|
||||
|
||||
### `jira_add_watcher`
|
||||
@@ -527,7 +925,7 @@ Add a watcher to a Jira issue to receive notifications about updates
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `ts` | string | Timestamp of the operation |
|
||||
| `ts` | string | ISO 8601 timestamp of the operation |
|
||||
| `issueKey` | string | Issue key |
|
||||
| `watcherAccountId` | string | Added watcher account ID |
|
||||
|
||||
@@ -548,7 +946,7 @@ Remove a watcher from a Jira issue
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `ts` | string | Timestamp of the operation |
|
||||
| `ts` | string | ISO 8601 timestamp of the operation |
|
||||
| `issueKey` | string | Issue key |
|
||||
| `watcherAccountId` | string | Removed watcher account ID |
|
||||
|
||||
@@ -570,8 +968,15 @@ Get Jira users. If an account ID is provided, returns a single user. Otherwise,
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `ts` | string | Timestamp of the operation |
|
||||
| `users` | json | Array of users with accountId, displayName, emailAddress, active status, and avatarUrls |
|
||||
| `ts` | string | ISO 8601 timestamp of the operation |
|
||||
| `users` | array | Array of Jira users |
|
||||
| ↳ `accountId` | string | Atlassian account ID of the user |
|
||||
| ↳ `displayName` | string | Display name of the user |
|
||||
| ↳ `active` | boolean | Whether the user account is active |
|
||||
| ↳ `emailAddress` | string | Email address of the user |
|
||||
| ↳ `accountType` | string | Type of account \(e.g., atlassian, app, customer\) |
|
||||
| ↳ `avatarUrl` | string | URL to the user avatar \(48x48\) |
|
||||
| ↳ `timeZone` | string | User timezone |
|
||||
| `total` | number | Total number of users returned |
|
||||
| `startAt` | number | Pagination start index |
|
||||
| `maxResults` | number | Maximum results per page |
|
||||
|
||||
@@ -46,6 +46,7 @@ Get all service desks from Jira Service Management
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `domain` | string | Yes | Your Jira domain \(e.g., yourcompany.atlassian.net\) |
|
||||
| `cloudId` | string | No | Jira Cloud ID for the instance |
|
||||
| `expand` | string | No | Comma-separated fields to expand in the response |
|
||||
| `start` | number | No | Start index for pagination \(e.g., 0, 50, 100\) |
|
||||
| `limit` | number | No | Maximum results to return \(e.g., 10, 25, 50\) |
|
||||
|
||||
@@ -54,7 +55,14 @@ Get all service desks from Jira Service Management
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `ts` | string | Timestamp of the operation |
|
||||
| `serviceDesks` | json | Array of service desks |
|
||||
| `serviceDesks` | array | List of service desks |
|
||||
| ↳ `id` | string | Service desk ID |
|
||||
| ↳ `projectId` | string | Associated Jira project ID |
|
||||
| ↳ `projectName` | string | Associated project name |
|
||||
| ↳ `projectKey` | string | Associated project key |
|
||||
| ↳ `name` | string | Service desk name |
|
||||
| ↳ `description` | string | Service desk description |
|
||||
| ↳ `leadDisplayName` | string | Project lead display name |
|
||||
| `total` | number | Total number of service desks |
|
||||
| `isLastPage` | boolean | Whether this is the last page |
|
||||
|
||||
@@ -69,6 +77,9 @@ Get request types for a service desk in Jira Service Management
|
||||
| `domain` | string | Yes | Your Jira domain \(e.g., yourcompany.atlassian.net\) |
|
||||
| `cloudId` | string | No | Jira Cloud ID for the instance |
|
||||
| `serviceDeskId` | string | Yes | Service Desk ID \(e.g., "1", "2"\) |
|
||||
| `searchQuery` | string | No | Filter request types by name |
|
||||
| `groupId` | string | No | Filter by request type group ID |
|
||||
| `expand` | string | No | Comma-separated fields to expand in the response |
|
||||
| `start` | number | No | Start index for pagination \(e.g., 0, 50, 100\) |
|
||||
| `limit` | number | No | Maximum results to return \(e.g., 10, 25, 50\) |
|
||||
|
||||
@@ -77,7 +88,16 @@ Get request types for a service desk in Jira Service Management
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `ts` | string | Timestamp of the operation |
|
||||
| `requestTypes` | json | Array of request types |
|
||||
| `requestTypes` | array | List of request types |
|
||||
| ↳ `id` | string | Request type ID |
|
||||
| ↳ `name` | string | Request type name |
|
||||
| ↳ `description` | string | Request type description |
|
||||
| ↳ `helpText` | string | Help text for customers |
|
||||
| ↳ `issueTypeId` | string | Associated Jira issue type ID |
|
||||
| ↳ `serviceDeskId` | string | Parent service desk ID |
|
||||
| ↳ `groupIds` | json | Groups this request type belongs to |
|
||||
| ↳ `icon` | json | Request type icon with id and links |
|
||||
| ↳ `restrictionStatus` | string | OPEN or RESTRICTED |
|
||||
| `total` | number | Total number of request types |
|
||||
| `isLastPage` | boolean | Whether this is the last page |
|
||||
|
||||
@@ -96,6 +116,9 @@ Create a new service request in Jira Service Management
|
||||
| `summary` | string | Yes | Summary/title for the service request |
|
||||
| `description` | string | No | Description for the service request |
|
||||
| `raiseOnBehalfOf` | string | No | Account ID of customer to raise request on behalf of |
|
||||
| `requestFieldValues` | json | No | Custom field values as key-value pairs \(overrides summary/description if provided\) |
|
||||
| `requestParticipants` | string | No | Comma-separated account IDs to add as request participants |
|
||||
| `channel` | string | No | Channel the request originates from \(e.g., portal, email\) |
|
||||
|
||||
#### Output
|
||||
|
||||
@@ -106,6 +129,9 @@ Create a new service request in Jira Service Management
|
||||
| `issueKey` | string | Created request issue key \(e.g., SD-123\) |
|
||||
| `requestTypeId` | string | Request type ID |
|
||||
| `serviceDeskId` | string | Service desk ID |
|
||||
| `createdDate` | json | Creation date with iso8601, friendly, epochMillis |
|
||||
| `currentStatus` | json | Current status with status name and category |
|
||||
| `reporter` | json | Reporter user with accountId, displayName, emailAddress |
|
||||
| `success` | boolean | Whether the request was created successfully |
|
||||
| `url` | string | URL to the created request |
|
||||
|
||||
@@ -120,12 +146,33 @@ Get a single service request from Jira Service Management
|
||||
| `domain` | string | Yes | Your Jira domain \(e.g., yourcompany.atlassian.net\) |
|
||||
| `cloudId` | string | No | Jira Cloud ID for the instance |
|
||||
| `issueIdOrKey` | string | Yes | Issue ID or key \(e.g., SD-123\) |
|
||||
| `expand` | string | No | Comma-separated fields to expand: participant, status, sla, requestType, serviceDesk, attachment, comment, action |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `ts` | string | Timestamp of the operation |
|
||||
| `issueId` | string | Jira issue ID |
|
||||
| `issueKey` | string | Issue key \(e.g., SD-123\) |
|
||||
| `requestTypeId` | string | Request type ID |
|
||||
| `serviceDeskId` | string | Service desk ID |
|
||||
| `createdDate` | json | Creation date with iso8601, friendly, epochMillis |
|
||||
| `currentStatus` | object | Current request status |
|
||||
| ↳ `status` | string | Status name |
|
||||
| ↳ `statusCategory` | string | Status category \(NEW, INDETERMINATE, DONE\) |
|
||||
| ↳ `statusDate` | json | Status change date with iso8601, friendly, epochMillis |
|
||||
| `reporter` | object | Reporter user details |
|
||||
| ↳ `accountId` | string | Atlassian account ID |
|
||||
| ↳ `displayName` | string | User display name |
|
||||
| ↳ `emailAddress` | string | User email address |
|
||||
| ↳ `active` | boolean | Whether the account is active |
|
||||
| `requestFieldValues` | array | Request field values |
|
||||
| ↳ `fieldId` | string | Field identifier |
|
||||
| ↳ `label` | string | Human-readable field label |
|
||||
| ↳ `value` | json | Field value |
|
||||
| ↳ `renderedValue` | json | HTML-rendered field value |
|
||||
| `url` | string | URL to the request |
|
||||
| `request` | json | The service request object |
|
||||
|
||||
### `jsm_get_requests`
|
||||
@@ -139,9 +186,11 @@ Get multiple service requests from Jira Service Management
|
||||
| `domain` | string | Yes | Your Jira domain \(e.g., yourcompany.atlassian.net\) |
|
||||
| `cloudId` | string | No | Jira Cloud ID for the instance |
|
||||
| `serviceDeskId` | string | No | Filter by service desk ID \(e.g., "1", "2"\) |
|
||||
| `requestOwnership` | string | No | Filter by ownership: OWNED_REQUESTS, PARTICIPATED_REQUESTS, ORGANIZATION, ALL_REQUESTS |
|
||||
| `requestStatus` | string | No | Filter by status: OPEN, CLOSED, ALL |
|
||||
| `requestOwnership` | string | No | Filter by ownership: OWNED_REQUESTS, PARTICIPATED_REQUESTS, APPROVER, ALL_REQUESTS |
|
||||
| `requestStatus` | string | No | Filter by status: OPEN_REQUESTS, CLOSED_REQUESTS, ALL_REQUESTS |
|
||||
| `requestTypeId` | string | No | Filter by request type ID |
|
||||
| `searchTerm` | string | No | Search term to filter requests \(e.g., "password reset", "laptop"\) |
|
||||
| `expand` | string | No | Comma-separated fields to expand: participant, status, sla, requestType, serviceDesk, attachment, comment, action |
|
||||
| `start` | number | No | Start index for pagination \(e.g., 0, 50, 100\) |
|
||||
| `limit` | number | No | Maximum results to return \(e.g., 10, 25, 50\) |
|
||||
|
||||
@@ -150,8 +199,27 @@ Get multiple service requests from Jira Service Management
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `ts` | string | Timestamp of the operation |
|
||||
| `requests` | json | Array of service requests |
|
||||
| `total` | number | Total number of requests |
|
||||
| `requests` | array | List of service requests |
|
||||
| ↳ `issueId` | string | Jira issue ID |
|
||||
| ↳ `issueKey` | string | Issue key \(e.g., SD-123\) |
|
||||
| ↳ `requestTypeId` | string | Request type ID |
|
||||
| ↳ `serviceDeskId` | string | Service desk ID |
|
||||
| ↳ `createdDate` | json | Creation date with iso8601, friendly, epochMillis |
|
||||
| ↳ `currentStatus` | object | Current request status |
|
||||
| ↳ `status` | string | Status name |
|
||||
| ↳ `statusCategory` | string | Status category \(NEW, INDETERMINATE, DONE\) |
|
||||
| ↳ `statusDate` | json | Status change date with iso8601, friendly, epochMillis |
|
||||
| ↳ `reporter` | object | Reporter user details |
|
||||
| ↳ `accountId` | string | Atlassian account ID |
|
||||
| ↳ `displayName` | string | User display name |
|
||||
| ↳ `emailAddress` | string | User email address |
|
||||
| ↳ `active` | boolean | Whether the account is active |
|
||||
| ↳ `requestFieldValues` | array | Request field values |
|
||||
| ↳ `fieldId` | string | Field identifier |
|
||||
| ↳ `label` | string | Human-readable field label |
|
||||
| ↳ `value` | json | Field value |
|
||||
| ↳ `renderedValue` | json | HTML-rendered field value |
|
||||
| `total` | number | Total number of requests in current page |
|
||||
| `isLastPage` | boolean | Whether this is the last page |
|
||||
|
||||
### `jsm_add_comment`
|
||||
@@ -177,6 +245,12 @@ Add a comment (public or internal) to a service request in Jira Service Manageme
|
||||
| `commentId` | string | Created comment ID |
|
||||
| `body` | string | Comment body text |
|
||||
| `isPublic` | boolean | Whether the comment is public |
|
||||
| `author` | object | Comment author |
|
||||
| ↳ `accountId` | string | Atlassian account ID |
|
||||
| ↳ `displayName` | string | User display name |
|
||||
| ↳ `emailAddress` | string | User email address |
|
||||
| ↳ `active` | boolean | Whether the account is active |
|
||||
| `createdDate` | json | Comment creation date with iso8601, friendly, epochMillis |
|
||||
| `success` | boolean | Whether the comment was added successfully |
|
||||
|
||||
### `jsm_get_comments`
|
||||
@@ -192,6 +266,7 @@ Get comments for a service request in Jira Service Management
|
||||
| `issueIdOrKey` | string | Yes | Issue ID or key \(e.g., SD-123\) |
|
||||
| `isPublic` | boolean | No | Filter to only public comments \(true/false\) |
|
||||
| `internal` | boolean | No | Filter to only internal comments \(true/false\) |
|
||||
| `expand` | string | No | Comma-separated fields to expand: renderedBody, attachment |
|
||||
| `start` | number | No | Start index for pagination \(e.g., 0, 50, 100\) |
|
||||
| `limit` | number | No | Maximum results to return \(e.g., 10, 25, 50\) |
|
||||
|
||||
@@ -201,7 +276,17 @@ Get comments for a service request in Jira Service Management
|
||||
| --------- | ---- | ----------- |
|
||||
| `ts` | string | Timestamp of the operation |
|
||||
| `issueIdOrKey` | string | Issue ID or key |
|
||||
| `comments` | json | Array of comments |
|
||||
| `comments` | array | List of comments |
|
||||
| ↳ `id` | string | Comment ID |
|
||||
| ↳ `body` | string | Comment body text |
|
||||
| ↳ `public` | boolean | Whether the comment is public |
|
||||
| ↳ `author` | object | Comment author |
|
||||
| ↳ `accountId` | string | Atlassian account ID |
|
||||
| ↳ `displayName` | string | User display name |
|
||||
| ↳ `emailAddress` | string | User email address |
|
||||
| ↳ `active` | boolean | Whether the account is active |
|
||||
| ↳ `created` | json | Creation date with iso8601, friendly, epochMillis |
|
||||
| ↳ `renderedBody` | json | HTML-rendered comment body \(when expand=renderedBody\) |
|
||||
| `total` | number | Total number of comments |
|
||||
| `isLastPage` | boolean | Whether this is the last page |
|
||||
|
||||
@@ -225,7 +310,12 @@ Get customers for a service desk in Jira Service Management
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `ts` | string | Timestamp of the operation |
|
||||
| `customers` | json | Array of customers |
|
||||
| `customers` | array | List of customers |
|
||||
| ↳ `accountId` | string | Atlassian account ID |
|
||||
| ↳ `displayName` | string | Display name |
|
||||
| ↳ `emailAddress` | string | Email address |
|
||||
| ↳ `active` | boolean | Whether the account is active |
|
||||
| ↳ `timeZone` | string | User timezone |
|
||||
| `total` | number | Total number of customers |
|
||||
| `isLastPage` | boolean | Whether this is the last page |
|
||||
|
||||
@@ -240,7 +330,8 @@ Add customers to a service desk in Jira Service Management
|
||||
| `domain` | string | Yes | Your Jira domain \(e.g., yourcompany.atlassian.net\) |
|
||||
| `cloudId` | string | No | Jira Cloud ID for the instance |
|
||||
| `serviceDeskId` | string | Yes | Service Desk ID \(e.g., "1", "2"\) |
|
||||
| `emails` | string | Yes | Comma-separated email addresses to add as customers |
|
||||
| `accountIds` | string | No | Comma-separated Atlassian account IDs to add as customers |
|
||||
| `emails` | string | No | Comma-separated email addresses to add as customers |
|
||||
|
||||
#### Output
|
||||
|
||||
@@ -269,7 +360,9 @@ Get organizations for a service desk in Jira Service Management
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `ts` | string | Timestamp of the operation |
|
||||
| `organizations` | json | Array of organizations |
|
||||
| `organizations` | array | List of organizations |
|
||||
| ↳ `id` | string | Organization ID |
|
||||
| ↳ `name` | string | Organization name |
|
||||
| `total` | number | Total number of organizations |
|
||||
| `isLastPage` | boolean | Whether this is the last page |
|
||||
|
||||
@@ -336,7 +429,12 @@ Get queues for a service desk in Jira Service Management
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `ts` | string | Timestamp of the operation |
|
||||
| `queues` | json | Array of queues |
|
||||
| `queues` | array | List of queues |
|
||||
| ↳ `id` | string | Queue ID |
|
||||
| ↳ `name` | string | Queue name |
|
||||
| ↳ `jql` | string | JQL filter for the queue |
|
||||
| ↳ `fields` | json | Fields displayed in the queue |
|
||||
| ↳ `issueCount` | number | Number of issues in the queue |
|
||||
| `total` | number | Total number of queues |
|
||||
| `isLastPage` | boolean | Whether this is the last page |
|
||||
|
||||
@@ -360,7 +458,11 @@ Get SLA information for a service request in Jira Service Management
|
||||
| --------- | ---- | ----------- |
|
||||
| `ts` | string | Timestamp of the operation |
|
||||
| `issueIdOrKey` | string | Issue ID or key |
|
||||
| `slas` | json | Array of SLA information |
|
||||
| `slas` | array | List of SLA metrics |
|
||||
| ↳ `id` | string | SLA metric ID |
|
||||
| ↳ `name` | string | SLA metric name |
|
||||
| ↳ `completedCycles` | json | Completed SLA cycles with startTime, stopTime, breachTime, breached, goalDuration, elapsedTime, remainingTime \(each time as DateDTO, durations as DurationDTO\) |
|
||||
| ↳ `ongoingCycle` | json | Ongoing SLA cycle with startTime, breachTime, breached, paused, withinCalendarHours, goalDuration, elapsedTime, remainingTime |
|
||||
| `total` | number | Total number of SLAs |
|
||||
| `isLastPage` | boolean | Whether this is the last page |
|
||||
|
||||
@@ -375,6 +477,8 @@ Get available transitions for a service request in Jira Service Management
|
||||
| `domain` | string | Yes | Your Jira domain \(e.g., yourcompany.atlassian.net\) |
|
||||
| `cloudId` | string | No | Jira Cloud ID for the instance |
|
||||
| `issueIdOrKey` | string | Yes | Issue ID or key \(e.g., SD-123\) |
|
||||
| `start` | number | No | Start index for pagination \(e.g., 0, 50, 100\) |
|
||||
| `limit` | number | No | Maximum results to return \(e.g., 10, 25, 50\) |
|
||||
|
||||
#### Output
|
||||
|
||||
@@ -382,7 +486,11 @@ Get available transitions for a service request in Jira Service Management
|
||||
| --------- | ---- | ----------- |
|
||||
| `ts` | string | Timestamp of the operation |
|
||||
| `issueIdOrKey` | string | Issue ID or key |
|
||||
| `transitions` | json | Array of available transitions |
|
||||
| `transitions` | array | List of available transitions |
|
||||
| ↳ `id` | string | Transition ID |
|
||||
| ↳ `name` | string | Transition name |
|
||||
| `total` | number | Total number of transitions |
|
||||
| `isLastPage` | boolean | Whether this is the last page |
|
||||
|
||||
### `jsm_transition_request`
|
||||
|
||||
@@ -427,7 +535,11 @@ Get participants for a request in Jira Service Management
|
||||
| --------- | ---- | ----------- |
|
||||
| `ts` | string | Timestamp of the operation |
|
||||
| `issueIdOrKey` | string | Issue ID or key |
|
||||
| `participants` | json | Array of participants |
|
||||
| `participants` | array | List of participants |
|
||||
| ↳ `accountId` | string | Atlassian account ID |
|
||||
| ↳ `displayName` | string | Display name |
|
||||
| ↳ `emailAddress` | string | Email address |
|
||||
| ↳ `active` | boolean | Whether the account is active |
|
||||
| `total` | number | Total number of participants |
|
||||
| `isLastPage` | boolean | Whether this is the last page |
|
||||
|
||||
@@ -450,7 +562,11 @@ Add participants to a request in Jira Service Management
|
||||
| --------- | ---- | ----------- |
|
||||
| `ts` | string | Timestamp of the operation |
|
||||
| `issueIdOrKey` | string | Issue ID or key |
|
||||
| `participants` | json | Array of added participants |
|
||||
| `participants` | array | List of added participants |
|
||||
| ↳ `accountId` | string | Atlassian account ID |
|
||||
| ↳ `displayName` | string | Display name |
|
||||
| ↳ `emailAddress` | string | Email address |
|
||||
| ↳ `active` | boolean | Whether the account is active |
|
||||
| `success` | boolean | Whether the operation succeeded |
|
||||
|
||||
### `jsm_get_approvals`
|
||||
@@ -473,7 +589,20 @@ Get approvals for a request in Jira Service Management
|
||||
| --------- | ---- | ----------- |
|
||||
| `ts` | string | Timestamp of the operation |
|
||||
| `issueIdOrKey` | string | Issue ID or key |
|
||||
| `approvals` | json | Array of approvals |
|
||||
| `approvals` | array | List of approvals |
|
||||
| ↳ `id` | string | Approval ID |
|
||||
| ↳ `name` | string | Approval description |
|
||||
| ↳ `finalDecision` | string | Final decision: pending, approved, or declined |
|
||||
| ↳ `canAnswerApproval` | boolean | Whether current user can respond |
|
||||
| ↳ `approvers` | array | List of approvers with their decisions |
|
||||
| ↳ `approver` | object | Approver user details |
|
||||
| ↳ `accountId` | string | Atlassian account ID |
|
||||
| ↳ `displayName` | string | User display name |
|
||||
| ↳ `emailAddress` | string | User email address |
|
||||
| ↳ `active` | boolean | Whether the account is active |
|
||||
| ↳ `approverDecision` | string | Decision: pending, approved, or declined |
|
||||
| ↳ `createdDate` | json | Creation date |
|
||||
| ↳ `completedDate` | json | Completion date |
|
||||
| `total` | number | Total number of approvals |
|
||||
| `isLastPage` | boolean | Whether this is the last page |
|
||||
|
||||
@@ -499,6 +628,53 @@ Approve or decline an approval request in Jira Service Management
|
||||
| `issueIdOrKey` | string | Issue ID or key |
|
||||
| `approvalId` | string | Approval ID |
|
||||
| `decision` | string | Decision made \(approve/decline\) |
|
||||
| `id` | string | Approval ID from response |
|
||||
| `name` | string | Approval description |
|
||||
| `finalDecision` | string | Final approval decision: pending, approved, or declined |
|
||||
| `canAnswerApproval` | boolean | Whether the current user can still respond |
|
||||
| `approvers` | array | Updated list of approvers with decisions |
|
||||
| ↳ `approver` | object | Approver user details |
|
||||
| ↳ `accountId` | string | Approver account ID |
|
||||
| ↳ `displayName` | string | Approver display name |
|
||||
| ↳ `emailAddress` | string | Approver email |
|
||||
| ↳ `active` | boolean | Whether the account is active |
|
||||
| ↳ `approverDecision` | string | Individual approver decision |
|
||||
| `createdDate` | json | Approval creation date |
|
||||
| `completedDate` | json | Approval completion date |
|
||||
| `approval` | json | The approval object |
|
||||
| `success` | boolean | Whether the operation succeeded |
|
||||
|
||||
### `jsm_get_request_type_fields`
|
||||
|
||||
Get the fields required to create a request of a specific type in Jira Service Management
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `domain` | string | Yes | Your Jira domain \(e.g., yourcompany.atlassian.net\) |
|
||||
| `cloudId` | string | No | Jira Cloud ID for the instance |
|
||||
| `serviceDeskId` | string | Yes | Service Desk ID \(e.g., "1", "2"\) |
|
||||
| `requestTypeId` | string | Yes | Request Type ID \(e.g., "10", "15"\) |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `ts` | string | Timestamp of the operation |
|
||||
| `serviceDeskId` | string | Service desk ID |
|
||||
| `requestTypeId` | string | Request type ID |
|
||||
| `canAddRequestParticipants` | boolean | Whether participants can be added to requests of this type |
|
||||
| `canRaiseOnBehalfOf` | boolean | Whether requests can be raised on behalf of another user |
|
||||
| `requestTypeFields` | array | List of fields for this request type |
|
||||
| ↳ `fieldId` | string | Field identifier \(e.g., summary, description, customfield_10010\) |
|
||||
| ↳ `name` | string | Human-readable field name |
|
||||
| ↳ `description` | string | Help text for the field |
|
||||
| ↳ `required` | boolean | Whether the field is required |
|
||||
| ↳ `visible` | boolean | Whether the field is visible |
|
||||
| ↳ `validValues` | json | Allowed values for select fields |
|
||||
| ↳ `presetValues` | json | Pre-populated values |
|
||||
| ↳ `defaultValues` | json | Default values for the field |
|
||||
| ↳ `jiraSchema` | json | Jira field schema with type, system, custom, customId |
|
||||
|
||||
|
||||
|
||||
@@ -76,6 +76,7 @@
|
||||
"neo4j",
|
||||
"notion",
|
||||
"onedrive",
|
||||
"onepassword",
|
||||
"openai",
|
||||
"outlook",
|
||||
"parallel_ai",
|
||||
|
||||
260
apps/docs/content/docs/en/tools/onepassword.mdx
Normal file
260
apps/docs/content/docs/en/tools/onepassword.mdx
Normal file
@@ -0,0 +1,260 @@
|
||||
---
|
||||
title: 1Password
|
||||
description: Manage secrets and items in 1Password vaults
|
||||
---
|
||||
|
||||
import { BlockInfoCard } from "@/components/ui/block-info-card"
|
||||
|
||||
<BlockInfoCard
|
||||
type="onepassword"
|
||||
color="#E0E0E0"
|
||||
/>
|
||||
|
||||
{/* MANUAL-CONTENT-START:intro */}
|
||||
[1Password](https://1password.com) is a widely trusted password manager and secrets vault solution, allowing individuals and teams to securely store, access, and share passwords, API credentials, and sensitive information. With robust encryption, granular access controls, and seamless syncing across devices, 1Password supports teams and organizations in managing secrets efficiently and securely.
|
||||
|
||||
The [1Password Connect API](https://developer.1password.com/docs/connect/) allows programmatic access to vaults and items within an organization's 1Password account. This integration in Sim lets you automate secret retrieval, onboarding workflows, secret rotation, vault audits, and more, all in a secure and auditable manner.
|
||||
|
||||
With 1Password in your Sim workflow, you can:
|
||||
|
||||
- **List, search, and retrieve vaults**: Access metadata or browse available vaults for organizing secrets by project or purpose
|
||||
- **Fetch items and secrets**: Get credentials, API keys, or custom secrets in real time to power your workflows securely
|
||||
- **Create, update, or delete secrets**: Automate secret management, provisioning, and rotation for enhanced security practices
|
||||
- **Integrate with CI/CD and automation**: Fetch credentials or tokens only when needed, reducing manual work and risk |
|
||||
- **Ensure access controls**: Leverage role-based access and fine-grained permissions to control which agents or users can access specific secrets
|
||||
|
||||
By connecting Sim with 1Password, you empower your agents to securely manage secrets, reduce manual overhead, and maintain best practices for security automation, incident response, and DevOps workflows—all while ensuring secrets never leave a controlled environment.
|
||||
{/* MANUAL-CONTENT-END */}
|
||||
|
||||
|
||||
## Usage Instructions
|
||||
|
||||
Access and manage secrets stored in 1Password vaults using the Connect API or Service Account SDK. List vaults, retrieve items with their fields and secrets, create new items, update existing ones, delete items, and resolve secret references.
|
||||
|
||||
|
||||
|
||||
## Tools
|
||||
|
||||
### `onepassword_list_vaults`
|
||||
|
||||
List all vaults accessible by the Connect token or Service Account
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `connectionMode` | string | No | Connection mode: "service_account" or "connect" |
|
||||
| `serviceAccountToken` | string | No | 1Password Service Account token \(for Service Account mode\) |
|
||||
| `apiKey` | string | No | 1Password Connect API token \(for Connect Server mode\) |
|
||||
| `serverUrl` | string | No | 1Password Connect server URL \(for Connect Server mode\) |
|
||||
| `filter` | string | No | SCIM filter expression \(e.g., name eq "My Vault"\) |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `vaults` | array | List of accessible vaults |
|
||||
| ↳ `id` | string | Vault ID |
|
||||
| ↳ `name` | string | Vault name |
|
||||
| ↳ `description` | string | Vault description |
|
||||
| ↳ `attributeVersion` | number | Vault attribute version |
|
||||
| ↳ `contentVersion` | number | Vault content version |
|
||||
| ↳ `type` | string | Vault type \(USER_CREATED, PERSONAL, EVERYONE, TRANSFER\) |
|
||||
| ↳ `createdAt` | string | Creation timestamp |
|
||||
| ↳ `updatedAt` | string | Last update timestamp |
|
||||
|
||||
### `onepassword_get_vault`
|
||||
|
||||
Get details of a specific vault by ID
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `connectionMode` | string | No | Connection mode: "service_account" or "connect" |
|
||||
| `serviceAccountToken` | string | No | 1Password Service Account token \(for Service Account mode\) |
|
||||
| `apiKey` | string | No | 1Password Connect API token \(for Connect Server mode\) |
|
||||
| `serverUrl` | string | No | 1Password Connect server URL \(for Connect Server mode\) |
|
||||
| `vaultId` | string | Yes | The vault UUID |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `id` | string | Vault ID |
|
||||
| `name` | string | Vault name |
|
||||
| `description` | string | Vault description |
|
||||
| `attributeVersion` | number | Vault attribute version |
|
||||
| `contentVersion` | number | Vault content version |
|
||||
| `items` | number | Number of items in the vault |
|
||||
| `type` | string | Vault type \(USER_CREATED, PERSONAL, EVERYONE, TRANSFER\) |
|
||||
| `createdAt` | string | Creation timestamp |
|
||||
| `updatedAt` | string | Last update timestamp |
|
||||
|
||||
### `onepassword_list_items`
|
||||
|
||||
List items in a vault. Returns summaries without field values.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `connectionMode` | string | No | Connection mode: "service_account" or "connect" |
|
||||
| `serviceAccountToken` | string | No | 1Password Service Account token \(for Service Account mode\) |
|
||||
| `apiKey` | string | No | 1Password Connect API token \(for Connect Server mode\) |
|
||||
| `serverUrl` | string | No | 1Password Connect server URL \(for Connect Server mode\) |
|
||||
| `vaultId` | string | Yes | The vault UUID to list items from |
|
||||
| `filter` | string | No | SCIM filter expression \(e.g., title eq "API Key" or tag eq "production"\) |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `items` | array | List of items in the vault \(summaries without field values\) |
|
||||
| ↳ `id` | string | Item ID |
|
||||
| ↳ `title` | string | Item title |
|
||||
| ↳ `vault` | object | Vault reference |
|
||||
| ↳ `id` | string | Vault ID |
|
||||
| ↳ `category` | string | Item category \(e.g., LOGIN, API_CREDENTIAL\) |
|
||||
| ↳ `urls` | array | URLs associated with the item |
|
||||
| ↳ `href` | string | URL |
|
||||
| ↳ `label` | string | URL label |
|
||||
| ↳ `primary` | boolean | Whether this is the primary URL |
|
||||
| ↳ `favorite` | boolean | Whether the item is favorited |
|
||||
| ↳ `tags` | array | Item tags |
|
||||
| ↳ `version` | number | Item version number |
|
||||
| ↳ `state` | string | Item state \(ARCHIVED or DELETED\) |
|
||||
| ↳ `createdAt` | string | Creation timestamp |
|
||||
| ↳ `updatedAt` | string | Last update timestamp |
|
||||
| ↳ `lastEditedBy` | string | ID of the last editor |
|
||||
|
||||
### `onepassword_get_item`
|
||||
|
||||
Get full details of an item including all fields and secrets
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `connectionMode` | string | No | Connection mode: "service_account" or "connect" |
|
||||
| `serviceAccountToken` | string | No | 1Password Service Account token \(for Service Account mode\) |
|
||||
| `apiKey` | string | No | 1Password Connect API token \(for Connect Server mode\) |
|
||||
| `serverUrl` | string | No | 1Password Connect server URL \(for Connect Server mode\) |
|
||||
| `vaultId` | string | Yes | The vault UUID |
|
||||
| `itemId` | string | Yes | The item UUID to retrieve |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `response` | json | Operation response data |
|
||||
|
||||
### `onepassword_create_item`
|
||||
|
||||
Create a new item in a vault
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `connectionMode` | string | No | Connection mode: "service_account" or "connect" |
|
||||
| `serviceAccountToken` | string | No | 1Password Service Account token \(for Service Account mode\) |
|
||||
| `apiKey` | string | No | 1Password Connect API token \(for Connect Server mode\) |
|
||||
| `serverUrl` | string | No | 1Password Connect server URL \(for Connect Server mode\) |
|
||||
| `vaultId` | string | Yes | The vault UUID to create the item in |
|
||||
| `category` | string | Yes | Item category \(e.g., LOGIN, PASSWORD, API_CREDENTIAL, SECURE_NOTE, SERVER, DATABASE\) |
|
||||
| `title` | string | No | Item title |
|
||||
| `tags` | string | No | Comma-separated list of tags |
|
||||
| `fields` | string | No | JSON array of field objects \(e.g., \[\{"label":"username","value":"admin","type":"STRING","purpose":"USERNAME"\}\]\) |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `response` | json | Operation response data |
|
||||
|
||||
### `onepassword_replace_item`
|
||||
|
||||
Replace an entire item with new data (full update)
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `connectionMode` | string | No | Connection mode: "service_account" or "connect" |
|
||||
| `serviceAccountToken` | string | No | 1Password Service Account token \(for Service Account mode\) |
|
||||
| `apiKey` | string | No | 1Password Connect API token \(for Connect Server mode\) |
|
||||
| `serverUrl` | string | No | 1Password Connect server URL \(for Connect Server mode\) |
|
||||
| `vaultId` | string | Yes | The vault UUID |
|
||||
| `itemId` | string | Yes | The item UUID to replace |
|
||||
| `item` | string | Yes | JSON object representing the full item \(e.g., \{"vault":\{"id":"..."\},"category":"LOGIN","title":"My Item","fields":\[...\]\}\) |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `response` | json | Operation response data |
|
||||
|
||||
### `onepassword_update_item`
|
||||
|
||||
Update an existing item using JSON Patch operations (RFC6902)
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `connectionMode` | string | No | Connection mode: "service_account" or "connect" |
|
||||
| `serviceAccountToken` | string | No | 1Password Service Account token \(for Service Account mode\) |
|
||||
| `apiKey` | string | No | 1Password Connect API token \(for Connect Server mode\) |
|
||||
| `serverUrl` | string | No | 1Password Connect server URL \(for Connect Server mode\) |
|
||||
| `vaultId` | string | Yes | The vault UUID |
|
||||
| `itemId` | string | Yes | The item UUID to update |
|
||||
| `operations` | string | Yes | JSON array of RFC6902 patch operations \(e.g., \[\{"op":"replace","path":"/title","value":"New Title"\}\]\) |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `response` | json | Operation response data |
|
||||
|
||||
### `onepassword_delete_item`
|
||||
|
||||
Delete an item from a vault
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `connectionMode` | string | No | Connection mode: "service_account" or "connect" |
|
||||
| `serviceAccountToken` | string | No | 1Password Service Account token \(for Service Account mode\) |
|
||||
| `apiKey` | string | No | 1Password Connect API token \(for Connect Server mode\) |
|
||||
| `serverUrl` | string | No | 1Password Connect server URL \(for Connect Server mode\) |
|
||||
| `vaultId` | string | Yes | The vault UUID |
|
||||
| `itemId` | string | Yes | The item UUID to delete |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `success` | boolean | Whether the item was successfully deleted |
|
||||
|
||||
### `onepassword_resolve_secret`
|
||||
|
||||
Resolve a secret reference (op://vault/item/field) to its value. Service Account mode only.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `connectionMode` | string | No | Connection mode: must be "service_account" for this operation |
|
||||
| `serviceAccountToken` | string | Yes | 1Password Service Account token |
|
||||
| `secretReference` | string | Yes | Secret reference URI \(e.g., op://vault-name/item-name/field-name or op://vault-name/item-name/section-name/field-name\) |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `value` | string | The resolved secret value |
|
||||
| `reference` | string | The original secret reference URI |
|
||||
|
||||
|
||||
@@ -0,0 +1,6 @@
|
||||
import type { NextRequest, NextResponse } from 'next/server'
|
||||
import { createMcpAuthorizationServerMetadataResponse } from '@/lib/mcp/oauth-discovery'
|
||||
|
||||
export async function GET(request: NextRequest): Promise<NextResponse> {
|
||||
return createMcpAuthorizationServerMetadataResponse(request)
|
||||
}
|
||||
@@ -0,0 +1,6 @@
|
||||
import type { NextRequest, NextResponse } from 'next/server'
|
||||
import { createMcpAuthorizationServerMetadataResponse } from '@/lib/mcp/oauth-discovery'
|
||||
|
||||
export async function GET(request: NextRequest): Promise<NextResponse> {
|
||||
return createMcpAuthorizationServerMetadataResponse(request)
|
||||
}
|
||||
@@ -0,0 +1,6 @@
|
||||
import type { NextRequest, NextResponse } from 'next/server'
|
||||
import { createMcpAuthorizationServerMetadataResponse } from '@/lib/mcp/oauth-discovery'
|
||||
|
||||
export async function GET(request: NextRequest): Promise<NextResponse> {
|
||||
return createMcpAuthorizationServerMetadataResponse(request)
|
||||
}
|
||||
@@ -0,0 +1,6 @@
|
||||
import type { NextRequest, NextResponse } from 'next/server'
|
||||
import { createMcpProtectedResourceMetadataResponse } from '@/lib/mcp/oauth-discovery'
|
||||
|
||||
export async function GET(request: NextRequest): Promise<NextResponse> {
|
||||
return createMcpProtectedResourceMetadataResponse(request)
|
||||
}
|
||||
@@ -0,0 +1,6 @@
|
||||
import type { NextRequest, NextResponse } from 'next/server'
|
||||
import { createMcpProtectedResourceMetadataResponse } from '@/lib/mcp/oauth-discovery'
|
||||
|
||||
export async function GET(request: NextRequest): Promise<NextResponse> {
|
||||
return createMcpProtectedResourceMetadataResponse(request)
|
||||
}
|
||||
@@ -18,6 +18,7 @@ const UpdateCostSchema = z.object({
|
||||
model: z.string().min(1, 'Model is required'),
|
||||
inputTokens: z.number().min(0).default(0),
|
||||
outputTokens: z.number().min(0).default(0),
|
||||
source: z.enum(['copilot', 'mcp_copilot']).default('copilot'),
|
||||
})
|
||||
|
||||
/**
|
||||
@@ -75,12 +76,14 @@ export async function POST(req: NextRequest) {
|
||||
)
|
||||
}
|
||||
|
||||
const { userId, cost, model, inputTokens, outputTokens } = validation.data
|
||||
const { userId, cost, model, inputTokens, outputTokens, source } = validation.data
|
||||
const isMcp = source === 'mcp_copilot'
|
||||
|
||||
logger.info(`[${requestId}] Processing cost update`, {
|
||||
userId,
|
||||
cost,
|
||||
model,
|
||||
source,
|
||||
})
|
||||
|
||||
// Check if user stats record exists (same as ExecutionLogger)
|
||||
@@ -96,7 +99,7 @@ export async function POST(req: NextRequest) {
|
||||
return NextResponse.json({ error: 'User stats record not found' }, { status: 500 })
|
||||
}
|
||||
|
||||
const updateFields = {
|
||||
const updateFields: Record<string, unknown> = {
|
||||
totalCost: sql`total_cost + ${cost}`,
|
||||
currentPeriodCost: sql`current_period_cost + ${cost}`,
|
||||
totalCopilotCost: sql`total_copilot_cost + ${cost}`,
|
||||
@@ -105,17 +108,24 @@ export async function POST(req: NextRequest) {
|
||||
lastActive: new Date(),
|
||||
}
|
||||
|
||||
// Also increment MCP-specific counters when source is mcp_copilot
|
||||
if (isMcp) {
|
||||
updateFields.totalMcpCopilotCost = sql`total_mcp_copilot_cost + ${cost}`
|
||||
updateFields.currentPeriodMcpCopilotCost = sql`current_period_mcp_copilot_cost + ${cost}`
|
||||
}
|
||||
|
||||
await db.update(userStats).set(updateFields).where(eq(userStats.userId, userId))
|
||||
|
||||
logger.info(`[${requestId}] Updated user stats record`, {
|
||||
userId,
|
||||
addedCost: cost,
|
||||
source,
|
||||
})
|
||||
|
||||
// Log usage for complete audit trail
|
||||
await logModelUsage({
|
||||
userId,
|
||||
source: 'copilot',
|
||||
source: isMcp ? 'mcp_copilot' : 'copilot',
|
||||
model,
|
||||
inputTokens,
|
||||
outputTokens,
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { SIM_AGENT_API_URL_DEFAULT } from '@/lib/copilot/constants'
|
||||
import { SIM_AGENT_API_URL } from '@/lib/copilot/constants'
|
||||
import { env } from '@/lib/core/config/env'
|
||||
|
||||
const GenerateApiKeySchema = z.object({
|
||||
@@ -17,9 +17,6 @@ export async function POST(req: NextRequest) {
|
||||
|
||||
const userId = session.user.id
|
||||
|
||||
// Move environment variable access inside the function
|
||||
const SIM_AGENT_API_URL = env.SIM_AGENT_API_URL || SIM_AGENT_API_URL_DEFAULT
|
||||
|
||||
const body = await req.json().catch(() => ({}))
|
||||
const validationResult = GenerateApiKeySchema.safeParse(body)
|
||||
|
||||
|
||||
@@ -19,6 +19,7 @@ describe('Copilot API Keys API Route', () => {
|
||||
|
||||
vi.doMock('@/lib/copilot/constants', () => ({
|
||||
SIM_AGENT_API_URL_DEFAULT: 'https://agent.sim.example.com',
|
||||
SIM_AGENT_API_URL: 'https://agent.sim.example.com',
|
||||
}))
|
||||
|
||||
vi.doMock('@/lib/core/config/env', async () => {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { SIM_AGENT_API_URL_DEFAULT } from '@/lib/copilot/constants'
|
||||
import { SIM_AGENT_API_URL } from '@/lib/copilot/constants'
|
||||
import { env } from '@/lib/core/config/env'
|
||||
|
||||
export async function GET(request: NextRequest) {
|
||||
@@ -12,8 +12,6 @@ export async function GET(request: NextRequest) {
|
||||
|
||||
const userId = session.user.id
|
||||
|
||||
const SIM_AGENT_API_URL = env.SIM_AGENT_API_URL || SIM_AGENT_API_URL_DEFAULT
|
||||
|
||||
const res = await fetch(`${SIM_AGENT_API_URL}/api/validate-key/get-api-keys`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
@@ -68,8 +66,6 @@ export async function DELETE(request: NextRequest) {
|
||||
return NextResponse.json({ error: 'id is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
const SIM_AGENT_API_URL = env.SIM_AGENT_API_URL || SIM_AGENT_API_URL_DEFAULT
|
||||
|
||||
const res = await fetch(`${SIM_AGENT_API_URL}/api/validate-key/delete`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
130
apps/sim/app/api/copilot/chat/stream/route.ts
Normal file
130
apps/sim/app/api/copilot/chat/stream/route.ts
Normal file
@@ -0,0 +1,130 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import {
|
||||
getStreamMeta,
|
||||
readStreamEvents,
|
||||
type StreamMeta,
|
||||
} from '@/lib/copilot/orchestrator/stream-buffer'
|
||||
import { authenticateCopilotRequestSessionOnly } from '@/lib/copilot/request-helpers'
|
||||
import { SSE_HEADERS } from '@/lib/core/utils/sse'
|
||||
|
||||
const logger = createLogger('CopilotChatStreamAPI')
|
||||
const POLL_INTERVAL_MS = 250
|
||||
const MAX_STREAM_MS = 10 * 60 * 1000
|
||||
|
||||
function encodeEvent(event: Record<string, any>): Uint8Array {
|
||||
return new TextEncoder().encode(`data: ${JSON.stringify(event)}\n\n`)
|
||||
}
|
||||
|
||||
/**
 * GET — replays buffered copilot chat stream events for a stream owned by the
 * authenticated user.
 *
 * Query params:
 *  - streamId (required): the buffered stream to replay.
 *  - from: event-id cursor the replay starts from (passed straight to
 *    readStreamEvents — presumably exclusive; confirm against its contract).
 *  - batch: when 'true', return the buffered events as one JSON body instead
 *    of an SSE response.
 *  - to: batch mode only — inclusive upper bound on event ids.
 *
 * SSE mode keeps polling every POLL_INTERVAL_MS until the stream reaches a
 * terminal status, its metadata disappears, the client aborts, or
 * MAX_STREAM_MS elapses.
 */
export async function GET(request: NextRequest) {
  // Session-only auth: this route never accepts API-key authentication.
  const { userId: authenticatedUserId, isAuthenticated } =
    await authenticateCopilotRequestSessionOnly()

  if (!isAuthenticated || !authenticatedUserId) {
    return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
  }

  const url = new URL(request.url)
  const streamId = url.searchParams.get('streamId') || ''
  const fromParam = url.searchParams.get('from') || '0'
  // NOTE(review): fromParam is already defaulted to '0', so the `|| 0` is
  // redundant; a non-numeric value yields NaN here and is only normalized
  // back to 0 in SSE mode (see lastEventId below), not in batch mode.
  const fromEventId = Number(fromParam || 0)
  // If batch=true, return buffered events as JSON instead of SSE
  const batchMode = url.searchParams.get('batch') === 'true'
  const toParam = url.searchParams.get('to')
  const toEventId = toParam ? Number(toParam) : undefined

  if (!streamId) {
    return NextResponse.json({ error: 'streamId is required' }, { status: 400 })
  }

  // Stream metadata carries ownership and completion status.
  const meta = (await getStreamMeta(streamId)) as StreamMeta | null
  logger.info('[Resume] Stream lookup', {
    streamId,
    fromEventId,
    toEventId,
    batchMode,
    hasMeta: !!meta,
    metaStatus: meta?.status,
  })
  if (!meta) {
    return NextResponse.json({ error: 'Stream not found' }, { status: 404 })
  }
  // Only the owner may replay; streams recorded without a userId are
  // readable by any authenticated user.
  if (meta.userId && meta.userId !== authenticatedUserId) {
    return NextResponse.json({ error: 'Unauthorized' }, { status: 403 })
  }

  // Batch mode: return all buffered events as JSON
  if (batchMode) {
    const events = await readStreamEvents(streamId, fromEventId)
    // NOTE(review): a toEventId of 0 is falsy and therefore disables the
    // upper bound rather than filtering to event 0 — confirm intended.
    const filteredEvents = toEventId ? events.filter((e) => e.eventId <= toEventId) : events
    logger.info('[Resume] Batch response', {
      streamId,
      fromEventId,
      toEventId,
      eventCount: filteredEvents.length,
    })
    return NextResponse.json({
      success: true,
      events: filteredEvents,
      status: meta.status,
    })
  }

  const startTime = Date.now()

  const stream = new ReadableStream({
    async start(controller) {
      // Cursor of the most recent event id already sent to this client.
      let lastEventId = Number.isFinite(fromEventId) ? fromEventId : 0

      // Drain any buffered events newer than the cursor into the response.
      const flushEvents = async () => {
        const events = await readStreamEvents(streamId, lastEventId)
        if (events.length > 0) {
          logger.info('[Resume] Flushing events', {
            streamId,
            fromEventId: lastEventId,
            eventCount: events.length,
          })
        }
        for (const entry of events) {
          lastEventId = entry.eventId
          const payload = {
            ...entry.event,
            eventId: entry.eventId,
            streamId: entry.streamId,
          }
          controller.enqueue(encodeEvent(payload))
        }
      }

      try {
        await flushEvents()

        // Poll until terminal status, missing metadata, client abort, or the
        // hard MAX_STREAM_MS deadline.
        while (Date.now() - startTime < MAX_STREAM_MS) {
          const currentMeta = await getStreamMeta(streamId)
          if (!currentMeta) break

          // Flush before the status check so a final burst of events written
          // just before completion is still delivered.
          await flushEvents()

          if (currentMeta.status === 'complete' || currentMeta.status === 'error') {
            break
          }

          if (request.signal.aborted) {
            break
          }

          await new Promise((resolve) => setTimeout(resolve, POLL_INTERVAL_MS))
        }
      } catch (error) {
        logger.warn('Stream replay failed', {
          streamId,
          error: error instanceof Error ? error.message : String(error),
        })
      } finally {
        // Always close so the client's EventSource does not hang.
        controller.close()
      }
    },
  })

  return new Response(stream, { headers: SSE_HEADERS })
}
|
||||
@@ -139,7 +139,6 @@ describe('Copilot Confirm API Route', () => {
|
||||
status: 'success',
|
||||
})
|
||||
|
||||
expect(mockRedisExists).toHaveBeenCalled()
|
||||
expect(mockRedisSet).toHaveBeenCalled()
|
||||
})
|
||||
|
||||
@@ -256,11 +255,11 @@ describe('Copilot Confirm API Route', () => {
|
||||
expect(responseData.error).toBe('Failed to update tool call status or tool call not found')
|
||||
})
|
||||
|
||||
it('should return 400 when tool call is not found in Redis', async () => {
|
||||
it('should return 400 when Redis set fails', async () => {
|
||||
const authMocks = mockAuth()
|
||||
authMocks.setAuthenticated()
|
||||
|
||||
mockRedisExists.mockResolvedValue(0)
|
||||
mockRedisSet.mockRejectedValueOnce(new Error('Redis set failed'))
|
||||
|
||||
const req = createMockRequest('POST', {
|
||||
toolCallId: 'non-existent-tool',
|
||||
@@ -279,7 +278,7 @@ describe('Copilot Confirm API Route', () => {
|
||||
const authMocks = mockAuth()
|
||||
authMocks.setAuthenticated()
|
||||
|
||||
mockRedisExists.mockRejectedValue(new Error('Redis connection failed'))
|
||||
mockRedisSet.mockRejectedValueOnce(new Error('Redis connection failed'))
|
||||
|
||||
const req = createMockRequest('POST', {
|
||||
toolCallId: 'tool-call-123',
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { REDIS_TOOL_CALL_PREFIX, REDIS_TOOL_CALL_TTL_SECONDS } from '@/lib/copilot/constants'
|
||||
import {
|
||||
authenticateCopilotRequestSessionOnly,
|
||||
createBadRequestResponse,
|
||||
@@ -23,7 +24,8 @@ const ConfirmationSchema = z.object({
|
||||
})
|
||||
|
||||
/**
|
||||
* Update tool call status in Redis
|
||||
* Write the user's tool decision to Redis. The server-side orchestrator's
|
||||
* waitForToolDecision() polls Redis for this value.
|
||||
*/
|
||||
async function updateToolCallStatus(
|
||||
toolCallId: string,
|
||||
@@ -32,57 +34,24 @@ async function updateToolCallStatus(
|
||||
): Promise<boolean> {
|
||||
const redis = getRedisClient()
|
||||
if (!redis) {
|
||||
logger.warn('updateToolCallStatus: Redis client not available')
|
||||
logger.warn('Redis client not available for tool confirmation')
|
||||
return false
|
||||
}
|
||||
|
||||
try {
|
||||
const key = `tool_call:${toolCallId}`
|
||||
const timeout = 600000 // 10 minutes timeout for user confirmation
|
||||
const pollInterval = 100 // Poll every 100ms
|
||||
const startTime = Date.now()
|
||||
|
||||
logger.info('Polling for tool call in Redis', { toolCallId, key, timeout })
|
||||
|
||||
// Poll until the key exists or timeout
|
||||
while (Date.now() - startTime < timeout) {
|
||||
const exists = await redis.exists(key)
|
||||
if (exists) {
|
||||
break
|
||||
}
|
||||
|
||||
// Wait before next poll
|
||||
await new Promise((resolve) => setTimeout(resolve, pollInterval))
|
||||
}
|
||||
|
||||
// Final check if key exists after polling
|
||||
const exists = await redis.exists(key)
|
||||
if (!exists) {
|
||||
logger.warn('Tool call not found in Redis after polling timeout', {
|
||||
toolCallId,
|
||||
key,
|
||||
timeout,
|
||||
pollDuration: Date.now() - startTime,
|
||||
})
|
||||
return false
|
||||
}
|
||||
|
||||
// Store both status and message as JSON
|
||||
const toolCallData = {
|
||||
const key = `${REDIS_TOOL_CALL_PREFIX}${toolCallId}`
|
||||
const payload = {
|
||||
status,
|
||||
message: message || null,
|
||||
timestamp: new Date().toISOString(),
|
||||
}
|
||||
|
||||
await redis.set(key, JSON.stringify(toolCallData), 'EX', 86400) // Keep 24 hour expiry
|
||||
|
||||
await redis.set(key, JSON.stringify(payload), 'EX', REDIS_TOOL_CALL_TTL_SECONDS)
|
||||
return true
|
||||
} catch (error) {
|
||||
logger.error('Failed to update tool call status in Redis', {
|
||||
logger.error('Failed to update tool call status', {
|
||||
toolCallId,
|
||||
status,
|
||||
message,
|
||||
error: error instanceof Error ? error.message : 'Unknown error',
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
})
|
||||
return false
|
||||
}
|
||||
|
||||
28
apps/sim/app/api/copilot/credentials/route.ts
Normal file
28
apps/sim/app/api/copilot/credentials/route.ts
Normal file
@@ -0,0 +1,28 @@
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { authenticateCopilotRequestSessionOnly } from '@/lib/copilot/request-helpers'
|
||||
import { routeExecution } from '@/lib/copilot/tools/server/router'
|
||||
|
||||
/**
|
||||
* GET /api/copilot/credentials
|
||||
* Returns connected OAuth credentials for the authenticated user.
|
||||
* Used by the copilot store for credential masking.
|
||||
*/
|
||||
export async function GET(_req: NextRequest) {
|
||||
const { userId, isAuthenticated } = await authenticateCopilotRequestSessionOnly()
|
||||
if (!isAuthenticated || !userId) {
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
try {
|
||||
const result = await routeExecution('get_credentials', {}, { userId })
|
||||
return NextResponse.json({ success: true, result })
|
||||
} catch (error) {
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: error instanceof Error ? error.message : 'Failed to load credentials',
|
||||
},
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -1,54 +0,0 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import {
|
||||
authenticateCopilotRequestSessionOnly,
|
||||
createBadRequestResponse,
|
||||
createInternalServerErrorResponse,
|
||||
createRequestTracker,
|
||||
createUnauthorizedResponse,
|
||||
} from '@/lib/copilot/request-helpers'
|
||||
import { routeExecution } from '@/lib/copilot/tools/server/router'
|
||||
|
||||
/** Logger for the server-side copilot tool execution endpoint. */
const logger = createLogger('ExecuteCopilotServerToolAPI')

// Request body: the server tool to run plus an optional opaque payload that
// is forwarded as-is to the tool router.
const ExecuteSchema = z.object({
  toolName: z.string(),
  payload: z.unknown().optional(),
})
|
||||
|
||||
export async function POST(req: NextRequest) {
|
||||
const tracker = createRequestTracker()
|
||||
try {
|
||||
const { userId, isAuthenticated } = await authenticateCopilotRequestSessionOnly()
|
||||
if (!isAuthenticated || !userId) {
|
||||
return createUnauthorizedResponse()
|
||||
}
|
||||
|
||||
const body = await req.json()
|
||||
try {
|
||||
const preview = JSON.stringify(body).slice(0, 300)
|
||||
logger.debug(`[${tracker.requestId}] Incoming request body preview`, { preview })
|
||||
} catch {}
|
||||
|
||||
const { toolName, payload } = ExecuteSchema.parse(body)
|
||||
|
||||
logger.info(`[${tracker.requestId}] Executing server tool`, { toolName })
|
||||
const result = await routeExecution(toolName, payload, { userId })
|
||||
|
||||
try {
|
||||
const resultPreview = JSON.stringify(result).slice(0, 300)
|
||||
logger.debug(`[${tracker.requestId}] Server tool result preview`, { toolName, resultPreview })
|
||||
} catch {}
|
||||
|
||||
return NextResponse.json({ success: true, result })
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
logger.debug(`[${tracker.requestId}] Zod validation error`, { issues: error.issues })
|
||||
return createBadRequestResponse('Invalid request body for execute-copilot-server-tool')
|
||||
}
|
||||
logger.error(`[${tracker.requestId}] Failed to execute server tool:`, error)
|
||||
const errorMessage = error instanceof Error ? error.message : 'Failed to execute server tool'
|
||||
return createInternalServerErrorResponse(errorMessage)
|
||||
}
|
||||
}
|
||||
@@ -1,247 +0,0 @@
|
||||
import { db } from '@sim/db'
|
||||
import { account, workflow } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { and, eq } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import {
|
||||
createBadRequestResponse,
|
||||
createInternalServerErrorResponse,
|
||||
createRequestTracker,
|
||||
createUnauthorizedResponse,
|
||||
} from '@/lib/copilot/request-helpers'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import { getEffectiveDecryptedEnv } from '@/lib/environment/utils'
|
||||
import { refreshTokenIfNeeded } from '@/app/api/auth/oauth/utils'
|
||||
import { resolveEnvVarReferences } from '@/executor/utils/reference-validation'
|
||||
import { executeTool } from '@/tools'
|
||||
import { getTool, resolveToolId } from '@/tools/utils'
|
||||
|
||||
/** Logger for the copilot direct tool-execution endpoint. */
const logger = createLogger('CopilotExecuteToolAPI')

// Request body: tool call id, registry tool name, the LLM-provided argument
// map (defaults to empty), and optionally the workflow the call belongs to.
const ExecuteToolSchema = z.object({
  toolCallId: z.string(),
  toolName: z.string(),
  arguments: z.record(z.any()).optional().default({}),
  workflowId: z.string().optional(),
})
|
||||
|
||||
/**
 * POST — executes a single registry tool on behalf of the copilot.
 *
 * Pipeline: authenticate session → validate body → resolve the tool in the
 * registry → resolve workspace-level {{ENV_VAR}} references in the LLM
 * arguments → attach an OAuth access token when the tool requires one →
 * enforce API-key presence → dispatch to executeTool.
 *
 * Responses: 404 unknown tool, 400 missing credentials, 500 OAuth/unexpected
 * failures. The 200 body always wraps the tool's own success/output/error.
 */
export async function POST(req: NextRequest) {
  const tracker = createRequestTracker()

  try {
    const session = await getSession()
    if (!session?.user?.id) {
      return createUnauthorizedResponse()
    }

    const userId = session.user.id
    const body = await req.json()

    // Best-effort truncated preview for diagnostics; never throws out.
    try {
      const preview = JSON.stringify(body).slice(0, 300)
      logger.debug(`[${tracker.requestId}] Incoming execute-tool request`, { preview })
    } catch {}

    const { toolCallId, toolName, arguments: toolArgs, workflowId } = ExecuteToolSchema.parse(body)

    // Map aliases / legacy names to the canonical registry id.
    const resolvedToolName = resolveToolId(toolName)

    logger.info(`[${tracker.requestId}] Executing tool`, {
      toolCallId,
      toolName,
      resolvedToolName,
      workflowId,
      hasArgs: Object.keys(toolArgs).length > 0,
    })

    const toolConfig = getTool(resolvedToolName)
    if (!toolConfig) {
      // Find similar tool names to help debug
      const { tools: allTools } = await import('@/tools/registry')
      const allToolNames = Object.keys(allTools)
      const prefix = toolName.split('_').slice(0, 2).join('_')
      const similarTools = allToolNames
        .filter((name) => name.startsWith(`${prefix.split('_')[0]}_`))
        .slice(0, 10)

      logger.warn(`[${tracker.requestId}] Tool not found in registry`, {
        toolName,
        prefix,
        similarTools,
        totalToolsInRegistry: allToolNames.length,
      })
      return NextResponse.json(
        {
          success: false,
          error: `Tool not found: ${toolName}. Similar tools: ${similarTools.join(', ')}`,
          toolCallId,
        },
        { status: 404 }
      )
    }

    // Get the workspaceId from the workflow (env vars are stored at workspace level)
    let workspaceId: string | undefined
    if (workflowId) {
      const workflowResult = await db
        .select({ workspaceId: workflow.workspaceId })
        .from(workflow)
        .where(eq(workflow.id, workflowId))
        .limit(1)
      workspaceId = workflowResult[0]?.workspaceId ?? undefined
    }

    // Get decrypted environment variables early so we can resolve all {{VAR}} references
    const decryptedEnvVars = await getEffectiveDecryptedEnv(userId, workspaceId)

    // NOTE(review): this logs the env var *names* (not values); confirm that
    // key names alone are acceptable in logs for this deployment.
    logger.info(`[${tracker.requestId}] Fetched environment variables`, {
      workflowId,
      workspaceId,
      envVarCount: Object.keys(decryptedEnvVars).length,
      envVarKeys: Object.keys(decryptedEnvVars),
    })

    // Build execution params starting with LLM-provided arguments
    // Resolve all {{ENV_VAR}} references in the arguments (deep for nested objects)
    const executionParams: Record<string, any> = resolveEnvVarReferences(
      toolArgs,
      decryptedEnvVars,
      { deep: true }
    ) as Record<string, any>

    logger.info(`[${tracker.requestId}] Resolved env var references in arguments`, {
      toolName,
      originalArgKeys: Object.keys(toolArgs),
      resolvedArgKeys: Object.keys(executionParams),
    })

    // Resolve OAuth access token if required
    if (toolConfig.oauth?.required && toolConfig.oauth.provider) {
      const provider = toolConfig.oauth.provider
      logger.info(`[${tracker.requestId}] Resolving OAuth token`, { provider })

      try {
        // Find the account for this provider and user
        const accounts = await db
          .select()
          .from(account)
          .where(and(eq(account.providerId, provider), eq(account.userId, userId)))
          .limit(1)

        if (accounts.length > 0) {
          const acc = accounts[0]
          const requestId = generateRequestId()
          // NOTE(review): `acc as any` papers over a row-type mismatch with
          // refreshTokenIfNeeded's parameter — worth tightening the types.
          const { accessToken } = await refreshTokenIfNeeded(requestId, acc as any, acc.id)

          if (accessToken) {
            executionParams.accessToken = accessToken
            logger.info(`[${tracker.requestId}] OAuth token resolved`, { provider })
          } else {
            logger.warn(`[${tracker.requestId}] No access token available`, { provider })
            return NextResponse.json(
              {
                success: false,
                error: `OAuth token not available for ${provider}. Please reconnect your account.`,
                toolCallId,
              },
              { status: 400 }
            )
          }
        } else {
          logger.warn(`[${tracker.requestId}] No account found for provider`, { provider })
          return NextResponse.json(
            {
              success: false,
              error: `No ${provider} account connected. Please connect your account first.`,
              toolCallId,
            },
            { status: 400 }
          )
        }
      } catch (error) {
        logger.error(`[${tracker.requestId}] Failed to resolve OAuth token`, {
          provider,
          error: error instanceof Error ? error.message : String(error),
        })
        return NextResponse.json(
          {
            success: false,
            error: `Failed to get OAuth token for ${provider}`,
            toolCallId,
          },
          { status: 500 }
        )
      }
    }

    // Check if tool requires an API key that wasn't resolved via {{ENV_VAR}} reference
    const needsApiKey = toolConfig.params?.apiKey?.required

    if (needsApiKey && !executionParams.apiKey) {
      logger.warn(`[${tracker.requestId}] No API key found for tool`, { toolName })
      return NextResponse.json(
        {
          success: false,
          error: `API key not provided for ${toolName}. Use {{YOUR_API_KEY_ENV_VAR}} to reference your environment variable.`,
          toolCallId,
        },
        { status: 400 }
      )
    }

    // Add execution context
    executionParams._context = {
      workflowId,
      userId,
    }

    // Special handling for function_execute - inject environment variables
    if (toolName === 'function_execute') {
      executionParams.envVars = decryptedEnvVars
      executionParams.workflowVariables = {} // No workflow variables in copilot context
      executionParams.blockData = {} // No block data in copilot context
      executionParams.blockNameMapping = {} // No block mapping in copilot context
      executionParams.language = executionParams.language || 'javascript'
      executionParams.timeout = executionParams.timeout || 30000

      logger.info(`[${tracker.requestId}] Injected env vars for function_execute`, {
        envVarCount: Object.keys(decryptedEnvVars).length,
      })
    }

    // Execute the tool
    logger.info(`[${tracker.requestId}] Executing tool with resolved credentials`, {
      toolName,
      hasAccessToken: !!executionParams.accessToken,
      hasApiKey: !!executionParams.apiKey,
    })

    const result = await executeTool(resolvedToolName, executionParams)

    logger.info(`[${tracker.requestId}] Tool execution complete`, {
      toolName,
      success: result.success,
      hasOutput: !!result.output,
    })

    // The HTTP call succeeded even if the tool itself failed; the tool's own
    // success flag and error are nested inside `result`.
    return NextResponse.json({
      success: true,
      toolCallId,
      result: {
        success: result.success,
        output: result.output,
        error: result.error,
      },
    })
  } catch (error) {
    if (error instanceof z.ZodError) {
      logger.debug(`[${tracker.requestId}] Zod validation error`, { issues: error.issues })
      return createBadRequestResponse('Invalid request body for execute-tool')
    }
    logger.error(`[${tracker.requestId}] Failed to execute tool:`, error)
    const errorMessage = error instanceof Error ? error.message : 'Failed to execute tool'
    return createInternalServerErrorResponse(errorMessage)
  }
}
|
||||
@@ -40,6 +40,7 @@ describe('Copilot Stats API Route', () => {
|
||||
|
||||
vi.doMock('@/lib/copilot/constants', () => ({
|
||||
SIM_AGENT_API_URL_DEFAULT: 'https://agent.sim.example.com',
|
||||
SIM_AGENT_API_URL: 'https://agent.sim.example.com',
|
||||
}))
|
||||
|
||||
vi.doMock('@/lib/core/config/env', async () => {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { SIM_AGENT_API_URL_DEFAULT } from '@/lib/copilot/constants'
|
||||
import { SIM_AGENT_API_URL } from '@/lib/copilot/constants'
|
||||
import {
|
||||
authenticateCopilotRequestSessionOnly,
|
||||
createBadRequestResponse,
|
||||
@@ -10,8 +10,6 @@ import {
|
||||
} from '@/lib/copilot/request-helpers'
|
||||
import { env } from '@/lib/core/config/env'
|
||||
|
||||
const SIM_AGENT_API_URL = env.SIM_AGENT_API_URL || SIM_AGENT_API_URL_DEFAULT
|
||||
|
||||
const BodySchema = z.object({
|
||||
messageId: z.string(),
|
||||
diffCreated: z.boolean(),
|
||||
|
||||
@@ -1,123 +0,0 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { SIM_AGENT_API_URL_DEFAULT } from '@/lib/copilot/constants'
|
||||
import {
|
||||
authenticateCopilotRequestSessionOnly,
|
||||
createBadRequestResponse,
|
||||
createInternalServerErrorResponse,
|
||||
createRequestTracker,
|
||||
createUnauthorizedResponse,
|
||||
} from '@/lib/copilot/request-helpers'
|
||||
import { env } from '@/lib/core/config/env'
|
||||
|
||||
/** Logger for the mark-complete proxy route. */
const logger = createLogger('CopilotMarkToolCompleteAPI')

// Base URL of the Sim Agent service; env override falls back to the default.
const SIM_AGENT_API_URL = env.SIM_AGENT_API_URL || SIM_AGENT_API_URL_DEFAULT

// Body forwarded verbatim to the agent: tool call id/name, integer status
// code, and optional free-form message/data payloads.
const MarkCompleteSchema = z.object({
  id: z.string(),
  name: z.string(),
  status: z.number().int(),
  message: z.any().optional(),
  data: z.any().optional(),
})
|
||||
|
||||
/**
|
||||
* POST /api/copilot/tools/mark-complete
|
||||
* Proxy to Sim Agent: POST /api/tools/mark-complete
|
||||
*/
|
||||
export async function POST(req: NextRequest) {
|
||||
const tracker = createRequestTracker()
|
||||
|
||||
try {
|
||||
const { userId, isAuthenticated } = await authenticateCopilotRequestSessionOnly()
|
||||
if (!isAuthenticated || !userId) {
|
||||
return createUnauthorizedResponse()
|
||||
}
|
||||
|
||||
const body = await req.json()
|
||||
|
||||
// Log raw body shape for diagnostics (avoid dumping huge payloads)
|
||||
try {
|
||||
const bodyPreview = JSON.stringify(body).slice(0, 300)
|
||||
logger.debug(`[${tracker.requestId}] Incoming mark-complete raw body preview`, {
|
||||
preview: `${bodyPreview}${bodyPreview.length === 300 ? '...' : ''}`,
|
||||
})
|
||||
} catch {}
|
||||
|
||||
const parsed = MarkCompleteSchema.parse(body)
|
||||
|
||||
const messagePreview = (() => {
|
||||
try {
|
||||
const s =
|
||||
typeof parsed.message === 'string' ? parsed.message : JSON.stringify(parsed.message)
|
||||
return s ? `${s.slice(0, 200)}${s.length > 200 ? '...' : ''}` : undefined
|
||||
} catch {
|
||||
return undefined
|
||||
}
|
||||
})()
|
||||
|
||||
logger.info(`[${tracker.requestId}] Forwarding tool mark-complete`, {
|
||||
userId,
|
||||
toolCallId: parsed.id,
|
||||
toolName: parsed.name,
|
||||
status: parsed.status,
|
||||
hasMessage: parsed.message !== undefined,
|
||||
hasData: parsed.data !== undefined,
|
||||
messagePreview,
|
||||
agentUrl: `${SIM_AGENT_API_URL}/api/tools/mark-complete`,
|
||||
})
|
||||
|
||||
const agentRes = await fetch(`${SIM_AGENT_API_URL}/api/tools/mark-complete`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
...(env.COPILOT_API_KEY ? { 'x-api-key': env.COPILOT_API_KEY } : {}),
|
||||
},
|
||||
body: JSON.stringify(parsed),
|
||||
})
|
||||
|
||||
// Attempt to parse agent response JSON
|
||||
let agentJson: any = null
|
||||
let agentText: string | null = null
|
||||
try {
|
||||
agentJson = await agentRes.json()
|
||||
} catch (_) {
|
||||
try {
|
||||
agentText = await agentRes.text()
|
||||
} catch {}
|
||||
}
|
||||
|
||||
logger.info(`[${tracker.requestId}] Agent responded to mark-complete`, {
|
||||
status: agentRes.status,
|
||||
ok: agentRes.ok,
|
||||
responseJsonPreview: agentJson ? JSON.stringify(agentJson).slice(0, 300) : undefined,
|
||||
responseTextPreview: agentText ? agentText.slice(0, 300) : undefined,
|
||||
})
|
||||
|
||||
if (agentRes.ok) {
|
||||
return NextResponse.json({ success: true })
|
||||
}
|
||||
|
||||
const errorMessage =
|
||||
agentJson?.error || agentText || `Agent responded with status ${agentRes.status}`
|
||||
const status = agentRes.status >= 500 ? 500 : 400
|
||||
|
||||
logger.warn(`[${tracker.requestId}] Mark-complete failed`, {
|
||||
status,
|
||||
error: errorMessage,
|
||||
})
|
||||
|
||||
return NextResponse.json({ success: false, error: errorMessage }, { status })
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
logger.warn(`[${tracker.requestId}] Invalid mark-complete request body`, {
|
||||
issues: error.issues,
|
||||
})
|
||||
return createBadRequestResponse('Invalid request body for mark-complete')
|
||||
}
|
||||
logger.error(`[${tracker.requestId}] Failed to proxy mark-complete:`, error)
|
||||
return createInternalServerErrorResponse('Failed to mark tool as complete')
|
||||
}
|
||||
}
|
||||
@@ -28,6 +28,7 @@ const DEFAULT_ENABLED_MODELS: Record<CopilotModelId, boolean> = {
|
||||
'claude-4-sonnet': false,
|
||||
'claude-4.5-haiku': true,
|
||||
'claude-4.5-sonnet': true,
|
||||
'claude-4.6-opus': true,
|
||||
'claude-4.5-opus': true,
|
||||
'claude-4.1-opus': false,
|
||||
'gemini-3-pro': true,
|
||||
|
||||
@@ -0,0 +1,6 @@
|
||||
import type { NextRequest, NextResponse } from 'next/server'
|
||||
import { createMcpAuthorizationServerMetadataResponse } from '@/lib/mcp/oauth-discovery'
|
||||
|
||||
export async function GET(request: NextRequest): Promise<NextResponse> {
|
||||
return createMcpAuthorizationServerMetadataResponse(request)
|
||||
}
|
||||
@@ -0,0 +1,6 @@
|
||||
import type { NextRequest, NextResponse } from 'next/server'
|
||||
import { createMcpProtectedResourceMetadataResponse } from '@/lib/mcp/oauth-discovery'
|
||||
|
||||
export async function GET(request: NextRequest): Promise<NextResponse> {
|
||||
return createMcpProtectedResourceMetadataResponse(request)
|
||||
}
|
||||
793
apps/sim/app/api/mcp/copilot/route.ts
Normal file
793
apps/sim/app/api/mcp/copilot/route.ts
Normal file
@@ -0,0 +1,793 @@
|
||||
import { randomUUID } from 'node:crypto'
|
||||
import { Server } from '@modelcontextprotocol/sdk/server/index.js'
|
||||
import { StreamableHTTPServerTransport } from '@modelcontextprotocol/sdk/server/streamableHttp.js'
|
||||
import {
|
||||
CallToolRequestSchema,
|
||||
type CallToolResult,
|
||||
ErrorCode,
|
||||
type JSONRPCError,
|
||||
ListToolsRequestSchema,
|
||||
type ListToolsResult,
|
||||
McpError,
|
||||
type RequestId,
|
||||
} from '@modelcontextprotocol/sdk/types.js'
|
||||
import { db } from '@sim/db'
|
||||
import { userStats } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { eq, sql } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { getHighestPrioritySubscription } from '@/lib/billing/core/subscription'
|
||||
import { getCopilotModel } from '@/lib/copilot/config'
|
||||
import {
|
||||
ORCHESTRATION_TIMEOUT_MS,
|
||||
SIM_AGENT_API_URL,
|
||||
SIM_AGENT_VERSION,
|
||||
} from '@/lib/copilot/constants'
|
||||
import { orchestrateCopilotStream } from '@/lib/copilot/orchestrator'
|
||||
import { orchestrateSubagentStream } from '@/lib/copilot/orchestrator/subagent'
|
||||
import {
|
||||
executeToolServerSide,
|
||||
prepareExecutionContext,
|
||||
} from '@/lib/copilot/orchestrator/tool-executor'
|
||||
import { DIRECT_TOOL_DEFS, SUBAGENT_TOOL_DEFS } from '@/lib/copilot/tools/mcp/definitions'
|
||||
import { env } from '@/lib/core/config/env'
|
||||
import { RateLimiter } from '@/lib/core/rate-limiter'
|
||||
import { resolveWorkflowIdForUser } from '@/lib/workflows/utils'
|
||||
|
||||
/** Logger for the MCP-over-HTTP copilot endpoint. */
const logger = createLogger('CopilotMcpAPI')
// Rate limiter instance guarding MCP requests on this route.
const mcpRateLimiter = new RateLimiter()

// Next.js route config: always dynamic, Node runtime, and allow
// long-running requests (up to 300 s) for orchestration.
export const dynamic = 'force-dynamic'
export const runtime = 'nodejs'
export const maxDuration = 300
|
||||
|
||||
/** Outcome of validating a copilot API key against the agent service. */
interface CopilotKeyAuthResult {
  success: boolean
  // Set on success: the user the key belongs to.
  userId?: string
  // Set on failure: human-readable reason suitable for returning to callers.
  error?: string
}
|
||||
|
||||
/**
|
||||
* Validates a copilot API key by forwarding it to the Go copilot service's
|
||||
* `/api/validate-key` endpoint. Returns the associated userId on success.
|
||||
*/
|
||||
async function authenticateCopilotApiKey(apiKey: string): Promise<CopilotKeyAuthResult> {
|
||||
try {
|
||||
const internalSecret = env.INTERNAL_API_SECRET
|
||||
if (!internalSecret) {
|
||||
logger.error('INTERNAL_API_SECRET not configured')
|
||||
return { success: false, error: 'Server configuration error' }
|
||||
}
|
||||
|
||||
const res = await fetch(`${SIM_AGENT_API_URL}/api/validate-key`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'x-api-key': internalSecret,
|
||||
},
|
||||
body: JSON.stringify({ targetApiKey: apiKey }),
|
||||
signal: AbortSignal.timeout(10_000),
|
||||
})
|
||||
|
||||
if (!res.ok) {
|
||||
const body = await res.json().catch(() => null)
|
||||
const upstream = (body as Record<string, unknown>)?.message
|
||||
const status = res.status
|
||||
|
||||
if (status === 401 || status === 403) {
|
||||
return {
|
||||
success: false,
|
||||
error: `Invalid Copilot API key. Generate a new key in Settings → Copilot and set it in the x-api-key header.`,
|
||||
}
|
||||
}
|
||||
if (status === 402) {
|
||||
return {
|
||||
success: false,
|
||||
error: `Usage limit exceeded for this Copilot API key. Upgrade your plan or wait for your quota to reset.`,
|
||||
}
|
||||
}
|
||||
|
||||
return { success: false, error: String(upstream ?? 'Copilot API key validation failed') }
|
||||
}
|
||||
|
||||
const data = (await res.json()) as { ok?: boolean; userId?: string }
|
||||
if (!data.ok || !data.userId) {
|
||||
return {
|
||||
success: false,
|
||||
error: 'Invalid Copilot API key. Generate a new key in Settings → Copilot.',
|
||||
}
|
||||
}
|
||||
|
||||
return { success: true, userId: data.userId }
|
||||
} catch (error) {
|
||||
logger.error('Copilot API key validation failed', { error })
|
||||
return {
|
||||
success: false,
|
||||
error:
|
||||
'Could not validate Copilot API key — the authentication service is temporarily unreachable. This is NOT a problem with the API key itself; please retry shortly.',
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * MCP Server instructions that guide LLMs on how to use the Sim copilot tools.
 * This is included in the initialize response to help external LLMs understand
 * the workflow lifecycle and best practices.
 *
 * NOTE: this template literal is sent verbatim to clients — treat the text
 * (including escaped backticks) as part of the wire protocol, not a comment.
 */
const MCP_SERVER_INSTRUCTIONS = `
## Sim Workflow Copilot

Sim is a workflow automation platform. Workflows are visual pipelines of connected blocks (Agent, Function, Condition, API, integrations, etc.). The Agent block is the core — an LLM with tools, memory, structured output, and knowledge bases.

### Workflow Lifecycle (Happy Path)

1. \`list_workspaces\` → know where to work
2. \`create_workflow(name, workspaceId)\` → get a workflowId
3. \`sim_build(request, workflowId)\` → plan and build in one pass
4. \`sim_test(request, workflowId)\` → verify it works
5. \`sim_deploy("deploy as api", workflowId)\` → make it accessible externally (optional)

For fine-grained control, use \`sim_plan\` → \`sim_edit\` instead of \`sim_build\`. Pass the plan object from sim_plan EXACTLY as-is to sim_edit's context.plan field.

### Working with Existing Workflows

When the user refers to a workflow by name or description ("the email one", "my Slack bot"):
1. Use \`sim_discovery\` to find it by functionality
2. Or use \`list_workflows\` and match by name
3. Then pass the workflowId to other tools

### Organization

- \`rename_workflow\` — rename a workflow
- \`move_workflow\` — move a workflow into a folder (or root with null)
- \`move_folder\` — nest a folder inside another (or root with null)
- \`create_folder(name, parentId)\` — create nested folder hierarchies

### Key Rules

- You can test workflows immediately after building — deployment is only needed for external access (API, chat, MCP).
- All copilot tools (build, plan, edit, deploy, test, debug) require workflowId.
- If the user reports errors → use \`sim_debug\` first, don't guess.
- Variable syntax: \`<blockname.field>\` for block outputs, \`{{ENV_VAR}}\` for env vars.
`
|
||||
|
||||
// Plain-object view of HTTP headers; a value may be single, multi, or absent.
type HeaderMap = Record<string, string | string[] | undefined>
|
||||
|
||||
function createError(id: RequestId, code: ErrorCode | number, message: string): JSONRPCError {
|
||||
return {
|
||||
jsonrpc: '2.0',
|
||||
id,
|
||||
error: { code, message },
|
||||
}
|
||||
}
|
||||
|
||||
function normalizeRequestHeaders(request: NextRequest): HeaderMap {
|
||||
const headers: HeaderMap = {}
|
||||
|
||||
request.headers.forEach((value, key) => {
|
||||
headers[key.toLowerCase()] = value
|
||||
})
|
||||
|
||||
return headers
|
||||
}
|
||||
|
||||
function readHeader(headers: HeaderMap | undefined, name: string): string | undefined {
|
||||
if (!headers) return undefined
|
||||
const value = headers[name.toLowerCase()]
|
||||
if (Array.isArray(value)) {
|
||||
return value[0]
|
||||
}
|
||||
return value
|
||||
}
|
||||
|
||||
/**
 * Node-style ServerResponse shim that captures what the MCP SDK transport
 * writes (status, headers, body chunks) and re-exposes the body as a web
 * ReadableStream so it can be returned as a NextResponse.
 *
 * handleMcpRequestWithSdk passes an instance of this class where the SDK
 * expects a Node response object; the SDK only uses the writeHead / write /
 * end / flushHeaders / on surface implemented here.
 */
class NextResponseCapture {
  private _status = 200
  private _headers = new Headers()
  private _controller: ReadableStreamDefaultController<Uint8Array> | null = null
  // Chunks written before the ReadableStream's start() callback has run.
  private _pendingChunks: Uint8Array[] = []
  private _closeHandlers: Array<() => void> = []
  private _errorHandlers: Array<(error: Error) => void> = []
  private _headersWritten = false
  private _ended = false
  // Resolved the first time headers become final (writeHead/flushHeaders/write/end).
  private _headersPromise: Promise<void>
  private _resolveHeaders: (() => void) | null = null
  // Resolved once end() runs or the consumer cancels the stream.
  private _endedPromise: Promise<void>
  private _resolveEnded: (() => void) | null = null
  readonly readable: ReadableStream<Uint8Array>

  constructor() {
    this._headersPromise = new Promise<void>((resolve) => {
      this._resolveHeaders = resolve
    })

    this._endedPromise = new Promise<void>((resolve) => {
      this._resolveEnded = resolve
    })

    this.readable = new ReadableStream<Uint8Array>({
      start: (controller) => {
        this._controller = controller
        // Flush anything that was written before the stream started.
        if (this._pendingChunks.length > 0) {
          for (const chunk of this._pendingChunks) {
            controller.enqueue(chunk)
          }
          this._pendingChunks = []
        }
      },
      cancel: () => {
        // Consumer went away: treat as end-of-response and notify listeners.
        this._ended = true
        this._resolveEnded?.()
        this.triggerCloseHandlers()
      },
    })
  }

  // Idempotently mark headers as final and release waitForHeaders().
  private markHeadersWritten(): void {
    if (this._headersWritten) return
    this._headersWritten = true
    this._resolveHeaders?.()
  }

  // Invoke 'close' listeners; a throwing handler is routed to 'error' listeners.
  private triggerCloseHandlers(): void {
    for (const handler of this._closeHandlers) {
      try {
        handler()
      } catch (error) {
        this.triggerErrorHandlers(error instanceof Error ? error : new Error(String(error)))
      }
    }
  }

  private triggerErrorHandlers(error: Error): void {
    for (const errorHandler of this._errorHandlers) {
      errorHandler(error)
    }
  }

  // Coerce whatever the transport writes into bytes; null/undefined means "skip".
  private normalizeChunk(chunk: unknown): Uint8Array | null {
    if (typeof chunk === 'string') {
      return new TextEncoder().encode(chunk)
    }

    if (chunk instanceof Uint8Array) {
      return chunk
    }

    if (chunk === undefined || chunk === null) {
      return null
    }

    return new TextEncoder().encode(String(chunk))
  }

  // ServerResponse#writeHead equivalent: record status and (merged) headers.
  writeHead(status: number, headers?: Record<string, string | number | string[]>): this {
    this._status = status

    if (headers) {
      Object.entries(headers).forEach(([key, value]) => {
        if (Array.isArray(value)) {
          // Multiple values for one header are joined into a single field.
          this._headers.set(key, value.join(', '))
        } else {
          this._headers.set(key, String(value))
        }
      })
    }

    this.markHeadersWritten()
    return this
  }

  flushHeaders(): this {
    this.markHeadersWritten()
    return this
  }

  // ServerResponse#write equivalent; buffers until the stream has started.
  // Always reports backpressure-free (true), as the stream buffers internally.
  write(chunk: unknown): boolean {
    const normalized = this.normalizeChunk(chunk)
    if (!normalized) return true

    this.markHeadersWritten()

    if (this._controller) {
      try {
        this._controller.enqueue(normalized)
      } catch (error) {
        // enqueue throws once the consumer has cancelled the stream.
        this.triggerErrorHandlers(error instanceof Error ? error : new Error(String(error)))
      }
    } else {
      this._pendingChunks.push(normalized)
    }

    return true
  }

  // ServerResponse#end equivalent: optional final chunk, close the stream,
  // fire 'close' listeners. Safe to call more than once (no-op after first).
  end(chunk?: unknown): this {
    if (chunk !== undefined) this.write(chunk)
    this.markHeadersWritten()
    if (this._ended) return this

    this._ended = true
    this._resolveEnded?.()

    if (this._controller) {
      try {
        this._controller.close()
      } catch (error) {
        this.triggerErrorHandlers(error instanceof Error ? error : new Error(String(error)))
      }
    }

    this.triggerCloseHandlers()

    return this
  }

  // Resolve once status/headers are final, or after timeoutMs (no rejection).
  async waitForHeaders(timeoutMs = 30000): Promise<void> {
    if (this._headersWritten) return

    await Promise.race([
      this._headersPromise,
      new Promise<void>((resolve) => {
        setTimeout(resolve, timeoutMs)
      }),
    ])
  }

  // Resolve once the response has ended/cancelled, or after timeoutMs.
  async waitForEnd(timeoutMs = 30000): Promise<void> {
    if (this._ended) return

    await Promise.race([
      this._endedPromise,
      new Promise<void>((resolve) => {
        setTimeout(resolve, timeoutMs)
      }),
    ])
  }

  // Minimal EventEmitter-compatible surface ('close' and 'error' only).
  on(event: 'close' | 'error', handler: (() => void) | ((error: Error) => void)): this {
    if (event === 'close') {
      this._closeHandlers.push(handler as () => void)
    }

    if (event === 'error') {
      this._errorHandlers.push(handler as (error: Error) => void)
    }

    return this
  }

  // Materialize the captured status/headers/body as a streaming NextResponse.
  toNextResponse(): NextResponse {
    return new NextResponse(this.readable, {
      status: this._status,
      headers: this._headers,
    })
  }
}
|
||||
|
||||
/**
 * Construct the MCP server exposing Sim copilot tooling.
 *
 * tools/list advertises the direct tools plus the subagent tools.
 * tools/call: (1) requires a Copilot API key via the x-api-key header,
 * (2) enforces the per-user rate limit, (3) dispatches via handleToolsCall,
 * and (4) records usage fire-and-forget.
 *
 * Auth and rate-limit failures are returned as isError tool results rather
 * than thrown, so MCP clients surface the explanatory text to the model
 * instead of treating it as a protocol error.
 *
 * @param abortSignal — propagated into tool execution so long-running tools
 *   stop when the originating HTTP request is aborted.
 */
function buildMcpServer(abortSignal?: AbortSignal): Server {
  const server = new Server(
    {
      name: 'sim-copilot',
      version: '1.0.0',
    },
    {
      capabilities: { tools: {} },
      instructions: MCP_SERVER_INSTRUCTIONS,
    }
  )

  server.setRequestHandler(ListToolsRequestSchema, async () => {
    const directTools = DIRECT_TOOL_DEFS.map((tool) => ({
      name: tool.name,
      description: tool.description,
      inputSchema: tool.inputSchema,
    }))

    const subagentTools = SUBAGENT_TOOL_DEFS.map((tool) => ({
      name: tool.name,
      description: tool.description,
      inputSchema: tool.inputSchema,
    }))

    const result: ListToolsResult = {
      tools: [...directTools, ...subagentTools],
    }

    return result
  })

  server.setRequestHandler(CallToolRequestSchema, async (request, extra) => {
    // Per-request headers are forwarded by the transport via extra.requestInfo.
    const headers = (extra.requestInfo?.headers || {}) as HeaderMap
    const apiKeyHeader = readHeader(headers, 'x-api-key')

    if (!apiKeyHeader) {
      return {
        content: [
          {
            type: 'text' as const,
            text: 'AUTHENTICATION ERROR: No Copilot API key provided. The user must set their Copilot API key in the x-api-key header. They can generate one in the Sim app under Settings → Copilot. Do NOT retry — this will fail until the key is configured.',
          },
        ],
        isError: true,
      }
    }

    const authResult = await authenticateCopilotApiKey(apiKeyHeader)
    if (!authResult.success || !authResult.userId) {
      logger.warn('MCP copilot key auth failed', { method: request.method })
      return {
        content: [
          {
            type: 'text' as const,
            text: `AUTHENTICATION ERROR: ${authResult.error} Do NOT retry — this will fail until the user fixes their Copilot API key.`,
          },
        ],
        isError: true,
      }
    }

    // Rate limit is evaluated against the user's highest-priority subscription.
    const rateLimitResult = await mcpRateLimiter.checkRateLimitWithSubscription(
      authResult.userId,
      await getHighestPrioritySubscription(authResult.userId),
      'api-endpoint',
      false
    )

    if (!rateLimitResult.allowed) {
      return {
        content: [
          {
            type: 'text' as const,
            text: `RATE LIMIT: Too many requests. Please wait and retry after ${rateLimitResult.resetAt.toISOString()}.`,
          },
        ],
        isError: true,
      }
    }

    const params = request.params as
      | { name?: string; arguments?: Record<string, unknown> }
      | undefined
    if (!params?.name) {
      throw new McpError(ErrorCode.InvalidParams, 'Tool name required')
    }

    const result = await handleToolsCall(
      {
        name: params.name,
        arguments: params.arguments,
      },
      authResult.userId,
      abortSignal
    )

    // Fire-and-forget usage accounting; never blocks the response.
    trackMcpCopilotCall(authResult.userId)

    return result
  })

  return server
}
|
||||
|
||||
/**
 * Bridge a Next.js request into the MCP SDK's streamable HTTP transport.
 *
 * A fresh server + transport pair is created per request (stateless:
 * sessionIdGenerator is undefined) and always torn down in the finally
 * block, even when the transport throws.
 *
 * @param request — the incoming Next.js request (method/headers/abort signal).
 * @param parsedBody — the already-parsed JSON-RPC body (parsed by POST below).
 * @returns the captured transport output as a streaming NextResponse.
 */
async function handleMcpRequestWithSdk(
  request: NextRequest,
  parsedBody: unknown
): Promise<NextResponse> {
  const server = buildMcpServer(request.signal)
  const transport = new StreamableHTTPServerTransport({
    sessionIdGenerator: undefined,
    enableJsonResponse: true,
  })

  const responseCapture = new NextResponseCapture()
  // Minimal Node-request-like shape the SDK reads; the body is passed
  // separately as the third argument to handleRequest.
  const requestAdapter = {
    method: request.method,
    headers: normalizeRequestHeaders(request),
  }

  await server.connect(transport)

  try {
    await transport.handleRequest(requestAdapter as any, responseCapture as any, parsedBody)
    await responseCapture.waitForHeaders()
    // Must exceed the longest possible tool execution (build = 5 min).
    // Using ORCHESTRATION_TIMEOUT_MS + 60 s buffer so the orchestrator can
    // finish or time-out on its own before the transport is torn down.
    await responseCapture.waitForEnd(ORCHESTRATION_TIMEOUT_MS + 60_000)
    return responseCapture.toNextResponse()
  } finally {
    // Best-effort teardown: close failures must not mask the response/error.
    await server.close().catch(() => {})
    await transport.close().catch(() => {})
  }
}
|
||||
|
||||
export async function GET() {
|
||||
// Return 405 to signal that server-initiated SSE notifications are not
|
||||
// supported. Without this, clients like mcp-remote will repeatedly
|
||||
// reconnect trying to open an SSE stream, flooding the logs with GETs.
|
||||
return new NextResponse(null, { status: 405 })
|
||||
}
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
try {
|
||||
let parsedBody: unknown
|
||||
|
||||
try {
|
||||
parsedBody = await request.json()
|
||||
} catch {
|
||||
return NextResponse.json(createError(0, ErrorCode.ParseError, 'Invalid JSON body'), {
|
||||
status: 400,
|
||||
})
|
||||
}
|
||||
|
||||
return await handleMcpRequestWithSdk(request, parsedBody)
|
||||
} catch (error) {
|
||||
logger.error('Error handling MCP request', { error })
|
||||
return NextResponse.json(createError(0, ErrorCode.InternalError, 'Internal error'), {
|
||||
status: 500,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
export async function DELETE(request: NextRequest) {
|
||||
void request
|
||||
return NextResponse.json(createError(0, -32000, 'Method not allowed.'), { status: 405 })
|
||||
}
|
||||
|
||||
/**
 * Increment MCP copilot call counter in userStats (fire-and-forget).
 *
 * Deliberately not awaited by callers: usage tracking must never delay or
 * fail a tool response, so database errors are only logged.
 */
function trackMcpCopilotCall(userId: string): void {
  db.update(userStats)
    .set({
      // Raw SQL increment avoids a read-modify-write round trip.
      totalMcpCopilotCalls: sql`total_mcp_copilot_calls + 1`,
      lastActive: new Date(),
    })
    .where(eq(userStats.userId, userId))
    .then(() => {})
    .catch((error) => {
      logger.error('Failed to track MCP copilot call', { error, userId })
    })
}
|
||||
|
||||
async function handleToolsCall(
|
||||
params: { name: string; arguments?: Record<string, unknown> },
|
||||
userId: string,
|
||||
abortSignal?: AbortSignal
|
||||
): Promise<CallToolResult> {
|
||||
const args = params.arguments || {}
|
||||
|
||||
const directTool = DIRECT_TOOL_DEFS.find((tool) => tool.name === params.name)
|
||||
if (directTool) {
|
||||
return handleDirectToolCall(directTool, args, userId)
|
||||
}
|
||||
|
||||
const subagentTool = SUBAGENT_TOOL_DEFS.find((tool) => tool.name === params.name)
|
||||
if (subagentTool) {
|
||||
return handleSubagentToolCall(subagentTool, args, userId, abortSignal)
|
||||
}
|
||||
|
||||
throw new McpError(ErrorCode.MethodNotFound, `Tool not found: ${params.name}`)
|
||||
}
|
||||
|
||||
async function handleDirectToolCall(
|
||||
toolDef: (typeof DIRECT_TOOL_DEFS)[number],
|
||||
args: Record<string, unknown>,
|
||||
userId: string
|
||||
): Promise<CallToolResult> {
|
||||
try {
|
||||
const execContext = await prepareExecutionContext(userId, (args.workflowId as string) || '')
|
||||
|
||||
const toolCall = {
|
||||
id: randomUUID(),
|
||||
name: toolDef.toolId,
|
||||
status: 'pending' as const,
|
||||
params: args as Record<string, any>,
|
||||
startTime: Date.now(),
|
||||
}
|
||||
|
||||
const result = await executeToolServerSide(toolCall, execContext)
|
||||
|
||||
return {
|
||||
content: [
|
||||
{
|
||||
type: 'text',
|
||||
text: JSON.stringify(result.output ?? result, null, 2),
|
||||
},
|
||||
],
|
||||
isError: !result.success,
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error('Direct tool execution failed', { tool: toolDef.name, error })
|
||||
return {
|
||||
content: [
|
||||
{
|
||||
type: 'text',
|
||||
text: `Tool execution failed: ${error instanceof Error ? error.message : String(error)}`,
|
||||
},
|
||||
],
|
||||
isError: true,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Build mode uses the main chat orchestrator with the 'fast' command instead of
 * the subagent endpoint. In Go, 'build' is not a registered subagent — it's a mode
 * (ModeFast) on the main chat processor that bypasses subagent orchestration and
 * executes all tools directly.
 *
 * Never throws: all failures are returned as isError results so the MCP
 * client always receives a well-formed tools/call response.
 */
async function handleBuildToolCall(
  args: Record<string, unknown>,
  userId: string,
  abortSignal?: AbortSignal
): Promise<CallToolResult> {
  try {
    const requestText = (args.request as string) || JSON.stringify(args)
    const { model } = getCopilotModel('chat')
    const workflowId = args.workflowId as string | undefined

    // No workflowId supplied: try to resolve one for this user
    // (see resolveWorkflowIdForUser for the selection rules).
    const resolved = workflowId ? { workflowId } : await resolveWorkflowIdForUser(userId)

    if (!resolved?.workflowId) {
      return {
        content: [
          {
            type: 'text',
            text: JSON.stringify(
              {
                success: false,
                error: 'workflowId is required for build. Call create_workflow first.',
              },
              null,
              2
            ),
          },
        ],
        isError: true,
      }
    }

    const chatId = randomUUID()

    const requestPayload = {
      message: requestText,
      workflowId: resolved.workflowId,
      userId,
      model,
      mode: 'agent',
      commands: ['fast'], // 'fast' selects ModeFast on the chat processor
      messageId: randomUUID(),
      version: SIM_AGENT_VERSION,
      headless: true, // no interactive client on the other end
      chatId,
      source: 'mcp',
    }

    const result = await orchestrateCopilotStream(requestPayload, {
      userId,
      workflowId: resolved.workflowId,
      chatId,
      autoExecuteTools: true,
      timeout: 300000, // 5 min — the longest allowed build run
      interactive: false,
      abortSignal,
    })

    const responseData = {
      success: result.success,
      content: result.content,
      toolCalls: result.toolCalls,
      error: result.error,
    }

    return {
      content: [{ type: 'text', text: JSON.stringify(responseData, null, 2) }],
      isError: !result.success,
    }
  } catch (error) {
    logger.error('Build tool call failed', { error })
    return {
      content: [
        {
          type: 'text',
          text: `Build failed: ${error instanceof Error ? error.message : String(error)}`,
        },
      ],
      isError: true,
    }
  }
}
|
||||
|
||||
/**
 * Run a subagent tool through the copilot subagent orchestrator and convert
 * its outcome into an MCP tools/call result.
 *
 * 'build' is special-cased: it is a chat-processor mode, not a subagent, so
 * it is routed to handleBuildToolCall instead (see its doc comment).
 *
 * Never throws: all failures are returned as isError results.
 */
async function handleSubagentToolCall(
  toolDef: (typeof SUBAGENT_TOOL_DEFS)[number],
  args: Record<string, unknown>,
  userId: string,
  abortSignal?: AbortSignal
): Promise<CallToolResult> {
  if (toolDef.agentId === 'build') {
    return handleBuildToolCall(args, userId, abortSignal)
  }

  try {
    // Different tools put the user's request under different argument names
    // (request / message / error); fall back to the raw args as JSON.
    const requestText =
      (args.request as string) ||
      (args.message as string) ||
      (args.error as string) ||
      JSON.stringify(args)

    // A top-level `plan` argument is folded into context.plan (as required
    // by sim_edit) unless the caller already supplied one there.
    const context = (args.context as Record<string, unknown>) || {}
    if (args.plan && !context.plan) {
      context.plan = args.plan
    }

    const { model } = getCopilotModel('chat')

    const result = await orchestrateSubagentStream(
      toolDef.agentId,
      {
        message: requestText,
        workflowId: args.workflowId,
        workspaceId: args.workspaceId,
        context,
        model,
        headless: true,
        source: 'mcp',
      },
      {
        userId,
        workflowId: args.workflowId as string | undefined,
        workspaceId: args.workspaceId as string | undefined,
        abortSignal,
      }
    )

    let responseData: unknown

    // Prefer the structured result when available; otherwise fall back to
    // the error fields, then to the raw streamed content.
    if (result.structuredResult) {
      responseData = {
        success: result.structuredResult.success ?? result.success,
        type: result.structuredResult.type,
        summary: result.structuredResult.summary,
        data: result.structuredResult.data,
      }
    } else if (result.error) {
      responseData = {
        success: false,
        error: result.error,
        errors: result.errors,
      }
    } else {
      responseData = {
        success: result.success,
        content: result.content,
      }
    }

    return {
      content: [
        {
          type: 'text',
          text: JSON.stringify(responseData, null, 2),
        },
      ],
      isError: !result.success,
    }
  } catch (error) {
    logger.error('Subagent tool call failed', {
      tool: toolDef.name,
      agentId: toolDef.agentId,
      error,
    })

    return {
      content: [
        {
          type: 'text',
          text: `Subagent call failed: ${error instanceof Error ? error.message : String(error)}`,
        },
      ],
      isError: true,
    }
  }
}
|
||||
98
apps/sim/app/api/mcp/events/route.test.ts
Normal file
98
apps/sim/app/api/mcp/events/route.test.ts
Normal file
@@ -0,0 +1,98 @@
|
||||
/**
 * Tests for MCP SSE events endpoint
 *
 * @vitest-environment node
 */
import { createMockRequest, mockAuth, mockConsoleLogger } from '@sim/testing'
import { beforeEach, describe, expect, it, vi } from 'vitest'

mockConsoleLogger()
const auth = mockAuth()

// All vi.doMock calls must run before the route module is imported below,
// otherwise the route would bind the real implementations.
const mockGetUserEntityPermissions = vi.fn()
vi.doMock('@/lib/workspaces/permissions/utils', () => ({
  getUserEntityPermissions: mockGetUserEntityPermissions,
}))

// Null managers exercise the route's optional-dependency guards
// (it subscribes only when these are truthy).
vi.doMock('@/lib/mcp/connection-manager', () => ({
  mcpConnectionManager: null,
}))

vi.doMock('@/lib/mcp/pubsub', () => ({
  mcpPubSub: null,
}))

const { GET } = await import('./route')

describe('MCP Events SSE Endpoint', () => {
  beforeEach(() => {
    vi.clearAllMocks()
  })

  it('returns 401 when session is missing', async () => {
    auth.setUnauthenticated()

    const request = createMockRequest(
      'GET',
      undefined,
      {},
      'http://localhost:3000/api/mcp/events?workspaceId=ws-123'
    )

    const response = await GET(request as any)

    expect(response.status).toBe(401)
    const text = await response.text()
    expect(text).toBe('Unauthorized')
  })

  it('returns 400 when workspaceId is missing', async () => {
    auth.setAuthenticated()

    // No workspaceId query parameter on purpose.
    const request = createMockRequest('GET', undefined, {}, 'http://localhost:3000/api/mcp/events')

    const response = await GET(request as any)

    expect(response.status).toBe(400)
    const text = await response.text()
    expect(text).toBe('Missing workspaceId query parameter')
  })

  it('returns 403 when user lacks workspace access', async () => {
    auth.setAuthenticated()
    // Null permissions = no access to the workspace.
    mockGetUserEntityPermissions.mockResolvedValue(null)

    const request = createMockRequest(
      'GET',
      undefined,
      {},
      'http://localhost:3000/api/mcp/events?workspaceId=ws-123'
    )

    const response = await GET(request as any)

    expect(response.status).toBe(403)
    const text = await response.text()
    expect(text).toBe('Access denied to workspace')
    expect(mockGetUserEntityPermissions).toHaveBeenCalledWith('user-123', 'workspace', 'ws-123')
  })

  it('returns SSE stream when authorized', async () => {
    auth.setAuthenticated()
    mockGetUserEntityPermissions.mockResolvedValue({ read: true })

    const request = createMockRequest(
      'GET',
      undefined,
      {},
      'http://localhost:3000/api/mcp/events?workspaceId=ws-123'
    )

    const response = await GET(request as any)

    expect(response.status).toBe(200)
    expect(response.headers.get('Content-Type')).toBe('text/event-stream')
    expect(response.headers.get('Cache-Control')).toBe('no-cache')
    expect(response.headers.get('Connection')).toBe('keep-alive')
  })
})
||||
111
apps/sim/app/api/mcp/events/route.ts
Normal file
111
apps/sim/app/api/mcp/events/route.ts
Normal file
@@ -0,0 +1,111 @@
|
||||
/**
|
||||
* SSE endpoint for MCP tool-change events.
|
||||
*
|
||||
* Pushes `tools_changed` events to the browser when:
|
||||
* - An external MCP server sends `notifications/tools/list_changed` (via connection manager)
|
||||
* - A workflow CRUD route modifies workflow MCP server tools (via pub/sub)
|
||||
*
|
||||
* Auth is handled via session cookies (EventSource sends cookies automatically).
|
||||
*/
|
||||
|
||||
import { createLogger } from '@sim/logger'
|
||||
import type { NextRequest } from 'next/server'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { SSE_HEADERS } from '@/lib/core/utils/sse'
|
||||
import { mcpConnectionManager } from '@/lib/mcp/connection-manager'
|
||||
import { mcpPubSub } from '@/lib/mcp/pubsub'
|
||||
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
|
||||
|
||||
const logger = createLogger('McpEventsSSE')

// Force per-request rendering: an SSE response must never be statically cached.
export const dynamic = 'force-dynamic'

// Interval for SSE comment pings that keep idle connections open.
const HEARTBEAT_INTERVAL_MS = 30_000

/**
 * GET /api/mcp/events?workspaceId=…
 *
 * Opens a long-lived SSE stream of `tools_changed` events scoped to one
 * workspace. Responds 401 without a session, 400 without a workspaceId,
 * 403 when the user has no permissions on the workspace; otherwise streams
 * until the client disconnects (request.signal abort).
 */
export async function GET(request: NextRequest) {
  const session = await getSession()
  if (!session?.user?.id) {
    return new Response('Unauthorized', { status: 401 })
  }

  const { searchParams } = new URL(request.url)
  const workspaceId = searchParams.get('workspaceId')
  if (!workspaceId) {
    return new Response('Missing workspaceId query parameter', { status: 400 })
  }

  // A null permissions object means the user has no access to this workspace;
  // any truthy permissions value is sufficient to receive events.
  const permissions = await getUserEntityPermissions(session.user.id, 'workspace', workspaceId)
  if (!permissions) {
    return new Response('Access denied to workspace', { status: 403 })
  }

  const encoder = new TextEncoder()
  // Every subscription/interval registers an undo here; all are run on abort.
  const unsubscribers: Array<() => void> = []

  const stream = new ReadableStream({
    start(controller) {
      // Serialize one SSE event. enqueue throws once the stream is closed;
      // that is expected for events arriving after disconnect, so swallow it.
      const send = (eventName: string, data: Record<string, unknown>) => {
        try {
          controller.enqueue(
            encoder.encode(`event: ${eventName}\ndata: ${JSON.stringify(data)}\n\n`)
          )
        } catch {
          // Stream already closed
        }
      }

      // Subscribe to external MCP server tool changes
      if (mcpConnectionManager) {
        const unsub = mcpConnectionManager.subscribe((event) => {
          // Events from other workspaces are filtered out here.
          if (event.workspaceId !== workspaceId) return
          send('tools_changed', {
            source: 'external',
            serverId: event.serverId,
            timestamp: event.timestamp,
          })
        })
        unsubscribers.push(unsub)
      }

      // Subscribe to workflow CRUD tool changes
      if (mcpPubSub) {
        const unsub = mcpPubSub.onWorkflowToolsChanged((event) => {
          if (event.workspaceId !== workspaceId) return
          send('tools_changed', {
            source: 'workflow',
            serverId: event.serverId,
            timestamp: Date.now(),
          })
        })
        unsubscribers.push(unsub)
      }

      // Heartbeat to keep the connection alive (sent as an SSE comment line)
      const heartbeat = setInterval(() => {
        try {
          controller.enqueue(encoder.encode(': heartbeat\n\n'))
        } catch {
          // Stream closed: stop pinging.
          clearInterval(heartbeat)
        }
      }, HEARTBEAT_INTERVAL_MS)
      unsubscribers.push(() => clearInterval(heartbeat))

      // Cleanup when client disconnects
      request.signal.addEventListener('abort', () => {
        for (const unsub of unsubscribers) {
          unsub()
        }
        try {
          controller.close()
        } catch {
          // Already closed
        }
        logger.info(`SSE connection closed for workspace ${workspaceId}`)
      })

      logger.info(`SSE connection opened for workspace ${workspaceId}`)
    },
  })

  return new Response(stream, { headers: SSE_HEADERS })
}
|
||||
@@ -4,6 +4,7 @@ import { createLogger } from '@sim/logger'
|
||||
import { and, eq } from 'drizzle-orm'
|
||||
import type { NextRequest } from 'next/server'
|
||||
import { getParsedBody, withMcpAuth } from '@/lib/mcp/middleware'
|
||||
import { mcpPubSub } from '@/lib/mcp/pubsub'
|
||||
import { createMcpErrorResponse, createMcpSuccessResponse } from '@/lib/mcp/utils'
|
||||
|
||||
const logger = createLogger('WorkflowMcpServerAPI')
|
||||
@@ -146,6 +147,8 @@ export const DELETE = withMcpAuth<RouteParams>('admin')(
|
||||
|
||||
logger.info(`[${requestId}] Successfully deleted workflow MCP server: ${serverId}`)
|
||||
|
||||
mcpPubSub?.publishWorkflowToolsChanged({ serverId, workspaceId })
|
||||
|
||||
return createMcpSuccessResponse({ message: `Server ${serverId} deleted successfully` })
|
||||
} catch (error) {
|
||||
logger.error(`[${requestId}] Error deleting workflow MCP server:`, error)
|
||||
|
||||
@@ -4,6 +4,7 @@ import { createLogger } from '@sim/logger'
|
||||
import { and, eq } from 'drizzle-orm'
|
||||
import type { NextRequest } from 'next/server'
|
||||
import { getParsedBody, withMcpAuth } from '@/lib/mcp/middleware'
|
||||
import { mcpPubSub } from '@/lib/mcp/pubsub'
|
||||
import { createMcpErrorResponse, createMcpSuccessResponse } from '@/lib/mcp/utils'
|
||||
import { sanitizeToolName } from '@/lib/mcp/workflow-tool-schema'
|
||||
|
||||
@@ -115,6 +116,8 @@ export const PATCH = withMcpAuth<RouteParams>('write')(
|
||||
|
||||
logger.info(`[${requestId}] Successfully updated tool ${toolId}`)
|
||||
|
||||
mcpPubSub?.publishWorkflowToolsChanged({ serverId, workspaceId })
|
||||
|
||||
return createMcpSuccessResponse({ tool: updatedTool })
|
||||
} catch (error) {
|
||||
logger.error(`[${requestId}] Error updating tool:`, error)
|
||||
@@ -160,6 +163,8 @@ export const DELETE = withMcpAuth<RouteParams>('write')(
|
||||
|
||||
logger.info(`[${requestId}] Successfully deleted tool ${toolId}`)
|
||||
|
||||
mcpPubSub?.publishWorkflowToolsChanged({ serverId, workspaceId })
|
||||
|
||||
return createMcpSuccessResponse({ message: `Tool ${toolId} deleted successfully` })
|
||||
} catch (error) {
|
||||
logger.error(`[${requestId}] Error deleting tool:`, error)
|
||||
|
||||
@@ -4,6 +4,7 @@ import { createLogger } from '@sim/logger'
|
||||
import { and, eq } from 'drizzle-orm'
|
||||
import type { NextRequest } from 'next/server'
|
||||
import { getParsedBody, withMcpAuth } from '@/lib/mcp/middleware'
|
||||
import { mcpPubSub } from '@/lib/mcp/pubsub'
|
||||
import { createMcpErrorResponse, createMcpSuccessResponse } from '@/lib/mcp/utils'
|
||||
import { sanitizeToolName } from '@/lib/mcp/workflow-tool-schema'
|
||||
import { hasValidStartBlock } from '@/lib/workflows/triggers/trigger-utils.server'
|
||||
@@ -188,6 +189,8 @@ export const POST = withMcpAuth<RouteParams>('write')(
|
||||
`[${requestId}] Successfully added tool ${toolName} (workflow: ${body.workflowId}) to server ${serverId}`
|
||||
)
|
||||
|
||||
mcpPubSub?.publishWorkflowToolsChanged({ serverId, workspaceId })
|
||||
|
||||
return createMcpSuccessResponse({ tool }, 201)
|
||||
} catch (error) {
|
||||
logger.error(`[${requestId}] Error adding tool:`, error)
|
||||
|
||||
@@ -4,6 +4,7 @@ import { createLogger } from '@sim/logger'
|
||||
import { eq, inArray, sql } from 'drizzle-orm'
|
||||
import type { NextRequest } from 'next/server'
|
||||
import { getParsedBody, withMcpAuth } from '@/lib/mcp/middleware'
|
||||
import { mcpPubSub } from '@/lib/mcp/pubsub'
|
||||
import { createMcpErrorResponse, createMcpSuccessResponse } from '@/lib/mcp/utils'
|
||||
import { sanitizeToolName } from '@/lib/mcp/workflow-tool-schema'
|
||||
import { hasValidStartBlock } from '@/lib/workflows/triggers/trigger-utils.server'
|
||||
@@ -174,6 +175,10 @@ export const POST = withMcpAuth('write')(
|
||||
`[${requestId}] Added ${addedTools.length} tools to server ${serverId}:`,
|
||||
addedTools.map((t) => t.toolName)
|
||||
)
|
||||
|
||||
if (addedTools.length > 0) {
|
||||
mcpPubSub?.publishWorkflowToolsChanged({ serverId, workspaceId })
|
||||
}
|
||||
}
|
||||
|
||||
logger.info(
|
||||
|
||||
@@ -90,16 +90,24 @@ export async function POST(request: NextRequest) {
|
||||
)
|
||||
}
|
||||
|
||||
const attachments = await response.json()
|
||||
const attachmentIds = Array.isArray(attachments)
|
||||
? attachments.map((attachment) => attachment.id).filter(Boolean)
|
||||
: []
|
||||
const jiraAttachments = await response.json()
|
||||
const attachmentsList = Array.isArray(jiraAttachments) ? jiraAttachments : []
|
||||
|
||||
const attachmentIds = attachmentsList.map((att: any) => att.id).filter(Boolean)
|
||||
const attachments = attachmentsList.map((att: any) => ({
|
||||
id: att.id ?? '',
|
||||
filename: att.filename ?? '',
|
||||
mimeType: att.mimeType ?? '',
|
||||
size: att.size ?? 0,
|
||||
content: att.content ?? '',
|
||||
}))
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
output: {
|
||||
ts: new Date().toISOString(),
|
||||
issueKey: validatedData.issueKey,
|
||||
attachments,
|
||||
attachmentIds,
|
||||
files: filesOutput,
|
||||
},
|
||||
|
||||
@@ -1,111 +0,0 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
|
||||
import { validateJiraCloudId, validateJiraIssueKey } from '@/lib/core/security/input-validation'
|
||||
import { getJiraCloudId } from '@/tools/jira/utils'
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
const logger = createLogger('JiraIssueAPI')
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
try {
|
||||
const auth = await checkSessionOrInternalAuth(request)
|
||||
if (!auth.success || !auth.userId) {
|
||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const { domain, accessToken, issueId, cloudId: providedCloudId } = await request.json()
|
||||
if (!domain) {
|
||||
logger.error('Missing domain in request')
|
||||
return NextResponse.json({ error: 'Domain is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
if (!accessToken) {
|
||||
logger.error('Missing access token in request')
|
||||
return NextResponse.json({ error: 'Access token is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
if (!issueId) {
|
||||
logger.error('Missing issue ID in request')
|
||||
return NextResponse.json({ error: 'Issue ID is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
const cloudId = providedCloudId || (await getJiraCloudId(domain, accessToken))
|
||||
logger.info('Using cloud ID:', cloudId)
|
||||
|
||||
const cloudIdValidation = validateJiraCloudId(cloudId, 'cloudId')
|
||||
if (!cloudIdValidation.isValid) {
|
||||
return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 })
|
||||
}
|
||||
|
||||
const issueIdValidation = validateJiraIssueKey(issueId, 'issueId')
|
||||
if (!issueIdValidation.isValid) {
|
||||
return NextResponse.json({ error: issueIdValidation.error }, { status: 400 })
|
||||
}
|
||||
|
||||
const url = `https://api.atlassian.com/ex/jira/${cloudId}/rest/api/3/issue/${issueId}`
|
||||
|
||||
logger.info('Fetching Jira issue from:', url)
|
||||
|
||||
const response = await fetch(url, {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
Accept: 'application/json',
|
||||
},
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
logger.error('Jira API error:', {
|
||||
status: response.status,
|
||||
statusText: response.statusText,
|
||||
})
|
||||
|
||||
let errorMessage
|
||||
try {
|
||||
const errorData = await response.json()
|
||||
logger.error('Error details:', errorData)
|
||||
errorMessage = errorData.message || `Failed to fetch issue (${response.status})`
|
||||
} catch (_e) {
|
||||
errorMessage = `Failed to fetch issue: ${response.status} ${response.statusText}`
|
||||
}
|
||||
return NextResponse.json({ error: errorMessage }, { status: response.status })
|
||||
}
|
||||
|
||||
const data = await response.json()
|
||||
logger.info('Successfully fetched issue:', data.key)
|
||||
|
||||
const issueInfo: any = {
|
||||
id: data.key,
|
||||
name: data.fields.summary,
|
||||
mimeType: 'jira/issue',
|
||||
url: `https://${domain}/browse/${data.key}`,
|
||||
modifiedTime: data.fields.updated,
|
||||
webViewLink: `https://${domain}/browse/${data.key}`,
|
||||
status: data.fields.status?.name,
|
||||
description: data.fields.description,
|
||||
priority: data.fields.priority?.name,
|
||||
assignee: data.fields.assignee?.displayName,
|
||||
reporter: data.fields.reporter?.displayName,
|
||||
project: {
|
||||
key: data.fields.project?.key,
|
||||
name: data.fields.project?.name,
|
||||
},
|
||||
}
|
||||
|
||||
return NextResponse.json({
|
||||
issue: issueInfo,
|
||||
cloudId,
|
||||
})
|
||||
} catch (error) {
|
||||
logger.error('Error processing request:', error)
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: 'Failed to retrieve Jira issue',
|
||||
details: (error as Error).message,
|
||||
},
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -16,9 +16,16 @@ const jiraUpdateSchema = z.object({
|
||||
summary: z.string().optional(),
|
||||
title: z.string().optional(),
|
||||
description: z.string().optional(),
|
||||
status: z.string().optional(),
|
||||
priority: z.string().optional(),
|
||||
assignee: z.string().optional(),
|
||||
labels: z.array(z.string()).optional(),
|
||||
components: z.array(z.string()).optional(),
|
||||
duedate: z.string().optional(),
|
||||
fixVersions: z.array(z.string()).optional(),
|
||||
environment: z.string().optional(),
|
||||
customFieldId: z.string().optional(),
|
||||
customFieldValue: z.string().optional(),
|
||||
notifyUsers: z.boolean().optional(),
|
||||
cloudId: z.string().optional(),
|
||||
})
|
||||
|
||||
@@ -45,9 +52,16 @@ export async function PUT(request: NextRequest) {
|
||||
summary,
|
||||
title,
|
||||
description,
|
||||
status,
|
||||
priority,
|
||||
assignee,
|
||||
labels,
|
||||
components,
|
||||
duedate,
|
||||
fixVersions,
|
||||
environment,
|
||||
customFieldId,
|
||||
customFieldValue,
|
||||
notifyUsers,
|
||||
cloudId: providedCloudId,
|
||||
} = validation.data
|
||||
|
||||
@@ -64,7 +78,8 @@ export async function PUT(request: NextRequest) {
|
||||
return NextResponse.json({ error: issueKeyValidation.error }, { status: 400 })
|
||||
}
|
||||
|
||||
const url = `https://api.atlassian.com/ex/jira/${cloudId}/rest/api/3/issue/${issueKey}`
|
||||
const notifyParam = notifyUsers === false ? '?notifyUsers=false' : ''
|
||||
const url = `https://api.atlassian.com/ex/jira/${cloudId}/rest/api/3/issue/${issueKey}${notifyParam}`
|
||||
|
||||
logger.info('Updating Jira issue at:', url)
|
||||
|
||||
@@ -93,24 +108,65 @@ export async function PUT(request: NextRequest) {
|
||||
}
|
||||
}
|
||||
|
||||
if (status !== undefined && status !== null && status !== '') {
|
||||
fields.status = {
|
||||
name: status,
|
||||
}
|
||||
}
|
||||
|
||||
if (priority !== undefined && priority !== null && priority !== '') {
|
||||
fields.priority = {
|
||||
name: priority,
|
||||
}
|
||||
const isNumericId = /^\d+$/.test(priority)
|
||||
fields.priority = isNumericId ? { id: priority } : { name: priority }
|
||||
}
|
||||
|
||||
if (assignee !== undefined && assignee !== null && assignee !== '') {
|
||||
fields.assignee = {
|
||||
id: assignee,
|
||||
accountId: assignee,
|
||||
}
|
||||
}
|
||||
|
||||
if (labels !== undefined && labels !== null && labels.length > 0) {
|
||||
fields.labels = labels
|
||||
}
|
||||
|
||||
if (components !== undefined && components !== null && components.length > 0) {
|
||||
fields.components = components.map((name) => ({ name }))
|
||||
}
|
||||
|
||||
if (duedate !== undefined && duedate !== null && duedate !== '') {
|
||||
fields.duedate = duedate
|
||||
}
|
||||
|
||||
if (fixVersions !== undefined && fixVersions !== null && fixVersions.length > 0) {
|
||||
fields.fixVersions = fixVersions.map((name) => ({ name }))
|
||||
}
|
||||
|
||||
if (environment !== undefined && environment !== null && environment !== '') {
|
||||
fields.environment = {
|
||||
type: 'doc',
|
||||
version: 1,
|
||||
content: [
|
||||
{
|
||||
type: 'paragraph',
|
||||
content: [
|
||||
{
|
||||
type: 'text',
|
||||
text: environment,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
}
|
||||
}
|
||||
|
||||
if (
|
||||
customFieldId !== undefined &&
|
||||
customFieldId !== null &&
|
||||
customFieldId !== '' &&
|
||||
customFieldValue !== undefined &&
|
||||
customFieldValue !== null &&
|
||||
customFieldValue !== ''
|
||||
) {
|
||||
const fieldId = customFieldId.startsWith('customfield_')
|
||||
? customFieldId
|
||||
: `customfield_${customFieldId}`
|
||||
fields[fieldId] = customFieldValue
|
||||
}
|
||||
|
||||
const requestBody = { fields }
|
||||
|
||||
const response = await fetch(url, {
|
||||
|
||||
@@ -32,6 +32,8 @@ export async function POST(request: NextRequest) {
|
||||
environment,
|
||||
customFieldId,
|
||||
customFieldValue,
|
||||
components,
|
||||
fixVersions,
|
||||
} = await request.json()
|
||||
|
||||
if (!domain) {
|
||||
@@ -73,10 +75,9 @@ export async function POST(request: NextRequest) {
|
||||
|
||||
logger.info('Creating Jira issue at:', url)
|
||||
|
||||
const isNumericProjectId = /^\d+$/.test(projectId)
|
||||
const fields: Record<string, any> = {
|
||||
project: {
|
||||
id: projectId,
|
||||
},
|
||||
project: isNumericProjectId ? { id: projectId } : { key: projectId },
|
||||
issuetype: {
|
||||
name: normalizedIssueType,
|
||||
},
|
||||
@@ -114,13 +115,31 @@ export async function POST(request: NextRequest) {
|
||||
fields.labels = labels
|
||||
}
|
||||
|
||||
if (
|
||||
components !== undefined &&
|
||||
components !== null &&
|
||||
Array.isArray(components) &&
|
||||
components.length > 0
|
||||
) {
|
||||
fields.components = components.map((name: string) => ({ name }))
|
||||
}
|
||||
|
||||
if (duedate !== undefined && duedate !== null && duedate !== '') {
|
||||
fields.duedate = duedate
|
||||
}
|
||||
|
||||
if (
|
||||
fixVersions !== undefined &&
|
||||
fixVersions !== null &&
|
||||
Array.isArray(fixVersions) &&
|
||||
fixVersions.length > 0
|
||||
) {
|
||||
fields.fixVersions = fixVersions.map((name: string) => ({ name }))
|
||||
}
|
||||
|
||||
if (reporter !== undefined && reporter !== null && reporter !== '') {
|
||||
fields.reporter = {
|
||||
id: reporter,
|
||||
accountId: reporter,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -220,8 +239,10 @@ export async function POST(request: NextRequest) {
|
||||
success: true,
|
||||
output: {
|
||||
ts: new Date().toISOString(),
|
||||
id: responseData.id || '',
|
||||
issueKey: issueKey,
|
||||
summary: responseData.fields?.summary || 'Issue created',
|
||||
self: responseData.self || '',
|
||||
summary: responseData.fields?.summary || summary || 'Issue created',
|
||||
success: true,
|
||||
url: `https://${domain}/browse/${issueKey}`,
|
||||
...(assigneeId && { assigneeId }),
|
||||
|
||||
@@ -165,8 +165,26 @@ export async function POST(request: NextRequest) {
|
||||
issueIdOrKey,
|
||||
approvalId,
|
||||
decision,
|
||||
success: true,
|
||||
id: data.id ?? null,
|
||||
name: data.name ?? null,
|
||||
finalDecision: data.finalDecision ?? null,
|
||||
canAnswerApproval: data.canAnswerApproval ?? null,
|
||||
approvers: (data.approvers ?? []).map((a: Record<string, unknown>) => {
|
||||
const approver = a.approver as Record<string, unknown> | undefined
|
||||
return {
|
||||
approver: {
|
||||
accountId: approver?.accountId ?? null,
|
||||
displayName: approver?.displayName ?? null,
|
||||
emailAddress: approver?.emailAddress ?? null,
|
||||
active: approver?.active ?? null,
|
||||
},
|
||||
approverDecision: a.approverDecision ?? null,
|
||||
}
|
||||
}),
|
||||
createdDate: data.createdDate ?? null,
|
||||
completedDate: data.completedDate ?? null,
|
||||
approval: data,
|
||||
success: true,
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
@@ -95,6 +95,14 @@ export async function POST(request: NextRequest) {
|
||||
commentId: data.id,
|
||||
body: data.body,
|
||||
isPublic: data.public,
|
||||
author: data.author
|
||||
? {
|
||||
accountId: data.author.accountId ?? null,
|
||||
displayName: data.author.displayName ?? null,
|
||||
emailAddress: data.author.emailAddress ?? null,
|
||||
}
|
||||
: null,
|
||||
createdDate: data.created ?? null,
|
||||
success: true,
|
||||
},
|
||||
})
|
||||
|
||||
@@ -23,6 +23,7 @@ export async function POST(request: NextRequest) {
|
||||
issueIdOrKey,
|
||||
isPublic,
|
||||
internal,
|
||||
expand,
|
||||
start,
|
||||
limit,
|
||||
} = body
|
||||
@@ -57,8 +58,9 @@ export async function POST(request: NextRequest) {
|
||||
const baseUrl = getJsmApiBaseUrl(cloudId)
|
||||
|
||||
const params = new URLSearchParams()
|
||||
if (isPublic) params.append('public', isPublic)
|
||||
if (internal) params.append('internal', internal)
|
||||
if (isPublic !== undefined) params.append('public', String(isPublic))
|
||||
if (internal !== undefined) params.append('internal', String(internal))
|
||||
if (expand) params.append('expand', expand)
|
||||
if (start) params.append('start', start)
|
||||
if (limit) params.append('limit', limit)
|
||||
|
||||
|
||||
@@ -24,6 +24,7 @@ export async function POST(request: NextRequest) {
|
||||
query,
|
||||
start,
|
||||
limit,
|
||||
accountIds,
|
||||
emails,
|
||||
} = body
|
||||
|
||||
@@ -56,24 +57,27 @@ export async function POST(request: NextRequest) {
|
||||
|
||||
const baseUrl = getJsmApiBaseUrl(cloudId)
|
||||
|
||||
const parsedEmails = emails
|
||||
? typeof emails === 'string'
|
||||
? emails
|
||||
const rawIds = accountIds || emails
|
||||
const parsedAccountIds = rawIds
|
||||
? typeof rawIds === 'string'
|
||||
? rawIds
|
||||
.split(',')
|
||||
.map((email: string) => email.trim())
|
||||
.filter((email: string) => email)
|
||||
: emails
|
||||
.map((id: string) => id.trim())
|
||||
.filter((id: string) => id)
|
||||
: Array.isArray(rawIds)
|
||||
? rawIds
|
||||
: []
|
||||
: []
|
||||
|
||||
const isAddOperation = parsedEmails.length > 0
|
||||
const isAddOperation = parsedAccountIds.length > 0
|
||||
|
||||
if (isAddOperation) {
|
||||
const url = `${baseUrl}/servicedesk/${serviceDeskId}/customer`
|
||||
|
||||
logger.info('Adding customers to:', url, { emails: parsedEmails })
|
||||
logger.info('Adding customers to:', url, { accountIds: parsedAccountIds })
|
||||
|
||||
const requestBody: Record<string, unknown> = {
|
||||
usernames: parsedEmails,
|
||||
accountIds: parsedAccountIds,
|
||||
}
|
||||
|
||||
const response = await fetch(url, {
|
||||
|
||||
@@ -31,6 +31,9 @@ export async function POST(request: NextRequest) {
|
||||
description,
|
||||
raiseOnBehalfOf,
|
||||
requestFieldValues,
|
||||
requestParticipants,
|
||||
channel,
|
||||
expand,
|
||||
} = body
|
||||
|
||||
if (!domain) {
|
||||
@@ -80,6 +83,19 @@ export async function POST(request: NextRequest) {
|
||||
if (raiseOnBehalfOf) {
|
||||
requestBody.raiseOnBehalfOf = raiseOnBehalfOf
|
||||
}
|
||||
if (requestParticipants) {
|
||||
requestBody.requestParticipants = Array.isArray(requestParticipants)
|
||||
? requestParticipants
|
||||
: typeof requestParticipants === 'string'
|
||||
? requestParticipants
|
||||
.split(',')
|
||||
.map((id: string) => id.trim())
|
||||
.filter(Boolean)
|
||||
: []
|
||||
}
|
||||
if (channel) {
|
||||
requestBody.channel = channel
|
||||
}
|
||||
|
||||
const response = await fetch(url, {
|
||||
method: 'POST',
|
||||
@@ -111,6 +127,21 @@ export async function POST(request: NextRequest) {
|
||||
issueKey: data.issueKey,
|
||||
requestTypeId: data.requestTypeId,
|
||||
serviceDeskId: data.serviceDeskId,
|
||||
createdDate: data.createdDate ?? null,
|
||||
currentStatus: data.currentStatus
|
||||
? {
|
||||
status: data.currentStatus.status ?? null,
|
||||
statusCategory: data.currentStatus.statusCategory ?? null,
|
||||
statusDate: data.currentStatus.statusDate ?? null,
|
||||
}
|
||||
: null,
|
||||
reporter: data.reporter
|
||||
? {
|
||||
accountId: data.reporter.accountId ?? null,
|
||||
displayName: data.reporter.displayName ?? null,
|
||||
emailAddress: data.reporter.emailAddress ?? null,
|
||||
}
|
||||
: null,
|
||||
success: true,
|
||||
url: `https://${domain}/browse/${data.issueKey}`,
|
||||
},
|
||||
@@ -126,7 +157,10 @@ export async function POST(request: NextRequest) {
|
||||
return NextResponse.json({ error: issueIdOrKeyValidation.error }, { status: 400 })
|
||||
}
|
||||
|
||||
const url = `${baseUrl}/request/${issueIdOrKey}`
|
||||
const params = new URLSearchParams()
|
||||
if (expand) params.append('expand', expand)
|
||||
|
||||
const url = `${baseUrl}/request/${issueIdOrKey}${params.toString() ? `?${params.toString()}` : ''}`
|
||||
|
||||
logger.info('Fetching request from:', url)
|
||||
|
||||
@@ -155,6 +189,32 @@ export async function POST(request: NextRequest) {
|
||||
success: true,
|
||||
output: {
|
||||
ts: new Date().toISOString(),
|
||||
issueId: data.issueId ?? null,
|
||||
issueKey: data.issueKey ?? null,
|
||||
requestTypeId: data.requestTypeId ?? null,
|
||||
serviceDeskId: data.serviceDeskId ?? null,
|
||||
createdDate: data.createdDate ?? null,
|
||||
currentStatus: data.currentStatus
|
||||
? {
|
||||
status: data.currentStatus.status ?? null,
|
||||
statusCategory: data.currentStatus.statusCategory ?? null,
|
||||
statusDate: data.currentStatus.statusDate ?? null,
|
||||
}
|
||||
: null,
|
||||
reporter: data.reporter
|
||||
? {
|
||||
accountId: data.reporter.accountId ?? null,
|
||||
displayName: data.reporter.displayName ?? null,
|
||||
emailAddress: data.reporter.emailAddress ?? null,
|
||||
active: data.reporter.active ?? true,
|
||||
}
|
||||
: null,
|
||||
requestFieldValues: (data.requestFieldValues ?? []).map((fv: Record<string, unknown>) => ({
|
||||
fieldId: fv.fieldId ?? null,
|
||||
label: fv.label ?? null,
|
||||
value: fv.value ?? null,
|
||||
})),
|
||||
url: `https://${domain}/browse/${data.issueKey}`,
|
||||
request: data,
|
||||
},
|
||||
})
|
||||
|
||||
@@ -1,7 +1,11 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||
import { validateAlphanumericId, validateJiraCloudId } from '@/lib/core/security/input-validation'
|
||||
import {
|
||||
validateAlphanumericId,
|
||||
validateEnum,
|
||||
validateJiraCloudId,
|
||||
} from '@/lib/core/security/input-validation'
|
||||
import { getJiraCloudId, getJsmApiBaseUrl, getJsmHeaders } from '@/tools/jsm/utils'
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
@@ -23,7 +27,9 @@ export async function POST(request: NextRequest) {
|
||||
serviceDeskId,
|
||||
requestOwnership,
|
||||
requestStatus,
|
||||
requestTypeId,
|
||||
searchTerm,
|
||||
expand,
|
||||
start,
|
||||
limit,
|
||||
} = body
|
||||
@@ -52,17 +58,45 @@ export async function POST(request: NextRequest) {
|
||||
}
|
||||
}
|
||||
|
||||
const VALID_REQUEST_OWNERSHIP = [
|
||||
'OWNED_REQUESTS',
|
||||
'PARTICIPATED_REQUESTS',
|
||||
'APPROVER',
|
||||
'ALL_REQUESTS',
|
||||
] as const
|
||||
const VALID_REQUEST_STATUS = ['OPEN_REQUESTS', 'CLOSED_REQUESTS', 'ALL_REQUESTS'] as const
|
||||
|
||||
if (requestOwnership) {
|
||||
const ownershipValidation = validateEnum(
|
||||
requestOwnership,
|
||||
VALID_REQUEST_OWNERSHIP,
|
||||
'requestOwnership'
|
||||
)
|
||||
if (!ownershipValidation.isValid) {
|
||||
return NextResponse.json({ error: ownershipValidation.error }, { status: 400 })
|
||||
}
|
||||
}
|
||||
|
||||
if (requestStatus) {
|
||||
const statusValidation = validateEnum(requestStatus, VALID_REQUEST_STATUS, 'requestStatus')
|
||||
if (!statusValidation.isValid) {
|
||||
return NextResponse.json({ error: statusValidation.error }, { status: 400 })
|
||||
}
|
||||
}
|
||||
|
||||
const baseUrl = getJsmApiBaseUrl(cloudId)
|
||||
|
||||
const params = new URLSearchParams()
|
||||
if (serviceDeskId) params.append('serviceDeskId', serviceDeskId)
|
||||
if (requestOwnership && requestOwnership !== 'ALL_REQUESTS') {
|
||||
if (requestOwnership) {
|
||||
params.append('requestOwnership', requestOwnership)
|
||||
}
|
||||
if (requestStatus && requestStatus !== 'ALL') {
|
||||
if (requestStatus) {
|
||||
params.append('requestStatus', requestStatus)
|
||||
}
|
||||
if (requestTypeId) params.append('requestTypeId', requestTypeId)
|
||||
if (searchTerm) params.append('searchTerm', searchTerm)
|
||||
if (expand) params.append('expand', expand)
|
||||
if (start) params.append('start', start)
|
||||
if (limit) params.append('limit', limit)
|
||||
|
||||
|
||||
119
apps/sim/app/api/tools/jsm/requesttypefields/route.ts
Normal file
119
apps/sim/app/api/tools/jsm/requesttypefields/route.ts
Normal file
@@ -0,0 +1,119 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||
import { validateAlphanumericId, validateJiraCloudId } from '@/lib/core/security/input-validation'
|
||||
import { getJiraCloudId, getJsmApiBaseUrl, getJsmHeaders } from '@/tools/jsm/utils'
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
const logger = createLogger('JsmRequestTypeFieldsAPI')
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const auth = await checkInternalAuth(request)
|
||||
if (!auth.success || !auth.userId) {
|
||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
try {
|
||||
const body = await request.json()
|
||||
const { domain, accessToken, cloudId: cloudIdParam, serviceDeskId, requestTypeId } = body
|
||||
|
||||
if (!domain) {
|
||||
logger.error('Missing domain in request')
|
||||
return NextResponse.json({ error: 'Domain is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
if (!accessToken) {
|
||||
logger.error('Missing access token in request')
|
||||
return NextResponse.json({ error: 'Access token is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
if (!serviceDeskId) {
|
||||
logger.error('Missing serviceDeskId in request')
|
||||
return NextResponse.json({ error: 'Service Desk ID is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
if (!requestTypeId) {
|
||||
logger.error('Missing requestTypeId in request')
|
||||
return NextResponse.json({ error: 'Request Type ID is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
const cloudId = cloudIdParam || (await getJiraCloudId(domain, accessToken))
|
||||
|
||||
const cloudIdValidation = validateJiraCloudId(cloudId, 'cloudId')
|
||||
if (!cloudIdValidation.isValid) {
|
||||
return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 })
|
||||
}
|
||||
|
||||
const serviceDeskIdValidation = validateAlphanumericId(serviceDeskId, 'serviceDeskId')
|
||||
if (!serviceDeskIdValidation.isValid) {
|
||||
return NextResponse.json({ error: serviceDeskIdValidation.error }, { status: 400 })
|
||||
}
|
||||
|
||||
const requestTypeIdValidation = validateAlphanumericId(requestTypeId, 'requestTypeId')
|
||||
if (!requestTypeIdValidation.isValid) {
|
||||
return NextResponse.json({ error: requestTypeIdValidation.error }, { status: 400 })
|
||||
}
|
||||
|
||||
const baseUrl = getJsmApiBaseUrl(cloudId)
|
||||
const url = `${baseUrl}/servicedesk/${serviceDeskId}/requesttype/${requestTypeId}/field`
|
||||
|
||||
logger.info('Fetching request type fields from:', url)
|
||||
|
||||
const response = await fetch(url, {
|
||||
method: 'GET',
|
||||
headers: getJsmHeaders(accessToken),
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
const errorText = await response.text()
|
||||
logger.error('JSM API error:', {
|
||||
status: response.status,
|
||||
statusText: response.statusText,
|
||||
error: errorText,
|
||||
})
|
||||
|
||||
return NextResponse.json(
|
||||
{ error: `JSM API error: ${response.status} ${response.statusText}`, details: errorText },
|
||||
{ status: response.status }
|
||||
)
|
||||
}
|
||||
|
||||
const data = await response.json()
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
output: {
|
||||
ts: new Date().toISOString(),
|
||||
serviceDeskId,
|
||||
requestTypeId,
|
||||
canAddRequestParticipants: data.canAddRequestParticipants ?? false,
|
||||
canRaiseOnBehalfOf: data.canRaiseOnBehalfOf ?? false,
|
||||
requestTypeFields: (data.requestTypeFields ?? []).map((field: Record<string, unknown>) => ({
|
||||
fieldId: field.fieldId ?? null,
|
||||
name: field.name ?? null,
|
||||
description: field.description ?? null,
|
||||
required: field.required ?? false,
|
||||
visible: field.visible ?? true,
|
||||
validValues: field.validValues ?? [],
|
||||
presetValues: field.presetValues ?? [],
|
||||
defaultValues: field.defaultValues ?? [],
|
||||
jiraSchema: field.jiraSchema ?? null,
|
||||
})),
|
||||
},
|
||||
})
|
||||
} catch (error) {
|
||||
logger.error('Error fetching request type fields:', {
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
stack: error instanceof Error ? error.stack : undefined,
|
||||
})
|
||||
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: error instanceof Error ? error.message : 'Internal server error',
|
||||
success: false,
|
||||
},
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -16,7 +16,17 @@ export async function POST(request: NextRequest) {
|
||||
|
||||
try {
|
||||
const body = await request.json()
|
||||
const { domain, accessToken, cloudId: cloudIdParam, serviceDeskId, start, limit } = body
|
||||
const {
|
||||
domain,
|
||||
accessToken,
|
||||
cloudId: cloudIdParam,
|
||||
serviceDeskId,
|
||||
searchQuery,
|
||||
groupId,
|
||||
expand,
|
||||
start,
|
||||
limit,
|
||||
} = body
|
||||
|
||||
if (!domain) {
|
||||
logger.error('Missing domain in request')
|
||||
@@ -48,6 +58,9 @@ export async function POST(request: NextRequest) {
|
||||
const baseUrl = getJsmApiBaseUrl(cloudId)
|
||||
|
||||
const params = new URLSearchParams()
|
||||
if (searchQuery) params.append('searchQuery', searchQuery)
|
||||
if (groupId) params.append('groupId', groupId)
|
||||
if (expand) params.append('expand', expand)
|
||||
if (start) params.append('start', start)
|
||||
if (limit) params.append('limit', limit)
|
||||
|
||||
|
||||
@@ -16,7 +16,7 @@ export async function POST(request: NextRequest) {
|
||||
|
||||
try {
|
||||
const body = await request.json()
|
||||
const { domain, accessToken, cloudId: cloudIdParam, start, limit } = body
|
||||
const { domain, accessToken, cloudId: cloudIdParam, expand, start, limit } = body
|
||||
|
||||
if (!domain) {
|
||||
logger.error('Missing domain in request')
|
||||
@@ -38,6 +38,7 @@ export async function POST(request: NextRequest) {
|
||||
const baseUrl = getJsmApiBaseUrl(cloudId)
|
||||
|
||||
const params = new URLSearchParams()
|
||||
if (expand) params.append('expand', expand)
|
||||
if (start) params.append('start', start)
|
||||
if (limit) params.append('limit', limit)
|
||||
|
||||
|
||||
@@ -16,7 +16,7 @@ export async function POST(request: NextRequest) {
|
||||
|
||||
try {
|
||||
const body = await request.json()
|
||||
const { domain, accessToken, cloudId: cloudIdParam, issueIdOrKey } = body
|
||||
const { domain, accessToken, cloudId: cloudIdParam, issueIdOrKey, start, limit } = body
|
||||
|
||||
if (!domain) {
|
||||
logger.error('Missing domain in request')
|
||||
@@ -47,7 +47,11 @@ export async function POST(request: NextRequest) {
|
||||
|
||||
const baseUrl = getJsmApiBaseUrl(cloudId)
|
||||
|
||||
const url = `${baseUrl}/request/${issueIdOrKey}/transition`
|
||||
const params = new URLSearchParams()
|
||||
if (start) params.append('start', start)
|
||||
if (limit) params.append('limit', limit)
|
||||
|
||||
const url = `${baseUrl}/request/${issueIdOrKey}/transition${params.toString() ? `?${params.toString()}` : ''}`
|
||||
|
||||
logger.info('Fetching transitions from:', url)
|
||||
|
||||
@@ -78,6 +82,8 @@ export async function POST(request: NextRequest) {
|
||||
ts: new Date().toISOString(),
|
||||
issueIdOrKey,
|
||||
transitions: data.values || [],
|
||||
total: data.size || 0,
|
||||
isLastPage: data.isLastPage ?? true,
|
||||
},
|
||||
})
|
||||
} catch (error) {
|
||||
|
||||
113
apps/sim/app/api/tools/onepassword/create-item/route.ts
Normal file
113
apps/sim/app/api/tools/onepassword/create-item/route.ts
Normal file
@@ -0,0 +1,113 @@
|
||||
import { randomUUID } from 'crypto'
|
||||
import type { ItemCreateParams } from '@1password/sdk'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||
import {
|
||||
connectRequest,
|
||||
createOnePasswordClient,
|
||||
normalizeSdkItem,
|
||||
resolveCredentials,
|
||||
toSdkCategory,
|
||||
toSdkFieldType,
|
||||
} from '../utils'
|
||||
|
||||
const logger = createLogger('OnePasswordCreateItemAPI')
|
||||
|
||||
const CreateItemSchema = z.object({
|
||||
connectionMode: z.enum(['service_account', 'connect']).nullish(),
|
||||
serviceAccountToken: z.string().nullish(),
|
||||
serverUrl: z.string().nullish(),
|
||||
apiKey: z.string().nullish(),
|
||||
vaultId: z.string().min(1, 'Vault ID is required'),
|
||||
category: z.string().min(1, 'Category is required'),
|
||||
title: z.string().nullish(),
|
||||
tags: z.string().nullish(),
|
||||
fields: z.string().nullish(),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const requestId = randomUUID().slice(0, 8)
|
||||
|
||||
const auth = await checkInternalAuth(request)
|
||||
if (!auth.success || !auth.userId) {
|
||||
logger.warn(`[${requestId}] Unauthorized 1Password create-item attempt`)
|
||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
try {
|
||||
const body = await request.json()
|
||||
const params = CreateItemSchema.parse(body)
|
||||
const creds = resolveCredentials(params)
|
||||
|
||||
logger.info(`[${requestId}] Creating item in vault ${params.vaultId} (${creds.mode} mode)`)
|
||||
|
||||
if (creds.mode === 'service_account') {
|
||||
const client = await createOnePasswordClient(creds.serviceAccountToken!)
|
||||
|
||||
const parsedTags = params.tags
|
||||
? params.tags
|
||||
.split(',')
|
||||
.map((t) => t.trim())
|
||||
.filter(Boolean)
|
||||
: undefined
|
||||
|
||||
const parsedFields = params.fields
|
||||
? (JSON.parse(params.fields) as Array<Record<string, any>>).map((f) => ({
|
||||
id: f.id || randomUUID().slice(0, 8),
|
||||
title: f.label || f.title || '',
|
||||
fieldType: toSdkFieldType(f.type || 'STRING'),
|
||||
value: f.value || '',
|
||||
sectionId: f.section?.id ?? f.sectionId,
|
||||
}))
|
||||
: undefined
|
||||
|
||||
const item = await client.items.create({
|
||||
vaultId: params.vaultId,
|
||||
category: toSdkCategory(params.category),
|
||||
title: params.title || '',
|
||||
tags: parsedTags,
|
||||
fields: parsedFields,
|
||||
} as ItemCreateParams)
|
||||
|
||||
return NextResponse.json(normalizeSdkItem(item))
|
||||
}
|
||||
|
||||
const connectBody: Record<string, unknown> = {
|
||||
vault: { id: params.vaultId },
|
||||
category: params.category,
|
||||
}
|
||||
if (params.title) connectBody.title = params.title
|
||||
if (params.tags) connectBody.tags = params.tags.split(',').map((t) => t.trim())
|
||||
if (params.fields) connectBody.fields = JSON.parse(params.fields)
|
||||
|
||||
const response = await connectRequest({
|
||||
serverUrl: creds.serverUrl!,
|
||||
apiKey: creds.apiKey!,
|
||||
path: `/v1/vaults/${params.vaultId}/items`,
|
||||
method: 'POST',
|
||||
body: connectBody,
|
||||
})
|
||||
|
||||
const data = await response.json()
|
||||
if (!response.ok) {
|
||||
return NextResponse.json(
|
||||
{ error: data.message || 'Failed to create item' },
|
||||
{ status: response.status }
|
||||
)
|
||||
}
|
||||
|
||||
return NextResponse.json(data)
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Invalid request data', details: error.errors },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
const message = error instanceof Error ? error.message : 'Unknown error'
|
||||
logger.error(`[${requestId}] Create item failed:`, error)
|
||||
return NextResponse.json({ error: `Failed to create item: ${message}` }, { status: 500 })
|
||||
}
|
||||
}
|
||||
70
apps/sim/app/api/tools/onepassword/delete-item/route.ts
Normal file
70
apps/sim/app/api/tools/onepassword/delete-item/route.ts
Normal file
@@ -0,0 +1,70 @@
|
||||
import { randomUUID } from 'crypto'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||
import { connectRequest, createOnePasswordClient, resolveCredentials } from '../utils'
|
||||
|
||||
// Scoped logger; the request-id prefix below ties log lines to one request.
const logger = createLogger('OnePasswordDeleteItemAPI')

/**
 * Request payload: credentials for either backend (service account token, or
 * Connect server URL + token) plus the vault/item to delete. Which credential
 * set is actually required is decided later by resolveCredentials.
 */
const DeleteItemSchema = z.object({
  connectionMode: z.enum(['service_account', 'connect']).nullish(),
  serviceAccountToken: z.string().nullish(),
  serverUrl: z.string().nullish(),
  apiKey: z.string().nullish(),
  vaultId: z.string().min(1, 'Vault ID is required'),
  itemId: z.string().min(1, 'Item ID is required'),
})

/**
 * POST /api/tools/onepassword/delete-item
 *
 * Deletes a single 1Password item. In service-account mode the official SDK
 * is used; in Connect mode the DELETE is proxied to the Connect server.
 *
 * Responses: 200 `{ success: true }`, 400 invalid payload, 401 unauthenticated,
 * upstream status on Connect errors, 500 on unexpected failures.
 */
export async function POST(request: NextRequest) {
  // Short correlation id used only to group log lines for this request.
  const requestId = randomUUID().slice(0, 8)

  const auth = await checkInternalAuth(request)
  if (!auth.success || !auth.userId) {
    logger.warn(`[${requestId}] Unauthorized 1Password delete-item attempt`)
    return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
  }

  try {
    const body = await request.json()
    const params = DeleteItemSchema.parse(body)
    // Throws if the credentials required for the chosen mode are missing;
    // that error surfaces through the generic 500 branch below.
    const creds = resolveCredentials(params)

    logger.info(
      `[${requestId}] Deleting item ${params.itemId} from vault ${params.vaultId} (${creds.mode} mode)`
    )

    if (creds.mode === 'service_account') {
      const client = await createOnePasswordClient(creds.serviceAccountToken!)
      await client.items.delete(params.vaultId, params.itemId)
      return NextResponse.json({ success: true })
    }

    // Connect mode: proxy the DELETE straight to the Connect server.
    const response = await connectRequest({
      serverUrl: creds.serverUrl!,
      apiKey: creds.apiKey!,
      path: `/v1/vaults/${params.vaultId}/items/${params.itemId}`,
      method: 'DELETE',
    })

    if (!response.ok) {
      // Error bodies may not be JSON (e.g. HTML from a proxy); fall back to {}
      // so the upstream status code is still propagated.
      const data = await response.json().catch(() => ({}))
      return NextResponse.json(
        { error: (data as Record<string, string>).message || 'Failed to delete item' },
        { status: response.status }
      )
    }

    return NextResponse.json({ success: true })
  } catch (error) {
    // Schema violations are client errors; everything else is a 500.
    if (error instanceof z.ZodError) {
      return NextResponse.json(
        { error: 'Invalid request data', details: error.errors },
        { status: 400 }
      )
    }
    const message = error instanceof Error ? error.message : 'Unknown error'
    logger.error(`[${requestId}] Delete item failed:`, error)
    return NextResponse.json({ error: `Failed to delete item: ${message}` }, { status: 500 })
  }
}
|
||||
75
apps/sim/app/api/tools/onepassword/get-item/route.ts
Normal file
75
apps/sim/app/api/tools/onepassword/get-item/route.ts
Normal file
@@ -0,0 +1,75 @@
|
||||
import { randomUUID } from 'crypto'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||
import {
|
||||
connectRequest,
|
||||
createOnePasswordClient,
|
||||
normalizeSdkItem,
|
||||
resolveCredentials,
|
||||
} from '../utils'
|
||||
|
||||
const logger = createLogger('OnePasswordGetItemAPI')
|
||||
|
||||
const GetItemSchema = z.object({
|
||||
connectionMode: z.enum(['service_account', 'connect']).nullish(),
|
||||
serviceAccountToken: z.string().nullish(),
|
||||
serverUrl: z.string().nullish(),
|
||||
apiKey: z.string().nullish(),
|
||||
vaultId: z.string().min(1, 'Vault ID is required'),
|
||||
itemId: z.string().min(1, 'Item ID is required'),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const requestId = randomUUID().slice(0, 8)
|
||||
|
||||
const auth = await checkInternalAuth(request)
|
||||
if (!auth.success || !auth.userId) {
|
||||
logger.warn(`[${requestId}] Unauthorized 1Password get-item attempt`)
|
||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
try {
|
||||
const body = await request.json()
|
||||
const params = GetItemSchema.parse(body)
|
||||
const creds = resolveCredentials(params)
|
||||
|
||||
logger.info(
|
||||
`[${requestId}] Getting item ${params.itemId} from vault ${params.vaultId} (${creds.mode} mode)`
|
||||
)
|
||||
|
||||
if (creds.mode === 'service_account') {
|
||||
const client = await createOnePasswordClient(creds.serviceAccountToken!)
|
||||
const item = await client.items.get(params.vaultId, params.itemId)
|
||||
return NextResponse.json(normalizeSdkItem(item))
|
||||
}
|
||||
|
||||
const response = await connectRequest({
|
||||
serverUrl: creds.serverUrl!,
|
||||
apiKey: creds.apiKey!,
|
||||
path: `/v1/vaults/${params.vaultId}/items/${params.itemId}`,
|
||||
method: 'GET',
|
||||
})
|
||||
|
||||
const data = await response.json()
|
||||
if (!response.ok) {
|
||||
return NextResponse.json(
|
||||
{ error: data.message || 'Failed to get item' },
|
||||
{ status: response.status }
|
||||
)
|
||||
}
|
||||
|
||||
return NextResponse.json(data)
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Invalid request data', details: error.errors },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
const message = error instanceof Error ? error.message : 'Unknown error'
|
||||
logger.error(`[${requestId}] Get item failed:`, error)
|
||||
return NextResponse.json({ error: `Failed to get item: ${message}` }, { status: 500 })
|
||||
}
|
||||
}
|
||||
78
apps/sim/app/api/tools/onepassword/get-vault/route.ts
Normal file
78
apps/sim/app/api/tools/onepassword/get-vault/route.ts
Normal file
@@ -0,0 +1,78 @@
|
||||
import { randomUUID } from 'crypto'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||
import {
|
||||
connectRequest,
|
||||
createOnePasswordClient,
|
||||
normalizeSdkVault,
|
||||
resolveCredentials,
|
||||
} from '../utils'
|
||||
|
||||
const logger = createLogger('OnePasswordGetVaultAPI')
|
||||
|
||||
const GetVaultSchema = z.object({
|
||||
connectionMode: z.enum(['service_account', 'connect']).nullish(),
|
||||
serviceAccountToken: z.string().nullish(),
|
||||
serverUrl: z.string().nullish(),
|
||||
apiKey: z.string().nullish(),
|
||||
vaultId: z.string().min(1, 'Vault ID is required'),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const requestId = randomUUID().slice(0, 8)
|
||||
|
||||
const auth = await checkInternalAuth(request)
|
||||
if (!auth.success || !auth.userId) {
|
||||
logger.warn(`[${requestId}] Unauthorized 1Password get-vault attempt`)
|
||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
try {
|
||||
const body = await request.json()
|
||||
const params = GetVaultSchema.parse(body)
|
||||
const creds = resolveCredentials(params)
|
||||
|
||||
logger.info(`[${requestId}] Getting 1Password vault ${params.vaultId} (${creds.mode} mode)`)
|
||||
|
||||
if (creds.mode === 'service_account') {
|
||||
const client = await createOnePasswordClient(creds.serviceAccountToken!)
|
||||
const vaults = await client.vaults.list()
|
||||
const vault = vaults.find((v) => v.id === params.vaultId)
|
||||
|
||||
if (!vault) {
|
||||
return NextResponse.json({ error: 'Vault not found' }, { status: 404 })
|
||||
}
|
||||
|
||||
return NextResponse.json(normalizeSdkVault(vault))
|
||||
}
|
||||
|
||||
const response = await connectRequest({
|
||||
serverUrl: creds.serverUrl!,
|
||||
apiKey: creds.apiKey!,
|
||||
path: `/v1/vaults/${params.vaultId}`,
|
||||
method: 'GET',
|
||||
})
|
||||
|
||||
const data = await response.json()
|
||||
if (!response.ok) {
|
||||
return NextResponse.json(
|
||||
{ error: data.message || 'Failed to get vault' },
|
||||
{ status: response.status }
|
||||
)
|
||||
}
|
||||
|
||||
return NextResponse.json(data)
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Invalid request data', details: error.errors },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
const message = error instanceof Error ? error.message : 'Unknown error'
|
||||
logger.error(`[${requestId}] Get vault failed:`, error)
|
||||
return NextResponse.json({ error: `Failed to get vault: ${message}` }, { status: 500 })
|
||||
}
|
||||
}
|
||||
87
apps/sim/app/api/tools/onepassword/list-items/route.ts
Normal file
87
apps/sim/app/api/tools/onepassword/list-items/route.ts
Normal file
@@ -0,0 +1,87 @@
|
||||
import { randomUUID } from 'crypto'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||
import {
|
||||
connectRequest,
|
||||
createOnePasswordClient,
|
||||
normalizeSdkItemOverview,
|
||||
resolveCredentials,
|
||||
} from '../utils'
|
||||
|
||||
const logger = createLogger('OnePasswordListItemsAPI')
|
||||
|
||||
const ListItemsSchema = z.object({
|
||||
connectionMode: z.enum(['service_account', 'connect']).nullish(),
|
||||
serviceAccountToken: z.string().nullish(),
|
||||
serverUrl: z.string().nullish(),
|
||||
apiKey: z.string().nullish(),
|
||||
vaultId: z.string().min(1, 'Vault ID is required'),
|
||||
filter: z.string().nullish(),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const requestId = randomUUID().slice(0, 8)
|
||||
|
||||
const auth = await checkInternalAuth(request)
|
||||
if (!auth.success || !auth.userId) {
|
||||
logger.warn(`[${requestId}] Unauthorized 1Password list-items attempt`)
|
||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
try {
|
||||
const body = await request.json()
|
||||
const params = ListItemsSchema.parse(body)
|
||||
const creds = resolveCredentials(params)
|
||||
|
||||
logger.info(`[${requestId}] Listing items in vault ${params.vaultId} (${creds.mode} mode)`)
|
||||
|
||||
if (creds.mode === 'service_account') {
|
||||
const client = await createOnePasswordClient(creds.serviceAccountToken!)
|
||||
const items = await client.items.list(params.vaultId)
|
||||
const normalized = items.map(normalizeSdkItemOverview)
|
||||
|
||||
if (params.filter) {
|
||||
const filterLower = params.filter.toLowerCase()
|
||||
const filtered = normalized.filter(
|
||||
(item) =>
|
||||
item.title?.toLowerCase().includes(filterLower) ||
|
||||
item.id?.toLowerCase().includes(filterLower)
|
||||
)
|
||||
return NextResponse.json(filtered)
|
||||
}
|
||||
|
||||
return NextResponse.json(normalized)
|
||||
}
|
||||
|
||||
const query = params.filter ? `filter=${encodeURIComponent(params.filter)}` : undefined
|
||||
const response = await connectRequest({
|
||||
serverUrl: creds.serverUrl!,
|
||||
apiKey: creds.apiKey!,
|
||||
path: `/v1/vaults/${params.vaultId}/items`,
|
||||
method: 'GET',
|
||||
query,
|
||||
})
|
||||
|
||||
const data = await response.json()
|
||||
if (!response.ok) {
|
||||
return NextResponse.json(
|
||||
{ error: data.message || 'Failed to list items' },
|
||||
{ status: response.status }
|
||||
)
|
||||
}
|
||||
|
||||
return NextResponse.json(data)
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Invalid request data', details: error.errors },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
const message = error instanceof Error ? error.message : 'Unknown error'
|
||||
logger.error(`[${requestId}] List items failed:`, error)
|
||||
return NextResponse.json({ error: `Failed to list items: ${message}` }, { status: 500 })
|
||||
}
|
||||
}
|
||||
85
apps/sim/app/api/tools/onepassword/list-vaults/route.ts
Normal file
85
apps/sim/app/api/tools/onepassword/list-vaults/route.ts
Normal file
@@ -0,0 +1,85 @@
|
||||
import { randomUUID } from 'crypto'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||
import {
|
||||
connectRequest,
|
||||
createOnePasswordClient,
|
||||
normalizeSdkVault,
|
||||
resolveCredentials,
|
||||
} from '../utils'
|
||||
|
||||
const logger = createLogger('OnePasswordListVaultsAPI')
|
||||
|
||||
const ListVaultsSchema = z.object({
|
||||
connectionMode: z.enum(['service_account', 'connect']).nullish(),
|
||||
serviceAccountToken: z.string().nullish(),
|
||||
serverUrl: z.string().nullish(),
|
||||
apiKey: z.string().nullish(),
|
||||
filter: z.string().nullish(),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const requestId = randomUUID().slice(0, 8)
|
||||
|
||||
const auth = await checkInternalAuth(request)
|
||||
if (!auth.success || !auth.userId) {
|
||||
logger.warn(`[${requestId}] Unauthorized 1Password list-vaults attempt`)
|
||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
try {
|
||||
const body = await request.json()
|
||||
const params = ListVaultsSchema.parse(body)
|
||||
const creds = resolveCredentials(params)
|
||||
|
||||
logger.info(`[${requestId}] Listing 1Password vaults (${creds.mode} mode)`)
|
||||
|
||||
if (creds.mode === 'service_account') {
|
||||
const client = await createOnePasswordClient(creds.serviceAccountToken!)
|
||||
const vaults = await client.vaults.list()
|
||||
const normalized = vaults.map(normalizeSdkVault)
|
||||
|
||||
if (params.filter) {
|
||||
const filterLower = params.filter.toLowerCase()
|
||||
const filtered = normalized.filter(
|
||||
(v) =>
|
||||
v.name?.toLowerCase().includes(filterLower) || v.id?.toLowerCase().includes(filterLower)
|
||||
)
|
||||
return NextResponse.json(filtered)
|
||||
}
|
||||
|
||||
return NextResponse.json(normalized)
|
||||
}
|
||||
|
||||
const query = params.filter ? `filter=${encodeURIComponent(params.filter)}` : undefined
|
||||
const response = await connectRequest({
|
||||
serverUrl: creds.serverUrl!,
|
||||
apiKey: creds.apiKey!,
|
||||
path: '/v1/vaults',
|
||||
method: 'GET',
|
||||
query,
|
||||
})
|
||||
|
||||
const data = await response.json()
|
||||
if (!response.ok) {
|
||||
return NextResponse.json(
|
||||
{ error: data.message || 'Failed to list vaults' },
|
||||
{ status: response.status }
|
||||
)
|
||||
}
|
||||
|
||||
return NextResponse.json(data)
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Invalid request data', details: error.errors },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
const message = error instanceof Error ? error.message : 'Unknown error'
|
||||
logger.error(`[${requestId}] List vaults failed:`, error)
|
||||
return NextResponse.json({ error: `Failed to list vaults: ${message}` }, { status: 500 })
|
||||
}
|
||||
}
|
||||
117
apps/sim/app/api/tools/onepassword/replace-item/route.ts
Normal file
117
apps/sim/app/api/tools/onepassword/replace-item/route.ts
Normal file
@@ -0,0 +1,117 @@
|
||||
import { randomUUID } from 'crypto'
|
||||
import type { Item } from '@1password/sdk'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||
import {
|
||||
connectRequest,
|
||||
createOnePasswordClient,
|
||||
normalizeSdkItem,
|
||||
resolveCredentials,
|
||||
toSdkCategory,
|
||||
toSdkFieldType,
|
||||
} from '../utils'
|
||||
|
||||
const logger = createLogger('OnePasswordReplaceItemAPI')
|
||||
|
||||
const ReplaceItemSchema = z.object({
|
||||
connectionMode: z.enum(['service_account', 'connect']).nullish(),
|
||||
serviceAccountToken: z.string().nullish(),
|
||||
serverUrl: z.string().nullish(),
|
||||
apiKey: z.string().nullish(),
|
||||
vaultId: z.string().min(1, 'Vault ID is required'),
|
||||
itemId: z.string().min(1, 'Item ID is required'),
|
||||
item: z.string().min(1, 'Item JSON is required'),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const requestId = randomUUID().slice(0, 8)
|
||||
|
||||
const auth = await checkInternalAuth(request)
|
||||
if (!auth.success || !auth.userId) {
|
||||
logger.warn(`[${requestId}] Unauthorized 1Password replace-item attempt`)
|
||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
try {
|
||||
const body = await request.json()
|
||||
const params = ReplaceItemSchema.parse(body)
|
||||
const creds = resolveCredentials(params)
|
||||
const itemData = JSON.parse(params.item)
|
||||
|
||||
logger.info(
|
||||
`[${requestId}] Replacing item ${params.itemId} in vault ${params.vaultId} (${creds.mode} mode)`
|
||||
)
|
||||
|
||||
if (creds.mode === 'service_account') {
|
||||
const client = await createOnePasswordClient(creds.serviceAccountToken!)
|
||||
|
||||
const existing = await client.items.get(params.vaultId, params.itemId)
|
||||
|
||||
const sdkItem = {
|
||||
...existing,
|
||||
id: params.itemId,
|
||||
title: itemData.title || existing.title,
|
||||
category: itemData.category ? toSdkCategory(itemData.category) : existing.category,
|
||||
vaultId: params.vaultId,
|
||||
fields: itemData.fields
|
||||
? (itemData.fields as Array<Record<string, any>>).map((f) => ({
|
||||
id: f.id || randomUUID().slice(0, 8),
|
||||
title: f.label || f.title || '',
|
||||
fieldType: toSdkFieldType(f.type || 'STRING'),
|
||||
value: f.value || '',
|
||||
sectionId: f.section?.id ?? f.sectionId,
|
||||
}))
|
||||
: existing.fields,
|
||||
sections: itemData.sections
|
||||
? (itemData.sections as Array<Record<string, any>>).map((s) => ({
|
||||
id: s.id || '',
|
||||
title: s.label || s.title || '',
|
||||
}))
|
||||
: existing.sections,
|
||||
notes: itemData.notes ?? existing.notes,
|
||||
tags: itemData.tags ?? existing.tags,
|
||||
websites:
|
||||
itemData.urls || itemData.websites
|
||||
? (itemData.urls ?? itemData.websites ?? []).map((u: Record<string, any>) => ({
|
||||
url: u.href || u.url || '',
|
||||
label: u.label || '',
|
||||
autofillBehavior: 'AnywhereOnWebsite' as const,
|
||||
}))
|
||||
: existing.websites,
|
||||
} as Item
|
||||
|
||||
const result = await client.items.put(sdkItem)
|
||||
return NextResponse.json(normalizeSdkItem(result))
|
||||
}
|
||||
|
||||
const response = await connectRequest({
|
||||
serverUrl: creds.serverUrl!,
|
||||
apiKey: creds.apiKey!,
|
||||
path: `/v1/vaults/${params.vaultId}/items/${params.itemId}`,
|
||||
method: 'PUT',
|
||||
body: itemData,
|
||||
})
|
||||
|
||||
const data = await response.json()
|
||||
if (!response.ok) {
|
||||
return NextResponse.json(
|
||||
{ error: data.message || 'Failed to replace item' },
|
||||
{ status: response.status }
|
||||
)
|
||||
}
|
||||
|
||||
return NextResponse.json(data)
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Invalid request data', details: error.errors },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
const message = error instanceof Error ? error.message : 'Unknown error'
|
||||
logger.error(`[${requestId}] Replace item failed:`, error)
|
||||
return NextResponse.json({ error: `Failed to replace item: ${message}` }, { status: 500 })
|
||||
}
|
||||
}
|
||||
59
apps/sim/app/api/tools/onepassword/resolve-secret/route.ts
Normal file
59
apps/sim/app/api/tools/onepassword/resolve-secret/route.ts
Normal file
@@ -0,0 +1,59 @@
|
||||
import { randomUUID } from 'crypto'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||
import { createOnePasswordClient, resolveCredentials } from '../utils'
|
||||
|
||||
const logger = createLogger('OnePasswordResolveSecretAPI')
|
||||
|
||||
const ResolveSecretSchema = z.object({
|
||||
connectionMode: z.enum(['service_account', 'connect']).nullish(),
|
||||
serviceAccountToken: z.string().nullish(),
|
||||
serverUrl: z.string().nullish(),
|
||||
apiKey: z.string().nullish(),
|
||||
secretReference: z.string().min(1, 'Secret reference is required'),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const requestId = randomUUID().slice(0, 8)
|
||||
|
||||
const auth = await checkInternalAuth(request)
|
||||
if (!auth.success || !auth.userId) {
|
||||
logger.warn(`[${requestId}] Unauthorized 1Password resolve-secret attempt`)
|
||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
try {
|
||||
const body = await request.json()
|
||||
const params = ResolveSecretSchema.parse(body)
|
||||
const creds = resolveCredentials(params)
|
||||
|
||||
if (creds.mode !== 'service_account') {
|
||||
return NextResponse.json(
|
||||
{ error: 'Resolve Secret is only available in Service Account mode' },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
logger.info(`[${requestId}] Resolving secret reference (service_account mode)`)
|
||||
|
||||
const client = await createOnePasswordClient(creds.serviceAccountToken!)
|
||||
const secret = await client.secrets.resolve(params.secretReference)
|
||||
|
||||
return NextResponse.json({
|
||||
value: secret,
|
||||
reference: params.secretReference,
|
||||
})
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Invalid request data', details: error.errors },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
const message = error instanceof Error ? error.message : 'Unknown error'
|
||||
logger.error(`[${requestId}] Resolve secret failed:`, error)
|
||||
return NextResponse.json({ error: `Failed to resolve secret: ${message}` }, { status: 500 })
|
||||
}
|
||||
}
|
||||
136
apps/sim/app/api/tools/onepassword/update-item/route.ts
Normal file
136
apps/sim/app/api/tools/onepassword/update-item/route.ts
Normal file
@@ -0,0 +1,136 @@
|
||||
import { randomUUID } from 'crypto'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||
import {
|
||||
connectRequest,
|
||||
createOnePasswordClient,
|
||||
normalizeSdkItem,
|
||||
resolveCredentials,
|
||||
} from '../utils'
|
||||
|
||||
const logger = createLogger('OnePasswordUpdateItemAPI')
|
||||
|
||||
const UpdateItemSchema = z.object({
|
||||
connectionMode: z.enum(['service_account', 'connect']).nullish(),
|
||||
serviceAccountToken: z.string().nullish(),
|
||||
serverUrl: z.string().nullish(),
|
||||
apiKey: z.string().nullish(),
|
||||
vaultId: z.string().min(1, 'Vault ID is required'),
|
||||
itemId: z.string().min(1, 'Item ID is required'),
|
||||
operations: z.string().min(1, 'Patch operations are required'),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const requestId = randomUUID().slice(0, 8)
|
||||
|
||||
const auth = await checkInternalAuth(request)
|
||||
if (!auth.success || !auth.userId) {
|
||||
logger.warn(`[${requestId}] Unauthorized 1Password update-item attempt`)
|
||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
try {
|
||||
const body = await request.json()
|
||||
const params = UpdateItemSchema.parse(body)
|
||||
const creds = resolveCredentials(params)
|
||||
const ops = JSON.parse(params.operations) as JsonPatchOperation[]
|
||||
|
||||
logger.info(
|
||||
`[${requestId}] Updating item ${params.itemId} in vault ${params.vaultId} (${creds.mode} mode)`
|
||||
)
|
||||
|
||||
if (creds.mode === 'service_account') {
|
||||
const client = await createOnePasswordClient(creds.serviceAccountToken!)
|
||||
|
||||
const item = await client.items.get(params.vaultId, params.itemId)
|
||||
|
||||
for (const op of ops) {
|
||||
applyPatch(item, op)
|
||||
}
|
||||
|
||||
const result = await client.items.put(item)
|
||||
return NextResponse.json(normalizeSdkItem(result))
|
||||
}
|
||||
|
||||
const response = await connectRequest({
|
||||
serverUrl: creds.serverUrl!,
|
||||
apiKey: creds.apiKey!,
|
||||
path: `/v1/vaults/${params.vaultId}/items/${params.itemId}`,
|
||||
method: 'PATCH',
|
||||
body: ops,
|
||||
})
|
||||
|
||||
const data = await response.json()
|
||||
if (!response.ok) {
|
||||
return NextResponse.json(
|
||||
{ error: data.message || 'Failed to update item' },
|
||||
{ status: response.status }
|
||||
)
|
||||
}
|
||||
|
||||
return NextResponse.json(data)
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Invalid request data', details: error.errors },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
const message = error instanceof Error ? error.message : 'Unknown error'
|
||||
logger.error(`[${requestId}] Update item failed:`, error)
|
||||
return NextResponse.json({ error: `Failed to update item: ${message}` }, { status: 500 })
|
||||
}
|
||||
}
|
||||
|
||||
interface JsonPatchOperation {
|
||||
op: 'add' | 'remove' | 'replace'
|
||||
path: string
|
||||
value?: unknown
|
||||
}
|
||||
|
||||
/** Apply a single RFC6902 JSON Patch operation to a mutable object. */
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
function applyPatch(item: Record<string, any>, op: JsonPatchOperation) {
|
||||
const segments = op.path.split('/').filter(Boolean)
|
||||
|
||||
if (segments.length === 1) {
|
||||
const key = segments[0]
|
||||
if (op.op === 'replace' || op.op === 'add') {
|
||||
item[key] = op.value
|
||||
} else if (op.op === 'remove') {
|
||||
delete item[key]
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
let target = item
|
||||
for (let i = 0; i < segments.length - 1; i++) {
|
||||
const seg = segments[i]
|
||||
if (Array.isArray(target)) {
|
||||
target = target[Number(seg)]
|
||||
} else {
|
||||
target = target[seg]
|
||||
}
|
||||
if (target === undefined || target === null) return
|
||||
}
|
||||
|
||||
const lastSeg = segments[segments.length - 1]
|
||||
|
||||
if (op.op === 'replace' || op.op === 'add') {
|
||||
if (Array.isArray(target) && lastSeg === '-') {
|
||||
target.push(op.value)
|
||||
} else if (Array.isArray(target)) {
|
||||
target[Number(lastSeg)] = op.value
|
||||
} else {
|
||||
target[lastSeg] = op.value
|
||||
}
|
||||
} else if (op.op === 'remove') {
|
||||
if (Array.isArray(target)) {
|
||||
target.splice(Number(lastSeg), 1)
|
||||
} else {
|
||||
delete target[lastSeg]
|
||||
}
|
||||
}
|
||||
}
|
||||
357
apps/sim/app/api/tools/onepassword/utils.ts
Normal file
357
apps/sim/app/api/tools/onepassword/utils.ts
Normal file
@@ -0,0 +1,357 @@
|
||||
import type {
|
||||
Item,
|
||||
ItemCategory,
|
||||
ItemField,
|
||||
ItemFieldType,
|
||||
ItemOverview,
|
||||
ItemSection,
|
||||
VaultOverview,
|
||||
Website,
|
||||
} from '@1password/sdk'
|
||||
|
||||
/** Connect-format field type strings returned by normalization. */
type ConnectFieldType =
  | 'STRING'
  | 'CONCEALED'
  | 'EMAIL'
  | 'URL'
  | 'OTP'
  | 'PHONE'
  | 'DATE'
  | 'MONTH_YEAR'
  | 'MENU'
  | 'ADDRESS'
  | 'REFERENCE'
  | 'SSHKEY'
  | 'CREDIT_CARD_NUMBER'
  | 'CREDIT_CARD_TYPE'

/** Connect-format category strings returned by normalization. */
type ConnectCategory =
  | 'LOGIN'
  | 'PASSWORD'
  | 'API_CREDENTIAL'
  | 'SECURE_NOTE'
  | 'SERVER'
  | 'DATABASE'
  | 'CREDIT_CARD'
  | 'IDENTITY'
  | 'SSH_KEY'
  | 'DOCUMENT'
  | 'SOFTWARE_LICENSE'
  | 'EMAIL_ACCOUNT'
  | 'MEMBERSHIP'
  | 'PASSPORT'
  | 'REWARD_PROGRAM'
  | 'DRIVER_LICENSE'
  | 'BANK_ACCOUNT'
  | 'MEDICAL_RECORD'
  | 'OUTDOOR_LICENSE'
  | 'WIRELESS_ROUTER'
  | 'SOCIAL_SECURITY_NUMBER'
  | 'CUSTOM'

/**
 * Normalized vault shape matching the Connect API response.
 * Fields the SDK does not expose are pinned to fixed placeholders
 * (description null, versions/items 0, type 'USER_CREATED') — see
 * normalizeSdkVault below.
 */
export interface NormalizedVault {
  id: string
  name: string
  description: null
  attributeVersion: number
  contentVersion: number
  items: number
  type: string
  createdAt: string | null // ISO 8601 timestamp when available
  updatedAt: string | null // ISO 8601 timestamp when available
}

/** Normalized item overview shape matching the Connect API response. */
export interface NormalizedItemOverview {
  id: string
  title: string
  vault: { id: string }
  category: ConnectCategory
  urls: Array<{ href: string; label: string | null; primary: boolean }>
  favorite: boolean
  tags: string[]
  version: number
  state: string | null
  createdAt: string | null // ISO 8601 timestamp when available
  updatedAt: string | null // ISO 8601 timestamp when available
  lastEditedBy: null // not exposed by the SDK; always null after normalization
}

/** Normalized field shape matching the Connect API response. */
export interface NormalizedField {
  id: string
  label: string
  type: ConnectFieldType
  purpose: string
  value: string | null
  section: { id: string } | null
  generate: boolean
  recipe: null // password-generation recipe; not available from the SDK
  entropy: null // password entropy; not available from the SDK
}

/** Normalized full item shape matching the Connect API response. */
export interface NormalizedItem extends NormalizedItemOverview {
  fields: NormalizedField[]
  sections: Array<{ id: string; label: string }>
}
|
||||
|
||||
/**
 * SDK field type string values → Connect field type mapping.
 * Uses string literals instead of enum imports to avoid loading the WASM module at build time.
 */
const SDK_TO_CONNECT_FIELD_TYPE: Record<string, ConnectFieldType> = {
  Text: 'STRING',
  Concealed: 'CONCEALED',
  Email: 'EMAIL',
  Url: 'URL',
  Totp: 'OTP',
  Phone: 'PHONE',
  Date: 'DATE',
  MonthYear: 'MONTH_YEAR',
  Menu: 'MENU',
  Address: 'ADDRESS',
  Reference: 'REFERENCE',
  SshKey: 'SSHKEY',
  CreditCardNumber: 'CREDIT_CARD_NUMBER',
  CreditCardType: 'CREDIT_CARD_TYPE',
}

/**
 * SDK category string values → Connect category mapping.
 * NOTE: this map is not a perfect inverse of CONNECT_TO_SDK_CATEGORY below —
 * CryptoWallet, Person, and Unsupported all collapse to 'CUSTOM'.
 */
const SDK_TO_CONNECT_CATEGORY: Record<string, ConnectCategory> = {
  Login: 'LOGIN',
  Password: 'PASSWORD',
  ApiCredentials: 'API_CREDENTIAL',
  SecureNote: 'SECURE_NOTE',
  Server: 'SERVER',
  Database: 'DATABASE',
  CreditCard: 'CREDIT_CARD',
  Identity: 'IDENTITY',
  SshKey: 'SSH_KEY',
  Document: 'DOCUMENT',
  SoftwareLicense: 'SOFTWARE_LICENSE',
  Email: 'EMAIL_ACCOUNT',
  Membership: 'MEMBERSHIP',
  Passport: 'PASSPORT',
  Rewards: 'REWARD_PROGRAM',
  DriverLicense: 'DRIVER_LICENSE',
  BankAccount: 'BANK_ACCOUNT',
  MedicalRecord: 'MEDICAL_RECORD',
  OutdoorLicense: 'OUTDOOR_LICENSE',
  Router: 'WIRELESS_ROUTER',
  SocialSecurityNumber: 'SOCIAL_SECURITY_NUMBER',
  CryptoWallet: 'CUSTOM',
  Person: 'CUSTOM',
  Unsupported: 'CUSTOM',
}

/**
 * Connect category → SDK category string mapping.
 * 'CUSTOM' intentionally has no entry here (it is a many-to-one target above).
 */
const CONNECT_TO_SDK_CATEGORY: Record<string, `${ItemCategory}`> = {
  LOGIN: 'Login',
  PASSWORD: 'Password',
  API_CREDENTIAL: 'ApiCredentials',
  SECURE_NOTE: 'SecureNote',
  SERVER: 'Server',
  DATABASE: 'Database',
  CREDIT_CARD: 'CreditCard',
  IDENTITY: 'Identity',
  SSH_KEY: 'SshKey',
  DOCUMENT: 'Document',
  SOFTWARE_LICENSE: 'SoftwareLicense',
  EMAIL_ACCOUNT: 'Email',
  MEMBERSHIP: 'Membership',
  PASSPORT: 'Passport',
  REWARD_PROGRAM: 'Rewards',
  DRIVER_LICENSE: 'DriverLicense',
  BANK_ACCOUNT: 'BankAccount',
  MEDICAL_RECORD: 'MedicalRecord',
  OUTDOOR_LICENSE: 'OutdoorLicense',
  WIRELESS_ROUTER: 'Router',
  SOCIAL_SECURITY_NUMBER: 'SocialSecurityNumber',
}

/**
 * Connect field type → SDK field type string mapping.
 * Both 'OTP' and 'TOTP' are accepted spellings for one-time-password fields.
 */
const CONNECT_TO_SDK_FIELD_TYPE: Record<string, `${ItemFieldType}`> = {
  STRING: 'Text',
  CONCEALED: 'Concealed',
  EMAIL: 'Email',
  URL: 'Url',
  OTP: 'Totp',
  TOTP: 'Totp',
  PHONE: 'Phone',
  DATE: 'Date',
  MONTH_YEAR: 'MonthYear',
  MENU: 'Menu',
  ADDRESS: 'Address',
  REFERENCE: 'Reference',
  SSHKEY: 'SshKey',
  CREDIT_CARD_NUMBER: 'CreditCardNumber',
  CREDIT_CARD_TYPE: 'CreditCardType',
}
|
||||
|
||||
export type ConnectionMode = 'service_account' | 'connect'
|
||||
|
||||
export interface CredentialParams {
|
||||
connectionMode?: ConnectionMode | null
|
||||
serviceAccountToken?: string | null
|
||||
serverUrl?: string | null
|
||||
apiKey?: string | null
|
||||
}
|
||||
|
||||
export interface ResolvedCredentials {
|
||||
mode: ConnectionMode
|
||||
serviceAccountToken?: string
|
||||
serverUrl?: string
|
||||
apiKey?: string
|
||||
}
|
||||
|
||||
/** Determine which backend to use based on provided credentials. */
|
||||
export function resolveCredentials(params: CredentialParams): ResolvedCredentials {
|
||||
const mode = params.connectionMode ?? (params.serviceAccountToken ? 'service_account' : 'connect')
|
||||
|
||||
if (mode === 'service_account') {
|
||||
if (!params.serviceAccountToken) {
|
||||
throw new Error('Service Account token is required for Service Account mode')
|
||||
}
|
||||
return { mode, serviceAccountToken: params.serviceAccountToken }
|
||||
}
|
||||
|
||||
if (!params.serverUrl || !params.apiKey) {
|
||||
throw new Error('Server URL and Connect token are required for Connect Server mode')
|
||||
}
|
||||
return { mode, serverUrl: params.serverUrl, apiKey: params.apiKey }
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a 1Password SDK client from a service account token.
|
||||
* Uses dynamic import to avoid loading the WASM module at build time.
|
||||
*/
|
||||
export async function createOnePasswordClient(serviceAccountToken: string) {
|
||||
const { createClient } = await import('@1password/sdk')
|
||||
return createClient({
|
||||
auth: serviceAccountToken,
|
||||
integrationName: 'Sim Studio',
|
||||
integrationVersion: '1.0.0',
|
||||
})
|
||||
}
|
||||
|
||||
/** Proxy a request to the 1Password Connect Server. */
|
||||
export async function connectRequest(options: {
|
||||
serverUrl: string
|
||||
apiKey: string
|
||||
path: string
|
||||
method: string
|
||||
body?: unknown
|
||||
query?: string
|
||||
}): Promise<Response> {
|
||||
const base = options.serverUrl.replace(/\/$/, '')
|
||||
const queryStr = options.query ? `?${options.query}` : ''
|
||||
const url = `${base}${options.path}${queryStr}`
|
||||
|
||||
const headers: Record<string, string> = {
|
||||
Authorization: `Bearer ${options.apiKey}`,
|
||||
}
|
||||
|
||||
if (options.body) {
|
||||
headers['Content-Type'] = 'application/json'
|
||||
}
|
||||
|
||||
return fetch(url, {
|
||||
method: options.method,
|
||||
headers,
|
||||
body: options.body ? JSON.stringify(options.body) : undefined,
|
||||
})
|
||||
}
|
||||
|
||||
/** Normalize an SDK VaultOverview to match Connect API vault shape. */
|
||||
export function normalizeSdkVault(vault: VaultOverview): NormalizedVault {
|
||||
return {
|
||||
id: vault.id,
|
||||
name: vault.title,
|
||||
description: null,
|
||||
attributeVersion: 0,
|
||||
contentVersion: 0,
|
||||
items: 0,
|
||||
type: 'USER_CREATED',
|
||||
createdAt:
|
||||
vault.createdAt instanceof Date ? vault.createdAt.toISOString() : (vault.createdAt ?? null),
|
||||
updatedAt:
|
||||
vault.updatedAt instanceof Date ? vault.updatedAt.toISOString() : (vault.updatedAt ?? null),
|
||||
}
|
||||
}
|
||||
|
||||
/** Normalize an SDK ItemOverview to match Connect API item summary shape. */
|
||||
export function normalizeSdkItemOverview(item: ItemOverview): NormalizedItemOverview {
|
||||
return {
|
||||
id: item.id,
|
||||
title: item.title,
|
||||
vault: { id: item.vaultId },
|
||||
category: SDK_TO_CONNECT_CATEGORY[item.category] ?? 'CUSTOM',
|
||||
urls: (item.websites ?? []).map((w: Website) => ({
|
||||
href: w.url,
|
||||
label: w.label ?? null,
|
||||
primary: false,
|
||||
})),
|
||||
favorite: false,
|
||||
tags: item.tags ?? [],
|
||||
version: 0,
|
||||
state: item.state === 'archived' ? 'ARCHIVED' : null,
|
||||
createdAt:
|
||||
item.createdAt instanceof Date ? item.createdAt.toISOString() : (item.createdAt ?? null),
|
||||
updatedAt:
|
||||
item.updatedAt instanceof Date ? item.updatedAt.toISOString() : (item.updatedAt ?? null),
|
||||
lastEditedBy: null,
|
||||
}
|
||||
}
|
||||
|
||||
/** Normalize a full SDK Item to match Connect API FullItem shape. */
|
||||
export function normalizeSdkItem(item: Item): NormalizedItem {
|
||||
return {
|
||||
id: item.id,
|
||||
title: item.title,
|
||||
vault: { id: item.vaultId },
|
||||
category: SDK_TO_CONNECT_CATEGORY[item.category] ?? 'CUSTOM',
|
||||
urls: (item.websites ?? []).map((w: Website) => ({
|
||||
href: w.url,
|
||||
label: w.label ?? null,
|
||||
primary: false,
|
||||
})),
|
||||
favorite: false,
|
||||
tags: item.tags ?? [],
|
||||
version: item.version ?? 0,
|
||||
state: null,
|
||||
fields: (item.fields ?? []).map((field: ItemField) => ({
|
||||
id: field.id,
|
||||
label: field.title,
|
||||
type: SDK_TO_CONNECT_FIELD_TYPE[field.fieldType] ?? 'STRING',
|
||||
purpose: '',
|
||||
value: field.value ?? null,
|
||||
section: field.sectionId ? { id: field.sectionId } : null,
|
||||
generate: false,
|
||||
recipe: null,
|
||||
entropy: null,
|
||||
})),
|
||||
sections: (item.sections ?? []).map((section: ItemSection) => ({
|
||||
id: section.id,
|
||||
label: section.title,
|
||||
})),
|
||||
createdAt:
|
||||
item.createdAt instanceof Date ? item.createdAt.toISOString() : (item.createdAt ?? null),
|
||||
updatedAt:
|
||||
item.updatedAt instanceof Date ? item.updatedAt.toISOString() : (item.updatedAt ?? null),
|
||||
lastEditedBy: null,
|
||||
}
|
||||
}
|
||||
|
||||
/** Convert a Connect-style category string to the SDK category string. */
|
||||
export function toSdkCategory(category: string): `${ItemCategory}` {
|
||||
return CONNECT_TO_SDK_CATEGORY[category] ?? 'Login'
|
||||
}
|
||||
|
||||
/** Convert a Connect-style field type string to the SDK field type string. */
|
||||
export function toSdkFieldType(type: string): `${ItemFieldType}` {
|
||||
return CONNECT_TO_SDK_FIELD_TYPE[type] ?? 'Text'
|
||||
}
|
||||
114
apps/sim/app/api/v1/copilot/chat/route.ts
Normal file
114
apps/sim/app/api/v1/copilot/chat/route.ts
Normal file
@@ -0,0 +1,114 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { getCopilotModel } from '@/lib/copilot/config'
|
||||
import { SIM_AGENT_VERSION } from '@/lib/copilot/constants'
|
||||
import { COPILOT_REQUEST_MODES } from '@/lib/copilot/models'
|
||||
import { orchestrateCopilotStream } from '@/lib/copilot/orchestrator'
|
||||
import { resolveWorkflowIdForUser } from '@/lib/workflows/utils'
|
||||
import { authenticateV1Request } from '@/app/api/v1/auth'
|
||||
|
||||
const logger = createLogger('CopilotHeadlessAPI')
|
||||
|
||||
const RequestSchema = z.object({
|
||||
message: z.string().min(1, 'message is required'),
|
||||
workflowId: z.string().optional(),
|
||||
workflowName: z.string().optional(),
|
||||
chatId: z.string().optional(),
|
||||
mode: z.enum(COPILOT_REQUEST_MODES).optional().default('agent'),
|
||||
model: z.string().optional(),
|
||||
autoExecuteTools: z.boolean().optional().default(true),
|
||||
timeout: z.number().optional().default(300000),
|
||||
})
|
||||
|
||||
/**
|
||||
* POST /api/v1/copilot/chat
|
||||
* Headless copilot endpoint for server-side orchestration.
|
||||
*
|
||||
* workflowId is optional - if not provided:
|
||||
* - If workflowName is provided, finds that workflow
|
||||
* - Otherwise uses the user's first workflow as context
|
||||
* - The copilot can still operate on any workflow using list_user_workflows
|
||||
*/
|
||||
export async function POST(req: NextRequest) {
|
||||
const auth = await authenticateV1Request(req)
|
||||
if (!auth.authenticated || !auth.userId) {
|
||||
return NextResponse.json(
|
||||
{ success: false, error: auth.error || 'Unauthorized' },
|
||||
{ status: 401 }
|
||||
)
|
||||
}
|
||||
|
||||
try {
|
||||
const body = await req.json()
|
||||
const parsed = RequestSchema.parse(body)
|
||||
const defaults = getCopilotModel('chat')
|
||||
const selectedModel = parsed.model || defaults.model
|
||||
|
||||
// Resolve workflow ID
|
||||
const resolved = await resolveWorkflowIdForUser(
|
||||
auth.userId,
|
||||
parsed.workflowId,
|
||||
parsed.workflowName
|
||||
)
|
||||
if (!resolved) {
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: 'No workflows found. Create a workflow first or provide a valid workflowId.',
|
||||
},
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
// Transform mode to transport mode (same as client API)
|
||||
// build and agent both map to 'agent' on the backend
|
||||
const effectiveMode = parsed.mode === 'agent' ? 'build' : parsed.mode
|
||||
const transportMode = effectiveMode === 'build' ? 'agent' : effectiveMode
|
||||
|
||||
// Always generate a chatId - required for artifacts system to work with subagents
|
||||
const chatId = parsed.chatId || crypto.randomUUID()
|
||||
|
||||
const requestPayload = {
|
||||
message: parsed.message,
|
||||
workflowId: resolved.workflowId,
|
||||
userId: auth.userId,
|
||||
model: selectedModel,
|
||||
mode: transportMode,
|
||||
messageId: crypto.randomUUID(),
|
||||
version: SIM_AGENT_VERSION,
|
||||
headless: true,
|
||||
chatId,
|
||||
}
|
||||
|
||||
const result = await orchestrateCopilotStream(requestPayload, {
|
||||
userId: auth.userId,
|
||||
workflowId: resolved.workflowId,
|
||||
chatId,
|
||||
autoExecuteTools: parsed.autoExecuteTools,
|
||||
timeout: parsed.timeout,
|
||||
interactive: false,
|
||||
})
|
||||
|
||||
return NextResponse.json({
|
||||
success: result.success,
|
||||
content: result.content,
|
||||
toolCalls: result.toolCalls,
|
||||
chatId: result.chatId || chatId, // Return the chatId for conversation continuity
|
||||
conversationId: result.conversationId,
|
||||
error: result.error,
|
||||
})
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
return NextResponse.json(
|
||||
{ success: false, error: 'Invalid request', details: error.errors },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
logger.error('Headless copilot request failed', {
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
})
|
||||
return NextResponse.json({ success: false, error: 'Internal server error' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
@@ -33,7 +33,11 @@ import { createHttpResponseFromBlock, workflowHasResponseBlock } from '@/lib/wor
|
||||
import { executeWorkflowJob, type WorkflowExecutionPayload } from '@/background/workflow-execution'
|
||||
import { normalizeName } from '@/executor/constants'
|
||||
import { ExecutionSnapshot } from '@/executor/execution/snapshot'
|
||||
import type { ExecutionMetadata, IterationContext } from '@/executor/execution/types'
|
||||
import type {
|
||||
ExecutionMetadata,
|
||||
IterationContext,
|
||||
SerializableExecutionState,
|
||||
} from '@/executor/execution/types'
|
||||
import type { NormalizedBlockOutput, StreamingExecution } from '@/executor/types'
|
||||
import { hasExecutionResult } from '@/executor/utils/errors'
|
||||
import { Serializer } from '@/serializer'
|
||||
@@ -62,20 +66,23 @@ const ExecuteWorkflowSchema = z.object({
|
||||
runFromBlock: z
|
||||
.object({
|
||||
startBlockId: z.string().min(1, 'Start block ID is required'),
|
||||
sourceSnapshot: z.object({
|
||||
blockStates: z.record(z.any()),
|
||||
executedBlocks: z.array(z.string()),
|
||||
blockLogs: z.array(z.any()),
|
||||
decisions: z.object({
|
||||
router: z.record(z.string()),
|
||||
condition: z.record(z.string()),
|
||||
}),
|
||||
completedLoops: z.array(z.string()),
|
||||
loopExecutions: z.record(z.any()).optional(),
|
||||
parallelExecutions: z.record(z.any()).optional(),
|
||||
parallelBlockMapping: z.record(z.any()).optional(),
|
||||
activeExecutionPath: z.array(z.string()),
|
||||
}),
|
||||
sourceSnapshot: z
|
||||
.object({
|
||||
blockStates: z.record(z.any()),
|
||||
executedBlocks: z.array(z.string()),
|
||||
blockLogs: z.array(z.any()),
|
||||
decisions: z.object({
|
||||
router: z.record(z.string()),
|
||||
condition: z.record(z.string()),
|
||||
}),
|
||||
completedLoops: z.array(z.string()),
|
||||
loopExecutions: z.record(z.any()).optional(),
|
||||
parallelExecutions: z.record(z.any()).optional(),
|
||||
parallelBlockMapping: z.record(z.any()).optional(),
|
||||
activeExecutionPath: z.array(z.string()),
|
||||
})
|
||||
.optional(),
|
||||
executionId: z.string().optional(),
|
||||
})
|
||||
.optional(),
|
||||
})
|
||||
@@ -269,9 +276,47 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
||||
base64MaxBytes,
|
||||
workflowStateOverride,
|
||||
stopAfterBlockId,
|
||||
runFromBlock,
|
||||
runFromBlock: rawRunFromBlock,
|
||||
} = validation.data
|
||||
|
||||
// Resolve runFromBlock snapshot from executionId if needed
|
||||
let resolvedRunFromBlock:
|
||||
| { startBlockId: string; sourceSnapshot: SerializableExecutionState }
|
||||
| undefined
|
||||
if (rawRunFromBlock) {
|
||||
if (rawRunFromBlock.sourceSnapshot) {
|
||||
resolvedRunFromBlock = {
|
||||
startBlockId: rawRunFromBlock.startBlockId,
|
||||
sourceSnapshot: rawRunFromBlock.sourceSnapshot as SerializableExecutionState,
|
||||
}
|
||||
} else if (rawRunFromBlock.executionId) {
|
||||
const { getExecutionState, getLatestExecutionState } = await import(
|
||||
'@/lib/workflows/executor/execution-state'
|
||||
)
|
||||
const snapshot =
|
||||
rawRunFromBlock.executionId === 'latest'
|
||||
? await getLatestExecutionState(workflowId)
|
||||
: await getExecutionState(rawRunFromBlock.executionId)
|
||||
if (!snapshot) {
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: `No execution state found for ${rawRunFromBlock.executionId === 'latest' ? 'workflow' : `execution ${rawRunFromBlock.executionId}`}. Run the full workflow first.`,
|
||||
},
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
resolvedRunFromBlock = {
|
||||
startBlockId: rawRunFromBlock.startBlockId,
|
||||
sourceSnapshot: snapshot,
|
||||
}
|
||||
} else {
|
||||
return NextResponse.json(
|
||||
{ error: 'runFromBlock requires either sourceSnapshot or executionId' },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
// For API key and internal JWT auth, the entire body is the input (except for our control fields)
|
||||
// For session auth, the input is explicitly provided in the input field
|
||||
const input =
|
||||
@@ -496,7 +541,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
||||
includeFileBase64,
|
||||
base64MaxBytes,
|
||||
stopAfterBlockId,
|
||||
runFromBlock,
|
||||
runFromBlock: resolvedRunFromBlock,
|
||||
abortSignal: timeoutController.signal,
|
||||
})
|
||||
|
||||
@@ -837,7 +882,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
||||
includeFileBase64,
|
||||
base64MaxBytes,
|
||||
stopAfterBlockId,
|
||||
runFromBlock,
|
||||
runFromBlock: resolvedRunFromBlock,
|
||||
})
|
||||
|
||||
if (result.status === 'paused') {
|
||||
|
||||
@@ -4,11 +4,8 @@ import type React from 'react'
|
||||
import { useMemo } from 'react'
|
||||
import { RepeatIcon, SplitIcon } from 'lucide-react'
|
||||
import { Combobox, type ComboboxOptionGroup } from '@/components/emcn'
|
||||
import {
|
||||
extractFieldsFromSchema,
|
||||
parseResponseFormatSafely,
|
||||
} from '@/lib/core/utils/response-format'
|
||||
import { getToolOutputs } from '@/lib/workflows/blocks/block-outputs'
|
||||
import { getEffectiveBlockOutputs } from '@/lib/workflows/blocks/block-outputs'
|
||||
import { hasTriggerCapability } from '@/lib/workflows/triggers/trigger-utils'
|
||||
import { getBlock } from '@/blocks'
|
||||
import { useWorkflowDiffStore } from '@/stores/workflow-diff/store'
|
||||
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
|
||||
@@ -124,42 +121,27 @@ export function OutputSelect({
|
||||
: `block-${block.id}`
|
||||
|
||||
const blockConfig = getBlock(block.type)
|
||||
const responseFormatValue =
|
||||
shouldUseBaseline && baselineWorkflow
|
||||
? baselineWorkflow.blocks?.[block.id]?.subBlocks?.responseFormat?.value
|
||||
: subBlockValues?.[block.id]?.responseFormat
|
||||
const responseFormat = parseResponseFormatSafely(responseFormatValue, block.id)
|
||||
const isTriggerCapable = blockConfig ? hasTriggerCapability(blockConfig) : false
|
||||
const effectiveTriggerMode = Boolean(block.triggerMode && isTriggerCapable)
|
||||
|
||||
let outputsToProcess: Record<string, unknown> = {}
|
||||
|
||||
if (responseFormat) {
|
||||
const schemaFields = extractFieldsFromSchema(responseFormat)
|
||||
if (schemaFields.length > 0) {
|
||||
schemaFields.forEach((field) => {
|
||||
outputsToProcess[field.name] = { type: field.type }
|
||||
})
|
||||
} else {
|
||||
outputsToProcess = blockConfig?.outputs || {}
|
||||
const rawSubBlockValues =
|
||||
shouldUseBaseline && baselineWorkflow
|
||||
? baselineWorkflow.blocks?.[block.id]?.subBlocks
|
||||
: subBlockValues?.[block.id]
|
||||
const subBlocks: Record<string, { value: unknown }> = {}
|
||||
if (rawSubBlockValues && typeof rawSubBlockValues === 'object') {
|
||||
for (const [key, val] of Object.entries(rawSubBlockValues)) {
|
||||
// Handle both { value: ... } and raw value formats
|
||||
subBlocks[key] = val && typeof val === 'object' && 'value' in val ? val : { value: val }
|
||||
}
|
||||
} else {
|
||||
// Build subBlocks object for tool selector
|
||||
const rawSubBlockValues =
|
||||
shouldUseBaseline && baselineWorkflow
|
||||
? baselineWorkflow.blocks?.[block.id]?.subBlocks
|
||||
: subBlockValues?.[block.id]
|
||||
const subBlocks: Record<string, { value: unknown }> = {}
|
||||
if (rawSubBlockValues && typeof rawSubBlockValues === 'object') {
|
||||
for (const [key, val] of Object.entries(rawSubBlockValues)) {
|
||||
// Handle both { value: ... } and raw value formats
|
||||
subBlocks[key] = val && typeof val === 'object' && 'value' in val ? val : { value: val }
|
||||
}
|
||||
}
|
||||
|
||||
const toolOutputs = blockConfig ? getToolOutputs(blockConfig, subBlocks) : {}
|
||||
outputsToProcess =
|
||||
Object.keys(toolOutputs).length > 0 ? toolOutputs : blockConfig?.outputs || {}
|
||||
}
|
||||
|
||||
outputsToProcess = getEffectiveBlockOutputs(block.type, subBlocks, {
|
||||
triggerMode: effectiveTriggerMode,
|
||||
preferToolOutputs: !effectiveTriggerMode,
|
||||
}) as Record<string, unknown>
|
||||
|
||||
if (Object.keys(outputsToProcess).length === 0) return
|
||||
|
||||
const addOutput = (path: string, outputObj: unknown, prefix = '') => {
|
||||
|
||||
@@ -211,7 +211,7 @@ const CopilotMessage: FC<CopilotMessageProps> = memo(
|
||||
if (block.type === 'text') {
|
||||
const isLastTextBlock =
|
||||
index === message.contentBlocks!.length - 1 && block.type === 'text'
|
||||
const parsed = parseSpecialTags(block.content)
|
||||
const parsed = parseSpecialTags(block.content ?? '')
|
||||
// Mask credential IDs in the displayed content
|
||||
const cleanBlockContent = maskCredentialValue(
|
||||
parsed.cleanContent.replace(/\n{3,}/g, '\n\n')
|
||||
@@ -243,7 +243,7 @@ const CopilotMessage: FC<CopilotMessageProps> = memo(
|
||||
return (
|
||||
<div key={blockKey} className='w-full'>
|
||||
<ThinkingBlock
|
||||
content={maskCredentialValue(block.content)}
|
||||
content={maskCredentialValue(block.content ?? '')}
|
||||
isStreaming={isActivelyStreaming}
|
||||
hasFollowingContent={hasFollowingContent}
|
||||
hasSpecialTags={hasSpecialTags}
|
||||
@@ -251,7 +251,7 @@ const CopilotMessage: FC<CopilotMessageProps> = memo(
|
||||
</div>
|
||||
)
|
||||
}
|
||||
if (block.type === 'tool_call') {
|
||||
if (block.type === 'tool_call' && block.toolCall) {
|
||||
const blockKey = `tool-${block.toolCall.id}`
|
||||
|
||||
return (
|
||||
|
||||
@@ -1,20 +1,19 @@
|
||||
'use client'
|
||||
|
||||
import { memo, useEffect, useMemo, useRef, useState } from 'react'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import clsx from 'clsx'
|
||||
import { ChevronUp, LayoutList } from 'lucide-react'
|
||||
import Editor from 'react-simple-code-editor'
|
||||
import { Button, Code, getCodeEditorProps, highlight, languages } from '@/components/emcn'
|
||||
import { ClientToolCallState } from '@/lib/copilot/tools/client/base-tool'
|
||||
import { getClientTool } from '@/lib/copilot/tools/client/manager'
|
||||
import { getRegisteredTools } from '@/lib/copilot/tools/client/registry'
|
||||
import '@/lib/copilot/tools/client/init-tool-configs'
|
||||
import {
|
||||
getSubagentLabels as getSubagentLabelsFromConfig,
|
||||
getToolUIConfig,
|
||||
hasInterrupt as hasInterruptFromConfig,
|
||||
isSpecialTool as isSpecialToolFromConfig,
|
||||
} from '@/lib/copilot/tools/client/ui-config'
|
||||
CLIENT_EXECUTABLE_RUN_TOOLS,
|
||||
executeRunToolOnClient,
|
||||
} from '@/lib/copilot/client-sse/run-tool-execution'
|
||||
import {
|
||||
ClientToolCallState,
|
||||
TOOL_DISPLAY_REGISTRY,
|
||||
} from '@/lib/copilot/tools/client/tool-display-registry'
|
||||
import { formatDuration } from '@/lib/core/utils/formatting'
|
||||
import { CopilotMarkdownRenderer } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/copilot-message/components/markdown-renderer'
|
||||
import { SmoothStreamingText } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/copilot-message/components/smooth-streaming'
|
||||
@@ -25,7 +24,6 @@ import { getDisplayValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/co
|
||||
import { getBlock } from '@/blocks/registry'
|
||||
import type { CopilotToolCall } from '@/stores/panel'
|
||||
import { useCopilotStore } from '@/stores/panel'
|
||||
import { CLASS_TOOL_METADATA } from '@/stores/panel/copilot/store'
|
||||
import type { SubAgentContentBlock } from '@/stores/panel/copilot/types'
|
||||
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
|
||||
|
||||
@@ -710,8 +708,8 @@ const ShimmerOverlayText = memo(function ShimmerOverlayText({
|
||||
* @returns The completion label from UI config, defaults to 'Thought'
|
||||
*/
|
||||
function getSubagentCompletionLabel(toolName: string): string {
|
||||
const labels = getSubagentLabelsFromConfig(toolName, false)
|
||||
return labels?.completed ?? 'Thought'
|
||||
const labels = TOOL_DISPLAY_REGISTRY[toolName]?.uiConfig?.subagentLabels
|
||||
return labels?.completed || 'Thought'
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -943,7 +941,7 @@ const SubagentContentRenderer = memo(function SubagentContentRenderer({
|
||||
* Determines if a tool call should display with special gradient styling.
|
||||
*/
|
||||
function isSpecialToolCall(toolCall: CopilotToolCall): boolean {
|
||||
return isSpecialToolFromConfig(toolCall.name)
|
||||
return TOOL_DISPLAY_REGISTRY[toolCall.name]?.uiConfig?.isSpecial === true
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -1223,143 +1221,88 @@ const WorkflowEditSummary = memo(function WorkflowEditSummary({
|
||||
|
||||
/** Checks if a tool is server-side executed (not a client tool) */
|
||||
function isIntegrationTool(toolName: string): boolean {
|
||||
return !CLASS_TOOL_METADATA[toolName]
|
||||
return !TOOL_DISPLAY_REGISTRY[toolName]
|
||||
}
|
||||
|
||||
function shouldShowRunSkipButtons(toolCall: CopilotToolCall): boolean {
|
||||
if (hasInterruptFromConfig(toolCall.name) && toolCall.state === 'pending') {
|
||||
if (!toolCall.name || toolCall.name === 'unknown_tool') {
|
||||
return false
|
||||
}
|
||||
|
||||
if (toolCall.state !== ClientToolCallState.pending) {
|
||||
return false
|
||||
}
|
||||
|
||||
// Never show buttons for tools the user has marked as always-allowed
|
||||
if (useCopilotStore.getState().isToolAutoAllowed(toolCall.name)) {
|
||||
return false
|
||||
}
|
||||
|
||||
const hasInterrupt = !!TOOL_DISPLAY_REGISTRY[toolCall.name]?.uiConfig?.interrupt
|
||||
if (hasInterrupt) {
|
||||
return true
|
||||
}
|
||||
|
||||
const instance = getClientTool(toolCall.id)
|
||||
let hasInterrupt = !!instance?.getInterruptDisplays?.()
|
||||
if (!hasInterrupt) {
|
||||
try {
|
||||
const def = getRegisteredTools()[toolCall.name]
|
||||
if (def) {
|
||||
hasInterrupt =
|
||||
typeof def.hasInterrupt === 'function'
|
||||
? !!def.hasInterrupt(toolCall.params || {})
|
||||
: !!def.hasInterrupt
|
||||
}
|
||||
} catch {}
|
||||
}
|
||||
|
||||
if (hasInterrupt && toolCall.state === 'pending') {
|
||||
return true
|
||||
}
|
||||
|
||||
const mode = useCopilotStore.getState().mode
|
||||
if (mode === 'build' && isIntegrationTool(toolCall.name) && toolCall.state === 'pending') {
|
||||
// Integration tools (user-installed) always require approval
|
||||
if (isIntegrationTool(toolCall.name)) {
|
||||
return true
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
const toolCallLogger = createLogger('CopilotToolCall')
|
||||
|
||||
async function sendToolDecision(
|
||||
toolCallId: string,
|
||||
status: 'accepted' | 'rejected' | 'background'
|
||||
) {
|
||||
try {
|
||||
await fetch('/api/copilot/confirm', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({ toolCallId, status }),
|
||||
})
|
||||
} catch (error) {
|
||||
toolCallLogger.warn('Failed to send tool decision', {
|
||||
toolCallId,
|
||||
status,
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
async function handleRun(
|
||||
toolCall: CopilotToolCall,
|
||||
setToolCallState: any,
|
||||
onStateChange?: any,
|
||||
editedParams?: any
|
||||
) {
|
||||
const instance = getClientTool(toolCall.id)
|
||||
setToolCallState(toolCall, 'executing', editedParams ? { params: editedParams } : undefined)
|
||||
onStateChange?.('executing')
|
||||
await sendToolDecision(toolCall.id, 'accepted')
|
||||
|
||||
if (!instance && isIntegrationTool(toolCall.name)) {
|
||||
onStateChange?.('executing')
|
||||
try {
|
||||
await useCopilotStore.getState().executeIntegrationTool(toolCall.id)
|
||||
} catch (e) {
|
||||
setToolCallState(toolCall, 'error', { error: e instanceof Error ? e.message : String(e) })
|
||||
onStateChange?.('error')
|
||||
try {
|
||||
await fetch('/api/copilot/tools/mark-complete', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({
|
||||
id: toolCall.id,
|
||||
name: toolCall.name,
|
||||
status: 500,
|
||||
message: e instanceof Error ? e.message : 'Tool execution failed',
|
||||
data: { error: e instanceof Error ? e.message : String(e) },
|
||||
}),
|
||||
})
|
||||
} catch {
|
||||
console.error('[handleRun] Failed to notify backend of tool error:', toolCall.id)
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
if (!instance) return
|
||||
try {
|
||||
const mergedParams =
|
||||
editedParams ||
|
||||
(toolCall as any).params ||
|
||||
(toolCall as any).parameters ||
|
||||
(toolCall as any).input ||
|
||||
{}
|
||||
await instance.handleAccept?.(mergedParams)
|
||||
onStateChange?.('executing')
|
||||
} catch (e) {
|
||||
setToolCallState(toolCall, 'error', { error: e instanceof Error ? e.message : String(e) })
|
||||
// Client-executable run tools: execute on the client for real-time feedback
|
||||
// (block pulsing, console logs, stop button). The server defers execution
|
||||
// for these tools; the client reports back via mark-complete.
|
||||
if (CLIENT_EXECUTABLE_RUN_TOOLS.has(toolCall.name)) {
|
||||
const params = editedParams || toolCall.params || {}
|
||||
executeRunToolOnClient(toolCall.id, toolCall.name, params)
|
||||
}
|
||||
}
|
||||
|
||||
async function handleSkip(toolCall: CopilotToolCall, setToolCallState: any, onStateChange?: any) {
|
||||
const instance = getClientTool(toolCall.id)
|
||||
|
||||
if (!instance && isIntegrationTool(toolCall.name)) {
|
||||
setToolCallState(toolCall, 'rejected')
|
||||
onStateChange?.('rejected')
|
||||
|
||||
let notified = false
|
||||
for (let attempt = 0; attempt < 3 && !notified; attempt++) {
|
||||
try {
|
||||
const res = await fetch('/api/copilot/tools/mark-complete', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({
|
||||
id: toolCall.id,
|
||||
name: toolCall.name,
|
||||
status: 400,
|
||||
message: 'Tool execution skipped by user',
|
||||
data: { skipped: true, reason: 'user_skipped' },
|
||||
}),
|
||||
})
|
||||
if (res.ok) {
|
||||
notified = true
|
||||
}
|
||||
} catch (e) {
|
||||
if (attempt < 2) {
|
||||
await new Promise((resolve) => setTimeout(resolve, 500))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (!notified) {
|
||||
console.error('[handleSkip] Failed to notify backend after 3 attempts:', toolCall.id)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
if (instance) {
|
||||
try {
|
||||
await instance.handleReject?.()
|
||||
} catch {}
|
||||
}
|
||||
setToolCallState(toolCall, 'rejected')
|
||||
onStateChange?.('rejected')
|
||||
await sendToolDecision(toolCall.id, 'rejected')
|
||||
}
|
||||
|
||||
function getDisplayName(toolCall: CopilotToolCall): string {
|
||||
const fromStore = (toolCall as any).display?.text
|
||||
if (fromStore) return fromStore
|
||||
try {
|
||||
const def = getRegisteredTools()[toolCall.name] as any
|
||||
const byState = def?.metadata?.displayNames?.[toolCall.state]
|
||||
if (byState?.text) return byState.text
|
||||
} catch {}
|
||||
const registryEntry = TOOL_DISPLAY_REGISTRY[toolCall.name]
|
||||
const byState = registryEntry?.displayNames?.[toolCall.state as ClientToolCallState]
|
||||
if (byState?.text) return byState.text
|
||||
|
||||
const stateVerb = getStateVerb(toolCall.state)
|
||||
const formattedName = formatToolName(toolCall.name)
|
||||
@@ -1431,9 +1374,7 @@ function RunSkipButtons({
|
||||
setButtonsHidden(true)
|
||||
try {
|
||||
await addAutoAllowedTool(toolCall.name)
|
||||
if (!isIntegrationTool(toolCall.name)) {
|
||||
await handleRun(toolCall, setToolCallState, onStateChange, editedParams)
|
||||
}
|
||||
await handleRun(toolCall, setToolCallState, onStateChange, editedParams)
|
||||
} finally {
|
||||
setIsProcessing(false)
|
||||
actionInProgressRef.current = false
|
||||
@@ -1507,10 +1448,10 @@ export function ToolCall({
|
||||
const paramsRef = useRef(params)
|
||||
|
||||
// Check if this integration tool is auto-allowed
|
||||
// Subscribe to autoAllowedTools so we re-render when it changes
|
||||
const autoAllowedTools = useCopilotStore((s) => s.autoAllowedTools)
|
||||
const { removeAutoAllowedTool } = useCopilotStore()
|
||||
const isAutoAllowed = isIntegrationTool(toolCall.name) && autoAllowedTools.includes(toolCall.name)
|
||||
const { removeAutoAllowedTool, setToolCallState } = useCopilotStore()
|
||||
const isAutoAllowed = useCopilotStore(
|
||||
(s) => isIntegrationTool(toolCall.name) && s.isToolAutoAllowed(toolCall.name)
|
||||
)
|
||||
|
||||
// Update edited params when toolCall params change (deep comparison to avoid resetting user edits on ref change)
|
||||
useEffect(() => {
|
||||
@@ -1526,34 +1467,12 @@ export function ToolCall({
|
||||
toolCall.name === 'mark_todo_in_progress' ||
|
||||
toolCall.name === 'tool_search_tool_regex' ||
|
||||
toolCall.name === 'user_memory' ||
|
||||
toolCall.name === 'edit_respond' ||
|
||||
toolCall.name === 'debug_respond' ||
|
||||
toolCall.name === 'plan_respond' ||
|
||||
toolCall.name === 'research_respond' ||
|
||||
toolCall.name === 'info_respond' ||
|
||||
toolCall.name === 'deploy_respond' ||
|
||||
toolCall.name === 'superagent_respond'
|
||||
toolCall.name.endsWith('_respond')
|
||||
)
|
||||
return null
|
||||
|
||||
// Special rendering for subagent tools - show as thinking text with tool calls at top level
|
||||
const SUBAGENT_TOOLS = [
|
||||
'plan',
|
||||
'edit',
|
||||
'debug',
|
||||
'test',
|
||||
'deploy',
|
||||
'evaluate',
|
||||
'auth',
|
||||
'research',
|
||||
'knowledge',
|
||||
'custom_tool',
|
||||
'tour',
|
||||
'info',
|
||||
'workflow',
|
||||
'superagent',
|
||||
]
|
||||
const isSubagentTool = SUBAGENT_TOOLS.includes(toolCall.name)
|
||||
const isSubagentTool = TOOL_DISPLAY_REGISTRY[toolCall.name]?.uiConfig?.subagent === true
|
||||
|
||||
// For ALL subagent tools, don't show anything until we have blocks with content
|
||||
if (isSubagentTool) {
|
||||
@@ -1593,17 +1512,18 @@ export function ToolCall({
|
||||
stateStr === 'aborted'
|
||||
|
||||
// Allow rendering if:
|
||||
// 1. Tool is in CLASS_TOOL_METADATA (client tools), OR
|
||||
// 1. Tool is in TOOL_DISPLAY_REGISTRY (client tools), OR
|
||||
// 2. We're in build mode (integration tools are executed server-side), OR
|
||||
// 3. Tool call is already completed (historical - should always render)
|
||||
const isClientTool = !!CLASS_TOOL_METADATA[toolCall.name]
|
||||
const isClientTool = !!TOOL_DISPLAY_REGISTRY[toolCall.name]
|
||||
const isIntegrationToolInBuildMode = mode === 'build' && !isClientTool
|
||||
|
||||
if (!isClientTool && !isIntegrationToolInBuildMode && !isCompletedToolCall) {
|
||||
return null
|
||||
}
|
||||
const toolUIConfig = TOOL_DISPLAY_REGISTRY[toolCall.name]?.uiConfig
|
||||
// Check if tool has params table config (meaning it's expandable)
|
||||
const hasParamsTable = !!getToolUIConfig(toolCall.name)?.paramsTable
|
||||
const hasParamsTable = !!toolUIConfig?.paramsTable
|
||||
const isRunWorkflow = toolCall.name === 'run_workflow'
|
||||
const isExpandableTool =
|
||||
hasParamsTable ||
|
||||
@@ -1613,7 +1533,6 @@ export function ToolCall({
|
||||
const showButtons = isCurrentMessage && shouldShowRunSkipButtons(toolCall)
|
||||
|
||||
// Check UI config for secondary action - only show for current message tool calls
|
||||
const toolUIConfig = getToolUIConfig(toolCall.name)
|
||||
const secondaryAction = toolUIConfig?.secondaryAction
|
||||
const showSecondaryAction = secondaryAction?.showInStates.includes(
|
||||
toolCall.state as ClientToolCallState
|
||||
@@ -2211,16 +2130,9 @@ export function ToolCall({
|
||||
<div className='mt-[10px]'>
|
||||
<Button
|
||||
onClick={async () => {
|
||||
try {
|
||||
const instance = getClientTool(toolCall.id)
|
||||
instance?.setState?.((ClientToolCallState as any).background)
|
||||
await instance?.markToolComplete?.(
|
||||
200,
|
||||
'The user has chosen to move the workflow execution to the background. Check back with them later to know when the workflow execution is complete'
|
||||
)
|
||||
forceUpdate({})
|
||||
onStateChange?.('background')
|
||||
} catch {}
|
||||
setToolCallState(toolCall, ClientToolCallState.background)
|
||||
onStateChange?.('background')
|
||||
await sendToolDecision(toolCall.id, 'background')
|
||||
}}
|
||||
variant='tertiary'
|
||||
title='Move to Background'
|
||||
@@ -2232,21 +2144,9 @@ export function ToolCall({
|
||||
<div className='mt-[10px]'>
|
||||
<Button
|
||||
onClick={async () => {
|
||||
try {
|
||||
const instance = getClientTool(toolCall.id)
|
||||
const elapsedSeconds = instance?.getElapsedSeconds?.() || 0
|
||||
instance?.setState?.((ClientToolCallState as any).background, {
|
||||
result: { _elapsedSeconds: elapsedSeconds },
|
||||
})
|
||||
const { updateToolCallParams } = useCopilotStore.getState()
|
||||
updateToolCallParams?.(toolCall.id, { _elapsedSeconds: Math.round(elapsedSeconds) })
|
||||
await instance?.markToolComplete?.(
|
||||
200,
|
||||
`User woke you up after ${Math.round(elapsedSeconds)} seconds`
|
||||
)
|
||||
forceUpdate({})
|
||||
onStateChange?.('background')
|
||||
} catch {}
|
||||
setToolCallState(toolCall, ClientToolCallState.background)
|
||||
onStateChange?.('background')
|
||||
await sendToolDecision(toolCall.id, 'background')
|
||||
}}
|
||||
variant='tertiary'
|
||||
title='Wake'
|
||||
|
||||
@@ -246,6 +246,7 @@ export function getCommandDisplayLabel(commandId: string): string {
|
||||
* Model configuration options
|
||||
*/
|
||||
export const MODEL_OPTIONS = [
|
||||
{ value: 'claude-4.6-opus', label: 'Claude 4.6 Opus' },
|
||||
{ value: 'claude-4.5-opus', label: 'Claude 4.5 Opus' },
|
||||
{ value: 'claude-4.5-sonnet', label: 'Claude 4.5 Sonnet' },
|
||||
{ value: 'claude-4.5-haiku', label: 'Claude 4.5 Haiku' },
|
||||
|
||||
@@ -107,13 +107,13 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(({ panelWidth }, ref
|
||||
currentChat,
|
||||
selectChat,
|
||||
deleteChat,
|
||||
areChatsFresh,
|
||||
workflowId: copilotWorkflowId,
|
||||
setPlanTodos,
|
||||
closePlanTodos,
|
||||
clearPlanArtifact,
|
||||
savePlanArtifact,
|
||||
loadAutoAllowedTools,
|
||||
resumeActiveStream,
|
||||
} = useCopilotStore()
|
||||
|
||||
// Initialize copilot
|
||||
@@ -126,6 +126,7 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(({ panelWidth }, ref
|
||||
loadAutoAllowedTools,
|
||||
currentChat,
|
||||
isSendingMessage,
|
||||
resumeActiveStream,
|
||||
})
|
||||
|
||||
// Handle scroll management (80px stickiness for copilot)
|
||||
@@ -140,7 +141,6 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(({ panelWidth }, ref
|
||||
activeWorkflowId,
|
||||
copilotWorkflowId,
|
||||
loadChats,
|
||||
areChatsFresh,
|
||||
isSendingMessage,
|
||||
}
|
||||
)
|
||||
@@ -421,8 +421,8 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(({ panelWidth }, ref
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Show loading state until fully initialized */}
|
||||
{!isInitialized ? (
|
||||
{/* Show loading state until fully initialized, but skip if actively streaming (resume case) */}
|
||||
{!isInitialized && !isSendingMessage ? (
|
||||
<div className='flex h-full w-full items-center justify-center'>
|
||||
<div className='flex flex-col items-center gap-3'>
|
||||
<p className='text-muted-foreground text-sm'>Loading copilot</p>
|
||||
|
||||
@@ -10,7 +10,6 @@ interface UseChatHistoryProps {
|
||||
activeWorkflowId: string | null
|
||||
copilotWorkflowId: string | null
|
||||
loadChats: (forceRefresh: boolean) => Promise<void>
|
||||
areChatsFresh: (workflowId: string) => boolean
|
||||
isSendingMessage: boolean
|
||||
}
|
||||
|
||||
@@ -21,8 +20,7 @@ interface UseChatHistoryProps {
|
||||
* @returns Chat history utilities
|
||||
*/
|
||||
export function useChatHistory(props: UseChatHistoryProps) {
|
||||
const { chats, activeWorkflowId, copilotWorkflowId, loadChats, areChatsFresh, isSendingMessage } =
|
||||
props
|
||||
const { chats, activeWorkflowId, copilotWorkflowId, loadChats, isSendingMessage } = props
|
||||
|
||||
/** Groups chats by time period (Today, Yesterday, This Week, etc.) */
|
||||
const groupedChats = useMemo(() => {
|
||||
@@ -80,7 +78,7 @@ export function useChatHistory(props: UseChatHistoryProps) {
|
||||
/** Handles history dropdown opening and loads chats if needed (non-blocking) */
|
||||
const handleHistoryDropdownOpen = useCallback(
|
||||
(open: boolean) => {
|
||||
if (open && activeWorkflowId && !isSendingMessage && !areChatsFresh(activeWorkflowId)) {
|
||||
if (open && activeWorkflowId && !isSendingMessage) {
|
||||
loadChats(false).catch((error) => {
|
||||
logger.error('Failed to load chat history:', error)
|
||||
})
|
||||
@@ -90,7 +88,7 @@ export function useChatHistory(props: UseChatHistoryProps) {
|
||||
logger.info('Chat history opened during stream - showing cached data only')
|
||||
}
|
||||
},
|
||||
[activeWorkflowId, areChatsFresh, isSendingMessage, loadChats]
|
||||
[activeWorkflowId, isSendingMessage, loadChats]
|
||||
)
|
||||
|
||||
return {
|
||||
|
||||
@@ -14,6 +14,7 @@ interface UseCopilotInitializationProps {
|
||||
loadAutoAllowedTools: () => Promise<void>
|
||||
currentChat: any
|
||||
isSendingMessage: boolean
|
||||
resumeActiveStream: () => Promise<boolean>
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -32,11 +33,13 @@ export function useCopilotInitialization(props: UseCopilotInitializationProps) {
|
||||
loadAutoAllowedTools,
|
||||
currentChat,
|
||||
isSendingMessage,
|
||||
resumeActiveStream,
|
||||
} = props
|
||||
|
||||
const [isInitialized, setIsInitialized] = useState(false)
|
||||
const lastWorkflowIdRef = useRef<string | null>(null)
|
||||
const hasMountedRef = useRef(false)
|
||||
const hasResumedRef = useRef(false)
|
||||
|
||||
/** Initialize on mount - loads chats if needed. Never loads during streaming */
|
||||
useEffect(() => {
|
||||
@@ -105,6 +108,16 @@ export function useCopilotInitialization(props: UseCopilotInitializationProps) {
|
||||
isSendingMessage,
|
||||
])
|
||||
|
||||
/** Try to resume active stream on mount - runs early, before waiting for chats */
|
||||
useEffect(() => {
|
||||
if (hasResumedRef.current || isSendingMessage) return
|
||||
hasResumedRef.current = true
|
||||
// Resume immediately on mount - don't wait for isInitialized
|
||||
resumeActiveStream().catch((err) => {
|
||||
logger.warn('[Copilot] Failed to resume active stream', err)
|
||||
})
|
||||
}, [isSendingMessage, resumeActiveStream])
|
||||
|
||||
/** Load auto-allowed tools once on mount - runs immediately, independent of workflow */
|
||||
const hasLoadedAutoAllowedToolsRef = useRef(false)
|
||||
useEffect(() => {
|
||||
|
||||
@@ -61,8 +61,6 @@ function ConnectionItem({
|
||||
blockId: connection.id,
|
||||
blockType: connection.type,
|
||||
mergedSubBlocks,
|
||||
responseFormat: connection.responseFormat,
|
||||
operation: connection.operation,
|
||||
triggerMode: sourceBlock?.triggerMode,
|
||||
})
|
||||
const hasFields = fields.length > 0
|
||||
|
||||
@@ -14,16 +14,11 @@ import {
|
||||
} from '@/components/emcn'
|
||||
import { cn } from '@/lib/core/utils/cn'
|
||||
import {
|
||||
extractFieldsFromSchema,
|
||||
parseResponseFormatSafely,
|
||||
} from '@/lib/core/utils/response-format'
|
||||
import {
|
||||
getBlockOutputPaths,
|
||||
getBlockOutputType,
|
||||
getEffectiveBlockOutputPaths,
|
||||
getEffectiveBlockOutputType,
|
||||
getOutputPathsFromSchema,
|
||||
getToolOutputPaths,
|
||||
getToolOutputType,
|
||||
} from '@/lib/workflows/blocks/block-outputs'
|
||||
import { hasTriggerCapability } from '@/lib/workflows/triggers/trigger-utils'
|
||||
import { TRIGGER_TYPES } from '@/lib/workflows/triggers/triggers'
|
||||
import { KeyboardNavigationHandler } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/tag-dropdown/components/keyboard-navigation-handler'
|
||||
import type {
|
||||
@@ -214,43 +209,19 @@ const getOutputTypeForPath = (
|
||||
outputPath: string,
|
||||
mergedSubBlocksOverride?: Record<string, any>
|
||||
): string => {
|
||||
if (block?.triggerMode && blockConfig?.triggers?.enabled) {
|
||||
return getBlockOutputType(block.type, outputPath, mergedSubBlocksOverride, true)
|
||||
}
|
||||
if (block?.type === 'starter') {
|
||||
const startWorkflowValue =
|
||||
mergedSubBlocksOverride?.startWorkflow?.value ?? getSubBlockValue(blockId, 'startWorkflow')
|
||||
|
||||
if (startWorkflowValue === 'chat') {
|
||||
const chatModeTypes: Record<string, string> = {
|
||||
input: 'string',
|
||||
conversationId: 'string',
|
||||
files: 'file[]',
|
||||
}
|
||||
return chatModeTypes[outputPath] || 'any'
|
||||
}
|
||||
const inputFormatValue =
|
||||
mergedSubBlocksOverride?.inputFormat?.value ?? getSubBlockValue(blockId, 'inputFormat')
|
||||
if (inputFormatValue && Array.isArray(inputFormatValue)) {
|
||||
const field = inputFormatValue.find(
|
||||
(f: { name?: string; type?: string }) => f.name === outputPath
|
||||
)
|
||||
if (field?.type) return field.type
|
||||
}
|
||||
} else if (blockConfig?.category === 'triggers') {
|
||||
const blockState = useWorkflowStore.getState().blocks[blockId]
|
||||
const subBlocks = mergedSubBlocksOverride ?? (blockState?.subBlocks || {})
|
||||
return getBlockOutputType(block.type, outputPath, subBlocks)
|
||||
} else if (blockConfig?.tools?.config?.tool) {
|
||||
const blockState = useWorkflowStore.getState().blocks[blockId]
|
||||
const subBlocks = mergedSubBlocksOverride ?? (blockState?.subBlocks || {})
|
||||
return getToolOutputType(blockConfig, subBlocks, outputPath)
|
||||
if (block?.type === 'variables') {
|
||||
return 'any'
|
||||
}
|
||||
|
||||
const subBlocks =
|
||||
mergedSubBlocksOverride ?? useWorkflowStore.getState().blocks[blockId]?.subBlocks
|
||||
const triggerMode = block?.triggerMode && blockConfig?.triggers?.enabled
|
||||
return getBlockOutputType(block?.type ?? '', outputPath, subBlocks, triggerMode)
|
||||
const isTriggerCapable = blockConfig ? hasTriggerCapability(blockConfig) : false
|
||||
const triggerMode = Boolean(block?.triggerMode && isTriggerCapable)
|
||||
|
||||
return getEffectiveBlockOutputType(block?.type ?? '', outputPath, subBlocks, {
|
||||
triggerMode,
|
||||
preferToolOutputs: !triggerMode,
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -1088,24 +1059,9 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
|
||||
const normalizedBlockName = normalizeName(blockName)
|
||||
|
||||
const mergedSubBlocks = getMergedSubBlocks(activeSourceBlockId)
|
||||
const responseFormatValue = mergedSubBlocks?.responseFormat?.value
|
||||
const responseFormat = parseResponseFormatSafely(responseFormatValue, activeSourceBlockId)
|
||||
|
||||
let blockTags: string[]
|
||||
|
||||
if (sourceBlock.type === 'evaluator') {
|
||||
const metricsValue = getSubBlockValue(activeSourceBlockId, 'metrics')
|
||||
|
||||
if (metricsValue && Array.isArray(metricsValue) && metricsValue.length > 0) {
|
||||
const validMetrics = metricsValue.filter((metric: { name?: string }) => metric?.name)
|
||||
blockTags = validMetrics.map(
|
||||
(metric: { name: string }) => `${normalizedBlockName}.${metric.name.toLowerCase()}`
|
||||
)
|
||||
} else {
|
||||
const outputPaths = getBlockOutputPaths(sourceBlock.type, mergedSubBlocks)
|
||||
blockTags = outputPaths.map((path) => `${normalizedBlockName}.${path}`)
|
||||
}
|
||||
} else if (sourceBlock.type === 'variables') {
|
||||
if (sourceBlock.type === 'variables') {
|
||||
const variablesValue = getSubBlockValue(activeSourceBlockId, 'variables')
|
||||
|
||||
if (variablesValue && Array.isArray(variablesValue) && variablesValue.length > 0) {
|
||||
@@ -1119,106 +1075,24 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
|
||||
} else {
|
||||
blockTags = [normalizedBlockName]
|
||||
}
|
||||
} else if (responseFormat) {
|
||||
const schemaFields = extractFieldsFromSchema(responseFormat)
|
||||
if (schemaFields.length > 0) {
|
||||
blockTags = schemaFields.map((field) => `${normalizedBlockName}.${field.name}`)
|
||||
} else {
|
||||
const outputPaths = getBlockOutputPaths(
|
||||
sourceBlock.type,
|
||||
mergedSubBlocks,
|
||||
sourceBlock.triggerMode
|
||||
)
|
||||
blockTags = outputPaths.map((path) => `${normalizedBlockName}.${path}`)
|
||||
}
|
||||
} else if (!blockConfig.outputs || Object.keys(blockConfig.outputs).length === 0) {
|
||||
if (sourceBlock.type === 'starter') {
|
||||
const startWorkflowValue = mergedSubBlocks?.startWorkflow?.value
|
||||
|
||||
if (startWorkflowValue === 'chat') {
|
||||
blockTags = [
|
||||
`${normalizedBlockName}.input`,
|
||||
`${normalizedBlockName}.conversationId`,
|
||||
`${normalizedBlockName}.files`,
|
||||
]
|
||||
} else {
|
||||
const inputFormatValue = mergedSubBlocks?.inputFormat?.value
|
||||
|
||||
if (
|
||||
inputFormatValue &&
|
||||
Array.isArray(inputFormatValue) &&
|
||||
inputFormatValue.length > 0
|
||||
) {
|
||||
blockTags = inputFormatValue
|
||||
.filter((field: { name?: string }) => field.name && field.name.trim() !== '')
|
||||
.map((field: { name: string }) => `${normalizedBlockName}.${field.name}`)
|
||||
} else {
|
||||
blockTags = [normalizedBlockName]
|
||||
}
|
||||
}
|
||||
} else if (sourceBlock.type === 'api_trigger' || sourceBlock.type === 'input_trigger') {
|
||||
const inputFormatValue = mergedSubBlocks?.inputFormat?.value
|
||||
|
||||
if (inputFormatValue && Array.isArray(inputFormatValue) && inputFormatValue.length > 0) {
|
||||
blockTags = inputFormatValue
|
||||
.filter((field: { name?: string }) => field.name && field.name.trim() !== '')
|
||||
.map((field: { name: string }) => `${normalizedBlockName}.${field.name}`)
|
||||
} else {
|
||||
blockTags = []
|
||||
}
|
||||
} else {
|
||||
blockTags = [normalizedBlockName]
|
||||
}
|
||||
} else {
|
||||
if (blockConfig.category === 'triggers' || sourceBlock.type === 'starter') {
|
||||
const dynamicOutputs = getBlockOutputPaths(sourceBlock.type, mergedSubBlocks)
|
||||
if (dynamicOutputs.length > 0) {
|
||||
blockTags = dynamicOutputs.map((path) => `${normalizedBlockName}.${path}`)
|
||||
} else if (sourceBlock.type === 'starter') {
|
||||
blockTags = [normalizedBlockName]
|
||||
} else if (sourceBlock.type === TRIGGER_TYPES.GENERIC_WEBHOOK) {
|
||||
blockTags = [normalizedBlockName]
|
||||
} else {
|
||||
blockTags = []
|
||||
}
|
||||
} else if (sourceBlock?.triggerMode && blockConfig.triggers?.enabled) {
|
||||
const dynamicOutputs = getBlockOutputPaths(sourceBlock.type, mergedSubBlocks, true)
|
||||
if (dynamicOutputs.length > 0) {
|
||||
blockTags = dynamicOutputs.map((path) => `${normalizedBlockName}.${path}`)
|
||||
} else {
|
||||
const outputPaths = getBlockOutputPaths(sourceBlock.type, mergedSubBlocks, true)
|
||||
blockTags = outputPaths.map((path) => `${normalizedBlockName}.${path}`)
|
||||
}
|
||||
} else if (sourceBlock.type === 'human_in_the_loop') {
|
||||
const dynamicOutputs = getBlockOutputPaths(sourceBlock.type, mergedSubBlocks)
|
||||
const sourceBlockConfig = getBlock(sourceBlock.type)
|
||||
const isTriggerCapable = sourceBlockConfig ? hasTriggerCapability(sourceBlockConfig) : false
|
||||
const effectiveTriggerMode = Boolean(sourceBlock.triggerMode && isTriggerCapable)
|
||||
const outputPaths = getEffectiveBlockOutputPaths(sourceBlock.type, mergedSubBlocks, {
|
||||
triggerMode: effectiveTriggerMode,
|
||||
preferToolOutputs: !effectiveTriggerMode,
|
||||
})
|
||||
const allTags = outputPaths.map((path) => `${normalizedBlockName}.${path}`)
|
||||
|
||||
const isSelfReference = activeSourceBlockId === blockId
|
||||
|
||||
if (dynamicOutputs.length > 0) {
|
||||
const allTags = dynamicOutputs.map((path) => `${normalizedBlockName}.${path}`)
|
||||
blockTags = isSelfReference
|
||||
? allTags.filter((tag) => tag.endsWith('.url') || tag.endsWith('.resumeEndpoint'))
|
||||
: allTags
|
||||
} else {
|
||||
const outputPaths = getBlockOutputPaths(sourceBlock.type, mergedSubBlocks)
|
||||
const allTags = outputPaths.map((path) => `${normalizedBlockName}.${path}`)
|
||||
blockTags = isSelfReference
|
||||
? allTags.filter((tag) => tag.endsWith('.url') || tag.endsWith('.resumeEndpoint'))
|
||||
: allTags
|
||||
}
|
||||
if (sourceBlock.type === 'human_in_the_loop' && activeSourceBlockId === blockId) {
|
||||
blockTags = allTags.filter(
|
||||
(tag) => tag.endsWith('.url') || tag.endsWith('.resumeEndpoint')
|
||||
)
|
||||
} else if (allTags.length === 0) {
|
||||
blockTags = [normalizedBlockName]
|
||||
} else {
|
||||
const toolOutputPaths = getToolOutputPaths(blockConfig, mergedSubBlocks)
|
||||
|
||||
if (toolOutputPaths.length > 0) {
|
||||
blockTags = toolOutputPaths.map((path) => `${normalizedBlockName}.${path}`)
|
||||
} else {
|
||||
const outputPaths = getBlockOutputPaths(
|
||||
sourceBlock.type,
|
||||
mergedSubBlocks,
|
||||
sourceBlock.triggerMode
|
||||
)
|
||||
blockTags = outputPaths.map((path) => `${normalizedBlockName}.${path}`)
|
||||
}
|
||||
blockTags = allTags
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1432,45 +1306,10 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
|
||||
const normalizedBlockName = normalizeName(blockName)
|
||||
|
||||
const mergedSubBlocks = getMergedSubBlocks(accessibleBlockId)
|
||||
const responseFormatValue = mergedSubBlocks?.responseFormat?.value
|
||||
const responseFormat = parseResponseFormatSafely(responseFormatValue, accessibleBlockId)
|
||||
|
||||
let blockTags: string[]
|
||||
|
||||
if (blockConfig.category === 'triggers' || accessibleBlock.type === 'starter') {
|
||||
const dynamicOutputs = getBlockOutputPaths(accessibleBlock.type, mergedSubBlocks)
|
||||
|
||||
if (dynamicOutputs.length > 0) {
|
||||
blockTags = dynamicOutputs.map((path) => `${normalizedBlockName}.${path}`)
|
||||
} else if (accessibleBlock.type === 'starter') {
|
||||
const startWorkflowValue = mergedSubBlocks?.startWorkflow?.value
|
||||
if (startWorkflowValue === 'chat') {
|
||||
blockTags = [
|
||||
`${normalizedBlockName}.input`,
|
||||
`${normalizedBlockName}.conversationId`,
|
||||
`${normalizedBlockName}.files`,
|
||||
]
|
||||
} else {
|
||||
blockTags = [normalizedBlockName]
|
||||
}
|
||||
} else if (accessibleBlock.type === TRIGGER_TYPES.GENERIC_WEBHOOK) {
|
||||
blockTags = [normalizedBlockName]
|
||||
} else {
|
||||
blockTags = []
|
||||
}
|
||||
} else if (accessibleBlock.type === 'evaluator') {
|
||||
const metricsValue = getSubBlockValue(accessibleBlockId, 'metrics')
|
||||
|
||||
if (metricsValue && Array.isArray(metricsValue) && metricsValue.length > 0) {
|
||||
const validMetrics = metricsValue.filter((metric: { name?: string }) => metric?.name)
|
||||
blockTags = validMetrics.map(
|
||||
(metric: { name: string }) => `${normalizedBlockName}.${metric.name.toLowerCase()}`
|
||||
)
|
||||
} else {
|
||||
const outputPaths = getBlockOutputPaths(accessibleBlock.type, mergedSubBlocks)
|
||||
blockTags = outputPaths.map((path) => `${normalizedBlockName}.${path}`)
|
||||
}
|
||||
} else if (accessibleBlock.type === 'variables') {
|
||||
if (accessibleBlock.type === 'variables') {
|
||||
const variablesValue = getSubBlockValue(accessibleBlockId, 'variables')
|
||||
|
||||
if (variablesValue && Array.isArray(variablesValue) && variablesValue.length > 0) {
|
||||
@@ -1484,57 +1323,26 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
|
||||
} else {
|
||||
blockTags = [normalizedBlockName]
|
||||
}
|
||||
} else if (responseFormat) {
|
||||
const schemaFields = extractFieldsFromSchema(responseFormat)
|
||||
if (schemaFields.length > 0) {
|
||||
blockTags = schemaFields.map((field) => `${normalizedBlockName}.${field.name}`)
|
||||
} else {
|
||||
const outputPaths = getBlockOutputPaths(
|
||||
accessibleBlock.type,
|
||||
mergedSubBlocks,
|
||||
accessibleBlock.triggerMode
|
||||
)
|
||||
blockTags = outputPaths.map((path) => `${normalizedBlockName}.${path}`)
|
||||
}
|
||||
} else if (!blockConfig.outputs || Object.keys(blockConfig.outputs).length === 0) {
|
||||
blockTags = [normalizedBlockName]
|
||||
} else {
|
||||
const blockState = blocks[accessibleBlockId]
|
||||
if (blockState?.triggerMode && blockConfig.triggers?.enabled) {
|
||||
const dynamicOutputs = getBlockOutputPaths(accessibleBlock.type, mergedSubBlocks, true)
|
||||
if (dynamicOutputs.length > 0) {
|
||||
blockTags = dynamicOutputs.map((path) => `${normalizedBlockName}.${path}`)
|
||||
} else {
|
||||
const outputPaths = getBlockOutputPaths(accessibleBlock.type, mergedSubBlocks, true)
|
||||
blockTags = outputPaths.map((path) => `${normalizedBlockName}.${path}`)
|
||||
}
|
||||
} else if (accessibleBlock.type === 'human_in_the_loop') {
|
||||
const dynamicOutputs = getBlockOutputPaths(accessibleBlock.type, mergedSubBlocks)
|
||||
const accessibleBlockConfig = getBlock(accessibleBlock.type)
|
||||
const isTriggerCapable = accessibleBlockConfig
|
||||
? hasTriggerCapability(accessibleBlockConfig)
|
||||
: false
|
||||
const effectiveTriggerMode = Boolean(accessibleBlock.triggerMode && isTriggerCapable)
|
||||
const outputPaths = getEffectiveBlockOutputPaths(accessibleBlock.type, mergedSubBlocks, {
|
||||
triggerMode: effectiveTriggerMode,
|
||||
preferToolOutputs: !effectiveTriggerMode,
|
||||
})
|
||||
const allTags = outputPaths.map((path) => `${normalizedBlockName}.${path}`)
|
||||
|
||||
const isSelfReference = accessibleBlockId === blockId
|
||||
|
||||
if (dynamicOutputs.length > 0) {
|
||||
const allTags = dynamicOutputs.map((path) => `${normalizedBlockName}.${path}`)
|
||||
blockTags = isSelfReference
|
||||
? allTags.filter((tag) => tag.endsWith('.url') || tag.endsWith('.resumeEndpoint'))
|
||||
: allTags
|
||||
} else {
|
||||
blockTags = [`${normalizedBlockName}.url`, `${normalizedBlockName}.resumeEndpoint`]
|
||||
}
|
||||
if (accessibleBlock.type === 'human_in_the_loop' && accessibleBlockId === blockId) {
|
||||
blockTags = allTags.filter(
|
||||
(tag) => tag.endsWith('.url') || tag.endsWith('.resumeEndpoint')
|
||||
)
|
||||
} else if (allTags.length === 0) {
|
||||
blockTags = [normalizedBlockName]
|
||||
} else {
|
||||
const toolOutputPaths = getToolOutputPaths(blockConfig, mergedSubBlocks)
|
||||
|
||||
if (toolOutputPaths.length > 0) {
|
||||
blockTags = toolOutputPaths.map((path) => `${normalizedBlockName}.${path}`)
|
||||
} else {
|
||||
const outputPaths = getBlockOutputPaths(
|
||||
accessibleBlock.type,
|
||||
mergedSubBlocks,
|
||||
accessibleBlock.triggerMode
|
||||
)
|
||||
|
||||
blockTags = outputPaths.map((path) => `${normalizedBlockName}.${path}`)
|
||||
}
|
||||
blockTags = allTags
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -62,7 +62,12 @@ import {
|
||||
type CustomTool as CustomToolDefinition,
|
||||
useCustomTools,
|
||||
} from '@/hooks/queries/custom-tools'
|
||||
import { useForceRefreshMcpTools, useMcpServers, useStoredMcpTools } from '@/hooks/queries/mcp'
|
||||
import {
|
||||
useForceRefreshMcpTools,
|
||||
useMcpServers,
|
||||
useMcpToolsEvents,
|
||||
useStoredMcpTools,
|
||||
} from '@/hooks/queries/mcp'
|
||||
import {
|
||||
useChildDeploymentStatus,
|
||||
useDeployChildWorkflow,
|
||||
@@ -1035,6 +1040,7 @@ export const ToolInput = memo(function ToolInput({
|
||||
const { data: mcpServers = [], isLoading: mcpServersLoading } = useMcpServers(workspaceId)
|
||||
const { data: storedMcpTools = [] } = useStoredMcpTools(workspaceId)
|
||||
const forceRefreshMcpTools = useForceRefreshMcpTools()
|
||||
useMcpToolsEvents(workspaceId)
|
||||
const openSettingsModal = useSettingsModalStore((state) => state.openModal)
|
||||
const mcpDataLoading = mcpLoading || mcpServersLoading
|
||||
|
||||
|
||||
@@ -1,10 +1,8 @@
|
||||
import { useShallow } from 'zustand/react/shallow'
|
||||
import {
|
||||
extractFieldsFromSchema,
|
||||
parseResponseFormatSafely,
|
||||
} from '@/lib/core/utils/response-format'
|
||||
import { getBlockOutputs } from '@/lib/workflows/blocks/block-outputs'
|
||||
import { getEffectiveBlockOutputs } from '@/lib/workflows/blocks/block-outputs'
|
||||
import { BlockPathCalculator } from '@/lib/workflows/blocks/block-path-calculator'
|
||||
import { hasTriggerCapability } from '@/lib/workflows/triggers/trigger-utils'
|
||||
import { getBlock } from '@/blocks'
|
||||
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
||||
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
|
||||
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
|
||||
@@ -20,18 +18,7 @@ export interface ConnectedBlock {
|
||||
type: string
|
||||
outputType: string | string[]
|
||||
name: string
|
||||
responseFormat?: {
|
||||
// Support both formats
|
||||
fields?: Field[]
|
||||
name?: string
|
||||
schema?: {
|
||||
type: string
|
||||
properties: Record<string, any>
|
||||
required?: string[]
|
||||
}
|
||||
}
|
||||
outputs?: Record<string, any>
|
||||
operation?: string
|
||||
}
|
||||
|
||||
export function useBlockConnections(blockId: string) {
|
||||
@@ -102,47 +89,32 @@ export function useBlockConnections(blockId: string) {
|
||||
|
||||
// Get merged subblocks for this source block
|
||||
const mergedSubBlocks = getMergedSubBlocks(sourceId)
|
||||
const blockConfig = getBlock(sourceBlock.type)
|
||||
const isTriggerCapable = blockConfig ? hasTriggerCapability(blockConfig) : false
|
||||
const effectiveTriggerMode = Boolean(sourceBlock.triggerMode && isTriggerCapable)
|
||||
|
||||
// Get the response format from the subblock store
|
||||
const responseFormatValue = useSubBlockStore.getState().getValue(sourceId, 'responseFormat')
|
||||
const blockOutputs = getEffectiveBlockOutputs(sourceBlock.type, mergedSubBlocks, {
|
||||
triggerMode: effectiveTriggerMode,
|
||||
preferToolOutputs: !effectiveTriggerMode,
|
||||
})
|
||||
|
||||
// Safely parse response format with proper error handling
|
||||
const responseFormat = parseResponseFormatSafely(responseFormatValue, sourceId)
|
||||
|
||||
// Get operation value for tool-based blocks
|
||||
const operationValue = useSubBlockStore.getState().getValue(sourceId, 'operation')
|
||||
|
||||
// Use getBlockOutputs to properly handle dynamic outputs from inputFormat
|
||||
const blockOutputs = getBlockOutputs(
|
||||
sourceBlock.type,
|
||||
mergedSubBlocks,
|
||||
sourceBlock.triggerMode
|
||||
)
|
||||
|
||||
// Extract fields from the response format if available, otherwise use block outputs
|
||||
let outputFields: Field[]
|
||||
if (responseFormat) {
|
||||
outputFields = extractFieldsFromSchema(responseFormat)
|
||||
} else {
|
||||
// Convert block outputs to field format
|
||||
outputFields = Object.entries(blockOutputs).map(([key, value]: [string, any]) => ({
|
||||
const outputFields: Field[] = Object.entries(blockOutputs).map(
|
||||
([key, value]: [string, any]) => ({
|
||||
name: key,
|
||||
type: value && typeof value === 'object' && 'type' in value ? value.type : 'string',
|
||||
description:
|
||||
value && typeof value === 'object' && 'description' in value
|
||||
? value.description
|
||||
: undefined,
|
||||
}))
|
||||
}
|
||||
})
|
||||
)
|
||||
|
||||
return {
|
||||
id: sourceBlock.id,
|
||||
type: sourceBlock.type,
|
||||
outputType: outputFields.map((field: Field) => field.name),
|
||||
name: sourceBlock.name,
|
||||
responseFormat,
|
||||
outputs: blockOutputs,
|
||||
operation: operationValue,
|
||||
distance: nodeDistances.get(sourceId) || Number.POSITIVE_INFINITY,
|
||||
}
|
||||
})
|
||||
|
||||
@@ -1,13 +1,8 @@
|
||||
'use client'
|
||||
|
||||
import { useMemo } from 'react'
|
||||
import { extractFieldsFromSchema } from '@/lib/core/utils/response-format'
|
||||
import {
|
||||
getBlockOutputPaths,
|
||||
getBlockOutputs,
|
||||
getToolOutputs,
|
||||
} from '@/lib/workflows/blocks/block-outputs'
|
||||
import { TRIGGER_TYPES } from '@/lib/workflows/triggers/triggers'
|
||||
import { getEffectiveBlockOutputs } from '@/lib/workflows/blocks/block-outputs'
|
||||
import { hasTriggerCapability } from '@/lib/workflows/triggers/trigger-utils'
|
||||
import type { SchemaField } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/connection-blocks/components/field-item/field-item'
|
||||
import { getBlock } from '@/blocks'
|
||||
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
|
||||
@@ -76,11 +71,7 @@ const extractNestedFields = (properties: Record<string, any>): SchemaField[] =>
|
||||
/**
|
||||
* Creates a schema field from an output definition
|
||||
*/
|
||||
const createFieldFromOutput = (
|
||||
name: string,
|
||||
output: any,
|
||||
responseFormatFields?: SchemaField[]
|
||||
): SchemaField => {
|
||||
const createFieldFromOutput = (name: string, output: any): SchemaField => {
|
||||
const hasExplicitType = isObject(output) && typeof output.type === 'string'
|
||||
const type = hasExplicitType ? output.type : isObject(output) ? 'object' : 'string'
|
||||
|
||||
@@ -90,11 +81,7 @@ const createFieldFromOutput = (
|
||||
description: isObject(output) && 'description' in output ? output.description : undefined,
|
||||
}
|
||||
|
||||
if (name === 'data' && responseFormatFields && responseFormatFields.length > 0) {
|
||||
field.children = responseFormatFields
|
||||
} else {
|
||||
field.children = extractChildFields(output)
|
||||
}
|
||||
field.children = extractChildFields(output)
|
||||
|
||||
return field
|
||||
}
|
||||
@@ -103,8 +90,6 @@ interface UseBlockOutputFieldsParams {
|
||||
blockId: string
|
||||
blockType: string
|
||||
mergedSubBlocks?: Record<string, any>
|
||||
responseFormat?: any
|
||||
operation?: string
|
||||
triggerMode?: boolean
|
||||
}
|
||||
|
||||
@@ -116,8 +101,6 @@ export function useBlockOutputFields({
|
||||
blockId,
|
||||
blockType,
|
||||
mergedSubBlocks,
|
||||
responseFormat,
|
||||
operation,
|
||||
triggerMode,
|
||||
}: UseBlockOutputFieldsParams): SchemaField[] {
|
||||
return useMemo(() => {
|
||||
@@ -138,21 +121,6 @@ export function useBlockOutputFields({
|
||||
return []
|
||||
}
|
||||
|
||||
// Handle evaluator blocks - use metrics if available
|
||||
if (blockType === 'evaluator') {
|
||||
const metricsValue = mergedSubBlocks?.metrics?.value ?? getSubBlockValue(blockId, 'metrics')
|
||||
|
||||
if (metricsValue && Array.isArray(metricsValue) && metricsValue.length > 0) {
|
||||
const validMetrics = metricsValue.filter((metric: { name?: string }) => metric?.name)
|
||||
return validMetrics.map((metric: { name: string }) => ({
|
||||
name: metric.name.toLowerCase(),
|
||||
type: 'number',
|
||||
description: `Metric: ${metric.name}`,
|
||||
}))
|
||||
}
|
||||
// Fall through to use blockConfig.outputs
|
||||
}
|
||||
|
||||
// Handle variables blocks - use variable assignments if available
|
||||
if (blockType === 'variables') {
|
||||
const variablesValue =
|
||||
@@ -172,123 +140,16 @@ export function useBlockOutputFields({
|
||||
return []
|
||||
}
|
||||
|
||||
// Get base outputs using getBlockOutputs (handles triggers, starter, approval, etc.)
|
||||
let baseOutputs: Record<string, any> = {}
|
||||
|
||||
if (blockConfig.category === 'triggers' || blockType === 'starter') {
|
||||
// Use getBlockOutputPaths to get dynamic outputs, then reconstruct the structure
|
||||
const outputPaths = getBlockOutputPaths(blockType, mergedSubBlocks, triggerMode)
|
||||
if (outputPaths.length > 0) {
|
||||
// Reconstruct outputs structure from paths
|
||||
// This is a simplified approach - we'll use the paths to build the structure
|
||||
baseOutputs = getBlockOutputs(blockType, mergedSubBlocks, triggerMode)
|
||||
} else if (blockType === 'starter') {
|
||||
const startWorkflowValue = mergedSubBlocks?.startWorkflow?.value
|
||||
if (startWorkflowValue === 'chat') {
|
||||
baseOutputs = {
|
||||
input: { type: 'string', description: 'User message' },
|
||||
conversationId: { type: 'string', description: 'Conversation ID' },
|
||||
files: { type: 'file[]', description: 'Uploaded files' },
|
||||
}
|
||||
} else {
|
||||
const inputFormatValue = mergedSubBlocks?.inputFormat?.value
|
||||
if (inputFormatValue && Array.isArray(inputFormatValue) && inputFormatValue.length > 0) {
|
||||
baseOutputs = {}
|
||||
inputFormatValue.forEach((field: { name?: string; type?: string }) => {
|
||||
if (field.name && field.name.trim() !== '') {
|
||||
baseOutputs[field.name] = {
|
||||
type: field.type || 'string',
|
||||
description: `Field from input format`,
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
} else if (blockType === TRIGGER_TYPES.GENERIC_WEBHOOK) {
|
||||
// Generic webhook returns the whole payload
|
||||
baseOutputs = {}
|
||||
} else {
|
||||
baseOutputs = {}
|
||||
}
|
||||
} else if (triggerMode && blockConfig.triggers?.enabled) {
|
||||
// Trigger mode enabled
|
||||
const dynamicOutputs = getBlockOutputPaths(blockType, mergedSubBlocks, true)
|
||||
if (dynamicOutputs.length > 0) {
|
||||
baseOutputs = getBlockOutputs(blockType, mergedSubBlocks, true)
|
||||
} else {
|
||||
baseOutputs = blockConfig.outputs || {}
|
||||
}
|
||||
} else if (blockType === 'approval') {
|
||||
// Approval block uses dynamic outputs from inputFormat
|
||||
baseOutputs = getBlockOutputs(blockType, mergedSubBlocks)
|
||||
} else {
|
||||
// For tool-based blocks, try to get tool outputs first
|
||||
const toolOutputs = blockConfig ? getToolOutputs(blockConfig, mergedSubBlocks) : {}
|
||||
|
||||
if (Object.keys(toolOutputs).length > 0) {
|
||||
baseOutputs = toolOutputs
|
||||
} else {
|
||||
baseOutputs = getBlockOutputs(blockType, mergedSubBlocks, triggerMode)
|
||||
}
|
||||
}
|
||||
|
||||
// Handle responseFormat
|
||||
const responseFormatFields = responseFormat ? extractFieldsFromSchema(responseFormat) : []
|
||||
|
||||
// If responseFormat exists and has fields, merge with base outputs
|
||||
if (responseFormatFields.length > 0) {
|
||||
// If base outputs is empty, use responseFormat fields directly
|
||||
if (Object.keys(baseOutputs).length === 0) {
|
||||
return responseFormatFields.map((field) => ({
|
||||
name: field.name,
|
||||
type: field.type,
|
||||
description: field.description,
|
||||
children: undefined, // ResponseFormat fields are flat
|
||||
}))
|
||||
}
|
||||
|
||||
// Otherwise, merge: responseFormat takes precedence for 'data' field
|
||||
const fields: SchemaField[] = []
|
||||
const responseFormatFieldNames = new Set(responseFormatFields.map((f) => f.name))
|
||||
|
||||
// Add base outputs, replacing 'data' with responseFormat fields if present
|
||||
for (const [name, output] of Object.entries(baseOutputs)) {
|
||||
if (name === 'data' && responseFormatFields.length > 0) {
|
||||
fields.push(
|
||||
createFieldFromOutput(
|
||||
name,
|
||||
output,
|
||||
responseFormatFields.map((f) => ({
|
||||
name: f.name,
|
||||
type: f.type,
|
||||
description: f.description,
|
||||
}))
|
||||
)
|
||||
)
|
||||
} else if (!responseFormatFieldNames.has(name)) {
|
||||
fields.push(createFieldFromOutput(name, output))
|
||||
}
|
||||
}
|
||||
|
||||
// Add responseFormat fields that aren't in base outputs
|
||||
for (const field of responseFormatFields) {
|
||||
if (!baseOutputs[field.name]) {
|
||||
fields.push({
|
||||
name: field.name,
|
||||
type: field.type,
|
||||
description: field.description,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
return fields
|
||||
}
|
||||
|
||||
// No responseFormat, just use base outputs
|
||||
const isTriggerCapable = hasTriggerCapability(blockConfig)
|
||||
const effectiveTriggerMode = Boolean(triggerMode && isTriggerCapable)
|
||||
const baseOutputs = getEffectiveBlockOutputs(blockType, mergedSubBlocks, {
|
||||
triggerMode: effectiveTriggerMode,
|
||||
preferToolOutputs: !effectiveTriggerMode,
|
||||
}) as Record<string, any>
|
||||
if (Object.keys(baseOutputs).length === 0) {
|
||||
return []
|
||||
}
|
||||
|
||||
return Object.entries(baseOutputs).map(([name, output]) => createFieldFromOutput(name, output))
|
||||
}, [blockId, blockType, mergedSubBlocks, responseFormat, operation, triggerMode])
|
||||
}, [blockId, blockType, mergedSubBlocks, triggerMode])
|
||||
}
|
||||
|
||||
@@ -11,6 +11,12 @@ export interface WorkflowExecutionOptions {
|
||||
executionId?: string
|
||||
onBlockComplete?: (blockId: string, output: any) => Promise<void>
|
||||
overrideTriggerType?: 'chat' | 'manual' | 'api'
|
||||
stopAfterBlockId?: string
|
||||
/** For run_from_block / run_block: start from a specific block using cached state */
|
||||
runFromBlock?: {
|
||||
startBlockId: string
|
||||
executionId?: string
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -39,6 +45,15 @@ export async function executeWorkflowWithFullLogging(
|
||||
triggerType: options.overrideTriggerType || 'manual',
|
||||
useDraftState: true,
|
||||
isClientSession: true,
|
||||
...(options.stopAfterBlockId ? { stopAfterBlockId: options.stopAfterBlockId } : {}),
|
||||
...(options.runFromBlock
|
||||
? {
|
||||
runFromBlock: {
|
||||
startBlockId: options.runFromBlock.startBlockId,
|
||||
executionId: options.runFromBlock.executionId || 'latest',
|
||||
},
|
||||
}
|
||||
: {}),
|
||||
}
|
||||
|
||||
const response = await fetch(`/api/workflows/${activeWorkflowId}/execute`, {
|
||||
|
||||
@@ -18,7 +18,7 @@ import 'reactflow/dist/style.css'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { useShallow } from 'zustand/react/shallow'
|
||||
import { useSession } from '@/lib/auth/auth-client'
|
||||
import type { OAuthConnectEventDetail } from '@/lib/copilot/tools/client/other/oauth-request-access'
|
||||
import type { OAuthConnectEventDetail } from '@/lib/copilot/tools/client/base-tool'
|
||||
import type { OAuthProvider } from '@/lib/oauth'
|
||||
import { BLOCK_DIMENSIONS, CONTAINER_DIMENSIONS } from '@/lib/workflows/blocks/block-dimensions'
|
||||
import { TriggerUtils } from '@/lib/workflows/triggers/triggers'
|
||||
|
||||
@@ -894,14 +894,14 @@ export function MCP({ initialServerId }: MCPProps) {
|
||||
disabled={!hasParams}
|
||||
>
|
||||
<div className='flex-1'>
|
||||
<div className='flex items-center gap-[8px]'>
|
||||
<p className='font-medium text-[13px] text-[var(--text-primary)]'>
|
||||
<div className='flex h-[16px] items-center gap-[6px]'>
|
||||
<p className='font-medium text-[13px] text-[var(--text-primary)] leading-none'>
|
||||
{tool.name}
|
||||
</p>
|
||||
{issues.length > 0 && (
|
||||
<Tooltip.Root>
|
||||
<Tooltip.Trigger asChild>
|
||||
<div>
|
||||
<div className='flex items-center'>
|
||||
<Badge
|
||||
variant={getIssueBadgeVariant(issues[0].issue)}
|
||||
size='sm'
|
||||
|
||||
@@ -269,14 +269,32 @@ Return ONLY the description text - no explanations.`,
|
||||
'Describe the issue details (e.g., "users seeing 500 error when clicking submit")...',
|
||||
},
|
||||
},
|
||||
// Write Issue additional fields
|
||||
// Write Issue type and parent
|
||||
{
|
||||
id: 'issueType',
|
||||
title: 'Issue Type',
|
||||
type: 'short-input',
|
||||
placeholder: 'Issue type (e.g., Task, Story, Bug, Epic)',
|
||||
dependsOn: ['projectId'],
|
||||
condition: { field: 'operation', value: 'write' },
|
||||
value: () => 'Task',
|
||||
},
|
||||
{
|
||||
id: 'parentIssue',
|
||||
title: 'Parent Issue Key',
|
||||
type: 'short-input',
|
||||
placeholder: 'Parent issue key for subtasks (e.g., PROJ-123)',
|
||||
dependsOn: ['projectId'],
|
||||
condition: { field: 'operation', value: 'write' },
|
||||
},
|
||||
// Write/Update Issue additional fields
|
||||
{
|
||||
id: 'assignee',
|
||||
title: 'Assignee Account ID',
|
||||
type: 'short-input',
|
||||
placeholder: 'Assignee account ID (e.g., 5b109f2e9729b51b54dc274d)',
|
||||
dependsOn: ['projectId'],
|
||||
condition: { field: 'operation', value: 'write' },
|
||||
condition: { field: 'operation', value: ['write', 'update'] },
|
||||
},
|
||||
{
|
||||
id: 'priority',
|
||||
@@ -284,7 +302,7 @@ Return ONLY the description text - no explanations.`,
|
||||
type: 'short-input',
|
||||
placeholder: 'Priority ID or name (e.g., "10000" or "High")',
|
||||
dependsOn: ['projectId'],
|
||||
condition: { field: 'operation', value: 'write' },
|
||||
condition: { field: 'operation', value: ['write', 'update'] },
|
||||
},
|
||||
{
|
||||
id: 'labels',
|
||||
@@ -292,7 +310,7 @@ Return ONLY the description text - no explanations.`,
|
||||
type: 'short-input',
|
||||
placeholder: 'Comma-separated labels (e.g., bug, urgent)',
|
||||
dependsOn: ['projectId'],
|
||||
condition: { field: 'operation', value: 'write' },
|
||||
condition: { field: 'operation', value: ['write', 'update'] },
|
||||
},
|
||||
{
|
||||
id: 'duedate',
|
||||
@@ -300,7 +318,7 @@ Return ONLY the description text - no explanations.`,
|
||||
type: 'short-input',
|
||||
placeholder: 'YYYY-MM-DD (e.g., 2024-12-31)',
|
||||
dependsOn: ['projectId'],
|
||||
condition: { field: 'operation', value: 'write' },
|
||||
condition: { field: 'operation', value: ['write', 'update'] },
|
||||
wandConfig: {
|
||||
enabled: true,
|
||||
prompt: `Generate a date in YYYY-MM-DD format based on the user's description.
|
||||
@@ -329,7 +347,7 @@ Return ONLY the date string in YYYY-MM-DD format - no explanations, no quotes, n
|
||||
type: 'long-input',
|
||||
placeholder: 'Environment information (e.g., Production, Staging)',
|
||||
dependsOn: ['projectId'],
|
||||
condition: { field: 'operation', value: 'write' },
|
||||
condition: { field: 'operation', value: ['write', 'update'] },
|
||||
},
|
||||
{
|
||||
id: 'customFieldId',
|
||||
@@ -337,7 +355,7 @@ Return ONLY the date string in YYYY-MM-DD format - no explanations, no quotes, n
|
||||
type: 'short-input',
|
||||
placeholder: 'e.g., customfield_10001 or 10001',
|
||||
dependsOn: ['projectId'],
|
||||
condition: { field: 'operation', value: 'write' },
|
||||
condition: { field: 'operation', value: ['write', 'update'] },
|
||||
},
|
||||
{
|
||||
id: 'customFieldValue',
|
||||
@@ -345,7 +363,34 @@ Return ONLY the date string in YYYY-MM-DD format - no explanations, no quotes, n
|
||||
type: 'short-input',
|
||||
placeholder: 'Value for the custom field',
|
||||
dependsOn: ['projectId'],
|
||||
condition: { field: 'operation', value: 'write' },
|
||||
condition: { field: 'operation', value: ['write', 'update'] },
|
||||
},
|
||||
{
|
||||
id: 'components',
|
||||
title: 'Components',
|
||||
type: 'short-input',
|
||||
placeholder: 'Comma-separated component names',
|
||||
dependsOn: ['projectId'],
|
||||
condition: { field: 'operation', value: ['write', 'update'] },
|
||||
},
|
||||
{
|
||||
id: 'fixVersions',
|
||||
title: 'Fix Versions',
|
||||
type: 'short-input',
|
||||
placeholder: 'Comma-separated fix version names',
|
||||
dependsOn: ['projectId'],
|
||||
condition: { field: 'operation', value: ['write', 'update'] },
|
||||
},
|
||||
{
|
||||
id: 'notifyUsers',
|
||||
title: 'Notify Users',
|
||||
type: 'dropdown',
|
||||
options: [
|
||||
{ label: 'Yes', id: 'true' },
|
||||
{ label: 'No', id: 'false' },
|
||||
],
|
||||
value: () => 'true',
|
||||
condition: { field: 'operation', value: 'update' },
|
||||
},
|
||||
// Delete Issue fields
|
||||
{
|
||||
@@ -395,6 +440,13 @@ Return ONLY the comment text - no explanations.`,
|
||||
placeholder: 'Describe the transition reason (e.g., "fixed bug", "ready for QA review")...',
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'resolution',
|
||||
title: 'Resolution',
|
||||
type: 'short-input',
|
||||
placeholder: 'Resolution name (e.g., "Fixed", "Won\'t Fix")',
|
||||
condition: { field: 'operation', value: 'transition' },
|
||||
},
|
||||
// Search Issues fields
|
||||
{
|
||||
id: 'jql',
|
||||
@@ -420,6 +472,20 @@ Return ONLY the JQL query - no explanations or markdown formatting.`,
|
||||
generationType: 'sql-query',
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'nextPageToken',
|
||||
title: 'Next Page Token',
|
||||
type: 'short-input',
|
||||
placeholder: 'Cursor token for next page (omit for first page)',
|
||||
condition: { field: 'operation', value: 'search' },
|
||||
},
|
||||
{
|
||||
id: 'startAt',
|
||||
title: 'Start At',
|
||||
type: 'short-input',
|
||||
placeholder: 'Pagination start index (default: 0)',
|
||||
condition: { field: 'operation', value: ['get_comments', 'get_worklogs'] },
|
||||
},
|
||||
{
|
||||
id: 'maxResults',
|
||||
title: 'Max Results',
|
||||
@@ -756,7 +822,9 @@ Return ONLY the comment text - no explanations.`,
|
||||
assignee: params.assignee || undefined,
|
||||
priority: params.priority || undefined,
|
||||
labels: parseCommaSeparated(params.labels),
|
||||
components: parseCommaSeparated(params.components),
|
||||
duedate: params.duedate || undefined,
|
||||
fixVersions: parseCommaSeparated(params.fixVersions),
|
||||
reporter: params.reporter || undefined,
|
||||
environment: params.environment || undefined,
|
||||
customFieldId: params.customFieldId || undefined,
|
||||
@@ -768,11 +836,29 @@ Return ONLY the comment text - no explanations.`,
|
||||
}
|
||||
}
|
||||
case 'update': {
|
||||
const parseCommaSeparated = (value: string | undefined): string[] | undefined => {
|
||||
if (!value || value.trim() === '') return undefined
|
||||
return value
|
||||
.split(',')
|
||||
.map((item) => item.trim())
|
||||
.filter((item) => item !== '')
|
||||
}
|
||||
|
||||
const updateParams = {
|
||||
projectId: effectiveProjectId,
|
||||
issueKey: effectiveIssueKey,
|
||||
summary: params.summary || '',
|
||||
description: params.description || '',
|
||||
summary: params.summary || undefined,
|
||||
description: params.description || undefined,
|
||||
assignee: params.assignee || undefined,
|
||||
priority: params.priority || undefined,
|
||||
labels: parseCommaSeparated(params.labels),
|
||||
components: parseCommaSeparated(params.components),
|
||||
duedate: params.duedate || undefined,
|
||||
fixVersions: parseCommaSeparated(params.fixVersions),
|
||||
environment: params.environment || undefined,
|
||||
customFieldId: params.customFieldId || undefined,
|
||||
customFieldValue: params.customFieldValue || undefined,
|
||||
notifyUsers: params.notifyUsers === 'false' ? false : undefined,
|
||||
}
|
||||
return {
|
||||
...baseParams,
|
||||
@@ -813,12 +899,14 @@ Return ONLY the comment text - no explanations.`,
|
||||
issueKey: effectiveIssueKey,
|
||||
transitionId: params.transitionId,
|
||||
comment: params.transitionComment,
|
||||
resolution: params.resolution || undefined,
|
||||
}
|
||||
}
|
||||
case 'search': {
|
||||
return {
|
||||
...baseParams,
|
||||
jql: params.jql,
|
||||
nextPageToken: params.nextPageToken || undefined,
|
||||
maxResults: params.maxResults ? Number.parseInt(params.maxResults) : undefined,
|
||||
}
|
||||
}
|
||||
@@ -833,6 +921,7 @@ Return ONLY the comment text - no explanations.`,
|
||||
return {
|
||||
...baseParams,
|
||||
issueKey: effectiveIssueKey,
|
||||
startAt: params.startAt ? Number.parseInt(params.startAt) : undefined,
|
||||
maxResults: params.maxResults ? Number.parseInt(params.maxResults) : undefined,
|
||||
}
|
||||
}
|
||||
@@ -889,6 +978,7 @@ Return ONLY the comment text - no explanations.`,
|
||||
return {
|
||||
...baseParams,
|
||||
issueKey: effectiveIssueKey,
|
||||
startAt: params.startAt ? Number.parseInt(params.startAt) : undefined,
|
||||
maxResults: params.maxResults ? Number.parseInt(params.maxResults) : undefined,
|
||||
}
|
||||
}
|
||||
@@ -966,15 +1056,19 @@ Return ONLY the comment text - no explanations.`,
|
||||
summary: { type: 'string', description: 'Issue summary' },
|
||||
description: { type: 'string', description: 'Issue description' },
|
||||
issueType: { type: 'string', description: 'Issue type' },
|
||||
// Write operation additional inputs
|
||||
// Write/Update operation additional inputs
|
||||
parentIssue: { type: 'string', description: 'Parent issue key for subtasks' },
|
||||
assignee: { type: 'string', description: 'Assignee account ID' },
|
||||
priority: { type: 'string', description: 'Priority ID or name' },
|
||||
labels: { type: 'string', description: 'Comma-separated labels for the issue' },
|
||||
components: { type: 'string', description: 'Comma-separated component names' },
|
||||
duedate: { type: 'string', description: 'Due date in YYYY-MM-DD format' },
|
||||
fixVersions: { type: 'string', description: 'Comma-separated fix version names' },
|
||||
reporter: { type: 'string', description: 'Reporter account ID' },
|
||||
environment: { type: 'string', description: 'Environment information' },
|
||||
customFieldId: { type: 'string', description: 'Custom field ID (e.g., customfield_10001)' },
|
||||
customFieldValue: { type: 'string', description: 'Value for the custom field' },
|
||||
notifyUsers: { type: 'string', description: 'Whether to send notifications on update' },
|
||||
// Delete operation inputs
|
||||
deleteSubtasks: { type: 'string', description: 'Whether to delete subtasks (true/false)' },
|
||||
// Assign/Watcher operation inputs
|
||||
@@ -985,7 +1079,13 @@ Return ONLY the comment text - no explanations.`,
|
||||
// Transition operation inputs
|
||||
transitionId: { type: 'string', description: 'Transition ID for workflow status changes' },
|
||||
transitionComment: { type: 'string', description: 'Optional comment for transition' },
|
||||
resolution: { type: 'string', description: 'Resolution name for transition (e.g., "Fixed")' },
|
||||
// Search operation inputs
|
||||
nextPageToken: {
|
||||
type: 'string',
|
||||
description: 'Cursor token for the next page of search results',
|
||||
},
|
||||
startAt: { type: 'string', description: 'Pagination start index' },
|
||||
jql: { type: 'string', description: 'JQL (Jira Query Language) search query' },
|
||||
maxResults: { type: 'string', description: 'Maximum number of results to return' },
|
||||
// Comment operation inputs
|
||||
@@ -1038,8 +1138,11 @@ Return ONLY the comment text - no explanations.`,
|
||||
id: { type: 'string', description: 'Jira issue ID' },
|
||||
key: { type: 'string', description: 'Jira issue key' },
|
||||
|
||||
// jira_search_issues outputs
|
||||
// jira_search_issues / jira_bulk_read outputs
|
||||
total: { type: 'number', description: 'Total number of matching issues' },
|
||||
nextPageToken: { type: 'string', description: 'Cursor token for the next page of results' },
|
||||
isLast: { type: 'boolean', description: 'Whether this is the last page of results' },
|
||||
// Shared pagination outputs (get_comments, get_worklogs, get_users)
|
||||
startAt: { type: 'number', description: 'Pagination start index' },
|
||||
maxResults: { type: 'number', description: 'Maximum results per page' },
|
||||
issues: {
|
||||
|
||||
@@ -40,6 +40,7 @@ export const JiraServiceManagementBlock: BlockConfig<JsmResponse> = {
|
||||
{ label: 'Add Participants', id: 'add_participants' },
|
||||
{ label: 'Get Approvals', id: 'get_approvals' },
|
||||
{ label: 'Answer Approval', id: 'answer_approval' },
|
||||
{ label: 'Get Request Type Fields', id: 'get_request_type_fields' },
|
||||
],
|
||||
value: () => 'get_service_desks',
|
||||
},
|
||||
@@ -109,6 +110,8 @@ export const JiraServiceManagementBlock: BlockConfig<JsmResponse> = {
|
||||
'get_organizations',
|
||||
'add_organization',
|
||||
'get_queues',
|
||||
'get_requests',
|
||||
'get_request_type_fields',
|
||||
],
|
||||
},
|
||||
},
|
||||
@@ -118,7 +121,7 @@ export const JiraServiceManagementBlock: BlockConfig<JsmResponse> = {
|
||||
type: 'short-input',
|
||||
required: true,
|
||||
placeholder: 'Enter request type ID',
|
||||
condition: { field: 'operation', value: 'create_request' },
|
||||
condition: { field: 'operation', value: ['create_request', 'get_request_type_fields'] },
|
||||
},
|
||||
{
|
||||
id: 'issueIdOrKey',
|
||||
@@ -188,6 +191,51 @@ Return ONLY the description text - no explanations.`,
|
||||
placeholder: 'Account ID to raise request on behalf of',
|
||||
condition: { field: 'operation', value: 'create_request' },
|
||||
},
|
||||
{
|
||||
id: 'requestParticipants',
|
||||
title: 'Request Participants',
|
||||
type: 'short-input',
|
||||
placeholder: 'Comma-separated account IDs to add as participants',
|
||||
condition: { field: 'operation', value: 'create_request' },
|
||||
},
|
||||
{
|
||||
id: 'channel',
|
||||
title: 'Channel',
|
||||
type: 'short-input',
|
||||
placeholder: 'Channel (e.g., portal, email)',
|
||||
condition: { field: 'operation', value: 'create_request' },
|
||||
},
|
||||
{
|
||||
id: 'requestFieldValues',
|
||||
title: 'Custom Field Values',
|
||||
type: 'long-input',
|
||||
placeholder: 'JSON object of custom field values (e.g., {"customfield_10010": "value"})',
|
||||
condition: { field: 'operation', value: 'create_request' },
|
||||
},
|
||||
{
|
||||
id: 'searchQuery',
|
||||
title: 'Search Query',
|
||||
type: 'short-input',
|
||||
placeholder: 'Filter request types by name',
|
||||
condition: { field: 'operation', value: 'get_request_types' },
|
||||
},
|
||||
{
|
||||
id: 'groupId',
|
||||
title: 'Group ID',
|
||||
type: 'short-input',
|
||||
placeholder: 'Filter by request type group',
|
||||
condition: { field: 'operation', value: 'get_request_types' },
|
||||
},
|
||||
{
|
||||
id: 'expand',
|
||||
title: 'Expand',
|
||||
type: 'short-input',
|
||||
placeholder: 'Comma-separated fields to expand',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: ['get_request', 'get_requests', 'get_comments'],
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'commentBody',
|
||||
title: 'Comment',
|
||||
@@ -220,11 +268,11 @@ Return ONLY the comment text - no explanations.`,
|
||||
condition: { field: 'operation', value: 'add_comment' },
|
||||
},
|
||||
{
|
||||
id: 'emails',
|
||||
title: 'Email Addresses',
|
||||
id: 'accountIds',
|
||||
title: 'Account IDs',
|
||||
type: 'short-input',
|
||||
required: true,
|
||||
placeholder: 'Comma-separated email addresses',
|
||||
placeholder: 'Comma-separated Atlassian account IDs',
|
||||
condition: { field: 'operation', value: 'add_customer' },
|
||||
},
|
||||
{
|
||||
@@ -269,7 +317,7 @@ Return ONLY the comment text - no explanations.`,
|
||||
{ label: 'All Requests', id: 'ALL_REQUESTS' },
|
||||
{ label: 'My Requests', id: 'OWNED_REQUESTS' },
|
||||
{ label: 'Participated', id: 'PARTICIPATED_REQUESTS' },
|
||||
{ label: 'Organization', id: 'ORGANIZATION' },
|
||||
{ label: 'Approver', id: 'APPROVER' },
|
||||
],
|
||||
value: () => 'ALL_REQUESTS',
|
||||
condition: { field: 'operation', value: 'get_requests' },
|
||||
@@ -279,11 +327,11 @@ Return ONLY the comment text - no explanations.`,
|
||||
title: 'Request Status',
|
||||
type: 'dropdown',
|
||||
options: [
|
||||
{ label: 'All', id: 'ALL' },
|
||||
{ label: 'Open', id: 'OPEN' },
|
||||
{ label: 'Closed', id: 'CLOSED' },
|
||||
{ label: 'All', id: 'ALL_REQUESTS' },
|
||||
{ label: 'Open', id: 'OPEN_REQUESTS' },
|
||||
{ label: 'Closed', id: 'CLOSED_REQUESTS' },
|
||||
],
|
||||
value: () => 'ALL',
|
||||
value: () => 'ALL_REQUESTS',
|
||||
condition: { field: 'operation', value: 'get_requests' },
|
||||
},
|
||||
{
|
||||
@@ -363,6 +411,9 @@ Return ONLY the comment text - no explanations.`,
|
||||
'get_organizations',
|
||||
'get_queues',
|
||||
'get_sla',
|
||||
'get_transitions',
|
||||
'get_participants',
|
||||
'get_approvals',
|
||||
],
|
||||
},
|
||||
},
|
||||
@@ -389,6 +440,7 @@ Return ONLY the comment text - no explanations.`,
|
||||
'jsm_add_participants',
|
||||
'jsm_get_approvals',
|
||||
'jsm_answer_approval',
|
||||
'jsm_get_request_type_fields',
|
||||
],
|
||||
config: {
|
||||
tool: (params) => {
|
||||
@@ -433,6 +485,8 @@ Return ONLY the comment text - no explanations.`,
|
||||
return 'jsm_get_approvals'
|
||||
case 'answer_approval':
|
||||
return 'jsm_answer_approval'
|
||||
case 'get_request_type_fields':
|
||||
return 'jsm_get_request_type_fields'
|
||||
default:
|
||||
return 'jsm_get_service_desks'
|
||||
}
|
||||
@@ -456,6 +510,8 @@ Return ONLY the comment text - no explanations.`,
|
||||
return {
|
||||
...baseParams,
|
||||
serviceDeskId: params.serviceDeskId,
|
||||
searchQuery: params.searchQuery,
|
||||
groupId: params.groupId,
|
||||
limit: params.maxResults ? Number.parseInt(params.maxResults) : undefined,
|
||||
}
|
||||
case 'create_request':
|
||||
@@ -475,6 +531,11 @@ Return ONLY the comment text - no explanations.`,
|
||||
summary: params.summary,
|
||||
description: params.description,
|
||||
raiseOnBehalfOf: params.raiseOnBehalfOf,
|
||||
requestParticipants: params.requestParticipants,
|
||||
channel: params.channel,
|
||||
requestFieldValues: params.requestFieldValues
|
||||
? JSON.parse(params.requestFieldValues)
|
||||
: undefined,
|
||||
}
|
||||
case 'get_request':
|
||||
if (!params.issueIdOrKey) {
|
||||
@@ -483,6 +544,7 @@ Return ONLY the comment text - no explanations.`,
|
||||
return {
|
||||
...baseParams,
|
||||
issueIdOrKey: params.issueIdOrKey,
|
||||
expand: params.expand,
|
||||
}
|
||||
case 'get_requests':
|
||||
return {
|
||||
@@ -491,6 +553,7 @@ Return ONLY the comment text - no explanations.`,
|
||||
requestOwnership: params.requestOwnership,
|
||||
requestStatus: params.requestStatus,
|
||||
searchTerm: params.searchTerm,
|
||||
expand: params.expand,
|
||||
limit: params.maxResults ? Number.parseInt(params.maxResults) : undefined,
|
||||
}
|
||||
case 'add_comment':
|
||||
@@ -513,6 +576,7 @@ Return ONLY the comment text - no explanations.`,
|
||||
return {
|
||||
...baseParams,
|
||||
issueIdOrKey: params.issueIdOrKey,
|
||||
expand: params.expand,
|
||||
limit: params.maxResults ? Number.parseInt(params.maxResults) : undefined,
|
||||
}
|
||||
case 'get_customers':
|
||||
@@ -529,26 +593,14 @@ Return ONLY the comment text - no explanations.`,
|
||||
if (!params.serviceDeskId) {
|
||||
throw new Error('Service Desk ID is required')
|
||||
}
|
||||
const accountIds = params.accountIds
|
||||
? params.accountIds
|
||||
.split(',')
|
||||
.map((id: string) => id.trim())
|
||||
.filter((id: string) => id)
|
||||
: undefined
|
||||
const emails = params.emails
|
||||
? params.emails
|
||||
.split(',')
|
||||
.map((email: string) => email.trim())
|
||||
.filter((email: string) => email)
|
||||
: undefined
|
||||
if ((!accountIds || accountIds.length === 0) && (!emails || emails.length === 0)) {
|
||||
throw new Error('At least one account ID or email is required')
|
||||
if (!params.accountIds && !params.emails) {
|
||||
throw new Error('Account IDs or emails are required')
|
||||
}
|
||||
return {
|
||||
...baseParams,
|
||||
serviceDeskId: params.serviceDeskId,
|
||||
accountIds,
|
||||
emails,
|
||||
accountIds: params.accountIds,
|
||||
emails: params.emails,
|
||||
}
|
||||
}
|
||||
case 'get_organizations':
|
||||
@@ -586,6 +638,7 @@ Return ONLY the comment text - no explanations.`,
|
||||
return {
|
||||
...baseParams,
|
||||
issueIdOrKey: params.issueIdOrKey,
|
||||
limit: params.maxResults ? Number.parseInt(params.maxResults) : undefined,
|
||||
}
|
||||
case 'transition_request':
|
||||
if (!params.issueIdOrKey) {
|
||||
@@ -666,6 +719,18 @@ Return ONLY the comment text - no explanations.`,
|
||||
approvalId: params.approvalId,
|
||||
decision: params.approvalDecision,
|
||||
}
|
||||
case 'get_request_type_fields':
|
||||
if (!params.serviceDeskId) {
|
||||
throw new Error('Service Desk ID is required')
|
||||
}
|
||||
if (!params.requestTypeId) {
|
||||
throw new Error('Request Type ID is required')
|
||||
}
|
||||
return {
|
||||
...baseParams,
|
||||
serviceDeskId: params.serviceDeskId,
|
||||
requestTypeId: params.requestTypeId,
|
||||
}
|
||||
default:
|
||||
return baseParams
|
||||
}
|
||||
@@ -684,8 +749,11 @@ Return ONLY the comment text - no explanations.`,
|
||||
raiseOnBehalfOf: { type: 'string', description: 'Account ID to raise request on behalf of' },
|
||||
commentBody: { type: 'string', description: 'Comment text' },
|
||||
isPublic: { type: 'string', description: 'Whether comment is public or internal' },
|
||||
accountIds: { type: 'string', description: 'Comma-separated account IDs' },
|
||||
emails: { type: 'string', description: 'Comma-separated email addresses' },
|
||||
accountIds: { type: 'string', description: 'Comma-separated Atlassian account IDs' },
|
||||
emails: {
|
||||
type: 'string',
|
||||
description: 'Comma-separated email addresses',
|
||||
},
|
||||
customerQuery: { type: 'string', description: 'Customer search query' },
|
||||
transitionId: { type: 'string', description: 'Transition ID' },
|
||||
transitionComment: { type: 'string', description: 'Transition comment' },
|
||||
@@ -702,6 +770,15 @@ Return ONLY the comment text - no explanations.`,
|
||||
},
|
||||
approvalId: { type: 'string', description: 'Approval ID' },
|
||||
approvalDecision: { type: 'string', description: 'Approval decision (approve/decline)' },
|
||||
requestParticipants: {
|
||||
type: 'string',
|
||||
description: 'Comma-separated account IDs for request participants',
|
||||
},
|
||||
channel: { type: 'string', description: 'Channel (e.g., portal, email)' },
|
||||
requestFieldValues: { type: 'string', description: 'JSON object of custom field values' },
|
||||
searchQuery: { type: 'string', description: 'Filter request types by name' },
|
||||
groupId: { type: 'string', description: 'Filter by request type group ID' },
|
||||
expand: { type: 'string', description: 'Comma-separated fields to expand' },
|
||||
},
|
||||
outputs: {
|
||||
ts: { type: 'string', description: 'Timestamp of the operation' },
|
||||
@@ -727,9 +804,19 @@ Return ONLY the comment text - no explanations.`,
|
||||
transitionId: { type: 'string', description: 'Applied transition ID' },
|
||||
participants: { type: 'json', description: 'Array of participants' },
|
||||
approvals: { type: 'json', description: 'Array of approvals' },
|
||||
approval: { type: 'json', description: 'Approval object' },
|
||||
approvalId: { type: 'string', description: 'Approval ID' },
|
||||
decision: { type: 'string', description: 'Approval decision' },
|
||||
total: { type: 'number', description: 'Total count' },
|
||||
isLastPage: { type: 'boolean', description: 'Whether this is the last page' },
|
||||
requestTypeFields: { type: 'json', description: 'Array of request type fields' },
|
||||
canAddRequestParticipants: {
|
||||
type: 'boolean',
|
||||
description: 'Whether participants can be added to this request type',
|
||||
},
|
||||
canRaiseOnBehalfOf: {
|
||||
type: 'boolean',
|
||||
description: 'Whether requests can be raised on behalf of another user',
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
268
apps/sim/blocks/blocks/onepassword.ts
Normal file
268
apps/sim/blocks/blocks/onepassword.ts
Normal file
@@ -0,0 +1,268 @@
|
||||
import { OnePasswordIcon } from '@/components/icons'
|
||||
import { AuthMode, type BlockConfig } from '@/blocks/types'
|
||||
|
||||
export const OnePasswordBlock: BlockConfig = {
|
||||
type: 'onepassword',
|
||||
name: '1Password',
|
||||
description: 'Manage secrets and items in 1Password vaults',
|
||||
longDescription:
|
||||
'Access and manage secrets stored in 1Password vaults using the Connect API or Service Account SDK. List vaults, retrieve items with their fields and secrets, create new items, update existing ones, delete items, and resolve secret references.',
|
||||
docsLink: 'https://docs.sim.ai/tools/onepassword',
|
||||
category: 'tools',
|
||||
bgColor: '#E0E0E0',
|
||||
icon: OnePasswordIcon,
|
||||
authMode: AuthMode.ApiKey,
|
||||
|
||||
subBlocks: [
|
||||
{
|
||||
id: 'operation',
|
||||
title: 'Operation',
|
||||
type: 'dropdown',
|
||||
options: [
|
||||
{ label: 'List Vaults', id: 'list_vaults' },
|
||||
{ label: 'Get Vault', id: 'get_vault' },
|
||||
{ label: 'List Items', id: 'list_items' },
|
||||
{ label: 'Get Item', id: 'get_item' },
|
||||
{ label: 'Create Item', id: 'create_item' },
|
||||
{ label: 'Replace Item', id: 'replace_item' },
|
||||
{ label: 'Update Item', id: 'update_item' },
|
||||
{ label: 'Delete Item', id: 'delete_item' },
|
||||
{ label: 'Resolve Secret', id: 'resolve_secret' },
|
||||
],
|
||||
value: () => 'get_item',
|
||||
},
|
||||
{
|
||||
id: 'connectionMode',
|
||||
title: 'Connection Mode',
|
||||
type: 'dropdown',
|
||||
options: [
|
||||
{ label: 'Service Account', id: 'service_account' },
|
||||
{ label: 'Connect Server', id: 'connect' },
|
||||
],
|
||||
value: () => 'service_account',
|
||||
},
|
||||
{
|
||||
id: 'serviceAccountToken',
|
||||
title: 'Service Account Token',
|
||||
type: 'short-input',
|
||||
placeholder: 'Enter your 1Password Service Account token',
|
||||
password: true,
|
||||
required: { field: 'connectionMode', value: 'service_account' },
|
||||
condition: { field: 'connectionMode', value: 'service_account' },
|
||||
},
|
||||
{
|
||||
id: 'serverUrl',
|
||||
title: 'Server URL',
|
||||
type: 'short-input',
|
||||
placeholder: 'http://localhost:8080',
|
||||
required: { field: 'connectionMode', value: 'connect' },
|
||||
condition: { field: 'connectionMode', value: 'connect' },
|
||||
},
|
||||
{
|
||||
id: 'apiKey',
|
||||
title: 'Connect Token',
|
||||
type: 'short-input',
|
||||
placeholder: 'Enter your 1Password Connect token',
|
||||
password: true,
|
||||
required: { field: 'connectionMode', value: 'connect' },
|
||||
condition: { field: 'connectionMode', value: 'connect' },
|
||||
},
|
||||
{
|
||||
id: 'secretReference',
|
||||
title: 'Secret Reference',
|
||||
type: 'short-input',
|
||||
placeholder: 'op://vault-name-or-id/item-name-or-id/field-name',
|
||||
required: { field: 'operation', value: 'resolve_secret' },
|
||||
condition: { field: 'operation', value: 'resolve_secret' },
|
||||
wandConfig: {
|
||||
enabled: true,
|
||||
prompt: `Generate a 1Password secret reference URI based on the user's description.
|
||||
The format is: op://vault-name-or-id/item-name-or-id/field-name
|
||||
You can also use: op://vault/item/section/field for fields inside sections.
|
||||
Examples:
|
||||
- op://Development/AWS/access-key
|
||||
- op://Production/Database/password
|
||||
- op://MyVault/Stripe/API Keys/secret-key
|
||||
|
||||
Return ONLY the op:// URI - no explanations, no quotes, no markdown.`,
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'vaultId',
|
||||
title: 'Vault ID',
|
||||
type: 'short-input',
|
||||
placeholder: 'Enter vault UUID',
|
||||
password: true,
|
||||
required: {
|
||||
field: 'operation',
|
||||
value: [
|
||||
'get_vault',
|
||||
'list_items',
|
||||
'get_item',
|
||||
'create_item',
|
||||
'replace_item',
|
||||
'update_item',
|
||||
'delete_item',
|
||||
],
|
||||
},
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: ['list_vaults', 'resolve_secret'],
|
||||
not: true,
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'itemId',
|
||||
title: 'Item ID',
|
||||
type: 'short-input',
|
||||
placeholder: 'Enter item UUID',
|
||||
required: {
|
||||
field: 'operation',
|
||||
value: ['get_item', 'replace_item', 'update_item', 'delete_item'],
|
||||
},
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: ['get_item', 'replace_item', 'update_item', 'delete_item'],
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'filter',
|
||||
title: 'Filter',
|
||||
type: 'short-input',
|
||||
placeholder: 'SCIM filter (e.g., name eq "My Vault")',
|
||||
condition: { field: 'operation', value: ['list_vaults', 'list_items'] },
|
||||
},
|
||||
{
|
||||
id: 'category',
|
||||
title: 'Category',
|
||||
type: 'dropdown',
|
||||
options: [
|
||||
{ label: 'Login', id: 'LOGIN' },
|
||||
{ label: 'Password', id: 'PASSWORD' },
|
||||
{ label: 'API Credential', id: 'API_CREDENTIAL' },
|
||||
{ label: 'Secure Note', id: 'SECURE_NOTE' },
|
||||
{ label: 'Server', id: 'SERVER' },
|
||||
{ label: 'Database', id: 'DATABASE' },
|
||||
{ label: 'Credit Card', id: 'CREDIT_CARD' },
|
||||
{ label: 'Identity', id: 'IDENTITY' },
|
||||
{ label: 'SSH Key', id: 'SSH_KEY' },
|
||||
],
|
||||
value: () => 'LOGIN',
|
||||
required: { field: 'operation', value: 'create_item' },
|
||||
condition: { field: 'operation', value: 'create_item' },
|
||||
},
|
||||
{
|
||||
id: 'title',
|
||||
title: 'Title',
|
||||
type: 'short-input',
|
||||
placeholder: 'Item title',
|
||||
condition: { field: 'operation', value: 'create_item' },
|
||||
},
|
||||
{
|
||||
id: 'tags',
|
||||
title: 'Tags',
|
||||
type: 'short-input',
|
||||
placeholder: 'Comma-separated tags (e.g., production, api)',
|
||||
condition: { field: 'operation', value: 'create_item' },
|
||||
},
|
||||
{
|
||||
id: 'fields',
|
||||
title: 'Fields',
|
||||
type: 'code',
|
||||
placeholder:
|
||||
'[\n {\n "label": "username",\n "value": "admin",\n "type": "STRING",\n "purpose": "USERNAME"\n }\n]',
|
||||
condition: { field: 'operation', value: 'create_item' },
|
||||
wandConfig: {
|
||||
enabled: true,
|
||||
prompt: `Generate a 1Password item fields JSON array based on the user's description.
|
||||
Each field object can have: label, value, type (STRING, CONCEALED, EMAIL, URL, TOTP, DATE), purpose (USERNAME, PASSWORD, NOTES, or empty).
|
||||
Examples:
|
||||
- [{"label":"username","value":"admin","type":"STRING","purpose":"USERNAME"},{"label":"password","value":"secret123","type":"CONCEALED","purpose":"PASSWORD"}]
|
||||
- [{"label":"API Key","value":"sk-abc123","type":"CONCEALED"}]
|
||||
|
||||
Return ONLY valid JSON - no explanations, no markdown code blocks.`,
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'item',
|
||||
title: 'Item (JSON)',
|
||||
type: 'code',
|
||||
placeholder:
|
||||
'{\n "vault": {"id": "..."},\n "category": "LOGIN",\n "title": "My Item",\n "fields": []\n}',
|
||||
required: { field: 'operation', value: 'replace_item' },
|
||||
condition: { field: 'operation', value: 'replace_item' },
|
||||
wandConfig: {
|
||||
enabled: true,
|
||||
prompt: `Generate a full 1Password item JSON object based on the user's description.
|
||||
The object must include vault.id, category, and optionally title, tags, fields, and sections.
|
||||
Categories: LOGIN, PASSWORD, API_CREDENTIAL, SECURE_NOTE, SERVER, DATABASE, CREDIT_CARD, IDENTITY, SSH_KEY.
|
||||
Field types: STRING, CONCEALED, EMAIL, URL, TOTP, DATE. Purposes: USERNAME, PASSWORD, NOTES, or empty.
|
||||
Example: {"vault":{"id":"abc123"},"category":"LOGIN","title":"My Login","fields":[{"label":"username","value":"admin","type":"STRING","purpose":"USERNAME"}]}
|
||||
|
||||
Return ONLY valid JSON - no explanations, no markdown code blocks.`,
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'operations',
|
||||
title: 'Patch Operations (JSON)',
|
||||
type: 'code',
|
||||
placeholder:
|
||||
'[\n {\n "op": "replace",\n "path": "/title",\n "value": "New Title"\n }\n]',
|
||||
required: { field: 'operation', value: 'update_item' },
|
||||
condition: { field: 'operation', value: 'update_item' },
|
||||
wandConfig: {
|
||||
enabled: true,
|
||||
prompt: `Generate a JSON array of RFC6902 patch operations for a 1Password item based on the user's description.
|
||||
Each operation has: op (add, remove, replace), path (JSON pointer), and value.
|
||||
Examples:
|
||||
- [{"op":"replace","path":"/title","value":"New Title"}]
|
||||
- [{"op":"replace","path":"/fields/username/value","value":"newuser"}]
|
||||
- [{"op":"add","path":"/tags/-","value":"production"}]
|
||||
|
||||
Return ONLY valid JSON - no explanations, no markdown code blocks.`,
|
||||
},
|
||||
},
|
||||
],
|
||||
|
||||
tools: {
|
||||
access: [
|
||||
'onepassword_list_vaults',
|
||||
'onepassword_get_vault',
|
||||
'onepassword_list_items',
|
||||
'onepassword_get_item',
|
||||
'onepassword_create_item',
|
||||
'onepassword_replace_item',
|
||||
'onepassword_update_item',
|
||||
'onepassword_delete_item',
|
||||
'onepassword_resolve_secret',
|
||||
],
|
||||
config: {
|
||||
tool: (params) => `onepassword_${params.operation}`,
|
||||
},
|
||||
},
|
||||
|
||||
inputs: {
|
||||
operation: { type: 'string', description: 'Operation to perform' },
|
||||
connectionMode: { type: 'string', description: 'Connection mode: service_account or connect' },
|
||||
serviceAccountToken: { type: 'string', description: '1Password Service Account token' },
|
||||
serverUrl: { type: 'string', description: '1Password Connect server URL' },
|
||||
apiKey: { type: 'string', description: '1Password Connect token' },
|
||||
secretReference: { type: 'string', description: 'Secret reference URI (op://...)' },
|
||||
vaultId: { type: 'string', description: 'Vault UUID' },
|
||||
itemId: { type: 'string', description: 'Item UUID' },
|
||||
filter: { type: 'string', description: 'SCIM filter expression' },
|
||||
category: { type: 'string', description: 'Item category' },
|
||||
title: { type: 'string', description: 'Item title' },
|
||||
tags: { type: 'string', description: 'Comma-separated tags' },
|
||||
fields: { type: 'string', description: 'JSON array of field objects' },
|
||||
item: { type: 'string', description: 'Full item JSON for replacement' },
|
||||
operations: { type: 'string', description: 'JSON array of patch operations' },
|
||||
},
|
||||
|
||||
outputs: {
|
||||
response: {
|
||||
type: 'json',
|
||||
description: 'Operation response data',
|
||||
},
|
||||
},
|
||||
}
|
||||
@@ -91,6 +91,7 @@ import { Neo4jBlock } from '@/blocks/blocks/neo4j'
|
||||
import { NoteBlock } from '@/blocks/blocks/note'
|
||||
import { NotionBlock, NotionV2Block } from '@/blocks/blocks/notion'
|
||||
import { OneDriveBlock } from '@/blocks/blocks/onedrive'
|
||||
import { OnePasswordBlock } from '@/blocks/blocks/onepassword'
|
||||
import { OpenAIBlock } from '@/blocks/blocks/openai'
|
||||
import { OutlookBlock } from '@/blocks/blocks/outlook'
|
||||
import { ParallelBlock } from '@/blocks/blocks/parallel'
|
||||
@@ -268,6 +269,7 @@ export const registry: Record<string, BlockConfig> = {
|
||||
note: NoteBlock,
|
||||
notion: NotionBlock,
|
||||
notion_v2: NotionV2Block,
|
||||
onepassword: OnePasswordBlock,
|
||||
onedrive: OneDriveBlock,
|
||||
openai: OpenAIBlock,
|
||||
outlook: OutlookBlock,
|
||||
|
||||
@@ -5483,3 +5483,37 @@ export function AgentSkillsIcon(props: SVGProps<SVGSVGElement>) {
|
||||
</svg>
|
||||
)
|
||||
}
|
||||
|
||||
export function OnePasswordIcon(props: SVGProps<SVGSVGElement>) {
|
||||
return (
|
||||
<svg {...props} viewBox='0 0 48 48' xmlns='http://www.w3.org/2000/svg' fill='none'>
|
||||
<circle
|
||||
cx='24'
|
||||
cy='24'
|
||||
r='21.5'
|
||||
stroke='#000000'
|
||||
strokeLinecap='round'
|
||||
strokeLinejoin='round'
|
||||
/>
|
||||
<path
|
||||
d='M28.083,17.28a7.8633,7.8633,0,0,1,0,13.44'
|
||||
stroke='#000000'
|
||||
strokeLinecap='round'
|
||||
strokeLinejoin='round'
|
||||
/>
|
||||
<path
|
||||
d='M19.917,30.72a7.8633,7.8633,0,0,1,0-13.44'
|
||||
stroke='#000000'
|
||||
strokeLinecap='round'
|
||||
strokeLinejoin='round'
|
||||
/>
|
||||
<path
|
||||
d='M26.067,10.43H21.933a2.0172,2.0172,0,0,0-2.016,2.016v6.36c2.358,1.281,2.736,2.562,0,3.843V35.574a2.0169,2.0169,0,0,0,2.016,2.015h4.134a2.0169,2.0169,0,0,0,2.016-2.015V29.213c-2.358-1.281-2.736-2.562,0-3.842V12.446A2.0172,2.0172,0,0,0,26.067,10.43Z'
|
||||
fill='#000000'
|
||||
stroke='#000000'
|
||||
strokeLinecap='round'
|
||||
strokeLinejoin='round'
|
||||
/>
|
||||
</svg>
|
||||
)
|
||||
}
|
||||
|
||||
@@ -5,10 +5,43 @@ import { CheckCircle, ChevronDown, ChevronRight, Loader2, Settings, XCircle } fr
|
||||
import { Badge } from '@/components/emcn'
|
||||
import { Button } from '@/components/ui/button'
|
||||
import { Collapsible, CollapsibleContent, CollapsibleTrigger } from '@/components/ui/collapsible'
|
||||
import type { ToolCallGroup, ToolCallState } from '@/lib/copilot/types'
|
||||
import { cn } from '@/lib/core/utils/cn'
|
||||
import { formatDuration } from '@/lib/core/utils/formatting'
|
||||
|
||||
interface ToolCallState {
|
||||
id: string
|
||||
name: string
|
||||
displayName?: string
|
||||
parameters?: Record<string, unknown>
|
||||
state:
|
||||
| 'detecting'
|
||||
| 'pending'
|
||||
| 'executing'
|
||||
| 'completed'
|
||||
| 'error'
|
||||
| 'rejected'
|
||||
| 'applied'
|
||||
| 'ready_for_review'
|
||||
| 'aborted'
|
||||
| 'skipped'
|
||||
| 'background'
|
||||
startTime?: number
|
||||
endTime?: number
|
||||
duration?: number
|
||||
result?: unknown
|
||||
error?: string
|
||||
progress?: string
|
||||
}
|
||||
|
||||
interface ToolCallGroup {
|
||||
id: string
|
||||
toolCalls: ToolCallState[]
|
||||
status: 'pending' | 'in_progress' | 'completed' | 'error'
|
||||
startTime?: number
|
||||
endTime?: number
|
||||
summary?: string
|
||||
}
|
||||
|
||||
interface ToolCallProps {
|
||||
toolCall: ToolCallState
|
||||
isCompact?: boolean
|
||||
|
||||
@@ -4,6 +4,7 @@ import { BlockType } from '@/executor/constants'
|
||||
import type { DAG } from '@/executor/dag/builder'
|
||||
import type { EdgeManager } from '@/executor/execution/edge-manager'
|
||||
import { serializePauseSnapshot } from '@/executor/execution/snapshot-serializer'
|
||||
import type { SerializableExecutionState } from '@/executor/execution/types'
|
||||
import type { NodeExecutionOrchestrator } from '@/executor/orchestrators/node'
|
||||
import type {
|
||||
ExecutionContext,
|
||||
@@ -135,6 +136,7 @@ export class ExecutionEngine {
|
||||
success: false,
|
||||
output: this.finalOutput,
|
||||
logs: this.context.blockLogs,
|
||||
executionState: this.getSerializableExecutionState(),
|
||||
metadata: this.context.metadata,
|
||||
status: 'cancelled',
|
||||
}
|
||||
@@ -144,6 +146,7 @@ export class ExecutionEngine {
|
||||
success: true,
|
||||
output: this.finalOutput,
|
||||
logs: this.context.blockLogs,
|
||||
executionState: this.getSerializableExecutionState(),
|
||||
metadata: this.context.metadata,
|
||||
}
|
||||
} catch (error) {
|
||||
@@ -157,6 +160,7 @@ export class ExecutionEngine {
|
||||
success: false,
|
||||
output: this.finalOutput,
|
||||
logs: this.context.blockLogs,
|
||||
executionState: this.getSerializableExecutionState(),
|
||||
metadata: this.context.metadata,
|
||||
status: 'cancelled',
|
||||
}
|
||||
@@ -459,6 +463,7 @@ export class ExecutionEngine {
|
||||
success: true,
|
||||
output: this.collectPauseResponses(),
|
||||
logs: this.context.blockLogs,
|
||||
executionState: this.getSerializableExecutionState(snapshotSeed),
|
||||
metadata: this.context.metadata,
|
||||
status: 'paused',
|
||||
pausePoints,
|
||||
@@ -466,6 +471,24 @@ export class ExecutionEngine {
|
||||
}
|
||||
}
|
||||
|
||||
private getSerializableExecutionState(snapshotSeed?: {
|
||||
snapshot: string
|
||||
}): SerializableExecutionState | undefined {
|
||||
try {
|
||||
const serializedSnapshot =
|
||||
snapshotSeed?.snapshot ?? serializePauseSnapshot(this.context, [], this.dag).snapshot
|
||||
const parsedSnapshot = JSON.parse(serializedSnapshot) as {
|
||||
state?: SerializableExecutionState
|
||||
}
|
||||
return parsedSnapshot.state
|
||||
} catch (error) {
|
||||
logger.warn('Failed to serialize execution state', {
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
})
|
||||
return undefined
|
||||
}
|
||||
}
|
||||
|
||||
private collectPauseResponses(): NormalizedBlockOutput {
|
||||
const responses = Array.from(this.pausedBlocks.values()).map((pause) => pause.response)
|
||||
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import type { TraceSpan } from '@/lib/logs/types'
|
||||
import type { PermissionGroupConfig } from '@/lib/permission-groups/types'
|
||||
import type { BlockOutput } from '@/blocks/types'
|
||||
import type { SerializableExecutionState } from '@/executor/execution/types'
|
||||
import type { RunFromBlockContext } from '@/executor/utils/run-from-block'
|
||||
import type { SerializedBlock, SerializedWorkflow } from '@/serializer/types'
|
||||
|
||||
@@ -302,6 +303,7 @@ export interface ExecutionResult {
|
||||
output: NormalizedBlockOutput
|
||||
error?: string
|
||||
logs?: BlockLog[]
|
||||
executionState?: SerializableExecutionState
|
||||
metadata?: ExecutionMetadata
|
||||
status?: 'completed' | 'paused' | 'cancelled'
|
||||
pausePoints?: PausePoint[]
|
||||
|
||||
@@ -1,8 +1,6 @@
|
||||
import {
|
||||
extractFieldsFromSchema,
|
||||
parseResponseFormatSafely,
|
||||
} from '@/lib/core/utils/response-format'
|
||||
import { normalizeInputFormatValue } from '@/lib/workflows/input-format'
|
||||
import { getEffectiveBlockOutputs } from '@/lib/workflows/blocks/block-outputs'
|
||||
import { hasTriggerCapability } from '@/lib/workflows/triggers/trigger-utils'
|
||||
import { getBlock } from '@/blocks/registry'
|
||||
import { isTriggerBehavior, normalizeName } from '@/executor/constants'
|
||||
import type { ExecutionContext } from '@/executor/types'
|
||||
import type { OutputSchema } from '@/executor/utils/block-reference'
|
||||
@@ -12,8 +10,6 @@ import {
|
||||
isBranchNodeId,
|
||||
} from '@/executor/utils/subflow-utils'
|
||||
import type { SerializedBlock } from '@/serializer/types'
|
||||
import type { ToolConfig } from '@/tools/types'
|
||||
import { getTool } from '@/tools/utils'
|
||||
|
||||
export interface BlockDataCollection {
|
||||
blockData: Record<string, unknown>
|
||||
@@ -21,118 +17,44 @@ export interface BlockDataCollection {
|
||||
blockOutputSchemas: Record<string, OutputSchema>
|
||||
}
|
||||
|
||||
/**
|
||||
* Block types where inputFormat fields should be merged into outputs schema.
|
||||
* These are blocks where users define custom fields via inputFormat that become
|
||||
* valid output paths (e.g., <start.myField>, <webhook1.customField>, <hitl1.resumeField>).
|
||||
*
|
||||
* Note: This includes non-trigger blocks like 'starter' and 'human_in_the_loop' which
|
||||
* have category 'blocks' but still need their inputFormat exposed as outputs.
|
||||
*/
|
||||
const BLOCKS_WITH_INPUT_FORMAT_OUTPUTS = [
|
||||
'start_trigger',
|
||||
'starter',
|
||||
'api_trigger',
|
||||
'input_trigger',
|
||||
'generic_webhook',
|
||||
'human_in_the_loop',
|
||||
] as const
|
||||
|
||||
function getInputFormatFields(block: SerializedBlock): OutputSchema {
|
||||
const inputFormat = normalizeInputFormatValue(block.config?.params?.inputFormat)
|
||||
if (inputFormat.length === 0) {
|
||||
return {}
|
||||
}
|
||||
|
||||
const schema: OutputSchema = {}
|
||||
for (const field of inputFormat) {
|
||||
if (!field.name) continue
|
||||
schema[field.name] = { type: field.type || 'any' }
|
||||
}
|
||||
|
||||
return schema
|
||||
interface SubBlockWithValue {
|
||||
value?: unknown
|
||||
}
|
||||
|
||||
function getEvaluatorMetricsSchema(block: SerializedBlock): OutputSchema | undefined {
|
||||
if (block.metadata?.id !== 'evaluator') return undefined
|
||||
function paramsToSubBlocks(
|
||||
params: Record<string, unknown> | undefined
|
||||
): Record<string, SubBlockWithValue> {
|
||||
if (!params) return {}
|
||||
|
||||
const metrics = block.config?.params?.metrics
|
||||
if (!Array.isArray(metrics) || metrics.length === 0) return undefined
|
||||
|
||||
const validMetrics = metrics.filter(
|
||||
(m: { name?: string }) => m?.name && typeof m.name === 'string'
|
||||
)
|
||||
if (validMetrics.length === 0) return undefined
|
||||
|
||||
const schema: OutputSchema = { ...(block.outputs as OutputSchema) }
|
||||
for (const metric of validMetrics) {
|
||||
schema[metric.name.toLowerCase()] = { type: 'number' }
|
||||
const subBlocks: Record<string, SubBlockWithValue> = {}
|
||||
for (const [key, value] of Object.entries(params)) {
|
||||
subBlocks[key] = { value }
|
||||
}
|
||||
return schema
|
||||
return subBlocks
|
||||
}
|
||||
|
||||
function getResponseFormatSchema(block: SerializedBlock): OutputSchema | undefined {
|
||||
const responseFormatValue = block.config?.params?.responseFormat
|
||||
if (!responseFormatValue) return undefined
|
||||
|
||||
const parsed = parseResponseFormatSafely(responseFormatValue, block.id)
|
||||
if (!parsed) return undefined
|
||||
|
||||
const fields = extractFieldsFromSchema(parsed)
|
||||
if (fields.length === 0) return undefined
|
||||
|
||||
const schema: OutputSchema = {}
|
||||
for (const field of fields) {
|
||||
schema[field.name] = { type: field.type || 'any' }
|
||||
}
|
||||
return schema
|
||||
}
|
||||
|
||||
export function getBlockSchema(
|
||||
block: SerializedBlock,
|
||||
toolConfig?: ToolConfig
|
||||
): OutputSchema | undefined {
|
||||
function getRegistrySchema(block: SerializedBlock): OutputSchema | undefined {
|
||||
const blockType = block.metadata?.id
|
||||
if (!blockType) return undefined
|
||||
|
||||
if (
|
||||
blockType &&
|
||||
BLOCKS_WITH_INPUT_FORMAT_OUTPUTS.includes(
|
||||
blockType as (typeof BLOCKS_WITH_INPUT_FORMAT_OUTPUTS)[number]
|
||||
)
|
||||
) {
|
||||
const baseOutputs = (block.outputs as OutputSchema) || {}
|
||||
const inputFormatFields = getInputFormatFields(block)
|
||||
const merged = { ...baseOutputs, ...inputFormatFields }
|
||||
if (Object.keys(merged).length > 0) {
|
||||
return merged
|
||||
}
|
||||
const subBlocks = paramsToSubBlocks(block.config?.params)
|
||||
const blockConfig = getBlock(blockType)
|
||||
const isTriggerCapable = blockConfig ? hasTriggerCapability(blockConfig) : false
|
||||
const triggerMode = Boolean(isTriggerBehavior(block) && isTriggerCapable)
|
||||
const outputs = getEffectiveBlockOutputs(blockType, subBlocks, {
|
||||
triggerMode,
|
||||
preferToolOutputs: !triggerMode,
|
||||
includeHidden: true,
|
||||
}) as OutputSchema
|
||||
|
||||
if (!outputs || Object.keys(outputs).length === 0) {
|
||||
return undefined
|
||||
}
|
||||
return outputs
|
||||
}
|
||||
|
||||
const evaluatorSchema = getEvaluatorMetricsSchema(block)
|
||||
if (evaluatorSchema) {
|
||||
return evaluatorSchema
|
||||
}
|
||||
|
||||
const responseFormatSchema = getResponseFormatSchema(block)
|
||||
if (responseFormatSchema) {
|
||||
return responseFormatSchema
|
||||
}
|
||||
|
||||
const isTrigger = isTriggerBehavior(block)
|
||||
|
||||
if (isTrigger && block.outputs && Object.keys(block.outputs).length > 0) {
|
||||
return block.outputs as OutputSchema
|
||||
}
|
||||
|
||||
if (toolConfig?.outputs && Object.keys(toolConfig.outputs).length > 0) {
|
||||
return toolConfig.outputs as OutputSchema
|
||||
}
|
||||
|
||||
if (block.outputs && Object.keys(block.outputs).length > 0) {
|
||||
return block.outputs as OutputSchema
|
||||
}
|
||||
|
||||
return undefined
|
||||
export function getBlockSchema(block: SerializedBlock): OutputSchema | undefined {
|
||||
return getRegistrySchema(block)
|
||||
}
|
||||
|
||||
export function collectBlockData(
|
||||
@@ -170,9 +92,7 @@ export function collectBlockData(
|
||||
blockNameMapping[normalizeName(block.metadata.name)] = id
|
||||
}
|
||||
|
||||
const toolId = block.config?.tool
|
||||
const toolConfig = toolId ? getTool(toolId) : undefined
|
||||
const schema = getBlockSchema(block, toolConfig)
|
||||
const schema = getBlockSchema(block)
|
||||
if (schema && Object.keys(schema).length > 0) {
|
||||
blockOutputSchemas[id] = schema
|
||||
}
|
||||
|
||||
@@ -5,10 +5,10 @@ import { BlockResolver } from './block'
|
||||
import type { ResolutionContext } from './reference'
|
||||
|
||||
vi.mock('@sim/logger', () => loggerMock)
|
||||
|
||||
vi.mock('@/lib/workflows/blocks/block-outputs', () => ({
|
||||
getBlockOutputs: vi.fn(() => ({})),
|
||||
}))
|
||||
vi.mock('@/blocks/registry', async () => {
|
||||
const actual = await vi.importActual<typeof import('@/blocks/registry')>('@/blocks/registry')
|
||||
return actual
|
||||
})
|
||||
|
||||
function createTestWorkflow(
|
||||
blocks: Array<{
|
||||
@@ -135,7 +135,7 @@ describe('BlockResolver', () => {
|
||||
})
|
||||
|
||||
it.concurrent('should return undefined for non-existent path when no schema defined', () => {
|
||||
const workflow = createTestWorkflow([{ id: 'source' }])
|
||||
const workflow = createTestWorkflow([{ id: 'source', type: 'unknown_block_type' }])
|
||||
const resolver = new BlockResolver(workflow)
|
||||
const ctx = createTestContext('current', {
|
||||
source: { existing: 'value' },
|
||||
@@ -144,55 +144,93 @@ describe('BlockResolver', () => {
|
||||
expect(resolver.resolve('<source.nonexistent>', ctx)).toBeUndefined()
|
||||
})
|
||||
|
||||
it.concurrent('should throw error for path not in output schema', async () => {
|
||||
const { getBlockOutputs } = await import('@/lib/workflows/blocks/block-outputs')
|
||||
const mockGetBlockOutputs = vi.mocked(getBlockOutputs)
|
||||
const customOutputs = {
|
||||
validField: { type: 'string', description: 'A valid field' },
|
||||
nested: {
|
||||
child: { type: 'number', description: 'Nested child' },
|
||||
},
|
||||
}
|
||||
mockGetBlockOutputs.mockReturnValue(customOutputs as any)
|
||||
|
||||
it.concurrent('should throw error for path not in output schema', () => {
|
||||
const workflow = createTestWorkflow([
|
||||
{
|
||||
id: 'source',
|
||||
outputs: customOutputs,
|
||||
type: 'start_trigger',
|
||||
},
|
||||
])
|
||||
const resolver = new BlockResolver(workflow)
|
||||
const ctx = createTestContext('current', {
|
||||
source: { validField: 'value', nested: { child: 42 } },
|
||||
source: { input: 'value' },
|
||||
})
|
||||
|
||||
expect(() => resolver.resolve('<source.invalidField>', ctx)).toThrow(
|
||||
/"invalidField" doesn't exist on block "source"/
|
||||
)
|
||||
expect(() => resolver.resolve('<source.invalidField>', ctx)).toThrow(/Available fields:/)
|
||||
|
||||
mockGetBlockOutputs.mockReturnValue({})
|
||||
})
|
||||
|
||||
it.concurrent('should return undefined for path in schema but missing in data', () => {
|
||||
const workflow = createTestWorkflow([
|
||||
{
|
||||
id: 'source',
|
||||
outputs: {
|
||||
requiredField: { type: 'string', description: 'Always present' },
|
||||
optionalField: { type: 'string', description: 'Sometimes missing' },
|
||||
},
|
||||
type: 'function',
|
||||
},
|
||||
])
|
||||
const resolver = new BlockResolver(workflow)
|
||||
const ctx = createTestContext('current', {
|
||||
source: { requiredField: 'value' },
|
||||
source: { stdout: 'log output' },
|
||||
})
|
||||
|
||||
expect(resolver.resolve('<source.requiredField>', ctx)).toBe('value')
|
||||
expect(resolver.resolve('<source.optionalField>', ctx)).toBeUndefined()
|
||||
expect(resolver.resolve('<source.stdout>', ctx)).toBe('log output')
|
||||
expect(resolver.resolve('<source.result>', ctx)).toBeUndefined()
|
||||
})
|
||||
|
||||
it.concurrent(
|
||||
'should allow hiddenFromDisplay fields for pre-execution schema validation',
|
||||
() => {
|
||||
const workflow = createTestWorkflow([
|
||||
{
|
||||
id: 'workflow-block',
|
||||
name: 'Workflow',
|
||||
type: 'workflow',
|
||||
},
|
||||
])
|
||||
const resolver = new BlockResolver(workflow)
|
||||
const ctx = createTestContext('current', {})
|
||||
|
||||
expect(resolver.resolve('<workflow.childTraceSpans>', ctx)).toBeUndefined()
|
||||
}
|
||||
)
|
||||
|
||||
it.concurrent(
|
||||
'should allow hiddenFromDisplay fields for workflow_input pre-execution schema validation',
|
||||
() => {
|
||||
const workflow = createTestWorkflow([
|
||||
{
|
||||
id: 'workflow-input-block',
|
||||
name: 'Workflow Input',
|
||||
type: 'workflow_input',
|
||||
},
|
||||
])
|
||||
const resolver = new BlockResolver(workflow)
|
||||
const ctx = createTestContext('current', {})
|
||||
|
||||
expect(resolver.resolve('<workflowinput.childTraceSpans>', ctx)).toBeUndefined()
|
||||
}
|
||||
)
|
||||
|
||||
it.concurrent(
|
||||
'should allow hiddenFromDisplay fields for HITL pre-execution schema validation',
|
||||
() => {
|
||||
const workflow = createTestWorkflow([
|
||||
{
|
||||
id: 'hitl-block',
|
||||
name: 'HITL',
|
||||
type: 'human_in_the_loop',
|
||||
},
|
||||
])
|
||||
const resolver = new BlockResolver(workflow)
|
||||
const ctx = createTestContext('current', {})
|
||||
|
||||
expect(resolver.resolve('<hitl.response>', ctx)).toBeUndefined()
|
||||
expect(resolver.resolve('<hitl.submission>', ctx)).toBeUndefined()
|
||||
expect(resolver.resolve('<hitl.resumeInput>', ctx)).toBeUndefined()
|
||||
}
|
||||
)
|
||||
|
||||
it.concurrent('should return undefined for non-existent block', () => {
|
||||
const workflow = createTestWorkflow([{ id: 'existing' }])
|
||||
const resolver = new BlockResolver(workflow)
|
||||
@@ -975,7 +1013,7 @@ describe('BlockResolver', () => {
|
||||
})
|
||||
|
||||
it.concurrent('should handle output with undefined values', () => {
|
||||
const workflow = createTestWorkflow([{ id: 'source' }])
|
||||
const workflow = createTestWorkflow([{ id: 'source', type: 'unknown_block_type' }])
|
||||
const resolver = new BlockResolver(workflow)
|
||||
const ctx = createTestContext('current', {
|
||||
source: { value: undefined, other: 'exists' },
|
||||
@@ -985,7 +1023,7 @@ describe('BlockResolver', () => {
|
||||
})
|
||||
|
||||
it.concurrent('should return undefined for deeply nested non-existent path', () => {
|
||||
const workflow = createTestWorkflow([{ id: 'source' }])
|
||||
const workflow = createTestWorkflow([{ id: 'source', type: 'unknown_block_type' }])
|
||||
const resolver = new BlockResolver(workflow)
|
||||
const ctx = createTestContext('current', {
|
||||
source: { level1: { level2: {} } },
|
||||
|
||||
@@ -17,7 +17,6 @@ import {
|
||||
type Resolver,
|
||||
} from '@/executor/variables/resolvers/reference'
|
||||
import type { SerializedBlock, SerializedWorkflow } from '@/serializer/types'
|
||||
import { getTool } from '@/tools/utils'
|
||||
|
||||
export class BlockResolver implements Resolver {
|
||||
private nameToBlockId: Map<string, string>
|
||||
@@ -68,9 +67,7 @@ export class BlockResolver implements Resolver {
|
||||
blockData[blockId] = output
|
||||
}
|
||||
|
||||
const toolId = block.config?.tool
|
||||
const toolConfig = toolId ? getTool(toolId) : undefined
|
||||
const outputSchema = getBlockSchema(block, toolConfig)
|
||||
const outputSchema = getBlockSchema(block)
|
||||
|
||||
if (outputSchema && Object.keys(outputSchema).length > 0) {
|
||||
blockOutputSchemas[blockId] = outputSchema
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
import { useEffect } from 'react'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { keepPreviousData, useMutation, useQuery, useQueryClient } from '@tanstack/react-query'
|
||||
import { sanitizeForHttp, sanitizeHeaders } from '@/lib/mcp/shared'
|
||||
@@ -359,3 +360,65 @@ export function useStoredMcpTools(workspaceId: string) {
|
||||
staleTime: 60 * 1000,
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Shared EventSource connections keyed by workspaceId.
|
||||
* Reference-counted so the connection is closed when the last consumer unmounts.
|
||||
* Attached to `globalThis` so connections survive HMR in development.
|
||||
*/
|
||||
const SSE_KEY = '__mcp_sse_connections' as const
|
||||
|
||||
type SseEntry = { source: EventSource; refs: number }
|
||||
|
||||
const sseConnections: Map<string, SseEntry> =
|
||||
((globalThis as Record<string, unknown>)[SSE_KEY] as Map<string, SseEntry>) ??
|
||||
((globalThis as Record<string, unknown>)[SSE_KEY] = new Map<string, SseEntry>())
|
||||
|
||||
/**
|
||||
* Subscribe to MCP tool-change SSE events for a workspace.
|
||||
* On each `tools_changed` event, invalidates the relevant React Query caches
|
||||
* so the UI refreshes automatically.
|
||||
*/
|
||||
export function useMcpToolsEvents(workspaceId: string) {
|
||||
const queryClient = useQueryClient()
|
||||
|
||||
useEffect(() => {
|
||||
if (!workspaceId) return
|
||||
|
||||
const invalidate = () => {
|
||||
queryClient.invalidateQueries({ queryKey: mcpKeys.tools(workspaceId) })
|
||||
queryClient.invalidateQueries({ queryKey: mcpKeys.servers(workspaceId) })
|
||||
queryClient.invalidateQueries({ queryKey: mcpKeys.storedTools(workspaceId) })
|
||||
}
|
||||
|
||||
let entry = sseConnections.get(workspaceId)
|
||||
|
||||
if (!entry) {
|
||||
const source = new EventSource(`/api/mcp/events?workspaceId=${workspaceId}`)
|
||||
|
||||
source.addEventListener('tools_changed', () => {
|
||||
invalidate()
|
||||
})
|
||||
|
||||
source.onerror = () => {
|
||||
logger.warn(`SSE connection error for workspace ${workspaceId}`)
|
||||
}
|
||||
|
||||
entry = { source, refs: 0 }
|
||||
sseConnections.set(workspaceId, entry)
|
||||
}
|
||||
|
||||
entry.refs++
|
||||
|
||||
return () => {
|
||||
const current = sseConnections.get(workspaceId)
|
||||
if (!current) return
|
||||
|
||||
current.refs--
|
||||
if (current.refs <= 0) {
|
||||
current.source.close()
|
||||
sseConnections.delete(workspaceId)
|
||||
}
|
||||
}
|
||||
}, [workspaceId, queryClient])
|
||||
}
|
||||
|
||||
@@ -1,5 +1,12 @@
|
||||
import { useCallback } from 'react'
|
||||
import { createLogger } from '@sim/logger'
|
||||
|
||||
declare global {
|
||||
interface Window {
|
||||
__skipDiffRecording?: boolean
|
||||
}
|
||||
}
|
||||
|
||||
import type { Edge } from 'reactflow'
|
||||
import { useSession } from '@/lib/auth/auth-client'
|
||||
import { enqueueReplaceWorkflowState } from '@/lib/workflows/operations/socket-operations'
|
||||
@@ -908,7 +915,7 @@ export function useUndoRedo() {
|
||||
|
||||
// Set flag to skip recording during this operation
|
||||
|
||||
;(window as any).__skipDiffRecording = true
|
||||
window.__skipDiffRecording = true
|
||||
try {
|
||||
// Restore baseline state and broadcast to everyone
|
||||
if (baselineSnapshot && activeWorkflowId) {
|
||||
@@ -945,7 +952,7 @@ export function useUndoRedo() {
|
||||
logger.info('Clearing diff UI state')
|
||||
useWorkflowDiffStore.getState().clearDiff({ restoreBaseline: false })
|
||||
} finally {
|
||||
;(window as any).__skipDiffRecording = false
|
||||
window.__skipDiffRecording = false
|
||||
}
|
||||
|
||||
logger.info('Undid apply-diff operation successfully')
|
||||
@@ -965,7 +972,7 @@ export function useUndoRedo() {
|
||||
|
||||
// Set flag to skip recording during this operation
|
||||
|
||||
;(window as any).__skipDiffRecording = true
|
||||
window.__skipDiffRecording = true
|
||||
try {
|
||||
// Apply the before-accept state (with markers for this user)
|
||||
useWorkflowStore.getState().replaceWorkflowState(beforeAccept)
|
||||
@@ -1004,7 +1011,7 @@ export function useUndoRedo() {
|
||||
diffAnalysis: diffAnalysis,
|
||||
})
|
||||
} finally {
|
||||
;(window as any).__skipDiffRecording = false
|
||||
window.__skipDiffRecording = false
|
||||
}
|
||||
|
||||
logger.info('Undid accept-diff operation - restored diff view')
|
||||
@@ -1018,7 +1025,7 @@ export function useUndoRedo() {
|
||||
const { useWorkflowStore } = await import('@/stores/workflows/workflow/store')
|
||||
const { useSubBlockStore } = await import('@/stores/workflows/subblock/store')
|
||||
|
||||
;(window as any).__skipDiffRecording = true
|
||||
window.__skipDiffRecording = true
|
||||
try {
|
||||
// Apply the before-reject state (with markers for this user)
|
||||
useWorkflowStore.getState().replaceWorkflowState(beforeReject)
|
||||
@@ -1055,7 +1062,7 @@ export function useUndoRedo() {
|
||||
diffAnalysis: diffAnalysis,
|
||||
})
|
||||
} finally {
|
||||
;(window as any).__skipDiffRecording = false
|
||||
window.__skipDiffRecording = false
|
||||
}
|
||||
|
||||
logger.info('Undid reject-diff operation - restored diff view')
|
||||
@@ -1526,7 +1533,7 @@ export function useUndoRedo() {
|
||||
|
||||
// Set flag to skip recording during this operation
|
||||
|
||||
;(window as any).__skipDiffRecording = true
|
||||
window.__skipDiffRecording = true
|
||||
try {
|
||||
// Manually apply the proposed state and set up diff store (similar to setProposedChanges but with original baseline)
|
||||
const diffStore = useWorkflowDiffStore.getState()
|
||||
@@ -1567,7 +1574,7 @@ export function useUndoRedo() {
|
||||
diffAnalysis: diffAnalysis,
|
||||
})
|
||||
} finally {
|
||||
;(window as any).__skipDiffRecording = false
|
||||
window.__skipDiffRecording = false
|
||||
}
|
||||
|
||||
logger.info('Redid apply-diff operation')
|
||||
@@ -1583,7 +1590,7 @@ export function useUndoRedo() {
|
||||
|
||||
// Set flag to skip recording during this operation
|
||||
|
||||
;(window as any).__skipDiffRecording = true
|
||||
window.__skipDiffRecording = true
|
||||
try {
|
||||
// Clear diff state FIRST to prevent flash of colors (local UI only)
|
||||
// Use setState directly to ensure synchronous clearing
|
||||
@@ -1621,7 +1628,7 @@ export function useUndoRedo() {
|
||||
operationId: opId,
|
||||
})
|
||||
} finally {
|
||||
;(window as any).__skipDiffRecording = false
|
||||
window.__skipDiffRecording = false
|
||||
}
|
||||
|
||||
logger.info('Redid accept-diff operation - cleared diff view')
|
||||
@@ -1635,7 +1642,7 @@ export function useUndoRedo() {
|
||||
const { useWorkflowStore } = await import('@/stores/workflows/workflow/store')
|
||||
const { useSubBlockStore } = await import('@/stores/workflows/subblock/store')
|
||||
|
||||
;(window as any).__skipDiffRecording = true
|
||||
window.__skipDiffRecording = true
|
||||
try {
|
||||
// Clear diff state FIRST to prevent flash of colors (local UI only)
|
||||
// Use setState directly to ensure synchronous clearing
|
||||
@@ -1673,7 +1680,7 @@ export function useUndoRedo() {
|
||||
operationId: opId,
|
||||
})
|
||||
} finally {
|
||||
;(window as any).__skipDiffRecording = false
|
||||
window.__skipDiffRecording = false
|
||||
}
|
||||
|
||||
logger.info('Redid reject-diff operation - cleared diff view')
|
||||
|
||||
@@ -14,7 +14,7 @@ export type UsageLogCategory = 'model' | 'fixed'
|
||||
/**
|
||||
* Usage log source types
|
||||
*/
|
||||
export type UsageLogSource = 'workflow' | 'wand' | 'copilot'
|
||||
export type UsageLogSource = 'workflow' | 'wand' | 'copilot' | 'mcp_copilot'
|
||||
|
||||
/**
|
||||
* Metadata for 'model' category charges
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { COPILOT_CHAT_API_PATH, COPILOT_CHAT_STREAM_API_PATH } from '@/lib/copilot/constants'
|
||||
import type { CopilotMode, CopilotModelId, CopilotTransportMode } from '@/lib/copilot/models'
|
||||
|
||||
const logger = createLogger('CopilotAPI')
|
||||
@@ -82,6 +83,7 @@ export interface SendMessageRequest {
|
||||
executionId?: string
|
||||
}>
|
||||
commands?: string[]
|
||||
resumeFromEventId?: number
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -120,7 +122,7 @@ export async function sendStreamingMessage(
|
||||
request: SendMessageRequest
|
||||
): Promise<StreamingResponse> {
|
||||
try {
|
||||
const { abortSignal, ...requestBody } = request
|
||||
const { abortSignal, resumeFromEventId, ...requestBody } = request
|
||||
try {
|
||||
const preview = Array.isArray((requestBody as any).contexts)
|
||||
? (requestBody as any).contexts.map((c: any) => ({
|
||||
@@ -136,9 +138,56 @@ export async function sendStreamingMessage(
|
||||
? (requestBody as any).contexts.length
|
||||
: 0,
|
||||
contextsPreview: preview,
|
||||
resumeFromEventId,
|
||||
})
|
||||
} catch {}
|
||||
const response = await fetch('/api/copilot/chat', {
|
||||
} catch (error) {
|
||||
logger.warn('Failed to log streaming message context preview', {
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
})
|
||||
}
|
||||
|
||||
const streamId = request.userMessageId
|
||||
if (typeof resumeFromEventId === 'number') {
|
||||
if (!streamId) {
|
||||
return {
|
||||
success: false,
|
||||
error: 'streamId is required to resume a stream',
|
||||
status: 400,
|
||||
}
|
||||
}
|
||||
const url = `${COPILOT_CHAT_STREAM_API_PATH}?streamId=${encodeURIComponent(
|
||||
streamId
|
||||
)}&from=${encodeURIComponent(String(resumeFromEventId))}`
|
||||
const response = await fetch(url, {
|
||||
method: 'GET',
|
||||
signal: abortSignal,
|
||||
credentials: 'include',
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
const errorMessage = await handleApiError(response, 'Failed to resume streaming message')
|
||||
return {
|
||||
success: false,
|
||||
error: errorMessage,
|
||||
status: response.status,
|
||||
}
|
||||
}
|
||||
|
||||
if (!response.body) {
|
||||
return {
|
||||
success: false,
|
||||
error: 'No response body received',
|
||||
status: 500,
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
success: true,
|
||||
stream: response.body,
|
||||
}
|
||||
}
|
||||
|
||||
const response = await fetch(COPILOT_CHAT_API_PATH, {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({ ...requestBody, stream: true }),
|
||||
|
||||
66
apps/sim/lib/copilot/chat-context.ts
Normal file
66
apps/sim/lib/copilot/chat-context.ts
Normal file
@@ -0,0 +1,66 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { CopilotFiles } from '@/lib/uploads'
|
||||
import { createFileContent } from '@/lib/uploads/utils/file-utils'
|
||||
|
||||
const logger = createLogger('CopilotChatContext')
|
||||
|
||||
/**
|
||||
* Build conversation history from stored chat messages.
|
||||
*/
|
||||
export function buildConversationHistory(
|
||||
messages: unknown[],
|
||||
conversationId?: string
|
||||
): { history: unknown[]; conversationId?: string } {
|
||||
const history = Array.isArray(messages) ? messages : []
|
||||
return {
|
||||
history,
|
||||
...(conversationId ? { conversationId } : {}),
|
||||
}
|
||||
}
|
||||
|
||||
export interface FileAttachmentInput {
|
||||
id: string
|
||||
key: string
|
||||
name?: string
|
||||
filename?: string
|
||||
mimeType?: string
|
||||
media_type?: string
|
||||
size: number
|
||||
}
|
||||
|
||||
export interface FileContent {
|
||||
type: string
|
||||
[key: string]: unknown
|
||||
}
|
||||
|
||||
/**
|
||||
* Process file attachments into content for the payload.
|
||||
*/
|
||||
export async function processFileAttachments(
|
||||
fileAttachments: FileAttachmentInput[],
|
||||
userId: string
|
||||
): Promise<FileContent[]> {
|
||||
if (!Array.isArray(fileAttachments) || fileAttachments.length === 0) return []
|
||||
|
||||
const processedFileContents: FileContent[] = []
|
||||
const requestId = `copilot-${userId}-${Date.now()}`
|
||||
const processedAttachments = await CopilotFiles.processCopilotAttachments(
|
||||
fileAttachments as Parameters<typeof CopilotFiles.processCopilotAttachments>[0],
|
||||
requestId
|
||||
)
|
||||
|
||||
for (const { buffer, attachment } of processedAttachments) {
|
||||
const fileContent = createFileContent(buffer, attachment.media_type)
|
||||
if (fileContent) {
|
||||
processedFileContents.push(fileContent as FileContent)
|
||||
}
|
||||
}
|
||||
|
||||
logger.debug('Processed file attachments for payload', {
|
||||
userId,
|
||||
inputCount: fileAttachments.length,
|
||||
outputCount: processedFileContents.length,
|
||||
})
|
||||
|
||||
return processedFileContents
|
||||
}
|
||||
69
apps/sim/lib/copilot/chat-lifecycle.ts
Normal file
69
apps/sim/lib/copilot/chat-lifecycle.ts
Normal file
@@ -0,0 +1,69 @@
|
||||
import { db } from '@sim/db'
|
||||
import { copilotChats } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { and, eq } from 'drizzle-orm'
|
||||
|
||||
const logger = createLogger('CopilotChatLifecycle')
|
||||
|
||||
export interface ChatLoadResult {
|
||||
chatId: string
|
||||
chat: typeof copilotChats.$inferSelect | null
|
||||
conversationHistory: unknown[]
|
||||
isNew: boolean
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolve or create a copilot chat session.
|
||||
* If chatId is provided, loads the existing chat. Otherwise creates a new one.
|
||||
*/
|
||||
export async function resolveOrCreateChat(params: {
|
||||
chatId?: string
|
||||
userId: string
|
||||
workflowId: string
|
||||
model: string
|
||||
}): Promise<ChatLoadResult> {
|
||||
const { chatId, userId, workflowId, model } = params
|
||||
|
||||
if (chatId) {
|
||||
const [chat] = await db
|
||||
.select()
|
||||
.from(copilotChats)
|
||||
.where(and(eq(copilotChats.id, chatId), eq(copilotChats.userId, userId)))
|
||||
.limit(1)
|
||||
|
||||
return {
|
||||
chatId,
|
||||
chat: chat ?? null,
|
||||
conversationHistory: chat && Array.isArray(chat.messages) ? chat.messages : [],
|
||||
isNew: false,
|
||||
}
|
||||
}
|
||||
|
||||
const [newChat] = await db
|
||||
.insert(copilotChats)
|
||||
.values({
|
||||
userId,
|
||||
workflowId,
|
||||
title: null,
|
||||
model,
|
||||
messages: [],
|
||||
})
|
||||
.returning()
|
||||
|
||||
if (!newChat) {
|
||||
logger.warn('Failed to create new copilot chat row', { userId, workflowId })
|
||||
return {
|
||||
chatId: '',
|
||||
chat: null,
|
||||
conversationHistory: [],
|
||||
isNew: true,
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
chatId: newChat.id,
|
||||
chat: newChat,
|
||||
conversationHistory: [],
|
||||
isNew: true,
|
||||
}
|
||||
}
|
||||
209
apps/sim/lib/copilot/chat-payload.ts
Normal file
209
apps/sim/lib/copilot/chat-payload.ts
Normal file
@@ -0,0 +1,209 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { processFileAttachments } from '@/lib/copilot/chat-context'
|
||||
import { getCopilotModel } from '@/lib/copilot/config'
|
||||
import { SIM_AGENT_VERSION } from '@/lib/copilot/constants'
|
||||
import { getCredentialsServerTool } from '@/lib/copilot/tools/server/user/get-credentials'
|
||||
import type { CopilotProviderConfig } from '@/lib/copilot/types'
|
||||
import { env } from '@/lib/core/config/env'
|
||||
import { tools } from '@/tools/registry'
|
||||
import { getLatestVersionTools, stripVersionSuffix } from '@/tools/utils'
|
||||
|
||||
const logger = createLogger('CopilotChatPayload')
|
||||
|
||||
export interface BuildPayloadParams {
|
||||
message: string
|
||||
workflowId: string
|
||||
userId: string
|
||||
userMessageId: string
|
||||
mode: string
|
||||
model: string
|
||||
conversationHistory?: unknown[]
|
||||
contexts?: Array<{ type: string; content: string }>
|
||||
fileAttachments?: Array<{ id: string; key: string; size: number; [key: string]: unknown }>
|
||||
commands?: string[]
|
||||
chatId?: string
|
||||
implicitFeedback?: string
|
||||
}
|
||||
|
||||
interface ToolSchema {
|
||||
name: string
|
||||
description: string
|
||||
input_schema: Record<string, unknown>
|
||||
defer_loading?: boolean
|
||||
executeLocally?: boolean
|
||||
oauth?: { required: boolean; provider: string }
|
||||
}
|
||||
|
||||
interface CredentialsPayload {
|
||||
oauth: Record<
|
||||
string,
|
||||
{ accessToken: string; accountId: string; name: string; expiresAt?: string }
|
||||
>
|
||||
apiKeys: string[]
|
||||
metadata?: {
|
||||
connectedOAuth: Array<{ provider: string; name: string; scopes?: string[] }>
|
||||
configuredApiKeys: string[]
|
||||
}
|
||||
}
|
||||
|
||||
function buildProviderConfig(selectedModel: string): CopilotProviderConfig | undefined {
|
||||
const defaults = getCopilotModel('chat')
|
||||
const envModel = env.COPILOT_MODEL || defaults.model
|
||||
const providerEnv = env.COPILOT_PROVIDER
|
||||
|
||||
if (!providerEnv) return undefined
|
||||
|
||||
if (providerEnv === 'azure-openai') {
|
||||
return {
|
||||
provider: 'azure-openai',
|
||||
model: envModel,
|
||||
apiKey: env.AZURE_OPENAI_API_KEY,
|
||||
apiVersion: 'preview',
|
||||
endpoint: env.AZURE_OPENAI_ENDPOINT,
|
||||
}
|
||||
}
|
||||
|
||||
if (providerEnv === 'azure-anthropic') {
|
||||
return {
|
||||
provider: 'azure-anthropic',
|
||||
model: envModel,
|
||||
apiKey: env.AZURE_ANTHROPIC_API_KEY,
|
||||
apiVersion: env.AZURE_ANTHROPIC_API_VERSION,
|
||||
endpoint: env.AZURE_ANTHROPIC_ENDPOINT,
|
||||
}
|
||||
}
|
||||
|
||||
if (providerEnv === 'vertex') {
|
||||
return {
|
||||
provider: 'vertex',
|
||||
model: envModel,
|
||||
apiKey: env.COPILOT_API_KEY,
|
||||
vertexProject: env.VERTEX_PROJECT,
|
||||
vertexLocation: env.VERTEX_LOCATION,
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
provider: providerEnv as Exclude<string, 'azure-openai' | 'vertex'>,
|
||||
model: selectedModel,
|
||||
apiKey: env.COPILOT_API_KEY,
|
||||
} as CopilotProviderConfig
|
||||
}
|
||||
|
||||
/**
|
||||
* Build the request payload for the copilot backend.
|
||||
*/
|
||||
export async function buildCopilotRequestPayload(
|
||||
params: BuildPayloadParams,
|
||||
options: {
|
||||
providerConfig?: CopilotProviderConfig
|
||||
selectedModel: string
|
||||
}
|
||||
): Promise<Record<string, unknown>> {
|
||||
const {
|
||||
message,
|
||||
workflowId,
|
||||
userId,
|
||||
userMessageId,
|
||||
mode,
|
||||
contexts,
|
||||
fileAttachments,
|
||||
commands,
|
||||
chatId,
|
||||
} = params
|
||||
|
||||
const selectedModel = options.selectedModel
|
||||
const providerConfig = options.providerConfig ?? buildProviderConfig(selectedModel)
|
||||
|
||||
const effectiveMode = mode === 'agent' ? 'build' : mode
|
||||
const transportMode = effectiveMode === 'build' ? 'agent' : effectiveMode
|
||||
|
||||
const processedFileContents = await processFileAttachments(fileAttachments ?? [], userId)
|
||||
|
||||
const integrationTools: ToolSchema[] = []
|
||||
let credentials: CredentialsPayload | null = null
|
||||
|
||||
if (effectiveMode === 'build') {
|
||||
// function_execute sandbox tool is now defined in Go — no need to send it
|
||||
|
||||
try {
|
||||
const rawCredentials = await getCredentialsServerTool.execute({ workflowId }, { userId })
|
||||
|
||||
const oauthMap: CredentialsPayload['oauth'] = {}
|
||||
const connectedOAuth: Array<{ provider: string; name: string; scopes?: string[] }> = []
|
||||
for (const cred of rawCredentials?.oauth?.connected?.credentials ?? []) {
|
||||
if (cred.accessToken) {
|
||||
oauthMap[cred.provider] = {
|
||||
accessToken: cred.accessToken,
|
||||
accountId: cred.id,
|
||||
name: cred.name,
|
||||
}
|
||||
connectedOAuth.push({ provider: cred.provider, name: cred.name })
|
||||
}
|
||||
}
|
||||
|
||||
credentials = {
|
||||
oauth: oauthMap,
|
||||
apiKeys: rawCredentials?.environment?.variableNames ?? [],
|
||||
metadata: {
|
||||
connectedOAuth,
|
||||
configuredApiKeys: rawCredentials?.environment?.variableNames ?? [],
|
||||
},
|
||||
}
|
||||
} catch (error) {
|
||||
logger.warn('Failed to fetch credentials for build payload', {
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
})
|
||||
}
|
||||
|
||||
try {
|
||||
const { createUserToolSchema } = await import('@/tools/params')
|
||||
const latestTools = getLatestVersionTools(tools)
|
||||
|
||||
for (const [toolId, toolConfig] of Object.entries(latestTools)) {
|
||||
try {
|
||||
const userSchema = createUserToolSchema(toolConfig)
|
||||
const strippedName = stripVersionSuffix(toolId)
|
||||
integrationTools.push({
|
||||
name: strippedName,
|
||||
description: toolConfig.description || toolConfig.name || strippedName,
|
||||
input_schema: userSchema as unknown as Record<string, unknown>,
|
||||
defer_loading: true,
|
||||
...(toolConfig.oauth?.required && {
|
||||
oauth: {
|
||||
required: true,
|
||||
provider: toolConfig.oauth.provider,
|
||||
},
|
||||
}),
|
||||
})
|
||||
} catch (toolError) {
|
||||
logger.warn('Failed to build schema for tool, skipping', {
|
||||
toolId,
|
||||
error: toolError instanceof Error ? toolError.message : String(toolError),
|
||||
})
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
logger.warn('Failed to build tool schemas for payload', {
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
message,
|
||||
workflowId,
|
||||
userId,
|
||||
model: selectedModel,
|
||||
mode: transportMode,
|
||||
messageId: userMessageId,
|
||||
version: SIM_AGENT_VERSION,
|
||||
...(providerConfig ? { provider: providerConfig } : {}),
|
||||
...(contexts && contexts.length > 0 ? { context: contexts } : {}),
|
||||
...(chatId ? { chatId } : {}),
|
||||
...(processedFileContents.length > 0 ? { fileAttachments: processedFileContents } : {}),
|
||||
...(integrationTools.length > 0 ? { integrationTools } : {}),
|
||||
...(credentials ? { credentials } : {}),
|
||||
...(commands && commands.length > 0 ? { commands } : {}),
|
||||
}
|
||||
}
|
||||
147
apps/sim/lib/copilot/client-sse/content-blocks.ts
Normal file
147
apps/sim/lib/copilot/client-sse/content-blocks.ts
Normal file
@@ -0,0 +1,147 @@
|
||||
import type {
|
||||
ChatContext,
|
||||
CopilotMessage,
|
||||
MessageFileAttachment,
|
||||
} from '@/stores/panel/copilot/types'
|
||||
import type { ClientContentBlock, ClientStreamingContext } from './types'
|
||||
|
||||
const TEXT_BLOCK_TYPE = 'text'
|
||||
const THINKING_BLOCK_TYPE = 'thinking'
|
||||
const CONTINUE_OPTIONS_TAG = '<options>{"1":"Continue"}</options>'
|
||||
|
||||
export function createUserMessage(
|
||||
content: string,
|
||||
fileAttachments?: MessageFileAttachment[],
|
||||
contexts?: ChatContext[],
|
||||
messageId?: string
|
||||
): CopilotMessage {
|
||||
return {
|
||||
id: messageId || crypto.randomUUID(),
|
||||
role: 'user',
|
||||
content,
|
||||
timestamp: new Date().toISOString(),
|
||||
...(fileAttachments && fileAttachments.length > 0 && { fileAttachments }),
|
||||
...(contexts && contexts.length > 0 && { contexts }),
|
||||
...(contexts &&
|
||||
contexts.length > 0 && {
|
||||
contentBlocks: [{ type: 'contexts', contexts, timestamp: Date.now() }],
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
||||
export function createStreamingMessage(): CopilotMessage {
|
||||
return {
|
||||
id: crypto.randomUUID(),
|
||||
role: 'assistant',
|
||||
content: '',
|
||||
timestamp: new Date().toISOString(),
|
||||
}
|
||||
}
|
||||
|
||||
export function createErrorMessage(
|
||||
messageId: string,
|
||||
content: string,
|
||||
errorType?: 'usage_limit' | 'unauthorized' | 'forbidden' | 'rate_limit' | 'upgrade_required'
|
||||
): CopilotMessage {
|
||||
return {
|
||||
id: messageId,
|
||||
role: 'assistant',
|
||||
content,
|
||||
timestamp: new Date().toISOString(),
|
||||
contentBlocks: [
|
||||
{
|
||||
type: 'text',
|
||||
content,
|
||||
timestamp: Date.now(),
|
||||
},
|
||||
],
|
||||
errorType,
|
||||
}
|
||||
}
|
||||
|
||||
export function appendTextBlock(context: ClientStreamingContext, text: string) {
|
||||
if (!text) return
|
||||
context.accumulatedContent += text
|
||||
if (context.currentTextBlock && context.contentBlocks.length > 0) {
|
||||
const lastBlock = context.contentBlocks[context.contentBlocks.length - 1]
|
||||
if (lastBlock.type === TEXT_BLOCK_TYPE && lastBlock === context.currentTextBlock) {
|
||||
lastBlock.content += text
|
||||
return
|
||||
}
|
||||
}
|
||||
const newBlock: ClientContentBlock = { type: 'text', content: text, timestamp: Date.now() }
|
||||
context.currentTextBlock = newBlock
|
||||
context.contentBlocks.push(newBlock)
|
||||
}
|
||||
|
||||
export function appendContinueOption(content: string): string {
|
||||
if (/<options>/i.test(content)) return content
|
||||
const suffix = content.trim().length > 0 ? '\n\n' : ''
|
||||
return `${content}${suffix}${CONTINUE_OPTIONS_TAG}`
|
||||
}
|
||||
|
||||
export function appendContinueOptionBlock(blocks: ClientContentBlock[]): ClientContentBlock[] {
|
||||
if (!Array.isArray(blocks)) return blocks
|
||||
const hasOptions = blocks.some(
|
||||
(block) =>
|
||||
block?.type === TEXT_BLOCK_TYPE &&
|
||||
typeof block.content === 'string' &&
|
||||
/<options>/i.test(block.content)
|
||||
)
|
||||
if (hasOptions) return blocks
|
||||
return [
|
||||
...blocks,
|
||||
{
|
||||
type: TEXT_BLOCK_TYPE,
|
||||
content: CONTINUE_OPTIONS_TAG,
|
||||
timestamp: Date.now(),
|
||||
},
|
||||
]
|
||||
}
|
||||
|
||||
export function stripContinueOption(content: string): string {
|
||||
if (!content || !content.includes(CONTINUE_OPTIONS_TAG)) return content
|
||||
const next = content.replace(CONTINUE_OPTIONS_TAG, '')
|
||||
return next.replace(/\n{2,}\s*$/g, '\n').trimEnd()
|
||||
}
|
||||
|
||||
export function stripContinueOptionFromBlocks(blocks: ClientContentBlock[]): ClientContentBlock[] {
|
||||
if (!Array.isArray(blocks)) return blocks
|
||||
return blocks.flatMap((block) => {
|
||||
if (
|
||||
block?.type === TEXT_BLOCK_TYPE &&
|
||||
typeof block.content === 'string' &&
|
||||
block.content.includes(CONTINUE_OPTIONS_TAG)
|
||||
) {
|
||||
const nextContent = stripContinueOption(block.content)
|
||||
if (!nextContent.trim()) return []
|
||||
return [{ ...block, content: nextContent }]
|
||||
}
|
||||
return [block]
|
||||
})
|
||||
}
|
||||
|
||||
export function beginThinkingBlock(context: ClientStreamingContext) {
|
||||
if (!context.currentThinkingBlock) {
|
||||
const newBlock: ClientContentBlock = {
|
||||
type: 'thinking',
|
||||
content: '',
|
||||
timestamp: Date.now(),
|
||||
startTime: Date.now(),
|
||||
}
|
||||
context.currentThinkingBlock = newBlock
|
||||
context.contentBlocks.push(newBlock)
|
||||
}
|
||||
context.isInThinkingBlock = true
|
||||
context.currentTextBlock = null
|
||||
}
|
||||
|
||||
export function finalizeThinkingBlock(context: ClientStreamingContext) {
|
||||
if (context.currentThinkingBlock) {
|
||||
context.currentThinkingBlock.duration =
|
||||
Date.now() - (context.currentThinkingBlock.startTime || Date.now())
|
||||
}
|
||||
context.isInThinkingBlock = false
|
||||
context.currentThinkingBlock = null
|
||||
context.currentTextBlock = null
|
||||
}
|
||||
935
apps/sim/lib/copilot/client-sse/handlers.ts
Normal file
935
apps/sim/lib/copilot/client-sse/handlers.ts
Normal file
@@ -0,0 +1,935 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { COPILOT_CONFIRM_API_PATH, STREAM_STORAGE_KEY } from '@/lib/copilot/constants'
|
||||
import { asRecord } from '@/lib/copilot/orchestrator/sse-utils'
|
||||
import type { SSEEvent } from '@/lib/copilot/orchestrator/types'
|
||||
import {
|
||||
isBackgroundState,
|
||||
isRejectedState,
|
||||
isReviewState,
|
||||
resolveToolDisplay,
|
||||
} from '@/lib/copilot/store-utils'
|
||||
import { ClientToolCallState } from '@/lib/copilot/tools/client/tool-display-registry'
|
||||
import type { CopilotStore, CopilotStreamInfo, CopilotToolCall } from '@/stores/panel/copilot/types'
|
||||
import { useVariablesStore } from '@/stores/panel/variables/store'
|
||||
import { useEnvironmentStore } from '@/stores/settings/environment/store'
|
||||
import { useWorkflowDiffStore } from '@/stores/workflow-diff/store'
|
||||
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
||||
import type { WorkflowState } from '@/stores/workflows/workflow/types'
|
||||
import { appendTextBlock, beginThinkingBlock, finalizeThinkingBlock } from './content-blocks'
|
||||
import { CLIENT_EXECUTABLE_RUN_TOOLS, executeRunToolOnClient } from './run-tool-execution'
|
||||
import type { ClientContentBlock, ClientStreamingContext } from './types'
|
||||
|
||||
const logger = createLogger('CopilotClientSseHandlers')
|
||||
const TEXT_BLOCK_TYPE = 'text'
|
||||
|
||||
const MAX_BATCH_INTERVAL = 50
|
||||
const MIN_BATCH_INTERVAL = 16
|
||||
const MAX_QUEUE_SIZE = 5
|
||||
|
||||
/**
|
||||
* Send an auto-accept confirmation to the server for auto-allowed tools.
|
||||
* The server-side orchestrator polls Redis for this decision.
|
||||
*/
|
||||
export function sendAutoAcceptConfirmation(toolCallId: string): void {
|
||||
fetch(COPILOT_CONFIRM_API_PATH, {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({ toolCallId, status: 'accepted' }),
|
||||
}).catch((error) => {
|
||||
logger.warn('Failed to send auto-accept confirmation', {
|
||||
toolCallId,
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
function writeActiveStreamToStorage(info: CopilotStreamInfo | null): void {
|
||||
if (typeof window === 'undefined') return
|
||||
try {
|
||||
if (!info) {
|
||||
window.sessionStorage.removeItem(STREAM_STORAGE_KEY)
|
||||
return
|
||||
}
|
||||
window.sessionStorage.setItem(STREAM_STORAGE_KEY, JSON.stringify(info))
|
||||
} catch (error) {
|
||||
logger.warn('Failed to write active stream to storage', {
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
type StoreSet = (
|
||||
partial: Partial<CopilotStore> | ((state: CopilotStore) => Partial<CopilotStore>)
|
||||
) => void
|
||||
|
||||
export type SSEHandler = (
|
||||
data: SSEEvent,
|
||||
context: ClientStreamingContext,
|
||||
get: () => CopilotStore,
|
||||
set: StoreSet
|
||||
) => Promise<void> | void
|
||||
|
||||
const streamingUpdateQueue = new Map<string, ClientStreamingContext>()
|
||||
let streamingUpdateRAF: number | null = null
|
||||
let lastBatchTime = 0
|
||||
|
||||
export function stopStreamingUpdates() {
|
||||
if (streamingUpdateRAF !== null) {
|
||||
cancelAnimationFrame(streamingUpdateRAF)
|
||||
streamingUpdateRAF = null
|
||||
}
|
||||
streamingUpdateQueue.clear()
|
||||
}
|
||||
|
||||
function createOptimizedContentBlocks(contentBlocks: ClientContentBlock[]): ClientContentBlock[] {
|
||||
const result: ClientContentBlock[] = new Array(contentBlocks.length)
|
||||
for (let i = 0; i < contentBlocks.length; i++) {
|
||||
const block = contentBlocks[i]
|
||||
result[i] = { ...block }
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
export function flushStreamingUpdates(set: StoreSet) {
|
||||
if (streamingUpdateRAF !== null) {
|
||||
cancelAnimationFrame(streamingUpdateRAF)
|
||||
streamingUpdateRAF = null
|
||||
}
|
||||
if (streamingUpdateQueue.size === 0) return
|
||||
|
||||
const updates = new Map(streamingUpdateQueue)
|
||||
streamingUpdateQueue.clear()
|
||||
|
||||
set((state: CopilotStore) => {
|
||||
if (updates.size === 0) return state
|
||||
return {
|
||||
messages: state.messages.map((msg) => {
|
||||
const update = updates.get(msg.id)
|
||||
if (update) {
|
||||
return {
|
||||
...msg,
|
||||
content: '',
|
||||
contentBlocks:
|
||||
update.contentBlocks.length > 0
|
||||
? createOptimizedContentBlocks(update.contentBlocks)
|
||||
: [],
|
||||
}
|
||||
}
|
||||
return msg
|
||||
}),
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
/**
 * Queues a streaming context update for `context.messageId` and schedules a
 * batched store write. Batches coalesce on requestAnimationFrame; a batch is
 * scheduled immediately when the queue is large or too much time has passed
 * since the last batch, otherwise after a short delay.
 *
 * NOTE(review): the setTimeout path does not record a pending handle, so
 * calls arriving during the delay (while streamingUpdateRAF is still null)
 * can schedule additional timers. Extra timers are mostly harmless — they
 * drain an empty queue — but confirm this is intended.
 */
export function updateStreamingMessage(set: StoreSet, context: ClientStreamingContext) {
  if (context.suppressStreamingUpdates) return
  const now = performance.now()
  // Later updates for the same message replace earlier queued ones.
  streamingUpdateQueue.set(context.messageId, context)
  const timeSinceLastBatch = now - lastBatchTime
  const shouldFlushImmediately =
    streamingUpdateQueue.size >= MAX_QUEUE_SIZE || timeSinceLastBatch > MAX_BATCH_INTERVAL

  if (streamingUpdateRAF === null) {
    const scheduleUpdate = () => {
      streamingUpdateRAF = requestAnimationFrame(() => {
        // Snapshot the queue before mutating store state.
        const updates = new Map(streamingUpdateQueue)
        streamingUpdateQueue.clear()
        streamingUpdateRAF = null
        lastBatchTime = performance.now()
        set((state: CopilotStore) => {
          if (updates.size === 0) return state
          const messages = state.messages
          const lastMessage = messages[messages.length - 1]
          const lastMessageUpdate = lastMessage ? updates.get(lastMessage.id) : null
          // Fast path: the common case of a single update targeting the
          // newest message avoids remapping the whole messages array.
          if (updates.size === 1 && lastMessageUpdate) {
            const newMessages = [...messages]
            newMessages[messages.length - 1] = {
              ...lastMessage,
              content: '',
              contentBlocks:
                lastMessageUpdate.contentBlocks.length > 0
                  ? createOptimizedContentBlocks(lastMessageUpdate.contentBlocks)
                  : [],
            }
            return { messages: newMessages }
          }
          // General path: apply each queued update to its matching message.
          return {
            messages: messages.map((msg) => {
              const update = updates.get(msg.id)
              if (update) {
                return {
                  ...msg,
                  content: '',
                  contentBlocks:
                    update.contentBlocks.length > 0
                      ? createOptimizedContentBlocks(update.contentBlocks)
                      : [],
                }
              }
              return msg
            }),
          }
        })
      })
    }
    if (shouldFlushImmediately) scheduleUpdate()
    else setTimeout(scheduleUpdate, Math.max(0, MIN_BATCH_INTERVAL - timeSinceLastBatch))
  }
}
|
||||
|
||||
export function upsertToolCallBlock(context: ClientStreamingContext, toolCall: CopilotToolCall) {
|
||||
let found = false
|
||||
for (let i = 0; i < context.contentBlocks.length; i++) {
|
||||
const b = context.contentBlocks[i]
|
||||
if (b.type === 'tool_call' && b.toolCall?.id === toolCall.id) {
|
||||
context.contentBlocks[i] = { ...b, toolCall }
|
||||
found = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if (!found) {
|
||||
context.contentBlocks.push({ type: 'tool_call', toolCall, timestamp: Date.now() })
|
||||
}
|
||||
}
|
||||
|
||||
function stripThinkingTags(text: string): string {
|
||||
return text.replace(/<\/?thinking[^>]*>/gi, '').replace(/<\/?thinking[^&]*>/gi, '')
|
||||
}
|
||||
|
||||
function appendThinkingContent(context: ClientStreamingContext, text: string) {
|
||||
if (!text) return
|
||||
const cleanedText = stripThinkingTags(text)
|
||||
if (!cleanedText) return
|
||||
if (context.currentThinkingBlock) {
|
||||
context.currentThinkingBlock.content += cleanedText
|
||||
} else {
|
||||
const newBlock: ClientContentBlock = {
|
||||
type: 'thinking',
|
||||
content: cleanedText,
|
||||
timestamp: Date.now(),
|
||||
startTime: Date.now(),
|
||||
}
|
||||
context.currentThinkingBlock = newBlock
|
||||
context.contentBlocks.push(newBlock)
|
||||
}
|
||||
context.isInThinkingBlock = true
|
||||
context.currentTextBlock = null
|
||||
}
|
||||
|
||||
export const sseHandlers: Record<string, SSEHandler> = {
|
||||
chat_id: async (data, context, get, set) => {
|
||||
context.newChatId = data.chatId
|
||||
const { currentChat, activeStream } = get()
|
||||
if (!currentChat && context.newChatId) {
|
||||
await get().handleNewChatCreation(context.newChatId)
|
||||
}
|
||||
if (activeStream && context.newChatId && !activeStream.chatId) {
|
||||
const updatedStream = { ...activeStream, chatId: context.newChatId }
|
||||
set({ activeStream: updatedStream })
|
||||
writeActiveStreamToStorage(updatedStream)
|
||||
}
|
||||
},
|
||||
title_updated: (_data, _context, get, set) => {
|
||||
const title = _data.title
|
||||
if (!title) return
|
||||
const { currentChat, chats } = get()
|
||||
if (currentChat) {
|
||||
set({
|
||||
currentChat: { ...currentChat, title },
|
||||
chats: chats.map((c) => (c.id === currentChat.id ? { ...c, title } : c)),
|
||||
})
|
||||
}
|
||||
},
|
||||
  // Handles a tool_result SSE event: resolves the tool call's terminal state
  // (success / rejected / error), mirrors it into both the toolCallsById map
  // and the streaming content blocks, and runs per-tool side effects
  // (todo status, workflow diff, deployment status, store reloads).
  tool_result: (data, context, get, set) => {
    try {
      const eventData = asRecord(data?.data)
      // The id may arrive at the top level or nested under data.data.
      const toolCallId: string | undefined =
        data?.toolCallId || (eventData.id as string | undefined)
      const success: boolean | undefined = data?.success
      const failedDependency: boolean = data?.failedDependency === true
      const resultObj = asRecord(data?.result)
      const skipped: boolean = resultObj.skipped === true
      if (!toolCallId) return
      const { toolCallsById } = get()
      const current = toolCallsById[toolCallId]
      if (current) {
        // Terminal/review/background states are never overwritten by a result.
        if (
          isRejectedState(current.state) ||
          isReviewState(current.state) ||
          isBackgroundState(current.state)
        ) {
          return
        }
        // failedDependency and skipped both map to "rejected" rather than error.
        const targetState = success
          ? ClientToolCallState.success
          : failedDependency || skipped
            ? ClientToolCallState.rejected
            : ClientToolCallState.error
        const updatedMap = { ...toolCallsById }
        updatedMap[toolCallId] = {
          ...current,
          state: targetState,
          display: resolveToolDisplay(current.name, targetState, current.id, current.params),
        }
        set({ toolCallsById: updatedMap })

        // checkoff_todo: mark the referenced plan todo as completed.
        if (targetState === ClientToolCallState.success && current.name === 'checkoff_todo') {
          try {
            const result = asRecord(data?.result) || asRecord(eventData.result)
            const input = asRecord(current.params || current.input)
            // The todo id may live in the tool input or the tool result.
            const todoId = (input.id || input.todoId || result.id || result.todoId) as
              | string
              | undefined
            if (todoId) {
              get().updatePlanTodoStatus(todoId, 'completed')
            }
          } catch (error) {
            logger.warn('Failed to process checkoff_todo tool result', {
              error: error instanceof Error ? error.message : String(error),
              toolCallId,
            })
          }
        }

        // mark_todo_in_progress: move the referenced plan todo to executing.
        if (
          targetState === ClientToolCallState.success &&
          current.name === 'mark_todo_in_progress'
        ) {
          try {
            const result = asRecord(data?.result) || asRecord(eventData.result)
            const input = asRecord(current.params || current.input)
            const todoId = (input.id || input.todoId || result.id || result.todoId) as
              | string
              | undefined
            if (todoId) {
              get().updatePlanTodoStatus(todoId, 'executing')
            }
          } catch (error) {
            logger.warn('Failed to process mark_todo_in_progress tool result', {
              error: error instanceof Error ? error.message : String(error),
              toolCallId,
            })
          }
        }

        // edit_workflow: hand the proposed workflow state to the diff store
        // (runs regardless of targetState — only the payload's presence matters).
        if (current.name === 'edit_workflow') {
          try {
            const resultPayload = asRecord(
              data?.result || eventData.result || eventData.data || data?.data
            )
            const workflowState = asRecord(resultPayload?.workflowState)
            const hasWorkflowState = !!resultPayload?.workflowState
            logger.info('[SSE] edit_workflow result received', {
              hasWorkflowState,
              blockCount: hasWorkflowState ? Object.keys(workflowState.blocks ?? {}).length : 0,
              edgeCount: Array.isArray(workflowState.edges) ? workflowState.edges.length : 0,
            })
            if (hasWorkflowState) {
              const diffStore = useWorkflowDiffStore.getState()
              // Fire-and-forget; failures are logged, not surfaced.
              diffStore
                .setProposedChanges(resultPayload.workflowState as WorkflowState)
                .catch((err) => {
                  logger.error('[SSE] Failed to apply edit_workflow diff', {
                    error: err instanceof Error ? err.message : String(err),
                  })
                })
            }
          } catch (err) {
            logger.error('[SSE] edit_workflow result handling failed', {
              error: err instanceof Error ? err.message : String(err),
            })
          }
        }

        // Deploy tools: update deployment status in workflow registry
        if (
          targetState === ClientToolCallState.success &&
          (current.name === 'deploy_api' ||
            current.name === 'deploy_chat' ||
            current.name === 'deploy_mcp' ||
            current.name === 'redeploy')
        ) {
          try {
            const resultPayload = asRecord(
              data?.result || eventData.result || eventData.data || data?.data
            )
            const input = asRecord(current.params)
            // Prefer ids from the result, then the tool input, then the
            // currently active workflow.
            const workflowId =
              (resultPayload?.workflowId as string) ||
              (input?.workflowId as string) ||
              useWorkflowRegistry.getState().activeWorkflowId
            const isDeployed = resultPayload?.isDeployed !== false
            if (workflowId) {
              useWorkflowRegistry
                .getState()
                .setDeploymentStatus(workflowId, isDeployed, isDeployed ? new Date() : undefined)
              logger.info('[SSE] Updated deployment status from tool result', {
                toolName: current.name,
                workflowId,
                isDeployed,
              })
            }
          } catch (err) {
            logger.warn('[SSE] Failed to hydrate deployment status', {
              error: err instanceof Error ? err.message : String(err),
            })
          }
        }

        // Environment variables: reload store after successful set
        if (
          targetState === ClientToolCallState.success &&
          current.name === 'set_environment_variables'
        ) {
          try {
            useEnvironmentStore.getState().loadEnvironmentVariables()
            logger.info('[SSE] Triggered environment variables reload')
          } catch (err) {
            logger.warn('[SSE] Failed to reload environment variables', {
              error: err instanceof Error ? err.message : String(err),
            })
          }
        }

        // Workflow variables: reload store after successful set
        if (
          targetState === ClientToolCallState.success &&
          current.name === 'set_global_workflow_variables'
        ) {
          try {
            const input = asRecord(current.params)
            const workflowId =
              (input?.workflowId as string) || useWorkflowRegistry.getState().activeWorkflowId
            if (workflowId) {
              useVariablesStore.getState().loadForWorkflow(workflowId)
              logger.info('[SSE] Triggered workflow variables reload', { workflowId })
            }
          } catch (err) {
            logger.warn('[SSE] Failed to reload workflow variables', {
              error: err instanceof Error ? err.message : String(err),
            })
          }
        }

        // Generate API key: update deployment status with the new key
        if (targetState === ClientToolCallState.success && current.name === 'generate_api_key') {
          try {
            const resultPayload = asRecord(
              data?.result || eventData.result || eventData.data || data?.data
            )
            const input = asRecord(current.params)
            const workflowId =
              (input?.workflowId as string) || useWorkflowRegistry.getState().activeWorkflowId
            const apiKey = (resultPayload?.apiKey || resultPayload?.key) as string | undefined
            if (workflowId) {
              // Preserve the existing deployed/deployedAt values; only the
              // API key is new information here.
              const existingStatus = useWorkflowRegistry
                .getState()
                .getWorkflowDeploymentStatus(workflowId)
              useWorkflowRegistry
                .getState()
                .setDeploymentStatus(
                  workflowId,
                  existingStatus?.isDeployed ?? false,
                  existingStatus?.deployedAt,
                  apiKey
                )
              logger.info('[SSE] Updated deployment status with API key', {
                workflowId,
                hasKey: !!apiKey,
              })
            }
          } catch (err) {
            logger.warn('[SSE] Failed to hydrate API key status', {
              error: err instanceof Error ? err.message : String(err),
            })
          }
        }
      }

      // Mirror the terminal state into the streaming content block so the
      // in-message tool chip matches the toolCallsById map.
      for (let i = 0; i < context.contentBlocks.length; i++) {
        const b = context.contentBlocks[i]
        if (b?.type === 'tool_call' && b?.toolCall?.id === toolCallId) {
          if (
            isRejectedState(b.toolCall?.state) ||
            isReviewState(b.toolCall?.state) ||
            isBackgroundState(b.toolCall?.state)
          )
            break
          const targetState = success
            ? ClientToolCallState.success
            : failedDependency || skipped
              ? ClientToolCallState.rejected
              : ClientToolCallState.error
          context.contentBlocks[i] = {
            ...b,
            toolCall: {
              ...b.toolCall,
              state: targetState,
              display: resolveToolDisplay(
                b.toolCall?.name,
                targetState,
                toolCallId,
                b.toolCall?.params
              ),
            },
          }
          break
        }
      }
      updateStreamingMessage(set, context)
    } catch (error) {
      logger.warn('Failed to process tool_result SSE event', {
        error: error instanceof Error ? error.message : String(error),
      })
    }
  },
|
||||
  // Handles a tool_error SSE event: moves the tool call to rejected (when the
  // failure was a dependency failure) or error, in both the toolCallsById map
  // and the matching streaming content block.
  tool_error: (data, context, get, set) => {
    try {
      const errorData = asRecord(data?.data)
      // The id may arrive at the top level or nested under data.data.
      const toolCallId: string | undefined =
        data?.toolCallId || (errorData.id as string | undefined)
      const failedDependency: boolean = data?.failedDependency === true
      if (!toolCallId) return
      const { toolCallsById } = get()
      const current = toolCallsById[toolCallId]
      if (current) {
        // Terminal/review/background states are never overwritten by an error.
        if (
          isRejectedState(current.state) ||
          isReviewState(current.state) ||
          isBackgroundState(current.state)
        ) {
          return
        }
        const targetState = failedDependency
          ? ClientToolCallState.rejected
          : ClientToolCallState.error
        const updatedMap = { ...toolCallsById }
        updatedMap[toolCallId] = {
          ...current,
          state: targetState,
          display: resolveToolDisplay(current.name, targetState, current.id, current.params),
        }
        set({ toolCallsById: updatedMap })
      }
      // Mirror the state into the streaming content block for this tool call.
      for (let i = 0; i < context.contentBlocks.length; i++) {
        const b = context.contentBlocks[i]
        if (b?.type === 'tool_call' && b?.toolCall?.id === toolCallId) {
          if (
            isRejectedState(b.toolCall?.state) ||
            isReviewState(b.toolCall?.state) ||
            isBackgroundState(b.toolCall?.state)
          )
            break
          const targetState = failedDependency
            ? ClientToolCallState.rejected
            : ClientToolCallState.error
          context.contentBlocks[i] = {
            ...b,
            toolCall: {
              ...b.toolCall,
              state: targetState,
              display: resolveToolDisplay(
                b.toolCall?.name,
                targetState,
                toolCallId,
                b.toolCall?.params
              ),
            },
          }
          break
        }
      }
      updateStreamingMessage(set, context)
    } catch (error) {
      logger.warn('Failed to process tool_error SSE event', {
        error: error instanceof Error ? error.message : String(error),
      })
    }
  },
|
||||
tool_generating: (data, context, get, set) => {
|
||||
const { toolCallId, toolName } = data
|
||||
if (!toolCallId || !toolName) return
|
||||
const { toolCallsById } = get()
|
||||
|
||||
if (!toolCallsById[toolCallId]) {
|
||||
const isAutoAllowed = get().isToolAutoAllowed(toolName)
|
||||
const initialState = isAutoAllowed
|
||||
? ClientToolCallState.executing
|
||||
: ClientToolCallState.pending
|
||||
const tc: CopilotToolCall = {
|
||||
id: toolCallId,
|
||||
name: toolName,
|
||||
state: initialState,
|
||||
display: resolveToolDisplay(toolName, initialState, toolCallId),
|
||||
}
|
||||
const updated = { ...toolCallsById, [toolCallId]: tc }
|
||||
set({ toolCallsById: updated })
|
||||
logger.info('[toolCallsById] map updated', updated)
|
||||
|
||||
upsertToolCallBlock(context, tc)
|
||||
updateStreamingMessage(set, context)
|
||||
}
|
||||
},
|
||||
  // Handles a tool_call SSE event (possibly partial): merges the call into
  // toolCallsById and the content blocks, then — for complete events only —
  // auto-confirms allowed tools, kicks off client-side run-tool execution,
  // and dispatches the OAuth connect modal when requested.
  tool_call: (data, context, get, set) => {
    const toolData = asRecord(data?.data)
    // id/name may arrive nested under data.data or at the top level.
    const id: string | undefined = (toolData.id as string | undefined) || data?.toolCallId
    const name: string | undefined = (toolData.name as string | undefined) || data?.toolName
    if (!id) return
    const args = toolData.arguments as Record<string, unknown> | undefined
    const isPartial = toolData.partial === true
    const { toolCallsById } = get()

    const existing = toolCallsById[id]
    const toolName = name || existing?.name || 'unknown_tool'
    const isAutoAllowed = get().isToolAutoAllowed(toolName)
    let initialState = isAutoAllowed ? ClientToolCallState.executing : ClientToolCallState.pending

    // Avoid flickering back to pending on partial/duplicate events once a tool is executing.
    if (
      existing?.state === ClientToolCallState.executing &&
      initialState === ClientToolCallState.pending
    ) {
      initialState = ClientToolCallState.executing
    }

    // Merge onto the existing entry when present; args only overwrite params
    // when the event actually carried arguments.
    const next: CopilotToolCall = existing
      ? {
          ...existing,
          name: toolName,
          state: initialState,
          ...(args ? { params: args } : {}),
          display: resolveToolDisplay(toolName, initialState, id, args || existing.params),
        }
      : {
          id,
          name: toolName,
          state: initialState,
          ...(args ? { params: args } : {}),
          display: resolveToolDisplay(toolName, initialState, id, args),
        }
    const updated = { ...toolCallsById, [id]: next }
    set({ toolCallsById: updated })
    logger.info(`[toolCallsById] → ${initialState}`, { id, name: toolName, params: args })

    upsertToolCallBlock(context, next)
    updateStreamingMessage(set, context)

    // Side effects below only run for the final (non-partial) event.
    if (isPartial) {
      return
    }

    // Auto-allowed tools: send confirmation to the server so it can proceed
    // without waiting for the user to click "Allow".
    if (isAutoAllowed) {
      sendAutoAcceptConfirmation(id)
    }

    // Client-executable run tools: execute on the client for real-time feedback
    // (block pulsing, console logs, stop button). The server defers execution
    // for these tools in interactive mode; the client reports back via mark-complete.
    if (
      CLIENT_EXECUTABLE_RUN_TOOLS.has(toolName) &&
      initialState === ClientToolCallState.executing
    ) {
      executeRunToolOnClient(id, toolName, args || existing?.params || {})
    }

    // OAuth: dispatch event to open the OAuth connect modal
    if (toolName === 'oauth_request_access' && args && typeof window !== 'undefined') {
      try {
        // Args may be camelCase or snake_case depending on the producer.
        window.dispatchEvent(
          new CustomEvent('open-oauth-connect', {
            detail: {
              providerName: (args.providerName || args.provider_name || '') as string,
              serviceId: (args.serviceId || args.service_id || '') as string,
              providerId: (args.providerId || args.provider_id || '') as string,
              requiredScopes: (args.requiredScopes || args.required_scopes || []) as string[],
              newScopes: (args.newScopes || args.new_scopes || []) as string[],
            },
          })
        )
        logger.info('[SSE] Dispatched OAuth connect event', {
          providerId: args.providerId || args.provider_id,
          providerName: args.providerName || args.provider_name,
        })
      } catch (err) {
        logger.warn('[SSE] Failed to dispatch OAuth connect event', {
          error: err instanceof Error ? err.message : String(err),
        })
      }
    }

    return
  },
|
||||
reasoning: (data, context, _get, set) => {
|
||||
const phase = (data && (data.phase || data?.data?.phase)) as string | undefined
|
||||
if (phase === 'start') {
|
||||
beginThinkingBlock(context)
|
||||
updateStreamingMessage(set, context)
|
||||
return
|
||||
}
|
||||
if (phase === 'end') {
|
||||
finalizeThinkingBlock(context)
|
||||
updateStreamingMessage(set, context)
|
||||
return
|
||||
}
|
||||
const chunk: string = typeof data?.data === 'string' ? data.data : data?.content || ''
|
||||
if (!chunk) return
|
||||
appendThinkingContent(context, chunk)
|
||||
updateStreamingMessage(set, context)
|
||||
},
|
||||
  // Incremental parser for streamed assistant text. Buffers partial input in
  // context.pendingContent and walks it, routing spans into plain text,
  // <thinking> blocks, <design_workflow> plan content, and inline
  // <marktodo>/<checkofftodo> directives (which update todo status and are
  // stripped from the visible text). A trailing partial tag is held back in
  // the buffer until more data arrives.
  content: (data, context, get, set) => {
    if (!data.data) return
    context.pendingContent += data.data

    let contentToProcess = context.pendingContent
    let hasProcessedContent = false

    const thinkingStartRegex = /<thinking>/
    const thinkingEndRegex = /<\/thinking>/
    const designWorkflowStartRegex = /<design_workflow>/
    const designWorkflowEndRegex = /<\/design_workflow>/

    // Splits off a trailing '<...' fragment that could be the start of one of
    // `tags`, so it can be buffered instead of rendered prematurely.
    const splitTrailingPartialTag = (
      text: string,
      tags: string[]
    ): { text: string; remaining: string } => {
      const partialIndex = text.lastIndexOf('<')
      if (partialIndex < 0) {
        return { text, remaining: '' }
      }
      const possibleTag = text.substring(partialIndex)
      const matchesTagStart = tags.some((tag) => tag.startsWith(possibleTag))
      if (!matchesTagStart) {
        return { text, remaining: '' }
      }
      return {
        text: text.substring(0, partialIndex),
        remaining: possibleTag,
      }
    }

    while (contentToProcess.length > 0) {
      // Inside <design_workflow>: accumulate into designWorkflowContent and
      // stream it to the plan panel until the closing tag arrives.
      if (context.isInDesignWorkflowBlock) {
        const endMatch = designWorkflowEndRegex.exec(contentToProcess)
        if (endMatch) {
          const designContent = contentToProcess.substring(0, endMatch.index)
          context.designWorkflowContent += designContent
          context.isInDesignWorkflowBlock = false

          logger.info('[design_workflow] Tag complete, setting plan content', {
            contentLength: context.designWorkflowContent.length,
          })
          set({ streamingPlanContent: context.designWorkflowContent })

          contentToProcess = contentToProcess.substring(endMatch.index + endMatch[0].length)
          hasProcessedContent = true
        } else {
          const { text, remaining } = splitTrailingPartialTag(contentToProcess, [
            '</design_workflow>',
          ])
          context.designWorkflowContent += text

          set({ streamingPlanContent: context.designWorkflowContent })

          contentToProcess = remaining
          hasProcessedContent = true
          if (remaining) {
            break
          }
        }
        continue
      }

      // Plain-text mode: look for design_workflow openings and todo tags.
      if (!context.isInThinkingBlock && !context.isInDesignWorkflowBlock) {
        const designStartMatch = designWorkflowStartRegex.exec(contentToProcess)
        if (designStartMatch) {
          const textBeforeDesign = contentToProcess.substring(0, designStartMatch.index)
          if (textBeforeDesign) {
            appendTextBlock(context, textBeforeDesign)
            hasProcessedContent = true
          }
          context.isInDesignWorkflowBlock = true
          context.designWorkflowContent = ''
          contentToProcess = contentToProcess.substring(
            designStartMatch.index + designStartMatch[0].length
          )
          hasProcessedContent = true
          continue
        }

        const nextMarkIndex = contentToProcess.indexOf('<marktodo>')
        const nextCheckIndex = contentToProcess.indexOf('<checkofftodo>')
        const hasMark = nextMarkIndex >= 0
        const hasCheck = nextCheckIndex >= 0

        // Earliest of the two todo tags, or -1 when neither is present.
        const nextTagIndex =
          hasMark && hasCheck
            ? Math.min(nextMarkIndex, nextCheckIndex)
            : hasMark
              ? nextMarkIndex
              : hasCheck
                ? nextCheckIndex
                : -1

        if (nextTagIndex >= 0) {
          const isMarkTodo = hasMark && nextMarkIndex === nextTagIndex
          const tagStart = isMarkTodo ? '<marktodo>' : '<checkofftodo>'
          const tagEnd = isMarkTodo ? '</marktodo>' : '</checkofftodo>'
          const closingIndex = contentToProcess.indexOf(tagEnd, nextTagIndex + tagStart.length)

          // Closing tag not streamed yet: wait for more data.
          if (closingIndex === -1) {
            break
          }

          const todoId = contentToProcess
            .substring(nextTagIndex + tagStart.length, closingIndex)
            .trim()
          logger.info(
            isMarkTodo ? '[TODO] Detected marktodo tag' : '[TODO] Detected checkofftodo tag',
            { todoId }
          )

          if (todoId) {
            try {
              get().updatePlanTodoStatus(todoId, isMarkTodo ? 'executing' : 'completed')
              logger.info(
                isMarkTodo
                  ? '[TODO] Successfully marked todo in progress'
                  : '[TODO] Successfully checked off todo',
                { todoId }
              )
            } catch (e) {
              logger.error(
                isMarkTodo
                  ? '[TODO] Failed to mark todo in progress'
                  : '[TODO] Failed to checkoff todo',
                { todoId, error: e }
              )
            }
          } else {
            logger.warn('[TODO] Empty todoId extracted from todo tag', { tagType: tagStart })
          }

          // Excise the tag, collapsing the surrounding newlines so removing
          // the directive does not leave a blank gap in the rendered text.
          let beforeTag = contentToProcess.substring(0, nextTagIndex)
          let afterTag = contentToProcess.substring(closingIndex + tagEnd.length)

          const hadNewlineBefore = /(\r?\n)+$/.test(beforeTag)
          const hadNewlineAfter = /^(\r?\n)+/.test(afterTag)

          beforeTag = beforeTag.replace(/(\r?\n)+$/, '')
          afterTag = afterTag.replace(/^(\r?\n)+/, '')

          contentToProcess =
            beforeTag + (hadNewlineBefore && hadNewlineAfter ? '\n' : '') + afterTag
          context.currentTextBlock = null
          hasProcessedContent = true
          continue
        }
      }

      if (context.isInThinkingBlock) {
        // Inside <thinking>: consume up to the closing tag, or buffer a
        // trailing partial '</thinking' fragment.
        const endMatch = thinkingEndRegex.exec(contentToProcess)
        if (endMatch) {
          const thinkingContent = contentToProcess.substring(0, endMatch.index)
          appendThinkingContent(context, thinkingContent)
          finalizeThinkingBlock(context)
          contentToProcess = contentToProcess.substring(endMatch.index + endMatch[0].length)
          hasProcessedContent = true
        } else {
          const { text, remaining } = splitTrailingPartialTag(contentToProcess, ['</thinking>'])
          if (text) {
            appendThinkingContent(context, text)
            hasProcessedContent = true
          }
          contentToProcess = remaining
          if (remaining) {
            break
          }
        }
      } else {
        const startMatch = thinkingStartRegex.exec(contentToProcess)
        if (startMatch) {
          const textBeforeThinking = contentToProcess.substring(0, startMatch.index)
          if (textBeforeThinking) {
            appendTextBlock(context, textBeforeThinking)
            hasProcessedContent = true
          }
          context.isInThinkingBlock = true
          context.currentTextBlock = null
          contentToProcess = contentToProcess.substring(startMatch.index + startMatch[0].length)
          hasProcessedContent = true
        } else {
          // No tag found: emit everything except a possible trailing partial
          // tag near the end of the buffer (held back, max 50 chars).
          let partialTagIndex = contentToProcess.lastIndexOf('<')

          // NOTE(review): lastIndexOf('<') is always >= lastIndexOf('<marktodo')
          // and lastIndexOf('<checkofftodo'), so these two comparisons can
          // never raise partialTagIndex — confirm whether they were meant to
          // lower it instead.
          const partialMarkTodo = contentToProcess.lastIndexOf('<marktodo')
          const partialCheckoffTodo = contentToProcess.lastIndexOf('<checkofftodo')

          if (partialMarkTodo > partialTagIndex) {
            partialTagIndex = partialMarkTodo
          }
          if (partialCheckoffTodo > partialTagIndex) {
            partialTagIndex = partialCheckoffTodo
          }

          let textToAdd = contentToProcess
          let remaining = ''
          if (partialTagIndex >= 0 && partialTagIndex > contentToProcess.length - 50) {
            textToAdd = contentToProcess.substring(0, partialTagIndex)
            remaining = contentToProcess.substring(partialTagIndex)
          }
          if (textToAdd) {
            appendTextBlock(context, textToAdd)
            hasProcessedContent = true
          }
          contentToProcess = remaining
          break
        }
      }
    }

    // Whatever was not consumed stays buffered for the next event.
    context.pendingContent = contentToProcess
    if (hasProcessedContent) {
      updateStreamingMessage(set, context)
    }
  },
|
||||
done: (_data, context) => {
|
||||
logger.info('[SSE] DONE EVENT RECEIVED', {
|
||||
doneEventCount: context.doneEventCount,
|
||||
data: _data,
|
||||
})
|
||||
context.doneEventCount++
|
||||
if (context.doneEventCount >= 1) {
|
||||
logger.info('[SSE] Setting streamComplete = true, stream will terminate')
|
||||
context.streamComplete = true
|
||||
}
|
||||
},
|
||||
error: (data, context, _get, set) => {
|
||||
logger.error('Stream error:', data.error)
|
||||
set((state: CopilotStore) => ({
|
||||
messages: state.messages.map((msg) =>
|
||||
msg.id === context.messageId
|
||||
? {
|
||||
...msg,
|
||||
content: context.accumulatedContent || 'An error occurred.',
|
||||
error: data.error,
|
||||
}
|
||||
: msg
|
||||
),
|
||||
}))
|
||||
context.streamComplete = true
|
||||
},
|
||||
stream_end: (_data, context, _get, set) => {
|
||||
if (context.pendingContent) {
|
||||
if (context.isInThinkingBlock && context.currentThinkingBlock) {
|
||||
appendThinkingContent(context, context.pendingContent)
|
||||
} else if (context.pendingContent.trim()) {
|
||||
appendTextBlock(context, context.pendingContent)
|
||||
}
|
||||
context.pendingContent = ''
|
||||
}
|
||||
finalizeThinkingBlock(context)
|
||||
updateStreamingMessage(set, context)
|
||||
},
|
||||
  // Fallback handler for unrecognized SSE event types — intentionally a no-op.
  default: () => {},
|
||||
}
|
||||
3
apps/sim/lib/copilot/client-sse/index.ts
Normal file
3
apps/sim/lib/copilot/client-sse/index.ts
Normal file
@@ -0,0 +1,3 @@
|
||||
export type { SSEHandler } from './handlers'
|
||||
export { sseHandlers } from './handlers'
|
||||
export { applySseEvent, subAgentSSEHandlers } from './subagent-handlers'
|
||||
221
apps/sim/lib/copilot/client-sse/run-tool-execution.ts
Normal file
221
apps/sim/lib/copilot/client-sse/run-tool-execution.ts
Normal file
@@ -0,0 +1,221 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { v4 as uuidv4 } from 'uuid'
|
||||
import { COPILOT_CONFIRM_API_PATH } from '@/lib/copilot/constants'
|
||||
import { resolveToolDisplay } from '@/lib/copilot/store-utils'
|
||||
import { ClientToolCallState } from '@/lib/copilot/tools/client/tool-display-registry'
|
||||
import { executeWorkflowWithFullLogging } from '@/app/workspace/[workspaceId]/w/[workflowId]/utils/workflow-execution-utils'
|
||||
import { useExecutionStore } from '@/stores/execution/store'
|
||||
import { useCopilotStore } from '@/stores/panel/copilot/store'
|
||||
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
||||
|
||||
const logger = createLogger('CopilotRunToolExecution')
|
||||
|
||||
/**
 * Run tools that execute client-side for real-time feedback
 * (block pulsing, logs, stop button, etc.).
 *
 * Membership is checked by the tool_call SSE handler; tools in this set are
 * executed on the client via executeRunToolOnClient instead of the server.
 */
export const CLIENT_EXECUTABLE_RUN_TOOLS = new Set([
  'run_workflow',
  'run_workflow_until_block',
  'run_from_block',
  'run_block',
])
|
||||
|
||||
/**
|
||||
* Execute a run tool on the client side using the streaming execute endpoint.
|
||||
* This gives full interactive feedback: block pulsing, console logs, stop button.
|
||||
*
|
||||
* Mirrors staging's RunWorkflowClientTool.handleAccept():
|
||||
* 1. Execute via executeWorkflowWithFullLogging
|
||||
* 2. Update client tool state directly (success/error)
|
||||
* 3. Report completion to server via /api/copilot/confirm (Redis),
|
||||
* where the server-side handler picks it up and tells Go
|
||||
*/
|
||||
export function executeRunToolOnClient(
|
||||
toolCallId: string,
|
||||
toolName: string,
|
||||
params: Record<string, unknown>
|
||||
): void {
|
||||
doExecuteRunTool(toolCallId, toolName, params).catch((err) => {
|
||||
logger.error('[RunTool] Unhandled error in client-side run tool execution', {
|
||||
toolCallId,
|
||||
toolName,
|
||||
error: err instanceof Error ? err.message : String(err),
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
async function doExecuteRunTool(
|
||||
toolCallId: string,
|
||||
toolName: string,
|
||||
params: Record<string, unknown>
|
||||
): Promise<void> {
|
||||
const { isExecuting, setIsExecuting } = useExecutionStore.getState()
|
||||
|
||||
if (isExecuting) {
|
||||
logger.warn('[RunTool] Execution prevented: already executing', { toolCallId, toolName })
|
||||
setToolState(toolCallId, ClientToolCallState.error)
|
||||
await reportCompletion(toolCallId, false, 'Workflow is already executing. Try again later')
|
||||
return
|
||||
}
|
||||
|
||||
const { activeWorkflowId } = useWorkflowRegistry.getState()
|
||||
if (!activeWorkflowId) {
|
||||
logger.warn('[RunTool] Execution prevented: no active workflow', { toolCallId, toolName })
|
||||
setToolState(toolCallId, ClientToolCallState.error)
|
||||
await reportCompletion(toolCallId, false, 'No active workflow found')
|
||||
return
|
||||
}
|
||||
|
||||
// Extract params for all tool types
|
||||
const workflowInput = (params.workflow_input || params.input || undefined) as
|
||||
| Record<string, unknown>
|
||||
| undefined
|
||||
|
||||
const stopAfterBlockId = (() => {
|
||||
if (toolName === 'run_workflow_until_block')
|
||||
return params.stopAfterBlockId as string | undefined
|
||||
if (toolName === 'run_block') return params.blockId as string | undefined
|
||||
return undefined
|
||||
})()
|
||||
|
||||
const runFromBlock = (() => {
|
||||
if (toolName === 'run_from_block' && params.startBlockId) {
|
||||
return {
|
||||
startBlockId: params.startBlockId as string,
|
||||
executionId: (params.executionId as string | undefined) || 'latest',
|
||||
}
|
||||
}
|
||||
if (toolName === 'run_block' && params.blockId) {
|
||||
return {
|
||||
startBlockId: params.blockId as string,
|
||||
executionId: (params.executionId as string | undefined) || 'latest',
|
||||
}
|
||||
}
|
||||
return undefined
|
||||
})()
|
||||
|
||||
setIsExecuting(true)
|
||||
const executionId = uuidv4()
|
||||
const executionStartTime = new Date().toISOString()
|
||||
|
||||
logger.info('[RunTool] Starting client-side workflow execution', {
|
||||
toolCallId,
|
||||
toolName,
|
||||
executionId,
|
||||
activeWorkflowId,
|
||||
hasInput: !!workflowInput,
|
||||
stopAfterBlockId,
|
||||
runFromBlock: runFromBlock ? { startBlockId: runFromBlock.startBlockId } : undefined,
|
||||
})
|
||||
|
||||
try {
|
||||
const result = await executeWorkflowWithFullLogging({
|
||||
workflowInput,
|
||||
executionId,
|
||||
stopAfterBlockId,
|
||||
runFromBlock,
|
||||
})
|
||||
|
||||
// Determine success (same logic as staging's RunWorkflowClientTool)
|
||||
let succeeded = true
|
||||
let errorMessage: string | undefined
|
||||
try {
|
||||
if (result && typeof result === 'object' && 'success' in (result as any)) {
|
||||
succeeded = Boolean((result as any).success)
|
||||
if (!succeeded) {
|
||||
errorMessage = (result as any)?.error || (result as any)?.output?.error
|
||||
}
|
||||
} else if (
|
||||
result &&
|
||||
typeof result === 'object' &&
|
||||
'execution' in (result as any) &&
|
||||
(result as any).execution
|
||||
) {
|
||||
succeeded = Boolean((result as any).execution.success)
|
||||
if (!succeeded) {
|
||||
errorMessage =
|
||||
(result as any).execution?.error || (result as any).execution?.output?.error
|
||||
}
|
||||
}
|
||||
} catch {}
|
||||
|
||||
if (succeeded) {
|
||||
logger.info('[RunTool] Workflow execution succeeded', { toolCallId, toolName })
|
||||
setToolState(toolCallId, ClientToolCallState.success)
|
||||
await reportCompletion(
|
||||
toolCallId,
|
||||
true,
|
||||
`Workflow execution completed. Started at: ${executionStartTime}`
|
||||
)
|
||||
} else {
|
||||
const msg = errorMessage || 'Workflow execution failed'
|
||||
logger.error('[RunTool] Workflow execution failed', { toolCallId, toolName, error: msg })
|
||||
setToolState(toolCallId, ClientToolCallState.error)
|
||||
await reportCompletion(toolCallId, false, msg)
|
||||
}
|
||||
} catch (err) {
|
||||
const msg = err instanceof Error ? err.message : String(err)
|
||||
logger.error('[RunTool] Workflow execution threw', { toolCallId, toolName, error: msg })
|
||||
setToolState(toolCallId, ClientToolCallState.error)
|
||||
await reportCompletion(toolCallId, false, msg)
|
||||
} finally {
|
||||
setIsExecuting(false)
|
||||
}
|
||||
}
|
||||
|
||||
/** Update the tool call state directly in the copilot store (like staging's setState). */
|
||||
function setToolState(toolCallId: string, state: ClientToolCallState): void {
|
||||
try {
|
||||
const store = useCopilotStore.getState()
|
||||
const current = store.toolCallsById[toolCallId]
|
||||
if (!current) return
|
||||
const updated = {
|
||||
...store.toolCallsById,
|
||||
[toolCallId]: {
|
||||
...current,
|
||||
state,
|
||||
display: resolveToolDisplay(current.name, state, toolCallId, current.params),
|
||||
},
|
||||
}
|
||||
useCopilotStore.setState({ toolCallsById: updated })
|
||||
} catch (err) {
|
||||
logger.warn('[RunTool] Failed to update tool state', {
|
||||
toolCallId,
|
||||
state,
|
||||
error: err instanceof Error ? err.message : String(err),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Report tool completion to the server via the existing /api/copilot/confirm endpoint.
|
||||
* This writes {status: 'success'|'error', message} to Redis. The server-side handler
|
||||
* is polling Redis via waitForToolCompletion() and will pick this up, then fire-and-forget
|
||||
* markToolComplete to the Go backend.
|
||||
*/
|
||||
async function reportCompletion(
|
||||
toolCallId: string,
|
||||
success: boolean,
|
||||
message?: string
|
||||
): Promise<void> {
|
||||
try {
|
||||
const res = await fetch(COPILOT_CONFIRM_API_PATH, {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({
|
||||
toolCallId,
|
||||
status: success ? 'success' : 'error',
|
||||
message: message || (success ? 'Tool completed' : 'Tool failed'),
|
||||
}),
|
||||
})
|
||||
if (!res.ok) {
|
||||
logger.warn('[RunTool] reportCompletion failed', { toolCallId, status: res.status })
|
||||
}
|
||||
} catch (err) {
|
||||
logger.error('[RunTool] reportCompletion error', {
|
||||
toolCallId,
|
||||
error: err instanceof Error ? err.message : String(err),
|
||||
})
|
||||
}
|
||||
}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user