mirror of
https://github.com/simstudioai/sim.git
synced 2026-02-11 07:04:58 -05:00
Compare commits
20 Commits
cursor/dev
...
feat/strea
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
031866e07c | ||
|
|
3d5bd003ef | ||
|
|
13a91113fd | ||
|
|
af01dce2c3 | ||
|
|
8a24b56f51 | ||
|
|
c471627ce1 | ||
|
|
f5dc180d9f | ||
|
|
78fef22d0e | ||
|
|
6d16f216c8 | ||
|
|
f8e9614c9c | ||
|
|
c5dd90e79d | ||
|
|
20b230d1aa | ||
|
|
be3cdcf981 | ||
|
|
73540e3936 | ||
|
|
e321f883b0 | ||
|
|
8b4b3af120 | ||
|
|
190f12fd77 | ||
|
|
e5d30494cb | ||
|
|
b3dbb4487f | ||
|
|
622d0cad22 |
@@ -1,4 +1,4 @@
|
|||||||
FROM oven/bun:1.3.3-alpine
|
FROM oven/bun:1.3.9-alpine
|
||||||
|
|
||||||
# Install necessary packages for development
|
# Install necessary packages for development
|
||||||
RUN apk add --no-cache \
|
RUN apk add --no-cache \
|
||||||
|
|||||||
2
.github/workflows/docs-embeddings.yml
vendored
2
.github/workflows/docs-embeddings.yml
vendored
@@ -20,7 +20,7 @@ jobs:
|
|||||||
- name: Setup Bun
|
- name: Setup Bun
|
||||||
uses: oven-sh/setup-bun@v2
|
uses: oven-sh/setup-bun@v2
|
||||||
with:
|
with:
|
||||||
bun-version: 1.3.3
|
bun-version: 1.3.9
|
||||||
|
|
||||||
- name: Setup Node
|
- name: Setup Node
|
||||||
uses: actions/setup-node@v4
|
uses: actions/setup-node@v4
|
||||||
|
|||||||
4
.github/workflows/i18n.yml
vendored
4
.github/workflows/i18n.yml
vendored
@@ -26,7 +26,7 @@ jobs:
|
|||||||
- name: Setup Bun
|
- name: Setup Bun
|
||||||
uses: oven-sh/setup-bun@v2
|
uses: oven-sh/setup-bun@v2
|
||||||
with:
|
with:
|
||||||
bun-version: 1.3.3
|
bun-version: 1.3.9
|
||||||
|
|
||||||
- name: Cache Bun dependencies
|
- name: Cache Bun dependencies
|
||||||
uses: actions/cache@v4
|
uses: actions/cache@v4
|
||||||
@@ -125,7 +125,7 @@ jobs:
|
|||||||
- name: Setup Bun
|
- name: Setup Bun
|
||||||
uses: oven-sh/setup-bun@v2
|
uses: oven-sh/setup-bun@v2
|
||||||
with:
|
with:
|
||||||
bun-version: 1.3.3
|
bun-version: 1.3.9
|
||||||
|
|
||||||
- name: Cache Bun dependencies
|
- name: Cache Bun dependencies
|
||||||
uses: actions/cache@v4
|
uses: actions/cache@v4
|
||||||
|
|||||||
2
.github/workflows/migrations.yml
vendored
2
.github/workflows/migrations.yml
vendored
@@ -19,7 +19,7 @@ jobs:
|
|||||||
- name: Setup Bun
|
- name: Setup Bun
|
||||||
uses: oven-sh/setup-bun@v2
|
uses: oven-sh/setup-bun@v2
|
||||||
with:
|
with:
|
||||||
bun-version: 1.3.3
|
bun-version: 1.3.9
|
||||||
|
|
||||||
- name: Cache Bun dependencies
|
- name: Cache Bun dependencies
|
||||||
uses: actions/cache@v4
|
uses: actions/cache@v4
|
||||||
|
|||||||
2
.github/workflows/publish-cli.yml
vendored
2
.github/workflows/publish-cli.yml
vendored
@@ -19,7 +19,7 @@ jobs:
|
|||||||
- name: Setup Bun
|
- name: Setup Bun
|
||||||
uses: oven-sh/setup-bun@v2
|
uses: oven-sh/setup-bun@v2
|
||||||
with:
|
with:
|
||||||
bun-version: 1.3.3
|
bun-version: 1.3.9
|
||||||
|
|
||||||
- name: Setup Node.js for npm publishing
|
- name: Setup Node.js for npm publishing
|
||||||
uses: actions/setup-node@v4
|
uses: actions/setup-node@v4
|
||||||
|
|||||||
2
.github/workflows/publish-ts-sdk.yml
vendored
2
.github/workflows/publish-ts-sdk.yml
vendored
@@ -19,7 +19,7 @@ jobs:
|
|||||||
- name: Setup Bun
|
- name: Setup Bun
|
||||||
uses: oven-sh/setup-bun@v2
|
uses: oven-sh/setup-bun@v2
|
||||||
with:
|
with:
|
||||||
bun-version: 1.3.3
|
bun-version: 1.3.9
|
||||||
|
|
||||||
- name: Setup Node.js for npm publishing
|
- name: Setup Node.js for npm publishing
|
||||||
uses: actions/setup-node@v4
|
uses: actions/setup-node@v4
|
||||||
|
|||||||
2
.github/workflows/test-build.yml
vendored
2
.github/workflows/test-build.yml
vendored
@@ -19,7 +19,7 @@ jobs:
|
|||||||
- name: Setup Bun
|
- name: Setup Bun
|
||||||
uses: oven-sh/setup-bun@v2
|
uses: oven-sh/setup-bun@v2
|
||||||
with:
|
with:
|
||||||
bun-version: 1.3.3
|
bun-version: 1.3.9
|
||||||
|
|
||||||
- name: Setup Node
|
- name: Setup Node
|
||||||
uses: actions/setup-node@v4
|
uses: actions/setup-node@v4
|
||||||
|
|||||||
@@ -5483,3 +5483,37 @@ export function AgentSkillsIcon(props: SVGProps<SVGSVGElement>) {
|
|||||||
</svg>
|
</svg>
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export function OnePasswordIcon(props: SVGProps<SVGSVGElement>) {
|
||||||
|
return (
|
||||||
|
<svg {...props} viewBox='0 0 48 48' xmlns='http://www.w3.org/2000/svg' fill='none'>
|
||||||
|
<circle
|
||||||
|
cx='24'
|
||||||
|
cy='24'
|
||||||
|
r='21.5'
|
||||||
|
stroke='#000000'
|
||||||
|
strokeLinecap='round'
|
||||||
|
strokeLinejoin='round'
|
||||||
|
/>
|
||||||
|
<path
|
||||||
|
d='M28.083,17.28a7.8633,7.8633,0,0,1,0,13.44'
|
||||||
|
stroke='#000000'
|
||||||
|
strokeLinecap='round'
|
||||||
|
strokeLinejoin='round'
|
||||||
|
/>
|
||||||
|
<path
|
||||||
|
d='M19.917,30.72a7.8633,7.8633,0,0,1,0-13.44'
|
||||||
|
stroke='#000000'
|
||||||
|
strokeLinecap='round'
|
||||||
|
strokeLinejoin='round'
|
||||||
|
/>
|
||||||
|
<path
|
||||||
|
d='M26.067,10.43H21.933a2.0172,2.0172,0,0,0-2.016,2.016v6.36c2.358,1.281,2.736,2.562,0,3.843V35.574a2.0169,2.0169,0,0,0,2.016,2.015h4.134a2.0169,2.0169,0,0,0,2.016-2.015V29.213c-2.358-1.281-2.736-2.562,0-3.842V12.446A2.0172,2.0172,0,0,0,26.067,10.43Z'
|
||||||
|
fill='#000000'
|
||||||
|
stroke='#000000'
|
||||||
|
strokeLinecap='round'
|
||||||
|
strokeLinejoin='round'
|
||||||
|
/>
|
||||||
|
</svg>
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|||||||
@@ -80,6 +80,7 @@ import {
|
|||||||
MySQLIcon,
|
MySQLIcon,
|
||||||
Neo4jIcon,
|
Neo4jIcon,
|
||||||
NotionIcon,
|
NotionIcon,
|
||||||
|
OnePasswordIcon,
|
||||||
OpenAIIcon,
|
OpenAIIcon,
|
||||||
OutlookIcon,
|
OutlookIcon,
|
||||||
PackageSearchIcon,
|
PackageSearchIcon,
|
||||||
@@ -214,6 +215,7 @@ export const blockTypeToIconMap: Record<string, IconComponent> = {
|
|||||||
neo4j: Neo4jIcon,
|
neo4j: Neo4jIcon,
|
||||||
notion_v2: NotionIcon,
|
notion_v2: NotionIcon,
|
||||||
onedrive: MicrosoftOneDriveIcon,
|
onedrive: MicrosoftOneDriveIcon,
|
||||||
|
onepassword: OnePasswordIcon,
|
||||||
openai: OpenAIIcon,
|
openai: OpenAIIcon,
|
||||||
outlook: OutlookIcon,
|
outlook: OutlookIcon,
|
||||||
parallel_ai: ParallelIcon,
|
parallel_ai: ParallelIcon,
|
||||||
|
|||||||
@@ -56,7 +56,7 @@ Switch between modes using the mode selector at the bottom of the input area.
|
|||||||
Select your preferred AI model using the model selector at the bottom right of the input area.
|
Select your preferred AI model using the model selector at the bottom right of the input area.
|
||||||
|
|
||||||
**Available Models:**
|
**Available Models:**
|
||||||
- Claude 4.5 Opus, Sonnet (default), Haiku
|
- Claude 4.6 Opus (default), 4.5 Opus, Sonnet, Haiku
|
||||||
- GPT 5.2 Codex, Pro
|
- GPT 5.2 Codex, Pro
|
||||||
- Gemini 3 Pro
|
- Gemini 3 Pro
|
||||||
|
|
||||||
@@ -190,3 +190,99 @@ Copilot usage is billed per token from the underlying LLM. If you reach your usa
|
|||||||
<Callout type="info">
|
<Callout type="info">
|
||||||
See the [Cost Calculation page](/execution/costs) for billing details.
|
See the [Cost Calculation page](/execution/costs) for billing details.
|
||||||
</Callout>
|
</Callout>
|
||||||
|
## Copilot MCP
|
||||||
|
|
||||||
|
You can use Copilot as an MCP server in your favorite editor or AI client. This lets you build, test, deploy, and manage Sim workflows directly from tools like Cursor, Claude Code, Claude Desktop, and VS Code.
|
||||||
|
|
||||||
|
### Generating a Copilot API Key
|
||||||
|
|
||||||
|
To connect to the Copilot MCP server, you need a **Copilot API key**:
|
||||||
|
|
||||||
|
1. Go to [sim.ai](https://sim.ai) and sign in
|
||||||
|
2. Navigate to **Settings** → **Copilot**
|
||||||
|
3. Click **Generate API Key**
|
||||||
|
4. Copy the key — it is only shown once
|
||||||
|
|
||||||
|
The key will look like `sk-sim-copilot-...`. You will use this in the configuration below.
|
||||||
|
|
||||||
|
### Cursor
|
||||||
|
|
||||||
|
Add the following to your `.cursor/mcp.json` (project-level) or global Cursor MCP settings:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"mcpServers": {
|
||||||
|
"sim-copilot": {
|
||||||
|
"url": "https://www.sim.ai/api/mcp/copilot",
|
||||||
|
"headers": {
|
||||||
|
"X-API-Key": "YOUR_COPILOT_API_KEY"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Replace `YOUR_COPILOT_API_KEY` with the key you generated above.
|
||||||
|
|
||||||
|
### Claude Code
|
||||||
|
|
||||||
|
Run the following command to add the Copilot MCP server:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
claude mcp add sim-copilot \
|
||||||
|
--transport http \
|
||||||
|
https://www.sim.ai/api/mcp/copilot \
|
||||||
|
--header "X-API-Key: YOUR_COPILOT_API_KEY"
|
||||||
|
```
|
||||||
|
|
||||||
|
Replace `YOUR_COPILOT_API_KEY` with your key.
|
||||||
|
|
||||||
|
### Claude Desktop
|
||||||
|
|
||||||
|
Claude Desktop requires [`mcp-remote`](https://www.npmjs.com/package/mcp-remote) to connect to HTTP-based MCP servers. Add the following to your Claude Desktop config file (`~/Library/Application Support/Claude/claude_desktop_config.json` on macOS):
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"mcpServers": {
|
||||||
|
"sim-copilot": {
|
||||||
|
"command": "npx",
|
||||||
|
"args": [
|
||||||
|
"-y",
|
||||||
|
"mcp-remote",
|
||||||
|
"https://www.sim.ai/api/mcp/copilot",
|
||||||
|
"--header",
|
||||||
|
"X-API-Key: YOUR_COPILOT_API_KEY"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Replace `YOUR_COPILOT_API_KEY` with your key.
|
||||||
|
|
||||||
|
### VS Code
|
||||||
|
|
||||||
|
Add the following to your VS Code `settings.json` or workspace `.vscode/settings.json`:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"mcp": {
|
||||||
|
"servers": {
|
||||||
|
"sim-copilot": {
|
||||||
|
"type": "http",
|
||||||
|
"url": "https://www.sim.ai/api/mcp/copilot",
|
||||||
|
"headers": {
|
||||||
|
"X-API-Key": "YOUR_COPILOT_API_KEY"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Replace `YOUR_COPILOT_API_KEY` with your key.
|
||||||
|
|
||||||
|
<Callout type="info">
|
||||||
|
For self-hosted deployments, replace `https://www.sim.ai` with your self-hosted Sim URL.
|
||||||
|
</Callout>
|
||||||
|
|
||||||
|
|||||||
@@ -25,6 +25,7 @@ With Airweave, you can:
|
|||||||
In Sim, the Airweave integration empowers your agents to search, summarize, and extract insights from all your organization’s data via a single tool. Use Airweave to drive rich, contextual knowledge retrieval within your workflows—whether answering questions, generating summaries, or supporting dynamic decision-making.
|
In Sim, the Airweave integration empowers your agents to search, summarize, and extract insights from all your organization’s data via a single tool. Use Airweave to drive rich, contextual knowledge retrieval within your workflows—whether answering questions, generating summaries, or supporting dynamic decision-making.
|
||||||
{/* MANUAL-CONTENT-END */}
|
{/* MANUAL-CONTENT-END */}
|
||||||
|
|
||||||
|
|
||||||
## Usage Instructions
|
## Usage Instructions
|
||||||
|
|
||||||
Search across your synced data sources using Airweave. Supports semantic search with hybrid, neural, or keyword retrieval strategies. Optionally generate AI-powered answers from search results.
|
Search across your synced data sources using Airweave. Supports semantic search with hybrid, neural, or keyword retrieval strategies. Optionally generate AI-powered answers from search results.
|
||||||
|
|||||||
@@ -43,21 +43,198 @@ Retrieve detailed information about a specific Jira issue
|
|||||||
| Parameter | Type | Required | Description |
|
| Parameter | Type | Required | Description |
|
||||||
| --------- | ---- | -------- | ----------- |
|
| --------- | ---- | -------- | ----------- |
|
||||||
| `domain` | string | Yes | Your Jira domain \(e.g., yourcompany.atlassian.net\) |
|
| `domain` | string | Yes | Your Jira domain \(e.g., yourcompany.atlassian.net\) |
|
||||||
| `projectId` | string | No | Jira project key \(e.g., PROJ\). Optional when retrieving a single issue. |
|
|
||||||
| `issueKey` | string | Yes | Jira issue key to retrieve \(e.g., PROJ-123\) |
|
| `issueKey` | string | Yes | Jira issue key to retrieve \(e.g., PROJ-123\) |
|
||||||
|
| `includeAttachments` | boolean | No | Download attachment file contents and include them as files in the output |
|
||||||
| `cloudId` | string | No | Jira Cloud ID for the instance. If not provided, it will be fetched using the domain. |
|
| `cloudId` | string | No | Jira Cloud ID for the instance. If not provided, it will be fetched using the domain. |
|
||||||
|
|
||||||
#### Output
|
#### Output
|
||||||
|
|
||||||
| Parameter | Type | Description |
|
| Parameter | Type | Description |
|
||||||
| --------- | ---- | ----------- |
|
| --------- | ---- | ----------- |
|
||||||
| `ts` | string | Timestamp of the operation |
|
| `ts` | string | ISO 8601 timestamp of the operation |
|
||||||
| `issueKey` | string | Issue key \(e.g., PROJ-123\) |
|
| `id` | string | Issue ID |
|
||||||
|
| `key` | string | Issue key \(e.g., PROJ-123\) |
|
||||||
|
| `self` | string | REST API URL for this issue |
|
||||||
| `summary` | string | Issue summary |
|
| `summary` | string | Issue summary |
|
||||||
| `description` | json | Issue description content |
|
| `description` | string | Issue description text \(extracted from ADF\) |
|
||||||
| `created` | string | Issue creation timestamp |
|
| `status` | object | Issue status |
|
||||||
| `updated` | string | Issue last updated timestamp |
|
| ↳ `id` | string | Status ID |
|
||||||
| `issue` | json | Complete issue object with all fields |
|
| ↳ `name` | string | Status name \(e.g., Open, In Progress, Done\) |
|
||||||
|
| ↳ `description` | string | Status description |
|
||||||
|
| ↳ `statusCategory` | object | Status category grouping |
|
||||||
|
| ↳ `id` | number | Status category ID |
|
||||||
|
| ↳ `key` | string | Status category key \(e.g., new, indeterminate, done\) |
|
||||||
|
| ↳ `name` | string | Status category name \(e.g., To Do, In Progress, Done\) |
|
||||||
|
| ↳ `colorName` | string | Status category color \(e.g., blue-gray, yellow, green\) |
|
||||||
|
| `statusName` | string | Issue status name \(e.g., Open, In Progress, Done\) |
|
||||||
|
| `issuetype` | object | Issue type |
|
||||||
|
| ↳ `id` | string | Issue type ID |
|
||||||
|
| ↳ `name` | string | Issue type name \(e.g., Task, Bug, Story, Epic\) |
|
||||||
|
| ↳ `description` | string | Issue type description |
|
||||||
|
| ↳ `subtask` | boolean | Whether this is a subtask type |
|
||||||
|
| ↳ `iconUrl` | string | URL to the issue type icon |
|
||||||
|
| `project` | object | Project the issue belongs to |
|
||||||
|
| ↳ `id` | string | Project ID |
|
||||||
|
| ↳ `key` | string | Project key \(e.g., PROJ\) |
|
||||||
|
| ↳ `name` | string | Project name |
|
||||||
|
| ↳ `projectTypeKey` | string | Project type key \(e.g., software, business\) |
|
||||||
|
| `priority` | object | Issue priority |
|
||||||
|
| ↳ `id` | string | Priority ID |
|
||||||
|
| ↳ `name` | string | Priority name \(e.g., Highest, High, Medium, Low, Lowest\) |
|
||||||
|
| ↳ `iconUrl` | string | URL to the priority icon |
|
||||||
|
| `assignee` | object | Assigned user |
|
||||||
|
| ↳ `accountId` | string | Atlassian account ID of the user |
|
||||||
|
| ↳ `displayName` | string | Display name of the user |
|
||||||
|
| ↳ `active` | boolean | Whether the user account is active |
|
||||||
|
| ↳ `emailAddress` | string | Email address of the user |
|
||||||
|
| ↳ `accountType` | string | Type of account \(e.g., atlassian, app, customer\) |
|
||||||
|
| ↳ `avatarUrl` | string | URL to the user avatar \(48x48\) |
|
||||||
|
| ↳ `timeZone` | string | User timezone |
|
||||||
|
| `assigneeName` | string | Assignee display name or account ID |
|
||||||
|
| `reporter` | object | Reporter user |
|
||||||
|
| ↳ `accountId` | string | Atlassian account ID of the user |
|
||||||
|
| ↳ `displayName` | string | Display name of the user |
|
||||||
|
| ↳ `active` | boolean | Whether the user account is active |
|
||||||
|
| ↳ `emailAddress` | string | Email address of the user |
|
||||||
|
| ↳ `accountType` | string | Type of account \(e.g., atlassian, app, customer\) |
|
||||||
|
| ↳ `avatarUrl` | string | URL to the user avatar \(48x48\) |
|
||||||
|
| ↳ `timeZone` | string | User timezone |
|
||||||
|
| `creator` | object | Issue creator |
|
||||||
|
| ↳ `accountId` | string | Atlassian account ID of the user |
|
||||||
|
| ↳ `displayName` | string | Display name of the user |
|
||||||
|
| ↳ `active` | boolean | Whether the user account is active |
|
||||||
|
| ↳ `emailAddress` | string | Email address of the user |
|
||||||
|
| ↳ `accountType` | string | Type of account \(e.g., atlassian, app, customer\) |
|
||||||
|
| ↳ `avatarUrl` | string | URL to the user avatar \(48x48\) |
|
||||||
|
| ↳ `timeZone` | string | User timezone |
|
||||||
|
| `labels` | array | Issue labels |
|
||||||
|
| `components` | array | Issue components |
|
||||||
|
| ↳ `id` | string | Component ID |
|
||||||
|
| ↳ `name` | string | Component name |
|
||||||
|
| ↳ `description` | string | Component description |
|
||||||
|
| `fixVersions` | array | Fix versions |
|
||||||
|
| ↳ `id` | string | Version ID |
|
||||||
|
| ↳ `name` | string | Version name |
|
||||||
|
| ↳ `released` | boolean | Whether the version is released |
|
||||||
|
| ↳ `releaseDate` | string | Release date \(YYYY-MM-DD\) |
|
||||||
|
| `resolution` | object | Issue resolution |
|
||||||
|
| ↳ `id` | string | Resolution ID |
|
||||||
|
| ↳ `name` | string | Resolution name \(e.g., Fixed, Duplicate, Won't Fix\) |
|
||||||
|
| ↳ `description` | string | Resolution description |
|
||||||
|
| `duedate` | string | Due date \(YYYY-MM-DD\) |
|
||||||
|
| `created` | string | ISO 8601 timestamp when the issue was created |
|
||||||
|
| `updated` | string | ISO 8601 timestamp when the issue was last updated |
|
||||||
|
| `resolutiondate` | string | ISO 8601 timestamp when the issue was resolved |
|
||||||
|
| `timetracking` | object | Time tracking information |
|
||||||
|
| ↳ `originalEstimate` | string | Original estimate in human-readable format \(e.g., 1w 2d\) |
|
||||||
|
| ↳ `remainingEstimate` | string | Remaining estimate in human-readable format |
|
||||||
|
| ↳ `timeSpent` | string | Time spent in human-readable format |
|
||||||
|
| ↳ `originalEstimateSeconds` | number | Original estimate in seconds |
|
||||||
|
| ↳ `remainingEstimateSeconds` | number | Remaining estimate in seconds |
|
||||||
|
| ↳ `timeSpentSeconds` | number | Time spent in seconds |
|
||||||
|
| `parent` | object | Parent issue \(for subtasks\) |
|
||||||
|
| ↳ `id` | string | Parent issue ID |
|
||||||
|
| ↳ `key` | string | Parent issue key |
|
||||||
|
| ↳ `summary` | string | Parent issue summary |
|
||||||
|
| `issuelinks` | array | Linked issues |
|
||||||
|
| ↳ `id` | string | Issue link ID |
|
||||||
|
| ↳ `type` | object | Link type information |
|
||||||
|
| ↳ `id` | string | Link type ID |
|
||||||
|
| ↳ `name` | string | Link type name \(e.g., Blocks, Relates\) |
|
||||||
|
| ↳ `inward` | string | Inward description \(e.g., is blocked by\) |
|
||||||
|
| ↳ `outward` | string | Outward description \(e.g., blocks\) |
|
||||||
|
| ↳ `inwardIssue` | object | Inward linked issue |
|
||||||
|
| ↳ `id` | string | Issue ID |
|
||||||
|
| ↳ `key` | string | Issue key |
|
||||||
|
| ↳ `statusName` | string | Issue status name |
|
||||||
|
| ↳ `summary` | string | Issue summary |
|
||||||
|
| ↳ `outwardIssue` | object | Outward linked issue |
|
||||||
|
| ↳ `id` | string | Issue ID |
|
||||||
|
| ↳ `key` | string | Issue key |
|
||||||
|
| ↳ `statusName` | string | Issue status name |
|
||||||
|
| ↳ `summary` | string | Issue summary |
|
||||||
|
| `subtasks` | array | Subtask issues |
|
||||||
|
| ↳ `id` | string | Subtask issue ID |
|
||||||
|
| ↳ `key` | string | Subtask issue key |
|
||||||
|
| ↳ `summary` | string | Subtask summary |
|
||||||
|
| ↳ `statusName` | string | Subtask status name |
|
||||||
|
| ↳ `issueTypeName` | string | Subtask issue type name |
|
||||||
|
| `votes` | object | Vote information |
|
||||||
|
| ↳ `votes` | number | Number of votes |
|
||||||
|
| ↳ `hasVoted` | boolean | Whether the current user has voted |
|
||||||
|
| `watches` | object | Watch information |
|
||||||
|
| ↳ `watchCount` | number | Number of watchers |
|
||||||
|
| ↳ `isWatching` | boolean | Whether the current user is watching |
|
||||||
|
| `comments` | array | Issue comments \(fetched separately\) |
|
||||||
|
| ↳ `id` | string | Comment ID |
|
||||||
|
| ↳ `body` | string | Comment body text \(extracted from ADF\) |
|
||||||
|
| ↳ `author` | object | Comment author |
|
||||||
|
| ↳ `accountId` | string | Atlassian account ID of the user |
|
||||||
|
| ↳ `displayName` | string | Display name of the user |
|
||||||
|
| ↳ `active` | boolean | Whether the user account is active |
|
||||||
|
| ↳ `emailAddress` | string | Email address of the user |
|
||||||
|
| ↳ `accountType` | string | Type of account \(e.g., atlassian, app, customer\) |
|
||||||
|
| ↳ `avatarUrl` | string | URL to the user avatar \(48x48\) |
|
||||||
|
| ↳ `timeZone` | string | User timezone |
|
||||||
|
| ↳ `authorName` | string | Comment author display name |
|
||||||
|
| ↳ `updateAuthor` | object | User who last updated the comment |
|
||||||
|
| ↳ `accountId` | string | Atlassian account ID of the user |
|
||||||
|
| ↳ `displayName` | string | Display name of the user |
|
||||||
|
| ↳ `active` | boolean | Whether the user account is active |
|
||||||
|
| ↳ `emailAddress` | string | Email address of the user |
|
||||||
|
| ↳ `accountType` | string | Type of account \(e.g., atlassian, app, customer\) |
|
||||||
|
| ↳ `avatarUrl` | string | URL to the user avatar \(48x48\) |
|
||||||
|
| ↳ `timeZone` | string | User timezone |
|
||||||
|
| ↳ `created` | string | ISO 8601 timestamp when the comment was created |
|
||||||
|
| ↳ `updated` | string | ISO 8601 timestamp when the comment was last updated |
|
||||||
|
| ↳ `visibility` | object | Comment visibility restriction |
|
||||||
|
| ↳ `type` | string | Restriction type \(e.g., role, group\) |
|
||||||
|
| ↳ `value` | string | Restriction value \(e.g., Administrators\) |
|
||||||
|
| `worklogs` | array | Issue worklogs \(fetched separately\) |
|
||||||
|
| ↳ `id` | string | Worklog ID |
|
||||||
|
| ↳ `author` | object | Worklog author |
|
||||||
|
| ↳ `accountId` | string | Atlassian account ID of the user |
|
||||||
|
| ↳ `displayName` | string | Display name of the user |
|
||||||
|
| ↳ `active` | boolean | Whether the user account is active |
|
||||||
|
| ↳ `emailAddress` | string | Email address of the user |
|
||||||
|
| ↳ `accountType` | string | Type of account \(e.g., atlassian, app, customer\) |
|
||||||
|
| ↳ `avatarUrl` | string | URL to the user avatar \(48x48\) |
|
||||||
|
| ↳ `timeZone` | string | User timezone |
|
||||||
|
| ↳ `authorName` | string | Worklog author display name |
|
||||||
|
| ↳ `updateAuthor` | object | User who last updated the worklog |
|
||||||
|
| ↳ `accountId` | string | Atlassian account ID of the user |
|
||||||
|
| ↳ `displayName` | string | Display name of the user |
|
||||||
|
| ↳ `active` | boolean | Whether the user account is active |
|
||||||
|
| ↳ `emailAddress` | string | Email address of the user |
|
||||||
|
| ↳ `accountType` | string | Type of account \(e.g., atlassian, app, customer\) |
|
||||||
|
| ↳ `avatarUrl` | string | URL to the user avatar \(48x48\) |
|
||||||
|
| ↳ `timeZone` | string | User timezone |
|
||||||
|
| ↳ `comment` | string | Worklog comment text |
|
||||||
|
| ↳ `started` | string | ISO 8601 timestamp when the work started |
|
||||||
|
| ↳ `timeSpent` | string | Time spent in human-readable format \(e.g., 3h 20m\) |
|
||||||
|
| ↳ `timeSpentSeconds` | number | Time spent in seconds |
|
||||||
|
| ↳ `created` | string | ISO 8601 timestamp when the worklog was created |
|
||||||
|
| ↳ `updated` | string | ISO 8601 timestamp when the worklog was last updated |
|
||||||
|
| `attachments` | array | Issue attachments |
|
||||||
|
| ↳ `id` | string | Attachment ID |
|
||||||
|
| ↳ `filename` | string | Attachment file name |
|
||||||
|
| ↳ `mimeType` | string | MIME type of the attachment |
|
||||||
|
| ↳ `size` | number | File size in bytes |
|
||||||
|
| ↳ `content` | string | URL to download the attachment content |
|
||||||
|
| ↳ `thumbnail` | string | URL to the attachment thumbnail |
|
||||||
|
| ↳ `author` | object | Attachment author |
|
||||||
|
| ↳ `accountId` | string | Atlassian account ID of the user |
|
||||||
|
| ↳ `displayName` | string | Display name of the user |
|
||||||
|
| ↳ `active` | boolean | Whether the user account is active |
|
||||||
|
| ↳ `emailAddress` | string | Email address of the user |
|
||||||
|
| ↳ `accountType` | string | Type of account \(e.g., atlassian, app, customer\) |
|
||||||
|
| ↳ `avatarUrl` | string | URL to the user avatar \(48x48\) |
|
||||||
|
| ↳ `timeZone` | string | User timezone |
|
||||||
|
| ↳ `authorName` | string | Attachment author display name |
|
||||||
|
| ↳ `created` | string | ISO 8601 timestamp when the attachment was created |
|
||||||
|
| `issueKey` | string | Issue key \(e.g., PROJ-123\) |
|
||||||
|
| `issue` | json | Complete raw Jira issue object from the API |
|
||||||
|
| `files` | file[] | Downloaded attachment files \(only when includeAttachments is true\) |
|
||||||
|
|
||||||
### `jira_update`
|
### `jira_update`
|
||||||
|
|
||||||
@@ -68,26 +245,33 @@ Update a Jira issue
|
|||||||
| Parameter | Type | Required | Description |
|
| Parameter | Type | Required | Description |
|
||||||
| --------- | ---- | -------- | ----------- |
|
| --------- | ---- | -------- | ----------- |
|
||||||
| `domain` | string | Yes | Your Jira domain \(e.g., yourcompany.atlassian.net\) |
|
| `domain` | string | Yes | Your Jira domain \(e.g., yourcompany.atlassian.net\) |
|
||||||
| `projectId` | string | No | Jira project key \(e.g., PROJ\). Optional when updating a single issue. |
|
|
||||||
| `issueKey` | string | Yes | Jira issue key to update \(e.g., PROJ-123\) |
|
| `issueKey` | string | Yes | Jira issue key to update \(e.g., PROJ-123\) |
|
||||||
| `summary` | string | No | New summary for the issue |
|
| `summary` | string | No | New summary for the issue |
|
||||||
| `description` | string | No | New description for the issue |
|
| `description` | string | No | New description for the issue |
|
||||||
| `status` | string | No | New status for the issue |
|
| `priority` | string | No | New priority ID or name for the issue \(e.g., "High"\) |
|
||||||
| `priority` | string | No | New priority for the issue |
|
| `assignee` | string | No | New assignee account ID for the issue |
|
||||||
| `assignee` | string | No | New assignee for the issue |
|
| `labels` | json | No | Labels to set on the issue \(array of label name strings\) |
|
||||||
|
| `components` | json | No | Components to set on the issue \(array of component name strings\) |
|
||||||
|
| `duedate` | string | No | Due date for the issue \(format: YYYY-MM-DD\) |
|
||||||
|
| `fixVersions` | json | No | Fix versions to set \(array of version name strings\) |
|
||||||
|
| `environment` | string | No | Environment information for the issue |
|
||||||
|
| `customFieldId` | string | No | Custom field ID to update \(e.g., customfield_10001\) |
|
||||||
|
| `customFieldValue` | string | No | Value for the custom field |
|
||||||
|
| `notifyUsers` | boolean | No | Whether to send email notifications about this update \(default: true\) |
|
||||||
| `cloudId` | string | No | Jira Cloud ID for the instance. If not provided, it will be fetched using the domain. |
|
| `cloudId` | string | No | Jira Cloud ID for the instance. If not provided, it will be fetched using the domain. |
|
||||||
|
|
||||||
#### Output
|
#### Output
|
||||||
|
|
||||||
| Parameter | Type | Description |
|
| Parameter | Type | Description |
|
||||||
| --------- | ---- | ----------- |
|
| --------- | ---- | ----------- |
|
||||||
| `ts` | string | Timestamp of the operation |
|
| `ts` | string | ISO 8601 timestamp of the operation |
|
||||||
|
| `success` | boolean | Operation success status |
|
||||||
| `issueKey` | string | Updated issue key \(e.g., PROJ-123\) |
|
| `issueKey` | string | Updated issue key \(e.g., PROJ-123\) |
|
||||||
| `summary` | string | Issue summary after update |
|
| `summary` | string | Issue summary after update |
|
||||||
|
|
||||||
### `jira_write`
|
### `jira_write`
|
||||||
|
|
||||||
Write a Jira issue
|
Create a new Jira issue
|
||||||
|
|
||||||
#### Input
|
#### Input
|
||||||
|
|
||||||
@@ -100,9 +284,12 @@ Write a Jira issue
|
|||||||
| `priority` | string | No | Priority ID or name for the issue \(e.g., "10000" or "High"\) |
|
| `priority` | string | No | Priority ID or name for the issue \(e.g., "10000" or "High"\) |
|
||||||
| `assignee` | string | No | Assignee account ID for the issue |
|
| `assignee` | string | No | Assignee account ID for the issue |
|
||||||
| `cloudId` | string | No | Jira Cloud ID for the instance. If not provided, it will be fetched using the domain. |
|
| `cloudId` | string | No | Jira Cloud ID for the instance. If not provided, it will be fetched using the domain. |
|
||||||
| `issueType` | string | Yes | Type of issue to create \(e.g., Task, Story\) |
|
| `issueType` | string | Yes | Type of issue to create \(e.g., Task, Story, Bug, Epic, Sub-task\) |
|
||||||
|
| `parent` | json | No | Parent issue key for creating subtasks \(e.g., \{ "key": "PROJ-123" \}\) |
|
||||||
| `labels` | array | No | Labels for the issue \(array of label names\) |
|
| `labels` | array | No | Labels for the issue \(array of label names\) |
|
||||||
|
| `components` | array | No | Components for the issue \(array of component names\) |
|
||||||
| `duedate` | string | No | Due date for the issue \(format: YYYY-MM-DD\) |
|
| `duedate` | string | No | Due date for the issue \(format: YYYY-MM-DD\) |
|
||||||
|
| `fixVersions` | array | No | Fix versions for the issue \(array of version names\) |
|
||||||
| `reporter` | string | No | Reporter account ID for the issue |
|
| `reporter` | string | No | Reporter account ID for the issue |
|
||||||
| `environment` | string | No | Environment information for the issue |
|
| `environment` | string | No | Environment information for the issue |
|
||||||
| `customFieldId` | string | No | Custom field ID \(e.g., customfield_10001\) |
|
| `customFieldId` | string | No | Custom field ID \(e.g., customfield_10001\) |
|
||||||
@@ -112,15 +299,18 @@ Write a Jira issue
|
|||||||
|
|
||||||
| Parameter | Type | Description |
|
| Parameter | Type | Description |
|
||||||
| --------- | ---- | ----------- |
|
| --------- | ---- | ----------- |
|
||||||
| `ts` | string | Timestamp of the operation |
|
| `ts` | string | ISO 8601 timestamp of the operation |
|
||||||
|
| `id` | string | Created issue ID |
|
||||||
| `issueKey` | string | Created issue key \(e.g., PROJ-123\) |
|
| `issueKey` | string | Created issue key \(e.g., PROJ-123\) |
|
||||||
|
| `self` | string | REST API URL for the created issue |
|
||||||
| `summary` | string | Issue summary |
|
| `summary` | string | Issue summary |
|
||||||
| `url` | string | URL to the created issue |
|
| `success` | boolean | Whether the issue was created successfully |
|
||||||
| `assigneeId` | string | Account ID of the assigned user \(if assigned\) |
|
| `url` | string | URL to the created issue in Jira |
|
||||||
|
| `assigneeId` | string | Account ID of the assigned user \(null if no assignee was set\) |
|
||||||
|
|
||||||
### `jira_bulk_read`
|
### `jira_bulk_read`
|
||||||
|
|
||||||
Retrieve multiple Jira issues in bulk
|
Retrieve multiple Jira issues from a project in bulk
|
||||||
|
|
||||||
#### Input
|
#### Input
|
||||||
|
|
||||||
@@ -134,7 +324,30 @@ Retrieve multiple Jira issues in bulk
|
|||||||
|
|
||||||
| Parameter | Type | Description |
|
| Parameter | Type | Description |
|
||||||
| --------- | ---- | ----------- |
|
| --------- | ---- | ----------- |
|
||||||
| `issues` | array | Array of Jira issues with ts, summary, description, created, and updated timestamps |
|
| `ts` | string | ISO 8601 timestamp of the operation |
|
||||||
|
| `total` | number | Total number of issues in the project \(may not always be available\) |
|
||||||
|
| `issues` | array | Array of Jira issues |
|
||||||
|
| ↳ `id` | string | Issue ID |
|
||||||
|
| ↳ `key` | string | Issue key \(e.g., PROJ-123\) |
|
||||||
|
| ↳ `self` | string | REST API URL for this issue |
|
||||||
|
| ↳ `summary` | string | Issue summary |
|
||||||
|
| ↳ `description` | string | Issue description text |
|
||||||
|
| ↳ `status` | object | Issue status |
|
||||||
|
| ↳ `id` | string | Status ID |
|
||||||
|
| ↳ `name` | string | Status name |
|
||||||
|
| ↳ `issuetype` | object | Issue type |
|
||||||
|
| ↳ `id` | string | Issue type ID |
|
||||||
|
| ↳ `name` | string | Issue type name |
|
||||||
|
| ↳ `priority` | object | Issue priority |
|
||||||
|
| ↳ `id` | string | Priority ID |
|
||||||
|
| ↳ `name` | string | Priority name |
|
||||||
|
| ↳ `assignee` | object | Assigned user |
|
||||||
|
| ↳ `accountId` | string | Atlassian account ID |
|
||||||
|
| ↳ `displayName` | string | Display name |
|
||||||
|
| ↳ `created` | string | ISO 8601 creation timestamp |
|
||||||
|
| ↳ `updated` | string | ISO 8601 last updated timestamp |
|
||||||
|
| `nextPageToken` | string | Cursor token for the next page. Null when no more results. |
|
||||||
|
| `isLast` | boolean | Whether this is the last page of results |
|
||||||
|
|
||||||
### `jira_delete_issue`
|
### `jira_delete_issue`
|
||||||
|
|
||||||
@@ -153,7 +366,8 @@ Delete a Jira issue
|
|||||||
|
|
||||||
| Parameter | Type | Description |
|
| Parameter | Type | Description |
|
||||||
| --------- | ---- | ----------- |
|
| --------- | ---- | ----------- |
|
||||||
| `ts` | string | Timestamp of the operation |
|
| `ts` | string | ISO 8601 timestamp of the operation |
|
||||||
|
| `success` | boolean | Operation success status |
|
||||||
| `issueKey` | string | Deleted issue key |
|
| `issueKey` | string | Deleted issue key |
|
||||||
|
|
||||||
### `jira_assign_issue`
|
### `jira_assign_issue`
|
||||||
@@ -173,9 +387,10 @@ Assign a Jira issue to a user
|
|||||||
|
|
||||||
| Parameter | Type | Description |
|
| Parameter | Type | Description |
|
||||||
| --------- | ---- | ----------- |
|
| --------- | ---- | ----------- |
|
||||||
| `ts` | string | Timestamp of the operation |
|
| `ts` | string | ISO 8601 timestamp of the operation |
|
||||||
|
| `success` | boolean | Operation success status |
|
||||||
| `issueKey` | string | Issue key that was assigned |
|
| `issueKey` | string | Issue key that was assigned |
|
||||||
| `assigneeId` | string | Account ID of the assignee |
|
| `assigneeId` | string | Account ID of the assignee \(use "-1" for auto-assign, null to unassign\) |
|
||||||
|
|
||||||
### `jira_transition_issue`
|
### `jira_transition_issue`
|
||||||
|
|
||||||
@@ -189,15 +404,21 @@ Move a Jira issue between workflow statuses (e.g., To Do -> In Progress)
|
|||||||
| `issueKey` | string | Yes | Jira issue key to transition \(e.g., PROJ-123\) |
|
| `issueKey` | string | Yes | Jira issue key to transition \(e.g., PROJ-123\) |
|
||||||
| `transitionId` | string | Yes | ID of the transition to execute \(e.g., "11" for "To Do", "21" for "In Progress"\) |
|
| `transitionId` | string | Yes | ID of the transition to execute \(e.g., "11" for "To Do", "21" for "In Progress"\) |
|
||||||
| `comment` | string | No | Optional comment to add when transitioning the issue |
|
| `comment` | string | No | Optional comment to add when transitioning the issue |
|
||||||
|
| `resolution` | string | No | Resolution name to set during transition \(e.g., "Fixed", "Won\'t Fix"\) |
|
||||||
| `cloudId` | string | No | Jira Cloud ID for the instance. If not provided, it will be fetched using the domain. |
|
| `cloudId` | string | No | Jira Cloud ID for the instance. If not provided, it will be fetched using the domain. |
|
||||||
|
|
||||||
#### Output
|
#### Output
|
||||||
|
|
||||||
| Parameter | Type | Description |
|
| Parameter | Type | Description |
|
||||||
| --------- | ---- | ----------- |
|
| --------- | ---- | ----------- |
|
||||||
| `ts` | string | Timestamp of the operation |
|
| `ts` | string | ISO 8601 timestamp of the operation |
|
||||||
|
| `success` | boolean | Operation success status |
|
||||||
| `issueKey` | string | Issue key that was transitioned |
|
| `issueKey` | string | Issue key that was transitioned |
|
||||||
| `transitionId` | string | Applied transition ID |
|
| `transitionId` | string | Applied transition ID |
|
||||||
|
| `transitionName` | string | Applied transition name |
|
||||||
|
| `toStatus` | object | Target status after transition |
|
||||||
|
| ↳ `id` | string | Status ID |
|
||||||
|
| ↳ `name` | string | Status name |
|
||||||
|
|
||||||
### `jira_search_issues`
|
### `jira_search_issues`
|
||||||
|
|
||||||
@@ -209,20 +430,79 @@ Search for Jira issues using JQL (Jira Query Language)
|
|||||||
| --------- | ---- | -------- | ----------- |
|
| --------- | ---- | -------- | ----------- |
|
||||||
| `domain` | string | Yes | Your Jira domain \(e.g., yourcompany.atlassian.net\) |
|
| `domain` | string | Yes | Your Jira domain \(e.g., yourcompany.atlassian.net\) |
|
||||||
| `jql` | string | Yes | JQL query string to search for issues \(e.g., "project = PROJ AND status = Open"\) |
|
| `jql` | string | Yes | JQL query string to search for issues \(e.g., "project = PROJ AND status = Open"\) |
|
||||||
| `startAt` | number | No | The index of the first result to return \(for pagination\) |
|
| `nextPageToken` | string | No | Cursor token for the next page of results. Omit for the first page. |
|
||||||
| `maxResults` | number | No | Maximum number of results to return \(default: 50\) |
|
| `maxResults` | number | No | Maximum number of results to return per page \(default: 50\) |
|
||||||
| `fields` | array | No | Array of field names to return \(default: \['summary', 'status', 'assignee', 'created', 'updated'\]\) |
|
| `fields` | array | No | Array of field names to return \(default: all navigable\). Use "*all" for every field. |
|
||||||
| `cloudId` | string | No | Jira Cloud ID for the instance. If not provided, it will be fetched using the domain. |
|
| `cloudId` | string | No | Jira Cloud ID for the instance. If not provided, it will be fetched using the domain. |
|
||||||
|
|
||||||
#### Output
|
#### Output
|
||||||
|
|
||||||
| Parameter | Type | Description |
|
| Parameter | Type | Description |
|
||||||
| --------- | ---- | ----------- |
|
| --------- | ---- | ----------- |
|
||||||
| `ts` | string | Timestamp of the operation |
|
| `ts` | string | ISO 8601 timestamp of the operation |
|
||||||
| `total` | number | Total number of matching issues |
|
| `issues` | array | Array of matching issues |
|
||||||
| `startAt` | number | Pagination start index |
|
| ↳ `id` | string | Issue ID |
|
||||||
| `maxResults` | number | Maximum results per page |
|
| ↳ `key` | string | Issue key \(e.g., PROJ-123\) |
|
||||||
| `issues` | array | Array of matching issues with key, summary, status, assignee, created, updated |
|
| ↳ `self` | string | REST API URL for this issue |
|
||||||
|
| ↳ `summary` | string | Issue summary |
|
||||||
|
| ↳ `description` | string | Issue description text \(extracted from ADF\) |
|
||||||
|
| ↳ `status` | object | Issue status |
|
||||||
|
| ↳ `id` | string | Status ID |
|
||||||
|
| ↳ `name` | string | Status name \(e.g., Open, In Progress, Done\) |
|
||||||
|
| ↳ `description` | string | Status description |
|
||||||
|
| ↳ `statusCategory` | object | Status category grouping |
|
||||||
|
| ↳ `id` | number | Status category ID |
|
||||||
|
| ↳ `key` | string | Status category key \(e.g., new, indeterminate, done\) |
|
||||||
|
| ↳ `name` | string | Status category name \(e.g., To Do, In Progress, Done\) |
|
||||||
|
| ↳ `colorName` | string | Status category color \(e.g., blue-gray, yellow, green\) |
|
||||||
|
| ↳ `statusName` | string | Issue status name \(e.g., Open, In Progress, Done\) |
|
||||||
|
| ↳ `issuetype` | object | Issue type |
|
||||||
|
| ↳ `id` | string | Issue type ID |
|
||||||
|
| ↳ `name` | string | Issue type name \(e.g., Task, Bug, Story, Epic\) |
|
||||||
|
| ↳ `description` | string | Issue type description |
|
||||||
|
| ↳ `subtask` | boolean | Whether this is a subtask type |
|
||||||
|
| ↳ `iconUrl` | string | URL to the issue type icon |
|
||||||
|
| ↳ `project` | object | Project the issue belongs to |
|
||||||
|
| ↳ `id` | string | Project ID |
|
||||||
|
| ↳ `key` | string | Project key \(e.g., PROJ\) |
|
||||||
|
| ↳ `name` | string | Project name |
|
||||||
|
| ↳ `projectTypeKey` | string | Project type key \(e.g., software, business\) |
|
||||||
|
| ↳ `priority` | object | Issue priority |
|
||||||
|
| ↳ `id` | string | Priority ID |
|
||||||
|
| ↳ `name` | string | Priority name \(e.g., Highest, High, Medium, Low, Lowest\) |
|
||||||
|
| ↳ `iconUrl` | string | URL to the priority icon |
|
||||||
|
| ↳ `assignee` | object | Assigned user |
|
||||||
|
| ↳ `accountId` | string | Atlassian account ID of the user |
|
||||||
|
| ↳ `displayName` | string | Display name of the user |
|
||||||
|
| ↳ `active` | boolean | Whether the user account is active |
|
||||||
|
| ↳ `emailAddress` | string | Email address of the user |
|
||||||
|
| ↳ `accountType` | string | Type of account \(e.g., atlassian, app, customer\) |
|
||||||
|
| ↳ `avatarUrl` | string | URL to the user avatar \(48x48\) |
|
||||||
|
| ↳ `timeZone` | string | User timezone |
|
||||||
|
| ↳ `assigneeName` | string | Assignee display name or account ID |
|
||||||
|
| ↳ `reporter` | object | Reporter user |
|
||||||
|
| ↳ `accountId` | string | Atlassian account ID of the user |
|
||||||
|
| ↳ `displayName` | string | Display name of the user |
|
||||||
|
| ↳ `active` | boolean | Whether the user account is active |
|
||||||
|
| ↳ `emailAddress` | string | Email address of the user |
|
||||||
|
| ↳ `accountType` | string | Type of account \(e.g., atlassian, app, customer\) |
|
||||||
|
| ↳ `avatarUrl` | string | URL to the user avatar \(48x48\) |
|
||||||
|
| ↳ `timeZone` | string | User timezone |
|
||||||
|
| ↳ `labels` | array | Issue labels |
|
||||||
|
| ↳ `components` | array | Issue components |
|
||||||
|
| ↳ `id` | string | Component ID |
|
||||||
|
| ↳ `name` | string | Component name |
|
||||||
|
| ↳ `description` | string | Component description |
|
||||||
|
| ↳ `resolution` | object | Issue resolution |
|
||||||
|
| ↳ `id` | string | Resolution ID |
|
||||||
|
| ↳ `name` | string | Resolution name \(e.g., Fixed, Duplicate, Won't Fix\) |
|
||||||
|
| ↳ `description` | string | Resolution description |
|
||||||
|
| ↳ `duedate` | string | Due date \(YYYY-MM-DD\) |
|
||||||
|
| ↳ `created` | string | ISO 8601 timestamp when the issue was created |
|
||||||
|
| ↳ `updated` | string | ISO 8601 timestamp when the issue was last updated |
|
||||||
|
| `nextPageToken` | string | Cursor token for the next page. Null when no more results. |
|
||||||
|
| `isLast` | boolean | Whether this is the last page of results |
|
||||||
|
| `total` | number | Total number of matching issues \(may not always be available\) |
|
||||||
|
|
||||||
### `jira_add_comment`
|
### `jira_add_comment`
|
||||||
|
|
||||||
@@ -235,16 +515,28 @@ Add a comment to a Jira issue
|
|||||||
| `domain` | string | Yes | Your Jira domain \(e.g., yourcompany.atlassian.net\) |
|
| `domain` | string | Yes | Your Jira domain \(e.g., yourcompany.atlassian.net\) |
|
||||||
| `issueKey` | string | Yes | Jira issue key to add comment to \(e.g., PROJ-123\) |
|
| `issueKey` | string | Yes | Jira issue key to add comment to \(e.g., PROJ-123\) |
|
||||||
| `body` | string | Yes | Comment body text |
|
| `body` | string | Yes | Comment body text |
|
||||||
|
| `visibility` | json | No | Restrict comment visibility. Object with "type" \("role" or "group"\) and "value" \(role/group name\). |
|
||||||
| `cloudId` | string | No | Jira Cloud ID for the instance. If not provided, it will be fetched using the domain. |
|
| `cloudId` | string | No | Jira Cloud ID for the instance. If not provided, it will be fetched using the domain. |
|
||||||
|
|
||||||
#### Output
|
#### Output
|
||||||
|
|
||||||
| Parameter | Type | Description |
|
| Parameter | Type | Description |
|
||||||
| --------- | ---- | ----------- |
|
| --------- | ---- | ----------- |
|
||||||
| `ts` | string | Timestamp of the operation |
|
| `ts` | string | ISO 8601 timestamp of the operation |
|
||||||
|
| `success` | boolean | Operation success status |
|
||||||
| `issueKey` | string | Issue key the comment was added to |
|
| `issueKey` | string | Issue key the comment was added to |
|
||||||
| `commentId` | string | Created comment ID |
|
| `commentId` | string | Created comment ID |
|
||||||
| `body` | string | Comment text content |
|
| `body` | string | Comment text content |
|
||||||
|
| `author` | object | Comment author |
|
||||||
|
| ↳ `accountId` | string | Atlassian account ID of the user |
|
||||||
|
| ↳ `displayName` | string | Display name of the user |
|
||||||
|
| ↳ `active` | boolean | Whether the user account is active |
|
||||||
|
| ↳ `emailAddress` | string | Email address of the user |
|
||||||
|
| ↳ `accountType` | string | Type of account \(e.g., atlassian, app, customer\) |
|
||||||
|
| ↳ `avatarUrl` | string | URL to the user avatar \(48x48\) |
|
||||||
|
| ↳ `timeZone` | string | User timezone |
|
||||||
|
| `created` | string | ISO 8601 timestamp when the comment was created |
|
||||||
|
| `updated` | string | ISO 8601 timestamp when the comment was last updated |
|
||||||
|
|
||||||
### `jira_get_comments`
|
### `jira_get_comments`
|
||||||
|
|
||||||
@@ -258,16 +550,43 @@ Get all comments from a Jira issue
|
|||||||
| `issueKey` | string | Yes | Jira issue key to get comments from \(e.g., PROJ-123\) |
|
| `issueKey` | string | Yes | Jira issue key to get comments from \(e.g., PROJ-123\) |
|
||||||
| `startAt` | number | No | Index of the first comment to return \(default: 0\) |
|
| `startAt` | number | No | Index of the first comment to return \(default: 0\) |
|
||||||
| `maxResults` | number | No | Maximum number of comments to return \(default: 50\) |
|
| `maxResults` | number | No | Maximum number of comments to return \(default: 50\) |
|
||||||
|
| `orderBy` | string | No | Sort order for comments: "-created" for newest first, "created" for oldest first |
|
||||||
| `cloudId` | string | No | Jira Cloud ID for the instance. If not provided, it will be fetched using the domain. |
|
| `cloudId` | string | No | Jira Cloud ID for the instance. If not provided, it will be fetched using the domain. |
|
||||||
|
|
||||||
#### Output
|
#### Output
|
||||||
|
|
||||||
| Parameter | Type | Description |
|
| Parameter | Type | Description |
|
||||||
| --------- | ---- | ----------- |
|
| --------- | ---- | ----------- |
|
||||||
| `ts` | string | Timestamp of the operation |
|
| `ts` | string | ISO 8601 timestamp of the operation |
|
||||||
| `issueKey` | string | Issue key |
|
| `issueKey` | string | Issue key |
|
||||||
| `total` | number | Total number of comments |
|
| `total` | number | Total number of comments |
|
||||||
| `comments` | array | Array of comments with id, author, body, created, updated |
|
| `startAt` | number | Pagination start index |
|
||||||
|
| `maxResults` | number | Maximum results per page |
|
||||||
|
| `comments` | array | Array of comments |
|
||||||
|
| ↳ `id` | string | Comment ID |
|
||||||
|
| ↳ `body` | string | Comment body text \(extracted from ADF\) |
|
||||||
|
| ↳ `author` | object | Comment author |
|
||||||
|
| ↳ `accountId` | string | Atlassian account ID of the user |
|
||||||
|
| ↳ `displayName` | string | Display name of the user |
|
||||||
|
| ↳ `active` | boolean | Whether the user account is active |
|
||||||
|
| ↳ `emailAddress` | string | Email address of the user |
|
||||||
|
| ↳ `accountType` | string | Type of account \(e.g., atlassian, app, customer\) |
|
||||||
|
| ↳ `avatarUrl` | string | URL to the user avatar \(48x48\) |
|
||||||
|
| ↳ `timeZone` | string | User timezone |
|
||||||
|
| ↳ `authorName` | string | Comment author display name |
|
||||||
|
| ↳ `updateAuthor` | object | User who last updated the comment |
|
||||||
|
| ↳ `accountId` | string | Atlassian account ID of the user |
|
||||||
|
| ↳ `displayName` | string | Display name of the user |
|
||||||
|
| ↳ `active` | boolean | Whether the user account is active |
|
||||||
|
| ↳ `emailAddress` | string | Email address of the user |
|
||||||
|
| ↳ `accountType` | string | Type of account \(e.g., atlassian, app, customer\) |
|
||||||
|
| ↳ `avatarUrl` | string | URL to the user avatar \(48x48\) |
|
||||||
|
| ↳ `timeZone` | string | User timezone |
|
||||||
|
| ↳ `created` | string | ISO 8601 timestamp when the comment was created |
|
||||||
|
| ↳ `updated` | string | ISO 8601 timestamp when the comment was last updated |
|
||||||
|
| ↳ `visibility` | object | Comment visibility restriction |
|
||||||
|
| ↳ `type` | string | Restriction type \(e.g., role, group\) |
|
||||||
|
| ↳ `value` | string | Restriction value \(e.g., Administrators\) |
|
||||||
|
|
||||||
### `jira_update_comment`
|
### `jira_update_comment`
|
||||||
|
|
||||||
@@ -281,16 +600,28 @@ Update an existing comment on a Jira issue
|
|||||||
| `issueKey` | string | Yes | Jira issue key containing the comment \(e.g., PROJ-123\) |
|
| `issueKey` | string | Yes | Jira issue key containing the comment \(e.g., PROJ-123\) |
|
||||||
| `commentId` | string | Yes | ID of the comment to update |
|
| `commentId` | string | Yes | ID of the comment to update |
|
||||||
| `body` | string | Yes | Updated comment text |
|
| `body` | string | Yes | Updated comment text |
|
||||||
|
| `visibility` | json | No | Restrict comment visibility. Object with "type" \("role" or "group"\) and "value" \(role/group name\). |
|
||||||
| `cloudId` | string | No | Jira Cloud ID for the instance. If not provided, it will be fetched using the domain. |
|
| `cloudId` | string | No | Jira Cloud ID for the instance. If not provided, it will be fetched using the domain. |
|
||||||
|
|
||||||
#### Output
|
#### Output
|
||||||
|
|
||||||
| Parameter | Type | Description |
|
| Parameter | Type | Description |
|
||||||
| --------- | ---- | ----------- |
|
| --------- | ---- | ----------- |
|
||||||
| `ts` | string | Timestamp of the operation |
|
| `ts` | string | ISO 8601 timestamp of the operation |
|
||||||
|
| `success` | boolean | Operation success status |
|
||||||
| `issueKey` | string | Issue key |
|
| `issueKey` | string | Issue key |
|
||||||
| `commentId` | string | Updated comment ID |
|
| `commentId` | string | Updated comment ID |
|
||||||
| `body` | string | Updated comment text |
|
| `body` | string | Updated comment text |
|
||||||
|
| `author` | object | Comment author |
|
||||||
|
| ↳ `accountId` | string | Atlassian account ID of the user |
|
||||||
|
| ↳ `displayName` | string | Display name of the user |
|
||||||
|
| ↳ `active` | boolean | Whether the user account is active |
|
||||||
|
| ↳ `emailAddress` | string | Email address of the user |
|
||||||
|
| ↳ `accountType` | string | Type of account \(e.g., atlassian, app, customer\) |
|
||||||
|
| ↳ `avatarUrl` | string | URL to the user avatar \(48x48\) |
|
||||||
|
| ↳ `timeZone` | string | User timezone |
|
||||||
|
| `created` | string | ISO 8601 timestamp when the comment was created |
|
||||||
|
| `updated` | string | ISO 8601 timestamp when the comment was last updated |
|
||||||
|
|
||||||
### `jira_delete_comment`
|
### `jira_delete_comment`
|
||||||
|
|
||||||
@@ -309,7 +640,8 @@ Delete a comment from a Jira issue
|
|||||||
|
|
||||||
| Parameter | Type | Description |
|
| Parameter | Type | Description |
|
||||||
| --------- | ---- | ----------- |
|
| --------- | ---- | ----------- |
|
||||||
| `ts` | string | Timestamp of the operation |
|
| `ts` | string | ISO 8601 timestamp of the operation |
|
||||||
|
| `success` | boolean | Operation success status |
|
||||||
| `issueKey` | string | Issue key |
|
| `issueKey` | string | Issue key |
|
||||||
| `commentId` | string | Deleted comment ID |
|
| `commentId` | string | Deleted comment ID |
|
||||||
|
|
||||||
@@ -323,15 +655,33 @@ Get all attachments from a Jira issue
|
|||||||
| --------- | ---- | -------- | ----------- |
|
| --------- | ---- | -------- | ----------- |
|
||||||
| `domain` | string | Yes | Your Jira domain \(e.g., yourcompany.atlassian.net\) |
|
| `domain` | string | Yes | Your Jira domain \(e.g., yourcompany.atlassian.net\) |
|
||||||
| `issueKey` | string | Yes | Jira issue key to get attachments from \(e.g., PROJ-123\) |
|
| `issueKey` | string | Yes | Jira issue key to get attachments from \(e.g., PROJ-123\) |
|
||||||
|
| `includeAttachments` | boolean | No | Download attachment file contents and include them as files in the output |
|
||||||
| `cloudId` | string | No | Jira Cloud ID for the instance. If not provided, it will be fetched using the domain. |
|
| `cloudId` | string | No | Jira Cloud ID for the instance. If not provided, it will be fetched using the domain. |
|
||||||
|
|
||||||
#### Output
|
#### Output
|
||||||
|
|
||||||
| Parameter | Type | Description |
|
| Parameter | Type | Description |
|
||||||
| --------- | ---- | ----------- |
|
| --------- | ---- | ----------- |
|
||||||
| `ts` | string | Timestamp of the operation |
|
| `ts` | string | ISO 8601 timestamp of the operation |
|
||||||
| `issueKey` | string | Issue key |
|
| `issueKey` | string | Issue key |
|
||||||
| `attachments` | array | Array of attachments with id, filename, size, mimeType, created, author |
|
| `attachments` | array | Array of attachments |
|
||||||
|
| ↳ `id` | string | Attachment ID |
|
||||||
|
| ↳ `filename` | string | Attachment file name |
|
||||||
|
| ↳ `mimeType` | string | MIME type of the attachment |
|
||||||
|
| ↳ `size` | number | File size in bytes |
|
||||||
|
| ↳ `content` | string | URL to download the attachment content |
|
||||||
|
| ↳ `thumbnail` | string | URL to the attachment thumbnail |
|
||||||
|
| ↳ `author` | object | Attachment author |
|
||||||
|
| ↳ `accountId` | string | Atlassian account ID of the user |
|
||||||
|
| ↳ `displayName` | string | Display name of the user |
|
||||||
|
| ↳ `active` | boolean | Whether the user account is active |
|
||||||
|
| ↳ `emailAddress` | string | Email address of the user |
|
||||||
|
| ↳ `accountType` | string | Type of account \(e.g., atlassian, app, customer\) |
|
||||||
|
| ↳ `avatarUrl` | string | URL to the user avatar \(48x48\) |
|
||||||
|
| ↳ `timeZone` | string | User timezone |
|
||||||
|
| ↳ `authorName` | string | Attachment author display name |
|
||||||
|
| ↳ `created` | string | ISO 8601 timestamp when the attachment was created |
|
||||||
|
| `files` | file[] | Downloaded attachment files \(only when includeAttachments is true\) |
|
||||||
|
|
||||||
### `jira_add_attachment`
|
### `jira_add_attachment`
|
||||||
|
|
||||||
@@ -350,9 +700,15 @@ Add attachments to a Jira issue
|
|||||||
|
|
||||||
| Parameter | Type | Description |
|
| Parameter | Type | Description |
|
||||||
| --------- | ---- | ----------- |
|
| --------- | ---- | ----------- |
|
||||||
| `ts` | string | Timestamp of the operation |
|
| `ts` | string | ISO 8601 timestamp of the operation |
|
||||||
| `issueKey` | string | Issue key |
|
| `issueKey` | string | Issue key |
|
||||||
| `attachmentIds` | json | IDs of uploaded attachments |
|
| `attachments` | array | Uploaded attachments |
|
||||||
|
| ↳ `id` | string | Attachment ID |
|
||||||
|
| ↳ `filename` | string | Attachment file name |
|
||||||
|
| ↳ `mimeType` | string | MIME type |
|
||||||
|
| ↳ `size` | number | File size in bytes |
|
||||||
|
| ↳ `content` | string | URL to download the attachment |
|
||||||
|
| `attachmentIds` | array | Array of attachment IDs |
|
||||||
| `files` | file[] | Uploaded attachment files |
|
| `files` | file[] | Uploaded attachment files |
|
||||||
|
|
||||||
### `jira_delete_attachment`
|
### `jira_delete_attachment`
|
||||||
@@ -371,7 +727,8 @@ Delete an attachment from a Jira issue
|
|||||||
|
|
||||||
| Parameter | Type | Description |
|
| Parameter | Type | Description |
|
||||||
| --------- | ---- | ----------- |
|
| --------- | ---- | ----------- |
|
||||||
| `ts` | string | Timestamp of the operation |
|
| `ts` | string | ISO 8601 timestamp of the operation |
|
||||||
|
| `success` | boolean | Operation success status |
|
||||||
| `attachmentId` | string | Deleted attachment ID |
|
| `attachmentId` | string | Deleted attachment ID |
|
||||||
|
|
||||||
### `jira_add_worklog`
|
### `jira_add_worklog`
|
||||||
@@ -387,16 +744,29 @@ Add a time tracking worklog entry to a Jira issue
|
|||||||
| `timeSpentSeconds` | number | Yes | Time spent in seconds |
|
| `timeSpentSeconds` | number | Yes | Time spent in seconds |
|
||||||
| `comment` | string | No | Optional comment for the worklog entry |
|
| `comment` | string | No | Optional comment for the worklog entry |
|
||||||
| `started` | string | No | Optional start time in ISO format \(defaults to current time\) |
|
| `started` | string | No | Optional start time in ISO format \(defaults to current time\) |
|
||||||
|
| `visibility` | json | No | Restrict worklog visibility. Object with "type" \("role" or "group"\) and "value" \(role/group name\). |
|
||||||
| `cloudId` | string | No | Jira Cloud ID for the instance. If not provided, it will be fetched using the domain. |
|
| `cloudId` | string | No | Jira Cloud ID for the instance. If not provided, it will be fetched using the domain. |
|
||||||
|
|
||||||
#### Output
|
#### Output
|
||||||
|
|
||||||
| Parameter | Type | Description |
|
| Parameter | Type | Description |
|
||||||
| --------- | ---- | ----------- |
|
| --------- | ---- | ----------- |
|
||||||
| `ts` | string | Timestamp of the operation |
|
| `ts` | string | ISO 8601 timestamp of the operation |
|
||||||
|
| `success` | boolean | Operation success status |
|
||||||
| `issueKey` | string | Issue key the worklog was added to |
|
| `issueKey` | string | Issue key the worklog was added to |
|
||||||
| `worklogId` | string | Created worklog ID |
|
| `worklogId` | string | Created worklog ID |
|
||||||
|
| `timeSpent` | string | Time spent in human-readable format \(e.g., 3h 20m\) |
|
||||||
| `timeSpentSeconds` | number | Time spent in seconds |
|
| `timeSpentSeconds` | number | Time spent in seconds |
|
||||||
|
| `author` | object | Worklog author |
|
||||||
|
| ↳ `accountId` | string | Atlassian account ID of the user |
|
||||||
|
| ↳ `displayName` | string | Display name of the user |
|
||||||
|
| ↳ `active` | boolean | Whether the user account is active |
|
||||||
|
| ↳ `emailAddress` | string | Email address of the user |
|
||||||
|
| ↳ `accountType` | string | Type of account \(e.g., atlassian, app, customer\) |
|
||||||
|
| ↳ `avatarUrl` | string | URL to the user avatar \(48x48\) |
|
||||||
|
| ↳ `timeZone` | string | User timezone |
|
||||||
|
| `started` | string | ISO 8601 timestamp when the work started |
|
||||||
|
| `created` | string | ISO 8601 timestamp when the worklog was created |
|
||||||
|
|
||||||
### `jira_get_worklogs`
|
### `jira_get_worklogs`
|
||||||
|
|
||||||
@@ -416,10 +786,36 @@ Get all worklog entries from a Jira issue
|
|||||||
|
|
||||||
| Parameter | Type | Description |
|
| Parameter | Type | Description |
|
||||||
| --------- | ---- | ----------- |
|
| --------- | ---- | ----------- |
|
||||||
| `ts` | string | Timestamp of the operation |
|
| `ts` | string | ISO 8601 timestamp of the operation |
|
||||||
| `issueKey` | string | Issue key |
|
| `issueKey` | string | Issue key |
|
||||||
| `total` | number | Total number of worklogs |
|
| `total` | number | Total number of worklogs |
|
||||||
| `worklogs` | array | Array of worklogs with id, author, timeSpentSeconds, timeSpent, comment, created, updated, started |
|
| `startAt` | number | Pagination start index |
|
||||||
|
| `maxResults` | number | Maximum results per page |
|
||||||
|
| `worklogs` | array | Array of worklogs |
|
||||||
|
| ↳ `id` | string | Worklog ID |
|
||||||
|
| ↳ `author` | object | Worklog author |
|
||||||
|
| ↳ `accountId` | string | Atlassian account ID of the user |
|
||||||
|
| ↳ `displayName` | string | Display name of the user |
|
||||||
|
| ↳ `active` | boolean | Whether the user account is active |
|
||||||
|
| ↳ `emailAddress` | string | Email address of the user |
|
||||||
|
| ↳ `accountType` | string | Type of account \(e.g., atlassian, app, customer\) |
|
||||||
|
| ↳ `avatarUrl` | string | URL to the user avatar \(48x48\) |
|
||||||
|
| ↳ `timeZone` | string | User timezone |
|
||||||
|
| ↳ `authorName` | string | Worklog author display name |
|
||||||
|
| ↳ `updateAuthor` | object | User who last updated the worklog |
|
||||||
|
| ↳ `accountId` | string | Atlassian account ID of the user |
|
||||||
|
| ↳ `displayName` | string | Display name of the user |
|
||||||
|
| ↳ `active` | boolean | Whether the user account is active |
|
||||||
|
| ↳ `emailAddress` | string | Email address of the user |
|
||||||
|
| ↳ `accountType` | string | Type of account \(e.g., atlassian, app, customer\) |
|
||||||
|
| ↳ `avatarUrl` | string | URL to the user avatar \(48x48\) |
|
||||||
|
| ↳ `timeZone` | string | User timezone |
|
||||||
|
| ↳ `comment` | string | Worklog comment text |
|
||||||
|
| ↳ `started` | string | ISO 8601 timestamp when the work started |
|
||||||
|
| ↳ `timeSpent` | string | Time spent in human-readable format \(e.g., 3h 20m\) |
|
||||||
|
| ↳ `timeSpentSeconds` | number | Time spent in seconds |
|
||||||
|
| ↳ `created` | string | ISO 8601 timestamp when the worklog was created |
|
||||||
|
| ↳ `updated` | string | ISO 8601 timestamp when the worklog was last updated |
|
||||||
|
|
||||||
### `jira_update_worklog`
|
### `jira_update_worklog`
|
||||||
|
|
||||||
@@ -435,15 +831,39 @@ Update an existing worklog entry on a Jira issue
|
|||||||
| `timeSpentSeconds` | number | No | Time spent in seconds |
|
| `timeSpentSeconds` | number | No | Time spent in seconds |
|
||||||
| `comment` | string | No | Optional comment for the worklog entry |
|
| `comment` | string | No | Optional comment for the worklog entry |
|
||||||
| `started` | string | No | Optional start time in ISO format |
|
| `started` | string | No | Optional start time in ISO format |
|
||||||
|
| `visibility` | json | No | Restrict worklog visibility. Object with "type" \("role" or "group"\) and "value" \(role/group name\). |
|
||||||
| `cloudId` | string | No | Jira Cloud ID for the instance. If not provided, it will be fetched using the domain. |
|
| `cloudId` | string | No | Jira Cloud ID for the instance. If not provided, it will be fetched using the domain. |
|
||||||
|
|
||||||
#### Output
|
#### Output
|
||||||
|
|
||||||
| Parameter | Type | Description |
|
| Parameter | Type | Description |
|
||||||
| --------- | ---- | ----------- |
|
| --------- | ---- | ----------- |
|
||||||
| `ts` | string | Timestamp of the operation |
|
| `ts` | string | ISO 8601 timestamp of the operation |
|
||||||
|
| `success` | boolean | Operation success status |
|
||||||
| `issueKey` | string | Issue key |
|
| `issueKey` | string | Issue key |
|
||||||
| `worklogId` | string | Updated worklog ID |
|
| `worklogId` | string | Updated worklog ID |
|
||||||
|
| `timeSpent` | string | Human-readable time spent \(e.g., "3h 20m"\) |
|
||||||
|
| `timeSpentSeconds` | number | Time spent in seconds |
|
||||||
|
| `comment` | string | Worklog comment text |
|
||||||
|
| `author` | object | Worklog author |
|
||||||
|
| ↳ `accountId` | string | Atlassian account ID of the user |
|
||||||
|
| ↳ `displayName` | string | Display name of the user |
|
||||||
|
| ↳ `active` | boolean | Whether the user account is active |
|
||||||
|
| ↳ `emailAddress` | string | Email address of the user |
|
||||||
|
| ↳ `accountType` | string | Type of account \(e.g., atlassian, app, customer\) |
|
||||||
|
| ↳ `avatarUrl` | string | URL to the user avatar \(48x48\) |
|
||||||
|
| ↳ `timeZone` | string | User timezone |
|
||||||
|
| `updateAuthor` | object | User who last updated the worklog |
|
||||||
|
| ↳ `accountId` | string | Atlassian account ID of the user |
|
||||||
|
| ↳ `displayName` | string | Display name of the user |
|
||||||
|
| ↳ `active` | boolean | Whether the user account is active |
|
||||||
|
| ↳ `emailAddress` | string | Email address of the user |
|
||||||
|
| ↳ `accountType` | string | Type of account \(e.g., atlassian, app, customer\) |
|
||||||
|
| ↳ `avatarUrl` | string | URL to the user avatar \(48x48\) |
|
||||||
|
| ↳ `timeZone` | string | User timezone |
|
||||||
|
| `started` | string | Worklog start time in ISO format |
|
||||||
|
| `created` | string | Worklog creation time |
|
||||||
|
| `updated` | string | Worklog last update time |
|
||||||
|
|
||||||
### `jira_delete_worklog`
|
### `jira_delete_worklog`
|
||||||
|
|
||||||
@@ -462,7 +882,8 @@ Delete a worklog entry from a Jira issue
|
|||||||
|
|
||||||
| Parameter | Type | Description |
|
| Parameter | Type | Description |
|
||||||
| --------- | ---- | ----------- |
|
| --------- | ---- | ----------- |
|
||||||
| `ts` | string | Timestamp of the operation |
|
| `ts` | string | ISO 8601 timestamp of the operation |
|
||||||
|
| `success` | boolean | Operation success status |
|
||||||
| `issueKey` | string | Issue key |
|
| `issueKey` | string | Issue key |
|
||||||
| `worklogId` | string | Deleted worklog ID |
|
| `worklogId` | string | Deleted worklog ID |
|
||||||
|
|
||||||
@@ -485,7 +906,8 @@ Create a link relationship between two Jira issues
|
|||||||
|
|
||||||
| Parameter | Type | Description |
|
| Parameter | Type | Description |
|
||||||
| --------- | ---- | ----------- |
|
| --------- | ---- | ----------- |
|
||||||
| `ts` | string | Timestamp of the operation |
|
| `ts` | string | ISO 8601 timestamp of the operation |
|
||||||
|
| `success` | boolean | Operation success status |
|
||||||
| `inwardIssue` | string | Inward issue key |
|
| `inwardIssue` | string | Inward issue key |
|
||||||
| `outwardIssue` | string | Outward issue key |
|
| `outwardIssue` | string | Outward issue key |
|
||||||
| `linkType` | string | Type of issue link |
|
| `linkType` | string | Type of issue link |
|
||||||
@@ -507,7 +929,8 @@ Delete a link between two Jira issues
|
|||||||
|
|
||||||
| Parameter | Type | Description |
|
| Parameter | Type | Description |
|
||||||
| --------- | ---- | ----------- |
|
| --------- | ---- | ----------- |
|
||||||
| `ts` | string | Timestamp of the operation |
|
| `ts` | string | ISO 8601 timestamp of the operation |
|
||||||
|
| `success` | boolean | Operation success status |
|
||||||
| `linkId` | string | Deleted link ID |
|
| `linkId` | string | Deleted link ID |
|
||||||
|
|
||||||
### `jira_add_watcher`
|
### `jira_add_watcher`
|
||||||
@@ -527,7 +950,8 @@ Add a watcher to a Jira issue to receive notifications about updates
|
|||||||
|
|
||||||
| Parameter | Type | Description |
|
| Parameter | Type | Description |
|
||||||
| --------- | ---- | ----------- |
|
| --------- | ---- | ----------- |
|
||||||
| `ts` | string | Timestamp of the operation |
|
| `ts` | string | ISO 8601 timestamp of the operation |
|
||||||
|
| `success` | boolean | Operation success status |
|
||||||
| `issueKey` | string | Issue key |
|
| `issueKey` | string | Issue key |
|
||||||
| `watcherAccountId` | string | Added watcher account ID |
|
| `watcherAccountId` | string | Added watcher account ID |
|
||||||
|
|
||||||
@@ -548,7 +972,8 @@ Remove a watcher from a Jira issue
|
|||||||
|
|
||||||
| Parameter | Type | Description |
|
| Parameter | Type | Description |
|
||||||
| --------- | ---- | ----------- |
|
| --------- | ---- | ----------- |
|
||||||
| `ts` | string | Timestamp of the operation |
|
| `ts` | string | ISO 8601 timestamp of the operation |
|
||||||
|
| `success` | boolean | Operation success status |
|
||||||
| `issueKey` | string | Issue key |
|
| `issueKey` | string | Issue key |
|
||||||
| `watcherAccountId` | string | Removed watcher account ID |
|
| `watcherAccountId` | string | Removed watcher account ID |
|
||||||
|
|
||||||
@@ -570,8 +995,17 @@ Get Jira users. If an account ID is provided, returns a single user. Otherwise,
|
|||||||
|
|
||||||
| Parameter | Type | Description |
|
| Parameter | Type | Description |
|
||||||
| --------- | ---- | ----------- |
|
| --------- | ---- | ----------- |
|
||||||
| `ts` | string | Timestamp of the operation |
|
| `ts` | string | ISO 8601 timestamp of the operation |
|
||||||
| `users` | json | Array of users with accountId, displayName, emailAddress, active status, and avatarUrls |
|
| `users` | array | Array of Jira users |
|
||||||
|
| ↳ `accountId` | string | Atlassian account ID of the user |
|
||||||
|
| ↳ `displayName` | string | Display name of the user |
|
||||||
|
| ↳ `active` | boolean | Whether the user account is active |
|
||||||
|
| ↳ `emailAddress` | string | Email address of the user |
|
||||||
|
| ↳ `accountType` | string | Type of account \(e.g., atlassian, app, customer\) |
|
||||||
|
| ↳ `avatarUrl` | string | URL to the user avatar \(48x48\) |
|
||||||
|
| ↳ `timeZone` | string | User timezone |
|
||||||
|
| ↳ `avatarUrls` | json | User avatar URLs in multiple sizes \(16x16, 24x24, 32x32, 48x48\) |
|
||||||
|
| ↳ `self` | string | REST API URL for this user |
|
||||||
| `total` | number | Total number of users returned |
|
| `total` | number | Total number of users returned |
|
||||||
| `startAt` | number | Pagination start index |
|
| `startAt` | number | Pagination start index |
|
||||||
| `maxResults` | number | Maximum results per page |
|
| `maxResults` | number | Maximum results per page |
|
||||||
|
|||||||
@@ -46,6 +46,7 @@ Get all service desks from Jira Service Management
|
|||||||
| --------- | ---- | -------- | ----------- |
|
| --------- | ---- | -------- | ----------- |
|
||||||
| `domain` | string | Yes | Your Jira domain \(e.g., yourcompany.atlassian.net\) |
|
| `domain` | string | Yes | Your Jira domain \(e.g., yourcompany.atlassian.net\) |
|
||||||
| `cloudId` | string | No | Jira Cloud ID for the instance |
|
| `cloudId` | string | No | Jira Cloud ID for the instance |
|
||||||
|
| `expand` | string | No | Comma-separated fields to expand in the response |
|
||||||
| `start` | number | No | Start index for pagination \(e.g., 0, 50, 100\) |
|
| `start` | number | No | Start index for pagination \(e.g., 0, 50, 100\) |
|
||||||
| `limit` | number | No | Maximum results to return \(e.g., 10, 25, 50\) |
|
| `limit` | number | No | Maximum results to return \(e.g., 10, 25, 50\) |
|
||||||
|
|
||||||
@@ -54,7 +55,14 @@ Get all service desks from Jira Service Management
|
|||||||
| Parameter | Type | Description |
|
| Parameter | Type | Description |
|
||||||
| --------- | ---- | ----------- |
|
| --------- | ---- | ----------- |
|
||||||
| `ts` | string | Timestamp of the operation |
|
| `ts` | string | Timestamp of the operation |
|
||||||
| `serviceDesks` | json | Array of service desks |
|
| `serviceDesks` | array | List of service desks |
|
||||||
|
| ↳ `id` | string | Service desk ID |
|
||||||
|
| ↳ `projectId` | string | Associated Jira project ID |
|
||||||
|
| ↳ `projectName` | string | Associated project name |
|
||||||
|
| ↳ `projectKey` | string | Associated project key |
|
||||||
|
| ↳ `name` | string | Service desk name |
|
||||||
|
| ↳ `description` | string | Service desk description |
|
||||||
|
| ↳ `leadDisplayName` | string | Project lead display name |
|
||||||
| `total` | number | Total number of service desks |
|
| `total` | number | Total number of service desks |
|
||||||
| `isLastPage` | boolean | Whether this is the last page |
|
| `isLastPage` | boolean | Whether this is the last page |
|
||||||
|
|
||||||
@@ -69,6 +77,9 @@ Get request types for a service desk in Jira Service Management
|
|||||||
| `domain` | string | Yes | Your Jira domain \(e.g., yourcompany.atlassian.net\) |
|
| `domain` | string | Yes | Your Jira domain \(e.g., yourcompany.atlassian.net\) |
|
||||||
| `cloudId` | string | No | Jira Cloud ID for the instance |
|
| `cloudId` | string | No | Jira Cloud ID for the instance |
|
||||||
| `serviceDeskId` | string | Yes | Service Desk ID \(e.g., "1", "2"\) |
|
| `serviceDeskId` | string | Yes | Service Desk ID \(e.g., "1", "2"\) |
|
||||||
|
| `searchQuery` | string | No | Filter request types by name |
|
||||||
|
| `groupId` | string | No | Filter by request type group ID |
|
||||||
|
| `expand` | string | No | Comma-separated fields to expand in the response |
|
||||||
| `start` | number | No | Start index for pagination \(e.g., 0, 50, 100\) |
|
| `start` | number | No | Start index for pagination \(e.g., 0, 50, 100\) |
|
||||||
| `limit` | number | No | Maximum results to return \(e.g., 10, 25, 50\) |
|
| `limit` | number | No | Maximum results to return \(e.g., 10, 25, 50\) |
|
||||||
|
|
||||||
@@ -77,7 +88,16 @@ Get request types for a service desk in Jira Service Management
|
|||||||
| Parameter | Type | Description |
|
| Parameter | Type | Description |
|
||||||
| --------- | ---- | ----------- |
|
| --------- | ---- | ----------- |
|
||||||
| `ts` | string | Timestamp of the operation |
|
| `ts` | string | Timestamp of the operation |
|
||||||
| `requestTypes` | json | Array of request types |
|
| `requestTypes` | array | List of request types |
|
||||||
|
| ↳ `id` | string | Request type ID |
|
||||||
|
| ↳ `name` | string | Request type name |
|
||||||
|
| ↳ `description` | string | Request type description |
|
||||||
|
| ↳ `helpText` | string | Help text for customers |
|
||||||
|
| ↳ `issueTypeId` | string | Associated Jira issue type ID |
|
||||||
|
| ↳ `serviceDeskId` | string | Parent service desk ID |
|
||||||
|
| ↳ `groupIds` | json | Groups this request type belongs to |
|
||||||
|
| ↳ `icon` | json | Request type icon with id and links |
|
||||||
|
| ↳ `restrictionStatus` | string | OPEN or RESTRICTED |
|
||||||
| `total` | number | Total number of request types |
|
| `total` | number | Total number of request types |
|
||||||
| `isLastPage` | boolean | Whether this is the last page |
|
| `isLastPage` | boolean | Whether this is the last page |
|
||||||
|
|
||||||
@@ -96,6 +116,9 @@ Create a new service request in Jira Service Management
|
|||||||
| `summary` | string | Yes | Summary/title for the service request |
|
| `summary` | string | Yes | Summary/title for the service request |
|
||||||
| `description` | string | No | Description for the service request |
|
| `description` | string | No | Description for the service request |
|
||||||
| `raiseOnBehalfOf` | string | No | Account ID of customer to raise request on behalf of |
|
| `raiseOnBehalfOf` | string | No | Account ID of customer to raise request on behalf of |
|
||||||
|
| `requestFieldValues` | json | No | Custom field values as key-value pairs \(overrides summary/description if provided\) |
|
||||||
|
| `requestParticipants` | string | No | Comma-separated account IDs to add as request participants |
|
||||||
|
| `channel` | string | No | Channel the request originates from \(e.g., portal, email\) |
|
||||||
|
|
||||||
#### Output
|
#### Output
|
||||||
|
|
||||||
@@ -106,6 +129,9 @@ Create a new service request in Jira Service Management
|
|||||||
| `issueKey` | string | Created request issue key \(e.g., SD-123\) |
|
| `issueKey` | string | Created request issue key \(e.g., SD-123\) |
|
||||||
| `requestTypeId` | string | Request type ID |
|
| `requestTypeId` | string | Request type ID |
|
||||||
| `serviceDeskId` | string | Service desk ID |
|
| `serviceDeskId` | string | Service desk ID |
|
||||||
|
| `createdDate` | json | Creation date with iso8601, friendly, epochMillis |
|
||||||
|
| `currentStatus` | json | Current status with status name and category |
|
||||||
|
| `reporter` | json | Reporter user with accountId, displayName, emailAddress |
|
||||||
| `success` | boolean | Whether the request was created successfully |
|
| `success` | boolean | Whether the request was created successfully |
|
||||||
| `url` | string | URL to the created request |
|
| `url` | string | URL to the created request |
|
||||||
|
|
||||||
@@ -120,12 +146,33 @@ Get a single service request from Jira Service Management
|
|||||||
| `domain` | string | Yes | Your Jira domain \(e.g., yourcompany.atlassian.net\) |
|
| `domain` | string | Yes | Your Jira domain \(e.g., yourcompany.atlassian.net\) |
|
||||||
| `cloudId` | string | No | Jira Cloud ID for the instance |
|
| `cloudId` | string | No | Jira Cloud ID for the instance |
|
||||||
| `issueIdOrKey` | string | Yes | Issue ID or key \(e.g., SD-123\) |
|
| `issueIdOrKey` | string | Yes | Issue ID or key \(e.g., SD-123\) |
|
||||||
|
| `expand` | string | No | Comma-separated fields to expand: participant, status, sla, requestType, serviceDesk, attachment, comment, action |
|
||||||
|
|
||||||
#### Output
|
#### Output
|
||||||
|
|
||||||
| Parameter | Type | Description |
|
| Parameter | Type | Description |
|
||||||
| --------- | ---- | ----------- |
|
| --------- | ---- | ----------- |
|
||||||
| `ts` | string | Timestamp of the operation |
|
| `ts` | string | Timestamp of the operation |
|
||||||
|
| `issueId` | string | Jira issue ID |
|
||||||
|
| `issueKey` | string | Issue key \(e.g., SD-123\) |
|
||||||
|
| `requestTypeId` | string | Request type ID |
|
||||||
|
| `serviceDeskId` | string | Service desk ID |
|
||||||
|
| `createdDate` | json | Creation date with iso8601, friendly, epochMillis |
|
||||||
|
| `currentStatus` | object | Current request status |
|
||||||
|
| ↳ `status` | string | Status name |
|
||||||
|
| ↳ `statusCategory` | string | Status category \(NEW, INDETERMINATE, DONE\) |
|
||||||
|
| ↳ `statusDate` | json | Status change date with iso8601, friendly, epochMillis |
|
||||||
|
| `reporter` | object | Reporter user details |
|
||||||
|
| ↳ `accountId` | string | Atlassian account ID |
|
||||||
|
| ↳ `displayName` | string | User display name |
|
||||||
|
| ↳ `emailAddress` | string | User email address |
|
||||||
|
| ↳ `active` | boolean | Whether the account is active |
|
||||||
|
| `requestFieldValues` | array | Request field values |
|
||||||
|
| ↳ `fieldId` | string | Field identifier |
|
||||||
|
| ↳ `label` | string | Human-readable field label |
|
||||||
|
| ↳ `value` | json | Field value |
|
||||||
|
| ↳ `renderedValue` | json | HTML-rendered field value |
|
||||||
|
| `url` | string | URL to the request |
|
||||||
| `request` | json | The service request object |
|
| `request` | json | The service request object |
|
||||||
|
|
||||||
### `jsm_get_requests`
|
### `jsm_get_requests`
|
||||||
@@ -139,9 +186,11 @@ Get multiple service requests from Jira Service Management
|
|||||||
| `domain` | string | Yes | Your Jira domain \(e.g., yourcompany.atlassian.net\) |
|
| `domain` | string | Yes | Your Jira domain \(e.g., yourcompany.atlassian.net\) |
|
||||||
| `cloudId` | string | No | Jira Cloud ID for the instance |
|
| `cloudId` | string | No | Jira Cloud ID for the instance |
|
||||||
| `serviceDeskId` | string | No | Filter by service desk ID \(e.g., "1", "2"\) |
|
| `serviceDeskId` | string | No | Filter by service desk ID \(e.g., "1", "2"\) |
|
||||||
| `requestOwnership` | string | No | Filter by ownership: OWNED_REQUESTS, PARTICIPATED_REQUESTS, ORGANIZATION, ALL_REQUESTS |
|
| `requestOwnership` | string | No | Filter by ownership: OWNED_REQUESTS, PARTICIPATED_REQUESTS, APPROVER, ALL_REQUESTS |
|
||||||
| `requestStatus` | string | No | Filter by status: OPEN, CLOSED, ALL |
|
| `requestStatus` | string | No | Filter by status: OPEN_REQUESTS, CLOSED_REQUESTS, ALL_REQUESTS |
|
||||||
|
| `requestTypeId` | string | No | Filter by request type ID |
|
||||||
| `searchTerm` | string | No | Search term to filter requests \(e.g., "password reset", "laptop"\) |
|
| `searchTerm` | string | No | Search term to filter requests \(e.g., "password reset", "laptop"\) |
|
||||||
|
| `expand` | string | No | Comma-separated fields to expand: participant, status, sla, requestType, serviceDesk, attachment, comment, action |
|
||||||
| `start` | number | No | Start index for pagination \(e.g., 0, 50, 100\) |
|
| `start` | number | No | Start index for pagination \(e.g., 0, 50, 100\) |
|
||||||
| `limit` | number | No | Maximum results to return \(e.g., 10, 25, 50\) |
|
| `limit` | number | No | Maximum results to return \(e.g., 10, 25, 50\) |
|
||||||
|
|
||||||
@@ -150,8 +199,27 @@ Get multiple service requests from Jira Service Management
|
|||||||
| Parameter | Type | Description |
|
| Parameter | Type | Description |
|
||||||
| --------- | ---- | ----------- |
|
| --------- | ---- | ----------- |
|
||||||
| `ts` | string | Timestamp of the operation |
|
| `ts` | string | Timestamp of the operation |
|
||||||
| `requests` | json | Array of service requests |
|
| `requests` | array | List of service requests |
|
||||||
| `total` | number | Total number of requests |
|
| ↳ `issueId` | string | Jira issue ID |
|
||||||
|
| ↳ `issueKey` | string | Issue key \(e.g., SD-123\) |
|
||||||
|
| ↳ `requestTypeId` | string | Request type ID |
|
||||||
|
| ↳ `serviceDeskId` | string | Service desk ID |
|
||||||
|
| ↳ `createdDate` | json | Creation date with iso8601, friendly, epochMillis |
|
||||||
|
| ↳ `currentStatus` | object | Current request status |
|
||||||
|
| ↳ `status` | string | Status name |
|
||||||
|
| ↳ `statusCategory` | string | Status category \(NEW, INDETERMINATE, DONE\) |
|
||||||
|
| ↳ `statusDate` | json | Status change date with iso8601, friendly, epochMillis |
|
||||||
|
| ↳ `reporter` | object | Reporter user details |
|
||||||
|
| ↳ `accountId` | string | Atlassian account ID |
|
||||||
|
| ↳ `displayName` | string | User display name |
|
||||||
|
| ↳ `emailAddress` | string | User email address |
|
||||||
|
| ↳ `active` | boolean | Whether the account is active |
|
||||||
|
| ↳ `requestFieldValues` | array | Request field values |
|
||||||
|
| ↳ `fieldId` | string | Field identifier |
|
||||||
|
| ↳ `label` | string | Human-readable field label |
|
||||||
|
| ↳ `value` | json | Field value |
|
||||||
|
| ↳ `renderedValue` | json | HTML-rendered field value |
|
||||||
|
| `total` | number | Total number of requests in current page |
|
||||||
| `isLastPage` | boolean | Whether this is the last page |
|
| `isLastPage` | boolean | Whether this is the last page |
|
||||||
|
|
||||||
### `jsm_add_comment`
|
### `jsm_add_comment`
|
||||||
@@ -177,6 +245,12 @@ Add a comment (public or internal) to a service request in Jira Service Manageme
|
|||||||
| `commentId` | string | Created comment ID |
|
| `commentId` | string | Created comment ID |
|
||||||
| `body` | string | Comment body text |
|
| `body` | string | Comment body text |
|
||||||
| `isPublic` | boolean | Whether the comment is public |
|
| `isPublic` | boolean | Whether the comment is public |
|
||||||
|
| `author` | object | Comment author |
|
||||||
|
| ↳ `accountId` | string | Atlassian account ID |
|
||||||
|
| ↳ `displayName` | string | User display name |
|
||||||
|
| ↳ `emailAddress` | string | User email address |
|
||||||
|
| ↳ `active` | boolean | Whether the account is active |
|
||||||
|
| `createdDate` | json | Comment creation date with iso8601, friendly, epochMillis |
|
||||||
| `success` | boolean | Whether the comment was added successfully |
|
| `success` | boolean | Whether the comment was added successfully |
|
||||||
|
|
||||||
### `jsm_get_comments`
|
### `jsm_get_comments`
|
||||||
@@ -192,6 +266,7 @@ Get comments for a service request in Jira Service Management
|
|||||||
| `issueIdOrKey` | string | Yes | Issue ID or key \(e.g., SD-123\) |
|
| `issueIdOrKey` | string | Yes | Issue ID or key \(e.g., SD-123\) |
|
||||||
| `isPublic` | boolean | No | Filter to only public comments \(true/false\) |
|
| `isPublic` | boolean | No | Filter to only public comments \(true/false\) |
|
||||||
| `internal` | boolean | No | Filter to only internal comments \(true/false\) |
|
| `internal` | boolean | No | Filter to only internal comments \(true/false\) |
|
||||||
|
| `expand` | string | No | Comma-separated fields to expand: renderedBody, attachment |
|
||||||
| `start` | number | No | Start index for pagination \(e.g., 0, 50, 100\) |
|
| `start` | number | No | Start index for pagination \(e.g., 0, 50, 100\) |
|
||||||
| `limit` | number | No | Maximum results to return \(e.g., 10, 25, 50\) |
|
| `limit` | number | No | Maximum results to return \(e.g., 10, 25, 50\) |
|
||||||
|
|
||||||
@@ -201,7 +276,17 @@ Get comments for a service request in Jira Service Management
|
|||||||
| --------- | ---- | ----------- |
|
| --------- | ---- | ----------- |
|
||||||
| `ts` | string | Timestamp of the operation |
|
| `ts` | string | Timestamp of the operation |
|
||||||
| `issueIdOrKey` | string | Issue ID or key |
|
| `issueIdOrKey` | string | Issue ID or key |
|
||||||
| `comments` | json | Array of comments |
|
| `comments` | array | List of comments |
|
||||||
|
| ↳ `id` | string | Comment ID |
|
||||||
|
| ↳ `body` | string | Comment body text |
|
||||||
|
| ↳ `public` | boolean | Whether the comment is public |
|
||||||
|
| ↳ `author` | object | Comment author |
|
||||||
|
| ↳ `accountId` | string | Atlassian account ID |
|
||||||
|
| ↳ `displayName` | string | User display name |
|
||||||
|
| ↳ `emailAddress` | string | User email address |
|
||||||
|
| ↳ `active` | boolean | Whether the account is active |
|
||||||
|
| ↳ `created` | json | Creation date with iso8601, friendly, epochMillis |
|
||||||
|
| ↳ `renderedBody` | json | HTML-rendered comment body \(when expand=renderedBody\) |
|
||||||
| `total` | number | Total number of comments |
|
| `total` | number | Total number of comments |
|
||||||
| `isLastPage` | boolean | Whether this is the last page |
|
| `isLastPage` | boolean | Whether this is the last page |
|
||||||
|
|
||||||
@@ -225,7 +310,12 @@ Get customers for a service desk in Jira Service Management
|
|||||||
| Parameter | Type | Description |
|
| Parameter | Type | Description |
|
||||||
| --------- | ---- | ----------- |
|
| --------- | ---- | ----------- |
|
||||||
| `ts` | string | Timestamp of the operation |
|
| `ts` | string | Timestamp of the operation |
|
||||||
| `customers` | json | Array of customers |
|
| `customers` | array | List of customers |
|
||||||
|
| ↳ `accountId` | string | Atlassian account ID |
|
||||||
|
| ↳ `displayName` | string | Display name |
|
||||||
|
| ↳ `emailAddress` | string | Email address |
|
||||||
|
| ↳ `active` | boolean | Whether the account is active |
|
||||||
|
| ↳ `timeZone` | string | User timezone |
|
||||||
| `total` | number | Total number of customers |
|
| `total` | number | Total number of customers |
|
||||||
| `isLastPage` | boolean | Whether this is the last page |
|
| `isLastPage` | boolean | Whether this is the last page |
|
||||||
|
|
||||||
@@ -240,7 +330,8 @@ Add customers to a service desk in Jira Service Management
|
|||||||
| `domain` | string | Yes | Your Jira domain \(e.g., yourcompany.atlassian.net\) |
|
| `domain` | string | Yes | Your Jira domain \(e.g., yourcompany.atlassian.net\) |
|
||||||
| `cloudId` | string | No | Jira Cloud ID for the instance |
|
| `cloudId` | string | No | Jira Cloud ID for the instance |
|
||||||
| `serviceDeskId` | string | Yes | Service Desk ID \(e.g., "1", "2"\) |
|
| `serviceDeskId` | string | Yes | Service Desk ID \(e.g., "1", "2"\) |
|
||||||
| `emails` | string | Yes | Comma-separated email addresses to add as customers |
|
| `accountIds` | string | No | Comma-separated Atlassian account IDs to add as customers |
|
||||||
|
| `emails` | string | No | Comma-separated email addresses to add as customers |
|
||||||
|
|
||||||
#### Output
|
#### Output
|
||||||
|
|
||||||
@@ -269,7 +360,9 @@ Get organizations for a service desk in Jira Service Management
|
|||||||
| Parameter | Type | Description |
|
| Parameter | Type | Description |
|
||||||
| --------- | ---- | ----------- |
|
| --------- | ---- | ----------- |
|
||||||
| `ts` | string | Timestamp of the operation |
|
| `ts` | string | Timestamp of the operation |
|
||||||
| `organizations` | json | Array of organizations |
|
| `organizations` | array | List of organizations |
|
||||||
|
| ↳ `id` | string | Organization ID |
|
||||||
|
| ↳ `name` | string | Organization name |
|
||||||
| `total` | number | Total number of organizations |
|
| `total` | number | Total number of organizations |
|
||||||
| `isLastPage` | boolean | Whether this is the last page |
|
| `isLastPage` | boolean | Whether this is the last page |
|
||||||
|
|
||||||
@@ -336,7 +429,12 @@ Get queues for a service desk in Jira Service Management
|
|||||||
| Parameter | Type | Description |
|
| Parameter | Type | Description |
|
||||||
| --------- | ---- | ----------- |
|
| --------- | ---- | ----------- |
|
||||||
| `ts` | string | Timestamp of the operation |
|
| `ts` | string | Timestamp of the operation |
|
||||||
| `queues` | json | Array of queues |
|
| `queues` | array | List of queues |
|
||||||
|
| ↳ `id` | string | Queue ID |
|
||||||
|
| ↳ `name` | string | Queue name |
|
||||||
|
| ↳ `jql` | string | JQL filter for the queue |
|
||||||
|
| ↳ `fields` | json | Fields displayed in the queue |
|
||||||
|
| ↳ `issueCount` | number | Number of issues in the queue |
|
||||||
| `total` | number | Total number of queues |
|
| `total` | number | Total number of queues |
|
||||||
| `isLastPage` | boolean | Whether this is the last page |
|
| `isLastPage` | boolean | Whether this is the last page |
|
||||||
|
|
||||||
@@ -360,7 +458,11 @@ Get SLA information for a service request in Jira Service Management
|
|||||||
| --------- | ---- | ----------- |
|
| --------- | ---- | ----------- |
|
||||||
| `ts` | string | Timestamp of the operation |
|
| `ts` | string | Timestamp of the operation |
|
||||||
| `issueIdOrKey` | string | Issue ID or key |
|
| `issueIdOrKey` | string | Issue ID or key |
|
||||||
| `slas` | json | Array of SLA information |
|
| `slas` | array | List of SLA metrics |
|
||||||
|
| ↳ `id` | string | SLA metric ID |
|
||||||
|
| ↳ `name` | string | SLA metric name |
|
||||||
|
| ↳ `completedCycles` | json | Completed SLA cycles with startTime, stopTime, breachTime, breached, goalDuration, elapsedTime, remainingTime \(each time as DateDTO, durations as DurationDTO\) |
|
||||||
|
| ↳ `ongoingCycle` | json | Ongoing SLA cycle with startTime, breachTime, breached, paused, withinCalendarHours, goalDuration, elapsedTime, remainingTime |
|
||||||
| `total` | number | Total number of SLAs |
|
| `total` | number | Total number of SLAs |
|
||||||
| `isLastPage` | boolean | Whether this is the last page |
|
| `isLastPage` | boolean | Whether this is the last page |
|
||||||
|
|
||||||
@@ -375,6 +477,8 @@ Get available transitions for a service request in Jira Service Management
|
|||||||
| `domain` | string | Yes | Your Jira domain \(e.g., yourcompany.atlassian.net\) |
|
| `domain` | string | Yes | Your Jira domain \(e.g., yourcompany.atlassian.net\) |
|
||||||
| `cloudId` | string | No | Jira Cloud ID for the instance |
|
| `cloudId` | string | No | Jira Cloud ID for the instance |
|
||||||
| `issueIdOrKey` | string | Yes | Issue ID or key \(e.g., SD-123\) |
|
| `issueIdOrKey` | string | Yes | Issue ID or key \(e.g., SD-123\) |
|
||||||
|
| `start` | number | No | Start index for pagination \(e.g., 0, 50, 100\) |
|
||||||
|
| `limit` | number | No | Maximum results to return \(e.g., 10, 25, 50\) |
|
||||||
|
|
||||||
#### Output
|
#### Output
|
||||||
|
|
||||||
@@ -382,7 +486,11 @@ Get available transitions for a service request in Jira Service Management
|
|||||||
| --------- | ---- | ----------- |
|
| --------- | ---- | ----------- |
|
||||||
| `ts` | string | Timestamp of the operation |
|
| `ts` | string | Timestamp of the operation |
|
||||||
| `issueIdOrKey` | string | Issue ID or key |
|
| `issueIdOrKey` | string | Issue ID or key |
|
||||||
| `transitions` | json | Array of available transitions |
|
| `transitions` | array | List of available transitions |
|
||||||
|
| ↳ `id` | string | Transition ID |
|
||||||
|
| ↳ `name` | string | Transition name |
|
||||||
|
| `total` | number | Total number of transitions |
|
||||||
|
| `isLastPage` | boolean | Whether this is the last page |
|
||||||
|
|
||||||
### `jsm_transition_request`
|
### `jsm_transition_request`
|
||||||
|
|
||||||
@@ -427,7 +535,11 @@ Get participants for a request in Jira Service Management
|
|||||||
| --------- | ---- | ----------- |
|
| --------- | ---- | ----------- |
|
||||||
| `ts` | string | Timestamp of the operation |
|
| `ts` | string | Timestamp of the operation |
|
||||||
| `issueIdOrKey` | string | Issue ID or key |
|
| `issueIdOrKey` | string | Issue ID or key |
|
||||||
| `participants` | json | Array of participants |
|
| `participants` | array | List of participants |
|
||||||
|
| ↳ `accountId` | string | Atlassian account ID |
|
||||||
|
| ↳ `displayName` | string | Display name |
|
||||||
|
| ↳ `emailAddress` | string | Email address |
|
||||||
|
| ↳ `active` | boolean | Whether the account is active |
|
||||||
| `total` | number | Total number of participants |
|
| `total` | number | Total number of participants |
|
||||||
| `isLastPage` | boolean | Whether this is the last page |
|
| `isLastPage` | boolean | Whether this is the last page |
|
||||||
|
|
||||||
@@ -450,7 +562,11 @@ Add participants to a request in Jira Service Management
|
|||||||
| --------- | ---- | ----------- |
|
| --------- | ---- | ----------- |
|
||||||
| `ts` | string | Timestamp of the operation |
|
| `ts` | string | Timestamp of the operation |
|
||||||
| `issueIdOrKey` | string | Issue ID or key |
|
| `issueIdOrKey` | string | Issue ID or key |
|
||||||
| `participants` | json | Array of added participants |
|
| `participants` | array | List of added participants |
|
||||||
|
| ↳ `accountId` | string | Atlassian account ID |
|
||||||
|
| ↳ `displayName` | string | Display name |
|
||||||
|
| ↳ `emailAddress` | string | Email address |
|
||||||
|
| ↳ `active` | boolean | Whether the account is active |
|
||||||
| `success` | boolean | Whether the operation succeeded |
|
| `success` | boolean | Whether the operation succeeded |
|
||||||
|
|
||||||
### `jsm_get_approvals`
|
### `jsm_get_approvals`
|
||||||
@@ -473,7 +589,20 @@ Get approvals for a request in Jira Service Management
|
|||||||
| --------- | ---- | ----------- |
|
| --------- | ---- | ----------- |
|
||||||
| `ts` | string | Timestamp of the operation |
|
| `ts` | string | Timestamp of the operation |
|
||||||
| `issueIdOrKey` | string | Issue ID or key |
|
| `issueIdOrKey` | string | Issue ID or key |
|
||||||
| `approvals` | json | Array of approvals |
|
| `approvals` | array | List of approvals |
|
||||||
|
| ↳ `id` | string | Approval ID |
|
||||||
|
| ↳ `name` | string | Approval description |
|
||||||
|
| ↳ `finalDecision` | string | Final decision: pending, approved, or declined |
|
||||||
|
| ↳ `canAnswerApproval` | boolean | Whether current user can respond |
|
||||||
|
| ↳ `approvers` | array | List of approvers with their decisions |
|
||||||
|
| ↳ `approver` | object | Approver user details |
|
||||||
|
| ↳ `accountId` | string | Atlassian account ID |
|
||||||
|
| ↳ `displayName` | string | User display name |
|
||||||
|
| ↳ `emailAddress` | string | User email address |
|
||||||
|
| ↳ `active` | boolean | Whether the account is active |
|
||||||
|
| ↳ `approverDecision` | string | Decision: pending, approved, or declined |
|
||||||
|
| ↳ `createdDate` | json | Creation date |
|
||||||
|
| ↳ `completedDate` | json | Completion date |
|
||||||
| `total` | number | Total number of approvals |
|
| `total` | number | Total number of approvals |
|
||||||
| `isLastPage` | boolean | Whether this is the last page |
|
| `isLastPage` | boolean | Whether this is the last page |
|
||||||
|
|
||||||
@@ -499,6 +628,53 @@ Approve or decline an approval request in Jira Service Management
|
|||||||
| `issueIdOrKey` | string | Issue ID or key |
|
| `issueIdOrKey` | string | Issue ID or key |
|
||||||
| `approvalId` | string | Approval ID |
|
| `approvalId` | string | Approval ID |
|
||||||
| `decision` | string | Decision made \(approve/decline\) |
|
| `decision` | string | Decision made \(approve/decline\) |
|
||||||
|
| `id` | string | Approval ID from response |
|
||||||
|
| `name` | string | Approval description |
|
||||||
|
| `finalDecision` | string | Final approval decision: pending, approved, or declined |
|
||||||
|
| `canAnswerApproval` | boolean | Whether the current user can still respond |
|
||||||
|
| `approvers` | array | Updated list of approvers with decisions |
|
||||||
|
| ↳ `approver` | object | Approver user details |
|
||||||
|
| ↳ `accountId` | string | Approver account ID |
|
||||||
|
| ↳ `displayName` | string | Approver display name |
|
||||||
|
| ↳ `emailAddress` | string | Approver email |
|
||||||
|
| ↳ `active` | boolean | Whether the account is active |
|
||||||
|
| ↳ `approverDecision` | string | Individual approver decision |
|
||||||
|
| `createdDate` | json | Approval creation date |
|
||||||
|
| `completedDate` | json | Approval completion date |
|
||||||
|
| `approval` | json | The approval object |
|
||||||
| `success` | boolean | Whether the operation succeeded |
|
| `success` | boolean | Whether the operation succeeded |
|
||||||
|
|
||||||
|
### `jsm_get_request_type_fields`
|
||||||
|
|
||||||
|
Get the fields required to create a request of a specific type in Jira Service Management
|
||||||
|
|
||||||
|
#### Input
|
||||||
|
|
||||||
|
| Parameter | Type | Required | Description |
|
||||||
|
| --------- | ---- | -------- | ----------- |
|
||||||
|
| `domain` | string | Yes | Your Jira domain \(e.g., yourcompany.atlassian.net\) |
|
||||||
|
| `cloudId` | string | No | Jira Cloud ID for the instance |
|
||||||
|
| `serviceDeskId` | string | Yes | Service Desk ID \(e.g., "1", "2"\) |
|
||||||
|
| `requestTypeId` | string | Yes | Request Type ID \(e.g., "10", "15"\) |
|
||||||
|
|
||||||
|
#### Output
|
||||||
|
|
||||||
|
| Parameter | Type | Description |
|
||||||
|
| --------- | ---- | ----------- |
|
||||||
|
| `ts` | string | Timestamp of the operation |
|
||||||
|
| `serviceDeskId` | string | Service desk ID |
|
||||||
|
| `requestTypeId` | string | Request type ID |
|
||||||
|
| `canAddRequestParticipants` | boolean | Whether participants can be added to requests of this type |
|
||||||
|
| `canRaiseOnBehalfOf` | boolean | Whether requests can be raised on behalf of another user |
|
||||||
|
| `requestTypeFields` | array | List of fields for this request type |
|
||||||
|
| ↳ `fieldId` | string | Field identifier \(e.g., summary, description, customfield_10010\) |
|
||||||
|
| ↳ `name` | string | Human-readable field name |
|
||||||
|
| ↳ `description` | string | Help text for the field |
|
||||||
|
| ↳ `required` | boolean | Whether the field is required |
|
||||||
|
| ↳ `visible` | boolean | Whether the field is visible |
|
||||||
|
| ↳ `validValues` | json | Allowed values for select fields |
|
||||||
|
| ↳ `presetValues` | json | Pre-populated values |
|
||||||
|
| ↳ `defaultValues` | json | Default values for the field |
|
||||||
|
| ↳ `jiraSchema` | json | Jira field schema with type, system, custom, customId |
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -76,6 +76,7 @@
|
|||||||
"neo4j",
|
"neo4j",
|
||||||
"notion",
|
"notion",
|
||||||
"onedrive",
|
"onedrive",
|
||||||
|
"onepassword",
|
||||||
"openai",
|
"openai",
|
||||||
"outlook",
|
"outlook",
|
||||||
"parallel_ai",
|
"parallel_ai",
|
||||||
|
|||||||
260
apps/docs/content/docs/en/tools/onepassword.mdx
Normal file
260
apps/docs/content/docs/en/tools/onepassword.mdx
Normal file
@@ -0,0 +1,260 @@
|
|||||||
|
---
|
||||||
|
title: 1Password
|
||||||
|
description: Manage secrets and items in 1Password vaults
|
||||||
|
---
|
||||||
|
|
||||||
|
import { BlockInfoCard } from "@/components/ui/block-info-card"
|
||||||
|
|
||||||
|
<BlockInfoCard
|
||||||
|
type="onepassword"
|
||||||
|
color="#E0E0E0"
|
||||||
|
/>
|
||||||
|
|
||||||
|
{/* MANUAL-CONTENT-START:intro */}
|
||||||
|
[1Password](https://1password.com) is a widely trusted password manager and secrets vault solution, allowing individuals and teams to securely store, access, and share passwords, API credentials, and sensitive information. With robust encryption, granular access controls, and seamless syncing across devices, 1Password supports teams and organizations in managing secrets efficiently and securely.
|
||||||
|
|
||||||
|
The [1Password Connect API](https://developer.1password.com/docs/connect/) allows programmatic access to vaults and items within an organization's 1Password account. This integration in Sim lets you automate secret retrieval, onboarding workflows, secret rotation, vault audits, and more, all in a secure and auditable manner.
|
||||||
|
|
||||||
|
With 1Password in your Sim workflow, you can:
|
||||||
|
|
||||||
|
- **List, search, and retrieve vaults**: Access metadata or browse available vaults for organizing secrets by project or purpose
|
||||||
|
- **Fetch items and secrets**: Get credentials, API keys, or custom secrets in real time to power your workflows securely
|
||||||
|
- **Create, update, or delete secrets**: Automate secret management, provisioning, and rotation for enhanced security practices
|
||||||
|
- **Integrate with CI/CD and automation**: Fetch credentials or tokens only when needed, reducing manual work and reducing risk
|
||||||
|
- **Ensure access controls**: Leverage role-based access and fine-grained permissions to control which agents or users can access specific secrets
|
||||||
|
|
||||||
|
By connecting Sim with 1Password, you empower your agents to securely manage secrets, reduce manual overhead, and maintain best practices for security automation, incident response, and DevOps workflows—all while ensuring secrets never leave a controlled environment.
|
||||||
|
{/* MANUAL-CONTENT-END */}
|
||||||
|
|
||||||
|
|
||||||
|
## Usage Instructions
|
||||||
|
|
||||||
|
Access and manage secrets stored in 1Password vaults using the Connect API or Service Account SDK. List vaults, retrieve items with their fields and secrets, create new items, update existing ones, delete items, and resolve secret references.
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## Tools
|
||||||
|
|
||||||
|
### `onepassword_list_vaults`
|
||||||
|
|
||||||
|
List all vaults accessible by the Connect token or Service Account
|
||||||
|
|
||||||
|
#### Input
|
||||||
|
|
||||||
|
| Parameter | Type | Required | Description |
|
||||||
|
| --------- | ---- | -------- | ----------- |
|
||||||
|
| `connectionMode` | string | No | Connection mode: "service_account" or "connect" |
|
||||||
|
| `serviceAccountToken` | string | No | 1Password Service Account token \(for Service Account mode\) |
|
||||||
|
| `apiKey` | string | No | 1Password Connect API token \(for Connect Server mode\) |
|
||||||
|
| `serverUrl` | string | No | 1Password Connect server URL \(for Connect Server mode\) |
|
||||||
|
| `filter` | string | No | SCIM filter expression \(e.g., name eq "My Vault"\) |
|
||||||
|
|
||||||
|
#### Output
|
||||||
|
|
||||||
|
| Parameter | Type | Description |
|
||||||
|
| --------- | ---- | ----------- |
|
||||||
|
| `vaults` | array | List of accessible vaults |
|
||||||
|
| ↳ `id` | string | Vault ID |
|
||||||
|
| ↳ `name` | string | Vault name |
|
||||||
|
| ↳ `description` | string | Vault description |
|
||||||
|
| ↳ `attributeVersion` | number | Vault attribute version |
|
||||||
|
| ↳ `contentVersion` | number | Vault content version |
|
||||||
|
| ↳ `type` | string | Vault type \(USER_CREATED, PERSONAL, EVERYONE, TRANSFER\) |
|
||||||
|
| ↳ `createdAt` | string | Creation timestamp |
|
||||||
|
| ↳ `updatedAt` | string | Last update timestamp |
|
||||||
|
|
||||||
|
### `onepassword_get_vault`
|
||||||
|
|
||||||
|
Get details of a specific vault by ID
|
||||||
|
|
||||||
|
#### Input
|
||||||
|
|
||||||
|
| Parameter | Type | Required | Description |
|
||||||
|
| --------- | ---- | -------- | ----------- |
|
||||||
|
| `connectionMode` | string | No | Connection mode: "service_account" or "connect" |
|
||||||
|
| `serviceAccountToken` | string | No | 1Password Service Account token \(for Service Account mode\) |
|
||||||
|
| `apiKey` | string | No | 1Password Connect API token \(for Connect Server mode\) |
|
||||||
|
| `serverUrl` | string | No | 1Password Connect server URL \(for Connect Server mode\) |
|
||||||
|
| `vaultId` | string | Yes | The vault UUID |
|
||||||
|
|
||||||
|
#### Output
|
||||||
|
|
||||||
|
| Parameter | Type | Description |
|
||||||
|
| --------- | ---- | ----------- |
|
||||||
|
| `id` | string | Vault ID |
|
||||||
|
| `name` | string | Vault name |
|
||||||
|
| `description` | string | Vault description |
|
||||||
|
| `attributeVersion` | number | Vault attribute version |
|
||||||
|
| `contentVersion` | number | Vault content version |
|
||||||
|
| `items` | number | Number of items in the vault |
|
||||||
|
| `type` | string | Vault type \(USER_CREATED, PERSONAL, EVERYONE, TRANSFER\) |
|
||||||
|
| `createdAt` | string | Creation timestamp |
|
||||||
|
| `updatedAt` | string | Last update timestamp |
|
||||||
|
|
||||||
|
### `onepassword_list_items`
|
||||||
|
|
||||||
|
List items in a vault. Returns summaries without field values.
|
||||||
|
|
||||||
|
#### Input
|
||||||
|
|
||||||
|
| Parameter | Type | Required | Description |
|
||||||
|
| --------- | ---- | -------- | ----------- |
|
||||||
|
| `connectionMode` | string | No | Connection mode: "service_account" or "connect" |
|
||||||
|
| `serviceAccountToken` | string | No | 1Password Service Account token \(for Service Account mode\) |
|
||||||
|
| `apiKey` | string | No | 1Password Connect API token \(for Connect Server mode\) |
|
||||||
|
| `serverUrl` | string | No | 1Password Connect server URL \(for Connect Server mode\) |
|
||||||
|
| `vaultId` | string | Yes | The vault UUID to list items from |
|
||||||
|
| `filter` | string | No | SCIM filter expression \(e.g., title eq "API Key" or tag eq "production"\) |
|
||||||
|
|
||||||
|
#### Output
|
||||||
|
|
||||||
|
| Parameter | Type | Description |
|
||||||
|
| --------- | ---- | ----------- |
|
||||||
|
| `items` | array | List of items in the vault \(summaries without field values\) |
|
||||||
|
| ↳ `id` | string | Item ID |
|
||||||
|
| ↳ `title` | string | Item title |
|
||||||
|
| ↳ `vault` | object | Vault reference |
|
||||||
|
| ↳ `id` | string | Vault ID |
|
||||||
|
| ↳ `category` | string | Item category \(e.g., LOGIN, API_CREDENTIAL\) |
|
||||||
|
| ↳ `urls` | array | URLs associated with the item |
|
||||||
|
| ↳ `href` | string | URL |
|
||||||
|
| ↳ `label` | string | URL label |
|
||||||
|
| ↳ `primary` | boolean | Whether this is the primary URL |
|
||||||
|
| ↳ `favorite` | boolean | Whether the item is favorited |
|
||||||
|
| ↳ `tags` | array | Item tags |
|
||||||
|
| ↳ `version` | number | Item version number |
|
||||||
|
| ↳ `state` | string | Item state \(ARCHIVED or DELETED\) |
|
||||||
|
| ↳ `createdAt` | string | Creation timestamp |
|
||||||
|
| ↳ `updatedAt` | string | Last update timestamp |
|
||||||
|
| ↳ `lastEditedBy` | string | ID of the last editor |
|
||||||
|
|
||||||
|
### `onepassword_get_item`
|
||||||
|
|
||||||
|
Get full details of an item including all fields and secrets
|
||||||
|
|
||||||
|
#### Input
|
||||||
|
|
||||||
|
| Parameter | Type | Required | Description |
|
||||||
|
| --------- | ---- | -------- | ----------- |
|
||||||
|
| `connectionMode` | string | No | Connection mode: "service_account" or "connect" |
|
||||||
|
| `serviceAccountToken` | string | No | 1Password Service Account token \(for Service Account mode\) |
|
||||||
|
| `apiKey` | string | No | 1Password Connect API token \(for Connect Server mode\) |
|
||||||
|
| `serverUrl` | string | No | 1Password Connect server URL \(for Connect Server mode\) |
|
||||||
|
| `vaultId` | string | Yes | The vault UUID |
|
||||||
|
| `itemId` | string | Yes | The item UUID to retrieve |
|
||||||
|
|
||||||
|
#### Output
|
||||||
|
|
||||||
|
| Parameter | Type | Description |
|
||||||
|
| --------- | ---- | ----------- |
|
||||||
|
| `response` | json | Operation response data |
|
||||||
|
|
||||||
|
### `onepassword_create_item`
|
||||||
|
|
||||||
|
Create a new item in a vault
|
||||||
|
|
||||||
|
#### Input
|
||||||
|
|
||||||
|
| Parameter | Type | Required | Description |
|
||||||
|
| --------- | ---- | -------- | ----------- |
|
||||||
|
| `connectionMode` | string | No | Connection mode: "service_account" or "connect" |
|
||||||
|
| `serviceAccountToken` | string | No | 1Password Service Account token \(for Service Account mode\) |
|
||||||
|
| `apiKey` | string | No | 1Password Connect API token \(for Connect Server mode\) |
|
||||||
|
| `serverUrl` | string | No | 1Password Connect server URL \(for Connect Server mode\) |
|
||||||
|
| `vaultId` | string | Yes | The vault UUID to create the item in |
|
||||||
|
| `category` | string | Yes | Item category \(e.g., LOGIN, PASSWORD, API_CREDENTIAL, SECURE_NOTE, SERVER, DATABASE\) |
|
||||||
|
| `title` | string | No | Item title |
|
||||||
|
| `tags` | string | No | Comma-separated list of tags |
|
||||||
|
| `fields` | string | No | JSON array of field objects \(e.g., \[\{"label":"username","value":"admin","type":"STRING","purpose":"USERNAME"\}\]\) |
|
||||||
|
|
||||||
|
#### Output
|
||||||
|
|
||||||
|
| Parameter | Type | Description |
|
||||||
|
| --------- | ---- | ----------- |
|
||||||
|
| `response` | json | Operation response data |
|
||||||
|
|
||||||
|
### `onepassword_replace_item`
|
||||||
|
|
||||||
|
Replace an entire item with new data (full update)
|
||||||
|
|
||||||
|
#### Input
|
||||||
|
|
||||||
|
| Parameter | Type | Required | Description |
|
||||||
|
| --------- | ---- | -------- | ----------- |
|
||||||
|
| `connectionMode` | string | No | Connection mode: "service_account" or "connect" |
|
||||||
|
| `serviceAccountToken` | string | No | 1Password Service Account token \(for Service Account mode\) |
|
||||||
|
| `apiKey` | string | No | 1Password Connect API token \(for Connect Server mode\) |
|
||||||
|
| `serverUrl` | string | No | 1Password Connect server URL \(for Connect Server mode\) |
|
||||||
|
| `vaultId` | string | Yes | The vault UUID |
|
||||||
|
| `itemId` | string | Yes | The item UUID to replace |
|
||||||
|
| `item` | string | Yes | JSON object representing the full item \(e.g., \{"vault":\{"id":"..."\},"category":"LOGIN","title":"My Item","fields":\[...\]\}\) |
|
||||||
|
|
||||||
|
#### Output
|
||||||
|
|
||||||
|
| Parameter | Type | Description |
|
||||||
|
| --------- | ---- | ----------- |
|
||||||
|
| `response` | json | Operation response data |
|
||||||
|
|
||||||
|
### `onepassword_update_item`
|
||||||
|
|
||||||
|
Update an existing item using JSON Patch operations (RFC6902)
|
||||||
|
|
||||||
|
#### Input
|
||||||
|
|
||||||
|
| Parameter | Type | Required | Description |
|
||||||
|
| --------- | ---- | -------- | ----------- |
|
||||||
|
| `connectionMode` | string | No | Connection mode: "service_account" or "connect" |
|
||||||
|
| `serviceAccountToken` | string | No | 1Password Service Account token \(for Service Account mode\) |
|
||||||
|
| `apiKey` | string | No | 1Password Connect API token \(for Connect Server mode\) |
|
||||||
|
| `serverUrl` | string | No | 1Password Connect server URL \(for Connect Server mode\) |
|
||||||
|
| `vaultId` | string | Yes | The vault UUID |
|
||||||
|
| `itemId` | string | Yes | The item UUID to update |
|
||||||
|
| `operations` | string | Yes | JSON array of RFC6902 patch operations \(e.g., \[\{"op":"replace","path":"/title","value":"New Title"\}\]\) |
|
||||||
|
|
||||||
|
#### Output
|
||||||
|
|
||||||
|
| Parameter | Type | Description |
|
||||||
|
| --------- | ---- | ----------- |
|
||||||
|
| `response` | json | Operation response data |
|
||||||
|
|
||||||
|
### `onepassword_delete_item`
|
||||||
|
|
||||||
|
Delete an item from a vault
|
||||||
|
|
||||||
|
#### Input
|
||||||
|
|
||||||
|
| Parameter | Type | Required | Description |
|
||||||
|
| --------- | ---- | -------- | ----------- |
|
||||||
|
| `connectionMode` | string | No | Connection mode: "service_account" or "connect" |
|
||||||
|
| `serviceAccountToken` | string | No | 1Password Service Account token \(for Service Account mode\) |
|
||||||
|
| `apiKey` | string | No | 1Password Connect API token \(for Connect Server mode\) |
|
||||||
|
| `serverUrl` | string | No | 1Password Connect server URL \(for Connect Server mode\) |
|
||||||
|
| `vaultId` | string | Yes | The vault UUID |
|
||||||
|
| `itemId` | string | Yes | The item UUID to delete |
|
||||||
|
|
||||||
|
#### Output
|
||||||
|
|
||||||
|
| Parameter | Type | Description |
|
||||||
|
| --------- | ---- | ----------- |
|
||||||
|
| `success` | boolean | Whether the item was successfully deleted |
|
||||||
|
|
||||||
|
### `onepassword_resolve_secret`
|
||||||
|
|
||||||
|
Resolve a secret reference (op://vault/item/field) to its value. Service Account mode only.
|
||||||
|
|
||||||
|
#### Input
|
||||||
|
|
||||||
|
| Parameter | Type | Required | Description |
|
||||||
|
| --------- | ---- | -------- | ----------- |
|
||||||
|
| `connectionMode` | string | No | Connection mode: must be "service_account" for this operation |
|
||||||
|
| `serviceAccountToken` | string | Yes | 1Password Service Account token |
|
||||||
|
| `secretReference` | string | Yes | Secret reference URI \(e.g., op://vault-name/item-name/field-name or op://vault-name/item-name/section-name/field-name\) |
|
||||||
|
|
||||||
|
#### Output
|
||||||
|
|
||||||
|
| Parameter | Type | Description |
|
||||||
|
| --------- | ---- | ----------- |
|
||||||
|
| `value` | string | The resolved secret value |
|
||||||
|
| `reference` | string | The original secret reference URI |
|
||||||
|
|
||||||
|
|
||||||
@@ -0,0 +1,6 @@
|
|||||||
|
import type { NextRequest, NextResponse } from 'next/server'
|
||||||
|
import { createMcpAuthorizationServerMetadataResponse } from '@/lib/mcp/oauth-discovery'
|
||||||
|
|
||||||
|
export async function GET(request: NextRequest): Promise<NextResponse> {
|
||||||
|
return createMcpAuthorizationServerMetadataResponse(request)
|
||||||
|
}
|
||||||
@@ -0,0 +1,6 @@
|
|||||||
|
import type { NextRequest, NextResponse } from 'next/server'
|
||||||
|
import { createMcpAuthorizationServerMetadataResponse } from '@/lib/mcp/oauth-discovery'
|
||||||
|
|
||||||
|
export async function GET(request: NextRequest): Promise<NextResponse> {
|
||||||
|
return createMcpAuthorizationServerMetadataResponse(request)
|
||||||
|
}
|
||||||
@@ -0,0 +1,6 @@
|
|||||||
|
import type { NextRequest, NextResponse } from 'next/server'
|
||||||
|
import { createMcpAuthorizationServerMetadataResponse } from '@/lib/mcp/oauth-discovery'
|
||||||
|
|
||||||
|
export async function GET(request: NextRequest): Promise<NextResponse> {
|
||||||
|
return createMcpAuthorizationServerMetadataResponse(request)
|
||||||
|
}
|
||||||
@@ -0,0 +1,6 @@
|
|||||||
|
import type { NextRequest, NextResponse } from 'next/server'
|
||||||
|
import { createMcpProtectedResourceMetadataResponse } from '@/lib/mcp/oauth-discovery'
|
||||||
|
|
||||||
|
export async function GET(request: NextRequest): Promise<NextResponse> {
|
||||||
|
return createMcpProtectedResourceMetadataResponse(request)
|
||||||
|
}
|
||||||
@@ -0,0 +1,6 @@
|
|||||||
|
import type { NextRequest, NextResponse } from 'next/server'
|
||||||
|
import { createMcpProtectedResourceMetadataResponse } from '@/lib/mcp/oauth-discovery'
|
||||||
|
|
||||||
|
export async function GET(request: NextRequest): Promise<NextResponse> {
|
||||||
|
return createMcpProtectedResourceMetadataResponse(request)
|
||||||
|
}
|
||||||
@@ -41,7 +41,12 @@ export async function GET(request: NextRequest, { params }: { params: Promise<Ro
|
|||||||
|
|
||||||
if (!agent.agent.isPublished) {
|
if (!agent.agent.isPublished) {
|
||||||
const auth = await checkSessionOrInternalAuth(request, { requireWorkflowId: false })
|
const auth = await checkSessionOrInternalAuth(request, { requireWorkflowId: false })
|
||||||
if (!auth.success) {
|
if (!auth.success || !auth.userId) {
|
||||||
|
return NextResponse.json({ error: 'Agent not published' }, { status: 404 })
|
||||||
|
}
|
||||||
|
|
||||||
|
const workspaceAccess = await checkWorkspaceAccess(agent.agent.workspaceId, auth.userId)
|
||||||
|
if (!workspaceAccess.exists || !workspaceAccess.hasAccess) {
|
||||||
return NextResponse.json({ error: 'Agent not published' }, { status: 404 })
|
return NextResponse.json({ error: 'Agent not published' }, { status: 404 })
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -16,7 +16,7 @@ import { sanitizeAgentName } from '@/lib/a2a/utils'
|
|||||||
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
|
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
|
||||||
import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/persistence/utils'
|
import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/persistence/utils'
|
||||||
import { hasValidStartBlockInState } from '@/lib/workflows/triggers/trigger-utils'
|
import { hasValidStartBlockInState } from '@/lib/workflows/triggers/trigger-utils'
|
||||||
import { getWorkspaceById } from '@/lib/workspaces/permissions/utils'
|
import { checkWorkspaceAccess } from '@/lib/workspaces/permissions/utils'
|
||||||
|
|
||||||
const logger = createLogger('A2AAgentsAPI')
|
const logger = createLogger('A2AAgentsAPI')
|
||||||
|
|
||||||
@@ -39,10 +39,13 @@ export async function GET(request: NextRequest) {
|
|||||||
return NextResponse.json({ error: 'workspaceId is required' }, { status: 400 })
|
return NextResponse.json({ error: 'workspaceId is required' }, { status: 400 })
|
||||||
}
|
}
|
||||||
|
|
||||||
const ws = await getWorkspaceById(workspaceId)
|
const workspaceAccess = await checkWorkspaceAccess(workspaceId, auth.userId)
|
||||||
if (!ws) {
|
if (!workspaceAccess.exists) {
|
||||||
return NextResponse.json({ error: 'Workspace not found' }, { status: 404 })
|
return NextResponse.json({ error: 'Workspace not found' }, { status: 404 })
|
||||||
}
|
}
|
||||||
|
if (!workspaceAccess.hasAccess) {
|
||||||
|
return NextResponse.json({ error: 'Forbidden' }, { status: 403 })
|
||||||
|
}
|
||||||
|
|
||||||
const agents = await db
|
const agents = await db
|
||||||
.select({
|
.select({
|
||||||
@@ -103,6 +106,14 @@ export async function POST(request: NextRequest) {
|
|||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const workspaceAccess = await checkWorkspaceAccess(workspaceId, auth.userId)
|
||||||
|
if (!workspaceAccess.exists) {
|
||||||
|
return NextResponse.json({ error: 'Workspace not found' }, { status: 404 })
|
||||||
|
}
|
||||||
|
if (!workspaceAccess.canWrite) {
|
||||||
|
return NextResponse.json({ error: 'Forbidden' }, { status: 403 })
|
||||||
|
}
|
||||||
|
|
||||||
const [wf] = await db
|
const [wf] = await db
|
||||||
.select({
|
.select({
|
||||||
id: workflow.id,
|
id: workflow.id,
|
||||||
|
|||||||
@@ -13,12 +13,14 @@ import {
|
|||||||
isTerminalState,
|
isTerminalState,
|
||||||
parseWorkflowSSEChunk,
|
parseWorkflowSSEChunk,
|
||||||
} from '@/lib/a2a/utils'
|
} from '@/lib/a2a/utils'
|
||||||
import { checkHybridAuth } from '@/lib/auth/hybrid'
|
import { type AuthResult, checkHybridAuth } from '@/lib/auth/hybrid'
|
||||||
import { acquireLock, getRedisClient, releaseLock } from '@/lib/core/config/redis'
|
import { acquireLock, getRedisClient, releaseLock } from '@/lib/core/config/redis'
|
||||||
import { validateUrlWithDNS } from '@/lib/core/security/input-validation.server'
|
import { validateUrlWithDNS } from '@/lib/core/security/input-validation.server'
|
||||||
import { SSE_HEADERS } from '@/lib/core/utils/sse'
|
import { SSE_HEADERS } from '@/lib/core/utils/sse'
|
||||||
import { getBaseUrl } from '@/lib/core/utils/urls'
|
import { getBaseUrl } from '@/lib/core/utils/urls'
|
||||||
import { markExecutionCancelled } from '@/lib/execution/cancellation'
|
import { markExecutionCancelled } from '@/lib/execution/cancellation'
|
||||||
|
import { checkWorkspaceAccess } from '@/lib/workspaces/permissions/utils'
|
||||||
|
import { getWorkspaceBilledAccountUserId } from '@/lib/workspaces/utils'
|
||||||
import {
|
import {
|
||||||
A2A_ERROR_CODES,
|
A2A_ERROR_CODES,
|
||||||
A2A_METHODS,
|
A2A_METHODS,
|
||||||
@@ -191,6 +193,9 @@ export async function POST(request: NextRequest, { params }: { params: Promise<R
|
|||||||
|
|
||||||
const authSchemes = (agent.authentication as { schemes?: string[] })?.schemes || []
|
const authSchemes = (agent.authentication as { schemes?: string[] })?.schemes || []
|
||||||
const requiresAuth = !authSchemes.includes('none')
|
const requiresAuth = !authSchemes.includes('none')
|
||||||
|
let authenticatedUserId: string | null = null
|
||||||
|
let authenticatedAuthType: AuthResult['authType']
|
||||||
|
let authenticatedApiKeyType: AuthResult['apiKeyType']
|
||||||
|
|
||||||
if (requiresAuth) {
|
if (requiresAuth) {
|
||||||
const auth = await checkHybridAuth(request, { requireWorkflowId: false })
|
const auth = await checkHybridAuth(request, { requireWorkflowId: false })
|
||||||
@@ -200,6 +205,17 @@ export async function POST(request: NextRequest, { params }: { params: Promise<R
|
|||||||
{ status: 401 }
|
{ status: 401 }
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
authenticatedUserId = auth.userId
|
||||||
|
authenticatedAuthType = auth.authType
|
||||||
|
authenticatedApiKeyType = auth.apiKeyType
|
||||||
|
|
||||||
|
const workspaceAccess = await checkWorkspaceAccess(agent.workspaceId, authenticatedUserId)
|
||||||
|
if (!workspaceAccess.exists || !workspaceAccess.hasAccess) {
|
||||||
|
return NextResponse.json(
|
||||||
|
createError(null, A2A_ERROR_CODES.AUTHENTICATION_REQUIRED, 'Access denied'),
|
||||||
|
{ status: 403 }
|
||||||
|
)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
const [wf] = await db
|
const [wf] = await db
|
||||||
@@ -225,34 +241,61 @@ export async function POST(request: NextRequest, { params }: { params: Promise<R
|
|||||||
}
|
}
|
||||||
|
|
||||||
const { id, method, params: rpcParams } = body
|
const { id, method, params: rpcParams } = body
|
||||||
const apiKey = request.headers.get('X-API-Key')
|
const requestApiKey = request.headers.get('X-API-Key')
|
||||||
|
const apiKey = authenticatedAuthType === 'api_key' ? requestApiKey : null
|
||||||
|
const isPersonalApiKeyCaller =
|
||||||
|
authenticatedAuthType === 'api_key' && authenticatedApiKeyType === 'personal'
|
||||||
|
const billedUserId = await getWorkspaceBilledAccountUserId(agent.workspaceId)
|
||||||
|
if (!billedUserId) {
|
||||||
|
logger.error('Unable to resolve workspace billed account for A2A execution', {
|
||||||
|
agentId: agent.id,
|
||||||
|
workspaceId: agent.workspaceId,
|
||||||
|
})
|
||||||
|
return NextResponse.json(
|
||||||
|
createError(
|
||||||
|
id,
|
||||||
|
A2A_ERROR_CODES.INTERNAL_ERROR,
|
||||||
|
'Unable to resolve billing account for this workspace'
|
||||||
|
),
|
||||||
|
{ status: 500 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
const executionUserId =
|
||||||
|
isPersonalApiKeyCaller && authenticatedUserId ? authenticatedUserId : billedUserId
|
||||||
|
|
||||||
logger.info(`A2A request: ${method} for agent ${agentId}`)
|
logger.info(`A2A request: ${method} for agent ${agentId}`)
|
||||||
|
|
||||||
switch (method) {
|
switch (method) {
|
||||||
case A2A_METHODS.MESSAGE_SEND:
|
case A2A_METHODS.MESSAGE_SEND:
|
||||||
return handleMessageSend(id, agent, rpcParams as MessageSendParams, apiKey)
|
return handleMessageSend(id, agent, rpcParams as MessageSendParams, apiKey, executionUserId)
|
||||||
|
|
||||||
case A2A_METHODS.MESSAGE_STREAM:
|
case A2A_METHODS.MESSAGE_STREAM:
|
||||||
return handleMessageStream(request, id, agent, rpcParams as MessageSendParams, apiKey)
|
return handleMessageStream(
|
||||||
|
request,
|
||||||
|
id,
|
||||||
|
agent,
|
||||||
|
rpcParams as MessageSendParams,
|
||||||
|
apiKey,
|
||||||
|
executionUserId
|
||||||
|
)
|
||||||
|
|
||||||
case A2A_METHODS.TASKS_GET:
|
case A2A_METHODS.TASKS_GET:
|
||||||
return handleTaskGet(id, rpcParams as TaskIdParams)
|
return handleTaskGet(id, agent.id, rpcParams as TaskIdParams)
|
||||||
|
|
||||||
case A2A_METHODS.TASKS_CANCEL:
|
case A2A_METHODS.TASKS_CANCEL:
|
||||||
return handleTaskCancel(id, rpcParams as TaskIdParams)
|
return handleTaskCancel(id, agent.id, rpcParams as TaskIdParams)
|
||||||
|
|
||||||
case A2A_METHODS.TASKS_RESUBSCRIBE:
|
case A2A_METHODS.TASKS_RESUBSCRIBE:
|
||||||
return handleTaskResubscribe(request, id, rpcParams as TaskIdParams)
|
return handleTaskResubscribe(request, id, agent.id, rpcParams as TaskIdParams)
|
||||||
|
|
||||||
case A2A_METHODS.PUSH_NOTIFICATION_SET:
|
case A2A_METHODS.PUSH_NOTIFICATION_SET:
|
||||||
return handlePushNotificationSet(id, rpcParams as PushNotificationSetParams)
|
return handlePushNotificationSet(id, agent.id, rpcParams as PushNotificationSetParams)
|
||||||
|
|
||||||
case A2A_METHODS.PUSH_NOTIFICATION_GET:
|
case A2A_METHODS.PUSH_NOTIFICATION_GET:
|
||||||
return handlePushNotificationGet(id, rpcParams as TaskIdParams)
|
return handlePushNotificationGet(id, agent.id, rpcParams as TaskIdParams)
|
||||||
|
|
||||||
case A2A_METHODS.PUSH_NOTIFICATION_DELETE:
|
case A2A_METHODS.PUSH_NOTIFICATION_DELETE:
|
||||||
return handlePushNotificationDelete(id, rpcParams as TaskIdParams)
|
return handlePushNotificationDelete(id, agent.id, rpcParams as TaskIdParams)
|
||||||
|
|
||||||
default:
|
default:
|
||||||
return NextResponse.json(
|
return NextResponse.json(
|
||||||
@@ -268,6 +311,14 @@ export async function POST(request: NextRequest, { params }: { params: Promise<R
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
async function getTaskForAgent(taskId: string, agentId: string) {
|
||||||
|
const [task] = await db.select().from(a2aTask).where(eq(a2aTask.id, taskId)).limit(1)
|
||||||
|
if (!task || task.agentId !== agentId) {
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
return task
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Handle message/send - Send a message (v0.3)
|
* Handle message/send - Send a message (v0.3)
|
||||||
*/
|
*/
|
||||||
@@ -280,7 +331,8 @@ async function handleMessageSend(
|
|||||||
workspaceId: string
|
workspaceId: string
|
||||||
},
|
},
|
||||||
params: MessageSendParams,
|
params: MessageSendParams,
|
||||||
apiKey?: string | null
|
apiKey?: string | null,
|
||||||
|
executionUserId?: string
|
||||||
): Promise<NextResponse> {
|
): Promise<NextResponse> {
|
||||||
if (!params?.message) {
|
if (!params?.message) {
|
||||||
return NextResponse.json(
|
return NextResponse.json(
|
||||||
@@ -318,6 +370,13 @@ async function handleMessageSend(
|
|||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if (existingTask.agentId !== agent.id) {
|
||||||
|
return NextResponse.json(
|
||||||
|
createError(id, A2A_ERROR_CODES.TASK_NOT_FOUND, 'Task not found'),
|
||||||
|
{ status: 404 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
if (isTerminalState(existingTask.status as TaskState)) {
|
if (isTerminalState(existingTask.status as TaskState)) {
|
||||||
return NextResponse.json(
|
return NextResponse.json(
|
||||||
createError(id, A2A_ERROR_CODES.TASK_ALREADY_COMPLETE, 'Task already in terminal state'),
|
createError(id, A2A_ERROR_CODES.TASK_ALREADY_COMPLETE, 'Task already in terminal state'),
|
||||||
@@ -363,6 +422,7 @@ async function handleMessageSend(
|
|||||||
} = await buildExecuteRequest({
|
} = await buildExecuteRequest({
|
||||||
workflowId: agent.workflowId,
|
workflowId: agent.workflowId,
|
||||||
apiKey,
|
apiKey,
|
||||||
|
userId: executionUserId,
|
||||||
})
|
})
|
||||||
|
|
||||||
logger.info(`Executing workflow ${agent.workflowId} for A2A task ${taskId}`)
|
logger.info(`Executing workflow ${agent.workflowId} for A2A task ${taskId}`)
|
||||||
@@ -475,7 +535,8 @@ async function handleMessageStream(
|
|||||||
workspaceId: string
|
workspaceId: string
|
||||||
},
|
},
|
||||||
params: MessageSendParams,
|
params: MessageSendParams,
|
||||||
apiKey?: string | null
|
apiKey?: string | null,
|
||||||
|
executionUserId?: string
|
||||||
): Promise<NextResponse> {
|
): Promise<NextResponse> {
|
||||||
if (!params?.message) {
|
if (!params?.message) {
|
||||||
return NextResponse.json(
|
return NextResponse.json(
|
||||||
@@ -522,6 +583,13 @@ async function handleMessageStream(
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if (existingTask.agentId !== agent.id) {
|
||||||
|
await releaseLock(lockKey, lockValue)
|
||||||
|
return NextResponse.json(createError(id, A2A_ERROR_CODES.TASK_NOT_FOUND, 'Task not found'), {
|
||||||
|
status: 404,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
if (isTerminalState(existingTask.status as TaskState)) {
|
if (isTerminalState(existingTask.status as TaskState)) {
|
||||||
await releaseLock(lockKey, lockValue)
|
await releaseLock(lockKey, lockValue)
|
||||||
return NextResponse.json(
|
return NextResponse.json(
|
||||||
@@ -595,6 +663,7 @@ async function handleMessageStream(
|
|||||||
} = await buildExecuteRequest({
|
} = await buildExecuteRequest({
|
||||||
workflowId: agent.workflowId,
|
workflowId: agent.workflowId,
|
||||||
apiKey,
|
apiKey,
|
||||||
|
userId: executionUserId,
|
||||||
stream: true,
|
stream: true,
|
||||||
})
|
})
|
||||||
|
|
||||||
@@ -788,7 +857,11 @@ async function handleMessageStream(
|
|||||||
/**
|
/**
|
||||||
* Handle tasks/get - Query task status
|
* Handle tasks/get - Query task status
|
||||||
*/
|
*/
|
||||||
async function handleTaskGet(id: string | number, params: TaskIdParams): Promise<NextResponse> {
|
async function handleTaskGet(
|
||||||
|
id: string | number,
|
||||||
|
agentId: string,
|
||||||
|
params: TaskIdParams
|
||||||
|
): Promise<NextResponse> {
|
||||||
if (!params?.id) {
|
if (!params?.id) {
|
||||||
return NextResponse.json(
|
return NextResponse.json(
|
||||||
createError(id, A2A_ERROR_CODES.INVALID_PARAMS, 'Task ID is required'),
|
createError(id, A2A_ERROR_CODES.INVALID_PARAMS, 'Task ID is required'),
|
||||||
@@ -801,7 +874,7 @@ async function handleTaskGet(id: string | number, params: TaskIdParams): Promise
|
|||||||
? params.historyLength
|
? params.historyLength
|
||||||
: undefined
|
: undefined
|
||||||
|
|
||||||
const [task] = await db.select().from(a2aTask).where(eq(a2aTask.id, params.id)).limit(1)
|
const task = await getTaskForAgent(params.id, agentId)
|
||||||
|
|
||||||
if (!task) {
|
if (!task) {
|
||||||
return NextResponse.json(createError(id, A2A_ERROR_CODES.TASK_NOT_FOUND, 'Task not found'), {
|
return NextResponse.json(createError(id, A2A_ERROR_CODES.TASK_NOT_FOUND, 'Task not found'), {
|
||||||
@@ -825,7 +898,11 @@ async function handleTaskGet(id: string | number, params: TaskIdParams): Promise
|
|||||||
/**
|
/**
|
||||||
* Handle tasks/cancel - Cancel a running task
|
* Handle tasks/cancel - Cancel a running task
|
||||||
*/
|
*/
|
||||||
async function handleTaskCancel(id: string | number, params: TaskIdParams): Promise<NextResponse> {
|
async function handleTaskCancel(
|
||||||
|
id: string | number,
|
||||||
|
agentId: string,
|
||||||
|
params: TaskIdParams
|
||||||
|
): Promise<NextResponse> {
|
||||||
if (!params?.id) {
|
if (!params?.id) {
|
||||||
return NextResponse.json(
|
return NextResponse.json(
|
||||||
createError(id, A2A_ERROR_CODES.INVALID_PARAMS, 'Task ID is required'),
|
createError(id, A2A_ERROR_CODES.INVALID_PARAMS, 'Task ID is required'),
|
||||||
@@ -833,7 +910,7 @@ async function handleTaskCancel(id: string | number, params: TaskIdParams): Prom
|
|||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
const [task] = await db.select().from(a2aTask).where(eq(a2aTask.id, params.id)).limit(1)
|
const task = await getTaskForAgent(params.id, agentId)
|
||||||
|
|
||||||
if (!task) {
|
if (!task) {
|
||||||
return NextResponse.json(createError(id, A2A_ERROR_CODES.TASK_NOT_FOUND, 'Task not found'), {
|
return NextResponse.json(createError(id, A2A_ERROR_CODES.TASK_NOT_FOUND, 'Task not found'), {
|
||||||
@@ -897,6 +974,7 @@ async function handleTaskCancel(id: string | number, params: TaskIdParams): Prom
|
|||||||
async function handleTaskResubscribe(
|
async function handleTaskResubscribe(
|
||||||
request: NextRequest,
|
request: NextRequest,
|
||||||
id: string | number,
|
id: string | number,
|
||||||
|
agentId: string,
|
||||||
params: TaskIdParams
|
params: TaskIdParams
|
||||||
): Promise<NextResponse> {
|
): Promise<NextResponse> {
|
||||||
if (!params?.id) {
|
if (!params?.id) {
|
||||||
@@ -906,7 +984,7 @@ async function handleTaskResubscribe(
|
|||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
const [task] = await db.select().from(a2aTask).where(eq(a2aTask.id, params.id)).limit(1)
|
const task = await getTaskForAgent(params.id, agentId)
|
||||||
|
|
||||||
if (!task) {
|
if (!task) {
|
||||||
return NextResponse.json(createError(id, A2A_ERROR_CODES.TASK_NOT_FOUND, 'Task not found'), {
|
return NextResponse.json(createError(id, A2A_ERROR_CODES.TASK_NOT_FOUND, 'Task not found'), {
|
||||||
@@ -1103,6 +1181,7 @@ async function handleTaskResubscribe(
|
|||||||
*/
|
*/
|
||||||
async function handlePushNotificationSet(
|
async function handlePushNotificationSet(
|
||||||
id: string | number,
|
id: string | number,
|
||||||
|
agentId: string,
|
||||||
params: PushNotificationSetParams
|
params: PushNotificationSetParams
|
||||||
): Promise<NextResponse> {
|
): Promise<NextResponse> {
|
||||||
if (!params?.id) {
|
if (!params?.id) {
|
||||||
@@ -1130,7 +1209,7 @@ async function handlePushNotificationSet(
|
|||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
const [task] = await db.select().from(a2aTask).where(eq(a2aTask.id, params.id)).limit(1)
|
const task = await getTaskForAgent(params.id, agentId)
|
||||||
|
|
||||||
if (!task) {
|
if (!task) {
|
||||||
return NextResponse.json(createError(id, A2A_ERROR_CODES.TASK_NOT_FOUND, 'Task not found'), {
|
return NextResponse.json(createError(id, A2A_ERROR_CODES.TASK_NOT_FOUND, 'Task not found'), {
|
||||||
@@ -1181,6 +1260,7 @@ async function handlePushNotificationSet(
|
|||||||
*/
|
*/
|
||||||
async function handlePushNotificationGet(
|
async function handlePushNotificationGet(
|
||||||
id: string | number,
|
id: string | number,
|
||||||
|
agentId: string,
|
||||||
params: TaskIdParams
|
params: TaskIdParams
|
||||||
): Promise<NextResponse> {
|
): Promise<NextResponse> {
|
||||||
if (!params?.id) {
|
if (!params?.id) {
|
||||||
@@ -1190,7 +1270,7 @@ async function handlePushNotificationGet(
|
|||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
const [task] = await db.select().from(a2aTask).where(eq(a2aTask.id, params.id)).limit(1)
|
const task = await getTaskForAgent(params.id, agentId)
|
||||||
|
|
||||||
if (!task) {
|
if (!task) {
|
||||||
return NextResponse.json(createError(id, A2A_ERROR_CODES.TASK_NOT_FOUND, 'Task not found'), {
|
return NextResponse.json(createError(id, A2A_ERROR_CODES.TASK_NOT_FOUND, 'Task not found'), {
|
||||||
@@ -1224,6 +1304,7 @@ async function handlePushNotificationGet(
|
|||||||
*/
|
*/
|
||||||
async function handlePushNotificationDelete(
|
async function handlePushNotificationDelete(
|
||||||
id: string | number,
|
id: string | number,
|
||||||
|
agentId: string,
|
||||||
params: TaskIdParams
|
params: TaskIdParams
|
||||||
): Promise<NextResponse> {
|
): Promise<NextResponse> {
|
||||||
if (!params?.id) {
|
if (!params?.id) {
|
||||||
@@ -1233,7 +1314,7 @@ async function handlePushNotificationDelete(
|
|||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
const [task] = await db.select().from(a2aTask).where(eq(a2aTask.id, params.id)).limit(1)
|
const task = await getTaskForAgent(params.id, agentId)
|
||||||
|
|
||||||
if (!task) {
|
if (!task) {
|
||||||
return NextResponse.json(createError(id, A2A_ERROR_CODES.TASK_NOT_FOUND, 'Task not found'), {
|
return NextResponse.json(createError(id, A2A_ERROR_CODES.TASK_NOT_FOUND, 'Task not found'), {
|
||||||
|
|||||||
@@ -105,6 +105,7 @@ export function formatTaskResponse(task: Task, historyLength?: number): Task {
|
|||||||
export interface ExecuteRequestConfig {
|
export interface ExecuteRequestConfig {
|
||||||
workflowId: string
|
workflowId: string
|
||||||
apiKey?: string | null
|
apiKey?: string | null
|
||||||
|
userId?: string
|
||||||
stream?: boolean
|
stream?: boolean
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -124,7 +125,7 @@ export async function buildExecuteRequest(
|
|||||||
if (config.apiKey) {
|
if (config.apiKey) {
|
||||||
headers['X-API-Key'] = config.apiKey
|
headers['X-API-Key'] = config.apiKey
|
||||||
} else {
|
} else {
|
||||||
const internalToken = await generateInternalToken()
|
const internalToken = await generateInternalToken(config.userId)
|
||||||
headers.Authorization = `Bearer ${internalToken}`
|
headers.Authorization = `Bearer ${internalToken}`
|
||||||
useInternalAuth = true
|
useInternalAuth = true
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
import { db } from '@sim/db'
|
import { db } from '@sim/db'
|
||||||
import { account, user, workflow } from '@sim/db/schema'
|
import { account, user } from '@sim/db/schema'
|
||||||
import { createLogger } from '@sim/logger'
|
import { createLogger } from '@sim/logger'
|
||||||
import { and, eq } from 'drizzle-orm'
|
import { and, eq } from 'drizzle-orm'
|
||||||
import { jwtDecode } from 'jwt-decode'
|
import { jwtDecode } from 'jwt-decode'
|
||||||
@@ -8,7 +8,7 @@ import { z } from 'zod'
|
|||||||
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
|
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
|
||||||
import { generateRequestId } from '@/lib/core/utils/request'
|
import { generateRequestId } from '@/lib/core/utils/request'
|
||||||
import { evaluateScopeCoverage, type OAuthProvider, parseProvider } from '@/lib/oauth'
|
import { evaluateScopeCoverage, type OAuthProvider, parseProvider } from '@/lib/oauth'
|
||||||
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
|
import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils'
|
||||||
|
|
||||||
export const dynamic = 'force-dynamic'
|
export const dynamic = 'force-dynamic'
|
||||||
|
|
||||||
@@ -80,7 +80,7 @@ export async function GET(request: NextRequest) {
|
|||||||
|
|
||||||
const { provider: providerParam, workflowId, credentialId } = parseResult.data
|
const { provider: providerParam, workflowId, credentialId } = parseResult.data
|
||||||
|
|
||||||
// Authenticate requester (supports session, API key, internal JWT)
|
// Authenticate requester (supports session and internal JWT)
|
||||||
const authResult = await checkSessionOrInternalAuth(request)
|
const authResult = await checkSessionOrInternalAuth(request)
|
||||||
if (!authResult.success || !authResult.userId) {
|
if (!authResult.success || !authResult.userId) {
|
||||||
logger.warn(`[${requestId}] Unauthenticated credentials request rejected`)
|
logger.warn(`[${requestId}] Unauthenticated credentials request rejected`)
|
||||||
@@ -88,66 +88,41 @@ export async function GET(request: NextRequest) {
|
|||||||
}
|
}
|
||||||
const requesterUserId = authResult.userId
|
const requesterUserId = authResult.userId
|
||||||
|
|
||||||
// Resolve effective user id: workflow owner if workflowId provided (with access check); else requester
|
const effectiveUserId = requesterUserId
|
||||||
let effectiveUserId: string
|
|
||||||
if (workflowId) {
|
if (workflowId) {
|
||||||
// Load workflow owner and workspace for access control
|
const workflowAuthorization = await authorizeWorkflowByWorkspacePermission({
|
||||||
const rows = await db
|
workflowId,
|
||||||
.select({ userId: workflow.userId, workspaceId: workflow.workspaceId })
|
userId: requesterUserId,
|
||||||
.from(workflow)
|
action: 'read',
|
||||||
.where(eq(workflow.id, workflowId))
|
|
||||||
.limit(1)
|
|
||||||
|
|
||||||
if (!rows.length) {
|
|
||||||
logger.warn(`[${requestId}] Workflow not found for credentials request`, { workflowId })
|
|
||||||
return NextResponse.json({ error: 'Workflow not found' }, { status: 404 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const wf = rows[0]
|
|
||||||
|
|
||||||
if (requesterUserId !== wf.userId) {
|
|
||||||
if (!wf.workspaceId) {
|
|
||||||
logger.warn(
|
|
||||||
`[${requestId}] Forbidden - workflow has no workspace and requester is not owner`,
|
|
||||||
{
|
|
||||||
requesterUserId,
|
|
||||||
}
|
|
||||||
)
|
|
||||||
return NextResponse.json({ error: 'Forbidden' }, { status: 403 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const perm = await getUserEntityPermissions(requesterUserId, 'workspace', wf.workspaceId)
|
|
||||||
if (perm === null) {
|
|
||||||
logger.warn(`[${requestId}] Forbidden credentials request - no workspace access`, {
|
|
||||||
requesterUserId,
|
|
||||||
workspaceId: wf.workspaceId,
|
|
||||||
})
|
})
|
||||||
return NextResponse.json({ error: 'Forbidden' }, { status: 403 })
|
if (!workflowAuthorization.allowed) {
|
||||||
|
logger.warn(`[${requestId}] Forbidden credentials request for workflow`, {
|
||||||
|
requesterUserId,
|
||||||
|
workflowId,
|
||||||
|
status: workflowAuthorization.status,
|
||||||
|
})
|
||||||
|
return NextResponse.json(
|
||||||
|
{ error: workflowAuthorization.message || 'Forbidden' },
|
||||||
|
{ status: workflowAuthorization.status }
|
||||||
|
)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
effectiveUserId = wf.userId
|
|
||||||
} else {
|
|
||||||
effectiveUserId = requesterUserId
|
|
||||||
}
|
|
||||||
|
|
||||||
// Parse the provider to get base provider and feature type (if provider is present)
|
// Parse the provider to get base provider and feature type (if provider is present)
|
||||||
const { baseProvider } = parseProvider((providerParam || 'google') as OAuthProvider)
|
const { baseProvider } = parseProvider((providerParam || 'google') as OAuthProvider)
|
||||||
|
|
||||||
let accountsData
|
let accountsData
|
||||||
|
|
||||||
if (credentialId) {
|
if (credentialId && workflowId) {
|
||||||
// Foreign-aware lookup for a specific credential by id
|
// When both workflowId and credentialId are provided, fetch by ID only.
|
||||||
// If workflowId is provided and requester has access (checked above), allow fetching by id only
|
// Workspace authorization above already proves access; the credential
|
||||||
if (workflowId) {
|
// may belong to another workspace member (e.g. for display name resolution).
|
||||||
accountsData = await db.select().from(account).where(eq(account.id, credentialId))
|
accountsData = await db.select().from(account).where(eq(account.id, credentialId))
|
||||||
} else {
|
} else if (credentialId) {
|
||||||
// Fallback: constrain to requester's own credentials when not in a workflow context
|
|
||||||
accountsData = await db
|
accountsData = await db
|
||||||
.select()
|
.select()
|
||||||
.from(account)
|
.from(account)
|
||||||
.where(and(eq(account.userId, effectiveUserId), eq(account.id, credentialId)))
|
.where(and(eq(account.userId, effectiveUserId), eq(account.id, credentialId)))
|
||||||
}
|
|
||||||
} else {
|
} else {
|
||||||
// Fetch all credentials for provider and effective user
|
// Fetch all credentials for provider and effective user
|
||||||
accountsData = await db
|
accountsData = await db
|
||||||
|
|||||||
@@ -4,16 +4,9 @@
|
|||||||
* @vitest-environment node
|
* @vitest-environment node
|
||||||
*/
|
*/
|
||||||
|
|
||||||
import { createSession, loggerMock } from '@sim/testing'
|
import { loggerMock } from '@sim/testing'
|
||||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
|
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
|
||||||
|
|
||||||
const mockSession = createSession({ userId: 'test-user-id' })
|
|
||||||
const mockGetSession = vi.fn()
|
|
||||||
|
|
||||||
vi.mock('@/lib/auth', () => ({
|
|
||||||
getSession: () => mockGetSession(),
|
|
||||||
}))
|
|
||||||
|
|
||||||
vi.mock('@sim/db', () => ({
|
vi.mock('@sim/db', () => ({
|
||||||
db: {
|
db: {
|
||||||
select: vi.fn().mockReturnThis(),
|
select: vi.fn().mockReturnThis(),
|
||||||
@@ -37,7 +30,6 @@ import { db } from '@sim/db'
|
|||||||
import { refreshOAuthToken } from '@/lib/oauth'
|
import { refreshOAuthToken } from '@/lib/oauth'
|
||||||
import {
|
import {
|
||||||
getCredential,
|
getCredential,
|
||||||
getUserId,
|
|
||||||
refreshAccessTokenIfNeeded,
|
refreshAccessTokenIfNeeded,
|
||||||
refreshTokenIfNeeded,
|
refreshTokenIfNeeded,
|
||||||
} from '@/app/api/auth/oauth/utils'
|
} from '@/app/api/auth/oauth/utils'
|
||||||
@@ -48,7 +40,6 @@ const mockRefreshOAuthToken = refreshOAuthToken as any
|
|||||||
describe('OAuth Utils', () => {
|
describe('OAuth Utils', () => {
|
||||||
beforeEach(() => {
|
beforeEach(() => {
|
||||||
vi.clearAllMocks()
|
vi.clearAllMocks()
|
||||||
mockGetSession.mockResolvedValue(mockSession)
|
|
||||||
mockDbTyped.limit.mockReturnValue([])
|
mockDbTyped.limit.mockReturnValue([])
|
||||||
})
|
})
|
||||||
|
|
||||||
@@ -56,42 +47,6 @@ describe('OAuth Utils', () => {
|
|||||||
vi.clearAllMocks()
|
vi.clearAllMocks()
|
||||||
})
|
})
|
||||||
|
|
||||||
describe('getUserId', () => {
|
|
||||||
it('should get user ID from session when no workflowId is provided', async () => {
|
|
||||||
const userId = await getUserId('request-id')
|
|
||||||
|
|
||||||
expect(userId).toBe('test-user-id')
|
|
||||||
})
|
|
||||||
|
|
||||||
it('should get user ID from workflow when workflowId is provided', async () => {
|
|
||||||
mockDbTyped.limit.mockReturnValueOnce([{ userId: 'workflow-owner-id' }])
|
|
||||||
|
|
||||||
const userId = await getUserId('request-id', 'workflow-id')
|
|
||||||
|
|
||||||
expect(mockDbTyped.select).toHaveBeenCalled()
|
|
||||||
expect(mockDbTyped.from).toHaveBeenCalled()
|
|
||||||
expect(mockDbTyped.where).toHaveBeenCalled()
|
|
||||||
expect(mockDbTyped.limit).toHaveBeenCalledWith(1)
|
|
||||||
expect(userId).toBe('workflow-owner-id')
|
|
||||||
})
|
|
||||||
|
|
||||||
it('should return undefined if no session is found', async () => {
|
|
||||||
mockGetSession.mockResolvedValueOnce(null)
|
|
||||||
|
|
||||||
const userId = await getUserId('request-id')
|
|
||||||
|
|
||||||
expect(userId).toBeUndefined()
|
|
||||||
})
|
|
||||||
|
|
||||||
it('should return undefined if workflow is not found', async () => {
|
|
||||||
mockDbTyped.limit.mockReturnValueOnce([])
|
|
||||||
|
|
||||||
const userId = await getUserId('request-id', 'nonexistent-workflow-id')
|
|
||||||
|
|
||||||
expect(userId).toBeUndefined()
|
|
||||||
})
|
|
||||||
})
|
|
||||||
|
|
||||||
describe('getCredential', () => {
|
describe('getCredential', () => {
|
||||||
it('should return credential when found', async () => {
|
it('should return credential when found', async () => {
|
||||||
const mockCredential = { id: 'credential-id', userId: 'test-user-id' }
|
const mockCredential = { id: 'credential-id', userId: 'test-user-id' }
|
||||||
|
|||||||
@@ -1,8 +1,7 @@
|
|||||||
import { db } from '@sim/db'
|
import { db } from '@sim/db'
|
||||||
import { account, credentialSetMember, workflow } from '@sim/db/schema'
|
import { account, credentialSetMember } from '@sim/db/schema'
|
||||||
import { createLogger } from '@sim/logger'
|
import { createLogger } from '@sim/logger'
|
||||||
import { and, desc, eq, inArray } from 'drizzle-orm'
|
import { and, desc, eq, inArray } from 'drizzle-orm'
|
||||||
import { getSession } from '@/lib/auth'
|
|
||||||
import { refreshOAuthToken } from '@/lib/oauth'
|
import { refreshOAuthToken } from '@/lib/oauth'
|
||||||
import {
|
import {
|
||||||
getMicrosoftRefreshTokenExpiry,
|
getMicrosoftRefreshTokenExpiry,
|
||||||
@@ -49,41 +48,6 @@ export async function safeAccountInsert(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* Get the user ID based on either a session or a workflow ID
|
|
||||||
*/
|
|
||||||
export async function getUserId(
|
|
||||||
requestId: string,
|
|
||||||
workflowId?: string
|
|
||||||
): Promise<string | undefined> {
|
|
||||||
// If workflowId is provided, this is a server-side request
|
|
||||||
if (workflowId) {
|
|
||||||
// Get the workflow to verify the user ID
|
|
||||||
const workflows = await db
|
|
||||||
.select({ userId: workflow.userId })
|
|
||||||
.from(workflow)
|
|
||||||
.where(eq(workflow.id, workflowId))
|
|
||||||
.limit(1)
|
|
||||||
|
|
||||||
if (!workflows.length) {
|
|
||||||
logger.warn(`[${requestId}] Workflow not found`)
|
|
||||||
return undefined
|
|
||||||
}
|
|
||||||
|
|
||||||
return workflows[0].userId
|
|
||||||
}
|
|
||||||
// This is a client-side request, use the session
|
|
||||||
const session = await getSession()
|
|
||||||
|
|
||||||
// Check if the user is authenticated
|
|
||||||
if (!session?.user?.id) {
|
|
||||||
logger.warn(`[${requestId}] Unauthenticated request rejected`)
|
|
||||||
return undefined
|
|
||||||
}
|
|
||||||
|
|
||||||
return session.user.id
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Get a credential by ID and verify it belongs to the user
|
* Get a credential by ID and verify it belongs to the user
|
||||||
*/
|
*/
|
||||||
|
|||||||
@@ -4,6 +4,7 @@ import { createLogger } from '@sim/logger'
|
|||||||
import { and, eq, or } from 'drizzle-orm'
|
import { and, eq, or } from 'drizzle-orm'
|
||||||
import { type NextRequest, NextResponse } from 'next/server'
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
import { getSession } from '@/lib/auth'
|
import { getSession } from '@/lib/auth'
|
||||||
|
import { isOrganizationOwnerOrAdmin } from '@/lib/billing/core/organization'
|
||||||
import { requireStripeClient } from '@/lib/billing/stripe-client'
|
import { requireStripeClient } from '@/lib/billing/stripe-client'
|
||||||
import { getBaseUrl } from '@/lib/core/utils/urls'
|
import { getBaseUrl } from '@/lib/core/utils/urls'
|
||||||
|
|
||||||
@@ -32,6 +33,11 @@ export async function POST(request: NextRequest) {
|
|||||||
return NextResponse.json({ error: 'organizationId is required' }, { status: 400 })
|
return NextResponse.json({ error: 'organizationId is required' }, { status: 400 })
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const hasPermission = await isOrganizationOwnerOrAdmin(session.user.id, organizationId)
|
||||||
|
if (!hasPermission) {
|
||||||
|
return NextResponse.json({ error: 'Permission denied' }, { status: 403 })
|
||||||
|
}
|
||||||
|
|
||||||
const rows = await db
|
const rows = await db
|
||||||
.select({ customer: subscriptionTable.stripeCustomerId })
|
.select({ customer: subscriptionTable.stripeCustomerId })
|
||||||
.from(subscriptionTable)
|
.from(subscriptionTable)
|
||||||
|
|||||||
@@ -18,6 +18,7 @@ const UpdateCostSchema = z.object({
|
|||||||
model: z.string().min(1, 'Model is required'),
|
model: z.string().min(1, 'Model is required'),
|
||||||
inputTokens: z.number().min(0).default(0),
|
inputTokens: z.number().min(0).default(0),
|
||||||
outputTokens: z.number().min(0).default(0),
|
outputTokens: z.number().min(0).default(0),
|
||||||
|
source: z.enum(['copilot', 'mcp_copilot']).default('copilot'),
|
||||||
})
|
})
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -75,12 +76,14 @@ export async function POST(req: NextRequest) {
|
|||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
const { userId, cost, model, inputTokens, outputTokens } = validation.data
|
const { userId, cost, model, inputTokens, outputTokens, source } = validation.data
|
||||||
|
const isMcp = source === 'mcp_copilot'
|
||||||
|
|
||||||
logger.info(`[${requestId}] Processing cost update`, {
|
logger.info(`[${requestId}] Processing cost update`, {
|
||||||
userId,
|
userId,
|
||||||
cost,
|
cost,
|
||||||
model,
|
model,
|
||||||
|
source,
|
||||||
})
|
})
|
||||||
|
|
||||||
// Check if user stats record exists (same as ExecutionLogger)
|
// Check if user stats record exists (same as ExecutionLogger)
|
||||||
@@ -96,7 +99,7 @@ export async function POST(req: NextRequest) {
|
|||||||
return NextResponse.json({ error: 'User stats record not found' }, { status: 500 })
|
return NextResponse.json({ error: 'User stats record not found' }, { status: 500 })
|
||||||
}
|
}
|
||||||
|
|
||||||
const updateFields = {
|
const updateFields: Record<string, unknown> = {
|
||||||
totalCost: sql`total_cost + ${cost}`,
|
totalCost: sql`total_cost + ${cost}`,
|
||||||
currentPeriodCost: sql`current_period_cost + ${cost}`,
|
currentPeriodCost: sql`current_period_cost + ${cost}`,
|
||||||
totalCopilotCost: sql`total_copilot_cost + ${cost}`,
|
totalCopilotCost: sql`total_copilot_cost + ${cost}`,
|
||||||
@@ -105,17 +108,24 @@ export async function POST(req: NextRequest) {
|
|||||||
lastActive: new Date(),
|
lastActive: new Date(),
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Also increment MCP-specific counters when source is mcp_copilot
|
||||||
|
if (isMcp) {
|
||||||
|
updateFields.totalMcpCopilotCost = sql`total_mcp_copilot_cost + ${cost}`
|
||||||
|
updateFields.currentPeriodMcpCopilotCost = sql`current_period_mcp_copilot_cost + ${cost}`
|
||||||
|
}
|
||||||
|
|
||||||
await db.update(userStats).set(updateFields).where(eq(userStats.userId, userId))
|
await db.update(userStats).set(updateFields).where(eq(userStats.userId, userId))
|
||||||
|
|
||||||
logger.info(`[${requestId}] Updated user stats record`, {
|
logger.info(`[${requestId}] Updated user stats record`, {
|
||||||
userId,
|
userId,
|
||||||
addedCost: cost,
|
addedCost: cost,
|
||||||
|
source,
|
||||||
})
|
})
|
||||||
|
|
||||||
// Log usage for complete audit trail
|
// Log usage for complete audit trail
|
||||||
await logModelUsage({
|
await logModelUsage({
|
||||||
userId,
|
userId,
|
||||||
source: 'copilot',
|
source: isMcp ? 'mcp_copilot' : 'copilot',
|
||||||
model,
|
model,
|
||||||
inputTokens,
|
inputTokens,
|
||||||
outputTokens,
|
outputTokens,
|
||||||
|
|||||||
@@ -47,6 +47,10 @@ vi.mock('@/lib/core/config/feature-flags', () => ({
|
|||||||
isProd: false,
|
isProd: false,
|
||||||
}))
|
}))
|
||||||
|
|
||||||
|
vi.mock('@/lib/workflows/utils', () => ({
|
||||||
|
authorizeWorkflowByWorkspacePermission: vi.fn(),
|
||||||
|
}))
|
||||||
|
|
||||||
describe('Chat API Utils', () => {
|
describe('Chat API Utils', () => {
|
||||||
beforeEach(() => {
|
beforeEach(() => {
|
||||||
vi.stubGlobal('process', {
|
vi.stubGlobal('process', {
|
||||||
|
|||||||
@@ -9,7 +9,7 @@ import {
|
|||||||
validateAuthToken,
|
validateAuthToken,
|
||||||
} from '@/lib/core/security/deployment'
|
} from '@/lib/core/security/deployment'
|
||||||
import { decryptSecret } from '@/lib/core/security/encryption'
|
import { decryptSecret } from '@/lib/core/security/encryption'
|
||||||
import { hasAdminPermission } from '@/lib/workspaces/permissions/utils'
|
import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils'
|
||||||
|
|
||||||
const logger = createLogger('ChatAuthUtils')
|
const logger = createLogger('ChatAuthUtils')
|
||||||
|
|
||||||
@@ -24,29 +24,23 @@ export function setChatAuthCookie(
|
|||||||
|
|
||||||
/**
|
/**
|
||||||
* Check if user has permission to create a chat for a specific workflow
|
* Check if user has permission to create a chat for a specific workflow
|
||||||
* Either the user owns the workflow directly OR has admin permission for the workflow's workspace
|
|
||||||
*/
|
*/
|
||||||
export async function checkWorkflowAccessForChatCreation(
|
export async function checkWorkflowAccessForChatCreation(
|
||||||
workflowId: string,
|
workflowId: string,
|
||||||
userId: string
|
userId: string
|
||||||
): Promise<{ hasAccess: boolean; workflow?: any }> {
|
): Promise<{ hasAccess: boolean; workflow?: any }> {
|
||||||
const workflowData = await db.select().from(workflow).where(eq(workflow.id, workflowId)).limit(1)
|
const authorization = await authorizeWorkflowByWorkspacePermission({
|
||||||
|
workflowId,
|
||||||
|
userId,
|
||||||
|
action: 'admin',
|
||||||
|
})
|
||||||
|
|
||||||
if (workflowData.length === 0) {
|
if (!authorization.workflow) {
|
||||||
return { hasAccess: false }
|
return { hasAccess: false }
|
||||||
}
|
}
|
||||||
|
|
||||||
const workflowRecord = workflowData[0]
|
if (authorization.allowed) {
|
||||||
|
return { hasAccess: true, workflow: authorization.workflow }
|
||||||
if (workflowRecord.userId === userId) {
|
|
||||||
return { hasAccess: true, workflow: workflowRecord }
|
|
||||||
}
|
|
||||||
|
|
||||||
if (workflowRecord.workspaceId) {
|
|
||||||
const hasAdmin = await hasAdminPermission(userId, workflowRecord.workspaceId)
|
|
||||||
if (hasAdmin) {
|
|
||||||
return { hasAccess: true, workflow: workflowRecord }
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
return { hasAccess: false }
|
return { hasAccess: false }
|
||||||
@@ -54,7 +48,6 @@ export async function checkWorkflowAccessForChatCreation(
|
|||||||
|
|
||||||
/**
|
/**
|
||||||
* Check if user has access to view/edit/delete a specific chat
|
* Check if user has access to view/edit/delete a specific chat
|
||||||
* Either the user owns the chat directly OR has admin permission for the workflow's workspace
|
|
||||||
*/
|
*/
|
||||||
export async function checkChatAccess(
|
export async function checkChatAccess(
|
||||||
chatId: string,
|
chatId: string,
|
||||||
@@ -75,21 +68,19 @@ export async function checkChatAccess(
|
|||||||
}
|
}
|
||||||
|
|
||||||
const { chat: chatRecord, workflowWorkspaceId } = chatData[0]
|
const { chat: chatRecord, workflowWorkspaceId } = chatData[0]
|
||||||
|
if (!workflowWorkspaceId) {
|
||||||
if (chatRecord.userId === userId) {
|
|
||||||
return { hasAccess: true, chat: chatRecord }
|
|
||||||
}
|
|
||||||
|
|
||||||
if (workflowWorkspaceId) {
|
|
||||||
const hasAdmin = await hasAdminPermission(userId, workflowWorkspaceId)
|
|
||||||
if (hasAdmin) {
|
|
||||||
return { hasAccess: true, chat: chatRecord }
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return { hasAccess: false }
|
return { hasAccess: false }
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const authorization = await authorizeWorkflowByWorkspacePermission({
|
||||||
|
workflowId: chatRecord.workflowId,
|
||||||
|
userId,
|
||||||
|
action: 'admin',
|
||||||
|
})
|
||||||
|
|
||||||
|
return authorization.allowed ? { hasAccess: true, chat: chatRecord } : { hasAccess: false }
|
||||||
|
}
|
||||||
|
|
||||||
export async function validateChatAuth(
|
export async function validateChatAuth(
|
||||||
requestId: string,
|
requestId: string,
|
||||||
deployment: any,
|
deployment: any,
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
import { type NextRequest, NextResponse } from 'next/server'
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
import { z } from 'zod'
|
import { z } from 'zod'
|
||||||
import { getSession } from '@/lib/auth'
|
import { getSession } from '@/lib/auth'
|
||||||
import { SIM_AGENT_API_URL_DEFAULT } from '@/lib/copilot/constants'
|
import { SIM_AGENT_API_URL } from '@/lib/copilot/constants'
|
||||||
import { env } from '@/lib/core/config/env'
|
import { env } from '@/lib/core/config/env'
|
||||||
|
|
||||||
const GenerateApiKeySchema = z.object({
|
const GenerateApiKeySchema = z.object({
|
||||||
@@ -17,9 +17,6 @@ export async function POST(req: NextRequest) {
|
|||||||
|
|
||||||
const userId = session.user.id
|
const userId = session.user.id
|
||||||
|
|
||||||
// Move environment variable access inside the function
|
|
||||||
const SIM_AGENT_API_URL = env.SIM_AGENT_API_URL || SIM_AGENT_API_URL_DEFAULT
|
|
||||||
|
|
||||||
const body = await req.json().catch(() => ({}))
|
const body = await req.json().catch(() => ({}))
|
||||||
const validationResult = GenerateApiKeySchema.safeParse(body)
|
const validationResult = GenerateApiKeySchema.safeParse(body)
|
||||||
|
|
||||||
|
|||||||
@@ -19,6 +19,7 @@ describe('Copilot API Keys API Route', () => {
|
|||||||
|
|
||||||
vi.doMock('@/lib/copilot/constants', () => ({
|
vi.doMock('@/lib/copilot/constants', () => ({
|
||||||
SIM_AGENT_API_URL_DEFAULT: 'https://agent.sim.example.com',
|
SIM_AGENT_API_URL_DEFAULT: 'https://agent.sim.example.com',
|
||||||
|
SIM_AGENT_API_URL: 'https://agent.sim.example.com',
|
||||||
}))
|
}))
|
||||||
|
|
||||||
vi.doMock('@/lib/core/config/env', async () => {
|
vi.doMock('@/lib/core/config/env', async () => {
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
import { type NextRequest, NextResponse } from 'next/server'
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
import { getSession } from '@/lib/auth'
|
import { getSession } from '@/lib/auth'
|
||||||
import { SIM_AGENT_API_URL_DEFAULT } from '@/lib/copilot/constants'
|
import { SIM_AGENT_API_URL } from '@/lib/copilot/constants'
|
||||||
import { env } from '@/lib/core/config/env'
|
import { env } from '@/lib/core/config/env'
|
||||||
|
|
||||||
export async function GET(request: NextRequest) {
|
export async function GET(request: NextRequest) {
|
||||||
@@ -12,8 +12,6 @@ export async function GET(request: NextRequest) {
|
|||||||
|
|
||||||
const userId = session.user.id
|
const userId = session.user.id
|
||||||
|
|
||||||
const SIM_AGENT_API_URL = env.SIM_AGENT_API_URL || SIM_AGENT_API_URL_DEFAULT
|
|
||||||
|
|
||||||
const res = await fetch(`${SIM_AGENT_API_URL}/api/validate-key/get-api-keys`, {
|
const res = await fetch(`${SIM_AGENT_API_URL}/api/validate-key/get-api-keys`, {
|
||||||
method: 'POST',
|
method: 'POST',
|
||||||
headers: {
|
headers: {
|
||||||
@@ -68,8 +66,6 @@ export async function DELETE(request: NextRequest) {
|
|||||||
return NextResponse.json({ error: 'id is required' }, { status: 400 })
|
return NextResponse.json({ error: 'id is required' }, { status: 400 })
|
||||||
}
|
}
|
||||||
|
|
||||||
const SIM_AGENT_API_URL = env.SIM_AGENT_API_URL || SIM_AGENT_API_URL_DEFAULT
|
|
||||||
|
|
||||||
const res = await fetch(`${SIM_AGENT_API_URL}/api/validate-key/delete`, {
|
const res = await fetch(`${SIM_AGENT_API_URL}/api/validate-key/delete`, {
|
||||||
method: 'POST',
|
method: 'POST',
|
||||||
headers: {
|
headers: {
|
||||||
|
|||||||
File diff suppressed because it is too large
Load Diff
130
apps/sim/app/api/copilot/chat/stream/route.ts
Normal file
130
apps/sim/app/api/copilot/chat/stream/route.ts
Normal file
@@ -0,0 +1,130 @@
|
|||||||
|
import { createLogger } from '@sim/logger'
|
||||||
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
|
import {
|
||||||
|
getStreamMeta,
|
||||||
|
readStreamEvents,
|
||||||
|
type StreamMeta,
|
||||||
|
} from '@/lib/copilot/orchestrator/stream-buffer'
|
||||||
|
import { authenticateCopilotRequestSessionOnly } from '@/lib/copilot/request-helpers'
|
||||||
|
import { SSE_HEADERS } from '@/lib/core/utils/sse'
|
||||||
|
|
||||||
|
const logger = createLogger('CopilotChatStreamAPI')
|
||||||
|
const POLL_INTERVAL_MS = 250
|
||||||
|
const MAX_STREAM_MS = 10 * 60 * 1000
|
||||||
|
|
||||||
|
function encodeEvent(event: Record<string, any>): Uint8Array {
|
||||||
|
return new TextEncoder().encode(`data: ${JSON.stringify(event)}\n\n`)
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function GET(request: NextRequest) {
|
||||||
|
const { userId: authenticatedUserId, isAuthenticated } =
|
||||||
|
await authenticateCopilotRequestSessionOnly()
|
||||||
|
|
||||||
|
if (!isAuthenticated || !authenticatedUserId) {
|
||||||
|
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||||
|
}
|
||||||
|
|
||||||
|
const url = new URL(request.url)
|
||||||
|
const streamId = url.searchParams.get('streamId') || ''
|
||||||
|
const fromParam = url.searchParams.get('from') || '0'
|
||||||
|
const fromEventId = Number(fromParam || 0)
|
||||||
|
// If batch=true, return buffered events as JSON instead of SSE
|
||||||
|
const batchMode = url.searchParams.get('batch') === 'true'
|
||||||
|
const toParam = url.searchParams.get('to')
|
||||||
|
const toEventId = toParam ? Number(toParam) : undefined
|
||||||
|
|
||||||
|
if (!streamId) {
|
||||||
|
return NextResponse.json({ error: 'streamId is required' }, { status: 400 })
|
||||||
|
}
|
||||||
|
|
||||||
|
const meta = (await getStreamMeta(streamId)) as StreamMeta | null
|
||||||
|
logger.info('[Resume] Stream lookup', {
|
||||||
|
streamId,
|
||||||
|
fromEventId,
|
||||||
|
toEventId,
|
||||||
|
batchMode,
|
||||||
|
hasMeta: !!meta,
|
||||||
|
metaStatus: meta?.status,
|
||||||
|
})
|
||||||
|
if (!meta) {
|
||||||
|
return NextResponse.json({ error: 'Stream not found' }, { status: 404 })
|
||||||
|
}
|
||||||
|
if (meta.userId && meta.userId !== authenticatedUserId) {
|
||||||
|
return NextResponse.json({ error: 'Unauthorized' }, { status: 403 })
|
||||||
|
}
|
||||||
|
|
||||||
|
// Batch mode: return all buffered events as JSON
|
||||||
|
if (batchMode) {
|
||||||
|
const events = await readStreamEvents(streamId, fromEventId)
|
||||||
|
const filteredEvents = toEventId ? events.filter((e) => e.eventId <= toEventId) : events
|
||||||
|
logger.info('[Resume] Batch response', {
|
||||||
|
streamId,
|
||||||
|
fromEventId,
|
||||||
|
toEventId,
|
||||||
|
eventCount: filteredEvents.length,
|
||||||
|
})
|
||||||
|
return NextResponse.json({
|
||||||
|
success: true,
|
||||||
|
events: filteredEvents,
|
||||||
|
status: meta.status,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
const startTime = Date.now()
|
||||||
|
|
||||||
|
const stream = new ReadableStream({
|
||||||
|
async start(controller) {
|
||||||
|
let lastEventId = Number.isFinite(fromEventId) ? fromEventId : 0
|
||||||
|
|
||||||
|
const flushEvents = async () => {
|
||||||
|
const events = await readStreamEvents(streamId, lastEventId)
|
||||||
|
if (events.length > 0) {
|
||||||
|
logger.info('[Resume] Flushing events', {
|
||||||
|
streamId,
|
||||||
|
fromEventId: lastEventId,
|
||||||
|
eventCount: events.length,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
for (const entry of events) {
|
||||||
|
lastEventId = entry.eventId
|
||||||
|
const payload = {
|
||||||
|
...entry.event,
|
||||||
|
eventId: entry.eventId,
|
||||||
|
streamId: entry.streamId,
|
||||||
|
}
|
||||||
|
controller.enqueue(encodeEvent(payload))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
await flushEvents()
|
||||||
|
|
||||||
|
while (Date.now() - startTime < MAX_STREAM_MS) {
|
||||||
|
const currentMeta = await getStreamMeta(streamId)
|
||||||
|
if (!currentMeta) break
|
||||||
|
|
||||||
|
await flushEvents()
|
||||||
|
|
||||||
|
if (currentMeta.status === 'complete' || currentMeta.status === 'error') {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
|
||||||
|
if (request.signal.aborted) {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
|
||||||
|
await new Promise((resolve) => setTimeout(resolve, POLL_INTERVAL_MS))
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
logger.warn('Stream replay failed', {
|
||||||
|
streamId,
|
||||||
|
error: error instanceof Error ? error.message : String(error),
|
||||||
|
})
|
||||||
|
} finally {
|
||||||
|
controller.close()
|
||||||
|
}
|
||||||
|
},
|
||||||
|
})
|
||||||
|
|
||||||
|
return new Response(stream, { headers: SSE_HEADERS })
|
||||||
|
}
|
||||||
@@ -25,6 +25,13 @@ describe('Copilot Checkpoints Revert API Route', () => {
|
|||||||
getEmailDomain: vi.fn(() => 'localhost:3000'),
|
getEmailDomain: vi.fn(() => 'localhost:3000'),
|
||||||
}))
|
}))
|
||||||
|
|
||||||
|
vi.doMock('@/lib/workflows/utils', () => ({
|
||||||
|
authorizeWorkflowByWorkspacePermission: vi.fn().mockResolvedValue({
|
||||||
|
allowed: true,
|
||||||
|
status: 200,
|
||||||
|
}),
|
||||||
|
}))
|
||||||
|
|
||||||
mockSelect.mockReturnValue({ from: mockFrom })
|
mockSelect.mockReturnValue({ from: mockFrom })
|
||||||
mockFrom.mockReturnValue({ where: mockWhere })
|
mockFrom.mockReturnValue({ where: mockWhere })
|
||||||
mockWhere.mockReturnValue({ then: mockThen })
|
mockWhere.mockReturnValue({ then: mockThen })
|
||||||
@@ -212,6 +219,12 @@ describe('Copilot Checkpoints Revert API Route', () => {
|
|||||||
.mockResolvedValueOnce(mockCheckpoint) // Checkpoint found
|
.mockResolvedValueOnce(mockCheckpoint) // Checkpoint found
|
||||||
.mockResolvedValueOnce(mockWorkflow) // Workflow found but different user
|
.mockResolvedValueOnce(mockWorkflow) // Workflow found but different user
|
||||||
|
|
||||||
|
const { authorizeWorkflowByWorkspacePermission } = await import('@/lib/workflows/utils')
|
||||||
|
vi.mocked(authorizeWorkflowByWorkspacePermission).mockResolvedValueOnce({
|
||||||
|
allowed: false,
|
||||||
|
status: 403,
|
||||||
|
})
|
||||||
|
|
||||||
const req = createMockRequest('POST', {
|
const req = createMockRequest('POST', {
|
||||||
checkpointId: 'checkpoint-123',
|
checkpointId: 'checkpoint-123',
|
||||||
})
|
})
|
||||||
|
|||||||
@@ -12,6 +12,7 @@ import {
|
|||||||
createUnauthorizedResponse,
|
createUnauthorizedResponse,
|
||||||
} from '@/lib/copilot/request-helpers'
|
} from '@/lib/copilot/request-helpers'
|
||||||
import { getBaseUrl } from '@/lib/core/utils/urls'
|
import { getBaseUrl } from '@/lib/core/utils/urls'
|
||||||
|
import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils'
|
||||||
import { isUuidV4 } from '@/executor/constants'
|
import { isUuidV4 } from '@/executor/constants'
|
||||||
|
|
||||||
const logger = createLogger('CheckpointRevertAPI')
|
const logger = createLogger('CheckpointRevertAPI')
|
||||||
@@ -58,7 +59,12 @@ export async function POST(request: NextRequest) {
|
|||||||
return createNotFoundResponse('Workflow not found')
|
return createNotFoundResponse('Workflow not found')
|
||||||
}
|
}
|
||||||
|
|
||||||
if (workflowData.userId !== userId) {
|
const authorization = await authorizeWorkflowByWorkspacePermission({
|
||||||
|
workflowId: checkpoint.workflowId,
|
||||||
|
userId,
|
||||||
|
action: 'write',
|
||||||
|
})
|
||||||
|
if (!authorization.allowed) {
|
||||||
return createUnauthorizedResponse()
|
return createUnauthorizedResponse()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -139,7 +139,6 @@ describe('Copilot Confirm API Route', () => {
|
|||||||
status: 'success',
|
status: 'success',
|
||||||
})
|
})
|
||||||
|
|
||||||
expect(mockRedisExists).toHaveBeenCalled()
|
|
||||||
expect(mockRedisSet).toHaveBeenCalled()
|
expect(mockRedisSet).toHaveBeenCalled()
|
||||||
})
|
})
|
||||||
|
|
||||||
@@ -256,11 +255,11 @@ describe('Copilot Confirm API Route', () => {
|
|||||||
expect(responseData.error).toBe('Failed to update tool call status or tool call not found')
|
expect(responseData.error).toBe('Failed to update tool call status or tool call not found')
|
||||||
})
|
})
|
||||||
|
|
||||||
it('should return 400 when tool call is not found in Redis', async () => {
|
it('should return 400 when Redis set fails', async () => {
|
||||||
const authMocks = mockAuth()
|
const authMocks = mockAuth()
|
||||||
authMocks.setAuthenticated()
|
authMocks.setAuthenticated()
|
||||||
|
|
||||||
mockRedisExists.mockResolvedValue(0)
|
mockRedisSet.mockRejectedValueOnce(new Error('Redis set failed'))
|
||||||
|
|
||||||
const req = createMockRequest('POST', {
|
const req = createMockRequest('POST', {
|
||||||
toolCallId: 'non-existent-tool',
|
toolCallId: 'non-existent-tool',
|
||||||
@@ -279,7 +278,7 @@ describe('Copilot Confirm API Route', () => {
|
|||||||
const authMocks = mockAuth()
|
const authMocks = mockAuth()
|
||||||
authMocks.setAuthenticated()
|
authMocks.setAuthenticated()
|
||||||
|
|
||||||
mockRedisExists.mockRejectedValue(new Error('Redis connection failed'))
|
mockRedisSet.mockRejectedValueOnce(new Error('Redis connection failed'))
|
||||||
|
|
||||||
const req = createMockRequest('POST', {
|
const req = createMockRequest('POST', {
|
||||||
toolCallId: 'tool-call-123',
|
toolCallId: 'tool-call-123',
|
||||||
|
|||||||
@@ -1,6 +1,7 @@
|
|||||||
import { createLogger } from '@sim/logger'
|
import { createLogger } from '@sim/logger'
|
||||||
import { type NextRequest, NextResponse } from 'next/server'
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
import { z } from 'zod'
|
import { z } from 'zod'
|
||||||
|
import { REDIS_TOOL_CALL_PREFIX, REDIS_TOOL_CALL_TTL_SECONDS } from '@/lib/copilot/constants'
|
||||||
import {
|
import {
|
||||||
authenticateCopilotRequestSessionOnly,
|
authenticateCopilotRequestSessionOnly,
|
||||||
createBadRequestResponse,
|
createBadRequestResponse,
|
||||||
@@ -23,7 +24,8 @@ const ConfirmationSchema = z.object({
|
|||||||
})
|
})
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Update tool call status in Redis
|
* Write the user's tool decision to Redis. The server-side orchestrator's
|
||||||
|
* waitForToolDecision() polls Redis for this value.
|
||||||
*/
|
*/
|
||||||
async function updateToolCallStatus(
|
async function updateToolCallStatus(
|
||||||
toolCallId: string,
|
toolCallId: string,
|
||||||
@@ -32,57 +34,24 @@ async function updateToolCallStatus(
|
|||||||
): Promise<boolean> {
|
): Promise<boolean> {
|
||||||
const redis = getRedisClient()
|
const redis = getRedisClient()
|
||||||
if (!redis) {
|
if (!redis) {
|
||||||
logger.warn('updateToolCallStatus: Redis client not available')
|
logger.warn('Redis client not available for tool confirmation')
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const key = `tool_call:${toolCallId}`
|
const key = `${REDIS_TOOL_CALL_PREFIX}${toolCallId}`
|
||||||
const timeout = 600000 // 10 minutes timeout for user confirmation
|
const payload = {
|
||||||
const pollInterval = 100 // Poll every 100ms
|
|
||||||
const startTime = Date.now()
|
|
||||||
|
|
||||||
logger.info('Polling for tool call in Redis', { toolCallId, key, timeout })
|
|
||||||
|
|
||||||
// Poll until the key exists or timeout
|
|
||||||
while (Date.now() - startTime < timeout) {
|
|
||||||
const exists = await redis.exists(key)
|
|
||||||
if (exists) {
|
|
||||||
break
|
|
||||||
}
|
|
||||||
|
|
||||||
// Wait before next poll
|
|
||||||
await new Promise((resolve) => setTimeout(resolve, pollInterval))
|
|
||||||
}
|
|
||||||
|
|
||||||
// Final check if key exists after polling
|
|
||||||
const exists = await redis.exists(key)
|
|
||||||
if (!exists) {
|
|
||||||
logger.warn('Tool call not found in Redis after polling timeout', {
|
|
||||||
toolCallId,
|
|
||||||
key,
|
|
||||||
timeout,
|
|
||||||
pollDuration: Date.now() - startTime,
|
|
||||||
})
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
|
|
||||||
// Store both status and message as JSON
|
|
||||||
const toolCallData = {
|
|
||||||
status,
|
status,
|
||||||
message: message || null,
|
message: message || null,
|
||||||
timestamp: new Date().toISOString(),
|
timestamp: new Date().toISOString(),
|
||||||
}
|
}
|
||||||
|
await redis.set(key, JSON.stringify(payload), 'EX', REDIS_TOOL_CALL_TTL_SECONDS)
|
||||||
await redis.set(key, JSON.stringify(toolCallData), 'EX', 86400) // Keep 24 hour expiry
|
|
||||||
|
|
||||||
return true
|
return true
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
logger.error('Failed to update tool call status in Redis', {
|
logger.error('Failed to update tool call status', {
|
||||||
toolCallId,
|
toolCallId,
|
||||||
status,
|
status,
|
||||||
message,
|
error: error instanceof Error ? error.message : String(error),
|
||||||
error: error instanceof Error ? error.message : 'Unknown error',
|
|
||||||
})
|
})
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
|
|||||||
28
apps/sim/app/api/copilot/credentials/route.ts
Normal file
28
apps/sim/app/api/copilot/credentials/route.ts
Normal file
@@ -0,0 +1,28 @@
|
|||||||
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
|
import { authenticateCopilotRequestSessionOnly } from '@/lib/copilot/request-helpers'
|
||||||
|
import { routeExecution } from '@/lib/copilot/tools/server/router'
|
||||||
|
|
||||||
|
/**
|
||||||
|
* GET /api/copilot/credentials
|
||||||
|
* Returns connected OAuth credentials for the authenticated user.
|
||||||
|
* Used by the copilot store for credential masking.
|
||||||
|
*/
|
||||||
|
export async function GET(_req: NextRequest) {
|
||||||
|
const { userId, isAuthenticated } = await authenticateCopilotRequestSessionOnly()
|
||||||
|
if (!isAuthenticated || !userId) {
|
||||||
|
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const result = await routeExecution('get_credentials', {}, { userId })
|
||||||
|
return NextResponse.json({ success: true, result })
|
||||||
|
} catch (error) {
|
||||||
|
return NextResponse.json(
|
||||||
|
{
|
||||||
|
success: false,
|
||||||
|
error: error instanceof Error ? error.message : 'Failed to load credentials',
|
||||||
|
},
|
||||||
|
{ status: 500 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,54 +0,0 @@
|
|||||||
import { createLogger } from '@sim/logger'
|
|
||||||
import { type NextRequest, NextResponse } from 'next/server'
|
|
||||||
import { z } from 'zod'
|
|
||||||
import {
|
|
||||||
authenticateCopilotRequestSessionOnly,
|
|
||||||
createBadRequestResponse,
|
|
||||||
createInternalServerErrorResponse,
|
|
||||||
createRequestTracker,
|
|
||||||
createUnauthorizedResponse,
|
|
||||||
} from '@/lib/copilot/request-helpers'
|
|
||||||
import { routeExecution } from '@/lib/copilot/tools/server/router'
|
|
||||||
|
|
||||||
const logger = createLogger('ExecuteCopilotServerToolAPI')
|
|
||||||
|
|
||||||
const ExecuteSchema = z.object({
|
|
||||||
toolName: z.string(),
|
|
||||||
payload: z.unknown().optional(),
|
|
||||||
})
|
|
||||||
|
|
||||||
export async function POST(req: NextRequest) {
|
|
||||||
const tracker = createRequestTracker()
|
|
||||||
try {
|
|
||||||
const { userId, isAuthenticated } = await authenticateCopilotRequestSessionOnly()
|
|
||||||
if (!isAuthenticated || !userId) {
|
|
||||||
return createUnauthorizedResponse()
|
|
||||||
}
|
|
||||||
|
|
||||||
const body = await req.json()
|
|
||||||
try {
|
|
||||||
const preview = JSON.stringify(body).slice(0, 300)
|
|
||||||
logger.debug(`[${tracker.requestId}] Incoming request body preview`, { preview })
|
|
||||||
} catch {}
|
|
||||||
|
|
||||||
const { toolName, payload } = ExecuteSchema.parse(body)
|
|
||||||
|
|
||||||
logger.info(`[${tracker.requestId}] Executing server tool`, { toolName })
|
|
||||||
const result = await routeExecution(toolName, payload, { userId })
|
|
||||||
|
|
||||||
try {
|
|
||||||
const resultPreview = JSON.stringify(result).slice(0, 300)
|
|
||||||
logger.debug(`[${tracker.requestId}] Server tool result preview`, { toolName, resultPreview })
|
|
||||||
} catch {}
|
|
||||||
|
|
||||||
return NextResponse.json({ success: true, result })
|
|
||||||
} catch (error) {
|
|
||||||
if (error instanceof z.ZodError) {
|
|
||||||
logger.debug(`[${tracker.requestId}] Zod validation error`, { issues: error.issues })
|
|
||||||
return createBadRequestResponse('Invalid request body for execute-copilot-server-tool')
|
|
||||||
}
|
|
||||||
logger.error(`[${tracker.requestId}] Failed to execute server tool:`, error)
|
|
||||||
const errorMessage = error instanceof Error ? error.message : 'Failed to execute server tool'
|
|
||||||
return createInternalServerErrorResponse(errorMessage)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,247 +0,0 @@
|
|||||||
import { db } from '@sim/db'
|
|
||||||
import { account, workflow } from '@sim/db/schema'
|
|
||||||
import { createLogger } from '@sim/logger'
|
|
||||||
import { and, eq } from 'drizzle-orm'
|
|
||||||
import { type NextRequest, NextResponse } from 'next/server'
|
|
||||||
import { z } from 'zod'
|
|
||||||
import { getSession } from '@/lib/auth'
|
|
||||||
import {
|
|
||||||
createBadRequestResponse,
|
|
||||||
createInternalServerErrorResponse,
|
|
||||||
createRequestTracker,
|
|
||||||
createUnauthorizedResponse,
|
|
||||||
} from '@/lib/copilot/request-helpers'
|
|
||||||
import { generateRequestId } from '@/lib/core/utils/request'
|
|
||||||
import { getEffectiveDecryptedEnv } from '@/lib/environment/utils'
|
|
||||||
import { refreshTokenIfNeeded } from '@/app/api/auth/oauth/utils'
|
|
||||||
import { resolveEnvVarReferences } from '@/executor/utils/reference-validation'
|
|
||||||
import { executeTool } from '@/tools'
|
|
||||||
import { getTool, resolveToolId } from '@/tools/utils'
|
|
||||||
|
|
||||||
const logger = createLogger('CopilotExecuteToolAPI')
|
|
||||||
|
|
||||||
const ExecuteToolSchema = z.object({
|
|
||||||
toolCallId: z.string(),
|
|
||||||
toolName: z.string(),
|
|
||||||
arguments: z.record(z.any()).optional().default({}),
|
|
||||||
workflowId: z.string().optional(),
|
|
||||||
})
|
|
||||||
|
|
||||||
export async function POST(req: NextRequest) {
|
|
||||||
const tracker = createRequestTracker()
|
|
||||||
|
|
||||||
try {
|
|
||||||
const session = await getSession()
|
|
||||||
if (!session?.user?.id) {
|
|
||||||
return createUnauthorizedResponse()
|
|
||||||
}
|
|
||||||
|
|
||||||
const userId = session.user.id
|
|
||||||
const body = await req.json()
|
|
||||||
|
|
||||||
try {
|
|
||||||
const preview = JSON.stringify(body).slice(0, 300)
|
|
||||||
logger.debug(`[${tracker.requestId}] Incoming execute-tool request`, { preview })
|
|
||||||
} catch {}
|
|
||||||
|
|
||||||
const { toolCallId, toolName, arguments: toolArgs, workflowId } = ExecuteToolSchema.parse(body)
|
|
||||||
|
|
||||||
const resolvedToolName = resolveToolId(toolName)
|
|
||||||
|
|
||||||
logger.info(`[${tracker.requestId}] Executing tool`, {
|
|
||||||
toolCallId,
|
|
||||||
toolName,
|
|
||||||
resolvedToolName,
|
|
||||||
workflowId,
|
|
||||||
hasArgs: Object.keys(toolArgs).length > 0,
|
|
||||||
})
|
|
||||||
|
|
||||||
const toolConfig = getTool(resolvedToolName)
|
|
||||||
if (!toolConfig) {
|
|
||||||
// Find similar tool names to help debug
|
|
||||||
const { tools: allTools } = await import('@/tools/registry')
|
|
||||||
const allToolNames = Object.keys(allTools)
|
|
||||||
const prefix = toolName.split('_').slice(0, 2).join('_')
|
|
||||||
const similarTools = allToolNames
|
|
||||||
.filter((name) => name.startsWith(`${prefix.split('_')[0]}_`))
|
|
||||||
.slice(0, 10)
|
|
||||||
|
|
||||||
logger.warn(`[${tracker.requestId}] Tool not found in registry`, {
|
|
||||||
toolName,
|
|
||||||
prefix,
|
|
||||||
similarTools,
|
|
||||||
totalToolsInRegistry: allToolNames.length,
|
|
||||||
})
|
|
||||||
return NextResponse.json(
|
|
||||||
{
|
|
||||||
success: false,
|
|
||||||
error: `Tool not found: ${toolName}. Similar tools: ${similarTools.join(', ')}`,
|
|
||||||
toolCallId,
|
|
||||||
},
|
|
||||||
{ status: 404 }
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Get the workspaceId from the workflow (env vars are stored at workspace level)
|
|
||||||
let workspaceId: string | undefined
|
|
||||||
if (workflowId) {
|
|
||||||
const workflowResult = await db
|
|
||||||
.select({ workspaceId: workflow.workspaceId })
|
|
||||||
.from(workflow)
|
|
||||||
.where(eq(workflow.id, workflowId))
|
|
||||||
.limit(1)
|
|
||||||
workspaceId = workflowResult[0]?.workspaceId ?? undefined
|
|
||||||
}
|
|
||||||
|
|
||||||
// Get decrypted environment variables early so we can resolve all {{VAR}} references
|
|
||||||
const decryptedEnvVars = await getEffectiveDecryptedEnv(userId, workspaceId)
|
|
||||||
|
|
||||||
logger.info(`[${tracker.requestId}] Fetched environment variables`, {
|
|
||||||
workflowId,
|
|
||||||
workspaceId,
|
|
||||||
envVarCount: Object.keys(decryptedEnvVars).length,
|
|
||||||
envVarKeys: Object.keys(decryptedEnvVars),
|
|
||||||
})
|
|
||||||
|
|
||||||
// Build execution params starting with LLM-provided arguments
|
|
||||||
// Resolve all {{ENV_VAR}} references in the arguments (deep for nested objects)
|
|
||||||
const executionParams: Record<string, any> = resolveEnvVarReferences(
|
|
||||||
toolArgs,
|
|
||||||
decryptedEnvVars,
|
|
||||||
{ deep: true }
|
|
||||||
) as Record<string, any>
|
|
||||||
|
|
||||||
logger.info(`[${tracker.requestId}] Resolved env var references in arguments`, {
|
|
||||||
toolName,
|
|
||||||
originalArgKeys: Object.keys(toolArgs),
|
|
||||||
resolvedArgKeys: Object.keys(executionParams),
|
|
||||||
})
|
|
||||||
|
|
||||||
// Resolve OAuth access token if required
|
|
||||||
if (toolConfig.oauth?.required && toolConfig.oauth.provider) {
|
|
||||||
const provider = toolConfig.oauth.provider
|
|
||||||
logger.info(`[${tracker.requestId}] Resolving OAuth token`, { provider })
|
|
||||||
|
|
||||||
try {
|
|
||||||
// Find the account for this provider and user
|
|
||||||
const accounts = await db
|
|
||||||
.select()
|
|
||||||
.from(account)
|
|
||||||
.where(and(eq(account.providerId, provider), eq(account.userId, userId)))
|
|
||||||
.limit(1)
|
|
||||||
|
|
||||||
if (accounts.length > 0) {
|
|
||||||
const acc = accounts[0]
|
|
||||||
const requestId = generateRequestId()
|
|
||||||
const { accessToken } = await refreshTokenIfNeeded(requestId, acc as any, acc.id)
|
|
||||||
|
|
||||||
if (accessToken) {
|
|
||||||
executionParams.accessToken = accessToken
|
|
||||||
logger.info(`[${tracker.requestId}] OAuth token resolved`, { provider })
|
|
||||||
} else {
|
|
||||||
logger.warn(`[${tracker.requestId}] No access token available`, { provider })
|
|
||||||
return NextResponse.json(
|
|
||||||
{
|
|
||||||
success: false,
|
|
||||||
error: `OAuth token not available for ${provider}. Please reconnect your account.`,
|
|
||||||
toolCallId,
|
|
||||||
},
|
|
||||||
{ status: 400 }
|
|
||||||
)
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
logger.warn(`[${tracker.requestId}] No account found for provider`, { provider })
|
|
||||||
return NextResponse.json(
|
|
||||||
{
|
|
||||||
success: false,
|
|
||||||
error: `No ${provider} account connected. Please connect your account first.`,
|
|
||||||
toolCallId,
|
|
||||||
},
|
|
||||||
{ status: 400 }
|
|
||||||
)
|
|
||||||
}
|
|
||||||
} catch (error) {
|
|
||||||
logger.error(`[${tracker.requestId}] Failed to resolve OAuth token`, {
|
|
||||||
provider,
|
|
||||||
error: error instanceof Error ? error.message : String(error),
|
|
||||||
})
|
|
||||||
return NextResponse.json(
|
|
||||||
{
|
|
||||||
success: false,
|
|
||||||
error: `Failed to get OAuth token for ${provider}`,
|
|
||||||
toolCallId,
|
|
||||||
},
|
|
||||||
{ status: 500 }
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Check if tool requires an API key that wasn't resolved via {{ENV_VAR}} reference
|
|
||||||
const needsApiKey = toolConfig.params?.apiKey?.required
|
|
||||||
|
|
||||||
if (needsApiKey && !executionParams.apiKey) {
|
|
||||||
logger.warn(`[${tracker.requestId}] No API key found for tool`, { toolName })
|
|
||||||
return NextResponse.json(
|
|
||||||
{
|
|
||||||
success: false,
|
|
||||||
error: `API key not provided for ${toolName}. Use {{YOUR_API_KEY_ENV_VAR}} to reference your environment variable.`,
|
|
||||||
toolCallId,
|
|
||||||
},
|
|
||||||
{ status: 400 }
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Add execution context
|
|
||||||
executionParams._context = {
|
|
||||||
workflowId,
|
|
||||||
userId,
|
|
||||||
}
|
|
||||||
|
|
||||||
// Special handling for function_execute - inject environment variables
|
|
||||||
if (toolName === 'function_execute') {
|
|
||||||
executionParams.envVars = decryptedEnvVars
|
|
||||||
executionParams.workflowVariables = {} // No workflow variables in copilot context
|
|
||||||
executionParams.blockData = {} // No block data in copilot context
|
|
||||||
executionParams.blockNameMapping = {} // No block mapping in copilot context
|
|
||||||
executionParams.language = executionParams.language || 'javascript'
|
|
||||||
executionParams.timeout = executionParams.timeout || 30000
|
|
||||||
|
|
||||||
logger.info(`[${tracker.requestId}] Injected env vars for function_execute`, {
|
|
||||||
envVarCount: Object.keys(decryptedEnvVars).length,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
// Execute the tool
|
|
||||||
logger.info(`[${tracker.requestId}] Executing tool with resolved credentials`, {
|
|
||||||
toolName,
|
|
||||||
hasAccessToken: !!executionParams.accessToken,
|
|
||||||
hasApiKey: !!executionParams.apiKey,
|
|
||||||
})
|
|
||||||
|
|
||||||
const result = await executeTool(resolvedToolName, executionParams)
|
|
||||||
|
|
||||||
logger.info(`[${tracker.requestId}] Tool execution complete`, {
|
|
||||||
toolName,
|
|
||||||
success: result.success,
|
|
||||||
hasOutput: !!result.output,
|
|
||||||
})
|
|
||||||
|
|
||||||
return NextResponse.json({
|
|
||||||
success: true,
|
|
||||||
toolCallId,
|
|
||||||
result: {
|
|
||||||
success: result.success,
|
|
||||||
output: result.output,
|
|
||||||
error: result.error,
|
|
||||||
},
|
|
||||||
})
|
|
||||||
} catch (error) {
|
|
||||||
if (error instanceof z.ZodError) {
|
|
||||||
logger.debug(`[${tracker.requestId}] Zod validation error`, { issues: error.issues })
|
|
||||||
return createBadRequestResponse('Invalid request body for execute-tool')
|
|
||||||
}
|
|
||||||
logger.error(`[${tracker.requestId}] Failed to execute tool:`, error)
|
|
||||||
const errorMessage = error instanceof Error ? error.message : 'Failed to execute tool'
|
|
||||||
return createInternalServerErrorResponse(errorMessage)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
84
apps/sim/app/api/copilot/models/route.ts
Normal file
84
apps/sim/app/api/copilot/models/route.ts
Normal file
@@ -0,0 +1,84 @@
|
|||||||
|
import { createLogger } from '@sim/logger'
|
||||||
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
|
import { SIM_AGENT_API_URL } from '@/lib/copilot/constants'
|
||||||
|
import { authenticateCopilotRequestSessionOnly } from '@/lib/copilot/request-helpers'
|
||||||
|
import type { AvailableModel } from '@/lib/copilot/types'
|
||||||
|
import { env } from '@/lib/core/config/env'
|
||||||
|
|
||||||
|
const logger = createLogger('CopilotModelsAPI')
|
||||||
|
|
||||||
|
interface RawAvailableModel {
|
||||||
|
id: string
|
||||||
|
friendlyName?: string
|
||||||
|
displayName?: string
|
||||||
|
provider?: string
|
||||||
|
}
|
||||||
|
|
||||||
|
function isRawAvailableModel(item: unknown): item is RawAvailableModel {
|
||||||
|
return (
|
||||||
|
typeof item === 'object' &&
|
||||||
|
item !== null &&
|
||||||
|
'id' in item &&
|
||||||
|
typeof (item as { id: unknown }).id === 'string'
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function GET(_req: NextRequest) {
|
||||||
|
const { userId, isAuthenticated } = await authenticateCopilotRequestSessionOnly()
|
||||||
|
if (!isAuthenticated || !userId) {
|
||||||
|
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||||
|
}
|
||||||
|
|
||||||
|
const headers: Record<string, string> = {
|
||||||
|
'Content-Type': 'application/json',
|
||||||
|
}
|
||||||
|
if (env.COPILOT_API_KEY) {
|
||||||
|
headers['x-api-key'] = env.COPILOT_API_KEY
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const response = await fetch(`${SIM_AGENT_API_URL}/api/get-available-models`, {
|
||||||
|
method: 'GET',
|
||||||
|
headers,
|
||||||
|
cache: 'no-store',
|
||||||
|
})
|
||||||
|
|
||||||
|
const payload = await response.json().catch(() => ({}))
|
||||||
|
if (!response.ok) {
|
||||||
|
logger.warn('Failed to fetch available models from copilot backend', {
|
||||||
|
status: response.status,
|
||||||
|
})
|
||||||
|
return NextResponse.json(
|
||||||
|
{
|
||||||
|
success: false,
|
||||||
|
error: payload?.error || 'Failed to fetch available models',
|
||||||
|
models: [],
|
||||||
|
},
|
||||||
|
{ status: response.status }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const rawModels = Array.isArray(payload?.models) ? payload.models : []
|
||||||
|
const models: AvailableModel[] = rawModels
|
||||||
|
.filter((item: unknown): item is RawAvailableModel => isRawAvailableModel(item))
|
||||||
|
.map((item: RawAvailableModel) => ({
|
||||||
|
id: item.id,
|
||||||
|
friendlyName: item.friendlyName || item.displayName || item.id,
|
||||||
|
provider: item.provider || 'unknown',
|
||||||
|
}))
|
||||||
|
|
||||||
|
return NextResponse.json({ success: true, models })
|
||||||
|
} catch (error) {
|
||||||
|
logger.error('Error fetching available models', {
|
||||||
|
error: error instanceof Error ? error.message : String(error),
|
||||||
|
})
|
||||||
|
return NextResponse.json(
|
||||||
|
{
|
||||||
|
success: false,
|
||||||
|
error: 'Failed to fetch available models',
|
||||||
|
models: [],
|
||||||
|
},
|
||||||
|
{ status: 500 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -40,6 +40,7 @@ describe('Copilot Stats API Route', () => {
|
|||||||
|
|
||||||
vi.doMock('@/lib/copilot/constants', () => ({
|
vi.doMock('@/lib/copilot/constants', () => ({
|
||||||
SIM_AGENT_API_URL_DEFAULT: 'https://agent.sim.example.com',
|
SIM_AGENT_API_URL_DEFAULT: 'https://agent.sim.example.com',
|
||||||
|
SIM_AGENT_API_URL: 'https://agent.sim.example.com',
|
||||||
}))
|
}))
|
||||||
|
|
||||||
vi.doMock('@/lib/core/config/env', async () => {
|
vi.doMock('@/lib/core/config/env', async () => {
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
import { type NextRequest, NextResponse } from 'next/server'
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
import { z } from 'zod'
|
import { z } from 'zod'
|
||||||
import { SIM_AGENT_API_URL_DEFAULT } from '@/lib/copilot/constants'
|
import { SIM_AGENT_API_URL } from '@/lib/copilot/constants'
|
||||||
import {
|
import {
|
||||||
authenticateCopilotRequestSessionOnly,
|
authenticateCopilotRequestSessionOnly,
|
||||||
createBadRequestResponse,
|
createBadRequestResponse,
|
||||||
@@ -10,8 +10,6 @@ import {
|
|||||||
} from '@/lib/copilot/request-helpers'
|
} from '@/lib/copilot/request-helpers'
|
||||||
import { env } from '@/lib/core/config/env'
|
import { env } from '@/lib/core/config/env'
|
||||||
|
|
||||||
const SIM_AGENT_API_URL = env.SIM_AGENT_API_URL || SIM_AGENT_API_URL_DEFAULT
|
|
||||||
|
|
||||||
const BodySchema = z.object({
|
const BodySchema = z.object({
|
||||||
messageId: z.string(),
|
messageId: z.string(),
|
||||||
diffCreated: z.boolean(),
|
diffCreated: z.boolean(),
|
||||||
|
|||||||
@@ -1,123 +0,0 @@
|
|||||||
import { createLogger } from '@sim/logger'
|
|
||||||
import { type NextRequest, NextResponse } from 'next/server'
|
|
||||||
import { z } from 'zod'
|
|
||||||
import { SIM_AGENT_API_URL_DEFAULT } from '@/lib/copilot/constants'
|
|
||||||
import {
|
|
||||||
authenticateCopilotRequestSessionOnly,
|
|
||||||
createBadRequestResponse,
|
|
||||||
createInternalServerErrorResponse,
|
|
||||||
createRequestTracker,
|
|
||||||
createUnauthorizedResponse,
|
|
||||||
} from '@/lib/copilot/request-helpers'
|
|
||||||
import { env } from '@/lib/core/config/env'
|
|
||||||
|
|
||||||
const logger = createLogger('CopilotMarkToolCompleteAPI')
|
|
||||||
|
|
||||||
const SIM_AGENT_API_URL = env.SIM_AGENT_API_URL || SIM_AGENT_API_URL_DEFAULT
|
|
||||||
|
|
||||||
const MarkCompleteSchema = z.object({
|
|
||||||
id: z.string(),
|
|
||||||
name: z.string(),
|
|
||||||
status: z.number().int(),
|
|
||||||
message: z.any().optional(),
|
|
||||||
data: z.any().optional(),
|
|
||||||
})
|
|
||||||
|
|
||||||
/**
|
|
||||||
* POST /api/copilot/tools/mark-complete
|
|
||||||
* Proxy to Sim Agent: POST /api/tools/mark-complete
|
|
||||||
*/
|
|
||||||
export async function POST(req: NextRequest) {
|
|
||||||
const tracker = createRequestTracker()
|
|
||||||
|
|
||||||
try {
|
|
||||||
const { userId, isAuthenticated } = await authenticateCopilotRequestSessionOnly()
|
|
||||||
if (!isAuthenticated || !userId) {
|
|
||||||
return createUnauthorizedResponse()
|
|
||||||
}
|
|
||||||
|
|
||||||
const body = await req.json()
|
|
||||||
|
|
||||||
// Log raw body shape for diagnostics (avoid dumping huge payloads)
|
|
||||||
try {
|
|
||||||
const bodyPreview = JSON.stringify(body).slice(0, 300)
|
|
||||||
logger.debug(`[${tracker.requestId}] Incoming mark-complete raw body preview`, {
|
|
||||||
preview: `${bodyPreview}${bodyPreview.length === 300 ? '...' : ''}`,
|
|
||||||
})
|
|
||||||
} catch {}
|
|
||||||
|
|
||||||
const parsed = MarkCompleteSchema.parse(body)
|
|
||||||
|
|
||||||
const messagePreview = (() => {
|
|
||||||
try {
|
|
||||||
const s =
|
|
||||||
typeof parsed.message === 'string' ? parsed.message : JSON.stringify(parsed.message)
|
|
||||||
return s ? `${s.slice(0, 200)}${s.length > 200 ? '...' : ''}` : undefined
|
|
||||||
} catch {
|
|
||||||
return undefined
|
|
||||||
}
|
|
||||||
})()
|
|
||||||
|
|
||||||
logger.info(`[${tracker.requestId}] Forwarding tool mark-complete`, {
|
|
||||||
userId,
|
|
||||||
toolCallId: parsed.id,
|
|
||||||
toolName: parsed.name,
|
|
||||||
status: parsed.status,
|
|
||||||
hasMessage: parsed.message !== undefined,
|
|
||||||
hasData: parsed.data !== undefined,
|
|
||||||
messagePreview,
|
|
||||||
agentUrl: `${SIM_AGENT_API_URL}/api/tools/mark-complete`,
|
|
||||||
})
|
|
||||||
|
|
||||||
const agentRes = await fetch(`${SIM_AGENT_API_URL}/api/tools/mark-complete`, {
|
|
||||||
method: 'POST',
|
|
||||||
headers: {
|
|
||||||
'Content-Type': 'application/json',
|
|
||||||
...(env.COPILOT_API_KEY ? { 'x-api-key': env.COPILOT_API_KEY } : {}),
|
|
||||||
},
|
|
||||||
body: JSON.stringify(parsed),
|
|
||||||
})
|
|
||||||
|
|
||||||
// Attempt to parse agent response JSON
|
|
||||||
let agentJson: any = null
|
|
||||||
let agentText: string | null = null
|
|
||||||
try {
|
|
||||||
agentJson = await agentRes.json()
|
|
||||||
} catch (_) {
|
|
||||||
try {
|
|
||||||
agentText = await agentRes.text()
|
|
||||||
} catch {}
|
|
||||||
}
|
|
||||||
|
|
||||||
logger.info(`[${tracker.requestId}] Agent responded to mark-complete`, {
|
|
||||||
status: agentRes.status,
|
|
||||||
ok: agentRes.ok,
|
|
||||||
responseJsonPreview: agentJson ? JSON.stringify(agentJson).slice(0, 300) : undefined,
|
|
||||||
responseTextPreview: agentText ? agentText.slice(0, 300) : undefined,
|
|
||||||
})
|
|
||||||
|
|
||||||
if (agentRes.ok) {
|
|
||||||
return NextResponse.json({ success: true })
|
|
||||||
}
|
|
||||||
|
|
||||||
const errorMessage =
|
|
||||||
agentJson?.error || agentText || `Agent responded with status ${agentRes.status}`
|
|
||||||
const status = agentRes.status >= 500 ? 500 : 400
|
|
||||||
|
|
||||||
logger.warn(`[${tracker.requestId}] Mark-complete failed`, {
|
|
||||||
status,
|
|
||||||
error: errorMessage,
|
|
||||||
})
|
|
||||||
|
|
||||||
return NextResponse.json({ success: false, error: errorMessage }, { status })
|
|
||||||
} catch (error) {
|
|
||||||
if (error instanceof z.ZodError) {
|
|
||||||
logger.warn(`[${tracker.requestId}] Invalid mark-complete request body`, {
|
|
||||||
issues: error.issues,
|
|
||||||
})
|
|
||||||
return createBadRequestResponse('Invalid request body for mark-complete')
|
|
||||||
}
|
|
||||||
logger.error(`[${tracker.requestId}] Failed to proxy mark-complete:`, error)
|
|
||||||
return createInternalServerErrorResponse('Failed to mark tool as complete')
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,138 +0,0 @@
|
|||||||
import { createLogger } from '@sim/logger'
|
|
||||||
import { eq } from 'drizzle-orm'
|
|
||||||
import { type NextRequest, NextResponse } from 'next/server'
|
|
||||||
import { getSession } from '@/lib/auth'
|
|
||||||
import type { CopilotModelId } from '@/lib/copilot/models'
|
|
||||||
import { db } from '@/../../packages/db'
|
|
||||||
import { settings } from '@/../../packages/db/schema'
|
|
||||||
|
|
||||||
const logger = createLogger('CopilotUserModelsAPI')
|
|
||||||
|
|
||||||
const DEFAULT_ENABLED_MODELS: Record<CopilotModelId, boolean> = {
|
|
||||||
'gpt-4o': false,
|
|
||||||
'gpt-4.1': false,
|
|
||||||
'gpt-5-fast': false,
|
|
||||||
'gpt-5': true,
|
|
||||||
'gpt-5-medium': false,
|
|
||||||
'gpt-5-high': false,
|
|
||||||
'gpt-5.1-fast': false,
|
|
||||||
'gpt-5.1': false,
|
|
||||||
'gpt-5.1-medium': false,
|
|
||||||
'gpt-5.1-high': false,
|
|
||||||
'gpt-5-codex': false,
|
|
||||||
'gpt-5.1-codex': false,
|
|
||||||
'gpt-5.2': false,
|
|
||||||
'gpt-5.2-codex': true,
|
|
||||||
'gpt-5.2-pro': true,
|
|
||||||
o3: true,
|
|
||||||
'claude-4-sonnet': false,
|
|
||||||
'claude-4.5-haiku': true,
|
|
||||||
'claude-4.5-sonnet': true,
|
|
||||||
'claude-4.5-opus': true,
|
|
||||||
'claude-4.1-opus': false,
|
|
||||||
'gemini-3-pro': true,
|
|
||||||
}
|
|
||||||
|
|
||||||
// GET - Fetch user's enabled models
|
|
||||||
export async function GET(request: NextRequest) {
|
|
||||||
try {
|
|
||||||
const session = await getSession()
|
|
||||||
|
|
||||||
if (!session?.user?.id) {
|
|
||||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const userId = session.user.id
|
|
||||||
|
|
||||||
const [userSettings] = await db
|
|
||||||
.select()
|
|
||||||
.from(settings)
|
|
||||||
.where(eq(settings.userId, userId))
|
|
||||||
.limit(1)
|
|
||||||
|
|
||||||
if (userSettings) {
|
|
||||||
const userModelsMap = (userSettings.copilotEnabledModels as Record<string, boolean>) || {}
|
|
||||||
|
|
||||||
const mergedModels = { ...DEFAULT_ENABLED_MODELS }
|
|
||||||
for (const [modelId, enabled] of Object.entries(userModelsMap)) {
|
|
||||||
if (modelId in mergedModels) {
|
|
||||||
mergedModels[modelId as CopilotModelId] = enabled
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const hasNewModels = Object.keys(DEFAULT_ENABLED_MODELS).some(
|
|
||||||
(key) => !(key in userModelsMap)
|
|
||||||
)
|
|
||||||
|
|
||||||
if (hasNewModels) {
|
|
||||||
await db
|
|
||||||
.update(settings)
|
|
||||||
.set({
|
|
||||||
copilotEnabledModels: mergedModels,
|
|
||||||
updatedAt: new Date(),
|
|
||||||
})
|
|
||||||
.where(eq(settings.userId, userId))
|
|
||||||
}
|
|
||||||
|
|
||||||
return NextResponse.json({
|
|
||||||
enabledModels: mergedModels,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
await db.insert(settings).values({
|
|
||||||
id: userId,
|
|
||||||
userId,
|
|
||||||
copilotEnabledModels: DEFAULT_ENABLED_MODELS,
|
|
||||||
})
|
|
||||||
|
|
||||||
logger.info('Created new settings record with default models', { userId })
|
|
||||||
|
|
||||||
return NextResponse.json({
|
|
||||||
enabledModels: DEFAULT_ENABLED_MODELS,
|
|
||||||
})
|
|
||||||
} catch (error) {
|
|
||||||
logger.error('Failed to fetch user models', { error })
|
|
||||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// PUT - Update user's enabled models
|
|
||||||
export async function PUT(request: NextRequest) {
|
|
||||||
try {
|
|
||||||
const session = await getSession()
|
|
||||||
|
|
||||||
if (!session?.user?.id) {
|
|
||||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const userId = session.user.id
|
|
||||||
const body = await request.json()
|
|
||||||
|
|
||||||
if (!body.enabledModels || typeof body.enabledModels !== 'object') {
|
|
||||||
return NextResponse.json({ error: 'enabledModels must be an object' }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const [existing] = await db.select().from(settings).where(eq(settings.userId, userId)).limit(1)
|
|
||||||
|
|
||||||
if (existing) {
|
|
||||||
await db
|
|
||||||
.update(settings)
|
|
||||||
.set({
|
|
||||||
copilotEnabledModels: body.enabledModels,
|
|
||||||
updatedAt: new Date(),
|
|
||||||
})
|
|
||||||
.where(eq(settings.userId, userId))
|
|
||||||
} else {
|
|
||||||
await db.insert(settings).values({
|
|
||||||
id: userId,
|
|
||||||
userId,
|
|
||||||
copilotEnabledModels: body.enabledModels,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
return NextResponse.json({ success: true })
|
|
||||||
} catch (error) {
|
|
||||||
logger.error('Failed to update user models', { error })
|
|
||||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,5 +1,5 @@
|
|||||||
import { db } from '@sim/db'
|
import { db } from '@sim/db'
|
||||||
import { webhook as webhookTable, workflow as workflowTable } from '@sim/db/schema'
|
import { account, webhook as webhookTable } from '@sim/db/schema'
|
||||||
import { createLogger } from '@sim/logger'
|
import { createLogger } from '@sim/logger'
|
||||||
import { and, eq, or } from 'drizzle-orm'
|
import { and, eq, or } from 'drizzle-orm'
|
||||||
import { type NextRequest, NextResponse } from 'next/server'
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
@@ -8,6 +8,16 @@ import { refreshAccessTokenIfNeeded } from '@/app/api/auth/oauth/utils'
|
|||||||
|
|
||||||
const logger = createLogger('TeamsSubscriptionRenewal')
|
const logger = createLogger('TeamsSubscriptionRenewal')
|
||||||
|
|
||||||
|
async function getCredentialOwnerUserId(credentialId: string): Promise<string | null> {
|
||||||
|
const [credentialRecord] = await db
|
||||||
|
.select({ userId: account.userId })
|
||||||
|
.from(account)
|
||||||
|
.where(eq(account.id, credentialId))
|
||||||
|
.limit(1)
|
||||||
|
|
||||||
|
return credentialRecord?.userId ?? null
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Cron endpoint to renew Microsoft Teams chat subscriptions before they expire
|
* Cron endpoint to renew Microsoft Teams chat subscriptions before they expire
|
||||||
*
|
*
|
||||||
@@ -27,14 +37,12 @@ export async function GET(request: NextRequest) {
|
|||||||
let totalFailed = 0
|
let totalFailed = 0
|
||||||
let totalChecked = 0
|
let totalChecked = 0
|
||||||
|
|
||||||
// Get all active Microsoft Teams webhooks with their workflows
|
// Get all active Microsoft Teams webhooks
|
||||||
const webhooksWithWorkflows = await db
|
const webhooksWithWorkflows = await db
|
||||||
.select({
|
.select({
|
||||||
webhook: webhookTable,
|
webhook: webhookTable,
|
||||||
workflow: workflowTable,
|
|
||||||
})
|
})
|
||||||
.from(webhookTable)
|
.from(webhookTable)
|
||||||
.innerJoin(workflowTable, eq(webhookTable.workflowId, workflowTable.id))
|
|
||||||
.where(
|
.where(
|
||||||
and(
|
and(
|
||||||
eq(webhookTable.isActive, true),
|
eq(webhookTable.isActive, true),
|
||||||
@@ -52,7 +60,7 @@ export async function GET(request: NextRequest) {
|
|||||||
// Renewal threshold: 48 hours before expiration
|
// Renewal threshold: 48 hours before expiration
|
||||||
const renewalThreshold = new Date(Date.now() + 48 * 60 * 60 * 1000)
|
const renewalThreshold = new Date(Date.now() + 48 * 60 * 60 * 1000)
|
||||||
|
|
||||||
for (const { webhook, workflow } of webhooksWithWorkflows) {
|
for (const { webhook } of webhooksWithWorkflows) {
|
||||||
const config = (webhook.providerConfig as Record<string, any>) || {}
|
const config = (webhook.providerConfig as Record<string, any>) || {}
|
||||||
|
|
||||||
// Check if this is a Teams chat subscription that needs renewal
|
// Check if this is a Teams chat subscription that needs renewal
|
||||||
@@ -80,10 +88,17 @@ export async function GET(request: NextRequest) {
|
|||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const credentialOwnerUserId = await getCredentialOwnerUserId(credentialId)
|
||||||
|
if (!credentialOwnerUserId) {
|
||||||
|
logger.error(`Credential owner not found for credential ${credentialId}`)
|
||||||
|
totalFailed++
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
// Get fresh access token
|
// Get fresh access token
|
||||||
const accessToken = await refreshAccessTokenIfNeeded(
|
const accessToken = await refreshAccessTokenIfNeeded(
|
||||||
credentialId,
|
credentialId,
|
||||||
workflow.userId,
|
credentialOwnerUserId,
|
||||||
`renewal-${webhook.id}`
|
`renewal-${webhook.id}`
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|||||||
@@ -42,6 +42,10 @@ function setupFileApiMocks(
|
|||||||
verifyCopilotFileAccess: vi.fn().mockResolvedValue(true),
|
verifyCopilotFileAccess: vi.fn().mockResolvedValue(true),
|
||||||
}))
|
}))
|
||||||
|
|
||||||
|
vi.doMock('@/lib/workspaces/permissions/utils', () => ({
|
||||||
|
getUserEntityPermissions: vi.fn().mockResolvedValue('admin'),
|
||||||
|
}))
|
||||||
|
|
||||||
vi.doMock('@/lib/uploads/contexts/workspace', () => ({
|
vi.doMock('@/lib/uploads/contexts/workspace', () => ({
|
||||||
uploadWorkspaceFile: vi.fn().mockResolvedValue({
|
uploadWorkspaceFile: vi.fn().mockResolvedValue({
|
||||||
id: 'test-file-id',
|
id: 'test-file-id',
|
||||||
|
|||||||
@@ -206,6 +206,13 @@ export async function POST(request: NextRequest) {
|
|||||||
if (!workspaceId) {
|
if (!workspaceId) {
|
||||||
throw new InvalidRequestError('Workspace context requires workspaceId parameter')
|
throw new InvalidRequestError('Workspace context requires workspaceId parameter')
|
||||||
}
|
}
|
||||||
|
const permission = await getUserEntityPermissions(session.user.id, 'workspace', workspaceId)
|
||||||
|
if (permission !== 'admin' && permission !== 'write') {
|
||||||
|
return NextResponse.json(
|
||||||
|
{ error: 'Write or Admin access required for workspace uploads' },
|
||||||
|
{ status: 403 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const { uploadWorkspaceFile } = await import('@/lib/uploads/contexts/workspace')
|
const { uploadWorkspaceFile } = await import('@/lib/uploads/contexts/workspace')
|
||||||
|
|||||||
@@ -22,8 +22,8 @@ vi.mock('@/lib/core/config/feature-flags', () => ({
|
|||||||
isProd: false,
|
isProd: false,
|
||||||
}))
|
}))
|
||||||
|
|
||||||
vi.mock('@/lib/workspaces/permissions/utils', () => ({
|
vi.mock('@/lib/workflows/utils', () => ({
|
||||||
hasAdminPermission: vi.fn(),
|
authorizeWorkflowByWorkspacePermission: vi.fn(),
|
||||||
}))
|
}))
|
||||||
|
|
||||||
describe('Form API Utils', () => {
|
describe('Form API Utils', () => {
|
||||||
|
|||||||
@@ -9,7 +9,7 @@ import {
|
|||||||
validateAuthToken,
|
validateAuthToken,
|
||||||
} from '@/lib/core/security/deployment'
|
} from '@/lib/core/security/deployment'
|
||||||
import { decryptSecret } from '@/lib/core/security/encryption'
|
import { decryptSecret } from '@/lib/core/security/encryption'
|
||||||
import { hasAdminPermission } from '@/lib/workspaces/permissions/utils'
|
import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils'
|
||||||
|
|
||||||
const logger = createLogger('FormAuthUtils')
|
const logger = createLogger('FormAuthUtils')
|
||||||
|
|
||||||
@@ -24,29 +24,23 @@ export function setFormAuthCookie(
|
|||||||
|
|
||||||
/**
|
/**
|
||||||
* Check if user has permission to create a form for a specific workflow
|
* Check if user has permission to create a form for a specific workflow
|
||||||
* Either the user owns the workflow directly OR has admin permission for the workflow's workspace
|
|
||||||
*/
|
*/
|
||||||
export async function checkWorkflowAccessForFormCreation(
|
export async function checkWorkflowAccessForFormCreation(
|
||||||
workflowId: string,
|
workflowId: string,
|
||||||
userId: string
|
userId: string
|
||||||
): Promise<{ hasAccess: boolean; workflow?: any }> {
|
): Promise<{ hasAccess: boolean; workflow?: any }> {
|
||||||
const workflowData = await db.select().from(workflow).where(eq(workflow.id, workflowId)).limit(1)
|
const authorization = await authorizeWorkflowByWorkspacePermission({
|
||||||
|
workflowId,
|
||||||
|
userId,
|
||||||
|
action: 'admin',
|
||||||
|
})
|
||||||
|
|
||||||
if (workflowData.length === 0) {
|
if (!authorization.workflow) {
|
||||||
return { hasAccess: false }
|
return { hasAccess: false }
|
||||||
}
|
}
|
||||||
|
|
||||||
const workflowRecord = workflowData[0]
|
if (authorization.allowed) {
|
||||||
|
return { hasAccess: true, workflow: authorization.workflow }
|
||||||
if (workflowRecord.userId === userId) {
|
|
||||||
return { hasAccess: true, workflow: workflowRecord }
|
|
||||||
}
|
|
||||||
|
|
||||||
if (workflowRecord.workspaceId) {
|
|
||||||
const hasAdmin = await hasAdminPermission(userId, workflowRecord.workspaceId)
|
|
||||||
if (hasAdmin) {
|
|
||||||
return { hasAccess: true, workflow: workflowRecord }
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
return { hasAccess: false }
|
return { hasAccess: false }
|
||||||
@@ -54,17 +48,13 @@ export async function checkWorkflowAccessForFormCreation(
|
|||||||
|
|
||||||
/**
|
/**
|
||||||
* Check if user has access to view/edit/delete a specific form
|
* Check if user has access to view/edit/delete a specific form
|
||||||
* Either the user owns the form directly OR has admin permission for the workflow's workspace
|
|
||||||
*/
|
*/
|
||||||
export async function checkFormAccess(
|
export async function checkFormAccess(
|
||||||
formId: string,
|
formId: string,
|
||||||
userId: string
|
userId: string
|
||||||
): Promise<{ hasAccess: boolean; form?: any }> {
|
): Promise<{ hasAccess: boolean; form?: any }> {
|
||||||
const formData = await db
|
const formData = await db
|
||||||
.select({
|
.select({ form: form, workflowWorkspaceId: workflow.workspaceId })
|
||||||
form: form,
|
|
||||||
workflowWorkspaceId: workflow.workspaceId,
|
|
||||||
})
|
|
||||||
.from(form)
|
.from(form)
|
||||||
.innerJoin(workflow, eq(form.workflowId, workflow.id))
|
.innerJoin(workflow, eq(form.workflowId, workflow.id))
|
||||||
.where(eq(form.id, formId))
|
.where(eq(form.id, formId))
|
||||||
@@ -75,21 +65,19 @@ export async function checkFormAccess(
|
|||||||
}
|
}
|
||||||
|
|
||||||
const { form: formRecord, workflowWorkspaceId } = formData[0]
|
const { form: formRecord, workflowWorkspaceId } = formData[0]
|
||||||
|
if (!workflowWorkspaceId) {
|
||||||
if (formRecord.userId === userId) {
|
|
||||||
return { hasAccess: true, form: formRecord }
|
|
||||||
}
|
|
||||||
|
|
||||||
if (workflowWorkspaceId) {
|
|
||||||
const hasAdmin = await hasAdminPermission(userId, workflowWorkspaceId)
|
|
||||||
if (hasAdmin) {
|
|
||||||
return { hasAccess: true, form: formRecord }
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return { hasAccess: false }
|
return { hasAccess: false }
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const authorization = await authorizeWorkflowByWorkspacePermission({
|
||||||
|
workflowId: formRecord.workflowId,
|
||||||
|
userId,
|
||||||
|
action: 'admin',
|
||||||
|
})
|
||||||
|
|
||||||
|
return authorization.allowed ? { hasAccess: true, form: formRecord } : { hasAccess: false }
|
||||||
|
}
|
||||||
|
|
||||||
export async function validateFormAuth(
|
export async function validateFormAuth(
|
||||||
requestId: string,
|
requestId: string,
|
||||||
deployment: any,
|
deployment: any,
|
||||||
|
|||||||
@@ -1,5 +1,6 @@
|
|||||||
import { createLogger } from '@sim/logger'
|
import { createLogger } from '@sim/logger'
|
||||||
import { type NextRequest, NextResponse } from 'next/server'
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
|
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
|
||||||
import { generateRequestId } from '@/lib/core/utils/request'
|
import { generateRequestId } from '@/lib/core/utils/request'
|
||||||
import { validateHallucination } from '@/lib/guardrails/validate_hallucination'
|
import { validateHallucination } from '@/lib/guardrails/validate_hallucination'
|
||||||
import { validateJson } from '@/lib/guardrails/validate_json'
|
import { validateJson } from '@/lib/guardrails/validate_json'
|
||||||
@@ -13,6 +14,11 @@ export async function POST(request: NextRequest) {
|
|||||||
logger.info(`[${requestId}] Guardrails validation request received`)
|
logger.info(`[${requestId}] Guardrails validation request received`)
|
||||||
|
|
||||||
try {
|
try {
|
||||||
|
const auth = await checkSessionOrInternalAuth(request, { requireWorkflowId: false })
|
||||||
|
if (!auth.success || !auth.userId) {
|
||||||
|
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||||
|
}
|
||||||
|
|
||||||
const body = await request.json()
|
const body = await request.json()
|
||||||
const {
|
const {
|
||||||
validationType,
|
validationType,
|
||||||
@@ -109,6 +115,10 @@ export async function POST(request: NextRequest) {
|
|||||||
validationType,
|
validationType,
|
||||||
inputType: typeof input,
|
inputType: typeof input,
|
||||||
})
|
})
|
||||||
|
const authHeaders = {
|
||||||
|
cookie: request.headers.get('cookie') || undefined,
|
||||||
|
authorization: request.headers.get('authorization') || undefined,
|
||||||
|
}
|
||||||
|
|
||||||
const validationResult = await executeValidation(
|
const validationResult = await executeValidation(
|
||||||
validationType,
|
validationType,
|
||||||
@@ -134,6 +144,7 @@ export async function POST(request: NextRequest) {
|
|||||||
piiEntityTypes,
|
piiEntityTypes,
|
||||||
piiMode,
|
piiMode,
|
||||||
piiLanguage,
|
piiLanguage,
|
||||||
|
authHeaders,
|
||||||
requestId
|
requestId
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -213,6 +224,7 @@ async function executeValidation(
|
|||||||
piiEntityTypes: string[] | undefined,
|
piiEntityTypes: string[] | undefined,
|
||||||
piiMode: string | undefined,
|
piiMode: string | undefined,
|
||||||
piiLanguage: string | undefined,
|
piiLanguage: string | undefined,
|
||||||
|
authHeaders: { cookie?: string; authorization?: string } | undefined,
|
||||||
requestId: string
|
requestId: string
|
||||||
): Promise<{
|
): Promise<{
|
||||||
passed: boolean
|
passed: boolean
|
||||||
@@ -253,6 +265,7 @@ async function executeValidation(
|
|||||||
providerCredentials,
|
providerCredentials,
|
||||||
workflowId,
|
workflowId,
|
||||||
workspaceId,
|
workspaceId,
|
||||||
|
authHeaders,
|
||||||
requestId,
|
requestId,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -76,7 +76,7 @@ export async function GET(
|
|||||||
}
|
}
|
||||||
|
|
||||||
if (job.status === JOB_STATUS.PROCESSING || job.status === JOB_STATUS.PENDING) {
|
if (job.status === JOB_STATUS.PROCESSING || job.status === JOB_STATUS.PENDING) {
|
||||||
response.estimatedDuration = 180000
|
response.estimatedDuration = 300000
|
||||||
}
|
}
|
||||||
|
|
||||||
return NextResponse.json(response)
|
return NextResponse.json(response)
|
||||||
|
|||||||
@@ -1,10 +1,10 @@
|
|||||||
import { createLogger } from '@sim/logger'
|
import { createLogger } from '@sim/logger'
|
||||||
import { type NextRequest, NextResponse } from 'next/server'
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
import { z } from 'zod'
|
import { z } from 'zod'
|
||||||
import { getSession } from '@/lib/auth'
|
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
|
||||||
import { generateRequestId } from '@/lib/core/utils/request'
|
import { generateRequestId } from '@/lib/core/utils/request'
|
||||||
import { batchChunkOperation, createChunk, queryChunks } from '@/lib/knowledge/chunks/service'
|
import { batchChunkOperation, createChunk, queryChunks } from '@/lib/knowledge/chunks/service'
|
||||||
import { getUserId } from '@/app/api/auth/oauth/utils'
|
import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils'
|
||||||
import { checkDocumentAccess, checkDocumentWriteAccess } from '@/app/api/knowledge/utils'
|
import { checkDocumentAccess, checkDocumentWriteAccess } from '@/app/api/knowledge/utils'
|
||||||
import { calculateCost } from '@/providers/utils'
|
import { calculateCost } from '@/providers/utils'
|
||||||
|
|
||||||
@@ -38,13 +38,14 @@ export async function GET(
|
|||||||
const { id: knowledgeBaseId, documentId } = await params
|
const { id: knowledgeBaseId, documentId } = await params
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const session = await getSession()
|
const auth = await checkSessionOrInternalAuth(req, { requireWorkflowId: false })
|
||||||
if (!session?.user?.id) {
|
if (!auth.success || !auth.userId) {
|
||||||
logger.warn(`[${requestId}] Unauthorized chunks access attempt`)
|
logger.warn(`[${requestId}] Unauthorized chunks access attempt`)
|
||||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||||
}
|
}
|
||||||
|
const userId = auth.userId
|
||||||
|
|
||||||
const accessCheck = await checkDocumentAccess(knowledgeBaseId, documentId, session.user.id)
|
const accessCheck = await checkDocumentAccess(knowledgeBaseId, documentId, userId)
|
||||||
|
|
||||||
if (!accessCheck.hasAccess) {
|
if (!accessCheck.hasAccess) {
|
||||||
if (accessCheck.notFound) {
|
if (accessCheck.notFound) {
|
||||||
@@ -54,7 +55,7 @@ export async function GET(
|
|||||||
return NextResponse.json({ error: accessCheck.reason }, { status: 404 })
|
return NextResponse.json({ error: accessCheck.reason }, { status: 404 })
|
||||||
}
|
}
|
||||||
logger.warn(
|
logger.warn(
|
||||||
`[${requestId}] User ${session.user.id} attempted unauthorized chunks access: ${accessCheck.reason}`
|
`[${requestId}] User ${userId} attempted unauthorized chunks access: ${accessCheck.reason}`
|
||||||
)
|
)
|
||||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||||
}
|
}
|
||||||
@@ -113,13 +114,25 @@ export async function POST(
|
|||||||
const body = await req.json()
|
const body = await req.json()
|
||||||
const { workflowId, ...searchParams } = body
|
const { workflowId, ...searchParams } = body
|
||||||
|
|
||||||
const userId = await getUserId(requestId, workflowId)
|
const auth = await checkSessionOrInternalAuth(req, { requireWorkflowId: false })
|
||||||
|
if (!auth.success || !auth.userId) {
|
||||||
|
logger.warn(`[${requestId}] Authentication failed: ${auth.error || 'Unauthorized'}`)
|
||||||
|
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||||
|
}
|
||||||
|
const userId = auth.userId
|
||||||
|
|
||||||
if (!userId) {
|
if (workflowId) {
|
||||||
const errorMessage = workflowId ? 'Workflow not found' : 'Unauthorized'
|
const authorization = await authorizeWorkflowByWorkspacePermission({
|
||||||
const statusCode = workflowId ? 404 : 401
|
workflowId,
|
||||||
logger.warn(`[${requestId}] Authentication failed: ${errorMessage}`)
|
userId,
|
||||||
return NextResponse.json({ error: errorMessage }, { status: statusCode })
|
action: 'write',
|
||||||
|
})
|
||||||
|
if (!authorization.allowed) {
|
||||||
|
return NextResponse.json(
|
||||||
|
{ error: authorization.message || 'Access denied' },
|
||||||
|
{ status: authorization.status }
|
||||||
|
)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
const accessCheck = await checkDocumentWriteAccess(knowledgeBaseId, documentId, userId)
|
const accessCheck = await checkDocumentWriteAccess(knowledgeBaseId, documentId, userId)
|
||||||
@@ -248,13 +261,14 @@ export async function PATCH(
|
|||||||
const { id: knowledgeBaseId, documentId } = await params
|
const { id: knowledgeBaseId, documentId } = await params
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const session = await getSession()
|
const auth = await checkSessionOrInternalAuth(req, { requireWorkflowId: false })
|
||||||
if (!session?.user?.id) {
|
if (!auth.success || !auth.userId) {
|
||||||
logger.warn(`[${requestId}] Unauthorized batch chunk operation attempt`)
|
logger.warn(`[${requestId}] Unauthorized batch chunk operation attempt`)
|
||||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||||
}
|
}
|
||||||
|
const userId = auth.userId
|
||||||
|
|
||||||
const accessCheck = await checkDocumentAccess(knowledgeBaseId, documentId, session.user.id)
|
const accessCheck = await checkDocumentAccess(knowledgeBaseId, documentId, userId)
|
||||||
|
|
||||||
if (!accessCheck.hasAccess) {
|
if (!accessCheck.hasAccess) {
|
||||||
if (accessCheck.notFound) {
|
if (accessCheck.notFound) {
|
||||||
@@ -264,7 +278,7 @@ export async function PATCH(
|
|||||||
return NextResponse.json({ error: accessCheck.reason }, { status: 404 })
|
return NextResponse.json({ error: accessCheck.reason }, { status: 404 })
|
||||||
}
|
}
|
||||||
logger.warn(
|
logger.warn(
|
||||||
`[${requestId}] User ${session.user.id} attempted unauthorized batch chunk operation: ${accessCheck.reason}`
|
`[${requestId}] User ${userId} attempted unauthorized batch chunk operation: ${accessCheck.reason}`
|
||||||
)
|
)
|
||||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
import { createLogger } from '@sim/logger'
|
import { createLogger } from '@sim/logger'
|
||||||
import { type NextRequest, NextResponse } from 'next/server'
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
import { z } from 'zod'
|
import { z } from 'zod'
|
||||||
import { getSession } from '@/lib/auth'
|
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
|
||||||
import { generateRequestId } from '@/lib/core/utils/request'
|
import { generateRequestId } from '@/lib/core/utils/request'
|
||||||
import {
|
import {
|
||||||
deleteDocument,
|
deleteDocument,
|
||||||
@@ -54,13 +54,14 @@ export async function GET(
|
|||||||
const { id: knowledgeBaseId, documentId } = await params
|
const { id: knowledgeBaseId, documentId } = await params
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const session = await getSession()
|
const auth = await checkSessionOrInternalAuth(req, { requireWorkflowId: false })
|
||||||
if (!session?.user?.id) {
|
if (!auth.success || !auth.userId) {
|
||||||
logger.warn(`[${requestId}] Unauthorized document access attempt`)
|
logger.warn(`[${requestId}] Unauthorized document access attempt`)
|
||||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||||
}
|
}
|
||||||
|
const userId = auth.userId
|
||||||
|
|
||||||
const accessCheck = await checkDocumentAccess(knowledgeBaseId, documentId, session.user.id)
|
const accessCheck = await checkDocumentAccess(knowledgeBaseId, documentId, userId)
|
||||||
|
|
||||||
if (!accessCheck.hasAccess) {
|
if (!accessCheck.hasAccess) {
|
||||||
if (accessCheck.notFound) {
|
if (accessCheck.notFound) {
|
||||||
@@ -70,7 +71,7 @@ export async function GET(
|
|||||||
return NextResponse.json({ error: accessCheck.reason }, { status: 404 })
|
return NextResponse.json({ error: accessCheck.reason }, { status: 404 })
|
||||||
}
|
}
|
||||||
logger.warn(
|
logger.warn(
|
||||||
`[${requestId}] User ${session.user.id} attempted unauthorized document access: ${accessCheck.reason}`
|
`[${requestId}] User ${userId} attempted unauthorized document access: ${accessCheck.reason}`
|
||||||
)
|
)
|
||||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||||
}
|
}
|
||||||
@@ -97,13 +98,14 @@ export async function PUT(
|
|||||||
const { id: knowledgeBaseId, documentId } = await params
|
const { id: knowledgeBaseId, documentId } = await params
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const session = await getSession()
|
const auth = await checkSessionOrInternalAuth(req, { requireWorkflowId: false })
|
||||||
if (!session?.user?.id) {
|
if (!auth.success || !auth.userId) {
|
||||||
logger.warn(`[${requestId}] Unauthorized document update attempt`)
|
logger.warn(`[${requestId}] Unauthorized document update attempt`)
|
||||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||||
}
|
}
|
||||||
|
const userId = auth.userId
|
||||||
|
|
||||||
const accessCheck = await checkDocumentWriteAccess(knowledgeBaseId, documentId, session.user.id)
|
const accessCheck = await checkDocumentWriteAccess(knowledgeBaseId, documentId, userId)
|
||||||
|
|
||||||
if (!accessCheck.hasAccess) {
|
if (!accessCheck.hasAccess) {
|
||||||
if (accessCheck.notFound) {
|
if (accessCheck.notFound) {
|
||||||
@@ -113,7 +115,7 @@ export async function PUT(
|
|||||||
return NextResponse.json({ error: accessCheck.reason }, { status: 404 })
|
return NextResponse.json({ error: accessCheck.reason }, { status: 404 })
|
||||||
}
|
}
|
||||||
logger.warn(
|
logger.warn(
|
||||||
`[${requestId}] User ${session.user.id} attempted unauthorized document update: ${accessCheck.reason}`
|
`[${requestId}] User ${userId} attempted unauthorized document update: ${accessCheck.reason}`
|
||||||
)
|
)
|
||||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||||
}
|
}
|
||||||
@@ -227,13 +229,14 @@ export async function DELETE(
|
|||||||
const { id: knowledgeBaseId, documentId } = await params
|
const { id: knowledgeBaseId, documentId } = await params
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const session = await getSession()
|
const auth = await checkSessionOrInternalAuth(req, { requireWorkflowId: false })
|
||||||
if (!session?.user?.id) {
|
if (!auth.success || !auth.userId) {
|
||||||
logger.warn(`[${requestId}] Unauthorized document delete attempt`)
|
logger.warn(`[${requestId}] Unauthorized document delete attempt`)
|
||||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||||
}
|
}
|
||||||
|
const userId = auth.userId
|
||||||
|
|
||||||
const accessCheck = await checkDocumentWriteAccess(knowledgeBaseId, documentId, session.user.id)
|
const accessCheck = await checkDocumentWriteAccess(knowledgeBaseId, documentId, userId)
|
||||||
|
|
||||||
if (!accessCheck.hasAccess) {
|
if (!accessCheck.hasAccess) {
|
||||||
if (accessCheck.notFound) {
|
if (accessCheck.notFound) {
|
||||||
@@ -243,7 +246,7 @@ export async function DELETE(
|
|||||||
return NextResponse.json({ error: accessCheck.reason }, { status: 404 })
|
return NextResponse.json({ error: accessCheck.reason }, { status: 404 })
|
||||||
}
|
}
|
||||||
logger.warn(
|
logger.warn(
|
||||||
`[${requestId}] User ${session.user.id} attempted unauthorized document deletion: ${accessCheck.reason}`
|
`[${requestId}] User ${userId} attempted unauthorized document deletion: ${accessCheck.reason}`
|
||||||
)
|
)
|
||||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -3,6 +3,7 @@ import { createLogger } from '@sim/logger'
|
|||||||
import { type NextRequest, NextResponse } from 'next/server'
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
import { z } from 'zod'
|
import { z } from 'zod'
|
||||||
import { getSession } from '@/lib/auth'
|
import { getSession } from '@/lib/auth'
|
||||||
|
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
|
||||||
import {
|
import {
|
||||||
bulkDocumentOperation,
|
bulkDocumentOperation,
|
||||||
bulkDocumentOperationByFilter,
|
bulkDocumentOperationByFilter,
|
||||||
@@ -13,7 +14,7 @@ import {
|
|||||||
processDocumentsWithQueue,
|
processDocumentsWithQueue,
|
||||||
} from '@/lib/knowledge/documents/service'
|
} from '@/lib/knowledge/documents/service'
|
||||||
import type { DocumentSortField, SortOrder } from '@/lib/knowledge/documents/types'
|
import type { DocumentSortField, SortOrder } from '@/lib/knowledge/documents/types'
|
||||||
import { getUserId } from '@/app/api/auth/oauth/utils'
|
import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils'
|
||||||
import { checkKnowledgeBaseAccess, checkKnowledgeBaseWriteAccess } from '@/app/api/knowledge/utils'
|
import { checkKnowledgeBaseAccess, checkKnowledgeBaseWriteAccess } from '@/app/api/knowledge/utils'
|
||||||
|
|
||||||
const logger = createLogger('DocumentsAPI')
|
const logger = createLogger('DocumentsAPI')
|
||||||
@@ -170,16 +171,28 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
|||||||
bodyKeys: Object.keys(body),
|
bodyKeys: Object.keys(body),
|
||||||
})
|
})
|
||||||
|
|
||||||
const userId = await getUserId(requestId, workflowId)
|
const auth = await checkSessionOrInternalAuth(req, { requireWorkflowId: false })
|
||||||
|
if (!auth.success || !auth.userId) {
|
||||||
if (!userId) {
|
logger.warn(`[${requestId}] Authentication failed: ${auth.error || 'Unauthorized'}`, {
|
||||||
const errorMessage = workflowId ? 'Workflow not found' : 'Unauthorized'
|
|
||||||
const statusCode = workflowId ? 404 : 401
|
|
||||||
logger.warn(`[${requestId}] Authentication failed: ${errorMessage}`, {
|
|
||||||
workflowId,
|
workflowId,
|
||||||
hasWorkflowId: !!workflowId,
|
hasWorkflowId: !!workflowId,
|
||||||
})
|
})
|
||||||
return NextResponse.json({ error: errorMessage }, { status: statusCode })
|
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||||
|
}
|
||||||
|
const userId = auth.userId
|
||||||
|
|
||||||
|
if (workflowId) {
|
||||||
|
const authorization = await authorizeWorkflowByWorkspacePermission({
|
||||||
|
workflowId,
|
||||||
|
userId,
|
||||||
|
action: 'write',
|
||||||
|
})
|
||||||
|
if (!authorization.allowed) {
|
||||||
|
return NextResponse.json(
|
||||||
|
{ error: authorization.message || 'Access denied' },
|
||||||
|
{ status: authorization.status }
|
||||||
|
)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
const accessCheck = await checkKnowledgeBaseWriteAccess(knowledgeBaseId, userId)
|
const accessCheck = await checkKnowledgeBaseWriteAccess(knowledgeBaseId, userId)
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
import { createLogger } from '@sim/logger'
|
import { createLogger } from '@sim/logger'
|
||||||
import { type NextRequest, NextResponse } from 'next/server'
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
import { z } from 'zod'
|
import { z } from 'zod'
|
||||||
import { getSession } from '@/lib/auth'
|
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
|
||||||
import { PlatformEvents } from '@/lib/core/telemetry'
|
import { PlatformEvents } from '@/lib/core/telemetry'
|
||||||
import { generateRequestId } from '@/lib/core/utils/request'
|
import { generateRequestId } from '@/lib/core/utils/request'
|
||||||
import {
|
import {
|
||||||
@@ -54,13 +54,14 @@ export async function GET(_request: NextRequest, { params }: { params: Promise<{
|
|||||||
const { id } = await params
|
const { id } = await params
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const session = await getSession()
|
const auth = await checkSessionOrInternalAuth(_request, { requireWorkflowId: false })
|
||||||
if (!session?.user?.id) {
|
if (!auth.success || !auth.userId) {
|
||||||
logger.warn(`[${requestId}] Unauthorized knowledge base access attempt`)
|
logger.warn(`[${requestId}] Unauthorized knowledge base access attempt`)
|
||||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||||
}
|
}
|
||||||
|
const userId = auth.userId
|
||||||
|
|
||||||
const accessCheck = await checkKnowledgeBaseAccess(id, session.user.id)
|
const accessCheck = await checkKnowledgeBaseAccess(id, userId)
|
||||||
|
|
||||||
if (!accessCheck.hasAccess) {
|
if (!accessCheck.hasAccess) {
|
||||||
if ('notFound' in accessCheck && accessCheck.notFound) {
|
if ('notFound' in accessCheck && accessCheck.notFound) {
|
||||||
@@ -68,7 +69,7 @@ export async function GET(_request: NextRequest, { params }: { params: Promise<{
|
|||||||
return NextResponse.json({ error: 'Knowledge base not found' }, { status: 404 })
|
return NextResponse.json({ error: 'Knowledge base not found' }, { status: 404 })
|
||||||
}
|
}
|
||||||
logger.warn(
|
logger.warn(
|
||||||
`[${requestId}] User ${session.user.id} attempted to access unauthorized knowledge base ${id}`
|
`[${requestId}] User ${userId} attempted to access unauthorized knowledge base ${id}`
|
||||||
)
|
)
|
||||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||||
}
|
}
|
||||||
@@ -79,7 +80,7 @@ export async function GET(_request: NextRequest, { params }: { params: Promise<{
|
|||||||
return NextResponse.json({ error: 'Knowledge base not found' }, { status: 404 })
|
return NextResponse.json({ error: 'Knowledge base not found' }, { status: 404 })
|
||||||
}
|
}
|
||||||
|
|
||||||
logger.info(`[${requestId}] Retrieved knowledge base: ${id} for user ${session.user.id}`)
|
logger.info(`[${requestId}] Retrieved knowledge base: ${id} for user ${userId}`)
|
||||||
|
|
||||||
return NextResponse.json({
|
return NextResponse.json({
|
||||||
success: true,
|
success: true,
|
||||||
@@ -96,13 +97,14 @@ export async function PUT(req: NextRequest, { params }: { params: Promise<{ id:
|
|||||||
const { id } = await params
|
const { id } = await params
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const session = await getSession()
|
const auth = await checkSessionOrInternalAuth(req, { requireWorkflowId: false })
|
||||||
if (!session?.user?.id) {
|
if (!auth.success || !auth.userId) {
|
||||||
logger.warn(`[${requestId}] Unauthorized knowledge base update attempt`)
|
logger.warn(`[${requestId}] Unauthorized knowledge base update attempt`)
|
||||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||||
}
|
}
|
||||||
|
const userId = auth.userId
|
||||||
|
|
||||||
const accessCheck = await checkKnowledgeBaseWriteAccess(id, session.user.id)
|
const accessCheck = await checkKnowledgeBaseWriteAccess(id, userId)
|
||||||
|
|
||||||
if (!accessCheck.hasAccess) {
|
if (!accessCheck.hasAccess) {
|
||||||
if ('notFound' in accessCheck && accessCheck.notFound) {
|
if ('notFound' in accessCheck && accessCheck.notFound) {
|
||||||
@@ -110,7 +112,7 @@ export async function PUT(req: NextRequest, { params }: { params: Promise<{ id:
|
|||||||
return NextResponse.json({ error: 'Knowledge base not found' }, { status: 404 })
|
return NextResponse.json({ error: 'Knowledge base not found' }, { status: 404 })
|
||||||
}
|
}
|
||||||
logger.warn(
|
logger.warn(
|
||||||
`[${requestId}] User ${session.user.id} attempted to update unauthorized knowledge base ${id}`
|
`[${requestId}] User ${userId} attempted to update unauthorized knowledge base ${id}`
|
||||||
)
|
)
|
||||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||||
}
|
}
|
||||||
@@ -131,7 +133,7 @@ export async function PUT(req: NextRequest, { params }: { params: Promise<{ id:
|
|||||||
requestId
|
requestId
|
||||||
)
|
)
|
||||||
|
|
||||||
logger.info(`[${requestId}] Knowledge base updated: ${id} for user ${session.user.id}`)
|
logger.info(`[${requestId}] Knowledge base updated: ${id} for user ${userId}`)
|
||||||
|
|
||||||
return NextResponse.json({
|
return NextResponse.json({
|
||||||
success: true,
|
success: true,
|
||||||
@@ -163,13 +165,14 @@ export async function DELETE(
|
|||||||
const { id } = await params
|
const { id } = await params
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const session = await getSession()
|
const auth = await checkSessionOrInternalAuth(_request, { requireWorkflowId: false })
|
||||||
if (!session?.user?.id) {
|
if (!auth.success || !auth.userId) {
|
||||||
logger.warn(`[${requestId}] Unauthorized knowledge base delete attempt`)
|
logger.warn(`[${requestId}] Unauthorized knowledge base delete attempt`)
|
||||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||||
}
|
}
|
||||||
|
const userId = auth.userId
|
||||||
|
|
||||||
const accessCheck = await checkKnowledgeBaseWriteAccess(id, session.user.id)
|
const accessCheck = await checkKnowledgeBaseWriteAccess(id, userId)
|
||||||
|
|
||||||
if (!accessCheck.hasAccess) {
|
if (!accessCheck.hasAccess) {
|
||||||
if ('notFound' in accessCheck && accessCheck.notFound) {
|
if ('notFound' in accessCheck && accessCheck.notFound) {
|
||||||
@@ -177,7 +180,7 @@ export async function DELETE(
|
|||||||
return NextResponse.json({ error: 'Knowledge base not found' }, { status: 404 })
|
return NextResponse.json({ error: 'Knowledge base not found' }, { status: 404 })
|
||||||
}
|
}
|
||||||
logger.warn(
|
logger.warn(
|
||||||
`[${requestId}] User ${session.user.id} attempted to delete unauthorized knowledge base ${id}`
|
`[${requestId}] User ${userId} attempted to delete unauthorized knowledge base ${id}`
|
||||||
)
|
)
|
||||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||||
}
|
}
|
||||||
@@ -192,7 +195,7 @@ export async function DELETE(
|
|||||||
// Telemetry should not fail the operation
|
// Telemetry should not fail the operation
|
||||||
}
|
}
|
||||||
|
|
||||||
logger.info(`[${requestId}] Knowledge base deleted: ${id} for user ${session.user.id}`)
|
logger.info(`[${requestId}] Knowledge base deleted: ${id} for user ${userId}`)
|
||||||
|
|
||||||
return NextResponse.json({
|
return NextResponse.json({
|
||||||
success: true,
|
success: true,
|
||||||
|
|||||||
@@ -17,7 +17,7 @@ mockDrizzleOrm()
|
|||||||
mockConsoleLogger()
|
mockConsoleLogger()
|
||||||
|
|
||||||
vi.mock('@/lib/workspaces/permissions/utils', () => ({
|
vi.mock('@/lib/workspaces/permissions/utils', () => ({
|
||||||
getUserEntityPermissions: vi.fn().mockResolvedValue({ role: 'owner' }),
|
getUserEntityPermissions: vi.fn().mockResolvedValue('admin'),
|
||||||
}))
|
}))
|
||||||
|
|
||||||
describe('Knowledge Base API Route', () => {
|
describe('Knowledge Base API Route', () => {
|
||||||
|
|||||||
@@ -104,6 +104,8 @@ describe('Knowledge Search API Route', () => {
|
|||||||
|
|
||||||
const mockGetUserId = vi.fn()
|
const mockGetUserId = vi.fn()
|
||||||
const mockFetch = vi.fn()
|
const mockFetch = vi.fn()
|
||||||
|
const mockCheckSessionOrInternalAuth = vi.fn()
|
||||||
|
const mockAuthorizeWorkflowByWorkspacePermission = vi.fn()
|
||||||
|
|
||||||
const mockEmbedding = [0.1, 0.2, 0.3, 0.4, 0.5]
|
const mockEmbedding = [0.1, 0.2, 0.3, 0.4, 0.5]
|
||||||
const mockSearchResults = [
|
const mockSearchResults = [
|
||||||
@@ -132,8 +134,12 @@ describe('Knowledge Search API Route', () => {
|
|||||||
db: mockDbChain,
|
db: mockDbChain,
|
||||||
}))
|
}))
|
||||||
|
|
||||||
vi.doMock('@/app/api/auth/oauth/utils', () => ({
|
vi.doMock('@/lib/auth/hybrid', () => ({
|
||||||
getUserId: mockGetUserId,
|
checkSessionOrInternalAuth: mockCheckSessionOrInternalAuth,
|
||||||
|
}))
|
||||||
|
|
||||||
|
vi.doMock('@/lib/workflows/utils', () => ({
|
||||||
|
authorizeWorkflowByWorkspacePermission: mockAuthorizeWorkflowByWorkspacePermission,
|
||||||
}))
|
}))
|
||||||
|
|
||||||
Object.values(mockDbChain).forEach((fn) => {
|
Object.values(mockDbChain).forEach((fn) => {
|
||||||
@@ -157,6 +163,15 @@ describe('Knowledge Search API Route', () => {
|
|||||||
doc2: 'Document 2',
|
doc2: 'Document 2',
|
||||||
})
|
})
|
||||||
mockGetDocumentTagDefinitions.mockClear()
|
mockGetDocumentTagDefinitions.mockClear()
|
||||||
|
mockCheckSessionOrInternalAuth.mockClear().mockResolvedValue({
|
||||||
|
success: true,
|
||||||
|
userId: 'user-123',
|
||||||
|
authType: 'session',
|
||||||
|
})
|
||||||
|
mockAuthorizeWorkflowByWorkspacePermission.mockClear().mockResolvedValue({
|
||||||
|
allowed: true,
|
||||||
|
status: 200,
|
||||||
|
})
|
||||||
|
|
||||||
vi.stubGlobal('crypto', {
|
vi.stubGlobal('crypto', {
|
||||||
randomUUID: vi.fn().mockReturnValue('mock-uuid-1234-5678'),
|
randomUUID: vi.fn().mockReturnValue('mock-uuid-1234-5678'),
|
||||||
@@ -311,11 +326,18 @@ describe('Knowledge Search API Route', () => {
|
|||||||
|
|
||||||
expect(response.status).toBe(200)
|
expect(response.status).toBe(200)
|
||||||
expect(data.success).toBe(true)
|
expect(data.success).toBe(true)
|
||||||
expect(mockGetUserId).toHaveBeenCalledWith(expect.any(String), 'workflow-123')
|
expect(mockAuthorizeWorkflowByWorkspacePermission).toHaveBeenCalledWith({
|
||||||
|
workflowId: 'workflow-123',
|
||||||
|
userId: 'user-123',
|
||||||
|
action: 'read',
|
||||||
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
it.concurrent('should return unauthorized for unauthenticated request', async () => {
|
it.concurrent('should return unauthorized for unauthenticated request', async () => {
|
||||||
mockGetUserId.mockResolvedValue(null)
|
mockCheckSessionOrInternalAuth.mockResolvedValueOnce({
|
||||||
|
success: false,
|
||||||
|
error: 'Unauthorized',
|
||||||
|
})
|
||||||
|
|
||||||
const req = createMockRequest('POST', validSearchData)
|
const req = createMockRequest('POST', validSearchData)
|
||||||
const { POST } = await import('@/app/api/knowledge/search/route')
|
const { POST } = await import('@/app/api/knowledge/search/route')
|
||||||
@@ -332,7 +354,11 @@ describe('Knowledge Search API Route', () => {
|
|||||||
workflowId: 'nonexistent-workflow',
|
workflowId: 'nonexistent-workflow',
|
||||||
}
|
}
|
||||||
|
|
||||||
mockGetUserId.mockResolvedValue(null)
|
mockAuthorizeWorkflowByWorkspacePermission.mockResolvedValueOnce({
|
||||||
|
allowed: false,
|
||||||
|
status: 404,
|
||||||
|
message: 'Workflow not found',
|
||||||
|
})
|
||||||
|
|
||||||
const req = createMockRequest('POST', workflowData)
|
const req = createMockRequest('POST', workflowData)
|
||||||
const { POST } = await import('@/app/api/knowledge/search/route')
|
const { POST } = await import('@/app/api/knowledge/search/route')
|
||||||
|
|||||||
@@ -1,6 +1,7 @@
|
|||||||
import { createLogger } from '@sim/logger'
|
import { createLogger } from '@sim/logger'
|
||||||
import { type NextRequest, NextResponse } from 'next/server'
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
import { z } from 'zod'
|
import { z } from 'zod'
|
||||||
|
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
|
||||||
import { PlatformEvents } from '@/lib/core/telemetry'
|
import { PlatformEvents } from '@/lib/core/telemetry'
|
||||||
import { generateRequestId } from '@/lib/core/utils/request'
|
import { generateRequestId } from '@/lib/core/utils/request'
|
||||||
import { ALL_TAG_SLOTS } from '@/lib/knowledge/constants'
|
import { ALL_TAG_SLOTS } from '@/lib/knowledge/constants'
|
||||||
@@ -8,7 +9,7 @@ import { getDocumentTagDefinitions } from '@/lib/knowledge/tags/service'
|
|||||||
import { buildUndefinedTagsError, validateTagValue } from '@/lib/knowledge/tags/utils'
|
import { buildUndefinedTagsError, validateTagValue } from '@/lib/knowledge/tags/utils'
|
||||||
import type { StructuredFilter } from '@/lib/knowledge/types'
|
import type { StructuredFilter } from '@/lib/knowledge/types'
|
||||||
import { estimateTokenCount } from '@/lib/tokenization/estimators'
|
import { estimateTokenCount } from '@/lib/tokenization/estimators'
|
||||||
import { getUserId } from '@/app/api/auth/oauth/utils'
|
import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils'
|
||||||
import {
|
import {
|
||||||
generateSearchEmbedding,
|
generateSearchEmbedding,
|
||||||
getDocumentNamesByIds,
|
getDocumentNamesByIds,
|
||||||
@@ -76,12 +77,24 @@ export async function POST(request: NextRequest) {
|
|||||||
const body = await request.json()
|
const body = await request.json()
|
||||||
const { workflowId, ...searchParams } = body
|
const { workflowId, ...searchParams } = body
|
||||||
|
|
||||||
const userId = await getUserId(requestId, workflowId)
|
const auth = await checkSessionOrInternalAuth(request, { requireWorkflowId: false })
|
||||||
|
if (!auth.success || !auth.userId) {
|
||||||
|
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||||
|
}
|
||||||
|
const userId = auth.userId
|
||||||
|
|
||||||
if (!userId) {
|
if (workflowId) {
|
||||||
const errorMessage = workflowId ? 'Workflow not found' : 'Unauthorized'
|
const authorization = await authorizeWorkflowByWorkspacePermission({
|
||||||
const statusCode = workflowId ? 404 : 401
|
workflowId,
|
||||||
return NextResponse.json({ error: errorMessage }, { status: statusCode })
|
userId,
|
||||||
|
action: 'read',
|
||||||
|
})
|
||||||
|
if (!authorization.allowed) {
|
||||||
|
return NextResponse.json(
|
||||||
|
{ error: authorization.message || 'Access denied' },
|
||||||
|
{ status: authorization.status }
|
||||||
|
)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
try {
|
try {
|
||||||
|
|||||||
@@ -0,0 +1,6 @@
|
|||||||
|
import type { NextRequest, NextResponse } from 'next/server'
|
||||||
|
import { createMcpAuthorizationServerMetadataResponse } from '@/lib/mcp/oauth-discovery'
|
||||||
|
|
||||||
|
export async function GET(request: NextRequest): Promise<NextResponse> {
|
||||||
|
return createMcpAuthorizationServerMetadataResponse(request)
|
||||||
|
}
|
||||||
@@ -0,0 +1,6 @@
|
|||||||
|
import type { NextRequest, NextResponse } from 'next/server'
|
||||||
|
import { createMcpProtectedResourceMetadataResponse } from '@/lib/mcp/oauth-discovery'
|
||||||
|
|
||||||
|
export async function GET(request: NextRequest): Promise<NextResponse> {
|
||||||
|
return createMcpProtectedResourceMetadataResponse(request)
|
||||||
|
}
|
||||||
802
apps/sim/app/api/mcp/copilot/route.ts
Normal file
802
apps/sim/app/api/mcp/copilot/route.ts
Normal file
@@ -0,0 +1,802 @@
|
|||||||
|
import { randomUUID } from 'node:crypto'
|
||||||
|
import { Server } from '@modelcontextprotocol/sdk/server/index.js'
|
||||||
|
import { StreamableHTTPServerTransport } from '@modelcontextprotocol/sdk/server/streamableHttp.js'
|
||||||
|
import {
|
||||||
|
CallToolRequestSchema,
|
||||||
|
type CallToolResult,
|
||||||
|
ErrorCode,
|
||||||
|
type JSONRPCError,
|
||||||
|
ListToolsRequestSchema,
|
||||||
|
type ListToolsResult,
|
||||||
|
McpError,
|
||||||
|
type RequestId,
|
||||||
|
} from '@modelcontextprotocol/sdk/types.js'
|
||||||
|
import { db } from '@sim/db'
|
||||||
|
import { userStats } from '@sim/db/schema'
|
||||||
|
import { createLogger } from '@sim/logger'
|
||||||
|
import { eq, sql } from 'drizzle-orm'
|
||||||
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
|
import { getHighestPrioritySubscription } from '@/lib/billing/core/subscription'
|
||||||
|
import {
|
||||||
|
ORCHESTRATION_TIMEOUT_MS,
|
||||||
|
SIM_AGENT_API_URL,
|
||||||
|
SIM_AGENT_VERSION,
|
||||||
|
} from '@/lib/copilot/constants'
|
||||||
|
import { orchestrateCopilotStream } from '@/lib/copilot/orchestrator'
|
||||||
|
import { orchestrateSubagentStream } from '@/lib/copilot/orchestrator/subagent'
|
||||||
|
import {
|
||||||
|
executeToolServerSide,
|
||||||
|
prepareExecutionContext,
|
||||||
|
} from '@/lib/copilot/orchestrator/tool-executor'
|
||||||
|
import { DIRECT_TOOL_DEFS, SUBAGENT_TOOL_DEFS } from '@/lib/copilot/tools/mcp/definitions'
|
||||||
|
import { env } from '@/lib/core/config/env'
|
||||||
|
import { RateLimiter } from '@/lib/core/rate-limiter'
|
||||||
|
import {
|
||||||
|
authorizeWorkflowByWorkspacePermission,
|
||||||
|
resolveWorkflowIdForUser,
|
||||||
|
} from '@/lib/workflows/utils'
|
||||||
|
|
||||||
|
const logger = createLogger('CopilotMcpAPI')
|
||||||
|
const mcpRateLimiter = new RateLimiter()
|
||||||
|
const DEFAULT_COPILOT_MODEL = 'claude-opus-4-6'
|
||||||
|
|
||||||
|
export const dynamic = 'force-dynamic'
|
||||||
|
export const runtime = 'nodejs'
|
||||||
|
export const maxDuration = 300
|
||||||
|
|
||||||
|
interface CopilotKeyAuthResult {
|
||||||
|
success: boolean
|
||||||
|
userId?: string
|
||||||
|
error?: string
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Validates a copilot API key by forwarding it to the Go copilot service's
|
||||||
|
* `/api/validate-key` endpoint. Returns the associated userId on success.
|
||||||
|
*/
|
||||||
|
async function authenticateCopilotApiKey(apiKey: string): Promise<CopilotKeyAuthResult> {
|
||||||
|
try {
|
||||||
|
const internalSecret = env.INTERNAL_API_SECRET
|
||||||
|
if (!internalSecret) {
|
||||||
|
logger.error('INTERNAL_API_SECRET not configured')
|
||||||
|
return { success: false, error: 'Server configuration error' }
|
||||||
|
}
|
||||||
|
|
||||||
|
const res = await fetch(`${SIM_AGENT_API_URL}/api/validate-key`, {
|
||||||
|
method: 'POST',
|
||||||
|
headers: {
|
||||||
|
'Content-Type': 'application/json',
|
||||||
|
'x-api-key': internalSecret,
|
||||||
|
},
|
||||||
|
body: JSON.stringify({ targetApiKey: apiKey }),
|
||||||
|
signal: AbortSignal.timeout(10_000),
|
||||||
|
})
|
||||||
|
|
||||||
|
if (!res.ok) {
|
||||||
|
const body = await res.json().catch(() => null)
|
||||||
|
const upstream = (body as Record<string, unknown>)?.message
|
||||||
|
const status = res.status
|
||||||
|
|
||||||
|
if (status === 401 || status === 403) {
|
||||||
|
return {
|
||||||
|
success: false,
|
||||||
|
error: `Invalid Copilot API key. Generate a new key in Settings → Copilot and set it in the x-api-key header.`,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (status === 402) {
|
||||||
|
return {
|
||||||
|
success: false,
|
||||||
|
error: `Usage limit exceeded for this Copilot API key. Upgrade your plan or wait for your quota to reset.`,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return { success: false, error: String(upstream ?? 'Copilot API key validation failed') }
|
||||||
|
}
|
||||||
|
|
||||||
|
const data = (await res.json()) as { ok?: boolean; userId?: string }
|
||||||
|
if (!data.ok || !data.userId) {
|
||||||
|
return {
|
||||||
|
success: false,
|
||||||
|
error: 'Invalid Copilot API key. Generate a new key in Settings → Copilot.',
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return { success: true, userId: data.userId }
|
||||||
|
} catch (error) {
|
||||||
|
logger.error('Copilot API key validation failed', { error })
|
||||||
|
return {
|
||||||
|
success: false,
|
||||||
|
error:
|
||||||
|
'Could not validate Copilot API key — the authentication service is temporarily unreachable. This is NOT a problem with the API key itself; please retry shortly.',
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * MCP Server instructions that guide LLMs on how to use the Sim copilot tools.
 * This is included in the initialize response to help external LLMs understand
 * the workflow lifecycle and best practices.
 *
 * NOTE: this is a runtime string sent verbatim to MCP clients — the escaped
 * backticks (\`) render as markdown code spans on the client side.
 */
const MCP_SERVER_INSTRUCTIONS = `
## Sim Workflow Copilot

Sim is a workflow automation platform. Workflows are visual pipelines of connected blocks (Agent, Function, Condition, API, integrations, etc.). The Agent block is the core — an LLM with tools, memory, structured output, and knowledge bases.

### Workflow Lifecycle (Happy Path)

1. \`list_workspaces\` → know where to work
2. \`create_workflow(name, workspaceId)\` → get a workflowId
3. \`sim_build(request, workflowId)\` → plan and build in one pass
4. \`sim_test(request, workflowId)\` → verify it works
5. \`sim_deploy("deploy as api", workflowId)\` → make it accessible externally (optional)

For fine-grained control, use \`sim_plan\` → \`sim_edit\` instead of \`sim_build\`. Pass the plan object from sim_plan EXACTLY as-is to sim_edit's context.plan field.

### Working with Existing Workflows

When the user refers to a workflow by name or description ("the email one", "my Slack bot"):
1. Use \`sim_discovery\` to find it by functionality
2. Or use \`list_workflows\` and match by name
3. Then pass the workflowId to other tools

### Organization

- \`rename_workflow\` — rename a workflow
- \`move_workflow\` — move a workflow into a folder (or root with null)
- \`move_folder\` — nest a folder inside another (or root with null)
- \`create_folder(name, parentId)\` — create nested folder hierarchies

### Key Rules

- You can test workflows immediately after building — deployment is only needed for external access (API, chat, MCP).
- All copilot tools (build, plan, edit, deploy, test, debug) require workflowId.
- If the user reports errors → use \`sim_debug\` first, don't guess.
- Variable syntax: \`<blockname.field>\` for block outputs, \`{{ENV_VAR}}\` for env vars.
`
|
||||||
|
|
||||||
|
/** Node-style header map (lower-cased names; multi-valued headers as arrays). */
type HeaderMap = Record<string, string | string[] | undefined>
|
||||||
|
|
||||||
|
function createError(id: RequestId, code: ErrorCode | number, message: string): JSONRPCError {
|
||||||
|
return {
|
||||||
|
jsonrpc: '2.0',
|
||||||
|
id,
|
||||||
|
error: { code, message },
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function normalizeRequestHeaders(request: NextRequest): HeaderMap {
|
||||||
|
const headers: HeaderMap = {}
|
||||||
|
|
||||||
|
request.headers.forEach((value, key) => {
|
||||||
|
headers[key.toLowerCase()] = value
|
||||||
|
})
|
||||||
|
|
||||||
|
return headers
|
||||||
|
}
|
||||||
|
|
||||||
|
function readHeader(headers: HeaderMap | undefined, name: string): string | undefined {
|
||||||
|
if (!headers) return undefined
|
||||||
|
const value = headers[name.toLowerCase()]
|
||||||
|
if (Array.isArray(value)) {
|
||||||
|
return value[0]
|
||||||
|
}
|
||||||
|
return value
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Minimal Node-style ServerResponse stand-in that captures what the MCP SDK's
 * StreamableHTTPServerTransport writes (status, headers, body chunks) and
 * re-exposes the body as a web ReadableStream so it can be returned as a
 * NextResponse. Implements just enough of the http.ServerResponse surface
 * that the transport uses: writeHead / flushHeaders / write / end / on.
 */
class NextResponseCapture {
  // Captured status code; 200 until writeHead() overrides it.
  private _status = 200
  private _headers = new Headers()
  // Controller of `readable`; null until the stream's start() callback runs.
  private _controller: ReadableStreamDefaultController<Uint8Array> | null = null
  // Chunks written before the controller exists are buffered here and flushed in start().
  private _pendingChunks: Uint8Array[] = []
  private _closeHandlers: Array<() => void> = []
  private _errorHandlers: Array<(error: Error) => void> = []
  private _headersWritten = false
  private _ended = false
  // Settles once headers are written (writeHead / flushHeaders / first write / end).
  private _headersPromise: Promise<void>
  private _resolveHeaders: (() => void) | null = null
  // Settles once the response has ended (end() or consumer cancel).
  private _endedPromise: Promise<void>
  private _resolveEnded: (() => void) | null = null
  // The captured body, consumable by NextResponse.
  readonly readable: ReadableStream<Uint8Array>

  constructor() {
    this._headersPromise = new Promise<void>((resolve) => {
      this._resolveHeaders = resolve
    })

    this._endedPromise = new Promise<void>((resolve) => {
      this._resolveEnded = resolve
    })

    this.readable = new ReadableStream<Uint8Array>({
      start: (controller) => {
        this._controller = controller
        // Flush anything written before the stream was started.
        if (this._pendingChunks.length > 0) {
          for (const chunk of this._pendingChunks) {
            controller.enqueue(chunk)
          }
          this._pendingChunks = []
        }
      },
      cancel: () => {
        // Consumer went away: treat it like end-of-response and notify 'close' listeners.
        this._ended = true
        this._resolveEnded?.()
        this.triggerCloseHandlers()
      },
    })
  }

  // Idempotently record that headers are final and release waitForHeaders().
  private markHeadersWritten(): void {
    if (this._headersWritten) return
    this._headersWritten = true
    this._resolveHeaders?.()
  }

  // Invoke every registered 'close' handler; a throwing handler is routed
  // to the 'error' handlers instead of aborting the loop.
  private triggerCloseHandlers(): void {
    for (const handler of this._closeHandlers) {
      try {
        handler()
      } catch (error) {
        this.triggerErrorHandlers(error instanceof Error ? error : new Error(String(error)))
      }
    }
  }

  private triggerErrorHandlers(error: Error): void {
    for (const errorHandler of this._errorHandlers) {
      errorHandler(error)
    }
  }

  // Coerce whatever the transport writes into bytes; null/undefined chunks
  // are dropped (returns null), everything else is UTF-8 encoded.
  private normalizeChunk(chunk: unknown): Uint8Array | null {
    if (typeof chunk === 'string') {
      return new TextEncoder().encode(chunk)
    }

    if (chunk instanceof Uint8Array) {
      return chunk
    }

    if (chunk === undefined || chunk === null) {
      return null
    }

    return new TextEncoder().encode(String(chunk))
  }

  // http.ServerResponse#writeHead equivalent: record status + headers.
  // Array header values are joined with ', ' as a single Headers entry.
  writeHead(status: number, headers?: Record<string, string | number | string[]>): this {
    this._status = status

    if (headers) {
      Object.entries(headers).forEach(([key, value]) => {
        if (Array.isArray(value)) {
          this._headers.set(key, value.join(', '))
        } else {
          this._headers.set(key, String(value))
        }
      })
    }

    this.markHeadersWritten()
    return this
  }

  // http.ServerResponse#flushHeaders equivalent — headers are final, no body yet.
  flushHeaders(): this {
    this.markHeadersWritten()
    return this
  }

  // Enqueue a body chunk (buffering if the stream hasn't started yet).
  // Always reports backpressure-free (true), matching a non-blocking sink.
  write(chunk: unknown): boolean {
    const normalized = this.normalizeChunk(chunk)
    if (!normalized) return true

    this.markHeadersWritten()

    if (this._controller) {
      try {
        this._controller.enqueue(normalized)
      } catch (error) {
        this.triggerErrorHandlers(error instanceof Error ? error : new Error(String(error)))
      }
    } else {
      this._pendingChunks.push(normalized)
    }

    return true
  }

  // Finish the response: optionally write a last chunk, close the stream,
  // and fire 'close' handlers. Safe to call more than once.
  end(chunk?: unknown): this {
    if (chunk !== undefined) this.write(chunk)
    this.markHeadersWritten()
    if (this._ended) return this

    this._ended = true
    this._resolveEnded?.()

    if (this._controller) {
      try {
        this._controller.close()
      } catch (error) {
        this.triggerErrorHandlers(error instanceof Error ? error : new Error(String(error)))
      }
    }

    this.triggerCloseHandlers()

    return this
  }

  // Resolve once headers are written, or after timeoutMs — whichever is first.
  // NOTE: the timeout timer is not cleared when headers win the race; the
  // pending timer is harmless but stays scheduled.
  async waitForHeaders(timeoutMs = 30000): Promise<void> {
    if (this._headersWritten) return

    await Promise.race([
      this._headersPromise,
      new Promise<void>((resolve) => {
        setTimeout(resolve, timeoutMs)
      }),
    ])
  }

  // Resolve once the response has ended, or after timeoutMs — whichever is first.
  async waitForEnd(timeoutMs = 30000): Promise<void> {
    if (this._ended) return

    await Promise.race([
      this._endedPromise,
      new Promise<void>((resolve) => {
        setTimeout(resolve, timeoutMs)
      }),
    ])
  }

  // Minimal EventEmitter-style registration; only 'close' and 'error' are
  // supported (the events the MCP transport subscribes to).
  on(event: 'close' | 'error', handler: (() => void) | ((error: Error) => void)): this {
    if (event === 'close') {
      this._closeHandlers.push(handler as () => void)
    }

    if (event === 'error') {
      this._errorHandlers.push(handler as (error: Error) => void)
    }

    return this
  }

  // Materialize everything captured so far as a NextResponse whose body is
  // the (possibly still-streaming) readable.
  toNextResponse(): NextResponse {
    return new NextResponse(this.readable, {
      status: this._status,
      headers: this._headers,
    })
  }
}
|
||||||
|
|
||||||
|
/**
 * Construct the MCP Server that backs this route: advertises the direct and
 * subagent tool catalogs (tools/list) and executes them (tools/call) after
 * per-request API-key auth and rate limiting.
 *
 * @param abortSignal - propagated into tool execution so client disconnects
 *   can cancel long-running subagent work.
 */
function buildMcpServer(abortSignal?: AbortSignal): Server {
  const server = new Server(
    {
      name: 'sim-copilot',
      version: '1.0.0',
    },
    {
      capabilities: { tools: {} },
      // Sent to clients at initialize time to teach LLMs the tool lifecycle.
      instructions: MCP_SERVER_INSTRUCTIONS,
    }
  )

  // tools/list — expose both catalogs as one flat tool list.
  server.setRequestHandler(ListToolsRequestSchema, async () => {
    const directTools = DIRECT_TOOL_DEFS.map((tool) => ({
      name: tool.name,
      description: tool.description,
      inputSchema: tool.inputSchema,
    }))

    const subagentTools = SUBAGENT_TOOL_DEFS.map((tool) => ({
      name: tool.name,
      description: tool.description,
      inputSchema: tool.inputSchema,
    }))

    const result: ListToolsResult = {
      tools: [...directTools, ...subagentTools],
    }

    return result
  })

  // tools/call — authenticate via x-api-key, rate limit, then dispatch.
  // Auth/rate-limit failures are returned in-band (isError content) rather
  // than thrown, so the calling LLM sees actionable text.
  server.setRequestHandler(CallToolRequestSchema, async (request, extra) => {
    const headers = (extra.requestInfo?.headers || {}) as HeaderMap
    const apiKeyHeader = readHeader(headers, 'x-api-key')

    if (!apiKeyHeader) {
      return {
        content: [
          {
            type: 'text' as const,
            text: 'AUTHENTICATION ERROR: No Copilot API key provided. The user must set their Copilot API key in the x-api-key header. They can generate one in the Sim app under Settings → Copilot. Do NOT retry — this will fail until the key is configured.',
          },
        ],
        isError: true,
      }
    }

    const authResult = await authenticateCopilotApiKey(apiKeyHeader)
    if (!authResult.success || !authResult.userId) {
      logger.warn('MCP copilot key auth failed', { method: request.method })
      return {
        content: [
          {
            type: 'text' as const,
            text: `AUTHENTICATION ERROR: ${authResult.error} Do NOT retry — this will fail until the user fixes their Copilot API key.`,
          },
        ],
        isError: true,
      }
    }

    // Rate limit is subscription-aware: higher-tier plans get higher quotas.
    const rateLimitResult = await mcpRateLimiter.checkRateLimitWithSubscription(
      authResult.userId,
      await getHighestPrioritySubscription(authResult.userId),
      'api-endpoint',
      false
    )

    if (!rateLimitResult.allowed) {
      return {
        content: [
          {
            type: 'text' as const,
            text: `RATE LIMIT: Too many requests. Please wait and retry after ${rateLimitResult.resetAt.toISOString()}.`,
          },
        ],
        isError: true,
      }
    }

    const params = request.params as
      | { name?: string; arguments?: Record<string, unknown> }
      | undefined
    if (!params?.name) {
      throw new McpError(ErrorCode.InvalidParams, 'Tool name required')
    }

    const result = await handleToolsCall(
      {
        name: params.name,
        arguments: params.arguments,
      },
      authResult.userId,
      abortSignal
    )

    // Fire-and-forget usage accounting; never blocks the tool response.
    trackMcpCopilotCall(authResult.userId)

    return result
  })

  return server
}
|
||||||
|
|
||||||
|
/**
 * Bridge a Next.js request into the MCP SDK's Node-style HTTP transport.
 *
 * Builds a fresh (stateless) server + transport per request, adapts the
 * NextRequest into the req/res pair the SDK expects, and converts the
 * captured output back into a NextResponse. The server/transport are always
 * closed in `finally`, even if handling throws.
 */
async function handleMcpRequestWithSdk(
  request: NextRequest,
  parsedBody: unknown
): Promise<NextResponse> {
  const server = buildMcpServer(request.signal)
  // sessionIdGenerator: undefined → stateless mode (no session tracking);
  // enableJsonResponse → plain JSON replies instead of an SSE stream.
  const transport = new StreamableHTTPServerTransport({
    sessionIdGenerator: undefined,
    enableJsonResponse: true,
  })

  const responseCapture = new NextResponseCapture()
  // Minimal IncomingMessage-like shape; body is passed separately below.
  const requestAdapter = {
    method: request.method,
    headers: normalizeRequestHeaders(request),
  }

  await server.connect(transport)

  try {
    await transport.handleRequest(requestAdapter as any, responseCapture as any, parsedBody)
    await responseCapture.waitForHeaders()
    // Must exceed the longest possible tool execution (build = 5 min).
    // Using ORCHESTRATION_TIMEOUT_MS + 60 s buffer so the orchestrator can
    // finish or time-out on its own before the transport is torn down.
    await responseCapture.waitForEnd(ORCHESTRATION_TIMEOUT_MS + 60_000)
    return responseCapture.toNextResponse()
  } finally {
    await server.close().catch(() => {})
    await transport.close().catch(() => {})
  }
}
|
||||||
|
|
||||||
|
export async function GET() {
|
||||||
|
// Return 405 to signal that server-initiated SSE notifications are not
|
||||||
|
// supported. Without this, clients like mcp-remote will repeatedly
|
||||||
|
// reconnect trying to open an SSE stream, flooding the logs with GETs.
|
||||||
|
return new NextResponse(null, { status: 405 })
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function POST(request: NextRequest) {
|
||||||
|
try {
|
||||||
|
let parsedBody: unknown
|
||||||
|
|
||||||
|
try {
|
||||||
|
parsedBody = await request.json()
|
||||||
|
} catch {
|
||||||
|
return NextResponse.json(createError(0, ErrorCode.ParseError, 'Invalid JSON body'), {
|
||||||
|
status: 400,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
return await handleMcpRequestWithSdk(request, parsedBody)
|
||||||
|
} catch (error) {
|
||||||
|
logger.error('Error handling MCP request', { error })
|
||||||
|
return NextResponse.json(createError(0, ErrorCode.InternalError, 'Internal error'), {
|
||||||
|
status: 500,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function DELETE(request: NextRequest) {
|
||||||
|
void request
|
||||||
|
return NextResponse.json(createError(0, -32000, 'Method not allowed.'), { status: 405 })
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Increment MCP copilot call counter in userStats (fire-and-forget).
|
||||||
|
*/
|
||||||
|
function trackMcpCopilotCall(userId: string): void {
|
||||||
|
db.update(userStats)
|
||||||
|
.set({
|
||||||
|
totalMcpCopilotCalls: sql`total_mcp_copilot_calls + 1`,
|
||||||
|
lastActive: new Date(),
|
||||||
|
})
|
||||||
|
.where(eq(userStats.userId, userId))
|
||||||
|
.then(() => {})
|
||||||
|
.catch((error) => {
|
||||||
|
logger.error('Failed to track MCP copilot call', { error, userId })
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
async function handleToolsCall(
|
||||||
|
params: { name: string; arguments?: Record<string, unknown> },
|
||||||
|
userId: string,
|
||||||
|
abortSignal?: AbortSignal
|
||||||
|
): Promise<CallToolResult> {
|
||||||
|
const args = params.arguments || {}
|
||||||
|
|
||||||
|
const directTool = DIRECT_TOOL_DEFS.find((tool) => tool.name === params.name)
|
||||||
|
if (directTool) {
|
||||||
|
return handleDirectToolCall(directTool, args, userId)
|
||||||
|
}
|
||||||
|
|
||||||
|
const subagentTool = SUBAGENT_TOOL_DEFS.find((tool) => tool.name === params.name)
|
||||||
|
if (subagentTool) {
|
||||||
|
return handleSubagentToolCall(subagentTool, args, userId, abortSignal)
|
||||||
|
}
|
||||||
|
|
||||||
|
throw new McpError(ErrorCode.MethodNotFound, `Tool not found: ${params.name}`)
|
||||||
|
}
|
||||||
|
|
||||||
|
async function handleDirectToolCall(
|
||||||
|
toolDef: (typeof DIRECT_TOOL_DEFS)[number],
|
||||||
|
args: Record<string, unknown>,
|
||||||
|
userId: string
|
||||||
|
): Promise<CallToolResult> {
|
||||||
|
try {
|
||||||
|
const execContext = await prepareExecutionContext(userId, (args.workflowId as string) || '')
|
||||||
|
|
||||||
|
const toolCall = {
|
||||||
|
id: randomUUID(),
|
||||||
|
name: toolDef.toolId,
|
||||||
|
status: 'pending' as const,
|
||||||
|
params: args as Record<string, any>,
|
||||||
|
startTime: Date.now(),
|
||||||
|
}
|
||||||
|
|
||||||
|
const result = await executeToolServerSide(toolCall, execContext)
|
||||||
|
|
||||||
|
return {
|
||||||
|
content: [
|
||||||
|
{
|
||||||
|
type: 'text',
|
||||||
|
text: JSON.stringify(result.output ?? result, null, 2),
|
||||||
|
},
|
||||||
|
],
|
||||||
|
isError: !result.success,
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
logger.error('Direct tool execution failed', { tool: toolDef.name, error })
|
||||||
|
return {
|
||||||
|
content: [
|
||||||
|
{
|
||||||
|
type: 'text',
|
||||||
|
text: `Tool execution failed: ${error instanceof Error ? error.message : String(error)}`,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
isError: true,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Build mode uses the main chat orchestrator with the 'fast' command instead of
 * the subagent endpoint. In Go, 'build' is not a registered subagent — it's a mode
 * (ModeFast) on the main chat processor that bypasses subagent orchestration and
 * executes all tools directly.
 *
 * @param args - tool arguments; `request` is the build prompt, `workflowId`
 *   (optional) targets a specific workflow.
 * @param userId - authenticated user performing the build.
 * @param abortSignal - cancels the orchestration if the client disconnects.
 * @returns CallToolResult with a JSON text body; isError mirrors
 *   orchestration success.
 */
async function handleBuildToolCall(
  args: Record<string, unknown>,
  userId: string,
  abortSignal?: AbortSignal
): Promise<CallToolResult> {
  try {
    // Fall back to the raw args as a prompt if no explicit request text.
    const requestText = (args.request as string) || JSON.stringify(args)
    const workflowId = args.workflowId as string | undefined

    // Explicit workflowId → verify the user can read it; otherwise try to
    // resolve a workflow for the user. Either path may yield null.
    const resolved = workflowId
      ? await (async () => {
          const authorization = await authorizeWorkflowByWorkspacePermission({
            workflowId,
            userId,
            action: 'read',
          })
          return authorization.allowed ? { workflowId } : null
        })()
      : await resolveWorkflowIdForUser(userId)

    if (!resolved?.workflowId) {
      return {
        content: [
          {
            type: 'text',
            text: JSON.stringify(
              {
                success: false,
                error: 'workflowId is required for build. Call create_workflow first.',
              },
              null,
              2
            ),
          },
        ],
        isError: true,
      }
    }

    const chatId = randomUUID()

    // Payload for the main chat orchestrator. `commands: ['fast']` selects
    // build mode; `headless: true` suppresses interactive streaming.
    const requestPayload = {
      message: requestText,
      workflowId: resolved.workflowId,
      userId,
      model: DEFAULT_COPILOT_MODEL,
      mode: 'agent',
      commands: ['fast'],
      messageId: randomUUID(),
      version: SIM_AGENT_VERSION,
      headless: true,
      chatId,
      source: 'mcp',
    }

    const result = await orchestrateCopilotStream(requestPayload, {
      userId,
      workflowId: resolved.workflowId,
      chatId,
      autoExecuteTools: true,
      timeout: 300000, // 5 min — matches the longest expected build.
      interactive: false,
      abortSignal,
    })

    const responseData = {
      success: result.success,
      content: result.content,
      toolCalls: result.toolCalls,
      error: result.error,
    }

    return {
      content: [{ type: 'text', text: JSON.stringify(responseData, null, 2) }],
      isError: !result.success,
    }
  } catch (error) {
    logger.error('Build tool call failed', { error })
    return {
      content: [
        {
          type: 'text',
          text: `Build failed: ${error instanceof Error ? error.message : String(error)}`,
        },
      ],
      isError: true,
    }
  }
}
|
||||||
|
|
||||||
|
/**
 * Dispatch a subagent tool call through the subagent orchestrator, except
 * for 'build' which is delegated to handleBuildToolCall (build is a mode on
 * the main chat processor, not a registered subagent — see that function).
 *
 * The response body is selected by priority: structuredResult > error >
 * plain content. Failures are returned in-band (isError: true).
 */
async function handleSubagentToolCall(
  toolDef: (typeof SUBAGENT_TOOL_DEFS)[number],
  args: Record<string, unknown>,
  userId: string,
  abortSignal?: AbortSignal
): Promise<CallToolResult> {
  if (toolDef.agentId === 'build') {
    return handleBuildToolCall(args, userId, abortSignal)
  }

  try {
    // Tools name their prompt field differently (request/message/error);
    // fall back to the raw args when none is present.
    const requestText =
      (args.request as string) ||
      (args.message as string) ||
      (args.error as string) ||
      JSON.stringify(args)

    const context = (args.context as Record<string, unknown>) || {}
    // Convenience: let callers pass `plan` at the top level instead of
    // nesting it under context.plan (sim_plan → sim_edit handoff).
    if (args.plan && !context.plan) {
      context.plan = args.plan
    }

    const result = await orchestrateSubagentStream(
      toolDef.agentId,
      {
        message: requestText,
        workflowId: args.workflowId,
        workspaceId: args.workspaceId,
        context,
        model: DEFAULT_COPILOT_MODEL,
        headless: true,
        source: 'mcp',
      },
      {
        userId,
        workflowId: args.workflowId as string | undefined,
        workspaceId: args.workspaceId as string | undefined,
        abortSignal,
      }
    )

    let responseData: unknown

    if (result.structuredResult) {
      // Structured results carry their own success flag; fall back to the
      // orchestration-level flag when the subagent omitted it.
      responseData = {
        success: result.structuredResult.success ?? result.success,
        type: result.structuredResult.type,
        summary: result.structuredResult.summary,
        data: result.structuredResult.data,
      }
    } else if (result.error) {
      responseData = {
        success: false,
        error: result.error,
        errors: result.errors,
      }
    } else {
      responseData = {
        success: result.success,
        content: result.content,
      }
    }

    return {
      content: [
        {
          type: 'text',
          text: JSON.stringify(responseData, null, 2),
        },
      ],
      isError: !result.success,
    }
  } catch (error) {
    logger.error('Subagent tool call failed', {
      tool: toolDef.name,
      agentId: toolDef.agentId,
      error,
    })

    return {
      content: [
        {
          type: 'text',
          text: `Subagent call failed: ${error instanceof Error ? error.message : String(error)}`,
        },
      ],
      isError: true,
    }
  }
}
|
||||||
98
apps/sim/app/api/mcp/events/route.test.ts
Normal file
98
apps/sim/app/api/mcp/events/route.test.ts
Normal file
@@ -0,0 +1,98 @@
|
|||||||
|
/**
 * Tests for MCP SSE events endpoint
 *
 * @vitest-environment node
 */
import { createMockRequest, mockAuth, mockConsoleLogger } from '@sim/testing'
import { beforeEach, describe, expect, it, vi } from 'vitest'

// Silence route logging and install the shared auth stub.
mockConsoleLogger()
const auth = mockAuth()

// Stub workspace permission lookups so each test controls the outcome.
const mockGetUserEntityPermissions = vi.fn()
vi.doMock('@/lib/workspaces/permissions/utils', () => ({
  getUserEntityPermissions: mockGetUserEntityPermissions,
}))

// Null subscription sources: the route should still open a stream with
// neither the connection manager nor the pub/sub bus available.
vi.doMock('@/lib/mcp/connection-manager', () => ({
  mcpConnectionManager: null,
}))

vi.doMock('@/lib/mcp/pubsub', () => ({
  mcpPubSub: null,
}))

// Import AFTER the mocks are registered so the route picks them up.
const { GET } = await import('./route')

describe('MCP Events SSE Endpoint', () => {
  beforeEach(() => {
    vi.clearAllMocks()
  })

  it('returns 401 when session is missing', async () => {
    auth.setUnauthenticated()

    const request = createMockRequest(
      'GET',
      undefined,
      {},
      'http://localhost:3000/api/mcp/events?workspaceId=ws-123'
    )

    const response = await GET(request as any)

    expect(response.status).toBe(401)
    const text = await response.text()
    expect(text).toBe('Unauthorized')
  })

  it('returns 400 when workspaceId is missing', async () => {
    auth.setAuthenticated()

    const request = createMockRequest('GET', undefined, {}, 'http://localhost:3000/api/mcp/events')

    const response = await GET(request as any)

    expect(response.status).toBe(400)
    const text = await response.text()
    expect(text).toBe('Missing workspaceId query parameter')
  })

  it('returns 403 when user lacks workspace access', async () => {
    auth.setAuthenticated()
    // No permissions record → access denied.
    mockGetUserEntityPermissions.mockResolvedValue(null)

    const request = createMockRequest(
      'GET',
      undefined,
      {},
      'http://localhost:3000/api/mcp/events?workspaceId=ws-123'
    )

    const response = await GET(request as any)

    expect(response.status).toBe(403)
    const text = await response.text()
    expect(text).toBe('Access denied to workspace')
    expect(mockGetUserEntityPermissions).toHaveBeenCalledWith('user-123', 'workspace', 'ws-123')
  })

  it('returns SSE stream when authorized', async () => {
    auth.setAuthenticated()
    mockGetUserEntityPermissions.mockResolvedValue({ read: true })

    const request = createMockRequest(
      'GET',
      undefined,
      {},
      'http://localhost:3000/api/mcp/events?workspaceId=ws-123'
    )

    const response = await GET(request as any)

    // Verify SSE response headers without consuming the (open-ended) body.
    expect(response.status).toBe(200)
    expect(response.headers.get('Content-Type')).toBe('text/event-stream')
    expect(response.headers.get('Cache-Control')).toBe('no-cache')
    expect(response.headers.get('Connection')).toBe('keep-alive')
  })
})
|
||||||
111
apps/sim/app/api/mcp/events/route.ts
Normal file
111
apps/sim/app/api/mcp/events/route.ts
Normal file
@@ -0,0 +1,111 @@
|
|||||||
|
/**
|
||||||
|
* SSE endpoint for MCP tool-change events.
|
||||||
|
*
|
||||||
|
* Pushes `tools_changed` events to the browser when:
|
||||||
|
* - An external MCP server sends `notifications/tools/list_changed` (via connection manager)
|
||||||
|
* - A workflow CRUD route modifies workflow MCP server tools (via pub/sub)
|
||||||
|
*
|
||||||
|
* Auth is handled via session cookies (EventSource sends cookies automatically).
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { createLogger } from '@sim/logger'
|
||||||
|
import type { NextRequest } from 'next/server'
|
||||||
|
import { getSession } from '@/lib/auth'
|
||||||
|
import { SSE_HEADERS } from '@/lib/core/utils/sse'
|
||||||
|
import { mcpConnectionManager } from '@/lib/mcp/connection-manager'
|
||||||
|
import { mcpPubSub } from '@/lib/mcp/pubsub'
|
||||||
|
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
|
||||||
|
|
||||||
|
const logger = createLogger('McpEventsSSE')

// Next.js route segment config: always render dynamically — an SSE response
// cannot be statically cached.
export const dynamic = 'force-dynamic'

// How often a comment-only SSE heartbeat is emitted to keep the connection alive.
const HEARTBEAT_INTERVAL_MS = 30_000
|
||||||
|
|
||||||
|
/**
 * SSE endpoint: streams `tools_changed` events for one workspace.
 *
 * Auth: session cookie (EventSource sends cookies automatically); requires
 * a `workspaceId` query parameter and any permission record on that
 * workspace. Events come from two sources — the external MCP connection
 * manager and the workflow-tools pub/sub bus — each optional (may be null).
 *
 * NOTE(review): cleanup (unsubscribe + heartbeat clear) runs only on
 * `request.signal` abort; there is no ReadableStream `cancel` handler, so a
 * consumer-side cancel without an abort would not run it — confirm whether
 * Next.js always aborts the signal on disconnect.
 */
export async function GET(request: NextRequest) {
  const session = await getSession()
  if (!session?.user?.id) {
    return new Response('Unauthorized', { status: 401 })
  }

  const { searchParams } = new URL(request.url)
  const workspaceId = searchParams.get('workspaceId')
  if (!workspaceId) {
    return new Response('Missing workspaceId query parameter', { status: 400 })
  }

  // Any permission record (even read-only) grants the event stream.
  const permissions = await getUserEntityPermissions(session.user.id, 'workspace', workspaceId)
  if (!permissions) {
    return new Response('Access denied to workspace', { status: 403 })
  }

  const encoder = new TextEncoder()
  // Teardown callbacks: subscriptions plus the heartbeat timer.
  const unsubscribers: Array<() => void> = []

  const stream = new ReadableStream({
    start(controller) {
      // Emit one SSE event; silently drops writes after the stream closed.
      const send = (eventName: string, data: Record<string, unknown>) => {
        try {
          controller.enqueue(
            encoder.encode(`event: ${eventName}\ndata: ${JSON.stringify(data)}\n\n`)
          )
        } catch {
          // Stream already closed
        }
      }

      // Subscribe to external MCP server tool changes
      if (mcpConnectionManager) {
        const unsub = mcpConnectionManager.subscribe((event) => {
          // Only forward events for the workspace this stream serves.
          if (event.workspaceId !== workspaceId) return
          send('tools_changed', {
            source: 'external',
            serverId: event.serverId,
            timestamp: event.timestamp,
          })
        })
        unsubscribers.push(unsub)
      }

      // Subscribe to workflow CRUD tool changes
      if (mcpPubSub) {
        const unsub = mcpPubSub.onWorkflowToolsChanged((event) => {
          if (event.workspaceId !== workspaceId) return
          send('tools_changed', {
            source: 'workflow',
            serverId: event.serverId,
            timestamp: Date.now(),
          })
        })
        unsubscribers.push(unsub)
      }

      // Heartbeat to keep the connection alive
      const heartbeat = setInterval(() => {
        try {
          controller.enqueue(encoder.encode(': heartbeat\n\n'))
        } catch {
          // Enqueue failed → stream closed; stop the timer.
          clearInterval(heartbeat)
        }
      }, HEARTBEAT_INTERVAL_MS)
      unsubscribers.push(() => clearInterval(heartbeat))

      // Cleanup when client disconnects
      request.signal.addEventListener('abort', () => {
        for (const unsub of unsubscribers) {
          unsub()
        }
        try {
          controller.close()
        } catch {
          // Already closed
        }
        logger.info(`SSE connection closed for workspace ${workspaceId}`)
      })

      logger.info(`SSE connection opened for workspace ${workspaceId}`)
    },
  })

  return new Response(stream, { headers: SSE_HEADERS })
}
|
||||||
227
apps/sim/app/api/mcp/serve/[serverId]/route.test.ts
Normal file
227
apps/sim/app/api/mcp/serve/[serverId]/route.test.ts
Normal file
@@ -0,0 +1,227 @@
|
|||||||
|
/**
|
||||||
|
* Tests for MCP serve route auth propagation.
|
||||||
|
*
|
||||||
|
* @vitest-environment node
|
||||||
|
*/
|
||||||
|
import { NextRequest } from 'next/server'
|
||||||
|
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
|
||||||
|
|
||||||
|
const mockCheckHybridAuth = vi.fn()
|
||||||
|
const mockGetUserEntityPermissions = vi.fn()
|
||||||
|
const mockGenerateInternalToken = vi.fn()
|
||||||
|
const mockDbSelect = vi.fn()
|
||||||
|
const mockDbFrom = vi.fn()
|
||||||
|
const mockDbWhere = vi.fn()
|
||||||
|
const mockDbLimit = vi.fn()
|
||||||
|
const fetchMock = vi.fn()
|
||||||
|
|
||||||
|
describe('MCP Serve Route', () => {
|
||||||
|
beforeEach(() => {
|
||||||
|
vi.resetModules()
|
||||||
|
vi.clearAllMocks()
|
||||||
|
|
||||||
|
mockDbSelect.mockReturnValue({ from: mockDbFrom })
|
||||||
|
mockDbFrom.mockReturnValue({ where: mockDbWhere })
|
||||||
|
mockDbWhere.mockReturnValue({ limit: mockDbLimit })
|
||||||
|
|
||||||
|
vi.doMock('@sim/logger', () => ({
|
||||||
|
createLogger: vi.fn(() => ({
|
||||||
|
info: vi.fn(),
|
||||||
|
warn: vi.fn(),
|
||||||
|
error: vi.fn(),
|
||||||
|
debug: vi.fn(),
|
||||||
|
})),
|
||||||
|
}))
|
||||||
|
vi.doMock('drizzle-orm', () => ({
|
||||||
|
and: vi.fn(),
|
||||||
|
eq: vi.fn(),
|
||||||
|
}))
|
||||||
|
vi.doMock('@sim/db', () => ({
|
||||||
|
db: {
|
||||||
|
select: mockDbSelect,
|
||||||
|
},
|
||||||
|
}))
|
||||||
|
vi.doMock('@sim/db/schema', () => ({
|
||||||
|
workflowMcpServer: {
|
||||||
|
id: 'id',
|
||||||
|
name: 'name',
|
||||||
|
workspaceId: 'workspaceId',
|
||||||
|
isPublic: 'isPublic',
|
||||||
|
createdBy: 'createdBy',
|
||||||
|
},
|
||||||
|
workflowMcpTool: {
|
||||||
|
serverId: 'serverId',
|
||||||
|
toolName: 'toolName',
|
||||||
|
toolDescription: 'toolDescription',
|
||||||
|
parameterSchema: 'parameterSchema',
|
||||||
|
workflowId: 'workflowId',
|
||||||
|
},
|
||||||
|
workflow: {
|
||||||
|
id: 'id',
|
||||||
|
isDeployed: 'isDeployed',
|
||||||
|
},
|
||||||
|
}))
|
||||||
|
vi.doMock('@/lib/auth/hybrid', () => ({
|
||||||
|
checkHybridAuth: mockCheckHybridAuth,
|
||||||
|
}))
|
||||||
|
vi.doMock('@/lib/workspaces/permissions/utils', () => ({
|
||||||
|
getUserEntityPermissions: mockGetUserEntityPermissions,
|
||||||
|
}))
|
||||||
|
vi.doMock('@/lib/auth/internal', () => ({
|
||||||
|
generateInternalToken: mockGenerateInternalToken,
|
||||||
|
}))
|
||||||
|
vi.doMock('@/lib/core/utils/urls', () => ({
|
||||||
|
getBaseUrl: () => 'http://localhost:3000',
|
||||||
|
}))
|
||||||
|
vi.doMock('@/lib/core/execution-limits', () => ({
|
||||||
|
getMaxExecutionTimeout: () => 10_000,
|
||||||
|
}))
|
||||||
|
|
||||||
|
vi.stubGlobal('fetch', fetchMock)
|
||||||
|
})
|
||||||
|
|
||||||
|
afterEach(() => {
|
||||||
|
vi.unstubAllGlobals()
|
||||||
|
vi.clearAllMocks()
|
||||||
|
})
|
||||||
|
|
||||||
|
it('returns 401 for private server when auth fails', async () => {
|
||||||
|
mockDbLimit.mockResolvedValueOnce([
|
||||||
|
{
|
||||||
|
id: 'server-1',
|
||||||
|
name: 'Private Server',
|
||||||
|
workspaceId: 'ws-1',
|
||||||
|
isPublic: false,
|
||||||
|
createdBy: 'owner-1',
|
||||||
|
},
|
||||||
|
])
|
||||||
|
mockCheckHybridAuth.mockResolvedValueOnce({ success: false, error: 'Unauthorized' })
|
||||||
|
|
||||||
|
const { POST } = await import('./route')
|
||||||
|
const req = new NextRequest('http://localhost:3000/api/mcp/serve/server-1', {
|
||||||
|
method: 'POST',
|
||||||
|
body: JSON.stringify({ jsonrpc: '2.0', id: 1, method: 'ping' }),
|
||||||
|
})
|
||||||
|
const response = await POST(req, { params: Promise.resolve({ serverId: 'server-1' }) })
|
||||||
|
|
||||||
|
expect(response.status).toBe(401)
|
||||||
|
})
|
||||||
|
|
||||||
|
it('returns 401 on GET for private server when auth fails', async () => {
|
||||||
|
mockDbLimit.mockResolvedValueOnce([
|
||||||
|
{
|
||||||
|
id: 'server-1',
|
||||||
|
name: 'Private Server',
|
||||||
|
workspaceId: 'ws-1',
|
||||||
|
isPublic: false,
|
||||||
|
createdBy: 'owner-1',
|
||||||
|
},
|
||||||
|
])
|
||||||
|
mockCheckHybridAuth.mockResolvedValueOnce({ success: false, error: 'Unauthorized' })
|
||||||
|
|
||||||
|
const { GET } = await import('./route')
|
||||||
|
const req = new NextRequest('http://localhost:3000/api/mcp/serve/server-1')
|
||||||
|
const response = await GET(req, { params: Promise.resolve({ serverId: 'server-1' }) })
|
||||||
|
|
||||||
|
expect(response.status).toBe(401)
|
||||||
|
})
|
||||||
|
|
||||||
|
it('forwards X-API-Key for private server api_key auth', async () => {
|
||||||
|
mockDbLimit
|
||||||
|
.mockResolvedValueOnce([
|
||||||
|
{
|
||||||
|
id: 'server-1',
|
||||||
|
name: 'Private Server',
|
||||||
|
workspaceId: 'ws-1',
|
||||||
|
isPublic: false,
|
||||||
|
createdBy: 'owner-1',
|
||||||
|
},
|
||||||
|
])
|
||||||
|
.mockResolvedValueOnce([{ toolName: 'tool_a', workflowId: 'wf-1' }])
|
||||||
|
.mockResolvedValueOnce([{ isDeployed: true }])
|
||||||
|
|
||||||
|
mockCheckHybridAuth.mockResolvedValueOnce({
|
||||||
|
success: true,
|
||||||
|
userId: 'user-1',
|
||||||
|
authType: 'api_key',
|
||||||
|
apiKeyType: 'personal',
|
||||||
|
})
|
||||||
|
mockGetUserEntityPermissions.mockResolvedValueOnce('write')
|
||||||
|
fetchMock.mockResolvedValueOnce(
|
||||||
|
new Response(JSON.stringify({ output: { ok: true } }), {
|
||||||
|
status: 200,
|
||||||
|
headers: { 'Content-Type': 'application/json' },
|
||||||
|
})
|
||||||
|
)
|
||||||
|
|
||||||
|
const { POST } = await import('./route')
|
||||||
|
const req = new NextRequest('http://localhost:3000/api/mcp/serve/server-1', {
|
||||||
|
method: 'POST',
|
||||||
|
headers: { 'X-API-Key': 'pk_test_123' },
|
||||||
|
body: JSON.stringify({
|
||||||
|
jsonrpc: '2.0',
|
||||||
|
id: 1,
|
||||||
|
method: 'tools/call',
|
||||||
|
params: { name: 'tool_a', arguments: { q: 'test' } },
|
||||||
|
}),
|
||||||
|
})
|
||||||
|
const response = await POST(req, { params: Promise.resolve({ serverId: 'server-1' }) })
|
||||||
|
|
||||||
|
expect(response.status).toBe(200)
|
||||||
|
expect(fetchMock).toHaveBeenCalledTimes(1)
|
||||||
|
const fetchOptions = fetchMock.mock.calls[0][1] as RequestInit
|
||||||
|
const headers = fetchOptions.headers as Record<string, string>
|
||||||
|
expect(headers['X-API-Key']).toBe('pk_test_123')
|
||||||
|
expect(headers.Authorization).toBeUndefined()
|
||||||
|
expect(mockGenerateInternalToken).not.toHaveBeenCalled()
|
||||||
|
})
|
||||||
|
|
||||||
|
it('forwards internal token for private server session auth', async () => {
|
||||||
|
mockDbLimit
|
||||||
|
.mockResolvedValueOnce([
|
||||||
|
{
|
||||||
|
id: 'server-1',
|
||||||
|
name: 'Private Server',
|
||||||
|
workspaceId: 'ws-1',
|
||||||
|
isPublic: false,
|
||||||
|
createdBy: 'owner-1',
|
||||||
|
},
|
||||||
|
])
|
||||||
|
.mockResolvedValueOnce([{ toolName: 'tool_a', workflowId: 'wf-1' }])
|
||||||
|
.mockResolvedValueOnce([{ isDeployed: true }])
|
||||||
|
|
||||||
|
mockCheckHybridAuth.mockResolvedValueOnce({
|
||||||
|
success: true,
|
||||||
|
userId: 'user-1',
|
||||||
|
authType: 'session',
|
||||||
|
})
|
||||||
|
mockGetUserEntityPermissions.mockResolvedValueOnce('read')
|
||||||
|
mockGenerateInternalToken.mockResolvedValueOnce('internal-token-user-1')
|
||||||
|
fetchMock.mockResolvedValueOnce(
|
||||||
|
new Response(JSON.stringify({ output: { ok: true } }), {
|
||||||
|
status: 200,
|
||||||
|
headers: { 'Content-Type': 'application/json' },
|
||||||
|
})
|
||||||
|
)
|
||||||
|
|
||||||
|
const { POST } = await import('./route')
|
||||||
|
const req = new NextRequest('http://localhost:3000/api/mcp/serve/server-1', {
|
||||||
|
method: 'POST',
|
||||||
|
body: JSON.stringify({
|
||||||
|
jsonrpc: '2.0',
|
||||||
|
id: 1,
|
||||||
|
method: 'tools/call',
|
||||||
|
params: { name: 'tool_a' },
|
||||||
|
}),
|
||||||
|
})
|
||||||
|
const response = await POST(req, { params: Promise.resolve({ serverId: 'server-1' }) })
|
||||||
|
|
||||||
|
expect(response.status).toBe(200)
|
||||||
|
expect(fetchMock).toHaveBeenCalledTimes(1)
|
||||||
|
const fetchOptions = fetchMock.mock.calls[0][1] as RequestInit
|
||||||
|
const headers = fetchOptions.headers as Record<string, string>
|
||||||
|
expect(headers.Authorization).toBe('Bearer internal-token-user-1')
|
||||||
|
expect(headers['X-API-Key']).toBeUndefined()
|
||||||
|
expect(mockGenerateInternalToken).toHaveBeenCalledWith('user-1')
|
||||||
|
})
|
||||||
|
})
|
||||||
@@ -19,10 +19,11 @@ import { workflow, workflowMcpServer, workflowMcpTool } from '@sim/db/schema'
|
|||||||
import { createLogger } from '@sim/logger'
|
import { createLogger } from '@sim/logger'
|
||||||
import { and, eq } from 'drizzle-orm'
|
import { and, eq } from 'drizzle-orm'
|
||||||
import { type NextRequest, NextResponse } from 'next/server'
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
import { checkHybridAuth } from '@/lib/auth/hybrid'
|
import { type AuthResult, checkHybridAuth } from '@/lib/auth/hybrid'
|
||||||
import { generateInternalToken } from '@/lib/auth/internal'
|
import { generateInternalToken } from '@/lib/auth/internal'
|
||||||
import { getMaxExecutionTimeout } from '@/lib/core/execution-limits'
|
import { getMaxExecutionTimeout } from '@/lib/core/execution-limits'
|
||||||
import { getBaseUrl } from '@/lib/core/utils/urls'
|
import { getBaseUrl } from '@/lib/core/utils/urls'
|
||||||
|
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
|
||||||
|
|
||||||
const logger = createLogger('WorkflowMcpServeAPI')
|
const logger = createLogger('WorkflowMcpServeAPI')
|
||||||
|
|
||||||
@@ -32,6 +33,12 @@ interface RouteParams {
|
|||||||
serverId: string
|
serverId: string
|
||||||
}
|
}
|
||||||
|
|
||||||
|
interface ExecuteAuthContext {
|
||||||
|
authType?: AuthResult['authType']
|
||||||
|
userId: string
|
||||||
|
apiKey?: string | null
|
||||||
|
}
|
||||||
|
|
||||||
function createResponse(id: RequestId, result: unknown): JSONRPCResponse {
|
function createResponse(id: RequestId, result: unknown): JSONRPCResponse {
|
||||||
return {
|
return {
|
||||||
jsonrpc: '2.0',
|
jsonrpc: '2.0',
|
||||||
@@ -73,6 +80,22 @@ export async function GET(request: NextRequest, { params }: { params: Promise<Ro
|
|||||||
return NextResponse.json({ error: 'Server not found' }, { status: 404 })
|
return NextResponse.json({ error: 'Server not found' }, { status: 404 })
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if (!server.isPublic) {
|
||||||
|
const auth = await checkHybridAuth(request, { requireWorkflowId: false })
|
||||||
|
if (!auth.success || !auth.userId) {
|
||||||
|
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||||
|
}
|
||||||
|
|
||||||
|
const workspacePermission = await getUserEntityPermissions(
|
||||||
|
auth.userId,
|
||||||
|
'workspace',
|
||||||
|
server.workspaceId
|
||||||
|
)
|
||||||
|
if (workspacePermission === null) {
|
||||||
|
return NextResponse.json({ error: 'Forbidden' }, { status: 403 })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
return NextResponse.json({
|
return NextResponse.json({
|
||||||
name: server.name,
|
name: server.name,
|
||||||
version: '1.0.0',
|
version: '1.0.0',
|
||||||
@@ -94,11 +117,27 @@ export async function POST(request: NextRequest, { params }: { params: Promise<R
|
|||||||
return NextResponse.json({ error: 'Server not found' }, { status: 404 })
|
return NextResponse.json({ error: 'Server not found' }, { status: 404 })
|
||||||
}
|
}
|
||||||
|
|
||||||
|
let executeAuthContext: ExecuteAuthContext | null = null
|
||||||
if (!server.isPublic) {
|
if (!server.isPublic) {
|
||||||
const auth = await checkHybridAuth(request, { requireWorkflowId: false })
|
const auth = await checkHybridAuth(request, { requireWorkflowId: false })
|
||||||
if (!auth.success || !auth.userId) {
|
if (!auth.success || !auth.userId) {
|
||||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const workspacePermission = await getUserEntityPermissions(
|
||||||
|
auth.userId,
|
||||||
|
'workspace',
|
||||||
|
server.workspaceId
|
||||||
|
)
|
||||||
|
if (workspacePermission === null) {
|
||||||
|
return NextResponse.json({ error: 'Forbidden' }, { status: 403 })
|
||||||
|
}
|
||||||
|
|
||||||
|
executeAuthContext = {
|
||||||
|
authType: auth.authType,
|
||||||
|
userId: auth.userId,
|
||||||
|
apiKey: auth.authType === 'api_key' ? request.headers.get('X-API-Key') : null,
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
const body = await request.json()
|
const body = await request.json()
|
||||||
@@ -119,9 +158,6 @@ export async function POST(request: NextRequest, { params }: { params: Promise<R
|
|||||||
}
|
}
|
||||||
|
|
||||||
const { id, method, params: rpcParams } = message
|
const { id, method, params: rpcParams } = message
|
||||||
const apiKey =
|
|
||||||
request.headers.get('X-API-Key') ||
|
|
||||||
request.headers.get('Authorization')?.replace('Bearer ', '')
|
|
||||||
|
|
||||||
switch (method) {
|
switch (method) {
|
||||||
case 'initialize': {
|
case 'initialize': {
|
||||||
@@ -144,7 +180,7 @@ export async function POST(request: NextRequest, { params }: { params: Promise<R
|
|||||||
id,
|
id,
|
||||||
serverId,
|
serverId,
|
||||||
rpcParams as { name: string; arguments?: Record<string, unknown> },
|
rpcParams as { name: string; arguments?: Record<string, unknown> },
|
||||||
apiKey,
|
executeAuthContext,
|
||||||
server.isPublic ? server.createdBy : undefined
|
server.isPublic ? server.createdBy : undefined
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -207,7 +243,7 @@ async function handleToolsCall(
|
|||||||
id: RequestId,
|
id: RequestId,
|
||||||
serverId: string,
|
serverId: string,
|
||||||
params: { name: string; arguments?: Record<string, unknown> } | undefined,
|
params: { name: string; arguments?: Record<string, unknown> } | undefined,
|
||||||
apiKey?: string | null,
|
executeAuthContext?: ExecuteAuthContext | null,
|
||||||
publicServerOwnerId?: string
|
publicServerOwnerId?: string
|
||||||
): Promise<NextResponse> {
|
): Promise<NextResponse> {
|
||||||
try {
|
try {
|
||||||
@@ -255,8 +291,13 @@ async function handleToolsCall(
|
|||||||
if (publicServerOwnerId) {
|
if (publicServerOwnerId) {
|
||||||
const internalToken = await generateInternalToken(publicServerOwnerId)
|
const internalToken = await generateInternalToken(publicServerOwnerId)
|
||||||
headers.Authorization = `Bearer ${internalToken}`
|
headers.Authorization = `Bearer ${internalToken}`
|
||||||
} else if (apiKey) {
|
} else if (executeAuthContext) {
|
||||||
headers['X-API-Key'] = apiKey
|
if (executeAuthContext.authType === 'api_key' && executeAuthContext.apiKey) {
|
||||||
|
headers['X-API-Key'] = executeAuthContext.apiKey
|
||||||
|
} else {
|
||||||
|
const internalToken = await generateInternalToken(executeAuthContext.userId)
|
||||||
|
headers.Authorization = `Bearer ${internalToken}`
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
logger.info(`Executing workflow ${tool.workflowId} via MCP tool ${params.name}`)
|
logger.info(`Executing workflow ${tool.workflowId} via MCP tool ${params.name}`)
|
||||||
@@ -311,6 +352,17 @@ export async function DELETE(request: NextRequest, { params }: { params: Promise
|
|||||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if (!server.isPublic) {
|
||||||
|
const workspacePermission = await getUserEntityPermissions(
|
||||||
|
auth.userId,
|
||||||
|
'workspace',
|
||||||
|
server.workspaceId
|
||||||
|
)
|
||||||
|
if (workspacePermission === null) {
|
||||||
|
return NextResponse.json({ error: 'Forbidden' }, { status: 403 })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
logger.info(`MCP session terminated for server ${serverId}`)
|
logger.info(`MCP session terminated for server ${serverId}`)
|
||||||
return new NextResponse(null, { status: 204 })
|
return new NextResponse(null, { status: 204 })
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
|
|||||||
@@ -4,6 +4,7 @@ import { createLogger } from '@sim/logger'
|
|||||||
import { and, eq } from 'drizzle-orm'
|
import { and, eq } from 'drizzle-orm'
|
||||||
import type { NextRequest } from 'next/server'
|
import type { NextRequest } from 'next/server'
|
||||||
import { getParsedBody, withMcpAuth } from '@/lib/mcp/middleware'
|
import { getParsedBody, withMcpAuth } from '@/lib/mcp/middleware'
|
||||||
|
import { mcpPubSub } from '@/lib/mcp/pubsub'
|
||||||
import { createMcpErrorResponse, createMcpSuccessResponse } from '@/lib/mcp/utils'
|
import { createMcpErrorResponse, createMcpSuccessResponse } from '@/lib/mcp/utils'
|
||||||
|
|
||||||
const logger = createLogger('WorkflowMcpServerAPI')
|
const logger = createLogger('WorkflowMcpServerAPI')
|
||||||
@@ -146,6 +147,8 @@ export const DELETE = withMcpAuth<RouteParams>('admin')(
|
|||||||
|
|
||||||
logger.info(`[${requestId}] Successfully deleted workflow MCP server: ${serverId}`)
|
logger.info(`[${requestId}] Successfully deleted workflow MCP server: ${serverId}`)
|
||||||
|
|
||||||
|
mcpPubSub?.publishWorkflowToolsChanged({ serverId, workspaceId })
|
||||||
|
|
||||||
return createMcpSuccessResponse({ message: `Server ${serverId} deleted successfully` })
|
return createMcpSuccessResponse({ message: `Server ${serverId} deleted successfully` })
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
logger.error(`[${requestId}] Error deleting workflow MCP server:`, error)
|
logger.error(`[${requestId}] Error deleting workflow MCP server:`, error)
|
||||||
|
|||||||
@@ -4,6 +4,7 @@ import { createLogger } from '@sim/logger'
|
|||||||
import { and, eq } from 'drizzle-orm'
|
import { and, eq } from 'drizzle-orm'
|
||||||
import type { NextRequest } from 'next/server'
|
import type { NextRequest } from 'next/server'
|
||||||
import { getParsedBody, withMcpAuth } from '@/lib/mcp/middleware'
|
import { getParsedBody, withMcpAuth } from '@/lib/mcp/middleware'
|
||||||
|
import { mcpPubSub } from '@/lib/mcp/pubsub'
|
||||||
import { createMcpErrorResponse, createMcpSuccessResponse } from '@/lib/mcp/utils'
|
import { createMcpErrorResponse, createMcpSuccessResponse } from '@/lib/mcp/utils'
|
||||||
import { sanitizeToolName } from '@/lib/mcp/workflow-tool-schema'
|
import { sanitizeToolName } from '@/lib/mcp/workflow-tool-schema'
|
||||||
|
|
||||||
@@ -115,6 +116,8 @@ export const PATCH = withMcpAuth<RouteParams>('write')(
|
|||||||
|
|
||||||
logger.info(`[${requestId}] Successfully updated tool ${toolId}`)
|
logger.info(`[${requestId}] Successfully updated tool ${toolId}`)
|
||||||
|
|
||||||
|
mcpPubSub?.publishWorkflowToolsChanged({ serverId, workspaceId })
|
||||||
|
|
||||||
return createMcpSuccessResponse({ tool: updatedTool })
|
return createMcpSuccessResponse({ tool: updatedTool })
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
logger.error(`[${requestId}] Error updating tool:`, error)
|
logger.error(`[${requestId}] Error updating tool:`, error)
|
||||||
@@ -160,6 +163,8 @@ export const DELETE = withMcpAuth<RouteParams>('write')(
|
|||||||
|
|
||||||
logger.info(`[${requestId}] Successfully deleted tool ${toolId}`)
|
logger.info(`[${requestId}] Successfully deleted tool ${toolId}`)
|
||||||
|
|
||||||
|
mcpPubSub?.publishWorkflowToolsChanged({ serverId, workspaceId })
|
||||||
|
|
||||||
return createMcpSuccessResponse({ message: `Tool ${toolId} deleted successfully` })
|
return createMcpSuccessResponse({ message: `Tool ${toolId} deleted successfully` })
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
logger.error(`[${requestId}] Error deleting tool:`, error)
|
logger.error(`[${requestId}] Error deleting tool:`, error)
|
||||||
|
|||||||
@@ -4,7 +4,9 @@ import { createLogger } from '@sim/logger'
|
|||||||
import { and, eq } from 'drizzle-orm'
|
import { and, eq } from 'drizzle-orm'
|
||||||
import type { NextRequest } from 'next/server'
|
import type { NextRequest } from 'next/server'
|
||||||
import { getParsedBody, withMcpAuth } from '@/lib/mcp/middleware'
|
import { getParsedBody, withMcpAuth } from '@/lib/mcp/middleware'
|
||||||
|
import { mcpPubSub } from '@/lib/mcp/pubsub'
|
||||||
import { createMcpErrorResponse, createMcpSuccessResponse } from '@/lib/mcp/utils'
|
import { createMcpErrorResponse, createMcpSuccessResponse } from '@/lib/mcp/utils'
|
||||||
|
import { generateParameterSchemaForWorkflow } from '@/lib/mcp/workflow-mcp-sync'
|
||||||
import { sanitizeToolName } from '@/lib/mcp/workflow-tool-schema'
|
import { sanitizeToolName } from '@/lib/mcp/workflow-tool-schema'
|
||||||
import { hasValidStartBlock } from '@/lib/workflows/triggers/trigger-utils.server'
|
import { hasValidStartBlock } from '@/lib/workflows/triggers/trigger-utils.server'
|
||||||
|
|
||||||
@@ -169,6 +171,11 @@ export const POST = withMcpAuth<RouteParams>('write')(
|
|||||||
workflowRecord.description ||
|
workflowRecord.description ||
|
||||||
`Execute ${workflowRecord.name} workflow`
|
`Execute ${workflowRecord.name} workflow`
|
||||||
|
|
||||||
|
const parameterSchema =
|
||||||
|
body.parameterSchema && Object.keys(body.parameterSchema).length > 0
|
||||||
|
? body.parameterSchema
|
||||||
|
: await generateParameterSchemaForWorkflow(body.workflowId)
|
||||||
|
|
||||||
const toolId = crypto.randomUUID()
|
const toolId = crypto.randomUUID()
|
||||||
const [tool] = await db
|
const [tool] = await db
|
||||||
.insert(workflowMcpTool)
|
.insert(workflowMcpTool)
|
||||||
@@ -178,7 +185,7 @@ export const POST = withMcpAuth<RouteParams>('write')(
|
|||||||
workflowId: body.workflowId,
|
workflowId: body.workflowId,
|
||||||
toolName,
|
toolName,
|
||||||
toolDescription,
|
toolDescription,
|
||||||
parameterSchema: body.parameterSchema || {},
|
parameterSchema,
|
||||||
createdAt: new Date(),
|
createdAt: new Date(),
|
||||||
updatedAt: new Date(),
|
updatedAt: new Date(),
|
||||||
})
|
})
|
||||||
@@ -188,6 +195,8 @@ export const POST = withMcpAuth<RouteParams>('write')(
|
|||||||
`[${requestId}] Successfully added tool ${toolName} (workflow: ${body.workflowId}) to server ${serverId}`
|
`[${requestId}] Successfully added tool ${toolName} (workflow: ${body.workflowId}) to server ${serverId}`
|
||||||
)
|
)
|
||||||
|
|
||||||
|
mcpPubSub?.publishWorkflowToolsChanged({ serverId, workspaceId })
|
||||||
|
|
||||||
return createMcpSuccessResponse({ tool }, 201)
|
return createMcpSuccessResponse({ tool }, 201)
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
logger.error(`[${requestId}] Error adding tool:`, error)
|
logger.error(`[${requestId}] Error adding tool:`, error)
|
||||||
|
|||||||
@@ -4,7 +4,9 @@ import { createLogger } from '@sim/logger'
|
|||||||
import { eq, inArray, sql } from 'drizzle-orm'
|
import { eq, inArray, sql } from 'drizzle-orm'
|
||||||
import type { NextRequest } from 'next/server'
|
import type { NextRequest } from 'next/server'
|
||||||
import { getParsedBody, withMcpAuth } from '@/lib/mcp/middleware'
|
import { getParsedBody, withMcpAuth } from '@/lib/mcp/middleware'
|
||||||
|
import { mcpPubSub } from '@/lib/mcp/pubsub'
|
||||||
import { createMcpErrorResponse, createMcpSuccessResponse } from '@/lib/mcp/utils'
|
import { createMcpErrorResponse, createMcpSuccessResponse } from '@/lib/mcp/utils'
|
||||||
|
import { generateParameterSchemaForWorkflow } from '@/lib/mcp/workflow-mcp-sync'
|
||||||
import { sanitizeToolName } from '@/lib/mcp/workflow-tool-schema'
|
import { sanitizeToolName } from '@/lib/mcp/workflow-tool-schema'
|
||||||
import { hasValidStartBlock } from '@/lib/workflows/triggers/trigger-utils.server'
|
import { hasValidStartBlock } from '@/lib/workflows/triggers/trigger-utils.server'
|
||||||
|
|
||||||
@@ -155,6 +157,8 @@ export const POST = withMcpAuth('write')(
|
|||||||
const toolDescription =
|
const toolDescription =
|
||||||
workflowRecord.description || `Execute ${workflowRecord.name} workflow`
|
workflowRecord.description || `Execute ${workflowRecord.name} workflow`
|
||||||
|
|
||||||
|
const parameterSchema = await generateParameterSchemaForWorkflow(workflowRecord.id)
|
||||||
|
|
||||||
const toolId = crypto.randomUUID()
|
const toolId = crypto.randomUUID()
|
||||||
await db.insert(workflowMcpTool).values({
|
await db.insert(workflowMcpTool).values({
|
||||||
id: toolId,
|
id: toolId,
|
||||||
@@ -162,7 +166,7 @@ export const POST = withMcpAuth('write')(
|
|||||||
workflowId: workflowRecord.id,
|
workflowId: workflowRecord.id,
|
||||||
toolName,
|
toolName,
|
||||||
toolDescription,
|
toolDescription,
|
||||||
parameterSchema: {},
|
parameterSchema,
|
||||||
createdAt: new Date(),
|
createdAt: new Date(),
|
||||||
updatedAt: new Date(),
|
updatedAt: new Date(),
|
||||||
})
|
})
|
||||||
@@ -174,6 +178,10 @@ export const POST = withMcpAuth('write')(
|
|||||||
`[${requestId}] Added ${addedTools.length} tools to server ${serverId}:`,
|
`[${requestId}] Added ${addedTools.length} tools to server ${serverId}:`,
|
||||||
addedTools.map((t) => t.toolName)
|
addedTools.map((t) => t.toolName)
|
||||||
)
|
)
|
||||||
|
|
||||||
|
if (addedTools.length > 0) {
|
||||||
|
mcpPubSub?.publishWorkflowToolsChanged({ serverId, workspaceId })
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
logger.info(
|
logger.info(
|
||||||
|
|||||||
@@ -446,6 +446,36 @@ export async function PUT(
|
|||||||
})
|
})
|
||||||
.where(eq(workspaceInvitation.id, wsInvitation.id))
|
.where(eq(workspaceInvitation.id, wsInvitation.id))
|
||||||
|
|
||||||
|
const existingPermission = await tx
|
||||||
|
.select({ id: permissions.id, permissionType: permissions.permissionType })
|
||||||
|
.from(permissions)
|
||||||
|
.where(
|
||||||
|
and(
|
||||||
|
eq(permissions.entityId, wsInvitation.workspaceId),
|
||||||
|
eq(permissions.entityType, 'workspace'),
|
||||||
|
eq(permissions.userId, session.user.id)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
.then((rows) => rows[0])
|
||||||
|
|
||||||
|
if (existingPermission) {
|
||||||
|
const PERMISSION_RANK = { read: 0, write: 1, admin: 2 } as const
|
||||||
|
type PermissionLevel = keyof typeof PERMISSION_RANK
|
||||||
|
const existingRank =
|
||||||
|
PERMISSION_RANK[existingPermission.permissionType as PermissionLevel] ?? 0
|
||||||
|
const newPermission = (wsInvitation.permissions || 'read') as PermissionLevel
|
||||||
|
const newRank = PERMISSION_RANK[newPermission] ?? 0
|
||||||
|
|
||||||
|
if (newRank > existingRank) {
|
||||||
|
await tx
|
||||||
|
.update(permissions)
|
||||||
|
.set({
|
||||||
|
permissionType: newPermission,
|
||||||
|
updatedAt: new Date(),
|
||||||
|
})
|
||||||
|
.where(eq(permissions.id, existingPermission.id))
|
||||||
|
}
|
||||||
|
} else {
|
||||||
await tx.insert(permissions).values({
|
await tx.insert(permissions).values({
|
||||||
id: randomUUID(),
|
id: randomUUID(),
|
||||||
entityType: 'workspace',
|
entityType: 'workspace',
|
||||||
@@ -456,6 +486,7 @@ export async function PUT(
|
|||||||
updatedAt: new Date(),
|
updatedAt: new Date(),
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
}
|
||||||
} else if (status === 'cancelled') {
|
} else if (status === 'cancelled') {
|
||||||
await tx
|
await tx
|
||||||
.update(workspaceInvitation)
|
.update(workspaceInvitation)
|
||||||
|
|||||||
@@ -4,6 +4,7 @@ import { type NextRequest, NextResponse } from 'next/server'
|
|||||||
import { generateRequestId } from '@/lib/core/utils/request'
|
import { generateRequestId } from '@/lib/core/utils/request'
|
||||||
import { preprocessExecution } from '@/lib/execution/preprocessing'
|
import { preprocessExecution } from '@/lib/execution/preprocessing'
|
||||||
import { PauseResumeManager } from '@/lib/workflows/executor/human-in-the-loop-manager'
|
import { PauseResumeManager } from '@/lib/workflows/executor/human-in-the-loop-manager'
|
||||||
|
import { getWorkspaceBilledAccountUserId } from '@/lib/workspaces/utils'
|
||||||
import { validateWorkflowAccess } from '@/app/api/workflows/middleware'
|
import { validateWorkflowAccess } from '@/app/api/workflows/middleware'
|
||||||
|
|
||||||
const logger = createLogger('WorkflowResumeAPI')
|
const logger = createLogger('WorkflowResumeAPI')
|
||||||
@@ -37,7 +38,26 @@ export async function POST(
|
|||||||
}
|
}
|
||||||
|
|
||||||
const resumeInput = payload?.input ?? payload ?? {}
|
const resumeInput = payload?.input ?? payload ?? {}
|
||||||
const userId = workflow.userId ?? ''
|
const isPersonalApiKeyCaller =
|
||||||
|
access.auth?.authType === 'api_key' && access.auth?.apiKeyType === 'personal'
|
||||||
|
|
||||||
|
let userId: string
|
||||||
|
if (isPersonalApiKeyCaller && access.auth?.userId) {
|
||||||
|
userId = access.auth.userId
|
||||||
|
} else {
|
||||||
|
const billedAccountUserId = await getWorkspaceBilledAccountUserId(workflow.workspaceId)
|
||||||
|
if (!billedAccountUserId) {
|
||||||
|
logger.error('Unable to resolve workspace billed account for resume execution', {
|
||||||
|
workflowId,
|
||||||
|
workspaceId: workflow.workspaceId,
|
||||||
|
})
|
||||||
|
return NextResponse.json(
|
||||||
|
{ error: 'Unable to resolve billing account for this workspace' },
|
||||||
|
{ status: 500 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
userId = billedAccountUserId
|
||||||
|
}
|
||||||
|
|
||||||
const resumeExecutionId = randomUUID()
|
const resumeExecutionId = randomUUID()
|
||||||
const requestId = generateRequestId()
|
const requestId = generateRequestId()
|
||||||
@@ -58,8 +78,8 @@ export async function POST(
|
|||||||
checkRateLimit: false, // Manual triggers bypass rate limits
|
checkRateLimit: false, // Manual triggers bypass rate limits
|
||||||
checkDeployment: false, // Resuming existing execution, deployment already checked
|
checkDeployment: false, // Resuming existing execution, deployment already checked
|
||||||
skipUsageLimits: true, // Resume is continuation of authorized execution - don't recheck limits
|
skipUsageLimits: true, // Resume is continuation of authorized execution - don't recheck limits
|
||||||
|
useAuthenticatedUserAsActor: isPersonalApiKeyCaller,
|
||||||
workspaceId: workflow.workspaceId || undefined,
|
workspaceId: workflow.workspaceId || undefined,
|
||||||
isResumeContext: true, // Enable billing fallback for paused workflow resumes
|
|
||||||
})
|
})
|
||||||
|
|
||||||
if (!preprocessResult.success) {
|
if (!preprocessResult.success) {
|
||||||
|
|||||||
@@ -7,21 +7,20 @@ import { loggerMock } from '@sim/testing'
|
|||||||
import { NextRequest } from 'next/server'
|
import { NextRequest } from 'next/server'
|
||||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
|
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
|
||||||
|
|
||||||
const { mockGetSession, mockGetUserEntityPermissions, mockDbSelect, mockDbUpdate } = vi.hoisted(
|
const { mockGetSession, mockAuthorizeWorkflowByWorkspacePermission, mockDbSelect, mockDbUpdate } =
|
||||||
() => ({
|
vi.hoisted(() => ({
|
||||||
mockGetSession: vi.fn(),
|
mockGetSession: vi.fn(),
|
||||||
mockGetUserEntityPermissions: vi.fn(),
|
mockAuthorizeWorkflowByWorkspacePermission: vi.fn(),
|
||||||
mockDbSelect: vi.fn(),
|
mockDbSelect: vi.fn(),
|
||||||
mockDbUpdate: vi.fn(),
|
mockDbUpdate: vi.fn(),
|
||||||
})
|
}))
|
||||||
)
|
|
||||||
|
|
||||||
vi.mock('@/lib/auth', () => ({
|
vi.mock('@/lib/auth', () => ({
|
||||||
getSession: mockGetSession,
|
getSession: mockGetSession,
|
||||||
}))
|
}))
|
||||||
|
|
||||||
vi.mock('@/lib/workspaces/permissions/utils', () => ({
|
vi.mock('@/lib/workflows/utils', () => ({
|
||||||
getUserEntityPermissions: mockGetUserEntityPermissions,
|
authorizeWorkflowByWorkspacePermission: mockAuthorizeWorkflowByWorkspacePermission,
|
||||||
}))
|
}))
|
||||||
|
|
||||||
vi.mock('@sim/db', () => ({
|
vi.mock('@sim/db', () => ({
|
||||||
@@ -81,7 +80,12 @@ describe('Schedule PUT API (Reactivate)', () => {
|
|||||||
beforeEach(() => {
|
beforeEach(() => {
|
||||||
vi.clearAllMocks()
|
vi.clearAllMocks()
|
||||||
mockGetSession.mockResolvedValue({ user: { id: 'user-1' } })
|
mockGetSession.mockResolvedValue({ user: { id: 'user-1' } })
|
||||||
mockGetUserEntityPermissions.mockResolvedValue('write')
|
mockAuthorizeWorkflowByWorkspacePermission.mockResolvedValue({
|
||||||
|
allowed: true,
|
||||||
|
status: 200,
|
||||||
|
workflow: { id: 'wf-1', workspaceId: 'ws-1' },
|
||||||
|
workspacePermission: 'write',
|
||||||
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
afterEach(() => {
|
afterEach(() => {
|
||||||
@@ -140,6 +144,13 @@ describe('Schedule PUT API (Reactivate)', () => {
|
|||||||
})
|
})
|
||||||
|
|
||||||
it('returns 404 when workflow does not exist for schedule', async () => {
|
it('returns 404 when workflow does not exist for schedule', async () => {
|
||||||
|
mockAuthorizeWorkflowByWorkspacePermission.mockResolvedValue({
|
||||||
|
allowed: false,
|
||||||
|
status: 404,
|
||||||
|
workflow: null,
|
||||||
|
workspacePermission: null,
|
||||||
|
message: 'Workflow not found',
|
||||||
|
})
|
||||||
mockDbChain([[{ id: 'sched-1', workflowId: 'wf-1', status: 'disabled' }], []])
|
mockDbChain([[{ id: 'sched-1', workflowId: 'wf-1', status: 'disabled' }], []])
|
||||||
|
|
||||||
const res = await PUT(createRequest({ action: 'reactivate' }), createParams('sched-1'))
|
const res = await PUT(createRequest({ action: 'reactivate' }), createParams('sched-1'))
|
||||||
@@ -152,6 +163,14 @@ describe('Schedule PUT API (Reactivate)', () => {
|
|||||||
|
|
||||||
describe('Authorization', () => {
|
describe('Authorization', () => {
|
||||||
it('returns 403 when user is not workflow owner', async () => {
|
it('returns 403 when user is not workflow owner', async () => {
|
||||||
|
mockAuthorizeWorkflowByWorkspacePermission.mockResolvedValue({
|
||||||
|
allowed: false,
|
||||||
|
status: 403,
|
||||||
|
workflow: { id: 'wf-1', workspaceId: null },
|
||||||
|
workspacePermission: null,
|
||||||
|
message:
|
||||||
|
'This workflow is not attached to a workspace. Personal workflows are deprecated and cannot be accessed.',
|
||||||
|
})
|
||||||
mockDbChain([
|
mockDbChain([
|
||||||
[{ id: 'sched-1', workflowId: 'wf-1', status: 'disabled' }],
|
[{ id: 'sched-1', workflowId: 'wf-1', status: 'disabled' }],
|
||||||
[{ userId: 'other-user', workspaceId: null }],
|
[{ userId: 'other-user', workspaceId: null }],
|
||||||
@@ -161,11 +180,17 @@ describe('Schedule PUT API (Reactivate)', () => {
|
|||||||
|
|
||||||
expect(res.status).toBe(403)
|
expect(res.status).toBe(403)
|
||||||
const data = await res.json()
|
const data = await res.json()
|
||||||
expect(data.error).toBe('Not authorized to modify this schedule')
|
expect(data.error).toContain('Personal workflows are deprecated')
|
||||||
})
|
})
|
||||||
|
|
||||||
it('returns 403 for workspace member with only read permission', async () => {
|
it('returns 403 for workspace member with only read permission', async () => {
|
||||||
mockGetUserEntityPermissions.mockResolvedValue('read')
|
mockAuthorizeWorkflowByWorkspacePermission.mockResolvedValue({
|
||||||
|
allowed: false,
|
||||||
|
status: 403,
|
||||||
|
workflow: { id: 'wf-1', workspaceId: 'ws-1' },
|
||||||
|
workspacePermission: 'read',
|
||||||
|
message: 'Unauthorized: Access denied to write this workflow',
|
||||||
|
})
|
||||||
mockDbChain([
|
mockDbChain([
|
||||||
[{ id: 'sched-1', workflowId: 'wf-1', status: 'disabled' }],
|
[{ id: 'sched-1', workflowId: 'wf-1', status: 'disabled' }],
|
||||||
[{ userId: 'other-user', workspaceId: 'ws-1' }],
|
[{ userId: 'other-user', workspaceId: 'ws-1' }],
|
||||||
@@ -198,7 +223,6 @@ describe('Schedule PUT API (Reactivate)', () => {
|
|||||||
})
|
})
|
||||||
|
|
||||||
it('allows workspace member with write permission to reactivate', async () => {
|
it('allows workspace member with write permission to reactivate', async () => {
|
||||||
mockGetUserEntityPermissions.mockResolvedValue('write')
|
|
||||||
mockDbChain([
|
mockDbChain([
|
||||||
[
|
[
|
||||||
{
|
{
|
||||||
@@ -218,7 +242,6 @@ describe('Schedule PUT API (Reactivate)', () => {
|
|||||||
})
|
})
|
||||||
|
|
||||||
it('allows workspace admin to reactivate', async () => {
|
it('allows workspace admin to reactivate', async () => {
|
||||||
mockGetUserEntityPermissions.mockResolvedValue('admin')
|
|
||||||
mockDbChain([
|
mockDbChain([
|
||||||
[
|
[
|
||||||
{
|
{
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
import { db } from '@sim/db'
|
import { db } from '@sim/db'
|
||||||
import { workflow, workflowSchedule } from '@sim/db/schema'
|
import { workflowSchedule } from '@sim/db/schema'
|
||||||
import { createLogger } from '@sim/logger'
|
import { createLogger } from '@sim/logger'
|
||||||
import { eq } from 'drizzle-orm'
|
import { eq } from 'drizzle-orm'
|
||||||
import { type NextRequest, NextResponse } from 'next/server'
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
@@ -7,7 +7,7 @@ import { z } from 'zod'
|
|||||||
import { getSession } from '@/lib/auth'
|
import { getSession } from '@/lib/auth'
|
||||||
import { generateRequestId } from '@/lib/core/utils/request'
|
import { generateRequestId } from '@/lib/core/utils/request'
|
||||||
import { validateCronExpression } from '@/lib/workflows/schedules/utils'
|
import { validateCronExpression } from '@/lib/workflows/schedules/utils'
|
||||||
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
|
import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils'
|
||||||
|
|
||||||
const logger = createLogger('ScheduleAPI')
|
const logger = createLogger('ScheduleAPI')
|
||||||
|
|
||||||
@@ -57,31 +57,23 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
|
|||||||
return NextResponse.json({ error: 'Schedule not found' }, { status: 404 })
|
return NextResponse.json({ error: 'Schedule not found' }, { status: 404 })
|
||||||
}
|
}
|
||||||
|
|
||||||
const [workflowRecord] = await db
|
const authorization = await authorizeWorkflowByWorkspacePermission({
|
||||||
.select({ userId: workflow.userId, workspaceId: workflow.workspaceId })
|
workflowId: schedule.workflowId,
|
||||||
.from(workflow)
|
userId: session.user.id,
|
||||||
.where(eq(workflow.id, schedule.workflowId))
|
action: 'write',
|
||||||
.limit(1)
|
})
|
||||||
|
|
||||||
if (!workflowRecord) {
|
if (!authorization.workflow) {
|
||||||
logger.warn(`[${requestId}] Workflow not found for schedule: ${scheduleId}`)
|
logger.warn(`[${requestId}] Workflow not found for schedule: ${scheduleId}`)
|
||||||
return NextResponse.json({ error: 'Workflow not found' }, { status: 404 })
|
return NextResponse.json({ error: 'Workflow not found' }, { status: 404 })
|
||||||
}
|
}
|
||||||
|
|
||||||
let isAuthorized = workflowRecord.userId === session.user.id
|
if (!authorization.allowed) {
|
||||||
|
|
||||||
if (!isAuthorized && workflowRecord.workspaceId) {
|
|
||||||
const userPermission = await getUserEntityPermissions(
|
|
||||||
session.user.id,
|
|
||||||
'workspace',
|
|
||||||
workflowRecord.workspaceId
|
|
||||||
)
|
|
||||||
isAuthorized = userPermission === 'write' || userPermission === 'admin'
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!isAuthorized) {
|
|
||||||
logger.warn(`[${requestId}] User not authorized to modify this schedule: ${scheduleId}`)
|
logger.warn(`[${requestId}] User not authorized to modify this schedule: ${scheduleId}`)
|
||||||
return NextResponse.json({ error: 'Not authorized to modify this schedule' }, { status: 403 })
|
return NextResponse.json(
|
||||||
|
{ error: authorization.message || 'Not authorized to modify this schedule' },
|
||||||
|
{ status: authorization.status }
|
||||||
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
if (schedule.status === 'active') {
|
if (schedule.status === 'active') {
|
||||||
|
|||||||
@@ -7,18 +7,20 @@ import { loggerMock } from '@sim/testing'
|
|||||||
import { NextRequest } from 'next/server'
|
import { NextRequest } from 'next/server'
|
||||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
|
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
|
||||||
|
|
||||||
const { mockGetSession, mockGetUserEntityPermissions, mockDbSelect } = vi.hoisted(() => ({
|
const { mockGetSession, mockAuthorizeWorkflowByWorkspacePermission, mockDbSelect } = vi.hoisted(
|
||||||
|
() => ({
|
||||||
mockGetSession: vi.fn(),
|
mockGetSession: vi.fn(),
|
||||||
mockGetUserEntityPermissions: vi.fn(),
|
mockAuthorizeWorkflowByWorkspacePermission: vi.fn(),
|
||||||
mockDbSelect: vi.fn(),
|
mockDbSelect: vi.fn(),
|
||||||
}))
|
})
|
||||||
|
)
|
||||||
|
|
||||||
vi.mock('@/lib/auth', () => ({
|
vi.mock('@/lib/auth', () => ({
|
||||||
getSession: mockGetSession,
|
getSession: mockGetSession,
|
||||||
}))
|
}))
|
||||||
|
|
||||||
vi.mock('@/lib/workspaces/permissions/utils', () => ({
|
vi.mock('@/lib/workflows/utils', () => ({
|
||||||
getUserEntityPermissions: mockGetUserEntityPermissions,
|
authorizeWorkflowByWorkspacePermission: mockAuthorizeWorkflowByWorkspacePermission,
|
||||||
}))
|
}))
|
||||||
|
|
||||||
vi.mock('@sim/db', () => ({
|
vi.mock('@sim/db', () => ({
|
||||||
@@ -80,7 +82,12 @@ describe('Schedule GET API', () => {
|
|||||||
beforeEach(() => {
|
beforeEach(() => {
|
||||||
vi.clearAllMocks()
|
vi.clearAllMocks()
|
||||||
mockGetSession.mockResolvedValue({ user: { id: 'user-1' } })
|
mockGetSession.mockResolvedValue({ user: { id: 'user-1' } })
|
||||||
mockGetUserEntityPermissions.mockResolvedValue('read')
|
mockAuthorizeWorkflowByWorkspacePermission.mockResolvedValue({
|
||||||
|
allowed: true,
|
||||||
|
status: 200,
|
||||||
|
workflow: { id: 'wf-1', workspaceId: 'ws-1' },
|
||||||
|
workspacePermission: 'read',
|
||||||
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
afterEach(() => {
|
afterEach(() => {
|
||||||
@@ -89,7 +96,6 @@ describe('Schedule GET API', () => {
|
|||||||
|
|
||||||
it('returns schedule data for authorized user', async () => {
|
it('returns schedule data for authorized user', async () => {
|
||||||
mockDbChain([
|
mockDbChain([
|
||||||
[{ userId: 'user-1', workspaceId: null }],
|
|
||||||
[
|
[
|
||||||
{
|
{
|
||||||
schedule: {
|
schedule: {
|
||||||
@@ -111,7 +117,7 @@ describe('Schedule GET API', () => {
|
|||||||
})
|
})
|
||||||
|
|
||||||
it('returns null when no schedule exists', async () => {
|
it('returns null when no schedule exists', async () => {
|
||||||
mockDbChain([[{ userId: 'user-1', workspaceId: null }], []])
|
mockDbChain([[]])
|
||||||
|
|
||||||
const res = await GET(createRequest('http://test/api/schedules?workflowId=wf-1'))
|
const res = await GET(createRequest('http://test/api/schedules?workflowId=wf-1'))
|
||||||
const data = await res.json()
|
const data = await res.json()
|
||||||
@@ -135,6 +141,13 @@ describe('Schedule GET API', () => {
|
|||||||
})
|
})
|
||||||
|
|
||||||
it('returns 404 for non-existent workflow', async () => {
|
it('returns 404 for non-existent workflow', async () => {
|
||||||
|
mockAuthorizeWorkflowByWorkspacePermission.mockResolvedValue({
|
||||||
|
allowed: false,
|
||||||
|
status: 404,
|
||||||
|
message: 'Workflow not found',
|
||||||
|
workflow: null,
|
||||||
|
workspacePermission: null,
|
||||||
|
})
|
||||||
mockDbChain([[]])
|
mockDbChain([[]])
|
||||||
|
|
||||||
const res = await GET(createRequest('http://test/api/schedules?workflowId=wf-1'))
|
const res = await GET(createRequest('http://test/api/schedules?workflowId=wf-1'))
|
||||||
@@ -143,6 +156,13 @@ describe('Schedule GET API', () => {
|
|||||||
})
|
})
|
||||||
|
|
||||||
it('denies access for unauthorized user', async () => {
|
it('denies access for unauthorized user', async () => {
|
||||||
|
mockAuthorizeWorkflowByWorkspacePermission.mockResolvedValue({
|
||||||
|
allowed: false,
|
||||||
|
status: 403,
|
||||||
|
message: 'Unauthorized: Access denied to read this workflow',
|
||||||
|
workflow: { id: 'wf-1', workspaceId: 'ws-1' },
|
||||||
|
workspacePermission: null,
|
||||||
|
})
|
||||||
mockDbChain([[{ userId: 'other-user', workspaceId: null }]])
|
mockDbChain([[{ userId: 'other-user', workspaceId: null }]])
|
||||||
|
|
||||||
const res = await GET(createRequest('http://test/api/schedules?workflowId=wf-1'))
|
const res = await GET(createRequest('http://test/api/schedules?workflowId=wf-1'))
|
||||||
@@ -151,10 +171,7 @@ describe('Schedule GET API', () => {
|
|||||||
})
|
})
|
||||||
|
|
||||||
it('allows workspace members to view', async () => {
|
it('allows workspace members to view', async () => {
|
||||||
mockDbChain([
|
mockDbChain([[{ schedule: { id: 'sched-1', status: 'active', failedCount: 0 } }]])
|
||||||
[{ userId: 'other-user', workspaceId: 'ws-1' }],
|
|
||||||
[{ schedule: { id: 'sched-1', status: 'active', failedCount: 0 } }],
|
|
||||||
])
|
|
||||||
|
|
||||||
const res = await GET(createRequest('http://test/api/schedules?workflowId=wf-1'))
|
const res = await GET(createRequest('http://test/api/schedules?workflowId=wf-1'))
|
||||||
|
|
||||||
@@ -162,10 +179,7 @@ describe('Schedule GET API', () => {
|
|||||||
})
|
})
|
||||||
|
|
||||||
it('indicates disabled schedule with failures', async () => {
|
it('indicates disabled schedule with failures', async () => {
|
||||||
mockDbChain([
|
mockDbChain([[{ schedule: { id: 'sched-1', status: 'disabled', failedCount: 100 } }]])
|
||||||
[{ userId: 'user-1', workspaceId: null }],
|
|
||||||
[{ schedule: { id: 'sched-1', status: 'disabled', failedCount: 100 } }],
|
|
||||||
])
|
|
||||||
|
|
||||||
const res = await GET(createRequest('http://test/api/schedules?workflowId=wf-1'))
|
const res = await GET(createRequest('http://test/api/schedules?workflowId=wf-1'))
|
||||||
const data = await res.json()
|
const data = await res.json()
|
||||||
|
|||||||
@@ -1,11 +1,11 @@
|
|||||||
import { db } from '@sim/db'
|
import { db } from '@sim/db'
|
||||||
import { workflow, workflowDeploymentVersion, workflowSchedule } from '@sim/db/schema'
|
import { workflowDeploymentVersion, workflowSchedule } from '@sim/db/schema'
|
||||||
import { createLogger } from '@sim/logger'
|
import { createLogger } from '@sim/logger'
|
||||||
import { and, eq, isNull, or } from 'drizzle-orm'
|
import { and, eq, isNull, or } from 'drizzle-orm'
|
||||||
import { type NextRequest, NextResponse } from 'next/server'
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
import { getSession } from '@/lib/auth'
|
import { getSession } from '@/lib/auth'
|
||||||
import { generateRequestId } from '@/lib/core/utils/request'
|
import { generateRequestId } from '@/lib/core/utils/request'
|
||||||
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
|
import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils'
|
||||||
|
|
||||||
const logger = createLogger('ScheduledAPI')
|
const logger = createLogger('ScheduledAPI')
|
||||||
|
|
||||||
@@ -29,29 +29,21 @@ export async function GET(req: NextRequest) {
|
|||||||
return NextResponse.json({ error: 'Missing workflowId parameter' }, { status: 400 })
|
return NextResponse.json({ error: 'Missing workflowId parameter' }, { status: 400 })
|
||||||
}
|
}
|
||||||
|
|
||||||
const [workflowRecord] = await db
|
const authorization = await authorizeWorkflowByWorkspacePermission({
|
||||||
.select({ userId: workflow.userId, workspaceId: workflow.workspaceId })
|
workflowId,
|
||||||
.from(workflow)
|
userId: session.user.id,
|
||||||
.where(eq(workflow.id, workflowId))
|
action: 'read',
|
||||||
.limit(1)
|
})
|
||||||
|
|
||||||
if (!workflowRecord) {
|
if (!authorization.workflow) {
|
||||||
return NextResponse.json({ error: 'Workflow not found' }, { status: 404 })
|
return NextResponse.json({ error: 'Workflow not found' }, { status: 404 })
|
||||||
}
|
}
|
||||||
|
|
||||||
let isAuthorized = workflowRecord.userId === session.user.id
|
if (!authorization.allowed) {
|
||||||
|
return NextResponse.json(
|
||||||
if (!isAuthorized && workflowRecord.workspaceId) {
|
{ error: authorization.message || 'Not authorized to view this workflow' },
|
||||||
const userPermission = await getUserEntityPermissions(
|
{ status: authorization.status }
|
||||||
session.user.id,
|
|
||||||
'workspace',
|
|
||||||
workflowRecord.workspaceId
|
|
||||||
)
|
)
|
||||||
isAuthorized = userPermission !== null
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!isAuthorized) {
|
|
||||||
return NextResponse.json({ error: 'Not authorized to view this workflow' }, { status: 403 })
|
|
||||||
}
|
}
|
||||||
|
|
||||||
logger.info(`[${requestId}] Getting schedule for workflow ${workflowId}`)
|
logger.info(`[${requestId}] Getting schedule for workflow ${workflowId}`)
|
||||||
|
|||||||
@@ -214,6 +214,14 @@ describe('Custom Tools API Routes', () => {
|
|||||||
vi.doMock('@/lib/workflows/custom-tools/operations', () => ({
|
vi.doMock('@/lib/workflows/custom-tools/operations', () => ({
|
||||||
upsertCustomTools: vi.fn().mockResolvedValue(sampleTools),
|
upsertCustomTools: vi.fn().mockResolvedValue(sampleTools),
|
||||||
}))
|
}))
|
||||||
|
|
||||||
|
vi.doMock('@/lib/workflows/utils', () => ({
|
||||||
|
authorizeWorkflowByWorkspacePermission: vi.fn().mockResolvedValue({
|
||||||
|
allowed: true,
|
||||||
|
status: 200,
|
||||||
|
workflow: { workspaceId: 'workspace-123' },
|
||||||
|
}),
|
||||||
|
}))
|
||||||
})
|
})
|
||||||
|
|
||||||
afterEach(() => {
|
afterEach(() => {
|
||||||
@@ -272,20 +280,6 @@ describe('Custom Tools API Routes', () => {
|
|||||||
it('should handle workflowId parameter', async () => {
|
it('should handle workflowId parameter', async () => {
|
||||||
const req = new NextRequest('http://localhost:3000/api/tools/custom?workflowId=workflow-123')
|
const req = new NextRequest('http://localhost:3000/api/tools/custom?workflowId=workflow-123')
|
||||||
|
|
||||||
mockLimit.mockResolvedValueOnce([{ workspaceId: 'workspace-123' }])
|
|
||||||
|
|
||||||
mockWhere.mockImplementationOnce((condition) => {
|
|
||||||
const queryBuilder = {
|
|
||||||
limit: mockLimit,
|
|
||||||
then: (resolve: (value: typeof sampleTools) => void) => {
|
|
||||||
resolve(sampleTools)
|
|
||||||
return queryBuilder
|
|
||||||
},
|
|
||||||
catch: (reject: (error: Error) => void) => queryBuilder,
|
|
||||||
}
|
|
||||||
return queryBuilder
|
|
||||||
})
|
|
||||||
|
|
||||||
const { GET } = await import('@/app/api/tools/custom/route')
|
const { GET } = await import('@/app/api/tools/custom/route')
|
||||||
|
|
||||||
const response = await GET(req)
|
const response = await GET(req)
|
||||||
@@ -375,7 +369,8 @@ describe('Custom Tools API Routes', () => {
|
|||||||
})
|
})
|
||||||
|
|
||||||
it('should handle tool not found', async () => {
|
it('should handle tool not found', async () => {
|
||||||
mockLimit.mockResolvedValueOnce([])
|
const mockLimitNotFound = vi.fn().mockResolvedValue([])
|
||||||
|
mockWhere.mockReturnValueOnce({ limit: mockLimitNotFound })
|
||||||
|
|
||||||
const req = new NextRequest('http://localhost:3000/api/tools/custom?id=non-existent')
|
const req = new NextRequest('http://localhost:3000/api/tools/custom?id=non-existent')
|
||||||
|
|
||||||
@@ -398,7 +393,8 @@ describe('Custom Tools API Routes', () => {
|
|||||||
}))
|
}))
|
||||||
|
|
||||||
const userScopedTool = { ...sampleTools[0], workspaceId: null, userId: 'user-123' }
|
const userScopedTool = { ...sampleTools[0], workspaceId: null, userId: 'user-123' }
|
||||||
mockLimit.mockResolvedValueOnce([userScopedTool])
|
const mockLimitUserScoped = vi.fn().mockResolvedValue([userScopedTool])
|
||||||
|
mockWhere.mockReturnValueOnce({ limit: mockLimitUserScoped })
|
||||||
|
|
||||||
const req = new NextRequest('http://localhost:3000/api/tools/custom?id=tool-1')
|
const req = new NextRequest('http://localhost:3000/api/tools/custom?id=tool-1')
|
||||||
|
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
import { db } from '@sim/db'
|
import { db } from '@sim/db'
|
||||||
import { customTools, workflow } from '@sim/db/schema'
|
import { customTools } from '@sim/db/schema'
|
||||||
import { createLogger } from '@sim/logger'
|
import { createLogger } from '@sim/logger'
|
||||||
import { and, desc, eq, isNull, or } from 'drizzle-orm'
|
import { and, desc, eq, isNull, or } from 'drizzle-orm'
|
||||||
import { type NextRequest, NextResponse } from 'next/server'
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
@@ -7,6 +7,7 @@ import { z } from 'zod'
|
|||||||
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
|
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
|
||||||
import { generateRequestId } from '@/lib/core/utils/request'
|
import { generateRequestId } from '@/lib/core/utils/request'
|
||||||
import { upsertCustomTools } from '@/lib/workflows/custom-tools/operations'
|
import { upsertCustomTools } from '@/lib/workflows/custom-tools/operations'
|
||||||
|
import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils'
|
||||||
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
|
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
|
||||||
|
|
||||||
const logger = createLogger('CustomToolsAPI')
|
const logger = createLogger('CustomToolsAPI')
|
||||||
@@ -52,27 +53,32 @@ export async function GET(request: NextRequest) {
|
|||||||
const userId = authResult.userId
|
const userId = authResult.userId
|
||||||
|
|
||||||
let resolvedWorkspaceId: string | null = workspaceId
|
let resolvedWorkspaceId: string | null = workspaceId
|
||||||
|
let resolvedFromWorkflowAuthorization = false
|
||||||
|
|
||||||
if (!resolvedWorkspaceId && workflowId) {
|
if (!resolvedWorkspaceId && workflowId) {
|
||||||
const [workflowData] = await db
|
const workflowAuthorization = await authorizeWorkflowByWorkspacePermission({
|
||||||
.select({ workspaceId: workflow.workspaceId })
|
workflowId,
|
||||||
.from(workflow)
|
userId,
|
||||||
.where(eq(workflow.id, workflowId))
|
action: 'read',
|
||||||
.limit(1)
|
})
|
||||||
|
if (!workflowAuthorization.allowed) {
|
||||||
if (!workflowData) {
|
logger.warn(`[${requestId}] Workflow authorization failed for custom tools`, {
|
||||||
logger.warn(`[${requestId}] Workflow not found: ${workflowId}`)
|
workflowId,
|
||||||
return NextResponse.json({ error: 'Workflow not found' }, { status: 404 })
|
userId,
|
||||||
|
status: workflowAuthorization.status,
|
||||||
|
})
|
||||||
|
return NextResponse.json(
|
||||||
|
{ error: workflowAuthorization.message || 'Access denied' },
|
||||||
|
{ status: workflowAuthorization.status }
|
||||||
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
resolvedWorkspaceId = workflowData.workspaceId
|
resolvedWorkspaceId = workflowAuthorization.workflow?.workspaceId ?? null
|
||||||
|
resolvedFromWorkflowAuthorization = true
|
||||||
}
|
}
|
||||||
|
|
||||||
// Check workspace permissions
|
// Check workspace permissions for all auth types
|
||||||
// For internal JWT with workflowId: checkSessionOrInternalAuth already resolved userId from workflow owner
|
if (resolvedWorkspaceId && !resolvedFromWorkflowAuthorization) {
|
||||||
// For session: verify user has access to the workspace
|
|
||||||
// For legacy (no workspaceId): skip workspace check, rely on userId match
|
|
||||||
if (resolvedWorkspaceId && !(authResult.authType === 'internal_jwt' && workflowId)) {
|
|
||||||
const userPermission = await getUserEntityPermissions(
|
const userPermission = await getUserEntityPermissions(
|
||||||
userId,
|
userId,
|
||||||
'workspace',
|
'workspace',
|
||||||
|
|||||||
@@ -47,16 +47,9 @@ export async function POST(request: NextRequest) {
|
|||||||
(await getJiraCloudId(validatedData.domain, validatedData.accessToken))
|
(await getJiraCloudId(validatedData.domain, validatedData.accessToken))
|
||||||
|
|
||||||
const formData = new FormData()
|
const formData = new FormData()
|
||||||
const filesOutput: Array<{ name: string; mimeType: string; data: string; size: number }> = []
|
|
||||||
|
|
||||||
for (const file of userFiles) {
|
for (const file of userFiles) {
|
||||||
const buffer = await downloadFileFromStorage(file, requestId, logger)
|
const buffer = await downloadFileFromStorage(file, requestId, logger)
|
||||||
filesOutput.push({
|
|
||||||
name: file.name,
|
|
||||||
mimeType: file.type || 'application/octet-stream',
|
|
||||||
data: buffer.toString('base64'),
|
|
||||||
size: buffer.length,
|
|
||||||
})
|
|
||||||
const blob = new Blob([new Uint8Array(buffer)], {
|
const blob = new Blob([new Uint8Array(buffer)], {
|
||||||
type: file.type || 'application/octet-stream',
|
type: file.type || 'application/octet-stream',
|
||||||
})
|
})
|
||||||
@@ -90,18 +83,26 @@ export async function POST(request: NextRequest) {
|
|||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
const attachments = await response.json()
|
const jiraAttachments = await response.json()
|
||||||
const attachmentIds = Array.isArray(attachments)
|
const attachmentsList = Array.isArray(jiraAttachments) ? jiraAttachments : []
|
||||||
? attachments.map((attachment) => attachment.id).filter(Boolean)
|
|
||||||
: []
|
const attachmentIds = attachmentsList.map((att: any) => att.id).filter(Boolean)
|
||||||
|
const attachments = attachmentsList.map((att: any) => ({
|
||||||
|
id: att.id ?? '',
|
||||||
|
filename: att.filename ?? '',
|
||||||
|
mimeType: att.mimeType ?? '',
|
||||||
|
size: att.size ?? 0,
|
||||||
|
content: att.content ?? '',
|
||||||
|
}))
|
||||||
|
|
||||||
return NextResponse.json({
|
return NextResponse.json({
|
||||||
success: true,
|
success: true,
|
||||||
output: {
|
output: {
|
||||||
ts: new Date().toISOString(),
|
ts: new Date().toISOString(),
|
||||||
issueKey: validatedData.issueKey,
|
issueKey: validatedData.issueKey,
|
||||||
|
attachments,
|
||||||
attachmentIds,
|
attachmentIds,
|
||||||
files: filesOutput,
|
files: userFiles,
|
||||||
},
|
},
|
||||||
})
|
})
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
|
|||||||
@@ -1,111 +0,0 @@
|
|||||||
import { createLogger } from '@sim/logger'
|
|
||||||
import { type NextRequest, NextResponse } from 'next/server'
|
|
||||||
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
|
|
||||||
import { validateJiraCloudId, validateJiraIssueKey } from '@/lib/core/security/input-validation'
|
|
||||||
import { getJiraCloudId } from '@/tools/jira/utils'
|
|
||||||
|
|
||||||
export const dynamic = 'force-dynamic'
|
|
||||||
|
|
||||||
const logger = createLogger('JiraIssueAPI')
|
|
||||||
|
|
||||||
export async function POST(request: NextRequest) {
|
|
||||||
try {
|
|
||||||
const auth = await checkSessionOrInternalAuth(request)
|
|
||||||
if (!auth.success || !auth.userId) {
|
|
||||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const { domain, accessToken, issueId, cloudId: providedCloudId } = await request.json()
|
|
||||||
if (!domain) {
|
|
||||||
logger.error('Missing domain in request')
|
|
||||||
return NextResponse.json({ error: 'Domain is required' }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!accessToken) {
|
|
||||||
logger.error('Missing access token in request')
|
|
||||||
return NextResponse.json({ error: 'Access token is required' }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!issueId) {
|
|
||||||
logger.error('Missing issue ID in request')
|
|
||||||
return NextResponse.json({ error: 'Issue ID is required' }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const cloudId = providedCloudId || (await getJiraCloudId(domain, accessToken))
|
|
||||||
logger.info('Using cloud ID:', cloudId)
|
|
||||||
|
|
||||||
const cloudIdValidation = validateJiraCloudId(cloudId, 'cloudId')
|
|
||||||
if (!cloudIdValidation.isValid) {
|
|
||||||
return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const issueIdValidation = validateJiraIssueKey(issueId, 'issueId')
|
|
||||||
if (!issueIdValidation.isValid) {
|
|
||||||
return NextResponse.json({ error: issueIdValidation.error }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const url = `https://api.atlassian.com/ex/jira/${cloudId}/rest/api/3/issue/${issueId}`
|
|
||||||
|
|
||||||
logger.info('Fetching Jira issue from:', url)
|
|
||||||
|
|
||||||
const response = await fetch(url, {
|
|
||||||
method: 'GET',
|
|
||||||
headers: {
|
|
||||||
Authorization: `Bearer ${accessToken}`,
|
|
||||||
Accept: 'application/json',
|
|
||||||
},
|
|
||||||
})
|
|
||||||
|
|
||||||
if (!response.ok) {
|
|
||||||
logger.error('Jira API error:', {
|
|
||||||
status: response.status,
|
|
||||||
statusText: response.statusText,
|
|
||||||
})
|
|
||||||
|
|
||||||
let errorMessage
|
|
||||||
try {
|
|
||||||
const errorData = await response.json()
|
|
||||||
logger.error('Error details:', errorData)
|
|
||||||
errorMessage = errorData.message || `Failed to fetch issue (${response.status})`
|
|
||||||
} catch (_e) {
|
|
||||||
errorMessage = `Failed to fetch issue: ${response.status} ${response.statusText}`
|
|
||||||
}
|
|
||||||
return NextResponse.json({ error: errorMessage }, { status: response.status })
|
|
||||||
}
|
|
||||||
|
|
||||||
const data = await response.json()
|
|
||||||
logger.info('Successfully fetched issue:', data.key)
|
|
||||||
|
|
||||||
const issueInfo: any = {
|
|
||||||
id: data.key,
|
|
||||||
name: data.fields.summary,
|
|
||||||
mimeType: 'jira/issue',
|
|
||||||
url: `https://${domain}/browse/${data.key}`,
|
|
||||||
modifiedTime: data.fields.updated,
|
|
||||||
webViewLink: `https://${domain}/browse/${data.key}`,
|
|
||||||
status: data.fields.status?.name,
|
|
||||||
description: data.fields.description,
|
|
||||||
priority: data.fields.priority?.name,
|
|
||||||
assignee: data.fields.assignee?.displayName,
|
|
||||||
reporter: data.fields.reporter?.displayName,
|
|
||||||
project: {
|
|
||||||
key: data.fields.project?.key,
|
|
||||||
name: data.fields.project?.name,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
return NextResponse.json({
|
|
||||||
issue: issueInfo,
|
|
||||||
cloudId,
|
|
||||||
})
|
|
||||||
} catch (error) {
|
|
||||||
logger.error('Error processing request:', error)
|
|
||||||
return NextResponse.json(
|
|
||||||
{
|
|
||||||
error: 'Failed to retrieve Jira issue',
|
|
||||||
details: (error as Error).message,
|
|
||||||
},
|
|
||||||
{ status: 500 }
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -16,9 +16,16 @@ const jiraUpdateSchema = z.object({
|
|||||||
summary: z.string().optional(),
|
summary: z.string().optional(),
|
||||||
title: z.string().optional(),
|
title: z.string().optional(),
|
||||||
description: z.string().optional(),
|
description: z.string().optional(),
|
||||||
status: z.string().optional(),
|
|
||||||
priority: z.string().optional(),
|
priority: z.string().optional(),
|
||||||
assignee: z.string().optional(),
|
assignee: z.string().optional(),
|
||||||
|
labels: z.array(z.string()).optional(),
|
||||||
|
components: z.array(z.string()).optional(),
|
||||||
|
duedate: z.string().optional(),
|
||||||
|
fixVersions: z.array(z.string()).optional(),
|
||||||
|
environment: z.string().optional(),
|
||||||
|
customFieldId: z.string().optional(),
|
||||||
|
customFieldValue: z.string().optional(),
|
||||||
|
notifyUsers: z.boolean().optional(),
|
||||||
cloudId: z.string().optional(),
|
cloudId: z.string().optional(),
|
||||||
})
|
})
|
||||||
|
|
||||||
@@ -45,9 +52,16 @@ export async function PUT(request: NextRequest) {
|
|||||||
summary,
|
summary,
|
||||||
title,
|
title,
|
||||||
description,
|
description,
|
||||||
status,
|
|
||||||
priority,
|
priority,
|
||||||
assignee,
|
assignee,
|
||||||
|
labels,
|
||||||
|
components,
|
||||||
|
duedate,
|
||||||
|
fixVersions,
|
||||||
|
environment,
|
||||||
|
customFieldId,
|
||||||
|
customFieldValue,
|
||||||
|
notifyUsers,
|
||||||
cloudId: providedCloudId,
|
cloudId: providedCloudId,
|
||||||
} = validation.data
|
} = validation.data
|
||||||
|
|
||||||
@@ -64,7 +78,8 @@ export async function PUT(request: NextRequest) {
|
|||||||
return NextResponse.json({ error: issueKeyValidation.error }, { status: 400 })
|
return NextResponse.json({ error: issueKeyValidation.error }, { status: 400 })
|
||||||
}
|
}
|
||||||
|
|
||||||
const url = `https://api.atlassian.com/ex/jira/${cloudId}/rest/api/3/issue/${issueKey}`
|
const notifyParam = notifyUsers === false ? '?notifyUsers=false' : ''
|
||||||
|
const url = `https://api.atlassian.com/ex/jira/${cloudId}/rest/api/3/issue/${issueKey}${notifyParam}`
|
||||||
|
|
||||||
logger.info('Updating Jira issue at:', url)
|
logger.info('Updating Jira issue at:', url)
|
||||||
|
|
||||||
@@ -93,24 +108,65 @@ export async function PUT(request: NextRequest) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if (status !== undefined && status !== null && status !== '') {
|
|
||||||
fields.status = {
|
|
||||||
name: status,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (priority !== undefined && priority !== null && priority !== '') {
|
if (priority !== undefined && priority !== null && priority !== '') {
|
||||||
fields.priority = {
|
const isNumericId = /^\d+$/.test(priority)
|
||||||
name: priority,
|
fields.priority = isNumericId ? { id: priority } : { name: priority }
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if (assignee !== undefined && assignee !== null && assignee !== '') {
|
if (assignee !== undefined && assignee !== null && assignee !== '') {
|
||||||
fields.assignee = {
|
fields.assignee = {
|
||||||
id: assignee,
|
accountId: assignee,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if (labels !== undefined && labels !== null && labels.length > 0) {
|
||||||
|
fields.labels = labels
|
||||||
|
}
|
||||||
|
|
||||||
|
if (components !== undefined && components !== null && components.length > 0) {
|
||||||
|
fields.components = components.map((name) => ({ name }))
|
||||||
|
}
|
||||||
|
|
||||||
|
if (duedate !== undefined && duedate !== null && duedate !== '') {
|
||||||
|
fields.duedate = duedate
|
||||||
|
}
|
||||||
|
|
||||||
|
if (fixVersions !== undefined && fixVersions !== null && fixVersions.length > 0) {
|
||||||
|
fields.fixVersions = fixVersions.map((name) => ({ name }))
|
||||||
|
}
|
||||||
|
|
||||||
|
if (environment !== undefined && environment !== null && environment !== '') {
|
||||||
|
fields.environment = {
|
||||||
|
type: 'doc',
|
||||||
|
version: 1,
|
||||||
|
content: [
|
||||||
|
{
|
||||||
|
type: 'paragraph',
|
||||||
|
content: [
|
||||||
|
{
|
||||||
|
type: 'text',
|
||||||
|
text: environment,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
],
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (
|
||||||
|
customFieldId !== undefined &&
|
||||||
|
customFieldId !== null &&
|
||||||
|
customFieldId !== '' &&
|
||||||
|
customFieldValue !== undefined &&
|
||||||
|
customFieldValue !== null &&
|
||||||
|
customFieldValue !== ''
|
||||||
|
) {
|
||||||
|
const fieldId = customFieldId.startsWith('customfield_')
|
||||||
|
? customFieldId
|
||||||
|
: `customfield_${customFieldId}`
|
||||||
|
fields[fieldId] = customFieldValue
|
||||||
|
}
|
||||||
|
|
||||||
const requestBody = { fields }
|
const requestBody = { fields }
|
||||||
|
|
||||||
const response = await fetch(url, {
|
const response = await fetch(url, {
|
||||||
|
|||||||
@@ -32,6 +32,8 @@ export async function POST(request: NextRequest) {
|
|||||||
environment,
|
environment,
|
||||||
customFieldId,
|
customFieldId,
|
||||||
customFieldValue,
|
customFieldValue,
|
||||||
|
components,
|
||||||
|
fixVersions,
|
||||||
} = await request.json()
|
} = await request.json()
|
||||||
|
|
||||||
if (!domain) {
|
if (!domain) {
|
||||||
@@ -73,10 +75,9 @@ export async function POST(request: NextRequest) {
|
|||||||
|
|
||||||
logger.info('Creating Jira issue at:', url)
|
logger.info('Creating Jira issue at:', url)
|
||||||
|
|
||||||
|
const isNumericProjectId = /^\d+$/.test(projectId)
|
||||||
const fields: Record<string, any> = {
|
const fields: Record<string, any> = {
|
||||||
project: {
|
project: isNumericProjectId ? { id: projectId } : { key: projectId },
|
||||||
id: projectId,
|
|
||||||
},
|
|
||||||
issuetype: {
|
issuetype: {
|
||||||
name: normalizedIssueType,
|
name: normalizedIssueType,
|
||||||
},
|
},
|
||||||
@@ -114,13 +115,31 @@ export async function POST(request: NextRequest) {
|
|||||||
fields.labels = labels
|
fields.labels = labels
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if (
|
||||||
|
components !== undefined &&
|
||||||
|
components !== null &&
|
||||||
|
Array.isArray(components) &&
|
||||||
|
components.length > 0
|
||||||
|
) {
|
||||||
|
fields.components = components.map((name: string) => ({ name }))
|
||||||
|
}
|
||||||
|
|
||||||
if (duedate !== undefined && duedate !== null && duedate !== '') {
|
if (duedate !== undefined && duedate !== null && duedate !== '') {
|
||||||
fields.duedate = duedate
|
fields.duedate = duedate
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if (
|
||||||
|
fixVersions !== undefined &&
|
||||||
|
fixVersions !== null &&
|
||||||
|
Array.isArray(fixVersions) &&
|
||||||
|
fixVersions.length > 0
|
||||||
|
) {
|
||||||
|
fields.fixVersions = fixVersions.map((name: string) => ({ name }))
|
||||||
|
}
|
||||||
|
|
||||||
if (reporter !== undefined && reporter !== null && reporter !== '') {
|
if (reporter !== undefined && reporter !== null && reporter !== '') {
|
||||||
fields.reporter = {
|
fields.reporter = {
|
||||||
id: reporter,
|
accountId: reporter,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -220,8 +239,10 @@ export async function POST(request: NextRequest) {
|
|||||||
success: true,
|
success: true,
|
||||||
output: {
|
output: {
|
||||||
ts: new Date().toISOString(),
|
ts: new Date().toISOString(),
|
||||||
|
id: responseData.id || '',
|
||||||
issueKey: issueKey,
|
issueKey: issueKey,
|
||||||
summary: responseData.fields?.summary || 'Issue created',
|
self: responseData.self || '',
|
||||||
|
summary: responseData.fields?.summary || summary || 'Issue created',
|
||||||
success: true,
|
success: true,
|
||||||
url: `https://${domain}/browse/${issueKey}`,
|
url: `https://${domain}/browse/${issueKey}`,
|
||||||
...(assigneeId && { assigneeId }),
|
...(assigneeId && { assigneeId }),
|
||||||
|
|||||||
@@ -165,8 +165,26 @@ export async function POST(request: NextRequest) {
|
|||||||
issueIdOrKey,
|
issueIdOrKey,
|
||||||
approvalId,
|
approvalId,
|
||||||
decision,
|
decision,
|
||||||
success: true,
|
id: data.id ?? null,
|
||||||
|
name: data.name ?? null,
|
||||||
|
finalDecision: data.finalDecision ?? null,
|
||||||
|
canAnswerApproval: data.canAnswerApproval ?? null,
|
||||||
|
approvers: (data.approvers ?? []).map((a: Record<string, unknown>) => {
|
||||||
|
const approver = a.approver as Record<string, unknown> | undefined
|
||||||
|
return {
|
||||||
|
approver: {
|
||||||
|
accountId: approver?.accountId ?? null,
|
||||||
|
displayName: approver?.displayName ?? null,
|
||||||
|
emailAddress: approver?.emailAddress ?? null,
|
||||||
|
active: approver?.active ?? null,
|
||||||
|
},
|
||||||
|
approverDecision: a.approverDecision ?? null,
|
||||||
|
}
|
||||||
|
}),
|
||||||
|
createdDate: data.createdDate ?? null,
|
||||||
|
completedDate: data.completedDate ?? null,
|
||||||
approval: data,
|
approval: data,
|
||||||
|
success: true,
|
||||||
},
|
},
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -95,6 +95,14 @@ export async function POST(request: NextRequest) {
|
|||||||
commentId: data.id,
|
commentId: data.id,
|
||||||
body: data.body,
|
body: data.body,
|
||||||
isPublic: data.public,
|
isPublic: data.public,
|
||||||
|
author: data.author
|
||||||
|
? {
|
||||||
|
accountId: data.author.accountId ?? null,
|
||||||
|
displayName: data.author.displayName ?? null,
|
||||||
|
emailAddress: data.author.emailAddress ?? null,
|
||||||
|
}
|
||||||
|
: null,
|
||||||
|
createdDate: data.created ?? null,
|
||||||
success: true,
|
success: true,
|
||||||
},
|
},
|
||||||
})
|
})
|
||||||
|
|||||||
@@ -23,6 +23,7 @@ export async function POST(request: NextRequest) {
|
|||||||
issueIdOrKey,
|
issueIdOrKey,
|
||||||
isPublic,
|
isPublic,
|
||||||
internal,
|
internal,
|
||||||
|
expand,
|
||||||
start,
|
start,
|
||||||
limit,
|
limit,
|
||||||
} = body
|
} = body
|
||||||
@@ -57,8 +58,9 @@ export async function POST(request: NextRequest) {
|
|||||||
const baseUrl = getJsmApiBaseUrl(cloudId)
|
const baseUrl = getJsmApiBaseUrl(cloudId)
|
||||||
|
|
||||||
const params = new URLSearchParams()
|
const params = new URLSearchParams()
|
||||||
if (isPublic) params.append('public', isPublic)
|
if (isPublic !== undefined) params.append('public', String(isPublic))
|
||||||
if (internal) params.append('internal', internal)
|
if (internal !== undefined) params.append('internal', String(internal))
|
||||||
|
if (expand) params.append('expand', expand)
|
||||||
if (start) params.append('start', start)
|
if (start) params.append('start', start)
|
||||||
if (limit) params.append('limit', limit)
|
if (limit) params.append('limit', limit)
|
||||||
|
|
||||||
|
|||||||
@@ -24,6 +24,7 @@ export async function POST(request: NextRequest) {
|
|||||||
query,
|
query,
|
||||||
start,
|
start,
|
||||||
limit,
|
limit,
|
||||||
|
accountIds,
|
||||||
emails,
|
emails,
|
||||||
} = body
|
} = body
|
||||||
|
|
||||||
@@ -56,24 +57,27 @@ export async function POST(request: NextRequest) {
|
|||||||
|
|
||||||
const baseUrl = getJsmApiBaseUrl(cloudId)
|
const baseUrl = getJsmApiBaseUrl(cloudId)
|
||||||
|
|
||||||
const parsedEmails = emails
|
const rawIds = accountIds || emails
|
||||||
? typeof emails === 'string'
|
const parsedAccountIds = rawIds
|
||||||
? emails
|
? typeof rawIds === 'string'
|
||||||
|
? rawIds
|
||||||
.split(',')
|
.split(',')
|
||||||
.map((email: string) => email.trim())
|
.map((id: string) => id.trim())
|
||||||
.filter((email: string) => email)
|
.filter((id: string) => id)
|
||||||
: emails
|
: Array.isArray(rawIds)
|
||||||
|
? rawIds
|
||||||
|
: []
|
||||||
: []
|
: []
|
||||||
|
|
||||||
const isAddOperation = parsedEmails.length > 0
|
const isAddOperation = parsedAccountIds.length > 0
|
||||||
|
|
||||||
if (isAddOperation) {
|
if (isAddOperation) {
|
||||||
const url = `${baseUrl}/servicedesk/${serviceDeskId}/customer`
|
const url = `${baseUrl}/servicedesk/${serviceDeskId}/customer`
|
||||||
|
|
||||||
logger.info('Adding customers to:', url, { emails: parsedEmails })
|
logger.info('Adding customers to:', url, { accountIds: parsedAccountIds })
|
||||||
|
|
||||||
const requestBody: Record<string, unknown> = {
|
const requestBody: Record<string, unknown> = {
|
||||||
usernames: parsedEmails,
|
accountIds: parsedAccountIds,
|
||||||
}
|
}
|
||||||
|
|
||||||
const response = await fetch(url, {
|
const response = await fetch(url, {
|
||||||
|
|||||||
@@ -31,6 +31,9 @@ export async function POST(request: NextRequest) {
|
|||||||
description,
|
description,
|
||||||
raiseOnBehalfOf,
|
raiseOnBehalfOf,
|
||||||
requestFieldValues,
|
requestFieldValues,
|
||||||
|
requestParticipants,
|
||||||
|
channel,
|
||||||
|
expand,
|
||||||
} = body
|
} = body
|
||||||
|
|
||||||
if (!domain) {
|
if (!domain) {
|
||||||
@@ -80,6 +83,19 @@ export async function POST(request: NextRequest) {
|
|||||||
if (raiseOnBehalfOf) {
|
if (raiseOnBehalfOf) {
|
||||||
requestBody.raiseOnBehalfOf = raiseOnBehalfOf
|
requestBody.raiseOnBehalfOf = raiseOnBehalfOf
|
||||||
}
|
}
|
||||||
|
if (requestParticipants) {
|
||||||
|
requestBody.requestParticipants = Array.isArray(requestParticipants)
|
||||||
|
? requestParticipants
|
||||||
|
: typeof requestParticipants === 'string'
|
||||||
|
? requestParticipants
|
||||||
|
.split(',')
|
||||||
|
.map((id: string) => id.trim())
|
||||||
|
.filter(Boolean)
|
||||||
|
: []
|
||||||
|
}
|
||||||
|
if (channel) {
|
||||||
|
requestBody.channel = channel
|
||||||
|
}
|
||||||
|
|
||||||
const response = await fetch(url, {
|
const response = await fetch(url, {
|
||||||
method: 'POST',
|
method: 'POST',
|
||||||
@@ -111,6 +127,21 @@ export async function POST(request: NextRequest) {
|
|||||||
issueKey: data.issueKey,
|
issueKey: data.issueKey,
|
||||||
requestTypeId: data.requestTypeId,
|
requestTypeId: data.requestTypeId,
|
||||||
serviceDeskId: data.serviceDeskId,
|
serviceDeskId: data.serviceDeskId,
|
||||||
|
createdDate: data.createdDate ?? null,
|
||||||
|
currentStatus: data.currentStatus
|
||||||
|
? {
|
||||||
|
status: data.currentStatus.status ?? null,
|
||||||
|
statusCategory: data.currentStatus.statusCategory ?? null,
|
||||||
|
statusDate: data.currentStatus.statusDate ?? null,
|
||||||
|
}
|
||||||
|
: null,
|
||||||
|
reporter: data.reporter
|
||||||
|
? {
|
||||||
|
accountId: data.reporter.accountId ?? null,
|
||||||
|
displayName: data.reporter.displayName ?? null,
|
||||||
|
emailAddress: data.reporter.emailAddress ?? null,
|
||||||
|
}
|
||||||
|
: null,
|
||||||
success: true,
|
success: true,
|
||||||
url: `https://${domain}/browse/${data.issueKey}`,
|
url: `https://${domain}/browse/${data.issueKey}`,
|
||||||
},
|
},
|
||||||
@@ -126,7 +157,10 @@ export async function POST(request: NextRequest) {
|
|||||||
return NextResponse.json({ error: issueIdOrKeyValidation.error }, { status: 400 })
|
return NextResponse.json({ error: issueIdOrKeyValidation.error }, { status: 400 })
|
||||||
}
|
}
|
||||||
|
|
||||||
const url = `${baseUrl}/request/${issueIdOrKey}`
|
const params = new URLSearchParams()
|
||||||
|
if (expand) params.append('expand', expand)
|
||||||
|
|
||||||
|
const url = `${baseUrl}/request/${issueIdOrKey}${params.toString() ? `?${params.toString()}` : ''}`
|
||||||
|
|
||||||
logger.info('Fetching request from:', url)
|
logger.info('Fetching request from:', url)
|
||||||
|
|
||||||
@@ -155,6 +189,32 @@ export async function POST(request: NextRequest) {
|
|||||||
success: true,
|
success: true,
|
||||||
output: {
|
output: {
|
||||||
ts: new Date().toISOString(),
|
ts: new Date().toISOString(),
|
||||||
|
issueId: data.issueId ?? null,
|
||||||
|
issueKey: data.issueKey ?? null,
|
||||||
|
requestTypeId: data.requestTypeId ?? null,
|
||||||
|
serviceDeskId: data.serviceDeskId ?? null,
|
||||||
|
createdDate: data.createdDate ?? null,
|
||||||
|
currentStatus: data.currentStatus
|
||||||
|
? {
|
||||||
|
status: data.currentStatus.status ?? null,
|
||||||
|
statusCategory: data.currentStatus.statusCategory ?? null,
|
||||||
|
statusDate: data.currentStatus.statusDate ?? null,
|
||||||
|
}
|
||||||
|
: null,
|
||||||
|
reporter: data.reporter
|
||||||
|
? {
|
||||||
|
accountId: data.reporter.accountId ?? null,
|
||||||
|
displayName: data.reporter.displayName ?? null,
|
||||||
|
emailAddress: data.reporter.emailAddress ?? null,
|
||||||
|
active: data.reporter.active ?? true,
|
||||||
|
}
|
||||||
|
: null,
|
||||||
|
requestFieldValues: (data.requestFieldValues ?? []).map((fv: Record<string, unknown>) => ({
|
||||||
|
fieldId: fv.fieldId ?? null,
|
||||||
|
label: fv.label ?? null,
|
||||||
|
value: fv.value ?? null,
|
||||||
|
})),
|
||||||
|
url: `https://${domain}/browse/${data.issueKey}`,
|
||||||
request: data,
|
request: data,
|
||||||
},
|
},
|
||||||
})
|
})
|
||||||
|
|||||||
@@ -1,7 +1,11 @@
|
|||||||
import { createLogger } from '@sim/logger'
|
import { createLogger } from '@sim/logger'
|
||||||
import { type NextRequest, NextResponse } from 'next/server'
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||||
import { validateAlphanumericId, validateJiraCloudId } from '@/lib/core/security/input-validation'
|
import {
|
||||||
|
validateAlphanumericId,
|
||||||
|
validateEnum,
|
||||||
|
validateJiraCloudId,
|
||||||
|
} from '@/lib/core/security/input-validation'
|
||||||
import { getJiraCloudId, getJsmApiBaseUrl, getJsmHeaders } from '@/tools/jsm/utils'
|
import { getJiraCloudId, getJsmApiBaseUrl, getJsmHeaders } from '@/tools/jsm/utils'
|
||||||
|
|
||||||
export const dynamic = 'force-dynamic'
|
export const dynamic = 'force-dynamic'
|
||||||
@@ -23,7 +27,9 @@ export async function POST(request: NextRequest) {
|
|||||||
serviceDeskId,
|
serviceDeskId,
|
||||||
requestOwnership,
|
requestOwnership,
|
||||||
requestStatus,
|
requestStatus,
|
||||||
|
requestTypeId,
|
||||||
searchTerm,
|
searchTerm,
|
||||||
|
expand,
|
||||||
start,
|
start,
|
||||||
limit,
|
limit,
|
||||||
} = body
|
} = body
|
||||||
@@ -52,17 +58,45 @@ export async function POST(request: NextRequest) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const VALID_REQUEST_OWNERSHIP = [
|
||||||
|
'OWNED_REQUESTS',
|
||||||
|
'PARTICIPATED_REQUESTS',
|
||||||
|
'APPROVER',
|
||||||
|
'ALL_REQUESTS',
|
||||||
|
] as const
|
||||||
|
const VALID_REQUEST_STATUS = ['OPEN_REQUESTS', 'CLOSED_REQUESTS', 'ALL_REQUESTS'] as const
|
||||||
|
|
||||||
|
if (requestOwnership) {
|
||||||
|
const ownershipValidation = validateEnum(
|
||||||
|
requestOwnership,
|
||||||
|
VALID_REQUEST_OWNERSHIP,
|
||||||
|
'requestOwnership'
|
||||||
|
)
|
||||||
|
if (!ownershipValidation.isValid) {
|
||||||
|
return NextResponse.json({ error: ownershipValidation.error }, { status: 400 })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (requestStatus) {
|
||||||
|
const statusValidation = validateEnum(requestStatus, VALID_REQUEST_STATUS, 'requestStatus')
|
||||||
|
if (!statusValidation.isValid) {
|
||||||
|
return NextResponse.json({ error: statusValidation.error }, { status: 400 })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
const baseUrl = getJsmApiBaseUrl(cloudId)
|
const baseUrl = getJsmApiBaseUrl(cloudId)
|
||||||
|
|
||||||
const params = new URLSearchParams()
|
const params = new URLSearchParams()
|
||||||
if (serviceDeskId) params.append('serviceDeskId', serviceDeskId)
|
if (serviceDeskId) params.append('serviceDeskId', serviceDeskId)
|
||||||
if (requestOwnership && requestOwnership !== 'ALL_REQUESTS') {
|
if (requestOwnership) {
|
||||||
params.append('requestOwnership', requestOwnership)
|
params.append('requestOwnership', requestOwnership)
|
||||||
}
|
}
|
||||||
if (requestStatus && requestStatus !== 'ALL') {
|
if (requestStatus) {
|
||||||
params.append('requestStatus', requestStatus)
|
params.append('requestStatus', requestStatus)
|
||||||
}
|
}
|
||||||
|
if (requestTypeId) params.append('requestTypeId', requestTypeId)
|
||||||
if (searchTerm) params.append('searchTerm', searchTerm)
|
if (searchTerm) params.append('searchTerm', searchTerm)
|
||||||
|
if (expand) params.append('expand', expand)
|
||||||
if (start) params.append('start', start)
|
if (start) params.append('start', start)
|
||||||
if (limit) params.append('limit', limit)
|
if (limit) params.append('limit', limit)
|
||||||
|
|
||||||
|
|||||||
119
apps/sim/app/api/tools/jsm/requesttypefields/route.ts
Normal file
119
apps/sim/app/api/tools/jsm/requesttypefields/route.ts
Normal file
@@ -0,0 +1,119 @@
|
|||||||
|
import { createLogger } from '@sim/logger'
|
||||||
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
|
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||||
|
import { validateAlphanumericId, validateJiraCloudId } from '@/lib/core/security/input-validation'
|
||||||
|
import { getJiraCloudId, getJsmApiBaseUrl, getJsmHeaders } from '@/tools/jsm/utils'
|
||||||
|
|
||||||
|
export const dynamic = 'force-dynamic'
|
||||||
|
|
||||||
|
const logger = createLogger('JsmRequestTypeFieldsAPI')
|
||||||
|
|
||||||
|
export async function POST(request: NextRequest) {
|
||||||
|
const auth = await checkInternalAuth(request)
|
||||||
|
if (!auth.success || !auth.userId) {
|
||||||
|
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const body = await request.json()
|
||||||
|
const { domain, accessToken, cloudId: cloudIdParam, serviceDeskId, requestTypeId } = body
|
||||||
|
|
||||||
|
if (!domain) {
|
||||||
|
logger.error('Missing domain in request')
|
||||||
|
return NextResponse.json({ error: 'Domain is required' }, { status: 400 })
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!accessToken) {
|
||||||
|
logger.error('Missing access token in request')
|
||||||
|
return NextResponse.json({ error: 'Access token is required' }, { status: 400 })
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!serviceDeskId) {
|
||||||
|
logger.error('Missing serviceDeskId in request')
|
||||||
|
return NextResponse.json({ error: 'Service Desk ID is required' }, { status: 400 })
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!requestTypeId) {
|
||||||
|
logger.error('Missing requestTypeId in request')
|
||||||
|
return NextResponse.json({ error: 'Request Type ID is required' }, { status: 400 })
|
||||||
|
}
|
||||||
|
|
||||||
|
const cloudId = cloudIdParam || (await getJiraCloudId(domain, accessToken))
|
||||||
|
|
||||||
|
const cloudIdValidation = validateJiraCloudId(cloudId, 'cloudId')
|
||||||
|
if (!cloudIdValidation.isValid) {
|
||||||
|
return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 })
|
||||||
|
}
|
||||||
|
|
||||||
|
const serviceDeskIdValidation = validateAlphanumericId(serviceDeskId, 'serviceDeskId')
|
||||||
|
if (!serviceDeskIdValidation.isValid) {
|
||||||
|
return NextResponse.json({ error: serviceDeskIdValidation.error }, { status: 400 })
|
||||||
|
}
|
||||||
|
|
||||||
|
const requestTypeIdValidation = validateAlphanumericId(requestTypeId, 'requestTypeId')
|
||||||
|
if (!requestTypeIdValidation.isValid) {
|
||||||
|
return NextResponse.json({ error: requestTypeIdValidation.error }, { status: 400 })
|
||||||
|
}
|
||||||
|
|
||||||
|
const baseUrl = getJsmApiBaseUrl(cloudId)
|
||||||
|
const url = `${baseUrl}/servicedesk/${serviceDeskId}/requesttype/${requestTypeId}/field`
|
||||||
|
|
||||||
|
logger.info('Fetching request type fields from:', url)
|
||||||
|
|
||||||
|
const response = await fetch(url, {
|
||||||
|
method: 'GET',
|
||||||
|
headers: getJsmHeaders(accessToken),
|
||||||
|
})
|
||||||
|
|
||||||
|
if (!response.ok) {
|
||||||
|
const errorText = await response.text()
|
||||||
|
logger.error('JSM API error:', {
|
||||||
|
status: response.status,
|
||||||
|
statusText: response.statusText,
|
||||||
|
error: errorText,
|
||||||
|
})
|
||||||
|
|
||||||
|
return NextResponse.json(
|
||||||
|
{ error: `JSM API error: ${response.status} ${response.statusText}`, details: errorText },
|
||||||
|
{ status: response.status }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const data = await response.json()
|
||||||
|
|
||||||
|
return NextResponse.json({
|
||||||
|
success: true,
|
||||||
|
output: {
|
||||||
|
ts: new Date().toISOString(),
|
||||||
|
serviceDeskId,
|
||||||
|
requestTypeId,
|
||||||
|
canAddRequestParticipants: data.canAddRequestParticipants ?? false,
|
||||||
|
canRaiseOnBehalfOf: data.canRaiseOnBehalfOf ?? false,
|
||||||
|
requestTypeFields: (data.requestTypeFields ?? []).map((field: Record<string, unknown>) => ({
|
||||||
|
fieldId: field.fieldId ?? null,
|
||||||
|
name: field.name ?? null,
|
||||||
|
description: field.description ?? null,
|
||||||
|
required: field.required ?? false,
|
||||||
|
visible: field.visible ?? true,
|
||||||
|
validValues: field.validValues ?? [],
|
||||||
|
presetValues: field.presetValues ?? [],
|
||||||
|
defaultValues: field.defaultValues ?? [],
|
||||||
|
jiraSchema: field.jiraSchema ?? null,
|
||||||
|
})),
|
||||||
|
},
|
||||||
|
})
|
||||||
|
} catch (error) {
|
||||||
|
logger.error('Error fetching request type fields:', {
|
||||||
|
error: error instanceof Error ? error.message : String(error),
|
||||||
|
stack: error instanceof Error ? error.stack : undefined,
|
||||||
|
})
|
||||||
|
|
||||||
|
return NextResponse.json(
|
||||||
|
{
|
||||||
|
error: error instanceof Error ? error.message : 'Internal server error',
|
||||||
|
success: false,
|
||||||
|
},
|
||||||
|
{ status: 500 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -16,7 +16,17 @@ export async function POST(request: NextRequest) {
|
|||||||
|
|
||||||
try {
|
try {
|
||||||
const body = await request.json()
|
const body = await request.json()
|
||||||
const { domain, accessToken, cloudId: cloudIdParam, serviceDeskId, start, limit } = body
|
const {
|
||||||
|
domain,
|
||||||
|
accessToken,
|
||||||
|
cloudId: cloudIdParam,
|
||||||
|
serviceDeskId,
|
||||||
|
searchQuery,
|
||||||
|
groupId,
|
||||||
|
expand,
|
||||||
|
start,
|
||||||
|
limit,
|
||||||
|
} = body
|
||||||
|
|
||||||
if (!domain) {
|
if (!domain) {
|
||||||
logger.error('Missing domain in request')
|
logger.error('Missing domain in request')
|
||||||
@@ -48,6 +58,9 @@ export async function POST(request: NextRequest) {
|
|||||||
const baseUrl = getJsmApiBaseUrl(cloudId)
|
const baseUrl = getJsmApiBaseUrl(cloudId)
|
||||||
|
|
||||||
const params = new URLSearchParams()
|
const params = new URLSearchParams()
|
||||||
|
if (searchQuery) params.append('searchQuery', searchQuery)
|
||||||
|
if (groupId) params.append('groupId', groupId)
|
||||||
|
if (expand) params.append('expand', expand)
|
||||||
if (start) params.append('start', start)
|
if (start) params.append('start', start)
|
||||||
if (limit) params.append('limit', limit)
|
if (limit) params.append('limit', limit)
|
||||||
|
|
||||||
|
|||||||
@@ -16,7 +16,7 @@ export async function POST(request: NextRequest) {
|
|||||||
|
|
||||||
try {
|
try {
|
||||||
const body = await request.json()
|
const body = await request.json()
|
||||||
const { domain, accessToken, cloudId: cloudIdParam, start, limit } = body
|
const { domain, accessToken, cloudId: cloudIdParam, expand, start, limit } = body
|
||||||
|
|
||||||
if (!domain) {
|
if (!domain) {
|
||||||
logger.error('Missing domain in request')
|
logger.error('Missing domain in request')
|
||||||
@@ -38,6 +38,7 @@ export async function POST(request: NextRequest) {
|
|||||||
const baseUrl = getJsmApiBaseUrl(cloudId)
|
const baseUrl = getJsmApiBaseUrl(cloudId)
|
||||||
|
|
||||||
const params = new URLSearchParams()
|
const params = new URLSearchParams()
|
||||||
|
if (expand) params.append('expand', expand)
|
||||||
if (start) params.append('start', start)
|
if (start) params.append('start', start)
|
||||||
if (limit) params.append('limit', limit)
|
if (limit) params.append('limit', limit)
|
||||||
|
|
||||||
|
|||||||
@@ -16,7 +16,7 @@ export async function POST(request: NextRequest) {
|
|||||||
|
|
||||||
try {
|
try {
|
||||||
const body = await request.json()
|
const body = await request.json()
|
||||||
const { domain, accessToken, cloudId: cloudIdParam, issueIdOrKey } = body
|
const { domain, accessToken, cloudId: cloudIdParam, issueIdOrKey, start, limit } = body
|
||||||
|
|
||||||
if (!domain) {
|
if (!domain) {
|
||||||
logger.error('Missing domain in request')
|
logger.error('Missing domain in request')
|
||||||
@@ -47,7 +47,11 @@ export async function POST(request: NextRequest) {
|
|||||||
|
|
||||||
const baseUrl = getJsmApiBaseUrl(cloudId)
|
const baseUrl = getJsmApiBaseUrl(cloudId)
|
||||||
|
|
||||||
const url = `${baseUrl}/request/${issueIdOrKey}/transition`
|
const params = new URLSearchParams()
|
||||||
|
if (start) params.append('start', start)
|
||||||
|
if (limit) params.append('limit', limit)
|
||||||
|
|
||||||
|
const url = `${baseUrl}/request/${issueIdOrKey}/transition${params.toString() ? `?${params.toString()}` : ''}`
|
||||||
|
|
||||||
logger.info('Fetching transitions from:', url)
|
logger.info('Fetching transitions from:', url)
|
||||||
|
|
||||||
@@ -78,6 +82,8 @@ export async function POST(request: NextRequest) {
|
|||||||
ts: new Date().toISOString(),
|
ts: new Date().toISOString(),
|
||||||
issueIdOrKey,
|
issueIdOrKey,
|
||||||
transitions: data.values || [],
|
transitions: data.values || [],
|
||||||
|
total: data.size || 0,
|
||||||
|
isLastPage: data.isLastPage ?? true,
|
||||||
},
|
},
|
||||||
})
|
})
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
|
|||||||
113
apps/sim/app/api/tools/onepassword/create-item/route.ts
Normal file
113
apps/sim/app/api/tools/onepassword/create-item/route.ts
Normal file
@@ -0,0 +1,113 @@
|
|||||||
|
import { randomUUID } from 'crypto'
|
||||||
|
import type { ItemCreateParams } from '@1password/sdk'
|
||||||
|
import { createLogger } from '@sim/logger'
|
||||||
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
|
import { z } from 'zod'
|
||||||
|
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||||
|
import {
|
||||||
|
connectRequest,
|
||||||
|
createOnePasswordClient,
|
||||||
|
normalizeSdkItem,
|
||||||
|
resolveCredentials,
|
||||||
|
toSdkCategory,
|
||||||
|
toSdkFieldType,
|
||||||
|
} from '../utils'
|
||||||
|
|
||||||
|
const logger = createLogger('OnePasswordCreateItemAPI')
|
||||||
|
|
||||||
|
const CreateItemSchema = z.object({
|
||||||
|
connectionMode: z.enum(['service_account', 'connect']).nullish(),
|
||||||
|
serviceAccountToken: z.string().nullish(),
|
||||||
|
serverUrl: z.string().nullish(),
|
||||||
|
apiKey: z.string().nullish(),
|
||||||
|
vaultId: z.string().min(1, 'Vault ID is required'),
|
||||||
|
category: z.string().min(1, 'Category is required'),
|
||||||
|
title: z.string().nullish(),
|
||||||
|
tags: z.string().nullish(),
|
||||||
|
fields: z.string().nullish(),
|
||||||
|
})
|
||||||
|
|
||||||
|
export async function POST(request: NextRequest) {
|
||||||
|
const requestId = randomUUID().slice(0, 8)
|
||||||
|
|
||||||
|
const auth = await checkInternalAuth(request)
|
||||||
|
if (!auth.success || !auth.userId) {
|
||||||
|
logger.warn(`[${requestId}] Unauthorized 1Password create-item attempt`)
|
||||||
|
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const body = await request.json()
|
||||||
|
const params = CreateItemSchema.parse(body)
|
||||||
|
const creds = resolveCredentials(params)
|
||||||
|
|
||||||
|
logger.info(`[${requestId}] Creating item in vault ${params.vaultId} (${creds.mode} mode)`)
|
||||||
|
|
||||||
|
if (creds.mode === 'service_account') {
|
||||||
|
const client = await createOnePasswordClient(creds.serviceAccountToken!)
|
||||||
|
|
||||||
|
const parsedTags = params.tags
|
||||||
|
? params.tags
|
||||||
|
.split(',')
|
||||||
|
.map((t) => t.trim())
|
||||||
|
.filter(Boolean)
|
||||||
|
: undefined
|
||||||
|
|
||||||
|
const parsedFields = params.fields
|
||||||
|
? (JSON.parse(params.fields) as Array<Record<string, any>>).map((f) => ({
|
||||||
|
id: f.id || randomUUID().slice(0, 8),
|
||||||
|
title: f.label || f.title || '',
|
||||||
|
fieldType: toSdkFieldType(f.type || 'STRING'),
|
||||||
|
value: f.value || '',
|
||||||
|
sectionId: f.section?.id ?? f.sectionId,
|
||||||
|
}))
|
||||||
|
: undefined
|
||||||
|
|
||||||
|
const item = await client.items.create({
|
||||||
|
vaultId: params.vaultId,
|
||||||
|
category: toSdkCategory(params.category),
|
||||||
|
title: params.title || '',
|
||||||
|
tags: parsedTags,
|
||||||
|
fields: parsedFields,
|
||||||
|
} as ItemCreateParams)
|
||||||
|
|
||||||
|
return NextResponse.json(normalizeSdkItem(item))
|
||||||
|
}
|
||||||
|
|
||||||
|
const connectBody: Record<string, unknown> = {
|
||||||
|
vault: { id: params.vaultId },
|
||||||
|
category: params.category,
|
||||||
|
}
|
||||||
|
if (params.title) connectBody.title = params.title
|
||||||
|
if (params.tags) connectBody.tags = params.tags.split(',').map((t) => t.trim())
|
||||||
|
if (params.fields) connectBody.fields = JSON.parse(params.fields)
|
||||||
|
|
||||||
|
const response = await connectRequest({
|
||||||
|
serverUrl: creds.serverUrl!,
|
||||||
|
apiKey: creds.apiKey!,
|
||||||
|
path: `/v1/vaults/${params.vaultId}/items`,
|
||||||
|
method: 'POST',
|
||||||
|
body: connectBody,
|
||||||
|
})
|
||||||
|
|
||||||
|
const data = await response.json()
|
||||||
|
if (!response.ok) {
|
||||||
|
return NextResponse.json(
|
||||||
|
{ error: data.message || 'Failed to create item' },
|
||||||
|
{ status: response.status }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
return NextResponse.json(data)
|
||||||
|
} catch (error) {
|
||||||
|
if (error instanceof z.ZodError) {
|
||||||
|
return NextResponse.json(
|
||||||
|
{ error: 'Invalid request data', details: error.errors },
|
||||||
|
{ status: 400 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
const message = error instanceof Error ? error.message : 'Unknown error'
|
||||||
|
logger.error(`[${requestId}] Create item failed:`, error)
|
||||||
|
return NextResponse.json({ error: `Failed to create item: ${message}` }, { status: 500 })
|
||||||
|
}
|
||||||
|
}
|
||||||
70
apps/sim/app/api/tools/onepassword/delete-item/route.ts
Normal file
70
apps/sim/app/api/tools/onepassword/delete-item/route.ts
Normal file
@@ -0,0 +1,70 @@
|
|||||||
|
import { randomUUID } from 'crypto'
|
||||||
|
import { createLogger } from '@sim/logger'
|
||||||
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
|
import { z } from 'zod'
|
||||||
|
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||||
|
import { connectRequest, createOnePasswordClient, resolveCredentials } from '../utils'
|
||||||
|
|
||||||
|
const logger = createLogger('OnePasswordDeleteItemAPI')

/**
 * Request body for deleting a 1Password item.
 *
 * The credential fields (connectionMode, serviceAccountToken, serverUrl,
 * apiKey) are all optional at the schema level; resolveCredentials() in the
 * handler decides which connection mode applies from whichever are present.
 */
const DeleteItemSchema = z.object({
  // 'service_account' uses the 1Password SDK; 'connect' talks to a Connect server.
  connectionMode: z.enum(['service_account', 'connect']).nullish(),
  serviceAccountToken: z.string().nullish(),
  serverUrl: z.string().nullish(),
  apiKey: z.string().nullish(),
  // Both IDs are required regardless of connection mode.
  vaultId: z.string().min(1, 'Vault ID is required'),
  itemId: z.string().min(1, 'Item ID is required'),
})
|
||||||
|
|
||||||
|
export async function POST(request: NextRequest) {
|
||||||
|
const requestId = randomUUID().slice(0, 8)
|
||||||
|
|
||||||
|
const auth = await checkInternalAuth(request)
|
||||||
|
if (!auth.success || !auth.userId) {
|
||||||
|
logger.warn(`[${requestId}] Unauthorized 1Password delete-item attempt`)
|
||||||
|
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const body = await request.json()
|
||||||
|
const params = DeleteItemSchema.parse(body)
|
||||||
|
const creds = resolveCredentials(params)
|
||||||
|
|
||||||
|
logger.info(
|
||||||
|
`[${requestId}] Deleting item ${params.itemId} from vault ${params.vaultId} (${creds.mode} mode)`
|
||||||
|
)
|
||||||
|
|
||||||
|
if (creds.mode === 'service_account') {
|
||||||
|
const client = await createOnePasswordClient(creds.serviceAccountToken!)
|
||||||
|
await client.items.delete(params.vaultId, params.itemId)
|
||||||
|
return NextResponse.json({ success: true })
|
||||||
|
}
|
||||||
|
|
||||||
|
const response = await connectRequest({
|
||||||
|
serverUrl: creds.serverUrl!,
|
||||||
|
apiKey: creds.apiKey!,
|
||||||
|
path: `/v1/vaults/${params.vaultId}/items/${params.itemId}`,
|
||||||
|
method: 'DELETE',
|
||||||
|
})
|
||||||
|
|
||||||
|
if (!response.ok) {
|
||||||
|
const data = await response.json().catch(() => ({}))
|
||||||
|
return NextResponse.json(
|
||||||
|
{ error: (data as Record<string, string>).message || 'Failed to delete item' },
|
||||||
|
{ status: response.status }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
return NextResponse.json({ success: true })
|
||||||
|
} catch (error) {
|
||||||
|
if (error instanceof z.ZodError) {
|
||||||
|
return NextResponse.json(
|
||||||
|
{ error: 'Invalid request data', details: error.errors },
|
||||||
|
{ status: 400 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
const message = error instanceof Error ? error.message : 'Unknown error'
|
||||||
|
logger.error(`[${requestId}] Delete item failed:`, error)
|
||||||
|
return NextResponse.json({ error: `Failed to delete item: ${message}` }, { status: 500 })
|
||||||
|
}
|
||||||
|
}
|
||||||
75
apps/sim/app/api/tools/onepassword/get-item/route.ts
Normal file
75
apps/sim/app/api/tools/onepassword/get-item/route.ts
Normal file
@@ -0,0 +1,75 @@
|
|||||||
|
import { randomUUID } from 'crypto'
|
||||||
|
import { createLogger } from '@sim/logger'
|
||||||
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
|
import { z } from 'zod'
|
||||||
|
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||||
|
import {
|
||||||
|
connectRequest,
|
||||||
|
createOnePasswordClient,
|
||||||
|
normalizeSdkItem,
|
||||||
|
resolveCredentials,
|
||||||
|
} from '../utils'
|
||||||
|
|
||||||
|
const logger = createLogger('OnePasswordGetItemAPI')

/**
 * Request body for fetching a single 1Password item.
 *
 * Credential fields are optional at the schema level; resolveCredentials()
 * in the handler derives the effective connection mode from whichever are
 * present.
 */
const GetItemSchema = z.object({
  // 'service_account' uses the 1Password SDK; 'connect' talks to a Connect server.
  connectionMode: z.enum(['service_account', 'connect']).nullish(),
  serviceAccountToken: z.string().nullish(),
  serverUrl: z.string().nullish(),
  apiKey: z.string().nullish(),
  // Both IDs are required regardless of connection mode.
  vaultId: z.string().min(1, 'Vault ID is required'),
  itemId: z.string().min(1, 'Item ID is required'),
})
|
||||||
|
|
||||||
|
export async function POST(request: NextRequest) {
|
||||||
|
const requestId = randomUUID().slice(0, 8)
|
||||||
|
|
||||||
|
const auth = await checkInternalAuth(request)
|
||||||
|
if (!auth.success || !auth.userId) {
|
||||||
|
logger.warn(`[${requestId}] Unauthorized 1Password get-item attempt`)
|
||||||
|
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const body = await request.json()
|
||||||
|
const params = GetItemSchema.parse(body)
|
||||||
|
const creds = resolveCredentials(params)
|
||||||
|
|
||||||
|
logger.info(
|
||||||
|
`[${requestId}] Getting item ${params.itemId} from vault ${params.vaultId} (${creds.mode} mode)`
|
||||||
|
)
|
||||||
|
|
||||||
|
if (creds.mode === 'service_account') {
|
||||||
|
const client = await createOnePasswordClient(creds.serviceAccountToken!)
|
||||||
|
const item = await client.items.get(params.vaultId, params.itemId)
|
||||||
|
return NextResponse.json(normalizeSdkItem(item))
|
||||||
|
}
|
||||||
|
|
||||||
|
const response = await connectRequest({
|
||||||
|
serverUrl: creds.serverUrl!,
|
||||||
|
apiKey: creds.apiKey!,
|
||||||
|
path: `/v1/vaults/${params.vaultId}/items/${params.itemId}`,
|
||||||
|
method: 'GET',
|
||||||
|
})
|
||||||
|
|
||||||
|
const data = await response.json()
|
||||||
|
if (!response.ok) {
|
||||||
|
return NextResponse.json(
|
||||||
|
{ error: data.message || 'Failed to get item' },
|
||||||
|
{ status: response.status }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
return NextResponse.json(data)
|
||||||
|
} catch (error) {
|
||||||
|
if (error instanceof z.ZodError) {
|
||||||
|
return NextResponse.json(
|
||||||
|
{ error: 'Invalid request data', details: error.errors },
|
||||||
|
{ status: 400 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
const message = error instanceof Error ? error.message : 'Unknown error'
|
||||||
|
logger.error(`[${requestId}] Get item failed:`, error)
|
||||||
|
return NextResponse.json({ error: `Failed to get item: ${message}` }, { status: 500 })
|
||||||
|
}
|
||||||
|
}
|
||||||
78
apps/sim/app/api/tools/onepassword/get-vault/route.ts
Normal file
78
apps/sim/app/api/tools/onepassword/get-vault/route.ts
Normal file
@@ -0,0 +1,78 @@
|
|||||||
|
import { randomUUID } from 'crypto'
|
||||||
|
import { createLogger } from '@sim/logger'
|
||||||
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
|
import { z } from 'zod'
|
||||||
|
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||||
|
import {
|
||||||
|
connectRequest,
|
||||||
|
createOnePasswordClient,
|
||||||
|
normalizeSdkVault,
|
||||||
|
resolveCredentials,
|
||||||
|
} from '../utils'
|
||||||
|
|
||||||
|
const logger = createLogger('OnePasswordGetVaultAPI')

/**
 * Request body for fetching a single 1Password vault.
 *
 * Credential fields are optional at the schema level; resolveCredentials()
 * in the handler derives the effective connection mode from whichever are
 * present.
 */
const GetVaultSchema = z.object({
  // 'service_account' uses the 1Password SDK; 'connect' talks to a Connect server.
  connectionMode: z.enum(['service_account', 'connect']).nullish(),
  serviceAccountToken: z.string().nullish(),
  serverUrl: z.string().nullish(),
  apiKey: z.string().nullish(),
  vaultId: z.string().min(1, 'Vault ID is required'),
})
|
||||||
|
|
||||||
|
export async function POST(request: NextRequest) {
|
||||||
|
const requestId = randomUUID().slice(0, 8)
|
||||||
|
|
||||||
|
const auth = await checkInternalAuth(request)
|
||||||
|
if (!auth.success || !auth.userId) {
|
||||||
|
logger.warn(`[${requestId}] Unauthorized 1Password get-vault attempt`)
|
||||||
|
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const body = await request.json()
|
||||||
|
const params = GetVaultSchema.parse(body)
|
||||||
|
const creds = resolveCredentials(params)
|
||||||
|
|
||||||
|
logger.info(`[${requestId}] Getting 1Password vault ${params.vaultId} (${creds.mode} mode)`)
|
||||||
|
|
||||||
|
if (creds.mode === 'service_account') {
|
||||||
|
const client = await createOnePasswordClient(creds.serviceAccountToken!)
|
||||||
|
const vaults = await client.vaults.list()
|
||||||
|
const vault = vaults.find((v) => v.id === params.vaultId)
|
||||||
|
|
||||||
|
if (!vault) {
|
||||||
|
return NextResponse.json({ error: 'Vault not found' }, { status: 404 })
|
||||||
|
}
|
||||||
|
|
||||||
|
return NextResponse.json(normalizeSdkVault(vault))
|
||||||
|
}
|
||||||
|
|
||||||
|
const response = await connectRequest({
|
||||||
|
serverUrl: creds.serverUrl!,
|
||||||
|
apiKey: creds.apiKey!,
|
||||||
|
path: `/v1/vaults/${params.vaultId}`,
|
||||||
|
method: 'GET',
|
||||||
|
})
|
||||||
|
|
||||||
|
const data = await response.json()
|
||||||
|
if (!response.ok) {
|
||||||
|
return NextResponse.json(
|
||||||
|
{ error: data.message || 'Failed to get vault' },
|
||||||
|
{ status: response.status }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
return NextResponse.json(data)
|
||||||
|
} catch (error) {
|
||||||
|
if (error instanceof z.ZodError) {
|
||||||
|
return NextResponse.json(
|
||||||
|
{ error: 'Invalid request data', details: error.errors },
|
||||||
|
{ status: 400 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
const message = error instanceof Error ? error.message : 'Unknown error'
|
||||||
|
logger.error(`[${requestId}] Get vault failed:`, error)
|
||||||
|
return NextResponse.json({ error: `Failed to get vault: ${message}` }, { status: 500 })
|
||||||
|
}
|
||||||
|
}
|
||||||
87
apps/sim/app/api/tools/onepassword/list-items/route.ts
Normal file
87
apps/sim/app/api/tools/onepassword/list-items/route.ts
Normal file
@@ -0,0 +1,87 @@
|
|||||||
|
import { randomUUID } from 'crypto'
|
||||||
|
import { createLogger } from '@sim/logger'
|
||||||
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
|
import { z } from 'zod'
|
||||||
|
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||||
|
import {
|
||||||
|
connectRequest,
|
||||||
|
createOnePasswordClient,
|
||||||
|
normalizeSdkItemOverview,
|
||||||
|
resolveCredentials,
|
||||||
|
} from '../utils'
|
||||||
|
|
||||||
|
const logger = createLogger('OnePasswordListItemsAPI')

/**
 * Request body for listing items in a 1Password vault.
 *
 * Credential fields are optional at the schema level; resolveCredentials()
 * in the handler derives the effective connection mode from whichever are
 * present.
 */
const ListItemsSchema = z.object({
  // 'service_account' uses the 1Password SDK; 'connect' talks to a Connect server.
  connectionMode: z.enum(['service_account', 'connect']).nullish(),
  serviceAccountToken: z.string().nullish(),
  serverUrl: z.string().nullish(),
  apiKey: z.string().nullish(),
  vaultId: z.string().min(1, 'Vault ID is required'),
  // Optional filter: matched client-side (title/id substring) in SDK mode,
  // forwarded as the `filter` query parameter in Connect mode.
  filter: z.string().nullish(),
})
|
||||||
|
|
||||||
|
export async function POST(request: NextRequest) {
|
||||||
|
const requestId = randomUUID().slice(0, 8)
|
||||||
|
|
||||||
|
const auth = await checkInternalAuth(request)
|
||||||
|
if (!auth.success || !auth.userId) {
|
||||||
|
logger.warn(`[${requestId}] Unauthorized 1Password list-items attempt`)
|
||||||
|
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const body = await request.json()
|
||||||
|
const params = ListItemsSchema.parse(body)
|
||||||
|
const creds = resolveCredentials(params)
|
||||||
|
|
||||||
|
logger.info(`[${requestId}] Listing items in vault ${params.vaultId} (${creds.mode} mode)`)
|
||||||
|
|
||||||
|
if (creds.mode === 'service_account') {
|
||||||
|
const client = await createOnePasswordClient(creds.serviceAccountToken!)
|
||||||
|
const items = await client.items.list(params.vaultId)
|
||||||
|
const normalized = items.map(normalizeSdkItemOverview)
|
||||||
|
|
||||||
|
if (params.filter) {
|
||||||
|
const filterLower = params.filter.toLowerCase()
|
||||||
|
const filtered = normalized.filter(
|
||||||
|
(item) =>
|
||||||
|
item.title?.toLowerCase().includes(filterLower) ||
|
||||||
|
item.id?.toLowerCase().includes(filterLower)
|
||||||
|
)
|
||||||
|
return NextResponse.json(filtered)
|
||||||
|
}
|
||||||
|
|
||||||
|
return NextResponse.json(normalized)
|
||||||
|
}
|
||||||
|
|
||||||
|
const query = params.filter ? `filter=${encodeURIComponent(params.filter)}` : undefined
|
||||||
|
const response = await connectRequest({
|
||||||
|
serverUrl: creds.serverUrl!,
|
||||||
|
apiKey: creds.apiKey!,
|
||||||
|
path: `/v1/vaults/${params.vaultId}/items`,
|
||||||
|
method: 'GET',
|
||||||
|
query,
|
||||||
|
})
|
||||||
|
|
||||||
|
const data = await response.json()
|
||||||
|
if (!response.ok) {
|
||||||
|
return NextResponse.json(
|
||||||
|
{ error: data.message || 'Failed to list items' },
|
||||||
|
{ status: response.status }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
return NextResponse.json(data)
|
||||||
|
} catch (error) {
|
||||||
|
if (error instanceof z.ZodError) {
|
||||||
|
return NextResponse.json(
|
||||||
|
{ error: 'Invalid request data', details: error.errors },
|
||||||
|
{ status: 400 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
const message = error instanceof Error ? error.message : 'Unknown error'
|
||||||
|
logger.error(`[${requestId}] List items failed:`, error)
|
||||||
|
return NextResponse.json({ error: `Failed to list items: ${message}` }, { status: 500 })
|
||||||
|
}
|
||||||
|
}
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user