mirror of
https://github.com/simstudioai/sim.git
synced 2026-04-28 03:00:29 -04:00
v0.6.33: polling improvements, jsm forms tools, credentials reactquery invalidation, HITL docs
This commit is contained in:
@@ -126,26 +126,38 @@ Access resume data in downstream blocks using `<blockId.fieldName>`.
|
||||
|
||||
- **Stream mode** (`stream: true` on the original execute call) — The resume response streams SSE events with `selectedOutputs` chunks, just like the initial execution.
|
||||
|
||||
- **Async mode** (`X-Execution-Mode: async` on the original execute call) — The resume dispatches execution to a background worker and returns immediately with `202`:
|
||||
- **Async mode** (`X-Execution-Mode: async` on the original execute call) — The resume dispatches execution to a background worker and returns immediately with `202`, including a `jobId` and `statusUrl` for polling:
|
||||
|
||||
```json
|
||||
{
|
||||
"status": "started",
|
||||
"success": true,
|
||||
"async": true,
|
||||
"jobId": "<jobId>",
|
||||
"executionId": "<resumeExecutionId>",
|
||||
"message": "Resume execution started asynchronously."
|
||||
"message": "Resume execution queued",
|
||||
"statusUrl": "/api/jobs/<jobId>"
|
||||
}
|
||||
```
|
||||
|
||||
#### Polling execution status
|
||||
|
||||
To check on a paused execution or poll for completion after an async resume:
|
||||
Poll the `statusUrl` from the async response to check when the resume completes:
|
||||
|
||||
```bash
|
||||
GET /api/jobs/{jobId}
|
||||
X-API-Key: your-api-key
|
||||
```
|
||||
|
||||
Returns job status and, when completed, the full workflow output.
|
||||
|
||||
To check on a paused execution's pause points and resume links:
|
||||
|
||||
```bash
|
||||
GET /api/resume/{workflowId}/{executionId}
|
||||
X-API-Key: your-api-key
|
||||
```
|
||||
|
||||
Returns the full paused execution detail with all pause points, their statuses, and resume links. Returns `404` when the execution has completed and is no longer paused.
|
||||
Returns the paused execution detail with all pause points, their statuses, and resume links. Returns `404` when the execution has completed and is no longer paused.
|
||||
</Tab>
|
||||
<Tab>
|
||||
### Webhook
|
||||
|
||||
@@ -678,4 +678,84 @@ Get the fields required to create a request of a specific type in Jira Service M
|
||||
| ↳ `defaultValues` | json | Default values for the field |
|
||||
| ↳ `jiraSchema` | json | Jira field schema with type, system, custom, customId |
|
||||
|
||||
### `jsm_get_form_templates`
|
||||
|
||||
List forms (ProForma/JSM Forms) in a Jira project to discover form IDs for request types
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `domain` | string | Yes | Your Jira domain \(e.g., yourcompany.atlassian.net\) |
|
||||
| `cloudId` | string | No | Jira Cloud ID for the instance |
|
||||
| `projectIdOrKey` | string | Yes | Jira project ID or key \(e.g., "10001" or "SD"\) |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `ts` | string | Timestamp of the operation |
|
||||
| `projectIdOrKey` | string | Project ID or key |
|
||||
| `templates` | array | List of forms in the project |
|
||||
| ↳ `id` | string | Form template ID \(UUID\) |
|
||||
| ↳ `name` | string | Form template name |
|
||||
| ↳ `updated` | string | Last updated timestamp \(ISO 8601\) |
|
||||
| ↳ `issueCreateIssueTypeIds` | json | Issue type IDs that auto-attach this form on issue create |
|
||||
| ↳ `issueCreateRequestTypeIds` | json | Request type IDs that auto-attach this form on issue create |
|
||||
| ↳ `portalRequestTypeIds` | json | Request type IDs that show this form on the customer portal |
|
||||
| ↳ `recommendedIssueRequestTypeIds` | json | Request type IDs that recommend this form |
|
||||
| `total` | number | Total number of forms |
|
||||
|
||||
### `jsm_get_form_structure`
|
||||
|
||||
Get the full structure of a ProForma/JSM form including all questions, field types, choices, layout, and conditions
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `domain` | string | Yes | Your Jira domain \(e.g., yourcompany.atlassian.net\) |
|
||||
| `cloudId` | string | No | Jira Cloud ID for the instance |
|
||||
| `projectIdOrKey` | string | Yes | Jira project ID or key \(e.g., "10001" or "SD"\) |
|
||||
| `formId` | string | Yes | Form ID \(UUID from Get Form Templates\) |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `ts` | string | Timestamp of the operation |
|
||||
| `projectIdOrKey` | string | Project ID or key |
|
||||
| `formId` | string | Form ID |
|
||||
| `design` | json | Full form design with questions \(field types, labels, choices, validation\), layout \(field ordering\), and conditions |
|
||||
| `updated` | string | Last updated timestamp |
|
||||
| `publish` | json | Publishing and request type configuration |
|
||||
|
||||
### `jsm_get_issue_forms`
|
||||
|
||||
List forms (ProForma/JSM Forms) attached to a Jira issue with metadata (name, submitted status, lock)
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `domain` | string | Yes | Your Jira domain \(e.g., yourcompany.atlassian.net\) |
|
||||
| `cloudId` | string | No | Jira Cloud ID for the instance |
|
||||
| `issueIdOrKey` | string | Yes | Issue ID or key \(e.g., "SD-123", "10001"\) |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `ts` | string | Timestamp of the operation |
|
||||
| `issueIdOrKey` | string | Issue ID or key |
|
||||
| `forms` | array | List of forms attached to the issue |
|
||||
| ↳ `id` | string | Form instance ID \(UUID\) |
|
||||
| ↳ `name` | string | Form name |
|
||||
| ↳ `updated` | string | Last updated timestamp \(ISO 8601\) |
|
||||
| ↳ `submitted` | boolean | Whether the form has been submitted |
|
||||
| ↳ `lock` | boolean | Whether the form is locked |
|
||||
| ↳ `internal` | boolean | Whether the form is internal-only |
|
||||
| ↳ `formTemplateId` | string | Source form template ID \(UUID\) |
|
||||
| `total` | number | Total number of forms |
|
||||
|
||||
|
||||
|
||||
@@ -25,6 +25,10 @@
|
||||
"name": "Workflows",
|
||||
"description": "Execute workflows and manage workflow resources"
|
||||
},
|
||||
{
|
||||
"name": "Human in the Loop",
|
||||
"description": "Manage paused workflow executions and resume them with input"
|
||||
},
|
||||
{
|
||||
"name": "Logs",
|
||||
"description": "Query execution logs and retrieve details"
|
||||
@@ -235,6 +239,544 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"/api/workflows/{id}/paused": {
|
||||
"get": {
|
||||
"operationId": "listPausedExecutions",
|
||||
"summary": "List Paused Executions",
|
||||
"description": "List all paused executions for a workflow. Workflows pause at Human in the Loop blocks and wait for input before continuing. Use this endpoint to discover which executions need attention.",
|
||||
"tags": ["Human in the Loop"],
|
||||
"x-codeSamples": [
|
||||
{
|
||||
"id": "curl",
|
||||
"label": "cURL",
|
||||
"lang": "bash",
|
||||
"source": "curl -X GET \\\n \"https://www.sim.ai/api/workflows/{id}/paused?status=paused\" \\\n -H \"X-API-Key: YOUR_API_KEY\""
|
||||
}
|
||||
],
|
||||
"parameters": [
|
||||
{
|
||||
"name": "id",
|
||||
"in": "path",
|
||||
"required": true,
|
||||
"description": "The unique identifier of the workflow.",
|
||||
"schema": {
|
||||
"type": "string",
|
||||
"example": "wf_1a2b3c4d5e"
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "status",
|
||||
"in": "query",
|
||||
"required": false,
|
||||
"description": "Filter paused executions by status.",
|
||||
"schema": {
|
||||
"type": "string",
|
||||
"example": "paused"
|
||||
}
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "List of paused executions.",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"pausedExecutions": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/components/schemas/PausedExecutionSummary"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"example": {
|
||||
"pausedExecutions": [
|
||||
{
|
||||
"id": "pe_abc123",
|
||||
"workflowId": "wf_1a2b3c4d5e",
|
||||
"executionId": "exec_9f8e7d6c5b",
|
||||
"status": "paused",
|
||||
"totalPauseCount": 1,
|
||||
"resumedCount": 0,
|
||||
"pausedAt": "2026-01-15T10:30:00Z",
|
||||
"updatedAt": "2026-01-15T10:30:00Z",
|
||||
"expiresAt": null,
|
||||
"metadata": null,
|
||||
"triggerIds": [],
|
||||
"pausePoints": [
|
||||
{
|
||||
"contextId": "ctx_xyz789",
|
||||
"blockId": "block_hitl_1",
|
||||
"registeredAt": "2026-01-15T10:30:00Z",
|
||||
"resumeStatus": "paused",
|
||||
"snapshotReady": true,
|
||||
"resumeLinks": {
|
||||
"apiUrl": "https://www.sim.ai/api/resume/wf_1a2b3c4d5e/exec_9f8e7d6c5b/ctx_xyz789",
|
||||
"uiUrl": "https://www.sim.ai/resume/wf_1a2b3c4d5e/exec_9f8e7d6c5b",
|
||||
"contextId": "ctx_xyz789",
|
||||
"executionId": "exec_9f8e7d6c5b",
|
||||
"workflowId": "wf_1a2b3c4d5e"
|
||||
},
|
||||
"response": {
|
||||
"displayData": {
|
||||
"title": "Approval Required",
|
||||
"message": "Please review this request"
|
||||
},
|
||||
"formFields": []
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"400": {
|
||||
"$ref": "#/components/responses/BadRequest"
|
||||
},
|
||||
"401": {
|
||||
"$ref": "#/components/responses/Unauthorized"
|
||||
},
|
||||
"403": {
|
||||
"$ref": "#/components/responses/Forbidden"
|
||||
},
|
||||
"404": {
|
||||
"$ref": "#/components/responses/NotFound"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/api/workflows/{id}/paused/{executionId}": {
|
||||
"get": {
|
||||
"operationId": "getPausedExecution",
|
||||
"summary": "Get Paused Execution",
|
||||
"description": "Get detailed information about a specific paused execution, including its pause points, execution snapshot, and resume queue. Use this to inspect the state before resuming.",
|
||||
"tags": ["Human in the Loop"],
|
||||
"x-codeSamples": [
|
||||
{
|
||||
"id": "curl",
|
||||
"label": "cURL",
|
||||
"lang": "bash",
|
||||
"source": "curl -X GET \\\n \"https://www.sim.ai/api/workflows/{id}/paused/{executionId}\" \\\n -H \"X-API-Key: YOUR_API_KEY\""
|
||||
}
|
||||
],
|
||||
"parameters": [
|
||||
{
|
||||
"name": "id",
|
||||
"in": "path",
|
||||
"required": true,
|
||||
"description": "The unique identifier of the workflow.",
|
||||
"schema": {
|
||||
"type": "string",
|
||||
"example": "wf_1a2b3c4d5e"
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "executionId",
|
||||
"in": "path",
|
||||
"required": true,
|
||||
"description": "The execution ID of the paused execution.",
|
||||
"schema": {
|
||||
"type": "string",
|
||||
"example": "exec_9f8e7d6c5b"
|
||||
}
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "Paused execution details.",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/PausedExecutionDetail"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"401": {
|
||||
"$ref": "#/components/responses/Unauthorized"
|
||||
},
|
||||
"403": {
|
||||
"$ref": "#/components/responses/Forbidden"
|
||||
},
|
||||
"404": {
|
||||
"$ref": "#/components/responses/NotFound"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/api/resume/{workflowId}/{executionId}": {
|
||||
"get": {
|
||||
"operationId": "getPausedExecutionByResumePath",
|
||||
"summary": "Get Paused Execution (Resume Path)",
|
||||
"description": "Get detailed information about a specific paused execution using the resume URL path. Returns the same data as the workflow paused execution detail endpoint.",
|
||||
"tags": ["Human in the Loop"],
|
||||
"x-codeSamples": [
|
||||
{
|
||||
"id": "curl",
|
||||
"label": "cURL",
|
||||
"lang": "bash",
|
||||
"source": "curl -X GET \\\n \"https://www.sim.ai/api/resume/{workflowId}/{executionId}\" \\\n -H \"X-API-Key: YOUR_API_KEY\""
|
||||
}
|
||||
],
|
||||
"parameters": [
|
||||
{
|
||||
"name": "workflowId",
|
||||
"in": "path",
|
||||
"required": true,
|
||||
"description": "The unique identifier of the workflow.",
|
||||
"schema": {
|
||||
"type": "string",
|
||||
"example": "wf_1a2b3c4d5e"
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "executionId",
|
||||
"in": "path",
|
||||
"required": true,
|
||||
"description": "The execution ID of the paused execution.",
|
||||
"schema": {
|
||||
"type": "string",
|
||||
"example": "exec_9f8e7d6c5b"
|
||||
}
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "Paused execution details.",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/PausedExecutionDetail"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"401": {
|
||||
"$ref": "#/components/responses/Unauthorized"
|
||||
},
|
||||
"403": {
|
||||
"$ref": "#/components/responses/Forbidden"
|
||||
},
|
||||
"404": {
|
||||
"$ref": "#/components/responses/NotFound"
|
||||
},
|
||||
"500": {
|
||||
"description": "Internal server error.",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"error": {
|
||||
"type": "string",
|
||||
"description": "Human-readable error message."
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/api/resume/{workflowId}/{executionId}/{contextId}": {
|
||||
"get": {
|
||||
"operationId": "getPauseContext",
|
||||
"summary": "Get Pause Context",
|
||||
"description": "Get detailed information about a specific pause context within a paused execution. Returns the pause point details, resume queue state, and any active resume entry.",
|
||||
"tags": ["Human in the Loop"],
|
||||
"x-codeSamples": [
|
||||
{
|
||||
"id": "curl",
|
||||
"label": "cURL",
|
||||
"lang": "bash",
|
||||
"source": "curl -X GET \\\n \"https://www.sim.ai/api/resume/{workflowId}/{executionId}/{contextId}\" \\\n -H \"X-API-Key: YOUR_API_KEY\""
|
||||
}
|
||||
],
|
||||
"parameters": [
|
||||
{
|
||||
"name": "workflowId",
|
||||
"in": "path",
|
||||
"required": true,
|
||||
"description": "The unique identifier of the workflow.",
|
||||
"schema": {
|
||||
"type": "string",
|
||||
"example": "wf_1a2b3c4d5e"
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "executionId",
|
||||
"in": "path",
|
||||
"required": true,
|
||||
"description": "The execution ID of the paused execution.",
|
||||
"schema": {
|
||||
"type": "string",
|
||||
"example": "exec_9f8e7d6c5b"
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "contextId",
|
||||
"in": "path",
|
||||
"required": true,
|
||||
"description": "The pause context ID to retrieve details for.",
|
||||
"schema": {
|
||||
"type": "string",
|
||||
"example": "ctx_xyz789"
|
||||
}
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "Pause context details.",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/PauseContextDetail"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"401": {
|
||||
"$ref": "#/components/responses/Unauthorized"
|
||||
},
|
||||
"403": {
|
||||
"$ref": "#/components/responses/Forbidden"
|
||||
},
|
||||
"404": {
|
||||
"$ref": "#/components/responses/NotFound"
|
||||
}
|
||||
}
|
||||
},
|
||||
"post": {
|
||||
"operationId": "resumeExecution",
|
||||
"summary": "Resume Execution",
|
||||
"description": "Resume a paused workflow execution by providing input for a specific pause context. The execution continues from where it paused, using the provided input. Supports synchronous, asynchronous, and streaming modes (determined by the original execution's configuration).",
|
||||
"tags": ["Human in the Loop"],
|
||||
"x-codeSamples": [
|
||||
{
|
||||
"id": "curl",
|
||||
"label": "cURL",
|
||||
"lang": "bash",
|
||||
"source": "curl -X POST \\\n \"https://www.sim.ai/api/resume/{workflowId}/{executionId}/{contextId}\" \\\n -H \"X-API-Key: YOUR_API_KEY\" \\\n -H \"Content-Type: application/json\" \\\n -d '{\n \"input\": {\n \"approved\": true,\n \"comment\": \"Looks good to me\"\n }\n }'"
|
||||
}
|
||||
],
|
||||
"parameters": [
|
||||
{
|
||||
"name": "workflowId",
|
||||
"in": "path",
|
||||
"required": true,
|
||||
"description": "The unique identifier of the workflow.",
|
||||
"schema": {
|
||||
"type": "string",
|
||||
"example": "wf_1a2b3c4d5e"
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "executionId",
|
||||
"in": "path",
|
||||
"required": true,
|
||||
"description": "The execution ID of the paused execution.",
|
||||
"schema": {
|
||||
"type": "string",
|
||||
"example": "exec_9f8e7d6c5b"
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "contextId",
|
||||
"in": "path",
|
||||
"required": true,
|
||||
"description": "The pause context ID to resume. Found in the pause point's contextId field or resumeLinks.",
|
||||
"schema": {
|
||||
"type": "string",
|
||||
"example": "ctx_xyz789"
|
||||
}
|
||||
}
|
||||
],
|
||||
"requestBody": {
|
||||
"description": "Input data for the resumed execution. The structure depends on the workflow's Human in the Loop block configuration.",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"input": {
|
||||
"type": "object",
|
||||
"description": "Key-value pairs to pass as input to the resumed execution. If omitted, the entire request body is used as input.",
|
||||
"additionalProperties": true
|
||||
}
|
||||
}
|
||||
},
|
||||
"example": {
|
||||
"input": {
|
||||
"approved": true,
|
||||
"comment": "Looks good to me"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "Resume execution completed synchronously, or resume was queued behind another in-progress resume.",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"oneOf": [
|
||||
{
|
||||
"$ref": "#/components/schemas/ResumeResult"
|
||||
},
|
||||
{
|
||||
"type": "object",
|
||||
"description": "Resume has been queued behind another in-progress resume.",
|
||||
"properties": {
|
||||
"status": {
|
||||
"type": "string",
|
||||
"enum": ["queued"],
|
||||
"description": "Indicates the resume is queued."
|
||||
},
|
||||
"executionId": {
|
||||
"type": "string",
|
||||
"description": "The execution ID assigned to this resume."
|
||||
},
|
||||
"queuePosition": {
|
||||
"type": "integer",
|
||||
"description": "Position in the resume queue."
|
||||
},
|
||||
"message": {
|
||||
"type": "string",
|
||||
"description": "Human-readable status message."
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "object",
|
||||
"description": "Resume execution started (non-API-key callers). The execution runs asynchronously.",
|
||||
"properties": {
|
||||
"status": {
|
||||
"type": "string",
|
||||
"enum": ["started"],
|
||||
"description": "Indicates the resume execution has started."
|
||||
},
|
||||
"executionId": {
|
||||
"type": "string",
|
||||
"description": "The execution ID for the resumed workflow."
|
||||
},
|
||||
"message": {
|
||||
"type": "string",
|
||||
"description": "Human-readable status message."
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
"examples": {
|
||||
"sync": {
|
||||
"summary": "Synchronous completion",
|
||||
"value": {
|
||||
"success": true,
|
||||
"status": "completed",
|
||||
"executionId": "exec_new123",
|
||||
"output": {
|
||||
"result": "Approved and processed"
|
||||
},
|
||||
"error": null,
|
||||
"metadata": {
|
||||
"duration": 850,
|
||||
"startTime": "2026-01-15T10:35:00Z",
|
||||
"endTime": "2026-01-15T10:35:01Z"
|
||||
}
|
||||
}
|
||||
},
|
||||
"queued": {
|
||||
"summary": "Queued behind another resume",
|
||||
"value": {
|
||||
"status": "queued",
|
||||
"executionId": "exec_new123",
|
||||
"queuePosition": 2,
|
||||
"message": "Resume queued. It will run after current resumes finish."
|
||||
}
|
||||
},
|
||||
"started": {
|
||||
"summary": "Execution started (fire and forget)",
|
||||
"value": {
|
||||
"status": "started",
|
||||
"executionId": "exec_new123",
|
||||
"message": "Resume execution started."
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"202": {
|
||||
"description": "Resume execution has been queued for asynchronous processing. Poll the statusUrl for results.",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/AsyncExecutionResult"
|
||||
},
|
||||
"example": {
|
||||
"success": true,
|
||||
"async": true,
|
||||
"jobId": "job_4a3b2c1d0e",
|
||||
"executionId": "exec_new123",
|
||||
"message": "Resume execution queued",
|
||||
"statusUrl": "https://www.sim.ai/api/jobs/job_4a3b2c1d0e"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"400": {
|
||||
"$ref": "#/components/responses/BadRequest"
|
||||
},
|
||||
"401": {
|
||||
"$ref": "#/components/responses/Unauthorized"
|
||||
},
|
||||
"403": {
|
||||
"$ref": "#/components/responses/Forbidden"
|
||||
},
|
||||
"404": {
|
||||
"$ref": "#/components/responses/NotFound"
|
||||
},
|
||||
"503": {
|
||||
"description": "Failed to queue the resume execution. Retry the request.",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"error": {
|
||||
"type": "string",
|
||||
"description": "Error message."
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"500": {
|
||||
"description": "Internal server error.",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"error": {
|
||||
"type": "string",
|
||||
"description": "Human-readable error message."
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/api/v1/workflows": {
|
||||
"get": {
|
||||
"operationId": "listWorkflows",
|
||||
@@ -5788,6 +6330,346 @@
|
||||
"description": "Upper bound value for 'between' operator."
|
||||
}
|
||||
}
|
||||
},
|
||||
"PausedExecutionSummary": {
|
||||
"type": "object",
|
||||
"description": "Summary of a paused workflow execution.",
|
||||
"properties": {
|
||||
"id": {
|
||||
"type": "string",
|
||||
"description": "Unique identifier for the paused execution record."
|
||||
},
|
||||
"workflowId": {
|
||||
"type": "string",
|
||||
"description": "The workflow this execution belongs to."
|
||||
},
|
||||
"executionId": {
|
||||
"type": "string",
|
||||
"description": "The execution that was paused."
|
||||
},
|
||||
"status": {
|
||||
"type": "string",
|
||||
"description": "Current status of the paused execution.",
|
||||
"example": "paused"
|
||||
},
|
||||
"totalPauseCount": {
|
||||
"type": "integer",
|
||||
"description": "Total number of pause points in this execution."
|
||||
},
|
||||
"resumedCount": {
|
||||
"type": "integer",
|
||||
"description": "Number of pause points that have been resumed."
|
||||
},
|
||||
"pausedAt": {
|
||||
"type": "string",
|
||||
"format": "date-time",
|
||||
"nullable": true,
|
||||
"description": "When the execution was paused."
|
||||
},
|
||||
"updatedAt": {
|
||||
"type": "string",
|
||||
"format": "date-time",
|
||||
"nullable": true,
|
||||
"description": "When the paused execution record was last updated."
|
||||
},
|
||||
"expiresAt": {
|
||||
"type": "string",
|
||||
"format": "date-time",
|
||||
"nullable": true,
|
||||
"description": "When the paused execution will expire and be cleaned up."
|
||||
},
|
||||
"metadata": {
|
||||
"type": "object",
|
||||
"nullable": true,
|
||||
"description": "Additional metadata associated with the paused execution.",
|
||||
"additionalProperties": true
|
||||
},
|
||||
"triggerIds": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
},
|
||||
"description": "IDs of triggers that initiated the original execution."
|
||||
},
|
||||
"pausePoints": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/components/schemas/PausePoint"
|
||||
},
|
||||
"description": "List of pause points in the execution."
|
||||
}
|
||||
}
|
||||
},
|
||||
"PausePoint": {
|
||||
"type": "object",
|
||||
"description": "A point in the workflow where execution has been paused awaiting human input.",
|
||||
"properties": {
|
||||
"contextId": {
|
||||
"type": "string",
|
||||
"description": "Unique identifier for this pause context. Used when resuming execution."
|
||||
},
|
||||
"blockId": {
|
||||
"type": "string",
|
||||
"description": "The block ID where execution paused."
|
||||
},
|
||||
"response": {
|
||||
"description": "Data returned by the block before pausing, including display data and form fields."
|
||||
},
|
||||
"registeredAt": {
|
||||
"type": "string",
|
||||
"format": "date-time",
|
||||
"description": "When this pause point was registered."
|
||||
},
|
||||
"resumeStatus": {
|
||||
"type": "string",
|
||||
"enum": ["paused", "resumed", "failed", "queued", "resuming"],
|
||||
"description": "Current status of this pause point."
|
||||
},
|
||||
"snapshotReady": {
|
||||
"type": "boolean",
|
||||
"description": "Whether the execution snapshot is ready for resumption."
|
||||
},
|
||||
"resumeLinks": {
|
||||
"type": "object",
|
||||
"description": "Links for resuming this pause point.",
|
||||
"properties": {
|
||||
"apiUrl": {
|
||||
"type": "string",
|
||||
"format": "uri",
|
||||
"description": "API endpoint URL to POST resume input to."
|
||||
},
|
||||
"uiUrl": {
|
||||
"type": "string",
|
||||
"format": "uri",
|
||||
"description": "UI URL for a human to review and approve."
|
||||
},
|
||||
"contextId": {
|
||||
"type": "string",
|
||||
"description": "The context ID for this pause point."
|
||||
},
|
||||
"executionId": {
|
||||
"type": "string",
|
||||
"description": "The execution ID."
|
||||
},
|
||||
"workflowId": {
|
||||
"type": "string",
|
||||
"description": "The workflow ID."
|
||||
}
|
||||
}
|
||||
},
|
||||
"queuePosition": {
|
||||
"type": "integer",
|
||||
"nullable": true,
|
||||
"description": "Position in the resume queue, if queued."
|
||||
},
|
||||
"latestResumeEntry": {
|
||||
"$ref": "#/components/schemas/ResumeQueueEntry",
|
||||
"nullable": true,
|
||||
"description": "The most recent resume queue entry for this pause point."
|
||||
},
|
||||
"parallelScope": {
|
||||
"type": "object",
|
||||
"description": "Scope information when the pause occurs inside a parallel branch.",
|
||||
"properties": {
|
||||
"parallelId": {
|
||||
"type": "string",
|
||||
"description": "Identifier of the parallel execution group."
|
||||
},
|
||||
"branchIndex": {
|
||||
"type": "integer",
|
||||
"description": "Index of the branch within the parallel group."
|
||||
},
|
||||
"branchTotal": {
|
||||
"type": "integer",
|
||||
"description": "Total number of branches in the parallel group."
|
||||
}
|
||||
}
|
||||
},
|
||||
"loopScope": {
|
||||
"type": "object",
|
||||
"description": "Scope information when the pause occurs inside a loop.",
|
||||
"properties": {
|
||||
"loopId": {
|
||||
"type": "string",
|
||||
"description": "Identifier of the loop."
|
||||
},
|
||||
"iteration": {
|
||||
"type": "integer",
|
||||
"description": "Current loop iteration number."
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"ResumeQueueEntry": {
|
||||
"type": "object",
|
||||
"description": "An entry in the resume execution queue.",
|
||||
"properties": {
|
||||
"id": {
|
||||
"type": "string",
|
||||
"description": "Unique identifier for this queue entry."
|
||||
},
|
||||
"pausedExecutionId": {
|
||||
"type": "string",
|
||||
"description": "The paused execution this entry belongs to."
|
||||
},
|
||||
"parentExecutionId": {
|
||||
"type": "string",
|
||||
"description": "The original execution that was paused."
|
||||
},
|
||||
"newExecutionId": {
|
||||
"type": "string",
|
||||
"description": "The new execution ID created for the resume."
|
||||
},
|
||||
"contextId": {
|
||||
"type": "string",
|
||||
"description": "The pause context ID being resumed."
|
||||
},
|
||||
"resumeInput": {
|
||||
"description": "The input provided when resuming."
|
||||
},
|
||||
"status": {
|
||||
"type": "string",
|
||||
"description": "Status of this queue entry (e.g., pending, claimed, completed, failed)."
|
||||
},
|
||||
"queuedAt": {
|
||||
"type": "string",
|
||||
"format": "date-time",
|
||||
"nullable": true,
|
||||
"description": "When the entry was added to the queue."
|
||||
},
|
||||
"claimedAt": {
|
||||
"type": "string",
|
||||
"format": "date-time",
|
||||
"nullable": true,
|
||||
"description": "When execution started processing this entry."
|
||||
},
|
||||
"completedAt": {
|
||||
"type": "string",
|
||||
"format": "date-time",
|
||||
"nullable": true,
|
||||
"description": "When execution completed."
|
||||
},
|
||||
"failureReason": {
|
||||
"type": "string",
|
||||
"nullable": true,
|
||||
"description": "Reason for failure, if the resume failed."
|
||||
}
|
||||
}
|
||||
},
|
||||
"PausedExecutionDetail": {
|
||||
"type": "object",
|
||||
"description": "Detailed information about a paused execution, including the execution snapshot and resume queue.",
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/components/schemas/PausedExecutionSummary"
|
||||
},
|
||||
{
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"executionSnapshot": {
|
||||
"type": "object",
|
||||
"description": "Serialized execution state for resumption.",
|
||||
"properties": {
|
||||
"snapshot": {
|
||||
"type": "string",
|
||||
"description": "Serialized execution snapshot data."
|
||||
},
|
||||
"triggerIds": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
},
|
||||
"description": "Trigger IDs from the snapshot."
|
||||
}
|
||||
}
|
||||
},
|
||||
"queue": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/components/schemas/ResumeQueueEntry"
|
||||
},
|
||||
"description": "Resume queue entries for this execution."
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
"PauseContextDetail": {
|
||||
"type": "object",
|
||||
"description": "Detailed information about a specific pause context within a paused execution.",
|
||||
"properties": {
|
||||
"execution": {
|
||||
"$ref": "#/components/schemas/PausedExecutionSummary",
|
||||
"description": "Summary of the parent paused execution."
|
||||
},
|
||||
"pausePoint": {
|
||||
"$ref": "#/components/schemas/PausePoint",
|
||||
"description": "The specific pause point for this context."
|
||||
},
|
||||
"queue": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/components/schemas/ResumeQueueEntry"
|
||||
},
|
||||
"description": "Resume queue entries for this context."
|
||||
},
|
||||
"activeResumeEntry": {
|
||||
"$ref": "#/components/schemas/ResumeQueueEntry",
|
||||
"nullable": true,
|
||||
"description": "The currently active resume entry, if any."
|
||||
}
|
||||
}
|
||||
},
|
||||
"ResumeResult": {
|
||||
"type": "object",
|
||||
"description": "Result of a synchronous resume execution.",
|
||||
"properties": {
|
||||
"success": {
|
||||
"type": "boolean",
|
||||
"description": "Whether the resume execution completed successfully."
|
||||
},
|
||||
"status": {
|
||||
"type": "string",
|
||||
"description": "Execution status.",
|
||||
"enum": ["completed", "failed", "paused", "cancelled"],
|
||||
"example": "completed"
|
||||
},
|
||||
"executionId": {
|
||||
"type": "string",
|
||||
"description": "The new execution ID for the resumed workflow."
|
||||
},
|
||||
"output": {
|
||||
"type": "object",
|
||||
"description": "Workflow output from the resumed execution.",
|
||||
"additionalProperties": true
|
||||
},
|
||||
"error": {
|
||||
"type": "string",
|
||||
"nullable": true,
|
||||
"description": "Error message if the execution failed."
|
||||
},
|
||||
"metadata": {
|
||||
"type": "object",
|
||||
"description": "Execution timing metadata.",
|
||||
"properties": {
|
||||
"duration": {
|
||||
"type": "integer",
|
||||
"description": "Total execution duration in milliseconds."
|
||||
},
|
||||
"startTime": {
|
||||
"type": "string",
|
||||
"format": "date-time",
|
||||
"description": "When the resume execution started."
|
||||
},
|
||||
"endTime": {
|
||||
"type": "string",
|
||||
"format": "date-time",
|
||||
"description": "When the resume execution completed."
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"responses": {
|
||||
|
||||
@@ -6614,9 +6614,21 @@
|
||||
{
|
||||
"name": "Get Request Type Fields",
|
||||
"description": "Get the fields required to create a request of a specific type in Jira Service Management"
|
||||
},
|
||||
{
|
||||
"name": "Get Form Templates",
|
||||
"description": "List forms (ProForma/JSM Forms) in a Jira project to discover form IDs for request types"
|
||||
},
|
||||
{
|
||||
"name": "Get Form Structure",
|
||||
"description": "Get the full structure of a ProForma/JSM form including all questions, field types, choices, layout, and conditions"
|
||||
},
|
||||
{
|
||||
"name": "Get Issue Forms",
|
||||
"description": "List forms (ProForma/JSM Forms) attached to a Jira issue with metadata (name, submitted status, lock)"
|
||||
}
|
||||
],
|
||||
"operationCount": 21,
|
||||
"operationCount": 24,
|
||||
"triggers": [],
|
||||
"triggerCount": 0,
|
||||
"authType": "oauth",
|
||||
@@ -10784,8 +10796,34 @@
|
||||
}
|
||||
],
|
||||
"operationCount": 4,
|
||||
"triggers": [],
|
||||
"triggerCount": 0,
|
||||
"triggers": [
|
||||
{
|
||||
"id": "servicenow_incident_created",
|
||||
"name": "ServiceNow Incident Created",
|
||||
"description": "Trigger workflow when a new incident is created in ServiceNow"
|
||||
},
|
||||
{
|
||||
"id": "servicenow_incident_updated",
|
||||
"name": "ServiceNow Incident Updated",
|
||||
"description": "Trigger workflow when an incident is updated in ServiceNow"
|
||||
},
|
||||
{
|
||||
"id": "servicenow_change_request_created",
|
||||
"name": "ServiceNow Change Request Created",
|
||||
"description": "Trigger workflow when a new change request is created in ServiceNow"
|
||||
},
|
||||
{
|
||||
"id": "servicenow_change_request_updated",
|
||||
"name": "ServiceNow Change Request Updated",
|
||||
"description": "Trigger workflow when a change request is updated in ServiceNow"
|
||||
},
|
||||
{
|
||||
"id": "servicenow_webhook",
|
||||
"name": "ServiceNow Webhook (All Events)",
|
||||
"description": "Trigger workflow on any ServiceNow webhook event"
|
||||
}
|
||||
],
|
||||
"triggerCount": 5,
|
||||
"authType": "none",
|
||||
"category": "tools",
|
||||
"integrationType": "customer-support",
|
||||
|
||||
@@ -246,31 +246,10 @@ export async function POST(
|
||||
),
|
||||
})
|
||||
|
||||
// For forms, we don't stream back - we wait for completion and return success
|
||||
// Consume the stream to wait for completion
|
||||
const reader = stream.getReader()
|
||||
let lastOutput: any = null
|
||||
|
||||
try {
|
||||
while (true) {
|
||||
const { done, value } = await reader.read()
|
||||
if (done) break
|
||||
|
||||
// Parse SSE data if present
|
||||
const text = new TextDecoder().decode(value)
|
||||
const lines = text.split('\n')
|
||||
for (const line of lines) {
|
||||
if (line.startsWith('data: ')) {
|
||||
try {
|
||||
const data = JSON.parse(line.slice(6))
|
||||
if (data.type === 'complete' || data.output) {
|
||||
lastOutput = data.output || data
|
||||
}
|
||||
} catch {
|
||||
// Ignore parse errors
|
||||
}
|
||||
}
|
||||
}
|
||||
while (!(await reader.read()).done) {
|
||||
/* drain to let the workflow run to completion */
|
||||
}
|
||||
} finally {
|
||||
reader.releaseLock()
|
||||
|
||||
@@ -194,7 +194,7 @@ export async function POST(
|
||||
})
|
||||
}
|
||||
|
||||
if (isApiCaller && executionMode !== 'async') {
|
||||
if (isApiCaller && executionMode === 'sync') {
|
||||
const result = await PauseResumeManager.startResumeExecution(resumeArgs)
|
||||
|
||||
return NextResponse.json({
|
||||
|
||||
@@ -8,6 +8,7 @@ import { createBullMQJobData, isBullMQEnabled } from '@/lib/core/bullmq'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import { generateId } from '@/lib/core/utils/uuid'
|
||||
import { enqueueWorkspaceDispatch } from '@/lib/core/workspace-dispatch'
|
||||
import { getWorkflowById } from '@/lib/workflows/utils'
|
||||
import {
|
||||
executeJobInline,
|
||||
executeScheduleJob,
|
||||
@@ -115,7 +116,6 @@ export async function GET(request: NextRequest) {
|
||||
}
|
||||
|
||||
try {
|
||||
const { getWorkflowById } = await import('@/lib/workflows/utils')
|
||||
const resolvedWorkflow = schedule.workflowId
|
||||
? await getWorkflowById(schedule.workflowId)
|
||||
: null
|
||||
|
||||
115
apps/sim/app/api/tools/jsm/forms/issue/route.ts
Normal file
115
apps/sim/app/api/tools/jsm/forms/issue/route.ts
Normal file
@@ -0,0 +1,115 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||
import { validateJiraCloudId, validateJiraIssueKey } from '@/lib/core/security/input-validation'
|
||||
import {
|
||||
getJiraCloudId,
|
||||
getJsmFormsApiBaseUrl,
|
||||
getJsmHeaders,
|
||||
parseJsmErrorMessage,
|
||||
} from '@/tools/jsm/utils'
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
const logger = createLogger('JsmIssueFormsAPI')
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const auth = await checkInternalAuth(request)
|
||||
if (!auth.success || !auth.userId) {
|
||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
try {
|
||||
const body = await request.json()
|
||||
const { domain, accessToken, cloudId: cloudIdParam, issueIdOrKey } = body
|
||||
|
||||
if (!domain) {
|
||||
logger.error('Missing domain in request')
|
||||
return NextResponse.json({ error: 'Domain is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
if (!accessToken) {
|
||||
logger.error('Missing access token in request')
|
||||
return NextResponse.json({ error: 'Access token is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
if (!issueIdOrKey) {
|
||||
logger.error('Missing issueIdOrKey in request')
|
||||
return NextResponse.json({ error: 'Issue ID or key is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
const cloudId = cloudIdParam || (await getJiraCloudId(domain, accessToken))
|
||||
|
||||
const cloudIdValidation = validateJiraCloudId(cloudId, 'cloudId')
|
||||
if (!cloudIdValidation.isValid) {
|
||||
return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 })
|
||||
}
|
||||
|
||||
const issueIdOrKeyValidation = validateJiraIssueKey(issueIdOrKey, 'issueIdOrKey')
|
||||
if (!issueIdOrKeyValidation.isValid) {
|
||||
return NextResponse.json({ error: issueIdOrKeyValidation.error }, { status: 400 })
|
||||
}
|
||||
|
||||
const baseUrl = getJsmFormsApiBaseUrl(cloudId)
|
||||
const url = `${baseUrl}/issue/${encodeURIComponent(issueIdOrKey)}/form`
|
||||
|
||||
logger.info('Fetching issue forms from:', { url, issueIdOrKey })
|
||||
|
||||
const response = await fetch(url, {
|
||||
method: 'GET',
|
||||
headers: getJsmHeaders(accessToken),
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
const errorText = await response.text()
|
||||
logger.error('JSM Forms API error:', {
|
||||
status: response.status,
|
||||
statusText: response.statusText,
|
||||
error: errorText,
|
||||
})
|
||||
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: parseJsmErrorMessage(response.status, response.statusText, errorText),
|
||||
details: errorText,
|
||||
},
|
||||
{ status: response.status }
|
||||
)
|
||||
}
|
||||
|
||||
const data = await response.json()
|
||||
|
||||
const forms = Array.isArray(data) ? data : (data.values ?? data.forms ?? [])
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
output: {
|
||||
ts: new Date().toISOString(),
|
||||
issueIdOrKey,
|
||||
forms: forms.map((form: Record<string, unknown>) => ({
|
||||
id: form.id ?? null,
|
||||
name: form.name ?? null,
|
||||
updated: form.updated ?? null,
|
||||
submitted: form.submitted ?? false,
|
||||
lock: form.lock ?? false,
|
||||
internal: form.internal ?? null,
|
||||
formTemplateId: (form.formTemplate as Record<string, unknown>)?.id ?? null,
|
||||
})),
|
||||
total: forms.length,
|
||||
},
|
||||
})
|
||||
} catch (error) {
|
||||
logger.error('Error fetching issue forms:', {
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
stack: error instanceof Error ? error.stack : undefined,
|
||||
})
|
||||
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: error instanceof Error ? error.message : 'Internal server error',
|
||||
success: false,
|
||||
},
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
117
apps/sim/app/api/tools/jsm/forms/structure/route.ts
Normal file
117
apps/sim/app/api/tools/jsm/forms/structure/route.ts
Normal file
@@ -0,0 +1,117 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||
import { validateJiraCloudId, validateJiraIssueKey } from '@/lib/core/security/input-validation'
|
||||
import {
|
||||
getJiraCloudId,
|
||||
getJsmFormsApiBaseUrl,
|
||||
getJsmHeaders,
|
||||
parseJsmErrorMessage,
|
||||
} from '@/tools/jsm/utils'
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
const logger = createLogger('JsmFormStructureAPI')
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const auth = await checkInternalAuth(request)
|
||||
if (!auth.success || !auth.userId) {
|
||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
try {
|
||||
const body = await request.json()
|
||||
const { domain, accessToken, cloudId: cloudIdParam, projectIdOrKey, formId } = body
|
||||
|
||||
if (!domain) {
|
||||
logger.error('Missing domain in request')
|
||||
return NextResponse.json({ error: 'Domain is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
if (!accessToken) {
|
||||
logger.error('Missing access token in request')
|
||||
return NextResponse.json({ error: 'Access token is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
if (!projectIdOrKey) {
|
||||
logger.error('Missing projectIdOrKey in request')
|
||||
return NextResponse.json({ error: 'Project ID or key is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
if (!formId) {
|
||||
logger.error('Missing formId in request')
|
||||
return NextResponse.json({ error: 'Form ID is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
const cloudId = cloudIdParam || (await getJiraCloudId(domain, accessToken))
|
||||
|
||||
const cloudIdValidation = validateJiraCloudId(cloudId, 'cloudId')
|
||||
if (!cloudIdValidation.isValid) {
|
||||
return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 })
|
||||
}
|
||||
|
||||
const projectIdOrKeyValidation = validateJiraIssueKey(projectIdOrKey, 'projectIdOrKey')
|
||||
if (!projectIdOrKeyValidation.isValid) {
|
||||
return NextResponse.json({ error: projectIdOrKeyValidation.error }, { status: 400 })
|
||||
}
|
||||
|
||||
const formIdValidation = validateJiraCloudId(formId, 'formId')
|
||||
if (!formIdValidation.isValid) {
|
||||
return NextResponse.json({ error: formIdValidation.error }, { status: 400 })
|
||||
}
|
||||
|
||||
const baseUrl = getJsmFormsApiBaseUrl(cloudId)
|
||||
const url = `${baseUrl}/project/${encodeURIComponent(projectIdOrKey)}/form/${encodeURIComponent(formId)}`
|
||||
|
||||
logger.info('Fetching form template from:', { url, projectIdOrKey, formId })
|
||||
|
||||
const response = await fetch(url, {
|
||||
method: 'GET',
|
||||
headers: getJsmHeaders(accessToken),
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
const errorText = await response.text()
|
||||
logger.error('JSM Forms API error:', {
|
||||
status: response.status,
|
||||
statusText: response.statusText,
|
||||
error: errorText,
|
||||
})
|
||||
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: parseJsmErrorMessage(response.status, response.statusText, errorText),
|
||||
details: errorText,
|
||||
},
|
||||
{ status: response.status }
|
||||
)
|
||||
}
|
||||
|
||||
const data = await response.json()
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
output: {
|
||||
ts: new Date().toISOString(),
|
||||
projectIdOrKey,
|
||||
formId,
|
||||
design: data.design ?? null,
|
||||
updated: data.updated ?? null,
|
||||
publish: data.publish ?? null,
|
||||
},
|
||||
})
|
||||
} catch (error) {
|
||||
logger.error('Error fetching form structure:', {
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
stack: error instanceof Error ? error.stack : undefined,
|
||||
})
|
||||
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: error instanceof Error ? error.message : 'Internal server error',
|
||||
success: false,
|
||||
},
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
115
apps/sim/app/api/tools/jsm/forms/templates/route.ts
Normal file
115
apps/sim/app/api/tools/jsm/forms/templates/route.ts
Normal file
@@ -0,0 +1,115 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||
import { validateJiraCloudId, validateJiraIssueKey } from '@/lib/core/security/input-validation'
|
||||
import {
|
||||
getJiraCloudId,
|
||||
getJsmFormsApiBaseUrl,
|
||||
getJsmHeaders,
|
||||
parseJsmErrorMessage,
|
||||
} from '@/tools/jsm/utils'
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
const logger = createLogger('JsmFormTemplatesAPI')
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const auth = await checkInternalAuth(request)
|
||||
if (!auth.success || !auth.userId) {
|
||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
try {
|
||||
const body = await request.json()
|
||||
const { domain, accessToken, cloudId: cloudIdParam, projectIdOrKey } = body
|
||||
|
||||
if (!domain) {
|
||||
logger.error('Missing domain in request')
|
||||
return NextResponse.json({ error: 'Domain is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
if (!accessToken) {
|
||||
logger.error('Missing access token in request')
|
||||
return NextResponse.json({ error: 'Access token is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
if (!projectIdOrKey) {
|
||||
logger.error('Missing projectIdOrKey in request')
|
||||
return NextResponse.json({ error: 'Project ID or key is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
const cloudId = cloudIdParam || (await getJiraCloudId(domain, accessToken))
|
||||
|
||||
const cloudIdValidation = validateJiraCloudId(cloudId, 'cloudId')
|
||||
if (!cloudIdValidation.isValid) {
|
||||
return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 })
|
||||
}
|
||||
|
||||
const projectIdOrKeyValidation = validateJiraIssueKey(projectIdOrKey, 'projectIdOrKey')
|
||||
if (!projectIdOrKeyValidation.isValid) {
|
||||
return NextResponse.json({ error: projectIdOrKeyValidation.error }, { status: 400 })
|
||||
}
|
||||
|
||||
const baseUrl = getJsmFormsApiBaseUrl(cloudId)
|
||||
const url = `${baseUrl}/project/${encodeURIComponent(projectIdOrKey)}/form`
|
||||
|
||||
logger.info('Fetching project form templates from:', { url, projectIdOrKey })
|
||||
|
||||
const response = await fetch(url, {
|
||||
method: 'GET',
|
||||
headers: getJsmHeaders(accessToken),
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
const errorText = await response.text()
|
||||
logger.error('JSM Forms API error:', {
|
||||
status: response.status,
|
||||
statusText: response.statusText,
|
||||
error: errorText,
|
||||
})
|
||||
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: parseJsmErrorMessage(response.status, response.statusText, errorText),
|
||||
details: errorText,
|
||||
},
|
||||
{ status: response.status }
|
||||
)
|
||||
}
|
||||
|
||||
const data = await response.json()
|
||||
|
||||
const templates = Array.isArray(data) ? data : (data.values ?? [])
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
output: {
|
||||
ts: new Date().toISOString(),
|
||||
projectIdOrKey,
|
||||
templates: templates.map((template: Record<string, unknown>) => ({
|
||||
id: template.id ?? null,
|
||||
name: template.name ?? null,
|
||||
updated: template.updated ?? null,
|
||||
issueCreateIssueTypeIds: template.issueCreateIssueTypeIds ?? [],
|
||||
issueCreateRequestTypeIds: template.issueCreateRequestTypeIds ?? [],
|
||||
portalRequestTypeIds: template.portalRequestTypeIds ?? [],
|
||||
recommendedIssueRequestTypeIds: template.recommendedIssueRequestTypeIds ?? [],
|
||||
})),
|
||||
total: templates.length,
|
||||
},
|
||||
})
|
||||
} catch (error) {
|
||||
logger.error('Error fetching form templates:', {
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
stack: error instanceof Error ? error.stack : undefined,
|
||||
})
|
||||
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: error instanceof Error ? error.message : 'Internal server error',
|
||||
success: false,
|
||||
},
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -20,9 +20,6 @@ export async function GET(
|
||||
const { provider } = await params
|
||||
const requestId = generateShortId()
|
||||
|
||||
const LOCK_KEY = `${provider}-polling-lock`
|
||||
let lockValue: string | undefined
|
||||
|
||||
try {
|
||||
const authError = verifyCronAuth(request, `${provider} webhook polling`)
|
||||
if (authError) return authError
|
||||
@@ -31,29 +28,38 @@ export async function GET(
|
||||
return NextResponse.json({ error: `Unknown polling provider: ${provider}` }, { status: 404 })
|
||||
}
|
||||
|
||||
lockValue = requestId
|
||||
const locked = await acquireLock(LOCK_KEY, lockValue, LOCK_TTL_SECONDS)
|
||||
if (!locked) {
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: true,
|
||||
message: 'Polling already in progress – skipped',
|
||||
requestId,
|
||||
status: 'skip',
|
||||
},
|
||||
{ status: 202 }
|
||||
)
|
||||
const LOCK_KEY = `${provider}-polling-lock`
|
||||
let lockValue: string | undefined
|
||||
|
||||
try {
|
||||
lockValue = requestId
|
||||
const locked = await acquireLock(LOCK_KEY, lockValue, LOCK_TTL_SECONDS)
|
||||
if (!locked) {
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: true,
|
||||
message: 'Polling already in progress – skipped',
|
||||
requestId,
|
||||
status: 'skip',
|
||||
},
|
||||
{ status: 202 }
|
||||
)
|
||||
}
|
||||
|
||||
const results = await pollProvider(provider)
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
message: `${provider} polling completed`,
|
||||
requestId,
|
||||
status: 'completed',
|
||||
...results,
|
||||
})
|
||||
} finally {
|
||||
if (lockValue) {
|
||||
await releaseLock(LOCK_KEY, lockValue).catch(() => {})
|
||||
}
|
||||
}
|
||||
|
||||
const results = await pollProvider(provider)
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
message: `${provider} polling completed`,
|
||||
requestId,
|
||||
status: 'completed',
|
||||
...results,
|
||||
})
|
||||
} catch (error) {
|
||||
logger.error(`Error during ${provider} polling (${requestId}):`, error)
|
||||
return NextResponse.json(
|
||||
@@ -65,9 +71,5 @@ export async function GET(
|
||||
},
|
||||
{ status: 500 }
|
||||
)
|
||||
} finally {
|
||||
if (lockValue) {
|
||||
await releaseLock(LOCK_KEY, lockValue).catch(() => {})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -89,7 +89,7 @@ export function VersionDescriptionModal({
|
||||
return (
|
||||
<>
|
||||
<Modal open={open} onOpenChange={(openState) => !openState && handleCloseAttempt()}>
|
||||
<ModalContent size='md'>
|
||||
<ModalContent size='lg'>
|
||||
<ModalHeader>
|
||||
<span>Version Description</span>
|
||||
</ModalHeader>
|
||||
|
||||
@@ -44,6 +44,9 @@ export const JiraServiceManagementBlock: BlockConfig<JsmResponse> = {
|
||||
{ label: 'Get Approvals', id: 'get_approvals' },
|
||||
{ label: 'Answer Approval', id: 'answer_approval' },
|
||||
{ label: 'Get Request Type Fields', id: 'get_request_type_fields' },
|
||||
{ label: 'Get Form Templates', id: 'get_form_templates' },
|
||||
{ label: 'Get Form Structure', id: 'get_form_structure' },
|
||||
{ label: 'Get Issue Forms', id: 'get_issue_forms' },
|
||||
],
|
||||
value: () => 'get_service_desks',
|
||||
},
|
||||
@@ -191,9 +194,26 @@ export const JiraServiceManagementBlock: BlockConfig<JsmResponse> = {
|
||||
'add_participants',
|
||||
'get_approvals',
|
||||
'answer_approval',
|
||||
'get_issue_forms',
|
||||
],
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'projectIdOrKey',
|
||||
title: 'Project ID or Key',
|
||||
type: 'short-input',
|
||||
required: { field: 'operation', value: ['get_form_templates', 'get_form_structure'] },
|
||||
placeholder: 'Enter Jira project ID or key (e.g., 10001 or SD)',
|
||||
condition: { field: 'operation', value: ['get_form_templates', 'get_form_structure'] },
|
||||
},
|
||||
{
|
||||
id: 'formId',
|
||||
title: 'Form ID',
|
||||
type: 'short-input',
|
||||
required: true,
|
||||
placeholder: 'Enter form ID (UUID from Get Form Templates)',
|
||||
condition: { field: 'operation', value: 'get_form_structure' },
|
||||
},
|
||||
{
|
||||
id: 'summary',
|
||||
title: 'Summary',
|
||||
@@ -503,6 +523,9 @@ Return ONLY the comment text - no explanations.`,
|
||||
'jsm_get_approvals',
|
||||
'jsm_answer_approval',
|
||||
'jsm_get_request_type_fields',
|
||||
'jsm_get_form_templates',
|
||||
'jsm_get_form_structure',
|
||||
'jsm_get_issue_forms',
|
||||
],
|
||||
config: {
|
||||
tool: (params) => {
|
||||
@@ -549,6 +572,12 @@ Return ONLY the comment text - no explanations.`,
|
||||
return 'jsm_answer_approval'
|
||||
case 'get_request_type_fields':
|
||||
return 'jsm_get_request_type_fields'
|
||||
case 'get_form_templates':
|
||||
return 'jsm_get_form_templates'
|
||||
case 'get_form_structure':
|
||||
return 'jsm_get_form_structure'
|
||||
case 'get_issue_forms':
|
||||
return 'jsm_get_issue_forms'
|
||||
default:
|
||||
return 'jsm_get_service_desks'
|
||||
}
|
||||
@@ -808,6 +837,34 @@ Return ONLY the comment text - no explanations.`,
|
||||
serviceDeskId: params.serviceDeskId,
|
||||
requestTypeId: params.requestTypeId,
|
||||
}
|
||||
case 'get_form_templates':
|
||||
if (!params.projectIdOrKey) {
|
||||
throw new Error('Project ID or key is required')
|
||||
}
|
||||
return {
|
||||
...baseParams,
|
||||
projectIdOrKey: params.projectIdOrKey,
|
||||
}
|
||||
case 'get_form_structure':
|
||||
if (!params.projectIdOrKey) {
|
||||
throw new Error('Project ID or key is required')
|
||||
}
|
||||
if (!params.formId) {
|
||||
throw new Error('Form ID is required')
|
||||
}
|
||||
return {
|
||||
...baseParams,
|
||||
projectIdOrKey: params.projectIdOrKey,
|
||||
formId: params.formId,
|
||||
}
|
||||
case 'get_issue_forms':
|
||||
if (!params.issueIdOrKey) {
|
||||
throw new Error('Issue ID or key is required')
|
||||
}
|
||||
return {
|
||||
...baseParams,
|
||||
issueIdOrKey: params.issueIdOrKey,
|
||||
}
|
||||
default:
|
||||
return baseParams
|
||||
}
|
||||
@@ -857,6 +914,8 @@ Return ONLY the comment text - no explanations.`,
|
||||
type: 'string',
|
||||
description: 'JSON object of form answers for form-based request types',
|
||||
},
|
||||
projectIdOrKey: { type: 'string', description: 'Jira project ID or key' },
|
||||
formId: { type: 'string', description: 'Form ID (UUID)' },
|
||||
searchQuery: { type: 'string', description: 'Filter request types by name' },
|
||||
groupId: { type: 'string', description: 'Filter by request type group ID' },
|
||||
expand: { type: 'string', description: 'Comma-separated fields to expand' },
|
||||
@@ -899,5 +958,25 @@ Return ONLY the comment text - no explanations.`,
|
||||
type: 'boolean',
|
||||
description: 'Whether requests can be raised on behalf of another user',
|
||||
},
|
||||
templates: {
|
||||
type: 'json',
|
||||
description:
|
||||
'Array of form templates (id, name, updated, portalRequestTypeIds, issueCreateIssueTypeIds)',
|
||||
},
|
||||
design: {
|
||||
type: 'json',
|
||||
description:
|
||||
'Full form design with questions (labels, types, choices, validation), layout, conditions, sections, settings',
|
||||
},
|
||||
publish: {
|
||||
type: 'json',
|
||||
description: 'Form publishing and request type configuration',
|
||||
},
|
||||
updated: { type: 'string', description: 'Last updated timestamp' },
|
||||
forms: {
|
||||
type: 'json',
|
||||
description:
|
||||
'Array of forms attached to an issue (id, name, updated, submitted, lock, internal, formTemplateId)',
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
@@ -2,6 +2,7 @@ import { ServiceNowIcon } from '@/components/icons'
|
||||
import type { BlockConfig } from '@/blocks/types'
|
||||
import { IntegrationType } from '@/blocks/types'
|
||||
import type { ServiceNowResponse } from '@/tools/servicenow/types'
|
||||
import { getTrigger } from '@/triggers'
|
||||
|
||||
export const ServiceNowBlock: BlockConfig<ServiceNowResponse> = {
|
||||
type: 'servicenow',
|
||||
@@ -215,6 +216,11 @@ Output: {"state": "2", "assigned_to": "john.doe", "work_notes": "Assigned and st
|
||||
condition: { field: 'operation', value: 'servicenow_delete_record' },
|
||||
required: true,
|
||||
},
|
||||
...getTrigger('servicenow_incident_created').subBlocks,
|
||||
...getTrigger('servicenow_incident_updated').subBlocks,
|
||||
...getTrigger('servicenow_change_request_created').subBlocks,
|
||||
...getTrigger('servicenow_change_request_updated').subBlocks,
|
||||
...getTrigger('servicenow_webhook').subBlocks,
|
||||
],
|
||||
tools: {
|
||||
access: [
|
||||
@@ -262,4 +268,14 @@ Output: {"state": "2", "assigned_to": "john.doe", "work_notes": "Assigned and st
|
||||
success: { type: 'boolean', description: 'Operation success status' },
|
||||
metadata: { type: 'json', description: 'Operation metadata' },
|
||||
},
|
||||
triggers: {
|
||||
enabled: true,
|
||||
available: [
|
||||
'servicenow_incident_created',
|
||||
'servicenow_incident_updated',
|
||||
'servicenow_change_request_created',
|
||||
'servicenow_change_request_updated',
|
||||
'servicenow_webhook',
|
||||
],
|
||||
},
|
||||
}
|
||||
|
||||
@@ -157,7 +157,7 @@ export function useAcceptCredentialSetInvitation() {
|
||||
}
|
||||
return response.json()
|
||||
},
|
||||
onSuccess: () => {
|
||||
onSettled: () => {
|
||||
queryClient.invalidateQueries({ queryKey: credentialSetKeys.memberships() })
|
||||
queryClient.invalidateQueries({ queryKey: credentialSetKeys.invitations() })
|
||||
},
|
||||
@@ -187,7 +187,7 @@ export function useCreateCredentialSet() {
|
||||
}
|
||||
return response.json()
|
||||
},
|
||||
onSuccess: (_data, variables) => {
|
||||
onSettled: (_data, _error, variables) => {
|
||||
queryClient.invalidateQueries({ queryKey: credentialSetKeys.list(variables.organizationId) })
|
||||
},
|
||||
})
|
||||
@@ -209,7 +209,7 @@ export function useCreateCredentialSetInvitation() {
|
||||
}
|
||||
return response.json()
|
||||
},
|
||||
onSuccess: (_data, variables) => {
|
||||
onSettled: (_data, _error, variables) => {
|
||||
queryClient.invalidateQueries({
|
||||
queryKey: credentialSetKeys.detailInvitations(variables.credentialSetId),
|
||||
})
|
||||
@@ -264,7 +264,7 @@ export function useRemoveCredentialSetMember() {
|
||||
}
|
||||
return response.json()
|
||||
},
|
||||
onSuccess: (_data, variables) => {
|
||||
onSettled: (_data, _error, variables) => {
|
||||
queryClient.invalidateQueries({
|
||||
queryKey: credentialSetKeys.detailMembers(variables.credentialSetId),
|
||||
})
|
||||
@@ -288,7 +288,7 @@ export function useLeaveCredentialSet() {
|
||||
}
|
||||
return response.json()
|
||||
},
|
||||
onSuccess: () => {
|
||||
onSettled: () => {
|
||||
queryClient.invalidateQueries({ queryKey: credentialSetKeys.memberships() })
|
||||
},
|
||||
})
|
||||
@@ -313,7 +313,7 @@ export function useDeleteCredentialSet() {
|
||||
}
|
||||
return response.json()
|
||||
},
|
||||
onSuccess: (_data, variables) => {
|
||||
onSettled: (_data, _error, variables) => {
|
||||
queryClient.invalidateQueries({
|
||||
queryKey: credentialSetKeys.list(variables.organizationId),
|
||||
})
|
||||
@@ -370,7 +370,7 @@ export function useCancelCredentialSetInvitation() {
|
||||
}
|
||||
return response.json()
|
||||
},
|
||||
onSuccess: (_data, variables) => {
|
||||
onSettled: (_data, _error, variables) => {
|
||||
queryClient.invalidateQueries({
|
||||
queryKey: credentialSetKeys.detailInvitations(variables.credentialSetId),
|
||||
})
|
||||
@@ -393,7 +393,7 @@ export function useResendCredentialSetInvitation() {
|
||||
}
|
||||
return response.json()
|
||||
},
|
||||
onSuccess: (_data, variables) => {
|
||||
onSettled: (_data, _error, variables) => {
|
||||
queryClient.invalidateQueries({
|
||||
queryKey: credentialSetKeys.detailInvitations(variables.credentialSetId),
|
||||
})
|
||||
|
||||
@@ -5,6 +5,12 @@ import { useMutation, useQuery, useQueryClient } from '@tanstack/react-query'
|
||||
import { environmentKeys } from '@/hooks/queries/environment'
|
||||
import { fetchJson } from '@/hooks/selectors/helpers'
|
||||
|
||||
/**
|
||||
* Key prefix for OAuth credential queries.
|
||||
* Duplicated here to avoid circular imports with oauth-credentials.ts.
|
||||
*/
|
||||
const OAUTH_CREDENTIALS_KEY = ['oauthCredentials'] as const
|
||||
|
||||
export type WorkspaceCredentialType = 'oauth' | 'env_workspace' | 'env_personal' | 'service_account'
|
||||
export type WorkspaceCredentialRole = 'admin' | 'member'
|
||||
export type WorkspaceCredentialMemberStatus = 'active' | 'pending' | 'revoked'
|
||||
@@ -192,6 +198,9 @@ export function useCreateWorkspaceCredential() {
|
||||
queryClient.invalidateQueries({
|
||||
queryKey: workspaceCredentialKeys.lists(),
|
||||
})
|
||||
queryClient.invalidateQueries({
|
||||
queryKey: OAUTH_CREDENTIALS_KEY,
|
||||
})
|
||||
},
|
||||
})
|
||||
}
|
||||
@@ -269,6 +278,9 @@ export function useUpdateWorkspaceCredential() {
|
||||
queryClient.invalidateQueries({
|
||||
queryKey: workspaceCredentialKeys.lists(),
|
||||
})
|
||||
queryClient.invalidateQueries({
|
||||
queryKey: OAUTH_CREDENTIALS_KEY,
|
||||
})
|
||||
},
|
||||
})
|
||||
}
|
||||
@@ -290,6 +302,7 @@ export function useDeleteWorkspaceCredential() {
|
||||
onSettled: (_data, _error, credentialId) => {
|
||||
queryClient.invalidateQueries({ queryKey: workspaceCredentialKeys.detail(credentialId) })
|
||||
queryClient.invalidateQueries({ queryKey: workspaceCredentialKeys.lists() })
|
||||
queryClient.invalidateQueries({ queryKey: OAUTH_CREDENTIALS_KEY })
|
||||
queryClient.invalidateQueries({ queryKey: environmentKeys.all })
|
||||
},
|
||||
})
|
||||
|
||||
@@ -151,44 +151,68 @@ async function fetchNewEmails(
|
||||
let latestHistoryId = config.historyId
|
||||
|
||||
if (useHistoryApi) {
|
||||
const historyUrl = `https://gmail.googleapis.com/gmail/v1/users/me/history?startHistoryId=${config.historyId}`
|
||||
const messageIds = new Set<string>()
|
||||
let pageToken: string | undefined
|
||||
|
||||
const historyResponse = await fetch(historyUrl, {
|
||||
headers: { Authorization: `Bearer ${accessToken}` },
|
||||
})
|
||||
do {
|
||||
let historyUrl = `https://gmail.googleapis.com/gmail/v1/users/me/history?startHistoryId=${config.historyId}&historyTypes=messageAdded`
|
||||
if (pageToken) {
|
||||
historyUrl += `&pageToken=${pageToken}`
|
||||
}
|
||||
|
||||
if (!historyResponse.ok) {
|
||||
const errorData = await historyResponse.json()
|
||||
logger.error(`[${requestId}] Gmail history API error:`, {
|
||||
status: historyResponse.status,
|
||||
statusText: historyResponse.statusText,
|
||||
error: errorData,
|
||||
const historyResponse = await fetch(historyUrl, {
|
||||
headers: { Authorization: `Bearer ${accessToken}` },
|
||||
})
|
||||
|
||||
logger.info(`[${requestId}] Falling back to search API after history API failure`)
|
||||
return searchEmails(accessToken, config, requestId, logger)
|
||||
}
|
||||
if (!historyResponse.ok) {
|
||||
const status = historyResponse.status
|
||||
const errorData = await historyResponse.json().catch(() => ({}))
|
||||
logger.error(`[${requestId}] Gmail history API error:`, {
|
||||
status,
|
||||
statusText: historyResponse.statusText,
|
||||
error: errorData,
|
||||
})
|
||||
|
||||
const historyData = await historyResponse.json()
|
||||
if (status === 403 || status === 429) {
|
||||
throw new Error(
|
||||
`Gmail API error ${status} — skipping to retry next poll cycle: ${JSON.stringify(errorData)}`
|
||||
)
|
||||
}
|
||||
|
||||
if (!historyData.history || !historyData.history.length) {
|
||||
return { emails: [], latestHistoryId }
|
||||
}
|
||||
logger.info(`[${requestId}] Falling back to search API after history API error ${status}`)
|
||||
const searchResult = await searchEmails(accessToken, config, requestId, logger)
|
||||
if (searchResult.emails.length === 0) {
|
||||
const freshHistoryId = await getGmailProfileHistoryId(accessToken, requestId, logger)
|
||||
if (freshHistoryId) {
|
||||
logger.info(
|
||||
`[${requestId}] Fetched fresh historyId ${freshHistoryId} after invalid historyId (was: ${config.historyId})`
|
||||
)
|
||||
return { emails: [], latestHistoryId: freshHistoryId }
|
||||
}
|
||||
}
|
||||
return searchResult
|
||||
}
|
||||
|
||||
if (historyData.historyId) {
|
||||
latestHistoryId = historyData.historyId
|
||||
}
|
||||
const historyData = await historyResponse.json()
|
||||
|
||||
const messageIds = new Set<string>()
|
||||
for (const history of historyData.history) {
|
||||
if (history.messagesAdded) {
|
||||
for (const messageAdded of history.messagesAdded) {
|
||||
messageIds.add(messageAdded.message.id)
|
||||
if (historyData.historyId) {
|
||||
latestHistoryId = historyData.historyId
|
||||
}
|
||||
|
||||
if (historyData.history) {
|
||||
for (const history of historyData.history) {
|
||||
if (history.messagesAdded) {
|
||||
for (const messageAdded of history.messagesAdded) {
|
||||
messageIds.add(messageAdded.message.id)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (messageIds.size === 0) {
|
||||
pageToken = historyData.nextPageToken
|
||||
} while (pageToken)
|
||||
|
||||
if (!messageIds.size) {
|
||||
return { emails: [], latestHistoryId }
|
||||
}
|
||||
|
||||
@@ -352,6 +376,29 @@ async function searchEmails(
|
||||
}
|
||||
}
|
||||
|
||||
async function getGmailProfileHistoryId(
|
||||
accessToken: string,
|
||||
requestId: string,
|
||||
logger: ReturnType<typeof import('@sim/logger').createLogger>
|
||||
): Promise<string | null> {
|
||||
try {
|
||||
const response = await fetch('https://gmail.googleapis.com/gmail/v1/users/me/profile', {
|
||||
headers: { Authorization: `Bearer ${accessToken}` },
|
||||
})
|
||||
if (!response.ok) {
|
||||
logger.warn(
|
||||
`[${requestId}] Failed to fetch Gmail profile for fresh historyId: ${response.status}`
|
||||
)
|
||||
return null
|
||||
}
|
||||
const profile = await response.json()
|
||||
return (profile.historyId as string | undefined) ?? null
|
||||
} catch (error) {
|
||||
logger.warn(`[${requestId}] Error fetching Gmail profile:`, error)
|
||||
return null
|
||||
}
|
||||
}
|
||||
|
||||
async function getEmailDetails(accessToken: string, messageId: string): Promise<GmailEmail> {
|
||||
const messageUrl = `https://gmail.googleapis.com/gmail/v1/users/me/messages/${messageId}?format=full`
|
||||
|
||||
@@ -442,9 +489,7 @@ async function processEmails(
|
||||
if (headers.date) {
|
||||
try {
|
||||
date = new Date(headers.date).toISOString()
|
||||
} catch (_e) {
|
||||
// Keep date as null if parsing fails
|
||||
}
|
||||
} catch (_e) {}
|
||||
} else if (email.internalDate) {
|
||||
date = new Date(Number.parseInt(email.internalDate)).toISOString()
|
||||
}
|
||||
|
||||
@@ -22,6 +22,7 @@ interface ImapWebhookConfig {
|
||||
includeAttachments: boolean
|
||||
lastProcessedUid?: number
|
||||
lastProcessedUidByMailbox?: Record<string, number>
|
||||
uidValidityByMailbox?: Record<string, string>
|
||||
lastCheckedTimestamp?: string
|
||||
maxEmailsPerPoll?: number
|
||||
}
|
||||
@@ -90,48 +91,90 @@ export const imapPollingHandler: PollingProviderHandler = {
|
||||
return 'failure'
|
||||
}
|
||||
|
||||
const { emails, latestUidByMailbox } = await fetchNewEmails(
|
||||
config,
|
||||
requestId,
|
||||
hostValidation.resolvedIP!,
|
||||
logger
|
||||
)
|
||||
const pollTimestamp = new Date().toISOString()
|
||||
const client = new ImapFlow({
|
||||
host: hostValidation.resolvedIP!,
|
||||
servername: config.host,
|
||||
port: config.port || 993,
|
||||
secure: config.secure ?? true,
|
||||
auth: {
|
||||
user: config.username,
|
||||
pass: config.password,
|
||||
},
|
||||
tls: { rejectUnauthorized: true },
|
||||
logger: false,
|
||||
})
|
||||
|
||||
if (!emails || !emails.length) {
|
||||
await updateImapState(webhookId, latestUidByMailbox, pollTimestamp, config, logger)
|
||||
await markWebhookSuccess(webhookId, logger)
|
||||
logger.info(`[${requestId}] No new emails found for webhook ${webhookId}`)
|
||||
return 'success'
|
||||
}
|
||||
let emails: Awaited<ReturnType<typeof fetchNewEmails>>['emails'] = []
|
||||
let latestUidByMailbox: Record<string, number> = {}
|
||||
let uidValidityByMailbox: Record<string, string> = {}
|
||||
|
||||
logger.info(`[${requestId}] Found ${emails.length} new emails for webhook ${webhookId}`)
|
||||
try {
|
||||
await client.connect()
|
||||
|
||||
const { processedCount, failedCount } = await processEmails(
|
||||
emails,
|
||||
webhookData,
|
||||
workflowData,
|
||||
config,
|
||||
requestId,
|
||||
hostValidation.resolvedIP!,
|
||||
logger
|
||||
)
|
||||
const result = await fetchNewEmails(client, config, requestId, logger)
|
||||
emails = result.emails
|
||||
latestUidByMailbox = result.latestUidByMailbox
|
||||
uidValidityByMailbox = result.uidValidityByMailbox
|
||||
|
||||
await updateImapState(webhookId, latestUidByMailbox, pollTimestamp, config, logger)
|
||||
const pollTimestamp = new Date().toISOString()
|
||||
|
||||
if (failedCount > 0 && processedCount === 0) {
|
||||
await markWebhookFailed(webhookId, logger)
|
||||
logger.warn(
|
||||
`[${requestId}] All ${failedCount} emails failed to process for webhook ${webhookId}`
|
||||
if (!emails.length) {
|
||||
await updateImapState(
|
||||
webhookId,
|
||||
latestUidByMailbox,
|
||||
pollTimestamp,
|
||||
config,
|
||||
logger,
|
||||
uidValidityByMailbox
|
||||
)
|
||||
await markWebhookSuccess(webhookId, logger)
|
||||
logger.info(`[${requestId}] No new emails found for webhook ${webhookId}`)
|
||||
await client.logout()
|
||||
return 'success'
|
||||
}
|
||||
|
||||
logger.info(`[${requestId}] Found ${emails.length} new emails for webhook ${webhookId}`)
|
||||
|
||||
const { processedCount, failedCount } = await processEmails(
|
||||
emails,
|
||||
webhookData,
|
||||
workflowData,
|
||||
config,
|
||||
client,
|
||||
requestId,
|
||||
logger
|
||||
)
|
||||
return 'failure'
|
||||
}
|
||||
|
||||
await markWebhookSuccess(webhookId, logger)
|
||||
logger.info(
|
||||
`[${requestId}] Successfully processed ${processedCount} emails for webhook ${webhookId}${failedCount > 0 ? ` (${failedCount} failed)` : ''}`
|
||||
)
|
||||
return 'success'
|
||||
await updateImapState(
|
||||
webhookId,
|
||||
latestUidByMailbox,
|
||||
pollTimestamp,
|
||||
config,
|
||||
logger,
|
||||
uidValidityByMailbox
|
||||
)
|
||||
|
||||
await client.logout()
|
||||
|
||||
if (failedCount > 0 && processedCount === 0) {
|
||||
await markWebhookFailed(webhookId, logger)
|
||||
logger.warn(
|
||||
`[${requestId}] All ${failedCount} emails failed to process for webhook ${webhookId}`
|
||||
)
|
||||
return 'failure'
|
||||
}
|
||||
|
||||
await markWebhookSuccess(webhookId, logger)
|
||||
logger.info(
|
||||
`[${requestId}] Successfully processed ${processedCount} emails for webhook ${webhookId}${failedCount > 0 ? ` (${failedCount} failed)` : ''}`
|
||||
)
|
||||
return 'success'
|
||||
} catch (innerError) {
|
||||
try {
|
||||
await client.logout()
|
||||
} catch {}
|
||||
throw innerError
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error(`[${requestId}] Error processing IMAP webhook ${webhookId}:`, error)
|
||||
await markWebhookFailed(webhookId, logger)
|
||||
@@ -145,13 +188,35 @@ async function updateImapState(
|
||||
uidByMailbox: Record<string, number>,
|
||||
timestamp: string,
|
||||
config: ImapWebhookConfig,
|
||||
logger: ReturnType<typeof import('@sim/logger').createLogger>
|
||||
logger: ReturnType<typeof import('@sim/logger').createLogger>,
|
||||
uidValidityByMailbox: Record<string, string>
|
||||
) {
|
||||
const existingUidByMailbox = config.lastProcessedUidByMailbox || {}
|
||||
const mergedUidByMailbox = { ...existingUidByMailbox }
|
||||
const prevUidValidity = config.uidValidityByMailbox || {}
|
||||
|
||||
const resetMailboxes = new Set(
|
||||
Object.entries(uidValidityByMailbox)
|
||||
.filter(
|
||||
([mailbox, validity]) =>
|
||||
prevUidValidity[mailbox] !== undefined && prevUidValidity[mailbox] !== validity
|
||||
)
|
||||
.map(([mailbox]) => mailbox)
|
||||
)
|
||||
|
||||
const mergedUidByMailbox: Record<string, number> = {}
|
||||
|
||||
for (const [mailbox, uid] of Object.entries(existingUidByMailbox)) {
|
||||
if (!resetMailboxes.has(mailbox)) {
|
||||
mergedUidByMailbox[mailbox] = uid
|
||||
}
|
||||
}
|
||||
|
||||
for (const [mailbox, uid] of Object.entries(uidByMailbox)) {
|
||||
mergedUidByMailbox[mailbox] = Math.max(uid, mergedUidByMailbox[mailbox] || 0)
|
||||
if (resetMailboxes.has(mailbox)) {
|
||||
mergedUidByMailbox[mailbox] = uid
|
||||
} else {
|
||||
mergedUidByMailbox[mailbox] = Math.max(uid, mergedUidByMailbox[mailbox] || 0)
|
||||
}
|
||||
}
|
||||
|
||||
await updateWebhookProviderConfig(
|
||||
@@ -159,30 +224,18 @@ async function updateImapState(
|
||||
{
|
||||
lastProcessedUidByMailbox: mergedUidByMailbox,
|
||||
lastCheckedTimestamp: timestamp,
|
||||
uidValidityByMailbox,
|
||||
},
|
||||
logger
|
||||
)
|
||||
}
|
||||
|
||||
async function fetchNewEmails(
|
||||
client: ImapFlow,
|
||||
config: ImapWebhookConfig,
|
||||
requestId: string,
|
||||
resolvedIP: string,
|
||||
logger: ReturnType<typeof import('@sim/logger').createLogger>
|
||||
) {
|
||||
const client = new ImapFlow({
|
||||
host: resolvedIP,
|
||||
servername: config.host,
|
||||
port: config.port || 993,
|
||||
secure: config.secure ?? true,
|
||||
auth: {
|
||||
user: config.username,
|
||||
pass: config.password,
|
||||
},
|
||||
tls: { rejectUnauthorized: true },
|
||||
logger: false,
|
||||
})
|
||||
|
||||
const emails: Array<{
|
||||
uid: number
|
||||
mailboxPath: string
|
||||
@@ -193,97 +246,93 @@ async function fetchNewEmails(
|
||||
|
||||
const mailboxes = getMailboxesToCheck(config)
|
||||
const latestUidByMailbox: Record<string, number> = { ...(config.lastProcessedUidByMailbox || {}) }
|
||||
const uidValidityByMailbox: Record<string, string> = { ...(config.uidValidityByMailbox || {}) }
|
||||
|
||||
try {
|
||||
await client.connect()
|
||||
const maxEmails = config.maxEmailsPerPoll || 25
|
||||
let totalEmailsCollected = 0
|
||||
|
||||
const maxEmails = config.maxEmailsPerPoll || 25
|
||||
let totalEmailsCollected = 0
|
||||
for (const mailboxPath of mailboxes) {
|
||||
if (totalEmailsCollected >= maxEmails) break
|
||||
|
||||
for (const mailboxPath of mailboxes) {
|
||||
if (totalEmailsCollected >= maxEmails) break
|
||||
try {
|
||||
const mailbox = await client.mailboxOpen(mailboxPath)
|
||||
|
||||
try {
|
||||
await client.mailboxOpen(mailboxPath)
|
||||
const currentUidValidity = mailbox.uidValidity.toString()
|
||||
const storedUidValidity = uidValidityByMailbox[mailboxPath]
|
||||
|
||||
let searchCriteria: Record<string, unknown> = { unseen: true }
|
||||
if (config.searchCriteria) {
|
||||
if (typeof config.searchCriteria === 'object') {
|
||||
searchCriteria = config.searchCriteria as unknown as Record<string, unknown>
|
||||
} else if (typeof config.searchCriteria === 'string') {
|
||||
try {
|
||||
searchCriteria = JSON.parse(config.searchCriteria)
|
||||
} catch {
|
||||
logger.warn(`[${requestId}] Invalid search criteria JSON, using default`)
|
||||
}
|
||||
if (storedUidValidity && storedUidValidity !== currentUidValidity) {
|
||||
logger.warn(
|
||||
`[${requestId}] UIDVALIDITY changed for ${mailboxPath} (${storedUidValidity} -> ${currentUidValidity}), discarding stored UID`
|
||||
)
|
||||
delete latestUidByMailbox[mailboxPath]
|
||||
}
|
||||
uidValidityByMailbox[mailboxPath] = currentUidValidity
|
||||
|
||||
let searchCriteria: Record<string, unknown> = { unseen: true }
|
||||
if (config.searchCriteria) {
|
||||
if (typeof config.searchCriteria === 'object') {
|
||||
searchCriteria = config.searchCriteria as unknown as Record<string, unknown>
|
||||
} else if (typeof config.searchCriteria === 'string') {
|
||||
try {
|
||||
searchCriteria = JSON.parse(config.searchCriteria)
|
||||
} catch {
|
||||
logger.warn(`[${requestId}] Invalid search criteria JSON, using default`)
|
||||
}
|
||||
}
|
||||
|
||||
const lastUidForMailbox = latestUidByMailbox[mailboxPath] || config.lastProcessedUid
|
||||
|
||||
if (lastUidForMailbox) {
|
||||
searchCriteria = { ...searchCriteria, uid: `${lastUidForMailbox + 1}:*` }
|
||||
}
|
||||
|
||||
if (config.lastCheckedTimestamp) {
|
||||
const lastChecked = new Date(config.lastCheckedTimestamp)
|
||||
const bufferTime = new Date(lastChecked.getTime() - 60000)
|
||||
searchCriteria = { ...searchCriteria, since: bufferTime }
|
||||
} else {
|
||||
const oneDayAgo = new Date(Date.now() - 24 * 60 * 60 * 1000)
|
||||
searchCriteria = { ...searchCriteria, since: oneDayAgo }
|
||||
}
|
||||
|
||||
let messageUids: number[] = []
|
||||
try {
|
||||
const searchResult = await client.search(searchCriteria, { uid: true })
|
||||
messageUids = searchResult === false ? [] : searchResult
|
||||
} catch {
|
||||
continue
|
||||
}
|
||||
|
||||
if (messageUids.length === 0) continue
|
||||
|
||||
messageUids.sort((a, b) => a - b)
|
||||
const remainingSlots = maxEmails - totalEmailsCollected
|
||||
const uidsToProcess = messageUids.slice(0, remainingSlots)
|
||||
|
||||
if (uidsToProcess.length > 0) {
|
||||
latestUidByMailbox[mailboxPath] = Math.max(
|
||||
...uidsToProcess,
|
||||
latestUidByMailbox[mailboxPath] || 0
|
||||
)
|
||||
}
|
||||
|
||||
for await (const msg of client.fetch(
|
||||
uidsToProcess,
|
||||
{ uid: true, envelope: true, bodyStructure: true, source: true },
|
||||
{ uid: true }
|
||||
)) {
|
||||
emails.push({
|
||||
uid: msg.uid,
|
||||
mailboxPath,
|
||||
envelope: msg.envelope,
|
||||
bodyStructure: msg.bodyStructure,
|
||||
source: msg.source,
|
||||
})
|
||||
totalEmailsCollected++
|
||||
}
|
||||
} catch (mailboxError) {
|
||||
logger.warn(`[${requestId}] Error processing mailbox ${mailboxPath}:`, mailboxError)
|
||||
}
|
||||
}
|
||||
|
||||
await client.logout()
|
||||
return { emails, latestUidByMailbox }
|
||||
} catch (error) {
|
||||
try {
|
||||
await client.logout()
|
||||
} catch {
|
||||
// Ignore logout errors
|
||||
const lastUidForMailbox = latestUidByMailbox[mailboxPath]
|
||||
|
||||
if (lastUidForMailbox) {
|
||||
searchCriteria = { ...searchCriteria, uid: `${lastUidForMailbox + 1}:*` }
|
||||
}
|
||||
|
||||
if (config.lastCheckedTimestamp) {
|
||||
const lastChecked = new Date(config.lastCheckedTimestamp)
|
||||
const bufferTime = new Date(lastChecked.getTime() - 60000)
|
||||
searchCriteria = { ...searchCriteria, since: bufferTime }
|
||||
} else {
|
||||
const oneDayAgo = new Date(Date.now() - 24 * 60 * 60 * 1000)
|
||||
searchCriteria = { ...searchCriteria, since: oneDayAgo }
|
||||
}
|
||||
|
||||
let messageUids: number[] = []
|
||||
try {
|
||||
const searchResult = await client.search(searchCriteria, { uid: true })
|
||||
messageUids = searchResult === false ? [] : searchResult
|
||||
} catch {
|
||||
continue
|
||||
}
|
||||
|
||||
if (messageUids.length === 0) continue
|
||||
|
||||
messageUids.sort((a, b) => a - b)
|
||||
const remainingSlots = maxEmails - totalEmailsCollected
|
||||
const uidsToProcess = messageUids.slice(0, remainingSlots)
|
||||
|
||||
for await (const msg of client.fetch(
|
||||
uidsToProcess,
|
||||
{ uid: true, envelope: true, bodyStructure: true, source: true },
|
||||
{ uid: true }
|
||||
)) {
|
||||
emails.push({
|
||||
uid: msg.uid,
|
||||
mailboxPath,
|
||||
envelope: msg.envelope,
|
||||
bodyStructure: msg.bodyStructure,
|
||||
source: msg.source,
|
||||
})
|
||||
if (msg.uid > (latestUidByMailbox[mailboxPath] || 0)) {
|
||||
latestUidByMailbox[mailboxPath] = msg.uid
|
||||
}
|
||||
totalEmailsCollected++
|
||||
}
|
||||
} catch (mailboxError) {
|
||||
logger.warn(`[${requestId}] Error processing mailbox ${mailboxPath}:`, mailboxError)
|
||||
}
|
||||
throw error
|
||||
}
|
||||
|
||||
return { emails, latestUidByMailbox, uidValidityByMailbox }
|
||||
}
|
||||
|
||||
function getMailboxesToCheck(config: ImapWebhookConfig): string[] {
|
||||
@@ -331,9 +380,7 @@ function extractTextFromSource(source: Buffer): { text: string; html: string } {
|
||||
if (lowerPart.includes('base64')) {
|
||||
try {
|
||||
text = Buffer.from(text.replace(/\s/g, ''), 'base64').toString('utf-8')
|
||||
} catch {
|
||||
// Keep as-is if base64 decode fails
|
||||
}
|
||||
} catch {}
|
||||
}
|
||||
}
|
||||
} else if (lowerPart.includes('content-type: text/html')) {
|
||||
@@ -348,9 +395,7 @@ function extractTextFromSource(source: Buffer): { text: string; html: string } {
|
||||
if (lowerPart.includes('base64')) {
|
||||
try {
|
||||
html = Buffer.from(html.replace(/\s/g, ''), 'base64').toString('utf-8')
|
||||
} catch {
|
||||
// Keep as-is if base64 decode fails
|
||||
}
|
||||
} catch {}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -405,9 +450,7 @@ function extractAttachmentsFromSource(
|
||||
mimeType,
|
||||
size: buffer.length,
|
||||
})
|
||||
} catch {
|
||||
// Skip if decode fails
|
||||
}
|
||||
} catch {}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -437,34 +480,17 @@ async function processEmails(
|
||||
webhookData: PollWebhookContext['webhookData'],
|
||||
workflowData: PollWebhookContext['workflowData'],
|
||||
config: ImapWebhookConfig,
|
||||
client: ImapFlow,
|
||||
requestId: string,
|
||||
resolvedIP: string,
|
||||
logger: ReturnType<typeof import('@sim/logger').createLogger>
|
||||
) {
|
||||
let processedCount = 0
|
||||
let failedCount = 0
|
||||
|
||||
const client = new ImapFlow({
|
||||
host: resolvedIP,
|
||||
servername: config.host,
|
||||
port: config.port || 993,
|
||||
secure: config.secure ?? true,
|
||||
auth: {
|
||||
user: config.username,
|
||||
pass: config.password,
|
||||
},
|
||||
tls: { rejectUnauthorized: true },
|
||||
logger: false,
|
||||
})
|
||||
|
||||
let currentOpenMailbox: string | null = null
|
||||
const lockState: { lock: MailboxLockObject | null } = { lock: null }
|
||||
|
||||
try {
|
||||
if (config.markAsRead) {
|
||||
await client.connect()
|
||||
}
|
||||
|
||||
for (const email of emails) {
|
||||
try {
|
||||
await pollingIdempotency.executeWithIdempotency(
|
||||
@@ -541,7 +567,7 @@ async function processEmails(
|
||||
lockState.lock = await client.getMailboxLock(email.mailboxPath)
|
||||
currentOpenMailbox = email.mailboxPath
|
||||
}
|
||||
await client.messageFlagsAdd({ uid: email.uid }, ['\\Seen'], { uid: true })
|
||||
await client.messageFlagsAdd(email.uid, ['\\Seen'], { uid: true })
|
||||
} catch (flagError) {
|
||||
logger.warn(
|
||||
`[${requestId}] Failed to mark message ${email.uid} as read:`,
|
||||
@@ -565,15 +591,10 @@ async function processEmails(
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
if (config.markAsRead) {
|
||||
if (lockState.lock) {
|
||||
try {
|
||||
if (lockState.lock) {
|
||||
lockState.lock.release()
|
||||
}
|
||||
await client.logout()
|
||||
} catch {
|
||||
// Ignore logout errors
|
||||
}
|
||||
lockState.lock.release()
|
||||
} catch {}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import { htmlToText } from 'html-to-text'
|
||||
import { pollingIdempotency } from '@/lib/core/idempotency/service'
|
||||
import { fetchWithRetry } from '@/lib/knowledge/documents/utils'
|
||||
import type { PollingProviderHandler, PollWebhookContext } from '@/lib/webhooks/polling/types'
|
||||
import {
|
||||
markWebhookFailed,
|
||||
@@ -166,6 +167,12 @@ export const outlookPollingHandler: PollingProviderHandler = {
|
||||
},
|
||||
}
|
||||
|
||||
/** Hard cap on total emails fetched per poll to prevent unbounded pagination loops. */
|
||||
const OUTLOOK_HARD_MAX_EMAILS = 200
|
||||
|
||||
/** Number of items to request per Graph API page. Decoupled from the total cap so pagination actually runs. */
|
||||
const OUTLOOK_PAGE_SIZE = 50
|
||||
|
||||
async function fetchNewOutlookEmails(
|
||||
accessToken: string,
|
||||
config: OutlookWebhookConfig,
|
||||
@@ -181,53 +188,77 @@ async function fetchNewOutlookEmails(
|
||||
'id,conversationId,subject,bodyPreview,body,from,toRecipients,ccRecipients,receivedDateTime,sentDateTime,hasAttachments,isRead,parentFolderId'
|
||||
)
|
||||
params.append('$orderby', 'receivedDateTime desc')
|
||||
params.append('$top', (config.maxEmailsPerPoll || 25).toString())
|
||||
const maxEmails = Math.min(config.maxEmailsPerPoll || 25, OUTLOOK_HARD_MAX_EMAILS)
|
||||
params.append('$top', OUTLOOK_PAGE_SIZE.toString())
|
||||
|
||||
if (config.lastCheckedTimestamp) {
|
||||
const lastChecked = new Date(config.lastCheckedTimestamp)
|
||||
const bufferTime = new Date(lastChecked.getTime() - 60000)
|
||||
params.append('$filter', `receivedDateTime gt ${bufferTime.toISOString()}`)
|
||||
}
|
||||
const allEmails: OutlookEmail[] = []
|
||||
let nextUrl: string | undefined = `${apiUrl}?${params.toString()}`
|
||||
logger.info(`[${requestId}] Fetching emails from: ${nextUrl}`)
|
||||
|
||||
const fullUrl = `${apiUrl}?${params.toString()}`
|
||||
logger.info(`[${requestId}] Fetching emails from: ${fullUrl}`)
|
||||
|
||||
const response = await fetch(fullUrl, {
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
const errorData = await response.json().catch(() => ({ error: { message: 'Unknown error' } }))
|
||||
logger.error(`[${requestId}] Microsoft Graph API error:`, {
|
||||
status: response.status,
|
||||
statusText: response.statusText,
|
||||
error: errorData,
|
||||
while (nextUrl && allEmails.length < maxEmails) {
|
||||
const response = await fetchWithRetry(nextUrl, {
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
},
|
||||
})
|
||||
throw new Error(
|
||||
`Microsoft Graph API error: ${response.status} ${response.statusText} - ${JSON.stringify(errorData)}`
|
||||
)
|
||||
|
||||
if (!response.ok) {
|
||||
const errorData = await response
|
||||
.json()
|
||||
.catch(() => ({ error: { message: 'Unknown error' } }))
|
||||
logger.error(`[${requestId}] Microsoft Graph API error:`, {
|
||||
status: response.status,
|
||||
statusText: response.statusText,
|
||||
error: errorData,
|
||||
})
|
||||
throw new Error(
|
||||
`Microsoft Graph API error: ${response.status} ${response.statusText} - ${JSON.stringify(errorData)}`
|
||||
)
|
||||
}
|
||||
|
||||
const data = await response.json()
|
||||
const pageEmails: OutlookEmail[] = data.value || []
|
||||
const remaining = maxEmails - allEmails.length
|
||||
allEmails.push(...pageEmails.slice(0, remaining))
|
||||
|
||||
nextUrl =
|
||||
allEmails.length < maxEmails ? (data['@odata.nextLink'] as string | undefined) : undefined
|
||||
|
||||
if (pageEmails.length === 0) break
|
||||
}
|
||||
|
||||
const data = await response.json()
|
||||
const emails = data.value || []
|
||||
logger.info(`[${requestId}] Fetched ${allEmails.length} emails total`)
|
||||
|
||||
const emails = allEmails
|
||||
|
||||
let resolvedFolderIds: Map<string, string> | undefined
|
||||
let skipFolderFilter = false
|
||||
if (config.folderIds && config.folderIds.length > 0) {
|
||||
const hasWellKnownFolders = config.folderIds.some(isWellKnownFolderName)
|
||||
if (hasWellKnownFolders) {
|
||||
const wellKnownFolders = config.folderIds.filter(isWellKnownFolderName)
|
||||
if (wellKnownFolders.length > 0) {
|
||||
resolvedFolderIds = await resolveWellKnownFolderIds(
|
||||
accessToken,
|
||||
config.folderIds,
|
||||
requestId,
|
||||
logger
|
||||
)
|
||||
if (resolvedFolderIds.size < wellKnownFolders.length) {
|
||||
logger.warn(
|
||||
`[${requestId}] Could not resolve all well-known folders (${resolvedFolderIds.size}/${wellKnownFolders.length}) — skipping folder filter to avoid incorrect results`
|
||||
)
|
||||
skipFolderFilter = true
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const filteredEmails = filterEmailsByFolder(emails, config, resolvedFolderIds)
|
||||
const filteredEmails = skipFolderFilter
|
||||
? emails
|
||||
: filterEmailsByFolder(emails, config, resolvedFolderIds)
|
||||
|
||||
logger.info(
|
||||
`[${requestId}] Fetched ${emails.length} emails, ${filteredEmails.length} after filtering`
|
||||
@@ -262,12 +293,14 @@ async function resolveWellKnownFolderId(
|
||||
logger: ReturnType<typeof import('@sim/logger').createLogger>
|
||||
): Promise<string | null> {
|
||||
try {
|
||||
const response = await fetch(`https://graph.microsoft.com/v1.0/me/mailFolders/${folderName}`, {
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
})
|
||||
const response = await fetchWithRetry(
|
||||
`https://graph.microsoft.com/v1.0/me/mailFolders/${folderName}`,
|
||||
{
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
},
|
||||
}
|
||||
)
|
||||
|
||||
if (!response.ok) {
|
||||
logger.warn(
|
||||
@@ -455,12 +488,11 @@ async function downloadOutlookAttachments(
|
||||
const attachments: OutlookAttachment[] = []
|
||||
|
||||
try {
|
||||
const response = await fetch(
|
||||
const response = await fetchWithRetry(
|
||||
`https://graph.microsoft.com/v1.0/me/messages/${messageId}/attachments`,
|
||||
{
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
}
|
||||
)
|
||||
@@ -511,14 +543,17 @@ async function markOutlookEmailAsRead(
|
||||
logger: ReturnType<typeof import('@sim/logger').createLogger>
|
||||
) {
|
||||
try {
|
||||
const response = await fetch(`https://graph.microsoft.com/v1.0/me/messages/${messageId}`, {
|
||||
method: 'PATCH',
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
body: JSON.stringify({ isRead: true }),
|
||||
})
|
||||
const response = await fetchWithRetry(
|
||||
`https://graph.microsoft.com/v1.0/me/messages/${messageId}`,
|
||||
{
|
||||
method: 'PATCH',
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
body: JSON.stringify({ isRead: true }),
|
||||
}
|
||||
)
|
||||
|
||||
if (!response.ok) {
|
||||
logger.error(
|
||||
|
||||
@@ -12,7 +12,7 @@ import {
|
||||
} from '@/lib/webhooks/polling/utils'
|
||||
import { processPolledWebhookEvent } from '@/lib/webhooks/processor'
|
||||
|
||||
const MAX_GUIDS_TO_TRACK = 100
|
||||
const MAX_GUIDS_TO_TRACK = 500
|
||||
|
||||
interface RssWebhookConfig {
|
||||
feedUrl: string
|
||||
@@ -87,10 +87,15 @@ export const rssPollingHandler: PollingProviderHandler = {
|
||||
}
|
||||
|
||||
const now = new Date()
|
||||
const { feed, items: newItems } = await fetchNewRssItems(config, requestId, logger)
|
||||
const {
|
||||
feed,
|
||||
items: newItems,
|
||||
etag,
|
||||
lastModified,
|
||||
} = await fetchNewRssItems(config, requestId, logger)
|
||||
|
||||
if (!newItems.length) {
|
||||
await updateRssState(webhookId, now.toISOString(), [], config, logger)
|
||||
await updateRssState(webhookId, now.toISOString(), [], config, logger, etag, lastModified)
|
||||
await markWebhookSuccess(webhookId, logger)
|
||||
logger.info(`[${requestId}] No new items found for webhook ${webhookId}`)
|
||||
return 'success'
|
||||
@@ -108,10 +113,23 @@ export const rssPollingHandler: PollingProviderHandler = {
|
||||
)
|
||||
|
||||
const newGuids = newItems
|
||||
.map((item) => item.guid || item.link || '')
|
||||
.map(
|
||||
(item) =>
|
||||
item.guid ||
|
||||
item.link ||
|
||||
(item.title && item.pubDate ? `${item.title}-${item.pubDate}` : '')
|
||||
)
|
||||
.filter((guid) => guid.length > 0)
|
||||
|
||||
await updateRssState(webhookId, now.toISOString(), newGuids, config, logger)
|
||||
await updateRssState(
|
||||
webhookId,
|
||||
now.toISOString(),
|
||||
newGuids,
|
||||
config,
|
||||
logger,
|
||||
etag,
|
||||
lastModified
|
||||
)
|
||||
|
||||
if (failedCount > 0 && processedCount === 0) {
|
||||
await markWebhookFailed(webhookId, logger)
|
||||
@@ -139,7 +157,9 @@ async function updateRssState(
|
||||
timestamp: string,
|
||||
newGuids: string[],
|
||||
config: RssWebhookConfig,
|
||||
logger: ReturnType<typeof import('@sim/logger').createLogger>
|
||||
logger: ReturnType<typeof import('@sim/logger').createLogger>,
|
||||
etag?: string,
|
||||
lastModified?: string
|
||||
) {
|
||||
const existingGuids = config.lastSeenGuids || []
|
||||
const allGuids = [...newGuids, ...existingGuids].slice(0, MAX_GUIDS_TO_TRACK)
|
||||
@@ -149,6 +169,8 @@ async function updateRssState(
|
||||
{
|
||||
lastCheckedTimestamp: timestamp,
|
||||
lastSeenGuids: allGuids,
|
||||
...(etag !== undefined ? { etag } : {}),
|
||||
...(lastModified !== undefined ? { lastModified } : {}),
|
||||
},
|
||||
logger
|
||||
)
|
||||
@@ -158,7 +180,7 @@ async function fetchNewRssItems(
|
||||
config: RssWebhookConfig,
|
||||
requestId: string,
|
||||
logger: ReturnType<typeof import('@sim/logger').createLogger>
|
||||
): Promise<{ feed: RssFeed; items: RssItem[] }> {
|
||||
): Promise<{ feed: RssFeed; items: RssItem[]; etag?: string; lastModified?: string }> {
|
||||
try {
|
||||
const urlValidation = await validateUrlWithDNS(config.feedUrl, 'feedUrl')
|
||||
if (!urlValidation.isValid) {
|
||||
@@ -166,24 +188,45 @@ async function fetchNewRssItems(
|
||||
throw new Error(`Invalid RSS feed URL: ${urlValidation.error}`)
|
||||
}
|
||||
|
||||
const headers: Record<string, string> = {
|
||||
'User-Agent': 'Sim/1.0 RSS Poller',
|
||||
Accept: 'application/rss+xml, application/xml, text/xml, */*',
|
||||
}
|
||||
if (config.etag) {
|
||||
headers['If-None-Match'] = config.etag
|
||||
}
|
||||
if (config.lastModified) {
|
||||
headers['If-Modified-Since'] = config.lastModified
|
||||
}
|
||||
|
||||
const response = await secureFetchWithPinnedIP(config.feedUrl, urlValidation.resolvedIP!, {
|
||||
headers: {
|
||||
'User-Agent': 'Sim/1.0 RSS Poller',
|
||||
Accept: 'application/rss+xml, application/xml, text/xml, */*',
|
||||
},
|
||||
headers,
|
||||
timeout: 30000,
|
||||
})
|
||||
|
||||
if (response.status === 304) {
|
||||
logger.info(`[${requestId}] RSS feed not modified (304) for ${config.feedUrl}`)
|
||||
return {
|
||||
feed: { items: [] } as RssFeed,
|
||||
items: [],
|
||||
etag: response.headers.get('etag') ?? config.etag,
|
||||
lastModified: response.headers.get('last-modified') ?? config.lastModified,
|
||||
}
|
||||
}
|
||||
|
||||
if (!response.ok) {
|
||||
await response.text().catch(() => {})
|
||||
throw new Error(`Failed to fetch RSS feed: ${response.status} ${response.statusText}`)
|
||||
}
|
||||
|
||||
const newEtag = response.headers.get('etag') ?? undefined
|
||||
const newLastModified = response.headers.get('last-modified') ?? undefined
|
||||
|
||||
const xmlContent = await response.text()
|
||||
const feed = await parser.parseString(xmlContent)
|
||||
|
||||
if (!feed.items || !feed.items.length) {
|
||||
return { feed: feed as RssFeed, items: [] }
|
||||
return { feed: feed as RssFeed, items: [], etag: newEtag, lastModified: newLastModified }
|
||||
}
|
||||
|
||||
const lastCheckedTime = config.lastCheckedTimestamp
|
||||
@@ -192,7 +235,10 @@ async function fetchNewRssItems(
|
||||
const lastSeenGuids = new Set(config.lastSeenGuids || [])
|
||||
|
||||
const newItems = feed.items.filter((item) => {
|
||||
const itemGuid = item.guid || item.link || ''
|
||||
const itemGuid =
|
||||
item.guid ||
|
||||
item.link ||
|
||||
(item.title && item.pubDate ? `${item.title}-${item.pubDate}` : '')
|
||||
|
||||
if (itemGuid && lastSeenGuids.has(itemGuid)) {
|
||||
return false
|
||||
@@ -220,7 +266,12 @@ async function fetchNewRssItems(
|
||||
`[${requestId}] Found ${newItems.length} new items (processing ${limitedItems.length})`
|
||||
)
|
||||
|
||||
return { feed: feed as RssFeed, items: limitedItems as RssItem[] }
|
||||
return {
|
||||
feed: feed as RssFeed,
|
||||
items: limitedItems as RssItem[],
|
||||
etag: newEtag,
|
||||
lastModified: newLastModified,
|
||||
}
|
||||
} catch (error) {
|
||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error'
|
||||
logger.error(`[${requestId}] Error fetching RSS feed:`, errorMessage)
|
||||
@@ -241,7 +292,17 @@ async function processRssItems(
|
||||
|
||||
for (const item of items) {
|
||||
try {
|
||||
const itemGuid = item.guid || item.link || `${item.title}-${item.pubDate}`
|
||||
const itemGuid =
|
||||
item.guid ||
|
||||
item.link ||
|
||||
(item.title && item.pubDate ? `${item.title}-${item.pubDate}` : '')
|
||||
|
||||
if (!itemGuid) {
|
||||
logger.warn(
|
||||
`[${requestId}] Skipping RSS item with no identifiable GUID for webhook ${webhookData.id}`
|
||||
)
|
||||
continue
|
||||
}
|
||||
|
||||
await pollingIdempotency.executeWithIdempotency(
|
||||
'rss',
|
||||
|
||||
@@ -28,6 +28,7 @@ import { outlookHandler } from '@/lib/webhooks/providers/outlook'
|
||||
import { resendHandler } from '@/lib/webhooks/providers/resend'
|
||||
import { rssHandler } from '@/lib/webhooks/providers/rss'
|
||||
import { salesforceHandler } from '@/lib/webhooks/providers/salesforce'
|
||||
import { servicenowHandler } from '@/lib/webhooks/providers/servicenow'
|
||||
import { slackHandler } from '@/lib/webhooks/providers/slack'
|
||||
import { stripeHandler } from '@/lib/webhooks/providers/stripe'
|
||||
import { telegramHandler } from '@/lib/webhooks/providers/telegram'
|
||||
@@ -72,6 +73,7 @@ const PROVIDER_HANDLERS: Record<string, WebhookProviderHandler> = {
|
||||
outlook: outlookHandler,
|
||||
rss: rssHandler,
|
||||
salesforce: salesforceHandler,
|
||||
servicenow: servicenowHandler,
|
||||
slack: slackHandler,
|
||||
stripe: stripeHandler,
|
||||
telegram: telegramHandler,
|
||||
|
||||
57
apps/sim/lib/webhooks/providers/servicenow.ts
Normal file
57
apps/sim/lib/webhooks/providers/servicenow.ts
Normal file
@@ -0,0 +1,57 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { NextResponse } from 'next/server'
|
||||
import type {
|
||||
AuthContext,
|
||||
EventMatchContext,
|
||||
WebhookProviderHandler,
|
||||
} from '@/lib/webhooks/providers/types'
|
||||
import { verifyTokenAuth } from '@/lib/webhooks/providers/utils'
|
||||
|
||||
const logger = createLogger('WebhookProvider:ServiceNow')
|
||||
|
||||
function asRecord(body: unknown): Record<string, unknown> {
|
||||
return body && typeof body === 'object' && !Array.isArray(body)
|
||||
? (body as Record<string, unknown>)
|
||||
: {}
|
||||
}
|
||||
|
||||
export const servicenowHandler: WebhookProviderHandler = {
|
||||
verifyAuth({ request, requestId, providerConfig }: AuthContext): NextResponse | null {
|
||||
const secret = providerConfig.webhookSecret as string | undefined
|
||||
if (!secret?.trim()) {
|
||||
logger.warn(`[${requestId}] ServiceNow webhook missing webhookSecret — rejecting`)
|
||||
return new NextResponse('Unauthorized - Webhook secret not configured', { status: 401 })
|
||||
}
|
||||
|
||||
if (
|
||||
!verifyTokenAuth(request, secret.trim(), 'x-sim-webhook-secret') &&
|
||||
!verifyTokenAuth(request, secret.trim())
|
||||
) {
|
||||
logger.warn(`[${requestId}] ServiceNow webhook secret verification failed`)
|
||||
return new NextResponse('Unauthorized - Invalid webhook secret', { status: 401 })
|
||||
}
|
||||
|
||||
return null
|
||||
},
|
||||
|
||||
async matchEvent({ webhook, workflow, body, requestId, providerConfig }: EventMatchContext) {
|
||||
const triggerId = providerConfig.triggerId as string | undefined
|
||||
if (!triggerId) {
|
||||
return true
|
||||
}
|
||||
|
||||
const { isServiceNowEventMatch } = await import('@/triggers/servicenow/utils')
|
||||
const configuredTableName = providerConfig.tableName as string | undefined
|
||||
const obj = asRecord(body)
|
||||
|
||||
if (!isServiceNowEventMatch(triggerId, obj, configuredTableName)) {
|
||||
logger.debug(
|
||||
`[${requestId}] ServiceNow event mismatch for trigger ${triggerId}. Skipping execution.`,
|
||||
{ webhookId: webhook.id, workflowId: workflow.id, triggerId }
|
||||
)
|
||||
return false
|
||||
}
|
||||
|
||||
return true
|
||||
},
|
||||
}
|
||||
@@ -14,7 +14,10 @@ import {
|
||||
normalizeVariables,
|
||||
sanitizeVariable,
|
||||
} from './normalize'
|
||||
import { formatValueForDisplay, resolveValueForDisplay } from './resolve-values'
|
||||
import { formatValueForDisplay, resolveFieldLabel, resolveValueForDisplay } from './resolve-values'
|
||||
|
||||
const MAX_CHANGES_PER_BLOCK = 6
|
||||
const MAX_EDGE_DETAILS = 3
|
||||
|
||||
const logger = createLogger('WorkflowComparison')
|
||||
|
||||
@@ -45,10 +48,22 @@ export interface WorkflowDiffSummary {
|
||||
addedBlocks: Array<{ id: string; type: string; name?: string }>
|
||||
removedBlocks: Array<{ id: string; type: string; name?: string }>
|
||||
modifiedBlocks: Array<{ id: string; type: string; name?: string; changes: FieldChange[] }>
|
||||
edgeChanges: { added: number; removed: number }
|
||||
edgeChanges: {
|
||||
added: number
|
||||
removed: number
|
||||
addedDetails: Array<{ sourceName: string; targetName: string }>
|
||||
removedDetails: Array<{ sourceName: string; targetName: string }>
|
||||
}
|
||||
loopChanges: { added: number; removed: number; modified: number }
|
||||
parallelChanges: { added: number; removed: number; modified: number }
|
||||
variableChanges: { added: number; removed: number; modified: number }
|
||||
variableChanges: {
|
||||
added: number
|
||||
removed: number
|
||||
modified: number
|
||||
addedNames: string[]
|
||||
removedNames: string[]
|
||||
modifiedNames: string[]
|
||||
}
|
||||
hasChanges: boolean
|
||||
}
|
||||
|
||||
@@ -63,10 +78,17 @@ export function generateWorkflowDiffSummary(
|
||||
addedBlocks: [],
|
||||
removedBlocks: [],
|
||||
modifiedBlocks: [],
|
||||
edgeChanges: { added: 0, removed: 0 },
|
||||
edgeChanges: { added: 0, removed: 0, addedDetails: [], removedDetails: [] },
|
||||
loopChanges: { added: 0, removed: 0, modified: 0 },
|
||||
parallelChanges: { added: 0, removed: 0, modified: 0 },
|
||||
variableChanges: { added: 0, removed: 0, modified: 0 },
|
||||
variableChanges: {
|
||||
added: 0,
|
||||
removed: 0,
|
||||
modified: 0,
|
||||
addedNames: [],
|
||||
removedNames: [],
|
||||
modifiedNames: [],
|
||||
},
|
||||
hasChanges: false,
|
||||
}
|
||||
|
||||
@@ -79,10 +101,28 @@ export function generateWorkflowDiffSummary(
|
||||
name: block.name,
|
||||
})
|
||||
}
|
||||
result.edgeChanges.added = (currentState.edges || []).length
|
||||
|
||||
const edges = currentState.edges || []
|
||||
result.edgeChanges.added = edges.length
|
||||
for (const edge of edges) {
|
||||
const sourceBlock = currentBlocks[edge.source]
|
||||
const targetBlock = currentBlocks[edge.target]
|
||||
result.edgeChanges.addedDetails.push({
|
||||
sourceName: sourceBlock?.name || sourceBlock?.type || edge.source,
|
||||
targetName: targetBlock?.name || targetBlock?.type || edge.target,
|
||||
})
|
||||
}
|
||||
|
||||
result.loopChanges.added = Object.keys(currentState.loops || {}).length
|
||||
result.parallelChanges.added = Object.keys(currentState.parallels || {}).length
|
||||
result.variableChanges.added = Object.keys(currentState.variables || {}).length
|
||||
|
||||
const variables = currentState.variables || {}
|
||||
const varEntries = Object.entries(variables)
|
||||
result.variableChanges.added = varEntries.length
|
||||
for (const [id, variable] of varEntries) {
|
||||
result.variableChanges.addedNames.push((variable as { name?: string }).name || id)
|
||||
}
|
||||
|
||||
result.hasChanges = true
|
||||
return result
|
||||
}
|
||||
@@ -121,7 +161,6 @@ export function generateWorkflowDiffSummary(
|
||||
const previousBlock = previousBlocks[id]
|
||||
const changes: FieldChange[] = []
|
||||
|
||||
// Use shared helpers for block field extraction (single source of truth)
|
||||
const {
|
||||
blockRest: currentRest,
|
||||
normalizedData: currentDataRest,
|
||||
@@ -156,8 +195,6 @@ export function generateWorkflowDiffSummary(
|
||||
newValue: currentBlock.enabled,
|
||||
})
|
||||
}
|
||||
// Check other block properties (boolean fields)
|
||||
// Use !! to normalize: null/undefined/false are all equivalent (falsy)
|
||||
const blockFields = ['horizontalHandles', 'advancedMode', 'triggerMode', 'locked'] as const
|
||||
for (const field of blockFields) {
|
||||
if (!!currentBlock[field] !== !!previousBlock[field]) {
|
||||
@@ -169,15 +206,27 @@ export function generateWorkflowDiffSummary(
|
||||
}
|
||||
}
|
||||
if (normalizedStringify(currentDataRest) !== normalizedStringify(previousDataRest)) {
|
||||
changes.push({ field: 'data', oldValue: previousDataRest, newValue: currentDataRest })
|
||||
const allDataKeys = new Set([
|
||||
...Object.keys(currentDataRest),
|
||||
...Object.keys(previousDataRest),
|
||||
])
|
||||
for (const key of allDataKeys) {
|
||||
if (
|
||||
normalizedStringify(currentDataRest[key]) !== normalizedStringify(previousDataRest[key])
|
||||
) {
|
||||
changes.push({
|
||||
field: `data.${key}`,
|
||||
oldValue: previousDataRest[key] ?? null,
|
||||
newValue: currentDataRest[key] ?? null,
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Normalize trigger config values for both states before comparison
|
||||
const normalizedCurrentSubs = normalizeTriggerConfigValues(currentSubBlocks)
|
||||
const normalizedPreviousSubs = normalizeTriggerConfigValues(previousSubBlocks)
|
||||
|
||||
// Compare subBlocks using shared helper for filtering (single source of truth)
|
||||
const allSubBlockIds = filterSubBlockIds([
|
||||
...new Set([...Object.keys(normalizedCurrentSubs), ...Object.keys(normalizedPreviousSubs)]),
|
||||
])
|
||||
@@ -195,11 +244,9 @@ export function generateWorkflowDiffSummary(
|
||||
continue
|
||||
}
|
||||
|
||||
// Use shared helper for subBlock value normalization (single source of truth)
|
||||
const currentValue = normalizeSubBlockValue(subId, currentSub.value)
|
||||
const previousValue = normalizeSubBlockValue(subId, previousSub.value)
|
||||
|
||||
// For string values, compare directly to catch even small text changes
|
||||
if (typeof currentValue === 'string' && typeof previousValue === 'string') {
|
||||
if (currentValue !== previousValue) {
|
||||
changes.push({ field: subId, oldValue: previousSub.value, newValue: currentSub.value })
|
||||
@@ -212,7 +259,6 @@ export function generateWorkflowDiffSummary(
|
||||
}
|
||||
}
|
||||
|
||||
// Use shared helper for subBlock REST extraction (single source of truth)
|
||||
const currentSubRest = extractSubBlockRest(currentSub)
|
||||
const previousSubRest = extractSubBlockRest(previousSub)
|
||||
|
||||
@@ -240,11 +286,30 @@ export function generateWorkflowDiffSummary(
|
||||
const currentEdgeSet = new Set(currentEdges.map(normalizedStringify))
|
||||
const previousEdgeSet = new Set(previousEdges.map(normalizedStringify))
|
||||
|
||||
for (const edge of currentEdgeSet) {
|
||||
if (!previousEdgeSet.has(edge)) result.edgeChanges.added++
|
||||
const resolveBlockName = (blockId: string): string => {
|
||||
const block = currentBlocks[blockId] || previousBlocks[blockId]
|
||||
return block?.name || block?.type || blockId
|
||||
}
|
||||
for (const edge of previousEdgeSet) {
|
||||
if (!currentEdgeSet.has(edge)) result.edgeChanges.removed++
|
||||
|
||||
for (const edgeStr of currentEdgeSet) {
|
||||
if (!previousEdgeSet.has(edgeStr)) {
|
||||
result.edgeChanges.added++
|
||||
const edge = JSON.parse(edgeStr) as { source: string; target: string }
|
||||
result.edgeChanges.addedDetails.push({
|
||||
sourceName: resolveBlockName(edge.source),
|
||||
targetName: resolveBlockName(edge.target),
|
||||
})
|
||||
}
|
||||
}
|
||||
for (const edgeStr of previousEdgeSet) {
|
||||
if (!currentEdgeSet.has(edgeStr)) {
|
||||
result.edgeChanges.removed++
|
||||
const edge = JSON.parse(edgeStr) as { source: string; target: string }
|
||||
result.edgeChanges.removedDetails.push({
|
||||
sourceName: resolveBlockName(edge.source),
|
||||
targetName: resolveBlockName(edge.target),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
const currentLoops = currentState.loops || {}
|
||||
@@ -296,8 +361,18 @@ export function generateWorkflowDiffSummary(
|
||||
const currentVarIds = Object.keys(currentVars)
|
||||
const previousVarIds = Object.keys(previousVars)
|
||||
|
||||
result.variableChanges.added = currentVarIds.filter((id) => !previousVarIds.includes(id)).length
|
||||
result.variableChanges.removed = previousVarIds.filter((id) => !currentVarIds.includes(id)).length
|
||||
for (const id of currentVarIds) {
|
||||
if (!previousVarIds.includes(id)) {
|
||||
result.variableChanges.added++
|
||||
result.variableChanges.addedNames.push(currentVars[id].name || id)
|
||||
}
|
||||
}
|
||||
for (const id of previousVarIds) {
|
||||
if (!currentVarIds.includes(id)) {
|
||||
result.variableChanges.removed++
|
||||
result.variableChanges.removedNames.push(previousVars[id].name || id)
|
||||
}
|
||||
}
|
||||
|
||||
for (const id of currentVarIds) {
|
||||
if (!previousVarIds.includes(id)) continue
|
||||
@@ -305,6 +380,7 @@ export function generateWorkflowDiffSummary(
|
||||
const previousVar = normalizeValue(sanitizeVariable(previousVars[id]))
|
||||
if (normalizedStringify(currentVar) !== normalizedStringify(previousVar)) {
|
||||
result.variableChanges.modified++
|
||||
result.variableChanges.modifiedNames.push(currentVars[id].name || id)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -349,56 +425,24 @@ export function formatDiffSummaryForDescription(summary: WorkflowDiffSummary): s
|
||||
|
||||
for (const block of summary.modifiedBlocks) {
|
||||
const name = block.name || block.type
|
||||
for (const change of block.changes.slice(0, 3)) {
|
||||
const meaningfulChanges = block.changes.filter((c) => !c.field.endsWith('.properties'))
|
||||
for (const change of meaningfulChanges.slice(0, MAX_CHANGES_PER_BLOCK)) {
|
||||
const fieldLabel = resolveFieldLabel(block.type, change.field)
|
||||
const oldStr = formatValueForDisplay(change.oldValue)
|
||||
const newStr = formatValueForDisplay(change.newValue)
|
||||
changes.push(`Modified ${name}: ${change.field} changed from "${oldStr}" to "${newStr}"`)
|
||||
changes.push(`Modified ${name}: ${fieldLabel} changed from "${oldStr}" to "${newStr}"`)
|
||||
}
|
||||
if (block.changes.length > 3) {
|
||||
changes.push(` ...and ${block.changes.length - 3} more changes in ${name}`)
|
||||
if (meaningfulChanges.length > MAX_CHANGES_PER_BLOCK) {
|
||||
changes.push(
|
||||
` ...and ${meaningfulChanges.length - MAX_CHANGES_PER_BLOCK} more changes in ${name}`
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
if (summary.edgeChanges.added > 0) {
|
||||
changes.push(`Added ${summary.edgeChanges.added} connection(s)`)
|
||||
}
|
||||
if (summary.edgeChanges.removed > 0) {
|
||||
changes.push(`Removed ${summary.edgeChanges.removed} connection(s)`)
|
||||
}
|
||||
|
||||
if (summary.loopChanges.added > 0) {
|
||||
changes.push(`Added ${summary.loopChanges.added} loop(s)`)
|
||||
}
|
||||
if (summary.loopChanges.removed > 0) {
|
||||
changes.push(`Removed ${summary.loopChanges.removed} loop(s)`)
|
||||
}
|
||||
if (summary.loopChanges.modified > 0) {
|
||||
changes.push(`Modified ${summary.loopChanges.modified} loop(s)`)
|
||||
}
|
||||
|
||||
if (summary.parallelChanges.added > 0) {
|
||||
changes.push(`Added ${summary.parallelChanges.added} parallel group(s)`)
|
||||
}
|
||||
if (summary.parallelChanges.removed > 0) {
|
||||
changes.push(`Removed ${summary.parallelChanges.removed} parallel group(s)`)
|
||||
}
|
||||
if (summary.parallelChanges.modified > 0) {
|
||||
changes.push(`Modified ${summary.parallelChanges.modified} parallel group(s)`)
|
||||
}
|
||||
|
||||
const varChanges: string[] = []
|
||||
if (summary.variableChanges.added > 0) {
|
||||
varChanges.push(`${summary.variableChanges.added} added`)
|
||||
}
|
||||
if (summary.variableChanges.removed > 0) {
|
||||
varChanges.push(`${summary.variableChanges.removed} removed`)
|
||||
}
|
||||
if (summary.variableChanges.modified > 0) {
|
||||
varChanges.push(`${summary.variableChanges.modified} modified`)
|
||||
}
|
||||
if (varChanges.length > 0) {
|
||||
changes.push(`Variables: ${varChanges.join(', ')}`)
|
||||
}
|
||||
formatEdgeChanges(summary, changes)
|
||||
formatCountChanges(summary.loopChanges, 'loop', changes)
|
||||
formatCountChanges(summary.parallelChanges, 'parallel group', changes)
|
||||
formatVariableChanges(summary, changes)
|
||||
|
||||
return changes.join('\n')
|
||||
}
|
||||
@@ -437,8 +481,9 @@ export async function formatDiffSummaryForDescriptionAsync(
|
||||
const modifiedBlockPromises = summary.modifiedBlocks.map(async (block) => {
|
||||
const name = block.name || block.type
|
||||
const blockChanges: string[] = []
|
||||
const meaningfulChanges = block.changes.filter((c) => !c.field.endsWith('.properties'))
|
||||
|
||||
const changesToProcess = block.changes.slice(0, 3)
|
||||
const changesToProcess = meaningfulChanges.slice(0, MAX_CHANGES_PER_BLOCK)
|
||||
const resolvedChanges = await Promise.all(
|
||||
changesToProcess.map(async (change) => {
|
||||
const context = {
|
||||
@@ -455,7 +500,7 @@ export async function formatDiffSummaryForDescriptionAsync(
|
||||
])
|
||||
|
||||
return {
|
||||
field: change.field,
|
||||
field: resolveFieldLabel(block.type, change.field),
|
||||
oldLabel: oldResolved.displayLabel,
|
||||
newLabel: newResolved.displayLabel,
|
||||
}
|
||||
@@ -468,8 +513,10 @@ export async function formatDiffSummaryForDescriptionAsync(
|
||||
)
|
||||
}
|
||||
|
||||
if (block.changes.length > 3) {
|
||||
blockChanges.push(` ...and ${block.changes.length - 3} more changes in ${name}`)
|
||||
if (meaningfulChanges.length > MAX_CHANGES_PER_BLOCK) {
|
||||
blockChanges.push(
|
||||
` ...and ${meaningfulChanges.length - MAX_CHANGES_PER_BLOCK} more changes in ${name}`
|
||||
)
|
||||
}
|
||||
|
||||
return blockChanges
|
||||
@@ -480,46 +527,10 @@ export async function formatDiffSummaryForDescriptionAsync(
|
||||
changes.push(...blockChanges)
|
||||
}
|
||||
|
||||
if (summary.edgeChanges.added > 0) {
|
||||
changes.push(`Added ${summary.edgeChanges.added} connection(s)`)
|
||||
}
|
||||
if (summary.edgeChanges.removed > 0) {
|
||||
changes.push(`Removed ${summary.edgeChanges.removed} connection(s)`)
|
||||
}
|
||||
|
||||
if (summary.loopChanges.added > 0) {
|
||||
changes.push(`Added ${summary.loopChanges.added} loop(s)`)
|
||||
}
|
||||
if (summary.loopChanges.removed > 0) {
|
||||
changes.push(`Removed ${summary.loopChanges.removed} loop(s)`)
|
||||
}
|
||||
if (summary.loopChanges.modified > 0) {
|
||||
changes.push(`Modified ${summary.loopChanges.modified} loop(s)`)
|
||||
}
|
||||
|
||||
if (summary.parallelChanges.added > 0) {
|
||||
changes.push(`Added ${summary.parallelChanges.added} parallel group(s)`)
|
||||
}
|
||||
if (summary.parallelChanges.removed > 0) {
|
||||
changes.push(`Removed ${summary.parallelChanges.removed} parallel group(s)`)
|
||||
}
|
||||
if (summary.parallelChanges.modified > 0) {
|
||||
changes.push(`Modified ${summary.parallelChanges.modified} parallel group(s)`)
|
||||
}
|
||||
|
||||
const varChanges: string[] = []
|
||||
if (summary.variableChanges.added > 0) {
|
||||
varChanges.push(`${summary.variableChanges.added} added`)
|
||||
}
|
||||
if (summary.variableChanges.removed > 0) {
|
||||
varChanges.push(`${summary.variableChanges.removed} removed`)
|
||||
}
|
||||
if (summary.variableChanges.modified > 0) {
|
||||
varChanges.push(`${summary.variableChanges.modified} modified`)
|
||||
}
|
||||
if (varChanges.length > 0) {
|
||||
changes.push(`Variables: ${varChanges.join(', ')}`)
|
||||
}
|
||||
formatEdgeChanges(summary, changes)
|
||||
formatCountChanges(summary.loopChanges, 'loop', changes)
|
||||
formatCountChanges(summary.parallelChanges, 'parallel group', changes)
|
||||
formatVariableChanges(summary, changes)
|
||||
|
||||
logger.info('Generated async diff description', {
|
||||
workflowId,
|
||||
@@ -529,3 +540,82 @@ export async function formatDiffSummaryForDescriptionAsync(
|
||||
|
||||
return changes.join('\n')
|
||||
}
|
||||
|
||||
function formatEdgeDetailList(
|
||||
edges: Array<{ sourceName: string; targetName: string }>,
|
||||
total: number,
|
||||
verb: string,
|
||||
changes: string[]
|
||||
): void {
|
||||
if (edges.length === 0) {
|
||||
changes.push(`${verb} ${total} connection(s)`)
|
||||
return
|
||||
}
|
||||
for (const edge of edges.slice(0, MAX_EDGE_DETAILS)) {
|
||||
changes.push(`${verb} connection: ${edge.sourceName} -> ${edge.targetName}`)
|
||||
}
|
||||
if (total > MAX_EDGE_DETAILS) {
|
||||
changes.push(` ...and ${total - MAX_EDGE_DETAILS} more ${verb.toLowerCase()} connection(s)`)
|
||||
}
|
||||
}
|
||||
|
||||
function formatEdgeChanges(summary: WorkflowDiffSummary, changes: string[]): void {
|
||||
if (summary.edgeChanges.added > 0) {
|
||||
formatEdgeDetailList(
|
||||
summary.edgeChanges.addedDetails ?? [],
|
||||
summary.edgeChanges.added,
|
||||
'Added',
|
||||
changes
|
||||
)
|
||||
}
|
||||
if (summary.edgeChanges.removed > 0) {
|
||||
formatEdgeDetailList(
|
||||
summary.edgeChanges.removedDetails ?? [],
|
||||
summary.edgeChanges.removed,
|
||||
'Removed',
|
||||
changes
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
function formatCountChanges(
|
||||
counts: { added: number; removed: number; modified: number },
|
||||
label: string,
|
||||
changes: string[]
|
||||
): void {
|
||||
if (counts.added > 0) changes.push(`Added ${counts.added} ${label}(s)`)
|
||||
if (counts.removed > 0) changes.push(`Removed ${counts.removed} ${label}(s)`)
|
||||
if (counts.modified > 0) changes.push(`Modified ${counts.modified} ${label}(s)`)
|
||||
}
|
||||
|
||||
function formatVariableChanges(summary: WorkflowDiffSummary, changes: string[]): void {
|
||||
const categories = [
|
||||
{
|
||||
count: summary.variableChanges.added,
|
||||
names: summary.variableChanges.addedNames ?? [],
|
||||
verb: 'added',
|
||||
},
|
||||
{
|
||||
count: summary.variableChanges.removed,
|
||||
names: summary.variableChanges.removedNames ?? [],
|
||||
verb: 'removed',
|
||||
},
|
||||
{
|
||||
count: summary.variableChanges.modified,
|
||||
names: summary.variableChanges.modifiedNames ?? [],
|
||||
verb: 'modified',
|
||||
},
|
||||
] as const
|
||||
|
||||
const varParts: string[] = []
|
||||
for (const { count, names, verb } of categories) {
|
||||
if (count > 0) {
|
||||
varParts.push(
|
||||
names.length > 0 ? `${verb} ${names.map((n) => `"${n}"`).join(', ')}` : `${count} ${verb}`
|
||||
)
|
||||
}
|
||||
}
|
||||
if (varParts.length > 0) {
|
||||
changes.push(`Variables: ${varParts.join(', ')}`)
|
||||
}
|
||||
}
|
||||
|
||||
864
apps/sim/lib/workflows/comparison/format-description.test.ts
Normal file
864
apps/sim/lib/workflows/comparison/format-description.test.ts
Normal file
@@ -0,0 +1,864 @@
|
||||
/**
|
||||
* @vitest-environment node
|
||||
*/
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
|
||||
const { mockGetBlock } = vi.hoisted(() => ({
|
||||
mockGetBlock: vi.fn(),
|
||||
}))
|
||||
|
||||
vi.mock('@/lib/workflows/subblocks/visibility', () => ({
|
||||
isNonEmptyValue: (v: unknown) => v !== null && v !== undefined && v !== '',
|
||||
}))
|
||||
|
||||
vi.mock('@/triggers/constants', () => ({
|
||||
SYSTEM_SUBBLOCK_IDS: [],
|
||||
TRIGGER_RUNTIME_SUBBLOCK_IDS: [],
|
||||
}))
|
||||
|
||||
vi.mock('@/blocks/types', () => ({
|
||||
SELECTOR_TYPES_HYDRATION_REQUIRED: [],
|
||||
}))
|
||||
|
||||
vi.mock('@/executor/constants', () => ({
|
||||
CREDENTIAL_SET: { PREFIX: 'cred_set_' },
|
||||
isUuid: (v: string) => /^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/i.test(v),
|
||||
}))
|
||||
|
||||
vi.mock('@/blocks/registry', () => ({
|
||||
getBlock: mockGetBlock,
|
||||
getAllBlocks: () => ({}),
|
||||
getAllBlockTypes: () => [],
|
||||
registry: {},
|
||||
}))
|
||||
|
||||
vi.mock('@/lib/workflows/subblocks/context', () => ({
|
||||
buildSelectorContextFromBlock: vi.fn(() => ({})),
|
||||
}))
|
||||
|
||||
vi.mock('@/hooks/queries/credential-sets', () => ({
|
||||
fetchCredentialSetById: vi.fn(),
|
||||
}))
|
||||
|
||||
vi.mock('@/hooks/queries/oauth/oauth-credentials', () => ({
|
||||
fetchOAuthCredentialDetail: vi.fn(() => []),
|
||||
}))
|
||||
|
||||
vi.mock('@/hooks/selectors/registry', () => ({
|
||||
getSelectorDefinition: vi.fn(() => ({ fetchList: vi.fn(() => []) })),
|
||||
}))
|
||||
|
||||
vi.mock('@/hooks/selectors/resolution', () => ({
|
||||
resolveSelectorForSubBlock: vi.fn(),
|
||||
}))
|
||||
|
||||
import { WorkflowBuilder } from '@sim/testing'
|
||||
import type { WorkflowDiffSummary } from '@/lib/workflows/comparison/compare'
|
||||
import {
|
||||
formatDiffSummaryForDescription,
|
||||
formatDiffSummaryForDescriptionAsync,
|
||||
generateWorkflowDiffSummary,
|
||||
} from '@/lib/workflows/comparison/compare'
|
||||
import { formatValueForDisplay, resolveFieldLabel } from '@/lib/workflows/comparison/resolve-values'
|
||||
|
||||
function emptyDiffSummary(overrides: Partial<WorkflowDiffSummary> = {}): WorkflowDiffSummary {
|
||||
return {
|
||||
addedBlocks: [],
|
||||
removedBlocks: [],
|
||||
modifiedBlocks: [],
|
||||
edgeChanges: { added: 0, removed: 0, addedDetails: [], removedDetails: [] },
|
||||
loopChanges: { added: 0, removed: 0, modified: 0 },
|
||||
parallelChanges: { added: 0, removed: 0, modified: 0 },
|
||||
variableChanges: {
|
||||
added: 0,
|
||||
removed: 0,
|
||||
modified: 0,
|
||||
addedNames: [],
|
||||
removedNames: [],
|
||||
modifiedNames: [],
|
||||
},
|
||||
hasChanges: false,
|
||||
...overrides,
|
||||
}
|
||||
}
|
||||
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks()
|
||||
})
|
||||
|
||||
describe('resolveFieldLabel', () => {
|
||||
it('resolves subBlock id to its title', () => {
|
||||
mockGetBlock.mockReturnValue({
|
||||
subBlocks: [
|
||||
{ id: 'systemPrompt', title: 'System Prompt' },
|
||||
{ id: 'model', title: 'Model' },
|
||||
],
|
||||
})
|
||||
expect(resolveFieldLabel('agent', 'systemPrompt')).toBe('System Prompt')
|
||||
expect(resolveFieldLabel('agent', 'model')).toBe('Model')
|
||||
})
|
||||
|
||||
it('falls back to raw id when block not found', () => {
|
||||
mockGetBlock.mockReturnValue(null)
|
||||
expect(resolveFieldLabel('unknown_type', 'someField')).toBe('someField')
|
||||
})
|
||||
|
||||
it('falls back to raw id when subBlock not found', () => {
|
||||
mockGetBlock.mockReturnValue({ subBlocks: [{ id: 'other', title: 'Other' }] })
|
||||
expect(resolveFieldLabel('agent', 'missingField')).toBe('missingField')
|
||||
})
|
||||
|
||||
it('converts data.* fields to Title Case', () => {
|
||||
expect(resolveFieldLabel('agent', 'data.loopType')).toBe('Loop Type')
|
||||
expect(resolveFieldLabel('agent', 'data.canonicalModes')).toBe('Canonical Modes')
|
||||
expect(resolveFieldLabel('agent', 'data.isStarter')).toBe('Is Starter')
|
||||
})
|
||||
})
|
||||
|
||||
describe('formatValueForDisplay', () => {
|
||||
it('handles null/undefined', () => {
|
||||
expect(formatValueForDisplay(null)).toBe('(none)')
|
||||
expect(formatValueForDisplay(undefined)).toBe('(none)')
|
||||
})
|
||||
|
||||
it('handles booleans', () => {
|
||||
expect(formatValueForDisplay(true)).toBe('enabled')
|
||||
expect(formatValueForDisplay(false)).toBe('disabled')
|
||||
})
|
||||
|
||||
it('truncates long strings', () => {
|
||||
const longStr = 'a'.repeat(60)
|
||||
expect(formatValueForDisplay(longStr)).toBe(`${'a'.repeat(50)}...`)
|
||||
})
|
||||
|
||||
it('handles empty string', () => {
|
||||
expect(formatValueForDisplay('')).toBe('(empty)')
|
||||
})
|
||||
})
|
||||
|
||||
describe('formatDiffSummaryForDescription', () => {
|
||||
it('returns no-changes message for empty diff', () => {
|
||||
const result = formatDiffSummaryForDescription(emptyDiffSummary())
|
||||
expect(result).toBe('No structural changes detected (configuration may have changed)')
|
||||
})
|
||||
|
||||
it('uses human-readable field labels for modified blocks', () => {
|
||||
mockGetBlock.mockReturnValue({
|
||||
subBlocks: [
|
||||
{ id: 'systemPrompt', title: 'System Prompt' },
|
||||
{ id: 'model', title: 'Model' },
|
||||
],
|
||||
})
|
||||
|
||||
const summary = emptyDiffSummary({
|
||||
hasChanges: true,
|
||||
modifiedBlocks: [
|
||||
{
|
||||
id: 'block-1',
|
||||
type: 'agent',
|
||||
name: 'My Agent',
|
||||
changes: [
|
||||
{ field: 'systemPrompt', oldValue: 'You are helpful', newValue: 'You are an expert' },
|
||||
{ field: 'model', oldValue: 'gpt-4o', newValue: 'claude-sonnet-4-5' },
|
||||
],
|
||||
},
|
||||
],
|
||||
})
|
||||
|
||||
const result = formatDiffSummaryForDescription(summary)
|
||||
expect(result).toContain(
|
||||
'Modified My Agent: System Prompt changed from "You are helpful" to "You are an expert"'
|
||||
)
|
||||
expect(result).toContain(
|
||||
'Modified My Agent: Model changed from "gpt-4o" to "claude-sonnet-4-5"'
|
||||
)
|
||||
expect(result).not.toContain('systemPrompt')
|
||||
expect(result).not.toContain('model changed')
|
||||
})
|
||||
|
||||
it('filters out .properties changes', () => {
|
||||
mockGetBlock.mockReturnValue({ subBlocks: [] })
|
||||
|
||||
const summary = emptyDiffSummary({
|
||||
hasChanges: true,
|
||||
modifiedBlocks: [
|
||||
{
|
||||
id: 'block-1',
|
||||
type: 'agent',
|
||||
name: 'Agent',
|
||||
changes: [
|
||||
{ field: 'systemPrompt', oldValue: 'old', newValue: 'new' },
|
||||
{
|
||||
field: 'systemPrompt.properties',
|
||||
oldValue: { some: 'meta' },
|
||||
newValue: { some: 'other' },
|
||||
},
|
||||
{ field: 'model.properties', oldValue: {}, newValue: { x: 1 } },
|
||||
],
|
||||
},
|
||||
],
|
||||
})
|
||||
|
||||
const result = formatDiffSummaryForDescription(summary)
|
||||
expect(result).toContain('systemPrompt changed')
|
||||
expect(result).not.toContain('.properties')
|
||||
expect(result).not.toContain('model.properties')
|
||||
})
|
||||
|
||||
it('respects MAX_CHANGES_PER_BLOCK limit of 6', () => {
|
||||
mockGetBlock.mockReturnValue({ subBlocks: [] })
|
||||
|
||||
const changes = Array.from({ length: 8 }, (_, i) => ({
|
||||
field: `field${i}`,
|
||||
oldValue: `old${i}`,
|
||||
newValue: `new${i}`,
|
||||
}))
|
||||
|
||||
const summary = emptyDiffSummary({
|
||||
hasChanges: true,
|
||||
modifiedBlocks: [{ id: 'b1', type: 'agent', name: 'Agent', changes }],
|
||||
})
|
||||
|
||||
const result = formatDiffSummaryForDescription(summary)
|
||||
const lines = result.split('\n')
|
||||
const modifiedLines = lines.filter((l) => l.startsWith('Modified'))
|
||||
expect(modifiedLines).toHaveLength(6)
|
||||
expect(result).toContain('...and 2 more changes in Agent')
|
||||
})
|
||||
|
||||
it('shows edge changes with block names', () => {
|
||||
const summary = emptyDiffSummary({
|
||||
hasChanges: true,
|
||||
edgeChanges: {
|
||||
added: 2,
|
||||
removed: 1,
|
||||
addedDetails: [
|
||||
{ sourceName: 'My Agent', targetName: 'Slack' },
|
||||
{ sourceName: 'Router', targetName: 'Gmail' },
|
||||
],
|
||||
removedDetails: [{ sourceName: 'Function', targetName: 'Webhook' }],
|
||||
},
|
||||
})
|
||||
|
||||
const result = formatDiffSummaryForDescription(summary)
|
||||
expect(result).toContain('Added connection: My Agent -> Slack')
|
||||
expect(result).toContain('Added connection: Router -> Gmail')
|
||||
expect(result).toContain('Removed connection: Function -> Webhook')
|
||||
})
|
||||
|
||||
it('truncates edge details beyond MAX_EDGE_DETAILS', () => {
|
||||
const summary = emptyDiffSummary({
|
||||
hasChanges: true,
|
||||
edgeChanges: {
|
||||
added: 5,
|
||||
removed: 0,
|
||||
addedDetails: [
|
||||
{ sourceName: 'A', targetName: 'B' },
|
||||
{ sourceName: 'C', targetName: 'D' },
|
||||
{ sourceName: 'E', targetName: 'F' },
|
||||
{ sourceName: 'G', targetName: 'H' },
|
||||
{ sourceName: 'I', targetName: 'J' },
|
||||
],
|
||||
removedDetails: [],
|
||||
},
|
||||
})
|
||||
|
||||
const result = formatDiffSummaryForDescription(summary)
|
||||
const connectionLines = result.split('\n').filter((l) => l.startsWith('Added connection'))
|
||||
expect(connectionLines).toHaveLength(3)
|
||||
expect(result).toContain('...and 2 more added connection(s)')
|
||||
})
|
||||
|
||||
it('shows variable changes with names', () => {
|
||||
const summary = emptyDiffSummary({
|
||||
hasChanges: true,
|
||||
variableChanges: {
|
||||
added: 2,
|
||||
removed: 1,
|
||||
modified: 1,
|
||||
addedNames: ['counter', 'apiKey'],
|
||||
removedNames: ['oldVar'],
|
||||
modifiedNames: ['threshold'],
|
||||
},
|
||||
})
|
||||
|
||||
const result = formatDiffSummaryForDescription(summary)
|
||||
expect(result).toContain(
|
||||
'Variables: added "counter", "apiKey", removed "oldVar", modified "threshold"'
|
||||
)
|
||||
})
|
||||
|
||||
it('handles data.* fields with Title Case labels', () => {
|
||||
mockGetBlock.mockReturnValue({ subBlocks: [] })
|
||||
|
||||
const summary = emptyDiffSummary({
|
||||
hasChanges: true,
|
||||
modifiedBlocks: [
|
||||
{
|
||||
id: 'b1',
|
||||
type: 'agent',
|
||||
name: 'Agent',
|
||||
changes: [
|
||||
{ field: 'data.loopType', oldValue: 'for', newValue: 'forEach' },
|
||||
{ field: 'data.isStarter', oldValue: true, newValue: false },
|
||||
],
|
||||
},
|
||||
],
|
||||
})
|
||||
|
||||
const result = formatDiffSummaryForDescription(summary)
|
||||
expect(result).toContain('Modified Agent: Loop Type changed from "for" to "forEach"')
|
||||
expect(result).toContain('Modified Agent: Is Starter changed from "enabled" to "disabled"')
|
||||
})
|
||||
|
||||
// Integration-style case: add/remove/modify blocks plus edge and variable
// changes in one summary, with per-block-type subBlock configs mocked so
// field IDs resolve to their display titles.
it('formats a realistic multi-block workflow change', () => {
  mockGetBlock.mockImplementation((type: string) => {
    if (type === 'agent') {
      return {
        subBlocks: [
          { id: 'systemPrompt', title: 'System Prompt' },
          { id: 'model', title: 'Model' },
          { id: 'temperature', title: 'Temperature' },
        ],
      }
    }
    if (type === 'slack') {
      return {
        subBlocks: [
          {
            id: 'operation',
            title: 'Operation',
            type: 'dropdown',
            options: [
              { id: 'slack_send_message', label: 'Send Message' },
              { id: 'slack_list_channels', label: 'List Channels' },
            ],
          },
          { id: 'channel', title: 'Channel' },
          { id: 'credential', title: 'Slack Account' },
        ],
      }
    }
    // Unknown block types have no registered config.
    return null
  })

  const summary = emptyDiffSummary({
    hasChanges: true,
    addedBlocks: [{ id: 'b3', type: 'gmail', name: 'Gmail Notifications' }],
    removedBlocks: [{ id: 'b4', type: 'function', name: 'Legacy Transform' }],
    modifiedBlocks: [
      {
        id: 'b1',
        type: 'agent',
        name: 'AI Assistant',
        changes: [
          { field: 'model', oldValue: 'gpt-4o', newValue: 'claude-sonnet-4-5' },
          { field: 'temperature', oldValue: '0.7', newValue: '0.3' },
        ],
      },
      {
        id: 'b2',
        type: 'slack',
        name: 'Slack Alert',
        changes: [{ field: 'channel', oldValue: '#general', newValue: '#alerts' }],
      },
    ],
    edgeChanges: {
      added: 1,
      removed: 0,
      addedDetails: [{ sourceName: 'AI Assistant', targetName: 'Gmail Notifications' }],
      removedDetails: [],
    },
    variableChanges: {
      added: 1,
      removed: 0,
      modified: 0,
      addedNames: ['errorCount'],
      removedNames: [],
      modifiedNames: [],
    },
  })

  const result = formatDiffSummaryForDescription(summary)

  // One assertion per category of change in the summary above.
  expect(result).toContain('Added block: Gmail Notifications (gmail)')
  expect(result).toContain('Removed block: Legacy Transform (function)')
  expect(result).toContain(
    'Modified AI Assistant: Model changed from "gpt-4o" to "claude-sonnet-4-5"'
  )
  expect(result).toContain('Modified AI Assistant: Temperature changed from "0.7" to "0.3"')
  expect(result).toContain('Modified Slack Alert: Channel changed from "#general" to "#alerts"')
  expect(result).toContain('Added connection: AI Assistant -> Gmail Notifications')
  expect(result).toContain('Variables: added "errorCount"')
})
|
||||
})
|
||||
|
||||
describe('formatDiffSummaryForDescriptionAsync', () => {
|
||||
it('resolves dropdown values to labels', async () => {
|
||||
mockGetBlock.mockReturnValue({
|
||||
subBlocks: [
|
||||
{
|
||||
id: 'operation',
|
||||
title: 'Operation',
|
||||
type: 'dropdown',
|
||||
options: [
|
||||
{ id: 'calendly_get_current_user', label: 'Get Current User' },
|
||||
{ id: 'calendly_list_event_types', label: 'List Event Types' },
|
||||
],
|
||||
},
|
||||
],
|
||||
})
|
||||
|
||||
const summary = emptyDiffSummary({
|
||||
hasChanges: true,
|
||||
modifiedBlocks: [
|
||||
{
|
||||
id: 'b1',
|
||||
type: 'calendly',
|
||||
name: 'Calendly',
|
||||
changes: [
|
||||
{
|
||||
field: 'operation',
|
||||
oldValue: 'calendly_get_current_user',
|
||||
newValue: 'calendly_list_event_types',
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
})
|
||||
|
||||
const mockState = { blocks: {} } as any
|
||||
const result = await formatDiffSummaryForDescriptionAsync(summary, mockState, 'wf-1')
|
||||
expect(result).toContain(
|
||||
'Modified Calendly: Operation changed from "Get Current User" to "List Event Types"'
|
||||
)
|
||||
expect(result).not.toContain('calendly_get_current_user')
|
||||
})
|
||||
|
||||
it('uses field titles in async path', async () => {
|
||||
mockGetBlock.mockReturnValue({
|
||||
subBlocks: [{ id: 'systemPrompt', title: 'System Prompt' }],
|
||||
})
|
||||
|
||||
const summary = emptyDiffSummary({
|
||||
hasChanges: true,
|
||||
modifiedBlocks: [
|
||||
{
|
||||
id: 'b1',
|
||||
type: 'agent',
|
||||
name: 'Agent',
|
||||
changes: [{ field: 'systemPrompt', oldValue: 'Be helpful', newValue: 'Be concise' }],
|
||||
},
|
||||
],
|
||||
})
|
||||
|
||||
const mockState = { blocks: {} } as any
|
||||
const result = await formatDiffSummaryForDescriptionAsync(summary, mockState, 'wf-1')
|
||||
expect(result).toContain('System Prompt')
|
||||
expect(result).not.toContain('systemPrompt')
|
||||
})
|
||||
|
||||
it('filters .properties changes in async path', async () => {
|
||||
mockGetBlock.mockReturnValue({ subBlocks: [] })
|
||||
|
||||
const summary = emptyDiffSummary({
|
||||
hasChanges: true,
|
||||
modifiedBlocks: [
|
||||
{
|
||||
id: 'b1',
|
||||
type: 'agent',
|
||||
name: 'Agent',
|
||||
changes: [
|
||||
{ field: 'prompt', oldValue: 'old', newValue: 'new' },
|
||||
{ field: 'prompt.properties', oldValue: {}, newValue: { x: 1 } },
|
||||
],
|
||||
},
|
||||
],
|
||||
})
|
||||
|
||||
const mockState = { blocks: {} } as any
|
||||
const result = await formatDiffSummaryForDescriptionAsync(summary, mockState, 'wf-1')
|
||||
expect(result).not.toContain('.properties')
|
||||
})
|
||||
})
|
||||
|
||||
// End-to-end coverage: builds two real workflow states with WorkflowBuilder,
// runs the diff generator over them, then checks the formatted description.
// Unlike the unit tests above, the summaries here are computed, not hand-built.
describe('end-to-end: generateWorkflowDiffSummary + formatDiffSummaryForDescription', () => {
  beforeEach(() => {
    // Default: no block config registered, so raw field IDs pass through.
    mockGetBlock.mockReturnValue(null)
  })

  it('detects added and removed blocks between two workflow versions', () => {
    const previous = new WorkflowBuilder()
      .addStarter('start')
      .addAgent('agent-1', undefined, 'Summarizer')
      .connect('start', 'agent-1')
      .build()

    // Same workflow plus one function block and its inbound edge.
    const current = new WorkflowBuilder()
      .addStarter('start')
      .addAgent('agent-1', undefined, 'Summarizer')
      .addFunction('func-1', undefined, 'Formatter')
      .connect('start', 'agent-1')
      .connect('agent-1', 'func-1')
      .build()

    const summary = generateWorkflowDiffSummary(current, previous)
    const result = formatDiffSummaryForDescription(summary)

    expect(result).toContain('Added block: Formatter (function)')
    expect(result).toContain('Added connection: Summarizer -> Formatter')
    expect(result).not.toContain('Removed')
  })

  it('detects block removal and edge removal', () => {
    const previous = new WorkflowBuilder()
      .addStarter('start')
      .addAgent('agent-1', undefined, 'Classifier')
      .addFunction('func-1', undefined, 'Logger')
      .connect('start', 'agent-1')
      .connect('agent-1', 'func-1')
      .build()

    // Logger block and its edge are gone in the current version.
    const current = new WorkflowBuilder()
      .addStarter('start')
      .addAgent('agent-1', undefined, 'Classifier')
      .connect('start', 'agent-1')
      .build()

    const summary = generateWorkflowDiffSummary(current, previous)
    const result = formatDiffSummaryForDescription(summary)

    expect(result).toContain('Removed block: Logger (function)')
    expect(result).toContain('Removed connection: Classifier -> Logger')
    expect(result).not.toContain('Added block')
  })

  it('detects subBlock value changes on modified blocks', () => {
    const previous = new WorkflowBuilder()
      .addStarter('start')
      .addAgent('agent-1', undefined, 'Writer')
      .connect('start', 'agent-1')
      .build()
    // Seed subBlock values directly on the built state.
    previous.blocks['agent-1'].subBlocks = {
      systemPrompt: { id: 'systemPrompt', value: 'You are a helpful assistant' },
      model: { id: 'model', value: 'gpt-4o' },
    }

    const current = new WorkflowBuilder()
      .addStarter('start')
      .addAgent('agent-1', undefined, 'Writer')
      .connect('start', 'agent-1')
      .build()
    current.blocks['agent-1'].subBlocks = {
      systemPrompt: { id: 'systemPrompt', value: 'You are a concise writer' },
      model: { id: 'model', value: 'claude-sonnet-4-5' },
    }

    // Titles registered so field IDs resolve to display labels.
    mockGetBlock.mockReturnValue({
      subBlocks: [
        { id: 'systemPrompt', title: 'System Prompt' },
        { id: 'model', title: 'Model' },
      ],
    })

    const summary = generateWorkflowDiffSummary(current, previous)
    const result = formatDiffSummaryForDescription(summary)

    expect(result).toContain(
      'Modified Writer: System Prompt changed from "You are a helpful assistant" to "You are a concise writer"'
    )
    expect(result).toContain('Modified Writer: Model changed from "gpt-4o" to "claude-sonnet-4-5"')
  })

  it('detects loop addition with correct count', () => {
    const previous = new WorkflowBuilder()
      .addStarter('start')
      .addFunction('func-1', undefined, 'Process')
      .connect('start', 'func-1')
      .build()

    // Current version gains a loop container plus a child block inside it.
    const current = new WorkflowBuilder()
      .addStarter('start')
      .addFunction('func-1', undefined, 'Process')
      .addLoop('loop-1', undefined, { iterations: 5, loopType: 'for' })
      .addLoopChild('loop-1', 'loop-body', 'function')
      .connect('start', 'func-1')
      .connect('func-1', 'loop-1')
      .build()

    const summary = generateWorkflowDiffSummary(current, previous)
    const result = formatDiffSummaryForDescription(summary)

    // Both the loop container and its child count as added blocks.
    expect(result).toContain('Added block: Loop (loop)')
    expect(result).toContain('Added block: loop-body (function)')
    expect(result).toContain('Added 1 loop(s)')
    expect(result).toContain('Added connection: Process -> Loop')
  })

  it('detects loop removal', () => {
    const previous = new WorkflowBuilder()
      .addStarter('start')
      .addLoop('loop-1', undefined, { iterations: 3, loopType: 'for' })
      .addLoopChild('loop-1', 'loop-body', 'agent')
      .connect('start', 'loop-1')
      .build()

    const current = new WorkflowBuilder()
      .addStarter('start')
      .addAgent('agent-1', undefined, 'Direct Agent')
      .connect('start', 'agent-1')
      .build()

    const summary = generateWorkflowDiffSummary(current, previous)
    const result = formatDiffSummaryForDescription(summary)

    expect(result).toContain('Removed block: Loop (loop)')
    expect(result).toContain('Removed 1 loop(s)')
    expect(result).toContain('Added block: Direct Agent (agent)')
  })

  it('detects loop modification when iterations change', () => {
    const previous = new WorkflowBuilder()
      .addStarter('start')
      .addLoop('loop-1', undefined, { iterations: 3, loopType: 'for' })
      .addLoopChild('loop-1', 'loop-body', 'function')
      .connect('start', 'loop-1')
      .build()

    // Identical structure; only the iteration count differs (3 -> 10).
    const current = new WorkflowBuilder()
      .addStarter('start')
      .addLoop('loop-1', undefined, { iterations: 10, loopType: 'for' })
      .addLoopChild('loop-1', 'loop-body', 'function')
      .connect('start', 'loop-1')
      .build()

    const summary = generateWorkflowDiffSummary(current, previous)
    const result = formatDiffSummaryForDescription(summary)

    expect(result).toContain('Modified 1 loop(s)')
  })

  it('detects parallel addition', () => {
    const previous = new WorkflowBuilder()
      .addStarter('start')
      .addFunction('func-1', undefined, 'Sequencer')
      .connect('start', 'func-1')
      .build()

    // Function block replaced by a parallel group with two children.
    const current = new WorkflowBuilder()
      .addStarter('start')
      .addParallel('par-1', undefined, { count: 3, parallelType: 'count' })
      .addParallelChild('par-1', 'par-task-1', 'agent')
      .addParallelChild('par-1', 'par-task-2', 'function')
      .connect('start', 'par-1')
      .build()

    const summary = generateWorkflowDiffSummary(current, previous)
    const result = formatDiffSummaryForDescription(summary)

    expect(result).toContain('Added block: Parallel (parallel)')
    expect(result).toContain('Added 1 parallel group(s)')
    expect(result).toContain('Removed block: Sequencer (function)')
  })

  it('detects parallel removal', () => {
    const previous = new WorkflowBuilder()
      .addStarter('start')
      .addParallel('par-1', undefined, { count: 2 })
      .addParallelChild('par-1', 'par-task', 'function')
      .connect('start', 'par-1')
      .build()

    const current = new WorkflowBuilder()
      .addStarter('start')
      .addFunction('func-1', undefined, 'Simple Step')
      .connect('start', 'func-1')
      .build()

    const summary = generateWorkflowDiffSummary(current, previous)
    const result = formatDiffSummaryForDescription(summary)

    expect(result).toContain('Removed block: Parallel (parallel)')
    expect(result).toContain('Removed 1 parallel group(s)')
    expect(result).toContain('Added block: Simple Step (function)')
  })

  it('detects parallel modification when count changes', () => {
    const previous = new WorkflowBuilder()
      .addStarter('start')
      .addParallel('par-1', undefined, { count: 2, parallelType: 'count' })
      .addParallelChild('par-1', 'par-task', 'function')
      .connect('start', 'par-1')
      .build()

    // Same shape; only the parallel count differs (2 -> 5).
    const current = new WorkflowBuilder()
      .addStarter('start')
      .addParallel('par-1', undefined, { count: 5, parallelType: 'count' })
      .addParallelChild('par-1', 'par-task', 'function')
      .connect('start', 'par-1')
      .build()

    const summary = generateWorkflowDiffSummary(current, previous)
    const result = formatDiffSummaryForDescription(summary)

    expect(result).toContain('Modified 1 parallel group(s)')
  })

  it('detects variable additions and removals with names', () => {
    const previous = new WorkflowBuilder().addStarter('start').build()
    previous.variables = {
      v1: { id: 'v1', name: 'retryCount', type: 'number', value: 3 },
      v2: { id: 'v2', name: 'apiEndpoint', type: 'string', value: 'https://api.example.com' },
    }

    // retryCount modified, apiEndpoint removed, timeout added.
    const current = new WorkflowBuilder().addStarter('start').build()
    current.variables = {
      v1: { id: 'v1', name: 'retryCount', type: 'number', value: 5 },
      v3: { id: 'v3', name: 'timeout', type: 'number', value: 30 },
    }

    const summary = generateWorkflowDiffSummary(current, previous)
    const result = formatDiffSummaryForDescription(summary)

    expect(result).toContain('Variables:')
    expect(result).toContain('added "timeout"')
    expect(result).toContain('removed "apiEndpoint"')
    expect(result).toContain('modified "retryCount"')
  })

  it('produces no-change message for identical workflows', () => {
    const workflow = new WorkflowBuilder()
      .addStarter('start')
      .addAgent('agent-1', undefined, 'Agent')
      .connect('start', 'agent-1')
      .build()

    // Diffing a workflow against itself must yield the canonical no-op string.
    const summary = generateWorkflowDiffSummary(workflow, workflow)
    const result = formatDiffSummaryForDescription(summary)

    expect(result).toBe('No structural changes detected (configuration may have changed)')
  })

  it('handles complex scenario: loop replaced with parallel + new connections + variables', () => {
    const previous = new WorkflowBuilder()
      .addStarter('start')
      .addLoop('loop-1', undefined, { iterations: 5 })
      .addLoopChild('loop-1', 'loop-task', 'agent')
      .addFunction('sink', undefined, 'Output')
      .connect('start', 'loop-1')
      .connect('loop-1', 'sink')
      .build()
    previous.variables = {
      v1: { id: 'v1', name: 'batchSize', type: 'number', value: 10 },
    }

    // Loop swapped for a parallel group, an aggregator inserted before the
    // sink, one variable modified and one added.
    const current = new WorkflowBuilder()
      .addStarter('start')
      .addParallel('par-1', undefined, { count: 3 })
      .addParallelChild('par-1', 'par-task', 'agent')
      .addFunction('sink', undefined, 'Output')
      .addAgent('agg', undefined, 'Aggregator')
      .connect('start', 'par-1')
      .connect('par-1', 'agg')
      .connect('agg', 'sink')
      .build()
    current.variables = {
      v1: { id: 'v1', name: 'batchSize', type: 'number', value: 25 },
      v2: { id: 'v2', name: 'concurrency', type: 'number', value: 3 },
    }

    const summary = generateWorkflowDiffSummary(current, previous)
    const result = formatDiffSummaryForDescription(summary)

    expect(result).toContain('Added block: Parallel (parallel)')
    expect(result).toContain('Added block: Aggregator (agent)')
    expect(result).toContain('Removed block: Loop (loop)')
    expect(result).toContain('Added 1 parallel group(s)')
    expect(result).toContain('Removed 1 loop(s)')
    expect(result).toContain('added "concurrency"')
    expect(result).toContain('modified "batchSize"')

    // Sanity check: a change this large yields a multi-line description.
    const lines = result.split('\n')
    expect(lines.length).toBeGreaterThanOrEqual(7)
  })

  it('detects edge rewiring without block changes', () => {
    // Fan-out topology: start feeds A and B; both feed the sink.
    const previous = new WorkflowBuilder()
      .addStarter('start')
      .addAgent('a', undefined, 'Agent A')
      .addAgent('b', undefined, 'Agent B')
      .addFunction('sink', undefined, 'Output')
      .connect('start', 'a')
      .connect('a', 'sink')
      .connect('start', 'b')
      .connect('b', 'sink')
      .build()

    // Rewired to a chain: start -> A -> B -> sink. Same blocks throughout.
    const current = new WorkflowBuilder()
      .addStarter('start')
      .addAgent('a', undefined, 'Agent A')
      .addAgent('b', undefined, 'Agent B')
      .addFunction('sink', undefined, 'Output')
      .connect('start', 'a')
      .connect('a', 'b')
      .connect('b', 'sink')
      .build()

    const summary = generateWorkflowDiffSummary(current, previous)
    const result = formatDiffSummaryForDescription(summary)

    expect(summary.addedBlocks).toHaveLength(0)
    expect(summary.removedBlocks).toHaveLength(0)
    expect(result).toContain('Added connection: Agent A -> Agent B')
    expect(result).toContain('Removed connection:')
    expect(result).not.toContain('Added block')
    expect(result).not.toContain('Removed block')
  })

  it('detects data field changes with human-readable labels', () => {
    const previous = new WorkflowBuilder()
      .addStarter('start')
      .addBlock('custom-1', 'function', undefined, 'Processor')
      .connect('start', 'custom-1')
      .build()
    previous.blocks['custom-1'].data = { isStarter: true, retryPolicy: 'linear' }

    const current = new WorkflowBuilder()
      .addStarter('start')
      .addBlock('custom-1', 'function', undefined, 'Processor')
      .connect('start', 'custom-1')
      .build()
    current.blocks['custom-1'].data = { isStarter: false, retryPolicy: 'exponential' }

    const summary = generateWorkflowDiffSummary(current, previous)
    const result = formatDiffSummaryForDescription(summary)

    // camelCase data keys become Title Case; booleans become enabled/disabled.
    expect(result).toContain('Is Starter')
    expect(result).toContain('Retry Policy')
    expect(result).toContain('enabled')
    expect(result).toContain('disabled')
    expect(result).toContain('linear')
    expect(result).toContain('exponential')
  })

  it('detects loop type change via loop config modification', () => {
    const previous = new WorkflowBuilder()
      .addStarter('start')
      .addLoop('loop-1', undefined, { iterations: 3, loopType: 'for' })
      .addLoopChild('loop-1', 'loop-body', 'function')
      .connect('start', 'loop-1')
      .build()

    // Only loopType changes ('for' -> 'forEach'); structure is identical.
    const current = new WorkflowBuilder()
      .addStarter('start')
      .addLoop('loop-1', undefined, { iterations: 3, loopType: 'forEach' })
      .addLoopChild('loop-1', 'loop-body', 'function')
      .connect('start', 'loop-1')
      .build()

    const summary = generateWorkflowDiffSummary(current, previous)
    const result = formatDiffSummaryForDescription(summary)

    expect(result).toContain('Modified 1 loop(s)')
  })
})
|
||||
@@ -9,6 +9,7 @@ import { getSelectorDefinition } from '@/hooks/selectors/registry'
|
||||
import { resolveSelectorForSubBlock } from '@/hooks/selectors/resolution'
|
||||
import type { SelectorContext, SelectorKey } from '@/hooks/selectors/types'
|
||||
import type { WorkflowState } from '@/stores/workflows/workflow/types'
|
||||
import { formatParameterLabel } from '@/tools/params'
|
||||
|
||||
const logger = createLogger('ResolveValues')
|
||||
|
||||
@@ -126,6 +127,33 @@ function extractMcpToolName(toolId: string): string {
|
||||
return withoutPrefix
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolves a subBlock field ID to its human-readable title.
|
||||
* Falls back to the raw ID if the block or subBlock is not found.
|
||||
*/
|
||||
export function resolveFieldLabel(blockType: string, subBlockId: string): string {
|
||||
if (subBlockId.startsWith('data.')) {
|
||||
return formatParameterLabel(subBlockId.slice(5))
|
||||
}
|
||||
const blockConfig = getBlock(blockType)
|
||||
if (!blockConfig) return subBlockId
|
||||
const subBlockConfig = blockConfig.subBlocks.find((sb) => sb.id === subBlockId)
|
||||
return subBlockConfig?.title ?? subBlockId
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolves a dropdown option ID to its human-readable label.
|
||||
* Returns null if the subBlock is not a dropdown or the value is not found.
|
||||
*/
|
||||
function resolveDropdownLabel(subBlockConfig: SubBlockConfig, value: string): string | null {
|
||||
if (subBlockConfig.type !== 'dropdown') return null
|
||||
if (!subBlockConfig.options) return null
|
||||
const options =
|
||||
typeof subBlockConfig.options === 'function' ? subBlockConfig.options() : subBlockConfig.options
|
||||
const match = options.find((opt) => opt.id === value)
|
||||
return match?.label ?? null
|
||||
}
|
||||
|
||||
/**
|
||||
* Formats a value for display in diff descriptions.
|
||||
*/
|
||||
@@ -138,7 +166,10 @@ export function formatValueForDisplay(value: unknown): string {
|
||||
if (typeof value === 'boolean') return value ? 'enabled' : 'disabled'
|
||||
if (typeof value === 'number') return String(value)
|
||||
if (Array.isArray(value)) return `[${value.length} items]`
|
||||
if (typeof value === 'object') return `${JSON.stringify(value).slice(0, 50)}...`
|
||||
if (typeof value === 'object') {
|
||||
const json = JSON.stringify(value)
|
||||
return json.length > 50 ? `${json.slice(0, 50)}...` : json
|
||||
}
|
||||
return String(value)
|
||||
}
|
||||
|
||||
@@ -165,7 +196,6 @@ export async function resolveValueForDisplay(
|
||||
value: unknown,
|
||||
context: ResolutionContext
|
||||
): Promise<ResolvedValue> {
|
||||
// Non-string or empty values can't be resolved
|
||||
if (typeof value !== 'string' || !value) {
|
||||
return {
|
||||
original: value,
|
||||
@@ -190,9 +220,8 @@ export async function resolveValueForDisplay(
|
||||
)
|
||||
: { workflowId: context.workflowId, workspaceId: context.workspaceId }
|
||||
|
||||
// Credential fields (oauth-input or credential subBlockId)
|
||||
const isCredentialField =
|
||||
subBlockConfig?.type === 'oauth-input' || context.subBlockId === 'credential'
|
||||
subBlockConfig.type === 'oauth-input' || context.subBlockId === 'credential'
|
||||
|
||||
if (isCredentialField && (value.startsWith(CREDENTIAL_SET.PREFIX) || isUuid(value))) {
|
||||
const label = await resolveCredential(value, context.workflowId)
|
||||
@@ -202,8 +231,7 @@ export async function resolveValueForDisplay(
|
||||
return { original: value, displayLabel: semanticFallback, resolved: true }
|
||||
}
|
||||
|
||||
// Workflow selector
|
||||
if (subBlockConfig?.type === 'workflow-selector' && isUuid(value)) {
|
||||
if (subBlockConfig.type === 'workflow-selector' && isUuid(value)) {
|
||||
const label = await resolveWorkflow(value, selectorCtx.workspaceId)
|
||||
if (label) {
|
||||
return { original: value, displayLabel: label, resolved: true }
|
||||
@@ -211,15 +239,27 @@ export async function resolveValueForDisplay(
|
||||
return { original: value, displayLabel: semanticFallback, resolved: true }
|
||||
}
|
||||
|
||||
// MCP tool selector
|
||||
if (subBlockConfig?.type === 'mcp-tool-selector') {
|
||||
if (subBlockConfig.type === 'mcp-tool-selector') {
|
||||
const toolName = extractMcpToolName(value)
|
||||
return { original: value, displayLabel: toolName, resolved: true }
|
||||
}
|
||||
|
||||
// Selector types that require hydration (file-selector, sheet-selector, etc.)
|
||||
// These support external service IDs like Google Drive file IDs
|
||||
if (subBlockConfig && SELECTOR_TYPES_HYDRATION_REQUIRED.includes(subBlockConfig.type)) {
|
||||
if (subBlockConfig.type === 'dropdown') {
|
||||
try {
|
||||
const label = resolveDropdownLabel(subBlockConfig, value)
|
||||
if (label) {
|
||||
return { original: value, displayLabel: label, resolved: true }
|
||||
}
|
||||
} catch (error) {
|
||||
logger.warn('Failed to resolve dropdown label', {
|
||||
value,
|
||||
subBlockId: context.subBlockId,
|
||||
error,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
if (SELECTOR_TYPES_HYDRATION_REQUIRED.includes(subBlockConfig.type)) {
|
||||
const resolution = resolveSelectorForSubBlock(subBlockConfig, selectorCtx)
|
||||
|
||||
if (resolution?.key) {
|
||||
@@ -228,22 +268,17 @@ export async function resolveValueForDisplay(
|
||||
return { original: value, displayLabel: label, resolved: true }
|
||||
}
|
||||
}
|
||||
// If resolution failed for a hydration-required type, use semantic fallback
|
||||
return { original: value, displayLabel: semanticFallback, resolved: true }
|
||||
}
|
||||
|
||||
// For fields without specific subBlock types, use pattern matching
|
||||
// UUID fallback
|
||||
if (isUuid(value)) {
|
||||
return { original: value, displayLabel: semanticFallback, resolved: true }
|
||||
}
|
||||
|
||||
// Slack-style IDs (channels: C..., users: U.../W...) get semantic fallback
|
||||
if (/^C[A-Z0-9]{8,}$/.test(value) || /^[UW][A-Z0-9]{8,}$/.test(value)) {
|
||||
return { original: value, displayLabel: semanticFallback, resolved: true }
|
||||
}
|
||||
|
||||
// Credential set prefix without credential field type
|
||||
if (value.startsWith(CREDENTIAL_SET.PREFIX)) {
|
||||
const label = await resolveCredential(value, context.workflowId)
|
||||
if (label) {
|
||||
|
||||
121
apps/sim/tools/jsm/get_form_structure.ts
Normal file
121
apps/sim/tools/jsm/get_form_structure.ts
Normal file
@@ -0,0 +1,121 @@
|
||||
import type { JsmGetFormStructureParams, JsmGetFormStructureResponse } from '@/tools/jsm/types'
|
||||
import type { ToolConfig } from '@/tools/types'
|
||||
|
||||
export const jsmGetFormStructureTool: ToolConfig<
|
||||
JsmGetFormStructureParams,
|
||||
JsmGetFormStructureResponse
|
||||
> = {
|
||||
id: 'jsm_get_form_structure',
|
||||
name: 'JSM Get Form Structure',
|
||||
description:
|
||||
'Get the full structure of a ProForma/JSM form including all questions, field types, choices, layout, and conditions',
|
||||
version: '1.0.0',
|
||||
|
||||
oauth: {
|
||||
required: true,
|
||||
provider: 'jira',
|
||||
},
|
||||
|
||||
params: {
|
||||
accessToken: {
|
||||
type: 'string',
|
||||
required: true,
|
||||
visibility: 'hidden',
|
||||
description: 'OAuth access token for Jira Service Management',
|
||||
},
|
||||
domain: {
|
||||
type: 'string',
|
||||
required: true,
|
||||
visibility: 'user-or-llm',
|
||||
description: 'Your Jira domain (e.g., yourcompany.atlassian.net)',
|
||||
},
|
||||
cloudId: {
|
||||
type: 'string',
|
||||
required: false,
|
||||
visibility: 'hidden',
|
||||
description: 'Jira Cloud ID for the instance',
|
||||
},
|
||||
projectIdOrKey: {
|
||||
type: 'string',
|
||||
required: true,
|
||||
visibility: 'user-or-llm',
|
||||
description: 'Jira project ID or key (e.g., "10001" or "SD")',
|
||||
},
|
||||
formId: {
|
||||
type: 'string',
|
||||
required: true,
|
||||
visibility: 'user-or-llm',
|
||||
description: 'Form ID (UUID from Get Form Templates)',
|
||||
},
|
||||
},
|
||||
|
||||
request: {
|
||||
url: '/api/tools/jsm/forms/structure',
|
||||
method: 'POST',
|
||||
headers: () => ({
|
||||
'Content-Type': 'application/json',
|
||||
}),
|
||||
body: (params) => ({
|
||||
domain: params.domain,
|
||||
accessToken: params.accessToken,
|
||||
cloudId: params.cloudId,
|
||||
projectIdOrKey: params.projectIdOrKey,
|
||||
formId: params.formId,
|
||||
}),
|
||||
},
|
||||
|
||||
transformResponse: async (response: Response) => {
|
||||
const responseText = await response.text()
|
||||
|
||||
if (!responseText) {
|
||||
return {
|
||||
success: false,
|
||||
output: {
|
||||
ts: new Date().toISOString(),
|
||||
projectIdOrKey: '',
|
||||
formId: '',
|
||||
design: null,
|
||||
updated: null,
|
||||
publish: null,
|
||||
},
|
||||
error: 'Empty response from API',
|
||||
}
|
||||
}
|
||||
|
||||
const data = JSON.parse(responseText)
|
||||
|
||||
if (data.success && data.output) {
|
||||
return data
|
||||
}
|
||||
|
||||
return {
|
||||
success: data.success || false,
|
||||
output: data.output || {
|
||||
ts: new Date().toISOString(),
|
||||
projectIdOrKey: '',
|
||||
formId: '',
|
||||
design: null,
|
||||
updated: null,
|
||||
publish: null,
|
||||
},
|
||||
error: data.error,
|
||||
}
|
||||
},
|
||||
|
||||
outputs: {
|
||||
ts: { type: 'string', description: 'Timestamp of the operation' },
|
||||
projectIdOrKey: { type: 'string', description: 'Project ID or key' },
|
||||
formId: { type: 'string', description: 'Form ID' },
|
||||
design: {
|
||||
type: 'json',
|
||||
description:
|
||||
'Full form design with questions (field types, labels, choices, validation), layout (field ordering), and conditions',
|
||||
},
|
||||
updated: { type: 'string', description: 'Last updated timestamp', optional: true },
|
||||
publish: {
|
||||
type: 'json',
|
||||
description: 'Publishing and request type configuration',
|
||||
optional: true,
|
||||
},
|
||||
},
|
||||
}
|
||||
108
apps/sim/tools/jsm/get_form_templates.ts
Normal file
108
apps/sim/tools/jsm/get_form_templates.ts
Normal file
@@ -0,0 +1,108 @@
|
||||
import type { JsmGetFormTemplatesParams, JsmGetFormTemplatesResponse } from '@/tools/jsm/types'
|
||||
import { FORM_TEMPLATE_PROPERTIES } from '@/tools/jsm/types'
|
||||
import type { ToolConfig } from '@/tools/types'
|
||||
|
||||
export const jsmGetFormTemplatesTool: ToolConfig<
|
||||
JsmGetFormTemplatesParams,
|
||||
JsmGetFormTemplatesResponse
|
||||
> = {
|
||||
id: 'jsm_get_form_templates',
|
||||
name: 'JSM Get Form Templates',
|
||||
description:
|
||||
'List forms (ProForma/JSM Forms) in a Jira project to discover form IDs for request types',
|
||||
version: '1.0.0',
|
||||
|
||||
oauth: {
|
||||
required: true,
|
||||
provider: 'jira',
|
||||
},
|
||||
|
||||
params: {
|
||||
accessToken: {
|
||||
type: 'string',
|
||||
required: true,
|
||||
visibility: 'hidden',
|
||||
description: 'OAuth access token for Jira Service Management',
|
||||
},
|
||||
domain: {
|
||||
type: 'string',
|
||||
required: true,
|
||||
visibility: 'user-or-llm',
|
||||
description: 'Your Jira domain (e.g., yourcompany.atlassian.net)',
|
||||
},
|
||||
cloudId: {
|
||||
type: 'string',
|
||||
required: false,
|
||||
visibility: 'hidden',
|
||||
description: 'Jira Cloud ID for the instance',
|
||||
},
|
||||
projectIdOrKey: {
|
||||
type: 'string',
|
||||
required: true,
|
||||
visibility: 'user-or-llm',
|
||||
description: 'Jira project ID or key (e.g., "10001" or "SD")',
|
||||
},
|
||||
},
|
||||
|
||||
request: {
|
||||
url: '/api/tools/jsm/forms/templates',
|
||||
method: 'POST',
|
||||
headers: () => ({
|
||||
'Content-Type': 'application/json',
|
||||
}),
|
||||
body: (params) => ({
|
||||
domain: params.domain,
|
||||
accessToken: params.accessToken,
|
||||
cloudId: params.cloudId,
|
||||
projectIdOrKey: params.projectIdOrKey,
|
||||
}),
|
||||
},
|
||||
|
||||
transformResponse: async (response: Response) => {
|
||||
const responseText = await response.text()
|
||||
|
||||
if (!responseText) {
|
||||
return {
|
||||
success: false,
|
||||
output: {
|
||||
ts: new Date().toISOString(),
|
||||
projectIdOrKey: '',
|
||||
templates: [],
|
||||
total: 0,
|
||||
},
|
||||
error: 'Empty response from API',
|
||||
}
|
||||
}
|
||||
|
||||
const data = JSON.parse(responseText)
|
||||
|
||||
if (data.success && data.output) {
|
||||
return data
|
||||
}
|
||||
|
||||
return {
|
||||
success: data.success || false,
|
||||
output: data.output || {
|
||||
ts: new Date().toISOString(),
|
||||
projectIdOrKey: '',
|
||||
templates: [],
|
||||
total: 0,
|
||||
},
|
||||
error: data.error,
|
||||
}
|
||||
},
|
||||
|
||||
outputs: {
|
||||
ts: { type: 'string', description: 'Timestamp of the operation' },
|
||||
projectIdOrKey: { type: 'string', description: 'Project ID or key' },
|
||||
templates: {
|
||||
type: 'array',
|
||||
description: 'List of forms in the project',
|
||||
items: {
|
||||
type: 'object',
|
||||
properties: FORM_TEMPLATE_PROPERTIES,
|
||||
},
|
||||
},
|
||||
total: { type: 'number', description: 'Total number of forms' },
|
||||
},
|
||||
}
|
||||
105
apps/sim/tools/jsm/get_issue_forms.ts
Normal file
105
apps/sim/tools/jsm/get_issue_forms.ts
Normal file
@@ -0,0 +1,105 @@
|
||||
import type { JsmGetIssueFormsParams, JsmGetIssueFormsResponse } from '@/tools/jsm/types'
|
||||
import { ISSUE_FORM_PROPERTIES } from '@/tools/jsm/types'
|
||||
import type { ToolConfig } from '@/tools/types'
|
||||
|
||||
export const jsmGetIssueFormsTool: ToolConfig<JsmGetIssueFormsParams, JsmGetIssueFormsResponse> = {
|
||||
id: 'jsm_get_issue_forms',
|
||||
name: 'JSM Get Issue Forms',
|
||||
description:
|
||||
'List forms (ProForma/JSM Forms) attached to a Jira issue with metadata (name, submitted status, lock)',
|
||||
version: '1.0.0',
|
||||
|
||||
oauth: {
|
||||
required: true,
|
||||
provider: 'jira',
|
||||
},
|
||||
|
||||
params: {
|
||||
accessToken: {
|
||||
type: 'string',
|
||||
required: true,
|
||||
visibility: 'hidden',
|
||||
description: 'OAuth access token for Jira Service Management',
|
||||
},
|
||||
domain: {
|
||||
type: 'string',
|
||||
required: true,
|
||||
visibility: 'user-or-llm',
|
||||
description: 'Your Jira domain (e.g., yourcompany.atlassian.net)',
|
||||
},
|
||||
cloudId: {
|
||||
type: 'string',
|
||||
required: false,
|
||||
visibility: 'hidden',
|
||||
description: 'Jira Cloud ID for the instance',
|
||||
},
|
||||
issueIdOrKey: {
|
||||
type: 'string',
|
||||
required: true,
|
||||
visibility: 'user-or-llm',
|
||||
description: 'Issue ID or key (e.g., "SD-123", "10001")',
|
||||
},
|
||||
},
|
||||
|
||||
request: {
|
||||
url: '/api/tools/jsm/forms/issue',
|
||||
method: 'POST',
|
||||
headers: () => ({
|
||||
'Content-Type': 'application/json',
|
||||
}),
|
||||
body: (params) => ({
|
||||
domain: params.domain,
|
||||
accessToken: params.accessToken,
|
||||
cloudId: params.cloudId,
|
||||
issueIdOrKey: params.issueIdOrKey,
|
||||
}),
|
||||
},
|
||||
|
||||
transformResponse: async (response: Response) => {
|
||||
const responseText = await response.text()
|
||||
|
||||
if (!responseText) {
|
||||
return {
|
||||
success: false,
|
||||
output: {
|
||||
ts: new Date().toISOString(),
|
||||
issueIdOrKey: '',
|
||||
forms: [],
|
||||
total: 0,
|
||||
},
|
||||
error: 'Empty response from API',
|
||||
}
|
||||
}
|
||||
|
||||
const data = JSON.parse(responseText)
|
||||
|
||||
if (data.success && data.output) {
|
||||
return data
|
||||
}
|
||||
|
||||
return {
|
||||
success: data.success || false,
|
||||
output: data.output || {
|
||||
ts: new Date().toISOString(),
|
||||
issueIdOrKey: '',
|
||||
forms: [],
|
||||
total: 0,
|
||||
},
|
||||
error: data.error,
|
||||
}
|
||||
},
|
||||
|
||||
outputs: {
|
||||
ts: { type: 'string', description: 'Timestamp of the operation' },
|
||||
issueIdOrKey: { type: 'string', description: 'Issue ID or key' },
|
||||
forms: {
|
||||
type: 'array',
|
||||
description: 'List of forms attached to the issue',
|
||||
items: {
|
||||
type: 'object',
|
||||
properties: ISSUE_FORM_PROPERTIES,
|
||||
},
|
||||
},
|
||||
total: { type: 'number', description: 'Total number of forms' },
|
||||
},
|
||||
}
|
||||
@@ -8,6 +8,9 @@ import { jsmCreateRequestTool } from '@/tools/jsm/create_request'
|
||||
import { jsmGetApprovalsTool } from '@/tools/jsm/get_approvals'
|
||||
import { jsmGetCommentsTool } from '@/tools/jsm/get_comments'
|
||||
import { jsmGetCustomersTool } from '@/tools/jsm/get_customers'
|
||||
import { jsmGetFormStructureTool } from '@/tools/jsm/get_form_structure'
|
||||
import { jsmGetFormTemplatesTool } from '@/tools/jsm/get_form_templates'
|
||||
import { jsmGetIssueFormsTool } from '@/tools/jsm/get_issue_forms'
|
||||
import { jsmGetOrganizationsTool } from '@/tools/jsm/get_organizations'
|
||||
import { jsmGetParticipantsTool } from '@/tools/jsm/get_participants'
|
||||
import { jsmGetQueuesTool } from '@/tools/jsm/get_queues'
|
||||
@@ -31,6 +34,9 @@ export {
|
||||
jsmGetApprovalsTool,
|
||||
jsmGetCommentsTool,
|
||||
jsmGetCustomersTool,
|
||||
jsmGetFormStructureTool,
|
||||
jsmGetFormTemplatesTool,
|
||||
jsmGetIssueFormsTool,
|
||||
jsmGetOrganizationsTool,
|
||||
jsmGetParticipantsTool,
|
||||
jsmGetQueuesTool,
|
||||
|
||||
@@ -222,6 +222,44 @@ export const REQUEST_TYPE_FIELD_PROPERTIES = {
|
||||
},
|
||||
} as const
|
||||
|
||||
/** Output properties for a FormTemplateIndexEntry (list endpoint) per OpenAPI spec */
|
||||
export const FORM_TEMPLATE_PROPERTIES = {
|
||||
id: { type: 'string', description: 'Form template ID (UUID)' },
|
||||
name: { type: 'string', description: 'Form template name' },
|
||||
updated: { type: 'string', description: 'Last updated timestamp (ISO 8601)' },
|
||||
issueCreateIssueTypeIds: {
|
||||
type: 'json',
|
||||
description: 'Issue type IDs that auto-attach this form on issue create',
|
||||
},
|
||||
issueCreateRequestTypeIds: {
|
||||
type: 'json',
|
||||
description: 'Request type IDs that auto-attach this form on issue create',
|
||||
},
|
||||
portalRequestTypeIds: {
|
||||
type: 'json',
|
||||
description: 'Request type IDs that show this form on the customer portal',
|
||||
},
|
||||
recommendedIssueRequestTypeIds: {
|
||||
type: 'json',
|
||||
description: 'Request type IDs that recommend this form',
|
||||
},
|
||||
} as const
|
||||
|
||||
/** Output properties for a FormIndexEntry (issue forms list endpoint) per OpenAPI spec */
|
||||
export const ISSUE_FORM_PROPERTIES = {
|
||||
id: { type: 'string', description: 'Form instance ID (UUID)' },
|
||||
name: { type: 'string', description: 'Form name' },
|
||||
updated: { type: 'string', description: 'Last updated timestamp (ISO 8601)' },
|
||||
submitted: { type: 'boolean', description: 'Whether the form has been submitted' },
|
||||
lock: { type: 'boolean', description: 'Whether the form is locked' },
|
||||
internal: { type: 'boolean', description: 'Whether the form is internal-only', optional: true },
|
||||
formTemplateId: {
|
||||
type: 'string',
|
||||
description: 'Source form template ID (UUID)',
|
||||
optional: true,
|
||||
},
|
||||
} as const
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Data model interfaces
|
||||
// ---------------------------------------------------------------------------
|
||||
@@ -778,6 +816,89 @@ export interface JsmGetRequestTypeFieldsResponse extends ToolResponse {
|
||||
}
|
||||
}
|
||||
|
||||
export interface JsmGetFormTemplatesParams extends JsmBaseParams {
|
||||
projectIdOrKey: string
|
||||
}
|
||||
|
||||
export interface JsmGetFormStructureParams extends JsmBaseParams {
|
||||
projectIdOrKey: string
|
||||
formId: string
|
||||
}
|
||||
|
||||
export interface JsmGetIssueFormsParams extends JsmBaseParams {
|
||||
issueIdOrKey: string
|
||||
}
|
||||
|
||||
/** FormQuestion per OpenAPI spec */
|
||||
export interface JsmFormQuestion {
|
||||
label: string
|
||||
type: string
|
||||
validation: { rq?: boolean; [key: string]: unknown }
|
||||
choices?: Array<{ id: string; label: string; other?: boolean }>
|
||||
dcId?: string
|
||||
defaultAnswer?: Record<string, unknown>
|
||||
description?: string
|
||||
jiraField?: string
|
||||
questionKey?: string
|
||||
}
|
||||
|
||||
/** FormTemplateIndexEntry per OpenAPI spec */
|
||||
export interface JsmFormTemplate {
|
||||
id: string
|
||||
name: string
|
||||
updated: string
|
||||
issueCreateIssueTypeIds: number[]
|
||||
issueCreateRequestTypeIds: number[]
|
||||
portalRequestTypeIds: number[]
|
||||
recommendedIssueRequestTypeIds: number[]
|
||||
}
|
||||
|
||||
/** FormIndexEntry (issue form) per OpenAPI spec */
|
||||
export interface JsmIssueForm {
|
||||
id: string
|
||||
name: string
|
||||
updated: string
|
||||
submitted: boolean
|
||||
lock: boolean
|
||||
internal?: boolean
|
||||
formTemplateId?: string
|
||||
}
|
||||
|
||||
export interface JsmGetFormTemplatesResponse extends ToolResponse {
|
||||
output: {
|
||||
ts: string
|
||||
projectIdOrKey: string
|
||||
templates: JsmFormTemplate[]
|
||||
total: number
|
||||
}
|
||||
}
|
||||
|
||||
export interface JsmGetFormStructureResponse extends ToolResponse {
|
||||
output: {
|
||||
ts: string
|
||||
projectIdOrKey: string
|
||||
formId: string
|
||||
design: {
|
||||
questions: Record<string, JsmFormQuestion>
|
||||
layout: unknown[]
|
||||
conditions: Record<string, unknown>
|
||||
sections: Record<string, unknown>
|
||||
settings: { name: string; submit: { lock: boolean; pdf: boolean }; language?: string }
|
||||
} | null
|
||||
updated: string | null
|
||||
publish: Record<string, unknown> | null
|
||||
}
|
||||
}
|
||||
|
||||
export interface JsmGetIssueFormsResponse extends ToolResponse {
|
||||
output: {
|
||||
ts: string
|
||||
issueIdOrKey: string
|
||||
forms: JsmIssueForm[]
|
||||
total: number
|
||||
}
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Union type for all JSM responses
|
||||
// ---------------------------------------------------------------------------
|
||||
@@ -805,3 +926,6 @@ export type JsmResponse =
|
||||
| JsmGetApprovalsResponse
|
||||
| JsmAnswerApprovalResponse
|
||||
| JsmGetRequestTypeFieldsResponse
|
||||
| JsmGetFormTemplatesResponse
|
||||
| JsmGetFormStructureResponse
|
||||
| JsmGetIssueFormsResponse
|
||||
|
||||
@@ -13,6 +13,15 @@ export function getJsmApiBaseUrl(cloudId: string): string {
|
||||
return `https://api.atlassian.com/ex/jira/${cloudId}/rest/servicedeskapi`
|
||||
}
|
||||
|
||||
/**
|
||||
* Build the base URL for JSM Forms (ProForma) API
|
||||
* @param cloudId - The Jira Cloud ID
|
||||
* @returns The base URL for the JSM Forms API
|
||||
*/
|
||||
export function getJsmFormsApiBaseUrl(cloudId: string): string {
|
||||
return `https://api.atlassian.com/jira/forms/cloud/${cloudId}`
|
||||
}
|
||||
|
||||
/**
|
||||
* Build common headers for JSM API requests
|
||||
* @param accessToken - The OAuth access token
|
||||
@@ -26,3 +35,28 @@ export function getJsmHeaders(accessToken: string): Record<string, string> {
|
||||
'X-ExperimentalApi': 'opt-in',
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse error messages from JSM/Forms API responses
|
||||
* @param status - HTTP status code
|
||||
* @param statusText - HTTP status text
|
||||
* @param errorText - Raw error response body
|
||||
* @returns Formatted error message string
|
||||
*/
|
||||
export function parseJsmErrorMessage(
|
||||
status: number,
|
||||
statusText: string,
|
||||
errorText: string
|
||||
): string {
|
||||
try {
|
||||
const errorData = JSON.parse(errorText)
|
||||
if (errorData.errorMessage) {
|
||||
return `JSM Forms API error: ${errorData.errorMessage}`
|
||||
}
|
||||
} catch {
|
||||
if (errorText) {
|
||||
return `JSM Forms API error: ${errorText}`
|
||||
}
|
||||
}
|
||||
return `JSM Forms API error: ${status} ${statusText}`
|
||||
}
|
||||
|
||||
@@ -1292,6 +1292,9 @@ import {
|
||||
jsmGetApprovalsTool,
|
||||
jsmGetCommentsTool,
|
||||
jsmGetCustomersTool,
|
||||
jsmGetFormStructureTool,
|
||||
jsmGetFormTemplatesTool,
|
||||
jsmGetIssueFormsTool,
|
||||
jsmGetOrganizationsTool,
|
||||
jsmGetParticipantsTool,
|
||||
jsmGetQueuesTool,
|
||||
@@ -3093,6 +3096,9 @@ export const tools: Record<string, ToolConfig> = {
|
||||
jsm_add_participants: jsmAddParticipantsTool,
|
||||
jsm_get_approvals: jsmGetApprovalsTool,
|
||||
jsm_answer_approval: jsmAnswerApprovalTool,
|
||||
jsm_get_form_templates: jsmGetFormTemplatesTool,
|
||||
jsm_get_form_structure: jsmGetFormStructureTool,
|
||||
jsm_get_issue_forms: jsmGetIssueFormsTool,
|
||||
kalshi_get_markets: kalshiGetMarketsTool,
|
||||
kalshi_get_markets_v2: kalshiGetMarketsV2Tool,
|
||||
kalshi_get_market: kalshiGetMarketTool,
|
||||
|
||||
@@ -27,6 +27,7 @@ export default defineConfig({
|
||||
'isolated-vm',
|
||||
'pptxgenjs',
|
||||
'react-dom',
|
||||
'@react-email/components',
|
||||
'@react-email/render',
|
||||
],
|
||||
}),
|
||||
|
||||
@@ -235,6 +235,13 @@ import {
|
||||
salesforceRecordUpdatedTrigger,
|
||||
salesforceWebhookTrigger,
|
||||
} from '@/triggers/salesforce'
|
||||
import {
|
||||
servicenowChangeRequestCreatedTrigger,
|
||||
servicenowChangeRequestUpdatedTrigger,
|
||||
servicenowIncidentCreatedTrigger,
|
||||
servicenowIncidentUpdatedTrigger,
|
||||
servicenowWebhookTrigger,
|
||||
} from '@/triggers/servicenow'
|
||||
import { slackWebhookTrigger } from '@/triggers/slack'
|
||||
import { stripeWebhookTrigger } from '@/triggers/stripe'
|
||||
import { telegramWebhookTrigger } from '@/triggers/telegram'
|
||||
@@ -437,6 +444,11 @@ export const TRIGGER_REGISTRY: TriggerRegistry = {
|
||||
salesforce_opportunity_stage_changed: salesforceOpportunityStageChangedTrigger,
|
||||
salesforce_case_status_changed: salesforceCaseStatusChangedTrigger,
|
||||
salesforce_webhook: salesforceWebhookTrigger,
|
||||
servicenow_incident_created: servicenowIncidentCreatedTrigger,
|
||||
servicenow_incident_updated: servicenowIncidentUpdatedTrigger,
|
||||
servicenow_change_request_created: servicenowChangeRequestCreatedTrigger,
|
||||
servicenow_change_request_updated: servicenowChangeRequestUpdatedTrigger,
|
||||
servicenow_webhook: servicenowWebhookTrigger,
|
||||
stripe_webhook: stripeWebhookTrigger,
|
||||
telegram_webhook: telegramWebhookTrigger,
|
||||
typeform_webhook: typeformWebhookTrigger,
|
||||
|
||||
37
apps/sim/triggers/servicenow/change_request_created.ts
Normal file
37
apps/sim/triggers/servicenow/change_request_created.ts
Normal file
@@ -0,0 +1,37 @@
|
||||
import { ServiceNowIcon } from '@/components/icons'
|
||||
import { buildTriggerSubBlocks } from '@/triggers'
|
||||
import {
|
||||
buildChangeRequestOutputs,
|
||||
buildServiceNowExtraFields,
|
||||
servicenowSetupInstructions,
|
||||
servicenowTriggerOptions,
|
||||
} from '@/triggers/servicenow/utils'
|
||||
import type { TriggerConfig } from '@/triggers/types'
|
||||
|
||||
/**
|
||||
* ServiceNow Change Request Created Trigger
|
||||
*/
|
||||
export const servicenowChangeRequestCreatedTrigger: TriggerConfig = {
|
||||
id: 'servicenow_change_request_created',
|
||||
name: 'ServiceNow Change Request Created',
|
||||
provider: 'servicenow',
|
||||
description: 'Trigger workflow when a new change request is created in ServiceNow',
|
||||
version: '1.0.0',
|
||||
icon: ServiceNowIcon,
|
||||
|
||||
subBlocks: buildTriggerSubBlocks({
|
||||
triggerId: 'servicenow_change_request_created',
|
||||
triggerOptions: servicenowTriggerOptions,
|
||||
setupInstructions: servicenowSetupInstructions('Insert (record creation)'),
|
||||
extraFields: buildServiceNowExtraFields('servicenow_change_request_created'),
|
||||
}),
|
||||
|
||||
outputs: buildChangeRequestOutputs(),
|
||||
|
||||
webhook: {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
},
|
||||
}
|
||||
37
apps/sim/triggers/servicenow/change_request_updated.ts
Normal file
37
apps/sim/triggers/servicenow/change_request_updated.ts
Normal file
@@ -0,0 +1,37 @@
|
||||
import { ServiceNowIcon } from '@/components/icons'
|
||||
import { buildTriggerSubBlocks } from '@/triggers'
|
||||
import {
|
||||
buildChangeRequestOutputs,
|
||||
buildServiceNowExtraFields,
|
||||
servicenowSetupInstructions,
|
||||
servicenowTriggerOptions,
|
||||
} from '@/triggers/servicenow/utils'
|
||||
import type { TriggerConfig } from '@/triggers/types'
|
||||
|
||||
/**
|
||||
* ServiceNow Change Request Updated Trigger
|
||||
*/
|
||||
export const servicenowChangeRequestUpdatedTrigger: TriggerConfig = {
|
||||
id: 'servicenow_change_request_updated',
|
||||
name: 'ServiceNow Change Request Updated',
|
||||
provider: 'servicenow',
|
||||
description: 'Trigger workflow when a change request is updated in ServiceNow',
|
||||
version: '1.0.0',
|
||||
icon: ServiceNowIcon,
|
||||
|
||||
subBlocks: buildTriggerSubBlocks({
|
||||
triggerId: 'servicenow_change_request_updated',
|
||||
triggerOptions: servicenowTriggerOptions,
|
||||
setupInstructions: servicenowSetupInstructions('Update (record modification)'),
|
||||
extraFields: buildServiceNowExtraFields('servicenow_change_request_updated'),
|
||||
}),
|
||||
|
||||
outputs: buildChangeRequestOutputs(),
|
||||
|
||||
webhook: {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
},
|
||||
}
|
||||
40
apps/sim/triggers/servicenow/incident_created.ts
Normal file
40
apps/sim/triggers/servicenow/incident_created.ts
Normal file
@@ -0,0 +1,40 @@
|
||||
import { ServiceNowIcon } from '@/components/icons'
|
||||
import { buildTriggerSubBlocks } from '@/triggers'
|
||||
import {
|
||||
buildIncidentOutputs,
|
||||
buildServiceNowExtraFields,
|
||||
servicenowSetupInstructions,
|
||||
servicenowTriggerOptions,
|
||||
} from '@/triggers/servicenow/utils'
|
||||
import type { TriggerConfig } from '@/triggers/types'
|
||||
|
||||
/**
|
||||
* ServiceNow Incident Created Trigger
|
||||
*
|
||||
* Primary trigger — includes the dropdown for selecting trigger type.
|
||||
*/
|
||||
export const servicenowIncidentCreatedTrigger: TriggerConfig = {
|
||||
id: 'servicenow_incident_created',
|
||||
name: 'ServiceNow Incident Created',
|
||||
provider: 'servicenow',
|
||||
description: 'Trigger workflow when a new incident is created in ServiceNow',
|
||||
version: '1.0.0',
|
||||
icon: ServiceNowIcon,
|
||||
|
||||
subBlocks: buildTriggerSubBlocks({
|
||||
triggerId: 'servicenow_incident_created',
|
||||
triggerOptions: servicenowTriggerOptions,
|
||||
includeDropdown: true,
|
||||
setupInstructions: servicenowSetupInstructions('Insert (record creation)'),
|
||||
extraFields: buildServiceNowExtraFields('servicenow_incident_created'),
|
||||
}),
|
||||
|
||||
outputs: buildIncidentOutputs(),
|
||||
|
||||
webhook: {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
},
|
||||
}
|
||||
37
apps/sim/triggers/servicenow/incident_updated.ts
Normal file
37
apps/sim/triggers/servicenow/incident_updated.ts
Normal file
@@ -0,0 +1,37 @@
|
||||
import { ServiceNowIcon } from '@/components/icons'
|
||||
import { buildTriggerSubBlocks } from '@/triggers'
|
||||
import {
|
||||
buildIncidentOutputs,
|
||||
buildServiceNowExtraFields,
|
||||
servicenowSetupInstructions,
|
||||
servicenowTriggerOptions,
|
||||
} from '@/triggers/servicenow/utils'
|
||||
import type { TriggerConfig } from '@/triggers/types'
|
||||
|
||||
/**
|
||||
* ServiceNow Incident Updated Trigger
|
||||
*/
|
||||
export const servicenowIncidentUpdatedTrigger: TriggerConfig = {
|
||||
id: 'servicenow_incident_updated',
|
||||
name: 'ServiceNow Incident Updated',
|
||||
provider: 'servicenow',
|
||||
description: 'Trigger workflow when an incident is updated in ServiceNow',
|
||||
version: '1.0.0',
|
||||
icon: ServiceNowIcon,
|
||||
|
||||
subBlocks: buildTriggerSubBlocks({
|
||||
triggerId: 'servicenow_incident_updated',
|
||||
triggerOptions: servicenowTriggerOptions,
|
||||
setupInstructions: servicenowSetupInstructions('Update (record modification)'),
|
||||
extraFields: buildServiceNowExtraFields('servicenow_incident_updated'),
|
||||
}),
|
||||
|
||||
outputs: buildIncidentOutputs(),
|
||||
|
||||
webhook: {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
},
|
||||
}
|
||||
5
apps/sim/triggers/servicenow/index.ts
Normal file
5
apps/sim/triggers/servicenow/index.ts
Normal file
@@ -0,0 +1,5 @@
|
||||
export { servicenowChangeRequestCreatedTrigger } from './change_request_created'
|
||||
export { servicenowChangeRequestUpdatedTrigger } from './change_request_updated'
|
||||
export { servicenowIncidentCreatedTrigger } from './incident_created'
|
||||
export { servicenowIncidentUpdatedTrigger } from './incident_updated'
|
||||
export { servicenowWebhookTrigger } from './webhook'
|
||||
280
apps/sim/triggers/servicenow/utils.ts
Normal file
280
apps/sim/triggers/servicenow/utils.ts
Normal file
@@ -0,0 +1,280 @@
|
||||
import type { SubBlockConfig } from '@/blocks/types'
|
||||
import type { TriggerOutput } from '@/triggers/types'
|
||||
|
||||
/**
|
||||
* Shared trigger dropdown options for all ServiceNow triggers
|
||||
*/
|
||||
export const servicenowTriggerOptions = [
|
||||
{ label: 'Incident Created', id: 'servicenow_incident_created' },
|
||||
{ label: 'Incident Updated', id: 'servicenow_incident_updated' },
|
||||
{ label: 'Change Request Created', id: 'servicenow_change_request_created' },
|
||||
{ label: 'Change Request Updated', id: 'servicenow_change_request_updated' },
|
||||
{ label: 'Generic Webhook (All Events)', id: 'servicenow_webhook' },
|
||||
]
|
||||
|
||||
/**
|
||||
* Generates setup instructions for ServiceNow webhooks.
|
||||
* ServiceNow uses Business Rules with RESTMessageV2 for outbound webhooks.
|
||||
*/
|
||||
export function servicenowSetupInstructions(eventType: string): string {
|
||||
const instructions = [
|
||||
'<strong>Note:</strong> You need admin or developer permissions in your ServiceNow instance to create Business Rules.',
|
||||
'Navigate to <strong>System Definition > Business Rules</strong> and create a new Business Rule.',
|
||||
`Set the table (e.g., <strong>incident</strong>, <strong>change_request</strong>), set <strong>When</strong> to <strong>after</strong>, and check <strong>${eventType}</strong>.`,
|
||||
'Check the <strong>Advanced</strong> checkbox to enable the script editor.',
|
||||
'Copy the <strong>Webhook URL</strong> above and generate a <strong>Webhook Secret</strong> (any strong random string). Paste the secret in the <strong>Webhook Secret</strong> field here.',
|
||||
`In the script, use <strong>RESTMessageV2</strong> to POST the record data as JSON to the <strong>Webhook URL</strong> above. Include the secret as <code>Authorization: Bearer <your secret></code> or <code>X-Sim-Webhook-Secret: <your secret></code>. Example:<br/><code style="font-size: 0.85em; display: block; margin-top: 4px; white-space: pre-wrap;">var r = new sn_ws.RESTMessageV2();\nr.setEndpoint("<webhook_url>");\nr.setHttpMethod("POST");\nr.setRequestHeader("Content-Type", "application/json");\nr.setRequestHeader("Authorization", "Bearer <your_webhook_secret>");\nr.setRequestBody(JSON.stringify({\n sysId: current.sys_id.toString(),\n number: current.number.toString(),\n shortDescription: current.short_description.toString(),\n state: current.state.toString(),\n priority: current.priority.toString()\n}));\nr.execute();</code>`,
|
||||
'Activate the Business Rule and click "Save" above to activate your trigger.',
|
||||
]
|
||||
|
||||
return instructions
|
||||
.map(
|
||||
(instruction, index) =>
|
||||
`<div class="mb-3">${index === 0 ? instruction : `<strong>${index}.</strong> ${instruction}`}</div>`
|
||||
)
|
||||
.join('')
|
||||
}
|
||||
|
||||
/**
|
||||
* Webhook secret field for ServiceNow triggers
|
||||
*/
|
||||
function servicenowWebhookSecretField(triggerId: string): SubBlockConfig {
|
||||
return {
|
||||
id: 'webhookSecret',
|
||||
title: 'Webhook Secret',
|
||||
type: 'short-input',
|
||||
placeholder: 'Generate a secret and paste it here',
|
||||
description:
|
||||
'Required. Use the same value in your ServiceNow Business Rule as Bearer token or X-Sim-Webhook-Secret.',
|
||||
password: true,
|
||||
required: true,
|
||||
mode: 'trigger',
|
||||
condition: { field: 'selectedTriggerId', value: triggerId },
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Extra fields for ServiceNow triggers (webhook secret + optional table filter)
|
||||
*/
|
||||
export function buildServiceNowExtraFields(triggerId: string): SubBlockConfig[] {
|
||||
return [
|
||||
servicenowWebhookSecretField(triggerId),
|
||||
{
|
||||
id: 'tableName',
|
||||
title: 'Table Name (Optional)',
|
||||
type: 'short-input',
|
||||
placeholder: 'e.g., incident, change_request',
|
||||
description: 'Optionally filter to a specific ServiceNow table',
|
||||
mode: 'trigger',
|
||||
condition: { field: 'selectedTriggerId', value: triggerId },
|
||||
},
|
||||
]
|
||||
}
|
||||
|
||||
/**
|
||||
* Common record fields shared across ServiceNow trigger outputs
|
||||
*/
|
||||
function buildRecordOutputs(): Record<string, TriggerOutput> {
|
||||
return {
|
||||
sysId: { type: 'string', description: 'Unique system ID of the record' },
|
||||
number: { type: 'string', description: 'Record number (e.g., INC0010001, CHG0010001)' },
|
||||
tableName: { type: 'string', description: 'ServiceNow table name' },
|
||||
shortDescription: { type: 'string', description: 'Short description of the record' },
|
||||
description: { type: 'string', description: 'Full description of the record' },
|
||||
state: { type: 'string', description: 'Current state of the record' },
|
||||
priority: {
|
||||
type: 'string',
|
||||
description: 'Priority level (1=Critical, 2=High, 3=Moderate, 4=Low, 5=Planning)',
|
||||
},
|
||||
assignedTo: { type: 'string', description: 'User assigned to this record' },
|
||||
assignmentGroup: { type: 'string', description: 'Group assigned to this record' },
|
||||
createdBy: { type: 'string', description: 'User who created the record' },
|
||||
createdOn: { type: 'string', description: 'When the record was created (ISO 8601)' },
|
||||
updatedBy: { type: 'string', description: 'User who last updated the record' },
|
||||
updatedOn: { type: 'string', description: 'When the record was last updated (ISO 8601)' },
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Outputs for incident triggers
|
||||
*/
|
||||
export function buildIncidentOutputs(): Record<string, TriggerOutput> {
|
||||
return {
|
||||
...buildRecordOutputs(),
|
||||
urgency: { type: 'string', description: 'Urgency level (1=High, 2=Medium, 3=Low)' },
|
||||
impact: { type: 'string', description: 'Impact level (1=High, 2=Medium, 3=Low)' },
|
||||
category: { type: 'string', description: 'Incident category' },
|
||||
subcategory: { type: 'string', description: 'Incident subcategory' },
|
||||
caller: { type: 'string', description: 'Caller/requester of the incident' },
|
||||
resolvedBy: { type: 'string', description: 'User who resolved the incident' },
|
||||
resolvedAt: { type: 'string', description: 'When the incident was resolved' },
|
||||
closeNotes: { type: 'string', description: 'Notes added when the incident was closed' },
|
||||
record: { type: 'json', description: 'Full incident record data' },
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Outputs for change request triggers
|
||||
*/
|
||||
export function buildChangeRequestOutputs(): Record<string, TriggerOutput> {
|
||||
return {
|
||||
...buildRecordOutputs(),
|
||||
type: { type: 'string', description: 'Change type (Normal, Standard, Emergency)' },
|
||||
risk: { type: 'string', description: 'Risk level of the change' },
|
||||
impact: { type: 'string', description: 'Impact level of the change' },
|
||||
approval: { type: 'string', description: 'Approval status' },
|
||||
startDate: { type: 'string', description: 'Planned start date' },
|
||||
endDate: { type: 'string', description: 'Planned end date' },
|
||||
category: { type: 'string', description: 'Change category' },
|
||||
record: { type: 'json', description: 'Full change request record data' },
|
||||
}
|
||||
}
|
||||
|
||||
function normalizeToken(s: string): string {
|
||||
return s
|
||||
.trim()
|
||||
.toLowerCase()
|
||||
.replace(/[\s-]+/g, '_')
|
||||
}
|
||||
|
||||
/**
|
||||
* Extracts the table name from a ServiceNow webhook payload.
|
||||
* Business Rule scripts can send tableName in multiple formats.
|
||||
*/
|
||||
function extractTableName(body: Record<string, unknown>): string | undefined {
|
||||
const candidates = [body.tableName, body.table_name, body.table, body.sys_class_name]
|
||||
for (const c of candidates) {
|
||||
if (typeof c === 'string' && c.trim()) {
|
||||
return c.trim()
|
||||
}
|
||||
}
|
||||
return undefined
|
||||
}
|
||||
|
||||
/**
|
||||
* Extracts the event type from a ServiceNow webhook payload.
|
||||
*/
|
||||
function extractEventType(body: Record<string, unknown>): string | undefined {
|
||||
const candidates = [body.eventType, body.event_type, body.action, body.operation]
|
||||
for (const c of candidates) {
|
||||
if (typeof c === 'string' && c.trim()) {
|
||||
return c.trim()
|
||||
}
|
||||
}
|
||||
return undefined
|
||||
}
|
||||
|
||||
const INCIDENT_CREATED = new Set([
|
||||
'incident_created',
|
||||
'insert',
|
||||
'created',
|
||||
'create',
|
||||
'after_insert',
|
||||
'afterinsert',
|
||||
])
|
||||
|
||||
const INCIDENT_UPDATED = new Set([
|
||||
'incident_updated',
|
||||
'update',
|
||||
'updated',
|
||||
'after_update',
|
||||
'afterupdate',
|
||||
])
|
||||
|
||||
const CHANGE_REQUEST_CREATED = new Set([
|
||||
'change_request_created',
|
||||
'insert',
|
||||
'created',
|
||||
'create',
|
||||
'after_insert',
|
||||
'afterinsert',
|
||||
])
|
||||
|
||||
const CHANGE_REQUEST_UPDATED = new Set([
|
||||
'change_request_updated',
|
||||
'update',
|
||||
'updated',
|
||||
'after_update',
|
||||
'afterupdate',
|
||||
])
|
||||
|
||||
/**
|
||||
* Checks whether a ServiceNow webhook payload matches the configured trigger.
|
||||
* Used by the ServiceNow provider handler to filter events at runtime.
|
||||
*/
|
||||
export function isServiceNowEventMatch(
|
||||
triggerId: string,
|
||||
body: Record<string, unknown>,
|
||||
configuredTableName?: string
|
||||
): boolean {
|
||||
const payloadTable = extractTableName(body)
|
||||
const eventType = extractEventType(body)
|
||||
|
||||
if (triggerId === 'servicenow_webhook') {
|
||||
if (!configuredTableName?.trim()) {
|
||||
return true
|
||||
}
|
||||
if (!payloadTable) {
|
||||
return true
|
||||
}
|
||||
return normalizeToken(payloadTable) === normalizeToken(configuredTableName)
|
||||
}
|
||||
|
||||
if (triggerId === 'servicenow_incident_created' || triggerId === 'servicenow_incident_updated') {
|
||||
if (configuredTableName?.trim()) {
|
||||
if (payloadTable && normalizeToken(payloadTable) !== normalizeToken(configuredTableName)) {
|
||||
return false
|
||||
}
|
||||
} else if (payloadTable && normalizeToken(payloadTable) !== 'incident') {
|
||||
return false
|
||||
}
|
||||
|
||||
if (!eventType) {
|
||||
return true
|
||||
}
|
||||
|
||||
const normalized = normalizeToken(eventType)
|
||||
return triggerId === 'servicenow_incident_created'
|
||||
? INCIDENT_CREATED.has(normalized)
|
||||
: INCIDENT_UPDATED.has(normalized)
|
||||
}
|
||||
|
||||
if (
|
||||
triggerId === 'servicenow_change_request_created' ||
|
||||
triggerId === 'servicenow_change_request_updated'
|
||||
) {
|
||||
if (configuredTableName?.trim()) {
|
||||
if (payloadTable && normalizeToken(payloadTable) !== normalizeToken(configuredTableName)) {
|
||||
return false
|
||||
}
|
||||
} else if (payloadTable && normalizeToken(payloadTable) !== 'change_request') {
|
||||
return false
|
||||
}
|
||||
|
||||
if (!eventType) {
|
||||
return true
|
||||
}
|
||||
|
||||
const normalized = normalizeToken(eventType)
|
||||
return triggerId === 'servicenow_change_request_created'
|
||||
? CHANGE_REQUEST_CREATED.has(normalized)
|
||||
: CHANGE_REQUEST_UPDATED.has(normalized)
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
/**
|
||||
* Outputs for the generic webhook trigger (all events)
|
||||
*/
|
||||
export function buildServiceNowWebhookOutputs(): Record<string, TriggerOutput> {
|
||||
return {
|
||||
...buildRecordOutputs(),
|
||||
eventType: {
|
||||
type: 'string',
|
||||
description: 'The type of event that triggered this workflow (e.g., insert, update, delete)',
|
||||
},
|
||||
category: { type: 'string', description: 'Record category' },
|
||||
record: { type: 'json', description: 'Full record data from the webhook payload' },
|
||||
}
|
||||
}
|
||||
38
apps/sim/triggers/servicenow/webhook.ts
Normal file
38
apps/sim/triggers/servicenow/webhook.ts
Normal file
@@ -0,0 +1,38 @@
|
||||
import { ServiceNowIcon } from '@/components/icons'
|
||||
import { buildTriggerSubBlocks } from '@/triggers'
|
||||
import {
|
||||
buildServiceNowExtraFields,
|
||||
buildServiceNowWebhookOutputs,
|
||||
servicenowSetupInstructions,
|
||||
servicenowTriggerOptions,
|
||||
} from '@/triggers/servicenow/utils'
|
||||
import type { TriggerConfig } from '@/triggers/types'
|
||||
|
||||
/**
 * Generic ServiceNow Webhook Trigger
 * Captures all ServiceNow webhook events
 *
 * Catch-all counterpart to the typed incident/change-request triggers:
 * it registers under the `servicenow_webhook` id, which the runtime event
 * matcher treats as "match everything" apart from an optional table filter.
 */
export const servicenowWebhookTrigger: TriggerConfig = {
  id: 'servicenow_webhook',
  name: 'ServiceNow Webhook (All Events)',
  provider: 'servicenow',
  description: 'Trigger workflow on any ServiceNow webhook event',
  version: '1.0.0',
  icon: ServiceNowIcon,

  // UI configuration fields; setup text tells the user which Business Rule
  // operations ('Insert, Update, or Delete') this trigger listens for.
  subBlocks: buildTriggerSubBlocks({
    triggerId: 'servicenow_webhook',
    triggerOptions: servicenowTriggerOptions,
    setupInstructions: servicenowSetupInstructions('Insert, Update, or Delete'),
    extraFields: buildServiceNowExtraFields('servicenow_webhook'),
  }),

  // Fields downstream blocks can reference from the webhook payload.
  outputs: buildServiceNowWebhookOutputs(),

  // ServiceNow Business Rules POST JSON to the generated webhook URL.
  webhook: {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
    },
  },
}
|
||||
Reference in New Issue
Block a user