Mirror of https://github.com/simstudioai/sim.git (synced 2026-04-28 03:00:29 -04:00)

Merge branch 'staging' into dev

.github/workflows/ci.yml (vendored, 12 changed lines)

@@ -126,7 +126,7 @@ jobs:
       ecr_repo_secret: ECR_REALTIME
     steps:
       - name: Checkout code
-        uses: actions/checkout@v4
+        uses: actions/checkout@v6

       - name: Configure AWS credentials
         uses: aws-actions/configure-aws-credentials@v4
@@ -200,7 +200,7 @@ jobs:
   # Build ARM64 images for GHCR (main branch only, runs in parallel)
   build-ghcr-arm64:
     name: Build ARM64 (GHCR Only)
-    needs: [test-build, detect-version]
+    needs: [detect-version]
     runs-on: blacksmith-8vcpu-ubuntu-2404-arm
     if: github.event_name == 'push' && github.ref == 'refs/heads/main'
     permissions:
@@ -219,7 +219,7 @@ jobs:
     steps:
       - name: Checkout code
-        uses: actions/checkout@v4
+        uses: actions/checkout@v6

       - name: Login to GHCR
         uses: docker/login-action@v3
@@ -322,10 +322,10 @@ jobs:
     outputs:
       docs_changed: ${{ steps.filter.outputs.docs }}
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v6
        with:
          fetch-depth: 2 # Need at least 2 commits to detect changes
-      - uses: dorny/paths-filter@v3
+      - uses: dorny/paths-filter@v4
        id: filter
        with:
          filters: |
@@ -352,7 +352,7 @@ jobs:
       contents: write
     steps:
       - name: Checkout code
-        uses: actions/checkout@v4
+        uses: actions/checkout@v6
        with:
          fetch-depth: 0
.github/workflows/docs-embeddings.yml (vendored, 2 changed lines)

@@ -15,7 +15,7 @@ jobs:
     steps:
       - name: Checkout code
-        uses: actions/checkout@v4
+        uses: actions/checkout@v6

       - name: Setup Bun
         uses: oven-sh/setup-bun@v2
.github/workflows/i18n.yml (vendored, 4 changed lines)

@@ -14,7 +14,7 @@ jobs:
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v4
+        uses: actions/checkout@v6
        with:
          ref: staging
          token: ${{ secrets.GH_PAT }}
@@ -115,7 +115,7 @@ jobs:
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v4
+        uses: actions/checkout@v6
        with:
          ref: staging
.github/workflows/images.yml (vendored, 4 changed lines)

@@ -31,7 +31,7 @@ jobs:
     steps:
       - name: Checkout code
-        uses: actions/checkout@v4
+        uses: actions/checkout@v6

       - name: Configure AWS credentials
         uses: aws-actions/configure-aws-credentials@v4
@@ -117,7 +117,7 @@ jobs:
     steps:
       - name: Checkout code
-        uses: actions/checkout@v4
+        uses: actions/checkout@v6

       - name: Login to GHCR
         uses: docker/login-action@v3
.github/workflows/migrations.yml (vendored, 2 changed lines)

@@ -14,7 +14,7 @@ jobs:
     steps:
       - name: Checkout code
-        uses: actions/checkout@v4
+        uses: actions/checkout@v6

       - name: Setup Bun
         uses: oven-sh/setup-bun@v2
.github/workflows/publish-cli.yml (vendored, 2 changed lines)

@@ -14,7 +14,7 @@ jobs:
     runs-on: blacksmith-4vcpu-ubuntu-2404
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v4
+        uses: actions/checkout@v6

       - name: Setup Bun
         uses: oven-sh/setup-bun@v2
.github/workflows/publish-python-sdk.yml (vendored, 2 changed lines)

@@ -14,7 +14,7 @@ jobs:
     runs-on: blacksmith-4vcpu-ubuntu-2404
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v4
+        uses: actions/checkout@v6

       - name: Setup Python
         uses: actions/setup-python@v5
.github/workflows/publish-ts-sdk.yml (vendored, 2 changed lines)

@@ -14,7 +14,7 @@ jobs:
     runs-on: blacksmith-4vcpu-ubuntu-2404
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v4
+        uses: actions/checkout@v6

       - name: Setup Bun
         uses: oven-sh/setup-bun@v2
.github/workflows/test-build.yml (vendored, 2 changed lines)

@@ -14,7 +14,7 @@ jobs:
     steps:
       - name: Checkout code
-        uses: actions/checkout@v4
+        uses: actions/checkout@v6

       - name: Setup Bun
         uses: oven-sh/setup-bun@v2
@@ -78,7 +78,7 @@ Defines the fields approvers fill in when responding. This data becomes availabl
 }
 ```

-Access resume data in downstream blocks using `<blockId.resumeInput.fieldName>`.
+Access resume data in downstream blocks using `<blockId.fieldName>`.

 ## Approval Methods
@@ -93,11 +93,12 @@ Access resume data in downstream blocks using `<blockId.resumeInput.fieldName>`.
   <Tab>
     ### REST API

-    Programmatically resume workflows using the resume endpoint. The `contextId` is available from the block's `resumeEndpoint` output or from the paused execution detail.
+    Programmatically resume workflows using the resume endpoint. The `contextId` is available from the block's `resumeEndpoint` output or from the `_resume` object in the paused execution response.

     ```bash
     POST /api/resume/{workflowId}/{executionId}/{contextId}
     Content-Type: application/json
     X-API-Key: your-api-key

     {
       "input": {
@@ -107,23 +108,56 @@ Access resume data in downstream blocks using `<blockId.resumeInput.fieldName>`.
     }
     ```
-    The response includes a new `executionId` for the resumed execution:
+    The resume endpoint automatically respects the execution mode used in the original execute call:
+
+    - **Sync mode** (default) — The response waits for the remaining workflow to complete and returns the full result:

     ```json
     {
-      "status": "started",
+      "success": true,
+      "status": "completed",
       "executionId": "<resumeExecutionId>",
-      "message": "Resume execution started."
+      "output": { ... },
+      "metadata": { "duration": 1234, "startTime": "...", "endTime": "..." }
     }
     ```

-    To poll execution progress after resuming, connect to the SSE stream:
+    If the resumed workflow hits another HITL block, the response returns `"status": "paused"` with new `_resume` URLs in the output.

-    ```bash
-    GET /api/workflows/{workflowId}/executions/{resumeExecutionId}/stream
+    - **Stream mode** (`stream: true` on the original execute call) — The resume response streams SSE events with `selectedOutputs` chunks, just like the initial execution.
+
+    - **Async mode** (`X-Execution-Mode: async` on the original execute call) — The resume dispatches execution to a background worker and returns immediately with `202`, including a `jobId` and `statusUrl` for polling:
+
+    ```json
+    {
+      "success": true,
+      "async": true,
+      "jobId": "<jobId>",
+      "executionId": "<resumeExecutionId>",
+      "message": "Resume execution queued",
+      "statusUrl": "/api/jobs/<jobId>"
+    }
+    ```

-    Build custom approval UIs or integrate with existing systems.
+    #### Polling execution status
+
+    Poll the `statusUrl` from the async response to check when the resume completes:
+
+    ```bash
+    GET /api/jobs/{jobId}
+    X-API-Key: your-api-key
+    ```
+
+    Returns job status and, when completed, the full workflow output.
+
+    To check on a paused execution's pause points and resume links:
+
+    ```bash
+    GET /api/resume/{workflowId}/{executionId}
+    X-API-Key: your-api-key
+    ```
+
+    Returns the paused execution detail with all pause points, their statuses, and resume links. Returns `404` when the execution has completed and is no longer paused.
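
As a worked example of the modes above, here is a minimal TypeScript sketch of a resume call that branches on the documented response shapes. The endpoint path, headers, and response fields come from these docs; the `SIM_API_KEY` environment variable and the `ResumeResponse` interface are illustrative assumptions, not part of any Sim SDK:

```ts
// Sketch: resume a paused execution and branch on the documented response shapes.
// Assumes SIM_API_KEY is set and the IDs come from a prior paused-execution response.
interface ResumeResponse {
  success?: boolean
  status?: 'completed' | 'paused' | 'queued' | 'started' | 'failed'
  async?: boolean
  executionId?: string
  statusUrl?: string
  output?: Record<string, unknown>
  error?: string | null
}

async function resumeExecution(
  workflowId: string,
  executionId: string,
  contextId: string,
  input: Record<string, unknown>
): Promise<ResumeResponse> {
  const res = await fetch(
    `https://www.sim.ai/api/resume/${workflowId}/${executionId}/${contextId}`,
    {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
        'X-API-Key': process.env.SIM_API_KEY ?? '',
      },
      body: JSON.stringify({ input }),
    }
  )
  const body = (await res.json()) as ResumeResponse

  if (res.status === 202 && body.statusUrl) {
    // Async mode: poll the statusUrl (GET /api/jobs/{jobId}) until the run finishes.
    console.log(`queued as job, poll ${body.statusUrl}`)
  } else if (body.status === 'paused') {
    // The resumed run hit another HITL block: new _resume URLs are in body.output.
    console.log('paused again, resume via the new _resume URLs')
  } else if (body.status === 'completed') {
    console.log('workflow finished', body.output)
  }
  return body
}

// Example usage with the form fields from the docs above:
// await resumeExecution('wf_1a2b3c4d5e', 'exec_9f8e7d6c5b', 'ctx_xyz789', {
//   approved: true,
//   comment: 'Looks good to me',
// })
```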
   </Tab>
   <Tab>
     ### Webhook
@@ -132,6 +166,53 @@ Access resume data in downstream blocks using `<blockId.resumeInput.fieldName>`.
   </Tab>
 </Tabs>

+## API Execute Behavior
+
+When triggering a workflow via the execute API (`POST /api/workflows/{id}/execute`), HITL blocks cause the execution to pause and return the `_resume` data in the response:
+
+<Tabs items={['Sync (JSON)', 'Stream (SSE)', 'Async']}>
+  <Tab>
+    The response includes the full pause data with resume URLs:
+
+    ```json
+    {
+      "success": true,
+      "executionId": "<executionId>",
+      "output": {
+        "data": {
+          "operation": "human",
+          "_resume": {
+            "apiUrl": "/api/resume/{workflowId}/{executionId}/{contextId}",
+            "uiUrl": "/resume/{workflowId}/{executionId}",
+            "contextId": "<contextId>",
+            "executionId": "<executionId>",
+            "workflowId": "<workflowId>"
+          }
+        }
+      }
+    }
+    ```
+  </Tab>
+  <Tab>
+    Blocks before the HITL stream their `selectedOutputs` normally. When execution pauses, the final SSE event includes `status: "paused"` and the `_resume` data:
+
+    ```
+    data: {"blockId":"agent1","chunk":"streamed content..."}
+    data: {"event":"final","data":{"success":true,"output":{...,"_resume":{...}},"status":"paused"}}
+    data: "[DONE]"
+    ```
+
+    On resume, blocks after the HITL stream their `selectedOutputs` the same way.
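
A sketch of consuming that SSE stream from Node 18+ TypeScript follows. The `data:` line format and the `"[DONE]"` sentinel come from the example above; the buffering logic and function name are assumptions:

```ts
// Sketch: read the execute/resume SSE stream and detect a pause.
async function streamExecution(url: string, apiKey: string): Promise<void> {
  const res = await fetch(url, {
    method: 'POST',
    headers: { 'X-API-Key': apiKey, 'Content-Type': 'application/json' },
    body: JSON.stringify({ input: {}, stream: true }),
  })
  const reader = res.body!.getReader()
  const decoder = new TextDecoder()
  let buffer = ''

  while (true) {
    const { done, value } = await reader.read()
    if (done) break
    buffer += decoder.decode(value, { stream: true })

    // SSE events arrive as newline-delimited "data: ..." lines.
    const lines = buffer.split('\n')
    buffer = lines.pop() ?? '' // keep any partial trailing line for the next chunk
    for (const line of lines) {
      if (!line.startsWith('data: ')) continue
      const payload = line.slice(6)
      if (payload === '"[DONE]"') return
      const event = JSON.parse(payload)
      if (event.event === 'final' && event.data?.status === 'paused') {
        // The _resume object carries the URLs needed to continue later.
        console.log('paused, resume at', event.data.output?._resume?.apiUrl)
      } else if (event.chunk) {
        process.stdout.write(event.chunk) // streamed block output
      }
    }
  }
}
```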
+
+    <Callout type="info">
+      HITL blocks are automatically excluded from the `selectedOutputs` dropdown since their data is always included in the pause response.
+    </Callout>
+  </Tab>
+  <Tab>
+    Returns `202` immediately. Use the polling endpoint to check when the execution pauses.
+  </Tab>
+</Tabs>
+
 ## Common Use Cases

 **Content Approval** - Review AI-generated content before publishing
@@ -161,9 +242,9 @@ Agent (Generate) → Human in the Loop (QA) → Gmail (Send)
 **`response`** - Display data shown to the approver (json)
 **`submission`** - Form submission data from the approver (json)
 **`submittedAt`** - ISO timestamp when the workflow was resumed
-**`resumeInput.*`** - All fields defined in Resume Form become available after the workflow resumes
+**`<fieldName>`** - All fields defined in Resume Form become available at the top level after the workflow resumes

-Access using `<blockId.resumeInput.fieldName>`.
+Access using `<blockId.fieldName>`.

 ## Example
@@ -187,7 +268,7 @@ Access using `<blockId.resumeInput.fieldName>`.
 **Downstream Usage:**
 ```javascript
 // Condition block
-<approval1.resumeInput.approved> === true
+<approval1.approved> === true
 ```
 The example below shows an approval portal as seen by an approver after the workflow is paused. Approvers can review the data and provide inputs as a part of the workflow resumption. The approval portal can be accessed directly via the unique URL, `<blockId.url>`.
@@ -204,7 +285,7 @@ The example below shows an approval portal as seen by an approver after the work
 <FAQ items={[
   { question: "How long does the workflow stay paused?", answer: "The workflow pauses indefinitely until a human provides input through the approval portal, the REST API, or a webhook. There is no automatic timeout — it will wait until someone responds." },
   { question: "What notification channels can I use to alert approvers?", answer: "You can configure notifications through Slack, Gmail, Microsoft Teams, SMS (via Twilio), or custom webhooks. Include the approval URL in your notification message so approvers can access the portal directly." },
-  { question: "How do I access the approver's input in downstream blocks?", answer: "Use the syntax <blockId.resumeInput.fieldName> to reference specific fields from the resume form. For example, if your block ID is 'approval1' and the form has an 'approved' field, use <approval1.resumeInput.approved>." },
+  { question: "How do I access the approver's input in downstream blocks?", answer: "Use the syntax <blockId.fieldName> to reference specific fields from the resume form. For example, if your block name is 'approval1' and the form has an 'approved' field, use <approval1.approved>." },
   { question: "Can I chain multiple Human in the Loop blocks for multi-stage approvals?", answer: "Yes. You can place multiple Human in the Loop blocks in sequence to create multi-stage approval workflows. Each block pauses independently and can have its own notification configuration and resume form fields." },
   { question: "Can I resume the workflow programmatically without the portal?", answer: "Yes. Each block exposes a resume API endpoint that you can call with a POST request containing the form data as JSON. This lets you build custom approval UIs or integrate with existing systems like Jira or ServiceNow." },
   { question: "What outputs are available after the workflow resumes?", answer: "The block outputs include the approval portal URL, the resume API endpoint URL, the display data shown to the approver, the form submission data, the raw resume input, and an ISO timestamp of when the workflow was resumed." },
@@ -113,7 +113,7 @@ Retrieve the results of a completed Athena query execution
 | `awsAccessKeyId` | string | Yes | AWS access key ID |
 | `awsSecretAccessKey` | string | Yes | AWS secret access key |
 | `queryExecutionId` | string | Yes | Query execution ID to get results for |
-| `maxResults` | number | No | Maximum number of rows to return \(1-1000\) |
+| `maxResults` | number | No | Maximum number of rows to return \(1-999\) |
 | `nextToken` | string | No | Pagination token from a previous request |

 #### Output
@@ -10,6 +10,24 @@ import { BlockInfoCard } from "@/components/ui/block-info-card"
   color="linear-gradient(45deg, #B0084D 0%, #FF4F8B 100%)"
 />

+{/* MANUAL-CONTENT-START:intro */}
+[AWS CloudWatch](https://aws.amazon.com/cloudwatch/) is a monitoring and observability service that provides data and actionable insights for AWS resources, applications, and services. CloudWatch collects monitoring and operational data in the form of logs, metrics, and events, giving you a unified view of your AWS environment.
+
+With the CloudWatch integration, you can:
+
+- **Query Logs (Insights)**: Run CloudWatch Log Insights queries against one or more log groups to analyze log data with a powerful query language
+- **Describe Log Groups**: List available CloudWatch log groups in your account, optionally filtered by name prefix
+- **Get Log Events**: Retrieve log events from a specific log stream within a log group
+- **Describe Log Streams**: List log streams within a log group, ordered by last event time or filtered by name prefix
+- **List Metrics**: Browse available CloudWatch metrics, optionally filtered by namespace, metric name, or recent activity
+- **Get Metric Statistics**: Retrieve statistical data for a metric over a specified time range with configurable granularity
+- **Publish Metric**: Publish custom metric data points to CloudWatch for your own application monitoring
+- **Describe Alarms**: List and filter CloudWatch alarms by name prefix, state, or alarm type
+
+In Sim, the CloudWatch integration enables your agents to monitor AWS infrastructure, analyze application logs, track custom metrics, and respond to alarm states as part of automated DevOps and SRE workflows. This is especially powerful when combined with other AWS integrations like CloudFormation and SNS for end-to-end infrastructure management.
+{/* MANUAL-CONTENT-END */}

 ## Usage Instructions

 Integrate AWS CloudWatch into workflows. Run Log Insights queries, list log groups, retrieve log events, list and get metrics, and monitor alarms. Requires AWS access key and secret access key.
@@ -155,6 +173,34 @@ Get statistics for a CloudWatch metric over a time range
 | `label` | string | Metric label |
 | `datapoints` | array | Datapoints with timestamp and statistics values |

+### `cloudwatch_put_metric_data`
+
+Publish a custom metric data point to CloudWatch
+
+#### Input
+
+| Parameter | Type | Required | Description |
+| --------- | ---- | -------- | ----------- |
+| `awsRegion` | string | Yes | AWS region \(e.g., us-east-1\) |
+| `awsAccessKeyId` | string | Yes | AWS access key ID |
+| `awsSecretAccessKey` | string | Yes | AWS secret access key |
+| `namespace` | string | Yes | Metric namespace \(e.g., Custom/MyApp\) |
+| `metricName` | string | Yes | Name of the metric |
+| `value` | number | Yes | Metric value to publish |
+| `unit` | string | No | Unit of the metric \(e.g., Count, Seconds, Bytes\) |
+| `dimensions` | string | No | JSON string of dimension name/value pairs |
+
+#### Output
+
+| Parameter | Type | Description |
+| --------- | ---- | ----------- |
+| `success` | boolean | Whether the metric was published successfully |
+| `namespace` | string | Metric namespace |
+| `metricName` | string | Metric name |
+| `value` | number | Published metric value |
+| `unit` | string | Metric unit |
+| `timestamp` | string | Timestamp when the metric was published |
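
For illustration, a `cloudwatch_put_metric_data` input might be assembled like this in TypeScript. The field names follow the input table above; the flat name-to-value shape inside `dimensions` is an assumption, since the table only says it is a JSON string of name/value pairs:

```ts
// Illustrative input for cloudwatch_put_metric_data; field names follow the table above.
const params = {
  awsRegion: 'us-east-1',
  awsAccessKeyId: process.env.AWS_ACCESS_KEY_ID,
  awsSecretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
  namespace: 'Custom/MyApp',
  metricName: 'QueueDepth',
  value: 42,
  unit: 'Count',
  // dimensions is a JSON *string* of name/value pairs, not a nested object.
  dimensions: JSON.stringify({ Environment: 'production', Service: 'worker' }),
}
```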

 ### `cloudwatch_describe_alarms`

 List and filter CloudWatch alarms
@@ -113,10 +113,11 @@ Create a new service request in Jira Service Management
 | `cloudId` | string | No | Jira Cloud ID for the instance |
 | `serviceDeskId` | string | Yes | Service Desk ID \(e.g., "1", "2"\) |
 | `requestTypeId` | string | Yes | Request Type ID \(e.g., "10", "15"\) |
-| `summary` | string | Yes | Summary/title for the service request |
+| `summary` | string | No | Summary/title for the service request \(required unless using Form Answers\) |
 | `description` | string | No | Description for the service request |
 | `raiseOnBehalfOf` | string | No | Account ID of customer to raise request on behalf of |
 | `requestFieldValues` | json | No | Request field values as key-value pairs \(overrides summary/description if provided\) |
+| `formAnswers` | json | No | Form answers for form-based request types \(e.g., \{"summary": \{"text": "Title"\}, "customfield_10010": \{"choices": \["10320"\]\}\}\) |
 | `requestParticipants` | string | No | Comma-separated account IDs to add as request participants |
 | `channel` | string | No | Channel the request originates from \(e.g., portal, email\) |
@@ -677,4 +678,84 @@ Get the fields required to create a request of a specific type in Jira Service M
 | ↳ `defaultValues` | json | Default values for the field |
 | ↳ `jiraSchema` | json | Jira field schema with type, system, custom, customId |

+### `jsm_get_form_templates`
+
+List forms (ProForma/JSM Forms) in a Jira project to discover form IDs for request types
+
+#### Input
+
+| Parameter | Type | Required | Description |
+| --------- | ---- | -------- | ----------- |
+| `domain` | string | Yes | Your Jira domain \(e.g., yourcompany.atlassian.net\) |
+| `cloudId` | string | No | Jira Cloud ID for the instance |
+| `projectIdOrKey` | string | Yes | Jira project ID or key \(e.g., "10001" or "SD"\) |
+
+#### Output
+
+| Parameter | Type | Description |
+| --------- | ---- | ----------- |
+| `ts` | string | Timestamp of the operation |
+| `projectIdOrKey` | string | Project ID or key |
+| `templates` | array | List of forms in the project |
+| ↳ `id` | string | Form template ID \(UUID\) |
+| ↳ `name` | string | Form template name |
+| ↳ `updated` | string | Last updated timestamp \(ISO 8601\) |
+| ↳ `issueCreateIssueTypeIds` | json | Issue type IDs that auto-attach this form on issue create |
+| ↳ `issueCreateRequestTypeIds` | json | Request type IDs that auto-attach this form on issue create |
+| ↳ `portalRequestTypeIds` | json | Request type IDs that show this form on the customer portal |
+| ↳ `recommendedIssueRequestTypeIds` | json | Request type IDs that recommend this form |
+| `total` | number | Total number of forms |
+
+### `jsm_get_form_structure`
+
+Get the full structure of a ProForma/JSM form including all questions, field types, choices, layout, and conditions
+
+#### Input
+
+| Parameter | Type | Required | Description |
+| --------- | ---- | -------- | ----------- |
+| `domain` | string | Yes | Your Jira domain \(e.g., yourcompany.atlassian.net\) |
+| `cloudId` | string | No | Jira Cloud ID for the instance |
+| `projectIdOrKey` | string | Yes | Jira project ID or key \(e.g., "10001" or "SD"\) |
+| `formId` | string | Yes | Form ID \(UUID from Get Form Templates\) |
+
+#### Output
+
+| Parameter | Type | Description |
+| --------- | ---- | ----------- |
+| `ts` | string | Timestamp of the operation |
+| `projectIdOrKey` | string | Project ID or key |
+| `formId` | string | Form ID |
+| `design` | json | Full form design with questions \(field types, labels, choices, validation\), layout \(field ordering\), and conditions |
+| `updated` | string | Last updated timestamp |
+| `publish` | json | Publishing and request type configuration |
+
+### `jsm_get_issue_forms`
+
+List forms (ProForma/JSM Forms) attached to a Jira issue with metadata (name, submitted status, lock)
+
+#### Input
+
+| Parameter | Type | Required | Description |
+| --------- | ---- | -------- | ----------- |
+| `domain` | string | Yes | Your Jira domain \(e.g., yourcompany.atlassian.net\) |
+| `cloudId` | string | No | Jira Cloud ID for the instance |
+| `issueIdOrKey` | string | Yes | Issue ID or key \(e.g., "SD-123", "10001"\) |
+
+#### Output
+
+| Parameter | Type | Description |
+| --------- | ---- | ----------- |
+| `ts` | string | Timestamp of the operation |
+| `issueIdOrKey` | string | Issue ID or key |
+| `forms` | array | List of forms attached to the issue |
+| ↳ `id` | string | Form instance ID \(UUID\) |
+| ↳ `name` | string | Form name |
+| ↳ `updated` | string | Last updated timestamp \(ISO 8601\) |
+| ↳ `submitted` | boolean | Whether the form has been submitted |
+| ↳ `lock` | boolean | Whether the form is locked |
+| ↳ `internal` | boolean | Whether the form is internal-only |
+| ↳ `formTemplateId` | string | Source form template ID \(UUID\) |
+| `total` | number | Total number of forms |
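
A sketch of how these three form tools chain together when creating a form-based request follows. The `invokeTool` wrapper and the `jsm_create_request` tool name are assumptions for illustration; the parameter names come from the tables above:

```ts
// Illustrative flow: discover a form, inspect its questions, then create a request.
// `invokeTool` stands in for however your workflow invokes a JSM tool; it is an assumption.
declare function invokeTool(name: string, params: Record<string, unknown>): Promise<any>

async function createFormBasedRequest() {
  // 1. Find form IDs for the project (jsm_get_form_templates).
  const { templates } = await invokeTool('jsm_get_form_templates', {
    domain: 'yourcompany.atlassian.net',
    projectIdOrKey: 'SD',
  })

  // 2. Fetch the form's questions and choice IDs (jsm_get_form_structure).
  const { design } = await invokeTool('jsm_get_form_structure', {
    domain: 'yourcompany.atlassian.net',
    projectIdOrKey: 'SD',
    formId: templates[0].id, // UUID from step 1
  })

  // 3. Create the request with formAnswers; summary may be omitted here
  //    because the form supplies it (see the create-request input table).
  return invokeTool('jsm_create_request', {
    domain: 'yourcompany.atlassian.net',
    serviceDeskId: '1',
    requestTypeId: '10',
    formAnswers: {
      summary: { text: 'Printer on floor 3 is jammed' },
      customfield_10010: { choices: ['10320'] }, // choice IDs come from `design`
    },
  })
}
```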

@@ -25,6 +25,10 @@
     {
       "name": "Workflows",
       "description": "Execute workflows and manage workflow resources"
     },
+    {
+      "name": "Human in the Loop",
+      "description": "Manage paused workflow executions and resume them with input"
+    },
     {
       "name": "Logs",
       "description": "Query execution logs and retrieve details"
@@ -235,6 +239,544 @@
         }
       }
     },
+    "/api/workflows/{id}/paused": {
+      "get": {
+        "operationId": "listPausedExecutions",
+        "summary": "List Paused Executions",
+        "description": "List all paused executions for a workflow. Workflows pause at Human in the Loop blocks and wait for input before continuing. Use this endpoint to discover which executions need attention.",
+        "tags": ["Human in the Loop"],
+        "x-codeSamples": [
+          {
+            "id": "curl",
+            "label": "cURL",
+            "lang": "bash",
+            "source": "curl -X GET \\\n \"https://www.sim.ai/api/workflows/{id}/paused?status=paused\" \\\n -H \"X-API-Key: YOUR_API_KEY\""
+          }
+        ],
+        "parameters": [
+          {
+            "name": "id",
+            "in": "path",
+            "required": true,
+            "description": "The unique identifier of the workflow.",
+            "schema": {
+              "type": "string",
+              "example": "wf_1a2b3c4d5e"
+            }
+          },
+          {
+            "name": "status",
+            "in": "query",
+            "required": false,
+            "description": "Filter paused executions by status.",
+            "schema": {
+              "type": "string",
+              "example": "paused"
+            }
+          }
+        ],
+        "responses": {
+          "200": {
+            "description": "List of paused executions.",
+            "content": {
+              "application/json": {
+                "schema": {
+                  "type": "object",
+                  "properties": {
+                    "pausedExecutions": {
+                      "type": "array",
+                      "items": {
+                        "$ref": "#/components/schemas/PausedExecutionSummary"
+                      }
+                    }
+                  }
+                },
+                "example": {
+                  "pausedExecutions": [
+                    {
+                      "id": "pe_abc123",
+                      "workflowId": "wf_1a2b3c4d5e",
+                      "executionId": "exec_9f8e7d6c5b",
+                      "status": "paused",
+                      "totalPauseCount": 1,
+                      "resumedCount": 0,
+                      "pausedAt": "2026-01-15T10:30:00Z",
+                      "updatedAt": "2026-01-15T10:30:00Z",
+                      "expiresAt": null,
+                      "metadata": null,
+                      "triggerIds": [],
+                      "pausePoints": [
+                        {
+                          "contextId": "ctx_xyz789",
+                          "blockId": "block_hitl_1",
+                          "registeredAt": "2026-01-15T10:30:00Z",
+                          "resumeStatus": "paused",
+                          "snapshotReady": true,
+                          "resumeLinks": {
+                            "apiUrl": "https://www.sim.ai/api/resume/wf_1a2b3c4d5e/exec_9f8e7d6c5b/ctx_xyz789",
+                            "uiUrl": "https://www.sim.ai/resume/wf_1a2b3c4d5e/exec_9f8e7d6c5b",
+                            "contextId": "ctx_xyz789",
+                            "executionId": "exec_9f8e7d6c5b",
+                            "workflowId": "wf_1a2b3c4d5e"
+                          },
+                          "response": {
+                            "displayData": {
+                              "title": "Approval Required",
+                              "message": "Please review this request"
+                            },
+                            "formFields": []
+                          }
+                        }
+                      ]
+                    }
+                  ]
+                }
+              }
+            }
+          },
+          "400": {
+            "$ref": "#/components/responses/BadRequest"
+          },
+          "401": {
+            "$ref": "#/components/responses/Unauthorized"
+          },
+          "403": {
+            "$ref": "#/components/responses/Forbidden"
+          },
+          "404": {
+            "$ref": "#/components/responses/NotFound"
+          }
+        }
+      }
+    },
+    "/api/workflows/{id}/paused/{executionId}": {
+      "get": {
+        "operationId": "getPausedExecution",
+        "summary": "Get Paused Execution",
+        "description": "Get detailed information about a specific paused execution, including its pause points, execution snapshot, and resume queue. Use this to inspect the state before resuming.",
+        "tags": ["Human in the Loop"],
+        "x-codeSamples": [
+          {
+            "id": "curl",
+            "label": "cURL",
+            "lang": "bash",
+            "source": "curl -X GET \\\n \"https://www.sim.ai/api/workflows/{id}/paused/{executionId}\" \\\n -H \"X-API-Key: YOUR_API_KEY\""
+          }
+        ],
+        "parameters": [
+          {
+            "name": "id",
+            "in": "path",
+            "required": true,
+            "description": "The unique identifier of the workflow.",
+            "schema": {
+              "type": "string",
+              "example": "wf_1a2b3c4d5e"
+            }
+          },
+          {
+            "name": "executionId",
+            "in": "path",
+            "required": true,
+            "description": "The execution ID of the paused execution.",
+            "schema": {
+              "type": "string",
+              "example": "exec_9f8e7d6c5b"
+            }
+          }
+        ],
+        "responses": {
+          "200": {
+            "description": "Paused execution details.",
+            "content": {
+              "application/json": {
+                "schema": {
+                  "$ref": "#/components/schemas/PausedExecutionDetail"
+                }
+              }
+            }
+          },
+          "401": {
+            "$ref": "#/components/responses/Unauthorized"
+          },
+          "403": {
+            "$ref": "#/components/responses/Forbidden"
+          },
+          "404": {
+            "$ref": "#/components/responses/NotFound"
+          }
+        }
+      }
+    },
+    "/api/resume/{workflowId}/{executionId}": {
+      "get": {
+        "operationId": "getPausedExecutionByResumePath",
+        "summary": "Get Paused Execution (Resume Path)",
+        "description": "Get detailed information about a specific paused execution using the resume URL path. Returns the same data as the workflow paused execution detail endpoint.",
+        "tags": ["Human in the Loop"],
+        "x-codeSamples": [
+          {
+            "id": "curl",
+            "label": "cURL",
+            "lang": "bash",
+            "source": "curl -X GET \\\n \"https://www.sim.ai/api/resume/{workflowId}/{executionId}\" \\\n -H \"X-API-Key: YOUR_API_KEY\""
+          }
+        ],
+        "parameters": [
+          {
+            "name": "workflowId",
+            "in": "path",
+            "required": true,
+            "description": "The unique identifier of the workflow.",
+            "schema": {
+              "type": "string",
+              "example": "wf_1a2b3c4d5e"
+            }
+          },
+          {
+            "name": "executionId",
+            "in": "path",
+            "required": true,
+            "description": "The execution ID of the paused execution.",
+            "schema": {
+              "type": "string",
+              "example": "exec_9f8e7d6c5b"
+            }
+          }
+        ],
+        "responses": {
+          "200": {
+            "description": "Paused execution details.",
+            "content": {
+              "application/json": {
+                "schema": {
+                  "$ref": "#/components/schemas/PausedExecutionDetail"
+                }
+              }
+            }
+          },
+          "401": {
+            "$ref": "#/components/responses/Unauthorized"
+          },
+          "403": {
+            "$ref": "#/components/responses/Forbidden"
+          },
+          "404": {
+            "$ref": "#/components/responses/NotFound"
+          },
+          "500": {
+            "description": "Internal server error.",
+            "content": {
+              "application/json": {
+                "schema": {
+                  "type": "object",
+                  "properties": {
+                    "error": {
+                      "type": "string",
+                      "description": "Human-readable error message."
+                    }
+                  }
+                }
+              }
+            }
+          }
+        }
+      }
+    },
+    "/api/resume/{workflowId}/{executionId}/{contextId}": {
+      "get": {
+        "operationId": "getPauseContext",
+        "summary": "Get Pause Context",
+        "description": "Get detailed information about a specific pause context within a paused execution. Returns the pause point details, resume queue state, and any active resume entry.",
+        "tags": ["Human in the Loop"],
+        "x-codeSamples": [
+          {
+            "id": "curl",
+            "label": "cURL",
+            "lang": "bash",
+            "source": "curl -X GET \\\n \"https://www.sim.ai/api/resume/{workflowId}/{executionId}/{contextId}\" \\\n -H \"X-API-Key: YOUR_API_KEY\""
+          }
+        ],
+        "parameters": [
+          {
+            "name": "workflowId",
+            "in": "path",
+            "required": true,
+            "description": "The unique identifier of the workflow.",
+            "schema": {
+              "type": "string",
+              "example": "wf_1a2b3c4d5e"
+            }
+          },
+          {
+            "name": "executionId",
+            "in": "path",
+            "required": true,
+            "description": "The execution ID of the paused execution.",
+            "schema": {
+              "type": "string",
+              "example": "exec_9f8e7d6c5b"
+            }
+          },
+          {
+            "name": "contextId",
+            "in": "path",
+            "required": true,
+            "description": "The pause context ID to retrieve details for.",
+            "schema": {
+              "type": "string",
+              "example": "ctx_xyz789"
+            }
+          }
+        ],
+        "responses": {
+          "200": {
+            "description": "Pause context details.",
+            "content": {
+              "application/json": {
+                "schema": {
+                  "$ref": "#/components/schemas/PauseContextDetail"
+                }
+              }
+            }
+          },
+          "401": {
+            "$ref": "#/components/responses/Unauthorized"
+          },
+          "403": {
+            "$ref": "#/components/responses/Forbidden"
+          },
+          "404": {
+            "$ref": "#/components/responses/NotFound"
+          }
+        }
+      },
+      "post": {
+        "operationId": "resumeExecution",
+        "summary": "Resume Execution",
+        "description": "Resume a paused workflow execution by providing input for a specific pause context. The execution continues from where it paused, using the provided input. Supports synchronous, asynchronous, and streaming modes (determined by the original execution's configuration).",
+        "tags": ["Human in the Loop"],
+        "x-codeSamples": [
+          {
+            "id": "curl",
+            "label": "cURL",
+            "lang": "bash",
+            "source": "curl -X POST \\\n \"https://www.sim.ai/api/resume/{workflowId}/{executionId}/{contextId}\" \\\n -H \"X-API-Key: YOUR_API_KEY\" \\\n -H \"Content-Type: application/json\" \\\n -d '{\n \"input\": {\n \"approved\": true,\n \"comment\": \"Looks good to me\"\n }\n }'"
+          }
+        ],
+        "parameters": [
+          {
+            "name": "workflowId",
+            "in": "path",
+            "required": true,
+            "description": "The unique identifier of the workflow.",
+            "schema": {
+              "type": "string",
+              "example": "wf_1a2b3c4d5e"
+            }
+          },
+          {
+            "name": "executionId",
+            "in": "path",
+            "required": true,
+            "description": "The execution ID of the paused execution.",
+            "schema": {
+              "type": "string",
+              "example": "exec_9f8e7d6c5b"
+            }
+          },
+          {
+            "name": "contextId",
+            "in": "path",
+            "required": true,
+            "description": "The pause context ID to resume. Found in the pause point's contextId field or resumeLinks.",
+            "schema": {
+              "type": "string",
+              "example": "ctx_xyz789"
+            }
+          }
+        ],
+        "requestBody": {
+          "description": "Input data for the resumed execution. The structure depends on the workflow's Human in the Loop block configuration.",
+          "content": {
+            "application/json": {
+              "schema": {
+                "type": "object",
+                "properties": {
+                  "input": {
+                    "type": "object",
+                    "description": "Key-value pairs to pass as input to the resumed execution. If omitted, the entire request body is used as input.",
+                    "additionalProperties": true
+                  }
+                }
+              },
+              "example": {
+                "input": {
+                  "approved": true,
+                  "comment": "Looks good to me"
+                }
+              }
+            }
+          }
+        },
+        "responses": {
+          "200": {
+            "description": "Resume execution completed synchronously, or resume was queued behind another in-progress resume.",
+            "content": {
+              "application/json": {
+                "schema": {
+                  "oneOf": [
+                    {
+                      "$ref": "#/components/schemas/ResumeResult"
+                    },
+                    {
+                      "type": "object",
+                      "description": "Resume has been queued behind another in-progress resume.",
+                      "properties": {
+                        "status": {
+                          "type": "string",
+                          "enum": ["queued"],
+                          "description": "Indicates the resume is queued."
+                        },
+                        "executionId": {
+                          "type": "string",
+                          "description": "The execution ID assigned to this resume."
+                        },
+                        "queuePosition": {
+                          "type": "integer",
+                          "description": "Position in the resume queue."
+                        },
+                        "message": {
+                          "type": "string",
+                          "description": "Human-readable status message."
+                        }
+                      }
+                    },
+                    {
+                      "type": "object",
+                      "description": "Resume execution started (non-API-key callers). The execution runs asynchronously.",
+                      "properties": {
+                        "status": {
+                          "type": "string",
+                          "enum": ["started"],
+                          "description": "Indicates the resume execution has started."
+                        },
+                        "executionId": {
+                          "type": "string",
+                          "description": "The execution ID for the resumed workflow."
+                        },
+                        "message": {
+                          "type": "string",
+                          "description": "Human-readable status message."
+                        }
+                      }
+                    }
+                  ]
+                },
+                "examples": {
+                  "sync": {
+                    "summary": "Synchronous completion",
+                    "value": {
+                      "success": true,
+                      "status": "completed",
+                      "executionId": "exec_new123",
+                      "output": {
+                        "result": "Approved and processed"
+                      },
+                      "error": null,
+                      "metadata": {
+                        "duration": 850,
+                        "startTime": "2026-01-15T10:35:00Z",
+                        "endTime": "2026-01-15T10:35:01Z"
+                      }
+                    }
+                  },
+                  "queued": {
+                    "summary": "Queued behind another resume",
+                    "value": {
+                      "status": "queued",
+                      "executionId": "exec_new123",
+                      "queuePosition": 2,
+                      "message": "Resume queued. It will run after current resumes finish."
+                    }
+                  },
+                  "started": {
+                    "summary": "Execution started (fire and forget)",
+                    "value": {
+                      "status": "started",
+                      "executionId": "exec_new123",
+                      "message": "Resume execution started."
+                    }
+                  }
+                }
+              }
+            }
+          },
+          "202": {
+            "description": "Resume execution has been queued for asynchronous processing. Poll the statusUrl for results.",
+            "content": {
+              "application/json": {
+                "schema": {
+                  "$ref": "#/components/schemas/AsyncExecutionResult"
+                },
+                "example": {
+                  "success": true,
+                  "async": true,
+                  "jobId": "job_4a3b2c1d0e",
+                  "executionId": "exec_new123",
+                  "message": "Resume execution queued",
+                  "statusUrl": "https://www.sim.ai/api/jobs/job_4a3b2c1d0e"
+                }
+              }
+            }
+          },
+          "400": {
+            "$ref": "#/components/responses/BadRequest"
+          },
+          "401": {
+            "$ref": "#/components/responses/Unauthorized"
+          },
+          "403": {
+            "$ref": "#/components/responses/Forbidden"
+          },
+          "404": {
+            "$ref": "#/components/responses/NotFound"
+          },
+          "503": {
+            "description": "Failed to queue the resume execution. Retry the request.",
+            "content": {
+              "application/json": {
+                "schema": {
+                  "type": "object",
+                  "properties": {
+                    "error": {
+                      "type": "string",
+                      "description": "Error message."
+                    }
+                  }
+                }
+              }
+            }
+          },
+          "500": {
+            "description": "Internal server error.",
+            "content": {
+              "application/json": {
+                "schema": {
+                  "type": "object",
+                  "properties": {
+                    "error": {
+                      "type": "string",
+                      "description": "Human-readable error message."
+                    }
+                  }
+                }
+              }
+            }
+          }
+        }
+      }
+    },
     "/api/v1/workflows": {
       "get": {
         "operationId": "listWorkflows",
@@ -5788,6 +6330,346 @@
           "description": "Upper bound value for 'between' operator."
         }
       }
     },
+    "PausedExecutionSummary": {
+      "type": "object",
+      "description": "Summary of a paused workflow execution.",
+      "properties": {
+        "id": {
+          "type": "string",
+          "description": "Unique identifier for the paused execution record."
+        },
+        "workflowId": {
+          "type": "string",
+          "description": "The workflow this execution belongs to."
+        },
+        "executionId": {
+          "type": "string",
+          "description": "The execution that was paused."
+        },
+        "status": {
+          "type": "string",
+          "description": "Current status of the paused execution.",
+          "example": "paused"
+        },
+        "totalPauseCount": {
+          "type": "integer",
+          "description": "Total number of pause points in this execution."
+        },
+        "resumedCount": {
+          "type": "integer",
+          "description": "Number of pause points that have been resumed."
+        },
+        "pausedAt": {
+          "type": "string",
+          "format": "date-time",
+          "nullable": true,
+          "description": "When the execution was paused."
+        },
+        "updatedAt": {
+          "type": "string",
+          "format": "date-time",
+          "nullable": true,
+          "description": "When the paused execution record was last updated."
+        },
+        "expiresAt": {
+          "type": "string",
+          "format": "date-time",
+          "nullable": true,
+          "description": "When the paused execution will expire and be cleaned up."
+        },
+        "metadata": {
+          "type": "object",
+          "nullable": true,
+          "description": "Additional metadata associated with the paused execution.",
+          "additionalProperties": true
+        },
+        "triggerIds": {
+          "type": "array",
+          "items": {
+            "type": "string"
+          },
+          "description": "IDs of triggers that initiated the original execution."
+        },
+        "pausePoints": {
+          "type": "array",
+          "items": {
+            "$ref": "#/components/schemas/PausePoint"
+          },
+          "description": "List of pause points in the execution."
+        }
+      }
+    },
+    "PausePoint": {
+      "type": "object",
+      "description": "A point in the workflow where execution has been paused awaiting human input.",
+      "properties": {
+        "contextId": {
+          "type": "string",
+          "description": "Unique identifier for this pause context. Used when resuming execution."
+        },
+        "blockId": {
+          "type": "string",
+          "description": "The block ID where execution paused."
+        },
+        "response": {
+          "description": "Data returned by the block before pausing, including display data and form fields."
+        },
+        "registeredAt": {
+          "type": "string",
+          "format": "date-time",
+          "description": "When this pause point was registered."
+        },
+        "resumeStatus": {
+          "type": "string",
+          "enum": ["paused", "resumed", "failed", "queued", "resuming"],
+          "description": "Current status of this pause point."
+        },
+        "snapshotReady": {
+          "type": "boolean",
+          "description": "Whether the execution snapshot is ready for resumption."
+        },
+        "resumeLinks": {
+          "type": "object",
+          "description": "Links for resuming this pause point.",
+          "properties": {
+            "apiUrl": {
+              "type": "string",
+              "format": "uri",
+              "description": "API endpoint URL to POST resume input to."
+            },
+            "uiUrl": {
+              "type": "string",
+              "format": "uri",
+              "description": "UI URL for a human to review and approve."
+            },
+            "contextId": {
+              "type": "string",
+              "description": "The context ID for this pause point."
+            },
+            "executionId": {
+              "type": "string",
+              "description": "The execution ID."
+            },
+            "workflowId": {
+              "type": "string",
+              "description": "The workflow ID."
+            }
+          }
+        },
+        "queuePosition": {
+          "type": "integer",
+          "nullable": true,
+          "description": "Position in the resume queue, if queued."
+        },
+        "latestResumeEntry": {
+          "$ref": "#/components/schemas/ResumeQueueEntry",
+          "nullable": true,
+          "description": "The most recent resume queue entry for this pause point."
+        },
+        "parallelScope": {
+          "type": "object",
+          "description": "Scope information when the pause occurs inside a parallel branch.",
+          "properties": {
+            "parallelId": {
+              "type": "string",
+              "description": "Identifier of the parallel execution group."
+            },
+            "branchIndex": {
+              "type": "integer",
+              "description": "Index of the branch within the parallel group."
+            },
+            "branchTotal": {
+              "type": "integer",
+              "description": "Total number of branches in the parallel group."
+            }
+          }
+        },
+        "loopScope": {
+          "type": "object",
+          "description": "Scope information when the pause occurs inside a loop.",
+          "properties": {
+            "loopId": {
+              "type": "string",
+              "description": "Identifier of the loop."
+            },
+            "iteration": {
+              "type": "integer",
+              "description": "Current loop iteration number."
+            }
+          }
+        }
+      }
+    },
+    "ResumeQueueEntry": {
+      "type": "object",
+      "description": "An entry in the resume execution queue.",
+      "properties": {
+        "id": {
+          "type": "string",
+          "description": "Unique identifier for this queue entry."
+        },
+        "pausedExecutionId": {
+          "type": "string",
+          "description": "The paused execution this entry belongs to."
+        },
+        "parentExecutionId": {
+          "type": "string",
+          "description": "The original execution that was paused."
+        },
+        "newExecutionId": {
+          "type": "string",
+          "description": "The new execution ID created for the resume."
+        },
+        "contextId": {
+          "type": "string",
+          "description": "The pause context ID being resumed."
+        },
+        "resumeInput": {
+          "description": "The input provided when resuming."
+        },
+        "status": {
+          "type": "string",
+          "description": "Status of this queue entry (e.g., pending, claimed, completed, failed)."
+        },
+        "queuedAt": {
+          "type": "string",
+          "format": "date-time",
+          "nullable": true,
+          "description": "When the entry was added to the queue."
+        },
+        "claimedAt": {
+          "type": "string",
+          "format": "date-time",
+          "nullable": true,
+          "description": "When execution started processing this entry."
+        },
+        "completedAt": {
+          "type": "string",
+          "format": "date-time",
+          "nullable": true,
+          "description": "When execution completed."
+        },
+        "failureReason": {
+          "type": "string",
+          "nullable": true,
+          "description": "Reason for failure, if the resume failed."
+        }
+      }
+    },
+    "PausedExecutionDetail": {
+      "type": "object",
+      "description": "Detailed information about a paused execution, including the execution snapshot and resume queue.",
+      "allOf": [
+        {
+          "$ref": "#/components/schemas/PausedExecutionSummary"
+        },
+        {
+          "type": "object",
+          "properties": {
+            "executionSnapshot": {
+              "type": "object",
+              "description": "Serialized execution state for resumption.",
+              "properties": {
+                "snapshot": {
+                  "type": "string",
+                  "description": "Serialized execution snapshot data."
+                },
+                "triggerIds": {
+                  "type": "array",
+                  "items": {
+                    "type": "string"
+                  },
+                  "description": "Trigger IDs from the snapshot."
+                }
+              }
+            },
+            "queue": {
+              "type": "array",
+              "items": {
+                "$ref": "#/components/schemas/ResumeQueueEntry"
+              },
+              "description": "Resume queue entries for this execution."
+            }
+          }
+        }
+      ]
+    },
+    "PauseContextDetail": {
+      "type": "object",
+      "description": "Detailed information about a specific pause context within a paused execution.",
+      "properties": {
+        "execution": {
+          "$ref": "#/components/schemas/PausedExecutionSummary",
+          "description": "Summary of the parent paused execution."
+        },
+        "pausePoint": {
+          "$ref": "#/components/schemas/PausePoint",
+          "description": "The specific pause point for this context."
+        },
+        "queue": {
+          "type": "array",
+          "items": {
+            "$ref": "#/components/schemas/ResumeQueueEntry"
+          },
+          "description": "Resume queue entries for this context."
+        },
+        "activeResumeEntry": {
+          "$ref": "#/components/schemas/ResumeQueueEntry",
+          "nullable": true,
+          "description": "The currently active resume entry, if any."
+        }
+      }
+    },
+    "ResumeResult": {
+      "type": "object",
+      "description": "Result of a synchronous resume execution.",
+      "properties": {
+        "success": {
+          "type": "boolean",
+          "description": "Whether the resume execution completed successfully."
+        },
+        "status": {
+          "type": "string",
+          "description": "Execution status.",
+          "enum": ["completed", "failed", "paused", "cancelled"],
+          "example": "completed"
+        },
+        "executionId": {
+          "type": "string",
+          "description": "The new execution ID for the resumed workflow."
+        },
+        "output": {
+          "type": "object",
+          "description": "Workflow output from the resumed execution.",
+          "additionalProperties": true
+        },
+        "error": {
+          "type": "string",
+          "nullable": true,
+          "description": "Error message if the execution failed."
+        },
+        "metadata": {
+          "type": "object",
+          "description": "Execution timing metadata.",
+          "properties": {
+            "duration": {
+              "type": "integer",
+              "description": "Total execution duration in milliseconds."
+            },
+            "startTime": {
+              "type": "string",
+              "format": "date-time",
+              "description": "When the resume execution started."
+            },
+            "endTime": {
+              "type": "string",
+              "format": "date-time",
+              "description": "When the resume execution completed."
+            }
+          }
+        }
+      }
+    }
   },
   "responses": {
@@ -2044,12 +2044,16 @@
     {
       "name": "Get Metric Statistics",
       "description": "Get statistics for a CloudWatch metric over a time range"
     },
+    {
+      "name": "Publish Metric",
+      "description": "Publish a custom metric data point to CloudWatch"
+    },
     {
       "name": "Describe Alarms",
       "description": "List and filter CloudWatch alarms"
     }
   ],
-  "operationCount": 7,
+  "operationCount": 8,
   "triggers": [],
   "triggerCount": 0,
   "authType": "none",
@@ -6610,9 +6614,21 @@
     {
       "name": "Get Request Type Fields",
       "description": "Get the fields required to create a request of a specific type in Jira Service Management"
     },
+    {
+      "name": "Get Form Templates",
+      "description": "List forms (ProForma/JSM Forms) in a Jira project to discover form IDs for request types"
+    },
+    {
+      "name": "Get Form Structure",
+      "description": "Get the full structure of a ProForma/JSM form including all questions, field types, choices, layout, and conditions"
+    },
+    {
+      "name": "Get Issue Forms",
+      "description": "List forms (ProForma/JSM Forms) attached to a Jira issue with metadata (name, submitted status, lock)"
+    }
   ],
-  "operationCount": 21,
+  "operationCount": 24,
   "triggers": [],
   "triggerCount": 0,
   "authType": "oauth",
@@ -10780,8 +10796,34 @@
     }
   ],
   "operationCount": 4,
-  "triggers": [],
-  "triggerCount": 0,
+  "triggers": [
+    {
+      "id": "servicenow_incident_created",
+      "name": "ServiceNow Incident Created",
+      "description": "Trigger workflow when a new incident is created in ServiceNow"
+    },
+    {
+      "id": "servicenow_incident_updated",
+      "name": "ServiceNow Incident Updated",
+      "description": "Trigger workflow when an incident is updated in ServiceNow"
+    },
+    {
+      "id": "servicenow_change_request_created",
+      "name": "ServiceNow Change Request Created",
+      "description": "Trigger workflow when a new change request is created in ServiceNow"
+    },
+    {
+      "id": "servicenow_change_request_updated",
+      "name": "ServiceNow Change Request Updated",
+      "description": "Trigger workflow when a change request is updated in ServiceNow"
+    },
+    {
+      "id": "servicenow_webhook",
+      "name": "ServiceNow Webhook (All Events)",
+      "description": "Trigger workflow on any ServiceNow webhook event"
+    }
+  ],
+  "triggerCount": 5,
   "authType": "none",
   "category": "tools",
   "integrationType": "customer-support",
@@ -220,6 +220,7 @@ html[data-sidebar-collapsed] .sidebar-container .sidebar-collapse-btn {
   /* Brand & state */
   --brand-secondary: #33b4ff;
   --brand-accent: #33c482;
+  --brand-accent-hover: #2dac72;
   --selection: #1a5cf6;
   --warning: #ea580c;
@@ -375,6 +376,7 @@ html[data-sidebar-collapsed] .sidebar-container .sidebar-collapse-btn {
   /* Brand & state */
   --brand-secondary: #33b4ff;
   --brand-accent: #33c482;
+  --brand-accent-hover: #2dac72;
   --selection: #4b83f7;
   --warning: #ff6600;
@@ -23,6 +23,18 @@ export async function POST() {

     return NextResponse.json({ token: response.token })
   } catch (error) {
+    // better-auth's sessionMiddleware throws APIError("UNAUTHORIZED") with no message
+    // when the session is missing/expired — surface this as a 401, not a 500.
+    if (
+      error instanceof Error &&
+      ('statusCode' in error || 'status' in error) &&
+      ((error as Record<string, unknown>).statusCode === 401 ||
+        (error as Record<string, unknown>).status === 'UNAUTHORIZED')
+    ) {
+      logger.warn('Socket token request with invalid/expired session')
+      return NextResponse.json({ error: 'Authentication required' }, { status: 401 })
+    }
+
     logger.error('Failed to generate socket token', {
       error: error instanceof Error ? error.message : String(error),
       stack: error instanceof Error ? error.stack : undefined,
@@ -140,6 +140,10 @@ vi.mock('@/lib/workflows/streaming/streaming', () => ({
   createStreamingResponse: vi.fn().mockImplementation(async () => createMockStream()),
 }))

+vi.mock('@/lib/workflows/executor/execute-workflow', () => ({
+  executeWorkflow: vi.fn().mockResolvedValue({ success: true, output: {} }),
+}))
+
 vi.mock('@/lib/core/utils/sse', () => ({
   SSE_HEADERS: {
     'Content-Type': 'text/event-stream',
@@ -410,14 +414,7 @@ describe('Chat Identifier API Route', () => {

     expect(createStreamingResponse).toHaveBeenCalledWith(
       expect.objectContaining({
-        workflow: expect.objectContaining({
-          id: 'workflow-id',
-          userId: 'user-id',
-        }),
-        input: expect.objectContaining({
-          input: 'Hello world',
-          conversationId: 'conv-123',
-        }),
+        executeFn: expect.any(Function),
         streamConfig: expect.objectContaining({
           isSecureMode: true,
           workflowTriggerType: 'chat',
@@ -494,9 +491,9 @@ describe('Chat Identifier API Route', () => {

     expect(createStreamingResponse).toHaveBeenCalledWith(
       expect.objectContaining({
         input: expect.objectContaining({
           input: 'Hello world',
           conversationId: 'test-conversation-123',
         executeFn: expect.any(Function),
         streamConfig: expect.objectContaining({
           workflowTriggerType: 'chat',
         }),
       })
     )
@@ -510,9 +507,7 @@ describe('Chat Identifier API Route', () => {

     expect(createStreamingResponse).toHaveBeenCalledWith(
       expect.objectContaining({
         input: expect.objectContaining({
           input: 'Hello world',
         }),
         executeFn: expect.any(Function),
       })
     )
   })
@@ -199,6 +199,7 @@ export async function POST(
}

const { createStreamingResponse } = await import('@/lib/workflows/streaming/streaming')
const { executeWorkflow } = await import('@/lib/workflows/executor/execute-workflow')
const { SSE_HEADERS } = await import('@/lib/core/utils/sse')

const workflowInput: any = { input, conversationId }
@@ -252,15 +253,31 @@ export async function POST(

const stream = await createStreamingResponse({
requestId,
workflow: workflowForExecution,
input: workflowInput,
executingUserId: workspaceOwnerId,
streamConfig: {
selectedOutputs,
isSecureMode: true,
workflowTriggerType: 'chat',
},
executionId,
executeFn: async ({ onStream, onBlockComplete, abortSignal }) =>
executeWorkflow(
workflowForExecution,
requestId,
workflowInput,
workspaceOwnerId,
{
enabled: true,
selectedOutputs,
isSecureMode: true,
workflowTriggerType: 'chat',
onStream,
onBlockComplete,
skipLoggingComplete: true,
abortSignal,
executionMode: 'stream',
},
executionId
),
})

const streamResponse = new NextResponse(stream, {

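A minimal client-side sketch of consuming the SSE stream this chat route returns. The endpoint path and identifier are hypothetical, and frames are assumed to arrive as whole `data: <json>` lines, matching the parsing the form route below applies to the same stream type.

// Sketch only: consume a chat SSE stream. '/api/chat/my-agent' is a
// hypothetical path; frame shape follows the SSE parsing later in this diff.
async function streamChat(input: string): Promise<void> {
  const res = await fetch('/api/chat/my-agent', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ input }),
  })
  if (!res.ok || !res.body) throw new Error(`chat request failed: ${res.status}`)
  const reader = res.body.getReader()
  const decoder = new TextDecoder()
  try {
    while (true) {
      const { done, value } = await reader.read()
      if (done) break
      // Assumes frames are line-aligned; a production client would buffer partials.
      for (const line of decoder.decode(value, { stream: true }).split('\n')) {
        if (line.startsWith('data: ')) console.log(JSON.parse(line.slice(6)))
      }
    }
  } finally {
    reader.releaseLock()
  }
}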
@@ -9,6 +9,7 @@ import { generateRequestId } from '@/lib/core/utils/request'
import { generateId } from '@/lib/core/utils/uuid'
import { preprocessExecution } from '@/lib/execution/preprocessing'
import { LoggingSession } from '@/lib/logs/execution/logging-session'
import { executeWorkflow } from '@/lib/workflows/executor/execute-workflow'
import { normalizeInputFormatValue } from '@/lib/workflows/input-format'
import { createStreamingResponse } from '@/lib/workflows/streaming/streaming'
import { isInputDefinitionTrigger } from '@/lib/workflows/triggers/input-definition-triggers'
@@ -216,45 +217,39 @@ export async function POST(
...formData, // Spread form fields at top level for convenience
}

// Execute workflow using streaming (for consistency with chat)
const stream = await createStreamingResponse({
requestId,
workflow: workflowForExecution,
input: workflowInput,
executingUserId: workspaceOwnerId,
streamConfig: {
selectedOutputs: [],
isSecureMode: true,
workflowTriggerType: 'api', // Use 'api' type since form is similar
workflowTriggerType: 'api',
},
executionId,
executeFn: async ({ onStream, onBlockComplete, abortSignal }) =>
executeWorkflow(
workflowForExecution,
requestId,
workflowInput,
workspaceOwnerId,
{
enabled: true,
selectedOutputs: [],
isSecureMode: true,
workflowTriggerType: 'api',
onStream,
onBlockComplete,
skipLoggingComplete: true,
abortSignal,
executionMode: 'sync',
},
executionId
),
})

// For forms, we don't stream back - we wait for completion and return success
// Consume the stream to wait for completion
const reader = stream.getReader()
let lastOutput: any = null

try {
while (true) {
const { done, value } = await reader.read()
if (done) break

// Parse SSE data if present
const text = new TextDecoder().decode(value)
const lines = text.split('\n')
for (const line of lines) {
if (line.startsWith('data: ')) {
try {
const data = JSON.parse(line.slice(6))
if (data.type === 'complete' || data.output) {
lastOutput = data.output || data
}
} catch {
// Ignore parse errors
}
}
}
}
while (!(await reader.read()).done) {
/* drain to let the workflow run to completion */
}
} finally {
reader.releaseLock()

@@ -1,5 +1,5 @@
import { db } from '@sim/db'
import { subscription, user, workflowExecutionLogs, workspace } from '@sim/db/schema'
import { subscription, workflowExecutionLogs, workspace } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq, inArray, isNull, lt } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
@@ -26,38 +26,19 @@ export async function GET(request: NextRequest) {
const retentionDate = new Date()
retentionDate.setDate(retentionDate.getDate() - Number(env.FREE_PLAN_LOG_RETENTION_DAYS || '7'))

const freeUsers = await db
.select({ userId: user.id })
.from(user)
const freeWorkspacesSubquery = db
.select({ id: workspace.id })
.from(workspace)
.leftJoin(
subscription,
and(
eq(user.id, subscription.referenceId),
eq(subscription.referenceId, workspace.billedAccountUserId),
inArray(subscription.status, ENTITLED_SUBSCRIPTION_STATUSES),
sqlIsPaid(subscription.plan)
)
)
.where(isNull(subscription.id))

if (freeUsers.length === 0) {
logger.info('No free users found for log cleanup')
return NextResponse.json({ message: 'No free users found for cleanup' })
}

const freeUserIds = freeUsers.map((u) => u.userId)

const workspacesQuery = await db
.select({ id: workspace.id })
.from(workspace)
.where(inArray(workspace.billedAccountUserId, freeUserIds))

if (workspacesQuery.length === 0) {
logger.info('No workspaces found for free users')
return NextResponse.json({ message: 'No workspaces found for cleanup' })
}

const workspaceIds = workspacesQuery.map((w) => w.id)

const results = {
enhancedLogs: {
total: 0,
@@ -83,7 +64,7 @@ export async function GET(request: NextRequest) {
let batchesProcessed = 0
let hasMoreLogs = true

logger.info(`Starting enhanced logs cleanup for ${workspaceIds.length} workspaces`)
logger.info('Starting enhanced logs cleanup for free-plan workspaces')

while (hasMoreLogs && batchesProcessed < MAX_BATCHES) {
const oldEnhancedLogs = await db
@@ -105,8 +86,8 @@ export async function GET(request: NextRequest) {
.from(workflowExecutionLogs)
.where(
and(
inArray(workflowExecutionLogs.workspaceId, workspaceIds),
lt(workflowExecutionLogs.createdAt, retentionDate)
inArray(workflowExecutionLogs.workspaceId, freeWorkspacesSubquery),
lt(workflowExecutionLogs.startedAt, retentionDate)
)
)
.limit(BATCH_SIZE)

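The shape of this change, for reference: instead of materializing free-plan user and workspace ids with separate round trips, the cleanup hands an unexecuted Drizzle select to inArray, which renders as a single SQL IN (SELECT ...) subquery. A minimal sketch of the pattern, reusing the schema imports above with a simplified join condition:

// Unexecuted select (no await), usable as a subquery argument to inArray().
const freeWorkspaces = db
  .select({ id: workspace.id })
  .from(workspace)
  .leftJoin(subscription, eq(subscription.referenceId, workspace.billedAccountUserId))
  .where(isNull(subscription.id)) // no entitled subscription row => free plan

// One statement: workspace_id IN (SELECT ...) plus the retention cutoff.
const oldLogs = await db
  .select({ id: workflowExecutionLogs.id })
  .from(workflowExecutionLogs)
  .where(
    and(
      inArray(workflowExecutionLogs.workspaceId, freeWorkspaces),
      lt(workflowExecutionLogs.startedAt, retentionDate)
    )
  )
  .limit(BATCH_SIZE)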
213
apps/sim/app/api/organizations/[id]/whitelabel/route.ts
Normal file
@@ -0,0 +1,213 @@
import { db } from '@sim/db'
import { member, organization } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { getSession } from '@/lib/auth'
import { isOrganizationOnEnterprisePlan } from '@/lib/billing/core/subscription'
import { HEX_COLOR_REGEX } from '@/lib/branding'
import type { OrganizationWhitelabelSettings } from '@/lib/branding/types'

const logger = createLogger('WhitelabelAPI')

const updateWhitelabelSchema = z.object({
brandName: z
.string()
.trim()
.max(64, 'Brand name must be 64 characters or fewer')
.nullable()
.optional(),
logoUrl: z.string().min(1).nullable().optional(),
wordmarkUrl: z.string().min(1).nullable().optional(),
primaryColor: z
.string()
.regex(HEX_COLOR_REGEX, 'Primary color must be a valid hex color (e.g. #701ffc)')
.nullable()
.optional(),
primaryHoverColor: z
.string()
.regex(HEX_COLOR_REGEX, 'Primary hover color must be a valid hex color')
.nullable()
.optional(),
accentColor: z
.string()
.regex(HEX_COLOR_REGEX, 'Accent color must be a valid hex color')
.nullable()
.optional(),
accentHoverColor: z
.string()
.regex(HEX_COLOR_REGEX, 'Accent hover color must be a valid hex color')
.nullable()
.optional(),
supportEmail: z
.string()
.email('Support email must be a valid email address')
.nullable()
.optional(),
documentationUrl: z.string().url('Documentation URL must be a valid URL').nullable().optional(),
termsUrl: z.string().url('Terms URL must be a valid URL').nullable().optional(),
privacyUrl: z.string().url('Privacy URL must be a valid URL').nullable().optional(),
hidePoweredBySim: z.boolean().optional(),
})

/**
* GET /api/organizations/[id]/whitelabel
* Returns the organization's whitelabel settings.
* Accessible by any member of the organization.
*/
export async function GET(_request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
try {
const session = await getSession()

if (!session?.user?.id) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}

const { id: organizationId } = await params

const [memberEntry] = await db
.select({ id: member.id })
.from(member)
.where(and(eq(member.organizationId, organizationId), eq(member.userId, session.user.id)))
.limit(1)

if (!memberEntry) {
return NextResponse.json(
{ error: 'Forbidden - Not a member of this organization' },
{ status: 403 }
)
}

const [org] = await db
.select({ whitelabelSettings: organization.whitelabelSettings })
.from(organization)
.where(eq(organization.id, organizationId))
.limit(1)

if (!org) {
return NextResponse.json({ error: 'Organization not found' }, { status: 404 })
}

return NextResponse.json({
success: true,
data: (org.whitelabelSettings ?? {}) as OrganizationWhitelabelSettings,
})
} catch (error) {
logger.error('Failed to get whitelabel settings', { error })
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
}
}

/**
* PUT /api/organizations/[id]/whitelabel
* Updates the organization's whitelabel settings.
* Requires enterprise plan and owner/admin role.
*/
export async function PUT(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
try {
const session = await getSession()

if (!session?.user?.id) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}

const { id: organizationId } = await params

const body = await request.json()
const parsed = updateWhitelabelSchema.safeParse(body)

if (!parsed.success) {
return NextResponse.json(
{ error: parsed.error.errors[0]?.message ?? 'Invalid request body' },
{ status: 400 }
)
}

const [memberEntry] = await db
.select({ role: member.role })
.from(member)
.where(and(eq(member.organizationId, organizationId), eq(member.userId, session.user.id)))
.limit(1)

if (!memberEntry) {
return NextResponse.json(
{ error: 'Forbidden - Not a member of this organization' },
{ status: 403 }
)
}

if (memberEntry.role !== 'owner' && memberEntry.role !== 'admin') {
return NextResponse.json(
{ error: 'Forbidden - Only organization owners and admins can update whitelabel settings' },
{ status: 403 }
)
}

const hasEnterprisePlan = await isOrganizationOnEnterprisePlan(organizationId)

if (!hasEnterprisePlan) {
return NextResponse.json(
{ error: 'Whitelabeling is available on Enterprise plans only' },
{ status: 403 }
)
}

const [currentOrg] = await db
.select({ name: organization.name, whitelabelSettings: organization.whitelabelSettings })
.from(organization)
.where(eq(organization.id, organizationId))
.limit(1)

if (!currentOrg) {
return NextResponse.json({ error: 'Organization not found' }, { status: 404 })
}

const current: OrganizationWhitelabelSettings = currentOrg.whitelabelSettings ?? {}
const incoming = parsed.data

const merged: OrganizationWhitelabelSettings = { ...current }

for (const key of Object.keys(incoming) as Array<keyof typeof incoming>) {
const value = incoming[key]
if (value === null) {
delete merged[key as keyof OrganizationWhitelabelSettings]
} else if (value !== undefined) {
;(merged as Record<string, unknown>)[key] = value
}
}

const [updated] = await db
.update(organization)
.set({ whitelabelSettings: merged, updatedAt: new Date() })
.where(eq(organization.id, organizationId))
.returning({ whitelabelSettings: organization.whitelabelSettings })

if (!updated) {
return NextResponse.json({ error: 'Organization not found' }, { status: 404 })
}

recordAudit({
workspaceId: null,
actorId: session.user.id,
action: AuditAction.ORGANIZATION_UPDATED,
resourceType: AuditResourceType.ORGANIZATION,
resourceId: organizationId,
actorName: session.user.name ?? undefined,
actorEmail: session.user.email ?? undefined,
resourceName: currentOrg.name,
description: 'Updated organization whitelabel settings',
metadata: { changes: Object.keys(incoming) },
request,
})

return NextResponse.json({
success: true,
data: (updated.whitelabelSettings ?? {}) as OrganizationWhitelabelSettings,
})
} catch (error) {
logger.error('Failed to update whitelabel settings', { error })
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
}
}
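A hypothetical client call against the PUT handler above (the orgId value is a placeholder). Per the merge loop, null deletes a stored key, an omitted key is left unchanged, and any other validated value overwrites:

const res = await fetch(`/api/organizations/${orgId}/whitelabel`, {
  method: 'PUT',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    brandName: 'Acme Automation', // overwrite
    primaryColor: '#701ffc',      // must match HEX_COLOR_REGEX
    supportEmail: null,           // delete the stored value
  }),
})
const { success, data } = await res.json() // data: merged OrganizationWhitelabelSettings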
@@ -1,19 +1,44 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { AuthType } from '@/lib/auth/hybrid'
import { getJobQueue, shouldUseBullMQ } from '@/lib/core/async-jobs'
import { createBullMQJobData } from '@/lib/core/bullmq'
import { generateRequestId } from '@/lib/core/utils/request'
import { SSE_HEADERS } from '@/lib/core/utils/sse'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { generateId } from '@/lib/core/utils/uuid'
import { enqueueWorkspaceDispatch } from '@/lib/core/workspace-dispatch'
import { setExecutionMeta } from '@/lib/execution/event-buffer'
import { preprocessExecution } from '@/lib/execution/preprocessing'
import { PauseResumeManager } from '@/lib/workflows/executor/human-in-the-loop-manager'
import { createStreamingResponse } from '@/lib/workflows/streaming/streaming'
import { getWorkspaceBilledAccountUserId } from '@/lib/workspaces/utils'
import { validateWorkflowAccess } from '@/app/api/workflows/middleware'
import type { ResumeExecutionPayload } from '@/background/resume-execution'
import { ExecutionSnapshot } from '@/executor/execution/snapshot'
import type { SerializedSnapshot } from '@/executor/types'

const logger = createLogger('WorkflowResumeAPI')

export const runtime = 'nodejs'
export const dynamic = 'force-dynamic'

function getStoredSnapshotConfig(pausedExecution: { executionSnapshot: unknown }): {
executionMode?: 'sync' | 'stream' | 'async'
selectedOutputs?: string[]
} {
try {
const serialized = pausedExecution.executionSnapshot as SerializedSnapshot
const snapshot = ExecutionSnapshot.fromJSON(serialized.snapshot)
return {
executionMode: snapshot.metadata.executionMode,
selectedOutputs: snapshot.selectedOutputs,
}
} catch {
return {}
}
}

export async function POST(
request: NextRequest,
{
@@ -24,7 +49,6 @@ export async function POST(
) {
const { workflowId, executionId, contextId } = await params

// Allow resume from dashboard without requiring deployment
const access = await validateWorkflowAccess(request, workflowId, false)
if (access.error) {
return NextResponse.json({ error: access.error.message }, { status: access.error.status })
@@ -74,12 +98,12 @@ export async function POST(
const preprocessResult = await preprocessExecution({
workflowId,
userId,
triggerType: 'manual', // Resume is a manual trigger
triggerType: 'manual',
executionId: resumeExecutionId,
requestId,
checkRateLimit: false, // Manual triggers bypass rate limits
checkDeployment: false, // Resuming existing execution, deployment already checked
skipUsageLimits: true, // Resume is continuation of authorized execution - don't recheck limits
checkRateLimit: false,
checkDeployment: false,
skipUsageLimits: true,
useAuthenticatedUserAsActor: isPersonalApiKeyCaller,
workspaceId: workflow.workspaceId || undefined,
})
@@ -142,8 +166,35 @@ export async function POST(
}

const isApiCaller = access.auth?.authType === AuthType.API_KEY
const snapshotConfig = isApiCaller ? getStoredSnapshotConfig(enqueueResult.pausedExecution) : {}
const executionMode = isApiCaller ? (snapshotConfig.executionMode ?? 'sync') : undefined

if (isApiCaller) {
if (isApiCaller && executionMode === 'stream') {
const stream = await createStreamingResponse({
requestId,
streamConfig: {
selectedOutputs: snapshotConfig.selectedOutputs,
timeoutMs: preprocessResult.executionTimeout?.sync,
},
executionId: enqueueResult.resumeExecutionId,
executeFn: async ({ onStream, onBlockComplete, abortSignal }) =>
PauseResumeManager.startResumeExecution({
...resumeArgs,
onStream,
onBlockComplete,
abortSignal,
}),
})

return new NextResponse(stream, {
headers: {
...SSE_HEADERS,
'X-Execution-Id': enqueueResult.resumeExecutionId,
},
})
}

if (isApiCaller && executionMode === 'sync') {
const result = await PauseResumeManager.startResumeExecution(resumeArgs)

return NextResponse.json({
@@ -162,6 +213,68 @@ export async function POST(
})
}

if (isApiCaller && executionMode === 'async') {
const resumePayload: ResumeExecutionPayload = {
resumeEntryId: enqueueResult.resumeEntryId,
resumeExecutionId: enqueueResult.resumeExecutionId,
pausedExecutionId: enqueueResult.pausedExecution.id,
contextId: enqueueResult.contextId,
resumeInput: enqueueResult.resumeInput,
userId: enqueueResult.userId,
workflowId,
parentExecutionId: executionId,
}

let jobId: string
try {
const useBullMQ = shouldUseBullMQ()
if (useBullMQ) {
jobId = await enqueueWorkspaceDispatch({
id: enqueueResult.resumeExecutionId,
workspaceId: workflow.workspaceId,
lane: 'runtime',
queueName: 'resume-execution',
bullmqJobName: 'resume-execution',
bullmqPayload: createBullMQJobData(resumePayload, {
workflowId,
userId,
}),
metadata: { workflowId, userId },
})
} else {
const jobQueue = await getJobQueue()
jobId = await jobQueue.enqueue('resume-execution', resumePayload, {
metadata: { workflowId, workspaceId: workflow.workspaceId, userId },
})
}
logger.info('Enqueued async resume execution', {
jobId,
resumeExecutionId: enqueueResult.resumeExecutionId,
})
} catch (dispatchError) {
logger.error('Failed to dispatch async resume execution', {
error: dispatchError instanceof Error ? dispatchError.message : String(dispatchError),
resumeExecutionId: enqueueResult.resumeExecutionId,
})
return NextResponse.json(
{ error: 'Failed to queue resume execution. Please try again.' },
{ status: 503 }
)
}

return NextResponse.json(
{
success: true,
async: true,
jobId,
executionId: enqueueResult.resumeExecutionId,
message: 'Resume execution queued',
statusUrl: `${getBaseUrl()}/api/jobs/${jobId}`,
},
{ status: 202 }
)
}

PauseResumeManager.startResumeExecution(resumeArgs).catch((error) => {
logger.error('Failed to start resume execution', {
workflowId,
@@ -200,7 +313,6 @@ export async function GET(
) {
const { workflowId, executionId, contextId } = await params

// Allow access without API key for browser-based UI (same as parent execution endpoint)
const access = await validateWorkflowAccess(request, workflowId, false)
if (access.error) {
return NextResponse.json({ error: access.error.message }, { status: access.error.status })

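For API-key callers the resume route now replays the execution mode recorded in the paused snapshot. A sketch of handling the three response shapes a client can see; the route path is reconstructed from the params shape above and the auth header name is an assumption:

const res = await fetch(
  `/api/workflows/${workflowId}/executions/${executionId}/resume/${contextId}`,
  { method: 'POST', headers: { 'x-api-key': apiKey } } // header name assumed
)

if (res.status === 202) {
  // async mode: job queued; poll statusUrl (/api/jobs/:jobId) until done
  const { jobId, statusUrl } = await res.json()
} else if (res.headers.get('content-type')?.includes('text/event-stream')) {
  // stream mode: read SSE frames; X-Execution-Id carries the resumed execution id
  const resumedId = res.headers.get('X-Execution-Id')
} else {
  // sync mode: the full result body is returned directly
  const result = await res.json()
}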
@@ -4,7 +4,7 @@ import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { hasExceededCostLimit } from '@/lib/billing/core/subscription'
import { checkServerSideUsageLimits } from '@/lib/billing/calculations/usage-monitor'
import { recordUsage } from '@/lib/billing/core/usage-log'
import { env } from '@/lib/core/config/env'
import { getCostMultiplier, isBillingEnabled } from '@/lib/core/config/feature-flags'
@@ -110,11 +110,14 @@ export async function POST(request: NextRequest) {
}
}

if (billingUserId && isBillingEnabled) {
const exceeded = await hasExceededCostLimit(billingUserId)
if (exceeded) {
if (billingUserId) {
const usageCheck = await checkServerSideUsageLimits(billingUserId)
if (usageCheck.isExceeded) {
return NextResponse.json(
{ error: 'Usage limit exceeded. Please upgrade your plan to continue.' },
{
error:
usageCheck.message || 'Usage limit exceeded. Please upgrade your plan to continue.',
},
{ status: 402 }
)
}

@@ -51,7 +51,9 @@ export async function POST(request: NextRequest) {
const command = new DescribeAlarmsCommand({
...(validatedData.alarmNamePrefix && { AlarmNamePrefix: validatedData.alarmNamePrefix }),
...(validatedData.stateValue && { StateValue: validatedData.stateValue as StateValue }),
...(validatedData.alarmType && { AlarmTypes: [validatedData.alarmType as AlarmType] }),
AlarmTypes: validatedData.alarmType
? [validatedData.alarmType as AlarmType]
: (['MetricAlarm', 'CompositeAlarm'] as AlarmType[]),
...(validatedData.limit !== undefined && { MaxRecords: validatedData.limit }),
})

@@ -53,7 +53,7 @@ export async function POST(request: NextRequest) {
}))
}
} catch {
throw new Error('Invalid dimensions JSON')
return NextResponse.json({ error: 'Invalid dimensions JSON format' }, { status: 400 })
}
}

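The DescribeAlarms change above is behavioral, not cosmetic: to the best of my understanding of the CloudWatch API, omitting AlarmTypes returns only metric alarms, so the old conditional spread silently excluded composite alarms whenever the caller did not pick a type. The field is now always sent:

// Old: key omitted unless the caller picked a type, so composite alarms were never returned.
// New: explicit default covering both alarm kinds.
const alarmTypes: AlarmType[] = validatedData.alarmType
  ? [validatedData.alarmType as AlarmType]
  : ['MetricAlarm', 'CompositeAlarm']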
136
apps/sim/app/api/tools/cloudwatch/put-metric-data/route.ts
Normal file
@@ -0,0 +1,136 @@
import {
CloudWatchClient,
PutMetricDataCommand,
type StandardUnit,
} from '@aws-sdk/client-cloudwatch'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'

const logger = createLogger('CloudWatchPutMetricData')

const VALID_UNITS = [
'Seconds',
'Microseconds',
'Milliseconds',
'Bytes',
'Kilobytes',
'Megabytes',
'Gigabytes',
'Terabytes',
'Bits',
'Kilobits',
'Megabits',
'Gigabits',
'Terabits',
'Percent',
'Count',
'Bytes/Second',
'Kilobytes/Second',
'Megabytes/Second',
'Gigabytes/Second',
'Terabytes/Second',
'Bits/Second',
'Kilobits/Second',
'Megabits/Second',
'Gigabits/Second',
'Terabits/Second',
'Count/Second',
'None',
] as const

const PutMetricDataSchema = z.object({
region: z.string().min(1, 'AWS region is required'),
accessKeyId: z.string().min(1, 'AWS access key ID is required'),
secretAccessKey: z.string().min(1, 'AWS secret access key is required'),
namespace: z.string().min(1, 'Namespace is required'),
metricName: z.string().min(1, 'Metric name is required'),
value: z.number({ coerce: true }).refine((v) => Number.isFinite(v), {
message: 'Metric value must be a finite number',
}),
unit: z.enum(VALID_UNITS).optional(),
dimensions: z
.string()
.optional()
.refine(
(val) => {
if (!val) return true
try {
const parsed = JSON.parse(val)
return typeof parsed === 'object' && parsed !== null && !Array.isArray(parsed)
} catch {
return false
}
},
{ message: 'dimensions must be a valid JSON object string' }
),
})

export async function POST(request: NextRequest) {
try {
const auth = await checkInternalAuth(request)
if (!auth.success || !auth.userId) {
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}

const body = await request.json()
const validatedData = PutMetricDataSchema.parse(body)

const client = new CloudWatchClient({
region: validatedData.region,
credentials: {
accessKeyId: validatedData.accessKeyId,
secretAccessKey: validatedData.secretAccessKey,
},
})

const timestamp = new Date()

const dimensions: { Name: string; Value: string }[] = []
if (validatedData.dimensions) {
const parsed = JSON.parse(validatedData.dimensions)
for (const [name, value] of Object.entries(parsed)) {
dimensions.push({ Name: name, Value: String(value) })
}
}

const command = new PutMetricDataCommand({
Namespace: validatedData.namespace,
MetricData: [
{
MetricName: validatedData.metricName,
Value: validatedData.value,
Timestamp: timestamp,
...(validatedData.unit && { Unit: validatedData.unit as StandardUnit }),
...(dimensions.length > 0 && { Dimensions: dimensions }),
},
],
})

await client.send(command)

return NextResponse.json({
success: true,
output: {
success: true,
namespace: validatedData.namespace,
metricName: validatedData.metricName,
value: validatedData.value,
unit: validatedData.unit ?? 'None',
timestamp: timestamp.toISOString(),
},
})
} catch (error) {
if (error instanceof z.ZodError) {
return NextResponse.json(
{ error: error.errors[0]?.message ?? 'Invalid request' },
{ status: 400 }
)
}
const errorMessage =
error instanceof Error ? error.message : 'Failed to publish CloudWatch metric'
logger.error('PutMetricData failed', { error: errorMessage })
return NextResponse.json({ error: errorMessage }, { status: 500 })
}
}
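A hypothetical invocation of the new route. Field names follow PutMetricDataSchema above; the namespace, metric, and dimension values are placeholders. Note that dimensions is a JSON object serialized as a string:

const res = await fetch('/api/tools/cloudwatch/put-metric-data', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    region: 'us-east-1',
    accessKeyId: process.env.AWS_ACCESS_KEY_ID,
    secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
    namespace: 'Sim/Workflows',     // placeholder namespace
    metricName: 'ExecutionLatency', // placeholder metric
    value: 123.4,
    unit: 'Milliseconds',           // must be one of VALID_UNITS
    dimensions: JSON.stringify({ Environment: 'staging' }),
  }),
})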
115
apps/sim/app/api/tools/jsm/forms/issue/route.ts
Normal file
@@ -0,0 +1,115 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { validateJiraCloudId, validateJiraIssueKey } from '@/lib/core/security/input-validation'
import {
getJiraCloudId,
getJsmFormsApiBaseUrl,
getJsmHeaders,
parseJsmErrorMessage,
} from '@/tools/jsm/utils'

export const dynamic = 'force-dynamic'

const logger = createLogger('JsmIssueFormsAPI')

export async function POST(request: NextRequest) {
const auth = await checkInternalAuth(request)
if (!auth.success || !auth.userId) {
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}

try {
const body = await request.json()
const { domain, accessToken, cloudId: cloudIdParam, issueIdOrKey } = body

if (!domain) {
logger.error('Missing domain in request')
return NextResponse.json({ error: 'Domain is required' }, { status: 400 })
}

if (!accessToken) {
logger.error('Missing access token in request')
return NextResponse.json({ error: 'Access token is required' }, { status: 400 })
}

if (!issueIdOrKey) {
logger.error('Missing issueIdOrKey in request')
return NextResponse.json({ error: 'Issue ID or key is required' }, { status: 400 })
}

const cloudId = cloudIdParam || (await getJiraCloudId(domain, accessToken))

const cloudIdValidation = validateJiraCloudId(cloudId, 'cloudId')
if (!cloudIdValidation.isValid) {
return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 })
}

const issueIdOrKeyValidation = validateJiraIssueKey(issueIdOrKey, 'issueIdOrKey')
if (!issueIdOrKeyValidation.isValid) {
return NextResponse.json({ error: issueIdOrKeyValidation.error }, { status: 400 })
}

const baseUrl = getJsmFormsApiBaseUrl(cloudId)
const url = `${baseUrl}/issue/${encodeURIComponent(issueIdOrKey)}/form`

logger.info('Fetching issue forms from:', { url, issueIdOrKey })

const response = await fetch(url, {
method: 'GET',
headers: getJsmHeaders(accessToken),
})

if (!response.ok) {
const errorText = await response.text()
logger.error('JSM Forms API error:', {
status: response.status,
statusText: response.statusText,
error: errorText,
})

return NextResponse.json(
{
error: parseJsmErrorMessage(response.status, response.statusText, errorText),
details: errorText,
},
{ status: response.status }
)
}

const data = await response.json()

const forms = Array.isArray(data) ? data : (data.values ?? data.forms ?? [])

return NextResponse.json({
success: true,
output: {
ts: new Date().toISOString(),
issueIdOrKey,
forms: forms.map((form: Record<string, unknown>) => ({
id: form.id ?? null,
name: form.name ?? null,
updated: form.updated ?? null,
submitted: form.submitted ?? false,
lock: form.lock ?? false,
internal: form.internal ?? null,
formTemplateId: (form.formTemplate as Record<string, unknown>)?.id ?? null,
})),
total: forms.length,
},
})
} catch (error) {
logger.error('Error fetching issue forms:', {
error: error instanceof Error ? error.message : String(error),
stack: error instanceof Error ? error.stack : undefined,
})

return NextResponse.json(
{
error: error instanceof Error ? error.message : 'Internal server error',
success: false,
},
{ status: 500 }
)
}
}
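A hypothetical call to the issue-forms route above. The Atlassian domain and issue key are placeholders, and cloudId may be omitted since the handler resolves it through getJiraCloudId:

const res = await fetch('/api/tools/jsm/forms/issue', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    domain: 'your-site.atlassian.net', // placeholder
    accessToken,                       // Jira OAuth token, assumed in scope
    issueIdOrKey: 'SUP-123',           // placeholder
  }),
})
const { output } = await res.json()
// output.forms: [{ id, name, updated, submitted, lock, internal, formTemplateId }]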
117
apps/sim/app/api/tools/jsm/forms/structure/route.ts
Normal file
@@ -0,0 +1,117 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { validateJiraCloudId, validateJiraIssueKey } from '@/lib/core/security/input-validation'
import {
getJiraCloudId,
getJsmFormsApiBaseUrl,
getJsmHeaders,
parseJsmErrorMessage,
} from '@/tools/jsm/utils'

export const dynamic = 'force-dynamic'

const logger = createLogger('JsmFormStructureAPI')

export async function POST(request: NextRequest) {
const auth = await checkInternalAuth(request)
if (!auth.success || !auth.userId) {
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}

try {
const body = await request.json()
const { domain, accessToken, cloudId: cloudIdParam, projectIdOrKey, formId } = body

if (!domain) {
logger.error('Missing domain in request')
return NextResponse.json({ error: 'Domain is required' }, { status: 400 })
}

if (!accessToken) {
logger.error('Missing access token in request')
return NextResponse.json({ error: 'Access token is required' }, { status: 400 })
}

if (!projectIdOrKey) {
logger.error('Missing projectIdOrKey in request')
return NextResponse.json({ error: 'Project ID or key is required' }, { status: 400 })
}

if (!formId) {
logger.error('Missing formId in request')
return NextResponse.json({ error: 'Form ID is required' }, { status: 400 })
}

const cloudId = cloudIdParam || (await getJiraCloudId(domain, accessToken))

const cloudIdValidation = validateJiraCloudId(cloudId, 'cloudId')
if (!cloudIdValidation.isValid) {
return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 })
}

const projectIdOrKeyValidation = validateJiraIssueKey(projectIdOrKey, 'projectIdOrKey')
if (!projectIdOrKeyValidation.isValid) {
return NextResponse.json({ error: projectIdOrKeyValidation.error }, { status: 400 })
}

const formIdValidation = validateJiraCloudId(formId, 'formId')
if (!formIdValidation.isValid) {
return NextResponse.json({ error: formIdValidation.error }, { status: 400 })
}

const baseUrl = getJsmFormsApiBaseUrl(cloudId)
const url = `${baseUrl}/project/${encodeURIComponent(projectIdOrKey)}/form/${encodeURIComponent(formId)}`

logger.info('Fetching form template from:', { url, projectIdOrKey, formId })

const response = await fetch(url, {
method: 'GET',
headers: getJsmHeaders(accessToken),
})

if (!response.ok) {
const errorText = await response.text()
logger.error('JSM Forms API error:', {
status: response.status,
statusText: response.statusText,
error: errorText,
})

return NextResponse.json(
{
error: parseJsmErrorMessage(response.status, response.statusText, errorText),
details: errorText,
},
{ status: response.status }
)
}

const data = await response.json()

return NextResponse.json({
success: true,
output: {
ts: new Date().toISOString(),
projectIdOrKey,
formId,
design: data.design ?? null,
updated: data.updated ?? null,
publish: data.publish ?? null,
},
})
} catch (error) {
logger.error('Error fetching form structure:', {
error: error instanceof Error ? error.message : String(error),
stack: error instanceof Error ? error.stack : undefined,
})

return NextResponse.json(
{
error: error instanceof Error ? error.message : 'Internal server error',
success: false,
},
{ status: 500 }
)
}
}
115
apps/sim/app/api/tools/jsm/forms/templates/route.ts
Normal file
@@ -0,0 +1,115 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { validateJiraCloudId, validateJiraIssueKey } from '@/lib/core/security/input-validation'
import {
getJiraCloudId,
getJsmFormsApiBaseUrl,
getJsmHeaders,
parseJsmErrorMessage,
} from '@/tools/jsm/utils'

export const dynamic = 'force-dynamic'

const logger = createLogger('JsmFormTemplatesAPI')

export async function POST(request: NextRequest) {
const auth = await checkInternalAuth(request)
if (!auth.success || !auth.userId) {
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}

try {
const body = await request.json()
const { domain, accessToken, cloudId: cloudIdParam, projectIdOrKey } = body

if (!domain) {
logger.error('Missing domain in request')
return NextResponse.json({ error: 'Domain is required' }, { status: 400 })
}

if (!accessToken) {
logger.error('Missing access token in request')
return NextResponse.json({ error: 'Access token is required' }, { status: 400 })
}

if (!projectIdOrKey) {
logger.error('Missing projectIdOrKey in request')
return NextResponse.json({ error: 'Project ID or key is required' }, { status: 400 })
}

const cloudId = cloudIdParam || (await getJiraCloudId(domain, accessToken))

const cloudIdValidation = validateJiraCloudId(cloudId, 'cloudId')
if (!cloudIdValidation.isValid) {
return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 })
}

const projectIdOrKeyValidation = validateJiraIssueKey(projectIdOrKey, 'projectIdOrKey')
if (!projectIdOrKeyValidation.isValid) {
return NextResponse.json({ error: projectIdOrKeyValidation.error }, { status: 400 })
}

const baseUrl = getJsmFormsApiBaseUrl(cloudId)
const url = `${baseUrl}/project/${encodeURIComponent(projectIdOrKey)}/form`

logger.info('Fetching project form templates from:', { url, projectIdOrKey })

const response = await fetch(url, {
method: 'GET',
headers: getJsmHeaders(accessToken),
})

if (!response.ok) {
const errorText = await response.text()
logger.error('JSM Forms API error:', {
status: response.status,
statusText: response.statusText,
error: errorText,
})

return NextResponse.json(
{
error: parseJsmErrorMessage(response.status, response.statusText, errorText),
details: errorText,
},
{ status: response.status }
)
}

const data = await response.json()

const templates = Array.isArray(data) ? data : (data.values ?? [])

return NextResponse.json({
success: true,
output: {
ts: new Date().toISOString(),
projectIdOrKey,
templates: templates.map((template: Record<string, unknown>) => ({
id: template.id ?? null,
name: template.name ?? null,
updated: template.updated ?? null,
issueCreateIssueTypeIds: template.issueCreateIssueTypeIds ?? [],
issueCreateRequestTypeIds: template.issueCreateRequestTypeIds ?? [],
portalRequestTypeIds: template.portalRequestTypeIds ?? [],
recommendedIssueRequestTypeIds: template.recommendedIssueRequestTypeIds ?? [],
})),
total: templates.length,
},
})
} catch (error) {
logger.error('Error fetching form templates:', {
error: error instanceof Error ? error.message : String(error),
stack: error instanceof Error ? error.stack : undefined,
})

return NextResponse.json(
{
error: error instanceof Error ? error.message : 'Internal server error',
success: false,
},
{ status: 500 }
)
}
}
@@ -12,6 +12,20 @@ export const dynamic = 'force-dynamic'

const logger = createLogger('JsmRequestAPI')

function parseJsmErrorMessage(status: number, statusText: string, errorText: string): string {
try {
const errorData = JSON.parse(errorText)
if (errorData.errorMessage) {
return `JSM API error: ${errorData.errorMessage}`
}
} catch {
if (errorText) {
return `JSM API error: ${errorText}`
}
}
return `JSM API error: ${status} ${statusText}`
}

export async function POST(request: NextRequest) {
const auth = await checkInternalAuth(request)
if (!auth.success || !auth.userId) {
@@ -31,6 +45,7 @@ export async function POST(request: NextRequest) {
description,
raiseOnBehalfOf,
requestFieldValues,
formAnswers,
requestParticipants,
channel,
expand,
@@ -55,7 +70,7 @@ export async function POST(request: NextRequest) {

const baseUrl = getJsmApiBaseUrl(cloudId)

const isCreateOperation = serviceDeskId && requestTypeId && summary
const isCreateOperation = serviceDeskId && requestTypeId && (summary || formAnswers)

if (isCreateOperation) {
const serviceDeskIdValidation = validateAlphanumericId(serviceDeskId, 'serviceDeskId')
@@ -69,15 +84,30 @@ export async function POST(request: NextRequest) {
}
const url = `${baseUrl}/request`

logger.info('Creating request at:', url)
logger.info('Creating request at:', { url, serviceDeskId, requestTypeId })

const requestBody: Record<string, unknown> = {
serviceDeskId,
requestTypeId,
requestFieldValues: requestFieldValues || {
summary,
...(description && { description }),
},
}

if (summary || description || requestFieldValues) {
const fieldValues =
requestFieldValues && typeof requestFieldValues === 'object'
? {
...(!requestFieldValues.summary && summary ? { summary } : {}),
...(!requestFieldValues.description && description ? { description } : {}),
...requestFieldValues,
}
: {
...(summary && { summary }),
...(description && { description }),
}
requestBody.requestFieldValues = fieldValues
}

if (formAnswers && typeof formAnswers === 'object') {
requestBody.form = { answers: formAnswers }
}

if (raiseOnBehalfOf) {
@@ -112,7 +142,10 @@ export async function POST(request: NextRequest) {
})

return NextResponse.json(
{ error: `JSM API error: ${response.status} ${response.statusText}`, details: errorText },
{
error: parseJsmErrorMessage(response.status, response.statusText, errorText),
details: errorText,
},
{ status: response.status }
)
}
@@ -178,7 +211,10 @@ export async function POST(request: NextRequest) {
})

return NextResponse.json(
{ error: `JSM API error: ${response.status} ${response.statusText}`, details: errorText },
{
error: parseJsmErrorMessage(response.status, response.statusText, errorText),
details: errorText,
},
{ status: response.status }
)
}

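With this change a JSM request can be created from form answers alone: formAnswers is forwarded as body.form.answers, while summary and description are folded into requestFieldValues only when not already present there. A hypothetical create payload; the answer ids and value shapes depend on the specific form template, so they are illustrative only:

const payload = {
  domain: 'your-site.atlassian.net', // placeholder
  accessToken,
  serviceDeskId: '10',               // placeholder ids
  requestTypeId: '25',
  summary: 'Laptop replacement',
  formAnswers: {
    '1': { text: 'MacBook Pro 14"' },  // hypothetical answer shapes
    '2': { choices: ['urgent'] },
  },
}
// POSTing this hits the create branch even without a summary, since
// (summary || formAnswers) now gates isCreateOperation.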
@@ -20,9 +20,6 @@ export async function GET(
const { provider } = await params
const requestId = generateShortId()

const LOCK_KEY = `${provider}-polling-lock`
let lockValue: string | undefined

try {
const authError = verifyCronAuth(request, `${provider} webhook polling`)
if (authError) return authError
@@ -31,29 +28,38 @@ export async function GET(
return NextResponse.json({ error: `Unknown polling provider: ${provider}` }, { status: 404 })
}

lockValue = requestId
const locked = await acquireLock(LOCK_KEY, lockValue, LOCK_TTL_SECONDS)
if (!locked) {
return NextResponse.json(
{
success: true,
message: 'Polling already in progress – skipped',
requestId,
status: 'skip',
},
{ status: 202 }
)
const LOCK_KEY = `${provider}-polling-lock`
let lockValue: string | undefined

try {
lockValue = requestId
const locked = await acquireLock(LOCK_KEY, lockValue, LOCK_TTL_SECONDS)
if (!locked) {
return NextResponse.json(
{
success: true,
message: 'Polling already in progress – skipped',
requestId,
status: 'skip',
},
{ status: 202 }
)
}

const results = await pollProvider(provider)

return NextResponse.json({
success: true,
message: `${provider} polling completed`,
requestId,
status: 'completed',
...results,
})
} finally {
if (lockValue) {
await releaseLock(LOCK_KEY, lockValue).catch(() => {})
}
}

const results = await pollProvider(provider)

return NextResponse.json({
success: true,
message: `${provider} polling completed`,
requestId,
status: 'completed',
...results,
})
} catch (error) {
logger.error(`Error during ${provider} polling (${requestId}):`, error)
return NextResponse.json(
@@ -65,9 +71,5 @@ export async function GET(
},
{ status: 500 }
)
} finally {
if (lockValue) {
await releaseLock(LOCK_KEY, lockValue).catch(() => {})
}
}
}

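The restructuring above puts acquireLock inside a try whose finally releases only when lockValue was actually set, so a request that fails cron auth or names an unknown provider can never release a lock it does not hold. The same guard, extracted as a generic sketch; acquireLock and releaseLock are assumed to be the Redis-style helpers the route imports:

async function withPollingLock<T>(
  key: string,
  value: string,
  ttlSeconds: number,
  fn: () => Promise<T>
): Promise<T | undefined> {
  const locked = await acquireLock(key, value, ttlSeconds)
  if (!locked) return undefined // another poller holds the lock: skip this run
  try {
    return await fn()
  } finally {
    // Value-checked release: never deletes a lock that expired and was
    // re-acquired under the same key by a different worker.
    await releaseLock(key, value).catch(() => {})
  }
}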
@@ -39,6 +39,7 @@ import {
cleanupExecutionBase64Cache,
hydrateUserFilesWithBase64,
} from '@/lib/uploads/utils/user-file-base64.server'
import { executeWorkflow } from '@/lib/workflows/executor/execute-workflow'
import { executeWorkflowCore } from '@/lib/workflows/executor/execution-core'
import { type ExecutionEvent, encodeSSEEvent } from '@/lib/workflows/executor/execution-events'
import { handlePostExecutionPauseState } from '@/lib/workflows/executor/pause-persistence'
@@ -213,6 +214,7 @@ async function handleAsyncExecution(params: AsyncExecutionParams): Promise<NextR
requestId,
correlation,
callChain,
executionMode: 'async',
}

try {
@@ -789,6 +791,7 @@ async function handleExecutePost(
enforceCredentialAccess: useAuthenticatedUserAsActor,
workflowStateOverride: effectiveWorkflowStateOverride,
callChain,
executionMode: 'sync',
}

const executionVariables = cachedWorkflowData?.variables ?? workflow.variables ?? {}
@@ -1012,6 +1015,7 @@ async function handleExecutePost(
enforceCredentialAccess: useAuthenticatedUserAsActor,
workflowStateOverride: effectiveWorkflowStateOverride,
callChain,
executionMode: 'sync',
}

const executionVariables = cachedWorkflowData?.variables ?? workflow.variables ?? {}
@@ -1051,17 +1055,15 @@ async function handleExecutePost(
cachedWorkflowData?.blocks || {}
)
const streamVariables = cachedWorkflowData?.variables ?? (workflow as any).variables
const streamWorkflow = {
id: workflow.id,
userId: actorUserId,
workspaceId,
isDeployed: workflow.isDeployed,
variables: streamVariables,
}
const stream = await createStreamingResponse({
requestId,
workflow: {
id: workflow.id,
userId: actorUserId,
workspaceId,
isDeployed: workflow.isDeployed,
variables: streamVariables,
},
input: processedInput,
executingUserId: actorUserId,
streamConfig: {
selectedOutputs: resolvedSelectedOutputs,
isSecureMode: false,
@@ -1071,6 +1073,27 @@ async function handleExecutePost(
timeoutMs: preprocessResult.executionTimeout?.sync,
},
executionId,
executeFn: async ({ onStream, onBlockComplete, abortSignal }) =>
executeWorkflow(
streamWorkflow,
requestId,
processedInput,
actorUserId,
{
enabled: true,
selectedOutputs: resolvedSelectedOutputs,
isSecureMode: false,
workflowTriggerType: triggerType === 'chat' ? 'chat' : 'api',
onStream,
onBlockComplete,
skipLoggingComplete: true,
includeFileBase64,
base64MaxBytes,
abortSignal,
executionMode: 'stream',
},
executionId
),
})

return new NextResponse(stream, {
@@ -1310,6 +1333,7 @@ async function handleExecutePost(
enforceCredentialAccess: useAuthenticatedUserAsActor,
workflowStateOverride: effectiveWorkflowStateOverride,
callChain,
executionMode: 'sync',
}

const sseExecutionVariables = cachedWorkflowData?.variables ?? workflow.variables ?? {}

@@ -39,6 +39,7 @@ import {
extractContextTokens,
} from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/user-input/utils'
import { useWorkflowMap } from '@/hooks/queries/workflows'
import { useSettingsNavigation } from '@/hooks/use-settings-navigation'
import { useSpeechToText } from '@/hooks/use-speech-to-text'
import type { ChatContext } from '@/stores/panel'

@@ -120,6 +121,7 @@ export function UserInput({
onEnterWhileEmpty,
}: UserInputProps) {
const { workspaceId } = useParams<{ workspaceId: string }>()
const { navigateToSettings } = useSettingsNavigation()
const { data: workflowsById = {} } = useWorkflowMap(workspaceId)
const { data: session } = useSession()
const [value, setValue] = useState(defaultValue)
@@ -239,12 +241,19 @@ export function UserInput({
valueRef.current = newVal
}, [])

const handleUsageLimitExceeded = useCallback(() => {
navigateToSettings({ section: 'subscription' })
}, [navigateToSettings])

const {
isListening,
isSupported: isSttSupported,
toggleListening: rawToggle,
resetTranscript,
} = useSpeechToText({ onTranscript: handleTranscript })
} = useSpeechToText({
onTranscript: handleTranscript,
onUsageLimitExceeded: handleUsageLimitExceeded,
})

const toggleListening = useCallback(() => {
if (!isListening) {

@@ -1,4 +1,5 @@
import { ToastProvider } from '@/components/emcn'
import { getSession } from '@/lib/auth'
import { NavTour } from '@/app/workspace/[workspaceId]/components/product-tour'
import { ImpersonationBanner } from '@/app/workspace/[workspaceId]/impersonation-banner'
import { GlobalCommandsProvider } from '@/app/workspace/[workspaceId]/providers/global-commands-provider'
@@ -7,31 +8,40 @@ import { SettingsLoader } from '@/app/workspace/[workspaceId]/providers/settings
import { WorkspacePermissionsProvider } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
import { WorkspaceScopeSync } from '@/app/workspace/[workspaceId]/providers/workspace-scope-sync'
import { Sidebar } from '@/app/workspace/[workspaceId]/w/components/sidebar/sidebar'
import { BrandingProvider } from '@/ee/whitelabeling/components/branding-provider'
import { getOrgWhitelabelSettings } from '@/ee/whitelabeling/org-branding'

export default async function WorkspaceLayout({ children }: { children: React.ReactNode }) {
const session = await getSession()
// The organization plugin is conditionally spread so TS can't infer activeOrganizationId on the base session type.
const orgId = (session?.session as { activeOrganizationId?: string } | null)?.activeOrganizationId
const initialOrgSettings = orgId ? await getOrgWhitelabelSettings(orgId) : null

export default function WorkspaceLayout({ children }: { children: React.ReactNode }) {
return (
<ToastProvider>
<SettingsLoader />
<ProviderModelsLoader />
<GlobalCommandsProvider>
<div className='flex h-screen w-full flex-col overflow-hidden bg-[var(--surface-1)]'>
<ImpersonationBanner />
<WorkspacePermissionsProvider>
<WorkspaceScopeSync />
<div className='flex min-h-0 flex-1'>
<div className='shrink-0' suppressHydrationWarning>
<Sidebar />
</div>
<div className='flex min-w-0 flex-1 flex-col p-[8px] pl-0'>
<div className='flex-1 overflow-hidden rounded-[8px] border border-[var(--border)] bg-[var(--bg)]'>
{children}
<BrandingProvider initialOrgSettings={initialOrgSettings}>
<ToastProvider>
<SettingsLoader />
<ProviderModelsLoader />
<GlobalCommandsProvider>
<div className='flex h-screen w-full flex-col overflow-hidden bg-[var(--surface-1)]'>
<ImpersonationBanner />
<WorkspacePermissionsProvider>
<WorkspaceScopeSync />
<div className='flex min-h-0 flex-1'>
<div className='shrink-0' suppressHydrationWarning>
<Sidebar />
</div>
<div className='flex min-w-0 flex-1 flex-col p-[8px] pl-0'>
<div className='flex-1 overflow-hidden rounded-[8px] border border-[var(--border)] bg-[var(--bg)]'>
{children}
</div>
</div>
</div>
</div>
<NavTour />
</WorkspacePermissionsProvider>
</div>
</GlobalCommandsProvider>
</ToastProvider>
<NavTour />
</WorkspacePermissionsProvider>
</div>
</GlobalCommandsProvider>
</ToastProvider>
</BrandingProvider>
)
}

@@ -598,6 +598,24 @@ export const LogDetails = memo(function LogDetails({
{formatCost(log.cost?.output || 0)}
</span>
</div>
{(() => {
const models = (log.cost as Record<string, unknown>)?.models as
| Record<string, { toolCost?: number }>
| undefined
const totalToolCost = models
? Object.values(models).reduce((sum, m) => sum + (m?.toolCost || 0), 0)
: 0
return totalToolCost > 0 ? (
<div className='flex items-center justify-between'>
<span className='font-medium text-[var(--text-tertiary)] text-caption'>
Tool Usage:
</span>
<span className='font-medium text-[var(--text-secondary)] text-caption'>
{formatCost(totalToolCost)}
</span>
</div>
) : null
})()}
</div>

<div className='border-[var(--border)] border-t' />
@@ -626,7 +644,7 @@ export const LogDetails = memo(function LogDetails({
<div className='flex items-center justify-center rounded-md bg-[var(--surface-2)] p-2 text-center'>
<p className='font-medium text-[var(--text-subtle)] text-xs'>
Total cost includes a base execution charge of{' '}
{formatCost(BASE_EXECUTION_CHARGE)} plus any model usage costs.
{formatCost(BASE_EXECUTION_CHARGE)} plus any model and tool usage costs.
</p>
</div>
</div>

@@ -16,6 +16,7 @@ const SECTION_TITLES: Record<string, string> = {
subscription: 'Subscription',
team: 'Team',
sso: 'Single Sign-On',
whitelabeling: 'Whitelabeling',
copilot: 'Copilot Keys',
mcp: 'MCP Tools',
'custom-tools': 'Custom Tools',

@@ -163,6 +163,13 @@ const AccessControl = dynamic(
const SSO = dynamic(() => import('@/ee/sso/components/sso-settings').then((m) => m.SSO), {
loading: () => <SettingsSectionSkeleton />,
})
const WhitelabelingSettings = dynamic(
() =>
import('@/ee/whitelabeling/components/whitelabeling-settings').then(
(m) => m.WhitelabelingSettings
),
{ loading: () => <SettingsSectionSkeleton />, ssr: false }
)

interface SettingsPageProps {
section: SettingsSection
@@ -207,6 +214,7 @@ export function SettingsPage({ section }: SettingsPageProps) {
{isBillingEnabled && effectiveSection === 'subscription' && <Subscription />}
{isBillingEnabled && effectiveSection === 'team' && <TeamManagement />}
{effectiveSection === 'sso' && <SSO />}
{effectiveSection === 'whitelabeling' && <WhitelabelingSettings />}
{effectiveSection === 'byok' && <BYOK />}
{effectiveSection === 'copilot' && <Copilot />}
{effectiveSection === 'mcp' && <MCP initialServerId={mcpServerId} />}

@@ -387,7 +387,7 @@ export function General() {
<Tooltip.Preview
src='/tooltips/auto-connect-on-drop.mp4'
alt='Auto-connect on drop example'
loop={false}
loop={true}
/>
</Tooltip.Content>
</Tooltip.Root>

@@ -3,7 +3,7 @@ import { createLogger } from '@sim/logger'

const logger = createLogger('ProfilePictureUpload')
const MAX_FILE_SIZE = 5 * 1024 * 1024 // 5MB
const ACCEPTED_IMAGE_TYPES = ['image/png', 'image/jpeg', 'image/jpg']
const ACCEPTED_IMAGE_TYPES = ['image/png', 'image/jpeg', 'image/jpg', 'image/svg+xml']

interface UseProfilePictureUploadProps {
onUpload?: (url: string | null) => void
@@ -27,21 +27,19 @@ export function useProfilePictureUpload({
const [isUploading, setIsUploading] = useState(false)

useEffect(() => {
if (currentImage !== previewUrl) {
if (previewRef.current && previewRef.current !== currentImage) {
URL.revokeObjectURL(previewRef.current)
previewRef.current = null
}
setPreviewUrl(currentImage || null)
if (previewRef.current && previewRef.current !== currentImage) {
URL.revokeObjectURL(previewRef.current)
previewRef.current = null
}
}, [currentImage, previewUrl])
setPreviewUrl(currentImage || null)
}, [currentImage])

const validateFile = useCallback((file: File): string | null => {
if (file.size > MAX_FILE_SIZE) {
return `File "${file.name}" is too large. Maximum size is 5MB.`
}
if (!ACCEPTED_IMAGE_TYPES.includes(file.type)) {
return `File "${file.name}" is not a supported image format. Please use PNG or JPEG.`
return `File "${file.name}" is not a supported image format. Please use PNG, JPEG, or SVG.`
}
return null
}, [])
@@ -75,52 +73,59 @@ export function useProfilePictureUpload({
}
}, [])

const handleFileChange = useCallback(
async (event: React.ChangeEvent<HTMLInputElement>) => {
const file = event.target.files?.[0]
if (file) {
const validationError = validateFile(file)
if (validationError) {
onError?.(validationError)
return
}
const processFile = useCallback(
async (file: File) => {
const validationError = validateFile(file)
if (validationError) {
onError?.(validationError)
return
}

setFileName(file.name)
setFileName(file.name)

const newPreviewUrl = URL.createObjectURL(file)
const newPreviewUrl = URL.createObjectURL(file)
if (previewRef.current) URL.revokeObjectURL(previewRef.current)
setPreviewUrl(newPreviewUrl)
previewRef.current = newPreviewUrl

if (previewRef.current) {
URL.revokeObjectURL(previewRef.current)
}

setPreviewUrl(newPreviewUrl)
previewRef.current = newPreviewUrl

setIsUploading(true)
try {
const serverUrl = await uploadFileToServer(file)

URL.revokeObjectURL(newPreviewUrl)
previewRef.current = null
setPreviewUrl(serverUrl)

onUpload?.(serverUrl)
} catch (error) {
const errorMessage =
error instanceof Error ? error.message : 'Failed to upload profile picture'
onError?.(errorMessage)

URL.revokeObjectURL(newPreviewUrl)
previewRef.current = null
setPreviewUrl(currentImage || null)
} finally {
setIsUploading(false)
}
setIsUploading(true)
try {
const serverUrl = await uploadFileToServer(file)
URL.revokeObjectURL(newPreviewUrl)
previewRef.current = null
setPreviewUrl(serverUrl)
onUpload?.(serverUrl)
} catch (error) {
const errorMessage =
error instanceof Error ? error.message : 'Failed to upload profile picture'
onError?.(errorMessage)
URL.revokeObjectURL(newPreviewUrl)
previewRef.current = null
setPreviewUrl(currentImage || null)
} finally {
setIsUploading(false)
}
},
[onUpload, onError, uploadFileToServer, validateFile, currentImage]
)

const handleFileChange = useCallback(
(event: React.ChangeEvent<HTMLInputElement>) => {
const file = event.target.files?.[0]
if (file) processFile(file)
},
[processFile]
)

const handleFileDrop = useCallback(
(e: React.DragEvent) => {
e.preventDefault()
const file = e.dataTransfer.files[0]
if (file) processFile(file)
},
[processFile]
)

const handleRemove = useCallback(() => {
if (previewRef.current) {
URL.revokeObjectURL(previewRef.current)
@@ -148,6 +153,7 @@ export function useProfilePictureUpload({
fileInputRef,
handleThumbnailClick,
handleFileChange,
handleFileDrop,
handleRemove,
isUploading,
}

@@ -7,6 +7,7 @@ import {
Lock,
LogIn,
Mail,
Palette,
Send,
Server,
Settings,
@@ -31,6 +32,7 @@ export type SettingsSection =
| 'subscription'
| 'team'
| 'sso'
| 'whitelabeling'
| 'copilot'
| 'mcp'
| 'custom-tools'
@@ -163,6 +165,15 @@ export const allNavigationItems: NavigationItem[] = [
requiresEnterprise: true,
selfHostedOverride: isSSOEnabled,
},
{
id: 'whitelabeling',
label: 'Whitelabeling',
icon: Palette,
section: 'enterprise',
requiresHosted: true,
requiresEnterprise: true,
selfHostedOverride: isBillingEnabled,
},
{
id: 'admin',
label: 'Admin',

@@ -38,6 +38,8 @@ const TagIcon: React.FC<{
</div>
)

const EXCLUDED_OUTPUT_TYPES = new Set(['starter', 'start_trigger', 'human_in_the_loop'] as const)

/**
* Props for the OutputSelect component
*/
@@ -121,7 +123,7 @@ export function OutputSelect({
if (blockArray.length === 0) return outputs

blockArray.forEach((block: any) => {
if (block.type === 'starter' || !block?.id || !block?.type) return
if (EXCLUDED_OUTPUT_TYPES.has(block.type) || !block?.id || !block?.type) return

const blockName =
block.name && typeof block.name === 'string'

@@ -89,7 +89,7 @@ export function VersionDescriptionModal({
return (
<>
<Modal open={open} onOpenChange={(openState) => !openState && handleCloseAttempt()}>
<ModalContent size='md'>
<ModalContent size='lg'>
<ModalHeader>
<span>Version Description</span>
</ModalHeader>

@@ -9,7 +9,7 @@ export interface HighlightContext {
highlightAll?: boolean
}

const SYSTEM_PREFIXES = new Set(['start', 'loop', 'parallel', 'variable'])
const SYSTEM_PREFIXES = new Set(['loop', 'parallel', 'variable'])

/**
* Formats text by highlighting block references (<...>) and environment variables ({{...}})

@@ -4,6 +4,7 @@ import {
DEFAULT_LAYOUT_PADDING,
DEFAULT_VERTICAL_SPACING,
} from '@/lib/workflows/autolayout/constants'
import { mergeSubblockState } from '@/stores/workflows/utils'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'

const logger = createLogger('AutoLayoutUtils')
@@ -109,10 +110,12 @@ export async function applyAutoLayoutAndUpdateStore(
return { success: false, error: errorMessage }
}

// Update workflow store immediately with new positions
const layoutedBlocks = result.data?.layoutedBlocks || blocks
const mergedBlocks = mergeSubblockState(layoutedBlocks, workflowId)

const newWorkflowState = {
...workflowStore.getWorkflowState(),
blocks: result.data?.layoutedBlocks || blocks,
blocks: mergedBlocks,
lastSaved: Date.now(),
}

@@ -167,9 +170,10 @@ export async function applyAutoLayoutAndUpdateStore(
})

// Revert the store changes since database save failed
const revertBlocks = mergeSubblockState(blocks, workflowId)
useWorkflowStore.getState().replaceWorkflowState({
...workflowStore.getWorkflowState(),
blocks,
blocks: revertBlocks,
lastSaved: workflowStore.lastSaved,
})


@@ -17,6 +17,7 @@ import {
ModalFooter,
ModalHeader,
Plus,
Skeleton,
UserPlus,
} from '@/components/emcn'
import { getDisplayPlanName, isFree } from '@/lib/billing/plan-helpers'
@@ -356,14 +357,16 @@ export function WorkspaceHeader({
}
}}
>
<div
className='flex h-[20px] w-[20px] flex-shrink-0 items-center justify-center rounded-sm font-medium text-caption text-white leading-none'
style={{
backgroundColor: activeWorkspaceFull?.color || 'var(--brand-accent)',
}}
>
{workspaceInitial}
</div>
{activeWorkspaceFull ? (
<div
className='flex h-[20px] w-[20px] flex-shrink-0 items-center justify-center rounded-sm font-medium text-caption text-white leading-none'
style={{ backgroundColor: activeWorkspaceFull.color ?? 'var(--brand-accent)' }}
>
{workspaceInitial}
</div>
) : (
<Skeleton className='h-[20px] w-[20px] flex-shrink-0 rounded-sm' />
)}
{!isCollapsed && (
<>
<span className='min-w-0 flex-1 truncate text-left font-base text-[var(--text-primary)] text-sm'>
@@ -400,14 +403,18 @@ export function WorkspaceHeader({
) : (
<>
<div className='flex items-center gap-2 px-0.5 py-0.5'>
<div
className='flex h-[32px] w-[32px] flex-shrink-0 items-center justify-center rounded-md font-medium text-caption text-white'
style={{
backgroundColor: activeWorkspaceFull?.color || 'var(--brand-accent)',
}}
>
{workspaceInitial}
</div>
{activeWorkspaceFull ? (
<div
className='flex h-[32px] w-[32px] flex-shrink-0 items-center justify-center rounded-md font-medium text-caption text-white'
style={{
backgroundColor: activeWorkspaceFull.color ?? 'var(--brand-accent)',
}}
>
{workspaceInitial}
</div>
) : (
<Skeleton className='h-[32px] w-[32px] flex-shrink-0 rounded-md' />
)}
<div className='flex min-w-0 flex-1 flex-col'>
<span className='truncate font-medium text-[var(--text-primary)] text-small'>
{activeWorkspace?.name || 'Loading...'}
@@ -580,12 +587,16 @@ export function WorkspaceHeader({
title={activeWorkspace?.name || 'Loading...'}
disabled
>
<div
className='flex h-[20px] w-[20px] flex-shrink-0 items-center justify-center rounded-sm font-medium text-caption text-white leading-none'
style={{ backgroundColor: activeWorkspaceFull?.color || 'var(--brand-accent)' }}
>
{workspaceInitial}
</div>
{activeWorkspaceFull ? (
<div
className='flex h-[20px] w-[20px] flex-shrink-0 items-center justify-center rounded-sm font-medium text-caption text-white leading-none'
style={{ backgroundColor: activeWorkspaceFull.color ?? 'var(--brand-accent)' }}
>
{workspaceInitial}
</div>
) : (
<Skeleton className='h-[20px] w-[20px] flex-shrink-0 rounded-sm' />
)}
{!isCollapsed && (
<>
<span className='min-w-0 flex-1 truncate text-left font-base text-[var(--text-primary)] text-sm'>

@@ -83,7 +83,7 @@ import {
useImportWorkflow,
useImportWorkspace,
} from '@/app/workspace/[workspaceId]/w/hooks'
import { getBrandConfig } from '@/ee/whitelabeling'
import { useOrgBrandConfig } from '@/ee/whitelabeling/components/branding-provider'
import { useFolderMap, useFolders } from '@/hooks/queries/folders'
import { useKnowledgeBasesQuery } from '@/hooks/queries/kb/knowledge'
import { useTablesList } from '@/hooks/queries/tables'
@@ -337,7 +337,7 @@ export const SIDEBAR_SCROLL_EVENT = 'sidebar-scroll-to-item'
* @returns Sidebar with workflows panel
*/
export const Sidebar = memo(function Sidebar() {
const brand = getBrandConfig()
const brand = useOrgBrandConfig()
const params = useParams()
const workspaceId = params.workspaceId as string
const workflowId = params.workflowId as string | undefined
@@ -1251,7 +1251,16 @@ export const Sidebar = memo(function Sidebar() {
tabIndex={isCollapsed ? -1 : undefined}
aria-label={brand.name}
>
{brand.logoUrl ? (
{brand.wordmarkUrl ? (
<Image
src={brand.wordmarkUrl}
alt={brand.name}
height={16}
width={80}
className='h-[16px] w-auto flex-shrink-0 object-contain object-left'
unoptimized
/>
) : brand.logoUrl ? (
<Image
src={brand.logoUrl}
alt={brand.name}

77
apps/sim/background/resume-execution.ts
Normal file
77
apps/sim/background/resume-execution.ts
Normal file
@@ -0,0 +1,77 @@
import { createLogger } from '@sim/logger'
import { task } from '@trigger.dev/sdk'
import { PauseResumeManager } from '@/lib/workflows/executor/human-in-the-loop-manager'

const logger = createLogger('TriggerResumeExecution')

export type ResumeExecutionPayload = {
resumeEntryId: string
resumeExecutionId: string
pausedExecutionId: string
contextId: string
resumeInput: unknown
userId: string
workflowId: string
parentExecutionId: string
}

export async function executeResumeJob(payload: ResumeExecutionPayload) {
const { resumeExecutionId, pausedExecutionId, contextId, workflowId, parentExecutionId } = payload

logger.info('Starting background resume execution', {
resumeExecutionId,
pausedExecutionId,
contextId,
workflowId,
parentExecutionId,
})

try {
const pausedExecution = await PauseResumeManager.getPausedExecutionById(pausedExecutionId)
if (!pausedExecution) {
throw new Error(`Paused execution not found: ${pausedExecutionId}`)
}

const result = await PauseResumeManager.startResumeExecution({
resumeEntryId: payload.resumeEntryId,
resumeExecutionId: payload.resumeExecutionId,
pausedExecution,
contextId: payload.contextId,
resumeInput: payload.resumeInput,
userId: payload.userId,
})

logger.info('Background resume execution completed', {
resumeExecutionId,
workflowId,
success: result.success,
status: result.status,
})

return {
success: result.success,
workflowId,
executionId: resumeExecutionId,
parentExecutionId,
status: result.status,
output: result.output,
executedAt: new Date().toISOString(),
}
} catch (error) {
logger.error('Background resume execution failed', {
resumeExecutionId,
workflowId,
error: error instanceof Error ? error.message : String(error),
})
throw error
}
}

export const resumeExecutionTask = task({
id: 'resume-execution',
machine: 'medium-1x',
retry: {
maxAttempts: 1,
},
run: executeResumeJob,
})
@@ -44,6 +44,7 @@ export type WorkflowExecutionPayload = {
correlation?: AsyncExecutionCorrelation
metadata?: Record<string, any>
callChain?: string[]
executionMode?: 'sync' | 'stream' | 'async'
}

/**
@@ -112,6 +113,7 @@ export async function executeWorkflowJob(payload: WorkflowExecutionPayload) {
isClientSession: false,
callChain: payload.callChain,
correlation,
executionMode: payload.executionMode ?? 'async',
}

const snapshot = new ExecutionSnapshot(

@@ -117,6 +117,7 @@ export const CloudFormationBlock: BlockConfig<
type: 'short-input',
placeholder: '50',
condition: { field: 'operation', value: 'describe_stack_events' },
mode: 'advanced',
},
],
tools: {

@@ -8,6 +8,7 @@ import type {
CloudWatchGetLogEventsResponse,
CloudWatchGetMetricStatisticsResponse,
CloudWatchListMetricsResponse,
CloudWatchPutMetricDataResponse,
CloudWatchQueryLogsResponse,
} from '@/tools/cloudwatch/types'

@@ -19,6 +20,7 @@ export const CloudWatchBlock: BlockConfig<
| CloudWatchDescribeAlarmsResponse
| CloudWatchListMetricsResponse
| CloudWatchGetMetricStatisticsResponse
| CloudWatchPutMetricDataResponse
> = {
type: 'cloudwatch',
name: 'CloudWatch',
@@ -27,6 +29,7 @@ export const CloudWatchBlock: BlockConfig<
'Integrate AWS CloudWatch into workflows. Run Log Insights queries, list log groups, retrieve log events, list and get metrics, and monitor alarms. Requires AWS access key and secret access key.',
category: 'tools',
integrationType: IntegrationType.Analytics,
docsLink: 'https://docs.sim.ai/tools/cloudwatch',
tags: ['cloud', 'monitoring'],
bgColor: 'linear-gradient(45deg, #B0084D 0%, #FF4F8B 100%)',
icon: CloudWatchIcon,
@@ -42,6 +45,7 @@ export const CloudWatchBlock: BlockConfig<
{ label: 'Describe Log Streams', id: 'describe_log_streams' },
{ label: 'List Metrics', id: 'list_metrics' },
{ label: 'Get Metric Statistics', id: 'get_metric_statistics' },
{ label: 'Publish Metric', id: 'put_metric_data' },
{ label: 'Describe Alarms', id: 'describe_alarms' },
],
value: () => 'query_logs',
@@ -69,7 +73,6 @@ export const CloudWatchBlock: BlockConfig<
password: true,
required: true,
},
// Query Logs fields
{
id: 'logGroupSelector',
title: 'Log Group',
@@ -124,6 +127,14 @@ Return ONLY the query — no explanations, no markdown code blocks.`,
value: ['query_logs', 'get_log_events', 'get_metric_statistics'],
},
required: { field: 'operation', value: ['query_logs', 'get_metric_statistics'] },
wandConfig: {
enabled: true,
prompt: `Generate a Unix epoch timestamp (in seconds) based on the user's description of a point in time.

Return ONLY the numeric timestamp - no explanations, no quotes, no extra text.`,
placeholder: 'Describe the start time (e.g., "1 hour ago", "beginning of today")...',
generationType: 'timestamp',
},
},
{
id: 'endTime',
@@ -135,8 +146,15 @@ Return ONLY the query — no explanations, no markdown code blocks.`,
value: ['query_logs', 'get_log_events', 'get_metric_statistics'],
},
required: { field: 'operation', value: ['query_logs', 'get_metric_statistics'] },
wandConfig: {
enabled: true,
prompt: `Generate a Unix epoch timestamp (in seconds) based on the user's description of a point in time.

Return ONLY the numeric timestamp - no explanations, no quotes, no extra text.`,
placeholder: 'Describe the end time (e.g., "now", "end of yesterday")...',
generationType: 'timestamp',
},
},
// Describe Log Groups fields
{
id: 'prefix',
title: 'Log Group Name Prefix',
@@ -144,7 +162,6 @@ Return ONLY the query — no explanations, no markdown code blocks.`,
placeholder: '/aws/lambda/',
condition: { field: 'operation', value: 'describe_log_groups' },
},
// Get Log Events / Describe Log Streams — shared log group selector
{
id: 'logGroupNameSelector',
title: 'Log Group',
@@ -167,7 +184,6 @@ Return ONLY the query — no explanations, no markdown code blocks.`,
required: { field: 'operation', value: ['get_log_events', 'describe_log_streams'] },
mode: 'advanced',
},
// Describe Log Streams — stream prefix filter
{
id: 'streamPrefix',
title: 'Stream Name Prefix',
@@ -175,7 +191,6 @@ Return ONLY the query — no explanations, no markdown code blocks.`,
placeholder: '2024/03/31/',
condition: { field: 'operation', value: 'describe_log_streams' },
},
// Get Log Events — log stream selector (cascading: depends on log group)
{
id: 'logStreamNameSelector',
title: 'Log Stream',
@@ -198,30 +213,92 @@ Return ONLY the query — no explanations, no markdown code blocks.`,
required: { field: 'operation', value: 'get_log_events' },
mode: 'advanced',
},
// List Metrics fields
{
id: 'metricNamespace',
title: 'Namespace',
type: 'short-input',
placeholder: 'e.g., AWS/EC2, AWS/Lambda, AWS/RDS',
condition: { field: 'operation', value: ['list_metrics', 'get_metric_statistics'] },
required: { field: 'operation', value: 'get_metric_statistics' },
placeholder: 'e.g., AWS/EC2, AWS/Lambda, Custom/MyApp',
condition: {
field: 'operation',
value: ['list_metrics', 'get_metric_statistics', 'put_metric_data'],
},
required: {
field: 'operation',
value: ['get_metric_statistics', 'put_metric_data'],
},
},
{
id: 'metricName',
title: 'Metric Name',
type: 'short-input',
placeholder: 'e.g., CPUUtilization, Invocations',
condition: { field: 'operation', value: ['list_metrics', 'get_metric_statistics'] },
required: { field: 'operation', value: 'get_metric_statistics' },
placeholder: 'e.g., CPUUtilization, Invocations, ErrorCount',
condition: {
field: 'operation',
value: ['list_metrics', 'get_metric_statistics', 'put_metric_data'],
},
required: {
field: 'operation',
value: ['get_metric_statistics', 'put_metric_data'],
},
},
{
id: 'recentlyActive',
title: 'Recently Active Only',
type: 'switch',
condition: { field: 'operation', value: 'list_metrics' },
mode: 'advanced',
},
{
id: 'metricValue',
title: 'Value',
type: 'short-input',
placeholder: 'e.g., 1, 42.5',
condition: { field: 'operation', value: 'put_metric_data' },
required: { field: 'operation', value: 'put_metric_data' },
},
{
id: 'metricUnit',
title: 'Unit',
type: 'dropdown',
options: [
{ label: 'None', id: 'None' },
{ label: 'Count', id: 'Count' },
{ label: 'Percent', id: 'Percent' },
{ label: 'Seconds', id: 'Seconds' },
{ label: 'Milliseconds', id: 'Milliseconds' },
{ label: 'Microseconds', id: 'Microseconds' },
{ label: 'Bytes', id: 'Bytes' },
{ label: 'Kilobytes', id: 'Kilobytes' },
{ label: 'Megabytes', id: 'Megabytes' },
{ label: 'Gigabytes', id: 'Gigabytes' },
{ label: 'Terabytes', id: 'Terabytes' },
{ label: 'Bits', id: 'Bits' },
{ label: 'Kilobits', id: 'Kilobits' },
{ label: 'Megabits', id: 'Megabits' },
{ label: 'Gigabits', id: 'Gigabits' },
{ label: 'Terabits', id: 'Terabits' },
{ label: 'Bytes/Second', id: 'Bytes/Second' },
{ label: 'Kilobytes/Second', id: 'Kilobytes/Second' },
{ label: 'Megabytes/Second', id: 'Megabytes/Second' },
{ label: 'Gigabytes/Second', id: 'Gigabytes/Second' },
{ label: 'Terabytes/Second', id: 'Terabytes/Second' },
{ label: 'Bits/Second', id: 'Bits/Second' },
{ label: 'Kilobits/Second', id: 'Kilobits/Second' },
{ label: 'Megabits/Second', id: 'Megabits/Second' },
{ label: 'Gigabits/Second', id: 'Gigabits/Second' },
{ label: 'Terabits/Second', id: 'Terabits/Second' },
{ label: 'Count/Second', id: 'Count/Second' },
],
value: () => 'None',
condition: { field: 'operation', value: 'put_metric_data' },
},
{
id: 'publishDimensions',
title: 'Dimensions',
type: 'table',
columns: ['name', 'value'],
condition: { field: 'operation', value: 'put_metric_data' },
},
// Get Metric Statistics fields
{
id: 'metricPeriod',
title: 'Period (seconds)',
@@ -251,7 +328,6 @@ Return ONLY the query — no explanations, no markdown code blocks.`,
columns: ['name', 'value'],
condition: { field: 'operation', value: 'get_metric_statistics' },
},
// Describe Alarms fields
{
id: 'alarmNamePrefix',
title: 'Alarm Name Prefix',
@@ -269,6 +345,7 @@ Return ONLY the query — no explanations, no markdown code blocks.`,
{ label: 'ALARM', id: 'ALARM' },
{ label: 'INSUFFICIENT_DATA', id: 'INSUFFICIENT_DATA' },
],
value: () => '',
condition: { field: 'operation', value: 'describe_alarms' },
},
{
@@ -280,9 +357,9 @@ Return ONLY the query — no explanations, no markdown code blocks.`,
{ label: 'Metric Alarm', id: 'MetricAlarm' },
{ label: 'Composite Alarm', id: 'CompositeAlarm' },
],
value: () => '',
condition: { field: 'operation', value: 'describe_alarms' },
},
// Shared limit field
{
id: 'limit',
title: 'Limit',
@@ -299,6 +376,7 @@ Return ONLY the query — no explanations, no markdown code blocks.`,
'describe_alarms',
],
},
mode: 'advanced',
},
],
tools: {
@@ -309,6 +387,7 @@ Return ONLY the query — no explanations, no markdown code blocks.`,
'cloudwatch_describe_log_streams',
'cloudwatch_list_metrics',
'cloudwatch_get_metric_statistics',
'cloudwatch_put_metric_data',
'cloudwatch_describe_alarms',
],
config: {
@@ -326,6 +405,8 @@ Return ONLY the query — no explanations, no markdown code blocks.`,
return 'cloudwatch_list_metrics'
case 'get_metric_statistics':
return 'cloudwatch_get_metric_statistics'
case 'put_metric_data':
return 'cloudwatch_put_metric_data'
case 'describe_alarms':
return 'cloudwatch_describe_alarms'
default:
@@ -479,6 +560,48 @@ Return ONLY the query — no explanations, no markdown code blocks.`,
}
}

case 'put_metric_data': {
if (!rest.metricNamespace) {
throw new Error('Namespace is required')
}
if (!rest.metricName) {
throw new Error('Metric name is required')
}
if (rest.metricValue === undefined || rest.metricValue === '') {
throw new Error('Metric value is required')
}
const numericValue = Number(rest.metricValue)
if (!Number.isFinite(numericValue)) {
throw new Error('Metric value must be a finite number')
}

return {
awsRegion,
awsAccessKeyId,
awsSecretAccessKey,
namespace: rest.metricNamespace,
metricName: rest.metricName,
value: numericValue,
...(rest.metricUnit && rest.metricUnit !== 'None' && { unit: rest.metricUnit }),
...(rest.publishDimensions && {
dimensions: (() => {
const dims = rest.publishDimensions
if (typeof dims === 'string') return dims
if (Array.isArray(dims)) {
const obj: Record<string, string> = {}
for (const row of dims) {
const name = row.cells?.name
const value = row.cells?.value
if (name && value !== undefined) obj[name] = String(value)
}
return JSON.stringify(obj)
}
return JSON.stringify(dims)
})(),
}),
}
}

case 'describe_alarms':
return {
awsRegion,
@@ -518,6 +641,12 @@ Return ONLY the query — no explanations, no markdown code blocks.`,
metricPeriod: { type: 'number', description: 'Granularity in seconds' },
metricStatistics: { type: 'string', description: 'Statistic type (Average, Sum, etc.)' },
metricDimensions: { type: 'json', description: 'Metric dimensions (Name/Value pairs)' },
metricValue: { type: 'number', description: 'Metric value to publish' },
metricUnit: { type: 'string', description: 'Metric unit (Count, Seconds, Bytes, etc.)' },
publishDimensions: {
type: 'json',
description: 'Dimensions for published metric (Name/Value pairs)',
},
alarmNamePrefix: { type: 'string', description: 'Alarm name prefix filter' },
stateValue: {
type: 'string',
@@ -567,5 +696,29 @@ Return ONLY the query — no explanations, no markdown code blocks.`,
type: 'array',
description: 'CloudWatch alarms with state and configuration',
},
success: {
type: 'boolean',
description: 'Whether the published metric was successful',
},
namespace: {
type: 'string',
description: 'Metric namespace',
},
metricName: {
type: 'string',
description: 'Metric name',
},
value: {
type: 'number',
description: 'Published metric value',
},
unit: {
type: 'string',
description: 'Metric unit',
},
timestamp: {
type: 'string',
description: 'Timestamp when metric was published',
},
},
}

@@ -44,6 +44,9 @@ export const JiraServiceManagementBlock: BlockConfig<JsmResponse> = {
{ label: 'Get Approvals', id: 'get_approvals' },
{ label: 'Answer Approval', id: 'answer_approval' },
{ label: 'Get Request Type Fields', id: 'get_request_type_fields' },
{ label: 'Get Form Templates', id: 'get_form_templates' },
{ label: 'Get Form Structure', id: 'get_form_structure' },
{ label: 'Get Issue Forms', id: 'get_issue_forms' },
],
value: () => 'get_service_desks',
},
@@ -191,14 +194,30 @@ export const JiraServiceManagementBlock: BlockConfig<JsmResponse> = {
'add_participants',
'get_approvals',
'answer_approval',
'get_issue_forms',
],
},
},
{
id: 'projectIdOrKey',
title: 'Project ID or Key',
type: 'short-input',
required: { field: 'operation', value: ['get_form_templates', 'get_form_structure'] },
placeholder: 'Enter Jira project ID or key (e.g., 10001 or SD)',
condition: { field: 'operation', value: ['get_form_templates', 'get_form_structure'] },
},
{
id: 'formId',
title: 'Form ID',
type: 'short-input',
required: true,
placeholder: 'Enter form ID (UUID from Get Form Templates)',
condition: { field: 'operation', value: 'get_form_structure' },
},
{
id: 'summary',
title: 'Summary',
type: 'short-input',
required: true,
placeholder: 'Enter request summary',
condition: { field: 'operation', value: 'create_request' },
wandConfig: {
@@ -238,6 +257,7 @@ Return ONLY the description text - no explanations.`,
title: 'Raise on Behalf Of',
type: 'short-input',
placeholder: 'Account ID to raise request on behalf of',
mode: 'advanced',
condition: { field: 'operation', value: 'create_request' },
},
{
@@ -245,6 +265,7 @@ Return ONLY the description text - no explanations.`,
title: 'Request Participants',
type: 'short-input',
placeholder: 'Comma-separated account IDs to add as participants',
mode: 'advanced',
condition: { field: 'operation', value: 'create_request' },
},
{
@@ -252,6 +273,7 @@ Return ONLY the description text - no explanations.`,
title: 'Channel',
type: 'short-input',
placeholder: 'Channel (e.g., portal, email)',
mode: 'advanced',
condition: { field: 'operation', value: 'create_request' },
},
{
@@ -260,6 +282,16 @@ Return ONLY the description text - no explanations.`,
type: 'long-input',
placeholder:
'JSON object of field values (e.g., {"summary": "Title", "customfield_10010": "value"})',
mode: 'advanced',
condition: { field: 'operation', value: 'create_request' },
},
{
id: 'formAnswers',
title: 'Form Answers',
type: 'long-input',
placeholder:
'JSON object for form-based request types (e.g., {"summary": {"text": "Title"}, "customfield_10010": {"choices": ["10320"]}})',
mode: 'advanced',
condition: { field: 'operation', value: 'create_request' },
},
{
@@ -491,6 +523,9 @@ Return ONLY the comment text - no explanations.`,
'jsm_get_approvals',
'jsm_answer_approval',
'jsm_get_request_type_fields',
'jsm_get_form_templates',
'jsm_get_form_structure',
'jsm_get_issue_forms',
],
config: {
tool: (params) => {
@@ -537,6 +572,12 @@ Return ONLY the comment text - no explanations.`,
return 'jsm_answer_approval'
case 'get_request_type_fields':
return 'jsm_get_request_type_fields'
case 'get_form_templates':
return 'jsm_get_form_templates'
case 'get_form_structure':
return 'jsm_get_form_structure'
case 'get_issue_forms':
return 'jsm_get_issue_forms'
default:
return 'jsm_get_service_desks'
}
@@ -571,8 +612,8 @@ Return ONLY the comment text - no explanations.`,
if (!params.requestTypeId) {
throw new Error('Request Type ID is required')
}
if (!params.summary) {
throw new Error('Summary is required')
if (!params.summary && !params.formAnswers) {
throw new Error('Summary is required (unless using Form Answers)')
}
return {
...baseParams,
@@ -584,7 +625,22 @@ Return ONLY the comment text - no explanations.`,
requestParticipants: params.requestParticipants,
channel: params.channel,
requestFieldValues: params.requestFieldValues
? JSON.parse(params.requestFieldValues)
? (() => {
try {
return JSON.parse(params.requestFieldValues)
} catch {
throw new Error('requestFieldValues must be valid JSON')
}
})()
: undefined,
formAnswers: params.formAnswers
? (() => {
try {
return JSON.parse(params.formAnswers)
} catch {
throw new Error('formAnswers must be valid JSON')
}
})()
: undefined,
}
case 'get_request':
@@ -781,6 +837,34 @@ Return ONLY the comment text - no explanations.`,
serviceDeskId: params.serviceDeskId,
requestTypeId: params.requestTypeId,
}
case 'get_form_templates':
if (!params.projectIdOrKey) {
throw new Error('Project ID or key is required')
}
return {
...baseParams,
projectIdOrKey: params.projectIdOrKey,
}
case 'get_form_structure':
if (!params.projectIdOrKey) {
throw new Error('Project ID or key is required')
}
if (!params.formId) {
throw new Error('Form ID is required')
}
return {
...baseParams,
projectIdOrKey: params.projectIdOrKey,
formId: params.formId,
}
case 'get_issue_forms':
if (!params.issueIdOrKey) {
throw new Error('Issue ID or key is required')
}
return {
...baseParams,
issueIdOrKey: params.issueIdOrKey,
}
default:
return baseParams
}
@@ -826,6 +910,12 @@ Return ONLY the comment text - no explanations.`,
},
channel: { type: 'string', description: 'Channel (e.g., portal, email)' },
requestFieldValues: { type: 'string', description: 'JSON object of request field values' },
formAnswers: {
type: 'string',
description: 'JSON object of form answers for form-based request types',
},
projectIdOrKey: { type: 'string', description: 'Jira project ID or key' },
formId: { type: 'string', description: 'Form ID (UUID)' },
searchQuery: { type: 'string', description: 'Filter request types by name' },
groupId: { type: 'string', description: 'Filter by request type group ID' },
expand: { type: 'string', description: 'Comma-separated fields to expand' },
@@ -868,5 +958,25 @@ Return ONLY the comment text - no explanations.`,
type: 'boolean',
description: 'Whether requests can be raised on behalf of another user',
},
templates: {
type: 'json',
description:
'Array of form templates (id, name, updated, portalRequestTypeIds, issueCreateIssueTypeIds)',
},
design: {
type: 'json',
description:
'Full form design with questions (labels, types, choices, validation), layout, conditions, sections, settings',
},
publish: {
type: 'json',
description: 'Form publishing and request type configuration',
},
updated: { type: 'string', description: 'Last updated timestamp' },
forms: {
type: 'json',
description:
'Array of forms attached to an issue (id, name, updated, submitted, lock, internal, formTemplateId)',
},
},
}

@@ -2,6 +2,7 @@ import { ServiceNowIcon } from '@/components/icons'
import type { BlockConfig } from '@/blocks/types'
import { IntegrationType } from '@/blocks/types'
import type { ServiceNowResponse } from '@/tools/servicenow/types'
import { getTrigger } from '@/triggers'

export const ServiceNowBlock: BlockConfig<ServiceNowResponse> = {
type: 'servicenow',
@@ -215,6 +216,11 @@ Output: {"state": "2", "assigned_to": "john.doe", "work_notes": "Assigned and st
condition: { field: 'operation', value: 'servicenow_delete_record' },
required: true,
},
...getTrigger('servicenow_incident_created').subBlocks,
...getTrigger('servicenow_incident_updated').subBlocks,
...getTrigger('servicenow_change_request_created').subBlocks,
...getTrigger('servicenow_change_request_updated').subBlocks,
...getTrigger('servicenow_webhook').subBlocks,
],
tools: {
access: [
@@ -262,4 +268,14 @@ Output: {"state": "2", "assigned_to": "john.doe", "work_notes": "Assigned and st
success: { type: 'boolean', description: 'Operation success status' },
metadata: { type: 'json', description: 'Operation metadata' },
},
triggers: {
enabled: true,
available: [
'servicenow_incident_created',
'servicenow_incident_updated',
'servicenow_change_request_created',
'servicenow_change_request_updated',
'servicenow_webhook',
],
},
}

@@ -1,4 +1,4 @@
import { render } from '@react-email/components'
import { render } from '@react-email/render'
import {
OnboardingFollowupEmail,
OTPVerificationEmail,

@@ -20,7 +20,7 @@ const buttonVariants = cva(
'bg-[var(--text-error)] text-white hover-hover:text-white hover-hover:brightness-106',
secondary: 'bg-[var(--brand-secondary)] text-[var(--text-primary)]',
tertiary:
'bg-[var(--brand-accent)] text-[var(--text-inverse)] hover-hover:text-[var(--text-inverse)] hover-hover:bg-[#2DAC72] dark:bg-[var(--brand-accent)] dark:hover-hover:bg-[#2DAC72] dark:text-[var(--text-inverse)] dark:hover-hover:text-[var(--text-inverse)]',
'bg-[var(--brand-accent)] text-[var(--text-inverse)] hover-hover:text-[var(--text-inverse)] hover-hover:bg-[var(--brand-accent-hover)] dark:bg-[var(--brand-accent)] dark:hover-hover:bg-[var(--brand-accent-hover)] dark:text-[var(--text-inverse)] dark:hover-hover:text-[var(--text-inverse)]',
ghost: 'text-[var(--text-secondary)] hover-hover:text-[var(--text-primary)]',
subtle:
'text-[var(--text-body)] hover-hover:text-[var(--text-body)] hover-hover:bg-[var(--surface-4)]',

@@ -42,20 +42,20 @@ const Trigger = TooltipPrimitive.Trigger
const Content = React.forwardRef<
React.ElementRef<typeof TooltipPrimitive.Content>,
React.ComponentPropsWithoutRef<typeof TooltipPrimitive.Content>
>(({ className, sideOffset = 6, ...props }, ref) => (
>(({ className, sideOffset = 6, children, ...props }, ref) => (
<TooltipPrimitive.Portal>
<TooltipPrimitive.Content
ref={ref}
sideOffset={sideOffset}
collisionPadding={8}
avoidCollisions={true}
avoidCollisions
className={cn(
'z-[var(--z-tooltip)] max-w-[260px] rounded-[4px] bg-[var(--tooltip-bg)] px-2 py-[3.5px] font-base text-white text-xs shadow-sm dark:text-black',
className
)}
{...props}
>
{props.children}
{children}
<TooltipPrimitive.Arrow className='fill-[var(--tooltip-bg)]' />
</TooltipPrimitive.Content>
</TooltipPrimitive.Portal>
@@ -120,22 +120,35 @@ const VIDEO_EXTENSIONS = ['.mp4', '.webm', '.ogg', '.mov'] as const
const Preview = ({ src, alt = '', width = 240, height, loop = true, className }: PreviewProps) => {
const pathname = src.toLowerCase().split('?')[0].split('#')[0]
const isVideo = VIDEO_EXTENSIONS.some((ext) => pathname.endsWith(ext))
const [isReady, setIsReady] = React.useState(!isVideo)

return (
<div className={cn('-mx-2 -mb-[3.5px] mt-1 overflow-hidden rounded-b-[4px]', className)}>
<div className={cn('-mx-[6px] -mb-[1.5px] mt-1.5 overflow-hidden rounded-[4px]', className)}>
{isVideo ? (
<video
src={src}
width={width}
height={height}
className='block w-full'
autoPlay
loop={loop}
muted
playsInline
preload='none'
aria-label={alt}
/>
<div className='relative'>
{!isReady && (
<div
className='animate-pulse bg-white/5'
style={{ aspectRatio: height ? `${width}/${height}` : '16/9' }}
/>
)}
<video
src={src}
width={width}
height={height}
className={cn(
'block w-full transition-opacity duration-200',
isReady ? 'opacity-100' : 'absolute inset-0 opacity-0'
)}
autoPlay
loop={loop}
muted
playsInline
preload='auto'
aria-label={alt}
onCanPlay={() => setIsReady(true)}
/>
</div>
) : (
<img
src={src}

@@ -26,12 +26,14 @@ export const getBrandConfig = (): BrandConfig => {
const hasCustomBrand = Boolean(
getEnv('NEXT_PUBLIC_BRAND_NAME') ||
getEnv('NEXT_PUBLIC_BRAND_LOGO_URL') ||
getEnv('NEXT_PUBLIC_BRAND_WORDMARK_URL') ||
getEnv('NEXT_PUBLIC_BRAND_PRIMARY_COLOR')
)

return {
name: getEnv('NEXT_PUBLIC_BRAND_NAME') || defaultBrandConfig.name,
logoUrl: getEnv('NEXT_PUBLIC_BRAND_LOGO_URL') || defaultBrandConfig.logoUrl,
wordmarkUrl: getEnv('NEXT_PUBLIC_BRAND_WORDMARK_URL') || defaultBrandConfig.wordmarkUrl,
faviconUrl: getEnv('NEXT_PUBLIC_BRAND_FAVICON_URL') || defaultBrandConfig.faviconUrl,
customCssUrl: getEnv('NEXT_PUBLIC_CUSTOM_CSS_URL') || defaultBrandConfig.customCssUrl,
supportEmail: getEnv('NEXT_PUBLIC_SUPPORT_EMAIL') || defaultBrandConfig.supportEmail,

64
apps/sim/ee/whitelabeling/components/branding-provider.tsx
Normal file
64
apps/sim/ee/whitelabeling/components/branding-provider.tsx
Normal file
@@ -0,0 +1,64 @@
'use client'

import { createContext, useContext, useMemo } from 'react'
import type { BrandConfig, OrganizationWhitelabelSettings } from '@/lib/branding/types'
import { getBrandConfig } from '@/ee/whitelabeling/branding'
import { useWhitelabelSettings } from '@/ee/whitelabeling/hooks/whitelabel'
import { generateOrgThemeCSS, mergeOrgBrandConfig } from '@/ee/whitelabeling/org-branding-utils'
import { useOrganizations } from '@/hooks/queries/organization'

interface BrandingContextValue {
config: BrandConfig
}

const BrandingContext = createContext<BrandingContextValue>({
config: getBrandConfig(),
})

interface BrandingProviderProps {
children: React.ReactNode
/**
* Org whitelabel settings fetched server-side from the DB by the workspace layout.
* Used as the source of truth until the React Query result becomes available,
* ensuring the correct org logo appears in the initial server HTML — no flash.
*/
initialOrgSettings?: OrganizationWhitelabelSettings | null
}

/**
* Provides merged branding (instance env vars + org DB settings) to the workspace.
* Injects a `<style>` tag with CSS variable overrides when org colors are configured.
*/
export function BrandingProvider({ children, initialOrgSettings }: BrandingProviderProps) {
const { data: orgsData } = useOrganizations()
const orgId = orgsData?.activeOrganization?.id
const { data: orgSettings } = useWhitelabelSettings(orgId)

const effectiveOrgSettings =
orgSettings !== undefined ? orgSettings : (initialOrgSettings ?? null)

const brandConfig = useMemo(
() => mergeOrgBrandConfig(effectiveOrgSettings, getBrandConfig()),
[effectiveOrgSettings]
)

const themeCSS = useMemo(
() => (effectiveOrgSettings ? generateOrgThemeCSS(effectiveOrgSettings) : ''),
[effectiveOrgSettings]
)

return (
<BrandingContext.Provider value={{ config: brandConfig }}>
{themeCSS && <style>{themeCSS}</style>}
{children}
</BrandingContext.Provider>
)
}

/**
* Returns the merged brand config (org settings overlaid on instance defaults).
* Use this inside the workspace instead of `getBrandConfig()`.
*/
export function useOrgBrandConfig(): BrandConfig {
return useContext(BrandingContext).config
}
536
apps/sim/ee/whitelabeling/components/whitelabeling-settings.tsx
Normal file
536
apps/sim/ee/whitelabeling/components/whitelabeling-settings.tsx
Normal file
@@ -0,0 +1,536 @@
'use client'

import { useCallback, useState } from 'react'
import { createLogger } from '@sim/logger'
import { Loader2, X } from 'lucide-react'
import Image from 'next/image'
import { Button, Input, Label, Switch } from '@/components/emcn'
import { useSession } from '@/lib/auth/auth-client'
import { getSubscriptionAccessState } from '@/lib/billing/client/utils'
import { HEX_COLOR_REGEX } from '@/lib/branding'
import { isBillingEnabled } from '@/lib/core/config/feature-flags'
import { cn } from '@/lib/core/utils/cn'
import { getUserRole } from '@/lib/workspaces/organization/utils'
import { useProfilePictureUpload } from '@/app/workspace/[workspaceId]/settings/hooks/use-profile-picture-upload'
import {
useUpdateWhitelabelSettings,
useWhitelabelSettings,
type WhitelabelSettingsPayload,
} from '@/ee/whitelabeling/hooks/whitelabel'
import { useOrganizations } from '@/hooks/queries/organization'
import { useSubscriptionData } from '@/hooks/queries/subscription'

const logger = createLogger('WhitelabelingSettings')

interface DropZoneProps {
onDrop: (e: React.DragEvent) => void
children: React.ReactNode
className?: string
}

function DropZone({ onDrop, children, className }: DropZoneProps) {
const [isDragging, setIsDragging] = useState(false)

const handleDragOver = useCallback((e: React.DragEvent) => {
if (e.dataTransfer.types.includes('Files')) {
e.preventDefault()
setIsDragging(true)
}
}, [])

const handleDragLeave = useCallback((e: React.DragEvent) => {
if (!e.currentTarget.contains(e.relatedTarget as Node)) {
setIsDragging(false)
}
}, [])

const handleDrop = useCallback(
(e: React.DragEvent) => {
setIsDragging(false)
onDrop(e)
},
[onDrop]
)

return (
<div
className={cn('relative', className)}
onDragOver={handleDragOver}
onDragLeave={handleDragLeave}
onDrop={handleDrop}
>
{children}
{isDragging && (
<div className='pointer-events-none absolute inset-0 z-10 flex items-center justify-center rounded-lg border-[1.5px] border-[var(--brand)] border-dashed bg-[color-mix(in_srgb,var(--brand)_8%,transparent)]'>
<span className='font-medium text-[12px] text-[var(--brand)]'>Drop image</span>
</div>
)}
</div>
)
}

interface ColorInputProps {
label: string
value: string
onChange: (value: string) => void
placeholder?: string
}

function ColorInput({ label, value, onChange, placeholder = '#000000' }: ColorInputProps) {
const isValidHex = !value || HEX_COLOR_REGEX.test(value)

return (
<div className='flex flex-col gap-1.5'>
<Label className='text-[13px] text-[var(--text-primary)]'>{label}</Label>
<div className='flex items-center gap-2'>
<div className='relative flex h-[36px] w-[36px] shrink-0 items-center justify-center overflow-hidden rounded-md border border-[var(--border)] bg-[var(--surface-2)]'>
{value && isValidHex ? (
<div className='h-full w-full rounded-md' style={{ backgroundColor: value }} />
) : (
<div className='h-full w-full rounded-md bg-[var(--surface-3)]' />
)}
</div>
<Input
value={value}
onChange={(e) => onChange(e.target.value)}
placeholder={placeholder}
className={cn(
'h-[36px] font-mono text-[13px]',
!isValidHex && 'border-red-500 focus-visible:ring-red-500'
)}
maxLength={7}
/>
</div>
{!isValidHex && (
<p className='text-[12px] text-red-500'>Must be a valid hex color (e.g. #701ffc)</p>
)}
</div>
)
}

interface SettingRowProps {
label: string
description?: string
children: React.ReactNode
}

function SettingRow({ label, description, children }: SettingRowProps) {
return (
<div className='flex flex-col gap-1.5'>
<Label className='text-[13px] text-[var(--text-primary)]'>{label}</Label>
{description && <p className='text-[12px] text-[var(--text-muted)]'>{description}</p>}
{children}
</div>
)
}

function SectionTitle({ children }: { children: React.ReactNode }) {
return <h3 className='mb-4 font-medium text-[15px] text-[var(--text-primary)]'>{children}</h3>
}

export function WhitelabelingSettings() {
const { data: session } = useSession()
const { data: orgsData } = useOrganizations()
const { data: subscriptionData } = useSubscriptionData()

const activeOrganization = orgsData?.activeOrganization
const orgId = activeOrganization?.id

const { data: savedSettings, isLoading } = useWhitelabelSettings(orgId)
const updateSettings = useUpdateWhitelabelSettings()

const userEmail = session?.user?.email
const userRole = getUserRole(activeOrganization, userEmail)
const canManage = userRole === 'owner' || userRole === 'admin'
const subscriptionAccess = getSubscriptionAccessState(subscriptionData?.data)
const hasEnterprisePlan = subscriptionAccess.hasUsableEnterpriseAccess

const [brandName, setBrandName] = useState('')
const [primaryColor, setPrimaryColor] = useState('')
const [primaryHoverColor, setPrimaryHoverColor] = useState('')
const [accentColor, setAccentColor] = useState('')
const [accentHoverColor, setAccentHoverColor] = useState('')
const [supportEmail, setSupportEmail] = useState('')
const [documentationUrl, setDocumentationUrl] = useState('')
const [termsUrl, setTermsUrl] = useState('')
const [privacyUrl, setPrivacyUrl] = useState('')
const [hidePoweredBySim, setHidePoweredBySim] = useState(false)
const [logoUrl, setLogoUrl] = useState<string | null>(null)
const [wordmarkUrl, setWordmarkUrl] = useState<string | null>(null)
const [formInitialized, setFormInitialized] = useState(false)

const [saveError, setSaveError] = useState<string | null>(null)
const [saveSuccess, setSaveSuccess] = useState(false)

if (savedSettings && !formInitialized) {
setBrandName(savedSettings.brandName ?? '')
setPrimaryColor(savedSettings.primaryColor ?? '')
setPrimaryHoverColor(savedSettings.primaryHoverColor ?? '')
setAccentColor(savedSettings.accentColor ?? '')
setAccentHoverColor(savedSettings.accentHoverColor ?? '')
setSupportEmail(savedSettings.supportEmail ?? '')
setDocumentationUrl(savedSettings.documentationUrl ?? '')
setTermsUrl(savedSettings.termsUrl ?? '')
setPrivacyUrl(savedSettings.privacyUrl ?? '')
setHidePoweredBySim(savedSettings.hidePoweredBySim ?? false)
setLogoUrl(savedSettings.logoUrl ?? null)
setWordmarkUrl(savedSettings.wordmarkUrl ?? null)
setFormInitialized(true)
}

const logoUpload = useProfilePictureUpload({
currentImage: logoUrl,
onUpload: (url) => setLogoUrl(url),
onError: (error) => setSaveError(error),
})

const wordmarkUpload = useProfilePictureUpload({
currentImage: wordmarkUrl,
onUpload: (url) => setWordmarkUrl(url),
onError: (error) => setSaveError(error),
})

const handleSave = useCallback(async () => {
if (!orgId) return

setSaveError(null)
setSaveSuccess(false)

const colorFields: Array<[string, string]> = [
['Primary color', primaryColor],
['Primary hover color', primaryHoverColor],
['Accent color', accentColor],
['Accent hover color', accentHoverColor],
]

for (const [fieldName, value] of colorFields) {
if (value && !HEX_COLOR_REGEX.test(value)) {
setSaveError(`${fieldName} must be a valid hex color (e.g. #701ffc)`)
return
}
}

const settings: WhitelabelSettingsPayload = {
brandName: brandName || null,
logoUrl: logoUpload.previewUrl || null,
wordmarkUrl: wordmarkUpload.previewUrl || null,
primaryColor: primaryColor || null,
primaryHoverColor: primaryHoverColor || null,
accentColor: accentColor || null,
accentHoverColor: accentHoverColor || null,
supportEmail: supportEmail || null,
documentationUrl: documentationUrl || null,
termsUrl: termsUrl || null,
privacyUrl: privacyUrl || null,
hidePoweredBySim,
}

try {
await updateSettings.mutateAsync({ orgId, settings })
setSaveSuccess(true)
setTimeout(() => setSaveSuccess(false), 3000)
} catch (error) {
logger.error('Failed to save whitelabel settings', { error })
setSaveError(error instanceof Error ? error.message : 'Failed to save settings')
}
}, [
orgId,
brandName,
logoUpload.previewUrl,
wordmarkUpload.previewUrl,
primaryColor,
primaryHoverColor,
accentColor,
accentHoverColor,
supportEmail,
documentationUrl,
termsUrl,
privacyUrl,
hidePoweredBySim,
])

if (isBillingEnabled) {
if (!activeOrganization) {
return (
<div className='flex h-full items-center justify-center text-[var(--text-muted)] text-sm'>
You must be part of an organization to configure whitelabeling.
</div>
)
}

if (!hasEnterprisePlan) {
return (
<div className='flex h-full items-center justify-center text-[var(--text-muted)] text-sm'>
Whitelabeling is available on Enterprise plans only.
</div>
)
}

if (!canManage) {
return (
<div className='flex h-full items-center justify-center text-[var(--text-muted)] text-sm'>
Only organization owners and admins can configure whitelabeling settings.
</div>
)
}
}

if (isLoading) {
return (
<div className='flex flex-col gap-8'>
{[...Array(3)].map((_, i) => (
<div key={i} className='flex flex-col gap-3'>
<div className='h-4 w-32 animate-pulse rounded bg-[var(--surface-3)]' />
<div className='h-9 w-full animate-pulse rounded-lg bg-[var(--surface-3)]' />
</div>
))}
</div>
)
}

const isUploading = logoUpload.isUploading || wordmarkUpload.isUploading

return (
<div className='flex flex-col gap-8'>
<section>
<SectionTitle>Brand Identity</SectionTitle>
<div className='flex flex-col gap-5'>
<div className='grid grid-cols-2 gap-4'>
<SettingRow
label='Logo'
description='Shown in the collapsed sidebar. Square image recommended (PNG, JPEG, or SVG, max 5MB).'
>
<DropZone onDrop={logoUpload.handleFileDrop} className='flex items-center gap-4'>
<button
type='button'
onClick={logoUpload.handleThumbnailClick}
disabled={logoUpload.isUploading}
className='group relative flex h-16 w-16 shrink-0 items-center justify-center overflow-hidden rounded-xl border border-[var(--border)] bg-[var(--surface-2)] transition-colors hover:bg-[var(--surface-3)] disabled:opacity-50'
>
{logoUpload.isUploading ? (
<Loader2 className='h-5 w-5 animate-spin text-[var(--text-muted)]' />
) : logoUpload.previewUrl ? (
<Image
src={logoUpload.previewUrl}
alt='Logo'
fill
className='object-contain p-1'
unoptimized
/>
) : (
<span className='text-[11px] text-[var(--text-muted)]'>Logo</span>
)}
</button>
<div className='flex gap-2'>
<Button
variant='outline'
size='sm'
onClick={logoUpload.handleThumbnailClick}
disabled={logoUpload.isUploading}
className='text-[13px]'
>
{logoUpload.previewUrl ? 'Change' : 'Upload'}
</Button>
{logoUpload.previewUrl && (
<Button
variant='ghost'
size='sm'
onClick={logoUpload.handleRemove}
className='text-[13px] text-[var(--text-muted)] hover:text-[var(--text-primary)]'
>
<X className='h-3.5 w-3.5' />
</Button>
)}
</div>
<input
ref={logoUpload.fileInputRef}
type='file'
accept='image/png,image/jpeg,image/jpg,image/svg+xml'
onChange={logoUpload.handleFileChange}
className='hidden'
/>
</DropZone>
</SettingRow>

<SettingRow
label='Wordmark'
description='Shown in the expanded sidebar. Wide image recommended (PNG, JPEG, or SVG, max 5MB).'
>
|
||||
<DropZone onDrop={wordmarkUpload.handleFileDrop} className='flex items-center gap-4'>
|
||||
<button
|
||||
type='button'
|
||||
onClick={wordmarkUpload.handleThumbnailClick}
|
||||
disabled={wordmarkUpload.isUploading}
|
||||
className='group relative flex h-16 w-40 shrink-0 items-center justify-center overflow-hidden rounded-xl border border-[var(--border)] bg-[var(--surface-2)] transition-colors hover:bg-[var(--surface-3)] disabled:opacity-50'
|
||||
>
|
||||
{wordmarkUpload.isUploading ? (
|
||||
<Loader2 className='h-5 w-5 animate-spin text-[var(--text-muted)]' />
|
||||
) : wordmarkUpload.previewUrl ? (
|
||||
<Image
|
||||
src={wordmarkUpload.previewUrl}
|
||||
alt='Wordmark'
|
||||
fill
|
||||
className='object-contain p-2'
|
||||
unoptimized
|
||||
/>
|
||||
) : (
|
||||
<span className='text-[11px] text-[var(--text-muted)]'>Wordmark</span>
|
||||
)}
|
||||
</button>
|
||||
<div className='flex gap-2'>
|
||||
<Button
|
||||
variant='outline'
|
||||
size='sm'
|
||||
onClick={wordmarkUpload.handleThumbnailClick}
|
||||
disabled={wordmarkUpload.isUploading}
|
||||
className='text-[13px]'
|
||||
>
|
||||
{wordmarkUpload.previewUrl ? 'Change' : 'Upload'}
|
||||
</Button>
|
||||
{wordmarkUpload.previewUrl && (
|
||||
<Button
|
||||
variant='ghost'
|
||||
size='sm'
|
||||
onClick={wordmarkUpload.handleRemove}
|
||||
className='text-[13px] text-[var(--text-muted)] hover:text-[var(--text-primary)]'
|
||||
>
|
||||
<X className='h-3.5 w-3.5' />
|
||||
</Button>
|
||||
)}
|
||||
</div>
|
||||
<input
|
||||
ref={wordmarkUpload.fileInputRef}
|
||||
type='file'
|
||||
accept='image/png,image/jpeg,image/jpg,image/svg+xml'
|
||||
onChange={wordmarkUpload.handleFileChange}
|
||||
className='hidden'
|
||||
/>
|
||||
</DropZone>
|
||||
</SettingRow>
|
||||
</div>
|
||||
|
||||
<SettingRow
|
||||
label='Brand name'
|
||||
description='Replaces "Sim" in the sidebar and select UI elements.'
|
||||
>
|
||||
<Input
|
||||
value={brandName}
|
||||
onChange={(e) => setBrandName(e.target.value)}
|
||||
placeholder='Your Company'
|
||||
className='h-[36px] max-w-[320px] text-[13px]'
|
||||
maxLength={64}
|
||||
/>
|
||||
</SettingRow>
|
||||
</div>
|
||||
</section>
|
||||
|
||||
<section>
|
||||
<SectionTitle>Colors</SectionTitle>
|
||||
<div className='grid grid-cols-2 gap-4'>
|
||||
<ColorInput
|
||||
label='Primary color'
|
||||
value={primaryColor}
|
||||
onChange={setPrimaryColor}
|
||||
placeholder='#701ffc'
|
||||
/>
|
||||
<ColorInput
|
||||
label='Primary hover color'
|
||||
value={primaryHoverColor}
|
||||
onChange={setPrimaryHoverColor}
|
||||
placeholder='#802fff'
|
||||
/>
|
||||
<ColorInput
|
||||
label='Accent color'
|
||||
value={accentColor}
|
||||
onChange={setAccentColor}
|
||||
placeholder='#9d54ff'
|
||||
/>
|
||||
<ColorInput
|
||||
label='Accent hover color'
|
||||
value={accentHoverColor}
|
||||
onChange={setAccentHoverColor}
|
||||
placeholder='#a66fff'
|
||||
/>
|
||||
</div>
|
||||
</section>
|
||||
|
||||
<section>
|
||||
<SectionTitle>Links</SectionTitle>
|
||||
<div className='flex flex-col gap-4'>
|
||||
<SettingRow label='Support email'>
|
||||
<Input
|
||||
type='email'
|
||||
value={supportEmail}
|
||||
onChange={(e) => setSupportEmail(e.target.value)}
|
||||
placeholder='support@yourcompany.com'
|
||||
className='h-[36px] text-[13px]'
|
||||
/>
|
||||
</SettingRow>
|
||||
<SettingRow label='Documentation URL'>
|
||||
<Input
|
||||
type='url'
|
||||
value={documentationUrl}
|
||||
onChange={(e) => setDocumentationUrl(e.target.value)}
|
||||
placeholder='https://docs.yourcompany.com'
|
||||
className='h-[36px] text-[13px]'
|
||||
/>
|
||||
</SettingRow>
|
||||
<SettingRow label='Terms of service URL'>
|
||||
<Input
|
||||
type='url'
|
||||
value={termsUrl}
|
||||
onChange={(e) => setTermsUrl(e.target.value)}
|
||||
placeholder='https://yourcompany.com/terms'
|
||||
className='h-[36px] text-[13px]'
|
||||
/>
|
||||
</SettingRow>
|
||||
<SettingRow label='Privacy policy URL'>
|
||||
<Input
|
||||
type='url'
|
||||
value={privacyUrl}
|
||||
onChange={(e) => setPrivacyUrl(e.target.value)}
|
||||
placeholder='https://yourcompany.com/privacy'
|
||||
className='h-[36px] text-[13px]'
|
||||
/>
|
||||
</SettingRow>
|
||||
</div>
|
||||
</section>
|
||||
|
||||
<section>
|
||||
<SectionTitle>Advanced</SectionTitle>
|
||||
<div className='flex items-center justify-between rounded-lg border border-[var(--border)] bg-[var(--surface-2)] px-4 py-3'>
|
||||
<div className='flex flex-col gap-0.5'>
|
||||
<span className='text-[13px] text-[var(--text-primary)]'>
|
||||
Hide "Powered by Sim" branding
|
||||
</span>
|
||||
<span className='text-[12px] text-[var(--text-muted)]'>
|
||||
Removes the Sim logo from deployed chats and forms.
|
||||
</span>
|
||||
</div>
|
||||
<Switch checked={hidePoweredBySim} onCheckedChange={setHidePoweredBySim} />
|
||||
</div>
|
||||
</section>
|
||||
|
||||
<div className='flex items-center gap-3'>
|
||||
<Button
|
||||
onClick={handleSave}
|
||||
disabled={updateSettings.isPending || isUploading}
|
||||
className='text-[13px]'
|
||||
>
|
||||
{updateSettings.isPending ? (
|
||||
<>
|
||||
<Loader2 className='mr-2 h-3.5 w-3.5 animate-spin' />
|
||||
Saving…
|
||||
</>
|
||||
) : (
|
||||
'Save changes'
|
||||
)}
|
||||
</Button>
|
||||
{saveSuccess && (
|
||||
<span className='text-[13px] text-green-500'>Settings saved successfully.</span>
|
||||
)}
|
||||
{saveError && <span className='text-[13px] text-red-500'>{saveError}</span>}
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
83
apps/sim/ee/whitelabeling/hooks/whitelabel.ts
Normal file
83
apps/sim/ee/whitelabeling/hooks/whitelabel.ts
Normal file
@@ -0,0 +1,83 @@
'use client'

import { useMutation, useQuery, useQueryClient } from '@tanstack/react-query'
import type { OrganizationWhitelabelSettings } from '@/lib/branding/types'
import { organizationKeys } from '@/hooks/queries/organization'

/** PUT payload — string fields accept null to clear a previously-set value. */
export type WhitelabelSettingsPayload = {
  [K in keyof OrganizationWhitelabelSettings]: OrganizationWhitelabelSettings[K] extends
    | string
    | undefined
    ? string | null
    : OrganizationWhitelabelSettings[K]
}

/**
 * Query key factories for whitelabel-related queries
 */
export const whitelabelKeys = {
  all: ['whitelabel'] as const,
  settings: (orgId: string) => [...whitelabelKeys.all, 'settings', orgId] as const,
}

async function fetchWhitelabelSettings(
  orgId: string,
  signal?: AbortSignal
): Promise<OrganizationWhitelabelSettings> {
  const response = await fetch(`/api/organizations/${orgId}/whitelabel`, { signal })

  if (!response.ok) {
    const error = await response.json().catch(() => ({}))
    throw new Error(error.error ?? 'Failed to fetch whitelabel settings')
  }

  const { data } = await response.json()
  return data as OrganizationWhitelabelSettings
}

/**
 * Hook to fetch whitelabel settings for an organization.
 */
export function useWhitelabelSettings(orgId: string | undefined) {
  return useQuery({
    queryKey: whitelabelKeys.settings(orgId ?? ''),
    queryFn: ({ signal }) => fetchWhitelabelSettings(orgId as string, signal),
    enabled: Boolean(orgId),
    staleTime: 60 * 1000,
  })
}

interface UpdateWhitelabelVariables {
  orgId: string
  settings: WhitelabelSettingsPayload
}

/**
 * Hook to update whitelabel settings for an organization.
 */
export function useUpdateWhitelabelSettings() {
  const queryClient = useQueryClient()

  return useMutation({
    mutationFn: async ({ orgId, settings }: UpdateWhitelabelVariables) => {
      const response = await fetch(`/api/organizations/${orgId}/whitelabel`, {
        method: 'PUT',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify(settings),
      })

      if (!response.ok) {
        const error = await response.json().catch(() => ({}))
        throw new Error(error.error ?? 'Failed to update whitelabel settings')
      }

      const { data } = await response.json()
      return data as OrganizationWhitelabelSettings
    },
    onSettled: (_data, _error, { orgId }) => {
      queryClient.invalidateQueries({ queryKey: whitelabelKeys.settings(orgId) })
      queryClient.invalidateQueries({ queryKey: organizationKeys.detail(orgId) })
    },
  })
}
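
A minimal consumer sketch for the two hooks above (the component, import path, and render details are assumptions, not part of this commit):

// Sketch only. The onSettled above refetches both the whitelabel query and the
// org detail query whether the PUT succeeds or fails.
import { useUpdateWhitelabelSettings, useWhitelabelSettings, type WhitelabelSettingsPayload } from '@/ee/whitelabeling/hooks/whitelabel'

function BrandNameForm({ orgId }: { orgId: string }) {
  const { data: settings, isLoading } = useWhitelabelSettings(orgId)
  const updateSettings = useUpdateWhitelabelSettings()

  const save = (brandName: string) =>
    updateSettings.mutateAsync({
      orgId,
      // Per WhitelabelSettingsPayload, null clears a previously-set value;
      // the cast is only because this sketch sends a partial payload.
      settings: { brandName: brandName || null } as WhitelabelSettingsPayload,
    })

  return null // rendering omitted; settings/isLoading/save drive the real UI
}
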
@@ -1,4 +1,6 @@
export type { OrganizationWhitelabelSettings } from '@/lib/branding/types'
export type { BrandConfig, ThemeColors } from './branding'
export { getBrandConfig, useBrandConfig } from './branding'
export { generateThemeCSS } from './inject-theme'
export { generateBrandedMetadata, generateStructuredData } from './metadata'
export { generateOrgThemeCSS, mergeOrgBrandConfig } from './org-branding-utils'

@@ -31,6 +31,7 @@ export function generateThemeCSS(): string {

  if (process.env.NEXT_PUBLIC_BRAND_PRIMARY_HOVER_COLOR) {
    cssVars.push(`--brand-hover: ${process.env.NEXT_PUBLIC_BRAND_PRIMARY_HOVER_COLOR};`)
    cssVars.push(`--brand-accent-hover: ${process.env.NEXT_PUBLIC_BRAND_PRIMARY_HOVER_COLOR};`)
    cssVars.push(
      `--auth-primary-btn-hover-bg: ${process.env.NEXT_PUBLIC_BRAND_PRIMARY_HOVER_COLOR};`
    )

96
apps/sim/ee/whitelabeling/org-branding-utils.ts
Normal file
96
apps/sim/ee/whitelabeling/org-branding-utils.ts
Normal file
@@ -0,0 +1,96 @@
import type { BrandConfig, OrganizationWhitelabelSettings } from '@/lib/branding/types'

/**
 * Merge org-level whitelabel settings over the instance-level brand config.
 * Org settings take priority for any field they define.
 */
export function mergeOrgBrandConfig(
  orgSettings: OrganizationWhitelabelSettings | null,
  instanceConfig: BrandConfig
): BrandConfig {
  if (!orgSettings) {
    return instanceConfig
  }

  return {
    ...instanceConfig,
    name: orgSettings.brandName || instanceConfig.name,
    logoUrl: orgSettings.logoUrl || instanceConfig.logoUrl,
    wordmarkUrl: orgSettings.wordmarkUrl || instanceConfig.wordmarkUrl,
    supportEmail: orgSettings.supportEmail || instanceConfig.supportEmail,
    documentationUrl: orgSettings.documentationUrl || instanceConfig.documentationUrl,
    termsUrl: orgSettings.termsUrl || instanceConfig.termsUrl,
    privacyUrl: orgSettings.privacyUrl || instanceConfig.privacyUrl,
    theme: {
      ...instanceConfig.theme,
      ...(orgSettings.primaryColor && { primaryColor: orgSettings.primaryColor }),
      ...(orgSettings.primaryHoverColor && { primaryHoverColor: orgSettings.primaryHoverColor }),
      ...(orgSettings.accentColor && { accentColor: orgSettings.accentColor }),
      ...(orgSettings.accentHoverColor && { accentHoverColor: orgSettings.accentHoverColor }),
    },
    isWhitelabeled:
      instanceConfig.isWhitelabeled ||
      Boolean(
        orgSettings.brandName ||
          orgSettings.logoUrl ||
          orgSettings.wordmarkUrl ||
          orgSettings.primaryColor
      ),
  }
}

function isDarkBackground(hex: string): boolean {
  let clean = hex.replace('#', '')
  if (clean.length === 3) {
    clean = clean
      .split('')
      .map((c) => c + c)
      .join('')
  }
  const r = Number.parseInt(clean.slice(0, 2), 16)
  const g = Number.parseInt(clean.slice(2, 4), 16)
  const b = Number.parseInt(clean.slice(4, 6), 16)
  return (0.299 * r + 0.587 * g + 0.114 * b) / 255 < 0.5
}

function getContrastTextColor(hex: string): string {
  return isDarkBackground(hex) ? '#ffffff' : '#000000'
}

/**
 * Generate CSS variable overrides from org whitelabel settings.
 * Returns an empty string when no color overrides are set.
 */
export function generateOrgThemeCSS(settings: OrganizationWhitelabelSettings): string {
  const vars: string[] = []

  if (settings.primaryColor) {
    vars.push(`--brand: ${settings.primaryColor};`)
    vars.push(`--brand-accent: ${settings.primaryColor};`)
    vars.push(`--auth-primary-btn-bg: ${settings.primaryColor};`)
    vars.push(`--auth-primary-btn-border: ${settings.primaryColor};`)
    vars.push(`--auth-primary-btn-hover-bg: ${settings.primaryColor};`)
    vars.push(`--auth-primary-btn-hover-border: ${settings.primaryColor};`)
    const textColor = getContrastTextColor(settings.primaryColor)
    vars.push(`--auth-primary-btn-text: ${textColor};`)
    vars.push(`--auth-primary-btn-hover-text: ${textColor};`)
  }

  if (settings.primaryHoverColor) {
    vars.push(`--brand-hover: ${settings.primaryHoverColor};`)
    vars.push(`--brand-accent-hover: ${settings.primaryHoverColor};`)
    vars.push(`--auth-primary-btn-hover-bg: ${settings.primaryHoverColor};`)
    vars.push(`--auth-primary-btn-hover-border: ${settings.primaryHoverColor};`)
    vars.push(`--auth-primary-btn-hover-text: ${getContrastTextColor(settings.primaryHoverColor)};`)
  }

  if (settings.accentColor) {
    vars.push(`--brand-link: ${settings.accentColor};`)
  }

  if (settings.accentHoverColor) {
    vars.push(`--brand-link-hover: ${settings.accentHoverColor};`)
  }

  return vars.length > 0 ? `:root { ${vars.join(' ')} }` : ''
}
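
The weights in isDarkBackground are the Rec. 601 luma coefficients (Y = 0.299R + 0.587G + 0.114B), normalized to [0, 1] with 0.5 as the dark/light cutoff. A quick standalone illustration, since isDarkBackground itself is module-private:

// Illustration of the luminance math above; not exported API.
const luma = (r: number, g: number, b: number) => (0.299 * r + 0.587 * g + 0.114 * b) / 255

luma(0x70, 0x1f, 0xfc) // ≈ 0.32 for #701ffc: dark, so button text becomes #ffffff
luma(0xff, 0xff, 0xff) // 1.0 for #ffffff: light, so button text becomes #000000
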
27
apps/sim/ee/whitelabeling/org-branding.ts
Normal file
27
apps/sim/ee/whitelabeling/org-branding.ts
Normal file
@@ -0,0 +1,27 @@
import { db } from '@sim/db'
import { organization } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import type { OrganizationWhitelabelSettings } from '@/lib/branding/types'

const logger = createLogger('OrgBranding')

/**
 * Fetch whitelabel settings for an organization from the database.
 */
export async function getOrgWhitelabelSettings(
  orgId: string
): Promise<OrganizationWhitelabelSettings | null> {
  try {
    const [org] = await db
      .select({ whitelabelSettings: organization.whitelabelSettings })
      .from(organization)
      .where(eq(organization.id, orgId))
      .limit(1)

    return org?.whitelabelSettings ?? null
  } catch (error) {
    logger.error('Failed to fetch org whitelabel settings', { error, orgId })
    return null
  }
}
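
Read together with org-branding-utils.ts above, the likely server-side flow is the following sketch. The actual call site and import aliases are not shown in this diff, so treat the paths as assumptions:

// Sketch: resolve an org's effective branding plus its CSS overrides.
import { getOrgWhitelabelSettings } from '@/ee/whitelabeling/org-branding'
import { generateOrgThemeCSS, mergeOrgBrandConfig } from '@/ee/whitelabeling/org-branding-utils'
import type { BrandConfig } from '@/lib/branding/types'

async function resolveOrgBranding(orgId: string, instanceConfig: BrandConfig) {
  const orgSettings = await getOrgWhitelabelSettings(orgId) // null on DB error or no row
  const brand = mergeOrgBrandConfig(orgSettings, instanceConfig) // org fields win where set
  const themeCss = orgSettings ? generateOrgThemeCSS(orgSettings) : '' // '' when no colors set
  return { brand, themeCss }
}
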
@@ -112,6 +112,7 @@ export function serializePauseSnapshot(
    useDraftState,
    startTime: metadataFromContext?.startTime ?? new Date().toISOString(),
    isClientSession: metadataFromContext?.isClientSession,
    executionMode: metadataFromContext?.executionMode,
  }

  const snapshot = new ExecutionSnapshot(

@@ -36,6 +36,7 @@ export interface ExecutionMetadata {
  }
  callChain?: string[]
  correlation?: AsyncExecutionCorrelation
  executionMode?: 'sync' | 'stream' | 'async'
}

export interface SerializableExecutionState {

@@ -157,7 +157,7 @@ export function useAcceptCredentialSetInvitation() {
      }
      return response.json()
    },
    onSuccess: () => {
    onSettled: () => {
      queryClient.invalidateQueries({ queryKey: credentialSetKeys.memberships() })
      queryClient.invalidateQueries({ queryKey: credentialSetKeys.invitations() })
    },
@@ -187,7 +187,7 @@ export function useCreateCredentialSet() {
      }
      return response.json()
    },
    onSuccess: (_data, variables) => {
    onSettled: (_data, _error, variables) => {
      queryClient.invalidateQueries({ queryKey: credentialSetKeys.list(variables.organizationId) })
    },
  })
@@ -209,7 +209,7 @@ export function useCreateCredentialSetInvitation() {
      }
      return response.json()
    },
    onSuccess: (_data, variables) => {
    onSettled: (_data, _error, variables) => {
      queryClient.invalidateQueries({
        queryKey: credentialSetKeys.detailInvitations(variables.credentialSetId),
      })
@@ -264,7 +264,7 @@ export function useRemoveCredentialSetMember() {
      }
      return response.json()
    },
    onSuccess: (_data, variables) => {
    onSettled: (_data, _error, variables) => {
      queryClient.invalidateQueries({
        queryKey: credentialSetKeys.detailMembers(variables.credentialSetId),
      })
@@ -288,7 +288,7 @@ export function useLeaveCredentialSet() {
      }
      return response.json()
    },
    onSuccess: () => {
    onSettled: () => {
      queryClient.invalidateQueries({ queryKey: credentialSetKeys.memberships() })
    },
  })
@@ -313,7 +313,7 @@ export function useDeleteCredentialSet() {
      }
      return response.json()
    },
    onSuccess: (_data, variables) => {
    onSettled: (_data, _error, variables) => {
      queryClient.invalidateQueries({
        queryKey: credentialSetKeys.list(variables.organizationId),
      })
@@ -370,7 +370,7 @@ export function useCancelCredentialSetInvitation() {
      }
      return response.json()
    },
    onSuccess: (_data, variables) => {
    onSettled: (_data, _error, variables) => {
      queryClient.invalidateQueries({
        queryKey: credentialSetKeys.detailInvitations(variables.credentialSetId),
      })
@@ -393,7 +393,7 @@ export function useResendCredentialSetInvitation() {
      }
      return response.json()
    },
    onSuccess: (_data, variables) => {
    onSettled: (_data, _error, variables) => {
      queryClient.invalidateQueries({
        queryKey: credentialSetKeys.detailInvitations(variables.credentialSetId),
      })
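
The repeated onSuccess to onSettled change above is the same fix applied to every credential-set mutation: onSettled runs after both success and error, so caches are invalidated even when the request fails. Because onSettled's signature is (data, error, variables, context), the variables argument shifts from second to third position, which is why each callback gains an _error parameter. In general form (deleteThing is a placeholder):

// General shape of the migration above.
useMutation({
  mutationFn: deleteThing,
  onSettled: (_data, _error, variables) => {
    // Runs on success *and* failure, so a failed request cannot leave stale cache.
    queryClient.invalidateQueries({ queryKey: ['things', variables.id] })
  },
})
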
@@ -5,6 +5,12 @@ import { useMutation, useQuery, useQueryClient } from '@tanstack/react-query'
import { environmentKeys } from '@/hooks/queries/environment'
import { fetchJson } from '@/hooks/selectors/helpers'

/**
 * Key prefix for OAuth credential queries.
 * Duplicated here to avoid circular imports with oauth-credentials.ts.
 */
const OAUTH_CREDENTIALS_KEY = ['oauthCredentials'] as const

export type WorkspaceCredentialType = 'oauth' | 'env_workspace' | 'env_personal' | 'service_account'
export type WorkspaceCredentialRole = 'admin' | 'member'
export type WorkspaceCredentialMemberStatus = 'active' | 'pending' | 'revoked'
@@ -192,6 +198,9 @@ export function useCreateWorkspaceCredential() {
      queryClient.invalidateQueries({
        queryKey: workspaceCredentialKeys.lists(),
      })
      queryClient.invalidateQueries({
        queryKey: OAUTH_CREDENTIALS_KEY,
      })
    },
  })
}
@@ -269,6 +278,9 @@ export function useUpdateWorkspaceCredential() {
      queryClient.invalidateQueries({
        queryKey: workspaceCredentialKeys.lists(),
      })
      queryClient.invalidateQueries({
        queryKey: OAUTH_CREDENTIALS_KEY,
      })
    },
  })
}
@@ -290,6 +302,7 @@ export function useDeleteWorkspaceCredential() {
    onSettled: (_data, _error, credentialId) => {
      queryClient.invalidateQueries({ queryKey: workspaceCredentialKeys.detail(credentialId) })
      queryClient.invalidateQueries({ queryKey: workspaceCredentialKeys.lists() })
      queryClient.invalidateQueries({ queryKey: OAUTH_CREDENTIALS_KEY })
      queryClient.invalidateQueries({ queryKey: environmentKeys.all })
    },
  })

@@ -18,6 +18,7 @@ export type PermissionState = 'prompt' | 'granted' | 'denied'

interface UseSpeechToTextProps {
  onTranscript: (text: string) => void
  onUsageLimitExceeded?: () => void
  language?: string
}

@@ -31,6 +32,7 @@ interface UseSpeechToTextReturn {

export function useSpeechToText({
  onTranscript,
  onUsageLimitExceeded,
  language,
}: UseSpeechToTextProps): UseSpeechToTextReturn {
  const [isListening, setIsListening] = useState(false)
@@ -38,6 +40,7 @@ export function useSpeechToText({
  const [permissionState, setPermissionState] = useState<PermissionState>('prompt')

  const onTranscriptRef = useRef(onTranscript)
  const onUsageLimitExceededRef = useRef(onUsageLimitExceeded)
  const languageRef = useRef(language)
  const mountedRef = useRef(true)
  const startingRef = useRef(false)
@@ -55,6 +58,7 @@ export function useSpeechToText({
  const committedTextRef = useRef('')

  onTranscriptRef.current = onTranscript
  onUsageLimitExceededRef.current = onUsageLimitExceeded
  languageRef.current = language

  useEffect(() => {
@@ -165,6 +169,10 @@ export function useSpeechToText({
      })

      if (!tokenResponse.ok) {
        if (tokenResponse.status === 402) {
          onUsageLimitExceededRef.current?.()
          return false
        }
        const body = await tokenResponse.json().catch(() => ({}))
        throw new Error(body.error || 'Failed to get speech token')
      }

@@ -1,13 +1,11 @@
import { db } from '@sim/db'
import { member, subscription, user, userStats } from '@sim/db/schema'
import { member, subscription, user } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq, inArray, sql } from 'drizzle-orm'
import { getEffectiveBillingStatus, isOrganizationBillingBlocked } from '@/lib/billing/core/access'
import { getHighestPrioritySubscription } from '@/lib/billing/core/plan'
import { getUserUsageLimit } from '@/lib/billing/core/usage'
import {
  getPlanTierCredits,
  isOrgPlan,
  isPro as isPlanPro,
  isTeam as isPlanTeam,
} from '@/lib/billing/plan-helpers'
@@ -16,18 +14,15 @@ import {
  checkProPlan,
  checkTeamPlan,
  ENTITLED_SUBSCRIPTION_STATUSES,
  getFreeTierLimit,
  getPerUserMinimumLimit,
  hasUsableSubscriptionAccess,
  USABLE_SUBSCRIPTION_STATUSES,
} from '@/lib/billing/subscriptions/utils'
import type { UserSubscriptionState } from '@/lib/billing/types'
import {
  isAccessControlEnabled,
  isBillingEnabled,
  isCredentialSetsEnabled,
  isHosted,
  isInboxEnabled,
  isProd,
  isSsoEnabled,
} from '@/lib/core/config/feature-flags'
import { getBaseUrl } from '@/lib/core/utils/urls'
@@ -98,7 +93,7 @@ export async function hasPaidSubscription(referenceId: string): Promise<boolean>
 */
export async function isProPlan(userId: string): Promise<boolean> {
  try {
    if (!isProd) {
    if (!isBillingEnabled) {
      return true
    }

@@ -125,7 +120,7 @@ export async function isProPlan(userId: string): Promise<boolean> {
 */
export async function isTeamPlan(userId: string): Promise<boolean> {
  try {
    if (!isProd) {
    if (!isBillingEnabled) {
      return true
    }

@@ -149,7 +144,7 @@ export async function isTeamPlan(userId: string): Promise<boolean> {
 */
export async function isEnterprisePlan(userId: string): Promise<boolean> {
  try {
    if (!isProd) {
    if (!isBillingEnabled) {
      return true
    }

@@ -177,7 +172,7 @@ export async function isEnterprisePlan(userId: string): Promise<boolean> {
 */
export async function isEnterpriseOrgAdminOrOwner(userId: string): Promise<boolean> {
  try {
    if (!isProd) {
    if (!isBillingEnabled) {
      return true
    }

@@ -241,7 +236,7 @@ export async function isEnterpriseOrgAdminOrOwner(userId: string): Promise<boole
 */
export async function isTeamOrgAdminOrOwner(userId: string): Promise<boolean> {
  try {
    if (!isProd) {
    if (!isBillingEnabled) {
      return true
    }

@@ -304,7 +299,7 @@ export async function isOrganizationOnTeamOrEnterprisePlan(
  organizationId: string
): Promise<boolean> {
  try {
    if (!isProd) {
    if (!isBillingEnabled) {
      return true
    }

@@ -340,7 +335,7 @@ export async function isOrganizationOnTeamOrEnterprisePlan(
 */
export async function isOrganizationOnEnterprisePlan(organizationId: string): Promise<boolean> {
  try {
    if (!isProd) {
    if (!isBillingEnabled) {
      return true
    }

@@ -445,7 +440,7 @@ export async function hasInboxAccess(userId: string): Promise<boolean> {
    if (isInboxEnabled) {
      return true
    }
    if (!isProd) {
    if (!isBillingEnabled) {
      return true
    }
    const [sub, billingStatus] = await Promise.all([
@@ -485,145 +480,6 @@ export async function hasLiveSyncAccess(userId: string): Promise<boolean> {
  }
}

/**
 * Check if user has exceeded their cost limit based on current period usage
 */
export async function hasExceededCostLimit(userId: string): Promise<boolean> {
  try {
    if (!isProd) {
      return false
    }

    const subscription = await getHighestPrioritySubscription(userId)

    let limit = getFreeTierLimit() // Default free tier limit

    if (subscription) {
      // Team/Enterprise: Use organization limit
      if (isOrgPlan(subscription.plan)) {
        limit = await getUserUsageLimit(userId)
        logger.info('Using organization limit', {
          userId,
          plan: subscription.plan,
          limit,
        })
      } else {
        // Pro/Free: Use individual limit
        limit = getPerUserMinimumLimit(subscription)
        logger.info('Using subscription-based limit', {
          userId,
          plan: subscription.plan,
          limit,
        })
      }
    } else {
      logger.info('Using free tier limit', { userId, limit })
    }

    // Get user stats to check current period usage
    const statsRecords = await db.select().from(userStats).where(eq(userStats.userId, userId))

    if (statsRecords.length === 0) {
      return false
    }

    // Use current period cost instead of total cost for accurate billing period tracking
    const currentCost = Number.parseFloat(
      statsRecords[0].currentPeriodCost?.toString() || statsRecords[0].totalCost.toString()
    )

    logger.info('Checking cost limit', { userId, currentCost, limit })

    return currentCost >= limit
  } catch (error) {
    logger.error('Error checking cost limit', { error, userId })
    return false // Be conservative in case of error
  }
}

/**
 * Check if sharing features are enabled for user
 */
// Removed unused feature flag helpers: isSharingEnabled, isMultiplayerEnabled, isWorkspaceCollaborationEnabled

/**
 * Get comprehensive subscription state for a user
 * Single function to get all subscription information
 */
export async function getUserSubscriptionState(userId: string): Promise<UserSubscriptionState> {
  try {
    // Get subscription and user stats in parallel to minimize DB calls
    const [subscription, statsRecords] = await Promise.all([
      getHighestPrioritySubscription(userId),
      db.select().from(userStats).where(eq(userStats.userId, userId)).limit(1),
    ])

    // Determine plan types based on subscription (avoid redundant DB calls)
    const isPro =
      !isProd ||
      !!(
        subscription &&
        (checkProPlan(subscription) ||
          checkTeamPlan(subscription) ||
          checkEnterprisePlan(subscription))
      )
    const isTeam =
      !isProd ||
      !!(subscription && (checkTeamPlan(subscription) || checkEnterprisePlan(subscription)))
    const isEnterprise = !isProd || !!(subscription && checkEnterprisePlan(subscription))
    const isFree = !isPro && !isTeam && !isEnterprise

    // Determine plan name
    let planName = 'free'
    if (isEnterprise) planName = 'enterprise'
    else if (isTeam) planName = 'team'
    else if (isPro) planName = 'pro'

    // Check cost limit using already-fetched user stats
    let hasExceededLimit = false
    if (isProd && statsRecords.length > 0) {
      let limit = getFreeTierLimit() // Default free tier limit
      if (subscription) {
        // Team/Enterprise: Use organization limit
        if (isOrgPlan(subscription.plan)) {
          limit = await getUserUsageLimit(userId)
        } else {
          // Pro/Free: Use individual limit
          limit = getPerUserMinimumLimit(subscription)
        }
      }

      const currentCost = Number.parseFloat(
        statsRecords[0].currentPeriodCost?.toString() || statsRecords[0].totalCost.toString()
      )
      hasExceededLimit = currentCost >= limit
    }

    return {
      isPro,
      isTeam,
      isEnterprise,
      isFree,
      highestPrioritySubscription: subscription,
      hasExceededLimit,
      planName,
    }
  } catch (error) {
    logger.error('Error getting user subscription state', { error, userId })

    // Return safe defaults in case of error
    return {
      isPro: false,
      isTeam: false,
      isEnterprise: false,
      isFree: true,
      highestPrioritySubscription: null,
      hasExceededLimit: false,
      planName: 'free',
    }
  }
}

/**
 * Send welcome email for Pro and Team plan subscriptions
 */

@@ -9,7 +9,6 @@ export * from '@/lib/billing/core/organization'
export * from '@/lib/billing/core/subscription'
export {
  getHighestPrioritySubscription as getActiveSubscription,
  getUserSubscriptionState as getSubscriptionState,
  hasAccessControlAccess,
  hasCredentialSetsAccess,
  hasPaidSubscription,

@@ -73,16 +73,6 @@ export interface BillingData {
  daysRemaining: number
}

export interface UserSubscriptionState {
  isPro: boolean
  isTeam: boolean
  isEnterprise: boolean
  isFree: boolean
  highestPrioritySubscription: any | null
  hasExceededLimit: boolean
  planName: string
}

export interface SubscriptionPlan {
  name: string
  priceId: string

@@ -1,4 +1,4 @@
import { render } from '@react-email/components'
import { render } from '@react-email/render'
import { db } from '@sim/db'
import {
  member,

@@ -6,6 +6,7 @@ import type { BrandConfig } from './types'
export const defaultBrandConfig: BrandConfig = {
  name: 'Sim',
  logoUrl: undefined,
  wordmarkUrl: undefined,
  faviconUrl: undefined,
  customCssUrl: undefined,
  supportEmail: 'help@sim.ai',

@@ -1,2 +1,3 @@
export { defaultBrandConfig } from './defaults'
export type { BrandConfig, ThemeColors } from './types'
export type { BrandConfig, OrganizationWhitelabelSettings, ThemeColors } from './types'
export { HEX_COLOR_REGEX } from './types'

@@ -1,3 +1,6 @@
/** Matches 3- or 6-digit hex colors, e.g. `#abc` or `#701ffc`. */
export const HEX_COLOR_REGEX = /^#([0-9a-f]{3}|[0-9a-f]{6})$/i

export interface ThemeColors {
  primaryColor?: string
  primaryHoverColor?: string
@@ -9,6 +12,7 @@ export interface ThemeColors {
export interface BrandConfig {
  name: string
  logoUrl?: string
  wordmarkUrl?: string
  faviconUrl?: string
  customCssUrl?: string
  supportEmail?: string
@@ -19,3 +23,22 @@ export interface BrandConfig {
  /** Whether this instance has custom branding applied (any brand env var is set) */
  isWhitelabeled: boolean
}

/**
 * Per-organization whitelabel settings stored in the database.
 * Only available for enterprise organizations on the hosted platform.
 */
export interface OrganizationWhitelabelSettings {
  brandName?: string
  logoUrl?: string
  wordmarkUrl?: string
  primaryColor?: string
  primaryHoverColor?: string
  accentColor?: string
  accentHoverColor?: string
  supportEmail?: string
  documentationUrl?: string
  termsUrl?: string
  privacyUrl?: string
  hidePoweredBySim?: boolean
}
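
For reference, HEX_COLOR_REGEX accepts exactly the 3- and 6-digit forms (case-insensitive, leading # required), and every field of OrganizationWhitelabelSettings is optional, with omitted fields falling back to the instance config. Illustrative values:

HEX_COLOR_REGEX.test('#abc') // true: 3-digit form
HEX_COLOR_REGEX.test('#701FFC') // true: 6-digit form, case-insensitive
HEX_COLOR_REGEX.test('#701ffc00') // false: 8-digit (alpha) form is rejected
HEX_COLOR_REGEX.test('701ffc') // false: leading '#' is required

const example: OrganizationWhitelabelSettings = {
  brandName: 'Acme',
  primaryColor: '#701ffc',
  hidePoweredBySim: true,
}
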
@@ -3,18 +3,18 @@
 */
import { beforeEach, describe, expect, it, vi } from 'vitest'

vi.mock('@sim/logger', () => {
  const createMockLogger = (): Record<string, any> => ({
    info: vi.fn(),
    warn: vi.fn(),
    error: vi.fn(),
    withMetadata: vi.fn(() => createMockLogger()),
  })
  return { createLogger: vi.fn(() => createMockLogger()) }
})
const { mockGetHighestPrioritySubscription } = vi.hoisted(() => ({
  mockGetHighestPrioritySubscription: vi.fn(),
}))

vi.mock('@/lib/billing/core/subscription', () => ({
  getUserSubscriptionState: vi.fn(),
  getHighestPrioritySubscription: mockGetHighestPrioritySubscription,
}))

vi.mock('@/lib/billing/plan-helpers', () => ({
  isPaid: vi.fn(
    (plan: string | null) => plan === 'pro' || plan === 'team' || plan === 'enterprise'
  ),
}))

vi.mock('@/lib/core/config/feature-flags', () => ({
@@ -59,54 +59,47 @@ vi.mock('@/tools/params', () => ({
  createUserToolSchema: vi.fn(() => ({ type: 'object', properties: {} })),
}))

import { getUserSubscriptionState } from '@/lib/billing/core/subscription'
import { buildIntegrationToolSchemas } from './payload'

const mockedGetUserSubscriptionState = getUserSubscriptionState as unknown as {
  mockResolvedValue: (value: unknown) => void
  mockRejectedValue: (value: unknown) => void
  mockClear: () => void
}

describe('buildIntegrationToolSchemas', () => {
  beforeEach(() => {
    vi.clearAllMocks()
  })

  it('appends the email footer prompt for free users', async () => {
    mockedGetUserSubscriptionState.mockResolvedValue({ isFree: true })
    mockGetHighestPrioritySubscription.mockResolvedValue(null)

    const toolSchemas = await buildIntegrationToolSchemas('user-free')
    const gmailTool = toolSchemas.find((tool) => tool.name === 'gmail_send')

    expect(getUserSubscriptionState).toHaveBeenCalledWith('user-free')
    expect(mockGetHighestPrioritySubscription).toHaveBeenCalledWith('user-free')
    expect(gmailTool?.description).toContain('sent with sim ai')
  })

  it('does not append the email footer prompt for paid users', async () => {
    mockedGetUserSubscriptionState.mockResolvedValue({ isFree: false })
    mockGetHighestPrioritySubscription.mockResolvedValue({ plan: 'pro', status: 'active' })

    const toolSchemas = await buildIntegrationToolSchemas('user-paid')
    const gmailTool = toolSchemas.find((tool) => tool.name === 'gmail_send')

    expect(getUserSubscriptionState).toHaveBeenCalledWith('user-paid')
    expect(mockGetHighestPrioritySubscription).toHaveBeenCalledWith('user-paid')
    expect(gmailTool?.description).toBe('Send emails using Gmail')
  })

  it('still builds integration tools when subscription lookup fails', async () => {
    mockedGetUserSubscriptionState.mockRejectedValue(new Error('db unavailable'))
    mockGetHighestPrioritySubscription.mockRejectedValue(new Error('db unavailable'))

    const toolSchemas = await buildIntegrationToolSchemas('user-error')
    const gmailTool = toolSchemas.find((tool) => tool.name === 'gmail_send')
    const brandfetchTool = toolSchemas.find((tool) => tool.name === 'brandfetch_search')

    expect(getUserSubscriptionState).toHaveBeenCalledWith('user-error')
    expect(mockGetHighestPrioritySubscription).toHaveBeenCalledWith('user-error')
    expect(gmailTool?.description).toBe('Send emails using Gmail')
    expect(brandfetchTool?.description).toBe('Search for brands by company name')
  })

  it('emits executeLocally for dynamic client tools only', async () => {
    mockedGetUserSubscriptionState.mockResolvedValue({ isFree: false })
    mockGetHighestPrioritySubscription.mockResolvedValue({ plan: 'pro', status: 'active' })

    const toolSchemas = await buildIntegrationToolSchemas('user-client')
    const gmailTool = toolSchemas.find((tool) => tool.name === 'gmail_send')

@@ -1,5 +1,6 @@
import { createLogger } from '@sim/logger'
import { getUserSubscriptionState } from '@/lib/billing/core/subscription'
import { getHighestPrioritySubscription } from '@/lib/billing/core/subscription'
import { isPaid } from '@/lib/billing/plan-helpers'
import { getToolEntry } from '@/lib/copilot/tool-executor/router'
import { getCopilotToolDescription } from '@/lib/copilot/tools/descriptions'
import { isHosted } from '@/lib/core/config/feature-flags'
@@ -58,18 +59,13 @@ export async function buildIntegrationToolSchemas(
  let shouldAppendEmailTagline = false

  try {
    const subscriptionState = await getUserSubscriptionState(userId)
    shouldAppendEmailTagline = subscriptionState.isFree
    const subscription = await getHighestPrioritySubscription(userId)
    shouldAppendEmailTagline = !subscription || !isPaid(subscription.plan)
  } catch (error) {
    logger.warn(
      messageId
        ? `Failed to load subscription state for copilot tool descriptions [messageId:${messageId}]`
        : 'Failed to load subscription state for copilot tool descriptions',
      {
        userId,
        error: error instanceof Error ? error.message : String(error),
      }
    )
    reqLogger.warn('Failed to load subscription for copilot tool descriptions', {
      userId,
      error: error instanceof Error ? error.message : String(error),
    })
  }

  for (const [toolId, toolConfig] of Object.entries(latestTools)) {

@@ -95,6 +95,11 @@ export class BullMQJobQueue implements JobQueueBackend {
      return toJob('schedule-execution', scheduleJob)
    }

    const resumeJob = await getBullMQQueue('resume-execution').getJob(jobId)
    if (resumeJob) {
      return toJob('resume-execution', resumeJob)
    }

    return null
  }

@@ -19,6 +19,7 @@ const JOB_TYPE_TO_TASK_ID: Record<JobType, string> = {
  'workflow-execution': 'workflow-execution',
  'schedule-execution': 'schedule-execution',
  'webhook-execution': 'webhook-execution',
  'resume-execution': 'resume-execution',
}

/**

@@ -20,7 +20,11 @@ export const JOB_STATUS = {

export type JobStatus = (typeof JOB_STATUS)[keyof typeof JOB_STATUS]

export type JobType = 'workflow-execution' | 'schedule-execution' | 'webhook-execution'
export type JobType =
  | 'workflow-execution'
  | 'schedule-execution'
  | 'webhook-execution'
  | 'resume-execution'

export type AsyncExecutionCorrelationSource = 'workflow' | 'schedule' | 'webhook'
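
With 'resume-execution' added to JobType, and given the dedicated queue options and getBullMQQueue case in the hunks below, enqueueing a resume job presumably mirrors the existing job types. A sketch, since the enqueue call site is not part of this diff (payload and jobId are placeholders):

// Sketch only: BullMQ Queue#add with the new job type.
// The queue is created with attempts: 1, so a resume is not retried automatically.
async function enqueueResume(payload: unknown, jobId: string) {
  const queue = getBullMQQueue('resume-execution')
  await queue.add('resume-execution', payload, { jobId })
}
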
@@ -16,6 +16,7 @@ export interface BullMQJobData<TPayload> {
let workflowQueueInstance: Queue | null = null
let webhookQueueInstance: Queue | null = null
let scheduleQueueInstance: Queue | null = null
let resumeQueueInstance: Queue | null = null
let knowledgeConnectorSyncQueueInstance: Queue | null = null
let knowledgeDocumentProcessingQueueInstance: Queue | null = null
let mothershipJobExecutionQueueInstance: Queue | null = null
@@ -45,6 +46,12 @@ function getQueueDefaultOptions(type: JobType) {
        removeOnComplete: { age: 24 * 60 * 60 },
        removeOnFail: { age: 3 * 24 * 60 * 60 },
      }
    case 'resume-execution':
      return {
        attempts: 1,
        removeOnComplete: { age: 24 * 60 * 60 },
        removeOnFail: { age: 3 * 24 * 60 * 60 },
      }
  }
}

@@ -121,6 +128,11 @@ export function getBullMQQueue(type: JobType): Queue {
      scheduleQueueInstance = createQueue(type)
      }
      return scheduleQueueInstance
    case 'resume-execution':
      if (!resumeQueueInstance) {
        resumeQueueInstance = createQueue(type)
      }
      return resumeQueueInstance
  }
}

@@ -129,6 +141,7 @@ export function getBullMQQueueByName(queueName: WorkspaceDispatchQueueName): Que
    case 'workflow-execution':
    case 'webhook-execution':
    case 'schedule-execution':
    case 'resume-execution':
      return getBullMQQueue(queueName)
    case KNOWLEDGE_CONNECTOR_SYNC_QUEUE:
      return getKnowledgeConnectorSyncQueue()

@@ -5,7 +5,8 @@
 */

import { createLogger } from '@sim/logger'
import { getUserSubscriptionState } from '@/lib/billing/core/subscription'
import { getHighestPrioritySubscription } from '@/lib/billing/core/subscription'
import { getPlanTypeForLimits } from '@/lib/billing/plan-helpers'
import { getWorkspaceBilledAccountUserId } from '@/lib/workspaces/utils'
import { type PlanName, TABLE_PLAN_LIMITS, type TablePlanLimits } from './constants'

@@ -29,8 +30,8 @@ export async function getWorkspaceTableLimits(workspaceId: string): Promise<Tabl
    return TABLE_PLAN_LIMITS.free
  }

  const subscriptionState = await getUserSubscriptionState(billedAccountUserId)
  const planName = subscriptionState.planName as PlanName
  const subscription = await getHighestPrioritySubscription(billedAccountUserId)
  const planName = getPlanTypeForLimits(subscription?.plan) as PlanName

  const limits = TABLE_PLAN_LIMITS[planName] ?? TABLE_PLAN_LIMITS.free

@@ -30,6 +30,11 @@ export function processStreamingBlockLog(log: BlockLog, streamedContent: string)
    return false
  }

  // Skip recalculation if cost was explicitly set by the billing layer (e.g. BYOK zero cost)
  if (log.output?.cost?.pricing) {
    return false
  }

  // Check if we have content to tokenize
  if (!streamedContent?.trim()) {
    return false

@@ -32,7 +32,7 @@ export const MIME_TYPE_MAPPING: Record<string, 'image' | 'document' | 'audio' |
  'image/png': 'image',
  'image/gif': 'image',
  'image/webp': 'image',
  // SVG is XML text, not a raster image — handled separately in createFileContent
  'image/svg+xml': 'image', // SVG upload is allowed; createFileContent handles it separately for Claude API

  // Documents
  'application/pdf': 'document',

@@ -151,44 +151,68 @@ async function fetchNewEmails(
  let latestHistoryId = config.historyId

  if (useHistoryApi) {
    const historyUrl = `https://gmail.googleapis.com/gmail/v1/users/me/history?startHistoryId=${config.historyId}`
    const messageIds = new Set<string>()
    let pageToken: string | undefined

    const historyResponse = await fetch(historyUrl, {
      headers: { Authorization: `Bearer ${accessToken}` },
    })
    do {
      let historyUrl = `https://gmail.googleapis.com/gmail/v1/users/me/history?startHistoryId=${config.historyId}&historyTypes=messageAdded`
      if (pageToken) {
        historyUrl += `&pageToken=${pageToken}`
      }

    if (!historyResponse.ok) {
      const errorData = await historyResponse.json()
      logger.error(`[${requestId}] Gmail history API error:`, {
        status: historyResponse.status,
        statusText: historyResponse.statusText,
        error: errorData,
      const historyResponse = await fetch(historyUrl, {
        headers: { Authorization: `Bearer ${accessToken}` },
      })

      logger.info(`[${requestId}] Falling back to search API after history API failure`)
      return searchEmails(accessToken, config, requestId, logger)
    }
      if (!historyResponse.ok) {
        const status = historyResponse.status
        const errorData = await historyResponse.json().catch(() => ({}))
        logger.error(`[${requestId}] Gmail history API error:`, {
          status,
          statusText: historyResponse.statusText,
          error: errorData,
        })

    const historyData = await historyResponse.json()
        if (status === 403 || status === 429) {
          throw new Error(
            `Gmail API error ${status} — skipping to retry next poll cycle: ${JSON.stringify(errorData)}`
          )
        }

    if (!historyData.history || !historyData.history.length) {
      return { emails: [], latestHistoryId }
    }
        logger.info(`[${requestId}] Falling back to search API after history API error ${status}`)
        const searchResult = await searchEmails(accessToken, config, requestId, logger)
        if (searchResult.emails.length === 0) {
          const freshHistoryId = await getGmailProfileHistoryId(accessToken, requestId, logger)
          if (freshHistoryId) {
            logger.info(
              `[${requestId}] Fetched fresh historyId ${freshHistoryId} after invalid historyId (was: ${config.historyId})`
            )
            return { emails: [], latestHistoryId: freshHistoryId }
          }
        }
        return searchResult
      }

    if (historyData.historyId) {
      latestHistoryId = historyData.historyId
    }
      const historyData = await historyResponse.json()

    const messageIds = new Set<string>()
    for (const history of historyData.history) {
      if (history.messagesAdded) {
        for (const messageAdded of history.messagesAdded) {
          messageIds.add(messageAdded.message.id)
      if (historyData.historyId) {
        latestHistoryId = historyData.historyId
      }

      if (historyData.history) {
        for (const history of historyData.history) {
          if (history.messagesAdded) {
            for (const messageAdded of history.messagesAdded) {
              messageIds.add(messageAdded.message.id)
            }
          }
        }
      }
    }

    if (messageIds.size === 0) {
      pageToken = historyData.nextPageToken
    } while (pageToken)

    if (!messageIds.size) {
      return { emails: [], latestHistoryId }
    }
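
The reworked hunk above turns the single history fetch into a do/while over nextPageToken, scoped to historyTypes=messageAdded so only new-message events come back. Stripped of the error handling and fallback paths, the pagination has this shape (startId and token are placeholders):

// Shape of the Gmail history pagination above, minus error handling.
async function collectNewMessageIds(startId: string, token: string): Promise<Set<string>> {
  const messageIds = new Set<string>()
  let pageToken: string | undefined
  do {
    let url = `https://gmail.googleapis.com/gmail/v1/users/me/history?startHistoryId=${startId}&historyTypes=messageAdded`
    if (pageToken) url += `&pageToken=${pageToken}`
    const res = await fetch(url, { headers: { Authorization: `Bearer ${token}` } })
    const page = await res.json()
    for (const h of page.history ?? []) {
      for (const added of h.messagesAdded ?? []) messageIds.add(added.message.id)
    }
    pageToken = page.nextPageToken
  } while (pageToken)
  return messageIds
}
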
@@ -352,6 +376,29 @@ async function searchEmails(
  }
}

async function getGmailProfileHistoryId(
  accessToken: string,
  requestId: string,
  logger: ReturnType<typeof import('@sim/logger').createLogger>
): Promise<string | null> {
  try {
    const response = await fetch('https://gmail.googleapis.com/gmail/v1/users/me/profile', {
      headers: { Authorization: `Bearer ${accessToken}` },
    })
    if (!response.ok) {
      logger.warn(
        `[${requestId}] Failed to fetch Gmail profile for fresh historyId: ${response.status}`
      )
      return null
    }
    const profile = await response.json()
    return (profile.historyId as string | undefined) ?? null
  } catch (error) {
    logger.warn(`[${requestId}] Error fetching Gmail profile:`, error)
    return null
  }
}

async function getEmailDetails(accessToken: string, messageId: string): Promise<GmailEmail> {
  const messageUrl = `https://gmail.googleapis.com/gmail/v1/users/me/messages/${messageId}?format=full`

@@ -442,9 +489,7 @@ async function processEmails(
    if (headers.date) {
      try {
        date = new Date(headers.date).toISOString()
      } catch (_e) {
        // Keep date as null if parsing fails
      }
      } catch (_e) {}
    } else if (email.internalDate) {
      date = new Date(Number.parseInt(email.internalDate)).toISOString()
    }

@@ -22,6 +22,7 @@ interface ImapWebhookConfig {
  includeAttachments: boolean
  lastProcessedUid?: number
  lastProcessedUidByMailbox?: Record<string, number>
  uidValidityByMailbox?: Record<string, string>
  lastCheckedTimestamp?: string
  maxEmailsPerPoll?: number
}
@@ -90,48 +91,90 @@ export const imapPollingHandler: PollingProviderHandler = {
|
||||
return 'failure'
|
||||
}
|
||||
|
||||
const { emails, latestUidByMailbox } = await fetchNewEmails(
|
||||
config,
|
||||
requestId,
|
||||
hostValidation.resolvedIP!,
|
||||
logger
|
||||
)
|
||||
const pollTimestamp = new Date().toISOString()
|
||||
const client = new ImapFlow({
|
||||
host: hostValidation.resolvedIP!,
|
||||
servername: config.host,
|
||||
port: config.port || 993,
|
||||
secure: config.secure ?? true,
|
||||
auth: {
|
||||
user: config.username,
|
||||
pass: config.password,
|
||||
},
|
||||
tls: { rejectUnauthorized: true },
|
||||
logger: false,
|
||||
})
|
||||
|
||||
if (!emails || !emails.length) {
|
||||
await updateImapState(webhookId, latestUidByMailbox, pollTimestamp, config, logger)
|
||||
await markWebhookSuccess(webhookId, logger)
|
||||
logger.info(`[${requestId}] No new emails found for webhook ${webhookId}`)
|
||||
return 'success'
|
||||
}
|
||||
let emails: Awaited<ReturnType<typeof fetchNewEmails>>['emails'] = []
|
||||
let latestUidByMailbox: Record<string, number> = {}
|
||||
let uidValidityByMailbox: Record<string, string> = {}
|
||||
|
||||
logger.info(`[${requestId}] Found ${emails.length} new emails for webhook ${webhookId}`)
|
||||
try {
|
||||
await client.connect()
|
||||
|
||||
const { processedCount, failedCount } = await processEmails(
|
||||
emails,
|
||||
webhookData,
|
||||
workflowData,
|
||||
config,
|
||||
requestId,
|
||||
hostValidation.resolvedIP!,
|
||||
logger
|
||||
)
|
||||
const result = await fetchNewEmails(client, config, requestId, logger)
|
||||
emails = result.emails
|
||||
latestUidByMailbox = result.latestUidByMailbox
|
||||
uidValidityByMailbox = result.uidValidityByMailbox
|
||||
|
||||
await updateImapState(webhookId, latestUidByMailbox, pollTimestamp, config, logger)
|
||||
const pollTimestamp = new Date().toISOString()
|
||||
|
||||
if (failedCount > 0 && processedCount === 0) {
|
||||
await markWebhookFailed(webhookId, logger)
|
||||
logger.warn(
|
||||
`[${requestId}] All ${failedCount} emails failed to process for webhook ${webhookId}`
|
||||
if (!emails.length) {
|
||||
await updateImapState(
|
||||
webhookId,
|
||||
latestUidByMailbox,
|
||||
pollTimestamp,
|
||||
config,
|
||||
logger,
|
||||
uidValidityByMailbox
|
||||
)
|
||||
await markWebhookSuccess(webhookId, logger)
|
||||
logger.info(`[${requestId}] No new emails found for webhook ${webhookId}`)
|
||||
await client.logout()
|
||||
return 'success'
|
||||
}
|
||||
|
||||
logger.info(`[${requestId}] Found ${emails.length} new emails for webhook ${webhookId}`)
|
||||
|
||||
const { processedCount, failedCount } = await processEmails(
|
||||
emails,
|
||||
webhookData,
|
||||
workflowData,
|
||||
config,
|
||||
client,
|
||||
requestId,
|
||||
logger
|
||||
)
|
||||
return 'failure'
|
||||
}
|
||||
|
||||
await markWebhookSuccess(webhookId, logger)
|
||||
logger.info(
|
||||
`[${requestId}] Successfully processed ${processedCount} emails for webhook ${webhookId}${failedCount > 0 ? ` (${failedCount} failed)` : ''}`
|
||||
)
|
||||
return 'success'
|
||||
await updateImapState(
|
||||
webhookId,
|
||||
latestUidByMailbox,
|
||||
pollTimestamp,
|
||||
config,
|
||||
logger,
|
||||
uidValidityByMailbox
|
||||
)
|
||||
|
||||
await client.logout()
|
||||
|
||||
if (failedCount > 0 && processedCount === 0) {
|
||||
await markWebhookFailed(webhookId, logger)
|
||||
logger.warn(
|
||||
`[${requestId}] All ${failedCount} emails failed to process for webhook ${webhookId}`
|
||||
)
|
||||
return 'failure'
|
||||
}
|
||||
|
||||
await markWebhookSuccess(webhookId, logger)
|
||||
logger.info(
|
||||
`[${requestId}] Successfully processed ${processedCount} emails for webhook ${webhookId}${failedCount > 0 ? ` (${failedCount} failed)` : ''}`
|
||||
)
|
||||
return 'success'
|
||||
} catch (innerError) {
|
||||
try {
|
||||
await client.logout()
|
||||
} catch {}
|
||||
throw innerError
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error(`[${requestId}] Error processing IMAP webhook ${webhookId}:`, error)
|
||||
await markWebhookFailed(webhookId, logger)
|
||||
@@ -145,13 +188,35 @@ async function updateImapState(
   uidByMailbox: Record<string, number>,
   timestamp: string,
   config: ImapWebhookConfig,
-  logger: ReturnType<typeof import('@sim/logger').createLogger>
+  logger: ReturnType<typeof import('@sim/logger').createLogger>,
+  uidValidityByMailbox: Record<string, string>
 ) {
   const existingUidByMailbox = config.lastProcessedUidByMailbox || {}
-  const mergedUidByMailbox = { ...existingUidByMailbox }
+  const prevUidValidity = config.uidValidityByMailbox || {}
+
+  const resetMailboxes = new Set(
+    Object.entries(uidValidityByMailbox)
+      .filter(
+        ([mailbox, validity]) =>
+          prevUidValidity[mailbox] !== undefined && prevUidValidity[mailbox] !== validity
+      )
+      .map(([mailbox]) => mailbox)
+  )
+
+  const mergedUidByMailbox: Record<string, number> = {}
+
+  for (const [mailbox, uid] of Object.entries(existingUidByMailbox)) {
+    if (!resetMailboxes.has(mailbox)) {
+      mergedUidByMailbox[mailbox] = uid
+    }
+  }

   for (const [mailbox, uid] of Object.entries(uidByMailbox)) {
-    mergedUidByMailbox[mailbox] = Math.max(uid, mergedUidByMailbox[mailbox] || 0)
+    if (resetMailboxes.has(mailbox)) {
+      mergedUidByMailbox[mailbox] = uid
+    } else {
+      mergedUidByMailbox[mailbox] = Math.max(uid, mergedUidByMailbox[mailbox] || 0)
+    }
   }

   await updateWebhookProviderConfig(
@@ -159,30 +224,18 @@ async function updateImapState(
     {
       lastProcessedUidByMailbox: mergedUidByMailbox,
       lastCheckedTimestamp: timestamp,
+      uidValidityByMailbox,
     },
     logger
   )
 }
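The merge above is the core of the UIDVALIDITY handling: a stored UID high-water mark is only meaningful within one UIDVALIDITY epoch, so mailboxes whose epoch changed are reset instead of max-merged. A minimal standalone sketch of the same semantics — `mergeUidState`, `UidMap`, and `ValidityMap` are illustrative names, not exports of this module:

```ts
type UidMap = Record<string, number>
type ValidityMap = Record<string, string>

/**
 * Merge freshly observed per-mailbox UIDs into stored state. A mailbox whose
 * UIDVALIDITY changed discards its stored UID and takes the new one verbatim;
 * otherwise the max of stored/observed wins.
 */
function mergeUidState(
  stored: UidMap,
  observed: UidMap,
  prevValidity: ValidityMap,
  currValidity: ValidityMap
): UidMap {
  const reset = new Set(
    Object.keys(currValidity).filter(
      (mbox) => prevValidity[mbox] !== undefined && prevValidity[mbox] !== currValidity[mbox]
    )
  )
  const merged: UidMap = {}
  for (const [mbox, uid] of Object.entries(stored)) {
    if (!reset.has(mbox)) merged[mbox] = uid // keep only mailboxes in the same epoch
  }
  for (const [mbox, uid] of Object.entries(observed)) {
    merged[mbox] = reset.has(mbox) ? uid : Math.max(uid, merged[mbox] || 0)
  }
  return merged
}

// INBOX kept its epoch (max wins); Archive changed epoch (reset to the new UID):
mergeUidState(
  { INBOX: 120, Archive: 90 },
  { INBOX: 115, Archive: 3 },
  { INBOX: '17', Archive: '17' },
  { INBOX: '17', Archive: '42' }
) // => { INBOX: 120, Archive: 3 }
```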

 async function fetchNewEmails(
+  client: ImapFlow,
   config: ImapWebhookConfig,
   requestId: string,
-  resolvedIP: string,
   logger: ReturnType<typeof import('@sim/logger').createLogger>
 ) {
-  const client = new ImapFlow({
-    host: resolvedIP,
-    servername: config.host,
-    port: config.port || 993,
-    secure: config.secure ?? true,
-    auth: {
-      user: config.username,
-      pass: config.password,
-    },
-    tls: { rejectUnauthorized: true },
-    logger: false,
-  })
-
   const emails: Array<{
     uid: number
     mailboxPath: string
@@ -193,97 +246,93 @@ async function fetchNewEmails(

   const mailboxes = getMailboxesToCheck(config)
   const latestUidByMailbox: Record<string, number> = { ...(config.lastProcessedUidByMailbox || {}) }
+  const uidValidityByMailbox: Record<string, string> = { ...(config.uidValidityByMailbox || {}) }

-  try {
-    await client.connect()
-
-    const maxEmails = config.maxEmailsPerPoll || 25
-    let totalEmailsCollected = 0
-
-    for (const mailboxPath of mailboxes) {
-      if (totalEmailsCollected >= maxEmails) break
-
-      try {
-        await client.mailboxOpen(mailboxPath)
-
-        let searchCriteria: Record<string, unknown> = { unseen: true }
-        if (config.searchCriteria) {
-          if (typeof config.searchCriteria === 'object') {
-            searchCriteria = config.searchCriteria as unknown as Record<string, unknown>
-          } else if (typeof config.searchCriteria === 'string') {
-            try {
-              searchCriteria = JSON.parse(config.searchCriteria)
-            } catch {
-              logger.warn(`[${requestId}] Invalid search criteria JSON, using default`)
-            }
-          }
-        }
-
-        const lastUidForMailbox = latestUidByMailbox[mailboxPath] || config.lastProcessedUid
-
-        if (lastUidForMailbox) {
-          searchCriteria = { ...searchCriteria, uid: `${lastUidForMailbox + 1}:*` }
-        }
-
-        if (config.lastCheckedTimestamp) {
-          const lastChecked = new Date(config.lastCheckedTimestamp)
-          const bufferTime = new Date(lastChecked.getTime() - 60000)
-          searchCriteria = { ...searchCriteria, since: bufferTime }
-        } else {
-          const oneDayAgo = new Date(Date.now() - 24 * 60 * 60 * 1000)
-          searchCriteria = { ...searchCriteria, since: oneDayAgo }
-        }
-
-        let messageUids: number[] = []
-        try {
-          const searchResult = await client.search(searchCriteria, { uid: true })
-          messageUids = searchResult === false ? [] : searchResult
-        } catch {
-          continue
-        }
-
-        if (messageUids.length === 0) continue
-
-        messageUids.sort((a, b) => a - b)
-        const remainingSlots = maxEmails - totalEmailsCollected
-        const uidsToProcess = messageUids.slice(0, remainingSlots)
-
-        if (uidsToProcess.length > 0) {
-          latestUidByMailbox[mailboxPath] = Math.max(
-            ...uidsToProcess,
-            latestUidByMailbox[mailboxPath] || 0
-          )
-        }
-
-        for await (const msg of client.fetch(
-          uidsToProcess,
-          { uid: true, envelope: true, bodyStructure: true, source: true },
-          { uid: true }
-        )) {
-          emails.push({
-            uid: msg.uid,
-            mailboxPath,
-            envelope: msg.envelope,
-            bodyStructure: msg.bodyStructure,
-            source: msg.source,
-          })
-          totalEmailsCollected++
-        }
-      } catch (mailboxError) {
-        logger.warn(`[${requestId}] Error processing mailbox ${mailboxPath}:`, mailboxError)
-      }
-    }
-
-    await client.logout()
-    return { emails, latestUidByMailbox }
-  } catch (error) {
-    try {
-      await client.logout()
-    } catch {
-      // Ignore logout errors
-    }
-    throw error
-  }
+  const maxEmails = config.maxEmailsPerPoll || 25
+  let totalEmailsCollected = 0
+
+  for (const mailboxPath of mailboxes) {
+    if (totalEmailsCollected >= maxEmails) break
+
+    try {
+      const mailbox = await client.mailboxOpen(mailboxPath)
+
+      const currentUidValidity = mailbox.uidValidity.toString()
+      const storedUidValidity = uidValidityByMailbox[mailboxPath]
+
+      if (storedUidValidity && storedUidValidity !== currentUidValidity) {
+        logger.warn(
+          `[${requestId}] UIDVALIDITY changed for ${mailboxPath} (${storedUidValidity} -> ${currentUidValidity}), discarding stored UID`
+        )
+        delete latestUidByMailbox[mailboxPath]
+      }
+      uidValidityByMailbox[mailboxPath] = currentUidValidity
+
+      let searchCriteria: Record<string, unknown> = { unseen: true }
+      if (config.searchCriteria) {
+        if (typeof config.searchCriteria === 'object') {
+          searchCriteria = config.searchCriteria as unknown as Record<string, unknown>
+        } else if (typeof config.searchCriteria === 'string') {
+          try {
+            searchCriteria = JSON.parse(config.searchCriteria)
+          } catch {
+            logger.warn(`[${requestId}] Invalid search criteria JSON, using default`)
+          }
+        }
+      }
+
+      const lastUidForMailbox = latestUidByMailbox[mailboxPath]
+
+      if (lastUidForMailbox) {
+        searchCriteria = { ...searchCriteria, uid: `${lastUidForMailbox + 1}:*` }
+      }
+
+      if (config.lastCheckedTimestamp) {
+        const lastChecked = new Date(config.lastCheckedTimestamp)
+        const bufferTime = new Date(lastChecked.getTime() - 60000)
+        searchCriteria = { ...searchCriteria, since: bufferTime }
+      } else {
+        const oneDayAgo = new Date(Date.now() - 24 * 60 * 60 * 1000)
+        searchCriteria = { ...searchCriteria, since: oneDayAgo }
+      }
+
+      let messageUids: number[] = []
+      try {
+        const searchResult = await client.search(searchCriteria, { uid: true })
+        messageUids = searchResult === false ? [] : searchResult
+      } catch {
+        continue
+      }
+
+      if (messageUids.length === 0) continue
+
+      messageUids.sort((a, b) => a - b)
+      const remainingSlots = maxEmails - totalEmailsCollected
+      const uidsToProcess = messageUids.slice(0, remainingSlots)
+
+      for await (const msg of client.fetch(
+        uidsToProcess,
+        { uid: true, envelope: true, bodyStructure: true, source: true },
+        { uid: true }
+      )) {
+        emails.push({
+          uid: msg.uid,
+          mailboxPath,
+          envelope: msg.envelope,
+          bodyStructure: msg.bodyStructure,
+          source: msg.source,
+        })
+        if (msg.uid > (latestUidByMailbox[mailboxPath] || 0)) {
+          latestUidByMailbox[mailboxPath] = msg.uid
+        }
+        totalEmailsCollected++
+      }
+    } catch (mailboxError) {
+      logger.warn(`[${requestId}] Error processing mailbox ${mailboxPath}:`, mailboxError)
+    }
+  }
+
+  return { emails, latestUidByMailbox, uidValidityByMailbox }
 }
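For context on the search above: the poller treats the highest processed UID per mailbox as a cursor and asks the server only for UIDs beyond it. A small sketch of the range construction — `buildUidRange` is an illustrative helper, not part of this module:

```ts
/** Build the IMAP UID range selecting messages after the stored cursor. */
function buildUidRange(lastProcessedUid?: number): string | undefined {
  // `${n + 1}:*` asks for UIDs greater than n; `*` is the highest UID in the mailbox.
  // Caveat: per RFC 3501 range semantics, `121:*` still matches the mailbox's last
  // message even when its UID is below 121, so callers must deduplicate downstream
  // (here, the idempotency layer and the `msg.uid >` cursor guard cover that).
  return lastProcessedUid ? `${lastProcessedUid + 1}:*` : undefined
}

buildUidRange(120) // => '121:*'
buildUidRange() // => undefined (first poll falls back to a SINCE-based window)
```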

 function getMailboxesToCheck(config: ImapWebhookConfig): string[] {
@@ -331,9 +380,7 @@ function extractTextFromSource(source: Buffer): { text: string; html: string } {
           if (lowerPart.includes('base64')) {
             try {
               text = Buffer.from(text.replace(/\s/g, ''), 'base64').toString('utf-8')
-            } catch {
-              // Keep as-is if base64 decode fails
-            }
+            } catch {}
           }
         }
       } else if (lowerPart.includes('content-type: text/html')) {
@@ -348,9 +395,7 @@ function extractTextFromSource(source: Buffer): { text: string; html: string } {
           if (lowerPart.includes('base64')) {
             try {
               html = Buffer.from(html.replace(/\s/g, ''), 'base64').toString('utf-8')
-            } catch {
-              // Keep as-is if base64 decode fails
-            }
+            } catch {}
           }
         }
       }
@@ -405,9 +450,7 @@ function extractAttachmentsFromSource(
             mimeType,
             size: buffer.length,
           })
-        } catch {
-          // Skip if decode fails
-        }
+        } catch {}
       }
     }
   }
@@ -437,34 +480,17 @@ async function processEmails(
   webhookData: PollWebhookContext['webhookData'],
   workflowData: PollWebhookContext['workflowData'],
   config: ImapWebhookConfig,
+  client: ImapFlow,
   requestId: string,
-  resolvedIP: string,
   logger: ReturnType<typeof import('@sim/logger').createLogger>
 ) {
   let processedCount = 0
   let failedCount = 0

-  const client = new ImapFlow({
-    host: resolvedIP,
-    servername: config.host,
-    port: config.port || 993,
-    secure: config.secure ?? true,
-    auth: {
-      user: config.username,
-      pass: config.password,
-    },
-    tls: { rejectUnauthorized: true },
-    logger: false,
-  })
-
   let currentOpenMailbox: string | null = null
   const lockState: { lock: MailboxLockObject | null } = { lock: null }

   try {
-    if (config.markAsRead) {
-      await client.connect()
-    }
-
     for (const email of emails) {
       try {
         await pollingIdempotency.executeWithIdempotency(
@@ -541,7 +567,7 @@ async function processEmails(
               lockState.lock = await client.getMailboxLock(email.mailboxPath)
               currentOpenMailbox = email.mailboxPath
             }
-            await client.messageFlagsAdd({ uid: email.uid }, ['\\Seen'], { uid: true })
+            await client.messageFlagsAdd(email.uid, ['\\Seen'], { uid: true })
           } catch (flagError) {
             logger.warn(
               `[${requestId}] Failed to mark message ${email.uid} as read:`,
@@ -565,15 +591,10 @@ async function processEmails(
       }
     }
   } finally {
-    if (config.markAsRead) {
+    if (lockState.lock) {
       try {
-        if (lockState.lock) {
-          lockState.lock.release()
-        }
-        await client.logout()
-      } catch {
-        // Ignore logout errors
-      }
+        lockState.lock.release()
+      } catch {}
     }
   }
@@ -1,5 +1,6 @@
 import { htmlToText } from 'html-to-text'
 import { pollingIdempotency } from '@/lib/core/idempotency/service'
+import { fetchWithRetry } from '@/lib/knowledge/documents/utils'
 import type { PollingProviderHandler, PollWebhookContext } from '@/lib/webhooks/polling/types'
 import {
   markWebhookFailed,
@@ -166,6 +167,12 @@ export const outlookPollingHandler: PollingProviderHandler = {
   },
 }

+/** Hard cap on total emails fetched per poll to prevent unbounded pagination loops. */
+const OUTLOOK_HARD_MAX_EMAILS = 200
+
+/** Number of items to request per Graph API page. Decoupled from the total cap so pagination actually runs. */
+const OUTLOOK_PAGE_SIZE = 50
+
 async function fetchNewOutlookEmails(
   accessToken: string,
   config: OutlookWebhookConfig,
@@ -181,53 +188,77 @@ async function fetchNewOutlookEmails(
     'id,conversationId,subject,bodyPreview,body,from,toRecipients,ccRecipients,receivedDateTime,sentDateTime,hasAttachments,isRead,parentFolderId'
   )
   params.append('$orderby', 'receivedDateTime desc')
-  params.append('$top', (config.maxEmailsPerPoll || 25).toString())
+  const maxEmails = Math.min(config.maxEmailsPerPoll || 25, OUTLOOK_HARD_MAX_EMAILS)
+  params.append('$top', OUTLOOK_PAGE_SIZE.toString())

   if (config.lastCheckedTimestamp) {
     const lastChecked = new Date(config.lastCheckedTimestamp)
     const bufferTime = new Date(lastChecked.getTime() - 60000)
     params.append('$filter', `receivedDateTime gt ${bufferTime.toISOString()}`)
   }

-  const fullUrl = `${apiUrl}?${params.toString()}`
-  logger.info(`[${requestId}] Fetching emails from: ${fullUrl}`)
-
-  const response = await fetch(fullUrl, {
-    headers: {
-      Authorization: `Bearer ${accessToken}`,
-      'Content-Type': 'application/json',
-    },
-  })
-
-  if (!response.ok) {
-    const errorData = await response.json().catch(() => ({ error: { message: 'Unknown error' } }))
-    logger.error(`[${requestId}] Microsoft Graph API error:`, {
-      status: response.status,
-      statusText: response.statusText,
-      error: errorData,
-    })
-    throw new Error(
-      `Microsoft Graph API error: ${response.status} ${response.statusText} - ${JSON.stringify(errorData)}`
-    )
-  }
-
-  const data = await response.json()
-  const emails = data.value || []
+  const allEmails: OutlookEmail[] = []
+  let nextUrl: string | undefined = `${apiUrl}?${params.toString()}`
+  logger.info(`[${requestId}] Fetching emails from: ${nextUrl}`)
+
+  while (nextUrl && allEmails.length < maxEmails) {
+    const response = await fetchWithRetry(nextUrl, {
+      headers: {
+        Authorization: `Bearer ${accessToken}`,
+      },
+    })
+
+    if (!response.ok) {
+      const errorData = await response
+        .json()
+        .catch(() => ({ error: { message: 'Unknown error' } }))
+      logger.error(`[${requestId}] Microsoft Graph API error:`, {
+        status: response.status,
+        statusText: response.statusText,
+        error: errorData,
+      })
+      throw new Error(
+        `Microsoft Graph API error: ${response.status} ${response.statusText} - ${JSON.stringify(errorData)}`
+      )
+    }
+
+    const data = await response.json()
+    const pageEmails: OutlookEmail[] = data.value || []
+    const remaining = maxEmails - allEmails.length
+    allEmails.push(...pageEmails.slice(0, remaining))
+
+    nextUrl =
+      allEmails.length < maxEmails ? (data['@odata.nextLink'] as string | undefined) : undefined
+
+    if (pageEmails.length === 0) break
+  }
+
+  logger.info(`[${requestId}] Fetched ${allEmails.length} emails total`)
+
+  const emails = allEmails

   let resolvedFolderIds: Map<string, string> | undefined
+  let skipFolderFilter = false
   if (config.folderIds && config.folderIds.length > 0) {
-    const hasWellKnownFolders = config.folderIds.some(isWellKnownFolderName)
-    if (hasWellKnownFolders) {
+    const wellKnownFolders = config.folderIds.filter(isWellKnownFolderName)
+    if (wellKnownFolders.length > 0) {
       resolvedFolderIds = await resolveWellKnownFolderIds(
         accessToken,
         config.folderIds,
         requestId,
         logger
       )
+      if (resolvedFolderIds.size < wellKnownFolders.length) {
+        logger.warn(
+          `[${requestId}] Could not resolve all well-known folders (${resolvedFolderIds.size}/${wellKnownFolders.length}) — skipping folder filter to avoid incorrect results`
+        )
+        skipFolderFilter = true
+      }
     }
   }

-  const filteredEmails = filterEmailsByFolder(emails, config, resolvedFolderIds)
+  const filteredEmails = skipFolderFilter
+    ? emails
+    : filterEmailsByFolder(emails, config, resolvedFolderIds)

   logger.info(
     `[${requestId}] Fetched ${emails.length} emails, ${filteredEmails.length} after filtering`
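The rewritten fetch turns one capped request into standard Graph pagination: request `OUTLOOK_PAGE_SIZE` items per page, follow `@odata.nextLink`, and stop at the poll cap or on an empty page. A self-contained sketch of the same loop — `collectGraphPages` is illustrative, not an export of this file:

```ts
/** Follow @odata.nextLink pages until `max` items are collected or pages run out. */
async function collectGraphPages<T>(
  firstUrl: string,
  accessToken: string,
  max: number
): Promise<T[]> {
  const items: T[] = []
  let nextUrl: string | undefined = firstUrl
  while (nextUrl && items.length < max) {
    const res = await fetch(nextUrl, {
      headers: { Authorization: `Bearer ${accessToken}` },
    })
    if (!res.ok) throw new Error(`Graph error: ${res.status} ${res.statusText}`)
    const page = (await res.json()) as { value?: T[]; '@odata.nextLink'?: string }
    const pageItems = page.value ?? []
    items.push(...pageItems.slice(0, max - items.length))
    // An empty page or a missing nextLink ends the walk; the cap wins over the link.
    if (pageItems.length === 0) break
    nextUrl = items.length < max ? page['@odata.nextLink'] : undefined
  }
  return items
}
```

Keeping the page size separate from the cap matters: with the old single `$top` request, raising `maxEmailsPerPoll` above one page silently dropped everything past the first response.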
@@ -262,12 +293,14 @@ async function resolveWellKnownFolderId(
   logger: ReturnType<typeof import('@sim/logger').createLogger>
 ): Promise<string | null> {
   try {
-    const response = await fetch(`https://graph.microsoft.com/v1.0/me/mailFolders/${folderName}`, {
-      headers: {
-        Authorization: `Bearer ${accessToken}`,
-        'Content-Type': 'application/json',
-      },
-    })
+    const response = await fetchWithRetry(
+      `https://graph.microsoft.com/v1.0/me/mailFolders/${folderName}`,
+      {
+        headers: {
+          Authorization: `Bearer ${accessToken}`,
+        },
+      }
+    )

     if (!response.ok) {
       logger.warn(
@@ -455,12 +488,11 @@ async function downloadOutlookAttachments(
   const attachments: OutlookAttachment[] = []

   try {
-    const response = await fetch(
+    const response = await fetchWithRetry(
       `https://graph.microsoft.com/v1.0/me/messages/${messageId}/attachments`,
       {
         headers: {
           Authorization: `Bearer ${accessToken}`,
-          'Content-Type': 'application/json',
         },
       }
     )
@@ -511,14 +543,17 @@ async function markOutlookEmailAsRead(
   logger: ReturnType<typeof import('@sim/logger').createLogger>
 ) {
   try {
-    const response = await fetch(`https://graph.microsoft.com/v1.0/me/messages/${messageId}`, {
-      method: 'PATCH',
-      headers: {
-        Authorization: `Bearer ${accessToken}`,
-        'Content-Type': 'application/json',
-      },
-      body: JSON.stringify({ isRead: true }),
-    })
+    const response = await fetchWithRetry(
+      `https://graph.microsoft.com/v1.0/me/messages/${messageId}`,
+      {
+        method: 'PATCH',
+        headers: {
+          Authorization: `Bearer ${accessToken}`,
+          'Content-Type': 'application/json',
+        },
+        body: JSON.stringify({ isRead: true }),
+      }
+    )

     if (!response.ok) {
       logger.error(
@@ -12,7 +12,7 @@ import {
 } from '@/lib/webhooks/polling/utils'
 import { processPolledWebhookEvent } from '@/lib/webhooks/processor'

-const MAX_GUIDS_TO_TRACK = 100
+const MAX_GUIDS_TO_TRACK = 500

 interface RssWebhookConfig {
   feedUrl: string
@@ -87,10 +87,15 @@ export const rssPollingHandler: PollingProviderHandler = {
   }

   const now = new Date()
-  const { feed, items: newItems } = await fetchNewRssItems(config, requestId, logger)
+  const {
+    feed,
+    items: newItems,
+    etag,
+    lastModified,
+  } = await fetchNewRssItems(config, requestId, logger)

   if (!newItems.length) {
-    await updateRssState(webhookId, now.toISOString(), [], config, logger)
+    await updateRssState(webhookId, now.toISOString(), [], config, logger, etag, lastModified)
     await markWebhookSuccess(webhookId, logger)
     logger.info(`[${requestId}] No new items found for webhook ${webhookId}`)
     return 'success'
@@ -108,10 +113,23 @@ export const rssPollingHandler: PollingProviderHandler = {
   )

   const newGuids = newItems
-    .map((item) => item.guid || item.link || '')
+    .map(
+      (item) =>
+        item.guid ||
+        item.link ||
+        (item.title && item.pubDate ? `${item.title}-${item.pubDate}` : '')
+    )
     .filter((guid) => guid.length > 0)

-  await updateRssState(webhookId, now.toISOString(), newGuids, config, logger)
+  await updateRssState(
+    webhookId,
+    now.toISOString(),
+    newGuids,
+    config,
+    logger,
+    etag,
+    lastModified
+  )

   if (failedCount > 0 && processedCount === 0) {
     await markWebhookFailed(webhookId, logger)
@@ -139,7 +157,9 @@ async function updateRssState(
   timestamp: string,
   newGuids: string[],
   config: RssWebhookConfig,
-  logger: ReturnType<typeof import('@sim/logger').createLogger>
+  logger: ReturnType<typeof import('@sim/logger').createLogger>,
+  etag?: string,
+  lastModified?: string
 ) {
   const existingGuids = config.lastSeenGuids || []
   const allGuids = [...newGuids, ...existingGuids].slice(0, MAX_GUIDS_TO_TRACK)
@@ -149,6 +169,8 @@ async function updateRssState(
     {
       lastCheckedTimestamp: timestamp,
       lastSeenGuids: allGuids,
+      ...(etag !== undefined ? { etag } : {}),
+      ...(lastModified !== undefined ? { lastModified } : {}),
     },
     logger
   )
@@ -158,7 +180,7 @@ async function fetchNewRssItems(
   config: RssWebhookConfig,
   requestId: string,
   logger: ReturnType<typeof import('@sim/logger').createLogger>
-): Promise<{ feed: RssFeed; items: RssItem[] }> {
+): Promise<{ feed: RssFeed; items: RssItem[]; etag?: string; lastModified?: string }> {
   try {
     const urlValidation = await validateUrlWithDNS(config.feedUrl, 'feedUrl')
     if (!urlValidation.isValid) {
@@ -166,24 +188,45 @@ async function fetchNewRssItems(
       throw new Error(`Invalid RSS feed URL: ${urlValidation.error}`)
     }

+    const headers: Record<string, string> = {
+      'User-Agent': 'Sim/1.0 RSS Poller',
+      Accept: 'application/rss+xml, application/xml, text/xml, */*',
+    }
+    if (config.etag) {
+      headers['If-None-Match'] = config.etag
+    }
+    if (config.lastModified) {
+      headers['If-Modified-Since'] = config.lastModified
+    }
+
     const response = await secureFetchWithPinnedIP(config.feedUrl, urlValidation.resolvedIP!, {
-      headers: {
-        'User-Agent': 'Sim/1.0 RSS Poller',
-        Accept: 'application/rss+xml, application/xml, text/xml, */*',
-      },
+      headers,
       timeout: 30000,
     })

+    if (response.status === 304) {
+      logger.info(`[${requestId}] RSS feed not modified (304) for ${config.feedUrl}`)
+      return {
+        feed: { items: [] } as RssFeed,
+        items: [],
+        etag: response.headers.get('etag') ?? config.etag,
+        lastModified: response.headers.get('last-modified') ?? config.lastModified,
+      }
+    }
+
     if (!response.ok) {
       await response.text().catch(() => {})
       throw new Error(`Failed to fetch RSS feed: ${response.status} ${response.statusText}`)
     }

+    const newEtag = response.headers.get('etag') ?? undefined
+    const newLastModified = response.headers.get('last-modified') ?? undefined
+
     const xmlContent = await response.text()
     const feed = await parser.parseString(xmlContent)

     if (!feed.items || !feed.items.length) {
-      return { feed: feed as RssFeed, items: [] }
+      return { feed: feed as RssFeed, items: [], etag: newEtag, lastModified: newLastModified }
     }

     const lastCheckedTime = config.lastCheckedTimestamp
@@ -192,7 +235,10 @@ async function fetchNewRssItems(
     const lastSeenGuids = new Set(config.lastSeenGuids || [])

     const newItems = feed.items.filter((item) => {
-      const itemGuid = item.guid || item.link || ''
+      const itemGuid =
+        item.guid ||
+        item.link ||
+        (item.title && item.pubDate ? `${item.title}-${item.pubDate}` : '')

       if (itemGuid && lastSeenGuids.has(itemGuid)) {
         return false
@@ -220,7 +266,12 @@ async function fetchNewRssItems(
       `[${requestId}] Found ${newItems.length} new items (processing ${limitedItems.length})`
     )

-    return { feed: feed as RssFeed, items: limitedItems as RssItem[] }
+    return {
+      feed: feed as RssFeed,
+      items: limitedItems as RssItem[],
+      etag: newEtag,
+      lastModified: newLastModified,
+    }
   } catch (error) {
     const errorMessage = error instanceof Error ? error.message : 'Unknown error'
     logger.error(`[${requestId}] Error fetching RSS feed:`, errorMessage)
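The ETag / Last-Modified plumbing above is plain HTTP conditional GET: replay the validators from the previous response and treat 304 as "nothing changed". A minimal sketch using the global `fetch` (the handler itself goes through `secureFetchWithPinnedIP` for SSRF protection); `conditionalFetch` and `FeedCacheState` are illustrative names:

```ts
interface FeedCacheState {
  etag?: string
  lastModified?: string
}

/** Fetch a feed conditionally; returns a null body on 304 Not Modified. */
async function conditionalFetch(
  url: string,
  state: FeedCacheState
): Promise<{ body: string | null; state: FeedCacheState }> {
  const headers: Record<string, string> = {}
  if (state.etag) headers['If-None-Match'] = state.etag
  if (state.lastModified) headers['If-Modified-Since'] = state.lastModified

  const res = await fetch(url, { headers })
  if (res.status === 304) {
    // The server confirmed our cached validators are still current: skip parsing entirely.
    return { body: null, state }
  }
  if (!res.ok) throw new Error(`Feed fetch failed: ${res.status}`)
  return {
    body: await res.text(),
    state: {
      etag: res.headers.get('etag') ?? state.etag,
      lastModified: res.headers.get('last-modified') ?? state.lastModified,
    },
  }
}
```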
@@ -241,7 +292,17 @@ async function processRssItems(

   for (const item of items) {
     try {
-      const itemGuid = item.guid || item.link || `${item.title}-${item.pubDate}`
+      const itemGuid =
+        item.guid ||
+        item.link ||
+        (item.title && item.pubDate ? `${item.title}-${item.pubDate}` : '')
+
+      if (!itemGuid) {
+        logger.warn(
+          `[${requestId}] Skipping RSS item with no identifiable GUID for webhook ${webhookData.id}`
+        )
+        continue
+      }

       await pollingIdempotency.executeWithIdempotency(
         'rss',
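Both the filter in `fetchNewRssItems` and the loop in `processRssItems` now derive item identity by the same rule — guid, then link, then a title+pubDate composite — and skip items with none, where the old template-literal fallback could manufacture an `undefined-undefined` key. A sketch of the shared rule (`deriveItemGuid` and `RssItemLike` are illustrative):

```ts
interface RssItemLike {
  guid?: string
  link?: string
  title?: string
  pubDate?: string
}

/** Derive a stable identity for an RSS item, or '' when none is available. */
function deriveItemGuid(item: RssItemLike): string {
  return (
    item.guid ||
    item.link ||
    (item.title && item.pubDate ? `${item.title}-${item.pubDate}` : '')
  )
}

deriveItemGuid({ title: 'Release 1.2', pubDate: 'Mon, 01 Jan 2026' })
// => 'Release 1.2-Mon, 01 Jan 2026'
deriveItemGuid({}) // => '' (caller logs a warning and skips the item)
```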
@@ -725,7 +725,15 @@ export async function processPolledWebhookEvent(
   try {
     const preprocessResult = await checkWebhookPreprocessing(foundWorkflow, foundWebhook, requestId)
     if (preprocessResult.error) {
-      return { success: false, error: 'Preprocessing failed', statusCode: 500 }
+      const errorResponse = preprocessResult.error
+      const statusCode = errorResponse.status
+      const errorBody = await errorResponse.json().catch(() => ({}))
+      const errorMessage = errorBody.error ?? 'Preprocessing failed'
+      logger.warn(`[${requestId}] Polled webhook preprocessing failed`, {
+        statusCode,
+        error: errorMessage,
+      })
+      return { success: false, error: errorMessage, statusCode }
     }

     if (foundWebhook.blockId) {
@@ -28,6 +28,7 @@ import { outlookHandler } from '@/lib/webhooks/providers/outlook'
 import { resendHandler } from '@/lib/webhooks/providers/resend'
 import { rssHandler } from '@/lib/webhooks/providers/rss'
 import { salesforceHandler } from '@/lib/webhooks/providers/salesforce'
+import { servicenowHandler } from '@/lib/webhooks/providers/servicenow'
 import { slackHandler } from '@/lib/webhooks/providers/slack'
 import { stripeHandler } from '@/lib/webhooks/providers/stripe'
 import { telegramHandler } from '@/lib/webhooks/providers/telegram'
@@ -72,6 +73,7 @@ const PROVIDER_HANDLERS: Record<string, WebhookProviderHandler> = {
   outlook: outlookHandler,
   rss: rssHandler,
   salesforce: salesforceHandler,
+  servicenow: servicenowHandler,
   slack: slackHandler,
   stripe: stripeHandler,
   telegram: telegramHandler,
57 apps/sim/lib/webhooks/providers/servicenow.ts Normal file
@@ -0,0 +1,57 @@
import { createLogger } from '@sim/logger'
import { NextResponse } from 'next/server'
import type {
  AuthContext,
  EventMatchContext,
  WebhookProviderHandler,
} from '@/lib/webhooks/providers/types'
import { verifyTokenAuth } from '@/lib/webhooks/providers/utils'

const logger = createLogger('WebhookProvider:ServiceNow')

function asRecord(body: unknown): Record<string, unknown> {
  return body && typeof body === 'object' && !Array.isArray(body)
    ? (body as Record<string, unknown>)
    : {}
}

export const servicenowHandler: WebhookProviderHandler = {
  verifyAuth({ request, requestId, providerConfig }: AuthContext): NextResponse | null {
    const secret = providerConfig.webhookSecret as string | undefined
    if (!secret?.trim()) {
      logger.warn(`[${requestId}] ServiceNow webhook missing webhookSecret — rejecting`)
      return new NextResponse('Unauthorized - Webhook secret not configured', { status: 401 })
    }

    if (
      !verifyTokenAuth(request, secret.trim(), 'x-sim-webhook-secret') &&
      !verifyTokenAuth(request, secret.trim())
    ) {
      logger.warn(`[${requestId}] ServiceNow webhook secret verification failed`)
      return new NextResponse('Unauthorized - Invalid webhook secret', { status: 401 })
    }

    return null
  },

  async matchEvent({ webhook, workflow, body, requestId, providerConfig }: EventMatchContext) {
    const triggerId = providerConfig.triggerId as string | undefined
    if (!triggerId) {
      return true
    }

    const { isServiceNowEventMatch } = await import('@/triggers/servicenow/utils')
    const configuredTableName = providerConfig.tableName as string | undefined
    const obj = asRecord(body)

    if (!isServiceNowEventMatch(triggerId, obj, configuredTableName)) {
      logger.debug(
        `[${requestId}] ServiceNow event mismatch for trigger ${triggerId}. Skipping execution.`,
        { webhookId: webhook.id, workflowId: workflow.id, triggerId }
      )
      return false
    }

    return true
  },
}
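The handler accepts the shared secret via the dedicated `x-sim-webhook-secret` header or the generic token location checked by `verifyTokenAuth`. A hypothetical sender-side sketch — the URL, secret, and payload field names are placeholders, since the actual body shape is whatever the ServiceNow business rule posts:

```ts
// Illustrative only: payload keys below are assumptions, not a documented contract.
async function sendServiceNowEvent(webhookUrl: string, secret: string) {
  const res = await fetch(webhookUrl, {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      'x-sim-webhook-secret': secret, // checked by verifyTokenAuth above
    },
    body: JSON.stringify({
      table_name: 'incident', // hypothetical field consumed by isServiceNowEventMatch
      sys_id: 'abc123',
      action: 'inserted',
    }),
  })
  if (res.status === 401) throw new Error('Webhook secret rejected')
  return res
}
```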
@@ -14,7 +14,10 @@ import {
   normalizeVariables,
   sanitizeVariable,
 } from './normalize'
-import { formatValueForDisplay, resolveValueForDisplay } from './resolve-values'
+import { formatValueForDisplay, resolveFieldLabel, resolveValueForDisplay } from './resolve-values'
+
+const MAX_CHANGES_PER_BLOCK = 6
+const MAX_EDGE_DETAILS = 3

 const logger = createLogger('WorkflowComparison')

@@ -45,10 +48,22 @@ export interface WorkflowDiffSummary {
   addedBlocks: Array<{ id: string; type: string; name?: string }>
   removedBlocks: Array<{ id: string; type: string; name?: string }>
   modifiedBlocks: Array<{ id: string; type: string; name?: string; changes: FieldChange[] }>
-  edgeChanges: { added: number; removed: number }
+  edgeChanges: {
+    added: number
+    removed: number
+    addedDetails: Array<{ sourceName: string; targetName: string }>
+    removedDetails: Array<{ sourceName: string; targetName: string }>
+  }
   loopChanges: { added: number; removed: number; modified: number }
   parallelChanges: { added: number; removed: number; modified: number }
-  variableChanges: { added: number; removed: number; modified: number }
+  variableChanges: {
+    added: number
+    removed: number
+    modified: number
+    addedNames: string[]
+    removedNames: string[]
+    modifiedNames: string[]
+  }
   hasChanges: boolean
 }

@@ -63,10 +78,17 @@ export function generateWorkflowDiffSummary(
     addedBlocks: [],
     removedBlocks: [],
     modifiedBlocks: [],
-    edgeChanges: { added: 0, removed: 0 },
+    edgeChanges: { added: 0, removed: 0, addedDetails: [], removedDetails: [] },
     loopChanges: { added: 0, removed: 0, modified: 0 },
     parallelChanges: { added: 0, removed: 0, modified: 0 },
-    variableChanges: { added: 0, removed: 0, modified: 0 },
+    variableChanges: {
+      added: 0,
+      removed: 0,
+      modified: 0,
+      addedNames: [],
+      removedNames: [],
+      modifiedNames: [],
+    },
     hasChanges: false,
   }
@@ -79,10 +101,28 @@ export function generateWorkflowDiffSummary(
       name: block.name,
     })
   }
-  result.edgeChanges.added = (currentState.edges || []).length
+
+  const edges = currentState.edges || []
+  result.edgeChanges.added = edges.length
+  for (const edge of edges) {
+    const sourceBlock = currentBlocks[edge.source]
+    const targetBlock = currentBlocks[edge.target]
+    result.edgeChanges.addedDetails.push({
+      sourceName: sourceBlock?.name || sourceBlock?.type || edge.source,
+      targetName: targetBlock?.name || targetBlock?.type || edge.target,
+    })
+  }
+
   result.loopChanges.added = Object.keys(currentState.loops || {}).length
   result.parallelChanges.added = Object.keys(currentState.parallels || {}).length
-  result.variableChanges.added = Object.keys(currentState.variables || {}).length
+
+  const variables = currentState.variables || {}
+  const varEntries = Object.entries(variables)
+  result.variableChanges.added = varEntries.length
+  for (const [id, variable] of varEntries) {
+    result.variableChanges.addedNames.push((variable as { name?: string }).name || id)
+  }
+
   result.hasChanges = true
   return result
 }
@@ -121,7 +161,6 @@ export function generateWorkflowDiffSummary(
     const previousBlock = previousBlocks[id]
     const changes: FieldChange[] = []

-    // Use shared helpers for block field extraction (single source of truth)
     const {
       blockRest: currentRest,
       normalizedData: currentDataRest,
@@ -156,8 +195,6 @@ export function generateWorkflowDiffSummary(
         newValue: currentBlock.enabled,
       })
     }
-    // Check other block properties (boolean fields)
-    // Use !! to normalize: null/undefined/false are all equivalent (falsy)
     const blockFields = ['horizontalHandles', 'advancedMode', 'triggerMode', 'locked'] as const
     for (const field of blockFields) {
       if (!!currentBlock[field] !== !!previousBlock[field]) {
@@ -169,15 +206,27 @@ export function generateWorkflowDiffSummary(
       }
     }
     if (normalizedStringify(currentDataRest) !== normalizedStringify(previousDataRest)) {
-      changes.push({ field: 'data', oldValue: previousDataRest, newValue: currentDataRest })
+      const allDataKeys = new Set([
+        ...Object.keys(currentDataRest),
+        ...Object.keys(previousDataRest),
+      ])
+      for (const key of allDataKeys) {
+        if (
+          normalizedStringify(currentDataRest[key]) !== normalizedStringify(previousDataRest[key])
+        ) {
+          changes.push({
+            field: `data.${key}`,
+            oldValue: previousDataRest[key] ?? null,
+            newValue: currentDataRest[key] ?? null,
+          })
+        }
+      }
     }
   }

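Walking the union of keys lets the summary report `data.loopType` instead of dumping two whole `data` objects whenever any nested value differs. A reduced sketch of the per-key comparison, with `JSON.stringify` standing in for the project's `normalizedStringify` (`diffDataKeys` and `FieldChangeLike` are illustrative):

```ts
type FieldChangeLike = { field: string; oldValue: unknown; newValue: unknown }

/** Emit one change entry per data key whose serialized value differs. */
function diffDataKeys(
  current: Record<string, unknown>,
  previous: Record<string, unknown>
): FieldChangeLike[] {
  const changes: FieldChangeLike[] = []
  const allKeys = new Set([...Object.keys(current), ...Object.keys(previous)])
  for (const key of allKeys) {
    if (JSON.stringify(current[key]) !== JSON.stringify(previous[key])) {
      changes.push({
        field: `data.${key}`,
        oldValue: previous[key] ?? null,
        newValue: current[key] ?? null,
      })
    }
  }
  return changes
}

diffDataKeys({ loopType: 'forEach', width: 300 }, { loopType: 'for', width: 300 })
// => [{ field: 'data.loopType', oldValue: 'for', newValue: 'forEach' }]
```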
     // Normalize trigger config values for both states before comparison
     const normalizedCurrentSubs = normalizeTriggerConfigValues(currentSubBlocks)
     const normalizedPreviousSubs = normalizeTriggerConfigValues(previousSubBlocks)

     // Compare subBlocks using shared helper for filtering (single source of truth)
     const allSubBlockIds = filterSubBlockIds([
       ...new Set([...Object.keys(normalizedCurrentSubs), ...Object.keys(normalizedPreviousSubs)]),
     ])
@@ -195,11 +244,9 @@ export function generateWorkflowDiffSummary(
         continue
       }

-      // Use shared helper for subBlock value normalization (single source of truth)
       const currentValue = normalizeSubBlockValue(subId, currentSub.value)
       const previousValue = normalizeSubBlockValue(subId, previousSub.value)

       // For string values, compare directly to catch even small text changes
       if (typeof currentValue === 'string' && typeof previousValue === 'string') {
         if (currentValue !== previousValue) {
           changes.push({ field: subId, oldValue: previousSub.value, newValue: currentSub.value })
@@ -212,7 +259,6 @@ export function generateWorkflowDiffSummary(
         }
       }

-      // Use shared helper for subBlock REST extraction (single source of truth)
       const currentSubRest = extractSubBlockRest(currentSub)
       const previousSubRest = extractSubBlockRest(previousSub)

@@ -240,11 +286,30 @@ export function generateWorkflowDiffSummary(
   const currentEdgeSet = new Set(currentEdges.map(normalizedStringify))
   const previousEdgeSet = new Set(previousEdges.map(normalizedStringify))

-  for (const edge of currentEdgeSet) {
-    if (!previousEdgeSet.has(edge)) result.edgeChanges.added++
+  const resolveBlockName = (blockId: string): string => {
+    const block = currentBlocks[blockId] || previousBlocks[blockId]
+    return block?.name || block?.type || blockId
   }
-  for (const edge of previousEdgeSet) {
-    if (!currentEdgeSet.has(edge)) result.edgeChanges.removed++
+
+  for (const edgeStr of currentEdgeSet) {
+    if (!previousEdgeSet.has(edgeStr)) {
+      result.edgeChanges.added++
+      const edge = JSON.parse(edgeStr) as { source: string; target: string }
+      result.edgeChanges.addedDetails.push({
+        sourceName: resolveBlockName(edge.source),
+        targetName: resolveBlockName(edge.target),
+      })
+    }
+  }
+  for (const edgeStr of previousEdgeSet) {
+    if (!currentEdgeSet.has(edgeStr)) {
+      result.edgeChanges.removed++
+      const edge = JSON.parse(edgeStr) as { source: string; target: string }
+      result.edgeChanges.removedDetails.push({
+        sourceName: resolveBlockName(edge.source),
+        targetName: resolveBlockName(edge.target),
+      })
+    }
   }
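Serializing each edge with `normalizedStringify` reduces edge comparison to plain set difference, and parsing a differing entry back out recovers the endpoints for display names. A reduced sketch of the technique, using `JSON.stringify` in place of the project's stable serializer (`diffEdges` is illustrative):

```ts
interface Edge {
  source: string
  target: string
}

/** Diff two edge lists by value, returning the added and removed edges. */
function diffEdges(current: Edge[], previous: Edge[]) {
  // Fixed key order in the literal keeps serialization stable for this sketch.
  const key = (e: Edge) => JSON.stringify({ source: e.source, target: e.target })
  const currentSet = new Set(current.map(key))
  const previousSet = new Set(previous.map(key))
  const added = [...currentSet].filter((k) => !previousSet.has(k)).map((k) => JSON.parse(k) as Edge)
  const removed = [...previousSet].filter((k) => !currentSet.has(k)).map((k) => JSON.parse(k) as Edge)
  return { added, removed }
}

diffEdges([{ source: 'agent', target: 'slack' }], [{ source: 'agent', target: 'gmail' }])
// => { added: [{ source: 'agent', target: 'slack' }], removed: [{ source: 'agent', target: 'gmail' }] }
```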

   const currentLoops = currentState.loops || {}
@@ -296,8 +361,18 @@ export function generateWorkflowDiffSummary(
   const currentVarIds = Object.keys(currentVars)
   const previousVarIds = Object.keys(previousVars)

-  result.variableChanges.added = currentVarIds.filter((id) => !previousVarIds.includes(id)).length
-  result.variableChanges.removed = previousVarIds.filter((id) => !currentVarIds.includes(id)).length
+  for (const id of currentVarIds) {
+    if (!previousVarIds.includes(id)) {
+      result.variableChanges.added++
+      result.variableChanges.addedNames.push(currentVars[id].name || id)
+    }
+  }
+  for (const id of previousVarIds) {
+    if (!currentVarIds.includes(id)) {
+      result.variableChanges.removed++
+      result.variableChanges.removedNames.push(previousVars[id].name || id)
+    }
+  }

   for (const id of currentVarIds) {
     if (!previousVarIds.includes(id)) continue
@@ -305,6 +380,7 @@ export function generateWorkflowDiffSummary(
     const previousVar = normalizeValue(sanitizeVariable(previousVars[id]))
     if (normalizedStringify(currentVar) !== normalizedStringify(previousVar)) {
       result.variableChanges.modified++
+      result.variableChanges.modifiedNames.push(currentVars[id].name || id)
     }
   }
@@ -349,56 +425,24 @@ export function formatDiffSummaryForDescription(summary: WorkflowDiffSummary): s

   for (const block of summary.modifiedBlocks) {
     const name = block.name || block.type
-    for (const change of block.changes.slice(0, 3)) {
+    const meaningfulChanges = block.changes.filter((c) => !c.field.endsWith('.properties'))
+    for (const change of meaningfulChanges.slice(0, MAX_CHANGES_PER_BLOCK)) {
+      const fieldLabel = resolveFieldLabel(block.type, change.field)
       const oldStr = formatValueForDisplay(change.oldValue)
       const newStr = formatValueForDisplay(change.newValue)
-      changes.push(`Modified ${name}: ${change.field} changed from "${oldStr}" to "${newStr}"`)
+      changes.push(`Modified ${name}: ${fieldLabel} changed from "${oldStr}" to "${newStr}"`)
     }
-    if (block.changes.length > 3) {
-      changes.push(` ...and ${block.changes.length - 3} more changes in ${name}`)
+    if (meaningfulChanges.length > MAX_CHANGES_PER_BLOCK) {
+      changes.push(
+        ` ...and ${meaningfulChanges.length - MAX_CHANGES_PER_BLOCK} more changes in ${name}`
+      )
     }
   }

-  if (summary.edgeChanges.added > 0) {
-    changes.push(`Added ${summary.edgeChanges.added} connection(s)`)
-  }
-  if (summary.edgeChanges.removed > 0) {
-    changes.push(`Removed ${summary.edgeChanges.removed} connection(s)`)
-  }
-
-  if (summary.loopChanges.added > 0) {
-    changes.push(`Added ${summary.loopChanges.added} loop(s)`)
-  }
-  if (summary.loopChanges.removed > 0) {
-    changes.push(`Removed ${summary.loopChanges.removed} loop(s)`)
-  }
-  if (summary.loopChanges.modified > 0) {
-    changes.push(`Modified ${summary.loopChanges.modified} loop(s)`)
-  }
-
-  if (summary.parallelChanges.added > 0) {
-    changes.push(`Added ${summary.parallelChanges.added} parallel group(s)`)
-  }
-  if (summary.parallelChanges.removed > 0) {
-    changes.push(`Removed ${summary.parallelChanges.removed} parallel group(s)`)
-  }
-  if (summary.parallelChanges.modified > 0) {
-    changes.push(`Modified ${summary.parallelChanges.modified} parallel group(s)`)
-  }
-
-  const varChanges: string[] = []
-  if (summary.variableChanges.added > 0) {
-    varChanges.push(`${summary.variableChanges.added} added`)
-  }
-  if (summary.variableChanges.removed > 0) {
-    varChanges.push(`${summary.variableChanges.removed} removed`)
-  }
-  if (summary.variableChanges.modified > 0) {
-    varChanges.push(`${summary.variableChanges.modified} modified`)
-  }
-  if (varChanges.length > 0) {
-    changes.push(`Variables: ${varChanges.join(', ')}`)
-  }
+  formatEdgeChanges(summary, changes)
+  formatCountChanges(summary.loopChanges, 'loop', changes)
+  formatCountChanges(summary.parallelChanges, 'parallel group', changes)
+  formatVariableChanges(summary, changes)

   return changes.join('\n')
 }
@@ -437,8 +481,9 @@ export async function formatDiffSummaryForDescriptionAsync(
   const modifiedBlockPromises = summary.modifiedBlocks.map(async (block) => {
     const name = block.name || block.type
     const blockChanges: string[] = []
+    const meaningfulChanges = block.changes.filter((c) => !c.field.endsWith('.properties'))

-    const changesToProcess = block.changes.slice(0, 3)
+    const changesToProcess = meaningfulChanges.slice(0, MAX_CHANGES_PER_BLOCK)
     const resolvedChanges = await Promise.all(
       changesToProcess.map(async (change) => {
         const context = {
@@ -455,7 +500,7 @@ export async function formatDiffSummaryForDescriptionAsync(
         ])

         return {
-          field: change.field,
+          field: resolveFieldLabel(block.type, change.field),
           oldLabel: oldResolved.displayLabel,
           newLabel: newResolved.displayLabel,
         }
@@ -468,8 +513,10 @@ export async function formatDiffSummaryForDescriptionAsync(
       )
     }

-    if (block.changes.length > 3) {
-      blockChanges.push(` ...and ${block.changes.length - 3} more changes in ${name}`)
+    if (meaningfulChanges.length > MAX_CHANGES_PER_BLOCK) {
+      blockChanges.push(
+        ` ...and ${meaningfulChanges.length - MAX_CHANGES_PER_BLOCK} more changes in ${name}`
+      )
     }

     return blockChanges
@@ -480,46 +527,10 @@ export async function formatDiffSummaryForDescriptionAsync(
     changes.push(...blockChanges)
   }

-  if (summary.edgeChanges.added > 0) {
-    changes.push(`Added ${summary.edgeChanges.added} connection(s)`)
-  }
-  if (summary.edgeChanges.removed > 0) {
-    changes.push(`Removed ${summary.edgeChanges.removed} connection(s)`)
-  }
-
-  if (summary.loopChanges.added > 0) {
-    changes.push(`Added ${summary.loopChanges.added} loop(s)`)
-  }
-  if (summary.loopChanges.removed > 0) {
-    changes.push(`Removed ${summary.loopChanges.removed} loop(s)`)
-  }
-  if (summary.loopChanges.modified > 0) {
-    changes.push(`Modified ${summary.loopChanges.modified} loop(s)`)
-  }
-
-  if (summary.parallelChanges.added > 0) {
-    changes.push(`Added ${summary.parallelChanges.added} parallel group(s)`)
-  }
-  if (summary.parallelChanges.removed > 0) {
-    changes.push(`Removed ${summary.parallelChanges.removed} parallel group(s)`)
-  }
-  if (summary.parallelChanges.modified > 0) {
-    changes.push(`Modified ${summary.parallelChanges.modified} parallel group(s)`)
-  }
-
-  const varChanges: string[] = []
-  if (summary.variableChanges.added > 0) {
-    varChanges.push(`${summary.variableChanges.added} added`)
-  }
-  if (summary.variableChanges.removed > 0) {
-    varChanges.push(`${summary.variableChanges.removed} removed`)
-  }
-  if (summary.variableChanges.modified > 0) {
-    varChanges.push(`${summary.variableChanges.modified} modified`)
-  }
-  if (varChanges.length > 0) {
-    changes.push(`Variables: ${varChanges.join(', ')}`)
-  }
+  formatEdgeChanges(summary, changes)
+  formatCountChanges(summary.loopChanges, 'loop', changes)
+  formatCountChanges(summary.parallelChanges, 'parallel group', changes)
+  formatVariableChanges(summary, changes)

   logger.info('Generated async diff description', {
     workflowId,
@@ -529,3 +540,82 @@ export async function formatDiffSummaryForDescriptionAsync(

   return changes.join('\n')
 }
+
+function formatEdgeDetailList(
+  edges: Array<{ sourceName: string; targetName: string }>,
+  total: number,
+  verb: string,
+  changes: string[]
+): void {
+  if (edges.length === 0) {
+    changes.push(`${verb} ${total} connection(s)`)
+    return
+  }
+  for (const edge of edges.slice(0, MAX_EDGE_DETAILS)) {
+    changes.push(`${verb} connection: ${edge.sourceName} -> ${edge.targetName}`)
+  }
+  if (total > MAX_EDGE_DETAILS) {
+    changes.push(` ...and ${total - MAX_EDGE_DETAILS} more ${verb.toLowerCase()} connection(s)`)
+  }
+}
+
+function formatEdgeChanges(summary: WorkflowDiffSummary, changes: string[]): void {
+  if (summary.edgeChanges.added > 0) {
+    formatEdgeDetailList(
+      summary.edgeChanges.addedDetails ?? [],
+      summary.edgeChanges.added,
+      'Added',
+      changes
+    )
+  }
+  if (summary.edgeChanges.removed > 0) {
+    formatEdgeDetailList(
+      summary.edgeChanges.removedDetails ?? [],
+      summary.edgeChanges.removed,
+      'Removed',
+      changes
+    )
+  }
+}
+
+function formatCountChanges(
+  counts: { added: number; removed: number; modified: number },
+  label: string,
+  changes: string[]
+): void {
+  if (counts.added > 0) changes.push(`Added ${counts.added} ${label}(s)`)
+  if (counts.removed > 0) changes.push(`Removed ${counts.removed} ${label}(s)`)
+  if (counts.modified > 0) changes.push(`Modified ${counts.modified} ${label}(s)`)
+}
+
+function formatVariableChanges(summary: WorkflowDiffSummary, changes: string[]): void {
+  const categories = [
+    {
+      count: summary.variableChanges.added,
+      names: summary.variableChanges.addedNames ?? [],
+      verb: 'added',
+    },
+    {
+      count: summary.variableChanges.removed,
+      names: summary.variableChanges.removedNames ?? [],
+      verb: 'removed',
+    },
+    {
+      count: summary.variableChanges.modified,
+      names: summary.variableChanges.modifiedNames ?? [],
+      verb: 'modified',
+    },
+  ] as const
+
+  const varParts: string[] = []
+  for (const { count, names, verb } of categories) {
+    if (count > 0) {
+      varParts.push(
+        names.length > 0 ? `${verb} ${names.map((n) => `"${n}"`).join(', ')}` : `${count} ${verb}`
+      )
+    }
+  }
+  if (varParts.length > 0) {
+    changes.push(`Variables: ${varParts.join(', ')}`)
+  }
+}
864 apps/sim/lib/workflows/comparison/format-description.test.ts Normal file
@@ -0,0 +1,864 @@
/**
 * @vitest-environment node
 */
import { beforeEach, describe, expect, it, vi } from 'vitest'

const { mockGetBlock } = vi.hoisted(() => ({
  mockGetBlock: vi.fn(),
}))

vi.mock('@/lib/workflows/subblocks/visibility', () => ({
  isNonEmptyValue: (v: unknown) => v !== null && v !== undefined && v !== '',
}))

vi.mock('@/triggers/constants', () => ({
  SYSTEM_SUBBLOCK_IDS: [],
  TRIGGER_RUNTIME_SUBBLOCK_IDS: [],
}))

vi.mock('@/blocks/types', () => ({
  SELECTOR_TYPES_HYDRATION_REQUIRED: [],
}))

vi.mock('@/executor/constants', () => ({
  CREDENTIAL_SET: { PREFIX: 'cred_set_' },
  isUuid: (v: string) => /^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/i.test(v),
}))

vi.mock('@/blocks/registry', () => ({
  getBlock: mockGetBlock,
  getAllBlocks: () => ({}),
  getAllBlockTypes: () => [],
  registry: {},
}))

vi.mock('@/lib/workflows/subblocks/context', () => ({
  buildSelectorContextFromBlock: vi.fn(() => ({})),
}))

vi.mock('@/hooks/queries/credential-sets', () => ({
  fetchCredentialSetById: vi.fn(),
}))

vi.mock('@/hooks/queries/oauth/oauth-credentials', () => ({
  fetchOAuthCredentialDetail: vi.fn(() => []),
}))

vi.mock('@/hooks/selectors/registry', () => ({
  getSelectorDefinition: vi.fn(() => ({ fetchList: vi.fn(() => []) })),
}))

vi.mock('@/hooks/selectors/resolution', () => ({
  resolveSelectorForSubBlock: vi.fn(),
}))

import { WorkflowBuilder } from '@sim/testing'
import type { WorkflowDiffSummary } from '@/lib/workflows/comparison/compare'
import {
  formatDiffSummaryForDescription,
  formatDiffSummaryForDescriptionAsync,
  generateWorkflowDiffSummary,
} from '@/lib/workflows/comparison/compare'
import { formatValueForDisplay, resolveFieldLabel } from '@/lib/workflows/comparison/resolve-values'

function emptyDiffSummary(overrides: Partial<WorkflowDiffSummary> = {}): WorkflowDiffSummary {
  return {
    addedBlocks: [],
    removedBlocks: [],
    modifiedBlocks: [],
    edgeChanges: { added: 0, removed: 0, addedDetails: [], removedDetails: [] },
    loopChanges: { added: 0, removed: 0, modified: 0 },
    parallelChanges: { added: 0, removed: 0, modified: 0 },
    variableChanges: {
      added: 0,
      removed: 0,
      modified: 0,
      addedNames: [],
      removedNames: [],
      modifiedNames: [],
    },
    hasChanges: false,
    ...overrides,
  }
}

beforeEach(() => {
  vi.clearAllMocks()
})

describe('resolveFieldLabel', () => {
  it('resolves subBlock id to its title', () => {
    mockGetBlock.mockReturnValue({
      subBlocks: [
        { id: 'systemPrompt', title: 'System Prompt' },
        { id: 'model', title: 'Model' },
      ],
    })
    expect(resolveFieldLabel('agent', 'systemPrompt')).toBe('System Prompt')
    expect(resolveFieldLabel('agent', 'model')).toBe('Model')
  })

  it('falls back to raw id when block not found', () => {
    mockGetBlock.mockReturnValue(null)
    expect(resolveFieldLabel('unknown_type', 'someField')).toBe('someField')
  })

  it('falls back to raw id when subBlock not found', () => {
    mockGetBlock.mockReturnValue({ subBlocks: [{ id: 'other', title: 'Other' }] })
    expect(resolveFieldLabel('agent', 'missingField')).toBe('missingField')
  })

  it('converts data.* fields to Title Case', () => {
    expect(resolveFieldLabel('agent', 'data.loopType')).toBe('Loop Type')
    expect(resolveFieldLabel('agent', 'data.canonicalModes')).toBe('Canonical Modes')
    expect(resolveFieldLabel('agent', 'data.isStarter')).toBe('Is Starter')
  })
})

describe('formatValueForDisplay', () => {
  it('handles null/undefined', () => {
    expect(formatValueForDisplay(null)).toBe('(none)')
    expect(formatValueForDisplay(undefined)).toBe('(none)')
  })

  it('handles booleans', () => {
    expect(formatValueForDisplay(true)).toBe('enabled')
    expect(formatValueForDisplay(false)).toBe('disabled')
  })

  it('truncates long strings', () => {
    const longStr = 'a'.repeat(60)
    expect(formatValueForDisplay(longStr)).toBe(`${'a'.repeat(50)}...`)
  })

  it('handles empty string', () => {
    expect(formatValueForDisplay('')).toBe('(empty)')
  })
})

describe('formatDiffSummaryForDescription', () => {
  it('returns no-changes message for empty diff', () => {
    const result = formatDiffSummaryForDescription(emptyDiffSummary())
    expect(result).toBe('No structural changes detected (configuration may have changed)')
  })

  it('uses human-readable field labels for modified blocks', () => {
    mockGetBlock.mockReturnValue({
      subBlocks: [
        { id: 'systemPrompt', title: 'System Prompt' },
        { id: 'model', title: 'Model' },
      ],
    })

    const summary = emptyDiffSummary({
      hasChanges: true,
      modifiedBlocks: [
        {
          id: 'block-1',
          type: 'agent',
          name: 'My Agent',
          changes: [
            { field: 'systemPrompt', oldValue: 'You are helpful', newValue: 'You are an expert' },
            { field: 'model', oldValue: 'gpt-4o', newValue: 'claude-sonnet-4-5' },
          ],
        },
      ],
    })

    const result = formatDiffSummaryForDescription(summary)
    expect(result).toContain(
      'Modified My Agent: System Prompt changed from "You are helpful" to "You are an expert"'
    )
    expect(result).toContain(
      'Modified My Agent: Model changed from "gpt-4o" to "claude-sonnet-4-5"'
    )
    expect(result).not.toContain('systemPrompt')
    expect(result).not.toContain('model changed')
  })

  it('filters out .properties changes', () => {
    mockGetBlock.mockReturnValue({ subBlocks: [] })

    const summary = emptyDiffSummary({
      hasChanges: true,
      modifiedBlocks: [
        {
          id: 'block-1',
          type: 'agent',
          name: 'Agent',
          changes: [
            { field: 'systemPrompt', oldValue: 'old', newValue: 'new' },
            {
              field: 'systemPrompt.properties',
              oldValue: { some: 'meta' },
              newValue: { some: 'other' },
            },
            { field: 'model.properties', oldValue: {}, newValue: { x: 1 } },
          ],
        },
      ],
    })

    const result = formatDiffSummaryForDescription(summary)
    expect(result).toContain('systemPrompt changed')
    expect(result).not.toContain('.properties')
    expect(result).not.toContain('model.properties')
  })

  it('respects MAX_CHANGES_PER_BLOCK limit of 6', () => {
    mockGetBlock.mockReturnValue({ subBlocks: [] })

    const changes = Array.from({ length: 8 }, (_, i) => ({
      field: `field${i}`,
      oldValue: `old${i}`,
      newValue: `new${i}`,
    }))

    const summary = emptyDiffSummary({
      hasChanges: true,
      modifiedBlocks: [{ id: 'b1', type: 'agent', name: 'Agent', changes }],
    })

    const result = formatDiffSummaryForDescription(summary)
    const lines = result.split('\n')
    const modifiedLines = lines.filter((l) => l.startsWith('Modified'))
    expect(modifiedLines).toHaveLength(6)
    expect(result).toContain('...and 2 more changes in Agent')
  })

  it('shows edge changes with block names', () => {
    const summary = emptyDiffSummary({
      hasChanges: true,
      edgeChanges: {
        added: 2,
        removed: 1,
        addedDetails: [
          { sourceName: 'My Agent', targetName: 'Slack' },
          { sourceName: 'Router', targetName: 'Gmail' },
        ],
        removedDetails: [{ sourceName: 'Function', targetName: 'Webhook' }],
      },
    })

    const result = formatDiffSummaryForDescription(summary)
    expect(result).toContain('Added connection: My Agent -> Slack')
    expect(result).toContain('Added connection: Router -> Gmail')
    expect(result).toContain('Removed connection: Function -> Webhook')
  })

  it('truncates edge details beyond MAX_EDGE_DETAILS', () => {
    const summary = emptyDiffSummary({
      hasChanges: true,
      edgeChanges: {
        added: 5,
        removed: 0,
        addedDetails: [
          { sourceName: 'A', targetName: 'B' },
          { sourceName: 'C', targetName: 'D' },
          { sourceName: 'E', targetName: 'F' },
          { sourceName: 'G', targetName: 'H' },
          { sourceName: 'I', targetName: 'J' },
        ],
        removedDetails: [],
      },
    })

    const result = formatDiffSummaryForDescription(summary)
    const connectionLines = result.split('\n').filter((l) => l.startsWith('Added connection'))
    expect(connectionLines).toHaveLength(3)
    expect(result).toContain('...and 2 more added connection(s)')
  })

  it('shows variable changes with names', () => {
    const summary = emptyDiffSummary({
      hasChanges: true,
      variableChanges: {
        added: 2,
        removed: 1,
        modified: 1,
        addedNames: ['counter', 'apiKey'],
        removedNames: ['oldVar'],
        modifiedNames: ['threshold'],
      },
    })

    const result = formatDiffSummaryForDescription(summary)
    expect(result).toContain(
      'Variables: added "counter", "apiKey", removed "oldVar", modified "threshold"'
    )
  })

  it('handles data.* fields with Title Case labels', () => {
    mockGetBlock.mockReturnValue({ subBlocks: [] })

    const summary = emptyDiffSummary({
      hasChanges: true,
      modifiedBlocks: [
        {
          id: 'b1',
          type: 'agent',
          name: 'Agent',
          changes: [
            { field: 'data.loopType', oldValue: 'for', newValue: 'forEach' },
            { field: 'data.isStarter', oldValue: true, newValue: false },
          ],
        },
      ],
    })

    const result = formatDiffSummaryForDescription(summary)
    expect(result).toContain('Modified Agent: Loop Type changed from "for" to "forEach"')
    expect(result).toContain('Modified Agent: Is Starter changed from "enabled" to "disabled"')
  })

  it('formats a realistic multi-block workflow change', () => {
    mockGetBlock.mockImplementation((type: string) => {
      if (type === 'agent') {
        return {
          subBlocks: [
            { id: 'systemPrompt', title: 'System Prompt' },
            { id: 'model', title: 'Model' },
            { id: 'temperature', title: 'Temperature' },
          ],
        }
      }
      if (type === 'slack') {
        return {
          subBlocks: [
            {
              id: 'operation',
              title: 'Operation',
              type: 'dropdown',
              options: [
                { id: 'slack_send_message', label: 'Send Message' },
                { id: 'slack_list_channels', label: 'List Channels' },
              ],
            },
            { id: 'channel', title: 'Channel' },
            { id: 'credential', title: 'Slack Account' },
          ],
        }
      }
      return null
    })

    const summary = emptyDiffSummary({
      hasChanges: true,
      addedBlocks: [{ id: 'b3', type: 'gmail', name: 'Gmail Notifications' }],
      removedBlocks: [{ id: 'b4', type: 'function', name: 'Legacy Transform' }],
      modifiedBlocks: [
        {
          id: 'b1',
          type: 'agent',
          name: 'AI Assistant',
          changes: [
            { field: 'model', oldValue: 'gpt-4o', newValue: 'claude-sonnet-4-5' },
            { field: 'temperature', oldValue: '0.7', newValue: '0.3' },
          ],
        },
        {
          id: 'b2',
          type: 'slack',
          name: 'Slack Alert',
          changes: [{ field: 'channel', oldValue: '#general', newValue: '#alerts' }],
        },
      ],
      edgeChanges: {
        added: 1,
        removed: 0,
        addedDetails: [{ sourceName: 'AI Assistant', targetName: 'Gmail Notifications' }],
        removedDetails: [],
      },
      variableChanges: {
        added: 1,
        removed: 0,
        modified: 0,
        addedNames: ['errorCount'],
|
||||
removedNames: [],
|
||||
modifiedNames: [],
|
||||
},
|
||||
})
|
||||
|
||||
const result = formatDiffSummaryForDescription(summary)
|
||||
|
||||
expect(result).toContain('Added block: Gmail Notifications (gmail)')
|
||||
expect(result).toContain('Removed block: Legacy Transform (function)')
|
||||
expect(result).toContain(
|
||||
'Modified AI Assistant: Model changed from "gpt-4o" to "claude-sonnet-4-5"'
|
||||
)
|
||||
expect(result).toContain('Modified AI Assistant: Temperature changed from "0.7" to "0.3"')
|
||||
expect(result).toContain('Modified Slack Alert: Channel changed from "#general" to "#alerts"')
|
||||
expect(result).toContain('Added connection: AI Assistant -> Gmail Notifications')
|
||||
expect(result).toContain('Variables: added "errorCount"')
|
||||
})
|
||||
})

describe('formatDiffSummaryForDescriptionAsync', () => {
  it('resolves dropdown values to labels', async () => {
    mockGetBlock.mockReturnValue({
      subBlocks: [
        {
          id: 'operation',
          title: 'Operation',
          type: 'dropdown',
          options: [
            { id: 'calendly_get_current_user', label: 'Get Current User' },
            { id: 'calendly_list_event_types', label: 'List Event Types' },
          ],
        },
      ],
    })

    const summary = emptyDiffSummary({
      hasChanges: true,
      modifiedBlocks: [
        {
          id: 'b1',
          type: 'calendly',
          name: 'Calendly',
          changes: [
            {
              field: 'operation',
              oldValue: 'calendly_get_current_user',
              newValue: 'calendly_list_event_types',
            },
          ],
        },
      ],
    })

    const mockState = { blocks: {} } as any
    const result = await formatDiffSummaryForDescriptionAsync(summary, mockState, 'wf-1')
    expect(result).toContain(
      'Modified Calendly: Operation changed from "Get Current User" to "List Event Types"'
    )
    expect(result).not.toContain('calendly_get_current_user')
  })

  it('uses field titles in async path', async () => {
    mockGetBlock.mockReturnValue({
      subBlocks: [{ id: 'systemPrompt', title: 'System Prompt' }],
    })

    const summary = emptyDiffSummary({
      hasChanges: true,
      modifiedBlocks: [
        {
          id: 'b1',
          type: 'agent',
          name: 'Agent',
          changes: [{ field: 'systemPrompt', oldValue: 'Be helpful', newValue: 'Be concise' }],
        },
      ],
    })

    const mockState = { blocks: {} } as any
    const result = await formatDiffSummaryForDescriptionAsync(summary, mockState, 'wf-1')
    expect(result).toContain('System Prompt')
    expect(result).not.toContain('systemPrompt')
  })

  it('filters .properties changes in async path', async () => {
    mockGetBlock.mockReturnValue({ subBlocks: [] })

    const summary = emptyDiffSummary({
      hasChanges: true,
      modifiedBlocks: [
        {
          id: 'b1',
          type: 'agent',
          name: 'Agent',
          changes: [
            { field: 'prompt', oldValue: 'old', newValue: 'new' },
            { field: 'prompt.properties', oldValue: {}, newValue: { x: 1 } },
          ],
        },
      ],
    })

    const mockState = { blocks: {} } as any
    const result = await formatDiffSummaryForDescriptionAsync(summary, mockState, 'wf-1')
    expect(result).not.toContain('.properties')
  })
})

describe('end-to-end: generateWorkflowDiffSummary + formatDiffSummaryForDescription', () => {
  beforeEach(() => {
    mockGetBlock.mockReturnValue(null)
  })

  it('detects added and removed blocks between two workflow versions', () => {
    const previous = new WorkflowBuilder()
      .addStarter('start')
      .addAgent('agent-1', undefined, 'Summarizer')
      .connect('start', 'agent-1')
      .build()

    const current = new WorkflowBuilder()
      .addStarter('start')
      .addAgent('agent-1', undefined, 'Summarizer')
      .addFunction('func-1', undefined, 'Formatter')
      .connect('start', 'agent-1')
      .connect('agent-1', 'func-1')
      .build()

    const summary = generateWorkflowDiffSummary(current, previous)
    const result = formatDiffSummaryForDescription(summary)

    expect(result).toContain('Added block: Formatter (function)')
    expect(result).toContain('Added connection: Summarizer -> Formatter')
    expect(result).not.toContain('Removed')
  })

  it('detects block removal and edge removal', () => {
    const previous = new WorkflowBuilder()
      .addStarter('start')
      .addAgent('agent-1', undefined, 'Classifier')
      .addFunction('func-1', undefined, 'Logger')
      .connect('start', 'agent-1')
      .connect('agent-1', 'func-1')
      .build()

    const current = new WorkflowBuilder()
      .addStarter('start')
      .addAgent('agent-1', undefined, 'Classifier')
      .connect('start', 'agent-1')
      .build()

    const summary = generateWorkflowDiffSummary(current, previous)
    const result = formatDiffSummaryForDescription(summary)

    expect(result).toContain('Removed block: Logger (function)')
    expect(result).toContain('Removed connection: Classifier -> Logger')
    expect(result).not.toContain('Added block')
  })

  it('detects subBlock value changes on modified blocks', () => {
    const previous = new WorkflowBuilder()
      .addStarter('start')
      .addAgent('agent-1', undefined, 'Writer')
      .connect('start', 'agent-1')
      .build()
    previous.blocks['agent-1'].subBlocks = {
      systemPrompt: { id: 'systemPrompt', value: 'You are a helpful assistant' },
      model: { id: 'model', value: 'gpt-4o' },
    }

    const current = new WorkflowBuilder()
      .addStarter('start')
      .addAgent('agent-1', undefined, 'Writer')
      .connect('start', 'agent-1')
      .build()
    current.blocks['agent-1'].subBlocks = {
      systemPrompt: { id: 'systemPrompt', value: 'You are a concise writer' },
      model: { id: 'model', value: 'claude-sonnet-4-5' },
    }

    mockGetBlock.mockReturnValue({
      subBlocks: [
        { id: 'systemPrompt', title: 'System Prompt' },
        { id: 'model', title: 'Model' },
      ],
    })

    const summary = generateWorkflowDiffSummary(current, previous)
    const result = formatDiffSummaryForDescription(summary)

    expect(result).toContain(
      'Modified Writer: System Prompt changed from "You are a helpful assistant" to "You are a concise writer"'
    )
    expect(result).toContain('Modified Writer: Model changed from "gpt-4o" to "claude-sonnet-4-5"')
  })

  it('detects loop addition with correct count', () => {
    const previous = new WorkflowBuilder()
      .addStarter('start')
      .addFunction('func-1', undefined, 'Process')
      .connect('start', 'func-1')
      .build()

    const current = new WorkflowBuilder()
      .addStarter('start')
      .addFunction('func-1', undefined, 'Process')
      .addLoop('loop-1', undefined, { iterations: 5, loopType: 'for' })
      .addLoopChild('loop-1', 'loop-body', 'function')
      .connect('start', 'func-1')
      .connect('func-1', 'loop-1')
      .build()

    const summary = generateWorkflowDiffSummary(current, previous)
    const result = formatDiffSummaryForDescription(summary)

    expect(result).toContain('Added block: Loop (loop)')
    expect(result).toContain('Added block: loop-body (function)')
    expect(result).toContain('Added 1 loop(s)')
    expect(result).toContain('Added connection: Process -> Loop')
  })

  it('detects loop removal', () => {
    const previous = new WorkflowBuilder()
      .addStarter('start')
      .addLoop('loop-1', undefined, { iterations: 3, loopType: 'for' })
      .addLoopChild('loop-1', 'loop-body', 'agent')
      .connect('start', 'loop-1')
      .build()

    const current = new WorkflowBuilder()
      .addStarter('start')
      .addAgent('agent-1', undefined, 'Direct Agent')
      .connect('start', 'agent-1')
      .build()

    const summary = generateWorkflowDiffSummary(current, previous)
    const result = formatDiffSummaryForDescription(summary)

    expect(result).toContain('Removed block: Loop (loop)')
    expect(result).toContain('Removed 1 loop(s)')
    expect(result).toContain('Added block: Direct Agent (agent)')
  })

  it('detects loop modification when iterations change', () => {
    const previous = new WorkflowBuilder()
      .addStarter('start')
      .addLoop('loop-1', undefined, { iterations: 3, loopType: 'for' })
      .addLoopChild('loop-1', 'loop-body', 'function')
      .connect('start', 'loop-1')
      .build()

    const current = new WorkflowBuilder()
      .addStarter('start')
      .addLoop('loop-1', undefined, { iterations: 10, loopType: 'for' })
      .addLoopChild('loop-1', 'loop-body', 'function')
      .connect('start', 'loop-1')
      .build()

    const summary = generateWorkflowDiffSummary(current, previous)
    const result = formatDiffSummaryForDescription(summary)

    expect(result).toContain('Modified 1 loop(s)')
  })

  it('detects parallel addition', () => {
    const previous = new WorkflowBuilder()
      .addStarter('start')
      .addFunction('func-1', undefined, 'Sequencer')
      .connect('start', 'func-1')
      .build()

    const current = new WorkflowBuilder()
      .addStarter('start')
      .addParallel('par-1', undefined, { count: 3, parallelType: 'count' })
      .addParallelChild('par-1', 'par-task-1', 'agent')
      .addParallelChild('par-1', 'par-task-2', 'function')
      .connect('start', 'par-1')
      .build()

    const summary = generateWorkflowDiffSummary(current, previous)
    const result = formatDiffSummaryForDescription(summary)

    expect(result).toContain('Added block: Parallel (parallel)')
    expect(result).toContain('Added 1 parallel group(s)')
    expect(result).toContain('Removed block: Sequencer (function)')
  })

  it('detects parallel removal', () => {
    const previous = new WorkflowBuilder()
      .addStarter('start')
      .addParallel('par-1', undefined, { count: 2 })
      .addParallelChild('par-1', 'par-task', 'function')
      .connect('start', 'par-1')
      .build()

    const current = new WorkflowBuilder()
      .addStarter('start')
      .addFunction('func-1', undefined, 'Simple Step')
      .connect('start', 'func-1')
      .build()

    const summary = generateWorkflowDiffSummary(current, previous)
    const result = formatDiffSummaryForDescription(summary)

    expect(result).toContain('Removed block: Parallel (parallel)')
    expect(result).toContain('Removed 1 parallel group(s)')
    expect(result).toContain('Added block: Simple Step (function)')
  })

  it('detects parallel modification when count changes', () => {
    const previous = new WorkflowBuilder()
      .addStarter('start')
      .addParallel('par-1', undefined, { count: 2, parallelType: 'count' })
      .addParallelChild('par-1', 'par-task', 'function')
      .connect('start', 'par-1')
      .build()

    const current = new WorkflowBuilder()
      .addStarter('start')
      .addParallel('par-1', undefined, { count: 5, parallelType: 'count' })
      .addParallelChild('par-1', 'par-task', 'function')
      .connect('start', 'par-1')
      .build()

    const summary = generateWorkflowDiffSummary(current, previous)
    const result = formatDiffSummaryForDescription(summary)

    expect(result).toContain('Modified 1 parallel group(s)')
  })

  it('detects variable additions and removals with names', () => {
    const previous = new WorkflowBuilder().addStarter('start').build()
    previous.variables = {
      v1: { id: 'v1', name: 'retryCount', type: 'number', value: 3 },
      v2: { id: 'v2', name: 'apiEndpoint', type: 'string', value: 'https://api.example.com' },
    }

    const current = new WorkflowBuilder().addStarter('start').build()
    current.variables = {
      v1: { id: 'v1', name: 'retryCount', type: 'number', value: 5 },
      v3: { id: 'v3', name: 'timeout', type: 'number', value: 30 },
    }

    const summary = generateWorkflowDiffSummary(current, previous)
    const result = formatDiffSummaryForDescription(summary)

    expect(result).toContain('Variables:')
    expect(result).toContain('added "timeout"')
    expect(result).toContain('removed "apiEndpoint"')
    expect(result).toContain('modified "retryCount"')
  })

  it('produces no-change message for identical workflows', () => {
    const workflow = new WorkflowBuilder()
      .addStarter('start')
      .addAgent('agent-1', undefined, 'Agent')
      .connect('start', 'agent-1')
      .build()

    const summary = generateWorkflowDiffSummary(workflow, workflow)
    const result = formatDiffSummaryForDescription(summary)

    expect(result).toBe('No structural changes detected (configuration may have changed)')
  })

  it('handles complex scenario: loop replaced with parallel + new connections + variables', () => {
    const previous = new WorkflowBuilder()
      .addStarter('start')
      .addLoop('loop-1', undefined, { iterations: 5 })
      .addLoopChild('loop-1', 'loop-task', 'agent')
      .addFunction('sink', undefined, 'Output')
      .connect('start', 'loop-1')
      .connect('loop-1', 'sink')
      .build()
    previous.variables = {
      v1: { id: 'v1', name: 'batchSize', type: 'number', value: 10 },
    }

    const current = new WorkflowBuilder()
      .addStarter('start')
      .addParallel('par-1', undefined, { count: 3 })
      .addParallelChild('par-1', 'par-task', 'agent')
      .addFunction('sink', undefined, 'Output')
      .addAgent('agg', undefined, 'Aggregator')
      .connect('start', 'par-1')
      .connect('par-1', 'agg')
      .connect('agg', 'sink')
      .build()
    current.variables = {
      v1: { id: 'v1', name: 'batchSize', type: 'number', value: 25 },
      v2: { id: 'v2', name: 'concurrency', type: 'number', value: 3 },
    }

    const summary = generateWorkflowDiffSummary(current, previous)
    const result = formatDiffSummaryForDescription(summary)

    expect(result).toContain('Added block: Parallel (parallel)')
    expect(result).toContain('Added block: Aggregator (agent)')
    expect(result).toContain('Removed block: Loop (loop)')
    expect(result).toContain('Added 1 parallel group(s)')
    expect(result).toContain('Removed 1 loop(s)')
    expect(result).toContain('added "concurrency"')
    expect(result).toContain('modified "batchSize"')

    const lines = result.split('\n')
    expect(lines.length).toBeGreaterThanOrEqual(7)
  })

  it('detects edge rewiring without block changes', () => {
    const previous = new WorkflowBuilder()
      .addStarter('start')
      .addAgent('a', undefined, 'Agent A')
      .addAgent('b', undefined, 'Agent B')
      .addFunction('sink', undefined, 'Output')
      .connect('start', 'a')
      .connect('a', 'sink')
      .connect('start', 'b')
      .connect('b', 'sink')
      .build()

    const current = new WorkflowBuilder()
      .addStarter('start')
      .addAgent('a', undefined, 'Agent A')
      .addAgent('b', undefined, 'Agent B')
      .addFunction('sink', undefined, 'Output')
      .connect('start', 'a')
      .connect('a', 'b')
      .connect('b', 'sink')
      .build()

    const summary = generateWorkflowDiffSummary(current, previous)
    const result = formatDiffSummaryForDescription(summary)

    expect(summary.addedBlocks).toHaveLength(0)
    expect(summary.removedBlocks).toHaveLength(0)
    expect(result).toContain('Added connection: Agent A -> Agent B')
    expect(result).toContain('Removed connection:')
    expect(result).not.toContain('Added block')
    expect(result).not.toContain('Removed block')
  })

  it('detects data field changes with human-readable labels', () => {
    const previous = new WorkflowBuilder()
      .addStarter('start')
      .addBlock('custom-1', 'function', undefined, 'Processor')
      .connect('start', 'custom-1')
      .build()
    previous.blocks['custom-1'].data = { isStarter: true, retryPolicy: 'linear' }

    const current = new WorkflowBuilder()
      .addStarter('start')
      .addBlock('custom-1', 'function', undefined, 'Processor')
      .connect('start', 'custom-1')
      .build()
    current.blocks['custom-1'].data = { isStarter: false, retryPolicy: 'exponential' }

    const summary = generateWorkflowDiffSummary(current, previous)
    const result = formatDiffSummaryForDescription(summary)

    expect(result).toContain('Is Starter')
    expect(result).toContain('Retry Policy')
    expect(result).toContain('enabled')
    expect(result).toContain('disabled')
    expect(result).toContain('linear')
    expect(result).toContain('exponential')
  })

  it('detects loop type change via loop config modification', () => {
    const previous = new WorkflowBuilder()
      .addStarter('start')
      .addLoop('loop-1', undefined, { iterations: 3, loopType: 'for' })
      .addLoopChild('loop-1', 'loop-body', 'function')
      .connect('start', 'loop-1')
      .build()

    const current = new WorkflowBuilder()
      .addStarter('start')
      .addLoop('loop-1', undefined, { iterations: 3, loopType: 'forEach' })
      .addLoopChild('loop-1', 'loop-body', 'function')
      .connect('start', 'loop-1')
      .build()

    const summary = generateWorkflowDiffSummary(current, previous)
    const result = formatDiffSummaryForDescription(summary)

    expect(result).toContain('Modified 1 loop(s)')
  })
})

@@ -9,6 +9,7 @@ import { getSelectorDefinition } from '@/hooks/selectors/registry'
import { resolveSelectorForSubBlock } from '@/hooks/selectors/resolution'
import type { SelectorContext, SelectorKey } from '@/hooks/selectors/types'
import type { WorkflowState } from '@/stores/workflows/workflow/types'
import { formatParameterLabel } from '@/tools/params'

const logger = createLogger('ResolveValues')

@@ -126,6 +127,33 @@ function extractMcpToolName(toolId: string): string {
  return withoutPrefix
}

/**
 * Resolves a subBlock field ID to its human-readable title.
 * Falls back to the raw ID if the block or subBlock is not found.
 */
export function resolveFieldLabel(blockType: string, subBlockId: string): string {
  if (subBlockId.startsWith('data.')) {
    return formatParameterLabel(subBlockId.slice(5))
  }
  const blockConfig = getBlock(blockType)
  if (!blockConfig) return subBlockId
  const subBlockConfig = blockConfig.subBlocks.find((sb) => sb.id === subBlockId)
  return subBlockConfig?.title ?? subBlockId
}
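
// Illustrative usage sketch (not part of the diff; block configs assumed from the tests above):
//   resolveFieldLabel('agent', 'systemPrompt')  -> 'System Prompt' (subBlock title)
//   resolveFieldLabel('agent', 'data.loopType') -> 'Loop Type' (via formatParameterLabel)
//   resolveFieldLabel('unknown', 'someField')   -> 'someField' (raw-ID fallback)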

/**
 * Resolves a dropdown option ID to its human-readable label.
 * Returns null if the subBlock is not a dropdown or the value is not found.
 */
function resolveDropdownLabel(subBlockConfig: SubBlockConfig, value: string): string | null {
  if (subBlockConfig.type !== 'dropdown') return null
  if (!subBlockConfig.options) return null
  const options =
    typeof subBlockConfig.options === 'function' ? subBlockConfig.options() : subBlockConfig.options
  const match = options.find((opt) => opt.id === value)
  return match?.label ?? null
}
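
// Sketch of the contract (shapes assumed from the tests above): for a dropdown whose options
// include { id: 'calendly_list_event_types', label: 'List Event Types' },
//   resolveDropdownLabel(config, 'calendly_list_event_types') -> 'List Event Types'
// while any non-dropdown subBlock or unknown option ID yields null so callers fall through.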

/**
 * Formats a value for display in diff descriptions.
 */
@@ -138,7 +166,10 @@ export function formatValueForDisplay(value: unknown): string {
  if (typeof value === 'boolean') return value ? 'enabled' : 'disabled'
  if (typeof value === 'number') return String(value)
  if (Array.isArray(value)) return `[${value.length} items]`
  if (typeof value === 'object') return `${JSON.stringify(value).slice(0, 50)}...`
  if (typeof value === 'object') {
    const json = JSON.stringify(value)
    return json.length > 50 ? `${json.slice(0, 50)}...` : json
  }
  return String(value)
}
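
// Behavior sketch for the new object branch (illustrative):
//   formatValueForDisplay(true)             -> 'enabled'
//   formatValueForDisplay([1, 2, 3])        -> '[3 items]'
//   formatValueForDisplay({ some: 'meta' }) -> '{"some":"meta"}' (short JSON no longer gets a spurious '...')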

@@ -165,7 +196,6 @@ export async function resolveValueForDisplay(
  value: unknown,
  context: ResolutionContext
): Promise<ResolvedValue> {
  // Non-string or empty values can't be resolved
  if (typeof value !== 'string' || !value) {
    return {
      original: value,
@@ -190,9 +220,8 @@ export async function resolveValueForDisplay(
      )
    : { workflowId: context.workflowId, workspaceId: context.workspaceId }

  // Credential fields (oauth-input or credential subBlockId)
  const isCredentialField =
    subBlockConfig?.type === 'oauth-input' || context.subBlockId === 'credential'
    subBlockConfig.type === 'oauth-input' || context.subBlockId === 'credential'

  if (isCredentialField && (value.startsWith(CREDENTIAL_SET.PREFIX) || isUuid(value))) {
    const label = await resolveCredential(value, context.workflowId)
@@ -202,8 +231,7 @@ export async function resolveValueForDisplay(
    return { original: value, displayLabel: semanticFallback, resolved: true }
  }

  // Workflow selector
  if (subBlockConfig?.type === 'workflow-selector' && isUuid(value)) {
  if (subBlockConfig.type === 'workflow-selector' && isUuid(value)) {
    const label = await resolveWorkflow(value, selectorCtx.workspaceId)
    if (label) {
      return { original: value, displayLabel: label, resolved: true }
@@ -211,15 +239,27 @@ export async function resolveValueForDisplay(
    return { original: value, displayLabel: semanticFallback, resolved: true }
  }

  // MCP tool selector
  if (subBlockConfig?.type === 'mcp-tool-selector') {
  if (subBlockConfig.type === 'mcp-tool-selector') {
    const toolName = extractMcpToolName(value)
    return { original: value, displayLabel: toolName, resolved: true }
  }

  // Selector types that require hydration (file-selector, sheet-selector, etc.)
  // These support external service IDs like Google Drive file IDs
  if (subBlockConfig && SELECTOR_TYPES_HYDRATION_REQUIRED.includes(subBlockConfig.type)) {
  if (subBlockConfig.type === 'dropdown') {
    try {
      const label = resolveDropdownLabel(subBlockConfig, value)
      if (label) {
        return { original: value, displayLabel: label, resolved: true }
      }
    } catch (error) {
      logger.warn('Failed to resolve dropdown label', {
        value,
        subBlockId: context.subBlockId,
        error,
      })
    }
  }

  if (SELECTOR_TYPES_HYDRATION_REQUIRED.includes(subBlockConfig.type)) {
    const resolution = resolveSelectorForSubBlock(subBlockConfig, selectorCtx)

    if (resolution?.key) {
@@ -228,22 +268,17 @@ export async function resolveValueForDisplay(
        return { original: value, displayLabel: label, resolved: true }
      }
    }
    // If resolution failed for a hydration-required type, use semantic fallback
    return { original: value, displayLabel: semanticFallback, resolved: true }
  }

  // For fields without specific subBlock types, use pattern matching
  // UUID fallback
  if (isUuid(value)) {
    return { original: value, displayLabel: semanticFallback, resolved: true }
  }

  // Slack-style IDs (channels: C..., users: U.../W...) get semantic fallback
  if (/^C[A-Z0-9]{8,}$/.test(value) || /^[UW][A-Z0-9]{8,}$/.test(value)) {
    return { original: value, displayLabel: semanticFallback, resolved: true }
  }

  // Credential set prefix without credential field type
  if (value.startsWith(CREDENTIAL_SET.PREFIX)) {
    const label = await resolveCredential(value, context.workflowId)
    if (label) {

@@ -30,6 +30,7 @@ export interface ExecuteWorkflowOptions {
    startBlockId: string
    sourceSnapshot: SerializableExecutionState
  }
  executionMode?: 'sync' | 'stream' | 'async'
}

export interface WorkflowInfo {
@@ -70,6 +71,7 @@ export async function executeWorkflow(
    useDraftState: streamConfig?.useDraftState ?? false,
    startTime: new Date().toISOString(),
    isClientSession: false,
    executionMode: streamConfig?.executionMode,
  }

  const snapshot = new ExecutionSnapshot(

@@ -111,6 +111,10 @@ interface StartResumeExecutionArgs {
  contextId: string
  resumeInput: unknown
  userId: string
  sendEvent?: (event: ExecutionEvent) => void
  onStream?: (streamingExec: StreamingExecution) => Promise<void>
  onBlockComplete?: (blockId: string, output: unknown) => Promise<void>
  abortSignal?: AbortSignal
}

export class PauseResumeManager {
@@ -293,8 +297,18 @@ export class PauseResumeManager {
  }

  static async startResumeExecution(args: StartResumeExecutionArgs): Promise<ExecutionResult> {
    const { resumeEntryId, resumeExecutionId, pausedExecution, contextId, resumeInput, userId } =
      args
    const {
      resumeEntryId,
      resumeExecutionId,
      pausedExecution,
      contextId,
      resumeInput,
      userId,
      sendEvent,
      onStream,
      onBlockComplete,
      abortSignal,
    } = args

    const pausePointsRecord = pausedExecution.pausePoints as Record<string, any>
    const pausePointForContext = pausePointsRecord?.[contextId]
@@ -309,6 +323,10 @@ export class PauseResumeManager {
      contextId,
      resumeInput,
      userId,
      sendEvent,
      onStream,
      onBlockComplete,
      abortSignal,
    })

    if (result.status === 'paused') {
@@ -384,8 +402,22 @@ export class PauseResumeManager {
    contextId: string
    resumeInput: unknown
    userId: string
    sendEvent?: (event: ExecutionEvent) => void
    onStream?: (streamingExec: StreamingExecution) => Promise<void>
    onBlockComplete?: (blockId: string, output: unknown) => Promise<void>
    abortSignal?: AbortSignal
  }): Promise<ExecutionResult> {
    const { resumeExecutionId, pausedExecution, contextId, resumeInput, userId } = args
    const {
      resumeExecutionId,
      pausedExecution,
      contextId,
      resumeInput,
      userId,
      sendEvent,
      onStream: externalOnStream,
      onBlockComplete: externalOnBlockComplete,
      abortSignal: externalAbortSignal,
    } = args
    const parentExecutionId = pausedExecution.executionId

    await db
@@ -798,6 +830,7 @@ export class PauseResumeManager {
      localEventSeq++
      event.eventId = localEventSeq
      eventWriter.write(event).catch(() => {})
      sendEvent?.(event)
    }

    writeBufferedEvent({
@@ -887,6 +920,10 @@ export class PauseResumeManager {
        workflowId,
        data: hasError ? { ...sharedData, error: output?.error } : { ...sharedData, output },
      } as ExecutionEvent)

      if (externalOnBlockComplete) {
        await externalOnBlockComplete(blockId, callbackData.output)
      }
    },
    onChildWorkflowInstanceReady: (
      blockId: string,
@@ -911,6 +948,11 @@ export class PauseResumeManager {
      } as ExecutionEvent)
    },
    onStream: async (streamingExec: StreamingExecution) => {
      if (externalOnStream) {
        await externalOnStream(streamingExec)
        return
      }

      const blockId = (streamingExec.execution as unknown as Record<string, unknown>)
        .blockId as string
      const reader = streamingExec.stream.getReader()
@@ -949,9 +991,9 @@ export class PauseResumeManager {
      },
    }

    const timeoutController = createTimeoutAbortController(
      preprocessingResult.executionTimeout?.async
    )
    const timeoutController = externalAbortSignal
      ? null
      : createTimeoutAbortController(preprocessingResult.executionTimeout?.async)
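    // Design note (inferred from this diff): when the caller supplies its own AbortSignal,
    // the parent execution owns cancellation and timeout, so no local controller is created.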

    let result: ExecutionResult
    let finalMetaStatus: 'complete' | 'error' | 'cancelled' = 'complete'
@@ -963,15 +1005,15 @@ export class PauseResumeManager {
        skipLogCreation: true,
        includeFileBase64: true,
        base64MaxBytes: undefined,
        abortSignal: timeoutController.signal,
        abortSignal: externalAbortSignal ?? timeoutController?.signal,
      })

      if (
        result.status === 'cancelled' &&
        timeoutController.isTimedOut() &&
        timeoutController.timeoutMs
        timeoutController?.isTimedOut() &&
        timeoutController?.timeoutMs
      ) {
        const timeoutErrorMessage = getTimeoutErrorMessage(null, timeoutController.timeoutMs)
        const timeoutErrorMessage = getTimeoutErrorMessage(null, timeoutController!.timeoutMs)
        logger.info('Resume execution timed out', {
          resumeExecutionId,
          timeoutMs: timeoutController.timeoutMs,
@@ -1042,7 +1084,7 @@ export class PauseResumeManager {
      finalMetaStatus = 'error'
      throw execError
    } finally {
      timeoutController.cleanup()
      timeoutController?.cleanup()
      try {
        await eventWriter.close()
      } catch (closeError) {
@@ -1246,6 +1288,17 @@ export class PauseResumeManager {
    )
  }

  static async getPausedExecutionById(
    id: string
  ): Promise<typeof pausedExecutions.$inferSelect | null> {
    const rows = await db
      .select()
      .from(pausedExecutions)
      .where(eq(pausedExecutions.id, id))
      .limit(1)
    return rows[0] ?? null
  }

  static async getPausedExecutionDetail(options: {
    workflowId: string
    executionId: string

@@ -1,6 +1,6 @@
import { normalizeName, REFERENCE } from '@/executor/constants'

export const SYSTEM_REFERENCE_PREFIXES = new Set(['start', 'loop', 'parallel', 'variable'])
export const SYSTEM_REFERENCE_PREFIXES = new Set(['loop', 'parallel', 'variable'])
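// Note on the change above (inferred from this diff alone): 'start' is no longer a reserved
// system prefix, so 'start' references presumably resolve through regular block-name lookup.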

const INVALID_REFERENCE_CHARS = /[+*/=<>!]/


@@ -12,7 +12,6 @@ import {
  cleanupExecutionBase64Cache,
  hydrateUserFilesWithBase64,
} from '@/lib/uploads/utils/user-file-base64.server'
import { executeWorkflow } from '@/lib/workflows/executor/execute-workflow'
import type { BlockLog, ExecutionResult, StreamingExecution } from '@/executor/types'

/**
@@ -36,25 +35,24 @@ export interface StreamingConfig {
  timeoutMs?: number
}

export type StreamingExecutorFn = (callbacks: {
  onStream: (streamingExec: StreamingExecution) => Promise<void>
  onBlockComplete: (blockId: string, output: unknown) => Promise<void>
  abortSignal: AbortSignal
}) => Promise<ExecutionResult>

export interface StreamingResponseOptions {
  requestId: string
  workflow: {
    id: string
    userId: string
    workspaceId?: string | null
    isDeployed?: boolean
    variables?: Record<string, unknown>
  }
  input: unknown
  executingUserId: string
  streamConfig: StreamingConfig
  executionId?: string
  executeFn: StreamingExecutorFn
}
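
// Illustrative caller sketch (assumed, not part of this diff): routes now inject executeFn,
// so createStreamingResponse no longer calls executeWorkflow itself. Argument order follows
// the executeWorkflow call removed further down:
//   const stream = await createStreamingResponse({
//     requestId, workflow, input, executingUserId, streamConfig, executionId,
//     executeFn: ({ onStream, onBlockComplete, abortSignal }) =>
//       executeWorkflow(workflow, requestId, input, executingUserId,
//         { enabled: true, onStream, onBlockComplete, abortSignal }, executionId),
//   })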

interface StreamingState {
  streamedChunks: Map<string, string[]>
  processedOutputs: Set<string>
  streamCompletionTimes: Map<string, number>
  completedBlockIds: Set<string>
}

function resolveStreamedContent(state: StreamingState): Map<string, string> {
@@ -77,6 +75,7 @@ async function buildMinimalResult(
  result: ExecutionResult,
  selectedOutputs: string[] | undefined,
  streamedContent: Map<string, string>,
  completedBlockIds: Set<string>,
  requestId: string,
  includeFileBase64: boolean,
  base64MaxBytes: number | undefined
@@ -87,6 +86,11 @@
    output: {} as Record<string, unknown>,
  }

  if (result.status === 'paused') {
    minimalResult.output = result.output || {}
    return minimalResult
  }

  if (!selectedOutputs?.length) {
    minimalResult.output = result.output || {}
    return minimalResult
@@ -103,6 +107,10 @@
      continue
    }

    if (!completedBlockIds.has(blockId)) {
      continue
    }

    if (isDangerousKey(blockId)) {
      logger.warn(`[${requestId}] Blocked dangerous blockId: ${blockId}`)
      continue
@@ -182,7 +190,7 @@ async function completeLoggingSession(result: ExecutionResult): Promise<void> {
export async function createStreamingResponse(
  options: StreamingResponseOptions
): Promise<ReadableStream> {
  const { requestId, workflow, input, executingUserId, streamConfig, executionId } = options
  const { requestId, streamConfig, executionId, executeFn } = options
  const timeoutController = createTimeoutAbortController(streamConfig.timeoutMs)

  return new ReadableStream({
@@ -191,6 +199,7 @@
      streamedChunks: new Map(),
      processedOutputs: new Set(),
      streamCompletionTimes: new Map(),
      completedBlockIds: new Set(),
    }

    const sendChunk = (blockId: string, content: string) => {
@@ -250,6 +259,8 @@
    const base64MaxBytes = streamConfig.base64MaxBytes

    const onBlockCompleteCallback = async (blockId: string, output: unknown) => {
      state.completedBlockIds.add(blockId)

      if (!streamConfig.selectedOutputs?.length) {
        return
      }
@@ -284,25 +295,11 @@
    }

    try {
      const result = await executeWorkflow(
        workflow,
        requestId,
        input,
        executingUserId,
        {
          enabled: true,
          selectedOutputs: streamConfig.selectedOutputs,
          isSecureMode: streamConfig.isSecureMode,
          workflowTriggerType: streamConfig.workflowTriggerType,
          onStream: onStreamCallback,
          onBlockComplete: onBlockCompleteCallback,
          skipLoggingComplete: true,
          includeFileBase64: streamConfig.includeFileBase64,
          base64MaxBytes: streamConfig.base64MaxBytes,
          abortSignal: timeoutController.signal,
        },
        executionId
      )
      const result = await executeFn({
        onStream: onStreamCallback,
        onBlockComplete: onBlockCompleteCallback,
        abortSignal: timeoutController.signal,
      })

      const streamedContent =
        state.streamedChunks.size > 0 ? resolveStreamedContent(state) : new Map<string, string>()
@@ -336,12 +333,21 @@
        result,
        streamConfig.selectedOutputs,
        streamedContent,
        state.completedBlockIds,
        requestId,
        streamConfig.includeFileBase64 ?? true,
        streamConfig.base64MaxBytes
      )

      controller.enqueue(encodeSSE({ event: 'final', data: minimalResult }))
      controller.enqueue(
        encodeSSE({
          event: 'final',
          data: {
            ...minimalResult,
            ...(result.status === 'paused' && { status: 'paused' }),
          },
        })
      )
    }

    controller.enqueue(encodeSSE('[DONE]'))

@@ -54,6 +54,38 @@ function isReadableStream(response: any): response is ReadableStream {
  return response instanceof ReadableStream
}

const ZERO_COST = Object.freeze({
  input: 0,
  output: 0,
  total: 0,
  pricing: Object.freeze({ input: 0, output: 0, updatedAt: new Date(0).toISOString() }),
})

/**
 * Prevents streaming callbacks from writing non-zero model cost for BYOK users
 * while preserving tool costs. The property is frozen via defineProperty because
 * providers set cost inside streaming callbacks that fire after this function returns.
 */
function zeroCostForBYOK(response: StreamingExecution): void {
  const output = response.execution?.output
  if (!output || typeof output !== 'object') {
    logger.warn('zeroCostForBYOK: output not available at intercept time; cost may not be zeroed')
    return
  }

  let toolCost = 0
  Object.defineProperty(output, 'cost', {
    get: () => (toolCost > 0 ? { ...ZERO_COST, toolCost, total: toolCost } : ZERO_COST),
    set: (value: Record<string, unknown>) => {
      if (value?.toolCost && typeof value.toolCost === 'number') {
        toolCost = value.toolCost
      }
    },
    configurable: true,
    enumerable: true,
  })
}
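
// Behavior sketch (illustrative): after zeroCostForBYOK(response), a provider callback doing
//   output.cost = { input: 0.01, output: 0.02, total: 0.03, toolCost: 0.005 }
// records only the tool cost; reading output.cost then yields
//   { ...ZERO_COST, toolCost: 0.005, total: 0.005 }
// so BYOK workspaces are never billed for model tokens while tool charges survive.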

export async function executeProviderRequest(
  providerId: string,
  request: ProviderRequest
@@ -80,6 +112,12 @@
    )
    resolvedRequest = { ...resolvedRequest, apiKey: result.apiKey }
    isBYOK = result.isBYOK
    logger.info('API key resolved', {
      provider: providerId,
      model: request.model,
      workspaceId: request.workspaceId,
      isBYOK,
    })
  } catch (error) {
    logger.error('Failed to resolve API key:', {
      provider: providerId,
@@ -118,7 +156,10 @@
  const response = await provider.executeRequest(sanitizedRequest)

  if (isStreamingExecution(response)) {
    logger.info('Provider returned StreamingExecution')
    logger.info('Provider returned StreamingExecution', { isBYOK })
    if (isBYOK) {
      zeroCostForBYOK(response)
    }
    return response
  }

@@ -154,9 +195,9 @@
    },
  }
  if (isBYOK) {
    logger.debug(`Not billing model usage for ${response.model} - workspace BYOK key used`)
    logger.info(`Not billing model usage for ${response.model} - workspace BYOK key used`)
  } else {
    logger.debug(
    logger.info(
      `Not billing model usage for ${response.model} - user provided API key or not hosted model`
    )
  }