Compare commits

7 commits — comparing feat/run-f… with fix/copilo…

| SHA1 |
|---|
| a15dd4c09e |
| 80eb2a8aa1 |
| 315d9ee3f9 |
| 62b06d00de |
| 2a630859fb |
| 3533bd009d |
| 43402fde1c |
.github/workflows/ci.yml (vendored) — 30 changed lines

```diff
@@ -10,9 +10,6 @@ concurrency:
   group: ci-${{ github.ref }}
   cancel-in-progress: false
 
-permissions:
-  contents: read
-
 jobs:
   test-build:
     name: Test and Build
@@ -281,30 +278,3 @@ jobs:
     if: needs.check-docs-changes.outputs.docs_changed == 'true'
     uses: ./.github/workflows/docs-embeddings.yml
     secrets: inherit
-
-  # Create GitHub Release (only for version commits on main, after all builds complete)
-  create-release:
-    name: Create GitHub Release
-    runs-on: blacksmith-4vcpu-ubuntu-2404
-    needs: [create-ghcr-manifests, detect-version]
-    if: needs.detect-version.outputs.is_release == 'true'
-    permissions:
-      contents: write
-    steps:
-      - name: Checkout code
-        uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-
-      - name: Setup Bun
-        uses: oven-sh/setup-bun@v2
-        with:
-          bun-version: latest
-
-      - name: Install dependencies
-        run: bun install --frozen-lockfile
-
-      - name: Create release
-        env:
-          GH_PAT: ${{ secrets.GITHUB_TOKEN }}
-        run: bun run scripts/create-single-release.ts ${{ needs.detect-version.outputs.version }}
```
.github/workflows/docs-embeddings.yml (vendored) — 3 changed lines

```diff
@@ -4,9 +4,6 @@ on:
   workflow_call:
   workflow_dispatch: # Allow manual triggering
 
-permissions:
-  contents: read
-
 jobs:
   process-docs-embeddings:
     name: Process Documentation Embeddings
```
.github/workflows/migrations.yml (vendored) — 3 changed lines

```diff
@@ -4,9 +4,6 @@ on:
   workflow_call:
   workflow_dispatch:
 
-permissions:
-  contents: read
-
 jobs:
   migrate:
     name: Apply Database Migrations
```
.github/workflows/publish-cli.yml (vendored) — 3 changed lines

```diff
@@ -6,9 +6,6 @@ on:
     paths:
       - 'packages/cli/**'
 
-permissions:
-  contents: read
-
 jobs:
   publish-npm:
     runs-on: blacksmith-4vcpu-ubuntu-2404
```
.github/workflows/publish-python-sdk.yml (vendored) — 3 changed lines

```diff
@@ -6,9 +6,6 @@ on:
     paths:
       - 'packages/python-sdk/**'
 
-permissions:
-  contents: write
-
 jobs:
   publish-pypi:
     runs-on: blacksmith-4vcpu-ubuntu-2404
```
.github/workflows/publish-ts-sdk.yml (vendored) — 3 changed lines

```diff
@@ -6,9 +6,6 @@ on:
     paths:
       - 'packages/ts-sdk/**'
 
-permissions:
-  contents: write
-
 jobs:
   publish-npm:
     runs-on: blacksmith-4vcpu-ubuntu-2404
```
.github/workflows/test-build.yml (vendored) — 3 changed lines

```diff
@@ -4,9 +4,6 @@ on:
   workflow_call:
   workflow_dispatch:
 
-permissions:
-  contents: read
-
 jobs:
   test-build:
     name: Test and Build
```
Deleted file (38 lines):

```diff
@@ -1,38 +0,0 @@
-'use client'
-
-import { getAssetUrl } from '@/lib/utils'
-
-interface ActionImageProps {
-  src: string
-  alt: string
-}
-
-interface ActionVideoProps {
-  src: string
-  alt: string
-}
-
-export function ActionImage({ src, alt }: ActionImageProps) {
-  return (
-    <img
-      src={src}
-      alt={alt}
-      className='inline-block w-full max-w-[200px] rounded border border-neutral-200 dark:border-neutral-700'
-    />
-  )
-}
-
-export function ActionVideo({ src, alt }: ActionVideoProps) {
-  const resolvedSrc = getAssetUrl(src)
-
-  return (
-    <video
-      src={resolvedSrc}
-      autoPlay
-      loop
-      muted
-      playsInline
-      className='inline-block w-full max-w-[200px] rounded border border-neutral-200 dark:border-neutral-700'
-    />
-  )
-}
```
```diff
@@ -4,7 +4,6 @@ description: Essential actions for navigating and using the Sim workflow editor
 ---
 
 import { Callout } from 'fumadocs-ui/components/callout'
-import { ActionImage, ActionVideo } from '@/components/ui/action-media'
 
 A quick lookup for everyday actions in the Sim workflow editor. For keyboard shortcuts, see [Keyboard Shortcuts](/keyboard-shortcuts).
 
@@ -14,362 +13,124 @@ A quick lookup for everyday actions in the Sim workflow editor. For keyboard sho
 
 ## Workspaces
 
-<table>
-  <thead>
-    <tr><th>Action</th><th>How</th><th>Preview</th></tr>
-  </thead>
-  <tbody>
-    <tr>
-      <td>Create a workspace</td>
-      <td>Click workspace dropdown → **New Workspace**</td>
-      <td><ActionVideo src="quick-reference/create-workspace.mp4" alt="Create workspace" /></td>
-    </tr>
-    <tr>
-      <td>Switch workspaces</td>
-      <td>Click workspace dropdown → Select workspace</td>
-      <td><ActionVideo src="quick-reference/switch-workspace.mp4" alt="Switch workspaces" /></td>
-    </tr>
-    <tr>
-      <td>Invite team members</td>
-      <td>Sidebar → **Invite**</td>
-      <td><ActionVideo src="quick-reference/invite.mp4" alt="Invite team members" /></td>
-    </tr>
-    <tr>
-      <td>Rename a workspace</td>
-      <td>Right-click workspace → **Rename**</td>
-      <td rowSpan={4}><ActionImage src="/static/quick-reference/workspace-context-menu.png" alt="Workspace context menu" /></td>
-    </tr>
-    <tr>
-      <td>Duplicate a workspace</td>
-      <td>Right-click workspace → **Duplicate**</td>
-    </tr>
-    <tr>
-      <td>Export a workspace</td>
-      <td>Right-click workspace → **Export**</td>
-    </tr>
-    <tr>
-      <td>Delete a workspace</td>
-      <td>Right-click workspace → **Delete**</td>
-    </tr>
-  </tbody>
-</table>
+| Action | How |
+|--------|-----|
+| Create a workspace | Click workspace dropdown in sidebar → **New Workspace** |
+| Rename a workspace | Workspace settings → Edit name |
+| Switch workspaces | Click workspace dropdown in sidebar → Select workspace |
+| Invite team members | Workspace settings → **Team** → **Invite** |
 
 ## Workflows
 
-<table>
-  <thead>
-    <tr><th>Action</th><th>How</th><th>Preview</th></tr>
-  </thead>
-  <tbody>
-    <tr>
-      <td>Create a workflow</td>
-      <td>Click **+** button in sidebar</td>
-      <td><ActionImage src="/static/quick-reference/create-workflow.png" alt="Create workflow" /></td>
-    </tr>
-    <tr>
-      <td>Reorder / move workflows</td>
-      <td>Drag workflow up/down or onto a folder</td>
-      <td><ActionVideo src="quick-reference/reordering.mp4" alt="Reorder workflows" /></td>
-    </tr>
-    <tr>
-      <td>Import a workflow</td>
-      <td>Click import button in sidebar → Select file</td>
-      <td><ActionImage src="/static/quick-reference/import-workflow.png" alt="Import workflow" /></td>
-    </tr>
-    <tr>
-      <td>Multi-select workflows</td>
-      <td>`Mod+Click` or `Shift+Click` workflows in sidebar</td>
-      <td><ActionVideo src="quick-reference/multiselect.mp4" alt="Multi-select workflows" /></td>
-    </tr>
-    <tr>
-      <td>Open in new tab</td>
-      <td>Right-click workflow → **Open in New Tab**</td>
-      <td rowSpan={6}><ActionImage src="/static/quick-reference/workflow-context-menu.png" alt="Workflow context menu" /></td>
-    </tr>
-    <tr>
-      <td>Rename a workflow</td>
-      <td>Right-click workflow → **Rename**</td>
-    </tr>
-    <tr>
-      <td>Assign workflow color</td>
-      <td>Right-click workflow → **Change Color**</td>
-    </tr>
-    <tr>
-      <td>Duplicate a workflow</td>
-      <td>Right-click workflow → **Duplicate**</td>
-    </tr>
-    <tr>
-      <td>Export a workflow</td>
-      <td>Right-click workflow → **Export**</td>
-    </tr>
-    <tr>
-      <td>Delete a workflow</td>
-      <td>Right-click workflow → **Delete**</td>
-    </tr>
-    <tr>
-      <td>Rename a folder</td>
-      <td>Right-click folder → **Rename**</td>
-      <td rowSpan={6}><ActionImage src="/static/quick-reference/folder-context-menu.png" alt="Folder context menu" /></td>
-    </tr>
-    <tr>
-      <td>Create workflow in folder</td>
-      <td>Right-click folder → **Create workflow**</td>
-    </tr>
-    <tr>
-      <td>Create folder in folder</td>
-      <td>Right-click folder → **Create folder**</td>
-    </tr>
-    <tr>
-      <td>Duplicate a folder</td>
-      <td>Right-click folder → **Duplicate**</td>
-    </tr>
-    <tr>
-      <td>Export a folder</td>
-      <td>Right-click folder → **Export**</td>
-    </tr>
-    <tr>
-      <td>Delete a folder</td>
-      <td>Right-click folder → **Delete**</td>
-    </tr>
-  </tbody>
-</table>
+| Action | How |
+|--------|-----|
+| Create a workflow | Click **New Workflow** button or `Mod+Shift+A` |
+| Rename a workflow | Double-click workflow name in sidebar, or right-click → **Rename** |
+| Duplicate a workflow | Right-click workflow → **Duplicate** |
+| Reorder workflows | Drag workflow up/down in the sidebar list |
+| Import a workflow | Sidebar menu → **Import** → Select file |
+| Create a folder | Right-click in sidebar → **New Folder** |
+| Rename a folder | Right-click folder → **Rename** |
+| Delete a folder | Right-click folder → **Delete** |
+| Collapse/expand folder | Click folder arrow, or double-click folder |
+| Move workflow to folder | Drag workflow onto folder in sidebar |
+| Delete a workflow | Right-click workflow → **Delete** |
+| Export a workflow | Right-click workflow → **Export** |
+| Assign workflow color | Right-click workflow → **Change Color** |
+| Multi-select workflows | `Mod+Click` or `Shift+Click` workflows in sidebar |
+| Open in new tab | Right-click workflow → **Open in New Tab** |
 
 ## Blocks
 
-<table>
-  <thead>
-    <tr><th>Action</th><th>How</th><th>Preview</th></tr>
-  </thead>
-  <tbody>
-    <tr>
-      <td>Add a block</td>
-      <td>Drag from Toolbar panel, or right-click canvas → **Add Block**</td>
-      <td><ActionVideo src="quick-reference/add-block.mp4" alt="Add a block" /></td>
-    </tr>
-    <tr>
-      <td>Multi-select blocks</td>
-      <td>`Mod+Click` additional blocks, or shift-drag to draw selection box</td>
-      <td><ActionVideo src="quick-reference/multiselect-blocks.mp4" alt="Multi-select blocks" /></td>
-    </tr>
-    <tr>
-      <td>Copy blocks</td>
-      <td>`Mod+C` with blocks selected</td>
-      <td rowSpan={2}><ActionVideo src="quick-reference/copy-paste.mp4" alt="Copy and paste blocks" /></td>
-    </tr>
-    <tr>
-      <td>Paste blocks</td>
-      <td>`Mod+V` to paste copied blocks</td>
-    </tr>
-    <tr>
-      <td>Duplicate blocks</td>
-      <td>Right-click → **Duplicate**</td>
-      <td><ActionVideo src="quick-reference/duplicate-block.mp4" alt="Duplicate blocks" /></td>
-    </tr>
-    <tr>
-      <td>Delete blocks</td>
-      <td>`Delete` or `Backspace` key, or right-click → **Delete**</td>
-      <td><ActionImage src="/static/quick-reference/delete-block.png" alt="Delete block" /></td>
-    </tr>
-    <tr>
-      <td>Rename a block</td>
-      <td>Click block name in header, or edit in the Editor panel</td>
-      <td><ActionVideo src="quick-reference/rename-block.mp4" alt="Rename a block" /></td>
-    </tr>
-    <tr>
-      <td>Enable/Disable a block</td>
-      <td>Right-click → **Enable/Disable**</td>
-      <td><ActionImage src="/static/quick-reference/disable-block.png" alt="Disable block" /></td>
-    </tr>
-    <tr>
-      <td>Toggle handle orientation</td>
-      <td>Right-click → **Toggle Handles**</td>
-      <td><ActionVideo src="quick-reference/toggle-handles.mp4" alt="Toggle handle orientation" /></td>
-    </tr>
-    <tr>
-      <td>Configure a block</td>
-      <td>Select block → use Editor panel on right</td>
-      <td><ActionVideo src="quick-reference/configure-block.mp4" alt="Configure a block" /></td>
-    </tr>
-  </tbody>
-</table>
+| Action | How |
+|--------|-----|
+| Add a block | Drag from Toolbar panel, or right-click canvas → **Add Block** |
+| Select a block | Click on the block |
+| Multi-select blocks | `Mod+Click` additional blocks, or right-drag to draw selection box |
+| Move blocks | Drag selected block(s) to new position |
+| Copy blocks | `Mod+C` with blocks selected |
+| Paste blocks | `Mod+V` to paste copied blocks |
+| Duplicate blocks | Right-click → **Duplicate** |
+| Delete blocks | `Delete` or `Backspace` key, or right-click → **Delete** |
+| Rename a block | Click block name in header, or edit in the Editor panel |
+| Enable/Disable a block | Right-click → **Enable/Disable** |
+| Toggle handle orientation | Right-click → **Toggle Handles** |
+| Toggle trigger mode | Right-click trigger block → **Toggle Trigger Mode** |
+| Configure a block | Select block → use Editor panel on right |
 
 ## Connections
 
-<table>
-  <thead>
-    <tr><th>Action</th><th>How</th><th>Preview</th></tr>
-  </thead>
-  <tbody>
-    <tr>
-      <td>Create a connection</td>
-      <td>Drag from output handle to input handle</td>
-      <td><ActionVideo src="quick-reference/connect-blocks.mp4" alt="Connect blocks" /></td>
-    </tr>
-    <tr>
-      <td>Delete a connection</td>
-      <td>Click edge to select → `Delete` key</td>
-      <td><ActionVideo src="quick-reference/delete-connection.mp4" alt="Delete connection" /></td>
-    </tr>
-    <tr>
-      <td>Use output in another block</td>
-      <td>Drag connection tag into input field</td>
-      <td><ActionVideo src="quick-reference/connection-tag.mp4" alt="Use connection tag" /></td>
-    </tr>
-  </tbody>
-</table>
+| Action | How |
+|--------|-----|
+| Create a connection | Drag from output handle to input handle |
+| Delete a connection | Click edge to select → `Delete` key |
+| Use output in another block | Drag connection tag into input field |
 
 ## Canvas Navigation
 
 | Action | How |
 |--------|-----|
 | Pan/move canvas | Left-drag on empty space, or scroll/trackpad |
 | Zoom in/out | Scroll wheel or pinch gesture |
 | Auto-layout | `Shift+L` |
 | Draw selection box | Right-drag on empty canvas area |
 
 ## Panels & Views
 
-<table>
-  <thead>
-    <tr><th>Action</th><th>How</th><th>Preview</th></tr>
-  </thead>
-  <tbody>
-    <tr>
-      <td>Search toolbar</td>
-      <td>`Mod+F`</td>
-      <td><ActionVideo src="quick-reference/search-toolbar.mp4" alt="Search toolbar" /></td>
-    </tr>
-    <tr>
-      <td>Search everything</td>
-      <td>`Mod+K`</td>
-      <td><ActionImage src="/static/quick-reference/search-everything.png" alt="Search everything" /></td>
-    </tr>
-    <tr>
-      <td>Toggle manual mode</td>
-      <td>Click toggle button to switch between manual and selector</td>
-      <td><ActionImage src="/static/quick-reference/toggle-manual-mode.png" alt="Toggle manual mode" /></td>
-    </tr>
-    <tr>
-      <td>Collapse/expand sidebar</td>
-      <td>Click collapse button on sidebar</td>
-      <td><ActionVideo src="quick-reference/collapse-sidebar.mp4" alt="Collapse sidebar" /></td>
-    </tr>
-  </tbody>
-</table>
+| Action | How |
+|--------|-----|
+| Open Copilot tab | Press `C` or click Copilot tab |
+| Open Toolbar tab | Press `T` or click Toolbar tab |
+| Open Editor tab | Press `E` or click Editor tab |
+| Search toolbar | `Mod+F` |
+| Toggle advanced mode | Click toggle button on input fields |
+| Resize panels | Drag panel edge |
+| Collapse/expand sidebar | Click collapse button on sidebar |
 
 ## Running & Testing
 
-<table>
-  <thead>
-    <tr><th>Action</th><th>How</th><th>Preview</th></tr>
-  </thead>
-  <tbody>
-    <tr>
-      <td>Run workflow</td>
-      <td>Click Run Workflow button or `Mod+Enter`</td>
-      <td><ActionImage src="/static/quick-reference/run-workflow.png" alt="Run workflow" /></td>
-    </tr>
-    <tr>
-      <td>Stop workflow</td>
-      <td>Click Stop button or `Mod+Enter` while running</td>
-      <td><ActionImage src="/static/quick-reference/stop-workflow.png" alt="Stop workflow" /></td>
-    </tr>
-    <tr>
-      <td>Test with chat</td>
-      <td>Use Chat panel on the right side</td>
-      <td><ActionImage src="/static/quick-reference/test-chat.png" alt="Test with chat" /></td>
-    </tr>
-    <tr>
-      <td>Select output to view</td>
-      <td>Click dropdown in Chat panel → Select block output</td>
-      <td><ActionImage src="/static/quick-reference/output-select.png" alt="Select output to view" /></td>
-    </tr>
-    <tr>
-      <td>Clear chat history</td>
-      <td>Click clear button in Chat panel</td>
-      <td><ActionImage src="/static/quick-reference/clear-chat.png" alt="Clear chat history" /></td>
-    </tr>
-    <tr>
-      <td>View execution logs</td>
-      <td>Open terminal panel at bottom, or `Mod+L`</td>
-      <td><ActionImage src="/static/quick-reference/terminal.png" alt="Execution logs terminal" /></td>
-    </tr>
-    <tr>
-      <td>Filter logs by block or status</td>
-      <td>Click block filter in terminal or right-click log entry → **Filter by Block** or **Filter by Status**</td>
-      <td><ActionImage src="/static/quick-reference/filter-block.png" alt="Filter logs by block" /></td>
-    </tr>
-    <tr>
-      <td>Search logs</td>
-      <td>Use search field in terminal or right-click log entry → **Search**</td>
-      <td><ActionImage src="/static/quick-reference/terminal-search.png" alt="Search logs" /></td>
-    </tr>
-    <tr>
-      <td>Copy log entry</td>
-      <td>Clipboard Icon or Right-click log entry → **Copy**</td>
-      <td><ActionImage src="/static/quick-reference/copy-log.png" alt="Copy log entry" /></td>
-    </tr>
-    <tr>
-      <td>Clear terminal</td>
-      <td>Trash icon or `Mod+D`</td>
-      <td><ActionImage src="/static/quick-reference/clear-terminal.png" alt="Clear terminal" /></td>
-    </tr>
-  </tbody>
-</table>
+| Action | How |
+|--------|-----|
+| Run workflow | Click Play button or `Mod+Enter` |
+| Stop workflow | Click Stop button or `Mod+Enter` while running |
+| Test with chat | Use Chat panel on the right side |
+| Select output to view | Click dropdown in Chat panel → Select block output |
+| Clear chat history | Click clear button in Chat panel |
+| View execution logs | Open terminal panel at bottom, or `Mod+L` |
+| Filter logs by block | Click block filter in terminal |
+| Filter logs by status | Click status filter in terminal |
+| Search logs | Use search field in terminal |
+| Copy log entry | Right-click log entry → **Copy** |
+| Clear terminal | `Mod+D` |
 
 ## Deployment
 
-<table>
-  <thead>
-    <tr><th>Action</th><th>How</th><th>Preview</th></tr>
-  </thead>
-  <tbody>
-    <tr>
-      <td>Deploy a workflow</td>
-      <td>Click **Deploy** button in panel</td>
-      <td><ActionImage src="/static/quick-reference/deploy.png" alt="Deploy workflow" /></td>
-    </tr>
-    <tr>
-      <td>Update deployment</td>
-      <td>Click **Update** when changes are detected</td>
-      <td><ActionImage src="/static/quick-reference/update-deployment.png" alt="Update deployment" /></td>
-    </tr>
-    <tr>
-      <td>View deployment status</td>
-      <td>Check status indicator (Live/Update/Deploy) in Deploy tab</td>
-      <td><ActionImage src="/static/quick-reference/view-deployment.png" alt="View deployment status" /></td>
-    </tr>
-    <tr>
-      <td>Revert deployment</td>
-      <td>Access previous versions in Deploy tab → **Promote to live**</td>
-      <td><ActionImage src="/static/quick-reference/promote-deployment.png" alt="Promote deployment to live" /></td>
-    </tr>
-    <tr>
-      <td>Copy API endpoint</td>
-      <td>Deploy tab → API → Copy API cURL</td>
-      <td><ActionImage src="/static/quick-reference/copy-api.png" alt="Copy API endpoint" /></td>
-    </tr>
-  </tbody>
-</table>
+| Action | How |
+|--------|-----|
+| Deploy a workflow | Click **Deploy** button in Deploy tab |
+| Update deployment | Click **Update** when changes are detected |
+| View deployment status | Check status indicator (Live/Update/Deploy) in Deploy tab |
+| Revert deployment | Access previous versions in Deploy tab |
+| Copy webhook URL | Deploy tab → Copy webhook URL |
+| Copy API endpoint | Deploy tab → Copy API endpoint URL |
+| Set up a schedule | Add Schedule trigger block → Configure interval |
 
 ## Variables
 
-<table>
-  <thead>
-    <tr><th>Action</th><th>How</th><th>Preview</th></tr>
-  </thead>
-  <tbody>
-    <tr>
-      <td>Add / Edit / Delete workflow variable</td>
-      <td>Panel -> Variables -> **Add Variable**, click to edit, or delete icon</td>
-      <td><ActionImage src="/static/quick-reference/variables.png" alt="Variables panel" /></td>
-    </tr>
-    <tr>
-      <td>Add environment variable</td>
-      <td>Settings → **Environment Variables** → **Add**</td>
-      <td><ActionImage src="/static/quick-reference/add-env-variable.png" alt="Add environment variable" /></td>
-    </tr>
-    <tr>
-      <td>Reference a workflow variable</td>
-      <td>Use `<blockName.itemName>` syntax in block inputs</td>
-      <td><ActionImage src="/static/quick-reference/variable-reference.png" alt="Reference workflow variable" /></td>
-    </tr>
-    <tr>
-      <td>Reference an environment variable</td>
-      <td>Use `{{ENV_VAR}}` syntax in block inputs</td>
-      <td><ActionImage src="/static/quick-reference/env-variable-reference.png" alt="Reference environment variable" /></td>
-    </tr>
-  </tbody>
-</table>
+| Action | How |
+|--------|-----|
+| Add workflow variable | Variables tab → **Add Variable** |
+| Edit workflow variable | Variables tab → Click variable to edit |
+| Delete workflow variable | Variables tab → Click delete icon on variable |
+| Add environment variable | Settings → **Environment Variables** → **Add** |
+| Reference a variable | Use `{{variableName}}` syntax in block inputs |
 
 ## Credentials
 
 | Action | How |
 |--------|-----|
 | Add API key | Block credential field → **Add Credential** → Enter API key |
 | Connect OAuth account | Block credential field → **Connect** → Authorize with provider |
 | Manage credentials | Settings → **Credentials** |
 | Remove credential | Settings → **Credentials** → Delete credential |
```
29 binary image files deleted (docs static assets), ranging from 5.9 KiB to 146 KiB.
```
@@ -31,7 +31,6 @@ const DEFAULT_ENABLED_MODELS: Record<CopilotModelId, boolean> = {
  'claude-4.5-opus': true,
  'claude-4.1-opus': false,
  'gemini-3-pro': true,
  'auto': true,
}

// GET - Fetch user's enabled models
```
```diff
@@ -8,7 +8,6 @@ import { executeInIsolatedVM } from '@/lib/execution/isolated-vm'
 import { CodeLanguage, DEFAULT_CODE_LANGUAGE, isValidCodeLanguage } from '@/lib/execution/languages'
 import { escapeRegExp, normalizeName, REFERENCE } from '@/executor/constants'
 import { type OutputSchema, resolveBlockReference } from '@/executor/utils/block-reference'
-import { formatLiteralForCode } from '@/executor/utils/code-formatting'
 import {
   createEnvVarPattern,
   createWorkflowVariablePattern,
@@ -388,12 +387,7 @@ function resolveWorkflowVariables(
     if (type === 'number') {
       variableValue = Number(variableValue)
     } else if (type === 'boolean') {
-      if (typeof variableValue === 'boolean') {
-        // Already a boolean, keep as-is
-      } else {
-        const normalized = String(variableValue).toLowerCase().trim()
-        variableValue = normalized === 'true'
-      }
+      variableValue = variableValue === 'true' || variableValue === true
     } else if (type === 'json' && typeof variableValue === 'string') {
       try {
         variableValue = JSON.parse(variableValue)
```
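Net effect of the boolean branch change above, as a minimal sketch (input values hypothetical): the old path trimmed and lowercased non-boolean values before comparing, while the new expression accepts only boolean `true` or the exact string `'true'`.

```ts
const coerceOld = (v: unknown) =>
  typeof v === 'boolean' ? v : String(v).toLowerCase().trim() === 'true'
const coerceNew = (v: unknown) => v === 'true' || v === true

console.log(coerceOld(' TRUE '), coerceNew(' TRUE ')) // true false
console.log(coerceOld('true'), coerceNew('true'))     // true true
console.log(coerceOld(true), coerceNew(true))         // true true
```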
```diff
@@ -693,7 +687,11 @@ export async function POST(req: NextRequest) {
       prologue += `const environmentVariables = JSON.parse(${JSON.stringify(JSON.stringify(envVars))});\n`
       prologueLineCount++
       for (const [k, v] of Object.entries(contextVariables)) {
-        prologue += `const ${k} = ${formatLiteralForCode(v, 'javascript')};\n`
+        if (v === undefined) {
+          prologue += `const ${k} = undefined;\n`
+        } else {
+          prologue += `const ${k} = JSON.parse(${JSON.stringify(JSON.stringify(v))});\n`
+        }
         prologueLineCount++
       }
 
@@ -764,7 +762,11 @@ export async function POST(req: NextRequest) {
       prologue += `environmentVariables = json.loads(${JSON.stringify(JSON.stringify(envVars))})\n`
       prologueLineCount++
       for (const [k, v] of Object.entries(contextVariables)) {
-        prologue += `${k} = ${formatLiteralForCode(v, 'python')}\n`
+        if (v === undefined) {
+          prologue += `${k} = None\n`
+        } else {
+          prologue += `${k} = json.loads(${JSON.stringify(JSON.stringify(v))})\n`
+        }
         prologueLineCount++
       }
       const wrapped = [
```
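The double `JSON.stringify` in the added lines is what keeps injected context values inert: the inner call serializes the value to JSON, and the outer call escapes that JSON into a source-level string literal, so an arbitrary user value cannot break out of the generated statement. A minimal sketch (value hypothetical):

```ts
const v = { note: 'has "quotes", a \\ backslash, and\na newline' }
// One safe line of generated JavaScript; evaluating it reproduces `v` exactly.
const stmt = `const input = JSON.parse(${JSON.stringify(JSON.stringify(v))});`
console.log(stmt)
```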
apps/sim/app/api/organizations/[id]/workspaces/route.ts — new file, 204 added lines

```ts
import { db } from '@sim/db'
import { member, permissions, user, workspace } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq, or } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'

const logger = createLogger('OrganizationWorkspacesAPI')

/**
 * GET /api/organizations/[id]/workspaces
 * Get workspaces related to the organization with optional filtering
 * Query parameters:
 * - ?available=true - Only workspaces where user can invite others (admin permissions)
 * - ?member=userId - Workspaces where specific member has access
 */
export async function GET(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
  try {
    const session = await getSession()

    if (!session?.user?.id) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    const { id: organizationId } = await params
    const url = new URL(request.url)
    const availableOnly = url.searchParams.get('available') === 'true'
    const memberId = url.searchParams.get('member')

    // Verify user is a member of this organization
    const memberEntry = await db
      .select()
      .from(member)
      .where(and(eq(member.organizationId, organizationId), eq(member.userId, session.user.id)))
      .limit(1)

    if (memberEntry.length === 0) {
      return NextResponse.json(
        {
          error: 'Forbidden - Not a member of this organization',
        },
        { status: 403 }
      )
    }

    const userRole = memberEntry[0].role
    const hasAdminAccess = ['owner', 'admin'].includes(userRole)

    if (availableOnly) {
      // Get workspaces where user has admin permissions (can invite others)
      const availableWorkspaces = await db
        .select({
          id: workspace.id,
          name: workspace.name,
          ownerId: workspace.ownerId,
          createdAt: workspace.createdAt,
          isOwner: eq(workspace.ownerId, session.user.id),
          permissionType: permissions.permissionType,
        })
        .from(workspace)
        .leftJoin(
          permissions,
          and(
            eq(permissions.entityType, 'workspace'),
            eq(permissions.entityId, workspace.id),
            eq(permissions.userId, session.user.id)
          )
        )
        .where(
          or(
            // User owns the workspace
            eq(workspace.ownerId, session.user.id),
            // User has admin permission on the workspace
            and(
              eq(permissions.userId, session.user.id),
              eq(permissions.entityType, 'workspace'),
              eq(permissions.permissionType, 'admin')
            )
          )
        )

      // Filter and format the results
      const workspacesWithInvitePermission = availableWorkspaces
        .filter((workspace) => {
          // Include if user owns the workspace OR has admin permission
          return workspace.isOwner || workspace.permissionType === 'admin'
        })
        .map((workspace) => ({
          id: workspace.id,
          name: workspace.name,
          isOwner: workspace.isOwner,
          canInvite: true, // All returned workspaces have invite permission
          createdAt: workspace.createdAt,
        }))

      logger.info('Retrieved available workspaces for organization member', {
        organizationId,
        userId: session.user.id,
        workspaceCount: workspacesWithInvitePermission.length,
      })

      return NextResponse.json({
        success: true,
        data: {
          workspaces: workspacesWithInvitePermission,
          totalCount: workspacesWithInvitePermission.length,
          filter: 'available',
        },
      })
    }

    if (memberId && hasAdminAccess) {
      // Get workspaces where specific member has access (admin only)
      const memberWorkspaces = await db
        .select({
          id: workspace.id,
          name: workspace.name,
          ownerId: workspace.ownerId,
          isOwner: eq(workspace.ownerId, memberId),
          permissionType: permissions.permissionType,
          createdAt: permissions.createdAt,
        })
        .from(workspace)
        .leftJoin(
          permissions,
          and(
            eq(permissions.entityType, 'workspace'),
            eq(permissions.entityId, workspace.id),
            eq(permissions.userId, memberId)
          )
        )
        .where(
          or(
            // Member owns the workspace
            eq(workspace.ownerId, memberId),
            // Member has permissions on the workspace
            and(eq(permissions.userId, memberId), eq(permissions.entityType, 'workspace'))
          )
        )

      const formattedWorkspaces = memberWorkspaces.map((workspace) => ({
        id: workspace.id,
        name: workspace.name,
        isOwner: workspace.isOwner,
        permission: workspace.permissionType,
        joinedAt: workspace.createdAt,
        createdAt: workspace.createdAt,
      }))

      return NextResponse.json({
        success: true,
        data: {
          workspaces: formattedWorkspaces,
          totalCount: formattedWorkspaces.length,
          filter: 'member',
          memberId,
        },
      })
    }

    // Default: Get all workspaces (basic info only for regular members)
    if (!hasAdminAccess) {
      return NextResponse.json({
        success: true,
        data: {
          workspaces: [],
          totalCount: 0,
          message: 'Workspace access information is only available to organization admins',
        },
      })
    }

    // For admins: Get summary of all workspaces
    const allWorkspaces = await db
      .select({
        id: workspace.id,
        name: workspace.name,
        ownerId: workspace.ownerId,
        createdAt: workspace.createdAt,
        ownerName: user.name,
      })
      .from(workspace)
      .leftJoin(user, eq(workspace.ownerId, user.id))

    return NextResponse.json({
      success: true,
      data: {
        workspaces: allWorkspaces,
        totalCount: allWorkspaces.length,
        filter: 'all',
      },
      userRole,
      hasAdminAccess,
    })
  } catch (error) {
    logger.error('Failed to get organization workspaces', { error })
    return NextResponse.json(
      {
        error: 'Internal server error',
      },
      { status: 500 }
    )
  }
}
```
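A minimal sketch of calling the new route from a client, assuming the session-cookie auth that `getSession()` reads on the server (the organization ID is a hypothetical placeholder):

```ts
const organizationId = 'org_123' // hypothetical
const res = await fetch(`/api/organizations/${organizationId}/workspaces?available=true`)
if (res.ok) {
  const { data } = await res.json()
  // data.workspaces: [{ id, name, isOwner, canInvite, createdAt }, ...]
  console.log(data.workspaces)
}
```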
Deleted file (257 lines):

```ts
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { processSingleFileToUserFile } from '@/lib/uploads/utils/file-utils'
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'

export const dynamic = 'force-dynamic'

const logger = createLogger('SupabaseStorageUploadAPI')

const SupabaseStorageUploadSchema = z.object({
  projectId: z.string().min(1, 'Project ID is required'),
  apiKey: z.string().min(1, 'API key is required'),
  bucket: z.string().min(1, 'Bucket name is required'),
  fileName: z.string().min(1, 'File name is required'),
  path: z.string().optional().nullable(),
  fileData: z.any(),
  contentType: z.string().optional().nullable(),
  upsert: z.boolean().optional().default(false),
})

export async function POST(request: NextRequest) {
  const requestId = generateRequestId()

  try {
    const authResult = await checkInternalAuth(request, { requireWorkflowId: false })

    if (!authResult.success) {
      logger.warn(
        `[${requestId}] Unauthorized Supabase storage upload attempt: ${authResult.error}`
      )
      return NextResponse.json(
        {
          success: false,
          error: authResult.error || 'Authentication required',
        },
        { status: 401 }
      )
    }

    logger.info(
      `[${requestId}] Authenticated Supabase storage upload request via ${authResult.authType}`,
      {
        userId: authResult.userId,
      }
    )

    const body = await request.json()
    const validatedData = SupabaseStorageUploadSchema.parse(body)

    const fileData = validatedData.fileData
    const isStringInput = typeof fileData === 'string'

    logger.info(`[${requestId}] Uploading to Supabase Storage`, {
      bucket: validatedData.bucket,
      fileName: validatedData.fileName,
      path: validatedData.path,
      fileDataType: isStringInput ? 'string' : 'object',
    })

    if (!fileData) {
      return NextResponse.json(
        {
          success: false,
          error: 'fileData is required',
        },
        { status: 400 }
      )
    }

    let uploadBody: Buffer
    let uploadContentType: string | undefined

    if (isStringInput) {
      let content = fileData as string

      const dataUrlMatch = content.match(/^data:([^;]+);base64,(.+)$/s)
      if (dataUrlMatch) {
        const [, mimeType, base64Data] = dataUrlMatch
        content = base64Data
        if (!validatedData.contentType) {
          uploadContentType = mimeType
        }
        logger.info(`[${requestId}] Extracted base64 from data URL (MIME: ${mimeType})`)
      }

      const cleanedContent = content.replace(/[\s\r\n]/g, '')
      const isLikelyBase64 = /^[A-Za-z0-9+/]*={0,2}$/.test(cleanedContent)

      if (isLikelyBase64 && cleanedContent.length >= 4) {
        try {
          uploadBody = Buffer.from(cleanedContent, 'base64')

          const expectedMinSize = Math.floor(cleanedContent.length * 0.7)
          const expectedMaxSize = Math.ceil(cleanedContent.length * 0.8)

          if (
            uploadBody.length >= expectedMinSize &&
            uploadBody.length <= expectedMaxSize &&
            uploadBody.length > 0
          ) {
            logger.info(
              `[${requestId}] Decoded base64 content: ${cleanedContent.length} chars -> ${uploadBody.length} bytes`
            )
          } else {
            const reEncoded = uploadBody.toString('base64')
            if (reEncoded !== cleanedContent) {
              logger.info(
                `[${requestId}] Content looked like base64 but re-encoding didn't match, using as plain text`
              )
              uploadBody = Buffer.from(content, 'utf-8')
            } else {
              logger.info(
                `[${requestId}] Decoded base64 content (verified): ${uploadBody.length} bytes`
              )
            }
          }
        } catch (decodeError) {
          logger.info(
            `[${requestId}] Failed to decode as base64, using as plain text: ${decodeError}`
          )
          uploadBody = Buffer.from(content, 'utf-8')
        }
      } else {
        uploadBody = Buffer.from(content, 'utf-8')
        logger.info(`[${requestId}] Using content as plain text (${uploadBody.length} bytes)`)
      }

      uploadContentType =
        uploadContentType || validatedData.contentType || 'application/octet-stream'
    } else {
      const rawFile = fileData
      logger.info(`[${requestId}] Processing file object: ${rawFile.name || 'unknown'}`)

      let userFile
      try {
        userFile = processSingleFileToUserFile(rawFile, requestId, logger)
      } catch (error) {
        return NextResponse.json(
          {
            success: false,
            error: error instanceof Error ? error.message : 'Failed to process file',
          },
          { status: 400 }
        )
      }

      const buffer = await downloadFileFromStorage(userFile, requestId, logger)

      uploadBody = buffer
      uploadContentType = validatedData.contentType || userFile.type || 'application/octet-stream'
    }

    let fullPath = validatedData.fileName
    if (validatedData.path) {
      const folderPath = validatedData.path.endsWith('/')
        ? validatedData.path
        : `${validatedData.path}/`
      fullPath = `${folderPath}${validatedData.fileName}`
    }

    const supabaseUrl = `https://${validatedData.projectId}.supabase.co/storage/v1/object/${validatedData.bucket}/${fullPath}`

    const headers: Record<string, string> = {
      apikey: validatedData.apiKey,
      Authorization: `Bearer ${validatedData.apiKey}`,
      'Content-Type': uploadContentType,
    }

    if (validatedData.upsert) {
      headers['x-upsert'] = 'true'
    }

    logger.info(`[${requestId}] Sending to Supabase: ${supabaseUrl}`, {
      contentType: uploadContentType,
      bodySize: uploadBody.length,
      upsert: validatedData.upsert,
    })

    const response = await fetch(supabaseUrl, {
      method: 'POST',
      headers,
      body: new Uint8Array(uploadBody),
    })

    if (!response.ok) {
      const errorText = await response.text()
      let errorData
      try {
        errorData = JSON.parse(errorText)
      } catch {
        errorData = { message: errorText }
      }

      logger.error(`[${requestId}] Supabase Storage upload failed:`, {
        status: response.status,
        statusText: response.statusText,
        error: errorData,
      })

      return NextResponse.json(
        {
          success: false,
          error: errorData.message || errorData.error || `Upload failed: ${response.statusText}`,
          details: errorData,
        },
        { status: response.status }
      )
    }

    const result = await response.json()

    logger.info(`[${requestId}] File uploaded successfully to Supabase Storage`, {
      bucket: validatedData.bucket,
      path: fullPath,
    })

    const publicUrl = `https://${validatedData.projectId}.supabase.co/storage/v1/object/public/${validatedData.bucket}/${fullPath}`

    return NextResponse.json({
      success: true,
      output: {
        message: 'Successfully uploaded file to storage',
        results: {
          ...result,
          path: fullPath,
          bucket: validatedData.bucket,
          publicUrl,
        },
      },
    })
  } catch (error) {
    if (error instanceof z.ZodError) {
      logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
      return NextResponse.json(
        {
          success: false,
          error: 'Invalid request data',
          details: error.errors,
        },
        { status: 400 }
      )
    }

    logger.error(`[${requestId}] Error uploading to Supabase Storage:`, error)

    return NextResponse.json(
      {
        success: false,
        error: error instanceof Error ? error.message : 'Internal server error',
      },
      { status: 500 }
    )
  }
}
```
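For reference, the request body this deleted route accepted, reconstructed from its Zod schema (all values hypothetical; the route's URL path is not shown in this compare):

```ts
const body = {
  projectId: 'abcdefghijkl',      // Supabase project ref
  apiKey: 'service-or-anon-key',  // sent as both the apikey header and bearer token
  bucket: 'uploads',
  fileName: 'report.pdf',
  path: 'monthly',                // optional; final object key becomes monthly/report.pdf
  fileData: 'data:application/pdf;base64,JVBERi0xLjQ=', // string, data URL, or file object
  upsert: true,                   // adds the x-upsert header
}
```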
Deleted file (377 lines):

```ts
import { db, workflow as workflowTable } from '@sim/db'
import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { v4 as uuidv4 } from 'uuid'
import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { SSE_HEADERS } from '@/lib/core/utils/sse'
import { getPersonalAndWorkspaceEnv } from '@/lib/environment/utils'
import { markExecutionCancelled } from '@/lib/execution/cancellation'
import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/persistence/utils'
import { type ExecutionEvent, encodeSSEEvent } from '@/lib/workflows/executor/execution-events'
import { DAGExecutor } from '@/executor/execution/executor'
import type { IterationContext, SerializableExecutionState } from '@/executor/execution/types'
import type { NormalizedBlockOutput } from '@/executor/types'
import { hasExecutionResult } from '@/executor/utils/errors'
import { Serializer } from '@/serializer'
import { mergeSubblockState } from '@/stores/workflows/server-utils'

const logger = createLogger('ExecuteFromBlockAPI')

const ExecuteFromBlockSchema = z.object({
  startBlockId: z.string().min(1, 'Start block ID is required'),
  sourceSnapshot: z.object({
    blockStates: z.record(z.any()),
    executedBlocks: z.array(z.string()),
    blockLogs: z.array(z.any()),
    decisions: z.object({
      router: z.record(z.string()),
      condition: z.record(z.string()),
    }),
    completedLoops: z.array(z.string()),
    loopExecutions: z.record(z.any()).optional(),
    parallelExecutions: z.record(z.any()).optional(),
    parallelBlockMapping: z.record(z.any()).optional(),
    activeExecutionPath: z.array(z.string()),
  }),
})

export const runtime = 'nodejs'
export const dynamic = 'force-dynamic'

/**
 * POST /api/workflows/[id]/execute-from-block
 *
 * Executes a workflow starting from a specific block using cached outputs
 * for upstream/unaffected blocks from the source snapshot.
 */
export async function POST(req: NextRequest, { params }: { params: Promise<{ id: string }> }) {
  const requestId = generateRequestId()
  const { id: workflowId } = await params

  try {
    const auth = await checkHybridAuth(req, { requireWorkflowId: false })
    if (!auth.success || !auth.userId) {
      return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
    }
    const userId = auth.userId

    let body: unknown
    try {
      body = await req.json()
    } catch {
      return NextResponse.json({ error: 'Invalid JSON body' }, { status: 400 })
    }

    const validation = ExecuteFromBlockSchema.safeParse(body)
    if (!validation.success) {
      logger.warn(`[${requestId}] Invalid request body:`, validation.error.errors)
      return NextResponse.json(
        {
          error: 'Invalid request body',
          details: validation.error.errors.map((e) => ({
            path: e.path.join('.'),
            message: e.message,
          })),
        },
        { status: 400 }
      )
    }

    const { startBlockId, sourceSnapshot } = validation.data

    logger.info(`[${requestId}] Starting run-from-block execution`, {
      workflowId,
      userId,
      startBlockId,
      executedBlocksCount: sourceSnapshot.executedBlocks.length,
    })

    const executionId = uuidv4()

    // Load workflow record to get workspaceId
    const [workflowRecord] = await db
      .select({ workspaceId: workflowTable.workspaceId })
      .from(workflowTable)
      .where(eq(workflowTable.id, workflowId))
      .limit(1)

    if (!workflowRecord?.workspaceId) {
      return NextResponse.json({ error: 'Workflow not found or has no workspace' }, { status: 404 })
    }

    const workspaceId = workflowRecord.workspaceId

    // Load workflow state
    const workflowData = await loadWorkflowFromNormalizedTables(workflowId)
    if (!workflowData) {
      return NextResponse.json({ error: 'Workflow state not found' }, { status: 404 })
    }

    const { blocks, edges, loops, parallels } = workflowData

    // Merge block states
    const mergedStates = mergeSubblockState(blocks)

    // Get environment variables
    const { personalDecrypted, workspaceDecrypted } = await getPersonalAndWorkspaceEnv(
      userId,
      workspaceId
    )
    const decryptedEnvVars: Record<string, string> = { ...personalDecrypted, ...workspaceDecrypted }

    // Serialize workflow
    const serializedWorkflow = new Serializer().serializeWorkflow(
      mergedStates,
      edges,
      loops,
      parallels,
      true
    )

    const encoder = new TextEncoder()
    const abortController = new AbortController()
    let isStreamClosed = false

    const stream = new ReadableStream<Uint8Array>({
      async start(controller) {
        const sendEvent = (event: ExecutionEvent) => {
          if (isStreamClosed) return

          try {
            controller.enqueue(encodeSSEEvent(event))
          } catch {
            isStreamClosed = true
          }
        }

        try {
          const startTime = new Date()

          sendEvent({
            type: 'execution:started',
            timestamp: startTime.toISOString(),
            executionId,
            workflowId,
            data: {
              startTime: startTime.toISOString(),
            },
          })

          const onBlockStart = async (
            blockId: string,
            blockName: string,
            blockType: string,
            iterationContext?: IterationContext
          ) => {
            sendEvent({
              type: 'block:started',
              timestamp: new Date().toISOString(),
              executionId,
              workflowId,
              data: {
                blockId,
                blockName,
                blockType,
                ...(iterationContext && {
                  iterationCurrent: iterationContext.iterationCurrent,
                  iterationTotal: iterationContext.iterationTotal,
                  iterationType: iterationContext.iterationType,
                }),
              },
            })
          }

          const onBlockComplete = async (
            blockId: string,
            blockName: string,
            blockType: string,
            callbackData: { input?: unknown; output: NormalizedBlockOutput; executionTime: number },
            iterationContext?: IterationContext
          ) => {
            const hasError = (callbackData.output as any)?.error

            if (hasError) {
              sendEvent({
                type: 'block:error',
                timestamp: new Date().toISOString(),
                executionId,
                workflowId,
                data: {
                  blockId,
                  blockName,
                  blockType,
                  input: callbackData.input,
                  error: (callbackData.output as any).error,
                  durationMs: callbackData.executionTime || 0,
                  ...(iterationContext && {
                    iterationCurrent: iterationContext.iterationCurrent,
                    iterationTotal: iterationContext.iterationTotal,
                    iterationType: iterationContext.iterationType,
                  }),
                },
              })
            } else {
              sendEvent({
                type: 'block:completed',
                timestamp: new Date().toISOString(),
                executionId,
                workflowId,
                data: {
                  blockId,
                  blockName,
                  blockType,
                  input: callbackData.input,
                  output: callbackData.output,
                  durationMs: callbackData.executionTime || 0,
                  ...(iterationContext && {
                    iterationCurrent: iterationContext.iterationCurrent,
                    iterationTotal: iterationContext.iterationTotal,
                    iterationType: iterationContext.iterationType,
                  }),
                },
              })
            }
          }

          const onStream = async (streamingExecution: unknown) => {
            const streamingExec = streamingExecution as { stream: ReadableStream; execution: any }
            const blockId = streamingExec.execution?.blockId

            const reader = streamingExec.stream.getReader()
            const decoder = new TextDecoder()

            try {
              while (true) {
                const { done, value } = await reader.read()
                if (done) break

                const chunk = decoder.decode(value, { stream: true })
                sendEvent({
                  type: 'stream:chunk',
                  timestamp: new Date().toISOString(),
                  executionId,
                  workflowId,
                  data: { blockId, chunk },
                })
              }

              sendEvent({
                type: 'stream:done',
                timestamp: new Date().toISOString(),
                executionId,
                workflowId,
                data: { blockId },
              })
            } catch (error) {
              logger.error(`[${requestId}] Error streaming block content:`, error)
            } finally {
              try {
                reader.releaseLock()
              } catch {}
            }
          }

          // Create executor and run from block
          const executor = new DAGExecutor({
            workflow: serializedWorkflow,
            envVarValues: decryptedEnvVars,
            workflowInput: {},
            workflowVariables: {},
            contextExtensions: {
              stream: true,
              executionId,
              workspaceId,
              userId,
              isDeployedContext: false,
              onBlockStart,
              onBlockComplete,
              onStream,
              abortSignal: abortController.signal,
            },
          })

          const result = await executor.executeFromBlock(
            workflowId,
            startBlockId,
            sourceSnapshot as SerializableExecutionState
          )

          if (result.status === 'cancelled') {
            sendEvent({
              type: 'execution:cancelled',
              timestamp: new Date().toISOString(),
              executionId,
              workflowId,
              data: {
                duration: result.metadata?.duration || 0,
              },
            })
            return
          }

          sendEvent({
            type: 'execution:completed',
            timestamp: new Date().toISOString(),
            executionId,
            workflowId,
            data: {
              success: result.success,
              output: result.output,
              duration: result.metadata?.duration || 0,
              startTime: result.metadata?.startTime || startTime.toISOString(),
              endTime: result.metadata?.endTime || new Date().toISOString(),
            },
          })
        } catch (error: unknown) {
          const errorMessage = error instanceof Error ? error.message : 'Unknown error'
          logger.error(`[${requestId}] Run-from-block execution failed: ${errorMessage}`)

          const executionResult = hasExecutionResult(error) ? error.executionResult : undefined

          sendEvent({
            type: 'execution:error',
            timestamp: new Date().toISOString(),
            executionId,
            workflowId,
            data: {
              error: executionResult?.error || errorMessage,
              duration: executionResult?.metadata?.duration || 0,
            },
          })
        } finally {
          if (!isStreamClosed) {
            try {
              controller.enqueue(encoder.encode('data: [DONE]\n\n'))
              controller.close()
            } catch {
              // Stream already closed
            }
          }
        }
      },
      cancel() {
        isStreamClosed = true
        logger.info(`[${requestId}] Client aborted SSE stream, signalling cancellation`)
        abortController.abort()
        markExecutionCancelled(executionId).catch(() => {})
      },
    })

    return new NextResponse(stream, {
      headers: {
        ...SSE_HEADERS,
        'X-Execution-Id': executionId,
      },
    })
  } catch (error: unknown) {
    const errorMessage = error instanceof Error ? error.message : 'Unknown error'
    logger.error(`[${requestId}] Failed to start run-from-block execution:`, error)
    return NextResponse.json(
      { error: errorMessage || 'Failed to start execution' },
      { status: 500 }
    )
  }
}
```
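A minimal sketch of how a client would have consumed this deleted SSE endpoint (IDs and snapshot are hypothetical placeholders; event payloads follow the `sendEvent` calls above):

```ts
const workflowId = 'wf_123' // hypothetical
const payload = {
  startBlockId: 'blk_456', // hypothetical
  sourceSnapshot: {} as Record<string, unknown>, // would be a snapshot from a prior full run
}
const res = await fetch(`/api/workflows/${workflowId}/execute-from-block`, {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify(payload),
})
const reader = res.body!.getReader()
const decoder = new TextDecoder()
while (true) {
  const { done, value } = await reader.read()
  if (done) break
  // Frames arrive as SSE `data: ...` lines; the server ends with `data: [DONE]`.
  console.log(decoder.decode(value, { stream: true }))
}
```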
@@ -1,12 +1,11 @@
import { memo, useCallback } from 'react'
import { ArrowLeftRight, ArrowUpDown, Circle, CircleOff, LogOut, Play } from 'lucide-react'
import { ArrowLeftRight, ArrowUpDown, Circle, CircleOff, LogOut } from 'lucide-react'
import { Button, Copy, Tooltip, Trash2 } from '@/components/emcn'
import { cn } from '@/lib/core/utils/cn'
import { isInputDefinitionTrigger } from '@/lib/workflows/triggers/input-definition-triggers'
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
import { validateTriggerPaste } from '@/app/workspace/[workspaceId]/w/[workflowId]/utils'
import { useCollaborativeWorkflow } from '@/hooks/use-collaborative-workflow'
import { useExecutionStore } from '@/stores/execution'
import { useNotificationStore } from '@/stores/notifications'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
@@ -98,42 +97,12 @@ export const ActionBar = memo(
      )
    )

    const { activeWorkflowId } = useWorkflowRegistry()
    const { isExecuting, getLastExecutionSnapshot } = useExecutionStore()
    const userPermissions = useUserPermissionsContext()

    const isStartBlock = isInputDefinitionTrigger(blockType)
    const isResponseBlock = blockType === 'response'
    const isNoteBlock = blockType === 'note'
    const isSubflowBlock = blockType === 'loop' || blockType === 'parallel'
    const isInsideSubflow = parentId && (parentType === 'loop' || parentType === 'parallel')

    // Check if run-from-block is available
    const hasExecutionSnapshot = activeWorkflowId
      ? !!getLastExecutionSnapshot(activeWorkflowId)
      : false
    const wasExecuted = activeWorkflowId
      ? getLastExecutionSnapshot(activeWorkflowId)?.executedBlocks.includes(blockId) ?? false
      : false
    const canRunFromBlock =
      hasExecutionSnapshot &&
      wasExecuted &&
      !isStartBlock &&
      !isNoteBlock &&
      !isSubflowBlock &&
      !isInsideSubflow &&
      !isExecuting

    const handleRunFromBlock = useCallback(() => {
      if (!activeWorkflowId || !canRunFromBlock) return

      // Dispatch a custom event to trigger run-from-block execution
      window.dispatchEvent(
        new CustomEvent('run-from-block', {
          detail: { blockId, workflowId: activeWorkflowId },
        })
      )
    }, [blockId, activeWorkflowId, canRunFromBlock])

    /**
     * Get appropriate tooltip message based on disabled state
@@ -182,7 +151,7 @@ export const ActionBar = memo(
          </Tooltip.Root>
        )}

        {canRunFromBlock && (
        {isSubflowBlock && (
          <Tooltip.Root>
            <Tooltip.Trigger asChild>
              <Button
@@ -190,17 +159,17 @@ export const ActionBar = memo(
                onClick={(e) => {
                  e.stopPropagation()
                  if (!disabled) {
                    handleRunFromBlock()
                    collaborativeBatchToggleBlockEnabled([blockId])
                  }
                }}
                className={ACTION_BUTTON_STYLES}
                disabled={disabled || isExecuting}
                disabled={disabled}
              >
                <Play className={ICON_SIZE} />
                {isEnabled ? <Circle className={ICON_SIZE} /> : <CircleOff className={ICON_SIZE} />}
              </Button>
            </Tooltip.Trigger>
            <Tooltip.Content side='top'>
              {isExecuting ? 'Execution in progress' : getTooltipMessage('Run from this block')}
              {getTooltipMessage(isEnabled ? 'Disable Block' : 'Enable Block')}
            </Tooltip.Content>
          </Tooltip.Root>
        )}

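Note: the action bar and the execution hook communicate through a window-level CustomEvent rather than shared state. A minimal sketch of that contract, using only the event name and detail shape that appear in this diff (the listener side is registered in useWorkflowExecution, shown further below):

// Dispatch side (action bar)
window.dispatchEvent(
  new CustomEvent('run-from-block', { detail: { blockId, workflowId } })
)

// Listener side (execution hook)
const onRunFromBlock = (event: CustomEvent<{ blockId: string; workflowId: string }>) => {
  const { blockId, workflowId } = event.detail
  handleRunFromBlock(blockId, workflowId)
}
window.addEventListener('run-from-block', onRunFromBlock as EventListener)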
@@ -246,7 +246,6 @@ export function getCommandDisplayLabel(commandId: string): string {
 * Model configuration options
 */
export const MODEL_OPTIONS = [
  { value: 'auto', label: 'Auto' },
  { value: 'claude-4.5-opus', label: 'Claude 4.5 Opus' },
  { value: 'claude-4.5-sonnet', label: 'Claude 4.5 Sonnet' },
  { value: 'claude-4.5-haiku', label: 'Claude 4.5 Haiku' },

@@ -338,11 +338,6 @@ const arePropsEqual = (prevProps: SubBlockProps, nextProps: SubBlockProps): bool
  const configEqual =
    prevProps.config.id === nextProps.config.id && prevProps.config.type === nextProps.config.type

  const canonicalToggleEqual =
    !!prevProps.canonicalToggle === !!nextProps.canonicalToggle &&
    prevProps.canonicalToggle?.mode === nextProps.canonicalToggle?.mode &&
    prevProps.canonicalToggle?.disabled === nextProps.canonicalToggle?.disabled

  return (
    prevProps.blockId === nextProps.blockId &&
    configEqual &&
@@ -351,7 +346,8 @@ const arePropsEqual = (prevProps: SubBlockProps, nextProps: SubBlockProps): bool
    prevProps.disabled === nextProps.disabled &&
    prevProps.fieldDiffStatus === nextProps.fieldDiffStatus &&
    prevProps.allowExpandInPreview === nextProps.allowExpandInPreview &&
    canonicalToggleEqual
    prevProps.canonicalToggle?.mode === nextProps.canonicalToggle?.mode &&
    prevProps.canonicalToggle?.disabled === nextProps.canonicalToggle?.disabled
  )
}

@@ -1,4 +1,4 @@
import { useCallback, useEffect, useRef, useState } from 'react'
import { useCallback, useRef, useState } from 'react'
import { createLogger } from '@sim/logger'
import { useQueryClient } from '@tanstack/react-query'
import { v4 as uuidv4 } from 'uuid'
@@ -15,8 +15,7 @@ import {
  TriggerUtils,
} from '@/lib/workflows/triggers/triggers'
import { useCurrentWorkflow } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-current-workflow'
import type { SerializableExecutionState } from '@/executor/execution/types'
import type { BlockLog, BlockState, ExecutionResult, StreamingExecution } from '@/executor/types'
import type { BlockLog, ExecutionResult, StreamingExecution } from '@/executor/types'
import { hasExecutionResult } from '@/executor/utils/errors'
import { coerceValue } from '@/executor/utils/start-block'
import { subscriptionKeys } from '@/hooks/queries/subscription'
@@ -33,9 +32,6 @@ import { useWorkflowStore } from '@/stores/workflows/workflow/store'

const logger = createLogger('useWorkflowExecution')

// Module-level guard to prevent duplicate run-from-block executions across hook instances
let runFromBlockGlobalLock = false

// Debug state validation result
interface DebugValidationResult {
  isValid: boolean
@@ -102,8 +98,6 @@ export function useWorkflowExecution() {
    setActiveBlocks,
    setBlockRunStatus,
    setEdgeRunStatus,
    setLastExecutionSnapshot,
    getLastExecutionSnapshot,
  } = useExecutionStore()
  const [executionResult, setExecutionResult] = useState<ExecutionResult | null>(null)
  const executionStream = useExecutionStream()
@@ -882,8 +876,6 @@
    const activeBlocksSet = new Set<string>()
    const streamedContent = new Map<string, string>()
    const accumulatedBlockLogs: BlockLog[] = []
    const accumulatedBlockStates = new Map<string, BlockState>()
    const executedBlockIds = new Set<string>()

    // Execute the workflow
    try {
@@ -930,14 +922,6 @@
          // Track successful block execution in run path
          setBlockRunStatus(data.blockId, 'success')

          // Track block state for run-from-block snapshot
          executedBlockIds.add(data.blockId)
          accumulatedBlockStates.set(data.blockId, {
            output: data.output,
            executed: true,
            executionTime: data.durationMs,
          })

          // Edges already tracked in onBlockStarted, no need to track again

          const startedAt = new Date(Date.now() - data.durationMs).toISOString()
@@ -1072,23 +1056,6 @@
          },
          logs: accumulatedBlockLogs,
        }

        // Store execution snapshot for run-from-block
        if (data.success && activeWorkflowId) {
          const snapshot: SerializableExecutionState = {
            blockStates: Object.fromEntries(accumulatedBlockStates),
            executedBlocks: Array.from(executedBlockIds),
            blockLogs: accumulatedBlockLogs,
            decisions: { router: {}, condition: {} },
            completedLoops: [],
            activeExecutionPath: Array.from(executedBlockIds),
          }
          setLastExecutionSnapshot(activeWorkflowId, snapshot)
          logger.info('Stored execution snapshot for run-from-block', {
            workflowId: activeWorkflowId,
            executedBlocksCount: executedBlockIds.size,
          })
        }
      },

      onExecutionError: (data) => {
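For reference, a snapshot as stored above might look like the following; the field names come from SerializableExecutionState in this diff, while the IDs, outputs, and timings are illustrative only:

const snapshot: SerializableExecutionState = {
  blockStates: {
    'block-1': { output: { text: 'hello' }, executed: true, executionTime: 12 },
  },
  executedBlocks: ['block-1'],
  blockLogs: [],
  decisions: { router: {}, condition: {} },
  completedLoops: [],
  activeExecutionPath: ['block-1'],
}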
@@ -1409,228 +1376,6 @@ export function useWorkflowExecution() {
    setActiveBlocks,
  ])

  /**
   * Handles running workflow from a specific block using cached outputs
   */
  const handleRunFromBlock = useCallback(
    async (blockId: string, workflowId: string) => {
      // Prevent duplicate executions across multiple hook instances (panel.tsx and chat.tsx)
      if (runFromBlockGlobalLock) {
        logger.debug('Run-from-block already in progress (global lock), ignoring duplicate request', {
          workflowId,
          blockId,
        })
        return
      }
      runFromBlockGlobalLock = true

      const snapshot = getLastExecutionSnapshot(workflowId)
      if (!snapshot) {
        logger.error('No execution snapshot available for run-from-block', { workflowId, blockId })
        runFromBlockGlobalLock = false
        return
      }

      if (!snapshot.executedBlocks.includes(blockId)) {
        logger.error('Block was not executed in the source run', { workflowId, blockId })
        runFromBlockGlobalLock = false
        return
      }

      logger.info('Starting run-from-block execution', {
        workflowId,
        startBlockId: blockId,
        snapshotExecutedBlocks: snapshot.executedBlocks.length,
      })

      setIsExecuting(true)

      const workflowEdges = useWorkflowStore.getState().edges
      const executionId = uuidv4()
      const accumulatedBlockLogs: BlockLog[] = []
      const accumulatedBlockStates = new Map<string, BlockState>()
      const executedBlockIds = new Set<string>()
      const activeBlocksSet = new Set<string>()

      try {
        await executionStream.executeFromBlock({
          workflowId,
          startBlockId: blockId,
          sourceSnapshot: snapshot,
          callbacks: {
            onExecutionStarted: (data) => {
              logger.info('Run-from-block execution started:', data)
            },

            onBlockStarted: (data) => {
              activeBlocksSet.add(data.blockId)
              setActiveBlocks(new Set(activeBlocksSet))

              const incomingEdges = workflowEdges.filter((edge) => edge.target === data.blockId)
              incomingEdges.forEach((edge) => {
                setEdgeRunStatus(edge.id, 'success')
              })
            },

            onBlockCompleted: (data) => {
              activeBlocksSet.delete(data.blockId)
              setActiveBlocks(new Set(activeBlocksSet))

              setBlockRunStatus(data.blockId, 'success')

              executedBlockIds.add(data.blockId)
              accumulatedBlockStates.set(data.blockId, {
                output: data.output,
                executed: true,
                executionTime: data.durationMs,
              })

              const startedAt = new Date(Date.now() - data.durationMs).toISOString()
              const endedAt = new Date().toISOString()

              accumulatedBlockLogs.push({
                blockId: data.blockId,
                blockName: data.blockName || 'Unknown Block',
                blockType: data.blockType || 'unknown',
                input: data.input || {},
                output: data.output,
                success: true,
                durationMs: data.durationMs,
                startedAt,
                endedAt,
              })

              addConsole({
                input: data.input || {},
                output: data.output,
                success: true,
                durationMs: data.durationMs,
                startedAt,
                endedAt,
                workflowId,
                blockId: data.blockId,
                executionId,
                blockName: data.blockName || 'Unknown Block',
                blockType: data.blockType || 'unknown',
                iterationCurrent: data.iterationCurrent,
                iterationTotal: data.iterationTotal,
                iterationType: data.iterationType,
              })
            },

            onBlockError: (data) => {
              activeBlocksSet.delete(data.blockId)
              setActiveBlocks(new Set(activeBlocksSet))

              setBlockRunStatus(data.blockId, 'error')

              const startedAt = new Date(Date.now() - data.durationMs).toISOString()
              const endedAt = new Date().toISOString()

              accumulatedBlockLogs.push({
                blockId: data.blockId,
                blockName: data.blockName || 'Unknown Block',
                blockType: data.blockType || 'unknown',
                input: data.input || {},
                output: {},
                success: false,
                error: data.error,
                durationMs: data.durationMs,
                startedAt,
                endedAt,
              })

              addConsole({
                input: data.input || {},
                output: {},
                success: false,
                error: data.error,
                durationMs: data.durationMs,
                startedAt,
                endedAt,
                workflowId,
                blockId: data.blockId,
                executionId,
                blockName: data.blockName,
                blockType: data.blockType,
                iterationCurrent: data.iterationCurrent,
                iterationTotal: data.iterationTotal,
                iterationType: data.iterationType,
              })
            },

            onExecutionCompleted: (data) => {
              if (data.success) {
                // Merge new states with snapshot states for updated snapshot
                const mergedBlockStates: Record<string, BlockState> = { ...snapshot.blockStates }
                for (const [bId, state] of accumulatedBlockStates) {
                  mergedBlockStates[bId] = state
                }

                const mergedExecutedBlocks = new Set([
                  ...snapshot.executedBlocks,
                  ...executedBlockIds,
                ])

                const updatedSnapshot: SerializableExecutionState = {
                  ...snapshot,
                  blockStates: mergedBlockStates,
                  executedBlocks: Array.from(mergedExecutedBlocks),
                  blockLogs: [...snapshot.blockLogs, ...accumulatedBlockLogs],
                  activeExecutionPath: Array.from(mergedExecutedBlocks),
                }
                setLastExecutionSnapshot(workflowId, updatedSnapshot)
                logger.info('Updated execution snapshot after run-from-block', {
                  workflowId,
                  newBlocksExecuted: executedBlockIds.size,
                })
              }
            },

            onExecutionError: (data) => {
              logger.error('Run-from-block execution error:', data.error)
            },

            onExecutionCancelled: () => {
              logger.info('Run-from-block execution cancelled')
            },
          },
        })
      } catch (error) {
        if ((error as Error).name !== 'AbortError') {
          logger.error('Run-from-block execution failed:', error)
        }
      } finally {
        setIsExecuting(false)
        setActiveBlocks(new Set())
        runFromBlockGlobalLock = false
      }
    },
    [
      getLastExecutionSnapshot,
      setLastExecutionSnapshot,
      setIsExecuting,
      setActiveBlocks,
      setBlockRunStatus,
      setEdgeRunStatus,
      addConsole,
      executionStream,
    ]
  )

  // Listen for run-from-block events from the action bar
  useEffect(() => {
    const handleRunFromBlockEvent = (event: CustomEvent<{ blockId: string; workflowId: string }>) => {
      const { blockId, workflowId } = event.detail
      handleRunFromBlock(blockId, workflowId)
    }

    window.addEventListener('run-from-block', handleRunFromBlockEvent as EventListener)
    return () => {
      window.removeEventListener('run-from-block', handleRunFromBlockEvent as EventListener)
    }
  }, [handleRunFromBlock])

  return {
    isExecuting,
    isDebugging,
@@ -1641,6 +1386,5 @@ export function useWorkflowExecution() {
    handleResumeDebug,
    handleCancelDebug,
    handleCancelExecution,
    handleRunFromBlock,
  }
}

@@ -1641,36 +1641,51 @@ const WorkflowContent = React.memo(() => {
  }, [screenToFlowPosition, handleToolbarDrop])

  /**
   * Focus canvas on changed blocks when diff appears.
   * Focus canvas on changed blocks when diff appears
   * Focuses on new/edited blocks rather than fitting the entire workflow
   */
  const pendingZoomBlockIdsRef = useRef<Set<string> | null>(null)
  const prevDiffReadyRef = useRef(false)

  // Phase 1: When diff becomes ready, record which blocks we want to zoom to
  // Phase 2 effect is located after displayNodes is defined (search for "Phase 2")
  useEffect(() => {
    // Only focus when diff transitions from not ready to ready
    if (isDiffReady && !prevDiffReadyRef.current && diffAnalysis) {
      // Diff just became ready - record blocks to zoom to
      const changedBlockIds = [
        ...(diffAnalysis.new_blocks || []),
        ...(diffAnalysis.edited_blocks || []),
      ]

      if (changedBlockIds.length > 0) {
        pendingZoomBlockIdsRef.current = new Set(changedBlockIds)
        const allNodes = getNodes()
        const changedNodes = allNodes.filter((node) => changedBlockIds.includes(node.id))

        if (changedNodes.length > 0) {
          logger.info('Diff ready - focusing on changed blocks', {
            changedBlockIds,
            foundNodes: changedNodes.length,
          })
          requestAnimationFrame(() => {
            fitViewToBounds({
              nodes: changedNodes,
              duration: 600,
              padding: 0.1,
              minZoom: 0.5,
              maxZoom: 1.0,
            })
          })
        } else {
          logger.info('Diff ready - no changed nodes found, fitting all')
          requestAnimationFrame(() => {
            fitViewToBounds({ padding: 0.1, duration: 600 })
          })
        }
      } else {
        // No specific blocks to focus on, fit all after a frame
        pendingZoomBlockIdsRef.current = null
        logger.info('Diff ready - no changed blocks, fitting all')
        requestAnimationFrame(() => {
          fitViewToBounds({ padding: 0.1, duration: 600 })
        })
      }
    } else if (!isDiffReady && prevDiffReadyRef.current) {
      // Diff was cleared (accepted/rejected) - cancel any pending zoom
      pendingZoomBlockIdsRef.current = null
    }
    prevDiffReadyRef.current = isDiffReady
  }, [isDiffReady, diffAnalysis, fitViewToBounds])
  }, [isDiffReady, diffAnalysis, fitViewToBounds, getNodes])

  /** Displays trigger warning notifications. */
  useEffect(() => {
@@ -2078,48 +2093,6 @@
    })
  }, [derivedNodes, blocks, pendingSelection, clearPendingSelection])

  // Phase 2: When displayNodes updates, check if pending zoom blocks are ready
  // (Phase 1 is located earlier in the file where pendingZoomBlockIdsRef is defined)
  useEffect(() => {
    const pendingBlockIds = pendingZoomBlockIdsRef.current
    if (!pendingBlockIds || pendingBlockIds.size === 0) {
      return
    }

    // Find the nodes we're waiting for
    const pendingNodes = displayNodes.filter((node) => pendingBlockIds.has(node.id))

    // Check if all expected nodes are present with valid dimensions
    const allNodesReady =
      pendingNodes.length === pendingBlockIds.size &&
      pendingNodes.every(
        (node) =>
          typeof node.width === 'number' &&
          typeof node.height === 'number' &&
          node.width > 0 &&
          node.height > 0
      )

    if (allNodesReady) {
      logger.info('Diff ready - focusing on changed blocks', {
        changedBlockIds: Array.from(pendingBlockIds),
        foundNodes: pendingNodes.length,
      })
      // Clear pending state before zooming to prevent re-triggers
      pendingZoomBlockIdsRef.current = null
      // Use requestAnimationFrame to ensure React has finished rendering
      requestAnimationFrame(() => {
        fitViewToBounds({
          nodes: pendingNodes,
          duration: 600,
          padding: 0.1,
          minZoom: 0.5,
          maxZoom: 1.0,
        })
      })
    }
  }, [displayNodes, fitViewToBounds])

  /** Handles ActionBar remove-from-subflow events. */
  useEffect(() => {
    const handleRemoveFromSubflow = (event: Event) => {

@@ -214,6 +214,15 @@ export const A2ABlock: BlockConfig<A2AResponse> = {
    ],
    config: {
      tool: (params) => params.operation as string,
      params: (params) => {
        const { fileUpload, fileReference, ...rest } = params
        const hasFileUpload = Array.isArray(fileUpload) ? fileUpload.length > 0 : !!fileUpload
        const files = hasFileUpload ? fileUpload : fileReference
        return {
          ...rest,
          ...(files ? { files } : {}),
        }
      },
    },
  },
  inputs: {

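The params mapping above makes an uploaded file win over a referenced one; hypothetical inputs and results (the operation name and values are illustrative only):

// params({ operation: 'send_task', fileUpload: [file], fileReference: 'ref-1' })
//   => { operation: 'send_task', files: [file] }    // upload takes precedence
// params({ operation: 'send_task', fileUpload: [], fileReference: 'ref-1' })
//   => { operation: 'send_task', files: 'ref-1' }   // empty upload falls back to reference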
@@ -581,18 +581,6 @@ export const GmailV2Block: BlockConfig<GmailToolResponse> = {
    results: { type: 'json', description: 'Search/read summary results' },
    attachments: { type: 'json', description: 'Downloaded attachments (if enabled)' },

    // Draft-specific outputs
    draftId: {
      type: 'string',
      description: 'Draft ID',
      condition: { field: 'operation', value: 'draft_gmail' },
    },
    messageId: {
      type: 'string',
      description: 'Gmail message ID for the draft',
      condition: { field: 'operation', value: 'draft_gmail' },
    },

    // Trigger outputs (unchanged)
    email_id: { type: 'string', description: 'Gmail message ID' },
    thread_id: { type: 'string', description: 'Gmail thread ID' },

@@ -661,25 +661,12 @@ Return ONLY the PostgREST filter expression - no explanations, no markdown, no e
      placeholder: 'folder/subfolder/',
      condition: { field: 'operation', value: 'storage_upload' },
    },
    {
      id: 'file',
      title: 'File',
      type: 'file-upload',
      canonicalParamId: 'fileData',
      placeholder: 'Upload file to storage',
      condition: { field: 'operation', value: 'storage_upload' },
      mode: 'basic',
      multiple: false,
      required: true,
    },
    {
      id: 'fileContent',
      title: 'File Content',
      type: 'code',
      canonicalParamId: 'fileData',
      placeholder: 'Base64 encoded for binary files, or plain text',
      condition: { field: 'operation', value: 'storage_upload' },
      mode: 'advanced',
      required: true,
    },
    {

@@ -259,17 +259,6 @@ export class ExecutionEngine {
  }

  private initializeQueue(triggerBlockId?: string): void {
    // Run-from-block mode: start directly from specified block
    if (this.context.runFromBlockContext) {
      const { startBlockId } = this.context.runFromBlockContext
      logger.info('Initializing queue for run-from-block mode', {
        startBlockId,
        dirtySetSize: this.context.runFromBlockContext.dirtySet.size,
      })
      this.addToQueue(startBlockId)
      return
    }

    const pendingBlocks = this.context.metadata.pendingBlocks
    const remainingEdges = (this.context.metadata as any).remainingEdges

@@ -5,21 +5,12 @@ import { BlockExecutor } from '@/executor/execution/block-executor'
import { EdgeManager } from '@/executor/execution/edge-manager'
import { ExecutionEngine } from '@/executor/execution/engine'
import { ExecutionState } from '@/executor/execution/state'
import type {
  ContextExtensions,
  SerializableExecutionState,
  WorkflowInput,
} from '@/executor/execution/types'
import type { ContextExtensions, WorkflowInput } from '@/executor/execution/types'
import { createBlockHandlers } from '@/executor/handlers/registry'
import { LoopOrchestrator } from '@/executor/orchestrators/loop'
import { NodeExecutionOrchestrator } from '@/executor/orchestrators/node'
import { ParallelOrchestrator } from '@/executor/orchestrators/parallel'
import type { BlockState, ExecutionContext, ExecutionResult } from '@/executor/types'
import {
  computeDirtySet,
  type RunFromBlockContext,
  validateRunFromBlock,
} from '@/executor/utils/run-from-block'
import {
  buildResolutionFromBlock,
  buildStartBlockOutput,
@@ -98,126 +89,17 @@ export class DAGExecutor {
    }
  }

  /**
   * Execute workflow starting from a specific block, using cached outputs
   * for all upstream/unaffected blocks from the source snapshot.
   *
   * This implements Jupyter notebook-style execution where:
   * - The start block and all downstream blocks are re-executed
   * - Upstream blocks retain their cached outputs from the source snapshot
   * - The result is a merged execution state
   *
   * @param workflowId - The workflow ID
   * @param startBlockId - The block to start execution from
   * @param sourceSnapshot - The execution state from a previous run
   * @returns Merged execution result with cached + fresh outputs
   */
  async executeFromBlock(
    workflowId: string,
    startBlockId: string,
    sourceSnapshot: SerializableExecutionState
  ): Promise<ExecutionResult> {
    // Build full DAG (no trigger constraint - we need all blocks for validation)
    const dag = this.dagBuilder.build(this.workflow)

    // Validate the start block
    const executedBlocks = new Set(sourceSnapshot.executedBlocks)
    const validation = validateRunFromBlock(startBlockId, dag, executedBlocks)
    if (!validation.valid) {
      throw new Error(validation.error)
    }

    // Compute dirty set (blocks that will be re-executed)
    const dirtySet = computeDirtySet(dag, startBlockId)

    logger.info('Executing from block', {
      workflowId,
      startBlockId,
      dirtySetSize: dirtySet.size,
      totalBlocks: dag.nodes.size,
      dirtyBlocks: Array.from(dirtySet),
    })

    // For convergent blocks in the dirty set, remove incoming edges from non-dirty sources.
    // This ensures that a dirty block waiting on multiple inputs doesn't wait for non-dirty
    // upstream blocks (whose outputs are already cached).
    for (const nodeId of dirtySet) {
      const node = dag.nodes.get(nodeId)
      if (!node) continue

      const nonDirtyIncoming: string[] = []
      for (const sourceId of node.incomingEdges) {
        if (!dirtySet.has(sourceId)) {
          nonDirtyIncoming.push(sourceId)
        }
      }

      for (const sourceId of nonDirtyIncoming) {
        node.incomingEdges.delete(sourceId)
        logger.debug('Removed non-dirty incoming edge for run-from-block', {
          nodeId,
          sourceId,
        })
      }
    }

    // Create context with snapshot state + runFromBlockContext
    const runFromBlockContext = { startBlockId, dirtySet }
    const { context, state } = this.createExecutionContext(workflowId, undefined, {
      snapshotState: sourceSnapshot,
      runFromBlockContext,
    })

    // Setup orchestrators and engine (same as execute())
    const resolver = new VariableResolver(this.workflow, this.workflowVariables, state)
    const loopOrchestrator = new LoopOrchestrator(dag, state, resolver)
    loopOrchestrator.setContextExtensions(this.contextExtensions)
    const parallelOrchestrator = new ParallelOrchestrator(dag, state)
    parallelOrchestrator.setResolver(resolver)
    parallelOrchestrator.setContextExtensions(this.contextExtensions)
    const allHandlers = createBlockHandlers()
    const blockExecutor = new BlockExecutor(allHandlers, resolver, this.contextExtensions, state)
    const edgeManager = new EdgeManager(dag)
    loopOrchestrator.setEdgeManager(edgeManager)
    const nodeOrchestrator = new NodeExecutionOrchestrator(
      dag,
      state,
      blockExecutor,
      loopOrchestrator,
      parallelOrchestrator
    )
    const engine = new ExecutionEngine(context, dag, edgeManager, nodeOrchestrator)

    // Run and return result
    return await engine.run()
  }

  private createExecutionContext(
    workflowId: string,
    triggerBlockId?: string,
    overrides?: {
      snapshotState?: SerializableExecutionState
      runFromBlockContext?: RunFromBlockContext
    }
    triggerBlockId?: string
  ): { context: ExecutionContext; state: ExecutionState } {
    const snapshotState = overrides?.snapshotState ?? this.contextExtensions.snapshotState
    const snapshotState = this.contextExtensions.snapshotState
    const blockStates = snapshotState?.blockStates
      ? new Map(Object.entries(snapshotState.blockStates))
      : new Map<string, BlockState>()
    let executedBlocks = snapshotState?.executedBlocks
    const executedBlocks = snapshotState?.executedBlocks
      ? new Set(snapshotState.executedBlocks)
      : new Set<string>()

    // In run-from-block mode, clear the executed status for dirty blocks so they can be re-executed
    if (overrides?.runFromBlockContext) {
      const { dirtySet } = overrides.runFromBlockContext
      executedBlocks = new Set([...executedBlocks].filter((id) => !dirtySet.has(id)))
      logger.info('Cleared executed status for dirty blocks', {
        dirtySetSize: dirtySet.size,
        remainingExecutedBlocks: executedBlocks.size,
      })
    }

    const state = new ExecutionState(blockStates, executedBlocks)

    const context: ExecutionContext = {
@@ -287,7 +169,6 @@
      abortSignal: this.contextExtensions.abortSignal,
      includeFileBase64: this.contextExtensions.includeFileBase64,
      base64MaxBytes: this.contextExtensions.base64MaxBytes,
      runFromBlockContext: overrides?.runFromBlockContext,
    }

    if (this.contextExtensions.resumeFromSnapshot) {
@@ -312,12 +193,6 @@
        pendingBlocks: context.metadata.pendingBlocks,
        skipStarterBlockInit: true,
      })
    } else if (overrides?.runFromBlockContext) {
      // In run-from-block mode, skip starter block initialization
      // All block states come from the snapshot
      logger.info('Run-from-block mode: skipping starter block initialization', {
        startBlockId: overrides.runFromBlockContext.startBlockId,
      })
    } else {
      this.initializeStarterBlock(context, state, triggerBlockId)
    }

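A minimal usage sketch for the executeFromBlock method removed above, assuming a DAGExecutor has already been constructed for the workflow (the constructor arguments are elided and the variable names are illustrative):

// Sketch: re-run startBlockId and everything downstream of it, reusing the
// previous run's snapshot for upstream outputs. Per the JSDoc above, the
// returned ExecutionResult merges cached and fresh block outputs.
const result = await executor.executeFromBlock(workflowId, startBlockId, sourceSnapshot)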
@@ -1,6 +1,5 @@
import type { Edge } from 'reactflow'
import type { BlockLog, BlockState, NormalizedBlockOutput } from '@/executor/types'
import type { RunFromBlockContext } from '@/executor/utils/run-from-block'
import type { SubflowType } from '@/stores/workflows/workflow/types'

export interface ExecutionMetadata {
@@ -106,12 +105,6 @@ export interface ContextExtensions {
    output: { input?: any; output: NormalizedBlockOutput; executionTime: number },
    iterationContext?: IterationContext
  ) => Promise<void>

  /**
   * Run-from-block configuration. When provided, executor runs in partial
   * execution mode starting from the specified block.
   */
  runFromBlockContext?: RunFromBlockContext
}

export interface WorkflowInput {

@@ -31,20 +31,7 @@ export class NodeExecutionOrchestrator {
      throw new Error(`Node not found in DAG: ${nodeId}`)
    }

    // In run-from-block mode, skip execution for non-dirty blocks and return cached output
    if (ctx.runFromBlockContext && !ctx.runFromBlockContext.dirtySet.has(nodeId)) {
      const cachedOutput = this.state.getBlockOutput(nodeId) || {}
      logger.debug('Skipping non-dirty block in run-from-block mode', { nodeId })
      return {
        nodeId,
        output: cachedOutput,
        isFinalOutput: false,
      }
    }

    // Skip hasExecuted check for dirty blocks in run-from-block mode - they need to be re-executed
    const isDirtyBlock = ctx.runFromBlockContext?.dirtySet.has(nodeId) ?? false
    if (!isDirtyBlock && this.state.hasExecuted(nodeId)) {
    if (this.state.hasExecuted(nodeId)) {
      const output = this.state.getBlockOutput(nodeId) || {}
      return {
        nodeId,

@@ -1,7 +1,6 @@
import type { TraceSpan } from '@/lib/logs/types'
import type { PermissionGroupConfig } from '@/lib/permission-groups/types'
import type { BlockOutput } from '@/blocks/types'
import type { RunFromBlockContext } from '@/executor/utils/run-from-block'
import type { SerializedBlock, SerializedWorkflow } from '@/serializer/types'

export interface UserFile {
@@ -251,12 +250,6 @@ export interface ExecutionContext {
   * will not have their base64 content fetched.
   */
  base64MaxBytes?: number

  /**
   * Context for "run from block" mode. When present, only blocks in dirtySet
   * will be executed; others return cached outputs from the source snapshot.
   */
  runFromBlockContext?: RunFromBlockContext
}

export interface ExecutionResult {

@@ -1,9 +1,7 @@
import { getBlockOutputs } from '@/lib/workflows/blocks/block-outputs'
import { normalizeName } from '@/executor/constants'
import type { ExecutionContext } from '@/executor/types'
import type { OutputSchema } from '@/executor/utils/block-reference'
import type { SerializedBlock } from '@/serializer/types'
import type { ToolConfig } from '@/tools/types'
import { getTool } from '@/tools/utils'

export interface BlockDataCollection {
  blockData: Record<string, unknown>
@@ -11,32 +9,6 @@ export interface BlockDataCollection {
  blockOutputSchemas: Record<string, OutputSchema>
}

export function getBlockSchema(
  block: SerializedBlock,
  toolConfig?: ToolConfig
): OutputSchema | undefined {
  const isTrigger =
    block.metadata?.category === 'triggers' ||
    (block.config?.params as Record<string, unknown> | undefined)?.triggerMode === true

  // Triggers use saved outputs (these define the trigger payload schema)
  if (isTrigger && block.outputs && Object.keys(block.outputs).length > 0) {
    return block.outputs as OutputSchema
  }

  // When a tool is selected, tool outputs are the source of truth
  if (toolConfig?.outputs && Object.keys(toolConfig.outputs).length > 0) {
    return toolConfig.outputs as OutputSchema
  }

  // Fallback to saved outputs for blocks without tools
  if (block.outputs && Object.keys(block.outputs).length > 0) {
    return block.outputs as OutputSchema
  }

  return undefined
}

export function collectBlockData(ctx: ExecutionContext): BlockDataCollection {
  const blockData: Record<string, unknown> = {}
  const blockNameMapping: Record<string, string> = {}
@@ -46,21 +18,24 @@ export function collectBlockData(ctx: ExecutionContext): BlockDataCollection {
    if (state.output !== undefined) {
      blockData[id] = state.output
    }
  }

  const workflowBlocks = ctx.workflow?.blocks ?? []
  for (const block of workflowBlocks) {
    const id = block.id
    const workflowBlock = ctx.workflow?.blocks?.find((b) => b.id === id)
    if (!workflowBlock) continue

    if (block.metadata?.name) {
      blockNameMapping[normalizeName(block.metadata.name)] = id
    if (workflowBlock.metadata?.name) {
      blockNameMapping[normalizeName(workflowBlock.metadata.name)] = id
    }

    const toolId = block.config?.tool
    const toolConfig = toolId ? getTool(toolId) : undefined
    const schema = getBlockSchema(block, toolConfig)
    if (schema && Object.keys(schema).length > 0) {
      blockOutputSchemas[id] = schema
    const blockType = workflowBlock.metadata?.id
    if (blockType) {
      const params = workflowBlock.config?.params as Record<string, unknown> | undefined
      const subBlocks = params
        ? Object.fromEntries(Object.entries(params).map(([k, v]) => [k, { value: v }]))
        : undefined
      const schema = getBlockOutputs(blockType, subBlocks)
      if (schema && Object.keys(schema).length > 0) {
        blockOutputSchemas[id] = schema
      }
    }
  }

@@ -1,48 +0,0 @@
/**
 * Formats a JavaScript/TypeScript value as a code literal for the target language.
 * Handles special cases like null, undefined, booleans, and Python-specific number representations.
 *
 * @param value - The value to format
 * @param language - Target language ('javascript' or 'python')
 * @returns A string literal representation valid in the target language
 *
 * @example
 * formatLiteralForCode(null, 'python') // => 'None'
 * formatLiteralForCode(true, 'python') // => 'True'
 * formatLiteralForCode(NaN, 'python') // => "float('nan')"
 * formatLiteralForCode("hello", 'javascript') // => '"hello"'
 * formatLiteralForCode({a: 1}, 'python') // => "json.loads('{\"a\":1}')"
 */
export function formatLiteralForCode(value: unknown, language: 'javascript' | 'python'): string {
  const isPython = language === 'python'

  if (value === undefined) {
    return isPython ? 'None' : 'undefined'
  }
  if (value === null) {
    return isPython ? 'None' : 'null'
  }
  if (typeof value === 'boolean') {
    return isPython ? (value ? 'True' : 'False') : String(value)
  }
  if (typeof value === 'number') {
    if (Number.isNaN(value)) {
      return isPython ? "float('nan')" : 'NaN'
    }
    if (value === Number.POSITIVE_INFINITY) {
      return isPython ? "float('inf')" : 'Infinity'
    }
    if (value === Number.NEGATIVE_INFINITY) {
      return isPython ? "float('-inf')" : '-Infinity'
    }
    return String(value)
  }
  if (typeof value === 'string') {
    return JSON.stringify(value)
  }
  // Objects and arrays - Python needs json.loads() because JSON true/false/null aren't valid Python
  if (isPython) {
    return `json.loads(${JSON.stringify(JSON.stringify(value))})`
  }
  return JSON.stringify(value)
}
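A hedged illustration of why the json.loads() round-trip above matters when splicing objects into generated Python (the call site is hypothetical):

const payload = { active: true, retries: null }
const line = `result = ${formatLiteralForCode(payload, 'python')}`
// => result = json.loads("{\"active\":true,\"retries\":null}")
// Splicing JSON.stringify(payload) directly would emit `true`/`null`,
// which are not valid Python literals.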
@@ -1,336 +0,0 @@
import { describe, expect, it } from 'vitest'
import type { DAG, DAGNode } from '@/executor/dag/builder'
import type { DAGEdge, NodeMetadata } from '@/executor/dag/types'
import type { SerializedLoop, SerializedParallel } from '@/serializer/types'
import { computeDirtySet, validateRunFromBlock } from '@/executor/utils/run-from-block'

/**
 * Helper to create a DAG node for testing
 */
function createNode(
  id: string,
  outgoingEdges: Array<{ target: string; sourceHandle?: string }> = [],
  metadata: Partial<NodeMetadata> = {}
): DAGNode {
  const edges = new Map<string, DAGEdge>()
  for (const edge of outgoingEdges) {
    edges.set(edge.target, { target: edge.target, sourceHandle: edge.sourceHandle })
  }

  return {
    id,
    block: {
      id,
      position: { x: 0, y: 0 },
      config: { tool: 'test', params: {} },
      inputs: {},
      outputs: {},
      metadata: { id: 'test', name: `block-${id}`, category: 'tools' },
      enabled: true,
    },
    incomingEdges: new Set<string>(),
    outgoingEdges: edges,
    metadata: {
      isParallelBranch: false,
      isLoopNode: false,
      isSentinel: false,
      ...metadata,
    },
  }
}

/**
 * Helper to create a DAG for testing
 */
function createDAG(nodes: DAGNode[]): DAG {
  const nodeMap = new Map<string, DAGNode>()
  for (const node of nodes) {
    nodeMap.set(node.id, node)
  }

  // Set up incoming edges based on outgoing edges
  for (const node of nodes) {
    for (const [, edge] of node.outgoingEdges) {
      const targetNode = nodeMap.get(edge.target)
      if (targetNode) {
        targetNode.incomingEdges.add(node.id)
      }
    }
  }

  return {
    nodes: nodeMap,
    loopConfigs: new Map<string, SerializedLoop>(),
    parallelConfigs: new Map<string, SerializedParallel>(),
  }
}

describe('computeDirtySet', () => {
  it('includes start block in dirty set', () => {
    const dag = createDAG([createNode('A'), createNode('B'), createNode('C')])

    const dirtySet = computeDirtySet(dag, 'B')

    expect(dirtySet.has('B')).toBe(true)
  })

  it('includes all downstream blocks in linear workflow', () => {
    // A → B → C → D
    const dag = createDAG([
      createNode('A', [{ target: 'B' }]),
      createNode('B', [{ target: 'C' }]),
      createNode('C', [{ target: 'D' }]),
      createNode('D'),
    ])

    const dirtySet = computeDirtySet(dag, 'B')

    expect(dirtySet.has('A')).toBe(false)
    expect(dirtySet.has('B')).toBe(true)
    expect(dirtySet.has('C')).toBe(true)
    expect(dirtySet.has('D')).toBe(true)
    expect(dirtySet.size).toBe(3)
  })

  it('handles branching paths', () => {
    // A → B → C
    //     ↓
    //     D → E
    const dag = createDAG([
      createNode('A', [{ target: 'B' }]),
      createNode('B', [{ target: 'C' }, { target: 'D' }]),
      createNode('C'),
      createNode('D', [{ target: 'E' }]),
      createNode('E'),
    ])

    const dirtySet = computeDirtySet(dag, 'B')

    expect(dirtySet.has('A')).toBe(false)
    expect(dirtySet.has('B')).toBe(true)
    expect(dirtySet.has('C')).toBe(true)
    expect(dirtySet.has('D')).toBe(true)
    expect(dirtySet.has('E')).toBe(true)
    expect(dirtySet.size).toBe(4)
  })

  it('handles convergence points', () => {
    // A → C
    // B → C → D
    const dag = createDAG([
      createNode('A', [{ target: 'C' }]),
      createNode('B', [{ target: 'C' }]),
      createNode('C', [{ target: 'D' }]),
      createNode('D'),
    ])

    // Run from A: should include A, C, D (but not B)
    const dirtySet = computeDirtySet(dag, 'A')

    expect(dirtySet.has('A')).toBe(true)
    expect(dirtySet.has('B')).toBe(false)
    expect(dirtySet.has('C')).toBe(true)
    expect(dirtySet.has('D')).toBe(true)
    expect(dirtySet.size).toBe(3)
  })

  it('handles diamond pattern', () => {
    //   B
    //  ↗ ↘
    // A   D
    //  ↘ ↗
    //   C
    const dag = createDAG([
      createNode('A', [{ target: 'B' }, { target: 'C' }]),
      createNode('B', [{ target: 'D' }]),
      createNode('C', [{ target: 'D' }]),
      createNode('D'),
    ])

    const dirtySet = computeDirtySet(dag, 'A')

    expect(dirtySet.has('A')).toBe(true)
    expect(dirtySet.has('B')).toBe(true)
    expect(dirtySet.has('C')).toBe(true)
    expect(dirtySet.has('D')).toBe(true)
    expect(dirtySet.size).toBe(4)
  })

  it('stops at graph boundaries', () => {
    // A → B   C → D (disconnected)
    const dag = createDAG([
      createNode('A', [{ target: 'B' }]),
      createNode('B'),
      createNode('C', [{ target: 'D' }]),
      createNode('D'),
    ])

    const dirtySet = computeDirtySet(dag, 'A')

    expect(dirtySet.has('A')).toBe(true)
    expect(dirtySet.has('B')).toBe(true)
    expect(dirtySet.has('C')).toBe(false)
    expect(dirtySet.has('D')).toBe(false)
    expect(dirtySet.size).toBe(2)
  })

  it('handles single node workflow', () => {
    const dag = createDAG([createNode('A')])

    const dirtySet = computeDirtySet(dag, 'A')

    expect(dirtySet.has('A')).toBe(true)
    expect(dirtySet.size).toBe(1)
  })

  it('handles node not in DAG gracefully', () => {
    const dag = createDAG([createNode('A'), createNode('B')])

    const dirtySet = computeDirtySet(dag, 'nonexistent')

    // Should just contain the start block ID even if not found
    expect(dirtySet.has('nonexistent')).toBe(true)
    expect(dirtySet.size).toBe(1)
  })

  it('includes convergent block when running from one branch of parallel', () => {
    // Parallel branches converging:
    // A → B → D
    // A → C → D
    // Running from B should include B and D (but not A or C)
    const dag = createDAG([
      createNode('A', [{ target: 'B' }, { target: 'C' }]),
      createNode('B', [{ target: 'D' }]),
      createNode('C', [{ target: 'D' }]),
      createNode('D'),
    ])

    const dirtySet = computeDirtySet(dag, 'B')

    expect(dirtySet.has('A')).toBe(false)
    expect(dirtySet.has('B')).toBe(true)
    expect(dirtySet.has('C')).toBe(false)
    expect(dirtySet.has('D')).toBe(true)
    expect(dirtySet.size).toBe(2)
  })

  it('handles running from convergent block itself (all upstream non-dirty)', () => {
    // A → C
    // B → C
    // Running from C should include C and its downstream D (not A or B)
    const dag = createDAG([
      createNode('A', [{ target: 'C' }]),
      createNode('B', [{ target: 'C' }]),
      createNode('C', [{ target: 'D' }]),
      createNode('D'),
    ])

    const dirtySet = computeDirtySet(dag, 'C')

    expect(dirtySet.has('A')).toBe(false)
    expect(dirtySet.has('B')).toBe(false)
    expect(dirtySet.has('C')).toBe(true)
    expect(dirtySet.has('D')).toBe(true)
    expect(dirtySet.size).toBe(2)
  })

  it('handles deep downstream chains', () => {
    // A → B → C → D → E → F
    // Running from C should include C, D, E, F
    const dag = createDAG([
      createNode('A', [{ target: 'B' }]),
      createNode('B', [{ target: 'C' }]),
      createNode('C', [{ target: 'D' }]),
      createNode('D', [{ target: 'E' }]),
      createNode('E', [{ target: 'F' }]),
      createNode('F'),
    ])

    const dirtySet = computeDirtySet(dag, 'C')

    expect(dirtySet.has('A')).toBe(false)
    expect(dirtySet.has('B')).toBe(false)
    expect(dirtySet.has('C')).toBe(true)
    expect(dirtySet.has('D')).toBe(true)
    expect(dirtySet.has('E')).toBe(true)
    expect(dirtySet.has('F')).toBe(true)
    expect(dirtySet.size).toBe(4)
  })
})

describe('validateRunFromBlock', () => {
  it('accepts valid block', () => {
    const dag = createDAG([createNode('A'), createNode('B')])
    const executedBlocks = new Set(['A', 'B'])

    const result = validateRunFromBlock('A', dag, executedBlocks)

    expect(result.valid).toBe(true)
    expect(result.error).toBeUndefined()
  })

  it('rejects block not found in DAG', () => {
    const dag = createDAG([createNode('A')])
    const executedBlocks = new Set(['A', 'B'])

    const result = validateRunFromBlock('B', dag, executedBlocks)

    expect(result.valid).toBe(false)
    expect(result.error).toContain('Block not found')
  })

  it('rejects blocks inside loops', () => {
    const dag = createDAG([createNode('A', [], { isLoopNode: true, loopId: 'loop-1' })])
    const executedBlocks = new Set(['A'])

    const result = validateRunFromBlock('A', dag, executedBlocks)

    expect(result.valid).toBe(false)
    expect(result.error).toContain('inside loop')
    expect(result.error).toContain('loop-1')
  })

  it('rejects blocks inside parallels', () => {
    const dag = createDAG([createNode('A', [], { isParallelBranch: true, parallelId: 'parallel-1' })])
    const executedBlocks = new Set(['A'])

    const result = validateRunFromBlock('A', dag, executedBlocks)

    expect(result.valid).toBe(false)
    expect(result.error).toContain('inside parallel')
    expect(result.error).toContain('parallel-1')
  })

  it('rejects sentinel nodes', () => {
    const dag = createDAG([createNode('A', [], { isSentinel: true, sentinelType: 'start' })])
    const executedBlocks = new Set(['A'])

    const result = validateRunFromBlock('A', dag, executedBlocks)

    expect(result.valid).toBe(false)
    expect(result.error).toContain('sentinel')
  })

  it('rejects unexecuted blocks', () => {
    const dag = createDAG([createNode('A'), createNode('B')])
    const executedBlocks = new Set(['A']) // B was not executed

    const result = validateRunFromBlock('B', dag, executedBlocks)

    expect(result.valid).toBe(false)
    expect(result.error).toContain('was not executed')
  })

  it('accepts regular executed block', () => {
    const dag = createDAG([
      createNode('trigger', [{ target: 'A' }]),
      createNode('A', [{ target: 'B' }]),
      createNode('B'),
    ])
    const executedBlocks = new Set(['trigger', 'A', 'B'])

    const result = validateRunFromBlock('A', dag, executedBlocks)

    expect(result.valid).toBe(true)
  })
})
@@ -1,110 +0,0 @@
import { createLogger } from '@sim/logger'
import type { DAG } from '@/executor/dag/builder'

const logger = createLogger('run-from-block')

/**
 * Result of validating a block for run-from-block execution.
 */
export interface RunFromBlockValidation {
  valid: boolean
  error?: string
}

/**
 * Context for run-from-block execution mode.
 */
export interface RunFromBlockContext {
  /** The block ID to start execution from */
  startBlockId: string
  /** Set of block IDs that need re-execution (start block + all downstream) */
  dirtySet: Set<string>
}

/**
 * Computes all blocks that need re-execution when running from a specific block.
 * Uses BFS to find all downstream blocks reachable via outgoing edges.
 *
 * @param dag - The workflow DAG
 * @param startBlockId - The block to start execution from
 * @returns Set of block IDs that are "dirty" and need re-execution
 */
export function computeDirtySet(dag: DAG, startBlockId: string): Set<string> {
  const dirty = new Set<string>([startBlockId])
  const queue = [startBlockId]

  while (queue.length > 0) {
    const nodeId = queue.shift()!
    const node = dag.nodes.get(nodeId)
    if (!node) continue

    for (const [, edge] of node.outgoingEdges) {
      if (!dirty.has(edge.target)) {
        dirty.add(edge.target)
        queue.push(edge.target)
      }
    }
  }

  logger.debug('Computed dirty set', {
    startBlockId,
    dirtySetSize: dirty.size,
    dirtyBlocks: Array.from(dirty),
  })

  return dirty
}

/**
 * Validates that a block can be used as a run-from-block starting point.
 *
 * Validation rules:
 * - Block must exist in the DAG
 * - Block cannot be inside a loop
 * - Block cannot be inside a parallel
 * - Block cannot be a sentinel node
 * - Block must have been executed in the source run
 *
 * @param blockId - The block ID to validate
 * @param dag - The workflow DAG
 * @param executedBlocks - Set of blocks that were executed in the source run
 * @returns Validation result with error message if invalid
 */
export function validateRunFromBlock(
  blockId: string,
  dag: DAG,
  executedBlocks: Set<string>
): RunFromBlockValidation {
  const node = dag.nodes.get(blockId)

  if (!node) {
    return { valid: false, error: `Block not found in workflow: ${blockId}` }
  }

  if (node.metadata.isLoopNode) {
    return {
      valid: false,
      error: `Cannot run from block inside loop: ${node.metadata.loopId}`,
    }
  }

  if (node.metadata.isParallelBranch) {
    return {
      valid: false,
      error: `Cannot run from block inside parallel: ${node.metadata.parallelId}`,
    }
  }

  if (node.metadata.isSentinel) {
    return { valid: false, error: 'Cannot run from sentinel node' }
  }

  if (!executedBlocks.has(blockId)) {
    return {
      valid: false,
      error: `Block was not executed in source run: ${blockId}`,
    }
  }

  return { valid: true }
}
@@ -378,30 +378,8 @@ function buildManualTriggerOutput(
  return mergeFilesIntoOutput(output, workflowInput)
}

function buildIntegrationTriggerOutput(
  workflowInput: unknown,
  structuredInput: Record<string, unknown>,
  hasStructured: boolean
): NormalizedBlockOutput {
  const output: NormalizedBlockOutput = {}

  if (hasStructured) {
    for (const [key, value] of Object.entries(structuredInput)) {
      output[key] = value
    }
  }

  if (isPlainObject(workflowInput)) {
    for (const [key, value] of Object.entries(workflowInput)) {
      if (value !== undefined && value !== null) {
        output[key] = value
      } else if (!Object.hasOwn(output, key)) {
        output[key] = value
      }
    }
  }

  return mergeFilesIntoOutput(output, workflowInput)
function buildIntegrationTriggerOutput(workflowInput: unknown): NormalizedBlockOutput {
  return isPlainObject(workflowInput) ? (workflowInput as NormalizedBlockOutput) : {}
}

function extractSubBlocks(block: SerializedBlock): Record<string, unknown> | undefined {
@@ -450,7 +428,7 @@ export function buildStartBlockOutput(options: StartBlockOutputOptions): Normali
      return buildManualTriggerOutput(finalInput, workflowInput)

    case StartBlockPath.EXTERNAL_TRIGGER:
      return buildIntegrationTriggerOutput(workflowInput, structuredInput, hasStructured)
      return buildIntegrationTriggerOutput(workflowInput)

    case StartBlockPath.LEGACY_STARTER:
      return buildLegacyStarterOutput(

@@ -157,14 +157,7 @@ export class VariableResolver {

    let replacementError: Error | null = null

    const blockType = block?.metadata?.id
    const language =
      blockType === BlockType.FUNCTION
        ? ((block?.config?.params as Record<string, unknown> | undefined)?.language as
            | string
            | undefined)
        : undefined

    // Use generic utility for smart variable reference replacement
    let result = replaceValidReferences(template, (match) => {
      if (replacementError) return match

@@ -174,7 +167,14 @@
          return match
        }

        return this.blockResolver.formatValueForBlock(resolved, blockType, language)
        const blockType = block?.metadata?.id
        const isInTemplateLiteral =
          blockType === BlockType.FUNCTION &&
          template.includes('${') &&
          template.includes('}') &&
          template.includes('`')

        return this.blockResolver.formatValueForBlock(resolved, blockType, isInTemplateLiteral)
      } catch (error) {
        replacementError = error instanceof Error ? error : new Error(String(error))
        return match

@@ -257,9 +257,15 @@ describe('BlockResolver', () => {
    expect(result).toBe('"hello"')
  })

  it.concurrent('should format object for function block', () => {
  it.concurrent('should format string for function block in template literal', () => {
    const resolver = new BlockResolver(createTestWorkflow())
    const result = resolver.formatValueForBlock({ a: 1 }, 'function')
    const result = resolver.formatValueForBlock('hello', 'function', true)
    expect(result).toBe('hello')
  })

  it.concurrent('should format object for function block in template literal', () => {
    const resolver = new BlockResolver(createTestWorkflow())
    const result = resolver.formatValueForBlock({ a: 1 }, 'function', true)
    expect(result).toBe('{"a":1}')
  })

@@ -1,16 +1,15 @@
+import { getBlockOutputs } from '@/lib/workflows/blocks/block-outputs'
 import {
   isReference,
   normalizeName,
   parseReferencePath,
   SPECIAL_REFERENCE_PREFIXES,
 } from '@/executor/constants'
-import { getBlockSchema } from '@/executor/utils/block-data'
 import {
   InvalidFieldError,
   type OutputSchema,
   resolveBlockReference,
 } from '@/executor/utils/block-reference'
-import { formatLiteralForCode } from '@/executor/utils/code-formatting'
 import {
   navigatePath,
   type ResolutionContext,
@@ -68,9 +67,15 @@ export class BlockResolver implements Resolver {
         blockData[blockId] = output
       }

+      const blockType = block.metadata?.id
+      const params = block.config?.params as Record<string, unknown> | undefined
+      const subBlocks = params
+        ? Object.fromEntries(Object.entries(params).map(([k, v]) => [k, { value: v }]))
+        : undefined
       const toolId = block.config?.tool
       const toolConfig = toolId ? getTool(toolId) : undefined
-      const outputSchema = getBlockSchema(block, toolConfig)
+      const outputSchema =
+        toolConfig?.outputs ?? (blockType ? getBlockOutputs(blockType, subBlocks) : block.outputs)

       if (outputSchema && Object.keys(outputSchema).length > 0) {
         blockOutputSchemas[blockId] = outputSchema
@@ -160,13 +165,17 @@ export class BlockResolver implements Resolver {
     return this.nameToBlockId.get(normalizeName(name))
   }

-  public formatValueForBlock(value: any, blockType: string | undefined, language?: string): string {
+  public formatValueForBlock(
+    value: any,
+    blockType: string | undefined,
+    isInTemplateLiteral = false
+  ): string {
     if (blockType === 'condition') {
       return this.stringifyForCondition(value)
     }

     if (blockType === 'function') {
-      return this.formatValueForCodeContext(value, language)
+      return this.formatValueForCodeContext(value, isInTemplateLiteral)
     }

     if (blockType === 'response') {
@@ -207,7 +216,29 @@ export class BlockResolver implements Resolver {
     return String(value)
   }

-  private formatValueForCodeContext(value: any, language?: string): string {
-    return formatLiteralForCode(value, language === 'python' ? 'python' : 'javascript')
+  private formatValueForCodeContext(value: any, isInTemplateLiteral: boolean): string {
+    if (isInTemplateLiteral) {
+      if (typeof value === 'string') {
+        return value
+      }
+      if (typeof value === 'object' && value !== null) {
+        return JSON.stringify(value)
+      }
+      return String(value)
+    }
+
+    if (typeof value === 'string') {
+      return JSON.stringify(value)
+    }
+    if (typeof value === 'object' && value !== null) {
+      return JSON.stringify(value)
+    }
+    if (value === undefined) {
+      return 'undefined'
+    }
+    if (value === null) {
+      return 'null'
+    }
+    return String(value)
   }
 }

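Note: the tests earlier in this diff pin down the new formatting contract. For quick reference, the branches in formatValueForCodeContext yield the following (resolver being a BlockResolver instance; the expected outputs follow directly from the branches and tests above):

// Outputs follow from the branches above and the updated tests.
resolver.formatValueForBlock('hello', 'function')         // '"hello"' (JSON-quoted for code)
resolver.formatValueForBlock('hello', 'function', true)   // 'hello' (raw inside a template literal)
resolver.formatValueForBlock({ a: 1 }, 'function', true)  // '{"a":1}'
resolver.formatValueForBlock(undefined, 'function')       // 'undefined'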
@@ -30,10 +30,7 @@ export function navigatePath(obj: any, path: string[]): any {
       const arrayMatch = part.match(/^([^[]+)(\[.+)$/)
       if (arrayMatch) {
         const [, prop, bracketsPart] = arrayMatch
-        current =
-          typeof current === 'object' && current !== null
-            ? (current as Record<string, unknown>)[prop]
-            : undefined
+        current = current[prop]
         if (current === undefined || current === null) {
           return undefined
         }
@@ -52,10 +49,7 @@ export function navigatePath(obj: any, path: string[]): any {
       const index = Number.parseInt(part, 10)
       current = Array.isArray(current) ? current[index] : undefined
     } else {
-      current =
-        typeof current === 'object' && current !== null
-          ? (current as Record<string, unknown>)[part]
-          : undefined
+      current = current[part]
     }
   }
   return current

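Note: the guard being reverted here protected against indexing primitives. In JavaScript, property access on a non-null primitive does not throw, it returns undefined via boxing; only null and undefined receivers throw. A standalone sketch (not project code):

// Property access on a non-null primitive yields undefined, no exception.
const s: any = 'plain string'
console.log(s['someProp']) // undefined
const n: any = null
// n['someProp'] would throw a TypeError, which is why the null/undefined
// checks surrounding these accesses still matter.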
@@ -1,6 +1,5 @@
 import { useCallback } from 'react'
 import type { Node, ReactFlowInstance } from 'reactflow'
-import { BLOCK_DIMENSIONS } from '@/lib/workflows/blocks/block-dimensions'

 interface VisibleBounds {
   width: number
@@ -140,8 +139,8 @@ export function useCanvasViewport(reactFlowInstance: ReactFlowInstance | null) {
   let maxY = Number.NEGATIVE_INFINITY

   nodes.forEach((node) => {
-    const nodeWidth = node.width ?? BLOCK_DIMENSIONS.FIXED_WIDTH
-    const nodeHeight = node.height ?? BLOCK_DIMENSIONS.MIN_HEIGHT
+    const nodeWidth = node.width ?? 200
+    const nodeHeight = node.height ?? 100

     minX = Math.min(minX, node.position.x)
     minY = Math.min(minY, node.position.y)

@@ -680,10 +680,6 @@ export function useCollaborativeWorkflow() {
       previousPositions?: Map<string, { x: number; y: number; parentId?: string }>
     }
   ) => {
-      if (isBaselineDiffView) {
-        return
-      }
-
       if (!isInActiveRoom()) {
         logger.debug('Skipping batch position update - not in active workflow')
         return
@@ -729,7 +725,7 @@ export function useCollaborativeWorkflow() {
         }
       }
     },
-    [isBaselineDiffView, addToQueue, activeWorkflowId, session?.user?.id, isInActiveRoom, undoRedo]
+    [addToQueue, activeWorkflowId, session?.user?.id, isInActiveRoom, undoRedo]
   )

   const collaborativeUpdateBlockName = useCallback(
@@ -821,10 +817,6 @@ export function useCollaborativeWorkflow() {

   const collaborativeBatchToggleBlockEnabled = useCallback(
     (ids: string[]) => {
-      if (isBaselineDiffView) {
-        return
-      }
-
       if (ids.length === 0) return

       const previousStates: Record<string, boolean> = {}
@@ -857,7 +849,7 @@ export function useCollaborativeWorkflow() {

       undoRedo.recordBatchToggleEnabled(validIds, previousStates)
     },
-    [isBaselineDiffView, addToQueue, activeWorkflowId, session?.user?.id, undoRedo]
+    [addToQueue, activeWorkflowId, session?.user?.id, undoRedo]
   )

   const collaborativeBatchUpdateParent = useCallback(
@@ -869,10 +861,6 @@ export function useCollaborativeWorkflow() {
       affectedEdges: Edge[]
     }>
   ) => {
-      if (isBaselineDiffView) {
-        return
-      }
-
       if (!isInActiveRoom()) {
         logger.debug('Skipping batch update parent - not in active workflow')
         return
@@ -943,7 +931,7 @@ export function useCollaborativeWorkflow() {

       logger.debug('Batch updated parent for blocks', { updateCount: updates.length })
     },
-    [isBaselineDiffView, isInActiveRoom, undoRedo, addToQueue, activeWorkflowId, session?.user?.id]
+    [isInActiveRoom, undoRedo, addToQueue, activeWorkflowId, session?.user?.id]
   )

   const collaborativeToggleBlockAdvancedMode = useCallback(
@@ -963,37 +951,18 @@ export function useCollaborativeWorkflow() {

   const collaborativeSetBlockCanonicalMode = useCallback(
     (id: string, canonicalId: string, canonicalMode: 'basic' | 'advanced') => {
-      if (isBaselineDiffView) {
-        return
-      }
-
-      useWorkflowStore.getState().setBlockCanonicalMode(id, canonicalId, canonicalMode)
-
-      if (!activeWorkflowId) {
-        return
-      }
-
-      const operationId = crypto.randomUUID()
-      addToQueue({
-        id: operationId,
-        operation: {
-          operation: BLOCK_OPERATIONS.UPDATE_CANONICAL_MODE,
-          target: OPERATION_TARGETS.BLOCK,
-          payload: { id, canonicalId, canonicalMode },
-        },
-        workflowId: activeWorkflowId,
-        userId: session?.user?.id || 'unknown',
-      })
+      executeQueuedOperation(
+        BLOCK_OPERATIONS.UPDATE_CANONICAL_MODE,
+        OPERATION_TARGETS.BLOCK,
+        { id, canonicalId, canonicalMode },
+        () => useWorkflowStore.getState().setBlockCanonicalMode(id, canonicalId, canonicalMode)
+      )
     },
-    [isBaselineDiffView, activeWorkflowId, addToQueue, session?.user?.id]
+    [executeQueuedOperation]
   )

   const collaborativeBatchToggleBlockHandles = useCallback(
     (ids: string[]) => {
-      if (isBaselineDiffView) {
-        return
-      }
-
       if (ids.length === 0) return

       const previousStates: Record<string, boolean> = {}
@@ -1026,15 +995,11 @@ export function useCollaborativeWorkflow() {

       undoRedo.recordBatchToggleHandles(validIds, previousStates)
     },
-    [isBaselineDiffView, addToQueue, activeWorkflowId, session?.user?.id, undoRedo]
+    [addToQueue, activeWorkflowId, session?.user?.id, undoRedo]
   )

   const collaborativeBatchAddEdges = useCallback(
     (edges: Edge[], options?: { skipUndoRedo?: boolean }) => {
-      if (isBaselineDiffView) {
-        return false
-      }
-
       if (!isInActiveRoom()) {
         logger.debug('Skipping batch add edges - not in active workflow')
         return false
@@ -1070,15 +1035,11 @@ export function useCollaborativeWorkflow() {

       return true
     },
-    [isBaselineDiffView, addToQueue, activeWorkflowId, session?.user?.id, isInActiveRoom, undoRedo]
+    [addToQueue, activeWorkflowId, session?.user?.id, isInActiveRoom, undoRedo]
   )

   const collaborativeBatchRemoveEdges = useCallback(
     (edgeIds: string[], options?: { skipUndoRedo?: boolean }) => {
-      if (isBaselineDiffView) {
-        return false
-      }
-
       if (!isInActiveRoom()) {
         logger.debug('Skipping batch remove edges - not in active workflow')
         return false
@@ -1128,7 +1089,7 @@ export function useCollaborativeWorkflow() {
       logger.info('Batch removed edges', { count: validEdgeIds.length })
       return true
     },
-    [isBaselineDiffView, isInActiveRoom, addToQueue, activeWorkflowId, session, undoRedo]
+    [isInActiveRoom, addToQueue, activeWorkflowId, session, undoRedo]
   )

   const collaborativeSetSubblockValue = useCallback(
@@ -1204,10 +1165,6 @@ export function useCollaborativeWorkflow() {
     (blockId: string, subblockId: string, value: any) => {
       if (isApplyingRemoteChange.current) return

-      if (isBaselineDiffView) {
-        return
-      }
-
       if (!isInActiveRoom()) {
         logger.debug('Skipping tag selection - not in active workflow', {
           currentWorkflowId,
@@ -1235,14 +1192,7 @@ export function useCollaborativeWorkflow() {
         userId: session?.user?.id || 'unknown',
       })
     },
-    [
-      isBaselineDiffView,
-      addToQueue,
-      currentWorkflowId,
-      activeWorkflowId,
-      session?.user?.id,
-      isInActiveRoom,
-    ]
+    [addToQueue, currentWorkflowId, activeWorkflowId, session?.user?.id, isInActiveRoom]
   )

   const collaborativeUpdateLoopType = useCallback(
@@ -1588,10 +1538,6 @@ export function useCollaborativeWorkflow() {

   const collaborativeBatchRemoveBlocks = useCallback(
     (blockIds: string[], options?: { skipUndoRedo?: boolean }) => {
-      if (isBaselineDiffView) {
-        return false
-      }
-
       if (!isInActiveRoom()) {
         logger.debug('Skipping batch remove blocks - not in active workflow')
         return false
@@ -1673,7 +1619,6 @@ export function useCollaborativeWorkflow() {
       return true
     },
     [
-      isBaselineDiffView,
       addToQueue,
       activeWorkflowId,
       session?.user?.id,

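Note: executeQueuedOperation is only visible in this diff as a call site, implying a signature of (operation, target, payload, applyLocal). A hypothetical sketch of what such a helper would do inside this hook, inferred from the inline code it replaces; the real implementation on this branch may differ:

// Hypothetical reconstruction, inferred from the removed inline version above.
// Assumes it is defined inside the hook, where addToQueue, activeWorkflowId
// and session are in scope.
const executeQueuedOperation = useCallback(
  (operation: string, target: string, payload: Record<string, unknown>, applyLocal: () => void) => {
    applyLocal() // optimistic local update first
    if (!activeWorkflowId) return
    addToQueue({
      id: crypto.randomUUID(),
      operation: { operation, target, payload },
      workflowId: activeWorkflowId,
      userId: session?.user?.id || 'unknown',
    })
  },
  [addToQueue, activeWorkflowId, session?.user?.id]
)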
@@ -1,85 +1,10 @@
 import { useCallback, useRef } from 'react'
 import { createLogger } from '@sim/logger'
 import type { ExecutionEvent } from '@/lib/workflows/executor/execution-events'
-import type { SerializableExecutionState } from '@/executor/execution/types'
 import type { SubflowType } from '@/stores/workflows/workflow/types'

 const logger = createLogger('useExecutionStream')

-/**
- * Processes SSE events from a response body and invokes appropriate callbacks.
- */
-async function processSSEStream(
-  reader: ReadableStreamDefaultReader<Uint8Array>,
-  callbacks: ExecutionStreamCallbacks,
-  logPrefix: string
-): Promise<void> {
-  const decoder = new TextDecoder()
-  let buffer = ''
-
-  try {
-    while (true) {
-      const { done, value } = await reader.read()
-
-      if (done) break
-
-      buffer += decoder.decode(value, { stream: true })
-      const lines = buffer.split('\n\n')
-      buffer = lines.pop() || ''
-
-      for (const line of lines) {
-        if (!line.trim() || !line.startsWith('data: ')) continue
-
-        const data = line.substring(6).trim()
-        if (data === '[DONE]') {
-          logger.info(`${logPrefix} stream completed`)
-          continue
-        }
-
-        try {
-          const event = JSON.parse(data) as ExecutionEvent
-
-          switch (event.type) {
-            case 'execution:started':
-              callbacks.onExecutionStarted?.(event.data)
-              break
-            case 'execution:completed':
-              callbacks.onExecutionCompleted?.(event.data)
-              break
-            case 'execution:error':
-              callbacks.onExecutionError?.(event.data)
-              break
-            case 'execution:cancelled':
-              callbacks.onExecutionCancelled?.(event.data)
-              break
-            case 'block:started':
-              callbacks.onBlockStarted?.(event.data)
-              break
-            case 'block:completed':
-              callbacks.onBlockCompleted?.(event.data)
-              break
-            case 'block:error':
-              callbacks.onBlockError?.(event.data)
-              break
-            case 'stream:chunk':
-              callbacks.onStreamChunk?.(event.data)
-              break
-            case 'stream:done':
-              callbacks.onStreamDone?.(event.data)
-              break
-            default:
-              logger.warn('Unknown event type:', (event as any).type)
-          }
-        } catch (error) {
-          logger.error('Failed to parse SSE event:', error, { data })
-        }
-      }
-    }
-  } finally {
-    reader.releaseLock()
-  }
-}
-
 export interface ExecutionStreamCallbacks {
   onExecutionStarted?: (data: { startTime: string }) => void
   onExecutionCompleted?: (data: {
@@ -146,13 +71,6 @@ export interface ExecuteStreamOptions {
   callbacks?: ExecutionStreamCallbacks
 }

-export interface ExecuteFromBlockOptions {
-  workflowId: string
-  startBlockId: string
-  sourceSnapshot: SerializableExecutionState
-  callbacks?: ExecutionStreamCallbacks
-}
-
 /**
  * Hook for executing workflows via server-side SSE streaming
  */
@@ -201,7 +119,91 @@ export function useExecutionStream() {
       }

       const reader = response.body.getReader()
-      await processSSEStream(reader, callbacks, 'Execution')
+      const decoder = new TextDecoder()
+      let buffer = ''
+
+      try {
+        while (true) {
+          const { done, value } = await reader.read()
+
+          if (done) {
+            break
+          }
+
+          buffer += decoder.decode(value, { stream: true })
+
+          const lines = buffer.split('\n\n')
+
+          buffer = lines.pop() || ''
+
+          for (const line of lines) {
+            if (!line.trim() || !line.startsWith('data: ')) {
+              continue
+            }
+
+            const data = line.substring(6).trim()
+
+            if (data === '[DONE]') {
+              logger.info('Stream completed')
+              continue
+            }
+
+            try {
+              const event = JSON.parse(data) as ExecutionEvent
+
+              logger.info('📡 SSE Event received:', {
+                type: event.type,
+                executionId: event.executionId,
+                data: event.data,
+              })
+
+              switch (event.type) {
+                case 'execution:started':
+                  logger.info('🚀 Execution started')
+                  callbacks.onExecutionStarted?.(event.data)
+                  break
+                case 'execution:completed':
+                  logger.info('✅ Execution completed')
+                  callbacks.onExecutionCompleted?.(event.data)
+                  break
+                case 'execution:error':
+                  logger.error('❌ Execution error')
+                  callbacks.onExecutionError?.(event.data)
+                  break
+                case 'execution:cancelled':
+                  logger.warn('🛑 Execution cancelled')
+                  callbacks.onExecutionCancelled?.(event.data)
+                  break
+                case 'block:started':
+                  logger.info('🔷 Block started:', event.data.blockId)
+                  callbacks.onBlockStarted?.(event.data)
+                  break
+                case 'block:completed':
+                  logger.info('✓ Block completed:', event.data.blockId)
+                  callbacks.onBlockCompleted?.(event.data)
+                  break
+                case 'block:error':
+                  logger.error('✗ Block error:', event.data.blockId)
+                  callbacks.onBlockError?.(event.data)
+                  break
+                case 'stream:chunk':
+                  callbacks.onStreamChunk?.(event.data)
+                  break
+                case 'stream:done':
+                  logger.info('Stream done:', event.data.blockId)
+                  callbacks.onStreamDone?.(event.data)
+                  break
+                default:
+                  logger.warn('Unknown event type:', (event as any).type)
+              }
+            } catch (error) {
+              logger.error('Failed to parse SSE event:', error, { data })
+            }
+          }
+        }
+      } finally {
+        reader.releaseLock()
+      }
     } catch (error: any) {
       if (error.name === 'AbortError') {
         logger.info('Execution stream cancelled')
@@ -220,65 +222,6 @@ export function useExecutionStream() {
     }
   }, [])

-  const executeFromBlock = useCallback(async (options: ExecuteFromBlockOptions) => {
-    const { workflowId, startBlockId, sourceSnapshot, callbacks = {} } = options
-
-    if (abortControllerRef.current) {
-      abortControllerRef.current.abort()
-    }
-
-    const abortController = new AbortController()
-    abortControllerRef.current = abortController
-    currentExecutionRef.current = null
-
-    try {
-      const response = await fetch(`/api/workflows/${workflowId}/execute-from-block`, {
-        method: 'POST',
-        headers: {
-          'Content-Type': 'application/json',
-        },
-        body: JSON.stringify({ startBlockId, sourceSnapshot }),
-        signal: abortController.signal,
-      })
-
-      if (!response.ok) {
-        const errorResponse = await response.json()
-        const error = new Error(errorResponse.error || 'Failed to start execution')
-        if (errorResponse && typeof errorResponse === 'object') {
-          Object.assign(error, { executionResult: errorResponse })
-        }
-        throw error
-      }
-
-      if (!response.body) {
-        throw new Error('No response body')
-      }
-
-      const executionId = response.headers.get('X-Execution-Id')
-      if (executionId) {
-        currentExecutionRef.current = { workflowId, executionId }
-      }
-
-      const reader = response.body.getReader()
-      await processSSEStream(reader, callbacks, 'Run-from-block')
-    } catch (error: any) {
-      if (error.name === 'AbortError') {
-        logger.info('Run-from-block execution cancelled')
-        callbacks.onExecutionCancelled?.({ duration: 0 })
-      } else {
-        logger.error('Run-from-block execution error:', error)
-        callbacks.onExecutionError?.({
-          error: error.message || 'Unknown error',
-          duration: 0,
-        })
-      }
-      throw error
-    } finally {
-      abortControllerRef.current = null
-      currentExecutionRef.current = null
-    }
-  }, [])
-
   const cancel = useCallback(() => {
     const execution = currentExecutionRef.current
     if (execution) {
@@ -296,7 +239,6 @@ export function useExecutionStream() {

   return {
     execute,
-    executeFromBlock,
     cancel,
   }
 }

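Note: both the removed helper and the restored inline loop parse the same wire format: standard SSE frames separated by a blank line, each payload prefixed with "data: " and terminated by a "[DONE]" sentinel. A minimal self-contained sketch of what the parser consumes (payload shapes simplified):

// Sketch of the SSE frames the loop above expects (shapes simplified).
const frames =
  'data: {"type":"execution:started","executionId":"abc","data":{"startTime":"2024-01-01T00:00:00Z"}}\n\n' +
  'data: {"type":"block:completed","executionId":"abc","data":{"blockId":"block-1"}}\n\n' +
  'data: [DONE]\n\n'
for (const line of frames.split('\n\n')) {
  if (!line.startsWith('data: ')) continue
  const data = line.substring(6).trim()
  if (data === '[DONE]') continue
  console.log(JSON.parse(data).type) // execution:started, block:completed
}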
@@ -21,7 +21,6 @@ export const COPILOT_MODEL_IDS = [
   'claude-4.5-opus',
   'claude-4.1-opus',
   'gemini-3-pro',
-  'auto',
 ] as const

 export type CopilotModelId = (typeof COPILOT_MODEL_IDS)[number]

@@ -10,6 +10,7 @@ import {
   type KnowledgeBaseArgs,
 } from '@/lib/copilot/tools/shared/schemas'
 import { useCopilotStore } from '@/stores/panel/copilot/store'
+import { useWorkflowRegistry } from '@/stores/workflows/registry/store'

 /**
  * Client tool for knowledge base operations
@@ -102,7 +103,19 @@ export class KnowledgeBaseClientTool extends BaseClientTool {
     const logger = createLogger('KnowledgeBaseClientTool')
     try {
       this.setState(ClientToolCallState.executing)
-      const payload: KnowledgeBaseArgs = { ...(args || { operation: 'list' }) }
+
+      // Get the workspace ID from the workflow registry hydration state
+      const { hydration } = useWorkflowRegistry.getState()
+      const workspaceId = hydration.workspaceId
+
+      // Build payload with workspace ID included in args
+      const payload: KnowledgeBaseArgs = {
+        ...(args || { operation: 'list' }),
+        args: {
+          ...(args?.args || {}),
+          workspaceId: workspaceId || undefined,
+        },
+      }

       const res = await fetch('/api/copilot/execute-copilot-server-tool', {
         method: 'POST',

@@ -2508,6 +2508,10 @@ async function validateWorkflowSelectorIds(
     for (const subBlockConfig of blockConfig.subBlocks) {
       if (!SELECTOR_TYPES.has(subBlockConfig.type)) continue

+      // Skip oauth-input - credentials are pre-validated before edit application
+      // This allows existing collaborator credentials to remain untouched
+      if (subBlockConfig.type === 'oauth-input') continue
+
       const subBlockValue = blockData.subBlocks?.[subBlockConfig.id]?.value
       if (!subBlockValue) continue

@@ -2573,6 +2577,156 @@ async function validateWorkflowSelectorIds(
   return errors
 }

+/**
+ * Pre-validates credential and apiKey inputs in operations before they are applied.
+ * - Validates oauth-input (credential) IDs belong to the user
+ * - Filters out apiKey inputs for hosted models when isHosted is true
+ * Returns validation errors for any removed inputs.
+ */
+async function preValidateCredentialInputs(
+  operations: EditWorkflowOperation[],
+  context: { userId: string }
+): Promise<{ filteredOperations: EditWorkflowOperation[]; errors: ValidationError[] }> {
+  const { isHosted } = await import('@/lib/core/config/feature-flags')
+  const { getHostedModels } = await import('@/providers/utils')
+
+  const logger = createLogger('PreValidateCredentials')
+  const errors: ValidationError[] = []
+
+  // Collect credential and apiKey inputs that need validation/filtering
+  const credentialInputs: Array<{
+    operationIndex: number
+    blockId: string
+    blockType: string
+    fieldName: string
+    value: string
+  }> = []
+
+  const hostedApiKeyInputs: Array<{
+    operationIndex: number
+    blockId: string
+    blockType: string
+    model: string
+  }> = []
+
+  const hostedModelsLower = isHosted ? new Set(getHostedModels().map((m) => m.toLowerCase())) : null
+
+  operations.forEach((op, opIndex) => {
+    if (!op.params?.inputs || !op.params?.type) return
+
+    const blockConfig = getBlock(op.params.type)
+    if (!blockConfig) return
+
+    // Find oauth-input subblocks
+    for (const subBlockConfig of blockConfig.subBlocks) {
+      if (subBlockConfig.type !== 'oauth-input') continue
+
+      const inputValue = op.params.inputs[subBlockConfig.id]
+      if (!inputValue || typeof inputValue !== 'string' || inputValue.trim() === '') continue
+
+      credentialInputs.push({
+        operationIndex: opIndex,
+        blockId: op.block_id,
+        blockType: op.params.type,
+        fieldName: subBlockConfig.id,
+        value: inputValue,
+      })
+    }
+
+    // Check for apiKey inputs on hosted models
+    if (hostedModelsLower && op.params.inputs.apiKey) {
+      const modelValue = op.params.inputs.model
+      if (modelValue && typeof modelValue === 'string') {
+        if (hostedModelsLower.has(modelValue.toLowerCase())) {
+          hostedApiKeyInputs.push({
+            operationIndex: opIndex,
+            blockId: op.block_id,
+            blockType: op.params.type,
+            model: modelValue,
+          })
+        }
+      }
+    }
+  })
+
+  const hasCredentialsToValidate = credentialInputs.length > 0
+  const hasHostedApiKeysToFilter = hostedApiKeyInputs.length > 0
+
+  if (!hasCredentialsToValidate && !hasHostedApiKeysToFilter) {
+    return { filteredOperations: operations, errors }
+  }
+
+  // Deep clone operations so we can modify them
+  const filteredOperations = structuredClone(operations)
+
+  // Filter out apiKey inputs for hosted models and add validation errors
+  if (hasHostedApiKeysToFilter) {
+    logger.info('Filtering apiKey inputs for hosted models', { count: hostedApiKeyInputs.length })
+
+    for (const apiKeyInput of hostedApiKeyInputs) {
+      const op = filteredOperations[apiKeyInput.operationIndex]
+      if (op.params?.inputs?.apiKey) {
+        op.params.inputs.apiKey = undefined
+        logger.debug('Filtered apiKey for hosted model', {
+          blockId: apiKeyInput.blockId,
+          model: apiKeyInput.model,
+        })
+
+        errors.push({
+          blockId: apiKeyInput.blockId,
+          blockType: apiKeyInput.blockType,
+          field: 'apiKey',
+          value: '[redacted]',
+          error: `Cannot set API key for hosted model "${apiKeyInput.model}" - API keys are managed by the platform when using hosted models`,
+        })
+      }
+    }
+  }

+  // Validate credential inputs
+  if (hasCredentialsToValidate) {
+    logger.info('Pre-validating credential inputs', {
+      credentialCount: credentialInputs.length,
+      userId: context.userId,
+    })
+
+    const allCredentialIds = credentialInputs.map((c) => c.value)
+    const validationResult = await validateSelectorIds('oauth-input', allCredentialIds, context)
+    const invalidSet = new Set(validationResult.invalid)
+
+    if (invalidSet.size > 0) {
+      for (const credInput of credentialInputs) {
+        if (!invalidSet.has(credInput.value)) continue
+
+        const op = filteredOperations[credInput.operationIndex]
+        if (op.params?.inputs?.[credInput.fieldName]) {
+          delete op.params.inputs[credInput.fieldName]
+          logger.info('Removed invalid credential from operation', {
+            blockId: credInput.blockId,
+            field: credInput.fieldName,
+            invalidValue: credInput.value,
+          })
+        }
+
+        const warningInfo = validationResult.warning ? `. ${validationResult.warning}` : ''
+        errors.push({
+          blockId: credInput.blockId,
+          blockType: credInput.blockType,
+          field: credInput.fieldName,
+          value: credInput.value,
+          error: `Invalid credential ID "${credInput.value}" - credential does not exist or user doesn't have access${warningInfo}`,
+        })
+      }
+
+      logger.warn('Filtered out invalid credentials', {
+        invalidCount: invalidSet.size,
+      })
+    }
+  }
+
+  return { filteredOperations, errors }
+}
+
 async function getCurrentWorkflowStateFromDb(
   workflowId: string
 ): Promise<{ workflowState: any; subBlockValues: Record<string, Record<string, any>> }> {
@@ -2657,12 +2811,28 @@ export const editWorkflowServerTool: BaseServerTool<EditWorkflowParams, any> = {
     // Get permission config for the user
     const permissionConfig = context?.userId ? await getUserPermissionConfig(context.userId) : null

+    // Pre-validate credential and apiKey inputs before applying operations
+    // This filters out invalid credentials and apiKeys for hosted models
+    let operationsToApply = operations
+    const credentialErrors: ValidationError[] = []
+    if (context?.userId) {
+      const { filteredOperations, errors: credErrors } = await preValidateCredentialInputs(
+        operations,
+        { userId: context.userId }
+      )
+      operationsToApply = filteredOperations
+      credentialErrors.push(...credErrors)
+    }
+
     // Apply operations directly to the workflow state
     const {
       state: modifiedWorkflowState,
       validationErrors,
       skippedItems,
-    } = applyOperationsToWorkflowState(workflowState, operations, permissionConfig)
+    } = applyOperationsToWorkflowState(workflowState, operationsToApply, permissionConfig)
+
+    // Add credential validation errors
+    validationErrors.push(...credentialErrors)

     // Get workspaceId for selector validation
     let workspaceId: string | undefined

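Note: the pre-validation step returns a filtered deep copy plus the errors it generated, so the caller can apply the safe subset and still surface what was stripped. A small consumption sketch (operation shape abbreviated; whether a given model counts as hosted depends on getHostedModels and the isHosted flag):

// Sketch: consuming preValidateCredentialInputs inside an async context.
const ops: EditWorkflowOperation[] = [
  {
    block_id: 'agent-1',
    params: { type: 'agent', inputs: { model: 'gpt-4o', apiKey: 'sk-test' } },
  } as any, // abbreviated shape for illustration only
]
const { filteredOperations, errors } = await preValidateCredentialInputs(ops, {
  userId: 'user-123', // placeholder
})
// On a hosted deployment where 'gpt-4o' is a hosted model, the apiKey input is
// now undefined and errors[0].field === 'apiKey' records why it was stripped.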
@@ -132,8 +132,6 @@ async function executeCode(request) {
   for (const [key, value] of Object.entries(contextVariables)) {
-    if (value === undefined) {
-      await jail.set(key, undefined)
-    } else if (value === null) {
+    if (value === null) {
       await jail.set(key, null)
     } else {
       await jail.set(key, new ivm.ExternalCopy(value).copyInto())
     }

@@ -618,6 +618,13 @@ export function getToolOutputs(
     }
   }

+/**
+ * Generates output paths for a tool-based block.
+ *
+ * @param blockConfig - The block configuration containing tools config
+ * @param subBlocks - SubBlock values for tool selection and condition evaluation
+ * @returns Array of output paths for the tool, or empty array on error
+ */
 export function getToolOutputPaths(
   blockConfig: BlockConfig,
   subBlocks?: Record<string, SubBlockWithValue>
@@ -627,22 +634,12 @@ export function getToolOutputPaths(
   if (!outputs || Object.keys(outputs).length === 0) return []

   if (subBlocks && blockConfig.outputs) {
     const filteredBlockOutputs = filterOutputsByCondition(blockConfig.outputs, subBlocks)
     const allowedKeys = new Set(Object.keys(filteredBlockOutputs))

     const filteredOutputs: Record<string, any> = {}

     for (const [key, value] of Object.entries(outputs)) {
-      const blockOutput = blockConfig.outputs[key]
-
-      if (!blockOutput || typeof blockOutput !== 'object') {
-        filteredOutputs[key] = value
-        continue
-      }
-
-      const condition = 'condition' in blockOutput ? blockOutput.condition : undefined
-      if (condition) {
-        if (evaluateOutputCondition(condition, subBlocks)) {
-          filteredOutputs[key] = value
-        }
-      } else {
       if (allowedKeys.has(key)) {
         filteredOutputs[key] = value
       }
-      }
@@ -26,7 +26,7 @@ describe('VariableManager', () => {
   it.concurrent('should handle boolean type variables', () => {
     expect(VariableManager.parseInputForStorage('true', 'boolean')).toBe(true)
     expect(VariableManager.parseInputForStorage('false', 'boolean')).toBe(false)
-    expect(VariableManager.parseInputForStorage('1', 'boolean')).toBe(false)
+    expect(VariableManager.parseInputForStorage('1', 'boolean')).toBe(true)
     expect(VariableManager.parseInputForStorage('0', 'boolean')).toBe(false)
     expect(VariableManager.parseInputForStorage('"true"', 'boolean')).toBe(true)
     expect(VariableManager.parseInputForStorage("'false'", 'boolean')).toBe(false)
@@ -128,7 +128,7 @@ describe('VariableManager', () => {
     expect(VariableManager.resolveForExecution(false, 'boolean')).toBe(false)
     expect(VariableManager.resolveForExecution('true', 'boolean')).toBe(true)
     expect(VariableManager.resolveForExecution('false', 'boolean')).toBe(false)
-    expect(VariableManager.resolveForExecution('1', 'boolean')).toBe(false)
+    expect(VariableManager.resolveForExecution('1', 'boolean')).toBe(true)
     expect(VariableManager.resolveForExecution('0', 'boolean')).toBe(false)
   })

@@ -61,7 +61,7 @@ export class VariableManager {
         // Special case for 'anything else' in the test
         if (unquoted === 'anything else') return true
         const normalized = String(unquoted).toLowerCase().trim()
-        return normalized === 'true'
+        return normalized === 'true' || normalized === '1'
       }

       case 'object':

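Note: with this change '1' normalizes to true while '0' still falls through to false, matching the updated tests above. The rule in isolation:

// Standalone restatement of the normalization rule exercised by the tests above.
function parseBooleanLike(input: string): boolean {
  const normalized = input.toLowerCase().trim()
  return normalized === 'true' || normalized === '1'
}
console.log(parseBooleanLike('true'))  // true
console.log(parseBooleanLike('1'))     // true
console.log(parseBooleanLike('0'))     // false
console.log(parseBooleanLike('false')) // false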
@@ -35,23 +35,4 @@ export const useExecutionStore = create<ExecutionState & ExecutionActions>()((se
   },
   clearRunPath: () => set({ lastRunPath: new Map(), lastRunEdges: new Map() }),
   reset: () => set(initialState),
-
-  setLastExecutionSnapshot: (workflowId, snapshot) => {
-    const { lastExecutionSnapshots } = get()
-    const newSnapshots = new Map(lastExecutionSnapshots)
-    newSnapshots.set(workflowId, snapshot)
-    set({ lastExecutionSnapshots: newSnapshots })
-  },
-
-  getLastExecutionSnapshot: (workflowId) => {
-    const { lastExecutionSnapshots } = get()
-    return lastExecutionSnapshots.get(workflowId)
-  },
-
-  clearLastExecutionSnapshot: (workflowId) => {
-    const { lastExecutionSnapshots } = get()
-    const newSnapshots = new Map(lastExecutionSnapshots)
-    newSnapshots.delete(workflowId)
-    set({ lastExecutionSnapshots: newSnapshots })
-  },
 }))

@@ -1,5 +1,4 @@
 import type { Executor } from '@/executor'
-import type { SerializableExecutionState } from '@/executor/execution/types'
 import type { ExecutionContext } from '@/executor/types'

 /**
@@ -29,11 +28,6 @@ export interface ExecutionState {
    * Cleared when a new run starts. Used to show run path indicators on edges.
    */
   lastRunEdges: Map<string, EdgeRunStatus>
-  /**
-   * Stores the last successful execution snapshot per workflow.
-   * Used for run-from-block functionality.
-   */
-  lastExecutionSnapshots: Map<string, SerializableExecutionState>
 }

 export interface ExecutionActions {
@@ -47,18 +41,6 @@ export interface ExecutionActions {
   setEdgeRunStatus: (edgeId: string, status: EdgeRunStatus) => void
   clearRunPath: () => void
   reset: () => void
-  /**
-   * Store the execution snapshot for a workflow after successful execution.
-   */
-  setLastExecutionSnapshot: (workflowId: string, snapshot: SerializableExecutionState) => void
-  /**
-   * Get the last execution snapshot for a workflow.
-   */
-  getLastExecutionSnapshot: (workflowId: string) => SerializableExecutionState | undefined
-  /**
-   * Clear the execution snapshot for a workflow.
-   */
-  clearLastExecutionSnapshot: (workflowId: string) => void
 }

 export const initialState: ExecutionState = {
@@ -70,5 +52,4 @@ export const initialState: ExecutionState = {
   debugContext: null,
   lastRunPath: new Map(),
   lastRunEdges: new Map(),
-  lastExecutionSnapshots: new Map(),
 }

@@ -27,9 +27,6 @@ export function registerEmitFunctions(
   emitSubblockUpdate = subblockEmit
   emitVariableUpdate = variableEmit
   currentRegisteredWorkflowId = workflowId
-  if (workflowId) {
-    useOperationQueueStore.getState().processNextOperation()
-  }
 }

 let currentRegisteredWorkflowId: string | null = null
@@ -265,14 +262,16 @@ export const useOperationQueueStore = create<OperationQueueState>((set, get) =>
       return
     }

-    if (!currentRegisteredWorkflowId) {
+    const nextOperation = currentRegisteredWorkflowId
+      ? state.operations.find(
+          (op) => op.status === 'pending' && op.workflowId === currentRegisteredWorkflowId
+        )
+      : state.operations.find((op) => op.status === 'pending')
+    if (!nextOperation) {
       return
     }

-    const nextOperation = state.operations.find(
-      (op) => op.status === 'pending' && op.workflowId === currentRegisteredWorkflowId
-    )
-    if (!nextOperation) {
+    if (currentRegisteredWorkflowId && nextOperation.workflowId !== currentRegisteredWorkflowId) {
       return
     }

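Note: the reworked selection prefers a pending operation for the registered workflow and only falls back to any pending operation when no workflow is registered yet. The same logic in isolation:

// Standalone sketch of the pending-operation selection above (types trimmed).
interface QueuedOp {
  status: 'pending' | 'processing'
  workflowId: string
}
function pickNext(ops: QueuedOp[], registeredWorkflowId: string | null): QueuedOp | undefined {
  return registeredWorkflowId
    ? ops.find((op) => op.status === 'pending' && op.workflowId === registeredWorkflowId)
    : ops.find((op) => op.status === 'pending')
}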
@@ -38,12 +38,11 @@ export const storageUploadTool: ToolConfig<
       visibility: 'user-or-llm',
       description: 'Optional folder path (e.g., "folder/subfolder/")',
     },
-    fileData: {
-      type: 'json',
+    fileContent: {
+      type: 'string',
       required: true,
       visibility: 'user-or-llm',
-      description:
-        'File to upload - UserFile object (basic mode) or string content (advanced mode: base64 or plain text). Supports data URLs.',
+      description: 'The file content (base64 encoded for binary files, or plain text)',
     },
     contentType: {
       type: 'string',
@@ -66,28 +65,65 @@ export const storageUploadTool: ToolConfig<
   },

   request: {
-    url: '/api/tools/supabase/storage-upload',
+    url: (params) => {
+      // Combine folder path and fileName, ensuring proper formatting
+      let fullPath = params.fileName
+      if (params.path) {
+        // Ensure path ends with / and doesn't have double slashes
+        const folderPath = params.path.endsWith('/') ? params.path : `${params.path}/`
+        fullPath = `${folderPath}${params.fileName}`
+      }
+      return `https://${params.projectId}.supabase.co/storage/v1/object/${params.bucket}/${fullPath}`
+    },
     method: 'POST',
-    headers: () => ({
-      'Content-Type': 'application/json',
-    }),
-    body: (params) => ({
-      projectId: params.projectId,
-      apiKey: params.apiKey,
-      bucket: params.bucket,
-      fileName: params.fileName,
-      path: params.path,
-      fileData: params.fileData,
-      contentType: params.contentType,
-      upsert: params.upsert,
-    }),
+    headers: (params) => {
+      const headers: Record<string, string> = {
+        apikey: params.apiKey,
+        Authorization: `Bearer ${params.apiKey}`,
+      }
+
+      if (params.contentType) {
+        headers['Content-Type'] = params.contentType
+      }
+
+      if (params.upsert) {
+        headers['x-upsert'] = 'true'
+      }
+
+      return headers
+    },
+    body: (params) => {
+      // Return the file content wrapped in an object
+      // The actual upload will need to handle this appropriately
+      return {
+        content: params.fileContent,
+      }
+    },
   },

   transformResponse: async (response: Response) => {
     let data
     try {
       data = await response.json()
     } catch (parseError) {
       throw new Error(`Failed to parse Supabase storage upload response: ${parseError}`)
     }

     return {
       success: true,
       output: {
         message: 'Successfully uploaded file to storage',
         results: data,
       },
       error: undefined,
     }
   },

   outputs: {
     message: { type: 'string', description: 'Operation status message' },
     results: {
       type: 'object',
-      description: 'Upload result including file path, bucket, and public URL',
+      description: 'Upload result including file path and metadata',
     },
   },
 }

@@ -136,7 +136,7 @@ export interface SupabaseStorageUploadParams {
   bucket: string
   fileName: string
   path?: string
-  fileData: any // UserFile object (basic mode) or string (advanced mode: base64/plain text)
+  fileContent: string
   contentType?: string
   upsert?: boolean
 }

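Note: the tool now talks to the Supabase Storage REST endpoint directly instead of the internal proxy route. A hedged sketch of the equivalent raw request (all values are placeholders; the JSON-wrapped body mirrors the comment in the diff itself and would need rework for true binary uploads):

// Sketch of the request the new config produces (placeholder values only).
const projectId = 'my-project' // placeholder
const bucket = 'avatars'       // placeholder
const fullPath = 'folder/logo.png'
await fetch(`https://${projectId}.supabase.co/storage/v1/object/${bucket}/${fullPath}`, {
  method: 'POST',
  headers: {
    apikey: 'service-role-key',               // placeholder
    Authorization: 'Bearer service-role-key', // placeholder
    'Content-Type': 'image/png',
    'x-upsert': 'true',
  },
  body: JSON.stringify({ content: '...base64 or plain text...' }),
})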
@@ -1,25 +0,0 @@
-{{- if .Values.branding.enabled }}
----
-# Branding ConfigMap
-# Mounts custom branding assets (logos, CSS, etc.) into the application
-apiVersion: v1
-kind: ConfigMap
-metadata:
-  name: {{ include "sim.fullname" . }}-branding
-  namespace: {{ .Release.Namespace }}
-  labels:
-    {{- include "sim.labels" . | nindent 4 }}
-    app.kubernetes.io/component: branding
-{{- if .Values.branding.files }}
-data:
-  {{- range $key, $value := .Values.branding.files }}
-  {{ $key }}: {{ $value | quote }}
-  {{- end }}
-{{- end }}
-{{- if .Values.branding.binaryFiles }}
-binaryData:
-  {{- range $key, $value := .Values.branding.binaryFiles }}
-  {{ $key }}: {{ $value }}
-  {{- end }}
-{{- end }}
-{{- end }}
@@ -110,13 +110,8 @@ spec:
           {{- end }}
           {{- include "sim.resources" .Values.app | nindent 10 }}
           {{- include "sim.securityContext" .Values.app | nindent 10 }}
-          {{- if or .Values.branding.enabled .Values.extraVolumeMounts .Values.app.extraVolumeMounts }}
+          {{- if or .Values.extraVolumeMounts .Values.app.extraVolumeMounts }}
          volumeMounts:
-            {{- if .Values.branding.enabled }}
-            - name: branding
-              mountPath: {{ .Values.branding.mountPath | default "/app/public/branding" }}
-              readOnly: true
-            {{- end }}
            {{- with .Values.extraVolumeMounts }}
            {{- toYaml . | nindent 12 }}
            {{- end }}
@@ -124,13 +119,8 @@ spec:
            {{- toYaml . | nindent 12 }}
            {{- end }}
          {{- end }}
-      {{- if or .Values.branding.enabled .Values.extraVolumes .Values.app.extraVolumes }}
+      {{- if or .Values.extraVolumes .Values.app.extraVolumes }}
      volumes:
-        {{- if .Values.branding.enabled }}
-        - name: branding
-          configMap:
-            name: {{ include "sim.fullname" . }}-branding
-        {{- end }}
        {{- with .Values.extraVolumes }}
        {{- toYaml . | nindent 8 }}
        {{- end }}

@@ -738,32 +738,6 @@ sharedStorage:
 extraVolumes: []
 extraVolumeMounts: []

-# Branding configuration
-# Use this to inject custom branding assets (logos, CSS, etc.) into the application
-branding:
-  # Enable/disable branding ConfigMap
-  enabled: false
-
-  # Mount path in the container where branding files will be available
-  mountPath: "/app/public/branding"
-
-  # Text files (CSS, JSON, HTML, etc.) - values are plain text
-  # Example:
-  # files:
-  #   custom.css: |
-  #     .logo { background-color: #ff0000; }
-  #   config.json: |
-  #     {"theme": "dark"}
-  files: {}
-
-  # Binary files (PNG, JPG, ICO, etc.) - values must be base64 encoded
-  # Generate base64 with: base64 -i logo.png | tr -d '\n'
-  # Example:
-  # binaryFiles:
-  #   logo.png: "iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mNk..."
-  #   favicon.ico: "AAABAAEAEBAAAAEAIABoBAAAFgAAAA..."
-  binaryFiles: {}
-
 # Additional environment variables for custom integrations
 extraEnvVars: []

@@ -197,7 +197,7 @@ async function getCommitsBetweenVersions(
   const commitEntries = gitLog.split('\n').filter((line) => line.trim())

   const nonVersionCommits = commitEntries.filter((line) => {
-    const [, message] = line.split('|')
+    const [hash, message] = line.split('|')
     const isVersionCommit = message.match(/^v\d+\.\d+/)
     if (isVersionCommit) {
       console.log(`⏭️ Skipping version commit: ${message.substring(0, 50)}...`)
@@ -369,25 +369,6 @@ async function main() {
     console.log(`ℹ️ No previous version found (this might be the first release)`)
   }

-  try {
-    const existingRelease = await octokit.rest.repos.getReleaseByTag({
-      owner: REPO_OWNER,
-      repo: REPO_NAME,
-      tag: targetVersion,
-    })
-    if (existingRelease.data) {
-      console.log(`ℹ️ Release ${targetVersion} already exists, skipping creation`)
-      console.log(
-        `🔗 View release: https://github.com/${REPO_OWNER}/${REPO_NAME}/releases/tag/${targetVersion}`
-      )
-      return
-    }
-  } catch (error: any) {
-    if (error.status !== 404) {
-      throw error
-    }
-  }
-
   const releaseBody = await generateReleaseBody(versionCommit, previousCommit || undefined)

   console.log(`🚀 Creating GitHub release for ${targetVersion}...`)