Compare commits

2 Commits  main ... fix/copilo

| Author | SHA1 | Date |
|--------|------|------|
|        | 3533bd009d |  |
|        | 43402fde1c |  |
@@ -1,40 +0,0 @@
-'use client'
-
-import { getAssetUrl } from '@/lib/utils'
-
-interface ActionImageProps {
-  src: string
-  alt: string
-}
-
-interface ActionVideoProps {
-  src: string
-  alt: string
-}
-
-export function ActionImage({ src, alt }: ActionImageProps) {
-  const resolvedSrc = getAssetUrl(src.startsWith('/') ? src.slice(1) : src)
-
-  return (
-    <img
-      src={resolvedSrc}
-      alt={alt}
-      className='inline-block w-full max-w-[200px] rounded border border-neutral-200 dark:border-neutral-700'
-    />
-  )
-}
-
-export function ActionVideo({ src, alt }: ActionVideoProps) {
-  const resolvedSrc = getAssetUrl(src.startsWith('/') ? src.slice(1) : src)
-
-  return (
-    <video
-      src={resolvedSrc}
-      autoPlay
-      loop
-      muted
-      playsInline
-      className='inline-block w-full max-w-[200px] rounded border border-neutral-200 dark:border-neutral-700'
-    />
-  )
-}
@@ -4,7 +4,6 @@ description: Essential actions for navigating and using the Sim workflow editor
 ---
 
 import { Callout } from 'fumadocs-ui/components/callout'
-import { ActionImage, ActionVideo } from '@/components/ui/action-media'
 
 A quick lookup for everyday actions in the Sim workflow editor. For keyboard shortcuts, see [Keyboard Shortcuts](/keyboard-shortcuts).
 
@@ -14,362 +13,124 @@ A quick lookup for everyday actions in the Sim workflow editor. For keyboard sho
 
 ## Workspaces
 
-<table>
-<thead>
-<tr><th>Action</th><th>How</th><th>Preview</th></tr>
-</thead>
-<tbody>
-<tr>
-<td>Create a workspace</td>
-<td>Click workspace dropdown → **New Workspace**</td>
-<td><ActionVideo src="/static/quick-reference/create-workspace.mp4" alt="Create workspace" /></td>
-</tr>
-<tr>
-<td>Switch workspaces</td>
-<td>Click workspace dropdown → Select workspace</td>
-<td><ActionVideo src="/static/quick-reference/switch-workspace.mp4" alt="Switch workspaces" /></td>
-</tr>
-<tr>
-<td>Invite team members</td>
-<td>Sidebar → **Invite**</td>
-<td><ActionVideo src="/static/quick-reference/invite.mp4" alt="Invite team members" /></td>
-</tr>
-<tr>
-<td>Rename a workspace</td>
-<td>Right-click workspace → **Rename**</td>
-<td rowSpan={4}><ActionImage src="/static/quick-reference/workspace-context-menu.png" alt="Workspace context menu" /></td>
-</tr>
-<tr>
-<td>Duplicate a workspace</td>
-<td>Right-click workspace → **Duplicate**</td>
-</tr>
-<tr>
-<td>Export a workspace</td>
-<td>Right-click workspace → **Export**</td>
-</tr>
-<tr>
-<td>Delete a workspace</td>
-<td>Right-click workspace → **Delete**</td>
-</tr>
-</tbody>
-</table>
+| Action | How |
+|--------|-----|
+| Create a workspace | Click workspace dropdown in sidebar → **New Workspace** |
+| Rename a workspace | Workspace settings → Edit name |
+| Switch workspaces | Click workspace dropdown in sidebar → Select workspace |
+| Invite team members | Workspace settings → **Team** → **Invite** |
 
 ## Workflows
 
-<table>
-<thead>
-<tr><th>Action</th><th>How</th><th>Preview</th></tr>
-</thead>
-<tbody>
-<tr>
-<td>Create a workflow</td>
-<td>Click **+** button in sidebar</td>
-<td><ActionImage src="/static/quick-reference/create-workflow.png" alt="Create workflow" /></td>
-</tr>
-<tr>
-<td>Reorder / move workflows</td>
-<td>Drag workflow up/down or onto a folder</td>
-<td><ActionVideo src="/static/quick-reference/reordering.mp4" alt="Reorder workflows" /></td>
-</tr>
-<tr>
-<td>Import a workflow</td>
-<td>Click import button in sidebar → Select file</td>
-<td><ActionImage src="/static/quick-reference/import-workflow.png" alt="Import workflow" /></td>
-</tr>
-<tr>
-<td>Multi-select workflows</td>
-<td>`Mod+Click` or `Shift+Click` workflows in sidebar</td>
-<td><ActionVideo src="/static/quick-reference/multiselect.mp4" alt="Multi-select workflows" /></td>
-</tr>
-<tr>
-<td>Open in new tab</td>
-<td>Right-click workflow → **Open in New Tab**</td>
-<td rowSpan={6}><ActionImage src="/static/quick-reference/workflow-context-menu.png" alt="Workflow context menu" /></td>
-</tr>
-<tr>
-<td>Rename a workflow</td>
-<td>Right-click workflow → **Rename**</td>
-</tr>
-<tr>
-<td>Assign workflow color</td>
-<td>Right-click workflow → **Change Color**</td>
-</tr>
-<tr>
-<td>Duplicate a workflow</td>
-<td>Right-click workflow → **Duplicate**</td>
-</tr>
-<tr>
-<td>Export a workflow</td>
-<td>Right-click workflow → **Export**</td>
-</tr>
-<tr>
-<td>Delete a workflow</td>
-<td>Right-click workflow → **Delete**</td>
-</tr>
-<tr>
-<td>Rename a folder</td>
-<td>Right-click folder → **Rename**</td>
-<td rowSpan={6}><ActionImage src="/static/quick-reference/folder-context-menu.png" alt="Folder context menu" /></td>
-</tr>
-<tr>
-<td>Create workflow in folder</td>
-<td>Right-click folder → **Create workflow**</td>
-</tr>
-<tr>
-<td>Create folder in folder</td>
-<td>Right-click folder → **Create folder**</td>
-</tr>
-<tr>
-<td>Duplicate a folder</td>
-<td>Right-click folder → **Duplicate**</td>
-</tr>
-<tr>
-<td>Export a folder</td>
-<td>Right-click folder → **Export**</td>
-</tr>
-<tr>
-<td>Delete a folder</td>
-<td>Right-click folder → **Delete**</td>
-</tr>
-</tbody>
-</table>
+| Action | How |
+|--------|-----|
+| Create a workflow | Click **New Workflow** button or `Mod+Shift+A` |
+| Rename a workflow | Double-click workflow name in sidebar, or right-click → **Rename** |
+| Duplicate a workflow | Right-click workflow → **Duplicate** |
+| Reorder workflows | Drag workflow up/down in the sidebar list |
+| Import a workflow | Sidebar menu → **Import** → Select file |
+| Create a folder | Right-click in sidebar → **New Folder** |
+| Rename a folder | Right-click folder → **Rename** |
+| Delete a folder | Right-click folder → **Delete** |
+| Collapse/expand folder | Click folder arrow, or double-click folder |
+| Move workflow to folder | Drag workflow onto folder in sidebar |
+| Delete a workflow | Right-click workflow → **Delete** |
+| Export a workflow | Right-click workflow → **Export** |
+| Assign workflow color | Right-click workflow → **Change Color** |
+| Multi-select workflows | `Mod+Click` or `Shift+Click` workflows in sidebar |
+| Open in new tab | Right-click workflow → **Open in New Tab** |
 
 ## Blocks
 
-<table>
-<thead>
-<tr><th>Action</th><th>How</th><th>Preview</th></tr>
-</thead>
-<tbody>
-<tr>
-<td>Add a block</td>
-<td>Drag from Toolbar panel, or right-click canvas → **Add Block**</td>
-<td><ActionVideo src="/static/quick-reference/add-block.mp4" alt="Add a block" /></td>
-</tr>
-<tr>
-<td>Multi-select blocks</td>
-<td>`Mod+Click` additional blocks, or shift-drag to draw selection box</td>
-<td><ActionVideo src="/static/quick-reference/multiselect-blocks.mp4" alt="Multi-select blocks" /></td>
-</tr>
-<tr>
-<td>Copy blocks</td>
-<td>`Mod+C` with blocks selected</td>
-<td rowSpan={2}><ActionVideo src="/static/quick-reference/copy-paste.mp4" alt="Copy and paste blocks" /></td>
-</tr>
-<tr>
-<td>Paste blocks</td>
-<td>`Mod+V` to paste copied blocks</td>
-</tr>
-<tr>
-<td>Duplicate blocks</td>
-<td>Right-click → **Duplicate**</td>
-<td><ActionVideo src="/static/quick-reference/duplicate-block.mp4" alt="Duplicate blocks" /></td>
-</tr>
-<tr>
-<td>Delete blocks</td>
-<td>`Delete` or `Backspace` key, or right-click → **Delete**</td>
-<td><ActionImage src="/static/quick-reference/delete-block.png" alt="Delete block" /></td>
-</tr>
-<tr>
-<td>Rename a block</td>
-<td>Click block name in header, or edit in the Editor panel</td>
-<td><ActionVideo src="/static/quick-reference/rename-block.mp4" alt="Rename a block" /></td>
-</tr>
-<tr>
-<td>Enable/Disable a block</td>
-<td>Right-click → **Enable/Disable**</td>
-<td><ActionImage src="/static/quick-reference/disable-block.png" alt="Disable block" /></td>
-</tr>
-<tr>
-<td>Toggle handle orientation</td>
-<td>Right-click → **Toggle Handles**</td>
-<td><ActionVideo src="/static/quick-reference/toggle-handles.mp4" alt="Toggle handle orientation" /></td>
-</tr>
-<tr>
-<td>Configure a block</td>
-<td>Select block → use Editor panel on right</td>
-<td><ActionVideo src="/static/quick-reference/configure-block.mp4" alt="Configure a block" /></td>
-</tr>
-</tbody>
-</table>
+| Action | How |
+|--------|-----|
+| Add a block | Drag from Toolbar panel, or right-click canvas → **Add Block** |
+| Select a block | Click on the block |
+| Multi-select blocks | `Mod+Click` additional blocks, or right-drag to draw selection box |
+| Move blocks | Drag selected block(s) to new position |
+| Copy blocks | `Mod+C` with blocks selected |
+| Paste blocks | `Mod+V` to paste copied blocks |
+| Duplicate blocks | Right-click → **Duplicate** |
+| Delete blocks | `Delete` or `Backspace` key, or right-click → **Delete** |
+| Rename a block | Click block name in header, or edit in the Editor panel |
+| Enable/Disable a block | Right-click → **Enable/Disable** |
+| Toggle handle orientation | Right-click → **Toggle Handles** |
+| Toggle trigger mode | Right-click trigger block → **Toggle Trigger Mode** |
+| Configure a block | Select block → use Editor panel on right |
 
 ## Connections
 
-<table>
-<thead>
-<tr><th>Action</th><th>How</th><th>Preview</th></tr>
-</thead>
-<tbody>
-<tr>
-<td>Create a connection</td>
-<td>Drag from output handle to input handle</td>
-<td><ActionVideo src="/static/quick-reference/connect-blocks.mp4" alt="Connect blocks" /></td>
-</tr>
-<tr>
-<td>Delete a connection</td>
-<td>Click edge to select → `Delete` key</td>
-<td><ActionVideo src="/static/quick-reference/delete-connection.mp4" alt="Delete connection" /></td>
-</tr>
-<tr>
-<td>Use output in another block</td>
-<td>Drag connection tag into input field</td>
-<td><ActionVideo src="/static/quick-reference/connection-tag.mp4" alt="Use connection tag" /></td>
-</tr>
-</tbody>
-</table>
+| Action | How |
+|--------|-----|
+| Create a connection | Drag from output handle to input handle |
+| Delete a connection | Click edge to select → `Delete` key |
+| Use output in another block | Drag connection tag into input field |
+
+## Canvas Navigation
+
+| Action | How |
+|--------|-----|
+| Pan/move canvas | Left-drag on empty space, or scroll/trackpad |
+| Zoom in/out | Scroll wheel or pinch gesture |
+| Auto-layout | `Shift+L` |
+| Draw selection box | Right-drag on empty canvas area |
 
 ## Panels & Views
 
-<table>
-<thead>
-<tr><th>Action</th><th>How</th><th>Preview</th></tr>
-</thead>
-<tbody>
-<tr>
-<td>Search toolbar</td>
-<td>`Mod+F`</td>
-<td><ActionVideo src="/static/quick-reference/search-toolbar.mp4" alt="Search toolbar" /></td>
-</tr>
-<tr>
-<td>Search everything</td>
-<td>`Mod+K`</td>
-<td><ActionImage src="/static/quick-reference/search-everything.png" alt="Search everything" /></td>
-</tr>
-<tr>
-<td>Toggle manual mode</td>
-<td>Click toggle button to switch between manual and selector</td>
-<td><ActionImage src="/static/quick-reference/toggle-manual-mode.png" alt="Toggle manual mode" /></td>
-</tr>
-<tr>
-<td>Collapse/expand sidebar</td>
-<td>Click collapse button on sidebar</td>
-<td><ActionVideo src="/static/quick-reference/collapse-sidebar.mp4" alt="Collapse sidebar" /></td>
-</tr>
-</tbody>
-</table>
+| Action | How |
+|--------|-----|
+| Open Copilot tab | Press `C` or click Copilot tab |
+| Open Toolbar tab | Press `T` or click Toolbar tab |
+| Open Editor tab | Press `E` or click Editor tab |
+| Search toolbar | `Mod+F` |
+| Toggle advanced mode | Click toggle button on input fields |
+| Resize panels | Drag panel edge |
+| Collapse/expand sidebar | Click collapse button on sidebar |
 
 ## Running & Testing
 
-<table>
-<thead>
-<tr><th>Action</th><th>How</th><th>Preview</th></tr>
-</thead>
-<tbody>
-<tr>
-<td>Run workflow</td>
-<td>Click Run Workflow button or `Mod+Enter`</td>
-<td><ActionImage src="/static/quick-reference/run-workflow.png" alt="Run workflow" /></td>
-</tr>
-<tr>
-<td>Stop workflow</td>
-<td>Click Stop button or `Mod+Enter` while running</td>
-<td><ActionImage src="/static/quick-reference/stop-workflow.png" alt="Stop workflow" /></td>
-</tr>
-<tr>
-<td>Test with chat</td>
-<td>Use Chat panel on the right side</td>
-<td><ActionImage src="/static/quick-reference/test-chat.png" alt="Test with chat" /></td>
-</tr>
-<tr>
-<td>Select output to view</td>
-<td>Click dropdown in Chat panel → Select block output</td>
-<td><ActionImage src="/static/quick-reference/output-select.png" alt="Select output to view" /></td>
-</tr>
-<tr>
-<td>Clear chat history</td>
-<td>Click clear button in Chat panel</td>
-<td><ActionImage src="/static/quick-reference/clear-chat.png" alt="Clear chat history" /></td>
-</tr>
-<tr>
-<td>View execution logs</td>
-<td>Open terminal panel at bottom, or `Mod+L`</td>
-<td><ActionImage src="/static/quick-reference/terminal.png" alt="Execution logs terminal" /></td>
-</tr>
-<tr>
-<td>Filter logs by block or status</td>
-<td>Click block filter in terminal or right-click log entry → **Filter by Block** or **Filter by Status**</td>
-<td><ActionImage src="/static/quick-reference/filter-block.png" alt="Filter logs by block" /></td>
-</tr>
-<tr>
-<td>Search logs</td>
-<td>Use search field in terminal or right-click log entry → **Search**</td>
-<td><ActionImage src="/static/quick-reference/terminal-search.png" alt="Search logs" /></td>
-</tr>
-<tr>
-<td>Copy log entry</td>
-<td>Clipboard Icon or Right-click log entry → **Copy**</td>
-<td><ActionImage src="/static/quick-reference/copy-log.png" alt="Copy log entry" /></td>
-</tr>
-<tr>
-<td>Clear terminal</td>
-<td>Trash icon or `Mod+D`</td>
-<td><ActionImage src="/static/quick-reference/clear-terminal.png" alt="Clear terminal" /></td>
-</tr>
-</tbody>
-</table>
+| Action | How |
+|--------|-----|
+| Run workflow | Click Play button or `Mod+Enter` |
+| Stop workflow | Click Stop button or `Mod+Enter` while running |
+| Test with chat | Use Chat panel on the right side |
+| Select output to view | Click dropdown in Chat panel → Select block output |
+| Clear chat history | Click clear button in Chat panel |
+| View execution logs | Open terminal panel at bottom, or `Mod+L` |
+| Filter logs by block | Click block filter in terminal |
+| Filter logs by status | Click status filter in terminal |
+| Search logs | Use search field in terminal |
+| Copy log entry | Right-click log entry → **Copy** |
+| Clear terminal | `Mod+D` |
 
 ## Deployment
 
-<table>
-<thead>
-<tr><th>Action</th><th>How</th><th>Preview</th></tr>
-</thead>
-<tbody>
-<tr>
-<td>Deploy a workflow</td>
-<td>Click **Deploy** button in panel</td>
-<td><ActionImage src="/static/quick-reference/deploy.png" alt="Deploy workflow" /></td>
-</tr>
-<tr>
-<td>Update deployment</td>
-<td>Click **Update** when changes are detected</td>
-<td><ActionImage src="/static/quick-reference/update-deployment.png" alt="Update deployment" /></td>
-</tr>
-<tr>
-<td>View deployment status</td>
-<td>Check status indicator (Live/Update/Deploy) in Deploy tab</td>
-<td><ActionImage src="/static/quick-reference/view-deployment.png" alt="View deployment status" /></td>
-</tr>
-<tr>
-<td>Revert deployment</td>
-<td>Access previous versions in Deploy tab → **Promote to live**</td>
-<td><ActionImage src="/static/quick-reference/promote-deployment.png" alt="Promote deployment to live" /></td>
-</tr>
-<tr>
-<td>Copy API endpoint</td>
-<td>Deploy tab → Copy API endpoint URL</td>
-<td><ActionImage src="/static/quick-reference/copy-api.png" alt="Copy API endpoint" /></td>
-</tr>
-</tbody>
-</table>
+| Action | How |
+|--------|-----|
+| Deploy a workflow | Click **Deploy** button in Deploy tab |
+| Update deployment | Click **Update** when changes are detected |
+| View deployment status | Check status indicator (Live/Update/Deploy) in Deploy tab |
+| Revert deployment | Access previous versions in Deploy tab |
+| Copy webhook URL | Deploy tab → Copy webhook URL |
+| Copy API endpoint | Deploy tab → Copy API endpoint URL |
+| Set up a schedule | Add Schedule trigger block → Configure interval |
 
 ## Variables
 
-<table>
-<thead>
-<tr><th>Action</th><th>How</th><th>Preview</th></tr>
-</thead>
-<tbody>
-<tr>
-<td>Add / Edit / Delete workflow variable</td>
-<td>Panel -> Variables -> **Add Variable**, click to edit, or delete icon</td>
-<td><ActionImage src="/static/quick-reference/variables.png" alt="Variables panel" /></td>
-</tr>
-<tr>
-<td>Add environment variable</td>
-<td>Settings → **Environment Variables** → **Add**</td>
-<td><ActionImage src="/static/quick-reference/add-env-variable.png" alt="Add environment variable" /></td>
-</tr>
-<tr>
-<td>Reference a workflow variable</td>
-<td>Use `<blockName.itemName>` syntax in block inputs</td>
-<td><ActionImage src="/static/quick-reference/variable-reference.png" alt="Reference workflow variable" /></td>
-</tr>
-<tr>
-<td>Reference an environment variable</td>
-<td>Use `{{ENV_VAR}}` syntax in block inputs</td>
-<td><ActionImage src="/static/quick-reference/env-variable-reference.png" alt="Reference environment variable" /></td>
-</tr>
-</tbody>
-</table>
+| Action | How |
+|--------|-----|
+| Add workflow variable | Variables tab → **Add Variable** |
+| Edit workflow variable | Variables tab → Click variable to edit |
+| Delete workflow variable | Variables tab → Click delete icon on variable |
+| Add environment variable | Settings → **Environment Variables** → **Add** |
+| Reference a variable | Use `{{variableName}}` syntax in block inputs |
+
+## Credentials
+
+| Action | How |
+|--------|-----|
+| Add API key | Block credential field → **Add Credential** → Enter API key |
+| Connect OAuth account | Block credential field → **Connect** → Authorize with provider |
+| Manage credentials | Settings → **Credentials** |
+| Remove credential | Settings → **Credentials** → Delete credential |
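For illustration only (not from the changeset): a hypothetical block input that combines the reference syntaxes listed in the Variables tables above — `{{variableName}}` for workflow variables and `{{ENV_VAR}}` for environment variables. The names `recipientEmail`, `maxRetries`, and `SENDGRID_API_KEY` are invented.

Send the report to {{recipientEmail}}, authenticate with {{SENDGRID_API_KEY}}, and retry up to {{maxRetries}} times.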
29 binary image files deleted (5.9 KiB to 146 KiB each) — presumably the /static/quick-reference screenshots and videos referenced in the removed tables above; the compare view lists only their "Before" sizes.
apps/sim/app/api/organizations/[id]/workspaces/route.ts — new file, 204 lines
@@ -0,0 +1,204 @@
+import { db } from '@sim/db'
+import { member, permissions, user, workspace } from '@sim/db/schema'
+import { createLogger } from '@sim/logger'
+import { and, eq, or } from 'drizzle-orm'
+import { type NextRequest, NextResponse } from 'next/server'
+import { getSession } from '@/lib/auth'
+
+const logger = createLogger('OrganizationWorkspacesAPI')
+
+/**
+ * GET /api/organizations/[id]/workspaces
+ * Get workspaces related to the organization with optional filtering
+ * Query parameters:
+ * - ?available=true - Only workspaces where user can invite others (admin permissions)
+ * - ?member=userId - Workspaces where specific member has access
+ */
+export async function GET(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
+  try {
+    const session = await getSession()
+
+    if (!session?.user?.id) {
+      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
+    }
+
+    const { id: organizationId } = await params
+    const url = new URL(request.url)
+    const availableOnly = url.searchParams.get('available') === 'true'
+    const memberId = url.searchParams.get('member')
+
+    // Verify user is a member of this organization
+    const memberEntry = await db
+      .select()
+      .from(member)
+      .where(and(eq(member.organizationId, organizationId), eq(member.userId, session.user.id)))
+      .limit(1)
+
+    if (memberEntry.length === 0) {
+      return NextResponse.json(
+        {
+          error: 'Forbidden - Not a member of this organization',
+        },
+        { status: 403 }
+      )
+    }
+
+    const userRole = memberEntry[0].role
+    const hasAdminAccess = ['owner', 'admin'].includes(userRole)
+
+    if (availableOnly) {
+      // Get workspaces where user has admin permissions (can invite others)
+      const availableWorkspaces = await db
+        .select({
+          id: workspace.id,
+          name: workspace.name,
+          ownerId: workspace.ownerId,
+          createdAt: workspace.createdAt,
+          isOwner: eq(workspace.ownerId, session.user.id),
+          permissionType: permissions.permissionType,
+        })
+        .from(workspace)
+        .leftJoin(
+          permissions,
+          and(
+            eq(permissions.entityType, 'workspace'),
+            eq(permissions.entityId, workspace.id),
+            eq(permissions.userId, session.user.id)
+          )
+        )
+        .where(
+          or(
+            // User owns the workspace
+            eq(workspace.ownerId, session.user.id),
+            // User has admin permission on the workspace
+            and(
+              eq(permissions.userId, session.user.id),
+              eq(permissions.entityType, 'workspace'),
+              eq(permissions.permissionType, 'admin')
+            )
+          )
+        )
+
+      // Filter and format the results
+      const workspacesWithInvitePermission = availableWorkspaces
+        .filter((workspace) => {
+          // Include if user owns the workspace OR has admin permission
+          return workspace.isOwner || workspace.permissionType === 'admin'
+        })
+        .map((workspace) => ({
+          id: workspace.id,
+          name: workspace.name,
+          isOwner: workspace.isOwner,
+          canInvite: true, // All returned workspaces have invite permission
+          createdAt: workspace.createdAt,
+        }))
+
+      logger.info('Retrieved available workspaces for organization member', {
+        organizationId,
+        userId: session.user.id,
+        workspaceCount: workspacesWithInvitePermission.length,
+      })
+
+      return NextResponse.json({
+        success: true,
+        data: {
+          workspaces: workspacesWithInvitePermission,
+          totalCount: workspacesWithInvitePermission.length,
+          filter: 'available',
+        },
+      })
+    }
+
+    if (memberId && hasAdminAccess) {
+      // Get workspaces where specific member has access (admin only)
+      const memberWorkspaces = await db
+        .select({
+          id: workspace.id,
+          name: workspace.name,
+          ownerId: workspace.ownerId,
+          isOwner: eq(workspace.ownerId, memberId),
+          permissionType: permissions.permissionType,
+          createdAt: permissions.createdAt,
+        })
+        .from(workspace)
+        .leftJoin(
+          permissions,
+          and(
+            eq(permissions.entityType, 'workspace'),
+            eq(permissions.entityId, workspace.id),
+            eq(permissions.userId, memberId)
+          )
+        )
+        .where(
+          or(
+            // Member owns the workspace
+            eq(workspace.ownerId, memberId),
+            // Member has permissions on the workspace
+            and(eq(permissions.userId, memberId), eq(permissions.entityType, 'workspace'))
+          )
+        )
+
+      const formattedWorkspaces = memberWorkspaces.map((workspace) => ({
+        id: workspace.id,
+        name: workspace.name,
+        isOwner: workspace.isOwner,
+        permission: workspace.permissionType,
+        joinedAt: workspace.createdAt,
+        createdAt: workspace.createdAt,
+      }))
+
+      return NextResponse.json({
+        success: true,
+        data: {
+          workspaces: formattedWorkspaces,
+          totalCount: formattedWorkspaces.length,
+          filter: 'member',
+          memberId,
+        },
+      })
+    }
+
+    // Default: Get all workspaces (basic info only for regular members)
+    if (!hasAdminAccess) {
+      return NextResponse.json({
+        success: true,
+        data: {
+          workspaces: [],
+          totalCount: 0,
+          message: 'Workspace access information is only available to organization admins',
+        },
+      })
+    }
+
+    // For admins: Get summary of all workspaces
+    const allWorkspaces = await db
+      .select({
+        id: workspace.id,
+        name: workspace.name,
+        ownerId: workspace.ownerId,
+        createdAt: workspace.createdAt,
+        ownerName: user.name,
+      })
+      .from(workspace)
+      .leftJoin(user, eq(workspace.ownerId, user.id))
+
+    return NextResponse.json({
+      success: true,
+      data: {
+        workspaces: allWorkspaces,
+        totalCount: allWorkspaces.length,
+        filter: 'all',
+      },
+      userRole,
+      hasAdminAccess,
+    })
+  } catch (error) {
+    logger.error('Failed to get organization workspaces', { error })
+    return NextResponse.json(
+      {
+        error: 'Internal server error',
+      },
+      { status: 500 }
+    )
+  }
+}
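For illustration only (not from the changeset): a minimal client-side sketch of calling the route added above. The path and response shape come from the new file; the function name, the `organizationId` value, and the error handling are assumptions.

// Hedged sketch: fetch workspaces the current user can invite others into.
// Mirrors the route's { success, data: { workspaces, totalCount, filter } } payload.
async function fetchInvitableWorkspaces(organizationId: string) {
  const res = await fetch(`/api/organizations/${organizationId}/workspaces?available=true`)
  if (!res.ok) {
    throw new Error(`Request failed with status ${res.status}`)
  }
  const body = await res.json()
  // body.data.workspaces: Array<{ id, name, isOwner, canInvite, createdAt }>
  return body.data.workspaces
}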
@@ -1,257 +0,0 @@
-import { createLogger } from '@sim/logger'
-import { type NextRequest, NextResponse } from 'next/server'
-import { z } from 'zod'
-import { checkInternalAuth } from '@/lib/auth/hybrid'
-import { generateRequestId } from '@/lib/core/utils/request'
-import { processSingleFileToUserFile } from '@/lib/uploads/utils/file-utils'
-import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
-
-export const dynamic = 'force-dynamic'
-
-const logger = createLogger('SupabaseStorageUploadAPI')
-
-const SupabaseStorageUploadSchema = z.object({
-  projectId: z.string().min(1, 'Project ID is required'),
-  apiKey: z.string().min(1, 'API key is required'),
-  bucket: z.string().min(1, 'Bucket name is required'),
-  fileName: z.string().min(1, 'File name is required'),
-  path: z.string().optional().nullable(),
-  fileData: z.any(),
-  contentType: z.string().optional().nullable(),
-  upsert: z.boolean().optional().default(false),
-})
-
-export async function POST(request: NextRequest) {
-  const requestId = generateRequestId()
-
-  try {
-    const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
-
-    if (!authResult.success) {
-      logger.warn(
-        `[${requestId}] Unauthorized Supabase storage upload attempt: ${authResult.error}`
-      )
-      return NextResponse.json(
-        {
-          success: false,
-          error: authResult.error || 'Authentication required',
-        },
-        { status: 401 }
-      )
-    }
-
-    logger.info(
-      `[${requestId}] Authenticated Supabase storage upload request via ${authResult.authType}`,
-      {
-        userId: authResult.userId,
-      }
-    )
-
-    const body = await request.json()
-    const validatedData = SupabaseStorageUploadSchema.parse(body)
-
-    const fileData = validatedData.fileData
-    const isStringInput = typeof fileData === 'string'
-
-    logger.info(`[${requestId}] Uploading to Supabase Storage`, {
-      bucket: validatedData.bucket,
-      fileName: validatedData.fileName,
-      path: validatedData.path,
-      fileDataType: isStringInput ? 'string' : 'object',
-    })
-
-    if (!fileData) {
-      return NextResponse.json(
-        {
-          success: false,
-          error: 'fileData is required',
-        },
-        { status: 400 }
-      )
-    }
-
-    let uploadBody: Buffer
-    let uploadContentType: string | undefined
-
-    if (isStringInput) {
-      let content = fileData as string
-
-      const dataUrlMatch = content.match(/^data:([^;]+);base64,(.+)$/s)
-      if (dataUrlMatch) {
-        const [, mimeType, base64Data] = dataUrlMatch
-        content = base64Data
-        if (!validatedData.contentType) {
-          uploadContentType = mimeType
-        }
-        logger.info(`[${requestId}] Extracted base64 from data URL (MIME: ${mimeType})`)
-      }
-
-      const cleanedContent = content.replace(/[\s\r\n]/g, '')
-      const isLikelyBase64 = /^[A-Za-z0-9+/]*={0,2}$/.test(cleanedContent)
-
-      if (isLikelyBase64 && cleanedContent.length >= 4) {
-        try {
-          uploadBody = Buffer.from(cleanedContent, 'base64')
-
-          const expectedMinSize = Math.floor(cleanedContent.length * 0.7)
-          const expectedMaxSize = Math.ceil(cleanedContent.length * 0.8)
-
-          if (
-            uploadBody.length >= expectedMinSize &&
-            uploadBody.length <= expectedMaxSize &&
-            uploadBody.length > 0
-          ) {
-            logger.info(
-              `[${requestId}] Decoded base64 content: ${cleanedContent.length} chars -> ${uploadBody.length} bytes`
-            )
-          } else {
-            const reEncoded = uploadBody.toString('base64')
-            if (reEncoded !== cleanedContent) {
-              logger.info(
-                `[${requestId}] Content looked like base64 but re-encoding didn't match, using as plain text`
-              )
-              uploadBody = Buffer.from(content, 'utf-8')
-            } else {
-              logger.info(
-                `[${requestId}] Decoded base64 content (verified): ${uploadBody.length} bytes`
-              )
-            }
-          }
-        } catch (decodeError) {
-          logger.info(
-            `[${requestId}] Failed to decode as base64, using as plain text: ${decodeError}`
-          )
-          uploadBody = Buffer.from(content, 'utf-8')
-        }
-      } else {
-        uploadBody = Buffer.from(content, 'utf-8')
-        logger.info(`[${requestId}] Using content as plain text (${uploadBody.length} bytes)`)
-      }
-
-      uploadContentType =
-        uploadContentType || validatedData.contentType || 'application/octet-stream'
-    } else {
-      const rawFile = fileData
-      logger.info(`[${requestId}] Processing file object: ${rawFile.name || 'unknown'}`)
-
-      let userFile
-      try {
-        userFile = processSingleFileToUserFile(rawFile, requestId, logger)
-      } catch (error) {
-        return NextResponse.json(
-          {
-            success: false,
-            error: error instanceof Error ? error.message : 'Failed to process file',
-          },
-          { status: 400 }
-        )
-      }
-
-      const buffer = await downloadFileFromStorage(userFile, requestId, logger)
-
-      uploadBody = buffer
-      uploadContentType = validatedData.contentType || userFile.type || 'application/octet-stream'
-    }
-
-    let fullPath = validatedData.fileName
-    if (validatedData.path) {
-      const folderPath = validatedData.path.endsWith('/')
-        ? validatedData.path
-        : `${validatedData.path}/`
-      fullPath = `${folderPath}${validatedData.fileName}`
-    }
-
-    const supabaseUrl = `https://${validatedData.projectId}.supabase.co/storage/v1/object/${validatedData.bucket}/${fullPath}`
-
-    const headers: Record<string, string> = {
-      apikey: validatedData.apiKey,
-      Authorization: `Bearer ${validatedData.apiKey}`,
-      'Content-Type': uploadContentType,
-    }
-
-    if (validatedData.upsert) {
-      headers['x-upsert'] = 'true'
-    }
-
-    logger.info(`[${requestId}] Sending to Supabase: ${supabaseUrl}`, {
-      contentType: uploadContentType,
-      bodySize: uploadBody.length,
-      upsert: validatedData.upsert,
-    })
-
-    const response = await fetch(supabaseUrl, {
-      method: 'POST',
-      headers,
-      body: new Uint8Array(uploadBody),
-    })
-
-    if (!response.ok) {
-      const errorText = await response.text()
-      let errorData
-      try {
-        errorData = JSON.parse(errorText)
-      } catch {
-        errorData = { message: errorText }
-      }
-
-      logger.error(`[${requestId}] Supabase Storage upload failed:`, {
-        status: response.status,
-        statusText: response.statusText,
-        error: errorData,
-      })
-
-      return NextResponse.json(
-        {
-          success: false,
-          error: errorData.message || errorData.error || `Upload failed: ${response.statusText}`,
-          details: errorData,
-        },
-        { status: response.status }
-      )
-    }
-
-    const result = await response.json()
-
-    logger.info(`[${requestId}] File uploaded successfully to Supabase Storage`, {
-      bucket: validatedData.bucket,
-      path: fullPath,
-    })
-
-    const publicUrl = `https://${validatedData.projectId}.supabase.co/storage/v1/object/public/${validatedData.bucket}/${fullPath}`
-
-    return NextResponse.json({
-      success: true,
-      output: {
-        message: 'Successfully uploaded file to storage',
-        results: {
-          ...result,
-          path: fullPath,
-          bucket: validatedData.bucket,
-          publicUrl,
-        },
-      },
-    })
-  } catch (error) {
-    if (error instanceof z.ZodError) {
-      logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
-      return NextResponse.json(
-        {
-          success: false,
-          error: 'Invalid request data',
-          details: error.errors,
-        },
-        { status: 400 }
-      )
-    }
-
-    logger.error(`[${requestId}] Error uploading to Supabase Storage:`, error)
-
-    return NextResponse.json(
-      {
-        success: false,
-        error: error instanceof Error ? error.message : 'Internal server error',
-      },
-      { status: 500 }
-    )
-  }
-}
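For illustration only (not from the changeset): a sketch of the request body the removed route above validated. Field names come from its SupabaseStorageUploadSchema; every value below is an invented placeholder.

// Hedged example payload for the removed POST handler above.
const examplePayload = {
  projectId: 'abcd1234',                 // forms https://abcd1234.supabase.co/storage/v1/object/...
  apiKey: 'service-or-anon-key',         // sent as both apikey and Bearer Authorization headers
  bucket: 'uploads',
  fileName: 'report.txt',
  path: 'monthly/',                      // optional folder prefix
  fileData: 'data:text/plain;base64,SGVsbG8sIFdvcmxkIQ==', // data URL, raw base64, or plain text
  contentType: 'text/plain',             // optional; inferred from the data URL if omitted
  upsert: true,                          // adds the x-upsert header
}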
@@ -338,11 +338,6 @@ const arePropsEqual = (prevProps: SubBlockProps, nextProps: SubBlockProps): bool
   const configEqual =
     prevProps.config.id === nextProps.config.id && prevProps.config.type === nextProps.config.type
 
-  const canonicalToggleEqual =
-    !!prevProps.canonicalToggle === !!nextProps.canonicalToggle &&
-    prevProps.canonicalToggle?.mode === nextProps.canonicalToggle?.mode &&
-    prevProps.canonicalToggle?.disabled === nextProps.canonicalToggle?.disabled
-
   return (
     prevProps.blockId === nextProps.blockId &&
     configEqual &&
@@ -351,7 +346,8 @@ const arePropsEqual = (prevProps: SubBlockProps, nextProps: SubBlockProps): bool
     prevProps.disabled === nextProps.disabled &&
     prevProps.fieldDiffStatus === nextProps.fieldDiffStatus &&
    prevProps.allowExpandInPreview === nextProps.allowExpandInPreview &&
-    canonicalToggleEqual
+    prevProps.canonicalToggle?.mode === nextProps.canonicalToggle?.mode &&
+    prevProps.canonicalToggle?.disabled === nextProps.canonicalToggle?.disabled
   )
 }
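For illustration only (not from the changeset): the one case where the removed `!!prevProps.canonicalToggle === !!nextProps.canonicalToggle` existence check behaves differently from the inlined optional-chaining comparisons. The prop shapes are assumed from the comparator above; the values are invented.

// Hedged illustration, not repository code.
const prev = { canonicalToggle: {} as { mode?: string; disabled?: boolean } }
const next = { canonicalToggle: undefined }
// Old comparator: !!prev.canonicalToggle === !!next.canonicalToggle -> true === false -> props differ.
// New comparator: prev.canonicalToggle?.mode === next.canonicalToggle?.mode &&
//                 prev.canonicalToggle?.disabled === next.canonicalToggle?.disabled
//                 -> undefined === undefined twice -> props treated as equal.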
@@ -214,6 +214,15 @@ export const A2ABlock: BlockConfig<A2AResponse> = {
   ],
   config: {
     tool: (params) => params.operation as string,
+    params: (params) => {
+      const { fileUpload, fileReference, ...rest } = params
+      const hasFileUpload = Array.isArray(fileUpload) ? fileUpload.length > 0 : !!fileUpload
+      const files = hasFileUpload ? fileUpload : fileReference
+      return {
+        ...rest,
+        ...(files ? { files } : {}),
+      }
+    },
   },
 },
 inputs: {
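For illustration only (not from the changeset): what the `params` mapper added above does to one example input. The operation name and file reference are invented; the mapping logic is taken directly from the diff.

// Hedged example: an empty fileUpload array falls back to fileReference,
// and both raw fields are stripped from the forwarded params.
const input = { operation: 'send_message', fileUpload: [], fileReference: { id: 'file_123' } }
// hasFileUpload -> false, files -> { id: 'file_123' }
// Result forwarded to the tool: { operation: 'send_message', files: { id: 'file_123' } }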
@@ -581,18 +581,6 @@ export const GmailV2Block: BlockConfig<GmailToolResponse> = {
     results: { type: 'json', description: 'Search/read summary results' },
     attachments: { type: 'json', description: 'Downloaded attachments (if enabled)' },
 
-    // Draft-specific outputs
-    draftId: {
-      type: 'string',
-      description: 'Draft ID',
-      condition: { field: 'operation', value: 'draft_gmail' },
-    },
-    messageId: {
-      type: 'string',
-      description: 'Gmail message ID for the draft',
-      condition: { field: 'operation', value: 'draft_gmail' },
-    },
-
     // Trigger outputs (unchanged)
     email_id: { type: 'string', description: 'Gmail message ID' },
     thread_id: { type: 'string', description: 'Gmail thread ID' },
@@ -661,25 +661,12 @@ Return ONLY the PostgREST filter expression - no explanations, no markdown, no e
       placeholder: 'folder/subfolder/',
       condition: { field: 'operation', value: 'storage_upload' },
     },
-    {
-      id: 'file',
-      title: 'File',
-      type: 'file-upload',
-      canonicalParamId: 'fileData',
-      placeholder: 'Upload file to storage',
-      condition: { field: 'operation', value: 'storage_upload' },
-      mode: 'basic',
-      multiple: false,
-      required: true,
-    },
     {
       id: 'fileContent',
       title: 'File Content',
       type: 'code',
-      canonicalParamId: 'fileData',
       placeholder: 'Base64 encoded for binary files, or plain text',
       condition: { field: 'operation', value: 'storage_upload' },
-      mode: 'advanced',
       required: true,
     },
     {
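For illustration only (not from the changeset): with the basic-mode file-upload sub-block removed above, binary content for the remaining `fileContent` field has to arrive as text. A hedged Node sketch of producing such a base64 string — the helper name and file path are assumptions, not repository code.

import { readFile } from 'node:fs/promises'

// Hedged helper: read a local file and return base64 suitable for a
// "Base64 encoded for binary files, or plain text" code field.
async function toBase64(filePath: string): Promise<string> {
  const bytes = await readFile(filePath)
  return bytes.toString('base64')
}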
@@ -1,9 +1,7 @@
+import { getBlockOutputs } from '@/lib/workflows/blocks/block-outputs'
 import { normalizeName } from '@/executor/constants'
 import type { ExecutionContext } from '@/executor/types'
 import type { OutputSchema } from '@/executor/utils/block-reference'
-import type { SerializedBlock } from '@/serializer/types'
-import type { ToolConfig } from '@/tools/types'
-import { getTool } from '@/tools/utils'
 
 export interface BlockDataCollection {
   blockData: Record<string, unknown>
@@ -11,32 +9,6 @@ export interface BlockDataCollection {
   blockOutputSchemas: Record<string, OutputSchema>
 }
 
-export function getBlockSchema(
-  block: SerializedBlock,
-  toolConfig?: ToolConfig
-): OutputSchema | undefined {
-  const isTrigger =
-    block.metadata?.category === 'triggers' ||
-    (block.config?.params as Record<string, unknown> | undefined)?.triggerMode === true
-
-  // Triggers use saved outputs (defines the trigger payload schema)
-  if (isTrigger && block.outputs && Object.keys(block.outputs).length > 0) {
-    return block.outputs as OutputSchema
-  }
-
-  // When a tool is selected, tool outputs are the source of truth
-  if (toolConfig?.outputs && Object.keys(toolConfig.outputs).length > 0) {
-    return toolConfig.outputs as OutputSchema
-  }
-
-  // Fallback to saved outputs for blocks without tools
-  if (block.outputs && Object.keys(block.outputs).length > 0) {
-    return block.outputs as OutputSchema
-  }
-
-  return undefined
-}
-
 export function collectBlockData(ctx: ExecutionContext): BlockDataCollection {
   const blockData: Record<string, unknown> = {}
   const blockNameMapping: Record<string, string> = {}
@@ -46,23 +18,26 @@ export function collectBlockData(ctx: ExecutionContext): BlockDataCollection {
     if (state.output !== undefined) {
      blockData[id] = state.output
     }
-  }
 
-  const workflowBlocks = ctx.workflow?.blocks ?? []
-  for (const block of workflowBlocks) {
-    const id = block.id
+    const workflowBlock = ctx.workflow?.blocks?.find((b) => b.id === id)
+    if (!workflowBlock) continue
 
-    if (block.metadata?.name) {
-      blockNameMapping[normalizeName(block.metadata.name)] = id
+    if (workflowBlock.metadata?.name) {
+      blockNameMapping[normalizeName(workflowBlock.metadata.name)] = id
     }
 
-    const toolId = block.config?.tool
-    const toolConfig = toolId ? getTool(toolId) : undefined
-    const schema = getBlockSchema(block, toolConfig)
-    if (schema && Object.keys(schema).length > 0) {
-      blockOutputSchemas[id] = schema
+    const blockType = workflowBlock.metadata?.id
+    if (blockType) {
+      const params = workflowBlock.config?.params as Record<string, unknown> | undefined
+      const subBlocks = params
+        ? Object.fromEntries(Object.entries(params).map(([k, v]) => [k, { value: v }]))
+        : undefined
+      const schema = getBlockOutputs(blockType, subBlocks)
+      if (schema && Object.keys(schema).length > 0) {
+        blockOutputSchemas[id] = schema
+      }
     }
   }
 
   return { blockData, blockNameMapping, blockOutputSchemas }
 }
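For illustration only (not from the changeset): a small sketch of the `Object.fromEntries` adaptation used above to turn serialized params into the sub-block shape passed to `getBlockOutputs`. The parameter names and values are invented; the expression is copied from the diff.

// Hedged illustration of the params -> subBlocks adaptation.
const params = { operation: 'storage_upload', bucket: 'uploads' }
const subBlocks = Object.fromEntries(
  Object.entries(params).map(([k, v]) => [k, { value: v }])
)
// subBlocks: { operation: { value: 'storage_upload' }, bucket: { value: 'uploads' } }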
@@ -378,30 +378,8 @@ function buildManualTriggerOutput(
   return mergeFilesIntoOutput(output, workflowInput)
 }
 
-function buildIntegrationTriggerOutput(
-  workflowInput: unknown,
-  structuredInput: Record<string, unknown>,
-  hasStructured: boolean
-): NormalizedBlockOutput {
-  const output: NormalizedBlockOutput = {}
-
-  if (hasStructured) {
-    for (const [key, value] of Object.entries(structuredInput)) {
-      output[key] = value
-    }
-  }
-
-  if (isPlainObject(workflowInput)) {
-    for (const [key, value] of Object.entries(workflowInput)) {
-      if (value !== undefined && value !== null) {
-        output[key] = value
-      } else if (!Object.hasOwn(output, key)) {
-        output[key] = value
-      }
-    }
-  }
-
-  return mergeFilesIntoOutput(output, workflowInput)
-}
+function buildIntegrationTriggerOutput(workflowInput: unknown): NormalizedBlockOutput {
+  return isPlainObject(workflowInput) ? (workflowInput as NormalizedBlockOutput) : {}
+}
 
 function extractSubBlocks(block: SerializedBlock): Record<string, unknown> | undefined {
@@ -450,7 +428,7 @@ export function buildStartBlockOutput(options: StartBlockOutputOptions): Normali
       return buildManualTriggerOutput(finalInput, workflowInput)
 
     case StartBlockPath.EXTERNAL_TRIGGER:
-      return buildIntegrationTriggerOutput(workflowInput, structuredInput, hasStructured)
+      return buildIntegrationTriggerOutput(workflowInput)
 
    case StartBlockPath.LEGACY_STARTER:
      return buildLegacyStarterOutput(
@@ -1,10 +1,10 @@
+import { getBlockOutputs } from '@/lib/workflows/blocks/block-outputs'
 import {
   isReference,
   normalizeName,
   parseReferencePath,
   SPECIAL_REFERENCE_PREFIXES,
 } from '@/executor/constants'
-import { getBlockSchema } from '@/executor/utils/block-data'
 import {
   InvalidFieldError,
   type OutputSchema,
@@ -67,9 +67,15 @@ export class BlockResolver implements Resolver {
        blockData[blockId] = output
      }
 
+      const blockType = block.metadata?.id
+      const params = block.config?.params as Record<string, unknown> | undefined
+      const subBlocks = params
+        ? Object.fromEntries(Object.entries(params).map(([k, v]) => [k, { value: v }]))
+        : undefined
       const toolId = block.config?.tool
       const toolConfig = toolId ? getTool(toolId) : undefined
-      const outputSchema = getBlockSchema(block, toolConfig)
+      const outputSchema =
+        toolConfig?.outputs ?? (blockType ? getBlockOutputs(blockType, subBlocks) : block.outputs)
 
       if (outputSchema && Object.keys(outputSchema).length > 0) {
         blockOutputSchemas[blockId] = outputSchema
@@ -680,10 +680,6 @@ export function useCollaborativeWorkflow() {
        previousPositions?: Map<string, { x: number; y: number; parentId?: string }>
      }
    ) => {
-      if (isBaselineDiffView) {
-        return
-      }
-
      if (!isInActiveRoom()) {
        logger.debug('Skipping batch position update - not in active workflow')
        return
@@ -729,7 +725,7 @@ export function useCollaborativeWorkflow() {
        }
      }
    },
-    [isBaselineDiffView, addToQueue, activeWorkflowId, session?.user?.id, isInActiveRoom, undoRedo]
+    [addToQueue, activeWorkflowId, session?.user?.id, isInActiveRoom, undoRedo]
   )
 
   const collaborativeUpdateBlockName = useCallback(
@@ -821,10 +817,6 @@ export function useCollaborativeWorkflow() {
 
   const collaborativeBatchToggleBlockEnabled = useCallback(
     (ids: string[]) => {
-      if (isBaselineDiffView) {
-        return
-      }
-
      if (ids.length === 0) return
 
      const previousStates: Record<string, boolean> = {}
@@ -857,7 +849,7 @@ export function useCollaborativeWorkflow() {
 
      undoRedo.recordBatchToggleEnabled(validIds, previousStates)
    },
-    [isBaselineDiffView, addToQueue, activeWorkflowId, session?.user?.id, undoRedo]
+    [addToQueue, activeWorkflowId, session?.user?.id, undoRedo]
   )
 
   const collaborativeBatchUpdateParent = useCallback(
@@ -869,10 +861,6 @@ export function useCollaborativeWorkflow() {
        affectedEdges: Edge[]
      }>
    ) => {
-      if (isBaselineDiffView) {
-        return
-      }
-
      if (!isInActiveRoom()) {
        logger.debug('Skipping batch update parent - not in active workflow')
        return
@@ -943,7 +931,7 @@ export function useCollaborativeWorkflow() {
 
      logger.debug('Batch updated parent for blocks', { updateCount: updates.length })
    },
-    [isBaselineDiffView, isInActiveRoom, undoRedo, addToQueue, activeWorkflowId, session?.user?.id]
+    [isInActiveRoom, undoRedo, addToQueue, activeWorkflowId, session?.user?.id]
   )
 
   const collaborativeToggleBlockAdvancedMode = useCallback(
@@ -963,37 +951,18 @@ export function useCollaborativeWorkflow() {
 
   const collaborativeSetBlockCanonicalMode = useCallback(
     (id: string, canonicalId: string, canonicalMode: 'basic' | 'advanced') => {
-      if (isBaselineDiffView) {
-        return
-      }
-
-      useWorkflowStore.getState().setBlockCanonicalMode(id, canonicalId, canonicalMode)
-
-      if (!activeWorkflowId) {
-        return
-      }
-
-      const operationId = crypto.randomUUID()
-      addToQueue({
-        id: operationId,
-        operation: {
-          operation: BLOCK_OPERATIONS.UPDATE_CANONICAL_MODE,
-          target: OPERATION_TARGETS.BLOCK,
-          payload: { id, canonicalId, canonicalMode },
-        },
-        workflowId: activeWorkflowId,
-        userId: session?.user?.id || 'unknown',
-      })
+      executeQueuedOperation(
+        BLOCK_OPERATIONS.UPDATE_CANONICAL_MODE,
+        OPERATION_TARGETS.BLOCK,
+        { id, canonicalId, canonicalMode },
+        () => useWorkflowStore.getState().setBlockCanonicalMode(id, canonicalId, canonicalMode)
+      )
    },
-    [isBaselineDiffView, activeWorkflowId, addToQueue, session?.user?.id]
+    [executeQueuedOperation]
   )
 
   const collaborativeBatchToggleBlockHandles = useCallback(
    (ids: string[]) => {
-      if (isBaselineDiffView) {
-        return
-      }
-
      if (ids.length === 0) return
 
      const previousStates: Record<string, boolean> = {}
@@ -1026,15 +995,11 @@ export function useCollaborativeWorkflow() {
 
      undoRedo.recordBatchToggleHandles(validIds, previousStates)
    },
-    [isBaselineDiffView, addToQueue, activeWorkflowId, session?.user?.id, undoRedo]
+    [addToQueue, activeWorkflowId, session?.user?.id, undoRedo]
   )
 
   const collaborativeBatchAddEdges = useCallback(
    (edges: Edge[], options?: { skipUndoRedo?: boolean }) => {
-      if (isBaselineDiffView) {
-        return false
-      }
-
      if (!isInActiveRoom()) {
        logger.debug('Skipping batch add edges - not in active workflow')
        return false
@@ -1070,15 +1035,11 @@ export function useCollaborativeWorkflow() {
 
      return true
    },
-    [isBaselineDiffView, addToQueue, activeWorkflowId, session?.user?.id, isInActiveRoom, undoRedo]
+    [addToQueue, activeWorkflowId, session?.user?.id, isInActiveRoom, undoRedo]
   )
 
   const collaborativeBatchRemoveEdges = useCallback(
    (edgeIds: string[], options?: { skipUndoRedo?: boolean }) => {
-      if (isBaselineDiffView) {
-        return false
-      }
-
      if (!isInActiveRoom()) {
        logger.debug('Skipping batch remove edges - not in active workflow')
        return false
@@ -1128,7 +1089,7 @@ export function useCollaborativeWorkflow() {
      logger.info('Batch removed edges', { count: validEdgeIds.length })
      return true
    },
-    [isBaselineDiffView, isInActiveRoom, addToQueue, activeWorkflowId, session, undoRedo]
+    [isInActiveRoom, addToQueue, activeWorkflowId, session, undoRedo]
   )
 
   const collaborativeSetSubblockValue = useCallback(
@@ -1204,10 +1165,6 @@ export function useCollaborativeWorkflow() {
    (blockId: string, subblockId: string, value: any) => {
|
(blockId: string, subblockId: string, value: any) => {
|
||||||
if (isApplyingRemoteChange.current) return
|
if (isApplyingRemoteChange.current) return
|
||||||
|
|
||||||
if (isBaselineDiffView) {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!isInActiveRoom()) {
|
if (!isInActiveRoom()) {
|
||||||
logger.debug('Skipping tag selection - not in active workflow', {
|
logger.debug('Skipping tag selection - not in active workflow', {
|
||||||
currentWorkflowId,
|
currentWorkflowId,
|
||||||
@@ -1235,14 +1192,7 @@ export function useCollaborativeWorkflow() {
|
|||||||
userId: session?.user?.id || 'unknown',
|
userId: session?.user?.id || 'unknown',
|
||||||
})
|
})
|
||||||
},
|
},
|
||||||
[
|
[addToQueue, currentWorkflowId, activeWorkflowId, session?.user?.id, isInActiveRoom]
|
||||||
isBaselineDiffView,
|
|
||||||
addToQueue,
|
|
||||||
currentWorkflowId,
|
|
||||||
activeWorkflowId,
|
|
||||||
session?.user?.id,
|
|
||||||
isInActiveRoom,
|
|
||||||
]
|
|
||||||
)
|
)
|
||||||
|
|
||||||
const collaborativeUpdateLoopType = useCallback(
|
const collaborativeUpdateLoopType = useCallback(
|
||||||
@@ -1588,10 +1538,6 @@ export function useCollaborativeWorkflow() {
|
|||||||
|
|
||||||
const collaborativeBatchRemoveBlocks = useCallback(
|
const collaborativeBatchRemoveBlocks = useCallback(
|
||||||
(blockIds: string[], options?: { skipUndoRedo?: boolean }) => {
|
(blockIds: string[], options?: { skipUndoRedo?: boolean }) => {
|
||||||
if (isBaselineDiffView) {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!isInActiveRoom()) {
|
if (!isInActiveRoom()) {
|
||||||
logger.debug('Skipping batch remove blocks - not in active workflow')
|
logger.debug('Skipping batch remove blocks - not in active workflow')
|
||||||
return false
|
return false
|
||||||
@@ -1673,7 +1619,6 @@ export function useCollaborativeWorkflow() {
|
|||||||
return true
|
return true
|
||||||
},
|
},
|
||||||
[
|
[
|
||||||
isBaselineDiffView,
|
|
||||||
addToQueue,
|
addToQueue,
|
||||||
activeWorkflowId,
|
activeWorkflowId,
|
||||||
session?.user?.id,
|
session?.user?.id,
|
||||||
|
|||||||
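Note on the hunks above: the per-callback `isBaselineDiffView` early returns are dropped, and `collaborativeSetBlockCanonicalMode` now routes through an `executeQueuedOperation` helper instead of calling `addToQueue` inline. The helper itself is not part of this compare; the sketch below is only an illustration of the shape implied by the call site (operation name, target, payload, local apply function) and by the inline pattern being removed. Every name and the guard placement here are assumptions, not the code from this PR.

```ts
// Illustrative sketch only - not the helper from this PR.
type QueuedOperation = {
  operation: string
  target: string
  payload: Record<string, unknown>
}

function makeExecuteQueuedOperation(deps: {
  activeWorkflowId: string | null
  userId: string | undefined
  addToQueue: (entry: {
    id: string
    operation: QueuedOperation
    workflowId: string
    userId: string
  }) => void
}) {
  return function executeQueuedOperation(
    operation: string,
    target: string,
    payload: Record<string, unknown>,
    applyLocal: () => void
  ) {
    // Apply the optimistic local store update first, as the removed inline code did.
    applyLocal()

    // Without an active workflow there is nothing to sync to the server.
    if (!deps.activeWorkflowId) return

    deps.addToQueue({
      id: crypto.randomUUID(),
      operation: { operation, target, payload },
      workflowId: deps.activeWorkflowId,
      userId: deps.userId || 'unknown',
    })
  }
}
```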
@@ -2508,6 +2508,10 @@ async function validateWorkflowSelectorIds(
     for (const subBlockConfig of blockConfig.subBlocks) {
       if (!SELECTOR_TYPES.has(subBlockConfig.type)) continue
 
+      // Skip oauth-input - credentials are pre-validated before edit application
+      // This allows existing collaborator credentials to remain untouched
+      if (subBlockConfig.type === 'oauth-input') continue
+
       const subBlockValue = blockData.subBlocks?.[subBlockConfig.id]?.value
       if (!subBlockValue) continue
 
@@ -2573,6 +2577,150 @@ async function validateWorkflowSelectorIds(
   return errors
 }
 
+/**
+ * Pre-validates credential and apiKey inputs in operations before they are applied.
+ * - Validates oauth-input (credential) IDs belong to the user
+ * - Filters out apiKey inputs for hosted models when isHosted is true
+ * Returns validation errors for any removed inputs.
+ */
+async function preValidateCredentialInputs(
+  operations: EditWorkflowOperation[],
+  context: { userId: string }
+): Promise<{ filteredOperations: EditWorkflowOperation[]; errors: ValidationError[] }> {
+  const { isHosted } = await import('@/lib/core/config/feature-flags')
+  const { getHostedModels } = await import('@/providers/utils')
+
+  const logger = createLogger('PreValidateCredentials')
+  const errors: ValidationError[] = []
+
+  // Collect credential and apiKey inputs that need validation/filtering
+  const credentialInputs: Array<{
+    operationIndex: number
+    blockId: string
+    blockType: string
+    fieldName: string
+    value: string
+  }> = []
+
+  const hostedApiKeyInputs: Array<{
+    operationIndex: number
+    blockId: string
+    blockType: string
+    model: string
+  }> = []
+
+  const hostedModelsLower = isHosted
+    ? new Set(getHostedModels().map((m) => m.toLowerCase()))
+    : null
+
+  operations.forEach((op, opIndex) => {
+    if (!op.params?.inputs || !op.params?.type) return
+
+    const blockConfig = getBlock(op.params.type)
+    if (!blockConfig) return
+
+    // Find oauth-input subblocks
+    for (const subBlockConfig of blockConfig.subBlocks) {
+      if (subBlockConfig.type !== 'oauth-input') continue
+
+      const inputValue = op.params.inputs[subBlockConfig.id]
+      if (!inputValue || typeof inputValue !== 'string' || inputValue.trim() === '') continue
+
+      credentialInputs.push({
+        operationIndex: opIndex,
+        blockId: op.block_id,
+        blockType: op.params.type,
+        fieldName: subBlockConfig.id,
+        value: inputValue,
+      })
+    }
+
+    // Check for apiKey inputs on hosted models
+    if (hostedModelsLower && op.params.inputs.apiKey) {
+      const modelValue = op.params.inputs.model
+      if (modelValue && typeof modelValue === 'string') {
+        if (hostedModelsLower.has(modelValue.toLowerCase())) {
+          hostedApiKeyInputs.push({
+            operationIndex: opIndex,
+            blockId: op.block_id,
+            blockType: op.params.type,
+            model: modelValue,
+          })
+        }
+      }
+    }
+  })
+
+  const hasCredentialsToValidate = credentialInputs.length > 0
+  const hasHostedApiKeysToFilter = hostedApiKeyInputs.length > 0
+
+  if (!hasCredentialsToValidate && !hasHostedApiKeysToFilter) {
+    return { filteredOperations: operations, errors }
+  }
+
+  // Deep clone operations so we can modify them
+  const filteredOperations = structuredClone(operations)
+
+  // Filter out apiKey inputs for hosted models
+  if (hasHostedApiKeysToFilter) {
+    logger.info('Filtering apiKey inputs for hosted models', { count: hostedApiKeyInputs.length })
+
+    for (const apiKeyInput of hostedApiKeyInputs) {
+      const op = filteredOperations[apiKeyInput.operationIndex]
+      if (op.params?.inputs?.apiKey) {
+        delete op.params.inputs.apiKey
+        logger.debug('Silently filtered apiKey for hosted model', {
+          blockId: apiKeyInput.blockId,
+          model: apiKeyInput.model,
+        })
+      }
+    }
+  }
+
+  // Validate credential inputs
+  if (hasCredentialsToValidate) {
+    logger.info('Pre-validating credential inputs', {
+      credentialCount: credentialInputs.length,
+      userId: context.userId,
+    })
+
+    const allCredentialIds = credentialInputs.map((c) => c.value)
+    const validationResult = await validateSelectorIds('oauth-input', allCredentialIds, context)
+    const invalidSet = new Set(validationResult.invalid)
+
+    if (invalidSet.size > 0) {
+      for (const credInput of credentialInputs) {
+        if (!invalidSet.has(credInput.value)) continue
+
+        const op = filteredOperations[credInput.operationIndex]
+        if (op.params?.inputs?.[credInput.fieldName]) {
+          delete op.params.inputs[credInput.fieldName]
+          logger.info('Removed invalid credential from operation', {
+            blockId: credInput.blockId,
+            field: credInput.fieldName,
+            invalidValue: credInput.value,
+          })
+        }
+
+        const warningInfo = validationResult.warning ? `. ${validationResult.warning}` : ''
+        errors.push({
+          blockId: credInput.blockId,
+          blockType: credInput.blockType,
+          field: credInput.fieldName,
+          value: credInput.value,
+          error: `Invalid credential ID "${credInput.value}" - credential does not exist or user doesn't have access${warningInfo}`,
+        })
      }
+
+      logger.warn('Filtered out invalid credentials', {
+        invalidCount: invalidSet.size,
+      })
+    }
+  }
+
+  return { filteredOperations, errors }
+}
+
 async function getCurrentWorkflowStateFromDb(
   workflowId: string
 ): Promise<{ workflowState: any; subBlockValues: Record<string, Record<string, any>> }> {
@@ -2657,12 +2805,28 @@ export const editWorkflowServerTool: BaseServerTool<EditWorkflowParams, any> = {
     // Get permission config for the user
     const permissionConfig = context?.userId ? await getUserPermissionConfig(context.userId) : null
 
+    // Pre-validate credential and apiKey inputs before applying operations
+    // This filters out invalid credentials and apiKeys for hosted models
+    let operationsToApply = operations
+    const credentialErrors: ValidationError[] = []
+    if (context?.userId) {
+      const { filteredOperations, errors: credErrors } = await preValidateCredentialInputs(
+        operations,
+        { userId: context.userId }
+      )
+      operationsToApply = filteredOperations
+      credentialErrors.push(...credErrors)
+    }
+
     // Apply operations directly to the workflow state
     const {
       state: modifiedWorkflowState,
       validationErrors,
       skippedItems,
-    } = applyOperationsToWorkflowState(workflowState, operations, permissionConfig)
+    } = applyOperationsToWorkflowState(workflowState, operationsToApply, permissionConfig)
+
+    // Add credential validation errors
+    validationErrors.push(...credentialErrors)
 
     // Get workspaceId for selector validation
     let workspaceId: string | undefined
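The contract of the new `preValidateCredentialInputs` above: invalid OAuth credential IDs are stripped from operation inputs and surfaced as validation errors, while `apiKey` inputs for hosted models are dropped silently. The standalone snippet below re-implements just the hosted-model `apiKey` filtering on plain data to show the before/after shape; the block IDs, block types, and model names are made up for the example and are not taken from the PR.

```ts
// Simplified, self-contained illustration of the hosted-model apiKey filtering.
// The real function also validates oauth-input credential IDs against the user.
type Operation = {
  block_id: string
  params?: { type?: string; inputs?: Record<string, unknown> }
}

function dropHostedApiKeys(operations: Operation[], hostedModels: string[]): Operation[] {
  const hosted = new Set(hostedModels.map((m) => m.toLowerCase()))
  const filtered = structuredClone(operations)

  for (const op of filtered) {
    const inputs = op.params?.inputs
    const model = inputs?.model
    if (inputs?.apiKey && typeof model === 'string' && hosted.has(model.toLowerCase())) {
      // The key is removed silently; no validation error is recorded for this case.
      delete inputs.apiKey
    }
  }
  return filtered
}

// Example: the apiKey on a hosted model is stripped, the self-hosted one is kept.
const ops: Operation[] = [
  { block_id: 'agent-1', params: { type: 'agent', inputs: { model: 'gpt-4o', apiKey: 'sk-example' } } },
  { block_id: 'agent-2', params: { type: 'agent', inputs: { model: 'local-llm', apiKey: 'sk-example' } } },
]
console.log(dropHostedApiKeys(ops, ['gpt-4o']))
```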
@@ -618,6 +618,13 @@ export function getToolOutputs(
   }
 }
 
+/**
+ * Generates output paths for a tool-based block.
+ *
+ * @param blockConfig - The block configuration containing tools config
+ * @param subBlocks - SubBlock values for tool selection and condition evaluation
+ * @returns Array of output paths for the tool, or empty array on error
+ */
 export function getToolOutputPaths(
   blockConfig: BlockConfig,
   subBlocks?: Record<string, SubBlockWithValue>
@@ -627,22 +634,12 @@ export function getToolOutputPaths(
   if (!outputs || Object.keys(outputs).length === 0) return []
 
   if (subBlocks && blockConfig.outputs) {
+    const filteredBlockOutputs = filterOutputsByCondition(blockConfig.outputs, subBlocks)
+    const allowedKeys = new Set(Object.keys(filteredBlockOutputs))
+
     const filteredOutputs: Record<string, any> = {}
 
     for (const [key, value] of Object.entries(outputs)) {
-      const blockOutput = blockConfig.outputs[key]
-
-      if (!blockOutput || typeof blockOutput !== 'object') {
-        filteredOutputs[key] = value
-        continue
-      }
-
-      const condition = 'condition' in blockOutput ? blockOutput.condition : undefined
-      if (condition) {
-        if (evaluateOutputCondition(condition, subBlocks)) {
-          filteredOutputs[key] = value
-        }
-      } else {
+      if (allowedKeys.has(key)) {
         filteredOutputs[key] = value
       }
     }
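The refactor above replaces per-key condition checks with a precomputed allow-list: `filterOutputsByCondition` (defined elsewhere in the codebase, not shown in this compare) evaluates the output conditions once, and the loop then keeps only the tool output keys that survived. A minimal standalone illustration of that key-intersection step, with made-up data:

```ts
// Key-intersection step only; the condition evaluation itself happens in
// filterOutputsByCondition, which this compare does not show.
function keepAllowedOutputs(
  toolOutputs: Record<string, unknown>,
  filteredBlockOutputs: Record<string, unknown>
): Record<string, unknown> {
  const allowedKeys = new Set(Object.keys(filteredBlockOutputs))
  return Object.fromEntries(
    Object.entries(toolOutputs).filter(([key]) => allowedKeys.has(key))
  )
}

// Example: 'debug' is not in the condition-filtered block outputs, so it is dropped.
console.log(
  keepAllowedOutputs(
    { content: 'string', tokens: 'number', debug: 'json' },
    { content: {}, tokens: {} }
  )
)
```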
@@ -27,9 +27,6 @@ export function registerEmitFunctions(
   emitSubblockUpdate = subblockEmit
   emitVariableUpdate = variableEmit
   currentRegisteredWorkflowId = workflowId
-  if (workflowId) {
-    useOperationQueueStore.getState().processNextOperation()
-  }
 }
 
 let currentRegisteredWorkflowId: string | null = null
@@ -265,14 +262,16 @@ export const useOperationQueueStore = create<OperationQueueState>((set, get) =>
       return
     }
 
-    if (!currentRegisteredWorkflowId) {
+    const nextOperation = currentRegisteredWorkflowId
+      ? state.operations.find(
+          (op) => op.status === 'pending' && op.workflowId === currentRegisteredWorkflowId
+        )
+      : state.operations.find((op) => op.status === 'pending')
+    if (!nextOperation) {
       return
     }
 
-    const nextOperation = state.operations.find(
-      (op) => op.status === 'pending' && op.workflowId === currentRegisteredWorkflowId
-    )
-    if (!nextOperation) {
+    if (currentRegisteredWorkflowId && nextOperation.workflowId !== currentRegisteredWorkflowId) {
       return
     }
 
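The queue-store change above does two things: it stops kicking the queue from `registerEmitFunctions`, and it relaxes next-operation selection so that, when no workflow is registered, any pending operation can still be picked, with a separate guard skipping operations that belong to a workflow other than the registered one. A standalone sketch of that selection logic, with the operation type simplified from what the diff shows:

```ts
type QueuedOp = { id: string; status: 'pending' | 'processing' | 'done'; workflowId: string }

// Returns the operation to process next, or null if processing should be skipped.
function pickNextOperation(
  operations: QueuedOp[],
  registeredWorkflowId: string | null
): QueuedOp | null {
  const next = registeredWorkflowId
    ? operations.find((op) => op.status === 'pending' && op.workflowId === registeredWorkflowId)
    : operations.find((op) => op.status === 'pending')
  if (!next) return null

  // Mirror of the new guard in the store: never process an operation for a
  // workflow other than the one whose emit functions are currently registered.
  if (registeredWorkflowId && next.workflowId !== registeredWorkflowId) return null

  return next
}

// With no registered workflow, the first pending operation is still returned.
console.log(pickNextOperation([{ id: 'a', status: 'pending', workflowId: 'wf-1' }], null))
```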
@@ -38,12 +38,11 @@ export const storageUploadTool: ToolConfig<
      visibility: 'user-or-llm',
      description: 'Optional folder path (e.g., "folder/subfolder/")',
    },
-    fileData: {
-      type: 'json',
+    fileContent: {
+      type: 'string',
      required: true,
      visibility: 'user-or-llm',
-      description:
-        'File to upload - UserFile object (basic mode) or string content (advanced mode: base64 or plain text). Supports data URLs.',
+      description: 'The file content (base64 encoded for binary files, or plain text)',
    },
    contentType: {
      type: 'string',
@@ -66,28 +65,65 @@ export const storageUploadTool: ToolConfig<
    },
 
  request: {
-    url: '/api/tools/supabase/storage-upload',
+    url: (params) => {
+      // Combine folder path and fileName, ensuring proper formatting
+      let fullPath = params.fileName
+      if (params.path) {
+        // Ensure path ends with / and doesn't have double slashes
+        const folderPath = params.path.endsWith('/') ? params.path : `${params.path}/`
+        fullPath = `${folderPath}${params.fileName}`
+      }
+      return `https://${params.projectId}.supabase.co/storage/v1/object/${params.bucket}/${fullPath}`
+    },
    method: 'POST',
-    headers: () => ({
-      'Content-Type': 'application/json',
-    }),
-    body: (params) => ({
-      projectId: params.projectId,
-      apiKey: params.apiKey,
-      bucket: params.bucket,
-      fileName: params.fileName,
-      path: params.path,
-      fileData: params.fileData,
-      contentType: params.contentType,
-      upsert: params.upsert,
-    }),
+    headers: (params) => {
+      const headers: Record<string, string> = {
+        apikey: params.apiKey,
+        Authorization: `Bearer ${params.apiKey}`,
+      }
+
+      if (params.contentType) {
+        headers['Content-Type'] = params.contentType
+      }
+
+      if (params.upsert) {
+        headers['x-upsert'] = 'true'
+      }
+
+      return headers
+    },
+    body: (params) => {
+      // Return the file content wrapped in an object
+      // The actual upload will need to handle this appropriately
+      return {
+        content: params.fileContent,
+      }
+    },
  },
 
+  transformResponse: async (response: Response) => {
+    let data
+    try {
+      data = await response.json()
+    } catch (parseError) {
+      throw new Error(`Failed to parse Supabase storage upload response: ${parseError}`)
+    }
+
+    return {
+      success: true,
+      output: {
+        message: 'Successfully uploaded file to storage',
+        results: data,
+      },
+      error: undefined,
+    }
+  },
+
  outputs: {
    message: { type: 'string', description: 'Operation status message' },
    results: {
      type: 'object',
-      description: 'Upload result including file path, bucket, and public URL',
+      description: 'Upload result including file path and metadata',
    },
  },
 }
@@ -136,7 +136,7 @@ export interface SupabaseStorageUploadParams {
  bucket: string
  fileName: string
  path?: string
-  fileData: any // UserFile object (basic mode) or string (advanced mode: base64/plain text)
+  fileContent: string
  contentType?: string
  upsert?: boolean
 }
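On this branch the storage upload tool stops posting to the internal `/api/tools/supabase/storage-upload` proxy and instead builds a direct Supabase Storage object URL with `apikey`/`Authorization` headers. The snippet below only reproduces the URL and header construction from the request config above as plain functions, with placeholder parameter values; it makes no claim about how Supabase handles the `{ content: ... }` body the tool currently sends (the diff's own comment flags that as unresolved).

```ts
interface UploadParams {
  projectId: string
  apiKey: string
  bucket: string
  fileName: string
  path?: string
  contentType?: string
  upsert?: boolean
}

// Same URL construction as the new `url` builder in the tool config.
function buildStorageObjectUrl(params: UploadParams): string {
  let fullPath = params.fileName
  if (params.path) {
    const folderPath = params.path.endsWith('/') ? params.path : `${params.path}/`
    fullPath = `${folderPath}${params.fileName}`
  }
  return `https://${params.projectId}.supabase.co/storage/v1/object/${params.bucket}/${fullPath}`
}

// Same header construction as the new `headers` builder in the tool config.
function buildStorageHeaders(params: UploadParams): Record<string, string> {
  const headers: Record<string, string> = {
    apikey: params.apiKey,
    Authorization: `Bearer ${params.apiKey}`,
  }
  if (params.contentType) headers['Content-Type'] = params.contentType
  if (params.upsert) headers['x-upsert'] = 'true'
  return headers
}

// Placeholder values, for illustration only.
const example: UploadParams = {
  projectId: 'my-project-ref',
  apiKey: 'service-role-or-anon-key',
  bucket: 'avatars',
  fileName: 'photo.png',
  path: 'users/42',
  contentType: 'image/png',
  upsert: true,
}
console.log(buildStorageObjectUrl(example))
console.log(buildStorageHeaders(example))
```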