Compare commits


1 Commit

Author SHA1 Message Date
Siddharth Ganesan
5f0ae238be fix(copilot): pre-validate credentials and apiKeys before applying edits
- Add preValidateCredentialInputs to validate inputs before operations are applied
- Block invalid credential IDs (non-existent or not owned by user) from being set
- Filter out apiKey inputs for hosted models when isHosted is true
- Skip oauth-input in post-validation to preserve existing collaborator credentials
- Return validation errors for LLM feedback on blocked inputs

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-01-25 13:09:48 -08:00
61 changed files with 687 additions and 1028 deletions
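
The bullet points above amount to a single pre-validation pass over the proposed edit operations before they are applied to the workflow. The sketch below is an illustration of that flow only, not code from this commit: the EditOperation shape and the helpers isCredentialOwnedBy and isHostedModel are assumed stand-ins for the real lookups (the actual preValidateCredentialInputs addition appears in the copilot diff near the end of this compare).

interface EditOperation {
  blockId: string
  blockType: string
  inputs: Record<string, unknown>
}

interface ValidationError {
  blockId: string
  field: string
  message: string
}

// Hypothetical helpers standing in for the commit's credential and model lookups.
declare function isCredentialOwnedBy(credentialId: string, userId: string): Promise<boolean>
declare function isHostedModel(model: string): boolean

// Sketch: drop credential IDs the user doesn't own, strip apiKey inputs for
// hosted models, and collect one error per removed input for LLM feedback.
async function preValidate(
  operations: EditOperation[],
  userId: string,
  isHosted: boolean
): Promise<{ operations: EditOperation[]; errors: ValidationError[] }> {
  const errors: ValidationError[] = []
  const filtered: EditOperation[] = []

  for (const op of operations) {
    const inputs = { ...op.inputs }

    // Block invalid credential IDs (non-existent or not owned by the user).
    const credential = inputs.credential
    if (typeof credential === 'string' && credential.trim() !== '') {
      if (!(await isCredentialOwnedBy(credential, userId))) {
        delete inputs.credential
        errors.push({
          blockId: op.blockId,
          field: 'credential',
          message: 'Credential does not exist or is not owned by the user',
        })
      }
    }

    // On the hosted platform, apiKey inputs for hosted models are ignored.
    const model = inputs.model
    if (isHosted && inputs.apiKey && typeof model === 'string' && isHostedModel(model)) {
      delete inputs.apiKey
      errors.push({
        blockId: op.blockId,
        field: 'apiKey',
        message: `apiKey is managed by the platform for hosted model "${model}"`,
      })
    }

    filtered.push({ ...op, inputs })
  }

  return { operations: filtered, errors }
}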

View File

@@ -44,7 +44,7 @@ services:
     deploy:
       resources:
         limits:
-          memory: 1G
+          memory: 4G
     environment:
       - NODE_ENV=development
       - DATABASE_URL=postgresql://postgres:postgres@db:5432/simstudio

View File

@@ -1,40 +0,0 @@
'use client'
import { getAssetUrl } from '@/lib/utils'
interface ActionImageProps {
src: string
alt: string
}
interface ActionVideoProps {
src: string
alt: string
}
export function ActionImage({ src, alt }: ActionImageProps) {
const resolvedSrc = getAssetUrl(src.startsWith('/') ? src.slice(1) : src)
return (
<img
src={resolvedSrc}
alt={alt}
className='inline-block w-full max-w-[200px] rounded border border-neutral-200 dark:border-neutral-700'
/>
)
}
export function ActionVideo({ src, alt }: ActionVideoProps) {
const resolvedSrc = getAssetUrl(src.startsWith('/') ? src.slice(1) : src)
return (
<video
src={resolvedSrc}
autoPlay
loop
muted
playsInline
className='inline-block w-full max-w-[200px] rounded border border-neutral-200 dark:border-neutral-700'
/>
)
}

View File

@@ -10,20 +10,12 @@ Stellen Sie Sim auf Ihrer eigenen Infrastruktur mit Docker oder Kubernetes berei

 ## Anforderungen

-| Ressource | Klein | Standard | Produktion |
-|----------|-------|----------|------------|
-| CPU | 2 Kerne | 4 Kerne | 8+ Kerne |
-| RAM | 12 GB | 16 GB | 32+ GB |
-| Speicher | 20 GB SSD | 50 GB SSD | 100+ GB SSD |
-| Docker | 20.10+ | 20.10+ | Neueste Version |
-
-**Klein**: Entwicklung, Tests, Einzelnutzer (1-5 Nutzer)
-**Standard**: Teams (5-50 Nutzer), moderate Arbeitslasten
-**Produktion**: Große Teams (50+ Nutzer), Hochverfügbarkeit, intensive Workflow-Ausführung
-
-<Callout type="info">
-Die Ressourcenanforderungen werden durch Workflow-Ausführung (isolated-vm Sandboxing), Dateiverarbeitung (In-Memory-Dokumentenparsing) und Vektoroperationen (pgvector) bestimmt. Arbeitsspeicher ist typischerweise der limitierende Faktor, nicht CPU. Produktionsdaten zeigen, dass die Hauptanwendung durchschnittlich 4-8 GB und bei hoher Last bis zu 12 GB benötigt.
-</Callout>
+| Ressource | Minimum | Empfohlen |
+|----------|---------|-------------|
+| CPU | 2 Kerne | 4+ Kerne |
+| RAM | 12 GB | 16+ GB |
+| Speicher | 20 GB SSD | 50+ GB SSD |
+| Docker | 20.10+ | Neueste Version |

 ## Schnellstart

View File

@@ -4,7 +4,6 @@ description: Essential actions for navigating and using the Sim workflow editor
 ---

 import { Callout } from 'fumadocs-ui/components/callout'
-import { ActionImage, ActionVideo } from '@/components/ui/action-media'

 A quick lookup for everyday actions in the Sim workflow editor. For keyboard shortcuts, see [Keyboard Shortcuts](/keyboard-shortcuts).

@@ -14,362 +13,124 @@ A quick lookup for everyday actions in the Sim workflow editor. For keyboard sho
 ## Workspaces

-<table>
-<thead>
-<tr><th>Action</th><th>How</th><th>Preview</th></tr>
-</thead>
-<tbody>
-<tr>
-<td>Create a workspace</td>
-<td>Click workspace dropdown → **New Workspace**</td>
-<td><ActionVideo src="/static/quick-reference/create-workspace.mp4" alt="Create workspace" /></td>
-</tr>
-<tr>
-<td>Switch workspaces</td>
-<td>Click workspace dropdown → Select workspace</td>
-<td><ActionVideo src="/static/quick-reference/switch-workspace.mp4" alt="Switch workspaces" /></td>
-</tr>
-<tr>
-<td>Invite team members</td>
-<td>Sidebar → **Invite**</td>
-<td><ActionVideo src="/static/quick-reference/invite.mp4" alt="Invite team members" /></td>
-</tr>
-<tr>
-<td>Rename a workspace</td>
-<td>Right-click workspace → **Rename**</td>
-<td rowSpan={4}><ActionImage src="/static/quick-reference/workspace-context-menu.png" alt="Workspace context menu" /></td>
-</tr>
-<tr>
-<td>Duplicate a workspace</td>
-<td>Right-click workspace → **Duplicate**</td>
-</tr>
-<tr>
-<td>Export a workspace</td>
-<td>Right-click workspace → **Export**</td>
-</tr>
-<tr>
-<td>Delete a workspace</td>
-<td>Right-click workspace → **Delete**</td>
-</tr>
-</tbody>
-</table>
+| Action | How |
+|--------|-----|
+| Create a workspace | Click workspace dropdown in sidebar → **New Workspace** |
+| Rename a workspace | Workspace settings → Edit name |
+| Switch workspaces | Click workspace dropdown in sidebar → Select workspace |
+| Invite team members | Workspace settings → **Team** → **Invite** |

 ## Workflows

-<table>
-<thead>
-<tr><th>Action</th><th>How</th><th>Preview</th></tr>
-</thead>
-<tbody>
-<tr>
-<td>Create a workflow</td>
-<td>Click **+** button in sidebar</td>
-<td><ActionImage src="/static/quick-reference/create-workflow.png" alt="Create workflow" /></td>
-</tr>
-<tr>
-<td>Reorder / move workflows</td>
-<td>Drag workflow up/down or onto a folder</td>
-<td><ActionVideo src="/static/quick-reference/reordering.mp4" alt="Reorder workflows" /></td>
-</tr>
-<tr>
-<td>Import a workflow</td>
-<td>Click import button in sidebar → Select file</td>
-<td><ActionImage src="/static/quick-reference/import-workflow.png" alt="Import workflow" /></td>
-</tr>
-<tr>
-<td>Multi-select workflows</td>
-<td>`Mod+Click` or `Shift+Click` workflows in sidebar</td>
-<td><ActionVideo src="/static/quick-reference/multiselect.mp4" alt="Multi-select workflows" /></td>
-</tr>
-<tr>
-<td>Open in new tab</td>
-<td>Right-click workflow → **Open in New Tab**</td>
-<td rowSpan={6}><ActionImage src="/static/quick-reference/workflow-context-menu.png" alt="Workflow context menu" /></td>
-</tr>
-<tr>
-<td>Rename a workflow</td>
-<td>Right-click workflow → **Rename**</td>
-</tr>
-<tr>
-<td>Assign workflow color</td>
-<td>Right-click workflow → **Change Color**</td>
-</tr>
-<tr>
-<td>Duplicate a workflow</td>
-<td>Right-click workflow → **Duplicate**</td>
-</tr>
-<tr>
-<td>Export a workflow</td>
-<td>Right-click workflow → **Export**</td>
-</tr>
-<tr>
-<td>Delete a workflow</td>
-<td>Right-click workflow → **Delete**</td>
-</tr>
-<tr>
-<td>Rename a folder</td>
-<td>Right-click folder → **Rename**</td>
-<td rowSpan={6}><ActionImage src="/static/quick-reference/folder-context-menu.png" alt="Folder context menu" /></td>
-</tr>
-<tr>
-<td>Create workflow in folder</td>
-<td>Right-click folder → **Create workflow**</td>
-</tr>
-<tr>
-<td>Create folder in folder</td>
-<td>Right-click folder → **Create folder**</td>
-</tr>
-<tr>
-<td>Duplicate a folder</td>
-<td>Right-click folder → **Duplicate**</td>
-</tr>
-<tr>
-<td>Export a folder</td>
-<td>Right-click folder → **Export**</td>
-</tr>
-<tr>
-<td>Delete a folder</td>
-<td>Right-click folder → **Delete**</td>
-</tr>
-</tbody>
-</table>
+| Action | How |
+|--------|-----|
+| Create a workflow | Click **New Workflow** button or `Mod+Shift+A` |
+| Rename a workflow | Double-click workflow name in sidebar, or right-click → **Rename** |
+| Duplicate a workflow | Right-click workflow → **Duplicate** |
+| Reorder workflows | Drag workflow up/down in the sidebar list |
+| Import a workflow | Sidebar menu → **Import** → Select file |
+| Create a folder | Right-click in sidebar → **New Folder** |
+| Rename a folder | Right-click folder → **Rename** |
+| Delete a folder | Right-click folder → **Delete** |
+| Collapse/expand folder | Click folder arrow, or double-click folder |
+| Move workflow to folder | Drag workflow onto folder in sidebar |
+| Delete a workflow | Right-click workflow → **Delete** |
+| Export a workflow | Right-click workflow → **Export** |
+| Assign workflow color | Right-click workflow → **Change Color** |
+| Multi-select workflows | `Mod+Click` or `Shift+Click` workflows in sidebar |
+| Open in new tab | Right-click workflow → **Open in New Tab** |

 ## Blocks

-<table>
-<thead>
-<tr><th>Action</th><th>How</th><th>Preview</th></tr>
-</thead>
-<tbody>
-<tr>
-<td>Add a block</td>
-<td>Drag from Toolbar panel, or right-click canvas → **Add Block**</td>
-<td><ActionVideo src="/static/quick-reference/add-block.mp4" alt="Add a block" /></td>
-</tr>
-<tr>
-<td>Multi-select blocks</td>
-<td>`Mod+Click` additional blocks, or shift-drag to draw selection box</td>
-<td><ActionVideo src="/static/quick-reference/multiselect-blocks.mp4" alt="Multi-select blocks" /></td>
-</tr>
-<tr>
-<td>Copy blocks</td>
-<td>`Mod+C` with blocks selected</td>
-<td rowSpan={2}><ActionVideo src="/static/quick-reference/copy-paste.mp4" alt="Copy and paste blocks" /></td>
-</tr>
-<tr>
-<td>Paste blocks</td>
-<td>`Mod+V` to paste copied blocks</td>
-</tr>
-<tr>
-<td>Duplicate blocks</td>
-<td>Right-click → **Duplicate**</td>
-<td><ActionVideo src="/static/quick-reference/duplicate-block.mp4" alt="Duplicate blocks" /></td>
-</tr>
-<tr>
-<td>Delete blocks</td>
-<td>`Delete` or `Backspace` key, or right-click → **Delete**</td>
-<td><ActionImage src="/static/quick-reference/delete-block.png" alt="Delete block" /></td>
-</tr>
-<tr>
-<td>Rename a block</td>
-<td>Click block name in header, or edit in the Editor panel</td>
-<td><ActionVideo src="/static/quick-reference/rename-block.mp4" alt="Rename a block" /></td>
-</tr>
-<tr>
-<td>Enable/Disable a block</td>
-<td>Right-click → **Enable/Disable**</td>
-<td><ActionImage src="/static/quick-reference/disable-block.png" alt="Disable block" /></td>
-</tr>
-<tr>
-<td>Toggle handle orientation</td>
-<td>Right-click → **Toggle Handles**</td>
-<td><ActionVideo src="/static/quick-reference/toggle-handles.mp4" alt="Toggle handle orientation" /></td>
-</tr>
-<tr>
-<td>Configure a block</td>
-<td>Select block → use Editor panel on right</td>
-<td><ActionVideo src="/static/quick-reference/configure-block.mp4" alt="Configure a block" /></td>
-</tr>
-</tbody>
-</table>
+| Action | How |
+|--------|-----|
+| Add a block | Drag from Toolbar panel, or right-click canvas → **Add Block** |
+| Select a block | Click on the block |
+| Multi-select blocks | `Mod+Click` additional blocks, or right-drag to draw selection box |
+| Move blocks | Drag selected block(s) to new position |
+| Copy blocks | `Mod+C` with blocks selected |
+| Paste blocks | `Mod+V` to paste copied blocks |
+| Duplicate blocks | Right-click → **Duplicate** |
+| Delete blocks | `Delete` or `Backspace` key, or right-click → **Delete** |
+| Rename a block | Click block name in header, or edit in the Editor panel |
+| Enable/Disable a block | Right-click → **Enable/Disable** |
+| Toggle handle orientation | Right-click → **Toggle Handles** |
+| Toggle trigger mode | Right-click trigger block → **Toggle Trigger Mode** |
+| Configure a block | Select block → use Editor panel on right |

 ## Connections

-<table>
-<thead>
-<tr><th>Action</th><th>How</th><th>Preview</th></tr>
-</thead>
-<tbody>
-<tr>
-<td>Create a connection</td>
-<td>Drag from output handle to input handle</td>
-<td><ActionVideo src="/static/quick-reference/connect-blocks.mp4" alt="Connect blocks" /></td>
-</tr>
-<tr>
-<td>Delete a connection</td>
-<td>Click edge to select `Delete` key</td>
-<td><ActionVideo src="/static/quick-reference/delete-connection.mp4" alt="Delete connection" /></td>
-</tr>
-<tr>
-<td>Use output in another block</td>
-<td>Drag connection tag into input field</td>
-<td><ActionVideo src="/static/quick-reference/connection-tag.mp4" alt="Use connection tag" /></td>
-</tr>
-</tbody>
-</table>
+| Action | How |
+|--------|-----|
+| Create a connection | Drag from output handle to input handle |
+| Delete a connection | Click edge to select → `Delete` key |
+| Use output in another block | Drag connection tag into input field |
+
+## Canvas Navigation
+
+| Action | How |
+|--------|-----|
+| Pan/move canvas | Left-drag on empty space, or scroll/trackpad |
+| Zoom in/out | Scroll wheel or pinch gesture |
+| Auto-layout | `Shift+L` |
+| Draw selection box | Right-drag on empty canvas area |

 ## Panels & Views

-<table>
-<thead>
-<tr><th>Action</th><th>How</th><th>Preview</th></tr>
-</thead>
-<tbody>
-<tr>
-<td>Search toolbar</td>
-<td>`Mod+F`</td>
-<td><ActionVideo src="/static/quick-reference/search-toolbar.mp4" alt="Search toolbar" /></td>
-</tr>
-<tr>
-<td>Search everything</td>
-<td>`Mod+K`</td>
-<td><ActionImage src="/static/quick-reference/search-everything.png" alt="Search everything" /></td>
-</tr>
-<tr>
-<td>Toggle manual mode</td>
-<td>Click toggle button to switch between manual and selector</td>
-<td><ActionImage src="/static/quick-reference/toggle-manual-mode.png" alt="Toggle manual mode" /></td>
-</tr>
-<tr>
-<td>Collapse/expand sidebar</td>
-<td>Click collapse button on sidebar</td>
-<td><ActionVideo src="/static/quick-reference/collapse-sidebar.mp4" alt="Collapse sidebar" /></td>
-</tr>
-</tbody>
-</table>
+| Action | How |
+|--------|-----|
+| Open Copilot tab | Press `C` or click Copilot tab |
+| Open Toolbar tab | Press `T` or click Toolbar tab |
+| Open Editor tab | Press `E` or click Editor tab |
+| Search toolbar | `Mod+F` |
+| Toggle advanced mode | Click toggle button on input fields |
+| Resize panels | Drag panel edge |
+| Collapse/expand sidebar | Click collapse button on sidebar |

 ## Running & Testing

-<table>
-<thead>
-<tr><th>Action</th><th>How</th><th>Preview</th></tr>
-</thead>
-<tbody>
-<tr>
-<td>Run workflow</td>
-<td>Click Run Workflow button or `Mod+Enter`</td>
-<td><ActionImage src="/static/quick-reference/run-workflow.png" alt="Run workflow" /></td>
-</tr>
-<tr>
-<td>Stop workflow</td>
-<td>Click Stop button or `Mod+Enter` while running</td>
-<td><ActionImage src="/static/quick-reference/stop-workflow.png" alt="Stop workflow" /></td>
-</tr>
-<tr>
-<td>Test with chat</td>
-<td>Use Chat panel on the right side</td>
-<td><ActionImage src="/static/quick-reference/test-chat.png" alt="Test with chat" /></td>
-</tr>
-<tr>
-<td>Select output to view</td>
-<td>Click dropdown in Chat panel → Select block output</td>
-<td><ActionImage src="/static/quick-reference/output-select.png" alt="Select output to view" /></td>
-</tr>
-<tr>
-<td>Clear chat history</td>
-<td>Click clear button in Chat panel</td>
-<td><ActionImage src="/static/quick-reference/clear-chat.png" alt="Clear chat history" /></td>
-</tr>
-<tr>
-<td>View execution logs</td>
-<td>Open terminal panel at bottom, or `Mod+L`</td>
-<td><ActionImage src="/static/quick-reference/terminal.png" alt="Execution logs terminal" /></td>
-</tr>
-<tr>
-<td>Filter logs by block or status</td>
-<td>Click block filter in terminal or right-click log entry → **Filter by Block** or **Filter by Status**</td>
-<td><ActionImage src="/static/quick-reference/filter-block.png" alt="Filter logs by block" /></td>
-</tr>
-<tr>
-<td>Search logs</td>
-<td>Use search field in terminal or right-click log entry → **Search**</td>
-<td><ActionImage src="/static/quick-reference/terminal-search.png" alt="Search logs" /></td>
-</tr>
-<tr>
-<td>Copy log entry</td>
-<td>Clipboard Icon or Right-click log entry → **Copy**</td>
-<td><ActionImage src="/static/quick-reference/copy-log.png" alt="Copy log entry" /></td>
-</tr>
-<tr>
-<td>Clear terminal</td>
-<td>Trash icon or `Mod+D`</td>
-<td><ActionImage src="/static/quick-reference/clear-terminal.png" alt="Clear terminal" /></td>
-</tr>
-</tbody>
-</table>
+| Action | How |
+|--------|-----|
+| Run workflow | Click Play button or `Mod+Enter` |
+| Stop workflow | Click Stop button or `Mod+Enter` while running |
+| Test with chat | Use Chat panel on the right side |
+| Select output to view | Click dropdown in Chat panel → Select block output |
+| Clear chat history | Click clear button in Chat panel |
+| View execution logs | Open terminal panel at bottom, or `Mod+L` |
+| Filter logs by block | Click block filter in terminal |
+| Filter logs by status | Click status filter in terminal |
+| Search logs | Use search field in terminal |
+| Copy log entry | Right-click log entry → **Copy** |
+| Clear terminal | `Mod+D` |

 ## Deployment

-<table>
-<thead>
-<tr><th>Action</th><th>How</th><th>Preview</th></tr>
-</thead>
-<tbody>
-<tr>
-<td>Deploy a workflow</td>
-<td>Click **Deploy** button in panel</td>
-<td><ActionImage src="/static/quick-reference/deploy.png" alt="Deploy workflow" /></td>
-</tr>
-<tr>
-<td>Update deployment</td>
-<td>Click **Update** when changes are detected</td>
-<td><ActionImage src="/static/quick-reference/update-deployment.png" alt="Update deployment" /></td>
-</tr>
-<tr>
-<td>View deployment status</td>
-<td>Check status indicator (Live/Update/Deploy) in Deploy tab</td>
-<td><ActionImage src="/static/quick-reference/view-deployment.png" alt="View deployment status" /></td>
-</tr>
-<tr>
-<td>Revert deployment</td>
-<td>Access previous versions in Deploy tab → **Promote to live**</td>
-<td><ActionImage src="/static/quick-reference/promote-deployment.png" alt="Promote deployment to live" /></td>
-</tr>
-<tr>
-<td>Copy API endpoint</td>
-<td>Deploy tab → Copy API endpoint URL</td>
-<td><ActionImage src="/static/quick-reference/copy-api.png" alt="Copy API endpoint" /></td>
-</tr>
-</tbody>
-</table>
+| Action | How |
+|--------|-----|
+| Deploy a workflow | Click **Deploy** button in Deploy tab |
+| Update deployment | Click **Update** when changes are detected |
+| View deployment status | Check status indicator (Live/Update/Deploy) in Deploy tab |
+| Revert deployment | Access previous versions in Deploy tab |
+| Copy webhook URL | Deploy tab → Copy webhook URL |
+| Copy API endpoint | Deploy tab → Copy API endpoint URL |
+| Set up a schedule | Add Schedule trigger block → Configure interval |

 ## Variables

-<table>
-<thead>
-<tr><th>Action</th><th>How</th><th>Preview</th></tr>
-</thead>
-<tbody>
-<tr>
-<td>Add / Edit / Delete workflow variable</td>
-<td>Panel -> Variables -> **Add Variable**, click to edit, or delete icon</td>
-<td><ActionImage src="/static/quick-reference/variables.png" alt="Variables panel" /></td>
-</tr>
-<tr>
-<td>Add environment variable</td>
-<td>Settings → **Environment Variables** → **Add**</td>
-<td><ActionImage src="/static/quick-reference/add-env-variable.png" alt="Add environment variable" /></td>
-</tr>
-<tr>
-<td>Reference a workflow variable</td>
-<td>Use `<blockName.itemName>` syntax in block inputs</td>
-<td><ActionImage src="/static/quick-reference/variable-reference.png" alt="Reference workflow variable" /></td>
-</tr>
-<tr>
-<td>Reference an environment variable</td>
-<td>Use `&#123;&#123;ENV_VAR&#125;&#125;` syntax in block inputs</td>
-<td><ActionImage src="/static/quick-reference/env-variable-reference.png" alt="Reference environment variable" /></td>
-</tr>
-</tbody>
-</table>
+| Action | How |
+|--------|-----|
+| Add workflow variable | Variables tab → **Add Variable** |
+| Edit workflow variable | Variables tab → Click variable to edit |
+| Delete workflow variable | Variables tab → Click delete icon on variable |
+| Add environment variable | Settings → **Environment Variables** → **Add** |
+| Reference a variable | Use `{{variableName}}` syntax in block inputs |
+
+## Credentials
+
+| Action | How |
+|--------|-----|
+| Add API key | Block credential field → **Add Credential** → Enter API key |
+| Connect OAuth account | Block credential field → **Connect** → Authorize with provider |
+| Manage credentials | Settings → **Credentials** |
+| Remove credential | Settings → **Credentials** → Delete credential |

View File

@@ -16,20 +16,12 @@ Deploy Sim on your own infrastructure with Docker or Kubernetes.

 ## Requirements

-| Resource | Small | Standard | Production |
-|----------|-------|----------|------------|
-| CPU | 2 cores | 4 cores | 8+ cores |
-| RAM | 12 GB | 16 GB | 32+ GB |
-| Storage | 20 GB SSD | 50 GB SSD | 100+ GB SSD |
-| Docker | 20.10+ | 20.10+ | Latest |
-
-**Small**: Development, testing, single user (1-5 users)
-**Standard**: Teams (5-50 users), moderate workloads
-**Production**: Large teams (50+ users), high availability, heavy workflow execution
-
-<Callout type="info">
-Resource requirements are driven by workflow execution (isolated-vm sandboxing), file processing (in-memory document parsing), and vector operations (pgvector). Memory is typically the constraining factor rather than CPU. Production telemetry shows the main app uses 4-8 GB average with peaks up to 12 GB under heavy load.
-</Callout>
+| Resource | Minimum | Recommended |
+|----------|---------|-------------|
+| CPU | 2 cores | 4+ cores |
+| RAM | 12 GB | 16+ GB |
+| Storage | 20 GB SSD | 50+ GB SSD |
+| Docker | 20.10+ | Latest |

 ## Quick Start

View File

@@ -10,20 +10,12 @@ Despliega Sim en tu propia infraestructura con Docker o Kubernetes.

 ## Requisitos

-| Recurso | Pequeño | Estándar | Producción |
-|----------|---------|----------|------------|
-| CPU | 2 núcleos | 4 núcleos | 8+ núcleos |
-| RAM | 12 GB | 16 GB | 32+ GB |
-| Almacenamiento | 20 GB SSD | 50 GB SSD | 100+ GB SSD |
-| Docker | 20.10+ | 20.10+ | Última versión |
-
-**Pequeño**: Desarrollo, pruebas, usuario único (1-5 usuarios)
-**Estándar**: Equipos (5-50 usuarios), cargas de trabajo moderadas
-**Producción**: Equipos grandes (50+ usuarios), alta disponibilidad, ejecución intensiva de workflows
-
-<Callout type="info">
-Los requisitos de recursos están determinados por la ejecución de workflows (sandboxing isolated-vm), procesamiento de archivos (análisis de documentos en memoria) y operaciones vectoriales (pgvector). La memoria suele ser el factor limitante, no la CPU. La telemetría de producción muestra que la aplicación principal usa 4-8 GB en promedio con picos de hasta 12 GB bajo carga pesada.
-</Callout>
+| Recurso | Mínimo | Recomendado |
+|----------|---------|-------------|
+| CPU | 2 núcleos | 4+ núcleos |
+| RAM | 12 GB | 16+ GB |
+| Almacenamiento | 20 GB SSD | 50+ GB SSD |
+| Docker | 20.10+ | Última versión |

 ## Inicio rápido

View File

@@ -10,20 +10,12 @@ Déployez Sim sur votre propre infrastructure avec Docker ou Kubernetes.

 ## Prérequis

-| Ressource | Petit | Standard | Production |
-|----------|-------|----------|------------|
-| CPU | 2 cœurs | 4 cœurs | 8+ cœurs |
-| RAM | 12 Go | 16 Go | 32+ Go |
-| Stockage | 20 Go SSD | 50 Go SSD | 100+ Go SSD |
-| Docker | 20.10+ | 20.10+ | Dernière version |
-
-**Petit** : Développement, tests, utilisateur unique (1-5 utilisateurs)
-**Standard** : Équipes (5-50 utilisateurs), charges de travail modérées
-**Production** : Grandes équipes (50+ utilisateurs), haute disponibilité, exécution intensive de workflows
-
-<Callout type="info">
-Les besoins en ressources sont déterminés par l'exécution des workflows (sandboxing isolated-vm), le traitement des fichiers (analyse de documents en mémoire) et les opérations vectorielles (pgvector). La mémoire est généralement le facteur limitant, pas le CPU. La télémétrie de production montre que l'application principale utilise 4-8 Go en moyenne avec des pics jusqu'à 12 Go sous forte charge.
-</Callout>
+| Ressource | Minimum | Recommandé |
+|----------|---------|-------------|
+| CPU | 2 cœurs | 4+ cœurs |
+| RAM | 12 Go | 16+ Go |
+| Stockage | 20 Go SSD | 50+ Go SSD |
+| Docker | 20.10+ | Dernière version |

 ## Démarrage rapide

View File

@@ -10,20 +10,12 @@ DockerまたはKubernetesを使用して、自社のインフラストラクチ

 ## 要件

-| リソース | スモール | スタンダード | プロダクション |
-|----------|---------|-------------|----------------|
-| CPU | 2コア | 4コア | 8+コア |
-| RAM | 12 GB | 16 GB | 32+ GB |
-| ストレージ | 20 GB SSD | 50 GB SSD | 100+ GB SSD |
-| Docker | 20.10+ | 20.10+ | 最新版 |
-
-**スモール**: 開発、テスト、シングルユーザー(1-5ユーザー)
-**スタンダード**: チーム(5-50ユーザー)、中程度のワークロード
-**プロダクション**: 大規模チーム(50+ユーザー)、高可用性、高負荷ワークフロー実行
-
-<Callout type="info">
-リソース要件は、ワークフロー実行(isolated-vmサンドボックス)、ファイル処理(メモリ内ドキュメント解析)、ベクトル演算(pgvector)によって決まります。CPUよりもメモリが制約要因となることが多いです。本番環境のテレメトリによると、メインアプリは平均4-8 GB、高負荷時は最大12 GBを使用します。
-</Callout>
+| リソース | 最小 | 推奨 |
+|----------|---------|-------------|
+| CPU | 2コア | 4+コア |
+| RAM | 12 GB | 16+ GB |
+| ストレージ | 20 GB SSD | 50+ GB SSD |
+| Docker | 20.10+ | 最新版 |

 ## クイックスタート

View File

@@ -10,20 +10,12 @@ import { Callout } from 'fumadocs-ui/components/callout'

 ## 要求

-| 资源 | 小型 | 标准 | 生产环境 |
-|----------|------|------|----------|
-| CPU | 2 核 | 4 核 | 8+ 核 |
-| 内存 | 12 GB | 16 GB | 32+ GB |
-| 存储 | 20 GB SSD | 50 GB SSD | 100+ GB SSD |
-| Docker | 20.10+ | 20.10+ | 最新版本 |
-
-**小型**: 开发、测试、单用户(1-5 用户)
-**标准**: 团队(5-50 用户)、中等工作负载
-**生产环境**: 大型团队(50+ 用户)、高可用性、密集工作流执行
-
-<Callout type="info">
-资源需求由工作流执行(isolated-vm 沙箱)、文件处理(内存中文档解析)和向量运算(pgvector)决定。内存通常是限制因素而不是 CPU。生产遥测数据显示主应用平均使用 4-8 GB,高负载时峰值可达 12 GB。
-</Callout>
+| 资源 | 最低要求 | 推荐配置 |
+|----------|---------|-------------|
+| CPU | 2 核 | 4 核及以上 |
+| 内存 | 12 GB | 16 GB 及以上 |
+| 存储 | 20 GB SSD | 50 GB 及以上 SSD |
+| Docker | 20.10+ | 最新版本 |

 ## 快速开始

29 binary files not shown (deleted media assets, 5.9 KiB to 146 KiB each).

View File

@@ -408,7 +408,6 @@ describe('Knowledge Search Utils', () => {
           input: ['test query'],
           model: 'text-embedding-3-small',
           encoding_format: 'float',
-          dimensions: 1536,
         }),
       })
     )

View File

@@ -0,0 +1,204 @@
import { db } from '@sim/db'
import { member, permissions, user, workspace } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq, or } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
const logger = createLogger('OrganizationWorkspacesAPI')
/**
* GET /api/organizations/[id]/workspaces
* Get workspaces related to the organization with optional filtering
* Query parameters:
* - ?available=true - Only workspaces where user can invite others (admin permissions)
* - ?member=userId - Workspaces where specific member has access
*/
export async function GET(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
try {
const session = await getSession()
if (!session?.user?.id) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
const { id: organizationId } = await params
const url = new URL(request.url)
const availableOnly = url.searchParams.get('available') === 'true'
const memberId = url.searchParams.get('member')
// Verify user is a member of this organization
const memberEntry = await db
.select()
.from(member)
.where(and(eq(member.organizationId, organizationId), eq(member.userId, session.user.id)))
.limit(1)
if (memberEntry.length === 0) {
return NextResponse.json(
{
error: 'Forbidden - Not a member of this organization',
},
{ status: 403 }
)
}
const userRole = memberEntry[0].role
const hasAdminAccess = ['owner', 'admin'].includes(userRole)
if (availableOnly) {
// Get workspaces where user has admin permissions (can invite others)
const availableWorkspaces = await db
.select({
id: workspace.id,
name: workspace.name,
ownerId: workspace.ownerId,
createdAt: workspace.createdAt,
isOwner: eq(workspace.ownerId, session.user.id),
permissionType: permissions.permissionType,
})
.from(workspace)
.leftJoin(
permissions,
and(
eq(permissions.entityType, 'workspace'),
eq(permissions.entityId, workspace.id),
eq(permissions.userId, session.user.id)
)
)
.where(
or(
// User owns the workspace
eq(workspace.ownerId, session.user.id),
// User has admin permission on the workspace
and(
eq(permissions.userId, session.user.id),
eq(permissions.entityType, 'workspace'),
eq(permissions.permissionType, 'admin')
)
)
)
// Filter and format the results
const workspacesWithInvitePermission = availableWorkspaces
.filter((workspace) => {
// Include if user owns the workspace OR has admin permission
return workspace.isOwner || workspace.permissionType === 'admin'
})
.map((workspace) => ({
id: workspace.id,
name: workspace.name,
isOwner: workspace.isOwner,
canInvite: true, // All returned workspaces have invite permission
createdAt: workspace.createdAt,
}))
logger.info('Retrieved available workspaces for organization member', {
organizationId,
userId: session.user.id,
workspaceCount: workspacesWithInvitePermission.length,
})
return NextResponse.json({
success: true,
data: {
workspaces: workspacesWithInvitePermission,
totalCount: workspacesWithInvitePermission.length,
filter: 'available',
},
})
}
if (memberId && hasAdminAccess) {
// Get workspaces where specific member has access (admin only)
const memberWorkspaces = await db
.select({
id: workspace.id,
name: workspace.name,
ownerId: workspace.ownerId,
isOwner: eq(workspace.ownerId, memberId),
permissionType: permissions.permissionType,
createdAt: permissions.createdAt,
})
.from(workspace)
.leftJoin(
permissions,
and(
eq(permissions.entityType, 'workspace'),
eq(permissions.entityId, workspace.id),
eq(permissions.userId, memberId)
)
)
.where(
or(
// Member owns the workspace
eq(workspace.ownerId, memberId),
// Member has permissions on the workspace
and(eq(permissions.userId, memberId), eq(permissions.entityType, 'workspace'))
)
)
const formattedWorkspaces = memberWorkspaces.map((workspace) => ({
id: workspace.id,
name: workspace.name,
isOwner: workspace.isOwner,
permission: workspace.permissionType,
joinedAt: workspace.createdAt,
createdAt: workspace.createdAt,
}))
return NextResponse.json({
success: true,
data: {
workspaces: formattedWorkspaces,
totalCount: formattedWorkspaces.length,
filter: 'member',
memberId,
},
})
}
// Default: Get all workspaces (basic info only for regular members)
if (!hasAdminAccess) {
return NextResponse.json({
success: true,
data: {
workspaces: [],
totalCount: 0,
message: 'Workspace access information is only available to organization admins',
},
})
}
// For admins: Get summary of all workspaces
const allWorkspaces = await db
.select({
id: workspace.id,
name: workspace.name,
ownerId: workspace.ownerId,
createdAt: workspace.createdAt,
ownerName: user.name,
})
.from(workspace)
.leftJoin(user, eq(workspace.ownerId, user.id))
return NextResponse.json({
success: true,
data: {
workspaces: allWorkspaces,
totalCount: allWorkspaces.length,
filter: 'all',
},
userRole,
hasAdminAccess,
})
} catch (error) {
logger.error('Failed to get organization workspaces', { error })
return NextResponse.json(
{
error: 'Internal server error',
},
{ status: 500 }
)
}
}

View File

@@ -1,257 +0,0 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { processSingleFileToUserFile } from '@/lib/uploads/utils/file-utils'
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
export const dynamic = 'force-dynamic'
const logger = createLogger('SupabaseStorageUploadAPI')
const SupabaseStorageUploadSchema = z.object({
projectId: z.string().min(1, 'Project ID is required'),
apiKey: z.string().min(1, 'API key is required'),
bucket: z.string().min(1, 'Bucket name is required'),
fileName: z.string().min(1, 'File name is required'),
path: z.string().optional().nullable(),
fileData: z.any(),
contentType: z.string().optional().nullable(),
upsert: z.boolean().optional().default(false),
})
export async function POST(request: NextRequest) {
const requestId = generateRequestId()
try {
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
if (!authResult.success) {
logger.warn(
`[${requestId}] Unauthorized Supabase storage upload attempt: ${authResult.error}`
)
return NextResponse.json(
{
success: false,
error: authResult.error || 'Authentication required',
},
{ status: 401 }
)
}
logger.info(
`[${requestId}] Authenticated Supabase storage upload request via ${authResult.authType}`,
{
userId: authResult.userId,
}
)
const body = await request.json()
const validatedData = SupabaseStorageUploadSchema.parse(body)
const fileData = validatedData.fileData
const isStringInput = typeof fileData === 'string'
logger.info(`[${requestId}] Uploading to Supabase Storage`, {
bucket: validatedData.bucket,
fileName: validatedData.fileName,
path: validatedData.path,
fileDataType: isStringInput ? 'string' : 'object',
})
if (!fileData) {
return NextResponse.json(
{
success: false,
error: 'fileData is required',
},
{ status: 400 }
)
}
let uploadBody: Buffer
let uploadContentType: string | undefined
if (isStringInput) {
let content = fileData as string
const dataUrlMatch = content.match(/^data:([^;]+);base64,(.+)$/s)
if (dataUrlMatch) {
const [, mimeType, base64Data] = dataUrlMatch
content = base64Data
if (!validatedData.contentType) {
uploadContentType = mimeType
}
logger.info(`[${requestId}] Extracted base64 from data URL (MIME: ${mimeType})`)
}
const cleanedContent = content.replace(/[\s\r\n]/g, '')
const isLikelyBase64 = /^[A-Za-z0-9+/]*={0,2}$/.test(cleanedContent)
if (isLikelyBase64 && cleanedContent.length >= 4) {
try {
uploadBody = Buffer.from(cleanedContent, 'base64')
const expectedMinSize = Math.floor(cleanedContent.length * 0.7)
const expectedMaxSize = Math.ceil(cleanedContent.length * 0.8)
if (
uploadBody.length >= expectedMinSize &&
uploadBody.length <= expectedMaxSize &&
uploadBody.length > 0
) {
logger.info(
`[${requestId}] Decoded base64 content: ${cleanedContent.length} chars -> ${uploadBody.length} bytes`
)
} else {
const reEncoded = uploadBody.toString('base64')
if (reEncoded !== cleanedContent) {
logger.info(
`[${requestId}] Content looked like base64 but re-encoding didn't match, using as plain text`
)
uploadBody = Buffer.from(content, 'utf-8')
} else {
logger.info(
`[${requestId}] Decoded base64 content (verified): ${uploadBody.length} bytes`
)
}
}
} catch (decodeError) {
logger.info(
`[${requestId}] Failed to decode as base64, using as plain text: ${decodeError}`
)
uploadBody = Buffer.from(content, 'utf-8')
}
} else {
uploadBody = Buffer.from(content, 'utf-8')
logger.info(`[${requestId}] Using content as plain text (${uploadBody.length} bytes)`)
}
uploadContentType =
uploadContentType || validatedData.contentType || 'application/octet-stream'
} else {
const rawFile = fileData
logger.info(`[${requestId}] Processing file object: ${rawFile.name || 'unknown'}`)
let userFile
try {
userFile = processSingleFileToUserFile(rawFile, requestId, logger)
} catch (error) {
return NextResponse.json(
{
success: false,
error: error instanceof Error ? error.message : 'Failed to process file',
},
{ status: 400 }
)
}
const buffer = await downloadFileFromStorage(userFile, requestId, logger)
uploadBody = buffer
uploadContentType = validatedData.contentType || userFile.type || 'application/octet-stream'
}
let fullPath = validatedData.fileName
if (validatedData.path) {
const folderPath = validatedData.path.endsWith('/')
? validatedData.path
: `${validatedData.path}/`
fullPath = `${folderPath}${validatedData.fileName}`
}
const supabaseUrl = `https://${validatedData.projectId}.supabase.co/storage/v1/object/${validatedData.bucket}/${fullPath}`
const headers: Record<string, string> = {
apikey: validatedData.apiKey,
Authorization: `Bearer ${validatedData.apiKey}`,
'Content-Type': uploadContentType,
}
if (validatedData.upsert) {
headers['x-upsert'] = 'true'
}
logger.info(`[${requestId}] Sending to Supabase: ${supabaseUrl}`, {
contentType: uploadContentType,
bodySize: uploadBody.length,
upsert: validatedData.upsert,
})
const response = await fetch(supabaseUrl, {
method: 'POST',
headers,
body: new Uint8Array(uploadBody),
})
if (!response.ok) {
const errorText = await response.text()
let errorData
try {
errorData = JSON.parse(errorText)
} catch {
errorData = { message: errorText }
}
logger.error(`[${requestId}] Supabase Storage upload failed:`, {
status: response.status,
statusText: response.statusText,
error: errorData,
})
return NextResponse.json(
{
success: false,
error: errorData.message || errorData.error || `Upload failed: ${response.statusText}`,
details: errorData,
},
{ status: response.status }
)
}
const result = await response.json()
logger.info(`[${requestId}] File uploaded successfully to Supabase Storage`, {
bucket: validatedData.bucket,
path: fullPath,
})
const publicUrl = `https://${validatedData.projectId}.supabase.co/storage/v1/object/public/${validatedData.bucket}/${fullPath}`
return NextResponse.json({
success: true,
output: {
message: 'Successfully uploaded file to storage',
results: {
...result,
path: fullPath,
bucket: validatedData.bucket,
publicUrl,
},
},
})
} catch (error) {
if (error instanceof z.ZodError) {
logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
return NextResponse.json(
{
success: false,
error: 'Invalid request data',
details: error.errors,
},
{ status: 400 }
)
}
logger.error(`[${requestId}] Error uploading to Supabase Storage:`, error)
return NextResponse.json(
{
success: false,
error: error instanceof Error ? error.message : 'Internal server error',
},
{ status: 500 }
)
}
}

View File

@@ -338,11 +338,6 @@ const arePropsEqual = (prevProps: SubBlockProps, nextProps: SubBlockProps): bool
   const configEqual =
     prevProps.config.id === nextProps.config.id && prevProps.config.type === nextProps.config.type

-  const canonicalToggleEqual =
-    !!prevProps.canonicalToggle === !!nextProps.canonicalToggle &&
-    prevProps.canonicalToggle?.mode === nextProps.canonicalToggle?.mode &&
-    prevProps.canonicalToggle?.disabled === nextProps.canonicalToggle?.disabled
-
   return (
     prevProps.blockId === nextProps.blockId &&
     configEqual &&

@@ -351,7 +346,8 @@ const arePropsEqual = (prevProps: SubBlockProps, nextProps: SubBlockProps): bool
     prevProps.disabled === nextProps.disabled &&
     prevProps.fieldDiffStatus === nextProps.fieldDiffStatus &&
     prevProps.allowExpandInPreview === nextProps.allowExpandInPreview &&
-    canonicalToggleEqual
+    prevProps.canonicalToggle?.mode === nextProps.canonicalToggle?.mode &&
+    prevProps.canonicalToggle?.disabled === nextProps.canonicalToggle?.disabled
   )
 }

View File

@@ -214,6 +214,15 @@ export const A2ABlock: BlockConfig<A2AResponse> = {
     ],
     config: {
       tool: (params) => params.operation as string,
+      params: (params) => {
+        const { fileUpload, fileReference, ...rest } = params
+        const hasFileUpload = Array.isArray(fileUpload) ? fileUpload.length > 0 : !!fileUpload
+        const files = hasFileUpload ? fileUpload : fileReference
+        return {
+          ...rest,
+          ...(files ? { files } : {}),
+        }
+      },
     },
   },
   inputs: {

View File

@@ -581,18 +581,6 @@ export const GmailV2Block: BlockConfig<GmailToolResponse> = {
     results: { type: 'json', description: 'Search/read summary results' },
     attachments: { type: 'json', description: 'Downloaded attachments (if enabled)' },

-    // Draft-specific outputs
-    draftId: {
-      type: 'string',
-      description: 'Draft ID',
-      condition: { field: 'operation', value: 'draft_gmail' },
-    },
-    messageId: {
-      type: 'string',
-      description: 'Gmail message ID for the draft',
-      condition: { field: 'operation', value: 'draft_gmail' },
-    },
-
     // Trigger outputs (unchanged)
     email_id: { type: 'string', description: 'Gmail message ID' },
     thread_id: { type: 'string', description: 'Gmail thread ID' },

View File

@@ -661,25 +661,12 @@ Return ONLY the PostgREST filter expression - no explanations, no markdown, no e
       placeholder: 'folder/subfolder/',
       condition: { field: 'operation', value: 'storage_upload' },
     },
-    {
-      id: 'file',
-      title: 'File',
-      type: 'file-upload',
-      canonicalParamId: 'fileData',
-      placeholder: 'Upload file to storage',
-      condition: { field: 'operation', value: 'storage_upload' },
-      mode: 'basic',
-      multiple: false,
-      required: true,
-    },
     {
       id: 'fileContent',
       title: 'File Content',
       type: 'code',
-      canonicalParamId: 'fileData',
       placeholder: 'Base64 encoded for binary files, or plain text',
       condition: { field: 'operation', value: 'storage_upload' },
-      mode: 'advanced',
       required: true,
     },
     {

View File

@@ -1,9 +1,7 @@
+import { getBlockOutputs } from '@/lib/workflows/blocks/block-outputs'
 import { normalizeName } from '@/executor/constants'
 import type { ExecutionContext } from '@/executor/types'
 import type { OutputSchema } from '@/executor/utils/block-reference'
-import type { SerializedBlock } from '@/serializer/types'
-import type { ToolConfig } from '@/tools/types'
-import { getTool } from '@/tools/utils'

 export interface BlockDataCollection {
   blockData: Record<string, unknown>

@@ -11,32 +9,6 @@
   blockOutputSchemas: Record<string, OutputSchema>
 }

-export function getBlockSchema(
-  block: SerializedBlock,
-  toolConfig?: ToolConfig
-): OutputSchema | undefined {
-  const isTrigger =
-    block.metadata?.category === 'triggers' ||
-    (block.config?.params as Record<string, unknown> | undefined)?.triggerMode === true
-
-  // Triggers use saved outputs (defines the trigger payload schema)
-  if (isTrigger && block.outputs && Object.keys(block.outputs).length > 0) {
-    return block.outputs as OutputSchema
-  }
-
-  // When a tool is selected, tool outputs are the source of truth
-  if (toolConfig?.outputs && Object.keys(toolConfig.outputs).length > 0) {
-    return toolConfig.outputs as OutputSchema
-  }
-
-  // Fallback to saved outputs for blocks without tools
-  if (block.outputs && Object.keys(block.outputs).length > 0) {
-    return block.outputs as OutputSchema
-  }
-
-  return undefined
-}
-
 export function collectBlockData(ctx: ExecutionContext): BlockDataCollection {
   const blockData: Record<string, unknown> = {}
   const blockNameMapping: Record<string, string> = {}

@@ -46,23 +18,26 @@
     if (state.output !== undefined) {
       blockData[id] = state.output
     }
-  }

-  const workflowBlocks = ctx.workflow?.blocks ?? []
-  for (const block of workflowBlocks) {
-    const id = block.id
+    const workflowBlock = ctx.workflow?.blocks?.find((b) => b.id === id)
+    if (!workflowBlock) continue

-    if (block.metadata?.name) {
-      blockNameMapping[normalizeName(block.metadata.name)] = id
-    }
+    if (workflowBlock.metadata?.name) {
+      blockNameMapping[normalizeName(workflowBlock.metadata.name)] = id
+    }

-    const toolId = block.config?.tool
-    const toolConfig = toolId ? getTool(toolId) : undefined
-    const schema = getBlockSchema(block, toolConfig)
-    if (schema && Object.keys(schema).length > 0) {
-      blockOutputSchemas[id] = schema
+    const blockType = workflowBlock.metadata?.id
+    if (blockType) {
+      const params = workflowBlock.config?.params as Record<string, unknown> | undefined
+      const subBlocks = params
+        ? Object.fromEntries(Object.entries(params).map(([k, v]) => [k, { value: v }]))
+        : undefined
+      const schema = getBlockOutputs(blockType, subBlocks)
+      if (schema && Object.keys(schema).length > 0) {
+        blockOutputSchemas[id] = schema
+      }
     }
   }

   return { blockData, blockNameMapping, blockOutputSchemas }
 }

View File

@@ -378,30 +378,8 @@ function buildManualTriggerOutput(
   return mergeFilesIntoOutput(output, workflowInput)
 }

-function buildIntegrationTriggerOutput(
-  workflowInput: unknown,
-  structuredInput: Record<string, unknown>,
-  hasStructured: boolean
-): NormalizedBlockOutput {
-  const output: NormalizedBlockOutput = {}
-
-  if (hasStructured) {
-    for (const [key, value] of Object.entries(structuredInput)) {
-      output[key] = value
-    }
-  }
-
-  if (isPlainObject(workflowInput)) {
-    for (const [key, value] of Object.entries(workflowInput)) {
-      if (value !== undefined && value !== null) {
-        output[key] = value
-      } else if (!Object.hasOwn(output, key)) {
-        output[key] = value
-      }
-    }
-  }
-
-  return mergeFilesIntoOutput(output, workflowInput)
-}
+function buildIntegrationTriggerOutput(workflowInput: unknown): NormalizedBlockOutput {
+  return isPlainObject(workflowInput) ? (workflowInput as NormalizedBlockOutput) : {}
+}

 function extractSubBlocks(block: SerializedBlock): Record<string, unknown> | undefined {

@@ -450,7 +428,7 @@ export function buildStartBlockOutput(options: StartBlockOutputOptions): Normali
       return buildManualTriggerOutput(finalInput, workflowInput)

     case StartBlockPath.EXTERNAL_TRIGGER:
-      return buildIntegrationTriggerOutput(workflowInput, structuredInput, hasStructured)
+      return buildIntegrationTriggerOutput(workflowInput)

     case StartBlockPath.LEGACY_STARTER:
       return buildLegacyStarterOutput(

View File

@@ -1,10 +1,10 @@
+import { getBlockOutputs } from '@/lib/workflows/blocks/block-outputs'
 import {
   isReference,
   normalizeName,
   parseReferencePath,
   SPECIAL_REFERENCE_PREFIXES,
 } from '@/executor/constants'
-import { getBlockSchema } from '@/executor/utils/block-data'
 import {
   InvalidFieldError,
   type OutputSchema,

@@ -67,9 +67,15 @@ export class BlockResolver implements Resolver {
       blockData[blockId] = output
     }

+    const blockType = block.metadata?.id
+    const params = block.config?.params as Record<string, unknown> | undefined
+    const subBlocks = params
+      ? Object.fromEntries(Object.entries(params).map(([k, v]) => [k, { value: v }]))
+      : undefined
+
     const toolId = block.config?.tool
     const toolConfig = toolId ? getTool(toolId) : undefined
-    const outputSchema = getBlockSchema(block, toolConfig)
+    const outputSchema =
+      toolConfig?.outputs ?? (blockType ? getBlockOutputs(blockType, subBlocks) : block.outputs)

     if (outputSchema && Object.keys(outputSchema).length > 0) {
       blockOutputSchemas[blockId] = outputSchema

View File

@@ -680,10 +680,6 @@ export function useCollaborativeWorkflow() {
previousPositions?: Map<string, { x: number; y: number; parentId?: string }> previousPositions?: Map<string, { x: number; y: number; parentId?: string }>
} }
) => { ) => {
if (isBaselineDiffView) {
return
}
if (!isInActiveRoom()) { if (!isInActiveRoom()) {
logger.debug('Skipping batch position update - not in active workflow') logger.debug('Skipping batch position update - not in active workflow')
return return
@@ -729,7 +725,7 @@ export function useCollaborativeWorkflow() {
} }
} }
}, },
[isBaselineDiffView, addToQueue, activeWorkflowId, session?.user?.id, isInActiveRoom, undoRedo] [addToQueue, activeWorkflowId, session?.user?.id, isInActiveRoom, undoRedo]
) )
const collaborativeUpdateBlockName = useCallback( const collaborativeUpdateBlockName = useCallback(
@@ -821,10 +817,6 @@ export function useCollaborativeWorkflow() {
const collaborativeBatchToggleBlockEnabled = useCallback( const collaborativeBatchToggleBlockEnabled = useCallback(
(ids: string[]) => { (ids: string[]) => {
if (isBaselineDiffView) {
return
}
if (ids.length === 0) return if (ids.length === 0) return
const previousStates: Record<string, boolean> = {} const previousStates: Record<string, boolean> = {}
@@ -857,7 +849,7 @@ export function useCollaborativeWorkflow() {
undoRedo.recordBatchToggleEnabled(validIds, previousStates) undoRedo.recordBatchToggleEnabled(validIds, previousStates)
}, },
[isBaselineDiffView, addToQueue, activeWorkflowId, session?.user?.id, undoRedo] [addToQueue, activeWorkflowId, session?.user?.id, undoRedo]
) )
const collaborativeBatchUpdateParent = useCallback( const collaborativeBatchUpdateParent = useCallback(
@@ -869,10 +861,6 @@ export function useCollaborativeWorkflow() {
affectedEdges: Edge[] affectedEdges: Edge[]
}> }>
) => { ) => {
if (isBaselineDiffView) {
return
}
if (!isInActiveRoom()) { if (!isInActiveRoom()) {
logger.debug('Skipping batch update parent - not in active workflow') logger.debug('Skipping batch update parent - not in active workflow')
return return
@@ -943,7 +931,7 @@ export function useCollaborativeWorkflow() {
logger.debug('Batch updated parent for blocks', { updateCount: updates.length }) logger.debug('Batch updated parent for blocks', { updateCount: updates.length })
}, },
[isBaselineDiffView, isInActiveRoom, undoRedo, addToQueue, activeWorkflowId, session?.user?.id] [isInActiveRoom, undoRedo, addToQueue, activeWorkflowId, session?.user?.id]
) )
const collaborativeToggleBlockAdvancedMode = useCallback( const collaborativeToggleBlockAdvancedMode = useCallback(
@@ -963,37 +951,18 @@ export function useCollaborativeWorkflow() {
const collaborativeSetBlockCanonicalMode = useCallback( const collaborativeSetBlockCanonicalMode = useCallback(
(id: string, canonicalId: string, canonicalMode: 'basic' | 'advanced') => { (id: string, canonicalId: string, canonicalMode: 'basic' | 'advanced') => {
if (isBaselineDiffView) { executeQueuedOperation(
return BLOCK_OPERATIONS.UPDATE_CANONICAL_MODE,
} OPERATION_TARGETS.BLOCK,
{ id, canonicalId, canonicalMode },
useWorkflowStore.getState().setBlockCanonicalMode(id, canonicalId, canonicalMode) () => useWorkflowStore.getState().setBlockCanonicalMode(id, canonicalId, canonicalMode)
)
if (!activeWorkflowId) {
return
}
const operationId = crypto.randomUUID()
addToQueue({
id: operationId,
operation: {
operation: BLOCK_OPERATIONS.UPDATE_CANONICAL_MODE,
target: OPERATION_TARGETS.BLOCK,
payload: { id, canonicalId, canonicalMode },
}, },
workflowId: activeWorkflowId, [executeQueuedOperation]
userId: session?.user?.id || 'unknown',
})
},
[isBaselineDiffView, activeWorkflowId, addToQueue, session?.user?.id]
) )
const collaborativeBatchToggleBlockHandles = useCallback( const collaborativeBatchToggleBlockHandles = useCallback(
(ids: string[]) => { (ids: string[]) => {
if (isBaselineDiffView) {
return
}
if (ids.length === 0) return if (ids.length === 0) return
const previousStates: Record<string, boolean> = {} const previousStates: Record<string, boolean> = {}
@@ -1026,15 +995,11 @@ export function useCollaborativeWorkflow() {
undoRedo.recordBatchToggleHandles(validIds, previousStates) undoRedo.recordBatchToggleHandles(validIds, previousStates)
}, },
-    [isBaselineDiffView, addToQueue, activeWorkflowId, session?.user?.id, undoRedo]
+    [addToQueue, activeWorkflowId, session?.user?.id, undoRedo]
   )

   const collaborativeBatchAddEdges = useCallback(
     (edges: Edge[], options?: { skipUndoRedo?: boolean }) => {
-      if (isBaselineDiffView) {
-        return false
-      }
       if (!isInActiveRoom()) {
         logger.debug('Skipping batch add edges - not in active workflow')
         return false
@@ -1070,15 +1035,11 @@ export function useCollaborativeWorkflow() {
       return true
     },
-    [isBaselineDiffView, addToQueue, activeWorkflowId, session?.user?.id, isInActiveRoom, undoRedo]
+    [addToQueue, activeWorkflowId, session?.user?.id, isInActiveRoom, undoRedo]
   )

   const collaborativeBatchRemoveEdges = useCallback(
     (edgeIds: string[], options?: { skipUndoRedo?: boolean }) => {
-      if (isBaselineDiffView) {
-        return false
-      }
       if (!isInActiveRoom()) {
         logger.debug('Skipping batch remove edges - not in active workflow')
         return false
@@ -1128,7 +1089,7 @@ export function useCollaborativeWorkflow() {
       logger.info('Batch removed edges', { count: validEdgeIds.length })
       return true
     },
-    [isBaselineDiffView, isInActiveRoom, addToQueue, activeWorkflowId, session, undoRedo]
+    [isInActiveRoom, addToQueue, activeWorkflowId, session, undoRedo]
   )

   const collaborativeSetSubblockValue = useCallback(
@@ -1204,10 +1165,6 @@ export function useCollaborativeWorkflow() {
     (blockId: string, subblockId: string, value: any) => {
       if (isApplyingRemoteChange.current) return
-      if (isBaselineDiffView) {
-        return
-      }
       if (!isInActiveRoom()) {
         logger.debug('Skipping tag selection - not in active workflow', {
           currentWorkflowId,
@@ -1235,14 +1192,7 @@ export function useCollaborativeWorkflow() {
         userId: session?.user?.id || 'unknown',
       })
     },
-    [
-      isBaselineDiffView,
-      addToQueue,
-      currentWorkflowId,
-      activeWorkflowId,
-      session?.user?.id,
-      isInActiveRoom,
-    ]
+    [addToQueue, currentWorkflowId, activeWorkflowId, session?.user?.id, isInActiveRoom]
   )

   const collaborativeUpdateLoopType = useCallback(
@@ -1588,10 +1538,6 @@ export function useCollaborativeWorkflow() {
   const collaborativeBatchRemoveBlocks = useCallback(
     (blockIds: string[], options?: { skipUndoRedo?: boolean }) => {
-      if (isBaselineDiffView) {
-        return false
-      }
       if (!isInActiveRoom()) {
         logger.debug('Skipping batch remove blocks - not in active workflow')
         return false
@@ -1673,7 +1619,6 @@ export function useCollaborativeWorkflow() {
       return true
     },
     [
-      isBaselineDiffView,
      addToQueue,
      activeWorkflowId,
      session?.user?.id,
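Every collaborative mutation above follows the same early-return pattern: bail out unless the client is still in the active workflow room. A minimal sketch of that guard, with illustrative names rather than the real hook's internals:

```ts
// Hypothetical sketch of the guard pattern shared by the collaborative
// callbacks above; `GuardContext` and `withActiveRoomGuard` are stand-ins,
// not names from the actual hook.
type GuardContext = {
  isInActiveRoom: () => boolean
}

function withActiveRoomGuard<T extends unknown[]>(
  ctx: GuardContext,
  mutate: (...args: T) => boolean
): (...args: T) => boolean {
  return (...args: T) => {
    if (!ctx.isInActiveRoom()) {
      // Mirrors the logger.debug early returns in the diff
      return false
    }
    return mutate(...args)
  }
}
```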

View File

@@ -2508,6 +2508,10 @@ async function validateWorkflowSelectorIds(
   for (const subBlockConfig of blockConfig.subBlocks) {
     if (!SELECTOR_TYPES.has(subBlockConfig.type)) continue

+    // Skip oauth-input - credentials are pre-validated before edit application
+    // This allows existing collaborator credentials to remain untouched
+    if (subBlockConfig.type === 'oauth-input') continue
+
     const subBlockValue = blockData.subBlocks?.[subBlockConfig.id]?.value
     if (!subBlockValue) continue
@@ -2573,6 +2577,157 @@ async function validateWorkflowSelectorIds(
   return errors
 }

+/**
+ * Pre-validates credential and apiKey inputs in operations before they are applied.
+ * - Validates oauth-input (credential) IDs belong to the user
+ * - Filters out apiKey inputs for hosted models when isHosted is true
+ * Returns validation errors for any removed inputs.
+ */
+async function preValidateCredentialInputs(
+  operations: EditWorkflowOperation[],
+  context: { userId: string }
+): Promise<{ filteredOperations: EditWorkflowOperation[]; errors: ValidationError[] }> {
+  const { isHosted } = await import('@/lib/core/config/feature-flags')
+  const { getHostedModels } = await import('@/providers/utils')
+  const logger = createLogger('PreValidateCredentials')
+  const errors: ValidationError[] = []
+
+  // Collect credential and apiKey inputs that need validation/filtering
+  const credentialInputs: Array<{
+    operationIndex: number
+    blockId: string
+    blockType: string
+    fieldName: string
+    value: string
+  }> = []
+  const hostedApiKeyInputs: Array<{
+    operationIndex: number
+    blockId: string
+    blockType: string
+    model: string
+  }> = []
+
+  const hostedModels = isHosted ? getHostedModels() : []
+  const hostedModelsLower = new Set(hostedModels.map((m) => m.toLowerCase()))
+
+  operations.forEach((op, opIndex) => {
+    if (!op.params?.inputs || !op.params?.type) return
+    const blockConfig = getBlock(op.params.type)
+    if (!blockConfig) return
+
+    // Find oauth-input subblocks
+    for (const subBlockConfig of blockConfig.subBlocks) {
+      if (subBlockConfig.type !== 'oauth-input') continue
+      const inputValue = op.params.inputs[subBlockConfig.id]
+      if (!inputValue || typeof inputValue !== 'string' || inputValue.trim() === '') continue
+      credentialInputs.push({
+        operationIndex: opIndex,
+        blockId: op.block_id,
+        blockType: op.params.type,
+        fieldName: subBlockConfig.id,
+        value: inputValue,
+      })
+    }
+
+    // Check for apiKey inputs on hosted models
+    if (isHosted && op.params.inputs.apiKey) {
+      const modelValue = op.params.inputs.model
+      if (modelValue && typeof modelValue === 'string') {
+        if (hostedModelsLower.has(modelValue.toLowerCase())) {
+          hostedApiKeyInputs.push({
+            operationIndex: opIndex,
+            blockId: op.block_id,
+            blockType: op.params.type,
+            model: modelValue,
+          })
+        }
+      }
+    }
+  })
+
+  const hasCredentialsToValidate = credentialInputs.length > 0
+  const hasHostedApiKeysToFilter = hostedApiKeyInputs.length > 0
+  if (!hasCredentialsToValidate && !hasHostedApiKeysToFilter) {
+    return { filteredOperations: operations, errors }
+  }
+
+  // Deep clone operations so we can modify them
+  const filteredOperations = JSON.parse(JSON.stringify(operations)) as EditWorkflowOperation[]
+
+  // Filter out apiKey inputs for hosted models
+  if (hasHostedApiKeysToFilter) {
+    logger.info('Filtering apiKey inputs for hosted models', { count: hostedApiKeyInputs.length })
+    for (const apiKeyInput of hostedApiKeyInputs) {
+      const op = filteredOperations[apiKeyInput.operationIndex]
+      if (op.params?.inputs?.apiKey) {
+        op.params.inputs.apiKey = undefined
+        logger.info('Removed apiKey for hosted model', {
+          blockId: apiKeyInput.blockId,
+          model: apiKeyInput.model,
+        })
+      }
+      errors.push({
+        blockId: apiKeyInput.blockId,
+        blockType: apiKeyInput.blockType,
+        field: 'apiKey',
+        value: '[redacted]',
+        error: `API key not allowed for hosted model "${apiKeyInput.model}" - platform provides the key`,
+      })
+    }
+  }
+
+  // Validate credential inputs
+  if (hasCredentialsToValidate) {
+    logger.info('Pre-validating credential inputs', {
+      credentialCount: credentialInputs.length,
+      userId: context.userId,
+    })
+    const allCredentialIds = credentialInputs.map((c) => c.value)
+    const validationResult = await validateSelectorIds('oauth-input', allCredentialIds, context)
+    const invalidSet = new Set(validationResult.invalid)
+
+    if (invalidSet.size > 0) {
+      for (const credInput of credentialInputs) {
+        if (!invalidSet.has(credInput.value)) continue
+        const op = filteredOperations[credInput.operationIndex]
+        if (op.params?.inputs?.[credInput.fieldName]) {
+          delete op.params.inputs[credInput.fieldName]
+          logger.info('Removed invalid credential from operation', {
+            blockId: credInput.blockId,
+            field: credInput.fieldName,
+            invalidValue: credInput.value,
+          })
+        }
+        const warningInfo = validationResult.warning ? `. ${validationResult.warning}` : ''
+        errors.push({
+          blockId: credInput.blockId,
+          blockType: credInput.blockType,
+          field: credInput.fieldName,
+          value: credInput.value,
+          error: `Invalid credential ID "${credInput.value}" - credential does not exist or user doesn't have access${warningInfo}`,
+        })
+      }
+      logger.warn('Filtered out invalid credentials', {
+        invalidCount: invalidSet.size,
+      })
+    }
+  }
+
+  return { filteredOperations, errors }
+}
+
 async function getCurrentWorkflowStateFromDb(
   workflowId: string
 ): Promise<{ workflowState: any; subBlockValues: Record<string, Record<string, any>> }> {
@@ -2657,12 +2812,28 @@ export const editWorkflowServerTool: BaseServerTool<EditWorkflowParams, any> = {
   // Get permission config for the user
   const permissionConfig = context?.userId ? await getUserPermissionConfig(context.userId) : null

+  // Pre-validate credential and apiKey inputs before applying operations
+  // This filters out invalid credentials and apiKeys for hosted models
+  let operationsToApply = operations
+  const credentialErrors: ValidationError[] = []
+  if (context?.userId) {
+    const { filteredOperations, errors: credErrors } = await preValidateCredentialInputs(
+      operations,
+      { userId: context.userId }
+    )
+    operationsToApply = filteredOperations
+    credentialErrors.push(...credErrors)
+  }
+
   // Apply operations directly to the workflow state
   const {
     state: modifiedWorkflowState,
     validationErrors,
     skippedItems,
-  } = applyOperationsToWorkflowState(workflowState, operations, permissionConfig)
+  } = applyOperationsToWorkflowState(workflowState, operationsToApply, permissionConfig)
+
+  // Add credential validation errors
+  validationErrors.push(...credentialErrors)

   // Get workspaceId for selector validation
   let workspaceId: string | undefined
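The net effect of the new pre-validation step reduces to two decisions per operation: drop a user-supplied apiKey when the platform hosts the model, and drop credential IDs that don't validate against the user. A simplified, illustrative-only sketch of that decision logic (types and helper names here are stand-ins, not the real ones):

```ts
// Illustrative reduction of preValidateCredentialInputs' per-operation logic.
interface OperationInputs {
  model?: string
  apiKey?: string
  credential?: string
}

function decide(
  inputs: OperationInputs,
  opts: { isHosted: boolean; hostedModels: Set<string>; validCredentialIds: Set<string> }
): { inputs: OperationInputs; errors: string[] } {
  const errors: string[] = []
  const next = { ...inputs }

  // Hosted models get their key from the platform, so a user-supplied
  // apiKey is dropped and reported back for LLM feedback.
  if (
    opts.isHosted &&
    next.apiKey &&
    next.model &&
    opts.hostedModels.has(next.model.toLowerCase())
  ) {
    next.apiKey = undefined
    errors.push(`API key not allowed for hosted model "${next.model}"`)
  }

  // Credential IDs must exist and belong to the requesting user.
  if (next.credential && !opts.validCredentialIds.has(next.credential)) {
    errors.push(`Invalid credential ID "${next.credential}"`)
    delete next.credential
  }

  return { inputs: next, errors }
}
```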

View File

@@ -8,17 +8,6 @@ const logger = createLogger('EmbeddingUtils')
 const MAX_TOKENS_PER_REQUEST = 8000
 const MAX_CONCURRENT_BATCHES = env.KB_CONFIG_CONCURRENCY_LIMIT || 50
-const EMBEDDING_DIMENSIONS = 1536
-
-/**
- * Check if the model supports custom dimensions.
- * text-embedding-3-* models support the dimensions parameter.
- * Checks for 'embedding-3' to handle Azure deployments with custom naming conventions.
- */
-function supportsCustomDimensions(modelName: string): boolean {
-  const name = modelName.toLowerCase()
-  return name.includes('embedding-3') && !name.includes('ada')
-}

 export class EmbeddingAPIError extends Error {
   public status: number
@@ -104,19 +93,15 @@ async function getEmbeddingConfig(
 async function callEmbeddingAPI(inputs: string[], config: EmbeddingConfig): Promise<number[][]> {
   return retryWithExponentialBackoff(
     async () => {
-      const useDimensions = supportsCustomDimensions(config.modelName)
-
       const requestBody = config.useAzure
         ? {
             input: inputs,
             encoding_format: 'float',
-            ...(useDimensions && { dimensions: EMBEDDING_DIMENSIONS }),
           }
         : {
            input: inputs,
            model: config.modelName,
            encoding_format: 'float',
-            ...(useDimensions && { dimensions: EMBEDDING_DIMENSIONS }),
          }

      const response = await fetch(config.apiUrl, {
const response = await fetch(config.apiUrl, { const response = await fetch(config.apiUrl, {

View File

@@ -18,52 +18,6 @@ const logger = createLogger('BlobClient')
 let _blobServiceClient: BlobServiceClientInstance | null = null

-interface ParsedCredentials {
-  accountName: string
-  accountKey: string
-}
-
-/**
- * Extract account name and key from an Azure connection string.
- * Connection strings have the format: DefaultEndpointsProtocol=https;AccountName=...;AccountKey=...;EndpointSuffix=...
- */
-function parseConnectionString(connectionString: string): ParsedCredentials {
-  const accountNameMatch = connectionString.match(/AccountName=([^;]+)/)
-  if (!accountNameMatch) {
-    throw new Error('Cannot extract account name from connection string')
-  }
-  const accountKeyMatch = connectionString.match(/AccountKey=([^;]+)/)
-  if (!accountKeyMatch) {
-    throw new Error('Cannot extract account key from connection string')
-  }
-  return {
-    accountName: accountNameMatch[1],
-    accountKey: accountKeyMatch[1],
-  }
-}
-
-/**
- * Get account credentials from BLOB_CONFIG, extracting from connection string if necessary.
- */
-function getAccountCredentials(): ParsedCredentials {
-  if (BLOB_CONFIG.connectionString) {
-    return parseConnectionString(BLOB_CONFIG.connectionString)
-  }
-  if (BLOB_CONFIG.accountName && BLOB_CONFIG.accountKey) {
-    return {
-      accountName: BLOB_CONFIG.accountName,
-      accountKey: BLOB_CONFIG.accountKey,
-    }
-  }
-  throw new Error(
-    'Azure Blob Storage credentials are missing - set AZURE_CONNECTION_STRING or both AZURE_ACCOUNT_NAME and AZURE_ACCOUNT_KEY'
-  )
-}
-
 export async function getBlobServiceClient(): Promise<BlobServiceClientInstance> {
   if (_blobServiceClient) return _blobServiceClient
@@ -173,8 +127,6 @@ export async function getPresignedUrl(key: string, expiresIn = 3600) {
   const containerClient = blobServiceClient.getContainerClient(BLOB_CONFIG.containerName)
   const blockBlobClient = containerClient.getBlockBlobClient(key)

-  const { accountName, accountKey } = getAccountCredentials()
-
   const sasOptions = {
     containerName: BLOB_CONFIG.containerName,
     blobName: key,
@@ -185,7 +137,13 @@ export async function getPresignedUrl(key: string, expiresIn = 3600) {
   const sasToken = generateBlobSASQueryParameters(
     sasOptions,
-    new StorageSharedKeyCredential(accountName, accountKey)
+    new StorageSharedKeyCredential(
+      BLOB_CONFIG.accountName,
+      BLOB_CONFIG.accountKey ??
+        (() => {
+          throw new Error('AZURE_ACCOUNT_KEY is required when using account name authentication')
+        })()
+    )
   ).toString()

   return `${blockBlobClient.url}?${sasToken}`
@@ -210,14 +168,9 @@ export async function getPresignedUrlWithConfig(
     StorageSharedKeyCredential,
   } = await import('@azure/storage-blob')

   let tempBlobServiceClient: BlobServiceClientInstance
-  let accountName: string
-  let accountKey: string

   if (customConfig.connectionString) {
     tempBlobServiceClient = BlobServiceClient.fromConnectionString(customConfig.connectionString)
-    const credentials = parseConnectionString(customConfig.connectionString)
-    accountName = credentials.accountName
-    accountKey = credentials.accountKey
   } else if (customConfig.accountName && customConfig.accountKey) {
     const sharedKeyCredential = new StorageSharedKeyCredential(
       customConfig.accountName,
@@ -227,8 +180,6 @@ export async function getPresignedUrlWithConfig(
       `https://${customConfig.accountName}.blob.core.windows.net`,
       sharedKeyCredential
     )
-    accountName = customConfig.accountName
-    accountKey = customConfig.accountKey
   } else {
     throw new Error(
       'Custom blob config must include either connectionString or accountName + accountKey'
@@ -248,7 +199,13 @@ export async function getPresignedUrlWithConfig(
   const sasToken = generateBlobSASQueryParameters(
     sasOptions,
-    new StorageSharedKeyCredential(accountName, accountKey)
+    new StorageSharedKeyCredential(
+      customConfig.accountName,
+      customConfig.accountKey ??
+        (() => {
+          throw new Error('Account key is required when using account name authentication')
+        })()
+    )
   ).toString()

   return `${blockBlobClient.url}?${sasToken}`
@@ -446,9 +403,13 @@ export async function getMultipartPartUrls(
   if (customConfig) {
     if (customConfig.connectionString) {
       blobServiceClient = BlobServiceClient.fromConnectionString(customConfig.connectionString)
-      const credentials = parseConnectionString(customConfig.connectionString)
-      accountName = credentials.accountName
-      accountKey = credentials.accountKey
+      const match = customConfig.connectionString.match(/AccountName=([^;]+)/)
+      if (!match) throw new Error('Cannot extract account name from connection string')
+      accountName = match[1]
+      const keyMatch = customConfig.connectionString.match(/AccountKey=([^;]+)/)
+      if (!keyMatch) throw new Error('Cannot extract account key from connection string')
+      accountKey = keyMatch[1]
     } else if (customConfig.accountName && customConfig.accountKey) {
       const credential = new StorageSharedKeyCredential(
         customConfig.accountName,
@@ -467,9 +428,12 @@ export async function getMultipartPartUrls(
   } else {
     blobServiceClient = await getBlobServiceClient()
     containerName = BLOB_CONFIG.containerName
-    const credentials = getAccountCredentials()
-    accountName = credentials.accountName
-    accountKey = credentials.accountKey
+    accountName = BLOB_CONFIG.accountName
+    accountKey =
+      BLOB_CONFIG.accountKey ||
+      (() => {
+        throw new Error('AZURE_ACCOUNT_KEY is required')
+      })()
   }

   const containerClient = blobServiceClient.getContainerClient(containerName)
@@ -537,10 +501,12 @@ export async function completeMultipartUpload(
   const containerClient = blobServiceClient.getContainerClient(containerName)
   const blockBlobClient = containerClient.getBlockBlobClient(key)

+  // Sort parts by part number and extract block IDs
   const sortedBlockIds = parts
     .sort((a, b) => a.partNumber - b.partNumber)
     .map((part) => part.blockId)

+  // Commit the block list to create the final blob
   await blockBlobClient.commitBlockList(sortedBlockIds, {
     metadata: {
       multipartUpload: 'completed',
@@ -591,8 +557,10 @@ export async function abortMultipartUpload(key: string, customConfig?: BlobConfi
   const blockBlobClient = containerClient.getBlockBlobClient(key)

   try {
+    // Delete the blob if it exists (this also cleans up any uncommitted blocks)
     await blockBlobClient.deleteIfExists()
   } catch (error) {
+    // Ignore errors since we're just cleaning up
     logger.warn('Error cleaning up multipart upload:', error)
   }
 }
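The removed `parseConnectionString` helper documented the connection string format this file relies on: `DefaultEndpointsProtocol=https;AccountName=...;AccountKey=...;EndpointSuffix=...`. A minimal standalone sketch of that parsing, equivalent to both the deleted helper and the inlined regexes that replace it:

```ts
// Minimal sketch of parsing the Azure Storage connection string format
// described in the removed doc comment.
function parseAzureConnectionString(cs: string): { accountName: string; accountKey: string } {
  // Each field is a semicolon-delimited key=value pair
  const accountName = cs.match(/AccountName=([^;]+)/)?.[1]
  const accountKey = cs.match(/AccountKey=([^;]+)/)?.[1]
  if (!accountName || !accountKey) {
    throw new Error('Connection string must contain AccountName and AccountKey')
  }
  return { accountName, accountKey }
}
```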
} }

View File

@@ -618,6 +618,13 @@ export function getToolOutputs(
   }
 }

+/**
+ * Generates output paths for a tool-based block.
+ *
+ * @param blockConfig - The block configuration containing tools config
+ * @param subBlocks - SubBlock values for tool selection and condition evaluation
+ * @returns Array of output paths for the tool, or empty array on error
+ */
 export function getToolOutputPaths(
   blockConfig: BlockConfig,
   subBlocks?: Record<string, SubBlockWithValue>
@@ -627,22 +634,12 @@ export function getToolOutputPaths(
   if (!outputs || Object.keys(outputs).length === 0) return []

   if (subBlocks && blockConfig.outputs) {
+    const filteredBlockOutputs = filterOutputsByCondition(blockConfig.outputs, subBlocks)
+    const allowedKeys = new Set(Object.keys(filteredBlockOutputs))
     const filteredOutputs: Record<string, any> = {}
     for (const [key, value] of Object.entries(outputs)) {
-      const blockOutput = blockConfig.outputs[key]
-      if (!blockOutput || typeof blockOutput !== 'object') {
-        filteredOutputs[key] = value
-        continue
-      }
-      const condition = 'condition' in blockOutput ? blockOutput.condition : undefined
-      if (condition) {
-        if (evaluateOutputCondition(condition, subBlocks)) {
-          filteredOutputs[key] = value
-        }
-      } else {
+      if (allowedKeys.has(key)) {
         filteredOutputs[key] = value
       }
     }
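The refactor swaps per-key condition evaluation for a single pass: compute the set of allowed output keys once, then keep only matching entries. A small sketch of that pattern (assuming, as the diff suggests, that `filterOutputsByCondition` returns the subset of block outputs whose conditions pass for the given subBlock values):

```ts
// Illustrative reduction of the new filtering approach in getToolOutputPaths.
function keepAllowedOutputs(
  outputs: Record<string, unknown>,
  allowedKeys: Set<string>
): Record<string, unknown> {
  const filtered: Record<string, unknown> = {}
  for (const [key, value] of Object.entries(outputs)) {
    // Set membership replaces re-evaluating each output's condition inline
    if (allowedKeys.has(key)) filtered[key] = value
  }
  return filtered
}
```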

View File

@@ -27,9 +27,6 @@ export function registerEmitFunctions(
   emitSubblockUpdate = subblockEmit
   emitVariableUpdate = variableEmit
   currentRegisteredWorkflowId = workflowId
-
-  if (workflowId) {
-    useOperationQueueStore.getState().processNextOperation()
-  }
 }

 let currentRegisteredWorkflowId: string | null = null
@@ -265,14 +262,16 @@ export const useOperationQueueStore = create<OperationQueueState>((set, get) =>
       return
     }

-    if (!currentRegisteredWorkflowId) {
+    const nextOperation = currentRegisteredWorkflowId
+      ? state.operations.find(
+          (op) => op.status === 'pending' && op.workflowId === currentRegisteredWorkflowId
+        )
+      : state.operations.find((op) => op.status === 'pending')
+
+    if (!nextOperation) {
       return
     }

-    const nextOperation = state.operations.find(
-      (op) => op.status === 'pending' && op.workflowId === currentRegisteredWorkflowId
-    )
-
-    if (!nextOperation) {
+    if (currentRegisteredWorkflowId && nextOperation.workflowId !== currentRegisteredWorkflowId) {
       return
     }
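The queue change means operation selection no longer bails out when no workflow is registered: it prefers pending operations scoped to the registered workflow, and falls back to any pending operation otherwise. A sketch with simplified stand-in types:

```ts
// Hypothetical sketch of the new pending-operation selection; QueuedOperation
// is a simplified stand-in for the store's operation type.
interface QueuedOperation {
  status: 'pending' | 'processing' | 'confirmed'
  workflowId: string
}

function pickNextOperation(
  operations: QueuedOperation[],
  registeredWorkflowId: string | null
): QueuedOperation | undefined {
  return registeredWorkflowId
    ? // Only process operations belonging to the registered workflow
      operations.find((op) => op.status === 'pending' && op.workflowId === registeredWorkflowId)
    : // No registered workflow: take the first pending operation of any workflow
      operations.find((op) => op.status === 'pending')
}
```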

View File

@@ -38,12 +38,11 @@ export const storageUploadTool: ToolConfig<
       visibility: 'user-or-llm',
       description: 'Optional folder path (e.g., "folder/subfolder/")',
     },
-    fileData: {
-      type: 'json',
+    fileContent: {
+      type: 'string',
       required: true,
       visibility: 'user-or-llm',
-      description:
-        'File to upload - UserFile object (basic mode) or string content (advanced mode: base64 or plain text). Supports data URLs.',
+      description: 'The file content (base64 encoded for binary files, or plain text)',
     },
     contentType: {
       type: 'string',
@@ -66,28 +65,65 @@ export const storageUploadTool: ToolConfig<
   },

   request: {
-    url: '/api/tools/supabase/storage-upload',
+    url: (params) => {
+      // Combine folder path and fileName, ensuring proper formatting
+      let fullPath = params.fileName
+      if (params.path) {
+        // Ensure path ends with / and doesn't have double slashes
+        const folderPath = params.path.endsWith('/') ? params.path : `${params.path}/`
+        fullPath = `${folderPath}${params.fileName}`
+      }
+      return `https://${params.projectId}.supabase.co/storage/v1/object/${params.bucket}/${fullPath}`
+    },
     method: 'POST',
-    headers: () => ({
-      'Content-Type': 'application/json',
-    }),
-    body: (params) => ({
-      projectId: params.projectId,
-      apiKey: params.apiKey,
-      bucket: params.bucket,
-      fileName: params.fileName,
-      path: params.path,
-      fileData: params.fileData,
-      contentType: params.contentType,
-      upsert: params.upsert,
-    }),
+    headers: (params) => {
+      const headers: Record<string, string> = {
+        apikey: params.apiKey,
+        Authorization: `Bearer ${params.apiKey}`,
+      }
+
+      if (params.contentType) {
+        headers['Content-Type'] = params.contentType
+      }
+
+      if (params.upsert) {
+        headers['x-upsert'] = 'true'
+      }
+
+      return headers
+    },
+    body: (params) => {
+      // Return the file content wrapped in an object
+      // The actual upload will need to handle this appropriately
+      return {
+        content: params.fileContent,
+      }
+    },
+  },
+
+  transformResponse: async (response: Response) => {
+    let data
+    try {
+      data = await response.json()
+    } catch (parseError) {
+      throw new Error(`Failed to parse Supabase storage upload response: ${parseError}`)
+    }
+
+    return {
+      success: true,
+      output: {
+        message: 'Successfully uploaded file to storage',
+        results: data,
+      },
+      error: undefined,
+    }
+  },

   outputs: {
     message: { type: 'string', description: 'Operation status message' },
     results: {
       type: 'object',
-      description: 'Upload result including file path, bucket, and public URL',
+      description: 'Upload result including file path and metadata',
     },
   },
 }
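The tool now targets Supabase Storage's object endpoint directly rather than an internal proxy route. A hedged usage sketch mirroring the URL and headers the tool constructs above (project ref, key, and bucket are placeholders; note the Storage API generally expects the raw file bytes as the request body, which the tool's own body comment flags as a point to handle carefully):

```ts
// Usage sketch: direct upload to the Supabase Storage object endpoint.
async function uploadToSupabaseStorage(
  projectId: string,
  apiKey: string,
  bucket: string,
  path: string,
  content: string | Blob,
  contentType = 'text/plain'
): Promise<unknown> {
  const url = `https://${projectId}.supabase.co/storage/v1/object/${bucket}/${path}`
  const response = await fetch(url, {
    method: 'POST',
    headers: {
      apikey: apiKey,
      Authorization: `Bearer ${apiKey}`,
      'Content-Type': contentType,
      'x-upsert': 'true', // overwrite if the object already exists
    },
    body: content, // raw bytes or text, not a JSON wrapper
  })
  if (!response.ok) throw new Error(`Upload failed: ${response.status}`)
  return response.json()
}
```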

View File

@@ -136,7 +136,7 @@ export interface SupabaseStorageUploadParams {
   bucket: string
   fileName: string
   path?: string
-  fileData: any // UserFile object (basic mode) or string (advanced mode: base64/plain text)
+  fileContent: string
   contentType?: string
   upsert?: boolean
 }

View File

@@ -52,7 +52,7 @@ services:
     deploy:
       resources:
         limits:
-          memory: 1G
+          memory: 8G
     healthcheck:
       test: ['CMD', 'wget', '--spider', '--quiet', 'http://127.0.0.1:3002/health']
       interval: 90s

View File

@@ -56,7 +56,7 @@ services:
     deploy:
       resources:
         limits:
-          memory: 1G
+          memory: 8G
     healthcheck:
       test: ['CMD', 'wget', '--spider', '--quiet', 'http://127.0.0.1:3002/health']
       interval: 90s

View File

@@ -42,7 +42,7 @@ services:
     deploy:
       resources:
         limits:
-          memory: 1G
+          memory: 4G
     environment:
       - DATABASE_URL=postgresql://${POSTGRES_USER:-postgres}:${POSTGRES_PASSWORD:-postgres}@db:5432/${POSTGRES_DB:-simstudio}
       - NEXT_PUBLIC_APP_URL=${NEXT_PUBLIC_APP_URL:-http://localhost:3000}

View File

@@ -13,10 +13,10 @@ app:
   resources:
     limits:
-      memory: "8Gi"
+      memory: "6Gi"
       cpu: "2000m"
     requests:
-      memory: "6Gi"
+      memory: "4Gi"
       cpu: "1000m"

 # Production URLs (REQUIRED - update with your actual domain names)
@@ -52,11 +52,11 @@ realtime:
   resources:
     limits:
-      memory: "1Gi"
-      cpu: "500m"
+      memory: "4Gi"
+      cpu: "1000m"
     requests:
-      memory: "512Mi"
-      cpu: "250m"
+      memory: "2Gi"
+      cpu: "500m"
   env:
     NEXT_PUBLIC_APP_URL: "https://sim.acme.ai"

View File

@@ -29,10 +29,10 @@ app:
   # Resource limits and requests
   resources:
     limits:
-      memory: "8Gi"
+      memory: "4Gi"
       cpu: "2000m"
     requests:
-      memory: "4Gi"
+      memory: "2Gi"
       cpu: "1000m"

 # Node selector for pod scheduling (leave empty to allow scheduling on any node)
@@ -245,11 +245,11 @@ realtime:
   # Resource limits and requests
   resources:
     limits:
+      memory: "2Gi"
+      cpu: "1000m"
+    requests:
       memory: "1Gi"
       cpu: "500m"
-    requests:
-      memory: "512Mi"
-      cpu: "250m"

   # Node selector for pod scheduling (leave empty to allow scheduling on any node)
   nodeSelector: {}