diff --git a/.devcontainer/docker-compose.yml b/.devcontainer/docker-compose.yml
index c107e30d3..012777093 100644
--- a/.devcontainer/docker-compose.yml
+++ b/.devcontainer/docker-compose.yml
@@ -44,7 +44,7 @@ services:
deploy:
resources:
limits:
- memory: 4G
+ memory: 1G
environment:
- NODE_ENV=development
- DATABASE_URL=postgresql://postgres:postgres@db:5432/simstudio
diff --git a/apps/docs/components/ui/action-media.tsx b/apps/docs/components/ui/action-media.tsx
new file mode 100644
index 000000000..13c906368
--- /dev/null
+++ b/apps/docs/components/ui/action-media.tsx
@@ -0,0 +1,40 @@
+'use client'
+
+import { getAssetUrl } from '@/lib/utils'
+
+interface ActionImageProps {
+ src: string
+ alt: string
+}
+
+interface ActionVideoProps {
+ src: string
+ alt: string
+}
+
+export function ActionImage({ src, alt }: ActionImageProps) {
+ const resolvedSrc = getAssetUrl(src.startsWith('/') ? src.slice(1) : src)
+
+ return (
+ <img src={resolvedSrc} alt={alt} loading="lazy" />
+ )
+}
+
+export function ActionVideo({ src, alt }: ActionVideoProps) {
+ const resolvedSrc = getAssetUrl(src.startsWith('/') ? src.slice(1) : src)
+
+ return (
+ <video src={resolvedSrc} aria-label={alt} autoPlay loop muted playsInline />
+ )
+}
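For context, a hedged sketch of how these components would be consumed from a docs MDX page. The PNG path below is an asset added in this PR; the video path, component props, and wrapper are illustrative assumptions, not code from this change:

```tsx
// Hypothetical usage of the new media components in a docs page.
// The image path exists in this PR; the video path is illustrative only.
import { ActionImage, ActionVideo } from '@/components/ui/action-media'

export function PreviewCellExample() {
  return (
    <>
      <ActionImage src="/static/quick-reference/run-workflow.png" alt="Run workflow" />
      <ActionVideo src="/static/quick-reference/create-workflow.mp4" alt="Create a workflow" />
    </>
  )
}
```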
diff --git a/apps/docs/content/docs/de/self-hosting/index.mdx b/apps/docs/content/docs/de/self-hosting/index.mdx
index 7a5faa4d3..1ad498091 100644
--- a/apps/docs/content/docs/de/self-hosting/index.mdx
+++ b/apps/docs/content/docs/de/self-hosting/index.mdx
@@ -10,12 +10,20 @@ Stellen Sie Sim auf Ihrer eigenen Infrastruktur mit Docker oder Kubernetes berei
## Anforderungen
-| Ressource | Minimum | Empfohlen |
-|----------|---------|-------------|
-| CPU | 2 Kerne | 4+ Kerne |
-| RAM | 12 GB | 16+ GB |
-| Speicher | 20 GB SSD | 50+ GB SSD |
-| Docker | 20.10+ | Neueste Version |
+| Ressource | Klein | Standard | Produktion |
+|----------|-------|----------|------------|
+| CPU | 2 Kerne | 4 Kerne | 8+ Kerne |
+| RAM | 12 GB | 16 GB | 32+ GB |
+| Speicher | 20 GB SSD | 50 GB SSD | 100+ GB SSD |
+| Docker | 20.10+ | 20.10+ | Neueste Version |
+
+**Klein**: Entwicklung, Tests, Einzelnutzer (1-5 Nutzer)
+**Standard**: Teams (5-50 Nutzer), moderate Arbeitslasten
+**Produktion**: Große Teams (50+ Nutzer), Hochverfügbarkeit, intensive Workflow-Ausführung
+
+
+Die Ressourcenanforderungen werden durch Workflow-Ausführung (isolated-vm Sandboxing), Dateiverarbeitung (In-Memory-Dokumentenparsing) und Vektoroperationen (pgvector) bestimmt. Arbeitsspeicher ist typischerweise der limitierende Faktor, nicht CPU. Produktionsdaten zeigen, dass die Hauptanwendung durchschnittlich 4-8 GB und bei hoher Last bis zu 12 GB benötigt.
+
## Schnellstart
diff --git a/apps/docs/content/docs/en/quick-reference/index.mdx b/apps/docs/content/docs/en/quick-reference/index.mdx
index 02126a624..e0996d0b1 100644
--- a/apps/docs/content/docs/en/quick-reference/index.mdx
+++ b/apps/docs/content/docs/en/quick-reference/index.mdx
@@ -4,6 +4,7 @@ description: Essential actions for navigating and using the Sim workflow editor
---
import { Callout } from 'fumadocs-ui/components/callout'
+import { ActionImage, ActionVideo } from '@/components/ui/action-media'
A quick lookup for everyday actions in the Sim workflow editor. For keyboard shortcuts, see [Keyboard Shortcuts](/keyboard-shortcuts).
@@ -13,124 +14,362 @@ A quick lookup for everyday actions in the Sim workflow editor. For keyboard sho
## Workspaces
-| Action | How |
-|--------|-----|
-| Create a workspace | Click workspace dropdown in sidebar → **New Workspace** |
-| Rename a workspace | Workspace settings → Edit name |
-| Switch workspaces | Click workspace dropdown in sidebar → Select workspace |
-| Invite team members | Workspace settings → **Team** → **Invite** |
+
+| Action | How | Preview |
+|--------|-----|---------|
+| Create a workspace | Click workspace dropdown → **New Workspace** | |
+| Switch workspaces | Click workspace dropdown → Select workspace | |
+| Invite team members | Sidebar → **Invite** | |
+| Rename a workspace | Right-click workspace → **Rename** | |
+| Duplicate a workspace | Right-click workspace → **Duplicate** | |
+| Export a workspace | Right-click workspace → **Export** | |
+| Delete a workspace | Right-click workspace → **Delete** | |
+
## Workflows
-| Action | How |
-|--------|-----|
-| Create a workflow | Click **New Workflow** button or `Mod+Shift+A` |
-| Rename a workflow | Double-click workflow name in sidebar, or right-click → **Rename** |
-| Duplicate a workflow | Right-click workflow → **Duplicate** |
-| Reorder workflows | Drag workflow up/down in the sidebar list |
-| Import a workflow | Sidebar menu → **Import** → Select file |
-| Create a folder | Right-click in sidebar → **New Folder** |
-| Rename a folder | Right-click folder → **Rename** |
-| Delete a folder | Right-click folder → **Delete** |
-| Collapse/expand folder | Click folder arrow, or double-click folder |
-| Move workflow to folder | Drag workflow onto folder in sidebar |
-| Delete a workflow | Right-click workflow → **Delete** |
-| Export a workflow | Right-click workflow → **Export** |
-| Assign workflow color | Right-click workflow → **Change Color** |
-| Multi-select workflows | `Mod+Click` or `Shift+Click` workflows in sidebar |
-| Open in new tab | Right-click workflow → **Open in New Tab** |
+
+| Action | How | Preview |
+|--------|-----|---------|
+| Create a workflow | Click **+** button in sidebar | |
+| Reorder / move workflows | Drag workflow up/down or onto a folder | |
+| Import a workflow | Click import button in sidebar → Select file | |
+| Multi-select workflows | `Mod+Click` or `Shift+Click` workflows in sidebar | |
+| Open in new tab | Right-click workflow → **Open in New Tab** | |
+| Rename a workflow | Right-click workflow → **Rename** | |
+| Assign workflow color | Right-click workflow → **Change Color** | |
+| Duplicate a workflow | Right-click workflow → **Duplicate** | |
+| Export a workflow | Right-click workflow → **Export** | |
+| Delete a workflow | Right-click workflow → **Delete** | |
+| Rename a folder | Right-click folder → **Rename** | |
+| Create workflow in folder | Right-click folder → **Create workflow** | |
+| Create folder in folder | Right-click folder → **Create folder** | |
+| Duplicate a folder | Right-click folder → **Duplicate** | |
+| Export a folder | Right-click folder → **Export** | |
+| Delete a folder | Right-click folder → **Delete** | |
+
## Blocks
-| Action | How |
-|--------|-----|
-| Add a block | Drag from Toolbar panel, or right-click canvas → **Add Block** |
-| Select a block | Click on the block |
-| Multi-select blocks | `Mod+Click` additional blocks, or right-drag to draw selection box |
-| Move blocks | Drag selected block(s) to new position |
-| Copy blocks | `Mod+C` with blocks selected |
-| Paste blocks | `Mod+V` to paste copied blocks |
-| Duplicate blocks | Right-click → **Duplicate** |
-| Delete blocks | `Delete` or `Backspace` key, or right-click → **Delete** |
-| Rename a block | Click block name in header, or edit in the Editor panel |
-| Enable/Disable a block | Right-click → **Enable/Disable** |
-| Toggle handle orientation | Right-click → **Toggle Handles** |
-| Toggle trigger mode | Right-click trigger block → **Toggle Trigger Mode** |
-| Configure a block | Select block → use Editor panel on right |
+
+| Action | How | Preview |
+|--------|-----|---------|
+| Add a block | Drag from Toolbar panel, or right-click canvas → **Add Block** | |
+| Multi-select blocks | `Mod+Click` additional blocks, or shift-drag to draw selection box | |
+| Copy blocks | `Mod+C` with blocks selected | |
+| Paste blocks | `Mod+V` to paste copied blocks | |
+| Duplicate blocks | Right-click → **Duplicate** | |
+| Delete blocks | `Delete` or `Backspace` key, or right-click → **Delete** | |
+| Rename a block | Click block name in header, or edit in the Editor panel | |
+| Enable/Disable a block | Right-click → **Enable/Disable** | |
+| Toggle handle orientation | Right-click → **Toggle Handles** | |
+| Configure a block | Select block → use Editor panel on right | |
+
## Connections
-| Action | How |
-|--------|-----|
-| Create a connection | Drag from output handle to input handle |
-| Delete a connection | Click edge to select → `Delete` key |
-| Use output in another block | Drag connection tag into input field |
-
-## Canvas Navigation
-
-| Action | How |
-|--------|-----|
-| Pan/move canvas | Left-drag on empty space, or scroll/trackpad |
-| Zoom in/out | Scroll wheel or pinch gesture |
-| Auto-layout | `Shift+L` |
-| Draw selection box | Right-drag on empty canvas area |
+
+| Action | How | Preview |
+|--------|-----|---------|
+| Create a connection | Drag from output handle to input handle | |
+| Delete a connection | Click edge to select → `Delete` key | |
+| Use output in another block | Drag connection tag into input field | |
+
## Panels & Views
-| Action | How |
-|--------|-----|
-| Open Copilot tab | Press `C` or click Copilot tab |
-| Open Toolbar tab | Press `T` or click Toolbar tab |
-| Open Editor tab | Press `E` or click Editor tab |
-| Search toolbar | `Mod+F` |
-| Toggle advanced mode | Click toggle button on input fields |
-| Resize panels | Drag panel edge |
-| Collapse/expand sidebar | Click collapse button on sidebar |
+
+| Action | How | Preview |
+|--------|-----|---------|
+| Search toolbar | `Mod+F` | |
+| Search everything | `Mod+K` | |
+| Toggle manual mode | Click toggle button to switch between manual and selector | |
+| Collapse/expand sidebar | Click collapse button on sidebar | |
+
## Running & Testing
-| Action | How |
-|--------|-----|
-| Run workflow | Click Play button or `Mod+Enter` |
-| Stop workflow | Click Stop button or `Mod+Enter` while running |
-| Test with chat | Use Chat panel on the right side |
-| Select output to view | Click dropdown in Chat panel → Select block output |
-| Clear chat history | Click clear button in Chat panel |
-| View execution logs | Open terminal panel at bottom, or `Mod+L` |
-| Filter logs by block | Click block filter in terminal |
-| Filter logs by status | Click status filter in terminal |
-| Search logs | Use search field in terminal |
-| Copy log entry | Right-click log entry → **Copy** |
-| Clear terminal | `Mod+D` |
+
+| Action | How | Preview |
+|--------|-----|---------|
+| Run workflow | Click Run Workflow button or `Mod+Enter` | |
+| Stop workflow | Click Stop button or `Mod+Enter` while running | |
+| Test with chat | Use Chat panel on the right side | |
+| Select output to view | Click dropdown in Chat panel → Select block output | |
+| Clear chat history | Click clear button in Chat panel | |
+| View execution logs | Open terminal panel at bottom, or `Mod+L` | |
+| Filter logs by block or status | Click block filter in terminal, or right-click log entry → **Filter by Block** or **Filter by Status** | |
+| Search logs | Use search field in terminal, or right-click log entry → **Search** | |
+| Copy log entry | Clipboard icon, or right-click log entry → **Copy** | |
+| Clear terminal | Trash icon or `Mod+D` | |
+
## Deployment
-| Action | How |
-|--------|-----|
-| Deploy a workflow | Click **Deploy** button in Deploy tab |
-| Update deployment | Click **Update** when changes are detected |
-| View deployment status | Check status indicator (Live/Update/Deploy) in Deploy tab |
-| Revert deployment | Access previous versions in Deploy tab |
-| Copy webhook URL | Deploy tab → Copy webhook URL |
-| Copy API endpoint | Deploy tab → Copy API endpoint URL |
-| Set up a schedule | Add Schedule trigger block → Configure interval |
+
+| Action | How | Preview |
+|--------|-----|---------|
+| Deploy a workflow | Click **Deploy** button in panel | |
+| Update deployment | Click **Update** when changes are detected | |
+| View deployment status | Check status indicator (Live/Update/Deploy) in Deploy tab | |
+| Revert deployment | Access previous versions in Deploy tab → **Promote to live** | |
+| Copy API endpoint | Deploy tab → Copy API endpoint URL | |
+
## Variables
-| Action | How |
-|--------|-----|
-| Add workflow variable | Variables tab → **Add Variable** |
-| Edit workflow variable | Variables tab → Click variable to edit |
-| Delete workflow variable | Variables tab → Click delete icon on variable |
-| Add environment variable | Settings → **Environment Variables** → **Add** |
-| Reference a variable | Use `{{variableName}}` syntax in block inputs |
-
-## Credentials
-
-| Action | How |
-|--------|-----|
-| Add API key | Block credential field → **Add Credential** → Enter API key |
-| Connect OAuth account | Block credential field → **Connect** → Authorize with provider |
-| Manage credentials | Settings → **Credentials** |
-| Remove credential | Settings → **Credentials** → Delete credential |
+
+| Action | How | Preview |
+|--------|-----|---------|
+| Add / Edit / Delete workflow variable | Panel → Variables → **Add Variable**, click to edit, or delete icon | |
+| Add environment variable | Settings → **Environment Variables** → **Add** | |
+| Reference a workflow variable | Use `<variable.name>` syntax in block inputs | |
+| Reference an environment variable | Use `{{ENV_VAR}}` syntax in block inputs | |
+
diff --git a/apps/docs/content/docs/en/self-hosting/index.mdx b/apps/docs/content/docs/en/self-hosting/index.mdx
index 87ab39b9a..bbdef7e4e 100644
--- a/apps/docs/content/docs/en/self-hosting/index.mdx
+++ b/apps/docs/content/docs/en/self-hosting/index.mdx
@@ -16,12 +16,20 @@ Deploy Sim on your own infrastructure with Docker or Kubernetes.
## Requirements
-| Resource | Minimum | Recommended |
-|----------|---------|-------------|
-| CPU | 2 cores | 4+ cores |
-| RAM | 12 GB | 16+ GB |
-| Storage | 20 GB SSD | 50+ GB SSD |
-| Docker | 20.10+ | Latest |
+| Resource | Small | Standard | Production |
+|----------|-------|----------|------------|
+| CPU | 2 cores | 4 cores | 8+ cores |
+| RAM | 12 GB | 16 GB | 32+ GB |
+| Storage | 20 GB SSD | 50 GB SSD | 100+ GB SSD |
+| Docker | 20.10+ | 20.10+ | Latest |
+
+**Small**: Development, testing, single user (1-5 users)
+**Standard**: Teams (5-50 users), moderate workloads
+**Production**: Large teams (50+ users), high availability, heavy workflow execution
+
+
+Resource requirements are driven by workflow execution (isolated-vm sandboxing), file processing (in-memory document parsing), and vector operations (pgvector). Memory is typically the constraining factor rather than CPU. Production telemetry shows the main app uses 4-8 GB average with peaks up to 12 GB under heavy load.
+
## Quick Start
diff --git a/apps/docs/content/docs/es/self-hosting/index.mdx b/apps/docs/content/docs/es/self-hosting/index.mdx
index a511b1963..2b9c5c1ef 100644
--- a/apps/docs/content/docs/es/self-hosting/index.mdx
+++ b/apps/docs/content/docs/es/self-hosting/index.mdx
@@ -10,12 +10,20 @@ Despliega Sim en tu propia infraestructura con Docker o Kubernetes.
## Requisitos
-| Recurso | Mínimo | Recomendado |
-|----------|---------|-------------|
-| CPU | 2 núcleos | 4+ núcleos |
-| RAM | 12 GB | 16+ GB |
-| Almacenamiento | 20 GB SSD | 50+ GB SSD |
-| Docker | 20.10+ | Última versión |
+| Recurso | Pequeño | Estándar | Producción |
+|----------|---------|----------|------------|
+| CPU | 2 núcleos | 4 núcleos | 8+ núcleos |
+| RAM | 12 GB | 16 GB | 32+ GB |
+| Almacenamiento | 20 GB SSD | 50 GB SSD | 100+ GB SSD |
+| Docker | 20.10+ | 20.10+ | Última versión |
+
+**Pequeño**: Desarrollo, pruebas, usuario único (1-5 usuarios)
+**Estándar**: Equipos (5-50 usuarios), cargas de trabajo moderadas
+**Producción**: Equipos grandes (50+ usuarios), alta disponibilidad, ejecución intensiva de workflows
+
+
+Los requisitos de recursos están determinados por la ejecución de workflows (sandboxing isolated-vm), procesamiento de archivos (análisis de documentos en memoria) y operaciones vectoriales (pgvector). La memoria suele ser el factor limitante, no la CPU. La telemetría de producción muestra que la aplicación principal usa 4-8 GB en promedio con picos de hasta 12 GB bajo carga pesada.
+
## Inicio rápido
diff --git a/apps/docs/content/docs/fr/self-hosting/index.mdx b/apps/docs/content/docs/fr/self-hosting/index.mdx
index bb52a8aa8..cfd0d49f8 100644
--- a/apps/docs/content/docs/fr/self-hosting/index.mdx
+++ b/apps/docs/content/docs/fr/self-hosting/index.mdx
@@ -10,12 +10,20 @@ Déployez Sim sur votre propre infrastructure avec Docker ou Kubernetes.
## Prérequis
-| Ressource | Minimum | Recommandé |
-|----------|---------|-------------|
-| CPU | 2 cœurs | 4+ cœurs |
-| RAM | 12 Go | 16+ Go |
-| Stockage | 20 Go SSD | 50+ Go SSD |
-| Docker | 20.10+ | Dernière version |
+| Ressource | Petit | Standard | Production |
+|----------|-------|----------|------------|
+| CPU | 2 cœurs | 4 cœurs | 8+ cœurs |
+| RAM | 12 Go | 16 Go | 32+ Go |
+| Stockage | 20 Go SSD | 50 Go SSD | 100+ Go SSD |
+| Docker | 20.10+ | 20.10+ | Dernière version |
+
+**Petit** : Développement, tests, utilisateur unique (1-5 utilisateurs)
+**Standard** : Équipes (5-50 utilisateurs), charges de travail modérées
+**Production** : Grandes équipes (50+ utilisateurs), haute disponibilité, exécution intensive de workflows
+
+
+Les besoins en ressources sont déterminés par l'exécution des workflows (sandboxing isolated-vm), le traitement des fichiers (analyse de documents en mémoire) et les opérations vectorielles (pgvector). La mémoire est généralement le facteur limitant, pas le CPU. La télémétrie de production montre que l'application principale utilise 4-8 Go en moyenne avec des pics jusqu'à 12 Go sous forte charge.
+
## Démarrage rapide
diff --git a/apps/docs/content/docs/ja/self-hosting/index.mdx b/apps/docs/content/docs/ja/self-hosting/index.mdx
index 69ae41e6c..18aea7e7d 100644
--- a/apps/docs/content/docs/ja/self-hosting/index.mdx
+++ b/apps/docs/content/docs/ja/self-hosting/index.mdx
@@ -10,12 +10,20 @@ DockerまたはKubernetesを使用して、自社のインフラストラクチ
## 要件
-| リソース | 最小 | 推奨 |
-|----------|---------|-------------|
-| CPU | 2コア | 4+コア |
-| RAM | 12 GB | 16+ GB |
-| ストレージ | 20 GB SSD | 50+ GB SSD |
-| Docker | 20.10+ | 最新版 |
+| リソース | スモール | スタンダード | プロダクション |
+|----------|---------|-------------|----------------|
+| CPU | 2コア | 4コア | 8+コア |
+| RAM | 12 GB | 16 GB | 32+ GB |
+| ストレージ | 20 GB SSD | 50 GB SSD | 100+ GB SSD |
+| Docker | 20.10+ | 20.10+ | 最新版 |
+
+**スモール**: 開発、テスト、シングルユーザー(1-5ユーザー)
+**スタンダード**: チーム(5-50ユーザー)、中程度のワークロード
+**プロダクション**: 大規模チーム(50+ユーザー)、高可用性、高負荷ワークフロー実行
+
+
+リソース要件は、ワークフロー実行(isolated-vmサンドボックス)、ファイル処理(メモリ内ドキュメント解析)、ベクトル演算(pgvector)によって決まります。CPUよりもメモリが制約要因となることが多いです。本番環境のテレメトリによると、メインアプリは平均4-8 GB、高負荷時は最大12 GBを使用します。
+
## クイックスタート
diff --git a/apps/docs/content/docs/zh/self-hosting/index.mdx b/apps/docs/content/docs/zh/self-hosting/index.mdx
index a2a4f4e50..de56839a1 100644
--- a/apps/docs/content/docs/zh/self-hosting/index.mdx
+++ b/apps/docs/content/docs/zh/self-hosting/index.mdx
@@ -10,12 +10,20 @@ import { Callout } from 'fumadocs-ui/components/callout'
## 要求
-| 资源 | 最低要求 | 推荐配置 |
-|----------|---------|-------------|
-| CPU | 2 核 | 4 核及以上 |
-| 内存 | 12 GB | 16 GB 及以上 |
-| 存储 | 20 GB SSD | 50 GB 及以上 SSD |
-| Docker | 20.10+ | 最新版本 |
+| 资源 | 小型 | 标准 | 生产环境 |
+|----------|------|------|----------|
+| CPU | 2 核 | 4 核 | 8+ 核 |
+| 内存 | 12 GB | 16 GB | 32+ GB |
+| 存储 | 20 GB SSD | 50 GB SSD | 100+ GB SSD |
+| Docker | 20.10+ | 20.10+ | 最新版本 |
+
+**小型**: 开发、测试、单用户(1-5 用户)
+**标准**: 团队(5-50 用户)、中等工作负载
+**生产环境**: 大型团队(50+ 用户)、高可用性、密集工作流执行
+
+
+资源需求由工作流执行(isolated-vm 沙箱)、文件处理(内存中文档解析)和向量运算(pgvector)决定。内存通常是限制因素,而不是 CPU。生产遥测数据显示,主应用平均使用 4-8 GB,高负载时峰值可达 12 GB。
+
## 快速开始
diff --git a/apps/docs/public/static/quick-reference/add-env-variable.png b/apps/docs/public/static/quick-reference/add-env-variable.png
new file mode 100644
index 000000000..d9cd8e53d
Binary files /dev/null and b/apps/docs/public/static/quick-reference/add-env-variable.png differ
diff --git a/apps/docs/public/static/quick-reference/clear-chat.png b/apps/docs/public/static/quick-reference/clear-chat.png
new file mode 100644
index 000000000..ff28f5597
Binary files /dev/null and b/apps/docs/public/static/quick-reference/clear-chat.png differ
diff --git a/apps/docs/public/static/quick-reference/clear-terminal.png b/apps/docs/public/static/quick-reference/clear-terminal.png
new file mode 100644
index 000000000..8e8ea3ee5
Binary files /dev/null and b/apps/docs/public/static/quick-reference/clear-terminal.png differ
diff --git a/apps/docs/public/static/quick-reference/copy-api.png b/apps/docs/public/static/quick-reference/copy-api.png
new file mode 100644
index 000000000..72e80075c
Binary files /dev/null and b/apps/docs/public/static/quick-reference/copy-api.png differ
diff --git a/apps/docs/public/static/quick-reference/copy-log.png b/apps/docs/public/static/quick-reference/copy-log.png
new file mode 100644
index 000000000..45bef1b1d
Binary files /dev/null and b/apps/docs/public/static/quick-reference/copy-log.png differ
diff --git a/apps/docs/public/static/quick-reference/create-workflow.png b/apps/docs/public/static/quick-reference/create-workflow.png
new file mode 100644
index 000000000..7168f2ca7
Binary files /dev/null and b/apps/docs/public/static/quick-reference/create-workflow.png differ
diff --git a/apps/docs/public/static/quick-reference/delete-block.png b/apps/docs/public/static/quick-reference/delete-block.png
new file mode 100644
index 000000000..ab2a6a61c
Binary files /dev/null and b/apps/docs/public/static/quick-reference/delete-block.png differ
diff --git a/apps/docs/public/static/quick-reference/deploy.png b/apps/docs/public/static/quick-reference/deploy.png
new file mode 100644
index 000000000..4a7b11ff7
Binary files /dev/null and b/apps/docs/public/static/quick-reference/deploy.png differ
diff --git a/apps/docs/public/static/quick-reference/disable-block.png b/apps/docs/public/static/quick-reference/disable-block.png
new file mode 100644
index 000000000..ab34cb5c3
Binary files /dev/null and b/apps/docs/public/static/quick-reference/disable-block.png differ
diff --git a/apps/docs/public/static/quick-reference/env-variable-reference.png b/apps/docs/public/static/quick-reference/env-variable-reference.png
new file mode 100644
index 000000000..8760f9378
Binary files /dev/null and b/apps/docs/public/static/quick-reference/env-variable-reference.png differ
diff --git a/apps/docs/public/static/quick-reference/filter-block.png b/apps/docs/public/static/quick-reference/filter-block.png
new file mode 100644
index 000000000..b6a936bb0
Binary files /dev/null and b/apps/docs/public/static/quick-reference/filter-block.png differ
diff --git a/apps/docs/public/static/quick-reference/folder-context-menu.png b/apps/docs/public/static/quick-reference/folder-context-menu.png
new file mode 100644
index 000000000..52703b2d9
Binary files /dev/null and b/apps/docs/public/static/quick-reference/folder-context-menu.png differ
diff --git a/apps/docs/public/static/quick-reference/import-workflow.png b/apps/docs/public/static/quick-reference/import-workflow.png
new file mode 100644
index 000000000..eb33bc692
Binary files /dev/null and b/apps/docs/public/static/quick-reference/import-workflow.png differ
diff --git a/apps/docs/public/static/quick-reference/output-select.png b/apps/docs/public/static/quick-reference/output-select.png
new file mode 100644
index 000000000..20ba89f4d
Binary files /dev/null and b/apps/docs/public/static/quick-reference/output-select.png differ
diff --git a/apps/docs/public/static/quick-reference/promote-deployment.png b/apps/docs/public/static/quick-reference/promote-deployment.png
new file mode 100644
index 000000000..7c89d25a4
Binary files /dev/null and b/apps/docs/public/static/quick-reference/promote-deployment.png differ
diff --git a/apps/docs/public/static/quick-reference/rename-workflow.png b/apps/docs/public/static/quick-reference/rename-workflow.png
new file mode 100644
index 000000000..3dd5bd8c7
Binary files /dev/null and b/apps/docs/public/static/quick-reference/rename-workflow.png differ
diff --git a/apps/docs/public/static/quick-reference/run-workflow.png b/apps/docs/public/static/quick-reference/run-workflow.png
new file mode 100644
index 000000000..bfe4a4c1c
Binary files /dev/null and b/apps/docs/public/static/quick-reference/run-workflow.png differ
diff --git a/apps/docs/public/static/quick-reference/search-everything.png b/apps/docs/public/static/quick-reference/search-everything.png
new file mode 100644
index 000000000..bb401ccc6
Binary files /dev/null and b/apps/docs/public/static/quick-reference/search-everything.png differ
diff --git a/apps/docs/public/static/quick-reference/stop-workflow.png b/apps/docs/public/static/quick-reference/stop-workflow.png
new file mode 100644
index 000000000..d00598be0
Binary files /dev/null and b/apps/docs/public/static/quick-reference/stop-workflow.png differ
diff --git a/apps/docs/public/static/quick-reference/terminal-search.png b/apps/docs/public/static/quick-reference/terminal-search.png
new file mode 100644
index 000000000..0e7df230d
Binary files /dev/null and b/apps/docs/public/static/quick-reference/terminal-search.png differ
diff --git a/apps/docs/public/static/quick-reference/terminal.png b/apps/docs/public/static/quick-reference/terminal.png
new file mode 100644
index 000000000..920b2c81c
Binary files /dev/null and b/apps/docs/public/static/quick-reference/terminal.png differ
diff --git a/apps/docs/public/static/quick-reference/test-chat.png b/apps/docs/public/static/quick-reference/test-chat.png
new file mode 100644
index 000000000..0178ac876
Binary files /dev/null and b/apps/docs/public/static/quick-reference/test-chat.png differ
diff --git a/apps/docs/public/static/quick-reference/toggle-manual-mode.png b/apps/docs/public/static/quick-reference/toggle-manual-mode.png
new file mode 100644
index 000000000..1d107723d
Binary files /dev/null and b/apps/docs/public/static/quick-reference/toggle-manual-mode.png differ
diff --git a/apps/docs/public/static/quick-reference/update-deployment.png b/apps/docs/public/static/quick-reference/update-deployment.png
new file mode 100644
index 000000000..494daee28
Binary files /dev/null and b/apps/docs/public/static/quick-reference/update-deployment.png differ
diff --git a/apps/docs/public/static/quick-reference/variable-reference.png b/apps/docs/public/static/quick-reference/variable-reference.png
new file mode 100644
index 000000000..6d404ac04
Binary files /dev/null and b/apps/docs/public/static/quick-reference/variable-reference.png differ
diff --git a/apps/docs/public/static/quick-reference/variables.png b/apps/docs/public/static/quick-reference/variables.png
new file mode 100644
index 000000000..aecbc400a
Binary files /dev/null and b/apps/docs/public/static/quick-reference/variables.png differ
diff --git a/apps/docs/public/static/quick-reference/view-deployment.png b/apps/docs/public/static/quick-reference/view-deployment.png
new file mode 100644
index 000000000..b9c2f81cf
Binary files /dev/null and b/apps/docs/public/static/quick-reference/view-deployment.png differ
diff --git a/apps/docs/public/static/quick-reference/workflow-context-menu.png b/apps/docs/public/static/quick-reference/workflow-context-menu.png
new file mode 100644
index 000000000..a77f1f316
Binary files /dev/null and b/apps/docs/public/static/quick-reference/workflow-context-menu.png differ
diff --git a/apps/docs/public/static/quick-reference/workspace-context-menu.png b/apps/docs/public/static/quick-reference/workspace-context-menu.png
new file mode 100644
index 000000000..bf5ce4a70
Binary files /dev/null and b/apps/docs/public/static/quick-reference/workspace-context-menu.png differ
diff --git a/apps/sim/app/api/knowledge/search/utils.test.ts b/apps/sim/app/api/knowledge/search/utils.test.ts
index 279f7e56e..6224e046e 100644
--- a/apps/sim/app/api/knowledge/search/utils.test.ts
+++ b/apps/sim/app/api/knowledge/search/utils.test.ts
@@ -408,6 +408,7 @@ describe('Knowledge Search Utils', () => {
input: ['test query'],
model: 'text-embedding-3-small',
encoding_format: 'float',
+ dimensions: 1536,
}),
})
)
diff --git a/apps/sim/app/api/organizations/[id]/workspaces/route.ts b/apps/sim/app/api/organizations/[id]/workspaces/route.ts
deleted file mode 100644
index 6669c8a8b..000000000
--- a/apps/sim/app/api/organizations/[id]/workspaces/route.ts
+++ /dev/null
@@ -1,204 +0,0 @@
-import { db } from '@sim/db'
-import { member, permissions, user, workspace } from '@sim/db/schema'
-import { createLogger } from '@sim/logger'
-import { and, eq, or } from 'drizzle-orm'
-import { type NextRequest, NextResponse } from 'next/server'
-import { getSession } from '@/lib/auth'
-
-const logger = createLogger('OrganizationWorkspacesAPI')
-
-/**
- * GET /api/organizations/[id]/workspaces
- * Get workspaces related to the organization with optional filtering
- * Query parameters:
- * - ?available=true - Only workspaces where user can invite others (admin permissions)
- * - ?member=userId - Workspaces where specific member has access
- */
-export async function GET(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
- try {
- const session = await getSession()
-
- if (!session?.user?.id) {
- return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
- }
-
- const { id: organizationId } = await params
- const url = new URL(request.url)
- const availableOnly = url.searchParams.get('available') === 'true'
- const memberId = url.searchParams.get('member')
-
- // Verify user is a member of this organization
- const memberEntry = await db
- .select()
- .from(member)
- .where(and(eq(member.organizationId, organizationId), eq(member.userId, session.user.id)))
- .limit(1)
-
- if (memberEntry.length === 0) {
- return NextResponse.json(
- {
- error: 'Forbidden - Not a member of this organization',
- },
- { status: 403 }
- )
- }
-
- const userRole = memberEntry[0].role
- const hasAdminAccess = ['owner', 'admin'].includes(userRole)
-
- if (availableOnly) {
- // Get workspaces where user has admin permissions (can invite others)
- const availableWorkspaces = await db
- .select({
- id: workspace.id,
- name: workspace.name,
- ownerId: workspace.ownerId,
- createdAt: workspace.createdAt,
- isOwner: eq(workspace.ownerId, session.user.id),
- permissionType: permissions.permissionType,
- })
- .from(workspace)
- .leftJoin(
- permissions,
- and(
- eq(permissions.entityType, 'workspace'),
- eq(permissions.entityId, workspace.id),
- eq(permissions.userId, session.user.id)
- )
- )
- .where(
- or(
- // User owns the workspace
- eq(workspace.ownerId, session.user.id),
- // User has admin permission on the workspace
- and(
- eq(permissions.userId, session.user.id),
- eq(permissions.entityType, 'workspace'),
- eq(permissions.permissionType, 'admin')
- )
- )
- )
-
- // Filter and format the results
- const workspacesWithInvitePermission = availableWorkspaces
- .filter((workspace) => {
- // Include if user owns the workspace OR has admin permission
- return workspace.isOwner || workspace.permissionType === 'admin'
- })
- .map((workspace) => ({
- id: workspace.id,
- name: workspace.name,
- isOwner: workspace.isOwner,
- canInvite: true, // All returned workspaces have invite permission
- createdAt: workspace.createdAt,
- }))
-
- logger.info('Retrieved available workspaces for organization member', {
- organizationId,
- userId: session.user.id,
- workspaceCount: workspacesWithInvitePermission.length,
- })
-
- return NextResponse.json({
- success: true,
- data: {
- workspaces: workspacesWithInvitePermission,
- totalCount: workspacesWithInvitePermission.length,
- filter: 'available',
- },
- })
- }
-
- if (memberId && hasAdminAccess) {
- // Get workspaces where specific member has access (admin only)
- const memberWorkspaces = await db
- .select({
- id: workspace.id,
- name: workspace.name,
- ownerId: workspace.ownerId,
- isOwner: eq(workspace.ownerId, memberId),
- permissionType: permissions.permissionType,
- createdAt: permissions.createdAt,
- })
- .from(workspace)
- .leftJoin(
- permissions,
- and(
- eq(permissions.entityType, 'workspace'),
- eq(permissions.entityId, workspace.id),
- eq(permissions.userId, memberId)
- )
- )
- .where(
- or(
- // Member owns the workspace
- eq(workspace.ownerId, memberId),
- // Member has permissions on the workspace
- and(eq(permissions.userId, memberId), eq(permissions.entityType, 'workspace'))
- )
- )
-
- const formattedWorkspaces = memberWorkspaces.map((workspace) => ({
- id: workspace.id,
- name: workspace.name,
- isOwner: workspace.isOwner,
- permission: workspace.permissionType,
- joinedAt: workspace.createdAt,
- createdAt: workspace.createdAt,
- }))
-
- return NextResponse.json({
- success: true,
- data: {
- workspaces: formattedWorkspaces,
- totalCount: formattedWorkspaces.length,
- filter: 'member',
- memberId,
- },
- })
- }
-
- // Default: Get all workspaces (basic info only for regular members)
- if (!hasAdminAccess) {
- return NextResponse.json({
- success: true,
- data: {
- workspaces: [],
- totalCount: 0,
- message: 'Workspace access information is only available to organization admins',
- },
- })
- }
-
- // For admins: Get summary of all workspaces
- const allWorkspaces = await db
- .select({
- id: workspace.id,
- name: workspace.name,
- ownerId: workspace.ownerId,
- createdAt: workspace.createdAt,
- ownerName: user.name,
- })
- .from(workspace)
- .leftJoin(user, eq(workspace.ownerId, user.id))
-
- return NextResponse.json({
- success: true,
- data: {
- workspaces: allWorkspaces,
- totalCount: allWorkspaces.length,
- filter: 'all',
- },
- userRole,
- hasAdminAccess,
- })
- } catch (error) {
- logger.error('Failed to get organization workspaces', { error })
- return NextResponse.json(
- {
- error: 'Internal server error',
- },
- { status: 500 }
- )
- }
-}
diff --git a/apps/sim/app/api/tools/supabase/storage-upload/route.ts b/apps/sim/app/api/tools/supabase/storage-upload/route.ts
new file mode 100644
index 000000000..46122fc19
--- /dev/null
+++ b/apps/sim/app/api/tools/supabase/storage-upload/route.ts
@@ -0,0 +1,257 @@
+import { createLogger } from '@sim/logger'
+import { type NextRequest, NextResponse } from 'next/server'
+import { z } from 'zod'
+import { checkInternalAuth } from '@/lib/auth/hybrid'
+import { generateRequestId } from '@/lib/core/utils/request'
+import { processSingleFileToUserFile } from '@/lib/uploads/utils/file-utils'
+import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
+
+export const dynamic = 'force-dynamic'
+
+const logger = createLogger('SupabaseStorageUploadAPI')
+
+const SupabaseStorageUploadSchema = z.object({
+ projectId: z.string().min(1, 'Project ID is required'),
+ apiKey: z.string().min(1, 'API key is required'),
+ bucket: z.string().min(1, 'Bucket name is required'),
+ fileName: z.string().min(1, 'File name is required'),
+ path: z.string().optional().nullable(),
+ fileData: z.any(),
+ contentType: z.string().optional().nullable(),
+ upsert: z.boolean().optional().default(false),
+})
+
+export async function POST(request: NextRequest) {
+ const requestId = generateRequestId()
+
+ try {
+ const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
+
+ if (!authResult.success) {
+ logger.warn(
+ `[${requestId}] Unauthorized Supabase storage upload attempt: ${authResult.error}`
+ )
+ return NextResponse.json(
+ {
+ success: false,
+ error: authResult.error || 'Authentication required',
+ },
+ { status: 401 }
+ )
+ }
+
+ logger.info(
+ `[${requestId}] Authenticated Supabase storage upload request via ${authResult.authType}`,
+ {
+ userId: authResult.userId,
+ }
+ )
+
+ const body = await request.json()
+ const validatedData = SupabaseStorageUploadSchema.parse(body)
+
+ const fileData = validatedData.fileData
+ const isStringInput = typeof fileData === 'string'
+
+ logger.info(`[${requestId}] Uploading to Supabase Storage`, {
+ bucket: validatedData.bucket,
+ fileName: validatedData.fileName,
+ path: validatedData.path,
+ fileDataType: isStringInput ? 'string' : 'object',
+ })
+
+ if (!fileData) {
+ return NextResponse.json(
+ {
+ success: false,
+ error: 'fileData is required',
+ },
+ { status: 400 }
+ )
+ }
+
+ let uploadBody: Buffer
+ let uploadContentType: string | undefined
+
+ if (isStringInput) {
+ let content = fileData as string
+
+ const dataUrlMatch = content.match(/^data:([^;]+);base64,(.+)$/s)
+ if (dataUrlMatch) {
+ const [, mimeType, base64Data] = dataUrlMatch
+ content = base64Data
+ if (!validatedData.contentType) {
+ uploadContentType = mimeType
+ }
+ logger.info(`[${requestId}] Extracted base64 from data URL (MIME: ${mimeType})`)
+ }
+
+ const cleanedContent = content.replace(/[\s\r\n]/g, '')
+ const isLikelyBase64 = /^[A-Za-z0-9+/]*={0,2}$/.test(cleanedContent)
+
+ if (isLikelyBase64 && cleanedContent.length >= 4) {
+ try {
+ uploadBody = Buffer.from(cleanedContent, 'base64')
+
+ const expectedMinSize = Math.floor(cleanedContent.length * 0.7)
+ const expectedMaxSize = Math.ceil(cleanedContent.length * 0.8)
+
+ if (
+ uploadBody.length >= expectedMinSize &&
+ uploadBody.length <= expectedMaxSize &&
+ uploadBody.length > 0
+ ) {
+ logger.info(
+ `[${requestId}] Decoded base64 content: ${cleanedContent.length} chars -> ${uploadBody.length} bytes`
+ )
+ } else {
+ const reEncoded = uploadBody.toString('base64')
+ if (reEncoded !== cleanedContent) {
+ logger.info(
+ `[${requestId}] Content looked like base64 but re-encoding didn't match, using as plain text`
+ )
+ uploadBody = Buffer.from(content, 'utf-8')
+ } else {
+ logger.info(
+ `[${requestId}] Decoded base64 content (verified): ${uploadBody.length} bytes`
+ )
+ }
+ }
+ } catch (decodeError) {
+ logger.info(
+ `[${requestId}] Failed to decode as base64, using as plain text: ${decodeError}`
+ )
+ uploadBody = Buffer.from(content, 'utf-8')
+ }
+ } else {
+ uploadBody = Buffer.from(content, 'utf-8')
+ logger.info(`[${requestId}] Using content as plain text (${uploadBody.length} bytes)`)
+ }
+
+ uploadContentType =
+ uploadContentType || validatedData.contentType || 'application/octet-stream'
+ } else {
+ const rawFile = fileData
+ logger.info(`[${requestId}] Processing file object: ${rawFile.name || 'unknown'}`)
+
+ let userFile
+ try {
+ userFile = processSingleFileToUserFile(rawFile, requestId, logger)
+ } catch (error) {
+ return NextResponse.json(
+ {
+ success: false,
+ error: error instanceof Error ? error.message : 'Failed to process file',
+ },
+ { status: 400 }
+ )
+ }
+
+ const buffer = await downloadFileFromStorage(userFile, requestId, logger)
+
+ uploadBody = buffer
+ uploadContentType = validatedData.contentType || userFile.type || 'application/octet-stream'
+ }
+
+ let fullPath = validatedData.fileName
+ if (validatedData.path) {
+ const folderPath = validatedData.path.endsWith('/')
+ ? validatedData.path
+ : `${validatedData.path}/`
+ fullPath = `${folderPath}${validatedData.fileName}`
+ }
+
+ const supabaseUrl = `https://${validatedData.projectId}.supabase.co/storage/v1/object/${validatedData.bucket}/${fullPath}`
+
+ const headers: Record<string, string> = {
+ apikey: validatedData.apiKey,
+ Authorization: `Bearer ${validatedData.apiKey}`,
+ 'Content-Type': uploadContentType,
+ }
+
+ if (validatedData.upsert) {
+ headers['x-upsert'] = 'true'
+ }
+
+ logger.info(`[${requestId}] Sending to Supabase: ${supabaseUrl}`, {
+ contentType: uploadContentType,
+ bodySize: uploadBody.length,
+ upsert: validatedData.upsert,
+ })
+
+ const response = await fetch(supabaseUrl, {
+ method: 'POST',
+ headers,
+ body: new Uint8Array(uploadBody),
+ })
+
+ if (!response.ok) {
+ const errorText = await response.text()
+ let errorData
+ try {
+ errorData = JSON.parse(errorText)
+ } catch {
+ errorData = { message: errorText }
+ }
+
+ logger.error(`[${requestId}] Supabase Storage upload failed:`, {
+ status: response.status,
+ statusText: response.statusText,
+ error: errorData,
+ })
+
+ return NextResponse.json(
+ {
+ success: false,
+ error: errorData.message || errorData.error || `Upload failed: ${response.statusText}`,
+ details: errorData,
+ },
+ { status: response.status }
+ )
+ }
+
+ const result = await response.json()
+
+ logger.info(`[${requestId}] File uploaded successfully to Supabase Storage`, {
+ bucket: validatedData.bucket,
+ path: fullPath,
+ })
+
+ const publicUrl = `https://${validatedData.projectId}.supabase.co/storage/v1/object/public/${validatedData.bucket}/${fullPath}`
+
+ return NextResponse.json({
+ success: true,
+ output: {
+ message: 'Successfully uploaded file to storage',
+ results: {
+ ...result,
+ path: fullPath,
+ bucket: validatedData.bucket,
+ publicUrl,
+ },
+ },
+ })
+ } catch (error) {
+ if (error instanceof z.ZodError) {
+ logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
+ return NextResponse.json(
+ {
+ success: false,
+ error: 'Invalid request data',
+ details: error.errors,
+ },
+ { status: 400 }
+ )
+ }
+
+ logger.error(`[${requestId}] Error uploading to Supabase Storage:`, error)
+
+ return NextResponse.json(
+ {
+ success: false,
+ error: error instanceof Error ? error.message : 'Internal server error',
+ },
+ { status: 500 }
+ )
+ }
+}
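The string branch above decodes base64 defensively: base64 inflates data by 4/3, so a genuine decode should yield roughly 0.75× the character count, and a result outside the 0.7-0.8 band triggers the re-encode check. A minimal standalone sketch of that heuristic (function name is illustrative, not part of the route):

```ts
// Standalone sketch of the base64 size-ratio check used above.
// A valid base64 string of n chars decodes to ~0.75 * n bytes,
// so a decoded length outside [0.7n, 0.8n] suggests plain text.
function looksLikeDecodedBase64(raw: string): boolean {
  const cleaned = raw.replace(/[\s\r\n]/g, '')
  if (!/^[A-Za-z0-9+/]*={0,2}$/.test(cleaned) || cleaned.length < 4) return false
  const decoded = Buffer.from(cleaned, 'base64')
  return (
    decoded.length > 0 &&
    decoded.length >= Math.floor(cleaned.length * 0.7) &&
    decoded.length <= Math.ceil(cleaned.length * 0.8)
  )
}

console.log(looksLikeDecodedBase64('aGVsbG8gd29ybGQ=')) // true ("hello world")
console.log(looksLikeDecodedBase64('plain text!')) // false (invalid alphabet)
```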
diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/sub-block.tsx b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/sub-block.tsx
index 017a51c0b..d3bf23e40 100644
--- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/sub-block.tsx
+++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/sub-block.tsx
@@ -338,6 +338,11 @@ const arePropsEqual = (prevProps: SubBlockProps, nextProps: SubBlockProps): bool
const configEqual =
prevProps.config.id === nextProps.config.id && prevProps.config.type === nextProps.config.type
+ const canonicalToggleEqual =
+ !!prevProps.canonicalToggle === !!nextProps.canonicalToggle &&
+ prevProps.canonicalToggle?.mode === nextProps.canonicalToggle?.mode &&
+ prevProps.canonicalToggle?.disabled === nextProps.canonicalToggle?.disabled
+
return (
prevProps.blockId === nextProps.blockId &&
configEqual &&
@@ -346,8 +351,7 @@ const arePropsEqual = (prevProps: SubBlockProps, nextProps: SubBlockProps): bool
prevProps.disabled === nextProps.disabled &&
prevProps.fieldDiffStatus === nextProps.fieldDiffStatus &&
prevProps.allowExpandInPreview === nextProps.allowExpandInPreview &&
- prevProps.canonicalToggle?.mode === nextProps.canonicalToggle?.mode &&
- prevProps.canonicalToggle?.disabled === nextProps.canonicalToggle?.disabled
+ canonicalToggleEqual
)
}
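The comparator fix matters because optional chaining hides presence: if one render has `canonicalToggle` undefined and the next has an object whose fields are all undefined, the `?.mode` and `?.disabled` comparisons both pass and the memo wrongly skips a re-render. A tiny illustration (standalone types, not the component's actual props):

```ts
// Why the presence check is needed: both values compare equal
// under optional chaining alone, despite one being undefined.
type Toggle = { mode?: string; disabled?: boolean } | undefined

const prev: Toggle = undefined
const next: Toggle = { mode: undefined, disabled: undefined }

const chainedOnly = prev?.mode === next?.mode && prev?.disabled === next?.disabled
const withPresence = !!prev === !!next && chainedOnly

console.log(chainedOnly) // true  -> memo would skip a needed re-render
console.log(withPresence) // false -> re-render happens
```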
diff --git a/apps/sim/blocks/blocks/a2a.ts b/apps/sim/blocks/blocks/a2a.ts
index a042c55ec..86c98ac9b 100644
--- a/apps/sim/blocks/blocks/a2a.ts
+++ b/apps/sim/blocks/blocks/a2a.ts
@@ -214,15 +214,6 @@ export const A2ABlock: BlockConfig = {
],
config: {
tool: (params) => params.operation as string,
- params: (params) => {
- const { fileUpload, fileReference, ...rest } = params
- const hasFileUpload = Array.isArray(fileUpload) ? fileUpload.length > 0 : !!fileUpload
- const files = hasFileUpload ? fileUpload : fileReference
- return {
- ...rest,
- ...(files ? { files } : {}),
- }
- },
},
},
inputs: {
diff --git a/apps/sim/blocks/blocks/gmail.ts b/apps/sim/blocks/blocks/gmail.ts
index e389d3b3d..223b69ec7 100644
--- a/apps/sim/blocks/blocks/gmail.ts
+++ b/apps/sim/blocks/blocks/gmail.ts
@@ -581,6 +581,18 @@ export const GmailV2Block: BlockConfig = {
results: { type: 'json', description: 'Search/read summary results' },
attachments: { type: 'json', description: 'Downloaded attachments (if enabled)' },
+ // Draft-specific outputs
+ draftId: {
+ type: 'string',
+ description: 'Draft ID',
+ condition: { field: 'operation', value: 'draft_gmail' },
+ },
+ messageId: {
+ type: 'string',
+ description: 'Gmail message ID for the draft',
+ condition: { field: 'operation', value: 'draft_gmail' },
+ },
+
// Trigger outputs (unchanged)
email_id: { type: 'string', description: 'Gmail message ID' },
thread_id: { type: 'string', description: 'Gmail thread ID' },
diff --git a/apps/sim/blocks/blocks/supabase.ts b/apps/sim/blocks/blocks/supabase.ts
index e8254a66a..8b5fc75f7 100644
--- a/apps/sim/blocks/blocks/supabase.ts
+++ b/apps/sim/blocks/blocks/supabase.ts
@@ -661,12 +661,25 @@ Return ONLY the PostgREST filter expression - no explanations, no markdown, no e
placeholder: 'folder/subfolder/',
condition: { field: 'operation', value: 'storage_upload' },
},
+ {
+ id: 'file',
+ title: 'File',
+ type: 'file-upload',
+ canonicalParamId: 'fileData',
+ placeholder: 'Upload file to storage',
+ condition: { field: 'operation', value: 'storage_upload' },
+ mode: 'basic',
+ multiple: false,
+ required: true,
+ },
{
id: 'fileContent',
title: 'File Content',
type: 'code',
+ canonicalParamId: 'fileData',
placeholder: 'Base64 encoded for binary files, or plain text',
condition: { field: 'operation', value: 'storage_upload' },
+ mode: 'advanced',
required: true,
},
{
diff --git a/apps/sim/executor/utils/block-data.ts b/apps/sim/executor/utils/block-data.ts
index 694180374..0c7ec4bd1 100644
--- a/apps/sim/executor/utils/block-data.ts
+++ b/apps/sim/executor/utils/block-data.ts
@@ -1,7 +1,9 @@
-import { getBlockOutputs } from '@/lib/workflows/blocks/block-outputs'
import { normalizeName } from '@/executor/constants'
import type { ExecutionContext } from '@/executor/types'
import type { OutputSchema } from '@/executor/utils/block-reference'
+import type { SerializedBlock } from '@/serializer/types'
+import type { ToolConfig } from '@/tools/types'
+import { getTool } from '@/tools/utils'
export interface BlockDataCollection {
 blockData: Record<string, unknown>
@@ -9,6 +11,32 @@ export interface BlockDataCollection {
 blockOutputSchemas: Record<string, OutputSchema>
}
+export function getBlockSchema(
+ block: SerializedBlock,
+ toolConfig?: ToolConfig
+): OutputSchema | undefined {
+ const isTrigger =
+ block.metadata?.category === 'triggers' ||
+ (block.config?.params as Record<string, unknown> | undefined)?.triggerMode === true
+
+ // Triggers use saved outputs (defines the trigger payload schema)
+ if (isTrigger && block.outputs && Object.keys(block.outputs).length > 0) {
+ return block.outputs as OutputSchema
+ }
+
+ // When a tool is selected, tool outputs are the source of truth
+ if (toolConfig?.outputs && Object.keys(toolConfig.outputs).length > 0) {
+ return toolConfig.outputs as OutputSchema
+ }
+
+ // Fallback to saved outputs for blocks without tools
+ if (block.outputs && Object.keys(block.outputs).length > 0) {
+ return block.outputs as OutputSchema
+ }
+
+ return undefined
+}
+
export function collectBlockData(ctx: ExecutionContext): BlockDataCollection {
 const blockData: Record<string, unknown> = {}
 const blockNameMapping: Record<string, string> = {}
@@ -18,24 +46,21 @@ export function collectBlockData(ctx: ExecutionContext): BlockDataCollection {
if (state.output !== undefined) {
blockData[id] = state.output
}
+ }
- const workflowBlock = ctx.workflow?.blocks?.find((b) => b.id === id)
- if (!workflowBlock) continue
+ const workflowBlocks = ctx.workflow?.blocks ?? []
+ for (const block of workflowBlocks) {
+ const id = block.id
- if (workflowBlock.metadata?.name) {
- blockNameMapping[normalizeName(workflowBlock.metadata.name)] = id
+ if (block.metadata?.name) {
+ blockNameMapping[normalizeName(block.metadata.name)] = id
}
- const blockType = workflowBlock.metadata?.id
- if (blockType) {
- const params = workflowBlock.config?.params as Record<string, unknown> | undefined
- const subBlocks = params
- ? Object.fromEntries(Object.entries(params).map(([k, v]) => [k, { value: v }]))
- : undefined
- const schema = getBlockOutputs(blockType, subBlocks)
- if (schema && Object.keys(schema).length > 0) {
- blockOutputSchemas[id] = schema
- }
+ const toolId = block.config?.tool
+ const toolConfig = toolId ? getTool(toolId) : undefined
+ const schema = getBlockSchema(block, toolConfig)
+ if (schema && Object.keys(schema).length > 0) {
+ blockOutputSchemas[id] = schema
}
}
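The refactor centralizes schema resolution in `getBlockSchema`: triggers keep their saved outputs, tool-backed blocks defer to the tool's declared outputs, and everything else falls back to saved outputs. A hedged standalone sketch of that precedence (simplified types, illustrative names):

```ts
// Standalone sketch of the precedence implemented by getBlockSchema above.
type Schema = Record<string, unknown>

function resolveSchema(opts: {
  isTrigger: boolean
  savedOutputs?: Schema
  toolOutputs?: Schema
}): Schema | undefined {
  const { isTrigger, savedOutputs, toolOutputs } = opts
  const nonEmpty = (s?: Schema) => (s && Object.keys(s).length > 0 ? s : undefined)

  // 1. Triggers define their payload via saved outputs.
  if (isTrigger && nonEmpty(savedOutputs)) return savedOutputs
  // 2. Otherwise a selected tool's outputs are the source of truth.
  if (nonEmpty(toolOutputs)) return toolOutputs
  // 3. Fall back to saved outputs for blocks without tools.
  return nonEmpty(savedOutputs)
}

// Tool outputs win for a non-trigger block:
console.log(resolveSchema({ isTrigger: false, savedOutputs: { a: 1 }, toolOutputs: { b: 2 } })) // { b: 2 }
```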
diff --git a/apps/sim/executor/utils/start-block.ts b/apps/sim/executor/utils/start-block.ts
index 7df680518..3b7982f93 100644
--- a/apps/sim/executor/utils/start-block.ts
+++ b/apps/sim/executor/utils/start-block.ts
@@ -378,8 +378,30 @@ function buildManualTriggerOutput(
return mergeFilesIntoOutput(output, workflowInput)
}
-function buildIntegrationTriggerOutput(workflowInput: unknown): NormalizedBlockOutput {
- return isPlainObject(workflowInput) ? (workflowInput as NormalizedBlockOutput) : {}
+function buildIntegrationTriggerOutput(
+ workflowInput: unknown,
+ structuredInput: Record<string, unknown>,
+ hasStructured: boolean
+): NormalizedBlockOutput {
+ const output: NormalizedBlockOutput = {}
+
+ if (hasStructured) {
+ for (const [key, value] of Object.entries(structuredInput)) {
+ output[key] = value
+ }
+ }
+
+ if (isPlainObject(workflowInput)) {
+ for (const [key, value] of Object.entries(workflowInput)) {
+ if (value !== undefined && value !== null) {
+ output[key] = value
+ } else if (!Object.hasOwn(output, key)) {
+ output[key] = value
+ }
+ }
+ }
+
+ return mergeFilesIntoOutput(output, workflowInput)
}
 function extractSubBlocks(block: SerializedBlock): Record<string, { value: unknown }> | undefined {
@@ -428,7 +450,7 @@ export function buildStartBlockOutput(options: StartBlockOutputOptions): Normali
return buildManualTriggerOutput(finalInput, workflowInput)
case StartBlockPath.EXTERNAL_TRIGGER:
- return buildIntegrationTriggerOutput(workflowInput)
+ return buildIntegrationTriggerOutput(workflowInput, structuredInput, hasStructured)
case StartBlockPath.LEGACY_STARTER:
return buildLegacyStarterOutput(
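The new `buildIntegrationTriggerOutput` layers structured input first, then lets concrete webhook values override it while a null or undefined incoming value only fills keys the structured input did not set. A minimal sketch of that merge order (standalone, illustrative names):

```ts
// Standalone sketch of the merge order in buildIntegrationTriggerOutput.
function mergeTriggerOutput(
  workflowInput: unknown,
  structuredInput: Record<string, unknown>,
  hasStructured: boolean
): Record<string, unknown> {
  const output: Record<string, unknown> = hasStructured ? { ...structuredInput } : {}
  if (workflowInput && typeof workflowInput === 'object' && !Array.isArray(workflowInput)) {
    for (const [key, value] of Object.entries(workflowInput)) {
      // Concrete values always win; null/undefined only fills gaps.
      if (value !== undefined && value !== null) output[key] = value
      else if (!Object.hasOwn(output, key)) output[key] = value
    }
  }
  return output
}

// Structured default survives a null webhook field:
console.log(mergeTriggerOutput({ a: null, b: 2 }, { a: 1 }, true)) // { a: 1, b: 2 }
```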
diff --git a/apps/sim/executor/variables/resolvers/block.ts b/apps/sim/executor/variables/resolvers/block.ts
index a29339b2b..09d246e80 100644
--- a/apps/sim/executor/variables/resolvers/block.ts
+++ b/apps/sim/executor/variables/resolvers/block.ts
@@ -1,10 +1,10 @@
-import { getBlockOutputs } from '@/lib/workflows/blocks/block-outputs'
import {
isReference,
normalizeName,
parseReferencePath,
SPECIAL_REFERENCE_PREFIXES,
} from '@/executor/constants'
+import { getBlockSchema } from '@/executor/utils/block-data'
import {
InvalidFieldError,
type OutputSchema,
@@ -67,15 +67,9 @@ export class BlockResolver implements Resolver {
blockData[blockId] = output
}
- const blockType = block.metadata?.id
- const params = block.config?.params as Record | undefined
- const subBlocks = params
- ? Object.fromEntries(Object.entries(params).map(([k, v]) => [k, { value: v }]))
- : undefined
const toolId = block.config?.tool
const toolConfig = toolId ? getTool(toolId) : undefined
- const outputSchema =
- toolConfig?.outputs ?? (blockType ? getBlockOutputs(blockType, subBlocks) : block.outputs)
+ const outputSchema = getBlockSchema(block, toolConfig)
if (outputSchema && Object.keys(outputSchema).length > 0) {
blockOutputSchemas[blockId] = outputSchema
diff --git a/apps/sim/hooks/use-collaborative-workflow.ts b/apps/sim/hooks/use-collaborative-workflow.ts
index d559969af..89aff1d12 100644
--- a/apps/sim/hooks/use-collaborative-workflow.ts
+++ b/apps/sim/hooks/use-collaborative-workflow.ts
@@ -680,6 +680,10 @@ export function useCollaborativeWorkflow() {
 previousPositions?: Map<string, { x: number; y: number }>
}
) => {
+ if (isBaselineDiffView) {
+ return
+ }
+
if (!isInActiveRoom()) {
logger.debug('Skipping batch position update - not in active workflow')
return
@@ -725,7 +729,7 @@ export function useCollaborativeWorkflow() {
}
}
},
- [addToQueue, activeWorkflowId, session?.user?.id, isInActiveRoom, undoRedo]
+ [isBaselineDiffView, addToQueue, activeWorkflowId, session?.user?.id, isInActiveRoom, undoRedo]
)
const collaborativeUpdateBlockName = useCallback(
@@ -817,6 +821,10 @@ export function useCollaborativeWorkflow() {
const collaborativeBatchToggleBlockEnabled = useCallback(
(ids: string[]) => {
+ if (isBaselineDiffView) {
+ return
+ }
+
if (ids.length === 0) return
 const previousStates: Record<string, boolean> = {}
@@ -849,7 +857,7 @@ export function useCollaborativeWorkflow() {
undoRedo.recordBatchToggleEnabled(validIds, previousStates)
},
- [addToQueue, activeWorkflowId, session?.user?.id, undoRedo]
+ [isBaselineDiffView, addToQueue, activeWorkflowId, session?.user?.id, undoRedo]
)
const collaborativeBatchUpdateParent = useCallback(
@@ -861,6 +869,10 @@ export function useCollaborativeWorkflow() {
affectedEdges: Edge[]
}>
) => {
+ if (isBaselineDiffView) {
+ return
+ }
+
if (!isInActiveRoom()) {
logger.debug('Skipping batch update parent - not in active workflow')
return
@@ -931,7 +943,7 @@ export function useCollaborativeWorkflow() {
logger.debug('Batch updated parent for blocks', { updateCount: updates.length })
},
- [isInActiveRoom, undoRedo, addToQueue, activeWorkflowId, session?.user?.id]
+ [isBaselineDiffView, isInActiveRoom, undoRedo, addToQueue, activeWorkflowId, session?.user?.id]
)
const collaborativeToggleBlockAdvancedMode = useCallback(
@@ -951,18 +963,37 @@ export function useCollaborativeWorkflow() {
const collaborativeSetBlockCanonicalMode = useCallback(
(id: string, canonicalId: string, canonicalMode: 'basic' | 'advanced') => {
- executeQueuedOperation(
- BLOCK_OPERATIONS.UPDATE_CANONICAL_MODE,
- OPERATION_TARGETS.BLOCK,
- { id, canonicalId, canonicalMode },
- () => useWorkflowStore.getState().setBlockCanonicalMode(id, canonicalId, canonicalMode)
- )
+ if (isBaselineDiffView) {
+ return
+ }
+
+ useWorkflowStore.getState().setBlockCanonicalMode(id, canonicalId, canonicalMode)
+
+ if (!activeWorkflowId) {
+ return
+ }
+
+ const operationId = crypto.randomUUID()
+ addToQueue({
+ id: operationId,
+ operation: {
+ operation: BLOCK_OPERATIONS.UPDATE_CANONICAL_MODE,
+ target: OPERATION_TARGETS.BLOCK,
+ payload: { id, canonicalId, canonicalMode },
+ },
+ workflowId: activeWorkflowId,
+ userId: session?.user?.id || 'unknown',
+ })
},
- [executeQueuedOperation]
+ [isBaselineDiffView, activeWorkflowId, addToQueue, session?.user?.id]
)
const collaborativeBatchToggleBlockHandles = useCallback(
(ids: string[]) => {
+ if (isBaselineDiffView) {
+ return
+ }
+
if (ids.length === 0) return
 const previousStates: Record<string, boolean> = {}
@@ -995,11 +1026,15 @@ export function useCollaborativeWorkflow() {
undoRedo.recordBatchToggleHandles(validIds, previousStates)
},
- [addToQueue, activeWorkflowId, session?.user?.id, undoRedo]
+ [isBaselineDiffView, addToQueue, activeWorkflowId, session?.user?.id, undoRedo]
)
const collaborativeBatchAddEdges = useCallback(
(edges: Edge[], options?: { skipUndoRedo?: boolean }) => {
+ if (isBaselineDiffView) {
+ return false
+ }
+
if (!isInActiveRoom()) {
logger.debug('Skipping batch add edges - not in active workflow')
return false
@@ -1035,11 +1070,15 @@ export function useCollaborativeWorkflow() {
return true
},
- [addToQueue, activeWorkflowId, session?.user?.id, isInActiveRoom, undoRedo]
+ [isBaselineDiffView, addToQueue, activeWorkflowId, session?.user?.id, isInActiveRoom, undoRedo]
)
const collaborativeBatchRemoveEdges = useCallback(
(edgeIds: string[], options?: { skipUndoRedo?: boolean }) => {
+ if (isBaselineDiffView) {
+ return false
+ }
+
if (!isInActiveRoom()) {
logger.debug('Skipping batch remove edges - not in active workflow')
return false
@@ -1089,7 +1128,7 @@ export function useCollaborativeWorkflow() {
logger.info('Batch removed edges', { count: validEdgeIds.length })
return true
},
- [isInActiveRoom, addToQueue, activeWorkflowId, session, undoRedo]
+ [isBaselineDiffView, isInActiveRoom, addToQueue, activeWorkflowId, session, undoRedo]
)
const collaborativeSetSubblockValue = useCallback(
@@ -1165,6 +1204,10 @@ export function useCollaborativeWorkflow() {
(blockId: string, subblockId: string, value: any) => {
if (isApplyingRemoteChange.current) return
+ if (isBaselineDiffView) {
+ return
+ }
+
if (!isInActiveRoom()) {
logger.debug('Skipping tag selection - not in active workflow', {
currentWorkflowId,
@@ -1192,7 +1235,14 @@ export function useCollaborativeWorkflow() {
userId: session?.user?.id || 'unknown',
})
},
- [addToQueue, currentWorkflowId, activeWorkflowId, session?.user?.id, isInActiveRoom]
+ [
+ isBaselineDiffView,
+ addToQueue,
+ currentWorkflowId,
+ activeWorkflowId,
+ session?.user?.id,
+ isInActiveRoom,
+ ]
)
const collaborativeUpdateLoopType = useCallback(
@@ -1538,6 +1588,10 @@ export function useCollaborativeWorkflow() {
const collaborativeBatchRemoveBlocks = useCallback(
(blockIds: string[], options?: { skipUndoRedo?: boolean }) => {
+ if (isBaselineDiffView) {
+ return false
+ }
+
if (!isInActiveRoom()) {
logger.debug('Skipping batch remove blocks - not in active workflow')
return false
@@ -1619,6 +1673,7 @@ export function useCollaborativeWorkflow() {
return true
},
[
+ isBaselineDiffView,
addToQueue,
activeWorkflowId,
session?.user?.id,
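Each collaborative mutation above now bails out early in baseline-diff view, and `isBaselineDiffView` is added to every `useCallback` dependency list so the guard reads the current value. A sketch of the recurring pattern (hook and names are illustrative, not from the file):

```ts
// Pattern used across the collaborative callbacks above: guard first,
// and include the guard flag in the dependency array so it stays fresh.
import { useCallback } from 'react'

function useGuardedAction(isBaselineDiffView: boolean, run: (ids: string[]) => void) {
  return useCallback(
    (ids: string[]) => {
      if (isBaselineDiffView) return // read-only diff view: no mutations
      if (ids.length === 0) return
      run(ids)
    },
    [isBaselineDiffView, run]
  )
}
```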
diff --git a/apps/sim/lib/knowledge/embeddings.ts b/apps/sim/lib/knowledge/embeddings.ts
index 2b57b34c5..1171065c7 100644
--- a/apps/sim/lib/knowledge/embeddings.ts
+++ b/apps/sim/lib/knowledge/embeddings.ts
@@ -8,6 +8,17 @@ const logger = createLogger('EmbeddingUtils')
const MAX_TOKENS_PER_REQUEST = 8000
const MAX_CONCURRENT_BATCHES = env.KB_CONFIG_CONCURRENCY_LIMIT || 50
+const EMBEDDING_DIMENSIONS = 1536
+
+/**
+ * Check if the model supports custom dimensions.
+ * text-embedding-3-* models support the dimensions parameter.
+ * Checks for 'embedding-3' to handle Azure deployments with custom naming conventions.
+ */
+function supportsCustomDimensions(modelName: string): boolean {
+ const name = modelName.toLowerCase()
+ return name.includes('embedding-3') && !name.includes('ada')
+}
export class EmbeddingAPIError extends Error {
public status: number
@@ -93,15 +104,19 @@ async function getEmbeddingConfig(
async function callEmbeddingAPI(inputs: string[], config: EmbeddingConfig): Promise<number[][]> {
return retryWithExponentialBackoff(
async () => {
+ const useDimensions = supportsCustomDimensions(config.modelName)
+
const requestBody = config.useAzure
? {
input: inputs,
encoding_format: 'float',
+ ...(useDimensions && { dimensions: EMBEDDING_DIMENSIONS }),
}
: {
input: inputs,
model: config.modelName,
encoding_format: 'float',
+ ...(useDimensions && { dimensions: EMBEDDING_DIMENSIONS }),
}
const response = await fetch(config.apiUrl, {
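
Since the dimensions gate is purely string-based, a couple of assertions document its intent (model names below are illustrative):

```ts
// Same predicate as supportsCustomDimensions above. text-embedding-3-*
// models accept the `dimensions` parameter; text-embedding-ada-002 does
// not, and Azure deployments may use custom names containing 'embedding-3'.
const supports = (modelName: string): boolean => {
  const name = modelName.toLowerCase()
  return name.includes('embedding-3') && !name.includes('ada')
}

console.assert(supports('text-embedding-3-small'))
console.assert(supports('My-Embedding-3-Large-Deployment')) // Azure-style custom name
console.assert(!supports('text-embedding-ada-002'))
```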
diff --git a/apps/sim/lib/uploads/providers/blob/client.ts b/apps/sim/lib/uploads/providers/blob/client.ts
index 346ce27fa..f83e6b576 100644
--- a/apps/sim/lib/uploads/providers/blob/client.ts
+++ b/apps/sim/lib/uploads/providers/blob/client.ts
@@ -18,6 +18,52 @@ const logger = createLogger('BlobClient')
let _blobServiceClient: BlobServiceClientInstance | null = null
+interface ParsedCredentials {
+ accountName: string
+ accountKey: string
+}
+
+/**
+ * Extract account name and key from an Azure connection string.
+ * Connection strings have the format: DefaultEndpointsProtocol=https;AccountName=...;AccountKey=...;EndpointSuffix=...
+ */
+function parseConnectionString(connectionString: string): ParsedCredentials {
+ const accountNameMatch = connectionString.match(/AccountName=([^;]+)/)
+ if (!accountNameMatch) {
+ throw new Error('Cannot extract account name from connection string')
+ }
+
+ const accountKeyMatch = connectionString.match(/AccountKey=([^;]+)/)
+ if (!accountKeyMatch) {
+ throw new Error('Cannot extract account key from connection string')
+ }
+
+ return {
+ accountName: accountNameMatch[1],
+ accountKey: accountKeyMatch[1],
+ }
+}
+
+/**
+ * Get account credentials from BLOB_CONFIG, extracting from connection string if necessary.
+ */
+function getAccountCredentials(): ParsedCredentials {
+ if (BLOB_CONFIG.connectionString) {
+ return parseConnectionString(BLOB_CONFIG.connectionString)
+ }
+
+ if (BLOB_CONFIG.accountName && BLOB_CONFIG.accountKey) {
+ return {
+ accountName: BLOB_CONFIG.accountName,
+ accountKey: BLOB_CONFIG.accountKey,
+ }
+ }
+
+ throw new Error(
+ 'Azure Blob Storage credentials are missing – set AZURE_CONNECTION_STRING or both AZURE_ACCOUNT_NAME and AZURE_ACCOUNT_KEY'
+ )
+}
+
export async function getBlobServiceClient(): Promise {
if (_blobServiceClient) return _blobServiceClient
@@ -127,6 +173,8 @@ export async function getPresignedUrl(key: string, expiresIn = 3600) {
const containerClient = blobServiceClient.getContainerClient(BLOB_CONFIG.containerName)
const blockBlobClient = containerClient.getBlockBlobClient(key)
+ const { accountName, accountKey } = getAccountCredentials()
+
const sasOptions = {
containerName: BLOB_CONFIG.containerName,
blobName: key,
@@ -137,13 +185,7 @@ export async function getPresignedUrl(key: string, expiresIn = 3600) {
const sasToken = generateBlobSASQueryParameters(
sasOptions,
- new StorageSharedKeyCredential(
- BLOB_CONFIG.accountName,
- BLOB_CONFIG.accountKey ??
- (() => {
- throw new Error('AZURE_ACCOUNT_KEY is required when using account name authentication')
- })()
- )
+ new StorageSharedKeyCredential(accountName, accountKey)
).toString()
return `${blockBlobClient.url}?${sasToken}`
@@ -168,9 +210,14 @@ export async function getPresignedUrlWithConfig(
StorageSharedKeyCredential,
} = await import('@azure/storage-blob')
let tempBlobServiceClient: BlobServiceClientInstance
+ let accountName: string
+ let accountKey: string
if (customConfig.connectionString) {
tempBlobServiceClient = BlobServiceClient.fromConnectionString(customConfig.connectionString)
+ const credentials = parseConnectionString(customConfig.connectionString)
+ accountName = credentials.accountName
+ accountKey = credentials.accountKey
} else if (customConfig.accountName && customConfig.accountKey) {
const sharedKeyCredential = new StorageSharedKeyCredential(
customConfig.accountName,
@@ -180,6 +227,8 @@ export async function getPresignedUrlWithConfig(
`https://${customConfig.accountName}.blob.core.windows.net`,
sharedKeyCredential
)
+ accountName = customConfig.accountName
+ accountKey = customConfig.accountKey
} else {
throw new Error(
'Custom blob config must include either connectionString or accountName + accountKey'
@@ -199,13 +248,7 @@ export async function getPresignedUrlWithConfig(
const sasToken = generateBlobSASQueryParameters(
sasOptions,
- new StorageSharedKeyCredential(
- customConfig.accountName,
- customConfig.accountKey ??
- (() => {
- throw new Error('Account key is required when using account name authentication')
- })()
- )
+ new StorageSharedKeyCredential(accountName, accountKey)
).toString()
return `${blockBlobClient.url}?${sasToken}`
@@ -403,13 +446,9 @@ export async function getMultipartPartUrls(
if (customConfig) {
if (customConfig.connectionString) {
blobServiceClient = BlobServiceClient.fromConnectionString(customConfig.connectionString)
- const match = customConfig.connectionString.match(/AccountName=([^;]+)/)
- if (!match) throw new Error('Cannot extract account name from connection string')
- accountName = match[1]
-
- const keyMatch = customConfig.connectionString.match(/AccountKey=([^;]+)/)
- if (!keyMatch) throw new Error('Cannot extract account key from connection string')
- accountKey = keyMatch[1]
+ const credentials = parseConnectionString(customConfig.connectionString)
+ accountName = credentials.accountName
+ accountKey = credentials.accountKey
} else if (customConfig.accountName && customConfig.accountKey) {
const credential = new StorageSharedKeyCredential(
customConfig.accountName,
@@ -428,12 +467,9 @@ export async function getMultipartPartUrls(
} else {
blobServiceClient = await getBlobServiceClient()
containerName = BLOB_CONFIG.containerName
- accountName = BLOB_CONFIG.accountName
- accountKey =
- BLOB_CONFIG.accountKey ||
- (() => {
- throw new Error('AZURE_ACCOUNT_KEY is required')
- })()
+ const credentials = getAccountCredentials()
+ accountName = credentials.accountName
+ accountKey = credentials.accountKey
}
const containerClient = blobServiceClient.getContainerClient(containerName)
@@ -501,12 +537,10 @@ export async function completeMultipartUpload(
const containerClient = blobServiceClient.getContainerClient(containerName)
const blockBlobClient = containerClient.getBlockBlobClient(key)
- // Sort parts by part number and extract block IDs
const sortedBlockIds = parts
.sort((a, b) => a.partNumber - b.partNumber)
.map((part) => part.blockId)
- // Commit the block list to create the final blob
await blockBlobClient.commitBlockList(sortedBlockIds, {
metadata: {
multipartUpload: 'completed',
@@ -557,10 +591,8 @@ export async function abortMultipartUpload(key: string, customConfig?: BlobConfi
const blockBlobClient = containerClient.getBlockBlobClient(key)
try {
- // Delete the blob if it exists (this also cleans up any uncommitted blocks)
await blockBlobClient.deleteIfExists()
} catch (error) {
- // Ignore errors since we're just cleaning up
logger.warn('Error cleaning up multipart upload:', error)
}
}
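
For reference, `parseConnectionString` is two regex captures over the semicolon-delimited pairs; the sample string below is fabricated:

```ts
// Account keys are base64 and can end in '=' padding, which [^;]+ still
// captures correctly because only ';' terminates a pair.
const sample =
  'DefaultEndpointsProtocol=https;AccountName=simstore;AccountKey=a1b2c3==;EndpointSuffix=core.windows.net'

const accountName = sample.match(/AccountName=([^;]+)/)?.[1] // 'simstore'
const accountKey = sample.match(/AccountKey=([^;]+)/)?.[1] // 'a1b2c3=='
```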
diff --git a/apps/sim/lib/workflows/blocks/block-outputs.ts b/apps/sim/lib/workflows/blocks/block-outputs.ts
index 12972141a..b00156e0c 100644
--- a/apps/sim/lib/workflows/blocks/block-outputs.ts
+++ b/apps/sim/lib/workflows/blocks/block-outputs.ts
@@ -618,13 +618,6 @@ export function getToolOutputs(
}
}
-/**
- * Generates output paths for a tool-based block.
- *
- * @param blockConfig - The block configuration containing tools config
- * @param subBlocks - SubBlock values for tool selection and condition evaluation
- * @returns Array of output paths for the tool, or empty array on error
- */
export function getToolOutputPaths(
blockConfig: BlockConfig,
  subBlocks?: Record<string, any>
@@ -634,12 +627,22 @@ export function getToolOutputPaths(
if (!outputs || Object.keys(outputs).length === 0) return []
if (subBlocks && blockConfig.outputs) {
- const filteredBlockOutputs = filterOutputsByCondition(blockConfig.outputs, subBlocks)
- const allowedKeys = new Set(Object.keys(filteredBlockOutputs))
-
+ const filteredOutputs: Record<string, any> = {}
+
for (const [key, value] of Object.entries(outputs)) {
- if (allowedKeys.has(key)) {
+ const blockOutput = blockConfig.outputs[key]
+
+ if (!blockOutput || typeof blockOutput !== 'object') {
+ filteredOutputs[key] = value
+ continue
+ }
+
+ const condition = 'condition' in blockOutput ? blockOutput.condition : undefined
+ if (condition) {
+ if (evaluateOutputCondition(condition, subBlocks)) {
+ filteredOutputs[key] = value
+ }
+ } else {
filteredOutputs[key] = value
}
}
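
This rewrite changes the filtering semantics: the old allow-list kept a tool output only if its key survived `filterOutputsByCondition` over the block outputs, which silently dropped tool outputs the block config never declared. Now undeclared keys pass through, and a key is removed only when its block output carries a condition that fails. A condensed model of the new rule, with the condition reduced to a plain predicate for illustration (the real code delegates to `evaluateOutputCondition`):

```ts
// Assumed shapes; the real definitions live in the block config types.
type SubBlockValues = Record<string, unknown>
type DeclaredOutput = { condition?: (subBlocks: SubBlockValues) => boolean } | string

function keepToolOutput(
  declared: DeclaredOutput | undefined,
  subBlocks: SubBlockValues
): boolean {
  if (!declared || typeof declared !== 'object') return true // undeclared: keep
  if (!declared.condition) return true // declared, unconditional: keep
  return declared.condition(subBlocks) // drop only on a failing condition
}
```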
diff --git a/apps/sim/stores/operation-queue/store.ts b/apps/sim/stores/operation-queue/store.ts
index 07e9381ce..b5a23d8d4 100644
--- a/apps/sim/stores/operation-queue/store.ts
+++ b/apps/sim/stores/operation-queue/store.ts
@@ -27,6 +27,9 @@ export function registerEmitFunctions(
emitSubblockUpdate = subblockEmit
emitVariableUpdate = variableEmit
currentRegisteredWorkflowId = workflowId
+ if (workflowId) {
+ useOperationQueueStore.getState().processNextOperation()
+ }
}
let currentRegisteredWorkflowId: string | null = null
@@ -262,16 +265,14 @@ export const useOperationQueueStore = create((set, get) =>
return
}
- const nextOperation = currentRegisteredWorkflowId
- ? state.operations.find(
- (op) => op.status === 'pending' && op.workflowId === currentRegisteredWorkflowId
- )
- : state.operations.find((op) => op.status === 'pending')
- if (!nextOperation) {
+ if (!currentRegisteredWorkflowId) {
return
}
- if (currentRegisteredWorkflowId && nextOperation.workflowId !== currentRegisteredWorkflowId) {
+ const nextOperation = state.operations.find(
+ (op) => op.status === 'pending' && op.workflowId === currentRegisteredWorkflowId
+ )
+ if (!nextOperation) {
return
}
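
Two coordinated changes here: `processNextOperation` now refuses to run at all until emit functions are registered for a workflow, and registration itself kicks the queue so operations enqueued before the socket was ready are not stranded. A minimal model of the contract, with the zustand store simplified to module-level state:

```ts
// Simplified model, not the store itself.
type QueuedOp = { id: string; workflowId: string; status: 'pending' | 'processing' }

let registeredWorkflowId: string | null = null
const operations: QueuedOp[] = []

function processNextOperation(): void {
  if (!registeredWorkflowId) return // no emitters yet: wait
  const next = operations.find(
    (op) => op.status === 'pending' && op.workflowId === registeredWorkflowId
  )
  if (!next) return
  next.status = 'processing'
  // ...emit to the server, then mark confirmed or failed...
}

function registerEmitFunctions(workflowId: string): void {
  registeredWorkflowId = workflowId
  processNextOperation() // new in this change: drain anything queued before registration
}
```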
diff --git a/apps/sim/tools/supabase/storage_upload.ts b/apps/sim/tools/supabase/storage_upload.ts
index d01faab30..35ba036c5 100644
--- a/apps/sim/tools/supabase/storage_upload.ts
+++ b/apps/sim/tools/supabase/storage_upload.ts
@@ -38,11 +38,12 @@ export const storageUploadTool: ToolConfig<
visibility: 'user-or-llm',
description: 'Optional folder path (e.g., "folder/subfolder/")',
},
- fileContent: {
- type: 'string',
+ fileData: {
+ type: 'json',
required: true,
visibility: 'user-or-llm',
- description: 'The file content (base64 encoded for binary files, or plain text)',
+ description:
+ 'File to upload - UserFile object (basic mode) or string content (advanced mode: base64 or plain text). Supports data URLs.',
},
contentType: {
type: 'string',
@@ -65,65 +66,28 @@ export const storageUploadTool: ToolConfig<
},
request: {
- url: (params) => {
- // Combine folder path and fileName, ensuring proper formatting
- let fullPath = params.fileName
- if (params.path) {
- // Ensure path ends with / and doesn't have double slashes
- const folderPath = params.path.endsWith('/') ? params.path : `${params.path}/`
- fullPath = `${folderPath}${params.fileName}`
- }
- return `https://${params.projectId}.supabase.co/storage/v1/object/${params.bucket}/${fullPath}`
- },
+ url: '/api/tools/supabase/storage-upload',
method: 'POST',
- headers: (params) => {
- const headers: Record<string, string> = {
- apikey: params.apiKey,
- Authorization: `Bearer ${params.apiKey}`,
- }
-
- if (params.contentType) {
- headers['Content-Type'] = params.contentType
- }
-
- if (params.upsert) {
- headers['x-upsert'] = 'true'
- }
-
- return headers
- },
- body: (params) => {
- // Return the file content wrapped in an object
- // The actual upload will need to handle this appropriately
- return {
- content: params.fileContent,
- }
- },
- },
-
- transformResponse: async (response: Response) => {
- let data
- try {
- data = await response.json()
- } catch (parseError) {
- throw new Error(`Failed to parse Supabase storage upload response: ${parseError}`)
- }
-
- return {
- success: true,
- output: {
- message: 'Successfully uploaded file to storage',
- results: data,
- },
- error: undefined,
- }
+ headers: () => ({
+ 'Content-Type': 'application/json',
+ }),
+ body: (params) => ({
+ projectId: params.projectId,
+ apiKey: params.apiKey,
+ bucket: params.bucket,
+ fileName: params.fileName,
+ path: params.path,
+ fileData: params.fileData,
+ contentType: params.contentType,
+ upsert: params.upsert,
+ }),
},
outputs: {
message: { type: 'string', description: 'Operation status message' },
results: {
type: 'object',
- description: 'Upload result including file path and metadata',
+ description: 'Upload result including file path, bucket, and public URL',
},
},
}
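
The tool no longer calls Supabase Storage directly; it posts JSON to an internal proxy route added elsewhere in this PR, which is assumed to resolve `fileData` and perform the upload server-side. The request the new config produces looks roughly like this (all values are placeholders):

```ts
// Placeholder values throughout; the route handler is not shown in this diff.
await fetch('/api/tools/supabase/storage-upload', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    projectId: 'abcd1234',
    apiKey: 'service-role-key',
    bucket: 'avatars',
    fileName: 'logo.png',
    path: 'brand/',
    fileData: { name: 'logo.png', url: 'https://example.com/logo.png' }, // or a base64/plain-text string
    contentType: 'image/png',
    upsert: true,
  }),
})
```

One practical effect of the proxy is that URL construction, header assembly, and binary handling all move server-side, so the client-facing config shrinks to a plain JSON body.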
diff --git a/apps/sim/tools/supabase/types.ts b/apps/sim/tools/supabase/types.ts
index 2ccb6232b..dd810a395 100644
--- a/apps/sim/tools/supabase/types.ts
+++ b/apps/sim/tools/supabase/types.ts
@@ -136,7 +136,7 @@ export interface SupabaseStorageUploadParams {
bucket: string
fileName: string
path?: string
- fileContent: string
+ fileData: any // UserFile object (basic mode) or string (advanced mode: base64/plain text)
contentType?: string
upsert?: boolean
}
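
With `fileData` widened to `any`, consumers need a runtime check to tell the two modes apart. A hedged sketch; the minimal `UserFile` shape here is assumed for illustration, not taken from this diff:

```ts
// Assumed minimal UserFile shape.
interface UserFile {
  name: string
  url: string
}

function isUserFile(value: unknown): value is UserFile {
  return typeof value === 'object' && value !== null && 'url' in value
}

// Advanced mode passes a string (base64 or plain text); basic mode a UserFile.
function describeFileData(fileData: UserFile | string): string {
  return typeof fileData === 'string'
    ? 'inline content (base64 or plain text)'
    : `file reference: ${fileData.name}`
}
```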
diff --git a/docker-compose.local.yml b/docker-compose.local.yml
index 768c0cc70..a2f768c30 100644
--- a/docker-compose.local.yml
+++ b/docker-compose.local.yml
@@ -52,7 +52,7 @@ services:
deploy:
resources:
limits:
- memory: 8G
+ memory: 1G
healthcheck:
test: ['CMD', 'wget', '--spider', '--quiet', 'http://127.0.0.1:3002/health']
interval: 90s
diff --git a/docker-compose.ollama.yml b/docker-compose.ollama.yml
index 4f32929f9..9d4f072bf 100644
--- a/docker-compose.ollama.yml
+++ b/docker-compose.ollama.yml
@@ -56,7 +56,7 @@ services:
deploy:
resources:
limits:
- memory: 8G
+ memory: 1G
healthcheck:
test: ['CMD', 'wget', '--spider', '--quiet', 'http://127.0.0.1:3002/health']
interval: 90s
diff --git a/docker-compose.prod.yml b/docker-compose.prod.yml
index c6b79e6c1..74bdd67f8 100644
--- a/docker-compose.prod.yml
+++ b/docker-compose.prod.yml
@@ -42,7 +42,7 @@ services:
deploy:
resources:
limits:
- memory: 4G
+ memory: 1G
environment:
- DATABASE_URL=postgresql://${POSTGRES_USER:-postgres}:${POSTGRES_PASSWORD:-postgres}@db:5432/${POSTGRES_DB:-simstudio}
- NEXT_PUBLIC_APP_URL=${NEXT_PUBLIC_APP_URL:-http://localhost:3000}
diff --git a/helm/sim/examples/values-production.yaml b/helm/sim/examples/values-production.yaml
index 3bac1ba09..794afa4ac 100644
--- a/helm/sim/examples/values-production.yaml
+++ b/helm/sim/examples/values-production.yaml
@@ -10,13 +10,13 @@ global:
app:
enabled: true
replicaCount: 2
-
+
resources:
limits:
- memory: "6Gi"
+ memory: "8Gi"
cpu: "2000m"
requests:
- memory: "4Gi"
+ memory: "6Gi"
cpu: "1000m"
# Production URLs (REQUIRED - update with your actual domain names)
@@ -49,14 +49,14 @@ app:
realtime:
enabled: true
replicaCount: 2
-
+
resources:
limits:
- memory: "4Gi"
- cpu: "1000m"
- requests:
- memory: "2Gi"
+ memory: "1Gi"
cpu: "500m"
+ requests:
+ memory: "512Mi"
+ cpu: "250m"
env:
NEXT_PUBLIC_APP_URL: "https://sim.acme.ai"
diff --git a/helm/sim/values.yaml b/helm/sim/values.yaml
index c182c2772..f0def91cf 100644
--- a/helm/sim/values.yaml
+++ b/helm/sim/values.yaml
@@ -29,10 +29,10 @@ app:
# Resource limits and requests
resources:
limits:
- memory: "4Gi"
+ memory: "8Gi"
cpu: "2000m"
requests:
- memory: "2Gi"
+ memory: "4Gi"
cpu: "1000m"
# Node selector for pod scheduling (leave empty to allow scheduling on any node)
@@ -232,24 +232,24 @@ app:
realtime:
# Enable/disable the realtime service
enabled: true
-
+
# Image configuration
image:
repository: simstudioai/realtime
tag: latest
pullPolicy: Always
-
+
# Number of replicas
replicaCount: 1
-
+
# Resource limits and requests
resources:
limits:
- memory: "2Gi"
- cpu: "1000m"
- requests:
memory: "1Gi"
cpu: "500m"
+ requests:
+ memory: "512Mi"
+ cpu: "250m"
# Node selector for pod scheduling (leave empty to allow scheduling on any node)
nodeSelector: {}