Compare commits: fix/s-tool ... feat/run-f (36 commits)
| Author | SHA1 | Date |
|---|---|---|
|  | c14c614e33 |  |
|  | 415acda403 |  |
|  | 8dc45e6e7e |  |
|  | 7a0aaa460d |  |
|  | 2c333bfd98 |  |
|  | 23ab11a40d |  |
|  | 6e541949ec |  |
|  | 3231955a07 |  |
|  | d38fb29e05 |  |
|  | 5c1e620831 |  |
|  | 72594df766 |  |
|  | be95a7dbd8 |  |
|  | da5d4ac9d5 |  |
|  | e8534bea7a |  |
|  | 3d0b810a8e |  |
|  | ebf2852733 |  |
|  | 12495ef89c |  |
|  | d8d85fccf0 |  |
|  | 56bc809c6f |  |
|  | c7bd48573a |  |
|  | 80f00479a3 |  |
|  | c140e90559 |  |
|  | d83c418111 |  |
|  | be2a9ef0f8 |  |
|  | 1bf5ed4586 |  |
|  | dc0ed842c4 |  |
|  | 1952b196a0 |  |
|  | fa03d4d818 |  |
|  | e14cebeec5 |  |
|  | 404d8c006e |  |
|  | ac91d78834 |  |
|  | 6f0a093869 |  |
|  | bcf6dc8828 |  |
|  | 841cb638fb |  |
|  | c7db48e3a2 |  |
|  | 4d844651c2 |  |
```diff
@@ -44,7 +44,7 @@ services:
     deploy:
       resources:
         limits:
-          memory: 4G
+          memory: 1G
     environment:
       - NODE_ENV=development
       - DATABASE_URL=postgresql://postgres:postgres@db:5432/simstudio
```
**.github/workflows/ci.yml** (vendored; 35 changed lines)

```diff
@@ -10,6 +10,9 @@ concurrency:
   group: ci-${{ github.ref }}
   cancel-in-progress: false
 
+permissions:
+  contents: read
+
 jobs:
   test-build:
     name: Test and Build
```
```diff
@@ -27,10 +30,11 @@ jobs:
     steps:
       - name: Extract version from commit message
         id: extract
+        env:
+          COMMIT_MSG: ${{ github.event.head_commit.message }}
         run: |
-          COMMIT_MSG="${{ github.event.head_commit.message }}"
           # Only tag versions on main branch
-          if [ "${{ github.ref }}" = "refs/heads/main" ] && [[ "$COMMIT_MSG" =~ ^(v[0-9]+\.[0-9]+\.[0-9]+): ]]; then
+          if [ "$GITHUB_REF" = "refs/heads/main" ] && [[ "$COMMIT_MSG" =~ ^(v[0-9]+\.[0-9]+\.[0-9]+): ]]; then
            VERSION="${BASH_REMATCH[1]}"
            echo "version=${VERSION}" >> $GITHUB_OUTPUT
            echo "is_release=true" >> $GITHUB_OUTPUT
```
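The notable fix in this hunk: the commit message now reaches the script through an `env:` entry, and the branch check reads `$GITHUB_REF` instead of interpolating `${{ github.ref }}` into the script body, so attacker-controlled commit text is never spliced into the shell source — the standard GitHub Actions script-injection mitigation. A re-statement of the same release gate in TypeScript, purely illustrative:

```typescript
// Illustrative sketch of the workflow's release gate (not project code).
function detectRelease(ref: string, commitMsg: string): { version: string; isRelease: boolean } {
  // Same pattern as the workflow: a semver tag prefix like "v1.2.3:" on main.
  const match = /^(v\d+\.\d+\.\d+):/.exec(commitMsg)
  const isRelease = ref === 'refs/heads/main' && match !== null
  return { version: isRelease && match ? match[1] : '', isRelease }
}

detectRelease('refs/heads/main', 'v1.2.3: add release job') // => { version: 'v1.2.3', isRelease: true }
```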
```diff
@@ -277,3 +281,30 @@
     if: needs.check-docs-changes.outputs.docs_changed == 'true'
     uses: ./.github/workflows/docs-embeddings.yml
     secrets: inherit
+
+  # Create GitHub Release (only for version commits on main, after all builds complete)
+  create-release:
+    name: Create GitHub Release
+    runs-on: blacksmith-4vcpu-ubuntu-2404
+    needs: [create-ghcr-manifests, detect-version]
+    if: needs.detect-version.outputs.is_release == 'true'
+    permissions:
+      contents: write
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          fetch-depth: 0
+
+      - name: Setup Bun
+        uses: oven-sh/setup-bun@v2
+        with:
+          bun-version: latest
+
+      - name: Install dependencies
+        run: bun install --frozen-lockfile
+
+      - name: Create release
+        env:
+          GH_PAT: ${{ secrets.GITHUB_TOKEN }}
+        run: bun run scripts/create-single-release.ts ${{ needs.detect-version.outputs.version }}
```
**.github/workflows/docs-embeddings.yml** (vendored; 3 changed lines)

```diff
@@ -4,6 +4,9 @@ on:
   workflow_call:
   workflow_dispatch: # Allow manual triggering
 
+permissions:
+  contents: read
+
 jobs:
   process-docs-embeddings:
     name: Process Documentation Embeddings
```
**.github/workflows/migrations.yml** (vendored; 3 changed lines)

```diff
@@ -4,6 +4,9 @@ on:
   workflow_call:
   workflow_dispatch:
 
+permissions:
+  contents: read
+
 jobs:
   migrate:
     name: Apply Database Migrations
```
**.github/workflows/publish-cli.yml** (vendored; 3 changed lines)

```diff
@@ -6,6 +6,9 @@ on:
     paths:
       - 'packages/cli/**'
 
+permissions:
+  contents: read
+
 jobs:
   publish-npm:
     runs-on: blacksmith-4vcpu-ubuntu-2404
```
**.github/workflows/publish-python-sdk.yml** (vendored; 3 changed lines)

```diff
@@ -6,6 +6,9 @@ on:
     paths:
       - 'packages/python-sdk/**'
 
+permissions:
+  contents: write
+
 jobs:
   publish-pypi:
     runs-on: blacksmith-4vcpu-ubuntu-2404
```
**.github/workflows/publish-ts-sdk.yml** (vendored; 3 changed lines)

```diff
@@ -6,6 +6,9 @@ on:
     paths:
       - 'packages/ts-sdk/**'
 
+permissions:
+  contents: write
+
 jobs:
   publish-npm:
     runs-on: blacksmith-4vcpu-ubuntu-2404
```
**.github/workflows/test-build.yml** (vendored; 3 changed lines)

```diff
@@ -4,6 +4,9 @@ on:
   workflow_call:
   workflow_dispatch:
 
+permissions:
+  contents: read
+
 jobs:
   test-build:
     name: Test and Build
```
```diff
@@ -119,6 +119,19 @@ aside#nd-sidebar {
   }
 }
 
+/* Hide TOC popover on tablet/medium screens (768px - 1279px) */
+/* Keeps it visible on mobile (<768px) for easy navigation */
+/* Desktop (>=1280px) already hides it via fumadocs xl:hidden */
+@media (min-width: 768px) and (max-width: 1279px) {
+  #nd-docs-layout {
+    --fd-toc-popover-height: 0px !important;
+  }
+
+  [data-toc-popover] {
+    display: none !important;
+  }
+}
+
 /* Desktop only: Apply custom navbar offset, sidebar width and margin offsets */
 /* On mobile, let fumadocs handle the layout natively */
 @media (min-width: 1024px) {
```
**apps/docs/components/ui/action-media.tsx** (new file, 38 lines)

```tsx
'use client'

import { getAssetUrl } from '@/lib/utils'

interface ActionImageProps {
  src: string
  alt: string
}

interface ActionVideoProps {
  src: string
  alt: string
}

export function ActionImage({ src, alt }: ActionImageProps) {
  return (
    <img
      src={src}
      alt={alt}
      className='inline-block w-full max-w-[200px] rounded border border-neutral-200 dark:border-neutral-700'
    />
  )
}

export function ActionVideo({ src, alt }: ActionVideoProps) {
  const resolvedSrc = getAssetUrl(src)

  return (
    <video
      src={resolvedSrc}
      autoPlay
      loop
      muted
      playsInline
      className='inline-block w-full max-w-[200px] rounded border border-neutral-200 dark:border-neutral-700'
    />
  )
}
```
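A usage sketch, mirroring how the quick-reference page below consumes these components (paths taken from that page). Note the asymmetry in the source: `ActionVideo` resolves its `src` through `getAssetUrl`, while `ActionImage` renders the `/static/...` path as given:

```tsx
// As used in quick-reference/index.mdx below:
<ActionVideo src="quick-reference/add-block.mp4" alt="Add a block" />
<ActionImage src="/static/quick-reference/run-workflow.png" alt="Run workflow" />
```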
```diff
@@ -10,12 +10,20 @@ Stellen Sie Sim auf Ihrer eigenen Infrastruktur mit Docker oder Kubernetes berei
 
 ## Anforderungen
 
-| Ressource | Minimum | Empfohlen |
-|----------|---------|-------------|
-| CPU | 2 Kerne | 4+ Kerne |
-| RAM | 12 GB | 16+ GB |
-| Speicher | 20 GB SSD | 50+ GB SSD |
-| Docker | 20.10+ | Neueste Version |
+| Ressource | Klein | Standard | Produktion |
+|----------|-------|----------|------------|
+| CPU | 2 Kerne | 4 Kerne | 8+ Kerne |
+| RAM | 12 GB | 16 GB | 32+ GB |
+| Speicher | 20 GB SSD | 50 GB SSD | 100+ GB SSD |
+| Docker | 20.10+ | 20.10+ | Neueste Version |
+
+**Klein**: Entwicklung, Tests, Einzelnutzer (1-5 Nutzer)
+**Standard**: Teams (5-50 Nutzer), moderate Arbeitslasten
+**Produktion**: Große Teams (50+ Nutzer), Hochverfügbarkeit, intensive Workflow-Ausführung
+
+<Callout type="info">
+  Die Ressourcenanforderungen werden durch Workflow-Ausführung (isolated-vm Sandboxing), Dateiverarbeitung (In-Memory-Dokumentenparsing) und Vektoroperationen (pgvector) bestimmt. Arbeitsspeicher ist typischerweise der limitierende Faktor, nicht CPU. Produktionsdaten zeigen, dass die Hauptanwendung durchschnittlich 4-8 GB und bei hoher Last bis zu 12 GB benötigt.
+</Callout>
 
 ## Schnellstart
 
```
```diff
@@ -5,45 +5,25 @@ title: Copilot
 import { Callout } from 'fumadocs-ui/components/callout'
 import { Card, Cards } from 'fumadocs-ui/components/card'
 import { Image } from '@/components/ui/image'
-import { MessageCircle, Package, Zap, Infinity as InfinityIcon, Brain, BrainCircuit } from 'lucide-react'
+import { MessageCircle, Hammer, Zap, Globe, Paperclip, History, RotateCcw, Brain } from 'lucide-react'
 
-Copilot is your in-editor assistant that helps you build and edit workflows with Sim Copilot, as well as understand and improve them. It can:
+Copilot is your in-editor assistant that helps you build and edit workflows. It can:
 
 - **Explain**: Answer questions about Sim and your current workflow
-- **Guide**: Suggest edits and best practices
-- **Edit**: Make changes to blocks, connections, and settings when you approve
+- **Build**: Add blocks, wire connections, and configure settings
 - **Debug**: Analyze execution issues and optimize performance
 
 <Callout type="info">
-  Copilot is a Sim-managed service. For self-hosted deployments, generate a Copilot API key in the hosted app (sim.ai → Settings → Copilot)
-  2. Set `COPILOT_API_KEY` in your self-hosted environment to that value
+  Copilot is a Sim-managed service. For self-hosted deployments:
+  1. Go to [sim.ai](https://sim.ai) → Settings → Copilot and generate a Copilot API key
+  2. Set `COPILOT_API_KEY` in your self-hosted environment
 </Callout>
 
-## Context Menu (@)
-
-Use the `@` symbol to reference various resources and give Copilot more context about your workspace:
-
-<Image
-  src="/static/copilot/copilot-menu.png"
-  alt="Copilot context menu showing available reference options"
-  width={600}
-  height={400}
-/>
-
-The `@` menu provides access to:
-- **Chats**: Reference previous copilot conversations
-- **All workflows**: Reference any workflow in your workspace
-- **Workflow Blocks**: Reference specific blocks from workflows
-- **Blocks**: Reference block types and templates
-- **Knowledge**: Reference your uploaded documents and knowledgebase
-- **Docs**: Reference Sim documentation
-- **Templates**: Reference workflow templates
-- **Logs**: Reference execution logs and results
-
-This contextual information helps Copilot provide more accurate and relevant assistance for your specific use case.
-
 ## Modes
 
 Switch between modes using the mode selector at the bottom of the input area.
 
 <Cards>
   <Card
     title={
```
````diff
@@ -60,113 +40,153 @@ This contextual information helps Copilot provide more accurate and relevant ass
   <Card
     title={
       <span className="inline-flex items-center gap-2">
-        <Package className="h-4 w-4 text-muted-foreground" />
-        Agent
+        <Hammer className="h-4 w-4 text-muted-foreground" />
+        Build
       </span>
     }
   >
     <div className="m-0 text-sm">
-      Build-and-edit mode. Copilot proposes specific edits (add blocks, wire variables, tweak settings) and applies them when you approve.
+      Workflow building mode. Copilot can add blocks, wire connections, edit configurations, and debug issues.
     </div>
   </Card>
 </Cards>
 
 <div className="flex justify-center">
   <Image
     src="/static/copilot/copilot-mode.png"
     alt="Copilot mode selection interface"
     width={600}
     height={400}
     className="my-6"
   />
 </div>
 
-## Depth Levels
+## Models
 
-<Cards>
-  <Card
-    title={
-      <span className="inline-flex items-center gap-2">
-        <Zap className="h-4 w-4 text-muted-foreground" />
-        Fast
-      </span>
-    }
-  >
-    <div className="m-0 text-sm">Quickest and cheapest. Best for small edits, simple workflows, and minor tweaks.</div>
-  </Card>
-  <Card
-    title={
-      <span className="inline-flex items-center gap-2">
-        <InfinityIcon className="h-4 w-4 text-muted-foreground" />
-        Auto
-      </span>
-    }
-  >
-    <div className="m-0 text-sm">Balanced speed and reasoning. Recommended default for most tasks.</div>
-  </Card>
-  <Card
-    title={
-      <span className="inline-flex items-center gap-2">
-        <Brain className="h-4 w-4 text-muted-foreground" />
-        Advanced
-      </span>
-    }
-  >
-    <div className="m-0 text-sm">More reasoning for larger workflows and complex edits while staying performant.</div>
-  </Card>
-  <Card
-    title={
-      <span className="inline-flex items-center gap-2">
-        <BrainCircuit className="h-4 w-4 text-muted-foreground" />
-        Behemoth
-      </span>
-    }
-  >
-    <div className="m-0 text-sm">Maximum reasoning for deep planning, debugging, and complex architectural changes.</div>
-  </Card>
-</Cards>
+Select your preferred AI model using the model selector at the bottom right of the input area.
 
-### Mode Selection Interface
-
-You can easily switch between different reasoning modes using the mode selector in the Copilot interface:
-
-<Image
-  src="/static/copilot/copilot-models.png"
-  alt="Copilot mode selection showing Advanced mode with MAX toggle"
-  width={600}
-  height={300}
-/>
-
-The interface allows you to:
-- **Select reasoning level**: Choose from Fast, Auto, Advanced, or Behemoth
-- **Enable MAX mode**: Toggle for maximum reasoning capabilities when you need the most thorough analysis
-- **See mode descriptions**: Understand what each mode is optimized for
-
-Choose your mode based on the complexity of your task - use Fast for simple questions and Behemoth for complex architectural changes.
+**Available Models:**
+- Claude 4.5 Opus, Sonnet (default), Haiku
+- GPT 5.2 Codex, Pro
+- Gemini 3 Pro
+
+Choose based on your needs: faster models for simple tasks, more capable models for complex workflows.
+
+## Context Menu (@)
+
+Use the `@` symbol to reference resources and give Copilot more context:
+
+| Reference | Description |
+|-----------|-------------|
+| **Chats** | Previous copilot conversations |
+| **Workflows** | Any workflow in your workspace |
+| **Workflow Blocks** | Blocks in the current workflow |
+| **Blocks** | Block types and templates |
+| **Knowledge** | Uploaded documents and knowledge bases |
+| **Docs** | Sim documentation |
+| **Templates** | Workflow templates |
+| **Logs** | Execution logs and results |
+
+Type `@` in the input field to open the context menu, then search or browse to find what you need.
 
-## Billing and Cost Calculation
+## Slash Commands (/)
 
-### How Costs Are Calculated
+Use slash commands for quick actions:
 
-Copilot usage is billed per token from the underlying LLM:
-
-- **Input tokens**: billed at the provider's base rate (**at-cost**)
-- **Output tokens**: billed at **1.5×** the provider's base output rate
-
-```javascript
-copilotCost = (inputTokens × inputPrice + outputTokens × (outputPrice × 1.5)) / 1,000,000
-```
-
-| Component | Rate Applied |
-|----------|----------------------|
-| Input | inputPrice |
-| Output | outputPrice × 1.5 |
-
-<Callout type="warning">
-  Pricing shown reflects rates as of September 4, 2025. Check provider documentation for current pricing.
-</Callout>
+| Command | Description |
+|---------|-------------|
+| `/fast` | Fast mode execution |
+| `/research` | Research and exploration mode |
+| `/actions` | Execute agent actions |
+
+**Web Commands:**
+
+| Command | Description |
+|---------|-------------|
+| `/search` | Search the web |
+| `/read` | Read a specific URL |
+| `/scrape` | Scrape web page content |
+| `/crawl` | Crawl multiple pages |
+
+Type `/` in the input field to see available commands.
+
+## Chat Management
+
+### Starting a New Chat
+
+Click the **+** button in the Copilot header to start a fresh conversation.
+
+### Chat History
+
+Click **History** to view previous conversations grouped by date. You can:
+- Click a chat to resume it
+- Delete chats you no longer need
+
+### Editing Messages
+
+Hover over any of your messages and click **Edit** to modify and resend it. This is useful for refining your prompts.
+
+### Message Queue
+
+If you send a message while Copilot is still responding, it gets queued. You can:
+- View queued messages in the expandable queue panel
+- Send a queued message immediately (aborts current response)
+- Remove messages from the queue
+
+## File Attachments
+
+Click the attachment icon to upload files with your message. Supported file types include:
+- Images (preview thumbnails shown)
+- PDFs
+- Text files, JSON, XML
+- Other document formats
+
+Files are displayed as clickable thumbnails that open in a new tab.
+
+## Checkpoints & Changes
+
+When Copilot makes changes to your workflow, it saves checkpoints so you can revert if needed.
+
+### Viewing Checkpoints
+
+Hover over a Copilot message and click the checkpoints icon to see saved workflow states for that message.
+
+### Reverting Changes
+
+Click **Revert** on any checkpoint to restore your workflow to that state. A confirmation dialog will warn that this action cannot be undone.
+
+### Accepting Changes
+
+When Copilot proposes changes, you can:
+- **Accept**: Apply the proposed changes (`Mod+Shift+Enter`)
+- **Reject**: Dismiss the changes and keep your current workflow
+
+## Thinking Blocks
+
+For complex requests, Copilot may show its reasoning process in expandable thinking blocks:
+
+- Blocks auto-expand while Copilot is thinking
+- Click to manually expand/collapse
+- Shows duration of the thinking process
+- Helps you understand how Copilot arrived at its solution
+
+## Options Selection
+
+When Copilot presents multiple options, you can select using:
+
+| Control | Action |
+|---------|--------|
+| **1-9** | Select option by number |
+| **Arrow Up/Down** | Navigate between options |
+| **Enter** | Select highlighted option |
+
+Selected options are highlighted; unselected options appear struck through.
+
+## Keyboard Shortcuts
+
+| Shortcut | Action |
+|----------|--------|
+| `@` | Open context menu |
+| `/` | Open slash commands |
+| `Arrow Up/Down` | Navigate menu items |
+| `Enter` | Select menu item |
+| `Esc` | Close menus |
+| `Mod+Shift+Enter` | Accept Copilot changes |
+
+## Usage Limits
+
+Copilot usage is billed per token from the underlying LLM. If you reach your usage limit, Copilot will prompt you to increase your limit. You can add usage in increments ($50, $100) from your current base.
 
 <Callout type="info">
-  Model prices are per million tokens. The calculation divides by 1,000,000 to get the actual cost. See <a href="/execution/costs">the Cost Calculation page</a> for background and examples.
+  See the [Cost Calculation page](/execution/costs) for billing details.
 </Callout>
````
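The billing formula removed above still describes how per-token cost is computed: prices are quoted per million tokens, input is billed at the provider's base rate, and output at 1.5× the base output rate. A worked sketch in TypeScript, with placeholder rates rather than any provider's actual pricing:

```typescript
// Cost formula from the removed section; the 3/15-per-million rates below are placeholders.
function copilotCost(
  inputTokens: number,
  outputTokens: number,
  inputPrice: number, // $ per million input tokens
  outputPrice: number // $ per million output tokens
): number {
  return (inputTokens * inputPrice + outputTokens * outputPrice * 1.5) / 1_000_000
}

copilotCost(10_000, 2_000, 3, 15) // => 0.075, i.e. $0.075 for 10k input + 2k output tokens
```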
```diff
@@ -34,6 +34,8 @@ Speed up your workflow building with these keyboard shortcuts and mouse controls
 | `Mod` + `V` | Paste blocks |
 | `Delete` or `Backspace` | Delete selected blocks or edges |
 | `Shift` + `L` | Auto-layout canvas |
+| `Mod` + `Shift` + `F` | Fit to view |
+| `Mod` + `Shift` + `Enter` | Accept Copilot changes |
 
 ## Panel Navigation
 
```
```diff
@@ -3,6 +3,7 @@
   "pages": [
     "./introduction/index",
     "./getting-started/index",
+    "./quick-reference/index",
     "triggers",
     "blocks",
     "tools",
```
**apps/docs/content/docs/en/quick-reference/index.mdx** (new file, 375 lines)

```mdx
---
title: Quick Reference
description: Essential actions for navigating and using the Sim workflow editor
---

import { Callout } from 'fumadocs-ui/components/callout'
import { ActionImage, ActionVideo } from '@/components/ui/action-media'

A quick lookup for everyday actions in the Sim workflow editor. For keyboard shortcuts, see [Keyboard Shortcuts](/keyboard-shortcuts).

<Callout type="info">
  **Mod** refers to `Cmd` on macOS and `Ctrl` on Windows/Linux.
</Callout>

## Workspaces

<table>
  <thead>
    <tr><th>Action</th><th>How</th><th>Preview</th></tr>
  </thead>
  <tbody>
    <tr>
      <td>Create a workspace</td>
      <td>Click workspace dropdown → **New Workspace**</td>
      <td><ActionVideo src="quick-reference/create-workspace.mp4" alt="Create workspace" /></td>
    </tr>
    <tr>
      <td>Switch workspaces</td>
      <td>Click workspace dropdown → Select workspace</td>
      <td><ActionVideo src="quick-reference/switch-workspace.mp4" alt="Switch workspaces" /></td>
    </tr>
    <tr>
      <td>Invite team members</td>
      <td>Sidebar → **Invite**</td>
      <td><ActionVideo src="quick-reference/invite.mp4" alt="Invite team members" /></td>
    </tr>
    <tr>
      <td>Rename a workspace</td>
      <td>Right-click workspace → **Rename**</td>
      <td rowSpan={4}><ActionImage src="/static/quick-reference/workspace-context-menu.png" alt="Workspace context menu" /></td>
    </tr>
    <tr>
      <td>Duplicate a workspace</td>
      <td>Right-click workspace → **Duplicate**</td>
    </tr>
    <tr>
      <td>Export a workspace</td>
      <td>Right-click workspace → **Export**</td>
    </tr>
    <tr>
      <td>Delete a workspace</td>
      <td>Right-click workspace → **Delete**</td>
    </tr>
  </tbody>
</table>

## Workflows

<table>
  <thead>
    <tr><th>Action</th><th>How</th><th>Preview</th></tr>
  </thead>
  <tbody>
    <tr>
      <td>Create a workflow</td>
      <td>Click **+** button in sidebar</td>
      <td><ActionImage src="/static/quick-reference/create-workflow.png" alt="Create workflow" /></td>
    </tr>
    <tr>
      <td>Reorder / move workflows</td>
      <td>Drag workflow up/down or onto a folder</td>
      <td><ActionVideo src="quick-reference/reordering.mp4" alt="Reorder workflows" /></td>
    </tr>
    <tr>
      <td>Import a workflow</td>
      <td>Click import button in sidebar → Select file</td>
      <td><ActionImage src="/static/quick-reference/import-workflow.png" alt="Import workflow" /></td>
    </tr>
    <tr>
      <td>Multi-select workflows</td>
      <td>`Mod+Click` or `Shift+Click` workflows in sidebar</td>
      <td><ActionVideo src="quick-reference/multiselect.mp4" alt="Multi-select workflows" /></td>
    </tr>
    <tr>
      <td>Open in new tab</td>
      <td>Right-click workflow → **Open in New Tab**</td>
      <td rowSpan={6}><ActionImage src="/static/quick-reference/workflow-context-menu.png" alt="Workflow context menu" /></td>
    </tr>
    <tr>
      <td>Rename a workflow</td>
      <td>Right-click workflow → **Rename**</td>
    </tr>
    <tr>
      <td>Assign workflow color</td>
      <td>Right-click workflow → **Change Color**</td>
    </tr>
    <tr>
      <td>Duplicate a workflow</td>
      <td>Right-click workflow → **Duplicate**</td>
    </tr>
    <tr>
      <td>Export a workflow</td>
      <td>Right-click workflow → **Export**</td>
    </tr>
    <tr>
      <td>Delete a workflow</td>
      <td>Right-click workflow → **Delete**</td>
    </tr>
    <tr>
      <td>Rename a folder</td>
      <td>Right-click folder → **Rename**</td>
      <td rowSpan={6}><ActionImage src="/static/quick-reference/folder-context-menu.png" alt="Folder context menu" /></td>
    </tr>
    <tr>
      <td>Create workflow in folder</td>
      <td>Right-click folder → **Create workflow**</td>
    </tr>
    <tr>
      <td>Create folder in folder</td>
      <td>Right-click folder → **Create folder**</td>
    </tr>
    <tr>
      <td>Duplicate a folder</td>
      <td>Right-click folder → **Duplicate**</td>
    </tr>
    <tr>
      <td>Export a folder</td>
      <td>Right-click folder → **Export**</td>
    </tr>
    <tr>
      <td>Delete a folder</td>
      <td>Right-click folder → **Delete**</td>
    </tr>
  </tbody>
</table>

## Blocks

<table>
  <thead>
    <tr><th>Action</th><th>How</th><th>Preview</th></tr>
  </thead>
  <tbody>
    <tr>
      <td>Add a block</td>
      <td>Drag from Toolbar panel, or right-click canvas → **Add Block**</td>
      <td><ActionVideo src="quick-reference/add-block.mp4" alt="Add a block" /></td>
    </tr>
    <tr>
      <td>Multi-select blocks</td>
      <td>`Mod+Click` additional blocks, or shift-drag to draw selection box</td>
      <td><ActionVideo src="quick-reference/multiselect-blocks.mp4" alt="Multi-select blocks" /></td>
    </tr>
    <tr>
      <td>Copy blocks</td>
      <td>`Mod+C` with blocks selected</td>
      <td rowSpan={2}><ActionVideo src="quick-reference/copy-paste.mp4" alt="Copy and paste blocks" /></td>
    </tr>
    <tr>
      <td>Paste blocks</td>
      <td>`Mod+V` to paste copied blocks</td>
    </tr>
    <tr>
      <td>Duplicate blocks</td>
      <td>Right-click → **Duplicate**</td>
      <td><ActionVideo src="quick-reference/duplicate-block.mp4" alt="Duplicate blocks" /></td>
    </tr>
    <tr>
      <td>Delete blocks</td>
      <td>`Delete` or `Backspace` key, or right-click → **Delete**</td>
      <td><ActionImage src="/static/quick-reference/delete-block.png" alt="Delete block" /></td>
    </tr>
    <tr>
      <td>Rename a block</td>
      <td>Click block name in header, or edit in the Editor panel</td>
      <td><ActionVideo src="quick-reference/rename-block.mp4" alt="Rename a block" /></td>
    </tr>
    <tr>
      <td>Enable/Disable a block</td>
      <td>Right-click → **Enable/Disable**</td>
      <td><ActionImage src="/static/quick-reference/disable-block.png" alt="Disable block" /></td>
    </tr>
    <tr>
      <td>Toggle handle orientation</td>
      <td>Right-click → **Toggle Handles**</td>
      <td><ActionVideo src="quick-reference/toggle-handles.mp4" alt="Toggle handle orientation" /></td>
    </tr>
    <tr>
      <td>Configure a block</td>
      <td>Select block → use Editor panel on right</td>
      <td><ActionVideo src="quick-reference/configure-block.mp4" alt="Configure a block" /></td>
    </tr>
  </tbody>
</table>

## Connections

<table>
  <thead>
    <tr><th>Action</th><th>How</th><th>Preview</th></tr>
  </thead>
  <tbody>
    <tr>
      <td>Create a connection</td>
      <td>Drag from output handle to input handle</td>
      <td><ActionVideo src="quick-reference/connect-blocks.mp4" alt="Connect blocks" /></td>
    </tr>
    <tr>
      <td>Delete a connection</td>
      <td>Click edge to select → `Delete` key</td>
      <td><ActionVideo src="quick-reference/delete-connection.mp4" alt="Delete connection" /></td>
    </tr>
    <tr>
      <td>Use output in another block</td>
      <td>Drag connection tag into input field</td>
      <td><ActionVideo src="quick-reference/connection-tag.mp4" alt="Use connection tag" /></td>
    </tr>
  </tbody>
</table>

## Panels & Views

<table>
  <thead>
    <tr><th>Action</th><th>How</th><th>Preview</th></tr>
  </thead>
  <tbody>
    <tr>
      <td>Search toolbar</td>
      <td>`Mod+F`</td>
      <td><ActionVideo src="quick-reference/search-toolbar.mp4" alt="Search toolbar" /></td>
    </tr>
    <tr>
      <td>Search everything</td>
      <td>`Mod+K`</td>
      <td><ActionImage src="/static/quick-reference/search-everything.png" alt="Search everything" /></td>
    </tr>
    <tr>
      <td>Toggle manual mode</td>
      <td>Click toggle button to switch between manual and selector</td>
      <td><ActionImage src="/static/quick-reference/toggle-manual-mode.png" alt="Toggle manual mode" /></td>
    </tr>
    <tr>
      <td>Collapse/expand sidebar</td>
      <td>Click collapse button on sidebar</td>
      <td><ActionVideo src="quick-reference/collapse-sidebar.mp4" alt="Collapse sidebar" /></td>
    </tr>
  </tbody>
</table>

## Running & Testing

<table>
  <thead>
    <tr><th>Action</th><th>How</th><th>Preview</th></tr>
  </thead>
  <tbody>
    <tr>
      <td>Run workflow</td>
      <td>Click Run Workflow button or `Mod+Enter`</td>
      <td><ActionImage src="/static/quick-reference/run-workflow.png" alt="Run workflow" /></td>
    </tr>
    <tr>
      <td>Stop workflow</td>
      <td>Click Stop button or `Mod+Enter` while running</td>
      <td><ActionImage src="/static/quick-reference/stop-workflow.png" alt="Stop workflow" /></td>
    </tr>
    <tr>
      <td>Test with chat</td>
      <td>Use Chat panel on the right side</td>
      <td><ActionImage src="/static/quick-reference/test-chat.png" alt="Test with chat" /></td>
    </tr>
    <tr>
      <td>Select output to view</td>
      <td>Click dropdown in Chat panel → Select block output</td>
      <td><ActionImage src="/static/quick-reference/output-select.png" alt="Select output to view" /></td>
    </tr>
    <tr>
      <td>Clear chat history</td>
      <td>Click clear button in Chat panel</td>
      <td><ActionImage src="/static/quick-reference/clear-chat.png" alt="Clear chat history" /></td>
    </tr>
    <tr>
      <td>View execution logs</td>
      <td>Open terminal panel at bottom, or `Mod+L`</td>
      <td><ActionImage src="/static/quick-reference/terminal.png" alt="Execution logs terminal" /></td>
    </tr>
    <tr>
      <td>Filter logs by block or status</td>
      <td>Click block filter in terminal or right-click log entry → **Filter by Block** or **Filter by Status**</td>
      <td><ActionImage src="/static/quick-reference/filter-block.png" alt="Filter logs by block" /></td>
    </tr>
    <tr>
      <td>Search logs</td>
      <td>Use search field in terminal or right-click log entry → **Search**</td>
      <td><ActionImage src="/static/quick-reference/terminal-search.png" alt="Search logs" /></td>
    </tr>
    <tr>
      <td>Copy log entry</td>
      <td>Clipboard Icon or Right-click log entry → **Copy**</td>
      <td><ActionImage src="/static/quick-reference/copy-log.png" alt="Copy log entry" /></td>
    </tr>
    <tr>
      <td>Clear terminal</td>
      <td>Trash icon or `Mod+D`</td>
      <td><ActionImage src="/static/quick-reference/clear-terminal.png" alt="Clear terminal" /></td>
    </tr>
  </tbody>
</table>

## Deployment

<table>
  <thead>
    <tr><th>Action</th><th>How</th><th>Preview</th></tr>
  </thead>
  <tbody>
    <tr>
      <td>Deploy a workflow</td>
      <td>Click **Deploy** button in panel</td>
      <td><ActionImage src="/static/quick-reference/deploy.png" alt="Deploy workflow" /></td>
    </tr>
    <tr>
      <td>Update deployment</td>
      <td>Click **Update** when changes are detected</td>
      <td><ActionImage src="/static/quick-reference/update-deployment.png" alt="Update deployment" /></td>
    </tr>
    <tr>
      <td>View deployment status</td>
      <td>Check status indicator (Live/Update/Deploy) in Deploy tab</td>
      <td><ActionImage src="/static/quick-reference/view-deployment.png" alt="View deployment status" /></td>
    </tr>
    <tr>
      <td>Revert deployment</td>
      <td>Access previous versions in Deploy tab → **Promote to live**</td>
      <td><ActionImage src="/static/quick-reference/promote-deployment.png" alt="Promote deployment to live" /></td>
    </tr>
    <tr>
      <td>Copy API endpoint</td>
      <td>Deploy tab → API → Copy API cURL</td>
      <td><ActionImage src="/static/quick-reference/copy-api.png" alt="Copy API endpoint" /></td>
    </tr>
  </tbody>
</table>

## Variables

<table>
  <thead>
    <tr><th>Action</th><th>How</th><th>Preview</th></tr>
  </thead>
  <tbody>
    <tr>
      <td>Add / Edit / Delete workflow variable</td>
      <td>Panel → Variables → **Add Variable**, click to edit, or delete icon</td>
      <td><ActionImage src="/static/quick-reference/variables.png" alt="Variables panel" /></td>
    </tr>
    <tr>
      <td>Add environment variable</td>
      <td>Settings → **Environment Variables** → **Add**</td>
      <td><ActionImage src="/static/quick-reference/add-env-variable.png" alt="Add environment variable" /></td>
    </tr>
    <tr>
      <td>Reference a workflow variable</td>
      <td>Use `<blockName.itemName>` syntax in block inputs</td>
      <td><ActionImage src="/static/quick-reference/variable-reference.png" alt="Reference workflow variable" /></td>
    </tr>
    <tr>
      <td>Reference an environment variable</td>
      <td>Use `{{ENV_VAR}}` syntax in block inputs</td>
      <td><ActionImage src="/static/quick-reference/env-variable-reference.png" alt="Reference environment variable" /></td>
    </tr>
  </tbody>
</table>
```
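The last two rows above show the two reference syntaxes: `<blockName.itemName>` for block outputs and workflow variables, and `{{ENV_VAR}}` for environment variables. As a toy illustration of the `{{ENV_VAR}}` shape (not Sim's actual resolver):

```typescript
// Toy resolver for the {{ENV_VAR}} placeholder syntax; illustrative only.
function resolveEnvVars(input: string, env: Record<string, string>): string {
  // Unknown variables are left intact rather than replaced with an empty string.
  return input.replace(/\{\{(\w+)\}\}/g, (whole, name: string) => env[name] ?? whole)
}

resolveEnvVars('Bearer {{MY_API_KEY}}', { MY_API_KEY: 'sk-123' }) // => 'Bearer sk-123'
```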
```diff
@@ -16,12 +16,20 @@ Deploy Sim on your own infrastructure with Docker or Kubernetes.
 
 ## Requirements
 
-| Resource | Minimum | Recommended |
-|----------|---------|-------------|
-| CPU | 2 cores | 4+ cores |
-| RAM | 12 GB | 16+ GB |
-| Storage | 20 GB SSD | 50+ GB SSD |
-| Docker | 20.10+ | Latest |
+| Resource | Small | Standard | Production |
+|----------|-------|----------|------------|
+| CPU | 2 cores | 4 cores | 8+ cores |
+| RAM | 12 GB | 16 GB | 32+ GB |
+| Storage | 20 GB SSD | 50 GB SSD | 100+ GB SSD |
+| Docker | 20.10+ | 20.10+ | Latest |
+
+**Small**: Development, testing, single user (1-5 users)
+**Standard**: Teams (5-50 users), moderate workloads
+**Production**: Large teams (50+ users), high availability, heavy workflow execution
+
+<Callout type="info">
+  Resource requirements are driven by workflow execution (isolated-vm sandboxing), file processing (in-memory document parsing), and vector operations (pgvector). Memory is typically the constraining factor rather than CPU. Production telemetry shows the main app uses 4-8 GB average with peaks up to 12 GB under heavy load.
+</Callout>
 
 ## Quick Start
 
```
```diff
@@ -10,12 +10,20 @@ Despliega Sim en tu propia infraestructura con Docker o Kubernetes.
 
 ## Requisitos
 
-| Recurso | Mínimo | Recomendado |
-|----------|---------|-------------|
-| CPU | 2 núcleos | 4+ núcleos |
-| RAM | 12 GB | 16+ GB |
-| Almacenamiento | 20 GB SSD | 50+ GB SSD |
-| Docker | 20.10+ | Última versión |
+| Recurso | Pequeño | Estándar | Producción |
+|----------|---------|----------|------------|
+| CPU | 2 núcleos | 4 núcleos | 8+ núcleos |
+| RAM | 12 GB | 16 GB | 32+ GB |
+| Almacenamiento | 20 GB SSD | 50 GB SSD | 100+ GB SSD |
+| Docker | 20.10+ | 20.10+ | Última versión |
+
+**Pequeño**: Desarrollo, pruebas, usuario único (1-5 usuarios)
+**Estándar**: Equipos (5-50 usuarios), cargas de trabajo moderadas
+**Producción**: Equipos grandes (50+ usuarios), alta disponibilidad, ejecución intensiva de workflows
+
+<Callout type="info">
+  Los requisitos de recursos están determinados por la ejecución de workflows (sandboxing isolated-vm), procesamiento de archivos (análisis de documentos en memoria) y operaciones vectoriales (pgvector). La memoria suele ser el factor limitante, no la CPU. La telemetría de producción muestra que la aplicación principal usa 4-8 GB en promedio con picos de hasta 12 GB bajo carga pesada.
+</Callout>
 
 ## Inicio rápido
 
```
```diff
@@ -10,12 +10,20 @@ Déployez Sim sur votre propre infrastructure avec Docker ou Kubernetes.
 
 ## Prérequis
 
-| Ressource | Minimum | Recommandé |
-|----------|---------|-------------|
-| CPU | 2 cœurs | 4+ cœurs |
-| RAM | 12 Go | 16+ Go |
-| Stockage | 20 Go SSD | 50+ Go SSD |
-| Docker | 20.10+ | Dernière version |
+| Ressource | Petit | Standard | Production |
+|----------|-------|----------|------------|
+| CPU | 2 cœurs | 4 cœurs | 8+ cœurs |
+| RAM | 12 Go | 16 Go | 32+ Go |
+| Stockage | 20 Go SSD | 50 Go SSD | 100+ Go SSD |
+| Docker | 20.10+ | 20.10+ | Dernière version |
+
+**Petit** : Développement, tests, utilisateur unique (1-5 utilisateurs)
+**Standard** : Équipes (5-50 utilisateurs), charges de travail modérées
+**Production** : Grandes équipes (50+ utilisateurs), haute disponibilité, exécution intensive de workflows
+
+<Callout type="info">
+  Les besoins en ressources sont déterminés par l'exécution des workflows (sandboxing isolated-vm), le traitement des fichiers (analyse de documents en mémoire) et les opérations vectorielles (pgvector). La mémoire est généralement le facteur limitant, pas le CPU. La télémétrie de production montre que l'application principale utilise 4-8 Go en moyenne avec des pics jusqu'à 12 Go sous forte charge.
+</Callout>
 
 ## Démarrage rapide
 
```
```diff
@@ -10,12 +10,20 @@ DockerまたはKubernetesを使用して、自社のインフラストラクチ
 
 ## 要件
 
-| リソース | 最小 | 推奨 |
-|----------|---------|-------------|
-| CPU | 2コア | 4+コア |
-| RAM | 12 GB | 16+ GB |
-| ストレージ | 20 GB SSD | 50+ GB SSD |
-| Docker | 20.10+ | 最新版 |
+| リソース | スモール | スタンダード | プロダクション |
+|----------|---------|-------------|----------------|
+| CPU | 2コア | 4コア | 8+コア |
+| RAM | 12 GB | 16 GB | 32+ GB |
+| ストレージ | 20 GB SSD | 50 GB SSD | 100+ GB SSD |
+| Docker | 20.10+ | 20.10+ | 最新版 |
+
+**スモール**: 開発、テスト、シングルユーザー(1-5ユーザー)
+**スタンダード**: チーム(5-50ユーザー)、中程度のワークロード
+**プロダクション**: 大規模チーム(50+ユーザー)、高可用性、高負荷ワークフロー実行
+
+<Callout type="info">
+  リソース要件は、ワークフロー実行(isolated-vmサンドボックス)、ファイル処理(メモリ内ドキュメント解析)、ベクトル演算(pgvector)によって決まります。CPUよりもメモリが制約要因となることが多いです。本番環境のテレメトリによると、メインアプリは平均4-8 GB、高負荷時は最大12 GBを使用します。
+</Callout>
 
 ## クイックスタート
 
```
```diff
@@ -10,12 +10,20 @@ import { Callout } from 'fumadocs-ui/components/callout'
 
 ## 要求
 
-| 资源 | 最低要求 | 推荐配置 |
-|----------|---------|-------------|
-| CPU | 2 核 | 4 核及以上 |
-| 内存 | 12 GB | 16 GB 及以上 |
-| 存储 | 20 GB SSD | 50 GB 及以上 SSD |
-| Docker | 20.10+ | 最新版本 |
+| 资源 | 小型 | 标准 | 生产环境 |
+|----------|------|------|----------|
+| CPU | 2 核 | 4 核 | 8+ 核 |
+| 内存 | 12 GB | 16 GB | 32+ GB |
+| 存储 | 20 GB SSD | 50 GB SSD | 100+ GB SSD |
+| Docker | 20.10+ | 20.10+ | 最新版本 |
+
+**小型**: 开发、测试、单用户(1-5 用户)
+**标准**: 团队(5-50 用户)、中等工作负载
+**生产环境**: 大型团队(50+ 用户)、高可用性、密集工作流执行
+
+<Callout type="info">
+  资源需求由工作流执行(isolated-vm 沙箱)、文件处理(内存中文档解析)和向量运算(pgvector)决定。内存通常是限制因素,而不是 CPU。生产遥测数据显示,主应用平均使用 4-8 GB,高负载时峰值可达 12 GB。
+</Callout>
 
 ## 快速开始
 
```
New binary files (all under apps/docs/public/static/quick-reference/):

| File | Size |
|---|---|
| add-env-variable.png | 104 KiB |
| clear-chat.png | 37 KiB |
| clear-terminal.png | 45 KiB |
| copy-api.png | 114 KiB |
| copy-log.png | 44 KiB |
| create-workflow.png | 26 KiB |
| delete-block.png | 27 KiB |
| deploy.png | 6.7 KiB |
| disable-block.png | 24 KiB |
| (filename not shown) | 36 KiB |
| filter-block.png | 66 KiB |
| folder-context-menu.png | 48 KiB |
| import-workflow.png | 20 KiB |
| output-select.png | 31 KiB |
| promote-deployment.png | 49 KiB |
| rename-workflow.png | 25 KiB |
| run-workflow.png | 5.9 KiB |
| search-everything.png | 78 KiB |
| stop-workflow.png | 12 KiB |
| terminal-search.png | 82 KiB |
| terminal.png | 146 KiB |
| test-chat.png | 7.1 KiB |
| toggle-manual-mode.png | 28 KiB |
| update-deployment.png | 6.8 KiB |
| variable-reference.png | 60 KiB |
| variables.png | 31 KiB |
| view-deployment.png | 90 KiB |
| (filename not shown) | 36 KiB |
| (filename not shown) | 103 KiB |
```diff
@@ -10,8 +10,8 @@ export { LandingLoopNode } from './landing-canvas/landing-block/landing-loop-nod
 export { LandingNode } from './landing-canvas/landing-block/landing-node'
 export type { LoopBlockProps } from './landing-canvas/landing-block/loop-block'
 export { LoopBlock } from './landing-canvas/landing-block/loop-block'
-export type { TagProps } from './landing-canvas/landing-block/tag'
-export { Tag } from './landing-canvas/landing-block/tag'
+export type { SubBlockRowProps, TagProps } from './landing-canvas/landing-block/tag'
+export { SubBlockRow, Tag } from './landing-canvas/landing-block/tag'
 export type {
   LandingBlockNode,
   LandingCanvasProps,
```
```diff
@@ -1,12 +1,12 @@
 import React from 'react'
+import { BookIcon } from 'lucide-react'
 import {
   Tag,
   type TagProps,
+  SubBlockRow,
+  type SubBlockRowProps,
 } from '@/app/(landing)/components/hero/components/landing-canvas/landing-block/tag'
 
 /**
  * Data structure for a landing card component
  * Matches the workflow block structure from the application
  */
 export interface LandingCardData {
   /** Icon element to display in the card header */
```

```diff
@@ -15,8 +15,8 @@ export interface LandingCardData {
   color: string | '#f6f6f6'
   /** Name/title of the card */
   name: string
-  /** Optional tags to display at the bottom of the card */
-  tags?: TagProps[]
+  /** Optional subblock rows to display below the header */
+  tags?: SubBlockRowProps[]
 }
```

```diff
@@ -28,7 +28,8 @@ export interface LandingBlockProps extends LandingCardData {
 }
 
 /**
- * Landing block component that displays a card with icon, name, and optional tags
+ * Landing block component that displays a card with icon, name, and optional subblock rows
+ * Styled to match the application's workflow blocks
  * @param props - Component properties including icon, color, name, tags, and className
  * @returns A styled block card component
  */
```

```diff
@@ -39,33 +40,37 @@ export const LandingBlock = React.memo(function LandingBlock({
   tags,
   className,
 }: LandingBlockProps) {
+  const hasContentBelowHeader = tags && tags.length > 0
+
   return (
     <div
-      className={`z-10 flex w-64 flex-col items-start gap-3 rounded-[14px] border border-[#E5E5E5] bg-[#FEFEFE] p-3 ${className ?? ''}`}
-      style={{
-        boxShadow: '0 1px 2px 0 rgba(0, 0, 0, 0.05)',
-      }}
+      className={`z-10 flex w-[250px] flex-col rounded-[8px] border border-[#E5E5E5] bg-white ${className ?? ''}`}
     >
-      <div className='flex w-full items-center justify-between'>
-        <div className='flex items-center gap-2.5'>
+      {/* Header - matches workflow-block.tsx header styling */}
+      <div
+        className={`flex items-center justify-between p-[8px] ${hasContentBelowHeader ? 'border-[#E5E5E5] border-b' : ''}`}
+      >
+        <div className='flex min-w-0 flex-1 items-center gap-[10px]'>
           <div
-            className='flex h-6 w-6 items-center justify-center rounded-[8px] text-white'
-            style={{ backgroundColor: color as string }}
+            className='flex h-[24px] w-[24px] flex-shrink-0 items-center justify-center rounded-[6px]'
+            style={{ background: color as string }}
           >
             {icon}
           </div>
-          <p className='text-base text-card-foreground'>{name}</p>
+          <span className='truncate font-medium text-[#171717] text-[16px]' title={name}>
+            {name}
+          </span>
         </div>
+        <BookIcon className='h-4 w-4 text-muted-foreground' />
       </div>
 
-      {tags && tags.length > 0 ? (
-        <div className='flex flex-wrap gap-2'>
+      {/* Content - SubBlock Rows matching workflow-block.tsx */}
+      {hasContentBelowHeader && (
+        <div className='flex flex-col gap-[8px] p-[8px]'>
           {tags.map((tag) => (
-            <Tag key={tag.label} icon={tag.icon} label={tag.label} />
+            <SubBlockRow key={tag.label} icon={tag.icon} label={tag.label} />
           ))}
         </div>
-      ) : null}
+      )}
     </div>
   )
 })
```
```diff
@@ -7,9 +7,14 @@ import {
   type LandingCardData,
 } from '@/app/(landing)/components/hero/components/landing-canvas/landing-block/landing-block'
 
+/**
+ * Handle Y offset from block top - matches HANDLE_POSITIONS.DEFAULT_Y_OFFSET
+ */
+const HANDLE_Y_OFFSET = 20
+
 /**
  * React Flow node component for the landing canvas
- * Includes CSS animations and connection handles
+ * Styled to match the application's workflow blocks
  * @param props - Component properties containing node data
  * @returns A React Flow compatible node component
  */
```
```diff
@@ -41,15 +46,15 @@ export const LandingNode = React.memo(function LandingNode({ data }: { data: Lan
         type='target'
         position={Position.Left}
         style={{
-          width: '12px',
-          height: '12px',
-          background: '#FEFEFE',
-          border: '1px solid #E5E5E5',
-          borderRadius: '50%',
-          top: '50%',
-          left: '-20px',
+          width: '7px',
+          height: '20px',
+          background: '#D1D1D1',
+          border: 'none',
+          borderRadius: '2px 0 0 2px',
+          top: `${HANDLE_Y_OFFSET}px`,
+          left: '-7px',
           transform: 'translateY(-50%)',
-          zIndex: 2,
+          zIndex: 10,
         }}
         isConnectable={false}
       />
```

```diff
@@ -59,15 +64,15 @@ export const LandingNode = React.memo(function LandingNode({ data }: { data: Lan
         type='source'
         position={Position.Right}
         style={{
-          width: '12px',
-          height: '12px',
-          background: '#FEFEFE',
-          border: '1px solid #E5E5E5',
-          borderRadius: '50%',
-          top: '50%',
-          right: '-20px',
+          width: '7px',
+          height: '20px',
+          background: '#D1D1D1',
+          border: 'none',
+          borderRadius: '0 2px 2px 0',
+          top: `${HANDLE_Y_OFFSET}px`,
+          right: '-7px',
           transform: 'translateY(-50%)',
-          zIndex: 2,
+          zIndex: 10,
         }}
         isConnectable={false}
       />
```
```diff
@@ -15,6 +15,7 @@ export interface LoopBlockProps {
 /**
  * Loop block container component that provides a styled container
  * for grouping related elements with a dashed border
+ * Styled to match the application's subflow containers
  * @param props - Component properties including children and styling
  * @returns A styled loop container component
  */
```

```diff
@@ -29,33 +30,33 @@ export const LoopBlock = React.memo(function LoopBlock({
       style={{
         width: '1198px',
         height: '528px',
-        borderRadius: '14px',
-        background: 'rgba(59, 130, 246, 0.10)',
+        borderRadius: '8px',
+        background: 'rgba(59, 130, 246, 0.08)',
         position: 'relative',
         ...style,
       }}
     >
-      {/* Custom dashed border with SVG */}
+      {/* Custom dashed border with SVG - 8px border radius to match blocks */}
       <svg
         className='pointer-events-none absolute inset-0 h-full w-full'
-        style={{ borderRadius: '14px' }}
+        style={{ borderRadius: '8px' }}
         preserveAspectRatio='none'
       >
         <path
           className='landing-loop-animated-dash'
-          d='M 1183.5 527.5
-            L 14 527.5
-            A 13.5 13.5 0 0 1 0.5 514
-            L 0.5 14
-            A 13.5 13.5 0 0 1 14 0.5
-            L 1183.5 0.5
-            A 13.5 13.5 0 0 1 1197 14
-            L 1197 514
-            A 13.5 13.5 0 0 1 1183.5 527.5 Z'
+          d='M 1190 527.5
+            L 8 527.5
+            A 7.5 7.5 0 0 1 0.5 520
+            L 0.5 8
+            A 7.5 7.5 0 0 1 8 0.5
+            L 1190 0.5
+            A 7.5 7.5 0 0 1 1197.5 8
+            L 1197.5 520
+            A 7.5 7.5 0 0 1 1190 527.5 Z'
           fill='none'
           stroke='#3B82F6'
           strokeWidth='1'
-          strokeDasharray='12 12'
+          strokeDasharray='8 8'
           strokeLinecap='round'
         />
       </svg>
```
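The new path values follow directly from the 8px corner radius: with the 1px stroke centered on the path, the outline is inset 0.5px on every side (x runs from 0.5 to 1197.5 across the 1198px width, y from 0.5 to 527.5), and the arc radius drops to 7.5 so that the 0.5px inset plus the 7.5px arc reproduces the container's 8px rounding; the dash pattern is tightened from 12/12 to 8/8 to match.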
```diff
@@ -1,25 +1,52 @@
 import React from 'react'
 
 /**
- * Properties for a tag component
+ * Properties for a subblock row component
+ * Matches the SubBlockRow pattern from workflow-block.tsx
  */
-export interface TagProps {
-  /** Icon element to display in the tag */
-  icon: React.ReactNode
-  /** Text label for the tag */
+export interface SubBlockRowProps {
+  /** Icon element to display (optional, for visual context) */
+  icon?: React.ReactNode
+  /** Text label for the row title */
   label: string
+  /** Optional value to display on the right side */
+  value?: string
 }
 
 /**
- * Tag component for displaying labeled icons in a compact format
- * @param props - Tag properties including icon and label
- * @returns A styled tag component
+ * Kept for backwards compatibility
  */
-export const Tag = React.memo(function Tag({ icon, label }: TagProps) {
+export type TagProps = SubBlockRowProps
+
+/**
+ * SubBlockRow component matching the workflow block's subblock row style
+ * @param props - Row properties including label and optional value
+ * @returns A styled row component
+ */
+export const SubBlockRow = React.memo(function SubBlockRow({ label, value }: SubBlockRowProps) {
+  // Split label by colon to separate title and value if present
+  const [title, displayValue] = label.includes(':')
+    ? label.split(':').map((s) => s.trim())
+    : [label, value]
+
   return (
-    <div className='flex w-fit items-center gap-1 rounded-[8px] border border-gray-300 bg-white px-2 py-0.5'>
-      <div className='h-3 w-3 text-muted-foreground'>{icon}</div>
-      <p className='text-muted-foreground text-xs leading-normal'>{label}</p>
+    <div className='flex items-center gap-[8px]'>
+      <span className='min-w-0 truncate text-[#888888] text-[14px] capitalize' title={title}>
+        {title}
+      </span>
+      {displayValue && (
+        <span
+          className='flex-1 truncate text-right text-[#171717] text-[14px]'
+          title={displayValue}
+        >
+          {displayValue}
+        </span>
+      )}
     </div>
   )
 })
+
+/**
+ * Tag component - alias for SubBlockRow for backwards compatibility
+ */
+export const Tag = SubBlockRow
```
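Given the colon-splitting above, the landing blocks further down can pass a single label like `'Model: gpt-5'` and get a title/value row. One caveat visible in the code itself: `label.split(':')` splits on every colon and the destructuring keeps only the first two segments, so a label such as `'Time: 09:00AM Daily'` ends up with the value `09`. A minimal usage sketch:

```tsx
// Both render a "Model / gpt-5" row; the colon form is split into title and value.
<SubBlockRow label='Model: gpt-5' />
<SubBlockRow label='Model' value='gpt-5' />
```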
```diff
@@ -9,9 +9,10 @@ import { LandingFlow } from '@/app/(landing)/components/hero/components/landing-
 
 /**
  * Visual constants for landing node dimensions
+ * Matches BLOCK_DIMENSIONS from the application
  */
-export const CARD_WIDTH = 256
-export const CARD_HEIGHT = 92
+export const CARD_WIDTH = 250
+export const CARD_HEIGHT = 100
 
 /**
  * Landing block node with positioning information
```
```diff
@@ -4,33 +4,29 @@ import React from 'react'
 import { type EdgeProps, getSmoothStepPath, Position } from 'reactflow'
 
 /**
- * Custom edge component with animated dotted line that floats between handles
+ * Custom edge component with animated dashed line
+ * Styled to match the application's workflow edges with rectangular handles
  * @param props - React Flow edge properties
- * @returns An animated dotted edge component
+ * @returns An animated dashed edge component
  */
 export const LandingEdge = React.memo(function LandingEdge(props: EdgeProps) {
-  const { id, sourceX, sourceY, targetX, targetY, sourcePosition, targetPosition, style, data } =
-    props
+  const { id, sourceX, sourceY, targetX, targetY, sourcePosition, targetPosition, style } = props
 
-  // Adjust the connection points to create floating effect
-  // Account for handle size (12px) and additional spacing
-  const handleRadius = 6 // Half of handle width (12px)
-  const floatingGap = 1 // Additional gap for floating effect
-
-  // Calculate adjusted positions based on edge direction
+  // Adjust the connection points to connect flush with rectangular handles
+  // Handle width is 7px, positioned at -7px from edge
   let adjustedSourceX = sourceX
   let adjustedTargetX = targetX
 
   if (sourcePosition === Position.Right) {
-    adjustedSourceX = sourceX + handleRadius + floatingGap
+    adjustedSourceX = sourceX + 1
   } else if (sourcePosition === Position.Left) {
-    adjustedSourceX = sourceX - handleRadius - floatingGap
+    adjustedSourceX = sourceX - 1
   }
 
   if (targetPosition === Position.Left) {
-    adjustedTargetX = targetX - handleRadius - floatingGap
+    adjustedTargetX = targetX - 1
   } else if (targetPosition === Position.Right) {
-    adjustedTargetX = targetX + handleRadius + floatingGap
+    adjustedTargetX = targetX + 1
   }
 
   const [path] = getSmoothStepPath({
```

```diff
@@ -40,8 +36,8 @@ export const LandingEdge = React.memo(function LandingEdge(props: EdgeProps) {
     targetY,
     sourcePosition,
     targetPosition,
-    borderRadius: 20,
-    offset: 10,
+    borderRadius: 8,
+    offset: 16,
   })
 
   return (
```
```diff
@@ -1,16 +1,7 @@
 'use client'
 
 import React from 'react'
-import {
-  ArrowUp,
-  BinaryIcon,
-  BookIcon,
-  CalendarIcon,
-  CodeIcon,
-  Globe2Icon,
-  MessageSquareIcon,
-  VariableIcon,
-} from 'lucide-react'
+import { ArrowUp, CodeIcon } from 'lucide-react'
 import { useRouter } from 'next/navigation'
 import { type Edge, type Node, Position } from 'reactflow'
 import {
```

```diff
@@ -23,7 +14,6 @@ import {
   JiraIcon,
   LinearIcon,
   NotionIcon,
-  OpenAIIcon,
   OutlookIcon,
   PackageSearchIcon,
   PineconeIcon,
```

```diff
@@ -65,67 +55,56 @@ const SERVICE_TEMPLATES = {
 
 /**
  * Landing blocks for the canvas preview
+ * Styled to match the application's workflow blocks with subblock rows
  */
 const LANDING_BLOCKS: LandingManualBlock[] = [
   {
     id: 'schedule',
     name: 'Schedule',
     color: '#7B68EE',
-    icon: <ScheduleIcon className='h-4 w-4' />,
+    icon: <ScheduleIcon className='h-[16px] w-[16px] text-white' />,
     positions: {
       mobile: { x: 8, y: 60 },
       tablet: { x: 40, y: 120 },
       desktop: { x: 60, y: 180 },
     },
-    tags: [
-      { icon: <CalendarIcon className='h-3 w-3' />, label: '09:00AM Daily' },
-      { icon: <Globe2Icon className='h-3 w-3' />, label: 'PST' },
-    ],
+    tags: [{ label: 'Time: 09:00AM Daily' }, { label: 'Timezone: PST' }],
   },
   {
     id: 'knowledge',
     name: 'Knowledge',
     color: '#00B0B0',
-    icon: <PackageSearchIcon className='h-4 w-4' />,
+    icon: <PackageSearchIcon className='h-[16px] w-[16px] text-white' />,
     positions: {
       mobile: { x: 120, y: 140 },
       tablet: { x: 220, y: 200 },
       desktop: { x: 420, y: 241 },
     },
-    tags: [
-      { icon: <BookIcon className='h-3 w-3' />, label: 'Product Vector DB' },
-      { icon: <BinaryIcon className='h-3 w-3' />, label: 'Limit: 10' },
-    ],
+    tags: [{ label: 'Source: Product Vector DB' }, { label: 'Limit: 10' }],
   },
   {
     id: 'agent',
     name: 'Agent',
     color: '#802FFF',
-    icon: <AgentIcon className='h-4 w-4' />,
+    icon: <AgentIcon className='h-[16px] w-[16px] text-white' />,
     positions: {
       mobile: { x: 340, y: 60 },
       tablet: { x: 540, y: 120 },
       desktop: { x: 880, y: 142 },
     },
-    tags: [
-      { icon: <OpenAIIcon className='h-3 w-3' />, label: 'gpt-5' },
-      { icon: <MessageSquareIcon className='h-3 w-3' />, label: 'You are a support ag...' },
-    ],
+    tags: [{ label: 'Model: gpt-5' }, { label: 'Prompt: You are a support ag...' }],
   },
   {
     id: 'function',
     name: 'Function',
     color: '#FF402F',
-    icon: <CodeIcon className='h-4 w-4' />,
+    icon: <CodeIcon className='h-[16px] w-[16px] text-white' />,
     positions: {
       mobile: { x: 480, y: 220 },
       tablet: { x: 740, y: 280 },
       desktop: { x: 880, y: 340 },
     },
-    tags: [
-      { icon: <CodeIcon className='h-3 w-3' />, label: 'Python' },
-      { icon: <VariableIcon className='h-3 w-3' />, label: 'time = "2025-09-01...' },
-    ],
+    tags: [{ label: 'Language: Python' }, { label: 'Code: time = "2025-09-01...' }],
   },
 ]
```
@@ -229,7 +229,7 @@ function PricingCard({
 */
export default function LandingPricing() {
  return (
    <section id='pricing' className='px-4 pt-[19px] sm:px-0 sm:pt-0' aria-label='Pricing plans'>
    <section id='pricing' className='px-4 pt-[23px] sm:px-0 sm:pt-[4px]' aria-label='Pricing plans'>
      <h2 className='sr-only'>Pricing Plans</h2>
      <div className='relative mx-auto w-full max-w-[1289px]'>
        <div className='grid grid-cols-1 gap-4 sm:grid-cols-2 sm:gap-0 lg:grid-cols-4'>

@@ -21,7 +21,7 @@ interface NavProps {
}

export default function Nav({ hideAuthButtons = false, variant = 'landing' }: NavProps = {}) {
  const [githubStars, setGithubStars] = useState('25.8k')
  const [githubStars, setGithubStars] = useState('26.1k')
  const [isHovered, setIsHovered] = useState(false)
  const [isLoginHovered, setIsLoginHovered] = useState(false)
  const router = useRouter()
@@ -8,6 +8,7 @@ import { executeInIsolatedVM } from '@/lib/execution/isolated-vm'
import { CodeLanguage, DEFAULT_CODE_LANGUAGE, isValidCodeLanguage } from '@/lib/execution/languages'
import { escapeRegExp, normalizeName, REFERENCE } from '@/executor/constants'
import { type OutputSchema, resolveBlockReference } from '@/executor/utils/block-reference'
import { formatLiteralForCode } from '@/executor/utils/code-formatting'
import {
  createEnvVarPattern,
  createWorkflowVariablePattern,
@@ -387,7 +388,12 @@ function resolveWorkflowVariables(
      if (type === 'number') {
        variableValue = Number(variableValue)
      } else if (type === 'boolean') {
        variableValue = variableValue === 'true' || variableValue === true
        if (typeof variableValue === 'boolean') {
          // Already a boolean, keep as-is
        } else {
          const normalized = String(variableValue).toLowerCase().trim()
          variableValue = normalized === 'true'
        }
      } else if (type === 'json' && typeof variableValue === 'string') {
        try {
          variableValue = JSON.parse(variableValue)
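A minimal sketch of the coercion rule this hunk introduces (the helper name coerceBoolean is illustrative, not from the repo):

function coerceBoolean(value: unknown): boolean {
  // Booleans pass through untouched; anything else is stringified,
  // lowercased, and trimmed, so 'TRUE ', 'True', and true all coerce to true.
  if (typeof value === 'boolean') return value
  return String(value).toLowerCase().trim() === 'true'
}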
@@ -687,11 +693,7 @@ export async function POST(req: NextRequest) {
    prologue += `const environmentVariables = JSON.parse(${JSON.stringify(JSON.stringify(envVars))});\n`
    prologueLineCount++
    for (const [k, v] of Object.entries(contextVariables)) {
      if (v === undefined) {
        prologue += `const ${k} = undefined;\n`
      } else {
        prologue += `const ${k} = JSON.parse(${JSON.stringify(JSON.stringify(v))});\n`
      }
      prologue += `const ${k} = ${formatLiteralForCode(v, 'javascript')};\n`
      prologueLineCount++
    }

@@ -762,11 +764,7 @@ export async function POST(req: NextRequest) {
    prologue += `environmentVariables = json.loads(${JSON.stringify(JSON.stringify(envVars))})\n`
    prologueLineCount++
    for (const [k, v] of Object.entries(contextVariables)) {
      if (v === undefined) {
        prologue += `${k} = None\n`
      } else {
        prologue += `${k} = json.loads(${JSON.stringify(JSON.stringify(v))})\n`
      }
      prologue += `${k} = ${formatLiteralForCode(v, 'python')}\n`
      prologueLineCount++
    }
    const wrapped = [
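Illustrative effect of the prologue rewrite for a context variable data = { items: [1, 2] } (assuming formatLiteralForCode emits the value as a direct source literal; the variable name is made up):

// old prologue: serialize twice, parse again at runtime
const data = JSON.parse("{\"items\":[1,2]}");
// new prologue: a plain literal, no runtime parse
const data = {"items":[1,2]};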
@@ -408,6 +408,7 @@ describe('Knowledge Search Utils', () => {
          input: ['test query'],
          model: 'text-embedding-3-small',
          encoding_format: 'float',
          dimensions: 1536,
        }),
      })
    )
@@ -1,204 +0,0 @@
import { db } from '@sim/db'
import { member, permissions, user, workspace } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq, or } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'

const logger = createLogger('OrganizationWorkspacesAPI')

/**
 * GET /api/organizations/[id]/workspaces
 * Get workspaces related to the organization with optional filtering
 * Query parameters:
 * - ?available=true - Only workspaces where user can invite others (admin permissions)
 * - ?member=userId - Workspaces where specific member has access
 */
export async function GET(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
  try {
    const session = await getSession()

    if (!session?.user?.id) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    const { id: organizationId } = await params
    const url = new URL(request.url)
    const availableOnly = url.searchParams.get('available') === 'true'
    const memberId = url.searchParams.get('member')

    // Verify user is a member of this organization
    const memberEntry = await db
      .select()
      .from(member)
      .where(and(eq(member.organizationId, organizationId), eq(member.userId, session.user.id)))
      .limit(1)

    if (memberEntry.length === 0) {
      return NextResponse.json(
        {
          error: 'Forbidden - Not a member of this organization',
        },
        { status: 403 }
      )
    }

    const userRole = memberEntry[0].role
    const hasAdminAccess = ['owner', 'admin'].includes(userRole)

    if (availableOnly) {
      // Get workspaces where user has admin permissions (can invite others)
      const availableWorkspaces = await db
        .select({
          id: workspace.id,
          name: workspace.name,
          ownerId: workspace.ownerId,
          createdAt: workspace.createdAt,
          isOwner: eq(workspace.ownerId, session.user.id),
          permissionType: permissions.permissionType,
        })
        .from(workspace)
        .leftJoin(
          permissions,
          and(
            eq(permissions.entityType, 'workspace'),
            eq(permissions.entityId, workspace.id),
            eq(permissions.userId, session.user.id)
          )
        )
        .where(
          or(
            // User owns the workspace
            eq(workspace.ownerId, session.user.id),
            // User has admin permission on the workspace
            and(
              eq(permissions.userId, session.user.id),
              eq(permissions.entityType, 'workspace'),
              eq(permissions.permissionType, 'admin')
            )
          )
        )

      // Filter and format the results
      const workspacesWithInvitePermission = availableWorkspaces
        .filter((workspace) => {
          // Include if user owns the workspace OR has admin permission
          return workspace.isOwner || workspace.permissionType === 'admin'
        })
        .map((workspace) => ({
          id: workspace.id,
          name: workspace.name,
          isOwner: workspace.isOwner,
          canInvite: true, // All returned workspaces have invite permission
          createdAt: workspace.createdAt,
        }))

      logger.info('Retrieved available workspaces for organization member', {
        organizationId,
        userId: session.user.id,
        workspaceCount: workspacesWithInvitePermission.length,
      })

      return NextResponse.json({
        success: true,
        data: {
          workspaces: workspacesWithInvitePermission,
          totalCount: workspacesWithInvitePermission.length,
          filter: 'available',
        },
      })
    }

    if (memberId && hasAdminAccess) {
      // Get workspaces where specific member has access (admin only)
      const memberWorkspaces = await db
        .select({
          id: workspace.id,
          name: workspace.name,
          ownerId: workspace.ownerId,
          isOwner: eq(workspace.ownerId, memberId),
          permissionType: permissions.permissionType,
          createdAt: permissions.createdAt,
        })
        .from(workspace)
        .leftJoin(
          permissions,
          and(
            eq(permissions.entityType, 'workspace'),
            eq(permissions.entityId, workspace.id),
            eq(permissions.userId, memberId)
          )
        )
        .where(
          or(
            // Member owns the workspace
            eq(workspace.ownerId, memberId),
            // Member has permissions on the workspace
            and(eq(permissions.userId, memberId), eq(permissions.entityType, 'workspace'))
          )
        )

      const formattedWorkspaces = memberWorkspaces.map((workspace) => ({
        id: workspace.id,
        name: workspace.name,
        isOwner: workspace.isOwner,
        permission: workspace.permissionType,
        joinedAt: workspace.createdAt,
        createdAt: workspace.createdAt,
      }))

      return NextResponse.json({
        success: true,
        data: {
          workspaces: formattedWorkspaces,
          totalCount: formattedWorkspaces.length,
          filter: 'member',
          memberId,
        },
      })
    }

    // Default: Get all workspaces (basic info only for regular members)
    if (!hasAdminAccess) {
      return NextResponse.json({
        success: true,
        data: {
          workspaces: [],
          totalCount: 0,
          message: 'Workspace access information is only available to organization admins',
        },
      })
    }

    // For admins: Get summary of all workspaces
    const allWorkspaces = await db
      .select({
        id: workspace.id,
        name: workspace.name,
        ownerId: workspace.ownerId,
        createdAt: workspace.createdAt,
        ownerName: user.name,
      })
      .from(workspace)
      .leftJoin(user, eq(workspace.ownerId, user.id))

    return NextResponse.json({
      success: true,
      data: {
        workspaces: allWorkspaces,
        totalCount: allWorkspaces.length,
        filter: 'all',
      },
      userRole,
      hasAdminAccess,
    })
  } catch (error) {
    logger.error('Failed to get organization workspaces', { error })
    return NextResponse.json(
      {
        error: 'Internal server error',
      },
      { status: 500 }
    )
  }
}
257
apps/sim/app/api/tools/supabase/storage-upload/route.ts
Normal file
@@ -0,0 +1,257 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { processSingleFileToUserFile } from '@/lib/uploads/utils/file-utils'
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'

export const dynamic = 'force-dynamic'

const logger = createLogger('SupabaseStorageUploadAPI')

const SupabaseStorageUploadSchema = z.object({
  projectId: z.string().min(1, 'Project ID is required'),
  apiKey: z.string().min(1, 'API key is required'),
  bucket: z.string().min(1, 'Bucket name is required'),
  fileName: z.string().min(1, 'File name is required'),
  path: z.string().optional().nullable(),
  fileData: z.any(),
  contentType: z.string().optional().nullable(),
  upsert: z.boolean().optional().default(false),
})

export async function POST(request: NextRequest) {
  const requestId = generateRequestId()

  try {
    const authResult = await checkInternalAuth(request, { requireWorkflowId: false })

    if (!authResult.success) {
      logger.warn(
        `[${requestId}] Unauthorized Supabase storage upload attempt: ${authResult.error}`
      )
      return NextResponse.json(
        {
          success: false,
          error: authResult.error || 'Authentication required',
        },
        { status: 401 }
      )
    }

    logger.info(
      `[${requestId}] Authenticated Supabase storage upload request via ${authResult.authType}`,
      {
        userId: authResult.userId,
      }
    )

    const body = await request.json()
    const validatedData = SupabaseStorageUploadSchema.parse(body)

    const fileData = validatedData.fileData
    const isStringInput = typeof fileData === 'string'

    logger.info(`[${requestId}] Uploading to Supabase Storage`, {
      bucket: validatedData.bucket,
      fileName: validatedData.fileName,
      path: validatedData.path,
      fileDataType: isStringInput ? 'string' : 'object',
    })

    if (!fileData) {
      return NextResponse.json(
        {
          success: false,
          error: 'fileData is required',
        },
        { status: 400 }
      )
    }

    let uploadBody: Buffer
    let uploadContentType: string | undefined

    if (isStringInput) {
      let content = fileData as string

      const dataUrlMatch = content.match(/^data:([^;]+);base64,(.+)$/s)
      if (dataUrlMatch) {
        const [, mimeType, base64Data] = dataUrlMatch
        content = base64Data
        if (!validatedData.contentType) {
          uploadContentType = mimeType
        }
        logger.info(`[${requestId}] Extracted base64 from data URL (MIME: ${mimeType})`)
      }

      const cleanedContent = content.replace(/[\s\r\n]/g, '')
      const isLikelyBase64 = /^[A-Za-z0-9+/]*={0,2}$/.test(cleanedContent)

      if (isLikelyBase64 && cleanedContent.length >= 4) {
        try {
          uploadBody = Buffer.from(cleanedContent, 'base64')

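          // Base64 decodes 4 chars into 3 bytes (~0.75 ratio), so a genuine
          // decode should land inside the 0.7-0.8 size window checked below.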
          const expectedMinSize = Math.floor(cleanedContent.length * 0.7)
          const expectedMaxSize = Math.ceil(cleanedContent.length * 0.8)

          if (
            uploadBody.length >= expectedMinSize &&
            uploadBody.length <= expectedMaxSize &&
            uploadBody.length > 0
          ) {
            logger.info(
              `[${requestId}] Decoded base64 content: ${cleanedContent.length} chars -> ${uploadBody.length} bytes`
            )
          } else {
            const reEncoded = uploadBody.toString('base64')
            if (reEncoded !== cleanedContent) {
              logger.info(
                `[${requestId}] Content looked like base64 but re-encoding didn't match, using as plain text`
              )
              uploadBody = Buffer.from(content, 'utf-8')
            } else {
              logger.info(
                `[${requestId}] Decoded base64 content (verified): ${uploadBody.length} bytes`
              )
            }
          }
        } catch (decodeError) {
          logger.info(
            `[${requestId}] Failed to decode as base64, using as plain text: ${decodeError}`
          )
          uploadBody = Buffer.from(content, 'utf-8')
        }
      } else {
        uploadBody = Buffer.from(content, 'utf-8')
        logger.info(`[${requestId}] Using content as plain text (${uploadBody.length} bytes)`)
      }

      uploadContentType =
        uploadContentType || validatedData.contentType || 'application/octet-stream'
    } else {
      const rawFile = fileData
      logger.info(`[${requestId}] Processing file object: ${rawFile.name || 'unknown'}`)

      let userFile
      try {
        userFile = processSingleFileToUserFile(rawFile, requestId, logger)
      } catch (error) {
        return NextResponse.json(
          {
            success: false,
            error: error instanceof Error ? error.message : 'Failed to process file',
          },
          { status: 400 }
        )
      }

      const buffer = await downloadFileFromStorage(userFile, requestId, logger)

      uploadBody = buffer
      uploadContentType = validatedData.contentType || userFile.type || 'application/octet-stream'
    }

    let fullPath = validatedData.fileName
    if (validatedData.path) {
      const folderPath = validatedData.path.endsWith('/')
        ? validatedData.path
        : `${validatedData.path}/`
      fullPath = `${folderPath}${validatedData.fileName}`
    }

    const supabaseUrl = `https://${validatedData.projectId}.supabase.co/storage/v1/object/${validatedData.bucket}/${fullPath}`

    const headers: Record<string, string> = {
      apikey: validatedData.apiKey,
      Authorization: `Bearer ${validatedData.apiKey}`,
      'Content-Type': uploadContentType,
    }

    if (validatedData.upsert) {
      headers['x-upsert'] = 'true'
    }

    logger.info(`[${requestId}] Sending to Supabase: ${supabaseUrl}`, {
      contentType: uploadContentType,
      bodySize: uploadBody.length,
      upsert: validatedData.upsert,
    })

    const response = await fetch(supabaseUrl, {
      method: 'POST',
      headers,
      body: new Uint8Array(uploadBody),
    })

    if (!response.ok) {
      const errorText = await response.text()
      let errorData
      try {
        errorData = JSON.parse(errorText)
      } catch {
        errorData = { message: errorText }
      }

      logger.error(`[${requestId}] Supabase Storage upload failed:`, {
        status: response.status,
        statusText: response.statusText,
        error: errorData,
      })

      return NextResponse.json(
        {
          success: false,
          error: errorData.message || errorData.error || `Upload failed: ${response.statusText}`,
          details: errorData,
        },
        { status: response.status }
      )
    }

    const result = await response.json()

    logger.info(`[${requestId}] File uploaded successfully to Supabase Storage`, {
      bucket: validatedData.bucket,
      path: fullPath,
    })

    const publicUrl = `https://${validatedData.projectId}.supabase.co/storage/v1/object/public/${validatedData.bucket}/${fullPath}`

    return NextResponse.json({
      success: true,
      output: {
        message: 'Successfully uploaded file to storage',
        results: {
          ...result,
          path: fullPath,
          bucket: validatedData.bucket,
          publicUrl,
        },
      },
    })
  } catch (error) {
    if (error instanceof z.ZodError) {
      logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
      return NextResponse.json(
        {
          success: false,
          error: 'Invalid request data',
          details: error.errors,
        },
        { status: 400 }
      )
    }

    logger.error(`[${requestId}] Error uploading to Supabase Storage:`, error)

    return NextResponse.json(
      {
        success: false,
        error: error instanceof Error ? error.message : 'Internal server error',
      },
      { status: 500 }
    )
  }
}
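A hedged usage sketch for this route (the path comes from the file location; the auth header expected by checkInternalAuth is not shown in this diff, so it is omitted here, and all field values are illustrative):

const res = await fetch('/api/tools/supabase/storage-upload', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    projectId: 'abcd1234', // illustrative Supabase project ref
    apiKey: process.env.SUPABASE_SERVICE_KEY!, // illustrative env var name
    bucket: 'avatars',
    fileName: 'logo.png',
    path: 'public', // stored as public/logo.png
    fileData: 'data:image/png;base64,iVBORw0KGgo...', // data URL, raw base64, or plain text
    upsert: true,
  }),
})
const { output } = await res.json() // output.results.publicUrl on success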
305
apps/sim/app/api/workflows/[id]/execute-from-block/route.ts
Normal file
@@ -0,0 +1,305 @@
import { db, workflow as workflowTable } from '@sim/db'
import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { v4 as uuidv4 } from 'uuid'
import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { SSE_HEADERS } from '@/lib/core/utils/sse'
import { markExecutionCancelled } from '@/lib/execution/cancellation'
import { LoggingSession } from '@/lib/logs/execution/logging-session'
import { executeWorkflowCore } from '@/lib/workflows/executor/execution-core'
import { type ExecutionEvent, encodeSSEEvent } from '@/lib/workflows/executor/execution-events'
import { ExecutionSnapshot } from '@/executor/execution/snapshot'
import type { IterationContext, SerializableExecutionState } from '@/executor/execution/types'
import type { NormalizedBlockOutput } from '@/executor/types'
import { hasExecutionResult } from '@/executor/utils/errors'

const logger = createLogger('ExecuteFromBlockAPI')

const ExecuteFromBlockSchema = z.object({
  startBlockId: z.string().min(1, 'Start block ID is required'),
  sourceSnapshot: z.object({
    blockStates: z.record(z.any()),
    executedBlocks: z.array(z.string()),
    blockLogs: z.array(z.any()),
    decisions: z.object({
      router: z.record(z.string()),
      condition: z.record(z.string()),
    }),
    completedLoops: z.array(z.string()),
    loopExecutions: z.record(z.any()).optional(),
    parallelExecutions: z.record(z.any()).optional(),
    parallelBlockMapping: z.record(z.any()).optional(),
    activeExecutionPath: z.array(z.string()),
  }),
})

export const runtime = 'nodejs'
export const dynamic = 'force-dynamic'

export async function POST(req: NextRequest, { params }: { params: Promise<{ id: string }> }) {
  const requestId = generateRequestId()
  const { id: workflowId } = await params

  try {
    const auth = await checkHybridAuth(req, { requireWorkflowId: false })
    if (!auth.success || !auth.userId) {
      return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
    }
    const userId = auth.userId

    let body: unknown
    try {
      body = await req.json()
    } catch {
      return NextResponse.json({ error: 'Invalid JSON body' }, { status: 400 })
    }

    const validation = ExecuteFromBlockSchema.safeParse(body)
    if (!validation.success) {
      logger.warn(`[${requestId}] Invalid request body:`, validation.error.errors)
      return NextResponse.json(
        {
          error: 'Invalid request body',
          details: validation.error.errors.map((e) => ({
            path: e.path.join('.'),
            message: e.message,
          })),
        },
        { status: 400 }
      )
    }

    const { startBlockId, sourceSnapshot } = validation.data
    const executionId = uuidv4()

    const [workflowRecord] = await db
      .select({ workspaceId: workflowTable.workspaceId, userId: workflowTable.userId })
      .from(workflowTable)
      .where(eq(workflowTable.id, workflowId))
      .limit(1)

    if (!workflowRecord?.workspaceId) {
      return NextResponse.json({ error: 'Workflow not found or has no workspace' }, { status: 404 })
    }

    const workspaceId = workflowRecord.workspaceId
    const workflowUserId = workflowRecord.userId

    logger.info(`[${requestId}] Starting run-from-block execution`, {
      workflowId,
      startBlockId,
      executedBlocksCount: sourceSnapshot.executedBlocks.length,
    })

    const loggingSession = new LoggingSession(workflowId, executionId, 'manual', requestId)
    const abortController = new AbortController()
    let isStreamClosed = false

    const stream = new ReadableStream<Uint8Array>({
      async start(controller) {
        const sendEvent = (event: ExecutionEvent) => {
          if (isStreamClosed) return
          try {
            controller.enqueue(encodeSSEEvent(event))
          } catch {
            isStreamClosed = true
          }
        }

        const snapshot = new ExecutionSnapshot({
          requestId,
          workflowId,
          userId,
          executionId,
          triggerType: 'manual',
          workspaceId,
          workflowUserId,
          useDraftState: true,
          isClientSession: true,
        })

        try {
          const startTime = new Date()

          sendEvent({
            type: 'execution:started',
            timestamp: startTime.toISOString(),
            executionId,
            workflowId,
            data: { startTime: startTime.toISOString() },
          })

          const result = await executeWorkflowCore({
            snapshot,
            loggingSession,
            abortSignal: abortController.signal,
            runFromBlock: {
              startBlockId,
              sourceSnapshot: sourceSnapshot as SerializableExecutionState,
            },
            callbacks: {
              onBlockStart: async (
                blockId: string,
                blockName: string,
                blockType: string,
                iterationContext?: IterationContext
              ) => {
                sendEvent({
                  type: 'block:started',
                  timestamp: new Date().toISOString(),
                  executionId,
                  workflowId,
                  data: {
                    blockId,
                    blockName,
                    blockType,
                    ...(iterationContext && {
                      iterationCurrent: iterationContext.iterationCurrent,
                      iterationTotal: iterationContext.iterationTotal,
                      iterationType: iterationContext.iterationType,
                    }),
                  },
                })
              },
              onBlockComplete: async (
                blockId: string,
                blockName: string,
                blockType: string,
                callbackData: {
                  input?: unknown
                  output: NormalizedBlockOutput
                  executionTime: number
                },
                iterationContext?: IterationContext
              ) => {
                const hasError = (callbackData.output as any)?.error
                sendEvent({
                  type: hasError ? 'block:error' : 'block:completed',
                  timestamp: new Date().toISOString(),
                  executionId,
                  workflowId,
                  data: {
                    blockId,
                    blockName,
                    blockType,
                    input: callbackData.input,
                    ...(hasError
                      ? { error: (callbackData.output as any).error }
                      : { output: callbackData.output }),
                    durationMs: callbackData.executionTime || 0,
                    ...(iterationContext && {
                      iterationCurrent: iterationContext.iterationCurrent,
                      iterationTotal: iterationContext.iterationTotal,
                      iterationType: iterationContext.iterationType,
                    }),
                  },
                })
              },
              onStream: async (streamingExecution: unknown) => {
                const streamingExec = streamingExecution as {
                  stream: ReadableStream
                  execution: any
                }
                const blockId = streamingExec.execution?.blockId
                const reader = streamingExec.stream.getReader()
                const decoder = new TextDecoder()

                try {
                  while (true) {
                    const { done, value } = await reader.read()
                    if (done) break
                    const chunk = decoder.decode(value, { stream: true })
                    sendEvent({
                      type: 'stream:chunk',
                      timestamp: new Date().toISOString(),
                      executionId,
                      workflowId,
                      data: { blockId, chunk },
                    })
                  }
                  sendEvent({
                    type: 'stream:done',
                    timestamp: new Date().toISOString(),
                    executionId,
                    workflowId,
                    data: { blockId },
                  })
                } finally {
                  try {
                    reader.releaseLock()
                  } catch {}
                }
              },
            },
          })

          if (result.status === 'cancelled') {
            sendEvent({
              type: 'execution:cancelled',
              timestamp: new Date().toISOString(),
              executionId,
              workflowId,
              data: { duration: result.metadata?.duration || 0 },
            })
          } else {
            sendEvent({
              type: 'execution:completed',
              timestamp: new Date().toISOString(),
              executionId,
              workflowId,
              data: {
                success: result.success,
                output: result.output,
                duration: result.metadata?.duration || 0,
                startTime: result.metadata?.startTime || startTime.toISOString(),
                endTime: result.metadata?.endTime || new Date().toISOString(),
              },
            })
          }
        } catch (error: unknown) {
          const errorMessage = error instanceof Error ? error.message : 'Unknown error'
          logger.error(`[${requestId}] Run-from-block execution failed: ${errorMessage}`)

          const executionResult = hasExecutionResult(error) ? error.executionResult : undefined

          sendEvent({
            type: 'execution:error',
            timestamp: new Date().toISOString(),
            executionId,
            workflowId,
            data: {
              error: executionResult?.error || errorMessage,
              duration: executionResult?.metadata?.duration || 0,
            },
          })
        } finally {
          if (!isStreamClosed) {
            try {
              controller.enqueue(new TextEncoder().encode('data: [DONE]\n\n'))
              controller.close()
            } catch {}
          }
        }
      },
      cancel() {
        isStreamClosed = true
        abortController.abort()
        markExecutionCancelled(executionId).catch(() => {})
      },
    })

    return new NextResponse(stream, {
      headers: { ...SSE_HEADERS, 'X-Execution-Id': executionId },
    })
  } catch (error: unknown) {
    const errorMessage = error instanceof Error ? error.message : 'Unknown error'
    logger.error(`[${requestId}] Failed to start run-from-block execution:`, error)
    return NextResponse.json(
      { error: errorMessage || 'Failed to start execution' },
      { status: 500 }
    )
  }
}
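A minimal client-side sketch for consuming this SSE stream (event names and the 'data: [DONE]' terminator come from the route above; a production client should also buffer frames that span chunk boundaries):

const res = await fetch(`/api/workflows/${workflowId}/execute-from-block`, {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({ startBlockId, sourceSnapshot }),
})
const reader = res.body!.getReader()
const decoder = new TextDecoder()
while (true) {
  const { done, value } = await reader.read()
  if (done) break
  // Each SSE frame is 'data: <json>\n\n'; '[DONE]' marks the end of the stream.
  for (const frame of decoder.decode(value, { stream: true }).split('\n\n')) {
    const payload = frame.replace(/^data: /, '').trim()
    if (!payload || payload === '[DONE]') continue
    const event = JSON.parse(payload) // e.g. block:started, block:completed
    console.log(event.type, event.data)
  }
}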
@@ -53,6 +53,7 @@ const ExecuteWorkflowSchema = z.object({
      parallels: z.record(z.any()).optional(),
    })
    .optional(),
  stopAfterBlockId: z.string().optional(),
})

export const runtime = 'nodejs'
@@ -222,6 +223,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
    includeFileBase64,
    base64MaxBytes,
    workflowStateOverride,
    stopAfterBlockId,
  } = validation.data

  // For API key and internal JWT auth, the entire body is the input (except for our control fields)
@@ -237,6 +239,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
    includeFileBase64,
    base64MaxBytes,
    workflowStateOverride,
    stopAfterBlockId: _stopAfterBlockId,
    workflowId: _workflowId, // Also exclude workflowId used for internal JWT auth
    ...rest
  } = body
@@ -434,6 +437,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
    loggingSession,
    includeFileBase64,
    base64MaxBytes,
    stopAfterBlockId,
  })

  const outputWithBase64 = includeFileBase64
@@ -722,6 +726,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
    abortSignal: abortController.signal,
    includeFileBase64,
    base64MaxBytes,
    stopAfterBlockId,
  })

  if (result.status === 'paused') {
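Illustrative request using the new control field (the input key is made up; per the comment in the hunk, everything outside the control fields is treated as workflow input):

POST /api/workflows/{id}/execute
{ "customerEmail": "user@example.com", "stopAfterBlockId": "block_abc123" }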
@@ -1,11 +1,13 @@
import { memo, useCallback } from 'react'
import { ArrowLeftRight, ArrowUpDown, Circle, CircleOff, LogOut } from 'lucide-react'
import { ArrowLeftRight, ArrowUpDown, Circle, CircleOff, LogOut, Play } from 'lucide-react'
import { Button, Copy, Tooltip, Trash2 } from '@/components/emcn'
import { cn } from '@/lib/core/utils/cn'
import { isInputDefinitionTrigger } from '@/lib/workflows/triggers/input-definition-triggers'
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
import { useWorkflowExecution } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks'
import { validateTriggerPaste } from '@/app/workspace/[workspaceId]/w/[workflowId]/utils'
import { useCollaborativeWorkflow } from '@/hooks/use-collaborative-workflow'
import { useExecutionStore } from '@/stores/execution'
import { useNotificationStore } from '@/stores/notifications'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
@@ -49,6 +51,7 @@ export const ActionBar = memo(
      collaborativeBatchToggleBlockHandles,
    } = useCollaborativeWorkflow()
    const { setPendingSelection } = useWorkflowRegistry()
    const { handleRunFromBlock } = useWorkflowExecution()

    const addNotification = useNotificationStore((s) => s.addNotification)

@@ -97,12 +100,30 @@ export const ActionBar = memo(
        )
      )

    const { activeWorkflowId } = useWorkflowRegistry()
    const { isExecuting, getLastExecutionSnapshot } = useExecutionStore()
    const userPermissions = useUserPermissionsContext()
    const edges = useWorkflowStore((state) => state.edges)

    const isStartBlock = isInputDefinitionTrigger(blockType)
    const isResponseBlock = blockType === 'response'
    const isNoteBlock = blockType === 'note'
    const isSubflowBlock = blockType === 'loop' || blockType === 'parallel'
    const isInsideSubflow = parentId && (parentType === 'loop' || parentType === 'parallel')

    const snapshot = activeWorkflowId ? getLastExecutionSnapshot(activeWorkflowId) : null
    const incomingEdges = edges.filter((edge) => edge.target === blockId)
    const isTriggerBlock = incomingEdges.length === 0
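    // Run-from-block is only offered when every upstream block already has output
    // in the last execution snapshot; trigger blocks (no incoming edges) have no
    // upstream dependencies to satisfy.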
    const dependenciesSatisfied =
      isTriggerBlock ||
      (snapshot && incomingEdges.every((edge) => snapshot.executedBlocks.includes(edge.source)))
    const canRunFromBlock =
      dependenciesSatisfied && !isNoteBlock && !isInsideSubflow && !isExecuting

    const handleRunFromBlockClick = useCallback(() => {
      if (!activeWorkflowId || !canRunFromBlock) return
      handleRunFromBlock(blockId, activeWorkflowId)
    }, [blockId, activeWorkflowId, canRunFromBlock, handleRunFromBlock])

    /**
     * Get appropriate tooltip message based on disabled state
@@ -135,23 +156,29 @@ export const ActionBar = memo(
                variant='ghost'
                onClick={(e) => {
                  e.stopPropagation()
                  if (!disabled) {
                    collaborativeBatchToggleBlockEnabled([blockId])
                  if (canRunFromBlock && !disabled) {
                    handleRunFromBlockClick()
                  }
                }}
                className={ACTION_BUTTON_STYLES}
                disabled={disabled}
                disabled={disabled || !canRunFromBlock}
              >
                {isEnabled ? <Circle className={ICON_SIZE} /> : <CircleOff className={ICON_SIZE} />}
                <Play className={ICON_SIZE} />
              </Button>
            </Tooltip.Trigger>
            <Tooltip.Content side='top'>
              {getTooltipMessage(isEnabled ? 'Disable Block' : 'Enable Block')}
              {(() => {
                if (disabled) return getTooltipMessage('Run from this block')
                if (isExecuting) return 'Execution in progress'
                if (isInsideSubflow) return 'Cannot run from inside subflow'
                if (!dependenciesSatisfied) return 'Run upstream blocks first'
                return 'Run from this block'
              })()}
            </Tooltip.Content>
          </Tooltip.Root>
        )}

        {isSubflowBlock && (
        {!isNoteBlock && (
          <Tooltip.Root>
            <Tooltip.Trigger asChild>
              <Button
@@ -40,9 +40,16 @@ export interface BlockMenuProps {
  onRemoveFromSubflow: () => void
  onOpenEditor: () => void
  onRename: () => void
  onRunFromBlock?: () => void
  onRunUntilBlock?: () => void
  hasClipboard?: boolean
  showRemoveFromSubflow?: boolean
  /** Whether run from block is available (has snapshot, was executed, not inside subflow) */
  canRunFromBlock?: boolean
  /** Reason why run from block is disabled (for tooltip) */
  runFromBlockDisabledReason?: string
  disableEdit?: boolean
  isExecuting?: boolean
}

/**
@@ -65,9 +72,14 @@ export function BlockMenu({
  onRemoveFromSubflow,
  onOpenEditor,
  onRename,
  onRunFromBlock,
  onRunUntilBlock,
  hasClipboard = false,
  showRemoveFromSubflow = false,
  canRunFromBlock = false,
  runFromBlockDisabledReason,
  disableEdit = false,
  isExecuting = false,
}: BlockMenuProps) {
  const isSingleBlock = selectedBlocks.length === 1

@@ -203,6 +215,39 @@ export function BlockMenu({
        </PopoverItem>
      )}

      {/* Run from/until block - only for single non-note block selection */}
      {isSingleBlock && !allNoteBlocks && (
        <>
          <PopoverDivider />
          <PopoverItem
            disabled={!canRunFromBlock || isExecuting}
            onClick={() => {
              if (canRunFromBlock && !isExecuting) {
                onRunFromBlock?.()
                onClose()
              }
            }}
          >
            {isExecuting
              ? 'Execution in progress...'
              : !canRunFromBlock && runFromBlockDisabledReason
                ? runFromBlockDisabledReason
                : 'Run from this block'}
          </PopoverItem>
          <PopoverItem
            disabled={isExecuting}
            onClick={() => {
              if (!isExecuting) {
                onRunUntilBlock?.()
                onClose()
              }
            }}
          >
            {isExecuting ? 'Execution in progress...' : 'Run until this block'}
          </PopoverItem>
        </>
      )}

      {/* Destructive action */}
      <PopoverDivider />
      <PopoverItem

@@ -1,7 +1,15 @@
import { useEffect, useRef, useState } from 'react'
import { Plus } from 'lucide-react'
import { useParams } from 'next/navigation'
import { Badge, Button, Combobox, Input, Label, Textarea } from '@/components/emcn'
import {
  Badge,
  Button,
  Combobox,
  type ComboboxOption,
  Input,
  Label,
  Textarea,
} from '@/components/emcn'
import { Trash } from '@/components/emcn/icons/trash'
import { cn } from '@/lib/core/utils/cn'
import { formatDisplayText } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/formatted-text'
@@ -38,6 +46,14 @@ const DEFAULT_ASSIGNMENT: Omit<VariableAssignment, 'id'> = {
  isExisting: false,
}

/**
 * Boolean value options for Combobox
 */
const BOOLEAN_OPTIONS: ComboboxOption[] = [
  { label: 'true', value: 'true' },
  { label: 'false', value: 'false' },
]

/**
 * Parses a value that might be a JSON string or already an array of VariableAssignment.
 * This handles the case where workflows are imported with stringified values.
@@ -104,8 +120,6 @@ export function VariablesInput({
  const allVariablesAssigned =
    !hasNoWorkflowVariables && getAvailableVariablesFor('new').length === 0

  // Initialize with one empty assignment if none exist and not in preview/disabled mode
  // Also add assignment when first variable is created
  useEffect(() => {
    if (!isReadOnly && assignments.length === 0 && currentWorkflowVariables.length > 0) {
      const initialAssignment: VariableAssignment = {
@@ -116,45 +130,46 @@ export function VariablesInput({
    }
  }, [currentWorkflowVariables.length, isReadOnly, assignments.length, setStoreValue])

  // Clean up assignments when their associated variables are deleted
  useEffect(() => {
    if (isReadOnly || assignments.length === 0) return

    const currentVariableIds = new Set(currentWorkflowVariables.map((v) => v.id))
    const validAssignments = assignments.filter((assignment) => {
      // Keep assignments that haven't selected a variable yet
      if (!assignment.variableId) return true
      // Keep assignments whose variable still exists
      return currentVariableIds.has(assignment.variableId)
    })

    // If all variables were deleted, clear all assignments
    if (currentWorkflowVariables.length === 0) {
      setStoreValue([])
    } else if (validAssignments.length !== assignments.length) {
      // Some assignments reference deleted variables, remove them
      setStoreValue(validAssignments.length > 0 ? validAssignments : [])
    }
  }, [currentWorkflowVariables, assignments, isReadOnly, setStoreValue])

  const addAssignment = () => {
    if (isPreview || disabled || allVariablesAssigned) return
    if (isReadOnly || allVariablesAssigned) return

    const newAssignment: VariableAssignment = {
      ...DEFAULT_ASSIGNMENT,
      id: crypto.randomUUID(),
    }
    setStoreValue([...(assignments || []), newAssignment])
    setStoreValue([...assignments, newAssignment])
  }

  const removeAssignment = (id: string) => {
    if (isPreview || disabled) return
    setStoreValue((assignments || []).filter((a) => a.id !== id))
    if (isReadOnly) return

    if (assignments.length === 1) {
      setStoreValue([{ ...DEFAULT_ASSIGNMENT, id: crypto.randomUUID() }])
      return
    }

    setStoreValue(assignments.filter((a) => a.id !== id))
  }

  const updateAssignment = (id: string, updates: Partial<VariableAssignment>) => {
    if (isPreview || disabled) return
    setStoreValue((assignments || []).map((a) => (a.id === id ? { ...a, ...updates } : a)))
    if (isReadOnly) return
    setStoreValue(assignments.map((a) => (a.id === id ? { ...a, ...updates } : a)))
  }

  const handleVariableSelect = (assignmentId: string, variableId: string) => {
@@ -169,19 +184,12 @@ export function VariablesInput({
    }
  }

  const handleTagSelect = (tag: string) => {
  const handleTagSelect = (newValue: string) => {
    if (!activeFieldId) return

    const assignment = assignments.find((a) => a.id === activeFieldId)
    if (!assignment) return

    const currentValue = assignment.value || ''

    const textBeforeCursor = currentValue.slice(0, cursorPosition)
    const lastOpenBracket = textBeforeCursor.lastIndexOf('<')

    const newValue =
      currentValue.slice(0, lastOpenBracket) + tag + currentValue.slice(cursorPosition)
    const originalValue = assignment?.value || ''
    const textAfterCursor = originalValue.slice(cursorPosition)

    updateAssignment(activeFieldId, { value: newValue })
    setShowTags(false)
@@ -190,7 +198,7 @@ export function VariablesInput({
      const inputEl = valueInputRefs.current[activeFieldId]
      if (inputEl) {
        inputEl.focus()
        const newCursorPos = lastOpenBracket + tag.length
        const newCursorPos = newValue.length - textAfterCursor.length
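        // newValue keeps the suffix after the old caret intact, so the caret
        // lands right before that suffix: total length minus suffix length.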
        inputEl.setSelectionRange(newCursorPos, newCursorPos)
      }
    }, 10)
@@ -272,6 +280,18 @@ export function VariablesInput({
    }))
  }

  const syncOverlayScroll = (assignmentId: string, scrollLeft: number) => {
    const overlay = overlayRefs.current[assignmentId]
    if (overlay) overlay.scrollLeft = scrollLeft
  }

  const handleKeyDown = (e: React.KeyboardEvent<HTMLInputElement | HTMLTextAreaElement>) => {
    if (e.key === 'Escape') {
      setShowTags(false)
      setActiveSourceBlockId(null)
    }
  }

  if (isPreview && (!assignments || assignments.length === 0)) {
    return (
      <div className='flex flex-col items-center justify-center rounded-md border border-border/40 bg-muted/20 py-8 text-center'>
@@ -302,7 +322,7 @@ export function VariablesInput({

  return (
    <div className='space-y-[8px]'>
      {assignments && assignments.length > 0 && (
      {assignments.length > 0 && (
        <div className='space-y-[8px]'>
          {assignments.map((assignment, index) => {
            const collapsed = collapsedAssignments[assignment.id] || false
@@ -334,7 +354,7 @@ export function VariablesInput({
                <Button
                  variant='ghost'
                  onClick={addAssignment}
                  disabled={isPreview || disabled || allVariablesAssigned}
                  disabled={isReadOnly || allVariablesAssigned}
                  className='h-auto p-0'
                >
                  <Plus className='h-[14px] w-[14px]' />
@@ -343,7 +363,7 @@ export function VariablesInput({
                <Button
                  variant='ghost'
                  onClick={() => removeAssignment(assignment.id)}
                  disabled={isPreview || disabled || assignments.length === 1}
                  disabled={isReadOnly}
                  className='h-auto p-0 text-[var(--text-error)] hover:text-[var(--text-error)]'
                >
                  <Trash className='h-[14px] w-[14px]' />
@@ -358,16 +378,26 @@ export function VariablesInput({
                  <Label className='text-[13px]'>Variable</Label>
                  <Combobox
                    options={availableVars.map((v) => ({ label: v.name, value: v.id }))}
                    value={assignment.variableId || assignment.variableName || ''}
                    value={assignment.variableId || ''}
                    onChange={(value) => handleVariableSelect(assignment.id, value)}
                    placeholder='Select a variable...'
                    disabled={isPreview || disabled}
                    disabled={isReadOnly}
                  />
                </div>

                <div className='flex flex-col gap-[6px]'>
                  <Label className='text-[13px]'>Value</Label>
                  {assignment.type === 'object' || assignment.type === 'array' ? (
                  {assignment.type === 'boolean' ? (
                    <Combobox
                      options={BOOLEAN_OPTIONS}
                      value={assignment.value ?? ''}
                      onChange={(v) =>
                        !isReadOnly && updateAssignment(assignment.id, { value: v })
                      }
                      placeholder='Select value'
                      disabled={isReadOnly}
                    />
                  ) : assignment.type === 'object' || assignment.type === 'array' ? (
                    <div className='relative'>
                      <Textarea
                        ref={(el) => {
@@ -381,26 +411,32 @@ export function VariablesInput({
                            e.target.selectionStart ?? undefined
                          )
                        }
                        onKeyDown={handleKeyDown}
                        onFocus={() => {
                          if (!isPreview && !disabled && !assignment.value?.trim()) {
                          if (!isReadOnly && !assignment.value?.trim()) {
                            setActiveFieldId(assignment.id)
                            setCursorPosition(0)
                            setShowTags(true)
                          }
                        }}
                        onScroll={(e) => {
                          const overlay = overlayRefs.current[assignment.id]
                          if (overlay) {
                            overlay.scrollTop = e.currentTarget.scrollTop
                            overlay.scrollLeft = e.currentTarget.scrollLeft
                          }
                        }}
                        placeholder={
                          assignment.type === 'object'
                            ? '{\n "key": "value"\n}'
                            : '[\n 1, 2, 3\n]'
                        }
                        disabled={isPreview || disabled}
                        disabled={isReadOnly}
                        className={cn(
                          'min-h-[120px] font-mono text-sm text-transparent caret-foreground placeholder:text-muted-foreground/50',
                          dragHighlight[assignment.id] && 'ring-2 ring-blue-500 ring-offset-2'
                        )}
                        style={{
                          fontFamily: 'inherit',
                          lineHeight: 'inherit',
                          wordBreak: 'break-word',
                          whiteSpace: 'pre-wrap',
                        }}
@@ -413,10 +449,7 @@ export function VariablesInput({
                          if (el) overlayRefs.current[assignment.id] = el
                        }}
                        className='pointer-events-none absolute inset-0 flex items-start overflow-auto bg-transparent px-3 py-2 font-mono text-sm'
                        style={{
                          fontFamily: 'inherit',
                          lineHeight: 'inherit',
                        }}
                        style={{ scrollbarWidth: 'none' }}
                      >
                        <div className='w-full whitespace-pre-wrap break-words'>
                          {formatDisplayText(assignment.value || '', {
@@ -441,21 +474,34 @@ export function VariablesInput({
                            e.target.selectionStart ?? undefined
                          )
                        }
                        onKeyDown={handleKeyDown}
                        onFocus={() => {
                          if (!isPreview && !disabled && !assignment.value?.trim()) {
                          if (!isReadOnly && !assignment.value?.trim()) {
                            setActiveFieldId(assignment.id)
                            setCursorPosition(0)
                            setShowTags(true)
                          }
                        }}
                        onScroll={(e) =>
                          syncOverlayScroll(assignment.id, e.currentTarget.scrollLeft)
                        }
                        onPaste={() =>
                          setTimeout(() => {
                            const input = valueInputRefs.current[assignment.id]
                            if (input)
                              syncOverlayScroll(
                                assignment.id,
                                (input as HTMLInputElement).scrollLeft
                              )
                          }, 0)
                        }
                        placeholder={`${assignment.type} value`}
                        disabled={isPreview || disabled}
                        disabled={isReadOnly}
                        autoComplete='off'
                        className={cn(
                          'allow-scroll w-full overflow-auto text-transparent caret-foreground',
                          'allow-scroll w-full overflow-x-auto overflow-y-hidden text-transparent caret-foreground',
                          dragHighlight[assignment.id] && 'ring-2 ring-blue-500 ring-offset-2'
                        )}
                        style={{ overflowX: 'auto' }}
                        onDrop={(e) => handleDrop(e, assignment.id)}
                        onDragOver={(e) => handleDragOver(e, assignment.id)}
                        onDragLeave={(e) => handleDragLeave(e, assignment.id)}
@@ -465,7 +511,7 @@ export function VariablesInput({
                          if (el) overlayRefs.current[assignment.id] = el
                        }}
                        className='pointer-events-none absolute inset-0 flex items-center overflow-x-auto bg-transparent px-[8px] py-[6px] font-medium font-sans text-sm'
                        style={{ overflowX: 'auto' }}
                        style={{ scrollbarWidth: 'none' }}
                      >
                        <div
                          className='w-full whitespace-pre'
@@ -284,22 +284,37 @@ const renderLabel = (
          </>
        )}
        {showCanonicalToggle && (
          <button
            type='button'
            className='flex h-[12px] w-[12px] flex-shrink-0 items-center justify-center bg-transparent p-0 disabled:cursor-not-allowed disabled:opacity-50'
            onClick={canonicalToggle?.onToggle}
            disabled={canonicalToggleDisabledResolved}
            aria-label={canonicalToggle?.mode === 'advanced' ? 'Use selector' : 'Enter manual ID'}
          >
            <ArrowLeftRight
              className={cn(
                '!h-[12px] !w-[12px]',
                canonicalToggle?.mode === 'advanced'
                  ? 'text-[var(--text-primary)]'
                  : 'text-[var(--text-secondary)]'
              )}
            />
          </button>
          <Tooltip.Root>
            <Tooltip.Trigger asChild>
              <button
                type='button'
                className='flex h-[12px] w-[12px] flex-shrink-0 items-center justify-center bg-transparent p-0 disabled:cursor-not-allowed disabled:opacity-50'
                onClick={canonicalToggle?.onToggle}
                disabled={canonicalToggleDisabledResolved}
                aria-label={
                  canonicalToggle?.mode === 'advanced'
                    ? 'Switch to selector'
                    : 'Switch to manual ID'
                }
              >
                <ArrowLeftRight
                  className={cn(
                    '!h-[12px] !w-[12px]',
                    canonicalToggle?.mode === 'advanced'
                      ? 'text-[var(--text-primary)]'
                      : 'text-[var(--text-secondary)]'
                  )}
                />
              </button>
            </Tooltip.Trigger>
            <Tooltip.Content side='top'>
              <p>
                {canonicalToggle?.mode === 'advanced'
                  ? 'Switch to selector'
                  : 'Switch to manual ID'}
              </p>
            </Tooltip.Content>
          </Tooltip.Root>
        )}
      </div>
    </div>
@@ -323,6 +338,11 @@ const arePropsEqual = (prevProps: SubBlockProps, nextProps: SubBlockProps): bool
  const configEqual =
    prevProps.config.id === nextProps.config.id && prevProps.config.type === nextProps.config.type

  const canonicalToggleEqual =
    !!prevProps.canonicalToggle === !!nextProps.canonicalToggle &&
    prevProps.canonicalToggle?.mode === nextProps.canonicalToggle?.mode &&
    prevProps.canonicalToggle?.disabled === nextProps.canonicalToggle?.disabled

  return (
    prevProps.blockId === nextProps.blockId &&
    configEqual &&
@@ -331,8 +351,7 @@ const arePropsEqual = (prevProps: SubBlockProps, nextProps: SubBlockProps): bool
    prevProps.disabled === nextProps.disabled &&
    prevProps.fieldDiffStatus === nextProps.fieldDiffStatus &&
    prevProps.allowExpandInPreview === nextProps.allowExpandInPreview &&
    prevProps.canonicalToggle?.mode === nextProps.canonicalToggle?.mode &&
    prevProps.canonicalToggle?.disabled === nextProps.canonicalToggle?.disabled
    canonicalToggleEqual
  )
}
@@ -13,7 +13,11 @@ interface UseCanvasContextMenuProps {

/**
 * Hook for managing workflow canvas context menus.
 * Handles right-click events, menu state, click-outside detection, and block info extraction.
 *
 * Handles right-click events on nodes, pane, and selections with proper multi-select behavior.
 *
 * @param props - Hook configuration
 * @returns Context menu state and handlers
 */
export function useCanvasContextMenu({ blocks, getNodes, setNodes }: UseCanvasContextMenuProps) {
  const [activeMenu, setActiveMenu] = useState<MenuType>(null)
@@ -46,19 +50,29 @@ export function useCanvasContextMenu({ blocks, getNodes, setNodes }: UseCanvasCo
    event.stopPropagation()

    const isMultiSelect = event.shiftKey || event.metaKey || event.ctrlKey
    setNodes((nodes) =>
      nodes.map((n) => ({
        ...n,
        selected: isMultiSelect ? (n.id === node.id ? true : n.selected) : n.id === node.id,
      }))
    )
    const currentSelectedNodes = getNodes().filter((n) => n.selected)
    const isClickedNodeSelected = currentSelectedNodes.some((n) => n.id === node.id)

    const selectedNodes = getNodes().filter((n) => n.selected)
    const nodesToUse = isMultiSelect
      ? selectedNodes.some((n) => n.id === node.id)
        ? selectedNodes
        : [...selectedNodes, node]
      : [node]
    let nodesToUse: Node[]
    if (isClickedNodeSelected) {
      nodesToUse = currentSelectedNodes
    } else if (isMultiSelect) {
      nodesToUse = [...currentSelectedNodes, node]
      setNodes((nodes) =>
        nodes.map((n) => ({
          ...n,
          selected: n.id === node.id ? true : n.selected,
        }))
      )
    } else {
      nodesToUse = [node]
      setNodes((nodes) =>
        nodes.map((n) => ({
          ...n,
          selected: n.id === node.id,
        }))
      )
    }

    setPosition({ x: event.clientX, y: event.clientY })
    setSelectedBlocks(nodesToBlockInfos(nodesToUse))
@@ -15,7 +15,8 @@ import {
  TriggerUtils,
} from '@/lib/workflows/triggers/triggers'
import { useCurrentWorkflow } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-current-workflow'
import type { BlockLog, ExecutionResult, StreamingExecution } from '@/executor/types'
import type { SerializableExecutionState } from '@/executor/execution/types'
import type { BlockLog, BlockState, ExecutionResult, StreamingExecution } from '@/executor/types'
import { hasExecutionResult } from '@/executor/utils/errors'
import { coerceValue } from '@/executor/utils/start-block'
import { subscriptionKeys } from '@/hooks/queries/subscription'
@@ -98,6 +99,8 @@ export function useWorkflowExecution() {
    setActiveBlocks,
    setBlockRunStatus,
    setEdgeRunStatus,
    setLastExecutionSnapshot,
    getLastExecutionSnapshot,
  } = useExecutionStore()
  const [executionResult, setExecutionResult] = useState<ExecutionResult | null>(null)
  const executionStream = useExecutionStream()
@@ -668,7 +671,8 @@ export function useWorkflowExecution() {
      onStream?: (se: StreamingExecution) => Promise<void>,
      executionId?: string,
      onBlockComplete?: (blockId: string, output: any) => Promise<void>,
      overrideTriggerType?: 'chat' | 'manual' | 'api'
      overrideTriggerType?: 'chat' | 'manual' | 'api',
      stopAfterBlockId?: string
    ): Promise<ExecutionResult | StreamingExecution> => {
      // Use diff workflow for execution when available, regardless of canvas view state
      const executionWorkflowState = null as {
@@ -876,6 +880,8 @@ export function useWorkflowExecution() {
      const activeBlocksSet = new Set<string>()
      const streamedContent = new Map<string, string>()
      const accumulatedBlockLogs: BlockLog[] = []
      const accumulatedBlockStates = new Map<string, BlockState>()
      const executedBlockIds = new Set<string>()

      // Execute the workflow
      try {
@@ -887,6 +893,7 @@ export function useWorkflowExecution() {
          triggerType: overrideTriggerType || 'manual',
          useDraftState: true,
          isClientSession: true,
          stopAfterBlockId,
          workflowStateOverride: executionWorkflowState
            ? {
                blocks: executionWorkflowState.blocks,
@@ -916,18 +923,22 @@ export function useWorkflowExecution() {
            logger.info('onBlockCompleted received:', { data })

            activeBlocksSet.delete(data.blockId)
            // Create a new Set to trigger React re-render
            setActiveBlocks(new Set(activeBlocksSet))

            // Track successful block execution in run path
            setBlockRunStatus(data.blockId, 'success')

            // Edges already tracked in onBlockStarted, no need to track again
            executedBlockIds.add(data.blockId)
            accumulatedBlockStates.set(data.blockId, {
              output: data.output,
              executed: true,
              executionTime: data.durationMs,
            })

            const isContainerBlock = data.blockType === 'loop' || data.blockType === 'parallel'
            if (isContainerBlock) return

            const startedAt = new Date(Date.now() - data.durationMs).toISOString()
            const endedAt = new Date().toISOString()

            // Accumulate block log for the execution result
            accumulatedBlockLogs.push({
              blockId: data.blockId,
              blockName: data.blockName || 'Unknown Block',
@@ -1056,6 +1067,48 @@ export function useWorkflowExecution() {
            },
            logs: accumulatedBlockLogs,
          }

          if (data.success && activeWorkflowId) {
            if (stopAfterBlockId) {
              const existingSnapshot = getLastExecutionSnapshot(activeWorkflowId)
              const mergedBlockStates = {
                ...(existingSnapshot?.blockStates || {}),
                ...Object.fromEntries(accumulatedBlockStates),
              }
              const mergedExecutedBlocks = new Set([
                ...(existingSnapshot?.executedBlocks || []),
                ...executedBlockIds,
              ])
              const snapshot: SerializableExecutionState = {
                blockStates: mergedBlockStates,
                executedBlocks: Array.from(mergedExecutedBlocks),
                blockLogs: [...(existingSnapshot?.blockLogs || []), ...accumulatedBlockLogs],
                decisions: existingSnapshot?.decisions || { router: {}, condition: {} },
                completedLoops: existingSnapshot?.completedLoops || [],
                activeExecutionPath: Array.from(mergedExecutedBlocks),
              }
              setLastExecutionSnapshot(activeWorkflowId, snapshot)
              logger.info('Merged execution snapshot after run-until-block', {
                workflowId: activeWorkflowId,
                newBlocksExecuted: executedBlockIds.size,
                totalExecutedBlocks: mergedExecutedBlocks.size,
              })
            } else {
              const snapshot: SerializableExecutionState = {
                blockStates: Object.fromEntries(accumulatedBlockStates),
                executedBlocks: Array.from(executedBlockIds),
                blockLogs: accumulatedBlockLogs,
                decisions: { router: {}, condition: {} },
                completedLoops: [],
                activeExecutionPath: Array.from(executedBlockIds),
              }
              setLastExecutionSnapshot(activeWorkflowId, snapshot)
              logger.info('Stored execution snapshot for run-from-block', {
                workflowId: activeWorkflowId,
                executedBlocksCount: executedBlockIds.size,
              })
            }
          }
        },

        onExecutionError: (data) => {
@@ -1376,6 +1429,265 @@ export function useWorkflowExecution() {
    setActiveBlocks,
  ])

  /**
   * Handles running workflow from a specific block using cached outputs
   */
  const handleRunFromBlock = useCallback(
    async (blockId: string, workflowId: string) => {
      const snapshot = getLastExecutionSnapshot(workflowId)
      const workflowEdges = useWorkflowStore.getState().edges
      const incomingEdges = workflowEdges.filter((edge) => edge.target === blockId)
      const isTriggerBlock = incomingEdges.length === 0

      if (!snapshot && !isTriggerBlock) {
        logger.error('No execution snapshot available for run-from-block', { workflowId, blockId })
        return
      }

      const dependenciesSatisfied =
        isTriggerBlock ||
        (snapshot && incomingEdges.every((edge) => snapshot.executedBlocks.includes(edge.source)))

      if (!dependenciesSatisfied) {
        logger.error('Upstream dependencies not satisfied for run-from-block', {
          workflowId,
          blockId,
        })
        return
      }

      // For trigger blocks with no snapshot, create an empty one
      const effectiveSnapshot: SerializableExecutionState = snapshot || {
        blockStates: {},
        executedBlocks: [],
        blockLogs: [],
        decisions: { router: {}, condition: {} },
        completedLoops: [],
        activeExecutionPath: [],
      }

      logger.info('Starting run-from-block execution', {
        workflowId,
        startBlockId: blockId,
        isTriggerBlock,
      })

      setIsExecuting(true)
      const executionId = uuidv4()
      const accumulatedBlockLogs: BlockLog[] = []
      const accumulatedBlockStates = new Map<string, BlockState>()
      const executedBlockIds = new Set<string>()
      const activeBlocksSet = new Set<string>()

      try {
        await executionStream.executeFromBlock({
          workflowId,
          startBlockId: blockId,
          sourceSnapshot: effectiveSnapshot,
          callbacks: {
            onExecutionStarted: (data) => {
              logger.info('Run-from-block execution started:', data)
            },

            onBlockStarted: (data) => {
              activeBlocksSet.add(data.blockId)
              setActiveBlocks(new Set(activeBlocksSet))

              const incomingEdges = workflowEdges.filter((edge) => edge.target === data.blockId)
              incomingEdges.forEach((edge) => {
                setEdgeRunStatus(edge.id, 'success')
              })
            },

            onBlockCompleted: (data) => {
              activeBlocksSet.delete(data.blockId)
              setActiveBlocks(new Set(activeBlocksSet))

              setBlockRunStatus(data.blockId, 'success')

              executedBlockIds.add(data.blockId)
              accumulatedBlockStates.set(data.blockId, {
                output: data.output,
                executed: true,
                executionTime: data.durationMs,
              })

              const isContainerBlock = data.blockType === 'loop' || data.blockType === 'parallel'
              if (isContainerBlock) return

              const startedAt = new Date(Date.now() - data.durationMs).toISOString()
              const endedAt = new Date().toISOString()

              accumulatedBlockLogs.push({
                blockId: data.blockId,
                blockName: data.blockName || 'Unknown Block',
                blockType: data.blockType || 'unknown',
                input: data.input || {},
                output: data.output,
                success: true,
                durationMs: data.durationMs,
                startedAt,
                endedAt,
              })

              addConsole({
                input: data.input || {},
                output: data.output,
                success: true,
                durationMs: data.durationMs,
                startedAt,
                endedAt,
                workflowId,
                blockId: data.blockId,
                executionId,
                blockName: data.blockName || 'Unknown Block',
                blockType: data.blockType || 'unknown',
                iterationCurrent: data.iterationCurrent,
                iterationTotal: data.iterationTotal,
                iterationType: data.iterationType,
              })
            },

            onBlockError: (data) => {
              activeBlocksSet.delete(data.blockId)
              setActiveBlocks(new Set(activeBlocksSet))

              setBlockRunStatus(data.blockId, 'error')

              const startedAt = new Date(Date.now() - data.durationMs).toISOString()
              const endedAt = new Date().toISOString()

              accumulatedBlockLogs.push({
                blockId: data.blockId,
                blockName: data.blockName || 'Unknown Block',
                blockType: data.blockType || 'unknown',
                input: data.input || {},
                output: {},
                success: false,
                error: data.error,
                durationMs: data.durationMs,
                startedAt,
                endedAt,
              })

              addConsole({
                input: data.input || {},
                output: {},
                success: false,
                error: data.error,
                durationMs: data.durationMs,
                startedAt,
                endedAt,
                workflowId,
                blockId: data.blockId,
                executionId,
                blockName: data.blockName,
                blockType: data.blockType,
                iterationCurrent: data.iterationCurrent,
                iterationTotal: data.iterationTotal,
                iterationType: data.iterationType,
              })
            },

            onExecutionCompleted: (data) => {
              if (data.success) {
                const mergedBlockStates: Record<string, BlockState> = {
                  ...effectiveSnapshot.blockStates,
                }
                for (const [bId, state] of accumulatedBlockStates) {
                  mergedBlockStates[bId] = state
                }

                const mergedExecutedBlocks = new Set([
                  ...effectiveSnapshot.executedBlocks,
                  ...executedBlockIds,
                ])

                const updatedSnapshot: SerializableExecutionState = {
                  ...effectiveSnapshot,
                  blockStates: mergedBlockStates,
                  executedBlocks: Array.from(mergedExecutedBlocks),
                  blockLogs: [...effectiveSnapshot.blockLogs, ...accumulatedBlockLogs],
                  activeExecutionPath: Array.from(mergedExecutedBlocks),
                }
                setLastExecutionSnapshot(workflowId, updatedSnapshot)
                logger.info('Updated execution snapshot after run-from-block', {
                  workflowId,
                  newBlocksExecuted: executedBlockIds.size,
                })
              }
            },

            onExecutionError: (data) => {
              logger.error('Run-from-block execution error:', data.error)
            },

            onExecutionCancelled: () => {
              logger.info('Run-from-block execution cancelled')
            },
          },
        })
      } catch (error) {
        if ((error as Error).name !== 'AbortError') {
          logger.error('Run-from-block execution failed:', error)
        }
      } finally {
        setIsExecuting(false)
        setActiveBlocks(new Set())
      }
    },
    [
      getLastExecutionSnapshot,
      setLastExecutionSnapshot,
      setIsExecuting,
      setActiveBlocks,
      setBlockRunStatus,
      setEdgeRunStatus,
      addConsole,
      executionStream,
    ]
  )

  /**
   * Handles running workflow until a specific block (stops after that block completes)
   */
  const handleRunUntilBlock = useCallback(
    async (blockId: string, workflowId: string) => {
      if (!workflowId || workflowId !== activeWorkflowId) {
        logger.error('Invalid workflow ID for run-until-block', { workflowId, activeWorkflowId })
        return
      }

      logger.info('Starting run-until-block execution', { workflowId, stopAfterBlockId: blockId })

      setExecutionResult(null)
      setIsExecuting(true)

      const executionId = uuidv4()
      try {
        const result = await executeWorkflow(
          undefined,
          undefined,
          executionId,
          undefined,
          'manual',
          blockId
        )
        if (result && 'success' in result) {
          setExecutionResult(result)
        }
      } catch (error) {
        const errorResult = handleExecutionError(error, { executionId })
        return errorResult
      } finally {
        setIsExecuting(false)
        setIsDebugging(false)
        setActiveBlocks(new Set())
      }
    },
    [activeWorkflowId, setExecutionResult, setIsExecuting, setIsDebugging, setActiveBlocks]
  )

  return {
    isExecuting,
    isDebugging,
@@ -1386,5 +1698,7 @@ export function useWorkflowExecution() {
    handleResumeDebug,
    handleCancelDebug,
    handleCancelExecution,
    handleRunFromBlock,
    handleRunUntilBlock,
  }
}

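Every snapshot literal in the hook above shares one serializable shape. A sketch of that interface as inferred from those literals (the canonical definition lives in @/executor/execution/types and is not part of this compare, so the value types marked below are assumptions):

import type { BlockLog, BlockState } from '@/executor/types'

// Inferred shape; field names taken from the snapshot literals above.
interface SerializableExecutionState {
  blockStates: Record<string, BlockState> // cached outputs keyed by block id
  executedBlocks: string[]                // ids of blocks that have already run
  blockLogs: BlockLog[]                   // accumulated per-block logs
  decisions: { router: Record<string, unknown>; condition: Record<string, unknown> } // value types assumed
  completedLoops: string[]
  activeExecutionPath: string[]
}
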
@@ -47,6 +47,7 @@ import {
  useCurrentWorkflow,
  useNodeUtilities,
  useShiftSelectionLock,
  useWorkflowExecution,
} from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks'
import {
  calculateContainerDimensions,
@@ -325,6 +326,8 @@ const WorkflowContent = React.memo(() => {

  const showTrainingModal = useCopilotTrainingStore((state) => state.showModal)

  const { handleRunFromBlock, handleRunUntilBlock } = useWorkflowExecution()

  const snapToGridSize = useSnapToGridSize()
  const snapToGrid = snapToGridSize > 0

@@ -758,13 +761,16 @@ const WorkflowContent = React.memo(() => {
    [collaborativeBatchAddBlocks, setSelectedEdges, setPendingSelection]
  )

  const { activeBlockIds, pendingBlocks, isDebugging } = useExecutionStore(
    useShallow((state) => ({
      activeBlockIds: state.activeBlockIds,
      pendingBlocks: state.pendingBlocks,
      isDebugging: state.isDebugging,
    }))
  )
  const { activeBlockIds, pendingBlocks, isDebugging, isExecuting, getLastExecutionSnapshot } =
    useExecutionStore(
      useShallow((state) => ({
        activeBlockIds: state.activeBlockIds,
        pendingBlocks: state.pendingBlocks,
        isDebugging: state.isDebugging,
        isExecuting: state.isExecuting,
        getLastExecutionSnapshot: state.getLastExecutionSnapshot,
      }))
    )

  const [dragStartParentId, setDragStartParentId] = useState<string | null>(null)

@@ -988,6 +994,41 @@ const WorkflowContent = React.memo(() => {
    }
  }, [contextMenuBlocks])

  const handleContextRunFromBlock = useCallback(() => {
    if (contextMenuBlocks.length !== 1) return
    const blockId = contextMenuBlocks[0].id
    handleRunFromBlock(blockId, workflowIdParam)
  }, [contextMenuBlocks, workflowIdParam, handleRunFromBlock])

  const handleContextRunUntilBlock = useCallback(() => {
    if (contextMenuBlocks.length !== 1) return
    const blockId = contextMenuBlocks[0].id
    handleRunUntilBlock(blockId, workflowIdParam)
  }, [contextMenuBlocks, workflowIdParam, handleRunUntilBlock])

  const runFromBlockState = useMemo(() => {
    if (contextMenuBlocks.length !== 1) {
      return { canRun: false, reason: undefined }
    }
    const block = contextMenuBlocks[0]
    const snapshot = getLastExecutionSnapshot(workflowIdParam)
    const incomingEdges = edges.filter((edge) => edge.target === block.id)
    const isTriggerBlock = incomingEdges.length === 0
    const dependenciesSatisfied =
      isTriggerBlock ||
      (snapshot && incomingEdges.every((edge) => snapshot.executedBlocks.includes(edge.source)))
    const isNoteBlock = block.type === 'note'
    const isInsideSubflow =
      block.parentId && (block.parentType === 'loop' || block.parentType === 'parallel')

    if (isInsideSubflow) return { canRun: false, reason: 'Cannot run from inside subflow' }
    if (!dependenciesSatisfied) return { canRun: false, reason: 'Run upstream blocks first' }
    if (isNoteBlock) return { canRun: false, reason: undefined }
    if (isExecuting) return { canRun: false, reason: undefined }

    return { canRun: true, reason: undefined }
  }, [contextMenuBlocks, edges, workflowIdParam, getLastExecutionSnapshot, isExecuting])

  const handleContextAddBlock = useCallback(() => {
    useSearchModalStore.getState().open()
  }, [])
@@ -1641,51 +1682,36 @@ const WorkflowContent = React.memo(() => {
  }, [screenToFlowPosition, handleToolbarDrop])

  /**
   * Focus canvas on changed blocks when diff appears
   * Focuses on new/edited blocks rather than fitting the entire workflow
   * Focus canvas on changed blocks when diff appears.
   */
  const pendingZoomBlockIdsRef = useRef<Set<string> | null>(null)
  const prevDiffReadyRef = useRef(false)

  // Phase 1: When diff becomes ready, record which blocks we want to zoom to
  // Phase 2 effect is located after displayNodes is defined (search for "Phase 2")
  useEffect(() => {
    // Only focus when diff transitions from not ready to ready
    if (isDiffReady && !prevDiffReadyRef.current && diffAnalysis) {
      // Diff just became ready - record blocks to zoom to
      const changedBlockIds = [
        ...(diffAnalysis.new_blocks || []),
        ...(diffAnalysis.edited_blocks || []),
      ]

      if (changedBlockIds.length > 0) {
        const allNodes = getNodes()
        const changedNodes = allNodes.filter((node) => changedBlockIds.includes(node.id))

        if (changedNodes.length > 0) {
          logger.info('Diff ready - focusing on changed blocks', {
            changedBlockIds,
            foundNodes: changedNodes.length,
          })
          requestAnimationFrame(() => {
            fitViewToBounds({
              nodes: changedNodes,
              duration: 600,
              padding: 0.1,
              minZoom: 0.5,
              maxZoom: 1.0,
            })
          })
        } else {
          logger.info('Diff ready - no changed nodes found, fitting all')
          requestAnimationFrame(() => {
            fitViewToBounds({ padding: 0.1, duration: 600 })
          })
        }
        pendingZoomBlockIdsRef.current = new Set(changedBlockIds)
      } else {
        logger.info('Diff ready - no changed blocks, fitting all')
        // No specific blocks to focus on, fit all after a frame
        pendingZoomBlockIdsRef.current = null
        requestAnimationFrame(() => {
          fitViewToBounds({ padding: 0.1, duration: 600 })
        })
      }
    } else if (!isDiffReady && prevDiffReadyRef.current) {
      // Diff was cleared (accepted/rejected) - cancel any pending zoom
      pendingZoomBlockIdsRef.current = null
    }
    prevDiffReadyRef.current = isDiffReady
  }, [isDiffReady, diffAnalysis, fitViewToBounds, getNodes])
  }, [isDiffReady, diffAnalysis, fitViewToBounds])

  /** Displays trigger warning notifications. */
  useEffect(() => {
@@ -2093,6 +2119,48 @@ const WorkflowContent = React.memo(() => {
    })
  }, [derivedNodes, blocks, pendingSelection, clearPendingSelection])

  // Phase 2: When displayNodes updates, check if pending zoom blocks are ready
  // (Phase 1 is located earlier in the file where pendingZoomBlockIdsRef is defined)
  useEffect(() => {
    const pendingBlockIds = pendingZoomBlockIdsRef.current
    if (!pendingBlockIds || pendingBlockIds.size === 0) {
      return
    }

    // Find the nodes we're waiting for
    const pendingNodes = displayNodes.filter((node) => pendingBlockIds.has(node.id))

    // Check if all expected nodes are present with valid dimensions
    const allNodesReady =
      pendingNodes.length === pendingBlockIds.size &&
      pendingNodes.every(
        (node) =>
          typeof node.width === 'number' &&
          typeof node.height === 'number' &&
          node.width > 0 &&
          node.height > 0
      )

    if (allNodesReady) {
      logger.info('Diff ready - focusing on changed blocks', {
        changedBlockIds: Array.from(pendingBlockIds),
        foundNodes: pendingNodes.length,
      })
      // Clear pending state before zooming to prevent re-triggers
      pendingZoomBlockIdsRef.current = null
      // Use requestAnimationFrame to ensure React has finished rendering
      requestAnimationFrame(() => {
        fitViewToBounds({
          nodes: pendingNodes,
          duration: 600,
          padding: 0.1,
          minZoom: 0.5,
          maxZoom: 1.0,
        })
      })
    }
  }, [displayNodes, fitViewToBounds])

  /** Handles ActionBar remove-from-subflow events. */
  useEffect(() => {
    const handleRemoveFromSubflow = (event: Event) => {
@@ -3281,11 +3349,16 @@ const WorkflowContent = React.memo(() => {
        onRemoveFromSubflow={handleContextRemoveFromSubflow}
        onOpenEditor={handleContextOpenEditor}
        onRename={handleContextRename}
        onRunFromBlock={handleContextRunFromBlock}
        onRunUntilBlock={handleContextRunUntilBlock}
        hasClipboard={hasClipboard()}
        showRemoveFromSubflow={contextMenuBlocks.some(
          (b) => b.parentId && (b.parentType === 'loop' || b.parentType === 'parallel')
        )}
        canRunFromBlock={runFromBlockState.canRun}
        runFromBlockDisabledReason={runFromBlockState.reason}
        disableEdit={!effectivePermissions.canEdit}
        isExecuting={isExecuting}
      />

      <CanvasMenu

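A worked example of the runFromBlockState gating above: in a convergent graph A → C, B → C where only A has executed, right-clicking C yields { canRun: false, reason: 'Run upstream blocks first' }, because B is not in the snapshot's executedBlocks. Once B has run (or if C has no incoming edges at all, making it a trigger block), the menu item becomes enabled, provided C is not a note block, is not inside a loop or parallel subflow, and no execution is currently in flight.
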
@@ -27,18 +27,13 @@ export function useContextMenu({ onContextMenu }: UseContextMenuProps = {}) {
  const [isOpen, setIsOpen] = useState(false)
  const [position, setPosition] = useState<ContextMenuPosition>({ x: 0, y: 0 })
  const menuRef = useRef<HTMLDivElement>(null)
  // Used to prevent click-outside dismissal when trigger is clicked
  const dismissPreventedRef = useRef(false)

  /**
   * Handle right-click event
   */
  const handleContextMenu = useCallback(
    (e: React.MouseEvent) => {
      e.preventDefault()
      e.stopPropagation()

      // Calculate position relative to viewport
      const x = e.clientX
      const y = e.clientY

@@ -50,17 +45,10 @@ export function useContextMenu({ onContextMenu }: UseContextMenuProps = {}) {
    [onContextMenu]
  )

  /**
   * Close the context menu
   */
  const closeMenu = useCallback(() => {
    setIsOpen(false)
  }, [])

  /**
   * Prevent the next click-outside from dismissing the menu.
   * Call this on pointerdown of a toggle trigger to allow proper toggle behavior.
   */
  const preventDismiss = useCallback(() => {
    dismissPreventedRef.current = true
  }, [])
@@ -72,7 +60,6 @@ export function useContextMenu({ onContextMenu }: UseContextMenuProps = {}) {
    if (!isOpen) return

    const handleClickOutside = (e: MouseEvent) => {
      // Check if dismissal was prevented (e.g., by toggle trigger's pointerdown)
      if (dismissPreventedRef.current) {
        dismissPreventedRef.current = false
        return
@@ -82,7 +69,6 @@ export function useContextMenu({ onContextMenu }: UseContextMenuProps = {}) {
      }
    }

    // Small delay to prevent immediate close from the same click that opened the menu
    const timeoutId = setTimeout(() => {
      document.addEventListener('click', handleClickOutside)
    }, 0)

@@ -214,15 +214,6 @@ export const A2ABlock: BlockConfig<A2AResponse> = {
    ],
    config: {
      tool: (params) => params.operation as string,
      params: (params) => {
        const { fileUpload, fileReference, ...rest } = params
        const hasFileUpload = Array.isArray(fileUpload) ? fileUpload.length > 0 : !!fileUpload
        const files = hasFileUpload ? fileUpload : fileReference
        return {
          ...rest,
          ...(files ? { files } : {}),
        }
      },
    },
  },
  inputs: {

@@ -581,6 +581,18 @@ export const GmailV2Block: BlockConfig<GmailToolResponse> = {
    results: { type: 'json', description: 'Search/read summary results' },
    attachments: { type: 'json', description: 'Downloaded attachments (if enabled)' },

    // Draft-specific outputs
    draftId: {
      type: 'string',
      description: 'Draft ID',
      condition: { field: 'operation', value: 'draft_gmail' },
    },
    messageId: {
      type: 'string',
      description: 'Gmail message ID for the draft',
      condition: { field: 'operation', value: 'draft_gmail' },
    },

    // Trigger outputs (unchanged)
    email_id: { type: 'string', description: 'Gmail message ID' },
    thread_id: { type: 'string', description: 'Gmail thread ID' },

@@ -12,6 +12,7 @@ export const SpotifyBlock: BlockConfig<ToolResponse> = {
    'Integrate Spotify into your workflow. Search for tracks, albums, artists, and playlists. Manage playlists, access your library, control playback, browse podcasts and audiobooks.',
  docsLink: 'https://docs.sim.ai/tools/spotify',
  category: 'tools',
  hideFromToolbar: true,
  bgColor: '#000000',
  icon: SpotifyIcon,
  subBlocks: [

@@ -661,12 +661,25 @@ Return ONLY the PostgREST filter expression - no explanations, no markdown, no e
      placeholder: 'folder/subfolder/',
      condition: { field: 'operation', value: 'storage_upload' },
    },
    {
      id: 'file',
      title: 'File',
      type: 'file-upload',
      canonicalParamId: 'fileData',
      placeholder: 'Upload file to storage',
      condition: { field: 'operation', value: 'storage_upload' },
      mode: 'basic',
      multiple: false,
      required: true,
    },
    {
      id: 'fileContent',
      title: 'File Content',
      type: 'code',
      canonicalParamId: 'fileData',
      placeholder: 'Base64 encoded for binary files, or plain text',
      condition: { field: 'operation', value: 'storage_upload' },
      mode: 'advanced',
      required: true,
    },
    {

@@ -26,6 +26,7 @@ export class ExecutionEngine {
  private allowResumeTriggers: boolean
  private cancelledFlag = false
  private errorFlag = false
  private stoppedEarlyFlag = false
  private executionError: Error | null = null
  private lastCancellationCheck = 0
  private readonly useRedisCancellation: boolean
@@ -105,7 +106,7 @@ export class ExecutionEngine {
    this.initializeQueue(triggerBlockId)

    while (this.hasWork()) {
      if ((await this.checkCancellation()) || this.errorFlag) {
      if ((await this.checkCancellation()) || this.errorFlag || this.stoppedEarlyFlag) {
        break
      }
      await this.processQueue()
@@ -259,6 +260,16 @@ export class ExecutionEngine {
  }

  private initializeQueue(triggerBlockId?: string): void {
    if (this.context.runFromBlockContext) {
      const { startBlockId } = this.context.runFromBlockContext
      logger.info('Initializing queue for run-from-block mode', {
        startBlockId,
        dirtySetSize: this.context.runFromBlockContext.dirtySet.size,
      })
      this.addToQueue(startBlockId)
      return
    }

    const pendingBlocks = this.context.metadata.pendingBlocks
    const remainingEdges = (this.context.metadata as any).remainingEdges

@@ -385,6 +396,12 @@ export class ExecutionEngine {
      this.finalOutput = output
    }

    if (this.context.stopAfterBlockId === nodeId) {
      logger.info('Stopping execution after target block', { nodeId })
      this.stoppedEarlyFlag = true
      return
    }

    const readyNodes = this.edgeManager.processOutgoingEdges(node, output, false)

    logger.info('Processing outgoing edges', {

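Taken together, the two engine changes above implement run-until-block. A worked trace: for a chain A → B → C with stopAfterBlockId = 'B', A and B execute normally; when B completes, the early return fires before B's outgoing edges are processed, so C is never queued, and stoppedEarlyFlag breaks the work loop on its next check.
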
@@ -5,12 +5,22 @@ import { BlockExecutor } from '@/executor/execution/block-executor'
import { EdgeManager } from '@/executor/execution/edge-manager'
import { ExecutionEngine } from '@/executor/execution/engine'
import { ExecutionState } from '@/executor/execution/state'
import type { ContextExtensions, WorkflowInput } from '@/executor/execution/types'
import type {
  ContextExtensions,
  SerializableExecutionState,
  WorkflowInput,
} from '@/executor/execution/types'
import { createBlockHandlers } from '@/executor/handlers/registry'
import { LoopOrchestrator } from '@/executor/orchestrators/loop'
import { NodeExecutionOrchestrator } from '@/executor/orchestrators/node'
import { ParallelOrchestrator } from '@/executor/orchestrators/parallel'
import type { BlockState, ExecutionContext, ExecutionResult } from '@/executor/types'
import {
  computeDirtySet,
  type RunFromBlockContext,
  resolveContainerToSentinelStart,
  validateRunFromBlock,
} from '@/executor/utils/run-from-block'
import {
  buildResolutionFromBlock,
  buildStartBlockOutput,
@@ -89,17 +99,108 @@ export class DAGExecutor {
    }
  }

  /**
   * Execute from a specific block using cached outputs for upstream blocks.
   */
  async executeFromBlock(
    workflowId: string,
    startBlockId: string,
    sourceSnapshot: SerializableExecutionState
  ): Promise<ExecutionResult> {
    const dag = this.dagBuilder.build(this.workflow)

    const executedBlocks = new Set(sourceSnapshot.executedBlocks)
    const validation = validateRunFromBlock(startBlockId, dag, executedBlocks)
    if (!validation.valid) {
      throw new Error(validation.error)
    }

    const dirtySet = computeDirtySet(dag, startBlockId)
    const effectiveStartBlockId = resolveContainerToSentinelStart(startBlockId, dag) ?? startBlockId

    logger.info('Executing from block', {
      workflowId,
      startBlockId,
      effectiveStartBlockId,
      dirtySetSize: dirtySet.size,
      totalBlocks: dag.nodes.size,
      dirtyBlocks: Array.from(dirtySet),
    })

    // Remove incoming edges from non-dirty sources so convergent blocks don't wait for cached upstream
    for (const nodeId of dirtySet) {
      const node = dag.nodes.get(nodeId)
      if (!node) continue

      const nonDirtyIncoming: string[] = []
      for (const sourceId of node.incomingEdges) {
        if (!dirtySet.has(sourceId)) {
          nonDirtyIncoming.push(sourceId)
        }
      }

      for (const sourceId of nonDirtyIncoming) {
        node.incomingEdges.delete(sourceId)
        logger.debug('Removed non-dirty incoming edge for run-from-block', {
          nodeId,
          sourceId,
        })
      }
    }

    const runFromBlockContext = { startBlockId: effectiveStartBlockId, dirtySet }
    const { context, state } = this.createExecutionContext(workflowId, undefined, {
      snapshotState: sourceSnapshot,
      runFromBlockContext,
    })

    const resolver = new VariableResolver(this.workflow, this.workflowVariables, state)
    const loopOrchestrator = new LoopOrchestrator(dag, state, resolver)
    loopOrchestrator.setContextExtensions(this.contextExtensions)
    const parallelOrchestrator = new ParallelOrchestrator(dag, state)
    parallelOrchestrator.setResolver(resolver)
    parallelOrchestrator.setContextExtensions(this.contextExtensions)
    const allHandlers = createBlockHandlers()
    const blockExecutor = new BlockExecutor(allHandlers, resolver, this.contextExtensions, state)
    const edgeManager = new EdgeManager(dag)
    loopOrchestrator.setEdgeManager(edgeManager)
    const nodeOrchestrator = new NodeExecutionOrchestrator(
      dag,
      state,
      blockExecutor,
      loopOrchestrator,
      parallelOrchestrator
    )
    const engine = new ExecutionEngine(context, dag, edgeManager, nodeOrchestrator)

    return await engine.run()
  }

  private createExecutionContext(
    workflowId: string,
    triggerBlockId?: string
    triggerBlockId?: string,
    overrides?: {
      snapshotState?: SerializableExecutionState
      runFromBlockContext?: RunFromBlockContext
    }
  ): { context: ExecutionContext; state: ExecutionState } {
    const snapshotState = this.contextExtensions.snapshotState
    const snapshotState = overrides?.snapshotState ?? this.contextExtensions.snapshotState
    const blockStates = snapshotState?.blockStates
      ? new Map(Object.entries(snapshotState.blockStates))
      : new Map<string, BlockState>()
    const executedBlocks = snapshotState?.executedBlocks
    let executedBlocks = snapshotState?.executedBlocks
      ? new Set(snapshotState.executedBlocks)
      : new Set<string>()

    if (overrides?.runFromBlockContext) {
      const { dirtySet } = overrides.runFromBlockContext
      executedBlocks = new Set([...executedBlocks].filter((id) => !dirtySet.has(id)))
      logger.info('Cleared executed status for dirty blocks', {
        dirtySetSize: dirtySet.size,
        remainingExecutedBlocks: executedBlocks.size,
      })
    }

    const state = new ExecutionState(blockStates, executedBlocks)

    const context: ExecutionContext = {
@@ -109,7 +210,7 @@ export class DAGExecutor {
      userId: this.contextExtensions.userId,
      isDeployedContext: this.contextExtensions.isDeployedContext,
      blockStates: state.getBlockStates(),
      blockLogs: snapshotState?.blockLogs ?? [],
      blockLogs: overrides?.runFromBlockContext ? [] : (snapshotState?.blockLogs ?? []),
      metadata: {
        ...this.contextExtensions.metadata,
        startTime: new Date().toISOString(),
@@ -169,6 +270,8 @@ export class DAGExecutor {
      abortSignal: this.contextExtensions.abortSignal,
      includeFileBase64: this.contextExtensions.includeFileBase64,
      base64MaxBytes: this.contextExtensions.base64MaxBytes,
      runFromBlockContext: overrides?.runFromBlockContext,
      stopAfterBlockId: this.contextExtensions.stopAfterBlockId,
    }

    if (this.contextExtensions.resumeFromSnapshot) {
@@ -193,6 +296,10 @@ export class DAGExecutor {
        pendingBlocks: context.metadata.pendingBlocks,
        skipStarterBlockInit: true,
      })
    } else if (overrides?.runFromBlockContext) {
      logger.info('Run-from-block mode: skipping starter block initialization', {
        startBlockId: overrides.runFromBlockContext.startBlockId,
      })
    } else {
      this.initializeStarterBlock(context, state, triggerBlockId)
    }

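As used in executeFromBlock above, the run-from-block context carries just two fields. A sketch of the type inferred from the literal `{ startBlockId: effectiveStartBlockId, dirtySet }` (the actual declaration is in @/executor/utils/run-from-block, which this compare does not include):

// Inferred shape; the real type may carry more fields.
interface RunFromBlockContext {
  startBlockId: string  // block (or resolved sentinel) that seeds the execution queue
  dirtySet: Set<string> // blocks that must re-execute; everything else serves cached output
}
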
@@ -1,5 +1,6 @@
import type { Edge } from 'reactflow'
import type { BlockLog, BlockState, NormalizedBlockOutput } from '@/executor/types'
import type { RunFromBlockContext } from '@/executor/utils/run-from-block'
import type { SubflowType } from '@/stores/workflows/workflow/types'

export interface ExecutionMetadata {
@@ -105,6 +106,17 @@ export interface ContextExtensions {
    output: { input?: any; output: NormalizedBlockOutput; executionTime: number },
    iterationContext?: IterationContext
  ) => Promise<void>

  /**
   * Run-from-block configuration. When provided, executor runs in partial
   * execution mode starting from the specified block.
   */
  runFromBlockContext?: RunFromBlockContext

  /**
   * Stop execution after this block completes. Used for "run until block" feature.
   */
  stopAfterBlockId?: string
}

export interface WorkflowInput {

@@ -4,9 +4,9 @@ import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { refreshTokenIfNeeded } from '@/app/api/auth/oauth/utils'
import type { BlockOutput } from '@/blocks/types'
import { BlockType, DEFAULTS, EVALUATOR, HTTP } from '@/executor/constants'
import { BlockType, DEFAULTS, EVALUATOR } from '@/executor/constants'
import type { BlockHandler, ExecutionContext } from '@/executor/types'
import { buildAPIUrl, extractAPIErrorMessage } from '@/executor/utils/http'
import { buildAPIUrl, buildAuthHeaders, extractAPIErrorMessage } from '@/executor/utils/http'
import { isJSONString, parseJSON, stringifyJSON } from '@/executor/utils/json'
import { validateModelProvider } from '@/executor/utils/permission-check'
import { calculateCost, getProviderFromModel } from '@/providers/utils'
@@ -143,9 +143,7 @@ export class EvaluatorBlockHandler implements BlockHandler {

    const response = await fetch(url.toString(), {
      method: 'POST',
      headers: {
        'Content-Type': HTTP.CONTENT_TYPE.JSON,
      },
      headers: await buildAuthHeaders(),
      body: stringifyJSON(providerRequest),
    })

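The evaluator and router handlers both swap their hand-rolled Content-Type header for a shared buildAuthHeaders() helper from @/executor/utils/http. That helper's body is not part of this compare; a plausible sketch, assuming it returns the JSON content type plus whatever internal auth the provider endpoint expects:

// Hypothetical sketch only; the real implementation in '@/executor/utils/http' may differ.
async function buildAuthHeaders(): Promise<Record<string, string>> {
  return {
    'Content-Type': 'application/json',
    // plus an internal auth header, e.g. a signed token for the provider endpoint (assumption)
  }
}
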
@@ -9,12 +9,12 @@ import type { BlockOutput } from '@/blocks/types'
import {
  BlockType,
  DEFAULTS,
  HTTP,
  isAgentBlockType,
  isRouterV2BlockType,
  ROUTER,
} from '@/executor/constants'
import type { BlockHandler, ExecutionContext } from '@/executor/types'
import { buildAuthHeaders } from '@/executor/utils/http'
import { validateModelProvider } from '@/executor/utils/permission-check'
import { calculateCost, getProviderFromModel } from '@/providers/utils'
import type { SerializedBlock } from '@/serializer/types'
@@ -118,9 +118,7 @@ export class RouterBlockHandler implements BlockHandler {

    const response = await fetch(url.toString(), {
      method: 'POST',
      headers: {
        'Content-Type': HTTP.CONTENT_TYPE.JSON,
      },
      headers: await buildAuthHeaders(),
      body: JSON.stringify(providerRequest),
    })

@@ -277,9 +275,7 @@ export class RouterBlockHandler implements BlockHandler {

    const response = await fetch(url.toString(), {
      method: 'POST',
      headers: {
        'Content-Type': HTTP.CONTENT_TYPE.JSON,
      },
      headers: await buildAuthHeaders(),
      body: JSON.stringify(providerRequest),
    })

@@ -276,7 +276,16 @@ export class LoopOrchestrator {
    scope: LoopScope
  ): LoopContinuationResult {
    const results = scope.allIterationOutputs
    this.state.setBlockOutput(loopId, { results }, DEFAULTS.EXECUTION_TIME)
    const output = { results }
    this.state.setBlockOutput(loopId, output, DEFAULTS.EXECUTION_TIME)

    // Emit onBlockComplete for the loop container so the UI can track it
    if (this.contextExtensions?.onBlockComplete) {
      this.contextExtensions.onBlockComplete(loopId, 'Loop', 'loop', {
        output,
        executionTime: DEFAULTS.EXECUTION_TIME,
      })
    }

    return {
      shouldContinue: false,

@@ -31,7 +31,18 @@ export class NodeExecutionOrchestrator {
      throw new Error(`Node not found in DAG: ${nodeId}`)
    }

    if (this.state.hasExecuted(nodeId)) {
    if (ctx.runFromBlockContext && !ctx.runFromBlockContext.dirtySet.has(nodeId)) {
      const cachedOutput = this.state.getBlockOutput(nodeId) || {}
      logger.debug('Skipping non-dirty block in run-from-block mode', { nodeId })
      return {
        nodeId,
        output: cachedOutput,
        isFinalOutput: false,
      }
    }

    const isDirtyBlock = ctx.runFromBlockContext?.dirtySet.has(nodeId) ?? false
    if (!isDirtyBlock && this.state.hasExecuted(nodeId)) {
      const output = this.state.getBlockOutput(nodeId) || {}
      return {
        nodeId,

@@ -228,9 +228,17 @@ export class ParallelOrchestrator {
      const branchOutputs = scope.branchOutputs.get(i) || []
      results.push(branchOutputs)
    }
    this.state.setBlockOutput(parallelId, {
      results,
    })
    const output = { results }
    this.state.setBlockOutput(parallelId, output)

    // Emit onBlockComplete for the parallel container so the UI can track it
    if (this.contextExtensions?.onBlockComplete) {
      this.contextExtensions.onBlockComplete(parallelId, 'Parallel', 'parallel', {
        output,
        executionTime: 0,
      })
    }

    return {
      allBranchesComplete: true,
      results,

@@ -1,6 +1,7 @@
import type { TraceSpan } from '@/lib/logs/types'
import type { PermissionGroupConfig } from '@/lib/permission-groups/types'
import type { BlockOutput } from '@/blocks/types'
import type { RunFromBlockContext } from '@/executor/utils/run-from-block'
import type { SerializedBlock, SerializedWorkflow } from '@/serializer/types'

export interface UserFile {
@@ -250,6 +251,17 @@ export interface ExecutionContext {
   * will not have their base64 content fetched.
   */
  base64MaxBytes?: number

  /**
   * Context for "run from block" mode. When present, only blocks in dirtySet
   * will be executed; others return cached outputs from the source snapshot.
   */
  runFromBlockContext?: RunFromBlockContext

  /**
   * Stop execution after this block completes. Used for "run until block" feature.
   */
  stopAfterBlockId?: string
}

export interface ExecutionResult {

@@ -1,7 +1,9 @@
import { getBlockOutputs } from '@/lib/workflows/blocks/block-outputs'
import { normalizeName } from '@/executor/constants'
import type { ExecutionContext } from '@/executor/types'
import type { OutputSchema } from '@/executor/utils/block-reference'
import type { SerializedBlock } from '@/serializer/types'
import type { ToolConfig } from '@/tools/types'
import { getTool } from '@/tools/utils'

export interface BlockDataCollection {
  blockData: Record<string, unknown>
@@ -9,6 +11,32 @@ export interface BlockDataCollection {
  blockOutputSchemas: Record<string, OutputSchema>
}

export function getBlockSchema(
  block: SerializedBlock,
  toolConfig?: ToolConfig
): OutputSchema | undefined {
  const isTrigger =
    block.metadata?.category === 'triggers' ||
    (block.config?.params as Record<string, unknown> | undefined)?.triggerMode === true

  // Triggers use saved outputs (defines the trigger payload schema)
  if (isTrigger && block.outputs && Object.keys(block.outputs).length > 0) {
    return block.outputs as OutputSchema
  }

  // When a tool is selected, tool outputs are the source of truth
  if (toolConfig?.outputs && Object.keys(toolConfig.outputs).length > 0) {
    return toolConfig.outputs as OutputSchema
  }

  // Fallback to saved outputs for blocks without tools
  if (block.outputs && Object.keys(block.outputs).length > 0) {
    return block.outputs as OutputSchema
  }

  return undefined
}

export function collectBlockData(ctx: ExecutionContext): BlockDataCollection {
  const blockData: Record<string, unknown> = {}
  const blockNameMapping: Record<string, string> = {}
@@ -18,24 +46,21 @@ export function collectBlockData(ctx: ExecutionContext): BlockDataCollection {
    if (state.output !== undefined) {
      blockData[id] = state.output
    }
  }

    const workflowBlock = ctx.workflow?.blocks?.find((b) => b.id === id)
    if (!workflowBlock) continue
  const workflowBlocks = ctx.workflow?.blocks ?? []
  for (const block of workflowBlocks) {
    const id = block.id

    if (workflowBlock.metadata?.name) {
      blockNameMapping[normalizeName(workflowBlock.metadata.name)] = id
    if (block.metadata?.name) {
      blockNameMapping[normalizeName(block.metadata.name)] = id
    }

    const blockType = workflowBlock.metadata?.id
    if (blockType) {
      const params = workflowBlock.config?.params as Record<string, unknown> | undefined
      const subBlocks = params
        ? Object.fromEntries(Object.entries(params).map(([k, v]) => [k, { value: v }]))
        : undefined
      const schema = getBlockOutputs(blockType, subBlocks)
      if (schema && Object.keys(schema).length > 0) {
        blockOutputSchemas[id] = schema
      }
    const toolId = block.config?.tool
    const toolConfig = toolId ? getTool(toolId) : undefined
    const schema = getBlockSchema(block, toolConfig)
    if (schema && Object.keys(schema).length > 0) {
      blockOutputSchemas[id] = schema
    }
  }

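getBlockSchema resolves a block's output schema with a fixed precedence: trigger outputs first, then the selected tool's outputs, then saved block outputs. A short usage sketch (the tool id and block value are hypothetical):

// Hypothetical values illustrating the precedence in getBlockSchema.
const toolConfig = getTool('gmail_send') // assume this tool declares `outputs`
const schema = getBlockSchema(gmailBlock, toolConfig)
// - gmailBlock is a trigger with saved outputs -> those saved outputs win
// - else toolConfig.outputs is non-empty       -> tool outputs win
// - else                                       -> gmailBlock.outputs, or undefined
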
apps/sim/executor/utils/code-formatting.ts (new file, 48 lines)
@@ -0,0 +1,48 @@
/**
 * Formats a JavaScript/TypeScript value as a code literal for the target language.
 * Handles special cases like null, undefined, booleans, and Python-specific number representations.
 *
 * @param value - The value to format
 * @param language - Target language ('javascript' or 'python')
 * @returns A string literal representation valid in the target language
 *
 * @example
 * formatLiteralForCode(null, 'python') // => 'None'
 * formatLiteralForCode(true, 'python') // => 'True'
 * formatLiteralForCode(NaN, 'python') // => "float('nan')"
 * formatLiteralForCode("hello", 'javascript') // => '"hello"'
 * formatLiteralForCode({a: 1}, 'python') // => "json.loads('{\"a\":1}')"
 */
export function formatLiteralForCode(value: unknown, language: 'javascript' | 'python'): string {
  const isPython = language === 'python'

  if (value === undefined) {
    return isPython ? 'None' : 'undefined'
  }
  if (value === null) {
    return isPython ? 'None' : 'null'
  }
  if (typeof value === 'boolean') {
    return isPython ? (value ? 'True' : 'False') : String(value)
  }
  if (typeof value === 'number') {
    if (Number.isNaN(value)) {
      return isPython ? "float('nan')" : 'NaN'
    }
    if (value === Number.POSITIVE_INFINITY) {
      return isPython ? "float('inf')" : 'Infinity'
    }
    if (value === Number.NEGATIVE_INFINITY) {
      return isPython ? "float('-inf')" : '-Infinity'
    }
    return String(value)
  }
  if (typeof value === 'string') {
    return JSON.stringify(value)
  }
  // Objects and arrays - Python needs json.loads() because JSON true/false/null aren't valid Python
  if (isPython) {
    return `json.loads(${JSON.stringify(JSON.stringify(value))})`
  }
  return JSON.stringify(value)
}

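A quick worked example of the Python object path: for { a: 1 }, the inner JSON.stringify yields the string {"a":1}, and stringifying that again produces a quoted, escaped literal, so the emitted Python is json.loads("{\"a\":1}"), which evaluates back to a dict (the JSDoc example shows single quotes, but JSON.stringify actually emits double quotes):

// Expected results, following the function body above:
formatLiteralForCode({ a: 1 }, 'python')      // json.loads("{\"a\":1}")
formatLiteralForCode(Infinity, 'javascript')  // Infinity
formatLiteralForCode('hi "there"', 'python')  // "hi \"there\""
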
493
apps/sim/executor/utils/run-from-block.test.ts
Normal file
@@ -0,0 +1,493 @@
|
||||
import { describe, expect, it } from 'vitest'
|
||||
import type { DAG, DAGNode } from '@/executor/dag/builder'
|
||||
import type { DAGEdge, NodeMetadata } from '@/executor/dag/types'
|
||||
import { computeDirtySet, validateRunFromBlock } from '@/executor/utils/run-from-block'
|
||||
import type { SerializedLoop, SerializedParallel } from '@/serializer/types'
|
||||
|
||||
/**
|
||||
* Helper to create a DAG node for testing
|
||||
*/
|
||||
function createNode(
|
||||
id: string,
|
||||
outgoingEdges: Array<{ target: string; sourceHandle?: string }> = [],
|
||||
metadata: Partial<NodeMetadata> = {}
|
||||
): DAGNode {
|
||||
const edges = new Map<string, DAGEdge>()
|
||||
for (const edge of outgoingEdges) {
|
||||
edges.set(edge.target, { target: edge.target, sourceHandle: edge.sourceHandle })
|
||||
}
|
||||
|
||||
return {
|
||||
id,
|
||||
block: {
|
||||
id,
|
||||
position: { x: 0, y: 0 },
|
||||
config: { tool: 'test', params: {} },
|
||||
inputs: {},
|
||||
outputs: {},
|
||||
metadata: { id: 'test', name: `block-${id}`, category: 'tools' },
|
||||
enabled: true,
|
||||
},
|
||||
incomingEdges: new Set<string>(),
|
||||
outgoingEdges: edges,
|
||||
metadata: {
|
||||
isParallelBranch: false,
|
||||
isLoopNode: false,
|
||||
isSentinel: false,
|
||||
...metadata,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Helper to create a DAG for testing
|
||||
*/
|
||||
function createDAG(nodes: DAGNode[]): DAG {
|
||||
const nodeMap = new Map<string, DAGNode>()
|
||||
for (const node of nodes) {
|
||||
nodeMap.set(node.id, node)
|
||||
}
|
||||
|
||||
// Set up incoming edges based on outgoing edges
|
||||
for (const node of nodes) {
|
||||
for (const [, edge] of node.outgoingEdges) {
|
||||
const targetNode = nodeMap.get(edge.target)
|
||||
if (targetNode) {
|
||||
targetNode.incomingEdges.add(node.id)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
nodes: nodeMap,
|
||||
loopConfigs: new Map<string, SerializedLoop>(),
|
||||
parallelConfigs: new Map<string, SerializedParallel>(),
|
||||
}
|
||||
}
|
||||
|
||||
describe('computeDirtySet', () => {
|
||||
it('includes start block in dirty set', () => {
|
||||
const dag = createDAG([createNode('A'), createNode('B'), createNode('C')])
|
||||
|
||||
const dirtySet = computeDirtySet(dag, 'B')
|
||||
|
||||
expect(dirtySet.has('B')).toBe(true)
|
||||
})
|
||||
|
||||
it('includes all downstream blocks in linear workflow', () => {
|
||||
// A → B → C → D
|
||||
const dag = createDAG([
|
||||
createNode('A', [{ target: 'B' }]),
|
||||
createNode('B', [{ target: 'C' }]),
|
||||
createNode('C', [{ target: 'D' }]),
|
||||
createNode('D'),
|
||||
])
|
||||
|
||||
const dirtySet = computeDirtySet(dag, 'B')
|
||||
|
||||
expect(dirtySet.has('A')).toBe(false)
|
||||
expect(dirtySet.has('B')).toBe(true)
|
||||
expect(dirtySet.has('C')).toBe(true)
|
||||
expect(dirtySet.has('D')).toBe(true)
|
||||
expect(dirtySet.size).toBe(3)
|
||||
})
|
||||
|
||||
it('handles branching paths', () => {
|
||||
// A → B → C
|
||||
// ↓
|
||||
// D → E
|
||||
const dag = createDAG([
|
||||
createNode('A', [{ target: 'B' }]),
|
||||
createNode('B', [{ target: 'C' }, { target: 'D' }]),
|
||||
createNode('C'),
|
||||
createNode('D', [{ target: 'E' }]),
|
||||
createNode('E'),
|
||||
])
|
||||
|
||||
const dirtySet = computeDirtySet(dag, 'B')
|
||||
|
||||
expect(dirtySet.has('A')).toBe(false)
|
||||
expect(dirtySet.has('B')).toBe(true)
|
||||
expect(dirtySet.has('C')).toBe(true)
|
||||
expect(dirtySet.has('D')).toBe(true)
|
||||
expect(dirtySet.has('E')).toBe(true)
|
||||
expect(dirtySet.size).toBe(4)
|
||||
})
|
||||
|
||||
it('handles convergence points', () => {
|
||||
// A → C
|
||||
// B → C → D
|
||||
const dag = createDAG([
|
||||
createNode('A', [{ target: 'C' }]),
|
||||
createNode('B', [{ target: 'C' }]),
|
||||
createNode('C', [{ target: 'D' }]),
|
||||
createNode('D'),
|
||||
])
|
||||
|
||||
// Run from A: should include A, C, D (but not B)
|
||||
const dirtySet = computeDirtySet(dag, 'A')
|
||||
|
||||
expect(dirtySet.has('A')).toBe(true)
|
||||
expect(dirtySet.has('B')).toBe(false)
|
||||
expect(dirtySet.has('C')).toBe(true)
|
||||
expect(dirtySet.has('D')).toBe(true)
|
||||
expect(dirtySet.size).toBe(3)
|
||||
})
|
||||
|
||||
it('handles diamond pattern', () => {
|
||||
// B
|
||||
// ↗ ↘
|
||||
// A D
|
||||
// ↘ ↗
|
||||
// C
|
||||
const dag = createDAG([
|
||||
createNode('A', [{ target: 'B' }, { target: 'C' }]),
|
||||
createNode('B', [{ target: 'D' }]),
|
||||
createNode('C', [{ target: 'D' }]),
|
||||
createNode('D'),
|
||||
])
|
||||
|
||||
const dirtySet = computeDirtySet(dag, 'A')
|
||||
|
||||
expect(dirtySet.has('A')).toBe(true)
|
||||
expect(dirtySet.has('B')).toBe(true)
|
||||
expect(dirtySet.has('C')).toBe(true)
|
||||
expect(dirtySet.has('D')).toBe(true)
|
||||
expect(dirtySet.size).toBe(4)
|
||||
})
|
||||
|
||||
it('stops at graph boundaries', () => {
|
||||
// A → B C → D (disconnected)
|
||||
const dag = createDAG([
|
||||
createNode('A', [{ target: 'B' }]),
|
||||
createNode('B'),
|
||||
createNode('C', [{ target: 'D' }]),
|
||||
createNode('D'),
|
||||
])
|
||||
|
||||
const dirtySet = computeDirtySet(dag, 'A')
|
||||
|
||||
expect(dirtySet.has('A')).toBe(true)
|
||||
expect(dirtySet.has('B')).toBe(true)
|
||||
expect(dirtySet.has('C')).toBe(false)
|
||||
expect(dirtySet.has('D')).toBe(false)
|
||||
expect(dirtySet.size).toBe(2)
|
||||
})
|
||||
|
||||
it('handles single node workflow', () => {
|
||||
const dag = createDAG([createNode('A')])
|
||||
|
||||
const dirtySet = computeDirtySet(dag, 'A')
|
||||
|
||||
expect(dirtySet.has('A')).toBe(true)
|
||||
expect(dirtySet.size).toBe(1)
|
||||
})
|
||||
|
||||
it('handles node not in DAG gracefully', () => {
|
||||
const dag = createDAG([createNode('A'), createNode('B')])
|
||||
|
||||
const dirtySet = computeDirtySet(dag, 'nonexistent')
|
||||
|
||||
// Should just contain the start block ID even if not found
|
||||
expect(dirtySet.has('nonexistent')).toBe(true)
|
||||
expect(dirtySet.size).toBe(1)
|
||||
})
|
||||
|
||||
it('includes convergent block when running from one branch of parallel', () => {
|
||||
// Parallel branches converging:
|
||||
// A → B → D
|
||||
// A → C → D
|
||||
// Running from B should include B and D (but not A or C)
|
||||
const dag = createDAG([
|
||||
createNode('A', [{ target: 'B' }, { target: 'C' }]),
|
||||
createNode('B', [{ target: 'D' }]),
|
||||
createNode('C', [{ target: 'D' }]),
|
||||
createNode('D'),
|
||||
])
|
||||
|
||||
const dirtySet = computeDirtySet(dag, 'B')
|
||||
|
||||
expect(dirtySet.has('A')).toBe(false)
|
||||
expect(dirtySet.has('B')).toBe(true)
|
||||
expect(dirtySet.has('C')).toBe(false)
|
||||
expect(dirtySet.has('D')).toBe(true)
|
||||
expect(dirtySet.size).toBe(2)
|
||||
})
|
||||
|
||||
it('handles running from convergent block itself (all upstream non-dirty)', () => {
|
||||
// A → C
|
||||
// B → C
|
||||
// Running from C should only include C
|
||||
const dag = createDAG([
|
||||
createNode('A', [{ target: 'C' }]),
|
||||
createNode('B', [{ target: 'C' }]),
|
||||
createNode('C', [{ target: 'D' }]),
|
||||
createNode('D'),
|
||||
])
|
||||
|
||||
const dirtySet = computeDirtySet(dag, 'C')
|
||||
|
||||
expect(dirtySet.has('A')).toBe(false)
|
||||
expect(dirtySet.has('B')).toBe(false)
|
||||
expect(dirtySet.has('C')).toBe(true)
|
||||
expect(dirtySet.has('D')).toBe(true)
|
||||
expect(dirtySet.size).toBe(2)
|
||||
})
|
||||
|
||||
it('handles deep downstream chains', () => {
|
||||
// A → B → C → D → E → F
|
||||
// Running from C should include C, D, E, F
|
||||
const dag = createDAG([
|
||||
createNode('A', [{ target: 'B' }]),
|
||||
createNode('B', [{ target: 'C' }]),
|
||||
createNode('C', [{ target: 'D' }]),
|
||||
createNode('D', [{ target: 'E' }]),
|
||||
createNode('E', [{ target: 'F' }]),
|
||||
createNode('F'),
|
||||
])
|
||||
|
||||
const dirtySet = computeDirtySet(dag, 'C')
|
||||
|
||||
expect(dirtySet.has('A')).toBe(false)
|
||||
expect(dirtySet.has('B')).toBe(false)
|
||||
expect(dirtySet.has('C')).toBe(true)
|
||||
expect(dirtySet.has('D')).toBe(true)
|
||||
expect(dirtySet.has('E')).toBe(true)
|
||||
expect(dirtySet.has('F')).toBe(true)
|
||||
expect(dirtySet.size).toBe(4)
|
||||
})
|
||||
})
|
||||
|
||||
describe('validateRunFromBlock', () => {
  it('accepts valid block', () => {
    const dag = createDAG([createNode('A'), createNode('B')])
    const executedBlocks = new Set(['A', 'B'])

    const result = validateRunFromBlock('A', dag, executedBlocks)

    expect(result.valid).toBe(true)
    expect(result.error).toBeUndefined()
  })

  it('rejects block not found in DAG', () => {
    const dag = createDAG([createNode('A')])
    const executedBlocks = new Set(['A', 'B'])

    const result = validateRunFromBlock('B', dag, executedBlocks)

    expect(result.valid).toBe(false)
    expect(result.error).toContain('Block not found')
  })

  it('rejects blocks inside loops', () => {
    const dag = createDAG([createNode('A', [], { isLoopNode: true, loopId: 'loop-1' })])
    const executedBlocks = new Set(['A'])

    const result = validateRunFromBlock('A', dag, executedBlocks)

    expect(result.valid).toBe(false)
    expect(result.error).toContain('inside loop')
    expect(result.error).toContain('loop-1')
  })

  it('rejects blocks inside parallels', () => {
    const dag = createDAG([
      createNode('A', [], { isParallelBranch: true, parallelId: 'parallel-1' }),
    ])
    const executedBlocks = new Set(['A'])

    const result = validateRunFromBlock('A', dag, executedBlocks)

    expect(result.valid).toBe(false)
    expect(result.error).toContain('inside parallel')
    expect(result.error).toContain('parallel-1')
  })

  it('rejects sentinel nodes', () => {
    const dag = createDAG([createNode('A', [], { isSentinel: true, sentinelType: 'start' })])
    const executedBlocks = new Set(['A'])

    const result = validateRunFromBlock('A', dag, executedBlocks)

    expect(result.valid).toBe(false)
    expect(result.error).toContain('sentinel')
  })

  it('rejects blocks with unexecuted upstream dependencies', () => {
    // A → B; B depends on A, but A was never executed
    const dag = createDAG([createNode('A', [{ target: 'B' }]), createNode('B')])
    const executedBlocks = new Set<string>() // A was not executed

    const result = validateRunFromBlock('B', dag, executedBlocks)

    expect(result.valid).toBe(false)
    expect(result.error).toContain('Upstream dependency not executed')
  })

  it('allows blocks with no dependencies even if not previously executed', () => {
    // A and B are independent (no edges)
    const dag = createDAG([createNode('A'), createNode('B')])
    const executedBlocks = new Set(['A']) // B was not executed but has no deps

    const result = validateRunFromBlock('B', dag, executedBlocks)

    expect(result.valid).toBe(true) // B has no incoming edges, so it's valid
  })

  it('accepts regular executed block', () => {
    const dag = createDAG([
      createNode('trigger', [{ target: 'A' }]),
      createNode('A', [{ target: 'B' }]),
      createNode('B'),
    ])
    const executedBlocks = new Set(['trigger', 'A', 'B'])

    const result = validateRunFromBlock('A', dag, executedBlocks)

    expect(result.valid).toBe(true)
  })

  it('accepts loop container when executed', () => {
    // Loop container with sentinel nodes
    const loopId = 'loop-container-1'
    const sentinelStartId = `loop-${loopId}-sentinel-start`
    const sentinelEndId = `loop-${loopId}-sentinel-end`
    const dag = createDAG([
      createNode('A', [{ target: sentinelStartId }]),
      createNode(sentinelStartId, [{ target: 'B' }], {
        isSentinel: true,
        sentinelType: 'start',
        loopId,
      }),
      createNode('B', [{ target: sentinelEndId }], { isLoopNode: true, loopId }),
      createNode(sentinelEndId, [{ target: 'C' }], {
        isSentinel: true,
        sentinelType: 'end',
        loopId,
      }),
      createNode('C'),
    ])
    dag.loopConfigs.set(loopId, { id: loopId, nodes: ['B'], iterations: 3, loopType: 'for' } as any)
    const executedBlocks = new Set(['A', loopId, sentinelStartId, 'B', sentinelEndId, 'C'])

    const result = validateRunFromBlock(loopId, dag, executedBlocks)

    expect(result.valid).toBe(true)
  })

  it('accepts parallel container when executed', () => {
    // Parallel container with sentinel nodes
    const parallelId = 'parallel-container-1'
    const sentinelStartId = `parallel-${parallelId}-sentinel-start`
    const sentinelEndId = `parallel-${parallelId}-sentinel-end`
    const dag = createDAG([
      createNode('A', [{ target: sentinelStartId }]),
      createNode(sentinelStartId, [{ target: 'B₍0₎' }], {
        isSentinel: true,
        sentinelType: 'start',
        parallelId,
      }),
      createNode('B₍0₎', [{ target: sentinelEndId }], { isParallelBranch: true, parallelId }),
      createNode(sentinelEndId, [{ target: 'C' }], {
        isSentinel: true,
        sentinelType: 'end',
        parallelId,
      }),
      createNode('C'),
    ])
    dag.parallelConfigs.set(parallelId, { id: parallelId, nodes: ['B'], count: 2 } as any)
    const executedBlocks = new Set(['A', parallelId, sentinelStartId, 'B₍0₎', sentinelEndId, 'C'])

    const result = validateRunFromBlock(parallelId, dag, executedBlocks)

    expect(result.valid).toBe(true)
  })

  it('allows loop container with no upstream dependencies', () => {
    // Loop containers are validated via their sentinel nodes, not incoming edges on the container itself
    // If the loop has no upstream dependencies, it should be valid
    const loopId = 'loop-container-1'
    const sentinelStartId = `loop-${loopId}-sentinel-start`
    const dag = createDAG([
      createNode(sentinelStartId, [], { isSentinel: true, sentinelType: 'start', loopId }),
    ])
    dag.loopConfigs.set(loopId, { id: loopId, nodes: [], iterations: 3, loopType: 'for' } as any)
    const executedBlocks = new Set<string>() // Nothing executed but loop has no deps

    const result = validateRunFromBlock(loopId, dag, executedBlocks)

    // Loop container validation doesn't check incoming edges (containers don't have nodes in dag.nodes)
    // So this is valid - the loop can start fresh
    expect(result.valid).toBe(true)
  })
})

describe('computeDirtySet with containers', () => {
  it('includes loop container and all downstream when running from loop', () => {
    // A → loop-sentinel-start → B (inside loop) → loop-sentinel-end → C
    const loopId = 'loop-1'
    const sentinelStartId = `loop-${loopId}-sentinel-start`
    const sentinelEndId = `loop-${loopId}-sentinel-end`
    const dag = createDAG([
      createNode('A', [{ target: sentinelStartId }]),
      createNode(sentinelStartId, [{ target: 'B' }], {
        isSentinel: true,
        sentinelType: 'start',
        loopId,
      }),
      createNode('B', [{ target: sentinelEndId }], { isLoopNode: true, loopId }),
      createNode(sentinelEndId, [{ target: 'C' }], {
        isSentinel: true,
        sentinelType: 'end',
        loopId,
      }),
      createNode('C'),
    ])
    dag.loopConfigs.set(loopId, { id: loopId, nodes: ['B'], iterations: 3, loopType: 'for' } as any)

    const dirtySet = computeDirtySet(dag, loopId)

    // Should include loop container, sentinel-start, B, sentinel-end, C
    expect(dirtySet.has(loopId)).toBe(true)
    expect(dirtySet.has(sentinelStartId)).toBe(true)
    expect(dirtySet.has('B')).toBe(true)
    expect(dirtySet.has(sentinelEndId)).toBe(true)
    expect(dirtySet.has('C')).toBe(true)
    // Should NOT include A (upstream)
    expect(dirtySet.has('A')).toBe(false)
  })

  it('includes parallel container and all downstream when running from parallel', () => {
    // A → parallel-sentinel-start → B₍0₎ → parallel-sentinel-end → C
    const parallelId = 'parallel-1'
    const sentinelStartId = `parallel-${parallelId}-sentinel-start`
    const sentinelEndId = `parallel-${parallelId}-sentinel-end`
    const dag = createDAG([
      createNode('A', [{ target: sentinelStartId }]),
      createNode(sentinelStartId, [{ target: 'B₍0₎' }], {
        isSentinel: true,
        sentinelType: 'start',
        parallelId,
      }),
      createNode('B₍0₎', [{ target: sentinelEndId }], { isParallelBranch: true, parallelId }),
      createNode(sentinelEndId, [{ target: 'C' }], {
        isSentinel: true,
        sentinelType: 'end',
        parallelId,
      }),
      createNode('C'),
    ])
    dag.parallelConfigs.set(parallelId, { id: parallelId, nodes: ['B'], count: 2 } as any)

    const dirtySet = computeDirtySet(dag, parallelId)

    // Should include parallel container, sentinel-start, B₍0₎, sentinel-end, C
    expect(dirtySet.has(parallelId)).toBe(true)
    expect(dirtySet.has(sentinelStartId)).toBe(true)
    expect(dirtySet.has('B₍0₎')).toBe(true)
    expect(dirtySet.has(sentinelEndId)).toBe(true)
    expect(dirtySet.has('C')).toBe(true)
    // Should NOT include A (upstream)
    expect(dirtySet.has('A')).toBe(false)
  })
})

169 apps/sim/executor/utils/run-from-block.ts Normal file
@@ -0,0 +1,169 @@
import { createLogger } from '@sim/logger'
import { LOOP, PARALLEL } from '@/executor/constants'
import type { DAG } from '@/executor/dag/builder'

const logger = createLogger('run-from-block')

/**
 * Builds the sentinel-start node ID for a loop.
 */
function buildLoopSentinelStartId(loopId: string): string {
  return `${LOOP.SENTINEL.PREFIX}${loopId}${LOOP.SENTINEL.START_SUFFIX}`
}

/**
 * Builds the sentinel-start node ID for a parallel.
 */
function buildParallelSentinelStartId(parallelId: string): string {
  return `${PARALLEL.SENTINEL.PREFIX}${parallelId}${PARALLEL.SENTINEL.START_SUFFIX}`
}

/**
 * Checks if a block ID is a loop or parallel container and returns the sentinel-start ID if so.
 * Returns null if the block is not a container.
 */
export function resolveContainerToSentinelStart(blockId: string, dag: DAG): string | null {
  if (dag.loopConfigs.has(blockId)) {
    return buildLoopSentinelStartId(blockId)
  }
  if (dag.parallelConfigs.has(blockId)) {
    return buildParallelSentinelStartId(blockId)
  }
  return null
}

/**
 * Result of validating a block for run-from-block execution.
 */
export interface RunFromBlockValidation {
  valid: boolean
  error?: string
}

/**
 * Context for run-from-block execution mode.
 */
export interface RunFromBlockContext {
  /** The block ID to start execution from */
  startBlockId: string
  /** Set of block IDs that need re-execution (start block + all downstream) */
  dirtySet: Set<string>
}

/**
 * Computes all blocks that need re-execution when running from a specific block.
 * Uses BFS to find all downstream blocks reachable via outgoing edges.
 *
 * For loop/parallel containers, starts from the sentinel-start node and includes
 * the container ID itself in the dirty set.
 *
 * @param dag - The workflow DAG
 * @param startBlockId - The block to start execution from
 * @returns Set of block IDs that are "dirty" and need re-execution
 */
export function computeDirtySet(dag: DAG, startBlockId: string): Set<string> {
  const dirty = new Set<string>([startBlockId])
  const sentinelStartId = resolveContainerToSentinelStart(startBlockId, dag)
  const traversalStartId = sentinelStartId ?? startBlockId

  if (sentinelStartId) {
    dirty.add(sentinelStartId)
  }

  const queue = [traversalStartId]

  while (queue.length > 0) {
    const nodeId = queue.shift()!
    const node = dag.nodes.get(nodeId)
    if (!node) continue

    for (const [, edge] of node.outgoingEdges) {
      if (!dirty.has(edge.target)) {
        dirty.add(edge.target)
        queue.push(edge.target)
      }
    }
  }

  logger.debug('Computed dirty set', {
    startBlockId,
    traversalStartId,
    dirtySetSize: dirty.size,
    dirtyBlocks: Array.from(dirty),
  })

  return dirty
}
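
A quick sanity check of the BFS semantics above, using the createDAG/createNode helpers from the tests (a sketch, not part of the diff):

// A → B → C; running from B marks only B and its downstream C as dirty.
const dag = createDAG([
  createNode('A', [{ target: 'B' }]),
  createNode('B', [{ target: 'C' }]),
  createNode('C'),
])
computeDirtySet(dag, 'B') // => Set { 'B', 'C' }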

/**
 * Validates that a block can be used as a run-from-block starting point.
 *
 * Validation rules:
 * - Block must exist in the DAG (or be a loop/parallel container)
 * - Block cannot be inside a loop (but loop containers are allowed)
 * - Block cannot be inside a parallel (but parallel containers are allowed)
 * - Block cannot be a sentinel node
 * - All upstream dependencies must have been executed (have cached outputs)
 *
 * @param blockId - The block ID to validate
 * @param dag - The workflow DAG
 * @param executedBlocks - Set of blocks that were executed in the source run
 * @returns Validation result with error message if invalid
 */
export function validateRunFromBlock(
  blockId: string,
  dag: DAG,
  executedBlocks: Set<string>
): RunFromBlockValidation {
  const node = dag.nodes.get(blockId)
  const isLoopContainer = dag.loopConfigs.has(blockId)
  const isParallelContainer = dag.parallelConfigs.has(blockId)
  const isContainer = isLoopContainer || isParallelContainer

  if (!node && !isContainer) {
    return { valid: false, error: `Block not found in workflow: ${blockId}` }
  }

  if (isContainer) {
    const sentinelStartId = resolveContainerToSentinelStart(blockId, dag)
    if (!sentinelStartId || !dag.nodes.has(sentinelStartId)) {
      return {
        valid: false,
        error: `Container sentinel not found for: ${blockId}`,
      }
    }
  }

  if (node) {
    if (node.metadata.isLoopNode) {
      return {
        valid: false,
        error: `Cannot run from block inside loop: ${node.metadata.loopId}`,
      }
    }

    if (node.metadata.isParallelBranch) {
      return {
        valid: false,
        error: `Cannot run from block inside parallel: ${node.metadata.parallelId}`,
      }
    }

    if (node.metadata.isSentinel) {
      return { valid: false, error: 'Cannot run from sentinel node' }
    }

    if (node.incomingEdges.size > 0) {
      for (const sourceId of node.incomingEdges.keys()) {
        if (!executedBlocks.has(sourceId)) {
          return {
            valid: false,
            error: `Upstream dependency not executed: ${sourceId}`,
          }
        }
      }
    }
  }

  return { valid: true }
}
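
Callers would validate before computing the dirty set. A minimal sketch (the wrapper name buildRunFromBlockContext is hypothetical, not from this diff):

function buildRunFromBlockContext(
  blockId: string,
  dag: DAG,
  executedBlocks: Set<string>
): RunFromBlockContext {
  const check = validateRunFromBlock(blockId, dag, executedBlocks)
  if (!check.valid) throw new Error(check.error) // surface the validation message
  return { startBlockId: blockId, dirtySet: computeDirtySet(dag, blockId) }
}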

@@ -378,8 +378,30 @@ function buildManualTriggerOutput(
  return mergeFilesIntoOutput(output, workflowInput)
}

function buildIntegrationTriggerOutput(workflowInput: unknown): NormalizedBlockOutput {
  return isPlainObject(workflowInput) ? (workflowInput as NormalizedBlockOutput) : {}
function buildIntegrationTriggerOutput(
  workflowInput: unknown,
  structuredInput: Record<string, unknown>,
  hasStructured: boolean
): NormalizedBlockOutput {
  const output: NormalizedBlockOutput = {}

  if (hasStructured) {
    for (const [key, value] of Object.entries(structuredInput)) {
      output[key] = value
    }
  }

  if (isPlainObject(workflowInput)) {
    for (const [key, value] of Object.entries(workflowInput)) {
      if (value !== undefined && value !== null) {
        output[key] = value
      } else if (!Object.hasOwn(output, key)) {
        output[key] = value
      }
    }
  }

  return mergeFilesIntoOutput(output, workflowInput)
}
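
The merge order lays down structured-input defaults first, then lets concrete (non-null) runtime values win; null/undefined runtime values only land on keys the structured input did not set. For example (hypothetical inputs, not from this diff):

// structuredInput { a: 1, b: 2 } merged with workflowInput { b: 3, c: null }
buildIntegrationTriggerOutput({ b: 3, c: null }, { a: 1, b: 2 }, true)
// => { a: 1, b: 3, c: null } (plus whatever mergeFilesIntoOutput adds)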

function extractSubBlocks(block: SerializedBlock): Record<string, unknown> | undefined {
@@ -428,7 +450,7 @@ export function buildStartBlockOutput(options: StartBlockOutputOptions): Normali
      return buildManualTriggerOutput(finalInput, workflowInput)

    case StartBlockPath.EXTERNAL_TRIGGER:
      return buildIntegrationTriggerOutput(workflowInput)
      return buildIntegrationTriggerOutput(workflowInput, structuredInput, hasStructured)

    case StartBlockPath.LEGACY_STARTER:
      return buildLegacyStarterOutput(

@@ -157,7 +157,14 @@ export class VariableResolver {

    let replacementError: Error | null = null

    // Use generic utility for smart variable reference replacement
    const blockType = block?.metadata?.id
    const language =
      blockType === BlockType.FUNCTION
        ? ((block?.config?.params as Record<string, unknown> | undefined)?.language as
            | string
            | undefined)
        : undefined

    let result = replaceValidReferences(template, (match) => {
      if (replacementError) return match

@@ -167,14 +174,7 @@ export class VariableResolver {
          return match
        }

        const blockType = block?.metadata?.id
        const isInTemplateLiteral =
          blockType === BlockType.FUNCTION &&
          template.includes('${') &&
          template.includes('}') &&
          template.includes('`')

        return this.blockResolver.formatValueForBlock(resolved, blockType, isInTemplateLiteral)
        return this.blockResolver.formatValueForBlock(resolved, blockType, language)
      } catch (error) {
        replacementError = error instanceof Error ? error : new Error(String(error))
        return match

@@ -257,15 +257,9 @@ describe('BlockResolver', () => {
    expect(result).toBe('"hello"')
  })

  it.concurrent('should format string for function block in template literal', () => {
  it.concurrent('should format object for function block', () => {
    const resolver = new BlockResolver(createTestWorkflow())
    const result = resolver.formatValueForBlock('hello', 'function', true)
    expect(result).toBe('hello')
  })

  it.concurrent('should format object for function block in template literal', () => {
    const resolver = new BlockResolver(createTestWorkflow())
    const result = resolver.formatValueForBlock({ a: 1 }, 'function', true)
    const result = resolver.formatValueForBlock({ a: 1 }, 'function')
    expect(result).toBe('{"a":1}')
  })

@@ -1,15 +1,16 @@
import { getBlockOutputs } from '@/lib/workflows/blocks/block-outputs'
import {
  isReference,
  normalizeName,
  parseReferencePath,
  SPECIAL_REFERENCE_PREFIXES,
} from '@/executor/constants'
import { getBlockSchema } from '@/executor/utils/block-data'
import {
  InvalidFieldError,
  type OutputSchema,
  resolveBlockReference,
} from '@/executor/utils/block-reference'
import { formatLiteralForCode } from '@/executor/utils/code-formatting'
import {
  navigatePath,
  type ResolutionContext,
@@ -67,15 +68,9 @@ export class BlockResolver implements Resolver {
      blockData[blockId] = output
    }

    const blockType = block.metadata?.id
    const params = block.config?.params as Record<string, unknown> | undefined
    const subBlocks = params
      ? Object.fromEntries(Object.entries(params).map(([k, v]) => [k, { value: v }]))
      : undefined
    const toolId = block.config?.tool
    const toolConfig = toolId ? getTool(toolId) : undefined
    const outputSchema =
      toolConfig?.outputs ?? (blockType ? getBlockOutputs(blockType, subBlocks) : block.outputs)
    const outputSchema = getBlockSchema(block, toolConfig)

    if (outputSchema && Object.keys(outputSchema).length > 0) {
      blockOutputSchemas[blockId] = outputSchema
@@ -165,17 +160,13 @@ export class BlockResolver implements Resolver {
    return this.nameToBlockId.get(normalizeName(name))
  }

  public formatValueForBlock(
    value: any,
    blockType: string | undefined,
    isInTemplateLiteral = false
  ): string {
  public formatValueForBlock(value: any, blockType: string | undefined, language?: string): string {
    if (blockType === 'condition') {
      return this.stringifyForCondition(value)
    }

    if (blockType === 'function') {
      return this.formatValueForCodeContext(value, isInTemplateLiteral)
      return this.formatValueForCodeContext(value, language)
    }

    if (blockType === 'response') {
@@ -216,29 +207,7 @@ export class BlockResolver implements Resolver {
    return String(value)
  }

  private formatValueForCodeContext(value: any, isInTemplateLiteral: boolean): string {
    if (isInTemplateLiteral) {
      if (typeof value === 'string') {
        return value
      }
      if (typeof value === 'object' && value !== null) {
        return JSON.stringify(value)
      }
      return String(value)
    }

    if (typeof value === 'string') {
      return JSON.stringify(value)
    }
    if (typeof value === 'object' && value !== null) {
      return JSON.stringify(value)
    }
    if (value === undefined) {
      return 'undefined'
    }
    if (value === null) {
      return 'null'
    }
    return String(value)
  private formatValueForCodeContext(value: any, language?: string): string {
    return formatLiteralForCode(value, language === 'python' ? 'python' : 'javascript')
  }
}
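
The old boolean flag guessed at template-literal context; the new signature formats by target language instead. Roughly the expected behavior (illustrative only; exact output depends on formatLiteralForCode):

// formatValueForBlock('hi', 'function')            → '"hi"' (a JS string literal)
// formatValueForBlock(null, 'function', 'python')  → presumably 'None' rather than 'null'
// formatValueForBlock({ a: 1 }, 'function')        → '{"a":1}' or an equivalent literal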

@@ -30,7 +30,10 @@ export function navigatePath(obj: any, path: string[]): any {
    const arrayMatch = part.match(/^([^[]+)(\[.+)$/)
    if (arrayMatch) {
      const [, prop, bracketsPart] = arrayMatch
      current = current[prop]
      current =
        typeof current === 'object' && current !== null
          ? (current as Record<string, unknown>)[prop]
          : undefined
      if (current === undefined || current === null) {
        return undefined
      }
@@ -49,7 +52,10 @@ export function navigatePath(obj: any, path: string[]): any {
      const index = Number.parseInt(part, 10)
      current = Array.isArray(current) ? current[index] : undefined
    } else {
      current = current[part]
      current =
        typeof current === 'object' && current !== null
          ? (current as Record<string, unknown>)[part]
          : undefined
    }
  }
  return current
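
The guarded access means indexing into a primitive now short-circuits to undefined instead of leaking through JavaScript's property lookup. A before/after sketch (hypothetical input):

// navigatePath({ a: 'text' }, ['a', 'length'])
//   before: 4 (string property access slipped through current[part])
//   after:  undefined (only object/array values are traversed)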

@@ -1,5 +1,6 @@
import { useCallback } from 'react'
import type { Node, ReactFlowInstance } from 'reactflow'
import { BLOCK_DIMENSIONS } from '@/lib/workflows/blocks/block-dimensions'

interface VisibleBounds {
  width: number
@@ -139,8 +140,8 @@ export function useCanvasViewport(reactFlowInstance: ReactFlowInstance | null) {
    let maxY = Number.NEGATIVE_INFINITY

    nodes.forEach((node) => {
      const nodeWidth = node.width ?? 200
      const nodeHeight = node.height ?? 100
      const nodeWidth = node.width ?? BLOCK_DIMENSIONS.FIXED_WIDTH
      const nodeHeight = node.height ?? BLOCK_DIMENSIONS.MIN_HEIGHT

      minX = Math.min(minX, node.position.x)
      minY = Math.min(minY, node.position.y)

@@ -24,7 +24,7 @@ import { useUndoRedoStore } from '@/stores/undo-redo'
import { useWorkflowDiffStore } from '@/stores/workflow-diff/store'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
import { filterNewEdges, mergeSubblockState } from '@/stores/workflows/utils'
import { filterNewEdges, filterValidEdges, mergeSubblockState } from '@/stores/workflows/utils'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
import type { BlockState, Loop, Parallel, Position } from '@/stores/workflows/workflow/types'

@@ -226,9 +226,12 @@ export function useCollaborativeWorkflow() {
      case EDGES_OPERATIONS.BATCH_ADD_EDGES: {
        const { edges } = payload
        if (Array.isArray(edges) && edges.length > 0) {
          const newEdges = filterNewEdges(edges, useWorkflowStore.getState().edges)
          const blocks = useWorkflowStore.getState().blocks
          const currentEdges = useWorkflowStore.getState().edges
          const validEdges = filterValidEdges(edges, blocks)
          const newEdges = filterNewEdges(validEdges, currentEdges)
          if (newEdges.length > 0) {
            useWorkflowStore.getState().batchAddEdges(newEdges)
            useWorkflowStore.getState().batchAddEdges(newEdges, { skipValidation: true })
          }
        }
        break
@@ -677,6 +680,10 @@ export function useCollaborativeWorkflow() {
      previousPositions?: Map<string, { x: number; y: number; parentId?: string }>
    }
    ) => {
      if (isBaselineDiffView) {
        return
      }

      if (!isInActiveRoom()) {
        logger.debug('Skipping batch position update - not in active workflow')
        return
@@ -722,7 +729,7 @@
        }
      }
    },
    [addToQueue, activeWorkflowId, session?.user?.id, isInActiveRoom, undoRedo]
    [isBaselineDiffView, addToQueue, activeWorkflowId, session?.user?.id, isInActiveRoom, undoRedo]
  )

  const collaborativeUpdateBlockName = useCallback(
@@ -814,6 +821,10 @@ export function useCollaborativeWorkflow() {

  const collaborativeBatchToggleBlockEnabled = useCallback(
    (ids: string[]) => {
      if (isBaselineDiffView) {
        return
      }

      if (ids.length === 0) return

      const previousStates: Record<string, boolean> = {}
@@ -846,7 +857,7 @@ export function useCollaborativeWorkflow() {

      undoRedo.recordBatchToggleEnabled(validIds, previousStates)
    },
    [addToQueue, activeWorkflowId, session?.user?.id, undoRedo]
    [isBaselineDiffView, addToQueue, activeWorkflowId, session?.user?.id, undoRedo]
  )

  const collaborativeBatchUpdateParent = useCallback(
@@ -858,6 +869,10 @@ export function useCollaborativeWorkflow() {
      affectedEdges: Edge[]
    }>
    ) => {
      if (isBaselineDiffView) {
        return
      }

      if (!isInActiveRoom()) {
        logger.debug('Skipping batch update parent - not in active workflow')
        return
@@ -928,7 +943,7 @@ export function useCollaborativeWorkflow() {

      logger.debug('Batch updated parent for blocks', { updateCount: updates.length })
    },
    [isInActiveRoom, undoRedo, addToQueue, activeWorkflowId, session?.user?.id]
    [isBaselineDiffView, isInActiveRoom, undoRedo, addToQueue, activeWorkflowId, session?.user?.id]
  )

  const collaborativeToggleBlockAdvancedMode = useCallback(
@@ -948,18 +963,37 @@ export function useCollaborativeWorkflow() {

  const collaborativeSetBlockCanonicalMode = useCallback(
    (id: string, canonicalId: string, canonicalMode: 'basic' | 'advanced') => {
      executeQueuedOperation(
        BLOCK_OPERATIONS.UPDATE_CANONICAL_MODE,
        OPERATION_TARGETS.BLOCK,
        { id, canonicalId, canonicalMode },
        () => useWorkflowStore.getState().setBlockCanonicalMode(id, canonicalId, canonicalMode)
      )
      if (isBaselineDiffView) {
        return
      }

      useWorkflowStore.getState().setBlockCanonicalMode(id, canonicalId, canonicalMode)

      if (!activeWorkflowId) {
        return
      }

      const operationId = crypto.randomUUID()
      addToQueue({
        id: operationId,
        operation: {
          operation: BLOCK_OPERATIONS.UPDATE_CANONICAL_MODE,
          target: OPERATION_TARGETS.BLOCK,
          payload: { id, canonicalId, canonicalMode },
        },
        workflowId: activeWorkflowId,
        userId: session?.user?.id || 'unknown',
      })
    },
    [executeQueuedOperation]
    [isBaselineDiffView, activeWorkflowId, addToQueue, session?.user?.id]
  )

  const collaborativeBatchToggleBlockHandles = useCallback(
    (ids: string[]) => {
      if (isBaselineDiffView) {
        return
      }

      if (ids.length === 0) return

      const previousStates: Record<string, boolean> = {}
@@ -992,11 +1026,15 @@ export function useCollaborativeWorkflow() {

      undoRedo.recordBatchToggleHandles(validIds, previousStates)
    },
    [addToQueue, activeWorkflowId, session?.user?.id, undoRedo]
    [isBaselineDiffView, addToQueue, activeWorkflowId, session?.user?.id, undoRedo]
  )

  const collaborativeBatchAddEdges = useCallback(
    (edges: Edge[], options?: { skipUndoRedo?: boolean }) => {
      if (isBaselineDiffView) {
        return false
      }

      if (!isInActiveRoom()) {
        logger.debug('Skipping batch add edges - not in active workflow')
        return false
@@ -1004,7 +1042,11 @@ export function useCollaborativeWorkflow() {

      if (edges.length === 0) return false

      const newEdges = filterNewEdges(edges, useWorkflowStore.getState().edges)
      // Filter out invalid edges (e.g., edges targeting trigger blocks) and duplicates
      const blocks = useWorkflowStore.getState().blocks
      const currentEdges = useWorkflowStore.getState().edges
      const validEdges = filterValidEdges(edges, blocks)
      const newEdges = filterNewEdges(validEdges, currentEdges)
      if (newEdges.length === 0) return false

      const operationId = crypto.randomUUID()
@@ -1020,7 +1062,7 @@ export function useCollaborativeWorkflow() {
        userId: session?.user?.id || 'unknown',
      })

      useWorkflowStore.getState().batchAddEdges(newEdges)
      useWorkflowStore.getState().batchAddEdges(newEdges, { skipValidation: true })

      if (!options?.skipUndoRedo) {
        newEdges.forEach((edge) => undoRedo.recordAddEdge(edge.id))
@@ -1028,11 +1070,15 @@ export function useCollaborativeWorkflow() {

      return true
    },
    [addToQueue, activeWorkflowId, session?.user?.id, isInActiveRoom, undoRedo]
    [isBaselineDiffView, addToQueue, activeWorkflowId, session?.user?.id, isInActiveRoom, undoRedo]
  )

  const collaborativeBatchRemoveEdges = useCallback(
    (edgeIds: string[], options?: { skipUndoRedo?: boolean }) => {
      if (isBaselineDiffView) {
        return false
      }

      if (!isInActiveRoom()) {
        logger.debug('Skipping batch remove edges - not in active workflow')
        return false
@@ -1082,7 +1128,7 @@ export function useCollaborativeWorkflow() {
      logger.info('Batch removed edges', { count: validEdgeIds.length })
      return true
    },
    [isInActiveRoom, addToQueue, activeWorkflowId, session, undoRedo]
    [isBaselineDiffView, isInActiveRoom, addToQueue, activeWorkflowId, session, undoRedo]
  )

  const collaborativeSetSubblockValue = useCallback(
@@ -1158,6 +1204,10 @@ export function useCollaborativeWorkflow() {
    (blockId: string, subblockId: string, value: any) => {
      if (isApplyingRemoteChange.current) return

      if (isBaselineDiffView) {
        return
      }

      if (!isInActiveRoom()) {
        logger.debug('Skipping tag selection - not in active workflow', {
          currentWorkflowId,
@@ -1185,7 +1235,14 @@ export function useCollaborativeWorkflow() {
        userId: session?.user?.id || 'unknown',
      })
    },
    [addToQueue, currentWorkflowId, activeWorkflowId, session?.user?.id, isInActiveRoom]
    [
      isBaselineDiffView,
      addToQueue,
      currentWorkflowId,
      activeWorkflowId,
      session?.user?.id,
      isInActiveRoom,
    ]
  )

  const collaborativeUpdateLoopType = useCallback(
@@ -1484,9 +1541,23 @@ export function useCollaborativeWorkflow() {

      if (blocks.length === 0) return false

      // Filter out invalid edges (e.g., edges targeting trigger blocks)
      // Combine existing blocks with new blocks for validation
      const existingBlocks = useWorkflowStore.getState().blocks
      const newBlocksMap = blocks.reduce(
        (acc, block) => {
          acc[block.id] = block
          return acc
        },
        {} as Record<string, BlockState>
      )
      const allBlocks = { ...existingBlocks, ...newBlocksMap }
      const validEdges = filterValidEdges(edges, allBlocks)

      logger.info('Batch adding blocks collaboratively', {
        blockCount: blocks.length,
        edgeCount: edges.length,
        edgeCount: validEdges.length,
        filteredEdges: edges.length - validEdges.length,
      })

      const operationId = crypto.randomUUID()
@@ -1496,16 +1567,18 @@ export function useCollaborativeWorkflow() {
        operation: {
          operation: BLOCKS_OPERATIONS.BATCH_ADD_BLOCKS,
          target: OPERATION_TARGETS.BLOCKS,
          payload: { blocks, edges, loops, parallels, subBlockValues },
          payload: { blocks, edges: validEdges, loops, parallels, subBlockValues },
        },
        workflowId: activeWorkflowId || '',
        userId: session?.user?.id || 'unknown',
      })

      useWorkflowStore.getState().batchAddBlocks(blocks, edges, subBlockValues)
      useWorkflowStore.getState().batchAddBlocks(blocks, validEdges, subBlockValues, {
        skipEdgeValidation: true,
      })

      if (!options?.skipUndoRedo) {
        undoRedo.recordBatchAddBlocks(blocks, edges, subBlockValues)
        undoRedo.recordBatchAddBlocks(blocks, validEdges, subBlockValues)
      }

      return true
@@ -1515,6 +1588,10 @@ export function useCollaborativeWorkflow() {

  const collaborativeBatchRemoveBlocks = useCallback(
    (blockIds: string[], options?: { skipUndoRedo?: boolean }) => {
      if (isBaselineDiffView) {
        return false
      }

      if (!isInActiveRoom()) {
        logger.debug('Skipping batch remove blocks - not in active workflow')
        return false
@@ -1596,6 +1673,7 @@ export function useCollaborativeWorkflow() {
      return true
    },
    [
      isBaselineDiffView,
      addToQueue,
      activeWorkflowId,
      session?.user?.id,

@@ -1,10 +1,85 @@
import { useCallback, useRef } from 'react'
import { createLogger } from '@sim/logger'
import type { ExecutionEvent } from '@/lib/workflows/executor/execution-events'
import type { SerializableExecutionState } from '@/executor/execution/types'
import type { SubflowType } from '@/stores/workflows/workflow/types'

const logger = createLogger('useExecutionStream')

/**
 * Processes SSE events from a response body and invokes appropriate callbacks.
 */
async function processSSEStream(
  reader: ReadableStreamDefaultReader<Uint8Array>,
  callbacks: ExecutionStreamCallbacks,
  logPrefix: string
): Promise<void> {
  const decoder = new TextDecoder()
  let buffer = ''

  try {
    while (true) {
      const { done, value } = await reader.read()

      if (done) break

      buffer += decoder.decode(value, { stream: true })
      const lines = buffer.split('\n\n')
      buffer = lines.pop() || ''

      for (const line of lines) {
        if (!line.trim() || !line.startsWith('data: ')) continue

        const data = line.substring(6).trim()
        if (data === '[DONE]') {
          logger.info(`${logPrefix} stream completed`)
          continue
        }

        try {
          const event = JSON.parse(data) as ExecutionEvent

          switch (event.type) {
            case 'execution:started':
              callbacks.onExecutionStarted?.(event.data)
              break
            case 'execution:completed':
              callbacks.onExecutionCompleted?.(event.data)
              break
            case 'execution:error':
              callbacks.onExecutionError?.(event.data)
              break
            case 'execution:cancelled':
              callbacks.onExecutionCancelled?.(event.data)
              break
            case 'block:started':
              callbacks.onBlockStarted?.(event.data)
              break
            case 'block:completed':
              callbacks.onBlockCompleted?.(event.data)
              break
            case 'block:error':
              callbacks.onBlockError?.(event.data)
              break
            case 'stream:chunk':
              callbacks.onStreamChunk?.(event.data)
              break
            case 'stream:done':
              callbacks.onStreamDone?.(event.data)
              break
            default:
              logger.warn('Unknown event type:', (event as any).type)
          }
        } catch (error) {
          logger.error('Failed to parse SSE event:', error, { data })
        }
      }
    }
  } finally {
    reader.releaseLock()
  }
}
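
For reference, the parser above assumes standard SSE framing: frames separated by blank lines, payloads on data: lines. An illustrative frame that would dispatch onBlockCompleted (field values hypothetical):

// data: {"type":"block:completed","executionId":"...","data":{"blockId":"A"}}
//
// A blank line terminates the frame; "data: [DONE]" marks end of stream.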

export interface ExecutionStreamCallbacks {
  onExecutionStarted?: (data: { startTime: string }) => void
  onExecutionCompleted?: (data: {
@@ -68,6 +143,14 @@ export interface ExecuteStreamOptions {
    loops?: Record<string, any>
    parallels?: Record<string, any>
  }
  stopAfterBlockId?: string
  callbacks?: ExecutionStreamCallbacks
}

export interface ExecuteFromBlockOptions {
  workflowId: string
  startBlockId: string
  sourceSnapshot: SerializableExecutionState
  callbacks?: ExecutionStreamCallbacks
}

@@ -119,91 +202,7 @@ export function useExecutionStream() {
      }

      const reader = response.body.getReader()
      const decoder = new TextDecoder()
      let buffer = ''

      try {
        while (true) {
          const { done, value } = await reader.read()

          if (done) {
            break
          }

          buffer += decoder.decode(value, { stream: true })

          const lines = buffer.split('\n\n')

          buffer = lines.pop() || ''

          for (const line of lines) {
            if (!line.trim() || !line.startsWith('data: ')) {
              continue
            }

            const data = line.substring(6).trim()

            if (data === '[DONE]') {
              logger.info('Stream completed')
              continue
            }

            try {
              const event = JSON.parse(data) as ExecutionEvent

              logger.info('📡 SSE Event received:', {
                type: event.type,
                executionId: event.executionId,
                data: event.data,
              })

              switch (event.type) {
                case 'execution:started':
                  logger.info('🚀 Execution started')
                  callbacks.onExecutionStarted?.(event.data)
                  break
                case 'execution:completed':
                  logger.info('✅ Execution completed')
                  callbacks.onExecutionCompleted?.(event.data)
                  break
                case 'execution:error':
                  logger.error('❌ Execution error')
                  callbacks.onExecutionError?.(event.data)
                  break
                case 'execution:cancelled':
                  logger.warn('🛑 Execution cancelled')
                  callbacks.onExecutionCancelled?.(event.data)
                  break
                case 'block:started':
                  logger.info('🔷 Block started:', event.data.blockId)
                  callbacks.onBlockStarted?.(event.data)
                  break
                case 'block:completed':
                  logger.info('✓ Block completed:', event.data.blockId)
                  callbacks.onBlockCompleted?.(event.data)
                  break
                case 'block:error':
                  logger.error('✗ Block error:', event.data.blockId)
                  callbacks.onBlockError?.(event.data)
                  break
                case 'stream:chunk':
                  callbacks.onStreamChunk?.(event.data)
                  break
                case 'stream:done':
                  logger.info('Stream done:', event.data.blockId)
                  callbacks.onStreamDone?.(event.data)
                  break
                default:
                  logger.warn('Unknown event type:', (event as any).type)
              }
            } catch (error) {
              logger.error('Failed to parse SSE event:', error, { data })
            }
          }
        }
      } finally {
        reader.releaseLock()
      }
      await processSSEStream(reader, callbacks, 'Execution')
    } catch (error: any) {
      if (error.name === 'AbortError') {
        logger.info('Execution stream cancelled')
@@ -222,6 +221,65 @@ export function useExecutionStream() {
      }
    }, [])

  const executeFromBlock = useCallback(async (options: ExecuteFromBlockOptions) => {
    const { workflowId, startBlockId, sourceSnapshot, callbacks = {} } = options

    if (abortControllerRef.current) {
      abortControllerRef.current.abort()
    }

    const abortController = new AbortController()
    abortControllerRef.current = abortController
    currentExecutionRef.current = null

    try {
      const response = await fetch(`/api/workflows/${workflowId}/execute-from-block`, {
        method: 'POST',
        headers: {
          'Content-Type': 'application/json',
        },
        body: JSON.stringify({ startBlockId, sourceSnapshot }),
        signal: abortController.signal,
      })

      if (!response.ok) {
        const errorResponse = await response.json()
        const error = new Error(errorResponse.error || 'Failed to start execution')
        if (errorResponse && typeof errorResponse === 'object') {
          Object.assign(error, { executionResult: errorResponse })
        }
        throw error
      }

      if (!response.body) {
        throw new Error('No response body')
      }

      const executionId = response.headers.get('X-Execution-Id')
      if (executionId) {
        currentExecutionRef.current = { workflowId, executionId }
      }

      const reader = response.body.getReader()
      await processSSEStream(reader, callbacks, 'Run-from-block')
    } catch (error: any) {
      if (error.name === 'AbortError') {
        logger.info('Run-from-block execution cancelled')
        callbacks.onExecutionCancelled?.({ duration: 0 })
      } else {
        logger.error('Run-from-block execution error:', error)
        callbacks.onExecutionError?.({
          error: error.message || 'Unknown error',
          duration: 0,
        })
      }
      throw error
    } finally {
      abortControllerRef.current = null
      currentExecutionRef.current = null
    }
  }, [])

  const cancel = useCallback(() => {
    const execution = currentExecutionRef.current
    if (execution) {
@@ -239,6 +297,7 @@ export function useExecutionStream() {

  return {
    execute,
    executeFromBlock,
    cancel,
  }
}
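
A hypothetical component-side usage of the new entry point (the snapshot variable and callback bodies are illustrative, not from this diff):

const { executeFromBlock, cancel } = useExecutionStream()
await executeFromBlock({
  workflowId,
  startBlockId: 'block-123',        // must pass validateRunFromBlock server-side
  sourceSnapshot: previousRunState, // cached outputs from the source run
  callbacks: {
    onBlockCompleted: (data) => console.log('re-ran', data.blockId),
    onExecutionError: (data) => console.error(data.error),
  },
})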

@@ -11,9 +11,10 @@ import { extractAndPersistCustomTools } from '@/lib/workflows/persistence/custom
import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/persistence/utils'
import { isValidKey } from '@/lib/workflows/sanitization/key-validation'
import { validateWorkflowState } from '@/lib/workflows/sanitization/validation'
import { buildCanonicalIndex, isCanonicalPair } from '@/lib/workflows/subblocks/visibility'
import { TriggerUtils } from '@/lib/workflows/triggers/triggers'
import { getAllBlocks, getBlock } from '@/blocks/registry'
import type { SubBlockConfig } from '@/blocks/types'
import type { BlockConfig, SubBlockConfig } from '@/blocks/types'
import { EDGE, normalizeName, RESERVED_BLOCK_NAMES } from '@/executor/constants'
import { getUserPermissionConfig } from '@/executor/utils/permission-check'
import { generateLoopBlocks, generateParallelBlocks } from '@/stores/workflows/workflow/utils'
@@ -667,11 +668,47 @@ function createBlockFromParams(
        }
      }
    })

    if (validatedInputs) {
      updateCanonicalModesForInputs(blockState, Object.keys(validatedInputs), blockConfig)
    }
  }

  return blockState
}

function updateCanonicalModesForInputs(
  block: { data?: { canonicalModes?: Record<string, 'basic' | 'advanced'> } },
  inputKeys: string[],
  blockConfig: BlockConfig
): void {
  if (!blockConfig.subBlocks?.length) return

  const canonicalIndex = buildCanonicalIndex(blockConfig.subBlocks)
  const canonicalModeUpdates: Record<string, 'basic' | 'advanced'> = {}

  for (const inputKey of inputKeys) {
    const canonicalId = canonicalIndex.canonicalIdBySubBlockId[inputKey]
    if (!canonicalId) continue

    const group = canonicalIndex.groupsById[canonicalId]
    if (!group || !isCanonicalPair(group)) continue

    const isAdvanced = group.advancedIds.includes(inputKey)
    const existingMode = canonicalModeUpdates[canonicalId]

    if (!existingMode || isAdvanced) {
      canonicalModeUpdates[canonicalId] = isAdvanced ? 'advanced' : 'basic'
    }
  }

  if (Object.keys(canonicalModeUpdates).length > 0) {
    if (!block.data) block.data = {}
    if (!block.data.canonicalModes) block.data.canonicalModes = {}
    Object.assign(block.data.canonicalModes, canonicalModeUpdates)
  }
}
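
In effect, each canonical pair resolves to 'advanced' if any provided input key belongs to the pair's advanced side, and 'basic' otherwise. For instance (hypothetical sub-block IDs):

// If the inputs include both 'promptBasic' (basic side) and 'promptAdvanced'
// (advanced side) of the same canonical group, the advanced key wins:
//   canonicalModeUpdates[canonicalId] === 'advanced'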

/**
 * Normalize tools array by adding back fields that were sanitized for training
 */
@@ -1654,6 +1691,15 @@ function applyOperationsToWorkflowState(
          block.data.collection = params.inputs.collection
        }
      }

      const editBlockConfig = getBlock(block.type)
      if (editBlockConfig) {
        updateCanonicalModesForInputs(
          block,
          Object.keys(validationResult.validInputs),
          editBlockConfig
        )
      }
    }

    // Update basic properties
@@ -2256,6 +2302,15 @@ function applyOperationsToWorkflowState(
          existingBlock.subBlocks[key].value = sanitizedValue
        }
      })

      const existingBlockConfig = getBlock(existingBlock.type)
      if (existingBlockConfig) {
        updateCanonicalModesForInputs(
          existingBlock,
          Object.keys(validationResult.validInputs),
          existingBlockConfig
        )
      }
    }
  } else {
    // Special container types (loop, parallel) are not in the block registry but are valid

@@ -132,6 +132,8 @@ async function executeCode(request) {
    for (const [key, value] of Object.entries(contextVariables)) {
      if (value === undefined) {
        await jail.set(key, undefined)
      } else if (value === null) {
        await jail.set(key, null)
      } else {
        await jail.set(key, new ivm.ExternalCopy(value).copyInto())
      }
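
A note on the branch above: undefined and null cross the isolated-vm boundary directly, while other values must be deep-copied into the isolate. A sketch of what the loop does for a given context (contents illustrative, not from this diff):

// const contextVariables = { input: { a: 1 }, missing: undefined, empty: null }
// → jail.set('input', new ivm.ExternalCopy({ a: 1 }).copyInto()) // deep copy
// → jail.set('missing', undefined) // primitives pass through as-is
// → jail.set('empty', null)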