mirror of https://github.com/simstudioai/sim.git
synced 2026-01-25 06:48:12 -05:00

Compare commits: fix/ci ... python-sdk (47 commits)
Commits (SHA1):

d63a5cb504
1bf5ed4586
dc0ed842c4
1952b196a0
fa03d4d818
e14cebeec5
404d8c006e
8bd5d41723
ac91d78834
6f0a093869
c12931bc50
bcf6dc8828
e9c4251c1c
cc2be33d6b
45371e521e
0ce0f98aa5
dff1c9d083
b09f683072
a8bb0db660
af82820a28
4372841797
5e8c843241
7bf3d73ee6
7ffc11a738
be578e2ed7
f415e5edc4
13a6e6c3fa
f5ab7f21ae
bfb6fffe38
4fbec0a43f
585f5e365b
3792bdd252
eb5d1f3e5b
54ab82c8dd
f895bf469b
dd3209af06
b6ba3b50a7
b304233062
57e4b49bd6
e12dd204ed
3d9d9cbc54
0f4ec962ad
4827866f9a
3e697d9ed9
4431a1a484
4d1a9a3f22
eb07a080fb
.github/workflows/ci.yml (vendored): 5 changes
@@ -27,10 +27,11 @@ jobs:
     steps:
       - name: Extract version from commit message
         id: extract
+        env:
+          COMMIT_MSG: ${{ github.event.head_commit.message }}
         run: |
-          COMMIT_MSG="${{ github.event.head_commit.message }}"
           # Only tag versions on main branch
-          if [ "${{ github.ref }}" = "refs/heads/main" ] && [[ "$COMMIT_MSG" =~ ^(v[0-9]+\.[0-9]+\.[0-9]+): ]]; then
+          if [ "$GITHUB_REF" = "refs/heads/main" ] && [[ "$COMMIT_MSG" =~ ^(v[0-9]+\.[0-9]+\.[0-9]+): ]]; then
            VERSION="${BASH_REMATCH[1]}"
            echo "version=${VERSION}" >> $GITHUB_OUTPUT
            echo "is_release=true" >> $GITHUB_OUTPUT
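The hunk above passes the commit message to the step through an `env` entry (`COMMIT_MSG`) instead of interpolating `${{ github.event.head_commit.message }}` directly into the shell script, and it reads the branch from `$GITHUB_REF`; both changes avoid expanding untrusted commit text inside the script body. The tagging rule itself is unchanged. A minimal TypeScript sketch of that rule, for illustration only (the function and names here are assumptions, not part of the workflow):

```typescript
// Hypothetical illustration of the CI tagging rule: a commit on main whose
// message starts with "vX.Y.Z:" produces a version tag; anything else does not.
const RELEASE_PATTERN = /^(v\d+\.\d+\.\d+):/

function extractVersion(ref: string, commitMessage: string): string | null {
  if (ref !== 'refs/heads/main') return null
  const match = RELEASE_PATTERN.exec(commitMessage)
  return match ? match[1] : null
}

// extractVersion('refs/heads/main', 'v1.4.2: fix CI tagging')  -> 'v1.4.2'
// extractVersion('refs/heads/main', 'chore: bump deps')        -> null
```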
@@ -119,6 +119,19 @@ aside#nd-sidebar {
   }
 }
+
+/* Hide TOC popover on tablet/medium screens (768px - 1279px) */
+/* Keeps it visible on mobile (<768px) for easy navigation */
+/* Desktop (>=1280px) already hides it via fumadocs xl:hidden */
+@media (min-width: 768px) and (max-width: 1279px) {
+  #nd-docs-layout {
+    --fd-toc-popover-height: 0px !important;
+  }
+
+  [data-toc-popover] {
+    display: none !important;
+  }
+}

 /* Desktop only: Apply custom navbar offset, sidebar width and margin offsets */
 /* On mobile, let fumadocs handle the layout natively */
 @media (min-width: 1024px) {
@@ -5,45 +5,25 @@ title: Copilot
 import { Callout } from 'fumadocs-ui/components/callout'
 import { Card, Cards } from 'fumadocs-ui/components/card'
 import { Image } from '@/components/ui/image'
-import { MessageCircle, Package, Zap, Infinity as InfinityIcon, Brain, BrainCircuit } from 'lucide-react'
+import { MessageCircle, Hammer, Zap, Globe, Paperclip, History, RotateCcw, Brain } from 'lucide-react'

-Copilot is your in-editor assistant that helps you build and edit workflows with Sim Copilot, as well as understand and improve them. It can:
+Copilot is your in-editor assistant that helps you build and edit workflows. It can:

 - **Explain**: Answer questions about Sim and your current workflow
 - **Guide**: Suggest edits and best practices
-- **Edit**: Make changes to blocks, connections, and settings when you approve
+- **Build**: Add blocks, wire connections, and configure settings
+- **Debug**: Analyze execution issues and optimize performance

 <Callout type="info">
-  Copilot is a Sim-managed service. For self-hosted deployments, generate a Copilot API key in the hosted app (sim.ai → Settings → Copilot)
+  Copilot is a Sim-managed service. For self-hosted deployments:
   1. Go to [sim.ai](https://sim.ai) → Settings → Copilot and generate a Copilot API key
-  2. Set `COPILOT_API_KEY` in your self-hosted environment to that value
+  2. Set `COPILOT_API_KEY` in your self-hosted environment
 </Callout>

-## Context Menu (@)
-
-Use the `@` symbol to reference various resources and give Copilot more context about your workspace:
-
-<Image
-  src="/static/copilot/copilot-menu.png"
-  alt="Copilot context menu showing available reference options"
-  width={600}
-  height={400}
-/>
-
-The `@` menu provides access to:
-- **Chats**: Reference previous copilot conversations
-- **All workflows**: Reference any workflow in your workspace
-- **Workflow Blocks**: Reference specific blocks from workflows
-- **Blocks**: Reference block types and templates
-- **Knowledge**: Reference your uploaded documents and knowledgebase
-- **Docs**: Reference Sim documentation
-- **Templates**: Reference workflow templates
-- **Logs**: Reference execution logs and results
-
-This contextual information helps Copilot provide more accurate and relevant assistance for your specific use case.
-
 ## Modes

+Switch between modes using the mode selector at the bottom of the input area.
+
 <Cards>
   <Card
     title={
@@ -60,113 +40,153 @@ This contextual information helps Copilot provide more accurate and relevant ass
   <Card
     title={
       <span className="inline-flex items-center gap-2">
-        <Package className="h-4 w-4 text-muted-foreground" />
-        Agent
+        <Hammer className="h-4 w-4 text-muted-foreground" />
+        Build
       </span>
     }
   >
     <div className="m-0 text-sm">
-      Build-and-edit mode. Copilot proposes specific edits (add blocks, wire variables, tweak settings) and applies them when you approve.
+      Workflow building mode. Copilot can add blocks, wire connections, edit configurations, and debug issues.
     </div>
   </Card>
 </Cards>

-<div className="flex justify-center">
-  <Image
-    src="/static/copilot/copilot-mode.png"
-    alt="Copilot mode selection interface"
-    width={600}
-    height={400}
-    className="my-6"
-  />
-</div>
-
-## Depth Levels
-
-<Cards>
-  <Card
-    title={
-      <span className="inline-flex items-center gap-2">
-        <Zap className="h-4 w-4 text-muted-foreground" />
-        Fast
-      </span>
-    }
-  >
-    <div className="m-0 text-sm">Quickest and cheapest. Best for small edits, simple workflows, and minor tweaks.</div>
-  </Card>
-  <Card
-    title={
-      <span className="inline-flex items-center gap-2">
-        <InfinityIcon className="h-4 w-4 text-muted-foreground" />
-        Auto
-      </span>
-    }
-  >
-    <div className="m-0 text-sm">Balanced speed and reasoning. Recommended default for most tasks.</div>
-  </Card>
-  <Card
-    title={
-      <span className="inline-flex items-center gap-2">
-        <Brain className="h-4 w-4 text-muted-foreground" />
-        Advanced
-      </span>
-    }
-  >
-    <div className="m-0 text-sm">More reasoning for larger workflows and complex edits while staying performant.</div>
-  </Card>
-  <Card
-    title={
-      <span className="inline-flex items-center gap-2">
-        <BrainCircuit className="h-4 w-4 text-muted-foreground" />
-        Behemoth
-      </span>
-    }
-  >
-    <div className="m-0 text-sm">Maximum reasoning for deep planning, debugging, and complex architectural changes.</div>
-  </Card>
-</Cards>
-
-### Mode Selection Interface
-
-You can easily switch between different reasoning modes using the mode selector in the Copilot interface:
-
-<Image
-  src="/static/copilot/copilot-models.png"
-  alt="Copilot mode selection showing Advanced mode with MAX toggle"
-  width={600}
-  height={300}
-/>
-
-The interface allows you to:
-- **Select reasoning level**: Choose from Fast, Auto, Advanced, or Behemoth
-- **Enable MAX mode**: Toggle for maximum reasoning capabilities when you need the most thorough analysis
-- **See mode descriptions**: Understand what each mode is optimized for
-
-Choose your mode based on the complexity of your task - use Fast for simple questions and Behemoth for complex architectural changes.
-
-## Billing and Cost Calculation
-
-### How Costs Are Calculated
-
-Copilot usage is billed per token from the underlying LLM:
-
-- **Input tokens**: billed at the provider's base rate (**at-cost**)
-- **Output tokens**: billed at **1.5×** the provider's base output rate
-
-```javascript
-copilotCost = (inputTokens × inputPrice + outputTokens × (outputPrice × 1.5)) / 1,000,000
-```
-
-| Component | Rate Applied |
-|----------|----------------------|
-| Input | inputPrice |
-| Output | outputPrice × 1.5 |
-
-<Callout type="warning">
-  Pricing shown reflects rates as of September 4, 2025. Check provider documentation for current pricing.
-</Callout>
+## Models
+
+Select your preferred AI model using the model selector at the bottom right of the input area.
+
+**Available Models:**
+- Claude 4.5 Opus, Sonnet (default), Haiku
+- GPT 5.2 Codex, Pro
+- Gemini 3 Pro
+
+Choose based on your needs: faster models for simple tasks, more capable models for complex workflows.
+
+## Context Menu (@)
+
+Use the `@` symbol to reference resources and give Copilot more context:
+
+| Reference | Description |
+|-----------|-------------|
+| **Chats** | Previous copilot conversations |
+| **Workflows** | Any workflow in your workspace |
+| **Workflow Blocks** | Blocks in the current workflow |
+| **Blocks** | Block types and templates |
+| **Knowledge** | Uploaded documents and knowledge bases |
+| **Docs** | Sim documentation |
+| **Templates** | Workflow templates |
+| **Logs** | Execution logs and results |
+
+Type `@` in the input field to open the context menu, then search or browse to find what you need.
+
+## Slash Commands (/)
+
+Use slash commands for quick actions:
+
+| Command | Description |
+|---------|-------------|
+| `/fast` | Fast mode execution |
+| `/research` | Research and exploration mode |
+| `/actions` | Execute agent actions |
+
+**Web Commands:**
+
+| Command | Description |
+|---------|-------------|
+| `/search` | Search the web |
+| `/read` | Read a specific URL |
+| `/scrape` | Scrape web page content |
+| `/crawl` | Crawl multiple pages |
+
+Type `/` in the input field to see available commands.
+
+## Chat Management
+
+### Starting a New Chat
+
+Click the **+** button in the Copilot header to start a fresh conversation.
+
+### Chat History
+
+Click **History** to view previous conversations grouped by date. You can:
+- Click a chat to resume it
+- Delete chats you no longer need
+
+### Editing Messages
+
+Hover over any of your messages and click **Edit** to modify and resend it. This is useful for refining your prompts.
+
+### Message Queue
+
+If you send a message while Copilot is still responding, it gets queued. You can:
+- View queued messages in the expandable queue panel
+- Send a queued message immediately (aborts current response)
+- Remove messages from the queue
+
+## File Attachments
+
+Click the attachment icon to upload files with your message. Supported file types include:
+- Images (preview thumbnails shown)
+- PDFs
+- Text files, JSON, XML
+- Other document formats
+
+Files are displayed as clickable thumbnails that open in a new tab.
+
+## Checkpoints & Changes
+
+When Copilot makes changes to your workflow, it saves checkpoints so you can revert if needed.
+
+### Viewing Checkpoints
+
+Hover over a Copilot message and click the checkpoints icon to see saved workflow states for that message.
+
+### Reverting Changes
+
+Click **Revert** on any checkpoint to restore your workflow to that state. A confirmation dialog will warn that this action cannot be undone.
+
+### Accepting Changes
+
+When Copilot proposes changes, you can:
+- **Accept**: Apply the proposed changes (`Mod+Shift+Enter`)
+- **Reject**: Dismiss the changes and keep your current workflow
+
+## Thinking Blocks
+
+For complex requests, Copilot may show its reasoning process in expandable thinking blocks:
+
+- Blocks auto-expand while Copilot is thinking
+- Click to manually expand/collapse
+- Shows duration of the thinking process
+- Helps you understand how Copilot arrived at its solution
+
+## Options Selection
+
+When Copilot presents multiple options, you can select using:
+
+| Control | Action |
+|---------|--------|
+| **1-9** | Select option by number |
+| **Arrow Up/Down** | Navigate between options |
+| **Enter** | Select highlighted option |
+
+Selected options are highlighted; unselected options appear struck through.
+
+## Keyboard Shortcuts
+
+| Shortcut | Action |
+|----------|--------|
+| `@` | Open context menu |
+| `/` | Open slash commands |
+| `Arrow Up/Down` | Navigate menu items |
+| `Enter` | Select menu item |
+| `Esc` | Close menus |
+| `Mod+Shift+Enter` | Accept Copilot changes |
+
+## Usage Limits
+
+Copilot usage is billed per token from the underlying LLM. If you reach your usage limit, Copilot will prompt you to increase your limit. You can add usage in increments ($50, $100) from your current base.

 <Callout type="info">
-  Model prices are per million tokens. The calculation divides by 1,000,000 to get the actual cost. See <a href="/execution/costs">the Cost Calculation page</a> for background and examples.
+  See the [Cost Calculation page](/execution/costs) for billing details.
 </Callout>
@@ -34,6 +34,8 @@ Speed up your workflow building with these keyboard shortcuts and mouse controls
 | `Mod` + `V` | Paste blocks |
 | `Delete` or `Backspace` | Delete selected blocks or edges |
 | `Shift` + `L` | Auto-layout canvas |
+| `Mod` + `Shift` + `F` | Fit to view |
+| `Mod` + `Shift` + `Enter` | Accept Copilot changes |

 ## Panel Navigation

@@ -3,6 +3,7 @@
   "pages": [
     "./introduction/index",
     "./getting-started/index",
+    "./quick-reference/index",
     "triggers",
     "blocks",
     "tools",
apps/docs/content/docs/en/quick-reference/index.mdx (new file, 136 lines)
@@ -0,0 +1,136 @@
+---
+title: Quick Reference
+description: Essential actions for navigating and using the Sim workflow editor
+---
+
+import { Callout } from 'fumadocs-ui/components/callout'
+
+A quick lookup for everyday actions in the Sim workflow editor. For keyboard shortcuts, see [Keyboard Shortcuts](/keyboard-shortcuts).
+
+<Callout type="info">
+  **Mod** refers to `Cmd` on macOS and `Ctrl` on Windows/Linux.
+</Callout>
+
+## Workspaces
+
+| Action | How |
+|--------|-----|
+| Create a workspace | Click workspace dropdown in sidebar → **New Workspace** |
+| Rename a workspace | Workspace settings → Edit name |
+| Switch workspaces | Click workspace dropdown in sidebar → Select workspace |
+| Invite team members | Workspace settings → **Team** → **Invite** |
+
+## Workflows
+
+| Action | How |
+|--------|-----|
+| Create a workflow | Click **New Workflow** button or `Mod+Shift+A` |
+| Rename a workflow | Double-click workflow name in sidebar, or right-click → **Rename** |
+| Duplicate a workflow | Right-click workflow → **Duplicate** |
+| Reorder workflows | Drag workflow up/down in the sidebar list |
+| Import a workflow | Sidebar menu → **Import** → Select file |
+| Create a folder | Right-click in sidebar → **New Folder** |
+| Rename a folder | Right-click folder → **Rename** |
+| Delete a folder | Right-click folder → **Delete** |
+| Collapse/expand folder | Click folder arrow, or double-click folder |
+| Move workflow to folder | Drag workflow onto folder in sidebar |
+| Delete a workflow | Right-click workflow → **Delete** |
+| Export a workflow | Right-click workflow → **Export** |
+| Assign workflow color | Right-click workflow → **Change Color** |
+| Multi-select workflows | `Mod+Click` or `Shift+Click` workflows in sidebar |
+| Open in new tab | Right-click workflow → **Open in New Tab** |
+
+## Blocks
+
+| Action | How |
+|--------|-----|
+| Add a block | Drag from Toolbar panel, or right-click canvas → **Add Block** |
+| Select a block | Click on the block |
+| Multi-select blocks | `Mod+Click` additional blocks, or right-drag to draw selection box |
+| Move blocks | Drag selected block(s) to new position |
+| Copy blocks | `Mod+C` with blocks selected |
+| Paste blocks | `Mod+V` to paste copied blocks |
+| Duplicate blocks | Right-click → **Duplicate** |
+| Delete blocks | `Delete` or `Backspace` key, or right-click → **Delete** |
+| Rename a block | Click block name in header, or edit in the Editor panel |
+| Enable/Disable a block | Right-click → **Enable/Disable** |
+| Toggle handle orientation | Right-click → **Toggle Handles** |
+| Toggle trigger mode | Right-click trigger block → **Toggle Trigger Mode** |
+| Configure a block | Select block → use Editor panel on right |
+
+## Connections
+
+| Action | How |
+|--------|-----|
+| Create a connection | Drag from output handle to input handle |
+| Delete a connection | Click edge to select → `Delete` key |
+| Use output in another block | Drag connection tag into input field |
+
+## Canvas Navigation
+
+| Action | How |
+|--------|-----|
+| Pan/move canvas | Left-drag on empty space, or scroll/trackpad |
+| Zoom in/out | Scroll wheel or pinch gesture |
+| Auto-layout | `Shift+L` |
+| Draw selection box | Right-drag on empty canvas area |
+
+## Panels & Views
+
+| Action | How |
+|--------|-----|
+| Open Copilot tab | Press `C` or click Copilot tab |
+| Open Toolbar tab | Press `T` or click Toolbar tab |
+| Open Editor tab | Press `E` or click Editor tab |
+| Search toolbar | `Mod+F` |
+| Toggle advanced mode | Click toggle button on input fields |
+| Resize panels | Drag panel edge |
+| Collapse/expand sidebar | Click collapse button on sidebar |
+
+## Running & Testing
+
+| Action | How |
+|--------|-----|
+| Run workflow | Click Play button or `Mod+Enter` |
+| Stop workflow | Click Stop button or `Mod+Enter` while running |
+| Test with chat | Use Chat panel on the right side |
+| Select output to view | Click dropdown in Chat panel → Select block output |
+| Clear chat history | Click clear button in Chat panel |
+| View execution logs | Open terminal panel at bottom, or `Mod+L` |
+| Filter logs by block | Click block filter in terminal |
+| Filter logs by status | Click status filter in terminal |
+| Search logs | Use search field in terminal |
+| Copy log entry | Right-click log entry → **Copy** |
+| Clear terminal | `Mod+D` |
+
+## Deployment
+
+| Action | How |
+|--------|-----|
+| Deploy a workflow | Click **Deploy** button in Deploy tab |
+| Update deployment | Click **Update** when changes are detected |
+| View deployment status | Check status indicator (Live/Update/Deploy) in Deploy tab |
+| Revert deployment | Access previous versions in Deploy tab |
+| Copy webhook URL | Deploy tab → Copy webhook URL |
+| Copy API endpoint | Deploy tab → Copy API endpoint URL |
+| Set up a schedule | Add Schedule trigger block → Configure interval |
+
+## Variables
+
+| Action | How |
+|--------|-----|
+| Add workflow variable | Variables tab → **Add Variable** |
+| Edit workflow variable | Variables tab → Click variable to edit |
+| Delete workflow variable | Variables tab → Click delete icon on variable |
+| Add environment variable | Settings → **Environment Variables** → **Add** |
+| Reference a variable | Use `{{variableName}}` syntax in block inputs |
+
+## Credentials
+
+| Action | How |
+|--------|-----|
+| Add API key | Block credential field → **Add Credential** → Enter API key |
+| Connect OAuth account | Block credential field → **Connect** → Authorize with provider |
+| Manage credentials | Settings → **Credentials** |
+| Remove credential | Settings → **Credentials** → Delete credential |
@@ -1,7 +1,15 @@
 import { useEffect, useRef, useState } from 'react'
 import { Plus } from 'lucide-react'
 import { useParams } from 'next/navigation'
-import { Badge, Button, Combobox, Input, Label, Textarea } from '@/components/emcn'
+import {
+  Badge,
+  Button,
+  Combobox,
+  type ComboboxOption,
+  Input,
+  Label,
+  Textarea,
+} from '@/components/emcn'
 import { Trash } from '@/components/emcn/icons/trash'
 import { cn } from '@/lib/core/utils/cn'
 import { formatDisplayText } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/formatted-text'
@@ -38,6 +46,14 @@ const DEFAULT_ASSIGNMENT: Omit<VariableAssignment, 'id'> = {
   isExisting: false,
 }

+/**
+ * Boolean value options for Combobox
+ */
+const BOOLEAN_OPTIONS: ComboboxOption[] = [
+  { label: 'true', value: 'true' },
+  { label: 'false', value: 'false' },
+]
+
 /**
  * Parses a value that might be a JSON string or already an array of VariableAssignment.
  * This handles the case where workflows are imported with stringified values.
@@ -104,8 +120,6 @@ export function VariablesInput({
   const allVariablesAssigned =
     !hasNoWorkflowVariables && getAvailableVariablesFor('new').length === 0

-  // Initialize with one empty assignment if none exist and not in preview/disabled mode
-  // Also add assignment when first variable is created
   useEffect(() => {
     if (!isReadOnly && assignments.length === 0 && currentWorkflowVariables.length > 0) {
       const initialAssignment: VariableAssignment = {
@@ -116,45 +130,46 @@ export function VariablesInput({
     }
   }, [currentWorkflowVariables.length, isReadOnly, assignments.length, setStoreValue])

-  // Clean up assignments when their associated variables are deleted
   useEffect(() => {
     if (isReadOnly || assignments.length === 0) return

     const currentVariableIds = new Set(currentWorkflowVariables.map((v) => v.id))
     const validAssignments = assignments.filter((assignment) => {
-      // Keep assignments that haven't selected a variable yet
       if (!assignment.variableId) return true
-      // Keep assignments whose variable still exists
       return currentVariableIds.has(assignment.variableId)
     })

-    // If all variables were deleted, clear all assignments
     if (currentWorkflowVariables.length === 0) {
       setStoreValue([])
     } else if (validAssignments.length !== assignments.length) {
-      // Some assignments reference deleted variables, remove them
       setStoreValue(validAssignments.length > 0 ? validAssignments : [])
     }
   }, [currentWorkflowVariables, assignments, isReadOnly, setStoreValue])

   const addAssignment = () => {
-    if (isPreview || disabled || allVariablesAssigned) return
+    if (isReadOnly || allVariablesAssigned) return

     const newAssignment: VariableAssignment = {
       ...DEFAULT_ASSIGNMENT,
       id: crypto.randomUUID(),
     }
-    setStoreValue([...(assignments || []), newAssignment])
+    setStoreValue([...assignments, newAssignment])
   }

   const removeAssignment = (id: string) => {
-    if (isPreview || disabled) return
-    setStoreValue((assignments || []).filter((a) => a.id !== id))
+    if (isReadOnly) return
+
+    if (assignments.length === 1) {
+      setStoreValue([{ ...DEFAULT_ASSIGNMENT, id: crypto.randomUUID() }])
+      return
+    }
+
+    setStoreValue(assignments.filter((a) => a.id !== id))
   }

   const updateAssignment = (id: string, updates: Partial<VariableAssignment>) => {
-    if (isPreview || disabled) return
-    setStoreValue((assignments || []).map((a) => (a.id === id ? { ...a, ...updates } : a)))
+    if (isReadOnly) return
+    setStoreValue(assignments.map((a) => (a.id === id ? { ...a, ...updates } : a)))
   }

   const handleVariableSelect = (assignmentId: string, variableId: string) => {
@@ -169,19 +184,12 @@ export function VariablesInput({
     }
   }

-  const handleTagSelect = (tag: string) => {
+  const handleTagSelect = (newValue: string) => {
     if (!activeFieldId) return

     const assignment = assignments.find((a) => a.id === activeFieldId)
-    if (!assignment) return
-
-    const currentValue = assignment.value || ''
-
-    const textBeforeCursor = currentValue.slice(0, cursorPosition)
-    const lastOpenBracket = textBeforeCursor.lastIndexOf('<')
-
-    const newValue =
-      currentValue.slice(0, lastOpenBracket) + tag + currentValue.slice(cursorPosition)
+    const originalValue = assignment?.value || ''
+    const textAfterCursor = originalValue.slice(cursorPosition)

     updateAssignment(activeFieldId, { value: newValue })
     setShowTags(false)
@@ -190,7 +198,7 @@ export function VariablesInput({
       const inputEl = valueInputRefs.current[activeFieldId]
       if (inputEl) {
         inputEl.focus()
-        const newCursorPos = lastOpenBracket + tag.length
+        const newCursorPos = newValue.length - textAfterCursor.length
         inputEl.setSelectionRange(newCursorPos, newCursorPos)
       }
     }, 10)
@@ -272,6 +280,18 @@ export function VariablesInput({
     }))
   }

+  const syncOverlayScroll = (assignmentId: string, scrollLeft: number) => {
+    const overlay = overlayRefs.current[assignmentId]
+    if (overlay) overlay.scrollLeft = scrollLeft
+  }
+
+  const handleKeyDown = (e: React.KeyboardEvent<HTMLInputElement | HTMLTextAreaElement>) => {
+    if (e.key === 'Escape') {
+      setShowTags(false)
+      setActiveSourceBlockId(null)
+    }
+  }
+
   if (isPreview && (!assignments || assignments.length === 0)) {
     return (
       <div className='flex flex-col items-center justify-center rounded-md border border-border/40 bg-muted/20 py-8 text-center'>
@@ -302,7 +322,7 @@ export function VariablesInput({

   return (
     <div className='space-y-[8px]'>
-      {assignments && assignments.length > 0 && (
+      {assignments.length > 0 && (
        <div className='space-y-[8px]'>
          {assignments.map((assignment, index) => {
            const collapsed = collapsedAssignments[assignment.id] || false
@@ -334,7 +354,7 @@ export function VariablesInput({
            <Button
              variant='ghost'
              onClick={addAssignment}
-             disabled={isPreview || disabled || allVariablesAssigned}
+             disabled={isReadOnly || allVariablesAssigned}
              className='h-auto p-0'
            >
              <Plus className='h-[14px] w-[14px]' />
@@ -343,7 +363,7 @@ export function VariablesInput({
            <Button
              variant='ghost'
              onClick={() => removeAssignment(assignment.id)}
-             disabled={isPreview || disabled || assignments.length === 1}
+             disabled={isReadOnly}
              className='h-auto p-0 text-[var(--text-error)] hover:text-[var(--text-error)]'
            >
              <Trash className='h-[14px] w-[14px]' />
@@ -358,16 +378,26 @@ export function VariablesInput({
            <Label className='text-[13px]'>Variable</Label>
            <Combobox
              options={availableVars.map((v) => ({ label: v.name, value: v.id }))}
-             value={assignment.variableId || assignment.variableName || ''}
+             value={assignment.variableId || ''}
              onChange={(value) => handleVariableSelect(assignment.id, value)}
              placeholder='Select a variable...'
-             disabled={isPreview || disabled}
+             disabled={isReadOnly}
            />
          </div>

          <div className='flex flex-col gap-[6px]'>
            <Label className='text-[13px]'>Value</Label>
-           {assignment.type === 'object' || assignment.type === 'array' ? (
+           {assignment.type === 'boolean' ? (
+             <Combobox
+               options={BOOLEAN_OPTIONS}
+               value={assignment.value ?? ''}
+               onChange={(v) =>
+                 !isReadOnly && updateAssignment(assignment.id, { value: v })
+               }
+               placeholder='Select value'
+               disabled={isReadOnly}
+             />
+           ) : assignment.type === 'object' || assignment.type === 'array' ? (
              <div className='relative'>
                <Textarea
                  ref={(el) => {
@@ -381,26 +411,32 @@ export function VariablesInput({
                      e.target.selectionStart ?? undefined
                    )
                  }
+                 onKeyDown={handleKeyDown}
                  onFocus={() => {
-                   if (!isPreview && !disabled && !assignment.value?.trim()) {
+                   if (!isReadOnly && !assignment.value?.trim()) {
                      setActiveFieldId(assignment.id)
                      setCursorPosition(0)
                      setShowTags(true)
                    }
                  }}
+                 onScroll={(e) => {
+                   const overlay = overlayRefs.current[assignment.id]
+                   if (overlay) {
+                     overlay.scrollTop = e.currentTarget.scrollTop
+                     overlay.scrollLeft = e.currentTarget.scrollLeft
+                   }
+                 }}
                  placeholder={
                    assignment.type === 'object'
                      ? '{\n  "key": "value"\n}'
                      : '[\n  1, 2, 3\n]'
                  }
-                 disabled={isPreview || disabled}
+                 disabled={isReadOnly}
                  className={cn(
                    'min-h-[120px] font-mono text-sm text-transparent caret-foreground placeholder:text-muted-foreground/50',
                    dragHighlight[assignment.id] && 'ring-2 ring-blue-500 ring-offset-2'
                  )}
                  style={{
-                   fontFamily: 'inherit',
-                   lineHeight: 'inherit',
                    wordBreak: 'break-word',
                    whiteSpace: 'pre-wrap',
                  }}
@@ -413,10 +449,7 @@ export function VariablesInput({
                    if (el) overlayRefs.current[assignment.id] = el
                  }}
                  className='pointer-events-none absolute inset-0 flex items-start overflow-auto bg-transparent px-3 py-2 font-mono text-sm'
-                 style={{
-                   fontFamily: 'inherit',
-                   lineHeight: 'inherit',
-                 }}
+                 style={{ scrollbarWidth: 'none' }}
                >
                  <div className='w-full whitespace-pre-wrap break-words'>
                    {formatDisplayText(assignment.value || '', {
@@ -441,21 +474,34 @@ export function VariablesInput({
                      e.target.selectionStart ?? undefined
                    )
                  }
+                 onKeyDown={handleKeyDown}
                  onFocus={() => {
-                   if (!isPreview && !disabled && !assignment.value?.trim()) {
+                   if (!isReadOnly && !assignment.value?.trim()) {
                      setActiveFieldId(assignment.id)
                      setCursorPosition(0)
                      setShowTags(true)
                    }
                  }}
+                 onScroll={(e) =>
+                   syncOverlayScroll(assignment.id, e.currentTarget.scrollLeft)
+                 }
+                 onPaste={() =>
+                   setTimeout(() => {
+                     const input = valueInputRefs.current[assignment.id]
+                     if (input)
+                       syncOverlayScroll(
+                         assignment.id,
+                         (input as HTMLInputElement).scrollLeft
+                       )
+                   }, 0)
+                 }
                  placeholder={`${assignment.type} value`}
-                 disabled={isPreview || disabled}
+                 disabled={isReadOnly}
                  autoComplete='off'
                  className={cn(
-                   'allow-scroll w-full overflow-auto text-transparent caret-foreground',
+                   'allow-scroll w-full overflow-x-auto overflow-y-hidden text-transparent caret-foreground',
                    dragHighlight[assignment.id] && 'ring-2 ring-blue-500 ring-offset-2'
                  )}
-                 style={{ overflowX: 'auto' }}
                  onDrop={(e) => handleDrop(e, assignment.id)}
                  onDragOver={(e) => handleDragOver(e, assignment.id)}
                  onDragLeave={(e) => handleDragLeave(e, assignment.id)}
@@ -465,7 +511,7 @@ export function VariablesInput({
                    if (el) overlayRefs.current[assignment.id] = el
                  }}
                  className='pointer-events-none absolute inset-0 flex items-center overflow-x-auto bg-transparent px-[8px] py-[6px] font-medium font-sans text-sm'
-                 style={{ overflowX: 'auto' }}
+                 style={{ scrollbarWidth: 'none' }}
                >
                  <div
                    className='w-full whitespace-pre'
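The `handleTagSelect` changes above shift tag insertion out of the handler: it now receives the already-built `newValue` and only restores the cursor, placing it at `newValue.length - textAfterCursor.length`, i.e. immediately after the inserted tag while leaving the text that followed the cursor untouched. A small illustrative sketch of that arithmetic (values and tag format are made up; this is not the component code):

```typescript
// Illustrative only: cursor restoration after a tag replaces the text between
// the last '<' and the cursor. Names mirror the diff; the concrete strings are assumptions.
const originalValue = 'Hello <va world'   // user typed "<va"; cursor sits right after "a"
const cursorPosition = 9                  // index just after "<va"
const textAfterCursor = originalValue.slice(cursorPosition) // ' world'

// Upstream code (not shown in this hunk) builds the new value with the tag spliced in.
const newValue = 'Hello <variable.foo> world'

// The new cursor lands right after the inserted tag, keeping ' world' untouched.
const newCursorPos = newValue.length - textAfterCursor.length // 20
console.log(newValue.slice(0, newCursorPos)) // 'Hello <variable.foo>'
```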
@@ -284,12 +284,18 @@ const renderLabel = (
             </>
           )}
           {showCanonicalToggle && (
+            <Tooltip.Root>
+              <Tooltip.Trigger asChild>
                 <button
                   type='button'
                   className='flex h-[12px] w-[12px] flex-shrink-0 items-center justify-center bg-transparent p-0 disabled:cursor-not-allowed disabled:opacity-50'
                   onClick={canonicalToggle?.onToggle}
                   disabled={canonicalToggleDisabledResolved}
-                  aria-label={canonicalToggle?.mode === 'advanced' ? 'Use selector' : 'Enter manual ID'}
+                  aria-label={
+                    canonicalToggle?.mode === 'advanced'
+                      ? 'Switch to selector'
+                      : 'Switch to manual ID'
+                  }
                 >
                   <ArrowLeftRight
                     className={cn(
@@ -300,6 +306,15 @@ const renderLabel = (
                     )}
                   />
                 </button>
+              </Tooltip.Trigger>
+              <Tooltip.Content side='top'>
+                <p>
+                  {canonicalToggle?.mode === 'advanced'
+                    ? 'Switch to selector'
+                    : 'Switch to manual ID'}
+                </p>
+              </Tooltip.Content>
+            </Tooltip.Root>
           )}
         </div>
       </div>
@@ -13,7 +13,11 @@ interface UseCanvasContextMenuProps {

 /**
  * Hook for managing workflow canvas context menus.
- * Handles right-click events, menu state, click-outside detection, and block info extraction.
+ *
+ * Handles right-click events on nodes, pane, and selections with proper multi-select behavior.
+ *
+ * @param props - Hook configuration
+ * @returns Context menu state and handlers
  */
 export function useCanvasContextMenu({ blocks, getNodes, setNodes }: UseCanvasContextMenuProps) {
   const [activeMenu, setActiveMenu] = useState<MenuType>(null)
@@ -46,19 +50,29 @@ export function useCanvasContextMenu({ blocks, getNodes, setNodes }: UseCanvasCo
       event.stopPropagation()

       const isMultiSelect = event.shiftKey || event.metaKey || event.ctrlKey
-      setNodes((nodes) =>
-        nodes.map((n) => ({
-          ...n,
-          selected: isMultiSelect ? (n.id === node.id ? true : n.selected) : n.id === node.id,
-        }))
-      )
-      const selectedNodes = getNodes().filter((n) => n.selected)
-      const nodesToUse = isMultiSelect
-        ? selectedNodes.some((n) => n.id === node.id)
-          ? selectedNodes
-          : [...selectedNodes, node]
-        : [node]
+      const currentSelectedNodes = getNodes().filter((n) => n.selected)
+      const isClickedNodeSelected = currentSelectedNodes.some((n) => n.id === node.id)
+
+      let nodesToUse: Node[]
+      if (isClickedNodeSelected) {
+        nodesToUse = currentSelectedNodes
+      } else if (isMultiSelect) {
+        nodesToUse = [...currentSelectedNodes, node]
+        setNodes((nodes) =>
+          nodes.map((n) => ({
+            ...n,
+            selected: n.id === node.id ? true : n.selected,
+          }))
+        )
+      } else {
+        nodesToUse = [node]
+        setNodes((nodes) =>
+          nodes.map((n) => ({
+            ...n,
+            selected: n.id === node.id,
+          }))
+        )
+      }

       setPosition({ x: event.clientX, y: event.clientY })
       setSelectedBlocks(nodesToBlockInfos(nodesToUse))
@@ -27,18 +27,13 @@ export function useContextMenu({ onContextMenu }: UseContextMenuProps = {}) {
   const [isOpen, setIsOpen] = useState(false)
   const [position, setPosition] = useState<ContextMenuPosition>({ x: 0, y: 0 })
   const menuRef = useRef<HTMLDivElement>(null)
-  // Used to prevent click-outside dismissal when trigger is clicked
   const dismissPreventedRef = useRef(false)

-  /**
-   * Handle right-click event
-   */
   const handleContextMenu = useCallback(
     (e: React.MouseEvent) => {
       e.preventDefault()
       e.stopPropagation()

-      // Calculate position relative to viewport
       const x = e.clientX
       const y = e.clientY

@@ -50,17 +45,10 @@ export function useContextMenu({ onContextMenu }: UseContextMenuProps = {}) {
     [onContextMenu]
   )

-  /**
-   * Close the context menu
-   */
   const closeMenu = useCallback(() => {
     setIsOpen(false)
   }, [])

-  /**
-   * Prevent the next click-outside from dismissing the menu.
-   * Call this on pointerdown of a toggle trigger to allow proper toggle behavior.
-   */
   const preventDismiss = useCallback(() => {
     dismissPreventedRef.current = true
   }, [])
@@ -72,7 +60,6 @@ export function useContextMenu({ onContextMenu }: UseContextMenuProps = {}) {
     if (!isOpen) return

     const handleClickOutside = (e: MouseEvent) => {
-      // Check if dismissal was prevented (e.g., by toggle trigger's pointerdown)
       if (dismissPreventedRef.current) {
         dismissPreventedRef.current = false
         return
@@ -82,7 +69,6 @@ export function useContextMenu({ onContextMenu }: UseContextMenuProps = {}) {
       }
     }

-    // Small delay to prevent immediate close from the same click that opened the menu
     const timeoutId = setTimeout(() => {
       document.addEventListener('click', handleClickOutside)
     }, 0)
@@ -4,9 +4,9 @@ import { createLogger } from '@sim/logger'
 import { eq } from 'drizzle-orm'
 import { refreshTokenIfNeeded } from '@/app/api/auth/oauth/utils'
 import type { BlockOutput } from '@/blocks/types'
-import { BlockType, DEFAULTS, EVALUATOR, HTTP } from '@/executor/constants'
+import { BlockType, DEFAULTS, EVALUATOR } from '@/executor/constants'
 import type { BlockHandler, ExecutionContext } from '@/executor/types'
-import { buildAPIUrl, extractAPIErrorMessage } from '@/executor/utils/http'
+import { buildAPIUrl, buildAuthHeaders, extractAPIErrorMessage } from '@/executor/utils/http'
 import { isJSONString, parseJSON, stringifyJSON } from '@/executor/utils/json'
 import { validateModelProvider } from '@/executor/utils/permission-check'
 import { calculateCost, getProviderFromModel } from '@/providers/utils'
@@ -143,9 +143,7 @@ export class EvaluatorBlockHandler implements BlockHandler {

     const response = await fetch(url.toString(), {
       method: 'POST',
-      headers: {
-        'Content-Type': HTTP.CONTENT_TYPE.JSON,
-      },
+      headers: await buildAuthHeaders(),
       body: stringifyJSON(providerRequest),
     })

@@ -9,12 +9,12 @@ import type { BlockOutput } from '@/blocks/types'
 import {
   BlockType,
   DEFAULTS,
-  HTTP,
   isAgentBlockType,
   isRouterV2BlockType,
   ROUTER,
 } from '@/executor/constants'
 import type { BlockHandler, ExecutionContext } from '@/executor/types'
+import { buildAuthHeaders } from '@/executor/utils/http'
 import { validateModelProvider } from '@/executor/utils/permission-check'
 import { calculateCost, getProviderFromModel } from '@/providers/utils'
 import type { SerializedBlock } from '@/serializer/types'
@@ -118,9 +118,7 @@ export class RouterBlockHandler implements BlockHandler {

     const response = await fetch(url.toString(), {
       method: 'POST',
-      headers: {
-        'Content-Type': HTTP.CONTENT_TYPE.JSON,
-      },
+      headers: await buildAuthHeaders(),
       body: JSON.stringify(providerRequest),
     })

@@ -277,9 +275,7 @@ export class RouterBlockHandler implements BlockHandler {

     const response = await fetch(url.toString(), {
       method: 'POST',
-      headers: {
-        'Content-Type': HTTP.CONTENT_TYPE.JSON,
-      },
+      headers: await buildAuthHeaders(),
       body: JSON.stringify(providerRequest),
     })

@@ -11,9 +11,10 @@ import { extractAndPersistCustomTools } from '@/lib/workflows/persistence/custom
|
|||||||
import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/persistence/utils'
|
import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/persistence/utils'
|
||||||
import { isValidKey } from '@/lib/workflows/sanitization/key-validation'
|
import { isValidKey } from '@/lib/workflows/sanitization/key-validation'
|
||||||
import { validateWorkflowState } from '@/lib/workflows/sanitization/validation'
|
import { validateWorkflowState } from '@/lib/workflows/sanitization/validation'
|
||||||
|
import { buildCanonicalIndex, isCanonicalPair } from '@/lib/workflows/subblocks/visibility'
|
||||||
import { TriggerUtils } from '@/lib/workflows/triggers/triggers'
|
import { TriggerUtils } from '@/lib/workflows/triggers/triggers'
|
||||||
import { getAllBlocks, getBlock } from '@/blocks/registry'
|
import { getAllBlocks, getBlock } from '@/blocks/registry'
|
||||||
import type { SubBlockConfig } from '@/blocks/types'
|
import type { BlockConfig, SubBlockConfig } from '@/blocks/types'
|
||||||
import { EDGE, normalizeName, RESERVED_BLOCK_NAMES } from '@/executor/constants'
|
import { EDGE, normalizeName, RESERVED_BLOCK_NAMES } from '@/executor/constants'
|
||||||
import { getUserPermissionConfig } from '@/executor/utils/permission-check'
|
import { getUserPermissionConfig } from '@/executor/utils/permission-check'
|
||||||
import { generateLoopBlocks, generateParallelBlocks } from '@/stores/workflows/workflow/utils'
|
import { generateLoopBlocks, generateParallelBlocks } from '@/stores/workflows/workflow/utils'
|
||||||
@@ -667,11 +668,47 @@ function createBlockFromParams(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
|
|
||||||
|
if (validatedInputs) {
|
||||||
|
updateCanonicalModesForInputs(blockState, Object.keys(validatedInputs), blockConfig)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return blockState
|
return blockState
|
||||||
}
|
}
|
||||||
|
|
||||||
|
function updateCanonicalModesForInputs(
|
||||||
|
block: { data?: { canonicalModes?: Record<string, 'basic' | 'advanced'> } },
|
||||||
|
inputKeys: string[],
|
||||||
|
blockConfig: BlockConfig
|
||||||
|
): void {
|
||||||
|
if (!blockConfig.subBlocks?.length) return
|
||||||
|
|
||||||
|
const canonicalIndex = buildCanonicalIndex(blockConfig.subBlocks)
|
||||||
|
const canonicalModeUpdates: Record<string, 'basic' | 'advanced'> = {}
|
||||||
|
|
||||||
|
for (const inputKey of inputKeys) {
|
||||||
|
const canonicalId = canonicalIndex.canonicalIdBySubBlockId[inputKey]
|
||||||
|
if (!canonicalId) continue
|
||||||
|
|
||||||
|
const group = canonicalIndex.groupsById[canonicalId]
|
||||||
|
if (!group || !isCanonicalPair(group)) continue
|
||||||
|
|
||||||
|
const isAdvanced = group.advancedIds.includes(inputKey)
|
||||||
|
const existingMode = canonicalModeUpdates[canonicalId]
|
||||||
|
|
||||||
|
if (!existingMode || isAdvanced) {
|
||||||
|
canonicalModeUpdates[canonicalId] = isAdvanced ? 'advanced' : 'basic'
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (Object.keys(canonicalModeUpdates).length > 0) {
|
||||||
|
if (!block.data) block.data = {}
|
||||||
|
if (!block.data.canonicalModes) block.data.canonicalModes = {}
|
||||||
|
Object.assign(block.data.canonicalModes, canonicalModeUpdates)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Normalize tools array by adding back fields that were sanitized for training
|
* Normalize tools array by adding back fields that were sanitized for training
|
||||||
*/
|
*/
|
||||||
@@ -1654,6 +1691,15 @@ function applyOperationsToWorkflowState(
           block.data.collection = params.inputs.collection
         }
       }
+
+      const editBlockConfig = getBlock(block.type)
+      if (editBlockConfig) {
+        updateCanonicalModesForInputs(
+          block,
+          Object.keys(validationResult.validInputs),
+          editBlockConfig
+        )
+      }
     }
 
     // Update basic properties
@@ -2256,6 +2302,15 @@ function applyOperationsToWorkflowState(
           existingBlock.subBlocks[key].value = sanitizedValue
         }
       })
+
+      const existingBlockConfig = getBlock(existingBlock.type)
+      if (existingBlockConfig) {
+        updateCanonicalModesForInputs(
+          existingBlock,
+          Object.keys(validationResult.validInputs),
+          existingBlockConfig
+        )
+      }
       }
     } else {
       // Special container types (loop, parallel) are not in the block registry but are valid
@@ -24,7 +24,7 @@
   },
   "dependencies": {
     "@a2a-js/sdk": "0.3.7",
-    "@anthropic-ai/sdk": "^0.39.0",
+    "@anthropic-ai/sdk": "0.71.2",
    "@aws-sdk/client-bedrock-runtime": "3.940.0",
    "@aws-sdk/client-dynamodb": "3.940.0",
    "@aws-sdk/client-rds-data": "3.940.0",
@@ -1,4 +1,5 @@
 import Anthropic from '@anthropic-ai/sdk'
+import { transformJSONSchema } from '@anthropic-ai/sdk/lib/transform-json-schema'
 import { createLogger } from '@sim/logger'
 import type { StreamingExecution } from '@/executor/types'
 import { MAX_TOOL_ITERATIONS } from '@/providers'
@@ -185,13 +186,10 @@ export const anthropicProvider: ProviderConfig = {
       const schema = request.responseFormat.schema || request.responseFormat
 
       if (useNativeStructuredOutputs) {
-        const schemaWithConstraints = {
-          ...schema,
-          additionalProperties: false,
-        }
+        const transformedSchema = transformJSONSchema(schema)
         payload.output_format = {
           type: 'json_schema',
-          schema: schemaWithConstraints,
+          schema: transformedSchema,
         }
         logger.info(`Using native structured outputs for model: ${modelId}`)
       } else {
37
bun.lock
@@ -1,6 +1,5 @@
 {
   "lockfileVersion": 1,
-  "configVersion": 0,
   "workspaces": {
     "": {
       "name": "simstudio",
@@ -55,7 +54,7 @@
       "version": "0.1.0",
       "dependencies": {
         "@a2a-js/sdk": "0.3.7",
-        "@anthropic-ai/sdk": "^0.39.0",
+        "@anthropic-ai/sdk": "0.71.2",
         "@aws-sdk/client-bedrock-runtime": "3.940.0",
         "@aws-sdk/client-dynamodb": "3.940.0",
         "@aws-sdk/client-rds-data": "3.940.0",
@@ -363,7 +362,7 @@
 
    "@ampproject/remapping": ["@ampproject/remapping@2.3.0", "", { "dependencies": { "@jridgewell/gen-mapping": "^0.3.5", "@jridgewell/trace-mapping": "^0.3.24" } }, "sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw=="],
 
-    "@anthropic-ai/sdk": ["@anthropic-ai/sdk@0.39.0", "", { "dependencies": { "@types/node": "^18.11.18", "@types/node-fetch": "^2.6.4", "abort-controller": "^3.0.0", "agentkeepalive": "^4.2.1", "form-data-encoder": "1.7.2", "formdata-node": "^4.3.2", "node-fetch": "^2.6.7" } }, "sha512-eMyDIPRZbt1CCLErRCi3exlAvNkBtRe+kW5vvJyef93PmNr/clstYgHhtvmkxN82nlKgzyGPCyGxrm0JQ1ZIdg=="],
+    "@anthropic-ai/sdk": ["@anthropic-ai/sdk@0.71.2", "", { "dependencies": { "json-schema-to-ts": "^3.1.1" }, "peerDependencies": { "zod": "^3.25.0 || ^4.0.0" }, "optionalPeers": ["zod"], "bin": { "anthropic-ai-sdk": "bin/cli" } }, "sha512-TGNDEUuEstk/DKu0/TflXAEt+p+p/WhTlFzEnoosvbaDU2LTjm42igSdlL0VijrKpWejtOKxX0b8A7uc+XiSAQ=="],
 
    "@ark/schema": ["@ark/schema@0.56.0", "", { "dependencies": { "@ark/util": "0.56.0" } }, "sha512-ECg3hox/6Z/nLajxXqNhgPtNdHWC9zNsDyskwO28WinoFEnWow4IsERNz9AnXRhTZJnYIlAJ4uGn3nlLk65vZA=="],
@@ -547,6 +546,8 @@
 
    "@babel/plugin-transform-react-jsx-source": ["@babel/plugin-transform-react-jsx-source@7.27.1", "", { "dependencies": { "@babel/helper-plugin-utils": "^7.27.1" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "sha512-zbwoTsBruTeKB9hSq73ha66iFeJHuaFkUbwvqElnygoNbj/jHRsSeokowZFN3CZ64IvEqcmmkVe89OPXc7ldAw=="],
 
+    "@babel/runtime": ["@babel/runtime@7.28.6", "", {}, "sha512-05WQkdpL9COIMz4LjTxGpPNCdlpyimKppYNoJ5Di5EUObifl8t4tuLuUBBZEpoLYOmfvIWrsp9fCl0HoPRVTdA=="],
+
    "@babel/template": ["@babel/template@7.28.6", "", { "dependencies": { "@babel/code-frame": "^7.28.6", "@babel/parser": "^7.28.6", "@babel/types": "^7.28.6" } }, "sha512-YA6Ma2KsCdGb+WC6UpBVFJGXL58MDA6oyONbjyF/+5sBgxY/dwkhLogbMT2GXXyU84/IhRw/2D1Os1B/giz+BQ=="],
 
    "@babel/traverse": ["@babel/traverse@7.28.6", "", { "dependencies": { "@babel/code-frame": "^7.28.6", "@babel/generator": "^7.28.6", "@babel/helper-globals": "^7.28.0", "@babel/parser": "^7.28.6", "@babel/template": "^7.28.6", "@babel/types": "^7.28.6", "debug": "^4.3.1" } }, "sha512-fgWX62k02qtjqdSNTAGxmKYY/7FSL9WAS1o2Hu5+I5m9T0yxZzr4cnrfXQ/MX0rIifthCSs6FKTlzYbJcPtMNg=="],
@@ -2443,6 +2444,8 @@
 
    "json-schema": ["json-schema@0.4.0", "", {}, "sha512-es94M3nTIfsEPisRafak+HDLfHXnKBhV3vU5eqPcS3flIWqcxJWgXHXiey3YrpaNsanY5ei1VoYEbOzijuq9BA=="],
 
+    "json-schema-to-ts": ["json-schema-to-ts@3.1.1", "", { "dependencies": { "@babel/runtime": "^7.18.3", "ts-algebra": "^2.0.0" } }, "sha512-+DWg8jCJG2TEnpy7kOm/7/AxaYoaRbjVB4LFZLySZlWn8exGs3A4OLJR966cVvU26N7X9TWxl+Jsw7dzAqKT6g=="],
+
    "json-schema-traverse": ["json-schema-traverse@0.4.1", "", {}, "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg=="],
 
    "json5": ["json5@2.2.3", "", { "bin": { "json5": "lib/cli.js" } }, "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg=="],
@@ -3387,6 +3390,8 @@
 
    "trough": ["trough@2.2.0", "", {}, "sha512-tmMpK00BjZiUyVyvrBK7knerNgmgvcV/KLVyuma/SC+TQN167GrMRciANTz09+k3zW8L8t60jWO1GpfkZdjTaw=="],
 
+    "ts-algebra": ["ts-algebra@2.0.0", "", {}, "sha512-FPAhNPFMrkwz76P7cdjdmiShwMynZYN6SgOujD1urY4oNm80Ou9oMdmbR45LotcKOXoy7wSmHkRFE6Mxbrhefw=="],
+
    "ts-interface-checker": ["ts-interface-checker@0.1.13", "", {}, "sha512-Y/arvbn+rrz3JCKl9C4kVNfTfSm2/mEp5FSz5EsZSANGPSlQrpRI5M4PKF+mJnE52jOO90PnPSc3Ur3bTQw0gA=="],
 
    "tsafe": ["tsafe@1.8.12", "", {}, "sha512-nFRqW0ttu/2o6XTXsHiVZWJBCOaxhVqZLg7dgs3coZNsCMPXPfwz+zPHAQA+70fNnVJLAPg1EgGIqK9Q84tvAw=="],
@@ -3593,10 +3598,6 @@
 
    "zwitch": ["zwitch@2.0.4", "", {}, "sha512-bXE4cR/kVZhKZX/RjPEflHaKVhUVl85noU3v6b8apfQEc1x4A+zBxjZ4lN8LqGd6WZ3dl98pY4o717VFmoPp+A=="],
 
-    "@anthropic-ai/sdk/@types/node": ["@types/node@18.19.130", "", { "dependencies": { "undici-types": "~5.26.4" } }, "sha512-GRaXQx6jGfL8sKfaIDD6OupbIHBr9jv7Jnaml9tB7l4v068PAOXqfcujMMo5PhbIs6ggR1XODELqahT2R8v0fg=="],
-
-    "@anthropic-ai/sdk/node-fetch": ["node-fetch@2.7.0", "", { "dependencies": { "whatwg-url": "^5.0.0" }, "peerDependencies": { "encoding": "^0.1.0" }, "optionalPeers": ["encoding"] }, "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A=="],
-
    "@asamuzakjp/css-color/lru-cache": ["lru-cache@10.4.3", "", {}, "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ=="],
 
    "@aws-crypto/crc32/@aws-sdk/types": ["@aws-sdk/types@3.969.0", "", { "dependencies": { "@smithy/types": "^4.12.0", "tslib": "^2.6.2" } }, "sha512-7IIzM5TdiXn+VtgPdVLjmE6uUBUtnga0f4RiSEI1WW10RPuNvZ9U+pL3SwDiRDAdoGrOF9tSLJOFZmfuwYuVYQ=="],
@@ -3713,6 +3714,8 @@
 
    "@browserbasehq/sdk/node-fetch": ["node-fetch@2.7.0", "", { "dependencies": { "whatwg-url": "^5.0.0" }, "peerDependencies": { "encoding": "^0.1.0" }, "optionalPeers": ["encoding"] }, "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A=="],
 
+    "@browserbasehq/stagehand/@anthropic-ai/sdk": ["@anthropic-ai/sdk@0.39.0", "", { "dependencies": { "@types/node": "^18.11.18", "@types/node-fetch": "^2.6.4", "abort-controller": "^3.0.0", "agentkeepalive": "^4.2.1", "form-data-encoder": "1.7.2", "formdata-node": "^4.3.2", "node-fetch": "^2.6.7" } }, "sha512-eMyDIPRZbt1CCLErRCi3exlAvNkBtRe+kW5vvJyef93PmNr/clstYgHhtvmkxN82nlKgzyGPCyGxrm0JQ1ZIdg=="],
+
    "@cerebras/cerebras_cloud_sdk/@types/node": ["@types/node@18.19.130", "", { "dependencies": { "undici-types": "~5.26.4" } }, "sha512-GRaXQx6jGfL8sKfaIDD6OupbIHBr9jv7Jnaml9tB7l4v068PAOXqfcujMMo5PhbIs6ggR1XODELqahT2R8v0fg=="],
 
    "@cerebras/cerebras_cloud_sdk/node-fetch": ["node-fetch@2.7.0", "", { "dependencies": { "whatwg-url": "^5.0.0" }, "peerDependencies": { "encoding": "^0.1.0" }, "optionalPeers": ["encoding"] }, "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A=="],
@@ -4215,10 +4218,6 @@
 
    "xml2js/xmlbuilder": ["xmlbuilder@11.0.1", "", {}, "sha512-fDlsI/kFEx7gLvbecc0/ohLG50fugQp8ryHzMTuW9vSa1GJ0XYWKnhsUx7oie3G98+r56aTQIUB4kht42R3JvA=="],
 
-    "@anthropic-ai/sdk/@types/node/undici-types": ["undici-types@5.26.5", "", {}, "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA=="],
-
-    "@anthropic-ai/sdk/node-fetch/whatwg-url": ["whatwg-url@5.0.0", "", { "dependencies": { "tr46": "~0.0.3", "webidl-conversions": "^3.0.0" } }, "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw=="],
-
    "@aws-crypto/sha1-browser/@smithy/util-utf8/@smithy/util-buffer-from": ["@smithy/util-buffer-from@2.2.0", "", { "dependencies": { "@smithy/is-array-buffer": "^2.2.0", "tslib": "^2.6.2" } }, "sha512-IJdWBbTcMQ6DA0gdNhh/BwrLkDR+ADW5Kr1aZmd4k3DIF6ezMV4R2NIAmT08wQJ3yUK82thHWmC/TnK/wpMMIA=="],
 
    "@aws-crypto/sha256-browser/@smithy/util-utf8/@smithy/util-buffer-from": ["@smithy/util-buffer-from@2.2.0", "", { "dependencies": { "@smithy/is-array-buffer": "^2.2.0", "tslib": "^2.6.2" } }, "sha512-IJdWBbTcMQ6DA0gdNhh/BwrLkDR+ADW5Kr1aZmd4k3DIF6ezMV4R2NIAmT08wQJ3yUK82thHWmC/TnK/wpMMIA=="],
@@ -4275,6 +4274,10 @@
 
    "@browserbasehq/sdk/node-fetch/whatwg-url": ["whatwg-url@5.0.0", "", { "dependencies": { "tr46": "~0.0.3", "webidl-conversions": "^3.0.0" } }, "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw=="],
 
+    "@browserbasehq/stagehand/@anthropic-ai/sdk/@types/node": ["@types/node@18.19.130", "", { "dependencies": { "undici-types": "~5.26.4" } }, "sha512-GRaXQx6jGfL8sKfaIDD6OupbIHBr9jv7Jnaml9tB7l4v068PAOXqfcujMMo5PhbIs6ggR1XODELqahT2R8v0fg=="],
+
+    "@browserbasehq/stagehand/@anthropic-ai/sdk/node-fetch": ["node-fetch@2.7.0", "", { "dependencies": { "whatwg-url": "^5.0.0" }, "peerDependencies": { "encoding": "^0.1.0" }, "optionalPeers": ["encoding"] }, "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A=="],
+
    "@cerebras/cerebras_cloud_sdk/@types/node/undici-types": ["undici-types@5.26.5", "", {}, "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA=="],
 
    "@cerebras/cerebras_cloud_sdk/node-fetch/whatwg-url": ["whatwg-url@5.0.0", "", { "dependencies": { "tr46": "~0.0.3", "webidl-conversions": "^3.0.0" } }, "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw=="],
@@ -4685,10 +4688,6 @@
 
    "vite/esbuild/@esbuild/win32-x64": ["@esbuild/win32-x64@0.27.2", "", { "os": "win32", "cpu": "x64" }, "sha512-sRdU18mcKf7F+YgheI/zGf5alZatMUTKj/jNS6l744f9u3WFu4v7twcUI9vu4mknF4Y9aDlblIie0IM+5xxaqQ=="],
 
-    "@anthropic-ai/sdk/node-fetch/whatwg-url/tr46": ["tr46@0.0.3", "", {}, "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw=="],
-
-    "@anthropic-ai/sdk/node-fetch/whatwg-url/webidl-conversions": ["webidl-conversions@3.0.1", "", {}, "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ=="],
-
    "@aws-crypto/sha1-browser/@smithy/util-utf8/@smithy/util-buffer-from/@smithy/is-array-buffer": ["@smithy/is-array-buffer@2.2.0", "", { "dependencies": { "tslib": "^2.6.2" } }, "sha512-GGP3O9QFD24uGeAXYUjwSTXARoqpZykHadOmA8G5vfJPK0/DC67qa//0qvqrJzL1xc8WQWX7/yc7fwudjPHPhA=="],
 
    "@aws-crypto/sha256-browser/@smithy/util-utf8/@smithy/util-buffer-from/@smithy/is-array-buffer": ["@smithy/is-array-buffer@2.2.0", "", { "dependencies": { "tslib": "^2.6.2" } }, "sha512-GGP3O9QFD24uGeAXYUjwSTXARoqpZykHadOmA8G5vfJPK0/DC67qa//0qvqrJzL1xc8WQWX7/yc7fwudjPHPhA=="],
@@ -4737,6 +4736,10 @@
 
    "@browserbasehq/sdk/node-fetch/whatwg-url/webidl-conversions": ["webidl-conversions@3.0.1", "", {}, "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ=="],
 
+    "@browserbasehq/stagehand/@anthropic-ai/sdk/@types/node/undici-types": ["undici-types@5.26.5", "", {}, "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA=="],
+
+    "@browserbasehq/stagehand/@anthropic-ai/sdk/node-fetch/whatwg-url": ["whatwg-url@5.0.0", "", { "dependencies": { "tr46": "~0.0.3", "webidl-conversions": "^3.0.0" } }, "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw=="],
+
    "@cerebras/cerebras_cloud_sdk/node-fetch/whatwg-url/tr46": ["tr46@0.0.3", "", {}, "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw=="],
 
    "@cerebras/cerebras_cloud_sdk/node-fetch/whatwg-url/webidl-conversions": ["webidl-conversions@3.0.1", "", {}, "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ=="],
@@ -4829,6 +4832,10 @@
 
    "@aws-sdk/client-sqs/@aws-sdk/credential-provider-node/@aws-sdk/credential-provider-sso/@aws-sdk/token-providers/@aws-sdk/nested-clients": ["@aws-sdk/nested-clients@3.947.0", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "3.947.0", "@aws-sdk/middleware-host-header": "3.936.0", "@aws-sdk/middleware-logger": "3.936.0", "@aws-sdk/middleware-recursion-detection": "3.936.0", "@aws-sdk/middleware-user-agent": "3.947.0", "@aws-sdk/region-config-resolver": "3.936.0", "@aws-sdk/types": "3.936.0", "@aws-sdk/util-endpoints": "3.936.0", "@aws-sdk/util-user-agent-browser": "3.936.0", "@aws-sdk/util-user-agent-node": "3.947.0", "@smithy/config-resolver": "^4.4.3", "@smithy/core": "^3.18.7", "@smithy/fetch-http-handler": "^5.3.6", "@smithy/hash-node": "^4.2.5", "@smithy/invalid-dependency": "^4.2.5", "@smithy/middleware-content-length": "^4.2.5", "@smithy/middleware-endpoint": "^4.3.14", "@smithy/middleware-retry": "^4.4.14", "@smithy/middleware-serde": "^4.2.6", "@smithy/middleware-stack": "^4.2.5", "@smithy/node-config-provider": "^4.3.5", "@smithy/node-http-handler": "^4.4.5", "@smithy/protocol-http": "^5.3.5", "@smithy/smithy-client": "^4.9.10", "@smithy/types": "^4.9.0", "@smithy/url-parser": "^4.2.5", "@smithy/util-base64": "^4.3.0", "@smithy/util-body-length-browser": "^4.2.0", "@smithy/util-body-length-node": "^4.2.1", "@smithy/util-defaults-mode-browser": "^4.3.13", "@smithy/util-defaults-mode-node": "^4.2.16", "@smithy/util-endpoints": "^3.2.5", "@smithy/util-middleware": "^4.2.5", "@smithy/util-retry": "^4.2.5", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-DjRJEYNnHUTu9kGPPQDTSXquwSEd6myKR4ssI4FaYLFhdT3ldWpj73yYt807H3tdmhS7vPmdVqchSJnjurUQAw=="],
 
+    "@browserbasehq/stagehand/@anthropic-ai/sdk/node-fetch/whatwg-url/tr46": ["tr46@0.0.3", "", {}, "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw=="],
+
+    "@browserbasehq/stagehand/@anthropic-ai/sdk/node-fetch/whatwg-url/webidl-conversions": ["webidl-conversions@3.0.1", "", {}, "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ=="],
+
    "@trigger.dev/core/socket.io/engine.io/@types/node/undici-types": ["undici-types@7.10.0", "", {}, "sha512-t5Fy/nfn+14LuOc2KNYg75vZqClpAiqscVvMygNnlsHBFpSXdJaYtXMcdNLpl/Qvc3P2cB3s6lOV51nqsFq4ag=="],
 
    "lint-staged/listr2/cli-truncate/string-width/strip-ansi": ["strip-ansi@7.1.2", "", { "dependencies": { "ansi-regex": "^6.0.1" } }, "sha512-gmBGslpoQJtgnMAvOVqGZpEz9dyoKTCzy2nfz/n8aIFhN/jCE/rCmcxabB6jOOHV+0WNnylOxaxBQPSvcWklhA=="],
3
packages/python-sdk/.gitignore
vendored
@@ -82,3 +82,6 @@ Thumbs.db
 .mypy_cache/
 .dmypy.json
 dmypy.json
+
+# uv
+uv.lock
@@ -43,24 +43,30 @@ SimStudioClient(api_key: str, base_url: str = "https://sim.ai")
 
 #### Methods
 
-##### execute_workflow(workflow_id, input_data=None, timeout=30.0)
+##### execute_workflow(workflow_id, input=None, *, timeout=30.0, stream=None, selected_outputs=None, async_execution=None)
 
 Execute a workflow with optional input data.
 
 ```python
-result = client.execute_workflow(
-    "workflow-id",
-    input_data={"message": "Hello, world!"},
-    timeout=30.0  # 30 seconds
-)
+# With dict input (spread at root level of request body)
+result = client.execute_workflow("workflow-id", {"message": "Hello, world!"})
+
+# With primitive input (wrapped as { input: value })
+result = client.execute_workflow("workflow-id", "NVDA")
+
+# With options (keyword-only arguments)
+result = client.execute_workflow("workflow-id", {"message": "Hello"}, timeout=60.0)
 ```
 
 **Parameters:**
 - `workflow_id` (str): The ID of the workflow to execute
-- `input_data` (dict, optional): Input data to pass to the workflow. File objects are automatically converted to base64.
-- `timeout` (float): Timeout in seconds (default: 30.0)
+- `input` (any, optional): Input data to pass to the workflow. Dicts are spread at the root level, primitives/lists are wrapped in `{ input: value }`. File objects are automatically converted to base64.
+- `timeout` (float, keyword-only): Timeout in seconds (default: 30.0)
+- `stream` (bool, keyword-only): Enable streaming responses
+- `selected_outputs` (list, keyword-only): Block outputs to stream (e.g., `["agent1.content"]`)
+- `async_execution` (bool, keyword-only): Execute asynchronously and return execution ID
 
-**Returns:** `WorkflowExecutionResult`
+**Returns:** `WorkflowExecutionResult` or `AsyncExecutionResult`
 
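As an aside (not part of the diff), here is a minimal sketch of how the new `input` argument is turned into a request body, mirroring the rule documented above; `build_request_body` is an illustrative helper, not an SDK function:

```python
from typing import Any

def build_request_body(workflow_input: Any) -> dict:
    """Mirror the documented rule: dicts spread at the root, everything else wraps."""
    if workflow_input is None:
        return {}
    if isinstance(workflow_input, dict):
        return dict(workflow_input)  # spread at root level of the request body
    return {"input": workflow_input}  # primitives and lists wrap in an 'input' field

assert build_request_body({"message": "Hello"}) == {"message": "Hello"}
assert build_request_body("NVDA") == {"input": "NVDA"}
assert build_request_body(["NVDA", "AAPL"]) == {"input": ["NVDA", "AAPL"]}
```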
 ##### get_workflow_status(workflow_id)
 
@@ -92,24 +98,89 @@ if is_ready:
 
 **Returns:** `bool`
 
-##### execute_workflow_sync(workflow_id, input_data=None, timeout=30.0)
+##### execute_workflow_sync(workflow_id, input=None, *, timeout=30.0, stream=None, selected_outputs=None)
 
-Execute a workflow and poll for completion (useful for long-running workflows).
+Execute a workflow synchronously (ensures non-async mode).
 
 ```python
-result = client.execute_workflow_sync(
+result = client.execute_workflow_sync("workflow-id", {"data": "some input"}, timeout=60.0)
+```
+
+**Parameters:**
+- `workflow_id` (str): The ID of the workflow to execute
+- `input` (any, optional): Input data to pass to the workflow
+- `timeout` (float, keyword-only): Timeout in seconds (default: 30.0)
+- `stream` (bool, keyword-only): Enable streaming responses
+- `selected_outputs` (list, keyword-only): Block outputs to stream (e.g., `["agent1.content"]`)
+
+**Returns:** `WorkflowExecutionResult`
+
+##### get_job_status(task_id)
+
+Get the status of an async job.
+
+```python
+status = client.get_job_status("task-id-from-async-execution")
+print("Job status:", status)
+```
+
+**Parameters:**
+- `task_id` (str): The task ID returned from async execution
+
+**Returns:** `dict`
+
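A rough usage sketch (not in the diff) combining async execution with `get_job_status`; the `"status"` key checked below is an assumption about the returned dict, shown only for illustration, and `client` is the instance set up in the earlier examples:

```python
import time

# Kick off an async execution, then poll the job until it settles.
execution = client.execute_workflow("workflow-id", {"message": "Hello"}, async_execution=True)

while True:
    status = client.get_job_status(execution.task_id)
    if status.get("status") in ("completed", "failed"):  # assumed key names
        break
    time.sleep(2)
```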
+##### execute_with_retry(workflow_id, input=None, *, timeout=30.0, stream=None, selected_outputs=None, async_execution=None, max_retries=3, initial_delay=1.0, max_delay=30.0, backoff_multiplier=2.0)
+
+Execute a workflow with automatic retry on rate limit errors.
+
+```python
+result = client.execute_with_retry(
     "workflow-id",
-    input_data={"data": "some input"},
-    timeout=60.0
+    {"message": "Hello"},
+    timeout=30.0,
+    max_retries=3,
+    initial_delay=1.0,
+    max_delay=30.0,
+    backoff_multiplier=2.0
 )
 ```
 
 **Parameters:**
 - `workflow_id` (str): The ID of the workflow to execute
-- `input_data` (dict, optional): Input data to pass to the workflow
-- `timeout` (float): Timeout for the initial request in seconds
+- `input` (any, optional): Input data to pass to the workflow
+- `timeout` (float, keyword-only): Timeout in seconds (default: 30.0)
+- `stream` (bool, keyword-only): Enable streaming responses
+- `selected_outputs` (list, keyword-only): Block outputs to stream
+- `async_execution` (bool, keyword-only): Execute asynchronously
+- `max_retries` (int, keyword-only): Maximum retry attempts (default: 3)
+- `initial_delay` (float, keyword-only): Initial delay in seconds (default: 1.0)
+- `max_delay` (float, keyword-only): Maximum delay in seconds (default: 30.0)
+- `backoff_multiplier` (float, keyword-only): Backoff multiplier (default: 2.0)
 
-**Returns:** `WorkflowExecutionResult`
+**Returns:** `WorkflowExecutionResult` or `AsyncExecutionResult`
 
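For intuition, the retry knobs above combine in the usual exponential-backoff fashion. The schedule below is an assumed sketch, not the SDK's actual implementation:

```python
def retry_delays(max_retries=3, initial_delay=1.0, max_delay=30.0, backoff_multiplier=2.0):
    """Assumed backoff schedule: each delay grows by the multiplier, capped at max_delay."""
    delay = initial_delay
    for _ in range(max_retries):
        yield min(delay, max_delay)
        delay *= backoff_multiplier

print(list(retry_delays()))  # [1.0, 2.0, 4.0] with the defaults
```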
+##### get_rate_limit_info()
+
+Get current rate limit information from the last API response.
+
+```python
+rate_info = client.get_rate_limit_info()
+if rate_info:
+    print("Remaining requests:", rate_info.remaining)
+```
+
+**Returns:** `RateLimitInfo` or `None`
+
+##### get_usage_limits()
+
+Get current usage limits and quota information.
+
+```python
+limits = client.get_usage_limits()
+print("Current usage:", limits.usage)
+```
+
+**Returns:** `UsageLimits`
+
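A small illustrative pattern (not in the diff) that ties these two helpers together, using only the documented `remaining` and `reset` fields; `client` is the instance from the earlier examples:

```python
# Check remaining quota before running another workflow.
rate_info = client.get_rate_limit_info()
if rate_info is None or rate_info.remaining > 0:
    result = client.execute_workflow("workflow-id", {"message": "Hello"})
else:
    print(f"Rate limited; window resets at {rate_info.reset}")
```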
 ##### set_api_key(api_key)
 
@@ -171,6 +242,39 @@ class SimStudioError(Exception):
         self.status = status
 ```
+
+### AsyncExecutionResult
+
+```python
+@dataclass
+class AsyncExecutionResult:
+    success: bool
+    task_id: str
+    status: str  # 'queued'
+    created_at: str
+    links: Dict[str, str]
+```
+
+### RateLimitInfo
+
+```python
+@dataclass
+class RateLimitInfo:
+    limit: int
+    remaining: int
+    reset: int
+    retry_after: Optional[int] = None
+```
+
+### UsageLimits
+
+```python
+@dataclass
+class UsageLimits:
+    success: bool
+    rate_limit: Dict[str, Any]
+    usage: Dict[str, Any]
+```
+
|
## Examples
|
||||||
|
|
||||||
### Basic Workflow Execution
|
### Basic Workflow Execution
|
||||||
@@ -191,7 +295,7 @@ def run_workflow():
     # Execute the workflow
     result = client.execute_workflow(
         "my-workflow-id",
-        input_data={
+        {
             "message": "Process this data",
             "user_id": "12345"
         }
@@ -298,7 +402,7 @@ client = SimStudioClient(api_key=os.getenv("SIM_API_KEY"))
 with open('document.pdf', 'rb') as f:
     result = client.execute_workflow(
         'workflow-id',
-        input_data={
+        {
             'documents': [f],  # Must match your workflow's "files" field name
             'instructions': 'Analyze this document'
         }
@@ -308,7 +412,7 @@ with open('document.pdf', 'rb') as f:
 with open('doc1.pdf', 'rb') as f1, open('doc2.pdf', 'rb') as f2:
     result = client.execute_workflow(
         'workflow-id',
-        input_data={
+        {
             'attachments': [f1, f2],  # Must match your workflow's "files" field name
             'query': 'Compare these documents'
         }
@@ -327,14 +431,14 @@ def execute_workflows_batch(workflow_data_pairs):
     """Execute multiple workflows with different input data."""
     results = []
 
-    for workflow_id, input_data in workflow_data_pairs:
+    for workflow_id, workflow_input in workflow_data_pairs:
         try:
             # Validate workflow before execution
             if not client.validate_workflow(workflow_id):
                 print(f"Skipping {workflow_id}: not deployed")
                 continue
 
-            result = client.execute_workflow(workflow_id, input_data)
+            result = client.execute_workflow(workflow_id, workflow_input)
             results.append({
                 "workflow_id": workflow_id,
                 "success": result.success,
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "simstudio-sdk"
-version = "0.1.1"
+version = "0.1.2"
 authors = [
     {name = "Sim", email = "help@sim.ai"},
 ]
@@ -13,7 +13,7 @@ import os
 import requests
 
 
-__version__ = "0.1.0"
+__version__ = "0.1.2"
 __all__ = [
     "SimStudioClient",
     "SimStudioError",
@@ -64,15 +64,6 @@ class RateLimitInfo:
     retry_after: Optional[int] = None
 
 
-@dataclass
-class RateLimitStatus:
-    """Rate limit status for sync/async requests."""
-    is_limited: bool
-    limit: int
-    remaining: int
-    reset_at: str
-
-
 @dataclass
 class UsageLimits:
     """Usage limits and quota information."""
@@ -115,7 +106,6 @@ class SimStudioClient:
         Recursively processes nested dicts and lists.
         """
         import base64
-        import io
 
         # Check if this is a file-like object
         if hasattr(value, 'read') and callable(value.read):
@@ -159,7 +149,8 @@ class SimStudioClient:
     def execute_workflow(
         self,
         workflow_id: str,
-        input_data: Optional[Dict[str, Any]] = None,
+        input: Optional[Any] = None,
+        *,
         timeout: float = 30.0,
         stream: Optional[bool] = None,
         selected_outputs: Optional[list] = None,
@@ -169,11 +160,13 @@ class SimStudioClient:
         Execute a workflow with optional input data.
         If async_execution is True, returns immediately with a task ID.
 
-        File objects in input_data will be automatically detected and converted to base64.
+        File objects in input will be automatically detected and converted to base64.
 
         Args:
             workflow_id: The ID of the workflow to execute
-            input_data: Input data to pass to the workflow (can include file-like objects)
+            input: Input data to pass to the workflow. Can be a dict (spread at root level),
+                primitive value (string, number, bool), or list (wrapped in 'input' field).
+                File-like objects within dicts are automatically converted to base64.
             timeout: Timeout in seconds (default: 30.0)
             stream: Enable streaming responses (default: None)
             selected_outputs: Block outputs to stream (e.g., ["agent1.content"])
@@ -193,8 +186,15 @@ class SimStudioClient:
             headers['X-Execution-Mode'] = 'async'
 
         try:
-            # Build JSON body - spread input at root level, then add API control parameters
-            body = input_data.copy() if input_data is not None else {}
+            # Build JSON body - spread dict inputs at root level, wrap primitives/lists in 'input' field
+            body = {}
+            if input is not None:
+                if isinstance(input, dict):
+                    # Dict input: spread at root level (matches curl/API behavior)
+                    body = input.copy()
+                else:
+                    # Primitive or list input: wrap in 'input' field
+                    body = {'input': input}
 
             # Convert any file objects in the input to base64 format
             body = self._convert_files_to_base64(body)
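One practical consequence of the keyword-only marker (`*`) introduced in these signatures, sketched below with a hypothetical client instance: options that previously could be passed positionally now have to be passed by keyword.

```python
from simstudio import SimStudioClient

client = SimStudioClient(api_key="your-api-key")  # placeholder key for illustration

# Keyword options work against the new keyword-only signature:
client.execute_workflow("workflow-id", {"message": "Hello"}, timeout=60.0)

# Passing timeout positionally now raises TypeError (the old signature accepted it):
# client.execute_workflow("workflow-id", {"message": "Hello"}, 60.0)
```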
@@ -320,20 +320,18 @@ class SimStudioClient:
     def execute_workflow_sync(
         self,
         workflow_id: str,
-        input_data: Optional[Dict[str, Any]] = None,
+        input: Optional[Any] = None,
+        *,
         timeout: float = 30.0,
         stream: Optional[bool] = None,
         selected_outputs: Optional[list] = None
     ) -> WorkflowExecutionResult:
         """
-        Execute a workflow and poll for completion (useful for long-running workflows).
+        Execute a workflow synchronously (ensures non-async mode).
 
-        Note: Currently, the API is synchronous, so this method just calls execute_workflow.
-        In the future, if async execution is added, this method can be enhanced.
 
         Args:
             workflow_id: The ID of the workflow to execute
-            input_data: Input data to pass to the workflow (can include file-like objects)
+            input: Input data to pass to the workflow (can include file-like objects)
             timeout: Timeout for the initial request in seconds
             stream: Enable streaming responses (default: None)
             selected_outputs: Block outputs to stream (e.g., ["agent1.content"])
@@ -344,9 +342,14 @@ class SimStudioClient:
         Raises:
             SimStudioError: If the workflow execution fails
         """
-        # For now, the API is synchronous, so we just execute directly
-        # In the future, if async execution is added, this method can be enhanced
-        return self.execute_workflow(workflow_id, input_data, timeout, stream, selected_outputs)
+        return self.execute_workflow(
+            workflow_id,
+            input,
+            timeout=timeout,
+            stream=stream,
+            selected_outputs=selected_outputs,
+            async_execution=False
+        )
 
     def set_api_key(self, api_key: str) -> None:
         """
@@ -410,7 +413,8 @@ class SimStudioClient:
     def execute_with_retry(
         self,
         workflow_id: str,
-        input_data: Optional[Dict[str, Any]] = None,
+        input: Optional[Any] = None,
+        *,
         timeout: float = 30.0,
         stream: Optional[bool] = None,
         selected_outputs: Optional[list] = None,
@@ -425,7 +429,7 @@ class SimStudioClient:
 
         Args:
             workflow_id: The ID of the workflow to execute
-            input_data: Input data to pass to the workflow (can include file-like objects)
+            input: Input data to pass to the workflow (can include file-like objects)
             timeout: Timeout in seconds
             stream: Enable streaming responses
             selected_outputs: Block outputs to stream
@@ -448,11 +452,11 @@ class SimStudioClient:
             try:
                 return self.execute_workflow(
                     workflow_id,
-                    input_data,
-                    timeout,
-                    stream,
-                    selected_outputs,
-                    async_execution
+                    input,
+                    timeout=timeout,
+                    stream=stream,
+                    selected_outputs=selected_outputs,
+                    async_execution=async_execution
                 )
             except SimStudioError as e:
                 if e.code != 'RATE_LIMIT_EXCEEDED':
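To make the retry condition concrete, a hedged sketch of handling the same `RATE_LIMIT_EXCEEDED` code manually, which is roughly what `execute_with_retry` automates; the key value is the one checked in the diff above, and the client setup is illustrative:

```python
from simstudio import SimStudioClient, SimStudioError

client = SimStudioClient(api_key="your-api-key")  # placeholder key

try:
    result = client.execute_workflow("workflow-id", {"message": "Hello"})
except SimStudioError as e:
    if e.code == 'RATE_LIMIT_EXCEEDED':
        print("Rate limited; retry later or use execute_with_retry()")
    else:
        raise  # anything else is not retried
```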
|
|||||||
@@ -91,11 +91,9 @@ def test_context_manager(mock_close):
|
|||||||
"""Test SimStudioClient as context manager."""
|
"""Test SimStudioClient as context manager."""
|
||||||
with SimStudioClient(api_key="test-api-key") as client:
|
with SimStudioClient(api_key="test-api-key") as client:
|
||||||
assert client.api_key == "test-api-key"
|
assert client.api_key == "test-api-key"
|
||||||
# Should close without error
|
|
||||||
mock_close.assert_called_once()
|
mock_close.assert_called_once()
|
||||||
|
|
||||||
|
|
||||||
# Tests for async execution
|
|
||||||
@patch('simstudio.requests.Session.post')
|
@patch('simstudio.requests.Session.post')
|
||||||
def test_async_execution_returns_task_id(mock_post):
|
def test_async_execution_returns_task_id(mock_post):
|
||||||
"""Test async execution returns AsyncExecutionResult."""
|
"""Test async execution returns AsyncExecutionResult."""
|
||||||
@@ -115,7 +113,7 @@ def test_async_execution_returns_task_id(mock_post):
|
|||||||
client = SimStudioClient(api_key="test-api-key")
|
client = SimStudioClient(api_key="test-api-key")
|
||||||
result = client.execute_workflow(
|
result = client.execute_workflow(
|
||||||
"workflow-id",
|
"workflow-id",
|
||||||
input_data={"message": "Hello"},
|
{"message": "Hello"},
|
||||||
async_execution=True
|
async_execution=True
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -124,7 +122,6 @@ def test_async_execution_returns_task_id(mock_post):
|
|||||||
assert result.status == "queued"
|
assert result.status == "queued"
|
||||||
assert result.links["status"] == "/api/jobs/task-123"
|
assert result.links["status"] == "/api/jobs/task-123"
|
||||||
|
|
||||||
# Verify X-Execution-Mode header was set
|
|
||||||
call_args = mock_post.call_args
|
call_args = mock_post.call_args
|
||||||
assert call_args[1]["headers"]["X-Execution-Mode"] == "async"
|
assert call_args[1]["headers"]["X-Execution-Mode"] == "async"
|
||||||
|
|
||||||
@@ -146,7 +143,7 @@ def test_sync_execution_returns_result(mock_post):
|
|||||||
client = SimStudioClient(api_key="test-api-key")
|
client = SimStudioClient(api_key="test-api-key")
|
||||||
result = client.execute_workflow(
|
result = client.execute_workflow(
|
||||||
"workflow-id",
|
"workflow-id",
|
||||||
input_data={"message": "Hello"},
|
{"message": "Hello"},
|
||||||
async_execution=False
|
async_execution=False
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -166,13 +163,12 @@ def test_async_header_not_set_when_false(mock_post):
|
|||||||
mock_post.return_value = mock_response
|
mock_post.return_value = mock_response
|
||||||
|
|
||||||
client = SimStudioClient(api_key="test-api-key")
|
client = SimStudioClient(api_key="test-api-key")
|
||||||
client.execute_workflow("workflow-id", input_data={"message": "Hello"})
|
client.execute_workflow("workflow-id", {"message": "Hello"})
|
||||||
|
|
||||||
call_args = mock_post.call_args
|
call_args = mock_post.call_args
|
||||||
assert "X-Execution-Mode" not in call_args[1]["headers"]
|
assert "X-Execution-Mode" not in call_args[1]["headers"]
|
||||||
|
|
||||||
|
|
||||||
# Tests for job status
|
|
||||||
@patch('simstudio.requests.Session.get')
|
@patch('simstudio.requests.Session.get')
|
||||||
def test_get_job_status_success(mock_get):
|
def test_get_job_status_success(mock_get):
|
||||||
"""Test getting job status."""
|
"""Test getting job status."""
|
||||||
@@ -222,7 +218,6 @@ def test_get_job_status_not_found(mock_get):
|
|||||||
assert "Job not found" in str(exc_info.value)
|
assert "Job not found" in str(exc_info.value)
|
||||||
|
|
||||||
|
|
||||||
# Tests for retry with rate limiting
|
|
||||||
@patch('simstudio.requests.Session.post')
|
@patch('simstudio.requests.Session.post')
|
||||||
@patch('simstudio.time.sleep')
|
@patch('simstudio.time.sleep')
|
||||||
def test_execute_with_retry_success_first_attempt(mock_sleep, mock_post):
|
def test_execute_with_retry_success_first_attempt(mock_sleep, mock_post):
|
||||||
@@ -238,7 +233,7 @@ def test_execute_with_retry_success_first_attempt(mock_sleep, mock_post):
|
|||||||
mock_post.return_value = mock_response
|
mock_post.return_value = mock_response
|
||||||
|
|
||||||
client = SimStudioClient(api_key="test-api-key")
|
client = SimStudioClient(api_key="test-api-key")
|
||||||
result = client.execute_with_retry("workflow-id", input_data={"message": "test"})
|
result = client.execute_with_retry("workflow-id", {"message": "test"})
|
||||||
|
|
||||||
assert result.success is True
|
assert result.success is True
|
||||||
assert mock_post.call_count == 1
|
assert mock_post.call_count == 1
|
||||||
@@ -278,7 +273,7 @@ def test_execute_with_retry_retries_on_rate_limit(mock_sleep, mock_post):
|
|||||||
client = SimStudioClient(api_key="test-api-key")
|
client = SimStudioClient(api_key="test-api-key")
|
||||||
result = client.execute_with_retry(
|
result = client.execute_with_retry(
|
||||||
"workflow-id",
|
"workflow-id",
|
||||||
input_data={"message": "test"},
|
{"message": "test"},
|
||||||
max_retries=3,
|
max_retries=3,
|
||||||
initial_delay=0.01
|
initial_delay=0.01
|
||||||
)
|
)
|
||||||
@@ -307,7 +302,7 @@ def test_execute_with_retry_max_retries_exceeded(mock_sleep, mock_post):
|
|||||||
with pytest.raises(SimStudioError) as exc_info:
|
with pytest.raises(SimStudioError) as exc_info:
|
||||||
client.execute_with_retry(
|
client.execute_with_retry(
|
||||||
"workflow-id",
|
"workflow-id",
|
||||||
input_data={"message": "test"},
|
{"message": "test"},
|
||||||
max_retries=2,
|
max_retries=2,
|
||||||
initial_delay=0.01
|
initial_delay=0.01
|
||||||
)
|
)
|
||||||
@@ -333,13 +328,12 @@ def test_execute_with_retry_no_retry_on_other_errors(mock_post):
|
|||||||
client = SimStudioClient(api_key="test-api-key")
|
client = SimStudioClient(api_key="test-api-key")
|
||||||
|
|
||||||
with pytest.raises(SimStudioError) as exc_info:
|
with pytest.raises(SimStudioError) as exc_info:
|
||||||
client.execute_with_retry("workflow-id", input_data={"message": "test"})
|
client.execute_with_retry("workflow-id", {"message": "test"})
|
||||||
|
|
||||||
assert "Server error" in str(exc_info.value)
|
assert "Server error" in str(exc_info.value)
|
||||||
assert mock_post.call_count == 1 # No retries
|
assert mock_post.call_count == 1 # No retries
|
||||||
|
|
||||||
|
|
||||||
# Tests for rate limit info
|
|
||||||
def test_get_rate_limit_info_returns_none_initially():
|
def test_get_rate_limit_info_returns_none_initially():
|
||||||
"""Test rate limit info is None before any API calls."""
|
"""Test rate limit info is None before any API calls."""
|
||||||
client = SimStudioClient(api_key="test-api-key")
|
client = SimStudioClient(api_key="test-api-key")
|
||||||
@@ -362,7 +356,7 @@ def test_get_rate_limit_info_after_api_call(mock_post):
|
|||||||
mock_post.return_value = mock_response
|
mock_post.return_value = mock_response
|
||||||
|
|
||||||
client = SimStudioClient(api_key="test-api-key")
|
client = SimStudioClient(api_key="test-api-key")
|
||||||
client.execute_workflow("workflow-id", input_data={})
|
client.execute_workflow("workflow-id", {})
|
||||||
|
|
||||||
info = client.get_rate_limit_info()
|
info = client.get_rate_limit_info()
|
||||||
assert info is not None
|
assert info is not None
|
||||||
@@ -371,7 +365,6 @@ def test_get_rate_limit_info_after_api_call(mock_post):
|
|||||||
assert info.reset == 1704067200
|
assert info.reset == 1704067200
|
||||||
|
|
||||||
|
|
||||||
# Tests for usage limits
|
|
||||||
@patch('simstudio.requests.Session.get')
|
 @patch('simstudio.requests.Session.get')
 def test_get_usage_limits_success(mock_get):
     """Test getting usage limits."""
@@ -435,7 +428,6 @@ def test_get_usage_limits_unauthorized(mock_get):
     assert "Invalid API key" in str(exc_info.value)
 
 
-# Tests for streaming with selectedOutputs
 @patch('simstudio.requests.Session.post')
 def test_execute_workflow_with_stream_and_selected_outputs(mock_post):
     """Test execution with stream and selectedOutputs parameters."""
@@ -449,7 +441,7 @@ def test_execute_workflow_with_stream_and_selected_outputs(mock_post):
     client = SimStudioClient(api_key="test-api-key")
     client.execute_workflow(
         "workflow-id",
-        input_data={"message": "test"},
+        {"message": "test"},
         stream=True,
         selected_outputs=["agent1.content", "agent2.content"]
     )
@@ -460,3 +452,84 @@ def test_execute_workflow_with_stream_and_selected_outputs(mock_post):
     assert request_body["message"] == "test"
     assert request_body["stream"] is True
     assert request_body["selectedOutputs"] == ["agent1.content", "agent2.content"]
+
+
+# Tests for primitive and list inputs
+@patch('simstudio.requests.Session.post')
+def test_execute_workflow_with_string_input(mock_post):
+    """Test execution with primitive string input wraps in input field."""
+    mock_response = Mock()
+    mock_response.ok = True
+    mock_response.status_code = 200
+    mock_response.json.return_value = {"success": True, "output": {}}
+    mock_response.headers.get.return_value = None
+    mock_post.return_value = mock_response
+
+    client = SimStudioClient(api_key="test-api-key")
+    client.execute_workflow("workflow-id", "NVDA")
+
+    call_args = mock_post.call_args
+    request_body = call_args[1]["json"]
+
+    assert request_body["input"] == "NVDA"
+    assert "0" not in request_body  # Should not spread string characters
+
+
+@patch('simstudio.requests.Session.post')
+def test_execute_workflow_with_number_input(mock_post):
+    """Test execution with primitive number input wraps in input field."""
+    mock_response = Mock()
+    mock_response.ok = True
+    mock_response.status_code = 200
+    mock_response.json.return_value = {"success": True, "output": {}}
+    mock_response.headers.get.return_value = None
+    mock_post.return_value = mock_response
+
+    client = SimStudioClient(api_key="test-api-key")
+    client.execute_workflow("workflow-id", 42)
+
+    call_args = mock_post.call_args
+    request_body = call_args[1]["json"]
+
+    assert request_body["input"] == 42
+
+
+@patch('simstudio.requests.Session.post')
+def test_execute_workflow_with_list_input(mock_post):
+    """Test execution with list input wraps in input field."""
+    mock_response = Mock()
+    mock_response.ok = True
+    mock_response.status_code = 200
+    mock_response.json.return_value = {"success": True, "output": {}}
+    mock_response.headers.get.return_value = None
+    mock_post.return_value = mock_response
+
+    client = SimStudioClient(api_key="test-api-key")
+    client.execute_workflow("workflow-id", ["NVDA", "AAPL", "GOOG"])
+
+    call_args = mock_post.call_args
+    request_body = call_args[1]["json"]
+
+    assert request_body["input"] == ["NVDA", "AAPL", "GOOG"]
+    assert "0" not in request_body  # Should not spread list
+
+
+@patch('simstudio.requests.Session.post')
+def test_execute_workflow_with_dict_input_spreads_at_root(mock_post):
+    """Test execution with dict input spreads at root level."""
+    mock_response = Mock()
+    mock_response.ok = True
+    mock_response.status_code = 200
+    mock_response.json.return_value = {"success": True, "output": {}}
+    mock_response.headers.get.return_value = None
+    mock_post.return_value = mock_response
+
+    client = SimStudioClient(api_key="test-api-key")
+    client.execute_workflow("workflow-id", {"ticker": "NVDA", "quantity": 100})
+
+    call_args = mock_post.call_args
+    request_body = call_args[1]["json"]
+
+    assert request_body["ticker"] == "NVDA"
+    assert request_body["quantity"] == 100
+    assert "input" not in request_body  # Should not wrap in input field
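
The Python client tests above pin down the request-body convention this change introduces: a dict is spread at the root of the JSON body, while strings, numbers, and lists are wrapped under an `input` key. A minimal TypeScript sketch of that rule, using a hypothetical standalone helper (`buildExecuteBody` is not an SDK export, just an illustration of the behaviour the tests assert):

```typescript
// Sketch of the shared input-wrapping rule; buildExecuteBody is a hypothetical name.
function buildExecuteBody(input?: unknown): Record<string, unknown> {
  if (input === undefined || input === null) {
    return {} // no input -> empty body
  }
  if (typeof input === 'object' && !Array.isArray(input)) {
    return { ...(input as Record<string, unknown>) } // objects spread at the root
  }
  return { input } // primitives and arrays are wrapped
}

console.log(buildExecuteBody('NVDA'))                       // { input: 'NVDA' }
console.log(buildExecuteBody(['NVDA', 'AAPL']))             // { input: ['NVDA', 'AAPL'] }
console.log(buildExecuteBody({ ticker: 'NVDA', qty: 100 })) // { ticker: 'NVDA', qty: 100 }
```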
@@ -71,6 +71,19 @@ vi.mock('@/executor/path')
 vi.mock('@/executor/resolver', () => ({
   InputResolver: vi.fn(),
 }))
+vi.mock('@/executor/utils/http', () => ({
+  buildAuthHeaders: vi.fn().mockResolvedValue({ 'Content-Type': 'application/json' }),
+  buildAPIUrl: vi.fn((path: string) => new URL(path, 'http://localhost:3000')),
+  extractAPIErrorMessage: vi.fn(async (response: Response) => {
+    const defaultMessage = `API request failed with status ${response.status}`
+    try {
+      const errorData = await response.json()
+      return errorData.error || defaultMessage
+    } catch {
+      return defaultMessage
+    }
+  }),
+}))
 
 // Specific block utilities
 vi.mock('@/blocks/blocks/router')
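
The mocked `extractAPIErrorMessage` above surfaces the `error` field of a JSON error body and otherwise falls back to a generic status-based message. A self-contained sketch of that fallback behaviour (a standalone function for illustration, not the executor's actual helper):

```typescript
// Standalone restatement of the fallback mirrored by the mock above.
async function errorMessageFrom(response: Response): Promise<string> {
  const defaultMessage = `API request failed with status ${response.status}`
  try {
    const errorData = await response.json()
    return errorData.error || defaultMessage
  } catch {
    return defaultMessage
  }
}

// JSON body with an `error` field -> that message is surfaced.
await errorMessageFrom(new Response(JSON.stringify({ error: 'workflow not found' }), { status: 404 }))
// Non-JSON body -> "API request failed with status 500".
await errorMessageFrom(new Response('oops', { status: 500 }))
```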
@@ -47,24 +47,35 @@ new SimStudioClient(config: SimStudioConfig)
 
 #### Methods
 
-##### executeWorkflow(workflowId, options?)
+##### executeWorkflow(workflowId, input?, options?)
 
 Execute a workflow with optional input data.
 
 ```typescript
+// With object input (spread at root level of request body)
 const result = await client.executeWorkflow('workflow-id', {
-  input: { message: 'Hello, world!' },
-  timeout: 30000 // 30 seconds
+  message: 'Hello, world!'
+});
+
+// With primitive input (wrapped as { input: value })
+const result = await client.executeWorkflow('workflow-id', 'NVDA');
+
+// With options
+const result = await client.executeWorkflow('workflow-id', { message: 'Hello' }, {
+  timeout: 60000
 });
 ```
 
 **Parameters:**
 - `workflowId` (string): The ID of the workflow to execute
+- `input` (any, optional): Input data to pass to the workflow. Objects are spread at the root level, primitives/arrays are wrapped in `{ input: value }`. File objects are automatically converted to base64.
 - `options` (ExecutionOptions, optional):
-  - `input` (any): Input data to pass to the workflow. File objects are automatically converted to base64.
   - `timeout` (number): Timeout in milliseconds (default: 30000)
+  - `stream` (boolean): Enable streaming responses
+  - `selectedOutputs` (string[]): Block outputs to stream (e.g., `["agent1.content"]`)
+  - `async` (boolean): Execute asynchronously and return execution ID
 
-**Returns:** `Promise<WorkflowExecutionResult>`
+**Returns:** `Promise<WorkflowExecutionResult | AsyncExecutionResult>`
 
 ##### getWorkflowStatus(workflowId)
 
@@ -96,25 +107,89 @@ if (isReady) {
 
 **Returns:** `Promise<boolean>`
 
-##### executeWorkflowSync(workflowId, options?)
+##### executeWorkflowSync(workflowId, input?, options?)
 
 Execute a workflow and poll for completion (useful for long-running workflows).
 
 ```typescript
-const result = await client.executeWorkflowSync('workflow-id', {
-  input: { data: 'some input' },
+const result = await client.executeWorkflowSync('workflow-id', { data: 'some input' }, {
   timeout: 60000
 });
 ```
 
 **Parameters:**
 - `workflowId` (string): The ID of the workflow to execute
+- `input` (any, optional): Input data to pass to the workflow
 - `options` (ExecutionOptions, optional):
-  - `input` (any): Input data to pass to the workflow
   - `timeout` (number): Timeout for the initial request in milliseconds
 
 **Returns:** `Promise<WorkflowExecutionResult>`
 
+##### getJobStatus(taskId)
+
+Get the status of an async job.
+
+```typescript
+const status = await client.getJobStatus('task-id-from-async-execution');
+console.log('Job status:', status);
+```
+
+**Parameters:**
+- `taskId` (string): The task ID returned from async execution
+
+**Returns:** `Promise<any>`
+
+##### executeWithRetry(workflowId, input?, options?, retryOptions?)
+
+Execute a workflow with automatic retry on rate limit errors.
+
+```typescript
+const result = await client.executeWithRetry('workflow-id', { message: 'Hello' }, {
+  timeout: 30000
+}, {
+  maxRetries: 3,
+  initialDelay: 1000,
+  maxDelay: 30000,
+  backoffMultiplier: 2
+});
+```
+
+**Parameters:**
+- `workflowId` (string): The ID of the workflow to execute
+- `input` (any, optional): Input data to pass to the workflow
+- `options` (ExecutionOptions, optional): Execution options
+- `retryOptions` (RetryOptions, optional):
+  - `maxRetries` (number): Maximum retry attempts (default: 3)
+  - `initialDelay` (number): Initial delay in ms (default: 1000)
+  - `maxDelay` (number): Maximum delay in ms (default: 30000)
+  - `backoffMultiplier` (number): Backoff multiplier (default: 2)
+
+**Returns:** `Promise<WorkflowExecutionResult | AsyncExecutionResult>`
+
+##### getRateLimitInfo()
+
+Get current rate limit information from the last API response.
+
+```typescript
+const rateInfo = client.getRateLimitInfo();
+if (rateInfo) {
+  console.log('Remaining requests:', rateInfo.remaining);
+}
+```
+
+**Returns:** `RateLimitInfo | null`
+
+##### getUsageLimits()
+
+Get current usage limits and quota information.
+
+```typescript
+const limits = await client.getUsageLimits();
+console.log('Current usage:', limits.usage);
+```
+
+**Returns:** `Promise<UsageLimits>`
+
 ##### setApiKey(apiKey)
 
 Update the API key.
@@ -170,6 +245,81 @@ class SimStudioError extends Error {
 }
 ```
 
+### AsyncExecutionResult
+
+```typescript
+interface AsyncExecutionResult {
+  success: boolean;
+  taskId: string;
+  status: 'queued';
+  createdAt: string;
+  links: {
+    status: string;
+  };
+}
+```
+
+### RateLimitInfo
+
+```typescript
+interface RateLimitInfo {
+  limit: number;
+  remaining: number;
+  reset: number;
+  retryAfter?: number;
+}
+```
+
+### UsageLimits
+
+```typescript
+interface UsageLimits {
+  success: boolean;
+  rateLimit: {
+    sync: {
+      isLimited: boolean;
+      limit: number;
+      remaining: number;
+      resetAt: string;
+    };
+    async: {
+      isLimited: boolean;
+      limit: number;
+      remaining: number;
+      resetAt: string;
+    };
+    authType: string;
+  };
+  usage: {
+    currentPeriodCost: number;
+    limit: number;
+    plan: string;
+  };
+}
+```
+
+### ExecutionOptions
+
+```typescript
+interface ExecutionOptions {
+  timeout?: number;
+  stream?: boolean;
+  selectedOutputs?: string[];
+  async?: boolean;
+}
+```
+
+### RetryOptions
+
+```typescript
+interface RetryOptions {
+  maxRetries?: number;
+  initialDelay?: number;
+  maxDelay?: number;
+  backoffMultiplier?: number;
+}
+```
+
 ## Examples
 
 ### Basic Workflow Execution
@@ -191,10 +341,8 @@ async function runWorkflow() {
 
     // Execute the workflow
     const result = await client.executeWorkflow('my-workflow-id', {
-      input: {
       message: 'Process this data',
       userId: '12345'
-      }
    });
 
     if (result.success) {
@@ -298,22 +446,18 @@ const file = new File([fileBuffer], 'document.pdf', { type: 'application/pdf' })
 
 // Include files under the field name from your API trigger's input format
 const result = await client.executeWorkflow('workflow-id', {
-  input: {
   documents: [file], // Field name must match your API trigger's file input field
   instructions: 'Process this document'
-  }
 });
 
 // Browser: From file input
 const handleFileUpload = async (event: Event) => {
-  const input = event.target as HTMLInputElement;
-  const files = Array.from(input.files || []);
+  const inputEl = event.target as HTMLInputElement;
+  const files = Array.from(inputEl.files || []);
 
   const result = await client.executeWorkflow('workflow-id', {
-    input: {
     attachments: files, // Field name must match your API trigger's file input field
     query: 'Analyze these files'
-    }
   });
 };
 ```
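
The new reference entries document async execution (the `async: true` option returning an `AsyncExecutionResult`) and `getJobStatus(taskId)` separately. A sketch of how they would typically be combined, assuming the package's named `SimStudioClient` export and treating the polled status fields and interval as illustrative, since `getJobStatus` is only typed as `Promise<any>`:

```typescript
import { SimStudioClient } from 'simstudio-ts-sdk';

// The environment variable name is an assumption for this example.
const client = new SimStudioClient({ apiKey: process.env.SIM_API_KEY ?? '' });

async function runAsyncWorkflow() {
  // Queue the workflow instead of waiting for the result.
  const queued = await client.executeWorkflow('workflow-id', { message: 'Hello' }, { async: true });

  if ('taskId' in queued) {
    // Poll the async job; the 2s interval and intermediate status values are illustrative.
    let status = await client.getJobStatus(queued.taskId);
    while (status?.status === 'queued' || status?.status === 'processing') {
      await new Promise((resolve) => setTimeout(resolve, 2000));
      status = await client.getJobStatus(queued.taskId);
    }
    console.log('Final job state:', status);
  }
}

runAsyncWorkflow().catch(console.error);
```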
@@ -1,6 +1,6 @@
 {
   "name": "simstudio-ts-sdk",
-  "version": "0.1.1",
+  "version": "0.1.2",
   "description": "Sim SDK - Execute workflows programmatically",
   "type": "module",
   "exports": {
@@ -119,10 +119,11 @@ describe('SimStudioClient', () => {
       }
       vi.mocked(fetch.default).mockResolvedValue(mockResponse as any)
 
-      const result = await client.executeWorkflow('workflow-id', {
-        input: { message: 'Hello' },
-        async: true,
-      })
+      const result = await client.executeWorkflow(
+        'workflow-id',
+        { message: 'Hello' },
+        { async: true }
+      )
 
       expect(result).toHaveProperty('taskId', 'task-123')
       expect(result).toHaveProperty('status', 'queued')
@@ -152,10 +153,11 @@ describe('SimStudioClient', () => {
       }
       vi.mocked(fetch.default).mockResolvedValue(mockResponse as any)
 
-      const result = await client.executeWorkflow('workflow-id', {
-        input: { message: 'Hello' },
-        async: false,
-      })
+      const result = await client.executeWorkflow(
+        'workflow-id',
+        { message: 'Hello' },
+        { async: false }
+      )
 
       expect(result).toHaveProperty('success', true)
       expect(result).toHaveProperty('output')
@@ -177,9 +179,7 @@ describe('SimStudioClient', () => {
       }
       vi.mocked(fetch.default).mockResolvedValue(mockResponse as any)
 
-      await client.executeWorkflow('workflow-id', {
-        input: { message: 'Hello' },
-      })
+      await client.executeWorkflow('workflow-id', { message: 'Hello' })
 
       const calls = vi.mocked(fetch.default).mock.calls
       expect(calls[0][1]?.headers).not.toHaveProperty('X-Execution-Mode')
@@ -256,9 +256,7 @@ describe('SimStudioClient', () => {
       }
       vi.mocked(fetch.default).mockResolvedValue(mockResponse as any)
 
-      const result = await client.executeWithRetry('workflow-id', {
-        input: { message: 'test' },
-      })
+      const result = await client.executeWithRetry('workflow-id', { message: 'test' })
 
       expect(result).toHaveProperty('success', true)
       expect(vi.mocked(fetch.default)).toHaveBeenCalledTimes(1)
@@ -305,7 +303,8 @@ describe('SimStudioClient', () => {
 
       const result = await client.executeWithRetry(
         'workflow-id',
-        { input: { message: 'test' } },
+        { message: 'test' },
+        {},
         { maxRetries: 3, initialDelay: 10 }
       )
 
@@ -336,7 +335,8 @@ describe('SimStudioClient', () => {
       await expect(
         client.executeWithRetry(
           'workflow-id',
-          { input: { message: 'test' } },
+          { message: 'test' },
+          {},
           { maxRetries: 2, initialDelay: 10 }
         )
       ).rejects.toThrow('Rate limit exceeded')
@@ -361,9 +361,9 @@ describe('SimStudioClient', () => {
 
       vi.mocked(fetch.default).mockResolvedValue(mockResponse as any)
 
-      await expect(
-        client.executeWithRetry('workflow-id', { input: { message: 'test' } })
-      ).rejects.toThrow('Server error')
+      await expect(client.executeWithRetry('workflow-id', { message: 'test' })).rejects.toThrow(
+        'Server error'
+      )
 
       expect(vi.mocked(fetch.default)).toHaveBeenCalledTimes(1) // No retries
     })
@@ -393,7 +393,7 @@ describe('SimStudioClient', () => {
 
      vi.mocked(fetch.default).mockResolvedValue(mockResponse as any)
 
-      await client.executeWorkflow('workflow-id', { input: {} })
+      await client.executeWorkflow('workflow-id', {})
 
      const info = client.getRateLimitInfo()
      expect(info).not.toBeNull()
@@ -490,11 +490,11 @@ describe('SimStudioClient', () => {
 
      vi.mocked(fetch.default).mockResolvedValue(mockResponse as any)
 
-      await client.executeWorkflow('workflow-id', {
-        input: { message: 'test' },
-        stream: true,
-        selectedOutputs: ['agent1.content', 'agent2.content'],
-      })
+      await client.executeWorkflow(
+        'workflow-id',
+        { message: 'test' },
+        { stream: true, selectedOutputs: ['agent1.content', 'agent2.content'] }
+      )
 
      const calls = vi.mocked(fetch.default).mock.calls
      const requestBody = JSON.parse(calls[0][1]?.body as string)
@@ -505,6 +505,134 @@ describe('SimStudioClient', () => {
       expect(requestBody.selectedOutputs).toEqual(['agent1.content', 'agent2.content'])
     })
   })
+
+  describe('executeWorkflow - primitive and array inputs', () => {
+    it('should wrap primitive string input in input field', async () => {
+      const fetch = await import('node-fetch')
+      const mockResponse = {
+        ok: true,
+        status: 200,
+        json: vi.fn().mockResolvedValue({
+          success: true,
+          output: {},
+        }),
+        headers: {
+          get: vi.fn().mockReturnValue(null),
+        },
+      }
+
+      vi.mocked(fetch.default).mockResolvedValue(mockResponse as any)
+
+      await client.executeWorkflow('workflow-id', 'NVDA')
+
+      const calls = vi.mocked(fetch.default).mock.calls
+      const requestBody = JSON.parse(calls[0][1]?.body as string)
+
+      expect(requestBody).toHaveProperty('input', 'NVDA')
+      expect(requestBody).not.toHaveProperty('0') // Should not spread string characters
+    })
+
+    it('should wrap primitive number input in input field', async () => {
+      const fetch = await import('node-fetch')
+      const mockResponse = {
+        ok: true,
+        status: 200,
+        json: vi.fn().mockResolvedValue({
+          success: true,
+          output: {},
+        }),
+        headers: {
+          get: vi.fn().mockReturnValue(null),
+        },
+      }
+
+      vi.mocked(fetch.default).mockResolvedValue(mockResponse as any)
+
+      await client.executeWorkflow('workflow-id', 42)
+
+      const calls = vi.mocked(fetch.default).mock.calls
+      const requestBody = JSON.parse(calls[0][1]?.body as string)
+
+      expect(requestBody).toHaveProperty('input', 42)
+    })
+
+    it('should wrap array input in input field', async () => {
+      const fetch = await import('node-fetch')
+      const mockResponse = {
+        ok: true,
+        status: 200,
+        json: vi.fn().mockResolvedValue({
+          success: true,
+          output: {},
+        }),
+        headers: {
+          get: vi.fn().mockReturnValue(null),
+        },
+      }
+
+      vi.mocked(fetch.default).mockResolvedValue(mockResponse as any)
+
+      await client.executeWorkflow('workflow-id', ['NVDA', 'AAPL', 'GOOG'])
+
+      const calls = vi.mocked(fetch.default).mock.calls
+      const requestBody = JSON.parse(calls[0][1]?.body as string)
+
+      expect(requestBody).toHaveProperty('input')
+      expect(requestBody.input).toEqual(['NVDA', 'AAPL', 'GOOG'])
+      expect(requestBody).not.toHaveProperty('0') // Should not spread array
+    })
+
+    it('should spread object input at root level', async () => {
+      const fetch = await import('node-fetch')
+      const mockResponse = {
+        ok: true,
+        status: 200,
+        json: vi.fn().mockResolvedValue({
+          success: true,
+          output: {},
+        }),
+        headers: {
+          get: vi.fn().mockReturnValue(null),
+        },
+      }
+
+      vi.mocked(fetch.default).mockResolvedValue(mockResponse as any)
+
+      await client.executeWorkflow('workflow-id', { ticker: 'NVDA', quantity: 100 })
+
+      const calls = vi.mocked(fetch.default).mock.calls
+      const requestBody = JSON.parse(calls[0][1]?.body as string)
+
+      expect(requestBody).toHaveProperty('ticker', 'NVDA')
+      expect(requestBody).toHaveProperty('quantity', 100)
+      expect(requestBody).not.toHaveProperty('input') // Should not wrap in input field
+    })
+
+    it('should handle null input as no input (empty body)', async () => {
+      const fetch = await import('node-fetch')
+      const mockResponse = {
+        ok: true,
+        status: 200,
+        json: vi.fn().mockResolvedValue({
+          success: true,
+          output: {},
+        }),
+        headers: {
+          get: vi.fn().mockReturnValue(null),
+        },
+      }
+
+      vi.mocked(fetch.default).mockResolvedValue(mockResponse as any)
+
+      await client.executeWorkflow('workflow-id', null)
+
+      const calls = vi.mocked(fetch.default).mock.calls
+      const requestBody = JSON.parse(calls[0][1]?.body as string)
+
+      // null treated as "no input" - sends empty body (consistent with Python SDK)
+      expect(requestBody).toEqual({})
+    })
+  })
 })
 
 describe('SimStudioError', () => {
@@ -26,7 +26,6 @@ export interface WorkflowStatus {
 }
 
 export interface ExecutionOptions {
-  input?: any
   timeout?: number
   stream?: boolean
   selectedOutputs?: string[]
@@ -117,10 +116,6 @@ export class SimStudioClient {
     this.baseUrl = normalizeBaseUrl(config.baseUrl || 'https://sim.ai')
   }
 
-  /**
-   * Execute a workflow with optional input data
-   * If async is true, returns immediately with a task ID
-   */
   /**
    * Convert File objects in input to API format (base64)
    * Recursively processes nested objects and arrays
@@ -170,20 +165,25 @@ export class SimStudioClient {
     return value
   }
 
+  /**
+   * Execute a workflow with optional input data
+   * @param workflowId - The ID of the workflow to execute
+   * @param input - Input data to pass to the workflow (object, primitive, or array)
+   * @param options - Execution options (timeout, stream, async, etc.)
+   */
   async executeWorkflow(
     workflowId: string,
+    input?: any,
     options: ExecutionOptions = {}
   ): Promise<WorkflowExecutionResult | AsyncExecutionResult> {
     const url = `${this.baseUrl}/api/workflows/${workflowId}/execute`
-    const { input, timeout = 30000, stream, selectedOutputs, async } = options
+    const { timeout = 30000, stream, selectedOutputs, async } = options
 
     try {
-      // Create a timeout promise
       const timeoutPromise = new Promise<never>((_, reject) => {
         setTimeout(() => reject(new Error('TIMEOUT')), timeout)
       })
 
-      // Build headers - async execution uses X-Execution-Mode header
       const headers: Record<string, string> = {
         'Content-Type': 'application/json',
         'X-API-Key': this.apiKey,
@@ -192,10 +192,15 @@ export class SimStudioClient {
         headers['X-Execution-Mode'] = 'async'
       }
 
-      // Build JSON body - spread input at root level, then add API control parameters
-      let jsonBody: any = input !== undefined ? { ...input } : {}
+      let jsonBody: any = {}
+      if (input !== undefined && input !== null) {
+        if (typeof input === 'object' && input !== null && !Array.isArray(input)) {
+          jsonBody = { ...input }
+        } else {
+          jsonBody = { input }
+        }
+      }
 
-      // Convert any File objects in the input to base64 format
       jsonBody = await this.convertFilesToBase64(jsonBody)
 
       if (stream !== undefined) {
@@ -213,10 +218,8 @@ export class SimStudioClient {
 
       const response = await Promise.race([fetchPromise, timeoutPromise])
 
-      // Extract rate limit headers
      this.updateRateLimitInfo(response)
 
-      // Handle rate limiting with retry
      if (response.status === 429) {
        const retryAfter = this.rateLimitInfo?.retryAfter || 1000
        throw new SimStudioError(
@@ -285,15 +288,18 @@ export class SimStudioClient {
   }
 
   /**
-   * Execute a workflow and poll for completion (useful for long-running workflows)
+   * Execute a workflow synchronously (ensures non-async mode)
+   * @param workflowId - The ID of the workflow to execute
+   * @param input - Input data to pass to the workflow
+   * @param options - Execution options (timeout, stream, etc.)
   */
  async executeWorkflowSync(
    workflowId: string,
+    input?: any,
    options: ExecutionOptions = {}
  ): Promise<WorkflowExecutionResult> {
-    // Ensure sync mode by explicitly setting async to false
    const syncOptions = { ...options, async: false }
-    return this.executeWorkflow(workflowId, syncOptions) as Promise<WorkflowExecutionResult>
+    return this.executeWorkflow(workflowId, input, syncOptions) as Promise<WorkflowExecutionResult>
  }
 
  /**
@@ -361,9 +367,14 @@ export class SimStudioClient {
 
  /**
   * Execute workflow with automatic retry on rate limit
+   * @param workflowId - The ID of the workflow to execute
+   * @param input - Input data to pass to the workflow
+   * @param options - Execution options (timeout, stream, async, etc.)
+   * @param retryOptions - Retry configuration (maxRetries, delays, etc.)
   */
  async executeWithRetry(
    workflowId: string,
+    input?: any,
    options: ExecutionOptions = {},
    retryOptions: RetryOptions = {}
  ): Promise<WorkflowExecutionResult | AsyncExecutionResult> {
@@ -379,7 +390,7 @@ export class SimStudioClient {
 
    for (let attempt = 0; attempt <= maxRetries; attempt++) {
      try {
-        return await this.executeWorkflow(workflowId, options)
+        return await this.executeWorkflow(workflowId, input, options)
      } catch (error: any) {
        if (!(error instanceof SimStudioError) || error.code !== 'RATE_LIMIT_EXCEEDED') {
          throw error
@@ -387,23 +398,19 @@ export class SimStudioClient {
 
        lastError = error
 
-        // Don't retry after last attempt
        if (attempt === maxRetries) {
          break
        }
 
-        // Use retry-after if provided, otherwise use exponential backoff
        const waitTime =
          error.status === 429 && this.rateLimitInfo?.retryAfter
            ? this.rateLimitInfo.retryAfter
            : Math.min(delay, maxDelay)
 
-        // Add jitter (±25%)
        const jitter = waitTime * (0.75 + Math.random() * 0.5)
 
        await new Promise((resolve) => setTimeout(resolve, jitter))
 
-        // Exponential backoff for next attempt
        delay *= backoffMultiplier
      }
    }
@@ -475,5 +482,4 @@ export class SimStudioClient {
   }
 }
 
-// Export types and classes
 export { SimStudioClient as default }
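
The retry loop shown above caps the exponential backoff at `maxDelay`, applies ±25% jitter, and prefers the server-supplied `retryAfter` when a 429 carried one. A standalone sketch of the delay schedule this produces when no `retryAfter` is available (the helper name and its return shape are illustrative, not part of the SDK):

```typescript
// Sketch of executeWithRetry's backoff schedule without a server Retry-After value.
function backoffDelays(
  attempts: number,
  initialDelay = 1000,
  maxDelay = 30000,
  backoffMultiplier = 2
): Array<[number, number]> {
  const ranges: Array<[number, number]> = []
  let delay = initialDelay
  for (let attempt = 0; attempt < attempts; attempt++) {
    const waitTime = Math.min(delay, maxDelay)
    // Jitter of +/-25% around the capped wait time, as in the SDK code above.
    ranges.push([waitTime * 0.75, waitTime * 1.25])
    delay *= backoffMultiplier
  }
  return ranges
}

// With the documented defaults, three retries wait roughly
// 750-1250 ms, 1500-2500 ms, then 3000-5000 ms.
console.log(backoffDelays(3))
```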
@@ -126,7 +126,7 @@ async function fetchGitHubCommitDetails(
 
   const githubUsername = commit.author?.login || commit.committer?.login || 'unknown'
 
-  let cleanMessage = commit.commit.message.split('\n')[0] // First line only
+  let cleanMessage = commit.commit.message.split('\n')[0]
   if (prNumber) {
     cleanMessage = cleanMessage.replace(/\s*\(#\d+\)\s*$/, '')
   }
@@ -226,12 +226,23 @@ async function getCommitsBetweenVersions(
 function categorizeCommit(message: string): 'features' | 'fixes' | 'improvements' | 'other' {
   const msgLower = message.toLowerCase()
 
-  if (
-    msgLower.includes('feat') ||
-    msgLower.includes('add') ||
-    msgLower.includes('implement') ||
-    msgLower.includes('new ')
-  ) {
+  if (/^feat(\(|:|!)/.test(msgLower)) {
+    return 'features'
+  }
+
+  if (/^fix(\(|:|!)/.test(msgLower)) {
+    return 'fixes'
+  }
+
+  if (/^(improvement|improve|perf|refactor)(\(|:|!)/.test(msgLower)) {
+    return 'improvements'
+  }
+
+  if (/^(chore|docs|style|test|ci|build)(\(|:|!)/.test(msgLower)) {
+    return 'other'
+  }
+
+  if (msgLower.includes('feat') || msgLower.includes('implement') || msgLower.includes('new ')) {
     return 'features'
   }
 
@@ -242,9 +253,10 @@ function categorizeCommit(message: string): 'features' | 'fixes' | 'improvements
   if (
     msgLower.includes('improve') ||
     msgLower.includes('enhance') ||
-    msgLower.includes('update') ||
     msgLower.includes('upgrade') ||
-    msgLower.includes('optimization')
+    msgLower.includes('optimization') ||
+    msgLower.includes('add') ||
+    msgLower.includes('update')
   ) {
     return 'improvements'
   }
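
The reworked `categorizeCommit` now checks Conventional Commit prefixes (`feat:`, `fix:`, `chore:`, and so on) before falling back to keyword matching, and the `add`/`update` keywords move from features to improvements. A condensed sketch of just the prefix stage with a few illustrative messages; this re-states the rules visible in the diff rather than importing the script itself:

```typescript
// Condensed restatement of the prefix rules above, for illustration only.
function categorizeByPrefix(message: string): 'features' | 'fixes' | 'improvements' | 'other' | null {
  const msgLower = message.toLowerCase()
  if (/^feat(\(|:|!)/.test(msgLower)) return 'features'
  if (/^fix(\(|:|!)/.test(msgLower)) return 'fixes'
  if (/^(improvement|improve|perf|refactor)(\(|:|!)/.test(msgLower)) return 'improvements'
  if (/^(chore|docs|style|test|ci|build)(\(|:|!)/.test(msgLower)) return 'other'
  return null // falls through to the keyword heuristics
}

console.log(categorizeByPrefix('feat(sdk): positional input argument')) // 'features'
console.log(categorizeByPrefix('fix: handle null input'))               // 'fixes'
console.log(categorizeByPrefix('chore: bump version to 0.1.2'))         // 'other'
console.log(categorizeByPrefix('Add retry helper'))                     // null -> keyword stage ('improvements')
```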