Mirror of https://github.com/simstudioai/sim.git (synced 2026-01-14 09:27:58 -05:00)

Compare commits: v0.5.58...feat/group (14 commits)

| SHA1 |
|---|
| 66d19c00db |
| a45426bb6b |
| a3007d8980 |
| 8ec067d280 |
| f04cd7c355 |
| eb52f69efd |
| 64b3f98488 |
| 4be420311c |
| b49ed2fcd9 |
| 837405e1ec |
| 2bc403972c |
| 40a066f39c |
| c9068d043e |
| 048eddd468 |
@@ -1,11 +1,11 @@
---
description: Create a block configuration for a Sim Studio integration with proper subBlocks, conditions, and tool wiring
description: Create a block configuration for a Sim integration with proper subBlocks, conditions, and tool wiring
argument-hint: <service-name>
---

# Add Block Skill

You are an expert at creating block configurations for Sim Studio. You understand the serializer, subBlock types, conditions, dependsOn, modes, and all UI patterns.
You are an expert at creating block configurations for Sim. You understand the serializer, subBlock types, conditions, dependsOn, modes, and all UI patterns.

## Your Task

@@ -1,11 +1,11 @@
---
description: Add a complete integration to Sim Studio (tools, block, icon, registration)
description: Add a complete integration to Sim (tools, block, icon, registration)
argument-hint: <service-name> [api-docs-url]
---

# Add Integration Skill

You are an expert at adding complete integrations to Sim Studio. This skill orchestrates the full process of adding a new service integration.
You are an expert at adding complete integrations to Sim. This skill orchestrates the full process of adding a new service integration.

## Overview

@@ -1,11 +1,11 @@
---
description: Create tool configurations for a Sim Studio integration by reading API docs
description: Create tool configurations for a Sim integration by reading API docs
argument-hint: <service-name> [api-docs-url]
---

# Add Tools Skill

You are an expert at creating tool configurations for Sim Studio integrations. Your job is to read API documentation and create properly structured tool files.
You are an expert at creating tool configurations for Sim integrations. Your job is to read API documentation and create properly structured tool files.

## Your Task

@@ -1,11 +1,11 @@
---
description: Create webhook triggers for a Sim Studio integration using the generic trigger builder
description: Create webhook triggers for a Sim integration using the generic trigger builder
argument-hint: <service-name>
---

# Add Trigger Skill

You are an expert at creating webhook triggers for Sim Studio. You understand the trigger system, the generic `buildTriggerSubBlocks` helper, and how triggers connect to blocks.
You are an expert at creating webhook triggers for Sim. You understand the trigger system, the generic `buildTriggerSubBlocks` helper, and how triggers connect to blocks.

## Your Task

.github/workflows/test-build.yml (vendored, 35 changed lines)
@@ -38,6 +38,41 @@ jobs:
      - name: Install dependencies
        run: bun install --frozen-lockfile

      - name: Validate feature flags
        run: |
          FILE="apps/sim/lib/core/config/feature-flags.ts"
          ERRORS=""

          echo "Checking for hardcoded boolean feature flags..."

          # Use perl for multiline matching to catch both:
          # export const isHosted = true
          # export const isHosted =
          # true
          HARDCODED=$(perl -0777 -ne 'while (/export const (is[A-Za-z]+)\s*=\s*\n?\s*(true|false)\b/g) { print " $1 = $2\n" }' "$FILE")

          if [ -n "$HARDCODED" ]; then
            ERRORS="${ERRORS}\n❌ Feature flags must not be hardcoded to boolean literals!\n\nFound hardcoded flags:\n${HARDCODED}\n\nFeature flags should derive their values from environment variables.\n"
          fi

          echo "Checking feature flag naming conventions..."

          # Check that all export const (except functions) start with 'is'
          # This finds exports like "export const someFlag" that don't start with "is" or "get"
          BAD_NAMES=$(grep -E "^export const [a-z]" "$FILE" | grep -vE "^export const (is|get)" | sed 's/export const \([a-zA-Z]*\).*/ \1/')

          if [ -n "$BAD_NAMES" ]; then
            ERRORS="${ERRORS}\n❌ Feature flags must use 'is' prefix for boolean flags!\n\nFound incorrectly named flags:\n${BAD_NAMES}\n\nExample: 'hostedMode' should be 'isHostedMode'\n"
          fi

          if [ -n "$ERRORS" ]; then
            echo ""
            echo -e "$ERRORS"
            exit 1
          fi

          echo "✅ All feature flags are properly configured"

      - name: Lint code
        run: bun run lint:check

@@ -1,4 +1,4 @@
# Sim Studio Development Guidelines
# Sim Development Guidelines

You are a professional software engineer. All code must follow best practices: accurate, readable, clean, and efficient.

README.md (117 changed lines)
@@ -13,6 +13,10 @@
  <a href="https://docs.sim.ai" target="_blank" rel="noopener noreferrer"><img src="https://img.shields.io/badge/Docs-6F3DFA.svg" alt="Documentation"></a>
</p>

<p align="center">
<a href="https://cursor.com/link/prompt?text=Help%20me%20set%20up%20Sim%20Studio%20locally.%20Follow%20these%20steps%3A%0A%0A1.%20First%2C%20verify%20Docker%20is%20installed%20and%20running%3A%0A%20%20%20docker%20--version%0A%20%20%20docker%20info%0A%0A2.%20Clone%20the%20repository%3A%0A%20%20%20git%20clone%20https%3A%2F%2Fgithub.com%2Fsimstudioai%2Fsim.git%0A%20%20%20cd%20sim%0A%0A3.%20Start%20the%20services%20with%20Docker%20Compose%3A%0A%20%20%20docker%20compose%20-f%20docker-compose.prod.yml%20up%20-d%0A%0A4.%20Wait%20for%20all%20containers%20to%20be%20healthy%20(this%20may%20take%201-2%20minutes)%3A%0A%20%20%20docker%20compose%20-f%20docker-compose.prod.yml%20ps%0A%0A5.%20Verify%20the%20app%20is%20accessible%20at%20http%3A%2F%2Flocalhost%3A3000%0A%0AIf%20there%20are%20any%20errors%2C%20help%20me%20troubleshoot%20them.%20Common%20issues%3A%0A-%20Port%203000%2C%203002%2C%20or%205432%20already%20in%20use%0A-%20Docker%20not%20running%0A-%20Insufficient%20memory%20(needs%2012GB%2B%20RAM)%0A%0AFor%20local%20AI%20models%20with%20Ollama%2C%20use%20this%20instead%20of%20step%203%3A%0A%20%20%20docker%20compose%20-f%20docker-compose.ollama.yml%20--profile%20setup%20up%20-d"><img src="https://img.shields.io/badge/Set%20Up%20with-Cursor-000000?logo=cursor&logoColor=white" alt="Set Up with Cursor"></a>
</p>

### Build Workflows with Ease
Design agent workflows visually on a canvas—connect agents, tools, and blocks, then run them instantly.

@@ -60,17 +64,11 @@ Docker must be installed and running on your machine.
### Self-hosted: Docker Compose

```bash
# Clone the repository
git clone https://github.com/simstudioai/sim.git

# Navigate to the project directory
cd sim

# Start Sim
git clone https://github.com/simstudioai/sim.git && cd sim
docker compose -f docker-compose.prod.yml up -d
```

Access the application at [http://localhost:3000/](http://localhost:3000/)
Open [http://localhost:3000](http://localhost:3000)

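The first startup can take a minute or two. One way to confirm all services are up, mirroring the setup prompt linked in the Cursor badge above:

```bash
# List service status; wait until every container reports healthy (typically 1-2 minutes).
docker compose -f docker-compose.prod.yml ps
```
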
#### Using Local Models with Ollama

@@ -91,33 +89,17 @@ docker compose -f docker-compose.ollama.yml exec ollama ollama pull llama3.1:8b

#### Using an External Ollama Instance

If you already have Ollama running on your host machine (outside Docker), you need to configure the `OLLAMA_URL` to use `host.docker.internal` instead of `localhost`:
If Ollama is running on your host machine, use `host.docker.internal` instead of `localhost`:

```bash
# Docker Desktop (macOS/Windows)
OLLAMA_URL=http://host.docker.internal:11434 docker compose -f docker-compose.prod.yml up -d

# Linux (add extra_hosts or use host IP)
docker compose -f docker-compose.prod.yml up -d # Then set OLLAMA_URL to your host's IP
```

**Why?** When running inside Docker, `localhost` refers to the container itself, not your host machine. `host.docker.internal` is a special DNS name that resolves to the host.

For Linux users, you can either:
- Use your host machine's actual IP address (e.g., `http://192.168.1.100:11434`)
- Add `extra_hosts: ["host.docker.internal:host-gateway"]` to the simstudio service in your compose file
On Linux, use your host's IP address or add `extra_hosts: ["host.docker.internal:host-gateway"]` to the compose file.

#### Using vLLM

Sim also supports [vLLM](https://docs.vllm.ai/) for self-hosted models with OpenAI-compatible API:

```bash
# Set these environment variables
VLLM_BASE_URL=http://your-vllm-server:8000
VLLM_API_KEY=your_optional_api_key # Only if your vLLM instance requires auth
```

When running with Docker, use `host.docker.internal` if vLLM is on your host machine (same as Ollama above).
Sim supports [vLLM](https://docs.vllm.ai/) for self-hosted models. Set `VLLM_BASE_URL` and optionally `VLLM_API_KEY` in your environment.

### Self-hosted: Dev Containers

@@ -128,14 +110,9 @@

### Self-hosted: Manual Setup

**Requirements:**
- [Bun](https://bun.sh/) runtime
- [Node.js](https://nodejs.org/) v20+ (required for sandboxed code execution)
- PostgreSQL 12+ with [pgvector extension](https://github.com/pgvector/pgvector) (required for AI embeddings)
**Requirements:** [Bun](https://bun.sh/), [Node.js](https://nodejs.org/) v20+, PostgreSQL 12+ with [pgvector](https://github.com/pgvector/pgvector)

**Note:** Sim uses vector embeddings for AI features like knowledge bases and semantic search, which requires the `pgvector` PostgreSQL extension.

1. Clone and install dependencies:
1. Clone and install:

```bash
git clone https://github.com/simstudioai/sim.git
@@ -145,75 +122,33 @@ bun install

2. Set up PostgreSQL with pgvector:

You need PostgreSQL with the `vector` extension for embedding support. Choose one option:

**Option A: Using Docker (Recommended)**
```bash
# Start PostgreSQL with pgvector extension
docker run --name simstudio-db \
  -e POSTGRES_PASSWORD=your_password \
  -e POSTGRES_DB=simstudio \
  -p 5432:5432 -d \
  pgvector/pgvector:pg17
docker run --name simstudio-db -e POSTGRES_PASSWORD=your_password -e POSTGRES_DB=simstudio -p 5432:5432 -d pgvector/pgvector:pg17
```

**Option B: Manual Installation**
- Install PostgreSQL 12+ and the pgvector extension
- See [pgvector installation guide](https://github.com/pgvector/pgvector#installation)
Or install manually via the [pgvector guide](https://github.com/pgvector/pgvector#installation).

3. Set up environment:
3. Configure environment:

```bash
cd apps/sim
cp .env.example .env # Configure with required variables (DATABASE_URL, BETTER_AUTH_SECRET, BETTER_AUTH_URL)
cp apps/sim/.env.example apps/sim/.env
cp packages/db/.env.example packages/db/.env
# Edit both .env files to set DATABASE_URL="postgresql://postgres:your_password@localhost:5432/simstudio"
```

Update your `.env` file with the database URL:
```bash
DATABASE_URL="postgresql://postgres:your_password@localhost:5432/simstudio"
```

4. Set up the database:

First, configure the database package environment:
```bash
cd packages/db
cp .env.example .env
```

Update your `packages/db/.env` file with the database URL:
```bash
DATABASE_URL="postgresql://postgres:your_password@localhost:5432/simstudio"
```

Then run the migrations:
```bash
cd packages/db # Required so drizzle picks correct .env file
bunx drizzle-kit migrate --config=./drizzle.config.ts
```

5. Start the development servers:

**Recommended approach - run both servers together (from project root):**
4. Run migrations:

```bash
bun run dev:full
cd packages/db && bunx drizzle-kit migrate --config=./drizzle.config.ts
```

This starts both the main Next.js application and the realtime socket server required for full functionality.
5. Start development servers:

**Alternative - run servers separately:**

Next.js app (from project root):
```bash
bun run dev
bun run dev:full # Starts both Next.js app and realtime socket server
```

Realtime socket server (from `apps/sim` directory in a separate terminal):
```bash
cd apps/sim
bun run dev:sockets
```
Or run separately: `bun run dev` (Next.js) and `cd apps/sim && bun run dev:sockets` (realtime).

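Before running the migrations, it can help to confirm that the `vector` extension is actually available in the database created in step 2. A sanity check against the Option A container (container, user, and database names as in that `docker run` example) might look like:

```bash
# Illustrative check: if pgvector is installed, creating the extension succeeds
# or reports that it already exists.
docker exec simstudio-db psql -U postgres -d simstudio -c 'CREATE EXTENSION IF NOT EXISTS vector;'
```
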
## Copilot API Keys

@@ -224,7 +159,7 @@ Copilot is a Sim-managed service. To use Copilot on a self-hosted instance:

## Environment Variables

Key environment variables for self-hosted deployments (see `apps/sim/.env.example` for full list):
Key environment variables for self-hosted deployments. See [`.env.example`](apps/sim/.env.example) for defaults or [`env.ts`](apps/sim/lib/core/config/env.ts) for the full list.

| Variable | Required | Description |
|----------|----------|-------------|
@@ -232,9 +167,9 @@
| `BETTER_AUTH_SECRET` | Yes | Auth secret (`openssl rand -hex 32`) |
| `BETTER_AUTH_URL` | Yes | Your app URL (e.g., `http://localhost:3000`) |
| `NEXT_PUBLIC_APP_URL` | Yes | Public app URL (same as above) |
| `ENCRYPTION_KEY` | Yes | Encryption key (`openssl rand -hex 32`) |
| `OLLAMA_URL` | No | Ollama server URL (default: `http://localhost:11434`) |
| `VLLM_BASE_URL` | No | vLLM server URL for self-hosted models |
| `ENCRYPTION_KEY` | Yes | Encrypts environment variables (`openssl rand -hex 32`) |
| `INTERNAL_API_SECRET` | Yes | Encrypts internal API routes (`openssl rand -hex 32`) |
| `API_ENCRYPTION_KEY` | Yes | Encrypts API keys (`openssl rand -hex 32`) |
| `COPILOT_API_KEY` | No | API key from sim.ai for Copilot features |

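Several of the required values above are random hex secrets. One way to generate them for a self-hosted setup (variable names from the table, `.env` path from the manual setup steps above):

```bash
# Illustrative: append freshly generated secrets to apps/sim/.env.
# Each of these variables is documented above as `openssl rand -hex 32`.
for key in BETTER_AUTH_SECRET ENCRYPTION_KEY INTERNAL_API_SECRET API_ENCRYPTION_KEY; do
  echo "${key}=$(openssl rand -hex 32)" >> apps/sim/.env
done
```
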
## Troubleshooting

@@ -4078,6 +4078,31 @@ export function McpIcon(props: SVGProps<SVGSVGElement>) {
|
||||
)
|
||||
}
|
||||
|
||||
export function A2AIcon(props: SVGProps<SVGSVGElement>) {
|
||||
return (
|
||||
<svg {...props} viewBox='0 0 860 860' fill='none' xmlns='http://www.w3.org/2000/svg'>
|
||||
<circle cx='544' cy='307' r='27' fill='currentColor' />
|
||||
<circle cx='154' cy='307' r='27' fill='currentColor' />
|
||||
<circle cx='706' cy='307' r='27' fill='currentColor' />
|
||||
<circle cx='316' cy='307' r='27' fill='currentColor' />
|
||||
<path
|
||||
d='M336.5 191.003H162C97.6588 191.003 45.5 243.162 45.5 307.503C45.5 371.844 97.6442 424.003 161.985 424.003C206.551 424.003 256.288 424.003 296.5 424.003C487.5 424.003 374 191.005 569 191.001C613.886 191 658.966 191 698.025 191C762.366 191.001 814.5 243.16 814.5 307.501C814.5 371.843 762.34 424.003 697.998 424.003H523.5'
|
||||
stroke='currentColor'
|
||||
strokeWidth='48'
|
||||
strokeLinecap='round'
|
||||
/>
|
||||
<path
|
||||
d='M256 510.002C270.359 510.002 282 521.643 282 536.002C282 550.361 270.359 562.002 256 562.002H148C133.641 562.002 122 550.361 122 536.002C122 521.643 133.641 510.002 148 510.002H256ZM712 510.002C726.359 510.002 738 521.643 738 536.002C738 550.361 726.359 562.002 712 562.002H360C345.641 562.002 334 550.361 334 536.002C334 521.643 345.641 510.002 360 510.002H712Z'
|
||||
fill='currentColor'
|
||||
/>
|
||||
<path
|
||||
d='M444 628.002C458.359 628.002 470 639.643 470 654.002C470 668.361 458.359 680.002 444 680.002H100C85.6406 680.002 74 668.361 74 654.002C74 639.643 85.6406 628.002 100 628.002H444ZM548 628.002C562.359 628.002 574 639.643 574 654.002C574 668.361 562.359 680.002 548 680.002C533.641 680.002 522 668.361 522 654.002C522 639.643 533.641 628.002 548 628.002ZM760 628.002C774.359 628.002 786 639.643 786 654.002C786 668.361 774.359 680.002 760 680.002H652C637.641 680.002 626 668.361 626 654.002C626 639.643 637.641 628.002 652 628.002H760Z'
|
||||
fill='currentColor'
|
||||
/>
|
||||
</svg>
|
||||
)
|
||||
}
|
||||
|
||||
export function WordpressIcon(props: SVGProps<SVGSVGElement>) {
|
||||
return (
|
||||
<svg {...props} xmlns='http://www.w3.org/2000/svg' viewBox='0 0 25.925 25.925'>
|
||||
|
||||
@@ -4,6 +4,7 @@

import type { ComponentType, SVGProps } from 'react'
import {
  A2AIcon,
  AhrefsIcon,
  AirtableIcon,
  ApifyIcon,
@@ -127,6 +128,7 @@ import {
type IconComponent = ComponentType<SVGProps<SVGSVGElement>>

export const blockTypeToIconMap: Record<string, IconComponent> = {
  a2a: A2AIcon,
  ahrefs: AhrefsIcon,
  airtable: AirtableIcon,
  apify: ApifyIcon,

@@ -6,13 +6,13 @@ description: Enterprise-Funktionen für Organisationen mit erweiterten
|
||||
|
||||
import { Callout } from 'fumadocs-ui/components/callout'
|
||||
|
||||
Sim Studio Enterprise bietet erweiterte Funktionen für Organisationen mit erhöhten Sicherheits-, Compliance- und Verwaltungsanforderungen.
|
||||
Sim Enterprise bietet erweiterte Funktionen für Organisationen mit erhöhten Sicherheits-, Compliance- und Verwaltungsanforderungen.
|
||||
|
||||
---
|
||||
|
||||
## Bring Your Own Key (BYOK)
|
||||
|
||||
Verwenden Sie Ihre eigenen API-Schlüssel für KI-Modellanbieter anstelle der gehosteten Schlüssel von Sim Studio.
|
||||
Verwenden Sie Ihre eigenen API-Schlüssel für KI-Modellanbieter anstelle der gehosteten Schlüssel von Sim.
|
||||
|
||||
### Unterstützte Anbieter
|
||||
|
||||
@@ -33,7 +33,7 @@ Verwenden Sie Ihre eigenen API-Schlüssel für KI-Modellanbieter anstelle der ge
|
||||
BYOK-Schlüssel werden verschlüsselt gespeichert. Nur Organisationsadministratoren und -inhaber können Schlüssel verwalten.
|
||||
</Callout>
|
||||
|
||||
Wenn konfiguriert, verwenden Workflows Ihren Schlüssel anstelle der gehosteten Schlüssel von Sim Studio. Bei Entfernung wechseln Workflows automatisch zu den gehosteten Schlüsseln zurück.
|
||||
Wenn konfiguriert, verwenden Workflows Ihren Schlüssel anstelle der gehosteten Schlüssel von Sim. Bei Entfernung wechseln Workflows automatisch zu den gehosteten Schlüsseln zurück.
|
||||
|
||||
---
|
||||
|
||||
@@ -73,5 +73,5 @@ Für selbst gehostete Bereitstellungen können Enterprise-Funktionen über Umgeb
|
||||
| `DISABLE_INVITATIONS`, `NEXT_PUBLIC_DISABLE_INVITATIONS` | Workspace-/Organisations-Einladungen global deaktivieren |
|
||||
|
||||
<Callout type="warn">
|
||||
BYOK ist nur im gehosteten Sim Studio verfügbar. Selbst gehostete Deployments konfigurieren AI-Provider-Schlüssel direkt über Umgebungsvariablen.
|
||||
BYOK ist nur im gehosteten Sim verfügbar. Selbst gehostete Deployments konfigurieren AI-Provider-Schlüssel direkt über Umgebungsvariablen.
|
||||
</Callout>
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
---
|
||||
title: Docker
|
||||
description: Sim Studio mit Docker Compose bereitstellen
|
||||
description: Sim mit Docker Compose bereitstellen
|
||||
---
|
||||
|
||||
import { Tab, Tabs } from 'fumadocs-ui/components/tabs'
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
---
|
||||
title: Umgebungsvariablen
|
||||
description: Konfigurationsreferenz für Sim Studio
|
||||
description: Konfigurationsreferenz für Sim
|
||||
---
|
||||
|
||||
import { Callout } from 'fumadocs-ui/components/callout'
|
||||
|
||||
@@ -1,12 +1,12 @@
|
||||
---
|
||||
title: Self-Hosting
|
||||
description: Stellen Sie Sim Studio auf Ihrer eigenen Infrastruktur bereit
|
||||
description: Stellen Sie Sim auf Ihrer eigenen Infrastruktur bereit
|
||||
---
|
||||
|
||||
import { Card, Cards } from 'fumadocs-ui/components/card'
|
||||
import { Callout } from 'fumadocs-ui/components/callout'
|
||||
|
||||
Stellen Sie Sim Studio auf Ihrer eigenen Infrastruktur mit Docker oder Kubernetes bereit.
|
||||
Stellen Sie Sim auf Ihrer eigenen Infrastruktur mit Docker oder Kubernetes bereit.
|
||||
|
||||
## Anforderungen
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
---
|
||||
title: Kubernetes
|
||||
description: Sim Studio mit Helm bereitstellen
|
||||
description: Sim mit Helm bereitstellen
|
||||
---
|
||||
|
||||
import { Tab, Tabs } from 'fumadocs-ui/components/tabs'
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
---
|
||||
title: Cloud-Plattformen
|
||||
description: Sim Studio auf Cloud-Plattformen bereitstellen
|
||||
description: Sim auf Cloud-Plattformen bereitstellen
|
||||
---
|
||||
|
||||
import { Tab, Tabs } from 'fumadocs-ui/components/tabs'
|
||||
@@ -64,7 +64,7 @@ sudo usermod -aG docker $USER
|
||||
docker --version
|
||||
```
|
||||
|
||||
### Sim Studio bereitstellen
|
||||
### Sim bereitstellen
|
||||
|
||||
```bash
|
||||
git clone https://github.com/simstudioai/sim.git && cd sim
|
||||
|
||||
@@ -5,7 +5,7 @@ description: Enterprise features for business organizations
|
||||
|
||||
import { Callout } from 'fumadocs-ui/components/callout'
|
||||
|
||||
Sim Studio Enterprise provides advanced features for organizations with enhanced security, compliance, and management requirements.
|
||||
Sim Enterprise provides advanced features for organizations with enhanced security, compliance, and management requirements.
|
||||
|
||||
---
|
||||
|
||||
|
||||
@@ -106,7 +106,7 @@ The model breakdown shows:
|
||||
|
||||
## Bring Your Own Key (BYOK)
|
||||
|
||||
Use your own API keys for AI model providers instead of Sim Studio's hosted keys to pay base prices with no markup.
|
||||
Use your own API keys for AI model providers instead of Sim's hosted keys to pay base prices with no markup.
|
||||
|
||||
### Supported Providers
|
||||
|
||||
@@ -127,7 +127,7 @@ Use your own API keys for AI model providers instead of Sim Studio's hosted keys
|
||||
BYOK keys are encrypted at rest. Only workspace admins can manage keys.
|
||||
</Callout>
|
||||
|
||||
When configured, workflows use your key instead of Sim Studio's hosted keys. If removed, workflows automatically fall back to hosted keys with the multiplier.
|
||||
When configured, workflows use your key instead of Sim's hosted keys. If removed, workflows automatically fall back to hosted keys with the multiplier.
|
||||
|
||||
## Cost Optimization Strategies
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
---
|
||||
title: Docker
|
||||
description: Deploy Sim Studio with Docker Compose
|
||||
description: Deploy Sim with Docker Compose
|
||||
---
|
||||
|
||||
import { Tab, Tabs } from 'fumadocs-ui/components/tabs'
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
---
|
||||
title: Environment Variables
|
||||
description: Configuration reference for Sim Studio
|
||||
description: Configuration reference for Sim
|
||||
---
|
||||
|
||||
import { Callout } from 'fumadocs-ui/components/callout'
|
||||
|
||||
@@ -1,12 +1,18 @@
|
||||
---
|
||||
title: Self-Hosting
|
||||
description: Deploy Sim Studio on your own infrastructure
|
||||
description: Deploy Sim on your own infrastructure
|
||||
---
|
||||
|
||||
import { Card, Cards } from 'fumadocs-ui/components/card'
|
||||
import { Callout } from 'fumadocs-ui/components/callout'
|
||||
|
||||
Deploy Sim Studio on your own infrastructure with Docker or Kubernetes.
|
||||
Deploy Sim on your own infrastructure with Docker or Kubernetes.
|
||||
|
||||
<div className="flex gap-2 my-4">
|
||||
<a href="https://cursor.com/link/prompt?text=Help%20me%20set%20up%20Sim%20locally.%20Follow%20these%20steps%3A%0A%0A1.%20First%2C%20verify%20Docker%20is%20installed%20and%20running%3A%0A%20%20%20docker%20--version%0A%20%20%20docker%20info%0A%0A2.%20Clone%20the%20repository%3A%0A%20%20%20git%20clone%20https%3A%2F%2Fgithub.com%2Fsimstudioai%2Fsim.git%0A%20%20%20cd%20sim%0A%0A3.%20Start%20the%20services%20with%20Docker%20Compose%3A%0A%20%20%20docker%20compose%20-f%20docker-compose.prod.yml%20up%20-d%0A%0A4.%20Wait%20for%20all%20containers%20to%20be%20healthy%20(this%20may%20take%201-2%20minutes)%3A%0A%20%20%20docker%20compose%20-f%20docker-compose.prod.yml%20ps%0A%0A5.%20Verify%20the%20app%20is%20accessible%20at%20http%3A%2F%2Flocalhost%3A3000%0A%0AIf%20there%20are%20any%20errors%2C%20help%20me%20troubleshoot%20them.%20Common%20issues%3A%0A-%20Port%203000%2C%203002%2C%20or%205432%20already%20in%20use%0A-%20Docker%20not%20running%0A-%20Insufficient%20memory%20(needs%2012GB%2B%20RAM)%0A%0AFor%20local%20AI%20models%20with%20Ollama%2C%20use%20this%20instead%20of%20step%203%3A%0A%20%20%20docker%20compose%20-f%20docker-compose.ollama.yml%20--profile%20setup%20up%20-d">
|
||||
<img src="https://img.shields.io/badge/Set%20Up%20with-Cursor-000000?logo=cursor&logoColor=white" alt="Set Up with Cursor" />
|
||||
</a>
|
||||
</div>
|
||||
|
||||
## Requirements
|
||||
|
||||
@@ -48,3 +54,4 @@ Open [http://localhost:3000](http://localhost:3000)
|
||||
| realtime | 3002 | WebSocket server |
|
||||
| db | 5432 | PostgreSQL with pgvector |
|
||||
| migrations | - | Database migrations (runs once) |
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
---
|
||||
title: Kubernetes
|
||||
description: Deploy Sim Studio with Helm
|
||||
description: Deploy Sim with Helm
|
||||
---
|
||||
|
||||
import { Tab, Tabs } from 'fumadocs-ui/components/tabs'
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
---
|
||||
title: Cloud Platforms
|
||||
description: Deploy Sim Studio on cloud platforms
|
||||
description: Deploy Sim on cloud platforms
|
||||
---
|
||||
|
||||
import { Tab, Tabs } from 'fumadocs-ui/components/tabs'
|
||||
@@ -57,7 +57,7 @@ sudo usermod -aG docker $USER
|
||||
docker --version
|
||||
```
|
||||
|
||||
### Deploy Sim Studio
|
||||
### Deploy Sim
|
||||
|
||||
```bash
|
||||
git clone https://github.com/simstudioai/sim.git && cd sim
|
||||
|
||||
apps/docs/content/docs/en/tools/a2a.mdx (new file, 215 lines)
@@ -0,0 +1,215 @@
---
title: A2A
description: Interact with external A2A-compatible agents
---

import { BlockInfoCard } from "@/components/ui/block-info-card"

<BlockInfoCard
  type="a2a"
  color="#4151B5"
/>

{/* MANUAL-CONTENT-START:intro */}
The A2A (Agent-to-Agent) protocol enables Sim to interact with external AI agents and systems that implement A2A-compatible APIs. With A2A, you can connect Sim’s automations and workflows to remote agents—such as LLM-powered bots, microservices, and other AI-based tools—using a standardized messaging format.

Using the A2A tools in Sim, you can:

- **Send Messages to External Agents**: Communicate directly with remote agents, providing prompts, commands, or data.
- **Receive and Stream Responses**: Get structured responses, artifacts, or real-time updates from the agent as the task progresses.
- **Continue Conversations or Tasks**: Carry on multi-turn conversations or workflows by referencing task and context IDs.
- **Integrate Third-Party AI and Automation**: Leverage external A2A-compatible services as part of your Sim workflows.

These features allow you to build advanced workflows that combine Sim’s native capabilities with the intelligence and automation of external AIs or custom agents. To use A2A integrations, you’ll need the external agent’s endpoint URL and, if required, an API key or credentials.
{/* MANUAL-CONTENT-END */}

## Usage Instructions

Use the A2A (Agent-to-Agent) protocol to interact with external AI agents.

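This changeset also adds an agent-card route (`apps/sim/app/api/a2a/agents/[agentId]/route.ts`), so a published Sim agent's own discovery document can presumably be fetched from a local instance as well; the agent ID below is a placeholder:

```bash
# Illustrative: fetch the Agent Card for a published agent from a local Sim instance.
curl -s http://localhost:3000/api/a2a/agents/<agent-id>
```
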
## Tools
|
||||
|
||||
### `a2a_send_message`
|
||||
|
||||
Send a message to an external A2A-compatible agent.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `agentUrl` | string | Yes | The A2A agent endpoint URL |
|
||||
| `message` | string | Yes | Message to send to the agent |
|
||||
| `taskId` | string | No | Task ID for continuing an existing task |
|
||||
| `contextId` | string | No | Context ID for conversation continuity |
|
||||
| `apiKey` | string | No | API key for authentication |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `content` | string | The text response from the agent |
|
||||
| `taskId` | string | Task ID for follow-up interactions |
|
||||
| `contextId` | string | Context ID for conversation continuity |
|
||||
| `state` | string | Task state |
|
||||
| `artifacts` | array | Structured output artifacts |
|
||||
| `history` | array | Full message history |
|
||||
|
||||
### `a2a_get_task`
|
||||
|
||||
Query the status of an existing A2A task.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `agentUrl` | string | Yes | The A2A agent endpoint URL |
|
||||
| `taskId` | string | Yes | Task ID to query |
|
||||
| `apiKey` | string | No | API key for authentication |
|
||||
| `historyLength` | number | No | Number of history messages to include |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `taskId` | string | Task ID |
|
||||
| `contextId` | string | Context ID |
|
||||
| `state` | string | Task state |
|
||||
| `artifacts` | array | Output artifacts |
|
||||
| `history` | array | Message history |
|
||||
|
||||
### `a2a_cancel_task`
|
||||
|
||||
Cancel a running A2A task.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `agentUrl` | string | Yes | The A2A agent endpoint URL |
|
||||
| `taskId` | string | Yes | Task ID to cancel |
|
||||
| `apiKey` | string | No | API key for authentication |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `cancelled` | boolean | Whether cancellation was successful |
|
||||
| `state` | string | Task state after cancellation |
|
||||
|
||||
### `a2a_get_agent_card`
|
||||
|
||||
Fetch the Agent Card (discovery document) for an A2A agent.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `agentUrl` | string | Yes | The A2A agent endpoint URL |
|
||||
| `apiKey` | string | No | API key for authentication \(if required\) |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `name` | string | Agent name |
|
||||
| `description` | string | Agent description |
|
||||
| `url` | string | Agent endpoint URL |
|
||||
| `version` | string | Agent version |
|
||||
| `capabilities` | object | Agent capabilities \(streaming, pushNotifications, etc.\) |
|
||||
| `skills` | array | Skills the agent can perform |
|
||||
| `defaultInputModes` | array | Default input modes \(text, file, data\) |
|
||||
| `defaultOutputModes` | array | Default output modes \(text, file, data\) |
|
||||
|
||||
### `a2a_resubscribe`
|
||||
|
||||
Reconnect to an ongoing A2A task stream after connection interruption.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `agentUrl` | string | Yes | The A2A agent endpoint URL |
|
||||
| `taskId` | string | Yes | Task ID to resubscribe to |
|
||||
| `apiKey` | string | No | API key for authentication |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `taskId` | string | Task ID |
|
||||
| `contextId` | string | Context ID |
|
||||
| `state` | string | Current task state |
|
||||
| `isRunning` | boolean | Whether the task is still running |
|
||||
| `artifacts` | array | Output artifacts |
|
||||
| `history` | array | Message history |
|
||||
|
||||
### `a2a_set_push_notification`
|
||||
|
||||
Configure a webhook to receive task update notifications.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `agentUrl` | string | Yes | The A2A agent endpoint URL |
|
||||
| `taskId` | string | Yes | Task ID to configure notifications for |
|
||||
| `webhookUrl` | string | Yes | HTTPS webhook URL to receive notifications |
|
||||
| `token` | string | No | Token for webhook validation |
|
||||
| `apiKey` | string | No | API key for authentication |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `url` | string | Configured webhook URL |
|
||||
| `token` | string | Token for webhook validation |
|
||||
| `success` | boolean | Whether configuration was successful |
|
||||
|
||||
### `a2a_get_push_notification`
|
||||
|
||||
Get the push notification webhook configuration for a task.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `agentUrl` | string | Yes | The A2A agent endpoint URL |
|
||||
| `taskId` | string | Yes | Task ID to get notification config for |
|
||||
| `apiKey` | string | No | API key for authentication |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `url` | string | Configured webhook URL |
|
||||
| `token` | string | Token for webhook validation |
|
||||
| `exists` | boolean | Whether a push notification config exists |
|
||||
|
||||
### `a2a_delete_push_notification`
|
||||
|
||||
Delete the push notification webhook configuration for a task.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `agentUrl` | string | Yes | The A2A agent endpoint URL |
|
||||
| `taskId` | string | Yes | Task ID to delete notification config for |
|
||||
| `pushNotificationConfigId` | string | No | Push notification configuration ID to delete \(optional - server can derive from taskId\) |
|
||||
| `apiKey` | string | No | API key for authentication |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `success` | boolean | Whether deletion was successful |
|
||||
|
||||
|
||||
|
||||
## Notes
|
||||
|
||||
- Category: `tools`
|
||||
- Type: `a2a`
|
||||
@@ -1,6 +1,7 @@
|
||||
{
|
||||
"pages": [
|
||||
"index",
|
||||
"a2a",
|
||||
"ahrefs",
|
||||
"airtable",
|
||||
"apify",
|
||||
|
||||
@@ -6,13 +6,13 @@ description: Funciones enterprise para organizaciones con requisitos avanzados
|
||||
|
||||
import { Callout } from 'fumadocs-ui/components/callout'
|
||||
|
||||
Sim Studio Enterprise proporciona funciones avanzadas para organizaciones con requisitos mejorados de seguridad, cumplimiento y gestión.
|
||||
Sim Enterprise proporciona funciones avanzadas para organizaciones con requisitos mejorados de seguridad, cumplimiento y gestión.
|
||||
|
||||
---
|
||||
|
||||
## Bring Your Own Key (BYOK)
|
||||
|
||||
Usa tus propias claves API para proveedores de modelos de IA en lugar de las claves alojadas de Sim Studio.
|
||||
Usa tus propias claves API para proveedores de modelos de IA en lugar de las claves alojadas de Sim.
|
||||
|
||||
### Proveedores compatibles
|
||||
|
||||
@@ -33,7 +33,7 @@ Usa tus propias claves API para proveedores de modelos de IA en lugar de las cla
|
||||
Las claves BYOK están cifradas en reposo. Solo los administradores y propietarios de la organización pueden gestionar las claves.
|
||||
</Callout>
|
||||
|
||||
Cuando está configurado, los flujos de trabajo usan tu clave en lugar de las claves alojadas de Sim Studio. Si se elimina, los flujos de trabajo vuelven automáticamente a las claves alojadas.
|
||||
Cuando está configurado, los flujos de trabajo usan tu clave en lugar de las claves alojadas de Sim. Si se elimina, los flujos de trabajo vuelven automáticamente a las claves alojadas.
|
||||
|
||||
---
|
||||
|
||||
@@ -73,5 +73,5 @@ Para implementaciones self-hosted, las funciones enterprise se pueden activar me
|
||||
| `DISABLE_INVITATIONS`, `NEXT_PUBLIC_DISABLE_INVITATIONS` | Desactivar globalmente invitaciones a espacios de trabajo/organizaciones |
|
||||
|
||||
<Callout type="warn">
|
||||
BYOK solo está disponible en Sim Studio alojado. Las implementaciones autoalojadas configuran las claves de proveedor de IA directamente a través de variables de entorno.
|
||||
BYOK solo está disponible en Sim alojado. Las implementaciones autoalojadas configuran las claves de proveedor de IA directamente a través de variables de entorno.
|
||||
</Callout>
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
---
|
||||
title: Docker
|
||||
description: Despliega Sim Studio con Docker Compose
|
||||
description: Despliega Sim con Docker Compose
|
||||
---
|
||||
|
||||
import { Tab, Tabs } from 'fumadocs-ui/components/tabs'
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
---
|
||||
title: Variables de entorno
|
||||
description: Referencia de configuración para Sim Studio
|
||||
description: Referencia de configuración para Sim
|
||||
---
|
||||
|
||||
import { Callout } from 'fumadocs-ui/components/callout'
|
||||
|
||||
@@ -1,12 +1,12 @@
|
||||
---
|
||||
title: Autoalojamiento
|
||||
description: Despliega Sim Studio en tu propia infraestructura
|
||||
description: Despliega Sim en tu propia infraestructura
|
||||
---
|
||||
|
||||
import { Card, Cards } from 'fumadocs-ui/components/card'
|
||||
import { Callout } from 'fumadocs-ui/components/callout'
|
||||
|
||||
Despliega Sim Studio en tu propia infraestructura con Docker o Kubernetes.
|
||||
Despliega Sim en tu propia infraestructura con Docker o Kubernetes.
|
||||
|
||||
## Requisitos
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
---
|
||||
title: Kubernetes
|
||||
description: Desplegar Sim Studio con Helm
|
||||
description: Desplegar Sim con Helm
|
||||
---
|
||||
|
||||
import { Tab, Tabs } from 'fumadocs-ui/components/tabs'
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
---
|
||||
title: Plataformas en la nube
|
||||
description: Despliega Sim Studio en plataformas en la nube
|
||||
description: Despliega Sim en plataformas en la nube
|
||||
---
|
||||
|
||||
import { Tab, Tabs } from 'fumadocs-ui/components/tabs'
|
||||
@@ -64,7 +64,7 @@ sudo usermod -aG docker $USER
|
||||
docker --version
|
||||
```
|
||||
|
||||
### Desplegar Sim Studio
|
||||
### Desplegar Sim
|
||||
|
||||
```bash
|
||||
git clone https://github.com/simstudioai/sim.git && cd sim
|
||||
|
||||
@@ -6,13 +6,13 @@ description: Fonctionnalités entreprise pour les organisations ayant des
|
||||
|
||||
import { Callout } from 'fumadocs-ui/components/callout'
|
||||
|
||||
Sim Studio Entreprise fournit des fonctionnalités avancées pour les organisations ayant des exigences renforcées en matière de sécurité, de conformité et de gestion.
|
||||
Sim Entreprise fournit des fonctionnalités avancées pour les organisations ayant des exigences renforcées en matière de sécurité, de conformité et de gestion.
|
||||
|
||||
---
|
||||
|
||||
## Apportez votre propre clé (BYOK)
|
||||
|
||||
Utilisez vos propres clés API pour les fournisseurs de modèles IA au lieu des clés hébergées par Sim Studio.
|
||||
Utilisez vos propres clés API pour les fournisseurs de modèles IA au lieu des clés hébergées par Sim.
|
||||
|
||||
### Fournisseurs pris en charge
|
||||
|
||||
@@ -33,7 +33,7 @@ Utilisez vos propres clés API pour les fournisseurs de modèles IA au lieu des
|
||||
Les clés BYOK sont chiffrées au repos. Seuls les administrateurs et propriétaires de l'organisation peuvent gérer les clés.
|
||||
</Callout>
|
||||
|
||||
Une fois configurés, les workflows utilisent votre clé au lieu des clés hébergées par Sim Studio. Si elle est supprimée, les workflows basculent automatiquement vers les clés hébergées.
|
||||
Une fois configurés, les workflows utilisent votre clé au lieu des clés hébergées par Sim. Si elle est supprimée, les workflows basculent automatiquement vers les clés hébergées.
|
||||
|
||||
---
|
||||
|
||||
@@ -73,5 +73,5 @@ Pour les déploiements auto-hébergés, les fonctionnalités entreprise peuvent
|
||||
| `DISABLE_INVITATIONS`, `NEXT_PUBLIC_DISABLE_INVITATIONS` | Désactiver globalement les invitations aux espaces de travail/organisations |
|
||||
|
||||
<Callout type="warn">
|
||||
BYOK est uniquement disponible sur Sim Studio hébergé. Les déploiements auto-hébergés configurent les clés de fournisseur d'IA directement via les variables d'environnement.
|
||||
BYOK est uniquement disponible sur Sim hébergé. Les déploiements auto-hébergés configurent les clés de fournisseur d'IA directement via les variables d'environnement.
|
||||
</Callout>
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
---
|
||||
title: Docker
|
||||
description: Déployer Sim Studio avec Docker Compose
|
||||
description: Déployer Sim avec Docker Compose
|
||||
---
|
||||
|
||||
import { Tab, Tabs } from 'fumadocs-ui/components/tabs'
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
---
|
||||
title: Variables d'environnement
|
||||
description: Référence de configuration pour Sim Studio
|
||||
description: Référence de configuration pour Sim
|
||||
---
|
||||
|
||||
import { Callout } from 'fumadocs-ui/components/callout'
|
||||
|
||||
@@ -1,12 +1,12 @@
|
||||
---
|
||||
title: Auto-hébergement
|
||||
description: Déployez Sim Studio sur votre propre infrastructure
|
||||
description: Déployez Sim sur votre propre infrastructure
|
||||
---
|
||||
|
||||
import { Card, Cards } from 'fumadocs-ui/components/card'
|
||||
import { Callout } from 'fumadocs-ui/components/callout'
|
||||
|
||||
Déployez Sim Studio sur votre propre infrastructure avec Docker ou Kubernetes.
|
||||
Déployez Sim sur votre propre infrastructure avec Docker ou Kubernetes.
|
||||
|
||||
## Prérequis
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
---
|
||||
title: Kubernetes
|
||||
description: Déployer Sim Studio avec Helm
|
||||
description: Déployer Sim avec Helm
|
||||
---
|
||||
|
||||
import { Tab, Tabs } from 'fumadocs-ui/components/tabs'
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
---
|
||||
title: Plateformes cloud
|
||||
description: Déployer Sim Studio sur des plateformes cloud
|
||||
description: Déployer Sim sur des plateformes cloud
|
||||
---
|
||||
|
||||
import { Tab, Tabs } from 'fumadocs-ui/components/tabs'
|
||||
@@ -64,7 +64,7 @@ sudo usermod -aG docker $USER
|
||||
docker --version
|
||||
```
|
||||
|
||||
### Déployer Sim Studio
|
||||
### Déployer Sim
|
||||
|
||||
```bash
|
||||
git clone https://github.com/simstudioai/sim.git && cd sim
|
||||
|
||||
@@ -5,13 +5,13 @@ description: 高度なセキュリティとコンプライアンス要件を持
|
||||
|
||||
import { Callout } from 'fumadocs-ui/components/callout'
|
||||
|
||||
Sim Studio Enterpriseは、強化されたセキュリティ、コンプライアンス、管理要件を持つ組織向けの高度な機能を提供します。
|
||||
Sim Enterpriseは、強化されたセキュリティ、コンプライアンス、管理要件を持つ組織向けの高度な機能を提供します。
|
||||
|
||||
---
|
||||
|
||||
## Bring Your Own Key (BYOK)
|
||||
|
||||
Sim Studioのホストキーの代わりに、AIモデルプロバイダー用の独自のAPIキーを使用できます。
|
||||
Simのホストキーの代わりに、AIモデルプロバイダー用の独自のAPIキーを使用できます。
|
||||
|
||||
### 対応プロバイダー
|
||||
|
||||
@@ -32,7 +32,7 @@ Sim Studioのホストキーの代わりに、AIモデルプロバイダー用
|
||||
BYOKキーは保存時に暗号化されます。組織の管理者とオーナーのみがキーを管理できます。
|
||||
</Callout>
|
||||
|
||||
設定すると、ワークフローはSim Studioのホストキーの代わりに独自のキーを使用します。削除すると、ワークフローは自動的にホストキーにフォールバックします。
|
||||
設定すると、ワークフローはSimのホストキーの代わりに独自のキーを使用します。削除すると、ワークフローは自動的にホストキーにフォールバックします。
|
||||
|
||||
---
|
||||
|
||||
@@ -72,5 +72,5 @@ Sim Studioのホストキーの代わりに、AIモデルプロバイダー用
|
||||
| `DISABLE_INVITATIONS`、`NEXT_PUBLIC_DISABLE_INVITATIONS` | ワークスペース/組織への招待をグローバルに無効化 |
|
||||
|
||||
<Callout type="warn">
|
||||
BYOKはホスト型Sim Studioでのみ利用可能です。セルフホスト型デプロイメントでは、環境変数を介してAIプロバイダーキーを直接設定します。
|
||||
BYOKはホスト型Simでのみ利用可能です。セルフホスト型デプロイメントでは、環境変数を介してAIプロバイダーキーを直接設定します。
|
||||
</Callout>
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
---
|
||||
title: Docker
|
||||
description: Docker Composeを使用してSim Studioをデプロイする
|
||||
description: Docker Composeを使用してSimをデプロイする
|
||||
---
|
||||
|
||||
import { Tab, Tabs } from 'fumadocs-ui/components/tabs'
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
---
|
||||
title: 環境変数
|
||||
description: Sim Studioの設定リファレンス
|
||||
description: Simの設定リファレンス
|
||||
---
|
||||
|
||||
import { Callout } from 'fumadocs-ui/components/callout'
|
||||
|
||||
@@ -1,12 +1,12 @@
|
||||
---
|
||||
title: セルフホスティング
|
||||
description: 自社のインフラストラクチャにSim Studioをデプロイ
|
||||
description: 自社のインフラストラクチャにSimをデプロイ
|
||||
---
|
||||
|
||||
import { Card, Cards } from 'fumadocs-ui/components/card'
|
||||
import { Callout } from 'fumadocs-ui/components/callout'
|
||||
|
||||
DockerまたはKubernetesを使用して、自社のインフラストラクチャにSim Studioをデプロイします。
|
||||
DockerまたはKubernetesを使用して、自社のインフラストラクチャにSimをデプロイします。
|
||||
|
||||
## 要件
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
---
|
||||
title: Kubernetes
|
||||
description: Helmを使用してSim Studioをデプロイする
|
||||
description: Helmを使用してSimをデプロイする
|
||||
---
|
||||
|
||||
import { Tab, Tabs } from 'fumadocs-ui/components/tabs'
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
---
|
||||
title: クラウドプラットフォーム
|
||||
description: クラウドプラットフォームにSim Studioをデプロイする
|
||||
description: クラウドプラットフォームにSimをデプロイする
|
||||
---
|
||||
|
||||
import { Tab, Tabs } from 'fumadocs-ui/components/tabs'
|
||||
@@ -64,7 +64,7 @@ sudo usermod -aG docker $USER
|
||||
docker --version
|
||||
```
|
||||
|
||||
### Sim Studioのデプロイ
|
||||
### Simのデプロイ
|
||||
|
||||
```bash
|
||||
git clone https://github.com/simstudioai/sim.git && cd sim
|
||||
|
||||
@@ -5,13 +5,13 @@ description: 为具有高级安全性和合规性需求的组织提供企业级
|
||||
|
||||
import { Callout } from 'fumadocs-ui/components/callout'
|
||||
|
||||
Sim Studio 企业版为需要更高安全性、合规性和管理能力的组织提供高级功能。
|
||||
Sim 企业版为需要更高安全性、合规性和管理能力的组织提供高级功能。
|
||||
|
||||
---
|
||||
|
||||
## 自带密钥(BYOK)
|
||||
|
||||
使用您自己的 API 密钥对接 AI 模型服务商,而不是使用 Sim Studio 托管的密钥。
|
||||
使用您自己的 API 密钥对接 AI 模型服务商,而不是使用 Sim 托管的密钥。
|
||||
|
||||
### 支持的服务商
|
||||
|
||||
@@ -32,7 +32,7 @@ Sim Studio 企业版为需要更高安全性、合规性和管理能力的组织
|
||||
BYOK 密钥静态加密存储。仅组织管理员和所有者可管理密钥。
|
||||
</Callout>
|
||||
|
||||
配置后,工作流将使用您的密钥而非 Sim Studio 托管密钥。如移除,工作流会自动切换回托管密钥。
|
||||
配置后,工作流将使用您的密钥而非 Sim 托管密钥。如移除,工作流会自动切换回托管密钥。
|
||||
|
||||
---
|
||||
|
||||
@@ -72,5 +72,5 @@ Sim Studio 企业版为需要更高安全性、合规性和管理能力的组织
|
||||
| `DISABLE_INVITATIONS`,`NEXT_PUBLIC_DISABLE_INVITATIONS` | 全局禁用工作区/组织邀请 |
|
||||
|
||||
<Callout type="warn">
|
||||
BYOK 仅适用于托管版 Sim Studio。自托管部署需通过环境变量直接配置 AI 提供商密钥。
|
||||
BYOK 仅适用于托管版 Sim。自托管部署需通过环境变量直接配置 AI 提供商密钥。
|
||||
</Callout>
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
---
|
||||
title: Docker
|
||||
description: 使用 Docker Compose 部署 Sim Studio
|
||||
description: 使用 Docker Compose 部署 Sim
|
||||
---
|
||||
|
||||
import { Tab, Tabs } from 'fumadocs-ui/components/tabs'
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
---
|
||||
title: 环境变量
|
||||
description: Sim Studio 的配置参考
|
||||
description: Sim 的配置参考
|
||||
---
|
||||
|
||||
import { Callout } from 'fumadocs-ui/components/callout'
|
||||
|
||||
@@ -1,12 +1,12 @@
|
||||
---
|
||||
title: 自托管
|
||||
description: 在您自己的基础设施上部署 Sim Studio
|
||||
description: 在您自己的基础设施上部署 Sim
|
||||
---
|
||||
|
||||
import { Card, Cards } from 'fumadocs-ui/components/card'
|
||||
import { Callout } from 'fumadocs-ui/components/callout'
|
||||
|
||||
使用 Docker 或 Kubernetes 在您自己的基础设施上部署 Sim Studio。
|
||||
使用 Docker 或 Kubernetes 在您自己的基础设施上部署 Sim。
|
||||
|
||||
## 要求
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
---
|
||||
title: Kubernetes
|
||||
description: 使用 Helm 部署 Sim Studio
|
||||
description: 使用 Helm 部署 Sim
|
||||
---
|
||||
|
||||
import { Tab, Tabs } from 'fumadocs-ui/components/tabs'
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
---
|
||||
title: 云平台
|
||||
description: 在云平台上部署 Sim Studio
|
||||
description: 在云平台上部署 Sim
|
||||
---
|
||||
|
||||
import { Tab, Tabs } from 'fumadocs-ui/components/tabs'
|
||||
@@ -64,7 +64,7 @@ sudo usermod -aG docker $USER
|
||||
docker --version
|
||||
```
|
||||
|
||||
### 部署 Sim Studio
|
||||
### 部署 Sim
|
||||
|
||||
```bash
|
||||
git clone https://github.com/simstudioai/sim.git && cd sim
|
||||
|
||||
@@ -6,7 +6,7 @@ export default function StructuredData() {
    '@type': 'Organization',
    '@id': 'https://sim.ai/#organization',
    name: 'Sim',
    alternateName: 'Sim Studio',
    alternateName: 'Sim',
    description:
      'Open-source AI agent workflow builder used by developers at trail-blazing startups to Fortune 500 companies',
    url: 'https://sim.ai',

@@ -76,6 +76,14 @@
  pointer-events: none;
}

/**
 * Suppress the default selection ring for grouped selections
 * These blocks show a more transparent ring via the component's ring overlay
 */
.react-flow__node.selected > div[data-grouped-selection="true"] > div::after {
  box-shadow: none;
}

/**
 * Color tokens - single source of truth for all colors
 * Light mode: Warm theme

289
apps/sim/app/api/a2a/agents/[agentId]/route.ts
Normal file
289
apps/sim/app/api/a2a/agents/[agentId]/route.ts
Normal file
@@ -0,0 +1,289 @@
|
||||
import { db } from '@sim/db'
|
||||
import { a2aAgent, workflow } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { eq } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { generateAgentCard, generateSkillsFromWorkflow } from '@/lib/a2a/agent-card'
|
||||
import type { AgentCapabilities, AgentSkill } from '@/lib/a2a/types'
|
||||
import { checkHybridAuth } from '@/lib/auth/hybrid'
|
||||
import { getRedisClient } from '@/lib/core/config/redis'
|
||||
import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/persistence/utils'
|
||||
|
||||
const logger = createLogger('A2AAgentCardAPI')
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
interface RouteParams {
|
||||
agentId: string
|
||||
}
|
||||
|
||||
/**
|
||||
* GET - Returns the Agent Card for discovery
|
||||
*/
|
||||
export async function GET(request: NextRequest, { params }: { params: Promise<RouteParams> }) {
|
||||
const { agentId } = await params
|
||||
|
||||
try {
|
||||
const [agent] = await db
|
||||
.select({
|
||||
agent: a2aAgent,
|
||||
workflow: workflow,
|
||||
})
|
||||
.from(a2aAgent)
|
||||
.innerJoin(workflow, eq(a2aAgent.workflowId, workflow.id))
|
||||
.where(eq(a2aAgent.id, agentId))
|
||||
.limit(1)
|
||||
|
||||
if (!agent) {
|
||||
return NextResponse.json({ error: 'Agent not found' }, { status: 404 })
|
||||
}
|
||||
|
||||
if (!agent.agent.isPublished) {
|
||||
const auth = await checkHybridAuth(request, { requireWorkflowId: false })
|
||||
if (!auth.success) {
|
||||
return NextResponse.json({ error: 'Agent not published' }, { status: 404 })
|
||||
}
|
||||
}
|
||||
|
||||
const agentCard = generateAgentCard(
|
||||
{
|
||||
id: agent.agent.id,
|
||||
name: agent.agent.name,
|
||||
description: agent.agent.description,
|
||||
version: agent.agent.version,
|
||||
capabilities: agent.agent.capabilities as AgentCapabilities,
|
||||
skills: agent.agent.skills as AgentSkill[],
|
||||
},
|
||||
{
|
||||
id: agent.workflow.id,
|
||||
name: agent.workflow.name,
|
||||
description: agent.workflow.description,
|
||||
}
|
||||
)
|
||||
|
||||
return NextResponse.json(agentCard, {
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'Cache-Control': agent.agent.isPublished ? 'public, max-age=3600' : 'private, no-cache',
|
||||
},
|
||||
})
|
||||
} catch (error) {
|
||||
logger.error('Error getting Agent Card:', error)
|
||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* PUT - Update an agent
|
||||
*/
|
||||
export async function PUT(request: NextRequest, { params }: { params: Promise<RouteParams> }) {
|
||||
const { agentId } = await params
|
||||
|
||||
try {
|
||||
const auth = await checkHybridAuth(request, { requireWorkflowId: false })
|
||||
if (!auth.success || !auth.userId) {
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const [existingAgent] = await db
|
||||
.select()
|
||||
.from(a2aAgent)
|
||||
      .where(eq(a2aAgent.id, agentId))
      .limit(1)

    if (!existingAgent) {
      return NextResponse.json({ error: 'Agent not found' }, { status: 404 })
    }

    const body = await request.json()

    if (
      body.skillTags !== undefined &&
      (!Array.isArray(body.skillTags) ||
        !body.skillTags.every((tag: unknown): tag is string => typeof tag === 'string'))
    ) {
      return NextResponse.json({ error: 'skillTags must be an array of strings' }, { status: 400 })
    }

    let skills = body.skills ?? existingAgent.skills
    if (body.skillTags !== undefined) {
      const agentName = body.name ?? existingAgent.name
      const agentDescription = body.description ?? existingAgent.description
      skills = generateSkillsFromWorkflow(agentName, agentDescription, body.skillTags)
    }

    const [updatedAgent] = await db
      .update(a2aAgent)
      .set({
        name: body.name ?? existingAgent.name,
        description: body.description ?? existingAgent.description,
        version: body.version ?? existingAgent.version,
        capabilities: body.capabilities ?? existingAgent.capabilities,
        skills,
        authentication: body.authentication ?? existingAgent.authentication,
        isPublished: body.isPublished ?? existingAgent.isPublished,
        publishedAt:
          body.isPublished && !existingAgent.isPublished ? new Date() : existingAgent.publishedAt,
        updatedAt: new Date(),
      })
      .where(eq(a2aAgent.id, agentId))
      .returning()

    logger.info(`Updated A2A agent: ${agentId}`)

    return NextResponse.json({ success: true, agent: updatedAgent })
  } catch (error) {
    logger.error('Error updating agent:', error)
    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}

/**
 * DELETE - Delete an agent
 */
export async function DELETE(request: NextRequest, { params }: { params: Promise<RouteParams> }) {
  const { agentId } = await params

  try {
    const auth = await checkHybridAuth(request, { requireWorkflowId: false })
    if (!auth.success || !auth.userId) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    const [existingAgent] = await db
      .select()
      .from(a2aAgent)
      .where(eq(a2aAgent.id, agentId))
      .limit(1)

    if (!existingAgent) {
      return NextResponse.json({ error: 'Agent not found' }, { status: 404 })
    }

    await db.delete(a2aAgent).where(eq(a2aAgent.id, agentId))

    logger.info(`Deleted A2A agent: ${agentId}`)

    return NextResponse.json({ success: true })
  } catch (error) {
    logger.error('Error deleting agent:', error)
    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}

/**
 * POST - Publish/unpublish an agent
 */
export async function POST(request: NextRequest, { params }: { params: Promise<RouteParams> }) {
  const { agentId } = await params

  try {
    const auth = await checkHybridAuth(request, { requireWorkflowId: false })
    if (!auth.success || !auth.userId) {
      logger.warn('A2A agent publish auth failed:', { error: auth.error, hasUserId: !!auth.userId })
      return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
    }

    const [existingAgent] = await db
      .select()
      .from(a2aAgent)
      .where(eq(a2aAgent.id, agentId))
      .limit(1)

    if (!existingAgent) {
      return NextResponse.json({ error: 'Agent not found' }, { status: 404 })
    }

    const body = await request.json()
    const action = body.action as 'publish' | 'unpublish' | 'refresh'

    if (action === 'publish') {
      const [wf] = await db
        .select({ isDeployed: workflow.isDeployed })
        .from(workflow)
        .where(eq(workflow.id, existingAgent.workflowId))
        .limit(1)

      if (!wf?.isDeployed) {
        return NextResponse.json(
          { error: 'Workflow must be deployed before publishing agent' },
          { status: 400 }
        )
      }

      await db
        .update(a2aAgent)
        .set({
          isPublished: true,
          publishedAt: new Date(),
          updatedAt: new Date(),
        })
        .where(eq(a2aAgent.id, agentId))

      const redis = getRedisClient()
      if (redis) {
        try {
          await redis.del(`a2a:agent:${agentId}:card`)
        } catch (err) {
          logger.warn('Failed to invalidate agent card cache', { agentId, error: err })
        }
      }

      logger.info(`Published A2A agent: ${agentId}`)
      return NextResponse.json({ success: true, isPublished: true })
    }

    if (action === 'unpublish') {
      await db
        .update(a2aAgent)
        .set({
          isPublished: false,
          updatedAt: new Date(),
        })
        .where(eq(a2aAgent.id, agentId))

      const redis = getRedisClient()
      if (redis) {
        try {
          await redis.del(`a2a:agent:${agentId}:card`)
        } catch (err) {
          logger.warn('Failed to invalidate agent card cache', { agentId, error: err })
        }
      }

      logger.info(`Unpublished A2A agent: ${agentId}`)
      return NextResponse.json({ success: true, isPublished: false })
    }

    if (action === 'refresh') {
      const workflowData = await loadWorkflowFromNormalizedTables(existingAgent.workflowId)
      if (!workflowData) {
        return NextResponse.json({ error: 'Failed to load workflow' }, { status: 500 })
      }

      const [wf] = await db
        .select({ name: workflow.name, description: workflow.description })
        .from(workflow)
        .where(eq(workflow.id, existingAgent.workflowId))
        .limit(1)

      const skills = generateSkillsFromWorkflow(wf?.name || existingAgent.name, wf?.description)

      await db
        .update(a2aAgent)
        .set({
          skills,
          updatedAt: new Date(),
        })
        .where(eq(a2aAgent.id, agentId))

      logger.info(`Refreshed skills for A2A agent: ${agentId}`)
      return NextResponse.json({ success: true, skills })
    }

    return NextResponse.json({ error: 'Invalid action' }, { status: 400 })
  } catch (error) {
    logger.error('Error with agent action:', error)
    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}
186 apps/sim/app/api/a2a/agents/route.ts Normal file
@@ -0,0 +1,186 @@
/**
 * A2A Agents List Endpoint
 *
 * List and create A2A agents for a workspace.
 */

import { db } from '@sim/db'
import { a2aAgent, workflow } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq, sql } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { v4 as uuidv4 } from 'uuid'
import { generateSkillsFromWorkflow } from '@/lib/a2a/agent-card'
import { A2A_DEFAULT_CAPABILITIES } from '@/lib/a2a/constants'
import { sanitizeAgentName } from '@/lib/a2a/utils'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/persistence/utils'
import { hasValidStartBlockInState } from '@/lib/workflows/triggers/trigger-utils'
import { getWorkspaceById } from '@/lib/workspaces/permissions/utils'

const logger = createLogger('A2AAgentsAPI')

export const dynamic = 'force-dynamic'

/**
 * GET - List all A2A agents for a workspace
 */
export async function GET(request: NextRequest) {
  try {
    const auth = await checkHybridAuth(request, { requireWorkflowId: false })
    if (!auth.success || !auth.userId) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    const { searchParams } = new URL(request.url)
    const workspaceId = searchParams.get('workspaceId')

    if (!workspaceId) {
      return NextResponse.json({ error: 'workspaceId is required' }, { status: 400 })
    }

    const ws = await getWorkspaceById(workspaceId)
    if (!ws) {
      return NextResponse.json({ error: 'Workspace not found' }, { status: 404 })
    }

    const agents = await db
      .select({
        id: a2aAgent.id,
        workspaceId: a2aAgent.workspaceId,
        workflowId: a2aAgent.workflowId,
        name: a2aAgent.name,
        description: a2aAgent.description,
        version: a2aAgent.version,
        capabilities: a2aAgent.capabilities,
        skills: a2aAgent.skills,
        authentication: a2aAgent.authentication,
        isPublished: a2aAgent.isPublished,
        publishedAt: a2aAgent.publishedAt,
        createdAt: a2aAgent.createdAt,
        updatedAt: a2aAgent.updatedAt,
        workflowName: workflow.name,
        workflowDescription: workflow.description,
        isDeployed: workflow.isDeployed,
        taskCount: sql<number>`(
          SELECT COUNT(*)::int
          FROM "a2a_task"
          WHERE "a2a_task"."agent_id" = "a2a_agent"."id"
        )`.as('task_count'),
      })
      .from(a2aAgent)
      .leftJoin(workflow, eq(a2aAgent.workflowId, workflow.id))
      .where(eq(a2aAgent.workspaceId, workspaceId))
      .orderBy(a2aAgent.createdAt)

    logger.info(`Listed ${agents.length} A2A agents for workspace ${workspaceId}`)

    return NextResponse.json({ success: true, agents })
  } catch (error) {
    logger.error('Error listing agents:', error)
    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}

/**
 * POST - Create a new A2A agent from a workflow
 */
export async function POST(request: NextRequest) {
  try {
    const auth = await checkHybridAuth(request, { requireWorkflowId: false })
    if (!auth.success || !auth.userId) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    const body = await request.json()
    const { workspaceId, workflowId, name, description, capabilities, authentication, skillTags } =
      body

    if (!workspaceId || !workflowId) {
      return NextResponse.json(
        { error: 'workspaceId and workflowId are required' },
        { status: 400 }
      )
    }

    const [wf] = await db
      .select({
        id: workflow.id,
        name: workflow.name,
        description: workflow.description,
        workspaceId: workflow.workspaceId,
        isDeployed: workflow.isDeployed,
      })
      .from(workflow)
      .where(and(eq(workflow.id, workflowId), eq(workflow.workspaceId, workspaceId)))
      .limit(1)

    if (!wf) {
      return NextResponse.json(
        { error: 'Workflow not found or does not belong to workspace' },
        { status: 404 }
      )
    }

    const workflowData = await loadWorkflowFromNormalizedTables(workflowId)
    if (!workflowData || !hasValidStartBlockInState(workflowData)) {
      return NextResponse.json(
        { error: 'Workflow must have a Start block to be exposed as an A2A agent' },
        { status: 400 }
      )
    }

    const [existing] = await db
      .select({ id: a2aAgent.id })
      .from(a2aAgent)
      .where(and(eq(a2aAgent.workspaceId, workspaceId), eq(a2aAgent.workflowId, workflowId)))
      .limit(1)

    if (existing) {
      return NextResponse.json(
        { error: 'An agent already exists for this workflow' },
        { status: 409 }
      )
    }

    const skills = generateSkillsFromWorkflow(
      name || wf.name,
      description || wf.description,
      skillTags
    )

    const agentId = uuidv4()
    const agentName = name || sanitizeAgentName(wf.name)

    const [agent] = await db
      .insert(a2aAgent)
      .values({
        id: agentId,
        workspaceId,
        workflowId,
        createdBy: auth.userId,
        name: agentName,
        description: description || wf.description,
        version: '1.0.0',
        capabilities: {
          ...A2A_DEFAULT_CAPABILITIES,
          ...capabilities,
        },
        skills,
        authentication: authentication || {
          schemes: ['bearer', 'apiKey'],
        },
        isPublished: false,
        createdAt: new Date(),
        updatedAt: new Date(),
      })
      .returning()

    logger.info(`Created A2A agent ${agentId} for workflow ${workflowId}`)

    return NextResponse.json({ success: true, agent }, { status: 201 })
  } catch (error) {
    logger.error('Error creating agent:', error)
    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}
1263 apps/sim/app/api/a2a/serve/[agentId]/route.ts Normal file
File diff suppressed because it is too large
176 apps/sim/app/api/a2a/serve/[agentId]/utils.ts Normal file
@@ -0,0 +1,176 @@
import type { Artifact, Message, PushNotificationConfig, Task, TaskState } from '@a2a-js/sdk'
import { v4 as uuidv4 } from 'uuid'
import { generateInternalToken } from '@/lib/auth/internal'
import { getBaseUrl } from '@/lib/core/utils/urls'

/** A2A v0.3 JSON-RPC method names */
export const A2A_METHODS = {
  MESSAGE_SEND: 'message/send',
  MESSAGE_STREAM: 'message/stream',
  TASKS_GET: 'tasks/get',
  TASKS_CANCEL: 'tasks/cancel',
  TASKS_RESUBSCRIBE: 'tasks/resubscribe',
  PUSH_NOTIFICATION_SET: 'tasks/pushNotificationConfig/set',
  PUSH_NOTIFICATION_GET: 'tasks/pushNotificationConfig/get',
  PUSH_NOTIFICATION_DELETE: 'tasks/pushNotificationConfig/delete',
} as const

/** A2A v0.3 error codes */
export const A2A_ERROR_CODES = {
  PARSE_ERROR: -32700,
  INVALID_REQUEST: -32600,
  METHOD_NOT_FOUND: -32601,
  INVALID_PARAMS: -32602,
  INTERNAL_ERROR: -32603,
  TASK_NOT_FOUND: -32001,
  TASK_ALREADY_COMPLETE: -32002,
  AGENT_UNAVAILABLE: -32003,
  AUTHENTICATION_REQUIRED: -32004,
} as const

export interface JSONRPCRequest {
  jsonrpc: '2.0'
  id: string | number
  method: string
  params?: unknown
}

export interface JSONRPCResponse {
  jsonrpc: '2.0'
  id: string | number | null
  result?: unknown
  error?: {
    code: number
    message: string
    data?: unknown
  }
}

export interface MessageSendParams {
  message: Message
  configuration?: {
    acceptedOutputModes?: string[]
    historyLength?: number
    pushNotificationConfig?: PushNotificationConfig
  }
}

export interface TaskIdParams {
  id: string
  historyLength?: number
}

export interface PushNotificationSetParams {
  id: string
  pushNotificationConfig: PushNotificationConfig
}

export function createResponse(id: string | number | null, result: unknown): JSONRPCResponse {
  return { jsonrpc: '2.0', id, result }
}

export function createError(
  id: string | number | null,
  code: number,
  message: string,
  data?: unknown
): JSONRPCResponse {
  return { jsonrpc: '2.0', id, error: { code, message, data } }
}

export function isJSONRPCRequest(obj: unknown): obj is JSONRPCRequest {
  if (!obj || typeof obj !== 'object') return false
  const r = obj as Record<string, unknown>
  return r.jsonrpc === '2.0' && typeof r.method === 'string' && r.id !== undefined
}

export function generateTaskId(): string {
  return uuidv4()
}

export function createTaskStatus(state: TaskState): { state: TaskState; timestamp: string } {
  return { state, timestamp: new Date().toISOString() }
}

export function formatTaskResponse(task: Task, historyLength?: number): Task {
  if (historyLength !== undefined && task.history) {
    return {
      ...task,
      history: task.history.slice(-historyLength),
    }
  }
  return task
}

export interface ExecuteRequestConfig {
  workflowId: string
  apiKey?: string | null
  stream?: boolean
}

export interface ExecuteRequestResult {
  url: string
  headers: Record<string, string>
  useInternalAuth: boolean
}

export async function buildExecuteRequest(
  config: ExecuteRequestConfig
): Promise<ExecuteRequestResult> {
  const url = `${getBaseUrl()}/api/workflows/${config.workflowId}/execute`
  const headers: Record<string, string> = { 'Content-Type': 'application/json' }
  let useInternalAuth = false

  if (config.apiKey) {
    headers['X-API-Key'] = config.apiKey
  } else {
    const internalToken = await generateInternalToken()
    headers.Authorization = `Bearer ${internalToken}`
    useInternalAuth = true
  }

  if (config.stream) {
    headers['X-Stream-Response'] = 'true'
  }

  return { url, headers, useInternalAuth }
}

export function extractAgentContent(executeResult: {
  output?: { content?: string; [key: string]: unknown }
  error?: string
}): string {
  // Prefer explicit content field
  if (executeResult.output?.content) {
    return executeResult.output.content
  }

  // If output is an object with meaningful data, stringify it
  if (typeof executeResult.output === 'object' && executeResult.output !== null) {
    const keys = Object.keys(executeResult.output)
    // Skip empty objects or objects with only undefined values
    if (keys.length > 0 && keys.some((k) => executeResult.output![k] !== undefined)) {
      return JSON.stringify(executeResult.output)
    }
  }

  // Fallback to error message or default
  return executeResult.error || 'Task completed'
}

export function buildTaskResponse(params: {
  taskId: string
  contextId: string
  state: TaskState
  history: Message[]
  artifacts?: Artifact[]
}): Task {
  return {
    kind: 'task',
    id: params.taskId,
    contextId: params.contextId,
    status: createTaskStatus(params.state),
    history: params.history,
    artifacts: params.artifacts || [],
  }
}
@@ -97,6 +97,7 @@ const ChatMessageSchema = z.object({
      })
    )
    .optional(),
  commands: z.array(z.string()).optional(),
})

/**
@@ -132,6 +133,7 @@ export async function POST(req: NextRequest) {
      provider,
      conversationId,
      contexts,
      commands,
    } = ChatMessageSchema.parse(body)
    // Ensure we have a consistent user message ID for this request
    const userMessageIdToUse = userMessageId || crypto.randomUUID()
@@ -462,6 +464,7 @@ export async function POST(req: NextRequest) {
      ...(integrationTools.length > 0 && { tools: integrationTools }),
      ...(baseTools.length > 0 && { baseTools }),
      ...(credentials && { credentials }),
      ...(commands && commands.length > 0 && { commands }),
    }

    try {

@@ -1,11 +1,12 @@
import { db } from '@sim/db'
import { memory, permissions, workspace } from '@sim/db/schema'
import { memory } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { checkWorkspaceAccess } from '@/lib/workspaces/permissions/utils'

const logger = createLogger('MemoryByIdAPI')

@@ -29,46 +30,6 @@ const memoryPutBodySchema = z.object({
  workspaceId: z.string().uuid('Invalid workspace ID format'),
})

async function checkWorkspaceAccess(
  workspaceId: string,
  userId: string
): Promise<{ hasAccess: boolean; canWrite: boolean }> {
  const [workspaceRow] = await db
    .select({ ownerId: workspace.ownerId })
    .from(workspace)
    .where(eq(workspace.id, workspaceId))
    .limit(1)

  if (!workspaceRow) {
    return { hasAccess: false, canWrite: false }
  }

  if (workspaceRow.ownerId === userId) {
    return { hasAccess: true, canWrite: true }
  }

  const [permissionRow] = await db
    .select({ permissionType: permissions.permissionType })
    .from(permissions)
    .where(
      and(
        eq(permissions.userId, userId),
        eq(permissions.entityType, 'workspace'),
        eq(permissions.entityId, workspaceId)
      )
    )
    .limit(1)

  if (!permissionRow) {
    return { hasAccess: false, canWrite: false }
  }

  return {
    hasAccess: true,
    canWrite: permissionRow.permissionType === 'write' || permissionRow.permissionType === 'admin',
  }
}

async function validateMemoryAccess(
  request: NextRequest,
  workspaceId: string,
@@ -86,8 +47,8 @@ async function validateMemoryAccess(
    }
  }

  const { hasAccess, canWrite } = await checkWorkspaceAccess(workspaceId, authResult.userId)
  if (!hasAccess) {
  const access = await checkWorkspaceAccess(workspaceId, authResult.userId)
  if (!access.exists || !access.hasAccess) {
    return {
      error: NextResponse.json(
        { success: false, error: { message: 'Workspace not found' } },
@@ -96,7 +57,7 @@ async function validateMemoryAccess(
    }
  }

  if (action === 'write' && !canWrite) {
  if (action === 'write' && !access.canWrite) {
    return {
      error: NextResponse.json(
        { success: false, error: { message: 'Write access denied' } },

@@ -1,56 +1,17 @@
import { db } from '@sim/db'
import { memory, permissions, workspace } from '@sim/db/schema'
import { memory } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq, isNull, like } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { checkWorkspaceAccess } from '@/lib/workspaces/permissions/utils'

const logger = createLogger('MemoryAPI')

export const dynamic = 'force-dynamic'
export const runtime = 'nodejs'

async function checkWorkspaceAccess(
  workspaceId: string,
  userId: string
): Promise<{ hasAccess: boolean; canWrite: boolean }> {
  const [workspaceRow] = await db
    .select({ ownerId: workspace.ownerId })
    .from(workspace)
    .where(eq(workspace.id, workspaceId))
    .limit(1)

  if (!workspaceRow) {
    return { hasAccess: false, canWrite: false }
  }

  if (workspaceRow.ownerId === userId) {
    return { hasAccess: true, canWrite: true }
  }

  const [permissionRow] = await db
    .select({ permissionType: permissions.permissionType })
    .from(permissions)
    .where(
      and(
        eq(permissions.userId, userId),
        eq(permissions.entityType, 'workspace'),
        eq(permissions.entityId, workspaceId)
      )
    )
    .limit(1)

  if (!permissionRow) {
    return { hasAccess: false, canWrite: false }
  }

  return {
    hasAccess: true,
    canWrite: permissionRow.permissionType === 'write' || permissionRow.permissionType === 'admin',
  }
}

export async function GET(request: NextRequest) {
  const requestId = generateRequestId()

@@ -76,8 +37,14 @@ export async function GET(request: NextRequest) {
      )
    }

    const { hasAccess } = await checkWorkspaceAccess(workspaceId, authResult.userId)
    if (!hasAccess) {
    const access = await checkWorkspaceAccess(workspaceId, authResult.userId)
    if (!access.exists) {
      return NextResponse.json(
        { success: false, error: { message: 'Workspace not found' } },
        { status: 404 }
      )
    }
    if (!access.hasAccess) {
      return NextResponse.json(
        { success: false, error: { message: 'Access denied to this workspace' } },
        { status: 403 }
@@ -155,15 +122,21 @@ export async function POST(request: NextRequest) {
      )
    }

    const { hasAccess, canWrite } = await checkWorkspaceAccess(workspaceId, authResult.userId)
    if (!hasAccess) {
    const access = await checkWorkspaceAccess(workspaceId, authResult.userId)
    if (!access.exists) {
      return NextResponse.json(
        { success: false, error: { message: 'Workspace not found' } },
        { status: 404 }
      )
    }
    if (!access.hasAccess) {
      return NextResponse.json(
        { success: false, error: { message: 'Access denied to this workspace' } },
        { status: 403 }
      )
    }

    if (!canWrite) {
    if (!access.canWrite) {
      return NextResponse.json(
        { success: false, error: { message: 'Write access denied to this workspace' } },
        { status: 403 }
@@ -282,15 +255,21 @@ export async function DELETE(request: NextRequest) {
      )
    }

    const { hasAccess, canWrite } = await checkWorkspaceAccess(workspaceId, authResult.userId)
    if (!hasAccess) {
    const access = await checkWorkspaceAccess(workspaceId, authResult.userId)
    if (!access.exists) {
      return NextResponse.json(
        { success: false, error: { message: 'Workspace not found' } },
        { status: 404 }
      )
    }
    if (!access.hasAccess) {
      return NextResponse.json(
        { success: false, error: { message: 'Access denied to this workspace' } },
        { status: 403 }
      )
    }

    if (!canWrite) {
    if (!access.canWrite) {
      return NextResponse.json(
        { success: false, error: { message: 'Write access denied to this workspace' } },
        { status: 403 }

84 apps/sim/app/api/tools/a2a/cancel-task/route.ts Normal file
@@ -0,0 +1,84 @@
import type { Task } from '@a2a-js/sdk'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { createA2AClient } from '@/lib/a2a/utils'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'

const logger = createLogger('A2ACancelTaskAPI')

export const dynamic = 'force-dynamic'

const A2ACancelTaskSchema = z.object({
  agentUrl: z.string().min(1, 'Agent URL is required'),
  taskId: z.string().min(1, 'Task ID is required'),
  apiKey: z.string().optional(),
})

export async function POST(request: NextRequest) {
  const requestId = generateRequestId()

  try {
    const authResult = await checkHybridAuth(request, { requireWorkflowId: false })

    if (!authResult.success) {
      logger.warn(`[${requestId}] Unauthorized A2A cancel task attempt`)
      return NextResponse.json(
        {
          success: false,
          error: authResult.error || 'Authentication required',
        },
        { status: 401 }
      )
    }

    const body = await request.json()
    const validatedData = A2ACancelTaskSchema.parse(body)

    logger.info(`[${requestId}] Canceling A2A task`, {
      agentUrl: validatedData.agentUrl,
      taskId: validatedData.taskId,
    })

    const client = await createA2AClient(validatedData.agentUrl, validatedData.apiKey)

    const task = (await client.cancelTask({ id: validatedData.taskId })) as Task

    logger.info(`[${requestId}] Successfully canceled A2A task`, {
      taskId: validatedData.taskId,
      state: task.status.state,
    })

    return NextResponse.json({
      success: true,
      output: {
        cancelled: true,
        state: task.status.state,
      },
    })
  } catch (error) {
    if (error instanceof z.ZodError) {
      logger.warn(`[${requestId}] Invalid A2A cancel task request`, {
        errors: error.errors,
      })
      return NextResponse.json(
        {
          success: false,
          error: 'Invalid request data',
          details: error.errors,
        },
        { status: 400 }
      )
    }

    logger.error(`[${requestId}] Error canceling A2A task:`, error)
    return NextResponse.json(
      {
        success: false,
        error: error instanceof Error ? error.message : 'Failed to cancel task',
      },
      { status: 500 }
    )
  }
}
94 apps/sim/app/api/tools/a2a/delete-push-notification/route.ts Normal file
@@ -0,0 +1,94 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { createA2AClient } from '@/lib/a2a/utils'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'

export const dynamic = 'force-dynamic'

const logger = createLogger('A2ADeletePushNotificationAPI')

const A2ADeletePushNotificationSchema = z.object({
  agentUrl: z.string().min(1, 'Agent URL is required'),
  taskId: z.string().min(1, 'Task ID is required'),
  pushNotificationConfigId: z.string().optional(),
  apiKey: z.string().optional(),
})

export async function POST(request: NextRequest) {
  const requestId = generateRequestId()

  try {
    const authResult = await checkHybridAuth(request, { requireWorkflowId: false })

    if (!authResult.success) {
      logger.warn(
        `[${requestId}] Unauthorized A2A delete push notification attempt: ${authResult.error}`
      )
      return NextResponse.json(
        {
          success: false,
          error: authResult.error || 'Authentication required',
        },
        { status: 401 }
      )
    }

    logger.info(
      `[${requestId}] Authenticated A2A delete push notification request via ${authResult.authType}`,
      {
        userId: authResult.userId,
      }
    )

    const body = await request.json()
    const validatedData = A2ADeletePushNotificationSchema.parse(body)

    logger.info(`[${requestId}] Deleting A2A push notification config`, {
      agentUrl: validatedData.agentUrl,
      taskId: validatedData.taskId,
      pushNotificationConfigId: validatedData.pushNotificationConfigId,
    })

    const client = await createA2AClient(validatedData.agentUrl, validatedData.apiKey)

    await client.deleteTaskPushNotificationConfig({
      id: validatedData.taskId,
      pushNotificationConfigId: validatedData.pushNotificationConfigId || validatedData.taskId,
    })

    logger.info(`[${requestId}] Push notification config deleted successfully`, {
      taskId: validatedData.taskId,
    })

    return NextResponse.json({
      success: true,
      output: {
        success: true,
      },
    })
  } catch (error) {
    if (error instanceof z.ZodError) {
      logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
      return NextResponse.json(
        {
          success: false,
          error: 'Invalid request data',
          details: error.errors,
        },
        { status: 400 }
      )
    }

    logger.error(`[${requestId}] Error deleting A2A push notification:`, error)

    return NextResponse.json(
      {
        success: false,
        error: error instanceof Error ? error.message : 'Failed to delete push notification',
      },
      { status: 500 }
    )
  }
}
92 apps/sim/app/api/tools/a2a/get-agent-card/route.ts Normal file
@@ -0,0 +1,92 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { createA2AClient } from '@/lib/a2a/utils'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'

export const dynamic = 'force-dynamic'

const logger = createLogger('A2AGetAgentCardAPI')

const A2AGetAgentCardSchema = z.object({
  agentUrl: z.string().min(1, 'Agent URL is required'),
  apiKey: z.string().optional(),
})

export async function POST(request: NextRequest) {
  const requestId = generateRequestId()

  try {
    const authResult = await checkHybridAuth(request, { requireWorkflowId: false })

    if (!authResult.success) {
      logger.warn(`[${requestId}] Unauthorized A2A get agent card attempt: ${authResult.error}`)
      return NextResponse.json(
        {
          success: false,
          error: authResult.error || 'Authentication required',
        },
        { status: 401 }
      )
    }

    logger.info(
      `[${requestId}] Authenticated A2A get agent card request via ${authResult.authType}`,
      {
        userId: authResult.userId,
      }
    )

    const body = await request.json()
    const validatedData = A2AGetAgentCardSchema.parse(body)

    logger.info(`[${requestId}] Fetching Agent Card`, {
      agentUrl: validatedData.agentUrl,
    })

    const client = await createA2AClient(validatedData.agentUrl, validatedData.apiKey)

    const agentCard = await client.getAgentCard()

    logger.info(`[${requestId}] Agent Card fetched successfully`, {
      agentName: agentCard.name,
    })

    return NextResponse.json({
      success: true,
      output: {
        name: agentCard.name,
        description: agentCard.description,
        url: agentCard.url,
        version: agentCard.protocolVersion,
        capabilities: agentCard.capabilities,
        skills: agentCard.skills,
        defaultInputModes: agentCard.defaultInputModes,
        defaultOutputModes: agentCard.defaultOutputModes,
      },
    })
  } catch (error) {
    if (error instanceof z.ZodError) {
      logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
      return NextResponse.json(
        {
          success: false,
          error: 'Invalid request data',
          details: error.errors,
        },
        { status: 400 }
      )
    }

    logger.error(`[${requestId}] Error fetching Agent Card:`, error)

    return NextResponse.json(
      {
        success: false,
        error: error instanceof Error ? error.message : 'Failed to fetch Agent Card',
      },
      { status: 500 }
    )
  }
}
115 apps/sim/app/api/tools/a2a/get-push-notification/route.ts Normal file
@@ -0,0 +1,115 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { createA2AClient } from '@/lib/a2a/utils'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'

export const dynamic = 'force-dynamic'

const logger = createLogger('A2AGetPushNotificationAPI')

const A2AGetPushNotificationSchema = z.object({
  agentUrl: z.string().min(1, 'Agent URL is required'),
  taskId: z.string().min(1, 'Task ID is required'),
  apiKey: z.string().optional(),
})

export async function POST(request: NextRequest) {
  const requestId = generateRequestId()

  try {
    const authResult = await checkHybridAuth(request, { requireWorkflowId: false })

    if (!authResult.success) {
      logger.warn(
        `[${requestId}] Unauthorized A2A get push notification attempt: ${authResult.error}`
      )
      return NextResponse.json(
        {
          success: false,
          error: authResult.error || 'Authentication required',
        },
        { status: 401 }
      )
    }

    logger.info(
      `[${requestId}] Authenticated A2A get push notification request via ${authResult.authType}`,
      {
        userId: authResult.userId,
      }
    )

    const body = await request.json()
    const validatedData = A2AGetPushNotificationSchema.parse(body)

    logger.info(`[${requestId}] Getting push notification config`, {
      agentUrl: validatedData.agentUrl,
      taskId: validatedData.taskId,
    })

    const client = await createA2AClient(validatedData.agentUrl, validatedData.apiKey)

    const result = await client.getTaskPushNotificationConfig({
      id: validatedData.taskId,
    })

    if (!result || !result.pushNotificationConfig) {
      logger.info(`[${requestId}] No push notification config found for task`, {
        taskId: validatedData.taskId,
      })
      return NextResponse.json({
        success: true,
        output: {
          exists: false,
        },
      })
    }

    logger.info(`[${requestId}] Push notification config retrieved successfully`, {
      taskId: validatedData.taskId,
    })

    return NextResponse.json({
      success: true,
      output: {
        url: result.pushNotificationConfig.url,
        token: result.pushNotificationConfig.token,
        exists: true,
      },
    })
  } catch (error) {
    if (error instanceof z.ZodError) {
      logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
      return NextResponse.json(
        {
          success: false,
          error: 'Invalid request data',
          details: error.errors,
        },
        { status: 400 }
      )
    }

    if (error instanceof Error && error.message.includes('not found')) {
      logger.info(`[${requestId}] Task not found, returning exists: false`)
      return NextResponse.json({
        success: true,
        output: {
          exists: false,
        },
      })
    }

    logger.error(`[${requestId}] Error getting A2A push notification:`, error)

    return NextResponse.json(
      {
        success: false,
        error: error instanceof Error ? error.message : 'Failed to get push notification',
      },
      { status: 500 }
    )
  }
}
95 apps/sim/app/api/tools/a2a/get-task/route.ts Normal file
@@ -0,0 +1,95 @@
import type { Task } from '@a2a-js/sdk'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { createA2AClient } from '@/lib/a2a/utils'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'

export const dynamic = 'force-dynamic'

const logger = createLogger('A2AGetTaskAPI')

const A2AGetTaskSchema = z.object({
  agentUrl: z.string().min(1, 'Agent URL is required'),
  taskId: z.string().min(1, 'Task ID is required'),
  apiKey: z.string().optional(),
  historyLength: z.number().optional(),
})

export async function POST(request: NextRequest) {
  const requestId = generateRequestId()

  try {
    const authResult = await checkHybridAuth(request, { requireWorkflowId: false })

    if (!authResult.success) {
      logger.warn(`[${requestId}] Unauthorized A2A get task attempt: ${authResult.error}`)
      return NextResponse.json(
        {
          success: false,
          error: authResult.error || 'Authentication required',
        },
        { status: 401 }
      )
    }

    logger.info(`[${requestId}] Authenticated A2A get task request via ${authResult.authType}`, {
      userId: authResult.userId,
    })

    const body = await request.json()
    const validatedData = A2AGetTaskSchema.parse(body)

    logger.info(`[${requestId}] Getting A2A task`, {
      agentUrl: validatedData.agentUrl,
      taskId: validatedData.taskId,
      historyLength: validatedData.historyLength,
    })

    const client = await createA2AClient(validatedData.agentUrl, validatedData.apiKey)

    const task = (await client.getTask({
      id: validatedData.taskId,
      historyLength: validatedData.historyLength,
    })) as Task

    logger.info(`[${requestId}] Successfully retrieved A2A task`, {
      taskId: task.id,
      state: task.status.state,
    })

    return NextResponse.json({
      success: true,
      output: {
        taskId: task.id,
        contextId: task.contextId,
        state: task.status.state,
        artifacts: task.artifacts,
        history: task.history,
      },
    })
  } catch (error) {
    if (error instanceof z.ZodError) {
      logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
      return NextResponse.json(
        {
          success: false,
          error: 'Invalid request data',
          details: error.errors,
        },
        { status: 400 }
      )
    }

    logger.error(`[${requestId}] Error getting A2A task:`, error)

    return NextResponse.json(
      {
        success: false,
        error: error instanceof Error ? error.message : 'Failed to get task',
      },
      { status: 500 }
    )
  }
}
119 apps/sim/app/api/tools/a2a/resubscribe/route.ts Normal file
@@ -0,0 +1,119 @@
import type {
  Artifact,
  Message,
  Task,
  TaskArtifactUpdateEvent,
  TaskState,
  TaskStatusUpdateEvent,
} from '@a2a-js/sdk'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { createA2AClient, extractTextContent, isTerminalState } from '@/lib/a2a/utils'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'

const logger = createLogger('A2AResubscribeAPI')

export const dynamic = 'force-dynamic'

const A2AResubscribeSchema = z.object({
  agentUrl: z.string().min(1, 'Agent URL is required'),
  taskId: z.string().min(1, 'Task ID is required'),
  apiKey: z.string().optional(),
})

export async function POST(request: NextRequest) {
  const requestId = generateRequestId()

  try {
    const authResult = await checkHybridAuth(request, { requireWorkflowId: false })

    if (!authResult.success) {
      logger.warn(`[${requestId}] Unauthorized A2A resubscribe attempt`)
      return NextResponse.json(
        {
          success: false,
          error: authResult.error || 'Authentication required',
        },
        { status: 401 }
      )
    }

    const body = await request.json()
    const validatedData = A2AResubscribeSchema.parse(body)

    const client = await createA2AClient(validatedData.agentUrl, validatedData.apiKey)

    const stream = client.resubscribeTask({ id: validatedData.taskId })

    let taskId = validatedData.taskId
    let contextId: string | undefined
    let state: TaskState = 'working'
    let content = ''
    let artifacts: Artifact[] = []
    let history: Message[] = []

    for await (const event of stream) {
      if (event.kind === 'message') {
        const msg = event as Message
        content = extractTextContent(msg)
        taskId = msg.taskId || taskId
        contextId = msg.contextId || contextId
        state = 'completed'
      } else if (event.kind === 'task') {
        const task = event as Task
        taskId = task.id
        contextId = task.contextId
        state = task.status.state
        artifacts = task.artifacts || []
        history = task.history || []
        const lastAgentMessage = history.filter((m) => m.role === 'agent').pop()
        if (lastAgentMessage) {
          content = extractTextContent(lastAgentMessage)
        }
      } else if ('status' in event) {
        const statusEvent = event as TaskStatusUpdateEvent
        state = statusEvent.status.state
      } else if ('artifact' in event) {
        const artifactEvent = event as TaskArtifactUpdateEvent
        artifacts.push(artifactEvent.artifact)
      }
    }

    logger.info(`[${requestId}] Successfully resubscribed to A2A task ${taskId}`)

    return NextResponse.json({
      success: true,
      output: {
        taskId,
        contextId,
        state,
        isRunning: !isTerminalState(state),
        artifacts,
        history,
      },
    })
  } catch (error) {
    if (error instanceof z.ZodError) {
      logger.warn(`[${requestId}] Invalid A2A resubscribe data`, { errors: error.errors })
      return NextResponse.json(
        {
          success: false,
          error: 'Invalid request data',
          details: error.errors,
        },
        { status: 400 }
      )
    }

    logger.error(`[${requestId}] Error resubscribing to A2A task:`, error)
    return NextResponse.json(
      {
        success: false,
        error: error instanceof Error ? error.message : 'Failed to resubscribe',
      },
      { status: 500 }
    )
  }
}
150 apps/sim/app/api/tools/a2a/send-message-stream/route.ts Normal file
@@ -0,0 +1,150 @@
import type {
  Artifact,
  Message,
  Task,
  TaskArtifactUpdateEvent,
  TaskState,
  TaskStatusUpdateEvent,
} from '@a2a-js/sdk'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { createA2AClient, extractTextContent, isTerminalState } from '@/lib/a2a/utils'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'

export const dynamic = 'force-dynamic'

const logger = createLogger('A2ASendMessageStreamAPI')

const A2ASendMessageStreamSchema = z.object({
  agentUrl: z.string().min(1, 'Agent URL is required'),
  message: z.string().min(1, 'Message is required'),
  taskId: z.string().optional(),
  contextId: z.string().optional(),
  apiKey: z.string().optional(),
})

export async function POST(request: NextRequest) {
  const requestId = generateRequestId()

  try {
    const authResult = await checkHybridAuth(request, { requireWorkflowId: false })

    if (!authResult.success) {
      logger.warn(
        `[${requestId}] Unauthorized A2A send message stream attempt: ${authResult.error}`
      )
      return NextResponse.json(
        {
          success: false,
          error: authResult.error || 'Authentication required',
        },
        { status: 401 }
      )
    }

    logger.info(
      `[${requestId}] Authenticated A2A send message stream request via ${authResult.authType}`,
      {
        userId: authResult.userId,
      }
    )

    const body = await request.json()
    const validatedData = A2ASendMessageStreamSchema.parse(body)

    logger.info(`[${requestId}] Sending A2A streaming message`, {
      agentUrl: validatedData.agentUrl,
      hasTaskId: !!validatedData.taskId,
      hasContextId: !!validatedData.contextId,
    })

    const client = await createA2AClient(validatedData.agentUrl, validatedData.apiKey)

    const message: Message = {
      kind: 'message',
      messageId: crypto.randomUUID(),
      role: 'user',
      parts: [{ kind: 'text', text: validatedData.message }],
      ...(validatedData.taskId && { taskId: validatedData.taskId }),
      ...(validatedData.contextId && { contextId: validatedData.contextId }),
    }

    const stream = client.sendMessageStream({ message })

    let taskId = ''
    let contextId: string | undefined
    let state: TaskState = 'working'
    let content = ''
    let artifacts: Artifact[] = []
    let history: Message[] = []

    for await (const event of stream) {
      if (event.kind === 'message') {
        const msg = event as Message
        content = extractTextContent(msg)
        taskId = msg.taskId || taskId
        contextId = msg.contextId || contextId
        state = 'completed'
      } else if (event.kind === 'task') {
        const task = event as Task
        taskId = task.id
        contextId = task.contextId
        state = task.status.state
        artifacts = task.artifacts || []
        history = task.history || []
        const lastAgentMessage = history.filter((m) => m.role === 'agent').pop()
        if (lastAgentMessage) {
          content = extractTextContent(lastAgentMessage)
        }
      } else if ('status' in event) {
        const statusEvent = event as TaskStatusUpdateEvent
        state = statusEvent.status.state
      } else if ('artifact' in event) {
        const artifactEvent = event as TaskArtifactUpdateEvent
        artifacts.push(artifactEvent.artifact)
      }
    }

    logger.info(`[${requestId}] A2A streaming message completed`, {
      taskId,
      state,
      artifactCount: artifacts.length,
    })

    return NextResponse.json({
      success: isTerminalState(state) && state !== 'failed',
      output: {
        content,
        taskId,
        contextId,
        state,
        artifacts,
        history,
      },
    })
  } catch (error) {
    if (error instanceof z.ZodError) {
      logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
      return NextResponse.json(
        {
          success: false,
          error: 'Invalid request data',
          details: error.errors,
        },
        { status: 400 }
      )
    }

    logger.error(`[${requestId}] Error in A2A streaming:`, error)

    return NextResponse.json(
      {
        success: false,
        error: error instanceof Error ? error.message : 'Streaming failed',
      },
      { status: 500 }
    )
  }
}
126 apps/sim/app/api/tools/a2a/send-message/route.ts Normal file
@@ -0,0 +1,126 @@
import type { Message, Task } from '@a2a-js/sdk'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { createA2AClient, extractTextContent, isTerminalState } from '@/lib/a2a/utils'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'

export const dynamic = 'force-dynamic'

const logger = createLogger('A2ASendMessageAPI')

const A2ASendMessageSchema = z.object({
  agentUrl: z.string().min(1, 'Agent URL is required'),
  message: z.string().min(1, 'Message is required'),
  taskId: z.string().optional(),
  contextId: z.string().optional(),
  apiKey: z.string().optional(),
})

export async function POST(request: NextRequest) {
  const requestId = generateRequestId()

  try {
    const authResult = await checkHybridAuth(request, { requireWorkflowId: false })

    if (!authResult.success) {
      logger.warn(`[${requestId}] Unauthorized A2A send message attempt: ${authResult.error}`)
      return NextResponse.json(
        {
          success: false,
          error: authResult.error || 'Authentication required',
        },
        { status: 401 }
      )
    }

    logger.info(
      `[${requestId}] Authenticated A2A send message request via ${authResult.authType}`,
      {
        userId: authResult.userId,
      }
    )

    const body = await request.json()
    const validatedData = A2ASendMessageSchema.parse(body)

    logger.info(`[${requestId}] Sending A2A message`, {
      agentUrl: validatedData.agentUrl,
      hasTaskId: !!validatedData.taskId,
      hasContextId: !!validatedData.contextId,
    })

    const client = await createA2AClient(validatedData.agentUrl, validatedData.apiKey)

    const message: Message = {
      kind: 'message',
      messageId: crypto.randomUUID(),
      role: 'user',
      parts: [{ kind: 'text', text: validatedData.message }],
      ...(validatedData.taskId && { taskId: validatedData.taskId }),
      ...(validatedData.contextId && { contextId: validatedData.contextId }),
    }

    const result = await client.sendMessage({ message })

    if (result.kind === 'message') {
      const responseMessage = result as Message

      logger.info(`[${requestId}] A2A message sent successfully (message response)`)

      return NextResponse.json({
        success: true,
        output: {
          content: extractTextContent(responseMessage),
          taskId: responseMessage.taskId || '',
          contextId: responseMessage.contextId,
          state: 'completed',
        },
      })
    }

    const task = result as Task
    const lastAgentMessage = task.history?.filter((m) => m.role === 'agent').pop()
    const content = lastAgentMessage ? extractTextContent(lastAgentMessage) : ''

    logger.info(`[${requestId}] A2A message sent successfully (task response)`, {
      taskId: task.id,
      state: task.status.state,
    })

    return NextResponse.json({
      success: isTerminalState(task.status.state) && task.status.state !== 'failed',
      output: {
        content,
        taskId: task.id,
        contextId: task.contextId,
        state: task.status.state,
        artifacts: task.artifacts,
        history: task.history,
      },
    })
  } catch (error) {
    if (error instanceof z.ZodError) {
      logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
      return NextResponse.json(
        {
          success: false,
          error: 'Invalid request data',
          details: error.errors,
        },
        { status: 400 }
      )
    }

    logger.error(`[${requestId}] Error sending A2A message:`, error)

    return NextResponse.json(
      {
        success: false,
        error: error instanceof Error ? error.message : 'Internal server error',
      },
      { status: 500 }
    )
  }
}
93 apps/sim/app/api/tools/a2a/set-push-notification/route.ts Normal file
@@ -0,0 +1,93 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { createA2AClient } from '@/lib/a2a/utils'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'

export const dynamic = 'force-dynamic'

const logger = createLogger('A2ASetPushNotificationAPI')

const A2ASetPushNotificationSchema = z.object({
  agentUrl: z.string().min(1, 'Agent URL is required'),
  taskId: z.string().min(1, 'Task ID is required'),
  webhookUrl: z.string().min(1, 'Webhook URL is required'),
  token: z.string().optional(),
  apiKey: z.string().optional(),
})

export async function POST(request: NextRequest) {
  const requestId = generateRequestId()

  try {
    const authResult = await checkHybridAuth(request, { requireWorkflowId: false })

    if (!authResult.success) {
      logger.warn(`[${requestId}] Unauthorized A2A set push notification attempt`, {
        error: authResult.error || 'Authentication required',
      })
      return NextResponse.json(
        {
          success: false,
          error: authResult.error || 'Authentication required',
        },
        { status: 401 }
      )
    }

    const body = await request.json()
    const validatedData = A2ASetPushNotificationSchema.parse(body)

    logger.info(`[${requestId}] A2A set push notification request`, {
      agentUrl: validatedData.agentUrl,
      taskId: validatedData.taskId,
      webhookUrl: validatedData.webhookUrl,
    })

    const client = await createA2AClient(validatedData.agentUrl, validatedData.apiKey)

    const result = await client.setTaskPushNotificationConfig({
      taskId: validatedData.taskId,
      pushNotificationConfig: {
        url: validatedData.webhookUrl,
        token: validatedData.token,
      },
    })

    logger.info(`[${requestId}] A2A set push notification successful`, {
      taskId: validatedData.taskId,
    })

    return NextResponse.json({
      success: true,
      output: {
        url: result.pushNotificationConfig.url,
        token: result.pushNotificationConfig.token,
        success: true,
      },
    })
  } catch (error) {
    if (error instanceof z.ZodError) {
      logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
      return NextResponse.json(
        {
          success: false,
          error: 'Invalid request data',
          details: error.errors,
        },
        { status: 400 }
      )
    }

    logger.error(`[${requestId}] Error setting A2A push notification:`, error)

    return NextResponse.json(
      {
        success: false,
        error: error instanceof Error ? error.message : 'Failed to set push notification',
      },
      { status: 500 }
    )
  }
}
247 apps/sim/app/api/v1/admin/folders/[id]/export/route.ts Normal file
@@ -0,0 +1,247 @@
/**
 * GET /api/v1/admin/folders/[id]/export
 *
 * Export a folder and all its contents (workflows + subfolders) as a ZIP file or JSON (raw, unsanitized for admin backup/restore).
 *
 * Query Parameters:
 * - format: 'zip' (default) or 'json'
 *
 * Response:
 * - ZIP file download (Content-Type: application/zip)
 * - JSON: FolderExportFullPayload
 */

import { db } from '@sim/db'
import { workflow, workflowFolder } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { NextResponse } from 'next/server'
import { exportFolderToZip, sanitizePathSegment } from '@/lib/workflows/operations/import-export'
import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/persistence/utils'
import { withAdminAuthParams } from '@/app/api/v1/admin/middleware'
import {
  internalErrorResponse,
  notFoundResponse,
  singleResponse,
} from '@/app/api/v1/admin/responses'
import {
  type FolderExportPayload,
  parseWorkflowVariables,
  type WorkflowExportState,
} from '@/app/api/v1/admin/types'

const logger = createLogger('AdminFolderExportAPI')

interface RouteParams {
  id: string
}

interface CollectedWorkflow {
  id: string
  folderId: string | null
}

/**
 * Recursively collects all workflows within a folder and its subfolders.
 */
function collectWorkflowsInFolder(
  folderId: string,
  allWorkflows: Array<{ id: string; folderId: string | null }>,
  allFolders: Array<{ id: string; parentId: string | null }>
): CollectedWorkflow[] {
  const collected: CollectedWorkflow[] = []

  for (const wf of allWorkflows) {
    if (wf.folderId === folderId) {
      collected.push({ id: wf.id, folderId: wf.folderId })
    }
  }

  for (const folder of allFolders) {
    if (folder.parentId === folderId) {
      const childWorkflows = collectWorkflowsInFolder(folder.id, allWorkflows, allFolders)
      collected.push(...childWorkflows)
    }
  }

  return collected
}

/**
 * Collects all subfolders recursively under a root folder.
 * Returns folders with parentId adjusted so direct children of rootFolderId have parentId: null.
 */
function collectSubfolders(
  rootFolderId: string,
  allFolders: Array<{ id: string; name: string; parentId: string | null }>
): FolderExportPayload[] {
  const subfolders: FolderExportPayload[] = []

  function collect(parentId: string) {
    for (const folder of allFolders) {
      if (folder.parentId === parentId) {
        subfolders.push({
          id: folder.id,
          name: folder.name,
          parentId: folder.parentId === rootFolderId ? null : folder.parentId,
        })
        collect(folder.id)
      }
    }
  }

  collect(rootFolderId)
  return subfolders
}

export const GET = withAdminAuthParams<RouteParams>(async (request, context) => {
  const { id: folderId } = await context.params
  const url = new URL(request.url)
  const format = url.searchParams.get('format') || 'zip'

  try {
    const [folderData] = await db
      .select({
        id: workflowFolder.id,
        name: workflowFolder.name,
        workspaceId: workflowFolder.workspaceId,
      })
      .from(workflowFolder)
      .where(eq(workflowFolder.id, folderId))
      .limit(1)

    if (!folderData) {
      return notFoundResponse('Folder')
    }

    const allWorkflows = await db
      .select({ id: workflow.id, folderId: workflow.folderId })
      .from(workflow)
      .where(eq(workflow.workspaceId, folderData.workspaceId))

    const allFolders = await db
      .select({
        id: workflowFolder.id,
        name: workflowFolder.name,
        parentId: workflowFolder.parentId,
      })
      .from(workflowFolder)
      .where(eq(workflowFolder.workspaceId, folderData.workspaceId))

    const workflowsInFolder = collectWorkflowsInFolder(folderId, allWorkflows, allFolders)
|
||||
const subfolders = collectSubfolders(folderId, allFolders)
|
||||
|
||||
const workflowExports: Array<{
|
||||
workflow: {
|
||||
id: string
|
||||
name: string
|
||||
description: string | null
|
||||
color: string | null
|
||||
folderId: string | null
|
||||
}
|
||||
state: WorkflowExportState
|
||||
}> = []
|
||||
|
||||
for (const collectedWf of workflowsInFolder) {
|
||||
try {
|
||||
const [wfData] = await db
|
||||
.select()
|
||||
.from(workflow)
|
||||
.where(eq(workflow.id, collectedWf.id))
|
||||
.limit(1)
|
||||
|
||||
if (!wfData) {
|
||||
logger.warn(`Skipping workflow ${collectedWf.id} - not found`)
|
||||
continue
|
||||
}
|
||||
|
||||
const normalizedData = await loadWorkflowFromNormalizedTables(collectedWf.id)
|
||||
|
||||
if (!normalizedData) {
|
||||
logger.warn(`Skipping workflow ${collectedWf.id} - no normalized data found`)
|
||||
continue
|
||||
}
|
||||
|
||||
const variables = parseWorkflowVariables(wfData.variables)
|
||||
|
||||
const remappedFolderId = collectedWf.folderId === folderId ? null : collectedWf.folderId
|
||||
|
||||
const state: WorkflowExportState = {
|
||||
blocks: normalizedData.blocks,
|
||||
edges: normalizedData.edges,
|
||||
loops: normalizedData.loops,
|
||||
parallels: normalizedData.parallels,
|
||||
metadata: {
|
||||
name: wfData.name,
|
||||
description: wfData.description ?? undefined,
|
||||
color: wfData.color,
|
||||
exportedAt: new Date().toISOString(),
|
||||
},
|
||||
variables,
|
||||
}
|
||||
|
||||
workflowExports.push({
|
||||
workflow: {
|
||||
id: wfData.id,
|
||||
name: wfData.name,
|
||||
description: wfData.description,
|
||||
color: wfData.color,
|
||||
folderId: remappedFolderId,
|
||||
},
|
||||
state,
|
||||
})
|
||||
} catch (error) {
|
||||
logger.error(`Failed to load workflow ${collectedWf.id}:`, { error })
|
||||
}
|
||||
}
|
||||
|
||||
logger.info(
|
||||
`Admin API: Exporting folder ${folderId} with ${workflowExports.length} workflows and ${subfolders.length} subfolders`
|
||||
)
|
||||
|
||||
if (format === 'json') {
|
||||
const exportPayload = {
|
||||
version: '1.0',
|
||||
exportedAt: new Date().toISOString(),
|
||||
folder: {
|
||||
id: folderData.id,
|
||||
name: folderData.name,
|
||||
},
|
||||
workflows: workflowExports,
|
||||
folders: subfolders,
|
||||
}
|
||||
|
||||
return singleResponse(exportPayload)
|
||||
}
|
||||
|
||||
const zipWorkflows = workflowExports.map((wf) => ({
|
||||
workflow: {
|
||||
id: wf.workflow.id,
|
||||
name: wf.workflow.name,
|
||||
description: wf.workflow.description ?? undefined,
|
||||
color: wf.workflow.color ?? undefined,
|
||||
folderId: wf.workflow.folderId,
|
||||
},
|
||||
state: wf.state,
|
||||
variables: wf.state.variables,
|
||||
}))
|
||||
|
||||
const zipBlob = await exportFolderToZip(folderData.name, zipWorkflows, subfolders)
|
||||
const arrayBuffer = await zipBlob.arrayBuffer()
|
||||
|
||||
const sanitizedName = sanitizePathSegment(folderData.name)
|
||||
const filename = `${sanitizedName}-${new Date().toISOString().split('T')[0]}.zip`
|
||||
|
||||
return new NextResponse(arrayBuffer, {
|
||||
status: 200,
|
||||
headers: {
|
||||
'Content-Type': 'application/zip',
|
||||
'Content-Disposition': `attachment; filename="${filename}"`,
|
||||
'Content-Length': arrayBuffer.byteLength.toString(),
|
||||
},
|
||||
})
|
||||
} catch (error) {
|
||||
logger.error('Admin API: Failed to export folder', { error, folderId })
|
||||
return internalErrorResponse('Failed to export folder')
|
||||
}
|
||||
})
|
||||
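A hedged usage sketch for the folder export endpoint: the path and `format` query parameter come from the route above, while the admin auth header is an assumption (the middleware is not shown in this diff).

```ts
// Sketch: fetch a folder export (admin auth mechanism assumed, e.g. a bearer token)
const jsonRes = await fetch(`/api/v1/admin/folders/${folderId}/export?format=json`, {
  headers: { Authorization: `Bearer ${ADMIN_TOKEN}` }, // assumption
})
// JSON payload as built above (singleResponse may wrap it; wrapper shape not shown in this diff)
const payload = await jsonRes.json()

// Default format is 'zip': the response body is the archive itself
const zipRes = await fetch(`/api/v1/admin/folders/${folderId}/export`, {
  headers: { Authorization: `Bearer ${ADMIN_TOKEN}` }, // assumption
})
const zipBytes = await zipRes.arrayBuffer()
```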
@@ -34,12 +34,16 @@
 * GET /api/v1/admin/workflows/:id - Get workflow details
 * DELETE /api/v1/admin/workflows/:id - Delete workflow
 * GET /api/v1/admin/workflows/:id/export - Export workflow (JSON)
 * POST /api/v1/admin/workflows/export - Export multiple workflows (ZIP/JSON)
 * POST /api/v1/admin/workflows/import - Import single workflow
 * POST /api/v1/admin/workflows/:id/deploy - Deploy workflow
 * DELETE /api/v1/admin/workflows/:id/deploy - Undeploy workflow
 * GET /api/v1/admin/workflows/:id/versions - List deployment versions
 * POST /api/v1/admin/workflows/:id/versions/:vid/activate - Activate specific version
 *
 * Folders:
 * GET /api/v1/admin/folders/:id/export - Export folder with contents (ZIP/JSON)
 *
 * Organizations:
 * GET /api/v1/admin/organizations - List all organizations
 * POST /api/v1/admin/organizations - Create organization (requires ownerId)
@@ -1,7 +1,7 @@
/**
 * GET /api/v1/admin/workflows/[id]/export
 *
 * Export a single workflow as JSON.
 * Export a single workflow as JSON (raw, unsanitized for admin backup/restore).
 *
 * Response: AdminSingleResponse<WorkflowExportPayload>
 */
147
apps/sim/app/api/v1/admin/workflows/export/route.ts
Normal file
@@ -0,0 +1,147 @@
/**
 * POST /api/v1/admin/workflows/export
 *
 * Export multiple workflows as a ZIP file or JSON array (raw, unsanitized for admin backup/restore).
 *
 * Request Body:
 * - ids: string[] - Array of workflow IDs to export
 *
 * Query Parameters:
 * - format: 'zip' (default) or 'json'
 *
 * Response:
 * - ZIP file download (Content-Type: application/zip) - each workflow as JSON in root
 * - JSON: AdminListResponse<WorkflowExportPayload[]>
 */

import { db } from '@sim/db'
import { workflow } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { inArray } from 'drizzle-orm'
import JSZip from 'jszip'
import { NextResponse } from 'next/server'
import { sanitizePathSegment } from '@/lib/workflows/operations/import-export'
import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/persistence/utils'
import { withAdminAuth } from '@/app/api/v1/admin/middleware'
import {
  badRequestResponse,
  internalErrorResponse,
  listResponse,
} from '@/app/api/v1/admin/responses'
import {
  parseWorkflowVariables,
  type WorkflowExportPayload,
  type WorkflowExportState,
} from '@/app/api/v1/admin/types'

const logger = createLogger('AdminWorkflowsExportAPI')

interface ExportRequest {
  ids: string[]
}

export const POST = withAdminAuth(async (request) => {
  const url = new URL(request.url)
  const format = url.searchParams.get('format') || 'zip'

  let body: ExportRequest
  try {
    body = await request.json()
  } catch {
    return badRequestResponse('Invalid JSON body')
  }

  if (!body.ids || !Array.isArray(body.ids) || body.ids.length === 0) {
    return badRequestResponse('ids must be a non-empty array of workflow IDs')
  }

  try {
    const workflows = await db.select().from(workflow).where(inArray(workflow.id, body.ids))

    if (workflows.length === 0) {
      return badRequestResponse('No workflows found with the provided IDs')
    }

    const workflowExports: WorkflowExportPayload[] = []

    for (const wf of workflows) {
      try {
        const normalizedData = await loadWorkflowFromNormalizedTables(wf.id)

        if (!normalizedData) {
          logger.warn(`Skipping workflow ${wf.id} - no normalized data found`)
          continue
        }

        const variables = parseWorkflowVariables(wf.variables)

        const state: WorkflowExportState = {
          blocks: normalizedData.blocks,
          edges: normalizedData.edges,
          loops: normalizedData.loops,
          parallels: normalizedData.parallels,
          metadata: {
            name: wf.name,
            description: wf.description ?? undefined,
            color: wf.color,
            exportedAt: new Date().toISOString(),
          },
          variables,
        }

        const exportPayload: WorkflowExportPayload = {
          version: '1.0',
          exportedAt: new Date().toISOString(),
          workflow: {
            id: wf.id,
            name: wf.name,
            description: wf.description,
            color: wf.color,
            workspaceId: wf.workspaceId,
            folderId: wf.folderId,
          },
          state,
        }

        workflowExports.push(exportPayload)
      } catch (error) {
        logger.error(`Failed to load workflow ${wf.id}:`, { error })
      }
    }

    logger.info(`Admin API: Exporting ${workflowExports.length} workflows`)

    if (format === 'json') {
      return listResponse(workflowExports, {
        total: workflowExports.length,
        limit: workflowExports.length,
        offset: 0,
        hasMore: false,
      })
    }

    const zip = new JSZip()

    for (const exportPayload of workflowExports) {
      const filename = `${sanitizePathSegment(exportPayload.workflow.name)}.json`
      zip.file(filename, JSON.stringify(exportPayload, null, 2))
    }

    const zipBlob = await zip.generateAsync({ type: 'blob' })
    const arrayBuffer = await zipBlob.arrayBuffer()

    const filename = `workflows-export-${new Date().toISOString().split('T')[0]}.zip`

    return new NextResponse(arrayBuffer, {
      status: 200,
      headers: {
        'Content-Type': 'application/zip',
        'Content-Disposition': `attachment; filename="${filename}"`,
        'Content-Length': arrayBuffer.byteLength.toString(),
      },
    })
  } catch (error) {
    logger.error('Admin API: Failed to export workflows', { error, ids: body.ids })
    return internalErrorResponse('Failed to export workflows')
  }
})
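A hedged sketch of calling the bulk export route; the `ids` body and `format` parameter come from the route above, the auth header is an assumption.

```ts
// Sketch: export two workflows as a ZIP (admin auth assumed)
const res = await fetch('/api/v1/admin/workflows/export?format=zip', {
  method: 'POST',
  headers: {
    'Content-Type': 'application/json',
    Authorization: `Bearer ${ADMIN_TOKEN}`, // assumption
  },
  body: JSON.stringify({ ids: ['wf_abc', 'wf_def'] }),
})
// Response is the archive bytes; with ?format=json the route returns a listResponse of WorkflowExportPayload[]
const zipBytes = await res.arrayBuffer()
```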
@@ -1,7 +1,7 @@
/**
 * GET /api/v1/admin/workspaces/[id]/export
 *
 * Export an entire workspace as a ZIP file or JSON.
 * Export an entire workspace as a ZIP file or JSON (raw, unsanitized for admin backup/restore).
 *
 * Query Parameters:
 * - format: 'zip' (default) or 'json'
@@ -16,7 +16,7 @@ import { workflow, workflowFolder, workspace } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { NextResponse } from 'next/server'
import { exportWorkspaceToZip } from '@/lib/workflows/operations/import-export'
import { exportWorkspaceToZip, sanitizePathSegment } from '@/lib/workflows/operations/import-export'
import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/persistence/utils'
import { withAdminAuthParams } from '@/app/api/v1/admin/middleware'
import {
@@ -146,7 +146,7 @@ export const GET = withAdminAuthParams<RouteParams>(async (request, context) =>
    const zipBlob = await exportWorkspaceToZip(workspaceData.name, zipWorkflows, folderExports)
    const arrayBuffer = await zipBlob.arrayBuffer()

    const sanitizedName = workspaceData.name.replace(/[^a-z0-9-_]/gi, '-')
    const sanitizedName = sanitizePathSegment(workspaceData.name)
    const filename = `${sanitizedName}-${new Date().toISOString().split('T')[0]}.zip`

    return new NextResponse(arrayBuffer, {
@@ -27,7 +27,7 @@ import { ExecutionSnapshot } from '@/executor/execution/snapshot'
import type { ExecutionMetadata, IterationContext } from '@/executor/execution/types'
import type { StreamingExecution } from '@/executor/types'
import { Serializer } from '@/serializer'
import { CORE_TRIGGER_TYPES } from '@/stores/logs/filters/types'
import { CORE_TRIGGER_TYPES, type CoreTriggerType } from '@/stores/logs/filters/types'

const logger = createLogger('WorkflowExecuteAPI')

@@ -109,7 +109,7 @@ type AsyncExecutionParams = {
  workflowId: string
  userId: string
  input: any
  triggerType: 'api' | 'webhook' | 'schedule' | 'manual' | 'chat' | 'mcp'
  triggerType: CoreTriggerType
}

/**
@@ -215,10 +215,10 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
    workflowStateOverride,
  } = validation.data

  // For API key auth, the entire body is the input (except for our control fields)
  // For API key and internal JWT auth, the entire body is the input (except for our control fields)
  // For session auth, the input is explicitly provided in the input field
  const input =
    auth.authType === 'api_key'
    auth.authType === 'api_key' || auth.authType === 'internal_jwt'
      ? (() => {
          const {
            selectedOutputs,
@@ -226,6 +226,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
            stream,
            useDraftState,
            workflowStateOverride,
            workflowId: _workflowId, // Also exclude workflowId used for internal JWT auth
            ...rest
          } = body
          return Object.keys(rest).length > 0 ? rest : validatedInput
@@ -252,17 +253,9 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
  })

  const executionId = uuidv4()
  type LoggingTriggerType = 'api' | 'webhook' | 'schedule' | 'manual' | 'chat' | 'mcp'
  let loggingTriggerType: LoggingTriggerType = 'manual'
  if (
    triggerType === 'api' ||
    triggerType === 'chat' ||
    triggerType === 'webhook' ||
    triggerType === 'schedule' ||
    triggerType === 'manual' ||
    triggerType === 'mcp'
  ) {
    loggingTriggerType = triggerType as LoggingTriggerType
  let loggingTriggerType: CoreTriggerType = 'manual'
  if (CORE_TRIGGER_TYPES.includes(triggerType as CoreTriggerType)) {
    loggingTriggerType = triggerType as CoreTriggerType
  }
  const loggingSession = new LoggingSession(
    workflowId,
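The hunk above replaces a hand-written union check with `CORE_TRIGGER_TYPES.includes(...)`. A minimal sketch of the pattern it relies on; the actual definitions live in `@/stores/logs/filters/types` and may differ in detail.

```ts
// Sketch only: a readonly tuple provides the runtime list, and the union type is derived from it,
// so adding a trigger type in one place updates both the membership check and the type.
export const CORE_TRIGGER_TYPES = ['api', 'webhook', 'schedule', 'manual', 'chat', 'mcp'] as const
export type CoreTriggerType = (typeof CORE_TRIGGER_TYPES)[number]

// Narrow an arbitrary string to the union, falling back to 'manual'
function toLoggingTriggerType(triggerType: string): CoreTriggerType {
  return CORE_TRIGGER_TYPES.includes(triggerType as CoreTriggerType)
    ? (triggerType as CoreTriggerType)
    : 'manual'
}
```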
@@ -1,12 +1,12 @@
import { db } from '@sim/db'
import { workflow, workspace } from '@sim/db/schema'
import { workflow } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { generateRequestId } from '@/lib/core/utils/request'
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
import { getUserEntityPermissions, workspaceExists } from '@/lib/workspaces/permissions/utils'
import { verifyWorkspaceMembership } from '@/app/api/workflows/utils'

const logger = createLogger('WorkflowAPI')
@@ -36,13 +36,9 @@ export async function GET(request: Request) {
    const userId = session.user.id

    if (workspaceId) {
      const workspaceExists = await db
        .select({ id: workspace.id })
        .from(workspace)
        .where(eq(workspace.id, workspaceId))
        .then((rows) => rows.length > 0)
      const wsExists = await workspaceExists(workspaceId)

      if (!workspaceExists) {
      if (!wsExists) {
        logger.warn(
          `[${requestId}] Attempt to fetch workflows for non-existent workspace: ${workspaceId}`
        )
@@ -1,5 +1,5 @@
import { db } from '@sim/db'
import { apiKey, workspace } from '@sim/db/schema'
import { apiKey } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq, inArray } from 'drizzle-orm'
import { nanoid } from 'nanoid'
@@ -9,7 +9,7 @@ import { createApiKey, getApiKeyDisplayFormat } from '@/lib/api-key/auth'
import { getSession } from '@/lib/auth'
import { PlatformEvents } from '@/lib/core/telemetry'
import { generateRequestId } from '@/lib/core/utils/request'
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
import { getUserEntityPermissions, getWorkspaceById } from '@/lib/workspaces/permissions/utils'

const logger = createLogger('WorkspaceApiKeysAPI')

@@ -34,8 +34,8 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{

    const userId = session.user.id

    const ws = await db.select().from(workspace).where(eq(workspace.id, workspaceId)).limit(1)
    if (!ws.length) {
    const ws = await getWorkspaceById(workspaceId)
    if (!ws) {
      return NextResponse.json({ error: 'Workspace not found' }, { status: 404 })
    }
@@ -1,5 +1,5 @@
import { db } from '@sim/db'
import { workspace, workspaceBYOKKeys } from '@sim/db/schema'
import { workspaceBYOKKeys } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm'
import { nanoid } from 'nanoid'
@@ -8,7 +8,7 @@ import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { decryptSecret, encryptSecret } from '@/lib/core/security/encryption'
import { generateRequestId } from '@/lib/core/utils/request'
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
import { getUserEntityPermissions, getWorkspaceById } from '@/lib/workspaces/permissions/utils'

const logger = createLogger('WorkspaceBYOKKeysAPI')

@@ -46,8 +46,8 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{

    const userId = session.user.id

    const ws = await db.select().from(workspace).where(eq(workspace.id, workspaceId)).limit(1)
    if (!ws.length) {
    const ws = await getWorkspaceById(workspaceId)
    if (!ws) {
      return NextResponse.json({ error: 'Workspace not found' }, { status: 404 })
    }
@@ -1,5 +1,5 @@
import { db } from '@sim/db'
import { environment, workspace, workspaceEnvironment } from '@sim/db/schema'
import { environment, workspaceEnvironment } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
@@ -7,7 +7,7 @@ import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { decryptSecret, encryptSecret } from '@/lib/core/security/encryption'
import { generateRequestId } from '@/lib/core/utils/request'
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
import { getUserEntityPermissions, getWorkspaceById } from '@/lib/workspaces/permissions/utils'

const logger = createLogger('WorkspaceEnvironmentAPI')

@@ -33,8 +33,8 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
    const userId = session.user.id

    // Validate workspace exists
    const ws = await db.select().from(workspace).where(eq(workspace.id, workspaceId)).limit(1)
    if (!ws.length) {
    const ws = await getWorkspaceById(workspaceId)
    if (!ws) {
      return NextResponse.json({ error: 'Workspace not found' }, { status: 404 })
    }
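The three hunks above swap inline `db.select().from(workspace)` lookups for shared helpers. The helpers themselves are not shown in this diff; a rough sketch of what `workspaceExists` and `getWorkspaceById` presumably look like, with signatures inferred from the call sites rather than confirmed:

```ts
// Sketch, inferred from call sites: getWorkspaceById(id) -> row or null, workspaceExists(id) -> boolean
import { db } from '@sim/db'
import { workspace } from '@sim/db/schema'
import { eq } from 'drizzle-orm'

export async function getWorkspaceById(workspaceId: string) {
  const rows = await db.select().from(workspace).where(eq(workspace.id, workspaceId)).limit(1)
  return rows[0] ?? null
}

export async function workspaceExists(workspaceId: string): Promise<boolean> {
  return (await getWorkspaceById(workspaceId)) !== null
}
```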
@@ -11,9 +11,9 @@ export const metadata: Metadata = {
    'Open-source AI agent workflow builder used by 60,000+ developers. Build and deploy agentic workflows with a visual drag-and-drop canvas. Connect 100+ apps and ship SOC2 & HIPAA-ready AI automations from startups to Fortune 500.',
  keywords:
    'AI agent workflow builder, agentic workflows, open source AI, visual workflow builder, AI automation, LLM workflows, AI agents, workflow automation, no-code AI, SOC2 compliant, HIPAA compliant, enterprise AI',
  authors: [{ name: 'Sim Studio' }],
  creator: 'Sim Studio',
  publisher: 'Sim Studio',
  authors: [{ name: 'Sim' }],
  creator: 'Sim',
  publisher: 'Sim',
  formatDetection: {
    email: false,
    address: false,
@@ -364,12 +364,30 @@ export default function PlaygroundPage() {
        </VariantRow>
        <VariantRow label='tag variants'>
          <Tag value='valid@email.com' variant='default' />
          <Tag value='secondary-tag' variant='secondary' />
          <Tag value='invalid-email' variant='invalid' />
        </VariantRow>
        <VariantRow label='tag with remove'>
          <Tag value='removable@tag.com' variant='default' onRemove={() => {}} />
          <Tag value='secondary-removable' variant='secondary' onRemove={() => {}} />
          <Tag value='invalid-removable' variant='invalid' onRemove={() => {}} />
        </VariantRow>
        <VariantRow label='secondary variant'>
          <div className='w-80'>
            <TagInput
              items={[
                { value: 'workflow', isValid: true },
                { value: 'automation', isValid: true },
              ]}
              onAdd={() => true}
              onRemove={() => {}}
              placeholder='Add tags'
              placeholderWithTags='Add another'
              tagVariant='secondary'
              triggerKeys={['Enter', ',']}
            />
          </div>
        </VariantRow>
        <VariantRow label='disabled'>
          <div className='w-80'>
            <TagInput
@@ -72,6 +72,8 @@ const TRIGGER_VARIANT_MAP: Record<string, React.ComponentProps<typeof Badge>['va
  schedule: 'green',
  chat: 'purple',
  webhook: 'orange',
  a2a: 'teal',
}

interface StatusBadgeProps {
@@ -888,7 +888,7 @@ export function Chat() {
            selectedOutputs={selectedOutputs}
            onOutputSelect={handleOutputSelection}
            disabled={!activeWorkflowId}
            placeholder='Select outputs'
            placeholder='Outputs'
            align='end'
            maxHeight={180}
          />
@@ -1,16 +1,9 @@
'use client'

import type React from 'react'
import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
import { Check, RepeatIcon, SplitIcon } from 'lucide-react'
import {
  Badge,
  Popover,
  PopoverContent,
  PopoverDivider,
  PopoverItem,
  PopoverTrigger,
} from '@/components/emcn'
import { useMemo } from 'react'
import { RepeatIcon, SplitIcon } from 'lucide-react'
import { Combobox, type ComboboxOptionGroup } from '@/components/emcn'
import {
  extractFieldsFromSchema,
  parseResponseFormatSafely,
@@ -21,7 +14,7 @@ import { useSubBlockStore } from '@/stores/workflows/subblock/store'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'

/**
 * Renders a tag icon with background color.
 * Renders a tag icon with background color for block section headers.
 *
 * @param icon - Either a letter string or a Lucide icon component
 * @param color - Background color for the icon container
@@ -62,14 +55,9 @@ interface OutputSelectProps {
  placeholder?: string
  /** Whether to emit output IDs or labels in onOutputSelect callback */
  valueMode?: 'id' | 'label'
  /**
   * When true, renders the underlying popover content inline instead of in a portal.
   * Useful when used inside dialogs or other portalled components that manage scroll locking.
   */
  disablePopoverPortal?: boolean
  /** Alignment of the popover relative to the trigger */
  /** Alignment of the dropdown relative to the trigger */
  align?: 'start' | 'end' | 'center'
  /** Maximum height of the popover content in pixels */
  /** Maximum height of the dropdown content in pixels */
  maxHeight?: number
}

@@ -90,14 +78,9 @@ export function OutputSelect({
  disabled = false,
  placeholder = 'Select outputs',
  valueMode = 'id',
  disablePopoverPortal = false,
  align = 'start',
  maxHeight = 200,
}: OutputSelectProps) {
  const [open, setOpen] = useState(false)
  const [highlightedIndex, setHighlightedIndex] = useState(-1)
  const triggerRef = useRef<HTMLDivElement>(null)
  const popoverRef = useRef<HTMLDivElement>(null)
  const blocks = useWorkflowStore((state) => state.blocks)
  const { isShowingDiff, isDiffReady, hasActiveDiff, baselineWorkflow } = useWorkflowDiffStore()
  const subBlockValues = useSubBlockStore((state) =>
@@ -206,21 +189,10 @@ export function OutputSelect({
    shouldUseBaseline,
  ])

  /**
   * Checks if an output is currently selected by comparing both ID and label
   * @param o - The output object to check
   * @returns True if the output is selected, false otherwise
   */
  const isSelectedValue = useCallback(
    (o: { id: string; label: string }) =>
      selectedOutputs.includes(o.id) || selectedOutputs.includes(o.label),
    [selectedOutputs]
  )

  /**
   * Gets display text for selected outputs
   */
  const selectedOutputsDisplayText = useMemo(() => {
  const selectedDisplayText = useMemo(() => {
    if (!selectedOutputs || selectedOutputs.length === 0) {
      return placeholder
    }
@@ -234,19 +206,27 @@ export function OutputSelect({
    }

    if (validOutputs.length === 1) {
      const output = workflowOutputs.find(
        (o) => o.id === validOutputs[0] || o.label === validOutputs[0]
      )
      return output?.label || placeholder
      return '1 output'
    }

    return `${validOutputs.length} outputs`
  }, [selectedOutputs, workflowOutputs, placeholder])

  /**
   * Groups outputs by block and sorts by distance from starter block
   * Gets the background color for a block output based on its type
   * @param blockType - The type of the block
   * @returns The hex color code for the block
   */
  const groupedOutputs = useMemo(() => {
  const getOutputColor = (blockType: string) => {
    const blockConfig = getBlock(blockType)
    return blockConfig?.bgColor || '#2F55FF'
  }

  /**
   * Groups outputs by block and sorts by distance from starter block.
   * Returns ComboboxOptionGroup[] for use with Combobox.
   */
  const comboboxGroups = useMemo((): ComboboxOptionGroup[] => {
    const groups: Record<string, typeof workflowOutputs> = {}
    const blockDistances: Record<string, number> = {}
    const edges = useWorkflowStore.getState().edges
@@ -283,242 +263,75 @@ export function OutputSelect({
      groups[output.blockName].push(output)
    })

    return Object.entries(groups)
    const sortedGroups = Object.entries(groups)
      .map(([blockName, outputs]) => ({
        blockName,
        outputs,
        distance: blockDistances[outputs[0]?.blockId] || 0,
      }))
      .sort((a, b) => b.distance - a.distance)
      .reduce(
        (acc, { blockName, outputs }) => {
          acc[blockName] = outputs
          return acc
        },
        {} as Record<string, typeof workflowOutputs>
      )
  }, [workflowOutputs, blocks])

  /**
   * Gets the background color for a block output based on its type
   * @param blockId - The block ID (unused but kept for future extensibility)
   * @param blockType - The type of the block
   * @returns The hex color code for the block
   */
  const getOutputColor = (blockId: string, blockType: string) => {
    const blockConfig = getBlock(blockType)
    return blockConfig?.bgColor || '#2F55FF'
  }
    return sortedGroups.map(({ blockName, outputs }) => {
      const firstOutput = outputs[0]
      const blockConfig = getBlock(firstOutput.blockType)
      const blockColor = getOutputColor(firstOutput.blockType)

  /**
   * Flattened outputs for keyboard navigation
   */
  const flattenedOutputs = useMemo(() => {
    return Object.values(groupedOutputs).flat()
  }, [groupedOutputs])
      let blockIcon: string | React.ComponentType<{ className?: string }> = blockName
        .charAt(0)
        .toUpperCase()

  /**
   * Handles output selection by toggling the selected state
   * @param value - The output label to toggle
   */
  const handleOutputSelection = useCallback(
    (value: string) => {
      const emittedValue =
        valueMode === 'label' ? value : workflowOutputs.find((o) => o.label === value)?.id || value
      const index = selectedOutputs.indexOf(emittedValue)

      const newSelectedOutputs =
        index === -1
          ? [...new Set([...selectedOutputs, emittedValue])]
          : selectedOutputs.filter((id) => id !== emittedValue)

      onOutputSelect(newSelectedOutputs)
    },
    [valueMode, workflowOutputs, selectedOutputs, onOutputSelect]
  )

  /**
   * Handles keyboard navigation within the output list
   * Supports ArrowUp, ArrowDown, Enter, and Escape keys
   */
  useEffect(() => {
    if (!open || flattenedOutputs.length === 0) return

    const handleKeyboardEvent = (e: KeyboardEvent) => {
      switch (e.key) {
        case 'ArrowDown':
          e.preventDefault()
          e.stopPropagation()
          setHighlightedIndex((prev) => {
            if (prev === -1 || prev >= flattenedOutputs.length - 1) {
              return 0
            }
            return prev + 1
          })
          break

        case 'ArrowUp':
          e.preventDefault()
          e.stopPropagation()
          setHighlightedIndex((prev) => {
            if (prev <= 0) {
              return flattenedOutputs.length - 1
            }
            return prev - 1
          })
          break

        case 'Enter':
          e.preventDefault()
          e.stopPropagation()
          setHighlightedIndex((currentIndex) => {
            if (currentIndex >= 0 && currentIndex < flattenedOutputs.length) {
              handleOutputSelection(flattenedOutputs[currentIndex].label)
            }
            return currentIndex
          })
          break

        case 'Escape':
          e.preventDefault()
          e.stopPropagation()
          setOpen(false)
          break
      if (blockConfig?.icon) {
        blockIcon = blockConfig.icon
      } else if (firstOutput.blockType === 'loop') {
        blockIcon = RepeatIcon
      } else if (firstOutput.blockType === 'parallel') {
        blockIcon = SplitIcon
      }
    }

    window.addEventListener('keydown', handleKeyboardEvent, true)
    return () => window.removeEventListener('keydown', handleKeyboardEvent, true)
  }, [open, flattenedOutputs, handleOutputSelection])

  /**
   * Reset highlighted index when popover opens/closes
   */
  useEffect(() => {
    if (open) {
      const firstSelectedIndex = flattenedOutputs.findIndex((output) => isSelectedValue(output))
      setHighlightedIndex(firstSelectedIndex >= 0 ? firstSelectedIndex : -1)
    } else {
      setHighlightedIndex(-1)
    }
  }, [open, flattenedOutputs, isSelectedValue])

  /**
   * Scroll highlighted item into view
   */
  useEffect(() => {
    if (highlightedIndex >= 0 && popoverRef.current) {
      const highlightedElement = popoverRef.current.querySelector(
        `[data-option-index="${highlightedIndex}"]`
      )
      if (highlightedElement) {
        highlightedElement.scrollIntoView({ behavior: 'smooth', block: 'nearest' })
      return {
        sectionElement: (
          <div className='flex items-center gap-1.5 px-[6px] py-[4px]'>
            <TagIcon icon={blockIcon} color={blockColor} />
            <span className='font-medium text-[13px]'>{blockName}</span>
          </div>
        ),
        items: outputs.map((output) => ({
          label: output.path,
          value: valueMode === 'label' ? output.label : output.id,
        })),
      }
    }
  }, [highlightedIndex])
    })
  }, [workflowOutputs, blocks, valueMode])

  /**
   * Closes popover when clicking outside
   * Normalize selected values to match the valueMode
   */
  useEffect(() => {
    if (!open) return

    const handleClickOutside = (event: MouseEvent) => {
      const target = event.target as Node
      const insideTrigger = triggerRef.current?.contains(target)
      const insidePopover = popoverRef.current?.contains(target)

      if (!insideTrigger && !insidePopover) {
        setOpen(false)
      }
    }

    document.addEventListener('mousedown', handleClickOutside)
    return () => document.removeEventListener('mousedown', handleClickOutside)
  }, [open])
  const normalizedSelectedValues = useMemo(() => {
    return selectedOutputs
      .map((val) => {
        // Find the output that matches either id or label
        const output = workflowOutputs.find((o) => o.id === val || o.label === val)
        if (!output) return null
        // Return in the format matching valueMode
        return valueMode === 'label' ? output.label : output.id
      })
      .filter((v): v is string => v !== null)
  }, [selectedOutputs, workflowOutputs, valueMode])

  return (
    <Popover open={open} variant='default'>
      <PopoverTrigger asChild>
        <div ref={triggerRef} className='min-w-0 max-w-full'>
          <Badge
            variant='outline'
            className='flex-none cursor-pointer whitespace-nowrap rounded-[6px]'
            title='Select outputs'
            aria-expanded={open}
            onMouseDown={(e) => {
              if (disabled || workflowOutputs.length === 0) return
              e.stopPropagation()
              setOpen((prev) => !prev)
            }}
          >
            <span className='whitespace-nowrap text-[12px]'>{selectedOutputsDisplayText}</span>
          </Badge>
        </div>
      </PopoverTrigger>
      <PopoverContent
        ref={popoverRef}
        side='bottom'
        align={align}
        sideOffset={4}
        maxHeight={maxHeight}
        maxWidth={300}
        minWidth={160}
        border
        disablePortal={disablePopoverPortal}
      >
        <div className='space-y-[2px]'>
          {Object.entries(groupedOutputs).map(([blockName, outputs], groupIndex, groupArray) => {
            const startIndex = flattenedOutputs.findIndex((o) => o.blockName === blockName)

            const firstOutput = outputs[0]
            const blockConfig = getBlock(firstOutput.blockType)
            const blockColor = getOutputColor(firstOutput.blockId, firstOutput.blockType)

            let blockIcon: string | React.ComponentType<{ className?: string }> = blockName
              .charAt(0)
              .toUpperCase()

            if (blockConfig?.icon) {
              blockIcon = blockConfig.icon
            } else if (firstOutput.blockType === 'loop') {
              blockIcon = RepeatIcon
            } else if (firstOutput.blockType === 'parallel') {
              blockIcon = SplitIcon
            }

            return (
              <div key={blockName}>
                <div className='flex items-center gap-1.5 px-[6px] py-[4px]'>
                  <TagIcon icon={blockIcon} color={blockColor} />
                  <span className='font-medium text-[13px]'>{blockName}</span>
                </div>

                <div className='flex flex-col gap-[2px]'>
                  {outputs.map((output, localIndex) => {
                    const globalIndex = startIndex + localIndex
                    const isHighlighted = globalIndex === highlightedIndex

                    return (
                      <PopoverItem
                        key={output.id}
                        active={isSelectedValue(output) || isHighlighted}
                        data-option-index={globalIndex}
                        onClick={() => handleOutputSelection(output.label)}
                        onMouseEnter={() => setHighlightedIndex(globalIndex)}
                      >
                        <span className='min-w-0 flex-1 truncate'>{output.path}</span>
                        {isSelectedValue(output) && <Check className='h-3 w-3 flex-shrink-0' />}
                      </PopoverItem>
                    )
                  })}
                </div>
                {groupIndex < groupArray.length - 1 && <PopoverDivider />}
              </div>
            )
          })}
        </div>
      </PopoverContent>
    </Popover>
    <Combobox
      size='sm'
      className='!w-fit !py-[2px] [&>svg]:!ml-[4px] [&>svg]:!h-3 [&>svg]:!w-3 [&>span]:!text-[var(--text-secondary)] min-w-[100px] rounded-[6px] bg-transparent px-[9px] hover:bg-[var(--surface-5)] dark:hover:border-[var(--surface-6)] dark:hover:bg-transparent [&>span]:text-center'
      groups={comboboxGroups}
      options={[]}
      multiSelect
      multiSelectValues={normalizedSelectedValues}
      onMultiSelectChange={onOutputSelect}
      placeholder={selectedDisplayText}
      disabled={disabled || workflowOutputs.length === 0}
      align={align}
      maxHeight={maxHeight}
      dropdownWidth={180}
    />
  )
}
@@ -29,6 +29,8 @@ export function BlockContextMenu({
  onRemoveFromSubflow,
  onOpenEditor,
  onRename,
  onGroupBlocks,
  onUngroupBlocks,
  hasClipboard = false,
  showRemoveFromSubflow = false,
  disableEdit = false,
@@ -47,6 +49,14 @@ export function BlockContextMenu({

  const canRemoveFromSubflow = showRemoveFromSubflow && !hasStarterBlock

  // Check if we can group: need at least 2 blocks selected
  const canGroup = selectedBlocks.length >= 2

  // Check if we can ungroup: at least one selected block must be in a group
  // Ungrouping will ungroup all blocks in that group (the entire group, not just selected blocks)
  const hasGroupedBlock = selectedBlocks.some((b) => !!b.groupId)
  const canUngroup = hasGroupedBlock

  const getToggleEnabledLabel = () => {
    if (allEnabled) return 'Disable'
    if (allDisabled) return 'Enable'
@@ -141,6 +151,31 @@ export function BlockContextMenu({
        </PopoverItem>
      )}

      {/* Block group actions */}
      {(canGroup || canUngroup) && <PopoverDivider />}
      {canGroup && (
        <PopoverItem
          disabled={disableEdit}
          onClick={() => {
            onGroupBlocks()
            onClose()
          }}
        >
          Group Blocks
        </PopoverItem>
      )}
      {canUngroup && (
        <PopoverItem
          disabled={disableEdit}
          onClick={() => {
            onUngroupBlocks()
            onClose()
          }}
        >
          Ungroup
        </PopoverItem>
      )}

      {/* Single block actions */}
      {isSingleBlock && <PopoverDivider />}
      {isSingleBlock && !isSubflow && (
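A hedged sketch of how a canvas parent might wire the new `onGroupBlocks` / `onUngroupBlocks` props. The store action names below are hypothetical placeholders; only `getGroups()`, `groupId`, and `blockIds` appear elsewhere in this diff.

```tsx
// Hypothetical wiring; groupBlocks/ungroupBlocks are placeholder action names, not confirmed by this diff.
<BlockContextMenu
  {...menuProps}
  onGroupBlocks={() => groupBlocks(selectedBlocks.map((b) => b.id))}
  onUngroupBlocks={() => {
    // Ungroup every group touched by the selection (the entire group, per the comment above)
    const groupIds = [...new Set(selectedBlocks.map((b) => b.groupId).filter(Boolean))]
    groupIds.forEach((gid) => ungroupBlocks(gid as string))
  }}
/>
```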
@@ -24,6 +24,8 @@ export interface ContextMenuBlockInfo {
  parentId?: string
  /** Parent type ('loop' | 'parallel') if nested */
  parentType?: string
  /** Group ID if block is in a group */
  groupId?: string
}

/**
@@ -50,6 +52,8 @@ export interface BlockContextMenuProps {
  onRemoveFromSubflow: () => void
  onOpenEditor: () => void
  onRename: () => void
  onGroupBlocks: () => void
  onUngroupBlocks: () => void
  /** Whether clipboard has content for pasting */
  hasClipboard?: boolean
  /** Whether remove from subflow option should be shown */
@@ -1,6 +1,6 @@
import { memo, useCallback, useMemo } from 'react'
import ReactMarkdown from 'react-markdown'
import type { NodeProps } from 'reactflow'
import { type NodeProps, useReactFlow } from 'reactflow'
import remarkGfm from 'remark-gfm'
import { cn } from '@/lib/core/utils/cn'
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
@@ -10,6 +10,7 @@ import {
  useBlockDimensions,
} from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-block-dimensions'
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
import { ActionBar } from '../workflow-block/components'
import type { WorkflowBlockProps } from '../workflow-block/types'

@@ -198,6 +199,57 @@ export const NoteBlock = memo(function NoteBlock({ id, data }: NodeProps<NoteBlo

  const userPermissions = useUserPermissionsContext()

  // Get React Flow methods for group selection expansion
  const { getNodes, setNodes } = useReactFlow()
  const { getGroups } = useWorkflowStore()

  /**
   * Expands selection to include all group members on mouse down.
   * This ensures that when a user starts dragging a note in a group,
   * all other blocks in the group are also selected and will move together.
   */
  const handleGroupMouseDown = useCallback(
    (e: React.MouseEvent) => {
      // Only process left mouse button clicks
      if (e.button !== 0) return

      const groupId = data.groupId
      if (!groupId) return

      const groups = getGroups()
      const group = groups[groupId]
      if (!group || group.blockIds.length <= 1) return

      const groupBlockIds = new Set(group.blockIds)
      const allNodes = getNodes()

      // Check if all group members are already selected
      const allSelected = [...groupBlockIds].every((blockId) =>
        allNodes.find((n) => n.id === blockId && n.selected)
      )

      if (allSelected) return

      // Expand selection to include all group members
      setNodes((nodes) =>
        nodes.map((n) => {
          const isInGroup = groupBlockIds.has(n.id)
          const isThisBlock = n.id === id
          return {
            ...n,
            selected: isInGroup ? true : n.selected,
            data: {
              ...n.data,
              // Mark as grouped selection if in group but not the directly clicked block
              isGroupedSelection: isInGroup && !isThisBlock && !n.selected,
            },
          }
        })
      )
    },
    [id, data.groupId, getNodes, setNodes, getGroups]
  )

  /**
   * Calculate deterministic dimensions based on content structure.
   * Uses fixed width and computed height to avoid ResizeObserver jitter.
@@ -216,8 +268,14 @@ export const NoteBlock = memo(function NoteBlock({ id, data }: NodeProps<NoteBlo
    dependencies: [isEmpty],
  })

  const isGroupedSelection = data.isGroupedSelection ?? false

  return (
    <div className='group relative'>
    <div
      className='group relative'
      data-grouped-selection={isGroupedSelection ? 'true' : undefined}
      onMouseDown={handleGroupMouseDown}
    >
      <div
        className={cn(
          'relative z-[20] w-[250px] cursor-default select-none rounded-[8px] border border-[var(--border)] bg-[var(--surface-2)]'
@@ -1,6 +1,6 @@
'use client'

import React, { useEffect, useMemo, useState } from 'react'
import React, { memo, useCallback, useState } from 'react'
import { Check, Copy } from 'lucide-react'
import ReactMarkdown from 'react-markdown'
import remarkGfm from 'remark-gfm'
@@ -28,55 +28,95 @@ const getTextContent = (element: React.ReactNode): string => {
  return ''
}

// Global layout fixes for markdown content inside the copilot panel
if (typeof document !== 'undefined') {
  const styleId = 'copilot-markdown-fix'
  if (!document.getElementById(styleId)) {
    const style = document.createElement('style')
    style.id = styleId
    style.textContent = `
      /* Prevent any markdown content from expanding beyond the panel */
      .copilot-markdown-wrapper,
      .copilot-markdown-wrapper * {
        max-width: 100% !important;
      }

      .copilot-markdown-wrapper p,
      .copilot-markdown-wrapper li {
        overflow-wrap: anywhere !important;
        word-break: break-word !important;
      }

      .copilot-markdown-wrapper a {
        overflow-wrap: anywhere !important;
        word-break: break-all !important;
      }

      .copilot-markdown-wrapper code:not(pre code) {
        white-space: normal !important;
        overflow-wrap: anywhere !important;
        word-break: break-word !important;
      }

      /* Reduce top margin for first heading (e.g., right after thinking block) */
      .copilot-markdown-wrapper > h1:first-child,
      .copilot-markdown-wrapper > h2:first-child,
      .copilot-markdown-wrapper > h3:first-child,
      .copilot-markdown-wrapper > h4:first-child {
        margin-top: 0.25rem !important;
      }
    `
    document.head.appendChild(style)
  }
/**
 * Maps common language aliases to supported viewer languages
 */
const LANGUAGE_MAP: Record<string, 'javascript' | 'json' | 'python'> = {
  js: 'javascript',
  javascript: 'javascript',
  jsx: 'javascript',
  ts: 'javascript',
  typescript: 'javascript',
  tsx: 'javascript',
  json: 'json',
  python: 'python',
  py: 'python',
  code: 'javascript',
}

/**
 * Link component with hover preview tooltip
 * Displays full URL on hover for better UX
 * @param props - Component props with href and children
 * @returns Link element with tooltip preview
 * Normalizes a language string to a supported viewer language
 */
function LinkWithPreview({ href, children }: { href: string; children: React.ReactNode }) {
function normalizeLanguage(lang: string): 'javascript' | 'json' | 'python' {
  const normalized = (lang || '').toLowerCase()
  return LANGUAGE_MAP[normalized] || 'javascript'
}

/**
 * Props for the CodeBlock component
 */
interface CodeBlockProps {
  /** Code content to display */
  code: string
  /** Language identifier from markdown */
  language: string
}

/**
 * CodeBlock component with isolated copy state
 * Prevents full markdown re-renders when copy button is clicked
 */
const CodeBlock = memo(function CodeBlock({ code, language }: CodeBlockProps) {
  const [copied, setCopied] = useState(false)

  const handleCopy = useCallback(() => {
    if (code) {
      navigator.clipboard.writeText(code)
      setCopied(true)
      setTimeout(() => setCopied(false), 2000)
    }
  }, [code])

  const viewerLanguage = normalizeLanguage(language)
  const displayLanguage = language === 'code' ? viewerLanguage : language

  return (
    <div className='mt-2.5 mb-2.5 w-0 min-w-full overflow-hidden rounded-md border border-[var(--border-1)] bg-[var(--surface-1)] text-sm'>
      <div className='flex items-center justify-between border-[var(--border-1)] border-b px-3 py-1'>
        <span className='font-season text-[var(--text-muted)] text-xs'>{displayLanguage}</span>
        <button
          onClick={handleCopy}
          className='text-[var(--text-muted)] transition-colors hover:text-[var(--text-tertiary)]'
          title='Copy'
          type='button'
        >
          {copied ? (
            <Check className='h-3 w-3' strokeWidth={2} />
          ) : (
            <Copy className='h-3 w-3' strokeWidth={2} />
          )}
        </button>
      </div>
      <Code.Viewer
        code={code.replace(/\n+$/, '')}
        showGutter
        language={viewerLanguage}
        className='m-0 min-h-0 rounded-none border-0 bg-transparent'
      />
    </div>
  )
})

/**
 * Link component with hover preview tooltip
 */
const LinkWithPreview = memo(function LinkWithPreview({
  href,
  children,
}: {
  href: string
  children: React.ReactNode
}) {
  return (
    <Tooltip.Root delayDuration={300}>
      <Tooltip.Trigger asChild>
@@ -94,7 +134,7 @@ function LinkWithPreview({ href, children }: { href: string; children: React.Rea
      </Tooltip.Content>
    </Tooltip.Root>
  )
}
})

/**
 * Props for the CopilotMarkdownRenderer component
@@ -104,275 +144,197 @@ interface CopilotMarkdownRendererProps {
  content: string
}

/**
 * Static markdown component definitions - optimized for LLM chat spacing
 * Tighter spacing compared to traditional prose for better chat UX
 */
const markdownComponents = {
  // Paragraphs - tight spacing, no margin on last
  p: ({ children }: React.HTMLAttributes<HTMLParagraphElement>) => (
    <p className='mb-1.5 font-base font-season text-[var(--text-primary)] text-sm leading-[1.4] last:mb-0 dark:font-[470]'>
      {children}
    </p>
  ),

  // Headings - minimal margins for chat context
  h1: ({ children }: React.HTMLAttributes<HTMLHeadingElement>) => (
    <h1 className='mt-2 mb-1 font-season font-semibold text-[var(--text-primary)] text-base first:mt-0'>
      {children}
    </h1>
  ),
  h2: ({ children }: React.HTMLAttributes<HTMLHeadingElement>) => (
    <h2 className='mt-2 mb-1 font-season font-semibold text-[15px] text-[var(--text-primary)] first:mt-0'>
      {children}
    </h2>
  ),
  h3: ({ children }: React.HTMLAttributes<HTMLHeadingElement>) => (
    <h3 className='mt-1.5 mb-0.5 font-season font-semibold text-[var(--text-primary)] text-sm first:mt-0'>
      {children}
    </h3>
  ),
  h4: ({ children }: React.HTMLAttributes<HTMLHeadingElement>) => (
    <h4 className='mt-1.5 mb-0.5 font-season font-semibold text-[var(--text-primary)] text-sm first:mt-0'>
      {children}
    </h4>
  ),

  // Lists - compact spacing
  ul: ({ children }: React.HTMLAttributes<HTMLUListElement>) => (
    <ul
      className='my-1 space-y-0.5 pl-5 font-base font-season text-[var(--text-primary)] dark:font-[470]'
      style={{ listStyleType: 'disc' }}
    >
      {children}
    </ul>
  ),
  ol: ({ children }: React.HTMLAttributes<HTMLOListElement>) => (
    <ol
      className='my-1 space-y-0.5 pl-5 font-base font-season text-[var(--text-primary)] dark:font-[470]'
      style={{ listStyleType: 'decimal' }}
    >
      {children}
    </ol>
  ),
  li: ({ children }: React.LiHTMLAttributes<HTMLLIElement>) => (
    <li
      className='font-base font-season text-[var(--text-primary)] text-sm leading-[1.4] dark:font-[470]'
      style={{ display: 'list-item' }}
    >
      {children}
    </li>
  ),

  // Code blocks - handled by CodeBlock component
  pre: ({ children }: React.HTMLAttributes<HTMLPreElement>) => {
    let codeContent: React.ReactNode = children
    let language = 'code'

    if (
      React.isValidElement<{ className?: string; children?: React.ReactNode }>(children) &&
      children.type === 'code'
    ) {
      const childElement = children as React.ReactElement<{
        className?: string
        children?: React.ReactNode
      }>
      codeContent = childElement.props.children
      language = childElement.props.className?.replace('language-', '') || 'code'
    }

    let actualCodeText = ''
    if (typeof codeContent === 'string') {
      actualCodeText = codeContent
    } else if (React.isValidElement(codeContent)) {
      actualCodeText = getTextContent(codeContent)
    } else if (Array.isArray(codeContent)) {
      actualCodeText = codeContent
        .map((child) =>
          typeof child === 'string'
            ? child
            : React.isValidElement(child)
              ? getTextContent(child)
              : ''
        )
        .join('')
    } else {
      actualCodeText = String(codeContent || '')
    }

    return <CodeBlock code={actualCodeText} language={language} />
  },

  // Inline code
  code: ({
    className,
    children,
    ...props
  }: React.HTMLAttributes<HTMLElement> & { className?: string }) => (
    <code
      className='whitespace-normal break-all rounded border border-[var(--border-1)] bg-[var(--surface-1)] px-1 py-0.5 font-mono text-[0.85em] text-[var(--text-primary)]'
      {...props}
    >
      {children}
    </code>
  ),

  // Text formatting
  strong: ({ children }: React.HTMLAttributes<HTMLElement>) => (
    <strong className='font-semibold text-[var(--text-primary)]'>{children}</strong>
  ),
  b: ({ children }: React.HTMLAttributes<HTMLElement>) => (
    <b className='font-semibold text-[var(--text-primary)]'>{children}</b>
  ),
  em: ({ children }: React.HTMLAttributes<HTMLElement>) => (
    <em className='text-[var(--text-primary)] italic'>{children}</em>
  ),
  i: ({ children }: React.HTMLAttributes<HTMLElement>) => (
    <i className='text-[var(--text-primary)] italic'>{children}</i>
  ),

  // Blockquote - compact
  blockquote: ({ children }: React.HTMLAttributes<HTMLQuoteElement>) => (
    <blockquote className='my-1.5 border-[var(--border-1)] border-l-2 py-0.5 pl-3 font-season text-[var(--text-secondary)] text-sm italic'>
      {children}
    </blockquote>
  ),

  // Horizontal rule
  hr: () => <hr className='my-3 border-[var(--divider)] border-t' />,

  // Links
  a: ({ href, children }: React.AnchorHTMLAttributes<HTMLAnchorElement>) => (
    <LinkWithPreview href={href || '#'}>{children}</LinkWithPreview>
  ),

  // Tables - compact
  table: ({ children }: React.TableHTMLAttributes<HTMLTableElement>) => (
    <div className='my-2 max-w-full overflow-x-auto'>
      <table className='min-w-full table-auto border border-[var(--border-1)] font-season text-xs'>
        {children}
      </table>
    </div>
  ),
  thead: ({ children }: React.HTMLAttributes<HTMLTableSectionElement>) => (
    <thead className='bg-[var(--surface-5)] text-left dark:bg-[var(--surface-4)]'>{children}</thead>
  ),
  tbody: ({ children }: React.HTMLAttributes<HTMLTableSectionElement>) => (
    <tbody className='divide-y divide-[var(--border-1)]'>{children}</tbody>
  ),
  tr: ({ children }: React.HTMLAttributes<HTMLTableRowElement>) => (
    <tr className='border-[var(--border-1)] border-b'>{children}</tr>
  ),
  th: ({ children }: React.ThHTMLAttributes<HTMLTableCellElement>) => (
    <th className='border-[var(--border-1)] border-r px-2 py-1 align-top font-base text-[var(--text-secondary)] last:border-r-0 dark:font-[470]'>
      {children}
    </th>
  ),
  td: ({ children }: React.TdHTMLAttributes<HTMLTableCellElement>) => (
    <td className='break-words border-[var(--border-1)] border-r px-2 py-1 align-top font-base text-[var(--text-primary)] last:border-r-0 dark:font-[470]'>
      {children}
    </td>
  ),

  // Images
  img: ({ src, alt, ...props }: React.ImgHTMLAttributes<HTMLImageElement>) => (
    <img src={src} alt={alt || 'Image'} className='my-2 h-auto max-w-full rounded-md' {...props} />
  ),
}

/**
 * CopilotMarkdownRenderer renders markdown content with custom styling
 * Supports GitHub-flavored markdown, code blocks with syntax highlighting,
 * tables, links with preview, and more
 * Optimized for LLM chat: tight spacing, memoized components, isolated state
 *
 * @param props - Component props
 * @returns Rendered markdown content
 */
export default function CopilotMarkdownRenderer({ content }: CopilotMarkdownRendererProps) {
  const [copiedCodeBlocks, setCopiedCodeBlocks] = useState<Record<string, boolean>>({})

  useEffect(() => {
    const timers: Record<string, NodeJS.Timeout> = {}

    Object.keys(copiedCodeBlocks).forEach((key) => {
      if (copiedCodeBlocks[key]) {
        timers[key] = setTimeout(() => {
          setCopiedCodeBlocks((prev) => ({ ...prev, [key]: false }))
        }, 2000)
      }
    })

    return () => {
      Object.values(timers).forEach(clearTimeout)
    }
  }, [copiedCodeBlocks])

  const markdownComponents = useMemo(
    () => ({
      p: ({ children }: React.HTMLAttributes<HTMLParagraphElement>) => (
        <p className='mb-2 font-base font-season text-[var(--text-primary)] text-sm leading-[1.25rem] last:mb-0 dark:font-[470]'>
          {children}
        </p>
      ),

      h1: ({ children }: React.HTMLAttributes<HTMLHeadingElement>) => (
        <h1 className='mt-3 mb-3 font-season font-semibold text-2xl text-[var(--text-primary)]'>
          {children}
        </h1>
      ),
      h2: ({ children }: React.HTMLAttributes<HTMLHeadingElement>) => (
        <h2 className='mt-2.5 mb-2.5 font-season font-semibold text-[var(--text-primary)] text-xl'>
          {children}
        </h2>
      ),
      h3: ({ children }: React.HTMLAttributes<HTMLHeadingElement>) => (
h3: ({ children }: React.HTMLAttributes<HTMLHeadingElement>) => (
|
||||
<h3 className='mt-2 mb-2 font-season font-semibold text-[var(--text-primary)] text-lg'>
|
||||
{children}
|
||||
</h3>
|
||||
),
|
||||
h4: ({ children }: React.HTMLAttributes<HTMLHeadingElement>) => (
|
||||
<h4 className='mt-2 mb-2 font-season font-semibold text-[var(--text-primary)] text-base'>
|
||||
{children}
|
||||
</h4>
|
||||
),
|
||||
|
||||
ul: ({ children }: React.HTMLAttributes<HTMLUListElement>) => (
|
||||
<ul
|
||||
className='mt-1 mb-1 space-y-1.5 pl-6 font-base font-season text-[var(--text-primary)] dark:font-[470]'
|
||||
style={{ listStyleType: 'disc' }}
|
||||
>
|
||||
{children}
|
||||
</ul>
|
||||
),
|
||||
ol: ({ children }: React.HTMLAttributes<HTMLOListElement>) => (
|
||||
<ol
|
||||
className='mt-1 mb-1 space-y-1.5 pl-6 font-base font-season text-[var(--text-primary)] dark:font-[470]'
|
||||
style={{ listStyleType: 'decimal' }}
|
||||
>
|
||||
{children}
|
||||
</ol>
|
||||
),
|
||||
li: ({
|
||||
children,
|
||||
ordered,
|
||||
}: React.LiHTMLAttributes<HTMLLIElement> & { ordered?: boolean }) => (
|
||||
<li
|
||||
className='font-base font-season text-[var(--text-primary)] dark:font-[470]'
|
||||
style={{ display: 'list-item' }}
|
||||
>
|
||||
{children}
|
||||
</li>
|
||||
),
|
||||
|
||||
pre: ({ children }: React.HTMLAttributes<HTMLPreElement>) => {
|
||||
let codeContent: React.ReactNode = children
|
||||
let language = 'code'
|
||||
|
||||
if (
|
||||
React.isValidElement<{ className?: string; children?: React.ReactNode }>(children) &&
|
||||
children.type === 'code'
|
||||
) {
|
||||
const childElement = children as React.ReactElement<{
|
||||
className?: string
|
||||
children?: React.ReactNode
|
||||
}>
|
||||
codeContent = childElement.props.children
|
||||
language = childElement.props.className?.replace('language-', '') || 'code'
|
||||
}
|
||||
|
||||
let actualCodeText = ''
|
||||
if (typeof codeContent === 'string') {
|
||||
actualCodeText = codeContent
|
||||
} else if (React.isValidElement(codeContent)) {
|
||||
actualCodeText = getTextContent(codeContent)
|
||||
} else if (Array.isArray(codeContent)) {
|
||||
actualCodeText = codeContent
|
||||
.map((child) =>
|
||||
typeof child === 'string'
|
||||
? child
|
||||
: React.isValidElement(child)
|
||||
? getTextContent(child)
|
||||
: ''
|
||||
)
|
||||
.join('')
|
||||
} else {
|
||||
actualCodeText = String(codeContent || '')
|
||||
}
|
||||
|
||||
const codeText = actualCodeText || 'code'
|
||||
const codeBlockKey = `${language}-${codeText.substring(0, 30).replace(/\s/g, '-')}-${codeText.length}`
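// Illustrative key (snippet is hypothetical): for language 'typescript' and the code text
// "const x = 1" (11 characters), the key becomes "typescript-const-x-=-1-11", so the same
// block keeps a stable copy-state key across re-renders.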
|
||||
|
||||
const showCopySuccess = copiedCodeBlocks[codeBlockKey] || false
|
||||
|
||||
const handleCopy = () => {
|
||||
const textToCopy = actualCodeText
|
||||
if (textToCopy) {
|
||||
navigator.clipboard.writeText(textToCopy)
|
||||
setCopiedCodeBlocks((prev) => ({ ...prev, [codeBlockKey]: true }))
|
||||
}
|
||||
}
|
||||
|
||||
const normalizedLanguage = (language || '').toLowerCase()
|
||||
const viewerLanguage: 'javascript' | 'json' | 'python' =
|
||||
normalizedLanguage === 'json'
|
||||
? 'json'
|
||||
: normalizedLanguage === 'python' || normalizedLanguage === 'py'
|
||||
? 'python'
|
||||
: 'javascript'
|
||||
|
||||
return (
|
||||
<div className='mt-6 mb-6 w-0 min-w-full overflow-hidden rounded-md border border-[var(--border-1)] bg-[var(--surface-1)] text-sm'>
|
||||
<div className='flex items-center justify-between border-[var(--border-1)] border-b px-4 py-1.5'>
|
||||
<span className='font-season text-[var(--text-muted)] text-xs'>
|
||||
{language === 'code' ? viewerLanguage : language}
|
||||
</span>
|
||||
<button
|
||||
onClick={handleCopy}
|
||||
className='text-[var(--text-muted)] transition-colors hover:text-[var(--text-tertiary)]'
|
||||
title='Copy'
|
||||
>
|
||||
{showCopySuccess ? (
|
||||
<Check className='h-3 w-3' strokeWidth={2} />
|
||||
) : (
|
||||
<Copy className='h-3 w-3' strokeWidth={2} />
|
||||
)}
|
||||
</button>
|
||||
</div>
|
||||
<Code.Viewer
|
||||
code={actualCodeText.replace(/\n+$/, '')}
|
||||
showGutter
|
||||
language={viewerLanguage}
|
||||
className='m-0 min-h-0 rounded-none border-0 bg-transparent'
|
||||
/>
|
||||
</div>
|
||||
)
|
||||
},
|
||||
|
||||
code: ({
|
||||
inline,
|
||||
className,
|
||||
children,
|
||||
...props
|
||||
}: React.HTMLAttributes<HTMLElement> & { className?: string; inline?: boolean }) => {
|
||||
if (inline) {
|
||||
return (
|
||||
<code
|
||||
className='whitespace-normal break-all rounded border border-[var(--border-1)] bg-[var(--surface-1)] px-1 py-0.5 font-mono text-[0.9em] text-[var(--text-primary)]'
|
||||
{...props}
|
||||
>
|
||||
{children}
|
||||
</code>
|
||||
)
|
||||
}
|
||||
return (
|
||||
<code className={className} {...props}>
|
||||
{children}
|
||||
</code>
|
||||
)
|
||||
},
|
||||
|
||||
strong: ({ children }: React.HTMLAttributes<HTMLElement>) => (
|
||||
<strong className='font-semibold text-[var(--text-primary)]'>{children}</strong>
|
||||
),
|
||||
|
||||
b: ({ children }: React.HTMLAttributes<HTMLElement>) => (
|
||||
<b className='font-semibold text-[var(--text-primary)]'>{children}</b>
|
||||
),
|
||||
|
||||
em: ({ children }: React.HTMLAttributes<HTMLElement>) => (
|
||||
<em className='text-[var(--text-primary)] italic'>{children}</em>
|
||||
),
|
||||
|
||||
i: ({ children }: React.HTMLAttributes<HTMLElement>) => (
|
||||
<i className='text-[var(--text-primary)] italic'>{children}</i>
|
||||
),
|
||||
|
||||
blockquote: ({ children }: React.HTMLAttributes<HTMLQuoteElement>) => (
|
||||
<blockquote className='my-4 border-[var(--border-1)] border-l-4 py-1 pl-4 font-season text-[var(--text-secondary)] italic'>
|
||||
{children}
|
||||
</blockquote>
|
||||
),
|
||||
|
||||
hr: () => <hr className='my-8 border-[var(--divider)] border-t' />,
|
||||
|
||||
a: ({ href, children, ...props }: React.AnchorHTMLAttributes<HTMLAnchorElement>) => (
|
||||
<LinkWithPreview href={href || '#'} {...props}>
|
||||
{children}
|
||||
</LinkWithPreview>
|
||||
),
|
||||
|
||||
table: ({ children }: React.TableHTMLAttributes<HTMLTableElement>) => (
|
||||
<div className='my-3 max-w-full overflow-x-auto'>
|
||||
<table className='min-w-full table-auto border border-[var(--border-1)] font-season text-xs'>
|
||||
{children}
|
||||
</table>
|
||||
</div>
|
||||
),
|
||||
thead: ({ children }: React.HTMLAttributes<HTMLTableSectionElement>) => (
|
||||
<thead className='bg-[var(--surface-5)] text-left dark:bg-[var(--surface-4)]'>
|
||||
{children}
|
||||
</thead>
|
||||
),
|
||||
tbody: ({ children }: React.HTMLAttributes<HTMLTableSectionElement>) => (
|
||||
<tbody className='divide-y divide-[var(--border-1)]'>{children}</tbody>
|
||||
),
|
||||
tr: ({ children }: React.HTMLAttributes<HTMLTableRowElement>) => (
|
||||
<tr className='border-[var(--border-1)] border-b transition-colors hover:bg-[var(--surface-5)] dark:hover:bg-[var(--surface-4)]/60'>
|
||||
{children}
|
||||
</tr>
|
||||
),
|
||||
th: ({ children }: React.ThHTMLAttributes<HTMLTableCellElement>) => (
|
||||
<th className='border-[var(--border-1)] border-r px-2.5 py-1.5 align-top font-base text-[var(--text-secondary)] last:border-r-0 dark:font-[470]'>
|
||||
{children}
|
||||
</th>
|
||||
),
|
||||
td: ({ children }: React.TdHTMLAttributes<HTMLTableCellElement>) => (
|
||||
<td className='break-words border-[var(--border-1)] border-r px-2.5 py-1.5 align-top font-base text-[var(--text-primary)] last:border-r-0 dark:font-[470]'>
|
||||
{children}
|
||||
</td>
|
||||
),
|
||||
|
||||
img: ({ src, alt, ...props }: React.ImgHTMLAttributes<HTMLImageElement>) => (
|
||||
<img
|
||||
src={src}
|
||||
alt={alt || 'Image'}
|
||||
className='my-3 h-auto max-w-full rounded-md'
|
||||
{...props}
|
||||
/>
|
||||
),
|
||||
}),
|
||||
[copiedCodeBlocks]
|
||||
)
|
||||
|
||||
function CopilotMarkdownRenderer({ content }: CopilotMarkdownRendererProps) {
|
||||
return (
|
||||
<div className='copilot-markdown-wrapper max-w-full space-y-3 break-words font-base font-season text-[var(--text-primary)] text-sm leading-[1.25rem] dark:font-[470]'>
|
||||
<div className='max-w-full break-words font-base font-season text-[var(--text-primary)] text-sm leading-[1.4] dark:font-[470] [&_*]:max-w-full [&_a]:break-all [&_code:not(pre_code)]:break-words [&_li]:break-words [&_p]:break-words'>
|
||||
<ReactMarkdown remarkPlugins={[remarkGfm]} components={markdownComponents}>
|
||||
{content}
|
||||
</ReactMarkdown>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
export default memo(CopilotMarkdownRenderer)
|
||||
|
||||
@@ -2,18 +2,38 @@ import { memo, useEffect, useRef, useState } from 'react'
import CopilotMarkdownRenderer from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/copilot-message/components/markdown-renderer'

/**
 * Character animation delay in milliseconds
 * Minimum delay between characters (fast catch-up mode)
 */
const CHARACTER_DELAY = 3
const MIN_DELAY = 1

/**
 * Maximum delay between characters (when waiting for content)
 */
const MAX_DELAY = 12

/**
 * Default delay when streaming normally
 */
const DEFAULT_DELAY = 4

/**
 * How far behind (in characters) before we speed up
 */
const CATCH_UP_THRESHOLD = 20

/**
 * How close to content before we slow down
 */
const SLOW_DOWN_THRESHOLD = 5

/**
|
||||
* StreamingIndicator shows animated dots during message streaming
|
||||
* Uses CSS classes for animations to follow best practices
|
||||
* Used as a standalone indicator when no content has arrived yet
|
||||
*
|
||||
* @returns Animated loading indicator
|
||||
*/
|
||||
export const StreamingIndicator = memo(() => (
|
||||
<div className='flex items-center py-1 text-muted-foreground transition-opacity duration-200 ease-in-out'>
|
||||
<div className='flex h-[1.25rem] items-center text-muted-foreground'>
|
||||
<div className='flex space-x-0.5'>
|
||||
<div className='h-1 w-1 animate-bounce rounded-full bg-muted-foreground [animation-delay:0ms] [animation-duration:1.2s]' />
|
||||
<div className='h-1 w-1 animate-bounce rounded-full bg-muted-foreground [animation-delay:150ms] [animation-duration:1.2s]' />
|
||||
@@ -34,9 +54,39 @@ interface SmoothStreamingTextProps {
|
||||
isStreaming: boolean
|
||||
}
|
||||
|
||||
/**
 * Calculates adaptive delay based on how far behind animation is from actual content
 *
 * @param displayedLength - Current displayed content length
 * @param totalLength - Total available content length
 * @returns Delay in milliseconds
 */
function calculateAdaptiveDelay(displayedLength: number, totalLength: number): number {
  const charsRemaining = totalLength - displayedLength

  if (charsRemaining > CATCH_UP_THRESHOLD) {
    // Far behind - speed up to catch up
    // Scale from MIN_DELAY to DEFAULT_DELAY based on how far behind
    const catchUpFactor = Math.min(1, (charsRemaining - CATCH_UP_THRESHOLD) / 50)
    return MIN_DELAY + (DEFAULT_DELAY - MIN_DELAY) * (1 - catchUpFactor)
  }

  if (charsRemaining <= SLOW_DOWN_THRESHOLD) {
    // Close to content edge - slow down to feel natural
    // The closer we are, the slower we go (up to MAX_DELAY)
    const slowFactor = 1 - charsRemaining / SLOW_DOWN_THRESHOLD
    return DEFAULT_DELAY + (MAX_DELAY - DEFAULT_DELAY) * slowFactor
  }

  // Normal streaming speed
  return DEFAULT_DELAY
}
||||
|
||||
/**
|
||||
* SmoothStreamingText component displays text with character-by-character animation
|
||||
* Creates a smooth streaming effect for AI responses
|
||||
* Creates a smooth streaming effect for AI responses with adaptive speed
|
||||
*
|
||||
* Uses adaptive pacing: speeds up when catching up, slows down near content edge
|
||||
*
|
||||
* @param props - Component props
|
||||
* @returns Streaming text with smooth animation
|
||||
@@ -45,74 +95,73 @@ export const SmoothStreamingText = memo(
|
||||
({ content, isStreaming }: SmoothStreamingTextProps) => {
|
||||
const [displayedContent, setDisplayedContent] = useState('')
|
||||
const contentRef = useRef(content)
|
||||
const timeoutRef = useRef<NodeJS.Timeout | null>(null)
|
||||
const rafRef = useRef<number | null>(null)
|
||||
const indexRef = useRef(0)
|
||||
const streamingStartTimeRef = useRef<number | null>(null)
|
||||
const lastFrameTimeRef = useRef<number>(0)
|
||||
const isAnimatingRef = useRef(false)
|
||||
|
||||
/**
|
||||
* Handles content streaming animation
|
||||
* Updates displayed content character by character during streaming
|
||||
*/
|
||||
useEffect(() => {
|
||||
contentRef.current = content
|
||||
|
||||
if (content.length === 0) {
|
||||
setDisplayedContent('')
|
||||
indexRef.current = 0
|
||||
streamingStartTimeRef.current = null
|
||||
return
|
||||
}
|
||||
|
||||
if (isStreaming) {
|
||||
if (streamingStartTimeRef.current === null) {
|
||||
streamingStartTimeRef.current = Date.now()
|
||||
}
|
||||
if (indexRef.current < content.length && !isAnimatingRef.current) {
|
||||
isAnimatingRef.current = true
|
||||
lastFrameTimeRef.current = performance.now()
|
||||
|
||||
if (indexRef.current < content.length) {
|
||||
const animateText = () => {
|
||||
const animateText = (timestamp: number) => {
|
||||
const currentContent = contentRef.current
|
||||
const currentIndex = indexRef.current
|
||||
const elapsed = timestamp - lastFrameTimeRef.current
|
||||
|
||||
if (currentIndex < currentContent.length) {
|
||||
const chunkSize = 1
|
||||
const newDisplayed = currentContent.slice(0, currentIndex + chunkSize)
|
||||
// Calculate adaptive delay based on how far behind we are
|
||||
const delay = calculateAdaptiveDelay(currentIndex, currentContent.length)
|
||||
|
||||
setDisplayedContent(newDisplayed)
|
||||
indexRef.current = currentIndex + chunkSize
|
||||
if (elapsed >= delay) {
|
||||
if (currentIndex < currentContent.length) {
|
||||
const newDisplayed = currentContent.slice(0, currentIndex + 1)
|
||||
setDisplayedContent(newDisplayed)
|
||||
indexRef.current = currentIndex + 1
|
||||
lastFrameTimeRef.current = timestamp
|
||||
}
|
||||
}
|
||||
|
||||
timeoutRef.current = setTimeout(animateText, CHARACTER_DELAY)
|
||||
if (indexRef.current < currentContent.length) {
|
||||
rafRef.current = requestAnimationFrame(animateText)
|
||||
} else {
|
||||
isAnimatingRef.current = false
|
||||
}
|
||||
}
|
||||
|
||||
if (!isAnimatingRef.current) {
|
||||
if (timeoutRef.current) {
|
||||
clearTimeout(timeoutRef.current)
|
||||
}
|
||||
|
||||
isAnimatingRef.current = true
|
||||
animateText()
|
||||
}
|
||||
rafRef.current = requestAnimationFrame(animateText)
|
||||
} else if (indexRef.current < content.length && isAnimatingRef.current) {
|
||||
// Animation already running, it will pick up new content automatically
|
||||
}
|
||||
} else {
|
||||
// Streaming ended - show full content immediately
|
||||
if (rafRef.current) {
|
||||
cancelAnimationFrame(rafRef.current)
|
||||
}
|
||||
setDisplayedContent(content)
|
||||
indexRef.current = content.length
|
||||
isAnimatingRef.current = false
|
||||
streamingStartTimeRef.current = null
|
||||
}
|
||||
|
||||
return () => {
|
||||
if (timeoutRef.current) {
|
||||
clearTimeout(timeoutRef.current)
|
||||
if (rafRef.current) {
|
||||
cancelAnimationFrame(rafRef.current)
|
||||
}
|
||||
isAnimatingRef.current = false
|
||||
}
|
||||
}, [content, isStreaming])
|
||||
|
||||
return (
|
||||
<div className='relative min-h-[1.25rem] max-w-full overflow-hidden'>
|
||||
<div className='min-h-[1.25rem] max-w-full'>
|
||||
<CopilotMarkdownRenderer content={displayedContent} />
|
||||
</div>
|
||||
)
|
||||
@@ -121,7 +170,6 @@ export const SmoothStreamingText = memo(
|
||||
// Prevent re-renders during streaming unless content actually changed
|
||||
return (
|
||||
prevProps.content === nextProps.content && prevProps.isStreaming === nextProps.isStreaming
|
||||
// markdownComponents is now memoized so no need to compare
|
||||
)
|
||||
}
|
||||
)
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
'use client'
|
||||
|
||||
import { useEffect, useRef, useState } from 'react'
|
||||
import { memo, useEffect, useRef, useState } from 'react'
|
||||
import clsx from 'clsx'
|
||||
import { ChevronUp } from 'lucide-react'
|
||||
import CopilotMarkdownRenderer from './markdown-renderer'
|
||||
@@ -8,18 +8,151 @@ import CopilotMarkdownRenderer from './markdown-renderer'
/**
 * Max height for thinking content before internal scrolling kicks in
 */
const THINKING_MAX_HEIGHT = 200
const THINKING_MAX_HEIGHT = 150

/**
 * Height threshold before gradient fade kicks in
 */
const GRADIENT_THRESHOLD = 100

/**
 * Interval for auto-scroll during streaming (ms)
 */
const SCROLL_INTERVAL = 100
const SCROLL_INTERVAL = 50

/**
 * Timer update interval in milliseconds
 */
const TIMER_UPDATE_INTERVAL = 100

/**
 * Thinking text streaming - much faster than main text
 * Essentially instant with minimal delay
 */
const THINKING_DELAY = 0.5
const THINKING_CHARS_PER_FRAME = 3
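// Throughput sketch (assuming roughly 60 requestAnimationFrame ticks per second; these
// numbers are illustrative, not part of the change): both animations advance at most once
// per frame, so main-answer text reveals ~1 char/frame (~60 chars/s) while thinking text
// reveals THINKING_CHARS_PER_FRAME = 3 chars/frame (~180 chars/s), which is why thinking
// reads as near-instant.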
|
||||
|
||||
/**
|
||||
* Props for the SmoothThinkingText component
|
||||
*/
|
||||
interface SmoothThinkingTextProps {
|
||||
content: string
|
||||
isStreaming: boolean
|
||||
}
|
||||
|
||||
/**
|
||||
* SmoothThinkingText renders thinking content with fast streaming animation
|
||||
* Uses gradient fade at top when content is tall enough
|
||||
*/
|
||||
const SmoothThinkingText = memo(
|
||||
({ content, isStreaming }: SmoothThinkingTextProps) => {
|
||||
const [displayedContent, setDisplayedContent] = useState('')
|
||||
const [showGradient, setShowGradient] = useState(false)
|
||||
const contentRef = useRef(content)
|
||||
const textRef = useRef<HTMLDivElement>(null)
|
||||
const rafRef = useRef<number | null>(null)
|
||||
const indexRef = useRef(0)
|
||||
const lastFrameTimeRef = useRef<number>(0)
|
||||
const isAnimatingRef = useRef(false)
|
||||
|
||||
useEffect(() => {
|
||||
contentRef.current = content
|
||||
|
||||
if (content.length === 0) {
|
||||
setDisplayedContent('')
|
||||
indexRef.current = 0
|
||||
return
|
||||
}
|
||||
|
||||
if (isStreaming) {
|
||||
if (indexRef.current < content.length && !isAnimatingRef.current) {
|
||||
isAnimatingRef.current = true
|
||||
lastFrameTimeRef.current = performance.now()
|
||||
|
||||
const animateText = (timestamp: number) => {
|
||||
const currentContent = contentRef.current
|
||||
const currentIndex = indexRef.current
|
||||
const elapsed = timestamp - lastFrameTimeRef.current
|
||||
|
||||
if (elapsed >= THINKING_DELAY) {
|
||||
if (currentIndex < currentContent.length) {
|
||||
// Reveal multiple characters per frame for faster streaming
|
||||
const newIndex = Math.min(
|
||||
currentIndex + THINKING_CHARS_PER_FRAME,
|
||||
currentContent.length
|
||||
)
|
||||
const newDisplayed = currentContent.slice(0, newIndex)
|
||||
setDisplayedContent(newDisplayed)
|
||||
indexRef.current = newIndex
|
||||
lastFrameTimeRef.current = timestamp
|
||||
}
|
||||
}
|
||||
|
||||
if (indexRef.current < currentContent.length) {
|
||||
rafRef.current = requestAnimationFrame(animateText)
|
||||
} else {
|
||||
isAnimatingRef.current = false
|
||||
}
|
||||
}
|
||||
|
||||
rafRef.current = requestAnimationFrame(animateText)
|
||||
}
|
||||
} else {
|
||||
// Streaming ended - show full content immediately
|
||||
if (rafRef.current) {
|
||||
cancelAnimationFrame(rafRef.current)
|
||||
}
|
||||
setDisplayedContent(content)
|
||||
indexRef.current = content.length
|
||||
isAnimatingRef.current = false
|
||||
}
|
||||
|
||||
return () => {
|
||||
if (rafRef.current) {
|
||||
cancelAnimationFrame(rafRef.current)
|
||||
}
|
||||
isAnimatingRef.current = false
|
||||
}
|
||||
}, [content, isStreaming])
|
||||
|
||||
// Check if content height exceeds threshold for gradient
|
||||
useEffect(() => {
|
||||
if (textRef.current && isStreaming) {
|
||||
const height = textRef.current.scrollHeight
|
||||
setShowGradient(height > GRADIENT_THRESHOLD)
|
||||
} else {
|
||||
setShowGradient(false)
|
||||
}
|
||||
}, [displayedContent, isStreaming])
|
||||
|
||||
// Apply vertical gradient fade at the top only when content is tall enough
|
||||
const gradientStyle =
|
||||
isStreaming && showGradient
|
||||
? {
|
||||
maskImage: 'linear-gradient(to bottom, transparent 0%, black 30%, black 100%)',
|
||||
WebkitMaskImage: 'linear-gradient(to bottom, transparent 0%, black 30%, black 100%)',
|
||||
}
|
||||
: undefined
|
||||
|
||||
return (
|
||||
<div
|
||||
ref={textRef}
|
||||
className='[&_*]:!text-[var(--text-muted)] [&_*]:!text-[12px] [&_*]:!leading-[1.4] [&_p]:!m-0 [&_p]:!mb-1 [&_h1]:!text-[12px] [&_h1]:!font-semibold [&_h1]:!m-0 [&_h1]:!mb-1 [&_h2]:!text-[12px] [&_h2]:!font-semibold [&_h2]:!m-0 [&_h2]:!mb-1 [&_h3]:!text-[12px] [&_h3]:!font-semibold [&_h3]:!m-0 [&_h3]:!mb-1 [&_code]:!text-[11px] [&_ul]:!pl-5 [&_ul]:!my-1 [&_ol]:!pl-6 [&_ol]:!my-1 [&_li]:!my-0.5 [&_li]:!py-0 font-season text-[12px] text-[var(--text-muted)]'
|
||||
style={gradientStyle}
|
||||
>
|
||||
<CopilotMarkdownRenderer content={displayedContent} />
|
||||
</div>
|
||||
)
|
||||
},
|
||||
(prevProps, nextProps) => {
|
||||
return (
|
||||
prevProps.content === nextProps.content && prevProps.isStreaming === nextProps.isStreaming
|
||||
)
|
||||
}
|
||||
)
|
||||
|
||||
SmoothThinkingText.displayName = 'SmoothThinkingText'
|
||||
|
||||
/**
|
||||
* Props for the ThinkingBlock component
|
||||
*/
|
||||
@@ -66,8 +199,8 @@ export function ThinkingBlock({
|
||||
* Auto-collapses when streaming ends OR when following content arrives
|
||||
*/
|
||||
useEffect(() => {
|
||||
// Collapse if streaming ended or if there's following content (like a tool call)
|
||||
if (!isStreaming || hasFollowingContent) {
|
||||
// Collapse if streaming ended, there's following content, or special tags arrived
|
||||
if (!isStreaming || hasFollowingContent || hasSpecialTags) {
|
||||
setIsExpanded(false)
|
||||
userCollapsedRef.current = false
|
||||
setUserHasScrolledAway(false)
|
||||
@@ -77,7 +210,7 @@ export function ThinkingBlock({
|
||||
if (!userCollapsedRef.current && content && content.trim().length > 0) {
|
||||
setIsExpanded(true)
|
||||
}
|
||||
}, [isStreaming, content, hasFollowingContent])
|
||||
}, [isStreaming, content, hasFollowingContent, hasSpecialTags])
|
||||
|
||||
// Reset start time when streaming begins
|
||||
useEffect(() => {
|
||||
@@ -113,14 +246,14 @@ export function ThinkingBlock({
|
||||
const isNearBottom = distanceFromBottom <= 20
|
||||
|
||||
const delta = scrollTop - lastScrollTopRef.current
|
||||
const movedUp = delta < -2
|
||||
const movedUp = delta < -1
|
||||
|
||||
if (movedUp && !isNearBottom) {
|
||||
setUserHasScrolledAway(true)
|
||||
}
|
||||
|
||||
// Re-stick if user scrolls back to bottom
|
||||
if (userHasScrolledAway && isNearBottom) {
|
||||
// Re-stick if user scrolls back to bottom with intent
|
||||
if (userHasScrolledAway && isNearBottom && delta > 10) {
|
||||
setUserHasScrolledAway(false)
|
||||
}
|
||||
|
||||
@@ -133,7 +266,7 @@ export function ThinkingBlock({
|
||||
return () => container.removeEventListener('scroll', handleScroll)
|
||||
}, [isExpanded, userHasScrolledAway])
|
||||
|
||||
// Smart auto-scroll: only scroll if user hasn't scrolled away
|
||||
// Smart auto-scroll: always scroll to bottom while streaming unless user scrolled away
|
||||
useEffect(() => {
|
||||
if (!isStreaming || !isExpanded || userHasScrolledAway) return
|
||||
|
||||
@@ -141,20 +274,14 @@ export function ThinkingBlock({
|
||||
const container = scrollContainerRef.current
|
||||
if (!container) return
|
||||
|
||||
const { scrollTop, scrollHeight, clientHeight } = container
|
||||
const distanceFromBottom = scrollHeight - scrollTop - clientHeight
|
||||
const isNearBottom = distanceFromBottom <= 50
|
||||
|
||||
if (isNearBottom) {
|
||||
programmaticScrollRef.current = true
|
||||
container.scrollTo({
|
||||
top: container.scrollHeight,
|
||||
behavior: 'smooth',
|
||||
})
|
||||
window.setTimeout(() => {
|
||||
programmaticScrollRef.current = false
|
||||
}, 150)
|
||||
}
|
||||
programmaticScrollRef.current = true
|
||||
container.scrollTo({
|
||||
top: container.scrollHeight,
|
||||
behavior: 'auto',
|
||||
})
|
||||
window.setTimeout(() => {
|
||||
programmaticScrollRef.current = false
|
||||
}, 16)
|
||||
}, SCROLL_INTERVAL)
|
||||
|
||||
return () => window.clearInterval(intervalId)
|
||||
@@ -241,15 +368,11 @@ export function ThinkingBlock({
|
||||
<div
|
||||
ref={scrollContainerRef}
|
||||
className={clsx(
|
||||
'overflow-y-auto transition-all duration-300 ease-in-out',
|
||||
isExpanded ? 'mt-1.5 max-h-[200px] opacity-100' : 'max-h-0 opacity-0'
|
||||
'overflow-y-auto transition-all duration-150 ease-out',
|
||||
isExpanded ? 'mt-1.5 max-h-[150px] opacity-100' : 'max-h-0 opacity-0'
|
||||
)}
|
||||
>
|
||||
{/* Render markdown during streaming with thinking text styling */}
|
||||
<div className='[&_*]:!text-[var(--text-muted)] [&_*]:!text-[12px] [&_*]:!leading-[1.3] [&_p]:!m-0 [&_p]:!mb-1 [&_h1]:!text-[12px] [&_h1]:!font-semibold [&_h1]:!m-0 [&_h1]:!mb-1 [&_h2]:!text-[12px] [&_h2]:!font-semibold [&_h2]:!m-0 [&_h2]:!mb-1 [&_h3]:!text-[12px] [&_h3]:!font-semibold [&_h3]:!m-0 [&_h3]:!mb-1 [&_code]:!text-[11px] [&_ul]:!pl-5 [&_ul]:!my-1 [&_ol]:!pl-6 [&_ol]:!my-1 [&_li]:!my-0.5 [&_li]:!py-0 [&_br]:!leading-[0.5] [&_table]:!my-2 [&_th]:!px-2 [&_th]:!py-1 [&_th]:!text-[11px] [&_td]:!px-2 [&_td]:!py-1 [&_td]:!text-[11px] whitespace-pre-wrap font-[470] font-season text-[12px] text-[var(--text-muted)]'>
|
||||
<CopilotMarkdownRenderer content={content} />
|
||||
<span className='ml-1 inline-block h-2 w-1 animate-pulse bg-[var(--text-muted)]' />
|
||||
</div>
|
||||
<SmoothThinkingText content={content} isStreaming={isStreaming && !hasFollowingContent} />
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
@@ -281,12 +404,12 @@ export function ThinkingBlock({
|
||||
<div
|
||||
ref={scrollContainerRef}
|
||||
className={clsx(
|
||||
'overflow-y-auto transition-all duration-300 ease-in-out',
|
||||
isExpanded ? 'mt-1.5 max-h-[200px] opacity-100' : 'max-h-0 opacity-0'
|
||||
'overflow-y-auto transition-all duration-150 ease-out',
|
||||
isExpanded ? 'mt-1.5 max-h-[150px] opacity-100' : 'max-h-0 opacity-0'
|
||||
)}
|
||||
>
|
||||
{/* Use markdown renderer for completed content */}
|
||||
<div className='[&_*]:!text-[var(--text-muted)] [&_*]:!text-[12px] [&_*]:!leading-[1.3] [&_p]:!m-0 [&_p]:!mb-1 [&_h1]:!text-[12px] [&_h1]:!font-semibold [&_h1]:!m-0 [&_h1]:!mb-1 [&_h2]:!text-[12px] [&_h2]:!font-semibold [&_h2]:!m-0 [&_h2]:!mb-1 [&_h3]:!text-[12px] [&_h3]:!font-semibold [&_h3]:!m-0 [&_h3]:!mb-1 [&_code]:!text-[11px] [&_ul]:!pl-5 [&_ul]:!my-1 [&_ol]:!pl-6 [&_ol]:!my-1 [&_li]:!my-0.5 [&_li]:!py-0 [&_br]:!leading-[0.5] [&_table]:!my-2 [&_th]:!px-2 [&_th]:!py-1 [&_th]:!text-[11px] [&_td]:!px-2 [&_td]:!py-1 [&_td]:!text-[11px] whitespace-pre-wrap font-[470] font-season text-[12px] text-[var(--text-muted)]'>
|
||||
{/* Completed thinking text - dimmed with markdown */}
|
||||
<div className='[&_*]:!text-[var(--text-muted)] [&_*]:!text-[12px] [&_*]:!leading-[1.4] [&_p]:!m-0 [&_p]:!mb-1 [&_h1]:!text-[12px] [&_h1]:!font-semibold [&_h1]:!m-0 [&_h1]:!mb-1 [&_h2]:!text-[12px] [&_h2]:!font-semibold [&_h2]:!m-0 [&_h2]:!mb-1 [&_h3]:!text-[12px] [&_h3]:!font-semibold [&_h3]:!m-0 [&_h3]:!mb-1 [&_code]:!text-[11px] [&_ul]:!pl-5 [&_ul]:!my-1 [&_ol]:!pl-6 [&_ol]:!my-1 [&_li]:!my-0.5 [&_li]:!py-0 font-season text-[12px] text-[var(--text-muted)]'>
|
||||
<CopilotMarkdownRenderer content={content} />
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@@ -187,6 +187,7 @@ const CopilotMessage: FC<CopilotMessageProps> = memo(
|
||||
)
|
||||
|
||||
// Memoize content blocks to avoid re-rendering unchanged blocks
|
||||
// No entrance animations to prevent layout shift
|
||||
const memoizedContentBlocks = useMemo(() => {
|
||||
if (!message.contentBlocks || message.contentBlocks.length === 0) {
|
||||
return null
|
||||
@@ -205,14 +206,10 @@ const CopilotMessage: FC<CopilotMessageProps> = memo(
|
||||
|
||||
// Use smooth streaming for the last text block if we're streaming
|
||||
const shouldUseSmoothing = isStreaming && isLastTextBlock
|
||||
const blockKey = `text-${index}-${block.timestamp || index}`
|
||||
|
||||
return (
|
||||
<div
|
||||
key={`text-${index}-${block.timestamp || index}`}
|
||||
className={`w-full max-w-full overflow-hidden transition-opacity duration-200 ease-in-out ${
|
||||
cleanBlockContent.length > 0 ? 'opacity-100' : 'opacity-70'
|
||||
} ${shouldUseSmoothing ? 'translate-y-0 transition-transform duration-100 ease-out' : ''}`}
|
||||
>
|
||||
<div key={blockKey} className='w-full max-w-full'>
|
||||
{shouldUseSmoothing ? (
|
||||
<SmoothStreamingText content={cleanBlockContent} isStreaming={isStreaming} />
|
||||
) : (
|
||||
@@ -224,29 +221,33 @@ const CopilotMessage: FC<CopilotMessageProps> = memo(
|
||||
if (block.type === 'thinking') {
|
||||
// Check if there are any blocks after this one (tool calls, text, etc.)
|
||||
const hasFollowingContent = index < message.contentBlocks!.length - 1
|
||||
// Check if special tags (options, plan) are present - should also close thinking
|
||||
const hasSpecialTags = !!(parsedTags?.options || parsedTags?.plan)
|
||||
const blockKey = `thinking-${index}-${block.timestamp || index}`
|
||||
|
||||
return (
|
||||
<div key={`thinking-${index}-${block.timestamp || index}`} className='w-full'>
|
||||
<div key={blockKey} className='w-full'>
|
||||
<ThinkingBlock
|
||||
content={block.content}
|
||||
isStreaming={isStreaming}
|
||||
hasFollowingContent={hasFollowingContent}
|
||||
hasSpecialTags={hasSpecialTags}
|
||||
/>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
if (block.type === 'tool_call') {
|
||||
const blockKey = `tool-${block.toolCall.id}`
|
||||
|
||||
return (
|
||||
<div
|
||||
key={`tool-${block.toolCall.id}`}
|
||||
className='opacity-100 transition-opacity duration-300 ease-in-out'
|
||||
>
|
||||
<div key={blockKey}>
|
||||
<ToolCall toolCallId={block.toolCall.id} toolCall={block.toolCall} />
|
||||
</div>
|
||||
)
|
||||
}
|
||||
return null
|
||||
})
|
||||
}, [message.contentBlocks, isStreaming])
|
||||
}, [message.contentBlocks, isStreaming, parsedTags])
|
||||
|
||||
if (isUser) {
|
||||
return (
|
||||
@@ -279,6 +280,7 @@ const CopilotMessage: FC<CopilotMessageProps> = memo(
|
||||
onModeChange={setMode}
|
||||
panelWidth={panelWidth}
|
||||
clearOnSubmit={false}
|
||||
initialContexts={message.contexts}
|
||||
/>
|
||||
|
||||
{/* Inline Checkpoint Discard Confirmation - shown below input in edit mode */}
|
||||
@@ -346,14 +348,18 @@ const CopilotMessage: FC<CopilotMessageProps> = memo(
      const contexts: any[] = Array.isArray((message as any).contexts)
        ? ((message as any).contexts as any[])
        : []
      const labels = contexts
        .filter((c) => c?.kind !== 'current_workflow')
        .map((c) => c?.label)
        .filter(Boolean) as string[]
      if (!labels.length) return text

      // Build tokens with their prefixes (@ for mentions, / for commands)
      const tokens = contexts
        .filter((c) => c?.kind !== 'current_workflow' && c?.label)
        .map((c) => {
          const prefix = c?.kind === 'slash_command' ? '/' : '@'
          return `${prefix}${c.label}`
        })
      if (!tokens.length) return text

      const escapeRegex = (s: string) => s.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')
      const pattern = new RegExp(`@(${labels.map(escapeRegex).join('|')})`, 'g')
      const pattern = new RegExp(`(${tokens.map(escapeRegex).join('|')})`, 'g')
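      // Illustrative match (labels are hypothetical): for a mention context labelled
      // 'Sales Sheet' and a slash command labelled 'plan', tokens becomes
      // ['@Sales Sheet', '/plan'] and the pattern is roughly /(@Sales Sheet|\/plan)/g,
      // so slash commands are now highlighted alongside @ mentions.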
|
||||
|
||||
const nodes: React.ReactNode[] = []
|
||||
let lastIndex = 0
|
||||
@@ -460,17 +466,29 @@ const CopilotMessage: FC<CopilotMessageProps> = memo(
|
||||
)
|
||||
}
|
||||
|
||||
// Check if there's any visible content in the blocks
|
||||
const hasVisibleContent = useMemo(() => {
|
||||
if (!message.contentBlocks || message.contentBlocks.length === 0) return false
|
||||
return message.contentBlocks.some((block) => {
|
||||
if (block.type === 'text') {
|
||||
const parsed = parseSpecialTags(block.content)
|
||||
return parsed.cleanContent.trim().length > 0
|
||||
}
|
||||
return block.type === 'thinking' || block.type === 'tool_call'
|
||||
})
|
||||
}, [message.contentBlocks])
|
||||
|
||||
if (isAssistant) {
|
||||
return (
|
||||
<div
|
||||
className={`w-full max-w-full overflow-hidden transition-opacity duration-200 [max-width:var(--panel-max-width)] ${isDimmed ? 'opacity-40' : 'opacity-100'}`}
|
||||
className={`w-full max-w-full overflow-hidden [max-width:var(--panel-max-width)] ${isDimmed ? 'opacity-40' : 'opacity-100'}`}
|
||||
style={{ '--panel-max-width': `${panelWidth - 16}px` } as React.CSSProperties}
|
||||
>
|
||||
<div className='max-w-full space-y-1.5 px-[2px] transition-all duration-200 ease-in-out'>
|
||||
<div className='max-w-full space-y-1 px-[2px]'>
|
||||
{/* Content blocks in chronological order */}
|
||||
{memoizedContentBlocks}
|
||||
|
||||
{/* Always show streaming indicator at the end while streaming */}
|
||||
{/* Streaming indicator always at bottom during streaming */}
|
||||
{isStreaming && <StreamingIndicator />}
|
||||
|
||||
{message.errorType === 'usage_limit' && (
|
||||
|
||||
@@ -497,6 +497,11 @@ const ACTION_VERBS = [
|
||||
'Accessed',
|
||||
'Managing',
|
||||
'Managed',
|
||||
'Scraping',
|
||||
'Scraped',
|
||||
'Crawling',
|
||||
'Crawled',
|
||||
'Getting',
|
||||
] as const
|
||||
|
||||
/**
|
||||
@@ -1061,7 +1066,7 @@ function SubAgentContent({
|
||||
<div
|
||||
ref={scrollContainerRef}
|
||||
className={clsx(
|
||||
'overflow-y-auto transition-all duration-300 ease-in-out',
|
||||
'overflow-y-auto transition-all duration-150 ease-out',
|
||||
isExpanded ? 'mt-1.5 max-h-[200px] opacity-100' : 'max-h-0 opacity-0'
|
||||
)}
|
||||
>
|
||||
@@ -1157,10 +1162,10 @@ function SubAgentThinkingContent({
|
||||
|
||||
/**
|
||||
* Subagents that should collapse when done streaming.
|
||||
* Default behavior is to NOT collapse (stay expanded like edit).
|
||||
* Only these specific subagents collapse into "Planned for Xs >" style headers.
|
||||
* Default behavior is to NOT collapse (stay expanded like edit, superagent, info, etc.).
|
||||
* Only plan, debug, and research collapse into summary headers.
|
||||
*/
|
||||
const COLLAPSIBLE_SUBAGENTS = new Set(['plan', 'debug', 'research', 'info'])
|
||||
const COLLAPSIBLE_SUBAGENTS = new Set(['plan', 'debug', 'research'])
|
||||
|
||||
/**
|
||||
* SubagentContentRenderer handles the rendering of subagent content.
|
||||
@@ -1321,7 +1326,7 @@ function SubagentContentRenderer({
|
||||
|
||||
<div
|
||||
className={clsx(
|
||||
'overflow-hidden transition-all duration-300 ease-in-out',
|
||||
'overflow-hidden transition-all duration-150 ease-out',
|
||||
isExpanded ? 'mt-1.5 max-h-[5000px] opacity-100' : 'max-h-0 opacity-0'
|
||||
)}
|
||||
>
|
||||
@@ -1631,10 +1636,8 @@ function WorkflowEditSummary({ toolCall }: { toolCall: CopilotToolCall }) {
|
||||
* Checks if a tool is an integration tool (server-side executed, not a client tool)
|
||||
*/
|
||||
function isIntegrationTool(toolName: string): boolean {
|
||||
// Check if it's NOT a client tool (not in CLASS_TOOL_METADATA and not in registered tools)
|
||||
const isClientTool = !!CLASS_TOOL_METADATA[toolName]
|
||||
const isRegisteredTool = !!getRegisteredTools()[toolName]
|
||||
return !isClientTool && !isRegisteredTool
|
||||
// Any tool NOT in CLASS_TOOL_METADATA is an integration tool (server-side execution)
|
||||
return !CLASS_TOOL_METADATA[toolName]
|
||||
}
|
||||
|
||||
function shouldShowRunSkipButtons(toolCall: CopilotToolCall): boolean {
|
||||
@@ -1663,16 +1666,9 @@ function shouldShowRunSkipButtons(toolCall: CopilotToolCall): boolean {
|
||||
return true
|
||||
}
|
||||
|
||||
// Also show buttons for integration tools in pending state (they need user confirmation)
|
||||
// But NOT if the tool is auto-allowed (it will auto-execute)
|
||||
// Always show buttons for integration tools in pending state (they need user confirmation)
|
||||
const mode = useCopilotStore.getState().mode
|
||||
const isAutoAllowed = useCopilotStore.getState().isToolAutoAllowed(toolCall.name)
|
||||
if (
|
||||
mode === 'build' &&
|
||||
isIntegrationTool(toolCall.name) &&
|
||||
toolCall.state === 'pending' &&
|
||||
!isAutoAllowed
|
||||
) {
|
||||
if (mode === 'build' && isIntegrationTool(toolCall.name) && toolCall.state === 'pending') {
|
||||
return true
|
||||
}
|
||||
|
||||
@@ -1895,15 +1891,20 @@ function RunSkipButtons({
|
||||
|
||||
if (buttonsHidden) return null
|
||||
|
||||
// Standardized buttons for all interrupt tools: Allow, Always Allow, Skip
|
||||
// Hide "Always Allow" for integration tools (only show for client tools with interrupts)
|
||||
const showAlwaysAllow = !isIntegrationTool(toolCall.name)
|
||||
|
||||
// Standardized buttons for all interrupt tools: Allow, (Always Allow for client tools only), Skip
|
||||
return (
|
||||
<div className='mt-1.5 flex gap-[6px]'>
|
||||
<Button onClick={onRun} disabled={isProcessing} variant='tertiary'>
|
||||
{isProcessing ? 'Allowing...' : 'Allow'}
|
||||
</Button>
|
||||
<Button onClick={onAlwaysAllow} disabled={isProcessing} variant='default'>
|
||||
{isProcessing ? 'Allowing...' : 'Always Allow'}
|
||||
</Button>
|
||||
{showAlwaysAllow && (
|
||||
<Button onClick={onAlwaysAllow} disabled={isProcessing} variant='default'>
|
||||
{isProcessing ? 'Allowing...' : 'Always Allow'}
|
||||
</Button>
|
||||
)}
|
||||
<Button onClick={onSkip} disabled={isProcessing} variant='default'>
|
||||
Skip
|
||||
</Button>
|
||||
@@ -1969,6 +1970,7 @@ export function ToolCall({ toolCall: toolCallProp, toolCallId, onStateChange }:
|
||||
'tour',
|
||||
'info',
|
||||
'workflow',
|
||||
'superagent',
|
||||
]
|
||||
const isSubagentTool = SUBAGENT_TOOLS.includes(toolCall.name)
|
||||
|
||||
@@ -2596,16 +2598,23 @@ export function ToolCall({ toolCall: toolCallProp, toolCallId, onStateChange }:
|
||||
}
|
||||
}
|
||||
|
||||
// For edit_workflow, hide text display when we have operations (WorkflowEditSummary replaces it)
|
||||
const isEditWorkflow = toolCall.name === 'edit_workflow'
|
||||
const hasOperations = Array.isArray(params.operations) && params.operations.length > 0
|
||||
const hideTextForEditWorkflow = isEditWorkflow && hasOperations
|
||||
|
||||
return (
|
||||
<div className='w-full'>
|
||||
<div className={isToolNameClickable ? 'cursor-pointer' : ''} onClick={handleToolNameClick}>
|
||||
<ShimmerOverlayText
|
||||
text={displayName}
|
||||
active={isLoadingState}
|
||||
isSpecial={isSpecial}
|
||||
className='font-[470] font-season text-[var(--text-secondary)] text-sm dark:text-[var(--text-muted)]'
|
||||
/>
|
||||
</div>
|
||||
{!hideTextForEditWorkflow && (
|
||||
<div className={isToolNameClickable ? 'cursor-pointer' : ''} onClick={handleToolNameClick}>
|
||||
<ShimmerOverlayText
|
||||
text={displayName}
|
||||
active={isLoadingState}
|
||||
isSpecial={isSpecial}
|
||||
className='font-[470] font-season text-[var(--text-secondary)] text-sm dark:text-[var(--text-muted)]'
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
{isExpandableTool && expanded && <div className='mt-1.5'>{renderPendingDetails()}</div>}
|
||||
{showRemoveAutoAllow && isAutoAllowed && (
|
||||
<div className='mt-1.5'>
|
||||
|
||||
@@ -3,3 +3,4 @@ export { ContextPills } from './context-pills/context-pills'
|
||||
export { MentionMenu } from './mention-menu/mention-menu'
|
||||
export { ModeSelector } from './mode-selector/mode-selector'
|
||||
export { ModelSelector } from './model-selector/model-selector'
|
||||
export { SlashMenu } from './slash-menu/slash-menu'
|
||||
|
||||
@@ -0,0 +1,249 @@
|
||||
'use client'
|
||||
|
||||
import { useMemo } from 'react'
|
||||
import {
|
||||
Popover,
|
||||
PopoverAnchor,
|
||||
PopoverBackButton,
|
||||
PopoverContent,
|
||||
PopoverFolder,
|
||||
PopoverItem,
|
||||
PopoverScrollArea,
|
||||
} from '@/components/emcn'
|
||||
import type { useMentionMenu } from '../../hooks/use-mention-menu'
|
||||
|
||||
/**
|
||||
* Top-level slash command options
|
||||
*/
|
||||
const TOP_LEVEL_COMMANDS = [
  { id: 'fast', label: 'fast' },
  { id: 'plan', label: 'plan' },
  { id: 'debug', label: 'debug' },
  { id: 'research', label: 'research' },
  { id: 'deploy', label: 'deploy' },
  { id: 'superagent', label: 'superagent' },
] as const

/**
 * Web submenu commands
 */
const WEB_COMMANDS = [
  { id: 'search', label: 'search' },
  { id: 'read', label: 'read' },
  { id: 'scrape', label: 'scrape' },
  { id: 'crawl', label: 'crawl' },
] as const

/**
 * All command labels for filtering
 */
const ALL_COMMANDS = [...TOP_LEVEL_COMMANDS, ...WEB_COMMANDS]
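// Illustrative filtering (queries are hypothetical): typing "/sc" narrows ALL_COMMANDS to
// ['scrape'], while "/re" matches 'research' and 'read'; an empty query keeps the folder
// view with TOP_LEVEL_COMMANDS plus the Web submenu instead of a flat filtered list.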
|
||||
|
||||
interface SlashMenuProps {
|
||||
mentionMenu: ReturnType<typeof useMentionMenu>
|
||||
message: string
|
||||
onSelectCommand: (command: string) => void
|
||||
}
|
||||
|
||||
/**
|
||||
* SlashMenu component for slash command dropdown.
|
||||
* Shows command options when user types '/'.
|
||||
*
|
||||
* @param props - Component props
|
||||
* @returns Rendered slash menu
|
||||
*/
|
||||
export function SlashMenu({ mentionMenu, message, onSelectCommand }: SlashMenuProps) {
|
||||
const {
|
||||
mentionMenuRef,
|
||||
menuListRef,
|
||||
getActiveSlashQueryAtPosition,
|
||||
getCaretPos,
|
||||
submenuActiveIndex,
|
||||
mentionActiveIndex,
|
||||
openSubmenuFor,
|
||||
setOpenSubmenuFor,
|
||||
} = mentionMenu
|
||||
|
||||
/**
|
||||
* Get the current query string after /
|
||||
*/
|
||||
const currentQuery = useMemo(() => {
|
||||
const caretPos = getCaretPos()
|
||||
const active = getActiveSlashQueryAtPosition(caretPos, message)
|
||||
return active?.query.trim().toLowerCase() || ''
|
||||
}, [message, getCaretPos, getActiveSlashQueryAtPosition])
|
||||
|
||||
/**
|
||||
* Filter commands based on query (search across all commands when there's a query)
|
||||
*/
|
||||
const filteredCommands = useMemo(() => {
|
||||
if (!currentQuery) return null // Show folder view when no query
|
||||
return ALL_COMMANDS.filter((cmd) => cmd.label.toLowerCase().includes(currentQuery))
|
||||
}, [currentQuery])
|
||||
|
||||
// Show aggregated view when there's a query
|
||||
const showAggregatedView = currentQuery.length > 0
|
||||
|
||||
// Compute caret viewport position via mirror technique for precise anchoring
|
||||
const textareaEl = mentionMenu.textareaRef.current
|
||||
if (!textareaEl) return null
|
||||
|
||||
const getCaretViewport = (textarea: HTMLTextAreaElement, caretPosition: number, text: string) => {
|
||||
const textareaRect = textarea.getBoundingClientRect()
|
||||
const style = window.getComputedStyle(textarea)
|
||||
|
||||
const mirrorDiv = document.createElement('div')
|
||||
mirrorDiv.style.position = 'absolute'
|
||||
mirrorDiv.style.visibility = 'hidden'
|
||||
mirrorDiv.style.whiteSpace = 'pre-wrap'
|
||||
mirrorDiv.style.wordWrap = 'break-word'
|
||||
mirrorDiv.style.font = style.font
|
||||
mirrorDiv.style.padding = style.padding
|
||||
mirrorDiv.style.border = style.border
|
||||
mirrorDiv.style.width = style.width
|
||||
mirrorDiv.style.lineHeight = style.lineHeight
|
||||
mirrorDiv.style.boxSizing = style.boxSizing
|
||||
mirrorDiv.style.letterSpacing = style.letterSpacing
|
||||
mirrorDiv.style.textTransform = style.textTransform
|
||||
mirrorDiv.style.textIndent = style.textIndent
|
||||
mirrorDiv.style.textAlign = style.textAlign
|
||||
|
||||
mirrorDiv.textContent = text.substring(0, caretPosition)
|
||||
|
||||
const caretMarker = document.createElement('span')
|
||||
caretMarker.style.display = 'inline-block'
|
||||
caretMarker.style.width = '0px'
|
||||
caretMarker.style.padding = '0'
|
||||
caretMarker.style.border = '0'
|
||||
mirrorDiv.appendChild(caretMarker)
|
||||
|
||||
document.body.appendChild(mirrorDiv)
|
||||
const markerRect = caretMarker.getBoundingClientRect()
|
||||
const mirrorRect = mirrorDiv.getBoundingClientRect()
|
||||
document.body.removeChild(mirrorDiv)
|
||||
|
||||
const leftOffset = markerRect.left - mirrorRect.left - textarea.scrollLeft
|
||||
const topOffset = markerRect.top - mirrorRect.top - textarea.scrollTop
|
||||
|
||||
return {
|
||||
left: textareaRect.left + leftOffset,
|
||||
top: textareaRect.top + topOffset,
|
||||
}
|
||||
}
|
||||
|
||||
const caretPos = getCaretPos()
|
||||
const caretViewport = getCaretViewport(textareaEl, caretPos, message)
|
||||
|
||||
// Decide preferred side based on available space
|
||||
const margin = 8
|
||||
const spaceAbove = caretViewport.top - margin
|
||||
const spaceBelow = window.innerHeight - caretViewport.top - margin
|
||||
const side: 'top' | 'bottom' = spaceBelow >= spaceAbove ? 'bottom' : 'top'
|
||||
|
||||
// Check if we're in folder navigation mode (no query, not in submenu)
|
||||
const isInFolderNavigationMode = !openSubmenuFor && !showAggregatedView
|
||||
|
||||
return (
|
||||
<Popover
|
||||
open={true}
|
||||
onOpenChange={() => {
|
||||
/* controlled externally */
|
||||
}}
|
||||
>
|
||||
<PopoverAnchor asChild>
|
||||
<div
|
||||
style={{
|
||||
position: 'fixed',
|
||||
top: `${caretViewport.top}px`,
|
||||
left: `${caretViewport.left}px`,
|
||||
width: '1px',
|
||||
height: '1px',
|
||||
pointerEvents: 'none',
|
||||
}}
|
||||
/>
|
||||
</PopoverAnchor>
|
||||
<PopoverContent
|
||||
ref={mentionMenuRef}
|
||||
side={side}
|
||||
align='start'
|
||||
collisionPadding={6}
|
||||
maxHeight={360}
|
||||
className='pointer-events-auto'
|
||||
style={{
|
||||
width: `180px`,
|
||||
}}
|
||||
onOpenAutoFocus={(e) => e.preventDefault()}
|
||||
onCloseAutoFocus={(e) => e.preventDefault()}
|
||||
>
|
||||
<PopoverBackButton />
|
||||
<PopoverScrollArea ref={menuListRef} className='space-y-[2px]'>
|
||||
{openSubmenuFor === 'Web' ? (
|
||||
// Web submenu view
|
||||
<>
|
||||
{WEB_COMMANDS.map((cmd, index) => (
|
||||
<PopoverItem
|
||||
key={cmd.id}
|
||||
onClick={() => onSelectCommand(cmd.label)}
|
||||
data-idx={index}
|
||||
active={index === submenuActiveIndex}
|
||||
>
|
||||
<span className='truncate capitalize'>{cmd.label}</span>
|
||||
</PopoverItem>
|
||||
))}
|
||||
</>
|
||||
) : showAggregatedView ? (
|
||||
// Aggregated filtered view
|
||||
<>
|
||||
{filteredCommands && filteredCommands.length === 0 ? (
|
||||
<div className='px-[8px] py-[8px] text-[12px] text-[var(--text-muted)]'>
|
||||
No commands found
|
||||
</div>
|
||||
) : (
|
||||
filteredCommands?.map((cmd, index) => (
|
||||
<PopoverItem
|
||||
key={cmd.id}
|
||||
onClick={() => onSelectCommand(cmd.label)}
|
||||
data-idx={index}
|
||||
active={index === submenuActiveIndex}
|
||||
>
|
||||
<span className='truncate capitalize'>{cmd.label}</span>
|
||||
</PopoverItem>
|
||||
))
|
||||
)}
|
||||
</>
|
||||
) : (
|
||||
// Folder navigation view
|
||||
<>
|
||||
{TOP_LEVEL_COMMANDS.map((cmd, index) => (
|
||||
<PopoverItem
|
||||
key={cmd.id}
|
||||
onClick={() => onSelectCommand(cmd.label)}
|
||||
data-idx={index}
|
||||
active={isInFolderNavigationMode && index === mentionActiveIndex}
|
||||
>
|
||||
<span className='truncate capitalize'>{cmd.label}</span>
|
||||
</PopoverItem>
|
||||
))}
|
||||
|
||||
<PopoverFolder
|
||||
id='web'
|
||||
title='Web'
|
||||
onOpen={() => setOpenSubmenuFor('Web')}
|
||||
active={
|
||||
isInFolderNavigationMode && mentionActiveIndex === TOP_LEVEL_COMMANDS.length
|
||||
}
|
||||
data-idx={TOP_LEVEL_COMMANDS.length}
|
||||
>
|
||||
{WEB_COMMANDS.map((cmd) => (
|
||||
<PopoverItem key={cmd.id} onClick={() => onSelectCommand(cmd.label)}>
|
||||
<span className='truncate capitalize'>{cmd.label}</span>
|
||||
</PopoverItem>
|
||||
))}
|
||||
</PopoverFolder>
|
||||
</>
|
||||
)}
|
||||
</PopoverScrollArea>
|
||||
</PopoverContent>
|
||||
</Popover>
|
||||
)
|
||||
}
|
||||
@@ -1,9 +1,11 @@
import { useCallback, useEffect, useState } from 'react'
import { useCallback, useEffect, useRef, useState } from 'react'
import type { ChatContext } from '@/stores/panel'

interface UseContextManagementProps {
  /** Current message text */
  message: string
  /** Initial contexts to populate when editing a message */
  initialContexts?: ChatContext[]
}

/**
@@ -13,8 +15,17 @@ interface UseContextManagementProps {
 * @param props - Configuration object
 * @returns Context state and management functions
 */
export function useContextManagement({ message }: UseContextManagementProps) {
  const [selectedContexts, setSelectedContexts] = useState<ChatContext[]>([])
export function useContextManagement({ message, initialContexts }: UseContextManagementProps) {
  const [selectedContexts, setSelectedContexts] = useState<ChatContext[]>(initialContexts ?? [])
  const initializedRef = useRef(false)

  // Initialize with initial contexts when they're first provided (for edit mode)
  useEffect(() => {
    if (initialContexts && initialContexts.length > 0 && !initializedRef.current) {
      setSelectedContexts(initialContexts)
      initializedRef.current = true
    }
  }, [initialContexts])
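  // Minimal usage sketch (values are hypothetical, for illustration only): when a sent
  // message is re-opened for editing, the caller can seed the hook with that message's
  // saved contexts so its pills are restored once, then managed normally afterwards.
  //
  //   const contextState = useContextManagement({
  //     message,
  //     initialContexts: editedMessage.contexts ?? [],
  //   })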
|
||||
|
||||
/**
|
||||
* Adds a context to the selected contexts list, avoiding duplicates
|
||||
@@ -63,6 +74,9 @@ export function useContextManagement({ message }: UseContextManagementProps) {
|
||||
if (c.kind === 'docs') {
|
||||
return true // Only one docs context allowed
|
||||
}
|
||||
if (c.kind === 'slash_command' && 'command' in context && 'command' in c) {
|
||||
return c.command === (context as any).command
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
@@ -103,6 +117,8 @@ export function useContextManagement({ message }: UseContextManagementProps) {
|
||||
return (c as any).executionId !== (contextToRemove as any).executionId
|
||||
case 'docs':
|
||||
return false // Remove docs (only one docs context)
|
||||
case 'slash_command':
|
||||
return (c as any).command !== (contextToRemove as any).command
|
||||
default:
|
||||
return c.label !== contextToRemove.label
|
||||
}
|
||||
@@ -118,7 +134,7 @@ export function useContextManagement({ message }: UseContextManagementProps) {
|
||||
}, [])
|
||||
|
||||
/**
|
||||
* Synchronizes selected contexts with inline @label tokens in the message.
|
||||
* Synchronizes selected contexts with inline @label or /label tokens in the message.
|
||||
* Removes contexts whose labels are no longer present in the message.
|
||||
*/
|
||||
useEffect(() => {
|
||||
@@ -130,17 +146,16 @@ export function useContextManagement({ message }: UseContextManagementProps) {
    setSelectedContexts((prev) => {
      if (prev.length === 0) return prev

      const presentLabels = new Set<string>()
      const labels = prev.map((c) => c.label).filter(Boolean)

      for (const label of labels) {
        const token = ` @${label} `
        if (message.includes(token)) {
          presentLabels.add(label)
        }
      }

      const filtered = prev.filter((c) => !!c.label && presentLabels.has(c.label))
      const filtered = prev.filter((c) => {
        if (!c.label) return false
        // Check for slash command tokens or mention tokens based on kind
        const isSlashCommand = c.kind === 'slash_command'
        const prefix = isSlashCommand ? '/' : '@'
        const tokenWithSpaces = ` ${prefix}${c.label} `
        const tokenAtStart = `${prefix}${c.label} `
        // Token can appear with leading space OR at the start of the message
        return message.includes(tokenWithSpaces) || message.startsWith(tokenAtStart)
      })
      return filtered.length === prev.length ? prev : filtered
    })
  }, [message])
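  // Illustrative sync behavior (labels are hypothetical): for the message
  // "/plan check @Sales Sheet for errors", a slash_command context labelled 'plan'
  // survives via the start-of-message token "/plan ", and a mention context labelled
  // 'Sales Sheet' survives via the space-wrapped token " @Sales Sheet "; deleting either
  // token from the message drops the corresponding context pill on the next effect run.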
|
||||
|
||||
@@ -70,11 +70,25 @@ export function useMentionMenu({
|
||||
// Ensure '@' starts a token (start or whitespace before)
|
||||
if (atIndex > 0 && !/\s/.test(before.charAt(atIndex - 1))) return null
|
||||
|
||||
// Check if this '@' is part of a completed mention token ( @label )
|
||||
// Check if this '@' is part of a completed mention token
|
||||
if (selectedContexts.length > 0) {
|
||||
const labels = selectedContexts.map((c) => c.label).filter(Boolean) as string[]
|
||||
for (const label of labels) {
|
||||
// Space-wrapped token: " @label "
|
||||
// Only check non-slash_command contexts for mentions
|
||||
const mentionLabels = selectedContexts
|
||||
.filter((c) => c.kind !== 'slash_command')
|
||||
.map((c) => c.label)
|
||||
.filter(Boolean) as string[]
|
||||
|
||||
for (const label of mentionLabels) {
|
||||
// Check for token at start of text: "@label "
|
||||
if (atIndex === 0) {
|
||||
const startToken = `@${label} `
|
||||
if (text.startsWith(startToken)) {
|
||||
// This @ is part of a completed token
|
||||
return null
|
||||
}
|
||||
}
|
||||
|
||||
// Check for space-wrapped token: " @label "
|
||||
const token = ` @${label} `
|
||||
let fromIndex = 0
|
||||
while (fromIndex <= text.length) {
|
||||
@@ -88,7 +102,6 @@ export function useMentionMenu({
|
||||
// Check if the @ we found is the @ of this completed token
|
||||
if (atIndex === atPositionInToken) {
|
||||
// The @ we found is part of a completed mention
|
||||
// Don't show menu - user is typing after the completed mention
|
||||
return null
|
||||
}
|
||||
|
||||
@@ -113,6 +126,76 @@ export function useMentionMenu({
|
||||
[message, selectedContexts]
|
||||
)
|
||||
|
||||
/**
|
||||
* Finds active slash command query at the given position
|
||||
*
|
||||
* @param pos - Position in the text to check
|
||||
* @param textOverride - Optional text override (for checking during input)
|
||||
* @returns Active slash query object or null if no active slash command
|
||||
*/
|
||||
const getActiveSlashQueryAtPosition = useCallback(
|
||||
(pos: number, textOverride?: string) => {
|
||||
const text = textOverride ?? message
|
||||
const before = text.slice(0, pos)
|
||||
const slashIndex = before.lastIndexOf('/')
|
||||
if (slashIndex === -1) return null
|
||||
|
||||
// Ensure '/' starts a token (start or whitespace before)
|
||||
if (slashIndex > 0 && !/\s/.test(before.charAt(slashIndex - 1))) return null
|
||||
|
||||
// Check if this '/' is part of a completed slash token
|
||||
if (selectedContexts.length > 0) {
|
||||
// Only check slash_command contexts
|
||||
const slashLabels = selectedContexts
|
||||
.filter((c) => c.kind === 'slash_command')
|
||||
.map((c) => c.label)
|
||||
.filter(Boolean) as string[]
|
||||
|
||||
for (const label of slashLabels) {
|
||||
// Check for token at start of text: "/label "
|
||||
if (slashIndex === 0) {
|
||||
const startToken = `/${label} `
|
||||
if (text.startsWith(startToken)) {
|
||||
// This slash is part of a completed token
|
||||
return null
|
||||
}
|
||||
}
|
||||
|
||||
// Check for space-wrapped token: " /label "
|
||||
const token = ` /${label} `
|
||||
let fromIndex = 0
|
||||
while (fromIndex <= text.length) {
|
||||
const idx = text.indexOf(token, fromIndex)
|
||||
if (idx === -1) break
|
||||
|
||||
const tokenStart = idx
|
||||
const tokenEnd = idx + token.length
|
||||
const slashPositionInToken = idx + 1 // position of / in " /label "
|
||||
|
||||
if (slashIndex === slashPositionInToken) {
|
||||
return null
|
||||
}
|
||||
|
||||
if (pos > tokenStart && pos < tokenEnd) {
|
||||
return null
|
||||
}
|
||||
|
||||
fromIndex = tokenEnd
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const segment = before.slice(slashIndex + 1)
|
||||
// Close the popup if user types space immediately after /
|
||||
if (segment.length > 0 && /^\s/.test(segment)) {
|
||||
return null
|
||||
}
|
||||
|
||||
return { query: segment, start: slashIndex, end: pos }
|
||||
},
|
||||
[message, selectedContexts]
|
||||
)
|
||||
|
||||
/**
|
||||
* Gets the submenu query text
|
||||
*
|
||||
@@ -200,9 +283,10 @@ export function useMentionMenu({
|
||||
const before = message.slice(0, active.start)
|
||||
const after = message.slice(active.end)
|
||||
|
||||
// Always include leading space, avoid duplicate if one exists
|
||||
const needsLeadingSpace = !before.endsWith(' ')
|
||||
const insertion = `${needsLeadingSpace ? ' ' : ''}@${label} `
|
||||
// Add leading space only if not at start and previous char isn't whitespace
|
||||
const needsLeadingSpace = before.length > 0 && !before.endsWith(' ')
|
||||
// Always add trailing space for easy continued typing
|
||||
const insertion = `${needsLeadingSpace ? ' ' : ''}@${label} `
|
||||
|
||||
const next = `${before}${insertion}${after}`
|
||||
onMessageChange(next)
|
||||
@@ -217,6 +301,41 @@ export function useMentionMenu({
|
||||
[message, getActiveMentionQueryAtPosition, onMessageChange]
|
||||
)
|
||||
|
||||
/**
|
||||
* Replaces active slash command with a label
|
||||
*
|
||||
* @param label - Label to replace the slash command with
|
||||
* @returns True if replacement was successful, false if no active slash command found
|
||||
*/
|
||||
const replaceActiveSlashWith = useCallback(
|
||||
(label: string) => {
|
||||
const textarea = textareaRef.current
|
||||
if (!textarea) return false
|
||||
const pos = textarea.selectionStart ?? message.length
|
||||
const active = getActiveSlashQueryAtPosition(pos)
|
||||
if (!active) return false
|
||||
|
||||
const before = message.slice(0, active.start)
|
||||
const after = message.slice(active.end)
|
||||
|
||||
// Add leading space only if not at start and previous char isn't whitespace
|
||||
const needsLeadingSpace = before.length > 0 && !before.endsWith(' ')
|
||||
// Always add trailing space for easy continued typing
|
||||
const insertion = `${needsLeadingSpace ? ' ' : ''}/${label} `
|
||||
|
||||
const next = `${before}${insertion}${after}`
|
||||
onMessageChange(next)
|
||||
|
||||
setTimeout(() => {
|
||||
const cursorPos = before.length + insertion.length
|
||||
textarea.setSelectionRange(cursorPos, cursorPos)
|
||||
textarea.focus()
|
||||
}, 0)
|
||||
return true
|
||||
},
|
||||
[message, getActiveSlashQueryAtPosition, onMessageChange]
|
||||
)
|
||||
|
||||
/**
|
||||
* Scrolls active item into view in the menu
|
||||
*
|
||||
@@ -304,10 +423,12 @@ export function useMentionMenu({
|
||||
// Operations
|
||||
getCaretPos,
|
||||
getActiveMentionQueryAtPosition,
|
||||
getActiveSlashQueryAtPosition,
|
||||
getSubmenuQuery,
|
||||
resetActiveMentionQuery,
|
||||
insertAtCursor,
|
||||
replaceActiveMentionWith,
|
||||
replaceActiveSlashWith,
|
||||
scrollActiveItemIntoView,
|
||||
closeMentionMenu,
|
||||
}
|
||||
|
||||
@@ -39,7 +39,7 @@ export function useMentionTokens({
setSelectedContexts,
}: UseMentionTokensProps) {
/**
* Computes all mention ranges in the message
* Computes all mention ranges in the message (both @mentions and /commands)
*
* @returns Array of mention ranges sorted by start position
*/
@@ -55,8 +55,19 @@ export function useMentionTokens({
const uniqueLabels = Array.from(new Set(labels))

for (const label of uniqueLabels) {
// Space-wrapped token: " @label " (search from start)
const token = ` @${label} `
// Find matching context to determine if it's a slash command
const matchingContext = selectedContexts.find((c) => c.label === label)
const isSlashCommand = matchingContext?.kind === 'slash_command'
const prefix = isSlashCommand ? '/' : '@'

// Check for token at the very start of the message (no leading space)
const tokenAtStart = `${prefix}${label} `
if (message.startsWith(tokenAtStart)) {
ranges.push({ start: 0, end: tokenAtStart.length, label })
}

// Space-wrapped token: " @label " or " /label " (search from start)
const token = ` ${prefix}${label} `
let fromIndex = 0
while (fromIndex <= message.length) {
const idx = message.indexOf(token, fromIndex)

@@ -21,6 +21,7 @@ import {
|
||||
MentionMenu,
|
||||
ModelSelector,
|
||||
ModeSelector,
|
||||
SlashMenu,
|
||||
} from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/user-input/components'
|
||||
import { NEAR_TOP_THRESHOLD } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/user-input/constants'
|
||||
import {
|
||||
@@ -67,6 +68,8 @@ interface UserInputProps {
|
||||
hideModeSelector?: boolean
|
||||
/** Disable @mention functionality */
|
||||
disableMentions?: boolean
|
||||
/** Initial contexts for editing a message with existing context mentions */
|
||||
initialContexts?: ChatContext[]
|
||||
}
|
||||
|
||||
interface UserInputRef {
|
||||
@@ -103,6 +106,7 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
|
||||
onModelChangeOverride,
|
||||
hideModeSelector = false,
|
||||
disableMentions = false,
|
||||
initialContexts,
|
||||
},
|
||||
ref
|
||||
) => {
|
||||
@@ -123,6 +127,7 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
|
||||
const [isNearTop, setIsNearTop] = useState(false)
|
||||
const [containerRef, setContainerRef] = useState<HTMLDivElement | null>(null)
|
||||
const [inputContainerRef, setInputContainerRef] = useState<HTMLDivElement | null>(null)
|
||||
const [showSlashMenu, setShowSlashMenu] = useState(false)
|
||||
|
||||
// Controlled vs uncontrolled message state
|
||||
const message = controlledValue !== undefined ? controlledValue : internalMessage
|
||||
@@ -140,7 +145,7 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
|
||||
|
||||
// Custom hooks - order matters for ref sharing
|
||||
// Context management (manages selectedContexts state)
|
||||
const contextManagement = useContextManagement({ message })
|
||||
const contextManagement = useContextManagement({ message, initialContexts })
|
||||
|
||||
// Mention menu
|
||||
const mentionMenu = useMentionMenu({
|
||||
@@ -370,20 +375,131 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
|
||||
}
|
||||
}, [onAbort, isLoading])
|
||||
|
||||
const handleSlashCommandSelect = useCallback(
|
||||
(command: string) => {
|
||||
// Capitalize the command for display
|
||||
const capitalizedCommand = command.charAt(0).toUpperCase() + command.slice(1)
|
||||
|
||||
// Replace the active slash query with the capitalized command
|
||||
mentionMenu.replaceActiveSlashWith(capitalizedCommand)
|
||||
|
||||
// Add as a context so it gets highlighted
|
||||
contextManagement.addContext({
|
||||
kind: 'slash_command',
|
||||
command,
|
||||
label: capitalizedCommand,
|
||||
})
|
||||
|
||||
setShowSlashMenu(false)
|
||||
mentionMenu.textareaRef.current?.focus()
|
||||
},
|
||||
[mentionMenu, contextManagement]
|
||||
)
|
||||
|
||||
const handleKeyDown = useCallback(
|
||||
(e: KeyboardEvent<HTMLTextAreaElement>) => {
|
||||
// Escape key handling
|
||||
if (e.key === 'Escape' && mentionMenu.showMentionMenu) {
|
||||
if (e.key === 'Escape' && (mentionMenu.showMentionMenu || showSlashMenu)) {
|
||||
e.preventDefault()
|
||||
if (mentionMenu.openSubmenuFor) {
|
||||
mentionMenu.setOpenSubmenuFor(null)
|
||||
mentionMenu.setSubmenuQueryStart(null)
|
||||
} else {
|
||||
mentionMenu.closeMentionMenu()
|
||||
setShowSlashMenu(false)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// Arrow navigation in slash menu
|
||||
if (showSlashMenu) {
|
||||
const TOP_LEVEL_COMMANDS = ['fast', 'plan', 'debug', 'research', 'deploy', 'superagent']
|
||||
const WEB_COMMANDS = ['search', 'read', 'scrape', 'crawl']
|
||||
const ALL_COMMANDS = [...TOP_LEVEL_COMMANDS, ...WEB_COMMANDS]
|
||||
|
||||
const caretPos = mentionMenu.getCaretPos()
|
||||
const activeSlash = mentionMenu.getActiveSlashQueryAtPosition(caretPos, message)
|
||||
const query = activeSlash?.query.trim().toLowerCase() || ''
|
||||
const showAggregatedView = query.length > 0
|
||||
|
||||
if (e.key === 'ArrowDown' || e.key === 'ArrowUp') {
|
||||
e.preventDefault()
|
||||
|
||||
if (mentionMenu.openSubmenuFor === 'Web') {
|
||||
// Navigate in Web submenu
|
||||
const last = WEB_COMMANDS.length - 1
|
||||
mentionMenu.setSubmenuActiveIndex((prev) => {
|
||||
const next =
|
||||
e.key === 'ArrowDown'
|
||||
? prev >= last
|
||||
? 0
|
||||
: prev + 1
|
||||
: prev <= 0
|
||||
? last
|
||||
: prev - 1
|
||||
requestAnimationFrame(() => mentionMenu.scrollActiveItemIntoView(next))
|
||||
return next
|
||||
})
|
||||
} else if (showAggregatedView) {
|
||||
// Navigate in filtered view
|
||||
const filtered = ALL_COMMANDS.filter((cmd) => cmd.includes(query))
|
||||
const last = Math.max(0, filtered.length - 1)
|
||||
mentionMenu.setSubmenuActiveIndex((prev) => {
|
||||
if (filtered.length === 0) return 0
|
||||
const next =
|
||||
e.key === 'ArrowDown'
|
||||
? prev >= last
|
||||
? 0
|
||||
: prev + 1
|
||||
: prev <= 0
|
||||
? last
|
||||
: prev - 1
|
||||
requestAnimationFrame(() => mentionMenu.scrollActiveItemIntoView(next))
|
||||
return next
|
||||
})
|
||||
} else {
|
||||
// Navigate in folder view (top-level + Web folder)
|
||||
const totalItems = TOP_LEVEL_COMMANDS.length + 1 // +1 for Web folder
|
||||
const last = totalItems - 1
|
||||
mentionMenu.setMentionActiveIndex((prev) => {
|
||||
const next =
|
||||
e.key === 'ArrowDown'
|
||||
? prev >= last
|
||||
? 0
|
||||
: prev + 1
|
||||
: prev <= 0
|
||||
? last
|
||||
: prev - 1
|
||||
requestAnimationFrame(() => mentionMenu.scrollActiveItemIntoView(next))
|
||||
return next
|
||||
})
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// Arrow right to enter Web submenu
|
||||
if (e.key === 'ArrowRight') {
|
||||
e.preventDefault()
|
||||
if (!showAggregatedView && !mentionMenu.openSubmenuFor) {
|
||||
// Check if Web folder is selected (it's after all top-level commands)
|
||||
if (mentionMenu.mentionActiveIndex === TOP_LEVEL_COMMANDS.length) {
|
||||
mentionMenu.setOpenSubmenuFor('Web')
|
||||
mentionMenu.setSubmenuActiveIndex(0)
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// Arrow left to exit submenu
|
||||
if (e.key === 'ArrowLeft') {
|
||||
e.preventDefault()
|
||||
if (mentionMenu.openSubmenuFor) {
|
||||
mentionMenu.setOpenSubmenuFor(null)
|
||||
}
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
// Arrow navigation in mention menu
|
||||
if (mentionKeyboard.handleArrowNavigation(e)) return
|
||||
if (mentionKeyboard.handleArrowRight(e)) return
|
||||
@@ -392,6 +508,42 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
|
||||
// Enter key handling
|
||||
if (e.key === 'Enter' && !e.shiftKey && !e.nativeEvent.isComposing) {
|
||||
e.preventDefault()
|
||||
if (showSlashMenu) {
|
||||
const TOP_LEVEL_COMMANDS = ['fast', 'plan', 'debug', 'research', 'deploy', 'superagent']
|
||||
const WEB_COMMANDS = ['search', 'read', 'scrape', 'crawl']
|
||||
const ALL_COMMANDS = [...TOP_LEVEL_COMMANDS, ...WEB_COMMANDS]
|
||||
|
||||
const caretPos = mentionMenu.getCaretPos()
|
||||
const activeSlash = mentionMenu.getActiveSlashQueryAtPosition(caretPos, message)
|
||||
const query = activeSlash?.query.trim().toLowerCase() || ''
|
||||
const showAggregatedView = query.length > 0
|
||||
|
||||
if (mentionMenu.openSubmenuFor === 'Web') {
|
||||
// Select from Web submenu
|
||||
const selectedCommand =
|
||||
WEB_COMMANDS[mentionMenu.submenuActiveIndex] || WEB_COMMANDS[0]
|
||||
handleSlashCommandSelect(selectedCommand)
|
||||
} else if (showAggregatedView) {
|
||||
// Select from filtered view
|
||||
const filtered = ALL_COMMANDS.filter((cmd) => cmd.includes(query))
|
||||
if (filtered.length > 0) {
|
||||
const selectedCommand = filtered[mentionMenu.submenuActiveIndex] || filtered[0]
|
||||
handleSlashCommandSelect(selectedCommand)
|
||||
}
|
||||
} else {
|
||||
// Folder navigation view
|
||||
const selectedIndex = mentionMenu.mentionActiveIndex
|
||||
if (selectedIndex < TOP_LEVEL_COMMANDS.length) {
|
||||
// Top-level command selected
|
||||
handleSlashCommandSelect(TOP_LEVEL_COMMANDS[selectedIndex])
|
||||
} else if (selectedIndex === TOP_LEVEL_COMMANDS.length) {
|
||||
// Web folder selected - open it
|
||||
mentionMenu.setOpenSubmenuFor('Web')
|
||||
mentionMenu.setSubmenuActiveIndex(0)
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
if (!mentionMenu.showMentionMenu) {
|
||||
handleSubmit()
|
||||
} else {
|
||||
@@ -469,7 +621,15 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
|
||||
}
|
||||
}
|
||||
},
|
||||
[mentionMenu, mentionKeyboard, handleSubmit, message.length, mentionTokensWithContext]
|
||||
[
|
||||
mentionMenu,
|
||||
mentionKeyboard,
|
||||
handleSubmit,
|
||||
handleSlashCommandSelect,
|
||||
message,
|
||||
mentionTokensWithContext,
|
||||
showSlashMenu,
|
||||
]
|
||||
)
|
||||
|
||||
const handleInputChange = useCallback(
|
||||
@@ -481,9 +641,14 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
|
||||
if (disableMentions) return
|
||||
|
||||
const caret = e.target.selectionStart ?? newValue.length
|
||||
const active = mentionMenu.getActiveMentionQueryAtPosition(caret, newValue)
|
||||
|
||||
if (active) {
|
||||
// Check for @ mention trigger
|
||||
const activeMention = mentionMenu.getActiveMentionQueryAtPosition(caret, newValue)
|
||||
// Check for / slash command trigger
|
||||
const activeSlash = mentionMenu.getActiveSlashQueryAtPosition(caret, newValue)
|
||||
|
||||
if (activeMention) {
|
||||
setShowSlashMenu(false)
|
||||
mentionMenu.setShowMentionMenu(true)
|
||||
mentionMenu.setInAggregated(false)
|
||||
if (mentionMenu.openSubmenuFor) {
|
||||
@@ -492,10 +657,17 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
|
||||
mentionMenu.setMentionActiveIndex(0)
|
||||
mentionMenu.setSubmenuActiveIndex(0)
|
||||
}
|
||||
} else if (activeSlash) {
|
||||
mentionMenu.setShowMentionMenu(false)
|
||||
mentionMenu.setOpenSubmenuFor(null)
|
||||
mentionMenu.setSubmenuQueryStart(null)
|
||||
setShowSlashMenu(true)
|
||||
mentionMenu.setSubmenuActiveIndex(0)
|
||||
} else {
|
||||
mentionMenu.setShowMentionMenu(false)
|
||||
mentionMenu.setOpenSubmenuFor(null)
|
||||
mentionMenu.setSubmenuQueryStart(null)
|
||||
setShowSlashMenu(false)
|
||||
}
|
||||
},
|
||||
[setMessage, mentionMenu, disableMentions]
|
||||
@@ -542,6 +714,32 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
|
||||
mentionMenu.setSubmenuActiveIndex(0)
|
||||
}, [disabled, isLoading, mentionMenu, message, setMessage])
|
||||
|
||||
const handleOpenSlashMenu = useCallback(() => {
|
||||
if (disabled || isLoading) return
|
||||
const textarea = mentionMenu.textareaRef.current
|
||||
if (!textarea) return
|
||||
textarea.focus()
|
||||
const pos = textarea.selectionStart ?? message.length
|
||||
const needsSpaceBefore = pos > 0 && !/\s/.test(message.charAt(pos - 1))
|
||||
|
||||
const insertText = needsSpaceBefore ? ' /' : '/'
|
||||
const start = textarea.selectionStart ?? message.length
|
||||
const end = textarea.selectionEnd ?? message.length
|
||||
const before = message.slice(0, start)
|
||||
const after = message.slice(end)
|
||||
const next = `${before}${insertText}${after}`
|
||||
setMessage(next)
|
||||
|
||||
setTimeout(() => {
|
||||
const newPos = before.length + insertText.length
|
||||
textarea.setSelectionRange(newPos, newPos)
|
||||
textarea.focus()
|
||||
}, 0)
|
||||
|
||||
setShowSlashMenu(true)
|
||||
mentionMenu.setSubmenuActiveIndex(0)
|
||||
}, [disabled, isLoading, mentionMenu, message, setMessage])
|
||||
|
||||
const canSubmit = message.trim().length > 0 && !disabled && !isLoading
|
||||
const showAbortButton = isLoading && onAbort
|
||||
|
||||
@@ -643,6 +841,20 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
|
||||
<AtSign className='h-3 w-3' strokeWidth={1.75} />
|
||||
</Badge>
|
||||
|
||||
<Badge
|
||||
variant='outline'
|
||||
onClick={handleOpenSlashMenu}
|
||||
title='Insert /'
|
||||
className={cn(
|
||||
'cursor-pointer rounded-[6px] p-[4.5px]',
|
||||
(disabled || isLoading) && 'cursor-not-allowed'
|
||||
)}
|
||||
>
|
||||
<span className='flex h-3 w-3 items-center justify-center font-medium text-[11px] leading-none'>
|
||||
/
|
||||
</span>
|
||||
</Badge>
|
||||
|
||||
{/* Selected Context Pills */}
|
||||
<ContextPills
|
||||
contexts={contextManagement.selectedContexts}
|
||||
@@ -717,6 +929,18 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
|
||||
/>,
|
||||
document.body
|
||||
)}
|
||||
|
||||
{/* Slash Menu Portal */}
|
||||
{!disableMentions &&
|
||||
showSlashMenu &&
|
||||
createPortal(
|
||||
<SlashMenu
|
||||
mentionMenu={mentionMenu}
|
||||
message={message}
|
||||
onSelectCommand={handleSlashCommandSelect}
|
||||
/>,
|
||||
document.body
|
||||
)}
|
||||
</div>
|
||||
|
||||
{/* Bottom Row: Mode Selector + Model Selector + Attach Button + Send Button */}
|
||||
|
||||
@@ -0,0 +1,941 @@
|
||||
'use client'
|
||||
|
||||
import { useCallback, useEffect, useMemo, useState } from 'react'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { Check, Clipboard } from 'lucide-react'
|
||||
import { useParams } from 'next/navigation'
|
||||
import {
|
||||
Badge,
|
||||
Button,
|
||||
ButtonGroup,
|
||||
ButtonGroupItem,
|
||||
Checkbox,
|
||||
Code,
|
||||
Combobox,
|
||||
type ComboboxOption,
|
||||
Input,
|
||||
Label,
|
||||
TagInput,
|
||||
Textarea,
|
||||
Tooltip,
|
||||
} from '@/components/emcn'
|
||||
import { Skeleton } from '@/components/ui'
|
||||
import type { AgentAuthentication, AgentCapabilities } from '@/lib/a2a/types'
|
||||
import { getBaseUrl } from '@/lib/core/utils/urls'
|
||||
import { normalizeInputFormatValue } from '@/lib/workflows/input-format-utils'
|
||||
import { StartBlockPath, TriggerUtils } from '@/lib/workflows/triggers/triggers'
|
||||
import {
|
||||
useA2AAgentByWorkflow,
|
||||
useCreateA2AAgent,
|
||||
useDeleteA2AAgent,
|
||||
usePublishA2AAgent,
|
||||
useUpdateA2AAgent,
|
||||
} from '@/hooks/queries/a2a/agents'
|
||||
import { useCollaborativeWorkflow } from '@/hooks/use-collaborative-workflow'
|
||||
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
|
||||
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
|
||||
|
||||
const logger = createLogger('A2ADeploy')
|
||||
|
||||
interface InputFormatField {
|
||||
id?: string
|
||||
name?: string
|
||||
type?: string
|
||||
value?: unknown
|
||||
collapsed?: boolean
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a description is a default/placeholder value that should be filtered out
|
||||
*/
|
||||
function isDefaultDescription(desc: string | null | undefined, workflowName: string): boolean {
|
||||
if (!desc) return true
|
||||
const normalized = desc.toLowerCase().trim()
|
||||
return (
|
||||
normalized === '' || normalized === 'new workflow' || normalized === workflowName.toLowerCase()
|
||||
)
|
||||
}
|
||||
|
||||
type CodeLanguage = 'curl' | 'python' | 'javascript' | 'typescript'
|
||||
|
||||
const LANGUAGE_LABELS: Record<CodeLanguage, string> = {
|
||||
curl: 'cURL',
|
||||
python: 'Python',
|
||||
javascript: 'JavaScript',
|
||||
typescript: 'TypeScript',
|
||||
}
|
||||
|
||||
const LANGUAGE_SYNTAX: Record<CodeLanguage, 'python' | 'javascript' | 'json'> = {
|
||||
curl: 'javascript',
|
||||
python: 'python',
|
||||
javascript: 'javascript',
|
||||
typescript: 'javascript',
|
||||
}
|
||||
|
||||
interface A2aDeployProps {
|
||||
workflowId: string
|
||||
workflowName: string
|
||||
workflowDescription?: string | null
|
||||
isDeployed: boolean
|
||||
workflowNeedsRedeployment?: boolean
|
||||
onSubmittingChange?: (submitting: boolean) => void
|
||||
onCanSaveChange?: (canSave: boolean) => void
|
||||
onAgentExistsChange?: (exists: boolean) => void
|
||||
onPublishedChange?: (published: boolean) => void
|
||||
onNeedsRepublishChange?: (needsRepublish: boolean) => void
|
||||
onDeployWorkflow?: () => Promise<void>
|
||||
}
|
||||
|
||||
type AuthScheme = 'none' | 'apiKey'
|
||||
|
||||
export function A2aDeploy({
|
||||
workflowId,
|
||||
workflowName,
|
||||
workflowDescription,
|
||||
isDeployed,
|
||||
workflowNeedsRedeployment,
|
||||
onSubmittingChange,
|
||||
onCanSaveChange,
|
||||
onAgentExistsChange,
|
||||
onPublishedChange,
|
||||
onNeedsRepublishChange,
|
||||
onDeployWorkflow,
|
||||
}: A2aDeployProps) {
|
||||
const params = useParams()
|
||||
const workspaceId = params.workspaceId as string
|
||||
|
||||
const { data: existingAgent, isLoading } = useA2AAgentByWorkflow(workspaceId, workflowId)
|
||||
|
||||
const createAgent = useCreateA2AAgent()
|
||||
const updateAgent = useUpdateA2AAgent()
|
||||
const deleteAgent = useDeleteA2AAgent()
|
||||
const publishAgent = usePublishA2AAgent()
|
||||
|
||||
const blocks = useWorkflowStore((state) => state.blocks)
|
||||
const { collaborativeSetSubblockValue } = useCollaborativeWorkflow()
|
||||
|
||||
const startBlockId = useMemo(() => {
|
||||
if (!blocks || Object.keys(blocks).length === 0) return null
|
||||
const candidate = TriggerUtils.findStartBlock(blocks, 'api')
|
||||
if (!candidate || candidate.path !== StartBlockPath.UNIFIED) return null
|
||||
return candidate.blockId
|
||||
}, [blocks])
|
||||
|
||||
const startBlockInputFormat = useSubBlockStore((state) => {
|
||||
if (!workflowId || !startBlockId) return null
|
||||
const workflowValues = state.workflowValues[workflowId]
|
||||
const fromStore = workflowValues?.[startBlockId]?.inputFormat
|
||||
if (fromStore !== undefined) return fromStore
|
||||
const startBlock = blocks[startBlockId]
|
||||
return startBlock?.subBlocks?.inputFormat?.value ?? null
|
||||
})
|
||||
|
||||
const missingFields = useMemo(() => {
|
||||
if (!startBlockId) return { input: false, data: false, files: false, any: false }
|
||||
const normalizedFields = normalizeInputFormatValue(startBlockInputFormat)
|
||||
const existingNames = new Set(
|
||||
normalizedFields
|
||||
.map((field) => field.name)
|
||||
.filter((n): n is string => typeof n === 'string' && n.trim() !== '')
|
||||
.map((n) => n.trim().toLowerCase())
|
||||
)
|
||||
const missing = {
|
||||
input: !existingNames.has('input'),
|
||||
data: !existingNames.has('data'),
|
||||
files: !existingNames.has('files'),
|
||||
any: false,
|
||||
}
|
||||
missing.any = missing.input || missing.data || missing.files
|
||||
return missing
|
||||
}, [startBlockId, startBlockInputFormat])
|
||||
|
||||
const handleAddA2AInputs = useCallback(() => {
|
||||
if (!startBlockId) return
|
||||
|
||||
const normalizedExisting = normalizeInputFormatValue(startBlockInputFormat)
|
||||
const newFields: InputFormatField[] = []
|
||||
|
||||
// Add input field if missing (for TextPart)
|
||||
if (missingFields.input) {
|
||||
newFields.push({
|
||||
id: crypto.randomUUID(),
|
||||
name: 'input',
|
||||
type: 'string',
|
||||
value: '',
|
||||
collapsed: false,
|
||||
})
|
||||
}
|
||||
|
||||
// Add data field if missing (for DataPart)
|
||||
if (missingFields.data) {
|
||||
newFields.push({
|
||||
id: crypto.randomUUID(),
|
||||
name: 'data',
|
||||
type: 'object',
|
||||
value: '',
|
||||
collapsed: false,
|
||||
})
|
||||
}
|
||||
|
||||
// Add files field if missing (for FilePart)
|
||||
if (missingFields.files) {
|
||||
newFields.push({
|
||||
id: crypto.randomUUID(),
|
||||
name: 'files',
|
||||
type: 'files',
|
||||
value: '',
|
||||
collapsed: false,
|
||||
})
|
||||
}
|
||||
|
||||
if (newFields.length > 0) {
|
||||
const updatedFields = [...newFields, ...normalizedExisting]
|
||||
collaborativeSetSubblockValue(startBlockId, 'inputFormat', updatedFields)
|
||||
logger.info(
|
||||
`Added A2A input fields to Start block: ${newFields.map((f) => f.name).join(', ')}`
|
||||
)
|
||||
}
|
||||
}, [startBlockId, startBlockInputFormat, missingFields, collaborativeSetSubblockValue])
|
||||
|
||||
const [name, setName] = useState('')
|
||||
const [description, setDescription] = useState('')
|
||||
const [authScheme, setAuthScheme] = useState<AuthScheme>('apiKey')
|
||||
const [pushNotificationsEnabled, setPushNotificationsEnabled] = useState(false)
|
||||
const [skillTags, setSkillTags] = useState<string[]>([])
|
||||
const [language, setLanguage] = useState<CodeLanguage>('curl')
|
||||
const [useStreamingExample, setUseStreamingExample] = useState(false)
|
||||
const [urlCopied, setUrlCopied] = useState(false)
|
||||
const [codeCopied, setCodeCopied] = useState(false)
|
||||
|
||||
useEffect(() => {
|
||||
if (existingAgent) {
|
||||
setName(existingAgent.name)
|
||||
const savedDesc = existingAgent.description || ''
|
||||
setDescription(isDefaultDescription(savedDesc, workflowName) ? '' : savedDesc)
|
||||
setPushNotificationsEnabled(existingAgent.capabilities?.pushNotifications ?? false)
|
||||
const schemes = existingAgent.authentication?.schemes || []
|
||||
if (schemes.includes('apiKey')) {
|
||||
setAuthScheme('apiKey')
|
||||
} else {
|
||||
setAuthScheme('none')
|
||||
}
|
||||
const skills = existingAgent.skills as Array<{ tags?: string[] }> | undefined
|
||||
const savedTags = skills?.[0]?.tags
|
||||
setSkillTags(savedTags?.length ? savedTags : [])
|
||||
} else {
|
||||
setName(workflowName)
|
||||
setDescription(
|
||||
isDefaultDescription(workflowDescription, workflowName) ? '' : workflowDescription || ''
|
||||
)
|
||||
setAuthScheme('apiKey')
|
||||
setPushNotificationsEnabled(false)
|
||||
setSkillTags([])
|
||||
}
|
||||
}, [existingAgent, workflowName, workflowDescription])
|
||||
|
||||
useEffect(() => {
|
||||
onAgentExistsChange?.(!!existingAgent)
|
||||
}, [existingAgent, onAgentExistsChange])
|
||||
|
||||
useEffect(() => {
|
||||
onPublishedChange?.(existingAgent?.isPublished ?? false)
|
||||
}, [existingAgent?.isPublished, onPublishedChange])
|
||||
|
||||
const hasFormChanges = useMemo(() => {
|
||||
if (!existingAgent) return false
|
||||
const savedSchemes = existingAgent.authentication?.schemes || []
|
||||
const savedAuthScheme = savedSchemes.includes('apiKey') ? 'apiKey' : 'none'
|
||||
const savedDesc = existingAgent.description || ''
|
||||
const normalizedSavedDesc = isDefaultDescription(savedDesc, workflowName) ? '' : savedDesc
|
||||
const skills = existingAgent.skills as Array<{ tags?: string[] }> | undefined
|
||||
const savedTags = skills?.[0]?.tags || []
|
||||
const tagsChanged =
|
||||
skillTags.length !== savedTags.length || skillTags.some((t, i) => t !== savedTags[i])
|
||||
return (
|
||||
name !== existingAgent.name ||
|
||||
description !== normalizedSavedDesc ||
|
||||
pushNotificationsEnabled !== (existingAgent.capabilities?.pushNotifications ?? false) ||
|
||||
authScheme !== savedAuthScheme ||
|
||||
tagsChanged
|
||||
)
|
||||
}, [
|
||||
existingAgent,
|
||||
name,
|
||||
description,
|
||||
pushNotificationsEnabled,
|
||||
authScheme,
|
||||
skillTags,
|
||||
workflowName,
|
||||
])
|
||||
|
||||
const hasWorkflowChanges = useMemo(() => {
|
||||
if (!existingAgent) return false
|
||||
return !!workflowNeedsRedeployment
|
||||
}, [existingAgent, workflowNeedsRedeployment])
|
||||
|
||||
const needsRepublish = existingAgent && (hasFormChanges || hasWorkflowChanges)
|
||||
|
||||
useEffect(() => {
|
||||
onNeedsRepublishChange?.(!!needsRepublish)
|
||||
}, [needsRepublish, onNeedsRepublishChange])
|
||||
|
||||
const authSchemeOptions: ComboboxOption[] = useMemo(
|
||||
() => [
|
||||
{ label: 'API Key', value: 'apiKey' },
|
||||
{ label: 'None (Public)', value: 'none' },
|
||||
],
|
||||
[]
|
||||
)
|
||||
|
||||
const canSave = name.trim().length > 0 && description.trim().length > 0
|
||||
useEffect(() => {
|
||||
onCanSaveChange?.(canSave)
|
||||
}, [canSave, onCanSaveChange])
|
||||
|
||||
const isSubmitting =
|
||||
createAgent.isPending ||
|
||||
updateAgent.isPending ||
|
||||
deleteAgent.isPending ||
|
||||
publishAgent.isPending
|
||||
|
||||
useEffect(() => {
|
||||
onSubmittingChange?.(isSubmitting)
|
||||
}, [isSubmitting, onSubmittingChange])
|
||||
|
||||
const handleCreateOrUpdate = useCallback(async () => {
|
||||
const capabilities: AgentCapabilities = {
|
||||
streaming: true,
|
||||
pushNotifications: pushNotificationsEnabled,
|
||||
stateTransitionHistory: true,
|
||||
}
|
||||
|
||||
const authentication: AgentAuthentication = {
|
||||
schemes: authScheme === 'none' ? ['none'] : [authScheme],
|
||||
}
|
||||
|
||||
try {
|
||||
if (existingAgent) {
|
||||
await updateAgent.mutateAsync({
|
||||
agentId: existingAgent.id,
|
||||
name: name.trim(),
|
||||
description: description.trim() || undefined,
|
||||
capabilities,
|
||||
authentication,
|
||||
skillTags,
|
||||
})
|
||||
} else {
|
||||
await createAgent.mutateAsync({
|
||||
workspaceId,
|
||||
workflowId,
|
||||
name: name.trim(),
|
||||
description: description.trim() || undefined,
|
||||
capabilities,
|
||||
authentication,
|
||||
skillTags,
|
||||
})
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error('Failed to save A2A agent:', error)
|
||||
}
|
||||
}, [
|
||||
existingAgent,
|
||||
name,
|
||||
description,
|
||||
pushNotificationsEnabled,
|
||||
authScheme,
|
||||
skillTags,
|
||||
workspaceId,
|
||||
workflowId,
|
||||
createAgent,
|
||||
updateAgent,
|
||||
])
|
||||
|
||||
const handlePublish = useCallback(async () => {
|
||||
if (!existingAgent) return
|
||||
try {
|
||||
await publishAgent.mutateAsync({
|
||||
agentId: existingAgent.id,
|
||||
workspaceId,
|
||||
action: 'publish',
|
||||
})
|
||||
} catch (error) {
|
||||
logger.error('Failed to publish A2A agent:', error)
|
||||
}
|
||||
}, [existingAgent, workspaceId, publishAgent])
|
||||
|
||||
const handleUnpublish = useCallback(async () => {
|
||||
if (!existingAgent) return
|
||||
try {
|
||||
await publishAgent.mutateAsync({
|
||||
agentId: existingAgent.id,
|
||||
workspaceId,
|
||||
action: 'unpublish',
|
||||
})
|
||||
} catch (error) {
|
||||
logger.error('Failed to unpublish A2A agent:', error)
|
||||
}
|
||||
}, [existingAgent, workspaceId, publishAgent])
|
||||
|
||||
const handleDelete = useCallback(async () => {
|
||||
if (!existingAgent) return
|
||||
try {
|
||||
await deleteAgent.mutateAsync({
|
||||
agentId: existingAgent.id,
|
||||
workspaceId,
|
||||
})
|
||||
setName(workflowName)
|
||||
setDescription(workflowDescription || '')
|
||||
} catch (error) {
|
||||
logger.error('Failed to delete A2A agent:', error)
|
||||
}
|
||||
}, [existingAgent, workspaceId, deleteAgent, workflowName, workflowDescription])
|
||||
|
||||
const handlePublishNewAgent = useCallback(async () => {
|
||||
const capabilities: AgentCapabilities = {
|
||||
streaming: true,
|
||||
pushNotifications: pushNotificationsEnabled,
|
||||
stateTransitionHistory: true,
|
||||
}
|
||||
|
||||
const authentication: AgentAuthentication = {
|
||||
schemes: authScheme === 'none' ? ['none'] : [authScheme],
|
||||
}
|
||||
|
||||
try {
|
||||
if (!isDeployed && onDeployWorkflow) {
|
||||
await onDeployWorkflow()
|
||||
}
|
||||
|
||||
const newAgent = await createAgent.mutateAsync({
|
||||
workspaceId,
|
||||
workflowId,
|
||||
name: name.trim(),
|
||||
description: description.trim() || undefined,
|
||||
capabilities,
|
||||
authentication,
|
||||
skillTags,
|
||||
})
|
||||
|
||||
await publishAgent.mutateAsync({
|
||||
agentId: newAgent.id,
|
||||
workspaceId,
|
||||
action: 'publish',
|
||||
})
|
||||
} catch (error) {
|
||||
logger.error('Failed to publish A2A agent:', error)
|
||||
}
|
||||
}, [
|
||||
name,
|
||||
description,
|
||||
pushNotificationsEnabled,
|
||||
authScheme,
|
||||
skillTags,
|
||||
workspaceId,
|
||||
workflowId,
|
||||
createAgent,
|
||||
publishAgent,
|
||||
isDeployed,
|
||||
onDeployWorkflow,
|
||||
])
|
||||
|
||||
const handleUpdateAndRepublish = useCallback(async () => {
|
||||
if (!existingAgent) return
|
||||
|
||||
const capabilities: AgentCapabilities = {
|
||||
streaming: true,
|
||||
pushNotifications: pushNotificationsEnabled,
|
||||
stateTransitionHistory: true,
|
||||
}
|
||||
|
||||
const authentication: AgentAuthentication = {
|
||||
schemes: authScheme === 'none' ? ['none'] : [authScheme],
|
||||
}
|
||||
|
||||
try {
|
||||
if ((!isDeployed || workflowNeedsRedeployment) && onDeployWorkflow) {
|
||||
await onDeployWorkflow()
|
||||
}
|
||||
|
||||
await updateAgent.mutateAsync({
|
||||
agentId: existingAgent.id,
|
||||
name: name.trim(),
|
||||
description: description.trim() || undefined,
|
||||
capabilities,
|
||||
authentication,
|
||||
skillTags,
|
||||
})
|
||||
|
||||
await publishAgent.mutateAsync({
|
||||
agentId: existingAgent.id,
|
||||
workspaceId,
|
||||
action: 'publish',
|
||||
})
|
||||
} catch (error) {
|
||||
logger.error('Failed to update and republish A2A agent:', error)
|
||||
}
|
||||
}, [
|
||||
existingAgent,
|
||||
isDeployed,
|
||||
workflowNeedsRedeployment,
|
||||
onDeployWorkflow,
|
||||
name,
|
||||
description,
|
||||
pushNotificationsEnabled,
|
||||
authScheme,
|
||||
skillTags,
|
||||
workspaceId,
|
||||
updateAgent,
|
||||
publishAgent,
|
||||
])
|
||||
|
||||
const baseUrl = getBaseUrl()
|
||||
const endpoint = existingAgent ? `${baseUrl}/api/a2a/serve/${existingAgent.id}` : null
|
||||
|
||||
const additionalInputFields = useMemo(() => {
|
||||
const allFields = normalizeInputFormatValue(startBlockInputFormat)
|
||||
return allFields.filter(
|
||||
(field): field is InputFormatField & { name: string } =>
|
||||
!!field.name &&
|
||||
field.name.toLowerCase() !== 'input' &&
|
||||
field.name.toLowerCase() !== 'data' &&
|
||||
field.name.toLowerCase() !== 'files'
|
||||
)
|
||||
}, [startBlockInputFormat])
|
||||
|
||||
const getExampleInputData = useCallback((): Record<string, unknown> => {
|
||||
const data: Record<string, unknown> = {}
|
||||
for (const field of additionalInputFields) {
|
||||
switch (field.type) {
|
||||
case 'string':
|
||||
data[field.name] = 'example'
|
||||
break
|
||||
case 'number':
|
||||
data[field.name] = 42
|
||||
break
|
||||
case 'boolean':
|
||||
data[field.name] = true
|
||||
break
|
||||
case 'object':
|
||||
data[field.name] = { key: 'value' }
|
||||
break
|
||||
case 'array':
|
||||
data[field.name] = [1, 2, 3]
|
||||
break
|
||||
default:
|
||||
data[field.name] = 'example'
|
||||
}
|
||||
}
|
||||
return data
|
||||
}, [additionalInputFields])
|
||||
|
||||
const getJsonRpcPayload = useCallback((): Record<string, unknown> => {
|
||||
const inputData = getExampleInputData()
|
||||
const hasAdditionalData = Object.keys(inputData).length > 0
|
||||
|
||||
// Build parts array: TextPart for message text, DataPart for additional fields
|
||||
const parts: Array<Record<string, unknown>> = [{ kind: 'text', text: 'Hello, agent!' }]
|
||||
if (hasAdditionalData) {
|
||||
parts.push({ kind: 'data', data: inputData })
|
||||
}
|
||||
|
||||
return {
|
||||
jsonrpc: '2.0',
|
||||
id: '1',
|
||||
method: useStreamingExample ? 'message/stream' : 'message/send',
|
||||
params: {
|
||||
message: {
|
||||
role: 'user',
|
||||
parts,
|
||||
},
|
||||
},
|
||||
}
|
||||
}, [getExampleInputData, useStreamingExample])
|
||||
|
||||
const getCurlCommand = useCallback((): string => {
|
||||
if (!endpoint) return ''
|
||||
const payload = getJsonRpcPayload()
|
||||
const requiresAuth = authScheme !== 'none'
|
||||
|
||||
switch (language) {
|
||||
case 'curl':
|
||||
return requiresAuth
|
||||
? `curl -X POST \\
|
||||
-H "X-API-Key: $SIM_API_KEY" \\
|
||||
-H "Content-Type: application/json" \\
|
||||
-d '${JSON.stringify(payload)}' \\
|
||||
${endpoint}`
|
||||
: `curl -X POST \\
|
||||
-H "Content-Type: application/json" \\
|
||||
-d '${JSON.stringify(payload)}' \\
|
||||
${endpoint}`
|
||||
|
||||
case 'python':
|
||||
return requiresAuth
|
||||
? `import os
|
||||
import requests
|
||||
|
||||
response = requests.post(
|
||||
"${endpoint}",
|
||||
headers={
|
||||
"X-API-Key": os.environ.get("SIM_API_KEY"),
|
||||
"Content-Type": "application/json"
|
||||
},
|
||||
json=${JSON.stringify(payload, null, 4).replace(/\n/g, '\n ')}
|
||||
)
|
||||
|
||||
print(response.json())`
|
||||
: `import requests
|
||||
|
||||
response = requests.post(
|
||||
"${endpoint}",
|
||||
headers={"Content-Type": "application/json"},
|
||||
json=${JSON.stringify(payload, null, 4).replace(/\n/g, '\n ')}
|
||||
)
|
||||
|
||||
print(response.json())`
|
||||
|
||||
case 'javascript':
|
||||
return requiresAuth
|
||||
? `const response = await fetch("${endpoint}", {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"X-API-Key": process.env.SIM_API_KEY,
|
||||
"Content-Type": "application/json"
|
||||
},
|
||||
body: JSON.stringify(${JSON.stringify(payload)})
|
||||
});
|
||||
|
||||
const data = await response.json();
|
||||
console.log(data);`
|
||||
: `const response = await fetch("${endpoint}", {
|
||||
method: "POST",
|
||||
headers: {"Content-Type": "application/json"},
|
||||
body: JSON.stringify(${JSON.stringify(payload)})
|
||||
});
|
||||
|
||||
const data = await response.json();
|
||||
console.log(data);`
|
||||
|
||||
case 'typescript':
|
||||
return requiresAuth
|
||||
? `const response = await fetch("${endpoint}", {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"X-API-Key": process.env.SIM_API_KEY,
|
||||
"Content-Type": "application/json"
|
||||
},
|
||||
body: JSON.stringify(${JSON.stringify(payload)})
|
||||
});
|
||||
|
||||
const data: Record<string, unknown> = await response.json();
|
||||
console.log(data);`
|
||||
: `const response = await fetch("${endpoint}", {
|
||||
method: "POST",
|
||||
headers: {"Content-Type": "application/json"},
|
||||
body: JSON.stringify(${JSON.stringify(payload)})
|
||||
});
|
||||
|
||||
const data: Record<string, unknown> = await response.json();
|
||||
console.log(data);`
|
||||
|
||||
default:
|
||||
return ''
|
||||
}
|
||||
}, [endpoint, language, getJsonRpcPayload, authScheme])
|
||||
|
||||
const handleCopyCommand = useCallback(() => {
|
||||
navigator.clipboard.writeText(getCurlCommand())
|
||||
setCodeCopied(true)
|
||||
setTimeout(() => setCodeCopied(false), 2000)
|
||||
}, [getCurlCommand])
|
||||
|
||||
if (isLoading) {
|
||||
return (
|
||||
<div className='-mx-1 space-y-[12px] px-1'>
|
||||
<div>
|
||||
<Skeleton className='mb-[6.5px] h-[16px] w-[80px]' />
|
||||
<Skeleton className='h-[34px] w-full rounded-[4px]' />
|
||||
<Skeleton className='mt-[6.5px] h-[14px] w-[200px]' />
|
||||
</div>
|
||||
<div>
|
||||
<Skeleton className='mb-[6.5px] h-[16px] w-[70px]' />
|
||||
<Skeleton className='h-[80px] w-full rounded-[4px]' />
|
||||
</div>
|
||||
<div>
|
||||
<Skeleton className='mb-[6.5px] h-[16px] w-[50px]' />
|
||||
<Skeleton className='h-[34px] w-full rounded-[4px]' />
|
||||
</div>
|
||||
<div>
|
||||
<Skeleton className='mb-[6.5px] h-[16px] w-[90px]' />
|
||||
<Skeleton className='h-[34px] w-full rounded-[4px]' />
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
return (
|
||||
<form
|
||||
id='a2a-deploy-form'
|
||||
onSubmit={(e) => {
|
||||
e.preventDefault()
|
||||
handleCreateOrUpdate()
|
||||
}}
|
||||
className='-mx-1 space-y-[12px] overflow-y-auto px-1 pb-[16px]'
|
||||
>
|
||||
{/* Endpoint URL (shown when agent exists) */}
|
||||
{existingAgent && endpoint && (
|
||||
<div>
|
||||
<Label className='mb-[6.5px] block pl-[2px] font-medium text-[13px] text-[var(--text-primary)]'>
|
||||
URL
|
||||
</Label>
|
||||
<div className='relative flex items-stretch overflow-hidden rounded-[4px] border border-[var(--border-1)]'>
|
||||
<div className='flex items-center whitespace-nowrap bg-[var(--surface-5)] pr-[6px] pl-[8px] font-medium text-[var(--text-secondary)] text-sm dark:bg-[var(--surface-5)]'>
|
||||
{baseUrl.replace(/^https?:\/\//, '')}/api/a2a/serve/
|
||||
</div>
|
||||
<div className='relative flex-1'>
|
||||
<Input
|
||||
value={existingAgent.id}
|
||||
readOnly
|
||||
className='rounded-none border-0 pr-[32px] pl-0 text-[var(--text-tertiary)] shadow-none'
|
||||
/>
|
||||
<Tooltip.Root>
|
||||
<Tooltip.Trigger asChild>
|
||||
<button
|
||||
type='button'
|
||||
onClick={() => {
|
||||
navigator.clipboard.writeText(endpoint)
|
||||
setUrlCopied(true)
|
||||
setTimeout(() => setUrlCopied(false), 2000)
|
||||
}}
|
||||
className='-translate-y-1/2 absolute top-1/2 right-2'
|
||||
>
|
||||
{urlCopied ? (
|
||||
<Check className='h-3 w-3 text-[var(--brand-tertiary-2)]' />
|
||||
) : (
|
||||
<Clipboard className='h-3 w-3 text-[var(--text-tertiary)]' />
|
||||
)}
|
||||
</button>
|
||||
</Tooltip.Trigger>
|
||||
<Tooltip.Content>
|
||||
<span>{urlCopied ? 'Copied' : 'Copy'}</span>
|
||||
</Tooltip.Content>
|
||||
</Tooltip.Root>
|
||||
</div>
|
||||
</div>
|
||||
<p className='mt-[6.5px] text-[11px] text-[var(--text-secondary)]'>
|
||||
The A2A endpoint URL where clients can discover and call your agent
|
||||
</p>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Agent Name */}
|
||||
<div>
|
||||
<Label
|
||||
htmlFor='a2a-name'
|
||||
className='mb-[6.5px] block pl-[2px] font-medium text-[13px] text-[var(--text-primary)]'
|
||||
>
|
||||
Agent name <span className='text-red-500'>*</span>
|
||||
</Label>
|
||||
<Input
|
||||
id='a2a-name'
|
||||
value={name}
|
||||
onChange={(e) => setName(e.target.value)}
|
||||
placeholder='Enter agent name'
|
||||
required
|
||||
/>
|
||||
<p className='mt-[6.5px] text-[11px] text-[var(--text-secondary)]'>
|
||||
Human-readable name shown in the Agent Card
|
||||
</p>
|
||||
</div>
|
||||
|
||||
{/* Description */}
|
||||
<div>
|
||||
<Label
|
||||
htmlFor='a2a-description'
|
||||
className='mb-[6.5px] block pl-[2px] font-medium text-[13px] text-[var(--text-primary)]'
|
||||
>
|
||||
Description <span className='text-red-500'>*</span>
|
||||
</Label>
|
||||
<Textarea
|
||||
id='a2a-description'
|
||||
value={description}
|
||||
onChange={(e) => setDescription(e.target.value)}
|
||||
placeholder='Describe what this agent does...'
|
||||
className='min-h-[80px] resize-none'
|
||||
required
|
||||
/>
|
||||
</div>
|
||||
|
||||
{/* Authentication */}
|
||||
<div>
|
||||
<Label className='mb-[6.5px] block pl-[2px] font-medium text-[13px] text-[var(--text-primary)]'>
|
||||
Authentication
|
||||
</Label>
|
||||
<Combobox
|
||||
options={authSchemeOptions}
|
||||
value={authScheme}
|
||||
onChange={(v) => setAuthScheme(v as AuthScheme)}
|
||||
placeholder='Select authentication...'
|
||||
/>
|
||||
<p className='mt-[6.5px] text-[11px] text-[var(--text-secondary)]'>
|
||||
{authScheme === 'none'
|
||||
? 'Anyone can call this agent without authentication'
|
||||
: 'Requires X-API-Key header or API key query parameter'}
|
||||
</p>
|
||||
</div>
|
||||
|
||||
{/* Capabilities */}
|
||||
<div>
|
||||
<Label className='mb-[6.5px] block pl-[2px] font-medium text-[13px] text-[var(--text-primary)]'>
|
||||
Capabilities
|
||||
</Label>
|
||||
<div className='space-y-[8px]'>
|
||||
<div className='flex items-center gap-[8px]'>
|
||||
<Checkbox
|
||||
id='a2a-push'
|
||||
checked={pushNotificationsEnabled}
|
||||
onCheckedChange={(checked) => setPushNotificationsEnabled(checked === true)}
|
||||
/>
|
||||
<label htmlFor='a2a-push' className='text-[13px] text-[var(--text-primary)]'>
|
||||
Push notifications (webhooks)
|
||||
</label>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Tags */}
|
||||
<div>
|
||||
<Label className='mb-[6.5px] block pl-[2px] font-medium text-[13px] text-[var(--text-primary)]'>
|
||||
Tags
|
||||
</Label>
|
||||
<TagInput
|
||||
items={skillTags.map((tag) => ({ value: tag, isValid: true }))}
|
||||
onAdd={(value) => {
|
||||
if (!skillTags.includes(value)) {
|
||||
setSkillTags((prev) => [...prev, value])
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}}
|
||||
onRemove={(_value, index) => {
|
||||
setSkillTags((prev) => prev.filter((_, i) => i !== index))
|
||||
}}
|
||||
placeholder='Add tags'
|
||||
placeholderWithTags='Add another'
|
||||
tagVariant='secondary'
|
||||
triggerKeys={['Enter', ',']}
|
||||
/>
|
||||
</div>
|
||||
|
||||
{/* Curl Preview (shown when agent exists) */}
|
||||
{existingAgent && endpoint && (
|
||||
<>
|
||||
<div>
|
||||
<div className='mb-[6.5px] flex items-center justify-between'>
|
||||
<Label className='block pl-[2px] font-medium text-[13px] text-[var(--text-primary)]'>
|
||||
Language
|
||||
</Label>
|
||||
</div>
|
||||
<ButtonGroup value={language} onValueChange={(val) => setLanguage(val as CodeLanguage)}>
|
||||
{(Object.keys(LANGUAGE_LABELS) as CodeLanguage[]).map((lang) => (
|
||||
<ButtonGroupItem key={lang} value={lang}>
|
||||
{LANGUAGE_LABELS[lang]}
|
||||
</ButtonGroupItem>
|
||||
))}
|
||||
</ButtonGroup>
|
||||
</div>
|
||||
|
||||
<div>
|
||||
<div className='mb-[6.5px] flex items-center justify-between'>
|
||||
<Label className='block pl-[2px] font-medium text-[13px] text-[var(--text-primary)]'>
|
||||
Send message
|
||||
</Label>
|
||||
<div className='flex items-center gap-[8px]'>
|
||||
<div className='flex items-center gap-[6px]'>
|
||||
<Checkbox
|
||||
id='a2a-stream-example'
|
||||
checked={useStreamingExample}
|
||||
onCheckedChange={(checked) => setUseStreamingExample(checked === true)}
|
||||
/>
|
||||
<label
|
||||
htmlFor='a2a-stream-example'
|
||||
className='text-[12px] text-[var(--text-secondary)]'
|
||||
>
|
||||
Stream
|
||||
</label>
|
||||
</div>
|
||||
<Tooltip.Root>
|
||||
<Tooltip.Trigger asChild>
|
||||
<Button
|
||||
type='button'
|
||||
variant='ghost'
|
||||
onClick={handleCopyCommand}
|
||||
aria-label='Copy command'
|
||||
className='!p-1.5 -my-1.5'
|
||||
>
|
||||
{codeCopied ? (
|
||||
<Check className='h-3 w-3' />
|
||||
) : (
|
||||
<Clipboard className='h-3 w-3' />
|
||||
)}
|
||||
</Button>
|
||||
</Tooltip.Trigger>
|
||||
<Tooltip.Content>
|
||||
<span>{codeCopied ? 'Copied' : 'Copy'}</span>
|
||||
</Tooltip.Content>
|
||||
</Tooltip.Root>
|
||||
</div>
|
||||
</div>
|
||||
<Code.Viewer
|
||||
code={getCurlCommand()}
|
||||
language={LANGUAGE_SYNTAX[language]}
|
||||
wrapText
|
||||
className='!min-h-0 rounded-[4px] border border-[var(--border-1)]'
|
||||
/>
|
||||
<div className='mt-[6.5px] flex items-start justify-between gap-2'>
|
||||
<p className='text-[11px] text-[var(--text-secondary)]'>
|
||||
External A2A clients can discover and call your agent. TextPart →{' '}
|
||||
<code className='text-[10px]'><start.input></code>, DataPart →{' '}
|
||||
<code className='text-[10px]'><start.data></code>, FilePart →{' '}
|
||||
<code className='text-[10px]'><start.files></code>.
|
||||
</p>
|
||||
{missingFields.any && (
|
||||
<Badge
|
||||
variant='outline'
|
||||
className='flex-none cursor-pointer whitespace-nowrap rounded-[6px]'
|
||||
title='Add required A2A input fields to Start block'
|
||||
onClick={handleAddA2AInputs}
|
||||
>
|
||||
<span className='whitespace-nowrap text-[12px]'>Add inputs</span>
|
||||
</Badge>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</>
|
||||
)}
|
||||
|
||||
{/* Hidden triggers for modal footer */}
|
||||
<button type='submit' data-a2a-save-trigger className='hidden' />
|
||||
<button type='button' data-a2a-publish-trigger className='hidden' onClick={handlePublish} />
|
||||
<button
|
||||
type='button'
|
||||
data-a2a-unpublish-trigger
|
||||
className='hidden'
|
||||
onClick={handleUnpublish}
|
||||
/>
|
||||
<button type='button' data-a2a-delete-trigger className='hidden' onClick={handleDelete} />
|
||||
<button
|
||||
type='button'
|
||||
data-a2a-publish-new-trigger
|
||||
className='hidden'
|
||||
onClick={handlePublishNewAgent}
|
||||
/>
|
||||
<button
|
||||
type='button'
|
||||
data-a2a-update-republish-trigger
|
||||
className='hidden'
|
||||
onClick={handleUpdateAndRepublish}
|
||||
/>
|
||||
</form>
|
||||
)
|
||||
}
|
||||
@@ -125,12 +125,13 @@ export function ApiDeploy({
|
||||
${endpoint}`
|
||||
|
||||
case 'python':
|
||||
return `import requests
|
||||
return `import os
|
||||
import requests
|
||||
|
||||
response = requests.post(
|
||||
"${endpoint}",
|
||||
headers={
|
||||
"X-API-Key": SIM_API_KEY,
|
||||
"X-API-Key": os.environ.get("SIM_API_KEY"),
|
||||
"Content-Type": "application/json"
|
||||
},
|
||||
json=${JSON.stringify(payload, null, 4).replace(/\n/g, '\n ')}
|
||||
@@ -142,7 +143,7 @@ print(response.json())`
|
||||
return `const response = await fetch("${endpoint}", {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"X-API-Key": SIM_API_KEY,
|
||||
"X-API-Key": process.env.SIM_API_KEY,
|
||||
"Content-Type": "application/json"
|
||||
},
|
||||
body: JSON.stringify(${JSON.stringify(payload)})
|
||||
@@ -155,7 +156,7 @@ console.log(data);`
|
||||
return `const response = await fetch("${endpoint}", {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"X-API-Key": SIM_API_KEY,
|
||||
"X-API-Key": process.env.SIM_API_KEY,
|
||||
"Content-Type": "application/json"
|
||||
},
|
||||
body: JSON.stringify(${JSON.stringify(payload)})
|
||||
@@ -183,12 +184,13 @@ console.log(data);`
|
||||
${endpoint}`
|
||||
|
||||
case 'python':
|
||||
return `import requests
|
||||
return `import os
|
||||
import requests
|
||||
|
||||
response = requests.post(
|
||||
"${endpoint}",
|
||||
headers={
|
||||
"X-API-Key": SIM_API_KEY,
|
||||
"X-API-Key": os.environ.get("SIM_API_KEY"),
|
||||
"Content-Type": "application/json"
|
||||
},
|
||||
json=${JSON.stringify(payload, null, 4).replace(/\n/g, '\n ')},
|
||||
@@ -203,7 +205,7 @@ for line in response.iter_lines():
|
||||
return `const response = await fetch("${endpoint}", {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"X-API-Key": SIM_API_KEY,
|
||||
"X-API-Key": process.env.SIM_API_KEY,
|
||||
"Content-Type": "application/json"
|
||||
},
|
||||
body: JSON.stringify(${JSON.stringify(payload)})
|
||||
@@ -222,7 +224,7 @@ while (true) {
|
||||
return `const response = await fetch("${endpoint}", {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"X-API-Key": SIM_API_KEY,
|
||||
"X-API-Key": process.env.SIM_API_KEY,
|
||||
"Content-Type": "application/json"
|
||||
},
|
||||
body: JSON.stringify(${JSON.stringify(payload)})
|
||||
@@ -260,12 +262,13 @@ while (true) {
|
||||
${endpoint}`
|
||||
|
||||
case 'python':
|
||||
return `import requests
|
||||
return `import os
|
||||
import requests
|
||||
|
||||
response = requests.post(
|
||||
"${endpoint}",
|
||||
headers={
|
||||
"X-API-Key": SIM_API_KEY,
|
||||
"X-API-Key": os.environ.get("SIM_API_KEY"),
|
||||
"Content-Type": "application/json",
|
||||
"X-Execution-Mode": "async"
|
||||
},
|
||||
@@ -279,7 +282,7 @@ print(job) # Contains job_id for status checking`
|
||||
return `const response = await fetch("${endpoint}", {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"X-API-Key": SIM_API_KEY,
|
||||
"X-API-Key": process.env.SIM_API_KEY,
|
||||
"Content-Type": "application/json",
|
||||
"X-Execution-Mode": "async"
|
||||
},
|
||||
@@ -293,7 +296,7 @@ console.log(job); // Contains job_id for status checking`
|
||||
return `const response = await fetch("${endpoint}", {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"X-API-Key": SIM_API_KEY,
|
||||
"X-API-Key": process.env.SIM_API_KEY,
|
||||
"Content-Type": "application/json",
|
||||
"X-Execution-Mode": "async"
|
||||
},
|
||||
@@ -314,11 +317,12 @@ console.log(job); // Contains job_id for status checking`
|
||||
${baseUrl}/api/jobs/JOB_ID_FROM_EXECUTION`
|
||||
|
||||
case 'python':
|
||||
return `import requests
|
||||
return `import os
|
||||
import requests
|
||||
|
||||
response = requests.get(
|
||||
"${baseUrl}/api/jobs/JOB_ID_FROM_EXECUTION",
|
||||
headers={"X-API-Key": SIM_API_KEY}
|
||||
headers={"X-API-Key": os.environ.get("SIM_API_KEY")}
|
||||
)
|
||||
|
||||
status = response.json()
|
||||
@@ -328,7 +332,7 @@ print(status)`
|
||||
return `const response = await fetch(
|
||||
"${baseUrl}/api/jobs/JOB_ID_FROM_EXECUTION",
|
||||
{
|
||||
headers: { "X-API-Key": SIM_API_KEY }
|
||||
headers: { "X-API-Key": process.env.SIM_API_KEY }
|
||||
}
|
||||
);
|
||||
|
||||
@@ -339,7 +343,7 @@ console.log(status);`
|
||||
return `const response = await fetch(
|
||||
"${baseUrl}/api/jobs/JOB_ID_FROM_EXECUTION",
|
||||
{
|
||||
headers: { "X-API-Key": SIM_API_KEY }
|
||||
headers: { "X-API-Key": process.env.SIM_API_KEY }
|
||||
}
|
||||
);
|
||||
|
||||
@@ -357,11 +361,12 @@ console.log(status);`
|
||||
${baseUrl}/api/users/me/usage-limits`
|
||||
|
||||
case 'python':
|
||||
return `import requests
|
||||
return `import os
|
||||
import requests
|
||||
|
||||
response = requests.get(
|
||||
"${baseUrl}/api/users/me/usage-limits",
|
||||
headers={"X-API-Key": SIM_API_KEY}
|
||||
headers={"X-API-Key": os.environ.get("SIM_API_KEY")}
|
||||
)
|
||||
|
||||
limits = response.json()
|
||||
@@ -371,7 +376,7 @@ print(limits)`
|
||||
return `const response = await fetch(
|
||||
"${baseUrl}/api/users/me/usage-limits",
|
||||
{
|
||||
headers: { "X-API-Key": SIM_API_KEY }
|
||||
headers: { "X-API-Key": process.env.SIM_API_KEY }
|
||||
}
|
||||
);
|
||||
|
||||
@@ -382,7 +387,7 @@ console.log(limits);`
|
||||
return `const response = await fetch(
|
||||
"${baseUrl}/api/users/me/usage-limits",
|
||||
{
|
||||
headers: { "X-API-Key": SIM_API_KEY }
|
||||
headers: { "X-API-Key": process.env.SIM_API_KEY }
|
||||
}
|
||||
);
|
||||
|
||||
|
||||
@@ -23,7 +23,7 @@ import { Alert, AlertDescription, Skeleton } from '@/components/ui'
import { getEnv, isTruthy } from '@/lib/core/config/env'
import { generatePassword } from '@/lib/core/security/encryption'
import { cn } from '@/lib/core/utils/cn'
import { getEmailDomain } from '@/lib/core/utils/urls'
import { getBaseUrl, getEmailDomain } from '@/lib/core/utils/urls'
import { quickValidateEmail } from '@/lib/messaging/email/validation'
import { OutputSelect } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/chat/components/output-select/output-select'
import {
@@ -493,7 +493,7 @@ function IdentifierInput({
onChange(lowercaseValue)
}

const fullUrl = `${getEnv('NEXT_PUBLIC_APP_URL')}/chat/${value}`
const fullUrl = `${getBaseUrl()}/chat/${value}`
const displayUrl = fullUrl.replace(/^https?:\/\//, '')

return (

@@ -14,7 +14,6 @@ import {
Tooltip,
} from '@/components/emcn'
import { Skeleton } from '@/components/ui'
import { getEnv } from '@/lib/core/config/env'
import { isDev } from '@/lib/core/config/feature-flags'
import { cn } from '@/lib/core/utils/cn'
import { getBaseUrl, getEmailDomain } from '@/lib/core/utils/urls'
@@ -392,7 +391,7 @@ export function FormDeploy({
)
}

const fullUrl = `${getEnv('NEXT_PUBLIC_APP_URL')}/form/${identifier}`
const fullUrl = `${getBaseUrl()}/form/${identifier}`
const displayUrl = fullUrl.replace(/^https?:\/\//, '')

return (

@@ -513,25 +513,31 @@ export function McpDeploy({
{inputFormat.map((field) => (
<div
key={field.name}
className='rounded-[6px] border bg-[var(--surface-3)] px-[10px] py-[8px]'
className='overflow-hidden rounded-[4px] border border-[var(--border-1)]'
>
<div className='flex items-center justify-between'>
<p className='font-medium text-[13px] text-[var(--text-primary)]'>{field.name}</p>
<Badge variant='outline' className='text-[10px]'>
{field.type}
</Badge>
<div className='flex items-center justify-between bg-[var(--surface-4)] px-[10px] py-[5px]'>
<div className='flex min-w-0 flex-1 items-center gap-[8px]'>
<span className='block truncate font-medium text-[14px] text-[var(--text-tertiary)]'>
{field.name}
</span>
<Badge size='sm'>{field.type}</Badge>
</div>
</div>
<div className='border-[var(--border-1)] border-t px-[10px] pt-[6px] pb-[10px]'>
<div className='flex flex-col gap-[6px]'>
<Label className='text-[13px]'>Description</Label>
<Input
value={parameterDescriptions[field.name] || ''}
onChange={(e) =>
setParameterDescriptions((prev) => ({
...prev,
[field.name]: e.target.value,
}))
}
placeholder={`Enter description for ${field.name}`}
/>
</div>
</div>
<Input
value={parameterDescriptions[field.name] || ''}
onChange={(e) =>
setParameterDescriptions((prev) => ({
...prev,
[field.name]: e.target.value,
}))
}
placeholder='Description'
className='mt-[6px] h-[28px] text-[12px]'
/>
</div>
))}
</div>
@@ -551,7 +557,6 @@ export function McpDeploy({
searchable
searchPlaceholder='Search servers...'
disabled={!toolName.trim() || isPending}
isLoading={isPending}
overlayContent={
<span className='truncate text-[var(--text-primary)]'>{selectedServersLabel}</span>
}

Some files were not shown because too many files have changed in this diff