mirror of https://github.com/simstudioai/sim.git
synced 2026-01-13 00:48:26 -05:00
Compare commits
11 Commits
| SHA1 |
|---|
| 034ad8331d |
| 56458625b7 |
| f93a946272 |
| f2950c7060 |
| 88b4a1fe6e |
| 2512767dde |
| b5f55b7c63 |
| 684ad5aeec |
| a3dff1027f |
| 0aec9ef571 |
| cb4db20a5f |
@@ -4061,6 +4061,31 @@ export function McpIcon(props: SVGProps<SVGSVGElement>) {
  )
}

export function A2AIcon(props: SVGProps<SVGSVGElement>) {
  return (
    <svg {...props} viewBox='0 0 860 860' fill='none' xmlns='http://www.w3.org/2000/svg'>
      <circle cx='544' cy='307' r='27' fill='currentColor' />
      <circle cx='154' cy='307' r='27' fill='currentColor' />
      <circle cx='706' cy='307' r='27' fill='currentColor' />
      <circle cx='316' cy='307' r='27' fill='currentColor' />
      <path
        d='M336.5 191.003H162C97.6588 191.003 45.5 243.162 45.5 307.503C45.5 371.844 97.6442 424.003 161.985 424.003C206.551 424.003 256.288 424.003 296.5 424.003C487.5 424.003 374 191.005 569 191.001C613.886 191 658.966 191 698.025 191C762.366 191.001 814.5 243.16 814.5 307.501C814.5 371.843 762.34 424.003 697.998 424.003H523.5'
        stroke='currentColor'
        strokeWidth='48'
        strokeLinecap='round'
      />
      <path
        d='M256 510.002C270.359 510.002 282 521.643 282 536.002C282 550.361 270.359 562.002 256 562.002H148C133.641 562.002 122 550.361 122 536.002C122 521.643 133.641 510.002 148 510.002H256ZM712 510.002C726.359 510.002 738 521.643 738 536.002C738 550.361 726.359 562.002 712 562.002H360C345.641 562.002 334 550.361 334 536.002C334 521.643 345.641 510.002 360 510.002H712Z'
        fill='currentColor'
      />
      <path
        d='M444 628.002C458.359 628.002 470 639.643 470 654.002C470 668.361 458.359 680.002 444 680.002H100C85.6406 680.002 74 668.361 74 654.002C74 639.643 85.6406 628.002 100 628.002H444ZM548 628.002C562.359 628.002 574 639.643 574 654.002C574 668.361 562.359 680.002 548 680.002C533.641 680.002 522 668.361 522 654.002C522 639.643 533.641 628.002 548 628.002ZM760 628.002C774.359 628.002 786 639.643 786 654.002C786 668.361 774.359 680.002 760 680.002H652C637.641 680.002 626 668.361 626 654.002C626 639.643 637.641 628.002 652 628.002H760Z'
        fill='currentColor'
      />
    </svg>
  )
}

export function WordpressIcon(props: SVGProps<SVGSVGElement>) {
  return (
    <svg {...props} xmlns='http://www.w3.org/2000/svg' viewBox='0 0 25.925 25.925'>
@@ -4,6 +4,7 @@
import type { ComponentType, SVGProps } from 'react'
import {
  A2AIcon,
  AhrefsIcon,
  AirtableIcon,
  ApifyIcon,

@@ -126,6 +127,7 @@ import {
type IconComponent = ComponentType<SVGProps<SVGSVGElement>>

export const blockTypeToIconMap: Record<string, IconComponent> = {
  a2a: A2AIcon,
  ahrefs: AhrefsIcon,
  airtable: AirtableIcon,
  apify: ApifyIcon,
apps/docs/content/docs/en/tools/a2a.mdx (new file, 240 lines)
@@ -0,0 +1,240 @@
---
title: A2A
description: Interact with external A2A-compatible agents
---

import { BlockInfoCard } from "@/components/ui/block-info-card"

<BlockInfoCard
  type="a2a"
  color="#4151B5"
/>

{/* MANUAL-CONTENT-START:intro */}
The A2A (Agent-to-Agent) protocol enables Sim to interact with external AI agents and systems that implement A2A-compatible APIs. With A2A, you can connect Sim’s automations and workflows to remote agents—such as LLM-powered bots, microservices, and other AI-based tools—using a standardized messaging format.

Using the A2A tools in Sim, you can:

- **Send Messages to External Agents**: Communicate directly with remote agents, providing prompts, commands, or data.
- **Receive and Stream Responses**: Get structured responses, artifacts, or real-time updates from the agent as the task progresses.
- **Continue Conversations or Tasks**: Carry on multi-turn conversations or workflows by referencing task and context IDs.
- **Integrate Third-Party AI and Automation**: Leverage external A2A-compatible services as part of your Sim workflows.

These features allow you to build advanced workflows that combine Sim’s native capabilities with the intelligence and automation of external AIs or custom agents. To use A2A integrations, you’ll need the external agent’s endpoint URL and, if required, an API key or credentials.
{/* MANUAL-CONTENT-END */}

## Usage Instructions

Use the A2A (Agent-to-Agent) protocol to interact with external AI agents.

## Tools

### `a2a_send_message`

Send a message to an external A2A-compatible agent.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `agentUrl` | string | Yes | The A2A agent endpoint URL |
| `message` | string | Yes | Message to send to the agent |
| `taskId` | string | No | Task ID for continuing an existing task |
| `contextId` | string | No | Context ID for conversation continuity |
| `apiKey` | string | No | API key for authentication |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `content` | string | The text response from the agent |
| `taskId` | string | Task ID for follow-up interactions |
| `contextId` | string | Context ID for conversation continuity |
| `state` | string | Task state |
| `artifacts` | array | Structured output artifacts |
| `history` | array | Full message history |
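
For illustration, the JSON-RPC 2.0 call that `a2a_send_message` issues against the agent endpoint might look like the sketch below. The envelope (`message/send`, `message.parts`) follows the A2A v0.3 specification; the URL, API key, and prompt are placeholders.

```ts
// Sketch of the JSON-RPC envelope behind a2a_send_message (A2A v0.3).
// Endpoint URL and API key are placeholders.
const response = await fetch('https://agent.example.com/a2a', {
  method: 'POST',
  headers: {
    'Content-Type': 'application/json',
    Authorization: 'Bearer YOUR_API_KEY', // only if the agent requires it
  },
  body: JSON.stringify({
    jsonrpc: '2.0',
    id: '1',
    method: 'message/send',
    params: {
      message: {
        kind: 'message',
        messageId: crypto.randomUUID(),
        role: 'user',
        parts: [{ kind: 'text', text: 'Summarize the latest sales report' }],
      },
    },
  }),
})
const { result } = await response.json() // a Task (or Message) per the A2A spec
```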

### `a2a_send_message_stream`

Send a message to an external A2A-compatible agent with real-time streaming.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `agentUrl` | string | Yes | The A2A agent endpoint URL |
| `message` | string | Yes | Message to send to the agent |
| `taskId` | string | No | Task ID for continuing an existing task |
| `contextId` | string | No | Context ID for conversation continuity |
| `apiKey` | string | No | API key for authentication |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `content` | string | The text response from the agent |
| `taskId` | string | Task ID for follow-up interactions |
| `contextId` | string | Context ID for conversation continuity |
| `state` | string | Task state |
| `artifacts` | array | Structured output artifacts |
| `history` | array | Full message history |

### `a2a_get_task`

Query the status of an existing A2A task.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `agentUrl` | string | Yes | The A2A agent endpoint URL |
| `taskId` | string | Yes | Task ID to query |
| `apiKey` | string | No | API key for authentication |
| `historyLength` | number | No | Number of history messages to include |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `taskId` | string | Task ID |
| `contextId` | string | Context ID |
| `state` | string | Task state |
| `artifacts` | array | Output artifacts |
| `history` | array | Message history |

### `a2a_cancel_task`

Cancel a running A2A task.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `agentUrl` | string | Yes | The A2A agent endpoint URL |
| `taskId` | string | Yes | Task ID to cancel |
| `apiKey` | string | No | API key for authentication |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `cancelled` | boolean | Whether cancellation was successful |
| `state` | string | Task state after cancellation |

### `a2a_get_agent_card`

Fetch the Agent Card (discovery document) for an A2A agent.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `agentUrl` | string | Yes | The A2A agent endpoint URL |
| `apiKey` | string | No | API key for authentication \(if required\) |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `name` | string | Agent name |
| `description` | string | Agent description |
| `url` | string | Agent endpoint URL |
| `version` | string | Agent version |
| `capabilities` | object | Agent capabilities \(streaming, pushNotifications, etc.\) |
| `skills` | array | Skills the agent can perform |
| `defaultInputModes` | array | Default input modes \(text, file, data\) |
| `defaultOutputModes` | array | Default output modes \(text, file, data\) |
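
As a sketch, an Agent Card can be fetched with a plain HTTP GET; by A2A convention agents serve it at a well-known path (the exact path varies by protocol version), and Sim's own agents serve it from `/api/a2a/agents/[agentId]` as shown later in this changeset. The URL below is a placeholder.

```ts
// Sketch: fetch an Agent Card for discovery. The well-known path is an
// A2A convention and may differ between protocol versions.
const card = await fetch('https://agent.example.com/.well-known/agent-card.json').then((r) =>
  r.json()
)

console.log(card.name, card.version)
console.log('streaming supported:', card.capabilities?.streaming === true)
```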

### `a2a_resubscribe`

Reconnect to an ongoing A2A task stream after connection interruption.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `agentUrl` | string | Yes | The A2A agent endpoint URL |
| `taskId` | string | Yes | Task ID to resubscribe to |
| `apiKey` | string | No | API key for authentication |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `taskId` | string | Task ID |
| `contextId` | string | Context ID |
| `state` | string | Current task state |
| `isRunning` | boolean | Whether the task is still running |
| `artifacts` | array | Output artifacts |
| `history` | array | Message history |

### `a2a_set_push_notification`

Configure a webhook to receive task update notifications.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `agentUrl` | string | Yes | The A2A agent endpoint URL |
| `taskId` | string | Yes | Task ID to configure notifications for |
| `webhookUrl` | string | Yes | HTTPS webhook URL to receive notifications |
| `token` | string | No | Token for webhook validation |
| `apiKey` | string | No | API key for authentication |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `url` | string | Configured webhook URL |
| `token` | string | Token for webhook validation |
| `success` | boolean | Whether configuration was successful |

### `a2a_get_push_notification`

Get the push notification webhook configuration for a task.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `agentUrl` | string | Yes | The A2A agent endpoint URL |
| `taskId` | string | Yes | Task ID to get notification config for |
| `apiKey` | string | No | API key for authentication |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `url` | string | Configured webhook URL |
| `token` | string | Token for webhook validation |
| `exists` | boolean | Whether a push notification config exists |

### `a2a_delete_push_notification`

Delete the push notification webhook configuration for a task.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `agentUrl` | string | Yes | The A2A agent endpoint URL |
| `taskId` | string | Yes | Task ID to delete notification config for |
| `pushNotificationConfigId` | string | No | Push notification configuration ID to delete \(optional - server can derive from taskId\) |
| `apiKey` | string | No | API key for authentication |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether deletion was successful |

## Notes

- Category: `tools`
- Type: `a2a`
||||
@@ -37,7 +37,7 @@ This integration empowers Sim agents to automate data management tasks within yo
|
||||
|
||||
## Usage Instructions
|
||||
|
||||
Integrate Amazon DynamoDB into workflows. Supports Get, Put, Query, Scan, Update, and Delete operations on DynamoDB tables.
|
||||
Integrate Amazon DynamoDB into workflows. Supports Get, Put, Query, Scan, Update, Delete, and Introspect operations on DynamoDB tables.
|
||||
|
||||
|
||||
|
||||
@@ -185,6 +185,27 @@ Delete an item from a DynamoDB table
|
||||
| --------- | ---- | ----------- |
|
||||
| `message` | string | Operation status message |
|
||||
|
||||
### `dynamodb_introspect`
|
||||
|
||||
Introspect DynamoDB to list tables or get detailed schema information for a specific table
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `region` | string | Yes | AWS region \(e.g., us-east-1\) |
|
||||
| `accessKeyId` | string | Yes | AWS access key ID |
|
||||
| `secretAccessKey` | string | Yes | AWS secret access key |
|
||||
| `tableName` | string | No | Optional table name to get detailed schema. If not provided, lists all tables. |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `message` | string | Operation status message |
|
||||
| `tables` | array | List of table names in the region |
|
||||
| `tableDetails` | object | Detailed schema information for a specific table |
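
For reference, a parameter object matching the input table above (credential values are placeholders; omit `tableName` to list every table in the region):

```ts
// Placeholder credentials for dynamodb_introspect.
const introspectParams = {
  region: 'us-east-1',
  accessKeyId: 'AKIA...', // AWS access key ID (placeholder)
  secretAccessKey: '...', // AWS secret access key (placeholder)
  tableName: 'Orders', // optional: detailed schema for one table
}
```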

## Notes
@@ -362,6 +362,29 @@ Get comprehensive statistics about the Elasticsearch cluster.

| `nodes` | object | Node statistics including count and versions |
| `indices` | object | Index statistics including document count and store size |

### `elasticsearch_list_indices`

List all indices in the Elasticsearch cluster with their health, status, and statistics.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `deploymentType` | string | Yes | Deployment type: self_hosted or cloud |
| `host` | string | No | Elasticsearch host URL \(for self-hosted\) |
| `cloudId` | string | No | Elastic Cloud ID \(for cloud deployments\) |
| `authMethod` | string | Yes | Authentication method: api_key or basic_auth |
| `apiKey` | string | No | Elasticsearch API key |
| `username` | string | No | Username for basic auth |
| `password` | string | No | Password for basic auth |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `message` | string | Summary message about the indices |
| `indices` | json | Array of index information objects |

## Notes
@@ -96,13 +96,13 @@ Download a file from Google Drive with complete metadata (exports Google Workspa

| `fileId` | string | Yes | The ID of the file to download |
| `mimeType` | string | No | The MIME type to export Google Workspace files to \(optional\) |
| `fileName` | string | No | Optional filename override |
- | `includeRevisions` | boolean | No | Whether to include revision history in the metadata \(default: true\) |
+ | `includeRevisions` | boolean | No | Whether to include revision history in the metadata \(default: true, returns first 100 revisions\) |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
- | `file` | object | Downloaded file stored in execution files |
+ | `file` | object | Downloaded file data |

### `google_drive_list`
@@ -1,6 +1,7 @@
{
  "pages": [
    "index",
+   "a2a",
    "ahrefs",
    "airtable",
    "apify",
@@ -172,6 +172,30 @@ Execute MongoDB aggregation pipeline

| `documents` | array | Array of documents returned from aggregation |
| `documentCount` | number | Number of documents returned |

### `mongodb_introspect`

Introspect MongoDB database to list databases, collections, and indexes

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `host` | string | Yes | MongoDB server hostname or IP address |
| `port` | number | Yes | MongoDB server port \(default: 27017\) |
| `database` | string | No | Database name to introspect \(optional - if not provided, lists all databases\) |
| `username` | string | No | MongoDB username |
| `password` | string | No | MongoDB password |
| `authSource` | string | No | Authentication database |
| `ssl` | string | No | SSL connection mode \(disabled, required, preferred\) |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `message` | string | Operation status message |
| `databases` | array | Array of database names |
| `collections` | array | Array of collection info with name, type, document count, and indexes |

## Notes
@@ -157,6 +157,29 @@ Execute raw SQL query on MySQL database

| `rows` | array | Array of rows returned from the query |
| `rowCount` | number | Number of rows affected |

### `mysql_introspect`

Introspect MySQL database schema to retrieve table structures, columns, and relationships

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `host` | string | Yes | MySQL server hostname or IP address |
| `port` | number | Yes | MySQL server port \(default: 3306\) |
| `database` | string | Yes | Database name to connect to |
| `username` | string | Yes | Database username |
| `password` | string | Yes | Database password |
| `ssl` | string | No | SSL connection mode \(disabled, required, preferred\) |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `message` | string | Operation status message |
| `tables` | array | Array of table schemas with columns, keys, and indexes |
| `databases` | array | List of available databases on the server |

## Notes
@@ -168,6 +168,33 @@ Execute arbitrary Cypher queries on Neo4j graph database for complex operations

| `recordCount` | number | Number of records returned |
| `summary` | json | Execution summary with timing and counters |

### `neo4j_introspect`

Introspect a Neo4j database to discover its schema including node labels, relationship types, properties, constraints, and indexes.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `host` | string | Yes | Neo4j server hostname or IP address |
| `port` | number | Yes | Neo4j server port \(default: 7687 for Bolt protocol\) |
| `database` | string | Yes | Database name to connect to |
| `username` | string | Yes | Neo4j username |
| `password` | string | Yes | Neo4j password |
| `encryption` | string | No | Connection encryption mode \(enabled, disabled\) |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `message` | string | Operation status message |
| `labels` | array | Array of node labels in the database |
| `relationshipTypes` | array | Array of relationship types in the database |
| `nodeSchemas` | array | Array of node schemas with their properties |
| `relationshipSchemas` | array | Array of relationship schemas with their properties |
| `constraints` | array | Array of database constraints |
| `indexes` | array | Array of database indexes |

## Notes
@@ -157,6 +157,30 @@ Execute raw SQL query on PostgreSQL database

| `rows` | array | Array of rows returned from the query |
| `rowCount` | number | Number of rows affected |

### `postgresql_introspect`

Introspect PostgreSQL database schema to retrieve table structures, columns, and relationships

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `host` | string | Yes | PostgreSQL server hostname or IP address |
| `port` | number | Yes | PostgreSQL server port \(default: 5432\) |
| `database` | string | Yes | Database name to connect to |
| `username` | string | Yes | Database username |
| `password` | string | Yes | Database password |
| `ssl` | string | No | SSL connection mode \(disabled, required, preferred\) |
| `schema` | string | No | Schema to introspect \(default: public\) |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `message` | string | Operation status message |
| `tables` | array | Array of table schemas with columns, keys, and indexes |
| `schemas` | array | List of available schemas in the database |

## Notes
@@ -165,6 +165,32 @@ Execute raw SQL on Amazon RDS using the Data API

| `rows` | array | Array of rows returned or affected |
| `rowCount` | number | Number of rows affected |

### `rds_introspect`

Introspect Amazon RDS Aurora database schema to retrieve table structures, columns, and relationships

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `region` | string | Yes | AWS region \(e.g., us-east-1\) |
| `accessKeyId` | string | Yes | AWS access key ID |
| `secretAccessKey` | string | Yes | AWS secret access key |
| `resourceArn` | string | Yes | ARN of the Aurora DB cluster |
| `secretArn` | string | Yes | ARN of the Secrets Manager secret containing DB credentials |
| `database` | string | No | Database name \(optional\) |
| `schema` | string | No | Schema to introspect \(default: public for PostgreSQL, database name for MySQL\) |
| `engine` | string | No | Database engine \(aurora-postgresql or aurora-mysql\). Auto-detected if not provided. |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `message` | string | Operation status message |
| `engine` | string | Detected database engine type |
| `tables` | array | Array of table schemas with columns, keys, and indexes |
| `schemas` | array | List of available schemas in the database |

## Notes
@@ -261,6 +261,25 @@ Call a PostgreSQL function in Supabase

| `message` | string | Operation status message |
| `results` | json | Result returned from the function |

### `supabase_introspect`

Introspect Supabase database schema to get table structures, columns, and relationships

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `projectId` | string | Yes | Your Supabase project ID \(e.g., jdrkgepadsdopsntdlom\) |
| `schema` | string | No | Database schema to introspect \(defaults to all user schemas, commonly "public"\) |
| `apiKey` | string | Yes | Your Supabase service role secret key |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `message` | string | Operation status message |
| `tables` | array | Array of table schemas with columns, keys, and indexes |

### `supabase_storage_upload`

Upload a file to a Supabase storage bucket
apps/sim/app/api/a2a/agents/[agentId]/route.ts (new file, 269 lines)
@@ -0,0 +1,269 @@
/**
 * A2A Agent Card Endpoint
 *
 * Returns the Agent Card (discovery document) for an A2A agent.
 * Also supports CRUD operations for managing agents.
 */

import { db } from '@sim/db'
import { a2aAgent, workflow } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { generateAgentCard, generateSkillsFromWorkflow } from '@/lib/a2a/agent-card'
import type { AgentCapabilities, AgentSkill } from '@/lib/a2a/types'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/persistence/utils'

const logger = createLogger('A2AAgentCardAPI')

export const dynamic = 'force-dynamic'

interface RouteParams {
  agentId: string
}

/**
 * GET - Returns the Agent Card for discovery
 */
export async function GET(request: NextRequest, { params }: { params: Promise<RouteParams> }) {
  const { agentId } = await params

  try {
    const [agent] = await db
      .select({
        agent: a2aAgent,
        workflow: workflow,
      })
      .from(a2aAgent)
      .innerJoin(workflow, eq(a2aAgent.workflowId, workflow.id))
      .where(eq(a2aAgent.id, agentId))
      .limit(1)

    if (!agent) {
      return NextResponse.json({ error: 'Agent not found' }, { status: 404 })
    }

    if (!agent.agent.isPublished) {
      const auth = await checkHybridAuth(request, { requireWorkflowId: false })
      if (!auth.success) {
        return NextResponse.json({ error: 'Agent not published' }, { status: 404 })
      }
    }

    const agentCard = generateAgentCard(
      {
        id: agent.agent.id,
        name: agent.agent.name,
        description: agent.agent.description,
        version: agent.agent.version,
        capabilities: agent.agent.capabilities as AgentCapabilities,
        skills: agent.agent.skills as AgentSkill[],
      },
      {
        id: agent.workflow.id,
        name: agent.workflow.name,
        description: agent.workflow.description,
      }
    )

    return NextResponse.json(agentCard, {
      headers: {
        'Content-Type': 'application/json',
        'Cache-Control': agent.agent.isPublished ? 'public, max-age=3600' : 'private, no-cache',
      },
    })
  } catch (error) {
    logger.error('Error getting Agent Card:', error)
    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}

/**
 * PUT - Update an agent
 */
export async function PUT(request: NextRequest, { params }: { params: Promise<RouteParams> }) {
  const { agentId } = await params

  try {
    const auth = await checkHybridAuth(request, { requireWorkflowId: false })
    if (!auth.success || !auth.userId) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    const [existingAgent] = await db
      .select()
      .from(a2aAgent)
      .where(eq(a2aAgent.id, agentId))
      .limit(1)

    if (!existingAgent) {
      return NextResponse.json({ error: 'Agent not found' }, { status: 404 })
    }

    const body = await request.json()

    let skills = body.skills ?? existingAgent.skills
    if (body.skillTags !== undefined) {
      const agentName = body.name ?? existingAgent.name
      const agentDescription = body.description ?? existingAgent.description
      skills = generateSkillsFromWorkflow(agentName, agentDescription, body.skillTags)
    }

    const [updatedAgent] = await db
      .update(a2aAgent)
      .set({
        name: body.name ?? existingAgent.name,
        description: body.description ?? existingAgent.description,
        version: body.version ?? existingAgent.version,
        capabilities: body.capabilities ?? existingAgent.capabilities,
        skills,
        authentication: body.authentication ?? existingAgent.authentication,
        isPublished: body.isPublished ?? existingAgent.isPublished,
        publishedAt:
          body.isPublished && !existingAgent.isPublished ? new Date() : existingAgent.publishedAt,
        updatedAt: new Date(),
      })
      .where(eq(a2aAgent.id, agentId))
      .returning()

    logger.info(`Updated A2A agent: ${agentId}`)

    return NextResponse.json({ success: true, agent: updatedAgent })
  } catch (error) {
    logger.error('Error updating agent:', error)
    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}

/**
 * DELETE - Delete an agent
 */
export async function DELETE(request: NextRequest, { params }: { params: Promise<RouteParams> }) {
  const { agentId } = await params

  try {
    const auth = await checkHybridAuth(request, { requireWorkflowId: false })
    if (!auth.success || !auth.userId) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    const [existingAgent] = await db
      .select()
      .from(a2aAgent)
      .where(eq(a2aAgent.id, agentId))
      .limit(1)

    if (!existingAgent) {
      return NextResponse.json({ error: 'Agent not found' }, { status: 404 })
    }

    await db.delete(a2aAgent).where(eq(a2aAgent.id, agentId))

    logger.info(`Deleted A2A agent: ${agentId}`)

    return NextResponse.json({ success: true })
  } catch (error) {
    logger.error('Error deleting agent:', error)
    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}

/**
 * POST - Publish/unpublish an agent
 */
export async function POST(request: NextRequest, { params }: { params: Promise<RouteParams> }) {
  const { agentId } = await params

  try {
    const auth = await checkHybridAuth(request, { requireWorkflowId: false })
    if (!auth.success || !auth.userId) {
      logger.warn('A2A agent publish auth failed:', { error: auth.error, hasUserId: !!auth.userId })
      return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
    }

    const [existingAgent] = await db
      .select()
      .from(a2aAgent)
      .where(eq(a2aAgent.id, agentId))
      .limit(1)

    if (!existingAgent) {
      return NextResponse.json({ error: 'Agent not found' }, { status: 404 })
    }

    const body = await request.json()
    const action = body.action as 'publish' | 'unpublish' | 'refresh'

    if (action === 'publish') {
      const [wf] = await db
        .select({ isDeployed: workflow.isDeployed })
        .from(workflow)
        .where(eq(workflow.id, existingAgent.workflowId))
        .limit(1)

      if (!wf?.isDeployed) {
        return NextResponse.json(
          { error: 'Workflow must be deployed before publishing agent' },
          { status: 400 }
        )
      }

      await db
        .update(a2aAgent)
        .set({
          isPublished: true,
          publishedAt: new Date(),
          updatedAt: new Date(),
        })
        .where(eq(a2aAgent.id, agentId))

      logger.info(`Published A2A agent: ${agentId}`)
      return NextResponse.json({ success: true, isPublished: true })
    }

    if (action === 'unpublish') {
      await db
        .update(a2aAgent)
        .set({
          isPublished: false,
          updatedAt: new Date(),
        })
        .where(eq(a2aAgent.id, agentId))

      logger.info(`Unpublished A2A agent: ${agentId}`)
      return NextResponse.json({ success: true, isPublished: false })
    }

    if (action === 'refresh') {
      const workflowData = await loadWorkflowFromNormalizedTables(existingAgent.workflowId)
      if (!workflowData) {
        return NextResponse.json({ error: 'Failed to load workflow' }, { status: 500 })
      }

      const [wf] = await db
        .select({ name: workflow.name, description: workflow.description })
        .from(workflow)
        .where(eq(workflow.id, existingAgent.workflowId))
        .limit(1)

      const skills = generateSkillsFromWorkflow(wf?.name || existingAgent.name, wf?.description)

      await db
        .update(a2aAgent)
        .set({
          skills,
          updatedAt: new Date(),
        })
        .where(eq(a2aAgent.id, agentId))

      logger.info(`Refreshed skills for A2A agent: ${agentId}`)
      return NextResponse.json({ success: true, skills })
    }

    return NextResponse.json({ error: 'Invalid action' }, { status: 400 })
  } catch (error) {
    logger.error('Error with agent action:', error)
    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}
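
A hedged sketch of driving this route from a client, based on the POST handler above: publish an agent once its workflow is deployed. The base URL, agent ID, and auth header are placeholders (`checkHybridAuth` accepts several credential types; an API-key header is assumed here).

```ts
// Hypothetical client call: publish an A2A agent (workflow must be deployed).
const res = await fetch('https://sim.example.com/api/a2a/agents/AGENT_ID', {
  method: 'POST',
  headers: {
    'Content-Type': 'application/json',
    'X-API-Key': 'YOUR_SIM_API_KEY', // assumed header; session auth also passes checkHybridAuth
  },
  body: JSON.stringify({ action: 'publish' }),
})
const data = await res.json() // { success: true, isPublished: true } on success
```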
apps/sim/app/api/a2a/agents/route.ts (new file, 186 lines)
@@ -0,0 +1,186 @@
/**
 * A2A Agents List Endpoint
 *
 * List and create A2A agents for a workspace.
 */

import { db } from '@sim/db'
import { a2aAgent, workflow } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq, sql } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { v4 as uuidv4 } from 'uuid'
import { generateSkillsFromWorkflow } from '@/lib/a2a/agent-card'
import { A2A_DEFAULT_CAPABILITIES } from '@/lib/a2a/constants'
import { sanitizeAgentName } from '@/lib/a2a/utils'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/persistence/utils'
import { hasValidStartBlockInState } from '@/lib/workflows/triggers/trigger-utils'
import { getWorkspaceById } from '@/lib/workspaces/permissions/utils'

const logger = createLogger('A2AAgentsAPI')

export const dynamic = 'force-dynamic'

/**
 * GET - List all A2A agents for a workspace
 */
export async function GET(request: NextRequest) {
  try {
    const auth = await checkHybridAuth(request, { requireWorkflowId: false })
    if (!auth.success || !auth.userId) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    const { searchParams } = new URL(request.url)
    const workspaceId = searchParams.get('workspaceId')

    if (!workspaceId) {
      return NextResponse.json({ error: 'workspaceId is required' }, { status: 400 })
    }

    const ws = await getWorkspaceById(workspaceId)
    if (!ws) {
      return NextResponse.json({ error: 'Workspace not found' }, { status: 404 })
    }

    const agents = await db
      .select({
        id: a2aAgent.id,
        workspaceId: a2aAgent.workspaceId,
        workflowId: a2aAgent.workflowId,
        name: a2aAgent.name,
        description: a2aAgent.description,
        version: a2aAgent.version,
        capabilities: a2aAgent.capabilities,
        skills: a2aAgent.skills,
        authentication: a2aAgent.authentication,
        isPublished: a2aAgent.isPublished,
        publishedAt: a2aAgent.publishedAt,
        createdAt: a2aAgent.createdAt,
        updatedAt: a2aAgent.updatedAt,
        workflowName: workflow.name,
        workflowDescription: workflow.description,
        isDeployed: workflow.isDeployed,
        taskCount: sql<number>`(
          SELECT COUNT(*)::int
          FROM "a2a_task"
          WHERE "a2a_task"."agent_id" = "a2a_agent"."id"
        )`.as('task_count'),
      })
      .from(a2aAgent)
      .leftJoin(workflow, eq(a2aAgent.workflowId, workflow.id))
      .where(eq(a2aAgent.workspaceId, workspaceId))
      .orderBy(a2aAgent.createdAt)

    logger.info(`Listed ${agents.length} A2A agents for workspace ${workspaceId}`)

    return NextResponse.json({ success: true, agents })
  } catch (error) {
    logger.error('Error listing agents:', error)
    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}

/**
 * POST - Create a new A2A agent from a workflow
 */
export async function POST(request: NextRequest) {
  try {
    const auth = await checkHybridAuth(request, { requireWorkflowId: false })
    if (!auth.success || !auth.userId) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    const body = await request.json()
    const { workspaceId, workflowId, name, description, capabilities, authentication, skillTags } =
      body

    if (!workspaceId || !workflowId) {
      return NextResponse.json(
        { error: 'workspaceId and workflowId are required' },
        { status: 400 }
      )
    }

    const [wf] = await db
      .select({
        id: workflow.id,
        name: workflow.name,
        description: workflow.description,
        workspaceId: workflow.workspaceId,
        isDeployed: workflow.isDeployed,
      })
      .from(workflow)
      .where(and(eq(workflow.id, workflowId), eq(workflow.workspaceId, workspaceId)))
      .limit(1)

    if (!wf) {
      return NextResponse.json(
        { error: 'Workflow not found or does not belong to workspace' },
        { status: 404 }
      )
    }

    const [existing] = await db
      .select({ id: a2aAgent.id })
      .from(a2aAgent)
      .where(and(eq(a2aAgent.workspaceId, workspaceId), eq(a2aAgent.workflowId, workflowId)))
      .limit(1)

    if (existing) {
      return NextResponse.json(
        { error: 'An agent already exists for this workflow' },
        { status: 409 }
      )
    }

    const workflowData = await loadWorkflowFromNormalizedTables(workflowId)
    if (!workflowData || !hasValidStartBlockInState(workflowData)) {
      return NextResponse.json(
        { error: 'Workflow must have a Start block to be exposed as an A2A agent' },
        { status: 400 }
      )
    }

    const skills = generateSkillsFromWorkflow(
      name || wf.name,
      description || wf.description,
      skillTags
    )

    const agentId = uuidv4()
    const agentName = name || sanitizeAgentName(wf.name)

    const [agent] = await db
      .insert(a2aAgent)
      .values({
        id: agentId,
        workspaceId,
        workflowId,
        createdBy: auth.userId,
        name: agentName,
        description: description || wf.description,
        version: '1.0.0',
        capabilities: {
          ...A2A_DEFAULT_CAPABILITIES,
          ...capabilities,
        },
        skills,
        authentication: authentication || {
          schemes: ['bearer', 'apiKey'],
        },
        isPublished: false,
        createdAt: new Date(),
        updatedAt: new Date(),
      })
      .returning()

    logger.info(`Created A2A agent ${agentId} for workflow ${workflowId}`)

    return NextResponse.json({ success: true, agent }, { status: 201 })
  } catch (error) {
    logger.error('Error creating agent:', error)
    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}
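
Based on the POST handler above, creating an agent is a single request naming the workspace and a deployed workflow with a Start block; everything else is optional. The IDs, URL, and auth header below are placeholders.

```ts
// Hypothetical client call: expose a workflow as an A2A agent.
const res = await fetch('https://sim.example.com/api/a2a/agents', {
  method: 'POST',
  headers: {
    'Content-Type': 'application/json',
    'X-API-Key': 'YOUR_SIM_API_KEY', // assumed header; any credential checkHybridAuth accepts works
  },
  body: JSON.stringify({
    workspaceId: 'WORKSPACE_ID',
    workflowId: 'WORKFLOW_ID',
    name: 'Sales Report Agent', // optional; defaults to a sanitized workflow name
  }),
})
const { agent } = await res.json() // 201 with the created agent row on success
```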
apps/sim/app/api/a2a/serve/[agentId]/route.ts (new file, 1211 lines; diff suppressed because it is too large)
apps/sim/app/api/a2a/serve/[agentId]/utils.ts (new file, 166 lines)
@@ -0,0 +1,166 @@
import type { Artifact, Message, PushNotificationConfig, Task, TaskState } from '@a2a-js/sdk'
import { v4 as uuidv4 } from 'uuid'
import { generateInternalToken } from '@/lib/auth/internal'
import { getBaseUrl } from '@/lib/core/utils/urls'

/** A2A v0.3 JSON-RPC method names */
export const A2A_METHODS = {
  MESSAGE_SEND: 'message/send',
  MESSAGE_STREAM: 'message/stream',
  TASKS_GET: 'tasks/get',
  TASKS_CANCEL: 'tasks/cancel',
  TASKS_RESUBSCRIBE: 'tasks/resubscribe',
  PUSH_NOTIFICATION_SET: 'tasks/pushNotificationConfig/set',
  PUSH_NOTIFICATION_GET: 'tasks/pushNotificationConfig/get',
  PUSH_NOTIFICATION_DELETE: 'tasks/pushNotificationConfig/delete',
} as const

/** A2A v0.3 error codes */
export const A2A_ERROR_CODES = {
  PARSE_ERROR: -32700,
  INVALID_REQUEST: -32600,
  METHOD_NOT_FOUND: -32601,
  INVALID_PARAMS: -32602,
  INTERNAL_ERROR: -32603,
  TASK_NOT_FOUND: -32001,
  TASK_ALREADY_COMPLETE: -32002,
  AGENT_UNAVAILABLE: -32003,
  AUTHENTICATION_REQUIRED: -32004,
} as const

export interface JSONRPCRequest {
  jsonrpc: '2.0'
  id: string | number
  method: string
  params?: unknown
}

export interface JSONRPCResponse {
  jsonrpc: '2.0'
  id: string | number | null
  result?: unknown
  error?: {
    code: number
    message: string
    data?: unknown
  }
}

export interface MessageSendParams {
  message: Message
  configuration?: {
    acceptedOutputModes?: string[]
    historyLength?: number
    pushNotificationConfig?: PushNotificationConfig
  }
}

export interface TaskIdParams {
  id: string
  historyLength?: number
}

export interface PushNotificationSetParams {
  id: string
  pushNotificationConfig: PushNotificationConfig
}

export function createResponse(id: string | number | null, result: unknown): JSONRPCResponse {
  return { jsonrpc: '2.0', id, result }
}

export function createError(
  id: string | number | null,
  code: number,
  message: string,
  data?: unknown
): JSONRPCResponse {
  return { jsonrpc: '2.0', id, error: { code, message, data } }
}

export function isJSONRPCRequest(obj: unknown): obj is JSONRPCRequest {
  if (!obj || typeof obj !== 'object') return false
  const r = obj as Record<string, unknown>
  return r.jsonrpc === '2.0' && typeof r.method === 'string' && r.id !== undefined
}

export function generateTaskId(): string {
  return uuidv4()
}

export function createTaskStatus(state: TaskState): { state: TaskState; timestamp: string } {
  return { state, timestamp: new Date().toISOString() }
}

export function formatTaskResponse(task: Task, historyLength?: number): Task {
  if (historyLength !== undefined && task.history) {
    return {
      ...task,
      history: task.history.slice(-historyLength),
    }
  }
  return task
}

export interface ExecuteRequestConfig {
  workflowId: string
  apiKey?: string | null
  stream?: boolean
}

export interface ExecuteRequestResult {
  url: string
  headers: Record<string, string>
  useInternalAuth: boolean
}

export async function buildExecuteRequest(
  config: ExecuteRequestConfig
): Promise<ExecuteRequestResult> {
  const url = `${getBaseUrl()}/api/workflows/${config.workflowId}/execute`
  const headers: Record<string, string> = { 'Content-Type': 'application/json' }
  let useInternalAuth = false

  if (config.apiKey) {
    headers['X-API-Key'] = config.apiKey
  } else {
    const internalToken = await generateInternalToken()
    headers.Authorization = `Bearer ${internalToken}`
    useInternalAuth = true
  }

  if (config.stream) {
    headers['X-Stream-Response'] = 'true'
  }

  return { url, headers, useInternalAuth }
}

export function extractAgentContent(executeResult: {
  output?: { content?: string; [key: string]: unknown }
  error?: string
}): string {
  return (
    executeResult.output?.content ||
    (typeof executeResult.output === 'object'
      ? JSON.stringify(executeResult.output)
      : String(executeResult.output || executeResult.error || 'Task completed'))
  )
}

export function buildTaskResponse(params: {
  taskId: string
  contextId: string
  state: TaskState
  history: Message[]
  artifacts?: Artifact[]
}): Task {
  return {
    kind: 'task',
    id: params.taskId,
    contextId: params.contextId,
    status: createTaskStatus(params.state),
    history: params.history,
    artifacts: params.artifacts || [],
  }
}
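
A sketch of how these helpers compose into a JSON-RPC dispatch path (only `tasks/get` is handled here, and `lookupTask` is an assumed persistence helper that is not part of this file):

```ts
// Sketch: dispatching a tasks/get request with the helpers above.
declare function lookupTask(id: string): Promise<Task | null> // assumed, not in this diff

async function handleRpc(body: unknown): Promise<JSONRPCResponse> {
  if (!isJSONRPCRequest(body)) {
    return createError(null, A2A_ERROR_CODES.INVALID_REQUEST, 'Invalid JSON-RPC request')
  }
  if (body.method !== A2A_METHODS.TASKS_GET) {
    return createError(body.id, A2A_ERROR_CODES.METHOD_NOT_FOUND, `Unknown method: ${body.method}`)
  }
  const { id, historyLength } = body.params as TaskIdParams
  const task = await lookupTask(id)
  if (!task) {
    return createError(body.id, A2A_ERROR_CODES.TASK_NOT_FOUND, 'Task not found')
  }
  // Trim history to the requested length before returning the task.
  return createResponse(body.id, formatTaskResponse(task, historyLength))
}
```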

@@ -1,11 +1,12 @@
  import { db } from '@sim/db'
- import { memory, permissions, workspace } from '@sim/db/schema'
+ import { memory } from '@sim/db/schema'
  import { createLogger } from '@sim/logger'
  import { and, eq } from 'drizzle-orm'
  import { type NextRequest, NextResponse } from 'next/server'
  import { z } from 'zod'
  import { checkHybridAuth } from '@/lib/auth/hybrid'
  import { generateRequestId } from '@/lib/core/utils/request'
+ import { checkWorkspaceAccess } from '@/lib/workspaces/permissions/utils'

  const logger = createLogger('MemoryByIdAPI')

@@ -29,46 +30,6 @@ const memoryPutBodySchema = z.object({
    workspaceId: z.string().uuid('Invalid workspace ID format'),
  })

- async function checkWorkspaceAccess(
-   workspaceId: string,
-   userId: string
- ): Promise<{ hasAccess: boolean; canWrite: boolean }> {
-   const [workspaceRow] = await db
-     .select({ ownerId: workspace.ownerId })
-     .from(workspace)
-     .where(eq(workspace.id, workspaceId))
-     .limit(1)
-
-   if (!workspaceRow) {
-     return { hasAccess: false, canWrite: false }
-   }
-
-   if (workspaceRow.ownerId === userId) {
-     return { hasAccess: true, canWrite: true }
-   }
-
-   const [permissionRow] = await db
-     .select({ permissionType: permissions.permissionType })
-     .from(permissions)
-     .where(
-       and(
-         eq(permissions.userId, userId),
-         eq(permissions.entityType, 'workspace'),
-         eq(permissions.entityId, workspaceId)
-       )
-     )
-     .limit(1)
-
-   if (!permissionRow) {
-     return { hasAccess: false, canWrite: false }
-   }
-
-   return {
-     hasAccess: true,
-     canWrite: permissionRow.permissionType === 'write' || permissionRow.permissionType === 'admin',
-   }
- }

  async function validateMemoryAccess(
    request: NextRequest,
    workspaceId: string,

@@ -86,8 +47,8 @@ async function validateMemoryAccess(
    }
  }

- const { hasAccess, canWrite } = await checkWorkspaceAccess(workspaceId, authResult.userId)
- if (!hasAccess) {
+ const access = await checkWorkspaceAccess(workspaceId, authResult.userId)
+ if (!access.exists || !access.hasAccess) {
    return {
      error: NextResponse.json(
        { success: false, error: { message: 'Workspace not found' } },

@@ -96,7 +57,7 @@
    }
  }

- if (action === 'write' && !canWrite) {
+ if (action === 'write' && !access.canWrite) {
    return {
      error: NextResponse.json(
        { success: false, error: { message: 'Write access denied' } },

@@ -1,56 +1,17 @@
  import { db } from '@sim/db'
- import { memory, permissions, workspace } from '@sim/db/schema'
+ import { memory } from '@sim/db/schema'
  import { createLogger } from '@sim/logger'
  import { and, eq, isNull, like } from 'drizzle-orm'
  import { type NextRequest, NextResponse } from 'next/server'
  import { checkHybridAuth } from '@/lib/auth/hybrid'
  import { generateRequestId } from '@/lib/core/utils/request'
+ import { checkWorkspaceAccess } from '@/lib/workspaces/permissions/utils'

  const logger = createLogger('MemoryAPI')

  export const dynamic = 'force-dynamic'
  export const runtime = 'nodejs'

- async function checkWorkspaceAccess(
-   workspaceId: string,
-   userId: string
- ): Promise<{ hasAccess: boolean; canWrite: boolean }> {
-   const [workspaceRow] = await db
-     .select({ ownerId: workspace.ownerId })
-     .from(workspace)
-     .where(eq(workspace.id, workspaceId))
-     .limit(1)
-
-   if (!workspaceRow) {
-     return { hasAccess: false, canWrite: false }
-   }
-
-   if (workspaceRow.ownerId === userId) {
-     return { hasAccess: true, canWrite: true }
-   }
-
-   const [permissionRow] = await db
-     .select({ permissionType: permissions.permissionType })
-     .from(permissions)
-     .where(
-       and(
-         eq(permissions.userId, userId),
-         eq(permissions.entityType, 'workspace'),
-         eq(permissions.entityId, workspaceId)
-       )
-     )
-     .limit(1)
-
-   if (!permissionRow) {
-     return { hasAccess: false, canWrite: false }
-   }
-
-   return {
-     hasAccess: true,
-     canWrite: permissionRow.permissionType === 'write' || permissionRow.permissionType === 'admin',
-   }
- }

  export async function GET(request: NextRequest) {
    const requestId = generateRequestId()

@@ -76,8 +37,14 @@ export async function GET(request: NextRequest) {
    )
  }

- const { hasAccess } = await checkWorkspaceAccess(workspaceId, authResult.userId)
- if (!hasAccess) {
+ const access = await checkWorkspaceAccess(workspaceId, authResult.userId)
+ if (!access.exists) {
    return NextResponse.json(
      { success: false, error: { message: 'Workspace not found' } },
      { status: 404 }
    )
  }
+ if (!access.hasAccess) {
+   return NextResponse.json(
+     { success: false, error: { message: 'Access denied to this workspace' } },
+     { status: 403 }

@@ -155,15 +122,21 @@ export async function POST(request: NextRequest) {
    )
  }

- const { hasAccess, canWrite } = await checkWorkspaceAccess(workspaceId, authResult.userId)
- if (!hasAccess) {
+ const access = await checkWorkspaceAccess(workspaceId, authResult.userId)
+ if (!access.exists) {
    return NextResponse.json(
      { success: false, error: { message: 'Workspace not found' } },
      { status: 404 }
    )
  }
+ if (!access.hasAccess) {
+   return NextResponse.json(
+     { success: false, error: { message: 'Access denied to this workspace' } },
+     { status: 403 }
+   )
+ }

- if (!canWrite) {
+ if (!access.canWrite) {
    return NextResponse.json(
      { success: false, error: { message: 'Write access denied to this workspace' } },
      { status: 403 }

@@ -282,15 +255,21 @@ export async function DELETE(request: NextRequest) {
    )
  }

- const { hasAccess, canWrite } = await checkWorkspaceAccess(workspaceId, authResult.userId)
- if (!hasAccess) {
+ const access = await checkWorkspaceAccess(workspaceId, authResult.userId)
+ if (!access.exists) {
    return NextResponse.json(
      { success: false, error: { message: 'Workspace not found' } },
      { status: 404 }
    )
  }
+ if (!access.hasAccess) {
+   return NextResponse.json(
+     { success: false, error: { message: 'Access denied to this workspace' } },
+     { status: 403 }
+   )
+ }

- if (!canWrite) {
+ if (!access.canWrite) {
    return NextResponse.json(
      { success: false, error: { message: 'Write access denied to this workspace' } },
      { status: 403 }
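
The call sites above imply that the shared `checkWorkspaceAccess` helper in `@/lib/workspaces/permissions/utils` returns `exists`, `hasAccess`, and `canWrite` flags. A sketch of the inferred shape (the actual implementation is not part of this changeset):

```ts
// Inferred from usage in this diff; the real helper lives in
// @/lib/workspaces/permissions/utils and may differ in detail.
interface WorkspaceAccess {
  exists: boolean // the workspace row was found
  hasAccess: boolean // owner, or user holds a workspace permission
  canWrite: boolean // owner, or permission type 'write' / 'admin'
}

declare function checkWorkspaceAccess(
  workspaceId: string,
  userId: string
): Promise<WorkspaceAccess>
```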
apps/sim/app/api/tools/a2a/cancel-task/route.ts (new file, 85 lines)
@@ -0,0 +1,85 @@
import type { Task } from '@a2a-js/sdk'
import { ClientFactory } from '@a2a-js/sdk/client'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'

const logger = createLogger('A2ACancelTaskAPI')

export const dynamic = 'force-dynamic'

const A2ACancelTaskSchema = z.object({
  agentUrl: z.string().min(1, 'Agent URL is required'),
  taskId: z.string().min(1, 'Task ID is required'),
  apiKey: z.string().optional(),
})

export async function POST(request: NextRequest) {
  const requestId = generateRequestId()

  try {
    const authResult = await checkHybridAuth(request, { requireWorkflowId: false })

    if (!authResult.success) {
      logger.warn(`[${requestId}] Unauthorized A2A cancel task attempt`)
      return NextResponse.json(
        {
          success: false,
          error: authResult.error || 'Authentication required',
        },
        { status: 401 }
      )
    }

    const body = await request.json()
    const validatedData = A2ACancelTaskSchema.parse(body)

    logger.info(`[${requestId}] Canceling A2A task`, {
      agentUrl: validatedData.agentUrl,
      taskId: validatedData.taskId,
    })

    const factory = new ClientFactory()
    const client = await factory.createFromUrl(validatedData.agentUrl)

    const task = (await client.cancelTask({ id: validatedData.taskId })) as Task

    logger.info(`[${requestId}] Successfully canceled A2A task`, {
      taskId: validatedData.taskId,
      state: task.status.state,
    })

    return NextResponse.json({
      success: true,
      output: {
        cancelled: true,
        state: task.status.state,
      },
    })
  } catch (error) {
    if (error instanceof z.ZodError) {
      logger.warn(`[${requestId}] Invalid A2A cancel task request`, {
        errors: error.errors,
      })
      return NextResponse.json(
        {
          success: false,
          error: 'Invalid request data',
          details: error.errors,
        },
        { status: 400 }
      )
    }

    logger.error(`[${requestId}] Error canceling A2A task:`, error)
    return NextResponse.json(
      {
        success: false,
        error: error instanceof Error ? error.message : 'Failed to cancel task',
      },
      { status: 500 }
    )
  }
}
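
A hedged sketch of calling this tool route directly, matching the schema and response shape in the handler above (URL, task ID, and auth header are placeholders):

```ts
// Hypothetical client call to the cancel-task tool route.
const res = await fetch('https://sim.example.com/api/tools/a2a/cancel-task', {
  method: 'POST',
  headers: {
    'Content-Type': 'application/json',
    'X-API-Key': 'YOUR_SIM_API_KEY', // assumed header; any credential checkHybridAuth accepts works
  },
  body: JSON.stringify({
    agentUrl: 'https://agent.example.com/a2a',
    taskId: 'TASK_ID',
  }),
})
const data = await res.json()
// On success: { success: true, output: { cancelled: true, state: <post-cancel task state> } }
```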
apps/sim/app/api/tools/a2a/delete-push-notification/route.ts (new file, 95 lines)
@@ -0,0 +1,95 @@
import { ClientFactory } from '@a2a-js/sdk/client'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'

export const dynamic = 'force-dynamic'

const logger = createLogger('A2ADeletePushNotificationAPI')

const A2ADeletePushNotificationSchema = z.object({
  agentUrl: z.string().min(1, 'Agent URL is required'),
  taskId: z.string().min(1, 'Task ID is required'),
  pushNotificationConfigId: z.string().optional(),
  apiKey: z.string().optional(),
})

export async function POST(request: NextRequest) {
  const requestId = generateRequestId()

  try {
    const authResult = await checkHybridAuth(request, { requireWorkflowId: false })

    if (!authResult.success) {
      logger.warn(
        `[${requestId}] Unauthorized A2A delete push notification attempt: ${authResult.error}`
      )
      return NextResponse.json(
        {
          success: false,
          error: authResult.error || 'Authentication required',
        },
        { status: 401 }
      )
    }

    logger.info(
      `[${requestId}] Authenticated A2A delete push notification request via ${authResult.authType}`,
      {
        userId: authResult.userId,
      }
    )

    const body = await request.json()
    const validatedData = A2ADeletePushNotificationSchema.parse(body)

    logger.info(`[${requestId}] Deleting A2A push notification config`, {
      agentUrl: validatedData.agentUrl,
      taskId: validatedData.taskId,
      pushNotificationConfigId: validatedData.pushNotificationConfigId,
    })

    const factory = new ClientFactory()
    const client = await factory.createFromUrl(validatedData.agentUrl)

    await client.deleteTaskPushNotificationConfig({
      id: validatedData.taskId,
      pushNotificationConfigId: validatedData.pushNotificationConfigId || validatedData.taskId,
    })

    logger.info(`[${requestId}] Push notification config deleted successfully`, {
      taskId: validatedData.taskId,
    })

    return NextResponse.json({
      success: true,
      output: {
        success: true,
      },
    })
  } catch (error) {
    if (error instanceof z.ZodError) {
      logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
      return NextResponse.json(
        {
          success: false,
          error: 'Invalid request data',
          details: error.errors,
        },
        { status: 400 }
      )
    }

    logger.error(`[${requestId}] Error deleting A2A push notification:`, error)

    return NextResponse.json(
      {
        success: false,
        error: error instanceof Error ? error.message : 'Failed to delete push notification',
      },
      { status: 500 }
    )
  }
}
|
||||
93
apps/sim/app/api/tools/a2a/get-agent-card/route.ts
Normal file
@@ -0,0 +1,93 @@
import { ClientFactory } from '@a2a-js/sdk/client'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'

export const dynamic = 'force-dynamic'

const logger = createLogger('A2AGetAgentCardAPI')

const A2AGetAgentCardSchema = z.object({
  agentUrl: z.string().min(1, 'Agent URL is required'),
  apiKey: z.string().optional(),
})

export async function POST(request: NextRequest) {
  const requestId = generateRequestId()

  try {
    const authResult = await checkHybridAuth(request, { requireWorkflowId: false })

    if (!authResult.success) {
      logger.warn(`[${requestId}] Unauthorized A2A get agent card attempt: ${authResult.error}`)
      return NextResponse.json(
        {
          success: false,
          error: authResult.error || 'Authentication required',
        },
        { status: 401 }
      )
    }

    logger.info(
      `[${requestId}] Authenticated A2A get agent card request via ${authResult.authType}`,
      {
        userId: authResult.userId,
      }
    )

    const body = await request.json()
    const validatedData = A2AGetAgentCardSchema.parse(body)

    logger.info(`[${requestId}] Fetching Agent Card`, {
      agentUrl: validatedData.agentUrl,
    })

    const factory = new ClientFactory()
    const client = await factory.createFromUrl(validatedData.agentUrl)

    const agentCard = await client.getAgentCard()

    logger.info(`[${requestId}] Agent Card fetched successfully`, {
      agentName: agentCard.name,
    })

    return NextResponse.json({
      success: true,
      output: {
        name: agentCard.name,
        description: agentCard.description,
        url: agentCard.url,
        version: agentCard.protocolVersion,
        capabilities: agentCard.capabilities,
        skills: agentCard.skills,
        defaultInputModes: agentCard.defaultInputModes,
        defaultOutputModes: agentCard.defaultOutputModes,
      },
    })
  } catch (error) {
    if (error instanceof z.ZodError) {
      logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
      return NextResponse.json(
        {
          success: false,
          error: 'Invalid request data',
          details: error.errors,
        },
        { status: 400 }
      )
    }

    logger.error(`[${requestId}] Error fetching Agent Card:`, error)

    return NextResponse.json(
      {
        success: false,
        error: error instanceof Error ? error.message : 'Failed to fetch Agent Card',
      },
      { status: 500 }
    )
  }
}
116
apps/sim/app/api/tools/a2a/get-push-notification/route.ts
Normal file
@@ -0,0 +1,116 @@
import { ClientFactory } from '@a2a-js/sdk/client'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'

export const dynamic = 'force-dynamic'

const logger = createLogger('A2AGetPushNotificationAPI')

const A2AGetPushNotificationSchema = z.object({
  agentUrl: z.string().min(1, 'Agent URL is required'),
  taskId: z.string().min(1, 'Task ID is required'),
  apiKey: z.string().optional(),
})

export async function POST(request: NextRequest) {
  const requestId = generateRequestId()

  try {
    const authResult = await checkHybridAuth(request, { requireWorkflowId: false })

    if (!authResult.success) {
      logger.warn(
        `[${requestId}] Unauthorized A2A get push notification attempt: ${authResult.error}`
      )
      return NextResponse.json(
        {
          success: false,
          error: authResult.error || 'Authentication required',
        },
        { status: 401 }
      )
    }

    logger.info(
      `[${requestId}] Authenticated A2A get push notification request via ${authResult.authType}`,
      {
        userId: authResult.userId,
      }
    )

    const body = await request.json()
    const validatedData = A2AGetPushNotificationSchema.parse(body)

    logger.info(`[${requestId}] Getting push notification config`, {
      agentUrl: validatedData.agentUrl,
      taskId: validatedData.taskId,
    })

    const factory = new ClientFactory()
    const client = await factory.createFromUrl(validatedData.agentUrl)

    const result = await client.getTaskPushNotificationConfig({
      id: validatedData.taskId,
    })

    if (!result || !result.pushNotificationConfig) {
      logger.info(`[${requestId}] No push notification config found for task`, {
        taskId: validatedData.taskId,
      })
      return NextResponse.json({
        success: true,
        output: {
          exists: false,
        },
      })
    }

    logger.info(`[${requestId}] Push notification config retrieved successfully`, {
      taskId: validatedData.taskId,
    })

    return NextResponse.json({
      success: true,
      output: {
        url: result.pushNotificationConfig.url,
        token: result.pushNotificationConfig.token,
        exists: true,
      },
    })
  } catch (error) {
    if (error instanceof z.ZodError) {
      logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
      return NextResponse.json(
        {
          success: false,
          error: 'Invalid request data',
          details: error.errors,
        },
        { status: 400 }
      )
    }

    if (error instanceof Error && error.message.includes('not found')) {
      logger.info(`[${requestId}] Task not found, returning exists: false`)
      return NextResponse.json({
        success: true,
        output: {
          exists: false,
        },
      })
    }

    logger.error(`[${requestId}] Error getting A2A push notification:`, error)

    return NextResponse.json(
      {
        success: false,
        error: error instanceof Error ? error.message : 'Failed to get push notification',
      },
      { status: 500 }
    )
  }
}
96
apps/sim/app/api/tools/a2a/get-task/route.ts
Normal file
@@ -0,0 +1,96 @@
import type { Task } from '@a2a-js/sdk'
import { ClientFactory } from '@a2a-js/sdk/client'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'

export const dynamic = 'force-dynamic'

const logger = createLogger('A2AGetTaskAPI')

const A2AGetTaskSchema = z.object({
  agentUrl: z.string().min(1, 'Agent URL is required'),
  taskId: z.string().min(1, 'Task ID is required'),
  apiKey: z.string().optional(),
  historyLength: z.number().optional(),
})

export async function POST(request: NextRequest) {
  const requestId = generateRequestId()

  try {
    const authResult = await checkHybridAuth(request, { requireWorkflowId: false })

    if (!authResult.success) {
      logger.warn(`[${requestId}] Unauthorized A2A get task attempt: ${authResult.error}`)
      return NextResponse.json(
        {
          success: false,
          error: authResult.error || 'Authentication required',
        },
        { status: 401 }
      )
    }

    logger.info(`[${requestId}] Authenticated A2A get task request via ${authResult.authType}`, {
      userId: authResult.userId,
    })

    const body = await request.json()
    const validatedData = A2AGetTaskSchema.parse(body)

    logger.info(`[${requestId}] Getting A2A task`, {
      agentUrl: validatedData.agentUrl,
      taskId: validatedData.taskId,
      historyLength: validatedData.historyLength,
    })

    const factory = new ClientFactory()
    const client = await factory.createFromUrl(validatedData.agentUrl)

    const task = (await client.getTask({
      id: validatedData.taskId,
      historyLength: validatedData.historyLength,
    })) as Task

    logger.info(`[${requestId}] Successfully retrieved A2A task`, {
      taskId: task.id,
      state: task.status.state,
    })

    return NextResponse.json({
      success: true,
      output: {
        taskId: task.id,
        contextId: task.contextId,
        state: task.status.state,
        artifacts: task.artifacts,
        history: task.history,
      },
    })
  } catch (error) {
    if (error instanceof z.ZodError) {
      logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
      return NextResponse.json(
        {
          success: false,
          error: 'Invalid request data',
          details: error.errors,
        },
        { status: 400 }
      )
    }

    logger.error(`[${requestId}] Error getting A2A task:`, error)

    return NextResponse.json(
      {
        success: false,
        error: error instanceof Error ? error.message : 'Failed to get task',
      },
      { status: 500 }
    )
  }
}
121
apps/sim/app/api/tools/a2a/resubscribe/route.ts
Normal file
@@ -0,0 +1,121 @@
import type {
  Artifact,
  Message,
  Task,
  TaskArtifactUpdateEvent,
  TaskState,
  TaskStatusUpdateEvent,
} from '@a2a-js/sdk'
import { ClientFactory } from '@a2a-js/sdk/client'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { extractTextContent, isTerminalState } from '@/lib/a2a/utils'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'

const logger = createLogger('A2AResubscribeAPI')

export const dynamic = 'force-dynamic'

const A2AResubscribeSchema = z.object({
  agentUrl: z.string().min(1, 'Agent URL is required'),
  taskId: z.string().min(1, 'Task ID is required'),
  apiKey: z.string().optional(),
})

export async function POST(request: NextRequest) {
  const requestId = generateRequestId()

  try {
    const authResult = await checkHybridAuth(request, { requireWorkflowId: false })

    if (!authResult.success) {
      logger.warn(`[${requestId}] Unauthorized A2A resubscribe attempt`)
      return NextResponse.json(
        {
          success: false,
          error: authResult.error || 'Authentication required',
        },
        { status: 401 }
      )
    }

    const body = await request.json()
    const validatedData = A2AResubscribeSchema.parse(body)

    const factory = new ClientFactory()
    const client = await factory.createFromUrl(validatedData.agentUrl)

    const stream = client.resubscribeTask({ id: validatedData.taskId })

    let taskId = validatedData.taskId
    let contextId: string | undefined
    let state: TaskState = 'working'
    let content = ''
    let artifacts: Artifact[] = []
    let history: Message[] = []

    for await (const event of stream) {
      if (event.kind === 'message') {
        const msg = event as Message
        content = extractTextContent(msg)
        taskId = msg.taskId || taskId
        contextId = msg.contextId || contextId
        state = 'completed'
      } else if (event.kind === 'task') {
        const task = event as Task
        taskId = task.id
        contextId = task.contextId
        state = task.status.state
        artifacts = task.artifacts || []
        history = task.history || []
        const lastAgentMessage = history.filter((m) => m.role === 'agent').pop()
        if (lastAgentMessage) {
          content = extractTextContent(lastAgentMessage)
        }
      } else if ('status' in event) {
        const statusEvent = event as TaskStatusUpdateEvent
        state = statusEvent.status.state
      } else if ('artifact' in event) {
        const artifactEvent = event as TaskArtifactUpdateEvent
        artifacts.push(artifactEvent.artifact)
      }
    }

    logger.info(`[${requestId}] Successfully resubscribed to A2A task ${taskId}`)

    return NextResponse.json({
      success: true,
      output: {
        taskId,
        contextId,
        state,
        isRunning: !isTerminalState(state),
        artifacts,
        history,
      },
    })
  } catch (error) {
    if (error instanceof z.ZodError) {
      logger.warn(`[${requestId}] Invalid A2A resubscribe data`, { errors: error.errors })
      return NextResponse.json(
        {
          success: false,
          error: 'Invalid request data',
          details: error.errors,
        },
        { status: 400 }
      )
    }

    logger.error(`[${requestId}] Error resubscribing to A2A task:`, error)
    return NextResponse.json(
      {
        success: false,
        error: error instanceof Error ? error.message : 'Failed to resubscribe',
      },
      { status: 500 }
    )
  }
}
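The resubscribe route above and the send-message routes below lean on `extractTextContent` and `isTerminalState` from `@/lib/a2a/utils`, which this diff does not include. A minimal sketch of what those helpers are assumed to do, inferred from how the routes use them (the exact terminal-state list is an assumption):

import type { Message, TaskState } from '@a2a-js/sdk'

// Assumed terminal A2A task states: the task will not change state again.
// The real list lives in '@/lib/a2a/utils', which is not part of this diff.
const TERMINAL_STATES: TaskState[] = ['completed', 'failed', 'canceled', 'rejected']

export function isTerminalState(state: TaskState): boolean {
  return TERMINAL_STATES.includes(state)
}

// Joins the text parts of a message, ignoring file and data parts.
export function extractTextContent(message: Message): string {
  return message.parts
    .filter((part) => part.kind === 'text')
    .map((part) => (part as { kind: 'text'; text: string }).text)
    .join('\n')
}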
152
apps/sim/app/api/tools/a2a/send-message-stream/route.ts
Normal file
@@ -0,0 +1,152 @@
import type {
  Artifact,
  Message,
  Task,
  TaskArtifactUpdateEvent,
  TaskState,
  TaskStatusUpdateEvent,
} from '@a2a-js/sdk'
import { ClientFactory } from '@a2a-js/sdk/client'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { extractTextContent, isTerminalState } from '@/lib/a2a/utils'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'

export const dynamic = 'force-dynamic'

const logger = createLogger('A2ASendMessageStreamAPI')

const A2ASendMessageStreamSchema = z.object({
  agentUrl: z.string().min(1, 'Agent URL is required'),
  message: z.string().min(1, 'Message is required'),
  taskId: z.string().optional(),
  contextId: z.string().optional(),
  apiKey: z.string().optional(),
})

export async function POST(request: NextRequest) {
  const requestId = generateRequestId()

  try {
    const authResult = await checkHybridAuth(request, { requireWorkflowId: false })

    if (!authResult.success) {
      logger.warn(
        `[${requestId}] Unauthorized A2A send message stream attempt: ${authResult.error}`
      )
      return NextResponse.json(
        {
          success: false,
          error: authResult.error || 'Authentication required',
        },
        { status: 401 }
      )
    }

    logger.info(
      `[${requestId}] Authenticated A2A send message stream request via ${authResult.authType}`,
      {
        userId: authResult.userId,
      }
    )

    const body = await request.json()
    const validatedData = A2ASendMessageStreamSchema.parse(body)

    logger.info(`[${requestId}] Sending A2A streaming message`, {
      agentUrl: validatedData.agentUrl,
      hasTaskId: !!validatedData.taskId,
      hasContextId: !!validatedData.contextId,
    })

    const factory = new ClientFactory()
    const client = await factory.createFromUrl(validatedData.agentUrl)

    const message: Message = {
      kind: 'message',
      messageId: crypto.randomUUID(),
      role: 'user',
      parts: [{ kind: 'text', text: validatedData.message }],
      ...(validatedData.taskId && { taskId: validatedData.taskId }),
      ...(validatedData.contextId && { contextId: validatedData.contextId }),
    }

    const stream = client.sendMessageStream({ message })

    let taskId = ''
    let contextId: string | undefined
    let state: TaskState = 'working'
    let content = ''
    let artifacts: Artifact[] = []
    let history: Message[] = []

    for await (const event of stream) {
      if (event.kind === 'message') {
        const msg = event as Message
        content = extractTextContent(msg)
        taskId = msg.taskId || taskId
        contextId = msg.contextId || contextId
        state = 'completed'
      } else if (event.kind === 'task') {
        const task = event as Task
        taskId = task.id
        contextId = task.contextId
        state = task.status.state
        artifacts = task.artifacts || []
        history = task.history || []
        const lastAgentMessage = history.filter((m) => m.role === 'agent').pop()
        if (lastAgentMessage) {
          content = extractTextContent(lastAgentMessage)
        }
      } else if ('status' in event) {
        const statusEvent = event as TaskStatusUpdateEvent
        state = statusEvent.status.state
      } else if ('artifact' in event) {
        const artifactEvent = event as TaskArtifactUpdateEvent
        artifacts.push(artifactEvent.artifact)
      }
    }

    logger.info(`[${requestId}] A2A streaming message completed`, {
      taskId,
      state,
      artifactCount: artifacts.length,
    })

    return NextResponse.json({
      success: isTerminalState(state) && state !== 'failed',
      output: {
        content,
        taskId,
        contextId,
        state,
        artifacts,
        history,
      },
    })
  } catch (error) {
    if (error instanceof z.ZodError) {
      logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
      return NextResponse.json(
        {
          success: false,
          error: 'Invalid request data',
          details: error.errors,
        },
        { status: 400 }
      )
    }

    logger.error(`[${requestId}] Error in A2A streaming:`, error)

    return NextResponse.json(
      {
        success: false,
        error: error instanceof Error ? error.message : 'Streaming failed',
      },
      { status: 500 }
    )
  }
}
128
apps/sim/app/api/tools/a2a/send-message/route.ts
Normal file
@@ -0,0 +1,128 @@
import type { Message, Task } from '@a2a-js/sdk'
import { ClientFactory } from '@a2a-js/sdk/client'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { extractTextContent, isTerminalState } from '@/lib/a2a/utils'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'

export const dynamic = 'force-dynamic'

const logger = createLogger('A2ASendMessageAPI')

const A2ASendMessageSchema = z.object({
  agentUrl: z.string().min(1, 'Agent URL is required'),
  message: z.string().min(1, 'Message is required'),
  taskId: z.string().optional(),
  contextId: z.string().optional(),
  apiKey: z.string().optional(),
})

export async function POST(request: NextRequest) {
  const requestId = generateRequestId()

  try {
    const authResult = await checkHybridAuth(request, { requireWorkflowId: false })

    if (!authResult.success) {
      logger.warn(`[${requestId}] Unauthorized A2A send message attempt: ${authResult.error}`)
      return NextResponse.json(
        {
          success: false,
          error: authResult.error || 'Authentication required',
        },
        { status: 401 }
      )
    }

    logger.info(
      `[${requestId}] Authenticated A2A send message request via ${authResult.authType}`,
      {
        userId: authResult.userId,
      }
    )

    const body = await request.json()
    const validatedData = A2ASendMessageSchema.parse(body)

    logger.info(`[${requestId}] Sending A2A message`, {
      agentUrl: validatedData.agentUrl,
      hasTaskId: !!validatedData.taskId,
      hasContextId: !!validatedData.contextId,
    })

    const factory = new ClientFactory()
    const client = await factory.createFromUrl(validatedData.agentUrl)

    const message: Message = {
      kind: 'message',
      messageId: crypto.randomUUID(),
      role: 'user',
      parts: [{ kind: 'text', text: validatedData.message }],
      ...(validatedData.taskId && { taskId: validatedData.taskId }),
      ...(validatedData.contextId && { contextId: validatedData.contextId }),
    }

    const result = await client.sendMessage({ message })

    if (result.kind === 'message') {
      const responseMessage = result as Message

      logger.info(`[${requestId}] A2A message sent successfully (message response)`)

      return NextResponse.json({
        success: true,
        output: {
          content: extractTextContent(responseMessage),
          taskId: responseMessage.taskId || '',
          contextId: responseMessage.contextId,
          state: 'completed',
        },
      })
    }

    const task = result as Task
    const lastAgentMessage = task.history?.filter((m) => m.role === 'agent').pop()
    const content = lastAgentMessage ? extractTextContent(lastAgentMessage) : ''

    logger.info(`[${requestId}] A2A message sent successfully (task response)`, {
      taskId: task.id,
      state: task.status.state,
    })

    return NextResponse.json({
      success: isTerminalState(task.status.state) && task.status.state !== 'failed',
      output: {
        content,
        taskId: task.id,
        contextId: task.contextId,
        state: task.status.state,
        artifacts: task.artifacts,
        history: task.history,
      },
    })
  } catch (error) {
    if (error instanceof z.ZodError) {
      logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
      return NextResponse.json(
        {
          success: false,
          error: 'Invalid request data',
          details: error.errors,
        },
        { status: 400 }
      )
    }

    logger.error(`[${requestId}] Error sending A2A message:`, error)

    return NextResponse.json(
      {
        success: false,
        error: error instanceof Error ? error.message : 'Internal server error',
      },
      { status: 500 }
    )
  }
}
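A quick usage sketch of the send-message route above. The payload fields follow the zod schema in the route; the agent URL is a placeholder, not a real endpoint:

// Hypothetical client-side call to /api/tools/a2a/send-message.
// 'https://agent.example.com' is a placeholder A2A agent URL.
const res = await fetch('/api/tools/a2a/send-message', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    agentUrl: 'https://agent.example.com',
    message: 'Summarize the latest build failures',
    // taskId / contextId are optional and continue an existing task or context
  }),
})

const { success, output } = await res.json()
if (success) {
  // output.state is the A2A task state; output.content is the agent's reply text
  console.log(output.state, output.content)
}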
94
apps/sim/app/api/tools/a2a/set-push-notification/route.ts
Normal file
@@ -0,0 +1,94 @@
import { ClientFactory } from '@a2a-js/sdk/client'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'

export const dynamic = 'force-dynamic'

const logger = createLogger('A2ASetPushNotificationAPI')

const A2ASetPushNotificationSchema = z.object({
  agentUrl: z.string().min(1, 'Agent URL is required'),
  taskId: z.string().min(1, 'Task ID is required'),
  webhookUrl: z.string().min(1, 'Webhook URL is required'),
  token: z.string().optional(),
  apiKey: z.string().optional(),
})

export async function POST(request: NextRequest) {
  const requestId = generateRequestId()

  try {
    const authResult = await checkHybridAuth(request, { requireWorkflowId: false })

    if (!authResult.success) {
      logger.warn(`[${requestId}] Unauthorized A2A set push notification attempt`, {
        error: authResult.error || 'Authentication required',
      })
      return NextResponse.json(
        {
          success: false,
          error: authResult.error || 'Authentication required',
        },
        { status: 401 }
      )
    }

    const body = await request.json()
    const validatedData = A2ASetPushNotificationSchema.parse(body)

    logger.info(`[${requestId}] A2A set push notification request`, {
      agentUrl: validatedData.agentUrl,
      taskId: validatedData.taskId,
      webhookUrl: validatedData.webhookUrl,
    })

    const factory = new ClientFactory()
    const client = await factory.createFromUrl(validatedData.agentUrl)

    const result = await client.setTaskPushNotificationConfig({
      taskId: validatedData.taskId,
      pushNotificationConfig: {
        url: validatedData.webhookUrl,
        token: validatedData.token,
      },
    })

    logger.info(`[${requestId}] A2A set push notification successful`, {
      taskId: validatedData.taskId,
    })

    return NextResponse.json({
      success: true,
      output: {
        url: result.pushNotificationConfig.url,
        token: result.pushNotificationConfig.token,
        success: true,
      },
    })
  } catch (error) {
    if (error instanceof z.ZodError) {
      logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
      return NextResponse.json(
        {
          success: false,
          error: 'Invalid request data',
          details: error.errors,
        },
        { status: 400 }
      )
    }

    logger.error(`[${requestId}] Error setting A2A push notification:`, error)

    return NextResponse.json(
      {
        success: false,
        error: error instanceof Error ? error.message : 'Failed to set push notification',
      },
      { status: 500 }
    )
  }
}
73
apps/sim/app/api/tools/dynamodb/introspect/route.ts
Normal file
@@ -0,0 +1,73 @@
import { randomUUID } from 'crypto'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { createRawDynamoDBClient, describeTable, listTables } from '@/app/api/tools/dynamodb/utils'

const logger = createLogger('DynamoDBIntrospectAPI')

const IntrospectSchema = z.object({
  region: z.string().min(1, 'AWS region is required'),
  accessKeyId: z.string().min(1, 'AWS access key ID is required'),
  secretAccessKey: z.string().min(1, 'AWS secret access key is required'),
  tableName: z.string().optional(),
})

export async function POST(request: NextRequest) {
  const requestId = randomUUID().slice(0, 8)

  try {
    const body = await request.json()
    const params = IntrospectSchema.parse(body)

    logger.info(`[${requestId}] Introspecting DynamoDB in region ${params.region}`)

    const client = createRawDynamoDBClient({
      region: params.region,
      accessKeyId: params.accessKeyId,
      secretAccessKey: params.secretAccessKey,
    })

    try {
      const { tables } = await listTables(client)

      if (params.tableName) {
        logger.info(`[${requestId}] Describing table: ${params.tableName}`)
        const { tableDetails } = await describeTable(client, params.tableName)

        logger.info(`[${requestId}] Table description completed for '${params.tableName}'`)

        return NextResponse.json({
          message: `Table '${params.tableName}' described successfully.`,
          tables,
          tableDetails,
        })
      }

      logger.info(`[${requestId}] Listed ${tables.length} tables`)

      return NextResponse.json({
        message: `Found ${tables.length} table(s) in region '${params.region}'.`,
        tables,
      })
    } finally {
      client.destroy()
    }
  } catch (error) {
    if (error instanceof z.ZodError) {
      logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
      return NextResponse.json(
        { error: 'Invalid request data', details: error.errors },
        { status: 400 }
      )
    }

    const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred'
    logger.error(`[${requestId}] DynamoDB introspection failed:`, error)

    return NextResponse.json(
      { error: `DynamoDB introspection failed: ${errorMessage}` },
      { status: 500 }
    )
  }
}
@@ -1,4 +1,4 @@
import { DynamoDBClient } from '@aws-sdk/client-dynamodb'
import { DescribeTableCommand, DynamoDBClient, ListTablesCommand } from '@aws-sdk/client-dynamodb'
import {
  DeleteCommand,
  DynamoDBDocumentClient,
@@ -8,7 +8,7 @@ import {
  ScanCommand,
  UpdateCommand,
} from '@aws-sdk/lib-dynamodb'
import type { DynamoDBConnectionConfig } from '@/tools/dynamodb/types'
import type { DynamoDBConnectionConfig, DynamoDBTableSchema } from '@/tools/dynamodb/types'

export function createDynamoDBClient(config: DynamoDBConnectionConfig): DynamoDBDocumentClient {
  const client = new DynamoDBClient({
@@ -172,3 +172,99 @@ export async function deleteItem(
  await client.send(command)
  return { success: true }
}

/**
 * Creates a raw DynamoDB client for operations that don't require DocumentClient
 */
export function createRawDynamoDBClient(config: DynamoDBConnectionConfig): DynamoDBClient {
  return new DynamoDBClient({
    region: config.region,
    credentials: {
      accessKeyId: config.accessKeyId,
      secretAccessKey: config.secretAccessKey,
    },
  })
}

/**
 * Lists all DynamoDB tables in the configured region
 */
export async function listTables(client: DynamoDBClient): Promise<{ tables: string[] }> {
  const tables: string[] = []
  let exclusiveStartTableName: string | undefined

  do {
    const command = new ListTablesCommand({
      ExclusiveStartTableName: exclusiveStartTableName,
    })

    const response = await client.send(command)
    if (response.TableNames) {
      tables.push(...response.TableNames)
    }
    exclusiveStartTableName = response.LastEvaluatedTableName
  } while (exclusiveStartTableName)

  return { tables }
}

/**
 * Describes a specific DynamoDB table and returns its schema information
 */
export async function describeTable(
  client: DynamoDBClient,
  tableName: string
): Promise<{ tableDetails: DynamoDBTableSchema }> {
  const command = new DescribeTableCommand({
    TableName: tableName,
  })

  const response = await client.send(command)
  const table = response.Table

  if (!table) {
    throw new Error(`Table '${tableName}' not found`)
  }

  const tableDetails: DynamoDBTableSchema = {
    tableName: table.TableName || tableName,
    tableStatus: table.TableStatus || 'UNKNOWN',
    keySchema:
      table.KeySchema?.map((key) => ({
        attributeName: key.AttributeName || '',
        keyType: (key.KeyType as 'HASH' | 'RANGE') || 'HASH',
      })) || [],
    attributeDefinitions:
      table.AttributeDefinitions?.map((attr) => ({
        attributeName: attr.AttributeName || '',
        attributeType: (attr.AttributeType as 'S' | 'N' | 'B') || 'S',
      })) || [],
    globalSecondaryIndexes:
      table.GlobalSecondaryIndexes?.map((gsi) => ({
        indexName: gsi.IndexName || '',
        keySchema:
          gsi.KeySchema?.map((key) => ({
            attributeName: key.AttributeName || '',
            keyType: (key.KeyType as 'HASH' | 'RANGE') || 'HASH',
          })) || [],
        projectionType: gsi.Projection?.ProjectionType || 'ALL',
        indexStatus: gsi.IndexStatus || 'UNKNOWN',
      })) || [],
    localSecondaryIndexes:
      table.LocalSecondaryIndexes?.map((lsi) => ({
        indexName: lsi.IndexName || '',
        keySchema:
          lsi.KeySchema?.map((key) => ({
            attributeName: key.AttributeName || '',
            keyType: (key.KeyType as 'HASH' | 'RANGE') || 'HASH',
          })) || [],
        projectionType: lsi.Projection?.ProjectionType || 'ALL',
        indexStatus: 'ACTIVE',
      })) || [],
    itemCount: Number(table.ItemCount) || 0,
    tableSizeBytes: Number(table.TableSizeBytes) || 0,
    billingMode: table.BillingModeSummary?.BillingMode || 'PROVISIONED',
  }

  return { tableDetails }
}
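A short sketch of how the new helpers compose, mirroring the route handler above; the region and credentials are placeholders, not real values:

// Minimal usage sketch of createRawDynamoDBClient, listTables, and describeTable.
import { createRawDynamoDBClient, describeTable, listTables } from '@/app/api/tools/dynamodb/utils'

async function inspect() {
  const client = createRawDynamoDBClient({
    region: 'us-east-1', // placeholder
    accessKeyId: 'AKIA...', // placeholder
    secretAccessKey: '...', // placeholder
  })
  try {
    // listTables paginates internally via LastEvaluatedTableName
    const { tables } = await listTables(client)
    for (const name of tables) {
      const { tableDetails } = await describeTable(client, name)
      console.log(`${name}: ${tableDetails.itemCount} items, ${tableDetails.keySchema.length} key parts`)
    }
  } finally {
    client.destroy() // release sockets, as the route handler does
  }
}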
73
apps/sim/app/api/tools/mongodb/introspect/route.ts
Normal file
@@ -0,0 +1,73 @@
import { randomUUID } from 'crypto'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { createMongoDBConnection, executeIntrospect } from '../utils'

const logger = createLogger('MongoDBIntrospectAPI')

const IntrospectSchema = z.object({
  host: z.string().min(1, 'Host is required'),
  port: z.coerce.number().int().positive('Port must be a positive integer'),
  database: z.string().optional(),
  username: z.string().optional(),
  password: z.string().optional(),
  authSource: z.string().optional(),
  ssl: z.enum(['disabled', 'required', 'preferred']).default('preferred'),
})

export async function POST(request: NextRequest) {
  const requestId = randomUUID().slice(0, 8)
  let client = null

  try {
    const body = await request.json()
    const params = IntrospectSchema.parse(body)

    logger.info(
      `[${requestId}] Introspecting MongoDB at ${params.host}:${params.port}${params.database ? `/${params.database}` : ''}`
    )

    client = await createMongoDBConnection({
      host: params.host,
      port: params.port,
      database: params.database || 'admin',
      username: params.username,
      password: params.password,
      authSource: params.authSource,
      ssl: params.ssl,
    })

    const result = await executeIntrospect(client, params.database)

    logger.info(
      `[${requestId}] Introspection completed: ${result.databases.length} databases, ${result.collections.length} collections`
    )

    return NextResponse.json({
      message: result.message,
      databases: result.databases,
      collections: result.collections,
    })
  } catch (error) {
    if (error instanceof z.ZodError) {
      logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
      return NextResponse.json(
        { error: 'Invalid request data', details: error.errors },
        { status: 400 }
      )
    }

    const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred'
    logger.error(`[${requestId}] MongoDB introspect failed:`, error)

    return NextResponse.json(
      { error: `MongoDB introspect failed: ${errorMessage}` },
      { status: 500 }
    )
  } finally {
    if (client) {
      await client.close()
    }
  }
}
@@ -1,5 +1,5 @@
import { MongoClient } from 'mongodb'
import type { MongoDBConnectionConfig } from '@/tools/mongodb/types'
import type { MongoDBCollectionInfo, MongoDBConnectionConfig } from '@/tools/mongodb/types'

export async function createMongoDBConnection(config: MongoDBConnectionConfig) {
  const credentials =
@@ -129,3 +129,59 @@ export function sanitizeCollectionName(name: string): string {
  }
  return name
}

/**
 * Introspect MongoDB to get databases, collections, and indexes
 */
export async function executeIntrospect(
  client: MongoClient,
  database?: string
): Promise<{
  message: string
  databases: string[]
  collections: MongoDBCollectionInfo[]
}> {
  const databases: string[] = []
  const collections: MongoDBCollectionInfo[] = []

  if (database) {
    databases.push(database)
    const db = client.db(database)
    const collectionList = await db.listCollections().toArray()

    for (const collInfo of collectionList) {
      const coll = db.collection(collInfo.name)
      const indexes = await coll.indexes()
      const documentCount = await coll.estimatedDocumentCount()

      collections.push({
        name: collInfo.name,
        type: collInfo.type || 'collection',
        documentCount,
        indexes: indexes.map((idx) => ({
          name: idx.name || '',
          key: idx.key as Record<string, number>,
          unique: idx.unique || false,
          sparse: idx.sparse,
        })),
      })
    }
  } else {
    const admin = client.db().admin()
    const dbList = await admin.listDatabases()

    for (const dbInfo of dbList.databases) {
      databases.push(dbInfo.name)
    }
  }

  const message = database
    ? `Found ${collections.length} collections in database '${database}'`
    : `Found ${databases.length} databases`

  return {
    message,
    databases,
    collections,
  }
}
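A minimal sketch of calling the new executeIntrospect helper directly, outside the route handler; the connection string, database name, and import path alias are placeholders/assumptions:

import { MongoClient } from 'mongodb'
import { executeIntrospect } from '@/app/api/tools/mongodb/utils'

async function main() {
  const client = new MongoClient('mongodb://localhost:27017') // placeholder URI
  try {
    await client.connect()
    // With a database name: per-collection index and count details.
    // Without one: database names only (collections stays empty).
    const { message, collections } = await executeIntrospect(client, 'mydb')
    console.log(message)
    for (const coll of collections) {
      console.log(`${coll.name}: ${coll.documentCount} docs, ${coll.indexes.length} indexes`)
    }
  } finally {
    await client.close()
  }
}

main().catch(console.error)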
70
apps/sim/app/api/tools/mysql/introspect/route.ts
Normal file
@@ -0,0 +1,70 @@
import { randomUUID } from 'crypto'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { createMySQLConnection, executeIntrospect } from '@/app/api/tools/mysql/utils'

const logger = createLogger('MySQLIntrospectAPI')

const IntrospectSchema = z.object({
  host: z.string().min(1, 'Host is required'),
  port: z.coerce.number().int().positive('Port must be a positive integer'),
  database: z.string().min(1, 'Database name is required'),
  username: z.string().min(1, 'Username is required'),
  password: z.string().min(1, 'Password is required'),
  ssl: z.enum(['disabled', 'required', 'preferred']).default('preferred'),
})

export async function POST(request: NextRequest) {
  const requestId = randomUUID().slice(0, 8)

  try {
    const body = await request.json()
    const params = IntrospectSchema.parse(body)

    logger.info(
      `[${requestId}] Introspecting MySQL schema on ${params.host}:${params.port}/${params.database}`
    )

    const connection = await createMySQLConnection({
      host: params.host,
      port: params.port,
      database: params.database,
      username: params.username,
      password: params.password,
      ssl: params.ssl,
    })

    try {
      const result = await executeIntrospect(connection, params.database)

      logger.info(
        `[${requestId}] Introspection completed successfully, found ${result.tables.length} tables`
      )

      return NextResponse.json({
        message: `Schema introspection completed. Found ${result.tables.length} table(s) in database '${params.database}'.`,
        tables: result.tables,
        databases: result.databases,
      })
    } finally {
      await connection.end()
    }
  } catch (error) {
    if (error instanceof z.ZodError) {
      logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
      return NextResponse.json(
        { error: 'Invalid request data', details: error.errors },
        { status: 400 }
      )
    }

    const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred'
    logger.error(`[${requestId}] MySQL introspection failed:`, error)

    return NextResponse.json(
      { error: `MySQL introspection failed: ${errorMessage}` },
      { status: 500 }
    )
  }
}
@@ -166,3 +166,146 @@ function sanitizeSingleIdentifier(identifier: string): string {

  return `\`${cleaned}\``
}

export interface MySQLIntrospectionResult {
  tables: Array<{
    name: string
    database: string
    columns: Array<{
      name: string
      type: string
      nullable: boolean
      default: string | null
      isPrimaryKey: boolean
      isForeignKey: boolean
      autoIncrement: boolean
      references?: {
        table: string
        column: string
      }
    }>
    primaryKey: string[]
    foreignKeys: Array<{
      column: string
      referencesTable: string
      referencesColumn: string
    }>
    indexes: Array<{
      name: string
      columns: string[]
      unique: boolean
    }>
  }>
  databases: string[]
}

export async function executeIntrospect(
  connection: mysql.Connection,
  databaseName: string
): Promise<MySQLIntrospectionResult> {
  const [databasesRows] = await connection.execute<mysql.RowDataPacket[]>(
    `SELECT SCHEMA_NAME FROM INFORMATION_SCHEMA.SCHEMATA
     WHERE SCHEMA_NAME NOT IN ('mysql', 'information_schema', 'performance_schema', 'sys')
     ORDER BY SCHEMA_NAME`
  )
  const databases = databasesRows.map((row) => row.SCHEMA_NAME)

  const [tablesRows] = await connection.execute<mysql.RowDataPacket[]>(
    `SELECT TABLE_NAME FROM INFORMATION_SCHEMA.TABLES
     WHERE TABLE_SCHEMA = ? AND TABLE_TYPE = 'BASE TABLE'
     ORDER BY TABLE_NAME`,
    [databaseName]
  )

  const tables = []

  for (const tableRow of tablesRows) {
    const tableName = tableRow.TABLE_NAME

    const [columnsRows] = await connection.execute<mysql.RowDataPacket[]>(
      `SELECT COLUMN_NAME, DATA_TYPE, COLUMN_TYPE, IS_NULLABLE, COLUMN_DEFAULT, EXTRA
       FROM INFORMATION_SCHEMA.COLUMNS
       WHERE TABLE_SCHEMA = ? AND TABLE_NAME = ?
       ORDER BY ORDINAL_POSITION`,
      [databaseName, tableName]
    )

    const [pkRows] = await connection.execute<mysql.RowDataPacket[]>(
      `SELECT COLUMN_NAME FROM INFORMATION_SCHEMA.KEY_COLUMN_USAGE
       WHERE TABLE_SCHEMA = ? AND TABLE_NAME = ? AND CONSTRAINT_NAME = 'PRIMARY'
       ORDER BY ORDINAL_POSITION`,
      [databaseName, tableName]
    )
    const primaryKeyColumns = pkRows.map((row) => row.COLUMN_NAME)

    const [fkRows] = await connection.execute<mysql.RowDataPacket[]>(
      `SELECT kcu.COLUMN_NAME, kcu.REFERENCED_TABLE_NAME, kcu.REFERENCED_COLUMN_NAME
       FROM INFORMATION_SCHEMA.KEY_COLUMN_USAGE kcu
       WHERE kcu.TABLE_SCHEMA = ? AND kcu.TABLE_NAME = ? AND kcu.REFERENCED_TABLE_NAME IS NOT NULL`,
      [databaseName, tableName]
    )

    const foreignKeys = fkRows.map((row) => ({
      column: row.COLUMN_NAME,
      referencesTable: row.REFERENCED_TABLE_NAME,
      referencesColumn: row.REFERENCED_COLUMN_NAME,
    }))

    const fkColumnSet = new Set(foreignKeys.map((fk) => fk.column))

    const [indexRows] = await connection.execute<mysql.RowDataPacket[]>(
      `SELECT INDEX_NAME, COLUMN_NAME, SEQ_IN_INDEX, NON_UNIQUE
       FROM INFORMATION_SCHEMA.STATISTICS
       WHERE TABLE_SCHEMA = ? AND TABLE_NAME = ? AND INDEX_NAME != 'PRIMARY'
       ORDER BY INDEX_NAME, SEQ_IN_INDEX`,
      [databaseName, tableName]
    )

    const indexMap = new Map<string, { name: string; columns: string[]; unique: boolean }>()
    for (const row of indexRows) {
      const indexName = row.INDEX_NAME
      if (!indexMap.has(indexName)) {
        indexMap.set(indexName, {
          name: indexName,
          columns: [],
          unique: row.NON_UNIQUE === 0,
        })
      }
      indexMap.get(indexName)!.columns.push(row.COLUMN_NAME)
    }
    const indexes = Array.from(indexMap.values())

    const columns = columnsRows.map((col) => {
      const columnName = col.COLUMN_NAME
      const fk = foreignKeys.find((f) => f.column === columnName)
      const isAutoIncrement = col.EXTRA?.toLowerCase().includes('auto_increment') || false

      return {
        name: columnName,
        type: col.COLUMN_TYPE || col.DATA_TYPE,
        nullable: col.IS_NULLABLE === 'YES',
        default: col.COLUMN_DEFAULT,
        isPrimaryKey: primaryKeyColumns.includes(columnName),
        isForeignKey: fkColumnSet.has(columnName),
        autoIncrement: isAutoIncrement,
        ...(fk && {
          references: {
            table: fk.referencesTable,
            column: fk.referencesColumn,
          },
        }),
      }
    })

    tables.push({
      name: tableName,
      database: databaseName,
      columns,
      primaryKey: primaryKeyColumns,
      foreignKeys,
      indexes,
    })
  }

  return { tables, databases }
}
199
apps/sim/app/api/tools/neo4j/introspect/route.ts
Normal file
@@ -0,0 +1,199 @@
import { randomUUID } from 'crypto'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { createNeo4jDriver } from '@/app/api/tools/neo4j/utils'
import type { Neo4jNodeSchema, Neo4jRelationshipSchema } from '@/tools/neo4j/types'

const logger = createLogger('Neo4jIntrospectAPI')

const IntrospectSchema = z.object({
  host: z.string().min(1, 'Host is required'),
  port: z.coerce.number().int().positive('Port must be a positive integer'),
  database: z.string().min(1, 'Database name is required'),
  username: z.string().min(1, 'Username is required'),
  password: z.string().min(1, 'Password is required'),
  encryption: z.enum(['enabled', 'disabled']).default('disabled'),
})

export async function POST(request: NextRequest) {
  const requestId = randomUUID().slice(0, 8)
  let driver = null
  let session = null

  try {
    const body = await request.json()
    const params = IntrospectSchema.parse(body)

    logger.info(
      `[${requestId}] Introspecting Neo4j database at ${params.host}:${params.port}/${params.database}`
    )

    driver = await createNeo4jDriver({
      host: params.host,
      port: params.port,
      database: params.database,
      username: params.username,
      password: params.password,
      encryption: params.encryption,
    })

    session = driver.session({ database: params.database })

    const labelsResult = await session.run(
      'CALL db.labels() YIELD label RETURN label ORDER BY label'
    )
    const labels: string[] = labelsResult.records.map((record) => record.get('label') as string)

    const relationshipTypesResult = await session.run(
      'CALL db.relationshipTypes() YIELD relationshipType RETURN relationshipType ORDER BY relationshipType'
    )
    const relationshipTypes: string[] = relationshipTypesResult.records.map(
      (record) => record.get('relationshipType') as string
    )

    const nodeSchemas: Neo4jNodeSchema[] = []
    try {
      const nodePropertiesResult = await session.run(
        'CALL db.schema.nodeTypeProperties() YIELD nodeLabels, propertyName, propertyTypes RETURN nodeLabels, propertyName, propertyTypes'
      )

      const nodePropertiesMap = new Map<string, Array<{ name: string; types: string[] }>>()

      for (const record of nodePropertiesResult.records) {
        const nodeLabels = record.get('nodeLabels') as string[]
        const propertyName = record.get('propertyName') as string
        const propertyTypes = record.get('propertyTypes') as string[]

        const labelKey = nodeLabels.join(':')
        if (!nodePropertiesMap.has(labelKey)) {
          nodePropertiesMap.set(labelKey, [])
        }
        nodePropertiesMap.get(labelKey)!.push({ name: propertyName, types: propertyTypes })
      }

      for (const [labelKey, properties] of nodePropertiesMap) {
        nodeSchemas.push({
          label: labelKey,
          properties,
        })
      }
    } catch (nodePropsError) {
      logger.warn(
        `[${requestId}] Could not fetch node properties (may not be supported in this Neo4j version): ${nodePropsError}`
      )
    }

    const relationshipSchemas: Neo4jRelationshipSchema[] = []
    try {
      const relPropertiesResult = await session.run(
        'CALL db.schema.relTypeProperties() YIELD relationshipType, propertyName, propertyTypes RETURN relationshipType, propertyName, propertyTypes'
      )

      const relPropertiesMap = new Map<string, Array<{ name: string; types: string[] }>>()

      for (const record of relPropertiesResult.records) {
        const relType = record.get('relationshipType') as string
        const propertyName = record.get('propertyName') as string | null
        const propertyTypes = record.get('propertyTypes') as string[]

        if (!relPropertiesMap.has(relType)) {
          relPropertiesMap.set(relType, [])
        }
        if (propertyName) {
          relPropertiesMap.get(relType)!.push({ name: propertyName, types: propertyTypes })
        }
      }

      for (const [relType, properties] of relPropertiesMap) {
        relationshipSchemas.push({
          type: relType,
          properties,
        })
      }
    } catch (relPropsError) {
      logger.warn(
        `[${requestId}] Could not fetch relationship properties (may not be supported in this Neo4j version): ${relPropsError}`
      )
    }

    const constraints: Array<{
      name: string
      type: string
      entityType: string
      properties: string[]
    }> = []
    try {
      const constraintsResult = await session.run('SHOW CONSTRAINTS')

      for (const record of constraintsResult.records) {
        const name = record.get('name') as string
        const type = record.get('type') as string
        const entityType = record.get('entityType') as string
        const properties = (record.get('properties') as string[]) || []

        constraints.push({ name, type, entityType, properties })
      }
    } catch (constraintsError) {
      logger.warn(
        `[${requestId}] Could not fetch constraints (may not be supported in this Neo4j version): ${constraintsError}`
      )
    }

    const indexes: Array<{ name: string; type: string; entityType: string; properties: string[] }> =
      []
    try {
      const indexesResult = await session.run('SHOW INDEXES')

      for (const record of indexesResult.records) {
        const name = record.get('name') as string
        const type = record.get('type') as string
        const entityType = record.get('entityType') as string
        const properties = (record.get('properties') as string[]) || []

        indexes.push({ name, type, entityType, properties })
      }
    } catch (indexesError) {
      logger.warn(
        `[${requestId}] Could not fetch indexes (may not be supported in this Neo4j version): ${indexesError}`
      )
    }

    logger.info(
      `[${requestId}] Introspection completed: ${labels.length} labels, ${relationshipTypes.length} relationship types, ${constraints.length} constraints, ${indexes.length} indexes`
    )

    return NextResponse.json({
      message: `Database introspection completed: found ${labels.length} labels, ${relationshipTypes.length} relationship types, ${nodeSchemas.length} node schemas, ${relationshipSchemas.length} relationship schemas, ${constraints.length} constraints, ${indexes.length} indexes`,
      labels,
      relationshipTypes,
      nodeSchemas,
      relationshipSchemas,
      constraints,
      indexes,
    })
  } catch (error) {
    if (error instanceof z.ZodError) {
      logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
      return NextResponse.json(
        { error: 'Invalid request data', details: error.errors },
        { status: 400 }
      )
    }

    const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred'
    logger.error(`[${requestId}] Neo4j introspection failed:`, error)

    return NextResponse.json(
      { error: `Neo4j introspection failed: ${errorMessage}` },
      { status: 500 }
    )
  } finally {
    if (session) {
      await session.close()
    }
    if (driver) {
      await driver.close()
    }
  }
}
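The route imports createNeo4jDriver from '@/app/api/tools/neo4j/utils', which is not part of this diff. A hypothetical sketch of what it might look like, assuming the official neo4j-driver package and a bolt connection scheme (the scheme choice and config shape are assumptions):

import neo4j, { type Driver } from 'neo4j-driver'

// Assumed config shape, matching the fields the route passes in.
interface Neo4jConnectionConfig {
  host: string
  port: number
  database: string
  username: string
  password: string
  encryption: 'enabled' | 'disabled'
}

export async function createNeo4jDriver(config: Neo4jConnectionConfig): Promise<Driver> {
  // 'bolt+s' enables TLS; plain 'bolt' matches encryption: 'disabled'
  const scheme = config.encryption === 'enabled' ? 'bolt+s' : 'bolt'
  const driver = neo4j.driver(
    `${scheme}://${config.host}:${config.port}`,
    neo4j.auth.basic(config.username, config.password)
  )
  // Fail fast on bad credentials or an unreachable server
  await driver.verifyConnectivity({ database: config.database })
  return driver
}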
71
apps/sim/app/api/tools/postgresql/introspect/route.ts
Normal file
71
apps/sim/app/api/tools/postgresql/introspect/route.ts
Normal file
@@ -0,0 +1,71 @@
|
||||
import { randomUUID } from 'crypto'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { createPostgresConnection, executeIntrospect } from '@/app/api/tools/postgresql/utils'

const logger = createLogger('PostgreSQLIntrospectAPI')

const IntrospectSchema = z.object({
  host: z.string().min(1, 'Host is required'),
  port: z.coerce.number().int().positive('Port must be a positive integer'),
  database: z.string().min(1, 'Database name is required'),
  username: z.string().min(1, 'Username is required'),
  password: z.string().min(1, 'Password is required'),
  ssl: z.enum(['disabled', 'required', 'preferred']).default('preferred'),
  schema: z.string().default('public'),
})

export async function POST(request: NextRequest) {
  const requestId = randomUUID().slice(0, 8)

  try {
    const body = await request.json()
    const params = IntrospectSchema.parse(body)

    logger.info(
      `[${requestId}] Introspecting PostgreSQL schema on ${params.host}:${params.port}/${params.database}`
    )

    const sql = createPostgresConnection({
      host: params.host,
      port: params.port,
      database: params.database,
      username: params.username,
      password: params.password,
      ssl: params.ssl,
    })

    try {
      const result = await executeIntrospect(sql, params.schema)

      logger.info(
        `[${requestId}] Introspection completed successfully, found ${result.tables.length} tables`
      )

      return NextResponse.json({
        message: `Schema introspection completed. Found ${result.tables.length} table(s) in schema '${params.schema}'.`,
        tables: result.tables,
        schemas: result.schemas,
      })
    } finally {
      await sql.end()
    }
  } catch (error) {
    if (error instanceof z.ZodError) {
      logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
      return NextResponse.json(
        { error: 'Invalid request data', details: error.errors },
        { status: 400 }
      )
    }

    const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred'
    logger.error(`[${requestId}] PostgreSQL introspection failed:`, error)

    return NextResponse.json(
      { error: `PostgreSQL introspection failed: ${errorMessage}` },
      { status: 500 }
    )
  }
}
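Since this `route.ts` lives under the Next.js app router, the handler is reachable at `/api/tools/postgresql/introspect`. A minimal sketch of exercising it, with placeholder credentials:

```ts
// Sketch: POST body mirrors the zod IntrospectSchema defined above.
const res = await fetch('/api/tools/postgresql/introspect', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    host: 'localhost',
    port: 5432, // coerced to a positive integer by z.coerce.number()
    database: 'mydb',
    username: 'postgres',
    password: 'secret', // placeholder
    ssl: 'preferred', // 'disabled' | 'required' | 'preferred' (default)
    schema: 'public', // defaults to 'public'
  }),
})
const { message, tables, schemas } = await res.json()
```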
@@ -187,3 +187,184 @@ export async function executeDelete(
    rowCount,
  }
}

export interface IntrospectionResult {
  tables: Array<{
    name: string
    schema: string
    columns: Array<{
      name: string
      type: string
      nullable: boolean
      default: string | null
      isPrimaryKey: boolean
      isForeignKey: boolean
      references?: {
        table: string
        column: string
      }
    }>
    primaryKey: string[]
    foreignKeys: Array<{
      column: string
      referencesTable: string
      referencesColumn: string
    }>
    indexes: Array<{
      name: string
      columns: string[]
      unique: boolean
    }>
  }>
  schemas: string[]
}

export async function executeIntrospect(
  sql: any,
  schemaName = 'public'
): Promise<IntrospectionResult> {
  const schemasResult = await sql`
    SELECT schema_name
    FROM information_schema.schemata
    WHERE schema_name NOT IN ('pg_catalog', 'information_schema', 'pg_toast')
    ORDER BY schema_name
  `
  const schemas = schemasResult.map((row: { schema_name: string }) => row.schema_name)

  const tablesResult = await sql`
    SELECT table_name, table_schema
    FROM information_schema.tables
    WHERE table_schema = ${schemaName}
      AND table_type = 'BASE TABLE'
    ORDER BY table_name
  `

  const tables = []

  for (const tableRow of tablesResult) {
    const tableName = tableRow.table_name
    const tableSchema = tableRow.table_schema

    const columnsResult = await sql`
      SELECT
        c.column_name,
        c.data_type,
        c.is_nullable,
        c.column_default,
        c.udt_name
      FROM information_schema.columns c
      WHERE c.table_schema = ${tableSchema}
        AND c.table_name = ${tableName}
      ORDER BY c.ordinal_position
    `

    const pkResult = await sql`
      SELECT kcu.column_name
      FROM information_schema.table_constraints tc
      JOIN information_schema.key_column_usage kcu
        ON tc.constraint_name = kcu.constraint_name
        AND tc.table_schema = kcu.table_schema
      WHERE tc.constraint_type = 'PRIMARY KEY'
        AND tc.table_schema = ${tableSchema}
        AND tc.table_name = ${tableName}
    `
    const primaryKeyColumns = pkResult.map((row: { column_name: string }) => row.column_name)

    const fkResult = await sql`
      SELECT
        kcu.column_name,
        ccu.table_name AS foreign_table_name,
        ccu.column_name AS foreign_column_name
      FROM information_schema.table_constraints tc
      JOIN information_schema.key_column_usage kcu
        ON tc.constraint_name = kcu.constraint_name
        AND tc.table_schema = kcu.table_schema
      JOIN information_schema.constraint_column_usage ccu
        ON ccu.constraint_name = tc.constraint_name
        AND ccu.table_schema = tc.table_schema
      WHERE tc.constraint_type = 'FOREIGN KEY'
        AND tc.table_schema = ${tableSchema}
        AND tc.table_name = ${tableName}
    `

    const foreignKeys = fkResult.map(
      (row: { column_name: string; foreign_table_name: string; foreign_column_name: string }) => ({
        column: row.column_name,
        referencesTable: row.foreign_table_name,
        referencesColumn: row.foreign_column_name,
      })
    )

    const fkColumnSet = new Set(foreignKeys.map((fk: { column: string }) => fk.column))

    const indexesResult = await sql`
      SELECT
        i.relname AS index_name,
        a.attname AS column_name,
        ix.indisunique AS is_unique
      FROM pg_class t
      JOIN pg_index ix ON t.oid = ix.indrelid
      JOIN pg_class i ON i.oid = ix.indexrelid
      JOIN pg_attribute a ON a.attrelid = t.oid AND a.attnum = ANY(ix.indkey)
      JOIN pg_namespace n ON n.oid = t.relnamespace
      WHERE t.relkind = 'r'
        AND n.nspname = ${tableSchema}
        AND t.relname = ${tableName}
        AND NOT ix.indisprimary
      ORDER BY i.relname, a.attnum
    `

    const indexMap = new Map<string, { name: string; columns: string[]; unique: boolean }>()
    for (const row of indexesResult) {
      const indexName = row.index_name
      if (!indexMap.has(indexName)) {
        indexMap.set(indexName, {
          name: indexName,
          columns: [],
          unique: row.is_unique,
        })
      }
      indexMap.get(indexName)!.columns.push(row.column_name)
    }
    const indexes = Array.from(indexMap.values())

    const columns = columnsResult.map(
      (col: {
        column_name: string
        data_type: string
        is_nullable: string
        column_default: string | null
        udt_name: string
      }) => {
        const columnName = col.column_name
        const fk = foreignKeys.find((f: { column: string }) => f.column === columnName)

        return {
          name: columnName,
          type: col.data_type === 'USER-DEFINED' ? col.udt_name : col.data_type,
          nullable: col.is_nullable === 'YES',
          default: col.column_default,
          isPrimaryKey: primaryKeyColumns.includes(columnName),
          isForeignKey: fkColumnSet.has(columnName),
          ...(fk && {
            references: {
              table: fk.referencesTable,
              column: fk.referencesColumn,
            },
          }),
        }
      }
    )

    tables.push({
      name: tableName,
      schema: tableSchema,
      columns,
      primaryKey: primaryKeyColumns,
      foreignKeys,
      indexes,
    })
  }

  return { tables, schemas }
}
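As a quick orientation to the `IntrospectionResult` shape this helper returns, a consumer could flatten it into one summary line per table. This is a sketch only; the formatting choices are illustrative:

```ts
import type { IntrospectionResult } from '@/app/api/tools/postgresql/utils'

// Sketch: summarize each table as "schema.name: N columns, pk(...), N indexes".
function summarize(result: IntrospectionResult): string[] {
  return result.tables.map((t) => {
    const pk = t.primaryKey.join(', ') || 'none'
    return `${t.schema}.${t.name}: ${t.columns.length} columns, pk(${pk}), ${t.indexes.length} indexes`
  })
}
```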
80
apps/sim/app/api/tools/rds/introspect/route.ts
Normal file
@@ -0,0 +1,80 @@
import { randomUUID } from 'crypto'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { createRdsClient, executeIntrospect, type RdsEngine } from '@/app/api/tools/rds/utils'

const logger = createLogger('RDSIntrospectAPI')

const IntrospectSchema = z.object({
  region: z.string().min(1, 'AWS region is required'),
  accessKeyId: z.string().min(1, 'AWS access key ID is required'),
  secretAccessKey: z.string().min(1, 'AWS secret access key is required'),
  resourceArn: z.string().min(1, 'Resource ARN is required'),
  secretArn: z.string().min(1, 'Secret ARN is required'),
  database: z.string().optional(),
  schema: z.string().optional(),
  engine: z.enum(['aurora-postgresql', 'aurora-mysql']).optional(),
})

export async function POST(request: NextRequest) {
  const requestId = randomUUID().slice(0, 8)

  try {
    const body = await request.json()
    const params = IntrospectSchema.parse(body)

    logger.info(
      `[${requestId}] Introspecting RDS Aurora database${params.database ? ` (${params.database})` : ''}`
    )

    const client = createRdsClient({
      region: params.region,
      accessKeyId: params.accessKeyId,
      secretAccessKey: params.secretAccessKey,
      resourceArn: params.resourceArn,
      secretArn: params.secretArn,
      database: params.database,
    })

    try {
      const result = await executeIntrospect(
        client,
        params.resourceArn,
        params.secretArn,
        params.database,
        params.schema,
        params.engine as RdsEngine | undefined
      )

      logger.info(
        `[${requestId}] Introspection completed successfully. Engine: ${result.engine}, found ${result.tables.length} tables`
      )

      return NextResponse.json({
        message: `Schema introspection completed. Engine: ${result.engine}. Found ${result.tables.length} table(s).`,
        engine: result.engine,
        tables: result.tables,
        schemas: result.schemas,
      })
    } finally {
      client.destroy()
    }
  } catch (error) {
    if (error instanceof z.ZodError) {
      logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
      return NextResponse.json(
        { error: 'Invalid request data', details: error.errors },
        { status: 400 }
      )
    }

    const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred'
    logger.error(`[${requestId}] RDS introspection failed:`, error)

    return NextResponse.json(
      { error: `RDS introspection failed: ${errorMessage}` },
      { status: 500 }
    )
  }
}
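Unlike the PostgreSQL route, this one talks to Aurora through the AWS RDS Data API, so the request carries IAM credentials plus cluster and secret ARNs rather than a host and port. A sketch of a request body matching the `IntrospectSchema` above, with placeholder ARNs:

```ts
// Sketch: ARNs and keys are placeholders; database/schema/engine are optional.
const body = {
  region: 'us-east-1',
  accessKeyId: 'AKIA...', // placeholder
  secretAccessKey: '...', // placeholder
  resourceArn: 'arn:aws:rds:us-east-1:123456789012:cluster:my-cluster',
  secretArn: 'arn:aws:secretsmanager:us-east-1:123456789012:secret:my-secret',
  database: 'mydb',
  engine: 'aurora-postgresql' as const, // omit to let detectEngine() infer it
}
await fetch('/api/tools/rds/introspect', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify(body),
})
```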
@@ -241,3 +241,487 @@ export async function executeDelete(

  return executeStatement(client, resourceArn, secretArn, database, sql, parameters)
}
export type RdsEngine = 'aurora-postgresql' | 'aurora-mysql'

export interface RdsIntrospectionResult {
  engine: RdsEngine
  tables: Array<{
    name: string
    schema: string
    columns: Array<{
      name: string
      type: string
      nullable: boolean
      default: string | null
      isPrimaryKey: boolean
      isForeignKey: boolean
      references?: {
        table: string
        column: string
      }
    }>
    primaryKey: string[]
    foreignKeys: Array<{
      column: string
      referencesTable: string
      referencesColumn: string
    }>
    indexes: Array<{
      name: string
      columns: string[]
      unique: boolean
    }>
  }>
  schemas: string[]
}

/**
 * Detects the database engine by querying SELECT VERSION()
 */
export async function detectEngine(
  client: RDSDataClient,
  resourceArn: string,
  secretArn: string,
  database: string | undefined
): Promise<RdsEngine> {
  const result = await executeStatement(
    client,
    resourceArn,
    secretArn,
    database,
    'SELECT VERSION()'
  )

  if (result.rows.length > 0) {
    const versionRow = result.rows[0] as Record<string, unknown>
    const versionValue = Object.values(versionRow)[0]
    const versionString = String(versionValue).toLowerCase()

    if (versionString.includes('postgresql') || versionString.includes('postgres')) {
      return 'aurora-postgresql'
    }
    if (versionString.includes('mysql') || versionString.includes('mariadb')) {
      return 'aurora-mysql'
    }
  }

  throw new Error('Unable to detect database engine. Please specify the engine parameter.')
}
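detectEngine works because both engines answer `SELECT VERSION()` with a string containing an engine marker. A standalone illustration of the same substring matching, with sample version strings:

```ts
// Standalone sketch of the matching logic in detectEngine above.
const classify = (version: string) => {
  const v = version.toLowerCase()
  if (v.includes('postgresql') || v.includes('postgres')) return 'aurora-postgresql'
  if (v.includes('mysql') || v.includes('mariadb')) return 'aurora-mysql'
  throw new Error('Unable to detect database engine. Please specify the engine parameter.')
}

classify('PostgreSQL 15.4 on aarch64-unknown-linux-gnu') // 'aurora-postgresql'
classify('8.0.28 MySQL Community Server')                // 'aurora-mysql'
```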

/**
 * Introspects PostgreSQL schema using INFORMATION_SCHEMA
 */
async function introspectPostgresql(
  client: RDSDataClient,
  resourceArn: string,
  secretArn: string,
  database: string | undefined,
  schemaName: string
): Promise<RdsIntrospectionResult> {
  const schemasResult = await executeStatement(
    client,
    resourceArn,
    secretArn,
    database,
    `SELECT schema_name FROM information_schema.schemata
     WHERE schema_name NOT IN ('pg_catalog', 'information_schema', 'pg_toast')
     ORDER BY schema_name`
  )
  const schemas = schemasResult.rows.map((row) => (row as { schema_name: string }).schema_name)

  const tablesResult = await executeStatement(
    client,
    resourceArn,
    secretArn,
    database,
    `SELECT table_name, table_schema
     FROM information_schema.tables
     WHERE table_schema = :schemaName
       AND table_type = 'BASE TABLE'
     ORDER BY table_name`,
    [{ name: 'schemaName', value: { stringValue: schemaName } }]
  )

  const tables = []

  for (const tableRow of tablesResult.rows) {
    const row = tableRow as { table_name: string; table_schema: string }
    const tableName = row.table_name
    const tableSchema = row.table_schema

    const columnsResult = await executeStatement(
      client,
      resourceArn,
      secretArn,
      database,
      `SELECT
         c.column_name,
         c.data_type,
         c.is_nullable,
         c.column_default,
         c.udt_name
       FROM information_schema.columns c
       WHERE c.table_schema = :tableSchema
         AND c.table_name = :tableName
       ORDER BY c.ordinal_position`,
      [
        { name: 'tableSchema', value: { stringValue: tableSchema } },
        { name: 'tableName', value: { stringValue: tableName } },
      ]
    )

    const pkResult = await executeStatement(
      client,
      resourceArn,
      secretArn,
      database,
      `SELECT kcu.column_name
       FROM information_schema.table_constraints tc
       JOIN information_schema.key_column_usage kcu
         ON tc.constraint_name = kcu.constraint_name
         AND tc.table_schema = kcu.table_schema
       WHERE tc.constraint_type = 'PRIMARY KEY'
         AND tc.table_schema = :tableSchema
         AND tc.table_name = :tableName`,
      [
        { name: 'tableSchema', value: { stringValue: tableSchema } },
        { name: 'tableName', value: { stringValue: tableName } },
      ]
    )
    const primaryKeyColumns = pkResult.rows.map((r) => (r as { column_name: string }).column_name)

    const fkResult = await executeStatement(
      client,
      resourceArn,
      secretArn,
      database,
      `SELECT
         kcu.column_name,
         ccu.table_name AS foreign_table_name,
         ccu.column_name AS foreign_column_name
       FROM information_schema.table_constraints tc
       JOIN information_schema.key_column_usage kcu
         ON tc.constraint_name = kcu.constraint_name
         AND tc.table_schema = kcu.table_schema
       JOIN information_schema.constraint_column_usage ccu
         ON ccu.constraint_name = tc.constraint_name
         AND ccu.table_schema = tc.table_schema
       WHERE tc.constraint_type = 'FOREIGN KEY'
         AND tc.table_schema = :tableSchema
         AND tc.table_name = :tableName`,
      [
        { name: 'tableSchema', value: { stringValue: tableSchema } },
        { name: 'tableName', value: { stringValue: tableName } },
      ]
    )

    const foreignKeys = fkResult.rows.map((r) => {
      const fkRow = r as {
        column_name: string
        foreign_table_name: string
        foreign_column_name: string
      }
      return {
        column: fkRow.column_name,
        referencesTable: fkRow.foreign_table_name,
        referencesColumn: fkRow.foreign_column_name,
      }
    })

    const fkColumnSet = new Set(foreignKeys.map((fk) => fk.column))

    const indexesResult = await executeStatement(
      client,
      resourceArn,
      secretArn,
      database,
      `SELECT
         i.relname AS index_name,
         a.attname AS column_name,
         ix.indisunique AS is_unique
       FROM pg_class t
       JOIN pg_index ix ON t.oid = ix.indrelid
       JOIN pg_class i ON i.oid = ix.indexrelid
       JOIN pg_attribute a ON a.attrelid = t.oid AND a.attnum = ANY(ix.indkey)
       JOIN pg_namespace n ON n.oid = t.relnamespace
       WHERE t.relkind = 'r'
         AND n.nspname = :tableSchema
         AND t.relname = :tableName
         AND NOT ix.indisprimary
       ORDER BY i.relname, a.attnum`,
      [
        { name: 'tableSchema', value: { stringValue: tableSchema } },
        { name: 'tableName', value: { stringValue: tableName } },
      ]
    )

    const indexMap = new Map<string, { name: string; columns: string[]; unique: boolean }>()
    for (const idxRow of indexesResult.rows) {
      const idx = idxRow as { index_name: string; column_name: string; is_unique: boolean }
      const indexName = idx.index_name
      if (!indexMap.has(indexName)) {
        indexMap.set(indexName, {
          name: indexName,
          columns: [],
          unique: idx.is_unique,
        })
      }
      indexMap.get(indexName)!.columns.push(idx.column_name)
    }
    const indexes = Array.from(indexMap.values())

    const columns = columnsResult.rows.map((colRow) => {
      const col = colRow as {
        column_name: string
        data_type: string
        is_nullable: string
        column_default: string | null
        udt_name: string
      }
      const columnName = col.column_name
      const fk = foreignKeys.find((f) => f.column === columnName)

      return {
        name: columnName,
        type: col.data_type === 'USER-DEFINED' ? col.udt_name : col.data_type,
        nullable: col.is_nullable === 'YES',
        default: col.column_default,
        isPrimaryKey: primaryKeyColumns.includes(columnName),
        isForeignKey: fkColumnSet.has(columnName),
        ...(fk && {
          references: {
            table: fk.referencesTable,
            column: fk.referencesColumn,
          },
        }),
      }
    })

    tables.push({
      name: tableName,
      schema: tableSchema,
      columns,
      primaryKey: primaryKeyColumns,
      foreignKeys,
      indexes,
    })
  }

  return { engine: 'aurora-postgresql', tables, schemas }
}

/**
 * Introspects MySQL schema using INFORMATION_SCHEMA
 */
async function introspectMysql(
  client: RDSDataClient,
  resourceArn: string,
  secretArn: string,
  database: string | undefined,
  schemaName: string
): Promise<RdsIntrospectionResult> {
  const schemasResult = await executeStatement(
    client,
    resourceArn,
    secretArn,
    database,
    `SELECT SCHEMA_NAME as schema_name FROM information_schema.SCHEMATA
     WHERE SCHEMA_NAME NOT IN ('mysql', 'information_schema', 'performance_schema', 'sys')
     ORDER BY SCHEMA_NAME`
  )
  const schemas = schemasResult.rows.map((row) => (row as { schema_name: string }).schema_name)

  const tablesResult = await executeStatement(
    client,
    resourceArn,
    secretArn,
    database,
    `SELECT TABLE_NAME as table_name, TABLE_SCHEMA as table_schema
     FROM information_schema.TABLES
     WHERE TABLE_SCHEMA = :schemaName
       AND TABLE_TYPE = 'BASE TABLE'
     ORDER BY TABLE_NAME`,
    [{ name: 'schemaName', value: { stringValue: schemaName } }]
  )

  const tables = []

  for (const tableRow of tablesResult.rows) {
    const row = tableRow as { table_name: string; table_schema: string }
    const tableName = row.table_name
    const tableSchema = row.table_schema

    const columnsResult = await executeStatement(
      client,
      resourceArn,
      secretArn,
      database,
      `SELECT
         COLUMN_NAME as column_name,
         DATA_TYPE as data_type,
         IS_NULLABLE as is_nullable,
         COLUMN_DEFAULT as column_default,
         COLUMN_TYPE as column_type,
         COLUMN_KEY as column_key
       FROM information_schema.COLUMNS
       WHERE TABLE_SCHEMA = :tableSchema
         AND TABLE_NAME = :tableName
       ORDER BY ORDINAL_POSITION`,
      [
        { name: 'tableSchema', value: { stringValue: tableSchema } },
        { name: 'tableName', value: { stringValue: tableName } },
      ]
    )

    const pkResult = await executeStatement(
      client,
      resourceArn,
      secretArn,
      database,
      `SELECT COLUMN_NAME as column_name
       FROM information_schema.KEY_COLUMN_USAGE
       WHERE TABLE_SCHEMA = :tableSchema
         AND TABLE_NAME = :tableName
         AND CONSTRAINT_NAME = 'PRIMARY'
       ORDER BY ORDINAL_POSITION`,
      [
        { name: 'tableSchema', value: { stringValue: tableSchema } },
        { name: 'tableName', value: { stringValue: tableName } },
      ]
    )
    const primaryKeyColumns = pkResult.rows.map((r) => (r as { column_name: string }).column_name)

    const fkResult = await executeStatement(
      client,
      resourceArn,
      secretArn,
      database,
      `SELECT
         kcu.COLUMN_NAME as column_name,
         kcu.REFERENCED_TABLE_NAME as foreign_table_name,
         kcu.REFERENCED_COLUMN_NAME as foreign_column_name
       FROM information_schema.KEY_COLUMN_USAGE kcu
       WHERE kcu.TABLE_SCHEMA = :tableSchema
         AND kcu.TABLE_NAME = :tableName
         AND kcu.REFERENCED_TABLE_NAME IS NOT NULL`,
      [
        { name: 'tableSchema', value: { stringValue: tableSchema } },
        { name: 'tableName', value: { stringValue: tableName } },
      ]
    )

    const foreignKeys = fkResult.rows.map((r) => {
      const fkRow = r as {
        column_name: string
        foreign_table_name: string
        foreign_column_name: string
      }
      return {
        column: fkRow.column_name,
        referencesTable: fkRow.foreign_table_name,
        referencesColumn: fkRow.foreign_column_name,
      }
    })

    const fkColumnSet = new Set(foreignKeys.map((fk) => fk.column))

    const indexesResult = await executeStatement(
      client,
      resourceArn,
      secretArn,
      database,
      `SELECT
         INDEX_NAME as index_name,
         COLUMN_NAME as column_name,
         NON_UNIQUE as non_unique
       FROM information_schema.STATISTICS
       WHERE TABLE_SCHEMA = :tableSchema
         AND TABLE_NAME = :tableName
         AND INDEX_NAME != 'PRIMARY'
       ORDER BY INDEX_NAME, SEQ_IN_INDEX`,
      [
        { name: 'tableSchema', value: { stringValue: tableSchema } },
        { name: 'tableName', value: { stringValue: tableName } },
      ]
    )

    const indexMap = new Map<string, { name: string; columns: string[]; unique: boolean }>()
    for (const idxRow of indexesResult.rows) {
      const idx = idxRow as { index_name: string; column_name: string; non_unique: number }
      const indexName = idx.index_name
      if (!indexMap.has(indexName)) {
        indexMap.set(indexName, {
          name: indexName,
          columns: [],
          unique: idx.non_unique === 0,
        })
      }
      indexMap.get(indexName)!.columns.push(idx.column_name)
    }
    const indexes = Array.from(indexMap.values())

    const columns = columnsResult.rows.map((colRow) => {
      const col = colRow as {
        column_name: string
        data_type: string
        is_nullable: string
        column_default: string | null
        column_type: string
        column_key: string
      }
      const columnName = col.column_name
      const fk = foreignKeys.find((f) => f.column === columnName)

      return {
        name: columnName,
        type: col.column_type || col.data_type,
        nullable: col.is_nullable === 'YES',
        default: col.column_default,
        isPrimaryKey: col.column_key === 'PRI',
        isForeignKey: fkColumnSet.has(columnName),
        ...(fk && {
          references: {
            table: fk.referencesTable,
            column: fk.referencesColumn,
          },
        }),
      }
    })

    tables.push({
      name: tableName,
      schema: tableSchema,
      columns,
      primaryKey: primaryKeyColumns,
      foreignKeys,
      indexes,
    })
  }

  return { engine: 'aurora-mysql', tables, schemas }
}

/**
 * Introspects RDS Aurora database schema with auto-detection of engine type
 */
export async function executeIntrospect(
  client: RDSDataClient,
  resourceArn: string,
  secretArn: string,
  database: string | undefined,
  schemaName?: string,
  engine?: RdsEngine
): Promise<RdsIntrospectionResult> {
  const detectedEngine = engine || (await detectEngine(client, resourceArn, secretArn, database))

  if (detectedEngine === 'aurora-postgresql') {
    const schema = schemaName || 'public'
    return introspectPostgresql(client, resourceArn, secretArn, database, schema)
  }
  const schema = schemaName || database || ''
  if (!schema) {
    throw new Error('Schema or database name is required for MySQL introspection')
  }
  return introspectMysql(client, resourceArn, secretArn, database, schema)
}
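Two call shapes fall out of this dispatcher, sketched below under the assumption that an `RDSDataClient` and the two ARNs are already in scope:

```ts
// Engine omitted: detectEngine() runs first and picks the dialect;
// PostgreSQL defaults the schema to 'public'.
const auto = await executeIntrospect(client, resourceArn, secretArn, 'mydb')

// Engine forced to MySQL: schemaName (or, failing that, database) must be
// non-empty, or the call throws before querying anything.
const forced = await executeIntrospect(
  client,
  resourceArn,
  secretArn,
  undefined,
  'analytics',
  'aurora-mysql'
)
```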

@@ -215,10 +215,10 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
      workflowStateOverride,
    } = validation.data

-    // For API key auth, the entire body is the input (except for our control fields)
+    // For API key and internal JWT auth, the entire body is the input (except for our control fields)
    // For session auth, the input is explicitly provided in the input field
    const input =
-      auth.authType === 'api_key'
+      auth.authType === 'api_key' || auth.authType === 'internal_jwt'
        ? (() => {
            const {
              selectedOutputs,
@@ -226,6 +226,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
              stream,
              useDraftState,
              workflowStateOverride,
+              workflowId: _workflowId, // Also exclude workflowId used for internal JWT auth
              ...rest
            } = body
            return Object.keys(rest).length > 0 ? rest : validatedInput
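To make the control-field stripping concrete: under API-key or internal-JWT auth, the destructured fields above are peeled off the request body and everything left over becomes the workflow input. A hedged illustration (the caller-supplied field names are invented for the example):

```ts
// Illustrative only: mirrors the destructuring in the hunk above.
const body: Record<string, unknown> = {
  selectedOutputs: ['agent1.content'],
  stream: true,
  useDraftState: false,
  workflowId: 'wf_123',
  customerEmail: 'user@example.com', // hypothetical caller-supplied field
  amount: 42,                        // hypothetical caller-supplied field
}
const { selectedOutputs, stream, useDraftState, workflowStateOverride, workflowId, ...rest } = body
// rest === { customerEmail: 'user@example.com', amount: 42 } → used as the input
```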

@@ -1,12 +1,12 @@
 import { db } from '@sim/db'
-import { workflow, workspace } from '@sim/db/schema'
+import { workflow } from '@sim/db/schema'
 import { createLogger } from '@sim/logger'
 import { eq } from 'drizzle-orm'
 import { type NextRequest, NextResponse } from 'next/server'
 import { z } from 'zod'
 import { getSession } from '@/lib/auth'
 import { generateRequestId } from '@/lib/core/utils/request'
-import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
+import { getUserEntityPermissions, workspaceExists } from '@/lib/workspaces/permissions/utils'
 import { verifyWorkspaceMembership } from '@/app/api/workflows/utils'

 const logger = createLogger('WorkflowAPI')
@@ -36,13 +36,9 @@ export async function GET(request: Request) {
   const userId = session.user.id

   if (workspaceId) {
-    const workspaceExists = await db
-      .select({ id: workspace.id })
-      .from(workspace)
-      .where(eq(workspace.id, workspaceId))
-      .then((rows) => rows.length > 0)
+    const wsExists = await workspaceExists(workspaceId)

-    if (!workspaceExists) {
+    if (!wsExists) {
       logger.warn(
         `[${requestId}] Attempt to fetch workflows for non-existent workspace: ${workspaceId}`
       )
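The `workspaceExists` helper itself is not shown in this diff. A minimal sketch consistent with the inline query it replaces might look like the following; this is a hypothetical implementation, not the repo's actual code:

```ts
import { db } from '@sim/db'
import { workspace } from '@sim/db/schema'
import { eq } from 'drizzle-orm'

// Hypothetical: equivalent to the removed inline query above.
export async function workspaceExists(workspaceId: string): Promise<boolean> {
  const rows = await db
    .select({ id: workspace.id })
    .from(workspace)
    .where(eq(workspace.id, workspaceId))
  return rows.length > 0
}
```

Centralizing the check in `@/lib/workspaces/permissions/utils` lets this route, the API-keys route, the BYOK route, and the environment route below all drop their copies of the same drizzle query.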
@@ -1,5 +1,5 @@
 import { db } from '@sim/db'
-import { apiKey, workspace } from '@sim/db/schema'
+import { apiKey } from '@sim/db/schema'
 import { createLogger } from '@sim/logger'
 import { and, eq, inArray } from 'drizzle-orm'
 import { nanoid } from 'nanoid'
@@ -9,7 +9,7 @@ import { createApiKey, getApiKeyDisplayFormat } from '@/lib/api-key/auth'
 import { getSession } from '@/lib/auth'
 import { PlatformEvents } from '@/lib/core/telemetry'
 import { generateRequestId } from '@/lib/core/utils/request'
-import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
+import { getUserEntityPermissions, getWorkspaceById } from '@/lib/workspaces/permissions/utils'

 const logger = createLogger('WorkspaceApiKeysAPI')

@@ -34,8 +34,8 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
   const userId = session.user.id

-  const ws = await db.select().from(workspace).where(eq(workspace.id, workspaceId)).limit(1)
-  if (!ws.length) {
+  const ws = await getWorkspaceById(workspaceId)
+  if (!ws) {
     return NextResponse.json({ error: 'Workspace not found' }, { status: 404 })
   }
@@ -1,5 +1,5 @@
 import { db } from '@sim/db'
-import { workspace, workspaceBYOKKeys } from '@sim/db/schema'
+import { workspaceBYOKKeys } from '@sim/db/schema'
 import { createLogger } from '@sim/logger'
 import { and, eq } from 'drizzle-orm'
 import { nanoid } from 'nanoid'
@@ -10,7 +10,7 @@ import { isEnterpriseOrgAdminOrOwner } from '@/lib/billing/core/subscription'
 import { isHosted } from '@/lib/core/config/feature-flags'
 import { decryptSecret, encryptSecret } from '@/lib/core/security/encryption'
 import { generateRequestId } from '@/lib/core/utils/request'
-import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
+import { getUserEntityPermissions, getWorkspaceById } from '@/lib/workspaces/permissions/utils'

 const logger = createLogger('WorkspaceBYOKKeysAPI')

@@ -48,8 +48,8 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
   const userId = session.user.id

-  const ws = await db.select().from(workspace).where(eq(workspace.id, workspaceId)).limit(1)
-  if (!ws.length) {
+  const ws = await getWorkspaceById(workspaceId)
+  if (!ws) {
     return NextResponse.json({ error: 'Workspace not found' }, { status: 404 })
   }
@@ -1,5 +1,5 @@
 import { db } from '@sim/db'
-import { environment, workspace, workspaceEnvironment } from '@sim/db/schema'
+import { environment, workspaceEnvironment } from '@sim/db/schema'
 import { createLogger } from '@sim/logger'
 import { eq } from 'drizzle-orm'
 import { type NextRequest, NextResponse } from 'next/server'
@@ -7,7 +7,7 @@ import { z } from 'zod'
 import { getSession } from '@/lib/auth'
 import { decryptSecret, encryptSecret } from '@/lib/core/security/encryption'
 import { generateRequestId } from '@/lib/core/utils/request'
-import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
+import { getUserEntityPermissions, getWorkspaceById } from '@/lib/workspaces/permissions/utils'

 const logger = createLogger('WorkspaceEnvironmentAPI')

@@ -33,8 +33,8 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
   const userId = session.user.id

   // Validate workspace exists
-  const ws = await db.select().from(workspace).where(eq(workspace.id, workspaceId)).limit(1)
-  if (!ws.length) {
+  const ws = await getWorkspaceById(workspaceId)
+  if (!ws) {
     return NextResponse.json({ error: 'Workspace not found' }, { status: 404 })
   }
@@ -364,12 +364,30 @@ export default function PlaygroundPage() {
          </VariantRow>
          <VariantRow label='tag variants'>
            <Tag value='valid@email.com' variant='default' />
            <Tag value='secondary-tag' variant='secondary' />
            <Tag value='invalid-email' variant='invalid' />
          </VariantRow>
          <VariantRow label='tag with remove'>
            <Tag value='removable@tag.com' variant='default' onRemove={() => {}} />
            <Tag value='secondary-removable' variant='secondary' onRemove={() => {}} />
            <Tag value='invalid-removable' variant='invalid' onRemove={() => {}} />
          </VariantRow>
          <VariantRow label='secondary variant'>
            <div className='w-80'>
              <TagInput
                items={[
                  { value: 'workflow', isValid: true },
                  { value: 'automation', isValid: true },
                ]}
                onAdd={() => true}
                onRemove={() => {}}
                placeholder='Add tags'
                placeholderWithTags='Add another'
                tagVariant='secondary'
                triggerKeys={['Enter', ',']}
              />
            </div>
          </VariantRow>
          <VariantRow label='disabled'>
            <div className='w-80'>
              <TagInput
@@ -888,7 +888,7 @@ export function Chat() {
          selectedOutputs={selectedOutputs}
          onOutputSelect={handleOutputSelection}
          disabled={!activeWorkflowId}
-          placeholder='Select outputs'
+          placeholder='Outputs'
          align='end'
          maxHeight={180}
        />
@@ -1,16 +1,9 @@
 'use client'

 import type React from 'react'
-import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
-import { Check, RepeatIcon, SplitIcon } from 'lucide-react'
-import {
-  Badge,
-  Popover,
-  PopoverContent,
-  PopoverDivider,
-  PopoverItem,
-  PopoverTrigger,
-} from '@/components/emcn'
+import { useMemo } from 'react'
+import { RepeatIcon, SplitIcon } from 'lucide-react'
+import { Combobox, type ComboboxOptionGroup } from '@/components/emcn'
 import {
   extractFieldsFromSchema,
   parseResponseFormatSafely,
@@ -21,7 +14,7 @@ import { useSubBlockStore } from '@/stores/workflows/subblock/store'
 import { useWorkflowStore } from '@/stores/workflows/workflow/store'

 /**
- * Renders a tag icon with background color.
+ * Renders a tag icon with background color for block section headers.
  *
  * @param icon - Either a letter string or a Lucide icon component
  * @param color - Background color for the icon container
@@ -62,14 +55,9 @@ interface OutputSelectProps {
   placeholder?: string
   /** Whether to emit output IDs or labels in onOutputSelect callback */
   valueMode?: 'id' | 'label'
-  /**
-   * When true, renders the underlying popover content inline instead of in a portal.
-   * Useful when used inside dialogs or other portalled components that manage scroll locking.
-   */
-  disablePopoverPortal?: boolean
-  /** Alignment of the popover relative to the trigger */
+  /** Alignment of the dropdown relative to the trigger */
   align?: 'start' | 'end' | 'center'
-  /** Maximum height of the popover content in pixels */
+  /** Maximum height of the dropdown content in pixels */
   maxHeight?: number
 }
@@ -90,14 +78,9 @@ export function OutputSelect({
   disabled = false,
   placeholder = 'Select outputs',
   valueMode = 'id',
-  disablePopoverPortal = false,
   align = 'start',
   maxHeight = 200,
 }: OutputSelectProps) {
-  const [open, setOpen] = useState(false)
-  const [highlightedIndex, setHighlightedIndex] = useState(-1)
-  const triggerRef = useRef<HTMLDivElement>(null)
-  const popoverRef = useRef<HTMLDivElement>(null)
   const blocks = useWorkflowStore((state) => state.blocks)
   const { isShowingDiff, isDiffReady, hasActiveDiff, baselineWorkflow } = useWorkflowDiffStore()
   const subBlockValues = useSubBlockStore((state) =>
@@ -206,21 +189,10 @@ export function OutputSelect({
     shouldUseBaseline,
   ])

-  /**
-   * Checks if an output is currently selected by comparing both ID and label
-   * @param o - The output object to check
-   * @returns True if the output is selected, false otherwise
-   */
-  const isSelectedValue = useCallback(
-    (o: { id: string; label: string }) =>
-      selectedOutputs.includes(o.id) || selectedOutputs.includes(o.label),
-    [selectedOutputs]
-  )
-
   /**
    * Gets display text for selected outputs
    */
-  const selectedOutputsDisplayText = useMemo(() => {
+  const selectedDisplayText = useMemo(() => {
     if (!selectedOutputs || selectedOutputs.length === 0) {
       return placeholder
     }
@@ -234,19 +206,27 @@ export function OutputSelect({
     }

     if (validOutputs.length === 1) {
-      const output = workflowOutputs.find(
-        (o) => o.id === validOutputs[0] || o.label === validOutputs[0]
-      )
-      return output?.label || placeholder
+      return '1 output'
     }

     return `${validOutputs.length} outputs`
   }, [selectedOutputs, workflowOutputs, placeholder])

   /**
-   * Groups outputs by block and sorts by distance from starter block
+   * Gets the background color for a block output based on its type
+   * @param blockType - The type of the block
+   * @returns The hex color code for the block
    */
-  const groupedOutputs = useMemo(() => {
+  const getOutputColor = (blockType: string) => {
+    const blockConfig = getBlock(blockType)
+    return blockConfig?.bgColor || '#2F55FF'
+  }
+
+  /**
+   * Groups outputs by block and sorts by distance from starter block.
+   * Returns ComboboxOptionGroup[] for use with Combobox.
+   */
+  const comboboxGroups = useMemo((): ComboboxOptionGroup[] => {
     const groups: Record<string, typeof workflowOutputs> = {}
     const blockDistances: Record<string, number> = {}
     const edges = useWorkflowStore.getState().edges
@@ -283,242 +263,75 @@ export function OutputSelect({
       groups[output.blockName].push(output)
     })

-    return Object.entries(groups)
+    const sortedGroups = Object.entries(groups)
       .map(([blockName, outputs]) => ({
         blockName,
         outputs,
         distance: blockDistances[outputs[0]?.blockId] || 0,
       }))
       .sort((a, b) => b.distance - a.distance)
-      .reduce(
-        (acc, { blockName, outputs }) => {
-          acc[blockName] = outputs
-          return acc
-        },
-        {} as Record<string, typeof workflowOutputs>
-      )
-  }, [workflowOutputs, blocks])
-
-  /**
-   * Gets the background color for a block output based on its type
-   * @param blockId - The block ID (unused but kept for future extensibility)
-   * @param blockType - The type of the block
-   * @returns The hex color code for the block
-   */
-  const getOutputColor = (blockId: string, blockType: string) => {
-    const blockConfig = getBlock(blockType)
-    return blockConfig?.bgColor || '#2F55FF'
-  }
+    return sortedGroups.map(({ blockName, outputs }) => {
+      const firstOutput = outputs[0]
+      const blockConfig = getBlock(firstOutput.blockType)
+      const blockColor = getOutputColor(firstOutput.blockType)

-  /**
-   * Flattened outputs for keyboard navigation
-   */
-  const flattenedOutputs = useMemo(() => {
-    return Object.values(groupedOutputs).flat()
-  }, [groupedOutputs])
+      let blockIcon: string | React.ComponentType<{ className?: string }> = blockName
+        .charAt(0)
+        .toUpperCase()

-  /**
-   * Handles output selection by toggling the selected state
-   * @param value - The output label to toggle
-   */
-  const handleOutputSelection = useCallback(
-    (value: string) => {
-      const emittedValue =
-        valueMode === 'label' ? value : workflowOutputs.find((o) => o.label === value)?.id || value
-      const index = selectedOutputs.indexOf(emittedValue)
-
-      const newSelectedOutputs =
-        index === -1
-          ? [...new Set([...selectedOutputs, emittedValue])]
-          : selectedOutputs.filter((id) => id !== emittedValue)
-
-      onOutputSelect(newSelectedOutputs)
-    },
-    [valueMode, workflowOutputs, selectedOutputs, onOutputSelect]
-  )
-
-  /**
-   * Handles keyboard navigation within the output list
-   * Supports ArrowUp, ArrowDown, Enter, and Escape keys
-   */
-  useEffect(() => {
-    if (!open || flattenedOutputs.length === 0) return
-
-    const handleKeyboardEvent = (e: KeyboardEvent) => {
-      switch (e.key) {
-        case 'ArrowDown':
-          e.preventDefault()
-          e.stopPropagation()
-          setHighlightedIndex((prev) => {
-            if (prev === -1 || prev >= flattenedOutputs.length - 1) {
-              return 0
-            }
-            return prev + 1
-          })
-          break
-
-        case 'ArrowUp':
-          e.preventDefault()
-          e.stopPropagation()
-          setHighlightedIndex((prev) => {
-            if (prev <= 0) {
-              return flattenedOutputs.length - 1
-            }
-            return prev - 1
-          })
-          break
-
-        case 'Enter':
-          e.preventDefault()
-          e.stopPropagation()
-          setHighlightedIndex((currentIndex) => {
-            if (currentIndex >= 0 && currentIndex < flattenedOutputs.length) {
-              handleOutputSelection(flattenedOutputs[currentIndex].label)
-            }
-            return currentIndex
-          })
-          break
-
-        case 'Escape':
-          e.preventDefault()
-          e.stopPropagation()
-          setOpen(false)
-          break
+      if (blockConfig?.icon) {
+        blockIcon = blockConfig.icon
+      } else if (firstOutput.blockType === 'loop') {
+        blockIcon = RepeatIcon
+      } else if (firstOutput.blockType === 'parallel') {
+        blockIcon = SplitIcon
       }
-    }
-
-    window.addEventListener('keydown', handleKeyboardEvent, true)
-    return () => window.removeEventListener('keydown', handleKeyboardEvent, true)
-  }, [open, flattenedOutputs, handleOutputSelection])
-
-  /**
-   * Reset highlighted index when popover opens/closes
-   */
-  useEffect(() => {
-    if (open) {
-      const firstSelectedIndex = flattenedOutputs.findIndex((output) => isSelectedValue(output))
-      setHighlightedIndex(firstSelectedIndex >= 0 ? firstSelectedIndex : -1)
-    } else {
-      setHighlightedIndex(-1)
-    }
-  }, [open, flattenedOutputs, isSelectedValue])
-
-  /**
-   * Scroll highlighted item into view
-   */
-  useEffect(() => {
-    if (highlightedIndex >= 0 && popoverRef.current) {
-      const highlightedElement = popoverRef.current.querySelector(
-        `[data-option-index="${highlightedIndex}"]`
-      )
-      if (highlightedElement) {
-        highlightedElement.scrollIntoView({ behavior: 'smooth', block: 'nearest' })
+      return {
+        sectionElement: (
+          <div className='flex items-center gap-1.5 px-[6px] py-[4px]'>
+            <TagIcon icon={blockIcon} color={blockColor} />
+            <span className='font-medium text-[13px]'>{blockName}</span>
+          </div>
+        ),
+        items: outputs.map((output) => ({
+          label: output.path,
+          value: valueMode === 'label' ? output.label : output.id,
+        })),
       }
-    }
-  }, [highlightedIndex])
+    })
+  }, [workflowOutputs, blocks, valueMode])

   /**
-   * Closes popover when clicking outside
+   * Normalize selected values to match the valueMode
    */
-  useEffect(() => {
-    if (!open) return
-
-    const handleClickOutside = (event: MouseEvent) => {
-      const target = event.target as Node
-      const insideTrigger = triggerRef.current?.contains(target)
-      const insidePopover = popoverRef.current?.contains(target)
-
-      if (!insideTrigger && !insidePopover) {
-        setOpen(false)
-      }
-    }
-
-    document.addEventListener('mousedown', handleClickOutside)
-    return () => document.removeEventListener('mousedown', handleClickOutside)
-  }, [open])
+  const normalizedSelectedValues = useMemo(() => {
+    return selectedOutputs
+      .map((val) => {
+        // Find the output that matches either id or label
+        const output = workflowOutputs.find((o) => o.id === val || o.label === val)
+        if (!output) return null
+        // Return in the format matching valueMode
+        return valueMode === 'label' ? output.label : output.id
+      })
+      .filter((v): v is string => v !== null)
+  }, [selectedOutputs, workflowOutputs, valueMode])

   return (
-    <Popover open={open} variant='default'>
-      <PopoverTrigger asChild>
-        <div ref={triggerRef} className='min-w-0 max-w-full'>
-          <Badge
-            variant='outline'
-            className='flex-none cursor-pointer whitespace-nowrap rounded-[6px]'
-            title='Select outputs'
-            aria-expanded={open}
-            onMouseDown={(e) => {
-              if (disabled || workflowOutputs.length === 0) return
-              e.stopPropagation()
-              setOpen((prev) => !prev)
-            }}
-          >
-            <span className='whitespace-nowrap text-[12px]'>{selectedOutputsDisplayText}</span>
-          </Badge>
-        </div>
-      </PopoverTrigger>
-      <PopoverContent
-        ref={popoverRef}
-        side='bottom'
-        align={align}
-        sideOffset={4}
-        maxHeight={maxHeight}
-        maxWidth={300}
-        minWidth={160}
-        border
-        disablePortal={disablePopoverPortal}
-      >
-        <div className='space-y-[2px]'>
-          {Object.entries(groupedOutputs).map(([blockName, outputs], groupIndex, groupArray) => {
-            const startIndex = flattenedOutputs.findIndex((o) => o.blockName === blockName)
-
-            const firstOutput = outputs[0]
-            const blockConfig = getBlock(firstOutput.blockType)
-            const blockColor = getOutputColor(firstOutput.blockId, firstOutput.blockType)
-
-            let blockIcon: string | React.ComponentType<{ className?: string }> = blockName
-              .charAt(0)
-              .toUpperCase()
-
-            if (blockConfig?.icon) {
-              blockIcon = blockConfig.icon
-            } else if (firstOutput.blockType === 'loop') {
-              blockIcon = RepeatIcon
-            } else if (firstOutput.blockType === 'parallel') {
-              blockIcon = SplitIcon
-            }
-
-            return (
-              <div key={blockName}>
-                <div className='flex items-center gap-1.5 px-[6px] py-[4px]'>
-                  <TagIcon icon={blockIcon} color={blockColor} />
-                  <span className='font-medium text-[13px]'>{blockName}</span>
-                </div>
-
-                <div className='flex flex-col gap-[2px]'>
-                  {outputs.map((output, localIndex) => {
-                    const globalIndex = startIndex + localIndex
-                    const isHighlighted = globalIndex === highlightedIndex
-
-                    return (
-                      <PopoverItem
-                        key={output.id}
-                        active={isSelectedValue(output) || isHighlighted}
-                        data-option-index={globalIndex}
-                        onClick={() => handleOutputSelection(output.label)}
-                        onMouseEnter={() => setHighlightedIndex(globalIndex)}
-                      >
-                        <span className='min-w-0 flex-1 truncate'>{output.path}</span>
-                        {isSelectedValue(output) && <Check className='h-3 w-3 flex-shrink-0' />}
-                      </PopoverItem>
-                    )
-                  })}
-                </div>
-                {groupIndex < groupArray.length - 1 && <PopoverDivider />}
-              </div>
-            )
-          })}
-        </div>
-      </PopoverContent>
-    </Popover>
+    <Combobox
+      size='sm'
+      className='!w-fit !py-[2px] [&>svg]:!ml-[4px] [&>svg]:!h-3 [&>svg]:!w-3 [&>span]:!text-[var(--text-secondary)] min-w-[100px] rounded-[6px] bg-transparent px-[9px] hover:bg-[var(--surface-5)] dark:hover:border-[var(--surface-6)] dark:hover:bg-transparent [&>span]:text-center'
+      groups={comboboxGroups}
+      options={[]}
+      multiSelect
+      multiSelectValues={normalizedSelectedValues}
+      onMultiSelectChange={onOutputSelect}
+      placeholder={selectedDisplayText}
+      disabled={disabled || workflowOutputs.length === 0}
+      align={align}
+      maxHeight={maxHeight}
+      dropdownWidth={220}
+    />
   )
 }
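The net effect of this refactor is that the bespoke Popover, keyboard navigation, outside-click, and scroll-into-view plumbing are all delegated to the shared `Combobox`, and `OutputSelect` only has to produce option groups. A sketch of the group shape it builds, with field names inferred from the `comboboxGroups` construction above (the exact `ComboboxOptionGroup` definition is not shown in this diff):

```ts
import type { ComboboxOptionGroup } from '@/components/emcn'

// Sketch: sectionElement is a ReactNode (the real code renders a TagIcon
// header); a plain string is used here for brevity.
const group: ComboboxOptionGroup = {
  sectionElement: 'Agent 1',
  items: [{ label: 'agent1.content', value: 'agent1-block-id_content' }],
}
```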
@@ -0,0 +1,921 @@
|
||||
'use client'
|
||||
|
||||
import { useCallback, useEffect, useMemo, useState } from 'react'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { useQueryClient } from '@tanstack/react-query'
|
||||
import { Check, Clipboard } from 'lucide-react'
|
||||
import { useParams } from 'next/navigation'
|
||||
import {
|
||||
Badge,
|
||||
Button,
|
||||
ButtonGroup,
|
||||
ButtonGroupItem,
|
||||
Checkbox,
|
||||
Code,
|
||||
Combobox,
|
||||
type ComboboxOption,
|
||||
Input,
|
||||
Label,
|
||||
TagInput,
|
||||
Textarea,
|
||||
Tooltip,
|
||||
} from '@/components/emcn'
|
||||
import { Skeleton } from '@/components/ui'
|
||||
import type { AgentAuthentication, AgentCapabilities } from '@/lib/a2a/types'
|
||||
import { getBaseUrl } from '@/lib/core/utils/urls'
|
||||
import { normalizeInputFormatValue } from '@/lib/workflows/input-format-utils'
|
||||
import { StartBlockPath, TriggerUtils } from '@/lib/workflows/triggers/triggers'
|
||||
import {
|
||||
a2aAgentKeys,
|
||||
useA2AAgentByWorkflow,
|
||||
useCreateA2AAgent,
|
||||
useDeleteA2AAgent,
|
||||
usePublishA2AAgent,
|
||||
useUpdateA2AAgent,
|
||||
} from '@/hooks/queries/a2a/agents'
|
||||
import { useCollaborativeWorkflow } from '@/hooks/use-collaborative-workflow'
|
||||
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
|
||||
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
|
||||
|
||||
const logger = createLogger('A2ADeploy')
|
||||
|
||||
interface InputFormatField {
|
||||
id?: string
|
||||
name?: string
|
||||
type?: string
|
||||
value?: unknown
|
||||
collapsed?: boolean
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a description is a default/placeholder value that should be filtered out
|
||||
*/
|
||||
function isDefaultDescription(desc: string | null | undefined, workflowName: string): boolean {
|
||||
if (!desc) return true
|
||||
const normalized = desc.toLowerCase().trim()
|
||||
return (
|
||||
normalized === '' || normalized === 'new workflow' || normalized === workflowName.toLowerCase()
|
||||
)
|
||||
}
|
||||
|
||||
type CodeLanguage = 'curl' | 'python' | 'javascript' | 'typescript'
|
||||
|
||||
const LANGUAGE_LABELS: Record<CodeLanguage, string> = {
|
||||
curl: 'cURL',
|
||||
python: 'Python',
|
||||
javascript: 'JavaScript',
|
||||
typescript: 'TypeScript',
|
||||
}
|
||||
|
||||
const LANGUAGE_SYNTAX: Record<CodeLanguage, 'python' | 'javascript' | 'json'> = {
|
||||
curl: 'javascript',
|
||||
python: 'python',
|
||||
javascript: 'javascript',
|
||||
typescript: 'javascript',
|
||||
}
|
||||
|
||||
interface A2aDeployProps {
|
||||
workflowId: string
|
||||
workflowName: string
|
||||
workflowDescription?: string | null
|
||||
isDeployed: boolean
|
||||
workflowNeedsRedeployment?: boolean
|
||||
onSubmittingChange?: (submitting: boolean) => void
|
||||
onCanSaveChange?: (canSave: boolean) => void
|
||||
onAgentExistsChange?: (exists: boolean) => void
|
||||
onPublishedChange?: (published: boolean) => void
|
||||
onNeedsRepublishChange?: (needsRepublish: boolean) => void
|
||||
onDeployWorkflow?: () => Promise<void>
|
||||
}
|
||||
|
||||
type AuthScheme = 'none' | 'apiKey'
|
||||
|
||||
export function A2aDeploy({
|
||||
workflowId,
|
||||
workflowName,
|
||||
workflowDescription,
|
||||
isDeployed,
|
||||
workflowNeedsRedeployment,
|
||||
onSubmittingChange,
|
||||
onCanSaveChange,
|
||||
onAgentExistsChange,
|
||||
onPublishedChange,
|
||||
onNeedsRepublishChange,
|
||||
onDeployWorkflow,
|
||||
}: A2aDeployProps) {
|
||||
const params = useParams()
|
||||
const workspaceId = params.workspaceId as string
|
||||
const queryClient = useQueryClient()
|
||||
|
||||
const { data: existingAgent, isLoading, refetch } = useA2AAgentByWorkflow(workspaceId, workflowId)
|
||||
|
||||
const createAgent = useCreateA2AAgent()
|
||||
const updateAgent = useUpdateA2AAgent()
|
||||
const deleteAgent = useDeleteA2AAgent()
|
||||
const publishAgent = usePublishA2AAgent()
|
||||
|
||||
// Start block input field detection
|
||||
const blocks = useWorkflowStore((state) => state.blocks)
|
||||
const { collaborativeSetSubblockValue } = useCollaborativeWorkflow()
|
||||
|
||||
const startBlockId = useMemo(() => {
|
||||
if (!blocks || Object.keys(blocks).length === 0) return null
|
||||
const candidate = TriggerUtils.findStartBlock(blocks, 'api')
|
||||
if (!candidate || candidate.path !== StartBlockPath.UNIFIED) return null
|
||||
return candidate.blockId
|
||||
}, [blocks])
|
||||
|
||||
const startBlockInputFormat = useSubBlockStore((state) => {
|
||||
if (!workflowId || !startBlockId) return null
|
||||
const workflowValues = state.workflowValues[workflowId]
|
||||
const fromStore = workflowValues?.[startBlockId]?.inputFormat
|
||||
if (fromStore !== undefined) return fromStore
|
||||
const startBlock = blocks[startBlockId]
|
||||
return startBlock?.subBlocks?.inputFormat?.value ?? null
|
||||
})
|
||||
|
||||
const missingFields = useMemo(() => {
|
||||
if (!startBlockId) return { input: false, data: false, files: false, any: false }
|
||||
const normalizedFields = normalizeInputFormatValue(startBlockInputFormat)
|
||||
const existingNames = new Set(
|
||||
normalizedFields
|
||||
.map((field) => field.name)
|
||||
.filter((n): n is string => typeof n === 'string' && n.trim() !== '')
|
||||
.map((n) => n.trim().toLowerCase())
|
||||
)
|
||||
const missing = {
|
||||
input: !existingNames.has('input'),
|
||||
data: !existingNames.has('data'),
|
||||
files: !existingNames.has('files'),
|
||||
any: false,
|
||||
}
|
||||
missing.any = missing.input || missing.data || missing.files
|
||||
return missing
|
||||
}, [startBlockId, startBlockInputFormat])
|
||||
|
||||
const handleAddA2AInputs = useCallback(() => {
|
||||
if (!startBlockId) return
|
||||
|
||||
const normalizedExisting = normalizeInputFormatValue(startBlockInputFormat)
|
||||
const newFields: InputFormatField[] = []
|
||||
|
||||
// Add input field if missing (for TextPart)
|
||||
if (missingFields.input) {
|
||||
newFields.push({
|
||||
id: crypto.randomUUID(),
|
||||
name: 'input',
|
||||
type: 'string',
|
||||
value: '',
|
||||
collapsed: false,
|
||||
})
|
||||
}
|
||||
|
||||
// Add data field if missing (for DataPart)
|
||||
if (missingFields.data) {
|
||||
newFields.push({
|
||||
id: crypto.randomUUID(),
|
||||
name: 'data',
|
||||
type: 'object',
|
||||
value: '',
|
||||
collapsed: false,
|
||||
})
|
||||
}
|
||||
|
||||
// Add files field if missing (for FilePart)
|
||||
if (missingFields.files) {
|
||||
newFields.push({
|
||||
id: crypto.randomUUID(),
|
||||
name: 'files',
|
||||
type: 'files',
|
||||
value: '',
|
||||
collapsed: false,
|
||||
})
|
||||
}
|
||||
|
||||
if (newFields.length > 0) {
|
||||
const updatedFields = [...newFields, ...normalizedExisting]
|
||||
// Use collaborative update to ensure proper socket sync
|
||||
collaborativeSetSubblockValue(startBlockId, 'inputFormat', updatedFields)
|
||||
logger.info(
|
||||
`Added A2A input fields to Start block: ${newFields.map((f) => f.name).join(', ')}`
|
||||
)
|
||||
}
|
||||
}, [startBlockId, startBlockInputFormat, missingFields, collaborativeSetSubblockValue])
|
||||
|
||||
const [name, setName] = useState('')
const [description, setDescription] = useState('')
const [authScheme, setAuthScheme] = useState<AuthScheme>('apiKey')
const [pushNotificationsEnabled, setPushNotificationsEnabled] = useState(false)
const [skillTags, setSkillTags] = useState<string[]>(['workflow', 'automation'])
const [language, setLanguage] = useState<CodeLanguage>('curl')
const [copied, setCopied] = useState(false)

useEffect(() => {
  if (existingAgent) {
    setName(existingAgent.name)
    // Filter out default descriptions to encourage user to enter a meaningful one
    const savedDesc = existingAgent.description || ''
    setDescription(isDefaultDescription(savedDesc, workflowName) ? '' : savedDesc)
    setPushNotificationsEnabled(existingAgent.capabilities?.pushNotifications ?? false)
    const schemes = existingAgent.authentication?.schemes || []
    if (schemes.includes('apiKey')) {
      setAuthScheme('apiKey')
    } else {
      setAuthScheme('none')
    }
    // Extract tags from first skill if available
    const skills = existingAgent.skills as Array<{ tags?: string[] }> | undefined
    const savedTags = skills?.[0]?.tags
    setSkillTags(savedTags?.length ? savedTags : ['workflow', 'automation'])
  } else {
    setName(workflowName)
    // Filter out default descriptions to encourage user to enter a meaningful one
    setDescription(
      isDefaultDescription(workflowDescription, workflowName) ? '' : workflowDescription || ''
    )
    setAuthScheme('apiKey')
    setPushNotificationsEnabled(false)
    setSkillTags(['workflow', 'automation'])
  }
}, [existingAgent, workflowName, workflowDescription])

useEffect(() => {
  onAgentExistsChange?.(!!existingAgent)
}, [existingAgent, onAgentExistsChange])

useEffect(() => {
  onPublishedChange?.(existingAgent?.isPublished ?? false)
}, [existingAgent?.isPublished, onPublishedChange])

// Detect form changes compared to saved agent state
const hasFormChanges = useMemo(() => {
  if (!existingAgent) return false
  const savedSchemes = existingAgent.authentication?.schemes || []
  const savedAuthScheme = savedSchemes.includes('apiKey') ? 'apiKey' : 'none'
  // Compare description, filtering out default values for both
  const savedDesc = existingAgent.description || ''
  const normalizedSavedDesc = isDefaultDescription(savedDesc, workflowName) ? '' : savedDesc
  // Compare tags
  const skills = existingAgent.skills as Array<{ tags?: string[] }> | undefined
  const savedTags = skills?.[0]?.tags || ['workflow', 'automation']
  const tagsChanged =
    skillTags.length !== savedTags.length || skillTags.some((t, i) => t !== savedTags[i])
  return (
    name !== existingAgent.name ||
    description !== normalizedSavedDesc ||
    pushNotificationsEnabled !== (existingAgent.capabilities?.pushNotifications ?? false) ||
    authScheme !== savedAuthScheme ||
    tagsChanged
  )
}, [
  existingAgent,
  name,
  description,
  pushNotificationsEnabled,
  authScheme,
  skillTags,
  workflowName,
])

// Detect if workflow has pending changes not yet deployed
// This aligns with the General tab's "needs redeployment" detection
const hasWorkflowChanges = useMemo(() => {
  if (!existingAgent) return false
  return !!workflowNeedsRedeployment
}, [existingAgent, workflowNeedsRedeployment])

const needsRepublish = existingAgent && (hasFormChanges || hasWorkflowChanges)

useEffect(() => {
  onNeedsRepublishChange?.(!!needsRepublish)
}, [needsRepublish, onNeedsRepublishChange])

const authSchemeOptions: ComboboxOption[] = useMemo(
  () => [
    { label: 'API Key', value: 'apiKey' },
    { label: 'None (Public)', value: 'none' },
  ],
  []
)

// Require both name and description to publish
const canSave = name.trim().length > 0 && description.trim().length > 0
useEffect(() => {
  onCanSaveChange?.(canSave)
}, [canSave, onCanSaveChange])

const isSubmitting =
  createAgent.isPending ||
  updateAgent.isPending ||
  deleteAgent.isPending ||
  publishAgent.isPending

useEffect(() => {
  onSubmittingChange?.(isSubmitting)
}, [isSubmitting, onSubmittingChange])

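// Persists the agent card configuration: update when an agent already exists for
// this workflow, otherwise create one, then refresh the byWorkflow query.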
const handleCreateOrUpdate = useCallback(async () => {
  const capabilities: AgentCapabilities = {
    streaming: true,
    pushNotifications: pushNotificationsEnabled,
    stateTransitionHistory: true,
  }

  const authentication: AgentAuthentication = {
    schemes: authScheme === 'none' ? ['none'] : [authScheme],
  }

  try {
    if (existingAgent) {
      await updateAgent.mutateAsync({
        agentId: existingAgent.id,
        name: name.trim(),
        description: description.trim() || undefined,
        capabilities,
        authentication,
        skillTags,
      })
    } else {
      await createAgent.mutateAsync({
        workspaceId,
        workflowId,
        name: name.trim(),
        description: description.trim() || undefined,
        capabilities,
        authentication,
        skillTags,
      })
    }
    queryClient.invalidateQueries({
      queryKey: [...a2aAgentKeys.all, 'byWorkflow', workspaceId, workflowId],
    })
  } catch (error) {
    logger.error('Failed to save A2A agent:', error)
  }
}, [
  existingAgent,
  name,
  description,
  pushNotificationsEnabled,
  authScheme,
  skillTags,
  workspaceId,
  workflowId,
  createAgent,
  updateAgent,
  queryClient,
])

const handlePublish = useCallback(async () => {
  if (!existingAgent) return
  try {
    await publishAgent.mutateAsync({
      agentId: existingAgent.id,
      workspaceId,
      action: 'publish',
    })
    refetch()
  } catch (error) {
    logger.error('Failed to publish A2A agent:', error)
  }
}, [existingAgent, workspaceId, publishAgent, refetch])

const handleUnpublish = useCallback(async () => {
  if (!existingAgent) return
  try {
    await publishAgent.mutateAsync({
      agentId: existingAgent.id,
      workspaceId,
      action: 'unpublish',
    })
    refetch()
  } catch (error) {
    logger.error('Failed to unpublish A2A agent:', error)
  }
}, [existingAgent, workspaceId, publishAgent, refetch])

const handleDelete = useCallback(async () => {
  if (!existingAgent) return
  try {
    await deleteAgent.mutateAsync({
      agentId: existingAgent.id,
      workspaceId,
    })
    setName(workflowName)
    setDescription(workflowDescription || '')
  } catch (error) {
    logger.error('Failed to delete A2A agent:', error)
  }
}, [existingAgent, workspaceId, deleteAgent, workflowName, workflowDescription])

const handleCopyEndpoint = useCallback(() => {
  if (!existingAgent) return
  const copyEndpoint = `${getBaseUrl()}/api/a2a/serve/${existingAgent.id}`
  navigator.clipboard.writeText(copyEndpoint)
  setCopied(true)
  setTimeout(() => setCopied(false), 2000)
}, [existingAgent])

// Combined create + publish action (auto-deploys workflow if needed)
const handlePublishNewAgent = useCallback(async () => {
  const capabilities: AgentCapabilities = {
    streaming: true,
    pushNotifications: pushNotificationsEnabled,
    stateTransitionHistory: true,
  }

  const authentication: AgentAuthentication = {
    schemes: authScheme === 'none' ? ['none'] : [authScheme],
  }

  try {
    // Auto-deploy workflow if not deployed
    if (!isDeployed && onDeployWorkflow) {
      await onDeployWorkflow()
    }

    // First create the agent
    const newAgent = await createAgent.mutateAsync({
      workspaceId,
      workflowId,
      name: name.trim(),
      description: description.trim() || undefined,
      capabilities,
      authentication,
      skillTags,
    })

    // Then immediately publish it
    await publishAgent.mutateAsync({
      agentId: newAgent.id,
      workspaceId,
      action: 'publish',
    })

    queryClient.invalidateQueries({
      queryKey: [...a2aAgentKeys.all, 'byWorkflow', workspaceId, workflowId],
    })
  } catch (error) {
    logger.error('Failed to publish A2A agent:', error)
  }
}, [
  name,
  description,
  pushNotificationsEnabled,
  authScheme,
  skillTags,
  workspaceId,
  workflowId,
  createAgent,
  publishAgent,
  queryClient,
  isDeployed,
  onDeployWorkflow,
])

// Update agent and republish (auto-deploys workflow if needed)
const handleUpdateAndRepublish = useCallback(async () => {
  if (!existingAgent) return

  const capabilities: AgentCapabilities = {
    streaming: true,
    pushNotifications: pushNotificationsEnabled,
    stateTransitionHistory: true,
  }

  const authentication: AgentAuthentication = {
    schemes: authScheme === 'none' ? ['none'] : [authScheme],
  }

  try {
    // Auto-deploy workflow if not deployed
    if (!isDeployed && onDeployWorkflow) {
      await onDeployWorkflow()
    }

    // First update the agent
    await updateAgent.mutateAsync({
      agentId: existingAgent.id,
      name: name.trim(),
      description: description.trim() || undefined,
      capabilities,
      authentication,
      skillTags,
    })

    // Then republish it
    await publishAgent.mutateAsync({
      agentId: existingAgent.id,
      workspaceId,
      action: 'publish',
    })

    queryClient.invalidateQueries({
      queryKey: [...a2aAgentKeys.all, 'byWorkflow', workspaceId, workflowId],
    })
  } catch (error) {
    logger.error('Failed to update and republish A2A agent:', error)
  }
}, [
  existingAgent,
  isDeployed,
  onDeployWorkflow,
  name,
  description,
  pushNotificationsEnabled,
  authScheme,
  skillTags,
  workspaceId,
  workflowId,
  updateAgent,
  publishAgent,
  queryClient,
])

// Curl preview generation
const baseUrl = getBaseUrl()
const endpoint = existingAgent ? `${baseUrl}/api/a2a/serve/${existingAgent.id}` : null

// Get additional input fields from Start block (excluding reserved fields handled via A2A parts)
const additionalInputFields = useMemo(() => {
  const allFields = normalizeInputFormatValue(startBlockInputFormat)
  return allFields.filter(
    (field): field is InputFormatField & { name: string } =>
      !!field.name &&
      field.name.toLowerCase() !== 'input' &&
      field.name.toLowerCase() !== 'data' &&
      field.name.toLowerCase() !== 'files'
  )
}, [startBlockInputFormat])

const getExampleInputData = useCallback((): Record<string, unknown> => {
  const data: Record<string, unknown> = {}
  for (const field of additionalInputFields) {
    switch (field.type) {
      case 'string':
        data[field.name] = 'example'
        break
      case 'number':
        data[field.name] = 42
        break
      case 'boolean':
        data[field.name] = true
        break
      case 'object':
        data[field.name] = { key: 'value' }
        break
      case 'array':
        data[field.name] = [1, 2, 3]
        break
      default:
        data[field.name] = 'example'
    }
  }
  return data
}, [additionalInputFields])

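// Builds the JSON-RPC 2.0 envelope for the A2A message/send method. The preview
// always includes a TextPart; any extra Start-block fields (with the placeholder
// values from getExampleInputData above) ride along as a single DataPart.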
const getJsonRpcPayload = useCallback((): Record<string, unknown> => {
  const inputData = getExampleInputData()
  const hasAdditionalData = Object.keys(inputData).length > 0

  // Build parts array: TextPart for message text, DataPart for additional fields
  const parts: Array<Record<string, unknown>> = [{ kind: 'text', text: 'Hello, agent!' }]
  if (hasAdditionalData) {
    parts.push({ kind: 'data', data: inputData })
  }

  return {
    jsonrpc: '2.0',
    id: '1',
    method: 'message/send',
    params: {
      message: {
        role: 'user',
        parts,
      },
    },
  }
}, [getExampleInputData])

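// Shape of the payload produced above when no additional fields exist (a sketch;
// real previews append whatever getExampleInputData returns as a DataPart):
// {
//   "jsonrpc": "2.0",
//   "id": "1",
//   "method": "message/send",
//   "params": {
//     "message": { "role": "user", "parts": [{ "kind": "text", "text": "Hello, agent!" }] }
//   }
// }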
const getCurlCommand = useCallback((): string => {
  if (!endpoint) return ''
  const payload = getJsonRpcPayload()
  const requiresAuth = authScheme !== 'none'

  switch (language) {
    case 'curl':
      return requiresAuth
        ? `curl -X POST \\
  -H "X-API-Key: $SIM_API_KEY" \\
  -H "Content-Type: application/json" \\
  -d '${JSON.stringify(payload)}' \\
  ${endpoint}`
        : `curl -X POST \\
  -H "Content-Type: application/json" \\
  -d '${JSON.stringify(payload)}' \\
  ${endpoint}`

    case 'python':
      return requiresAuth
        ? `import requests

response = requests.post(
    "${endpoint}",
    headers={
        "X-API-Key": SIM_API_KEY,
        "Content-Type": "application/json"
    },
    json=${JSON.stringify(payload, null, 4).replace(/\n/g, '\n    ')}
)

print(response.json())`
        : `import requests

response = requests.post(
    "${endpoint}",
    headers={"Content-Type": "application/json"},
    json=${JSON.stringify(payload, null, 4).replace(/\n/g, '\n    ')}
)

print(response.json())`

    case 'javascript':
      return requiresAuth
        ? `const response = await fetch("${endpoint}", {
  method: "POST",
  headers: {
    "X-API-Key": SIM_API_KEY,
    "Content-Type": "application/json"
  },
  body: JSON.stringify(${JSON.stringify(payload)})
});

const data = await response.json();
console.log(data);`
        : `const response = await fetch("${endpoint}", {
  method: "POST",
  headers: {"Content-Type": "application/json"},
  body: JSON.stringify(${JSON.stringify(payload)})
});

const data = await response.json();
console.log(data);`

    case 'typescript':
      return requiresAuth
        ? `const response = await fetch("${endpoint}", {
  method: "POST",
  headers: {
    "X-API-Key": SIM_API_KEY,
    "Content-Type": "application/json"
  },
  body: JSON.stringify(${JSON.stringify(payload)})
});

const data: Record<string, unknown> = await response.json();
console.log(data);`
        : `const response = await fetch("${endpoint}", {
  method: "POST",
  headers: {"Content-Type": "application/json"},
  body: JSON.stringify(${JSON.stringify(payload)})
});

const data: Record<string, unknown> = await response.json();
console.log(data);`

    default:
      return ''
  }
}, [endpoint, language, getJsonRpcPayload, authScheme])

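// NOTE: SIM_API_KEY in the generated snippets is a placeholder the caller must
// supply: an environment variable for the curl variant, an in-scope variable for
// the Python/JS/TS variants.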
const handleCopyCommand = useCallback(() => {
  navigator.clipboard.writeText(getCurlCommand())
  setCopied(true)
  setTimeout(() => setCopied(false), 2000)
}, [getCurlCommand])

if (isLoading) {
  return (
    <div className='-mx-1 space-y-[12px] px-1'>
      <div>
        <Skeleton className='mb-[6.5px] h-[16px] w-[80px]' />
        <Skeleton className='h-[34px] w-full rounded-[4px]' />
        <Skeleton className='mt-[6.5px] h-[14px] w-[200px]' />
      </div>
      <div>
        <Skeleton className='mb-[6.5px] h-[16px] w-[70px]' />
        <Skeleton className='h-[80px] w-full rounded-[4px]' />
      </div>
      <div>
        <Skeleton className='mb-[6.5px] h-[16px] w-[50px]' />
        <Skeleton className='h-[34px] w-full rounded-[4px]' />
      </div>
      <div>
        <Skeleton className='mb-[6.5px] h-[16px] w-[90px]' />
        <Skeleton className='h-[34px] w-full rounded-[4px]' />
      </div>
    </div>
  )
}

return (
  <form
    id='a2a-deploy-form'
    onSubmit={(e) => {
      e.preventDefault()
      handleCreateOrUpdate()
    }}
    className='-mx-1 space-y-[12px] overflow-y-auto px-1'
  >
    {/* Agent Name */}
    <div>
      <Label
        htmlFor='a2a-name'
        className='mb-[6.5px] block pl-[2px] font-medium text-[13px] text-[var(--text-primary)]'
      >
        Agent name <span className='text-red-500'>*</span>
      </Label>
      <Input
        id='a2a-name'
        value={name}
        onChange={(e) => setName(e.target.value)}
        placeholder='Enter agent name'
        required
      />
      <p className='mt-[6.5px] text-[11px] text-[var(--text-secondary)]'>
        Human-readable name shown in the Agent Card
      </p>
    </div>

    {/* Description */}
    <div>
      <Label
        htmlFor='a2a-description'
        className='mb-[6.5px] block pl-[2px] font-medium text-[13px] text-[var(--text-primary)]'
      >
        Description <span className='text-red-500'>*</span>
      </Label>
      <Textarea
        id='a2a-description'
        value={description}
        onChange={(e) => setDescription(e.target.value)}
        placeholder='Describe what this agent does...'
        className='min-h-[80px] resize-none'
        required
      />
    </div>

    {/* Authentication */}
    <div>
      <Label className='mb-[6.5px] block pl-[2px] font-medium text-[13px] text-[var(--text-primary)]'>
        Authentication
      </Label>
      <Combobox
        options={authSchemeOptions}
        value={authScheme}
        onChange={(v) => setAuthScheme(v as AuthScheme)}
        placeholder='Select authentication...'
      />
      <p className='mt-[6.5px] text-[11px] text-[var(--text-secondary)]'>
        {authScheme === 'none'
          ? 'Anyone can call this agent without authentication'
          : 'Requires X-API-Key header or API key query parameter'}
      </p>
    </div>

    {/* Capabilities */}
    <div>
      <Label className='mb-[6.5px] block pl-[2px] font-medium text-[13px] text-[var(--text-primary)]'>
        Capabilities
      </Label>
      <div className='space-y-[8px]'>
        <div className='flex items-center gap-[8px]'>
          <Checkbox
            id='a2a-push'
            checked={pushNotificationsEnabled}
            onCheckedChange={(checked) => setPushNotificationsEnabled(checked === true)}
          />
          <label htmlFor='a2a-push' className='text-[13px] text-[var(--text-primary)]'>
            Push notifications (webhooks)
          </label>
        </div>
      </div>
    </div>

    {/* Tags */}
    <div>
      <Label className='mb-[6.5px] block pl-[2px] font-medium text-[13px] text-[var(--text-primary)]'>
        Tags
      </Label>
      <TagInput
        items={skillTags.map((tag) => ({ value: tag, isValid: true }))}
        onAdd={(value) => {
          if (!skillTags.includes(value)) {
            setSkillTags((prev) => [...prev, value])
            return true
          }
          return false
        }}
        onRemove={(_value, index) => {
          setSkillTags((prev) => prev.filter((_, i) => i !== index))
        }}
        placeholder='Add tags'
        placeholderWithTags='Add another'
        tagVariant='secondary'
        triggerKeys={['Enter', ',']}
      />
    </div>

    {/* Curl Preview (shown when agent exists) */}
    {existingAgent && endpoint && (
      <>
        <div>
          <div className='mb-[6.5px] flex items-center justify-between'>
            <Label className='block pl-[2px] font-medium text-[13px] text-[var(--text-primary)]'>
              Language
            </Label>
          </div>
          <ButtonGroup value={language} onValueChange={(val) => setLanguage(val as CodeLanguage)}>
            {(Object.keys(LANGUAGE_LABELS) as CodeLanguage[]).map((lang) => (
              <ButtonGroupItem key={lang} value={lang}>
                {LANGUAGE_LABELS[lang]}
              </ButtonGroupItem>
            ))}
          </ButtonGroup>
        </div>

        <div>
          <div className='mb-[6.5px] flex items-center justify-between'>
            <Label className='block pl-[2px] font-medium text-[13px] text-[var(--text-primary)]'>
              Send message (JSON-RPC)
            </Label>
            <Tooltip.Root>
              <Tooltip.Trigger asChild>
                <Button
                  type='button'
                  variant='ghost'
                  onClick={handleCopyCommand}
                  aria-label='Copy command'
                  className='!p-1.5 -my-1.5'
                >
                  {copied ? <Check className='h-3 w-3' /> : <Clipboard className='h-3 w-3' />}
                </Button>
              </Tooltip.Trigger>
              <Tooltip.Content>
                <span>{copied ? 'Copied' : 'Copy'}</span>
              </Tooltip.Content>
            </Tooltip.Root>
          </div>
          <Code.Viewer
            code={getCurlCommand()}
            language={LANGUAGE_SYNTAX[language]}
            wrapText
            className='!min-h-0 rounded-[4px] border border-[var(--border-1)]'
          />
          <div className='mt-[6.5px] flex items-start justify-between gap-2'>
            <p className='text-[11px] text-[var(--text-secondary)]'>
              External A2A clients can discover and call your agent. TextPart →{' '}
              <code className='text-[10px]'>&lt;start.input&gt;</code>, DataPart →{' '}
              <code className='text-[10px]'>&lt;start.data&gt;</code>, FilePart →{' '}
              <code className='text-[10px]'>&lt;start.files&gt;</code>.
            </p>
            {missingFields.any && (
              <Badge
                variant='outline'
                className='flex-none cursor-pointer whitespace-nowrap rounded-[6px]'
                title='Add required A2A input fields to Start block'
                onClick={handleAddA2AInputs}
              >
                <span className='whitespace-nowrap text-[12px]'>Add inputs</span>
              </Badge>
            )}
          </div>
        </div>
      </>
    )}

    {/* Hidden triggers for modal footer */}
    <button type='submit' data-a2a-save-trigger className='hidden' />
    <button type='button' data-a2a-publish-trigger className='hidden' onClick={handlePublish} />
    <button
      type='button'
      data-a2a-unpublish-trigger
      className='hidden'
      onClick={handleUnpublish}
    />
    <button type='button' data-a2a-delete-trigger className='hidden' onClick={handleDelete} />
    <button
      type='button'
      data-a2a-publish-new-trigger
      className='hidden'
      onClick={handlePublishNewAgent}
    />
    <button
      type='button'
      data-a2a-update-republish-trigger
      className='hidden'
      onClick={handleUpdateAndRepublish}
    />
  </form>
)
}

@@ -513,25 +513,31 @@ export function McpDeploy({
  {inputFormat.map((field) => (
    <div
      key={field.name}
      className='rounded-[6px] border bg-[var(--surface-3)] px-[10px] py-[8px]'
      className='overflow-hidden rounded-[4px] border border-[var(--border-1)]'
    >
      <div className='flex items-center justify-between'>
        <p className='font-medium text-[13px] text-[var(--text-primary)]'>{field.name}</p>
        <Badge variant='outline' className='text-[10px]'>
          {field.type}
        </Badge>
      <div className='flex items-center justify-between bg-[var(--surface-4)] px-[10px] py-[5px]'>
        <div className='flex min-w-0 flex-1 items-center gap-[8px]'>
          <span className='block truncate font-medium text-[14px] text-[var(--text-tertiary)]'>
            {field.name}
          </span>
          <Badge size='sm'>{field.type}</Badge>
        </div>
      </div>
      <div className='border-[var(--border-1)] border-t px-[10px] pt-[6px] pb-[10px]'>
        <div className='flex flex-col gap-[6px]'>
          <Label className='text-[13px]'>Description</Label>
          <Input
            value={parameterDescriptions[field.name] || ''}
            onChange={(e) =>
              setParameterDescriptions((prev) => ({
                ...prev,
                [field.name]: e.target.value,
              }))
            }
            placeholder={`Enter description for ${field.name}`}
          />
        </div>
      </div>
      <Input
        value={parameterDescriptions[field.name] || ''}
        onChange={(e) =>
          setParameterDescriptions((prev) => ({
            ...prev,
            [field.name]: e.target.value,
          }))
        }
        placeholder='Description'
        className='mt-[6px] h-[28px] text-[12px]'
      />
    </div>
  ))}
</div>

@@ -551,7 +557,6 @@ export function McpDeploy({
  searchable
  searchPlaceholder='Search servers...'
  disabled={!toolName.trim() || isPending}
  isLoading={isPending}
  overlayContent={
    <span className='truncate text-[var(--text-primary)]'>{selectedServersLabel}</span>
  }

@@ -12,9 +12,10 @@ import {
  ModalContent,
  ModalFooter,
  ModalHeader,
  TagInput,
  Textarea,
} from '@/components/emcn'
import { Skeleton, TagInput } from '@/components/ui'
import { Skeleton } from '@/components/ui'
import { useSession } from '@/lib/auth/auth-client'
import { cn } from '@/lib/core/utils/cn'
import { captureAndUploadOGImage, OG_IMAGE_HEIGHT, OG_IMAGE_WIDTH } from '@/lib/og'

@@ -404,10 +405,24 @@ export function TemplateDeploy({
  Tags
</Label>
<TagInput
  value={formData.tags}
  onChange={(tags) => updateField('tags', tags)}
  items={formData.tags.map((tag) => ({ value: tag, isValid: true }))}
  onAdd={(value) => {
    if (!formData.tags.includes(value) && formData.tags.length < 10) {
      updateField('tags', [...formData.tags, value])
      return true
    }
    return false
  }}
  onRemove={(_value, index) => {
    updateField(
      'tags',
      formData.tags.filter((_, i) => i !== index)
    )
  }}
  placeholder='Dev, Agents, Research, etc.'
  maxTags={10}
  placeholderWithTags='Add another'
  tagVariant='secondary'
  triggerKeys={['Enter', ',']}
  disabled={isSubmitting}
/>
</div>

@@ -27,6 +27,7 @@ import { useSettingsModalStore } from '@/stores/modals/settings/store'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
import type { WorkflowState } from '@/stores/workflows/workflow/types'
import { A2aDeploy } from './components/a2a/a2a'
import { ApiDeploy } from './components/api/api'
import { ChatDeploy, type ExistingChat } from './components/chat/chat'
import { GeneralDeploy } from './components/general/general'

@@ -55,7 +56,7 @@ interface WorkflowDeploymentInfo {
  needsRedeployment: boolean
}

type TabView = 'general' | 'api' | 'chat' | 'template' | 'mcp' | 'form'
type TabView = 'general' | 'api' | 'chat' | 'template' | 'mcp' | 'form' | 'a2a'

export function DeployModal({
  open,

@@ -96,6 +97,11 @@ export function DeployModal({
  const [mcpToolSubmitting, setMcpToolSubmitting] = useState(false)
  const [mcpToolCanSave, setMcpToolCanSave] = useState(false)
  const [hasMcpServers, setHasMcpServers] = useState(false)
  const [a2aSubmitting, setA2aSubmitting] = useState(false)
  const [a2aCanSave, setA2aCanSave] = useState(false)
  const [hasA2aAgent, setHasA2aAgent] = useState(false)
  const [isA2aPublished, setIsA2aPublished] = useState(false)
  const [a2aNeedsRepublish, setA2aNeedsRepublish] = useState(false)
  const [hasExistingTemplate, setHasExistingTemplate] = useState(false)
  const [templateStatus, setTemplateStatus] = useState<{
    status: 'pending' | 'approved' | 'rejected' | null

@@ -368,7 +374,6 @@ export function DeployModal({
  async (version: number) => {
    if (!workflowId) return

    // Optimistically update versions to show the new active version immediately
    const previousVersions = [...versions]
    setVersions((prev) =>
      prev.map((v) => ({

@@ -402,7 +407,6 @@ export function DeployModal({

    setDeploymentStatus(workflowId, true, deployedAtTime, apiKeyLabel)

    // Refresh deployed state in background (no loading flash)
    refetchDeployedState()
    fetchVersions()

@@ -423,7 +427,6 @@ export function DeployModal({
      })
    }
  } catch (error) {
    // Rollback optimistic update on error
    setVersions(previousVersions)
    throw error
  }

@@ -578,6 +581,41 @@ export function DeployModal({
  form?.requestSubmit()
}, [])

const handleA2aFormSubmit = useCallback(() => {
  const form = document.getElementById('a2a-deploy-form') as HTMLFormElement
  form?.requestSubmit()
}, [])

const handleA2aPublish = useCallback(() => {
  const form = document.getElementById('a2a-deploy-form')
  const publishTrigger = form?.querySelector('[data-a2a-publish-trigger]') as HTMLButtonElement
  publishTrigger?.click()
}, [])

const handleA2aUnpublish = useCallback(() => {
  const form = document.getElementById('a2a-deploy-form')
  const unpublishTrigger = form?.querySelector(
    '[data-a2a-unpublish-trigger]'
  ) as HTMLButtonElement
  unpublishTrigger?.click()
}, [])

const handleA2aPublishNew = useCallback(() => {
  const form = document.getElementById('a2a-deploy-form')
  const publishNewTrigger = form?.querySelector(
    '[data-a2a-publish-new-trigger]'
  ) as HTMLButtonElement
  publishNewTrigger?.click()
}, [])

const handleA2aUpdateRepublish = useCallback(() => {
  const form = document.getElementById('a2a-deploy-form')
  const updateRepublishTrigger = form?.querySelector(
    '[data-a2a-update-republish-trigger]'
  ) as HTMLButtonElement
  updateRepublishTrigger?.click()
}, [])

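// These handlers let the modal footer (rendered outside the A2A form) drive the
// form's actions: each one locates the matching hidden data-a2a-* trigger button
// inside #a2a-deploy-form and clicks it.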
const handleTemplateDelete = useCallback(() => {
  const form = document.getElementById('template-deploy-form')
  const deleteTrigger = form?.querySelector('[data-template-delete-trigger]') as HTMLButtonElement

@@ -610,6 +648,7 @@ export function DeployModal({
  <ModalTabsTrigger value='general'>General</ModalTabsTrigger>
  <ModalTabsTrigger value='api'>API</ModalTabsTrigger>
  <ModalTabsTrigger value='mcp'>MCP</ModalTabsTrigger>
  <ModalTabsTrigger value='a2a'>A2A</ModalTabsTrigger>
  <ModalTabsTrigger value='chat'>Chat</ModalTabsTrigger>
  {/* <ModalTabsTrigger value='form'>Form</ModalTabsTrigger> */}
  <ModalTabsTrigger value='template'>Template</ModalTabsTrigger>

@@ -700,6 +739,24 @@ export function DeployModal({
    />
  )}
</ModalTabsContent>

<ModalTabsContent value='a2a' className='h-full'>
  {workflowId && (
    <A2aDeploy
      workflowId={workflowId}
      workflowName={workflowMetadata?.name || 'Workflow'}
      workflowDescription={workflowMetadata?.description}
      isDeployed={isDeployed}
      workflowNeedsRedeployment={needsRedeployment}
      onSubmittingChange={setA2aSubmitting}
      onCanSaveChange={setA2aCanSave}
      onAgentExistsChange={setHasA2aAgent}
      onPublishedChange={setIsA2aPublished}
      onNeedsRepublishChange={setA2aNeedsRepublish}
      onDeployWorkflow={onDeploy}
    />
  )}
</ModalTabsContent>
</ModalBody>
</ModalTabs>

@@ -715,19 +772,23 @@ export function DeployModal({
  />
)}
{activeTab === 'api' && (
  <ModalFooter className='items-center justify-end'>
    <Button
      variant='tertiary'
      onClick={() => setIsCreateKeyModalOpen(true)}
      disabled={createButtonDisabled}
    >
      Generate API Key
    </Button>
  <ModalFooter className='items-center justify-between'>
    <div />
    <div className='flex items-center gap-2'>
      <Button
        variant='tertiary'
        onClick={() => setIsCreateKeyModalOpen(true)}
        disabled={createButtonDisabled}
      >
        Generate API Key
      </Button>
    </div>
  </ModalFooter>
)}
{activeTab === 'chat' && (
  <ModalFooter className='items-center'>
    <div className='flex gap-2'>
  <ModalFooter className='items-center justify-between'>
    <div />
    <div className='flex items-center gap-2'>
      {chatExists && (
        <Button
          type='button'

@@ -760,8 +821,9 @@ export function DeployModal({
  </ModalFooter>
)}
{activeTab === 'mcp' && isDeployed && hasMcpServers && (
  <ModalFooter className='items-center'>
    <div className='flex gap-2'>
  <ModalFooter className='items-center justify-between'>
    <div />
    <div className='flex items-center gap-2'>
      <Button
        type='button'
        variant='default'

@@ -781,17 +843,17 @@ export function DeployModal({
  </ModalFooter>
)}
{activeTab === 'template' && (
  <ModalFooter
    className={`items-center ${hasExistingTemplate && templateStatus ? 'justify-between' : ''}`}
  >
    {hasExistingTemplate && templateStatus && (
  <ModalFooter className='items-center justify-between'>
    {hasExistingTemplate && templateStatus ? (
      <TemplateStatusBadge
        status={templateStatus.status}
        views={templateStatus.views}
        stars={templateStatus.stars}
      />
    ) : (
      <div />
    )}
    <div className='flex gap-2'>
    <div className='flex items-center gap-2'>
      {hasExistingTemplate && (
        <Button
          type='button'

@@ -820,8 +882,9 @@ export function DeployModal({
  </ModalFooter>
)}
{/* {activeTab === 'form' && (
  <ModalFooter className='items-center'>
    <div className='flex gap-2'>
  <ModalFooter className='items-center justify-between'>
    <div />
    <div className='flex items-center gap-2'>
      {formExists && (
        <Button
          type='button'

@@ -853,6 +916,71 @@ export function DeployModal({
    </div>
  </ModalFooter>
)} */}
{activeTab === 'a2a' && (
  <ModalFooter className='items-center justify-between'>
    {/* Status badge on left */}
    {hasA2aAgent ? (
      isA2aPublished ? (
        <Badge variant={a2aNeedsRepublish ? 'amber' : 'green'} size='lg' dot>
          {a2aNeedsRepublish ? 'Update deployment' : 'Live'}
        </Badge>
      ) : (
        <Badge variant='red' size='lg' dot>
          Unpublished
        </Badge>
      )
    ) : (
      <div />
    )}
    <div className='flex items-center gap-2'>
      {/* No agent exists: Show "Publish Agent" button */}
      {!hasA2aAgent && (
        <Button
          type='button'
          variant='tertiary'
          onClick={handleA2aPublishNew}
          disabled={a2aSubmitting || !a2aCanSave}
        >
          {a2aSubmitting ? 'Publishing...' : 'Publish Agent'}
        </Button>
      )}

      {/* Agent exists and published: Show Unpublish and Update */}
      {hasA2aAgent && isA2aPublished && (
        <>
          <Button
            type='button'
            variant='default'
            onClick={handleA2aUnpublish}
            disabled={a2aSubmitting}
          >
            Unpublish
          </Button>
          <Button
            type='button'
            variant='tertiary'
            onClick={handleA2aUpdateRepublish}
            disabled={a2aSubmitting || !a2aCanSave || !a2aNeedsRepublish}
          >
            {a2aSubmitting ? 'Updating...' : 'Update'}
          </Button>
        </>
      )}

      {/* Agent exists but unpublished: Show Publish only */}
      {hasA2aAgent && !isA2aPublished && (
        <Button
          type='button'
          variant='tertiary'
          onClick={handleA2aPublish}
          disabled={a2aSubmitting || !a2aCanSave}
        >
          {a2aSubmitting ? 'Publishing...' : 'Publish'}
        </Button>
      )}
    </div>
  </ModalFooter>
)}
</ModalContent>
</Modal>

@@ -952,10 +1080,13 @@ function GeneralFooter({
}: GeneralFooterProps) {
  if (!isDeployed) {
    return (
      <ModalFooter>
        <Button variant='tertiary' onClick={onDeploy} disabled={isSubmitting}>
          {isSubmitting ? 'Deploying...' : 'Deploy'}
        </Button>
      <ModalFooter className='items-center justify-between'>
        <div />
        <div className='flex items-center gap-2'>
          <Button variant='tertiary' onClick={onDeploy} disabled={isSubmitting}>
            {isSubmitting ? 'Deploying...' : 'Deploy'}
          </Button>
        </div>
      </ModalFooter>
    )
  }

@@ -16,7 +16,7 @@ import {
  Switch,
  Tooltip,
} from '@/components/emcn'
import { McpIcon } from '@/components/icons'
import { McpIcon, WorkflowIcon } from '@/components/icons'
import { cn } from '@/lib/core/utils/cn'
import {
  getIssueBadgeLabel,

@@ -30,6 +30,7 @@ import {
  type OAuthProvider,
  type OAuthService,
} from '@/lib/oauth'
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
import {
  CheckboxList,
  Code,

@@ -769,9 +770,10 @@ function WorkflowToolDeployBadge({
}) {
  const { isDeployed, needsRedeploy, isLoading, refetch } = useChildDeployment(workflowId)
  const [isDeploying, setIsDeploying] = useState(false)
  const userPermissions = useUserPermissionsContext()

  const deployWorkflow = useCallback(async () => {
    if (isDeploying || !workflowId) return
    if (isDeploying || !workflowId || !userPermissions.canAdmin) return

    try {
      setIsDeploying(true)

@@ -796,7 +798,7 @@ function WorkflowToolDeployBadge({
    } finally {
      setIsDeploying(false)
    }
  }, [isDeploying, workflowId, refetch, onDeploySuccess])
  }, [isDeploying, workflowId, refetch, onDeploySuccess, userPermissions.canAdmin])

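// The render path below applies the same userPermissions.canAdmin gate in three
// places: the badge's cursor style, its onClick guard, and the tooltip copy.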
if (isLoading || (isDeployed && !needsRedeploy)) {
  return null

@@ -811,13 +813,13 @@ function WorkflowToolDeployBadge({
  <Tooltip.Trigger asChild>
    <Badge
      variant={!isDeployed ? 'red' : 'amber'}
      className='cursor-pointer'
      className={userPermissions.canAdmin ? 'cursor-pointer' : 'cursor-not-allowed'}
      size='sm'
      dot
      onClick={(e: React.MouseEvent) => {
        e.stopPropagation()
        e.preventDefault()
        if (!isDeploying) {
        if (!isDeploying && userPermissions.canAdmin) {
          deployWorkflow()
        }
      }}

@@ -826,7 +828,13 @@ function WorkflowToolDeployBadge({
    </Badge>
  </Tooltip.Trigger>
  <Tooltip.Content>
    <span className='text-sm'>{!isDeployed ? 'Click to deploy' : 'Click to redeploy'}</span>
    <span className='text-sm'>
      {!userPermissions.canAdmin
        ? 'Admin permission required to deploy'
        : !isDeployed
          ? 'Click to deploy'
          : 'Click to redeploy'}
    </span>
  </Tooltip.Content>
</Tooltip.Root>
)

||||
const forceRefreshMcpTools = useForceRefreshMcpTools()
|
||||
const openSettingsModal = useSettingsModalStore((state) => state.openModal)
|
||||
const mcpDataLoading = mcpLoading || mcpServersLoading
|
||||
|
||||
// Fetch workflows for the Workflows section in the dropdown
|
||||
const { data: workflowsList = [] } = useWorkflows(workspaceId, { syncRegistry: false })
|
||||
const availableWorkflows = useMemo(
|
||||
() => workflowsList.filter((w) => w.id !== workflowId),
|
||||
[workflowsList, workflowId]
|
||||
)
|
||||
const hasRefreshedRef = useRef(false)
|
||||
|
||||
const hasMcpTools = selectedTools.some((tool) => tool.type === 'mcp')
|
||||
@@ -1735,6 +1750,36 @@ export function ToolInput({
|
||||
})
|
||||
}
|
||||
|
||||
// Workflows section - shows available workflows that can be executed as tools
|
||||
if (availableWorkflows.length > 0) {
|
||||
groups.push({
|
||||
section: 'Workflows',
|
||||
items: availableWorkflows.map((workflow) => ({
|
||||
label: workflow.name,
|
||||
value: `workflow-${workflow.id}`,
|
||||
iconElement: createToolIcon('#6366F1', WorkflowIcon),
|
||||
onSelect: () => {
|
||||
const newTool: StoredTool = {
|
||||
type: 'workflow',
|
||||
title: 'Workflow',
|
||||
toolId: 'workflow_executor',
|
||||
params: {
|
||||
workflowId: workflow.id,
|
||||
},
|
||||
isExpanded: true,
|
||||
usageControl: 'auto',
|
||||
}
|
||||
setStoreValue([
|
||||
...selectedTools.map((tool) => ({ ...tool, isExpanded: false })),
|
||||
newTool,
|
||||
])
|
||||
setOpen(false)
|
||||
},
|
||||
disabled: isPreview || disabled,
|
||||
})),
|
||||
})
|
||||
}
|
||||
|
||||
return groups
|
||||
}, [
|
||||
customTools,
|
||||
@@ -1749,6 +1794,7 @@ export function ToolInput({
|
||||
handleSelectTool,
|
||||
permissionConfig.disableCustomTools,
|
||||
permissionConfig.disableMcpTools,
|
||||
availableWorkflows,
|
||||
])
|
||||
|
||||
const toolRequiresOAuth = (toolId: string): boolean => {
|
||||
|
||||
@@ -108,7 +108,7 @@ export function Panel() {
|
||||
// Delete workflow hook
|
||||
const { isDeleting, handleDeleteWorkflow } = useDeleteWorkflow({
|
||||
workspaceId,
|
||||
getWorkflowIds: () => activeWorkflowId || '',
|
||||
workflowIds: activeWorkflowId || '',
|
||||
isActive: true,
|
||||
onSuccess: () => setIsDeleteModalOpen(false),
|
||||
})
|
||||
|
||||
@@ -1021,11 +1021,11 @@ export const WorkflowBlock = memo(function WorkflowBlock({
|
||||
<Tooltip.Trigger asChild>
|
||||
<Badge
|
||||
variant={!childIsDeployed ? 'red' : 'amber'}
|
||||
className='cursor-pointer'
|
||||
className={userPermissions.canAdmin ? 'cursor-pointer' : 'cursor-not-allowed'}
|
||||
dot
|
||||
onClick={(e) => {
|
||||
e.stopPropagation()
|
||||
if (childWorkflowId && !isDeploying) {
|
||||
if (childWorkflowId && !isDeploying && userPermissions.canAdmin) {
|
||||
deployWorkflow(childWorkflowId)
|
||||
}
|
||||
}}
|
||||
@@ -1035,7 +1035,11 @@ export const WorkflowBlock = memo(function WorkflowBlock({
|
||||
</Tooltip.Trigger>
|
||||
<Tooltip.Content>
|
||||
<span className='text-sm'>
|
||||
{!childIsDeployed ? 'Click to deploy' : 'Click to redeploy'}
|
||||
{!userPermissions.canAdmin
|
||||
? 'Admin permission required to deploy'
|
||||
: !childIsDeployed
|
||||
? 'Click to deploy'
|
||||
: 'Click to redeploy'}
|
||||
</span>
|
||||
</Tooltip.Content>
|
||||
</Tooltip.Root>
|
||||
|
||||
@@ -347,11 +347,11 @@ export function ContextMenu({
|
||||
title={name}
|
||||
onClick={(e) => {
|
||||
e.stopPropagation()
|
||||
onColorChange(color)
|
||||
setHexInput(color)
|
||||
}}
|
||||
className={cn(
|
||||
'h-[20px] w-[20px] rounded-[4px]',
|
||||
currentColor?.toLowerCase() === color.toLowerCase() && 'ring-1 ring-white'
|
||||
hexInput.toLowerCase() === color.toLowerCase() && 'ring-1 ring-white'
|
||||
)}
|
||||
style={{ backgroundColor: color }}
|
||||
/>
|
||||
@@ -373,7 +373,7 @@ export function ContextMenu({
|
||||
onKeyDown={handleHexKeyDown}
|
||||
onFocus={handleHexFocus}
|
||||
onClick={(e) => e.stopPropagation()}
|
||||
className='h-[20px] min-w-0 flex-1 rounded-[4px] bg-[#363636] px-[6px] text-[11px] text-white uppercase focus:outline-none'
|
||||
className='h-[20px] min-w-0 flex-1 rounded-[4px] bg-[#363636] px-[6px] text-[11px] text-white uppercase caret-white focus:outline-none'
|
||||
/>
|
||||
<button
|
||||
type='button'
|
||||
|
||||
@@ -20,6 +20,7 @@ import {
|
||||
useCanDelete,
|
||||
useDeleteFolder,
|
||||
useDuplicateFolder,
|
||||
useExportFolder,
|
||||
} from '@/app/workspace/[workspaceId]/w/hooks'
|
||||
import { useCreateFolder, useUpdateFolder } from '@/hooks/queries/folders'
|
||||
import { useCreateWorkflow } from '@/hooks/queries/workflows'
|
||||
@@ -57,23 +58,24 @@ export function FolderItem({ folder, level, hoverHandlers }: FolderItemProps) {
|
||||
const { canDeleteFolder } = useCanDelete({ workspaceId })
|
||||
const canDelete = useMemo(() => canDeleteFolder(folder.id), [canDeleteFolder, folder.id])
|
||||
|
||||
// Delete modal state
|
||||
const [isDeleteModalOpen, setIsDeleteModalOpen] = useState(false)
|
||||
|
||||
// Delete folder hook
|
||||
const { isDeleting, handleDeleteFolder } = useDeleteFolder({
|
||||
workspaceId,
|
||||
getFolderIds: () => folder.id,
|
||||
folderIds: folder.id,
|
||||
onSuccess: () => setIsDeleteModalOpen(false),
|
||||
})
|
||||
|
||||
// Duplicate folder hook
|
||||
const { handleDuplicateFolder } = useDuplicateFolder({
|
||||
workspaceId,
|
||||
getFolderIds: () => folder.id,
|
||||
folderIds: folder.id,
|
||||
})
|
||||
|
||||
const { isExporting, hasWorkflows, handleExportFolder } = useExportFolder({
|
||||
workspaceId,
|
||||
folderId: folder.id,
|
||||
})
|
||||
|
||||
// Folder expand hook - must be declared before callbacks that use expandFolder
|
||||
const {
|
||||
isExpanded,
|
||||
handleToggleExpanded,
|
||||
@@ -90,7 +92,6 @@ export function FolderItem({ folder, level, hoverHandlers }: FolderItemProps) {
|
||||
*/
|
||||
const handleCreateWorkflowInFolder = useCallback(async () => {
|
||||
try {
|
||||
// Generate name and color upfront for optimistic updates
|
||||
const name = generateCreativeWorkflowName()
|
||||
const color = getNextWorkflowColor()
|
||||
|
||||
@@ -103,15 +104,12 @@ export function FolderItem({ folder, level, hoverHandlers }: FolderItemProps) {
|
||||
|
||||
if (result.id) {
|
||||
router.push(`/workspace/${workspaceId}/w/${result.id}`)
|
||||
// Expand the parent folder so the new workflow is visible
|
||||
expandFolder()
|
||||
// Scroll to the newly created workflow
|
||||
window.dispatchEvent(
|
||||
new CustomEvent(SIDEBAR_SCROLL_EVENT, { detail: { itemId: result.id } })
|
||||
)
|
||||
}
|
||||
} catch (error) {
|
||||
// Error already handled by mutation's onError callback
|
||||
logger.error('Failed to create workflow in folder:', error)
|
||||
}
|
||||
}, [createWorkflowMutation, workspaceId, folder.id, router, expandFolder])
|
||||
@@ -128,9 +126,7 @@ export function FolderItem({ folder, level, hoverHandlers }: FolderItemProps) {
|
||||
parentId: folder.id,
|
||||
})
|
||||
if (result.id) {
|
||||
// Expand the parent folder so the new folder is visible
|
||||
expandFolder()
|
||||
// Scroll to the newly created folder
|
||||
window.dispatchEvent(
|
||||
new CustomEvent(SIDEBAR_SCROLL_EVENT, { detail: { itemId: result.id } })
|
||||
)
|
||||
@@ -147,7 +143,6 @@ export function FolderItem({ folder, level, hoverHandlers }: FolderItemProps) {
|
||||
*/
|
||||
const onDragStart = useCallback(
|
||||
(e: React.DragEvent) => {
|
||||
// Don't start drag if editing
|
||||
if (isEditing) {
|
||||
e.preventDefault()
|
||||
return
|
||||
@@ -159,12 +154,10 @@ export function FolderItem({ folder, level, hoverHandlers }: FolderItemProps) {
|
||||
[folder.id]
|
||||
)
|
||||
|
||||
// Item drag hook
|
||||
const { isDragging, shouldPreventClickRef, handleDragStart, handleDragEnd } = useItemDrag({
|
||||
onDragStart,
|
||||
})
|
||||
|
||||
// Context menu hook
|
||||
const {
|
||||
isOpen: isContextMenuOpen,
|
||||
position,
|
||||
@@ -174,7 +167,6 @@ export function FolderItem({ folder, level, hoverHandlers }: FolderItemProps) {
|
||||
preventDismiss,
|
||||
} = useContextMenu()
|
||||
|
||||
// Rename hook
|
||||
const {
|
||||
isEditing,
|
||||
editValue,
|
||||
@@ -258,7 +250,6 @@ export function FolderItem({ folder, level, hoverHandlers }: FolderItemProps) {
|
||||
e.preventDefault()
|
||||
e.stopPropagation()
|
||||
|
||||
// Toggle: close if open, open if closed
|
||||
if (isContextMenuOpen) {
|
||||
closeMenu()
|
||||
return
|
||||
@@ -365,13 +356,16 @@ export function FolderItem({ folder, level, hoverHandlers }: FolderItemProps) {
|
||||
onCreate={handleCreateWorkflowInFolder}
|
||||
onCreateFolder={handleCreateFolderInFolder}
|
||||
onDuplicate={handleDuplicateFolder}
|
||||
onExport={handleExportFolder}
|
||||
onDelete={() => setIsDeleteModalOpen(true)}
|
||||
showCreate={true}
|
||||
showCreateFolder={true}
|
||||
showExport={true}
|
||||
disableRename={!userPermissions.canEdit}
|
||||
disableCreate={!userPermissions.canEdit || createWorkflowMutation.isPending}
|
||||
disableCreateFolder={!userPermissions.canEdit || createFolderMutation.isPending}
|
||||
disableDuplicate={!userPermissions.canEdit}
|
||||
disableDuplicate={!userPermissions.canEdit || !hasWorkflows}
|
||||
disableExport={!userPermissions.canEdit || isExporting || !hasWorkflows}
|
||||
disableDelete={!userPermissions.canEdit || !canDelete}
|
||||
/>
|
||||
|
||||
|
||||
@@ -46,19 +46,15 @@ export function WorkflowItem({ workflow, active, level, onWorkflowClick }: Workf
  const userPermissions = useUserPermissionsContext()
  const isSelected = selectedWorkflows.has(workflow.id)

  // Can delete check hook
  const { canDeleteWorkflows } = useCanDelete({ workspaceId })

  // Delete modal state
  const [isDeleteModalOpen, setIsDeleteModalOpen] = useState(false)
  const [workflowIdsToDelete, setWorkflowIdsToDelete] = useState<string[]>([])
  const [deleteModalNames, setDeleteModalNames] = useState<string | string[]>('')
  const [canDeleteCaptured, setCanDeleteCaptured] = useState(true)

  // Presence avatars state
  const [hasAvatars, setHasAvatars] = useState(false)

  // Capture selection at right-click time (using ref to persist across renders)
  const capturedSelectionRef = useRef<{
    workflowIds: string[]
    workflowNames: string | string[]

@@ -68,7 +64,6 @@ export function WorkflowItem({ workflow, active, level, onWorkflowClick }: Workf
   * Handle opening the delete modal - uses pre-captured selection state
   */
  const handleOpenDeleteModal = useCallback(() => {
    // Use the selection captured at right-click time
    if (capturedSelectionRef.current) {
      setWorkflowIdsToDelete(capturedSelectionRef.current.workflowIds)
      setDeleteModalNames(capturedSelectionRef.current.workflowNames)

@@ -76,42 +71,32 @@ export function WorkflowItem({ workflow, active, level, onWorkflowClick }: Workf
    }
  }, [])

  // Delete workflow hook
  const { isDeleting, handleDeleteWorkflow } = useDeleteWorkflow({
    workspaceId,
    getWorkflowIds: () => workflowIdsToDelete,
    workflowIds: workflowIdsToDelete,
    isActive: (workflowIds) => workflowIds.includes(params.workflowId as string),
    onSuccess: () => setIsDeleteModalOpen(false),
  })

  // Duplicate workflow hook
  const { handleDuplicateWorkflow } = useDuplicateWorkflow({
    workspaceId,
    getWorkflowIds: () => {
      // Use the selection captured at right-click time
      return capturedSelectionRef.current?.workflowIds || []
    },
  })
  const { handleDuplicateWorkflow: duplicateWorkflow } = useDuplicateWorkflow({ workspaceId })

  // Export workflow hook
  const { handleExportWorkflow } = useExportWorkflow({
    workspaceId,
    getWorkflowIds: () => {
      // Use the selection captured at right-click time
      return capturedSelectionRef.current?.workflowIds || []
    },
  })
  const { handleExportWorkflow: exportWorkflow } = useExportWorkflow({ workspaceId })
  const handleDuplicateWorkflow = useCallback(() => {
    const workflowIds = capturedSelectionRef.current?.workflowIds || []
    if (workflowIds.length === 0) return
    duplicateWorkflow(workflowIds)
  }, [duplicateWorkflow])

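  // handleExportWorkflow mirrors the duplicate wrapper: resolve the selection
  // captured at right-click time, then delegate to the hook.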
  const handleExportWorkflow = useCallback(() => {
    const workflowIds = capturedSelectionRef.current?.workflowIds || []
    if (workflowIds.length === 0) return
    exportWorkflow(workflowIds)
  }, [exportWorkflow])

  /**
   * Opens the workflow in a new browser tab
   */
  const handleOpenInNewTab = useCallback(() => {
    window.open(`/workspace/${workspaceId}/w/${workflow.id}`, '_blank')
  }, [workspaceId, workflow.id])

  /**
   * Changes the workflow color
   */
  const handleColorChange = useCallback(
    (color: string) => {
      updateWorkflow(workflow.id, { color })

@@ -126,7 +111,6 @@ export function WorkflowItem({ workflow, active, level, onWorkflowClick }: Workf
   */
  const onDragStart = useCallback(
    (e: React.DragEvent) => {
      // Don't start drag if editing
      if (isEditing) {
        e.preventDefault()
        return

@@ -141,12 +125,10 @@ export function WorkflowItem({ workflow, active, level, onWorkflowClick }: Workf
    [isSelected, selectedWorkflows, workflow.id]
  )

  // Item drag hook
  const { isDragging, shouldPreventClickRef, handleDragStart, handleDragEnd } = useItemDrag({
    onDragStart,
  })

  // Context menu hook
  const {
    isOpen: isContextMenuOpen,
    position,

@@ -215,14 +197,12 @@ export function WorkflowItem({ workflow, active, level, onWorkflowClick }: Workf
      e.preventDefault()
      e.stopPropagation()

      // Toggle: close if open, open if closed
      if (isContextMenuOpen) {
        closeMenu()
        return
      }

      captureSelectionState()
      // Open context menu aligned with the button
      const rect = e.currentTarget.getBoundingClientRect()
      handleContextMenuBase({
        preventDefault: () => {},

@@ -234,7 +214,6 @@ export function WorkflowItem({ workflow, active, level, onWorkflowClick }: Workf
    [isContextMenuOpen, closeMenu, captureSelectionState, handleContextMenuBase]
  )

  // Rename hook
  const {
    isEditing,
    editValue,

@@ -281,12 +260,10 @@ export function WorkflowItem({ workflow, active, level, onWorkflowClick }: Workf

      const isModifierClick = e.shiftKey || e.metaKey || e.ctrlKey

      // Prevent default link behavior when using modifier keys
      if (isModifierClick) {
        e.preventDefault()
      }

      // Use metaKey (Cmd on Mac) or ctrlKey (Ctrl on Windows/Linux)
      onWorkflowClick(workflow.id, e.shiftKey, e.metaKey || e.ctrlKey)
    },
    [shouldPreventClickRef, workflow.id, onWorkflowClick, isEditing]

@@ -9,7 +9,6 @@ import {
  useDragDrop,
  useWorkflowSelection,
} from '@/app/workspace/[workspaceId]/w/components/sidebar/hooks'
import { useImportWorkflow } from '@/app/workspace/[workspaceId]/w/hooks/use-import-workflow'
import { useFolders } from '@/hooks/queries/folders'
import { useFolderStore } from '@/stores/folders/store'
import type { FolderTreeNode } from '@/stores/folders/types'

@@ -25,15 +24,13 @@ const TREE_SPACING = {
interface WorkflowListProps {
  regularWorkflows: WorkflowMetadata[]
  isLoading?: boolean
  isImporting: boolean
  setIsImporting: (value: boolean) => void
  handleFileChange: (event: React.ChangeEvent<HTMLInputElement>) => void
  fileInputRef: React.RefObject<HTMLInputElement | null>
  scrollContainerRef: React.RefObject<HTMLDivElement | null>
}

/**
 * WorkflowList component displays workflows organized by folders with drag-and-drop support.
 * Uses the workflow import hook for handling JSON imports.
 *
 * @param props - Component props
 * @returns Workflow list with folders and drag-drop support

@@ -41,8 +38,7 @@ interface WorkflowListProps {
export function WorkflowList({
  regularWorkflows,
  isLoading = false,
  isImporting,
  setIsImporting,
  handleFileChange,
  fileInputRef,
  scrollContainerRef,
}: WorkflowListProps) {

@@ -65,9 +61,6 @@ export function WorkflowList({
  createFolderHeaderHoverHandlers,
} = useDragDrop()

// Workflow import hook
const { handleFileChange } = useImportWorkflow({ workspaceId })

// Set scroll container when ref changes
useEffect(() => {
  if (scrollContainerRef.current) {

@@ -2,10 +2,10 @@

import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
import { createLogger } from '@sim/logger'
import { ArrowDown, Database, HelpCircle, Layout, Plus, Search, Settings } from 'lucide-react'
import { Database, HelpCircle, Layout, Plus, Search, Settings } from 'lucide-react'
import Link from 'next/link'
import { useParams, usePathname, useRouter } from 'next/navigation'
import { Button, FolderPlus, Library, Tooltip } from '@/components/emcn'
import { Button, Download, FolderPlus, Library, Loader, Tooltip } from '@/components/emcn'
import { useSession } from '@/lib/auth/auth-client'
import { getEnv, isTruthy } from '@/lib/core/config/env'
import { useRegisterGlobalCommands } from '@/app/workspace/[workspaceId]/providers/global-commands-provider'

@@ -30,6 +30,7 @@ import {
import {
  useDuplicateWorkspace,
  useExportWorkspace,
  useImportWorkflow,
  useImportWorkspace,
} from '@/app/workspace/[workspaceId]/w/hooks'
import { usePermissionConfig } from '@/hooks/use-permission-config'

@@ -85,9 +86,11 @@ export function Sidebar() {
  const isCollapsed = hasHydrated ? isCollapsedStore : false
  const isOnWorkflowPage = !!workflowId

  const [isImporting, setIsImporting] = useState(false)
  const workspaceFileInputRef = useRef<HTMLInputElement>(null)

  const { isImporting, handleFileChange: handleImportFileChange } = useImportWorkflow({
    workspaceId,
  })
  const { isImporting: isImportingWorkspace, handleImportWorkspace: importWorkspace } =
    useImportWorkspace()
  const { handleExportWorkspace: exportWorkspace } = useExportWorkspace()

@@ -213,7 +216,7 @@ export function Sidebar() {
  }, [activeNavItemHref])

  const { handleDuplicateWorkspace: duplicateWorkspace } = useDuplicateWorkspace({
    getWorkspaceId: () => workspaceId,
    workspaceId,
  })

  const searchModalWorkflows = useMemo(

@@ -565,21 +568,31 @@ export function Sidebar() {
  Workflows
</div>
<div className='flex items-center justify-center gap-[10px]'>
  <Tooltip.Root>
    <Tooltip.Trigger asChild>
      <Button
        variant='ghost'
        className='translate-y-[-0.25px] p-[1px]'
        onClick={handleImportWorkflow}
        disabled={isImporting || !canEdit}
      >
        <ArrowDown className='h-[14px] w-[14px]' />
      </Button>
    </Tooltip.Trigger>
    <Tooltip.Content>
      <p>{isImporting ? 'Importing workflow...' : 'Import workflow'}</p>
    </Tooltip.Content>
  </Tooltip.Root>
  {isImporting ? (
    <Button
      variant='ghost'
      className='translate-y-[-0.25px] p-[1px]'
      disabled={!canEdit || isImporting}
    >
      <Loader className='h-[14px] w-[14px]' animate />
    </Button>
  ) : (
    <Tooltip.Root>
      <Tooltip.Trigger asChild>
        <Button
          variant='ghost'
          className='translate-y-[-0.25px] p-[1px]'
          onClick={handleImportWorkflow}
          disabled={!canEdit}
        >
          <Download className='h-[14px] w-[14px]' />
        </Button>
      </Tooltip.Trigger>
      <Tooltip.Content>
        <p>Import workflows</p>
      </Tooltip.Content>
    </Tooltip.Root>
  )}
<Tooltip.Root>
|
||||
<Tooltip.Trigger asChild>
|
||||
<Button
|
||||
@@ -622,8 +635,7 @@ export function Sidebar() {
|
||||
<WorkflowList
|
||||
regularWorkflows={regularWorkflows}
|
||||
isLoading={isLoading}
|
||||
isImporting={isImporting}
|
||||
setIsImporting={setIsImporting}
|
||||
handleFileChange={handleImportFileChange}
|
||||
fileInputRef={fileInputRef}
|
||||
scrollContainerRef={scrollContainerRef}
|
||||
/>
|
||||
|
||||
@@ -4,6 +4,7 @@ export { useDeleteWorkflow } from './use-delete-workflow'
export { useDuplicateFolder } from './use-duplicate-folder'
export { useDuplicateWorkflow } from './use-duplicate-workflow'
export { useDuplicateWorkspace } from './use-duplicate-workspace'
export { useExportFolder } from './use-export-folder'
export { useExportWorkflow } from './use-export-workflow'
export { useExportWorkspace } from './use-export-workspace'
export { useImportWorkflow } from './use-import-workflow'
@@ -11,10 +11,9 @@ interface UseDeleteFolderProps {
*/
workspaceId: string
/**
* Function that returns the folder ID(s) to delete
* This function is called when deletion occurs to get fresh selection state
* The folder ID(s) to delete
*/
getFolderIds: () => string | string[]
folderIds: string | string[]
/**
* Optional callback after successful deletion
*/
@@ -24,17 +23,10 @@ interface UseDeleteFolderProps {
/**
* Hook for managing folder deletion.
*
* Handles:
* - Single or bulk folder deletion
* - Calling delete API for each folder
* - Loading state management
* - Error handling and logging
* - Clearing selection after deletion
*
* @param props - Hook configuration
* @returns Delete folder handlers and state
*/
export function useDeleteFolder({ workspaceId, getFolderIds, onSuccess }: UseDeleteFolderProps) {
export function useDeleteFolder({ workspaceId, folderIds, onSuccess }: UseDeleteFolderProps) {
const deleteFolderMutation = useDeleteFolderMutation()
const [isDeleting, setIsDeleting] = useState(false)

@@ -46,23 +38,18 @@ export function useDeleteFolder({ workspaceId, getFolderIds, onSuccess }: UseDel
return
}

if (!folderIds) {
return
}

setIsDeleting(true)
try {
// Get fresh folder IDs at deletion time
const folderIdsOrId = getFolderIds()
if (!folderIdsOrId) {
return
}
const folderIdsToDelete = Array.isArray(folderIds) ? folderIds : [folderIds]

// Normalize to array for consistent handling
const folderIdsToDelete = Array.isArray(folderIdsOrId) ? folderIdsOrId : [folderIdsOrId]

// Delete each folder sequentially
for (const folderId of folderIdsToDelete) {
await deleteFolderMutation.mutateAsync({ id: folderId, workspaceId })
}

// Clear selection after successful deletion
const { clearSelection } = useFolderStore.getState()
clearSelection()

@@ -74,7 +61,7 @@ export function useDeleteFolder({ workspaceId, getFolderIds, onSuccess }: UseDel
} finally {
setIsDeleting(false)
}
}, [getFolderIds, isDeleting, deleteFolderMutation, workspaceId, onSuccess])
}, [folderIds, isDeleting, deleteFolderMutation, workspaceId, onSuccess])

return {
isDeleting,
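
The hunk above changes `useDeleteFolder` from pulling IDs through a `getFolderIds` callback to receiving a plain `folderIds` value, so freshness now comes from ordinary re-renders rather than a call at deletion time. A minimal sketch of the new call shape, assuming a caller that already tracks the selected folder IDs (`selectedFolderIds` and the returned handler name are illustrative; the hunk only shows `isDeleting` in the return value):

```ts
import { useDeleteFolder } from '@/app/workspace/[workspaceId]/w/hooks'

// Hypothetical caller: selectedFolderIds comes from the folder selection store.
const { isDeleting, handleDeleteFolder } = useDeleteFolder({
  workspaceId,
  folderIds: selectedFolderIds, // string | string[], normalized to an array inside the hook
  onSuccess: () => logger.info('Folders deleted'),
})
```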
@@ -12,10 +12,9 @@ interface UseDeleteWorkflowProps {
*/
workspaceId: string
/**
* Function that returns the workflow ID(s) to delete
* This function is called when deletion occurs to get fresh selection state
* Workflow ID(s) to delete
*/
getWorkflowIds: () => string | string[]
workflowIds: string | string[]
/**
* Whether the active workflow is being deleted
* Can be a boolean or a function that receives the workflow IDs
@@ -30,20 +29,12 @@ interface UseDeleteWorkflowProps {
/**
* Hook for managing workflow deletion with navigation logic.
*
* Handles:
* - Single or bulk workflow deletion
* - Finding next workflow to navigate to
* - Navigating before deletion (if active workflow)
* - Removing workflow(s) from registry
* - Loading state management
* - Error handling and logging
*
* @param props - Hook configuration
* @returns Delete workflow handlers and state
*/
export function useDeleteWorkflow({
workspaceId,
getWorkflowIds,
workflowIds,
isActive = false,
onSuccess,
}: UseDeleteWorkflowProps) {
@@ -59,30 +50,21 @@ export function useDeleteWorkflow({
return
}

if (!workflowIds) {
return
}

setIsDeleting(true)
try {
// Get fresh workflow IDs at deletion time
const workflowIdsOrId = getWorkflowIds()
if (!workflowIdsOrId) {
return
}
const workflowIdsToDelete = Array.isArray(workflowIds) ? workflowIds : [workflowIds]

// Normalize to array for consistent handling
const workflowIdsToDelete = Array.isArray(workflowIdsOrId)
? workflowIdsOrId
: [workflowIdsOrId]

// Determine if active workflow is being deleted
const isActiveWorkflowBeingDeleted =
typeof isActive === 'function' ? isActive(workflowIdsToDelete) : isActive

// Find next workflow to navigate to (if active workflow is being deleted)
const sidebarWorkflows = Object.values(workflows).filter((w) => w.workspaceId === workspaceId)

// Find which specific workflow is the active one (if any in the deletion list)
let activeWorkflowId: string | null = null
if (isActiveWorkflowBeingDeleted && typeof isActive === 'function') {
// Check each workflow being deleted to find which one is active
activeWorkflowId =
workflowIdsToDelete.find((id) => isActive([id])) || workflowIdsToDelete[0]
} else {
@@ -93,13 +75,11 @@ export function useDeleteWorkflow({

let nextWorkflowId: string | null = null
if (isActiveWorkflowBeingDeleted && sidebarWorkflows.length > workflowIdsToDelete.length) {
// Find the first workflow that's not being deleted
const remainingWorkflows = sidebarWorkflows.filter(
(w) => !workflowIdsToDelete.includes(w.id)
)

if (remainingWorkflows.length > 0) {
// Try to find the next workflow after the current one
const workflowsAfterCurrent = remainingWorkflows.filter((w) => {
const idx = sidebarWorkflows.findIndex((sw) => sw.id === w.id)
return idx > currentIndex
@@ -108,13 +88,11 @@ export function useDeleteWorkflow({
if (workflowsAfterCurrent.length > 0) {
nextWorkflowId = workflowsAfterCurrent[0].id
} else {
// Otherwise, use the first remaining workflow
nextWorkflowId = remainingWorkflows[0].id
}
}
}

// Navigate first if this is the active workflow
if (isActiveWorkflowBeingDeleted) {
if (nextWorkflowId) {
router.push(`/workspace/${workspaceId}/w/${nextWorkflowId}`)
@@ -123,10 +101,8 @@ export function useDeleteWorkflow({
}
}

// Delete all workflows
await Promise.all(workflowIdsToDelete.map((id) => removeWorkflow(id)))

// Clear selection after successful deletion
const { clearSelection } = useFolderStore.getState()
clearSelection()

@@ -138,16 +114,7 @@ export function useDeleteWorkflow({
} finally {
setIsDeleting(false)
}
}, [
getWorkflowIds,
isDeleting,
workflows,
workspaceId,
isActive,
router,
removeWorkflow,
onSuccess,
])
}, [workflowIds, isDeleting, workflows, workspaceId, isActive, router, removeWorkflow, onSuccess])

return {
isDeleting,
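
`useDeleteWorkflow` follows the same prop-based pattern, and its `isActive` option can be a predicate over the candidate IDs, which is how the hook finds the active workflow during bulk deletes before navigating away. A sketch of the predicate form, assuming the caller knows the currently open workflow ID (`activeWorkflowId` is a stand-in, and the returned handler name is an assumption; the hunk only shows `isDeleting` in the return value):

```ts
const { isDeleting, handleDeleteWorkflow } = useDeleteWorkflow({
  workspaceId,
  workflowIds: selectedWorkflowIds,
  // Predicate form: true when the candidate list contains the open workflow,
  // so the hook navigates to a remaining workflow before deleting.
  isActive: (ids) => ids.includes(activeWorkflowId),
})
```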
@@ -7,7 +7,10 @@ const logger = createLogger('useDuplicateFolder')

interface UseDuplicateFolderProps {
workspaceId: string
getFolderIds: () => string | string[]
/**
* The folder ID(s) to duplicate
*/
folderIds: string | string[]
onSuccess?: () => void
}

@@ -17,11 +20,7 @@ interface UseDuplicateFolderProps {
* @param props - Hook configuration
* @returns Duplicate folder handlers and state
*/
export function useDuplicateFolder({
workspaceId,
getFolderIds,
onSuccess,
}: UseDuplicateFolderProps) {
export function useDuplicateFolder({ workspaceId, folderIds, onSuccess }: UseDuplicateFolderProps) {
const duplicateFolderMutation = useDuplicateFolderMutation()
const [isDuplicating, setIsDuplicating] = useState(false)

@@ -46,21 +45,17 @@ export function useDuplicateFolder({
return
}

if (!folderIds) {
return
}

setIsDuplicating(true)
try {
// Get fresh folder IDs at duplication time
const folderIdsOrId = getFolderIds()
if (!folderIdsOrId) {
return
}

// Normalize to array for consistent handling
const folderIdsToDuplicate = Array.isArray(folderIdsOrId) ? folderIdsOrId : [folderIdsOrId]
const folderIdsToDuplicate = Array.isArray(folderIds) ? folderIds : [folderIds]

const duplicatedIds: string[] = []
const folderStore = useFolderStore.getState()

// Duplicate each folder sequentially
for (const folderId of folderIdsToDuplicate) {
const folder = folderStore.getFolderById(folderId)

@@ -72,7 +67,6 @@ export function useDuplicateFolder({
const siblingNames = new Set(
folderStore.getChildFolders(folder.parentId).map((sibling) => sibling.name)
)
// Avoid colliding with the original folder name
siblingNames.add(folder.name)

const duplicateName = generateDuplicateName(folder.name, siblingNames)
@@ -90,7 +84,6 @@ export function useDuplicateFolder({
}
}

// Clear selection after successful duplication
const { clearSelection } = useFolderStore.getState()
clearSelection()

@@ -107,7 +100,7 @@ export function useDuplicateFolder({
setIsDuplicating(false)
}
}, [
getFolderIds,
folderIds,
generateDuplicateName,
isDuplicating,
duplicateFolderMutation,
@@ -13,11 +13,6 @@ interface UseDuplicateWorkflowProps {
* Current workspace ID
*/
workspaceId: string
/**
* Function that returns the workflow ID(s) to duplicate
* This function is called when duplication occurs to get fresh selection state
*/
getWorkflowIds: () => string | string[]
/**
* Optional callback after successful duplication
*/
@@ -27,89 +22,72 @@ interface UseDuplicateWorkflowProps {
/**
* Hook for managing workflow duplication with optimistic updates.
*
* Handles:
* - Single or bulk workflow duplication
* - Optimistic UI updates (shows new workflow immediately)
* - Automatic rollback on failure
* - Loading state management
* - Error handling and logging
* - Clearing selection after duplication
* - Navigation to duplicated workflow (single only)
*
* @param props - Hook configuration
* @returns Duplicate workflow handlers and state
*/
export function useDuplicateWorkflow({
workspaceId,
getWorkflowIds,
onSuccess,
}: UseDuplicateWorkflowProps) {
export function useDuplicateWorkflow({ workspaceId, onSuccess }: UseDuplicateWorkflowProps) {
const router = useRouter()
const { workflows } = useWorkflowRegistry()
const duplicateMutation = useDuplicateWorkflowMutation()

/**
* Duplicate the workflow(s)
* @param workflowIds - The workflow ID(s) to duplicate
*/
const handleDuplicateWorkflow = useCallback(async () => {
if (duplicateMutation.isPending) {
return
}
const handleDuplicateWorkflow = useCallback(
async (workflowIds: string | string[]) => {
if (!workflowIds || (Array.isArray(workflowIds) && workflowIds.length === 0)) {
return
}

// Get fresh workflow IDs at duplication time
const workflowIdsOrId = getWorkflowIds()
if (!workflowIdsOrId) {
return
}
if (duplicateMutation.isPending) {
return
}

// Normalize to array for consistent handling
const workflowIdsToDuplicate = Array.isArray(workflowIdsOrId)
? workflowIdsOrId
: [workflowIdsOrId]
const workflowIdsToDuplicate = Array.isArray(workflowIds) ? workflowIds : [workflowIds]

const duplicatedIds: string[] = []
const duplicatedIds: string[] = []

try {
// Duplicate each workflow sequentially
for (const sourceId of workflowIdsToDuplicate) {
const sourceWorkflow = workflows[sourceId]
if (!sourceWorkflow) {
logger.warn(`Workflow ${sourceId} not found, skipping`)
continue
try {
for (const sourceId of workflowIdsToDuplicate) {
const sourceWorkflow = workflows[sourceId]
if (!sourceWorkflow) {
logger.warn(`Workflow ${sourceId} not found, skipping`)
continue
}

const result = await duplicateMutation.mutateAsync({
workspaceId,
sourceId,
name: `${sourceWorkflow.name} (Copy)`,
description: sourceWorkflow.description,
color: getNextWorkflowColor(),
folderId: sourceWorkflow.folderId,
})

duplicatedIds.push(result.id)
}

const result = await duplicateMutation.mutateAsync({
workspaceId,
sourceId,
name: `${sourceWorkflow.name} (Copy)`,
description: sourceWorkflow.description,
color: getNextWorkflowColor(),
folderId: sourceWorkflow.folderId,
const { clearSelection } = useFolderStore.getState()
clearSelection()

logger.info('Workflow(s) duplicated successfully', {
workflowIds: workflowIdsToDuplicate,
duplicatedIds,
})

duplicatedIds.push(result.id)
if (duplicatedIds.length === 1) {
router.push(`/workspace/${workspaceId}/w/${duplicatedIds[0]}`)
}

onSuccess?.()
} catch (error) {
logger.error('Error duplicating workflow(s):', { error })
throw error
}

// Clear selection after successful duplication
const { clearSelection } = useFolderStore.getState()
clearSelection()

logger.info('Workflow(s) duplicated successfully', {
workflowIds: workflowIdsToDuplicate,
duplicatedIds,
})

// Navigate to duplicated workflow if single duplication
if (duplicatedIds.length === 1) {
router.push(`/workspace/${workspaceId}/w/${duplicatedIds[0]}`)
}

onSuccess?.()
} catch (error) {
logger.error('Error duplicating workflow(s):', { error })
throw error
}
}, [getWorkflowIds, duplicateMutation, workflows, workspaceId, router, onSuccess])
},
[duplicateMutation, workflows, workspaceId, router, onSuccess]
)

return {
isDuplicating: duplicateMutation.isPending,
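
After this refactor `handleDuplicateWorkflow` receives the ID(s) as an argument rather than through a `getWorkflowIds` callback, so a single hook instance can serve any selection. A sketch of the new call shape (the workflow ID values are placeholders):

```ts
const { isDuplicating, handleDuplicateWorkflow } = useDuplicateWorkflow({ workspaceId })

// Duplicating one workflow navigates to the copy; bulk duplication does not.
await handleDuplicateWorkflow('wf_123')
await handleDuplicateWorkflow(['wf_123', 'wf_456'])
```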
@@ -6,10 +6,9 @@ const logger = createLogger('useDuplicateWorkspace')

interface UseDuplicateWorkspaceProps {
/**
* Function that returns the workspace ID to duplicate
* This function is called when duplication occurs to get fresh state
* The workspace ID to duplicate
*/
getWorkspaceId: () => string | null
workspaceId: string | null
/**
* Optional callback after successful duplication
*/
@@ -19,17 +18,10 @@ interface UseDuplicateWorkspaceProps {
/**
* Hook for managing workspace duplication.
*
* Handles:
* - Workspace duplication
* - Calling duplicate API
* - Loading state management
* - Error handling and logging
* - Navigation to duplicated workspace
*
* @param props - Hook configuration
* @returns Duplicate workspace handlers and state
*/
export function useDuplicateWorkspace({ getWorkspaceId, onSuccess }: UseDuplicateWorkspaceProps) {
export function useDuplicateWorkspace({ workspaceId, onSuccess }: UseDuplicateWorkspaceProps) {
const router = useRouter()
const [isDuplicating, setIsDuplicating] = useState(false)

@@ -38,18 +30,12 @@ export function useDuplicateWorkspace({ getWorkspaceId, onSuccess }: UseDuplicat
*/
const handleDuplicateWorkspace = useCallback(
async (workspaceName: string) => {
if (isDuplicating) {
if (isDuplicating || !workspaceId) {
return
}

setIsDuplicating(true)
try {
// Get fresh workspace ID at duplication time
const workspaceId = getWorkspaceId()
if (!workspaceId) {
return
}

const response = await fetch(`/api/workspaces/${workspaceId}/duplicate`, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
@@ -70,7 +56,6 @@ export function useDuplicateWorkspace({ getWorkspaceId, onSuccess }: UseDuplicat
workflowsCount: duplicatedWorkspace.workflowsCount,
})

// Navigate to duplicated workspace
router.push(`/workspace/${duplicatedWorkspace.id}/w`)

onSuccess?.()
@@ -83,7 +68,7 @@ export function useDuplicateWorkspace({ getWorkspaceId, onSuccess }: UseDuplicat
setIsDuplicating(false)
}
},
[getWorkspaceId, isDuplicating, router, onSuccess]
[workspaceId, isDuplicating, router, onSuccess]
)

return {
@@ -0,0 +1,237 @@
import { useCallback, useMemo, useState } from 'react'
import { createLogger } from '@sim/logger'
import JSZip from 'jszip'
import { sanitizeForExport } from '@/lib/workflows/sanitization/json-sanitizer'
import { useFolderStore } from '@/stores/folders/store'
import type { WorkflowFolder } from '@/stores/folders/types'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import type { WorkflowMetadata } from '@/stores/workflows/registry/types'
import type { Variable } from '@/stores/workflows/workflow/types'

const logger = createLogger('useExportFolder')

interface UseExportFolderProps {
  /**
   * Current workspace ID
   */
  workspaceId: string
  /**
   * The folder ID to export
   */
  folderId: string
  /**
   * Optional callback after successful export
   */
  onSuccess?: () => void
}

/**
 * Recursively collects all workflow IDs within a folder and its subfolders.
 *
 * @param folderId - The folder ID to collect workflows from
 * @param workflows - All workflows in the workspace
 * @param folders - All folders in the workspace
 * @returns Array of workflow IDs
 */
function collectWorkflowsInFolder(
  folderId: string,
  workflows: Record<string, WorkflowMetadata>,
  folders: Record<string, WorkflowFolder>
): string[] {
  const workflowIds: string[] = []

  for (const workflow of Object.values(workflows)) {
    if (workflow.folderId === folderId) {
      workflowIds.push(workflow.id)
    }
  }

  for (const folder of Object.values(folders)) {
    if (folder.parentId === folderId) {
      const childWorkflowIds = collectWorkflowsInFolder(folder.id, workflows, folders)
      workflowIds.push(...childWorkflowIds)
    }
  }

  return workflowIds
}

/**
 * Hook for managing folder export to ZIP.
 *
 * @param props - Hook configuration
 * @returns Export folder handlers and state
 */
export function useExportFolder({ workspaceId, folderId, onSuccess }: UseExportFolderProps) {
  const { workflows } = useWorkflowRegistry()
  const { folders } = useFolderStore()
  const [isExporting, setIsExporting] = useState(false)

  /**
   * Check if the folder has any workflows (recursively)
   */
  const hasWorkflows = useMemo(() => {
    if (!folderId) return false
    return collectWorkflowsInFolder(folderId, workflows, folders).length > 0
  }, [folderId, workflows, folders])

  /**
   * Download file helper
   */
  const downloadFile = (content: Blob, filename: string, mimeType = 'application/zip') => {
    try {
      const blob = content instanceof Blob ? content : new Blob([content], { type: mimeType })
      const url = URL.createObjectURL(blob)
      const a = document.createElement('a')
      a.href = url
      a.download = filename
      document.body.appendChild(a)
      a.click()
      document.body.removeChild(a)
      URL.revokeObjectURL(url)
    } catch (error) {
      logger.error('Failed to download file:', error)
    }
  }

  /**
   * Export all workflows in the folder (including nested subfolders) to ZIP
   */
  const handleExportFolder = useCallback(async () => {
    if (isExporting) {
      return
    }

    if (!folderId) {
      logger.warn('No folder ID provided for export')
      return
    }

    setIsExporting(true)
    try {
      const folderStore = useFolderStore.getState()
      const folder = folderStore.getFolderById(folderId)

      if (!folder) {
        logger.warn('Folder not found for export', { folderId })
        return
      }

      const workflowIdsToExport = collectWorkflowsInFolder(folderId, workflows, folderStore.folders)

      if (workflowIdsToExport.length === 0) {
        logger.warn('No workflows found in folder to export', { folderId, folderName: folder.name })
        return
      }

      logger.info('Starting folder export', {
        folderId,
        folderName: folder.name,
        workflowCount: workflowIdsToExport.length,
      })

      const exportedWorkflows: Array<{ name: string; content: string }> = []

      for (const workflowId of workflowIdsToExport) {
        try {
          const workflow = workflows[workflowId]
          if (!workflow) {
            logger.warn(`Workflow ${workflowId} not found in registry`)
            continue
          }

          const workflowResponse = await fetch(`/api/workflows/${workflowId}`)
          if (!workflowResponse.ok) {
            logger.error(`Failed to fetch workflow ${workflowId}`)
            continue
          }

          const { data: workflowData } = await workflowResponse.json()
          if (!workflowData?.state) {
            logger.warn(`Workflow ${workflowId} has no state`)
            continue
          }

          const variablesResponse = await fetch(`/api/workflows/${workflowId}/variables`)
          let workflowVariables: Record<string, Variable> | undefined
          if (variablesResponse.ok) {
            const variablesData = await variablesResponse.json()
            workflowVariables = variablesData?.data
          }

          const workflowState = {
            ...workflowData.state,
            metadata: {
              name: workflow.name,
              description: workflow.description,
              color: workflow.color,
              exportedAt: new Date().toISOString(),
            },
            variables: workflowVariables,
          }

          const exportState = sanitizeForExport(workflowState)
          const jsonString = JSON.stringify(exportState, null, 2)

          exportedWorkflows.push({
            name: workflow.name,
            content: jsonString,
          })

          logger.info(`Workflow ${workflowId} exported successfully`)
        } catch (error) {
          logger.error(`Failed to export workflow ${workflowId}:`, error)
        }
      }

      if (exportedWorkflows.length === 0) {
        logger.warn('No workflows were successfully exported from folder', {
          folderId,
          folderName: folder.name,
        })
        return
      }

      const zip = new JSZip()
      const seenFilenames = new Set<string>()

      for (const exportedWorkflow of exportedWorkflows) {
        const baseName = exportedWorkflow.name.replace(/[^a-z0-9]/gi, '-')
        let filename = `${baseName}.json`
        let counter = 1
        while (seenFilenames.has(filename.toLowerCase())) {
          filename = `${baseName}-${counter}.json`
          counter++
        }
        seenFilenames.add(filename.toLowerCase())
        zip.file(filename, exportedWorkflow.content)
      }

      const zipBlob = await zip.generateAsync({ type: 'blob' })
      const zipFilename = `${folder.name.replace(/[^a-z0-9]/gi, '-')}-export.zip`
      downloadFile(zipBlob, zipFilename, 'application/zip')

      const { clearSelection } = useFolderStore.getState()
      clearSelection()

      logger.info('Folder exported successfully', {
        folderId,
        folderName: folder.name,
        workflowCount: exportedWorkflows.length,
      })

      onSuccess?.()
    } catch (error) {
      logger.error('Error exporting folder:', { error })
      throw error
    } finally {
      setIsExporting(false)
    }
  }, [folderId, isExporting, workflows, folders, onSuccess])

  return {
    isExporting,
    hasWorkflows,
    handleExportFolder,
  }
}
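
A usage sketch for the new `useExportFolder` hook, assuming a folder context-menu item (`isExporting`, `hasWorkflows`, and `handleExportFolder` are the hook's actual return values; the `MenuItem` wiring is illustrative):

```tsx
const { isExporting, hasWorkflows, handleExportFolder } = useExportFolder({
  workspaceId,
  folderId: folder.id,
})

// Disable export for folders with no workflows anywhere in their subtree;
// a successful export downloads `<folder-name>-export.zip`.
<MenuItem disabled={!hasWorkflows || isExporting} onClick={handleExportFolder}>
  Export folder
</MenuItem>
```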
@@ -13,11 +13,6 @@ interface UseExportWorkflowProps {
* Current workspace ID
*/
workspaceId: string
/**
* Function that returns the workflow ID(s) to export
* This function is called when export occurs to get fresh selection state
*/
getWorkflowIds: () => string | string[]
/**
* Optional callback after successful export
*/
@@ -27,23 +22,10 @@ interface UseExportWorkflowProps {
/**
* Hook for managing workflow export to JSON.
*
* Handles:
* - Single or bulk workflow export
* - Fetching workflow data and variables from API
* - Sanitizing workflow state for export
* - Downloading as JSON file(s)
* - Loading state management
* - Error handling and logging
* - Clearing selection after export
*
* @param props - Hook configuration
* @returns Export workflow handlers and state
*/
export function useExportWorkflow({
workspaceId,
getWorkflowIds,
onSuccess,
}: UseExportWorkflowProps) {
export function useExportWorkflow({ workspaceId, onSuccess }: UseExportWorkflowProps) {
const { workflows } = useWorkflowRegistry()
const [isExporting, setIsExporting] = useState(false)

@@ -75,130 +57,129 @@ export function useExportWorkflow({
* - Single workflow: exports as JSON file
* - Multiple workflows: exports as ZIP file containing all JSON files
* Fetches workflow data from API to support bulk export of non-active workflows
* @param workflowIds - The workflow ID(s) to export
*/
const handleExportWorkflow = useCallback(async () => {
if (isExporting) {
return
}

setIsExporting(true)
try {
// Get fresh workflow IDs at export time
const workflowIdsOrId = getWorkflowIds()
if (!workflowIdsOrId) {
const handleExportWorkflow = useCallback(
async (workflowIds: string | string[]) => {
if (isExporting) {
return
}

// Normalize to array for consistent handling
const workflowIdsToExport = Array.isArray(workflowIdsOrId)
? workflowIdsOrId
: [workflowIdsOrId]
if (!workflowIds || (Array.isArray(workflowIds) && workflowIds.length === 0)) {
return
}

logger.info('Starting workflow export', {
workflowIdsToExport,
count: workflowIdsToExport.length,
})
setIsExporting(true)
try {
const workflowIdsToExport = Array.isArray(workflowIds) ? workflowIds : [workflowIds]

const exportedWorkflows: Array<{ name: string; content: string }> = []
logger.info('Starting workflow export', {
workflowIdsToExport,
count: workflowIdsToExport.length,
})

// Export each workflow
for (const workflowId of workflowIdsToExport) {
try {
const workflow = workflows[workflowId]
if (!workflow) {
logger.warn(`Workflow ${workflowId} not found in registry`)
continue
}
const exportedWorkflows: Array<{ name: string; content: string }> = []

// Fetch workflow state from API
const workflowResponse = await fetch(`/api/workflows/${workflowId}`)
if (!workflowResponse.ok) {
logger.error(`Failed to fetch workflow ${workflowId}`)
continue
}
for (const workflowId of workflowIdsToExport) {
try {
const workflow = workflows[workflowId]
if (!workflow) {
logger.warn(`Workflow ${workflowId} not found in registry`)
continue
}

const { data: workflowData } = await workflowResponse.json()
if (!workflowData?.state) {
logger.warn(`Workflow ${workflowId} has no state`)
continue
}
const workflowResponse = await fetch(`/api/workflows/${workflowId}`)
if (!workflowResponse.ok) {
logger.error(`Failed to fetch workflow ${workflowId}`)
continue
}

// Fetch workflow variables (API returns Record format directly)
const variablesResponse = await fetch(`/api/workflows/${workflowId}/variables`)
let workflowVariables: Record<string, Variable> | undefined
if (variablesResponse.ok) {
const variablesData = await variablesResponse.json()
workflowVariables = variablesData?.data
}
const { data: workflowData } = await workflowResponse.json()
if (!workflowData?.state) {
logger.warn(`Workflow ${workflowId} has no state`)
continue
}

// Prepare export state
const workflowState = {
...workflowData.state,
metadata: {
const variablesResponse = await fetch(`/api/workflows/${workflowId}/variables`)
let workflowVariables: Record<string, Variable> | undefined
if (variablesResponse.ok) {
const variablesData = await variablesResponse.json()
workflowVariables = variablesData?.data
}

const workflowState = {
...workflowData.state,
metadata: {
name: workflow.name,
description: workflow.description,
color: workflow.color,
exportedAt: new Date().toISOString(),
},
variables: workflowVariables,
}

const exportState = sanitizeForExport(workflowState)
const jsonString = JSON.stringify(exportState, null, 2)

exportedWorkflows.push({
name: workflow.name,
description: workflow.description,
color: workflow.color,
exportedAt: new Date().toISOString(),
},
variables: workflowVariables,
content: jsonString,
})

logger.info(`Workflow ${workflowId} exported successfully`)
} catch (error) {
logger.error(`Failed to export workflow ${workflowId}:`, error)
}
}

if (exportedWorkflows.length === 0) {
logger.warn('No workflows were successfully exported')
return
}

if (exportedWorkflows.length === 1) {
const filename = `${exportedWorkflows[0].name.replace(/[^a-z0-9]/gi, '-')}.json`
downloadFile(exportedWorkflows[0].content, filename, 'application/json')
} else {
const zip = new JSZip()
const seenFilenames = new Set<string>()

for (const exportedWorkflow of exportedWorkflows) {
const baseName = exportedWorkflow.name.replace(/[^a-z0-9]/gi, '-')
let filename = `${baseName}.json`
let counter = 1
while (seenFilenames.has(filename.toLowerCase())) {
filename = `${baseName}-${counter}.json`
counter++
}
seenFilenames.add(filename.toLowerCase())
zip.file(filename, exportedWorkflow.content)
}

const exportState = sanitizeForExport(workflowState)
const jsonString = JSON.stringify(exportState, null, 2)

exportedWorkflows.push({
name: workflow.name,
content: jsonString,
})

logger.info(`Workflow ${workflowId} exported successfully`)
} catch (error) {
logger.error(`Failed to export workflow ${workflowId}:`, error)
}
}

if (exportedWorkflows.length === 0) {
logger.warn('No workflows were successfully exported')
return
}

// Download as single JSON or ZIP depending on count
if (exportedWorkflows.length === 1) {
// Single workflow - download as JSON
const filename = `${exportedWorkflows[0].name.replace(/[^a-z0-9]/gi, '-')}.json`
downloadFile(exportedWorkflows[0].content, filename, 'application/json')
} else {
// Multiple workflows - download as ZIP
const zip = new JSZip()

for (const exportedWorkflow of exportedWorkflows) {
const filename = `${exportedWorkflow.name.replace(/[^a-z0-9]/gi, '-')}.json`
zip.file(filename, exportedWorkflow.content)
const zipBlob = await zip.generateAsync({ type: 'blob' })
const zipFilename = `workflows-export-${Date.now()}.zip`
downloadFile(zipBlob, zipFilename, 'application/zip')
}

const zipBlob = await zip.generateAsync({ type: 'blob' })
const zipFilename = `workflows-export-${Date.now()}.zip`
downloadFile(zipBlob, zipFilename, 'application/zip')
const { clearSelection } = useFolderStore.getState()
clearSelection()

logger.info('Workflow(s) exported successfully', {
workflowIds: workflowIdsToExport,
count: exportedWorkflows.length,
format: exportedWorkflows.length === 1 ? 'JSON' : 'ZIP',
})

onSuccess?.()
} catch (error) {
logger.error('Error exporting workflow(s):', { error })
throw error
} finally {
setIsExporting(false)
}

// Clear selection after successful export
const { clearSelection } = useFolderStore.getState()
clearSelection()

logger.info('Workflow(s) exported successfully', {
workflowIds: workflowIdsToExport,
count: exportedWorkflows.length,
format: exportedWorkflows.length === 1 ? 'JSON' : 'ZIP',
})

onSuccess?.()
} catch (error) {
logger.error('Error exporting workflow(s):', { error })
throw error
} finally {
setIsExporting(false)
}
}, [getWorkflowIds, isExporting, workflows, onSuccess])
},
[isExporting, workflows, onSuccess]
)

return {
isExporting,
@@ -44,21 +44,18 @@ export function useExportWorkspace({ onSuccess }: UseExportWorkspaceProps = {})
try {
logger.info('Exporting workspace', { workspaceId })

// Fetch all workflows in workspace
const workflowsResponse = await fetch(`/api/workflows?workspaceId=${workspaceId}`)
if (!workflowsResponse.ok) {
throw new Error('Failed to fetch workflows')
}
const { data: workflows } = await workflowsResponse.json()

// Fetch all folders in workspace
const foldersResponse = await fetch(`/api/folders?workspaceId=${workspaceId}`)
if (!foldersResponse.ok) {
throw new Error('Failed to fetch folders')
}
const foldersData = await foldersResponse.json()

// Export each workflow
const workflowsToExport: WorkflowExportData[] = []

for (const workflow of workflows) {
@@ -33,6 +33,7 @@ export function useImportWorkflow({ workspaceId }: UseImportWorkflowProps) {
const createWorkflowMutation = useCreateWorkflow()
const queryClient = useQueryClient()
const createFolderMutation = useCreateFolder()
const clearDiff = useWorkflowDiffStore((state) => state.clearDiff)
const [isImporting, setIsImporting] = useState(false)

/**
@@ -48,9 +49,8 @@ export function useImportWorkflow({ workspaceId }: UseImportWorkflowProps) {
}

const workflowName = extractWorkflowName(content, filename)
useWorkflowDiffStore.getState().clearDiff()
clearDiff()

// Extract color from metadata
const parsedContent = JSON.parse(content)
const workflowColor =
parsedContent.state?.metadata?.color || parsedContent.metadata?.color || '#3972F6'
@@ -63,7 +63,6 @@ export function useImportWorkflow({ workspaceId }: UseImportWorkflowProps) {
})
const newWorkflowId = result.id

// Update workflow color if we extracted one
if (workflowColor !== '#3972F6') {
await fetch(`/api/workflows/${newWorkflowId}`, {
method: 'PATCH',
@@ -72,16 +71,13 @@ export function useImportWorkflow({ workspaceId }: UseImportWorkflowProps) {
})
}

// Save workflow state
await fetch(`/api/workflows/${newWorkflowId}/state`, {
method: 'PUT',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify(workflowData),
})

// Save variables if any (handle both legacy Array and current Record formats)
if (workflowData.variables) {
// Convert to Record format for API (handles backwards compatibility with old Array exports)
const variablesArray = Array.isArray(workflowData.variables)
? workflowData.variables
: Object.values(workflowData.variables)
@@ -114,7 +110,7 @@ export function useImportWorkflow({ workspaceId }: UseImportWorkflowProps) {
logger.info(`Imported workflow: ${workflowName}`)
return newWorkflowId
},
[createWorkflowMutation, workspaceId]
[clearDiff, createWorkflowMutation, workspaceId]
)

/**
@@ -134,7 +130,6 @@ export function useImportWorkflow({ workspaceId }: UseImportWorkflowProps) {
const importedWorkflowIds: string[] = []

if (hasZip && fileArray.length === 1) {
// Import from ZIP - preserves folder structure
const zipFile = fileArray[0]
const { workflows: extractedWorkflows, metadata } = await extractWorkflowsFromZip(zipFile)

@@ -149,7 +144,6 @@ export function useImportWorkflow({ workspaceId }: UseImportWorkflowProps) {
try {
let targetFolderId = importFolder.id

// Recreate nested folder structure
if (workflow.folderPath.length > 0) {
const folderPathKey = workflow.folderPath.join('/')

@@ -187,7 +181,6 @@ export function useImportWorkflow({ workspaceId }: UseImportWorkflowProps) {
}
}
} else if (jsonFiles.length > 0) {
// Import multiple JSON files or single JSON
const extractedWorkflows = await extractWorkflowsFromFiles(jsonFiles)

for (const workflow of extractedWorkflows) {
@@ -200,22 +193,21 @@ export function useImportWorkflow({ workspaceId }: UseImportWorkflowProps) {
}
}

// Reload workflows and folders to show newly imported ones
await queryClient.invalidateQueries({ queryKey: workflowKeys.list(workspaceId) })
await queryClient.invalidateQueries({ queryKey: folderKeys.list(workspaceId) })

logger.info(`Import complete. Imported ${importedWorkflowIds.length} workflow(s)`)

// Navigate to first imported workflow if any
if (importedWorkflowIds.length > 0) {
router.push(`/workspace/${workspaceId}/w/${importedWorkflowIds[0]}`)
router.push(
`/workspace/${workspaceId}/w/${importedWorkflowIds[importedWorkflowIds.length - 1]}`
)
}
} catch (error) {
logger.error('Failed to import workflows:', error)
} finally {
setIsImporting(false)

// Reset file input
if (event.target) {
event.target.value = ''
}
@@ -21,15 +21,6 @@ interface UseImportWorkspaceProps {
/**
* Hook for managing workspace import from ZIP files.
*
* Handles:
* - Extracting workflows from ZIP file
* - Creating new workspace
* - Recreating folder structure
* - Importing all workflows with states and variables
* - Navigation to imported workspace
* - Loading state management
* - Error handling and logging
*
* @param props - Hook configuration
* @returns Import workspace handlers and state
*/
@@ -37,6 +28,7 @@ export function useImportWorkspace({ onSuccess }: UseImportWorkspaceProps = {})
const router = useRouter()
const [isImporting, setIsImporting] = useState(false)
const createFolderMutation = useCreateFolder()
const clearDiff = useWorkflowDiffStore((state) => state.clearDiff)

/**
* Handle workspace import from ZIP file
@@ -56,7 +48,6 @@ export function useImportWorkspace({ onSuccess }: UseImportWorkspaceProps = {})
try {
logger.info('Importing workspace from ZIP')

// Extract workflows from ZIP
const { workflows: extractedWorkflows, metadata } = await extractWorkflowsFromZip(zipFile)

if (extractedWorkflows.length === 0) {
@@ -64,7 +55,6 @@ export function useImportWorkspace({ onSuccess }: UseImportWorkspaceProps = {})
return
}

// Create new workspace
const workspaceName = metadata?.workspaceName || zipFile.name.replace(/\.zip$/i, '')
const createResponse = await fetch('/api/workspaces', {
method: 'POST',
@@ -81,7 +71,6 @@ export function useImportWorkspace({ onSuccess }: UseImportWorkspaceProps = {})

const folderMap = new Map<string, string>()

// Import workflows
for (const workflow of extractedWorkflows) {
try {
const { data: workflowData, errors: parseErrors } = parseWorkflowJson(workflow.content)
@@ -91,7 +80,6 @@ export function useImportWorkspace({ onSuccess }: UseImportWorkspaceProps = {})
continue
}

// Recreate folder structure
let targetFolderId: string | null = null
if (workflow.folderPath.length > 0) {
const folderPathKey = workflow.folderPath.join('/')
@@ -120,14 +108,12 @@ export function useImportWorkspace({ onSuccess }: UseImportWorkspaceProps = {})
}

const workflowName = extractWorkflowName(workflow.content, workflow.name)
useWorkflowDiffStore.getState().clearDiff()
clearDiff()

// Extract color from workflow metadata
const parsedContent = JSON.parse(workflow.content)
const workflowColor =
parsedContent.state?.metadata?.color || parsedContent.metadata?.color || '#3972F6'

// Create workflow
const createWorkflowResponse = await fetch('/api/workflows', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
@@ -147,7 +133,6 @@ export function useImportWorkspace({ onSuccess }: UseImportWorkspaceProps = {})

const newWorkflow = await createWorkflowResponse.json()

// Save workflow state
const stateResponse = await fetch(`/api/workflows/${newWorkflow.id}/state`, {
method: 'PUT',
headers: { 'Content-Type': 'application/json' },
@@ -159,9 +144,7 @@ export function useImportWorkspace({ onSuccess }: UseImportWorkspaceProps = {})
continue
}

// Save variables if any (handle both legacy Array and current Record formats)
if (workflowData.variables) {
// Convert to Record format for API (handles backwards compatibility with old Array exports)
const variablesArray = Array.isArray(workflowData.variables)
? workflowData.variables
: Object.values(workflowData.variables)
@@ -199,7 +182,6 @@ export function useImportWorkspace({ onSuccess }: UseImportWorkspaceProps = {})

logger.info(`Workspace import complete. Imported ${extractedWorkflows.length} workflows`)

// Navigate to new workspace
router.push(`/workspace/${newWorkspace.id}/w`)

onSuccess?.()
@@ -210,7 +192,7 @@ export function useImportWorkspace({ onSuccess }: UseImportWorkspaceProps = {})
setIsImporting(false)
}
},
[isImporting, router, onSuccess, createFolderMutation]
[isImporting, router, onSuccess, createFolderMutation, clearDiff]
)

return {
apps/sim/background/a2a-push-notification-delivery.ts (new file, 33 lines)
@@ -0,0 +1,33 @@
import type { TaskState } from '@a2a-js/sdk'
import { createLogger } from '@sim/logger'
import { task } from '@trigger.dev/sdk'
import { deliverPushNotification } from '@/lib/a2a/push-notifications'

const logger = createLogger('A2APushNotificationDelivery')

export interface A2APushNotificationParams {
  taskId: string
  state: TaskState
}

export const a2aPushNotificationTask = task({
  id: 'a2a-push-notification-delivery',
  retry: {
    maxAttempts: 5,
    minTimeoutInMs: 1000,
    maxTimeoutInMs: 60000,
    factor: 2,
  },
  run: async (params: A2APushNotificationParams) => {
    logger.info('Delivering A2A push notification', params)

    const success = await deliverPushNotification(params.taskId, params.state)

    if (!success) {
      throw new Error(`Failed to deliver push notification for task ${params.taskId}`)
    }

    logger.info('A2A push notification delivered successfully', params)
    return { success: true, taskId: params.taskId, state: params.state }
  },
})
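
Because `run` throws when delivery fails, the task retries with exponential backoff (1 s doubling to a 60 s cap, five attempts). A sketch of how a caller might enqueue it with the trigger.dev SDK (`tasks.trigger` is the SDK's standard enqueue call; the call site and payload values are assumptions based on the file above):

```ts
import { tasks } from '@trigger.dev/sdk'
import type { a2aPushNotificationTask } from '@/background/a2a-push-notification-delivery'

// Fire-and-forget enqueue: the retry policy lives inside the task definition.
await tasks.trigger<typeof a2aPushNotificationTask>('a2a-push-notification-delivery', {
  taskId: 'task_abc123', // hypothetical A2A task ID
  state: 'completed',
})
```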
apps/sim/blocks/blocks/a2a.ts (new file, 338 lines)
@@ -0,0 +1,338 @@
/**
 * A2A Block (v0.3)
 *
 * Enables interaction with external A2A-compatible agents.
 * Supports sending messages, querying tasks, cancelling tasks, discovering agents,
 * resubscribing to streams, and managing push notification webhooks.
 */

import { A2AIcon } from '@/components/icons'
import type { BlockConfig } from '@/blocks/types'
import type { ToolResponse } from '@/tools/types'

export interface A2AResponse extends ToolResponse {
  output: {
    /** Response content from the agent */
    content?: string
    /** Task ID */
    taskId?: string
    /** Context ID for conversation continuity */
    contextId?: string
    /** Task state */
    state?: string
    /** Structured output artifacts */
    artifacts?: Array<{
      name?: string
      description?: string
      parts: Array<{ kind: string; text?: string; data?: unknown }>
    }>
    /** Full message history */
    history?: Array<{
      role: 'user' | 'agent'
      parts: Array<{ kind: string; text?: string }>
    }>
    /** Whether cancellation was successful (cancel_task) */
    cancelled?: boolean
    /** Whether task is still running (resubscribe) */
    isRunning?: boolean
    /** Agent name (get_agent_card) */
    name?: string
    /** Agent description (get_agent_card) */
    description?: string
    /** Agent URL (get_agent_card) */
    url?: string
    /** Agent version (get_agent_card) */
    version?: string
    /** Agent capabilities (get_agent_card) */
    capabilities?: Record<string, boolean>
    /** Agent skills (get_agent_card) */
    skills?: Array<{ id: string; name: string; description?: string }>
    /** Agent authentication schemes (get_agent_card) */
    authentication?: { schemes: string[] }
    /** Push notification webhook URL */
    webhookUrl?: string
    /** Push notification token */
    token?: string
    /** Whether push notification config exists */
    exists?: boolean
    /** Operation success indicator */
    success?: boolean
  }
}

export const A2ABlock: BlockConfig<A2AResponse> = {
  type: 'a2a',
  name: 'A2A',
  description: 'Interact with external A2A-compatible agents',
  longDescription:
    'Use the A2A (Agent-to-Agent) protocol to interact with external AI agents. ' +
    'Send messages, query task status, cancel tasks, or discover agent capabilities. ' +
    'Compatible with any A2A-compliant agent including LangGraph, Google ADK, and other Sim Studio workflows.',
  docsLink: 'https://docs.sim.ai/blocks/a2a',
  category: 'tools',
  bgColor: '#4151B5',
  icon: A2AIcon,
  subBlocks: [
    {
      id: 'operation',
      title: 'Operation',
      type: 'dropdown',
      options: [
        { label: 'Send Message', id: 'send_message' },
        { label: 'Send Message (Streaming)', id: 'send_message_stream' },
        { label: 'Get Task', id: 'get_task' },
        { label: 'Cancel Task', id: 'cancel_task' },
        { label: 'Get Agent Card', id: 'get_agent_card' },
        { label: 'Resubscribe', id: 'resubscribe' },
        { label: 'Set Push Notification', id: 'set_push_notification' },
        { label: 'Get Push Notification', id: 'get_push_notification' },
        { label: 'Delete Push Notification', id: 'delete_push_notification' },
      ],
      defaultValue: 'send_message',
    },
    {
      id: 'agentUrl',
      title: 'Agent URL',
      type: 'short-input',
      placeholder: 'https://api.example.com/a2a/serve/agent-id',
      required: true,
      description: 'The A2A endpoint URL',
    },
    {
      id: 'message',
      title: 'Message',
      type: 'long-input',
      placeholder: 'Enter your message to the agent...',
      description: 'The message to send to the agent',
      condition: { field: 'operation', value: ['send_message', 'send_message_stream'] },
      required: { field: 'operation', value: ['send_message', 'send_message_stream'] },
    },
    {
      id: 'taskId',
      title: 'Task ID',
      type: 'short-input',
      placeholder: 'Task ID',
      description: 'Task ID to query, cancel, continue, or configure',
      condition: {
        field: 'operation',
        value: [
          'send_message',
          'send_message_stream',
          'get_task',
          'cancel_task',
          'resubscribe',
          'set_push_notification',
          'get_push_notification',
          'delete_push_notification',
        ],
      },
      required: {
        field: 'operation',
        value: [
          'get_task',
          'cancel_task',
          'resubscribe',
          'set_push_notification',
          'get_push_notification',
          'delete_push_notification',
        ],
      },
    },
    {
      id: 'contextId',
      title: 'Context ID',
      type: 'short-input',
      placeholder: 'Optional - for multi-turn conversations',
      description: 'Context ID for conversation continuity across tasks',
      condition: { field: 'operation', value: ['send_message', 'send_message_stream'] },
    },
    {
      id: 'historyLength',
      title: 'History Length',
      type: 'short-input',
      placeholder: 'Number of messages to include',
      description: 'Number of history messages to include in the response',
      condition: { field: 'operation', value: 'get_task' },
    },
    {
      id: 'webhookUrl',
      title: 'Webhook URL',
      type: 'short-input',
      placeholder: 'https://your-app.com/webhook',
      description: 'HTTPS webhook URL to receive task update notifications',
      condition: { field: 'operation', value: 'set_push_notification' },
      required: { field: 'operation', value: 'set_push_notification' },
    },
    {
      id: 'token',
      title: 'Webhook Token',
      type: 'short-input',
      password: true,
      placeholder: 'Optional token for webhook validation',
      description: 'Token that will be included in webhook requests for validation',
      condition: { field: 'operation', value: 'set_push_notification' },
    },
    {
      id: 'apiKey',
      title: 'API Key',
      type: 'short-input',
      password: true,
      placeholder: 'API key for the remote agent',
      description: 'Authentication key for the A2A agent',
    },
  ],
  tools: {
    access: [
      'a2a_send_message',
      'a2a_send_message_stream',
      'a2a_get_task',
      'a2a_cancel_task',
      'a2a_get_agent_card',
      'a2a_resubscribe',
      'a2a_set_push_notification',
      'a2a_get_push_notification',
      'a2a_delete_push_notification',
    ],
    config: {
      tool: (params: Record<string, unknown>) => {
        const operation = params.operation as string
        switch (operation) {
          case 'send_message_stream':
            return 'a2a_send_message_stream'
          case 'get_task':
            return 'a2a_get_task'
          case 'cancel_task':
            return 'a2a_cancel_task'
          case 'get_agent_card':
            return 'a2a_get_agent_card'
          case 'resubscribe':
            return 'a2a_resubscribe'
          case 'set_push_notification':
            return 'a2a_set_push_notification'
          case 'get_push_notification':
            return 'a2a_get_push_notification'
          case 'delete_push_notification':
            return 'a2a_delete_push_notification'
          default:
            return 'a2a_send_message'
        }
      },
    },
  },
  inputs: {
    operation: {
      type: 'string',
      description: 'A2A operation to perform',
    },
    agentUrl: {
      type: 'string',
      description: 'A2A endpoint URL',
    },
    message: {
      type: 'string',
      description: 'Message to send to the agent',
    },
    taskId: {
      type: 'string',
      description: 'Task ID to query, cancel, continue, or configure',
    },
    contextId: {
      type: 'string',
      description: 'Context ID for conversation continuity',
    },
    historyLength: {
      type: 'number',
      description: 'Number of history messages to include',
    },
    webhookUrl: {
      type: 'string',
      description: 'HTTPS webhook URL for push notifications',
    },
    token: {
      type: 'string',
      description: 'Token for webhook validation',
    },
    apiKey: {
      type: 'string',
      description: 'API key for authentication',
    },
  },
  outputs: {
    content: {
      type: 'string',
      description: 'The text response from the agent',
    },
    taskId: {
      type: 'string',
      description: 'Task ID for follow-up interactions',
    },
    contextId: {
      type: 'string',
      description: 'Context ID for conversation continuity',
    },
    state: {
      type: 'string',
      description: 'Task state (completed, failed, etc.)',
    },
    artifacts: {
      type: 'array',
      description: 'Structured output artifacts from the agent',
    },
    history: {
      type: 'array',
      description: 'Full message history of the conversation',
    },
    cancelled: {
      type: 'boolean',
      description: 'Whether the task was successfully cancelled',
    },
    isRunning: {
      type: 'boolean',
      description: 'Whether the task is still running',
    },
    name: {
      type: 'string',
      description: 'Agent name',
    },
    description: {
      type: 'string',
      description: 'Agent description',
    },
    url: {
      type: 'string',
      description: 'Agent endpoint URL',
    },
    version: {
      type: 'string',
      description: 'Agent version',
    },
    capabilities: {
      type: 'json',
      description: 'Agent capabilities (streaming, pushNotifications, etc.)',
    },
    skills: {
      type: 'array',
      description: 'Skills the agent can perform',
    },
    authentication: {
type: 'json',
|
||||
description: 'Supported authentication schemes',
|
||||
},
|
||||
webhookUrl: {
|
||||
type: 'string',
|
||||
description: 'Configured webhook URL',
|
||||
},
|
||||
token: {
|
||||
type: 'string',
|
||||
description: 'Webhook validation token',
|
||||
},
|
||||
exists: {
|
||||
type: 'boolean',
|
||||
description: 'Whether push notification config exists',
|
||||
},
|
||||
success: {
|
||||
type: 'boolean',
|
||||
description: 'Whether the operation was successful',
|
||||
},
|
||||
},
|
||||
}
|
||||
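A quick sketch of how the block's `tools.config.tool` selector routes the dropdown operation to a tool ID. This reuses the switch above; the import path follows the registry diff later in this changeset, and the optional chaining is only there because `tools.config` may be optional on the `BlockConfig` type.

```ts
import { A2ABlock } from '@/blocks/blocks/a2a' // registry path, per the registry diff below

// Mirrors the switch above; unknown operations fall back to 'a2a_send_message'.
const toolId = A2ABlock.tools.config?.tool?.({ operation: 'cancel_task' })
console.log(toolId) // 'a2a_cancel_task'
```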
@@ -1,13 +1,13 @@
import { DynamoDBIcon } from '@/components/icons'
import type { BlockConfig } from '@/blocks/types'
import type { DynamoDBResponse } from '@/tools/dynamodb/types'
import type { DynamoDBIntrospectResponse, DynamoDBResponse } from '@/tools/dynamodb/types'

export const DynamoDBBlock: BlockConfig<DynamoDBResponse> = {
export const DynamoDBBlock: BlockConfig<DynamoDBResponse | DynamoDBIntrospectResponse> = {
  type: 'dynamodb',
  name: 'Amazon DynamoDB',
  description: 'Connect to Amazon DynamoDB',
  longDescription:
    'Integrate Amazon DynamoDB into workflows. Supports Get, Put, Query, Scan, Update, and Delete operations on DynamoDB tables.',
    'Integrate Amazon DynamoDB into workflows. Supports Get, Put, Query, Scan, Update, Delete, and Introspect operations on DynamoDB tables.',
  docsLink: 'https://docs.sim.ai/tools/dynamodb',
  category: 'tools',
  bgColor: 'linear-gradient(45deg, #2E27AD 0%, #527FFF 100%)',
@@ -24,6 +24,7 @@ export const DynamoDBBlock: BlockConfig<DynamoDBResponse> = {
        { label: 'Scan', id: 'scan' },
        { label: 'Update Item', id: 'update' },
        { label: 'Delete Item', id: 'delete' },
        { label: 'Introspect', id: 'introspect' },
      ],
      value: () => 'get',
    },
@@ -56,6 +57,19 @@ export const DynamoDBBlock: BlockConfig<DynamoDBResponse> = {
      type: 'short-input',
      placeholder: 'my-table',
      required: true,
      condition: {
        field: 'operation',
        value: 'introspect',
        not: true,
      },
    },
    {
      id: 'tableName',
      title: 'Table Name (Optional)',
      type: 'short-input',
      placeholder: 'Leave empty to list all tables',
      required: false,
      condition: { field: 'operation', value: 'introspect' },
    },
    // Key field for get, update, delete operations
    {
@@ -420,6 +434,7 @@ Return ONLY the expression - no explanations.`,
      'dynamodb_scan',
      'dynamodb_update',
      'dynamodb_delete',
      'dynamodb_introspect',
    ],
    config: {
      tool: (params) => {
@@ -436,6 +451,8 @@ Return ONLY the expression - no explanations.`,
          return 'dynamodb_update'
        case 'delete':
          return 'dynamodb_delete'
        case 'introspect':
          return 'dynamodb_introspect'
        default:
          throw new Error(`Invalid DynamoDB operation: ${params.operation}`)
      }
@@ -552,5 +569,13 @@ Return ONLY the expression - no explanations.`,
      type: 'number',
      description: 'Number of items returned',
    },
    tables: {
      type: 'array',
      description: 'List of table names from introspect operation',
    },
    tableDetails: {
      type: 'json',
      description: 'Detailed schema information for a specific table from introspect operation',
    },
  },
}

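For orientation, a sketch of the introspect result shape implied by the outputs above. The field names come from the block's `outputs`; the exact payload layout is an assumption.

```ts
// Sketch only: shape suggested by the DynamoDB block's `outputs` section.
interface DynamoDBIntrospectResult {
  tables?: string[] // with no tableName: list of all table names
  tableDetails?: Record<string, unknown> // with a tableName: that table's schema
}
```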
@@ -33,6 +33,7 @@ export const ElasticsearchBlock: BlockConfig<ElasticsearchResponse> = {
        { label: 'Create Index', id: 'elasticsearch_create_index' },
        { label: 'Delete Index', id: 'elasticsearch_delete_index' },
        { label: 'Get Index Info', id: 'elasticsearch_get_index' },
        { label: 'List Indices', id: 'elasticsearch_list_indices' },
        // Cluster Operations
        { label: 'Cluster Health', id: 'elasticsearch_cluster_health' },
        { label: 'Cluster Stats', id: 'elasticsearch_cluster_stats' },
@@ -452,6 +453,7 @@ Return ONLY valid JSON - no explanations, no markdown code blocks.`,
      'elasticsearch_get_index',
      'elasticsearch_cluster_health',
      'elasticsearch_cluster_stats',
      'elasticsearch_list_indices',
    ],
    config: {
      tool: (params) => {

@@ -1,8 +1,8 @@
import { MongoDBIcon } from '@/components/icons'
import type { BlockConfig } from '@/blocks/types'
import type { MongoDBResponse } from '@/tools/mongodb/types'
import type { MongoDBIntrospectResponse, MongoDBResponse } from '@/tools/mongodb/types'

export const MongoDBBlock: BlockConfig<MongoDBResponse> = {
export const MongoDBBlock: BlockConfig<MongoDBResponse | MongoDBIntrospectResponse> = {
  type: 'mongodb',
  name: 'MongoDB',
  description: 'Connect to MongoDB database',
@@ -23,6 +23,7 @@ export const MongoDBBlock: BlockConfig<MongoDBResponse> = {
        { label: 'Update Documents', id: 'update' },
        { label: 'Delete Documents', id: 'delete' },
        { label: 'Aggregate Pipeline', id: 'execute' },
        { label: 'Introspect Database', id: 'introspect' },
      ],
      value: () => 'query',
    },
@@ -86,6 +87,7 @@ export const MongoDBBlock: BlockConfig<MongoDBResponse> = {
      type: 'short-input',
      placeholder: 'users',
      required: true,
      condition: { field: 'operation', value: 'introspect', not: true },
    },
    {
      id: 'query',
@@ -803,6 +805,7 @@ Return ONLY the MongoDB query filter as valid JSON - no explanations, no markdow
      'mongodb_update',
      'mongodb_delete',
      'mongodb_execute',
      'mongodb_introspect',
    ],
    config: {
      tool: (params) => {
@@ -817,6 +820,8 @@ Return ONLY the MongoDB query filter as valid JSON - no explanations, no markdow
          return 'mongodb_delete'
        case 'execute':
          return 'mongodb_execute'
        case 'introspect':
          return 'mongodb_introspect'
        default:
          throw new Error(`Invalid MongoDB operation: ${params.operation}`)
      }
@@ -936,5 +941,14 @@ Return ONLY the MongoDB query filter as valid JSON - no explanations, no markdow
      type: 'number',
      description: 'Number of documents matched (update operations)',
    },
    databases: {
      type: 'array',
      description: 'Array of database names (introspect operation)',
    },
    collections: {
      type: 'array',
      description:
        'Array of collection info with name, type, document count, and indexes (introspect operation)',
    },
  },
}

@@ -23,6 +23,7 @@ export const MySQLBlock: BlockConfig<MySQLResponse> = {
        { label: 'Update Data', id: 'update' },
        { label: 'Delete Data', id: 'delete' },
        { label: 'Execute Raw SQL', id: 'execute' },
        { label: 'Introspect Schema', id: 'introspect' },
      ],
      value: () => 'query',
    },
@@ -285,7 +286,14 @@ Return ONLY the SQL query - no explanations, no markdown, no extra text.`,
    },
  ],
  tools: {
    access: ['mysql_query', 'mysql_insert', 'mysql_update', 'mysql_delete', 'mysql_execute'],
    access: [
      'mysql_query',
      'mysql_insert',
      'mysql_update',
      'mysql_delete',
      'mysql_execute',
      'mysql_introspect',
    ],
    config: {
      tool: (params) => {
        switch (params.operation) {
@@ -299,6 +307,8 @@ Return ONLY the SQL query - no explanations, no markdown, no extra text.`,
          return 'mysql_delete'
        case 'execute':
          return 'mysql_execute'
        case 'introspect':
          return 'mysql_introspect'
        default:
          throw new Error(`Invalid MySQL operation: ${params.operation}`)
      }

@@ -1,8 +1,8 @@
import { Neo4jIcon } from '@/components/icons'
import type { BlockConfig } from '@/blocks/types'
import type { Neo4jResponse } from '@/tools/neo4j/types'
import type { Neo4jIntrospectResponse, Neo4jResponse } from '@/tools/neo4j/types'

export const Neo4jBlock: BlockConfig<Neo4jResponse> = {
export const Neo4jBlock: BlockConfig<Neo4jResponse | Neo4jIntrospectResponse> = {
  type: 'neo4j',
  name: 'Neo4j',
  description: 'Connect to Neo4j graph database',
@@ -24,6 +24,7 @@ export const Neo4jBlock: BlockConfig<Neo4jResponse> = {
        { label: 'Update Properties (SET)', id: 'update' },
        { label: 'Delete Nodes/Relationships', id: 'delete' },
        { label: 'Execute Cypher', id: 'execute' },
        { label: 'Introspect Schema', id: 'introspect' },
      ],
      value: () => 'query',
    },
@@ -589,6 +590,7 @@ Return ONLY valid JSON.`,
      'neo4j_update',
      'neo4j_delete',
      'neo4j_execute',
      'neo4j_introspect',
    ],
    config: {
      tool: (params) => {
@@ -605,6 +607,8 @@ Return ONLY valid JSON.`,
          return 'neo4j_delete'
        case 'execute':
          return 'neo4j_execute'
        case 'introspect':
          return 'neo4j_introspect'
        default:
          throw new Error(`Invalid Neo4j operation: ${params.operation}`)
      }

@@ -23,6 +23,7 @@ export const PostgreSQLBlock: BlockConfig<PostgresResponse> = {
        { label: 'Update Data', id: 'update' },
        { label: 'Delete Data', id: 'delete' },
        { label: 'Execute Raw SQL', id: 'execute' },
        { label: 'Introspect Schema', id: 'introspect' },
      ],
      value: () => 'query',
    },
@@ -285,6 +286,14 @@ Return ONLY the SQL query - no explanations, no markdown, no extra text.`,
      condition: { field: 'operation', value: 'delete' },
      required: true,
    },
    {
      id: 'schema',
      title: 'Schema Name',
      type: 'short-input',
      placeholder: 'public',
      value: () => 'public',
      condition: { field: 'operation', value: 'introspect' },
    },
  ],
  tools: {
    access: [
@@ -293,6 +302,7 @@ Return ONLY the SQL query - no explanations, no markdown, no extra text.`,
      'postgresql_update',
      'postgresql_delete',
      'postgresql_execute',
      'postgresql_introspect',
    ],
    config: {
      tool: (params) => {
@@ -307,6 +317,8 @@ Return ONLY the SQL query - no explanations, no markdown, no extra text.`,
          return 'postgresql_delete'
        case 'execute':
          return 'postgresql_execute'
        case 'introspect':
          return 'postgresql_introspect'
        default:
          throw new Error(`Invalid PostgreSQL operation: ${params.operation}`)
      }
@@ -343,6 +355,7 @@ Return ONLY the SQL query - no explanations, no markdown, no extra text.`,
        if (rest.table) result.table = rest.table
        if (rest.query) result.query = rest.query
        if (rest.where) result.where = rest.where
        if (rest.schema) result.schema = rest.schema
        if (parsedData !== undefined) result.data = parsedData

        return result
@@ -361,6 +374,7 @@ Return ONLY the SQL query - no explanations, no markdown, no extra text.`,
    query: { type: 'string', description: 'SQL query to execute' },
    data: { type: 'json', description: 'Data for insert/update operations' },
    where: { type: 'string', description: 'WHERE clause for update/delete' },
    schema: { type: 'string', description: 'Schema name for introspection' },
  },
  outputs: {
    message: {
@@ -375,5 +389,13 @@ Return ONLY the SQL query - no explanations, no markdown, no extra text.`,
      type: 'number',
      description: 'Number of rows affected by the operation',
    },
    tables: {
      type: 'array',
      description: 'Array of table schemas with columns, keys, and indexes (introspect operation)',
    },
    schemas: {
      type: 'array',
      description: 'List of available schemas in the database (introspect operation)',
    },
  },
}

@@ -1,8 +1,8 @@
import { RDSIcon } from '@/components/icons'
import type { BlockConfig } from '@/blocks/types'
import type { RdsResponse } from '@/tools/rds/types'
import type { RdsIntrospectResponse, RdsResponse } from '@/tools/rds/types'

export const RDSBlock: BlockConfig<RdsResponse> = {
export const RDSBlock: BlockConfig<RdsResponse | RdsIntrospectResponse> = {
  type: 'rds',
  name: 'Amazon RDS',
  description: 'Connect to Amazon RDS via Data API',
@@ -23,6 +23,7 @@ export const RDSBlock: BlockConfig<RdsResponse> = {
        { label: 'Update Data', id: 'update' },
        { label: 'Delete Data', id: 'delete' },
        { label: 'Execute Raw SQL', id: 'execute' },
        { label: 'Introspect Schema', id: 'introspect' },
      ],
      value: () => 'query',
    },
@@ -340,9 +341,36 @@ Return ONLY the JSON object.`,
        generationType: 'json-object',
      },
    },
    {
      id: 'schema',
      title: 'Schema Name',
      type: 'short-input',
      placeholder: 'public (PostgreSQL) or database name (MySQL)',
      condition: { field: 'operation', value: 'introspect' },
      required: false,
    },
    {
      id: 'engine',
      title: 'Database Engine',
      type: 'dropdown',
      options: [
        { label: 'Auto-detect', id: '' },
        { label: 'Aurora PostgreSQL', id: 'aurora-postgresql' },
        { label: 'Aurora MySQL', id: 'aurora-mysql' },
      ],
      condition: { field: 'operation', value: 'introspect' },
      value: () => '',
    },
  ],
  tools: {
    access: ['rds_query', 'rds_insert', 'rds_update', 'rds_delete', 'rds_execute'],
    access: [
      'rds_query',
      'rds_insert',
      'rds_update',
      'rds_delete',
      'rds_execute',
      'rds_introspect',
    ],
    config: {
      tool: (params) => {
        switch (params.operation) {
@@ -356,12 +384,14 @@ Return ONLY the JSON object.`,
          return 'rds_delete'
        case 'execute':
          return 'rds_execute'
        case 'introspect':
          return 'rds_introspect'
        default:
          throw new Error(`Invalid RDS operation: ${params.operation}`)
      }
    },
    params: (params) => {
      const { operation, data, conditions, ...rest } = params
      const { operation, data, conditions, schema, engine, ...rest } = params

      // Parse JSON fields
      const parseJson = (value: unknown, fieldName: string) => {
@@ -399,6 +429,8 @@ Return ONLY the JSON object.`,
      if (rest.query) result.query = rest.query
      if (parsedConditions !== undefined) result.conditions = parsedConditions
      if (parsedData !== undefined) result.data = parsedData
      if (schema) result.schema = schema
      if (engine) result.engine = engine

      return result
    },
@@ -416,6 +448,11 @@ Return ONLY the JSON object.`,
    query: { type: 'string', description: 'SQL query to execute' },
    data: { type: 'json', description: 'Data for insert/update operations' },
    conditions: { type: 'json', description: 'Conditions for update/delete (e.g., {"id": 1})' },
    schema: { type: 'string', description: 'Schema to introspect (for introspect operation)' },
    engine: {
      type: 'string',
      description: 'Database engine (aurora-postgresql or aurora-mysql, auto-detected if not set)',
    },
  },
  outputs: {
    message: {
@@ -430,5 +467,18 @@ Return ONLY the JSON object.`,
      type: 'number',
      description: 'Number of rows affected by the operation',
    },
    engine: {
      type: 'string',
      description: 'Detected database engine type (for introspect operation)',
    },
    tables: {
      type: 'array',
      description:
        'Array of table schemas with columns, keys, and indexes (for introspect operation)',
    },
    schemas: {
      type: 'array',
      description: 'List of available schemas in the database (for introspect operation)',
    },
  },
}

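For illustration, how the destructured `schema` and `engine` flow through the `params()` mapping above; the input values here are hypothetical.

```ts
// Hypothetical block params for an RDS introspect run.
const blockParams = {
  operation: 'introspect',
  schema: 'public',
  engine: 'aurora-postgresql',
}

// After the params() mapping above, the rds_introspect tool receives
// { schema: 'public', engine: 'aurora-postgresql' } alongside the connection
// fields carried in ...rest. Empty strings are dropped by the `if (schema)` /
// `if (engine)` guards, which is what makes the 'Auto-detect' option work.
```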
@@ -34,6 +34,7 @@ export const SupabaseBlock: BlockConfig<SupabaseResponse> = {
        { label: 'Full-Text Search', id: 'text_search' },
        { label: 'Vector Search', id: 'vector_search' },
        { label: 'Call RPC Function', id: 'rpc' },
        { label: 'Introspect Schema', id: 'introspect' },
        // Storage - File Operations
        { label: 'Storage: Upload File', id: 'storage_upload' },
        { label: 'Storage: Download File', id: 'storage_download' },
@@ -490,6 +491,14 @@ Return ONLY the order by expression - no explanations, no extra text.`,
      placeholder: '{\n "param1": "value1",\n "param2": "value2"\n}',
      condition: { field: 'operation', value: 'rpc' },
    },
    // Introspect operation fields
    {
      id: 'schema',
      title: 'Schema',
      type: 'short-input',
      placeholder: 'public (leave empty for all user schemas)',
      condition: { field: 'operation', value: 'introspect' },
    },
    // Text Search operation fields
    {
      id: 'column',
@@ -876,6 +885,7 @@ Return ONLY the PostgREST filter expression - no explanations, no markdown, no e
      'supabase_text_search',
      'supabase_vector_search',
      'supabase_rpc',
      'supabase_introspect',
      'supabase_storage_upload',
      'supabase_storage_download',
      'supabase_storage_list',
@@ -911,6 +921,8 @@ Return ONLY the PostgREST filter expression - no explanations, no markdown, no e
          return 'supabase_vector_search'
        case 'rpc':
          return 'supabase_rpc'
        case 'introspect':
          return 'supabase_introspect'
        case 'storage_upload':
          return 'supabase_storage_upload'
        case 'storage_download':
@@ -1085,7 +1097,6 @@ Return ONLY the PostgREST filter expression - no explanations, no markdown, no e
    operation: { type: 'string', description: 'Operation to perform' },
    projectId: { type: 'string', description: 'Supabase project identifier' },
    table: { type: 'string', description: 'Database table name' },
    schema: { type: 'string', description: 'Database schema (default: public)' },
    select: { type: 'string', description: 'Columns to return (comma-separated, defaults to *)' },
    apiKey: { type: 'string', description: 'Service role secret key' },
    // Data for insert/update operations
@@ -1113,6 +1124,8 @@ Return ONLY the PostgREST filter expression - no explanations, no markdown, no e
    language: { type: 'string', description: 'Language for text search' },
    // Count operation inputs
    countType: { type: 'string', description: 'Count type: exact, planned, or estimated' },
    // Introspect operation inputs
    schema: { type: 'string', description: 'Database schema to introspect (e.g., public)' },
    // Storage operation inputs
    bucket: { type: 'string', description: 'Storage bucket name' },
    path: { type: 'string', description: 'File or folder path in storage' },
@@ -1158,5 +1171,13 @@ Return ONLY the PostgREST filter expression - no explanations, no markdown, no e
      type: 'string',
      description: 'Temporary signed URL for storage file',
    },
    tables: {
      type: 'json',
      description: 'Array of table schemas for introspect operation',
    },
    schemas: {
      type: 'json',
      description: 'Array of schema names found in the database',
    },
  },
}

@@ -1,3 +1,4 @@
import { A2ABlock } from '@/blocks/blocks/a2a'
import { AgentBlock } from '@/blocks/blocks/agent'
import { AhrefsBlock } from '@/blocks/blocks/ahrefs'
import { AirtableBlock } from '@/blocks/blocks/airtable'
@@ -147,6 +148,7 @@ import { SQSBlock } from './blocks/sqs'

// Registry of all available blocks, alphabetically sorted
export const registry: Record<string, BlockConfig> = {
  a2a: A2ABlock,
  agent: AgentBlock,
  ahrefs: AhrefsBlock,
  airtable: AirtableBlock,

@@ -59,6 +59,8 @@ export type ComboboxOption = {
export type ComboboxOptionGroup = {
  /** Optional section header label */
  section?: string
  /** Optional custom section header element (overrides section label) */
  sectionElement?: ReactNode
  /** Options in this group */
  items: ComboboxOption[]
}
@@ -625,11 +627,13 @@ const Combobox = forwardRef<HTMLDivElement, ComboboxProps>(
          <div className='space-y-[2px]'>
            {filteredGroups.map((group, groupIndex) => (
              <div key={group.section || `group-${groupIndex}`}>
                {group.section && (
                  <div className='px-[6px] py-[4px] font-base text-[11px] text-[var(--text-tertiary)] first:pt-[4px]'>
                    {group.section}
                  </div>
                )}
                {group.sectionElement
                  ? group.sectionElement
                  : group.section && (
                      <div className='px-[6px] py-[4px] font-base text-[11px] text-[var(--text-tertiary)] first:pt-[4px]'>
                        {group.section}
                      </div>
                    )}
                {group.items.map((option) => {
                  const isSelected = multiSelect
                    ? multiSelectValues?.includes(option.value)

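A TSX sketch of the new grouped-options shape. The `label`/`value` fields on `ComboboxOption` are assumed from the `option.value` usage above, not shown in this hunk.

```tsx
const groups: ComboboxOptionGroup[] = [
  // Plain text header, rendered by the `group.section` branch above.
  { section: 'Databases', items: [{ label: 'PostgreSQL', value: 'postgresql' }] },
  // Custom header element; sectionElement wins over section when both are set.
  { sectionElement: <strong>Agents</strong>, items: [{ label: 'A2A', value: 'a2a' }] },
]
```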
@@ -53,6 +53,8 @@ const tagVariants = cva(
    variants: {
      variant: {
        default: 'bg-[#bfdbfe] text-[#1d4ed8] dark:bg-[rgba(59,130,246,0.2)] dark:text-[#93c5fd]',
        secondary:
          'border border-[var(--border-1)] bg-[var(--surface-4)] text-[var(--text-secondary)] hover:text-[var(--text-primary)]',
        invalid:
          'bg-[#fecaca] text-[var(--text-error)] dark:bg-[#551a1a] dark:text-[var(--text-error)]',
      },
@@ -102,7 +104,9 @@ const Tag = React.memo(function Tag({
          'flex-shrink-0 opacity-80 transition-opacity hover:opacity-100 focus:outline-none',
          variant === 'invalid'
            ? 'text-[var(--text-error)]'
            : 'text-[#1d4ed8] dark:text-[#93c5fd]'
            : variant === 'secondary'
              ? 'text-[var(--text-tertiary)]'
              : 'text-[#1d4ed8] dark:text-[#93c5fd]'
        )}
        aria-label={`Remove ${value}`}
      >
@@ -192,6 +196,8 @@ export interface TagInputProps extends VariantProps<typeof tagInputVariants> {
  renderTagSuffix?: (value: string, index: number) => React.ReactNode
  /** Options for enabling file input (drag/drop and file picker) */
  fileInputOptions?: FileInputOptions
  /** Variant for valid tags (defaults to 'default') */
  tagVariant?: 'default' | 'secondary'
}

/**
@@ -222,6 +228,7 @@ const TagInput = React.forwardRef<HTMLInputElement, TagInputProps>(
      triggerKeys = ['Enter', ',', ' '],
      renderTagSuffix,
      fileInputOptions,
      tagVariant = 'default',
      variant,
    },
    ref
@@ -399,7 +406,7 @@ const TagInput = React.forwardRef<HTMLInputElement, TagInputProps>(
            <Tag
              key={`item-${index}`}
              value={item.value}
              variant={item.isValid ? 'default' : 'invalid'}
              variant={item.isValid ? tagVariant : 'invalid'}
              onRemove={() => onRemove(item.value, index, item.isValid)}
              disabled={disabled}
              suffix={item.isValid ? renderTagSuffix?.(item.value, index) : undefined}
@@ -409,7 +416,7 @@ const TagInput = React.forwardRef<HTMLInputElement, TagInputProps>(
            className={cn(
              'flex items-center',
              inputValue.trim() &&
                cn(tagVariants({ variant: 'default' }), 'gap-0 py-0 pr-0 pl-[4px] opacity-80')
                cn(tagVariants({ variant: tagVariant }), 'gap-0 py-0 pr-0 pl-[4px] opacity-80')
            )}
          >
            <div className='relative inline-flex'>

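Usage sketch for the new `tagVariant` prop; the other props shown are illustrative, since this hunk only exposes the `TagInputProps` additions.

```tsx
// Valid tags render with the muted 'secondary' styling instead of the
// default blue; invalid tags keep the 'invalid' variant either way.
<TagInput tagVariant='secondary' /* ...remaining TagInput props */ />
```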
22
apps/sim/components/emcn/icons/animate/download.module.css
Normal file
@@ -0,0 +1,22 @@
/**
 * Download icon animation
 * Subtle continuous animation for import/download states
 * Arrow gently pulses down to suggest downloading motion
 */

@keyframes arrow-pulse {
  0%,
  100% {
    transform: translateY(0);
    opacity: 1;
  }
  50% {
    transform: translateY(1.5px);
    opacity: 0.7;
  }
}

.animated-download-svg {
  animation: arrow-pulse 1.5s ease-in-out infinite;
  transform-origin: center center;
}
42
apps/sim/components/emcn/icons/download.tsx
Normal file
@@ -0,0 +1,42 @@
import type { SVGProps } from 'react'
import styles from '@/components/emcn/icons/animate/download.module.css'

export interface DownloadProps extends SVGProps<SVGSVGElement> {
  /**
   * Enable animation on the download icon
   * @default false
   */
  animate?: boolean
}

/**
 * Download icon component with optional CSS-based animation
 * Based on lucide arrow-down icon structure.
 * When animate is false, this is a lightweight static icon with no animation overhead.
 * When animate is true, CSS module animations are applied for a subtle pulsing effect.
 * @param props - SVG properties including className, animate, etc.
 */
export function Download({ animate = false, className, ...props }: DownloadProps) {
  const svgClassName = animate
    ? `${styles['animated-download-svg']} ${className || ''}`.trim()
    : className

  return (
    <svg
      xmlns='http://www.w3.org/2000/svg'
      width='24'
      height='24'
      viewBox='0 0 24 24'
      fill='none'
      stroke='currentColor'
      strokeWidth='2'
      strokeLinecap='round'
      strokeLinejoin='round'
      className={svgClassName}
      {...props}
    >
      <path d='M12 5v14' />
      <path d='m19 12-7 7-7-7' />
    </svg>
  )
}
@@ -5,6 +5,7 @@ export { ChevronDown } from './chevron-down'
export { Connections } from './connections'
export { Copy } from './copy'
export { DocumentAttachment } from './document-attachment'
export { Download } from './download'
export { Duplicate } from './duplicate'
export { Eye } from './eye'
export { FolderCode } from './folder-code'

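Usage sketch for the new icon; the import path follows the barrel export above, and the class names are illustrative.

```tsx
import { Download } from '@/components/emcn/icons'

export function ImportIndicator() {
  // Static by default; `animate` opts into the arrow-pulse CSS module animation.
  return <Download className='h-4 w-4' animate />
}
```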
309
apps/sim/hooks/queries/a2a/agents.ts
Normal file
@@ -0,0 +1,309 @@
/**
 * A2A Agents React Query Hooks
 *
 * Hooks for managing A2A agents in the UI.
 */

import type { AgentCapabilities, AgentSkill } from '@a2a-js/sdk'
import { useMutation, useQuery, useQueryClient } from '@tanstack/react-query'
import type { AgentAuthentication } from '@/lib/a2a/types'

/**
 * A2A Agent as returned from the API
 */
export interface A2AAgent {
  id: string
  workspaceId: string
  workflowId: string
  name: string
  description?: string
  version: string
  capabilities: AgentCapabilities
  skills: AgentSkill[]
  authentication: AgentAuthentication
  isPublished: boolean
  publishedAt?: string
  createdAt: string
  updatedAt: string
  workflowName?: string
  workflowDescription?: string
  isDeployed?: boolean
  taskCount?: number
}

/**
 * Query keys for A2A agents
 */
export const a2aAgentKeys = {
  all: ['a2a-agents'] as const,
  list: (workspaceId: string) => [...a2aAgentKeys.all, 'list', workspaceId] as const,
  detail: (agentId: string) => [...a2aAgentKeys.all, 'detail', agentId] as const,
}

/**
 * Fetch A2A agents for a workspace
 */
async function fetchA2AAgents(workspaceId: string): Promise<A2AAgent[]> {
  const response = await fetch(`/api/a2a/agents?workspaceId=${workspaceId}`)
  if (!response.ok) {
    throw new Error('Failed to fetch A2A agents')
  }
  const data = await response.json()
  return data.agents
}

/**
 * Hook to list A2A agents for a workspace
 */
export function useA2AAgents(workspaceId: string) {
  return useQuery({
    queryKey: a2aAgentKeys.list(workspaceId),
    queryFn: () => fetchA2AAgents(workspaceId),
    enabled: Boolean(workspaceId),
    staleTime: 60 * 1000, // 1 minute
  })
}

/**
 * Agent Card as returned from the agent detail endpoint
 */
export interface A2AAgentCard {
  name: string
  description?: string
  url: string
  version: string
  documentationUrl?: string
  provider?: {
    organization: string
    url?: string
  }
  capabilities: AgentCapabilities
  skills: AgentSkill[]
  authentication?: AgentAuthentication
  defaultInputModes?: string[]
  defaultOutputModes?: string[]
}

/**
 * Fetch a single A2A agent card (discovery document)
 */
async function fetchA2AAgentCard(agentId: string): Promise<A2AAgentCard> {
  const response = await fetch(`/api/a2a/agents/${agentId}`)
  if (!response.ok) {
    throw new Error('Failed to fetch A2A agent')
  }
  return response.json()
}

/**
 * Hook to get a single A2A agent card (discovery document)
 */
export function useA2AAgentCard(agentId: string) {
  return useQuery({
    queryKey: a2aAgentKeys.detail(agentId),
    queryFn: () => fetchA2AAgentCard(agentId),
    enabled: Boolean(agentId),
  })
}

/**
 * Create A2A agent params
 */
export interface CreateA2AAgentParams {
  workspaceId: string
  workflowId: string
  name?: string
  description?: string
  capabilities?: AgentCapabilities
  authentication?: AgentAuthentication
  skillTags?: string[]
}

/**
 * Create a new A2A agent
 */
async function createA2AAgent(params: CreateA2AAgentParams): Promise<A2AAgent> {
  const response = await fetch('/api/a2a/agents', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(params),
  })
  if (!response.ok) {
    const error = await response.json()
    throw new Error(error.error || 'Failed to create A2A agent')
  }
  const data = await response.json()
  return data.agent
}

/**
 * Hook to create an A2A agent
 */
export function useCreateA2AAgent() {
  const queryClient = useQueryClient()

  return useMutation({
    mutationFn: createA2AAgent,
    onSuccess: (data) => {
      queryClient.invalidateQueries({
        queryKey: a2aAgentKeys.list(data.workspaceId),
      })
    },
  })
}

/**
 * Update A2A agent params
 */
export interface UpdateA2AAgentParams {
  agentId: string
  name?: string
  description?: string
  version?: string
  capabilities?: AgentCapabilities
  skills?: AgentSkill[]
  authentication?: AgentAuthentication
  isPublished?: boolean
  skillTags?: string[]
}

/**
 * Update an A2A agent
 */
async function updateA2AAgent(params: UpdateA2AAgentParams): Promise<A2AAgent> {
  const { agentId, ...body } = params
  const response = await fetch(`/api/a2a/agents/${agentId}`, {
    method: 'PUT',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(body),
  })
  if (!response.ok) {
    const error = await response.json()
    throw new Error(error.error || 'Failed to update A2A agent')
  }
  const data = await response.json()
  return data.agent
}

/**
 * Hook to update an A2A agent
 */
export function useUpdateA2AAgent() {
  const queryClient = useQueryClient()

  return useMutation({
    mutationFn: updateA2AAgent,
    onSuccess: (data) => {
      queryClient.invalidateQueries({
        queryKey: a2aAgentKeys.detail(data.id),
      })
      queryClient.invalidateQueries({
        queryKey: a2aAgentKeys.list(data.workspaceId),
      })
    },
  })
}

/**
 * Delete an A2A agent
 */
async function deleteA2AAgent(params: { agentId: string; workspaceId: string }): Promise<void> {
  const response = await fetch(`/api/a2a/agents/${params.agentId}`, {
    method: 'DELETE',
  })
  if (!response.ok) {
    const error = await response.json()
    throw new Error(error.error || 'Failed to delete A2A agent')
  }
}

/**
 * Hook to delete an A2A agent
 */
export function useDeleteA2AAgent() {
  const queryClient = useQueryClient()

  return useMutation({
    mutationFn: deleteA2AAgent,
    onSuccess: (_, variables) => {
      queryClient.invalidateQueries({
        queryKey: a2aAgentKeys.list(variables.workspaceId),
      })
    },
  })
}

/**
 * Publish/unpublish agent params
 */
export interface PublishA2AAgentParams {
  agentId: string
  workspaceId: string
  action: 'publish' | 'unpublish' | 'refresh'
}

/**
 * Publish or unpublish an A2A agent
 */
async function publishA2AAgent(params: PublishA2AAgentParams): Promise<{
  isPublished?: boolean
  skills?: AgentSkill[]
}> {
  const response = await fetch(`/api/a2a/agents/${params.agentId}`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ action: params.action }),
  })
  if (!response.ok) {
    const error = await response.json()
    throw new Error(error.error || 'Failed to update A2A agent')
  }
  return response.json()
}

/**
 * Hook to publish/unpublish an A2A agent
 */
export function usePublishA2AAgent() {
  const queryClient = useQueryClient()

  return useMutation({
    mutationFn: publishA2AAgent,
    onSuccess: (_, variables) => {
      queryClient.invalidateQueries({
        queryKey: a2aAgentKeys.detail(variables.agentId),
      })
      queryClient.invalidateQueries({
        queryKey: a2aAgentKeys.list(variables.workspaceId),
      })
    },
  })
}

/**
 * Fetch A2A agent by workflow ID
 */
async function fetchA2AAgentByWorkflow(
  workspaceId: string,
  workflowId: string
): Promise<A2AAgent | null> {
  const response = await fetch(`/api/a2a/agents?workspaceId=${workspaceId}`)
  if (!response.ok) {
    throw new Error('Failed to fetch A2A agents')
  }
  const data = await response.json()
  const agents = data.agents as A2AAgent[]
  return agents.find((agent) => agent.workflowId === workflowId) || null
}

/**
 * Hook to get A2A agent by workflow ID
 */
export function useA2AAgentByWorkflow(workspaceId: string, workflowId: string) {
  return useQuery({
    queryKey: [...a2aAgentKeys.all, 'byWorkflow', workspaceId, workflowId] as const,
    queryFn: () => fetchA2AAgentByWorkflow(workspaceId, workflowId),
    enabled: Boolean(workspaceId) && Boolean(workflowId),
    staleTime: 30 * 1000, // 30 seconds
  })
}
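A minimal usage sketch for these hooks in a component; the component itself is hypothetical and assumes a React Query provider is mounted.

```tsx
import { useA2AAgents } from '@/hooks/queries/a2a/agents'

export function AgentList({ workspaceId }: { workspaceId: string }) {
  const { data: agents, isLoading } = useA2AAgents(workspaceId)

  if (isLoading) return <span>Loading…</span>
  return (
    <ul>
      {agents?.map((agent) => (
        <li key={agent.id}>
          {agent.name} {agent.isPublished ? '(published)' : '(draft)'}
        </li>
      ))}
    </ul>
  )
}
```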
262
apps/sim/hooks/queries/a2a/tasks.ts
Normal file
@@ -0,0 +1,262 @@
/**
 * A2A Tasks React Query Hooks (v0.3)
 *
 * Hooks for interacting with A2A tasks in the UI.
 */

import type { Artifact, Message, TaskState } from '@a2a-js/sdk'
import { useMutation, useQuery, useQueryClient } from '@tanstack/react-query'
import { isTerminalState } from '@/lib/a2a/utils'

/** A2A v0.3 JSON-RPC method names */
const A2A_METHODS = {
  MESSAGE_SEND: 'message/send',
  TASKS_GET: 'tasks/get',
  TASKS_CANCEL: 'tasks/cancel',
} as const

/**
 * A2A Task as returned from queries
 */
export interface A2ATask {
  kind: 'task'
  id: string
  contextId?: string
  status: {
    state: TaskState
    timestamp?: string
    message?: string
  }
  history?: Message[]
  artifacts?: Artifact[]
  metadata?: Record<string, unknown>
}

/**
 * Query keys for A2A tasks
 */
export const a2aTaskKeys = {
  all: ['a2a-tasks'] as const,
  detail: (agentUrl: string, taskId: string) =>
    [...a2aTaskKeys.all, 'detail', agentUrl, taskId] as const,
}

/**
 * Send task params
 */
export interface SendA2ATaskParams {
  agentUrl: string
  message: string
  taskId?: string
  contextId?: string
  apiKey?: string
}

/**
 * Send task response
 */
export interface SendA2ATaskResponse {
  content: string
  taskId: string
  contextId?: string
  state: TaskState
  artifacts?: Artifact[]
  history?: Message[]
}

/**
 * Send a message to an A2A agent (v0.3)
 */
async function sendA2ATask(params: SendA2ATaskParams): Promise<SendA2ATaskResponse> {
  const userMessage: Message = {
    kind: 'message',
    messageId: crypto.randomUUID(),
    role: 'user',
    parts: [{ kind: 'text', text: params.message }],
    ...(params.taskId && { taskId: params.taskId }),
    ...(params.contextId && { contextId: params.contextId }),
  }

  const response = await fetch(params.agentUrl, {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      ...(params.apiKey ? { Authorization: `Bearer ${params.apiKey}` } : {}),
    },
    body: JSON.stringify({
      jsonrpc: '2.0',
      id: crypto.randomUUID(),
      method: A2A_METHODS.MESSAGE_SEND,
      params: {
        message: userMessage,
      },
    }),
  })

  if (!response.ok) {
    throw new Error(`A2A request failed: ${response.status} ${response.statusText}`)
  }

  const result = await response.json()

  if (result.error) {
    throw new Error(result.error.message || 'A2A request failed')
  }

  const task = result.result as A2ATask

  const lastAgentMessage = task.history?.filter((m) => m.role === 'agent').pop()
  const content = lastAgentMessage
    ? lastAgentMessage.parts
        .filter((p): p is import('@a2a-js/sdk').TextPart => p.kind === 'text')
        .map((p) => p.text)
        .join('')
    : ''

  return {
    content,
    taskId: task.id,
    contextId: task.contextId,
    state: task.status.state,
    artifacts: task.artifacts,
    history: task.history,
  }
}

/**
 * Hook to send a message to an A2A agent
 */
export function useSendA2ATask() {
  const queryClient = useQueryClient()

  return useMutation({
    mutationFn: sendA2ATask,
    onSuccess: (data, variables) => {
      queryClient.invalidateQueries({
        queryKey: a2aTaskKeys.detail(variables.agentUrl, data.taskId),
      })
    },
  })
}

/**
 * Get task params
 */
export interface GetA2ATaskParams {
  agentUrl: string
  taskId: string
  apiKey?: string
  historyLength?: number
}

/**
 * Fetch a task from an A2A agent
 */
async function fetchA2ATask(params: GetA2ATaskParams): Promise<A2ATask> {
  const response = await fetch(params.agentUrl, {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      ...(params.apiKey ? { Authorization: `Bearer ${params.apiKey}` } : {}),
    },
    body: JSON.stringify({
      jsonrpc: '2.0',
      id: crypto.randomUUID(),
      method: A2A_METHODS.TASKS_GET,
      params: {
        id: params.taskId,
        historyLength: params.historyLength,
      },
    }),
  })

  if (!response.ok) {
    throw new Error(`A2A request failed: ${response.status} ${response.statusText}`)
  }

  const result = await response.json()

  if (result.error) {
    throw new Error(result.error.message || 'A2A request failed')
  }

  return result.result as A2ATask
}

/**
 * Hook to get an A2A task
 */
export function useA2ATask(params: GetA2ATaskParams | null) {
  return useQuery({
    queryKey: params ? a2aTaskKeys.detail(params.agentUrl, params.taskId) : ['disabled'],
    queryFn: () => fetchA2ATask(params!),
    enabled: Boolean(params?.agentUrl && params?.taskId),
    staleTime: 5 * 1000, // 5 seconds - tasks can change quickly
    refetchInterval: (query) => {
      // Auto-refresh if task is still running
      const data = query.state.data as A2ATask | undefined
      if (data && !isTerminalState(data.status.state)) {
        return 2000 // 2 seconds
      }
      return false
    },
  })
}

/**
 * Cancel task params
 */
export interface CancelA2ATaskParams {
  agentUrl: string
  taskId: string
  apiKey?: string
}

/**
 * Cancel a task
 */
async function cancelA2ATask(params: CancelA2ATaskParams): Promise<A2ATask> {
  const response = await fetch(params.agentUrl, {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      ...(params.apiKey ? { Authorization: `Bearer ${params.apiKey}` } : {}),
    },
    body: JSON.stringify({
      jsonrpc: '2.0',
      id: crypto.randomUUID(),
      method: A2A_METHODS.TASKS_CANCEL,
      params: {
        id: params.taskId,
      },
    }),
  })

  if (!response.ok) {
    throw new Error(`A2A request failed: ${response.status} ${response.statusText}`)
  }

  const result = await response.json()

  if (result.error) {
    throw new Error(result.error.message || 'A2A request failed')
  }

  return result.result as A2ATask
}

/**
 * Hook to cancel an A2A task
 */
export function useCancelA2ATask() {
  const queryClient = useQueryClient()

  return useMutation({
    mutationFn: cancelA2ATask,
    onSuccess: (data, variables) => {
      queryClient.invalidateQueries({
        queryKey: a2aTaskKeys.detail(variables.agentUrl, variables.taskId),
      })
    },
  })
}
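For reference, the exact JSON-RPC envelope that `sendA2ATask()` above posts to the agent URL; the IDs are shortened here for readability, in practice they are `crypto.randomUUID()` values.

```ts
const body = {
  jsonrpc: '2.0',
  id: '7d0c4f0e-…',
  method: 'message/send',
  params: {
    message: {
      kind: 'message',
      messageId: '9b1a2c3d-…',
      role: 'user',
      parts: [{ kind: 'text', text: 'Hello, agent' }],
    },
  },
}
```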
130
apps/sim/lib/a2a/agent-card.ts
Normal file
@@ -0,0 +1,130 @@
import { getBaseUrl } from '@/lib/core/utils/urls'
import {
  A2A_DEFAULT_CAPABILITIES,
  A2A_DEFAULT_INPUT_MODES,
  A2A_DEFAULT_OUTPUT_MODES,
  A2A_PROTOCOL_VERSION,
} from './constants'
import type { AgentCapabilities, AgentSkill } from './types'
import { buildA2AEndpointUrl, sanitizeAgentName } from './utils'

export interface AppAgentCard {
  name: string
  description: string
  url: string
  protocolVersion: string
  documentationUrl?: string
  provider?: {
    organization: string
    url: string
  }
  capabilities: AgentCapabilities
  skills: AgentSkill[]
  defaultInputModes: string[]
  defaultOutputModes: string[]
}

interface WorkflowData {
  id: string
  name: string
  description?: string | null
}

interface AgentData {
  id: string
  name: string
  description?: string | null
  version: string
  capabilities?: AgentCapabilities
  skills?: AgentSkill[]
}

export function generateAgentCard(agent: AgentData, workflow: WorkflowData): AppAgentCard {
  const baseUrl = getBaseUrl()
  const description =
    agent.description || workflow.description || `${agent.name} - A2A Agent powered by Sim Studio`

  return {
    name: agent.name,
    description,
    url: buildA2AEndpointUrl(baseUrl, agent.id),
    protocolVersion: A2A_PROTOCOL_VERSION,
    documentationUrl: `${baseUrl}/docs/a2a`,
    provider: {
      organization: 'Sim Studio',
      url: baseUrl,
    },
    capabilities: {
      ...A2A_DEFAULT_CAPABILITIES,
      ...agent.capabilities,
    },
    skills: agent.skills || [
      {
        id: 'execute',
        name: `Execute ${workflow.name}`,
        description: workflow.description || `Execute the ${workflow.name} workflow`,
        tags: ['workflow', 'automation'],
      },
    ],
    defaultInputModes: [...A2A_DEFAULT_INPUT_MODES],
    defaultOutputModes: [...A2A_DEFAULT_OUTPUT_MODES],
  }
}

export function generateSkillsFromWorkflow(
  workflowName: string,
  workflowDescription: string | undefined | null,
  tags?: string[]
): AgentSkill[] {
  const skill: AgentSkill = {
    id: 'execute',
    name: `Execute ${workflowName}`,
    description: workflowDescription || `Execute the ${workflowName} workflow`,
    tags: tags?.length ? tags : ['workflow', 'automation'],
  }

  return [skill]
}

export function generateDefaultAgentName(workflowName: string): string {
  return sanitizeAgentName(workflowName)
}

export function validateAgentCard(card: unknown): card is AppAgentCard {
  if (!card || typeof card !== 'object') return false

  const c = card as Record<string, unknown>

  if (typeof c.name !== 'string' || !c.name) return false
  if (typeof c.url !== 'string' || !c.url) return false
  if (typeof c.description !== 'string') return false

  if (c.capabilities && typeof c.capabilities !== 'object') return false

  if (!Array.isArray(c.skills)) return false

  return true
}

export function mergeAgentCard(
  existing: AppAgentCard,
  updates: Partial<AppAgentCard>
): AppAgentCard {
  return {
    ...existing,
    ...updates,
    capabilities: {
      ...existing.capabilities,
      ...updates.capabilities,
    },
    skills: updates.skills || existing.skills,
  }
}

export function getAgentCardPaths(agentId: string) {
  const baseUrl = getBaseUrl()
  return {
    card: `${baseUrl}/api/a2a/agents/${agentId}`,
    serve: `${baseUrl}/api/a2a/serve/${agentId}`,
  }
}
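A sketch of what `generateAgentCard()` produces for a hypothetical agent/workflow pair; the IDs are made up, and the URL shape is inferred from `getAgentCardPaths` above.

```ts
const card = generateAgentCard(
  { id: 'agent-1', name: 'Support Bot', version: '1.0.0' },
  { id: 'wf-1', name: 'Triage', description: 'Routes support tickets' }
)
// card.protocolVersion -> '0.3.0' (A2A_PROTOCOL_VERSION)
// card.skills[0].name  -> 'Execute Triage' (default skill, since agent.skills is unset)
// card.url             -> buildA2AEndpointUrl(baseUrl, 'agent-1'),
//                         i.e. `${baseUrl}/api/a2a/serve/agent-1` per getAgentCardPaths
```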
23
apps/sim/lib/a2a/constants.ts
Normal file
@@ -0,0 +1,23 @@
export { AGENT_CARD_PATH } from '@a2a-js/sdk'
export const A2A_PROTOCOL_VERSION = '0.3.0'

export const A2A_DEFAULT_TIMEOUT = 300000

export const A2A_MAX_HISTORY_LENGTH = 100

export const A2A_DEFAULT_CAPABILITIES = {
  streaming: true,
  pushNotifications: false,
  stateTransitionHistory: true,
} as const

export const A2A_DEFAULT_INPUT_MODES = ['text'] as const

export const A2A_DEFAULT_OUTPUT_MODES = ['text'] as const

export const A2A_CACHE = {
  AGENT_CARD_TTL: 3600, // 1 hour
  TASK_TTL: 86400, // 24 hours
} as const

export const A2A_TERMINAL_STATES = ['completed', 'failed', 'canceled', 'rejected'] as const
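A sketch of how the terminal-state list is typically consumed; the `isTerminalState` helper itself lives in `@/lib/a2a/utils` per the imports elsewhere in this diff, so this local `done` check is only illustrative.

```ts
type TerminalState = (typeof A2A_TERMINAL_STATES)[number]

const done = (state: string): state is TerminalState =>
  (A2A_TERMINAL_STATES as readonly string[]).includes(state)

done('completed') // true: stop polling
done('working') // false: keep the 2s refetch interval going (see tasks.ts above)
```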
83
apps/sim/lib/a2a/index.ts
Normal file
@@ -0,0 +1,83 @@
import type { AppAgentCard } from './agent-card'
import {
  generateAgentCard,
  generateDefaultAgentName,
  generateSkillsFromWorkflow,
  getAgentCardPaths,
  mergeAgentCard,
  validateAgentCard,
} from './agent-card'
import {
  A2A_CACHE,
  A2A_DEFAULT_CAPABILITIES,
  A2A_DEFAULT_INPUT_MODES,
  A2A_DEFAULT_OUTPUT_MODES,
  A2A_DEFAULT_TIMEOUT,
  A2A_MAX_HISTORY_LENGTH,
  A2A_PROTOCOL_VERSION,
  A2A_TERMINAL_STATES,
} from './constants'
import { deliverPushNotification, notifyTaskStateChange } from './push-notifications'
import type {
  A2AAgentConfig,
  A2AApiResponse,
  A2ATaskRecord,
  AgentAuthentication,
  AgentCardSignature,
  JSONSchema,
} from './types'
import {
  buildA2AEndpointUrl,
  buildAgentCardUrl,
  createA2AToolId,
  createAgentMessage,
  createTextPart,
  createUserMessage,
  extractTextContent,
  getLastAgentMessage,
  getLastAgentMessageText,
  isTerminalState,
  parseA2AToolId,
  sanitizeAgentName,
} from './utils'

export {
  generateAgentCard,
  generateDefaultAgentName,
  generateSkillsFromWorkflow,
  getAgentCardPaths,
  mergeAgentCard,
  validateAgentCard,
  A2A_CACHE,
  A2A_DEFAULT_CAPABILITIES,
  A2A_DEFAULT_INPUT_MODES,
  A2A_DEFAULT_OUTPUT_MODES,
  A2A_DEFAULT_TIMEOUT,
  A2A_MAX_HISTORY_LENGTH,
  A2A_PROTOCOL_VERSION,
  A2A_TERMINAL_STATES,
  deliverPushNotification,
  notifyTaskStateChange,
  buildA2AEndpointUrl,
  buildAgentCardUrl,
  createA2AToolId,
  createAgentMessage,
  createTextPart,
  createUserMessage,
  extractTextContent,
  getLastAgentMessage,
  getLastAgentMessageText,
  isTerminalState,
  parseA2AToolId,
  sanitizeAgentName,
}

export type {
  AppAgentCard,
  A2AAgentConfig,
  A2AApiResponse,
  A2ATaskRecord,
  AgentAuthentication,
  AgentCardSignature,
  JSONSchema,
}
109
apps/sim/lib/a2a/push-notifications.ts
Normal file
109
apps/sim/lib/a2a/push-notifications.ts
Normal file
@@ -0,0 +1,109 @@
import type { Artifact, Message, TaskState } from '@a2a-js/sdk'
import { db } from '@sim/db'
import { a2aPushNotificationConfig, a2aTask } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { isTriggerDevEnabled } from '@/lib/core/config/feature-flags'

const logger = createLogger('A2APushNotifications')

/**
 * Deliver push notification for a task state change.
 * Works without any external dependencies (DB-only).
 */
export async function deliverPushNotification(taskId: string, state: TaskState): Promise<boolean> {
  const [config] = await db
    .select()
    .from(a2aPushNotificationConfig)
    .where(eq(a2aPushNotificationConfig.taskId, taskId))
    .limit(1)

  if (!config || !config.isActive) {
    return true
  }

  const [task] = await db.select().from(a2aTask).where(eq(a2aTask.id, taskId)).limit(1)

  if (!task) {
    logger.warn('Task not found for push notification', { taskId })
    return false
  }

  const timestamp = new Date().toISOString()

  const headers: Record<string, string> = {
    'Content-Type': 'application/json',
  }

  if (config.token) {
    headers.Authorization = `Bearer ${config.token}`
  }

  try {
    const response = await fetch(config.url, {
      method: 'POST',
      headers,
      body: JSON.stringify({
        kind: 'task-update',
        task: {
          kind: 'task',
          id: task.id,
          contextId: task.sessionId,
          status: { state, timestamp },
          history: task.messages as Message[],
          artifacts: (task.artifacts as Artifact[]) || [],
        },
      }),
      signal: AbortSignal.timeout(30000),
    })

    if (!response.ok) {
      logger.error('Push notification delivery failed', {
        taskId,
        url: config.url,
        status: response.status,
      })
      return false
    }

    logger.info('Push notification delivered successfully', { taskId, state })
    return true
  } catch (error) {
    logger.error('Push notification delivery error', { taskId, error })
    return false
  }
}

/**
 * Notify task state change.
 * Uses trigger.dev for durable delivery when available, falls back to inline delivery.
 */
export async function notifyTaskStateChange(taskId: string, state: TaskState): Promise<void> {
  const [config] = await db
    .select({ id: a2aPushNotificationConfig.id })
    .from(a2aPushNotificationConfig)
    .where(eq(a2aPushNotificationConfig.taskId, taskId))
    .limit(1)

  if (!config) {
    return
  }

  if (isTriggerDevEnabled) {
    try {
      const { a2aPushNotificationTask } = await import(
        '@/background/a2a-push-notification-delivery'
      )
      await a2aPushNotificationTask.trigger({ taskId, state })
      logger.info('Push notification queued to trigger.dev', { taskId, state })
    } catch (error) {
      logger.warn('Failed to queue push notification, falling back to inline delivery', {
        taskId,
        error,
      })
      await deliverPushNotification(taskId, state)
    }
  } else {
    await deliverPushNotification(taskId, state)
  }
}
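Two sketches (not part of the diff). First, the POST body a webhook receiver should expect, mirroring the JSON.stringify payload in deliverPushNotification above; second, the call site a task-completion path would use. The handler name is illustrative.

// What the configured webhook receives:
// {
//   "kind": "task-update",
//   "task": {
//     "kind": "task",
//     "id": "<taskId>",
//     "contextId": "<sessionId>",
//     "status": { "state": "completed", "timestamp": "2025-01-01T00:00:00.000Z" },
//     "history": [ ...messages ],
//     "artifacts": [ ...artifacts ]
//   }
// }

import { notifyTaskStateChange } from './push-notifications'

// After persisting a terminal state, fire the notifier; it queues to
// trigger.dev when the feature flag is on, otherwise delivers inline.
export async function onTaskCompleted(taskId: string): Promise<void> {
  await notifyTaskStateChange(taskId, 'completed')
}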
142
apps/sim/lib/a2a/types.ts
Normal file
@@ -0,0 +1,142 @@
/**
 * A2A (Agent-to-Agent) Protocol Types (v0.3)
 * @see https://a2a-protocol.org/specification
 */

export {
  AGENT_CARD_PATH,
  type AgentCapabilities,
  type AgentCard,
  type AgentProvider,
  type AgentSkill,
  type Artifact,
  type DataPart,
  type FilePart,
  type Message,
  type MessageSendConfiguration,
  type MessageSendParams,
  type Part,
  type PushNotificationConfig,
  type Task,
  type TaskArtifactUpdateEvent,
  type TaskIdParams,
  type TaskPushNotificationConfig,
  type TaskQueryParams,
  type TaskState,
  type TaskStatus,
  type TaskStatusUpdateEvent,
  type TextPart,
} from '@a2a-js/sdk'
export {
  type A2AClientOptions,
  type AuthenticationHandler,
  Client,
  type ClientConfig,
  ClientFactory,
  type RequestOptions,
} from '@a2a-js/sdk/client'
export {
  A2AError,
  type AgentExecutor,
  DefaultExecutionEventBus,
  DefaultRequestHandler,
  type ExecutionEventBus,
  InMemoryTaskStore,
  JsonRpcTransportHandler,
  type RequestContext,
  type TaskStore,
} from '@a2a-js/sdk/server'

/**
 * App-specific: Extended MessageSendParams
 * Note: Structured inputs should be passed via DataPart in message.parts (A2A spec compliant)
 * Files should be passed via FilePart in message.parts
 */
export interface ExtendedMessageSendParams {
  message: import('@a2a-js/sdk').Message
  configuration?: import('@a2a-js/sdk').MessageSendConfiguration
}

/**
 * App-specific: Database model for A2A Agent configuration
 */
export interface A2AAgentConfig {
  id: string
  workspaceId: string
  workflowId: string
  name: string
  description?: string
  version: string
  capabilities: import('@a2a-js/sdk').AgentCapabilities
  skills: import('@a2a-js/sdk').AgentSkill[]
  authentication?: AgentAuthentication
  signatures?: AgentCardSignature[]
  isPublished: boolean
  publishedAt?: Date
  createdAt: Date
  updatedAt: Date
}

/**
 * App-specific: Agent authentication configuration
 */
export interface AgentAuthentication {
  schemes: Array<'bearer' | 'apiKey' | 'oauth2' | 'none'>
  credentials?: string
}

/**
 * App-specific: Agent card signature (v0.3)
 */
export interface AgentCardSignature {
  algorithm: string
  keyId: string
  value: string
}

/**
 * App-specific: Database model for A2A Task record
 */
export interface A2ATaskRecord {
  id: string
  agentId: string
  contextId?: string
  status: import('@a2a-js/sdk').TaskState
  history: import('@a2a-js/sdk').Message[]
  artifacts?: import('@a2a-js/sdk').Artifact[]
  executionId?: string
  metadata?: Record<string, unknown>
  createdAt: Date
  updatedAt: Date
  completedAt?: Date
}

/**
 * App-specific: A2A API Response wrapper
 */
export interface A2AApiResponse<T = unknown> {
  success: boolean
  data?: T
  error?: string
}

/**
 * App-specific: JSON Schema definition for skill input/output schemas
 */
export interface JSONSchema {
  type?: string
  properties?: Record<string, JSONSchema>
  items?: JSONSchema
  required?: string[]
  description?: string
  enum?: unknown[]
  default?: unknown
  format?: string
  minimum?: number
  maximum?: number
  minLength?: number
  maxLength?: number
  pattern?: string
  additionalProperties?: boolean | JSONSchema
  [key: string]: unknown
}
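A sketch (not part of the diff) of the JSONSchema interface in use, describing the input of a hypothetical "summarize" skill:

import type { JSONSchema } from './types'

const summarizeInput: JSONSchema = {
  type: 'object',
  properties: {
    text: { type: 'string', description: 'Content to summarize', minLength: 1 },
    maxSentences: { type: 'number', minimum: 1, maximum: 10, default: 3 },
  },
  required: ['text'],
  additionalProperties: false,
}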
212
apps/sim/lib/a2a/utils.ts
Normal file
@@ -0,0 +1,212 @@
import type { DataPart, FilePart, Message, Part, Task, TaskState, TextPart } from '@a2a-js/sdk'
import { A2A_TERMINAL_STATES } from './constants'

export function isTerminalState(state: TaskState): boolean {
  return (A2A_TERMINAL_STATES as readonly string[]).includes(state)
}

export function extractTextContent(message: Message): string {
  return message.parts
    .filter((part): part is TextPart => part.kind === 'text')
    .map((part) => part.text)
    .join('\n')
}

export function extractDataContent(message: Message): Record<string, unknown> {
  const dataParts = message.parts.filter((part): part is DataPart => part.kind === 'data')
  return dataParts.reduce((acc, part) => ({ ...acc, ...part.data }), {})
}

export interface A2AFile {
  name?: string
  mimeType?: string
  uri?: string
  bytes?: string
}

export function extractFileContent(message: Message): A2AFile[] {
  return message.parts
    .filter((part): part is FilePart => part.kind === 'file')
    .map((part) => ({
      name: part.file.name,
      mimeType: part.file.mimeType,
      ...('uri' in part.file ? { uri: part.file.uri } : {}),
      ...('bytes' in part.file ? { bytes: part.file.bytes } : {}),
    }))
}

export interface ExecutionFileInput {
  type: 'file' | 'url'
  data: string
  name: string
  mime?: string
}

/**
 * Convert A2A FileParts to execution file format
 * This format is then processed by processInputFileFields in the execute endpoint
 * FileWithUri → type 'url', FileWithBytes → type 'file' with data URL
 * Files without uri or bytes are filtered out as invalid
 */
export function convertFilesToExecutionFormat(files: A2AFile[]): ExecutionFileInput[] {
  return files
    .filter((file) => file.uri || file.bytes) // Skip invalid files without content
    .map((file) => {
      if (file.uri) {
        return {
          type: 'url' as const,
          data: file.uri,
          name: file.name || 'file',
          mime: file.mimeType,
        }
      }
      const dataUrl = `data:${file.mimeType || 'application/octet-stream'};base64,${file.bytes}`
      return {
        type: 'file' as const,
        data: dataUrl,
        name: file.name || 'file',
        mime: file.mimeType,
      }
    })
}
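// Illustration (not part of this file) of the two branches above:
//
//   convertFilesToExecutionFormat([
//     { name: 'spec.pdf', mimeType: 'application/pdf', uri: 'https://example.com/spec.pdf' },
//     { name: 'note.txt', mimeType: 'text/plain', bytes: 'aGVsbG8=' },
//   ])
//
// yields a 'url' input for the FileWithUri part and a data-URL 'file' input
// for the FileWithBytes part:
//
//   [
//     { type: 'url', data: 'https://example.com/spec.pdf', name: 'spec.pdf', mime: 'application/pdf' },
//     { type: 'file', data: 'data:text/plain;base64,aGVsbG8=', name: 'note.txt', mime: 'text/plain' },
//   ]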
export interface WorkflowInput {
  input: string
  data?: Record<string, unknown>
  files?: ExecutionFileInput[]
}

export function extractWorkflowInput(message: Message): WorkflowInput | null {
  const messageText = extractTextContent(message)
  const dataContent = extractDataContent(message)
  const fileContent = extractFileContent(message)
  const files = convertFilesToExecutionFormat(fileContent)
  const hasData = Object.keys(dataContent).length > 0

  if (!messageText && !hasData && files.length === 0) {
    return null
  }

  return {
    input: messageText,
    ...(hasData ? { data: dataContent } : {}),
    ...(files.length > 0 ? { files } : {}),
  }
}

export function createTextPart(text: string): Part {
  return { kind: 'text', text }
}

export function createUserMessage(text: string): Message {
  return {
    kind: 'message',
    messageId: crypto.randomUUID(),
    role: 'user',
    parts: [{ kind: 'text', text }],
  }
}

export function createAgentMessage(text: string): Message {
  return {
    kind: 'message',
    messageId: crypto.randomUUID(),
    role: 'agent',
    parts: [{ kind: 'text', text }],
  }
}

export function createA2AToolId(agentId: string, skillId: string): string {
  return `a2a:${agentId}:${skillId}`
}

export function parseA2AToolId(toolId: string): { agentId: string; skillId: string } | null {
  const parts = toolId.split(':')
  if (parts.length !== 3 || parts[0] !== 'a2a') {
    return null
  }
  return { agentId: parts[1], skillId: parts[2] }
}

export function sanitizeAgentName(name: string): string {
  return name
    .toLowerCase()
    .replace(/[^a-z0-9]+/g, '-')
    .replace(/^-+|-+$/g, '')
    .substring(0, 64)
}

export function buildA2AEndpointUrl(baseUrl: string, agentId: string): string {
  const base = baseUrl.replace(/\/$/, '')
  return `${base}/api/a2a/serve/${agentId}`
}

export function buildAgentCardUrl(baseUrl: string, agentId: string): string {
  const base = baseUrl.replace(/\/$/, '')
  return `${base}/api/a2a/agents/${agentId}`
}

export function getLastAgentMessage(task: Task): Message | undefined {
  return task.history?.filter((m) => m.role === 'agent').pop()
}

export function getLastAgentMessageText(task: Task): string {
  const message = getLastAgentMessage(task)
  return message ? extractTextContent(message) : ''
}

export interface ParsedSSEChunk {
  /** Incremental content from chunk events */
  content: string
  /** Final content if this chunk contains the final event */
  finalContent?: string
  /** Whether this chunk indicates the stream is done */
  isDone: boolean
}

/**
 * Parse workflow SSE chunk and extract clean content
 *
 * Workflow execute endpoint returns SSE in this format:
 * - data: {"event":"chunk","data":{"content":"partial text"}}
 * - data: {"event":"final","data":{"success":true,"output":{"content":"full text"}}}
 * - data: "[DONE]"
 *
 * This function extracts the actual text content for A2A streaming
 */
export function parseWorkflowSSEChunk(chunk: string): ParsedSSEChunk {
  const result: ParsedSSEChunk = {
    content: '',
    isDone: false,
  }

  const lines = chunk.split('\n')

  for (const line of lines) {
    const trimmed = line.trim()

    if (!trimmed.startsWith('data:')) continue

    const dataContent = trimmed.slice(5).trim()

    if (dataContent === '"[DONE]"' || dataContent === '[DONE]') {
      result.isDone = true
      continue
    }

    try {
      const parsed = JSON.parse(dataContent)

      if (parsed.event === 'chunk' && parsed.data?.content) {
        result.content += parsed.data.content
      } else if (parsed.event === 'final' && parsed.data?.output?.content) {
        result.finalContent = parsed.data.output.content
        result.isDone = true
      }
    } catch {
      // Not valid JSON, skip
    }
  }

  return result
}
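A quick check of the SSE parser (not part of the diff), using the exact event format documented above:

import { parseWorkflowSSEChunk } from './utils'

const chunk = [
  'data: {"event":"chunk","data":{"content":"Hello, "}}',
  'data: {"event":"chunk","data":{"content":"world"}}',
  'data: "[DONE]"',
].join('\n')

const parsed = parseWorkflowSSEChunk(chunk)
// parsed.content === 'Hello, world'; parsed.isDone === true; finalContent is
// unset because no "final" event appeared in this chunk.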
@@ -113,7 +113,7 @@ export async function checkHybridAuth(
    }
  }

-  // 3. Try API key auth
+  // 3. Try API key auth (X-API-Key header only)
  const apiKeyHeader = request.headers.get('x-api-key')
  if (apiKeyHeader) {
    const result = await authenticateApiKeyFromHeader(apiKeyHeader)
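A client-side sketch (not part of the diff): after this change, programmatic callers authenticate with the X-API-Key header checked above. The endpoint path and JSON-RPC body are illustrative placeholders.

await fetch('https://sim.example.com/api/a2a/serve/agent-123', {
  method: 'POST',
  headers: {
    'Content-Type': 'application/json',
    'X-API-Key': process.env.SIM_API_KEY ?? '',
  },
  body: JSON.stringify({ jsonrpc: '2.0', id: 1, method: 'message/send', params: {} }),
})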
@@ -6,6 +6,47 @@ import { useWorkflowStore } from '@/stores/workflows/workflow/store'

const logger = createLogger('DeploymentUtils')

export interface InputField {
  name: string
  type: string
}

/**
 * Gets the input format from the Start block
 * Returns an array of field definitions with name and type
 */
export function getStartBlockInputFormat(): InputField[] {
  try {
    const candidates = resolveStartCandidates(useWorkflowStore.getState().blocks, {
      execution: 'api',
    })

    const targetCandidate =
      candidates.find((candidate) => candidate.path === StartBlockPath.UNIFIED) ||
      candidates.find((candidate) => candidate.path === StartBlockPath.SPLIT_API) ||
      candidates.find((candidate) => candidate.path === StartBlockPath.SPLIT_INPUT) ||
      candidates.find((candidate) => candidate.path === StartBlockPath.LEGACY_STARTER)

    const targetBlock = targetCandidate?.block

    if (targetBlock) {
      const inputFormat = useSubBlockStore.getState().getValue(targetBlock.id, 'inputFormat')
      if (inputFormat && Array.isArray(inputFormat)) {
        return inputFormat
          .map((field: { name?: string; type?: string }) => ({
            name: field.name || '',
            type: field.type || 'string',
          }))
          .filter((field) => field.name)
      }
    }
  } catch (error) {
    logger.warn('Error getting start block input format:', error)
  }

  return []
}

/**
 * Gets the input format example for a workflow's API deployment
 * Returns the -d flag with example data if inputs exist, empty string otherwise
@@ -72,13 +113,11 @@ export function getInputFormatExample(
    })
  }

-  // Add streaming parameters if enabled and outputs are selected
  if (includeStreaming && selectedStreamingOutputs.length > 0) {
    exampleData.stream = true

    const convertedOutputs = selectedStreamingOutputs
      .map((outputId) => {
-        // If it starts with a UUID, convert to blockName.attribute format
        if (startsWithUuid(outputId)) {
          const underscoreIndex = outputId.indexOf('_')
          if (underscoreIndex === -1) return null
@@ -86,25 +125,20 @@ export function getInputFormatExample(
          const blockId = outputId.substring(0, underscoreIndex)
          const attribute = outputId.substring(underscoreIndex + 1)

-          // Find the block by ID and get its name
          const block = blocks.find((b) => b.id === blockId)
          if (block?.name) {
            return `${normalizeName(block.name)}.${attribute}`
          }
-          // Block not found (deleted), return null to filter out
          return null
        }

-        // Already in blockName.attribute format, verify the block exists
        const parts = outputId.split('.')
        if (parts.length >= 2) {
          const blockName = parts[0]
-          // Check if a block with this name exists
          const block = blocks.find(
            (b) => b.name && normalizeName(b.name) === normalizeName(blockName)
          )
          if (!block) {
-            // Block not found (deleted), return null to filter out
            return null
          }
        }
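A rough sketch (not part of the diff) of how getStartBlockInputFormat can feed the curl example; the value-per-type mapping below is a guess at what getInputFormatExample does with the fields, not taken from the commit.

const fields = getStartBlockInputFormat()
// e.g. [{ name: 'query', type: 'string' }, { name: 'limit', type: 'number' }]

const exampleData: Record<string, unknown> = Object.fromEntries(
  fields.map((f) => [f.name, f.type === 'number' ? 0 : f.type === 'boolean' ? true : 'example'])
)

// Produces something like: -d '{"query":"example","limit":0}'
const dFlag = fields.length > 0 ? `-d '${JSON.stringify(exampleData)}'` : ''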
@@ -1,17 +1,14 @@
import { db } from '@sim/db'
-import { permissions, userStats, workflow as workflowTable, workspace } from '@sim/db/schema'
+import { permissions, userStats, workflow as workflowTable } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import type { InferSelectModel } from 'drizzle-orm'
import { and, eq } from 'drizzle-orm'
import { NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
-import type { PermissionType } from '@/lib/workspaces/permissions/utils'
+import { getWorkspaceWithOwner, type PermissionType } from '@/lib/workspaces/permissions/utils'
import type { ExecutionResult } from '@/executor/types'

const logger = createLogger('WorkflowUtils')

type WorkflowSelection = InferSelectModel<typeof workflowTable>

export async function getWorkflowById(id: string) {
  const rows = await db.select().from(workflowTable).where(eq(workflowTable.id, id)).limit(1)
@@ -44,11 +41,7 @@ export async function getWorkflowAccessContext(
  let workspacePermission: PermissionType | null = null

  if (workflow.workspaceId) {
-    const [workspaceRow] = await db
-      .select({ ownerId: workspace.ownerId })
-      .from(workspace)
-      .where(eq(workspace.id, workflow.workspaceId))
-      .limit(1)
+    const workspaceRow = await getWorkspaceWithOwner(workflow.workspaceId)

    workspaceOwnerId = workspaceRow?.ownerId ?? null
@@ -147,7 +140,6 @@ export const workflowHasResponseBlock = (executionResult: ExecutionResult): bool
  return responseBlock !== undefined
}

-// Create a HTTP response from response block
export const createHttpResponseFromBlock = (executionResult: ExecutionResult): NextResponse => {
  const { data = {}, status = 200, headers = {} } = executionResult.output
@@ -40,11 +40,15 @@ vi.mock('drizzle-orm', () => drizzleOrmMock)

import { db } from '@sim/db'
import {
+  checkWorkspaceAccess,
  getManageableWorkspaces,
  getUserEntityPermissions,
  getUsersWithPermissions,
+  getWorkspaceById,
+  getWorkspaceWithOwner,
  hasAdminPermission,
  hasWorkspaceAdminAccess,
+  workspaceExists,
} from '@/lib/workspaces/permissions/utils'

const mockDb = db as any
@@ -610,4 +614,209 @@
      expect(result).toEqual([])
    })
  })

  describe('getWorkspaceById', () => {
    it.concurrent('should return workspace when it exists', async () => {
      const chain = createMockChain([{ id: 'workspace123' }])
      mockDb.select.mockReturnValue(chain)

      const result = await getWorkspaceById('workspace123')

      expect(result).toEqual({ id: 'workspace123' })
    })

    it.concurrent('should return null when workspace does not exist', async () => {
      const chain = createMockChain([])
      mockDb.select.mockReturnValue(chain)

      const result = await getWorkspaceById('non-existent')

      expect(result).toBeNull()
    })

    it.concurrent('should handle empty workspace ID', async () => {
      const chain = createMockChain([])
      mockDb.select.mockReturnValue(chain)

      const result = await getWorkspaceById('')

      expect(result).toBeNull()
    })
  })

  describe('getWorkspaceWithOwner', () => {
    it.concurrent('should return workspace with owner when it exists', async () => {
      const chain = createMockChain([{ id: 'workspace123', ownerId: 'owner456' }])
      mockDb.select.mockReturnValue(chain)

      const result = await getWorkspaceWithOwner('workspace123')

      expect(result).toEqual({ id: 'workspace123', ownerId: 'owner456' })
    })

    it.concurrent('should return null when workspace does not exist', async () => {
      const chain = createMockChain([])
      mockDb.select.mockReturnValue(chain)

      const result = await getWorkspaceWithOwner('non-existent')

      expect(result).toBeNull()
    })

    it.concurrent('should handle workspace with null owner ID', async () => {
      const chain = createMockChain([{ id: 'workspace123', ownerId: null }])
      mockDb.select.mockReturnValue(chain)

      const result = await getWorkspaceWithOwner('workspace123')

      expect(result).toEqual({ id: 'workspace123', ownerId: null })
    })
  })

  describe('workspaceExists', () => {
    it.concurrent('should return true when workspace exists', async () => {
      const chain = createMockChain([{ id: 'workspace123' }])
      mockDb.select.mockReturnValue(chain)

      const result = await workspaceExists('workspace123')

      expect(result).toBe(true)
    })

    it.concurrent('should return false when workspace does not exist', async () => {
      const chain = createMockChain([])
      mockDb.select.mockReturnValue(chain)

      const result = await workspaceExists('non-existent')

      expect(result).toBe(false)
    })

    it.concurrent('should handle empty workspace ID', async () => {
      const chain = createMockChain([])
      mockDb.select.mockReturnValue(chain)

      const result = await workspaceExists('')

      expect(result).toBe(false)
    })
  })

  describe('checkWorkspaceAccess', () => {
    it('should return exists=false when workspace does not exist', async () => {
      const chain = createMockChain([])
      mockDb.select.mockReturnValue(chain)

      const result = await checkWorkspaceAccess('non-existent', 'user123')

      expect(result).toEqual({
        exists: false,
        hasAccess: false,
        canWrite: false,
        workspace: null,
      })
    })

    it('should return full access when user is workspace owner', async () => {
      const chain = createMockChain([{ id: 'workspace123', ownerId: 'user123' }])
      mockDb.select.mockReturnValue(chain)

      const result = await checkWorkspaceAccess('workspace123', 'user123')

      expect(result).toEqual({
        exists: true,
        hasAccess: true,
        canWrite: true,
        workspace: { id: 'workspace123', ownerId: 'user123' },
      })
    })

    it('should return hasAccess=false when user has no permissions', async () => {
      let callCount = 0
      mockDb.select.mockImplementation(() => {
        callCount++
        if (callCount === 1) {
          return createMockChain([{ id: 'workspace123', ownerId: 'other-user' }])
        }
        return createMockChain([]) // No permissions
      })

      const result = await checkWorkspaceAccess('workspace123', 'user123')

      expect(result.exists).toBe(true)
      expect(result.hasAccess).toBe(false)
      expect(result.canWrite).toBe(false)
    })

    it('should return canWrite=true when user has admin permission', async () => {
      let callCount = 0
      mockDb.select.mockImplementation(() => {
        callCount++
        if (callCount === 1) {
          return createMockChain([{ id: 'workspace123', ownerId: 'other-user' }])
        }
        return createMockChain([{ permissionType: 'admin' }])
      })

      const result = await checkWorkspaceAccess('workspace123', 'user123')

      expect(result.exists).toBe(true)
      expect(result.hasAccess).toBe(true)
      expect(result.canWrite).toBe(true)
    })

    it('should return canWrite=true when user has write permission', async () => {
      let callCount = 0
      mockDb.select.mockImplementation(() => {
        callCount++
        if (callCount === 1) {
          return createMockChain([{ id: 'workspace123', ownerId: 'other-user' }])
        }
        return createMockChain([{ permissionType: 'write' }])
      })

      const result = await checkWorkspaceAccess('workspace123', 'user123')

      expect(result.exists).toBe(true)
      expect(result.hasAccess).toBe(true)
      expect(result.canWrite).toBe(true)
    })

    it('should return canWrite=false when user has read permission', async () => {
      let callCount = 0
      mockDb.select.mockImplementation(() => {
        callCount++
        if (callCount === 1) {
          return createMockChain([{ id: 'workspace123', ownerId: 'other-user' }])
        }
        return createMockChain([{ permissionType: 'read' }])
      })

      const result = await checkWorkspaceAccess('workspace123', 'user123')

      expect(result.exists).toBe(true)
      expect(result.hasAccess).toBe(true)
      expect(result.canWrite).toBe(false)
    })

    it('should handle empty user ID', async () => {
      const chain = createMockChain([])
      mockDb.select.mockReturnValue(chain)

      const result = await checkWorkspaceAccess('workspace123', '')

      expect(result.exists).toBe(false)
      expect(result.hasAccess).toBe(false)
    })

    it('should handle empty workspace ID', async () => {
      const chain = createMockChain([])
      mockDb.select.mockReturnValue(chain)

      const result = await checkWorkspaceAccess('', 'user123')

      expect(result.exists).toBe(false)
      expect(result.hasAccess).toBe(false)
    })
  })
})
@@ -3,6 +3,112 @@ import { permissions, type permissionTypeEnum, user, workspace } from '@sim/db/s
import { and, eq } from 'drizzle-orm'

export type PermissionType = (typeof permissionTypeEnum.enumValues)[number]
export interface WorkspaceBasic {
  id: string
}

export interface WorkspaceWithOwner {
  id: string
  ownerId: string
}

export interface WorkspaceAccess {
  exists: boolean
  hasAccess: boolean
  canWrite: boolean
  workspace: WorkspaceWithOwner | null
}

/**
 * Get a workspace by ID (basic existence check)
 *
 * @param workspaceId - The workspace ID to look up
 * @returns The workspace if found, null otherwise
 */
export async function getWorkspaceById(workspaceId: string): Promise<WorkspaceBasic | null> {
  const [ws] = await db
    .select({ id: workspace.id })
    .from(workspace)
    .where(eq(workspace.id, workspaceId))
    .limit(1)

  return ws || null
}

/**
 * Get a workspace with owner info by ID
 *
 * @param workspaceId - The workspace ID to look up
 * @returns The workspace with owner info if found, null otherwise
 */
export async function getWorkspaceWithOwner(
  workspaceId: string
): Promise<WorkspaceWithOwner | null> {
  const [ws] = await db
    .select({ id: workspace.id, ownerId: workspace.ownerId })
    .from(workspace)
    .where(eq(workspace.id, workspaceId))
    .limit(1)

  return ws || null
}

/**
 * Check if a workspace exists
 *
 * @param workspaceId - The workspace ID to check
 * @returns True if the workspace exists, false otherwise
 */
export async function workspaceExists(workspaceId: string): Promise<boolean> {
  const ws = await getWorkspaceById(workspaceId)
  return ws !== null
}

/**
 * Check workspace access for a user
 *
 * Verifies the workspace exists and the user has access to it.
 * Returns access level (read/write) based on ownership and permissions.
 *
 * @param workspaceId - The workspace ID to check
 * @param userId - The user ID to check access for
 * @returns WorkspaceAccess object with exists, hasAccess, canWrite, and workspace data
 */
export async function checkWorkspaceAccess(
  workspaceId: string,
  userId: string
): Promise<WorkspaceAccess> {
  const ws = await getWorkspaceWithOwner(workspaceId)

  if (!ws) {
    return { exists: false, hasAccess: false, canWrite: false, workspace: null }
  }

  if (ws.ownerId === userId) {
    return { exists: true, hasAccess: true, canWrite: true, workspace: ws }
  }

  const [permissionRow] = await db
    .select({ permissionType: permissions.permissionType })
    .from(permissions)
    .where(
      and(
        eq(permissions.userId, userId),
        eq(permissions.entityType, 'workspace'),
        eq(permissions.entityId, workspaceId)
      )
    )
    .limit(1)

  if (!permissionRow) {
    return { exists: true, hasAccess: false, canWrite: false, workspace: ws }
  }

  const canWrite =
    permissionRow.permissionType === 'write' || permissionRow.permissionType === 'admin'

  return { exists: true, hasAccess: true, canWrite, workspace: ws }
}

/**
 * Get the highest permission level a user has for a specific entity
@@ -111,17 +217,13 @@ export async function hasWorkspaceAdminAccess(
  userId: string,
  workspaceId: string
): Promise<boolean> {
-  const workspaceResult = await db
-    .select({ ownerId: workspace.ownerId })
-    .from(workspace)
-    .where(eq(workspace.id, workspaceId))
-    .limit(1)
+  const ws = await getWorkspaceWithOwner(workspaceId)

-  if (workspaceResult.length === 0) {
+  if (!ws) {
    return false
  }

-  if (workspaceResult[0].ownerId === userId) {
+  if (ws.ownerId === userId) {
    return true
  }
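A route-handler sketch (not part of the diff) showing the intended call pattern for checkWorkspaceAccess; the handler shape and error messages are illustrative.

import { NextResponse } from 'next/server'
import { checkWorkspaceAccess } from '@/lib/workspaces/permissions/utils'

export async function handleWorkspaceUpdate(workspaceId: string, userId: string) {
  const access = await checkWorkspaceAccess(workspaceId, userId)

  if (!access.exists) return NextResponse.json({ error: 'Not found' }, { status: 404 })
  if (!access.hasAccess) return NextResponse.json({ error: 'Forbidden' }, { status: 403 })
  if (!access.canWrite) return NextResponse.json({ error: 'Read-only access' }, { status: 403 })

  // ... perform the write using access.workspace
  return NextResponse.json({ ok: true })
}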
Some files were not shown because too many files have changed in this diff.