diff --git a/apps/docs/content/docs/tools/airtable.mdx b/apps/docs/content/docs/tools/airtable.mdx index a078806f03..500bfab8ef 100644 --- a/apps/docs/content/docs/tools/airtable.mdx +++ b/apps/docs/content/docs/tools/airtable.mdx @@ -71,19 +71,16 @@ Read records from an Airtable table | Parameter | Type | Required | Description | | --------- | ---- | -------- | ----------- | -| `accessToken` | string | Yes | OAuth access token | | `baseId` | string | Yes | ID of the Airtable base | | `tableId` | string | Yes | ID of the table | | `maxRecords` | number | No | Maximum number of records to return | -| `filterFormula` | string | No | Formula to filter records \(e.g., | +| `filterFormula` | string | No | Formula to filter records \(e.g., "\(\{Field Name\} = \'Value\'\)"\) | #### Output | Parameter | Type | Description | | --------- | ---- | ----------- | -| `records` | json | Retrieved record data | -| `record` | json | Single record data | -| `metadata` | json | Operation metadata | +| `records` | json | Array of retrieved Airtable records | ### `airtable_get_record` @@ -93,7 +90,6 @@ Retrieve a single record from an Airtable table by its ID | Parameter | Type | Required | Description | | --------- | ---- | -------- | ----------- | -| `accessToken` | string | Yes | OAuth access token | | `baseId` | string | Yes | ID of the Airtable base | | `tableId` | string | Yes | ID or name of the table | | `recordId` | string | Yes | ID of the record to retrieve | @@ -102,9 +98,8 @@ Retrieve a single record from an Airtable table by its ID | Parameter | Type | Description | | --------- | ---- | ----------- | -| `records` | json | Retrieved record data | -| `record` | json | Single record data | -| `metadata` | json | Operation metadata | +| `record` | json | Retrieved Airtable record with id, createdTime, and fields | +| `metadata` | json | Operation metadata including record count | ### `airtable_create_records` @@ -114,17 +109,16 @@ Write new records to an Airtable table | 
Parameter | Type | Required | Description | | --------- | ---- | -------- | ----------- | -| `accessToken` | string | Yes | OAuth access token | | `baseId` | string | Yes | ID of the Airtable base | | `tableId` | string | Yes | ID or name of the table | +| `records` | json | Yes | Array of records to create, each with a `fields` object | +| `fields` | string | No | No description | #### Output | Parameter | Type | Description | | --------- | ---- | ----------- | -| `records` | json | Retrieved record data | -| `record` | json | Single record data | -| `metadata` | json | Operation metadata | +| `records` | json | Array of created Airtable records | ### `airtable_update_record` @@ -134,7 +128,6 @@ Update an existing record in an Airtable table by ID | Parameter | Type | Required | Description | | --------- | ---- | -------- | ----------- | -| `accessToken` | string | Yes | OAuth access token | | `baseId` | string | Yes | ID of the Airtable base | | `tableId` | string | Yes | ID or name of the table | | `recordId` | string | Yes | ID of the record to update | @@ -144,9 +137,8 @@ Update an existing record in an Airtable table by ID | Parameter | Type | Description | | --------- | ---- | ----------- | -| `records` | json | Retrieved record data | -| `record` | json | Single record data | -| `metadata` | json | Operation metadata | +| `record` | json | Updated Airtable record with id, createdTime, and fields | +| `metadata` | json | Operation metadata including record count and updated field names | ### `airtable_update_multiple_records` @@ -156,17 +148,17 @@ Update multiple existing records in an Airtable table | Parameter | Type | Required | Description | | --------- | ---- | -------- | ----------- | -| `accessToken` | string | Yes | OAuth access token | | `baseId` | string | Yes | ID of the Airtable base | | `tableId` | string | Yes | ID or name of the table | +| `records` | json | Yes | Array of records to update, each with an `id` and a `fields` object | +| 
`fields` | string | No | No description | +| `fields` | string | No | No description | #### Output | Parameter | Type | Description | | --------- | ---- | ----------- | -| `records` | json | Retrieved record data | -| `record` | json | Single record data | -| `metadata` | json | Operation metadata | +| `records` | json | Array of updated Airtable records | diff --git a/apps/docs/content/docs/tools/arxiv.mdx b/apps/docs/content/docs/tools/arxiv.mdx index b18140b41f..95d7bbd333 100644 --- a/apps/docs/content/docs/tools/arxiv.mdx +++ b/apps/docs/content/docs/tools/arxiv.mdx @@ -71,10 +71,7 @@ Search for academic papers on ArXiv by keywords, authors, titles, or other field | Parameter | Type | Description | | --------- | ---- | ----------- | -| `papers` | json | Found papers data | -| `totalResults` | number | Total results count | -| `paper` | json | Paper details | -| `authorPapers` | json | Author papers list | +| `papers` | json | Array of papers matching the search query | ### `arxiv_get_paper` @@ -84,16 +81,13 @@ Get detailed information about a specific ArXiv paper by its ID. | Parameter | Type | Required | Description | | --------- | ---- | -------- | ----------- | -| `paperId` | string | Yes | ArXiv paper ID \(e.g., | +| `paperId` | string | Yes | ArXiv paper ID \(e.g., "1706.03762"\) | #### Output | Parameter | Type | Description | | --------- | ---- | ----------- | -| `papers` | json | Found papers data | -| `totalResults` | number | Total results count | -| `paper` | json | Paper details | -| `authorPapers` | json | Author papers list | +| `paper` | json | Detailed information about the requested ArXiv paper | ### `arxiv_get_author_papers` @@ -110,10 +104,7 @@ Search for papers by a specific author on ArXiv. 
| Parameter | Type | Description | | --------- | ---- | ----------- | -| `papers` | json | Found papers data | -| `totalResults` | number | Total results count | -| `paper` | json | Paper details | -| `authorPapers` | json | Author papers list | +| `authorPapers` | json | Array of papers authored by the specified author | diff --git a/apps/docs/content/docs/tools/browser_use.mdx b/apps/docs/content/docs/tools/browser_use.mdx index 2063625a6e..7ca9783a8a 100644 --- a/apps/docs/content/docs/tools/browser_use.mdx +++ b/apps/docs/content/docs/tools/browser_use.mdx @@ -73,6 +73,7 @@ Runs a browser automation task using BrowserUse | --------- | ---- | -------- | ----------- | | `task` | string | Yes | What should the browser agent do | | `variables` | json | No | Optional variables to use as secrets \(format: \{key: value\}\) | +| `format` | string | No | No description | | `save_browser_data` | boolean | No | Whether to save browser data | | `model` | string | No | LLM model to use \(default: gpt-4o\) | | `apiKey` | string | Yes | API key for BrowserUse API | @@ -81,10 +82,9 @@ Runs a browser automation task using BrowserUse | Parameter | Type | Description | | --------- | ---- | ----------- | -| `id` | string | Task execution identifier | -| `success` | boolean | Task completion status | -| `output` | any | Task output data | -| `steps` | json | Execution steps taken | +| `success` | boolean | Operation success status | +| `output` | json | Browser automation task results including task ID, success status, output data, and execution steps | +| `error` | string | Error message if the operation failed | diff --git a/apps/docs/content/docs/tools/clay.mdx b/apps/docs/content/docs/tools/clay.mdx index 6320eabb0a..364fc9f462 100644 --- a/apps/docs/content/docs/tools/clay.mdx +++ b/apps/docs/content/docs/tools/clay.mdx @@ -220,7 +220,8 @@ Populate Clay with data from a JSON file. 
Enables direct communication and notif | Parameter | Type | Description | | --------- | ---- | ----------- | -| `data` | any | Response data | +| `success` | boolean | Operation success status | +| `output` | json | Clay populate operation results including response data from Clay webhook | diff --git a/apps/docs/content/docs/tools/confluence.mdx b/apps/docs/content/docs/tools/confluence.mdx index 311f3d7e24..981f39cf91 100644 --- a/apps/docs/content/docs/tools/confluence.mdx +++ b/apps/docs/content/docs/tools/confluence.mdx @@ -57,7 +57,6 @@ Retrieve content from Confluence pages using the Confluence API. | Parameter | Type | Required | Description | | --------- | ---- | -------- | ----------- | -| `accessToken` | string | Yes | OAuth access token for Confluence | | `domain` | string | Yes | Your Confluence domain \(e.g., yourcompany.atlassian.net\) | | `pageId` | string | Yes | Confluence page ID to retrieve | | `cloudId` | string | No | Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain. | @@ -66,11 +65,10 @@ Retrieve content from Confluence pages using the Confluence API. | Parameter | Type | Description | | --------- | ---- | ----------- | -| `ts` | string | Timestamp | -| `pageId` | string | Page identifier | -| `content` | string | Page content | +| `ts` | string | Timestamp of retrieval | +| `pageId` | string | Confluence page ID | +| `content` | string | Page content with HTML tags stripped | | `title` | string | Page title | -| `success` | boolean | Operation success status | ### `confluence_update` @@ -80,7 +78,6 @@ Update a Confluence page using the Confluence API. 
| Parameter | Type | Required | Description | | --------- | ---- | -------- | ----------- | -| `accessToken` | string | Yes | OAuth access token for Confluence | | `domain` | string | Yes | Your Confluence domain \(e.g., yourcompany.atlassian.net\) | | `pageId` | string | Yes | Confluence page ID to update | | `title` | string | No | New title for the page | @@ -92,11 +89,10 @@ Update a Confluence page using the Confluence API. | Parameter | Type | Description | | --------- | ---- | ----------- | -| `ts` | string | Timestamp | -| `pageId` | string | Page identifier | -| `content` | string | Page content | -| `title` | string | Page title | -| `success` | boolean | Operation success status | +| `ts` | string | Timestamp of update | +| `pageId` | string | Confluence page ID | +| `title` | string | Updated page title | +| `success` | boolean | Update operation success status | diff --git a/apps/docs/content/docs/tools/discord.mdx b/apps/docs/content/docs/tools/discord.mdx index 674e834e3e..a96c49964a 100644 --- a/apps/docs/content/docs/tools/discord.mdx +++ b/apps/docs/content/docs/tools/discord.mdx @@ -80,8 +80,8 @@ Send a message to a Discord channel | Parameter | Type | Description | | --------- | ---- | ----------- | -| `message` | string | Message content | -| `data` | any | Response data | +| `message` | string | Success or error message | +| `data` | object | Discord message data | ### `discord_get_messages` @@ -99,8 +99,8 @@ Retrieve messages from a Discord channel | Parameter | Type | Description | | --------- | ---- | ----------- | -| `message` | string | Message content | -| `data` | any | Response data | +| `message` | string | Success or error message | +| `messages` | array | Array of Discord messages with full metadata | ### `discord_get_server` @@ -117,8 +117,8 @@ Retrieve information about a Discord server (guild) | Parameter | Type | Description | | --------- | ---- | ----------- | -| `message` | string | Message content | -| `data` | any | Response 
data | +| `message` | string | Success or error message | +| `data` | object | Discord server \(guild\) information | ### `discord_get_user` @@ -135,8 +135,8 @@ Retrieve information about a Discord user | Parameter | Type | Description | | --------- | ---- | ----------- | -| `message` | string | Message content | -| `data` | any | Response data | +| `message` | string | Success or error message | +| `data` | object | Discord user information | diff --git a/apps/docs/content/docs/tools/exa.mdx b/apps/docs/content/docs/tools/exa.mdx index 34e860ff2c..baaaace971 100644 --- a/apps/docs/content/docs/tools/exa.mdx +++ b/apps/docs/content/docs/tools/exa.mdx @@ -68,11 +68,7 @@ Search the web using Exa AI. Returns relevant search results with titles, URLs, | Parameter | Type | Description | | --------- | ---- | ----------- | -| `results` | json | Search results | -| `similarLinks` | json | Similar links found | -| `answer` | string | Generated answer | -| `citations` | json | Answer citations | -| `research` | json | Research findings | +| `results` | array | Search results with titles, URLs, and text snippets | ### `exa_get_contents` @@ -91,11 +87,7 @@ Retrieve the contents of webpages using Exa AI. Returns the title, text content, | Parameter | Type | Description | | --------- | ---- | ----------- | -| `results` | json | Search results | -| `similarLinks` | json | Similar links found | -| `answer` | string | Generated answer | -| `citations` | json | Answer citations | -| `research` | json | Research findings | +| `results` | array | Retrieved content from URLs with title, text, and summaries | ### `exa_find_similar_links` @@ -114,11 +106,7 @@ Find webpages similar to a given URL using Exa AI. 
Returns a list of similar lin | Parameter | Type | Description | | --------- | ---- | ----------- | -| `results` | json | Search results | -| `similarLinks` | json | Similar links found | -| `answer` | string | Generated answer | -| `citations` | json | Answer citations | -| `research` | json | Research findings | +| `similarLinks` | array | Similar links found with titles, URLs, and text snippets | ### `exa_answer` @@ -136,11 +124,8 @@ Get an AI-generated answer to a question with citations from the web using Exa A | Parameter | Type | Description | | --------- | ---- | ----------- | -| `results` | json | Search results | -| `similarLinks` | json | Similar links found | -| `answer` | string | Generated answer | -| `citations` | json | Answer citations | -| `research` | json | Research findings | +| `answer` | string | AI-generated answer to the question | +| `citations` | array | Sources and citations for the answer | ### `exa_research` @@ -158,11 +143,7 @@ Perform comprehensive research using AI to generate detailed reports with citati | Parameter | Type | Description | | --------- | ---- | ----------- | -| `results` | json | Search results | -| `similarLinks` | json | Similar links found | -| `answer` | string | Generated answer | -| `citations` | json | Answer citations | -| `research` | json | Research findings | +| `research` | array | Comprehensive research findings with citations and summaries | diff --git a/apps/docs/content/docs/tools/file.mdx b/apps/docs/content/docs/tools/file.mdx index 88a3fecf8e..a7c326908e 100644 --- a/apps/docs/content/docs/tools/file.mdx +++ b/apps/docs/content/docs/tools/file.mdx @@ -71,8 +71,8 @@ Parse one or more uploaded files or files from URLs (text, PDF, CSV, images, etc | Parameter | Type | Description | | --------- | ---- | ----------- | -| `files` | json | Parsed file data | -| `combinedContent` | string | Combined file content | +| `files` | json | Array of parsed file objects with content, metadata, and file properties 
| +| `combinedContent` | string | All file contents merged into a single text string | diff --git a/apps/docs/content/docs/tools/firecrawl.mdx b/apps/docs/content/docs/tools/firecrawl.mdx index ba28ff956f..c478a258eb 100644 --- a/apps/docs/content/docs/tools/firecrawl.mdx +++ b/apps/docs/content/docs/tools/firecrawl.mdx @@ -81,14 +81,9 @@ Extract structured content from web pages with comprehensive metadata support. C | Parameter | Type | Description | | --------- | ---- | ----------- | -| `markdown` | string | Page content markdown | -| `html` | any | Raw HTML content | -| `metadata` | json | Page metadata | -| `data` | json | Search results data | -| `warning` | any | Warning messages | -| `pages` | json | Crawled pages data | -| `total` | number | Total pages found | -| `creditsUsed` | number | Credits consumed | +| `markdown` | string | Page content in markdown format | +| `html` | string | Raw HTML content of the page | +| `metadata` | object | Page metadata including SEO and Open Graph information | ### `firecrawl_search` @@ -105,14 +100,7 @@ Search for information on the web using Firecrawl | Parameter | Type | Description | | --------- | ---- | ----------- | -| `markdown` | string | Page content markdown | -| `html` | any | Raw HTML content | -| `metadata` | json | Page metadata | -| `data` | json | Search results data | -| `warning` | any | Warning messages | -| `pages` | json | Crawled pages data | -| `total` | number | Total pages found | -| `creditsUsed` | number | Credits consumed | +| `data` | array | Search results data | ### `firecrawl_crawl` @@ -131,14 +119,7 @@ Crawl entire websites and extract structured content from all accessible pages | Parameter | Type | Description | | --------- | ---- | ----------- | -| `markdown` | string | Page content markdown | -| `html` | any | Raw HTML content | -| `metadata` | json | Page metadata | -| `data` | json | Search results data | -| `warning` | any | Warning messages | -| `pages` | json | Crawled pages data 
| -| `total` | number | Total pages found | -| `creditsUsed` | number | Credits consumed | +| `pages` | array | Array of crawled pages with their content and metadata | diff --git a/apps/docs/content/docs/tools/generic_webhook.mdx b/apps/docs/content/docs/tools/generic_webhook.mdx new file mode 100644 index 0000000000..048916b7a5 --- /dev/null +++ b/apps/docs/content/docs/tools/generic_webhook.mdx @@ -0,0 +1,32 @@ +--- +title: Webhook +description: Receive webhooks from any service +--- + +import { BlockInfoCard } from "@/components/ui/block-info-card" + + + + + `} +/> + + + + + +## Notes + +- Category: `triggers` +- Type: `generic_webhook` diff --git a/apps/docs/content/docs/tools/github.mdx b/apps/docs/content/docs/tools/github.mdx index b9b48839af..3169cf4f72 100644 --- a/apps/docs/content/docs/tools/github.mdx +++ b/apps/docs/content/docs/tools/github.mdx @@ -1,6 +1,6 @@ --- title: GitHub -description: Interact with GitHub +description: Interact with GitHub or trigger workflows from GitHub events --- import { BlockInfoCard } from "@/components/ui/block-info-card" @@ -35,7 +35,7 @@ In Sim, the GitHub integration enables your agents to interact directly with Git ## Usage Instructions -Access GitHub repositories, pull requests, and comments through the GitHub API. Automate code reviews, PR management, and repository interactions within your workflow. +Access GitHub repositories, pull requests, and comments through the GitHub API. Automate code reviews, PR management, and repository interactions within your workflow. Trigger workflows from GitHub events like push, pull requests, and issues. 
@@ -58,8 +58,8 @@ Fetch PR details including diff and files changed | Parameter | Type | Description | | --------- | ---- | ----------- | -| `content` | string | Response content | -| `metadata` | json | Response metadata | +| `content` | string | Human-readable PR summary | +| `metadata` | object | Detailed PR metadata including file changes | ### `github_comment` @@ -85,8 +85,8 @@ Create comments on GitHub PRs | Parameter | Type | Description | | --------- | ---- | ----------- | -| `content` | string | Response content | -| `metadata` | json | Response metadata | +| `content` | string | Human-readable comment confirmation | +| `metadata` | object | Comment metadata | ### `github_repo_info` @@ -104,8 +104,8 @@ Retrieve comprehensive GitHub repository metadata including stars, forks, issues | Parameter | Type | Description | | --------- | ---- | ----------- | -| `content` | string | Response content | -| `metadata` | json | Response metadata | +| `content` | string | Human-readable repository summary | +| `metadata` | object | Repository metadata | ### `github_latest_commit` @@ -117,15 +117,15 @@ Retrieve the latest commit from a GitHub repository | --------- | ---- | -------- | ----------- | | `owner` | string | Yes | Repository owner \(user or organization\) | | `repo` | string | Yes | Repository name | -| `branch` | string | No | Branch name \(defaults to the repository | +| `branch` | string | No | Branch name \(defaults to the repository's default branch\) | | `apiKey` | string | Yes | GitHub API token | #### Output | Parameter | Type | Description | | --------- | ---- | ----------- | -| `content` | string | Response content | -| `metadata` | json | Response metadata | +| `content` | string | Human-readable commit summary | +| `metadata` | object | Commit metadata | diff --git a/apps/docs/content/docs/tools/gmail.mdx b/apps/docs/content/docs/tools/gmail.mdx index a12d6cc67b..011b44ab99 100644 --- a/apps/docs/content/docs/tools/gmail.mdx +++ 
b/apps/docs/content/docs/tools/gmail.mdx @@ -1,6 +1,6 @@ --- title: Gmail -description: Send Gmail +description: Send Gmail or trigger workflows from Gmail events --- import { BlockInfoCard } from "@/components/ui/block-info-card" @@ -51,7 +51,7 @@ In Sim, the Gmail integration enables your agents to send, read, and search emai ## Usage Instructions -Integrate Gmail functionality to send email messages within your workflow. Automate email communications and process email content using OAuth authentication. +Comprehensive Gmail integration with OAuth authentication. Send email messages, read email content, and trigger workflows from Gmail events like new emails and label changes. @@ -65,7 +65,6 @@ Send emails using Gmail | Parameter | Type | Required | Description | | --------- | ---- | -------- | ----------- | -| `accessToken` | string | Yes | Access token for Gmail API | | `to` | string | Yes | Recipient email address | | `subject` | string | Yes | Email subject | | `body` | string | Yes | Email body content | @@ -74,8 +73,8 @@ Send emails using Gmail | Parameter | Type | Description | | --------- | ---- | ----------- | -| `content` | string | Response content | -| `metadata` | json | Email metadata | +| `content` | string | Success message | +| `metadata` | object | Email metadata | ### `gmail_draft` @@ -85,7 +84,6 @@ Draft emails using Gmail | Parameter | Type | Required | Description | | --------- | ---- | -------- | ----------- | -| `accessToken` | string | Yes | Access token for Gmail API | | `to` | string | Yes | Recipient email address | | `subject` | string | Yes | Email subject | | `body` | string | Yes | Email body content | @@ -94,8 +92,8 @@ Draft emails using Gmail | Parameter | Type | Description | | --------- | ---- | ----------- | -| `content` | string | Response content | -| `metadata` | json | Email metadata | +| `content` | string | Success message | +| `metadata` | object | Draft metadata | ### `gmail_read` @@ -105,7 +103,6 @@ Read emails from 
Gmail | Parameter | Type | Required | Description | | --------- | ---- | -------- | ----------- | -| `accessToken` | string | Yes | Access token for Gmail API | | `messageId` | string | No | ID of the message to read | | `folder` | string | No | Folder/label to read emails from | | `unreadOnly` | boolean | No | Only retrieve unread messages | @@ -115,8 +112,8 @@ Read emails from Gmail | Parameter | Type | Description | | --------- | ---- | ----------- | -| `content` | string | Response content | -| `metadata` | json | Email metadata | +| `content` | string | Email content or summary | +| `metadata` | object | Email metadata | ### `gmail_search` @@ -126,7 +123,6 @@ Search emails in Gmail | Parameter | Type | Required | Description | | --------- | ---- | -------- | ----------- | -| `accessToken` | string | Yes | Access token for Gmail API | | `query` | string | Yes | Search query for emails | | `maxResults` | number | No | Maximum number of results to return | @@ -134,8 +130,8 @@ Search emails in Gmail | Parameter | Type | Description | | --------- | ---- | ----------- | -| `content` | string | Response content | -| `metadata` | json | Email metadata | +| `content` | string | Search results summary | +| `metadata` | object | Search metadata | diff --git a/apps/docs/content/docs/tools/google_calendar.mdx b/apps/docs/content/docs/tools/google_calendar.mdx index eb02695622..b51c247c31 100644 --- a/apps/docs/content/docs/tools/google_calendar.mdx +++ b/apps/docs/content/docs/tools/google_calendar.mdx @@ -104,7 +104,6 @@ Create a new event in Google Calendar | Parameter | Type | Required | Description | | --------- | ---- | -------- | ----------- | -| `accessToken` | string | Yes | Access token for Google Calendar API | | `calendarId` | string | No | Calendar ID \(defaults to primary\) | | `summary` | string | Yes | Event title/summary | | `description` | string | No | Event description | @@ -119,8 +118,8 @@ Create a new event in Google Calendar | Parameter | Type | 
Description | | --------- | ---- | ----------- | -| `content` | string | Operation response content | -| `metadata` | json | Event metadata | +| `content` | string | Event creation confirmation message | +| `metadata` | json | Created event metadata including ID, status, and details | ### `google_calendar_list` @@ -130,7 +129,6 @@ List events from Google Calendar | Parameter | Type | Required | Description | | --------- | ---- | -------- | ----------- | -| `accessToken` | string | Yes | Access token for Google Calendar API | | `calendarId` | string | No | Calendar ID \(defaults to primary\) | | `timeMin` | string | No | Lower bound for events \(RFC3339 timestamp, e.g., 2025-06-03T00:00:00Z\) | | `timeMax` | string | No | Upper bound for events \(RFC3339 timestamp, e.g., 2025-06-04T00:00:00Z\) | @@ -141,8 +139,8 @@ List events from Google Calendar | Parameter | Type | Description | | --------- | ---- | ----------- | -| `content` | string | Operation response content | -| `metadata` | json | Event metadata | +| `content` | string | Summary of found events count | +| `metadata` | json | List of events with pagination tokens and event details | ### `google_calendar_get` @@ -152,7 +150,6 @@ Get a specific event from Google Calendar | Parameter | Type | Required | Description | | --------- | ---- | -------- | ----------- | -| `accessToken` | string | Yes | Access token for Google Calendar API | | `calendarId` | string | No | Calendar ID \(defaults to primary\) | | `eventId` | string | Yes | Event ID to retrieve | @@ -160,8 +157,8 @@ Get a specific event from Google Calendar | Parameter | Type | Description | | --------- | ---- | ----------- | -| `content` | string | Operation response content | -| `metadata` | json | Event metadata | +| `content` | string | Event retrieval confirmation message | +| `metadata` | json | Event details including ID, status, times, and attendees | ### `google_calendar_quick_add` @@ -171,9 +168,8 @@ Create events from natural language text | 
Parameter | Type | Required | Description | | --------- | ---- | -------- | ----------- | -| `accessToken` | string | Yes | Access token for Google Calendar API | | `calendarId` | string | No | Calendar ID \(defaults to primary\) | -| `text` | string | Yes | Natural language text describing the event \(e.g., | +| `text` | string | Yes | Natural language text describing the event \(e.g., "Meeting with John tomorrow at 3pm"\) | | `attendees` | array | No | Array of attendee email addresses \(comma-separated string also accepted\) | | `sendUpdates` | string | No | How to send updates to attendees: all, externalOnly, or none | @@ -181,8 +177,8 @@ Create events from natural language text | Parameter | Type | Description | | --------- | ---- | ----------- | -| `content` | string | Operation response content | -| `metadata` | json | Event metadata | +| `content` | string | Event creation confirmation message from natural language | +| `metadata` | json | Created event metadata including parsed details | ### `google_calendar_invite` @@ -192,7 +188,6 @@ Invite attendees to an existing Google Calendar event | Parameter | Type | Required | Description | | --------- | ---- | -------- | ----------- | -| `accessToken` | string | Yes | Access token for Google Calendar API | | `calendarId` | string | No | Calendar ID \(defaults to primary\) | | `eventId` | string | Yes | Event ID to invite attendees to | | `attendees` | array | Yes | Array of attendee email addresses to invite | @@ -203,8 +198,8 @@ Invite attendees to an existing Google Calendar event | Parameter | Type | Description | | --------- | ---- | ----------- | -| `content` | string | Operation response content | -| `metadata` | json | Event metadata | +| `content` | string | Attendee invitation confirmation message with email delivery status | +| `metadata` | json | Updated event metadata including attendee list and details | diff --git a/apps/docs/content/docs/tools/google_docs.mdx 
b/apps/docs/content/docs/tools/google_docs.mdx index cfac1a14d4..593d74fc28 100644 --- a/apps/docs/content/docs/tools/google_docs.mdx +++ b/apps/docs/content/docs/tools/google_docs.mdx @@ -95,16 +95,14 @@ Read content from a Google Docs document | Parameter | Type | Required | Description | | --------- | ---- | -------- | ----------- | -| `accessToken` | string | Yes | The access token for the Google Docs API | | `documentId` | string | Yes | The ID of the document to read | #### Output | Parameter | Type | Description | | --------- | ---- | ----------- | -| `content` | string | Document content | -| `metadata` | json | Document metadata | -| `updatedContent` | boolean | Content update status | +| `content` | string | Extracted document text content | +| `metadata` | json | Document metadata including ID, title, and URL | ### `google_docs_write` @@ -114,7 +112,6 @@ Write or update content in a Google Docs document | Parameter | Type | Required | Description | | --------- | ---- | -------- | ----------- | -| `accessToken` | string | Yes | The access token for the Google Docs API | | `documentId` | string | Yes | The ID of the document to write to | | `content` | string | Yes | The content to write to the document | @@ -122,9 +119,8 @@ Write or update content in a Google Docs document | Parameter | Type | Description | | --------- | ---- | ----------- | -| `content` | string | Document content | -| `metadata` | json | Document metadata | -| `updatedContent` | boolean | Content update status | +| `updatedContent` | boolean | Indicates if document content was updated successfully | +| `metadata` | json | Updated document metadata including ID, title, and URL | ### `google_docs_create` @@ -134,7 +130,6 @@ Create a new Google Docs document | Parameter | Type | Required | Description | | --------- | ---- | -------- | ----------- | -| `accessToken` | string | Yes | The access token for the Google Docs API | | `title` | string | Yes | The title of the document to create | | 
`content` | string | No | The content of the document to create | | `folderSelector` | string | No | Select the folder to create the document in | @@ -144,9 +139,7 @@ Create a new Google Docs document | Parameter | Type | Description | | --------- | ---- | ----------- | -| `content` | string | Document content | -| `metadata` | json | Document metadata | -| `updatedContent` | boolean | Content update status | +| `metadata` | json | Created document metadata including ID, title, and URL | diff --git a/apps/docs/content/docs/tools/google_drive.mdx b/apps/docs/content/docs/tools/google_drive.mdx index d729ff6d52..f6a2e1ec75 100644 --- a/apps/docs/content/docs/tools/google_drive.mdx +++ b/apps/docs/content/docs/tools/google_drive.mdx @@ -87,7 +87,6 @@ Upload a file to Google Drive | Parameter | Type | Required | Description | | --------- | ---- | -------- | ----------- | -| `accessToken` | string | Yes | The access token for the Google Drive API | | `fileName` | string | Yes | The name of the file to upload | | `content` | string | Yes | The content of the file to upload | | `mimeType` | string | No | The MIME type of the file to upload | @@ -98,8 +97,7 @@ Upload a file to Google Drive | Parameter | Type | Description | | --------- | ---- | ----------- | -| `file` | json | File data | -| `files` | json | Files list | +| `file` | json | Uploaded file metadata including ID, name, and links | ### `google_drive_create_folder` @@ -109,7 +107,6 @@ Create a new folder in Google Drive | Parameter | Type | Required | Description | | --------- | ---- | -------- | ----------- | -| `accessToken` | string | Yes | The access token for the Google Drive API | | `fileName` | string | Yes | Name of the folder to create | | `folderSelector` | string | No | Select the parent folder to create the folder in | | `folderId` | string | No | ID of the parent folder \(internal use\) | @@ -118,8 +115,7 @@ Create a new folder in Google Drive | Parameter | Type | Description | | --------- | ---- | 
----------- | -| `file` | json | File data | -| `files` | json | Files list | +| `file` | json | Created folder metadata including ID, name, and parent information | ### `google_drive_list` @@ -129,7 +125,6 @@ List files and folders in Google Drive | Parameter | Type | Required | Description | | --------- | ---- | -------- | ----------- | -| `accessToken` | string | Yes | The access token for the Google Drive API | | `folderSelector` | string | No | Select the folder to list files from | | `folderId` | string | No | The ID of the folder to list files from \(internal use\) | | `query` | string | No | A query to filter the files | @@ -140,8 +135,7 @@ List files and folders in Google Drive | Parameter | Type | Description | | --------- | ---- | ----------- | -| `file` | json | File data | -| `files` | json | Files list | +| `files` | json | Array of file metadata objects from the specified folder | diff --git a/apps/docs/content/docs/tools/google_search.mdx b/apps/docs/content/docs/tools/google_search.mdx index b98863031e..805881f63f 100644 --- a/apps/docs/content/docs/tools/google_search.mdx +++ b/apps/docs/content/docs/tools/google_search.mdx @@ -81,8 +81,7 @@ Search the web with the Custom Search API | Parameter | Type | Description | | --------- | ---- | ----------- | -| `items` | json | Search result items | -| `searchInformation` | json | Search metadata | +| `items` | array | Array of search results from Google | diff --git a/apps/docs/content/docs/tools/google_sheets.mdx b/apps/docs/content/docs/tools/google_sheets.mdx index 00909a3608..9339cdaab8 100644 --- a/apps/docs/content/docs/tools/google_sheets.mdx +++ b/apps/docs/content/docs/tools/google_sheets.mdx @@ -110,7 +110,6 @@ Read data from a Google Sheets spreadsheet | Parameter | Type | Required | Description | | --------- | ---- | -------- | ----------- | -| `accessToken` | string | Yes | The access token for the Google Sheets API | | `spreadsheetId` | string | Yes | The ID of the spreadsheet to read from 
| | `range` | string | No | The range of cells to read from | @@ -118,13 +117,8 @@ Read data from a Google Sheets spreadsheet | Parameter | Type | Description | | --------- | ---- | ----------- | -| `data` | json | Sheet data | -| `metadata` | json | Operation metadata | -| `updatedRange` | string | Updated range | -| `updatedRows` | number | Updated rows count | -| `updatedColumns` | number | Updated columns count | -| `updatedCells` | number | Updated cells count | -| `tableRange` | string | Table range | +| `data` | json | Sheet data including range and cell values | +| `metadata` | json | Spreadsheet metadata including ID and URL | ### `google_sheets_write` @@ -134,7 +128,6 @@ Write data to a Google Sheets spreadsheet | Parameter | Type | Required | Description | | --------- | ---- | -------- | ----------- | -| `accessToken` | string | Yes | The access token for the Google Sheets API | | `spreadsheetId` | string | Yes | The ID of the spreadsheet to write to | | `range` | string | No | The range of cells to write to | | `values` | array | Yes | The data to write to the spreadsheet | @@ -145,13 +138,11 @@ Write data to a Google Sheets spreadsheet | Parameter | Type | Description | | --------- | ---- | ----------- | -| `data` | json | Sheet data | -| `metadata` | json | Operation metadata | -| `updatedRange` | string | Updated range | -| `updatedRows` | number | Updated rows count | -| `updatedColumns` | number | Updated columns count | -| `updatedCells` | number | Updated cells count | -| `tableRange` | string | Table range | +| `updatedRange` | string | Range of cells that were updated | +| `updatedRows` | number | Number of rows updated | +| `updatedColumns` | number | Number of columns updated | +| `updatedCells` | number | Number of cells updated | +| `metadata` | json | Spreadsheet metadata including ID and URL | ### `google_sheets_update` @@ -161,7 +152,6 @@ Update data in a Google Sheets spreadsheet | Parameter | Type | Required | Description | | --------- 
| ---- | -------- | ----------- | -| `accessToken` | string | Yes | The access token for the Google Sheets API | | `spreadsheetId` | string | Yes | The ID of the spreadsheet to update | | `range` | string | No | The range of cells to update | | `values` | array | Yes | The data to update in the spreadsheet | @@ -172,13 +162,11 @@ Update data in a Google Sheets spreadsheet | Parameter | Type | Description | | --------- | ---- | ----------- | -| `data` | json | Sheet data | -| `metadata` | json | Operation metadata | -| `updatedRange` | string | Updated range | -| `updatedRows` | number | Updated rows count | -| `updatedColumns` | number | Updated columns count | -| `updatedCells` | number | Updated cells count | -| `tableRange` | string | Table range | +| `updatedRange` | string | Range of cells that were updated | +| `updatedRows` | number | Number of rows updated | +| `updatedColumns` | number | Number of columns updated | +| `updatedCells` | number | Number of cells updated | +| `metadata` | json | Spreadsheet metadata including ID and URL | ### `google_sheets_append` @@ -188,7 +176,6 @@ Append data to the end of a Google Sheets spreadsheet | Parameter | Type | Required | Description | | --------- | ---- | -------- | ----------- | -| `accessToken` | string | Yes | The access token for the Google Sheets API | | `spreadsheetId` | string | Yes | The ID of the spreadsheet to append to | | `range` | string | No | The range of cells to append after | | `values` | array | Yes | The data to append to the spreadsheet | @@ -200,13 +187,12 @@ Append data to the end of a Google Sheets spreadsheet | Parameter | Type | Description | | --------- | ---- | ----------- | -| `data` | json | Sheet data | -| `metadata` | json | Operation metadata | -| `updatedRange` | string | Updated range | -| `updatedRows` | number | Updated rows count | -| `updatedColumns` | number | Updated columns count | -| `updatedCells` | number | Updated cells count | -| `tableRange` | string | Table range 
| +| `tableRange` | string | Range of the table where data was appended | +| `updatedRange` | string | Range of cells that were updated | +| `updatedRows` | number | Number of rows updated | +| `updatedColumns` | number | Number of columns updated | +| `updatedCells` | number | Number of cells updated | +| `metadata` | json | Spreadsheet metadata including ID and URL | diff --git a/apps/docs/content/docs/tools/huggingface.mdx b/apps/docs/content/docs/tools/huggingface.mdx index 6e0875a4ec..afba281339 100644 --- a/apps/docs/content/docs/tools/huggingface.mdx +++ b/apps/docs/content/docs/tools/huggingface.mdx @@ -92,9 +92,8 @@ Generate completions using Hugging Face Inference API | Parameter | Type | Description | | --------- | ---- | ----------- | -| `content` | string | Generated response | -| `model` | string | Model used | -| `usage` | json | Token usage stats | +| `success` | boolean | Operation success status | +| `output` | object | Chat completion results | diff --git a/apps/docs/content/docs/tools/hunter.mdx b/apps/docs/content/docs/tools/hunter.mdx index f7f2c17fa2..03e2309957 100644 --- a/apps/docs/content/docs/tools/hunter.mdx +++ b/apps/docs/content/docs/tools/hunter.mdx @@ -57,7 +57,7 @@ Returns companies matching a set of criteria using Hunter.io AI-powered search. | --------- | ---- | -------- | ----------- | | `query` | string | No | Natural language search query for companies | | `domain` | string | No | Company domain names to filter by | -| `headcount` | string | No | Company size filter \(e.g., | +| `headcount` | string | No | Company size filter \(e.g., "1-10", "11-50"\) | | `company_type` | string | No | Type of organization | | `technology` | string | No | Technology used by companies | | `apiKey` | string | Yes | Hunter.io API Key | @@ -66,15 +66,7 @@ Returns companies matching a set of criteria using Hunter.io AI-powered search. 
| Parameter | Type | Description | | --------- | ---- | ----------- | -| `results` | json | Search results | -| `emails` | json | Email addresses found | -| `email` | string | Found email address | -| `score` | number | Confidence score | -| `result` | string | Verification result | -| `status` | string | Status message | -| `total` | number | Total results count | -| `personal_emails` | number | Personal emails count | -| `generic_emails` | number | Generic emails count | +| `results` | array | Array of companies matching the search criteria, each containing domain, name, headcount, technologies, and email_count | ### `hunter_domain_search` @@ -96,15 +88,26 @@ Returns all the email addresses found using one given domain name, with sources. | Parameter | Type | Description | | --------- | ---- | ----------- | -| `results` | json | Search results | -| `emails` | json | Email addresses found | -| `email` | string | Found email address | -| `score` | number | Confidence score | -| `result` | string | Verification result | -| `status` | string | Status message | -| `total` | number | Total results count | -| `personal_emails` | number | Personal emails count | -| `generic_emails` | number | Generic emails count | +| `domain` | string | The searched domain name | +| `disposable` | boolean | Whether the domain accepts disposable email addresses | +| `webmail` | boolean | Whether the domain is a webmail provider | +| `accept_all` | boolean | Whether the domain accepts all email addresses | +| `pattern` | string | The email pattern used by the organization | +| `organization` | string | The organization name | +| `description` | string | Description of the organization | +| `industry` | string | Industry of the organization | +| `twitter` | string | Twitter profile of the organization | +| `facebook` | string | Facebook profile of the organization | +| `linkedin` | string | LinkedIn profile of the organization | +| `instagram` | string | Instagram profile of the 
organization | +| `youtube` | string | YouTube channel of the organization | +| `technologies` | array | Array of technologies used by the organization | +| `country` | string | Country where the organization is located | +| `state` | string | State where the organization is located | +| `city` | string | City where the organization is located | +| `postal_code` | string | Postal code of the organization | +| `street` | string | Street address of the organization | +| `emails` | array | Array of email addresses found for the domain, each containing value, type, confidence, sources, and person details | ### `hunter_email_finder` @@ -115,8 +118,8 @@ Finds the most likely email address for a person given their name and company do | Parameter | Type | Required | Description | | --------- | ---- | -------- | ----------- | | `domain` | string | Yes | Company domain name | -| `first_name` | string | Yes | Person | -| `last_name` | string | Yes | Person | +| `first_name` | string | Yes | Person's first name | +| `last_name` | string | Yes | Person's last name | | `company` | string | No | Company name | | `apiKey` | string | Yes | Hunter.io API Key | @@ -124,15 +127,10 @@ Finds the most likely email address for a person given their name and company do | Parameter | Type | Description | | --------- | ---- | ----------- | -| `results` | json | Search results | -| `emails` | json | Email addresses found | -| `email` | string | Found email address | -| `score` | number | Confidence score | -| `result` | string | Verification result | -| `status` | string | Status message | -| `total` | number | Total results count | -| `personal_emails` | number | Personal emails count | -| `generic_emails` | number | Generic emails count | +| `email` | string | The found email address | +| `score` | number | Confidence score for the found email address | +| `sources` | array | Array of sources where the email was found, each containing domain, uri, extracted_on, last_seen_on, and 
still_on_page | +| `verification` | object | Verification information containing date and status | ### `hunter_email_verifier` @@ -149,15 +147,20 @@ Verifies the deliverability of an email address and provides detailed verificati | Parameter | Type | Description | | --------- | ---- | ----------- | -| `results` | json | Search results | -| `emails` | json | Email addresses found | -| `email` | string | Found email address | -| `score` | number | Confidence score | -| `result` | string | Verification result | -| `status` | string | Status message | -| `total` | number | Total results count | -| `personal_emails` | number | Personal emails count | -| `generic_emails` | number | Generic emails count | +| `result` | string | Deliverability result: deliverable, undeliverable, or risky | +| `score` | number | Confidence score for the verification result | +| `email` | string | The verified email address | +| `regexp` | boolean | Whether the email follows a valid regex pattern | +| `gibberish` | boolean | Whether the email appears to be gibberish | +| `disposable` | boolean | Whether the email is from a disposable email provider | +| `webmail` | boolean | Whether the email is from a webmail provider | +| `mx_records` | boolean | Whether MX records exist for the domain | +| `smtp_server` | boolean | Whether the SMTP server is reachable | +| `smtp_check` | boolean | Whether the SMTP check was successful | +| `accept_all` | boolean | Whether the domain accepts all email addresses | +| `block` | boolean | Whether the email is blocked | +| `status` | string | Verification status: valid, invalid, accept_all, webmail, disposable, or unknown | +| `sources` | array | Array of sources where the email was found | ### `hunter_companies_find` @@ -174,15 +177,8 @@ Enriches company data using domain name. 
| Parameter | Type | Description | | --------- | ---- | ----------- | -| `results` | json | Search results | -| `emails` | json | Email addresses found | -| `email` | string | Found email address | -| `score` | number | Confidence score | -| `result` | string | Verification result | -| `status` | string | Status message | -| `total` | number | Total results count | -| `personal_emails` | number | Personal emails count | -| `generic_emails` | number | Generic emails count | +| `person` | object | Person information \(undefined for companies_find tool\) | +| `company` | object | Company information including name, domain, industry, size, country, linkedin, and twitter | ### `hunter_email_count` @@ -201,15 +197,11 @@ Returns the total number of email addresses found for a domain or company. | Parameter | Type | Description | | --------- | ---- | ----------- | -| `results` | json | Search results | -| `emails` | json | Email addresses found | -| `email` | string | Found email address | -| `score` | number | Confidence score | -| `result` | string | Verification result | -| `status` | string | Status message | -| `total` | number | Total results count | -| `personal_emails` | number | Personal emails count | -| `generic_emails` | number | Generic emails count | +| `total` | number | Total number of email addresses found | +| `personal_emails` | number | Number of personal email addresses found | +| `generic_emails` | number | Number of generic email addresses found | +| `department` | object | Breakdown of email addresses by department \(executive, it, finance, management, sales, legal, support, hr, marketing, communication\) | +| `seniority` | object | Breakdown of email addresses by seniority level \(junior, senior, executive\) | diff --git a/apps/docs/content/docs/tools/image_generator.mdx b/apps/docs/content/docs/tools/image_generator.mdx index 2614231f44..4fbc3fbb52 100644 --- a/apps/docs/content/docs/tools/image_generator.mdx +++ 
b/apps/docs/content/docs/tools/image_generator.mdx @@ -73,9 +73,8 @@ Generate images using OpenAI | Parameter | Type | Description | | --------- | ---- | ----------- | -| `content` | string | Generation response | -| `image` | string | Generated image URL | -| `metadata` | json | Generation metadata | +| `success` | boolean | Operation success status | +| `output` | object | Generated image data | diff --git a/apps/docs/content/docs/tools/jina.mdx b/apps/docs/content/docs/tools/jina.mdx index 7daef77f54..f0d0061c34 100644 --- a/apps/docs/content/docs/tools/jina.mdx +++ b/apps/docs/content/docs/tools/jina.mdx @@ -87,7 +87,7 @@ Extract and process web content into clean, LLM-friendly text using Jina AI Read | Parameter | Type | Description | | --------- | ---- | ----------- | -| `content` | string | Extracted content | +| `content` | string | The extracted content from the URL, processed into clean, LLM-friendly text | diff --git a/apps/docs/content/docs/tools/jira.mdx b/apps/docs/content/docs/tools/jira.mdx index 19973d3731..81fabb91cb 100644 --- a/apps/docs/content/docs/tools/jira.mdx +++ b/apps/docs/content/docs/tools/jira.mdx @@ -57,7 +57,6 @@ Retrieve detailed information about a specific Jira issue | Parameter | Type | Required | Description | | --------- | ---- | -------- | ----------- | -| `accessToken` | string | Yes | OAuth access token for Jira | | `domain` | string | Yes | Your Jira domain \(e.g., yourcompany.atlassian.net\) | | `projectId` | string | No | Jira project ID to retrieve issues from. If not provided, all issues will be retrieved. 
| | `issueKey` | string | Yes | Jira issue key to retrieve \(e.g., PROJ-123\) | @@ -67,14 +66,8 @@ Retrieve detailed information about a specific Jira issue | Parameter | Type | Description | | --------- | ---- | ----------- | -| `ts` | string | Timestamp | -| `issueKey` | string | Issue key | -| `summary` | string | Issue summary | -| `description` | string | Issue description | -| `created` | string | Creation date | -| `updated` | string | Update date | -| `success` | boolean | Operation success | -| `url` | string | Issue URL | +| `success` | boolean | Operation success status | +| `output` | object | Jira issue details with issue key, summary, description, created and updated timestamps | ### `jira_update` @@ -84,7 +77,6 @@ Update a Jira issue | Parameter | Type | Required | Description | | --------- | ---- | -------- | ----------- | -| `accessToken` | string | Yes | OAuth access token for Jira | | `domain` | string | Yes | Your Jira domain \(e.g., yourcompany.atlassian.net\) | | `projectId` | string | No | Jira project ID to update issues in. If not provided, all issues will be retrieved. 
| | `issueKey` | string | Yes | Jira issue key to update | @@ -99,14 +91,8 @@ Update a Jira issue | Parameter | Type | Description | | --------- | ---- | ----------- | -| `ts` | string | Timestamp | -| `issueKey` | string | Issue key | -| `summary` | string | Issue summary | -| `description` | string | Issue description | -| `created` | string | Creation date | -| `updated` | string | Update date | -| `success` | boolean | Operation success | -| `url` | string | Issue URL | +| `success` | boolean | Operation success status | +| `output` | object | Updated Jira issue details with timestamp, issue key, summary, and success status | ### `jira_write` @@ -116,7 +102,6 @@ Write a Jira issue | Parameter | Type | Required | Description | | --------- | ---- | -------- | ----------- | -| `accessToken` | string | Yes | OAuth access token for Jira | | `domain` | string | Yes | Your Jira domain \(e.g., yourcompany.atlassian.net\) | | `projectId` | string | Yes | Project ID for the issue | | `summary` | string | Yes | Summary for the issue | @@ -130,14 +115,8 @@ Write a Jira issue | Parameter | Type | Description | | --------- | ---- | ----------- | -| `ts` | string | Timestamp | -| `issueKey` | string | Issue key | -| `summary` | string | Issue summary | -| `description` | string | Issue description | -| `created` | string | Creation date | -| `updated` | string | Update date | -| `success` | boolean | Operation success | -| `url` | string | Issue URL | +| `success` | boolean | Operation success status | +| `output` | object | Created Jira issue details with timestamp, issue key, summary, success status, and URL | ### `jira_bulk_read` @@ -147,7 +126,6 @@ Retrieve multiple Jira issues in bulk | Parameter | Type | Required | Description | | --------- | ---- | -------- | ----------- | -| `accessToken` | string | Yes | OAuth access token for Jira | | `domain` | string | Yes | Your Jira domain \(e.g., yourcompany.atlassian.net\) | | `projectId` | string | Yes | Jira project ID | | 
`cloudId` | string | No | Jira cloud ID | @@ -156,14 +134,8 @@ Retrieve multiple Jira issues in bulk | Parameter | Type | Description | | --------- | ---- | ----------- | -| `ts` | string | Timestamp | -| `issueKey` | string | Issue key | -| `summary` | string | Issue summary | -| `description` | string | Issue description | -| `created` | string | Creation date | -| `updated` | string | Update date | -| `success` | boolean | Operation success | -| `url` | string | Issue URL | +| `success` | boolean | Operation success status | +| `output` | array | Array of Jira issues with summary, description, created and updated timestamps | diff --git a/apps/docs/content/docs/tools/knowledge.mdx b/apps/docs/content/docs/tools/knowledge.mdx index 5a1b4e6b35..525f3f8cc3 100644 --- a/apps/docs/content/docs/tools/knowledge.mdx +++ b/apps/docs/content/docs/tools/knowledge.mdx @@ -72,9 +72,7 @@ Search for similar content in a knowledge base using vector similarity | Parameter | Type | Description | | --------- | ---- | ----------- | -| `results` | json | Search results | -| `query` | string | Query used | -| `totalResults` | number | Total results count | +| `results` | array | Array of search results from the knowledge base | ### `knowledge_upload_chunk` @@ -92,9 +90,7 @@ Upload a new chunk to a document in a knowledge base | Parameter | Type | Description | | --------- | ---- | ----------- | -| `results` | json | Search results | -| `query` | string | Query used | -| `totalResults` | number | Total results count | +| `data` | object | Information about the uploaded chunk | ### `knowledge_create_document` @@ -120,9 +116,7 @@ Create a new document in a knowledge base | Parameter | Type | Description | | --------- | ---- | ----------- | -| `results` | json | Search results | -| `query` | string | Query used | -| `totalResults` | number | Total results count | +| `data` | object | Information about the created document | diff --git a/apps/docs/content/docs/tools/linear.mdx 
b/apps/docs/content/docs/tools/linear.mdx index a541add5a0..3da9fc927b 100644 --- a/apps/docs/content/docs/tools/linear.mdx +++ b/apps/docs/content/docs/tools/linear.mdx @@ -63,8 +63,7 @@ Fetch and filter issues from Linear | Parameter | Type | Description | | --------- | ---- | ----------- | -| `issues` | json | Issues list | -| `issue` | json | Single issue data | +| `issues` | array | Array of issues from the specified Linear team and project, each containing id, title, description, state, teamId, and projectId | ### `linear_create_issue` @@ -83,8 +82,7 @@ Create a new issue in Linear | Parameter | Type | Description | | --------- | ---- | ----------- | -| `issues` | json | Issues list | -| `issue` | json | Single issue data | +| `issue` | object | The created issue containing id, title, description, state, teamId, and projectId | diff --git a/apps/docs/content/docs/tools/linkup.mdx b/apps/docs/content/docs/tools/linkup.mdx index 24c15bb1a9..52c67c9e2e 100644 --- a/apps/docs/content/docs/tools/linkup.mdx +++ b/apps/docs/content/docs/tools/linkup.mdx @@ -58,16 +58,16 @@ Search the web for information using Linkup | Parameter | Type | Required | Description | | --------- | ---- | -------- | ----------- | | `q` | string | Yes | The search query | -| `depth` | string | Yes | Search depth \(has to either be | -| `outputType` | string | Yes | Type of output to return \(has to either be | +| `depth` | string | Yes | Search depth \(must be either "standard" or "deep"\) | +| `outputType` | string | Yes | Type of output to return \(must be either "sourcedAnswer" or "searchResults"\) | | `apiKey` | string | Yes | Enter your Linkup API key | #### Output | Parameter | Type | Description | | --------- | ---- | ----------- | -| `answer` | string | Generated answer | -| `sources` | json | Source references | +| `answer` | string | The sourced answer to the search query | +| `sources` | array | Array of sources used to compile the answer, each containing name, url, and
snippet | diff --git a/apps/docs/content/docs/tools/mem0.mdx b/apps/docs/content/docs/tools/mem0.mdx index 6c9448d051..442cf95069 100644 --- a/apps/docs/content/docs/tools/mem0.mdx +++ b/apps/docs/content/docs/tools/mem0.mdx @@ -66,9 +66,8 @@ Add memories to Mem0 for persistent storage and retrieval | Parameter | Type | Description | | --------- | ---- | ----------- | -| `ids` | any | Memory identifiers | -| `memories` | any | Memory data | -| `searchResults` | any | Search results | +| `ids` | array | Array of memory IDs that were created | +| `memories` | array | Array of memory objects that were created | ### `mem0_search_memories` @@ -87,9 +86,8 @@ Search for memories in Mem0 using semantic search | Parameter | Type | Description | | --------- | ---- | ----------- | -| `ids` | any | Memory identifiers | -| `memories` | any | Memory data | -| `searchResults` | any | Search results | +| `searchResults` | array | Array of search results with memory data, each containing id, data, and score | +| `ids` | array | Array of memory IDs found in the search results | ### `mem0_get_memories` @@ -110,9 +108,8 @@ Retrieve memories from Mem0 by ID or filter criteria | Parameter | Type | Description | | --------- | ---- | ----------- | -| `ids` | any | Memory identifiers | -| `memories` | any | Memory data | -| `searchResults` | any | Search results | +| `memories` | array | Array of retrieved memory objects | +| `ids` | array | Array of memory IDs that were retrieved | diff --git a/apps/docs/content/docs/tools/memory.mdx b/apps/docs/content/docs/tools/memory.mdx index fcd46da35a..e9324e0a65 100644 --- a/apps/docs/content/docs/tools/memory.mdx +++ b/apps/docs/content/docs/tools/memory.mdx @@ -57,8 +57,9 @@ Add a new memory to the database or append to existing memory with the same ID. 
| Parameter | Type | Description | | --------- | ---- | ----------- | -| `memories` | any | Memory data | -| `id` | string | Memory identifier | +| `success` | boolean | Whether the memory was added successfully | +| `memories` | array | Array of memory objects including the new or updated memory | +| `error` | string | Error message if operation failed | ### `memory_get` @@ -74,8 +75,10 @@ Retrieve a specific memory by its ID | Parameter | Type | Description | | --------- | ---- | ----------- | -| `memories` | any | Memory data | -| `id` | string | Memory identifier | +| `success` | boolean | Whether the memory was retrieved successfully | +| `memories` | array | Array of memory data for the requested ID | +| `message` | string | Success or error message | +| `error` | string | Error message if operation failed | ### `memory_get_all` @@ -90,8 +93,10 @@ Retrieve all memories from the database | Parameter | Type | Description | | --------- | ---- | ----------- | -| `memories` | any | Memory data | -| `id` | string | Memory identifier | +| `success` | boolean | Whether all memories were retrieved successfully | +| `memories` | array | Array of all memory objects with keys, types, and data | +| `message` | string | Success or error message | +| `error` | string | Error message if operation failed | ### `memory_delete` @@ -107,8 +112,9 @@ Delete a specific memory by its ID | Parameter | Type | Description | | --------- | ---- | ----------- | -| `memories` | any | Memory data | -| `id` | string | Memory identifier | +| `success` | boolean | Whether the memory was deleted successfully | +| `message` | string | Success or error message | +| `error` | string | Error message if operation failed | diff --git a/apps/docs/content/docs/tools/microsoft_excel.mdx b/apps/docs/content/docs/tools/microsoft_excel.mdx index 15ecd8e28d..38b37827e6 100644 --- a/apps/docs/content/docs/tools/microsoft_excel.mdx +++ b/apps/docs/content/docs/tools/microsoft_excel.mdx @@ -108,7 +108,6 @@ 
Read data from a Microsoft Excel spreadsheet | Parameter | Type | Required | Description | | --------- | ---- | -------- | ----------- | -| `accessToken` | string | Yes | The access token for the Microsoft Excel API | | `spreadsheetId` | string | Yes | The ID of the spreadsheet to read from | | `range` | string | No | The range of cells to read from | @@ -116,14 +115,8 @@ Read data from a Microsoft Excel spreadsheet | Parameter | Type | Description | | --------- | ---- | ----------- | -| `data` | json | Sheet data | -| `metadata` | json | Operation metadata | -| `updatedRange` | string | Updated range | -| `updatedRows` | number | Updated rows count | -| `updatedColumns` | number | Updated columns count | -| `updatedCells` | number | Updated cells count | -| `index` | number | Row index | -| `values` | json | Table values | +| `success` | boolean | Operation success status | +| `output` | object | Excel spreadsheet data and metadata | ### `microsoft_excel_write` @@ -133,7 +126,6 @@ Write data to a Microsoft Excel spreadsheet | Parameter | Type | Required | Description | | --------- | ---- | -------- | ----------- | -| `accessToken` | string | Yes | The access token for the Microsoft Excel API | | `spreadsheetId` | string | Yes | The ID of the spreadsheet to write to | | `range` | string | No | The range of cells to write to | | `values` | array | Yes | The data to write to the spreadsheet | @@ -144,14 +136,8 @@ Write data to a Microsoft Excel spreadsheet | Parameter | Type | Description | | --------- | ---- | ----------- | -| `data` | json | Sheet data | -| `metadata` | json | Operation metadata | -| `updatedRange` | string | Updated range | -| `updatedRows` | number | Updated rows count | -| `updatedColumns` | number | Updated columns count | -| `updatedCells` | number | Updated cells count | -| `index` | number | Row index | -| `values` | json | Table values | +| `success` | boolean | Operation success status | +| `output` | object | Write operation results and 
metadata | ### `microsoft_excel_table_add` @@ -161,7 +147,6 @@ Add new rows to a Microsoft Excel table | Parameter | Type | Required | Description | | --------- | ---- | -------- | ----------- | -| `accessToken` | string | Yes | The access token for the Microsoft Excel API | | `spreadsheetId` | string | Yes | The ID of the spreadsheet containing the table | | `tableName` | string | Yes | The name of the table to add rows to | | `values` | array | Yes | The data to add to the table \(array of arrays or array of objects\) | @@ -170,14 +155,8 @@ Add new rows to a Microsoft Excel table | Parameter | Type | Description | | --------- | ---- | ----------- | -| `data` | json | Sheet data | -| `metadata` | json | Operation metadata | -| `updatedRange` | string | Updated range | -| `updatedRows` | number | Updated rows count | -| `updatedColumns` | number | Updated columns count | -| `updatedCells` | number | Updated cells count | -| `index` | number | Row index | -| `values` | json | Table values | +| `success` | boolean | Operation success status | +| `output` | object | Table add operation results and metadata | diff --git a/apps/docs/content/docs/tools/microsoft_planner.mdx b/apps/docs/content/docs/tools/microsoft_planner.mdx index 5ae82298f3..08b4bc2dbd 100644 --- a/apps/docs/content/docs/tools/microsoft_planner.mdx +++ b/apps/docs/content/docs/tools/microsoft_planner.mdx @@ -136,7 +136,6 @@ Read tasks from Microsoft Planner - get all user tasks or all tasks from a speci | Parameter | Type | Required | Description | | --------- | ---- | -------- | ----------- | -| `accessToken` | string | Yes | The access token for the Microsoft Planner API | | `planId` | string | No | The ID of the plan to get tasks from \(if not provided, gets all user tasks\) | | `taskId` | string | No | The ID of the task to get | @@ -144,8 +143,9 @@ Read tasks from Microsoft Planner - get all user tasks or all tasks from a speci | Parameter | Type | Description | | --------- | ---- | ----------- | 
-| `task` | json | The Microsoft Planner task object, including details such as id, title, description, status, due date, and assignees. | -| `metadata` | json | Additional metadata about the operation, such as timestamps, request status, or other relevant information. | +| `success` | boolean | Whether tasks were retrieved successfully | +| `tasks` | array | Array of task objects with filtered properties | +| `metadata` | object | Metadata including planId, userId, and planUrl | ### `microsoft_planner_create_task` @@ -155,7 +155,6 @@ Create a new task in Microsoft Planner | Parameter | Type | Required | Description | | --------- | ---- | -------- | ----------- | -| `accessToken` | string | Yes | The access token for the Microsoft Planner API | | `planId` | string | Yes | The ID of the plan where the task will be created | | `title` | string | Yes | The title of the task | | `description` | string | No | The description of the task | @@ -167,8 +166,9 @@ Create a new task in Microsoft Planner | Parameter | Type | Description | | --------- | ---- | ----------- | -| `task` | json | The Microsoft Planner task object, including details such as id, title, description, status, due date, and assignees. | -| `metadata` | json | Additional metadata about the operation, such as timestamps, request status, or other relevant information. 
| +| `success` | boolean | Whether the task was created successfully | +| `task` | object | The created task object with all properties | +| `metadata` | object | Metadata including planId, taskId, and taskUrl | diff --git a/apps/docs/content/docs/tools/microsoft_teams.mdx b/apps/docs/content/docs/tools/microsoft_teams.mdx index 51e80b0113..81ae2e4b51 100644 --- a/apps/docs/content/docs/tools/microsoft_teams.mdx +++ b/apps/docs/content/docs/tools/microsoft_teams.mdx @@ -112,16 +112,19 @@ Read content from a Microsoft Teams chat | Parameter | Type | Required | Description | | --------- | ---- | -------- | ----------- | -| `accessToken` | string | Yes | The access token for the Microsoft Teams API | | `chatId` | string | Yes | The ID of the chat to read from | #### Output | Parameter | Type | Description | | --------- | ---- | ----------- | -| `content` | string | Message content | -| `metadata` | json | Message metadata | -| `updatedContent` | boolean | Content update status | +| `success` | boolean | Teams chat read operation success status | +| `messageCount` | number | Number of messages retrieved from chat | +| `chatId` | string | ID of the chat that was read from | +| `messages` | array | Array of chat message objects | +| `attachmentCount` | number | Total number of attachments found | +| `attachmentTypes` | array | Types of attachments found | +| `content` | string | Formatted content of chat messages | ### `microsoft_teams_write_chat` @@ -131,7 +134,6 @@ Write or update content in a Microsoft Teams chat | Parameter | Type | Required | Description | | --------- | ---- | -------- | ----------- | -| `accessToken` | string | Yes | The access token for the Microsoft Teams API | | `chatId` | string | Yes | The ID of the chat to write to | | `content` | string | Yes | The content to write to the message | @@ -139,9 +141,12 @@ Write or update content in a Microsoft Teams chat | Parameter | Type | Description | | --------- | ---- | ----------- | -| `content` | string 
| Message content | -| `metadata` | json | Message metadata | -| `updatedContent` | boolean | Content update status | +| `success` | boolean | Teams chat message send success status | +| `messageId` | string | Unique identifier for the sent message | +| `chatId` | string | ID of the chat where message was sent | +| `createdTime` | string | Timestamp when message was created | +| `url` | string | Web URL to the message | +| `updatedContent` | boolean | Whether content was successfully updated | ### `microsoft_teams_read_channel` @@ -151,7 +156,6 @@ Read content from a Microsoft Teams channel | Parameter | Type | Required | Description | | --------- | ---- | -------- | ----------- | -| `accessToken` | string | Yes | The access token for the Microsoft Teams API | | `teamId` | string | Yes | The ID of the team to read from | | `channelId` | string | Yes | The ID of the channel to read from | @@ -159,9 +163,14 @@ Read content from a Microsoft Teams channel | Parameter | Type | Description | | --------- | ---- | ----------- | -| `content` | string | Message content | -| `metadata` | json | Message metadata | -| `updatedContent` | boolean | Content update status | +| `success` | boolean | Teams channel read operation success status | +| `messageCount` | number | Number of messages retrieved from channel | +| `teamId` | string | ID of the team that was read from | +| `channelId` | string | ID of the channel that was read from | +| `messages` | array | Array of channel message objects | +| `attachmentCount` | number | Total number of attachments found | +| `attachmentTypes` | array | Types of attachments found | +| `content` | string | Formatted content of channel messages | ### `microsoft_teams_write_channel` @@ -171,7 +180,6 @@ Write or send a message to a Microsoft Teams channel | Parameter | Type | Required | Description | | --------- | ---- | -------- | ----------- | -| `accessToken` | string | Yes | The access token for the Microsoft Teams API | | `teamId` | string | 
Yes | The ID of the team to write to | | `channelId` | string | Yes | The ID of the channel to write to | | `content` | string | Yes | The content to write to the channel | @@ -180,9 +188,13 @@ Write or send a message to a Microsoft Teams channel | Parameter | Type | Description | | --------- | ---- | ----------- | -| `content` | string | Message content | -| `metadata` | json | Message metadata | -| `updatedContent` | boolean | Content update status | +| `success` | boolean | Teams channel message send success status | +| `messageId` | string | Unique identifier for the sent message | +| `teamId` | string | ID of the team where message was sent | +| `channelId` | string | ID of the channel where message was sent | +| `createdTime` | string | Timestamp when message was created | +| `url` | string | Web URL to the message | +| `updatedContent` | boolean | Whether content was successfully updated | diff --git a/apps/docs/content/docs/tools/mistral_parse.mdx b/apps/docs/content/docs/tools/mistral_parse.mdx index dbd5a0f72f..08fadae49f 100644 --- a/apps/docs/content/docs/tools/mistral_parse.mdx +++ b/apps/docs/content/docs/tools/mistral_parse.mdx @@ -106,8 +106,9 @@ Parse PDF documents using Mistral OCR API | Parameter | Type | Description | | --------- | ---- | ----------- | -| `content` | string | Extracted content | -| `metadata` | json | Processing metadata | +| `success` | boolean | Whether the PDF was parsed successfully | +| `content` | string | Extracted content in the requested format \(markdown, text, or JSON\) | +| `metadata` | object | Processing metadata including jobId, fileType, pageCount, and usage info | diff --git a/apps/docs/content/docs/tools/notion.mdx b/apps/docs/content/docs/tools/notion.mdx index 51801ce370..5b117623ac 100644 --- a/apps/docs/content/docs/tools/notion.mdx +++ b/apps/docs/content/docs/tools/notion.mdx @@ -59,15 +59,14 @@ Read content from a Notion page | Parameter | Type | Required | Description | | --------- | ---- | -------- | 
----------- | -| `accessToken` | string | Yes | Notion OAuth access token | | `pageId` | string | Yes | The ID of the Notion page to read | #### Output | Parameter | Type | Description | | --------- | ---- | ----------- | -| `content` | string | Page content | -| `metadata` | any | Page metadata | +| `content` | string | Page content in markdown format with headers, paragraphs, lists, and todos | +| `metadata` | object | Page metadata including title, URL, and timestamps | ### `notion_read_database` @@ -77,15 +76,14 @@ Read database information and structure from Notion | Parameter | Type | Required | Description | | --------- | ---- | -------- | ----------- | -| `accessToken` | string | Yes | Notion OAuth access token | | `databaseId` | string | Yes | The ID of the Notion database to read | #### Output | Parameter | Type | Description | | --------- | ---- | ----------- | -| `content` | string | Page content | -| `metadata` | any | Page metadata | +| `content` | string | Database information including title, properties schema, and metadata | +| `metadata` | object | Database metadata including title, ID, URL, timestamps, and properties schema | ### `notion_write` @@ -95,7 +93,6 @@ Append content to a Notion page | Parameter | Type | Required | Description | | --------- | ---- | -------- | ----------- | -| `accessToken` | string | Yes | Notion OAuth access token | | `pageId` | string | Yes | The ID of the Notion page to append content to | | `content` | string | Yes | The content to append to the page | @@ -103,8 +100,7 @@ Append content to a Notion page | Parameter | Type | Description | | --------- | ---- | ----------- | -| `content` | string | Page content | -| `metadata` | any | Page metadata | +| `content` | string | Success message confirming content was appended to page | ### `notion_create_page` @@ -114,7 +110,6 @@ Create a new page in Notion | Parameter | Type | Required | Description | | --------- | ---- | -------- | ----------- | -| `accessToken` | string 
| Yes | Notion OAuth access token | | `parentId` | string | Yes | ID of the parent page | | `title` | string | No | Title of the new page | | `content` | string | No | Optional content to add to the page upon creation | @@ -123,8 +118,8 @@ Create a new page in Notion | Parameter | Type | Description | | --------- | ---- | ----------- | -| `content` | string | Page content | -| `metadata` | any | Page metadata | +| `content` | string | Success message confirming page creation | +| `metadata` | object | Page metadata including title, page ID, URL, and timestamps | ### `notion_query_database` @@ -134,7 +129,6 @@ Query and filter Notion database entries with advanced filtering | Parameter | Type | Required | Description | | --------- | ---- | -------- | ----------- | -| `accessToken` | string | Yes | Notion OAuth access token | | `databaseId` | string | Yes | The ID of the database to query | | `filter` | string | No | Filter conditions as JSON \(optional\) | | `sorts` | string | No | Sort criteria as JSON array \(optional\) | @@ -144,8 +138,8 @@ Query and filter Notion database entries with advanced filtering | Parameter | Type | Description | | --------- | ---- | ----------- | -| `content` | string | Page content | -| `metadata` | any | Page metadata | +| `content` | string | Formatted list of database entries with their properties | +| `metadata` | object | Query metadata including total results count, pagination info, and raw results array | ### `notion_search` @@ -155,7 +149,6 @@ Search across all pages and databases in Notion workspace | Parameter | Type | Required | Description | | --------- | ---- | -------- | ----------- | -| `accessToken` | string | Yes | Notion OAuth access token | | `query` | string | No | Search terms \(leave empty to get all pages\) | | `filterType` | string | No | Filter by object type: page, database, or leave empty for all | | `pageSize` | number | No | Number of results to return \(default: 100, max: 100\) | @@ -164,8 +157,8 @@ Search 
across all pages and databases in Notion workspace | Parameter | Type | Description | | --------- | ---- | ----------- | -| `content` | string | Page content | -| `metadata` | any | Page metadata | +| `content` | string | Formatted list of search results including pages and databases | +| `metadata` | object | Search metadata including total results count, pagination info, and raw results array | ### `notion_create_database` @@ -175,17 +168,16 @@ Create a new database in Notion with custom properties | Parameter | Type | Required | Description | | --------- | ---- | -------- | ----------- | -| `accessToken` | string | Yes | Notion OAuth access token | | `parentId` | string | Yes | ID of the parent page where the database will be created | | `title` | string | Yes | Title for the new database | -| `properties` | string | No | Database properties as JSON object \(optional, will create a default | +| `properties` | string | No | Database properties as JSON object \(optional, will create a default "Name" property if empty\) | #### Output | Parameter | Type | Description | | --------- | ---- | ----------- | -| `content` | string | Page content | -| `metadata` | any | Page metadata | +| `content` | string | Success message with database details and properties list | +| `metadata` | object | Database metadata including ID, title, URL, creation time, and properties schema | diff --git a/apps/docs/content/docs/tools/onedrive.mdx b/apps/docs/content/docs/tools/onedrive.mdx index 7a389f238e..1708434f06 100644 --- a/apps/docs/content/docs/tools/onedrive.mdx +++ b/apps/docs/content/docs/tools/onedrive.mdx @@ -65,7 +65,6 @@ Upload a file to OneDrive | Parameter | Type | Required | Description | | --------- | ---- | -------- | ----------- | -| `accessToken` | string | Yes | The access token for the OneDrive API | | `fileName` | string | Yes | The name of the file to upload | | `content` | string | Yes | The content of the file to upload | | `folderSelector` | string | No | Select 
the folder to upload the file to | @@ -75,8 +74,8 @@ Upload a file to OneDrive | Parameter | Type | Description | | --------- | ---- | ----------- | -| `file` | json | The OneDrive file object, including details such as id, name, size, and more. | -| `files` | json | An array of OneDrive file objects, each containing details such as id, name, size, and more. | +| `success` | boolean | Whether the file was uploaded successfully | +| `file` | object | The uploaded file object with metadata including id, name, webViewLink, webContentLink, and timestamps | ### `onedrive_create_folder` @@ -86,7 +85,6 @@ Create a new folder in OneDrive | Parameter | Type | Required | Description | | --------- | ---- | -------- | ----------- | -| `accessToken` | string | Yes | The access token for the OneDrive API | | `folderName` | string | Yes | Name of the folder to create | | `folderSelector` | string | No | Select the parent folder to create the folder in | | `folderId` | string | No | ID of the parent folder \(internal use\) | @@ -95,8 +93,8 @@ Create a new folder in OneDrive | Parameter | Type | Description | | --------- | ---- | ----------- | -| `file` | json | The OneDrive file object, including details such as id, name, size, and more. | -| `files` | json | An array of OneDrive file objects, each containing details such as id, name, size, and more. 
| +| `success` | boolean | Whether the folder was created successfully | +| `file` | object | The created folder object with metadata including id, name, webViewLink, and timestamps | ### `onedrive_list` @@ -106,7 +104,6 @@ List files and folders in OneDrive | Parameter | Type | Required | Description | | --------- | ---- | -------- | ----------- | -| `accessToken` | string | Yes | The access token for the OneDrive API | | `folderSelector` | string | No | Select the folder to list files from | | `folderId` | string | No | The ID of the folder to list files from \(internal use\) | | `query` | string | No | A query to filter the files | @@ -116,8 +113,9 @@ List files and folders in OneDrive | Parameter | Type | Description | | --------- | ---- | ----------- | -| `file` | json | The OneDrive file object, including details such as id, name, size, and more. | -| `files` | json | An array of OneDrive file objects, each containing details such as id, name, size, and more. | +| `success` | boolean | Whether files were listed successfully | +| `files` | array | Array of file and folder objects with metadata | +| `nextPageToken` | string | Token for retrieving the next page of results \(optional\) | diff --git a/apps/docs/content/docs/tools/openai.mdx b/apps/docs/content/docs/tools/openai.mdx index b8d04338bd..f445f359dd 100644 --- a/apps/docs/content/docs/tools/openai.mdx +++ b/apps/docs/content/docs/tools/openai.mdx @@ -66,9 +66,8 @@ Generate embeddings from text using OpenAI | Parameter | Type | Description | | --------- | ---- | ----------- | -| `embeddings` | json | Generated embeddings | -| `model` | string | Model used | -| `usage` | json | Token usage | +| `success` | boolean | Operation success status | +| `output` | object | Embeddings generation results | diff --git a/apps/docs/content/docs/tools/outlook.mdx b/apps/docs/content/docs/tools/outlook.mdx index 1aefc7e077..52ca800bd2 100644 --- a/apps/docs/content/docs/tools/outlook.mdx +++ 
b/apps/docs/content/docs/tools/outlook.mdx @@ -154,7 +154,6 @@ Send emails using Outlook | Parameter | Type | Required | Description | | --------- | ---- | -------- | ----------- | -| `accessToken` | string | Yes | Access token for Outlook API | | `to` | string | Yes | Recipient email address | | `subject` | string | Yes | Email subject | | `body` | string | Yes | Email body content | @@ -167,8 +166,10 @@ Send emails using Outlook | Parameter | Type | Description | | --------- | ---- | ----------- | -| `message` | string | Response message | -| `results` | json | Email results | +| `success` | boolean | Email send success status | +| `status` | string | Delivery status of the email | +| `timestamp` | string | Timestamp when email was sent | +| `message` | string | Success or error message | ### `outlook_draft` @@ -178,7 +179,6 @@ Draft emails using Outlook | Parameter | Type | Required | Description | | --------- | ---- | -------- | ----------- | -| `accessToken` | string | Yes | Access token for Outlook API | | `to` | string | Yes | Recipient email address | | `subject` | string | Yes | Email subject | | `body` | string | Yes | Email body content | @@ -187,8 +187,12 @@ Draft emails using Outlook | Parameter | Type | Description | | --------- | ---- | ----------- | -| `message` | string | Response message | -| `results` | json | Email results | +| `success` | boolean | Email draft creation success status | +| `messageId` | string | Unique identifier for the drafted email | +| `status` | string | Draft status of the email | +| `subject` | string | Subject of the drafted email | +| `timestamp` | string | Timestamp when draft was created | +| `message` | string | Success or error message | ### `outlook_read` @@ -198,7 +202,6 @@ Read emails from Outlook | Parameter | Type | Required | Description | | --------- | ---- | -------- | ----------- | -| `accessToken` | string | Yes | OAuth access token for Outlook | | `folder` | string | No | Folder ID to read emails from 
\(default: Inbox\) | | `maxResults` | number | No | Maximum number of emails to retrieve \(default: 1, max: 10\) | @@ -206,8 +209,10 @@ Read emails from Outlook | Parameter | Type | Description | | --------- | ---- | ----------- | -| `message` | string | Response message | -| `results` | json | Email results | +| `success` | boolean | Email read operation success status | +| `messageCount` | number | Number of emails retrieved | +| `messages` | array | Array of email message objects | +| `message` | string | Success or status message | diff --git a/apps/docs/content/docs/tools/perplexity.mdx b/apps/docs/content/docs/tools/perplexity.mdx index b8855fdde2..ed690490f9 100644 --- a/apps/docs/content/docs/tools/perplexity.mdx +++ b/apps/docs/content/docs/tools/perplexity.mdx @@ -62,9 +62,8 @@ Generate completions using Perplexity AI chat models | Parameter | Type | Description | | --------- | ---- | ----------- | -| `content` | string | Generated response | -| `model` | string | Model used | -| `usage` | json | Token usage | +| `success` | boolean | Operation success status | +| `output` | object | Chat completion results | diff --git a/apps/docs/content/docs/tools/pinecone.mdx b/apps/docs/content/docs/tools/pinecone.mdx index 76baccb1ae..6eb76e2025 100644 --- a/apps/docs/content/docs/tools/pinecone.mdx +++ b/apps/docs/content/docs/tools/pinecone.mdx @@ -67,12 +67,10 @@ Generate embeddings from text using Pinecone | Parameter | Type | Description | | --------- | ---- | ----------- | -| `matches` | any | Search matches | -| `upsertedCount` | any | Upserted count | -| `data` | any | Response data | -| `model` | any | Model information | -| `vector_type` | any | Vector type | -| `usage` | any | Usage statistics | +| `data` | array | Generated embeddings data with values and vector type | +| `model` | string | Model used for generating embeddings | +| `vector_type` | string | Type of vector generated \(dense/sparse\) | +| `usage` | object | Usage statistics for embeddings 
generation | ### `pinecone_upsert_text` @@ -91,12 +89,8 @@ Insert or update text records in a Pinecone index | Parameter | Type | Description | | --------- | ---- | ----------- | -| `matches` | any | Search matches | -| `upsertedCount` | any | Upserted count | -| `data` | any | Response data | -| `model` | any | Model information | -| `vector_type` | any | Vector type | -| `usage` | any | Usage statistics | +| `statusText` | string | Status of the upsert operation | +| `upsertedCount` | number | Number of records successfully upserted | ### `pinecone_search_text` @@ -119,12 +113,7 @@ Search for similar text in a Pinecone index | Parameter | Type | Description | | --------- | ---- | ----------- | -| `matches` | any | Search matches | -| `upsertedCount` | any | Upserted count | -| `data` | any | Response data | -| `model` | any | Model information | -| `vector_type` | any | Vector type | -| `usage` | any | Usage statistics | +| `matches` | array | Search results with ID, score, and metadata | ### `pinecone_search_vector` @@ -147,12 +136,8 @@ Search for similar vectors in a Pinecone index | Parameter | Type | Description | | --------- | ---- | ----------- | -| `matches` | any | Search matches | -| `upsertedCount` | any | Upserted count | -| `data` | any | Response data | -| `model` | any | Model information | -| `vector_type` | any | Vector type | -| `usage` | any | Usage statistics | +| `matches` | array | Vector search results with ID, score, values, and metadata | +| `namespace` | string | Namespace where the search was performed | ### `pinecone_fetch` @@ -171,12 +156,7 @@ Fetch vectors by ID from a Pinecone index | Parameter | Type | Description | | --------- | ---- | ----------- | -| `matches` | any | Search matches | -| `upsertedCount` | any | Upserted count | -| `data` | any | Response data | -| `model` | any | Model information | -| `vector_type` | any | Vector type | -| `usage` | any | Usage statistics | +| `matches` | array | Fetched vectors with ID, values, 
metadata, and score | diff --git a/apps/docs/content/docs/tools/qdrant.mdx b/apps/docs/content/docs/tools/qdrant.mdx index fcc2c478bf..44a7719da4 100644 --- a/apps/docs/content/docs/tools/qdrant.mdx +++ b/apps/docs/content/docs/tools/qdrant.mdx @@ -126,10 +126,8 @@ Insert or update points in a Qdrant collection | Parameter | Type | Description | | --------- | ---- | ----------- | -| `matches` | any | Search matches | -| `upsertedCount` | any | Upserted count | -| `data` | any | Response data | -| `status` | any | Operation status | +| `status` | string | Status of the upsert operation | +| `data` | object | Result data from the upsert operation | ### `qdrant_search_vector` @@ -152,10 +150,8 @@ Search for similar vectors in a Qdrant collection | Parameter | Type | Description | | --------- | ---- | ----------- | -| `matches` | any | Search matches | -| `upsertedCount` | any | Upserted count | -| `data` | any | Response data | -| `status` | any | Operation status | +| `data` | array | Vector search results with ID, score, payload, and optional vector data | +| `status` | string | Status of the search operation | ### `qdrant_fetch_points` @@ -176,10 +172,8 @@ Fetch points by ID from a Qdrant collection | Parameter | Type | Description | | --------- | ---- | ----------- | -| `matches` | any | Search matches | -| `upsertedCount` | any | Upserted count | -| `data` | any | Response data | -| `status` | any | Operation status | +| `data` | array | Fetched points with ID, payload, and optional vector data | +| `status` | string | Status of the fetch operation | diff --git a/apps/docs/content/docs/tools/reddit.mdx b/apps/docs/content/docs/tools/reddit.mdx index 22d16546a8..851d4c7593 100644 --- a/apps/docs/content/docs/tools/reddit.mdx +++ b/apps/docs/content/docs/tools/reddit.mdx @@ -53,20 +53,17 @@ Fetch posts from a subreddit with different sorting options | Parameter | Type | Required | Description | | --------- | ---- | -------- | ----------- | -| `accessToken` | string 
| Yes | Access token for Reddit API | | `subreddit` | string | Yes | The name of the subreddit to fetch posts from \(without the r/ prefix\) | -| `sort` | string | No | Sort method for posts: | +| `sort` | string | No | Sort method for posts: "hot", "new", "top", or "rising" \(default: "hot"\) | | `limit` | number | No | Maximum number of posts to return \(default: 10, max: 100\) | -| `time` | string | No | Time filter for | +| `time` | string | No | Time filter for "top" sorted posts: "day", "week", "month", "year", or "all" \(default: "day"\) | #### Output | Parameter | Type | Description | | --------- | ---- | ----------- | -| `subreddit` | string | Subreddit name | -| `posts` | json | Posts data | -| `post` | json | Single post data | -| `comments` | json | Comments data | +| `subreddit` | string | Name of the subreddit where posts were fetched from | +| `posts` | array | Array of posts with title, author, URL, score, comments count, and metadata | ### `reddit_get_comments` @@ -76,20 +73,16 @@ Fetch comments from a specific Reddit post | Parameter | Type | Required | Description | | --------- | ---- | -------- | ----------- | -| `accessToken` | string | Yes | Access token for Reddit API | | `postId` | string | Yes | The ID of the Reddit post to fetch comments from | | `subreddit` | string | Yes | The subreddit where the post is located \(without the r/ prefix\) | -| `sort` | string | No | Sort method for comments: | +| `sort` | string | No | Sort method for comments: "confidence", "top", "new", "controversial", "old", "random", "qa" \(default: "confidence"\) | | `limit` | number | No | Maximum number of comments to return \(default: 50, max: 100\) | #### Output | Parameter | Type | Description | | --------- | ---- | ----------- | -| `subreddit` | string | Subreddit name | -| `posts` | json | Posts data | -| `post` | json | Single post data | -| `comments` | json | Comments data | +| `post` | object | Post information including ID, title, author, content, and 
metadata | diff --git a/apps/docs/content/docs/tools/s3.mdx b/apps/docs/content/docs/tools/s3.mdx index f8d62e2f7b..b6955bafae 100644 --- a/apps/docs/content/docs/tools/s3.mdx +++ b/apps/docs/content/docs/tools/s3.mdx @@ -84,8 +84,8 @@ Retrieve an object from an AWS S3 bucket | Parameter | Type | Description | | --------- | ---- | ----------- | -| `url` | string | Presigned URL | -| `metadata` | json | Object metadata | +| `url` | string | Pre-signed URL for downloading the S3 object | +| `metadata` | object | File metadata including type, size, name, and last modified date | diff --git a/apps/docs/content/docs/tools/serper.mdx b/apps/docs/content/docs/tools/serper.mdx index e04ce7b3a7..73b551c099 100644 --- a/apps/docs/content/docs/tools/serper.mdx +++ b/apps/docs/content/docs/tools/serper.mdx @@ -103,7 +103,7 @@ A powerful web search tool that provides access to Google search results through | Parameter | Type | Description | | --------- | ---- | ----------- | -| `searchResults` | json | Search results data | +| `searchResults` | array | Search results with titles, links, snippets, and type-specific metadata \(date for news, rating for places, imageUrl for images\) | diff --git a/apps/docs/content/docs/tools/sharepoint.mdx b/apps/docs/content/docs/tools/sharepoint.mdx index 3c44e35104..e08cbf903f 100644 --- a/apps/docs/content/docs/tools/sharepoint.mdx +++ b/apps/docs/content/docs/tools/sharepoint.mdx @@ -75,7 +75,6 @@ Create a new page in a SharePoint site | Parameter | Type | Required | Description | | --------- | ---- | -------- | ----------- | -| `accessToken` | string | Yes | The access token for the SharePoint API | | `siteId` | string | No | The ID of the SharePoint site \(internal use\) | | `siteSelector` | string | No | Select the SharePoint site | | `pageName` | string | Yes | The name of the page to create | @@ -86,7 +85,7 @@ Create a new page in a SharePoint site | Parameter | Type | Description | | --------- | ---- | ----------- | -| `sites` | json | 
An array of SharePoint site objects, each containing details such as id, name, and more. | +| `page` | object | Created SharePoint page information | ### `sharepoint_read_page` @@ -96,7 +95,6 @@ Read a specific page from a SharePoint site | Parameter | Type | Required | Description | | --------- | ---- | -------- | ----------- | -| `accessToken` | string | Yes | The access token for the SharePoint API | | `siteSelector` | string | No | Select the SharePoint site | | `siteId` | string | No | The ID of the SharePoint site \(internal use\) | | `pageId` | string | No | The ID of the page to read | @@ -107,7 +105,7 @@ Read a specific page from a SharePoint site | Parameter | Type | Description | | --------- | ---- | ----------- | -| `sites` | json | An array of SharePoint site objects, each containing details such as id, name, and more. | +| `page` | object | Information about the SharePoint page | ### `sharepoint_list_sites` @@ -117,7 +115,6 @@ List details of all SharePoint sites | Parameter | Type | Required | Description | | --------- | ---- | -------- | ----------- | -| `accessToken` | string | Yes | The access token for the SharePoint API | | `siteSelector` | string | No | Select the SharePoint site | | `groupId` | string | No | The group ID for accessing a group team site | @@ -125,7 +122,7 @@ List details of all SharePoint sites | Parameter | Type | Description | | --------- | ---- | ----------- | -| `sites` | json | An array of SharePoint site objects, each containing details such as id, name, and more. 
| +| `site` | object | Information about the current SharePoint site | diff --git a/apps/docs/content/docs/tools/slack.mdx b/apps/docs/content/docs/tools/slack.mdx index 074e9c1d18..68c4ac50f3 100644 --- a/apps/docs/content/docs/tools/slack.mdx +++ b/apps/docs/content/docs/tools/slack.mdx @@ -1,6 +1,6 @@ --- title: Slack -description: Send messages to Slack +description: Send messages to Slack or trigger workflows from Slack events --- import { BlockInfoCard } from "@/components/ui/block-info-card" @@ -64,7 +64,7 @@ This allows for powerful automation scenarios such as sending notifications, ale ## Usage Instructions -Comprehensive Slack integration with OAuth authentication. Send formatted messages using Slack's mrkdwn syntax. +Comprehensive Slack integration with OAuth authentication. Send formatted messages using Slack's mrkdwn syntax or trigger workflows from Slack events like mentions and messages. @@ -80,7 +80,6 @@ Send messages to Slack channels or users through the Slack API. Supports Slack m | --------- | ---- | -------- | ----------- | | `authMethod` | string | No | Authentication method: oauth or bot_token | | `botToken` | string | No | Bot token for Custom Bot | -| `accessToken` | string | No | OAuth access token or bot token for Slack API | | `channel` | string | Yes | Target Slack channel \(e.g., #general\) | | `text` | string | Yes | Message text to send \(supports Slack mrkdwn formatting\) | @@ -89,10 +88,7 @@ Send messages to Slack channels or users through the Slack API. Supports Slack m | Parameter | Type | Description | | --------- | ---- | ----------- | | `ts` | string | Message timestamp | -| `channel` | string | Channel identifier | -| `canvas_id` | string | Canvas identifier | -| `title` | string | Canvas title | -| `messages` | json | Message data | +| `channel` | string | Channel ID where message was sent | ### `slack_canvas` @@ -104,7 +100,6 @@ Create and share Slack canvases in channels. 
Canvases are collaborative document | --------- | ---- | -------- | ----------- | | `authMethod` | string | No | Authentication method: oauth or bot_token | | `botToken` | string | No | Bot token for Custom Bot | -| `accessToken` | string | No | OAuth access token or bot token for Slack API | | `channel` | string | Yes | Target Slack channel \(e.g., #general\) | | `title` | string | Yes | Title of the canvas | | `content` | string | Yes | Canvas content in markdown format | @@ -114,11 +109,9 @@ Create and share Slack canvases in channels. Canvases are collaborative document | Parameter | Type | Description | | --------- | ---- | ----------- | -| `ts` | string | Message timestamp | -| `channel` | string | Channel identifier | -| `canvas_id` | string | Canvas identifier | -| `title` | string | Canvas title | -| `messages` | json | Message data | +| `canvas_id` | string | ID of the created canvas | +| `channel` | string | Channel where canvas was created | +| `title` | string | Title of the canvas | ### `slack_message_reader` @@ -130,7 +123,6 @@ Read the latest messages from Slack channels. Retrieve conversation history with | --------- | ---- | -------- | ----------- | | `authMethod` | string | No | Authentication method: oauth or bot_token | | `botToken` | string | No | Bot token for Custom Bot | -| `accessToken` | string | No | OAuth access token or bot token for Slack API | | `channel` | string | Yes | Slack channel to read messages from \(e.g., #general\) | | `limit` | number | No | Number of messages to retrieve \(default: 10, max: 100\) | | `oldest` | string | No | Start of time range \(timestamp\) | @@ -140,11 +132,7 @@ Read the latest messages from Slack channels. 
Retrieve conversation history with | Parameter | Type | Description | | --------- | ---- | ----------- | -| `ts` | string | Message timestamp | -| `channel` | string | Channel identifier | -| `canvas_id` | string | Canvas identifier | -| `title` | string | Canvas title | -| `messages` | json | Message data | +| `messages` | array | Array of message objects from the channel | diff --git a/apps/docs/content/docs/tools/stagehand.mdx b/apps/docs/content/docs/tools/stagehand.mdx index c0fe0e6ca6..eef1a2cb7d 100644 --- a/apps/docs/content/docs/tools/stagehand.mdx +++ b/apps/docs/content/docs/tools/stagehand.mdx @@ -214,7 +214,7 @@ Extract structured data from a webpage using Stagehand | Parameter | Type | Description | | --------- | ---- | ----------- | -| `data` | json | Extracted data | +| `data` | object | Extracted structured data matching the provided schema | diff --git a/apps/docs/content/docs/tools/stagehand_agent.mdx b/apps/docs/content/docs/tools/stagehand_agent.mdx index 3f1b5f20b5..fa48d02e6f 100644 --- a/apps/docs/content/docs/tools/stagehand_agent.mdx +++ b/apps/docs/content/docs/tools/stagehand_agent.mdx @@ -212,6 +212,7 @@ Run an autonomous web agent to complete tasks and extract structured data | `startUrl` | string | Yes | URL of the webpage to start the agent on | | `task` | string | Yes | The task to complete or goal to achieve on the website | | `variables` | json | No | Optional variables to substitute in the task \(format: \{key: value\}\). 
Reference in task using %key% | +| `format` | string | No | No description | | `apiKey` | string | Yes | OpenAI API key for agent execution \(required by Stagehand\) | | `outputSchema` | json | No | Optional JSON schema defining the structure of data the agent should return | @@ -219,8 +220,7 @@ Run an autonomous web agent to complete tasks and extract structured data | Parameter | Type | Description | | --------- | ---- | ----------- | -| `agentResult` | json | Agent execution result | -| `structuredOutput` | any | Structured output data | +| `agentResult` | object | Result from the Stagehand agent execution | diff --git a/apps/docs/content/docs/tools/supabase.mdx b/apps/docs/content/docs/tools/supabase.mdx index 772967fc25..c43f284246 100644 --- a/apps/docs/content/docs/tools/supabase.mdx +++ b/apps/docs/content/docs/tools/supabase.mdx @@ -92,7 +92,7 @@ Query data from a Supabase table | --------- | ---- | -------- | ----------- | | `projectId` | string | Yes | Your Supabase project ID \(e.g., jdrkgepadsdopsntdlom\) | | `table` | string | Yes | The name of the Supabase table to query | -| `filter` | string | No | PostgREST filter \(e.g., | +| `filter` | string | No | PostgREST filter \(e.g., "id=eq.123"\) | | `orderBy` | string | No | Column to order by \(add DESC for descending\) | | `limit` | number | No | Maximum number of rows to return | | `apiKey` | string | Yes | Your Supabase service role secret key | @@ -101,8 +101,8 @@ Query data from a Supabase table | Parameter | Type | Description | | --------- | ---- | ----------- | -| `message` | string | Operation message | -| `results` | json | Query results | +| `success` | boolean | Operation success status | +| `output` | object | Query operation results | ### `supabase_insert` @@ -121,8 +121,8 @@ Insert data into a Supabase table | Parameter | Type | Description | | --------- | ---- | ----------- | -| `message` | string | Operation message | -| `results` | json | Query results | +| `success` | boolean | 
Operation success status | +| `output` | object | Insert operation results | ### `supabase_get_row` @@ -134,15 +134,15 @@ Get a single row from a Supabase table based on filter criteria | --------- | ---- | -------- | ----------- | | `projectId` | string | Yes | Your Supabase project ID \(e.g., jdrkgepadsdopsntdlom\) | | `table` | string | Yes | The name of the Supabase table to query | -| `filter` | string | Yes | PostgREST filter to find the specific row \(e.g., | +| `filter` | string | Yes | PostgREST filter to find the specific row \(e.g., "id=eq.123"\) | | `apiKey` | string | Yes | Your Supabase service role secret key | #### Output | Parameter | Type | Description | | --------- | ---- | ----------- | -| `message` | string | Operation message | -| `results` | json | Query results | +| `success` | boolean | Operation success status | +| `output` | object | Get row operation results | ### `supabase_update` @@ -154,7 +154,7 @@ Update rows in a Supabase table based on filter criteria | --------- | ---- | -------- | ----------- | | `projectId` | string | Yes | Your Supabase project ID \(e.g., jdrkgepadsdopsntdlom\) | | `table` | string | Yes | The name of the Supabase table to update | -| `filter` | string | Yes | PostgREST filter to identify rows to update \(e.g., | +| `filter` | string | Yes | PostgREST filter to identify rows to update \(e.g., "id=eq.123"\) | | `data` | object | Yes | Data to update in the matching rows | | `apiKey` | string | Yes | Your Supabase service role secret key | @@ -162,8 +162,8 @@ Update rows in a Supabase table based on filter criteria | Parameter | Type | Description | | --------- | ---- | ----------- | -| `message` | string | Operation message | -| `results` | json | Query results | +| `success` | boolean | Operation success status | +| `output` | object | Update operation results | ### `supabase_delete` @@ -175,15 +175,15 @@ Delete rows from a Supabase table based on filter criteria | --------- | ---- | -------- | ----------- | | 
`projectId` | string | Yes | Your Supabase project ID \(e.g., jdrkgepadsdopsntdlom\) | | `table` | string | Yes | The name of the Supabase table to delete from | -| `filter` | string | Yes | PostgREST filter to identify rows to delete \(e.g., | +| `filter` | string | Yes | PostgREST filter to identify rows to delete \(e.g., "id=eq.123"\) | | `apiKey` | string | Yes | Your Supabase service role secret key | #### Output | Parameter | Type | Description | | --------- | ---- | ----------- | -| `message` | string | Operation message | -| `results` | json | Query results | +| `success` | boolean | Operation success status | +| `output` | object | Delete operation results | diff --git a/apps/docs/content/docs/tools/tavily.mdx b/apps/docs/content/docs/tools/tavily.mdx index 3f4ec5b605..c54e49c2ff 100644 --- a/apps/docs/content/docs/tools/tavily.mdx +++ b/apps/docs/content/docs/tools/tavily.mdx @@ -80,12 +80,8 @@ Perform AI-powered web searches using Tavily | Parameter | Type | Description | | --------- | ---- | ----------- | -| `results` | json | Search results data | -| `answer` | any | Search answer | -| `query` | string | Query used | -| `content` | string | Extracted content | -| `title` | string | Page title | -| `url` | string | Source URL | +| `query` | string | The search query that was executed | +| `results` | array | results output from the tool | ### `tavily_extract` @@ -103,12 +99,7 @@ Extract raw content from multiple web pages simultaneously using Tavily | Parameter | Type | Description | | --------- | ---- | ----------- | -| `results` | json | Search results data | -| `answer` | any | Search answer | -| `query` | string | Query used | -| `content` | string | Extracted content | -| `title` | string | Page title | -| `url` | string | Source URL | +| `results` | array | The URL that was extracted | diff --git a/apps/docs/content/docs/tools/telegram.mdx b/apps/docs/content/docs/tools/telegram.mdx index 155d82edfe..2dd0beae2c 100644 --- 
a/apps/docs/content/docs/tools/telegram.mdx +++ b/apps/docs/content/docs/tools/telegram.mdx @@ -1,6 +1,6 @@ --- title: Telegram -description: Send a message through Telegram +description: Send messages through Telegram or trigger workflows from Telegram events --- import { BlockInfoCard } from "@/components/ui/block-info-card" @@ -67,7 +67,7 @@ In Sim, the Telegram integration enables your agents to leverage these powerful ## Usage Instructions -Send messages to any Telegram channel using your Bot API key. Integrate automated notifications and alerts into your workflow to keep your team informed. +Send messages to any Telegram channel using your Bot API key or trigger workflows from Telegram bot messages. Integrate automated notifications and alerts into your workflow to keep your team informed. @@ -89,8 +89,12 @@ Send messages to Telegram channels or users through the Telegram Bot API. Enable | Parameter | Type | Description | | --------- | ---- | ----------- | -| `ok` | boolean | Success status | -| `result` | json | Message result | +| `success` | boolean | Telegram message send success status | +| `messageId` | number | Unique Telegram message identifier | +| `chatId` | string | Target chat ID where message was sent | +| `text` | string | Text content of the sent message | +| `timestamp` | number | Unix timestamp when message was sent | +| `from` | object | Information about the bot that sent the message | diff --git a/apps/docs/content/docs/tools/thinking.mdx b/apps/docs/content/docs/tools/thinking.mdx index 917b8340b2..86d23e53b2 100644 --- a/apps/docs/content/docs/tools/thinking.mdx +++ b/apps/docs/content/docs/tools/thinking.mdx @@ -69,7 +69,7 @@ Processes a provided thought/instruction, making it available for subsequent ste | Parameter | Type | Description | | --------- | ---- | ----------- | -| `acknowledgedThought` | string | Acknowledged thought process | +| `acknowledgedThought` | string | The thought that was processed and acknowledged | diff --git 
a/apps/docs/content/docs/tools/translate.mdx b/apps/docs/content/docs/tools/translate.mdx index 27bfa26680..1676f1119d 100644 --- a/apps/docs/content/docs/tools/translate.mdx +++ b/apps/docs/content/docs/tools/translate.mdx @@ -67,7 +67,7 @@ Convert text between languages while preserving meaning, nuance, and formatting. | --------- | ---- | ----------- | | `content` | string | Translated text | | `model` | string | Model used | -| `tokens` | any | Token usage | +| `tokens` | json | Token usage | ### `anthropic_chat` @@ -85,7 +85,7 @@ Convert text between languages while preserving meaning, nuance, and formatting. | --------- | ---- | ----------- | | `content` | string | Translated text | | `model` | string | Model used | -| `tokens` | any | Token usage | +| `tokens` | json | Token usage | diff --git a/apps/docs/content/docs/tools/twilio_sms.mdx b/apps/docs/content/docs/tools/twilio_sms.mdx index dfbd529f26..25d4a5164e 100644 --- a/apps/docs/content/docs/tools/twilio_sms.mdx +++ b/apps/docs/content/docs/tools/twilio_sms.mdx @@ -58,10 +58,11 @@ Send text messages to single or multiple recipients using the Twilio API. 
| Parameter | Type | Description | | --------- | ---- | ----------- | -| `success` | boolean | Send success status | -| `messageId` | any | Message identifier | -| `status` | any | Delivery status | -| `error` | any | Error information | +| `success` | boolean | SMS send success status | +| `messageId` | string | Unique Twilio message identifier \(SID\) | +| `status` | string | Message delivery status from Twilio | +| `fromNumber` | string | Phone number message was sent from | +| `toNumber` | string | Phone number message was sent to | diff --git a/apps/docs/content/docs/tools/typeform.mdx b/apps/docs/content/docs/tools/typeform.mdx index 21550595b2..996490c660 100644 --- a/apps/docs/content/docs/tools/typeform.mdx +++ b/apps/docs/content/docs/tools/typeform.mdx @@ -95,9 +95,9 @@ Download files uploaded in Typeform responses | Parameter | Type | Description | | --------- | ---- | ----------- | -| `total_items` | number | Total response count | -| `page_count` | number | Total page count | -| `items` | json | Response items | +| `fileUrl` | string | Direct download URL for the uploaded file | +| `contentType` | string | MIME type of the uploaded file | +| `filename` | string | Original filename of the uploaded file | ### `typeform_insights` @@ -114,9 +114,7 @@ Retrieve insights and analytics for Typeform forms | Parameter | Type | Description | | --------- | ---- | ----------- | -| `total_items` | number | Total response count | -| `page_count` | number | Total page count | -| `items` | json | Response items | +| `fields` | array | Number of users who dropped off at this field | diff --git a/apps/docs/content/docs/tools/vision.mdx b/apps/docs/content/docs/tools/vision.mdx index cf1ea47ab2..c26c9937a5 100644 --- a/apps/docs/content/docs/tools/vision.mdx +++ b/apps/docs/content/docs/tools/vision.mdx @@ -70,9 +70,10 @@ Process and analyze images using advanced vision models. 
Capable of understandin | Parameter | Type | Description | | --------- | ---- | ----------- | -| `content` | string | Analysis result | -| `model` | any | Model used | -| `tokens` | any | Token usage | +| `content` | string | The analyzed content and description of the image | +| `model` | string | The vision model that was used for analysis | +| `tokens` | number | Total tokens used for the analysis | +| `usage` | object | Detailed token usage breakdown | diff --git a/apps/docs/content/docs/tools/wealthbox.mdx b/apps/docs/content/docs/tools/wealthbox.mdx index d8cc291c52..a623401b53 100644 --- a/apps/docs/content/docs/tools/wealthbox.mdx +++ b/apps/docs/content/docs/tools/wealthbox.mdx @@ -56,21 +56,14 @@ Read content from a Wealthbox note | Parameter | Type | Required | Description | | --------- | ---- | -------- | ----------- | -| `accessToken` | string | Yes | The access token for the Wealthbox API | | `noteId` | string | No | The ID of the note to read | #### Output | Parameter | Type | Description | | --------- | ---- | ----------- | -| `note` | any | Note data | -| `notes` | any | Notes list | -| `contact` | any | Contact data | -| `contacts` | any | Contacts list | -| `task` | any | Task data | -| `tasks` | any | Tasks list | -| `metadata` | json | Operation metadata | -| `success` | any | Success status | +| `success` | boolean | Operation success status | +| `output` | object | Note data and metadata | ### `wealthbox_write_note` @@ -80,7 +73,6 @@ Create or update a Wealthbox note | Parameter | Type | Required | Description | | --------- | ---- | -------- | ----------- | -| `accessToken` | string | Yes | The access token for the Wealthbox API | | `content` | string | Yes | The main body of the note | | `contactId` | string | No | ID of contact to link to this note | @@ -88,14 +80,8 @@ Create or update a Wealthbox note | Parameter | Type | Description | | --------- | ---- | ----------- | -| `note` | any | Note data | -| `notes` | any | Notes list | -| 
`contact` | any | Contact data | -| `contacts` | any | Contacts list | -| `task` | any | Task data | -| `tasks` | any | Tasks list | -| `metadata` | json | Operation metadata | -| `success` | any | Success status | +| `success` | boolean | Operation success status | +| `output` | object | Created or updated note data and metadata | ### `wealthbox_read_contact` @@ -105,21 +91,14 @@ Read content from a Wealthbox contact | Parameter | Type | Required | Description | | --------- | ---- | -------- | ----------- | -| `accessToken` | string | Yes | The access token for the Wealthbox API | | `contactId` | string | No | The ID of the contact to read | #### Output | Parameter | Type | Description | | --------- | ---- | ----------- | -| `note` | any | Note data | -| `notes` | any | Notes list | -| `contact` | any | Contact data | -| `contacts` | any | Contacts list | -| `task` | any | Task data | -| `tasks` | any | Tasks list | -| `metadata` | json | Operation metadata | -| `success` | any | Success status | +| `success` | boolean | Operation success status | +| `output` | object | Contact data and metadata | ### `wealthbox_write_contact` @@ -129,7 +108,6 @@ Create a new Wealthbox contact | Parameter | Type | Required | Description | | --------- | ---- | -------- | ----------- | -| `accessToken` | string | Yes | The access token for the Wealthbox API | | `firstName` | string | Yes | The first name of the contact | | `lastName` | string | Yes | The last name of the contact | | `emailAddress` | string | No | The email address of the contact | @@ -139,14 +117,8 @@ Create a new Wealthbox contact | Parameter | Type | Description | | --------- | ---- | ----------- | -| `note` | any | Note data | -| `notes` | any | Notes list | -| `contact` | any | Contact data | -| `contacts` | any | Contacts list | -| `task` | any | Task data | -| `tasks` | any | Tasks list | -| `metadata` | json | Operation metadata | -| `success` | any | Success status | +| `success` | boolean | Operation 
success status | +| `output` | object | Created or updated contact data and metadata | ### `wealthbox_read_task` @@ -156,21 +128,14 @@ Read content from a Wealthbox task | Parameter | Type | Required | Description | | --------- | ---- | -------- | ----------- | -| `accessToken` | string | Yes | The access token for the Wealthbox API | | `taskId` | string | No | The ID of the task to read | #### Output | Parameter | Type | Description | | --------- | ---- | ----------- | -| `note` | any | Note data | -| `notes` | any | Notes list | -| `contact` | any | Contact data | -| `contacts` | any | Contacts list | -| `task` | any | Task data | -| `tasks` | any | Tasks list | -| `metadata` | json | Operation metadata | -| `success` | any | Success status | +| `success` | boolean | Operation success status | +| `output` | object | Task data and metadata | ### `wealthbox_write_task` @@ -180,9 +145,8 @@ Create or update a Wealthbox task | Parameter | Type | Required | Description | | --------- | ---- | -------- | ----------- | -| `accessToken` | string | Yes | The access token for the Wealthbox API | | `title` | string | Yes | The name/title of the task | -| `dueDate` | string | Yes | The due date and time of the task \(format: | +| `dueDate` | string | Yes | The due date and time of the task \(format: "YYYY-MM-DD HH:MM AM/PM -HHMM", e.g., "2015-05-24 11:00 AM -0400"\) | | `contactId` | string | No | ID of contact to link to this task | | `description` | string | No | Description or notes about the task | @@ -190,14 +154,8 @@ Create or update a Wealthbox task | Parameter | Type | Description | | --------- | ---- | ----------- | -| `note` | any | Note data | -| `notes` | any | Notes list | -| `contact` | any | Contact data | -| `contacts` | any | Contacts list | -| `task` | any | Task data | -| `tasks` | any | Tasks list | -| `metadata` | json | Operation metadata | -| `success` | any | Success status | +| `success` | boolean | Operation success status | +| `output` | object | 
Created or updated task data and metadata | diff --git a/apps/docs/content/docs/tools/whatsapp.mdx b/apps/docs/content/docs/tools/whatsapp.mdx index a8b3e96750..d8a30b6853 100644 --- a/apps/docs/content/docs/tools/whatsapp.mdx +++ b/apps/docs/content/docs/tools/whatsapp.mdx @@ -54,15 +54,16 @@ Send WhatsApp messages | `phoneNumber` | string | Yes | Recipient phone number with country code | | `message` | string | Yes | Message content to send | | `phoneNumberId` | string | Yes | WhatsApp Business Phone Number ID | -| `accessToken` | string | Yes | WhatsApp Business API Access Token | #### Output | Parameter | Type | Description | | --------- | ---- | ----------- | -| `success` | boolean | Send success status | -| `messageId` | any | Message identifier | -| `error` | any | Error information | +| `success` | boolean | WhatsApp message send success status | +| `messageId` | string | Unique WhatsApp message identifier | +| `phoneNumber` | string | Recipient phone number | +| `status` | string | Message delivery status | +| `timestamp` | string | Message send timestamp | diff --git a/apps/docs/content/docs/tools/wikipedia.mdx b/apps/docs/content/docs/tools/wikipedia.mdx index 6556945e57..ece9053cfc 100644 --- a/apps/docs/content/docs/tools/wikipedia.mdx +++ b/apps/docs/content/docs/tools/wikipedia.mdx @@ -74,11 +74,7 @@ Get a summary and metadata for a specific Wikipedia page. | Parameter | Type | Description | | --------- | ---- | ----------- | -| `summary` | json | Page summary data | -| `searchResults` | json | Search results data | -| `totalHits` | number | Total search hits | -| `content` | json | Page content data | -| `randomPage` | json | Random page data | +| `summary` | object | Wikipedia page summary and metadata | ### `wikipedia_search` @@ -95,11 +91,7 @@ Search for Wikipedia pages by title or content. 
| Parameter | Type | Description | | --------- | ---- | ----------- | -| `summary` | json | Page summary data | -| `searchResults` | json | Search results data | -| `totalHits` | number | Total search hits | -| `content` | json | Page content data | -| `randomPage` | json | Random page data | +| `searchResults` | array | Array of matching Wikipedia pages | ### `wikipedia_content` @@ -115,11 +107,7 @@ Get the full HTML content of a Wikipedia page. | Parameter | Type | Description | | --------- | ---- | ----------- | -| `summary` | json | Page summary data | -| `searchResults` | json | Search results data | -| `totalHits` | number | Total search hits | -| `content` | json | Page content data | -| `randomPage` | json | Random page data | +| `content` | object | Full HTML content and metadata of the Wikipedia page | ### `wikipedia_random` @@ -134,11 +122,7 @@ Get a random Wikipedia page. | Parameter | Type | Description | | --------- | ---- | ----------- | -| `summary` | json | Page summary data | -| `searchResults` | json | Search results data | -| `totalHits` | number | Total search hits | -| `content` | json | Page content data | -| `randomPage` | json | Random page data | +| `randomPage` | object | Random Wikipedia page data | diff --git a/apps/docs/content/docs/tools/x.mdx b/apps/docs/content/docs/tools/x.mdx index e6c99d0451..c3abadbbf1 100644 --- a/apps/docs/content/docs/tools/x.mdx +++ b/apps/docs/content/docs/tools/x.mdx @@ -50,7 +50,6 @@ Post new tweets, reply to tweets, or create polls on X (Twitter) | Parameter | Type | Required | Description | | --------- | ---- | -------- | ----------- | -| `accessToken` | string | Yes | X OAuth access token | | `text` | string | Yes | The text content of your tweet | | `replyTo` | string | No | ID of the tweet to reply to | | `mediaIds` | array | No | Array of media IDs to attach to the tweet | @@ -60,14 +59,7 @@ Post new tweets, reply to tweets, or create polls on X (Twitter) | Parameter | Type | Description | | 
--------- | ---- | ----------- | -| `tweet` | json | Tweet data | -| `replies` | any | Tweet replies | -| `context` | any | Tweet context | -| `tweets` | json | Tweets data | -| `includes` | any | Additional data | -| `meta` | json | Response metadata | -| `user` | json | User profile data | -| `recentTweets` | any | Recent tweets data | +| `tweet` | object | The newly created tweet data | ### `x_read` @@ -77,7 +69,6 @@ Read tweet details, including replies and conversation context | Parameter | Type | Required | Description | | --------- | ---- | -------- | ----------- | -| `accessToken` | string | Yes | X OAuth access token | | `tweetId` | string | Yes | ID of the tweet to read | | `includeReplies` | boolean | No | Whether to include replies to the tweet | @@ -85,14 +76,7 @@ Read tweet details, including replies and conversation context | Parameter | Type | Description | | --------- | ---- | ----------- | -| `tweet` | json | Tweet data | -| `replies` | any | Tweet replies | -| `context` | any | Tweet context | -| `tweets` | json | Tweets data | -| `includes` | any | Additional data | -| `meta` | json | Response metadata | -| `user` | json | User profile data | -| `recentTweets` | any | Recent tweets data | +| `tweet` | object | The main tweet data | ### `x_search` @@ -102,7 +86,6 @@ Search for tweets using keywords, hashtags, or advanced queries | Parameter | Type | Required | Description | | --------- | ---- | -------- | ----------- | -| `accessToken` | string | Yes | X OAuth access token | | `query` | string | Yes | Search query \(supports X search operators\) | | `maxResults` | number | No | Maximum number of results to return \(default: 10, max: 100\) | | `startTime` | string | No | Start time for search \(ISO 8601 format\) | @@ -113,14 +96,7 @@ Search for tweets using keywords, hashtags, or advanced queries | Parameter | Type | Description | | --------- | ---- | ----------- | -| `tweet` | json | Tweet data | -| `replies` | any | Tweet replies | -| `context` 
| any | Tweet context | -| `tweets` | json | Tweets data | -| `includes` | any | Additional data | -| `meta` | json | Response metadata | -| `user` | json | User profile data | -| `recentTweets` | any | Recent tweets data | +| `tweets` | array | Array of tweets matching the search query | ### `x_user` @@ -130,21 +106,13 @@ Get user profile information | Parameter | Type | Required | Description | | --------- | ---- | -------- | ----------- | -| `accessToken` | string | Yes | X OAuth access token | | `username` | string | Yes | Username to look up \(without @ symbol\) | #### Output | Parameter | Type | Description | | --------- | ---- | ----------- | -| `tweet` | json | Tweet data | -| `replies` | any | Tweet replies | -| `context` | any | Tweet context | -| `tweets` | json | Tweets data | -| `includes` | any | Additional data | -| `meta` | json | Response metadata | -| `user` | json | User profile data | -| `recentTweets` | any | Recent tweets data | +| `user` | object | X user profile information | diff --git a/apps/docs/content/docs/tools/youtube.mdx b/apps/docs/content/docs/tools/youtube.mdx index 6f8ddda348..b22c3b934f 100644 --- a/apps/docs/content/docs/tools/youtube.mdx +++ b/apps/docs/content/docs/tools/youtube.mdx @@ -62,8 +62,7 @@ Search for videos on YouTube using the YouTube Data API. 
| Parameter | Type | Description | | --------- | ---- | ----------- | -| `items` | json | The items returned by the YouTube search | -| `totalResults` | number | The total number of results returned by the YouTube search | +| `items` | array | Array of YouTube videos matching the search query | diff --git a/apps/sim/app/api/__test-utils__/utils.ts b/apps/sim/app/api/__test-utils__/utils.ts index 7ebb0e19ec..9f5cdf21c5 100644 --- a/apps/sim/app/api/__test-utils__/utils.ts +++ b/apps/sim/app/api/__test-utils__/utils.ts @@ -99,6 +99,7 @@ export const sampleWorkflowState = { horizontalHandles: true, isWide: false, advancedMode: false, + triggerMode: false, height: 95, }, 'agent-id': { @@ -127,6 +128,7 @@ export const sampleWorkflowState = { horizontalHandles: true, isWide: false, advancedMode: false, + triggerMode: false, height: 680, }, }, diff --git a/apps/sim/app/api/webhooks/route.ts b/apps/sim/app/api/webhooks/route.ts index 9f38cb2d9f..cfcb29cfb9 100644 --- a/apps/sim/app/api/webhooks/route.ts +++ b/apps/sim/app/api/webhooks/route.ts @@ -87,7 +87,7 @@ export async function POST(request: NextRequest) { const { workflowId, path, provider, providerConfig, blockId } = body // Validate input - if (!workflowId || !path) { + if (!workflowId) { logger.warn(`[${requestId}] Missing required fields for webhook creation`, { hasWorkflowId: !!workflowId, hasPath: !!path, @@ -95,6 +95,26 @@ export async function POST(request: NextRequest) { return NextResponse.json({ error: 'Missing required fields' }, { status: 400 }) } + // For credential-based providers (those that use polling instead of webhooks), + // generate a dummy path if none provided since they don't use actual webhook URLs + // but still need database entries for the polling services to find them + let finalPath = path + if (!path || path.trim() === '') { + // List of providers that use credential-based polling instead of webhooks + const credentialBasedProviders = ['gmail', 'outlook'] + + if 
(credentialBasedProviders.includes(provider)) { + finalPath = `${provider}-${crypto.randomUUID()}` + logger.info(`[${requestId}] Generated dummy path for ${provider} trigger: ${finalPath}`) + } else { + logger.warn(`[${requestId}] Missing path for webhook creation`, { + hasWorkflowId: !!workflowId, + hasPath: !!path, + }) + return NextResponse.json({ error: 'Missing required path' }, { status: 400 }) + } + } + // Check if the workflow exists and user has permission to modify it const workflowData = await db .select({ @@ -144,29 +164,32 @@ export async function POST(request: NextRequest) { const existingWebhooks = await db .select({ id: webhook.id, workflowId: webhook.workflowId }) .from(webhook) - .where(eq(webhook.path, path)) + .where(eq(webhook.path, finalPath)) .limit(1) let savedWebhook: any = null // Variable to hold the result of save/update // If a webhook with the same path exists but belongs to a different workflow, return an error if (existingWebhooks.length > 0 && existingWebhooks[0].workflowId !== workflowId) { - logger.warn(`[${requestId}] Webhook path conflict: ${path}`) + logger.warn(`[${requestId}] Webhook path conflict: ${finalPath}`) return NextResponse.json( { error: 'Webhook path already exists.', code: 'PATH_EXISTS' }, { status: 409 } ) } + // Use the original provider config - Gmail/Outlook configuration functions will inject userId automatically + const finalProviderConfig = providerConfig + // If a webhook with the same path and workflowId exists, update it if (existingWebhooks.length > 0 && existingWebhooks[0].workflowId === workflowId) { - logger.info(`[${requestId}] Updating existing webhook for path: ${path}`) + logger.info(`[${requestId}] Updating existing webhook for path: ${finalPath}`) const updatedResult = await db .update(webhook) .set({ blockId, provider, - providerConfig, + providerConfig: finalProviderConfig, isActive: true, updatedAt: new Date(), }) @@ -183,9 +206,9 @@ export async function POST(request: NextRequest) { id: 
webhookId, workflowId, blockId, - path, + path: finalPath, provider, - providerConfig, + providerConfig: finalProviderConfig, isActive: true, createdAt: new Date(), updatedAt: new Date(), diff --git a/apps/sim/app/api/workflows/[id]/state/route.ts b/apps/sim/app/api/workflows/[id]/state/route.ts index 75ba2f9b82..a1a788b607 100644 --- a/apps/sim/app/api/workflows/[id]/state/route.ts +++ b/apps/sim/app/api/workflows/[id]/state/route.ts @@ -51,6 +51,7 @@ const BlockStateSchema = z.object({ isWide: z.boolean().optional(), height: z.number().optional(), advancedMode: z.boolean().optional(), + triggerMode: z.boolean().optional(), data: BlockDataSchema.optional(), }) diff --git a/apps/sim/app/api/workflows/route.ts b/apps/sim/app/api/workflows/route.ts index 99eb109e1c..c4e56f8edf 100644 --- a/apps/sim/app/api/workflows/route.ts +++ b/apps/sim/app/api/workflows/route.ts @@ -130,6 +130,7 @@ export async function POST(req: NextRequest) { horizontalHandles: true, isWide: false, advancedMode: false, + triggerMode: false, height: 95, }, }, @@ -178,6 +179,7 @@ export async function POST(req: NextRequest) { horizontalHandles: true, isWide: false, advancedMode: false, + triggerMode: false, height: '95', subBlocks: { startWorkflow: { diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/components/sub-block/components/index.ts b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/components/sub-block/components/index.ts index c2f8c5d6c3..22aa0ef4b3 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/components/sub-block/components/index.ts +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/components/sub-block/components/index.ts @@ -23,4 +23,5 @@ export { Switch } from './switch' export { Table } from './table' export { TimeInput } from './time-input' export { ToolInput } from './tool-input/tool-input' +export { TriggerConfig } from 
'./trigger-config/trigger-config' export { WebhookConfig } from './webhook/webhook' diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/components/sub-block/components/trigger-config/components/trigger-config-section.tsx b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/components/sub-block/components/trigger-config/components/trigger-config-section.tsx new file mode 100644 index 0000000000..333fc1ab13 --- /dev/null +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/components/sub-block/components/trigger-config/components/trigger-config-section.tsx @@ -0,0 +1,360 @@ +import { useState } from 'react' +import { Check, ChevronDown, Copy, Eye, EyeOff, Info } from 'lucide-react' +import { Badge } from '@/components/ui/badge' +import { Button } from '@/components/ui/button' +import { + Command, + CommandEmpty, + CommandGroup, + CommandInput, + CommandItem, + CommandList, +} from '@/components/ui/command' +import { Input } from '@/components/ui/input' +import { Label } from '@/components/ui/label' +import { Popover, PopoverContent, PopoverTrigger } from '@/components/ui/popover' +import { + Select, + SelectContent, + SelectItem, + SelectTrigger, + SelectValue, +} from '@/components/ui/select' +import { Switch } from '@/components/ui/switch' +import { Tooltip, TooltipContent, TooltipProvider, TooltipTrigger } from '@/components/ui/tooltip' +import { cn } from '@/lib/utils' +import type { TriggerConfig } from '@/triggers/types' + +interface TriggerConfigSectionProps { + triggerDef: TriggerConfig + config: Record + onChange: (fieldId: string, value: any) => void + webhookUrl: string + dynamicOptions?: Record | string[]> +} + +export function TriggerConfigSection({ + triggerDef, + config, + onChange, + webhookUrl, + dynamicOptions = {}, +}: TriggerConfigSectionProps) { + const [showSecrets, setShowSecrets] = useState>({}) + const [copied, setCopied] = useState(null) + + 
const copyToClipboard = (text: string, type: string) => { + navigator.clipboard.writeText(text) + setCopied(type) + setTimeout(() => setCopied(null), 2000) + } + + const toggleSecretVisibility = (fieldId: string) => { + setShowSecrets((prev) => ({ + ...prev, + [fieldId]: !prev[fieldId], + })) + } + + const renderField = (fieldId: string, fieldDef: any) => { + const value = config[fieldId] ?? fieldDef.defaultValue ?? '' + const isSecret = fieldDef.isSecret + const showSecret = showSecrets[fieldId] + + switch (fieldDef.type) { + case 'boolean': + return ( +
+ onChange(fieldId, checked)} + /> + +
+ ) + + case 'select': + return ( +
+ + + {fieldDef.description && ( +

{fieldDef.description}

+ )} +
+ ) + + case 'multiselect': { + const selectedValues = Array.isArray(value) ? value : [] + const rawOptions = dynamicOptions[fieldId] || fieldDef.options || [] + + // Handle both string[] and {id, name}[] formats + const availableOptions = rawOptions.map((option: any) => { + if (typeof option === 'string') { + return { id: option, name: option } + } + return option + }) + + // Create a map for quick lookup of display names + const optionMap = new Map(availableOptions.map((opt: any) => [opt.id, opt.name])) + + return ( +
+ + + + + + + + + + + {availableOptions.length === 0 + ? 'No options available. Please select credentials first.' + : 'No options found.'} + + + {availableOptions.map((option: any) => ( + { + const newValues = selectedValues.includes(option.id) + ? selectedValues.filter((v: string) => v !== option.id) + : [...selectedValues, option.id] + onChange(fieldId, newValues) + }} + > + + {option.name} + + ))} + + + + + + {fieldDef.description && ( +

{fieldDef.description}

+ )} +
+ ) + } + + case 'number': + return ( +
+ + onChange(fieldId, Number(e.target.value))} + /> + {fieldDef.description && ( +

{fieldDef.description}

+ )} +
+ ) + + default: // string + return ( +
+
+ + {fieldDef.description && ( + + + + + +

{fieldDef.description}

+
+
+ )} +
+
+
+ onChange(fieldId, e.target.value)} + className={cn( + 'h-10 flex-1', + isSecret ? 'pr-10' : '', + 'focus-visible:ring-2 focus-visible:ring-primary/20' + )} + /> + {isSecret && ( + + )} +
+ {isSecret && ( + + )} +
+
+ ) + } + } + + return ( +
+ {webhookUrl && ( +
+
+ + + + + + + +

This is the URL that will receive webhook requests

+
+
+
+
+
+
+ (e.target as HTMLInputElement).select()} + /> +
+ +
+
+ )} + + {Object.entries(triggerDef.configFields).map(([fieldId, fieldDef]) => ( +
{renderField(fieldId, fieldDef)}
+ ))} +
+ ) +} diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/components/sub-block/components/trigger-config/components/trigger-instructions.tsx b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/components/sub-block/components/trigger-config/components/trigger-instructions.tsx new file mode 100644 index 0000000000..2037d698ee --- /dev/null +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/components/sub-block/components/trigger-config/components/trigger-instructions.tsx @@ -0,0 +1,49 @@ +import { Notice } from '@/components/ui' +import { cn } from '@/lib/utils' +import { JSONView } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/console/components' +import type { TriggerConfig } from '@/triggers/types' + +interface TriggerInstructionsProps { + instructions: string[] + webhookUrl: string + samplePayload: any + triggerDef: TriggerConfig +} + +export function TriggerInstructions({ + instructions, + webhookUrl, + samplePayload, + triggerDef, +}: TriggerInstructionsProps) { + return ( +
+
+

Setup Instructions

+
+
    + {instructions.map((instruction, index) => ( +
  1. + ))} +
+
+
+ + + ) : null + } + title={`${triggerDef.provider.charAt(0).toUpperCase() + triggerDef.provider.slice(1)} Event Payload Example`} + > + Your workflow will receive a payload similar to this when a subscribed event occurs. +
+ +
+
+
+ ) +} diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/components/sub-block/components/trigger-config/components/trigger-modal.tsx b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/components/sub-block/components/trigger-config/components/trigger-modal.tsx new file mode 100644 index 0000000000..1228e1cda8 --- /dev/null +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/components/sub-block/components/trigger-config/components/trigger-modal.tsx @@ -0,0 +1,355 @@ +import { useEffect, useMemo, useState } from 'react' +import { Trash2 } from 'lucide-react' +import { Badge } from '@/components/ui/badge' +import { Button } from '@/components/ui/button' +import { + Dialog, + DialogContent, + DialogFooter, + DialogHeader, + DialogTitle, +} from '@/components/ui/dialog' +import { Tooltip, TooltipContent, TooltipTrigger } from '@/components/ui/tooltip' +import { createLogger } from '@/lib/logs/console/logger' +import { cn } from '@/lib/utils' +import { useSubBlockStore } from '@/stores/workflows/subblock/store' +import type { TriggerConfig } from '@/triggers/types' +import { CredentialSelector } from '../../credential-selector/credential-selector' +import { TriggerConfigSection } from './trigger-config-section' +import { TriggerInstructions } from './trigger-instructions' + +const logger = createLogger('TriggerModal') + +interface TriggerModalProps { + isOpen: boolean + onClose: () => void + triggerPath: string + triggerDef: TriggerConfig + triggerConfig: Record + onSave?: (path: string, config: Record) => Promise + onDelete?: () => Promise + triggerId?: string + blockId: string +} + +export function TriggerModal({ + isOpen, + onClose, + triggerPath, + triggerDef, + triggerConfig: initialConfig, + onSave, + onDelete, + triggerId, + blockId, +}: TriggerModalProps) { + const [config, setConfig] = useState>(initialConfig) + const [isSaving, setIsSaving] = useState(false) + 
+ // Track if config has changed from initial values + const hasConfigChanged = useMemo(() => { + return JSON.stringify(config) !== JSON.stringify(initialConfig) + }, [config, initialConfig]) + const [isDeleting, setIsDeleting] = useState(false) + const [webhookUrl, setWebhookUrl] = useState('') + const [generatedPath, setGeneratedPath] = useState('') + const [hasCredentials, setHasCredentials] = useState(false) + const [selectedCredentialId, setSelectedCredentialId] = useState(null) + const [dynamicOptions, setDynamicOptions] = useState< + Record> + >({}) + + // Initialize config with default values from trigger definition + useEffect(() => { + const defaultConfig: Record = {} + + // Apply default values from trigger definition + Object.entries(triggerDef.configFields).forEach(([fieldId, field]) => { + if (field.defaultValue !== undefined && !(fieldId in initialConfig)) { + defaultConfig[fieldId] = field.defaultValue + } + }) + + // Merge with initial config, prioritizing initial config values + const mergedConfig = { ...defaultConfig, ...initialConfig } + + // Only update if there are actually default values to apply + if (Object.keys(defaultConfig).length > 0) { + setConfig(mergedConfig) + } + }, [triggerDef.configFields, initialConfig]) + + // Monitor credential selection + useEffect(() => { + if (triggerDef.requiresCredentials && triggerDef.credentialProvider) { + // Check if credentials are selected by monitoring the sub-block store + const checkCredentials = () => { + const subBlockStore = useSubBlockStore.getState() + const credentialValue = subBlockStore.getValue(blockId, 'triggerCredentials') + const hasCredential = Boolean(credentialValue) + setHasCredentials(hasCredential) + + // If credential changed and it's a Gmail trigger, load labels + if (hasCredential && credentialValue !== selectedCredentialId) { + setSelectedCredentialId(credentialValue) + if (triggerDef.provider === 'gmail') { + loadGmailLabels(credentialValue) + } + } + } + + 
checkCredentials() + + // Set up a subscription to monitor changes + const unsubscribe = useSubBlockStore.subscribe(checkCredentials) + + return unsubscribe + } + // If credentials aren't required, set to true + setHasCredentials(true) + }, [ + blockId, + triggerDef.requiresCredentials, + triggerDef.credentialProvider, + selectedCredentialId, + triggerDef.provider, + ]) + + // Load Gmail labels for the selected credential + const loadGmailLabels = async (credentialId: string) => { + try { + const response = await fetch(`/api/tools/gmail/labels?credentialId=${credentialId}`) + if (response.ok) { + const data = await response.json() + if (data.labels && Array.isArray(data.labels)) { + const labelOptions = data.labels.map((label: any) => ({ + id: label.id, + name: label.name, + })) + setDynamicOptions((prev) => ({ + ...prev, + labelIds: labelOptions, + })) + } + } else { + logger.error('Failed to load Gmail labels:', response.statusText) + } + } catch (error) { + logger.error('Error loading Gmail labels:', error) + } + } + + // Generate webhook path and URL + useEffect(() => { + // For triggers that don't use webhooks (like Gmail polling), skip URL generation + if (triggerDef.requiresCredentials && !triggerDef.webhook) { + setWebhookUrl('') + setGeneratedPath('') + return + } + + let finalPath = triggerPath + + // If no path exists, generate one automatically + if (!finalPath) { + const timestamp = Date.now() + const randomId = Math.random().toString(36).substring(2, 8) + finalPath = `/${triggerDef.provider}/${timestamp}-${randomId}` + setGeneratedPath(finalPath) + } + + if (finalPath) { + const baseUrl = window.location.origin + setWebhookUrl(`${baseUrl}/api/webhooks/trigger${finalPath}`) + } + }, [triggerPath, triggerDef.provider, triggerDef.requiresCredentials, triggerDef.webhook]) + + const handleConfigChange = (fieldId: string, value: any) => { + setConfig((prev) => ({ + ...prev, + [fieldId]: value, + })) + } + + const handleSave = async () => { + if (!onSave) 
return + + setIsSaving(true) + try { + // Use the existing trigger path or the generated one + const path = triggerPath || generatedPath + + // For credential-based triggers that don't use webhooks (like Gmail), path is optional + const requiresPath = triggerDef.webhook !== undefined + + if (requiresPath && !path) { + logger.error('No webhook path available for saving trigger') + return + } + + const success = await onSave(path || '', config) + if (success) { + onClose() + } + } catch (error) { + logger.error('Error saving trigger:', error) + } finally { + setIsSaving(false) + } + } + + const handleDelete = async () => { + if (!onDelete) return + + setIsDeleting(true) + try { + const success = await onDelete() + if (success) { + onClose() + } + } catch (error) { + logger.error('Error deleting trigger:', error) + } finally { + setIsDeleting(false) + } + } + + const isConfigValid = () => { + // Check if credentials are required and available + if (triggerDef.requiresCredentials && !hasCredentials) { + return false + } + + // Check required fields + for (const [fieldId, fieldDef] of Object.entries(triggerDef.configFields)) { + if (fieldDef.required && !config[fieldId]) { + return false + } + } + return true + } + + return ( + + e.preventDefault()} + > + +
+
+ + {triggerDef.name} Configuration + + {triggerId && ( + + + +
+
+
+
+ Active Trigger + + + +

{triggerDef.name}

+
+ + )} +
+
+ + +
+
+ {triggerDef.requiresCredentials && triggerDef.credentialProvider && ( +
+

Credentials

+

+ This trigger requires {triggerDef.credentialProvider.replace('-', ' ')}{' '} + credentials to access your account. +

+ +
+ )} + + + + +
+
+ + +
+
+ {triggerId && ( + + )} +
+
+ + +
+
+
+ +
+ ) +} diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/components/sub-block/components/trigger-config/trigger-config.tsx b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/components/sub-block/components/trigger-config/trigger-config.tsx new file mode 100644 index 0000000000..93dd8db710 --- /dev/null +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/components/sub-block/components/trigger-config/trigger-config.tsx @@ -0,0 +1,403 @@ +import { useEffect, useState } from 'react' +import { ExternalLink } from 'lucide-react' +import { useParams } from 'next/navigation' +import { Button } from '@/components/ui/button' +import { createLogger } from '@/lib/logs/console/logger' +import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/components/sub-block/hooks/use-sub-block-value' +import { useSubBlockStore } from '@/stores/workflows/subblock/store' +import { getTrigger } from '@/triggers' +import { TriggerModal } from './components/trigger-modal' + +const logger = createLogger('TriggerConfig') + +interface TriggerConfigProps { + blockId: string + isConnecting: boolean + isPreview?: boolean + value?: { + triggerId?: string + triggerPath?: string + triggerConfig?: Record + } + disabled?: boolean + availableTriggers?: string[] +} + +export function TriggerConfig({ + blockId, + isConnecting, + isPreview = false, + value: propValue, + disabled = false, + availableTriggers = [], +}: TriggerConfigProps) { + const [isModalOpen, setIsModalOpen] = useState(false) + const [isSaving, setIsSaving] = useState(false) + const [isDeleting, setIsDeleting] = useState(false) + const [error, setError] = useState(null) + const [triggerId, setTriggerId] = useState(null) + const params = useParams() + const workflowId = params.workflowId as string + const [isLoading, setIsLoading] = useState(false) + + // Get trigger configuration from the 
block state + const [storeTriggerProvider, setTriggerProvider] = useSubBlockValue(blockId, 'triggerProvider') + const [storeTriggerPath, setTriggerPath] = useSubBlockValue(blockId, 'triggerPath') + const [storeTriggerConfig, setTriggerConfig] = useSubBlockValue(blockId, 'triggerConfig') + const [storeTriggerId, setStoredTriggerId] = useSubBlockValue(blockId, 'triggerId') + + // Use prop values when available (preview mode), otherwise use store values + const selectedTriggerId = propValue?.triggerId ?? storeTriggerId ?? (availableTriggers[0] || null) + const triggerPath = propValue?.triggerPath ?? storeTriggerPath + const triggerConfig = propValue?.triggerConfig ?? storeTriggerConfig + + // Consolidate trigger ID logic + const effectiveTriggerId = selectedTriggerId || availableTriggers[0] + const triggerDef = effectiveTriggerId ? getTrigger(effectiveTriggerId) : null + + // Set the trigger ID to the first available one if none is set + useEffect(() => { + if (!selectedTriggerId && availableTriggers[0] && !isPreview) { + setStoredTriggerId(availableTriggers[0]) + } + }, [availableTriggers, selectedTriggerId, setStoredTriggerId, isPreview]) + + // Store the actual trigger from the database + const [actualTriggerId, setActualTriggerId] = useState(null) + + // Check if webhook exists in the database (using existing webhook API) + useEffect(() => { + // Skip API calls in preview mode + if (isPreview) { + setIsLoading(false) + return + } + + const checkWebhook = async () => { + setIsLoading(true) + try { + // Check if there's a webhook for this specific block + const response = await fetch(`/api/webhooks?workflowId=${workflowId}&blockId=${blockId}`) + if (response.ok) { + const data = await response.json() + if (data.webhooks && data.webhooks.length > 0) { + const webhook = data.webhooks[0].webhook + setTriggerId(webhook.id) + setActualTriggerId(webhook.provider) + + // Update the path in the block state if it's different + if (webhook.path && webhook.path !== 
triggerPath) { + setTriggerPath(webhook.path) + } + + // Update trigger config (from webhook providerConfig) + if (webhook.providerConfig) { + setTriggerConfig(webhook.providerConfig) + } + } else { + setTriggerId(null) + setActualTriggerId(null) + + // Clear stale trigger data from store when no webhook found in database + if (triggerPath) { + setTriggerPath('') + logger.info('Cleared stale trigger path on page refresh - no webhook in database', { + blockId, + clearedPath: triggerPath, + }) + } + } + } + } catch (error) { + logger.error('Error checking webhook:', { error }) + } finally { + setIsLoading(false) + } + } + + if (effectiveTriggerId) { + checkWebhook() + } + }, [workflowId, blockId, isPreview, effectiveTriggerId]) + + const handleOpenModal = () => { + if (isPreview || disabled) return + setIsModalOpen(true) + setError(null) + } + + const handleCloseModal = () => { + setIsModalOpen(false) + } + + const handleSaveTrigger = async (path: string, config: Record) => { + if (isPreview || disabled || !effectiveTriggerId) return false + + try { + setIsSaving(true) + setError(null) + + // Get trigger definition to check if it requires webhooks + const triggerDef = getTrigger(effectiveTriggerId) + if (!triggerDef) { + throw new Error('Trigger definition not found') + } + + // Set the trigger path and config in the block state + if (path && path !== triggerPath) { + setTriggerPath(path) + } + setTriggerConfig(config) + setStoredTriggerId(effectiveTriggerId) + + // Map trigger ID to webhook provider name + const webhookProvider = effectiveTriggerId.replace(/_webhook|_poller$/, '') // e.g., 'slack_webhook' -> 'slack', 'gmail_poller' -> 'gmail' + + // For credential-based triggers (like Gmail), create webhook entry for polling service but no webhook URL + if (triggerDef.requiresCredentials && !triggerDef.webhook) { + // Gmail polling service requires a webhook database entry to find the configuration + const response = await fetch('/api/webhooks', { + method: 'POST', 
+ headers: { + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + workflowId, + blockId, + path: '', // Empty path - API will generate dummy path for Gmail + provider: webhookProvider, + providerConfig: config, + }), + }) + + if (!response.ok) { + const errorData = await response.json() + throw new Error( + typeof errorData.error === 'object' + ? errorData.error.message || JSON.stringify(errorData.error) + : errorData.error || 'Failed to save credential-based trigger' + ) + } + + const data = await response.json() + const savedWebhookId = data.webhook.id + setTriggerId(savedWebhookId) + + logger.info('Credential-based trigger saved successfully', { + webhookId: savedWebhookId, + triggerDefId: effectiveTriggerId, + provider: webhookProvider, + blockId, + }) + + // Update the actual trigger after saving + setActualTriggerId(webhookProvider) + return true + } + + // Save as webhook using existing webhook API (for webhook-based triggers) + const response = await fetch('/api/webhooks', { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + workflowId, + blockId, + path, + provider: webhookProvider, + providerConfig: config, + }), + }) + + if (!response.ok) { + const errorData = await response.json() + throw new Error( + typeof errorData.error === 'object' + ? 
errorData.error.message || JSON.stringify(errorData.error) + : errorData.error || 'Failed to save trigger' + ) + } + + const data = await response.json() + const savedWebhookId = data.webhook.id + setTriggerId(savedWebhookId) + + logger.info('Trigger saved successfully as webhook', { + webhookId: savedWebhookId, + triggerDefId: effectiveTriggerId, + provider: webhookProvider, + path, + blockId, + }) + + // Update the actual trigger after saving + setActualTriggerId(webhookProvider) + + return true + } catch (error: any) { + logger.error('Error saving trigger:', { error }) + setError(error.message || 'Failed to save trigger configuration') + return false + } finally { + setIsSaving(false) + } + } + + const handleDeleteTrigger = async () => { + if (isPreview || disabled || !triggerId) return false + + try { + setIsDeleting(true) + setError(null) + + // Delete webhook using existing webhook API (works for both webhook and credential-based triggers) + const response = await fetch(`/api/webhooks/${triggerId}`, { + method: 'DELETE', + }) + + if (!response.ok) { + const errorData = await response.json() + throw new Error(errorData.error || 'Failed to delete trigger') + } + + // Remove trigger-specific fields from the block state + const store = useSubBlockStore.getState() + const workflowValues = store.workflowValues[workflowId] || {} + const blockValues = { ...workflowValues[blockId] } + + // Remove trigger-related fields + blockValues.triggerId = undefined + blockValues.triggerConfig = undefined + blockValues.triggerPath = undefined + + // Update the store with the cleaned block values + useSubBlockStore.setState({ + workflowValues: { + ...workflowValues, + [workflowId]: { + ...workflowValues, + [blockId]: blockValues, + }, + }, + }) + + // Clear component state + setTriggerId(null) + setActualTriggerId(null) + + // Also clear store values using the setters to ensure UI updates + setTriggerPath('') + setTriggerConfig({}) + setStoredTriggerId('') + + logger.info('Trigger 
deleted successfully', { + blockId, + triggerType: + triggerDef?.requiresCredentials && !triggerDef.webhook + ? 'credential-based' + : 'webhook-based', + hadWebhookId: Boolean(triggerId), + }) + + handleCloseModal() + + return true + } catch (error: any) { + logger.error('Error deleting trigger:', { error }) + setError(error.message || 'Failed to delete trigger') + return false + } finally { + setIsDeleting(false) + } + } + + // Check if the trigger is connected + // Both webhook and credential-based triggers now have webhook database entries + const isTriggerConnected = Boolean(triggerId && actualTriggerId) + + // Debug logging to help with troubleshooting + useEffect(() => { + logger.info('Trigger connection status:', { + triggerId, + actualTriggerId, + triggerPath, + isTriggerConnected, + effectiveTriggerId, + triggerConfig, + triggerConfigKeys: triggerConfig ? Object.keys(triggerConfig) : [], + isCredentialBased: triggerDef?.requiresCredentials && !triggerDef.webhook, + storeValues: { + storeTriggerId, + storeTriggerPath, + storeTriggerConfig, + }, + }) + }, [ + triggerId, + actualTriggerId, + triggerPath, + isTriggerConnected, + effectiveTriggerId, + triggerConfig, + triggerDef, + storeTriggerId, + storeTriggerPath, + storeTriggerConfig, + ]) + + return ( +
+ {error &&
{error}
} + + {isTriggerConnected ? ( +
+
+
+
+ {triggerDef?.icon && ( + + )} + {triggerDef?.name || 'Active Trigger'} +
+
+
+
+ ) : ( + + )} + + {isModalOpen && triggerDef && ( + + )} +
+ ) +} diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/components/sub-block/sub-block.tsx b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/components/sub-block/sub-block.tsx index ccc99433d9..50a4f0acc1 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/components/sub-block/sub-block.tsx +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/components/sub-block/sub-block.tsx @@ -29,6 +29,7 @@ import { Table, TimeInput, ToolInput, + TriggerConfig, WebhookConfig, } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/components/sub-block/components' import type { SubBlockConfig } from '@/blocks/types' @@ -304,6 +305,27 @@ export function SubBlock({ /> ) } + case 'trigger-config': { + // For trigger config, we need to construct the value from multiple subblock values + const triggerValue = + isPreview && subBlockValues + ? { + triggerId: subBlockValues.triggerId?.value, + triggerPath: subBlockValues.triggerPath?.value, + triggerConfig: subBlockValues.triggerConfig?.value, + } + : previewValue + return ( + + ) + } case 'schedule-config': return ( ) { const blockWebhookStatus = !!(hasWebhookProvider && hasWebhookPath) const blockAdvancedMode = useWorkflowStore((state) => state.blocks[id]?.advancedMode ?? false) + const blockTriggerMode = useWorkflowStore((state) => state.blocks[id]?.triggerMode ?? false) // Collaborative workflow actions const { collaborativeUpdateBlockName, collaborativeToggleBlockWide, collaborativeToggleBlockAdvancedMode, + collaborativeToggleBlockTriggerMode, } = useCollaborativeWorkflow() // Workflow store actions @@ -394,11 +396,24 @@ export function WorkflowBlock({ id, data }: NodeProps) { } const isAdvancedMode = useWorkflowStore.getState().blocks[blockId]?.advancedMode ?? false + const isTriggerMode = useWorkflowStore.getState().blocks[blockId]?.triggerMode ?? 
false // Filter visible blocks and those that meet their conditions const visibleSubBlocks = subBlocks.filter((block) => { if (block.hidden) return false + // Special handling for trigger mode + if (block.type === ('trigger-config' as SubBlockType)) { + // Show trigger-config blocks when in trigger mode OR for pure trigger blocks + const isPureTriggerBlock = config?.triggers?.enabled && config.category === 'triggers' + return isTriggerMode || isPureTriggerBlock + } + + if (isTriggerMode && block.type !== ('trigger-config' as SubBlockType)) { + // In trigger mode, hide all non-trigger-config blocks + return false + } + // Filter by mode if specified if (block.mode) { if (block.mode === 'basic' && isAdvancedMode) return false @@ -550,8 +565,8 @@ export function WorkflowBlock({ id, data }: NodeProps) { )} - {/* Connection Blocks - Don't show for trigger blocks or starter blocks */} - {config.category !== 'triggers' && type !== 'starter' && ( + {/* Connection Blocks - Don't show for trigger blocks, starter blocks, or blocks in trigger mode */} + {config.category !== 'triggers' && type !== 'starter' && !blockTriggerMode && ( ) { /> )} - {/* Input Handle - Don't show for trigger blocks or starter blocks */} - {config.category !== 'triggers' && type !== 'starter' && ( + {/* Input Handle - Don't show for trigger blocks, starter blocks, or blocks in trigger mode */} + {config.category !== 'triggers' && type !== 'starter' && !blockTriggerMode && ( ) { )} + {/* Trigger Mode Button - Show for hybrid blocks that support triggers (not pure trigger blocks) */} + {config.triggers?.enabled && config.category !== 'triggers' && ( + + + + + + {!userPermissions.canEdit + ? userPermissions.isOfflineMode + ? 'Connection lost - please refresh' + : 'Read-only mode' + : blockTriggerMode + ? 'Switch to Action Mode' + : 'Switch to Trigger Mode'} + + + )} {config.docsLink ? 
( @@ -925,8 +974,8 @@ export function WorkflowBlock({ id, data }: NodeProps) { isValidConnection={(connection) => connection.target !== id} /> - {/* Error Handle - Don't show for trigger blocks or starter blocks */} - {config.category !== 'triggers' && type !== 'starter' && ( + {/* Error Handle - Don't show for trigger blocks, starter blocks, or blocks in trigger mode */} + {config.category !== 'triggers' && type !== 'starter' && !blockTriggerMode && ( = { condition: { field: 'operation', value: 'update' }, required: true, }, + // TRIGGER MODE: Trigger configuration (only shown when trigger mode is active) + { + id: 'triggerConfig', + title: 'Trigger Configuration', + type: 'trigger-config', + layout: 'full', + triggerProvider: 'airtable', + availableTriggers: ['airtable_webhook'], + }, ], tools: { access: [ @@ -176,5 +185,21 @@ export const AirtableBlock: BlockConfig = { records: { type: 'json', description: 'Retrieved record data' }, // Optional: for list, create, updateMultiple record: { type: 'json', description: 'Single record data' }, // Optional: for get, update single metadata: { type: 'json', description: 'Operation metadata' }, // Required: present in all responses + // Trigger outputs + event_type: { type: 'string', description: 'Type of Airtable event' }, + base_id: { type: 'string', description: 'Airtable base identifier' }, + table_id: { type: 'string', description: 'Airtable table identifier' }, + record_id: { type: 'string', description: 'Record identifier that was modified' }, + record_data: { + type: 'string', + description: 'Complete record data (when Include Full Record Data is enabled)', + }, + changed_fields: { type: 'string', description: 'Fields that were changed in the record' }, + webhook_id: { type: 'string', description: 'Unique webhook identifier' }, + timestamp: { type: 'string', description: 'Event timestamp' }, + }, + triggers: { + enabled: true, + available: ['airtable_webhook'], }, } diff --git a/apps/sim/blocks/blocks/api.ts 
b/apps/sim/blocks/blocks/api.ts index e4a2978784..1cbea104be 100644 --- a/apps/sim/blocks/blocks/api.ts +++ b/apps/sim/blocks/blocks/api.ts @@ -94,8 +94,8 @@ Example: params: { type: 'json', description: 'URL query parameters' }, }, outputs: { - data: { type: 'any', description: 'Response data' }, - status: { type: 'number', description: 'HTTP status code' }, - headers: { type: 'json', description: 'Response headers' }, + data: { type: 'json', description: 'API response data (JSON, text, or other formats)' }, + status: { type: 'number', description: 'HTTP status code (200, 404, 500, etc.)' }, + headers: { type: 'json', description: 'HTTP response headers as key-value pairs' }, }, } diff --git a/apps/sim/blocks/blocks/browser_use.ts b/apps/sim/blocks/blocks/browser_use.ts index 967c80f3d2..af7ad3906b 100644 --- a/apps/sim/blocks/blocks/browser_use.ts +++ b/apps/sim/blocks/blocks/browser_use.ts @@ -71,7 +71,7 @@ export const BrowserUseBlock: BlockConfig = { outputs: { id: { type: 'string', description: 'Task execution identifier' }, success: { type: 'boolean', description: 'Task completion status' }, - output: { type: 'any', description: 'Task output data' }, + output: { type: 'json', description: 'Task output data' }, steps: { type: 'json', description: 'Execution steps taken' }, }, } diff --git a/apps/sim/blocks/blocks/clay.ts b/apps/sim/blocks/blocks/clay.ts index 6272d3da11..7de35c76ac 100644 --- a/apps/sim/blocks/blocks/clay.ts +++ b/apps/sim/blocks/blocks/clay.ts @@ -53,6 +53,6 @@ Plain Text: Best for populating a table in free-form style. 
data: { type: 'json', description: 'Data to populate' }, }, outputs: { - data: { type: 'any', description: 'Response data' }, + data: { type: 'json', description: 'Response data' }, }, } diff --git a/apps/sim/blocks/blocks/discord.ts b/apps/sim/blocks/blocks/discord.ts index fcc02f008c..39d24bcc17 100644 --- a/apps/sim/blocks/blocks/discord.ts +++ b/apps/sim/blocks/blocks/discord.ts @@ -212,6 +212,20 @@ export const DiscordBlock: BlockConfig = { }, outputs: { message: { type: 'string', description: 'Message content' }, - data: { type: 'any', description: 'Response data' }, + data: { type: 'json', description: 'Response data' }, + // Trigger outputs + content: { type: 'string', description: 'Message content from Discord webhook' }, + username: { type: 'string', description: 'Username of the sender (if provided)' }, + avatar_url: { type: 'string', description: 'Avatar URL of the sender (if provided)' }, + timestamp: { type: 'string', description: 'Timestamp when the webhook was triggered' }, + webhook_id: { type: 'string', description: 'Discord webhook identifier' }, + webhook_token: { type: 'string', description: 'Discord webhook token' }, + guild_id: { type: 'string', description: 'Discord server/guild ID' }, + channel_id: { type: 'string', description: 'Discord channel ID where the event occurred' }, + embeds: { type: 'string', description: 'Embedded content data (if any)' }, + }, + triggers: { + enabled: true, + available: ['discord_webhook'], }, } diff --git a/apps/sim/blocks/blocks/evaluator.ts b/apps/sim/blocks/blocks/evaluator.ts index 692d0c0b16..0f3e05ea81 100644 --- a/apps/sim/blocks/blocks/evaluator.ts +++ b/apps/sim/blocks/blocks/evaluator.ts @@ -315,7 +315,7 @@ export const EvaluatorBlock: BlockConfig = { outputs: { content: { type: 'string', description: 'Evaluation results' }, model: { type: 'string', description: 'Model used' }, - tokens: { type: 'any', description: 'Token usage' }, - cost: { type: 'any', description: 'Cost information' }, + tokens: 
{ type: 'json', description: 'Token usage' }, + cost: { type: 'json', description: 'Cost information' }, } as any, } diff --git a/apps/sim/blocks/blocks/file.ts b/apps/sim/blocks/blocks/file.ts index 7e87be8e69..d56e91dcf5 100644 --- a/apps/sim/blocks/blocks/file.ts +++ b/apps/sim/blocks/blocks/file.ts @@ -111,7 +111,13 @@ export const FileBlock: BlockConfig = { file: { type: 'json', description: 'Uploaded file data' }, }, outputs: { - files: { type: 'json', description: 'Parsed file data' }, - combinedContent: { type: 'string', description: 'Combined file content' }, + files: { + type: 'json', + description: 'Array of parsed file objects with content, metadata, and file properties', + }, + combinedContent: { + type: 'string', + description: 'All file contents merged into a single text string', + }, }, } diff --git a/apps/sim/blocks/blocks/firecrawl.ts b/apps/sim/blocks/blocks/firecrawl.ts index bb9e4427b6..38835d2dc7 100644 --- a/apps/sim/blocks/blocks/firecrawl.ts +++ b/apps/sim/blocks/blocks/firecrawl.ts @@ -121,11 +121,11 @@ export const FirecrawlBlock: BlockConfig = { outputs: { // Scrape output markdown: { type: 'string', description: 'Page content markdown' }, - html: { type: 'any', description: 'Raw HTML content' }, + html: { type: 'string', description: 'Raw HTML content' }, metadata: { type: 'json', description: 'Page metadata' }, // Search output data: { type: 'json', description: 'Search results data' }, - warning: { type: 'any', description: 'Warning messages' }, + warning: { type: 'string', description: 'Warning messages' }, // Crawl output pages: { type: 'json', description: 'Crawled pages data' }, total: { type: 'number', description: 'Total pages found' }, diff --git a/apps/sim/blocks/blocks/function.ts b/apps/sim/blocks/blocks/function.ts index 5000424618..b2de04dae1 100644 --- a/apps/sim/blocks/blocks/function.ts +++ b/apps/sim/blocks/blocks/function.ts @@ -80,7 +80,10 @@ try { timeout: { type: 'number', description: 'Execution timeout' }, }, 
outputs: { - result: { type: 'any', description: 'Execution result' }, - stdout: { type: 'string', description: 'Console output' }, + result: { type: 'json', description: 'Return value from the executed JavaScript function' }, + stdout: { + type: 'string', + description: 'Console log output and debug messages from function execution', + }, }, } diff --git a/apps/sim/blocks/blocks/generic_webhook.ts b/apps/sim/blocks/blocks/generic_webhook.ts new file mode 100644 index 0000000000..2b392b2284 --- /dev/null +++ b/apps/sim/blocks/blocks/generic_webhook.ts @@ -0,0 +1,47 @@ +import { WebhookIcon } from '@/components/icons' +import type { BlockConfig } from '@/blocks/types' + +export const GenericWebhookBlock: BlockConfig = { + type: 'generic_webhook', + name: 'Webhook', + description: 'Receive webhooks from any service', + category: 'triggers', + icon: WebhookIcon, + bgColor: '#10B981', // Green color for triggers + + subBlocks: [ + // Generic webhook configuration - always visible + { + id: 'triggerConfig', + title: 'Webhook Configuration', + type: 'trigger-config', + layout: 'full', + triggerProvider: 'generic', + availableTriggers: ['generic_webhook'], + }, + ], + + tools: { + access: [], // No external tools needed for triggers + }, + + inputs: {}, // No inputs - webhook triggers receive data externally + + outputs: { + // Generic webhook outputs that can be used with any webhook payload + payload: { type: 'json', description: 'Complete webhook payload' }, + headers: { type: 'json', description: 'Request headers' }, + method: { type: 'string', description: 'HTTP method' }, + url: { type: 'string', description: 'Request URL' }, + timestamp: { type: 'string', description: 'Webhook received timestamp' }, + // Common webhook fields that services often use + event: { type: 'string', description: 'Event type from payload' }, + id: { type: 'string', description: 'Event ID from payload' }, + data: { type: 'json', description: 'Event data from payload' }, + }, + + triggers: { 
+ enabled: true, + available: ['generic_webhook'], + }, +} diff --git a/apps/sim/blocks/blocks/github.ts b/apps/sim/blocks/blocks/github.ts index 8900d8a6af..52f35d884e 100644 --- a/apps/sim/blocks/blocks/github.ts +++ b/apps/sim/blocks/blocks/github.ts @@ -5,9 +5,9 @@ import type { GitHubResponse } from '@/tools/github/types' export const GitHubBlock: BlockConfig = { type: 'github', name: 'GitHub', - description: 'Interact with GitHub', + description: 'Interact with GitHub or trigger workflows from GitHub events', longDescription: - 'Access GitHub repositories, pull requests, and comments through the GitHub API. Automate code reviews, PR management, and repository interactions within your workflow.', + 'Access GitHub repositories, pull requests, and comments through the GitHub API. Automate code reviews, PR management, and repository interactions within your workflow. Trigger workflows from GitHub events like push, pull requests, and issues.', docsLink: 'https://docs.sim.ai/tools/github', category: 'tools', bgColor: '#181C1E', @@ -86,6 +86,15 @@ export const GitHubBlock: BlockConfig = { password: true, required: true, }, + // TRIGGER MODE: Trigger configuration (only shown when trigger mode is active) + { + id: 'triggerConfig', + title: 'Trigger Configuration', + type: 'trigger-config', + layout: 'full', + triggerProvider: 'github', + availableTriggers: ['github_webhook'], + }, { id: 'commentType', title: 'Comment Type', @@ -164,5 +173,27 @@ export const GitHubBlock: BlockConfig = { outputs: { content: { type: 'string', description: 'Response content' }, metadata: { type: 'json', description: 'Response metadata' }, + // Trigger outputs + action: { type: 'string', description: 'The action that was performed' }, + event_type: { type: 'string', description: 'Type of GitHub event' }, + repository: { type: 'string', description: 'Repository full name' }, + repository_name: { type: 'string', description: 'Repository name only' }, + repository_owner: { type: 'string', 
description: 'Repository owner username' }, + sender: { type: 'string', description: 'Username of the user who triggered the event' }, + sender_id: { type: 'string', description: 'User ID of the sender' }, + ref: { type: 'string', description: 'Git reference (for push events)' }, + before: { type: 'string', description: 'SHA of the commit before the push' }, + after: { type: 'string', description: 'SHA of the commit after the push' }, + commits: { type: 'string', description: 'Array of commit objects (for push events)' }, + pull_request: { type: 'string', description: 'Pull request object (for pull_request events)' }, + issue: { type: 'string', description: 'Issue object (for issues events)' }, + comment: { type: 'string', description: 'Comment object (for comment events)' }, + branch: { type: 'string', description: 'Branch name extracted from ref' }, + commit_message: { type: 'string', description: 'Latest commit message' }, + commit_author: { type: 'string', description: 'Author of the latest commit' }, + }, + triggers: { + enabled: true, + available: ['github_webhook'], }, } diff --git a/apps/sim/blocks/blocks/gmail.ts b/apps/sim/blocks/blocks/gmail.ts index fb6947860a..c5fff27f88 100644 --- a/apps/sim/blocks/blocks/gmail.ts +++ b/apps/sim/blocks/blocks/gmail.ts @@ -5,9 +5,9 @@ import type { GmailToolResponse } from '@/tools/gmail/types' export const GmailBlock: BlockConfig = { type: 'gmail', name: 'Gmail', - description: 'Send Gmail', + description: 'Send Gmail or trigger workflows from Gmail events', longDescription: - 'Integrate Gmail functionality to send email messages within your workflow. Automate email communications and process email content using OAuth authentication.', + 'Comprehensive Gmail integration with OAuth authentication. 
Send email messages, read email content, and trigger workflows from Gmail events like new emails and label changes.', docsLink: 'https://docs.sim.ai/tools/gmail', category: 'tools', bgColor: '#E0E0E0', @@ -145,6 +145,15 @@ export const GmailBlock: BlockConfig = { placeholder: 'Maximum number of results (default: 10)', condition: { field: 'operation', value: ['search_gmail', 'read_gmail'] }, }, + // TRIGGER MODE: Trigger configuration (only shown when trigger mode is active) + { + id: 'triggerConfig', + title: 'Trigger Configuration', + type: 'trigger-config', + layout: 'full', + triggerProvider: 'gmail', + availableTriggers: ['gmail_poller'], + }, ], tools: { access: ['gmail_send', 'gmail_draft', 'gmail_read', 'gmail_search'], @@ -200,11 +209,27 @@ export const GmailBlock: BlockConfig = { maxResults: { type: 'number', description: 'Maximum results' }, }, outputs: { + // Tool outputs content: { type: 'string', description: 'Response content' }, metadata: { type: 'json', description: 'Email metadata' }, - attachments: { - type: 'json', - description: 'Email attachments (when includeAttachments is enabled)', - }, + attachments: { type: 'json', description: 'Email attachments array' }, + // Trigger outputs + email_id: { type: 'string', description: 'Gmail message ID' }, + thread_id: { type: 'string', description: 'Gmail thread ID' }, + subject: { type: 'string', description: 'Email subject line' }, + from: { type: 'string', description: 'Sender email address' }, + to: { type: 'string', description: 'Recipient email address' }, + cc: { type: 'string', description: 'CC recipients (comma-separated)' }, + date: { type: 'string', description: 'Email date in ISO format' }, + body_text: { type: 'string', description: 'Plain text email body' }, + body_html: { type: 'string', description: 'HTML email body' }, + labels: { type: 'string', description: 'Email labels (comma-separated)' }, + has_attachments: { type: 'boolean', description: 'Whether email has attachments' }, + 
raw_email: { type: 'json', description: 'Complete raw email data from Gmail API (if enabled)' }, + timestamp: { type: 'string', description: 'Event timestamp' }, + }, + triggers: { + enabled: true, + available: ['gmail_poller'], }, } diff --git a/apps/sim/blocks/blocks/jira.ts b/apps/sim/blocks/blocks/jira.ts index 399ed00ee4..10ff0a479e 100644 --- a/apps/sim/blocks/blocks/jira.ts +++ b/apps/sim/blocks/blocks/jira.ts @@ -240,13 +240,24 @@ export const JiraBlock: BlockConfig = { issueType: { type: 'string', description: 'Issue type' }, }, outputs: { - ts: { type: 'string', description: 'Timestamp' }, - issueKey: { type: 'string', description: 'Issue key' }, - summary: { type: 'string', description: 'Issue summary' }, - description: { type: 'string', description: 'Issue description' }, - created: { type: 'string', description: 'Creation date' }, - updated: { type: 'string', description: 'Update date' }, - success: { type: 'boolean', description: 'Operation success' }, - url: { type: 'string', description: 'Issue URL' }, + // Common outputs across all Jira operations + ts: { type: 'string', description: 'Timestamp of the operation' }, + + // jira_retrieve (read) outputs + issueKey: { type: 'string', description: 'Issue key (e.g., PROJ-123)' }, + summary: { type: 'string', description: 'Issue summary/title' }, + description: { type: 'string', description: 'Issue description content' }, + created: { type: 'string', description: 'Issue creation date' }, + updated: { type: 'string', description: 'Issue last update date' }, + + // jira_update outputs + success: { type: 'boolean', description: 'Whether the update operation was successful' }, + + // jira_write (create) outputs + url: { type: 'string', description: 'URL to the created/accessed issue' }, + + // jira_bulk_read outputs (array of issues) + // Note: bulk_read returns an array in the output field, each item contains: + // ts, summary, description, created, updated }, } diff --git a/apps/sim/blocks/blocks/mem0.ts 
b/apps/sim/blocks/blocks/mem0.ts index 5822f593df..47ab17ea13 100644 --- a/apps/sim/blocks/blocks/mem0.ts +++ b/apps/sim/blocks/blocks/mem0.ts @@ -295,8 +295,8 @@ export const Mem0Block: BlockConfig = { limit: { type: 'number', description: 'Result limit' }, }, outputs: { - ids: { type: 'any', description: 'Memory identifiers' }, - memories: { type: 'any', description: 'Memory data' }, - searchResults: { type: 'any', description: 'Search results' }, + ids: { type: 'json', description: 'Memory identifiers' }, + memories: { type: 'json', description: 'Memory data' }, + searchResults: { type: 'json', description: 'Search results' }, }, } diff --git a/apps/sim/blocks/blocks/memory.ts b/apps/sim/blocks/blocks/memory.ts index f4c478169f..e2dd0969b5 100644 --- a/apps/sim/blocks/blocks/memory.ts +++ b/apps/sim/blocks/blocks/memory.ts @@ -186,7 +186,7 @@ export const MemoryBlock: BlockConfig = { content: { type: 'string', description: 'Memory content' }, }, outputs: { - memories: { type: 'any', description: 'Memory data' }, + memories: { type: 'json', description: 'Memory data' }, id: { type: 'string', description: 'Memory identifier' }, }, } diff --git a/apps/sim/blocks/blocks/microsoft_excel.ts b/apps/sim/blocks/blocks/microsoft_excel.ts index 56c6fd553c..bed00152a9 100644 --- a/apps/sim/blocks/blocks/microsoft_excel.ts +++ b/apps/sim/blocks/blocks/microsoft_excel.ts @@ -197,13 +197,19 @@ export const MicrosoftExcelBlock: BlockConfig = { valueInputOption: { type: 'string', description: 'Value input option' }, }, outputs: { - data: { type: 'json', description: 'Sheet data' }, - metadata: { type: 'json', description: 'Operation metadata' }, - updatedRange: { type: 'string', description: 'Updated range' }, - updatedRows: { type: 'number', description: 'Updated rows count' }, - updatedColumns: { type: 'number', description: 'Updated columns count' }, - updatedCells: { type: 'number', description: 'Updated cells count' }, - index: { type: 'number', description: 'Row index' }, 
- values: { type: 'json', description: 'Table values' }, + data: { type: 'json', description: 'Excel range data with sheet information and cell values' }, + metadata: { + type: 'json', + description: 'Spreadsheet metadata including ID, URL, and sheet details', + }, + updatedRange: { type: 'string', description: 'The range that was updated (write operations)' }, + updatedRows: { type: 'number', description: 'Number of rows updated (write operations)' }, + updatedColumns: { type: 'number', description: 'Number of columns updated (write operations)' }, + updatedCells: { + type: 'number', + description: 'Total number of cells updated (write operations)', + }, + index: { type: 'number', description: 'Row index for table add operations' }, + values: { type: 'json', description: 'Cell values array for table add operations' }, }, } diff --git a/apps/sim/blocks/blocks/microsoft_teams.ts b/apps/sim/blocks/blocks/microsoft_teams.ts index d3cfb1861a..97818c2213 100644 --- a/apps/sim/blocks/blocks/microsoft_teams.ts +++ b/apps/sim/blocks/blocks/microsoft_teams.ts @@ -221,8 +221,42 @@ export const MicrosoftTeamsBlock: BlockConfig = { content: { type: 'string', description: 'Message content' }, }, outputs: { - content: { type: 'string', description: 'Message content' }, - metadata: { type: 'json', description: 'Message metadata' }, - updatedContent: { type: 'boolean', description: 'Content update status' }, + // Read operation outputs + content: { type: 'string', description: 'Formatted message content from chat/channel' }, + metadata: { type: 'json', description: 'Message metadata with full details' }, + messageCount: { type: 'number', description: 'Number of messages retrieved' }, + messages: { type: 'json', description: 'Array of message objects' }, + totalAttachments: { type: 'number', description: 'Total number of attachments' }, + attachmentTypes: { type: 'json', description: 'Array of attachment content types' }, + // Write operation outputs + updatedContent: { + type: 
'boolean', + description: 'Whether content was successfully updated/sent', + }, + messageId: { type: 'string', description: 'ID of the created/sent message' }, + createdTime: { type: 'string', description: 'Timestamp when message was created' }, + url: { type: 'string', description: 'Web URL to the message' }, + // Individual message fields (from read operations) + sender: { type: 'string', description: 'Message sender display name' }, + messageTimestamp: { type: 'string', description: 'Individual message timestamp' }, + messageType: { + type: 'string', + description: 'Type of message (message, systemEventMessage, etc.)', + }, + // Trigger outputs + type: { type: 'string', description: 'Type of Teams message' }, + id: { type: 'string', description: 'Unique message identifier' }, + timestamp: { type: 'string', description: 'Message timestamp' }, + localTimestamp: { type: 'string', description: 'Local timestamp of the message' }, + serviceUrl: { type: 'string', description: 'Microsoft Teams service URL' }, + channelId: { type: 'string', description: 'Teams channel ID where the event occurred' }, + from_id: { type: 'string', description: 'User ID who sent the message' }, + from_name: { type: 'string', description: 'Username who sent the message' }, + conversation_id: { type: 'string', description: 'Conversation/thread ID' }, + text: { type: 'string', description: 'Message text content' }, + }, + triggers: { + enabled: true, + available: ['microsoftteams_webhook'], }, } diff --git a/apps/sim/blocks/blocks/notion.ts b/apps/sim/blocks/blocks/notion.ts index ab102cad98..2ecc1c2045 100644 --- a/apps/sim/blocks/blocks/notion.ts +++ b/apps/sim/blocks/blocks/notion.ts @@ -308,7 +308,17 @@ export const NotionBlock: BlockConfig = { filterType: { type: 'string', description: 'Filter type' }, }, outputs: { - content: { type: 'string', description: 'Page content' }, - metadata: { type: 'any', description: 'Page metadata' }, + // Common outputs across all Notion operations + 
content: { + type: 'string', + description: 'Page content, search results, or confirmation messages', + }, + + // Metadata object containing operation-specific information + metadata: { + type: 'json', + description: + 'Metadata containing operation-specific details including page/database info, results, and pagination data', + }, }, } diff --git a/apps/sim/blocks/blocks/outlook.ts b/apps/sim/blocks/blocks/outlook.ts index b871ccfbfa..e8bef11e5e 100644 --- a/apps/sim/blocks/blocks/outlook.ts +++ b/apps/sim/blocks/blocks/outlook.ts @@ -145,6 +145,15 @@ export const OutlookBlock: BlockConfig = { placeholder: 'Number of emails to retrieve (default: 1, max: 10)', condition: { field: 'operation', value: 'read_outlook' }, }, + // TRIGGER MODE: Trigger configuration (only shown when trigger mode is active) + { + id: 'triggerConfig', + title: 'Trigger Configuration', + type: 'trigger-config', + layout: 'full', + triggerProvider: 'outlook', + availableTriggers: ['outlook_poller'], + }, ], tools: { access: ['outlook_send', 'outlook_draft', 'outlook_read'], @@ -193,7 +202,36 @@ export const OutlookBlock: BlockConfig = { maxResults: { type: 'number', description: 'Maximum emails' }, }, outputs: { + // Common outputs message: { type: 'string', description: 'Response message' }, - results: { type: 'json', description: 'Email results' }, + results: { type: 'json', description: 'Operation results' }, + // Send operation specific outputs + status: { type: 'string', description: 'Email send status (sent)' }, + timestamp: { type: 'string', description: 'Operation timestamp' }, + // Draft operation specific outputs + messageId: { type: 'string', description: 'Draft message ID' }, + subject: { type: 'string', description: 'Draft email subject' }, + // Read operation specific outputs + emailCount: { type: 'number', description: 'Number of emails retrieved' }, + emails: { type: 'json', description: 'Array of email objects' }, + emailId: { type: 'string', description: 'Individual email 
ID' }, + emailSubject: { type: 'string', description: 'Individual email subject' }, + bodyPreview: { type: 'string', description: 'Email body preview' }, + bodyContent: { type: 'string', description: 'Full email body content' }, + sender: { type: 'json', description: 'Email sender information' }, + from: { type: 'json', description: 'Email from information' }, + recipients: { type: 'json', description: 'Email recipients' }, + receivedDateTime: { type: 'string', description: 'Email received timestamp' }, + sentDateTime: { type: 'string', description: 'Email sent timestamp' }, + hasAttachments: { type: 'boolean', description: 'Whether email has attachments' }, + isRead: { type: 'boolean', description: 'Whether email is read' }, + importance: { type: 'string', description: 'Email importance level' }, + // Trigger outputs + email: { type: 'json', description: 'Email data from trigger' }, + rawEmail: { type: 'json', description: 'Complete raw email data from Microsoft Graph API' }, + }, + triggers: { + enabled: true, + available: ['outlook_poller'], }, } diff --git a/apps/sim/blocks/blocks/pinecone.ts b/apps/sim/blocks/blocks/pinecone.ts index 6d85eefb35..3d8d5db250 100644 --- a/apps/sim/blocks/blocks/pinecone.ts +++ b/apps/sim/blocks/blocks/pinecone.ts @@ -283,11 +283,11 @@ export const PineconeBlock: BlockConfig = { }, outputs: { - matches: { type: 'any', description: 'Search matches' }, - upsertedCount: { type: 'any', description: 'Upserted count' }, - data: { type: 'any', description: 'Response data' }, - model: { type: 'any', description: 'Model information' }, - vector_type: { type: 'any', description: 'Vector type' }, - usage: { type: 'any', description: 'Usage statistics' }, + matches: { type: 'json', description: 'Search matches' }, + upsertedCount: { type: 'number', description: 'Upserted count' }, + data: { type: 'json', description: 'Response data' }, + model: { type: 'string', description: 'Model information' }, + vector_type: { type: 'string', description: 
'Vector type' }, + usage: { type: 'json', description: 'Usage statistics' }, }, } diff --git a/apps/sim/blocks/blocks/qdrant.ts b/apps/sim/blocks/blocks/qdrant.ts index 1dbc6e11bd..c1fe14cfab 100644 --- a/apps/sim/blocks/blocks/qdrant.ts +++ b/apps/sim/blocks/blocks/qdrant.ts @@ -198,9 +198,9 @@ export const QdrantBlock: BlockConfig = { }, outputs: { - matches: { type: 'any', description: 'Search matches' }, - upsertedCount: { type: 'any', description: 'Upserted count' }, - data: { type: 'any', description: 'Response data' }, - status: { type: 'any', description: 'Operation status' }, + matches: { type: 'json', description: 'Search matches' }, + upsertedCount: { type: 'number', description: 'Upserted count' }, + data: { type: 'json', description: 'Response data' }, + status: { type: 'string', description: 'Operation status' }, }, } diff --git a/apps/sim/blocks/blocks/router.ts b/apps/sim/blocks/blocks/router.ts index 97fa9d9d5d..825dec5a03 100644 --- a/apps/sim/blocks/blocks/router.ts +++ b/apps/sim/blocks/blocks/router.ts @@ -188,8 +188,8 @@ export const RouterBlock: BlockConfig = { outputs: { content: { type: 'string', description: 'Routing response content' }, model: { type: 'string', description: 'Model used' }, - tokens: { type: 'any', description: 'Token usage' }, - cost: { type: 'any', description: 'Cost information' }, + tokens: { type: 'json', description: 'Token usage' }, + cost: { type: 'json', description: 'Cost information' }, selectedPath: { type: 'json', description: 'Selected routing path' }, }, } diff --git a/apps/sim/blocks/blocks/slack.ts b/apps/sim/blocks/blocks/slack.ts index a116231395..8c11694614 100644 --- a/apps/sim/blocks/blocks/slack.ts +++ b/apps/sim/blocks/blocks/slack.ts @@ -5,9 +5,9 @@ import type { SlackResponse } from '@/tools/slack/types' export const SlackBlock: BlockConfig = { type: 'slack', name: 'Slack', - description: 'Send messages to Slack', + description: 'Send messages to Slack or trigger workflows from Slack events', 
longDescription: - "Comprehensive Slack integration with OAuth authentication. Send formatted messages using Slack's mrkdwn syntax.", + "Comprehensive Slack integration with OAuth authentication. Send formatted messages using Slack's mrkdwn syntax or trigger workflows from Slack events like mentions and messages.", docsLink: 'https://docs.sim.ai/tools/slack', category: 'tools', bgColor: '#611f69', @@ -151,6 +151,15 @@ export const SlackBlock: BlockConfig = { value: 'read', }, }, + // TRIGGER MODE: Trigger configuration (only shown when trigger mode is active) + { + id: 'triggerConfig', + title: 'Trigger Configuration', + type: 'trigger-config', + layout: 'full', + triggerProvider: 'slack', + availableTriggers: ['slack_webhook'], + }, ], tools: { access: ['slack_message', 'slack_canvas', 'slack_message_reader'], @@ -257,10 +266,30 @@ export const SlackBlock: BlockConfig = { oldest: { type: 'string', description: 'Oldest timestamp' }, }, outputs: { - ts: { type: 'string', description: 'Message timestamp' }, - channel: { type: 'string', description: 'Channel identifier' }, - canvas_id: { type: 'string', description: 'Canvas identifier' }, + // slack_message outputs + ts: { type: 'string', description: 'Message timestamp returned by Slack API' }, + channel: { type: 'string', description: 'Channel identifier where message was sent' }, + + // slack_canvas outputs + canvas_id: { type: 'string', description: 'Canvas identifier for created canvases' }, title: { type: 'string', description: 'Canvas title' }, - messages: { type: 'json', description: 'Message data' }, + + // slack_message_reader outputs + messages: { + type: 'json', + description: 'Array of message objects', + }, + + // Trigger outputs (when used as webhook trigger) + event_type: { type: 'string', description: 'Type of Slack event that triggered the workflow' }, + channel_name: { type: 'string', description: 'Human-readable channel name' }, + user_name: { type: 'string', description: 'Username who triggered 
the event' }, + team_id: { type: 'string', description: 'Slack workspace/team ID' }, + event_id: { type: 'string', description: 'Unique event identifier for the trigger' }, + }, + // New: Trigger capabilities + triggers: { + enabled: true, + available: ['slack_webhook'], }, } diff --git a/apps/sim/blocks/blocks/stagehand_agent.ts b/apps/sim/blocks/blocks/stagehand_agent.ts index f638b659fd..225ec638f0 100644 --- a/apps/sim/blocks/blocks/stagehand_agent.ts +++ b/apps/sim/blocks/blocks/stagehand_agent.ts @@ -71,6 +71,6 @@ export const StagehandAgentBlock: BlockConfig = { }, outputs: { agentResult: { type: 'json', description: 'Agent execution result' }, - structuredOutput: { type: 'any', description: 'Structured output data' }, + structuredOutput: { type: 'json', description: 'Structured output data' }, }, } diff --git a/apps/sim/blocks/blocks/supabase.ts b/apps/sim/blocks/blocks/supabase.ts index eb22dc6789..3acd1ae1b8 100644 --- a/apps/sim/blocks/blocks/supabase.ts +++ b/apps/sim/blocks/blocks/supabase.ts @@ -206,7 +206,13 @@ export const SupabaseBlock: BlockConfig = { limit: { type: 'number', description: 'Result limit' }, }, outputs: { - message: { type: 'string', description: 'Operation message' }, - results: { type: 'json', description: 'Query results' }, + message: { + type: 'string', + description: 'Success or error message describing the operation outcome', + }, + results: { + type: 'json', + description: 'Database records returned from query, insert, update, or delete operations', + }, }, } diff --git a/apps/sim/blocks/blocks/tavily.ts b/apps/sim/blocks/blocks/tavily.ts index f07d550ca9..04684372e3 100644 --- a/apps/sim/blocks/blocks/tavily.ts +++ b/apps/sim/blocks/blocks/tavily.ts @@ -97,7 +97,7 @@ export const TavilyBlock: BlockConfig = { }, outputs: { results: { type: 'json', description: 'Search results data' }, - answer: { type: 'any', description: 'Search answer' }, + answer: { type: 'string', description: 'Search answer' }, query: { type: 'string', 
description: 'Query used' }, content: { type: 'string', description: 'Extracted content' }, title: { type: 'string', description: 'Page title' }, diff --git a/apps/sim/blocks/blocks/telegram.ts b/apps/sim/blocks/blocks/telegram.ts index 3c67e6f621..eec1244b26 100644 --- a/apps/sim/blocks/blocks/telegram.ts +++ b/apps/sim/blocks/blocks/telegram.ts @@ -5,9 +5,9 @@ import type { TelegramMessageResponse } from '@/tools/telegram/types' export const TelegramBlock: BlockConfig = { type: 'telegram', name: 'Telegram', - description: 'Send a message through Telegram', + description: 'Send messages through Telegram or trigger workflows from Telegram events', longDescription: - 'Send messages to any Telegram channel using your Bot API key. Integrate automated notifications and alerts into your workflow to keep your team informed.', + 'Send messages to any Telegram channel using your Bot API key or trigger workflows from Telegram bot messages. Integrate automated notifications and alerts into your workflow to keep your team informed.', docsLink: 'https://docs.sim.ai/tools/telegram', category: 'tools', bgColor: '#E0E0E0', @@ -48,6 +48,15 @@ export const TelegramBlock: BlockConfig = { placeholder: 'Enter the message to send', required: true, }, + // TRIGGER MODE: Trigger configuration (only shown when trigger mode is active) + { + id: 'triggerConfig', + title: 'Trigger Configuration', + type: 'trigger-config', + layout: 'full', + triggerProvider: 'telegram', + availableTriggers: ['telegram_webhook'], + }, ], tools: { access: ['telegram_message'], @@ -58,7 +67,38 @@ export const TelegramBlock: BlockConfig = { text: { type: 'string', description: 'Message text' }, }, outputs: { - ok: { type: 'boolean', description: 'Success status' }, - result: { type: 'json', description: 'Message result' }, + // Send message operation outputs + ok: { type: 'boolean', description: 'API response success status' }, + result: { type: 'json', description: 'Complete message result object from Telegram 
API' }, + // Specific result fields + messageId: { type: 'number', description: 'Sent message ID' }, + chatId: { type: 'number', description: 'Chat ID where message was sent' }, + chatType: { type: 'string', description: 'Type of chat (private, group, supergroup, channel)' }, + username: { type: 'string', description: 'Chat username (if available)' }, + messageDate: { type: 'number', description: 'Unix timestamp of sent message' }, + messageText: { type: 'string', description: 'Text content of sent message' }, + // Webhook trigger outputs (incoming messages) + update_id: { type: 'number', description: 'Unique identifier for the update' }, + message_id: { type: 'number', description: 'Unique message identifier from webhook' }, + from_id: { type: 'number', description: 'User ID who sent the message' }, + from_username: { type: 'string', description: 'Username of the sender' }, + from_first_name: { type: 'string', description: 'First name of the sender' }, + from_last_name: { type: 'string', description: 'Last name of the sender' }, + chat_id: { type: 'number', description: 'Unique identifier for the chat' }, + chat_type: { + type: 'string', + description: 'Type of chat (private, group, supergroup, channel)', + }, + chat_title: { type: 'string', description: 'Title of the chat (for groups and channels)' }, + text: { type: 'string', description: 'Message text content from webhook' }, + date: { type: 'number', description: 'Date the message was sent (Unix timestamp)' }, + entities: { + type: 'json', + description: 'Special entities in the message (mentions, hashtags, etc.)', + }, + }, + triggers: { + enabled: true, + available: ['telegram_webhook'], }, } diff --git a/apps/sim/blocks/blocks/translate.ts b/apps/sim/blocks/blocks/translate.ts index a0f4460465..1fb761b71f 100644 --- a/apps/sim/blocks/blocks/translate.ts +++ b/apps/sim/blocks/blocks/translate.ts @@ -99,6 +99,6 @@ export const TranslateBlock: BlockConfig = { outputs: { content: { type: 'string', description: 
'Translated text' }, model: { type: 'string', description: 'Model used' }, - tokens: { type: 'any', description: 'Token usage' }, + tokens: { type: 'json', description: 'Token usage' }, }, } diff --git a/apps/sim/blocks/blocks/twilio.ts b/apps/sim/blocks/blocks/twilio.ts index c47c259286..e25f495919 100644 --- a/apps/sim/blocks/blocks/twilio.ts +++ b/apps/sim/blocks/blocks/twilio.ts @@ -68,8 +68,8 @@ export const TwilioSMSBlock: BlockConfig = { }, outputs: { success: { type: 'boolean', description: 'Send success status' }, - messageId: { type: 'any', description: 'Message identifier' }, - status: { type: 'any', description: 'Delivery status' }, - error: { type: 'any', description: 'Error information' }, + messageId: { type: 'string', description: 'Twilio message SID' }, + status: { type: 'string', description: 'SMS delivery status (queued, sent, delivered, etc.)' }, + error: { type: 'string', description: 'Error information if sending fails' }, }, } diff --git a/apps/sim/blocks/blocks/vision.ts b/apps/sim/blocks/blocks/vision.ts index 2dc9c29b5b..3ac23c16a5 100644 --- a/apps/sim/blocks/blocks/vision.ts +++ b/apps/sim/blocks/blocks/vision.ts @@ -62,7 +62,7 @@ export const VisionBlock: BlockConfig = { }, outputs: { content: { type: 'string', description: 'Analysis result' }, - model: { type: 'any', description: 'Model used' }, - tokens: { type: 'any', description: 'Token usage' }, + model: { type: 'string', description: 'Model used' }, + tokens: { type: 'number', description: 'Token usage' }, }, } diff --git a/apps/sim/blocks/blocks/wealthbox.ts b/apps/sim/blocks/blocks/wealthbox.ts index f50a3b2486..fdfbc34cde 100644 --- a/apps/sim/blocks/blocks/wealthbox.ts +++ b/apps/sim/blocks/blocks/wealthbox.ts @@ -255,13 +255,28 @@ export const WealthboxBlock: BlockConfig = { dueDate: { type: 'string', description: 'Due date' }, }, outputs: { - note: { type: 'any', description: 'Note data' }, - notes: { type: 'any', description: 'Notes list' }, - contact: { type: 'any', 
description: 'Contact data' }, - contacts: { type: 'any', description: 'Contacts list' }, - task: { type: 'any', description: 'Task data' }, - tasks: { type: 'any', description: 'Tasks list' }, - metadata: { type: 'json', description: 'Operation metadata' }, - success: { type: 'any', description: 'Success status' }, + note: { + type: 'json', + description: 'Single note object with ID, content, creator, and linked contacts', + }, + notes: { type: 'json', description: 'Array of note objects from bulk read operations' }, + contact: { + type: 'json', + description: 'Single contact object with name, email, phone, and background info', + }, + contacts: { type: 'json', description: 'Array of contact objects from bulk read operations' }, + task: { + type: 'json', + description: 'Single task object with name, due date, description, and priority', + }, + tasks: { type: 'json', description: 'Array of task objects from bulk read operations' }, + metadata: { + type: 'json', + description: 'Operation metadata including item IDs, types, and operation details', + }, + success: { + type: 'boolean', + description: 'Boolean indicating whether the operation completed successfully', + }, }, } diff --git a/apps/sim/blocks/blocks/webhook.ts b/apps/sim/blocks/blocks/webhook.ts index 96292ca666..d5b870fa00 100644 --- a/apps/sim/blocks/blocks/webhook.ts +++ b/apps/sim/blocks/blocks/webhook.ts @@ -39,6 +39,7 @@ export const WebhookBlock: BlockConfig = { category: 'triggers', icon: WebhookIcon, bgColor: '#10B981', // Green color for triggers + hideFromToolbar: true, // Hidden for backwards compatibility - use generic webhook trigger instead subBlocks: [ { diff --git a/apps/sim/blocks/blocks/whatsapp.ts b/apps/sim/blocks/blocks/whatsapp.ts index 6d62f13af9..dde2c93c81 100644 --- a/apps/sim/blocks/blocks/whatsapp.ts +++ b/apps/sim/blocks/blocks/whatsapp.ts @@ -60,8 +60,19 @@ export const WhatsAppBlock: BlockConfig = { accessToken: { type: 'string', description: 'WhatsApp access token' }, }, 
outputs: { + // Send operation outputs success: { type: 'boolean', description: 'Send success status' }, - messageId: { type: 'any', description: 'Message identifier' }, - error: { type: 'any', description: 'Error information' }, + messageId: { type: 'string', description: 'WhatsApp message identifier' }, + error: { type: 'string', description: 'Error information if sending fails' }, + // Webhook trigger outputs + from: { type: 'string', description: 'Sender phone number' }, + to: { type: 'string', description: 'Recipient phone number' }, + text: { type: 'string', description: 'Message text content' }, + timestamp: { type: 'string', description: 'Message timestamp' }, + type: { type: 'string', description: 'Message type (text, image, etc.)' }, + }, + triggers: { + enabled: true, + available: ['whatsapp_webhook'], }, } diff --git a/apps/sim/blocks/blocks/x.ts b/apps/sim/blocks/blocks/x.ts index 03ccba5d89..66667a8e63 100644 --- a/apps/sim/blocks/blocks/x.ts +++ b/apps/sim/blocks/blocks/x.ts @@ -210,12 +210,12 @@ export const XBlock: BlockConfig = { }, outputs: { tweet: { type: 'json', description: 'Tweet data' }, - replies: { type: 'any', description: 'Tweet replies' }, - context: { type: 'any', description: 'Tweet context' }, + replies: { type: 'json', description: 'Tweet replies' }, + context: { type: 'json', description: 'Tweet context' }, tweets: { type: 'json', description: 'Tweets data' }, - includes: { type: 'any', description: 'Additional data' }, + includes: { type: 'json', description: 'Additional data' }, meta: { type: 'json', description: 'Response metadata' }, user: { type: 'json', description: 'User profile data' }, - recentTweets: { type: 'any', description: 'Recent tweets data' }, + recentTweets: { type: 'json', description: 'Recent tweets data' }, }, } diff --git a/apps/sim/blocks/registry.ts b/apps/sim/blocks/registry.ts index 9cde501eb0..b052b80ae4 100644 --- a/apps/sim/blocks/registry.ts +++ b/apps/sim/blocks/registry.ts @@ -18,6 +18,7 @@ import 
{ ExaBlock } from '@/blocks/blocks/exa' import { FileBlock } from '@/blocks/blocks/file' import { FirecrawlBlock } from '@/blocks/blocks/firecrawl' import { FunctionBlock } from '@/blocks/blocks/function' +import { GenericWebhookBlock } from '@/blocks/blocks/generic_webhook' import { GitHubBlock } from '@/blocks/blocks/github' import { GmailBlock } from '@/blocks/blocks/gmail' import { GoogleSearchBlock } from '@/blocks/blocks/google' @@ -91,6 +92,7 @@ export const registry: Record = { firecrawl: FirecrawlBlock, file: FileBlock, function: FunctionBlock, + generic_webhook: GenericWebhookBlock, github: GitHubBlock, gmail: GmailBlock, google_calendar: GoogleCalendarBlock, diff --git a/apps/sim/blocks/types.ts b/apps/sim/blocks/types.ts index 8cd1391742..d1a6d7ca18 100644 --- a/apps/sim/blocks/types.ts +++ b/apps/sim/blocks/types.ts @@ -36,6 +36,7 @@ export type SubBlockType = | 'time-input' // Time input | 'oauth-input' // OAuth credential selector | 'webhook-config' // Webhook configuration + | 'trigger-config' // Trigger configuration | 'schedule-config' // Schedule status and information | 'file-selector' // File selector for Google Drive, etc. | 'project-selector' // Project selector for Jira, Discord, etc. 
@@ -167,6 +168,9 @@ export interface SubBlockConfig { placeholder?: string // Custom placeholder for the prompt input maintainHistory?: boolean // Whether to maintain conversation history } + // Trigger-specific configuration + availableTriggers?: string[] // List of trigger IDs available for this subblock + triggerProvider?: string // Which provider's triggers to show } // Main block definition @@ -195,6 +199,10 @@ export interface BlockConfig { } } hideFromToolbar?: boolean + triggers?: { + enabled: boolean + available: string[] // List of trigger IDs this block supports + } } // Output configuration rules diff --git a/apps/sim/components/ui/tag-dropdown.test.tsx b/apps/sim/components/ui/tag-dropdown.test.tsx index 2889f7e139..c2bf947017 100644 --- a/apps/sim/components/ui/tag-dropdown.test.tsx +++ b/apps/sim/components/ui/tag-dropdown.test.tsx @@ -4,6 +4,115 @@ import { extractFieldsFromSchema, parseResponseFormatSafely } from '@/lib/respon import type { BlockState } from '@/stores/workflows/workflow/types' import { generateLoopBlocks } from '@/stores/workflows/workflow/utils' +// Mock getTool function for testing tool output types +vi.mock('@/lib/get-tool', () => ({ + getTool: vi.fn((toolId: string) => { + // Mock different tool configurations for testing + const mockTools: Record = { + exa_search: { + outputs: { + results: { + type: 'array', + description: 'Search results with titles, URLs, and text snippets', + items: { + type: 'object', + properties: { + title: { type: 'string', description: 'The title of the search result' }, + url: { type: 'string', description: 'The URL of the search result' }, + score: { type: 'number', description: 'Relevance score for the search result' }, + }, + }, + }, + }, + }, + pinecone_search_text: { + outputs: { + matches: { + type: 'array', + description: 'Search results with ID, score, and metadata', + items: { + type: 'object', + properties: { + id: { type: 'string', description: 'Vector ID' }, + score: { type: 'number', 
description: 'Similarity score' }, + metadata: { type: 'object', description: 'Associated metadata' }, + }, + }, + }, + usage: { + type: 'object', + description: 'Usage statistics including tokens, read units, and rerank units', + properties: { + total_tokens: { type: 'number', description: 'Total tokens used for embedding' }, + read_units: { type: 'number', description: 'Read units consumed' }, + rerank_units: { type: 'number', description: 'Rerank units used' }, + }, + }, + }, + }, + notion_query_database: { + outputs: { + content: { + type: 'string', + description: 'Formatted list of database entries with their properties', + }, + metadata: { + type: 'object', + description: + 'Query metadata including total results count, pagination info, and raw results array', + properties: { + totalResults: { type: 'number', description: 'Number of results returned' }, + hasMore: { type: 'boolean', description: 'Whether more results are available' }, + results: { + type: 'array', + description: 'Raw Notion page objects', + items: { + type: 'object', + properties: { + id: { type: 'string', description: 'Page ID' }, + properties: { type: 'object', description: 'Page properties' }, + }, + }, + }, + }, + }, + }, + }, + } + return mockTools[toolId] || null + }), +})) + +// Mock getBlock function for testing +vi.mock('@/lib/get-block', () => ({ + getBlock: vi.fn((blockType: string) => { + const mockBlockConfigs: Record = { + exa: { + tools: { + config: { + tool: ({ operation }: { operation: string }) => `exa_${operation}`, + }, + }, + }, + tools: { + tools: { + config: { + tool: ({ operation }: { operation: string }) => `pinecone_${operation}`, + }, + }, + }, + notion: { + tools: { + config: { + tool: ({ operation }: { operation: string }) => `notion_${operation}`, + }, + }, + }, + } + return mockBlockConfigs[blockType] || null + }), +})) + vi.mock('@/stores/workflows/workflow/store', () => ({ useWorkflowStore: vi.fn(() => ({ blocks: {}, @@ -34,6 +143,633 @@ 
vi.mock('@/stores/workflows/subblock/store', () => ({ })), })) +// Mock trigger functions +vi.mock('@/triggers/utils', () => ({ + getTriggersByProvider: vi.fn((provider: string) => { + const mockTriggers: Record = { + outlook: [ + { + id: 'outlook_poller', + name: 'Outlook Email Trigger', + outputs: { + email: { + id: { type: 'string', description: 'Outlook message ID' }, + conversationId: { type: 'string', description: 'Outlook conversation ID' }, + subject: { type: 'string', description: 'Email subject line' }, + hasAttachments: { type: 'boolean', description: 'Whether email has attachments' }, + isRead: { type: 'boolean', description: 'Whether email is read' }, + from: { type: 'string', description: 'Email sender' }, + to: { type: 'string', description: 'Email recipient' }, + cc: { type: 'string', description: 'CC recipients' }, + date: { type: 'string', description: 'Email date' }, + bodyText: { type: 'string', description: 'Email body text' }, + bodyHtml: { type: 'string', description: 'Email body HTML' }, + folderId: { type: 'string', description: 'Folder ID' }, + messageId: { type: 'string', description: 'Message ID' }, + threadId: { type: 'string', description: 'Thread ID' }, + }, + timestamp: { type: 'string', description: 'Event timestamp' }, + rawEmail: { + type: 'json', + description: 'Complete raw email data from Microsoft Graph API', + }, + }, + }, + ], + slack: [ + { + id: 'slack_message', + name: 'Slack Message Trigger', + outputs: { + message: { + text: { type: 'string', description: 'Message text' }, + user: { type: 'string', description: 'User ID' }, + channel: { type: 'string', description: 'Channel ID' }, + timestamp: { type: 'string', description: 'Message timestamp' }, + }, + channel: { type: 'string', description: 'Channel information' }, + }, + }, + ], + } + return mockTriggers[provider] || [] + }), +})) + +describe('TagDropdown Trigger Output Parsing', () => { + it.concurrent('should parse trigger outputs correctly for outlook trigger', () 
=> { + // Mock getTriggersByProvider function directly + const getTriggersByProvider = vi.fn((provider: string) => { + const mockTriggers: Record = { + outlook: [ + { + id: 'outlook_poller', + name: 'Outlook Email Trigger', + outputs: { + email: { + id: { type: 'string', description: 'Outlook message ID' }, + conversationId: { type: 'string', description: 'Outlook conversation ID' }, + subject: { type: 'string', description: 'Email subject line' }, + hasAttachments: { type: 'boolean', description: 'Whether email has attachments' }, + isRead: { type: 'boolean', description: 'Whether email is read' }, + }, + timestamp: { type: 'string', description: 'Event timestamp' }, + rawEmail: { type: 'json', description: 'Complete raw email data' }, + }, + }, + ], + } + return mockTriggers[provider] || [] + }) + + const triggers = getTriggersByProvider('outlook') + const firstTrigger = triggers[0] + + expect(firstTrigger).toBeDefined() + expect(firstTrigger.outputs).toBeDefined() + expect(firstTrigger.outputs.email).toBeDefined() + expect(firstTrigger.outputs.timestamp).toBeDefined() + expect(firstTrigger.outputs.rawEmail).toBeDefined() + + // Verify email nested properties + expect(firstTrigger.outputs.email.id.type).toBe('string') + expect(firstTrigger.outputs.email.subject.type).toBe('string') + expect(firstTrigger.outputs.email.hasAttachments.type).toBe('boolean') + expect(firstTrigger.outputs.email.isRead.type).toBe('boolean') + }) + + it.concurrent( + 'should get correct output type for trigger paths using getOutputTypeForPath', + () => { + // Mock getTriggersByProvider function directly + const getTriggersByProvider = vi.fn((provider: string) => { + const mockTriggers: Record = { + outlook: [ + { + id: 'outlook_poller', + outputs: { + email: { + id: { type: 'string' }, + subject: { type: 'string' }, + hasAttachments: { type: 'boolean' }, + isRead: { type: 'boolean' }, + from: { type: 'string' }, + to: { type: 'string' }, + }, + timestamp: { type: 'string' }, + rawEmail: 
{ type: 'json' }, + }, + }, + ], + } + return mockTriggers[provider] || [] + }) + + // Mock the getOutputTypeForPath function behavior for triggers + const getOutputTypeForPath = ( + block: any, + blockConfig: any, + blockId: string, + outputPath: string + ): string => { + if (block?.triggerMode && blockConfig?.triggers?.enabled) { + const triggers = getTriggersByProvider(block.type) + const firstTrigger = triggers[0] + + if (firstTrigger?.outputs) { + const pathParts = outputPath.split('.') + let currentObj: any = firstTrigger.outputs + + for (const part of pathParts) { + if (currentObj && typeof currentObj === 'object') { + currentObj = currentObj[part] + } else { + break + } + } + + if ( + currentObj && + typeof currentObj === 'object' && + 'type' in currentObj && + currentObj.type + ) { + return currentObj.type + } + } + } + + return 'any' + } + + const block = { + id: 'outlook1', + type: 'outlook', + triggerMode: true, + } + + const blockConfig = { + triggers: { enabled: true }, + } + + // Test top-level trigger outputs + expect(getOutputTypeForPath(block, blockConfig, 'outlook1', 'timestamp')).toBe('string') + expect(getOutputTypeForPath(block, blockConfig, 'outlook1', 'rawEmail')).toBe('json') + + // Test nested email properties + expect(getOutputTypeForPath(block, blockConfig, 'outlook1', 'email.id')).toBe('string') + expect(getOutputTypeForPath(block, blockConfig, 'outlook1', 'email.subject')).toBe('string') + expect(getOutputTypeForPath(block, blockConfig, 'outlook1', 'email.hasAttachments')).toBe( + 'boolean' + ) + expect(getOutputTypeForPath(block, blockConfig, 'outlook1', 'email.isRead')).toBe('boolean') + expect(getOutputTypeForPath(block, blockConfig, 'outlook1', 'email.from')).toBe('string') + expect(getOutputTypeForPath(block, blockConfig, 'outlook1', 'email.to')).toBe('string') + + // Test non-existent paths + expect(getOutputTypeForPath(block, blockConfig, 'outlook1', 'email.nonexistent')).toBe('any') + expect(getOutputTypeForPath(block, 
blockConfig, 'outlook1', 'nonexistent')).toBe('any') + } + ) + + it.concurrent('should handle trigger output navigation for parent objects', () => { + const getTriggersByProvider = vi.fn((provider: string) => { + const mockTriggers: Record = { + outlook: [ + { + outputs: { + email: { + id: { type: 'string' }, + subject: { type: 'string' }, + }, + timestamp: { type: 'string' }, + }, + }, + ], + } + return mockTriggers[provider] || [] + }) + + const getOutputTypeForPath = ( + block: any, + blockConfig: any, + blockId: string, + outputPath: string + ): string => { + if (block?.triggerMode && blockConfig?.triggers?.enabled) { + const triggers = getTriggersByProvider(block.type) + const firstTrigger = triggers[0] + + if (firstTrigger?.outputs) { + const pathParts = outputPath.split('.') + let currentObj: any = firstTrigger.outputs + + for (const part of pathParts) { + if (currentObj && typeof currentObj === 'object') { + currentObj = currentObj[part] + } else { + break + } + } + + if ( + currentObj && + typeof currentObj === 'object' && + 'type' in currentObj && + currentObj.type + ) { + return currentObj.type + } + + // Check if currentObj is a parent object with nested properties + if (currentObj && typeof currentObj === 'object' && !('type' in currentObj)) { + return 'object' + } + } + } + + return 'any' + } + + const block = { + id: 'outlook1', + type: 'outlook', + triggerMode: true, + } + + const blockConfig = { + triggers: { enabled: true }, + } + + // Test parent object (email should be treated as object type) + expect(getOutputTypeForPath(block, blockConfig, 'outlook1', 'email')).toBe('object') + }) + + it.concurrent('should return "any" for non-trigger blocks', () => { + const getOutputTypeForPath = ( + block: any, + blockConfig: any, + blockId: string, + outputPath: string + ): string => { + if (block?.triggerMode && blockConfig?.triggers?.enabled) { + const { getTriggersByProvider } = require('@/triggers/utils') + const triggers = 
getTriggersByProvider(block.type) + const firstTrigger = triggers[0] + + if (firstTrigger?.outputs) { + const pathParts = outputPath.split('.') + let currentObj: any = firstTrigger.outputs + + for (const part of pathParts) { + if (currentObj && typeof currentObj === 'object') { + currentObj = currentObj[part] + } else { + break + } + } + + if ( + currentObj && + typeof currentObj === 'object' && + 'type' in currentObj && + currentObj.type + ) { + return currentObj.type + } + } + } + + return 'any' + } + + // Test block without trigger mode + const normalBlock = { + id: 'outlook1', + type: 'outlook', + triggerMode: false, + } + + const blockConfig = { + triggers: { enabled: true }, + } + + expect(getOutputTypeForPath(normalBlock, blockConfig, 'outlook1', 'email.id')).toBe('any') + + // Test block with trigger mode but triggers not enabled + const triggerBlockNoConfig = { + id: 'outlook1', + type: 'outlook', + triggerMode: true, + } + + const noTriggersConfig = { + triggers: { enabled: false }, + } + + expect( + getOutputTypeForPath(triggerBlockNoConfig, noTriggersConfig, 'outlook1', 'email.id') + ).toBe('any') + }) + + it.concurrent('should handle different trigger providers correctly', () => { + const getTriggersByProvider = vi.fn((provider: string) => { + const mockTriggers: Record = { + slack: [ + { + outputs: { + message: { + text: { type: 'string' }, + user: { type: 'string' }, + channel: { type: 'string' }, + }, + channel: { type: 'string' }, + }, + }, + ], + } + return mockTriggers[provider] || [] + }) + + const getOutputTypeForPath = ( + block: any, + blockConfig: any, + blockId: string, + outputPath: string + ): string => { + if (block?.triggerMode && blockConfig?.triggers?.enabled) { + const triggers = getTriggersByProvider(block.type) + const firstTrigger = triggers[0] + + if (firstTrigger?.outputs) { + const pathParts = outputPath.split('.') + let currentObj: any = firstTrigger.outputs + + for (const part of pathParts) { + if (currentObj && typeof 
currentObj === 'object') { + currentObj = currentObj[part] + } else { + break + } + } + + if ( + currentObj && + typeof currentObj === 'object' && + 'type' in currentObj && + currentObj.type + ) { + return currentObj.type + } + } + } + + return 'any' + } + + // Test Slack trigger + const slackBlock = { + id: 'slack1', + type: 'slack', + triggerMode: true, + } + + const blockConfig = { + triggers: { enabled: true }, + } + + expect(getOutputTypeForPath(slackBlock, blockConfig, 'slack1', 'message.text')).toBe('string') + expect(getOutputTypeForPath(slackBlock, blockConfig, 'slack1', 'message.user')).toBe('string') + expect(getOutputTypeForPath(slackBlock, blockConfig, 'slack1', 'channel')).toBe('string') + }) + + it.concurrent('should handle malformed or missing trigger configurations gracefully', () => { + const getOutputTypeForPath = ( + block: any, + blockConfig: any, + blockId: string, + outputPath: string + ): string => { + if (block?.triggerMode && blockConfig?.triggers?.enabled) { + try { + const { getTriggersByProvider } = require('@/triggers/utils') + const triggers = getTriggersByProvider(block.type) + const firstTrigger = triggers[0] + + if (firstTrigger?.outputs) { + const pathParts = outputPath.split('.') + let currentObj: any = firstTrigger.outputs + + for (const part of pathParts) { + if (currentObj && typeof currentObj === 'object') { + currentObj = currentObj[part] + } else { + break + } + } + + if ( + currentObj && + typeof currentObj === 'object' && + 'type' in currentObj && + currentObj.type + ) { + return currentObj.type + } + } + } catch (error) { + return 'any' + } + } + + return 'any' + } + + // Test with unknown trigger provider + const unknownBlock = { + id: 'unknown1', + type: 'unknown_provider', + triggerMode: true, + } + + const blockConfig = { + triggers: { enabled: true }, + } + + expect(getOutputTypeForPath(unknownBlock, blockConfig, 'unknown1', 'any.path')).toBe('any') + + // Test with null/undefined configurations + 
expect(getOutputTypeForPath(null, blockConfig, 'test', 'path')).toBe('any') + expect(getOutputTypeForPath(unknownBlock, null, 'test', 'path')).toBe('any') + }) + + it.concurrent('should generate correct trigger output tags for dropdown', () => { + const getTriggersByProvider = vi.fn((provider: string) => { + const mockTriggers: Record = { + outlook: [ + { + outputs: { + email: { + id: { type: 'string' }, + subject: { type: 'string' }, + hasAttachments: { type: 'boolean' }, + isRead: { type: 'boolean' }, + }, + timestamp: { type: 'string' }, + rawEmail: { type: 'json' }, + }, + }, + ], + slack: [ + { + outputs: { + message: { + text: { type: 'string' }, + user: { type: 'string' }, + }, + channel: { type: 'string' }, + }, + }, + ], + } + return mockTriggers[provider] || [] + }) + + // Mock trigger output tag generation + const generateTriggerOutputTags = (blockType: string, blockId: string): string[] => { + const triggers = getTriggersByProvider(blockType) + const firstTrigger = triggers[0] + + if (!firstTrigger?.outputs) return [] + + const tags: string[] = [] + const normalizedBlockId = blockId.replace(/\s+/g, '').toLowerCase() + + const traverseOutputs = (outputs: any, prefix = '') => { + for (const [key, output] of Object.entries(outputs)) { + const currentPath = prefix ? 
`${prefix}.${key}` : key + const fullTag = `${normalizedBlockId}.${currentPath}` + + tags.push(fullTag) + + // If this is a parent object with nested properties, recurse + if (output && typeof output === 'object' && !('type' in output)) { + traverseOutputs(output, currentPath) + } + } + } + + traverseOutputs(firstTrigger.outputs) + return tags + } + + // Test Outlook trigger tags + const outlookTags = generateTriggerOutputTags('outlook', 'Outlook 1') + + expect(outlookTags).toContain('outlook1.email') + expect(outlookTags).toContain('outlook1.email.id') + expect(outlookTags).toContain('outlook1.email.subject') + expect(outlookTags).toContain('outlook1.email.hasAttachments') + expect(outlookTags).toContain('outlook1.email.isRead') + expect(outlookTags).toContain('outlook1.timestamp') + expect(outlookTags).toContain('outlook1.rawEmail') + + // Test Slack trigger tags + const slackTags = generateTriggerOutputTags('slack', 'Slack 1') + + expect(slackTags).toContain('slack1.message') + expect(slackTags).toContain('slack1.message.text') + expect(slackTags).toContain('slack1.message.user') + expect(slackTags).toContain('slack1.channel') + }) + + it.concurrent('should correctly identify trigger vs tool output resolution', () => { + const getTriggersByProvider = vi.fn((provider: string) => { + const mockTriggers: Record = { + outlook: [ + { + outputs: { + email: { + id: { type: 'string' }, + }, + }, + }, + ], + } + return mockTriggers[provider] || [] + }) + + const getOutputTypeForPath = ( + block: any, + blockConfig: any, + blockId: string, + outputPath: string + ): string => { + if (block?.triggerMode && blockConfig?.triggers?.enabled) { + // Trigger mode logic + const triggers = getTriggersByProvider(block.type) + const firstTrigger = triggers[0] + + if (firstTrigger?.outputs) { + const pathParts = outputPath.split('.') + let currentObj: any = firstTrigger.outputs + + for (const part of pathParts) { + if (currentObj && typeof currentObj === 'object') { + currentObj = 
currentObj[part] + } else { + break + } + } + + if ( + currentObj && + typeof currentObj === 'object' && + 'type' in currentObj && + currentObj.type + ) { + return currentObj.type + } + } + } else { + // Tool mode logic - simplified mock + if (blockConfig && outputPath === 'results') { + return 'array' + } + } + + return 'any' + } + + // Test trigger mode + const triggerBlock = { + id: 'outlook1', + type: 'outlook', + triggerMode: true, + } + + const triggerConfig = { + triggers: { enabled: true }, + } + + expect(getOutputTypeForPath(triggerBlock, triggerConfig, 'outlook1', 'email.id')).toBe('string') + + // Test tool mode + const toolBlock = { + id: 'outlook1', + type: 'outlook', + triggerMode: false, + } + + const toolConfig = { + triggers: { enabled: false }, + } + + expect(getOutputTypeForPath(toolBlock, toolConfig, 'outlook1', 'results')).toBe('array') + expect(getOutputTypeForPath(toolBlock, toolConfig, 'outlook1', 'email.id')).toBe('any') + }) +}) + describe('TagDropdown Loop Suggestions', () => { it.concurrent('should generate correct loop suggestions for forEach loops', () => { const blocks: Record = { @@ -790,3 +1526,466 @@ describe('TagDropdown Response Format Support', () => { ]) }) }) + +describe('TagDropdown Type Display Functionality', () => { + it.concurrent( + 'should extract types correctly from tool outputs using generateOutputPathsWithTypes', + () => { + // Test with Exa search tool outputs + const exaSearchOutputs = { + results: { + type: 'array', + description: 'Search results with titles, URLs, and text snippets', + items: { + type: 'object', + properties: { + title: { type: 'string', description: 'The title of the search result' }, + url: { type: 'string', description: 'The URL of the search result' }, + score: { type: 'number', description: 'Relevance score for the search result' }, + }, + }, + }, + } + + // Mock the generateOutputPathsWithTypes function behavior + const generateOutputPathsWithTypes = ( + outputs: Record, + prefix = '' + ): 
Array<{ path: string; type: string }> => { + const paths: Array<{ path: string; type: string }> = [] + + for (const [key, output] of Object.entries(outputs)) { + const currentPath = prefix ? `${prefix}.${key}` : key + if (output && typeof output === 'object' && 'type' in output) { + paths.push({ path: currentPath, type: output.type as string }) + + // Handle nested properties + if ((output as any).properties) { + const nestedPaths = generateOutputPathsWithTypes( + (output as any).properties, + currentPath + ) + paths.push(...nestedPaths) + } + + // Handle array items properties + if ((output as any).items?.properties) { + const itemPaths = generateOutputPathsWithTypes( + (output as any).items.properties, + currentPath + ) + paths.push(...itemPaths) + } + } + } + + return paths + } + + const paths = generateOutputPathsWithTypes(exaSearchOutputs) + + expect(paths).toEqual([ + { path: 'results', type: 'array' }, + { path: 'results.title', type: 'string' }, + { path: 'results.url', type: 'string' }, + { path: 'results.score', type: 'number' }, + ]) + } + ) + + it.concurrent('should extract types correctly for complex nested structures', () => { + // Test with Pinecone tool outputs + const pineconeOutputs = { + matches: { + type: 'array', + description: 'Search results with ID, score, and metadata', + items: { + type: 'object', + properties: { + id: { type: 'string', description: 'Vector ID' }, + score: { type: 'number', description: 'Similarity score' }, + metadata: { type: 'object', description: 'Associated metadata' }, + }, + }, + }, + usage: { + type: 'object', + description: 'Usage statistics including tokens, read units, and rerank units', + properties: { + total_tokens: { type: 'number', description: 'Total tokens used for embedding' }, + read_units: { type: 'number', description: 'Read units consumed' }, + rerank_units: { type: 'number', description: 'Rerank units used' }, + }, + }, + } + + const generateOutputPathsWithTypes = ( + outputs: Record, + prefix = '' 
+ ): Array<{ path: string; type: string }> => { + const paths: Array<{ path: string; type: string }> = [] + + for (const [key, output] of Object.entries(outputs)) { + const currentPath = prefix ? `${prefix}.${key}` : key + if (output && typeof output === 'object' && 'type' in output) { + paths.push({ path: currentPath, type: output.type as string }) + + if ((output as any).properties) { + const nestedPaths = generateOutputPathsWithTypes( + (output as any).properties, + currentPath + ) + paths.push(...nestedPaths) + } + + if ((output as any).items?.properties) { + const itemPaths = generateOutputPathsWithTypes( + (output as any).items.properties, + currentPath + ) + paths.push(...itemPaths) + } + } + } + + return paths + } + + const paths = generateOutputPathsWithTypes(pineconeOutputs) + + expect(paths).toEqual([ + { path: 'matches', type: 'array' }, + { path: 'matches.id', type: 'string' }, + { path: 'matches.score', type: 'number' }, + { path: 'matches.metadata', type: 'object' }, + { path: 'usage', type: 'object' }, + { path: 'usage.total_tokens', type: 'number' }, + { path: 'usage.read_units', type: 'number' }, + { path: 'usage.rerank_units', type: 'number' }, + ]) + }) + + it.concurrent('should get tool output type for specific paths using getToolOutputType', () => { + // Mock block configuration for Exa + const blockConfig = { + tools: { + config: { + tool: ({ operation }: { operation: string }) => `exa_${operation}`, + }, + }, + } + + // Mock getToolOutputType function behavior + const getToolOutputType = (blockConfig: any, operation: string, path: string): string => { + // Get tool ID from block config + const toolId = blockConfig?.tools?.config?.tool?.({ operation }) + if (!toolId) return '' + + // Mock tool lookup (would use getTool in real implementation) + const mockTools: Record = { + exa_search: { + outputs: { + results: { + type: 'array', + items: { + type: 'object', + properties: { + title: { type: 'string' }, + url: { type: 'string' }, + score: { 
type: 'number' }, + }, + }, + }, + }, + }, + } + + const tool = mockTools[toolId] + if (!tool?.outputs) return '' + + // Navigate to the specific path + const pathParts = path.split('.') + let current = tool.outputs + + for (const part of pathParts) { + if (!current[part]) { + // Check if we're looking at array items + if (current.items?.properties?.[part]) { + current = current.items.properties + } else { + return '' + } + } + current = current[part] + } + + return current?.type || '' + } + + // Test various path types + expect(getToolOutputType(blockConfig, 'search', 'results')).toBe('array') + expect(getToolOutputType(blockConfig, 'search', 'results.title')).toBe('string') + expect(getToolOutputType(blockConfig, 'search', 'results.url')).toBe('string') + expect(getToolOutputType(blockConfig, 'search', 'results.score')).toBe('number') + expect(getToolOutputType(blockConfig, 'search', 'nonexistent')).toBe('') + }) + + it.concurrent('should generate tool output paths with type information', () => { + // Mock the generateToolOutputPaths function that returns both path and type + const generateToolOutputPaths = ( + blockConfig: any, + operation: string + ): Array<{ path: string; type: string }> => { + const toolId = blockConfig?.tools?.config?.tool?.({ operation }) + if (!toolId) return [] + + // Mock tool configurations + const mockTools: Record = { + exa_search: { + outputs: { + results: { + type: 'array', + items: { + type: 'object', + properties: { + title: { type: 'string' }, + url: { type: 'string' }, + score: { type: 'number' }, + }, + }, + }, + }, + }, + } + + const tool = mockTools[toolId] + if (!tool?.outputs) return [] + + const paths: Array<{ path: string; type: string }> = [] + + const traverse = (obj: any, prefix = '') => { + for (const [key, value] of Object.entries(obj)) { + const currentPath = prefix ? 
`${prefix}.${key}` : key + if (value && typeof value === 'object' && 'type' in value) { + paths.push({ path: currentPath, type: (value as any).type }) + + if ((value as any).properties) { + traverse((value as any).properties, currentPath) + } + + if ((value as any).items?.properties) { + traverse((value as any).items.properties, currentPath) + } + } + } + } + + traverse(tool.outputs) + return paths + } + + const blockConfig = { + tools: { + config: { + tool: ({ operation }: { operation: string }) => `exa_${operation}`, + }, + }, + } + + const paths = generateToolOutputPaths(blockConfig, 'search') + + expect(paths).toEqual([ + { path: 'results', type: 'array' }, + { path: 'results.title', type: 'string' }, + { path: 'results.url', type: 'string' }, + { path: 'results.score', type: 'number' }, + ]) + }) + + it.concurrent('should handle missing or invalid tool configurations gracefully', () => { + const getToolOutputType = (blockConfig: any, operation: string, path: string): string => { + try { + const toolId = blockConfig?.tools?.config?.tool?.({ operation }) + if (!toolId) return '' + + // Mock empty tool configurations + const mockTools: Record = {} + const tool = mockTools[toolId] + if (!tool?.outputs) return '' + + return '' + } catch (error) { + return '' + } + } + + // Test with null/undefined block config + expect(getToolOutputType(null, 'search', 'results')).toBe('') + expect(getToolOutputType(undefined, 'search', 'results')).toBe('') + expect(getToolOutputType({}, 'search', 'results')).toBe('') + + // Test with invalid block config structure + const invalidBlockConfig = { tools: null } + expect(getToolOutputType(invalidBlockConfig, 'search', 'results')).toBe('') + + // Test with missing tool function + const incompleteBlockConfig = { + tools: { + config: {}, + }, + } + expect(getToolOutputType(incompleteBlockConfig, 'search', 'results')).toBe('') + }) + + it.concurrent( + 'should only show types when reliable data is available from tool configuration', + () 
=> { + // Mock tag info creation that only includes type when available + const createTagInfo = ( + blockConfig: any, + operation: string, + path: string + ): { type?: string; description?: string } => { + const getToolOutputType = (blockConfig: any, operation: string, path: string): string => { + const toolId = blockConfig?.tools?.config?.tool?.({ operation }) + if (!toolId) return '' + + const mockTools: Record = { + exa_search: { + outputs: { + results: { + type: 'array', + items: { + type: 'object', + properties: { + title: { type: 'string' }, + }, + }, + }, + }, + }, + } + + const tool = mockTools[toolId] + if (!tool?.outputs) return '' + + const pathParts = path.split('.') + let current = tool.outputs + + for (const part of pathParts) { + if (!current[part]) { + if ((current as any).items?.properties?.[part]) { + current = (current as any).items.properties + } else { + return '' + } + } + current = current[part] + } + + return (current as any)?.type || '' + } + + const type = getToolOutputType(blockConfig, operation, path) + + // Only return type information if we have reliable data + if (type) { + return { type } + } + + return {} + } + + const blockConfig = { + tools: { + config: { + tool: ({ operation }: { operation: string }) => `exa_${operation}`, + }, + }, + } + + // Should have type for valid paths + expect(createTagInfo(blockConfig, 'search', 'results')).toEqual({ type: 'array' }) + expect(createTagInfo(blockConfig, 'search', 'results.title')).toEqual({ type: 'string' }) + + // Should not have type for invalid paths + expect(createTagInfo(blockConfig, 'search', 'nonexistent')).toEqual({}) + expect(createTagInfo(blockConfig, 'invalid_operation', 'results')).toEqual({}) + expect(createTagInfo(null, 'search', 'results')).toEqual({}) + } + ) + + it.concurrent('should handle deeply nested structures correctly', () => { + // Test with Notion query_database tool structure + const notionOutputs = { + content: { + type: 'string', + description: 'Formatted list 
of database entries with their properties', + }, + metadata: { + type: 'object', + description: + 'Query metadata including total results count, pagination info, and raw results array', + properties: { + totalResults: { type: 'number', description: 'Number of results returned' }, + hasMore: { type: 'boolean', description: 'Whether more results are available' }, + results: { + type: 'array', + description: 'Raw Notion page objects', + items: { + type: 'object', + properties: { + id: { type: 'string', description: 'Page ID' }, + properties: { type: 'object', description: 'Page properties' }, + }, + }, + }, + }, + }, + } + + const generateOutputPathsWithTypes = ( + outputs: Record, + prefix = '' + ): Array<{ path: string; type: string }> => { + const paths: Array<{ path: string; type: string }> = [] + + for (const [key, output] of Object.entries(outputs)) { + const currentPath = prefix ? `${prefix}.${key}` : key + if (output && typeof output === 'object' && 'type' in output) { + paths.push({ path: currentPath, type: output.type as string }) + + if ((output as any).properties) { + const nestedPaths = generateOutputPathsWithTypes( + (output as any).properties, + currentPath + ) + paths.push(...nestedPaths) + } + + if ((output as any).items?.properties) { + const itemPaths = generateOutputPathsWithTypes( + (output as any).items.properties, + currentPath + ) + paths.push(...itemPaths) + } + } + } + + return paths + } + + const paths = generateOutputPathsWithTypes(notionOutputs) + + expect(paths).toEqual([ + { path: 'content', type: 'string' }, + { path: 'metadata', type: 'object' }, + { path: 'metadata.totalResults', type: 'number' }, + { path: 'metadata.hasMore', type: 'boolean' }, + { path: 'metadata.results', type: 'array' }, + { path: 'metadata.results.id', type: 'string' }, + { path: 'metadata.results.properties', type: 'object' }, + ]) + }) +}) diff --git a/apps/sim/components/ui/tag-dropdown.tsx b/apps/sim/components/ui/tag-dropdown.tsx index cbd9a020e3..20a5b95f2b 
100644 --- a/apps/sim/components/ui/tag-dropdown.tsx +++ b/apps/sim/components/ui/tag-dropdown.tsx @@ -1,5 +1,6 @@ import type React from 'react' import { useCallback, useEffect, useMemo, useState } from 'react' +import { ChevronRight } from 'lucide-react' import { BlockPathCalculator } from '@/lib/block-path-calculator' import { extractFieldsFromSchema, parseResponseFormatSafely } from '@/lib/response-format' import { cn } from '@/lib/utils' @@ -10,6 +11,8 @@ import type { Variable } from '@/stores/panel/variables/types' import { useWorkflowRegistry } from '@/stores/workflows/registry/store' import { useSubBlockStore } from '@/stores/workflows/subblock/store' import { useWorkflowStore } from '@/stores/workflows/workflow/store' +import { getTool } from '@/tools/utils' +import { getTriggersByProvider } from '@/triggers' interface BlockTagGroup { blockName: string @@ -31,7 +34,6 @@ interface TagDropdownProps { style?: React.CSSProperties } -// Check if tag trigger '<' should show dropdown export const checkTagTrigger = (text: string, cursorPosition: number): { show: boolean } => { if (cursorPosition >= 1) { const textBeforeCursor = text.slice(0, cursorPosition) @@ -46,7 +48,110 @@ export const checkTagTrigger = (text: string, cursorPosition: number): { show: b return { show: false } } -// Generate output paths from block configuration outputs +const BLOCK_COLORS = { + VARIABLE: '#2F8BFF', + DEFAULT: '#2F55FF', + LOOP: '#8857E6', + PARALLEL: '#FF5757', +} as const + +const TAG_PREFIXES = { + VARIABLE: 'variable.', +} as const + +const normalizeBlockName = (blockName: string): string => { + return blockName.replace(/\s+/g, '').toLowerCase() +} + +const normalizeVariableName = (variableName: string): string => { + return variableName.replace(/\s+/g, '') +} + +const getSubBlockValue = (blockId: string, property: string): any => { + return useSubBlockStore.getState().getValue(blockId, property) +} + +const createTagEventHandlers = ( + tag: string, + group: any, + 
tagIndex: number, + handleTagSelect: (tag: string, group?: any) => void, + setSelectedIndex: (index: number) => void, + setHoveredNested: (value: any) => void +) => ({ + onMouseEnter: () => { + setSelectedIndex(tagIndex >= 0 ? tagIndex : 0) + setHoveredNested(null) + }, + onMouseDown: (e: React.MouseEvent) => { + e.preventDefault() + e.stopPropagation() + handleTagSelect(tag, group) + }, + onClick: (e: React.MouseEvent) => { + e.preventDefault() + e.stopPropagation() + handleTagSelect(tag, group) + }, +}) + +const getOutputTypeForPath = ( + block: any, + blockConfig: any, + blockId: string, + outputPath: string +): string => { + if (block?.triggerMode && blockConfig?.triggers?.enabled) { + const triggers = getTriggersByProvider(block.type) + const firstTrigger = triggers[0] + + if (firstTrigger?.outputs) { + const pathParts = outputPath.split('.') + let currentObj: any = firstTrigger.outputs + + for (const part of pathParts) { + if (currentObj && typeof currentObj === 'object') { + currentObj = currentObj[part] + } else { + break + } + } + + if (currentObj && typeof currentObj === 'object' && 'type' in currentObj && currentObj.type) { + return currentObj.type + } + } + } else if (block?.type === 'starter') { + // Handle starter block specific outputs + const startWorkflowValue = getSubBlockValue(blockId, 'startWorkflow') + + if (startWorkflowValue === 'chat') { + // Define types for chat mode outputs + const chatModeTypes: Record = { + input: 'string', + conversationId: 'string', + files: 'array', + } + return chatModeTypes[outputPath] || 'any' + } + // For API mode, check inputFormat for custom field types + const inputFormatValue = getSubBlockValue(blockId, 'inputFormat') + if (inputFormatValue && Array.isArray(inputFormatValue)) { + const field = inputFormatValue.find((f: any) => f.name === outputPath) + if (field?.type) { + return field.type + } + } + } else { + const operationValue = getSubBlockValue(blockId, 'operation') + if (blockConfig && operationValue) { 
+ return getToolOutputType(blockConfig, operationValue, outputPath) + } + } + + return 'any' +} + const generateOutputPaths = (outputs: Record, prefix = ''): string[] => { const paths: string[] = [] @@ -62,7 +167,7 @@ const generateOutputPaths = (outputs: Record, prefix = ''): string[ // New format: { type: 'string', description: '...' } - treat as leaf node paths.push(currentPath) } else { - // Legacy nested object - recurse + // Nested object - recurse to get all child paths const subPaths = generateOutputPaths(value, currentPath) paths.push(...subPaths) } @@ -75,6 +180,94 @@ const generateOutputPaths = (outputs: Record, prefix = ''): string[ return paths } +const generateOutputPathsWithTypes = ( + outputs: Record, + prefix = '' +): Array<{ path: string; type: string }> => { + const paths: Array<{ path: string; type: string }> = [] + + for (const [key, value] of Object.entries(outputs)) { + const currentPath = prefix ? `${prefix}.${key}` : key + + if (typeof value === 'string') { + // Simple type like 'string', 'number', 'json', 'any' + paths.push({ path: currentPath, type: value }) + } else if (typeof value === 'object' && value !== null) { + // Check if this is our new format with type and description + if ('type' in value && typeof value.type === 'string') { + // Handle nested properties for arrays and objects + if (value.type === 'array' && value.items?.properties) { + // For arrays with properties, add the array itself and recurse into items + paths.push({ path: currentPath, type: 'array' }) + const subPaths = generateOutputPathsWithTypes(value.items.properties, currentPath) + paths.push(...subPaths) + } else if (value.type === 'object' && value.properties) { + // For objects with properties, add the object itself and recurse into properties + paths.push({ path: currentPath, type: 'object' }) + const subPaths = generateOutputPathsWithTypes(value.properties, currentPath) + paths.push(...subPaths) + } else { + // Leaf node - just add the type + paths.push({ 
path: currentPath, type: value.type }) + } + } else { + // Legacy nested object - recurse and assume 'object' type + const subPaths = generateOutputPathsWithTypes(value, currentPath) + paths.push(...subPaths) + } + } else { + // Fallback - add with 'any' type + paths.push({ path: currentPath, type: 'any' }) + } + } + + return paths +} + +const generateToolOutputPaths = (blockConfig: any, operation: string): string[] => { + if (!blockConfig?.tools?.config?.tool) return [] + + try { + // Get the tool ID for this operation + const toolId = blockConfig.tools.config.tool({ operation }) + if (!toolId) return [] + + // Get the tool configuration + const toolConfig = getTool(toolId) + if (!toolConfig?.outputs) return [] + + // Generate paths from tool outputs + return generateOutputPaths(toolConfig.outputs) + } catch (error) { + console.warn('Failed to get tool outputs for operation:', operation, error) + return [] + } +} + +const getToolOutputType = (blockConfig: any, operation: string, path: string): string => { + if (!blockConfig?.tools?.config?.tool) return 'any' + + try { + // Get the tool ID for this operation + const toolId = blockConfig.tools.config.tool({ operation }) + if (!toolId) return 'any' + + // Get the tool configuration + const toolConfig = getTool(toolId) + if (!toolConfig?.outputs) return 'any' + + // Generate paths with types from tool outputs + const pathsWithTypes = generateOutputPathsWithTypes(toolConfig.outputs) + + // Find the matching path and return its type + const matchingPath = pathsWithTypes.find((p) => p.path === path) + return matchingPath?.type || 'any' + } catch (error) { + console.warn('Failed to get tool output type for path:', path, error) + return 'any' + } +} + export const TagDropdown: React.FC = ({ visible, onSelect, @@ -86,43 +279,41 @@ export const TagDropdown: React.FC = ({ onClose, style, }) => { - // Component state const [selectedIndex, setSelectedIndex] = useState(0) + const [hoveredNested, setHoveredNested] = useState<{ 
tag: string; index: number } | null>(null) + const [inSubmenu, setInSubmenu] = useState(false) + const [submenuIndex, setSubmenuIndex] = useState(0) + const [parentHovered, setParentHovered] = useState(null) + const [submenuHovered, setSubmenuHovered] = useState(false) - // Store hooks for workflow data const blocks = useWorkflowStore((state) => state.blocks) const loops = useWorkflowStore((state) => state.loops) const parallels = useWorkflowStore((state) => state.parallels) const edges = useWorkflowStore((state) => state.edges) const workflowId = useWorkflowRegistry((state) => state.activeWorkflowId) - // Store hooks for variables const getVariablesByWorkflowId = useVariablesStore((state) => state.getVariablesByWorkflowId) const loadVariables = useVariablesStore((state) => state.loadVariables) const variables = useVariablesStore((state) => state.variables) const workflowVariables = workflowId ? getVariablesByWorkflowId(workflowId) : [] - // Load variables when workflow changes useEffect(() => { if (workflowId) { loadVariables(workflowId) } }, [workflowId, loadVariables]) - // Extract current search term from input const searchTerm = useMemo(() => { const textBeforeCursor = inputValue.slice(0, cursorPosition) const match = textBeforeCursor.match(/<([^>]*)$/) return match ? 
match[1].toLowerCase() : '' }, [inputValue, cursorPosition]) - // Generate all available tags using BlockPathCalculator and clean block outputs const { tags, variableInfoMap = {}, blockTagGroups = [], } = useMemo(() => { - // Handle active source block (drag & drop from specific block) if (activeSourceBlockId) { const sourceBlock = blocks[activeSourceBlockId] if (!sourceBlock) { @@ -131,19 +322,16 @@ export const TagDropdown: React.FC = ({ const blockConfig = getBlock(sourceBlock.type) - // Handle special blocks that aren't in the registry (loop and parallel) if (!blockConfig) { if (sourceBlock.type === 'loop' || sourceBlock.type === 'parallel') { - // Create a mock config with results output for loop/parallel blocks const mockConfig = { outputs: { - results: 'array', // These blocks have a results array output + results: 'array', }, } const blockName = sourceBlock.name || sourceBlock.type - const normalizedBlockName = blockName.replace(/\s+/g, '').toLowerCase() + const normalizedBlockName = normalizeBlockName(blockName) - // Generate output paths for the mock config const outputPaths = generateOutputPaths(mockConfig.outputs) const blockTags = outputPaths.map((path) => `${normalizedBlockName}.${path}`) @@ -167,49 +355,36 @@ export const TagDropdown: React.FC = ({ } const blockName = sourceBlock.name || sourceBlock.type - const normalizedBlockName = blockName.replace(/\s+/g, '').toLowerCase() + const normalizedBlockName = normalizeBlockName(blockName) - // Check for custom response format first - const responseFormatValue = useSubBlockStore - .getState() - .getValue(activeSourceBlockId, 'responseFormat') + const responseFormatValue = getSubBlockValue(activeSourceBlockId, 'responseFormat') const responseFormat = parseResponseFormatSafely(responseFormatValue, activeSourceBlockId) let blockTags: string[] - // Special handling for evaluator blocks if (sourceBlock.type === 'evaluator') { - // Get the evaluation metrics for the evaluator block - const metricsValue = 
useSubBlockStore.getState().getValue(activeSourceBlockId, 'metrics') + const metricsValue = getSubBlockValue(activeSourceBlockId, 'metrics') if (metricsValue && Array.isArray(metricsValue) && metricsValue.length > 0) { - // Use the metric names as the available outputs const validMetrics = metricsValue.filter((metric: any) => metric?.name) blockTags = validMetrics.map( (metric: any) => `${normalizedBlockName}.${metric.name.toLowerCase()}` ) } else { - // Fallback to default evaluator outputs if no metrics are defined const outputPaths = generateOutputPaths(blockConfig.outputs) blockTags = outputPaths.map((path) => `${normalizedBlockName}.${path}`) } } else if (responseFormat) { - // Use custom schema properties if response format is specified const schemaFields = extractFieldsFromSchema(responseFormat) if (schemaFields.length > 0) { blockTags = schemaFields.map((field) => `${normalizedBlockName}.${field.name}`) } else { - // Fallback to default if schema extraction failed const outputPaths = generateOutputPaths(blockConfig.outputs || {}) blockTags = outputPaths.map((path) => `${normalizedBlockName}.${path}`) } } else if (!blockConfig.outputs || Object.keys(blockConfig.outputs).length === 0) { - // Handle blocks with no outputs (like starter) - check for custom input fields if (sourceBlock.type === 'starter') { - // Check what start workflow mode is selected - const startWorkflowValue = useSubBlockStore - .getState() - .getValue(activeSourceBlockId, 'startWorkflow') + const startWorkflowValue = getSubBlockValue(activeSourceBlockId, 'startWorkflow') if (startWorkflowValue === 'chat') { // For chat mode, provide input, conversationId, and files @@ -219,33 +394,50 @@ export const TagDropdown: React.FC = ({ `${normalizedBlockName}.files`, ] } else { - // Check for custom input format fields (for manual mode) - const inputFormatValue = useSubBlockStore - .getState() - .getValue(activeSourceBlockId, 'inputFormat') + const inputFormatValue = 
getSubBlockValue(activeSourceBlockId, 'inputFormat') if ( inputFormatValue && Array.isArray(inputFormatValue) && inputFormatValue.length > 0 ) { - // Use custom input fields if they exist blockTags = inputFormatValue .filter((field: any) => field.name && field.name.trim() !== '') .map((field: any) => `${normalizedBlockName}.${field.name}`) } else { - // Fallback to just the block name blockTags = [normalizedBlockName] } } } else { - // Other blocks with no outputs - show as just blockTags = [normalizedBlockName] } } else { - // Use default block outputs - const outputPaths = generateOutputPaths(blockConfig.outputs || {}) - blockTags = outputPaths.map((path) => `${normalizedBlockName}.${path}`) + if (sourceBlock?.triggerMode && blockConfig.triggers?.enabled) { + const triggers = getTriggersByProvider(sourceBlock.type) + const firstTrigger = triggers[0] + + if (firstTrigger?.outputs) { + // Use trigger outputs instead of block outputs + const outputPaths = generateOutputPaths(firstTrigger.outputs) + blockTags = outputPaths.map((path) => `${normalizedBlockName}.${path}`) + } else { + const outputPaths = generateOutputPaths(blockConfig.outputs || {}) + blockTags = outputPaths.map((path) => `${normalizedBlockName}.${path}`) + } + } else { + // Check for tool-specific outputs first + const operationValue = getSubBlockValue(activeSourceBlockId, 'operation') + const toolOutputPaths = operationValue + ? 
generateToolOutputPaths(blockConfig, operationValue) + : [] + + if (toolOutputPaths.length > 0) { + blockTags = toolOutputPaths.map((path) => `${normalizedBlockName}.${path}`) + } else { + const outputPaths = generateOutputPaths(blockConfig.outputs || {}) + blockTags = outputPaths.map((path) => `${normalizedBlockName}.${path}`) + } + } } const blockTagGroups: BlockTagGroup[] = [ @@ -265,7 +457,6 @@ export const TagDropdown: React.FC = ({ } } - // Check for invalid blocks before serialization to prevent race conditions const hasInvalidBlocks = Object.values(blocks).some((block) => !block || !block.type) if (hasInvalidBlocks) { return { @@ -275,23 +466,19 @@ export const TagDropdown: React.FC = ({ } } - // Create serialized workflow for BlockPathCalculator const serializer = new Serializer() const serializedWorkflow = serializer.serializeWorkflow(blocks, edges, loops, parallels) - // Find accessible blocks using BlockPathCalculator const accessibleBlockIds = BlockPathCalculator.findAllPathNodes( serializedWorkflow.connections, blockId ) - // Always include starter block const starterBlock = Object.values(blocks).find((block) => block.type === 'starter') if (starterBlock && !accessibleBlockIds.includes(starterBlock.id)) { accessibleBlockIds.push(starterBlock.id) } - // Calculate distances from starter block for ordering const blockDistances: Record = {} if (starterBlock) { const adjList: Record = {} @@ -316,18 +503,17 @@ export const TagDropdown: React.FC = ({ } } - // Create variable tags - filter out variables with empty names const validVariables = workflowVariables.filter( (variable: Variable) => variable.name.trim() !== '' ) const variableTags = validVariables.map( - (variable: Variable) => `variable.${variable.name.replace(/\s+/g, '')}` + (variable: Variable) => `${TAG_PREFIXES.VARIABLE}${normalizeVariableName(variable.name)}` ) const variableInfoMap = validVariables.reduce( (acc, variable) => { - const tagName = `variable.${variable.name.replace(/\s+/g, '')}` + 
const tagName = `${TAG_PREFIXES.VARIABLE}${normalizeVariableName(variable.name)}` acc[tagName] = { type: variable.type, id: variable.id, @@ -337,7 +523,6 @@ export const TagDropdown: React.FC = ({ {} as Record ) - // Generate loop contextual block group if current block is in a loop let loopBlockGroup: BlockTagGroup | null = null const containingLoop = Object.entries(loops).find(([_, loop]) => loop.nodes.includes(blockId)) let containingLoopBlockId: string | null = null @@ -351,25 +536,22 @@ export const TagDropdown: React.FC = ({ contextualTags.push('items') } - // Add the containing loop block's results to the contextual tags const containingLoopBlock = blocks[loopId] if (containingLoopBlock) { const loopBlockName = containingLoopBlock.name || containingLoopBlock.type - const normalizedLoopBlockName = loopBlockName.replace(/\s+/g, '').toLowerCase() + const normalizedLoopBlockName = normalizeBlockName(loopBlockName) contextualTags.push(`${normalizedLoopBlockName}.results`) - // Create a block group for the loop contextual tags loopBlockGroup = { blockName: loopBlockName, blockId: loopId, blockType: 'loop', tags: contextualTags, - distance: 0, // Contextual tags have highest priority + distance: 0, } } } - // Generate parallel contextual block group if current block is in parallel let parallelBlockGroup: BlockTagGroup | null = null const containingParallel = Object.entries(parallels || {}).find(([_, parallel]) => parallel.nodes.includes(blockId) @@ -380,25 +562,22 @@ export const TagDropdown: React.FC = ({ containingParallelBlockId = parallelId const contextualTags: string[] = ['index', 'currentItem', 'items'] - // Add the containing parallel block's results to the contextual tags const containingParallelBlock = blocks[parallelId] if (containingParallelBlock) { const parallelBlockName = containingParallelBlock.name || containingParallelBlock.type - const normalizedParallelBlockName = parallelBlockName.replace(/\s+/g, '').toLowerCase() + const 
normalizedParallelBlockName = normalizeBlockName(parallelBlockName) contextualTags.push(`${normalizedParallelBlockName}.results`) - // Create a block group for the parallel contextual tags parallelBlockGroup = { blockName: parallelBlockName, blockId: parallelId, blockType: 'parallel', tags: contextualTags, - distance: 0, // Contextual tags have highest priority + distance: 0, } } } - // Create block tag groups from accessible blocks const blockTagGroups: BlockTagGroup[] = [] const allBlockTags: string[] = [] @@ -408,9 +587,7 @@ export const TagDropdown: React.FC = ({ const blockConfig = getBlock(accessibleBlock.type) - // Handle special blocks that aren't in the registry (loop and parallel) if (!blockConfig) { - // For loop and parallel blocks, create a mock config with results output if (accessibleBlock.type === 'loop' || accessibleBlock.type === 'parallel') { // Skip this block if it's the containing loop/parallel block - we'll handle it with contextual tags if ( @@ -422,13 +599,12 @@ export const TagDropdown: React.FC = ({ const mockConfig = { outputs: { - results: 'array', // These blocks have a results array output + results: 'array', }, } const blockName = accessibleBlock.name || accessibleBlock.type - const normalizedBlockName = blockName.replace(/\s+/g, '').toLowerCase() + const normalizedBlockName = normalizeBlockName(blockName) - // Generate output paths for the mock config const outputPaths = generateOutputPaths(mockConfig.outputs) const blockTags = outputPaths.map((path) => `${normalizedBlockName}.${path}`) @@ -446,49 +622,36 @@ export const TagDropdown: React.FC = ({ } const blockName = accessibleBlock.name || accessibleBlock.type - const normalizedBlockName = blockName.replace(/\s+/g, '').toLowerCase() + const normalizedBlockName = normalizeBlockName(blockName) - // Check for custom response format first - const responseFormatValue = useSubBlockStore - .getState() - .getValue(accessibleBlockId, 'responseFormat') + const responseFormatValue = 
getSubBlockValue(accessibleBlockId, 'responseFormat') const responseFormat = parseResponseFormatSafely(responseFormatValue, accessibleBlockId) let blockTags: string[] - // Special handling for evaluator blocks if (accessibleBlock.type === 'evaluator') { - // Get the evaluation metrics for the evaluator block - const metricsValue = useSubBlockStore.getState().getValue(accessibleBlockId, 'metrics') + const metricsValue = getSubBlockValue(accessibleBlockId, 'metrics') if (metricsValue && Array.isArray(metricsValue) && metricsValue.length > 0) { - // Use the metric names as the available outputs const validMetrics = metricsValue.filter((metric: any) => metric?.name) blockTags = validMetrics.map( (metric: any) => `${normalizedBlockName}.${metric.name.toLowerCase()}` ) } else { - // Fallback to default evaluator outputs if no metrics are defined const outputPaths = generateOutputPaths(blockConfig.outputs) blockTags = outputPaths.map((path) => `${normalizedBlockName}.${path}`) } } else if (responseFormat) { - // Use custom schema properties if response format is specified const schemaFields = extractFieldsFromSchema(responseFormat) if (schemaFields.length > 0) { blockTags = schemaFields.map((field) => `${normalizedBlockName}.${field.name}`) } else { - // Fallback to default if schema extraction failed const outputPaths = generateOutputPaths(blockConfig.outputs || {}) blockTags = outputPaths.map((path) => `${normalizedBlockName}.${path}`) } } else if (!blockConfig.outputs || Object.keys(blockConfig.outputs).length === 0) { - // Handle blocks with no outputs (like starter) - check for custom input fields if (accessibleBlock.type === 'starter') { - // Check what start workflow mode is selected - const startWorkflowValue = useSubBlockStore - .getState() - .getValue(accessibleBlockId, 'startWorkflow') + const startWorkflowValue = getSubBlockValue(accessibleBlockId, 'startWorkflow') if (startWorkflowValue === 'chat') { // For chat mode, provide input, conversationId, and files 
@@ -498,33 +661,51 @@ export const TagDropdown: React.FC = ({ `${normalizedBlockName}.files`, ] } else { - // Check for custom input format fields (for manual mode) - const inputFormatValue = useSubBlockStore - .getState() - .getValue(accessibleBlockId, 'inputFormat') + const inputFormatValue = getSubBlockValue(accessibleBlockId, 'inputFormat') if ( inputFormatValue && Array.isArray(inputFormatValue) && inputFormatValue.length > 0 ) { - // Use custom input fields if they exist blockTags = inputFormatValue .filter((field: any) => field.name && field.name.trim() !== '') .map((field: any) => `${normalizedBlockName}.${field.name}`) } else { - // Fallback to just the block name blockTags = [normalizedBlockName] } } } else { - // Other blocks with no outputs - show as just blockTags = [normalizedBlockName] } } else { - // Use default block outputs - const outputPaths = generateOutputPaths(blockConfig.outputs || {}) - blockTags = outputPaths.map((path) => `${normalizedBlockName}.${path}`) + const blockState = blocks[accessibleBlockId] + if (blockState?.triggerMode && blockConfig.triggers?.enabled) { + const triggers = getTriggersByProvider(blockState.type) // Use block type as provider + const firstTrigger = triggers[0] + + if (firstTrigger?.outputs) { + // Use trigger outputs instead of block outputs + const outputPaths = generateOutputPaths(firstTrigger.outputs) + blockTags = outputPaths.map((path) => `${normalizedBlockName}.${path}`) + } else { + const outputPaths = generateOutputPaths(blockConfig.outputs || {}) + blockTags = outputPaths.map((path) => `${normalizedBlockName}.${path}`) + } + } else { + // Check for tool-specific outputs first + const operationValue = getSubBlockValue(accessibleBlockId, 'operation') + const toolOutputPaths = operationValue + ? 
generateToolOutputPaths(blockConfig, operationValue) + : [] + + if (toolOutputPaths.length > 0) { + blockTags = toolOutputPaths.map((path) => `${normalizedBlockName}.${path}`) + } else { + const outputPaths = generateOutputPaths(blockConfig.outputs || {}) + blockTags = outputPaths.map((path) => `${normalizedBlockName}.${path}`) + } + } } blockTagGroups.push({ @@ -538,7 +719,6 @@ export const TagDropdown: React.FC = ({ allBlockTags.push(...blockTags) } - // Add contextual block groups at the beginning (they have highest priority) const finalBlockTagGroups: BlockTagGroup[] = [] if (loopBlockGroup) { finalBlockTagGroups.push(loopBlockGroup) @@ -547,11 +727,9 @@ export const TagDropdown: React.FC = ({ finalBlockTagGroups.push(parallelBlockGroup) } - // Sort regular block groups by distance (closest first) and add them blockTagGroups.sort((a, b) => a.distance - b.distance) finalBlockTagGroups.push(...blockTagGroups) - // Collect all tags for the main tags array const contextualTags: string[] = [] if (loopBlockGroup) { contextualTags.push(...loopBlockGroup.tags) @@ -567,23 +745,20 @@ export const TagDropdown: React.FC = ({ } }, [blocks, edges, loops, parallels, blockId, activeSourceBlockId, workflowVariables]) - // Filter tags based on search term const filteredTags = useMemo(() => { if (!searchTerm) return tags return tags.filter((tag: string) => tag.toLowerCase().includes(searchTerm)) }, [tags, searchTerm]) - // Group filtered tags by category const { variableTags, filteredBlockTagGroups } = useMemo(() => { const varTags: string[] = [] filteredTags.forEach((tag) => { - if (tag.startsWith('variable.')) { + if (tag.startsWith(TAG_PREFIXES.VARIABLE)) { varTags.push(tag) } }) - // Filter block tag groups based on search term const filteredBlockTagGroups = blockTagGroups .map((group) => ({ ...group, @@ -597,13 +772,85 @@ export const TagDropdown: React.FC = ({ } }, [filteredTags, blockTagGroups, searchTerm]) - // Create ordered tags for keyboard navigation - const 
orderedTags = useMemo(() => { - const allBlockTags = filteredBlockTagGroups.flatMap((group) => group.tags) - return [...variableTags, ...allBlockTags] - }, [variableTags, filteredBlockTagGroups]) + const nestedBlockTagGroups = useMemo(() => { + return filteredBlockTagGroups.map((group) => { + const nestedTags: Array<{ + key: string + display: string + fullTag?: string + children?: Array<{ key: string; display: string; fullTag: string }> + }> = [] + + const groupedTags: Record< + string, + Array<{ key: string; display: string; fullTag: string }> + > = {} + const directTags: Array<{ key: string; display: string; fullTag: string }> = [] + + group.tags.forEach((tag) => { + const tagParts = tag.split('.') + if (tagParts.length >= 3) { + const parent = tagParts[1] + const child = tagParts.slice(2).join('.') + + if (!groupedTags[parent]) { + groupedTags[parent] = [] + } + groupedTags[parent].push({ + key: `${parent}.${child}`, + display: child, + fullTag: tag, + }) + } else { + const path = tagParts.slice(1).join('.') + directTags.push({ + key: path || group.blockName, + display: path || group.blockName, + fullTag: tag, + }) + } + }) + + Object.entries(groupedTags).forEach(([parent, children]) => { + nestedTags.push({ + key: parent, + display: parent, + children: children, + }) + }) + + directTags.forEach((directTag) => { + nestedTags.push(directTag) + }) + + return { + ...group, + nestedTags, + } + }) + }, [filteredBlockTagGroups]) + + const orderedTags = useMemo(() => { + const visualTags: string[] = [] + + visualTags.push(...variableTags) + + nestedBlockTagGroups.forEach((group) => { + group.nestedTags.forEach((nestedTag) => { + if (nestedTag.children && nestedTag.children.length > 0) { + const firstChild = nestedTag.children[0] + if (firstChild.fullTag) { + visualTags.push(firstChild.fullTag) + } + } else if (nestedTag.fullTag) { + visualTags.push(nestedTag.fullTag) + } + }) + }) + + return visualTags + }, [variableTags, nestedBlockTagGroups]) - // Create efficient 
tag index lookup map const tagIndexMap = useMemo(() => { const map = new Map() orderedTags.forEach((tag, index) => { @@ -612,22 +859,18 @@ export const TagDropdown: React.FC = ({ return map }, [orderedTags]) - // Handle tag selection and text replacement const handleTagSelect = useCallback( (tag: string, blockGroup?: BlockTagGroup) => { const textBeforeCursor = inputValue.slice(0, cursorPosition) const textAfterCursor = inputValue.slice(cursorPosition) - // Find the position of the last '<' before cursor const lastOpenBracket = textBeforeCursor.lastIndexOf('<') if (lastOpenBracket === -1) return - // Process different types of tags let processedTag = tag - // Handle variable tags - if (tag.startsWith('variable.')) { - const variableName = tag.substring('variable.'.length) + if (tag.startsWith(TAG_PREFIXES.VARIABLE)) { + const variableName = tag.substring(TAG_PREFIXES.VARIABLE.length) const variableObj = Object.values(variables).find( (v) => v.name.replace(/\s+/g, '') === variableName ) @@ -635,28 +878,22 @@ export const TagDropdown: React.FC = ({ if (variableObj) { processedTag = tag } - } - // Handle contextual loop/parallel tags - else if ( + } else if ( blockGroup && (blockGroup.blockType === 'loop' || blockGroup.blockType === 'parallel') ) { - // Check if this is a contextual tag (without dots) that needs a prefix if (!tag.includes('.') && ['index', 'currentItem', 'items'].includes(tag)) { processedTag = `${blockGroup.blockType}.${tag}` } else { - // It's already a properly formatted tag (like blockname.results) processedTag = tag } } - // Handle existing closing bracket const nextCloseBracket = textAfterCursor.indexOf('>') let remainingTextAfterCursor = textAfterCursor if (nextCloseBracket !== -1) { const textBetween = textAfterCursor.slice(0, nextCloseBracket) - // If text between cursor and '>' contains only tag-like characters, skip it if (/^[a-zA-Z0-9._]*$/.test(textBetween)) { remainingTextAfterCursor = textAfterCursor.slice(nextCloseBracket + 1) } @@ 
-670,65 +907,240 @@ export const TagDropdown: React.FC = ({ [inputValue, cursorPosition, variables, onSelect, onClose] ) - // Reset selection when search results change useEffect(() => setSelectedIndex(0), [searchTerm]) - // Keep selection within bounds when tags change useEffect(() => { if (selectedIndex >= orderedTags.length) { setSelectedIndex(Math.max(0, orderedTags.length - 1)) } }, [orderedTags.length, selectedIndex]) - // Handle keyboard navigation useEffect(() => { if (visible) { const handleKeyboardEvent = (e: KeyboardEvent) => { if (!orderedTags.length) return - switch (e.key) { - case 'ArrowDown': - e.preventDefault() - e.stopPropagation() - setSelectedIndex((prev) => Math.min(prev + 1, orderedTags.length - 1)) - break - case 'ArrowUp': - e.preventDefault() - e.stopPropagation() - setSelectedIndex((prev) => Math.max(prev - 1, 0)) - break - case 'Enter': - e.preventDefault() - e.stopPropagation() - if (selectedIndex >= 0 && selectedIndex < orderedTags.length) { - const selectedTag = orderedTags[selectedIndex] - // Find which block group this tag belongs to - const belongsToGroup = filteredBlockTagGroups.find((group) => - group.tags.includes(selectedTag) - ) - handleTagSelect(selectedTag, belongsToGroup) - } - break - case 'Escape': - e.preventDefault() - e.stopPropagation() - onClose?.() - break + if (inSubmenu) { + const currentHovered = hoveredNested + if (!currentHovered) { + setInSubmenu(false) + return + } + + const currentGroup = nestedBlockTagGroups.find((group) => { + return group.nestedTags.some( + (tag, index) => + `${group.blockId}-${tag.key}` === currentHovered.tag && + index === currentHovered.index + ) + }) + + const currentNestedTag = currentGroup?.nestedTags.find( + (tag, index) => + `${currentGroup.blockId}-${tag.key}` === currentHovered.tag && + index === currentHovered.index + ) + + const children = currentNestedTag?.children || [] + + switch (e.key) { + case 'ArrowDown': + e.preventDefault() + e.stopPropagation() + 
setSubmenuIndex((prev) => Math.min(prev + 1, children.length - 1)) + break + case 'ArrowUp': + e.preventDefault() + e.stopPropagation() + setSubmenuIndex((prev) => Math.max(prev - 1, 0)) + break + case 'ArrowLeft': + e.preventDefault() + e.stopPropagation() + setInSubmenu(false) + setHoveredNested(null) + setSubmenuIndex(0) + break + case 'Enter': + e.preventDefault() + e.stopPropagation() + if (submenuIndex >= 0 && submenuIndex < children.length) { + const selectedChild = children[submenuIndex] + handleTagSelect(selectedChild.fullTag, currentGroup) + } + break + case 'Escape': + e.preventDefault() + e.stopPropagation() + setInSubmenu(false) + setHoveredNested(null) + setSubmenuIndex(0) + break + } + } else { + switch (e.key) { + case 'ArrowDown': + e.preventDefault() + e.stopPropagation() + setSelectedIndex((prev) => { + const newIndex = Math.min(prev + 1, orderedTags.length - 1) + const newSelectedTag = orderedTags[newIndex] + let foundParent = false + for (const group of nestedBlockTagGroups) { + for ( + let nestedTagIndex = 0; + nestedTagIndex < group.nestedTags.length; + nestedTagIndex++ + ) { + const nestedTag = group.nestedTags[nestedTagIndex] + if (nestedTag.children && nestedTag.children.length > 0) { + const firstChild = nestedTag.children[0] + if (firstChild.fullTag === newSelectedTag) { + setHoveredNested({ + tag: `${group.blockId}-${nestedTag.key}`, + index: nestedTagIndex, + }) + foundParent = true + break + } + } + } + if (foundParent) break + } + if (!foundParent && !inSubmenu) { + setHoveredNested(null) + } + return newIndex + }) + break + case 'ArrowUp': + e.preventDefault() + e.stopPropagation() + setSelectedIndex((prev) => { + const newIndex = Math.max(prev - 1, 0) + const newSelectedTag = orderedTags[newIndex] + let foundParent = false + for (const group of nestedBlockTagGroups) { + for ( + let nestedTagIndex = 0; + nestedTagIndex < group.nestedTags.length; + nestedTagIndex++ + ) { + const nestedTag = group.nestedTags[nestedTagIndex] + if 
(nestedTag.children && nestedTag.children.length > 0) { + const firstChild = nestedTag.children[0] + if (firstChild.fullTag === newSelectedTag) { + setHoveredNested({ + tag: `${group.blockId}-${nestedTag.key}`, + index: nestedTagIndex, + }) + foundParent = true + break + } + } + } + if (foundParent) break + } + if (!foundParent && !inSubmenu) { + setHoveredNested(null) + } + return newIndex + }) + break + case 'ArrowRight': + e.preventDefault() + e.stopPropagation() + if (selectedIndex >= 0 && selectedIndex < orderedTags.length) { + const selectedTag = orderedTags[selectedIndex] + for (const group of nestedBlockTagGroups) { + for ( + let nestedTagIndex = 0; + nestedTagIndex < group.nestedTags.length; + nestedTagIndex++ + ) { + const nestedTag = group.nestedTags[nestedTagIndex] + if (nestedTag.children && nestedTag.children.length > 0) { + const firstChild = nestedTag.children[0] + if (firstChild.fullTag === selectedTag) { + setInSubmenu(true) + setSubmenuIndex(0) + setHoveredNested({ + tag: `${group.blockId}-${nestedTag.key}`, + index: nestedTagIndex, + }) + return + } + } + } + } + } + break + case 'Enter': + e.preventDefault() + e.stopPropagation() + if (selectedIndex >= 0 && selectedIndex < orderedTags.length) { + const selectedTag = orderedTags[selectedIndex] + + let isParentItem = false + let parentTag = '' + let parentGroup: BlockTagGroup | undefined + + for (const group of nestedBlockTagGroups) { + for (const nestedTag of group.nestedTags) { + if (nestedTag.children && nestedTag.children.length > 0) { + const firstChild = nestedTag.children[0] + if (firstChild.fullTag === selectedTag) { + isParentItem = true + parentTag = `${normalizeBlockName(group.blockName)}.${nestedTag.key}` + parentGroup = group + break + } + } + } + if (isParentItem) break + } + + if (isParentItem && parentTag) { + handleTagSelect(parentTag, parentGroup) + } else { + const belongsToGroup = filteredBlockTagGroups.find((group) => + group.tags.includes(selectedTag) + ) + 
handleTagSelect(selectedTag, belongsToGroup) + } + } + break + case 'Escape': + e.preventDefault() + e.stopPropagation() + onClose?.() + break + } } } window.addEventListener('keydown', handleKeyboardEvent, true) return () => window.removeEventListener('keydown', handleKeyboardEvent, true) } - }, [visible, selectedIndex, orderedTags, filteredBlockTagGroups, handleTagSelect, onClose]) + }, [ + visible, + selectedIndex, + orderedTags, + filteredBlockTagGroups, + nestedBlockTagGroups, + handleTagSelect, + onClose, + inSubmenu, + submenuIndex, + hoveredNested, + ]) - // Early return if dropdown should not be visible if (!visible || tags.length === 0 || orderedTags.length === 0) return null return (
= ({ tagIndex >= 0 && 'bg-accent text-accent-foreground' )} - onMouseEnter={() => setSelectedIndex(tagIndex >= 0 ? tagIndex : 0)} - onMouseDown={(e) => { - e.preventDefault() - e.stopPropagation() - handleTagSelect(tag) - }} - onClick={(e) => { - e.preventDefault() - e.stopPropagation() - handleTagSelect(tag) - }} + {...createTagEventHandlers( + tag, + undefined, + tagIndex, + handleTagSelect, + setSelectedIndex, + setHoveredNested + )} >
V
- {tag.startsWith('variable.') ? tag.substring('variable.'.length) : tag} + {tag.startsWith(TAG_PREFIXES.VARIABLE) + ? tag.substring(TAG_PREFIXES.VARIABLE.length) + : tag} {variableInfo && ( @@ -793,100 +1204,261 @@ export const TagDropdown: React.FC = ({ )} - {/* Block sections */} - {filteredBlockTagGroups.length > 0 && ( + {/* Block sections with nested structure */} + {nestedBlockTagGroups.length > 0 && ( <> {variableTags.length > 0 &&
} - {filteredBlockTagGroups.map((group) => { - // Get block color from configuration + {nestedBlockTagGroups.map((group) => { const blockConfig = getBlock(group.blockType) - let blockColor = blockConfig?.bgColor || '#2F55FF' + let blockColor = blockConfig?.bgColor || BLOCK_COLORS.DEFAULT - // Handle special colors for loop and parallel blocks if (group.blockType === 'loop') { - blockColor = '#8857E6' // Purple color for loop blocks + blockColor = BLOCK_COLORS.LOOP } else if (group.blockType === 'parallel') { - blockColor = '#FF5757' // Red color for parallel blocks + blockColor = BLOCK_COLORS.PARALLEL } return ( -
+
{group.blockName}
- {group.tags.map((tag: string) => { - const tagIndex = tagIndexMap.get(tag) ?? -1 + {group.nestedTags.map((nestedTag, index) => { + const tagIndex = nestedTag.fullTag + ? (tagIndexMap.get(nestedTag.fullTag) ?? -1) + : -1 + const hasChildren = nestedTag.children && nestedTag.children.length > 0 + const isHovered = + hoveredNested?.tag === `${group.blockId}-${nestedTag.key}` && + hoveredNested?.index === index - // Handle display text based on tag type - let displayText: string + const displayText = nestedTag.display let tagDescription = '' let tagIcon = group.blockName.charAt(0).toUpperCase() if ( (group.blockType === 'loop' || group.blockType === 'parallel') && - !tag.includes('.') + !nestedTag.key.includes('.') ) { - // Contextual tags like 'index', 'currentItem', 'items' - displayText = tag - if (tag === 'index') { + if (nestedTag.key === 'index') { tagIcon = '#' - tagDescription = 'Index' - } else if (tag === 'currentItem') { + tagDescription = 'number' + } else if (nestedTag.key === 'currentItem') { tagIcon = 'i' - tagDescription = 'Current item' - } else if (tag === 'items') { + tagDescription = 'any' + } else if (nestedTag.key === 'items') { tagIcon = 'I' - tagDescription = 'All items' + tagDescription = 'array' } } else { - // Regular block output tags like 'blockname.field' or 'blockname.results' - const tagParts = tag.split('.') - const path = tagParts.slice(1).join('.') - displayText = path || group.blockName - if (path === 'results') { - tagDescription = 'Results array' + if (nestedTag.fullTag) { + const tagParts = nestedTag.fullTag.split('.') + const outputPath = tagParts.slice(1).join('.') + + const block = Object.values(blocks).find( + (b) => b.id === group.blockId + ) + if (block) { + const blockConfig = getBlock(block.type) + + tagDescription = getOutputTypeForPath( + block, + blockConfig, + group.blockId, + outputPath + ) + } } } + const isKeyboardSelected = (() => { + if ( + hasChildren && + selectedIndex >= 0 && + selectedIndex < 
orderedTags.length + ) { + const selectedTag = orderedTags[selectedIndex] + const firstChild = nestedTag.children?.[0] + return firstChild?.fullTag === selectedTag + } + return tagIndex === selectedIndex && tagIndex >= 0 + })() + return ( - + + {/* Nested submenu */} + {hasChildren && isHovered && ( +
{ + setSubmenuHovered(true) + const parentKey = `${group.blockId}-${nestedTag.key}` + setHoveredNested({ + tag: parentKey, + index, + }) + setSubmenuIndex(-1) + }} + onMouseLeave={() => { + setSubmenuHovered(false) + const parentKey = `${group.blockId}-${nestedTag.key}` + if (parentHovered !== parentKey) { + setHoveredNested(null) + } + }} + > +
+ {nestedTag.children!.map((child, childIndex) => { + const isKeyboardSelected = + inSubmenu && submenuIndex === childIndex + const isSelected = isKeyboardSelected + + let childType = '' + const childTagParts = child.fullTag.split('.') + const childOutputPath = childTagParts.slice(1).join('.') + + const block = Object.values(blocks).find( + (b) => b.id === group.blockId + ) + if (block) { + const blockConfig = getBlock(block.type) + + childType = getOutputTypeForPath( + block, + blockConfig, + group.blockId, + childOutputPath + ) + } + + return ( + + ) + })} +
+
)} - +
) })}
diff --git a/apps/sim/db/migrations/0041_common_doomsday.sql b/apps/sim/db/migrations/0041_common_doomsday.sql deleted file mode 100644 index c43f77500e..0000000000 --- a/apps/sim/db/migrations/0041_common_doomsday.sql +++ /dev/null @@ -1,5 +0,0 @@ -ALTER TABLE "document" ADD COLUMN "processing_status" text DEFAULT 'pending' NOT NULL;--> statement-breakpoint -ALTER TABLE "document" ADD COLUMN "processing_started_at" timestamp;--> statement-breakpoint -ALTER TABLE "document" ADD COLUMN "processing_completed_at" timestamp;--> statement-breakpoint -ALTER TABLE "document" ADD COLUMN "processing_error" text;--> statement-breakpoint -CREATE INDEX "doc_processing_status_idx" ON "document" USING btree ("knowledge_base_id","processing_status"); \ No newline at end of file diff --git a/apps/sim/db/migrations/relations.ts b/apps/sim/db/migrations/relations.ts deleted file mode 100644 index 635c54146c..0000000000 --- a/apps/sim/db/migrations/relations.ts +++ /dev/null @@ -1,431 +0,0 @@ -import { relations } from 'drizzle-orm/relations' -import { - account, - apiKey, - chat, - copilotChats, - copilotCheckpoints, - customTools, - document, - embedding, - environment, - invitation, - knowledgeBase, - marketplace, - member, - memory, - organization, - permissions, - session, - settings, - templateStars, - templates, - user, - userRateLimits, - userStats, - webhook, - workflow, - workflowBlocks, - workflowEdges, - workflowExecutionBlocks, - workflowExecutionLogs, - workflowExecutionSnapshots, - workflowFolder, - workflowLogs, - workflowSchedule, - workflowSubflows, - workspace, - workspaceInvitation, -} from './schema' - -export const accountRelations = relations(account, ({ one }) => ({ - user: one(user, { - fields: [account.userId], - references: [user.id], - }), -})) - -export const userRelations = relations(user, ({ many }) => ({ - accounts: many(account), - environments: many(environment), - apiKeys: many(apiKey), - marketplaces: many(marketplace), - customTools: 
many(customTools), - sessions: many(session), - invitations: many(invitation), - members: many(member), - chats: many(chat), - workspaces: many(workspace), - knowledgeBases: many(knowledgeBase), - workflows: many(workflow), - workflowFolders: many(workflowFolder), - workspaceInvitations: many(workspaceInvitation), - permissions: many(permissions), - userStats: many(userStats), - copilotChats: many(copilotChats), - templateStars: many(templateStars), - templates: many(templates), - settings: many(settings), - userRateLimits: many(userRateLimits), - copilotCheckpoints: many(copilotCheckpoints), -})) - -export const environmentRelations = relations(environment, ({ one }) => ({ - user: one(user, { - fields: [environment.userId], - references: [user.id], - }), -})) - -export const workflowLogsRelations = relations(workflowLogs, ({ one }) => ({ - workflow: one(workflow, { - fields: [workflowLogs.workflowId], - references: [workflow.id], - }), -})) - -export const workflowRelations = relations(workflow, ({ one, many }) => ({ - workflowLogs: many(workflowLogs), - marketplaces: many(marketplace), - chats: many(chat), - memories: many(memory), - user: one(user, { - fields: [workflow.userId], - references: [user.id], - }), - workspace: one(workspace, { - fields: [workflow.workspaceId], - references: [workspace.id], - }), - workflowFolder: one(workflowFolder, { - fields: [workflow.folderId], - references: [workflowFolder.id], - }), - workflowEdges: many(workflowEdges), - workflowSubflows: many(workflowSubflows), - workflowBlocks: many(workflowBlocks), - workflowExecutionBlocks: many(workflowExecutionBlocks), - workflowExecutionLogs: many(workflowExecutionLogs), - workflowExecutionSnapshots: many(workflowExecutionSnapshots), - copilotChats: many(copilotChats), - templates: many(templates), - webhooks: many(webhook), - workflowSchedules: many(workflowSchedule), - copilotCheckpoints: many(copilotCheckpoints), -})) - -export const apiKeyRelations = relations(apiKey, ({ one }) => 
({ - user: one(user, { - fields: [apiKey.userId], - references: [user.id], - }), -})) - -export const marketplaceRelations = relations(marketplace, ({ one }) => ({ - workflow: one(workflow, { - fields: [marketplace.workflowId], - references: [workflow.id], - }), - user: one(user, { - fields: [marketplace.authorId], - references: [user.id], - }), -})) - -export const customToolsRelations = relations(customTools, ({ one }) => ({ - user: one(user, { - fields: [customTools.userId], - references: [user.id], - }), -})) - -export const sessionRelations = relations(session, ({ one }) => ({ - user: one(user, { - fields: [session.userId], - references: [user.id], - }), - organization: one(organization, { - fields: [session.activeOrganizationId], - references: [organization.id], - }), -})) - -export const organizationRelations = relations(organization, ({ many }) => ({ - sessions: many(session), - invitations: many(invitation), - members: many(member), -})) - -export const invitationRelations = relations(invitation, ({ one }) => ({ - user: one(user, { - fields: [invitation.inviterId], - references: [user.id], - }), - organization: one(organization, { - fields: [invitation.organizationId], - references: [organization.id], - }), -})) - -export const memberRelations = relations(member, ({ one }) => ({ - user: one(user, { - fields: [member.userId], - references: [user.id], - }), - organization: one(organization, { - fields: [member.organizationId], - references: [organization.id], - }), -})) - -export const chatRelations = relations(chat, ({ one }) => ({ - workflow: one(workflow, { - fields: [chat.workflowId], - references: [workflow.id], - }), - user: one(user, { - fields: [chat.userId], - references: [user.id], - }), -})) - -export const workspaceRelations = relations(workspace, ({ one, many }) => ({ - user: one(user, { - fields: [workspace.ownerId], - references: [user.id], - }), - knowledgeBases: many(knowledgeBase), - workflows: many(workflow), - workflowFolders: 
many(workflowFolder), - workspaceInvitations: many(workspaceInvitation), -})) - -export const memoryRelations = relations(memory, ({ one }) => ({ - workflow: one(workflow, { - fields: [memory.workflowId], - references: [workflow.id], - }), -})) - -export const knowledgeBaseRelations = relations(knowledgeBase, ({ one, many }) => ({ - user: one(user, { - fields: [knowledgeBase.userId], - references: [user.id], - }), - workspace: one(workspace, { - fields: [knowledgeBase.workspaceId], - references: [workspace.id], - }), - documents: many(document), - embeddings: many(embedding), -})) - -export const workflowFolderRelations = relations(workflowFolder, ({ one, many }) => ({ - workflows: many(workflow), - user: one(user, { - fields: [workflowFolder.userId], - references: [user.id], - }), - workspace: one(workspace, { - fields: [workflowFolder.workspaceId], - references: [workspace.id], - }), -})) - -export const workflowEdgesRelations = relations(workflowEdges, ({ one }) => ({ - workflow: one(workflow, { - fields: [workflowEdges.workflowId], - references: [workflow.id], - }), - workflowBlock_sourceBlockId: one(workflowBlocks, { - fields: [workflowEdges.sourceBlockId], - references: [workflowBlocks.id], - relationName: 'workflowEdges_sourceBlockId_workflowBlocks_id', - }), - workflowBlock_targetBlockId: one(workflowBlocks, { - fields: [workflowEdges.targetBlockId], - references: [workflowBlocks.id], - relationName: 'workflowEdges_targetBlockId_workflowBlocks_id', - }), -})) - -export const workflowBlocksRelations = relations(workflowBlocks, ({ one, many }) => ({ - workflowEdges_sourceBlockId: many(workflowEdges, { - relationName: 'workflowEdges_sourceBlockId_workflowBlocks_id', - }), - workflowEdges_targetBlockId: many(workflowEdges, { - relationName: 'workflowEdges_targetBlockId_workflowBlocks_id', - }), - workflow: one(workflow, { - fields: [workflowBlocks.workflowId], - references: [workflow.id], - }), - webhooks: many(webhook), - workflowSchedules: 
many(workflowSchedule), -})) - -export const workflowSubflowsRelations = relations(workflowSubflows, ({ one }) => ({ - workflow: one(workflow, { - fields: [workflowSubflows.workflowId], - references: [workflow.id], - }), -})) - -export const workspaceInvitationRelations = relations(workspaceInvitation, ({ one }) => ({ - workspace: one(workspace, { - fields: [workspaceInvitation.workspaceId], - references: [workspace.id], - }), - user: one(user, { - fields: [workspaceInvitation.inviterId], - references: [user.id], - }), -})) - -export const permissionsRelations = relations(permissions, ({ one }) => ({ - user: one(user, { - fields: [permissions.userId], - references: [user.id], - }), -})) - -export const userStatsRelations = relations(userStats, ({ one }) => ({ - user: one(user, { - fields: [userStats.userId], - references: [user.id], - }), -})) - -export const workflowExecutionBlocksRelations = relations(workflowExecutionBlocks, ({ one }) => ({ - workflow: one(workflow, { - fields: [workflowExecutionBlocks.workflowId], - references: [workflow.id], - }), -})) - -export const workflowExecutionLogsRelations = relations(workflowExecutionLogs, ({ one }) => ({ - workflow: one(workflow, { - fields: [workflowExecutionLogs.workflowId], - references: [workflow.id], - }), - workflowExecutionSnapshot: one(workflowExecutionSnapshots, { - fields: [workflowExecutionLogs.stateSnapshotId], - references: [workflowExecutionSnapshots.id], - }), -})) - -export const workflowExecutionSnapshotsRelations = relations( - workflowExecutionSnapshots, - ({ one, many }) => ({ - workflowExecutionLogs: many(workflowExecutionLogs), - workflow: one(workflow, { - fields: [workflowExecutionSnapshots.workflowId], - references: [workflow.id], - }), - }) -) - -export const copilotChatsRelations = relations(copilotChats, ({ one, many }) => ({ - user: one(user, { - fields: [copilotChats.userId], - references: [user.id], - }), - workflow: one(workflow, { - fields: [copilotChats.workflowId], - references: 
[workflow.id], - }), - copilotCheckpoints: many(copilotCheckpoints), -})) - -export const documentRelations = relations(document, ({ one, many }) => ({ - knowledgeBase: one(knowledgeBase, { - fields: [document.knowledgeBaseId], - references: [knowledgeBase.id], - }), - embeddings: many(embedding), -})) - -export const embeddingRelations = relations(embedding, ({ one }) => ({ - knowledgeBase: one(knowledgeBase, { - fields: [embedding.knowledgeBaseId], - references: [knowledgeBase.id], - }), - document: one(document, { - fields: [embedding.documentId], - references: [document.id], - }), -})) - -export const templateStarsRelations = relations(templateStars, ({ one }) => ({ - user: one(user, { - fields: [templateStars.userId], - references: [user.id], - }), - template: one(templates, { - fields: [templateStars.templateId], - references: [templates.id], - }), -})) - -export const templatesRelations = relations(templates, ({ one, many }) => ({ - templateStars: many(templateStars), - workflow: one(workflow, { - fields: [templates.workflowId], - references: [workflow.id], - }), - user: one(user, { - fields: [templates.userId], - references: [user.id], - }), -})) - -export const settingsRelations = relations(settings, ({ one }) => ({ - user: one(user, { - fields: [settings.userId], - references: [user.id], - }), -})) - -export const userRateLimitsRelations = relations(userRateLimits, ({ one }) => ({ - user: one(user, { - fields: [userRateLimits.userId], - references: [user.id], - }), -})) - -export const webhookRelations = relations(webhook, ({ one }) => ({ - workflow: one(workflow, { - fields: [webhook.workflowId], - references: [workflow.id], - }), - workflowBlock: one(workflowBlocks, { - fields: [webhook.blockId], - references: [workflowBlocks.id], - }), -})) - -export const workflowScheduleRelations = relations(workflowSchedule, ({ one }) => ({ - workflow: one(workflow, { - fields: [workflowSchedule.workflowId], - references: [workflow.id], - }), - workflowBlock: 
one(workflowBlocks, { - fields: [workflowSchedule.blockId], - references: [workflowBlocks.id], - }), -})) - -export const copilotCheckpointsRelations = relations(copilotCheckpoints, ({ one }) => ({ - user: one(user, { - fields: [copilotCheckpoints.userId], - references: [user.id], - }), - workflow: one(workflow, { - fields: [copilotCheckpoints.workflowId], - references: [workflow.id], - }), - copilotChat: one(copilotChats, { - fields: [copilotCheckpoints.chatId], - references: [copilotChats.id], - }), -})) diff --git a/apps/sim/db/migrations/schema.ts b/apps/sim/db/migrations/schema.ts deleted file mode 100644 index 5d1283c66b..0000000000 --- a/apps/sim/db/migrations/schema.ts +++ /dev/null @@ -1,1472 +0,0 @@ -import { sql } from 'drizzle-orm' -import { - boolean, - check, - customType, - foreignKey, - index, - integer, - json, - jsonb, - numeric, - pgEnum, - pgTable, - text, - timestamp, - unique, - uniqueIndex, - uuid, - vector, -} from 'drizzle-orm/pg-core' - -// Custom type for PostgreSQL tsvector -const tsvector = customType<{ data: string }>({ - dataType() { - return 'tsvector' - }, -}) - -export const permissionType = pgEnum('permission_type', ['admin', 'write', 'read']) - -export const verification = pgTable('verification', { - id: text().primaryKey().notNull(), - identifier: text().notNull(), - value: text().notNull(), - expiresAt: timestamp('expires_at', { mode: 'string' }).notNull(), - createdAt: timestamp('created_at', { mode: 'string' }), - updatedAt: timestamp('updated_at', { mode: 'string' }), -}) - -export const account = pgTable( - 'account', - { - id: text().primaryKey().notNull(), - accountId: text('account_id').notNull(), - providerId: text('provider_id').notNull(), - userId: text('user_id').notNull(), - accessToken: text('access_token'), - refreshToken: text('refresh_token'), - idToken: text('id_token'), - accessTokenExpiresAt: timestamp('access_token_expires_at', { mode: 'string' }), - refreshTokenExpiresAt: 
timestamp('refresh_token_expires_at', { mode: 'string' }), - scope: text(), - password: text(), - createdAt: timestamp('created_at', { mode: 'string' }).notNull(), - updatedAt: timestamp('updated_at', { mode: 'string' }).notNull(), - }, - (table) => [ - foreignKey({ - columns: [table.userId], - foreignColumns: [user.id], - name: 'account_user_id_user_id_fk', - }).onDelete('cascade'), - ] -) - -export const waitlist = pgTable( - 'waitlist', - { - id: text().primaryKey().notNull(), - email: text().notNull(), - status: text().default('pending').notNull(), - createdAt: timestamp('created_at', { mode: 'string' }).defaultNow().notNull(), - updatedAt: timestamp('updated_at', { mode: 'string' }).defaultNow().notNull(), - }, - (table) => [unique('waitlist_email_unique').on(table.email)] -) - -export const environment = pgTable( - 'environment', - { - id: text().primaryKey().notNull(), - userId: text('user_id').notNull(), - variables: json().notNull(), - updatedAt: timestamp('updated_at', { mode: 'string' }).defaultNow().notNull(), - }, - (table) => [ - foreignKey({ - columns: [table.userId], - foreignColumns: [user.id], - name: 'environment_user_id_user_id_fk', - }).onDelete('cascade'), - unique('environment_user_id_unique').on(table.userId), - ] -) - -export const workflowLogs = pgTable( - 'workflow_logs', - { - id: text().primaryKey().notNull(), - workflowId: text('workflow_id').notNull(), - executionId: text('execution_id'), - level: text().notNull(), - message: text().notNull(), - createdAt: timestamp('created_at', { mode: 'string' }).defaultNow().notNull(), - duration: text(), - trigger: text(), - metadata: json(), - }, - (table) => [ - foreignKey({ - columns: [table.workflowId], - foreignColumns: [workflow.id], - name: 'workflow_logs_workflow_id_workflow_id_fk', - }).onDelete('cascade'), - ] -) - -export const apiKey = pgTable( - 'api_key', - { - id: text().primaryKey().notNull(), - userId: text('user_id').notNull(), - name: text().notNull(), - key: text().notNull(), 
- lastUsed: timestamp('last_used', { mode: 'string' }), - createdAt: timestamp('created_at', { mode: 'string' }).defaultNow().notNull(), - updatedAt: timestamp('updated_at', { mode: 'string' }).defaultNow().notNull(), - expiresAt: timestamp('expires_at', { mode: 'string' }), - }, - (table) => [ - foreignKey({ - columns: [table.userId], - foreignColumns: [user.id], - name: 'api_key_user_id_user_id_fk', - }).onDelete('cascade'), - unique('api_key_key_unique').on(table.key), - ] -) - -export const marketplace = pgTable( - 'marketplace', - { - id: text().primaryKey().notNull(), - workflowId: text('workflow_id').notNull(), - state: json().notNull(), - name: text().notNull(), - description: text(), - authorId: text('author_id').notNull(), - authorName: text('author_name').notNull(), - views: integer().default(0).notNull(), - category: text(), - createdAt: timestamp('created_at', { mode: 'string' }).defaultNow().notNull(), - updatedAt: timestamp('updated_at', { mode: 'string' }).defaultNow().notNull(), - }, - (table) => [ - foreignKey({ - columns: [table.workflowId], - foreignColumns: [workflow.id], - name: 'marketplace_workflow_id_workflow_id_fk', - }).onDelete('cascade'), - foreignKey({ - columns: [table.authorId], - foreignColumns: [user.id], - name: 'marketplace_author_id_user_id_fk', - }), - ] -) - -export const customTools = pgTable( - 'custom_tools', - { - id: text().primaryKey().notNull(), - userId: text('user_id').notNull(), - title: text().notNull(), - schema: json().notNull(), - code: text().notNull(), - createdAt: timestamp('created_at', { mode: 'string' }).defaultNow().notNull(), - updatedAt: timestamp('updated_at', { mode: 'string' }).defaultNow().notNull(), - }, - (table) => [ - foreignKey({ - columns: [table.userId], - foreignColumns: [user.id], - name: 'custom_tools_user_id_user_id_fk', - }).onDelete('cascade'), - ] -) - -export const user = pgTable( - 'user', - { - id: text().primaryKey().notNull(), - name: text().notNull(), - email: text().notNull(), - 
emailVerified: boolean('email_verified').notNull(), - image: text(), - createdAt: timestamp('created_at', { mode: 'string' }).notNull(), - updatedAt: timestamp('updated_at', { mode: 'string' }).notNull(), - stripeCustomerId: text('stripe_customer_id'), - }, - (table) => [unique('user_email_unique').on(table.email)] -) - -export const session = pgTable( - 'session', - { - id: text().primaryKey().notNull(), - expiresAt: timestamp('expires_at', { mode: 'string' }).notNull(), - token: text().notNull(), - createdAt: timestamp('created_at', { mode: 'string' }).notNull(), - updatedAt: timestamp('updated_at', { mode: 'string' }).notNull(), - ipAddress: text('ip_address'), - userAgent: text('user_agent'), - userId: text('user_id').notNull(), - activeOrganizationId: text('active_organization_id'), - }, - (table) => [ - foreignKey({ - columns: [table.userId], - foreignColumns: [user.id], - name: 'session_user_id_user_id_fk', - }).onDelete('cascade'), - foreignKey({ - columns: [table.activeOrganizationId], - foreignColumns: [organization.id], - name: 'session_active_organization_id_organization_id_fk', - }).onDelete('set null'), - unique('session_token_unique').on(table.token), - ] -) - -export const invitation = pgTable( - 'invitation', - { - id: text().primaryKey().notNull(), - email: text().notNull(), - inviterId: text('inviter_id').notNull(), - organizationId: text('organization_id').notNull(), - role: text().notNull(), - status: text().notNull(), - expiresAt: timestamp('expires_at', { mode: 'string' }).notNull(), - createdAt: timestamp('created_at', { mode: 'string' }).defaultNow().notNull(), - }, - (table) => [ - foreignKey({ - columns: [table.inviterId], - foreignColumns: [user.id], - name: 'invitation_inviter_id_user_id_fk', - }).onDelete('cascade'), - foreignKey({ - columns: [table.organizationId], - foreignColumns: [organization.id], - name: 'invitation_organization_id_organization_id_fk', - }).onDelete('cascade'), - ] -) - -export const member = pgTable( - 'member', 
- { - id: text().primaryKey().notNull(), - userId: text('user_id').notNull(), - organizationId: text('organization_id').notNull(), - role: text().notNull(), - createdAt: timestamp('created_at', { mode: 'string' }).defaultNow().notNull(), - }, - (table) => [ - foreignKey({ - columns: [table.userId], - foreignColumns: [user.id], - name: 'member_user_id_user_id_fk', - }).onDelete('cascade'), - foreignKey({ - columns: [table.organizationId], - foreignColumns: [organization.id], - name: 'member_organization_id_organization_id_fk', - }).onDelete('cascade'), - ] -) - -export const chat = pgTable( - 'chat', - { - id: text().primaryKey().notNull(), - workflowId: text('workflow_id').notNull(), - userId: text('user_id').notNull(), - subdomain: text().notNull(), - title: text().notNull(), - description: text(), - isActive: boolean('is_active').default(true).notNull(), - customizations: json().default({}), - authType: text('auth_type').default('public').notNull(), - password: text(), - allowedEmails: json('allowed_emails').default([]), - createdAt: timestamp('created_at', { mode: 'string' }).defaultNow().notNull(), - updatedAt: timestamp('updated_at', { mode: 'string' }).defaultNow().notNull(), - outputConfigs: json('output_configs').default([]), - }, - (table) => [ - uniqueIndex('subdomain_idx').using('btree', table.subdomain.asc().nullsLast().op('text_ops')), - foreignKey({ - columns: [table.workflowId], - foreignColumns: [workflow.id], - name: 'chat_workflow_id_workflow_id_fk', - }).onDelete('cascade'), - foreignKey({ - columns: [table.userId], - foreignColumns: [user.id], - name: 'chat_user_id_user_id_fk', - }).onDelete('cascade'), - ] -) - -export const workspace = pgTable( - 'workspace', - { - id: text().primaryKey().notNull(), - name: text().notNull(), - ownerId: text('owner_id').notNull(), - createdAt: timestamp('created_at', { mode: 'string' }).defaultNow().notNull(), - updatedAt: timestamp('updated_at', { mode: 'string' }).defaultNow().notNull(), - }, - (table) => [ - 
foreignKey({ - columns: [table.ownerId], - foreignColumns: [user.id], - name: 'workspace_owner_id_user_id_fk', - }).onDelete('cascade'), - ] -) - -export const organization = pgTable('organization', { - id: text().primaryKey().notNull(), - name: text().notNull(), - slug: text().notNull(), - logo: text(), - metadata: json(), - createdAt: timestamp('created_at', { mode: 'string' }).defaultNow().notNull(), - updatedAt: timestamp('updated_at', { mode: 'string' }).defaultNow().notNull(), -}) - -export const memory = pgTable( - 'memory', - { - id: text().primaryKey().notNull(), - workflowId: text('workflow_id'), - key: text().notNull(), - type: text().notNull(), - data: json().notNull(), - createdAt: timestamp('created_at', { mode: 'string' }).defaultNow().notNull(), - updatedAt: timestamp('updated_at', { mode: 'string' }).defaultNow().notNull(), - deletedAt: timestamp('deleted_at', { mode: 'string' }), - }, - (table) => [ - index('memory_key_idx').using('btree', table.key.asc().nullsLast().op('text_ops')), - index('memory_workflow_idx').using('btree', table.workflowId.asc().nullsLast().op('text_ops')), - uniqueIndex('memory_workflow_key_idx').using( - 'btree', - table.workflowId.asc().nullsLast().op('text_ops'), - table.key.asc().nullsLast().op('text_ops') - ), - foreignKey({ - columns: [table.workflowId], - foreignColumns: [workflow.id], - name: 'memory_workflow_id_workflow_id_fk', - }).onDelete('cascade'), - ] -) - -export const knowledgeBase = pgTable( - 'knowledge_base', - { - id: text().primaryKey().notNull(), - userId: text('user_id').notNull(), - workspaceId: text('workspace_id'), - name: text().notNull(), - description: text(), - tokenCount: integer('token_count').default(0).notNull(), - embeddingModel: text('embedding_model').default('text-embedding-3-small').notNull(), - embeddingDimension: integer('embedding_dimension').default(1536).notNull(), - chunkingConfig: json('chunking_config') - .default({ maxSize: 1024, minSize: 100, overlap: 200 }) - .notNull(), - 
deletedAt: timestamp('deleted_at', { mode: 'string' }), - createdAt: timestamp('created_at', { mode: 'string' }).defaultNow().notNull(), - updatedAt: timestamp('updated_at', { mode: 'string' }).defaultNow().notNull(), - }, - (table) => [ - index('kb_deleted_at_idx').using( - 'btree', - table.deletedAt.asc().nullsLast().op('timestamp_ops') - ), - index('kb_user_id_idx').using('btree', table.userId.asc().nullsLast().op('text_ops')), - index('kb_user_workspace_idx').using( - 'btree', - table.userId.asc().nullsLast().op('text_ops'), - table.workspaceId.asc().nullsLast().op('text_ops') - ), - index('kb_workspace_id_idx').using('btree', table.workspaceId.asc().nullsLast().op('text_ops')), - foreignKey({ - columns: [table.userId], - foreignColumns: [user.id], - name: 'knowledge_base_user_id_user_id_fk', - }).onDelete('cascade'), - foreignKey({ - columns: [table.workspaceId], - foreignColumns: [workspace.id], - name: 'knowledge_base_workspace_id_workspace_id_fk', - }).onDelete('cascade'), - ] -) - -export const workflow = pgTable( - 'workflow', - { - id: text().primaryKey().notNull(), - userId: text('user_id').notNull(), - name: text().notNull(), - description: text(), - state: json().notNull(), - lastSynced: timestamp('last_synced', { mode: 'string' }).notNull(), - createdAt: timestamp('created_at', { mode: 'string' }).notNull(), - updatedAt: timestamp('updated_at', { mode: 'string' }).notNull(), - isDeployed: boolean('is_deployed').default(false).notNull(), - deployedAt: timestamp('deployed_at', { mode: 'string' }), - color: text().default('#3972F6').notNull(), - collaborators: json().default([]).notNull(), - isPublished: boolean('is_published').default(false).notNull(), - runCount: integer('run_count').default(0).notNull(), - lastRunAt: timestamp('last_run_at', { mode: 'string' }), - variables: json().default({}), - marketplaceData: json('marketplace_data'), - deployedState: json('deployed_state'), - workspaceId: text('workspace_id'), - folderId: text('folder_id'), - }, 
- (table) => [ - foreignKey({ - columns: [table.userId], - foreignColumns: [user.id], - name: 'workflow_user_id_user_id_fk', - }).onDelete('cascade'), - foreignKey({ - columns: [table.workspaceId], - foreignColumns: [workspace.id], - name: 'workflow_workspace_id_workspace_id_fk', - }).onDelete('cascade'), - foreignKey({ - columns: [table.folderId], - foreignColumns: [workflowFolder.id], - name: 'workflow_folder_id_workflow_folder_id_fk', - }).onDelete('set null'), - ] -) - -export const workflowFolder = pgTable( - 'workflow_folder', - { - id: text().primaryKey().notNull(), - name: text().notNull(), - userId: text('user_id').notNull(), - workspaceId: text('workspace_id').notNull(), - parentId: text('parent_id'), - color: text().default('#6B7280'), - isExpanded: boolean('is_expanded').default(true).notNull(), - sortOrder: integer('sort_order').default(0).notNull(), - createdAt: timestamp('created_at', { mode: 'string' }).defaultNow().notNull(), - updatedAt: timestamp('updated_at', { mode: 'string' }).defaultNow().notNull(), - }, - (table) => [ - index('workflow_folder_parent_sort_idx').using( - 'btree', - table.parentId.asc().nullsLast().op('int4_ops'), - table.sortOrder.asc().nullsLast().op('text_ops') - ), - index('workflow_folder_user_idx').using('btree', table.userId.asc().nullsLast().op('text_ops')), - index('workflow_folder_workspace_parent_idx').using( - 'btree', - table.workspaceId.asc().nullsLast().op('text_ops'), - table.parentId.asc().nullsLast().op('text_ops') - ), - foreignKey({ - columns: [table.userId], - foreignColumns: [user.id], - name: 'workflow_folder_user_id_user_id_fk', - }).onDelete('cascade'), - foreignKey({ - columns: [table.workspaceId], - foreignColumns: [workspace.id], - name: 'workflow_folder_workspace_id_workspace_id_fk', - }).onDelete('cascade'), - ] -) - -export const workflowEdges = pgTable( - 'workflow_edges', - { - id: text().primaryKey().notNull(), - workflowId: text('workflow_id').notNull(), - sourceBlockId: 
text('source_block_id').notNull(), - targetBlockId: text('target_block_id').notNull(), - sourceHandle: text('source_handle'), - targetHandle: text('target_handle'), - createdAt: timestamp('created_at', { mode: 'string' }).defaultNow().notNull(), - }, - (table) => [ - index('workflow_edges_source_block_idx').using( - 'btree', - table.sourceBlockId.asc().nullsLast().op('text_ops') - ), - index('workflow_edges_target_block_idx').using( - 'btree', - table.targetBlockId.asc().nullsLast().op('text_ops') - ), - index('workflow_edges_workflow_id_idx').using( - 'btree', - table.workflowId.asc().nullsLast().op('text_ops') - ), - index('workflow_edges_workflow_source_idx').using( - 'btree', - table.workflowId.asc().nullsLast().op('text_ops'), - table.sourceBlockId.asc().nullsLast().op('text_ops') - ), - index('workflow_edges_workflow_target_idx').using( - 'btree', - table.workflowId.asc().nullsLast().op('text_ops'), - table.targetBlockId.asc().nullsLast().op('text_ops') - ), - foreignKey({ - columns: [table.workflowId], - foreignColumns: [workflow.id], - name: 'workflow_edges_workflow_id_workflow_id_fk', - }).onDelete('cascade'), - foreignKey({ - columns: [table.sourceBlockId], - foreignColumns: [workflowBlocks.id], - name: 'workflow_edges_source_block_id_workflow_blocks_id_fk', - }).onDelete('cascade'), - foreignKey({ - columns: [table.targetBlockId], - foreignColumns: [workflowBlocks.id], - name: 'workflow_edges_target_block_id_workflow_blocks_id_fk', - }).onDelete('cascade'), - ] -) - -export const workflowSubflows = pgTable( - 'workflow_subflows', - { - id: text().primaryKey().notNull(), - workflowId: text('workflow_id').notNull(), - type: text().notNull(), - config: jsonb().default({}).notNull(), - createdAt: timestamp('created_at', { mode: 'string' }).defaultNow().notNull(), - updatedAt: timestamp('updated_at', { mode: 'string' }).defaultNow().notNull(), - }, - (table) => [ - index('workflow_subflows_workflow_id_idx').using( - 'btree', - 
table.workflowId.asc().nullsLast().op('text_ops') - ), - index('workflow_subflows_workflow_type_idx').using( - 'btree', - table.workflowId.asc().nullsLast().op('text_ops'), - table.type.asc().nullsLast().op('text_ops') - ), - foreignKey({ - columns: [table.workflowId], - foreignColumns: [workflow.id], - name: 'workflow_subflows_workflow_id_workflow_id_fk', - }).onDelete('cascade'), - ] -) - -export const workspaceInvitation = pgTable( - 'workspace_invitation', - { - id: text().primaryKey().notNull(), - workspaceId: text('workspace_id').notNull(), - email: text().notNull(), - inviterId: text('inviter_id').notNull(), - role: text().default('member').notNull(), - status: text().default('pending').notNull(), - token: text().notNull(), - expiresAt: timestamp('expires_at', { mode: 'string' }).notNull(), - createdAt: timestamp('created_at', { mode: 'string' }).defaultNow().notNull(), - updatedAt: timestamp('updated_at', { mode: 'string' }).defaultNow().notNull(), - permissions: permissionType().default('admin').notNull(), - }, - (table) => [ - foreignKey({ - columns: [table.workspaceId], - foreignColumns: [workspace.id], - name: 'workspace_invitation_workspace_id_workspace_id_fk', - }).onDelete('cascade'), - foreignKey({ - columns: [table.inviterId], - foreignColumns: [user.id], - name: 'workspace_invitation_inviter_id_user_id_fk', - }).onDelete('cascade'), - unique('workspace_invitation_token_unique').on(table.token), - ] -) - -export const permissions = pgTable( - 'permissions', - { - id: text().primaryKey().notNull(), - userId: text('user_id').notNull(), - entityType: text('entity_type').notNull(), - entityId: text('entity_id').notNull(), - permissionType: permissionType('permission_type').notNull(), - createdAt: timestamp('created_at', { mode: 'string' }).defaultNow().notNull(), - updatedAt: timestamp('updated_at', { mode: 'string' }).defaultNow().notNull(), - }, - (table) => [ - index('permissions_entity_idx').using( - 'btree', - 
table.entityType.asc().nullsLast().op('text_ops'), - table.entityId.asc().nullsLast().op('text_ops') - ), - uniqueIndex('permissions_unique_constraint').using( - 'btree', - table.userId.asc().nullsLast().op('text_ops'), - table.entityType.asc().nullsLast().op('text_ops'), - table.entityId.asc().nullsLast().op('text_ops') - ), - index('permissions_user_entity_idx').using( - 'btree', - table.userId.asc().nullsLast().op('text_ops'), - table.entityType.asc().nullsLast().op('text_ops'), - table.entityId.asc().nullsLast().op('text_ops') - ), - index('permissions_user_entity_permission_idx').using( - 'btree', - table.userId.asc().nullsLast().op('text_ops'), - table.entityType.asc().nullsLast().op('text_ops'), - table.permissionType.asc().nullsLast().op('enum_ops') - ), - index('permissions_user_entity_type_idx').using( - 'btree', - table.userId.asc().nullsLast().op('text_ops'), - table.entityType.asc().nullsLast().op('text_ops') - ), - index('permissions_user_id_idx').using('btree', table.userId.asc().nullsLast().op('text_ops')), - foreignKey({ - columns: [table.userId], - foreignColumns: [user.id], - name: 'permissions_user_id_user_id_fk', - }).onDelete('cascade'), - ] -) - -export const workflowBlocks = pgTable( - 'workflow_blocks', - { - id: text().primaryKey().notNull(), - workflowId: text('workflow_id').notNull(), - type: text().notNull(), - name: text().notNull(), - positionX: numeric('position_x').notNull(), - positionY: numeric('position_y').notNull(), - enabled: boolean().default(true).notNull(), - horizontalHandles: boolean('horizontal_handles').default(true).notNull(), - isWide: boolean('is_wide').default(false).notNull(), - height: numeric().default('0').notNull(), - subBlocks: jsonb('sub_blocks').default({}).notNull(), - outputs: jsonb().default({}).notNull(), - data: jsonb().default({}), - parentId: text('parent_id'), - extent: text(), - createdAt: timestamp('created_at', { mode: 'string' }).defaultNow().notNull(), - updatedAt: timestamp('updated_at', { 
mode: 'string' }).defaultNow().notNull(), - advancedMode: boolean('advanced_mode').default(false).notNull(), - }, - (table) => [ - index('workflow_blocks_parent_id_idx').using( - 'btree', - table.parentId.asc().nullsLast().op('text_ops') - ), - index('workflow_blocks_workflow_id_idx').using( - 'btree', - table.workflowId.asc().nullsLast().op('text_ops') - ), - index('workflow_blocks_workflow_parent_idx').using( - 'btree', - table.workflowId.asc().nullsLast().op('text_ops'), - table.parentId.asc().nullsLast().op('text_ops') - ), - index('workflow_blocks_workflow_type_idx').using( - 'btree', - table.workflowId.asc().nullsLast().op('text_ops'), - table.type.asc().nullsLast().op('text_ops') - ), - foreignKey({ - columns: [table.workflowId], - foreignColumns: [workflow.id], - name: 'workflow_blocks_workflow_id_workflow_id_fk', - }).onDelete('cascade'), - ] -) - -export const userStats = pgTable( - 'user_stats', - { - id: text().primaryKey().notNull(), - userId: text('user_id').notNull(), - totalManualExecutions: integer('total_manual_executions').default(0).notNull(), - totalApiCalls: integer('total_api_calls').default(0).notNull(), - totalWebhookTriggers: integer('total_webhook_triggers').default(0).notNull(), - totalScheduledExecutions: integer('total_scheduled_executions').default(0).notNull(), - totalTokensUsed: integer('total_tokens_used').default(0).notNull(), - totalCost: numeric('total_cost').default('0').notNull(), - lastActive: timestamp('last_active', { mode: 'string' }).defaultNow().notNull(), - totalChatExecutions: integer('total_chat_executions').default(0).notNull(), - currentUsageLimit: numeric('current_usage_limit').default('5').notNull(), - usageLimitSetBy: text('usage_limit_set_by'), - usageLimitUpdatedAt: timestamp('usage_limit_updated_at', { mode: 'string' }).defaultNow(), - currentPeriodCost: numeric('current_period_cost').default('0').notNull(), - billingPeriodStart: timestamp('billing_period_start', { mode: 'string' }).defaultNow(), - 
billingPeriodEnd: timestamp('billing_period_end', { mode: 'string' }), - lastPeriodCost: numeric('last_period_cost').default('0'), - }, - (table) => [ - foreignKey({ - columns: [table.userId], - foreignColumns: [user.id], - name: 'user_stats_user_id_user_id_fk', - }).onDelete('cascade'), - unique('user_stats_user_id_unique').on(table.userId), - ] -) - -export const subscription = pgTable( - 'subscription', - { - id: text().primaryKey().notNull(), - plan: text().notNull(), - referenceId: text('reference_id').notNull(), - stripeCustomerId: text('stripe_customer_id'), - stripeSubscriptionId: text('stripe_subscription_id'), - status: text(), - periodStart: timestamp('period_start', { mode: 'string' }), - periodEnd: timestamp('period_end', { mode: 'string' }), - cancelAtPeriodEnd: boolean('cancel_at_period_end'), - seats: integer(), - trialStart: timestamp('trial_start', { mode: 'string' }), - trialEnd: timestamp('trial_end', { mode: 'string' }), - metadata: json(), - }, - (table) => [ - index('subscription_reference_status_idx').using( - 'btree', - table.referenceId.asc().nullsLast().op('text_ops'), - table.status.asc().nullsLast().op('text_ops') - ), - check( - 'check_enterprise_metadata', - sql`(plan <> 'enterprise'::text) OR ((metadata IS NOT NULL) AND (((metadata ->> 'perSeatAllowance'::text) IS NOT NULL) OR ((metadata ->> 'totalAllowance'::text) IS NOT NULL)))` - ), - ] -) - -export const workflowExecutionBlocks = pgTable( - 'workflow_execution_blocks', - { - id: text().primaryKey().notNull(), - executionId: text('execution_id').notNull(), - workflowId: text('workflow_id').notNull(), - blockId: text('block_id').notNull(), - blockName: text('block_name'), - blockType: text('block_type').notNull(), - startedAt: timestamp('started_at', { mode: 'string' }).notNull(), - endedAt: timestamp('ended_at', { mode: 'string' }), - durationMs: integer('duration_ms'), - status: text().notNull(), - errorMessage: text('error_message'), - errorStackTrace: text('error_stack_trace'), 
- inputData: jsonb('input_data'), - outputData: jsonb('output_data'), - costInput: numeric('cost_input', { precision: 10, scale: 6 }), - costOutput: numeric('cost_output', { precision: 10, scale: 6 }), - costTotal: numeric('cost_total', { precision: 10, scale: 6 }), - tokensPrompt: integer('tokens_prompt'), - tokensCompletion: integer('tokens_completion'), - tokensTotal: integer('tokens_total'), - modelUsed: text('model_used'), - metadata: jsonb(), - createdAt: timestamp('created_at', { mode: 'string' }).defaultNow().notNull(), - }, - (table) => [ - index('execution_blocks_block_id_idx').using( - 'btree', - table.blockId.asc().nullsLast().op('text_ops') - ), - index('execution_blocks_cost_idx').using( - 'btree', - table.costTotal.asc().nullsLast().op('numeric_ops') - ), - index('execution_blocks_duration_idx').using( - 'btree', - table.durationMs.asc().nullsLast().op('int4_ops') - ), - index('execution_blocks_execution_id_idx').using( - 'btree', - table.executionId.asc().nullsLast().op('text_ops') - ), - index('execution_blocks_execution_status_idx').using( - 'btree', - table.executionId.asc().nullsLast().op('text_ops'), - table.status.asc().nullsLast().op('text_ops') - ), - index('execution_blocks_started_at_idx').using( - 'btree', - table.startedAt.asc().nullsLast().op('timestamp_ops') - ), - index('execution_blocks_status_idx').using( - 'btree', - table.status.asc().nullsLast().op('text_ops') - ), - index('execution_blocks_workflow_execution_idx').using( - 'btree', - table.workflowId.asc().nullsLast().op('text_ops'), - table.executionId.asc().nullsLast().op('text_ops') - ), - index('execution_blocks_workflow_id_idx').using( - 'btree', - table.workflowId.asc().nullsLast().op('text_ops') - ), - foreignKey({ - columns: [table.workflowId], - foreignColumns: [workflow.id], - name: 'workflow_execution_blocks_workflow_id_workflow_id_fk', - }).onDelete('cascade'), - ] -) - -export const workflowExecutionLogs = pgTable( - 'workflow_execution_logs', - { - id: 
text().primaryKey().notNull(), - workflowId: text('workflow_id').notNull(), - executionId: text('execution_id').notNull(), - stateSnapshotId: text('state_snapshot_id').notNull(), - level: text().notNull(), - message: text().notNull(), - trigger: text().notNull(), - startedAt: timestamp('started_at', { mode: 'string' }).notNull(), - endedAt: timestamp('ended_at', { mode: 'string' }), - totalDurationMs: integer('total_duration_ms'), - blockCount: integer('block_count').default(0).notNull(), - successCount: integer('success_count').default(0).notNull(), - errorCount: integer('error_count').default(0).notNull(), - skippedCount: integer('skipped_count').default(0).notNull(), - totalCost: numeric('total_cost', { precision: 10, scale: 6 }), - totalInputCost: numeric('total_input_cost', { precision: 10, scale: 6 }), - totalOutputCost: numeric('total_output_cost', { precision: 10, scale: 6 }), - totalTokens: integer('total_tokens'), - metadata: jsonb().default({}).notNull(), - createdAt: timestamp('created_at', { mode: 'string' }).defaultNow().notNull(), - }, - (table) => [ - index('workflow_execution_logs_cost_idx').using( - 'btree', - table.totalCost.asc().nullsLast().op('numeric_ops') - ), - index('workflow_execution_logs_duration_idx').using( - 'btree', - table.totalDurationMs.asc().nullsLast().op('int4_ops') - ), - index('workflow_execution_logs_execution_id_idx').using( - 'btree', - table.executionId.asc().nullsLast().op('text_ops') - ), - uniqueIndex('workflow_execution_logs_execution_id_unique').using( - 'btree', - table.executionId.asc().nullsLast().op('text_ops') - ), - index('workflow_execution_logs_level_idx').using( - 'btree', - table.level.asc().nullsLast().op('text_ops') - ), - index('workflow_execution_logs_started_at_idx').using( - 'btree', - table.startedAt.asc().nullsLast().op('timestamp_ops') - ), - index('workflow_execution_logs_trigger_idx').using( - 'btree', - table.trigger.asc().nullsLast().op('text_ops') - ), - 
index('workflow_execution_logs_workflow_id_idx').using( - 'btree', - table.workflowId.asc().nullsLast().op('text_ops') - ), - foreignKey({ - columns: [table.workflowId], - foreignColumns: [workflow.id], - name: 'workflow_execution_logs_workflow_id_workflow_id_fk', - }).onDelete('cascade'), - foreignKey({ - columns: [table.stateSnapshotId], - foreignColumns: [workflowExecutionSnapshots.id], - name: 'workflow_execution_logs_state_snapshot_id_workflow_execution_sn', - }), - ] -) - -export const workflowExecutionSnapshots = pgTable( - 'workflow_execution_snapshots', - { - id: text().primaryKey().notNull(), - workflowId: text('workflow_id').notNull(), - stateHash: text('state_hash').notNull(), - stateData: jsonb('state_data').notNull(), - createdAt: timestamp('created_at', { mode: 'string' }).defaultNow().notNull(), - }, - (table) => [ - index('workflow_snapshots_created_at_idx').using( - 'btree', - table.createdAt.asc().nullsLast().op('timestamp_ops') - ), - index('workflow_snapshots_hash_idx').using( - 'btree', - table.stateHash.asc().nullsLast().op('text_ops') - ), - uniqueIndex('workflow_snapshots_workflow_hash_idx').using( - 'btree', - table.workflowId.asc().nullsLast().op('text_ops'), - table.stateHash.asc().nullsLast().op('text_ops') - ), - index('workflow_snapshots_workflow_id_idx').using( - 'btree', - table.workflowId.asc().nullsLast().op('text_ops') - ), - foreignKey({ - columns: [table.workflowId], - foreignColumns: [workflow.id], - name: 'workflow_execution_snapshots_workflow_id_workflow_id_fk', - }).onDelete('cascade'), - ] -) - -export const copilotChats = pgTable( - 'copilot_chats', - { - id: uuid().defaultRandom().primaryKey().notNull(), - userId: text('user_id').notNull(), - workflowId: text('workflow_id').notNull(), - title: text(), - messages: jsonb().default([]).notNull(), - model: text().default('claude-3-7-sonnet-latest').notNull(), - createdAt: timestamp('created_at', { mode: 'string' }).defaultNow().notNull(), - updatedAt: timestamp('updated_at', 
{ mode: 'string' }).defaultNow().notNull(), - previewYaml: text('preview_yaml'), - }, - (table) => [ - index('copilot_chats_created_at_idx').using( - 'btree', - table.createdAt.asc().nullsLast().op('timestamp_ops') - ), - index('copilot_chats_updated_at_idx').using( - 'btree', - table.updatedAt.asc().nullsLast().op('timestamp_ops') - ), - index('copilot_chats_user_id_idx').using( - 'btree', - table.userId.asc().nullsLast().op('text_ops') - ), - index('copilot_chats_user_workflow_idx').using( - 'btree', - table.userId.asc().nullsLast().op('text_ops'), - table.workflowId.asc().nullsLast().op('text_ops') - ), - index('copilot_chats_workflow_id_idx').using( - 'btree', - table.workflowId.asc().nullsLast().op('text_ops') - ), - foreignKey({ - columns: [table.userId], - foreignColumns: [user.id], - name: 'copilot_chats_user_id_user_id_fk', - }).onDelete('cascade'), - foreignKey({ - columns: [table.workflowId], - foreignColumns: [workflow.id], - name: 'copilot_chats_workflow_id_workflow_id_fk', - }).onDelete('cascade'), - ] -) - -export const docsEmbeddings = pgTable( - 'docs_embeddings', - { - chunkId: uuid('chunk_id').defaultRandom().primaryKey().notNull(), - chunkText: text('chunk_text').notNull(), - sourceDocument: text('source_document').notNull(), - sourceLink: text('source_link').notNull(), - headerText: text('header_text').notNull(), - headerLevel: integer('header_level').notNull(), - tokenCount: integer('token_count').notNull(), - embedding: vector({ dimensions: 1536 }).notNull(), - embeddingModel: text('embedding_model').default('text-embedding-3-small').notNull(), - metadata: jsonb().default({}).notNull(), - // tsvector column for full-text search - chunkTextTsv: tsvector('chunk_text_tsv').generatedAlwaysAs( - sql`to_tsvector('english'::regconfig, chunk_text)` - ), - createdAt: timestamp('created_at', { mode: 'string' }).defaultNow().notNull(), - updatedAt: timestamp('updated_at', { mode: 'string' }).defaultNow().notNull(), - }, - (table) => [ - 
index('docs_emb_chunk_text_fts_idx').using( - 'gin', - table.chunkTextTsv.asc().nullsLast().op('tsvector_ops') - ), - index('docs_emb_created_at_idx').using( - 'btree', - table.createdAt.asc().nullsLast().op('timestamp_ops') - ), - index('docs_emb_header_level_idx').using( - 'btree', - table.headerLevel.asc().nullsLast().op('int4_ops') - ), - index('docs_emb_metadata_gin_idx').using( - 'gin', - table.metadata.asc().nullsLast().op('jsonb_ops') - ), - index('docs_emb_model_idx').using( - 'btree', - table.embeddingModel.asc().nullsLast().op('text_ops') - ), - index('docs_emb_source_document_idx').using( - 'btree', - table.sourceDocument.asc().nullsLast().op('text_ops') - ), - index('docs_emb_source_header_idx').using( - 'btree', - table.sourceDocument.asc().nullsLast().op('text_ops'), - table.headerLevel.asc().nullsLast().op('int4_ops') - ), - index('docs_embedding_vector_hnsw_idx') - .using('hnsw', table.embedding.asc().nullsLast().op('vector_cosine_ops')) - .with({ m: '16', ef_construction: '64' }), - check('docs_embedding_not_null_check', sql`embedding IS NOT NULL`), - check('docs_header_level_check', sql`(header_level >= 1) AND (header_level <= 6)`), - ] -) - -export const document = pgTable( - 'document', - { - id: text().primaryKey().notNull(), - knowledgeBaseId: text('knowledge_base_id').notNull(), - filename: text().notNull(), - fileUrl: text('file_url').notNull(), - fileSize: integer('file_size').notNull(), - mimeType: text('mime_type').notNull(), - chunkCount: integer('chunk_count').default(0).notNull(), - tokenCount: integer('token_count').default(0).notNull(), - characterCount: integer('character_count').default(0).notNull(), - enabled: boolean().default(true).notNull(), - deletedAt: timestamp('deleted_at', { mode: 'string' }), - uploadedAt: timestamp('uploaded_at', { mode: 'string' }).defaultNow().notNull(), - tag1: text(), - tag2: text(), - tag3: text(), - tag4: text(), - tag5: text(), - tag6: text(), - tag7: text(), - processingStatus: 
text('processing_status').default('pending').notNull(), - processingStartedAt: timestamp('processing_started_at', { mode: 'string' }), - processingCompletedAt: timestamp('processing_completed_at', { mode: 'string' }), - processingError: text('processing_error'), - }, - (table) => [ - index('doc_filename_idx').using('btree', table.filename.asc().nullsLast().op('text_ops')), - index('doc_kb_id_idx').using('btree', table.knowledgeBaseId.asc().nullsLast().op('text_ops')), - index('doc_kb_uploaded_at_idx').using( - 'btree', - table.knowledgeBaseId.asc().nullsLast().op('text_ops'), - table.uploadedAt.asc().nullsLast().op('timestamp_ops') - ), - index('doc_processing_status_idx').using( - 'btree', - table.knowledgeBaseId.asc().nullsLast().op('text_ops'), - table.processingStatus.asc().nullsLast().op('text_ops') - ), - index('doc_tag1_idx').using('btree', table.tag1.asc().nullsLast().op('text_ops')), - index('doc_tag2_idx').using('btree', table.tag2.asc().nullsLast().op('text_ops')), - index('doc_tag3_idx').using('btree', table.tag3.asc().nullsLast().op('text_ops')), - index('doc_tag4_idx').using('btree', table.tag4.asc().nullsLast().op('text_ops')), - index('doc_tag5_idx').using('btree', table.tag5.asc().nullsLast().op('text_ops')), - index('doc_tag6_idx').using('btree', table.tag6.asc().nullsLast().op('text_ops')), - index('doc_tag7_idx').using('btree', table.tag7.asc().nullsLast().op('text_ops')), - foreignKey({ - columns: [table.knowledgeBaseId], - foreignColumns: [knowledgeBase.id], - name: 'document_knowledge_base_id_knowledge_base_id_fk', - }).onDelete('cascade'), - ] -) - -export const embedding = pgTable( - 'embedding', - { - id: text().primaryKey().notNull(), - knowledgeBaseId: text('knowledge_base_id').notNull(), - documentId: text('document_id').notNull(), - chunkIndex: integer('chunk_index').notNull(), - chunkHash: text('chunk_hash').notNull(), - content: text().notNull(), - contentLength: integer('content_length').notNull(), - tokenCount: 
integer('token_count').notNull(), - embedding: vector({ dimensions: 1536 }), - embeddingModel: text('embedding_model').default('text-embedding-3-small').notNull(), - startOffset: integer('start_offset').notNull(), - endOffset: integer('end_offset').notNull(), - createdAt: timestamp('created_at', { mode: 'string' }).defaultNow().notNull(), - updatedAt: timestamp('updated_at', { mode: 'string' }).defaultNow().notNull(), - // tsvector column for full-text search - contentTsv: tsvector('content_tsv').generatedAlwaysAs( - sql`to_tsvector('english'::regconfig, content)` - ), - enabled: boolean().default(true).notNull(), - tag1: text(), - tag2: text(), - tag3: text(), - tag4: text(), - tag5: text(), - tag6: text(), - tag7: text(), - }, - (table) => [ - index('emb_content_fts_idx').using( - 'gin', - table.contentTsv.asc().nullsLast().op('tsvector_ops') - ), - uniqueIndex('emb_doc_chunk_idx').using( - 'btree', - table.documentId.asc().nullsLast().op('int4_ops'), - table.chunkIndex.asc().nullsLast().op('text_ops') - ), - index('emb_doc_enabled_idx').using( - 'btree', - table.documentId.asc().nullsLast().op('bool_ops'), - table.enabled.asc().nullsLast().op('bool_ops') - ), - index('emb_doc_id_idx').using('btree', table.documentId.asc().nullsLast().op('text_ops')), - index('emb_kb_enabled_idx').using( - 'btree', - table.knowledgeBaseId.asc().nullsLast().op('text_ops'), - table.enabled.asc().nullsLast().op('text_ops') - ), - index('emb_kb_id_idx').using('btree', table.knowledgeBaseId.asc().nullsLast().op('text_ops')), - index('emb_kb_model_idx').using( - 'btree', - table.knowledgeBaseId.asc().nullsLast().op('text_ops'), - table.embeddingModel.asc().nullsLast().op('text_ops') - ), - index('emb_tag1_idx').using('btree', table.tag1.asc().nullsLast().op('text_ops')), - index('emb_tag2_idx').using('btree', table.tag2.asc().nullsLast().op('text_ops')), - index('emb_tag3_idx').using('btree', table.tag3.asc().nullsLast().op('text_ops')), - index('emb_tag4_idx').using('btree', 
table.tag4.asc().nullsLast().op('text_ops')), - index('emb_tag5_idx').using('btree', table.tag5.asc().nullsLast().op('text_ops')), - index('emb_tag6_idx').using('btree', table.tag6.asc().nullsLast().op('text_ops')), - index('emb_tag7_idx').using('btree', table.tag7.asc().nullsLast().op('text_ops')), - index('embedding_vector_hnsw_idx') - .using('hnsw', table.embedding.asc().nullsLast().op('vector_cosine_ops')) - .with({ m: '16', ef_construction: '64' }), - foreignKey({ - columns: [table.knowledgeBaseId], - foreignColumns: [knowledgeBase.id], - name: 'embedding_knowledge_base_id_knowledge_base_id_fk', - }).onDelete('cascade'), - foreignKey({ - columns: [table.documentId], - foreignColumns: [document.id], - name: 'embedding_document_id_document_id_fk', - }).onDelete('cascade'), - check('embedding_not_null_check', sql`embedding IS NOT NULL`), - ] -) - -export const templateStars = pgTable( - 'template_stars', - { - id: text().primaryKey().notNull(), - userId: text('user_id').notNull(), - templateId: text('template_id').notNull(), - starredAt: timestamp('starred_at', { mode: 'string' }).defaultNow().notNull(), - createdAt: timestamp('created_at', { mode: 'string' }).defaultNow().notNull(), - }, - (table) => [ - index('template_stars_starred_at_idx').using( - 'btree', - table.starredAt.asc().nullsLast().op('timestamp_ops') - ), - index('template_stars_template_id_idx').using( - 'btree', - table.templateId.asc().nullsLast().op('text_ops') - ), - index('template_stars_template_starred_at_idx').using( - 'btree', - table.templateId.asc().nullsLast().op('text_ops'), - table.starredAt.asc().nullsLast().op('timestamp_ops') - ), - index('template_stars_template_user_idx').using( - 'btree', - table.templateId.asc().nullsLast().op('text_ops'), - table.userId.asc().nullsLast().op('text_ops') - ), - index('template_stars_user_id_idx').using( - 'btree', - table.userId.asc().nullsLast().op('text_ops') - ), - index('template_stars_user_template_idx').using( - 'btree', - 
table.userId.asc().nullsLast().op('text_ops'), - table.templateId.asc().nullsLast().op('text_ops') - ), - uniqueIndex('template_stars_user_template_unique').using( - 'btree', - table.userId.asc().nullsLast().op('text_ops'), - table.templateId.asc().nullsLast().op('text_ops') - ), - foreignKey({ - columns: [table.userId], - foreignColumns: [user.id], - name: 'template_stars_user_id_user_id_fk', - }).onDelete('cascade'), - foreignKey({ - columns: [table.templateId], - foreignColumns: [templates.id], - name: 'template_stars_template_id_templates_id_fk', - }).onDelete('cascade'), - ] -) - -export const templates = pgTable( - 'templates', - { - id: text().primaryKey().notNull(), - workflowId: text('workflow_id').notNull(), - userId: text('user_id').notNull(), - name: text().notNull(), - description: text(), - author: text().notNull(), - views: integer().default(0).notNull(), - stars: integer().default(0).notNull(), - color: text().default('#3972F6').notNull(), - icon: text().default('FileText').notNull(), - category: text().notNull(), - state: jsonb().notNull(), - createdAt: timestamp('created_at', { mode: 'string' }).defaultNow().notNull(), - updatedAt: timestamp('updated_at', { mode: 'string' }).defaultNow().notNull(), - }, - (table) => [ - index('templates_category_idx').using('btree', table.category.asc().nullsLast().op('text_ops')), - index('templates_category_stars_idx').using( - 'btree', - table.category.asc().nullsLast().op('int4_ops'), - table.stars.asc().nullsLast().op('text_ops') - ), - index('templates_category_views_idx').using( - 'btree', - table.category.asc().nullsLast().op('int4_ops'), - table.views.asc().nullsLast().op('text_ops') - ), - index('templates_created_at_idx').using( - 'btree', - table.createdAt.asc().nullsLast().op('timestamp_ops') - ), - index('templates_stars_idx').using('btree', table.stars.asc().nullsLast().op('int4_ops')), - index('templates_updated_at_idx').using( - 'btree', - table.updatedAt.asc().nullsLast().op('timestamp_ops') - ), 
- index('templates_user_category_idx').using( - 'btree', - table.userId.asc().nullsLast().op('text_ops'), - table.category.asc().nullsLast().op('text_ops') - ), - index('templates_user_id_idx').using('btree', table.userId.asc().nullsLast().op('text_ops')), - index('templates_views_idx').using('btree', table.views.asc().nullsLast().op('int4_ops')), - index('templates_workflow_id_idx').using( - 'btree', - table.workflowId.asc().nullsLast().op('text_ops') - ), - foreignKey({ - columns: [table.workflowId], - foreignColumns: [workflow.id], - name: 'templates_workflow_id_workflow_id_fk', - }), - foreignKey({ - columns: [table.userId], - foreignColumns: [user.id], - name: 'templates_user_id_user_id_fk', - }).onDelete('cascade'), - ] -) - -export const settings = pgTable( - 'settings', - { - id: text().primaryKey().notNull(), - userId: text('user_id').notNull(), - general: json().default({}).notNull(), - updatedAt: timestamp('updated_at', { mode: 'string' }).defaultNow().notNull(), - theme: text().default('system').notNull(), - autoConnect: boolean('auto_connect').default(true).notNull(), - autoFillEnvVars: boolean('auto_fill_env_vars').default(true).notNull(), - telemetryEnabled: boolean('telemetry_enabled').default(true).notNull(), - telemetryNotifiedUser: boolean('telemetry_notified_user').default(false).notNull(), - emailPreferences: json('email_preferences').default({}).notNull(), - autoPan: boolean('auto_pan').default(true).notNull(), - consoleExpandedByDefault: boolean('console_expanded_by_default').default(true).notNull(), - }, - (table) => [ - foreignKey({ - columns: [table.userId], - foreignColumns: [user.id], - name: 'settings_user_id_user_id_fk', - }).onDelete('cascade'), - unique('settings_user_id_unique').on(table.userId), - ] -) - -export const userRateLimits = pgTable( - 'user_rate_limits', - { - userId: text('user_id').primaryKey().notNull(), - syncApiRequests: integer('sync_api_requests').default(0).notNull(), - asyncApiRequests: 
integer('async_api_requests').default(0).notNull(), - windowStart: timestamp('window_start', { mode: 'string' }).defaultNow().notNull(), - lastRequestAt: timestamp('last_request_at', { mode: 'string' }).defaultNow().notNull(), - isRateLimited: boolean('is_rate_limited').default(false).notNull(), - rateLimitResetAt: timestamp('rate_limit_reset_at', { mode: 'string' }), - }, - (table) => [ - foreignKey({ - columns: [table.userId], - foreignColumns: [user.id], - name: 'user_rate_limits_user_id_user_id_fk', - }).onDelete('cascade'), - ] -) - -export const webhook = pgTable( - 'webhook', - { - id: text().primaryKey().notNull(), - workflowId: text('workflow_id').notNull(), - path: text().notNull(), - provider: text(), - isActive: boolean('is_active').default(true).notNull(), - createdAt: timestamp('created_at', { mode: 'string' }).defaultNow().notNull(), - updatedAt: timestamp('updated_at', { mode: 'string' }).defaultNow().notNull(), - providerConfig: json('provider_config'), - blockId: text('block_id'), - }, - (table) => [ - uniqueIndex('path_idx').using('btree', table.path.asc().nullsLast().op('text_ops')), - foreignKey({ - columns: [table.workflowId], - foreignColumns: [workflow.id], - name: 'webhook_workflow_id_workflow_id_fk', - }).onDelete('cascade'), - foreignKey({ - columns: [table.blockId], - foreignColumns: [workflowBlocks.id], - name: 'webhook_block_id_workflow_blocks_id_fk', - }).onDelete('cascade'), - ] -) - -export const workflowSchedule = pgTable( - 'workflow_schedule', - { - id: text().primaryKey().notNull(), - workflowId: text('workflow_id').notNull(), - cronExpression: text('cron_expression'), - nextRunAt: timestamp('next_run_at', { mode: 'string' }), - lastRanAt: timestamp('last_ran_at', { mode: 'string' }), - triggerType: text('trigger_type').notNull(), - timezone: text().default('UTC').notNull(), - createdAt: timestamp('created_at', { mode: 'string' }).defaultNow().notNull(), - updatedAt: timestamp('updated_at', { mode: 'string' 
}).defaultNow().notNull(), - failedCount: integer('failed_count').default(0).notNull(), - status: text().default('active').notNull(), - lastFailedAt: timestamp('last_failed_at', { mode: 'string' }), - blockId: text('block_id'), - }, - (table) => [ - uniqueIndex('workflow_schedule_workflow_block_unique').using( - 'btree', - table.workflowId.asc().nullsLast().op('text_ops'), - table.blockId.asc().nullsLast().op('text_ops') - ), - foreignKey({ - columns: [table.workflowId], - foreignColumns: [workflow.id], - name: 'workflow_schedule_workflow_id_workflow_id_fk', - }).onDelete('cascade'), - foreignKey({ - columns: [table.blockId], - foreignColumns: [workflowBlocks.id], - name: 'workflow_schedule_block_id_workflow_blocks_id_fk', - }).onDelete('cascade'), - ] -) - -export const copilotCheckpoints = pgTable( - 'copilot_checkpoints', - { - id: uuid().defaultRandom().primaryKey().notNull(), - userId: text('user_id').notNull(), - workflowId: text('workflow_id').notNull(), - chatId: uuid('chat_id').notNull(), - yaml: text().notNull(), - createdAt: timestamp('created_at', { mode: 'string' }).defaultNow().notNull(), - updatedAt: timestamp('updated_at', { mode: 'string' }).defaultNow().notNull(), - }, - (table) => [ - index('copilot_checkpoints_chat_created_at_idx').using( - 'btree', - table.chatId.asc().nullsLast().op('uuid_ops'), - table.createdAt.asc().nullsLast().op('uuid_ops') - ), - index('copilot_checkpoints_chat_id_idx').using( - 'btree', - table.chatId.asc().nullsLast().op('uuid_ops') - ), - index('copilot_checkpoints_created_at_idx').using( - 'btree', - table.createdAt.asc().nullsLast().op('timestamp_ops') - ), - index('copilot_checkpoints_user_id_idx').using( - 'btree', - table.userId.asc().nullsLast().op('text_ops') - ), - index('copilot_checkpoints_user_workflow_idx').using( - 'btree', - table.userId.asc().nullsLast().op('text_ops'), - table.workflowId.asc().nullsLast().op('text_ops') - ), - index('copilot_checkpoints_workflow_chat_idx').using( - 'btree', - 
table.workflowId.asc().nullsLast().op('text_ops'), - table.chatId.asc().nullsLast().op('uuid_ops') - ), - index('copilot_checkpoints_workflow_id_idx').using( - 'btree', - table.workflowId.asc().nullsLast().op('text_ops') - ), - foreignKey({ - columns: [table.userId], - foreignColumns: [user.id], - name: 'copilot_checkpoints_user_id_user_id_fk', - }).onDelete('cascade'), - foreignKey({ - columns: [table.workflowId], - foreignColumns: [workflow.id], - name: 'copilot_checkpoints_workflow_id_workflow_id_fk', - }).onDelete('cascade'), - foreignKey({ - columns: [table.chatId], - foreignColumns: [copilotChats.id], - name: 'copilot_checkpoints_chat_id_copilot_chats_id_fk', - }).onDelete('cascade'), - ] -) diff --git a/apps/sim/executor/__test-utils__/executor-mocks.ts b/apps/sim/executor/__test-utils__/executor-mocks.ts index 94dda1279c..06941d6363 100644 --- a/apps/sim/executor/__test-utils__/executor-mocks.ts +++ b/apps/sim/executor/__test-utils__/executor-mocks.ts @@ -8,7 +8,7 @@ export const createMockHandler = ( handlerName: string, options?: { canHandleCondition?: (block: any) => boolean - executeResult?: any + executeResult?: any | ((inputs: any) => any) } ) => { const defaultCanHandle = (block: any) => @@ -20,7 +20,12 @@ export const createMockHandler = ( return vi.fn().mockImplementation(() => ({ canHandle: options?.canHandleCondition || defaultCanHandle, - execute: vi.fn().mockResolvedValue(options?.executeResult || defaultExecuteResult), + execute: vi.fn().mockImplementation(async (block, inputs) => { + if (typeof options?.executeResult === 'function') { + return options.executeResult(inputs) + } + return options?.executeResult || defaultExecuteResult + }), })) } @@ -29,6 +34,11 @@ export const createMockHandler = ( */ export const setupHandlerMocks = () => { vi.doMock('@/executor/handlers', () => ({ + TriggerBlockHandler: createMockHandler('trigger', { + canHandleCondition: (block) => + block.metadata?.category === 'triggers' || 
block.config?.params?.triggerMode === true, + executeResult: (inputs: any) => inputs || {}, + }), AgentBlockHandler: createMockHandler('agent'), RouterBlockHandler: createMockHandler('router'), ConditionBlockHandler: createMockHandler('condition'), diff --git a/apps/sim/executor/handlers/index.ts b/apps/sim/executor/handlers/index.ts index 20c65456ef..c2acc8a39f 100644 --- a/apps/sim/executor/handlers/index.ts +++ b/apps/sim/executor/handlers/index.ts @@ -8,6 +8,7 @@ import { LoopBlockHandler } from '@/executor/handlers/loop/loop-handler' import { ParallelBlockHandler } from '@/executor/handlers/parallel/parallel-handler' import { ResponseBlockHandler } from '@/executor/handlers/response/response-handler' import { RouterBlockHandler } from '@/executor/handlers/router/router-handler' +import { TriggerBlockHandler } from '@/executor/handlers/trigger/trigger-handler' import { WorkflowBlockHandler } from '@/executor/handlers/workflow/workflow-handler' export { @@ -21,5 +22,6 @@ export { ParallelBlockHandler, ResponseBlockHandler, RouterBlockHandler, + TriggerBlockHandler, WorkflowBlockHandler, } diff --git a/apps/sim/executor/handlers/trigger/trigger-handler.test.ts b/apps/sim/executor/handlers/trigger/trigger-handler.test.ts new file mode 100644 index 0000000000..196a038c2e --- /dev/null +++ b/apps/sim/executor/handlers/trigger/trigger-handler.test.ts @@ -0,0 +1,323 @@ +import '@/executor/__test-utils__/mock-dependencies' + +import { beforeEach, describe, expect, it } from 'vitest' +import { TriggerBlockHandler } from '@/executor/handlers/trigger/trigger-handler' +import type { ExecutionContext } from '@/executor/types' +import type { SerializedBlock } from '@/serializer/types' + +describe('TriggerBlockHandler', () => { + let handler: TriggerBlockHandler + let mockContext: ExecutionContext + + beforeEach(() => { + handler = new TriggerBlockHandler() + + mockContext = { + workflowId: 'test-workflow-id', + blockStates: new Map(), + blockLogs: [], + metadata: { duration: 
0 }, + environmentVariables: {}, + decisions: { router: new Map(), condition: new Map() }, + loopIterations: new Map(), + loopItems: new Map(), + executedBlocks: new Set(), + activeExecutionPath: new Set(), + completedLoops: new Set(), + } + }) + + describe('canHandle', () => { + it.concurrent('should handle blocks with triggers category', () => { + const triggerBlock: SerializedBlock = { + id: 'trigger-1', + metadata: { id: 'schedule', name: 'Schedule Block', category: 'triggers' }, + position: { x: 0, y: 0 }, + config: { tool: 'schedule', params: {} }, + inputs: {}, + outputs: {}, + enabled: true, + } + + expect(handler.canHandle(triggerBlock)).toBe(true) + }) + + it.concurrent('should handle blocks with triggerMode enabled', () => { + const gmailTriggerBlock: SerializedBlock = { + id: 'gmail-1', + metadata: { id: 'gmail', name: 'Gmail Block', category: 'tools' }, + position: { x: 0, y: 0 }, + config: { tool: 'gmail', params: { triggerMode: true } }, + inputs: {}, + outputs: {}, + enabled: true, + } + + expect(handler.canHandle(gmailTriggerBlock)).toBe(true) + }) + + it.concurrent('should not handle regular tool blocks without triggerMode', () => { + const toolBlock: SerializedBlock = { + id: 'tool-1', + metadata: { id: 'gmail', name: 'Gmail Block', category: 'tools' }, + position: { x: 0, y: 0 }, + config: { tool: 'gmail', params: { triggerMode: false } }, + inputs: {}, + outputs: {}, + enabled: true, + } + + expect(handler.canHandle(toolBlock)).toBe(false) + }) + + it.concurrent('should not handle blocks without trigger indicators', () => { + const regularBlock: SerializedBlock = { + id: 'regular-1', + metadata: { id: 'api', name: 'API Block', category: 'tools' }, + position: { x: 0, y: 0 }, + config: { tool: 'api', params: {} }, + inputs: {}, + outputs: {}, + enabled: true, + } + + expect(handler.canHandle(regularBlock)).toBe(false) + }) + + it.concurrent('should handle generic webhook blocks', () => { + const webhookBlock: SerializedBlock = { + id: 
'webhook-1', + metadata: { id: 'generic_webhook', name: 'Generic Webhook', category: 'triggers' }, + position: { x: 0, y: 0 }, + config: { tool: 'generic_webhook', params: {} }, + inputs: {}, + outputs: {}, + enabled: true, + } + + expect(handler.canHandle(webhookBlock)).toBe(true) + }) + }) + + describe('execute', () => { + it.concurrent('should return inputs directly when provided', async () => { + const triggerBlock: SerializedBlock = { + id: 'trigger-1', + metadata: { id: 'gmail', name: 'Gmail Trigger', category: 'triggers' }, + position: { x: 0, y: 0 }, + config: { tool: 'gmail', params: {} }, + inputs: {}, + outputs: {}, + enabled: true, + } + + const triggerInputs = { + email: { + id: '12345', + subject: 'Test Email', + from: 'test@example.com', + body: 'Hello world', + }, + timestamp: '2023-01-01T12:00:00Z', + } + + const result = await handler.execute(triggerBlock, triggerInputs, mockContext) + + expect(result).toEqual(triggerInputs) + }) + + it.concurrent('should return empty object when no inputs provided', async () => { + const triggerBlock: SerializedBlock = { + id: 'trigger-1', + metadata: { id: 'schedule', name: 'Schedule Trigger', category: 'triggers' }, + position: { x: 0, y: 0 }, + config: { tool: 'schedule', params: {} }, + inputs: {}, + outputs: {}, + enabled: true, + } + + const result = await handler.execute(triggerBlock, {}, mockContext) + + expect(result).toEqual({}) + }) + + it.concurrent('should handle webhook payload inputs', async () => { + const webhookBlock: SerializedBlock = { + id: 'webhook-1', + metadata: { id: 'generic_webhook', name: 'Generic Webhook', category: 'triggers' }, + position: { x: 0, y: 0 }, + config: { tool: 'generic_webhook', params: {} }, + inputs: {}, + outputs: {}, + enabled: true, + } + + const webhookInputs = { + payload: { + event: 'user.created', + data: { + user: { + id: 'user123', + email: 'user@example.com', + }, + }, + }, + headers: { + 'content-type': 'application/json', + }, + method: 'POST', + } + + 
const result = await handler.execute(webhookBlock, webhookInputs, mockContext) + + expect(result).toEqual(webhookInputs) + }) + + it.concurrent('should handle Outlook trigger inputs', async () => { + const outlookBlock: SerializedBlock = { + id: 'outlook-1', + metadata: { id: 'outlook', name: 'Outlook Block', category: 'tools' }, + position: { x: 0, y: 0 }, + config: { tool: 'outlook', params: { triggerMode: true } }, + inputs: {}, + outputs: {}, + enabled: true, + } + + const outlookInputs = { + email: { + id: 'outlook123', + subject: 'Meeting Invitation', + from: 'colleague@company.com', + bodyPreview: 'Join us for the quarterly review...', + }, + timestamp: '2023-01-01T14:30:00Z', + } + + const result = await handler.execute(outlookBlock, outlookInputs, mockContext) + + expect(result).toEqual(outlookInputs) + }) + + it.concurrent('should handle schedule trigger with no inputs', async () => { + const scheduleBlock: SerializedBlock = { + id: 'schedule-1', + metadata: { id: 'schedule', name: 'Daily Schedule', category: 'triggers' }, + position: { x: 0, y: 0 }, + config: { tool: 'schedule', params: { scheduleType: 'daily' } }, + inputs: {}, + outputs: {}, + enabled: true, + } + + const result = await handler.execute(scheduleBlock, {}, mockContext) + + // Schedule triggers typically don't have input data, just trigger the workflow + expect(result).toEqual({}) + }) + + it.concurrent('should handle complex nested trigger data', async () => { + const triggerBlock: SerializedBlock = { + id: 'complex-trigger-1', + metadata: { id: 'webhook', name: 'Complex Webhook', category: 'triggers' }, + position: { x: 0, y: 0 }, + config: { tool: 'webhook', params: {} }, + inputs: {}, + outputs: {}, + enabled: true, + } + + const complexInputs = { + webhook: { + data: { + provider: 'github', + payload: { + action: 'opened', + pull_request: { + id: 123, + title: 'Fix bug in authentication', + user: { login: 'developer' }, + base: { ref: 'main' }, + head: { ref: 'fix-auth-bug' }, + }, + 
}, + headers: { 'x-github-event': 'pull_request' }, + }, + }, + timestamp: '2023-01-01T15:45:00Z', + } + + const result = await handler.execute(triggerBlock, complexInputs, mockContext) + + expect(result).toEqual(complexInputs) + }) + }) + + describe('integration scenarios', () => { + it.concurrent('should work with different trigger block types', () => { + const testCases = [ + { + name: 'Gmail in trigger mode', + block: { + id: 'gmail-trigger', + metadata: { id: 'gmail', category: 'tools' }, + config: { tool: 'gmail', params: { triggerMode: true } }, + }, + shouldHandle: true, + }, + { + name: 'Generic webhook', + block: { + id: 'webhook-trigger', + metadata: { id: 'generic_webhook', category: 'triggers' }, + config: { tool: 'generic_webhook', params: {} }, + }, + shouldHandle: true, + }, + { + name: 'Schedule block', + block: { + id: 'schedule-trigger', + metadata: { id: 'schedule', category: 'triggers' }, + config: { tool: 'schedule', params: {} }, + }, + shouldHandle: true, + }, + { + name: 'Regular API block', + block: { + id: 'api-block', + metadata: { id: 'api', category: 'tools' }, + config: { tool: 'api', params: {} }, + }, + shouldHandle: false, + }, + { + name: 'Gmail in tool mode', + block: { + id: 'gmail-tool', + metadata: { id: 'gmail', category: 'tools' }, + config: { tool: 'gmail', params: { triggerMode: false } }, + }, + shouldHandle: false, + }, + ] + + testCases.forEach(({ name, block, shouldHandle }) => { + const serializedBlock: SerializedBlock = { + ...block, + position: { x: 0, y: 0 }, + inputs: {}, + outputs: {}, + enabled: true, + } as SerializedBlock + + expect( + handler.canHandle(serializedBlock), + `${name} should ${shouldHandle ? 
'' : 'not '}be handled` + ).toBe(shouldHandle) + }) + }) + }) +}) diff --git a/apps/sim/executor/handlers/trigger/trigger-handler.ts b/apps/sim/executor/handlers/trigger/trigger-handler.ts new file mode 100644 index 0000000000..54b0638b06 --- /dev/null +++ b/apps/sim/executor/handlers/trigger/trigger-handler.ts @@ -0,0 +1,45 @@ +import { createLogger } from '@/lib/logs/console/logger' +import type { BlockHandler, ExecutionContext } from '@/executor/types' +import type { SerializedBlock } from '@/serializer/types' + +const logger = createLogger('TriggerBlockHandler') + +/** + * Handler for trigger blocks (Gmail, Webhook, Schedule, etc.) + * These blocks don't execute tools - they provide input data to workflows + */ +export class TriggerBlockHandler implements BlockHandler { + canHandle(block: SerializedBlock): boolean { + // Handle blocks that are triggers - either by category or by having triggerMode enabled + const isTriggerCategory = block.metadata?.category === 'triggers' + + // For blocks that can be both tools and triggers (like Gmail/Outlook), check if triggerMode is enabled + // This would come from the serialized block config/params + const hasTriggerMode = block.config?.params?.triggerMode === true + + return isTriggerCategory || hasTriggerMode + } + + async execute( + block: SerializedBlock, + inputs: Record, + _context: ExecutionContext + ): Promise { + logger.info(`Executing trigger block: ${block.id} (Type: ${block.metadata?.id})`) + + // Trigger blocks don't execute anything - they just pass through their input data + // The input data comes from the webhook execution context or initial workflow inputs + + // For trigger blocks, return the inputs directly - these contain the webhook/trigger data + if (inputs && Object.keys(inputs).length > 0) { + logger.debug(`Returning trigger inputs for block ${block.id}`, { + inputKeys: Object.keys(inputs), + }) + return inputs + } + + // Fallback - return empty object for trigger blocks with no inputs + 
logger.debug(`No inputs provided for trigger block ${block.id}, returning empty object`) + return {} + } +} diff --git a/apps/sim/executor/index.test.ts b/apps/sim/executor/index.test.ts index 6d96af2347..3d101ed333 100644 --- a/apps/sim/executor/index.test.ts +++ b/apps/sim/executor/index.test.ts @@ -8,6 +8,7 @@ * resolving inputs and dependencies, and managing errors. */ import { afterEach, beforeEach, describe, expect, vi } from 'vitest' +import type { BlockOutput, ParamType } from '@/blocks/types' import { Executor } from '@/executor' import { createMinimalWorkflow, @@ -187,6 +188,61 @@ describe('Executor', () => { ) }) + it.concurrent( + 'should NOT throw error if starter block has no outgoing connections but has trigger blocks', + () => { + const workflow = createMinimalWorkflow() + workflow.connections = [] + + // Add a trigger block (webhook trigger) + workflow.blocks.push({ + id: 'webhook-trigger', + position: { x: 0, y: 0 }, + metadata: { + category: 'triggers', + id: 'webhook', + }, + config: { + tool: 'webhook', + params: {}, + }, + inputs: {}, + outputs: {}, + enabled: true, + }) + + expect(() => new Executor(workflow)).not.toThrow() + } + ) + + it.concurrent( + 'should NOT throw error if starter block has no outgoing connections but has triggerMode block', + () => { + const workflow = createMinimalWorkflow() + workflow.connections = [] + + // Add a block with triggerMode enabled + workflow.blocks.push({ + id: 'gmail-trigger', + position: { x: 0, y: 0 }, + metadata: { + id: 'gmail', + }, + config: { + tool: 'gmail', + params: { + triggerMode: true, + }, + }, + inputs: {}, + outputs: {}, + enabled: true, + }) + + expect(() => new Executor(workflow)).not.toThrow() + } + ) + it.concurrent('should throw error if connection references non-existent source block', () => { const workflow = createMinimalWorkflow() workflow.connections.push({ @@ -978,26 +1034,29 @@ describe('Executor', () => { blocks: [ { id: 'starter', + position: { x: 0, y: 0 }, metadata: { 
id: BlockType.STARTER }, - subBlocks: {}, + config: { tool: 'starter', params: {} }, + inputs: {}, + outputs: {}, enabled: true, }, { id: 'agent1', + position: { x: 100, y: 0 }, metadata: { id: BlockType.AGENT, name: 'Agent 1' }, - subBlocks: { - model: { value: 'gpt-4o' }, - input: { value: 'Hello' }, - }, + config: { tool: 'agent', params: { model: 'gpt-4o', input: 'Hello' } }, + inputs: {}, + outputs: {}, enabled: true, }, { id: 'agent2', + position: { x: 200, y: 0 }, metadata: { id: BlockType.AGENT, name: 'Agent 2' }, - subBlocks: { - model: { value: 'gpt-4o' }, - input: { value: 'Hello' }, - }, + config: { tool: 'agent', params: { model: 'gpt-4o', input: 'Hello' } }, + inputs: {}, + outputs: {}, enabled: true, }, ], @@ -1005,8 +1064,8 @@ describe('Executor', () => { { source: 'starter', sourceHandle: 'out', target: 'agent1', targetHandle: 'in' }, { source: 'starter', sourceHandle: 'out', target: 'agent2', targetHandle: 'in' }, ], - loops: [], - parallels: [], + loops: {}, + parallels: {}, } const executor = new Executor(workflow) @@ -1055,4 +1114,55 @@ describe('Executor', () => { } ) }) + + /** + * Trigger handler integration tests + */ + describe('trigger block handling', () => { + it.concurrent('should not interfere with regular tool blocks', async () => { + const workflow = { + version: '1.0', + blocks: [ + { + id: 'starter', + position: { x: -100, y: 0 }, + metadata: { id: BlockType.STARTER, name: 'Starter Block' }, + config: { tool: 'starter', params: {} }, + inputs: {} as Record, + outputs: {} as Record, + enabled: true, + }, + { + id: 'api-block', + position: { x: 0, y: 0 }, + metadata: { id: BlockType.API, name: 'API Block', category: 'tools' }, + config: { tool: 'api', params: {} }, + inputs: { url: 'string' as ParamType }, + outputs: { response: 'json' as BlockOutput }, + enabled: true, + }, + ], + connections: [{ source: 'starter', target: 'api-block' }], + loops: {}, + } + + const executor = new Executor({ + workflow, + workflowInput: { url: 
'https://api.example.com' }, + }) + + // The TriggerBlockHandler should NOT handle regular tool blocks + expect( + (executor as any).blockHandlers[0].canHandle({ + id: 'api-block', + metadata: { id: BlockType.API, category: 'tools' }, + config: { tool: 'api', params: {} }, + position: { x: 0, y: 0 }, + inputs: {}, + outputs: {}, + enabled: true, + }) + ).toBe(false) + }) + }) }) diff --git a/apps/sim/executor/index.ts b/apps/sim/executor/index.ts index 9b6f392b24..ab5cef2c54 100644 --- a/apps/sim/executor/index.ts +++ b/apps/sim/executor/index.ts @@ -13,6 +13,7 @@ import { ParallelBlockHandler, ResponseBlockHandler, RouterBlockHandler, + TriggerBlockHandler, WorkflowBlockHandler, } from '@/executor/handlers' import { LoopManager } from '@/executor/loops/loops' @@ -149,6 +150,7 @@ export class Executor { this.pathTracker = new PathTracker(this.actualWorkflow) this.blockHandlers = [ + new TriggerBlockHandler(), new AgentBlockHandler(), new RouterBlockHandler(this.pathTracker), new ConditionBlockHandler(this.pathTracker, this.resolver), @@ -619,12 +621,19 @@ export class Executor { throw new Error('Starter block cannot have incoming connections') } - // Only check outgoing connections for starter blocks, not trigger blocks - const outgoingFromStarter = this.actualWorkflow.connections.filter( - (conn) => conn.source === starterBlock.id - ) - if (outgoingFromStarter.length === 0) { - throw new Error('Starter block must have at least one outgoing connection') + // Check if there are any trigger blocks on the canvas + const hasTriggerBlocks = this.actualWorkflow.blocks.some((block) => { + return block.metadata?.category === 'triggers' || block.config?.params?.triggerMode === true + }) + + // Only check outgoing connections for starter blocks if there are no trigger blocks + if (!hasTriggerBlocks) { + const outgoingFromStarter = this.actualWorkflow.connections.filter( + (conn) => conn.source === starterBlock.id + ) + if (outgoingFromStarter.length === 0) { + throw new 
Error('Starter block must have at least one outgoing connection') + } } } diff --git a/apps/sim/hooks/use-collaborative-workflow.ts b/apps/sim/hooks/use-collaborative-workflow.ts index 5bf53e7fd1..356221c440 100644 --- a/apps/sim/hooks/use-collaborative-workflow.ts +++ b/apps/sim/hooks/use-collaborative-workflow.ts @@ -98,7 +98,15 @@ export function useCollaborativeWorkflow() { payload.position, payload.data, payload.parentId, - payload.extent + payload.extent, + { + enabled: payload.enabled, + horizontalHandles: payload.horizontalHandles, + isWide: payload.isWide, + advancedMode: payload.advancedMode, + triggerMode: payload.triggerMode ?? false, + height: payload.height, + } ) if (payload.autoConnectEdge) { workflowStore.addEdge(payload.autoConnectEdge) @@ -152,6 +160,9 @@ export function useCollaborativeWorkflow() { case 'update-advanced-mode': workflowStore.setBlockAdvancedMode(payload.id, payload.advancedMode) break + case 'update-trigger-mode': + workflowStore.setBlockTriggerMode(payload.id, payload.triggerMode) + break case 'toggle-handles': { const currentBlock = workflowStore.blocks[payload.id] if (currentBlock && currentBlock.horizontalHandles !== payload.horizontalHandles) { @@ -167,7 +178,15 @@ export function useCollaborativeWorkflow() { payload.position, payload.data, payload.parentId, - payload.extent + payload.extent, + { + enabled: payload.enabled, + horizontalHandles: payload.horizontalHandles, + isWide: payload.isWide, + advancedMode: payload.advancedMode, + triggerMode: payload.triggerMode ?? 
false, + height: payload.height, + } ) // Handle auto-connect edge if present if (payload.autoConnectEdge) { @@ -462,7 +481,9 @@ export function useCollaborativeWorkflow() { // Skip if applying remote changes if (isApplyingRemoteChange.current) { - workflowStore.addBlock(id, type, name, position, data, parentId, extent) + workflowStore.addBlock(id, type, name, position, data, parentId, extent, { + triggerMode: false, + }) if (autoConnectEdge) { workflowStore.addEdge(autoConnectEdge) } @@ -485,7 +506,9 @@ export function useCollaborativeWorkflow() { }) // Apply locally first (immediate UI feedback) - workflowStore.addBlock(id, type, name, position, data, parentId, extent) + workflowStore.addBlock(id, type, name, position, data, parentId, extent, { + triggerMode: false, + }) if (autoConnectEdge) { workflowStore.addEdge(autoConnectEdge) } @@ -526,6 +549,7 @@ export function useCollaborativeWorkflow() { horizontalHandles: true, isWide: false, advancedMode: false, + triggerMode: false, height: 0, // Default height, will be set by the UI parentId, extent, @@ -551,7 +575,9 @@ export function useCollaborativeWorkflow() { }) // Apply locally - workflowStore.addBlock(id, type, name, position, data, parentId, extent) + workflowStore.addBlock(id, type, name, position, data, parentId, extent, { + triggerMode: false, + }) if (autoConnectEdge) { workflowStore.addEdge(autoConnectEdge) } @@ -670,6 +696,23 @@ export function useCollaborativeWorkflow() { [executeQueuedOperation, workflowStore] ) + const collaborativeToggleBlockTriggerMode = useCallback( + (id: string) => { + const currentBlock = workflowStore.blocks[id] + if (!currentBlock) return + + const newTriggerMode = !currentBlock.triggerMode + + executeQueuedOperation( + 'update-trigger-mode', + 'block', + { id, triggerMode: newTriggerMode }, + () => workflowStore.toggleBlockTriggerMode(id) + ) + }, + [executeQueuedOperation, workflowStore] + ) + const collaborativeToggleBlockHandles = useCallback( (id: string) => { const 
currentBlock = workflowStore.blocks[id] @@ -841,6 +884,7 @@ export function useCollaborativeWorkflow() { horizontalHandles: sourceBlock.horizontalHandles ?? true, isWide: sourceBlock.isWide ?? false, advancedMode: sourceBlock.advancedMode ?? false, + triggerMode: false, // Always duplicate as normal mode to avoid webhook conflicts height: sourceBlock.height || 0, } @@ -857,6 +901,7 @@ export function useCollaborativeWorkflow() { horizontalHandles: sourceBlock.horizontalHandles, isWide: sourceBlock.isWide, advancedMode: sourceBlock.advancedMode, + triggerMode: false, // Always duplicate as normal mode height: sourceBlock.height, } ) @@ -875,6 +920,7 @@ export function useCollaborativeWorkflow() { horizontalHandles: sourceBlock.horizontalHandles, isWide: sourceBlock.isWide, advancedMode: sourceBlock.advancedMode, + triggerMode: false, // Always duplicate as normal mode height: sourceBlock.height, } ) @@ -1096,6 +1142,7 @@ export function useCollaborativeWorkflow() { collaborativeUpdateParentId, collaborativeToggleBlockWide, collaborativeToggleBlockAdvancedMode, + collaborativeToggleBlockTriggerMode, collaborativeToggleBlockHandles, collaborativeDuplicateBlock, collaborativeAddEdge, diff --git a/apps/sim/lib/workflows/db-helpers.ts b/apps/sim/lib/workflows/db-helpers.ts index 2073dc65c8..8e2bee03ac 100644 --- a/apps/sim/lib/workflows/db-helpers.ts +++ b/apps/sim/lib/workflows/db-helpers.ts @@ -103,6 +103,7 @@ export async function loadWorkflowFromNormalizedTables( horizontalHandles: block.horizontalHandles, isWide: block.isWide, advancedMode: block.advancedMode, + triggerMode: block.triggerMode, height: Number(block.height), subBlocks: block.subBlocks || {}, outputs: block.outputs || {}, @@ -188,6 +189,7 @@ export async function saveWorkflowToNormalizedTables( horizontalHandles: block.horizontalHandles ?? true, isWide: block.isWide ?? false, advancedMode: block.advancedMode ?? false, + triggerMode: block.triggerMode ?? 
false, height: String(block.height || 0), subBlocks: block.subBlocks || {}, outputs: block.outputs || {}, diff --git a/apps/sim/scripts/migrate-workflow-states.ts b/apps/sim/scripts/migrate-workflow-states.ts index 87e5e0252f..fb81ac19fd 100755 --- a/apps/sim/scripts/migrate-workflow-states.ts +++ b/apps/sim/scripts/migrate-workflow-states.ts @@ -130,6 +130,7 @@ async function migrateWorkflowStates(specificWorkflowIds?: string[] | null) { horizontalHandles: block.horizontalHandles ?? true, isWide: block.isWide ?? false, advancedMode: block.advancedMode ?? false, + triggerMode: block.triggerMode ?? false, height: String(block.height || 0), subBlocks: block.subBlocks || {}, outputs: block.outputs || {}, diff --git a/apps/sim/socket-server/database/operations.ts b/apps/sim/socket-server/database/operations.ts index 4b12cb6b7a..0cf9f8489c 100644 --- a/apps/sim/socket-server/database/operations.ts +++ b/apps/sim/socket-server/database/operations.ts @@ -560,6 +560,28 @@ async function handleBlockOperationTx( break } + case 'update-trigger-mode': { + if (!payload.id || payload.triggerMode === undefined) { + throw new Error('Missing required fields for update trigger mode operation') + } + + const updateResult = await tx + .update(workflowBlocks) + .set({ + triggerMode: payload.triggerMode, + updatedAt: new Date(), + }) + .where(and(eq(workflowBlocks.id, payload.id), eq(workflowBlocks.workflowId, workflowId))) + .returning({ id: workflowBlocks.id }) + + if (updateResult.length === 0) { + throw new Error(`Block ${payload.id} not found in workflow ${workflowId}`) + } + + logger.debug(`Updated block trigger mode: ${payload.id} -> ${payload.triggerMode}`) + break + } + case 'toggle-handles': { if (!payload.id || payload.horizontalHandles === undefined) { throw new Error('Missing required fields for toggle handles operation') diff --git a/apps/sim/socket-server/middleware/permissions.ts b/apps/sim/socket-server/middleware/permissions.ts index 3be2c835ca..a94ae21b1e 100644 --- 
a/apps/sim/socket-server/middleware/permissions.ts +++ b/apps/sim/socket-server/middleware/permissions.ts @@ -102,6 +102,7 @@ export async function verifyOperationPermission( 'update-parent', 'update-wide', 'update-advanced-mode', + 'update-trigger-mode', 'toggle-handles', 'duplicate', ], @@ -115,6 +116,7 @@ export async function verifyOperationPermission( 'update-parent', 'update-wide', 'update-advanced-mode', + 'update-trigger-mode', 'toggle-handles', 'duplicate', ], diff --git a/apps/sim/socket-server/validation/schemas.ts b/apps/sim/socket-server/validation/schemas.ts index c36cc0ca2b..d7bee359da 100644 --- a/apps/sim/socket-server/validation/schemas.ts +++ b/apps/sim/socket-server/validation/schemas.ts @@ -25,6 +25,7 @@ export const BlockOperationSchema = z.object({ 'update-parent', 'update-wide', 'update-advanced-mode', + 'update-trigger-mode', 'toggle-handles', 'duplicate', ]), @@ -44,6 +45,7 @@ export const BlockOperationSchema = z.object({ horizontalHandles: z.boolean().optional(), isWide: z.boolean().optional(), advancedMode: z.boolean().optional(), + triggerMode: z.boolean().optional(), height: z.number().optional(), autoConnectEdge: AutoConnectEdgeSchema.optional(), // Add support for auto-connect edges }), diff --git a/apps/sim/stores/workflows/registry/store.ts b/apps/sim/stores/workflows/registry/store.ts index 762ef9c8d0..19ef7b9659 100644 --- a/apps/sim/stores/workflows/registry/store.ts +++ b/apps/sim/stores/workflows/registry/store.ts @@ -731,6 +731,7 @@ export const useWorkflowRegistry = create()( horizontalHandles: true, isWide: false, advancedMode: false, + triggerMode: false, height: 0, } @@ -1107,6 +1108,7 @@ export const useWorkflowRegistry = create()( horizontalHandles: true, isWide: false, advancedMode: false, + triggerMode: false, height: 0, } diff --git a/apps/sim/stores/workflows/workflow/store.ts b/apps/sim/stores/workflows/workflow/store.ts index e9b147f590..a12426d8c2 100644 --- a/apps/sim/stores/workflows/workflow/store.ts +++ 
b/apps/sim/stores/workflows/workflow/store.ts @@ -101,6 +101,7 @@ export const useWorkflowStore = create()( horizontalHandles?: boolean isWide?: boolean advancedMode?: boolean + triggerMode?: boolean height?: number } ) => { @@ -127,6 +128,7 @@ export const useWorkflowStore = create()( horizontalHandles: blockProperties?.horizontalHandles ?? true, isWide: blockProperties?.isWide ?? false, advancedMode: blockProperties?.advancedMode ?? false, + triggerMode: blockProperties?.triggerMode ?? false, height: blockProperties?.height ?? 0, data: nodeData, }, @@ -177,6 +179,7 @@ export const useWorkflowStore = create()( horizontalHandles: blockProperties?.horizontalHandles ?? true, isWide: blockProperties?.isWide ?? false, advancedMode: blockProperties?.advancedMode ?? false, + triggerMode: blockProperties?.triggerMode ?? false, height: blockProperties?.height ?? 0, data: nodeData, }, @@ -746,6 +749,22 @@ export const useWorkflowStore = create()( // Note: Socket.IO handles real-time sync automatically }, + setBlockTriggerMode: (id: string, triggerMode: boolean) => { + set((state) => ({ + blocks: { + ...state.blocks, + [id]: { + ...state.blocks[id], + triggerMode, + }, + }, + edges: [...state.edges], + loops: { ...state.loops }, + })) + get().updateLastSaved() + // Note: Socket.IO handles real-time sync automatically + }, + updateBlockHeight: (id: string, height: number) => { set((state) => ({ blocks: { @@ -991,6 +1010,85 @@ export const useWorkflowStore = create()( // Note: Socket.IO handles real-time sync automatically }, + toggleBlockTriggerMode: (id: string) => { + const block = get().blocks[id] + if (!block) return + + const newTriggerMode = !block.triggerMode + + // When switching TO trigger mode, remove all incoming connections + let filteredEdges = [...get().edges] + if (newTriggerMode) { + // Remove edges where this block is the target + filteredEdges = filteredEdges.filter((edge) => edge.target !== id) + logger.info( + `Removed ${get().edges.length - 
filteredEdges.length} incoming connections for trigger mode`, + { + blockId: id, + blockType: block.type, + } + ) + } + + const newState = { + blocks: { + ...get().blocks, + [id]: { + ...block, + triggerMode: newTriggerMode, + }, + }, + edges: filteredEdges, + loops: { ...get().loops }, + parallels: { ...get().parallels }, + } + + set(newState) + pushHistory(set, get, newState, `Toggle trigger mode for ${block.type} block`) + get().updateLastSaved() + + // Handle webhook enable/disable when toggling trigger mode + const handleWebhookToggle = async () => { + try { + const activeWorkflowId = useWorkflowRegistry.getState().activeWorkflowId + if (!activeWorkflowId) return + + // Check if there's a webhook for this block + const response = await fetch( + `/api/webhooks?workflowId=${activeWorkflowId}&blockId=${id}` + ) + if (response.ok) { + const data = await response.json() + if (data.webhooks && data.webhooks.length > 0) { + const webhook = data.webhooks[0].webhook + + // Update webhook's isActive status based on trigger mode + const updateResponse = await fetch(`/api/webhooks/${webhook.id}`, { + method: 'PATCH', + headers: { + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + isActive: newTriggerMode, + }), + }) + + if (!updateResponse.ok) { + console.error('Failed to update webhook status') + } + } + } + } catch (error) { + console.error('Error toggling webhook status:', error) + } + } + + // Handle webhook toggle asynchronously + handleWebhookToggle() + + // Note: Socket.IO handles real-time sync automatically + }, + // Parallel block methods implementation updateParallelCount: (parallelId: string, count: number) => { const block = get().blocks[parallelId] diff --git a/apps/sim/stores/workflows/workflow/types.ts b/apps/sim/stores/workflows/workflow/types.ts index 3ccc21637d..42d12ba0db 100644 --- a/apps/sim/stores/workflows/workflow/types.ts +++ b/apps/sim/stores/workflows/workflow/types.ts @@ -74,6 +74,7 @@ export interface BlockState { isWide?: 
boolean height?: number advancedMode?: boolean + triggerMode?: boolean data?: BlockData } @@ -167,6 +168,7 @@ export interface WorkflowActions { horizontalHandles?: boolean isWide?: boolean advancedMode?: boolean + triggerMode?: boolean height?: number } ) => void @@ -185,6 +187,7 @@ export interface WorkflowActions { toggleBlockWide: (id: string) => void setBlockWide: (id: string, isWide: boolean) => void setBlockAdvancedMode: (id: string, advancedMode: boolean) => void + setBlockTriggerMode: (id: string, triggerMode: boolean) => void updateBlockHeight: (id: string, height: number) => void triggerUpdate: () => void updateLoopCount: (loopId: string, count: number) => void @@ -199,6 +202,7 @@ export interface WorkflowActions { setWebhookStatus: (hasActiveWebhook: boolean) => void revertToDeployedState: (deployedState: WorkflowState) => void toggleBlockAdvancedMode: (id: string) => void + toggleBlockTriggerMode: (id: string) => void // Add the sync control methods to the WorkflowActions interface sync: SyncControl diff --git a/apps/sim/tools/airtable/create_records.ts b/apps/sim/tools/airtable/create_records.ts index 7f7e8bee36..c88e575c85 100644 --- a/apps/sim/tools/airtable/create_records.ts +++ b/apps/sim/tools/airtable/create_records.ts @@ -69,4 +69,23 @@ export const airtableCreateRecordsTool: ToolConfig { return `Failed to create Airtable records: ${error.message || 'Unknown error'}` }, + + outputs: { + records: { + type: 'json', + description: 'Array of created Airtable records', + items: { + type: 'object', + properties: { + id: { type: 'string' }, + createdTime: { type: 'string' }, + fields: { type: 'object' }, + }, + }, + }, + metadata: { + type: 'json', + description: 'Operation metadata', + }, + }, } diff --git a/apps/sim/tools/airtable/get_record.ts b/apps/sim/tools/airtable/get_record.ts index a789575e8a..206a578ea5 100644 --- a/apps/sim/tools/airtable/get_record.ts +++ b/apps/sim/tools/airtable/get_record.ts @@ -72,4 +72,15 @@ export const 
airtableGetRecordTool: ToolConfig { return `Failed to list Airtable records: ${error.message || 'Unknown error'}` }, + + outputs: { + records: { + type: 'json', + description: 'Array of retrieved Airtable records', + items: { + type: 'object', + properties: { + id: { type: 'string' }, + createdTime: { type: 'string' }, + fields: { type: 'object' }, + }, + }, + }, + metadata: { + type: 'json', + description: 'Operation metadata including pagination offset and total records count', + }, + }, } diff --git a/apps/sim/tools/airtable/update_multiple_records.ts b/apps/sim/tools/airtable/update_multiple_records.ts index 3c00190c0b..45c757de37 100644 --- a/apps/sim/tools/airtable/update_multiple_records.ts +++ b/apps/sim/tools/airtable/update_multiple_records.ts @@ -83,4 +83,23 @@ export const airtableUpdateMultipleRecordsTool: ToolConfig< // logger.error('Airtable tool error:', error) return `Failed to update multiple Airtable records: ${error.message || 'Unknown error'}` }, + + outputs: { + records: { + type: 'json', + description: 'Array of updated Airtable records', + items: { + type: 'object', + properties: { + id: { type: 'string' }, + createdTime: { type: 'string' }, + fields: { type: 'object' }, + }, + }, + }, + metadata: { + type: 'json', + description: 'Operation metadata including record count and updated record IDs', + }, + }, } diff --git a/apps/sim/tools/airtable/update_record.ts b/apps/sim/tools/airtable/update_record.ts index 3f17cbb75d..c455cca58f 100644 --- a/apps/sim/tools/airtable/update_record.ts +++ b/apps/sim/tools/airtable/update_record.ts @@ -44,9 +44,7 @@ export const airtableUpdateRecordTool: ToolConfig = { description: 'Search for academic papers on ArXiv by keywords, authors, titles, or other fields.', version: '1.0.0', + outputs: { + papers: { + type: 'json', + description: 'Array of papers matching the search query', + items: { + type: 'object', + properties: { + id: { type: 'string' }, + title: { type: 'string' }, + summary: { type: 'string' 
}, + authors: { type: 'string' }, + published: { type: 'string' }, + updated: { type: 'string' }, + link: { type: 'string' }, + pdfLink: { type: 'string' }, + categories: { type: 'string' }, + primaryCategory: { type: 'string' }, + comment: { type: 'string' }, + journalRef: { type: 'string' }, + doi: { type: 'string' }, + }, + }, + }, + totalResults: { + type: 'number', + description: 'Total number of results found for the search query', + }, + }, + params: { searchQuery: { type: 'string', diff --git a/apps/sim/tools/browser_use/run_task.ts b/apps/sim/tools/browser_use/run_task.ts index 8de6d20d23..578bad87e4 100644 --- a/apps/sim/tools/browser_use/run_task.ts +++ b/apps/sim/tools/browser_use/run_task.ts @@ -45,7 +45,15 @@ export const runTaskTool: ToolConfig params.webhookURL, method: 'POST', diff --git a/apps/sim/tools/confluence/retrieve.ts b/apps/sim/tools/confluence/retrieve.ts index 192aeaacca..a8415182cb 100644 --- a/apps/sim/tools/confluence/retrieve.ts +++ b/apps/sim/tools/confluence/retrieve.ts @@ -15,6 +15,13 @@ export const confluenceRetrieveTool: ToolConfig< provider: 'confluence', }, + outputs: { + ts: { type: 'string', description: 'Timestamp of retrieval' }, + pageId: { type: 'string', description: 'Confluence page ID' }, + content: { type: 'string', description: 'Page content with HTML tags stripped' }, + title: { type: 'string', description: 'Page title' }, + }, + params: { accessToken: { type: 'string', diff --git a/apps/sim/tools/confluence/update.ts b/apps/sim/tools/confluence/update.ts index d109cbb0fa..07bc5edaf9 100644 --- a/apps/sim/tools/confluence/update.ts +++ b/apps/sim/tools/confluence/update.ts @@ -12,6 +12,13 @@ export const confluenceUpdateTool: ToolConfig = { }, }, + outputs: { + answer: { + type: 'string', + description: 'AI-generated answer to the question', + }, + citations: { + type: 'array', + description: 'Sources and citations for the answer', + items: { + type: 'object', + properties: { + title: { type: 'string', 
description: 'The title of the cited source' }, + url: { type: 'string', description: 'The URL of the cited source' }, + text: { type: 'string', description: 'Relevant text from the cited source' }, + }, + }, + }, + }, + request: { url: 'https://api.exa.ai/answer', method: 'POST', diff --git a/apps/sim/tools/exa/find_similar_links.ts b/apps/sim/tools/exa/find_similar_links.ts index 45aafe8774..41d182d4a5 100644 --- a/apps/sim/tools/exa/find_similar_links.ts +++ b/apps/sim/tools/exa/find_similar_links.ts @@ -38,6 +38,28 @@ export const findSimilarLinksTool: ToolConfig< }, }, + outputs: { + similarLinks: { + type: 'array', + description: 'Similar links found with titles, URLs, and text snippets', + items: { + type: 'object', + properties: { + title: { type: 'string', description: 'The title of the similar webpage' }, + url: { type: 'string', description: 'The URL of the similar webpage' }, + text: { + type: 'string', + description: 'Text snippet or full content from the similar webpage', + }, + score: { + type: 'number', + description: 'Similarity score indicating how similar the page is', + }, + }, + }, + }, + }, + request: { url: 'https://api.exa.ai/findSimilar', method: 'POST', diff --git a/apps/sim/tools/exa/get_contents.ts b/apps/sim/tools/exa/get_contents.ts index 67c552dbb1..2d1e39e857 100644 --- a/apps/sim/tools/exa/get_contents.ts +++ b/apps/sim/tools/exa/get_contents.ts @@ -36,6 +36,22 @@ export const getContentsTool: ToolConfig = description: 'Exa AI API Key', }, }, + + outputs: { + research: { + type: 'array', + description: 'Comprehensive research findings with citations and summaries', + items: { + type: 'object', + properties: { + title: { type: 'string' }, + url: { type: 'string' }, + summary: { type: 'string' }, + text: { type: 'string' }, + publishedDate: { type: 'string' }, + author: { type: 'string' }, + score: { type: 'number' }, + }, + }, + }, + }, request: { url: 'https://api.exa.ai/research/v0/tasks', method: 'POST', diff --git 
a/apps/sim/tools/exa/search.ts b/apps/sim/tools/exa/search.ts index a27b00148b..38d9c62059 100644 --- a/apps/sim/tools/exa/search.ts +++ b/apps/sim/tools/exa/search.ts @@ -41,6 +41,27 @@ export const searchTool: ToolConfig = { }, }, + outputs: { + results: { + type: 'array', + description: 'Search results with titles, URLs, and text snippets', + items: { + type: 'object', + properties: { + title: { type: 'string', description: 'The title of the search result' }, + url: { type: 'string', description: 'The URL of the search result' }, + publishedDate: { type: 'string', description: 'Date when the content was published' }, + author: { type: 'string', description: 'The author of the content' }, + summary: { type: 'string', description: 'A brief summary of the content' }, + favicon: { type: 'string', description: "URL of the site's favicon" }, + image: { type: 'string', description: 'URL of a representative image from the page' }, + text: { type: 'string', description: 'Text snippet or full content from the page' }, + score: { type: 'number', description: 'Relevance score for the search result' }, + }, + }, + }, + }, + request: { url: 'https://api.exa.ai/search', method: 'POST', diff --git a/apps/sim/tools/firecrawl/crawl.ts b/apps/sim/tools/firecrawl/crawl.ts index 5c8f0c3e8e..3b18d8340b 100644 --- a/apps/sim/tools/firecrawl/crawl.ts +++ b/apps/sim/tools/firecrawl/crawl.ts @@ -153,4 +153,34 @@ export const crawlTool: ToolConfig } return error }, + + outputs: { + pages: { + type: 'array', + description: 'Array of crawled pages with their content and metadata', + items: { + type: 'object', + properties: { + markdown: { type: 'string', description: 'Page content in markdown format' }, + html: { type: 'string', description: 'Page HTML content' }, + metadata: { + type: 'object', + description: 'Page metadata', + properties: { + title: { type: 'string', description: 'Page title' }, + description: { type: 'string', description: 'Page description' }, + language: { type: 
'string', description: 'Page language' }, + sourceURL: { type: 'string', description: 'Source URL of the page' }, + statusCode: { type: 'number', description: 'HTTP status code' }, + }, + }, + }, + }, + }, + total: { type: 'number', description: 'Total number of pages found during crawl' }, + creditsUsed: { + type: 'number', + description: 'Number of credits consumed by the crawl operation', + }, + }, } diff --git a/apps/sim/tools/firecrawl/scrape.ts b/apps/sim/tools/firecrawl/scrape.ts index 7d8496ba6a..2e92b8e007 100644 --- a/apps/sim/tools/firecrawl/scrape.ts +++ b/apps/sim/tools/firecrawl/scrape.ts @@ -64,4 +64,13 @@ export const scrapeTool: ToolConfig = { const code = error.error?.type || error.code return `${message} (${code})` }, + + outputs: { + markdown: { type: 'string', description: 'Page content in markdown format' }, + html: { type: 'string', description: 'Raw HTML content of the page' }, + metadata: { + type: 'object', + description: 'Page metadata including SEO and Open Graph information', + }, + }, } diff --git a/apps/sim/tools/firecrawl/search.ts b/apps/sim/tools/firecrawl/search.ts index cb5e47b0bd..574f17fbc3 100644 --- a/apps/sim/tools/firecrawl/search.ts +++ b/apps/sim/tools/firecrawl/search.ts @@ -55,4 +55,26 @@ export const searchTool: ToolConfig = { const code = error.error?.type || error.code return `${message} (${code})` }, + + outputs: { + data: { + type: 'array', + description: 'Search results data', + items: { + type: 'object', + properties: { + title: { type: 'string' }, + description: { type: 'string' }, + url: { type: 'string' }, + markdown: { type: 'string' }, + html: { type: 'string' }, + rawHtml: { type: 'string' }, + links: { type: 'array' }, + screenshot: { type: 'string' }, + metadata: { type: 'object' }, + }, + }, + }, + warning: { type: 'string', description: 'Warning messages from the search operation' }, + }, } diff --git a/apps/sim/tools/github/comment.ts b/apps/sim/tools/github/comment.ts index 2dbe2e3697..763c6c5363 
100644 --- a/apps/sim/tools/github/comment.ts +++ b/apps/sim/tools/github/comment.ts @@ -134,4 +134,12 @@ export const commentTool: ToolConfig transformError: (error) => { return error instanceof Error ? error.message : 'Failed to create comment' }, + + outputs: { + content: { type: 'string', description: 'Human-readable comment confirmation' }, + metadata: { + type: 'object', + description: 'Comment metadata', + }, + }, } diff --git a/apps/sim/tools/github/latest_commit.ts b/apps/sim/tools/github/latest_commit.ts index bec7b0ded1..11d95c8fb9 100644 --- a/apps/sim/tools/github/latest_commit.ts +++ b/apps/sim/tools/github/latest_commit.ts @@ -144,4 +144,12 @@ export const latestCommitTool: ToolConfig { return error instanceof Error ? error.message : 'Failed to fetch PR details' }, + + outputs: { + content: { type: 'string', description: 'Human-readable PR summary' }, + metadata: { + type: 'object', + description: 'Detailed PR metadata including file changes', + properties: { + number: { type: 'number', description: 'Pull request number' }, + title: { type: 'string', description: 'PR title' }, + state: { type: 'string', description: 'PR state (open/closed/merged)' }, + html_url: { type: 'string', description: 'GitHub web URL' }, + diff_url: { type: 'string', description: 'Raw diff URL' }, + created_at: { type: 'string', description: 'Creation timestamp' }, + updated_at: { type: 'string', description: 'Last update timestamp' }, + files: { + type: 'array', + description: 'Files changed in the PR', + items: { + type: 'object', + properties: { + filename: { type: 'string', description: 'File path' }, + additions: { type: 'number', description: 'Lines added' }, + deletions: { type: 'number', description: 'Lines deleted' }, + changes: { type: 'number', description: 'Total changes' }, + patch: { type: 'string', description: 'File diff patch' }, + blob_url: { type: 'string', description: 'GitHub blob URL' }, + raw_url: { type: 'string', description: 'Raw file URL' }, + 
status: { type: 'string', description: 'Change type (added/modified/deleted)' }, + }, + }, + }, + }, + }, + }, } diff --git a/apps/sim/tools/github/repo_info.ts b/apps/sim/tools/github/repo_info.ts index b52dd8017a..830b1cea07 100644 --- a/apps/sim/tools/github/repo_info.ts +++ b/apps/sim/tools/github/repo_info.ts @@ -83,4 +83,20 @@ URL: ${data.html_url}` } return 'Failed to fetch repository information' }, + + outputs: { + content: { type: 'string', description: 'Human-readable repository summary' }, + metadata: { + type: 'object', + description: 'Repository metadata', + properties: { + name: { type: 'string', description: 'Repository name' }, + description: { type: 'string', description: 'Repository description' }, + stars: { type: 'number', description: 'Number of stars' }, + forks: { type: 'number', description: 'Number of forks' }, + openIssues: { type: 'number', description: 'Number of open issues' }, + language: { type: 'string', description: 'Primary programming language' }, + }, + }, + }, } diff --git a/apps/sim/tools/gmail/draft.ts b/apps/sim/tools/gmail/draft.ts index ec8455a435..3dc455e426 100644 --- a/apps/sim/tools/gmail/draft.ts +++ b/apps/sim/tools/gmail/draft.ts @@ -9,6 +9,26 @@ export const gmailDraftTool: ToolConfig = { description: 'Draft emails using Gmail', version: '1.0.0', + outputs: { + content: { type: 'string', description: 'Success message' }, + metadata: { + type: 'object', + description: 'Draft metadata', + properties: { + id: { type: 'string', description: 'Draft ID' }, + message: { + type: 'object', + description: 'Message metadata', + properties: { + id: { type: 'string', description: 'Gmail message ID' }, + threadId: { type: 'string', description: 'Gmail thread ID' }, + labelIds: { type: 'array', items: { type: 'string' }, description: 'Email labels' }, + }, + }, + }, + }, + }, + oauth: { required: true, provider: 'google-email', diff --git a/apps/sim/tools/gmail/search.ts b/apps/sim/tools/gmail/search.ts index 
78c0130b91..40b3dbab5b 100644 --- a/apps/sim/tools/gmail/search.ts +++ b/apps/sim/tools/gmail/search.ts @@ -9,6 +9,31 @@ export const gmailSearchTool: ToolConfig = description: 'Search emails in Gmail', version: '1.0.0', + outputs: { + content: { type: 'string', description: 'Search results summary' }, + metadata: { + type: 'object', + description: 'Search metadata', + properties: { + results: { + type: 'array', + description: 'Array of search results', + items: { + type: 'object', + properties: { + id: { type: 'string', description: 'Gmail message ID' }, + threadId: { type: 'string', description: 'Gmail thread ID' }, + subject: { type: 'string', description: 'Email subject' }, + from: { type: 'string', description: 'Sender email address' }, + date: { type: 'string', description: 'Email date' }, + snippet: { type: 'string', description: 'Email snippet/preview' }, + }, + }, + }, + }, + }, + }, + oauth: { required: true, provider: 'google-email', diff --git a/apps/sim/tools/gmail/send.ts b/apps/sim/tools/gmail/send.ts index ed18f9ecbf..0ed09bb886 100644 --- a/apps/sim/tools/gmail/send.ts +++ b/apps/sim/tools/gmail/send.ts @@ -9,6 +9,19 @@ export const gmailSendTool: ToolConfig = { description: 'Send emails using Gmail', version: '1.0.0', + outputs: { + content: { type: 'string', description: 'Success message' }, + metadata: { + type: 'object', + description: 'Email metadata', + properties: { + id: { type: 'string', description: 'Gmail message ID' }, + threadId: { type: 'string', description: 'Gmail thread ID' }, + labelIds: { type: 'array', items: { type: 'string' }, description: 'Email labels' }, + }, + }, + }, + oauth: { required: true, provider: 'google-email', diff --git a/apps/sim/tools/google/search.ts b/apps/sim/tools/google/search.ts index 7a870df840..b903a76dfd 100644 --- a/apps/sim/tools/google/search.ts +++ b/apps/sim/tools/google/search.ts @@ -34,6 +34,36 @@ export const searchTool: ToolConfig = }, }, + outputs: { + items: { + type: 'array', + 
description: 'Array of search results from Google', + items: { + type: 'object', + properties: { + title: { type: 'string', description: 'Title of the search result' }, + link: { type: 'string', description: 'URL of the search result' }, + snippet: { type: 'string', description: 'Snippet or description of the search result' }, + displayLink: { type: 'string', description: 'Display URL', optional: true }, + pagemap: { type: 'object', description: 'Additional page metadata', optional: true }, + }, + }, + }, + searchInformation: { + type: 'object', + description: 'Information about the search query and results', + properties: { + totalResults: { type: 'string', description: 'Total number of search results available' }, + searchTime: { type: 'number', description: 'Time taken to perform the search in seconds' }, + formattedSearchTime: { type: 'string', description: 'Formatted search time for display' }, + formattedTotalResults: { + type: 'string', + description: 'Formatted total results count for display', + }, + }, + }, + }, + request: { url: (params: GoogleSearchParams) => { const baseUrl = 'https://www.googleapis.com/customsearch/v1' diff --git a/apps/sim/tools/google_calendar/create.ts b/apps/sim/tools/google_calendar/create.ts index fe1f7d09d4..0395bdcfc7 100644 --- a/apps/sim/tools/google_calendar/create.ts +++ b/apps/sim/tools/google_calendar/create.ts @@ -148,6 +148,14 @@ export const createTool: ToolConfig { if (!response.ok) { const errorData = await response.json() diff --git a/apps/sim/tools/google_calendar/get.ts b/apps/sim/tools/google_calendar/get.ts index da02234348..e8f218debc 100644 --- a/apps/sim/tools/google_calendar/get.ts +++ b/apps/sim/tools/google_calendar/get.ts @@ -51,6 +51,14 @@ export const getTool: ToolConfig { if (!response.ok) { const errorData = await response.json() diff --git a/apps/sim/tools/google_calendar/invite.ts b/apps/sim/tools/google_calendar/invite.ts index 08891c531a..dcabce4970 100644 --- 
a/apps/sim/tools/google_calendar/invite.ts +++ b/apps/sim/tools/google_calendar/invite.ts @@ -56,6 +56,17 @@ export const inviteTool: ToolConfig { const calendarId = params.calendarId || 'primary' diff --git a/apps/sim/tools/google_calendar/list.ts b/apps/sim/tools/google_calendar/list.ts index c4093efba3..55bee562c1 100644 --- a/apps/sim/tools/google_calendar/list.ts +++ b/apps/sim/tools/google_calendar/list.ts @@ -80,6 +80,14 @@ export const listTool: ToolConfig { if (!response.ok) { const errorData = await response.json() diff --git a/apps/sim/tools/google_calendar/quick_add.ts b/apps/sim/tools/google_calendar/quick_add.ts index fa2c62beda..b4a078d20e 100644 --- a/apps/sim/tools/google_calendar/quick_add.ts +++ b/apps/sim/tools/google_calendar/quick_add.ts @@ -74,6 +74,14 @@ export const quickAddTool: ToolConfig< }), }, + outputs: { + content: { + type: 'string', + description: 'Event creation confirmation message from natural language', + }, + metadata: { type: 'json', description: 'Created event metadata including parsed details' }, + }, + transformResponse: async (response: Response, params) => { const data = await response.json() diff --git a/apps/sim/tools/google_calendar/update.ts b/apps/sim/tools/google_calendar/update.ts index ea2a8bd3b5..2cef6d90e2 100644 --- a/apps/sim/tools/google_calendar/update.ts +++ b/apps/sim/tools/google_calendar/update.ts @@ -86,6 +86,14 @@ export const updateTool: ToolConfig { const calendarId = params.calendarId || 'primary' diff --git a/apps/sim/tools/google_docs/create.ts b/apps/sim/tools/google_docs/create.ts index 1b6f6fae99..e748d702b4 100644 --- a/apps/sim/tools/google_docs/create.ts +++ b/apps/sim/tools/google_docs/create.ts @@ -112,6 +112,14 @@ export const createTool: ToolConfig { if (!response.ok) { let errorText = '' diff --git a/apps/sim/tools/google_docs/read.ts b/apps/sim/tools/google_docs/read.ts index 02bfbc3d95..53057a7bb9 100644 --- a/apps/sim/tools/google_docs/read.ts +++ 
b/apps/sim/tools/google_docs/read.ts @@ -47,6 +47,12 @@ export const readTool: ToolConfig } }, }, + + outputs: { + content: { type: 'string', description: 'Extracted document text content' }, + metadata: { type: 'json', description: 'Document metadata including ID, title, and URL' }, + }, + transformResponse: async (response: Response) => { if (!response.ok) { const errorText = await response.text() diff --git a/apps/sim/tools/google_docs/write.ts b/apps/sim/tools/google_docs/write.ts index 33fa654b2b..b92760a22e 100644 --- a/apps/sim/tools/google_docs/write.ts +++ b/apps/sim/tools/google_docs/write.ts @@ -74,6 +74,18 @@ export const writeTool: ToolConfig { if (!response.ok) { let errorText = '' diff --git a/apps/sim/tools/google_drive/create_folder.ts b/apps/sim/tools/google_drive/create_folder.ts index fca7624cea..30dd5f9c62 100644 --- a/apps/sim/tools/google_drive/create_folder.ts +++ b/apps/sim/tools/google_drive/create_folder.ts @@ -63,6 +63,14 @@ export const createFolderTool: ToolConfig { if (!response.ok) { const data = await response.json().catch(() => ({})) diff --git a/apps/sim/tools/google_drive/get_content.ts b/apps/sim/tools/google_drive/get_content.ts index 4d188fa0ba..81525e6eb3 100644 --- a/apps/sim/tools/google_drive/get_content.ts +++ b/apps/sim/tools/google_drive/get_content.ts @@ -39,6 +39,18 @@ export const getContentTool: ToolConfig `https://www.googleapis.com/drive/v3/files/${params.fileId}?fields=id,name,mimeType`, diff --git a/apps/sim/tools/google_drive/list.ts b/apps/sim/tools/google_drive/list.ts index 70a138a970..866937b13f 100644 --- a/apps/sim/tools/google_drive/list.ts +++ b/apps/sim/tools/google_drive/list.ts @@ -86,6 +86,14 @@ export const listTool: ToolConfig { const data = await response.json() diff --git a/apps/sim/tools/google_drive/upload.ts b/apps/sim/tools/google_drive/upload.ts index ab66575ddb..496bd4db7b 100644 --- a/apps/sim/tools/google_drive/upload.ts +++ b/apps/sim/tools/google_drive/upload.ts @@ -79,6 +79,11 @@ 
export const uploadTool: ToolConfig { try { const data = await response.json() diff --git a/apps/sim/tools/google_sheets/append.ts b/apps/sim/tools/google_sheets/append.ts index dbd92860cf..3f9735cbbc 100644 --- a/apps/sim/tools/google_sheets/append.ts +++ b/apps/sim/tools/google_sheets/append.ts @@ -176,6 +176,16 @@ export const appendTool: ToolConfig { if (!response.ok) { const errorText = await response.text() diff --git a/apps/sim/tools/google_sheets/read.ts b/apps/sim/tools/google_sheets/read.ts index b8a8c236c3..074eb5b449 100644 --- a/apps/sim/tools/google_sheets/read.ts +++ b/apps/sim/tools/google_sheets/read.ts @@ -59,6 +59,12 @@ export const readTool: ToolConfig { if (!response.ok) { const errorJson = await response.json().catch(() => ({ error: response.statusText })) diff --git a/apps/sim/tools/google_sheets/update.ts b/apps/sim/tools/google_sheets/update.ts index c11766a7c6..8161d9d82c 100644 --- a/apps/sim/tools/google_sheets/update.ts +++ b/apps/sim/tools/google_sheets/update.ts @@ -130,6 +130,15 @@ export const updateTool: ToolConfig { if (!response.ok) { const errorText = await response.text() diff --git a/apps/sim/tools/google_sheets/write.ts b/apps/sim/tools/google_sheets/write.ts index 66fe15db28..3185bba65d 100644 --- a/apps/sim/tools/google_sheets/write.ts +++ b/apps/sim/tools/google_sheets/write.ts @@ -127,6 +127,15 @@ export const writeTool: ToolConfig { if (!response.ok) { const errorText = await response.text() diff --git a/apps/sim/tools/http/request.ts b/apps/sim/tools/http/request.ts index 4c0d7c870d..b19793120f 100644 --- a/apps/sim/tools/http/request.ts +++ b/apps/sim/tools/http/request.ts @@ -221,6 +221,29 @@ export const requestTool: ToolConfig = { }, }, + outputs: { + data: { + type: 'json', + description: 'Response data from the HTTP request (JSON object, text, or other format)', + }, + status: { + type: 'number', + description: 'HTTP status code of the response (e.g., 200, 404, 500)', + }, + headers: { + type: 'object', + 
description: 'Response headers as key-value pairs', + properties: { + 'content-type': { + type: 'string', + description: 'Content type of the response', + optional: true, + }, + 'content-length': { type: 'string', description: 'Content length', optional: true }, + }, + }, + }, + // Direct execution to bypass server for HTTP requests directExecution: async (params: RequestParams): Promise => { try { diff --git a/apps/sim/tools/huggingface/chat.ts b/apps/sim/tools/huggingface/chat.ts index 82933c7ef3..1fcccd18b6 100644 --- a/apps/sim/tools/huggingface/chat.ts +++ b/apps/sim/tools/huggingface/chat.ts @@ -56,7 +56,29 @@ export const chatTool: ToolConfig { diff --git a/apps/sim/tools/hunter/companies_find.ts b/apps/sim/tools/hunter/companies_find.ts index e25c54e12e..ac1ac46d7f 100644 --- a/apps/sim/tools/hunter/companies_find.ts +++ b/apps/sim/tools/hunter/companies_find.ts @@ -22,6 +22,18 @@ export const companiesFindTool: ToolConfig { const url = new URL('https://api.hunter.io/v2/companies/find') diff --git a/apps/sim/tools/hunter/discover.ts b/apps/sim/tools/hunter/discover.ts index 0c037d1de6..6b07500bfd 100644 --- a/apps/sim/tools/hunter/discover.ts +++ b/apps/sim/tools/hunter/discover.ts @@ -46,6 +46,14 @@ export const discoverTool: ToolConfig { // Validate that at least one search parameter is provided diff --git a/apps/sim/tools/hunter/domain_search.ts b/apps/sim/tools/hunter/domain_search.ts index 79eec15ea7..ed9a766aa8 100644 --- a/apps/sim/tools/hunter/domain_search.ts +++ b/apps/sim/tools/hunter/domain_search.ts @@ -52,6 +52,90 @@ export const domainSearchTool: ToolConfig { const url = new URL('https://api.hunter.io/v2/domain-search') diff --git a/apps/sim/tools/hunter/email_count.ts b/apps/sim/tools/hunter/email_count.ts index 6658cbd679..f5feb5fe05 100644 --- a/apps/sim/tools/hunter/email_count.ts +++ b/apps/sim/tools/hunter/email_count.ts @@ -34,6 +34,30 @@ export const emailCountTool: ToolConfig { if (!params.domain && !params.company) { diff --git 
a/apps/sim/tools/hunter/email_finder.ts b/apps/sim/tools/hunter/email_finder.ts index 2ec48dfdb8..5a8c5c7b6e 100644 --- a/apps/sim/tools/hunter/email_finder.ts +++ b/apps/sim/tools/hunter/email_finder.ts @@ -41,6 +41,26 @@ export const emailFinderTool: ToolConfig { const url = new URL('https://api.hunter.io/v2/email-finder') diff --git a/apps/sim/tools/hunter/email_verifier.ts b/apps/sim/tools/hunter/email_verifier.ts index 3ff449cfd2..7115dfa3e8 100644 --- a/apps/sim/tools/hunter/email_verifier.ts +++ b/apps/sim/tools/hunter/email_verifier.ts @@ -24,6 +24,66 @@ export const emailVerifierTool: ToolConfig { const url = new URL('https://api.hunter.io/v2/email-verifier') diff --git a/apps/sim/tools/jina/read_url.ts b/apps/sim/tools/jina/read_url.ts index 8b21699611..88287165d0 100644 --- a/apps/sim/tools/jina/read_url.ts +++ b/apps/sim/tools/jina/read_url.ts @@ -41,6 +41,13 @@ export const readUrlTool: ToolConfig = { }, }, + outputs: { + content: { + type: 'string', + description: 'The extracted content from the URL, processed into clean, LLM-friendly text', + }, + }, + request: { url: (params: ReadUrlParams) => { return `https://r.jina.ai/https://${params.url.replace(/^https?:\/\//, '')}` diff --git a/apps/sim/tools/jira/bulk_read.ts b/apps/sim/tools/jira/bulk_read.ts index 09622a531a..5a882542f5 100644 --- a/apps/sim/tools/jira/bulk_read.ts +++ b/apps/sim/tools/jira/bulk_read.ts @@ -37,6 +37,16 @@ export const jiraBulkRetrieveTool: ToolConfig { if (params.cloudId) { diff --git a/apps/sim/tools/jira/retrieve.ts b/apps/sim/tools/jira/retrieve.ts index cb18626cbb..92b54df7d2 100644 --- a/apps/sim/tools/jira/retrieve.ts +++ b/apps/sim/tools/jira/retrieve.ts @@ -46,6 +46,17 @@ export const jiraRetrieveTool: ToolConfig { diff --git a/apps/sim/tools/jira/update.ts b/apps/sim/tools/jira/update.ts index 607ee26a47..098c209687 100644 --- a/apps/sim/tools/jira/update.ts +++ b/apps/sim/tools/jira/update.ts @@ -78,6 +78,17 @@ export const jiraUpdateTool: ToolConfig = 'Jira Cloud 
ID for the instance. If not provided, it will be fetched using the domain.', }, }, + outputs: { + success: { + type: 'boolean', + description: 'Operation success status', + }, + output: { + type: 'object', + description: + 'Updated Jira issue details with timestamp, issue key, summary, and success status', + }, + }, directExecution: async (params) => { // Pre-fetch the cloudId if not provided diff --git a/apps/sim/tools/jira/write.ts b/apps/sim/tools/jira/write.ts index 8206a274d5..c0cff2ef80 100644 --- a/apps/sim/tools/jira/write.ts +++ b/apps/sim/tools/jira/write.ts @@ -81,6 +81,17 @@ export const jiraWriteTool: ToolConfig = { description: 'Type of issue to create (e.g., Task, Story)', }, }, + outputs: { + success: { + type: 'boolean', + description: 'Operation success status', + }, + output: { + type: 'object', + description: + 'Created Jira issue details with timestamp, issue key, summary, success status, and URL', + }, + }, directExecution: async (params) => { // Pre-fetch the cloudId if not provided diff --git a/apps/sim/tools/knowledge/create_document.ts b/apps/sim/tools/knowledge/create_document.ts index 6a6bbef588..7b7716f665 100644 --- a/apps/sim/tools/knowledge/create_document.ts +++ b/apps/sim/tools/knowledge/create_document.ts @@ -63,6 +63,30 @@ export const knowledgeCreateDocumentTool: ToolConfig `/api/knowledge/${params.knowledgeBaseId}/documents`, method: 'POST', diff --git a/apps/sim/tools/knowledge/search.ts b/apps/sim/tools/knowledge/search.ts index 5cfd30efc5..ecb12ca579 100644 --- a/apps/sim/tools/knowledge/search.ts +++ b/apps/sim/tools/knowledge/search.ts @@ -28,6 +28,38 @@ export const knowledgeSearchTool: ToolConfig = { description: 'Array of tag filters with tagName and tagValue properties', }, }, + + outputs: { + results: { + type: 'array', + description: 'Array of search results from the knowledge base', + items: { + type: 'object', + properties: { + id: { type: 'string' }, + content: { type: 'string' }, + documentId: { type: 'string' }, 
+ chunkIndex: { type: 'number' }, + similarity: { type: 'number' }, + metadata: { type: 'object' }, + }, + }, + }, + query: { + type: 'string', + description: 'The search query that was executed', + }, + totalResults: { + type: 'number', + description: 'Total number of results found', + }, + cost: { + type: 'object', + description: 'Cost information for the search operation', + optional: true, + }, + }, + request: { url: () => '/api/knowledge/search', method: 'POST', diff --git a/apps/sim/tools/knowledge/upload_chunk.ts b/apps/sim/tools/knowledge/upload_chunk.ts index 9a1f880a2f..7e1f055f5f 100644 --- a/apps/sim/tools/knowledge/upload_chunk.ts +++ b/apps/sim/tools/knowledge/upload_chunk.ts @@ -23,6 +23,37 @@ export const knowledgeUploadChunkTool: ToolConfig `/api/knowledge/${params.knowledgeBaseId}/documents/${params.documentId}/chunks`, diff --git a/apps/sim/tools/linear/create_issue.ts b/apps/sim/tools/linear/create_issue.ts index 8c5eb793f0..27f46aa6fa 100644 --- a/apps/sim/tools/linear/create_issue.ts +++ b/apps/sim/tools/linear/create_issue.ts @@ -37,6 +37,22 @@ export const linearCreateIssueTool: ToolConfig) => { // For a specific memory ID, use the get single memory endpoint diff --git a/apps/sim/tools/mem0/search_memories.ts b/apps/sim/tools/mem0/search_memories.ts index b91ba9e6b2..654454cd16 100644 --- a/apps/sim/tools/mem0/search_memories.ts +++ b/apps/sim/tools/mem0/search_memories.ts @@ -34,6 +34,17 @@ export const mem0SearchMemoriesTool: ToolConfig = { description: 'Your Mem0 API key', }, }, + + outputs: { + searchResults: { + type: 'array', + description: 'Array of search results with memory data, each containing id, data, and score', + }, + ids: { + type: 'array', + description: 'Array of memory IDs found in the search results', + }, + }, request: { url: 'https://api.mem0.ai/v2/memories/search/', method: 'POST', diff --git a/apps/sim/tools/memory/add.ts b/apps/sim/tools/memory/add.ts index 140786c0c8..777179489c 100644 --- 
a/apps/sim/tools/memory/add.ts +++ b/apps/sim/tools/memory/add.ts @@ -24,6 +24,14 @@ export const memoryAddTool: ToolConfig = { description: 'Content for agent memory', }, }, + outputs: { + success: { type: 'boolean', description: 'Whether the memory was added successfully' }, + memories: { + type: 'array', + description: 'Array of memory objects including the new or updated memory', + }, + error: { type: 'string', description: 'Error message if operation failed' }, + }, request: { url: '/api/memory', method: 'POST', diff --git a/apps/sim/tools/memory/delete.ts b/apps/sim/tools/memory/delete.ts index 449b2afa5d..4c3223c196 100644 --- a/apps/sim/tools/memory/delete.ts +++ b/apps/sim/tools/memory/delete.ts @@ -13,6 +13,11 @@ export const memoryDeleteTool: ToolConfig = { description: 'Identifier for the memory to delete', }, }, + outputs: { + success: { type: 'boolean', description: 'Whether the memory was deleted successfully' }, + message: { type: 'string', description: 'Success or error message' }, + error: { type: 'string', description: 'Error message if operation failed' }, + }, request: { url: (params): any => { // Get workflowId from context (set by workflow execution) diff --git a/apps/sim/tools/memory/get.ts b/apps/sim/tools/memory/get.ts index bc7887ea3c..f07c59ffc9 100644 --- a/apps/sim/tools/memory/get.ts +++ b/apps/sim/tools/memory/get.ts @@ -13,6 +13,12 @@ export const memoryGetTool: ToolConfig = { description: 'Identifier for the memory to retrieve', }, }, + outputs: { + success: { type: 'boolean', description: 'Whether the memory was retrieved successfully' }, + memories: { type: 'array', description: 'Array of memory data for the requested ID' }, + message: { type: 'string', description: 'Success or error message' }, + error: { type: 'string', description: 'Error message if operation failed' }, + }, request: { url: (params): any => { // Get workflowId from context (set by workflow execution) diff --git a/apps/sim/tools/memory/get_all.ts 
b/apps/sim/tools/memory/get_all.ts index 74243ec93b..230a64d6e1 100644 --- a/apps/sim/tools/memory/get_all.ts +++ b/apps/sim/tools/memory/get_all.ts @@ -7,6 +7,15 @@ export const memoryGetAllTool: ToolConfig = { description: 'Retrieve all memories from the database', version: '1.0.0', params: {}, + outputs: { + success: { type: 'boolean', description: 'Whether all memories were retrieved successfully' }, + memories: { + type: 'array', + description: 'Array of all memory objects with keys, types, and data', + }, + message: { type: 'string', description: 'Success or error message' }, + error: { type: 'string', description: 'Error message if operation failed' }, + }, request: { url: (params): any => { // Get workflowId from context (set by workflow execution) diff --git a/apps/sim/tools/microsoft_excel/read.ts b/apps/sim/tools/microsoft_excel/read.ts index 394499cab2..f0e95cff05 100644 --- a/apps/sim/tools/microsoft_excel/read.ts +++ b/apps/sim/tools/microsoft_excel/read.ts @@ -34,6 +34,31 @@ export const readTool: ToolConfig { const spreadsheetId = params.spreadsheetId?.trim() diff --git a/apps/sim/tools/microsoft_excel/table_add.ts b/apps/sim/tools/microsoft_excel/table_add.ts index 3059a4ecd2..b89cabb2fb 100644 --- a/apps/sim/tools/microsoft_excel/table_add.ts +++ b/apps/sim/tools/microsoft_excel/table_add.ts @@ -43,6 +43,25 @@ export const tableAddTool: ToolConfig< description: 'The data to add to the table (array of arrays or array of objects)', }, }, + outputs: { + success: { type: 'boolean', description: 'Operation success status' }, + output: { + type: 'object', + description: 'Table add operation results and metadata', + properties: { + index: { type: 'number', description: 'Index of the first row that was added' }, + values: { type: 'array', description: 'Array of rows that were added to the table' }, + metadata: { + type: 'object', + description: 'Spreadsheet metadata', + properties: { + spreadsheetId: { type: 'string', description: 'The ID of the 
spreadsheet' }, + spreadsheetUrl: { type: 'string', description: 'URL to access the spreadsheet' }, + }, + }, + }, + }, + }, request: { url: (params) => { const tableName = encodeURIComponent(params.tableName) diff --git a/apps/sim/tools/microsoft_excel/write.ts b/apps/sim/tools/microsoft_excel/write.ts index abc54cc2ad..2ace7aa921 100644 --- a/apps/sim/tools/microsoft_excel/write.ts +++ b/apps/sim/tools/microsoft_excel/write.ts @@ -52,6 +52,27 @@ export const writeTool: ToolConfig { const rangeInput = params.range?.trim() diff --git a/apps/sim/tools/microsoft_planner/create_task.ts b/apps/sim/tools/microsoft_planner/create_task.ts index 30401ac195..79d4adaede 100644 --- a/apps/sim/tools/microsoft_planner/create_task.ts +++ b/apps/sim/tools/microsoft_planner/create_task.ts @@ -65,6 +65,11 @@ export const createTaskTool: ToolConfig< description: 'The bucket ID to place the task in', }, }, + outputs: { + success: { type: 'boolean', description: 'Whether the task was created successfully' }, + task: { type: 'object', description: 'The created task object with all properties' }, + metadata: { type: 'object', description: 'Metadata including planId, taskId, and taskUrl' }, + }, request: { url: () => 'https://graph.microsoft.com/v1.0/planner/tasks', method: 'POST', diff --git a/apps/sim/tools/microsoft_planner/read_task.ts b/apps/sim/tools/microsoft_planner/read_task.ts index 891f7f7a20..b825284f58 100644 --- a/apps/sim/tools/microsoft_planner/read_task.ts +++ b/apps/sim/tools/microsoft_planner/read_task.ts @@ -38,6 +38,11 @@ export const readTaskTool: ToolConfig { let finalUrl: string diff --git a/apps/sim/tools/microsoft_teams/read_channel.ts b/apps/sim/tools/microsoft_teams/read_channel.ts index a20854a9fb..b183f389b1 100644 --- a/apps/sim/tools/microsoft_teams/read_channel.ts +++ b/apps/sim/tools/microsoft_teams/read_channel.ts @@ -37,6 +37,18 @@ export const readChannelTool: ToolConfig { const teamId = params.teamId?.trim() diff --git 
a/apps/sim/tools/microsoft_teams/read_chat.ts b/apps/sim/tools/microsoft_teams/read_chat.ts index fa13c83ad6..c8bf0336f2 100644 --- a/apps/sim/tools/microsoft_teams/read_chat.ts +++ b/apps/sim/tools/microsoft_teams/read_chat.ts @@ -28,6 +28,17 @@ export const readChatTool: ToolConfig { // Ensure chatId is valid diff --git a/apps/sim/tools/microsoft_teams/write_channel.ts b/apps/sim/tools/microsoft_teams/write_channel.ts index e4792aa585..59f6f1a330 100644 --- a/apps/sim/tools/microsoft_teams/write_channel.ts +++ b/apps/sim/tools/microsoft_teams/write_channel.ts @@ -39,6 +39,17 @@ export const writeChannelTool: ToolConfig { const teamId = params.teamId?.trim() diff --git a/apps/sim/tools/microsoft_teams/write_chat.ts b/apps/sim/tools/microsoft_teams/write_chat.ts index f71e9be2c0..e7ae1c0418 100644 --- a/apps/sim/tools/microsoft_teams/write_chat.ts +++ b/apps/sim/tools/microsoft_teams/write_chat.ts @@ -33,6 +33,16 @@ export const writeChatTool: ToolConfig { // Ensure chatId is valid diff --git a/apps/sim/tools/mistral/parser.ts b/apps/sim/tools/mistral/parser.ts index 9d4ea8fd6a..81172b9014 100644 --- a/apps/sim/tools/mistral/parser.ts +++ b/apps/sim/tools/mistral/parser.ts @@ -63,6 +63,17 @@ export const mistralParserTool: ToolConfig 'https://api.notion.com/v1/databases', diff --git a/apps/sim/tools/notion/create_page.ts b/apps/sim/tools/notion/create_page.ts index 7a8c6aa756..c083eb6e36 100644 --- a/apps/sim/tools/notion/create_page.ts +++ b/apps/sim/tools/notion/create_page.ts @@ -37,6 +37,16 @@ export const notionCreatePageTool: ToolConfig 'https://api.notion.com/v1/pages', diff --git a/apps/sim/tools/notion/query_database.ts b/apps/sim/tools/notion/query_database.ts index 8bb7d8d529..d375d5799b 100644 --- a/apps/sim/tools/notion/query_database.ts +++ b/apps/sim/tools/notion/query_database.ts @@ -43,6 +43,37 @@ export const notionQueryDatabaseTool: ToolConfig { diff --git a/apps/sim/tools/notion/read.ts b/apps/sim/tools/notion/read.ts index 
2be63d7ffc..9cfba1599d 100644 --- a/apps/sim/tools/notion/read.ts +++ b/apps/sim/tools/notion/read.ts @@ -25,6 +25,16 @@ export const notionReadTool: ToolConfig = { description: 'The ID of the Notion page to read', }, }, + outputs: { + content: { + type: 'string', + description: 'Page content in markdown format with headers, paragraphs, lists, and todos', + }, + metadata: { + type: 'object', + description: 'Page metadata including title, URL, and timestamps', + }, + }, request: { url: (params: NotionReadParams) => { diff --git a/apps/sim/tools/notion/read_database.ts b/apps/sim/tools/notion/read_database.ts index 9dfc748903..35ce152252 100644 --- a/apps/sim/tools/notion/read_database.ts +++ b/apps/sim/tools/notion/read_database.ts @@ -30,6 +30,16 @@ export const notionReadDatabaseTool: ToolConfig { diff --git a/apps/sim/tools/notion/search.ts b/apps/sim/tools/notion/search.ts index 2903d03d4f..97753b5940 100644 --- a/apps/sim/tools/notion/search.ts +++ b/apps/sim/tools/notion/search.ts @@ -37,6 +37,17 @@ export const notionSearchTool: ToolConfig = description: 'Number of results to return (default: 100, max: 100)', }, }, + outputs: { + content: { + type: 'string', + description: 'Formatted list of search results including pages and databases', + }, + metadata: { + type: 'object', + description: + 'Search metadata including total results count, pagination info, and raw results array', + }, + }, request: { url: () => 'https://api.notion.com/v1/search', diff --git a/apps/sim/tools/notion/update_page.ts b/apps/sim/tools/notion/update_page.ts index 6889173919..03c51e8574 100644 --- a/apps/sim/tools/notion/update_page.ts +++ b/apps/sim/tools/notion/update_page.ts @@ -31,6 +31,16 @@ export const notionUpdatePageTool: ToolConfig { diff --git a/apps/sim/tools/notion/write.ts b/apps/sim/tools/notion/write.ts index 49b55971f4..cdc65c3b0f 100644 --- a/apps/sim/tools/notion/write.ts +++ b/apps/sim/tools/notion/write.ts @@ -31,6 +31,12 @@ export const notionWriteTool: ToolConfig 
= { description: 'The content to append to the page', }, }, + outputs: { + content: { + type: 'string', + description: 'Success message confirming content was appended to page', + }, + }, request: { url: (params: NotionWriteParams) => { diff --git a/apps/sim/tools/onedrive/create_folder.ts b/apps/sim/tools/onedrive/create_folder.ts index 31d10c5410..68fb05448d 100644 --- a/apps/sim/tools/onedrive/create_folder.ts +++ b/apps/sim/tools/onedrive/create_folder.ts @@ -37,6 +37,14 @@ export const createFolderTool: ToolConfig { // Use specific parent folder URL if parentId is provided diff --git a/apps/sim/tools/onedrive/list.ts b/apps/sim/tools/onedrive/list.ts index f8c8315b51..0ca4239b8a 100644 --- a/apps/sim/tools/onedrive/list.ts +++ b/apps/sim/tools/onedrive/list.ts @@ -54,6 +54,14 @@ export const listTool: ToolConfig = { description: 'The number of files to return', }, }, + outputs: { + success: { type: 'boolean', description: 'Whether files were listed successfully' }, + files: { type: 'array', description: 'Array of file and folder objects with metadata' }, + nextPageToken: { + type: 'string', + description: 'Token for retrieving the next page of results (optional)', + }, + }, request: { url: (params) => { // Use specific folder if provided, otherwise use root diff --git a/apps/sim/tools/onedrive/upload.ts b/apps/sim/tools/onedrive/upload.ts index 5783351bb5..7a70d76d0d 100644 --- a/apps/sim/tools/onedrive/upload.ts +++ b/apps/sim/tools/onedrive/upload.ts @@ -53,6 +53,14 @@ export const uploadTool: ToolConfig description: 'The ID of the folder to upload the file to (internal use)', }, }, + outputs: { + success: { type: 'boolean', description: 'Whether the file was uploaded successfully' }, + file: { + type: 'object', + description: + 'The uploaded file object with metadata including id, name, webViewLink, webContentLink, and timestamps', + }, + }, request: { url: (params) => { let fileName = params.fileName || 'untitled' diff --git 
a/apps/sim/tools/openai/embeddings.ts b/apps/sim/tools/openai/embeddings.ts index 69e854d62d..17b427b5f1 100644 --- a/apps/sim/tools/openai/embeddings.ts +++ b/apps/sim/tools/openai/embeddings.ts @@ -35,7 +35,25 @@ export const embeddingsTool: ToolConfig = { description: 'OpenAI API key', }, }, - + outputs: { + success: { type: 'boolean', description: 'Operation success status' }, + output: { + type: 'object', + description: 'Embeddings generation results', + properties: { + embeddings: { type: 'array', description: 'Array of embedding vectors' }, + model: { type: 'string', description: 'Model used for generating embeddings' }, + usage: { + type: 'object', + description: 'Token usage information', + properties: { + prompt_tokens: { type: 'number', description: 'Number of tokens in the prompt' }, + total_tokens: { type: 'number', description: 'Total number of tokens used' }, + }, + }, + }, + }, + }, request: { method: 'POST', url: () => 'https://api.openai.com/v1/embeddings', diff --git a/apps/sim/tools/openai/image.ts b/apps/sim/tools/openai/image.ts index 85d0e2eef4..75d7c33197 100644 --- a/apps/sim/tools/openai/image.ts +++ b/apps/sim/tools/openai/image.ts @@ -60,6 +60,24 @@ export const imageTool: ToolConfig = { description: 'Your OpenAI API key', }, }, + outputs: { + success: { type: 'boolean', description: 'Operation success status' }, + output: { + type: 'object', + description: 'Generated image data', + properties: { + content: { type: 'string', description: 'Image URL or identifier' }, + image: { type: 'string', description: 'Base64 encoded image data' }, + metadata: { + type: 'object', + description: 'Image generation metadata', + properties: { + model: { type: 'string', description: 'Model used for image generation' }, + }, + }, + }, + }, + }, request: { url: 'https://api.openai.com/v1/images/generations', method: 'POST', diff --git a/apps/sim/tools/outlook/draft.ts b/apps/sim/tools/outlook/draft.ts index e406cc5e2c..7680523dd8 100644 --- 
a/apps/sim/tools/outlook/draft.ts +++ b/apps/sim/tools/outlook/draft.ts @@ -39,6 +39,15 @@ export const outlookDraftTool: ToolConfig { return `https://graph.microsoft.com/v1.0/me/messages` diff --git a/apps/sim/tools/outlook/read.ts b/apps/sim/tools/outlook/read.ts index 14ca192f8e..298fe1b50b 100644 --- a/apps/sim/tools/outlook/read.ts +++ b/apps/sim/tools/outlook/read.ts @@ -37,6 +37,13 @@ export const outlookReadTool: ToolConfig description: 'Maximum number of emails to retrieve (default: 1, max: 10)', }, }, + + outputs: { + success: { type: 'boolean', description: 'Email read operation success status' }, + messageCount: { type: 'number', description: 'Number of emails retrieved' }, + messages: { type: 'array', description: 'Array of email message objects' }, + message: { type: 'string', description: 'Success or status message' }, + }, request: { url: (params) => { // Set max results (default to 1 for simplicity, max 10) with no negative values diff --git a/apps/sim/tools/outlook/send.ts b/apps/sim/tools/outlook/send.ts index 1b332e6ec8..24347ab074 100644 --- a/apps/sim/tools/outlook/send.ts +++ b/apps/sim/tools/outlook/send.ts @@ -63,6 +63,13 @@ export const outlookSendTool: ToolConfig }, }, + outputs: { + success: { type: 'boolean', description: 'Email send success status' }, + status: { type: 'string', description: 'Delivery status of the email' }, + timestamp: { type: 'string', description: 'Timestamp when email was sent' }, + message: { type: 'string', description: 'Success or error message' }, + }, + request: { url: (params) => { // If replying to a specific message, use the reply endpoint diff --git a/apps/sim/tools/perplexity/chat.ts b/apps/sim/tools/perplexity/chat.ts index a984d9adfb..ab8cbca29b 100644 --- a/apps/sim/tools/perplexity/chat.ts +++ b/apps/sim/tools/perplexity/chat.ts @@ -45,7 +45,29 @@ export const chatTool: ToolConfig description: 'Perplexity API key', }, }, - + outputs: { + success: { type: 'boolean', description: 'Operation success 
status' }, + output: { + type: 'object', + description: 'Chat completion results', + properties: { + content: { type: 'string', description: 'Generated text content' }, + model: { type: 'string', description: 'Model used for generation' }, + usage: { + type: 'object', + description: 'Token usage information', + properties: { + prompt_tokens: { type: 'number', description: 'Number of tokens in the prompt' }, + completion_tokens: { + type: 'number', + description: 'Number of tokens in the completion', + }, + total_tokens: { type: 'number', description: 'Total number of tokens used' }, + }, + }, + }, + }, + }, request: { method: 'POST', url: () => 'https://api.perplexity.ai/chat/completions', diff --git a/apps/sim/tools/pinecone/fetch.ts b/apps/sim/tools/pinecone/fetch.ts index 76975561d3..f183bd4f98 100644 --- a/apps/sim/tools/pinecone/fetch.ts +++ b/apps/sim/tools/pinecone/fetch.ts @@ -34,6 +34,40 @@ export const fetchTool: ToolConfig = { }, }, + outputs: { + matches: { + type: 'array', + description: 'Fetched vectors with ID, values, metadata, and score', + items: { + type: 'object', + properties: { + id: { type: 'string', description: 'Vector ID' }, + values: { type: 'array', description: 'Vector values' }, + metadata: { type: 'object', description: 'Associated metadata' }, + score: { type: 'number', description: 'Match score (1.0 for exact matches)' }, + }, + }, + }, + data: { + type: 'array', + description: 'Vector data with values and vector type', + items: { + type: 'object', + properties: { + values: { type: 'array', description: 'Vector values' }, + vector_type: { type: 'string', description: 'Vector type (dense/sparse)' }, + }, + }, + }, + usage: { + type: 'object', + description: 'Usage statistics including total read units', + properties: { + total_tokens: { type: 'number', description: 'Read units consumed' }, + }, + }, + }, + request: { method: 'GET', url: (params) => { diff --git a/apps/sim/tools/pinecone/generate_embeddings.ts 
b/apps/sim/tools/pinecone/generate_embeddings.ts index 7490cae774..356f1336fe 100644 --- a/apps/sim/tools/pinecone/generate_embeddings.ts +++ b/apps/sim/tools/pinecone/generate_embeddings.ts @@ -31,6 +31,25 @@ export const generateEmbeddingsTool: ToolConfig< }, }, + outputs: { + data: { + type: 'array', + description: 'Generated embeddings data with values and vector type', + }, + model: { + type: 'string', + description: 'Model used for generating embeddings', + }, + vector_type: { + type: 'string', + description: 'Type of vector generated (dense/sparse)', + }, + usage: { + type: 'object', + description: 'Usage statistics for embeddings generation', + }, + }, + request: { method: 'POST', url: () => 'https://api.pinecone.io/embed', diff --git a/apps/sim/tools/pinecone/search_text.ts b/apps/sim/tools/pinecone/search_text.ts index d910896a33..966db468be 100644 --- a/apps/sim/tools/pinecone/search_text.ts +++ b/apps/sim/tools/pinecone/search_text.ts @@ -62,6 +62,30 @@ export const searchTextTool: ToolConfig `${params.indexHost}/records/namespaces/${params.namespace}/search`, diff --git a/apps/sim/tools/pinecone/search_vector.ts b/apps/sim/tools/pinecone/search_vector.ts index c750af99de..4754a773d5 100644 --- a/apps/sim/tools/pinecone/search_vector.ts +++ b/apps/sim/tools/pinecone/search_vector.ts @@ -58,6 +58,17 @@ export const searchVectorTool: ToolConfig `${params.indexHost}/query`, diff --git a/apps/sim/tools/pinecone/upsert_text.ts b/apps/sim/tools/pinecone/upsert_text.ts index ebd28fa4ab..851a4ec551 100644 --- a/apps/sim/tools/pinecone/upsert_text.ts +++ b/apps/sim/tools/pinecone/upsert_text.ts @@ -39,6 +39,17 @@ export const upsertTextTool: ToolConfig `${params.indexHost}/records/namespaces/${params.namespace}/upsert`, diff --git a/apps/sim/tools/qdrant/fetch_points.ts b/apps/sim/tools/qdrant/fetch_points.ts index b4a1640d49..d44a36d8e9 100644 --- a/apps/sim/tools/qdrant/fetch_points.ts +++ b/apps/sim/tools/qdrant/fetch_points.ts @@ -46,6 +46,17 @@ export const 
fetchPointsTool: ToolConfig = { }, }, + outputs: { + data: { + type: 'array', + description: 'Fetched points with ID, payload, and optional vector data', + }, + status: { + type: 'string', + description: 'Status of the fetch operation', + }, + }, + request: { method: 'POST', url: (params) => `${params.url.replace(/\/$/, '')}/collections/${params.collection}/points`, diff --git a/apps/sim/tools/qdrant/search_vector.ts b/apps/sim/tools/qdrant/search_vector.ts index 92a8eac8ed..2fd6f6e887 100644 --- a/apps/sim/tools/qdrant/search_vector.ts +++ b/apps/sim/tools/qdrant/search_vector.ts @@ -58,6 +58,17 @@ export const searchVectorTool: ToolConfig = }, }, + outputs: { + data: { + type: 'array', + description: 'Vector search results with ID, score, payload, and optional vector data', + }, + status: { + type: 'string', + description: 'Status of the search operation', + }, + }, + request: { method: 'POST', url: (params) => diff --git a/apps/sim/tools/qdrant/upsert_points.ts b/apps/sim/tools/qdrant/upsert_points.ts index 2550707bb5..143f978c08 100644 --- a/apps/sim/tools/qdrant/upsert_points.ts +++ b/apps/sim/tools/qdrant/upsert_points.ts @@ -33,6 +33,17 @@ export const upsertPointsTool: ToolConfig = }, }, + outputs: { + status: { + type: 'string', + description: 'Status of the upsert operation', + }, + data: { + type: 'object', + description: 'Result data from the upsert operation', + }, + }, + request: { method: 'PUT', url: (params) => `${params.url.replace(/\/$/, '')}/collections/${params.collection}/points`, diff --git a/apps/sim/tools/reddit/get_comments.ts b/apps/sim/tools/reddit/get_comments.ts index 841e7105df..43fceeb332 100644 --- a/apps/sim/tools/reddit/get_comments.ts +++ b/apps/sim/tools/reddit/get_comments.ts @@ -46,6 +46,42 @@ export const getCommentsTool: ToolConfig { // Sanitize inputs diff --git a/apps/sim/tools/reddit/get_posts.ts b/apps/sim/tools/reddit/get_posts.ts index 38a9118a21..de1eec9310 100644 --- a/apps/sim/tools/reddit/get_posts.ts +++ 
b/apps/sim/tools/reddit/get_posts.ts @@ -47,6 +47,34 @@ export const getPostsTool: ToolConfig = }, }, + outputs: { + subreddit: { + type: 'string', + description: 'Name of the subreddit where posts were fetched from', + }, + posts: { + type: 'array', + description: 'Array of posts with title, author, URL, score, comments count, and metadata', + items: { + type: 'object', + properties: { + id: { type: 'string', description: 'Post ID' }, + title: { type: 'string', description: 'Post title' }, + author: { type: 'string', description: 'Author username' }, + url: { type: 'string', description: 'Post URL' }, + permalink: { type: 'string', description: 'Reddit permalink' }, + score: { type: 'number', description: 'Post score (upvotes - downvotes)' }, + num_comments: { type: 'number', description: 'Number of comments' }, + created_utc: { type: 'number', description: 'Creation timestamp (UTC)' }, + is_self: { type: 'boolean', description: 'Whether this is a text post' }, + selftext: { type: 'string', description: 'Text content for self posts' }, + thumbnail: { type: 'string', description: 'Thumbnail URL' }, + subreddit: { type: 'string', description: 'Subreddit name' }, + }, + }, + }, + }, + request: { url: (params: RedditPostsParams) => { // Sanitize inputs diff --git a/apps/sim/tools/reddit/hot_posts.ts b/apps/sim/tools/reddit/hot_posts.ts index daf7adf694..4909ba95bb 100644 --- a/apps/sim/tools/reddit/hot_posts.ts +++ b/apps/sim/tools/reddit/hot_posts.ts @@ -40,6 +40,18 @@ export const hotPostsTool: ToolConfig = }, }, + outputs: { + subreddit: { + type: 'string', + description: 'Name of the subreddit where hot posts were fetched from', + }, + posts: { + type: 'array', + description: + 'Array of hot posts with title, author, URL, score, comments count, and metadata', + }, + }, + request: { url: (params) => { // Sanitize inputs and enforce limits diff --git a/apps/sim/tools/registry.ts b/apps/sim/tools/registry.ts index 84f33c7ecd..4be4524938 100644 --- 
a/apps/sim/tools/registry.ts +++ b/apps/sim/tools/registry.ts @@ -1,3 +1,4 @@ +// Provider tools - handled separately import { airtableCreateRecordsTool, airtableGetRecordTool, @@ -311,4 +312,6 @@ export const tools: Record = { sharepoint_create_page: sharepointCreatePageTool, sharepoint_read_page: sharepointReadPageTool, sharepoint_list_sites: sharepointListSitesTool, + // Provider chat tools + // Provider chat tools - handled separately in agent blocks } diff --git a/apps/sim/tools/s3/get_object.ts b/apps/sim/tools/s3/get_object.ts index a88f676f96..2e2af6a4fd 100644 --- a/apps/sim/tools/s3/get_object.ts +++ b/apps/sim/tools/s3/get_object.ts @@ -32,6 +32,17 @@ export const s3GetObjectTool: ToolConfig = { description: 'S3 Object URL', }, }, + + outputs: { + url: { + type: 'string', + description: 'Pre-signed URL for downloading the S3 object', + }, + metadata: { + type: 'object', + description: 'File metadata including type, size, name, and last modified date', + }, + }, request: { url: (params) => { try { diff --git a/apps/sim/tools/serper/search.ts b/apps/sim/tools/serper/search.ts index e9ec6c56b4..53f263cab9 100644 --- a/apps/sim/tools/serper/search.ts +++ b/apps/sim/tools/serper/search.ts @@ -47,6 +47,14 @@ export const searchTool: ToolConfig = { }, }, + outputs: { + searchResults: { + type: 'array', + description: + 'Search results with titles, links, snippets, and type-specific metadata (date for news, rating for places, imageUrl for images)', + }, + }, + request: { url: (params) => `https://google.serper.dev/${params.type || 'search'}`, method: 'POST', diff --git a/apps/sim/tools/sharepoint/create_page.ts b/apps/sim/tools/sharepoint/create_page.ts index 235ecac6cd..5817e1b396 100644 --- a/apps/sim/tools/sharepoint/create_page.ts +++ b/apps/sim/tools/sharepoint/create_page.ts @@ -56,6 +56,21 @@ export const createPageTool: ToolConfig { // Use specific site if provided, otherwise use root site diff --git a/apps/sim/tools/sharepoint/list_sites.ts 
b/apps/sim/tools/sharepoint/list_sites.ts index d7876a47be..42189eac1b 100644 --- a/apps/sim/tools/sharepoint/list_sites.ts +++ b/apps/sim/tools/sharepoint/list_sites.ts @@ -35,6 +35,50 @@ export const listSitesTool: ToolConfig { let baseUrl: string diff --git a/apps/sim/tools/sharepoint/read_page.ts b/apps/sim/tools/sharepoint/read_page.ts index 36a0685993..e4a574ac6b 100644 --- a/apps/sim/tools/sharepoint/read_page.ts +++ b/apps/sim/tools/sharepoint/read_page.ts @@ -59,6 +59,64 @@ export const readPageTool: ToolConfig { // Use specific site if provided, otherwise use root site diff --git a/apps/sim/tools/slack/canvas.ts b/apps/sim/tools/slack/canvas.ts index 1f1a40b7d0..9f161d2465 100644 --- a/apps/sim/tools/slack/canvas.ts +++ b/apps/sim/tools/slack/canvas.ts @@ -67,6 +67,12 @@ export const slackCanvasTool: ToolConfig }, }, + outputs: { + canvas_id: { type: 'string', description: 'ID of the created canvas' }, + channel: { type: 'string', description: 'Channel where canvas was created' }, + title: { type: 'string', description: 'Title of the canvas' }, + }, + request: { url: 'https://slack.com/api/canvases.create', method: 'POST', diff --git a/apps/sim/tools/slack/message.ts b/apps/sim/tools/slack/message.ts index ab5035de74..ec82c75568 100644 --- a/apps/sim/tools/slack/message.ts +++ b/apps/sim/tools/slack/message.ts @@ -53,6 +53,11 @@ export const slackMessageTool: ToolConfig { const url = new URL('https://slack.com/api/conversations.history') diff --git a/apps/sim/tools/stagehand/agent.ts b/apps/sim/tools/stagehand/agent.ts index 604586ff86..36744a8f5d 100644 --- a/apps/sim/tools/stagehand/agent.ts +++ b/apps/sim/tools/stagehand/agent.ts @@ -43,6 +43,33 @@ export const agentTool: ToolConfig description: 'Optional JSON schema defining the structure of data the agent should return', }, }, + outputs: { + agentResult: { + type: 'object', + description: 'Result from the Stagehand agent execution', + properties: { + success: { type: 'boolean', description: 'Whether 
the agent task completed successfully' }, + completed: { type: 'boolean', description: 'Whether the task was fully completed' }, + message: { type: 'string', description: 'Status message or final result' }, + actions: { + type: 'array', + items: { + type: 'object', + properties: { + type: { type: 'string', description: 'Type of action performed' }, + params: { type: 'object', description: 'Parameters used for the action' }, + result: { type: 'object', description: 'Result of the action' }, + }, + }, + description: 'List of actions performed by the agent', + }, + }, + }, + structuredOutput: { + type: 'object', + description: 'Extracted data matching the provided output schema', + }, + }, request: { url: '/api/tools/stagehand/agent', diff --git a/apps/sim/tools/stagehand/extract.ts b/apps/sim/tools/stagehand/extract.ts index 56d58b65e5..93f2f83e1a 100644 --- a/apps/sim/tools/stagehand/extract.ts +++ b/apps/sim/tools/stagehand/extract.ts @@ -36,6 +36,12 @@ export const extractTool: ToolConfig `https://${params.projectId}.supabase.co/rest/v1/${params.table}?select=*`, method: 'DELETE', diff --git a/apps/sim/tools/supabase/get_row.ts b/apps/sim/tools/supabase/get_row.ts index bd492d83e0..2b47cb879c 100644 --- a/apps/sim/tools/supabase/get_row.ts +++ b/apps/sim/tools/supabase/get_row.ts @@ -32,6 +32,18 @@ export const getRowTool: ToolConfig `https://${params.projectId}.supabase.co/rest/v1/${params.table}`, method: 'GET', diff --git a/apps/sim/tools/supabase/insert.ts b/apps/sim/tools/supabase/insert.ts index a1ddb9a321..705772731d 100644 --- a/apps/sim/tools/supabase/insert.ts +++ b/apps/sim/tools/supabase/insert.ts @@ -32,6 +32,18 @@ export const insertTool: ToolConfig `https://${params.projectId}.supabase.co/rest/v1/${params.table}?select=*`, method: 'POST', diff --git a/apps/sim/tools/supabase/query.ts b/apps/sim/tools/supabase/query.ts index d37de91260..f78870aa5c 100644 --- a/apps/sim/tools/supabase/query.ts +++ b/apps/sim/tools/supabase/query.ts @@ -44,6 +44,18 @@ 
export const queryTool: ToolConfig = description: 'Your Supabase service role secret key', }, }, + outputs: { + success: { type: 'boolean', description: 'Operation success status' }, + output: { + type: 'object', + description: 'Query operation results', + properties: { + message: { type: 'string', description: 'Operation status message' }, + results: { type: 'array', description: 'Array of records returned from the query' }, + }, + }, + error: { type: 'string', description: 'Error message if the operation failed' }, + }, request: { url: (params) => `https://${params.projectId}.supabase.co/rest/v1/${params.table}`, method: 'GET', diff --git a/apps/sim/tools/supabase/update.ts b/apps/sim/tools/supabase/update.ts index d8046de756..1bcf77e1b8 100644 --- a/apps/sim/tools/supabase/update.ts +++ b/apps/sim/tools/supabase/update.ts @@ -38,6 +38,18 @@ export const updateTool: ToolConfig `https://${params.projectId}.supabase.co/rest/v1/${params.table}?select=*`, method: 'PATCH', diff --git a/apps/sim/tools/tavily/extract.ts b/apps/sim/tools/tavily/extract.ts index 3403def969..29355c65da 100644 --- a/apps/sim/tools/tavily/extract.ts +++ b/apps/sim/tools/tavily/extract.ts @@ -28,6 +28,34 @@ export const extractTool: ToolConfig description: 'Tavily API Key', }, }, + outputs: { + results: { + type: 'array', + items: { + type: 'object', + properties: { + url: { type: 'string', description: 'The URL that was extracted' }, + raw_content: { type: 'string', description: 'The raw text content from the webpage' }, + }, + }, + description: 'Successfully extracted content from URLs', + }, + failed_results: { + type: 'array', + items: { + type: 'object', + properties: { + url: { type: 'string', description: 'The URL that failed extraction' }, + error: { type: 'string', description: 'Error message for the failed extraction' }, + }, + }, + description: 'URLs that failed to extract content', + }, + response_time: { + type: 'number', + description: 'Time taken for the extraction request in 
seconds', + }, + }, request: { url: 'https://api.tavily.com/extract', diff --git a/apps/sim/tools/tavily/search.ts b/apps/sim/tools/tavily/search.ts index c900973d8f..adce0d9ec7 100644 --- a/apps/sim/tools/tavily/search.ts +++ b/apps/sim/tools/tavily/search.ts @@ -28,6 +28,23 @@ export const searchTool: ToolConfig = description: 'Tavily API Key', }, }, + outputs: { + query: { type: 'string', description: 'The search query that was executed' }, + results: { + type: 'array', + items: { + type: 'object', + properties: { + title: { type: 'string' }, + url: { type: 'string' }, + snippet: { type: 'string' }, + raw_content: { type: 'string' }, + }, + }, + description: 'Search results with titles, URLs, and content snippets', + }, + response_time: { type: 'number', description: 'Time taken for the search request in seconds' }, + }, request: { url: 'https://api.tavily.com/search', diff --git a/apps/sim/tools/telegram/message.ts b/apps/sim/tools/telegram/message.ts index 0235365cc6..dfb1b03688 100644 --- a/apps/sim/tools/telegram/message.ts +++ b/apps/sim/tools/telegram/message.ts @@ -30,6 +30,15 @@ export const telegramMessageTool: ToolConfig `https://api.telegram.org/bot${params.botToken}/sendMessage`, diff --git a/apps/sim/tools/thinking/tool.ts b/apps/sim/tools/thinking/tool.ts index 939648e608..0b7907fddb 100644 --- a/apps/sim/tools/thinking/tool.ts +++ b/apps/sim/tools/thinking/tool.ts @@ -18,6 +18,13 @@ export const thinkingTool: ToolConfig }, }, + outputs: { + acknowledgedThought: { + type: 'string', + description: 'The thought that was processed and acknowledged', + }, + }, + // Use directExecution as no external HTTP call is needed directExecution: async (params: ThinkingToolParams): Promise => { // Simply acknowledge the thought by returning it in the output diff --git a/apps/sim/tools/twilio/send_sms.ts b/apps/sim/tools/twilio/send_sms.ts index c6bf31f0e5..eaeeefd492 100644 --- a/apps/sim/tools/twilio/send_sms.ts +++ b/apps/sim/tools/twilio/send_sms.ts @@ -43,6 
+43,14 @@ export const sendSMSTool: ToolConfig }, }, + outputs: { + success: { type: 'boolean', description: 'SMS send success status' }, + messageId: { type: 'string', description: 'Unique Twilio message identifier (SID)' }, + status: { type: 'string', description: 'Message delivery status from Twilio' }, + fromNumber: { type: 'string', description: 'Phone number message was sent from' }, + toNumber: { type: 'string', description: 'Phone number message was sent to' }, + }, + request: { url: (params) => { if (!params.accountSid) { diff --git a/apps/sim/tools/typeform/files.ts b/apps/sim/tools/typeform/files.ts index dd0ea89b8e..4ab57d8b42 100644 --- a/apps/sim/tools/typeform/files.ts +++ b/apps/sim/tools/typeform/files.ts @@ -44,6 +44,11 @@ export const filesTool: ToolConfig = description: 'Typeform Personal Access Token', }, }, + outputs: { + fileUrl: { type: 'string', description: 'Direct download URL for the uploaded file' }, + contentType: { type: 'string', description: 'MIME type of the uploaded file' }, + filename: { type: 'string', description: 'Original filename of the uploaded file' }, + }, request: { url: (params: TypeformFilesParams) => { const encodedFormId = encodeURIComponent(params.formId) diff --git a/apps/sim/tools/typeform/insights.ts b/apps/sim/tools/typeform/insights.ts index b56a02cf39..4060933e6d 100644 --- a/apps/sim/tools/typeform/insights.ts +++ b/apps/sim/tools/typeform/insights.ts @@ -23,6 +23,65 @@ export const insightsTool: ToolConfig { const encodedFormId = encodeURIComponent(params.formId) diff --git a/apps/sim/tools/types.ts b/apps/sim/tools/types.ts index 417c63175b..792654c8e9 100644 --- a/apps/sim/tools/types.ts +++ b/apps/sim/tools/types.ts @@ -2,6 +2,18 @@ import type { OAuthService } from '@/lib/oauth/oauth' export type HttpMethod = 'GET' | 'POST' | 'PUT' | 'DELETE' | 'PATCH' | 'HEAD' +export interface OutputProperty { + type: string + description?: string + optional?: boolean + properties?: Record + items?: { + type: string + 
description?: string + properties?: Record + } +} + export type ParameterVisibility = | 'user-or-llm' // User can provide OR LLM must generate | 'user-only' // Only user can provide (required/optional determined by required field) @@ -48,12 +60,18 @@ export interface ToolConfig

{ outputs?: Record< string, { - type: 'string' | 'number' | 'boolean' | 'json' | 'file' | 'file[]' + type: 'string' | 'number' | 'boolean' | 'json' | 'file' | 'file[]' | 'array' | 'object' description?: string + optional?: boolean fileConfig?: { mimeType?: string // Expected MIME type for file outputs extension?: string // Expected file extension } + items?: { + type: string + properties?: Record + } + properties?: Record } > diff --git a/apps/sim/tools/vision/tool.ts b/apps/sim/tools/vision/tool.ts index 6bf3e43b7a..876699f270 100644 --- a/apps/sim/tools/vision/tool.ts +++ b/apps/sim/tools/vision/tool.ts @@ -35,6 +35,33 @@ export const visionTool: ToolConfig = { }, }, + outputs: { + content: { + type: 'string', + description: 'The analyzed content and description of the image', + }, + model: { + type: 'string', + description: 'The vision model that was used for analysis', + optional: true, + }, + tokens: { + type: 'number', + description: 'Total tokens used for the analysis', + optional: true, + }, + usage: { + type: 'object', + description: 'Detailed token usage breakdown', + optional: true, + properties: { + input_tokens: { type: 'number', description: 'Tokens used for input processing' }, + output_tokens: { type: 'number', description: 'Tokens used for response generation' }, + total_tokens: { type: 'number', description: 'Total tokens consumed' }, + }, + }, + }, + request: { method: 'POST', url: (params) => { @@ -122,6 +149,14 @@ export const visionTool: ToolConfig = { tokens: data.content ? data.usage?.input_tokens + data.usage?.output_tokens : data.usage?.total_tokens, + usage: data.usage + ? 
{ + input_tokens: data.usage.input_tokens, + output_tokens: data.usage.output_tokens, + total_tokens: + data.usage.total_tokens || data.usage.input_tokens + data.usage.output_tokens, + } + : undefined, }, } }, diff --git a/apps/sim/tools/wealthbox/read_contact.ts b/apps/sim/tools/wealthbox/read_contact.ts index 8c3ead67d8..cc7dc65503 100644 --- a/apps/sim/tools/wealthbox/read_contact.ts +++ b/apps/sim/tools/wealthbox/read_contact.ts @@ -23,6 +23,26 @@ export const wealthboxReadContactTool: ToolConfig { const contactId = params.contactId?.trim() diff --git a/apps/sim/tools/wealthbox/read_note.ts b/apps/sim/tools/wealthbox/read_note.ts index a51a9044e0..f307220f90 100644 --- a/apps/sim/tools/wealthbox/read_note.ts +++ b/apps/sim/tools/wealthbox/read_note.ts @@ -23,6 +23,26 @@ export const wealthboxReadNoteTool: ToolConfig { const noteId = params.noteId?.trim() diff --git a/apps/sim/tools/wealthbox/read_task.ts b/apps/sim/tools/wealthbox/read_task.ts index 1b850910f1..3c8fbf6d7a 100644 --- a/apps/sim/tools/wealthbox/read_task.ts +++ b/apps/sim/tools/wealthbox/read_task.ts @@ -23,6 +23,26 @@ export const wealthboxReadTaskTool: ToolConfig { const taskId = params.taskId?.trim() diff --git a/apps/sim/tools/wealthbox/write_contact.ts b/apps/sim/tools/wealthbox/write_contact.ts index 9f1d14e8f3..0b153a19f8 100644 --- a/apps/sim/tools/wealthbox/write_contact.ts +++ b/apps/sim/tools/wealthbox/write_contact.ts @@ -92,6 +92,26 @@ export const wealthboxWriteContactTool: ToolConfig { const taskId = params.taskId?.trim() diff --git a/apps/sim/tools/whatsapp/send_message.ts b/apps/sim/tools/whatsapp/send_message.ts index c5015f0967..f43d3f23df 100644 --- a/apps/sim/tools/whatsapp/send_message.ts +++ b/apps/sim/tools/whatsapp/send_message.ts @@ -37,6 +37,14 @@ export const sendMessageTool: ToolConfig { if (!params.phoneNumberId) { diff --git a/apps/sim/tools/wikipedia/content.ts b/apps/sim/tools/wikipedia/content.ts index 91d4e34d59..8cbf74d116 100644 --- 
a/apps/sim/tools/wikipedia/content.ts +++ b/apps/sim/tools/wikipedia/content.ts @@ -20,6 +20,20 @@ export const pageContentTool: ToolConfig { const encodedTitle = encodeURIComponent(params.pageTitle.replace(/ /g, '_')) diff --git a/apps/sim/tools/wikipedia/random.ts b/apps/sim/tools/wikipedia/random.ts index 29978f6469..49a7da9653 100644 --- a/apps/sim/tools/wikipedia/random.ts +++ b/apps/sim/tools/wikipedia/random.ts @@ -9,6 +9,20 @@ export const randomPageTool: ToolConfig, WikipediaRandomPa params: {}, + outputs: { + randomPage: { + type: 'object', + description: 'Random Wikipedia page data', + properties: { + title: { type: 'string', description: 'Page title' }, + extract: { type: 'string', description: 'Page extract/summary' }, + description: { type: 'string', description: 'Page description', optional: true }, + thumbnail: { type: 'object', description: 'Thumbnail image data', optional: true }, + content_urls: { type: 'object', description: 'URLs to access the page' }, + }, + }, + }, + request: { url: () => { return 'https://en.wikipedia.org/api/rest_v1/page/random/summary' diff --git a/apps/sim/tools/wikipedia/search.ts b/apps/sim/tools/wikipedia/search.ts index ad95f1b0fb..bda3dcb3ee 100644 --- a/apps/sim/tools/wikipedia/search.ts +++ b/apps/sim/tools/wikipedia/search.ts @@ -22,6 +22,29 @@ export const searchTool: ToolConfig { const baseUrl = 'https://en.wikipedia.org/w/api.php' diff --git a/apps/sim/tools/wikipedia/summary.ts b/apps/sim/tools/wikipedia/summary.ts index 2e20783a99..886e548f14 100644 --- a/apps/sim/tools/wikipedia/summary.ts +++ b/apps/sim/tools/wikipedia/summary.ts @@ -20,6 +20,21 @@ export const pageSummaryTool: ToolConfig { const encodedTitle = encodeURIComponent(params.pageTitle.replace(/ /g, '_')) diff --git a/apps/sim/tools/x/read.ts b/apps/sim/tools/x/read.ts index 66a30a883e..d40df1850b 100644 --- a/apps/sim/tools/x/read.ts +++ b/apps/sim/tools/x/read.ts @@ -34,6 +34,24 @@ export const xReadTool: ToolConfig = { }, }, + outputs: { + 
tweet: { + type: 'object', + description: 'The main tweet data', + properties: { + id: { type: 'string', description: 'Tweet ID' }, + text: { type: 'string', description: 'Tweet content text' }, + createdAt: { type: 'string', description: 'Tweet creation timestamp' }, + authorId: { type: 'string', description: 'ID of the tweet author' }, + }, + }, + context: { + type: 'object', + description: 'Conversation context including parent and root tweets', + optional: true, + }, + }, + request: { url: (params) => { const expansions = [ diff --git a/apps/sim/tools/x/search.ts b/apps/sim/tools/x/search.ts index 94f0afbfab..7266006e71 100644 --- a/apps/sim/tools/x/search.ts +++ b/apps/sim/tools/x/search.ts @@ -52,6 +52,36 @@ export const xSearchTool: ToolConfig = { }, }, + outputs: { + tweets: { + type: 'array', + description: 'Array of tweets matching the search query', + items: { + type: 'object', + properties: { + id: { type: 'string', description: 'Tweet ID' }, + text: { type: 'string', description: 'Tweet content' }, + createdAt: { type: 'string', description: 'Creation timestamp' }, + authorId: { type: 'string', description: 'Author user ID' }, + }, + }, + }, + includes: { + type: 'object', + description: 'Additional data including user profiles and media', + optional: true, + }, + meta: { + type: 'object', + description: 'Search metadata including result count and pagination tokens', + properties: { + resultCount: { type: 'number', description: 'Number of results returned' }, + newestId: { type: 'string', description: 'ID of the newest tweet' }, + oldestId: { type: 'string', description: 'ID of the oldest tweet' }, + }, + }, + }, + request: { url: (params) => { const query = params.query diff --git a/apps/sim/tools/x/user.ts b/apps/sim/tools/x/user.ts index 5a19702233..0bcb212b71 100644 --- a/apps/sim/tools/x/user.ts +++ b/apps/sim/tools/x/user.ts @@ -31,6 +31,29 @@ export const xUserTool: ToolConfig = { }, }, + outputs: { + user: { + type: 'object', + description: 'X 
user profile information', + properties: { + id: { type: 'string', description: 'User ID' }, + username: { type: 'string', description: 'Username without @ symbol' }, + name: { type: 'string', description: 'Display name' }, + description: { type: 'string', description: 'User bio/description', optional: true }, + verified: { type: 'boolean', description: 'Whether the user is verified' }, + metrics: { + type: 'object', + description: 'User statistics', + properties: { + followersCount: { type: 'number', description: 'Number of followers' }, + followingCount: { type: 'number', description: 'Number of users following' }, + tweetCount: { type: 'number', description: 'Total number of tweets' }, + }, + }, + }, + }, + }, + request: { url: (params) => { const username = encodeURIComponent(params.username) diff --git a/apps/sim/tools/x/write.ts b/apps/sim/tools/x/write.ts index c509c33fc8..2c68789f5c 100644 --- a/apps/sim/tools/x/write.ts +++ b/apps/sim/tools/x/write.ts @@ -46,6 +46,29 @@ export const xWriteTool: ToolConfig = { }, }, + outputs: { + tweet: { + type: 'object', + description: 'The newly created tweet data', + properties: { + id: { type: 'string', description: 'Tweet ID' }, + text: { type: 'string', description: 'Tweet content text' }, + createdAt: { type: 'string', description: 'Tweet creation timestamp' }, + authorId: { type: 'string', description: 'ID of the tweet author' }, + conversationId: { type: 'string', description: 'Conversation thread ID', optional: true }, + attachments: { + type: 'object', + description: 'Media or poll attachments', + optional: true, + properties: { + mediaKeys: { type: 'array', description: 'Media attachment keys', optional: true }, + pollId: { type: 'string', description: 'Poll ID if poll attached', optional: true }, + }, + }, + }, + }, + }, + request: { url: 'https://api.twitter.com/2/tweets', method: 'POST', diff --git a/apps/sim/tools/youtube/search.ts b/apps/sim/tools/youtube/search.ts index bd910278c8..1f2ca42e7a 100644 --- 
a/apps/sim/tools/youtube/search.ts +++ b/apps/sim/tools/youtube/search.ts @@ -27,6 +27,32 @@ export const youtubeSearchTool: ToolConfig { let url = `https://www.googleapis.com/youtube/v3/search?part=snippet&type=video&key=${params.apiKey}&q=${encodeURIComponent( diff --git a/apps/sim/trigger.config.ts b/apps/sim/trigger.config.ts index 33d5c3d2d0..9e166d9eed 100644 --- a/apps/sim/trigger.config.ts +++ b/apps/sim/trigger.config.ts @@ -11,5 +11,5 @@ export default defineConfig({ maxAttempts: 1, }, }, - dirs: ['./trigger'], + dirs: ['./background'], }) diff --git a/apps/sim/triggers/airtable/index.ts b/apps/sim/triggers/airtable/index.ts new file mode 100644 index 0000000000..ba2bc49df8 --- /dev/null +++ b/apps/sim/triggers/airtable/index.ts @@ -0,0 +1 @@ +export { airtableWebhookTrigger } from './webhook' diff --git a/apps/sim/triggers/airtable/webhook.ts b/apps/sim/triggers/airtable/webhook.ts new file mode 100644 index 0000000000..bae9be80cd --- /dev/null +++ b/apps/sim/triggers/airtable/webhook.ts @@ -0,0 +1,125 @@ +import { AirtableIcon } from '@/components/icons' +import type { TriggerConfig } from '../types' + +export const airtableWebhookTrigger: TriggerConfig = { + id: 'airtable_webhook', + name: 'Airtable Webhook', + provider: 'airtable', + description: + 'Trigger workflow from Airtable record changes like create, update, and delete events', + version: '1.0.0', + icon: AirtableIcon, + + configFields: { + baseId: { + type: 'string', + label: 'Base ID', + placeholder: 'appXXXXXXXXXXXXXX', + description: 'The ID of the Airtable Base this webhook will monitor.', + required: true, + }, + tableId: { + type: 'string', + label: 'Table ID', + placeholder: 'tblXXXXXXXXXXXXXX', + description: 'The ID of the table within the Base that the webhook will monitor.', + required: true, + }, + includeCellValues: { + type: 'boolean', + label: 'Include Full Record Data', + description: 'Enable to receive the complete record data in the payload, not just changes.', + 
defaultValue: false, + }, + }, + + outputs: { + event_type: { + type: 'string', + description: 'Type of Airtable event (e.g., record.created, record.updated, record.deleted)', + }, + base_id: { + type: 'string', + description: 'Airtable base identifier', + }, + table_id: { + type: 'string', + description: 'Airtable table identifier', + }, + record_id: { + type: 'string', + description: 'Record identifier that was modified', + }, + record_data: { + type: 'string', + description: 'Complete record data (when Include Full Record Data is enabled)', + }, + changed_fields: { + type: 'string', + description: 'Fields that were changed in the record', + }, + webhook_id: { + type: 'string', + description: 'Unique webhook identifier', + }, + timestamp: { + type: 'string', + description: 'Event timestamp', + }, + }, + + instructions: [ + 'Ensure you have provided the correct Base ID and Table ID above.', + 'Sim will automatically configure the webhook in your Airtable account when you save.', + 'Any changes made to records in the specified table will trigger this workflow.', + "If 'Include Full Record Data' is enabled, the entire record will be sent; otherwise, only the changed fields are sent.", + 'You can find your Base ID in the Airtable URL or API documentation for your base.', + 'Table IDs can be found in the Airtable API documentation or by inspecting the table URL.', + ], + + samplePayload: { + webhook: { + id: 'achAbCdEfGhIjKlMn', + }, + timestamp: '2023-01-01T00:00:00.000Z', + base: { + id: 'appXXXXXXXXXXXXXX', + }, + table: { + id: 'tblXXXXXXXXXXXXXX', + }, + changedTablesById: { + tblXXXXXXXXXXXXXX: { + changedRecordsById: { + recXXXXXXXXXXXXXX: { + current: { + id: 'recXXXXXXXXXXXXXX', + createdTime: '2023-01-01T00:00:00.000Z', + fields: { + Name: 'Sample Record', + Status: 'Active', + }, + }, + previous: { + id: 'recXXXXXXXXXXXXXX', + createdTime: '2023-01-01T00:00:00.000Z', + fields: { + Name: 'Sample Record', + Status: 'Inactive', + }, + }, + }, + }, + 
createdRecordsById: {}, + destroyedRecordIds: [], + }, + }, + }, + + webhook: { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + }, + }, +} diff --git a/apps/sim/triggers/discord/index.ts b/apps/sim/triggers/discord/index.ts new file mode 100644 index 0000000000..49d8a1889d --- /dev/null +++ b/apps/sim/triggers/discord/index.ts @@ -0,0 +1 @@ +export { discordWebhookTrigger } from './webhook' diff --git a/apps/sim/triggers/discord/webhook.ts b/apps/sim/triggers/discord/webhook.ts new file mode 100644 index 0000000000..7474e44900 --- /dev/null +++ b/apps/sim/triggers/discord/webhook.ts @@ -0,0 +1,95 @@ +import { DiscordIcon } from '@/components/icons' +import type { TriggerConfig } from '../types' + +export const discordWebhookTrigger: TriggerConfig = { + id: 'discord_webhook', + name: 'Discord Webhook', + provider: 'discord', + description: 'Trigger workflow from Discord webhook events and send messages to Discord channels', + version: '1.0.0', + icon: DiscordIcon, + + configFields: { + webhookName: { + type: 'string', + label: 'Webhook Name', + placeholder: 'Sim Bot', + description: 'This name will be displayed as the sender of messages in Discord.', + required: false, + }, + avatarUrl: { + type: 'string', + label: 'Avatar URL', + placeholder: 'https://example.com/avatar.png', + description: "URL to an image that will be used as the webhook's avatar.", + required: false, + }, + }, + + outputs: { + content: { + type: 'string', + description: 'Message content from Discord webhook', + }, + username: { + type: 'string', + description: 'Username of the sender (if provided)', + }, + avatar_url: { + type: 'string', + description: 'Avatar URL of the sender (if provided)', + }, + timestamp: { + type: 'string', + description: 'Timestamp when the webhook was triggered', + }, + webhook_id: { + type: 'string', + description: 'Discord webhook identifier', + }, + webhook_token: { + type: 'string', + description: 'Discord webhook token', + }, + guild_id: { + 
type: 'string', + description: 'Discord server/guild ID', + }, + channel_id: { + type: 'string', + description: 'Discord channel ID where the event occurred', + }, + embeds: { + type: 'string', + description: 'Embedded content data (if any)', + }, + }, + + instructions: [ + 'Go to Discord Server Settings > Integrations.', + 'Click "Webhooks" then "New Webhook".', + 'Customize the name and channel.', + 'Click "Copy Webhook URL".', + 'Paste the copied Discord URL into the main Webhook URL field above.', + 'Your workflow triggers when Discord sends an event to that URL.', + ], + + samplePayload: { + content: 'Hello from Sim!', + username: 'Optional Custom Name', + avatar_url: 'https://example.com/avatar.png', + timestamp: new Date().toISOString(), + webhook_id: '1234567890123456789', + webhook_token: 'example-webhook-token', + guild_id: '0987654321098765432', + channel_id: '1122334455667788990', + embeds: [], + }, + + webhook: { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + }, + }, +} diff --git a/apps/sim/triggers/generic/index.ts b/apps/sim/triggers/generic/index.ts new file mode 100644 index 0000000000..88f16d54b2 --- /dev/null +++ b/apps/sim/triggers/generic/index.ts @@ -0,0 +1 @@ +export { genericWebhookTrigger } from './webhook' diff --git a/apps/sim/triggers/generic/webhook.ts b/apps/sim/triggers/generic/webhook.ts new file mode 100644 index 0000000000..654da44942 --- /dev/null +++ b/apps/sim/triggers/generic/webhook.ts @@ -0,0 +1,84 @@ +import { WebhookIcon } from '@/components/icons' +import type { TriggerConfig } from '../types' + +export const genericWebhookTrigger: TriggerConfig = { + id: 'generic_webhook', + name: 'Generic Webhook', + provider: 'generic', + description: 'Receive webhooks from any service or API', + version: '1.0.0', + icon: WebhookIcon, + + configFields: { + // Generic webhooks don't require any specific configuration + // The webhook URL is provided automatically + }, + + outputs: { + payload: { + type: 'json', + 
description: 'Complete webhook payload received', + }, + headers: { + type: 'json', + description: 'HTTP request headers', + }, + method: { + type: 'string', + description: 'HTTP method (GET, POST, PUT, etc.)', + }, + url: { + type: 'string', + description: 'Request URL path', + }, + query: { + type: 'json', + description: 'URL query parameters', + }, + timestamp: { + type: 'string', + description: 'Webhook received timestamp', + }, + // Common fields that many services use + event: { + type: 'string', + description: 'Event type (extracted from payload.event, payload.type, or payload.event_type)', + }, + id: { + type: 'string', + description: 'Event ID (extracted from payload.id, payload.event_id, or payload.uuid)', + }, + data: { + type: 'json', + description: 'Event data (extracted from payload.data or the full payload)', + }, + }, + + instructions: [ + 'Copy the webhook URL provided above and use it in your external service or API.', + 'Configure your service to send webhooks to this URL.', + 'The webhook will receive any HTTP method (GET, POST, PUT, DELETE, etc.).', + 'All request data (headers, body, query parameters) will be available in your workflow.', + 'Common fields like "event", "id", and "data" will be automatically extracted from the payload when available.', + ], + + samplePayload: { + event: 'user.created', + id: 'evt_1234567890', + data: { + user: { + id: 'user_123', + email: 'user@example.com', + name: 'John Doe', + }, + }, + timestamp: '2023-01-01T12:00:00Z', + }, + + webhook: { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + }, + }, +} diff --git a/apps/sim/triggers/github/index.ts b/apps/sim/triggers/github/index.ts new file mode 100644 index 0000000000..3466962ad8 --- /dev/null +++ b/apps/sim/triggers/github/index.ts @@ -0,0 +1 @@ +export { githubWebhookTrigger } from './webhook' diff --git a/apps/sim/triggers/github/webhook.ts b/apps/sim/triggers/github/webhook.ts new file mode 100644 index 0000000000..93de1df336 --- 
/dev/null +++ b/apps/sim/triggers/github/webhook.ts @@ -0,0 +1,166 @@ +import { GithubIcon } from '@/components/icons' +import type { TriggerConfig } from '../types' + +export const githubWebhookTrigger: TriggerConfig = { + id: 'github_webhook', + name: 'GitHub Webhook', + provider: 'github', + description: 'Trigger workflow from GitHub events like push, pull requests, issues, and more', + version: '1.0.0', + icon: GithubIcon, + + configFields: { + contentType: { + type: 'select', + label: 'Content Type', + options: ['application/json', 'application/x-www-form-urlencoded'], + defaultValue: 'application/json', + description: 'Format GitHub will use when sending the webhook payload.', + required: true, + }, + webhookSecret: { + type: 'string', + label: 'Webhook Secret (Recommended)', + placeholder: 'Generate or enter a strong secret', + description: 'Validates that webhook deliveries originate from GitHub.', + required: false, + isSecret: true, + }, + sslVerification: { + type: 'select', + label: 'SSL Verification', + options: ['enabled', 'disabled'], + defaultValue: 'enabled', + description: 'GitHub verifies SSL certificates when delivering webhooks.', + required: true, + }, + }, + + outputs: { + action: { + type: 'string', + description: 'The action that was performed (e.g., opened, closed, synchronize)', + }, + event_type: { + type: 'string', + description: 'Type of GitHub event (e.g., push, pull_request, issues)', + }, + repository: { + type: 'string', + description: 'Repository full name (owner/repo)', + }, + repository_name: { + type: 'string', + description: 'Repository name only', + }, + repository_owner: { + type: 'string', + description: 'Repository owner username or organization', + }, + sender: { + type: 'string', + description: 'Username of the user who triggered the event', + }, + sender_id: { + type: 'string', + description: 'User ID of the sender', + }, + ref: { + type: 'string', + description: 'Git reference (for push events)', + }, + before: { + 
type: 'string', + description: 'SHA of the commit before the push', + }, + after: { + type: 'string', + description: 'SHA of the commit after the push', + }, + commits: { + type: 'string', + description: 'Array of commit objects (for push events)', + }, + pull_request: { + type: 'string', + description: 'Pull request object (for pull_request events)', + }, + issue: { + type: 'string', + description: 'Issue object (for issues events)', + }, + comment: { + type: 'string', + description: 'Comment object (for comment events)', + }, + branch: { + type: 'string', + description: 'Branch name extracted from ref', + }, + commit_message: { + type: 'string', + description: 'Latest commit message', + }, + commit_author: { + type: 'string', + description: 'Author of the latest commit', + }, + }, + + instructions: [ + 'Go to your GitHub Repository > Settings > Webhooks.', + 'Click "Add webhook".', + 'Paste the Webhook URL (from above) into the "Payload URL" field.', + 'Select your chosen Content Type from the dropdown above.', + 'Enter the Webhook Secret (from above) into the "Secret" field if you\'ve configured one.', + 'Set SSL verification according to your selection above.', + 'Choose which events should trigger this webhook.', + 'Ensure "Active" is checked and click "Add webhook".', + ], + + samplePayload: { + action: 'opened', + number: 1, + pull_request: { + id: 1, + number: 1, + state: 'open', + title: 'Update README', + user: { + login: 'octocat', + id: 1, + }, + body: 'This is a pretty simple change that we need to pull into main.', + head: { + ref: 'feature-branch', + sha: 'abc123', + }, + base: { + ref: 'main', + sha: 'def456', + }, + }, + repository: { + id: 35129377, + name: 'public-repo', + full_name: 'baxterthehacker/public-repo', + owner: { + login: 'baxterthehacker', + id: 6752317, + }, + }, + sender: { + login: 'baxterthehacker', + id: 6752317, + }, + }, + + webhook: { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + 'X-GitHub-Event': 
'pull_request', + 'X-GitHub-Delivery': 'xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx', + }, + }, +} diff --git a/apps/sim/triggers/gmail/index.ts b/apps/sim/triggers/gmail/index.ts new file mode 100644 index 0000000000..1724af8152 --- /dev/null +++ b/apps/sim/triggers/gmail/index.ts @@ -0,0 +1 @@ +export { gmailPollingTrigger } from './poller' diff --git a/apps/sim/triggers/gmail/poller.ts b/apps/sim/triggers/gmail/poller.ts new file mode 100644 index 0000000000..4301b6f9a2 --- /dev/null +++ b/apps/sim/triggers/gmail/poller.ts @@ -0,0 +1,142 @@ +import { GmailIcon } from '@/components/icons' +import type { TriggerConfig } from '@/triggers/types' + +export const gmailPollingTrigger: TriggerConfig = { + id: 'gmail_poller', + name: 'Gmail Email Trigger', + provider: 'gmail', + description: 'Triggers when new emails are received in Gmail (requires Gmail credentials)', + version: '1.0.0', + icon: GmailIcon, + + // Gmail requires OAuth credentials to work + requiresCredentials: true, + credentialProvider: 'google-email', + + configFields: { + labelIds: { + type: 'multiselect', + label: 'Gmail Labels to Monitor', + placeholder: 'Select Gmail labels to monitor for new emails', + description: 'Choose which Gmail labels to monitor. 
Leave empty to monitor all emails.', + required: false, + options: [], // Will be populated dynamically from user's Gmail labels + }, + labelFilterBehavior: { + type: 'select', + label: 'Label Filter Behavior', + options: ['INCLUDE', 'EXCLUDE'], + defaultValue: 'INCLUDE', + description: + 'Include only emails with selected labels, or exclude emails with selected labels', + required: true, + }, + markAsRead: { + type: 'boolean', + label: 'Mark as Read', + defaultValue: false, + description: 'Automatically mark emails as read after processing', + required: false, + }, + includeRawEmail: { + type: 'boolean', + label: 'Include Raw Email Data', + defaultValue: false, + description: 'Include the complete raw Gmail API response in the trigger payload', + required: false, + }, + }, + + outputs: { + email: { + id: { + type: 'string', + description: 'Gmail message ID', + }, + threadId: { + type: 'string', + description: 'Gmail thread ID', + }, + subject: { + type: 'string', + description: 'Email subject line', + }, + from: { + type: 'string', + description: 'Sender email address', + }, + to: { + type: 'string', + description: 'Recipient email address', + }, + cc: { + type: 'string', + description: 'CC recipients', + }, + date: { + type: 'string', + description: 'Email date in ISO format', + }, + bodyText: { + type: 'string', + description: 'Plain text email body', + }, + bodyHtml: { + type: 'string', + description: 'HTML email body', + }, + labels: { + type: 'string', + description: 'Email labels array', + }, + hasAttachments: { + type: 'boolean', + description: 'Whether email has attachments', + }, + attachments: { + type: 'json', + description: 'Array of attachment information', + }, + }, + timestamp: { + type: 'string', + description: 'Event timestamp', + }, + rawEmail: { + type: 'json', + description: 'Complete raw email data from Gmail API (if enabled)', + }, + }, + + instructions: [ + 'Connect your Gmail account using OAuth credentials', + 'Configure which Gmail labels 
to monitor (optional)', + 'The system will automatically check for new emails and trigger your workflow', + ], + + samplePayload: { + email: { + id: '18e0ffabd5b5a0f4', + threadId: '18e0ffabd5b5a0f4', + subject: 'Monthly Report - April 2025', + from: 'sender@example.com', + to: 'recipient@example.com', + cc: 'team@example.com', + date: '2025-05-10T10:15:23.000Z', + bodyText: + 'Hello,\n\nPlease find attached the monthly report for April 2025.\n\nBest regards,\nSender', + bodyHtml: + '

Hello,

Please find attached the monthly report for April 2025.

Best regards,
Sender

', + labels: ['INBOX', 'IMPORTANT'], + hasAttachments: true, + attachments: [ + { + filename: 'report-april-2025.pdf', + mimeType: 'application/pdf', + size: 2048576, + }, + ], + }, + timestamp: '2025-05-10T10:15:30.123Z', + }, +} diff --git a/apps/sim/triggers/index.ts b/apps/sim/triggers/index.ts new file mode 100644 index 0000000000..3bb7493a44 --- /dev/null +++ b/apps/sim/triggers/index.ts @@ -0,0 +1,53 @@ +// Import trigger definitions + +import { airtableWebhookTrigger } from './airtable' +import { discordWebhookTrigger } from './discord' +import { genericWebhookTrigger } from './generic' +import { githubWebhookTrigger } from './github' +import { gmailPollingTrigger } from './gmail' +import { microsoftTeamsWebhookTrigger } from './microsoftteams' +import { outlookPollingTrigger } from './outlook' +import { slackWebhookTrigger } from './slack' +import { stripeWebhookTrigger } from './stripe/webhook' +import { telegramWebhookTrigger } from './telegram' +import type { TriggerConfig, TriggerRegistry } from './types' +import { whatsappWebhookTrigger } from './whatsapp' + +// Central registry of all available triggers +export const TRIGGER_REGISTRY: TriggerRegistry = { + slack_webhook: slackWebhookTrigger, + airtable_webhook: airtableWebhookTrigger, + discord_webhook: discordWebhookTrigger, + generic_webhook: genericWebhookTrigger, + github_webhook: githubWebhookTrigger, + gmail_poller: gmailPollingTrigger, + microsoftteams_webhook: microsoftTeamsWebhookTrigger, + outlook_poller: outlookPollingTrigger, + stripe_webhook: stripeWebhookTrigger, + telegram_webhook: telegramWebhookTrigger, + whatsapp_webhook: whatsappWebhookTrigger, +} + +// Utility functions for working with triggers +export function getTrigger(triggerId: string): TriggerConfig | undefined { + return TRIGGER_REGISTRY[triggerId] +} + +export function getTriggersByProvider(provider: string): TriggerConfig[] { + return Object.values(TRIGGER_REGISTRY).filter((trigger) => trigger.provider === provider) +} + 
+export function getAllTriggers(): TriggerConfig[] { + return Object.values(TRIGGER_REGISTRY) +} + +export function getTriggerIds(): string[] { + return Object.keys(TRIGGER_REGISTRY) +} + +export function isTriggerValid(triggerId: string): boolean { + return triggerId in TRIGGER_REGISTRY +} + +// Export types for use elsewhere +export type { TriggerConfig, TriggerRegistry } from './types' diff --git a/apps/sim/triggers/microsoftteams/index.ts b/apps/sim/triggers/microsoftteams/index.ts new file mode 100644 index 0000000000..e9cfa2876f --- /dev/null +++ b/apps/sim/triggers/microsoftteams/index.ts @@ -0,0 +1 @@ +export { microsoftTeamsWebhookTrigger } from './webhook' diff --git a/apps/sim/triggers/microsoftteams/webhook.ts b/apps/sim/triggers/microsoftteams/webhook.ts new file mode 100644 index 0000000000..598e3e8d2a --- /dev/null +++ b/apps/sim/triggers/microsoftteams/webhook.ts @@ -0,0 +1,100 @@ +import { MicrosoftTeamsIcon } from '@/components/icons' +import type { TriggerConfig } from '../types' + +export const microsoftTeamsWebhookTrigger: TriggerConfig = { + id: 'microsoftteams_webhook', + name: 'Microsoft Teams Webhook', + provider: 'microsoftteams', + description: 'Trigger workflow from Microsoft Teams events like messages and mentions', + version: '1.0.0', + icon: MicrosoftTeamsIcon, + + configFields: { + hmacSecret: { + type: 'string', + label: 'HMAC Secret', + placeholder: 'Enter HMAC secret from Teams', + description: + 'The security token provided by Teams when creating an outgoing webhook. 
Used to verify request authenticity.', + required: true, + isSecret: true, + }, + }, + + outputs: { + type: { + type: 'string', + description: 'Type of Teams message (e.g., message)', + }, + id: { + type: 'string', + description: 'Unique message identifier', + }, + timestamp: { + type: 'string', + description: 'Message timestamp', + }, + localTimestamp: { + type: 'string', + description: 'Local timestamp of the message', + }, + serviceUrl: { + type: 'string', + description: 'Microsoft Teams service URL', + }, + channelId: { + type: 'string', + description: 'Teams channel ID where the event occurred', + }, + from_id: { + type: 'string', + description: 'User ID who sent the message', + }, + from_name: { + type: 'string', + description: 'Username who sent the message', + }, + conversation_id: { + type: 'string', + description: 'Conversation/thread ID', + }, + text: { + type: 'string', + description: 'Message text content', + }, + }, + + instructions: [ + 'Open Microsoft Teams and go to the team where you want to add the webhook.', + 'Click the three dots (•••) next to the team name and select "Manage team".', + 'Go to the "Apps" tab and click "Create an outgoing webhook".', + 'Provide a name, description, and optionally a profile picture.', + 'Set the callback URL to your Sim webhook URL (shown above).', + 'Copy the HMAC security token and paste it into the "HMAC Secret" field above.', + 'Click "Create" to finish setup.', + ], + + samplePayload: { + type: 'message', + id: '1234567890', + timestamp: '2023-01-01T00:00:00.000Z', + localTimestamp: '2023-01-01T00:00:00.000Z', + serviceUrl: 'https://smba.trafficmanager.net/amer/', + channelId: 'msteams', + from: { + id: '29:1234567890abcdef', + name: 'John Doe', + }, + conversation: { + id: '19:meeting_abcdef@thread.v2', + }, + text: 'Hello Sim Bot!', + }, + + webhook: { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + }, + }, +} diff --git a/apps/sim/triggers/outlook/index.ts 
b/apps/sim/triggers/outlook/index.ts new file mode 100644 index 0000000000..5fca542b25 --- /dev/null +++ b/apps/sim/triggers/outlook/index.ts @@ -0,0 +1 @@ +export { outlookPollingTrigger } from './poller' diff --git a/apps/sim/triggers/outlook/poller.ts b/apps/sim/triggers/outlook/poller.ts new file mode 100644 index 0000000000..5356f89b2f --- /dev/null +++ b/apps/sim/triggers/outlook/poller.ts @@ -0,0 +1,146 @@ +import { OutlookIcon } from '@/components/icons' +import type { TriggerConfig } from '@/triggers/types' + +export const outlookPollingTrigger: TriggerConfig = { + id: 'outlook_poller', + name: 'Outlook Email Trigger', + provider: 'outlook', + description: 'Triggers when new emails are received in Outlook (requires Microsoft credentials)', + version: '1.0.0', + icon: OutlookIcon, + + // Outlook requires OAuth credentials to work + requiresCredentials: true, + credentialProvider: 'outlook', + + configFields: { + folderIds: { + type: 'multiselect', + label: 'Outlook Folders to Monitor', + placeholder: 'Select Outlook folders to monitor for new emails', + description: 'Choose which Outlook folders to monitor. 
Leave empty to monitor all emails.', + required: false, + options: [], // Will be populated dynamically from user's Outlook folders + }, + folderFilterBehavior: { + type: 'select', + label: 'Folder Filter Behavior', + options: ['INCLUDE', 'EXCLUDE'], + defaultValue: 'INCLUDE', + description: + 'Include only emails from selected folders, or exclude emails from selected folders', + required: true, + }, + markAsRead: { + type: 'boolean', + label: 'Mark as Read', + defaultValue: false, + description: 'Automatically mark emails as read after processing', + required: false, + }, + includeRawEmail: { + type: 'boolean', + label: 'Include Raw Email Data', + defaultValue: false, + description: 'Include the complete raw Microsoft Graph API response in the trigger payload', + required: false, + }, + }, + + outputs: { + email: { + id: { + type: 'string', + description: 'Outlook message ID', + }, + conversationId: { + type: 'string', + description: 'Outlook conversation ID', + }, + subject: { + type: 'string', + description: 'Email subject line', + }, + from: { + type: 'string', + description: 'Sender email address', + }, + to: { + type: 'string', + description: 'Recipient email address', + }, + cc: { + type: 'string', + description: 'CC recipients', + }, + date: { + type: 'string', + description: 'Email date in ISO format', + }, + bodyText: { + type: 'string', + description: 'Plain text email body (preview)', + }, + bodyHtml: { + type: 'string', + description: 'HTML email body', + }, + hasAttachments: { + type: 'boolean', + description: 'Whether email has attachments', + }, + isRead: { + type: 'boolean', + description: 'Whether email is read', + }, + folderId: { + type: 'string', + description: 'Outlook folder ID where email is located', + }, + messageId: { + type: 'string', + description: 'Message ID for threading', + }, + threadId: { + type: 'string', + description: 'Thread ID for conversation threading', + }, + }, + timestamp: { + type: 'string', + description: 'Event 
timestamp', + }, + rawEmail: { + type: 'json', + description: 'Complete raw email data from Microsoft Graph API (if enabled)', + }, + }, + + instructions: [ + 'Connect your Microsoft account using OAuth credentials', + 'Configure which Outlook folders to monitor (optional)', + 'The system will automatically check for new emails and trigger your workflow', + ], + + samplePayload: { + email: { + id: 'AAMkADg1OWUyZjg4LWJkNGYtNDFhYy04OGVjLWVkM2VhY2YzYTcwZgBGAAAAAACE3bU', + conversationId: 'AAQkADg1OWUyZjg4LWJkNGYtNDFhYy04OGVjLWVkM2VhY2YzYTcwZgAQAErzGBJV', + subject: 'Quarterly Business Review - Q1 2025', + from: 'manager@company.com', + to: 'team@company.com', + cc: 'stakeholders@company.com', + date: '2025-05-10T14:30:00Z', + bodyText: + 'Hi Team,\n\nPlease find attached the Q1 2025 business review document. We need to discuss the results in our next meeting.\n\nBest regards,\nManager', + bodyHtml: + '

Hi Team,

Please find attached the Q1 2025 business review document. We need to discuss the results in our next meeting.

Best regards,
Manager

', + hasAttachments: true, + isRead: false, + folderId: 'AQMkADg1OWUyZjg4LWJkNGYtNDFhYy04OGVjAC4AAAJzE3bU', + messageId: 'AAMkADg1OWUyZjg4LWJkNGYtNDFhYy04OGVjLWVkM2VhY2YzYTcwZgBGAAAAAACE3bU', + threadId: 'AAQkADg1OWUyZjg4LWJkNGYtNDFhYy04OGVjLWVkM2VhY2YzYTcwZgAQAErzGBJV', + }, + timestamp: '2025-05-10T14:30:15.123Z', + }, +} diff --git a/apps/sim/triggers/slack/index.ts b/apps/sim/triggers/slack/index.ts new file mode 100644 index 0000000000..93eee192e0 --- /dev/null +++ b/apps/sim/triggers/slack/index.ts @@ -0,0 +1 @@ +export { slackWebhookTrigger } from './webhook' diff --git a/apps/sim/triggers/slack/webhook.ts b/apps/sim/triggers/slack/webhook.ts new file mode 100644 index 0000000000..fe652b6901 --- /dev/null +++ b/apps/sim/triggers/slack/webhook.ts @@ -0,0 +1,92 @@ +import { SlackIcon } from '@/components/icons' +import type { TriggerConfig } from '../types' + +export const slackWebhookTrigger: TriggerConfig = { + id: 'slack_webhook', + name: 'Slack Webhook', + provider: 'slack', + description: 'Trigger workflow from Slack events like mentions, messages, and reactions', + version: '1.0.0', + icon: SlackIcon, + + configFields: { + signingSecret: { + type: 'string', + label: 'Signing Secret', + placeholder: 'Enter your Slack app signing secret', + description: 'The signing secret from your Slack app to validate request authenticity.', + required: true, + isSecret: true, + }, + }, + + outputs: { + event_type: { + type: 'string', + description: 'Type of Slack event (e.g., app_mention, message)', + }, + channel: { + type: 'string', + description: 'Slack channel ID where the event occurred', + }, + channel_name: { + type: 'string', + description: 'Human-readable channel name', + }, + user: { + type: 'string', + description: 'User ID who triggered the event', + }, + user_name: { + type: 'string', + description: 'Username who triggered the event', + }, + text: { + type: 'string', + description: 'Message text content', + }, + timestamp: { + type: 'string', + 
description: 'Event timestamp', + }, + team_id: { + type: 'string', + description: 'Slack workspace/team ID', + }, + event_id: { + type: 'string', + description: 'Unique event identifier', + }, + }, + + instructions: [ + 'Go to Slack Apps page', + 'If you don\'t have an app:
  • Create an app from scratch
  • Give it a name and select your workspace
', + 'Go to "Basic Information", find the "Signing Secret", and paste it in the field above.', + 'Go to "OAuth & Permissions" and add bot token scopes:
  • app_mentions:read - For viewing messages that tag your bot with an @
  • chat:write - To send messages to channels your bot is a part of
', + 'Go to "Event Subscriptions":
  • Enable events
  • Under "Subscribe to Bot Events", add app_mention to listen to messages that mention your bot
  • Paste the Webhook URL (from above) into the "Request URL" field
', + 'Save changes in both Slack and here.', + ], + + samplePayload: { + type: 'event_callback', + event: { + type: 'app_mention', + channel: 'C0123456789', + user: 'U0123456789', + text: '<@U0BOTUSER123> Hello from Slack!', + ts: '1234567890.123456', + channel_type: 'channel', + }, + team_id: 'T0123456789', + event_id: 'Ev0123456789', + event_time: 1234567890, + }, + + webhook: { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + }, + }, +} diff --git a/apps/sim/triggers/stripe/index.ts b/apps/sim/triggers/stripe/index.ts new file mode 100644 index 0000000000..3a0b14e6cf --- /dev/null +++ b/apps/sim/triggers/stripe/index.ts @@ -0,0 +1 @@ +export { stripeWebhookTrigger } from './webhook' diff --git a/apps/sim/triggers/stripe/webhook.ts b/apps/sim/triggers/stripe/webhook.ts new file mode 100644 index 0000000000..fb9ca0a4a4 --- /dev/null +++ b/apps/sim/triggers/stripe/webhook.ts @@ -0,0 +1,93 @@ +import { ShieldCheck } from 'lucide-react' +import type { TriggerConfig } from '../types' + +export const stripeWebhookTrigger: TriggerConfig = { + id: 'stripe_webhook', + name: 'Stripe Webhook', + provider: 'stripe', + description: 'Triggers when Stripe events occur (payments, subscriptions, etc.)', + version: '1.0.0', + icon: ShieldCheck, + + configFields: { + // Stripe webhooks don't require configuration fields - events are selected in Stripe dashboard + }, + + outputs: { + id: { + type: 'string', + description: 'Event ID from Stripe', + }, + type: { + type: 'string', + description: 'Event type (e.g., charge.succeeded, payment_intent.succeeded)', + }, + created: { + type: 'string', + description: 'Timestamp when the event was created', + }, + data: { + type: 'string', + description: 'Event data containing the affected Stripe object', + }, + object: { + type: 'string', + description: 'The Stripe object that was updated (e.g., charge, payment_intent)', + }, + livemode: { + type: 'string', + description: 'Whether this event occurred in live mode or test 
mode', + }, + apiVersion: { + type: 'string', + description: 'API version used to render this event', + }, + request: { + type: 'string', + description: 'Information about the request that triggered this event', + }, + }, + + instructions: [ + 'Go to your Stripe Dashboard at https://dashboard.stripe.com/', + 'Navigate to Developers > Webhooks', + 'Click "Add endpoint"', + 'Paste the Webhook URL (from above) into the "Endpoint URL" field', + 'Select the events you want to listen to (e.g., charge.succeeded)', + 'Click "Add endpoint"', + 'Stripe will send a test event to verify your webhook endpoint', + ], + + samplePayload: { + id: 'evt_1234567890', + type: 'charge.succeeded', + created: 1641234567, + data: { + object: { + id: 'ch_1234567890', + object: 'charge', + amount: 2500, + currency: 'usd', + description: 'Sample charge', + paid: true, + status: 'succeeded', + customer: 'cus_1234567890', + receipt_email: 'customer@example.com', + }, + }, + object: 'event', + livemode: false, + api_version: '2020-08-27', + request: { + id: 'req_1234567890', + idempotency_key: null, + }, + }, + + webhook: { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + }, + }, +} diff --git a/apps/sim/triggers/telegram/index.ts b/apps/sim/triggers/telegram/index.ts new file mode 100644 index 0000000000..72ac27a808 --- /dev/null +++ b/apps/sim/triggers/telegram/index.ts @@ -0,0 +1 @@ +export { telegramWebhookTrigger } from './webhook' diff --git a/apps/sim/triggers/telegram/webhook.ts b/apps/sim/triggers/telegram/webhook.ts new file mode 100644 index 0000000000..01afbc13f2 --- /dev/null +++ b/apps/sim/triggers/telegram/webhook.ts @@ -0,0 +1,117 @@ +import { TelegramIcon } from '@/components/icons' +import type { TriggerConfig } from '../types' + +export const telegramWebhookTrigger: TriggerConfig = { + id: 'telegram_webhook', + name: 'Telegram Webhook', + provider: 'telegram', + description: 'Trigger workflow from Telegram bot messages and events', + version: '1.0.0', + 
icon: TelegramIcon, + + configFields: { + botToken: { + type: 'string', + label: 'Bot Token', + placeholder: '123456789:ABCdefGHIjklMNOpqrsTUVwxyz', + description: 'Your Telegram Bot Token from BotFather', + required: true, + isSecret: true, + }, + }, + + outputs: { + update_id: { + type: 'number', + description: 'Unique identifier for the update', + }, + message_id: { + type: 'number', + description: 'Unique message identifier', + }, + from_id: { + type: 'number', + description: 'User ID who sent the message', + }, + from_username: { + type: 'string', + description: 'Username of the sender', + }, + from_first_name: { + type: 'string', + description: 'First name of the sender', + }, + from_last_name: { + type: 'string', + description: 'Last name of the sender', + }, + chat_id: { + type: 'number', + description: 'Unique identifier for the chat', + }, + chat_type: { + type: 'string', + description: 'Type of chat (private, group, supergroup, channel)', + }, + chat_title: { + type: 'string', + description: 'Title of the chat (for groups and channels)', + }, + text: { + type: 'string', + description: 'Message text content', + }, + date: { + type: 'number', + description: 'Date the message was sent (Unix timestamp)', + }, + entities: { + type: 'string', + description: 'Special entities in the message (mentions, hashtags, etc.) 
as JSON string', + }, + }, + + instructions: [ + 'Message "/newbot" to @BotFather in Telegram to create a bot and copy its token.', + 'Enter your Bot Token above.', + 'Save settings and any message sent to your bot will trigger the workflow.', + ], + + samplePayload: { + update_id: 123456789, + message: { + message_id: 123, + from: { + id: 987654321, + is_bot: false, + first_name: 'John', + last_name: 'Doe', + username: 'johndoe', + language_code: 'en', + }, + chat: { + id: 987654321, + first_name: 'John', + last_name: 'Doe', + username: 'johndoe', + type: 'private', + }, + date: 1234567890, + text: 'Hello from Telegram!', + entities: [ + { + offset: 0, + length: 5, + type: 'bold', + }, + ], + }, + }, + + webhook: { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + }, + }, +} diff --git a/apps/sim/triggers/types.ts b/apps/sim/triggers/types.ts new file mode 100644 index 0000000000..7e54251aa4 --- /dev/null +++ b/apps/sim/triggers/types.ts @@ -0,0 +1,67 @@ +export type TriggerFieldType = 'string' | 'boolean' | 'select' | 'number' | 'multiselect' + +export interface TriggerConfigField { + type: TriggerFieldType + label: string + placeholder?: string + options?: string[] + defaultValue?: string | boolean | number | string[] + description?: string + required?: boolean + isSecret?: boolean +} + +export interface TriggerOutput { + type?: string + description?: string + [key: string]: TriggerOutput | string | undefined +} + +export interface TriggerConfig { + id: string + name: string + provider: string + description: string + version: string + + // Optional icon component for UI display + icon?: React.ComponentType<{ className?: string }> + + // Configuration fields that users need to fill + configFields: Record + + // Define the structure of data this trigger outputs to workflows + outputs: Record + + // Setup instructions for users + instructions: string[] + + // Example payload for documentation + samplePayload: any + + // Webhook configuration 
(for most triggers) + webhook?: { + method?: 'POST' | 'GET' | 'PUT' | 'DELETE' + headers?: Record + } + + // For triggers that require OAuth credentials (like Gmail) + requiresCredentials?: boolean + credentialProvider?: string // 'google-email', 'microsoft', etc. +} + +export interface TriggerRegistry { + [triggerId: string]: TriggerConfig +} + +export interface TriggerInstance { + id: string + triggerId: string + blockId: string + workflowId: string + config: Record + webhookPath?: string + isActive: boolean + createdAt: Date + updatedAt: Date +} diff --git a/apps/sim/triggers/whatsapp/index.ts b/apps/sim/triggers/whatsapp/index.ts new file mode 100644 index 0000000000..edc76eaa72 --- /dev/null +++ b/apps/sim/triggers/whatsapp/index.ts @@ -0,0 +1 @@ +export { whatsappWebhookTrigger } from './webhook' diff --git a/apps/sim/triggers/whatsapp/webhook.ts b/apps/sim/triggers/whatsapp/webhook.ts new file mode 100644 index 0000000000..583f220092 --- /dev/null +++ b/apps/sim/triggers/whatsapp/webhook.ts @@ -0,0 +1,108 @@ +import { WhatsAppIcon } from '@/components/icons' +import type { TriggerConfig } from '../types' + +export const whatsappWebhookTrigger: TriggerConfig = { + id: 'whatsapp_webhook', + name: 'WhatsApp Webhook', + provider: 'whatsapp', + description: 'Trigger workflow from WhatsApp messages and events via Business Platform webhooks', + version: '1.0.0', + icon: WhatsAppIcon, + + configFields: { + verificationToken: { + type: 'string', + label: 'Verification Token', + placeholder: 'Generate or enter a verification token', + description: + "Enter any secure token here. 
You'll need to provide the same token in your WhatsApp Business Platform dashboard.", + required: true, + isSecret: true, + }, + }, + + outputs: { + messageId: { + type: 'string', + description: 'Unique message identifier', + }, + from: { + type: 'string', + description: 'Phone number of the message sender', + }, + phoneNumberId: { + type: 'string', + description: 'WhatsApp Business phone number ID that received the message', + }, + text: { + type: 'string', + description: 'Message text content', + }, + timestamp: { + type: 'string', + description: 'Message timestamp', + }, + raw: { + type: 'string', + description: 'Complete raw message object from WhatsApp as JSON string', + }, + }, + + instructions: [ + 'Go to your Meta for Developers Apps page.', + 'If you don\'t have an app:
  • Create an app from scratch
  • Give it a name and select your workspace
', + 'Select your App, then navigate to WhatsApp > Configuration.', + 'Find the Webhooks section and click "Edit".', + 'Paste the Webhook URL (from above) into the "Callback URL" field.', + 'Paste the Verification Token (from above) into the "Verify token" field.', + 'Click "Verify and save".', + 'Click "Manage" next to Webhook fields and subscribe to `messages`.', + ], + + samplePayload: { + object: 'whatsapp_business_account', + entry: [ + { + id: '1234567890123456', + changes: [ + { + value: { + messaging_product: 'whatsapp', + metadata: { + display_phone_number: '15551234567', + phone_number_id: '1234567890123456', + }, + contacts: [ + { + profile: { + name: 'John Doe', + }, + wa_id: '15555551234', + }, + ], + messages: [ + { + from: '15555551234', + id: 'wamid.HBgNMTU1NTU1NTEyMzQVAgASGBQzQTdBNjg4QjU2NjZCMzY4ODE2AA==', + timestamp: '1234567890', + text: { + body: 'Hello from WhatsApp!', + }, + type: 'text', + }, + ], + }, + field: 'messages', + }, + ], + }, + ], + }, + + webhook: { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + }, + }, +} diff --git a/scripts/generate-block-docs.ts b/scripts/generate-block-docs.ts old mode 100644 new mode 100755 index 55c9efbd44..f1a9501972 --- a/scripts/generate-block-docs.ts +++ b/scripts/generate-block-docs.ts @@ -29,22 +29,9 @@ interface BlockConfig { longDescription?: string category: string bgColor?: string - icon?: any - subBlocks?: Array<{ - id: string - title?: string - placeholder?: string - type?: string - layout?: string - required?: boolean - options?: Array<{ label: string; id: string }> - [key: string]: any - }> - inputs?: Record outputs?: Record tools?: { access?: string[] - config?: any } [key: string]: any } @@ -111,8 +98,7 @@ function extractIcons(): Record { // Function to extract block configuration from file content function extractBlockConfig(fileContent: string): BlockConfig | null { try { - // Match the block name and type from imports and export statement - const _typeMatch = 
fileContent.match(/type\s+(\w+)Response\s*=/) + // Extract the block name from export statement const exportMatch = fileContent.match(/export\s+const\s+(\w+)Block\s*:/) if (!exportMatch) { @@ -243,8 +229,6 @@ function extractIconName(content: string): string | null { return iconMatch ? iconMatch[1] : null } -// Helper to extract subBlocks array - // Updated function to extract outputs with a simpler and more reliable approach function extractOutputs(content: string): Record { // Look for the outputs section using balanced brace matching @@ -348,112 +332,6 @@ function extractOutputs(content: string): Record { return outputs } } - - // Fallback: Try to extract fields from the old nested format - const fieldMatches = outputsContent.match(/(\w+)\s*:\s*{([^}]+)}/g) - - if (fieldMatches && fieldMatches.length > 0) { - fieldMatches.forEach((fieldMatch) => { - const fieldNameMatch = fieldMatch.match(/(\w+)\s*:/) - if (fieldNameMatch) { - const fieldName = fieldNameMatch[1] - - // Check if there's a type with a nested structure - const typeMatch = fieldMatch.match(/type\s*:\s*{([^}]+)}/) - if (typeMatch) { - // Handle nested type object - const typeContent = typeMatch[1] - const properties: Record = {} - - // Extract property types from the type object - handle cases with comments - // const propertyMatches = typeContent.match(/(\w+)\s*:\s*['"]([^'"]+)['"]/g) - const propertyMatches = typeContent.match( - /(\w+)\s*:\s*['"](.*?)['"](?:\s*,)?(?:\s*\/\/[^\n]*)?/g - ) - if (propertyMatches) { - propertyMatches.forEach((propMatch) => { - // Extract the property name and type, ignoring any trailing comments - const propParts = propMatch.match(/(\w+)\s*:\s*['"](.*?)['"]/) - if (propParts) { - const propName = propParts[1] - const propType = propParts[2] - - // Look for an inline comment that might contain a description - const commentMatch = propMatch.match(/\/\/\s*(.+)$/) - const description = commentMatch - ? 
commentMatch[1].trim() - : `${propName} of the ${fieldName}` - - properties[propName] = { - type: propType, - description: description, - } - } - }) - } - - // Add the field with properties - outputs[fieldName] = { - properties, - description: `${fieldName} from the block execution`, - } - } else { - // Try to extract a simple type definition - const simpleTypeMatch = fieldMatch.match(/type\s*:\s*['"]([^'"]+)['"]/) - if (simpleTypeMatch) { - outputs[fieldName] = { - type: simpleTypeMatch[1], - description: `${fieldName} output from the block`, - } - } - } - } - }) - } - - // If we parsed anything, return it - if (Object.keys(outputs).length > 0) { - return outputs - } - } - - // Fallback to the original method for backward compatibility - const outputsSection = content.match(/outputs\s*:\s*{([^}]*response[^}]*)}(?:\s*,|\s*})/s) - - if (outputsSection) { - // Find the response type definition - const responseTypeMatch = content.match(/response\s*:\s*{\s*type\s*:\s*{([^}]*)}/s) - - if (responseTypeMatch) { - const typeContent = responseTypeMatch[1] - - // Extract all field: 'type' pairs regardless of comments or formatting - const fieldMatches = typeContent.match(/(\w+)\s*:\s*['"](.*?)['"]/g) - - if (fieldMatches && fieldMatches.length > 0) { - const typeFields: Record = {} - - // Process each field match - fieldMatches.forEach((match) => { - const fieldParts = match.match(/(\w+)\s*:\s*['"](.*?)['"]/) - if (fieldParts) { - const fieldName = fieldParts[1] - const fieldType = fieldParts[2] - typeFields[fieldName] = fieldType - } - }) - - // If we have any fields, return them in the expected structure - if (Object.keys(typeFields).length > 0) { - const result = { - response: { - type: typeFields, - }, - } - return result - } - } - } } return {} @@ -483,16 +361,16 @@ function extractToolsAccess(content: string): string[] { // Function to extract tool information from file content function extractToolInfo( toolName: string, - fileContent: string, - filePath = '' + 
fileContent: string ): { description: string params: Array<{ name: string; type: string; required: boolean; description: string }> outputs: Record } | null { try { - // Extract tool config section - Simplified regex to match any *Tool export pattern - const toolConfigRegex = /export const \w+Tool\s*[=<][^{]*{[\s\S]*?params\s*:\s*{([\s\S]*?)}/im + // Extract tool config section - Match params until the next top-level property + const toolConfigRegex = + /params\s*:\s*{([\s\S]*?)},?\s*(?:outputs|oauth|request|directExecution|postProcess|transformResponse|transformError)/ const toolConfigMatch = fileContent.match(toolConfigRegex) // Extract description @@ -506,14 +384,38 @@ function extractToolInfo( if (toolConfigMatch) { const paramsContent = toolConfigMatch[1] - // More robust approach to extract parameters + // More robust approach to extract parameters with balanced brace matching // Extract each parameter block completely - const paramBlocksRegex = /(\w+)\s*:\s*{([^}]+)}/g + const paramBlocksRegex = /(\w+)\s*:\s*{/g let paramMatch + const paramPositions: Array<{ name: string; start: number; content: string }> = [] while ((paramMatch = paramBlocksRegex.exec(paramsContent)) !== null) { const paramName = paramMatch[1] - const paramBlock = paramMatch[2] + const startPos = paramMatch.index + paramMatch[0].length - 1 // Position of opening brace + + // Find matching closing brace using balanced counting + let braceCount = 1 + let endPos = startPos + 1 + + while (endPos < paramsContent.length && braceCount > 0) { + if (paramsContent[endPos] === '{') { + braceCount++ + } else if (paramsContent[endPos] === '}') { + braceCount-- + } + endPos++ + } + + if (braceCount === 0) { + const paramBlock = paramsContent.substring(startPos + 1, endPos - 1).trim() + paramPositions.push({ name: paramName, start: startPos, content: paramBlock }) + } + } + + for (const param of paramPositions) { + const paramName = param.name + const paramBlock = param.content // Skip the accessToken 
parameter as it's handled automatically by the OAuth flow // Also skip any params parameter which isn't a real input @@ -526,13 +428,19 @@ function extractToolInfo( const requiredMatch = paramBlock.match(/required\s*:\s*(true|false)/) // More careful extraction of description with handling for multiline descriptions - let descriptionMatch = paramBlock.match(/description\s*:\s*'(.*?)'/) + let descriptionMatch = paramBlock.match(/description\s*:\s*'(.*?)'(?=\s*[,}])/s) if (!descriptionMatch) { - descriptionMatch = paramBlock.match(/description\s*:\s*"(.*?)"/) + descriptionMatch = paramBlock.match(/description\s*:\s*"(.*?)"(?=\s*[,}])/s) } if (!descriptionMatch) { // Try for template literals if the description uses backticks - descriptionMatch = paramBlock.match(/description\s*:\s*`([^`]+)`/) + descriptionMatch = paramBlock.match(/description\s*:\s*`([^`]+)`/s) + } + if (!descriptionMatch) { + // Handle multi-line descriptions without ending quote on same line + descriptionMatch = paramBlock.match( + /description\s*:\s*['"]([^'"]*(?:\n[^'"]*)*?)['"](?=\s*[,}])/s + ) } params.push({ @@ -544,115 +452,16 @@ function extractToolInfo( } } - // If no params were found with the first method, try a more direct regex approach - if (params.length === 0) { - const paramRegex = - /(\w+)\s*:\s*{(?:[^{}]|{[^{}]*})*type\s*:\s*['"](.*?)['"](?:[^{}]|{[^{}]*})*required\s*:\s*(true|false)(?:[^{}]|{[^{}]*})*description\s*:\s*['"](.*?)['"](?:[^{}]|{[^{}]*})*}/g - let match - - while ((match = paramRegex.exec(fileContent)) !== null) { - // Skip the accessToken parameter and any params parameter - if (match[1] === 'params' || match[1] === 'tools') continue - - params.push({ - name: match[1], - type: match[2], - required: match[3] === 'true', - description: match[4] || 'No description', - }) - } - } - - // Extract output structure from transformResponse + // First priority: Extract outputs from the new outputs field in ToolConfig let outputs: Record = {} - const outputRegex = 
/transformResponse[\s\S]*?return\s*{[\s\S]*?output\s*:\s*{([^}]*)/ - const outputMatch = fileContent.match(outputRegex) + const outputsFieldRegex = + /outputs\s*:\s*{([\s\S]*?)}\s*,?\s*(?:oauth|params|request|directExecution|postProcess|transformResponse|transformError|$|\})/ + const outputsFieldMatch = fileContent.match(outputsFieldRegex) - if (outputMatch) { - const outputContent = outputMatch[1] - // Try to parse the output structure based on the content - outputs = parseOutputStructure(toolName, outputContent) - } - - // If we couldn't extract outputs from transformResponse, try an alternative approach - if (Object.keys(outputs).length === 0) { - // Look for output in successful response in transformResponse - const successOutputRegex = - /success\s*:\s*true,\s*output\s*:\s*(\{[^}]*\}|\w+(\.\w+)+\s*\|\|\s*\{[^}]*\}|\w+(\.\w+)+\.map\s*\()/ - const successOutputMatch = fileContent.match(successOutputRegex) - - if (successOutputMatch) { - const outputExpression = successOutputMatch[1].trim() - - // Handle case where output is something like "data.data || {}" - if (outputExpression.includes('||')) { - outputs.data = 'json' - } - // Handle array mapping like "data.issues.map(...)" - else if (outputExpression.includes('.map')) { - // Try to extract the array object being mapped - const arrayMapMatch = outputExpression.match(/(\w+(?:\.\w+)+)\.map/) - if (arrayMapMatch) { - const arrayPath = arrayMapMatch[1] - // Get the base object being mapped to an array - const arrayObject = arrayPath.split('.').pop() - if (arrayObject) { - outputs[arrayObject] = 'Array of mapped items' - } - } else { - // Fallback if we can't extract the exact array object - outputs.items = 'Array of mapped items' - } - } - // Handle direct object assignment like "output: { field1, field2 }" - else if (outputExpression.startsWith('{')) { - const fieldMatches = outputExpression.match(/(\w+)\s*:/g) - if (fieldMatches) { - fieldMatches.forEach((match) => { - const fieldName = 
match.trim().replace(':', '') - outputs[fieldName] = 'Dynamic output field' - }) - } - } - // Check for data.X patterns like "data.data" - else if (outputExpression.includes('.')) { - const fieldName = outputExpression.split('.').pop() - if (fieldName) { - outputs[fieldName] = 'json' - } - } - } - } - - // Try to extract TypeScript interface for outputs as a fallback - if (Object.keys(outputs).length === 0) { - const interfaceRegex = new RegExp( - `interface\\s+${toolName.replace(/_/g, '')}Response\\s*{[\\s\\S]*?output\\s*:\\s*{([\\s\\S]*?)}[\\s\\S]*?}` - ) - const interfaceMatch = fileContent.match(interfaceRegex) - - if (interfaceMatch) { - const interfaceContent = interfaceMatch[1] - outputs = parseOutputStructure(toolName, interfaceContent) - } - } - - // Look for TypeScript types in a types.ts file if available - if (Object.keys(outputs).length === 0 && filePath) { - const toolDir = path.dirname(filePath) - const typesPath = path.join(toolDir, 'types.ts') - if (fs.existsSync(typesPath)) { - const typesContent = fs.readFileSync(typesPath, 'utf-8') - const responseTypeRegex = new RegExp( - `interface\\s+${toolName.replace(/_/g, '')}Response\\s*extends\\s+\\w+\\s*{\\s*output\\s*:\\s*{([\\s\\S]*?)}\\s*}`, - 'i' - ) - const responseTypeMatch = typesContent.match(responseTypeRegex) - - if (responseTypeMatch) { - outputs = parseOutputStructure(toolName, responseTypeMatch[1]) - } - } + if (outputsFieldMatch) { + const outputsContent = outputsFieldMatch[1] + outputs = parseToolOutputsField(outputsContent) + console.log(`Found tool outputs field for ${toolName}:`, Object.keys(outputs)) } return { @@ -666,81 +475,318 @@ function extractToolInfo( } } -// Update the parseOutputStructure function to better handle nested objects -function parseOutputStructure(toolName: string, outputContent: string): Record { +// Helper function to recursively format output structure for documentation +function formatOutputStructure(outputs: Record, indentLevel = 0): string { + let result = 
'' + + for (const [key, output] of Object.entries(outputs)) { + let type = 'unknown' + let description = `${key} output from the tool` + + if (typeof output === 'object' && output !== null) { + if (output.type) { + type = output.type + } + + if (output.description) { + description = output.description + } + } + + // Escape special characters in the description + const escapedDescription = description + .replace(/\|/g, '\\|') + .replace(/\{/g, '\\{') + .replace(/\}/g, '\\}') + .replace(/\(/g, '\\(') + .replace(/\)/g, '\\)') + .replace(/\[/g, '\\[') + .replace(/\]/g, '\\]') + .replace(//g, '>') + + // Create prefix based on nesting level with visual hierarchy + let prefix = '' + if (indentLevel === 1) { + prefix = '↳ ' + } else if (indentLevel >= 2) { + // For deeper nesting (like array items), use indented arrows + prefix = ' ↳ ' + } + + // For arrays, expand nested items + if (typeof output === 'object' && output !== null && output.type === 'array') { + result += `| ${prefix}\`${key}\` | ${type} | ${escapedDescription} |\n` + + // Handle array items with properties (nested TWO more levels to show it's inside the array) + if (output.items?.properties) { + // Create a visual separator to show these are array item properties + const arrayItemsResult = formatOutputStructure(output.items.properties, indentLevel + 2) + result += arrayItemsResult + } + } + // For objects, expand properties + else if ( + typeof output === 'object' && + output !== null && + output.properties && + (output.type === 'object' || output.type === 'json') + ) { + result += `| ${prefix}\`${key}\` | ${type} | ${escapedDescription} |\n` + + const nestedResult = formatOutputStructure(output.properties, indentLevel + 1) + result += nestedResult + } + // For simple types, show with prefix if nested + else { + result += `| ${prefix}\`${key}\` | ${type} | ${escapedDescription} |\n` + } + } + + return result +} + +// New function to parse the structured outputs field from ToolConfig +function 
parseToolOutputsField(outputsContent: string): Record { const outputs: Record = {} - // Try to extract field declarations with their types - const fieldRegex = /(\w+)\s*:([^,}]+)/g - let fieldMatch - - while ((fieldMatch = fieldRegex.exec(outputContent)) !== null) { - const fieldName = fieldMatch[1].trim() - - // Determine a good description based on field name - let description = 'Dynamic output field' - - if (fieldName === 'results' || fieldName === 'memories' || fieldName === 'searchResults') { - description = `${fieldName.charAt(0).toUpperCase() + fieldName.slice(1)} from the operation` - } else if (fieldName === 'ids') { - description = 'IDs of created or retrieved resources' - } else if (fieldName === 'answer') { - description = 'Generated answer text' - } else if (fieldName === 'citations') { - description = 'References used to generate the answer' + // Calculate nesting levels for all braces first + const braces: Array<{ type: 'open' | 'close'; pos: number; level: number }> = [] + for (let i = 0; i < outputsContent.length; i++) { + if (outputsContent[i] === '{') { + braces.push({ type: 'open', pos: i, level: 0 }) + } else if (outputsContent[i] === '}') { + braces.push({ type: 'close', pos: i, level: 0 }) } - - outputs[fieldName] = description } - const shorthandRegex = /(?:^\s*|[,{]\s*)([A-Za-z_][\w]*)\s*(?=,|})/g - let shorthandMatch - - while ((shorthandMatch = shorthandRegex.exec(outputContent)) !== null) { - const fieldName = shorthandMatch[1].trim() - - // Ignore fields already captured or those that are part of key/value pairs - if (outputs[fieldName]) continue - - // Provide the same heuristic descriptions as above - let description = 'Dynamic output field' - - if (fieldName === 'results' || fieldName === 'memories' || fieldName === 'searchResults') { - description = `${fieldName.charAt(0).toUpperCase() + fieldName.slice(1)} from the operation` - } else if (fieldName === 'ids') { - description = 'IDs of created or retrieved resources' - } else if 
(fieldName === 'answer') { - description = 'Generated answer text' - } else if (fieldName === 'citations') { - description = 'References used to generate the answer' - } - - outputs[fieldName] = description - } - - // Try to identify common patterns based on tool types - if (Object.keys(outputs).length === 0) { - if (toolName.includes('_search')) { - outputs.results = 'Array of search results' - } else if (toolName.includes('_answer')) { - outputs.answer = 'Generated answer text' - outputs.citations = 'References used to generate the answer' - } else if (toolName.includes('_add')) { - outputs.ids = 'IDs of created resources' - } else if (toolName.includes('_get')) { - outputs.data = 'Retrieved data' + // Calculate actual nesting levels + let currentLevel = 0 + for (const brace of braces) { + if (brace.type === 'open') { + brace.level = currentLevel + currentLevel++ } else { - // Try to extract field names from the output content with a simpler regex - const simpleFieldsRegex = /(\w+)\s*:/g - let simpleFieldMatch + currentLevel-- + brace.level = currentLevel + } + } - while ((simpleFieldMatch = simpleFieldsRegex.exec(outputContent)) !== null) { - outputs[simpleFieldMatch[1]] = 'Dynamic output field' + // Find field definitions and their nesting levels + const fieldStartRegex = /(\w+)\s*:\s*{/g + let match + const fieldPositions: Array<{ name: string; start: number; end: number; level: number }> = [] + + while ((match = fieldStartRegex.exec(outputsContent)) !== null) { + const fieldName = match[1] + const bracePos = match.index + match[0].length - 1 + + // Find the corresponding opening brace to determine nesting level + const openBrace = braces.find((b) => b.type === 'open' && b.pos === bracePos) + if (openBrace) { + // Find the matching closing brace + let braceCount = 1 + let endPos = bracePos + 1 + + while (endPos < outputsContent.length && braceCount > 0) { + if (outputsContent[endPos] === '{') { + braceCount++ + } else if (outputsContent[endPos] === '}') { + 
braceCount-- + } + endPos++ + } + + fieldPositions.push({ + name: fieldName, + start: bracePos, + end: endPos, + level: openBrace.level, + }) + } + } + + // Only process level 0 fields (top-level outputs) + const topLevelFields = fieldPositions.filter((f) => f.level === 0) + + topLevelFields.forEach((field) => { + const fieldContent = outputsContent.substring(field.start + 1, field.end - 1).trim() + + // Parse the field content + const parsedField = parseFieldContent(fieldContent) + if (parsedField) { + outputs[field.name] = parsedField + } + }) + + return outputs +} + +// Helper function to parse individual field content with support for nested structures +function parseFieldContent(fieldContent: string): any { + // Extract type and description + const typeMatch = fieldContent.match(/type\s*:\s*['"]([^'"]+)['"]/) + const descMatch = fieldContent.match(/description\s*:\s*['"`]([^'"`\n]+)['"`]/) + + if (!typeMatch) return null + + const fieldType = typeMatch[1] + const description = descMatch ? 
descMatch[1] : '' + + const result: any = { + type: fieldType, + description: description, + } + + // Check for properties (nested objects) - only for object types, not arrays + if (fieldType === 'object' || fieldType === 'json') { + const propertiesRegex = /properties\s*:\s*{/ + const propertiesStart = fieldContent.search(propertiesRegex) + + if (propertiesStart !== -1) { + const braceStart = fieldContent.indexOf('{', propertiesStart) + let braceCount = 1 + let braceEnd = braceStart + 1 + + // Find matching closing brace + while (braceEnd < fieldContent.length && braceCount > 0) { + if (fieldContent[braceEnd] === '{') braceCount++ + else if (fieldContent[braceEnd] === '}') braceCount-- + braceEnd++ + } + + if (braceCount === 0) { + const propertiesContent = fieldContent.substring(braceStart + 1, braceEnd - 1).trim() + result.properties = parsePropertiesContent(propertiesContent) } } } - return outputs + // Check for items (array items) - ensure balanced brace matching + const itemsRegex = /items\s*:\s*{/ + const itemsStart = fieldContent.search(itemsRegex) + + if (itemsStart !== -1) { + const braceStart = fieldContent.indexOf('{', itemsStart) + let braceCount = 1 + let braceEnd = braceStart + 1 + + // Find matching closing brace + while (braceEnd < fieldContent.length && braceCount > 0) { + if (fieldContent[braceEnd] === '{') braceCount++ + else if (fieldContent[braceEnd] === '}') braceCount-- + braceEnd++ + } + + if (braceCount === 0) { + const itemsContent = fieldContent.substring(braceStart + 1, braceEnd - 1).trim() + const itemsType = itemsContent.match(/type\s*:\s*['"]([^'"]+)['"]/) + + // Only look for description before any properties block to avoid picking up nested property descriptions + const propertiesStart = itemsContent.search(/properties\s*:\s*{/) + const searchContent = + propertiesStart >= 0 ? 
itemsContent.substring(0, propertiesStart) : itemsContent + const itemsDesc = searchContent.match(/description\s*:\s*['"`]([^'"`\n]+)['"`]/) + + result.items = { + type: itemsType ? itemsType[1] : 'object', + description: itemsDesc ? itemsDesc[1] : '', + } + + // Check if items have properties + const itemsPropertiesRegex = /properties\s*:\s*{/ + const itemsPropsStart = itemsContent.search(itemsPropertiesRegex) + + if (itemsPropsStart !== -1) { + const propsBraceStart = itemsContent.indexOf('{', itemsPropsStart) + let propsBraceCount = 1 + let propsBraceEnd = propsBraceStart + 1 + + while (propsBraceEnd < itemsContent.length && propsBraceCount > 0) { + if (itemsContent[propsBraceEnd] === '{') propsBraceCount++ + else if (itemsContent[propsBraceEnd] === '}') propsBraceCount-- + propsBraceEnd++ + } + + if (propsBraceCount === 0) { + const itemsPropsContent = itemsContent + .substring(propsBraceStart + 1, propsBraceEnd - 1) + .trim() + result.items.properties = parsePropertiesContent(itemsPropsContent) + } + } + } + } + + return result +} + +// Helper function to parse properties content recursively +function parsePropertiesContent(propertiesContent: string): Record { + const properties: Record = {} + + // Find property definitions using balanced brace matching, but exclude type-only definitions + const propStartRegex = /(\w+)\s*:\s*{/g + let match + const propPositions: Array<{ name: string; start: number; content: string }> = [] + + while ((match = propStartRegex.exec(propertiesContent)) !== null) { + const propName = match[1] + + // Skip structural keywords that should never be treated as property names + if (propName === 'items' || propName === 'properties') { + continue + } + + const startPos = match.index + match[0].length - 1 // Position of opening brace + + // Find the matching closing brace + let braceCount = 1 + let endPos = startPos + 1 + + while (endPos < propertiesContent.length && braceCount > 0) { + if (propertiesContent[endPos] === '{') { + 
braceCount++ + } else if (propertiesContent[endPos] === '}') { + braceCount-- + } + endPos++ + } + + if (braceCount === 0) { + const propContent = propertiesContent.substring(startPos + 1, endPos - 1).trim() + + // Skip if this is just a type definition (contains only 'type' field) rather than a real property + // This happens with array items definitions like: items: { type: 'string' } + // More precise check: only skip if it ONLY has 'type' and nothing else meaningful + const hasDescription = /description\s*:\s*/.test(propContent) + const hasProperties = /properties\s*:\s*{/.test(propContent) + const hasItems = /items\s*:\s*{/.test(propContent) + const isTypeOnly = + !hasDescription && + !hasProperties && + !hasItems && + /^type\s*:\s*['"].*?['"]\s*,?\s*$/.test(propContent) + + if (!isTypeOnly) { + propPositions.push({ + name: propName, + start: startPos, + content: propContent, + }) + } + } + } + + // Process the actual property definitions + propPositions.forEach((prop) => { + const parsedProp = parseFieldContent(prop.content) + if (parsedProp) { + properties[prop.name] = parsedProp + } + }) + + return properties } // Find and extract information about a tool @@ -784,14 +830,6 @@ async function getToolInfo(toolName: string): Promise<{ // Most common pattern: suffix.ts file in the prefix directory possibleLocations.push(path.join(rootDir, `apps/sim/tools/${toolPrefix}/${toolSuffix}.ts`)) - // Try underscore version if suffix has multiple parts - if (toolSuffix.includes('_')) { - const underscoreSuffix = toolSuffix.replace(/_/g, '_') - possibleLocations.push( - path.join(rootDir, `apps/sim/tools/${toolPrefix}/${underscoreSuffix}.ts`) - ) - } - // Try camelCase version of suffix const camelCaseSuffix = toolSuffix .split('_') @@ -803,45 +841,22 @@ async function getToolInfo(toolName: string): Promise<{ possibleLocations.push(path.join(rootDir, `apps/sim/tools/${toolPrefix}/index.ts`)) // Try to find the tool definition file - let toolFilePath = '' let 
toolFileContent = '' for (const location of possibleLocations) { if (fs.existsSync(location)) { - toolFilePath = location toolFileContent = fs.readFileSync(location, 'utf-8') break } } - // If not found, search in tool-specific directory - if (!toolFileContent) { - const toolsDir = path.join(rootDir, 'apps/tools') - if (fs.existsSync(path.join(toolsDir, toolPrefix))) { - const dirPath = path.join(toolsDir, toolPrefix) - const files = fs.readdirSync(dirPath).filter((file) => file.endsWith('.ts')) - - for (const file of files) { - const filePath = path.join(dirPath, file) - const content = fs.readFileSync(filePath, 'utf-8') - - // Check if this file contains the tool id - if (content.includes(`id: '${toolName}'`) || content.includes(`id: "${toolName}"`)) { - toolFilePath = filePath - toolFileContent = content - break - } - } - } - } - if (!toolFileContent) { console.warn(`Could not find definition for tool: ${toolName}`) return null } // Extract tool information from the file - return extractToolInfo(toolName, toolFileContent, toolFilePath) + return extractToolInfo(toolName, toolFileContent) } catch (error) { console.error(`Error getting info for tool ${toolName}:`, error) return null @@ -897,9 +912,6 @@ function mergeWithManualContent( usage: { regex: /## Usage Instructions/, }, - configuration: { - regex: /## Configuration/, - }, outputs: { regex: /## Outputs/, }, @@ -1011,7 +1023,7 @@ async function generateMarkdownForBlock( bgColor, iconName, outputs = {}, - tools = { access: [], config: {} }, + tools = { access: [] }, } = blockConfig // Get SVG icon if available @@ -1138,48 +1150,47 @@ async function generateMarkdownForBlock( // Add Output Parameters section for the tool toolsSection += '\n#### Output\n\n' - // Prefer block outputs over tool outputs if available, since block outputs have better descriptions - const outputsToUse = Object.keys(outputs).length > 0 ? 
outputs : toolInfo.outputs + // Always prefer tool-specific outputs over block outputs for accuracy + if (Object.keys(toolInfo.outputs).length > 0) { + // Use tool-specific outputs (most accurate) + toolsSection += '| Parameter | Type | Description |\n' + toolsSection += '| --------- | ---- | ----------- |\n' - if (Object.keys(outputsToUse).length > 0) { - // Use block outputs if available, otherwise tool outputs - if (Object.keys(outputs).length > 0) { - // Generate table with block outputs (which have descriptions) - toolsSection += '| Parameter | Type | Description |\n' - toolsSection += '| --------- | ---- | ----------- |\n' + // Use the enhanced formatOutputStructure function to handle nested structures + toolsSection += formatOutputStructure(toolInfo.outputs) + } else if (Object.keys(outputs).length > 0) { + // Fallback to block outputs only if no tool outputs are available + toolsSection += '| Parameter | Type | Description |\n' + toolsSection += '| --------- | ---- | ----------- |\n' - for (const [key, output] of Object.entries(outputs)) { - let type = 'string' - let description = `${key} output from the tool` + for (const [key, output] of Object.entries(outputs)) { + let type = 'string' + let description = `${key} output from the tool` - if (typeof output === 'string') { - type = output - } else if (typeof output === 'object' && output !== null) { - if ('type' in output && typeof output.type === 'string') { - type = output.type - } - if ('description' in output && typeof output.description === 'string') { - description = output.description - } + if (typeof output === 'string') { + type = output + } else if (typeof output === 'object' && output !== null) { + if ('type' in output && typeof output.type === 'string') { + type = output.type + } + if ('description' in output && typeof output.description === 'string') { + description = output.description } - - // Escape special characters in the description - const escapedDescription = description - 
.replace(/\|/g, '\\|') - .replace(/\{/g, '\\{') - .replace(/\}/g, '\\}') - .replace(/\(/g, '\\(') - .replace(/\)/g, '\\)') - .replace(/\[/g, '\\[') - .replace(/\]/g, '\\]') - .replace(//g, '>') - - toolsSection += `| \`${key}\` | ${type} | ${escapedDescription} |\n` } - } else { - // Use dynamically extracted tool outputs as fallback - toolsSection += generateMarkdownTable(toolInfo.outputs) + + // Escape special characters in the description + const escapedDescription = description + .replace(/\|/g, '\\|') + .replace(/\{/g, '\\{') + .replace(/\}/g, '\\}') + .replace(/\(/g, '\\(') + .replace(/\)/g, '\\)') + .replace(/\[/g, '\\[') + .replace(/\]/g, '\\]') + .replace(//g, '>') + + toolsSection += `| \`${key}\` | ${type} | ${escapedDescription} |\n` } } else { toolsSection += 'This tool does not produce any outputs.\n' @@ -1286,34 +1297,3 @@ generateAllBlockDocs() console.error('Fatal error:', error) process.exit(1) }) - -function generateMarkdownTable(outputs: Record): string { - let table = '' - table += '| Parameter | Type | Description |\n' - table += '| --------- | ---- | ----------- |\n' - - for (const [key, value] of Object.entries(outputs)) { - // Try to determine a reasonable type from the value description - let inferredType = 'string' - if (value.toLowerCase().includes('array')) inferredType = 'array' - if (value.toLowerCase().includes('json')) inferredType = 'json' - if (value.toLowerCase().includes('number')) inferredType = 'number' - if (value.toLowerCase().includes('boolean')) inferredType = 'boolean' - - // Escape special characters in the description - const escapedDescription = value - .replace(/\|/g, '\\|') - .replace(/\{/g, '\\{') - .replace(/\}/g, '\\}') - .replace(/\(/g, '\\(') - .replace(/\)/g, '\\)') - .replace(/\[/g, '\\[') - .replace(/\]/g, '\\]') - .replace(//g, '>') - - table += `| \`${key}\` | ${inferredType} | ${escapedDescription} |\n` - } - - return table -}