mirror of
https://github.com/simstudioai/sim.git
synced 2026-03-15 03:00:33 -04:00
Compare commits
16 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
6586c5ce40 | ||
|
|
127968d467 | ||
|
|
2722f0efbf | ||
|
|
3ce947566d | ||
|
|
4f45f705a5 | ||
|
|
d640fa0852 | ||
|
|
28f8e0fd97 | ||
|
|
cc38ecaf12 | ||
|
|
70c36cb7aa | ||
|
|
0a6a2ee694 | ||
|
|
8579beb199 | ||
|
|
115b4581a5 | ||
|
|
fcdcaed00d | ||
|
|
04fa31864b | ||
|
|
6b355e9b54 | ||
|
|
127994f077 |
10
.github/workflows/test-build.yml
vendored
10
.github/workflows/test-build.yml
vendored
@@ -90,6 +90,16 @@ jobs:
|
||||
|
||||
echo "✅ All feature flags are properly configured"
|
||||
|
||||
- name: Check subblock ID stability
|
||||
run: |
|
||||
if [ "${{ github.event_name }}" = "pull_request" ]; then
|
||||
BASE_REF="origin/${{ github.base_ref }}"
|
||||
git fetch --depth=1 origin "${{ github.base_ref }}" 2>/dev/null || true
|
||||
else
|
||||
BASE_REF="HEAD~1"
|
||||
fi
|
||||
bun run apps/sim/scripts/check-subblock-id-stability.ts "$BASE_REF"
|
||||
|
||||
- name: Lint code
|
||||
run: bun run lint:check
|
||||
|
||||
|
||||
@@ -24,7 +24,7 @@ These operations let your agents access and analyze Reddit content as part of yo
|
||||
|
||||
## Usage Instructions
|
||||
|
||||
Integrate Reddit into workflows. Read posts, comments, and search content. Submit posts, vote, reply, edit, and manage your Reddit account.
|
||||
Integrate Reddit into workflows. Read posts, comments, and search content. Submit posts, vote, reply, edit, manage messages, and access user and subreddit info.
|
||||
|
||||
|
||||
|
||||
@@ -39,14 +39,15 @@ Fetch posts from a subreddit with different sorting options
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `subreddit` | string | Yes | The subreddit to fetch posts from \(e.g., "technology", "news"\) |
|
||||
| `sort` | string | No | Sort method for posts \(e.g., "hot", "new", "top", "rising"\). Default: "hot" |
|
||||
| `sort` | string | No | Sort method for posts \(e.g., "hot", "new", "top", "rising", "controversial"\). Default: "hot" |
|
||||
| `limit` | number | No | Maximum number of posts to return \(e.g., 25\). Default: 10, max: 100 |
|
||||
| `time` | string | No | Time filter for "top" sorted posts: "day", "week", "month", "year", or "all" \(default: "day"\) |
|
||||
| `time` | string | No | Time filter for "top" sorted posts: "day", "week", "month", "year", or "all" \(default: "all"\) |
|
||||
| `after` | string | No | Fullname of a thing to fetch items after \(for pagination\) |
|
||||
| `before` | string | No | Fullname of a thing to fetch items before \(for pagination\) |
|
||||
| `count` | number | No | A count of items already seen in the listing \(used for numbering\) |
|
||||
| `show` | string | No | Show items that would normally be filtered \(e.g., "all"\) |
|
||||
| `sr_detail` | boolean | No | Expand subreddit details in the response |
|
||||
| `g` | string | No | Geo filter for posts \(e.g., "GLOBAL", "US", "AR", etc.\) |
|
||||
|
||||
#### Output
|
||||
|
||||
@@ -55,6 +56,7 @@ Fetch posts from a subreddit with different sorting options
|
||||
| `subreddit` | string | Name of the subreddit where posts were fetched from |
|
||||
| `posts` | array | Array of posts with title, author, URL, score, comments count, and metadata |
|
||||
| ↳ `id` | string | Post ID |
|
||||
| ↳ `name` | string | Thing fullname \(t3_xxxxx\) |
|
||||
| ↳ `title` | string | Post title |
|
||||
| ↳ `author` | string | Author username |
|
||||
| ↳ `url` | string | Post URL |
|
||||
@@ -66,6 +68,8 @@ Fetch posts from a subreddit with different sorting options
|
||||
| ↳ `selftext` | string | Text content for self posts |
|
||||
| ↳ `thumbnail` | string | Thumbnail URL |
|
||||
| ↳ `subreddit` | string | Subreddit name |
|
||||
| `after` | string | Fullname of the last item for forward pagination |
|
||||
| `before` | string | Fullname of the first item for backward pagination |
|
||||
|
||||
### `reddit_get_comments`
|
||||
|
||||
@@ -83,12 +87,9 @@ Fetch comments from a specific Reddit post
|
||||
| `context` | number | No | Number of parent comments to include |
|
||||
| `showedits` | boolean | No | Show edit information for comments |
|
||||
| `showmore` | boolean | No | Include "load more comments" elements in the response |
|
||||
| `showtitle` | boolean | No | Include submission title in the response |
|
||||
| `threaded` | boolean | No | Return comments in threaded/nested format |
|
||||
| `truncate` | number | No | Integer to truncate comment depth |
|
||||
| `after` | string | No | Fullname of a thing to fetch items after \(for pagination\) |
|
||||
| `before` | string | No | Fullname of a thing to fetch items before \(for pagination\) |
|
||||
| `count` | number | No | A count of items already seen in the listing \(used for numbering\) |
|
||||
| `comment` | string | No | ID36 of a comment to focus on \(returns that comment thread\) |
|
||||
|
||||
#### Output
|
||||
|
||||
@@ -96,6 +97,7 @@ Fetch comments from a specific Reddit post
|
||||
| --------- | ---- | ----------- |
|
||||
| `post` | object | Post information including ID, title, author, content, and metadata |
|
||||
| ↳ `id` | string | Post ID |
|
||||
| ↳ `name` | string | Thing fullname \(t3_xxxxx\) |
|
||||
| ↳ `title` | string | Post title |
|
||||
| ↳ `author` | string | Post author |
|
||||
| ↳ `selftext` | string | Post text content |
|
||||
@@ -104,6 +106,7 @@ Fetch comments from a specific Reddit post
|
||||
| ↳ `permalink` | string | Reddit permalink |
|
||||
| `comments` | array | Nested comments with author, body, score, timestamps, and replies |
|
||||
| ↳ `id` | string | Comment ID |
|
||||
| ↳ `name` | string | Thing fullname \(t1_xxxxx\) |
|
||||
| ↳ `author` | string | Comment author |
|
||||
| ↳ `body` | string | Comment text |
|
||||
| ↳ `score` | number | Comment score |
|
||||
@@ -135,6 +138,7 @@ Fetch controversial posts from a subreddit
|
||||
| `subreddit` | string | Name of the subreddit where posts were fetched from |
|
||||
| `posts` | array | Array of controversial posts with title, author, URL, score, comments count, and metadata |
|
||||
| ↳ `id` | string | Post ID |
|
||||
| ↳ `name` | string | Thing fullname \(t3_xxxxx\) |
|
||||
| ↳ `title` | string | Post title |
|
||||
| ↳ `author` | string | Author username |
|
||||
| ↳ `url` | string | Post URL |
|
||||
@@ -146,6 +150,8 @@ Fetch controversial posts from a subreddit
|
||||
| ↳ `selftext` | string | Text content for self posts |
|
||||
| ↳ `thumbnail` | string | Thumbnail URL |
|
||||
| ↳ `subreddit` | string | Subreddit name |
|
||||
| `after` | string | Fullname of the last item for forward pagination |
|
||||
| `before` | string | Fullname of the first item for backward pagination |
|
||||
|
||||
### `reddit_search`
|
||||
|
||||
@@ -165,6 +171,8 @@ Search for posts within a subreddit
|
||||
| `before` | string | No | Fullname of a thing to fetch items before \(for pagination\) |
|
||||
| `count` | number | No | A count of items already seen in the listing \(used for numbering\) |
|
||||
| `show` | string | No | Show items that would normally be filtered \(e.g., "all"\) |
|
||||
| `type` | string | No | Type of search results: "link" \(posts\), "sr" \(subreddits\), or "user" \(users\). Default: "link" |
|
||||
| `sr_detail` | boolean | No | Expand subreddit details in the response |
|
||||
|
||||
#### Output
|
||||
|
||||
@@ -173,6 +181,7 @@ Search for posts within a subreddit
|
||||
| `subreddit` | string | Name of the subreddit where search was performed |
|
||||
| `posts` | array | Array of search result posts with title, author, URL, score, comments count, and metadata |
|
||||
| ↳ `id` | string | Post ID |
|
||||
| ↳ `name` | string | Thing fullname \(t3_xxxxx\) |
|
||||
| ↳ `title` | string | Post title |
|
||||
| ↳ `author` | string | Author username |
|
||||
| ↳ `url` | string | Post URL |
|
||||
@@ -184,6 +193,8 @@ Search for posts within a subreddit
|
||||
| ↳ `selftext` | string | Text content for self posts |
|
||||
| ↳ `thumbnail` | string | Thumbnail URL |
|
||||
| ↳ `subreddit` | string | Subreddit name |
|
||||
| `after` | string | Fullname of the last item for forward pagination |
|
||||
| `before` | string | Fullname of the first item for backward pagination |
|
||||
|
||||
### `reddit_submit_post`
|
||||
|
||||
@@ -200,6 +211,9 @@ Submit a new post to a subreddit (text or link)
|
||||
| `nsfw` | boolean | No | Mark post as NSFW |
|
||||
| `spoiler` | boolean | No | Mark post as spoiler |
|
||||
| `send_replies` | boolean | No | Send reply notifications to inbox \(default: true\) |
|
||||
| `flair_id` | string | No | Flair template UUID for the post \(max 36 characters\) |
|
||||
| `flair_text` | string | No | Flair text to display on the post \(max 64 characters\) |
|
||||
| `collection_id` | string | No | Collection UUID to add the post to |
|
||||
|
||||
#### Output
|
||||
|
||||
@@ -264,6 +278,21 @@ Save a Reddit post or comment to your saved items
|
||||
| `posts` | json | Posts data |
|
||||
| `post` | json | Single post data |
|
||||
| `comments` | json | Comments data |
|
||||
| `success` | boolean | Operation success status |
|
||||
| `message` | string | Result message |
|
||||
| `data` | json | Response data |
|
||||
| `after` | string | Pagination cursor \(next page\) |
|
||||
| `before` | string | Pagination cursor \(previous page\) |
|
||||
| `id` | string | Entity ID |
|
||||
| `name` | string | Entity fullname |
|
||||
| `messages` | json | Messages data |
|
||||
| `display_name` | string | Subreddit display name |
|
||||
| `subscribers` | number | Subscriber count |
|
||||
| `description` | string | Description text |
|
||||
| `link_karma` | number | Link karma |
|
||||
| `comment_karma` | number | Comment karma |
|
||||
| `total_karma` | number | Total karma |
|
||||
| `icon_img` | string | Icon image URL |
|
||||
|
||||
### `reddit_reply`
|
||||
|
||||
@@ -275,6 +304,7 @@ Add a comment reply to a Reddit post or comment
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `parent_id` | string | Yes | Thing fullname to reply to \(e.g., "t3_abc123" for post, "t1_def456" for comment\) |
|
||||
| `text` | string | Yes | Comment text in markdown format \(e.g., "Great post! Here is my **reply**"\) |
|
||||
| `return_rtjson` | boolean | No | Return response in Rich Text JSON format |
|
||||
|
||||
#### Output
|
||||
|
||||
@@ -345,4 +375,138 @@ Subscribe or unsubscribe from a subreddit
|
||||
| `success` | boolean | Whether the subscription action was successful |
|
||||
| `message` | string | Success or error message |
|
||||
|
||||
### `reddit_get_me`
|
||||
|
||||
Get information about the authenticated Reddit user
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `id` | string | User ID |
|
||||
| `name` | string | Username |
|
||||
| `created_utc` | number | Account creation time in UTC epoch seconds |
|
||||
| `link_karma` | number | Total link karma |
|
||||
| `comment_karma` | number | Total comment karma |
|
||||
| `total_karma` | number | Combined total karma |
|
||||
| `is_gold` | boolean | Whether user has Reddit Premium |
|
||||
| `is_mod` | boolean | Whether user is a moderator |
|
||||
| `has_verified_email` | boolean | Whether email is verified |
|
||||
| `icon_img` | string | User avatar/icon URL |
|
||||
|
||||
### `reddit_get_user`
|
||||
|
||||
Get public profile information about any Reddit user by username
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `username` | string | Yes | Reddit username to look up \(e.g., "spez", "example_user"\) |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `id` | string | User ID |
|
||||
| `name` | string | Username |
|
||||
| `created_utc` | number | Account creation time in UTC epoch seconds |
|
||||
| `link_karma` | number | Total link karma |
|
||||
| `comment_karma` | number | Total comment karma |
|
||||
| `total_karma` | number | Combined total karma |
|
||||
| `is_gold` | boolean | Whether user has Reddit Premium |
|
||||
| `is_mod` | boolean | Whether user is a moderator |
|
||||
| `has_verified_email` | boolean | Whether email is verified |
|
||||
| `icon_img` | string | User avatar/icon URL |
|
||||
|
||||
### `reddit_send_message`
|
||||
|
||||
Send a private message to a Reddit user
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `to` | string | Yes | Recipient username \(e.g., "example_user"\) or subreddit \(e.g., "/r/subreddit"\) |
|
||||
| `subject` | string | Yes | Message subject \(max 100 characters\) |
|
||||
| `text` | string | Yes | Message body in markdown format |
|
||||
| `from_sr` | string | No | Subreddit name to send the message from \(requires moderator mail permission\) |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `success` | boolean | Whether the message was sent successfully |
|
||||
| `message` | string | Success or error message |
|
||||
|
||||
### `reddit_get_messages`
|
||||
|
||||
Retrieve private messages from your Reddit inbox
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `where` | string | No | Message folder to retrieve: "inbox" \(all\), "unread", "sent", "messages" \(direct messages only\), "comments" \(comment replies\), "selfreply" \(self-post replies\), or "mentions" \(username mentions\). Default: "inbox" |
|
||||
| `limit` | number | No | Maximum number of messages to return \(e.g., 25\). Default: 25, max: 100 |
|
||||
| `after` | string | No | Fullname of a thing to fetch items after \(for pagination\) |
|
||||
| `before` | string | No | Fullname of a thing to fetch items before \(for pagination\) |
|
||||
| `mark` | boolean | No | Whether to mark fetched messages as read |
|
||||
| `count` | number | No | A count of items already seen in the listing \(used for numbering\) |
|
||||
| `show` | string | No | Show items that would normally be filtered \(e.g., "all"\) |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `messages` | array | Array of messages with sender, recipient, subject, body, and metadata |
|
||||
| ↳ `id` | string | Message ID |
|
||||
| ↳ `name` | string | Thing fullname \(t4_xxxxx\) |
|
||||
| ↳ `author` | string | Sender username |
|
||||
| ↳ `dest` | string | Recipient username |
|
||||
| ↳ `subject` | string | Message subject |
|
||||
| ↳ `body` | string | Message body text |
|
||||
| ↳ `created_utc` | number | Creation time in UTC epoch seconds |
|
||||
| ↳ `new` | boolean | Whether the message is unread |
|
||||
| ↳ `was_comment` | boolean | Whether the message is a comment reply |
|
||||
| ↳ `context` | string | Context URL for comment replies |
|
||||
| ↳ `distinguished` | string | Distinction: null/"moderator"/"admin" |
|
||||
| `after` | string | Fullname of the last item for forward pagination |
|
||||
| `before` | string | Fullname of the first item for backward pagination |
|
||||
|
||||
### `reddit_get_subreddit_info`
|
||||
|
||||
Get metadata and information about a subreddit
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `subreddit` | string | Yes | The subreddit to get info about \(e.g., "technology", "programming", "news"\) |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `id` | string | Subreddit ID |
|
||||
| `name` | string | Subreddit fullname \(t5_xxxxx\) |
|
||||
| `display_name` | string | Subreddit name without prefix |
|
||||
| `title` | string | Subreddit title |
|
||||
| `description` | string | Full subreddit description \(markdown\) |
|
||||
| `public_description` | string | Short public description |
|
||||
| `subscribers` | number | Number of subscribers |
|
||||
| `accounts_active` | number | Number of currently active users |
|
||||
| `created_utc` | number | Creation time in UTC epoch seconds |
|
||||
| `over18` | boolean | Whether the subreddit is NSFW |
|
||||
| `lang` | string | Primary language of the subreddit |
|
||||
| `subreddit_type` | string | Subreddit type: public, private, restricted, etc. |
|
||||
| `url` | string | Subreddit URL path \(e.g., /r/technology/\) |
|
||||
| `icon_img` | string | Subreddit icon URL |
|
||||
| `banner_img` | string | Subreddit banner URL |
|
||||
|
||||
|
||||
|
||||
@@ -69,7 +69,9 @@ Read records from a ServiceNow table
|
||||
| `number` | string | No | Record number \(e.g., INC0010001\) |
|
||||
| `query` | string | No | Encoded query string \(e.g., "active=true^priority=1"\) |
|
||||
| `limit` | number | No | Maximum number of records to return \(e.g., 10, 50, 100\) |
|
||||
| `offset` | number | No | Number of records to skip for pagination \(e.g., 0, 10, 20\) |
|
||||
| `fields` | string | No | Comma-separated list of fields to return \(e.g., sys_id,number,short_description,state\) |
|
||||
| `displayValue` | string | No | Return display values for reference fields: "true" \(display only\), "false" \(sys_id only\), or "all" \(both\) |
|
||||
|
||||
#### Output
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
---
|
||||
title: Slack
|
||||
description: Send, update, delete messages, send ephemeral messages, add reactions in Slack or trigger workflows from Slack events
|
||||
description: Send, update, delete messages, manage views and modals, add or remove reactions, manage canvases, get channel info and user presence in Slack
|
||||
---
|
||||
|
||||
import { BlockInfoCard } from "@/components/ui/block-info-card"
|
||||
@@ -39,7 +39,7 @@ If you encounter issues with the Slack integration, contact us at [help@sim.ai](
|
||||
|
||||
## Usage Instructions
|
||||
|
||||
Integrate Slack into the workflow. Can send, update, and delete messages, send ephemeral messages visible only to a specific user, create canvases, read messages, and add reactions. Requires Bot Token instead of OAuth in advanced mode. Can be used in trigger mode to trigger a workflow when a message is sent to a channel.
|
||||
Integrate Slack into the workflow. Can send, update, and delete messages, send ephemeral messages visible only to a specific user, open/update/push modal views, publish Home tab views, create canvases, read messages, and add or remove reactions. Requires Bot Token instead of OAuth in advanced mode. Can be used in trigger mode to trigger a workflow when a message is sent to a channel.
|
||||
|
||||
|
||||
|
||||
@@ -799,4 +799,313 @@ Add an emoji reaction to a Slack message
|
||||
| ↳ `timestamp` | string | Message timestamp |
|
||||
| ↳ `reaction` | string | Emoji reaction name |
|
||||
|
||||
### `slack_remove_reaction`
|
||||
|
||||
Remove an emoji reaction from a Slack message
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `authMethod` | string | No | Authentication method: oauth or bot_token |
|
||||
| `botToken` | string | No | Bot token for Custom Bot |
|
||||
| `channel` | string | Yes | Channel ID where the message was posted \(e.g., C1234567890\) |
|
||||
| `timestamp` | string | Yes | Timestamp of the message to remove reaction from \(e.g., 1405894322.002768\) |
|
||||
| `name` | string | Yes | Name of the emoji reaction to remove \(without colons, e.g., thumbsup, heart, eyes\) |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `content` | string | Success message |
|
||||
| `metadata` | object | Reaction metadata |
|
||||
| ↳ `channel` | string | Channel ID |
|
||||
| ↳ `timestamp` | string | Message timestamp |
|
||||
| ↳ `reaction` | string | Emoji reaction name |
|
||||
|
||||
### `slack_get_channel_info`
|
||||
|
||||
Get detailed information about a Slack channel by its ID
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `authMethod` | string | No | Authentication method: oauth or bot_token |
|
||||
| `botToken` | string | No | Bot token for Custom Bot |
|
||||
| `channel` | string | Yes | Channel ID to get information about \(e.g., C1234567890\) |
|
||||
| `includeNumMembers` | boolean | No | Whether to include the member count in the response |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `channelInfo` | object | Detailed channel information |
|
||||
| ↳ `id` | string | Channel ID \(e.g., C1234567890\) |
|
||||
| ↳ `name` | string | Channel name without # prefix |
|
||||
| ↳ `is_channel` | boolean | Whether this is a channel |
|
||||
| ↳ `is_private` | boolean | Whether channel is private |
|
||||
| ↳ `is_archived` | boolean | Whether channel is archived |
|
||||
| ↳ `is_general` | boolean | Whether this is the general channel |
|
||||
| ↳ `is_member` | boolean | Whether the bot/user is a member |
|
||||
| ↳ `is_shared` | boolean | Whether channel is shared across workspaces |
|
||||
| ↳ `is_ext_shared` | boolean | Whether channel is externally shared |
|
||||
| ↳ `is_org_shared` | boolean | Whether channel is org-wide shared |
|
||||
| ↳ `num_members` | number | Number of members in the channel |
|
||||
| ↳ `topic` | string | Channel topic |
|
||||
| ↳ `purpose` | string | Channel purpose/description |
|
||||
| ↳ `created` | number | Unix timestamp when channel was created |
|
||||
| ↳ `creator` | string | User ID of channel creator |
|
||||
| ↳ `updated` | number | Unix timestamp of last update |
|
||||
|
||||
### `slack_get_user_presence`
|
||||
|
||||
Check whether a Slack user is currently active or away
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `authMethod` | string | No | Authentication method: oauth or bot_token |
|
||||
| `botToken` | string | No | Bot token for Custom Bot |
|
||||
| `userId` | string | Yes | User ID to check presence for \(e.g., U1234567890\) |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `presence` | string | User presence status: "active" or "away" |
|
||||
| `online` | boolean | Whether user has an active client connection \(only available when checking own presence\) |
|
||||
| `autoAway` | boolean | Whether user was automatically set to away due to inactivity \(only available when checking own presence\) |
|
||||
| `manualAway` | boolean | Whether user manually set themselves as away \(only available when checking own presence\) |
|
||||
| `connectionCount` | number | Total number of active connections for the user \(only available when checking own presence\) |
|
||||
| `lastActivity` | number | Unix timestamp of last detected activity \(only available when checking own presence\) |
|
||||
|
||||
### `slack_edit_canvas`
|
||||
|
||||
Edit an existing Slack canvas by inserting, replacing, or deleting content
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `authMethod` | string | No | Authentication method: oauth or bot_token |
|
||||
| `botToken` | string | No | Bot token for Custom Bot |
|
||||
| `canvasId` | string | Yes | Canvas ID to edit \(e.g., F1234ABCD\) |
|
||||
| `operation` | string | Yes | Edit operation: insert_at_start, insert_at_end, insert_after, insert_before, replace, delete, or rename |
|
||||
| `content` | string | No | Markdown content for the operation \(required for insert/replace operations\) |
|
||||
| `sectionId` | string | No | Section ID to target \(required for insert_after, insert_before, replace, and delete\) |
|
||||
| `title` | string | No | New title for the canvas \(only used with rename operation\) |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `content` | string | Success message |
|
||||
|
||||
### `slack_create_channel_canvas`
|
||||
|
||||
Create a canvas pinned to a Slack channel as its resource hub
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `authMethod` | string | No | Authentication method: oauth or bot_token |
|
||||
| `botToken` | string | No | Bot token for Custom Bot |
|
||||
| `channel` | string | Yes | Channel ID to create the canvas in \(e.g., C1234567890\) |
|
||||
| `title` | string | No | Title for the channel canvas |
|
||||
| `content` | string | No | Canvas content in markdown format |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `canvas_id` | string | ID of the created channel canvas |
|
||||
|
||||
### `slack_open_view`
|
||||
|
||||
Open a modal view in Slack using a trigger_id from an interaction payload. Used to display forms, confirmations, and other interactive modals.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `authMethod` | string | No | Authentication method: oauth or bot_token |
|
||||
| `botToken` | string | No | Bot token for Custom Bot |
|
||||
| `triggerId` | string | Yes | Exchange a trigger to post to the user. Obtained from an interaction payload \(e.g., slash command, button click\) |
|
||||
| `interactivityPointer` | string | No | Alternative to trigger_id for posting to user |
|
||||
| `view` | json | Yes | A view payload object defining the modal. Must include type \("modal"\), title, and blocks array |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `view` | object | The opened modal view object |
|
||||
| ↳ `id` | string | Unique view identifier |
|
||||
| ↳ `team_id` | string | Workspace/team ID |
|
||||
| ↳ `type` | string | View type \(e.g., "modal"\) |
|
||||
| ↳ `title` | json | Plain text title object with type and text fields |
|
||||
| ↳ `type` | string | Text object type \(plain_text\) |
|
||||
| ↳ `text` | string | Title text content |
|
||||
| ↳ `submit` | json | Plain text submit button object |
|
||||
| ↳ `type` | string | Text object type \(plain_text\) |
|
||||
| ↳ `text` | string | Submit button text |
|
||||
| ↳ `close` | json | Plain text close button object |
|
||||
| ↳ `type` | string | Text object type \(plain_text\) |
|
||||
| ↳ `text` | string | Close button text |
|
||||
| ↳ `blocks` | array | Block Kit blocks in the view |
|
||||
| ↳ `type` | string | Block type \(section, divider, image, actions, etc.\) |
|
||||
| ↳ `block_id` | string | Unique block identifier |
|
||||
| ↳ `private_metadata` | string | Private metadata string passed with the view |
|
||||
| ↳ `callback_id` | string | Custom identifier for the view |
|
||||
| ↳ `external_id` | string | Custom external identifier \(max 255 chars, unique per workspace\) |
|
||||
| ↳ `state` | json | Current state of the view with input values |
|
||||
| ↳ `hash` | string | View version hash for updates |
|
||||
| ↳ `clear_on_close` | boolean | Whether to clear all views in the stack when this view is closed |
|
||||
| ↳ `notify_on_close` | boolean | Whether to send a view_closed event when this view is closed |
|
||||
| ↳ `root_view_id` | string | ID of the root view in the view stack |
|
||||
| ↳ `previous_view_id` | string | ID of the previous view in the view stack |
|
||||
| ↳ `app_id` | string | Application identifier |
|
||||
| ↳ `bot_id` | string | Bot identifier |
|
||||
|
||||
### `slack_update_view`
|
||||
|
||||
Update an existing modal view in Slack. Identify the view by view_id or external_id, and provide the updated view payload.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `authMethod` | string | No | Authentication method: oauth or bot_token |
|
||||
| `botToken` | string | No | Bot token for Custom Bot |
|
||||
| `viewId` | string | No | Unique identifier of the view to update. Either viewId or externalId is required |
|
||||
| `externalId` | string | No | Developer-set unique identifier of the view to update \(max 255 chars\). Either viewId or externalId is required |
|
||||
| `hash` | string | No | View state hash to protect against race conditions. Obtained from a previous views response |
|
||||
| `view` | json | Yes | A view payload object defining the updated modal. Must include type \("modal"\), title, and blocks array. Use identical block_id and action_id values to preserve input data |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `view` | object | The updated modal view object |
|
||||
| ↳ `id` | string | Unique view identifier |
|
||||
| ↳ `team_id` | string | Workspace/team ID |
|
||||
| ↳ `type` | string | View type \(e.g., "modal"\) |
|
||||
| ↳ `title` | json | Plain text title object with type and text fields |
|
||||
| ↳ `type` | string | Text object type \(plain_text\) |
|
||||
| ↳ `text` | string | Title text content |
|
||||
| ↳ `submit` | json | Plain text submit button object |
|
||||
| ↳ `type` | string | Text object type \(plain_text\) |
|
||||
| ↳ `text` | string | Submit button text |
|
||||
| ↳ `close` | json | Plain text close button object |
|
||||
| ↳ `type` | string | Text object type \(plain_text\) |
|
||||
| ↳ `text` | string | Close button text |
|
||||
| ↳ `blocks` | array | Block Kit blocks in the view |
|
||||
| ↳ `type` | string | Block type \(section, divider, image, actions, etc.\) |
|
||||
| ↳ `block_id` | string | Unique block identifier |
|
||||
| ↳ `private_metadata` | string | Private metadata string passed with the view |
|
||||
| ↳ `callback_id` | string | Custom identifier for the view |
|
||||
| ↳ `external_id` | string | Custom external identifier \(max 255 chars, unique per workspace\) |
|
||||
| ↳ `state` | json | Current state of the view with input values |
|
||||
| ↳ `hash` | string | View version hash for updates |
|
||||
| ↳ `clear_on_close` | boolean | Whether to clear all views in the stack when this view is closed |
|
||||
| ↳ `notify_on_close` | boolean | Whether to send a view_closed event when this view is closed |
|
||||
| ↳ `root_view_id` | string | ID of the root view in the view stack |
|
||||
| ↳ `previous_view_id` | string | ID of the previous view in the view stack |
|
||||
| ↳ `app_id` | string | Application identifier |
|
||||
| ↳ `bot_id` | string | Bot identifier |
|
||||
|
||||
### `slack_push_view`
|
||||
|
||||
Push a new view onto an existing modal stack in Slack. Limited to 2 additional views after the initial modal is opened.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `authMethod` | string | No | Authentication method: oauth or bot_token |
|
||||
| `botToken` | string | No | Bot token for Custom Bot |
|
||||
| `triggerId` | string | Yes | Exchange a trigger to post to the user. Obtained from an interaction payload \(e.g., button click within an existing modal\) |
|
||||
| `interactivityPointer` | string | No | Alternative to trigger_id for posting to user |
|
||||
| `view` | json | Yes | A view payload object defining the modal to push. Must include type \("modal"\), title, and blocks array |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `view` | object | The pushed modal view object |
|
||||
| ↳ `id` | string | Unique view identifier |
|
||||
| ↳ `team_id` | string | Workspace/team ID |
|
||||
| ↳ `type` | string | View type \(e.g., "modal"\) |
|
||||
| ↳ `title` | json | Plain text title object with type and text fields |
|
||||
| ↳ `type` | string | Text object type \(plain_text\) |
|
||||
| ↳ `text` | string | Title text content |
|
||||
| ↳ `submit` | json | Plain text submit button object |
|
||||
| ↳ `type` | string | Text object type \(plain_text\) |
|
||||
| ↳ `text` | string | Submit button text |
|
||||
| ↳ `close` | json | Plain text close button object |
|
||||
| ↳ `type` | string | Text object type \(plain_text\) |
|
||||
| ↳ `text` | string | Close button text |
|
||||
| ↳ `blocks` | array | Block Kit blocks in the view |
|
||||
| ↳ `type` | string | Block type \(section, divider, image, actions, etc.\) |
|
||||
| ↳ `block_id` | string | Unique block identifier |
|
||||
| ↳ `private_metadata` | string | Private metadata string passed with the view |
|
||||
| ↳ `callback_id` | string | Custom identifier for the view |
|
||||
| ↳ `external_id` | string | Custom external identifier \(max 255 chars, unique per workspace\) |
|
||||
| ↳ `state` | json | Current state of the view with input values |
|
||||
| ↳ `hash` | string | View version hash for updates |
|
||||
| ↳ `clear_on_close` | boolean | Whether to clear all views in the stack when this view is closed |
|
||||
| ↳ `notify_on_close` | boolean | Whether to send a view_closed event when this view is closed |
|
||||
| ↳ `root_view_id` | string | ID of the root view in the view stack |
|
||||
| ↳ `previous_view_id` | string | ID of the previous view in the view stack |
|
||||
| ↳ `app_id` | string | Application identifier |
|
||||
| ↳ `bot_id` | string | Bot identifier |
|
||||
|
||||
### `slack_publish_view`
|
||||
|
||||
Publish a static view to a user
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `authMethod` | string | No | Authentication method: oauth or bot_token |
|
||||
| `botToken` | string | No | Bot token for Custom Bot |
|
||||
| `userId` | string | Yes | The user ID to publish the Home tab view to \(e.g., U0BPQUNTA\) |
|
||||
| `hash` | string | No | View state hash to protect against race conditions. Obtained from a previous views response |
|
||||
| `view` | json | Yes | A view payload object defining the Home tab. Must include type \("home"\) and blocks array |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `view` | object | The published Home tab view object |
|
||||
| ↳ `id` | string | Unique view identifier |
|
||||
| ↳ `team_id` | string | Workspace/team ID |
|
||||
| ↳ `type` | string | View type \(e.g., "modal"\) |
|
||||
| ↳ `title` | json | Plain text title object with type and text fields |
|
||||
| ↳ `type` | string | Text object type \(plain_text\) |
|
||||
| ↳ `text` | string | Title text content |
|
||||
| ↳ `submit` | json | Plain text submit button object |
|
||||
| ↳ `type` | string | Text object type \(plain_text\) |
|
||||
| ↳ `text` | string | Submit button text |
|
||||
| ↳ `close` | json | Plain text close button object |
|
||||
| ↳ `type` | string | Text object type \(plain_text\) |
|
||||
| ↳ `text` | string | Close button text |
|
||||
| ↳ `blocks` | array | Block Kit blocks in the view |
|
||||
| ↳ `type` | string | Block type \(section, divider, image, actions, etc.\) |
|
||||
| ↳ `block_id` | string | Unique block identifier |
|
||||
| ↳ `private_metadata` | string | Private metadata string passed with the view |
|
||||
| ↳ `callback_id` | string | Custom identifier for the view |
|
||||
| ↳ `external_id` | string | Custom external identifier \(max 255 chars, unique per workspace\) |
|
||||
| ↳ `state` | json | Current state of the view with input values |
|
||||
| ↳ `hash` | string | View version hash for updates |
|
||||
| ↳ `clear_on_close` | boolean | Whether to clear all views in the stack when this view is closed |
|
||||
| ↳ `notify_on_close` | boolean | Whether to send a view_closed event when this view is closed |
|
||||
| ↳ `root_view_id` | string | ID of the root view in the view stack |
|
||||
| ↳ `previous_view_id` | string | ID of the previous view in the view stack |
|
||||
| ↳ `app_id` | string | Application identifier |
|
||||
| ↳ `bot_id` | string | Bot identifier |
|
||||
|
||||
|
||||
|
||||
87
apps/sim/app/api/tools/slack/remove-reaction/route.ts
Normal file
87
apps/sim/app/api/tools/slack/remove-reaction/route.ts
Normal file
@@ -0,0 +1,87 @@
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
const SlackRemoveReactionSchema = z.object({
|
||||
accessToken: z.string().min(1, 'Access token is required'),
|
||||
channel: z.string().min(1, 'Channel is required'),
|
||||
timestamp: z.string().min(1, 'Message timestamp is required'),
|
||||
name: z.string().min(1, 'Emoji name is required'),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
try {
|
||||
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
|
||||
|
||||
if (!authResult.success) {
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: authResult.error || 'Authentication required',
|
||||
},
|
||||
{ status: 401 }
|
||||
)
|
||||
}
|
||||
|
||||
const body = await request.json()
|
||||
const validatedData = SlackRemoveReactionSchema.parse(body)
|
||||
|
||||
const slackResponse = await fetch('https://slack.com/api/reactions.remove', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
Authorization: `Bearer ${validatedData.accessToken}`,
|
||||
},
|
||||
body: JSON.stringify({
|
||||
channel: validatedData.channel,
|
||||
timestamp: validatedData.timestamp,
|
||||
name: validatedData.name,
|
||||
}),
|
||||
})
|
||||
|
||||
const data = await slackResponse.json()
|
||||
|
||||
if (!data.ok) {
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: data.error || 'Failed to remove reaction',
|
||||
},
|
||||
{ status: slackResponse.status }
|
||||
)
|
||||
}
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
output: {
|
||||
content: `Successfully removed :${validatedData.name}: reaction`,
|
||||
metadata: {
|
||||
channel: validatedData.channel,
|
||||
timestamp: validatedData.timestamp,
|
||||
reaction: validatedData.name,
|
||||
},
|
||||
},
|
||||
})
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: 'Invalid request data',
|
||||
details: error.errors,
|
||||
},
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: error instanceof Error ? error.message : 'Unknown error occurred',
|
||||
},
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -150,6 +150,7 @@ export async function POST(request: NextRequest) {
|
||||
method: 'GET',
|
||||
})
|
||||
if (!response.ok) {
|
||||
await response.text().catch(() => {})
|
||||
throw new Error(`Failed to download audio from URL: ${response.statusText}`)
|
||||
}
|
||||
|
||||
|
||||
@@ -135,6 +135,7 @@ async function fetchDocumentBytes(url: string): Promise<{ bytes: string; content
|
||||
method: 'GET',
|
||||
})
|
||||
if (!response.ok) {
|
||||
await response.text().catch(() => {})
|
||||
throw new Error(`Failed to fetch document: ${response.statusText}`)
|
||||
}
|
||||
|
||||
|
||||
@@ -65,6 +65,7 @@ export async function POST(request: NextRequest) {
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
await response.body?.cancel().catch(() => {})
|
||||
logger.error(`Failed to generate TTS: ${response.status} ${response.statusText}`)
|
||||
return NextResponse.json(
|
||||
{ error: `Failed to generate TTS: ${response.status} ${response.statusText}` },
|
||||
|
||||
@@ -184,6 +184,7 @@ export async function POST(request: NextRequest) {
|
||||
method: 'GET',
|
||||
})
|
||||
if (!response.ok) {
|
||||
await response.text().catch(() => {})
|
||||
return NextResponse.json(
|
||||
{ success: false, error: 'Failed to fetch image for Gemini' },
|
||||
{ status: 400 }
|
||||
|
||||
@@ -964,7 +964,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
||||
logger.error(`[${requestId}] Error streaming block content:`, error)
|
||||
} finally {
|
||||
try {
|
||||
reader.releaseLock()
|
||||
await reader.cancel().catch(() => {})
|
||||
} catch {}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -501,17 +501,6 @@ export function Chat() {
|
||||
}
|
||||
}, [])
|
||||
|
||||
useEffect(() => {
|
||||
if (!isExecuting && isStreaming) {
|
||||
const lastMessage = workflowMessages[workflowMessages.length - 1]
|
||||
if (lastMessage?.isStreaming) {
|
||||
streamReaderRef.current?.cancel()
|
||||
streamReaderRef.current = null
|
||||
finalizeMessageStream(lastMessage.id)
|
||||
}
|
||||
}
|
||||
}, [isExecuting, isStreaming, workflowMessages, finalizeMessageStream])
|
||||
|
||||
const handleStopStreaming = useCallback(() => {
|
||||
streamReaderRef.current?.cancel()
|
||||
streamReaderRef.current = null
|
||||
|
||||
@@ -40,6 +40,10 @@ import { LoopTool } from '@/app/workspace/[workspaceId]/w/[workflowId]/component
|
||||
import { ParallelTool } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/subflows/parallel/parallel-config'
|
||||
import { getSubBlockStableKey } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/utils'
|
||||
import { useCurrentWorkflow } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks'
|
||||
import {
|
||||
isAncestorProtected,
|
||||
isBlockProtected,
|
||||
} from '@/app/workspace/[workspaceId]/w/[workflowId]/utils/block-protection-utils'
|
||||
import { PreviewWorkflow } from '@/app/workspace/[workspaceId]/w/components/preview'
|
||||
import { getBlock } from '@/blocks/registry'
|
||||
import type { SubBlockType } from '@/blocks/types'
|
||||
@@ -107,12 +111,11 @@ export function Editor() {
|
||||
|
||||
const userPermissions = useUserPermissionsContext()
|
||||
|
||||
// Check if block is locked (or inside a locked container) and compute edit permission
|
||||
// Check if block is locked (or inside a locked ancestor) and compute edit permission
|
||||
// Locked blocks cannot be edited by anyone (admins can only lock/unlock)
|
||||
const blocks = useWorkflowStore((state) => state.blocks)
|
||||
const parentId = currentBlock?.data?.parentId as string | undefined
|
||||
const isParentLocked = parentId ? (blocks[parentId]?.locked ?? false) : false
|
||||
const isLocked = (currentBlock?.locked ?? false) || isParentLocked
|
||||
const isLocked = currentBlockId ? isBlockProtected(currentBlockId, blocks) : false
|
||||
const isAncestorLocked = currentBlockId ? isAncestorProtected(currentBlockId, blocks) : false
|
||||
const canEditBlock = userPermissions.canEdit && !isLocked
|
||||
|
||||
const activeWorkflowId = useWorkflowRegistry((state) => state.activeWorkflowId)
|
||||
@@ -247,10 +250,7 @@ export function Editor() {
|
||||
const block = blocks[blockId]
|
||||
if (!block) return
|
||||
|
||||
const parentId = block.data?.parentId as string | undefined
|
||||
const isParentLocked = parentId ? (blocks[parentId]?.locked ?? false) : false
|
||||
const isLocked = (block.locked ?? false) || isParentLocked
|
||||
if (!userPermissions.canEdit || isLocked) return
|
||||
if (!userPermissions.canEdit || isBlockProtected(blockId, blocks)) return
|
||||
|
||||
renamingBlockIdRef.current = blockId
|
||||
setEditedName(block.name || '')
|
||||
@@ -364,11 +364,11 @@ export function Editor() {
|
||||
)}
|
||||
</div>
|
||||
<div className='flex shrink-0 items-center gap-[8px]'>
|
||||
{/* Locked indicator - clickable to unlock if user has admin permissions, block is locked, and parent is not locked */}
|
||||
{/* Locked indicator - clickable to unlock if user has admin permissions, block is locked directly, and not locked by an ancestor */}
|
||||
{isLocked && currentBlock && (
|
||||
<Tooltip.Root>
|
||||
<Tooltip.Trigger asChild>
|
||||
{userPermissions.canAdmin && currentBlock.locked && !isParentLocked ? (
|
||||
{userPermissions.canAdmin && currentBlock.locked && !isAncestorLocked ? (
|
||||
<Button
|
||||
variant='ghost'
|
||||
className='p-0'
|
||||
@@ -385,8 +385,8 @@ export function Editor() {
|
||||
</Tooltip.Trigger>
|
||||
<Tooltip.Content side='top'>
|
||||
<p>
|
||||
{isParentLocked
|
||||
? 'Parent container is locked'
|
||||
{isAncestorLocked
|
||||
? 'Ancestor container is locked'
|
||||
: userPermissions.canAdmin && currentBlock.locked
|
||||
? 'Unlock block'
|
||||
: 'Block is locked'}
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { memo, useMemo, useRef } from 'react'
|
||||
import { memo, useMemo } from 'react'
|
||||
import { RepeatIcon, SplitIcon } from 'lucide-react'
|
||||
import { Handle, type NodeProps, Position, useReactFlow } from 'reactflow'
|
||||
import { Badge } from '@/components/emcn'
|
||||
@@ -28,6 +28,28 @@ export interface SubflowNodeData {
|
||||
executionStatus?: 'success' | 'error' | 'not-executed'
|
||||
}
|
||||
|
||||
const HANDLE_STYLE = {
|
||||
top: `${HANDLE_POSITIONS.DEFAULT_Y_OFFSET}px`,
|
||||
transform: 'translateY(-50%)',
|
||||
} as const
|
||||
|
||||
/**
|
||||
* Reusable class names for Handle components.
|
||||
* Matches the styling pattern from workflow-block.tsx.
|
||||
*/
|
||||
const getHandleClasses = (position: 'left' | 'right') => {
|
||||
const baseClasses = '!z-[10] !cursor-crosshair !border-none !transition-[colors] !duration-150'
|
||||
const colorClasses = '!bg-[var(--workflow-edge)]'
|
||||
|
||||
const positionClasses = {
|
||||
left: '!left-[-8px] !h-5 !w-[7px] !rounded-l-[2px] !rounded-r-none hover:!left-[-11px] hover:!w-[10px] hover:!rounded-l-full',
|
||||
right:
|
||||
'!right-[-8px] !h-5 !w-[7px] !rounded-r-[2px] !rounded-l-none hover:!right-[-11px] hover:!w-[10px] hover:!rounded-r-full',
|
||||
}
|
||||
|
||||
return cn(baseClasses, colorClasses, positionClasses[position])
|
||||
}
|
||||
|
||||
/**
|
||||
* Subflow node component for loop and parallel execution containers.
|
||||
* Renders a resizable container with a header displaying the block name and icon,
|
||||
@@ -38,7 +60,6 @@ export interface SubflowNodeData {
|
||||
*/
|
||||
export const SubflowNodeComponent = memo(({ data, id, selected }: NodeProps<SubflowNodeData>) => {
|
||||
const { getNodes } = useReactFlow()
|
||||
const blockRef = useRef<HTMLDivElement>(null)
|
||||
const userPermissions = useUserPermissionsContext()
|
||||
|
||||
const currentWorkflow = useCurrentWorkflow()
|
||||
@@ -52,7 +73,6 @@ export const SubflowNodeComponent = memo(({ data, id, selected }: NodeProps<Subf
|
||||
const isLocked = currentBlock?.locked ?? false
|
||||
const isPreview = data?.isPreview || false
|
||||
|
||||
// Focus state
|
||||
const setCurrentBlockId = usePanelEditorStore((state) => state.setCurrentBlockId)
|
||||
const currentBlockId = usePanelEditorStore((state) => state.currentBlockId)
|
||||
const isFocused = currentBlockId === id
|
||||
@@ -84,7 +104,7 @@ export const SubflowNodeComponent = memo(({ data, id, selected }: NodeProps<Subf
|
||||
}
|
||||
|
||||
return level
|
||||
}, [id, data?.parentId, getNodes])
|
||||
}, [data?.parentId, getNodes])
|
||||
|
||||
const startHandleId = data.kind === 'loop' ? 'loop-start-source' : 'parallel-start-source'
|
||||
const endHandleId = data.kind === 'loop' ? 'loop-end-source' : 'parallel-end-source'
|
||||
@@ -92,27 +112,6 @@ export const SubflowNodeComponent = memo(({ data, id, selected }: NodeProps<Subf
|
||||
const blockIconBg = data.kind === 'loop' ? '#2FB3FF' : '#FEE12B'
|
||||
const blockName = data.name || (data.kind === 'loop' ? 'Loop' : 'Parallel')
|
||||
|
||||
/**
|
||||
* Reusable styles and positioning for Handle components.
|
||||
* Matches the styling pattern from workflow-block.tsx.
|
||||
*/
|
||||
const getHandleClasses = (position: 'left' | 'right') => {
|
||||
const baseClasses = '!z-[10] !cursor-crosshair !border-none !transition-[colors] !duration-150'
|
||||
const colorClasses = '!bg-[var(--workflow-edge)]'
|
||||
|
||||
const positionClasses = {
|
||||
left: '!left-[-8px] !h-5 !w-[7px] !rounded-l-[2px] !rounded-r-none hover:!left-[-11px] hover:!w-[10px] hover:!rounded-l-full',
|
||||
right:
|
||||
'!right-[-8px] !h-5 !w-[7px] !rounded-r-[2px] !rounded-l-none hover:!right-[-11px] hover:!w-[10px] hover:!rounded-r-full',
|
||||
}
|
||||
|
||||
return cn(baseClasses, colorClasses, positionClasses[position])
|
||||
}
|
||||
|
||||
const getHandleStyle = () => {
|
||||
return { top: `${HANDLE_POSITIONS.DEFAULT_Y_OFFSET}px`, transform: 'translateY(-50%)' }
|
||||
}
|
||||
|
||||
/**
|
||||
* Determine the ring styling based on subflow state priority:
|
||||
* 1. Focused (selected in editor), selected (shift-click/box), or preview selected - blue ring
|
||||
@@ -127,46 +126,37 @@ export const SubflowNodeComponent = memo(({ data, id, selected }: NodeProps<Subf
|
||||
diffStatus === 'new' ||
|
||||
diffStatus === 'edited' ||
|
||||
!!runPathStatus
|
||||
|
||||
/**
|
||||
* Compute the outline color for the subflow ring.
|
||||
* Uses CSS outline instead of box-shadow ring because in ReactFlow v11,
|
||||
* child nodes are DOM children of parent nodes and paint over the parent's
|
||||
* internal ring overlay. Outline renders on the element's own compositing
|
||||
* layer, so it stays visible above nested child nodes.
|
||||
* Compute the ring color for the subflow selection indicator.
|
||||
* Uses boxShadow (not CSS outline) to match the ring styling of regular workflow blocks.
|
||||
* This works because ReactFlow renders child nodes as sibling divs at the viewport level
|
||||
* (not as DOM children), so children at zIndex 1000 don't clip the parent's boxShadow.
|
||||
*/
|
||||
const outlineColor = hasRing
|
||||
? isFocused || isSelected || isPreviewSelected
|
||||
? 'var(--brand-secondary)'
|
||||
: diffStatus === 'new'
|
||||
? 'var(--brand-tertiary-2)'
|
||||
: diffStatus === 'edited'
|
||||
? 'var(--warning)'
|
||||
: runPathStatus === 'success'
|
||||
? executionStatus
|
||||
? 'var(--brand-tertiary-2)'
|
||||
: 'var(--border-success)'
|
||||
: runPathStatus === 'error'
|
||||
? 'var(--text-error)'
|
||||
: undefined
|
||||
: undefined
|
||||
const getRingColor = (): string | undefined => {
|
||||
if (!hasRing) return undefined
|
||||
if (isFocused || isSelected || isPreviewSelected) return 'var(--brand-secondary)'
|
||||
if (diffStatus === 'new') return 'var(--brand-tertiary-2)'
|
||||
if (diffStatus === 'edited') return 'var(--warning)'
|
||||
if (runPathStatus === 'success') {
|
||||
return executionStatus ? 'var(--brand-tertiary-2)' : 'var(--border-success)'
|
||||
}
|
||||
if (runPathStatus === 'error') return 'var(--text-error)'
|
||||
return undefined
|
||||
}
|
||||
const ringColor = getRingColor()
|
||||
|
||||
return (
|
||||
<div className='group pointer-events-none relative'>
|
||||
<div
|
||||
ref={blockRef}
|
||||
className={cn(
|
||||
'relative select-none rounded-[8px] border border-[var(--border-1)]',
|
||||
'transition-block-bg'
|
||||
)}
|
||||
className='relative select-none rounded-[8px] border border-[var(--border-1)] transition-block-bg'
|
||||
style={{
|
||||
width: data.width || 500,
|
||||
height: data.height || 300,
|
||||
position: 'relative',
|
||||
overflow: 'visible',
|
||||
pointerEvents: 'none',
|
||||
...(outlineColor && {
|
||||
outline: `1.75px solid ${outlineColor}`,
|
||||
outlineOffset: '-1px',
|
||||
...(ringColor && {
|
||||
boxShadow: `0 0 0 1.75px ${ringColor}`,
|
||||
}),
|
||||
}}
|
||||
data-node-id={id}
|
||||
@@ -181,9 +171,7 @@ export const SubflowNodeComponent = memo(({ data, id, selected }: NodeProps<Subf
|
||||
{/* Header Section — only interactive area for dragging */}
|
||||
<div
|
||||
onClick={() => setCurrentBlockId(id)}
|
||||
className={cn(
|
||||
'workflow-drag-handle flex cursor-grab items-center justify-between rounded-t-[8px] border-[var(--border)] border-b bg-[var(--surface-2)] py-[8px] pr-[12px] pl-[8px] [&:active]:cursor-grabbing'
|
||||
)}
|
||||
className='workflow-drag-handle flex cursor-grab items-center justify-between rounded-t-[8px] border-[var(--border)] border-b bg-[var(--surface-2)] py-[8px] pr-[12px] pl-[8px] [&:active]:cursor-grabbing'
|
||||
style={{ pointerEvents: 'auto' }}
|
||||
>
|
||||
<div className='flex min-w-0 flex-1 items-center gap-[10px]'>
|
||||
@@ -209,6 +197,17 @@ export const SubflowNodeComponent = memo(({ data, id, selected }: NodeProps<Subf
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/*
|
||||
* Click-catching background — selects this subflow when the body area is clicked.
|
||||
* No event bubbling concern: ReactFlow renders child nodes as viewport-level siblings,
|
||||
* not as DOM children of this component, so child clicks never reach this div.
|
||||
*/}
|
||||
<div
|
||||
className='absolute inset-0 top-[44px] rounded-b-[8px]'
|
||||
style={{ pointerEvents: isPreview ? 'none' : 'auto' }}
|
||||
onClick={() => setCurrentBlockId(id)}
|
||||
/>
|
||||
|
||||
{!isPreview && (
|
||||
<div
|
||||
className='absolute right-[8px] bottom-[8px] z-20 flex h-[32px] w-[32px] cursor-se-resize items-center justify-center text-muted-foreground'
|
||||
@@ -217,12 +216,9 @@ export const SubflowNodeComponent = memo(({ data, id, selected }: NodeProps<Subf
|
||||
)}
|
||||
|
||||
<div
|
||||
className='h-[calc(100%-50px)] pt-[16px] pr-[80px] pb-[16px] pl-[16px]'
|
||||
className='relative h-[calc(100%-50px)] pt-[16px] pr-[80px] pb-[16px] pl-[16px]'
|
||||
data-dragarea='true'
|
||||
style={{
|
||||
position: 'relative',
|
||||
pointerEvents: 'none',
|
||||
}}
|
||||
style={{ pointerEvents: 'none' }}
|
||||
>
|
||||
{/* Subflow Start */}
|
||||
<div
|
||||
@@ -255,7 +251,7 @@ export const SubflowNodeComponent = memo(({ data, id, selected }: NodeProps<Subf
|
||||
position={Position.Left}
|
||||
className={getHandleClasses('left')}
|
||||
style={{
|
||||
...getHandleStyle(),
|
||||
...HANDLE_STYLE,
|
||||
pointerEvents: 'auto',
|
||||
}}
|
||||
/>
|
||||
@@ -266,7 +262,7 @@ export const SubflowNodeComponent = memo(({ data, id, selected }: NodeProps<Subf
|
||||
position={Position.Right}
|
||||
className={getHandleClasses('right')}
|
||||
style={{
|
||||
...getHandleStyle(),
|
||||
...HANDLE_STYLE,
|
||||
pointerEvents: 'auto',
|
||||
}}
|
||||
id={endHandleId}
|
||||
|
||||
@@ -527,7 +527,8 @@ const SubBlockRow = memo(function SubBlockRow({
|
||||
const { displayName: credentialName } = useCredentialName(
|
||||
credentialSourceId,
|
||||
credentialProviderId,
|
||||
workflowId
|
||||
workflowId,
|
||||
workspaceId
|
||||
)
|
||||
|
||||
const credentialId = dependencyValues.credential
|
||||
|
||||
@@ -20,7 +20,10 @@ import {
|
||||
TriggerUtils,
|
||||
} from '@/lib/workflows/triggers/triggers'
|
||||
import { useCurrentWorkflow } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-current-workflow'
|
||||
import { updateActiveBlockRefCount } from '@/app/workspace/[workspaceId]/w/[workflowId]/utils/workflow-execution-utils'
|
||||
import {
|
||||
markOutgoingEdgesFromOutput,
|
||||
updateActiveBlockRefCount,
|
||||
} from '@/app/workspace/[workspaceId]/w/[workflowId]/utils/workflow-execution-utils'
|
||||
import { getBlock } from '@/blocks'
|
||||
import type { SerializableExecutionState } from '@/executor/execution/types'
|
||||
import type {
|
||||
@@ -63,7 +66,7 @@ interface DebugValidationResult {
|
||||
interface BlockEventHandlerConfig {
|
||||
workflowId?: string
|
||||
executionIdRef: { current: string }
|
||||
workflowEdges: Array<{ id: string; target: string; sourceHandle?: string | null }>
|
||||
workflowEdges: Array<{ id: string; source: string; target: string; sourceHandle?: string | null }>
|
||||
activeBlocksSet: Set<string>
|
||||
activeBlockRefCounts: Map<string, number>
|
||||
accumulatedBlockLogs: BlockLog[]
|
||||
@@ -335,13 +338,9 @@ export function useWorkflowExecution() {
|
||||
setActiveBlocks(workflowId, new Set(activeBlocksSet))
|
||||
}
|
||||
|
||||
const markIncomingEdges = (blockId: string) => {
|
||||
const markOutgoingEdges = (blockId: string, output: Record<string, any> | undefined) => {
|
||||
if (!workflowId) return
|
||||
const incomingEdges = workflowEdges.filter((edge) => edge.target === blockId)
|
||||
incomingEdges.forEach((edge) => {
|
||||
const status = edge.sourceHandle === 'error' ? 'error' : 'success'
|
||||
setEdgeRunStatus(workflowId, edge.id, status)
|
||||
})
|
||||
markOutgoingEdgesFromOutput(blockId, output, workflowEdges, workflowId, setEdgeRunStatus)
|
||||
}
|
||||
|
||||
const isContainerBlockType = (blockType?: string) => {
|
||||
@@ -460,7 +459,6 @@ export function useWorkflowExecution() {
|
||||
const onBlockStarted = (data: BlockStartedData) => {
|
||||
if (isStaleExecution()) return
|
||||
updateActiveBlocks(data.blockId, true)
|
||||
markIncomingEdges(data.blockId)
|
||||
|
||||
if (!includeStartConsoleEntry || !workflowId) return
|
||||
|
||||
@@ -487,6 +485,7 @@ export function useWorkflowExecution() {
|
||||
if (isStaleExecution()) return
|
||||
updateActiveBlocks(data.blockId, false)
|
||||
if (workflowId) setBlockRunStatus(workflowId, data.blockId, 'success')
|
||||
markOutgoingEdges(data.blockId, data.output as Record<string, any> | undefined)
|
||||
executedBlockIds.add(data.blockId)
|
||||
accumulatedBlockStates.set(data.blockId, {
|
||||
output: data.output,
|
||||
@@ -505,7 +504,9 @@ export function useWorkflowExecution() {
|
||||
}
|
||||
|
||||
if (isContainerBlockType(data.blockType) && !data.iterationContainerId) {
|
||||
return
|
||||
const output = data.output as Record<string, any> | undefined
|
||||
const isEmptySubflow = Array.isArray(output?.results) && output.results.length === 0
|
||||
if (!isEmptySubflow) return
|
||||
}
|
||||
|
||||
accumulatedBlockLogs.push(createBlockLogEntry(data, { success: true, output: data.output }))
|
||||
@@ -527,6 +528,7 @@ export function useWorkflowExecution() {
|
||||
if (isStaleExecution()) return
|
||||
updateActiveBlocks(data.blockId, false)
|
||||
if (workflowId) setBlockRunStatus(workflowId, data.blockId, 'error')
|
||||
markOutgoingEdges(data.blockId, { error: data.error })
|
||||
|
||||
executedBlockIds.add(data.blockId)
|
||||
accumulatedBlockStates.set(data.blockId, {
|
||||
@@ -1124,9 +1126,7 @@ export function useWorkflowExecution() {
|
||||
{} as typeof workflowBlocks
|
||||
)
|
||||
|
||||
const isExecutingFromChat =
|
||||
overrideTriggerType === 'chat' ||
|
||||
(workflowInput && typeof workflowInput === 'object' && 'input' in workflowInput)
|
||||
const isExecutingFromChat = overrideTriggerType === 'chat'
|
||||
|
||||
logger.info('Executing workflow', {
|
||||
isDiffMode: currentWorkflow.isDiffMode,
|
||||
@@ -1495,8 +1495,13 @@ export function useWorkflowExecution() {
|
||||
: null
|
||||
if (activeWorkflowId && !workflowExecState?.isDebugging) {
|
||||
setExecutionResult(executionResult)
|
||||
setIsExecuting(activeWorkflowId, false)
|
||||
setActiveBlocks(activeWorkflowId, new Set())
|
||||
// For chat executions, don't set isExecuting=false here — the chat's
|
||||
// client-side stream wrapper still has buffered data to deliver.
|
||||
// The chat's finally block handles cleanup after the stream is fully consumed.
|
||||
if (!isExecutingFromChat) {
|
||||
setIsExecuting(activeWorkflowId, false)
|
||||
setActiveBlocks(activeWorkflowId, new Set())
|
||||
}
|
||||
setTimeout(() => {
|
||||
queryClient.invalidateQueries({ queryKey: subscriptionKeys.all })
|
||||
}, 1000)
|
||||
@@ -1536,7 +1541,7 @@ export function useWorkflowExecution() {
|
||||
isPreExecutionError,
|
||||
})
|
||||
|
||||
if (activeWorkflowId) {
|
||||
if (activeWorkflowId && !isExecutingFromChat) {
|
||||
setIsExecuting(activeWorkflowId, false)
|
||||
setIsDebugging(activeWorkflowId, false)
|
||||
setActiveBlocks(activeWorkflowId, new Set())
|
||||
@@ -1562,7 +1567,7 @@ export function useWorkflowExecution() {
|
||||
durationMs: data?.duration,
|
||||
})
|
||||
|
||||
if (activeWorkflowId) {
|
||||
if (activeWorkflowId && !isExecutingFromChat) {
|
||||
setIsExecuting(activeWorkflowId, false)
|
||||
setIsDebugging(activeWorkflowId, false)
|
||||
setActiveBlocks(activeWorkflowId, new Set())
|
||||
|
||||
@@ -1,4 +1,7 @@
|
||||
import type { BlockState } from '@/stores/workflows/workflow/types'
|
||||
import { isAncestorProtected, isBlockProtected } from '@/stores/workflows/workflow/utils'
|
||||
|
||||
export { isAncestorProtected, isBlockProtected }
|
||||
|
||||
/**
|
||||
* Result of filtering protected blocks from a deletion operation
|
||||
@@ -12,28 +15,6 @@ export interface FilterProtectedBlocksResult {
|
||||
allProtected: boolean
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if a block is protected from editing/deletion.
|
||||
* A block is protected if it is locked or if its parent container is locked.
|
||||
*
|
||||
* @param blockId - The ID of the block to check
|
||||
* @param blocks - Record of all blocks in the workflow
|
||||
* @returns True if the block is protected
|
||||
*/
|
||||
export function isBlockProtected(blockId: string, blocks: Record<string, BlockState>): boolean {
|
||||
const block = blocks[blockId]
|
||||
if (!block) return false
|
||||
|
||||
// Block is locked directly
|
||||
if (block.locked) return true
|
||||
|
||||
// Block is inside a locked container
|
||||
const parentId = block.data?.parentId
|
||||
if (parentId && blocks[parentId]?.locked) return true
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if an edge is protected from modification.
|
||||
* An edge is protected only if its target block is protected.
|
||||
|
||||
@@ -29,6 +29,62 @@ export function updateActiveBlockRefCount(
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Determines if a workflow edge should be marked as active based on its handle and the block output.
|
||||
* Mirrors the executor's EdgeManager.shouldActivateEdge logic on the client side.
|
||||
* Exclude sentinel handles here
|
||||
*/
|
||||
function shouldActivateEdgeClient(
|
||||
handle: string | null | undefined,
|
||||
output: Record<string, any> | undefined
|
||||
): boolean {
|
||||
if (!handle) return true
|
||||
|
||||
if (handle.startsWith('condition-')) {
|
||||
return output?.selectedOption === handle.substring('condition-'.length)
|
||||
}
|
||||
|
||||
if (handle.startsWith('router-')) {
|
||||
return output?.selectedRoute === handle.substring('router-'.length)
|
||||
}
|
||||
|
||||
switch (handle) {
|
||||
case 'error':
|
||||
return !!output?.error
|
||||
case 'source':
|
||||
return !output?.error
|
||||
case 'loop-start-source':
|
||||
case 'loop-end-source':
|
||||
case 'parallel-start-source':
|
||||
case 'parallel-end-source':
|
||||
return true
|
||||
default:
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
export function markOutgoingEdgesFromOutput(
|
||||
blockId: string,
|
||||
output: Record<string, any> | undefined,
|
||||
workflowEdges: Array<{
|
||||
id: string
|
||||
source: string
|
||||
target: string
|
||||
sourceHandle?: string | null
|
||||
}>,
|
||||
workflowId: string,
|
||||
setEdgeRunStatus: (wfId: string, edgeId: string, status: 'success' | 'error') => void
|
||||
): void {
|
||||
const outgoing = workflowEdges.filter((edge) => edge.source === blockId)
|
||||
for (const edge of outgoing) {
|
||||
const handle = edge.sourceHandle
|
||||
if (shouldActivateEdgeClient(handle, output)) {
|
||||
const status = handle === 'error' ? 'error' : output?.error ? 'error' : 'success'
|
||||
setEdgeRunStatus(workflowId, edge.id, status)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export interface WorkflowExecutionOptions {
|
||||
workflowInput?: any
|
||||
onStream?: (se: StreamingExecution) => Promise<void>
|
||||
@@ -135,13 +191,6 @@ export async function executeWorkflowWithFullLogging(
|
||||
true
|
||||
)
|
||||
setActiveBlocks(wfId, new Set(activeBlocksSet))
|
||||
|
||||
const incomingEdges = workflowEdges.filter(
|
||||
(edge) => edge.target === event.data.blockId
|
||||
)
|
||||
incomingEdges.forEach((edge) => {
|
||||
setEdgeRunStatus(wfId, edge.id, 'success')
|
||||
})
|
||||
break
|
||||
}
|
||||
|
||||
@@ -155,6 +204,13 @@ export async function executeWorkflowWithFullLogging(
|
||||
setActiveBlocks(wfId, new Set(activeBlocksSet))
|
||||
|
||||
setBlockRunStatus(wfId, event.data.blockId, 'success')
|
||||
markOutgoingEdgesFromOutput(
|
||||
event.data.blockId,
|
||||
event.data.output,
|
||||
workflowEdges,
|
||||
wfId,
|
||||
setEdgeRunStatus
|
||||
)
|
||||
|
||||
addConsole({
|
||||
input: event.data.input || {},
|
||||
@@ -194,6 +250,13 @@ export async function executeWorkflowWithFullLogging(
|
||||
setActiveBlocks(wfId, new Set(activeBlocksSet))
|
||||
|
||||
setBlockRunStatus(wfId, event.data.blockId, 'error')
|
||||
markOutgoingEdgesFromOutput(
|
||||
event.data.blockId,
|
||||
{ error: event.data.error },
|
||||
workflowEdges,
|
||||
wfId,
|
||||
setEdgeRunStatus
|
||||
)
|
||||
|
||||
addConsole({
|
||||
input: event.data.input || {},
|
||||
|
||||
@@ -196,17 +196,14 @@ const edgeTypes: EdgeTypes = {
|
||||
const defaultEdgeOptions = { type: 'custom' }
|
||||
|
||||
const reactFlowStyles = [
|
||||
'bg-[var(--bg)]',
|
||||
'[&_.react-flow__edges]:!z-0',
|
||||
'[&_.react-flow__node]:z-[21]',
|
||||
'[&_.react-flow__handle]:!z-[30]',
|
||||
'[&_.react-flow__edge-labels]:!z-[60]',
|
||||
'[&_.react-flow__pane]:!bg-[var(--bg)]',
|
||||
'[&_.react-flow__edge-labels]:!z-[1001]',
|
||||
'[&_.react-flow__pane]:select-none',
|
||||
'[&_.react-flow__selectionpane]:select-none',
|
||||
'[&_.react-flow__renderer]:!bg-[var(--bg)]',
|
||||
'[&_.react-flow__viewport]:!bg-[var(--bg)]',
|
||||
'[&_.react-flow__background]:hidden',
|
||||
'[&_.react-flow__node-subflowNode.selected]:!shadow-none',
|
||||
].join(' ')
|
||||
const reactFlowFitViewOptions = { padding: 0.6, maxZoom: 1.0 } as const
|
||||
const reactFlowProOptions = { hideAttribution: true } as const
|
||||
@@ -2412,6 +2409,12 @@ const WorkflowContent = React.memo(() => {
|
||||
const nodeType = block.type === 'note' ? 'noteBlock' : 'workflowBlock'
|
||||
const dragHandle = block.type === 'note' ? '.note-drag-handle' : '.workflow-drag-handle'
|
||||
|
||||
// Compute zIndex for blocks inside containers so they render above the
|
||||
// parent subflow's interactive body area (which needs pointer-events for
|
||||
// click-to-select). Container nodes use zIndex: depth (0, 1, 2...),
|
||||
// so child blocks use a baseline that is always above any container.
|
||||
const childZIndex = block.data?.parentId ? 1000 : undefined
|
||||
|
||||
// Create stable node object - React Flow will handle shallow comparison
|
||||
nodeArray.push({
|
||||
id: block.id,
|
||||
@@ -2420,6 +2423,7 @@ const WorkflowContent = React.memo(() => {
|
||||
parentId: block.data?.parentId,
|
||||
dragHandle,
|
||||
draggable: !isBlockProtected(block.id, blocks),
|
||||
...(childZIndex !== undefined && { zIndex: childZIndex }),
|
||||
extent: (() => {
|
||||
// Clamp children to subflow body (exclude header)
|
||||
const parentId = block.data?.parentId as string | undefined
|
||||
@@ -3768,21 +3772,20 @@ const WorkflowContent = React.memo(() => {
|
||||
return (
|
||||
<div className='flex h-full w-full flex-col overflow-hidden'>
|
||||
<div className='relative h-full w-full flex-1'>
|
||||
{/* Loading spinner - always mounted, animation paused when hidden to avoid overhead */}
|
||||
<div
|
||||
className={`absolute inset-0 z-[5] flex items-center justify-center bg-[var(--bg)] transition-opacity duration-150 ${isWorkflowReady ? 'pointer-events-none opacity-0' : 'opacity-100'}`}
|
||||
>
|
||||
<div
|
||||
className={`h-[18px] w-[18px] rounded-full ${isWorkflowReady ? '' : 'animate-spin'}`}
|
||||
style={{
|
||||
background:
|
||||
'conic-gradient(from 0deg, hsl(var(--muted-foreground)) 0deg 120deg, transparent 120deg 180deg, hsl(var(--muted-foreground)) 180deg 300deg, transparent 300deg 360deg)',
|
||||
mask: 'radial-gradient(farthest-side, transparent calc(100% - 1.5px), black calc(100% - 1.5px))',
|
||||
WebkitMask:
|
||||
'radial-gradient(farthest-side, transparent calc(100% - 1.5px), black calc(100% - 1.5px))',
|
||||
}}
|
||||
/>
|
||||
</div>
|
||||
{!isWorkflowReady && (
|
||||
<div className='absolute inset-0 z-[5] flex items-center justify-center bg-[var(--bg)]'>
|
||||
<div
|
||||
className='h-[18px] w-[18px] animate-spin rounded-full'
|
||||
style={{
|
||||
background:
|
||||
'conic-gradient(from 0deg, hsl(var(--muted-foreground)) 0deg 120deg, transparent 120deg 180deg, hsl(var(--muted-foreground)) 180deg 300deg, transparent 300deg 360deg)',
|
||||
mask: 'radial-gradient(farthest-side, transparent calc(100% - 1.5px), black calc(100% - 1.5px))',
|
||||
WebkitMask:
|
||||
'radial-gradient(farthest-side, transparent calc(100% - 1.5px), black calc(100% - 1.5px))',
|
||||
}}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{isWorkflowReady && (
|
||||
<>
|
||||
@@ -3835,7 +3838,7 @@ const WorkflowContent = React.memo(() => {
|
||||
noWheelClassName='allow-scroll'
|
||||
edgesFocusable={true}
|
||||
edgesUpdatable={effectivePermissions.canEdit}
|
||||
className={`workflow-container h-full transition-opacity duration-150 ${reactFlowStyles} ${isCanvasReady ? 'opacity-100' : 'opacity-0'} ${isHandMode ? 'canvas-mode-hand' : 'canvas-mode-cursor'}`}
|
||||
className={`workflow-container h-full bg-[var(--bg)] transition-opacity duration-150 ${reactFlowStyles} ${isCanvasReady ? 'opacity-100' : 'opacity-0'} ${isHandMode ? 'canvas-mode-hand' : 'canvas-mode-cursor'}`}
|
||||
onNodeDrag={effectivePermissions.canEdit ? onNodeDrag : undefined}
|
||||
onNodeDragStop={effectivePermissions.canEdit ? onNodeDragStop : undefined}
|
||||
onSelectionDragStart={effectivePermissions.canEdit ? onSelectionDragStart : undefined}
|
||||
@@ -3847,7 +3850,7 @@ const WorkflowContent = React.memo(() => {
|
||||
elevateEdgesOnSelect={true}
|
||||
onlyRenderVisibleElements={false}
|
||||
deleteKeyCode={null}
|
||||
elevateNodesOnSelect={true}
|
||||
elevateNodesOnSelect={false}
|
||||
autoPanOnConnect={effectivePermissions.canEdit}
|
||||
autoPanOnNodeDrag={effectivePermissions.canEdit}
|
||||
/>
|
||||
|
||||
@@ -145,7 +145,7 @@ interface PreviewWorkflowProps {
|
||||
/** Cursor style to show when hovering the canvas */
|
||||
cursorStyle?: 'default' | 'pointer' | 'grab'
|
||||
/** Map of executed block IDs to their status for highlighting the execution path */
|
||||
executedBlocks?: Record<string, { status: string }>
|
||||
executedBlocks?: Record<string, { status: string; output?: unknown }>
|
||||
/** Currently selected block ID for highlighting */
|
||||
selectedBlockId?: string | null
|
||||
/** Skips expensive subblock computations for thumbnails/template previews */
|
||||
@@ -274,9 +274,9 @@ export function PreviewWorkflow({
|
||||
|
||||
/** Maps base block IDs to execution data, handling parallel iteration variants (blockId₍n₎). */
|
||||
const blockExecutionMap = useMemo(() => {
|
||||
if (!executedBlocks) return new Map<string, { status: string }>()
|
||||
if (!executedBlocks) return new Map<string, { status: string; output?: unknown }>()
|
||||
|
||||
const map = new Map<string, { status: string }>()
|
||||
const map = new Map<string, { status: string; output?: unknown }>()
|
||||
for (const [key, value] of Object.entries(executedBlocks)) {
|
||||
// Extract base ID (remove iteration suffix like ₍0₎)
|
||||
const baseId = key.includes('₍') ? key.split('₍')[0] : key
|
||||
@@ -451,7 +451,6 @@ export function PreviewWorkflow({
|
||||
const edges: Edge[] = useMemo(() => {
|
||||
if (!isValidWorkflowState) return []
|
||||
|
||||
/** Edge is green if target executed and source condition met by edge type. */
|
||||
const getEdgeExecutionStatus = (edge: {
|
||||
source: string
|
||||
target: string
|
||||
@@ -463,17 +462,40 @@ export function PreviewWorkflow({
|
||||
if (!targetStatus?.executed) return 'not-executed'
|
||||
|
||||
const sourceStatus = getBlockExecutionStatus(edge.source)
|
||||
const { sourceHandle } = edge
|
||||
if (!sourceStatus?.executed) return 'not-executed'
|
||||
|
||||
if (sourceHandle === 'error') {
|
||||
return sourceStatus?.status === 'error' ? 'success' : 'not-executed'
|
||||
const handle = edge.sourceHandle
|
||||
if (!handle) {
|
||||
return sourceStatus.status === 'success' ? 'success' : 'not-executed'
|
||||
}
|
||||
|
||||
if (sourceHandle === 'loop-start-source' || sourceHandle === 'parallel-start-source') {
|
||||
return 'success'
|
||||
const sourceOutput = blockExecutionMap.get(edge.source)?.output as
|
||||
| Record<string, any>
|
||||
| undefined
|
||||
|
||||
if (handle.startsWith('condition-')) {
|
||||
const conditionValue = handle.substring('condition-'.length)
|
||||
return sourceOutput?.selectedOption === conditionValue ? 'success' : 'not-executed'
|
||||
}
|
||||
|
||||
return sourceStatus?.status === 'success' ? 'success' : 'not-executed'
|
||||
if (handle.startsWith('router-')) {
|
||||
const routeId = handle.substring('router-'.length)
|
||||
return sourceOutput?.selectedRoute === routeId ? 'success' : 'not-executed'
|
||||
}
|
||||
|
||||
switch (handle) {
|
||||
case 'error':
|
||||
return sourceStatus.status === 'error' ? 'error' : 'not-executed'
|
||||
case 'source':
|
||||
return sourceStatus.status === 'success' ? 'success' : 'not-executed'
|
||||
case 'loop-start-source':
|
||||
case 'loop-end-source':
|
||||
case 'parallel-start-source':
|
||||
case 'parallel-end-source':
|
||||
return 'success'
|
||||
default:
|
||||
return sourceStatus.status === 'success' ? 'success' : 'not-executed'
|
||||
}
|
||||
}
|
||||
|
||||
return (workflowState.edges || []).map((edge) => {
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -129,6 +129,30 @@ Output: {"short_description": "Network outage", "description": "Network connecti
|
||||
condition: { field: 'operation', value: 'servicenow_read_record' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
id: 'offset',
|
||||
title: 'Offset',
|
||||
type: 'short-input',
|
||||
placeholder: '0',
|
||||
condition: { field: 'operation', value: 'servicenow_read_record' },
|
||||
description: 'Number of records to skip for pagination',
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
id: 'displayValue',
|
||||
title: 'Display Value',
|
||||
type: 'dropdown',
|
||||
options: [
|
||||
{ label: 'Default (not set)', id: '' },
|
||||
{ label: 'False (sys_id only)', id: 'false' },
|
||||
{ label: 'True (display value only)', id: 'true' },
|
||||
{ label: 'All (both)', id: 'all' },
|
||||
],
|
||||
value: () => '',
|
||||
condition: { field: 'operation', value: 'servicenow_read_record' },
|
||||
description: 'Return display values for reference fields instead of sys_ids',
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
id: 'fields',
|
||||
title: 'Fields to Return',
|
||||
@@ -203,6 +227,9 @@ Output: {"state": "2", "assigned_to": "john.doe", "work_notes": "Assigned and st
|
||||
const isCreateOrUpdate =
|
||||
operation === 'servicenow_create_record' || operation === 'servicenow_update_record'
|
||||
|
||||
if (rest.limit != null && rest.limit !== '') rest.limit = Number(rest.limit)
|
||||
if (rest.offset != null && rest.offset !== '') rest.offset = Number(rest.offset)
|
||||
|
||||
if (fields && isCreateOrUpdate) {
|
||||
const parsedFields = typeof fields === 'string' ? JSON.parse(fields) : fields
|
||||
return { ...rest, fields: parsedFields }
|
||||
@@ -222,7 +249,9 @@ Output: {"state": "2", "assigned_to": "john.doe", "work_notes": "Assigned and st
|
||||
number: { type: 'string', description: 'Record number' },
|
||||
query: { type: 'string', description: 'Query string' },
|
||||
limit: { type: 'number', description: 'Result limit' },
|
||||
offset: { type: 'number', description: 'Pagination offset' },
|
||||
fields: { type: 'json', description: 'Fields object or JSON string' },
|
||||
displayValue: { type: 'string', description: 'Display value mode for reference fields' },
|
||||
},
|
||||
outputs: {
|
||||
record: { type: 'json', description: 'Single ServiceNow record' },
|
||||
|
||||
@@ -9,10 +9,10 @@ export const SlackBlock: BlockConfig<SlackResponse> = {
|
||||
type: 'slack',
|
||||
name: 'Slack',
|
||||
description:
|
||||
'Send, update, delete messages, send ephemeral messages, add reactions in Slack or trigger workflows from Slack events',
|
||||
'Send, update, delete messages, manage views and modals, add or remove reactions, manage canvases, get channel info and user presence in Slack',
|
||||
authMode: AuthMode.OAuth,
|
||||
longDescription:
|
||||
'Integrate Slack into the workflow. Can send, update, and delete messages, send ephemeral messages visible only to a specific user, create canvases, read messages, and add reactions. Requires Bot Token instead of OAuth in advanced mode. Can be used in trigger mode to trigger a workflow when a message is sent to a channel.',
|
||||
'Integrate Slack into the workflow. Can send, update, and delete messages, send ephemeral messages visible only to a specific user, open/update/push modal views, publish Home tab views, create canvases, read messages, and add or remove reactions. Requires Bot Token instead of OAuth in advanced mode. Can be used in trigger mode to trigger a workflow when a message is sent to a channel.',
|
||||
docsLink: 'https://docs.sim.ai/tools/slack',
|
||||
category: 'tools',
|
||||
bgColor: '#611f69',
|
||||
@@ -38,6 +38,15 @@ export const SlackBlock: BlockConfig<SlackResponse> = {
|
||||
{ label: 'Update Message', id: 'update' },
|
||||
{ label: 'Delete Message', id: 'delete' },
|
||||
{ label: 'Add Reaction', id: 'react' },
|
||||
{ label: 'Remove Reaction', id: 'unreact' },
|
||||
{ label: 'Get Channel Info', id: 'get_channel_info' },
|
||||
{ label: 'Get User Presence', id: 'get_user_presence' },
|
||||
{ label: 'Edit Canvas', id: 'edit_canvas' },
|
||||
{ label: 'Create Channel Canvas', id: 'create_channel_canvas' },
|
||||
{ label: 'Open View', id: 'open_view' },
|
||||
{ label: 'Update View', id: 'update_view' },
|
||||
{ label: 'Push View', id: 'push_view' },
|
||||
{ label: 'Publish View', id: 'publish_view' },
|
||||
],
|
||||
value: () => 'send',
|
||||
},
|
||||
@@ -141,7 +150,17 @@ export const SlackBlock: BlockConfig<SlackResponse> = {
|
||||
}
|
||||
return {
|
||||
field: 'operation',
|
||||
value: ['list_channels', 'list_users', 'get_user'],
|
||||
value: [
|
||||
'list_channels',
|
||||
'list_users',
|
||||
'get_user',
|
||||
'get_user_presence',
|
||||
'edit_canvas',
|
||||
'open_view',
|
||||
'update_view',
|
||||
'push_view',
|
||||
'publish_view',
|
||||
],
|
||||
not: true,
|
||||
and: {
|
||||
field: 'destinationType',
|
||||
@@ -166,7 +185,17 @@ export const SlackBlock: BlockConfig<SlackResponse> = {
|
||||
}
|
||||
return {
|
||||
field: 'operation',
|
||||
value: ['list_channels', 'list_users', 'get_user'],
|
||||
value: [
|
||||
'list_channels',
|
||||
'list_users',
|
||||
'get_user',
|
||||
'get_user_presence',
|
||||
'edit_canvas',
|
||||
'open_view',
|
||||
'update_view',
|
||||
'push_view',
|
||||
'publish_view',
|
||||
],
|
||||
not: true,
|
||||
and: {
|
||||
field: 'destinationType',
|
||||
@@ -209,8 +238,26 @@ export const SlackBlock: BlockConfig<SlackResponse> = {
|
||||
{
|
||||
id: 'ephemeralUser',
|
||||
title: 'Target User',
|
||||
type: 'user-selector',
|
||||
canonicalParamId: 'ephemeralUser',
|
||||
serviceId: 'slack',
|
||||
selectorKey: 'slack.users',
|
||||
placeholder: 'Select Slack user',
|
||||
mode: 'basic',
|
||||
dependsOn: { all: ['authMethod'], any: ['credential', 'botToken'] },
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'ephemeral',
|
||||
},
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
id: 'manualEphemeralUser',
|
||||
title: 'Target User ID',
|
||||
type: 'short-input',
|
||||
placeholder: 'User ID who will see the message (e.g., U1234567890)',
|
||||
canonicalParamId: 'ephemeralUser',
|
||||
placeholder: 'Enter Slack user ID (e.g., U1234567890)',
|
||||
mode: 'advanced',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'ephemeral',
|
||||
@@ -440,9 +487,27 @@ Do not include any explanations, markdown formatting, or other text outside the
|
||||
// Get User specific fields
|
||||
{
|
||||
id: 'userId',
|
||||
title: 'User',
|
||||
type: 'user-selector',
|
||||
canonicalParamId: 'userId',
|
||||
serviceId: 'slack',
|
||||
selectorKey: 'slack.users',
|
||||
placeholder: 'Select Slack user',
|
||||
mode: 'basic',
|
||||
dependsOn: { all: ['authMethod'], any: ['credential', 'botToken'] },
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'get_user',
|
||||
},
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
id: 'manualUserId',
|
||||
title: 'User ID',
|
||||
type: 'short-input',
|
||||
canonicalParamId: 'userId',
|
||||
placeholder: 'Enter Slack user ID (e.g., U1234567890)',
|
||||
mode: 'advanced',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'get_user',
|
||||
@@ -608,7 +673,7 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
|
||||
placeholder: 'Message timestamp (e.g., 1405894322.002768)',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'react',
|
||||
value: ['react', 'unreact'],
|
||||
},
|
||||
required: true,
|
||||
},
|
||||
@@ -619,10 +684,301 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
|
||||
placeholder: 'Emoji name without colons (e.g., thumbsup, heart, eyes)',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'react',
|
||||
value: ['react', 'unreact'],
|
||||
},
|
||||
required: true,
|
||||
},
|
||||
// Get Channel Info specific fields
|
||||
{
|
||||
id: 'includeNumMembers',
|
||||
title: 'Include Member Count',
|
||||
type: 'dropdown',
|
||||
options: [
|
||||
{ label: 'Yes', id: 'true' },
|
||||
{ label: 'No', id: 'false' },
|
||||
],
|
||||
value: () => 'true',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'get_channel_info',
|
||||
},
|
||||
},
|
||||
// Get User Presence specific fields
|
||||
{
|
||||
id: 'presenceUserId',
|
||||
title: 'User',
|
||||
type: 'user-selector',
|
||||
canonicalParamId: 'presenceUserId',
|
||||
serviceId: 'slack',
|
||||
selectorKey: 'slack.users',
|
||||
placeholder: 'Select Slack user',
|
||||
mode: 'basic',
|
||||
dependsOn: { all: ['authMethod'], any: ['credential', 'botToken'] },
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'get_user_presence',
|
||||
},
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
id: 'manualPresenceUserId',
|
||||
title: 'User ID',
|
||||
type: 'short-input',
|
||||
canonicalParamId: 'presenceUserId',
|
||||
placeholder: 'Enter Slack user ID (e.g., U1234567890)',
|
||||
mode: 'advanced',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'get_user_presence',
|
||||
},
|
||||
required: true,
|
||||
},
|
||||
// Edit Canvas specific fields
|
||||
{
|
||||
id: 'editCanvasId',
|
||||
title: 'Canvas ID',
|
||||
type: 'short-input',
|
||||
placeholder: 'Enter canvas ID (e.g., F1234ABCD)',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'edit_canvas',
|
||||
},
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
id: 'canvasOperation',
|
||||
title: 'Edit Operation',
|
||||
type: 'dropdown',
|
||||
options: [
|
||||
{ label: 'Insert at Start', id: 'insert_at_start' },
|
||||
{ label: 'Insert at End', id: 'insert_at_end' },
|
||||
{ label: 'Insert After Section', id: 'insert_after' },
|
||||
{ label: 'Insert Before Section', id: 'insert_before' },
|
||||
{ label: 'Replace Section', id: 'replace' },
|
||||
{ label: 'Delete Section', id: 'delete' },
|
||||
{ label: 'Rename Canvas', id: 'rename' },
|
||||
],
|
||||
value: () => 'insert_at_end',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'edit_canvas',
|
||||
},
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
id: 'canvasContent',
|
||||
title: 'Content',
|
||||
type: 'long-input',
|
||||
placeholder: 'Enter content in markdown format',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'edit_canvas',
|
||||
and: {
|
||||
field: 'canvasOperation',
|
||||
value: ['delete', 'rename'],
|
||||
not: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'sectionId',
|
||||
title: 'Section ID',
|
||||
type: 'short-input',
|
||||
placeholder: 'Section ID to target',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'edit_canvas',
|
||||
and: {
|
||||
field: 'canvasOperation',
|
||||
value: ['insert_after', 'insert_before', 'replace', 'delete'],
|
||||
},
|
||||
},
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
id: 'canvasTitle',
|
||||
title: 'New Title',
|
||||
type: 'short-input',
|
||||
placeholder: 'Enter new canvas title',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'edit_canvas',
|
||||
and: { field: 'canvasOperation', value: 'rename' },
|
||||
},
|
||||
required: true,
|
||||
},
|
||||
// Create Channel Canvas specific fields
|
||||
{
|
||||
id: 'channelCanvasTitle',
|
||||
title: 'Canvas Title',
|
||||
type: 'short-input',
|
||||
placeholder: 'Enter canvas title (optional)',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'create_channel_canvas',
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'channelCanvasContent',
|
||||
title: 'Canvas Content',
|
||||
type: 'long-input',
|
||||
placeholder: 'Enter canvas content (markdown supported)',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'create_channel_canvas',
|
||||
},
|
||||
},
|
||||
// Open View / Push View specific fields
|
||||
{
|
||||
id: 'viewTriggerId',
|
||||
title: 'Trigger ID',
|
||||
type: 'short-input',
|
||||
placeholder: 'Trigger ID from interaction payload',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: ['open_view', 'push_view'],
|
||||
},
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
id: 'viewInteractivityPointer',
|
||||
title: 'Interactivity Pointer',
|
||||
type: 'short-input',
|
||||
placeholder: 'Alternative to trigger_id (optional)',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: ['open_view', 'push_view'],
|
||||
},
|
||||
mode: 'advanced',
|
||||
},
|
||||
// Update View specific fields
|
||||
{
|
||||
id: 'viewId',
|
||||
title: 'View ID',
|
||||
type: 'short-input',
|
||||
placeholder: 'Unique view identifier (either View ID or External ID required)',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'update_view',
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'viewExternalId',
|
||||
title: 'External ID',
|
||||
type: 'short-input',
|
||||
placeholder: 'Developer-set unique identifier (max 255 chars)',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'update_view',
|
||||
},
|
||||
},
|
||||
// Update View / Publish View hash field
|
||||
{
|
||||
id: 'viewHash',
|
||||
title: 'View Hash',
|
||||
type: 'short-input',
|
||||
placeholder: 'View state hash for race condition protection',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: ['update_view', 'publish_view'],
|
||||
},
|
||||
mode: 'advanced',
|
||||
},
|
||||
// Publish View specific fields
|
||||
{
|
||||
id: 'publishUserId',
|
||||
title: 'User',
|
||||
type: 'user-selector',
|
||||
canonicalParamId: 'publishUserId',
|
||||
serviceId: 'slack',
|
||||
selectorKey: 'slack.users',
|
||||
placeholder: 'Select user to publish Home tab to',
|
||||
mode: 'basic',
|
||||
dependsOn: { all: ['authMethod'], any: ['credential', 'botToken'] },
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'publish_view',
|
||||
},
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
id: 'manualPublishUserId',
|
||||
title: 'User ID',
|
||||
type: 'short-input',
|
||||
canonicalParamId: 'publishUserId',
|
||||
placeholder: 'Enter Slack user ID (e.g., U0BPQUNTA)',
|
||||
mode: 'advanced',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'publish_view',
|
||||
},
|
||||
required: true,
|
||||
},
|
||||
// View payload (shared across all view operations)
|
||||
{
|
||||
id: 'viewPayload',
|
||||
title: 'View Payload',
|
||||
type: 'code',
|
||||
language: 'json',
|
||||
placeholder: 'JSON view payload with type, title, and blocks',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: ['open_view', 'update_view', 'push_view', 'publish_view'],
|
||||
},
|
||||
required: true,
|
||||
wandConfig: {
|
||||
enabled: true,
|
||||
maintainHistory: true,
|
||||
prompt: `You are an expert at Slack Block Kit views.
|
||||
Generate ONLY a valid JSON view payload object based on the user's request.
|
||||
The output MUST be a JSON object starting with { and ending with }.
|
||||
|
||||
Current view: {context}
|
||||
|
||||
The view object must include:
|
||||
- "type": "modal" (for open/update/push) or "home" (for publish)
|
||||
- "title": { "type": "plain_text", "text": "Title text", "emoji": true } (max 24 chars)
|
||||
- "blocks": Array of Block Kit blocks
|
||||
|
||||
Optional fields:
|
||||
- "submit": { "type": "plain_text", "text": "Submit" } - Submit button text
|
||||
- "close": { "type": "plain_text", "text": "Cancel" } - Close button text
|
||||
- "private_metadata": String up to 3000 chars
|
||||
- "callback_id": String identifier for interaction handling
|
||||
- "clear_on_close": true/false
|
||||
- "notify_on_close": true/false
|
||||
- "external_id": Unique string per workspace (max 255 chars)
|
||||
|
||||
Available block types:
|
||||
- "section": Text with optional accessory. Text uses { "type": "mrkdwn", "text": "..." } or { "type": "plain_text", "text": "..." }
|
||||
- "input": Form input with a label and element (plain_text_input, static_select, multi_static_select, datepicker, timepicker, checkboxes, radio_buttons)
|
||||
- "header": Large text header (plain_text only)
|
||||
- "divider": Horizontal rule separator
|
||||
- "image": Requires "image_url" and "alt_text"
|
||||
- "context": Contextual info with "elements" array
|
||||
- "actions": Interactive elements like buttons
|
||||
|
||||
Example modal:
|
||||
{
|
||||
"type": "modal",
|
||||
"title": { "type": "plain_text", "text": "My Form" },
|
||||
"submit": { "type": "plain_text", "text": "Submit" },
|
||||
"close": { "type": "plain_text", "text": "Cancel" },
|
||||
"blocks": [
|
||||
{
|
||||
"type": "input",
|
||||
"block_id": "input_1",
|
||||
"label": { "type": "plain_text", "text": "Name" },
|
||||
"element": { "type": "plain_text_input", "action_id": "name_input" }
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
You can reference workflow variables using angle brackets, e.g., <blockName.output>.
|
||||
Do not include any explanations, markdown formatting, or other text outside the JSON object.`,
|
||||
placeholder: 'Describe the view/modal you want to create...',
|
||||
},
|
||||
},
|
||||
...getTrigger('slack_webhook').subBlocks,
|
||||
],
|
||||
tools: {
|
||||
@@ -641,6 +997,15 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
|
||||
'slack_update_message',
|
||||
'slack_delete_message',
|
||||
'slack_add_reaction',
|
||||
'slack_remove_reaction',
|
||||
'slack_get_channel_info',
|
||||
'slack_get_user_presence',
|
||||
'slack_edit_canvas',
|
||||
'slack_create_channel_canvas',
|
||||
'slack_open_view',
|
||||
'slack_update_view',
|
||||
'slack_push_view',
|
||||
'slack_publish_view',
|
||||
],
|
||||
config: {
|
||||
tool: (params) => {
|
||||
@@ -673,6 +1038,24 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
|
||||
return 'slack_delete_message'
|
||||
case 'react':
|
||||
return 'slack_add_reaction'
|
||||
case 'unreact':
|
||||
return 'slack_remove_reaction'
|
||||
case 'get_channel_info':
|
||||
return 'slack_get_channel_info'
|
||||
case 'get_user_presence':
|
||||
return 'slack_get_user_presence'
|
||||
case 'edit_canvas':
|
||||
return 'slack_edit_canvas'
|
||||
case 'create_channel_canvas':
|
||||
return 'slack_create_channel_canvas'
|
||||
case 'open_view':
|
||||
return 'slack_open_view'
|
||||
case 'update_view':
|
||||
return 'slack_update_view'
|
||||
case 'push_view':
|
||||
return 'slack_push_view'
|
||||
case 'publish_view':
|
||||
return 'slack_publish_view'
|
||||
default:
|
||||
throw new Error(`Invalid Slack operation: ${params.operation}`)
|
||||
}
|
||||
@@ -710,6 +1093,22 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
|
||||
getMessageTimestamp,
|
||||
getThreadTimestamp,
|
||||
threadLimit,
|
||||
includeNumMembers,
|
||||
presenceUserId,
|
||||
editCanvasId,
|
||||
canvasOperation,
|
||||
canvasContent,
|
||||
sectionId,
|
||||
canvasTitle,
|
||||
channelCanvasTitle,
|
||||
channelCanvasContent,
|
||||
viewTriggerId,
|
||||
viewInteractivityPointer,
|
||||
viewId,
|
||||
viewExternalId,
|
||||
viewHash,
|
||||
publishUserId,
|
||||
viewPayload,
|
||||
...rest
|
||||
} = params
|
||||
|
||||
@@ -820,10 +1219,10 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
|
||||
|
||||
case 'download': {
|
||||
const fileId = (rest as any).fileId
|
||||
const downloadFileName = (rest as any).downloadFileName
|
||||
const fileName = (rest as any).fileName
|
||||
baseParams.fileId = fileId
|
||||
if (downloadFileName) {
|
||||
baseParams.fileName = downloadFileName
|
||||
if (fileName) {
|
||||
baseParams.fileName = fileName
|
||||
}
|
||||
break
|
||||
}
|
||||
@@ -841,9 +1240,78 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
|
||||
break
|
||||
|
||||
case 'react':
|
||||
case 'unreact':
|
||||
baseParams.timestamp = reactionTimestamp
|
||||
baseParams.name = emojiName
|
||||
break
|
||||
|
||||
case 'get_channel_info':
|
||||
baseParams.includeNumMembers = includeNumMembers !== 'false'
|
||||
break
|
||||
|
||||
case 'get_user_presence':
|
||||
baseParams.userId = presenceUserId
|
||||
break
|
||||
|
||||
case 'edit_canvas':
|
||||
baseParams.canvasId = editCanvasId
|
||||
baseParams.operation = canvasOperation
|
||||
if (canvasContent) {
|
||||
baseParams.content = canvasContent
|
||||
}
|
||||
if (sectionId) {
|
||||
baseParams.sectionId = sectionId
|
||||
}
|
||||
if (canvasTitle) {
|
||||
baseParams.title = canvasTitle
|
||||
}
|
||||
break
|
||||
|
||||
case 'create_channel_canvas':
|
||||
if (channelCanvasTitle) {
|
||||
baseParams.title = channelCanvasTitle
|
||||
}
|
||||
if (channelCanvasContent) {
|
||||
baseParams.content = channelCanvasContent
|
||||
}
|
||||
break
|
||||
|
||||
case 'open_view':
|
||||
baseParams.triggerId = viewTriggerId
|
||||
if (viewInteractivityPointer) {
|
||||
baseParams.interactivityPointer = viewInteractivityPointer
|
||||
}
|
||||
baseParams.view = viewPayload
|
||||
break
|
||||
|
||||
case 'update_view':
|
||||
if (viewId) {
|
||||
baseParams.viewId = viewId
|
||||
}
|
||||
if (viewExternalId) {
|
||||
baseParams.externalId = viewExternalId
|
||||
}
|
||||
if (viewHash) {
|
||||
baseParams.hash = viewHash
|
||||
}
|
||||
baseParams.view = viewPayload
|
||||
break
|
||||
|
||||
case 'push_view':
|
||||
baseParams.triggerId = viewTriggerId
|
||||
if (viewInteractivityPointer) {
|
||||
baseParams.interactivityPointer = viewInteractivityPointer
|
||||
}
|
||||
baseParams.view = viewPayload
|
||||
break
|
||||
|
||||
case 'publish_view':
|
||||
baseParams.userId = publishUserId
|
||||
if (viewHash) {
|
||||
baseParams.hash = viewHash
|
||||
}
|
||||
baseParams.view = viewPayload
|
||||
break
|
||||
}
|
||||
|
||||
return baseParams
|
||||
@@ -898,6 +1366,36 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
|
||||
type: 'string',
|
||||
description: 'Maximum number of messages to return from thread',
|
||||
},
|
||||
// Get Channel Info inputs
|
||||
includeNumMembers: { type: 'string', description: 'Include member count (true/false)' },
|
||||
// Get User Presence inputs
|
||||
presenceUserId: { type: 'string', description: 'User ID to check presence for' },
|
||||
// Edit Canvas inputs
|
||||
editCanvasId: { type: 'string', description: 'Canvas ID to edit' },
|
||||
canvasOperation: { type: 'string', description: 'Canvas edit operation' },
|
||||
canvasContent: { type: 'string', description: 'Markdown content for canvas edit' },
|
||||
sectionId: { type: 'string', description: 'Canvas section ID to target' },
|
||||
canvasTitle: { type: 'string', description: 'New canvas title for rename' },
|
||||
// Create Channel Canvas inputs
|
||||
channelCanvasTitle: { type: 'string', description: 'Title for channel canvas' },
|
||||
channelCanvasContent: { type: 'string', description: 'Content for channel canvas' },
|
||||
// View operation inputs
|
||||
viewTriggerId: { type: 'string', description: 'Trigger ID from interaction payload' },
|
||||
viewInteractivityPointer: {
|
||||
type: 'string',
|
||||
description: 'Alternative to trigger_id for posting to user',
|
||||
},
|
||||
viewId: { type: 'string', description: 'Unique view identifier for update' },
|
||||
viewExternalId: {
|
||||
type: 'string',
|
||||
description: 'Developer-set unique identifier for update (max 255 chars)',
|
||||
},
|
||||
viewHash: { type: 'string', description: 'View state hash for race condition protection' },
|
||||
publishUserId: {
|
||||
type: 'string',
|
||||
description: 'User ID to publish Home tab view to',
|
||||
},
|
||||
viewPayload: { type: 'json', description: 'View payload object with type, title, and blocks' },
|
||||
},
|
||||
outputs: {
|
||||
// slack_message outputs (send operation)
|
||||
@@ -994,6 +1492,50 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
|
||||
description: 'Updated message metadata (legacy, use message object instead)',
|
||||
},
|
||||
|
||||
// slack_get_channel_info outputs (get_channel_info operation)
|
||||
channelInfo: {
|
||||
type: 'json',
|
||||
description:
|
||||
'Detailed channel object with properties: id, name, is_private, is_archived, is_member, num_members, topic, purpose, created, creator',
|
||||
},
|
||||
|
||||
// slack_get_user_presence outputs (get_user_presence operation)
|
||||
presence: {
|
||||
type: 'string',
|
||||
description: 'User presence status: "active" or "away"',
|
||||
},
|
||||
online: {
|
||||
type: 'boolean',
|
||||
description:
|
||||
'Whether user has an active client connection (only available when checking own presence)',
|
||||
},
|
||||
autoAway: {
|
||||
type: 'boolean',
|
||||
description:
|
||||
'Whether user was automatically set to away (only available when checking own presence)',
|
||||
},
|
||||
manualAway: {
|
||||
type: 'boolean',
|
||||
description:
|
||||
'Whether user manually set themselves as away (only available when checking own presence)',
|
||||
},
|
||||
connectionCount: {
|
||||
type: 'number',
|
||||
description: 'Total number of active connections (only available when checking own presence)',
|
||||
},
|
||||
lastActivity: {
|
||||
type: 'number',
|
||||
description:
|
||||
'Unix timestamp of last detected activity (only available when checking own presence)',
|
||||
},
|
||||
|
||||
// View operation outputs (open_view, update_view, push_view, publish_view)
|
||||
view: {
|
||||
type: 'json',
|
||||
description:
|
||||
'View object with properties: id, team_id, type, title, submit, close, blocks, private_metadata, callback_id, external_id, state, hash, clear_on_close, notify_on_close, root_view_id, previous_view_id, app_id, bot_id',
|
||||
},
|
||||
|
||||
// Trigger outputs (when used as webhook trigger)
|
||||
event_type: { type: 'string', description: 'Type of Slack event that triggered the workflow' },
|
||||
channel_name: { type: 'string', description: 'Human-readable channel name' },
|
||||
|
||||
@@ -618,6 +618,8 @@ export class BlockExecutor {
|
||||
await ctx.onStream?.(clientStreamingExec)
|
||||
} catch (error) {
|
||||
logger.error('Error in onStream callback', { blockId, error })
|
||||
// Cancel the client stream to release the tee'd buffer
|
||||
await processedClientStream.cancel().catch(() => {})
|
||||
}
|
||||
})()
|
||||
|
||||
@@ -646,6 +648,7 @@ export class BlockExecutor {
|
||||
})
|
||||
} catch (error) {
|
||||
logger.error('Error in onStream callback', { blockId, error })
|
||||
await processedStream.cancel().catch(() => {})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -657,22 +660,25 @@ export class BlockExecutor {
|
||||
): Promise<void> {
|
||||
const reader = stream.getReader()
|
||||
const decoder = new TextDecoder()
|
||||
let fullContent = ''
|
||||
const chunks: string[] = []
|
||||
|
||||
try {
|
||||
while (true) {
|
||||
const { done, value } = await reader.read()
|
||||
if (done) break
|
||||
fullContent += decoder.decode(value, { stream: true })
|
||||
chunks.push(decoder.decode(value, { stream: true }))
|
||||
}
|
||||
const tail = decoder.decode()
|
||||
if (tail) chunks.push(tail)
|
||||
} catch (error) {
|
||||
logger.error('Error reading executor stream for block', { blockId, error })
|
||||
} finally {
|
||||
try {
|
||||
reader.releaseLock()
|
||||
await reader.cancel().catch(() => {})
|
||||
} catch {}
|
||||
}
|
||||
|
||||
const fullContent = chunks.join('')
|
||||
if (!fullContent) {
|
||||
return
|
||||
}
|
||||
|
||||
@@ -66,11 +66,15 @@ describe('EdgeManager', () => {
|
||||
const dag = createMockDAG(nodes)
|
||||
const edgeManager = new EdgeManager(dag)
|
||||
|
||||
const readyAfterA = edgeManager.processOutgoingEdges(blockANode, { result: 'done' })
|
||||
const readyAfterA = edgeManager.processOutgoingEdges(blockANode, {
|
||||
result: 'done',
|
||||
})
|
||||
expect(readyAfterA).toContain(blockBId)
|
||||
expect(readyAfterA).not.toContain(blockCId)
|
||||
|
||||
const readyAfterB = edgeManager.processOutgoingEdges(blockBNode, { result: 'done' })
|
||||
const readyAfterB = edgeManager.processOutgoingEdges(blockBNode, {
|
||||
result: 'done',
|
||||
})
|
||||
expect(readyAfterB).toContain(blockCId)
|
||||
})
|
||||
|
||||
@@ -591,7 +595,9 @@ describe('EdgeManager', () => {
|
||||
|
||||
function1Node.incomingEdges.add(conditionId)
|
||||
|
||||
const readyNodes = edgeManager.processOutgoingEdges(conditionNode, { selectedOption: 'if' })
|
||||
const readyNodes = edgeManager.processOutgoingEdges(conditionNode, {
|
||||
selectedOption: 'if',
|
||||
})
|
||||
expect(readyNodes).toContain(function1Id)
|
||||
})
|
||||
})
|
||||
@@ -977,11 +983,15 @@ describe('EdgeManager', () => {
|
||||
const dag = createMockDAG(nodes)
|
||||
const edgeManager = new EdgeManager(dag)
|
||||
|
||||
const ready1 = edgeManager.processOutgoingEdges(condition1Node, { selectedOption: 'if' })
|
||||
const ready1 = edgeManager.processOutgoingEdges(condition1Node, {
|
||||
selectedOption: 'if',
|
||||
})
|
||||
expect(ready1).toContain(condition2Id)
|
||||
expect(ready1).not.toContain(target1Id)
|
||||
|
||||
const ready2 = edgeManager.processOutgoingEdges(condition2Node, { selectedOption: 'else' })
|
||||
const ready2 = edgeManager.processOutgoingEdges(condition2Node, {
|
||||
selectedOption: 'else',
|
||||
})
|
||||
expect(ready2).toContain(target1Id)
|
||||
expect(ready2).not.toContain(target2Id)
|
||||
})
|
||||
@@ -1394,10 +1404,14 @@ describe('EdgeManager', () => {
|
||||
const edgeManager = new EdgeManager(dag)
|
||||
|
||||
// Path: condition1(if) → condition2(else) → nodeC → sentinel_end
|
||||
const ready1 = edgeManager.processOutgoingEdges(condition1Node, { selectedOption: 'if' })
|
||||
const ready1 = edgeManager.processOutgoingEdges(condition1Node, {
|
||||
selectedOption: 'if',
|
||||
})
|
||||
expect(ready1).toContain(condition2Id)
|
||||
|
||||
const ready2 = edgeManager.processOutgoingEdges(condition2Node, { selectedOption: 'else' })
|
||||
const ready2 = edgeManager.processOutgoingEdges(condition2Node, {
|
||||
selectedOption: 'else',
|
||||
})
|
||||
expect(ready2).toContain(nodeCId)
|
||||
|
||||
const ready3 = edgeManager.processOutgoingEdges(nodeCNode, {})
|
||||
@@ -1448,7 +1462,9 @@ describe('EdgeManager', () => {
|
||||
const edgeManager = new EdgeManager(dag)
|
||||
|
||||
// Test else path through diamond
|
||||
const ready1 = edgeManager.processOutgoingEdges(conditionNode, { selectedOption: 'else' })
|
||||
const ready1 = edgeManager.processOutgoingEdges(conditionNode, {
|
||||
selectedOption: 'else',
|
||||
})
|
||||
expect(ready1).toContain(nodeBId)
|
||||
expect(ready1).not.toContain(nodeAId)
|
||||
|
||||
@@ -1509,7 +1525,9 @@ describe('EdgeManager', () => {
|
||||
const edgeManager = new EdgeManager(dag)
|
||||
|
||||
// Select else - triggers deep cascade deactivation of if path
|
||||
const ready1 = edgeManager.processOutgoingEdges(conditionNode, { selectedOption: 'else' })
|
||||
const ready1 = edgeManager.processOutgoingEdges(conditionNode, {
|
||||
selectedOption: 'else',
|
||||
})
|
||||
expect(ready1).toContain(nodeDId)
|
||||
|
||||
const ready2 = edgeManager.processOutgoingEdges(nodeDNode, {})
|
||||
@@ -1566,7 +1584,9 @@ describe('EdgeManager', () => {
|
||||
const edgeManager = new EdgeManager(dag)
|
||||
|
||||
// Test middle branch (elseif2)
|
||||
const ready1 = edgeManager.processOutgoingEdges(conditionNode, { selectedOption: 'elseif2' })
|
||||
const ready1 = edgeManager.processOutgoingEdges(conditionNode, {
|
||||
selectedOption: 'elseif2',
|
||||
})
|
||||
expect(ready1).toContain(nodeCId)
|
||||
expect(ready1).not.toContain(nodeAId)
|
||||
expect(ready1).not.toContain(nodeBId)
|
||||
@@ -1629,7 +1649,7 @@ describe('EdgeManager', () => {
|
||||
// Scenario: Loop with Function 1 → Condition 1 → Function 2
|
||||
// Condition has "if" branch → Function 2
|
||||
// Condition has "else" branch → NO connection (dead end)
|
||||
// When else is selected (selectedOption: null), the loop should continue
|
||||
// When else is selected, the loop sentinel should still fire
|
||||
//
|
||||
// DAG structure:
|
||||
// sentinel_start → func1 → condition → (if) → func2 → sentinel_end
|
||||
@@ -1637,11 +1657,12 @@ describe('EdgeManager', () => {
|
||||
// sentinel_end → (loop_continue) → sentinel_start
|
||||
//
|
||||
// When condition takes else with no edge:
|
||||
// - selectedOption: null (no condition matches)
|
||||
// - selectedOption is set (condition made a routing decision)
|
||||
// - The "if" edge gets deactivated
|
||||
// - func2 has no other active incoming edges, so edge to sentinel_end gets deactivated
|
||||
// - sentinel_end has no active incoming edges and should become ready
|
||||
// - sentinel_end is the enclosing loop's sentinel and should become ready
|
||||
|
||||
const loopId = 'loop-1'
|
||||
const sentinelStartId = 'sentinel-start'
|
||||
const sentinelEndId = 'sentinel-end'
|
||||
const func1Id = 'func1'
|
||||
@@ -1649,14 +1670,21 @@ describe('EdgeManager', () => {
|
||||
const func2Id = 'func2'
|
||||
|
||||
const sentinelStartNode = createMockNode(sentinelStartId, [{ target: func1Id }])
|
||||
sentinelStartNode.metadata = { isSentinel: true, sentinelType: 'start', loopId }
|
||||
|
||||
const func1Node = createMockNode(func1Id, [{ target: conditionId }], [sentinelStartId])
|
||||
// Condition only has "if" branch, no "else" edge (dead end)
|
||||
func1Node.metadata = { loopId, isLoopNode: true }
|
||||
|
||||
const conditionNode = createMockNode(
|
||||
conditionId,
|
||||
[{ target: func2Id, sourceHandle: 'condition-if' }],
|
||||
[func1Id]
|
||||
)
|
||||
conditionNode.metadata = { loopId, isLoopNode: true }
|
||||
|
||||
const func2Node = createMockNode(func2Id, [{ target: sentinelEndId }], [conditionId])
|
||||
func2Node.metadata = { loopId, isLoopNode: true }
|
||||
|
||||
const sentinelEndNode = createMockNode(
|
||||
sentinelEndId,
|
||||
[
|
||||
@@ -1665,6 +1693,8 @@ describe('EdgeManager', () => {
|
||||
],
|
||||
[func2Id]
|
||||
)
|
||||
sentinelEndNode.metadata = { isSentinel: true, sentinelType: 'end', loopId }
|
||||
|
||||
const afterLoopNode = createMockNode('after-loop', [], [sentinelEndId])
|
||||
|
||||
const nodes = new Map<string, DAGNode>([
|
||||
@@ -1679,22 +1709,17 @@ describe('EdgeManager', () => {
|
||||
const dag = createMockDAG(nodes)
|
||||
const edgeManager = new EdgeManager(dag)
|
||||
|
||||
// Simulate execution: sentinel_start → func1 → condition
|
||||
// Clear incoming edges as execution progresses (simulating normal flow)
|
||||
func1Node.incomingEdges.clear()
|
||||
conditionNode.incomingEdges.clear()
|
||||
|
||||
// Condition takes "else" but there's no else edge
|
||||
// selectedOption: null means no condition branch matches
|
||||
// Condition selects dead-end else (selectedOption is set — routing decision made)
|
||||
// but it's inside the loop, so the enclosing sentinel should still fire
|
||||
const ready = edgeManager.processOutgoingEdges(conditionNode, {
|
||||
selectedOption: null,
|
||||
conditionResult: false,
|
||||
selectedOption: 'else-id',
|
||||
conditionResult: true,
|
||||
selectedPath: null,
|
||||
})
|
||||
|
||||
// The "if" edge to func2 should be deactivated
|
||||
// func2 has no other incoming edges, so its edge to sentinel_end gets deactivated
|
||||
// sentinel_end has no active incoming edges and should be ready
|
||||
expect(ready).toContain(sentinelEndId)
|
||||
})
|
||||
|
||||
@@ -1763,11 +1788,12 @@ describe('EdgeManager', () => {
|
||||
// → (else) → [nothing]
|
||||
// → (else) → [nothing]
|
||||
//
|
||||
// When condition1 takes if, then condition2 takes else:
|
||||
// When condition1 takes if, then condition2 takes else (dead-end):
|
||||
// - condition2's "if" edge to func gets deactivated
|
||||
// - func's edge to sentinel_end gets deactivated
|
||||
// - sentinel_end should become ready
|
||||
// - sentinel_end is the enclosing loop's sentinel and should become ready
|
||||
|
||||
const loopId = 'loop-1'
|
||||
const sentinelStartId = 'sentinel-start'
|
||||
const sentinelEndId = 'sentinel-end'
|
||||
const condition1Id = 'condition1'
|
||||
@@ -1775,22 +1801,31 @@ describe('EdgeManager', () => {
|
||||
const funcId = 'func'
|
||||
|
||||
const sentinelStartNode = createMockNode(sentinelStartId, [{ target: condition1Id }])
|
||||
sentinelStartNode.metadata = { isSentinel: true, sentinelType: 'start', loopId }
|
||||
|
||||
const condition1Node = createMockNode(
|
||||
condition1Id,
|
||||
[{ target: condition2Id, sourceHandle: 'condition-if' }],
|
||||
[sentinelStartId]
|
||||
)
|
||||
condition1Node.metadata = { loopId, isLoopNode: true }
|
||||
|
||||
const condition2Node = createMockNode(
|
||||
condition2Id,
|
||||
[{ target: funcId, sourceHandle: 'condition-if' }],
|
||||
[condition1Id]
|
||||
)
|
||||
condition2Node.metadata = { loopId, isLoopNode: true }
|
||||
|
||||
const funcNode = createMockNode(funcId, [{ target: sentinelEndId }], [condition2Id])
|
||||
funcNode.metadata = { loopId, isLoopNode: true }
|
||||
|
||||
const sentinelEndNode = createMockNode(
|
||||
sentinelEndId,
|
||||
[{ target: sentinelStartId, sourceHandle: 'loop_continue' }],
|
||||
[funcId]
|
||||
)
|
||||
sentinelEndNode.metadata = { isSentinel: true, sentinelType: 'end', loopId }
|
||||
|
||||
const nodes = new Map<string, DAGNode>([
|
||||
[sentinelStartId, sentinelStartNode],
|
||||
@@ -1803,22 +1838,95 @@ describe('EdgeManager', () => {
|
||||
const dag = createMockDAG(nodes)
|
||||
const edgeManager = new EdgeManager(dag)
|
||||
|
||||
// Clear incoming edges as execution progresses
|
||||
condition1Node.incomingEdges.clear()
|
||||
|
||||
// condition1 takes "if" - condition2 becomes ready
|
||||
const ready1 = edgeManager.processOutgoingEdges(condition1Node, { selectedOption: 'if' })
|
||||
const ready1 = edgeManager.processOutgoingEdges(condition1Node, {
|
||||
selectedOption: 'if',
|
||||
})
|
||||
expect(ready1).toContain(condition2Id)
|
||||
|
||||
condition2Node.incomingEdges.clear()
|
||||
|
||||
// condition2 takes "else" (dead end)
|
||||
const ready2 = edgeManager.processOutgoingEdges(condition2Node, { selectedOption: null })
|
||||
// condition2 selects dead-end else (selectedOption set — routing decision made)
|
||||
const ready2 = edgeManager.processOutgoingEdges(condition2Node, {
|
||||
selectedOption: 'else-id',
|
||||
})
|
||||
|
||||
// sentinel_end should be ready because all paths to it are deactivated
|
||||
// sentinel_end is the enclosing loop's sentinel and should be ready
|
||||
expect(ready2).toContain(sentinelEndId)
|
||||
})
|
||||
|
||||
it('should not fire nested subflow sentinel when condition inside outer loop hits dead-end', () => {
|
||||
// Scenario: outer loop contains condition → (if) → inner loop → sentinel_end
|
||||
// → (else) → [dead end]
|
||||
//
|
||||
// When condition selects dead-end else:
|
||||
// - The outer loop's sentinel should fire (enclosing subflow)
|
||||
// - The inner loop's sentinel should NOT fire (downstream subflow)
|
||||
|
||||
const outerLoopId = 'outer-loop'
|
||||
const innerLoopId = 'inner-loop'
|
||||
const outerStartId = 'outer-start'
|
||||
const outerEndId = 'outer-end'
|
||||
const conditionId = 'condition'
|
||||
const innerStartId = 'inner-start'
|
||||
const innerBodyId = 'inner-body'
|
||||
const innerEndId = 'inner-end'
|
||||
|
||||
const outerStartNode = createMockNode(outerStartId, [{ target: conditionId }])
|
||||
outerStartNode.metadata = { isSentinel: true, sentinelType: 'start', loopId: outerLoopId }
|
||||
|
||||
const conditionNode = createMockNode(
|
||||
conditionId,
|
||||
[{ target: innerStartId, sourceHandle: 'condition-if' }],
|
||||
[outerStartId]
|
||||
)
|
||||
conditionNode.metadata = { loopId: outerLoopId, isLoopNode: true }
|
||||
|
||||
const innerStartNode = createMockNode(innerStartId, [{ target: innerBodyId }], [conditionId])
|
||||
innerStartNode.metadata = { isSentinel: true, sentinelType: 'start', loopId: innerLoopId }
|
||||
|
||||
const innerBodyNode = createMockNode(innerBodyId, [{ target: innerEndId }], [innerStartId])
|
||||
innerBodyNode.metadata = { loopId: innerLoopId, isLoopNode: true }
|
||||
|
||||
const innerEndNode = createMockNode(
|
||||
innerEndId,
|
||||
[{ target: outerEndId, sourceHandle: 'loop_exit' }],
|
||||
[innerBodyId]
|
||||
)
|
||||
innerEndNode.metadata = { isSentinel: true, sentinelType: 'end', loopId: innerLoopId }
|
||||
|
||||
const outerEndNode = createMockNode(
|
||||
outerEndId,
|
||||
[{ target: outerStartId, sourceHandle: 'loop_continue' }],
|
||||
[innerEndId]
|
||||
)
|
||||
outerEndNode.metadata = { isSentinel: true, sentinelType: 'end', loopId: outerLoopId }
|
||||
|
||||
const nodes = new Map<string, DAGNode>([
|
||||
[outerStartId, outerStartNode],
|
||||
[conditionId, conditionNode],
|
||||
[innerStartId, innerStartNode],
|
||||
[innerBodyId, innerBodyNode],
|
||||
[innerEndId, innerEndNode],
|
||||
[outerEndId, outerEndNode],
|
||||
])
|
||||
|
||||
const dag = createMockDAG(nodes)
|
||||
const edgeManager = new EdgeManager(dag)
|
||||
|
||||
conditionNode.incomingEdges.clear()
|
||||
|
||||
const ready = edgeManager.processOutgoingEdges(conditionNode, {
|
||||
selectedOption: 'else-id',
|
||||
})
|
||||
|
||||
// Outer loop sentinel should fire (condition is inside outer loop)
|
||||
expect(ready).toContain(outerEndId)
|
||||
// Inner loop sentinel should NOT fire (it's a downstream subflow)
|
||||
expect(ready).not.toContain(innerEndId)
|
||||
})
|
||||
|
||||
it('should NOT execute intermediate nodes in long cascade chains (2+ hops)', () => {
|
||||
// Regression test: When condition hits dead-end with 2+ intermediate nodes,
|
||||
// only sentinel_end should be ready, NOT the intermediate nodes.
|
||||
@@ -1922,7 +2030,9 @@ describe('EdgeManager', () => {
|
||||
const edgeManager = new EdgeManager(dag)
|
||||
|
||||
// Select else path
|
||||
const ready1 = edgeManager.processOutgoingEdges(conditionNode, { selectedOption: 'else' })
|
||||
const ready1 = edgeManager.processOutgoingEdges(conditionNode, {
|
||||
selectedOption: 'else',
|
||||
})
|
||||
expect(ready1).toContain(nodeBId)
|
||||
expect(ready1).not.toContain(nodeAId)
|
||||
|
||||
@@ -1968,7 +2078,9 @@ describe('EdgeManager', () => {
|
||||
const edgeManager = new EdgeManager(dag)
|
||||
|
||||
// When selectedOption is null, the cascade deactivation makes parallel_end ready
|
||||
const ready = edgeManager.processOutgoingEdges(conditionNode, { selectedOption: null })
|
||||
const ready = edgeManager.processOutgoingEdges(conditionNode, {
|
||||
selectedOption: null,
|
||||
})
|
||||
expect(ready).toContain(parallelEndId)
|
||||
})
|
||||
|
||||
@@ -2039,11 +2151,15 @@ describe('EdgeManager', () => {
|
||||
const edgeManager = new EdgeManager(dag)
|
||||
|
||||
// Branch 1: condition1 selects else
|
||||
const ready1 = edgeManager.processOutgoingEdges(condition1Node, { selectedOption: 'else' })
|
||||
const ready1 = edgeManager.processOutgoingEdges(condition1Node, {
|
||||
selectedOption: 'else',
|
||||
})
|
||||
expect(ready1).toContain(nodeBId)
|
||||
|
||||
// Branch 2: condition2 selects if
|
||||
const ready2 = edgeManager.processOutgoingEdges(condition2Node, { selectedOption: 'if' })
|
||||
const ready2 = edgeManager.processOutgoingEdges(condition2Node, {
|
||||
selectedOption: 'if',
|
||||
})
|
||||
expect(ready2).toContain(nodeCId)
|
||||
|
||||
// Both complete
|
||||
@@ -2200,7 +2316,9 @@ describe('EdgeManager', () => {
|
||||
const edgeManager = new EdgeManager(dag)
|
||||
|
||||
// nodeA errors
|
||||
const ready1 = edgeManager.processOutgoingEdges(nodeANode, { error: 'Something failed' })
|
||||
const ready1 = edgeManager.processOutgoingEdges(nodeANode, {
|
||||
error: 'Something failed',
|
||||
})
|
||||
expect(ready1).toContain(errorNodeId)
|
||||
expect(ready1).not.toContain(successNodeId)
|
||||
|
||||
@@ -2289,7 +2407,9 @@ describe('EdgeManager', () => {
|
||||
edgeManager.processOutgoingEdges(conditionNode, { selectedOption: 'if' })
|
||||
edgeManager.processOutgoingEdges(nodeANode, {})
|
||||
|
||||
const ready2 = edgeManager.processOutgoingEdges(loopEndNode, { selectedRoute: 'loop_exit' })
|
||||
const ready2 = edgeManager.processOutgoingEdges(loopEndNode, {
|
||||
selectedRoute: 'loop_exit',
|
||||
})
|
||||
expect(ready2).toContain(parallelEndId)
|
||||
|
||||
const ready3 = edgeManager.processOutgoingEdges(parallelEndNode, {
|
||||
@@ -2413,7 +2533,9 @@ describe('EdgeManager', () => {
|
||||
const dag = createMockDAG(nodes)
|
||||
const edgeManager = new EdgeManager(dag)
|
||||
|
||||
const successReady = edgeManager.processOutgoingEdges(sourceNode, { result: 'ok' })
|
||||
const successReady = edgeManager.processOutgoingEdges(sourceNode, {
|
||||
result: 'ok',
|
||||
})
|
||||
expect(successReady).toContain(targetId)
|
||||
})
|
||||
})
|
||||
@@ -2472,7 +2594,9 @@ describe('EdgeManager', () => {
|
||||
const edgeManager = new EdgeManager(dag)
|
||||
|
||||
// Condition selects "else" branch, deactivating the "if" branch (which contains the loop)
|
||||
const readyNodes = edgeManager.processOutgoingEdges(conditionNode, { selectedOption: 'else' })
|
||||
const readyNodes = edgeManager.processOutgoingEdges(conditionNode, {
|
||||
selectedOption: 'else',
|
||||
})
|
||||
|
||||
// Only otherBranch should be ready
|
||||
expect(readyNodes).toContain(otherBranchId)
|
||||
@@ -2539,7 +2663,9 @@ describe('EdgeManager', () => {
|
||||
const edgeManager = new EdgeManager(dag)
|
||||
|
||||
// Condition selects "else" branch
|
||||
const readyNodes = edgeManager.processOutgoingEdges(conditionNode, { selectedOption: 'else' })
|
||||
const readyNodes = edgeManager.processOutgoingEdges(conditionNode, {
|
||||
selectedOption: 'else',
|
||||
})
|
||||
|
||||
expect(readyNodes).toContain(otherBranchId)
|
||||
expect(readyNodes).not.toContain(parallelStartId)
|
||||
@@ -2626,6 +2752,171 @@ describe('EdgeManager', () => {
|
||||
expect(readyNodes).not.toContain(afterLoopId)
|
||||
})
|
||||
|
||||
it('should not queue sentinel-end when condition selects no-edge path (loop)', () => {
|
||||
// Bug scenario: condition → (if) → sentinel_start → body → sentinel_end → (loop_exit) → after_loop
|
||||
// → (else) → [NO outgoing edge]
|
||||
// Condition evaluates false, else is selected but has no edge.
|
||||
// With selectedOption set (routing decision made), cascadeTargets should NOT be queued.
|
||||
// Previously sentinel_end was queued via cascadeTargets, causing downstream blocks to execute.
|
||||
|
||||
const conditionId = 'condition'
|
||||
const sentinelStartId = 'sentinel-start'
|
||||
const loopBodyId = 'loop-body'
|
||||
const sentinelEndId = 'sentinel-end'
|
||||
const afterLoopId = 'after-loop'
|
||||
|
||||
const conditionNode = createMockNode(conditionId, [
|
||||
{ target: sentinelStartId, sourceHandle: 'condition-if-id' },
|
||||
])
|
||||
|
||||
const sentinelStartNode = createMockNode(
|
||||
sentinelStartId,
|
||||
[{ target: loopBodyId }],
|
||||
[conditionId]
|
||||
)
|
||||
|
||||
const loopBodyNode = createMockNode(
|
||||
loopBodyId,
|
||||
[{ target: sentinelEndId }],
|
||||
[sentinelStartId]
|
||||
)
|
||||
|
||||
const sentinelEndNode = createMockNode(
|
||||
sentinelEndId,
|
||||
[
|
||||
{ target: sentinelStartId, sourceHandle: 'loop_continue' },
|
||||
{ target: afterLoopId, sourceHandle: 'loop_exit' },
|
||||
],
|
||||
[loopBodyId]
|
||||
)
|
||||
|
||||
const afterLoopNode = createMockNode(afterLoopId, [], [sentinelEndId])
|
||||
|
||||
const nodes = new Map<string, DAGNode>([
|
||||
[conditionId, conditionNode],
|
||||
[sentinelStartId, sentinelStartNode],
|
||||
[loopBodyId, loopBodyNode],
|
||||
[sentinelEndId, sentinelEndNode],
|
||||
[afterLoopId, afterLoopNode],
|
||||
])
|
||||
|
||||
const dag = createMockDAG(nodes)
|
||||
const edgeManager = new EdgeManager(dag)
|
||||
|
||||
// Condition selected else, but else has no outgoing edge.
|
||||
// selectedOption is set (routing decision was made).
|
||||
const readyNodes = edgeManager.processOutgoingEdges(conditionNode, {
|
||||
selectedOption: 'else-id',
|
||||
})
|
||||
|
||||
// Nothing should be queued -- the entire branch is intentionally dead
|
||||
expect(readyNodes).not.toContain(sentinelStartId)
|
||||
expect(readyNodes).not.toContain(loopBodyId)
|
||||
expect(readyNodes).not.toContain(sentinelEndId)
|
||||
expect(readyNodes).not.toContain(afterLoopId)
|
||||
expect(readyNodes).toHaveLength(0)
|
||||
})
|
||||
|
||||
it('should not queue sentinel-end when condition selects no-edge path (parallel)', () => {
|
||||
// Same scenario with parallel instead of loop
|
||||
const conditionId = 'condition'
|
||||
const parallelStartId = 'parallel-start'
|
||||
const branchId = 'branch-0'
|
||||
const parallelEndId = 'parallel-end'
|
||||
const afterParallelId = 'after-parallel'
|
||||
|
||||
const conditionNode = createMockNode(conditionId, [
|
||||
{ target: parallelStartId, sourceHandle: 'condition-if-id' },
|
||||
])
|
||||
|
||||
const parallelStartNode = createMockNode(
|
||||
parallelStartId,
|
||||
[{ target: branchId }],
|
||||
[conditionId]
|
||||
)
|
||||
|
||||
const branchNode = createMockNode(
|
||||
branchId,
|
||||
[{ target: parallelEndId, sourceHandle: 'parallel_exit' }],
|
||||
[parallelStartId]
|
||||
)
|
||||
|
||||
const parallelEndNode = createMockNode(
|
||||
parallelEndId,
|
||||
[{ target: afterParallelId, sourceHandle: 'parallel_exit' }],
|
||||
[branchId]
|
||||
)
|
||||
|
||||
const afterParallelNode = createMockNode(afterParallelId, [], [parallelEndId])
|
||||
|
||||
const nodes = new Map<string, DAGNode>([
|
||||
[conditionId, conditionNode],
|
||||
[parallelStartId, parallelStartNode],
|
||||
[branchId, branchNode],
|
||||
[parallelEndId, parallelEndNode],
|
||||
[afterParallelId, afterParallelNode],
|
||||
])
|
||||
|
||||
const dag = createMockDAG(nodes)
|
||||
const edgeManager = new EdgeManager(dag)
|
||||
|
||||
const readyNodes = edgeManager.processOutgoingEdges(conditionNode, {
|
||||
selectedOption: 'else-id',
|
||||
})
|
||||
|
||||
expect(readyNodes).not.toContain(parallelStartId)
|
||||
expect(readyNodes).not.toContain(branchId)
|
||||
expect(readyNodes).not.toContain(parallelEndId)
|
||||
expect(readyNodes).not.toContain(afterParallelId)
|
||||
expect(readyNodes).toHaveLength(0)
|
||||
})
|
||||
|
||||
it('should still queue sentinel-end inside loop when no condition matches (true dead-end)', () => {
|
||||
// Contrast: condition INSIDE a loop with selectedOption null (no match, no routing decision).
|
||||
// This is a true dead-end where cascadeTargets SHOULD fire so the loop sentinel can handle exit.
|
||||
|
||||
const sentinelStartId = 'sentinel-start'
|
||||
const sentinelEndId = 'sentinel-end'
|
||||
const conditionId = 'condition'
|
||||
const nodeAId = 'node-a'
|
||||
|
||||
const sentinelStartNode = createMockNode(sentinelStartId, [{ target: conditionId }])
|
||||
const conditionNode = createMockNode(
|
||||
conditionId,
|
||||
[{ target: nodeAId, sourceHandle: 'condition-if' }],
|
||||
[sentinelStartId]
|
||||
)
|
||||
const nodeANode = createMockNode(nodeAId, [{ target: sentinelEndId }], [conditionId])
|
||||
const sentinelEndNode = createMockNode(
|
||||
sentinelEndId,
|
||||
[
|
||||
{ target: sentinelStartId, sourceHandle: 'loop_continue' },
|
||||
{ target: 'after-loop', sourceHandle: 'loop_exit' },
|
||||
],
|
||||
[nodeAId]
|
||||
)
|
||||
|
||||
const nodes = new Map<string, DAGNode>([
|
||||
[sentinelStartId, sentinelStartNode],
|
||||
[conditionId, conditionNode],
|
||||
[nodeAId, nodeANode],
|
||||
[sentinelEndId, sentinelEndNode],
|
||||
])
|
||||
|
||||
const dag = createMockDAG(nodes)
|
||||
const edgeManager = new EdgeManager(dag)
|
||||
|
||||
conditionNode.incomingEdges.clear()
|
||||
|
||||
// selectedOption: null → no routing decision, true dead-end
|
||||
const readyNodes = edgeManager.processOutgoingEdges(conditionNode, {
|
||||
selectedOption: null,
|
||||
})
|
||||
|
||||
// sentinel-end SHOULD be queued (true dead-end inside loop)
|
||||
expect(readyNodes).toContain(sentinelEndId)
|
||||
})
|
||||
|
||||
it('should still correctly handle normal loop exit (not deactivate when loop runs)', () => {
|
||||
// When a loop actually executes and exits normally, after_loop should become ready
|
||||
const sentinelStartId = 'sentinel-start'
|
||||
|
||||
@@ -69,15 +69,23 @@ export class EdgeManager {
|
||||
}
|
||||
}
|
||||
|
||||
const isDeadEnd = activatedTargets.length === 0
|
||||
const isRoutedDeadEnd = isDeadEnd && !!(output.selectedOption || output.selectedRoute)
|
||||
|
||||
for (const targetId of cascadeTargets) {
|
||||
if (!readyNodes.includes(targetId) && !activatedTargets.includes(targetId)) {
|
||||
// Only queue cascade terminal control nodes when ALL outgoing edges from the
|
||||
// current node were deactivated (dead-end scenario). When some edges are
|
||||
// activated, terminal control nodes on deactivated branches should NOT be
|
||||
// queued - they will be reached through the normal activated path's completion.
|
||||
// This prevents loop/parallel sentinels on fully deactivated paths (e.g., an
|
||||
// upstream condition took a different branch) from being spuriously executed.
|
||||
if (activatedTargets.length === 0 && this.isTargetReady(targetId)) {
|
||||
if (!isDeadEnd || !this.isTargetReady(targetId)) continue
|
||||
|
||||
if (isRoutedDeadEnd) {
|
||||
// A condition/router deliberately selected a dead-end path.
|
||||
// Only queue the sentinel if it belongs to the SAME subflow as the
|
||||
// current node (the condition is inside the loop/parallel and the
|
||||
// loop still needs to continue/exit). Downstream subflow sentinels
|
||||
// should NOT fire.
|
||||
if (this.isEnclosingSentinel(node, targetId)) {
|
||||
readyNodes.push(targetId)
|
||||
}
|
||||
} else {
|
||||
readyNodes.push(targetId)
|
||||
}
|
||||
}
|
||||
@@ -145,6 +153,27 @@ export class EdgeManager {
|
||||
return targetNode ? this.isNodeReady(targetNode) : false
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if the cascade target sentinel belongs to the same subflow as the source node.
|
||||
* A condition inside a loop that hits a dead-end should still allow the enclosing
|
||||
* loop's sentinel to fire so the loop can continue or exit.
|
||||
*/
|
||||
private isEnclosingSentinel(sourceNode: DAGNode, sentinelId: string): boolean {
|
||||
const sentinel = this.dag.nodes.get(sentinelId)
|
||||
if (!sentinel?.metadata.isSentinel) return false
|
||||
|
||||
const sourceLoopId = sourceNode.metadata.loopId
|
||||
const sourceParallelId = sourceNode.metadata.parallelId
|
||||
const sentinelLoopId = sentinel.metadata.loopId
|
||||
const sentinelParallelId = sentinel.metadata.parallelId
|
||||
|
||||
if (sourceLoopId && sentinelLoopId && sourceLoopId === sentinelLoopId) return true
|
||||
if (sourceParallelId && sentinelParallelId && sourceParallelId === sentinelParallelId)
|
||||
return true
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
private isLoopEdge(handle?: string): boolean {
|
||||
return (
|
||||
handle === EDGE.LOOP_CONTINUE ||
|
||||
|
||||
@@ -555,7 +555,7 @@ describe('ConditionBlockHandler', () => {
|
||||
})
|
||||
|
||||
describe('Condition with no outgoing edge', () => {
|
||||
it('should return null path when condition matches but has no edge', async () => {
|
||||
it('should set selectedOption when condition matches but has no edge', async () => {
|
||||
const conditions = [
|
||||
{ id: 'cond1', title: 'if', value: 'true' },
|
||||
{ id: 'else1', title: 'else', value: '' },
|
||||
@@ -570,9 +570,52 @@ describe('ConditionBlockHandler', () => {
|
||||
|
||||
const result = await handler.execute(mockContext, mockBlock, inputs)
|
||||
|
||||
// Condition matches but no edge for it
|
||||
expect((result as any).conditionResult).toBe(false)
|
||||
expect((result as any).conditionResult).toBe(true)
|
||||
expect((result as any).selectedPath).toBeNull()
|
||||
expect((result as any).selectedOption).toBe('cond1')
|
||||
expect(mockContext.decisions.condition.get(mockBlock.id)).toBe('cond1')
|
||||
})
|
||||
|
||||
it('should set selectedOption when else is selected but has no edge', async () => {
|
||||
const conditions = [
|
||||
{ id: 'cond1', title: 'if', value: 'false' },
|
||||
{ id: 'else1', title: 'else', value: '' },
|
||||
]
|
||||
const inputs = { conditions: JSON.stringify(conditions) }
|
||||
|
||||
// Only the if branch has an edge; else has no outgoing connection
|
||||
mockContext.workflow!.connections = [
|
||||
{ source: mockSourceBlock.id, target: mockBlock.id },
|
||||
{ source: mockBlock.id, target: mockTargetBlock1.id, sourceHandle: 'condition-cond1' },
|
||||
]
|
||||
|
||||
const result = await handler.execute(mockContext, mockBlock, inputs)
|
||||
|
||||
expect((result as any).conditionResult).toBe(true)
|
||||
expect((result as any).selectedPath).toBeNull()
|
||||
expect((result as any).selectedOption).toBe('else1')
|
||||
expect(mockContext.decisions.condition.get(mockBlock.id)).toBe('else1')
|
||||
})
|
||||
|
||||
it('should deactivate if-path when else is selected with no edge', async () => {
|
||||
const conditions = [
|
||||
{ id: 'cond1', title: 'if', value: 'context.value > 100' },
|
||||
{ id: 'else1', title: 'else', value: '' },
|
||||
]
|
||||
const inputs = { conditions: JSON.stringify(conditions) }
|
||||
|
||||
// Only the if branch has an edge to a loop; else has nothing
|
||||
mockContext.workflow!.connections = [
|
||||
{ source: mockSourceBlock.id, target: mockBlock.id },
|
||||
{ source: mockBlock.id, target: mockTargetBlock1.id, sourceHandle: 'condition-cond1' },
|
||||
]
|
||||
|
||||
const result = await handler.execute(mockContext, mockBlock, inputs)
|
||||
|
||||
// Else was selected (value 10 is not > 100), so selectedOption should be 'else1'
|
||||
// This allows the edge manager to deactivate the cond1 edge
|
||||
expect((result as any).selectedOption).toBe('else1')
|
||||
expect((result as any).conditionResult).toBe(true)
|
||||
})
|
||||
})
|
||||
|
||||
@@ -602,6 +645,67 @@ describe('ConditionBlockHandler', () => {
|
||||
})
|
||||
})
|
||||
|
||||
describe('Source output filtering', () => {
|
||||
it('should not propagate error field from source block output', async () => {
|
||||
;(mockContext.blockStates as any).set(mockSourceBlock.id, {
|
||||
output: { value: 10, text: 'hello', error: 'upstream block failed' },
|
||||
executed: true,
|
||||
executionTime: 100,
|
||||
})
|
||||
|
||||
const conditions = [
|
||||
{ id: 'cond1', title: 'if', value: 'context.value > 5' },
|
||||
{ id: 'else1', title: 'else', value: '' },
|
||||
]
|
||||
const inputs = { conditions: JSON.stringify(conditions) }
|
||||
|
||||
const result = await handler.execute(mockContext, mockBlock, inputs)
|
||||
|
||||
expect((result as any).conditionResult).toBe(true)
|
||||
expect((result as any).selectedOption).toBe('cond1')
|
||||
expect(result).not.toHaveProperty('error')
|
||||
})
|
||||
|
||||
it('should not propagate _pauseMetadata from source block output', async () => {
|
||||
;(mockContext.blockStates as any).set(mockSourceBlock.id, {
|
||||
output: { value: 10, _pauseMetadata: { contextId: 'abc' } },
|
||||
executed: true,
|
||||
executionTime: 100,
|
||||
})
|
||||
|
||||
const conditions = [
|
||||
{ id: 'cond1', title: 'if', value: 'context.value > 5' },
|
||||
{ id: 'else1', title: 'else', value: '' },
|
||||
]
|
||||
const inputs = { conditions: JSON.stringify(conditions) }
|
||||
|
||||
const result = await handler.execute(mockContext, mockBlock, inputs)
|
||||
|
||||
expect((result as any).conditionResult).toBe(true)
|
||||
expect(result).not.toHaveProperty('_pauseMetadata')
|
||||
})
|
||||
|
||||
it('should still pass through non-control fields from source output', async () => {
|
||||
;(mockContext.blockStates as any).set(mockSourceBlock.id, {
|
||||
output: { value: 10, text: 'hello', customData: { nested: true } },
|
||||
executed: true,
|
||||
executionTime: 100,
|
||||
})
|
||||
|
||||
const conditions = [
|
||||
{ id: 'cond1', title: 'if', value: 'context.value > 5' },
|
||||
{ id: 'else1', title: 'else', value: '' },
|
||||
]
|
||||
const inputs = { conditions: JSON.stringify(conditions) }
|
||||
|
||||
const result = await handler.execute(mockContext, mockBlock, inputs)
|
||||
|
||||
expect((result as any).value).toBe(10)
|
||||
expect((result as any).text).toBe('hello')
|
||||
expect((result as any).customData).toEqual({ nested: true })
|
||||
})
|
||||
})
|
||||
|
||||
describe('Virtual block ID handling', () => {
|
||||
it('should use currentVirtualBlockId for decision key when available', async () => {
|
||||
mockContext.currentVirtualBlockId = 'virtual-block-123'
|
||||
|
||||
@@ -108,9 +108,7 @@ export class ConditionBlockHandler implements BlockHandler {
|
||||
const evalContext = this.buildEvaluationContext(ctx, sourceBlockId)
|
||||
const rawSourceOutput = sourceBlockId ? ctx.blockStates.get(sourceBlockId)?.output : null
|
||||
|
||||
// Filter out _pauseMetadata from source output to prevent the engine from
|
||||
// thinking this block is pausing (it was already resumed by the HITL block)
|
||||
const sourceOutput = this.filterPauseMetadata(rawSourceOutput)
|
||||
const sourceOutput = this.filterSourceOutput(rawSourceOutput)
|
||||
|
||||
const outgoingConnections = ctx.workflow?.connections.filter(
|
||||
(conn) => conn.source === baseBlockId
|
||||
@@ -124,7 +122,7 @@ export class ConditionBlockHandler implements BlockHandler {
|
||||
block.id
|
||||
)
|
||||
|
||||
if (!selectedConnection || !selectedCondition) {
|
||||
if (!selectedCondition) {
|
||||
return {
|
||||
...((sourceOutput as any) || {}),
|
||||
conditionResult: false,
|
||||
@@ -133,6 +131,17 @@ export class ConditionBlockHandler implements BlockHandler {
|
||||
}
|
||||
}
|
||||
|
||||
if (!selectedConnection) {
|
||||
const decisionKey = ctx.currentVirtualBlockId || block.id
|
||||
ctx.decisions.condition.set(decisionKey, selectedCondition.id)
|
||||
return {
|
||||
...((sourceOutput as any) || {}),
|
||||
conditionResult: true,
|
||||
selectedPath: null,
|
||||
selectedOption: selectedCondition.id,
|
||||
}
|
||||
}
|
||||
|
||||
const targetBlock = ctx.workflow?.blocks.find((b) => b.id === selectedConnection?.target)
|
||||
if (!targetBlock) {
|
||||
throw new Error(`Target block ${selectedConnection?.target} not found`)
|
||||
@@ -153,11 +162,11 @@ export class ConditionBlockHandler implements BlockHandler {
|
||||
}
|
||||
}
|
||||
|
||||
private filterPauseMetadata(output: any): any {
|
||||
private filterSourceOutput(output: any): any {
|
||||
if (!output || typeof output !== 'object') {
|
||||
return output
|
||||
}
|
||||
const { _pauseMetadata, ...rest } = output
|
||||
const { _pauseMetadata, error, ...rest } = output
|
||||
return rest
|
||||
}
|
||||
|
||||
@@ -223,8 +232,7 @@ export class ConditionBlockHandler implements BlockHandler {
|
||||
if (connection) {
|
||||
return { selectedConnection: connection, selectedCondition: condition }
|
||||
}
|
||||
// Condition is true but has no outgoing edge - branch ends gracefully
|
||||
return { selectedConnection: null, selectedCondition: null }
|
||||
return { selectedConnection: null, selectedCondition: condition }
|
||||
}
|
||||
} catch (error: any) {
|
||||
logger.error(`Failed to evaluate condition "${condition.title}": ${error.message}`)
|
||||
@@ -238,7 +246,7 @@ export class ConditionBlockHandler implements BlockHandler {
|
||||
if (elseConnection) {
|
||||
return { selectedConnection: elseConnection, selectedCondition: elseCondition }
|
||||
}
|
||||
return { selectedConnection: null, selectedCondition: null }
|
||||
return { selectedConnection: null, selectedCondition: elseCondition }
|
||||
}
|
||||
|
||||
return { selectedConnection: null, selectedCondition: null }
|
||||
|
||||
@@ -21,6 +21,7 @@ import {
|
||||
buildParallelSentinelStartId,
|
||||
buildSentinelEndId,
|
||||
buildSentinelStartId,
|
||||
emitEmptySubflowEvents,
|
||||
extractBaseBlockId,
|
||||
resolveArrayInput,
|
||||
validateMaxCount,
|
||||
@@ -596,6 +597,7 @@ export class LoopOrchestrator {
|
||||
if (!scope.items || scope.items.length === 0) {
|
||||
logger.info('ForEach loop has empty collection, skipping loop body', { loopId })
|
||||
this.state.setBlockOutput(loopId, { results: [] }, DEFAULTS.EXECUTION_TIME)
|
||||
emitEmptySubflowEvents(ctx, loopId, 'loop', this.contextExtensions)
|
||||
return false
|
||||
}
|
||||
return true
|
||||
@@ -605,6 +607,7 @@ export class LoopOrchestrator {
|
||||
if (scope.maxIterations === 0) {
|
||||
logger.info('For loop has 0 iterations, skipping loop body', { loopId })
|
||||
this.state.setBlockOutput(loopId, { results: [] }, DEFAULTS.EXECUTION_TIME)
|
||||
emitEmptySubflowEvents(ctx, loopId, 'loop', this.contextExtensions)
|
||||
return false
|
||||
}
|
||||
return true
|
||||
@@ -617,6 +620,8 @@ export class LoopOrchestrator {
|
||||
if (scope.loopType === 'while') {
|
||||
if (!scope.condition) {
|
||||
logger.warn('No condition defined for while loop', { loopId })
|
||||
this.state.setBlockOutput(loopId, { results: [] }, DEFAULTS.EXECUTION_TIME)
|
||||
emitEmptySubflowEvents(ctx, loopId, 'loop', this.contextExtensions)
|
||||
return false
|
||||
}
|
||||
|
||||
@@ -627,6 +632,11 @@ export class LoopOrchestrator {
|
||||
result,
|
||||
})
|
||||
|
||||
if (!result) {
|
||||
this.state.setBlockOutput(loopId, { results: [] }, DEFAULTS.EXECUTION_TIME)
|
||||
emitEmptySubflowEvents(ctx, loopId, 'loop', this.contextExtensions)
|
||||
}
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
|
||||
@@ -13,6 +13,7 @@ import { buildContainerIterationContext } from '@/executor/utils/iteration-conte
|
||||
import { ParallelExpander } from '@/executor/utils/parallel-expansion'
|
||||
import {
|
||||
addSubflowErrorLog,
|
||||
emitEmptySubflowEvents,
|
||||
extractBranchIndex,
|
||||
resolveArrayInput,
|
||||
validateMaxCount,
|
||||
@@ -108,6 +109,8 @@ export class ParallelOrchestrator {
|
||||
|
||||
this.state.setBlockOutput(parallelId, { results: [] })
|
||||
|
||||
emitEmptySubflowEvents(ctx, parallelId, 'parallel', this.contextExtensions)
|
||||
|
||||
logger.info('Parallel scope initialized with empty distribution, skipping body', {
|
||||
parallelId,
|
||||
branchCount: 0,
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import { LOOP, PARALLEL, REFERENCE } from '@/executor/constants'
|
||||
import { DEFAULTS, LOOP, PARALLEL, REFERENCE } from '@/executor/constants'
|
||||
import type { ContextExtensions } from '@/executor/execution/types'
|
||||
import { type BlockLog, type ExecutionContext, getNextExecutionOrder } from '@/executor/types'
|
||||
import { buildContainerIterationContext } from '@/executor/utils/iteration-context'
|
||||
import type { VariableResolver } from '@/executor/variables/resolver'
|
||||
|
||||
const BRANCH_PATTERN = new RegExp(`${PARALLEL.BRANCH.PREFIX}\\d+${PARALLEL.BRANCH.SUFFIX}$`)
|
||||
@@ -309,3 +310,54 @@ export function addSubflowErrorLog(
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Emits block log + SSE events for a loop/parallel that was skipped due to an
|
||||
* empty collection or false initial condition. This ensures the container block
|
||||
* appears in terminal logs, execution snapshots, and edge highlighting.
|
||||
*/
|
||||
export function emitEmptySubflowEvents(
|
||||
ctx: ExecutionContext,
|
||||
blockId: string,
|
||||
blockType: 'loop' | 'parallel',
|
||||
contextExtensions: ContextExtensions | null
|
||||
): void {
|
||||
const now = new Date().toISOString()
|
||||
const executionOrder = getNextExecutionOrder(ctx)
|
||||
const output = { results: [] }
|
||||
const block = ctx.workflow?.blocks.find((b) => b.id === blockId)
|
||||
const blockName = block?.metadata?.name ?? blockType
|
||||
const iterationContext = buildContainerIterationContext(ctx, blockId)
|
||||
|
||||
ctx.blockLogs.push({
|
||||
blockId,
|
||||
blockName,
|
||||
blockType,
|
||||
startedAt: now,
|
||||
endedAt: now,
|
||||
durationMs: DEFAULTS.EXECUTION_TIME,
|
||||
success: true,
|
||||
output,
|
||||
executionOrder,
|
||||
})
|
||||
|
||||
if (contextExtensions?.onBlockStart) {
|
||||
contextExtensions.onBlockStart(blockId, blockName, blockType, executionOrder)
|
||||
}
|
||||
|
||||
if (contextExtensions?.onBlockComplete) {
|
||||
contextExtensions.onBlockComplete(
|
||||
blockId,
|
||||
blockName,
|
||||
blockType,
|
||||
{
|
||||
output,
|
||||
executionTime: DEFAULTS.EXECUTION_TIME,
|
||||
startedAt: now,
|
||||
executionOrder,
|
||||
endedAt: now,
|
||||
},
|
||||
iterationContext
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -27,6 +27,7 @@ import { useSubBlockStore } from '@/stores/workflows/subblock/store'
|
||||
import { filterNewEdges, filterValidEdges, mergeSubblockState } from '@/stores/workflows/utils'
|
||||
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
|
||||
import type { BlockState, Loop, Parallel, Position } from '@/stores/workflows/workflow/types'
|
||||
import { findAllDescendantNodes, isBlockProtected } from '@/stores/workflows/workflow/utils'
|
||||
|
||||
const logger = createLogger('CollaborativeWorkflow')
|
||||
|
||||
@@ -748,9 +749,7 @@ export function useCollaborativeWorkflow() {
|
||||
const block = blocks[id]
|
||||
|
||||
if (block) {
|
||||
const parentId = block.data?.parentId
|
||||
const isParentLocked = parentId ? blocks[parentId]?.locked : false
|
||||
if (block.locked || isParentLocked) {
|
||||
if (isBlockProtected(id, blocks)) {
|
||||
logger.error('Cannot rename locked block')
|
||||
useNotificationStore.getState().addNotification({
|
||||
level: 'info',
|
||||
@@ -858,21 +857,21 @@ export function useCollaborativeWorkflow() {
|
||||
const previousStates: Record<string, boolean> = {}
|
||||
const validIds: string[] = []
|
||||
|
||||
// For each ID, collect non-locked blocks and their children for undo/redo
|
||||
// For each ID, collect non-locked blocks and their descendants for undo/redo
|
||||
for (const id of ids) {
|
||||
const block = currentBlocks[id]
|
||||
if (!block) continue
|
||||
|
||||
// Skip locked blocks
|
||||
if (block.locked) continue
|
||||
// Skip protected blocks (locked or inside a locked ancestor)
|
||||
if (isBlockProtected(id, currentBlocks)) continue
|
||||
validIds.push(id)
|
||||
previousStates[id] = block.enabled
|
||||
|
||||
// If it's a loop or parallel, also capture children's previous states for undo/redo
|
||||
// If it's a loop or parallel, also capture descendants' previous states for undo/redo
|
||||
if (block.type === 'loop' || block.type === 'parallel') {
|
||||
Object.entries(currentBlocks).forEach(([blockId, b]) => {
|
||||
if (b.data?.parentId === id && !b.locked) {
|
||||
previousStates[blockId] = b.enabled
|
||||
findAllDescendantNodes(id, currentBlocks).forEach((descId) => {
|
||||
if (!isBlockProtected(descId, currentBlocks)) {
|
||||
previousStates[descId] = currentBlocks[descId]?.enabled ?? true
|
||||
}
|
||||
})
|
||||
}
|
||||
@@ -1038,21 +1037,12 @@ export function useCollaborativeWorkflow() {
|
||||
|
||||
const blocks = useWorkflowStore.getState().blocks
|
||||
|
||||
const isProtected = (blockId: string): boolean => {
|
||||
const block = blocks[blockId]
|
||||
if (!block) return false
|
||||
if (block.locked) return true
|
||||
const parentId = block.data?.parentId
|
||||
if (parentId && blocks[parentId]?.locked) return true
|
||||
return false
|
||||
}
|
||||
|
||||
const previousStates: Record<string, boolean> = {}
|
||||
const validIds: string[] = []
|
||||
|
||||
for (const id of ids) {
|
||||
const block = blocks[id]
|
||||
if (block && !isProtected(id)) {
|
||||
if (block && !isBlockProtected(id, blocks)) {
|
||||
previousStates[id] = block.horizontalHandles ?? false
|
||||
validIds.push(id)
|
||||
}
|
||||
@@ -1100,10 +1090,8 @@ export function useCollaborativeWorkflow() {
|
||||
previousStates[id] = block.locked ?? false
|
||||
|
||||
if (block.type === 'loop' || block.type === 'parallel') {
|
||||
Object.entries(currentBlocks).forEach(([blockId, b]) => {
|
||||
if (b.data?.parentId === id) {
|
||||
previousStates[blockId] = b.locked ?? false
|
||||
}
|
||||
findAllDescendantNodes(id, currentBlocks).forEach((descId) => {
|
||||
previousStates[descId] = currentBlocks[descId]?.locked ?? false
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -761,9 +761,17 @@ function groupIterationBlocksRecursive(
|
||||
}
|
||||
}
|
||||
|
||||
// Non-iteration spans that aren't consumed container sentinels go straight to result
|
||||
const containerIdsWithIterations = new Set<string>()
|
||||
for (const span of iterationSpans) {
|
||||
const outermost = getOutermostContainer(span)
|
||||
if (outermost) containerIdsWithIterations.add(outermost.containerId)
|
||||
}
|
||||
|
||||
const nonContainerSpans = nonIterationSpans.filter(
|
||||
(span) => (span.type !== 'parallel' && span.type !== 'loop') || span.status === 'error'
|
||||
(span) =>
|
||||
(span.type !== 'parallel' && span.type !== 'loop') ||
|
||||
span.status === 'error' ||
|
||||
(span.blockId && !containerIdsWithIterations.has(span.blockId))
|
||||
)
|
||||
|
||||
if (iterationSpans.length === 0) {
|
||||
|
||||
@@ -23,6 +23,16 @@ export function startMemoryTelemetry(intervalMs = 60_000) {
|
||||
started = true
|
||||
|
||||
const timer = setInterval(() => {
|
||||
// Trigger opportunistic (non-blocking) garbage collection if running on Bun.
|
||||
// This signals JSC GC + mimalloc page purge without blocking the event loop,
|
||||
// helping reclaim RSS that mimalloc otherwise retains under sustained load.
|
||||
const bunGlobal = (globalThis as Record<string, unknown>).Bun as
|
||||
| { gc?: (force: boolean) => void }
|
||||
| undefined
|
||||
if (typeof bunGlobal?.gc === 'function') {
|
||||
bunGlobal.gc(false)
|
||||
}
|
||||
|
||||
const mem = process.memoryUsage()
|
||||
const heap = v8.getHeapStatistics()
|
||||
|
||||
|
||||
@@ -759,6 +759,7 @@ async function markEmailAsRead(accessToken: string, messageId: string) {
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
await response.body?.cancel().catch(() => {})
|
||||
throw new Error(
|
||||
`Failed to mark email ${messageId} as read: ${response.status} ${response.statusText}`
|
||||
)
|
||||
|
||||
183
apps/sim/lib/workflows/migrations/subblock-migrations.test.ts
Normal file
183
apps/sim/lib/workflows/migrations/subblock-migrations.test.ts
Normal file
@@ -0,0 +1,183 @@
|
||||
/**
|
||||
* @vitest-environment node
|
||||
*/
|
||||
import { describe, expect, it } from 'vitest'
|
||||
import type { BlockState } from '@/stores/workflows/workflow/types'
|
||||
import { migrateSubblockIds } from './subblock-migrations'
|
||||
|
||||
function makeBlock(overrides: Partial<BlockState> & { type: string }): BlockState {
|
||||
return {
|
||||
id: 'block-1',
|
||||
name: 'Test',
|
||||
position: { x: 0, y: 0 },
|
||||
subBlocks: {},
|
||||
outputs: {},
|
||||
enabled: true,
|
||||
...overrides,
|
||||
} as BlockState
|
||||
}
|
||||
|
||||
describe('migrateSubblockIds', () => {
|
||||
describe('knowledge block', () => {
|
||||
it('should rename knowledgeBaseId to knowledgeBaseSelector', () => {
|
||||
const input: Record<string, BlockState> = {
|
||||
b1: makeBlock({
|
||||
type: 'knowledge',
|
||||
subBlocks: {
|
||||
operation: { id: 'operation', type: 'dropdown', value: 'search' },
|
||||
knowledgeBaseId: {
|
||||
id: 'knowledgeBaseId',
|
||||
type: 'knowledge-base-selector',
|
||||
value: 'kb-uuid-123',
|
||||
},
|
||||
},
|
||||
}),
|
||||
}
|
||||
|
||||
const { blocks, migrated } = migrateSubblockIds(input)
|
||||
|
||||
expect(migrated).toBe(true)
|
||||
expect(blocks.b1.subBlocks.knowledgeBaseSelector).toEqual({
|
||||
id: 'knowledgeBaseSelector',
|
||||
type: 'knowledge-base-selector',
|
||||
value: 'kb-uuid-123',
|
||||
})
|
||||
expect(blocks.b1.subBlocks.knowledgeBaseId).toBeUndefined()
|
||||
expect(blocks.b1.subBlocks.operation.value).toBe('search')
|
||||
})
|
||||
|
||||
it('should prefer new key when both old and new exist', () => {
|
||||
const input: Record<string, BlockState> = {
|
||||
b1: makeBlock({
|
||||
type: 'knowledge',
|
||||
subBlocks: {
|
||||
knowledgeBaseId: {
|
||||
id: 'knowledgeBaseId',
|
||||
type: 'knowledge-base-selector',
|
||||
value: 'stale-kb',
|
||||
},
|
||||
knowledgeBaseSelector: {
|
||||
id: 'knowledgeBaseSelector',
|
||||
type: 'knowledge-base-selector',
|
||||
value: 'fresh-kb',
|
||||
},
|
||||
},
|
||||
}),
|
||||
}
|
||||
|
||||
const { blocks, migrated } = migrateSubblockIds(input)
|
||||
|
||||
expect(migrated).toBe(true)
|
||||
expect(blocks.b1.subBlocks.knowledgeBaseSelector.value).toBe('fresh-kb')
|
||||
expect(blocks.b1.subBlocks.knowledgeBaseId).toBeUndefined()
|
||||
})
|
||||
|
||||
it('should not touch blocks that already use the new key', () => {
|
||||
const input: Record<string, BlockState> = {
|
||||
b1: makeBlock({
|
||||
type: 'knowledge',
|
||||
subBlocks: {
|
||||
knowledgeBaseSelector: {
|
||||
id: 'knowledgeBaseSelector',
|
||||
type: 'knowledge-base-selector',
|
||||
value: 'kb-uuid',
|
||||
},
|
||||
},
|
||||
}),
|
||||
}
|
||||
|
||||
const { blocks, migrated } = migrateSubblockIds(input)
|
||||
|
||||
expect(migrated).toBe(false)
|
||||
expect(blocks.b1.subBlocks.knowledgeBaseSelector.value).toBe('kb-uuid')
|
||||
})
|
||||
})
|
||||
|
||||
it('should not mutate the input blocks', () => {
|
||||
const input: Record<string, BlockState> = {
|
||||
b1: makeBlock({
|
||||
type: 'knowledge',
|
||||
subBlocks: {
|
||||
knowledgeBaseId: {
|
||||
id: 'knowledgeBaseId',
|
||||
type: 'knowledge-base-selector',
|
||||
value: 'kb-uuid',
|
||||
},
|
||||
},
|
||||
}),
|
||||
}
|
||||
|
||||
const { blocks } = migrateSubblockIds(input)
|
||||
|
||||
expect(input.b1.subBlocks.knowledgeBaseId).toBeDefined()
|
||||
expect(blocks.b1.subBlocks.knowledgeBaseSelector).toBeDefined()
|
||||
expect(blocks).not.toBe(input)
|
||||
})
|
||||
|
||||
it('should skip blocks with no registered migrations', () => {
|
||||
const input: Record<string, BlockState> = {
|
||||
b1: makeBlock({
|
||||
type: 'function',
|
||||
subBlocks: {
|
||||
code: { id: 'code', type: 'code', value: 'console.log("hi")' },
|
||||
},
|
||||
}),
|
||||
}
|
||||
|
||||
const { blocks, migrated } = migrateSubblockIds(input)
|
||||
|
||||
expect(migrated).toBe(false)
|
||||
expect(blocks.b1.subBlocks.code.value).toBe('console.log("hi")')
|
||||
})
|
||||
|
||||
it('should migrate multiple blocks in one pass', () => {
|
||||
const input: Record<string, BlockState> = {
|
||||
b1: makeBlock({
|
||||
id: 'b1',
|
||||
type: 'knowledge',
|
||||
subBlocks: {
|
||||
knowledgeBaseId: {
|
||||
id: 'knowledgeBaseId',
|
||||
type: 'knowledge-base-selector',
|
||||
value: 'kb-1',
|
||||
},
|
||||
},
|
||||
}),
|
||||
b2: makeBlock({
|
||||
id: 'b2',
|
||||
type: 'knowledge',
|
||||
subBlocks: {
|
||||
knowledgeBaseId: {
|
||||
id: 'knowledgeBaseId',
|
||||
type: 'knowledge-base-selector',
|
||||
value: 'kb-2',
|
||||
},
|
||||
},
|
||||
}),
|
||||
b3: makeBlock({
|
||||
id: 'b3',
|
||||
type: 'function',
|
||||
subBlocks: {
|
||||
code: { id: 'code', type: 'code', value: '' },
|
||||
},
|
||||
}),
|
||||
}
|
||||
|
||||
const { blocks, migrated } = migrateSubblockIds(input)
|
||||
|
||||
expect(migrated).toBe(true)
|
||||
expect(blocks.b1.subBlocks.knowledgeBaseSelector.value).toBe('kb-1')
|
||||
expect(blocks.b2.subBlocks.knowledgeBaseSelector.value).toBe('kb-2')
|
||||
expect(blocks.b3.subBlocks.code).toBeDefined()
|
||||
})
|
||||
|
||||
it('should handle blocks with empty subBlocks', () => {
|
||||
const input: Record<string, BlockState> = {
|
||||
b1: makeBlock({ type: 'knowledge', subBlocks: {} }),
|
||||
}
|
||||
|
||||
const { migrated } = migrateSubblockIds(input)
|
||||
|
||||
expect(migrated).toBe(false)
|
||||
})
|
||||
})
|
||||
90
apps/sim/lib/workflows/migrations/subblock-migrations.ts
Normal file
90
apps/sim/lib/workflows/migrations/subblock-migrations.ts
Normal file
@@ -0,0 +1,90 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import type { BlockState } from '@/stores/workflows/workflow/types'
|
||||
|
||||
const logger = createLogger('SubblockMigrations')
|
||||
|
||||
/**
|
||||
* Maps old subblock IDs to their current equivalents per block type.
|
||||
*
|
||||
* When a subblock is renamed in a block definition, old deployed/saved states
|
||||
* still carry the value under the previous key. Without this mapping the
|
||||
* serializer silently drops the value, breaking execution.
|
||||
*
|
||||
* Format: { blockType: { oldSubblockId: newSubblockId } }
|
||||
*/
|
||||
export const SUBBLOCK_ID_MIGRATIONS: Record<string, Record<string, string>> = {
|
||||
knowledge: {
|
||||
knowledgeBaseId: 'knowledgeBaseSelector',
|
||||
},
|
||||
}
|
||||
|
||||
/**
|
||||
* Migrates legacy subblock IDs inside a single block's subBlocks map.
|
||||
* Returns a new subBlocks record if anything changed, or the original if not.
|
||||
*/
|
||||
function migrateBlockSubblockIds(
|
||||
subBlocks: Record<string, BlockState['subBlocks'][string]>,
|
||||
renames: Record<string, string>
|
||||
): { subBlocks: Record<string, BlockState['subBlocks'][string]>; migrated: boolean } {
|
||||
let migrated = false
|
||||
|
||||
for (const oldId of Object.keys(renames)) {
|
||||
if (oldId in subBlocks) {
|
||||
migrated = true
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if (!migrated) return { subBlocks, migrated: false }
|
||||
|
||||
const result = { ...subBlocks }
|
||||
|
||||
for (const [oldId, newId] of Object.entries(renames)) {
|
||||
if (!(oldId in result)) continue
|
||||
|
||||
if (newId in result) {
|
||||
delete result[oldId]
|
||||
continue
|
||||
}
|
||||
|
||||
const oldEntry = result[oldId]
|
||||
result[newId] = { ...oldEntry, id: newId }
|
||||
delete result[oldId]
|
||||
}
|
||||
|
||||
return { subBlocks: result, migrated: true }
|
||||
}
|
||||
|
||||
/**
|
||||
* Applies subblock-ID migrations to every block in a workflow.
|
||||
* Returns a new blocks record with migrated subBlocks where needed.
|
||||
*/
|
||||
export function migrateSubblockIds(blocks: Record<string, BlockState>): {
|
||||
blocks: Record<string, BlockState>
|
||||
migrated: boolean
|
||||
} {
|
||||
let anyMigrated = false
|
||||
const result: Record<string, BlockState> = {}
|
||||
|
||||
for (const [blockId, block] of Object.entries(blocks)) {
|
||||
const renames = SUBBLOCK_ID_MIGRATIONS[block.type]
|
||||
if (!renames || !block.subBlocks) {
|
||||
result[blockId] = block
|
||||
continue
|
||||
}
|
||||
|
||||
const { subBlocks, migrated } = migrateBlockSubblockIds(block.subBlocks, renames)
|
||||
if (migrated) {
|
||||
logger.info('Migrated legacy subblock IDs', {
|
||||
blockId: block.id,
|
||||
blockType: block.type,
|
||||
})
|
||||
anyMigrated = true
|
||||
result[blockId] = { ...block, subBlocks }
|
||||
} else {
|
||||
result[blockId] = block
|
||||
}
|
||||
}
|
||||
|
||||
return { blocks: result, migrated: anyMigrated }
|
||||
}
|
||||
@@ -14,6 +14,7 @@ import { and, desc, eq, inArray, sql } from 'drizzle-orm'
|
||||
import type { Edge } from 'reactflow'
|
||||
import { v4 as uuidv4 } from 'uuid'
|
||||
import type { DbOrTx } from '@/lib/db/types'
|
||||
import { migrateSubblockIds } from '@/lib/workflows/migrations/subblock-migrations'
|
||||
import { sanitizeAgentToolsInBlocks } from '@/lib/workflows/sanitization/validation'
|
||||
import type { BlockState, Loop, Parallel, WorkflowState } from '@/stores/workflows/workflow/types'
|
||||
import { SUBFLOW_TYPES } from '@/stores/workflows/workflow/types'
|
||||
@@ -113,10 +114,10 @@ export async function loadDeployedWorkflowState(
|
||||
resolvedWorkspaceId = wfRow?.workspaceId ?? undefined
|
||||
}
|
||||
|
||||
const resolvedBlocks = state.blocks || {}
|
||||
const { blocks: migratedBlocks } = resolvedWorkspaceId
|
||||
? await migrateCredentialIds(resolvedBlocks, resolvedWorkspaceId)
|
||||
: { blocks: resolvedBlocks }
|
||||
const { blocks: migratedBlocks } = await applyBlockMigrations(
|
||||
state.blocks || {},
|
||||
resolvedWorkspaceId
|
||||
)
|
||||
|
||||
return {
|
||||
blocks: migratedBlocks,
|
||||
@@ -133,6 +134,50 @@ export async function loadDeployedWorkflowState(
|
||||
}
|
||||
}
|
||||
|
||||
interface MigrationContext {
|
||||
blocks: Record<string, BlockState>
|
||||
workspaceId?: string
|
||||
migrated: boolean
|
||||
}
|
||||
|
||||
type BlockMigration = (ctx: MigrationContext) => MigrationContext | Promise<MigrationContext>
|
||||
|
||||
function createMigrationPipeline(migrations: BlockMigration[]) {
|
||||
return async (
|
||||
blocks: Record<string, BlockState>,
|
||||
workspaceId?: string
|
||||
): Promise<{ blocks: Record<string, BlockState>; migrated: boolean }> => {
|
||||
let ctx: MigrationContext = { blocks, workspaceId, migrated: false }
|
||||
for (const migration of migrations) {
|
||||
ctx = await migration(ctx)
|
||||
}
|
||||
return { blocks: ctx.blocks, migrated: ctx.migrated }
|
||||
}
|
||||
}
|
||||
|
||||
const applyBlockMigrations = createMigrationPipeline([
|
||||
(ctx) => {
|
||||
const { blocks } = sanitizeAgentToolsInBlocks(ctx.blocks)
|
||||
return { ...ctx, blocks }
|
||||
},
|
||||
|
||||
(ctx) => ({
|
||||
...ctx,
|
||||
blocks: migrateAgentBlocksToMessagesFormat(ctx.blocks),
|
||||
}),
|
||||
|
||||
async (ctx) => {
|
||||
if (!ctx.workspaceId) return ctx
|
||||
const { blocks, migrated } = await migrateCredentialIds(ctx.blocks, ctx.workspaceId)
|
||||
return { ...ctx, blocks, migrated: ctx.migrated || migrated }
|
||||
},
|
||||
|
||||
(ctx) => {
|
||||
const { blocks, migrated } = migrateSubblockIds(ctx.blocks)
|
||||
return { ...ctx, blocks, migrated: ctx.migrated || migrated }
|
||||
},
|
||||
])
|
||||
|
||||
/**
|
||||
* Migrates agent blocks from old format (systemPrompt/userPrompt) to new format (messages array)
|
||||
* This ensures backward compatibility for workflows created before the messages-input refactor.
|
||||
@@ -356,22 +401,16 @@ export async function loadWorkflowFromNormalizedTables(
|
||||
blocksMap[block.id] = assembled
|
||||
})
|
||||
|
||||
// Sanitize any invalid custom tools in agent blocks to prevent client crashes
|
||||
const { blocks: sanitizedBlocks } = sanitizeAgentToolsInBlocks(blocksMap)
|
||||
const { blocks: finalBlocks, migrated } = await applyBlockMigrations(
|
||||
blocksMap,
|
||||
workflowRow?.workspaceId ?? undefined
|
||||
)
|
||||
|
||||
// Migrate old agent block format (systemPrompt/userPrompt) to new messages array format
|
||||
const migratedBlocks = migrateAgentBlocksToMessagesFormat(sanitizedBlocks)
|
||||
|
||||
// Migrate legacy account.id → credential.id in OAuth subblocks
|
||||
const { blocks: credMigratedBlocks, migrated: credentialsMigrated } = workflowRow?.workspaceId
|
||||
? await migrateCredentialIds(migratedBlocks, workflowRow.workspaceId)
|
||||
: { blocks: migratedBlocks, migrated: false }
|
||||
|
||||
if (credentialsMigrated) {
|
||||
if (migrated) {
|
||||
Promise.resolve().then(async () => {
|
||||
try {
|
||||
for (const [blockId, block] of Object.entries(credMigratedBlocks)) {
|
||||
if (block.subBlocks !== migratedBlocks[blockId]?.subBlocks) {
|
||||
for (const [blockId, block] of Object.entries(finalBlocks)) {
|
||||
if (block.subBlocks !== blocksMap[blockId]?.subBlocks) {
|
||||
await db
|
||||
.update(workflowBlocks)
|
||||
.set({ subBlocks: block.subBlocks, updatedAt: new Date() })
|
||||
@@ -381,7 +420,7 @@ export async function loadWorkflowFromNormalizedTables(
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
logger.warn('Failed to persist credential ID migration', { workflowId, error: err })
|
||||
logger.warn('Failed to persist block migrations', { workflowId, error: err })
|
||||
}
|
||||
})
|
||||
}
|
||||
@@ -422,13 +461,13 @@ export async function loadWorkflowFromNormalizedTables(
|
||||
forEachItems: (config as Loop).forEachItems ?? '',
|
||||
whileCondition: (config as Loop).whileCondition ?? '',
|
||||
doWhileCondition: (config as Loop).doWhileCondition ?? '',
|
||||
enabled: credMigratedBlocks[subflow.id]?.enabled ?? true,
|
||||
enabled: finalBlocks[subflow.id]?.enabled ?? true,
|
||||
}
|
||||
loops[subflow.id] = loop
|
||||
|
||||
if (credMigratedBlocks[subflow.id]) {
|
||||
const block = credMigratedBlocks[subflow.id]
|
||||
credMigratedBlocks[subflow.id] = {
|
||||
if (finalBlocks[subflow.id]) {
|
||||
const block = finalBlocks[subflow.id]
|
||||
finalBlocks[subflow.id] = {
|
||||
...block,
|
||||
data: {
|
||||
...block.data,
|
||||
@@ -449,7 +488,7 @@ export async function loadWorkflowFromNormalizedTables(
|
||||
(config as Parallel).parallelType === 'collection'
|
||||
? (config as Parallel).parallelType
|
||||
: 'count',
|
||||
enabled: credMigratedBlocks[subflow.id]?.enabled ?? true,
|
||||
enabled: finalBlocks[subflow.id]?.enabled ?? true,
|
||||
}
|
||||
parallels[subflow.id] = parallel
|
||||
} else {
|
||||
@@ -458,7 +497,7 @@ export async function loadWorkflowFromNormalizedTables(
|
||||
})
|
||||
|
||||
return {
|
||||
blocks: credMigratedBlocks,
|
||||
blocks: finalBlocks,
|
||||
edges: edgesArray,
|
||||
loops,
|
||||
parallels,
|
||||
|
||||
@@ -95,6 +95,7 @@ const nextConfig: NextConfig = {
|
||||
optimizeCss: true,
|
||||
turbopackSourceMaps: false,
|
||||
turbopackFileSystemCacheForDev: true,
|
||||
preloadEntriesOnStart: false,
|
||||
},
|
||||
...(isDev && {
|
||||
allowedDevOrigins: [
|
||||
|
||||
@@ -122,6 +122,40 @@ export const PROVIDER_DEFINITIONS: Record<string, ProviderDefinition> = {
|
||||
},
|
||||
contextWindow: 128000,
|
||||
},
|
||||
{
|
||||
id: 'gpt-5.4',
|
||||
pricing: {
|
||||
input: 2.5,
|
||||
cachedInput: 0.25,
|
||||
output: 15.0,
|
||||
updatedAt: '2026-03-05',
|
||||
},
|
||||
capabilities: {
|
||||
reasoningEffort: {
|
||||
values: ['none', 'low', 'medium', 'high', 'xhigh'],
|
||||
},
|
||||
verbosity: {
|
||||
values: ['low', 'medium', 'high'],
|
||||
},
|
||||
maxOutputTokens: 128000,
|
||||
},
|
||||
contextWindow: 1050000,
|
||||
},
|
||||
{
|
||||
id: 'gpt-5.4-pro',
|
||||
pricing: {
|
||||
input: 30.0,
|
||||
output: 180.0,
|
||||
updatedAt: '2026-03-05',
|
||||
},
|
||||
capabilities: {
|
||||
reasoningEffort: {
|
||||
values: ['medium', 'high', 'xhigh'],
|
||||
},
|
||||
maxOutputTokens: 128000,
|
||||
},
|
||||
contextWindow: 1050000,
|
||||
},
|
||||
{
|
||||
id: 'gpt-5.2',
|
||||
pricing: {
|
||||
@@ -493,6 +527,25 @@ export const PROVIDER_DEFINITIONS: Record<string, ProviderDefinition> = {
|
||||
},
|
||||
contextWindow: 128000,
|
||||
},
|
||||
{
|
||||
id: 'azure/gpt-5.4',
|
||||
pricing: {
|
||||
input: 2.5,
|
||||
cachedInput: 0.25,
|
||||
output: 15.0,
|
||||
updatedAt: '2026-03-05',
|
||||
},
|
||||
capabilities: {
|
||||
reasoningEffort: {
|
||||
values: ['none', 'low', 'medium', 'high', 'xhigh'],
|
||||
},
|
||||
verbosity: {
|
||||
values: ['low', 'medium', 'high'],
|
||||
},
|
||||
maxOutputTokens: 128000,
|
||||
},
|
||||
contextWindow: 1050000,
|
||||
},
|
||||
{
|
||||
id: 'azure/gpt-5.2',
|
||||
pricing: {
|
||||
|
||||
@@ -523,13 +523,16 @@ describe('Model Capabilities', () => {
|
||||
|
||||
it.concurrent('should have GPT-5 models in both reasoning effort and verbosity arrays', () => {
|
||||
const gpt5ModelsWithReasoningEffort = MODELS_WITH_REASONING_EFFORT.filter(
|
||||
(m) => m.includes('gpt-5') && !m.includes('chat-latest')
|
||||
(m) => m.includes('gpt-5') && !m.includes('chat-latest') && !m.includes('gpt-5.4-pro')
|
||||
)
|
||||
const gpt5ModelsWithVerbosity = MODELS_WITH_VERBOSITY.filter(
|
||||
(m) => m.includes('gpt-5') && !m.includes('chat-latest')
|
||||
)
|
||||
expect(gpt5ModelsWithReasoningEffort.sort()).toEqual(gpt5ModelsWithVerbosity.sort())
|
||||
|
||||
expect(MODELS_WITH_REASONING_EFFORT).toContain('gpt-5.4-pro')
|
||||
expect(MODELS_WITH_VERBOSITY).not.toContain('gpt-5.4-pro')
|
||||
|
||||
expect(MODELS_WITH_REASONING_EFFORT).toContain('o1')
|
||||
expect(MODELS_WITH_VERBOSITY).not.toContain('o1')
|
||||
})
|
||||
|
||||
170
apps/sim/scripts/check-subblock-id-stability.ts
Normal file
170
apps/sim/scripts/check-subblock-id-stability.ts
Normal file
@@ -0,0 +1,170 @@
|
||||
#!/usr/bin/env bun
|
||||
|
||||
/**
|
||||
* CI check: detect subblock ID renames that would break deployed workflows.
|
||||
*
|
||||
* Compares the current block registry against the parent commit.
|
||||
* If any subblock ID was removed from a block, it must have a corresponding
|
||||
* entry in SUBBLOCK_ID_MIGRATIONS — otherwise this script exits non-zero.
|
||||
*
|
||||
* Usage:
|
||||
* bun run apps/sim/scripts/check-subblock-id-stability.ts [base-ref]
|
||||
*
|
||||
* base-ref defaults to HEAD~1. In a PR CI pipeline, pass the merge base:
|
||||
* bun run apps/sim/scripts/check-subblock-id-stability.ts origin/main
|
||||
*/
|
||||
|
||||
import { execSync } from 'child_process'
|
||||
import { SUBBLOCK_ID_MIGRATIONS } from '@/lib/workflows/migrations/subblock-migrations'
|
||||
import { getAllBlocks } from '@/blocks/registry'
|
||||
|
||||
const baseRef = process.argv[2] || 'HEAD~1'
|
||||
|
||||
const gitRoot = execSync('git rev-parse --show-toplevel', { encoding: 'utf-8' }).trim()
|
||||
const gitOpts = { encoding: 'utf-8' as const, cwd: gitRoot }
|
||||
|
||||
type IdMap = Record<string, Set<string>>
|
||||
|
||||
/**
|
||||
* Extracts subblock IDs from the `subBlocks: [ ... ]` section of a block
|
||||
* definition. Only grabs the top-level `id:` of each subblock object —
|
||||
* ignores nested IDs inside `options`, `columns`, etc.
|
||||
*/
|
||||
function extractSubBlockIds(source: string): string[] {
|
||||
const startIdx = source.indexOf('subBlocks:')
|
||||
if (startIdx === -1) return []
|
||||
|
||||
const bracketStart = source.indexOf('[', startIdx)
|
||||
if (bracketStart === -1) return []
|
||||
|
||||
const ids: string[] = []
|
||||
let braceDepth = 0
|
||||
let bracketDepth = 0
|
||||
let i = bracketStart + 1
|
||||
bracketDepth = 1
|
||||
|
||||
while (i < source.length && bracketDepth > 0) {
|
||||
const ch = source[i]
|
||||
|
||||
if (ch === '[') bracketDepth++
|
||||
else if (ch === ']') {
|
||||
bracketDepth--
|
||||
if (bracketDepth === 0) break
|
||||
} else if (ch === '{') {
|
||||
braceDepth++
|
||||
if (braceDepth === 1) {
|
||||
const ahead = source.slice(i, i + 200)
|
||||
const idMatch = ahead.match(/{\s*(?:\/\/[^\n]*\n\s*)*id:\s*['"]([^'"]+)['"]/)
|
||||
if (idMatch) {
|
||||
ids.push(idMatch[1])
|
||||
}
|
||||
}
|
||||
} else if (ch === '}') {
|
||||
braceDepth--
|
||||
}
|
||||
|
||||
i++
|
||||
}
|
||||
|
||||
return ids
|
||||
}
|
||||
|
||||
function getCurrentIds(): IdMap {
|
||||
const map: IdMap = {}
|
||||
for (const block of getAllBlocks()) {
|
||||
map[block.type] = new Set(block.subBlocks.map((sb) => sb.id))
|
||||
}
|
||||
return map
|
||||
}
|
||||
|
||||
function getPreviousIds(): IdMap {
|
||||
const registryPath = 'apps/sim/blocks/registry.ts'
|
||||
const blocksDir = 'apps/sim/blocks/blocks'
|
||||
|
||||
let hasChanges = false
|
||||
try {
|
||||
const diff = execSync(
|
||||
`git diff --name-only ${baseRef} HEAD -- ${registryPath} ${blocksDir}`,
|
||||
gitOpts
|
||||
).trim()
|
||||
hasChanges = diff.length > 0
|
||||
} catch {
|
||||
console.log('⚠ Could not diff against base ref — skipping check')
|
||||
process.exit(0)
|
||||
}
|
||||
|
||||
if (!hasChanges) {
|
||||
console.log('✓ No block definition changes detected — nothing to check')
|
||||
process.exit(0)
|
||||
}
|
||||
|
||||
const map: IdMap = {}
|
||||
|
||||
try {
|
||||
const blockFiles = execSync(`git ls-tree -r --name-only ${baseRef} -- ${blocksDir}`, gitOpts)
|
||||
.trim()
|
||||
.split('\n')
|
||||
.filter((f) => f.endsWith('.ts') && !f.endsWith('.test.ts'))
|
||||
|
||||
for (const filePath of blockFiles) {
|
||||
let content: string
|
||||
try {
|
||||
content = execSync(`git show ${baseRef}:${filePath}`, gitOpts)
|
||||
} catch {
|
||||
continue
|
||||
}
|
||||
|
||||
const typeMatch = content.match(/BlockConfig\s*=\s*\{[\s\S]*?type:\s*['"]([^'"]+)['"]/)
|
||||
if (!typeMatch) continue
|
||||
const blockType = typeMatch[1]
|
||||
|
||||
const ids = extractSubBlockIds(content)
|
||||
if (ids.length === 0) continue
|
||||
|
||||
map[blockType] = new Set(ids)
|
||||
}
|
||||
} catch (err) {
|
||||
console.log(`⚠ Could not read previous block files from ${baseRef} — skipping check`, err)
|
||||
process.exit(0)
|
||||
}
|
||||
|
||||
return map
|
||||
}
|
||||
|
||||
const previous = getPreviousIds()
|
||||
const current = getCurrentIds()
|
||||
const errors: string[] = []
|
||||
|
||||
for (const [blockType, prevIds] of Object.entries(previous)) {
|
||||
const currIds = current[blockType]
|
||||
if (!currIds) continue
|
||||
|
||||
const migrations = SUBBLOCK_ID_MIGRATIONS[blockType] ?? {}
|
||||
|
||||
for (const oldId of prevIds) {
|
||||
if (currIds.has(oldId)) continue
|
||||
|
||||
if (oldId in migrations) continue
|
||||
|
||||
errors.push(
|
||||
`Block "${blockType}": subblock ID "${oldId}" was removed.\n` +
|
||||
` → Add a migration in SUBBLOCK_ID_MIGRATIONS (lib/workflows/migrations/subblock-migrations.ts)\n` +
|
||||
` mapping "${oldId}" to its replacement ID.`
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
if (errors.length > 0) {
|
||||
console.error('✗ Subblock ID stability check FAILED\n')
|
||||
console.error(
|
||||
'Removing subblock IDs breaks deployed workflows.\n' +
|
||||
'Either revert the rename or add a migration entry.\n'
|
||||
)
|
||||
for (const err of errors) {
|
||||
console.error(` ${err}\n`)
|
||||
}
|
||||
process.exit(1)
|
||||
} else {
|
||||
console.log('✓ Subblock ID stability check passed')
|
||||
process.exit(0)
|
||||
}
|
||||
@@ -39,6 +39,56 @@ const db = socketDb
|
||||
const DEFAULT_LOOP_ITERATIONS = 5
|
||||
const DEFAULT_PARALLEL_COUNT = 5
|
||||
|
||||
/** Minimal block shape needed for protection and descendant checks */
|
||||
interface DbBlockRef {
|
||||
id: string
|
||||
locked?: boolean | null
|
||||
data: unknown
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if a block is protected (locked or inside a locked ancestor).
|
||||
* Works with raw DB records.
|
||||
*/
|
||||
function isDbBlockProtected(blockId: string, blocksById: Record<string, DbBlockRef>): boolean {
|
||||
const block = blocksById[blockId]
|
||||
if (!block) return false
|
||||
if (block.locked) return true
|
||||
const visited = new Set<string>()
|
||||
let parentId = (block.data as Record<string, unknown> | null)?.parentId as string | undefined
|
||||
while (parentId && !visited.has(parentId)) {
|
||||
visited.add(parentId)
|
||||
if (blocksById[parentId]?.locked) return true
|
||||
parentId = (blocksById[parentId]?.data as Record<string, unknown> | null)?.parentId as
|
||||
| string
|
||||
| undefined
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
/**
|
||||
* Finds all descendant block IDs of a container (recursive).
|
||||
* Works with raw DB block arrays.
|
||||
*/
|
||||
function findDbDescendants(containerId: string, allBlocks: DbBlockRef[]): string[] {
|
||||
const descendants: string[] = []
|
||||
const visited = new Set<string>()
|
||||
const stack = [containerId]
|
||||
while (stack.length > 0) {
|
||||
const current = stack.pop()!
|
||||
if (visited.has(current)) continue
|
||||
visited.add(current)
|
||||
for (const b of allBlocks) {
|
||||
const pid = (b.data as Record<string, unknown> | null)?.parentId
|
||||
if (pid === current) {
|
||||
descendants.push(b.id)
|
||||
stack.push(b.id)
|
||||
}
|
||||
}
|
||||
}
|
||||
return descendants
|
||||
}
|
||||
|
||||
/**
|
||||
* Shared function to handle auto-connect edge insertion
|
||||
* @param tx - Database transaction
|
||||
@@ -753,20 +803,8 @@ async function handleBlocksOperationTx(
|
||||
allBlocks.map((b: BlockRecord) => [b.id, b])
|
||||
)
|
||||
|
||||
// Helper to check if a block is protected (locked or inside locked parent)
|
||||
const isProtected = (blockId: string): boolean => {
|
||||
const block = blocksById[blockId]
|
||||
if (!block) return false
|
||||
if (block.locked) return true
|
||||
const parentId = (block.data as Record<string, unknown> | null)?.parentId as
|
||||
| string
|
||||
| undefined
|
||||
if (parentId && blocksById[parentId]?.locked) return true
|
||||
return false
|
||||
}
|
||||
|
||||
// Filter out protected blocks from deletion request
|
||||
const deletableIds = ids.filter((id) => !isProtected(id))
|
||||
const deletableIds = ids.filter((id) => !isDbBlockProtected(id, blocksById))
|
||||
if (deletableIds.length === 0) {
|
||||
logger.info('All requested blocks are protected, skipping deletion')
|
||||
return
|
||||
@@ -778,18 +816,14 @@ async function handleBlocksOperationTx(
|
||||
)
|
||||
}
|
||||
|
||||
// Collect all block IDs including children of subflows
|
||||
// Collect all block IDs including all descendants of subflows
|
||||
const allBlocksToDelete = new Set<string>(deletableIds)
|
||||
|
||||
for (const id of deletableIds) {
|
||||
const block = blocksById[id]
|
||||
if (block && isSubflowBlockType(block.type)) {
|
||||
// Include all children of the subflow (they should be deleted with parent)
|
||||
for (const b of allBlocks) {
|
||||
const parentId = (b.data as Record<string, unknown> | null)?.parentId
|
||||
if (parentId === id) {
|
||||
allBlocksToDelete.add(b.id)
|
||||
}
|
||||
for (const descId of findDbDescendants(id, allBlocks)) {
|
||||
allBlocksToDelete.add(descId)
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -902,19 +936,18 @@ async function handleBlocksOperationTx(
|
||||
)
|
||||
const blocksToToggle = new Set<string>()
|
||||
|
||||
// Collect all blocks to toggle including children of containers
|
||||
// Collect all blocks to toggle including descendants of containers
|
||||
for (const id of blockIds) {
|
||||
const block = blocksById[id]
|
||||
if (!block || block.locked) continue
|
||||
if (!block || isDbBlockProtected(id, blocksById)) continue
|
||||
|
||||
blocksToToggle.add(id)
|
||||
|
||||
// If it's a loop or parallel, also include all children
|
||||
// If it's a loop or parallel, also include all non-locked descendants
|
||||
if (block.type === 'loop' || block.type === 'parallel') {
|
||||
for (const b of allBlocks) {
|
||||
const parentId = (b.data as Record<string, unknown> | null)?.parentId
|
||||
if (parentId === id && !b.locked) {
|
||||
blocksToToggle.add(b.id)
|
||||
for (const descId of findDbDescendants(id, allBlocks)) {
|
||||
if (!isDbBlockProtected(descId, blocksById)) {
|
||||
blocksToToggle.add(descId)
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -966,20 +999,10 @@ async function handleBlocksOperationTx(
|
||||
allBlocks.map((b: HandleBlockRecord) => [b.id, b])
|
||||
)
|
||||
|
||||
// Helper to check if a block is protected (locked or inside locked parent)
|
||||
const isProtected = (blockId: string): boolean => {
|
||||
const block = blocksById[blockId]
|
||||
if (!block) return false
|
||||
if (block.locked) return true
|
||||
const parentId = (block.data as Record<string, unknown> | null)?.parentId as
|
||||
| string
|
||||
| undefined
|
||||
if (parentId && blocksById[parentId]?.locked) return true
|
||||
return false
|
||||
}
|
||||
|
||||
// Filter to only toggle handles on unprotected blocks
|
||||
const blocksToToggle = blockIds.filter((id) => blocksById[id] && !isProtected(id))
|
||||
const blocksToToggle = blockIds.filter(
|
||||
(id) => blocksById[id] && !isDbBlockProtected(id, blocksById)
|
||||
)
|
||||
if (blocksToToggle.length === 0) {
|
||||
logger.info('All requested blocks are protected, skipping handles toggle')
|
||||
break
|
||||
@@ -1025,20 +1048,17 @@ async function handleBlocksOperationTx(
|
||||
)
|
||||
const blocksToToggle = new Set<string>()
|
||||
|
||||
// Collect all blocks to toggle including children of containers
|
||||
// Collect all blocks to toggle including descendants of containers
|
||||
for (const id of blockIds) {
|
||||
const block = blocksById[id]
|
||||
if (!block) continue
|
||||
|
||||
blocksToToggle.add(id)
|
||||
|
||||
// If it's a loop or parallel, also include all children
|
||||
// If it's a loop or parallel, also include all descendants
|
||||
if (block.type === 'loop' || block.type === 'parallel') {
|
||||
for (const b of allBlocks) {
|
||||
const parentId = (b.data as Record<string, unknown> | null)?.parentId
|
||||
if (parentId === id) {
|
||||
blocksToToggle.add(b.id)
|
||||
}
|
||||
for (const descId of findDbDescendants(id, allBlocks)) {
|
||||
blocksToToggle.add(descId)
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1088,31 +1108,19 @@ async function handleBlocksOperationTx(
|
||||
allBlocks.map((b: ParentBlockRecord) => [b.id, b])
|
||||
)
|
||||
|
||||
// Helper to check if a block is protected (locked or inside locked parent)
|
||||
const isProtected = (blockId: string): boolean => {
|
||||
const block = blocksById[blockId]
|
||||
if (!block) return false
|
||||
if (block.locked) return true
|
||||
const currentParentId = (block.data as Record<string, unknown> | null)?.parentId as
|
||||
| string
|
||||
| undefined
|
||||
if (currentParentId && blocksById[currentParentId]?.locked) return true
|
||||
return false
|
||||
}
|
||||
|
||||
for (const update of updates) {
|
||||
const { id, parentId, position } = update
|
||||
if (!id) continue
|
||||
|
||||
// Skip protected blocks (locked or inside locked container)
|
||||
if (isProtected(id)) {
|
||||
if (isDbBlockProtected(id, blocksById)) {
|
||||
logger.info(`Skipping block ${id} parent update - block is protected`)
|
||||
continue
|
||||
}
|
||||
|
||||
// Skip if trying to move into a locked container
|
||||
if (parentId && blocksById[parentId]?.locked) {
|
||||
logger.info(`Skipping block ${id} parent update - target parent ${parentId} is locked`)
|
||||
// Skip if trying to move into a locked container (or any of its ancestors)
|
||||
if (parentId && isDbBlockProtected(parentId, blocksById)) {
|
||||
logger.info(`Skipping block ${id} parent update - target parent ${parentId} is protected`)
|
||||
continue
|
||||
}
|
||||
|
||||
@@ -1235,18 +1243,7 @@ async function handleEdgeOperationTx(tx: any, workflowId: string, operation: str
|
||||
}
|
||||
}
|
||||
|
||||
const isBlockProtected = (blockId: string): boolean => {
|
||||
const block = blocksById[blockId]
|
||||
if (!block) return false
|
||||
if (block.locked) return true
|
||||
const parentId = (block.data as Record<string, unknown> | null)?.parentId as
|
||||
| string
|
||||
| undefined
|
||||
if (parentId && blocksById[parentId]?.locked) return true
|
||||
return false
|
||||
}
|
||||
|
||||
if (isBlockProtected(payload.target)) {
|
||||
if (isDbBlockProtected(payload.target, blocksById)) {
|
||||
logger.info(`Skipping edge add - target block is protected`)
|
||||
break
|
||||
}
|
||||
@@ -1334,18 +1331,7 @@ async function handleEdgeOperationTx(tx: any, workflowId: string, operation: str
|
||||
}
|
||||
}
|
||||
|
||||
const isBlockProtected = (blockId: string): boolean => {
|
||||
const block = blocksById[blockId]
|
||||
if (!block) return false
|
||||
if (block.locked) return true
|
||||
const parentId = (block.data as Record<string, unknown> | null)?.parentId as
|
||||
| string
|
||||
| undefined
|
||||
if (parentId && blocksById[parentId]?.locked) return true
|
||||
return false
|
||||
}
|
||||
|
||||
if (isBlockProtected(edgeToRemove.targetBlockId)) {
|
||||
if (isDbBlockProtected(edgeToRemove.targetBlockId, blocksById)) {
|
||||
logger.info(`Skipping edge remove - target block is protected`)
|
||||
break
|
||||
}
|
||||
@@ -1455,19 +1441,8 @@ async function handleEdgesOperationTx(
|
||||
}
|
||||
}
|
||||
|
||||
const isBlockProtected = (blockId: string): boolean => {
|
||||
const block = blocksById[blockId]
|
||||
if (!block) return false
|
||||
if (block.locked) return true
|
||||
const parentId = (block.data as Record<string, unknown> | null)?.parentId as
|
||||
| string
|
||||
| undefined
|
||||
if (parentId && blocksById[parentId]?.locked) return true
|
||||
return false
|
||||
}
|
||||
|
||||
const safeEdgeIds = edgesToRemove
|
||||
.filter((e: EdgeToRemove) => !isBlockProtected(e.targetBlockId))
|
||||
.filter((e: EdgeToRemove) => !isDbBlockProtected(e.targetBlockId, blocksById))
|
||||
.map((e: EdgeToRemove) => e.id)
|
||||
|
||||
if (safeEdgeIds.length === 0) {
|
||||
@@ -1552,20 +1527,9 @@ async function handleEdgesOperationTx(
|
||||
}
|
||||
}
|
||||
|
||||
const isBlockProtected = (blockId: string): boolean => {
|
||||
const block = blocksById[blockId]
|
||||
if (!block) return false
|
||||
if (block.locked) return true
|
||||
const parentId = (block.data as Record<string, unknown> | null)?.parentId as
|
||||
| string
|
||||
| undefined
|
||||
if (parentId && blocksById[parentId]?.locked) return true
|
||||
return false
|
||||
}
|
||||
|
||||
// Filter edges - only add edges where target block is not protected
|
||||
const safeEdges = (edges as Array<Record<string, unknown>>).filter(
|
||||
(e) => !isBlockProtected(e.target as string)
|
||||
(e) => !isDbBlockProtected(e.target as string, blocksById)
|
||||
)
|
||||
|
||||
if (safeEdges.length === 0) {
|
||||
|
||||
@@ -20,8 +20,10 @@ import type {
|
||||
WorkflowStore,
|
||||
} from '@/stores/workflows/workflow/types'
|
||||
import {
|
||||
findAllDescendantNodes,
|
||||
generateLoopBlocks,
|
||||
generateParallelBlocks,
|
||||
isBlockProtected,
|
||||
wouldCreateCycle,
|
||||
} from '@/stores/workflows/workflow/utils'
|
||||
|
||||
@@ -374,21 +376,21 @@ export const useWorkflowStore = create<WorkflowStore>()(
|
||||
const blocksToToggle = new Set<string>()
|
||||
|
||||
// For each ID, collect blocks to toggle (skip locked blocks entirely)
|
||||
// If it's a container, also include non-locked children
|
||||
// If it's a container, also include non-locked descendants
|
||||
for (const id of ids) {
|
||||
const block = currentBlocks[id]
|
||||
if (!block) continue
|
||||
|
||||
// Skip locked blocks entirely (including their children)
|
||||
if (block.locked) continue
|
||||
// Skip protected blocks entirely (locked or inside a locked ancestor)
|
||||
if (isBlockProtected(id, currentBlocks)) continue
|
||||
|
||||
blocksToToggle.add(id)
|
||||
|
||||
// If it's a loop or parallel, also include non-locked children
|
||||
// If it's a loop or parallel, also include non-locked descendants
|
||||
if (block.type === 'loop' || block.type === 'parallel') {
|
||||
Object.entries(currentBlocks).forEach(([blockId, b]) => {
|
||||
if (b.data?.parentId === id && !b.locked) {
|
||||
blocksToToggle.add(blockId)
|
||||
findAllDescendantNodes(id, currentBlocks).forEach((descId) => {
|
||||
if (!isBlockProtected(descId, currentBlocks)) {
|
||||
blocksToToggle.add(descId)
|
||||
}
|
||||
})
|
||||
}
|
||||
@@ -415,18 +417,8 @@ export const useWorkflowStore = create<WorkflowStore>()(
|
||||
const currentBlocks = get().blocks
|
||||
const newBlocks = { ...currentBlocks }
|
||||
|
||||
// Helper to check if a block is protected (locked or inside locked parent)
|
||||
const isProtected = (blockId: string): boolean => {
|
||||
const block = currentBlocks[blockId]
|
||||
if (!block) return false
|
||||
if (block.locked) return true
|
||||
const parentId = block.data?.parentId
|
||||
if (parentId && currentBlocks[parentId]?.locked) return true
|
||||
return false
|
||||
}
|
||||
|
||||
for (const id of ids) {
|
||||
if (!newBlocks[id] || isProtected(id)) continue
|
||||
if (!newBlocks[id] || isBlockProtected(id, currentBlocks)) continue
|
||||
newBlocks[id] = {
|
||||
...newBlocks[id],
|
||||
horizontalHandles: !newBlocks[id].horizontalHandles,
|
||||
@@ -1267,19 +1259,17 @@ export const useWorkflowStore = create<WorkflowStore>()(
|
||||
const blocksToToggle = new Set<string>()
|
||||
|
||||
// For each ID, collect blocks to toggle
|
||||
// If it's a container, also include all children
|
||||
// If it's a container, also include all descendants
|
||||
for (const id of ids) {
|
||||
const block = currentBlocks[id]
|
||||
if (!block) continue
|
||||
|
||||
blocksToToggle.add(id)
|
||||
|
||||
// If it's a loop or parallel, also include all children
|
||||
// If it's a loop or parallel, also include all descendants
|
||||
if (block.type === 'loop' || block.type === 'parallel') {
|
||||
Object.entries(currentBlocks).forEach(([blockId, b]) => {
|
||||
if (b.data?.parentId === id) {
|
||||
blocksToToggle.add(blockId)
|
||||
}
|
||||
findAllDescendantNodes(id, currentBlocks).forEach((descId) => {
|
||||
blocksToToggle.add(descId)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -143,21 +143,56 @@ export function findAllDescendantNodes(
|
||||
blocks: Record<string, BlockState>
|
||||
): string[] {
|
||||
const descendants: string[] = []
|
||||
const findDescendants = (parentId: string) => {
|
||||
const children = Object.values(blocks)
|
||||
.filter((block) => block.data?.parentId === parentId)
|
||||
.map((block) => block.id)
|
||||
|
||||
children.forEach((childId) => {
|
||||
descendants.push(childId)
|
||||
findDescendants(childId)
|
||||
})
|
||||
const visited = new Set<string>()
|
||||
const stack = [containerId]
|
||||
while (stack.length > 0) {
|
||||
const current = stack.pop()!
|
||||
if (visited.has(current)) continue
|
||||
visited.add(current)
|
||||
for (const block of Object.values(blocks)) {
|
||||
if (block.data?.parentId === current) {
|
||||
descendants.push(block.id)
|
||||
stack.push(block.id)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
findDescendants(containerId)
|
||||
return descendants
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if any ancestor container of a block is locked.
|
||||
* Unlike {@link isBlockProtected}, this ignores the block's own locked state.
|
||||
*
|
||||
* @param blockId - The ID of the block to check
|
||||
* @param blocks - Record of all blocks in the workflow
|
||||
* @returns True if any ancestor is locked
|
||||
*/
|
||||
export function isAncestorProtected(blockId: string, blocks: Record<string, BlockState>): boolean {
|
||||
const visited = new Set<string>()
|
||||
let parentId = blocks[blockId]?.data?.parentId
|
||||
while (parentId && !visited.has(parentId)) {
|
||||
visited.add(parentId)
|
||||
if (blocks[parentId]?.locked) return true
|
||||
parentId = blocks[parentId]?.data?.parentId
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if a block is protected from editing/deletion.
|
||||
* A block is protected if it is locked or if any ancestor container is locked.
|
||||
*
|
||||
* @param blockId - The ID of the block to check
|
||||
* @param blocks - Record of all blocks in the workflow
|
||||
* @returns True if the block is protected
|
||||
*/
|
||||
export function isBlockProtected(blockId: string, blocks: Record<string, BlockState>): boolean {
|
||||
const block = blocks[blockId]
|
||||
if (!block) return false
|
||||
if (block.locked) return true
|
||||
return isAncestorProtected(blockId, blocks)
|
||||
}
|
||||
|
||||
/**
|
||||
* Builds a complete collection of loops from the UI blocks
|
||||
*
|
||||
|
||||
@@ -239,6 +239,7 @@ export async function downloadAttachments(
|
||||
)
|
||||
|
||||
if (!attachmentResponse.ok) {
|
||||
await attachmentResponse.body?.cancel().catch(() => {})
|
||||
continue
|
||||
}
|
||||
|
||||
|
||||
@@ -46,9 +46,7 @@ export const deleteTool: ToolConfig<RedditDeleteParams, RedditWriteResponse> = {
|
||||
id: params.id,
|
||||
})
|
||||
|
||||
return {
|
||||
body: formData.toString(),
|
||||
}
|
||||
return formData.toString() as unknown as Record<string, any>
|
||||
},
|
||||
},
|
||||
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
import { validatePathSegment } from '@/lib/core/security/input-validation'
|
||||
import type { RedditCommentsParams, RedditCommentsResponse } from '@/tools/reddit/types'
|
||||
import { normalizeSubreddit } from '@/tools/reddit/utils'
|
||||
import type { ToolConfig } from '@/tools/types'
|
||||
@@ -69,12 +70,6 @@ export const getCommentsTool: ToolConfig<RedditCommentsParams, RedditCommentsRes
|
||||
visibility: 'user-or-llm',
|
||||
description: 'Include "load more comments" elements in the response',
|
||||
},
|
||||
showtitle: {
|
||||
type: 'boolean',
|
||||
required: false,
|
||||
visibility: 'user-or-llm',
|
||||
description: 'Include submission title in the response',
|
||||
},
|
||||
threaded: {
|
||||
type: 'boolean',
|
||||
required: false,
|
||||
@@ -87,23 +82,11 @@ export const getCommentsTool: ToolConfig<RedditCommentsParams, RedditCommentsRes
|
||||
visibility: 'user-or-llm',
|
||||
description: 'Integer to truncate comment depth',
|
||||
},
|
||||
after: {
|
||||
comment: {
|
||||
type: 'string',
|
||||
required: false,
|
||||
visibility: 'user-or-llm',
|
||||
description: 'Fullname of a thing to fetch items after (for pagination)',
|
||||
},
|
||||
before: {
|
||||
type: 'string',
|
||||
required: false,
|
||||
visibility: 'user-or-llm',
|
||||
description: 'Fullname of a thing to fetch items before (for pagination)',
|
||||
},
|
||||
count: {
|
||||
type: 'number',
|
||||
required: false,
|
||||
visibility: 'user-or-llm',
|
||||
description: 'A count of items already seen in the listing (used for numbering)',
|
||||
description: 'ID36 of a comment to focus on (returns that comment thread)',
|
||||
},
|
||||
},
|
||||
|
||||
@@ -111,7 +94,7 @@ export const getCommentsTool: ToolConfig<RedditCommentsParams, RedditCommentsRes
|
||||
url: (params: RedditCommentsParams) => {
|
||||
const subreddit = normalizeSubreddit(params.subreddit)
|
||||
const sort = params.sort || 'confidence'
|
||||
const limit = Math.min(Math.max(1, params.limit || 50), 100)
|
||||
const limit = Math.min(Math.max(1, params.limit ?? 50), 100)
|
||||
|
||||
// Build URL with query parameters
|
||||
const urlParams = new URLSearchParams({
|
||||
@@ -126,18 +109,21 @@ export const getCommentsTool: ToolConfig<RedditCommentsParams, RedditCommentsRes
|
||||
urlParams.append('context', Number(params.context).toString())
|
||||
if (params.showedits !== undefined) urlParams.append('showedits', params.showedits.toString())
|
||||
if (params.showmore !== undefined) urlParams.append('showmore', params.showmore.toString())
|
||||
if (params.showtitle !== undefined) urlParams.append('showtitle', params.showtitle.toString())
|
||||
if (params.threaded !== undefined) urlParams.append('threaded', params.threaded.toString())
|
||||
if (params.truncate !== undefined)
|
||||
urlParams.append('truncate', Number(params.truncate).toString())
|
||||
|
||||
// Add pagination parameters if provided
|
||||
if (params.after) urlParams.append('after', params.after)
|
||||
if (params.before) urlParams.append('before', params.before)
|
||||
if (params.count !== undefined) urlParams.append('count', Number(params.count).toString())
|
||||
if (params.comment) urlParams.append('comment', params.comment)
|
||||
|
||||
// Validate postId to prevent path traversal
|
||||
const postId = params.postId.trim()
|
||||
const postIdValidation = validatePathSegment(postId, { paramName: 'postId' })
|
||||
if (!postIdValidation.isValid) {
|
||||
throw new Error(postIdValidation.error)
|
||||
}
|
||||
|
||||
// Build URL using OAuth endpoint
|
||||
return `https://oauth.reddit.com/r/${subreddit}/comments/${params.postId}?${urlParams.toString()}`
|
||||
return `https://oauth.reddit.com/r/${subreddit}/comments/${postId}?${urlParams.toString()}`
|
||||
},
|
||||
method: 'GET',
|
||||
headers: (params: RedditCommentsParams) => {
|
||||
@@ -157,7 +143,7 @@ export const getCommentsTool: ToolConfig<RedditCommentsParams, RedditCommentsRes
|
||||
const data = await response.json()
|
||||
|
||||
// Extract post data (first element in the array)
|
||||
const postData = data[0]?.data?.children[0]?.data || {}
|
||||
const postData = data[0]?.data?.children?.[0]?.data || {}
|
||||
|
||||
// Extract and transform comments (second element in the array)
|
||||
const commentsData = data[1]?.data?.children || []
|
||||
@@ -179,11 +165,12 @@ export const getCommentsTool: ToolConfig<RedditCommentsParams, RedditCommentsRes
|
||||
: []
|
||||
|
||||
return {
|
||||
id: commentData.id || '',
|
||||
id: commentData.id ?? '',
|
||||
name: commentData.name ?? '',
|
||||
author: commentData.author || '[deleted]',
|
||||
body: commentData.body || '',
|
||||
created_utc: commentData.created_utc || 0,
|
||||
score: commentData.score || 0,
|
||||
body: commentData.body ?? '',
|
||||
created_utc: commentData.created_utc ?? 0,
|
||||
score: commentData.score ?? 0,
|
||||
permalink: commentData.permalink
|
||||
? `https://www.reddit.com${commentData.permalink}`
|
||||
: '',
|
||||
@@ -199,12 +186,13 @@ export const getCommentsTool: ToolConfig<RedditCommentsParams, RedditCommentsRes
|
||||
success: true,
|
||||
output: {
|
||||
post: {
|
||||
id: postData.id || '',
|
||||
title: postData.title || '',
|
||||
id: postData.id ?? '',
|
||||
name: postData.name ?? '',
|
||||
title: postData.title ?? '',
|
||||
author: postData.author || '[deleted]',
|
||||
selftext: postData.selftext || '',
|
||||
created_utc: postData.created_utc || 0,
|
||||
score: postData.score || 0,
|
||||
selftext: postData.selftext ?? '',
|
||||
created_utc: postData.created_utc ?? 0,
|
||||
score: postData.score ?? 0,
|
||||
permalink: postData.permalink ? `https://www.reddit.com${postData.permalink}` : '',
|
||||
},
|
||||
comments: comments,
|
||||
@@ -218,6 +206,7 @@ export const getCommentsTool: ToolConfig<RedditCommentsParams, RedditCommentsRes
|
||||
description: 'Post information including ID, title, author, content, and metadata',
|
||||
properties: {
|
||||
id: { type: 'string', description: 'Post ID' },
|
||||
name: { type: 'string', description: 'Thing fullname (t3_xxxxx)' },
|
||||
title: { type: 'string', description: 'Post title' },
|
||||
author: { type: 'string', description: 'Post author' },
|
||||
selftext: { type: 'string', description: 'Post text content' },
|
||||
@@ -233,6 +222,7 @@ export const getCommentsTool: ToolConfig<RedditCommentsParams, RedditCommentsRes
|
||||
type: 'object',
|
||||
properties: {
|
||||
id: { type: 'string', description: 'Comment ID' },
|
||||
name: { type: 'string', description: 'Thing fullname (t1_xxxxx)' },
|
||||
author: { type: 'string', description: 'Comment author' },
|
||||
body: { type: 'string', description: 'Comment text' },
|
||||
score: { type: 'number', description: 'Comment score' },
|
||||
|
||||
@@ -74,7 +74,7 @@ export const getControversialTool: ToolConfig<RedditControversialParams, RedditP
|
||||
request: {
|
||||
url: (params: RedditControversialParams) => {
|
||||
const subreddit = normalizeSubreddit(params.subreddit)
|
||||
const limit = Math.min(Math.max(1, params.limit || 10), 100)
|
||||
const limit = Math.min(Math.max(1, params.limit ?? 10), 100)
|
||||
|
||||
// Build URL with appropriate parameters using OAuth endpoint
|
||||
const urlParams = new URLSearchParams({
|
||||
@@ -115,25 +115,26 @@ export const getControversialTool: ToolConfig<RedditControversialParams, RedditP
|
||||
|
||||
// Extract subreddit name from response (with fallback)
|
||||
const subredditName =
|
||||
data.data?.children[0]?.data?.subreddit || requestParams?.subreddit || 'unknown'
|
||||
data.data?.children?.[0]?.data?.subreddit || requestParams?.subreddit || 'unknown'
|
||||
|
||||
// Transform posts data
|
||||
const posts =
|
||||
data.data?.children?.map((child: any) => {
|
||||
const post = child.data || {}
|
||||
return {
|
||||
id: post.id || '',
|
||||
title: post.title || '',
|
||||
id: post.id ?? '',
|
||||
name: post.name ?? '',
|
||||
title: post.title ?? '',
|
||||
author: post.author || '[deleted]',
|
||||
url: post.url || '',
|
||||
url: post.url ?? '',
|
||||
permalink: post.permalink ? `https://www.reddit.com${post.permalink}` : '',
|
||||
created_utc: post.created_utc || 0,
|
||||
score: post.score || 0,
|
||||
num_comments: post.num_comments || 0,
|
||||
created_utc: post.created_utc ?? 0,
|
||||
score: post.score ?? 0,
|
||||
num_comments: post.num_comments ?? 0,
|
||||
is_self: !!post.is_self,
|
||||
selftext: post.selftext || '',
|
||||
thumbnail: post.thumbnail || '',
|
||||
subreddit: post.subreddit || subredditName,
|
||||
selftext: post.selftext ?? '',
|
||||
thumbnail: post.thumbnail ?? '',
|
||||
subreddit: post.subreddit ?? subredditName,
|
||||
}
|
||||
}) || []
|
||||
|
||||
@@ -142,6 +143,8 @@ export const getControversialTool: ToolConfig<RedditControversialParams, RedditP
|
||||
output: {
|
||||
subreddit: subredditName,
|
||||
posts,
|
||||
after: data.data?.after ?? null,
|
||||
before: data.data?.before ?? null,
|
||||
},
|
||||
}
|
||||
},
|
||||
@@ -159,6 +162,7 @@ export const getControversialTool: ToolConfig<RedditControversialParams, RedditP
|
||||
type: 'object',
|
||||
properties: {
|
||||
id: { type: 'string', description: 'Post ID' },
|
||||
name: { type: 'string', description: 'Thing fullname (t3_xxxxx)' },
|
||||
title: { type: 'string', description: 'Post title' },
|
||||
author: { type: 'string', description: 'Author username' },
|
||||
url: { type: 'string', description: 'Post URL' },
|
||||
@@ -173,5 +177,15 @@ export const getControversialTool: ToolConfig<RedditControversialParams, RedditP
|
||||
},
|
||||
},
|
||||
},
|
||||
after: {
|
||||
type: 'string',
|
||||
description: 'Fullname of the last item for forward pagination',
|
||||
optional: true,
|
||||
},
|
||||
before: {
|
||||
type: 'string',
|
||||
description: 'Fullname of the first item for backward pagination',
|
||||
optional: true,
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
90
apps/sim/tools/reddit/get_me.ts
Normal file
90
apps/sim/tools/reddit/get_me.ts
Normal file
@@ -0,0 +1,90 @@
|
||||
import type { RedditGetMeParams, RedditUserResponse } from '@/tools/reddit/types'
|
||||
import type { ToolConfig } from '@/tools/types'
|
||||
|
||||
export const getMeTool: ToolConfig<RedditGetMeParams, RedditUserResponse> = {
|
||||
id: 'reddit_get_me',
|
||||
name: 'Get Reddit User Identity',
|
||||
description: 'Get information about the authenticated Reddit user',
|
||||
version: '1.0.0',
|
||||
|
||||
oauth: {
|
||||
required: true,
|
||||
provider: 'reddit',
|
||||
},
|
||||
|
||||
params: {
|
||||
accessToken: {
|
||||
type: 'string',
|
||||
required: true,
|
||||
visibility: 'hidden',
|
||||
description: 'Access token for Reddit API',
|
||||
},
|
||||
},
|
||||
|
||||
request: {
|
||||
url: () => 'https://oauth.reddit.com/api/v1/me?raw_json=1',
|
||||
method: 'GET',
|
||||
headers: (params: RedditGetMeParams) => {
|
||||
if (!params.accessToken) {
|
||||
throw new Error('Access token is required for Reddit API')
|
||||
}
|
||||
|
||||
return {
|
||||
Authorization: `Bearer ${params.accessToken}`,
|
||||
'User-Agent': 'sim-studio/1.0 (https://github.com/simstudioai/sim)',
|
||||
Accept: 'application/json',
|
||||
}
|
||||
},
|
||||
},
|
||||
|
||||
transformResponse: async (response: Response) => {
|
||||
const data = await response.json()
|
||||
|
||||
if (!response.ok) {
|
||||
return {
|
||||
success: false,
|
||||
output: {
|
||||
id: '',
|
||||
name: '',
|
||||
created_utc: 0,
|
||||
link_karma: 0,
|
||||
comment_karma: 0,
|
||||
total_karma: 0,
|
||||
is_gold: false,
|
||||
is_mod: false,
|
||||
has_verified_email: false,
|
||||
icon_img: '',
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
success: true,
|
||||
output: {
|
||||
id: data.id ?? '',
|
||||
name: data.name ?? '',
|
||||
created_utc: data.created_utc ?? 0,
|
||||
link_karma: data.link_karma ?? 0,
|
||||
comment_karma: data.comment_karma ?? 0,
|
||||
total_karma: data.total_karma ?? 0,
|
||||
is_gold: data.is_gold ?? false,
|
||||
is_mod: data.is_mod ?? false,
|
||||
has_verified_email: data.has_verified_email ?? false,
|
||||
icon_img: data.icon_img ?? '',
|
||||
},
|
||||
}
|
||||
},
|
||||
|
||||
outputs: {
|
||||
id: { type: 'string', description: 'User ID' },
|
||||
name: { type: 'string', description: 'Username' },
|
||||
created_utc: { type: 'number', description: 'Account creation time in UTC epoch seconds' },
|
||||
link_karma: { type: 'number', description: 'Total link karma' },
|
||||
comment_karma: { type: 'number', description: 'Total comment karma' },
|
||||
total_karma: { type: 'number', description: 'Combined total karma' },
|
||||
is_gold: { type: 'boolean', description: 'Whether user has Reddit Premium' },
|
||||
is_mod: { type: 'boolean', description: 'Whether user is a moderator' },
|
||||
has_verified_email: { type: 'boolean', description: 'Whether email is verified' },
|
||||
icon_img: { type: 'string', description: 'User avatar/icon URL' },
|
||||
},
|
||||
}
|
||||
188
apps/sim/tools/reddit/get_messages.ts
Normal file
188
apps/sim/tools/reddit/get_messages.ts
Normal file
@@ -0,0 +1,188 @@
|
||||
import { validateEnum } from '@/lib/core/security/input-validation'
|
||||
import type { RedditGetMessagesParams, RedditMessagesResponse } from '@/tools/reddit/types'
|
||||
import type { ToolConfig } from '@/tools/types'
|
||||
|
||||
const ALLOWED_MESSAGE_FOLDERS = [
|
||||
'inbox',
|
||||
'unread',
|
||||
'sent',
|
||||
'messages',
|
||||
'comments',
|
||||
'selfreply',
|
||||
'mentions',
|
||||
] as const
|
||||
|
||||
export const getMessagesTool: ToolConfig<RedditGetMessagesParams, RedditMessagesResponse> = {
|
||||
id: 'reddit_get_messages',
|
||||
name: 'Get Reddit Messages',
|
||||
description: 'Retrieve private messages from your Reddit inbox',
|
||||
version: '1.0.0',
|
||||
|
||||
oauth: {
|
||||
required: true,
|
||||
provider: 'reddit',
|
||||
},
|
||||
|
||||
params: {
|
||||
accessToken: {
|
||||
type: 'string',
|
||||
required: true,
|
||||
visibility: 'hidden',
|
||||
description: 'Access token for Reddit API',
|
||||
},
|
||||
where: {
|
||||
type: 'string',
|
||||
required: false,
|
||||
visibility: 'user-or-llm',
|
||||
description:
|
||||
'Message folder to retrieve: "inbox" (all), "unread", "sent", "messages" (direct messages only), "comments" (comment replies), "selfreply" (self-post replies), or "mentions" (username mentions). Default: "inbox"',
|
||||
},
|
||||
limit: {
|
||||
type: 'number',
|
||||
required: false,
|
||||
visibility: 'user-or-llm',
|
||||
description: 'Maximum number of messages to return (e.g., 25). Default: 25, max: 100',
|
||||
},
|
||||
after: {
|
||||
type: 'string',
|
||||
required: false,
|
||||
visibility: 'user-or-llm',
|
||||
description: 'Fullname of a thing to fetch items after (for pagination)',
|
||||
},
|
||||
before: {
|
||||
type: 'string',
|
||||
required: false,
|
||||
visibility: 'user-or-llm',
|
||||
description: 'Fullname of a thing to fetch items before (for pagination)',
|
||||
},
|
||||
mark: {
|
||||
type: 'boolean',
|
||||
required: false,
|
||||
visibility: 'user-or-llm',
|
||||
description: 'Whether to mark fetched messages as read',
|
||||
},
|
||||
count: {
|
||||
type: 'number',
|
||||
required: false,
|
||||
visibility: 'user-or-llm',
|
||||
description: 'A count of items already seen in the listing (used for numbering)',
|
||||
},
|
||||
show: {
|
||||
type: 'string',
|
||||
required: false,
|
||||
visibility: 'user-or-llm',
|
||||
description: 'Show items that would normally be filtered (e.g., "all")',
|
||||
},
|
||||
},
|
||||
|
||||
request: {
|
||||
url: (params: RedditGetMessagesParams) => {
|
||||
const where = params.where || 'inbox'
|
||||
const validation = validateEnum(where, ALLOWED_MESSAGE_FOLDERS, 'where')
|
||||
if (!validation.isValid) {
|
||||
throw new Error(validation.error)
|
||||
}
|
||||
const limit = Math.min(Math.max(1, params.limit ?? 25), 100)
|
||||
|
||||
const urlParams = new URLSearchParams({
|
||||
limit: limit.toString(),
|
||||
raw_json: '1',
|
||||
})
|
||||
|
||||
if (params.after) urlParams.append('after', params.after)
|
||||
if (params.before) urlParams.append('before', params.before)
|
||||
if (params.mark !== undefined) urlParams.append('mark', params.mark.toString())
|
||||
if (params.count !== undefined) urlParams.append('count', Number(params.count).toString())
|
||||
if (params.show) urlParams.append('show', params.show)
|
||||
|
||||
return `https://oauth.reddit.com/message/${where}?${urlParams.toString()}`
|
||||
},
|
||||
method: 'GET',
|
||||
headers: (params: RedditGetMessagesParams) => {
|
||||
if (!params.accessToken) {
|
||||
throw new Error('Access token is required for Reddit API')
|
||||
}
|
||||
|
||||
return {
|
||||
Authorization: `Bearer ${params.accessToken}`,
|
||||
'User-Agent': 'sim-studio/1.0 (https://github.com/simstudioai/sim)',
|
||||
Accept: 'application/json',
|
||||
}
|
||||
},
|
||||
},
|
||||
|
||||
transformResponse: async (response: Response) => {
|
||||
const data = await response.json()
|
||||
|
||||
if (!response.ok) {
|
||||
return {
|
||||
success: false,
|
||||
output: { messages: [], after: null, before: null },
|
||||
}
|
||||
}
|
||||
|
||||
const messages =
|
||||
data.data?.children?.map((child: any) => {
|
||||
const msg = child.data || {}
|
||||
return {
|
||||
id: msg.id ?? '',
|
||||
name: msg.name ?? '',
|
||||
author: msg.author ?? '',
|
||||
dest: msg.dest ?? '',
|
||||
subject: msg.subject ?? '',
|
||||
body: msg.body ?? '',
|
||||
created_utc: msg.created_utc ?? 0,
|
||||
new: msg.new ?? false,
|
||||
was_comment: msg.was_comment ?? false,
|
||||
context: msg.context ?? '',
|
||||
distinguished: msg.distinguished ?? null,
|
||||
}
|
||||
}) || []
|
||||
|
||||
return {
|
||||
success: true,
|
||||
output: {
|
||||
messages,
|
||||
after: data.data?.after ?? null,
|
||||
before: data.data?.before ?? null,
|
||||
},
|
||||
}
|
||||
},
|
||||
|
||||
outputs: {
|
||||
messages: {
|
||||
type: 'array',
|
||||
description: 'Array of messages with sender, recipient, subject, body, and metadata',
|
||||
items: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
id: { type: 'string', description: 'Message ID' },
|
||||
name: { type: 'string', description: 'Thing fullname (t4_xxxxx)' },
|
||||
author: { type: 'string', description: 'Sender username' },
|
||||
dest: { type: 'string', description: 'Recipient username' },
|
||||
subject: { type: 'string', description: 'Message subject' },
|
||||
body: { type: 'string', description: 'Message body text' },
|
||||
created_utc: { type: 'number', description: 'Creation time in UTC epoch seconds' },
|
||||
new: { type: 'boolean', description: 'Whether the message is unread' },
|
||||
was_comment: { type: 'boolean', description: 'Whether the message is a comment reply' },
|
||||
context: { type: 'string', description: 'Context URL for comment replies' },
|
||||
distinguished: {
|
||||
type: 'string',
|
||||
description: 'Distinction: null/"moderator"/"admin"',
|
||||
optional: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
after: {
|
||||
type: 'string',
|
||||
description: 'Fullname of the last item for forward pagination',
|
||||
optional: true,
|
||||
},
|
||||
before: {
|
||||
type: 'string',
|
||||
description: 'Fullname of the first item for backward pagination',
|
||||
optional: true,
|
||||
},
|
||||
},
|
||||
}
|
||||
@@ -1,7 +1,10 @@
|
||||
import { validateEnum } from '@/lib/core/security/input-validation'
|
||||
import type { RedditPostsParams, RedditPostsResponse } from '@/tools/reddit/types'
|
||||
import { normalizeSubreddit } from '@/tools/reddit/utils'
|
||||
import type { ToolConfig } from '@/tools/types'
|
||||
|
||||
const ALLOWED_SORT_OPTIONS = ['hot', 'new', 'top', 'controversial', 'rising'] as const
|
||||
|
||||
export const getPostsTool: ToolConfig<RedditPostsParams, RedditPostsResponse> = {
|
||||
id: 'reddit_get_posts',
|
||||
name: 'Get Reddit Posts',
|
||||
@@ -30,7 +33,8 @@ export const getPostsTool: ToolConfig<RedditPostsParams, RedditPostsResponse> =
|
||||
type: 'string',
|
||||
required: false,
|
||||
visibility: 'user-or-llm',
|
||||
description: 'Sort method for posts (e.g., "hot", "new", "top", "rising"). Default: "hot"',
|
||||
description:
|
||||
'Sort method for posts (e.g., "hot", "new", "top", "rising", "controversial"). Default: "hot"',
|
||||
},
|
||||
limit: {
|
||||
type: 'number',
|
||||
@@ -43,7 +47,7 @@ export const getPostsTool: ToolConfig<RedditPostsParams, RedditPostsResponse> =
|
||||
required: false,
|
||||
visibility: 'user-or-llm',
|
||||
description:
|
||||
'Time filter for "top" sorted posts: "day", "week", "month", "year", or "all" (default: "day")',
|
||||
'Time filter for "top" sorted posts: "day", "week", "month", "year", or "all" (default: "all")',
|
||||
},
|
||||
after: {
|
||||
type: 'string',
|
||||
@@ -75,13 +79,23 @@ export const getPostsTool: ToolConfig<RedditPostsParams, RedditPostsResponse> =
|
||||
visibility: 'user-or-llm',
|
||||
description: 'Expand subreddit details in the response',
|
||||
},
|
||||
g: {
|
||||
type: 'string',
|
||||
required: false,
|
||||
visibility: 'user-or-llm',
|
||||
description: 'Geo filter for posts (e.g., "GLOBAL", "US", "AR", etc.)',
|
||||
},
|
||||
},
|
||||
|
||||
request: {
|
||||
url: (params: RedditPostsParams) => {
|
||||
const subreddit = normalizeSubreddit(params.subreddit)
|
||||
const sort = params.sort || 'hot'
|
||||
const limit = Math.min(Math.max(1, params.limit || 10), 100)
|
||||
const sortValidation = validateEnum(sort, ALLOWED_SORT_OPTIONS, 'sort')
|
||||
if (!sortValidation.isValid) {
|
||||
throw new Error(sortValidation.error)
|
||||
}
|
||||
const limit = Math.min(Math.max(1, params.limit ?? 10), 100)
|
||||
|
||||
// Build URL with appropriate parameters using OAuth endpoint
|
||||
const urlParams = new URLSearchParams({
|
||||
@@ -89,8 +103,12 @@ export const getPostsTool: ToolConfig<RedditPostsParams, RedditPostsResponse> =
|
||||
raw_json: '1',
|
||||
})
|
||||
|
||||
// Add time parameter only for 'top' sorting
|
||||
if (sort === 'top' && params.time !== undefined && params.time !== null) {
|
||||
// Add time parameter for 'top' and 'controversial' sorting
|
||||
if (
|
||||
(sort === 'top' || sort === 'controversial') &&
|
||||
params.time !== undefined &&
|
||||
params.time !== null
|
||||
) {
|
||||
urlParams.append('t', params.time)
|
||||
}
|
||||
|
||||
@@ -105,6 +123,7 @@ export const getPostsTool: ToolConfig<RedditPostsParams, RedditPostsResponse> =
|
||||
urlParams.append('show', params.show)
|
||||
if (params.sr_detail !== undefined && params.sr_detail !== null)
|
||||
urlParams.append('sr_detail', params.sr_detail.toString())
|
||||
if (params.g) urlParams.append('g', params.g)
|
||||
|
||||
return `https://oauth.reddit.com/r/${subreddit}/${sort}?${urlParams.toString()}`
|
||||
},
|
||||
@@ -127,25 +146,26 @@ export const getPostsTool: ToolConfig<RedditPostsParams, RedditPostsResponse> =
|
||||
|
||||
// Extract subreddit name from response (with fallback)
|
||||
const subredditName =
|
||||
data.data?.children[0]?.data?.subreddit || requestParams?.subreddit || 'unknown'
|
||||
data.data?.children?.[0]?.data?.subreddit || requestParams?.subreddit || 'unknown'
|
||||
|
||||
// Transform posts data
|
||||
const posts =
|
||||
data.data?.children?.map((child: any) => {
|
||||
const post = child.data || {}
|
||||
return {
|
||||
id: post.id || '',
|
||||
title: post.title || '',
|
||||
id: post.id ?? '',
|
||||
name: post.name ?? '',
|
||||
title: post.title ?? '',
|
||||
author: post.author || '[deleted]',
|
||||
url: post.url || '',
|
||||
url: post.url ?? '',
|
||||
permalink: post.permalink ? `https://www.reddit.com${post.permalink}` : '',
|
||||
created_utc: post.created_utc || 0,
|
||||
score: post.score || 0,
|
||||
num_comments: post.num_comments || 0,
|
||||
created_utc: post.created_utc ?? 0,
|
||||
score: post.score ?? 0,
|
||||
num_comments: post.num_comments ?? 0,
|
||||
is_self: !!post.is_self,
|
||||
selftext: post.selftext || '',
|
||||
thumbnail: post.thumbnail || '',
|
||||
subreddit: post.subreddit || subredditName,
|
||||
selftext: post.selftext ?? '',
|
||||
thumbnail: post.thumbnail ?? '',
|
||||
subreddit: post.subreddit ?? subredditName,
|
||||
}
|
||||
}) || []
|
||||
|
||||
@@ -154,6 +174,8 @@ export const getPostsTool: ToolConfig<RedditPostsParams, RedditPostsResponse> =
|
||||
output: {
|
||||
subreddit: subredditName,
|
||||
posts,
|
||||
after: data.data?.after ?? null,
|
||||
before: data.data?.before ?? null,
|
||||
},
|
||||
}
|
||||
},
|
||||
@@ -170,6 +192,7 @@ export const getPostsTool: ToolConfig<RedditPostsParams, RedditPostsResponse> =
|
||||
type: 'object',
|
||||
properties: {
|
||||
id: { type: 'string', description: 'Post ID' },
|
||||
name: { type: 'string', description: 'Thing fullname (t3_xxxxx)' },
|
||||
title: { type: 'string', description: 'Post title' },
|
||||
author: { type: 'string', description: 'Author username' },
|
||||
url: { type: 'string', description: 'Post URL' },
|
||||
@@ -184,5 +207,15 @@ export const getPostsTool: ToolConfig<RedditPostsParams, RedditPostsResponse> =
|
||||
},
|
||||
},
|
||||
},
|
||||
after: {
|
||||
type: 'string',
|
||||
description: 'Fullname of the last item for forward pagination',
|
||||
optional: true,
|
||||
},
|
||||
before: {
|
||||
type: 'string',
|
||||
description: 'Fullname of the first item for backward pagination',
|
||||
optional: true,
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
126
apps/sim/tools/reddit/get_subreddit_info.ts
Normal file
126
apps/sim/tools/reddit/get_subreddit_info.ts
Normal file
@@ -0,0 +1,126 @@
|
||||
import type {
|
||||
RedditGetSubredditInfoParams,
|
||||
RedditSubredditInfoResponse,
|
||||
} from '@/tools/reddit/types'
|
||||
import { normalizeSubreddit } from '@/tools/reddit/utils'
|
||||
import type { ToolConfig } from '@/tools/types'
|
||||
|
||||
export const getSubredditInfoTool: ToolConfig<
|
||||
RedditGetSubredditInfoParams,
|
||||
RedditSubredditInfoResponse
|
||||
> = {
|
||||
id: 'reddit_get_subreddit_info',
|
||||
name: 'Get Subreddit Info',
|
||||
description: 'Get metadata and information about a subreddit',
|
||||
version: '1.0.0',
|
||||
|
||||
oauth: {
|
||||
required: true,
|
||||
provider: 'reddit',
|
||||
},
|
||||
|
||||
params: {
|
||||
accessToken: {
|
||||
type: 'string',
|
||||
required: true,
|
||||
visibility: 'hidden',
|
||||
description: 'Access token for Reddit API',
|
||||
},
|
||||
subreddit: {
|
||||
type: 'string',
|
||||
required: true,
|
||||
visibility: 'user-or-llm',
|
||||
description: 'The subreddit to get info about (e.g., "technology", "programming", "news")',
|
||||
},
|
||||
},
|
||||
|
||||
request: {
|
||||
url: (params: RedditGetSubredditInfoParams) => {
|
||||
const subreddit = normalizeSubreddit(params.subreddit)
|
||||
return `https://oauth.reddit.com/r/${subreddit}/about?raw_json=1`
|
||||
},
|
||||
method: 'GET',
|
||||
headers: (params: RedditGetSubredditInfoParams) => {
|
||||
if (!params.accessToken) {
|
||||
throw new Error('Access token is required for Reddit API')
|
||||
}
|
||||
|
||||
return {
|
||||
Authorization: `Bearer ${params.accessToken}`,
|
||||
'User-Agent': 'sim-studio/1.0 (https://github.com/simstudioai/sim)',
|
||||
Accept: 'application/json',
|
||||
}
|
||||
},
|
||||
},
|
||||
|
||||
transformResponse: async (response: Response) => {
|
||||
const data = await response.json()
|
||||
|
||||
if (!response.ok) {
|
||||
return {
|
||||
success: false,
|
||||
output: {
|
||||
id: '',
|
||||
name: '',
|
||||
display_name: '',
|
||||
title: '',
|
||||
description: '',
|
||||
public_description: '',
|
||||
subscribers: 0,
|
||||
accounts_active: 0,
|
||||
created_utc: 0,
|
||||
over18: false,
|
||||
lang: '',
|
||||
subreddit_type: '',
|
||||
url: '',
|
||||
icon_img: null,
|
||||
banner_img: null,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
const sub = data.data || data
|
||||
|
||||
return {
|
||||
success: true,
|
||||
output: {
|
||||
id: sub.id ?? '',
|
||||
name: sub.name ?? '',
|
||||
display_name: sub.display_name ?? '',
|
||||
title: sub.title ?? '',
|
||||
description: sub.description ?? '',
|
||||
public_description: sub.public_description ?? '',
|
||||
subscribers: sub.subscribers ?? 0,
|
||||
accounts_active: sub.accounts_active ?? 0,
|
||||
created_utc: sub.created_utc ?? 0,
|
||||
over18: sub.over18 ?? false,
|
||||
lang: sub.lang ?? '',
|
||||
subreddit_type: sub.subreddit_type ?? '',
|
||||
url: sub.url ?? '',
|
||||
icon_img: sub.icon_img ?? null,
|
||||
banner_img: sub.banner_img ?? null,
|
||||
},
|
||||
}
|
||||
},
|
||||
|
||||
outputs: {
|
||||
id: { type: 'string', description: 'Subreddit ID' },
|
||||
name: { type: 'string', description: 'Subreddit fullname (t5_xxxxx)' },
|
||||
display_name: { type: 'string', description: 'Subreddit name without prefix' },
|
||||
title: { type: 'string', description: 'Subreddit title' },
|
||||
description: { type: 'string', description: 'Full subreddit description (markdown)' },
|
||||
public_description: { type: 'string', description: 'Short public description' },
|
||||
subscribers: { type: 'number', description: 'Number of subscribers' },
|
||||
accounts_active: { type: 'number', description: 'Number of currently active users' },
|
||||
created_utc: { type: 'number', description: 'Creation time in UTC epoch seconds' },
|
||||
over18: { type: 'boolean', description: 'Whether the subreddit is NSFW' },
|
||||
lang: { type: 'string', description: 'Primary language of the subreddit' },
|
||||
subreddit_type: {
|
||||
type: 'string',
|
||||
description: 'Subreddit type: public, private, restricted, etc.',
|
||||
},
|
||||
url: { type: 'string', description: 'Subreddit URL path (e.g., /r/technology/)' },
|
||||
icon_img: { type: 'string', description: 'Subreddit icon URL', optional: true },
|
||||
banner_img: { type: 'string', description: 'Subreddit banner URL', optional: true },
|
||||
},
|
||||
}
|
||||
106
apps/sim/tools/reddit/get_user.ts
Normal file
106
apps/sim/tools/reddit/get_user.ts
Normal file
@@ -0,0 +1,106 @@
|
||||
import { validatePathSegment } from '@/lib/core/security/input-validation'
|
||||
import type { RedditGetUserParams, RedditUserResponse } from '@/tools/reddit/types'
|
||||
import type { ToolConfig } from '@/tools/types'
|
||||
|
||||
export const getUserTool: ToolConfig<RedditGetUserParams, RedditUserResponse> = {
|
||||
id: 'reddit_get_user',
|
||||
name: 'Get Reddit User Profile',
|
||||
description: 'Get public profile information about any Reddit user by username',
|
||||
version: '1.0.0',
|
||||
|
||||
oauth: {
|
||||
required: true,
|
||||
provider: 'reddit',
|
||||
},
|
||||
|
||||
params: {
|
||||
accessToken: {
|
||||
type: 'string',
|
||||
required: true,
|
||||
visibility: 'hidden',
|
||||
description: 'Access token for Reddit API',
|
||||
},
|
||||
username: {
|
||||
type: 'string',
|
||||
required: true,
|
||||
visibility: 'user-or-llm',
|
||||
description: 'Reddit username to look up (e.g., "spez", "example_user")',
|
||||
},
|
||||
},
|
||||
|
||||
request: {
|
||||
url: (params: RedditGetUserParams) => {
|
||||
const username = params.username.trim().replace(/^u\//, '')
|
||||
const validation = validatePathSegment(username, { paramName: 'username' })
|
||||
if (!validation.isValid) {
|
||||
throw new Error(validation.error)
|
||||
}
|
||||
return `https://oauth.reddit.com/user/${username}/about?raw_json=1`
|
||||
},
|
||||
method: 'GET',
|
||||
headers: (params: RedditGetUserParams) => {
|
||||
if (!params.accessToken) {
|
||||
throw new Error('Access token is required for Reddit API')
|
||||
}
|
||||
|
||||
return {
|
||||
Authorization: `Bearer ${params.accessToken}`,
|
||||
'User-Agent': 'sim-studio/1.0 (https://github.com/simstudioai/sim)',
|
||||
Accept: 'application/json',
|
||||
}
|
||||
},
|
||||
},
|
||||
|
||||
transformResponse: async (response: Response) => {
|
||||
const data = await response.json()
|
||||
|
||||
if (!response.ok) {
|
||||
return {
|
||||
success: false,
|
||||
output: {
|
||||
id: '',
|
||||
name: '',
|
||||
created_utc: 0,
|
||||
link_karma: 0,
|
||||
comment_karma: 0,
|
||||
total_karma: 0,
|
||||
is_gold: false,
|
||||
is_mod: false,
|
||||
has_verified_email: false,
|
||||
icon_img: '',
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
const user = data.data || data
|
||||
|
||||
return {
|
||||
success: true,
|
||||
output: {
|
||||
id: user.id ?? '',
|
||||
name: user.name ?? '',
|
||||
created_utc: user.created_utc ?? 0,
|
||||
link_karma: user.link_karma ?? 0,
|
||||
comment_karma: user.comment_karma ?? 0,
|
||||
total_karma: user.total_karma ?? 0,
|
||||
is_gold: user.is_gold ?? false,
|
||||
is_mod: user.is_mod ?? false,
|
||||
has_verified_email: user.has_verified_email ?? false,
|
||||
icon_img: user.icon_img ?? '',
|
||||
},
|
||||
}
|
||||
},
|
||||
|
||||
outputs: {
|
||||
id: { type: 'string', description: 'User ID' },
|
||||
name: { type: 'string', description: 'Username' },
|
||||
created_utc: { type: 'number', description: 'Account creation time in UTC epoch seconds' },
|
||||
link_karma: { type: 'number', description: 'Total link karma' },
|
||||
comment_karma: { type: 'number', description: 'Total comment karma' },
|
||||
total_karma: { type: 'number', description: 'Combined total karma' },
|
||||
is_gold: { type: 'boolean', description: 'Whether user has Reddit Premium' },
|
||||
is_mod: { type: 'boolean', description: 'Whether user is a moderator' },
|
||||
has_verified_email: { type: 'boolean', description: 'Whether email is verified' },
|
||||
icon_img: { type: 'string', description: 'User avatar/icon URL' },
|
||||
},
|
||||
}
|
||||
@@ -43,7 +43,7 @@ export const hotPostsTool: ToolConfig<HotPostsParams, RedditHotPostsResponse> =
|
||||
request: {
|
||||
url: (params) => {
|
||||
const subreddit = normalizeSubreddit(params.subreddit)
|
||||
const limit = Math.min(Math.max(1, params.limit || 10), 100)
|
||||
const limit = Math.min(Math.max(1, params.limit ?? 10), 100)
|
||||
|
||||
return `https://oauth.reddit.com/r/${subreddit}/hot?limit=${limit}&raw_json=1`
|
||||
},
|
||||
@@ -65,25 +65,26 @@ export const hotPostsTool: ToolConfig<HotPostsParams, RedditHotPostsResponse> =
|
||||
const data = await response.json()
|
||||
|
||||
// Process the posts data with proper error handling
|
||||
const posts: RedditPost[] = data.data.children.map((child: any) => {
|
||||
const post = child.data || {}
|
||||
return {
|
||||
id: post.id || '',
|
||||
title: post.title || '',
|
||||
author: post.author || '[deleted]',
|
||||
url: post.url || '',
|
||||
permalink: post.permalink ? `https://www.reddit.com${post.permalink}` : '',
|
||||
created_utc: post.created_utc || 0,
|
||||
score: post.score || 0,
|
||||
num_comments: post.num_comments || 0,
|
||||
selftext: post.selftext || '',
|
||||
thumbnail:
|
||||
post.thumbnail !== 'self' && post.thumbnail !== 'default' ? post.thumbnail : undefined,
|
||||
is_self: !!post.is_self,
|
||||
subreddit: post.subreddit || requestParams?.subreddit || '',
|
||||
subreddit_name_prefixed: post.subreddit_name_prefixed || '',
|
||||
}
|
||||
})
|
||||
const posts: RedditPost[] =
|
||||
data.data?.children?.map((child: any) => {
|
||||
const post = child.data || {}
|
||||
return {
|
||||
id: post.id ?? '',
|
||||
name: post.name ?? '',
|
||||
title: post.title ?? '',
|
||||
author: post.author || '[deleted]',
|
||||
url: post.url ?? '',
|
||||
permalink: post.permalink ? `https://www.reddit.com${post.permalink}` : '',
|
||||
created_utc: post.created_utc ?? 0,
|
||||
score: post.score ?? 0,
|
||||
num_comments: post.num_comments ?? 0,
|
||||
selftext: post.selftext ?? '',
|
||||
thumbnail:
|
||||
post.thumbnail !== 'self' && post.thumbnail !== 'default' ? post.thumbnail : undefined,
|
||||
is_self: !!post.is_self,
|
||||
subreddit: post.subreddit ?? requestParams?.subreddit ?? '',
|
||||
}
|
||||
}) || []
|
||||
|
||||
// Extract the subreddit name from the response data with fallback
|
||||
const subreddit =
|
||||
@@ -95,6 +96,8 @@ export const hotPostsTool: ToolConfig<HotPostsParams, RedditHotPostsResponse> =
|
||||
output: {
|
||||
subreddit,
|
||||
posts,
|
||||
after: data.data?.after ?? null,
|
||||
before: data.data?.before ?? null,
|
||||
},
|
||||
}
|
||||
},
|
||||
@@ -112,6 +115,7 @@ export const hotPostsTool: ToolConfig<HotPostsParams, RedditHotPostsResponse> =
|
||||
type: 'object',
|
||||
properties: {
|
||||
id: { type: 'string', description: 'Post ID' },
|
||||
name: { type: 'string', description: 'Thing fullname (t3_xxxxx)' },
|
||||
title: { type: 'string', description: 'Post title' },
|
||||
author: { type: 'string', description: 'Author username' },
|
||||
url: { type: 'string', description: 'Post URL' },
|
||||
@@ -123,9 +127,18 @@ export const hotPostsTool: ToolConfig<HotPostsParams, RedditHotPostsResponse> =
|
||||
selftext: { type: 'string', description: 'Text content for self posts' },
|
||||
thumbnail: { type: 'string', description: 'Thumbnail URL' },
|
||||
subreddit: { type: 'string', description: 'Subreddit name' },
|
||||
subreddit_name_prefixed: { type: 'string', description: 'Subreddit name with r/ prefix' },
|
||||
},
|
||||
},
|
||||
},
|
||||
after: {
|
||||
type: 'string',
|
||||
description: 'Fullname of the last item for forward pagination',
|
||||
optional: true,
|
||||
},
|
||||
before: {
|
||||
type: 'string',
|
||||
description: 'Fullname of the first item for backward pagination',
|
||||
optional: true,
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
@@ -2,11 +2,16 @@ import { deleteTool } from '@/tools/reddit/delete'
|
||||
import { editTool } from '@/tools/reddit/edit'
|
||||
import { getCommentsTool } from '@/tools/reddit/get_comments'
|
||||
import { getControversialTool } from '@/tools/reddit/get_controversial'
|
||||
import { getMeTool } from '@/tools/reddit/get_me'
|
||||
import { getMessagesTool } from '@/tools/reddit/get_messages'
|
||||
import { getPostsTool } from '@/tools/reddit/get_posts'
|
||||
import { getSubredditInfoTool } from '@/tools/reddit/get_subreddit_info'
|
||||
import { getUserTool } from '@/tools/reddit/get_user'
|
||||
import { hotPostsTool } from '@/tools/reddit/hot_posts'
|
||||
import { replyTool } from '@/tools/reddit/reply'
|
||||
import { saveTool, unsaveTool } from '@/tools/reddit/save'
|
||||
import { searchTool } from '@/tools/reddit/search'
|
||||
import { sendMessageTool } from '@/tools/reddit/send_message'
|
||||
import { submitPostTool } from '@/tools/reddit/submit_post'
|
||||
import { subscribeTool } from '@/tools/reddit/subscribe'
|
||||
import { voteTool } from '@/tools/reddit/vote'
|
||||
@@ -24,3 +29,8 @@ export const redditReplyTool = replyTool
|
||||
export const redditEditTool = editTool
|
||||
export const redditDeleteTool = deleteTool
|
||||
export const redditSubscribeTool = subscribeTool
|
||||
export const redditGetMeTool = getMeTool
|
||||
export const redditGetUserTool = getUserTool
|
||||
export const redditSendMessageTool = sendMessageTool
|
||||
export const redditGetMessagesTool = getMessagesTool
|
||||
export const redditGetSubredditInfoTool = getSubredditInfoTool
|
||||
|
||||
@@ -32,6 +32,12 @@ export const replyTool: ToolConfig<RedditReplyParams, RedditWriteResponse> = {
|
||||
visibility: 'user-or-llm',
|
||||
description: 'Comment text in markdown format (e.g., "Great post! Here is my **reply**")',
|
||||
},
|
||||
return_rtjson: {
|
||||
type: 'boolean',
|
||||
required: false,
|
||||
visibility: 'user-or-llm',
|
||||
description: 'Return response in Rich Text JSON format',
|
||||
},
|
||||
},
|
||||
|
||||
request: {
|
||||
@@ -55,6 +61,9 @@ export const replyTool: ToolConfig<RedditReplyParams, RedditWriteResponse> = {
|
||||
api_type: 'json',
|
||||
})
|
||||
|
||||
if (params.return_rtjson !== undefined)
|
||||
formData.append('return_rtjson', params.return_rtjson.toString())
|
||||
|
||||
return formData.toString() as unknown as Record<string, any>
|
||||
},
|
||||
},
|
||||
@@ -62,6 +71,17 @@ export const replyTool: ToolConfig<RedditReplyParams, RedditWriteResponse> = {
|
||||
transformResponse: async (response: Response) => {
|
||||
const data = await response.json()
|
||||
|
||||
if (!response.ok) {
|
||||
const errorMsg = data?.message || `HTTP error ${response.status}`
|
||||
return {
|
||||
success: false,
|
||||
output: {
|
||||
success: false,
|
||||
message: `Failed to post reply: ${errorMsg}`,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
// Reddit API returns errors in json.errors array
|
||||
if (data.json?.errors && data.json.errors.length > 0) {
|
||||
const errors = data.json.errors.map((err: any) => err.join(': ')).join(', ')
|
||||
|
||||
@@ -83,13 +83,26 @@ export const searchTool: ToolConfig<RedditSearchParams, RedditPostsResponse> = {
|
||||
visibility: 'user-or-llm',
|
||||
description: 'Show items that would normally be filtered (e.g., "all")',
|
||||
},
|
||||
type: {
|
||||
type: 'string',
|
||||
required: false,
|
||||
visibility: 'user-or-llm',
|
||||
description:
|
||||
'Type of search results: "link" (posts), "sr" (subreddits), or "user" (users). Default: "link"',
|
||||
},
|
||||
sr_detail: {
|
||||
type: 'boolean',
|
||||
required: false,
|
||||
visibility: 'user-or-llm',
|
||||
description: 'Expand subreddit details in the response',
|
||||
},
|
||||
},
|
||||
|
||||
request: {
|
||||
url: (params: RedditSearchParams) => {
|
||||
const subreddit = normalizeSubreddit(params.subreddit)
|
||||
const sort = params.sort || 'relevance'
|
||||
const limit = Math.min(Math.max(1, params.limit || 10), 100)
|
||||
const limit = Math.min(Math.max(1, params.limit ?? 10), 100)
|
||||
const restrict_sr = params.restrict_sr !== false // Default to true
|
||||
|
||||
// Build URL with appropriate parameters using OAuth endpoint
|
||||
@@ -111,6 +124,8 @@ export const searchTool: ToolConfig<RedditSearchParams, RedditPostsResponse> = {
|
||||
if (params.before) urlParams.append('before', params.before)
|
||||
if (params.count !== undefined) urlParams.append('count', Number(params.count).toString())
|
||||
if (params.show) urlParams.append('show', params.show)
|
||||
if (params.type) urlParams.append('type', params.type)
|
||||
if (params.sr_detail !== undefined) urlParams.append('sr_detail', params.sr_detail.toString())
|
||||
|
||||
return `https://oauth.reddit.com/r/${subreddit}/search?${urlParams.toString()}`
|
||||
},
|
||||
@@ -133,25 +148,26 @@ export const searchTool: ToolConfig<RedditSearchParams, RedditPostsResponse> = {
|
||||
|
||||
// Extract subreddit name from response (with fallback)
|
||||
const subredditName =
|
||||
data.data?.children[0]?.data?.subreddit || requestParams?.subreddit || 'unknown'
|
||||
data.data?.children?.[0]?.data?.subreddit || requestParams?.subreddit || 'unknown'
|
||||
|
||||
// Transform posts data
|
||||
const posts =
|
||||
data.data?.children?.map((child: any) => {
|
||||
const post = child.data || {}
|
||||
return {
|
||||
id: post.id || '',
|
||||
title: post.title || '',
|
||||
id: post.id ?? '',
|
||||
name: post.name ?? '',
|
||||
title: post.title ?? '',
|
||||
author: post.author || '[deleted]',
|
||||
url: post.url || '',
|
||||
url: post.url ?? '',
|
||||
permalink: post.permalink ? `https://www.reddit.com${post.permalink}` : '',
|
||||
created_utc: post.created_utc || 0,
|
||||
score: post.score || 0,
|
||||
num_comments: post.num_comments || 0,
|
||||
created_utc: post.created_utc ?? 0,
|
||||
score: post.score ?? 0,
|
||||
num_comments: post.num_comments ?? 0,
|
||||
is_self: !!post.is_self,
|
||||
selftext: post.selftext || '',
|
||||
thumbnail: post.thumbnail || '',
|
||||
subreddit: post.subreddit || subredditName,
|
||||
selftext: post.selftext ?? '',
|
||||
thumbnail: post.thumbnail ?? '',
|
||||
subreddit: post.subreddit ?? subredditName,
|
||||
}
|
||||
}) || []
|
||||
|
||||
@@ -160,6 +176,8 @@ export const searchTool: ToolConfig<RedditSearchParams, RedditPostsResponse> = {
|
||||
output: {
|
||||
subreddit: subredditName,
|
||||
posts,
|
||||
after: data.data?.after ?? null,
|
||||
before: data.data?.before ?? null,
|
||||
},
|
||||
}
|
||||
},
|
||||
@@ -177,6 +195,7 @@ export const searchTool: ToolConfig<RedditSearchParams, RedditPostsResponse> = {
|
||||
type: 'object',
|
||||
properties: {
|
||||
id: { type: 'string', description: 'Post ID' },
|
||||
name: { type: 'string', description: 'Thing fullname (t3_xxxxx)' },
|
||||
title: { type: 'string', description: 'Post title' },
|
||||
author: { type: 'string', description: 'Author username' },
|
||||
url: { type: 'string', description: 'Post URL' },
|
||||
@@ -191,5 +210,15 @@ export const searchTool: ToolConfig<RedditSearchParams, RedditPostsResponse> = {
|
||||
},
|
||||
},
|
||||
},
|
||||
after: {
|
||||
type: 'string',
|
||||
description: 'Fullname of the last item for forward pagination',
|
||||
optional: true,
|
||||
},
|
||||
before: {
|
||||
type: 'string',
|
||||
description: 'Fullname of the first item for backward pagination',
|
||||
optional: true,
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
122
apps/sim/tools/reddit/send_message.ts
Normal file
122
apps/sim/tools/reddit/send_message.ts
Normal file
@@ -0,0 +1,122 @@
|
||||
import type { RedditSendMessageParams, RedditWriteResponse } from '@/tools/reddit/types'
|
||||
import type { ToolConfig } from '@/tools/types'
|
||||
|
||||
export const sendMessageTool: ToolConfig<RedditSendMessageParams, RedditWriteResponse> = {
|
||||
id: 'reddit_send_message',
|
||||
name: 'Send Reddit Message',
|
||||
description: 'Send a private message to a Reddit user',
|
||||
version: '1.0.0',
|
||||
|
||||
oauth: {
|
||||
required: true,
|
||||
provider: 'reddit',
|
||||
},
|
||||
|
||||
params: {
|
||||
accessToken: {
|
||||
type: 'string',
|
||||
required: true,
|
||||
visibility: 'hidden',
|
||||
description: 'Access token for Reddit API',
|
||||
},
|
||||
to: {
|
||||
type: 'string',
|
||||
required: true,
|
||||
visibility: 'user-or-llm',
|
||||
description: 'Recipient username (e.g., "example_user") or subreddit (e.g., "/r/subreddit")',
|
||||
},
|
||||
subject: {
|
||||
type: 'string',
|
||||
required: true,
|
||||
visibility: 'user-or-llm',
|
||||
description: 'Message subject (max 100 characters)',
|
||||
},
|
||||
text: {
|
||||
type: 'string',
|
||||
required: true,
|
||||
visibility: 'user-or-llm',
|
||||
description: 'Message body in markdown format',
|
||||
},
|
||||
from_sr: {
|
||||
type: 'string',
|
||||
required: false,
|
||||
visibility: 'user-or-llm',
|
||||
description: 'Subreddit name to send the message from (requires moderator mail permission)',
|
||||
},
|
||||
},
|
||||
|
||||
request: {
|
||||
url: () => 'https://oauth.reddit.com/api/compose',
|
||||
method: 'POST',
|
||||
headers: (params: RedditSendMessageParams) => {
|
||||
if (!params.accessToken) {
|
||||
throw new Error('Access token is required for Reddit API')
|
||||
}
|
||||
|
||||
return {
|
||||
Authorization: `Bearer ${params.accessToken}`,
|
||||
'User-Agent': 'sim-studio/1.0 (https://github.com/simstudioai/sim)',
|
||||
'Content-Type': 'application/x-www-form-urlencoded',
|
||||
}
|
||||
},
|
||||
body: (params: RedditSendMessageParams) => {
|
||||
const formData = new URLSearchParams({
|
||||
to: params.to.trim(),
|
||||
subject: params.subject,
|
||||
text: params.text,
|
||||
api_type: 'json',
|
||||
})
|
||||
|
||||
if (params.from_sr) {
|
||||
formData.append('from_sr', params.from_sr.trim())
|
||||
}
|
||||
|
||||
return formData.toString() as unknown as Record<string, any>
|
||||
},
|
||||
},
|
||||
|
||||
transformResponse: async (response: Response) => {
|
||||
const data = await response.json()
|
||||
|
||||
if (!response.ok) {
|
||||
const errorMsg = data?.message || `HTTP error ${response.status}`
|
||||
return {
|
||||
success: false,
|
||||
output: {
|
||||
success: false,
|
||||
message: `Failed to send message: ${errorMsg}`,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
if (data.json?.errors && data.json.errors.length > 0) {
|
||||
const errors = data.json.errors.map((err: any) => err.join(': ')).join(', ')
|
||||
return {
|
||||
success: false,
|
||||
output: {
|
||||
success: false,
|
||||
message: `Failed to send message: ${errors}`,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
success: true,
|
||||
output: {
|
||||
success: true,
|
||||
message: 'Message sent successfully',
|
||||
},
|
||||
}
|
||||
},
|
||||
|
||||
outputs: {
|
||||
success: {
|
||||
type: 'boolean',
|
||||
description: 'Whether the message was sent successfully',
|
||||
},
|
||||
message: {
|
||||
type: 'string',
|
||||
description: 'Success or error message',
|
||||
},
|
||||
},
|
||||
}
|
||||
@@ -64,6 +64,24 @@ export const submitPostTool: ToolConfig<RedditSubmitParams, RedditWriteResponse>
|
||||
visibility: 'user-or-llm',
|
||||
description: 'Send reply notifications to inbox (default: true)',
|
||||
},
|
||||
flair_id: {
|
||||
type: 'string',
|
||||
required: false,
|
||||
visibility: 'user-or-llm',
|
||||
description: 'Flair template UUID for the post (max 36 characters)',
|
||||
},
|
||||
flair_text: {
|
||||
type: 'string',
|
||||
required: false,
|
||||
visibility: 'user-or-llm',
|
||||
description: 'Flair text to display on the post (max 64 characters)',
|
||||
},
|
||||
collection_id: {
|
||||
type: 'string',
|
||||
required: false,
|
||||
visibility: 'user-or-llm',
|
||||
description: 'Collection UUID to add the post to',
|
||||
},
|
||||
},
|
||||
|
||||
request: {
|
||||
@@ -105,6 +123,9 @@ export const submitPostTool: ToolConfig<RedditSubmitParams, RedditWriteResponse>
|
||||
// Add optional parameters
|
||||
if (params.nsfw !== undefined) formData.append('nsfw', params.nsfw.toString())
|
||||
if (params.spoiler !== undefined) formData.append('spoiler', params.spoiler.toString())
|
||||
if (params.flair_id) formData.append('flair_id', params.flair_id)
|
||||
if (params.flair_text) formData.append('flair_text', params.flair_text)
|
||||
if (params.collection_id) formData.append('collection_id', params.collection_id)
|
||||
if (params.send_replies !== undefined)
|
||||
formData.append('sendreplies', params.send_replies.toString())
|
||||
|
||||
@@ -138,7 +159,9 @@ export const submitPostTool: ToolConfig<RedditSubmitParams, RedditWriteResponse>
|
||||
id: postData?.id,
|
||||
name: postData?.name,
|
||||
url: postData?.url,
|
||||
permalink: `https://www.reddit.com${postData?.url}`,
|
||||
permalink: postData?.permalink
|
||||
? `https://www.reddit.com${postData.permalink}`
|
||||
: (postData?.url ?? ''),
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
@@ -11,12 +11,14 @@ import type { OutputProperty, ToolResponse } from '@/tools/types'
|
||||
*/
|
||||
export const POST_OUTPUT_PROPERTIES = {
|
||||
id: { type: 'string', description: 'Post ID' },
|
||||
name: { type: 'string', description: 'Thing fullname (t3_xxxxx)' },
|
||||
title: { type: 'string', description: 'Post title (may contain newlines)' },
|
||||
author: { type: 'string', description: 'Poster account name (null for promotional links)' },
|
||||
url: { type: 'string', description: 'External link URL or self-post permalink' },
|
||||
permalink: { type: 'string', description: 'Relative permanent link URL' },
|
||||
created_utc: { type: 'number', description: 'Creation time in UTC epoch seconds' },
|
||||
score: { type: 'number', description: 'Net upvotes minus downvotes' },
|
||||
upvote_ratio: { type: 'number', description: 'Ratio of upvotes to total votes' },
|
||||
num_comments: { type: 'number', description: 'Total comments including removed ones' },
|
||||
is_self: { type: 'boolean', description: 'Indicates self-post vs external link' },
|
||||
selftext: {
|
||||
@@ -55,6 +57,7 @@ export const POST_OUTPUT_PROPERTIES = {
|
||||
*/
|
||||
export const COMMENT_OUTPUT_PROPERTIES = {
|
||||
id: { type: 'string', description: 'Comment ID' },
|
||||
name: { type: 'string', description: 'Thing fullname (t1_xxxxx)' },
|
||||
author: { type: 'string', description: 'Commenter account name' },
|
||||
body: { type: 'string', description: 'Raw unformatted comment text with markup characters' },
|
||||
body_html: { type: 'string', description: 'Formatted HTML version of comment' },
|
||||
@@ -70,6 +73,7 @@ export const COMMENT_OUTPUT_PROPERTIES = {
|
||||
type: 'string',
|
||||
description: 'Distinction: null/"moderator"/"admin"/"special"',
|
||||
},
|
||||
is_submitter: { type: 'boolean', description: 'Whether commenter is the post author' },
|
||||
ups: { type: 'number', description: 'Upvote count' },
|
||||
downs: { type: 'number', description: 'Downvote count' },
|
||||
likes: { type: 'boolean', description: 'User vote: true (up), false (down), null (none)' },
|
||||
@@ -88,6 +92,7 @@ export const COMMENT_OUTPUT_PROPERTIES = {
|
||||
*/
|
||||
export const POST_LISTING_OUTPUT_PROPERTIES = {
|
||||
id: { type: 'string', description: 'Post ID' },
|
||||
name: { type: 'string', description: 'Thing fullname (t3_xxxxx)' },
|
||||
title: { type: 'string', description: 'Post title' },
|
||||
author: { type: 'string', description: 'Author username' },
|
||||
url: { type: 'string', description: 'Post URL' },
|
||||
@@ -106,6 +111,7 @@ export const POST_LISTING_OUTPUT_PROPERTIES = {
|
||||
*/
|
||||
export const COMMENT_LISTING_OUTPUT_PROPERTIES = {
|
||||
id: { type: 'string', description: 'Comment ID' },
|
||||
name: { type: 'string', description: 'Thing fullname (t1_xxxxx)' },
|
||||
author: { type: 'string', description: 'Comment author' },
|
||||
body: { type: 'string', description: 'Comment text' },
|
||||
score: { type: 'number', description: 'Comment score' },
|
||||
@@ -130,6 +136,7 @@ export const COMMENT_WITH_REPLIES_OUTPUT_PROPERTIES = {
|
||||
*/
|
||||
export const POST_METADATA_OUTPUT_PROPERTIES = {
|
||||
id: { type: 'string', description: 'Post ID' },
|
||||
name: { type: 'string', description: 'Thing fullname (t3_xxxxx)' },
|
||||
title: { type: 'string', description: 'Post title' },
|
||||
author: { type: 'string', description: 'Post author' },
|
||||
selftext: { type: 'string', description: 'Post text content' },
|
||||
@@ -237,6 +244,7 @@ export const EDIT_DATA_OUTPUT: OutputProperty = {
|
||||
|
||||
export interface RedditPost {
|
||||
id: string
|
||||
name: string
|
||||
title: string
|
||||
author: string
|
||||
url: string
|
||||
@@ -248,11 +256,11 @@ export interface RedditPost {
|
||||
thumbnail?: string
|
||||
is_self: boolean
|
||||
subreddit: string
|
||||
subreddit_name_prefixed: string
|
||||
}
|
||||
|
||||
export interface RedditComment {
|
||||
id: string
|
||||
name: string
|
||||
author: string
|
||||
body: string
|
||||
created_utc: number
|
||||
@@ -261,62 +269,72 @@ export interface RedditComment {
|
||||
replies: RedditComment[]
|
||||
}
|
||||
|
||||
export interface RedditMessage {
|
||||
id: string
|
||||
name: string
|
||||
author: string
|
||||
dest: string
|
||||
subject: string
|
||||
body: string
|
||||
created_utc: number
|
||||
new: boolean
|
||||
was_comment: boolean
|
||||
context: string
|
||||
distinguished: string | null
|
||||
}
|
||||
|
||||
export interface RedditHotPostsResponse extends ToolResponse {
|
||||
output: {
|
||||
subreddit: string
|
||||
posts: RedditPost[]
|
||||
after: string | null
|
||||
before: string | null
|
||||
}
|
||||
}
|
||||
|
||||
// Parameters for the generalized get_posts tool
|
||||
export interface RedditPostsParams {
|
||||
subreddit: string
|
||||
sort?: 'hot' | 'new' | 'top' | 'rising'
|
||||
sort?: 'hot' | 'new' | 'top' | 'rising' | 'controversial'
|
||||
limit?: number
|
||||
time?: 'day' | 'week' | 'month' | 'year' | 'all'
|
||||
// Pagination parameters
|
||||
time?: 'hour' | 'day' | 'week' | 'month' | 'year' | 'all'
|
||||
after?: string
|
||||
before?: string
|
||||
count?: number
|
||||
show?: string
|
||||
sr_detail?: boolean
|
||||
g?: string
|
||||
accessToken?: string
|
||||
}
|
||||
|
||||
// Response for the generalized get_posts tool
|
||||
export interface RedditPostsResponse extends ToolResponse {
|
||||
output: {
|
||||
subreddit: string
|
||||
posts: RedditPost[]
|
||||
after: string | null
|
||||
before: string | null
|
||||
}
|
||||
}
|
||||
|
||||
// Parameters for the get_comments tool
|
||||
export interface RedditCommentsParams {
|
||||
postId: string
|
||||
subreddit: string
|
||||
sort?: 'confidence' | 'top' | 'new' | 'controversial' | 'old' | 'random' | 'qa'
|
||||
limit?: number
|
||||
// Comment-specific parameters
|
||||
depth?: number
|
||||
context?: number
|
||||
showedits?: boolean
|
||||
showmore?: boolean
|
||||
showtitle?: boolean
|
||||
threaded?: boolean
|
||||
truncate?: number
|
||||
// Pagination parameters
|
||||
after?: string
|
||||
before?: string
|
||||
count?: number
|
||||
comment?: string
|
||||
accessToken?: string
|
||||
}
|
||||
|
||||
// Response for the get_comments tool
|
||||
export interface RedditCommentsResponse extends ToolResponse {
|
||||
output: {
|
||||
post: {
|
||||
id: string
|
||||
name: string
|
||||
title: string
|
||||
author: string
|
||||
selftext?: string
|
||||
@@ -328,7 +346,6 @@ export interface RedditCommentsResponse extends ToolResponse {
|
||||
}
|
||||
}
|
||||
|
||||
// Parameters for controversial posts
|
||||
export interface RedditControversialParams {
|
||||
subreddit: string
|
||||
time?: 'hour' | 'day' | 'week' | 'month' | 'year' | 'all'
|
||||
@@ -341,7 +358,6 @@ export interface RedditControversialParams {
|
||||
accessToken?: string
|
||||
}
|
||||
|
||||
// Parameters for search
|
||||
export interface RedditSearchParams {
|
||||
subreddit: string
|
||||
query: string
|
||||
@@ -353,10 +369,11 @@ export interface RedditSearchParams {
|
||||
count?: number
|
||||
show?: string
|
||||
restrict_sr?: boolean
|
||||
type?: 'link' | 'sr' | 'user'
|
||||
sr_detail?: boolean
|
||||
accessToken?: string
|
||||
}
|
||||
|
||||
// Parameters for submit post
|
||||
export interface RedditSubmitParams {
|
||||
subreddit: string
|
||||
title: string
|
||||
@@ -365,51 +382,81 @@ export interface RedditSubmitParams {
|
||||
nsfw?: boolean
|
||||
spoiler?: boolean
|
||||
send_replies?: boolean
|
||||
flair_id?: string
|
||||
flair_text?: string
|
||||
collection_id?: string
|
||||
accessToken?: string
|
||||
}
|
||||
|
||||
// Parameters for vote
|
||||
export interface RedditVoteParams {
|
||||
id: string // Thing fullname (e.g., t3_xxxxx for post, t1_xxxxx for comment)
|
||||
dir: 1 | 0 | -1 // 1 = upvote, 0 = unvote, -1 = downvote
|
||||
id: string
|
||||
dir: 1 | 0 | -1
|
||||
accessToken?: string
|
||||
}
|
||||
|
||||
// Parameters for save/unsave
|
||||
export interface RedditSaveParams {
|
||||
id: string // Thing fullname
|
||||
category?: string // Save category
|
||||
id: string
|
||||
category?: string
|
||||
accessToken?: string
|
||||
}
|
||||
|
||||
// Parameters for reply
|
||||
export interface RedditReplyParams {
|
||||
parent_id: string // Thing fullname to reply to
|
||||
text: string // Comment text in markdown
|
||||
parent_id: string
|
||||
text: string
|
||||
return_rtjson?: boolean
|
||||
accessToken?: string
|
||||
}
|
||||
|
||||
// Parameters for edit
|
||||
export interface RedditEditParams {
|
||||
thing_id: string // Thing fullname to edit
|
||||
text: string // New text in markdown
|
||||
thing_id: string
|
||||
text: string
|
||||
accessToken?: string
|
||||
}
|
||||
|
||||
// Parameters for delete
|
||||
export interface RedditDeleteParams {
|
||||
id: string // Thing fullname to delete
|
||||
id: string
|
||||
accessToken?: string
|
||||
}
|
||||
|
||||
// Parameters for subscribe/unsubscribe
|
||||
export interface RedditSubscribeParams {
|
||||
subreddit: string
|
||||
action: 'sub' | 'unsub'
|
||||
accessToken?: string
|
||||
}
|
||||
|
||||
// Generic success response for write operations
|
||||
export interface RedditGetMeParams {
|
||||
accessToken?: string
|
||||
}
|
||||
|
||||
export interface RedditGetUserParams {
|
||||
username: string
|
||||
accessToken?: string
|
||||
}
|
||||
|
||||
export interface RedditSendMessageParams {
|
||||
to: string
|
||||
subject: string
|
||||
text: string
|
||||
from_sr?: string
|
||||
accessToken?: string
|
||||
}
|
||||
|
||||
export interface RedditGetMessagesParams {
|
||||
where?: 'inbox' | 'unread' | 'sent' | 'messages' | 'comments' | 'selfreply' | 'mentions'
|
||||
limit?: number
|
||||
after?: string
|
||||
before?: string
|
||||
mark?: boolean
|
||||
count?: number
|
||||
show?: string
|
||||
accessToken?: string
|
||||
}
|
||||
|
||||
export interface RedditGetSubredditInfoParams {
|
||||
subreddit: string
|
||||
accessToken?: string
|
||||
}
|
||||
|
||||
export interface RedditWriteResponse extends ToolResponse {
|
||||
output: {
|
||||
success: boolean
|
||||
@@ -418,8 +465,54 @@ export interface RedditWriteResponse extends ToolResponse {
|
||||
}
|
||||
}
|
||||
|
||||
export interface RedditUserResponse extends ToolResponse {
|
||||
output: {
|
||||
id: string
|
||||
name: string
|
||||
created_utc: number
|
||||
link_karma: number
|
||||
comment_karma: number
|
||||
total_karma: number
|
||||
is_gold: boolean
|
||||
is_mod: boolean
|
||||
has_verified_email: boolean
|
||||
icon_img: string
|
||||
}
|
||||
}
|
||||
|
||||
export interface RedditMessagesResponse extends ToolResponse {
|
||||
output: {
|
||||
messages: RedditMessage[]
|
||||
after: string | null
|
||||
before: string | null
|
||||
}
|
||||
}
|
||||
|
||||
export interface RedditSubredditInfoResponse extends ToolResponse {
|
||||
output: {
|
||||
id: string
|
||||
name: string
|
||||
display_name: string
|
||||
title: string
|
||||
description: string
|
||||
public_description: string
|
||||
subscribers: number
|
||||
accounts_active: number
|
||||
created_utc: number
|
||||
over18: boolean
|
||||
lang: string
|
||||
subreddit_type: string
|
||||
url: string
|
||||
icon_img: string | null
|
||||
banner_img: string | null
|
||||
}
|
||||
}
|
||||
|
||||
export type RedditResponse =
|
||||
| RedditHotPostsResponse
|
||||
| RedditPostsResponse
|
||||
| RedditCommentsResponse
|
||||
| RedditWriteResponse
|
||||
| RedditUserResponse
|
||||
| RedditMessagesResponse
|
||||
| RedditSubredditInfoResponse
|
||||
|
||||
@@ -1,10 +1,19 @@
|
||||
import { validatePathSegment } from '@/lib/core/security/input-validation'
|
||||
|
||||
const SUBREDDIT_PREFIX = /^r\//
|
||||
|
||||
/**
|
||||
* Normalizes a subreddit name by removing the 'r/' prefix if present and trimming whitespace.
|
||||
* Validates the result to prevent path traversal attacks.
|
||||
* @param subreddit - The subreddit name to normalize
|
||||
* @returns The normalized subreddit name without the 'r/' prefix
|
||||
* @throws Error if the subreddit name contains invalid characters
|
||||
*/
|
||||
export function normalizeSubreddit(subreddit: string): string {
|
||||
return subreddit.trim().replace(SUBREDDIT_PREFIX, '')
|
||||
const normalized = subreddit.trim().replace(SUBREDDIT_PREFIX, '')
|
||||
const validation = validatePathSegment(normalized, { paramName: 'subreddit' })
|
||||
if (!validation.isValid) {
|
||||
throw new Error(validation.error)
|
||||
}
|
||||
return normalized
|
||||
}
|
||||
|
||||
@@ -1606,11 +1606,16 @@ import {
|
||||
redditEditTool,
|
||||
redditGetCommentsTool,
|
||||
redditGetControversialTool,
|
||||
redditGetMessagesTool,
|
||||
redditGetMeTool,
|
||||
redditGetPostsTool,
|
||||
redditGetSubredditInfoTool,
|
||||
redditGetUserTool,
|
||||
redditHotPostsTool,
|
||||
redditReplyTool,
|
||||
redditSaveTool,
|
||||
redditSearchTool,
|
||||
redditSendMessageTool,
|
||||
redditSubmitPostTool,
|
||||
redditSubscribeTool,
|
||||
redditUnsaveTool,
|
||||
@@ -1797,18 +1802,27 @@ import {
|
||||
import {
|
||||
slackAddReactionTool,
|
||||
slackCanvasTool,
|
||||
slackCreateChannelCanvasTool,
|
||||
slackDeleteMessageTool,
|
||||
slackDownloadTool,
|
||||
slackEditCanvasTool,
|
||||
slackEphemeralMessageTool,
|
||||
slackGetChannelInfoTool,
|
||||
slackGetMessageTool,
|
||||
slackGetThreadTool,
|
||||
slackGetUserPresenceTool,
|
||||
slackGetUserTool,
|
||||
slackListChannelsTool,
|
||||
slackListMembersTool,
|
||||
slackListUsersTool,
|
||||
slackMessageReaderTool,
|
||||
slackMessageTool,
|
||||
slackOpenViewTool,
|
||||
slackPublishViewTool,
|
||||
slackPushViewTool,
|
||||
slackRemoveReactionTool,
|
||||
slackUpdateMessageTool,
|
||||
slackUpdateViewTool,
|
||||
} from '@/tools/slack'
|
||||
import { smsSendTool } from '@/tools/sms'
|
||||
import { smtpSendMailTool } from '@/tools/smtp'
|
||||
@@ -2611,6 +2625,15 @@ export const tools: Record<string, ToolConfig> = {
|
||||
slack_update_message: slackUpdateMessageTool,
|
||||
slack_delete_message: slackDeleteMessageTool,
|
||||
slack_add_reaction: slackAddReactionTool,
|
||||
slack_remove_reaction: slackRemoveReactionTool,
|
||||
slack_get_channel_info: slackGetChannelInfoTool,
|
||||
slack_get_user_presence: slackGetUserPresenceTool,
|
||||
slack_open_view: slackOpenViewTool,
|
||||
slack_update_view: slackUpdateViewTool,
|
||||
slack_push_view: slackPushViewTool,
|
||||
slack_publish_view: slackPublishViewTool,
|
||||
slack_edit_canvas: slackEditCanvasTool,
|
||||
slack_create_channel_canvas: slackCreateChannelCanvasTool,
|
||||
github_repo_info: githubRepoInfoTool,
|
||||
github_repo_info_v2: githubRepoInfoV2Tool,
|
||||
github_latest_commit: githubLatestCommitTool,
|
||||
@@ -3159,6 +3182,11 @@ export const tools: Record<string, ToolConfig> = {
|
||||
reddit_edit: redditEditTool,
|
||||
reddit_delete: redditDeleteTool,
|
||||
reddit_subscribe: redditSubscribeTool,
|
||||
reddit_get_me: redditGetMeTool,
|
||||
reddit_get_user: redditGetUserTool,
|
||||
reddit_send_message: redditSendMessageTool,
|
||||
reddit_get_messages: redditGetMessagesTool,
|
||||
reddit_get_subreddit_info: redditGetSubredditInfoTool,
|
||||
redis_get: redisGetTool,
|
||||
redis_set: redisSetTool,
|
||||
redis_delete: redisDeleteTool,
|
||||
|
||||
@@ -60,6 +60,12 @@ export const readRecordTool: ToolConfig<ServiceNowReadParams, ServiceNowReadResp
|
||||
visibility: 'user-or-llm',
|
||||
description: 'Maximum number of records to return (e.g., 10, 50, 100)',
|
||||
},
|
||||
offset: {
|
||||
type: 'number',
|
||||
required: false,
|
||||
visibility: 'user-or-llm',
|
||||
description: 'Number of records to skip for pagination (e.g., 0, 10, 20)',
|
||||
},
|
||||
fields: {
|
||||
type: 'string',
|
||||
required: false,
|
||||
@@ -67,6 +73,13 @@ export const readRecordTool: ToolConfig<ServiceNowReadParams, ServiceNowReadResp
|
||||
description:
|
||||
'Comma-separated list of fields to return (e.g., sys_id,number,short_description,state)',
|
||||
},
|
||||
displayValue: {
|
||||
type: 'string',
|
||||
required: false,
|
||||
visibility: 'user-or-llm',
|
||||
description:
|
||||
'Return display values for reference fields: "true" (display only), "false" (sys_id only), or "all" (both)',
|
||||
},
|
||||
},
|
||||
|
||||
request: {
|
||||
@@ -96,10 +109,18 @@ export const readRecordTool: ToolConfig<ServiceNowReadParams, ServiceNowReadResp
|
||||
queryParams.append('sysparm_limit', params.limit.toString())
|
||||
}
|
||||
|
||||
if (params.offset !== undefined && params.offset !== null) {
|
||||
queryParams.append('sysparm_offset', params.offset.toString())
|
||||
}
|
||||
|
||||
if (params.fields) {
|
||||
queryParams.append('sysparm_fields', params.fields)
|
||||
}
|
||||
|
||||
if (params.displayValue) {
|
||||
queryParams.append('sysparm_display_value', params.displayValue)
|
||||
}
|
||||
|
||||
const queryString = queryParams.toString()
|
||||
return queryString ? `${url}?${queryString}` : url
|
||||
},
|
||||
|
||||
@@ -31,7 +31,9 @@ export interface ServiceNowReadParams extends ServiceNowBaseParams {
|
||||
number?: string
|
||||
query?: string
|
||||
limit?: number
|
||||
offset?: number
|
||||
fields?: string
|
||||
displayValue?: string
|
||||
}
|
||||
|
||||
export interface ServiceNowReadResponse extends ToolResponse {
|
||||
|
||||
@@ -87,9 +87,21 @@ export const slackCanvasTool: ToolConfig<SlackCanvasParams, SlackCanvasResponse>
|
||||
},
|
||||
},
|
||||
|
||||
transformResponse: async (response: Response) => {
|
||||
transformResponse: async (response: Response): Promise<SlackCanvasResponse> => {
|
||||
const data = await response.json()
|
||||
|
||||
if (!data.ok) {
|
||||
return {
|
||||
success: false,
|
||||
output: {
|
||||
canvas_id: '',
|
||||
channel: '',
|
||||
title: '',
|
||||
},
|
||||
error: data.error || 'Unknown error',
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
success: true,
|
||||
output: {
|
||||
|
||||
108
apps/sim/tools/slack/create_channel_canvas.ts
Normal file
108
apps/sim/tools/slack/create_channel_canvas.ts
Normal file
@@ -0,0 +1,108 @@
|
||||
import type {
|
||||
SlackCreateChannelCanvasParams,
|
||||
SlackCreateChannelCanvasResponse,
|
||||
} from '@/tools/slack/types'
|
||||
import type { ToolConfig } from '@/tools/types'
|
||||
|
||||
export const slackCreateChannelCanvasTool: ToolConfig<
|
||||
SlackCreateChannelCanvasParams,
|
||||
SlackCreateChannelCanvasResponse
|
||||
> = {
|
||||
id: 'slack_create_channel_canvas',
|
||||
name: 'Slack Create Channel Canvas',
|
||||
description: 'Create a canvas pinned to a Slack channel as its resource hub',
|
||||
version: '1.0.0',
|
||||
|
||||
oauth: {
|
||||
required: true,
|
||||
provider: 'slack',
|
||||
},
|
||||
|
||||
params: {
|
||||
authMethod: {
|
||||
type: 'string',
|
||||
required: false,
|
||||
visibility: 'user-only',
|
||||
description: 'Authentication method: oauth or bot_token',
|
||||
},
|
||||
botToken: {
|
||||
type: 'string',
|
||||
required: false,
|
||||
visibility: 'user-only',
|
||||
description: 'Bot token for Custom Bot',
|
||||
},
|
||||
accessToken: {
|
||||
type: 'string',
|
||||
required: false,
|
||||
visibility: 'hidden',
|
||||
description: 'OAuth access token or bot token for Slack API',
|
||||
},
|
||||
channel: {
|
||||
type: 'string',
|
||||
required: true,
|
||||
visibility: 'user-or-llm',
|
||||
description: 'Channel ID to create the canvas in (e.g., C1234567890)',
|
||||
},
|
||||
title: {
|
||||
type: 'string',
|
||||
required: false,
|
||||
visibility: 'user-or-llm',
|
||||
description: 'Title for the channel canvas',
|
||||
},
|
||||
content: {
|
||||
type: 'string',
|
||||
required: false,
|
||||
visibility: 'user-or-llm',
|
||||
description: 'Canvas content in markdown format',
|
||||
},
|
||||
},
|
||||
|
||||
request: {
|
||||
url: 'https://slack.com/api/conversations.canvases.create',
|
||||
method: 'POST',
|
||||
headers: (params: SlackCreateChannelCanvasParams) => ({
|
||||
'Content-Type': 'application/json',
|
||||
Authorization: `Bearer ${params.accessToken || params.botToken}`,
|
||||
}),
|
||||
body: (params: SlackCreateChannelCanvasParams) => {
|
||||
const body: Record<string, unknown> = {
|
||||
channel_id: params.channel.trim(),
|
||||
}
|
||||
|
||||
if (params.title) {
|
||||
body.title = params.title
|
||||
}
|
||||
|
||||
if (params.content) {
|
||||
body.document_content = {
|
||||
type: 'markdown',
|
||||
markdown: params.content,
|
||||
}
|
||||
}
|
||||
|
||||
return body
|
||||
},
|
||||
},
|
||||
|
||||
transformResponse: async (response: Response) => {
|
||||
const data = await response.json()
|
||||
|
||||
if (!data.ok) {
|
||||
if (data.error === 'channel_canvas_already_exists') {
|
||||
throw new Error('A canvas already exists for this channel. Use Edit Canvas to modify it.')
|
||||
}
|
||||
throw new Error(data.error || 'Failed to create channel canvas')
|
||||
}
|
||||
|
||||
return {
|
||||
success: true,
|
||||
output: {
|
||||
canvas_id: data.canvas_id,
|
||||
},
|
||||
}
|
||||
},
|
||||
|
||||
outputs: {
|
||||
canvas_id: { type: 'string', description: 'ID of the created channel canvas' },
|
||||
},
|
||||
}
|
||||
121
apps/sim/tools/slack/edit_canvas.ts
Normal file
121
apps/sim/tools/slack/edit_canvas.ts
Normal file
@@ -0,0 +1,121 @@
|
||||
import type { SlackEditCanvasParams, SlackEditCanvasResponse } from '@/tools/slack/types'
|
||||
import type { ToolConfig } from '@/tools/types'
|
||||
|
||||
export const slackEditCanvasTool: ToolConfig<SlackEditCanvasParams, SlackEditCanvasResponse> = {
|
||||
id: 'slack_edit_canvas',
|
||||
name: 'Slack Edit Canvas',
|
||||
description: 'Edit an existing Slack canvas by inserting, replacing, or deleting content',
|
||||
version: '1.0.0',
|
||||
|
||||
oauth: {
|
||||
required: true,
|
||||
provider: 'slack',
|
||||
},
|
||||
|
||||
params: {
|
||||
authMethod: {
|
||||
type: 'string',
|
||||
required: false,
|
||||
visibility: 'user-only',
|
||||
description: 'Authentication method: oauth or bot_token',
|
||||
},
|
||||
botToken: {
|
||||
type: 'string',
|
||||
required: false,
|
||||
visibility: 'user-only',
|
||||
description: 'Bot token for Custom Bot',
|
||||
},
|
||||
accessToken: {
|
||||
type: 'string',
|
||||
required: false,
|
||||
visibility: 'hidden',
|
||||
description: 'OAuth access token or bot token for Slack API',
|
||||
},
|
||||
canvasId: {
|
||||
type: 'string',
|
||||
required: true,
|
||||
visibility: 'user-or-llm',
|
||||
description: 'Canvas ID to edit (e.g., F1234ABCD)',
|
||||
},
|
||||
operation: {
|
||||
type: 'string',
|
||||
required: true,
|
||||
visibility: 'user-or-llm',
|
||||
description:
|
||||
'Edit operation: insert_at_start, insert_at_end, insert_after, insert_before, replace, delete, or rename',
|
||||
},
|
||||
content: {
|
||||
type: 'string',
|
||||
required: false,
|
||||
visibility: 'user-or-llm',
|
||||
description: 'Markdown content for the operation (required for insert/replace operations)',
|
||||
},
|
||||
sectionId: {
|
||||
type: 'string',
|
||||
required: false,
|
||||
visibility: 'user-or-llm',
|
||||
description:
|
||||
'Section ID to target (required for insert_after, insert_before, replace, and delete)',
|
||||
},
|
||||
title: {
|
||||
type: 'string',
|
||||
required: false,
|
||||
visibility: 'user-or-llm',
|
||||
description: 'New title for the canvas (only used with rename operation)',
|
||||
},
|
||||
},
|
||||
|
||||
request: {
|
||||
url: 'https://slack.com/api/canvases.edit',
|
||||
method: 'POST',
|
||||
headers: (params: SlackEditCanvasParams) => ({
|
||||
'Content-Type': 'application/json',
|
||||
Authorization: `Bearer ${params.accessToken || params.botToken}`,
|
||||
}),
|
||||
body: (params: SlackEditCanvasParams) => {
|
||||
const change: Record<string, unknown> = {
|
||||
operation: params.operation,
|
||||
}
|
||||
|
||||
if (params.sectionId) {
|
||||
change.section_id = params.sectionId.trim()
|
||||
}
|
||||
|
||||
if (params.operation === 'rename' && params.title) {
|
||||
change.title_content = {
|
||||
type: 'markdown',
|
||||
markdown: params.title,
|
||||
}
|
||||
} else if (params.content && params.operation !== 'delete') {
|
||||
change.document_content = {
|
||||
type: 'markdown',
|
||||
markdown: params.content,
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
canvas_id: params.canvasId.trim(),
|
||||
changes: [change],
|
||||
}
|
||||
},
|
||||
},
|
||||
|
||||
transformResponse: async (response: Response) => {
|
||||
const data = await response.json()
|
||||
|
||||
if (!data.ok) {
|
||||
throw new Error(data.error || 'Failed to edit canvas')
|
||||
}
|
||||
|
||||
return {
|
||||
success: true,
|
||||
output: {
|
||||
content: 'Successfully edited canvas',
|
||||
},
|
||||
}
|
||||
},
|
||||
|
||||
outputs: {
|
||||
content: { type: 'string', description: 'Success message' },
|
||||
},
|
||||
}
|
||||
115
apps/sim/tools/slack/get_channel_info.ts
Normal file
115
apps/sim/tools/slack/get_channel_info.ts
Normal file
@@ -0,0 +1,115 @@
|
||||
import type { SlackGetChannelInfoParams, SlackGetChannelInfoResponse } from '@/tools/slack/types'
|
||||
import { CHANNEL_OUTPUT_PROPERTIES } from '@/tools/slack/types'
|
||||
import type { ToolConfig } from '@/tools/types'
|
||||
|
||||
export const slackGetChannelInfoTool: ToolConfig<
|
||||
SlackGetChannelInfoParams,
|
||||
SlackGetChannelInfoResponse
|
||||
> = {
|
||||
id: 'slack_get_channel_info',
|
||||
name: 'Slack Get Channel Info',
|
||||
description: 'Get detailed information about a Slack channel by its ID',
|
||||
version: '1.0.0',
|
||||
|
||||
oauth: {
|
||||
required: true,
|
||||
provider: 'slack',
|
||||
},
|
||||
|
||||
params: {
|
||||
authMethod: {
|
||||
type: 'string',
|
||||
required: false,
|
||||
visibility: 'user-only',
|
||||
description: 'Authentication method: oauth or bot_token',
|
||||
},
|
||||
botToken: {
|
||||
type: 'string',
|
||||
required: false,
|
||||
visibility: 'user-only',
|
||||
description: 'Bot token for Custom Bot',
|
||||
},
|
||||
accessToken: {
|
||||
type: 'string',
|
||||
required: false,
|
||||
visibility: 'hidden',
|
||||
description: 'OAuth access token or bot token for Slack API',
|
||||
},
|
||||
channel: {
|
||||
type: 'string',
|
||||
required: true,
|
||||
visibility: 'user-or-llm',
|
||||
description: 'Channel ID to get information about (e.g., C1234567890)',
|
||||
},
|
||||
includeNumMembers: {
|
||||
type: 'boolean',
|
||||
required: false,
|
||||
visibility: 'user-or-llm',
|
||||
description: 'Whether to include the member count in the response',
|
||||
},
|
||||
},
|
||||
|
||||
request: {
|
||||
url: (params: SlackGetChannelInfoParams) => {
|
||||
const url = new URL('https://slack.com/api/conversations.info')
|
||||
url.searchParams.append('channel', params.channel.trim())
|
||||
url.searchParams.append('include_num_members', String(params.includeNumMembers ?? true))
|
||||
return url.toString()
|
||||
},
|
||||
method: 'GET',
|
||||
headers: (params: SlackGetChannelInfoParams) => ({
|
||||
'Content-Type': 'application/json',
|
||||
Authorization: `Bearer ${params.accessToken || params.botToken}`,
|
||||
}),
|
||||
},
|
||||
|
||||
transformResponse: async (response: Response) => {
|
||||
const data = await response.json()
|
||||
|
||||
if (!data.ok) {
|
||||
if (data.error === 'channel_not_found') {
|
||||
throw new Error('Channel not found. Please check the channel ID and try again.')
|
||||
}
|
||||
if (data.error === 'missing_scope') {
|
||||
throw new Error(
|
||||
'Missing required permissions. Please reconnect your Slack account with the necessary scopes (channels:read).'
|
||||
)
|
||||
}
|
||||
throw new Error(data.error || 'Failed to get channel info from Slack')
|
||||
}
|
||||
|
||||
const channel = data.channel
|
||||
|
||||
return {
|
||||
success: true,
|
||||
output: {
|
||||
channelInfo: {
|
||||
id: channel.id,
|
||||
name: channel.name ?? '',
|
||||
is_channel: channel.is_channel ?? false,
|
||||
is_private: channel.is_private ?? false,
|
||||
is_archived: channel.is_archived ?? false,
|
||||
is_general: channel.is_general ?? false,
|
||||
is_member: channel.is_member ?? false,
|
||||
is_shared: channel.is_shared ?? false,
|
||||
is_ext_shared: channel.is_ext_shared ?? false,
|
||||
is_org_shared: channel.is_org_shared ?? false,
|
||||
num_members: channel.num_members ?? null,
|
||||
topic: channel.topic?.value ?? '',
|
||||
purpose: channel.purpose?.value ?? '',
|
||||
created: channel.created ?? null,
|
||||
creator: channel.creator ?? null,
|
||||
updated: channel.updated ?? null,
|
||||
},
|
||||
},
|
||||
}
|
||||
},
|
||||
|
||||
outputs: {
|
||||
channelInfo: {
|
||||
type: 'object',
|
||||
description: 'Detailed channel information',
|
||||
properties: CHANNEL_OUTPUT_PROPERTIES,
|
||||
},
|
||||
},
|
||||
}
|
||||
122
apps/sim/tools/slack/get_user_presence.ts
Normal file
122
apps/sim/tools/slack/get_user_presence.ts
Normal file
@@ -0,0 +1,122 @@
|
||||
import type { SlackGetUserPresenceParams, SlackGetUserPresenceResponse } from '@/tools/slack/types'
|
||||
import type { ToolConfig } from '@/tools/types'
|
||||
|
||||
export const slackGetUserPresenceTool: ToolConfig<
|
||||
SlackGetUserPresenceParams,
|
||||
SlackGetUserPresenceResponse
|
||||
> = {
|
||||
id: 'slack_get_user_presence',
|
||||
name: 'Slack Get User Presence',
|
||||
description: 'Check whether a Slack user is currently active or away',
|
||||
version: '1.0.0',
|
||||
|
||||
oauth: {
|
||||
required: true,
|
||||
provider: 'slack',
|
||||
},
|
||||
|
||||
params: {
|
||||
authMethod: {
|
||||
type: 'string',
|
||||
required: false,
|
||||
visibility: 'user-only',
|
||||
description: 'Authentication method: oauth or bot_token',
|
||||
},
|
||||
botToken: {
|
||||
type: 'string',
|
||||
required: false,
|
||||
visibility: 'user-only',
|
||||
description: 'Bot token for Custom Bot',
|
||||
},
|
||||
accessToken: {
|
||||
type: 'string',
|
||||
required: false,
|
||||
visibility: 'hidden',
|
||||
description: 'OAuth access token or bot token for Slack API',
|
||||
},
|
||||
userId: {
|
||||
type: 'string',
|
||||
required: true,
|
||||
visibility: 'user-or-llm',
|
||||
description: 'User ID to check presence for (e.g., U1234567890)',
|
||||
},
|
||||
},
|
||||
|
||||
request: {
|
||||
url: (params: SlackGetUserPresenceParams) => {
|
||||
const url = new URL('https://slack.com/api/users.getPresence')
|
||||
url.searchParams.append('user', params.userId.trim())
|
||||
return url.toString()
|
||||
},
|
||||
method: 'GET',
|
||||
headers: (params: SlackGetUserPresenceParams) => ({
|
||||
'Content-Type': 'application/json',
|
||||
Authorization: `Bearer ${params.accessToken || params.botToken}`,
|
||||
}),
|
||||
},
|
||||
|
||||
transformResponse: async (response: Response) => {
|
||||
const data = await response.json()
|
||||
|
||||
if (!data.ok) {
|
||||
if (data.error === 'user_not_found') {
|
||||
throw new Error('User not found. Please check the user ID and try again.')
|
||||
}
|
||||
if (data.error === 'missing_scope') {
|
||||
throw new Error(
|
||||
'Missing required permissions. Please reconnect your Slack account with the necessary scopes (users:read).'
|
||||
)
|
||||
}
|
||||
throw new Error(data.error || 'Failed to get user presence from Slack')
|
||||
}
|
||||
|
||||
return {
|
||||
success: true,
|
||||
output: {
|
||||
presence: data.presence,
|
||||
online: data.online ?? null,
|
||||
autoAway: data.auto_away ?? null,
|
||||
manualAway: data.manual_away ?? null,
|
||||
connectionCount: data.connection_count ?? null,
|
||||
lastActivity: data.last_activity ?? null,
|
||||
},
|
||||
}
|
||||
},
|
||||
|
||||
outputs: {
|
||||
presence: {
|
||||
type: 'string',
|
||||
description: 'User presence status: "active" or "away"',
|
||||
},
|
||||
online: {
|
||||
type: 'boolean',
|
||||
description:
|
||||
'Whether user has an active client connection (only available when checking own presence)',
|
||||
optional: true,
|
||||
},
|
||||
autoAway: {
|
||||
type: 'boolean',
|
||||
description:
|
||||
'Whether user was automatically set to away due to inactivity (only available when checking own presence)',
|
||||
optional: true,
|
||||
},
|
||||
manualAway: {
|
||||
type: 'boolean',
|
||||
description:
|
||||
'Whether user manually set themselves as away (only available when checking own presence)',
|
||||
optional: true,
|
||||
},
|
||||
connectionCount: {
|
||||
type: 'number',
|
||||
description:
|
||||
'Total number of active connections for the user (only available when checking own presence)',
|
||||
optional: true,
|
||||
},
|
||||
lastActivity: {
|
||||
type: 'number',
|
||||
description:
|
||||
'Unix timestamp of last detected activity (only available when checking own presence)',
|
||||
optional: true,
|
||||
},
|
||||
},
|
||||
}
|
||||
@@ -1,31 +1,49 @@
|
||||
import { slackAddReactionTool } from '@/tools/slack/add_reaction'
|
||||
import { slackCanvasTool } from '@/tools/slack/canvas'
|
||||
import { slackCreateChannelCanvasTool } from '@/tools/slack/create_channel_canvas'
|
||||
import { slackDeleteMessageTool } from '@/tools/slack/delete_message'
|
||||
import { slackDownloadTool } from '@/tools/slack/download'
|
||||
import { slackEditCanvasTool } from '@/tools/slack/edit_canvas'
|
||||
import { slackEphemeralMessageTool } from '@/tools/slack/ephemeral_message'
|
||||
import { slackGetChannelInfoTool } from '@/tools/slack/get_channel_info'
|
||||
import { slackGetMessageTool } from '@/tools/slack/get_message'
|
||||
import { slackGetThreadTool } from '@/tools/slack/get_thread'
|
||||
import { slackGetUserTool } from '@/tools/slack/get_user'
|
||||
import { slackGetUserPresenceTool } from '@/tools/slack/get_user_presence'
|
||||
import { slackListChannelsTool } from '@/tools/slack/list_channels'
|
||||
import { slackListMembersTool } from '@/tools/slack/list_members'
|
||||
import { slackListUsersTool } from '@/tools/slack/list_users'
|
||||
import { slackMessageTool } from '@/tools/slack/message'
|
||||
import { slackMessageReaderTool } from '@/tools/slack/message_reader'
|
||||
import { slackOpenViewTool } from '@/tools/slack/open_view'
|
||||
import { slackPublishViewTool } from '@/tools/slack/publish_view'
|
||||
import { slackPushViewTool } from '@/tools/slack/push_view'
|
||||
import { slackRemoveReactionTool } from '@/tools/slack/remove_reaction'
|
||||
import { slackUpdateMessageTool } from '@/tools/slack/update_message'
|
||||
import { slackUpdateViewTool } from '@/tools/slack/update_view'
|
||||
|
||||
export {
|
||||
slackMessageTool,
|
||||
slackCanvasTool,
|
||||
slackCreateChannelCanvasTool,
|
||||
slackMessageReaderTool,
|
||||
slackDownloadTool,
|
||||
slackEditCanvasTool,
|
||||
slackEphemeralMessageTool,
|
||||
slackUpdateMessageTool,
|
||||
slackDeleteMessageTool,
|
||||
slackAddReactionTool,
|
||||
slackRemoveReactionTool,
|
||||
slackGetChannelInfoTool,
|
||||
slackListChannelsTool,
|
||||
slackListMembersTool,
|
||||
slackListUsersTool,
|
||||
slackGetUserTool,
|
||||
slackGetUserPresenceTool,
|
||||
slackOpenViewTool,
|
||||
slackUpdateViewTool,
|
||||
slackPushViewTool,
|
||||
slackPublishViewTool,
|
||||
slackGetMessageTool,
|
||||
slackGetThreadTool,
|
||||
}
|
||||
|
||||
166
apps/sim/tools/slack/open_view.ts
Normal file
166
apps/sim/tools/slack/open_view.ts
Normal file
@@ -0,0 +1,166 @@
|
||||
import type { SlackOpenViewParams, SlackOpenViewResponse } from '@/tools/slack/types'
import { VIEW_OUTPUT_PROPERTIES } from '@/tools/slack/types'
import type { ToolConfig } from '@/tools/types'

/**
 * Opens a modal view via Slack's `views.open` API.
 *
 * Requires a `trigger_id` (or `interactivity_pointer`) obtained from a recent
 * interaction payload; Slack trigger IDs are short-lived and single-use, so the
 * error handling below maps each of those failure modes to a specific message.
 */
export const slackOpenViewTool: ToolConfig<SlackOpenViewParams, SlackOpenViewResponse> = {
  id: 'slack_open_view',
  name: 'Slack Open View',
  description:
    'Open a modal view in Slack using a trigger_id from an interaction payload. Used to display forms, confirmations, and other interactive modals.',
  version: '1.0.0',

  oauth: {
    required: true,
    provider: 'slack',
  },

  params: {
    authMethod: {
      type: 'string',
      required: false,
      visibility: 'user-only',
      description: 'Authentication method: oauth or bot_token',
    },
    botToken: {
      type: 'string',
      required: false,
      visibility: 'user-only',
      description: 'Bot token for Custom Bot',
    },
    accessToken: {
      type: 'string',
      required: false,
      visibility: 'hidden',
      description: 'OAuth access token or bot token for Slack API',
    },
    triggerId: {
      type: 'string',
      required: true,
      visibility: 'user-or-llm',
      description:
        'Exchange a trigger to post to the user. Obtained from an interaction payload (e.g., slash command, button click)',
    },
    interactivityPointer: {
      type: 'string',
      required: false,
      visibility: 'user-or-llm',
      description: 'Alternative to trigger_id for posting to user',
    },
    view: {
      type: 'json',
      required: true,
      visibility: 'user-or-llm',
      description:
        'A view payload object defining the modal. Must include type ("modal"), title, and blocks array',
    },
  },

  request: {
    url: 'https://slack.com/api/views.open',
    method: 'POST',
    headers: (params: SlackOpenViewParams) => ({
      'Content-Type': 'application/json',
      Authorization: `Bearer ${params.accessToken || params.botToken}`,
    }),
    body: (params: SlackOpenViewParams) => {
      const body: Record<string, unknown> = {
        // The view may arrive as a JSON string (from LLM input) or as an object.
        view: typeof params.view === 'string' ? JSON.parse(params.view) : params.view,
      }

      if (params.triggerId) {
        body.trigger_id = params.triggerId.trim()
      }

      if (params.interactivityPointer) {
        body.interactivity_pointer = params.interactivityPointer.trim()
      }

      return body
    },
  },

  transformResponse: async (response: Response) => {
    const data = await response.json()

    // Slack returns HTTP 200 with ok:false on API errors; map each known
    // error code to a user-actionable message before falling through.
    if (!data.ok) {
      if (data.error === 'expired_trigger_id') {
        throw new Error(
          'The trigger_id has expired. Trigger IDs are only valid for 3 seconds after the interaction.'
        )
      }
      if (data.error === 'invalid_trigger_id') {
        throw new Error(
          'Invalid trigger_id. Ensure you are using a trigger_id from a valid interaction payload.'
        )
      }
      if (data.error === 'exchanged_trigger_id') {
        throw new Error(
          'This trigger_id has already been used. Each trigger_id can only be used once.'
        )
      }
      if (data.error === 'view_too_large') {
        throw new Error('The view payload is too large. Reduce the number of blocks or content.')
      }
      if (data.error === 'duplicate_external_id') {
        throw new Error(
          'A view with this external_id already exists. Use a unique external_id per workspace.'
        )
      }
      if (data.error === 'invalid_arguments') {
        // Slack details validation failures in response_metadata.messages.
        const messages = data.response_metadata?.messages ?? []
        throw new Error(
          `Invalid view arguments: ${messages.length > 0 ? messages.join(', ') : data.error}`
        )
      }
      if (data.error === 'missing_scope') {
        throw new Error(
          'Missing required permissions. Please reconnect your Slack account with the necessary scopes.'
        )
      }
      if (
        data.error === 'invalid_auth' ||
        data.error === 'not_authed' ||
        data.error === 'token_expired'
      ) {
        throw new Error('Invalid authentication. Please check your Slack credentials.')
      }
      throw new Error(data.error || 'Failed to open view in Slack')
    }

    const view = data.view

    // Normalize optional fields so downstream consumers see a stable shape.
    return {
      success: true,
      output: {
        view: {
          id: view.id,
          team_id: view.team_id ?? null,
          type: view.type,
          title: view.title ?? null,
          submit: view.submit ?? null,
          close: view.close ?? null,
          blocks: view.blocks ?? [],
          private_metadata: view.private_metadata ?? null,
          callback_id: view.callback_id ?? null,
          external_id: view.external_id ?? null,
          state: view.state ?? null,
          hash: view.hash ?? null,
          clear_on_close: view.clear_on_close ?? false,
          notify_on_close: view.notify_on_close ?? false,
          root_view_id: view.root_view_id ?? null,
          previous_view_id: view.previous_view_id ?? null,
          app_id: view.app_id ?? null,
          bot_id: view.bot_id ?? null,
        },
      },
    }
  },

  outputs: {
    view: {
      type: 'object',
      description: 'The opened modal view object',
      properties: VIEW_OUTPUT_PROPERTIES,
    },
  },
}
|
||||
163
apps/sim/tools/slack/publish_view.ts
Normal file
163
apps/sim/tools/slack/publish_view.ts
Normal file
@@ -0,0 +1,163 @@
|
||||
import type { SlackPublishViewParams, SlackPublishViewResponse } from '@/tools/slack/types'
import { VIEW_OUTPUT_PROPERTIES } from '@/tools/slack/types'
import type { ToolConfig } from '@/tools/types'

/**
 * Publishes a Home tab view via Slack's `views.publish` API.
 *
 * Unlike modals, publishing a Home tab needs no trigger_id — only the target
 * user's ID and the view payload. An optional `hash` guards against race
 * conditions when updating an existing Home tab.
 */
export const slackPublishViewTool: ToolConfig<SlackPublishViewParams, SlackPublishViewResponse> = {
  id: 'slack_publish_view',
  name: 'Slack Publish View',
  description:
    "Publish a static view to a user's Home tab in Slack. Used to create or update the app's Home tab experience.",
  version: '1.0.0',

  oauth: {
    required: true,
    provider: 'slack',
  },

  params: {
    authMethod: {
      type: 'string',
      required: false,
      visibility: 'user-only',
      description: 'Authentication method: oauth or bot_token',
    },
    botToken: {
      type: 'string',
      required: false,
      visibility: 'user-only',
      description: 'Bot token for Custom Bot',
    },
    accessToken: {
      type: 'string',
      required: false,
      visibility: 'hidden',
      description: 'OAuth access token or bot token for Slack API',
    },
    userId: {
      type: 'string',
      required: true,
      visibility: 'user-or-llm',
      description: 'The user ID to publish the Home tab view to (e.g., U0BPQUNTA)',
    },
    hash: {
      type: 'string',
      required: false,
      visibility: 'user-or-llm',
      description:
        'View state hash to protect against race conditions. Obtained from a previous views response',
    },
    view: {
      type: 'json',
      required: true,
      visibility: 'user-or-llm',
      description:
        'A view payload object defining the Home tab. Must include type ("home") and blocks array',
    },
  },

  request: {
    url: 'https://slack.com/api/views.publish',
    method: 'POST',
    headers: (params: SlackPublishViewParams) => ({
      'Content-Type': 'application/json',
      Authorization: `Bearer ${params.accessToken || params.botToken}`,
    }),
    body: (params: SlackPublishViewParams) => {
      const body: Record<string, unknown> = {
        user_id: params.userId.trim(),
        // The view may arrive as a JSON string (from LLM input) or as an object.
        view: typeof params.view === 'string' ? JSON.parse(params.view) : params.view,
      }

      if (params.hash) {
        body.hash = params.hash.trim()
      }

      return body
    },
  },

  transformResponse: async (response: Response) => {
    const data = await response.json()

    // Slack returns HTTP 200 with ok:false on API errors; map each known
    // error code to a user-actionable message before falling through.
    if (!data.ok) {
      if (data.error === 'not_found') {
        throw new Error('User not found. Please check the user ID and try again.')
      }
      if (data.error === 'not_enabled') {
        throw new Error(
          'The Home tab is not enabled for this app. Enable it in your app configuration.'
        )
      }
      if (data.error === 'hash_conflict') {
        // Optimistic-concurrency failure: the supplied hash is stale.
        throw new Error(
          'The view has been modified since the hash was generated. Retrieve the latest view and try again.'
        )
      }
      if (data.error === 'view_too_large') {
        throw new Error(
          'The view payload is too large (max 250kb). Reduce the number of blocks or content.'
        )
      }
      if (data.error === 'duplicate_external_id') {
        throw new Error(
          'A view with this external_id already exists. Use a unique external_id per workspace.'
        )
      }
      if (data.error === 'invalid_arguments') {
        // Slack details validation failures in response_metadata.messages.
        const messages = data.response_metadata?.messages ?? []
        throw new Error(
          `Invalid view arguments: ${messages.length > 0 ? messages.join(', ') : data.error}`
        )
      }
      if (data.error === 'missing_scope') {
        throw new Error(
          'Missing required permissions. Please reconnect your Slack account with the necessary scopes.'
        )
      }
      if (
        data.error === 'invalid_auth' ||
        data.error === 'not_authed' ||
        data.error === 'token_expired'
      ) {
        throw new Error('Invalid authentication. Please check your Slack credentials.')
      }
      throw new Error(data.error || 'Failed to publish view in Slack')
    }

    const view = data.view

    // Normalize optional fields so downstream consumers see a stable shape.
    return {
      success: true,
      output: {
        view: {
          id: view.id,
          team_id: view.team_id ?? null,
          type: view.type,
          title: view.title ?? null,
          submit: view.submit ?? null,
          close: view.close ?? null,
          blocks: view.blocks ?? [],
          private_metadata: view.private_metadata ?? null,
          callback_id: view.callback_id ?? null,
          external_id: view.external_id ?? null,
          state: view.state ?? null,
          hash: view.hash ?? null,
          clear_on_close: view.clear_on_close ?? false,
          notify_on_close: view.notify_on_close ?? false,
          root_view_id: view.root_view_id ?? null,
          previous_view_id: view.previous_view_id ?? null,
          app_id: view.app_id ?? null,
          bot_id: view.bot_id ?? null,
        },
      },
    }
  },

  outputs: {
    view: {
      type: 'object',
      description: 'The published Home tab view object',
      properties: VIEW_OUTPUT_PROPERTIES,
    },
  },
}
|
||||
173
apps/sim/tools/slack/push_view.ts
Normal file
173
apps/sim/tools/slack/push_view.ts
Normal file
@@ -0,0 +1,173 @@
|
||||
import type { SlackPushViewParams, SlackPushViewResponse } from '@/tools/slack/types'
|
||||
import { VIEW_OUTPUT_PROPERTIES } from '@/tools/slack/types'
|
||||
import type { ToolConfig } from '@/tools/types'
|
||||
|
||||
export const slackPushViewTool: ToolConfig<SlackPushViewParams, SlackPushViewResponse> = {
|
||||
id: 'slack_push_view',
|
||||
name: 'Slack Push View',
|
||||
description:
|
||||
'Push a new view onto an existing modal stack in Slack. Limited to 2 additional views after the initial modal is opened.',
|
||||
version: '1.0.0',
|
||||
|
||||
oauth: {
|
||||
required: true,
|
||||
provider: 'slack',
|
||||
},
|
||||
|
||||
params: {
|
||||
authMethod: {
|
||||
type: 'string',
|
||||
required: false,
|
||||
visibility: 'user-only',
|
||||
description: 'Authentication method: oauth or bot_token',
|
||||
},
|
||||
botToken: {
|
||||
type: 'string',
|
||||
required: false,
|
||||
visibility: 'user-only',
|
||||
description: 'Bot token for Custom Bot',
|
||||
},
|
||||
accessToken: {
|
||||
type: 'string',
|
||||
required: false,
|
||||
visibility: 'hidden',
|
||||
description: 'OAuth access token or bot token for Slack API',
|
||||
},
|
||||
triggerId: {
|
||||
type: 'string',
|
||||
required: true,
|
||||
visibility: 'user-or-llm',
|
||||
description:
|
||||
'Exchange a trigger to post to the user. Obtained from an interaction payload (e.g., button click within an existing modal)',
|
||||
},
|
||||
interactivityPointer: {
|
||||
type: 'string',
|
||||
required: false,
|
||||
visibility: 'user-or-llm',
|
||||
description: 'Alternative to trigger_id for posting to user',
|
||||
},
|
||||
view: {
|
||||
type: 'json',
|
||||
required: true,
|
||||
visibility: 'user-or-llm',
|
||||
description:
|
||||
'A view payload object defining the modal to push. Must include type ("modal"), title, and blocks array',
|
||||
},
|
||||
},
|
||||
|
||||
request: {
|
||||
url: 'https://slack.com/api/views.push',
|
||||
method: 'POST',
|
||||
headers: (params: SlackPushViewParams) => ({
|
||||
'Content-Type': 'application/json',
|
||||
Authorization: `Bearer ${params.accessToken || params.botToken}`,
|
||||
}),
|
||||
body: (params: SlackPushViewParams) => {
|
||||
const body: Record<string, unknown> = {
|
||||
view: typeof params.view === 'string' ? JSON.parse(params.view) : params.view,
|
||||
}
|
||||
|
||||
if (params.triggerId) {
|
||||
body.trigger_id = params.triggerId.trim()
|
||||
}
|
||||
|
||||
if (params.interactivityPointer) {
|
||||
body.interactivity_pointer = params.interactivityPointer.trim()
|
||||
}
|
||||
|
||||
return body
|
||||
},
|
||||
},
|
||||
|
||||
transformResponse: async (response: Response) => {
|
||||
const data = await response.json()
|
||||
|
||||
if (!data.ok) {
|
||||
if (data.error === 'expired_trigger_id') {
|
||||
throw new Error(
|
||||
'The trigger_id has expired. Trigger IDs are only valid for 3 seconds after the interaction.'
|
||||
)
|
||||
}
|
||||
if (data.error === 'invalid_trigger_id') {
|
||||
throw new Error(
|
||||
'Invalid trigger_id. Ensure you are using a trigger_id from a valid interaction payload.'
|
||||
)
|
||||
}
|
||||
if (data.error === 'exchanged_trigger_id') {
|
||||
throw new Error(
|
||||
'This trigger_id has already been used. Each trigger_id can only be used once.'
|
||||
)
|
||||
}
|
||||
if (data.error === 'push_limit_reached') {
|
||||
throw new Error(
|
||||
'Cannot push more views. After a modal is opened, only 2 additional views can be pushed onto the stack.'
|
||||
)
|
||||
}
|
||||
if (data.error === 'view_too_large') {
|
||||
throw new Error(
|
||||
'The view payload is too large (max 250kb). Reduce the number of blocks or content.'
|
||||
)
|
||||
}
|
||||
if (data.error === 'duplicate_external_id') {
|
||||
throw new Error(
|
||||
'A view with this external_id already exists. Use a unique external_id per workspace.'
|
||||
)
|
||||
}
|
||||
if (data.error === 'invalid_arguments') {
|
||||
const messages = data.response_metadata?.messages ?? []
|
||||
throw new Error(
|
||||
`Invalid view arguments: ${messages.length > 0 ? messages.join(', ') : data.error}`
|
||||
)
|
||||
}
|
||||
if (data.error === 'missing_scope') {
|
||||
throw new Error(
|
||||
'Missing required permissions. Please reconnect your Slack account with the necessary scopes.'
|
||||
)
|
||||
}
|
||||
if (
|
||||
data.error === 'invalid_auth' ||
|
||||
data.error === 'not_authed' ||
|
||||
data.error === 'token_expired'
|
||||
) {
|
||||
throw new Error('Invalid authentication. Please check your Slack credentials.')
|
||||
}
|
||||
throw new Error(data.error || 'Failed to push view in Slack')
|
||||
}
|
||||
|
||||
const view = data.view
|
||||
|
||||
return {
|
||||
success: true,
|
||||
output: {
|
||||
view: {
|
||||
id: view.id,
|
||||
team_id: view.team_id ?? null,
|
||||
type: view.type,
|
||||
title: view.title ?? null,
|
||||
submit: view.submit ?? null,
|
||||
close: view.close ?? null,
|
||||
blocks: view.blocks ?? [],
|
||||
private_metadata: view.private_metadata ?? null,
|
||||
callback_id: view.callback_id ?? null,
|
||||
external_id: view.external_id ?? null,
|
||||
state: view.state ?? null,
|
||||
hash: view.hash ?? null,
|
||||
clear_on_close: view.clear_on_close ?? false,
|
||||
notify_on_close: view.notify_on_close ?? false,
|
||||
root_view_id: view.root_view_id ?? null,
|
||||
previous_view_id: view.previous_view_id ?? null,
|
||||
app_id: view.app_id ?? null,
|
||||
bot_id: view.bot_id ?? null,
|
||||
},
|
||||
},
|
||||
}
|
||||
},
|
||||
|
||||
outputs: {
|
||||
view: {
|
||||
type: 'object',
|
||||
description: 'The pushed modal view object',
|
||||
properties: VIEW_OUTPUT_PROPERTIES,
|
||||
},
|
||||
},
|
||||
}
|
||||
108
apps/sim/tools/slack/remove_reaction.ts
Normal file
108
apps/sim/tools/slack/remove_reaction.ts
Normal file
@@ -0,0 +1,108 @@
|
||||
import type { SlackRemoveReactionParams, SlackRemoveReactionResponse } from '@/tools/slack/types'
|
||||
import { REACTION_METADATA_OUTPUT_PROPERTIES } from '@/tools/slack/types'
|
||||
import type { ToolConfig } from '@/tools/types'
|
||||
|
||||
export const slackRemoveReactionTool: ToolConfig<
|
||||
SlackRemoveReactionParams,
|
||||
SlackRemoveReactionResponse
|
||||
> = {
|
||||
id: 'slack_remove_reaction',
|
||||
name: 'Slack Remove Reaction',
|
||||
description: 'Remove an emoji reaction from a Slack message',
|
||||
version: '1.0.0',
|
||||
|
||||
oauth: {
|
||||
required: true,
|
||||
provider: 'slack',
|
||||
},
|
||||
|
||||
params: {
|
||||
authMethod: {
|
||||
type: 'string',
|
||||
required: false,
|
||||
visibility: 'user-only',
|
||||
description: 'Authentication method: oauth or bot_token',
|
||||
},
|
||||
botToken: {
|
||||
type: 'string',
|
||||
required: false,
|
||||
visibility: 'user-only',
|
||||
description: 'Bot token for Custom Bot',
|
||||
},
|
||||
accessToken: {
|
||||
type: 'string',
|
||||
required: false,
|
||||
visibility: 'hidden',
|
||||
description: 'OAuth access token or bot token for Slack API',
|
||||
},
|
||||
channel: {
|
||||
type: 'string',
|
||||
required: true,
|
||||
visibility: 'user-or-llm',
|
||||
description: 'Channel ID where the message was posted (e.g., C1234567890)',
|
||||
},
|
||||
timestamp: {
|
||||
type: 'string',
|
||||
required: true,
|
||||
visibility: 'user-or-llm',
|
||||
description: 'Timestamp of the message to remove reaction from (e.g., 1405894322.002768)',
|
||||
},
|
||||
name: {
|
||||
type: 'string',
|
||||
required: true,
|
||||
visibility: 'user-or-llm',
|
||||
description:
|
||||
'Name of the emoji reaction to remove (without colons, e.g., thumbsup, heart, eyes)',
|
||||
},
|
||||
},
|
||||
|
||||
request: {
|
||||
url: '/api/tools/slack/remove-reaction',
|
||||
method: 'POST',
|
||||
headers: () => ({
|
||||
'Content-Type': 'application/json',
|
||||
}),
|
||||
body: (params: SlackRemoveReactionParams) => ({
|
||||
accessToken: params.accessToken || params.botToken,
|
||||
channel: params.channel,
|
||||
timestamp: params.timestamp,
|
||||
name: params.name,
|
||||
}),
|
||||
},
|
||||
|
||||
transformResponse: async (response: Response) => {
|
||||
const data = await response.json()
|
||||
|
||||
if (!data.success) {
|
||||
return {
|
||||
success: false,
|
||||
output: {
|
||||
content: data.error || 'Failed to remove reaction',
|
||||
metadata: {
|
||||
channel: '',
|
||||
timestamp: '',
|
||||
reaction: '',
|
||||
},
|
||||
},
|
||||
error: data.error,
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
success: true,
|
||||
output: {
|
||||
content: data.output.content,
|
||||
metadata: data.output.metadata,
|
||||
},
|
||||
}
|
||||
},
|
||||
|
||||
outputs: {
|
||||
content: { type: 'string', description: 'Success message' },
|
||||
metadata: {
|
||||
type: 'object',
|
||||
description: 'Reaction metadata',
|
||||
properties: REACTION_METADATA_OUTPUT_PROPERTIES,
|
||||
},
|
||||
},
|
||||
}
|
||||
@@ -478,6 +478,90 @@ export const CANVAS_OUTPUT_PROPERTIES = {
|
||||
title: { type: 'string', description: 'Canvas title' },
|
||||
} as const satisfies Record<string, OutputProperty>
|
||||
|
||||
/**
|
||||
* Output definition for modal view objects
|
||||
* Based on Slack views.open response structure
|
||||
*/
|
||||
export const VIEW_OUTPUT_PROPERTIES = {
|
||||
id: { type: 'string', description: 'Unique view identifier' },
|
||||
team_id: { type: 'string', description: 'Workspace/team ID', optional: true },
|
||||
type: { type: 'string', description: 'View type (e.g., "modal")' },
|
||||
title: {
|
||||
type: 'json',
|
||||
description: 'Plain text title object with type and text fields',
|
||||
optional: true,
|
||||
properties: {
|
||||
type: { type: 'string', description: 'Text object type (plain_text)' },
|
||||
text: { type: 'string', description: 'Title text content' },
|
||||
},
|
||||
},
|
||||
submit: {
|
||||
type: 'json',
|
||||
description: 'Plain text submit button object',
|
||||
optional: true,
|
||||
properties: {
|
||||
type: { type: 'string', description: 'Text object type (plain_text)' },
|
||||
text: { type: 'string', description: 'Submit button text' },
|
||||
},
|
||||
},
|
||||
close: {
|
||||
type: 'json',
|
||||
description: 'Plain text close button object',
|
||||
optional: true,
|
||||
properties: {
|
||||
type: { type: 'string', description: 'Text object type (plain_text)' },
|
||||
text: { type: 'string', description: 'Close button text' },
|
||||
},
|
||||
},
|
||||
blocks: {
|
||||
type: 'array',
|
||||
description: 'Block Kit blocks in the view',
|
||||
items: {
|
||||
type: 'object',
|
||||
properties: BLOCK_OUTPUT_PROPERTIES,
|
||||
},
|
||||
},
|
||||
private_metadata: {
|
||||
type: 'string',
|
||||
description: 'Private metadata string passed with the view',
|
||||
optional: true,
|
||||
},
|
||||
callback_id: { type: 'string', description: 'Custom identifier for the view', optional: true },
|
||||
external_id: {
|
||||
type: 'string',
|
||||
description: 'Custom external identifier (max 255 chars, unique per workspace)',
|
||||
optional: true,
|
||||
},
|
||||
state: {
|
||||
type: 'json',
|
||||
description: 'Current state of the view with input values',
|
||||
optional: true,
|
||||
},
|
||||
hash: { type: 'string', description: 'View version hash for updates', optional: true },
|
||||
clear_on_close: {
|
||||
type: 'boolean',
|
||||
description: 'Whether to clear all views in the stack when this view is closed',
|
||||
optional: true,
|
||||
},
|
||||
notify_on_close: {
|
||||
type: 'boolean',
|
||||
description: 'Whether to send a view_closed event when this view is closed',
|
||||
optional: true,
|
||||
},
|
||||
root_view_id: {
|
||||
type: 'string',
|
||||
description: 'ID of the root view in the view stack',
|
||||
optional: true,
|
||||
},
|
||||
previous_view_id: {
|
||||
type: 'string',
|
||||
description: 'ID of the previous view in the view stack',
|
||||
optional: true,
|
||||
},
|
||||
app_id: { type: 'string', description: 'Application identifier', optional: true },
|
||||
bot_id: { type: 'string', description: 'Bot identifier', optional: true },
|
||||
} as const satisfies Record<string, OutputProperty>
|
||||
|
||||
/**
|
||||
* File download output properties
|
||||
*/
|
||||
@@ -561,6 +645,12 @@ export interface SlackAddReactionParams extends SlackBaseParams {
|
||||
name: string
|
||||
}
|
||||
|
||||
export interface SlackRemoveReactionParams extends SlackBaseParams {
|
||||
channel: string
|
||||
timestamp: string
|
||||
name: string
|
||||
}
|
||||
|
||||
export interface SlackListChannelsParams extends SlackBaseParams {
|
||||
includePrivate?: boolean
|
||||
excludeArchived?: boolean
|
||||
@@ -600,6 +690,54 @@ export interface SlackGetThreadParams extends SlackBaseParams {
|
||||
limit?: number
|
||||
}
|
||||
|
||||
export interface SlackGetChannelInfoParams extends SlackBaseParams {
|
||||
channel: string
|
||||
includeNumMembers?: boolean
|
||||
}
|
||||
|
||||
export interface SlackGetUserPresenceParams extends SlackBaseParams {
|
||||
userId: string
|
||||
}
|
||||
|
||||
export interface SlackEditCanvasParams extends SlackBaseParams {
|
||||
canvasId: string
|
||||
operation: string
|
||||
content?: string
|
||||
sectionId?: string
|
||||
title?: string
|
||||
}
|
||||
|
||||
export interface SlackCreateChannelCanvasParams extends SlackBaseParams {
|
||||
channel: string
|
||||
title?: string
|
||||
content?: string
|
||||
}
|
||||
|
||||
export interface SlackOpenViewParams extends SlackBaseParams {
|
||||
triggerId: string
|
||||
interactivityPointer?: string
|
||||
view: object | string
|
||||
}
|
||||
|
||||
export interface SlackUpdateViewParams extends SlackBaseParams {
|
||||
viewId?: string
|
||||
externalId?: string
|
||||
hash?: string
|
||||
view: object | string
|
||||
}
|
||||
|
||||
export interface SlackPushViewParams extends SlackBaseParams {
|
||||
triggerId: string
|
||||
interactivityPointer?: string
|
||||
view: object | string
|
||||
}
|
||||
|
||||
export interface SlackPublishViewParams extends SlackBaseParams {
|
||||
userId: string
|
||||
hash?: string
|
||||
view: object | string
|
||||
}
|
||||
|
||||
export interface SlackMessageResponse extends ToolResponse {
|
||||
output: {
|
||||
// Legacy properties for backward compatibility
|
||||
@@ -759,17 +897,34 @@ export interface SlackAddReactionResponse extends ToolResponse {
|
||||
}
|
||||
}
|
||||
|
||||
export interface SlackRemoveReactionResponse extends ToolResponse {
|
||||
output: {
|
||||
content: string
|
||||
metadata: {
|
||||
channel: string
|
||||
timestamp: string
|
||||
reaction: string
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export interface SlackChannel {
|
||||
id: string
|
||||
name: string
|
||||
is_channel?: boolean
|
||||
is_private: boolean
|
||||
is_archived: boolean
|
||||
is_general?: boolean
|
||||
is_member: boolean
|
||||
is_shared?: boolean
|
||||
is_ext_shared?: boolean
|
||||
is_org_shared?: boolean
|
||||
num_members?: number
|
||||
topic?: string
|
||||
purpose?: string
|
||||
created?: number
|
||||
creator?: string
|
||||
updated?: number
|
||||
}
|
||||
|
||||
export interface SlackListChannelsResponse extends ToolResponse {
|
||||
@@ -858,6 +1013,80 @@ export interface SlackGetThreadResponse extends ToolResponse {
|
||||
}
|
||||
}
|
||||
|
||||
export interface SlackGetChannelInfoResponse extends ToolResponse {
|
||||
output: {
|
||||
channelInfo: SlackChannel
|
||||
}
|
||||
}
|
||||
|
||||
export interface SlackGetUserPresenceResponse extends ToolResponse {
|
||||
output: {
|
||||
presence: string
|
||||
online?: boolean | null
|
||||
autoAway?: boolean | null
|
||||
manualAway?: boolean | null
|
||||
connectionCount?: number | null
|
||||
lastActivity?: number | null
|
||||
}
|
||||
}
|
||||
|
||||
export interface SlackEditCanvasResponse extends ToolResponse {
|
||||
output: {
|
||||
content: string
|
||||
}
|
||||
}
|
||||
|
||||
export interface SlackCreateChannelCanvasResponse extends ToolResponse {
|
||||
output: {
|
||||
canvas_id: string
|
||||
}
|
||||
}
|
||||
|
||||
export interface SlackView {
|
||||
id: string
|
||||
team_id?: string | null
|
||||
type: string
|
||||
title?: { type: string; text: string } | null
|
||||
submit?: { type: string; text: string } | null
|
||||
close?: { type: string; text: string } | null
|
||||
blocks: SlackBlock[]
|
||||
private_metadata?: string | null
|
||||
callback_id?: string | null
|
||||
external_id?: string | null
|
||||
state?: Record<string, unknown> | null
|
||||
hash?: string | null
|
||||
clear_on_close?: boolean
|
||||
notify_on_close?: boolean
|
||||
root_view_id?: string | null
|
||||
previous_view_id?: string | null
|
||||
app_id?: string | null
|
||||
bot_id?: string | null
|
||||
}
|
||||
|
||||
export interface SlackOpenViewResponse extends ToolResponse {
|
||||
output: {
|
||||
view: SlackView
|
||||
}
|
||||
}
|
||||
|
||||
export interface SlackUpdateViewResponse extends ToolResponse {
|
||||
output: {
|
||||
view: SlackView
|
||||
}
|
||||
}
|
||||
|
||||
export interface SlackPushViewResponse extends ToolResponse {
|
||||
output: {
|
||||
view: SlackView
|
||||
}
|
||||
}
|
||||
|
||||
export interface SlackPublishViewResponse extends ToolResponse {
|
||||
output: {
|
||||
view: SlackView
|
||||
}
|
||||
}
|
||||
|
||||
export type SlackResponse =
|
||||
| SlackCanvasResponse
|
||||
| SlackMessageReaderResponse
|
||||
@@ -866,6 +1095,7 @@ export type SlackResponse =
|
||||
| SlackUpdateMessageResponse
|
||||
| SlackDeleteMessageResponse
|
||||
| SlackAddReactionResponse
|
||||
| SlackRemoveReactionResponse
|
||||
| SlackListChannelsResponse
|
||||
| SlackListMembersResponse
|
||||
| SlackListUsersResponse
|
||||
@@ -873,3 +1103,11 @@ export type SlackResponse =
|
||||
| SlackEphemeralMessageResponse
|
||||
| SlackGetMessageResponse
|
||||
| SlackGetThreadResponse
|
||||
| SlackGetChannelInfoResponse
|
||||
| SlackGetUserPresenceResponse
|
||||
| SlackEditCanvasResponse
|
||||
| SlackCreateChannelCanvasResponse
|
||||
| SlackOpenViewResponse
|
||||
| SlackUpdateViewResponse
|
||||
| SlackPushViewResponse
|
||||
| SlackPublishViewResponse
|
||||
|
||||
175
apps/sim/tools/slack/update_view.ts
Normal file
175
apps/sim/tools/slack/update_view.ts
Normal file
@@ -0,0 +1,175 @@
|
||||
import type { SlackUpdateViewParams, SlackUpdateViewResponse } from '@/tools/slack/types'
|
||||
import { VIEW_OUTPUT_PROPERTIES } from '@/tools/slack/types'
|
||||
import type { ToolConfig } from '@/tools/types'
|
||||
|
||||
export const slackUpdateViewTool: ToolConfig<SlackUpdateViewParams, SlackUpdateViewResponse> = {
|
||||
id: 'slack_update_view',
|
||||
name: 'Slack Update View',
|
||||
description:
|
||||
'Update an existing modal view in Slack. Identify the view by view_id or external_id, and provide the updated view payload.',
|
||||
version: '1.0.0',
|
||||
|
||||
oauth: {
|
||||
required: true,
|
||||
provider: 'slack',
|
||||
},
|
||||
|
||||
params: {
|
||||
authMethod: {
|
||||
type: 'string',
|
||||
required: false,
|
||||
visibility: 'user-only',
|
||||
description: 'Authentication method: oauth or bot_token',
|
||||
},
|
||||
botToken: {
|
||||
type: 'string',
|
||||
required: false,
|
||||
visibility: 'user-only',
|
||||
description: 'Bot token for Custom Bot',
|
||||
},
|
||||
accessToken: {
|
||||
type: 'string',
|
||||
required: false,
|
||||
visibility: 'hidden',
|
||||
description: 'OAuth access token or bot token for Slack API',
|
||||
},
|
||||
viewId: {
|
||||
type: 'string',
|
||||
required: false,
|
||||
visibility: 'user-or-llm',
|
||||
description:
|
||||
'Unique identifier of the view to update. Either viewId or externalId is required',
|
||||
},
|
||||
externalId: {
|
||||
type: 'string',
|
||||
required: false,
|
||||
visibility: 'user-or-llm',
|
||||
description:
|
||||
'Developer-set unique identifier of the view to update (max 255 chars). Either viewId or externalId is required',
|
||||
},
|
||||
hash: {
|
||||
type: 'string',
|
||||
required: false,
|
||||
visibility: 'user-or-llm',
|
||||
description:
|
||||
'View state hash to protect against race conditions. Obtained from a previous views response',
|
||||
},
|
||||
view: {
|
||||
type: 'json',
|
||||
required: true,
|
||||
visibility: 'user-or-llm',
|
||||
description:
|
||||
'A view payload object defining the updated modal. Must include type ("modal"), title, and blocks array. Use identical block_id and action_id values to preserve input data',
|
||||
},
|
||||
},
|
||||
|
||||
request: {
|
||||
url: 'https://slack.com/api/views.update',
|
||||
method: 'POST',
|
||||
headers: (params: SlackUpdateViewParams) => ({
|
||||
'Content-Type': 'application/json',
|
||||
Authorization: `Bearer ${params.accessToken || params.botToken}`,
|
||||
}),
|
||||
body: (params: SlackUpdateViewParams) => {
|
||||
const body: Record<string, unknown> = {
|
||||
view: typeof params.view === 'string' ? JSON.parse(params.view) : params.view,
|
||||
}
|
||||
|
||||
if (params.viewId) {
|
||||
body.view_id = params.viewId.trim()
|
||||
}
|
||||
|
||||
if (params.externalId) {
|
||||
body.external_id = params.externalId.trim()
|
||||
}
|
||||
|
||||
if (params.hash) {
|
||||
body.hash = params.hash.trim()
|
||||
}
|
||||
|
||||
return body
|
||||
},
|
||||
},
|
||||
|
||||
transformResponse: async (response: Response) => {
|
||||
const data = await response.json()
|
||||
|
||||
if (!data.ok) {
|
||||
if (data.error === 'not_found') {
|
||||
throw new Error(
|
||||
'View not found. The provided view_id or external_id does not match an existing view.'
|
||||
)
|
||||
}
|
||||
if (data.error === 'hash_conflict') {
|
||||
throw new Error(
|
||||
'The view has been modified since the hash was generated. Retrieve the latest view and try again.'
|
||||
)
|
||||
}
|
||||
if (data.error === 'view_too_large') {
|
||||
throw new Error(
|
||||
'The view payload is too large (max 250kb). Reduce the number of blocks or content.'
|
||||
)
|
||||
}
|
||||
if (data.error === 'duplicate_external_id') {
|
||||
throw new Error(
|
||||
'A view with this external_id already exists. Use a unique external_id per workspace.'
|
||||
)
|
||||
}
|
||||
if (data.error === 'invalid_arguments') {
|
||||
const messages = data.response_metadata?.messages ?? []
|
||||
throw new Error(
|
||||
`Invalid view arguments: ${messages.length > 0 ? messages.join(', ') : data.error}`
|
||||
)
|
||||
}
|
||||
if (data.error === 'missing_scope') {
|
||||
throw new Error(
|
||||
'Missing required permissions. Please reconnect your Slack account with the necessary scopes.'
|
||||
)
|
||||
}
|
||||
if (
|
||||
data.error === 'invalid_auth' ||
|
||||
data.error === 'not_authed' ||
|
||||
data.error === 'token_expired'
|
||||
) {
|
||||
throw new Error('Invalid authentication. Please check your Slack credentials.')
|
||||
}
|
||||
throw new Error(data.error || 'Failed to update view in Slack')
|
||||
}
|
||||
|
||||
const view = data.view
|
||||
|
||||
return {
|
||||
success: true,
|
||||
output: {
|
||||
view: {
|
||||
id: view.id,
|
||||
team_id: view.team_id ?? null,
|
||||
type: view.type,
|
||||
title: view.title ?? null,
|
||||
submit: view.submit ?? null,
|
||||
close: view.close ?? null,
|
||||
blocks: view.blocks ?? [],
|
||||
private_metadata: view.private_metadata ?? null,
|
||||
callback_id: view.callback_id ?? null,
|
||||
external_id: view.external_id ?? null,
|
||||
state: view.state ?? null,
|
||||
hash: view.hash ?? null,
|
||||
clear_on_close: view.clear_on_close ?? false,
|
||||
notify_on_close: view.notify_on_close ?? false,
|
||||
root_view_id: view.root_view_id ?? null,
|
||||
previous_view_id: view.previous_view_id ?? null,
|
||||
app_id: view.app_id ?? null,
|
||||
bot_id: view.bot_id ?? null,
|
||||
},
|
||||
},
|
||||
}
|
||||
},
|
||||
|
||||
outputs: {
|
||||
view: {
|
||||
type: 'object',
|
||||
description: 'The updated modal view object',
|
||||
properties: VIEW_OUTPUT_PROPERTIES,
|
||||
},
|
||||
},
|
||||
}
|
||||
16
bun.lock
16
bun.lock
@@ -13,7 +13,7 @@
|
||||
"glob": "13.0.0",
|
||||
"husky": "9.1.7",
|
||||
"lint-staged": "16.0.0",
|
||||
"turbo": "2.8.12",
|
||||
"turbo": "2.8.13",
|
||||
},
|
||||
},
|
||||
"apps/docs": {
|
||||
@@ -3493,19 +3493,19 @@
|
||||
|
||||
"tunnel-agent": ["tunnel-agent@0.6.0", "", { "dependencies": { "safe-buffer": "^5.0.1" } }, "sha512-McnNiV1l8RYeY8tBgEpuodCC1mLUdbSN+CYBL7kJsJNInOP8UjDDEwdk6Mw60vdLLrr5NHKZhMAOSrR2NZuQ+w=="],
|
||||
|
||||
"turbo": ["turbo@2.8.12", "", { "optionalDependencies": { "turbo-darwin-64": "2.8.12", "turbo-darwin-arm64": "2.8.12", "turbo-linux-64": "2.8.12", "turbo-linux-arm64": "2.8.12", "turbo-windows-64": "2.8.12", "turbo-windows-arm64": "2.8.12" }, "bin": { "turbo": "bin/turbo" } }, "sha512-auUAMLmi0eJhxDhQrxzvuhfEbICnVt0CTiYQYY8WyRJ5nwCDZxD0JG8bCSxT4nusI2CwJzmZAay5BfF6LmK7Hw=="],
|
||||
"turbo": ["turbo@2.8.13", "", { "optionalDependencies": { "turbo-darwin-64": "2.8.13", "turbo-darwin-arm64": "2.8.13", "turbo-linux-64": "2.8.13", "turbo-linux-arm64": "2.8.13", "turbo-windows-64": "2.8.13", "turbo-windows-arm64": "2.8.13" }, "bin": { "turbo": "bin/turbo" } }, "sha512-nyM99hwFB9/DHaFyKEqatdayGjsMNYsQ/XBNO6MITc7roncZetKb97MpHxWf3uiU+LB9c9HUlU3Jp2Ixei2k1A=="],
|
||||
|
||||
"turbo-darwin-64": ["turbo-darwin-64@2.8.12", "", { "os": "darwin", "cpu": "x64" }, "sha512-EiHJmW2MeQQx+21x8hjMHw/uPhXt9PIxvDrxzOtyVwrXzL0tQmsxtO4qHf2l7uA+K6PUJ4+TjY1MHZDuCvWXrw=="],
|
||||
"turbo-darwin-64": ["turbo-darwin-64@2.8.13", "", { "os": "darwin", "cpu": "x64" }, "sha512-PmOvodQNiOj77+Zwoqku70vwVjKzL34RTNxxoARjp5RU5FOj/CGiC6vcDQhNtFPUOWSAaogHF5qIka9TBhX4XA=="],
|
||||
|
||||
"turbo-darwin-arm64": ["turbo-darwin-arm64@2.8.12", "", { "os": "darwin", "cpu": "arm64" }, "sha512-cbqqGN0vd7ly2TeuaM8k9AK9u1CABO4kBA5KPSqovTiLL3sORccn/mZzJSbvQf0EsYRfU34MgW5FotfwW3kx8Q=="],
|
||||
"turbo-darwin-arm64": ["turbo-darwin-arm64@2.8.13", "", { "os": "darwin", "cpu": "arm64" }, "sha512-kI+anKcLIM4L8h+NsM7mtAUpElkCOxv5LgiQVQR8BASyDFfc8Efj5kCk3cqxuxOvIqx0sLfCX7atrHQ2kwuNJQ=="],
|
||||
|
||||
"turbo-linux-64": ["turbo-linux-64@2.8.12", "", { "os": "linux", "cpu": "x64" }, "sha512-jXKw9j4r4q6s0goSXuKI3aKbQK2qiNeP25lGGEnq018TM6SWRW1CCpPMxyG91aCKrub7wDm/K45sGNT4ZFBcFQ=="],
|
||||
"turbo-linux-64": ["turbo-linux-64@2.8.13", "", { "os": "linux", "cpu": "x64" }, "sha512-j29KnQhHyzdzgCykBFeBqUPS4Wj7lWMnZ8CHqytlYDap4Jy70l4RNG46pOL9+lGu6DepK2s1rE86zQfo0IOdPw=="],
|
||||
|
||||
"turbo-linux-arm64": ["turbo-linux-arm64@2.8.12", "", { "os": "linux", "cpu": "arm64" }, "sha512-BRJCMdyXjyBoL0GYpvj9d2WNfMHwc3tKmJG5ATn2Efvil9LsiOsd/93/NxDqW0jACtHFNVOPnd/CBwXRPiRbwA=="],
|
||||
"turbo-linux-arm64": ["turbo-linux-arm64@2.8.13", "", { "os": "linux", "cpu": "arm64" }, "sha512-OEl1YocXGZDRDh28doOUn49QwNe82kXljO1HXApjU0LapkDiGpfl3jkAlPKxEkGDSYWc8MH5Ll8S16Rf5tEBYg=="],
|
||||
|
||||
"turbo-windows-64": ["turbo-windows-64@2.8.12", "", { "os": "win32", "cpu": "x64" }, "sha512-vyFOlpFFzQFkikvSVhVkESEfzIopgs2J7J1rYvtSwSHQ4zmHxkC95Q8Kjkus8gg+8X2mZyP1GS5jirmaypGiPw=="],
|
||||
"turbo-windows-64": ["turbo-windows-64@2.8.13", "", { "os": "win32", "cpu": "x64" }, "sha512-717bVk1+Pn2Jody7OmWludhEirEe0okoj1NpRbSm5kVZz/yNN/jfjbxWC6ilimXMz7xoMT3IDfQFJsFR3PMANA=="],
|
||||
|
||||
"turbo-windows-arm64": ["turbo-windows-arm64@2.8.12", "", { "os": "win32", "cpu": "arm64" }, "sha512-9nRnlw5DF0LkJClkIws1evaIF36dmmMEO84J5Uj4oQ8C0QTHwlH7DNe5Kq2Jdmu8GXESCNDNuUYG8Cx6W/vm3g=="],
|
||||
"turbo-windows-arm64": ["turbo-windows-arm64@2.8.13", "", { "os": "win32", "cpu": "arm64" }, "sha512-R819HShLIT0Wj6zWVnIsYvSNtRNj1q9VIyaUz0P24SMcLCbQZIm1sV09F4SDbg+KCCumqD2lcaR2UViQ8SnUJA=="],
|
||||
|
||||
"tweetnacl": ["tweetnacl@0.14.5", "", {}, "sha512-KXXFFdAbFXY4geFIwoyNK+f5Z1b7swfXABfL7HXCmoIWMKU3dmS26672A4EeQtDzLKy7SXmfBu51JolvEKwtGA=="],
|
||||
|
||||
|
||||
@@ -42,7 +42,7 @@
|
||||
"glob": "13.0.0",
|
||||
"husky": "9.1.7",
|
||||
"lint-staged": "16.0.0",
|
||||
"turbo": "2.8.12"
|
||||
"turbo": "2.8.13"
|
||||
},
|
||||
"lint-staged": {
|
||||
"*.{js,jsx,ts,tsx,json,css,scss}": [
|
||||
|
||||
Reference in New Issue
Block a user