Compare commits

...

13 Commits

Author SHA1 Message Date
Lakee Sivaraya
d5bd97de32 feat(tinybird): added tinybird block (#2781) 2026-01-14 00:25:54 -08:00
Siddharth Ganesan
bd7009e316 fix(copilot): commands (#2811) 2026-01-13 22:48:21 -08:00
Waleed
4f04b1efea feat(terminal): migrate from zustand for console terminal logs to indexedDb, incr limit from 5mb to ~GBs (#2812)
* feat(terminal): migrate from zustand for console terminal logs to indexedDb, incr limit from 5mb to ~GBs

* ack PR comments
2026-01-13 22:42:15 -08:00
Vikhyath Mondreti
258e96d6b5 improvement(pricing): drop agent multiplier in docs, change base exec cost 2026-01-13 22:03:19 -08:00
Waleed
4b026ad54d fix(a2a): added file data part and data data part to a2a agents (#2805)
* fix(a2a): added file data part and data data part to a2a agents

* removed unused streaming tool

* ack comment
2026-01-13 21:08:00 -08:00
Waleed
f6b7c15dc4 improvement(oauth): added random identifier in unused accountId to bypass betterauth unique constraint (#2807)
* improvement(oauth): added random identifier in unnused accountId to bypass betterauth unique constraint

* ack pr comments
2026-01-13 21:01:42 -08:00
Vikhyath Mondreti
70ed19fcdb fix(chat): remove special handling for non-streaming (#2808) 2026-01-13 20:43:37 -08:00
Waleed
d6e4c91e81 fix(invitations): preserve tokens after error (#2806) 2026-01-13 20:10:44 -08:00
Vikhyath Mondreti
e3fa40af11 fix(sockets): redrawing edges should not lead to socket ops (#2804)
* fix(sockets): redrawing edges should not lead to socket ops

* consolidate
2026-01-13 18:45:41 -08:00
Waleed
6e0055f847 feat(slack): added get message by timestamp and get thread tool (#2803)
* feat(slack): added get message tool

* added get thread
2026-01-13 18:37:06 -08:00
Vikhyath Mondreti
ebbe67aae3 fix(triggers): cleanup trigger outputs formatting, fix display name issues (#2801)
* fix(triggers): package lemlist data, cleanup trigger outputs formatting, fix display name issues

* cleanup trigger outputs

* fix tests

* more test fixes

* remove branch field for ones where it's not relevant

* remove branch from unrelated ops
2026-01-13 17:48:19 -08:00
Waleed
2b49d15ec8 fix(comparison): add condition to prevent duplicate identical edges (#2799)
* fix)comparison): add condition to prevent duplicate identical edges, ignore from workflow change detection

* fix failing test

* added back store check
2026-01-13 17:17:23 -08:00
Siddharth Ganesan
3d037c9b74 fix(executor): pattern match more errors to prevent swallow (#2802) 2026-01-13 17:12:31 -08:00
99 changed files with 12960 additions and 1822 deletions

View File

@@ -552,6 +552,53 @@ All fields automatically have:
- `mode: 'trigger'` - Only shown in trigger mode
- `condition: { field: 'selectedTriggerId', value: triggerId }` - Only shown when this trigger is selected
## Trigger Outputs & Webhook Input Formatting
### Important: Two Sources of Truth
There are two related but separate concerns:
1. **Trigger `outputs`** - Schema/contract defining what fields SHOULD be available. Used by UI for tag dropdown.
2. **`formatWebhookInput`** - Implementation that transforms raw webhook payload into actual data. Located in `apps/sim/lib/webhooks/utils.server.ts`.
**These MUST be aligned.** The fields returned by `formatWebhookInput` should match what's defined in trigger `outputs`. If they differ:
- The tag dropdown shows fields that don't exist (broken variable resolution)
- Or the actual data contains fields that aren't shown in the dropdown (so users can't discover them)
### When to Add a formatWebhookInput Handler
- **Simple providers**: If the raw webhook payload structure already matches your outputs, you don't need a handler. The generic fallback returns `body` directly.
- **Complex providers**: If you need to transform, flatten, extract nested data, compute fields, or handle conditional logic, add a handler.
### Adding a Handler
In `apps/sim/lib/webhooks/utils.server.ts`, add a handler block:
```typescript
if (foundWebhook.provider === '{service}') {
// Transform raw webhook body to match trigger outputs
return {
eventType: body.type,
resourceId: body.data?.id || '',
timestamp: body.created_at,
resource: body.data,
}
}
```
**Key rules:**
- Return fields that match your trigger `outputs` definition exactly
- No wrapper objects like `webhook: { data: ... }` or `{service}: { ... }`
- No duplication (don't spread body AND add individual fields)
- Use `null` for missing optional data, not empty objects with empty strings
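To make the rules concrete, here is a small illustration using the hypothetical fields from the handler sketch above (the values are made up):
```typescript
// Hypothetical shapes for the '{service}' handler above; not a real provider.

// Good: flat fields that mirror the trigger `outputs` definition exactly.
const good = {
  eventType: 'resource.updated',
  resourceId: 'res_123',
  timestamp: '2026-01-13T00:00:00Z',
  resource: { id: 'res_123', name: 'Example' },
}

// Bad: wrapper object, duplication via spread, and an empty string instead of null.
const bad = {
  webhook: { data: good }, // wrapper object the tag dropdown can't see
  ...good,                 // duplicated fields
  resourceId: '',          // should be null if the value is missing
}
```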
### Verify Alignment
Run the alignment checker:
```bash
bunx scripts/check-trigger-alignment.ts {service}
```
## Trigger Outputs
Trigger outputs use the same schema as block outputs (NOT tool outputs).
@@ -649,6 +696,11 @@ export const {service}WebhookTrigger: TriggerConfig = {
- [ ] Added `delete{Service}Webhook` function to `provider-subscriptions.ts`
- [ ] Added provider to `cleanupExternalWebhook` function
### Webhook Input Formatting
- [ ] Added handler in `apps/sim/lib/webhooks/utils.server.ts` (if custom formatting needed)
- [ ] Handler returns fields matching trigger `outputs` exactly
- [ ] Run `bunx scripts/check-trigger-alignment.ts {service}` to verify alignment
### Testing
- [ ] Run `bun run type-check` to verify no TypeScript errors
- [ ] Restart dev server to pick up new triggers

View File

@@ -1855,17 +1855,25 @@ export function LinearIcon(props: React.SVGProps<SVGSVGElement>) {
export function LemlistIcon(props: SVGProps<SVGSVGElement>) {
return (
<svg
{...props}
xmlns='http://www.w3.org/2000/svg'
viewBox='0 0 24 24'
width='24'
height='24'
fill='none'
>
<rect width='24' height='24' rx='4' fill='#316BFF' />
<path d='M7 6h2v9h5v2H7V6Z' fill='white' />
<circle cx='17' cy='8' r='2' fill='white' />
<svg {...props} xmlns='http://www.w3.org/2000/svg' viewBox='0 0 180 181' fill='none'>
<path
fillRule='evenodd'
clipRule='evenodd'
d='M32.0524 0.919922H147.948C165.65 0.919922 180 15.2703 180 32.9723V148.867C180 166.57 165.65 180.92 147.948 180.92H32.0524C14.3504 180.92 0 166.57 0 148.867V32.9723C0 15.2703 14.3504 0.919922 32.0524 0.919922ZM119.562 82.8879H85.0826C82.4732 82.8879 80.3579 85.0032 80.3579 87.6126V94.2348C80.3579 96.8442 82.4732 98.9595 85.0826 98.9595H119.562C122.171 98.9595 124.286 96.8442 124.286 94.2348V87.6126C124.286 85.0032 122.171 82.8879 119.562 82.8879ZM85.0826 49.1346H127.061C129.67 49.1346 131.785 51.2499 131.785 53.8593V60.4815C131.785 63.0909 129.67 65.2062 127.061 65.2062H85.0826C82.4732 65.2062 80.3579 63.0909 80.3579 60.4815V53.8593C80.3579 51.2499 82.4732 49.1346 85.0826 49.1346ZM131.785 127.981V121.358C131.785 118.75 129.669 116.634 127.061 116.634H76.5706C69.7821 116.634 64.2863 111.138 64.2863 104.349V53.8593C64.2863 51.2513 62.1697 49.1346 59.5616 49.1346H52.9395C50.3314 49.1346 48.2147 51.2513 48.2147 53.8593V114.199C48.8497 124.133 56.7873 132.07 66.7205 132.705H127.061C129.669 132.705 131.785 130.589 131.785 127.981Z'
fill='#316BFF'
/>
<path
d='M85.0826 49.1346H127.061C129.67 49.1346 131.785 51.2499 131.785 53.8593V60.4815C131.785 63.0909 129.67 65.2062 127.061 65.2062H85.0826C82.4732 65.2062 80.3579 63.0909 80.3579 60.4815V53.8593C80.3579 51.2499 82.4732 49.1346 85.0826 49.1346Z'
fill='white'
/>
<path
d='M85.0826 82.8879H119.562C122.171 82.8879 124.286 85.0032 124.286 87.6126V94.2348C124.286 96.8442 122.171 98.9595 119.562 98.9595H85.0826C82.4732 98.9595 80.3579 96.8442 80.3579 94.2348V87.6126C80.3579 85.0032 82.4732 82.8879 85.0826 82.8879Z'
fill='white'
/>
<path
d='M131.785 121.358V127.981C131.785 130.589 129.669 132.705 127.061 132.705H66.7205C56.7873 132.07 48.8497 124.133 48.2147 114.199V53.8593C48.2147 51.2513 50.3314 49.1346 52.9395 49.1346H59.5616C62.1697 49.1346 64.2863 51.2513 64.2863 53.8593V104.349C64.2863 111.138 69.7821 116.634 76.5706 116.634H127.061C129.669 116.634 131.785 118.75 131.785 121.358Z'
fill='white'
/>
</svg>
)
}
@@ -1889,6 +1897,19 @@ export function TelegramIcon(props: SVGProps<SVGSVGElement>) {
)
}
export function TinybirdIcon(props: SVGProps<SVGSVGElement>) {
return (
<svg {...props} xmlns='http://www.w3.org/2000/svg' viewBox='0 0 24 24' fill='none'>
<rect x='0' y='0' width='24' height='24' fill='#2EF598' rx='6' />
<g transform='translate(2, 2) scale(0.833)'>
<path d='M25 2.64 17.195.5 14.45 6.635z' fill='#1E7F63' />
<path d='M17.535 17.77 10.39 15.215 6.195 25.5z' fill='#1E7F63' />
<path d='M0 11.495 17.535 17.77 20.41 4.36z' fill='#1F2437' />
</g>
</svg>
)
}
export function ClayIcon(props: SVGProps<SVGSVGElement>) {
return (
<svg {...props} xmlns='http://www.w3.org/2000/svg' width='40' height='40' viewBox='0 0 400 400'>

View File

@@ -107,6 +107,7 @@ import {
SupabaseIcon,
TavilyIcon,
TelegramIcon,
TinybirdIcon,
TranslateIcon,
TrelloIcon,
TTSIcon,
@@ -230,6 +231,8 @@ export const blockTypeToIconMap: Record<string, IconComponent> = {
supabase: SupabaseIcon,
tavily: TavilyIcon,
telegram: TelegramIcon,
thinking: BrainIcon,
tinybird: TinybirdIcon,
translate: TranslateIcon,
trello: TrelloIcon,
tts: TTSIcon,

View File

@@ -12,7 +12,7 @@ Sim automatically calculates costs for all workflow executions, providing transp
Every workflow execution includes two cost components:
**Base Execution Charge**: $0.001 per execution
**Base Execution Charge**: $0.005 per execution
**AI Model Usage**: Variable cost based on token consumption
```javascript
@@ -48,40 +48,40 @@ The model breakdown shows:
<Tabs items={['Hosted Models', 'Bring Your Own API Key']}>
<Tab>
**Hosted Models** - Sim provides API keys with a 1.4x pricing multiplier for Agent blocks:
**Hosted Models** - Sim provides API keys with a 1.1x pricing multiplier for Agent blocks:
**OpenAI**
| Model | Base Price (Input/Output) | Hosted Price (Input/Output) |
|-------|---------------------------|----------------------------|
| GPT-5.1 | $1.25 / $10.00 | $1.75 / $14.00 |
| GPT-5 | $1.25 / $10.00 | $1.75 / $14.00 |
| GPT-5 Mini | $0.25 / $2.00 | $0.35 / $2.80 |
| GPT-5 Nano | $0.05 / $0.40 | $0.07 / $0.56 |
| GPT-4o | $2.50 / $10.00 | $3.50 / $14.00 |
| GPT-4.1 | $2.00 / $8.00 | $2.80 / $11.20 |
| GPT-4.1 Mini | $0.40 / $1.60 | $0.56 / $2.24 |
| GPT-4.1 Nano | $0.10 / $0.40 | $0.14 / $0.56 |
| o1 | $15.00 / $60.00 | $21.00 / $84.00 |
| o3 | $2.00 / $8.00 | $2.80 / $11.20 |
| o4 Mini | $1.10 / $4.40 | $1.54 / $6.16 |
| GPT-5.1 | $1.25 / $10.00 | $1.38 / $11.00 |
| GPT-5 | $1.25 / $10.00 | $1.38 / $11.00 |
| GPT-5 Mini | $0.25 / $2.00 | $0.28 / $2.20 |
| GPT-5 Nano | $0.05 / $0.40 | $0.06 / $0.44 |
| GPT-4o | $2.50 / $10.00 | $2.75 / $11.00 |
| GPT-4.1 | $2.00 / $8.00 | $2.20 / $8.80 |
| GPT-4.1 Mini | $0.40 / $1.60 | $0.44 / $1.76 |
| GPT-4.1 Nano | $0.10 / $0.40 | $0.11 / $0.44 |
| o1 | $15.00 / $60.00 | $16.50 / $66.00 |
| o3 | $2.00 / $8.00 | $2.20 / $8.80 |
| o4 Mini | $1.10 / $4.40 | $1.21 / $4.84 |
**Anthropic**
| Model | Base Price (Input/Output) | Hosted Price (Input/Output) |
|-------|---------------------------|----------------------------|
| Claude Opus 4.5 | $5.00 / $25.00 | $7.00 / $35.00 |
| Claude Opus 4.1 | $15.00 / $75.00 | $21.00 / $105.00 |
| Claude Sonnet 4.5 | $3.00 / $15.00 | $4.20 / $21.00 |
| Claude Sonnet 4.0 | $3.00 / $15.00 | $4.20 / $21.00 |
| Claude Haiku 4.5 | $1.00 / $5.00 | $1.40 / $7.00 |
| Claude Opus 4.5 | $5.00 / $25.00 | $5.50 / $27.50 |
| Claude Opus 4.1 | $15.00 / $75.00 | $16.50 / $82.50 |
| Claude Sonnet 4.5 | $3.00 / $15.00 | $3.30 / $16.50 |
| Claude Sonnet 4.0 | $3.00 / $15.00 | $3.30 / $16.50 |
| Claude Haiku 4.5 | $1.00 / $5.00 | $1.10 / $5.50 |
**Google**
| Model | Base Price (Input/Output) | Hosted Price (Input/Output) |
|-------|---------------------------|----------------------------|
| Gemini 3 Pro Preview | $2.00 / $12.00 | $2.80 / $16.80 |
| Gemini 2.5 Pro | $1.25 / $10.00 | $1.75 / $14.00 |
| Gemini 2.5 Flash | $0.30 / $2.50 | $0.42 / $3.50 |
| Gemini 3 Pro Preview | $2.00 / $12.00 | $2.20 / $13.20 |
| Gemini 2.5 Pro | $1.25 / $10.00 | $1.38 / $11.00 |
| Gemini 2.5 Flash | $0.30 / $2.50 | $0.33 / $2.75 |
*The 1.4x multiplier covers infrastructure and API management costs.*
*The 1.1x multiplier covers infrastructure and API management costs.*
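As a worked example using the updated numbers above (1.1x hosted multiplier plus the $0.005 base execution charge), a rough per-execution estimate could be computed like this; an illustrative sketch, not the actual billing code:
```typescript
// Illustrative only: hosted price = base model price x 1.1, plus the per-execution charge.
const BASE_EXECUTION_CHARGE = 0.005
const HOSTED_MULTIPLIER = 1.1

function estimateCost(
  inputTokens: number,
  outputTokens: number,
  inputPricePerM: number, // base $ per 1M input tokens
  outputPricePerM: number // base $ per 1M output tokens
): number {
  const modelCost =
    (inputTokens / 1_000_000) * inputPricePerM * HOSTED_MULTIPLIER +
    (outputTokens / 1_000_000) * outputPricePerM * HOSTED_MULTIPLIER
  return BASE_EXECUTION_CHARGE + modelCost
}

// GPT-5 ($1.25 / $10.00 base): 10k input + 2k output tokens
// => 0.005 + 0.01375 + 0.022 = ~$0.041
estimateCost(10_000, 2_000, 1.25, 10.0)
```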
</Tab>
<Tab>

View File

@@ -44,6 +44,8 @@ Send a message to an external A2A-compatible agent.
| `message` | string | Yes | Message to send to the agent |
| `taskId` | string | No | Task ID for continuing an existing task |
| `contextId` | string | No | Context ID for conversation continuity |
| `data` | string | No | Structured data to include with the message \(JSON string\) |
| `files` | array | No | Files to include with the message |
| `apiKey` | string | No | API key for authentication |
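Based on the file schema added in this PR (`type`, `data`, `name`, `mime`), a call to this tool might look roughly like the following; the values are purely illustrative:
```typescript
// Illustrative input for the send-message tool with the new `data` and `files` parameters.
const input = {
  agentUrl: 'https://agent.example.com/a2a',
  message: 'Summarize the attached report',
  data: JSON.stringify({ priority: 'high' }), // forwarded as a DataPart on the A2A message
  files: [
    // type 'url' becomes a FilePart with a URI; type 'file' carries base64 bytes
    { type: 'url', name: 'report.pdf', data: 'https://example.com/report.pdf', mime: 'application/pdf' },
    { type: 'file', name: 'notes.txt', data: 'data:text/plain;base64,aGVsbG8=', mime: 'text/plain' },
  ],
}
```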
#### Output
@@ -208,8 +210,3 @@ Delete the push notification webhook configuration for a task.
| `success` | boolean | Whether deletion was successful |
## Notes
- Category: `tools`
- Type: `a2a`

View File

@@ -49,8 +49,7 @@ Retrieves lead information by email address or lead ID.
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Lemlist API key |
| `email` | string | No | Lead email address \(use either email or id\) |
| `id` | string | No | Lead ID \(use either email or id\) |
| `leadIdentifier` | string | Yes | Lead email address or lead ID |
#### Output

View File

@@ -103,6 +103,8 @@
"supabase",
"tavily",
"telegram",
"thinking",
"tinybird",
"translate",
"trello",
"tts",

View File

@@ -124,6 +124,45 @@ Read the latest messages from Slack channels. Retrieve conversation history with
| --------- | ---- | ----------- |
| `messages` | array | Array of message objects from the channel |
### `slack_get_message`
Retrieve a specific message by its timestamp. Useful for getting a thread parent message.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `authMethod` | string | No | Authentication method: oauth or bot_token |
| `botToken` | string | No | Bot token for Custom Bot |
| `channel` | string | Yes | Slack channel ID \(e.g., C1234567890\) |
| `timestamp` | string | Yes | Message timestamp to retrieve \(e.g., 1405894322.002768\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `message` | object | The retrieved message object |
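Under the hood this kind of lookup typically maps to Slack's `conversations.history` endpoint with the timestamp pinned via `latest`/`oldest` and `inclusive`; a hedged sketch, not the actual tool implementation:
```typescript
// Sketch: fetch a single message by timestamp via the Slack Web API (assumes a bot token).
async function getMessageByTimestamp(botToken: string, channel: string, timestamp: string) {
  const params = new URLSearchParams({
    channel,
    latest: timestamp,
    oldest: timestamp,
    inclusive: 'true',
    limit: '1',
  })
  const res = await fetch(`https://slack.com/api/conversations.history?${params}`, {
    headers: { Authorization: `Bearer ${botToken}` },
  })
  const body = await res.json()
  return body.messages?.[0] // the retrieved message object, if found
}
```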
### `slack_get_thread`
Retrieve an entire thread, including the parent message and all replies. Useful for getting full conversation context.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `authMethod` | string | No | Authentication method: oauth or bot_token |
| `botToken` | string | No | Bot token for Custom Bot |
| `channel` | string | Yes | Slack channel ID \(e.g., C1234567890\) |
| `threadTs` | string | Yes | Thread timestamp \(thread_ts\) to retrieve \(e.g., 1405894322.002768\) |
| `limit` | number | No | Maximum number of messages to return \(default: 100, max: 200\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `parentMessage` | object | The thread parent message |
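Retrieving a full thread similarly corresponds to Slack's `conversations.replies` endpoint keyed by the parent `thread_ts`; again a hedged sketch rather than the tool's actual code:
```typescript
// Sketch: fetch a thread (parent message plus replies) via the Slack Web API.
async function getThread(botToken: string, channel: string, threadTs: string, limit = 100) {
  const params = new URLSearchParams({ channel, ts: threadTs, limit: String(limit) })
  const res = await fetch(`https://slack.com/api/conversations.replies?${params}`, {
    headers: { Authorization: `Bearer ${botToken}` },
  })
  const body = await res.json()
  const [parentMessage, ...replies] = body.messages ?? []
  return { parentMessage, replies }
}
```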
### `slack_list_channels`
List all channels in a Slack workspace. Returns public and private channels the bot has access to.

View File

@@ -0,0 +1,70 @@
---
title: Tinybird
description: Send events and query data with Tinybird
---
import { BlockInfoCard } from "@/components/ui/block-info-card"
<BlockInfoCard
type="tinybird"
color="#2EF598"
/>
## Usage Instructions
Interact with Tinybird using the Events API to stream JSON or NDJSON events, or use the Query API to execute SQL queries against Pipes and Data Sources.
## Tools
### `tinybird_events`
Send events to a Tinybird Data Source using the Events API. Supports JSON and NDJSON formats with optional gzip compression.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `base_url` | string | Yes | Tinybird API base URL \(e.g., https://api.tinybird.co or https://api.us-east.tinybird.co\) |
| `datasource` | string | Yes | Name of the Tinybird Data Source to send events to |
| `data` | string | Yes | Data to send as NDJSON \(newline-delimited JSON\) or JSON string. Each event should be a valid JSON object. |
| `wait` | boolean | No | Wait for database acknowledgment before responding. Enables safer retries but introduces latency. Defaults to false. |
| `format` | string | No | Format of the events data: "ndjson" \(default\) or "json" |
| `compression` | string | No | Compression format: "none" \(default\) or "gzip" |
| `token` | string | Yes | Tinybird API Token with DATASOURCE:APPEND or DATASOURCE:CREATE scope |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `successful_rows` | number | Number of rows successfully ingested |
| `quarantined_rows` | number | Number of rows quarantined \(failed validation\) |
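At the HTTP level, the Events API is a POST to `/v0/events` with the Data Source name as a query parameter, a Bearer token, and NDJSON in the body; a hedged sketch of the request this tool makes:
```typescript
// Sketch: append NDJSON events to a Tinybird Data Source via the Events API.
async function sendEvents(baseUrl: string, token: string, datasource: string, ndjson: string, wait = false) {
  const url = `${baseUrl}/v0/events?name=${encodeURIComponent(datasource)}${wait ? '&wait=true' : ''}`
  const res = await fetch(url, {
    method: 'POST',
    headers: { Authorization: `Bearer ${token}` },
    body: ndjson, // one JSON object per line
  })
  return res.json() // e.g. { successful_rows: 2, quarantined_rows: 0 }
}

// sendEvents('https://api.tinybird.co', token, 'events_ds', '{"ts":"2026-01-14T00:00:00Z","value":1}\n')
```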
### `tinybird_query`
Execute SQL queries against Tinybird Pipes and Data Sources using the Query API.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `base_url` | string | Yes | Tinybird API base URL \(e.g., https://api.tinybird.co\) |
| `query` | string | Yes | SQL query to execute. Specify your desired output format \(e.g., FORMAT JSON, FORMAT CSV, FORMAT TSV\). JSON format provides structured data, while other formats return raw text. |
| `pipeline` | string | No | Optional pipe name. When provided, enables SELECT * FROM _ syntax |
| `token` | string | Yes | Tinybird API Token with PIPE:READ scope |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `data` | json | Query result data. For FORMAT JSON: array of objects. For other formats \(CSV, TSV, etc.\): raw text string. |
| `rows` | number | Number of rows returned \(only available with FORMAT JSON\) |
| `statistics` | json | Query execution statistics - elapsed time, rows read, bytes read \(only available with FORMAT JSON\) |
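The Query API is a single call to `/v0/sql` with the SQL (including its FORMAT clause) and a Bearer token; a hedged sketch:
```typescript
// Sketch: run a SQL query against Tinybird via the Query API.
async function runQuery(baseUrl: string, token: string, sql: string) {
  const res = await fetch(`${baseUrl}/v0/sql?q=${encodeURIComponent(sql)}`, {
    headers: { Authorization: `Bearer ${token}` },
  })
  // With FORMAT JSON the response includes data, rows, and statistics;
  // other formats (CSV, TSV, ...) come back as raw text.
  const contentType = res.headers.get('content-type') ?? ''
  return contentType.includes('application/json') ? res.json() : res.text()
}

// runQuery('https://api.tinybird.co', token, 'SELECT count() AS c FROM events_ds FORMAT JSON')
```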
## Notes
- Category: `tools`
- Type: `tinybird`

View File

@@ -1,150 +0,0 @@
import type {
Artifact,
Message,
Task,
TaskArtifactUpdateEvent,
TaskState,
TaskStatusUpdateEvent,
} from '@a2a-js/sdk'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { createA2AClient, extractTextContent, isTerminalState } from '@/lib/a2a/utils'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
export const dynamic = 'force-dynamic'
const logger = createLogger('A2ASendMessageStreamAPI')
const A2ASendMessageStreamSchema = z.object({
agentUrl: z.string().min(1, 'Agent URL is required'),
message: z.string().min(1, 'Message is required'),
taskId: z.string().optional(),
contextId: z.string().optional(),
apiKey: z.string().optional(),
})
export async function POST(request: NextRequest) {
const requestId = generateRequestId()
try {
const authResult = await checkHybridAuth(request, { requireWorkflowId: false })
if (!authResult.success) {
logger.warn(
`[${requestId}] Unauthorized A2A send message stream attempt: ${authResult.error}`
)
return NextResponse.json(
{
success: false,
error: authResult.error || 'Authentication required',
},
{ status: 401 }
)
}
logger.info(
`[${requestId}] Authenticated A2A send message stream request via ${authResult.authType}`,
{
userId: authResult.userId,
}
)
const body = await request.json()
const validatedData = A2ASendMessageStreamSchema.parse(body)
logger.info(`[${requestId}] Sending A2A streaming message`, {
agentUrl: validatedData.agentUrl,
hasTaskId: !!validatedData.taskId,
hasContextId: !!validatedData.contextId,
})
const client = await createA2AClient(validatedData.agentUrl, validatedData.apiKey)
const message: Message = {
kind: 'message',
messageId: crypto.randomUUID(),
role: 'user',
parts: [{ kind: 'text', text: validatedData.message }],
...(validatedData.taskId && { taskId: validatedData.taskId }),
...(validatedData.contextId && { contextId: validatedData.contextId }),
}
const stream = client.sendMessageStream({ message })
let taskId = ''
let contextId: string | undefined
let state: TaskState = 'working'
let content = ''
let artifacts: Artifact[] = []
let history: Message[] = []
for await (const event of stream) {
if (event.kind === 'message') {
const msg = event as Message
content = extractTextContent(msg)
taskId = msg.taskId || taskId
contextId = msg.contextId || contextId
state = 'completed'
} else if (event.kind === 'task') {
const task = event as Task
taskId = task.id
contextId = task.contextId
state = task.status.state
artifacts = task.artifacts || []
history = task.history || []
const lastAgentMessage = history.filter((m) => m.role === 'agent').pop()
if (lastAgentMessage) {
content = extractTextContent(lastAgentMessage)
}
} else if ('status' in event) {
const statusEvent = event as TaskStatusUpdateEvent
state = statusEvent.status.state
} else if ('artifact' in event) {
const artifactEvent = event as TaskArtifactUpdateEvent
artifacts.push(artifactEvent.artifact)
}
}
logger.info(`[${requestId}] A2A streaming message completed`, {
taskId,
state,
artifactCount: artifacts.length,
})
return NextResponse.json({
success: isTerminalState(state) && state !== 'failed',
output: {
content,
taskId,
contextId,
state,
artifacts,
history,
},
})
} catch (error) {
if (error instanceof z.ZodError) {
logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
return NextResponse.json(
{
success: false,
error: 'Invalid request data',
details: error.errors,
},
{ status: 400 }
)
}
logger.error(`[${requestId}] Error in A2A streaming:`, error)
return NextResponse.json(
{
success: false,
error: error instanceof Error ? error.message : 'Streaming failed',
},
{ status: 500 }
)
}
}

View File

@@ -1,4 +1,4 @@
import type { Message, Task } from '@a2a-js/sdk'
import type { DataPart, FilePart, Message, Part, Task, TextPart } from '@a2a-js/sdk'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
@@ -10,11 +10,20 @@ export const dynamic = 'force-dynamic'
const logger = createLogger('A2ASendMessageAPI')
const FileInputSchema = z.object({
type: z.enum(['file', 'url']),
data: z.string(),
name: z.string(),
mime: z.string().optional(),
})
const A2ASendMessageSchema = z.object({
agentUrl: z.string().min(1, 'Agent URL is required'),
message: z.string().min(1, 'Message is required'),
taskId: z.string().optional(),
contextId: z.string().optional(),
data: z.string().optional(),
files: z.array(FileInputSchema).optional(),
apiKey: z.string().optional(),
})
@@ -51,18 +60,100 @@ export async function POST(request: NextRequest) {
hasContextId: !!validatedData.contextId,
})
const client = await createA2AClient(validatedData.agentUrl, validatedData.apiKey)
let client
try {
client = await createA2AClient(validatedData.agentUrl, validatedData.apiKey)
logger.info(`[${requestId}] A2A client created successfully`)
} catch (clientError) {
logger.error(`[${requestId}] Failed to create A2A client:`, clientError)
return NextResponse.json(
{
success: false,
error: `Failed to connect to agent: ${clientError instanceof Error ? clientError.message : 'Unknown error'}`,
},
{ status: 502 }
)
}
const parts: Part[] = []
const textPart: TextPart = { kind: 'text', text: validatedData.message }
parts.push(textPart)
if (validatedData.data) {
try {
const parsedData = JSON.parse(validatedData.data)
const dataPart: DataPart = { kind: 'data', data: parsedData }
parts.push(dataPart)
} catch (parseError) {
logger.warn(`[${requestId}] Failed to parse data as JSON, skipping DataPart`, {
error: parseError instanceof Error ? parseError.message : String(parseError),
})
}
}
if (validatedData.files && validatedData.files.length > 0) {
for (const file of validatedData.files) {
if (file.type === 'url') {
const filePart: FilePart = {
kind: 'file',
file: {
name: file.name,
mimeType: file.mime,
uri: file.data,
},
}
parts.push(filePart)
} else if (file.type === 'file') {
let bytes = file.data
let mimeType = file.mime
if (file.data.startsWith('data:')) {
const match = file.data.match(/^data:([^;]+);base64,(.+)$/)
if (match) {
mimeType = mimeType || match[1]
bytes = match[2]
} else {
bytes = file.data
}
}
const filePart: FilePart = {
kind: 'file',
file: {
name: file.name,
mimeType: mimeType || 'application/octet-stream',
bytes,
},
}
parts.push(filePart)
}
}
}
const message: Message = {
kind: 'message',
messageId: crypto.randomUUID(),
role: 'user',
parts: [{ kind: 'text', text: validatedData.message }],
parts,
...(validatedData.taskId && { taskId: validatedData.taskId }),
...(validatedData.contextId && { contextId: validatedData.contextId }),
}
const result = await client.sendMessage({ message })
let result
try {
result = await client.sendMessage({ message })
logger.info(`[${requestId}] A2A sendMessage completed`, { resultKind: result?.kind })
} catch (sendError) {
logger.error(`[${requestId}] Failed to send A2A message:`, sendError)
return NextResponse.json(
{
success: false,
error: `Failed to send message: ${sendError instanceof Error ? sendError.message : 'Unknown error'}`,
},
{ status: 502 }
)
}
if (result.kind === 'message') {
const responseMessage = result as Message

View File

@@ -2,13 +2,6 @@ import { createSession, createWorkspaceRecord, loggerMock } from '@sim/testing'
import { NextRequest } from 'next/server'
import { beforeEach, describe, expect, it, vi } from 'vitest'
/**
* Tests for workspace invitation by ID API route
* Tests GET (details + token acceptance), DELETE (cancellation)
*
* @vitest-environment node
*/
const mockGetSession = vi.fn()
const mockHasWorkspaceAdminAccess = vi.fn()
@@ -227,7 +220,7 @@ describe('Workspace Invitation [invitationId] API Route', () => {
expect(response.headers.get('location')).toBe('https://test.sim.ai/workspace/workspace-456/w')
})
it('should redirect to error page when invitation expired', async () => {
it('should redirect to error page with token preserved when invitation expired', async () => {
const session = createSession({
userId: mockUser.id,
email: 'invited@example.com',
@@ -250,12 +243,13 @@ describe('Workspace Invitation [invitationId] API Route', () => {
const response = await GET(request, { params })
expect(response.status).toBe(307)
expect(response.headers.get('location')).toBe(
'https://test.sim.ai/invite/invitation-789?error=expired'
const location = response.headers.get('location')
expect(location).toBe(
'https://test.sim.ai/invite/invitation-789?error=expired&token=token-abc123'
)
})
it('should redirect to error page when email mismatch', async () => {
it('should redirect to error page with token preserved when email mismatch', async () => {
const session = createSession({
userId: mockUser.id,
email: 'wrong@example.com',
@@ -277,12 +271,13 @@ describe('Workspace Invitation [invitationId] API Route', () => {
const response = await GET(request, { params })
expect(response.status).toBe(307)
expect(response.headers.get('location')).toBe(
'https://test.sim.ai/invite/invitation-789?error=email-mismatch'
const location = response.headers.get('location')
expect(location).toBe(
'https://test.sim.ai/invite/invitation-789?error=email-mismatch&token=token-abc123'
)
})
it('should return 404 when invitation not found', async () => {
it('should return 404 when invitation not found (without token)', async () => {
const session = createSession({ userId: mockUser.id, email: mockUser.email })
mockGetSession.mockResolvedValue(session)
dbSelectResults = [[]]
@@ -296,6 +291,189 @@ describe('Workspace Invitation [invitationId] API Route', () => {
expect(response.status).toBe(404)
expect(data).toEqual({ error: 'Invitation not found or has expired' })
})
it('should redirect to error page with token preserved when invitation not found (with token)', async () => {
const session = createSession({ userId: mockUser.id, email: mockUser.email })
mockGetSession.mockResolvedValue(session)
dbSelectResults = [[]]
const request = new NextRequest(
'http://localhost/api/workspaces/invitations/non-existent?token=some-invalid-token'
)
const params = Promise.resolve({ invitationId: 'non-existent' })
const response = await GET(request, { params })
expect(response.status).toBe(307)
const location = response.headers.get('location')
expect(location).toBe(
'https://test.sim.ai/invite/non-existent?error=invalid-token&token=some-invalid-token'
)
})
it('should redirect to error page with token preserved when invitation already processed', async () => {
const session = createSession({
userId: mockUser.id,
email: 'invited@example.com',
name: mockUser.name,
})
mockGetSession.mockResolvedValue(session)
const acceptedInvitation = {
...mockInvitation,
status: 'accepted',
}
dbSelectResults = [[acceptedInvitation], [mockWorkspace]]
const request = new NextRequest(
'http://localhost/api/workspaces/invitations/token-abc123?token=token-abc123'
)
const params = Promise.resolve({ invitationId: 'token-abc123' })
const response = await GET(request, { params })
expect(response.status).toBe(307)
const location = response.headers.get('location')
expect(location).toBe(
'https://test.sim.ai/invite/invitation-789?error=already-processed&token=token-abc123'
)
})
it('should redirect to error page with token preserved when workspace not found', async () => {
const session = createSession({
userId: mockUser.id,
email: 'invited@example.com',
name: mockUser.name,
})
mockGetSession.mockResolvedValue(session)
dbSelectResults = [[mockInvitation], []]
const request = new NextRequest(
'http://localhost/api/workspaces/invitations/token-abc123?token=token-abc123'
)
const params = Promise.resolve({ invitationId: 'token-abc123' })
const response = await GET(request, { params })
expect(response.status).toBe(307)
const location = response.headers.get('location')
expect(location).toBe(
'https://test.sim.ai/invite/invitation-789?error=workspace-not-found&token=token-abc123'
)
})
it('should redirect to error page with token preserved when user not found', async () => {
const session = createSession({
userId: mockUser.id,
email: 'invited@example.com',
name: mockUser.name,
})
mockGetSession.mockResolvedValue(session)
dbSelectResults = [[mockInvitation], [mockWorkspace], []]
const request = new NextRequest(
'http://localhost/api/workspaces/invitations/token-abc123?token=token-abc123'
)
const params = Promise.resolve({ invitationId: 'token-abc123' })
const response = await GET(request, { params })
expect(response.status).toBe(307)
const location = response.headers.get('location')
expect(location).toBe(
'https://test.sim.ai/invite/invitation-789?error=user-not-found&token=token-abc123'
)
})
it('should URL encode special characters in token when preserving in error redirects', async () => {
const session = createSession({
userId: mockUser.id,
email: 'wrong@example.com',
name: mockUser.name,
})
mockGetSession.mockResolvedValue(session)
dbSelectResults = [
[mockInvitation],
[mockWorkspace],
[{ ...mockUser, email: 'wrong@example.com' }],
]
const specialToken = 'token+with/special=chars&more'
const request = new NextRequest(
`http://localhost/api/workspaces/invitations/token-abc123?token=${encodeURIComponent(specialToken)}`
)
const params = Promise.resolve({ invitationId: 'token-abc123' })
const response = await GET(request, { params })
expect(response.status).toBe(307)
const location = response.headers.get('location')
expect(location).toContain('error=email-mismatch')
expect(location).toContain(`token=${encodeURIComponent(specialToken)}`)
})
})
describe('Token Preservation - Full Flow Scenario', () => {
it('should preserve token through email mismatch so user can retry with correct account', async () => {
const wrongSession = createSession({
userId: 'wrong-user',
email: 'wrong@example.com',
name: 'Wrong User',
})
mockGetSession.mockResolvedValue(wrongSession)
dbSelectResults = [
[mockInvitation],
[mockWorkspace],
[{ id: 'wrong-user', email: 'wrong@example.com' }],
]
const request1 = new NextRequest(
'http://localhost/api/workspaces/invitations/token-abc123?token=token-abc123'
)
const params1 = Promise.resolve({ invitationId: 'token-abc123' })
const response1 = await GET(request1, { params: params1 })
expect(response1.status).toBe(307)
const location1 = response1.headers.get('location')
expect(location1).toBe(
'https://test.sim.ai/invite/invitation-789?error=email-mismatch&token=token-abc123'
)
vi.clearAllMocks()
dbSelectCallIndex = 0
const correctSession = createSession({
userId: mockUser.id,
email: 'invited@example.com',
name: mockUser.name,
})
mockGetSession.mockResolvedValue(correctSession)
dbSelectResults = [
[mockInvitation],
[mockWorkspace],
[{ ...mockUser, email: 'invited@example.com' }],
[],
]
const request2 = new NextRequest(
'http://localhost/api/workspaces/invitations/token-abc123?token=token-abc123'
)
const params2 = Promise.resolve({ invitationId: 'token-abc123' })
const response2 = await GET(request2, { params: params2 })
expect(response2.status).toBe(307)
expect(response2.headers.get('location')).toBe(
'https://test.sim.ai/workspace/workspace-456/w'
)
})
})
describe('DELETE /api/workspaces/invitations/[invitationId]', () => {

View File

@@ -31,7 +31,6 @@ export async function GET(
const isAcceptFlow = !!token // If token is provided, this is an acceptance flow
if (!session?.user?.id) {
// For token-based acceptance flows, redirect to login
if (isAcceptFlow) {
return NextResponse.redirect(new URL(`/invite/${invitationId}?token=${token}`, getBaseUrl()))
}
@@ -51,8 +50,9 @@ export async function GET(
if (!invitation) {
if (isAcceptFlow) {
const tokenParam = token ? `&token=${encodeURIComponent(token)}` : ''
return NextResponse.redirect(
new URL(`/invite/${invitationId}?error=invalid-token`, getBaseUrl())
new URL(`/invite/${invitationId}?error=invalid-token${tokenParam}`, getBaseUrl())
)
}
return NextResponse.json({ error: 'Invitation not found or has expired' }, { status: 404 })
@@ -60,8 +60,9 @@ export async function GET(
if (new Date() > new Date(invitation.expiresAt)) {
if (isAcceptFlow) {
const tokenParam = token ? `&token=${encodeURIComponent(token)}` : ''
return NextResponse.redirect(
new URL(`/invite/${invitation.id}?error=expired`, getBaseUrl())
new URL(`/invite/${invitation.id}?error=expired${tokenParam}`, getBaseUrl())
)
}
return NextResponse.json({ error: 'Invitation has expired' }, { status: 400 })
@@ -75,17 +76,20 @@ export async function GET(
if (!workspaceDetails) {
if (isAcceptFlow) {
const tokenParam = token ? `&token=${encodeURIComponent(token)}` : ''
return NextResponse.redirect(
new URL(`/invite/${invitation.id}?error=workspace-not-found`, getBaseUrl())
new URL(`/invite/${invitation.id}?error=workspace-not-found${tokenParam}`, getBaseUrl())
)
}
return NextResponse.json({ error: 'Workspace not found' }, { status: 404 })
}
if (isAcceptFlow) {
const tokenParam = token ? `&token=${encodeURIComponent(token)}` : ''
if (invitation.status !== ('pending' as WorkspaceInvitationStatus)) {
return NextResponse.redirect(
new URL(`/invite/${invitation.id}?error=already-processed`, getBaseUrl())
new URL(`/invite/${invitation.id}?error=already-processed${tokenParam}`, getBaseUrl())
)
}
@@ -100,7 +104,7 @@ export async function GET(
if (!userData) {
return NextResponse.redirect(
new URL(`/invite/${invitation.id}?error=user-not-found`, getBaseUrl())
new URL(`/invite/${invitation.id}?error=user-not-found${tokenParam}`, getBaseUrl())
)
}
@@ -108,7 +112,7 @@ export async function GET(
if (!isValidMatch) {
return NextResponse.redirect(
new URL(`/invite/${invitation.id}?error=email-mismatch`, getBaseUrl())
new URL(`/invite/${invitation.id}?error=email-mismatch${tokenParam}`, getBaseUrl())
)
}

View File

@@ -178,23 +178,25 @@ export default function Invite() {
useEffect(() => {
const errorReason = searchParams.get('error')
const isNew = searchParams.get('new') === 'true'
setIsNewUser(isNew)
const tokenFromQuery = searchParams.get('token')
if (tokenFromQuery) {
setToken(tokenFromQuery)
sessionStorage.setItem('inviteToken', tokenFromQuery)
} else {
const storedToken = sessionStorage.getItem('inviteToken')
if (storedToken && storedToken !== inviteId) {
setToken(storedToken)
}
}
if (errorReason) {
setError(getInviteError(errorReason))
setIsLoading(false)
return
}
const isNew = searchParams.get('new') === 'true'
setIsNewUser(isNew)
const tokenFromQuery = searchParams.get('token')
const effectiveToken = tokenFromQuery || inviteId
if (effectiveToken) {
setToken(effectiveToken)
sessionStorage.setItem('inviteToken', effectiveToken)
}
}, [searchParams, inviteId])
useEffect(() => {
@@ -203,7 +205,6 @@ export default function Invite() {
async function fetchInvitationDetails() {
setIsLoading(true)
try {
// Fetch invitation details using the invitation ID from the URL path
const workspaceInviteResponse = await fetch(`/api/workspaces/invitations/${inviteId}`, {
method: 'GET',
})
@@ -220,7 +221,6 @@ export default function Invite() {
return
}
// Handle workspace invitation errors with specific status codes
if (!workspaceInviteResponse.ok && workspaceInviteResponse.status !== 404) {
const errorCode = parseApiError(null, workspaceInviteResponse.status)
const errorData = await workspaceInviteResponse.json().catch(() => ({}))
@@ -229,7 +229,6 @@ export default function Invite() {
error: errorData,
})
// Refine error code based on response body if available
if (errorData.error) {
const refinedCode = parseApiError(errorData.error, workspaceInviteResponse.status)
setError(getInviteError(refinedCode))
@@ -254,13 +253,11 @@ export default function Invite() {
if (data) {
setInvitationType('organization')
// Check if user is already in an organization BEFORE showing the invitation
const activeOrgResponse = await client.organization
.getFullOrganization()
.catch(() => ({ data: null }))
if (activeOrgResponse?.data) {
// User is already in an organization
setCurrentOrgName(activeOrgResponse.data.name)
setError(getInviteError('already-in-organization'))
setIsLoading(false)
@@ -289,7 +286,6 @@ export default function Invite() {
throw { code: 'invalid-invitation' }
}
} catch (orgErr: any) {
// If this is our structured error, use it directly
if (orgErr.code) {
throw orgErr
}
@@ -316,7 +312,6 @@ export default function Invite() {
window.location.href = `/api/workspaces/invitations/${encodeURIComponent(inviteId)}?token=${encodeURIComponent(token || '')}`
} else {
try {
// Get the organizationId from invitation details
const orgId = invitationDetails?.data?.organizationId
if (!orgId) {
@@ -325,7 +320,6 @@ export default function Invite() {
return
}
// Use our custom API endpoint that handles Pro usage snapshot
const response = await fetch(`/api/organizations/${orgId}/invitations/${inviteId}`, {
method: 'PUT',
headers: {
@@ -347,7 +341,6 @@ export default function Invite() {
return
}
// Set the organization as active
await client.organization.setActive({
organizationId: orgId,
})
@@ -360,7 +353,6 @@ export default function Invite() {
} catch (err: any) {
logger.error('Error accepting invitation:', err)
// Reset accepted state on error
setAccepted(false)
const errorCode = parseApiError(err)
@@ -371,7 +363,9 @@ export default function Invite() {
}
const getCallbackUrl = () => {
return `/invite/${inviteId}${token && token !== inviteId ? `?token=${token}` : ''}`
const effectiveToken =
token || sessionStorage.getItem('inviteToken') || searchParams.get('token')
return `/invite/${inviteId}${effectiveToken && effectiveToken !== inviteId ? `?token=${effectiveToken}` : ''}`
}
if (!session?.user && !isPending) {
@@ -435,7 +429,6 @@ export default function Invite() {
if (error) {
const callbackUrl = encodeURIComponent(getCallbackUrl())
// Special handling for already in organization
if (error.code === 'already-in-organization') {
return (
<InviteLayout>
@@ -463,7 +456,6 @@ export default function Invite() {
)
}
// Handle email mismatch - user needs to sign in with a different account
if (error.code === 'email-mismatch') {
return (
<InviteLayout>
@@ -490,7 +482,6 @@ export default function Invite() {
)
}
// Handle auth-related errors - prompt user to sign in
if (error.requiresAuth) {
return (
<InviteLayout>
@@ -518,7 +509,6 @@ export default function Invite() {
)
}
// Handle retryable errors
const actions: Array<{
label: string
onClick: () => void
@@ -550,7 +540,6 @@ export default function Invite() {
)
}
// Show success only if accepted AND no error
if (accepted && !error) {
return (
<InviteLayout>

View File

@@ -221,7 +221,9 @@ export function Chat() {
exportChatCSV,
} = useChatStore()
const { entries } = useTerminalConsoleStore()
const hasConsoleHydrated = useTerminalConsoleStore((state) => state._hasHydrated)
const entriesFromStore = useTerminalConsoleStore((state) => state.entries)
const entries = hasConsoleHydrated ? entriesFromStore : []
const { isExecuting } = useExecutionStore()
const { handleRunWorkflow, handleCancelExecution } = useWorkflowExecution()
const { data: session } = useSession()
@@ -531,35 +533,6 @@ export function Chat() {
return
}
if (
selectedOutputs.length > 0 &&
'logs' in result &&
Array.isArray(result.logs) &&
activeWorkflowId
) {
const additionalOutputs: string[] = []
for (const outputId of selectedOutputs) {
const blockId = extractBlockIdFromOutputId(outputId)
const path = extractPathFromOutputId(outputId, blockId)
if (path === 'content') continue
const outputValue = extractOutputFromLogs(result.logs as BlockLog[], outputId)
if (outputValue !== undefined) {
const formattedValue =
typeof outputValue === 'string' ? outputValue : JSON.stringify(outputValue)
if (formattedValue) {
additionalOutputs.push(`**${path}:** ${formattedValue}`)
}
}
}
if (additionalOutputs.length > 0) {
appendMessageContent(responseMessageId, `\n\n${additionalOutputs.join('\n\n')}`)
}
}
finalizeMessageStream(responseMessageId)
} else if (contentChunk) {
accumulatedContent += contentChunk

View File

@@ -26,26 +26,14 @@ function formatTimestamp(iso: string): string {
}
}
/**
* Common text styling for loading and empty states
*/
const STATE_TEXT_CLASSES = 'px-[8px] py-[8px] text-[12px] text-[var(--text-muted)]'
/**
* Loading state component for mention folders
*/
const LoadingState = () => <div className={STATE_TEXT_CLASSES}>Loading...</div>
/**
* Empty state component for mention folders
*/
const EmptyState = ({ message }: { message: string }) => (
<div className={STATE_TEXT_CLASSES}>{message}</div>
)
/**
* Aggregated item type for filtered results
*/
interface AggregatedItem {
id: string
label: string
@@ -78,14 +66,6 @@ interface MentionMenuProps {
}
}
/**
* MentionMenu component for mention menu dropdown.
* Handles rendering of mention options, submenus, and aggregated search results.
* Manages keyboard navigation and selection of mentions.
*
* @param props - Component props
* @returns Rendered mention menu
*/
export function MentionMenu({
mentionMenu,
mentionData,
@@ -100,6 +80,7 @@ export function MentionMenu({
submenuActiveIndex,
mentionActiveIndex,
openSubmenuFor,
setOpenSubmenuFor,
} = mentionMenu
const {
@@ -308,72 +289,55 @@ export function MentionMenu({
'Docs', // 7
] as const
// Get active folder based on navigation when not in submenu and no query
const isInFolderNavigationMode = !openSubmenuFor && !showAggregatedView
// Compute caret viewport position via mirror technique for precise anchoring
const textareaEl = mentionMenu.textareaRef.current
if (!textareaEl) return null
const getCaretViewport = (textarea: HTMLTextAreaElement, caretPosition: number, text: string) => {
const textareaRect = textarea.getBoundingClientRect()
const style = window.getComputedStyle(textarea)
const caretPos = getCaretPos()
const textareaRect = textareaEl.getBoundingClientRect()
const style = window.getComputedStyle(textareaEl)
const mirrorDiv = document.createElement('div')
mirrorDiv.style.position = 'absolute'
mirrorDiv.style.visibility = 'hidden'
mirrorDiv.style.whiteSpace = 'pre-wrap'
mirrorDiv.style.wordWrap = 'break-word'
mirrorDiv.style.font = style.font
mirrorDiv.style.padding = style.padding
mirrorDiv.style.border = style.border
mirrorDiv.style.width = style.width
mirrorDiv.style.lineHeight = style.lineHeight
mirrorDiv.style.boxSizing = style.boxSizing
mirrorDiv.style.letterSpacing = style.letterSpacing
mirrorDiv.style.textTransform = style.textTransform
mirrorDiv.style.textIndent = style.textIndent
mirrorDiv.style.textAlign = style.textAlign
const mirrorDiv = document.createElement('div')
mirrorDiv.style.position = 'absolute'
mirrorDiv.style.visibility = 'hidden'
mirrorDiv.style.whiteSpace = 'pre-wrap'
mirrorDiv.style.wordWrap = 'break-word'
mirrorDiv.style.font = style.font
mirrorDiv.style.padding = style.padding
mirrorDiv.style.border = style.border
mirrorDiv.style.width = style.width
mirrorDiv.style.lineHeight = style.lineHeight
mirrorDiv.style.boxSizing = style.boxSizing
mirrorDiv.style.letterSpacing = style.letterSpacing
mirrorDiv.style.textTransform = style.textTransform
mirrorDiv.style.textIndent = style.textIndent
mirrorDiv.style.textAlign = style.textAlign
mirrorDiv.textContent = message.substring(0, caretPos)
mirrorDiv.textContent = text.substring(0, caretPosition)
const caretMarker = document.createElement('span')
caretMarker.style.display = 'inline-block'
caretMarker.style.width = '0px'
caretMarker.style.padding = '0'
caretMarker.style.border = '0'
mirrorDiv.appendChild(caretMarker)
const caretMarker = document.createElement('span')
caretMarker.style.display = 'inline-block'
caretMarker.style.width = '0px'
caretMarker.style.padding = '0'
caretMarker.style.border = '0'
mirrorDiv.appendChild(caretMarker)
document.body.appendChild(mirrorDiv)
const markerRect = caretMarker.getBoundingClientRect()
const mirrorRect = mirrorDiv.getBoundingClientRect()
document.body.removeChild(mirrorDiv)
document.body.appendChild(mirrorDiv)
const markerRect = caretMarker.getBoundingClientRect()
const mirrorRect = mirrorDiv.getBoundingClientRect()
document.body.removeChild(mirrorDiv)
const leftOffset = markerRect.left - mirrorRect.left - textarea.scrollLeft
const topOffset = markerRect.top - mirrorRect.top - textarea.scrollTop
return {
left: textareaRect.left + leftOffset,
top: textareaRect.top + topOffset,
}
const caretViewport = {
left: textareaRect.left + (markerRect.left - mirrorRect.left) - textareaEl.scrollLeft,
top: textareaRect.top + (markerRect.top - mirrorRect.top) - textareaEl.scrollTop,
}
const caretPos = getCaretPos()
const caretViewport = getCaretViewport(textareaEl, caretPos, message)
// Decide preferred side based on available space
const margin = 8
const spaceAbove = caretViewport.top - margin
const spaceBelow = window.innerHeight - caretViewport.top - margin
const side: 'top' | 'bottom' = spaceBelow >= spaceAbove ? 'bottom' : 'top'
const side: 'top' | 'bottom' = spaceBelow >= caretViewport.top - margin ? 'bottom' : 'top'
return (
<Popover
open={open}
onOpenChange={() => {
/* controlled by mentionMenu */
}}
>
<Popover open={open} onOpenChange={() => {}}>
<PopoverAnchor asChild>
<div
style={{
@@ -399,7 +363,7 @@ export function MentionMenu({
onOpenAutoFocus={(e) => e.preventDefault()}
onCloseAutoFocus={(e) => e.preventDefault()}
>
<PopoverBackButton />
<PopoverBackButton onClick={() => setOpenSubmenuFor(null)} />
<PopoverScrollArea ref={menuListRef} className='space-y-[2px]'>
{openSubmenuFor ? (
// Submenu view - showing contents of a specific folder

View File

@@ -12,31 +12,19 @@ import {
} from '@/components/emcn'
import type { useMentionMenu } from '../../hooks/use-mention-menu'
/**
* Top-level slash command options
*/
const TOP_LEVEL_COMMANDS = [
{ id: 'fast', label: 'fast' },
{ id: 'plan', label: 'plan' },
{ id: 'debug', label: 'debug' },
{ id: 'research', label: 'research' },
{ id: 'deploy', label: 'deploy' },
{ id: 'superagent', label: 'superagent' },
{ id: 'fast', label: 'Fast' },
{ id: 'research', label: 'Research' },
{ id: 'superagent', label: 'Actions' },
] as const
/**
* Web submenu commands
*/
const WEB_COMMANDS = [
{ id: 'search', label: 'search' },
{ id: 'read', label: 'read' },
{ id: 'scrape', label: 'scrape' },
{ id: 'crawl', label: 'crawl' },
{ id: 'search', label: 'Search' },
{ id: 'read', label: 'Read' },
{ id: 'scrape', label: 'Scrape' },
{ id: 'crawl', label: 'Crawl' },
] as const
/**
* All command labels for filtering
*/
const ALL_COMMANDS = [...TOP_LEVEL_COMMANDS, ...WEB_COMMANDS]
interface SlashMenuProps {
@@ -45,13 +33,6 @@ interface SlashMenuProps {
onSelectCommand: (command: string) => void
}
/**
* SlashMenu component for slash command dropdown.
* Shows command options when user types '/'.
*
* @param props - Component props
* @returns Rendered slash menu
*/
export function SlashMenu({ mentionMenu, message, onSelectCommand }: SlashMenuProps) {
const {
mentionMenuRef,
@@ -64,92 +45,71 @@ export function SlashMenu({ mentionMenu, message, onSelectCommand }: SlashMenuPr
setOpenSubmenuFor,
} = mentionMenu
/**
* Get the current query string after /
*/
const currentQuery = useMemo(() => {
const caretPos = getCaretPos()
const active = getActiveSlashQueryAtPosition(caretPos, message)
return active?.query.trim().toLowerCase() || ''
}, [message, getCaretPos, getActiveSlashQueryAtPosition])
/**
* Filter commands based on query (search across all commands when there's a query)
*/
const filteredCommands = useMemo(() => {
if (!currentQuery) return null // Show folder view when no query
return ALL_COMMANDS.filter((cmd) => cmd.label.toLowerCase().includes(currentQuery))
if (!currentQuery) return null
return ALL_COMMANDS.filter(
(cmd) =>
cmd.id.toLowerCase().includes(currentQuery) ||
cmd.label.toLowerCase().includes(currentQuery)
)
}, [currentQuery])
// Show aggregated view when there's a query
const showAggregatedView = currentQuery.length > 0
const isInFolderNavigationMode = !openSubmenuFor && !showAggregatedView
// Compute caret viewport position via mirror technique for precise anchoring
const textareaEl = mentionMenu.textareaRef.current
if (!textareaEl) return null
const getCaretViewport = (textarea: HTMLTextAreaElement, caretPosition: number, text: string) => {
const textareaRect = textarea.getBoundingClientRect()
const style = window.getComputedStyle(textarea)
const caretPos = getCaretPos()
const textareaRect = textareaEl.getBoundingClientRect()
const style = window.getComputedStyle(textareaEl)
const mirrorDiv = document.createElement('div')
mirrorDiv.style.position = 'absolute'
mirrorDiv.style.visibility = 'hidden'
mirrorDiv.style.whiteSpace = 'pre-wrap'
mirrorDiv.style.wordWrap = 'break-word'
mirrorDiv.style.font = style.font
mirrorDiv.style.padding = style.padding
mirrorDiv.style.border = style.border
mirrorDiv.style.width = style.width
mirrorDiv.style.lineHeight = style.lineHeight
mirrorDiv.style.boxSizing = style.boxSizing
mirrorDiv.style.letterSpacing = style.letterSpacing
mirrorDiv.style.textTransform = style.textTransform
mirrorDiv.style.textIndent = style.textIndent
mirrorDiv.style.textAlign = style.textAlign
const mirrorDiv = document.createElement('div')
mirrorDiv.style.position = 'absolute'
mirrorDiv.style.visibility = 'hidden'
mirrorDiv.style.whiteSpace = 'pre-wrap'
mirrorDiv.style.wordWrap = 'break-word'
mirrorDiv.style.font = style.font
mirrorDiv.style.padding = style.padding
mirrorDiv.style.border = style.border
mirrorDiv.style.width = style.width
mirrorDiv.style.lineHeight = style.lineHeight
mirrorDiv.style.boxSizing = style.boxSizing
mirrorDiv.style.letterSpacing = style.letterSpacing
mirrorDiv.style.textTransform = style.textTransform
mirrorDiv.style.textIndent = style.textIndent
mirrorDiv.style.textAlign = style.textAlign
mirrorDiv.textContent = message.substring(0, caretPos)
mirrorDiv.textContent = text.substring(0, caretPosition)
const caretMarker = document.createElement('span')
caretMarker.style.display = 'inline-block'
caretMarker.style.width = '0px'
caretMarker.style.padding = '0'
caretMarker.style.border = '0'
mirrorDiv.appendChild(caretMarker)
const caretMarker = document.createElement('span')
caretMarker.style.display = 'inline-block'
caretMarker.style.width = '0px'
caretMarker.style.padding = '0'
caretMarker.style.border = '0'
mirrorDiv.appendChild(caretMarker)
document.body.appendChild(mirrorDiv)
const markerRect = caretMarker.getBoundingClientRect()
const mirrorRect = mirrorDiv.getBoundingClientRect()
document.body.removeChild(mirrorDiv)
document.body.appendChild(mirrorDiv)
const markerRect = caretMarker.getBoundingClientRect()
const mirrorRect = mirrorDiv.getBoundingClientRect()
document.body.removeChild(mirrorDiv)
const leftOffset = markerRect.left - mirrorRect.left - textarea.scrollLeft
const topOffset = markerRect.top - mirrorRect.top - textarea.scrollTop
return {
left: textareaRect.left + leftOffset,
top: textareaRect.top + topOffset,
}
const caretViewport = {
left: textareaRect.left + (markerRect.left - mirrorRect.left) - textareaEl.scrollLeft,
top: textareaRect.top + (markerRect.top - mirrorRect.top) - textareaEl.scrollTop,
}
const caretPos = getCaretPos()
const caretViewport = getCaretViewport(textareaEl, caretPos, message)
// Decide preferred side based on available space
const margin = 8
const spaceAbove = caretViewport.top - margin
const spaceBelow = window.innerHeight - caretViewport.top - margin
const side: 'top' | 'bottom' = spaceBelow >= spaceAbove ? 'bottom' : 'top'
// Check if we're in folder navigation mode (no query, not in submenu)
const isInFolderNavigationMode = !openSubmenuFor && !showAggregatedView
const side: 'top' | 'bottom' = spaceBelow >= caretViewport.top - margin ? 'bottom' : 'top'
return (
<Popover
open={true}
onOpenChange={() => {
/* controlled externally */
}}
>
<Popover open={true} onOpenChange={() => {}}>
<PopoverAnchor asChild>
<div
style={{
@@ -175,24 +135,22 @@ export function SlashMenu({ mentionMenu, message, onSelectCommand }: SlashMenuPr
onOpenAutoFocus={(e) => e.preventDefault()}
onCloseAutoFocus={(e) => e.preventDefault()}
>
<PopoverBackButton />
<PopoverBackButton onClick={() => setOpenSubmenuFor(null)} />
<PopoverScrollArea ref={menuListRef} className='space-y-[2px]'>
{openSubmenuFor === 'Web' ? (
// Web submenu view
<>
{WEB_COMMANDS.map((cmd, index) => (
<PopoverItem
key={cmd.id}
onClick={() => onSelectCommand(cmd.label)}
onClick={() => onSelectCommand(cmd.id)}
data-idx={index}
active={index === submenuActiveIndex}
>
<span className='truncate capitalize'>{cmd.label}</span>
<span className='truncate'>{cmd.label}</span>
</PopoverItem>
))}
</>
) : showAggregatedView ? (
// Aggregated filtered view
<>
{filteredCommands && filteredCommands.length === 0 ? (
<div className='px-[8px] py-[8px] text-[12px] text-[var(--text-muted)]'>
@@ -202,26 +160,25 @@ export function SlashMenu({ mentionMenu, message, onSelectCommand }: SlashMenuPr
filteredCommands?.map((cmd, index) => (
<PopoverItem
key={cmd.id}
onClick={() => onSelectCommand(cmd.label)}
onClick={() => onSelectCommand(cmd.id)}
data-idx={index}
active={index === submenuActiveIndex}
>
<span className='truncate capitalize'>{cmd.label}</span>
<span className='truncate'>{cmd.label}</span>
</PopoverItem>
))
)}
</>
) : (
// Folder navigation view
<>
{TOP_LEVEL_COMMANDS.map((cmd, index) => (
<PopoverItem
key={cmd.id}
onClick={() => onSelectCommand(cmd.label)}
onClick={() => onSelectCommand(cmd.id)}
data-idx={index}
active={isInFolderNavigationMode && index === mentionActiveIndex}
>
<span className='truncate capitalize'>{cmd.label}</span>
<span className='truncate'>{cmd.label}</span>
</PopoverItem>
))}
@@ -235,8 +192,8 @@ export function SlashMenu({ mentionMenu, message, onSelectCommand }: SlashMenuPr
data-idx={TOP_LEVEL_COMMANDS.length}
>
{WEB_COMMANDS.map((cmd) => (
<PopoverItem key={cmd.id} onClick={() => onSelectCommand(cmd.label)}>
<span className='truncate capitalize'>{cmd.label}</span>
<PopoverItem key={cmd.id} onClick={() => onSelectCommand(cmd.id)}>
<span className='truncate'>{cmd.label}</span>
</PopoverItem>
))}
</PopoverFolder>

View File

@@ -40,6 +40,24 @@ import { useCopilotStore } from '@/stores/panel'
const logger = createLogger('CopilotUserInput')
const TOP_LEVEL_COMMANDS = ['fast', 'research', 'superagent'] as const
const WEB_COMMANDS = ['search', 'read', 'scrape', 'crawl'] as const
const ALL_COMMANDS = [...TOP_LEVEL_COMMANDS, ...WEB_COMMANDS]
const COMMAND_DISPLAY_LABELS: Record<string, string> = {
superagent: 'Actions',
}
/**
* Calculates the next index for circular navigation (wraps around at bounds)
*/
function getNextIndex(current: number, direction: 'up' | 'down', maxIndex: number): number {
if (direction === 'down') {
return current >= maxIndex ? 0 : current + 1
}
return current <= 0 ? maxIndex : current - 1
}
interface UserInputProps {
onSubmit: (
message: string,
@@ -110,7 +128,6 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
},
ref
) => {
// Refs and external hooks
const { data: session } = useSession()
const params = useParams()
const workspaceId = params.workspaceId as string
@@ -122,19 +139,16 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
selectedModelOverride !== undefined ? selectedModelOverride : copilotStore.selectedModel
const setSelectedModel = onModelChangeOverride || copilotStore.setSelectedModel
// Internal state
const [internalMessage, setInternalMessage] = useState('')
const [isNearTop, setIsNearTop] = useState(false)
const [containerRef, setContainerRef] = useState<HTMLDivElement | null>(null)
const [inputContainerRef, setInputContainerRef] = useState<HTMLDivElement | null>(null)
const [showSlashMenu, setShowSlashMenu] = useState(false)
// Controlled vs uncontrolled message state
const message = controlledValue !== undefined ? controlledValue : internalMessage
const setMessage =
controlledValue !== undefined ? onControlledChange || (() => {}) : setInternalMessage
// Effective placeholder
const effectivePlaceholder =
placeholder ||
(mode === 'ask'
@@ -143,11 +157,8 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
? 'Plan your workflow'
: 'Plan, search, build anything')
// Custom hooks - order matters for ref sharing
// Context management (manages selectedContexts state)
const contextManagement = useContextManagement({ message, initialContexts })
// Mention menu
const mentionMenu = useMentionMenu({
message,
selectedContexts: contextManagement.selectedContexts,
@@ -155,7 +166,6 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
onMessageChange: setMessage,
})
// Mention token utilities
const mentionTokensWithContext = useMentionTokens({
message,
selectedContexts: contextManagement.selectedContexts,
@@ -183,7 +193,6 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
isLoading,
})
// Insert mention handlers
const insertHandlers = useMentionInsertHandlers({
mentionMenu,
workflowId: workflowId || null,
@@ -191,14 +200,12 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
onContextAdd: contextManagement.addContext,
})
// Keyboard navigation hook
const mentionKeyboard = useMentionKeyboard({
mentionMenu,
mentionData,
insertHandlers,
})
// Expose focus method to parent
useImperativeHandle(
ref,
() => ({
@@ -215,9 +222,6 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
[mentionMenu.textareaRef]
)
// Note: textarea auto-resize is handled by the useTextareaAutoResize hook
// Load workflows on mount if we have a workflowId
useEffect(() => {
if (workflowId) {
void mentionData.ensureWorkflowsLoaded()
@@ -225,7 +229,6 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [workflowId])
// Detect if input is near top of screen
useEffect(() => {
const checkPosition = () => {
if (containerRef) {
@@ -253,7 +256,6 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
}
}, [containerRef])
// Also check position when mention menu opens
useEffect(() => {
if (mentionMenu.showMentionMenu && containerRef) {
const rect = containerRef.getBoundingClientRect()
@@ -261,7 +263,6 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
}
}, [mentionMenu.showMentionMenu, containerRef])
// Preload mention data when query is active
useEffect(() => {
if (!mentionMenu.showMentionMenu || mentionMenu.openSubmenuFor) {
return
@@ -273,7 +274,6 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
.toLowerCase()
if (q && q.length > 0) {
// Prefetch all lists when there's any query for instant filtering
void mentionData.ensurePastChatsLoaded()
void mentionData.ensureWorkflowsLoaded()
void mentionData.ensureWorkflowBlocksLoaded()
@@ -282,15 +282,12 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
void mentionData.ensureTemplatesLoaded()
void mentionData.ensureLogsLoaded()
// Reset to first item when query changes
mentionMenu.setSubmenuActiveIndex(0)
requestAnimationFrame(() => mentionMenu.scrollActiveItemIntoView(0))
}
// Only depend on values that trigger data loading, not the entire objects
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [mentionMenu.showMentionMenu, mentionMenu.openSubmenuFor, message])
// When switching into a submenu, select the first item and scroll to it
useEffect(() => {
if (mentionMenu.openSubmenuFor) {
mentionMenu.setSubmenuActiveIndex(0)
@@ -299,12 +296,10 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [mentionMenu.openSubmenuFor])
// Handlers
const handleSubmit = useCallback(
async (overrideMessage?: string, options: { preserveInput?: boolean } = {}) => {
const targetMessage = overrideMessage ?? message
const trimmedMessage = targetMessage.trim()
// Allow submission even when isLoading - store will queue the message
if (!trimmedMessage || disabled) return
const failedUploads = fileAttachments.attachedFiles.filter((f) => !f.uploading && !f.key)
@@ -377,17 +372,13 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
const handleSlashCommandSelect = useCallback(
(command: string) => {
// Capitalize the command for display
const capitalizedCommand = command.charAt(0).toUpperCase() + command.slice(1)
// Replace the active slash query with the capitalized command
mentionMenu.replaceActiveSlashWith(capitalizedCommand)
// Add as a context so it gets highlighted
const displayLabel =
COMMAND_DISPLAY_LABELS[command] || command.charAt(0).toUpperCase() + command.slice(1)
mentionMenu.replaceActiveSlashWith(displayLabel)
contextManagement.addContext({
kind: 'slash_command',
command,
label: capitalizedCommand,
label: displayLabel,
})
setShowSlashMenu(false)
@@ -398,7 +389,6 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
const handleKeyDown = useCallback(
(e: KeyboardEvent<HTMLTextAreaElement>) => {
// Escape key handling
if (e.key === 'Escape' && (mentionMenu.showMentionMenu || showSlashMenu)) {
e.preventDefault()
if (mentionMenu.openSubmenuFor) {
@@ -411,65 +401,33 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
return
}
// Arrow navigation in slash menu
if (showSlashMenu) {
const TOP_LEVEL_COMMANDS = ['fast', 'plan', 'debug', 'research', 'deploy', 'superagent']
const WEB_COMMANDS = ['search', 'read', 'scrape', 'crawl']
const ALL_COMMANDS = [...TOP_LEVEL_COMMANDS, ...WEB_COMMANDS]
const caretPos = mentionMenu.getCaretPos()
const activeSlash = mentionMenu.getActiveSlashQueryAtPosition(caretPos, message)
const query = activeSlash?.query.trim().toLowerCase() || ''
const showAggregatedView = query.length > 0
const direction = e.key === 'ArrowDown' ? 'down' : 'up'
if (e.key === 'ArrowDown' || e.key === 'ArrowUp') {
e.preventDefault()
if (mentionMenu.openSubmenuFor === 'Web') {
// Navigate in Web submenu
const last = WEB_COMMANDS.length - 1
mentionMenu.setSubmenuActiveIndex((prev) => {
const next =
e.key === 'ArrowDown'
? prev >= last
? 0
: prev + 1
: prev <= 0
? last
: prev - 1
const next = getNextIndex(prev, direction, WEB_COMMANDS.length - 1)
requestAnimationFrame(() => mentionMenu.scrollActiveItemIntoView(next))
return next
})
} else if (showAggregatedView) {
// Navigate in filtered view
const filtered = ALL_COMMANDS.filter((cmd) => cmd.includes(query))
const last = Math.max(0, filtered.length - 1)
mentionMenu.setSubmenuActiveIndex((prev) => {
if (filtered.length === 0) return 0
const next =
e.key === 'ArrowDown'
? prev >= last
? 0
: prev + 1
: prev <= 0
? last
: prev - 1
const next = getNextIndex(prev, direction, filtered.length - 1)
requestAnimationFrame(() => mentionMenu.scrollActiveItemIntoView(next))
return next
})
} else {
// Navigate in folder view (top-level + Web folder)
const totalItems = TOP_LEVEL_COMMANDS.length + 1 // +1 for Web folder
const last = totalItems - 1
mentionMenu.setMentionActiveIndex((prev) => {
const next =
e.key === 'ArrowDown'
? prev >= last
? 0
: prev + 1
: prev <= 0
? last
: prev - 1
const next = getNextIndex(prev, direction, TOP_LEVEL_COMMANDS.length)
requestAnimationFrame(() => mentionMenu.scrollActiveItemIntoView(next))
return next
})
@@ -477,11 +435,9 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
return
}
// Arrow right to enter Web submenu
if (e.key === 'ArrowRight') {
e.preventDefault()
if (!showAggregatedView && !mentionMenu.openSubmenuFor) {
// Check if Web folder is selected (it's after all top-level commands)
if (mentionMenu.mentionActiveIndex === TOP_LEVEL_COMMANDS.length) {
mentionMenu.setOpenSubmenuFor('Web')
mentionMenu.setSubmenuActiveIndex(0)
@@ -490,7 +446,6 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
return
}
// Arrow left to exit submenu
if (e.key === 'ArrowLeft') {
e.preventDefault()
if (mentionMenu.openSubmenuFor) {
@@ -500,44 +455,33 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
}
}
// Arrow navigation in mention menu
if (mentionKeyboard.handleArrowNavigation(e)) return
if (mentionKeyboard.handleArrowRight(e)) return
if (mentionKeyboard.handleArrowLeft(e)) return
// Enter key handling
if (e.key === 'Enter' && !e.shiftKey && !e.nativeEvent.isComposing) {
e.preventDefault()
if (showSlashMenu) {
const TOP_LEVEL_COMMANDS = ['fast', 'plan', 'debug', 'research', 'deploy', 'superagent']
const WEB_COMMANDS = ['search', 'read', 'scrape', 'crawl']
const ALL_COMMANDS = [...TOP_LEVEL_COMMANDS, ...WEB_COMMANDS]
const caretPos = mentionMenu.getCaretPos()
const activeSlash = mentionMenu.getActiveSlashQueryAtPosition(caretPos, message)
const query = activeSlash?.query.trim().toLowerCase() || ''
const showAggregatedView = query.length > 0
if (mentionMenu.openSubmenuFor === 'Web') {
// Select from Web submenu
const selectedCommand =
WEB_COMMANDS[mentionMenu.submenuActiveIndex] || WEB_COMMANDS[0]
handleSlashCommandSelect(selectedCommand)
} else if (showAggregatedView) {
// Select from filtered view
const filtered = ALL_COMMANDS.filter((cmd) => cmd.includes(query))
if (filtered.length > 0) {
const selectedCommand = filtered[mentionMenu.submenuActiveIndex] || filtered[0]
handleSlashCommandSelect(selectedCommand)
}
} else {
// Folder navigation view
const selectedIndex = mentionMenu.mentionActiveIndex
if (selectedIndex < TOP_LEVEL_COMMANDS.length) {
// Top-level command selected
handleSlashCommandSelect(TOP_LEVEL_COMMANDS[selectedIndex])
} else if (selectedIndex === TOP_LEVEL_COMMANDS.length) {
// Web folder selected - open it
mentionMenu.setOpenSubmenuFor('Web')
mentionMenu.setSubmenuActiveIndex(0)
}
@@ -552,7 +496,6 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
return
}
// Handle mention token behavior (backspace, delete, arrow keys) when menu is closed
if (!mentionMenu.showMentionMenu) {
const textarea = mentionMenu.textareaRef.current
const selStart = textarea?.selectionStart ?? 0
@@ -561,11 +504,8 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
if (e.key === 'Backspace' || e.key === 'Delete') {
if (selectionLength > 0) {
// Multi-character selection: Clean up contexts for any overlapping mentions
// but let the default behavior handle the actual text deletion
mentionTokensWithContext.removeContextsInSelection(selStart, selEnd)
} else {
// Single character delete - check if cursor is inside/at a mention token
const ranges = mentionTokensWithContext.computeMentionRanges()
const target =
e.key === 'Backspace'
@@ -604,7 +544,6 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
}
}
// Prevent typing inside token
if (e.key.length === 1 || e.key === 'Space') {
const blocked =
selectionLength === 0 && !!mentionTokensWithContext.findRangeContaining(selStart)
@@ -637,14 +576,10 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
const newValue = e.target.value
setMessage(newValue)
// Skip mention menu logic if mentions are disabled
if (disableMentions) return
const caret = e.target.selectionStart ?? newValue.length
// Check for @ mention trigger
const activeMention = mentionMenu.getActiveMentionQueryAtPosition(caret, newValue)
// Check for / slash command trigger
const activeSlash = mentionMenu.getActiveSlashQueryAtPosition(caret, newValue)
if (activeMention) {
@@ -686,84 +621,66 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
}
}, [mentionMenu.textareaRef, mentionTokensWithContext])
const handleOpenMentionMenuWithAt = useCallback(() => {
if (disabled || isLoading) return
const textarea = mentionMenu.textareaRef.current
if (!textarea) return
textarea.focus()
const pos = textarea.selectionStart ?? message.length
const needsSpaceBefore = pos > 0 && !/\s/.test(message.charAt(pos - 1))
const insertTriggerAndOpenMenu = useCallback(
(trigger: '@' | '/') => {
if (disabled || isLoading) return
const textarea = mentionMenu.textareaRef.current
if (!textarea) return
const insertText = needsSpaceBefore ? ' @' : '@'
const start = textarea.selectionStart ?? message.length
const end = textarea.selectionEnd ?? message.length
const before = message.slice(0, start)
const after = message.slice(end)
const next = `${before}${insertText}${after}`
setMessage(next)
setTimeout(() => {
const newPos = before.length + insertText.length
textarea.setSelectionRange(newPos, newPos)
textarea.focus()
}, 0)
const start = textarea.selectionStart ?? message.length
const end = textarea.selectionEnd ?? message.length
const needsSpaceBefore = start > 0 && !/\s/.test(message.charAt(start - 1))
mentionMenu.setShowMentionMenu(true)
mentionMenu.setOpenSubmenuFor(null)
mentionMenu.setMentionActiveIndex(0)
mentionMenu.setSubmenuActiveIndex(0)
}, [disabled, isLoading, mentionMenu, message, setMessage])
const insertText = needsSpaceBefore ? ` ${trigger}` : trigger
const before = message.slice(0, start)
const after = message.slice(end)
setMessage(`${before}${insertText}${after}`)
const handleOpenSlashMenu = useCallback(() => {
if (disabled || isLoading) return
const textarea = mentionMenu.textareaRef.current
if (!textarea) return
textarea.focus()
const pos = textarea.selectionStart ?? message.length
const needsSpaceBefore = pos > 0 && !/\s/.test(message.charAt(pos - 1))
setTimeout(() => {
const newPos = before.length + insertText.length
textarea.setSelectionRange(newPos, newPos)
textarea.focus()
}, 0)
const insertText = needsSpaceBefore ? ' /' : '/'
const start = textarea.selectionStart ?? message.length
const end = textarea.selectionEnd ?? message.length
const before = message.slice(0, start)
const after = message.slice(end)
const next = `${before}${insertText}${after}`
setMessage(next)
if (trigger === '@') {
mentionMenu.setShowMentionMenu(true)
mentionMenu.setOpenSubmenuFor(null)
mentionMenu.setMentionActiveIndex(0)
} else {
setShowSlashMenu(true)
}
mentionMenu.setSubmenuActiveIndex(0)
},
[disabled, isLoading, mentionMenu, message, setMessage]
)
setTimeout(() => {
const newPos = before.length + insertText.length
textarea.setSelectionRange(newPos, newPos)
textarea.focus()
}, 0)
const handleOpenMentionMenuWithAt = useCallback(
() => insertTriggerAndOpenMenu('@'),
[insertTriggerAndOpenMenu]
)
setShowSlashMenu(true)
mentionMenu.setSubmenuActiveIndex(0)
}, [disabled, isLoading, mentionMenu, message, setMessage])
const handleOpenSlashMenu = useCallback(
() => insertTriggerAndOpenMenu('/'),
[insertTriggerAndOpenMenu]
)
const canSubmit = message.trim().length > 0 && !disabled && !isLoading
const showAbortButton = isLoading && onAbort
// Render overlay content with highlighted mentions
const renderOverlayContent = useCallback(() => {
const contexts = contextManagement.selectedContexts
// Handle empty message
if (!message) {
return <span>{'\u00A0'}</span>
}
// If no contexts, render the message directly with proper newline handling
if (contexts.length === 0) {
// Add a zero-width space at the end if message ends with newline
// This ensures the newline is rendered and height is calculated correctly
const displayText = message.endsWith('\n') ? `${message}\u200B` : message
return <span>{displayText}</span>
}
const elements: React.ReactNode[] = []
const labels = contexts.map((c) => c.label).filter(Boolean)
// Build ranges for all mentions to highlight them including spaces
const ranges = mentionTokensWithContext.computeMentionRanges()
if (ranges.length === 0) {
@@ -775,14 +692,11 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
for (let i = 0; i < ranges.length; i++) {
const range = ranges[i]
// Add text before mention
if (range.start > lastIndex) {
const before = message.slice(lastIndex, range.start)
elements.push(<span key={`text-${i}-${lastIndex}-${range.start}`}>{before}</span>)
}
// Add highlighted mention (including spaces)
// Use index + start + end to ensure unique keys even with duplicate contexts
const mentionText = message.slice(range.start, range.end)
elements.push(
<span
@@ -797,12 +711,10 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
const tail = message.slice(lastIndex)
if (tail) {
// Add a zero-width space at the end if tail ends with newline
const displayTail = tail.endsWith('\n') ? `${tail}\u200B` : tail
elements.push(<span key={`tail-${lastIndex}`}>{displayTail}</span>)
}
// Ensure there's always something to render for height calculation
return elements.length > 0 ? elements : <span>{'\u00A0'}</span>
}, [message, contextManagement.selectedContexts, mentionTokensWithContext])

View File

@@ -320,12 +320,14 @@ export function Terminal() {
} = useTerminalStore()
const isExpanded = useTerminalStore((state) => state.terminalHeight > NEAR_MIN_THRESHOLD)
const { activeWorkflowId } = useWorkflowRegistry()
const hasConsoleHydrated = useTerminalConsoleStore((state) => state._hasHydrated)
const workflowEntriesSelector = useCallback(
(state: { entries: ConsoleEntry[] }) =>
state.entries.filter((entry) => entry.workflowId === activeWorkflowId),
[activeWorkflowId]
)
const entries = useTerminalConsoleStore(useShallow(workflowEntriesSelector))
const entriesFromStore = useTerminalConsoleStore(useShallow(workflowEntriesSelector))
const entries = hasConsoleHydrated ? entriesFromStore : []
const clearWorkflowConsole = useTerminalConsoleStore((state) => state.clearWorkflowConsole)
const exportConsoleCSV = useTerminalConsoleStore((state) => state.exportConsoleCSV)
const [selectedEntry, setSelectedEntry] = useState<ConsoleEntry | null>(null)

View File

@@ -356,6 +356,9 @@ const WorkflowContent = React.memo(() => {
/** Stores source node/handle info when a connection drag starts for drop-on-block detection. */
const connectionSourceRef = useRef<{ nodeId: string; handleId: string } | null>(null)
/** Tracks whether onConnect successfully handled the connection (ReactFlow pattern). */
const connectionCompletedRef = useRef(false)
/** Stores start positions for multi-node drag undo/redo recording. */
const multiNodeDragStartRef = useRef<Map<string, { x: number; y: number; parentId?: string }>>(
new Map()
@@ -2214,7 +2217,8 @@ const WorkflowContent = React.memo(() => {
)
/**
* Captures the source handle when a connection drag starts
* Captures the source handle when a connection drag starts.
* Resets connectionCompletedRef to track if onConnect handles this connection.
*/
const onConnectStart = useCallback((_event: any, params: any) => {
const handleId: string | undefined = params?.handleId
@@ -2223,6 +2227,7 @@ const WorkflowContent = React.memo(() => {
nodeId: params?.nodeId,
handleId: params?.handleId,
}
connectionCompletedRef.current = false
}, [])
/** Handles new edge connections with container boundary validation. */
@@ -2283,6 +2288,7 @@ const WorkflowContent = React.memo(() => {
isInsideContainer: true,
},
})
connectionCompletedRef.current = true
return
}
@@ -2311,6 +2317,7 @@ const WorkflowContent = React.memo(() => {
}
: undefined,
})
connectionCompletedRef.current = true
}
},
[addEdge, getNodes, blocks]
@@ -2319,8 +2326,9 @@ const WorkflowContent = React.memo(() => {
/**
* Handles connection drag end. Detects if the edge was dropped over a block
* and automatically creates a connection to that block's target handle.
* Only creates a connection if ReactFlow didn't already handle it (e.g., when
* dropping on the block body instead of a handle).
*
* Uses connectionCompletedRef to check if onConnect already handled this connection
* (ReactFlow pattern for distinguishing handle-to-handle vs handle-to-body drops).
*/
const onConnectEnd = useCallback(
(event: MouseEvent | TouchEvent) => {
@@ -2332,6 +2340,12 @@ const WorkflowContent = React.memo(() => {
return
}
// If onConnect already handled this connection, skip (handle-to-handle case)
if (connectionCompletedRef.current) {
connectionSourceRef.current = null
return
}
// Get cursor position in flow coordinates
const clientPos = 'changedTouches' in event ? event.changedTouches[0] : event
const flowPosition = screenToFlowPosition({
@@ -2342,25 +2356,14 @@ const WorkflowContent = React.memo(() => {
// Find node under cursor
const targetNode = findNodeAtPosition(flowPosition)
// Create connection if valid target found AND edge doesn't already exist
// ReactFlow's onConnect fires first when dropping on a handle, so we check
// if that connection already exists to avoid creating duplicates.
// IMPORTANT: We must read directly from the store (not React state) because
// the store update from ReactFlow's onConnect may not have triggered a
// React re-render yet when this callback runs (typically 1-2ms later).
// Create connection if valid target found (handle-to-body case)
if (targetNode && targetNode.id !== source.nodeId) {
const currentEdges = useWorkflowStore.getState().edges
const edgeAlreadyExists = currentEdges.some(
(e) => e.source === source.nodeId && e.target === targetNode.id
)
if (!edgeAlreadyExists) {
onConnect({
source: source.nodeId,
sourceHandle: source.handleId,
target: targetNode.id,
targetHandle: 'target',
})
}
onConnect({
source: source.nodeId,
sourceHandle: source.handleId,
target: targetNode.id,
targetHandle: 'target',
})
}
connectionSourceRef.current = null
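Note: the onConnectStart / onConnect / onConnectEnd changes in this file apply a common ReactFlow pattern: a ref records whether onConnect already handled the drop (handle-to-handle), so onConnectEnd only creates an edge for drops on a block body. A condensed sketch of that pattern, with the hook name and edge-adding callback invented for illustration (not part of this commit):
import { useCallback, useRef } from 'react'
import type { Connection, OnConnectStartParams } from 'reactflow'
function useDropOnBlockConnect(addEdge: (connection: Connection) => void) {
  const completedRef = useRef(false)
  const sourceRef = useRef<OnConnectStartParams | null>(null)
  // Record the drag source and reset the completion flag for this drag.
  const onConnectStart = useCallback((_event: unknown, params: OnConnectStartParams) => {
    sourceRef.current = params
    completedRef.current = false
  }, [])
  // ReactFlow calls this for handle-to-handle drops; mark the drag as handled.
  const onConnect = useCallback(
    (connection: Connection) => {
      addEdge(connection)
      completedRef.current = true
    },
    [addEdge]
  )
  // Runs for every drag end; only the handle-to-body case is handled here.
  const onConnectEnd = useCallback(() => {
    if (completedRef.current || !sourceRef.current) {
      sourceRef.current = null
      return
    }
    // Resolve the node under the cursor (omitted) and call addEdge with it.
    sourceRef.current = null
  }, [])
  return { onConnectStart, onConnect, onConnectEnd }
}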

View File

@@ -98,6 +98,23 @@ export const A2ABlock: BlockConfig<A2AResponse> = {
condition: { field: 'operation', value: 'a2a_send_message' },
required: true,
},
{
id: 'data',
title: 'Data (JSON)',
type: 'code',
placeholder: '{\n "key": "value"\n}',
description: 'Structured data to include with the message (DataPart)',
condition: { field: 'operation', value: 'a2a_send_message' },
},
{
id: 'files',
title: 'Files',
type: 'file-upload',
placeholder: 'Upload files to send',
description: 'Files to include with the message (FilePart)',
condition: { field: 'operation', value: 'a2a_send_message' },
multiple: true,
},
{
id: 'taskId',
title: 'Task ID',
@@ -208,6 +225,14 @@ export const A2ABlock: BlockConfig<A2AResponse> = {
type: 'string',
description: 'Context ID for conversation continuity',
},
data: {
type: 'json',
description: 'Structured data to include with the message',
},
files: {
type: 'array',
description: 'Files to include with the message',
},
historyLength: {
type: 'number',
description: 'Number of history messages to include',

View File

@@ -26,6 +26,8 @@ export const SlackBlock: BlockConfig<SlackResponse> = {
{ label: 'Send Message', id: 'send' },
{ label: 'Create Canvas', id: 'canvas' },
{ label: 'Read Messages', id: 'read' },
{ label: 'Get Message', id: 'get_message' },
{ label: 'Get Thread', id: 'get_thread' },
{ label: 'List Channels', id: 'list_channels' },
{ label: 'List Channel Members', id: 'list_members' },
{ label: 'List Users', id: 'list_users' },
@@ -316,6 +318,68 @@ export const SlackBlock: BlockConfig<SlackResponse> = {
},
required: true,
},
// Get Message specific fields
{
id: 'getMessageTimestamp',
title: 'Message Timestamp',
type: 'short-input',
placeholder: 'Message timestamp (e.g., 1405894322.002768)',
condition: {
field: 'operation',
value: 'get_message',
},
required: true,
wandConfig: {
enabled: true,
prompt: `Extract or generate a Slack message timestamp from the user's input.
Slack message timestamps are in the format: XXXXXXXXXX.XXXXXX (seconds.microseconds since Unix epoch).
Examples:
- "1405894322.002768" -> 1405894322.002768 (already a valid timestamp)
- "thread_ts from the trigger" -> The user wants to reference a variable, output the original text
- A URL like "https://slack.com/archives/C123/p1405894322002768" -> Extract 1405894322.002768 (remove 'p' prefix, add decimal after 10th digit)
If the input looks like a reference to another block's output (contains < and >) or a variable, return it as-is.
Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
placeholder: 'Paste a Slack message URL or timestamp...',
generationType: 'timestamp',
},
},
// Get Thread specific fields
{
id: 'getThreadTimestamp',
title: 'Thread Timestamp',
type: 'short-input',
placeholder: 'Thread timestamp (thread_ts, e.g., 1405894322.002768)',
condition: {
field: 'operation',
value: 'get_thread',
},
required: true,
wandConfig: {
enabled: true,
prompt: `Extract or generate a Slack thread timestamp from the user's input.
Slack thread timestamps (thread_ts) are in the format: XXXXXXXXXX.XXXXXX (seconds.microseconds since Unix epoch).
Examples:
- "1405894322.002768" -> 1405894322.002768 (already a valid timestamp)
- "thread_ts from the trigger" -> The user wants to reference a variable, output the original text
- A URL like "https://slack.com/archives/C123/p1405894322002768" -> Extract 1405894322.002768 (remove 'p' prefix, add decimal after 10th digit)
If the input looks like a reference to another block's output (contains < and >) or a variable, return it as-is.
Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
placeholder: 'Paste a Slack thread URL or thread_ts...',
generationType: 'timestamp',
},
},
{
id: 'threadLimit',
title: 'Message Limit',
type: 'short-input',
placeholder: '100',
condition: {
field: 'operation',
value: 'get_thread',
},
},
{
id: 'oldest',
title: 'Oldest Timestamp',
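Note: the wand prompts above describe converting a Slack permalink such as https://slack.com/archives/C123/p1405894322002768 into the timestamp 1405894322.002768 by stripping the 'p' prefix and inserting a decimal point after the tenth digit. A minimal sketch of that rule, with an assumed function name and regex (not part of this commit):
/** Converts a Slack permalink's p-prefixed segment into a message timestamp. */
function slackPermalinkToTimestamp(url: string): string | null {
  const match = url.match(/\/p(\d{16})\b/)
  if (!match) return null
  const digits = match[1]
  // First ten digits are seconds, the remainder are microseconds.
  return `${digits.slice(0, 10)}.${digits.slice(10)}`
}
// slackPermalinkToTimestamp('https://slack.com/archives/C123/p1405894322002768')
// -> '1405894322.002768'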
@@ -430,6 +494,8 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
'slack_message',
'slack_canvas',
'slack_message_reader',
'slack_get_message',
'slack_get_thread',
'slack_list_channels',
'slack_list_members',
'slack_list_users',
@@ -448,6 +514,10 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
return 'slack_canvas'
case 'read':
return 'slack_message_reader'
case 'get_message':
return 'slack_get_message'
case 'get_thread':
return 'slack_get_thread'
case 'list_channels':
return 'slack_list_channels'
case 'list_members':
@@ -498,6 +568,9 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
includeDeleted,
userLimit,
userId,
getMessageTimestamp,
getThreadTimestamp,
threadLimit,
...rest
} = params
@@ -574,6 +647,27 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
break
}
case 'get_message':
if (!getMessageTimestamp) {
throw new Error('Message timestamp is required for get message operation')
}
baseParams.timestamp = getMessageTimestamp
break
case 'get_thread': {
if (!getThreadTimestamp) {
throw new Error('Thread timestamp is required for get thread operation')
}
baseParams.threadTs = getThreadTimestamp
if (threadLimit) {
const parsedLimit = Number.parseInt(threadLimit, 10)
if (!Number.isNaN(parsedLimit) && parsedLimit > 0) {
baseParams.limit = Math.min(parsedLimit, 200)
}
}
break
}
case 'list_channels': {
baseParams.includePrivate = includePrivate !== 'false'
baseParams.excludeArchived = true
@@ -679,6 +773,14 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
userLimit: { type: 'string', description: 'Maximum number of users to return' },
// Get User inputs
userId: { type: 'string', description: 'User ID to look up' },
// Get Message inputs
getMessageTimestamp: { type: 'string', description: 'Message timestamp to retrieve' },
// Get Thread inputs
getThreadTimestamp: { type: 'string', description: 'Thread timestamp to retrieve' },
threadLimit: {
type: 'string',
description: 'Maximum number of messages to return from thread',
},
},
outputs: {
// slack_message outputs (send operation)
@@ -706,6 +808,24 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
'Array of message objects with comprehensive properties: text, user, timestamp, reactions, threads, files, attachments, blocks, stars, pins, and edit history',
},
// slack_get_thread outputs (get_thread operation)
parentMessage: {
type: 'json',
description: 'The thread parent message with all properties',
},
replies: {
type: 'json',
description: 'Array of reply messages in the thread (excluding the parent)',
},
replyCount: {
type: 'number',
description: 'Number of replies returned in this response',
},
hasMore: {
type: 'boolean',
description: 'Whether there are more messages in the thread',
},
// slack_list_channels outputs (list_channels operation)
channels: {
type: 'json',

View File

@@ -0,0 +1,207 @@
import { TinybirdIcon } from '@/components/icons'
import type { BlockConfig } from '@/blocks/types'
import { AuthMode } from '@/blocks/types'
import type { TinybirdResponse } from '@/tools/tinybird/types'
export const TinybirdBlock: BlockConfig<TinybirdResponse> = {
type: 'tinybird',
name: 'Tinybird',
description: 'Send events and query data with Tinybird',
authMode: AuthMode.ApiKey,
longDescription:
'Interact with Tinybird using the Events API to stream JSON or NDJSON events, or use the Query API to execute SQL queries against Pipes and Data Sources.',
docsLink: 'https://www.tinybird.co/docs/api-reference',
category: 'tools',
bgColor: '#2EF598',
icon: TinybirdIcon,
subBlocks: [
{
id: 'operation',
title: 'Operation',
type: 'dropdown',
options: [
{ label: 'Send Events', id: 'tinybird_events' },
{ label: 'Query', id: 'tinybird_query' },
],
value: () => 'tinybird_events',
},
{
id: 'base_url',
title: 'Base URL',
type: 'short-input',
placeholder: 'https://api.tinybird.co',
required: true,
},
{
id: 'token',
title: 'API Token',
type: 'short-input',
placeholder: 'Enter your Tinybird API token',
password: true,
required: true,
},
// Send Events operation inputs
{
id: 'datasource',
title: 'Data Source',
type: 'short-input',
placeholder: 'my_events_datasource',
condition: { field: 'operation', value: 'tinybird_events' },
required: true,
},
{
id: 'data',
title: 'Data',
type: 'code',
placeholder:
'{"event": "click", "timestamp": "2024-01-01T12:00:00Z"}\n{"event": "view", "timestamp": "2024-01-01T12:00:01Z"}',
condition: { field: 'operation', value: 'tinybird_events' },
required: true,
},
{
id: 'format',
title: 'Format',
type: 'dropdown',
options: [
{ label: 'NDJSON (Newline-delimited JSON)', id: 'ndjson' },
{ label: 'JSON', id: 'json' },
],
value: () => 'ndjson',
condition: { field: 'operation', value: 'tinybird_events' },
},
{
id: 'compression',
title: 'Compression',
type: 'dropdown',
options: [
{ label: 'None', id: 'none' },
{ label: 'Gzip', id: 'gzip' },
],
value: () => 'none',
mode: 'advanced',
condition: { field: 'operation', value: 'tinybird_events' },
},
{
id: 'wait',
title: 'Wait for Acknowledgment',
type: 'switch',
value: () => 'false',
mode: 'advanced',
condition: { field: 'operation', value: 'tinybird_events' },
},
// Query operation inputs
{
id: 'query',
title: 'SQL Query',
type: 'code',
placeholder: 'SELECT * FROM my_pipe FORMAT JSON\nOR\nSELECT * FROM my_pipe FORMAT CSV',
condition: { field: 'operation', value: 'tinybird_query' },
required: true,
},
{
id: 'pipeline',
title: 'Pipeline Name',
type: 'short-input',
placeholder: 'my_pipe (optional)',
condition: { field: 'operation', value: 'tinybird_query' },
},
],
tools: {
access: ['tinybird_events', 'tinybird_query'],
config: {
tool: (params) => params.operation || 'tinybird_events',
params: (params) => {
const operation = params.operation || 'tinybird_events'
const result: Record<string, any> = {
base_url: params.base_url,
token: params.token,
}
if (operation === 'tinybird_events') {
// Send Events operation
if (!params.datasource) {
throw new Error('Data Source is required for Send Events operation')
}
if (!params.data) {
throw new Error('Data is required for Send Events operation')
}
result.datasource = params.datasource
result.data = params.data
result.format = params.format || 'ndjson'
result.compression = params.compression || 'none'
// Convert wait from string to boolean
if (params.wait !== undefined) {
const waitValue =
typeof params.wait === 'string' ? params.wait.toLowerCase() : params.wait
result.wait = waitValue === 'true' || waitValue === true
}
} else if (operation === 'tinybird_query') {
// Query operation
if (!params.query) {
throw new Error('SQL Query is required for Query operation')
}
result.query = params.query
if (params.pipeline) {
result.pipeline = params.pipeline
}
}
return result
},
},
},
inputs: {
operation: { type: 'string', description: 'Operation to perform' },
base_url: { type: 'string', description: 'Tinybird API base URL' },
// Send Events inputs
datasource: {
type: 'string',
description: 'Name of the Tinybird Data Source',
},
data: {
type: 'string',
description: 'Data to send as JSON or NDJSON string',
},
wait: { type: 'boolean', description: 'Wait for database acknowledgment' },
format: {
type: 'string',
description: 'Format of the events (ndjson or json)',
},
compression: {
type: 'string',
description: 'Compression format (none or gzip)',
},
// Query inputs
query: { type: 'string', description: 'SQL query to execute' },
pipeline: { type: 'string', description: 'Optional pipeline name' },
// Common
token: { type: 'string', description: 'Tinybird API Token' },
},
outputs: {
// Send Events outputs
successful_rows: {
type: 'number',
description: 'Number of rows successfully ingested',
},
quarantined_rows: {
type: 'number',
description: 'Number of rows quarantined (failed validation)',
},
// Query outputs
data: {
type: 'json',
description:
'Query result data. FORMAT JSON: array of objects. Other formats (CSV, TSV, etc.): raw text string.',
},
rows: { type: 'number', description: 'Number of rows returned (only with FORMAT JSON)' },
statistics: {
type: 'json',
description:
'Query execution statistics - elapsed time, rows read, bytes read (only with FORMAT JSON)',
},
},
}

View File

@@ -121,6 +121,7 @@ import { SupabaseBlock } from '@/blocks/blocks/supabase'
import { TavilyBlock } from '@/blocks/blocks/tavily'
import { TelegramBlock } from '@/blocks/blocks/telegram'
import { ThinkingBlock } from '@/blocks/blocks/thinking'
import { TinybirdBlock } from '@/blocks/blocks/tinybird'
import { TranslateBlock } from '@/blocks/blocks/translate'
import { TrelloBlock } from '@/blocks/blocks/trello'
import { TtsBlock } from '@/blocks/blocks/tts'
@@ -281,6 +282,7 @@ export const registry: Record<string, BlockConfig> = {
tavily: TavilyBlock,
telegram: TelegramBlock,
thinking: ThinkingBlock,
tinybird: TinybirdBlock,
translate: TranslateBlock,
trello: TrelloBlock,
twilio_sms: TwilioSMSBlock,
@@ -313,6 +315,26 @@ export const getBlock = (type: string): BlockConfig | undefined => {
return registry[normalized]
}
export const getLatestBlock = (baseType: string): BlockConfig | undefined => {
const normalized = baseType.replace(/-/g, '_')
const versionedKeys = Object.keys(registry).filter((key) => {
const match = key.match(new RegExp(`^${normalized}_v(\\d+)$`))
return match !== null
})
if (versionedKeys.length > 0) {
const sorted = versionedKeys.sort((a, b) => {
const versionA = Number.parseInt(a.match(/_v(\d+)$/)?.[1] || '0', 10)
const versionB = Number.parseInt(b.match(/_v(\d+)$/)?.[1] || '0', 10)
return versionB - versionA
})
return registry[sorted[0]]
}
return registry[normalized]
}
export const getBlockByToolName = (toolName: string): BlockConfig | undefined => {
return Object.values(registry).find((block) => block.tools?.access?.includes(toolName))
}

View File

@@ -1897,6 +1897,19 @@ export function TelegramIcon(props: SVGProps<SVGSVGElement>) {
)
}
export function TinybirdIcon(props: SVGProps<SVGSVGElement>) {
return (
<svg {...props} xmlns='http://www.w3.org/2000/svg' viewBox='0 0 24 24' fill='none'>
<rect x='0' y='0' width='24' height='24' fill='#2EF598' rx='6' />
<g transform='translate(2, 2) scale(0.833)'>
<path d='M25 2.64 17.195.5 14.45 6.635z' fill='#1E7F63' />
<path d='M17.535 17.77 10.39 15.215 6.195 25.5z' fill='#1E7F63' />
<path d='M0 11.495 17.535 17.77 20.41 4.36z' fill='#1F2437' />
</g>
</svg>
)
}
export function ClayIcon(props: SVGProps<SVGSVGElement>) {
return (
<svg {...props} xmlns='http://www.w3.org/2000/svg' width='40' height='40' viewBox='0 0 400 400'>

View File

@@ -378,21 +378,10 @@ function buildManualTriggerOutput(
}
function buildIntegrationTriggerOutput(
finalInput: unknown,
_finalInput: unknown,
workflowInput: unknown
): NormalizedBlockOutput {
const base: NormalizedBlockOutput = isPlainObject(workflowInput)
? ({ ...(workflowInput as Record<string, unknown>) } as NormalizedBlockOutput)
: {}
if (isPlainObject(finalInput)) {
Object.assign(base, finalInput as Record<string, unknown>)
base.input = { ...(finalInput as Record<string, unknown>) }
} else {
base.input = finalInput
}
return mergeFilesIntoOutput(base, workflowInput)
return isPlainObject(workflowInput) ? (workflowInput as NormalizedBlockOutput) : {}
}
function extractSubBlocks(block: SerializedBlock): Record<string, unknown> | undefined {

View File

@@ -22,7 +22,7 @@ import { useUndoRedoStore } from '@/stores/undo-redo'
import { useWorkflowDiffStore } from '@/stores/workflow-diff/store'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
import { mergeSubblockState, normalizeName } from '@/stores/workflows/utils'
import { filterNewEdges, mergeSubblockState, normalizeName } from '@/stores/workflows/utils'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
import type { BlockState, Loop, Parallel, Position } from '@/stores/workflows/workflow/types'
@@ -242,7 +242,10 @@ export function useCollaborativeWorkflow() {
case EDGES_OPERATIONS.BATCH_ADD_EDGES: {
const { edges } = payload
if (Array.isArray(edges) && edges.length > 0) {
workflowStore.batchAddEdges(edges)
const newEdges = filterNewEdges(edges, workflowStore.edges)
if (newEdges.length > 0) {
workflowStore.batchAddEdges(newEdges)
}
}
break
}
@@ -976,6 +979,9 @@ export function useCollaborativeWorkflow() {
if (edges.length === 0) return false
const newEdges = filterNewEdges(edges, workflowStore.edges)
if (newEdges.length === 0) return false
const operationId = crypto.randomUUID()
addToQueue({
@@ -983,16 +989,16 @@ export function useCollaborativeWorkflow() {
operation: {
operation: EDGES_OPERATIONS.BATCH_ADD_EDGES,
target: OPERATION_TARGETS.EDGES,
payload: { edges },
payload: { edges: newEdges },
},
workflowId: activeWorkflowId || '',
userId: session?.user?.id || 'unknown',
})
workflowStore.batchAddEdges(edges)
workflowStore.batchAddEdges(newEdges)
if (!options?.skipUndoRedo) {
edges.forEach((edge) => undoRedo.recordAddEdge(edge.id))
newEdges.forEach((edge) => undoRedo.recordAddEdge(edge.id))
}
return true

View File

@@ -36,9 +36,10 @@ class ApiKeyInterceptor implements CallInterceptor {
/**
* Create an A2A client from an agent URL with optional API key authentication
*
* The agent URL should be the full endpoint URL (e.g., /api/a2a/serve/{agentId}).
* We pass an empty path to createFromUrl so it uses the URL directly for agent card
* discovery (GET on the URL) instead of appending .well-known/agent-card.json.
* Supports both standard A2A agents (agent card at /.well-known/agent.json)
* and Sim Studio agents (agent card at root URL via GET).
*
* Tries standard path first, falls back to root URL for compatibility.
*/
export async function createA2AClient(agentUrl: string, apiKey?: string): Promise<Client> {
const factoryOptions = apiKey
@@ -49,6 +50,18 @@ export async function createA2AClient(agentUrl: string, apiKey?: string): Promis
})
: ClientFactoryOptions.default
const factory = new ClientFactory(factoryOptions)
// Try standard A2A path first (/.well-known/agent.json)
try {
return await factory.createFromUrl(agentUrl, '/.well-known/agent.json')
} catch (standardError) {
logger.debug('Standard agent card path failed, trying root URL', {
agentUrl,
error: standardError instanceof Error ? standardError.message : String(standardError),
})
}
// Fall back to root URL (Sim Studio compatibility)
return factory.createFromUrl(agentUrl, '')
}

View File

@@ -656,7 +656,7 @@ export const auth = betterAuth({
const now = new Date()
return {
id: profile.id.toString(),
id: `${profile.id.toString()}-${crypto.randomUUID()}`,
name: profile.name || profile.login,
email: profile.email,
image: profile.avatar_url,
@@ -962,7 +962,7 @@ export const auth = betterAuth({
const now = new Date()
return {
id: uniqueId,
id: `${uniqueId}-${crypto.randomUUID()}`,
name: 'Wealthbox User',
email: `${uniqueId}@wealthbox.user`,
emailVerified: false,
@@ -1016,7 +1016,7 @@ export const auth = betterAuth({
const user = data.data
return {
id: user.id.toString(),
id: `${user.id.toString()}-${crypto.randomUUID()}`,
name: user.name,
email: user.email,
emailVerified: user.activated,
@@ -1108,7 +1108,7 @@ export const auth = betterAuth({
})
return {
id: data.user_id || data.hub_id.toString(),
id: `${data.user_id || data.hub_id.toString()}-${crypto.randomUUID()}`,
name: data.user || 'HubSpot User',
email: data.user || `hubspot-${data.hub_id}@hubspot.com`,
emailVerified: true,
@@ -1162,7 +1162,7 @@ export const auth = betterAuth({
const data = await response.json()
return {
id: data.user_id || data.sub,
id: `${data.user_id || data.sub}-${crypto.randomUUID()}`,
name: data.name || 'Salesforce User',
email: data.email || `salesforce-${data.user_id}@salesforce.com`,
emailVerified: data.email_verified || true,
@@ -1221,7 +1221,7 @@ export const auth = betterAuth({
const now = new Date()
return {
id: profile.data.id,
id: `${profile.data.id}-${crypto.randomUUID()}`,
name: profile.data.name || 'X User',
email: `${profile.data.username}@x.com`,
image: profile.data.profile_image_url,
@@ -1295,7 +1295,7 @@ export const auth = betterAuth({
const now = new Date()
return {
id: profile.account_id,
id: `${profile.account_id}-${crypto.randomUUID()}`,
name: profile.name || profile.display_name || 'Confluence User',
email: profile.email || `${profile.account_id}@atlassian.com`,
image: profile.picture || undefined,
@@ -1406,7 +1406,7 @@ export const auth = betterAuth({
const now = new Date()
return {
id: profile.account_id,
id: `${profile.account_id}-${crypto.randomUUID()}`,
name: profile.name || profile.display_name || 'Jira User',
email: profile.email || `${profile.account_id}@atlassian.com`,
image: profile.picture || undefined,
@@ -1456,7 +1456,7 @@ export const auth = betterAuth({
const now = new Date()
return {
id: data.id,
id: `${data.id}-${crypto.randomUUID()}`,
name: data.email ? data.email.split('@')[0] : 'Airtable User',
email: data.email || `${data.id}@airtable.user`,
emailVerified: !!data.email,
@@ -1505,7 +1505,7 @@ export const auth = betterAuth({
const now = new Date()
return {
id: profile.bot?.owner?.user?.id || profile.id,
id: `${profile.bot?.owner?.user?.id || profile.id}-${crypto.randomUUID()}`,
name: profile.name || profile.bot?.owner?.user?.name || 'Notion User',
email: profile.person?.email || `${profile.id}@notion.user`,
emailVerified: !!profile.person?.email,
@@ -1572,7 +1572,7 @@ export const auth = betterAuth({
const now = new Date()
return {
id: data.id,
id: `${data.id}-${crypto.randomUUID()}`,
name: data.name || 'Reddit User',
email: `${data.name}@reddit.user`,
image: data.icon_img || undefined,
@@ -1644,7 +1644,7 @@ export const auth = betterAuth({
const viewer = data.viewer
return {
id: viewer.id,
id: `${viewer.id}-${crypto.randomUUID()}`,
email: viewer.email,
name: viewer.name,
emailVerified: true,
@@ -1707,7 +1707,7 @@ export const auth = betterAuth({
const data = await response.json()
return {
id: data.account_id,
id: `${data.account_id}-${crypto.randomUUID()}`,
email: data.email,
name: data.name?.display_name || data.email,
emailVerified: data.email_verified || false,
@@ -1758,7 +1758,7 @@ export const auth = betterAuth({
const now = new Date()
return {
id: profile.gid,
id: `${profile.gid}-${crypto.randomUUID()}`,
name: profile.name || 'Asana User',
email: profile.email || `${profile.gid}@asana.user`,
image: profile.photo?.image_128x128 || undefined,
@@ -1834,7 +1834,7 @@ export const auth = betterAuth({
logger.info('Slack credential identifier', { teamId, userId, uniqueId, teamName })
return {
id: uniqueId,
id: `${uniqueId}-${crypto.randomUUID()}`,
name: teamName,
email: `${teamId}-${userId}@slack.bot`,
emailVerified: false,
@@ -1884,7 +1884,7 @@ export const auth = betterAuth({
const uniqueId = `webflow-${userId}`
return {
id: uniqueId,
id: `${uniqueId}-${crypto.randomUUID()}`,
name: data.user_name || 'Webflow User',
email: `${uniqueId.replace(/[^a-zA-Z0-9]/g, '')}@webflow.user`,
emailVerified: false,
@@ -1931,7 +1931,7 @@ export const auth = betterAuth({
const profile = await response.json()
return {
id: profile.sub,
id: `${profile.sub}-${crypto.randomUUID()}`,
name: profile.name || 'LinkedIn User',
email: profile.email || `${profile.sub}@linkedin.user`,
emailVerified: profile.email_verified || true,
@@ -1993,7 +1993,7 @@ export const auth = betterAuth({
const profile = await response.json()
return {
id: profile.id,
id: `${profile.id}-${crypto.randomUUID()}`,
name:
`${profile.first_name || ''} ${profile.last_name || ''}`.trim() || 'Zoom User',
email: profile.email || `${profile.id}@zoom.user`,
@@ -2060,7 +2060,7 @@ export const auth = betterAuth({
const profile = await response.json()
return {
id: profile.id,
id: `${profile.id}-${crypto.randomUUID()}`,
name: profile.display_name || 'Spotify User',
email: profile.email || `${profile.id}@spotify.user`,
emailVerified: true,
@@ -2108,7 +2108,7 @@ export const auth = betterAuth({
const profile = await response.json()
return {
id: profile.ID?.toString() || profile.id?.toString(),
id: `${profile.ID?.toString() || profile.id?.toString()}-${crypto.randomUUID()}`,
name: profile.display_name || profile.username || 'WordPress User',
email: profile.email || `${profile.username}@wordpress.com`,
emailVerified: profile.email_verified || false,

View File

@@ -18,7 +18,7 @@ export const DEFAULT_ENTERPRISE_TIER_COST_LIMIT = 200
* Base charge applied to every workflow execution
* This charge is applied regardless of whether the workflow uses AI models
*/
export const BASE_EXECUTION_CHARGE = 0.001
export const BASE_EXECUTION_CHARGE = 0.005
/**
* Fixed cost for search tool invocation (in dollars)

View File

@@ -8,7 +8,7 @@ import {
// Mock the billing constants
vi.mock('@/lib/billing/constants', () => ({
BASE_EXECUTION_CHARGE: 0.001,
BASE_EXECUTION_CHARGE: 0.005,
}))
vi.mock('@sim/logger', () => loggerMock)
@@ -148,7 +148,7 @@ describe('createEnvironmentObject', () => {
})
describe('calculateCostSummary', () => {
const BASE_EXECUTION_CHARGE = 0.001
const BASE_EXECUTION_CHARGE = 0.005
test('should return base execution charge for empty trace spans', () => {
const result = calculateCostSummary([])

View File

@@ -1,4 +1,4 @@
import { getBlock } from '@/blocks/registry'
import { getLatestBlock } from '@/blocks/registry'
import { getAllTriggers } from '@/triggers'
export interface TriggerOption {
@@ -49,22 +49,13 @@ export function getTriggerOptions(): TriggerOption[] {
continue
}
const block = getBlock(provider)
const block = getLatestBlock(provider)
if (block) {
providerMap.set(provider, {
value: provider,
label: block.name, // Use block's display name (e.g., "Slack", "GitHub")
color: block.bgColor || '#6b7280', // Use block's hex color, fallback to gray
})
} else {
const label = formatProviderName(provider)
providerMap.set(provider, {
value: provider,
label,
color: '#6b7280', // gray fallback
})
}
providerMap.set(provider, {
value: provider,
label: block?.name || formatProviderName(provider),
color: block?.bgColor || '#6b7280',
})
}
const integrationOptions = Array.from(providerMap.values()).sort((a, b) =>

File diff suppressed because it is too large

View File

@@ -2290,7 +2290,7 @@ describe('hasWorkflowChanged', () => {
block1: createBlock('block1', {
type: 'starter',
subBlocks: {
triggerConfig: { value: { event: 'push' } },
model: { value: 'gpt-4' },
webhookId: { value: null },
},
}),
@@ -2302,7 +2302,7 @@ describe('hasWorkflowChanged', () => {
block1: createBlock('block1', {
type: 'starter',
subBlocks: {
triggerConfig: { value: { event: 'push' } },
model: { value: 'gpt-4' },
webhookId: { value: 'wh_123456' },
},
}),
@@ -2318,7 +2318,7 @@ describe('hasWorkflowChanged', () => {
block1: createBlock('block1', {
type: 'starter',
subBlocks: {
triggerConfig: { value: { event: 'push' } },
model: { value: 'gpt-4' },
triggerPath: { value: '' },
},
}),
@@ -2330,7 +2330,7 @@ describe('hasWorkflowChanged', () => {
block1: createBlock('block1', {
type: 'starter',
subBlocks: {
triggerConfig: { value: { event: 'push' } },
model: { value: 'gpt-4' },
triggerPath: { value: '/api/webhooks/abc123' },
},
}),
@@ -2346,7 +2346,7 @@ describe('hasWorkflowChanged', () => {
block1: createBlock('block1', {
type: 'starter',
subBlocks: {
triggerConfig: { value: { event: 'push' } },
model: { value: 'gpt-4' },
webhookId: { value: null },
triggerPath: { value: '' },
},
@@ -2359,7 +2359,7 @@ describe('hasWorkflowChanged', () => {
block1: createBlock('block1', {
type: 'starter',
subBlocks: {
triggerConfig: { value: { event: 'push' } },
model: { value: 'gpt-4' },
webhookId: { value: 'wh_123456' },
triggerPath: { value: '/api/webhooks/abc123' },
},
@@ -2371,14 +2371,18 @@ describe('hasWorkflowChanged', () => {
})
it.concurrent(
'should detect change when triggerConfig differs but runtime metadata also differs',
'should detect change when actual config differs but runtime metadata also differs',
() => {
// Test that when a real config field changes along with runtime metadata,
// the change is still detected. Using 'model' as the config field since
// triggerConfig is now excluded from comparison (individual trigger fields
// are compared separately).
const deployedState = createWorkflowState({
blocks: {
block1: createBlock('block1', {
type: 'starter',
subBlocks: {
triggerConfig: { value: { event: 'push' } },
model: { value: 'gpt-4' },
webhookId: { value: null },
},
}),
@@ -2390,7 +2394,7 @@ describe('hasWorkflowChanged', () => {
block1: createBlock('block1', {
type: 'starter',
subBlocks: {
triggerConfig: { value: { event: 'pull_request' } },
model: { value: 'gpt-4o' },
webhookId: { value: 'wh_123456' },
},
}),
@@ -2402,8 +2406,12 @@ describe('hasWorkflowChanged', () => {
)
it.concurrent(
'should not detect change when runtime metadata is added to current state',
'should not detect change when triggerConfig differs (individual fields compared separately)',
() => {
// triggerConfig is excluded from comparison because:
// 1. Individual trigger fields are stored as separate subblocks and compared individually
// 2. The client populates triggerConfig with default values from trigger definitions,
// which aren't present in the deployed state, causing false positive change detection
const deployedState = createWorkflowState({
blocks: {
block1: createBlock('block1', {
@@ -2420,7 +2428,36 @@ describe('hasWorkflowChanged', () => {
block1: createBlock('block1', {
type: 'starter',
subBlocks: {
triggerConfig: { value: { event: 'push' } },
triggerConfig: { value: { event: 'pull_request', extraField: true } },
},
}),
},
})
expect(hasWorkflowChanged(currentState, deployedState)).toBe(false)
}
)
it.concurrent(
'should not detect change when runtime metadata is added to current state',
() => {
const deployedState = createWorkflowState({
blocks: {
block1: createBlock('block1', {
type: 'starter',
subBlocks: {
model: { value: 'gpt-4' },
},
}),
},
})
const currentState = createWorkflowState({
blocks: {
block1: createBlock('block1', {
type: 'starter',
subBlocks: {
model: { value: 'gpt-4' },
webhookId: { value: 'wh_123456' },
triggerPath: { value: '/api/webhooks/abc123' },
},
@@ -2440,7 +2477,7 @@ describe('hasWorkflowChanged', () => {
block1: createBlock('block1', {
type: 'starter',
subBlocks: {
triggerConfig: { value: { event: 'push' } },
model: { value: 'gpt-4' },
webhookId: { value: 'wh_old123' },
triggerPath: { value: '/api/webhooks/old' },
},
@@ -2453,7 +2490,7 @@ describe('hasWorkflowChanged', () => {
block1: createBlock('block1', {
type: 'starter',
subBlocks: {
triggerConfig: { value: { event: 'push' } },
model: { value: 'gpt-4' },
},
}),
},

View File

@@ -104,6 +104,7 @@
"groq-sdk": "^0.15.0",
"html-to-image": "1.11.13",
"html-to-text": "^9.0.5",
"idb-keyval": "6.2.2",
"imapflow": "1.2.4",
"input-otp": "^1.4.2",
"ioredis": "^5.6.0",

View File

@@ -1,2 +1,3 @@
export { indexedDBStorage } from './storage'
export { useTerminalConsoleStore } from './store'
export type { ConsoleEntry, ConsoleStore, ConsoleUpdate } from './types'

View File

@@ -0,0 +1,81 @@
import { createLogger } from '@sim/logger'
import { del, get, set } from 'idb-keyval'
import type { StateStorage } from 'zustand/middleware'
const logger = createLogger('ConsoleStorage')
const STORE_KEY = 'terminal-console-store'
const MIGRATION_KEY = 'terminal-console-store-migrated'
/**
* Promise that resolves when migration is complete.
* Used to ensure getItem waits for migration before reading.
*/
let migrationPromise: Promise<void> | null = null
/**
* Migrates existing console data from localStorage to IndexedDB.
* Runs once on first load, then marks migration as complete.
*/
async function migrateFromLocalStorage(): Promise<void> {
if (typeof window === 'undefined') return
try {
const migrated = await get<boolean>(MIGRATION_KEY)
if (migrated) return
const localData = localStorage.getItem(STORE_KEY)
if (localData) {
await set(STORE_KEY, localData)
localStorage.removeItem(STORE_KEY)
logger.info('Migrated console store to IndexedDB')
}
await set(MIGRATION_KEY, true)
} catch (error) {
logger.warn('Migration from localStorage failed', { error })
}
}
if (typeof window !== 'undefined') {
migrationPromise = migrateFromLocalStorage().finally(() => {
migrationPromise = null
})
}
export const indexedDBStorage: StateStorage = {
getItem: async (name: string): Promise<string | null> => {
if (typeof window === 'undefined') return null
// Ensure migration completes before reading
if (migrationPromise) {
await migrationPromise
}
try {
const value = await get<string>(name)
return value ?? null
} catch (error) {
logger.warn('IndexedDB read failed', { name, error })
return null
}
},
setItem: async (name: string, value: string): Promise<void> => {
if (typeof window === 'undefined') return
try {
await set(name, value)
} catch (error) {
logger.warn('IndexedDB write failed', { name, error })
}
},
removeItem: async (name: string): Promise<void> => {
if (typeof window === 'undefined') return
try {
await del(name)
} catch (error) {
logger.warn('IndexedDB delete failed', { name, error })
}
},
}

View File

@@ -1,18 +1,22 @@
import { createLogger } from '@sim/logger'
import { create } from 'zustand'
import { devtools, persist } from 'zustand/middleware'
import { createJSONStorage, devtools, persist } from 'zustand/middleware'
import { redactApiKeys } from '@/lib/core/security/redaction'
import type { NormalizedBlockOutput } from '@/executor/types'
import { useExecutionStore } from '@/stores/execution'
import { useNotificationStore } from '@/stores/notifications'
import { useGeneralStore } from '@/stores/settings/general'
import { indexedDBStorage } from '@/stores/terminal/console/storage'
import type { ConsoleEntry, ConsoleStore, ConsoleUpdate } from '@/stores/terminal/console/types'
const logger = createLogger('TerminalConsoleStore')
/**
* Updates a NormalizedBlockOutput with new content
* Maximum number of console entries to keep per workflow.
* Keeps the stored data size reasonable and improves performance.
*/
const MAX_ENTRIES_PER_WORKFLOW = 500
const updateBlockOutput = (
existingOutput: NormalizedBlockOutput | undefined,
contentUpdate: string
@@ -23,9 +27,6 @@ const updateBlockOutput = (
}
}
/**
* Checks if output represents a streaming object that should be skipped
*/
const isStreamingOutput = (output: any): boolean => {
if (typeof ReadableStream !== 'undefined' && output instanceof ReadableStream) {
return true
@@ -44,9 +45,6 @@ const isStreamingOutput = (output: any): boolean => {
)
}
/**
* Checks if entry should be skipped to prevent duplicates
*/
const shouldSkipEntry = (output: any): boolean => {
if (typeof output !== 'object' || !output) {
return false
@@ -69,6 +67,9 @@ export const useTerminalConsoleStore = create<ConsoleStore>()(
(set, get) => ({
entries: [],
isOpen: false,
_hasHydrated: false,
setHasHydrated: (hasHydrated) => set({ _hasHydrated: hasHydrated }),
addConsole: (entry: Omit<ConsoleEntry, 'id' | 'timestamp'>) => {
set((state) => {
@@ -94,7 +95,15 @@ export const useTerminalConsoleStore = create<ConsoleStore>()(
timestamp: new Date().toISOString(),
}
return { entries: [newEntry, ...state.entries] }
const newEntries = [newEntry, ...state.entries]
const workflowCounts = new Map<string, number>()
const trimmedEntries = newEntries.filter((entry) => {
const count = workflowCounts.get(entry.workflowId) || 0
if (count >= MAX_ENTRIES_PER_WORKFLOW) return false
workflowCounts.set(entry.workflowId, count + 1)
return true
})
return { entries: trimmedEntries }
})
const newEntry = get().entries[0]
@@ -130,10 +139,6 @@ export const useTerminalConsoleStore = create<ConsoleStore>()(
return newEntry
},
/**
* Clears console entries for a specific workflow and clears the run path
* @param workflowId - The workflow ID to clear entries for
*/
clearWorkflowConsole: (workflowId: string) => {
set((state) => ({
entries: state.entries.filter((entry) => entry.workflowId !== workflowId),
@@ -148,9 +153,6 @@ export const useTerminalConsoleStore = create<ConsoleStore>()(
return
}
/**
* Formats a value for CSV export
*/
const formatCSVValue = (value: any): string => {
if (value === null || value === undefined) {
return ''
@@ -297,7 +299,35 @@ export const useTerminalConsoleStore = create<ConsoleStore>()(
}),
{
name: 'terminal-console-store',
storage: createJSONStorage(() => indexedDBStorage),
partialize: (state) => ({
entries: state.entries,
isOpen: state.isOpen,
}),
onRehydrateStorage: () => (_state, error) => {
if (error) {
logger.error('Failed to rehydrate console store', { error })
}
},
merge: (persistedState, currentState) => {
const persisted = persistedState as Partial<ConsoleStore> | undefined
return {
...currentState,
entries: persisted?.entries ?? currentState.entries,
isOpen: persisted?.isOpen ?? currentState.isOpen,
}
},
}
)
)
)
if (typeof window !== 'undefined') {
useTerminalConsoleStore.persist.onFinishHydration(() => {
useTerminalConsoleStore.setState({ _hasHydrated: true })
})
if (useTerminalConsoleStore.persist.hasHydrated()) {
useTerminalConsoleStore.setState({ _hasHydrated: true })
}
}
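
Because IndexedDB rehydration is asynchronous, consumers should wait for `_hasHydrated` before treating an empty `entries` array as "no logs". A minimal sketch, assuming the store is exported from `@/stores/terminal/console/store`:

```ts
import { useTerminalConsoleStore } from '@/stores/terminal/console/store' // path assumed

// Resolves once the persisted entries have been restored from IndexedDB.
function whenConsoleHydrated(): Promise<void> {
  return new Promise((resolve) => {
    if (useTerminalConsoleStore.getState()._hasHydrated) return resolve()
    const unsubscribe = useTerminalConsoleStore.subscribe((state) => {
      if (state._hasHydrated) {
        unsubscribe()
        resolve()
      }
    })
  })
}

// Usage: only read logs after hydration so an empty array isn't misread.
whenConsoleHydrated().then(() => {
  const entries = useTerminalConsoleStore.getState().getWorkflowEntries('workflow-123')
  console.log(`restored ${entries.length} console entries`)
})
```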

View File

@@ -1,9 +1,6 @@
import type { NormalizedBlockOutput } from '@/executor/types'
import type { SubflowType } from '@/stores/workflows/workflow/types'
/**
* Console entry for terminal logs
*/
export interface ConsoleEntry {
id: string
timestamp: string
@@ -25,9 +22,6 @@ export interface ConsoleEntry {
iterationType?: SubflowType
}
/**
* Console update payload for partial updates
*/
export interface ConsoleUpdate {
content?: string
output?: Partial<NormalizedBlockOutput>
@@ -40,9 +34,6 @@ export interface ConsoleUpdate {
input?: any
}
/**
* Console store state and actions
*/
export interface ConsoleStore {
entries: ConsoleEntry[]
isOpen: boolean
@@ -52,4 +43,6 @@ export interface ConsoleStore {
getWorkflowEntries: (workflowId: string) => ConsoleEntry[]
toggleConsole: () => void
updateConsole: (blockId: string, update: string | ConsoleUpdate, executionId?: string) => void
_hasHydrated: boolean
setHasHydrated: (hasHydrated: boolean) => void
}

View File

@@ -1,5 +1,19 @@
import type { Edge } from 'reactflow'
import { v4 as uuidv4 } from 'uuid'
export function filterNewEdges(edgesToAdd: Edge[], currentEdges: Edge[]): Edge[] {
return edgesToAdd.filter((edge) => {
if (edge.source === edge.target) return false
return !currentEdges.some(
(e) =>
e.source === edge.source &&
e.sourceHandle === edge.sourceHandle &&
e.target === edge.target &&
e.targetHandle === edge.targetHandle
)
})
}
import { getBlockOutputs } from '@/lib/workflows/blocks/block-outputs'
import { getBlock } from '@/blocks'
import { normalizeName } from '@/executor/constants'
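
A quick sketch of how `filterNewEdges` behaves: self-referencing edges and connections that already exist (same source, source handle, target, and target handle) are dropped, while a new handle on the same node pair is kept.

```ts
import type { Edge } from 'reactflow'
import { filterNewEdges } from '@/stores/workflows/utils'

const existing: Edge[] = [
  { id: 'e1', source: 'a', sourceHandle: 'out', target: 'b', targetHandle: 'in' },
]

const incoming: Edge[] = [
  // identical connection (same source/handle/target/handle) -> filtered out
  { id: 'e2', source: 'a', sourceHandle: 'out', target: 'b', targetHandle: 'in' },
  // self-referencing edge -> filtered out
  { id: 'e3', source: 'a', sourceHandle: 'out', target: 'a', targetHandle: 'in' },
  // same nodes but a different source handle -> kept
  { id: 'e4', source: 'a', sourceHandle: 'error', target: 'b', targetHandle: 'in' },
]

console.log(filterNewEdges(incoming, existing).map((e) => e.id)) // ['e4']
```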

View File

@@ -297,7 +297,7 @@ describe('workflow store', () => {
expectEdgeConnects(edges, 'block-1', 'block-2')
})
it('should not add duplicate edges', () => {
it('should not add duplicate connections', () => {
const { addBlock, batchAddEdges } = useWorkflowStore.getState()
addBlock('block-1', 'starter', 'Start', { x: 0, y: 0 })
@@ -309,17 +309,6 @@ describe('workflow store', () => {
const state = useWorkflowStore.getState()
expectEdgeCount(state, 1)
})
it('should prevent self-referencing edges', () => {
const { addBlock, batchAddEdges } = useWorkflowStore.getState()
addBlock('block-1', 'function', 'Self', { x: 0, y: 0 })
batchAddEdges([{ id: 'e1', source: 'block-1', target: 'block-1' }])
const state = useWorkflowStore.getState()
expectEdgeCount(state, 0)
})
})
describe('batchRemoveEdges', () => {

View File

@@ -9,7 +9,12 @@ import { getBlock } from '@/blocks'
import type { SubBlockConfig } from '@/blocks/types'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
import { getUniqueBlockName, mergeSubblockState, normalizeName } from '@/stores/workflows/utils'
import {
filterNewEdges,
getUniqueBlockName,
mergeSubblockState,
normalizeName,
} from '@/stores/workflows/utils'
import type {
Position,
SubBlockState,
@@ -496,25 +501,11 @@ export const useWorkflowStore = create<WorkflowStore>()(
batchAddEdges: (edges: Edge[]) => {
const currentEdges = get().edges
const filtered = filterNewEdges(edges, currentEdges)
const newEdges = [...currentEdges]
const existingEdgeIds = new Set(currentEdges.map((e) => e.id))
// Track existing connections to prevent duplicates (same source->target)
const existingConnections = new Set(currentEdges.map((e) => `${e.source}->${e.target}`))
for (const edge of edges) {
// Skip if edge ID already exists
if (existingEdgeIds.has(edge.id)) continue
// Skip self-referencing edges
if (edge.source === edge.target) continue
// Skip if connection already exists (same source and target)
const connectionKey = `${edge.source}->${edge.target}`
if (existingConnections.has(connectionKey)) continue
// Skip if would create a cycle
for (const edge of filtered) {
if (wouldCreateCycle([...newEdges], edge.source, edge.target)) continue
newEdges.push({
id: edge.id || crypto.randomUUID(),
source: edge.source,
@@ -524,8 +515,6 @@ export const useWorkflowStore = create<WorkflowStore>()(
type: edge.type || 'default',
data: edge.data || {},
})
existingEdgeIds.add(edge.id)
existingConnections.add(connectionKey)
}
const blocks = get().blocks

View File

@@ -30,11 +30,14 @@ export const a2aCancelTaskTool: ToolConfig<A2ACancelTaskParams, A2ACancelTaskRes
headers: () => ({
'Content-Type': 'application/json',
}),
body: (params: A2ACancelTaskParams) => ({
agentUrl: params.agentUrl,
taskId: params.taskId,
apiKey: params.apiKey,
}),
body: (params: A2ACancelTaskParams) => {
const body: Record<string, string> = {
agentUrl: params.agentUrl,
taskId: params.taskId,
}
if (params.apiKey) body.apiKey = params.apiKey
return body
},
},
transformResponse: async (response: Response) => {

View File

@@ -38,12 +38,16 @@ export const a2aDeletePushNotificationTool: ToolConfig<
headers: () => ({
'Content-Type': 'application/json',
}),
body: (params) => ({
agentUrl: params.agentUrl,
taskId: params.taskId,
pushNotificationConfigId: params.pushNotificationConfigId,
apiKey: params.apiKey,
}),
body: (params) => {
const body: Record<string, string> = {
agentUrl: params.agentUrl,
taskId: params.taskId,
}
if (params.pushNotificationConfigId)
body.pushNotificationConfigId = params.pushNotificationConfigId
if (params.apiKey) body.apiKey = params.apiKey
return body
},
},
transformResponse: async (response: Response) => {

View File

@@ -25,10 +25,13 @@ export const a2aGetAgentCardTool: ToolConfig<A2AGetAgentCardParams, A2AGetAgentC
headers: () => ({
'Content-Type': 'application/json',
}),
body: (params) => ({
agentUrl: params.agentUrl,
apiKey: params.apiKey,
}),
body: (params) => {
const body: Record<string, string> = {
agentUrl: params.agentUrl,
}
if (params.apiKey) body.apiKey = params.apiKey
return body
},
},
transformResponse: async (response: Response) => {

View File

@@ -33,11 +33,14 @@ export const a2aGetPushNotificationTool: ToolConfig<
headers: () => ({
'Content-Type': 'application/json',
}),
body: (params) => ({
agentUrl: params.agentUrl,
taskId: params.taskId,
apiKey: params.apiKey,
}),
body: (params) => {
const body: Record<string, string> = {
agentUrl: params.agentUrl,
taskId: params.taskId,
}
if (params.apiKey) body.apiKey = params.apiKey
return body
},
},
transformResponse: async (response: Response) => {

View File

@@ -34,12 +34,15 @@ export const a2aGetTaskTool: ToolConfig<A2AGetTaskParams, A2AGetTaskResponse> =
headers: () => ({
'Content-Type': 'application/json',
}),
body: (params: A2AGetTaskParams) => ({
agentUrl: params.agentUrl,
taskId: params.taskId,
apiKey: params.apiKey,
historyLength: params.historyLength,
}),
body: (params: A2AGetTaskParams) => {
const body: Record<string, string | number> = {
agentUrl: params.agentUrl,
taskId: params.taskId,
}
if (params.apiKey) body.apiKey = params.apiKey
if (params.historyLength) body.historyLength = params.historyLength
return body
},
},
transformResponse: async (response: Response) => {

View File

@@ -5,7 +5,6 @@ import { a2aGetPushNotificationTool } from './get_push_notification'
import { a2aGetTaskTool } from './get_task'
import { a2aResubscribeTool } from './resubscribe'
import { a2aSendMessageTool } from './send_message'
import { a2aSendMessageStreamTool } from './send_message_stream'
import { a2aSetPushNotificationTool } from './set_push_notification'
export {
@@ -16,6 +15,5 @@ export {
a2aGetTaskTool,
a2aResubscribeTool,
a2aSendMessageTool,
a2aSendMessageStreamTool,
a2aSetPushNotificationTool,
}

View File

@@ -30,11 +30,14 @@ export const a2aResubscribeTool: ToolConfig<A2AResubscribeParams, A2AResubscribe
headers: () => ({
'Content-Type': 'application/json',
}),
body: (params: A2AResubscribeParams) => ({
agentUrl: params.agentUrl,
taskId: params.taskId,
apiKey: params.apiKey,
}),
body: (params: A2AResubscribeParams) => {
const body: Record<string, string> = {
agentUrl: params.agentUrl,
taskId: params.taskId,
}
if (params.apiKey) body.apiKey = params.apiKey
return body
},
},
transformResponse: async (response) => {

View File

@@ -26,6 +26,14 @@ export const a2aSendMessageTool: ToolConfig<A2ASendMessageParams, A2ASendMessage
type: 'string',
description: 'Context ID for conversation continuity',
},
data: {
type: 'string',
description: 'Structured data to include with the message (JSON string)',
},
files: {
type: 'array',
description: 'Files to include with the message',
},
apiKey: {
type: 'string',
description: 'API key for authentication',
@@ -35,7 +43,21 @@ export const a2aSendMessageTool: ToolConfig<A2ASendMessageParams, A2ASendMessage
request: {
url: '/api/tools/a2a/send-message',
method: 'POST',
headers: () => ({}),
headers: () => ({
'Content-Type': 'application/json',
}),
body: (params) => {
const body: Record<string, unknown> = {
agentUrl: params.agentUrl,
message: params.message,
}
if (params.taskId) body.taskId = params.taskId
if (params.contextId) body.contextId = params.contextId
if (params.data) body.data = params.data
if (params.files && params.files.length > 0) body.files = params.files
if (params.apiKey) body.apiKey = params.apiKey
return body
},
},
transformResponse: async (response: Response) => {

View File

@@ -1,81 +0,0 @@
import type { ToolConfig } from '@/tools/types'
import type { A2ASendMessageParams, A2ASendMessageResponse } from './types'
export const a2aSendMessageStreamTool: ToolConfig<A2ASendMessageParams, A2ASendMessageResponse> = {
id: 'a2a_send_message_stream',
name: 'A2A Send Message (Streaming)',
description: 'Send a message to an external A2A-compatible agent with real-time streaming.',
version: '1.0.0',
params: {
agentUrl: {
type: 'string',
required: true,
description: 'The A2A agent endpoint URL',
},
message: {
type: 'string',
required: true,
description: 'Message to send to the agent',
},
taskId: {
type: 'string',
description: 'Task ID for continuing an existing task',
},
contextId: {
type: 'string',
description: 'Context ID for conversation continuity',
},
apiKey: {
type: 'string',
description: 'API key for authentication',
},
},
request: {
url: '/api/tools/a2a/send-message-stream',
method: 'POST',
headers: () => ({
'Content-Type': 'application/json',
}),
body: (params) => ({
agentUrl: params.agentUrl,
message: params.message,
taskId: params.taskId,
contextId: params.contextId,
apiKey: params.apiKey,
}),
},
transformResponse: async (response: Response) => {
const data = await response.json()
return data
},
outputs: {
content: {
type: 'string',
description: 'The text response from the agent',
},
taskId: {
type: 'string',
description: 'Task ID for follow-up interactions',
},
contextId: {
type: 'string',
description: 'Context ID for conversation continuity',
},
state: {
type: 'string',
description: 'Task state',
},
artifacts: {
type: 'array',
description: 'Structured output artifacts',
},
history: {
type: 'array',
description: 'Full message history',
},
},
}

View File

@@ -42,13 +42,16 @@ export const a2aSetPushNotificationTool: ToolConfig<
headers: () => ({
'Content-Type': 'application/json',
}),
body: (params: A2ASetPushNotificationParams) => ({
agentUrl: params.agentUrl,
taskId: params.taskId,
webhookUrl: params.webhookUrl,
token: params.token,
apiKey: params.apiKey,
}),
body: (params: A2ASetPushNotificationParams) => {
const body: Record<string, string> = {
agentUrl: params.agentUrl,
taskId: params.taskId,
webhookUrl: params.webhookUrl,
}
if (params.token) body.token = params.token
if (params.apiKey) body.apiKey = params.apiKey
return body
},
},
transformResponse: async (response: Response) => {

View File

@@ -25,11 +25,20 @@ export interface A2AGetAgentCardResponse extends ToolResponse {
}
}
export interface A2ASendMessageFileInput {
type: 'file' | 'url'
data: string
name: string
mime?: string
}
export interface A2ASendMessageParams {
agentUrl: string
message: string
taskId?: string
contextId?: string
data?: string
files?: A2ASendMessageFileInput[]
apiKey?: string
}
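
A hedged sketch of building a send-message payload with the new optional fields. The URL and file values are placeholders, and `'url'` entries are assumed to carry the file location in `data`, per the `A2ASendMessageFileInput` shape above. Mirroring the tool's body builder, optional keys are added only when present so they never serialize as `undefined`.

```ts
import type { A2ASendMessageParams } from '@/tools/a2a/types'

// Placeholder values throughout.
const params: A2ASendMessageParams = {
  agentUrl: 'https://agent.example.com/a2a',
  message: 'Summarize the attached report',
  data: JSON.stringify({ priority: 'high' }),
  files: [
    { type: 'url', data: 'https://example.com/report.pdf', name: 'report.pdf', mime: 'application/pdf' },
  ],
}

// Same shape as the tool's body builder: optional keys only when present.
const body: Record<string, unknown> = { agentUrl: params.agentUrl, message: params.message }
if (params.taskId) body.taskId = params.taskId
if (params.contextId) body.contextId = params.contextId
if (params.data) body.data = params.data
if (params.files && params.files.length > 0) body.files = params.files
if (params.apiKey) body.apiKey = params.apiKey
```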

View File

@@ -5,7 +5,6 @@ import {
a2aGetPushNotificationTool,
a2aGetTaskTool,
a2aResubscribeTool,
a2aSendMessageStreamTool,
a2aSendMessageTool,
a2aSetPushNotificationTool,
} from '@/tools/a2a'
@@ -1180,6 +1179,8 @@ import {
slackCanvasTool,
slackDeleteMessageTool,
slackDownloadTool,
slackGetMessageTool,
slackGetThreadTool,
slackGetUserTool,
slackListChannelsTool,
slackListMembersTool,
@@ -1380,6 +1381,7 @@ import {
telegramSendVideoTool,
} from '@/tools/telegram'
import { thinkingTool } from '@/tools/thinking'
import { tinybirdEventsTool, tinybirdQueryTool } from '@/tools/tinybird'
import {
trelloAddCommentTool,
trelloCreateCardTool,
@@ -1541,7 +1543,6 @@ export const tools: Record<string, ToolConfig> = {
a2a_get_task: a2aGetTaskTool,
a2a_resubscribe: a2aResubscribeTool,
a2a_send_message: a2aSendMessageTool,
a2a_send_message_stream: a2aSendMessageStreamTool,
a2a_set_push_notification: a2aSetPushNotificationTool,
arxiv_search: arxivSearchTool,
arxiv_get_paper: arxivGetPaperTool,
@@ -1731,6 +1732,8 @@ export const tools: Record<string, ToolConfig> = {
slack_list_members: slackListMembersTool,
slack_list_users: slackListUsersTool,
slack_get_user: slackGetUserTool,
slack_get_message: slackGetMessageTool,
slack_get_thread: slackGetThreadTool,
slack_canvas: slackCanvasTool,
slack_download: slackDownloadTool,
slack_update_message: slackUpdateMessageTool,
@@ -2235,6 +2238,8 @@ export const tools: Record<string, ToolConfig> = {
apollo_email_accounts: apolloEmailAccountsTool,
mistral_parser: mistralParserTool,
thinking_tool: thinkingTool,
tinybird_events: tinybirdEventsTool,
tinybird_query: tinybirdQueryTool,
stagehand_extract: stagehandExtractTool,
stagehand_agent: stagehandAgentTool,
mem0_add_memories: mem0AddMemoriesTool,

View File

@@ -0,0 +1,213 @@
import type { SlackGetMessageParams, SlackGetMessageResponse } from '@/tools/slack/types'
import type { ToolConfig } from '@/tools/types'
export const slackGetMessageTool: ToolConfig<SlackGetMessageParams, SlackGetMessageResponse> = {
id: 'slack_get_message',
name: 'Slack Get Message',
description:
'Retrieve a specific message by its timestamp. Useful for getting a thread parent message.',
version: '1.0.0',
oauth: {
required: true,
provider: 'slack',
},
params: {
authMethod: {
type: 'string',
required: false,
visibility: 'user-only',
description: 'Authentication method: oauth or bot_token',
},
botToken: {
type: 'string',
required: false,
visibility: 'user-only',
description: 'Bot token for Custom Bot',
},
accessToken: {
type: 'string',
required: false,
visibility: 'hidden',
description: 'OAuth access token or bot token for Slack API',
},
channel: {
type: 'string',
required: true,
visibility: 'user-only',
description: 'Slack channel ID (e.g., C1234567890)',
},
timestamp: {
type: 'string',
required: true,
visibility: 'user-or-llm',
description: 'Message timestamp to retrieve (e.g., 1405894322.002768)',
},
},
request: {
url: (params: SlackGetMessageParams) => {
const url = new URL('https://slack.com/api/conversations.history')
url.searchParams.append('channel', params.channel?.trim() ?? '')
url.searchParams.append('oldest', params.timestamp?.trim() ?? '')
url.searchParams.append('limit', '1')
url.searchParams.append('inclusive', 'true')
return url.toString()
},
method: 'GET',
headers: (params: SlackGetMessageParams) => ({
'Content-Type': 'application/json',
Authorization: `Bearer ${params.accessToken || params.botToken}`,
}),
},
transformResponse: async (response: Response) => {
const data = await response.json()
if (!data.ok) {
if (data.error === 'missing_scope') {
throw new Error(
'Missing required permissions. Please reconnect your Slack account with the necessary scopes (channels:history, groups:history).'
)
}
if (data.error === 'invalid_auth') {
throw new Error('Invalid authentication. Please check your Slack credentials.')
}
if (data.error === 'channel_not_found') {
throw new Error('Channel not found. Please check the channel ID.')
}
throw new Error(data.error || 'Failed to get message from Slack')
}
const messages = data.messages || []
if (messages.length === 0) {
throw new Error('Message not found')
}
const msg = messages[0]
const message = {
type: msg.type ?? 'message',
ts: msg.ts,
text: msg.text ?? '',
user: msg.user ?? null,
bot_id: msg.bot_id ?? null,
username: msg.username ?? null,
channel: msg.channel ?? null,
team: msg.team ?? null,
thread_ts: msg.thread_ts ?? null,
parent_user_id: msg.parent_user_id ?? null,
reply_count: msg.reply_count ?? null,
reply_users_count: msg.reply_users_count ?? null,
latest_reply: msg.latest_reply ?? null,
subscribed: msg.subscribed ?? null,
last_read: msg.last_read ?? null,
unread_count: msg.unread_count ?? null,
subtype: msg.subtype ?? null,
reactions: msg.reactions ?? [],
is_starred: msg.is_starred ?? false,
pinned_to: msg.pinned_to ?? [],
files: (msg.files ?? []).map((f: any) => ({
id: f.id,
name: f.name,
mimetype: f.mimetype,
size: f.size,
url_private: f.url_private ?? null,
permalink: f.permalink ?? null,
mode: f.mode ?? null,
})),
attachments: msg.attachments ?? [],
blocks: msg.blocks ?? [],
edited: msg.edited ?? null,
permalink: msg.permalink ?? null,
}
return {
success: true,
output: {
message,
},
}
},
outputs: {
message: {
type: 'object',
description: 'The retrieved message object',
properties: {
type: { type: 'string', description: 'Message type' },
ts: { type: 'string', description: 'Message timestamp' },
text: { type: 'string', description: 'Message text content' },
user: { type: 'string', description: 'User ID who sent the message' },
bot_id: { type: 'string', description: 'Bot ID if sent by a bot', optional: true },
username: { type: 'string', description: 'Display username', optional: true },
channel: { type: 'string', description: 'Channel ID', optional: true },
team: { type: 'string', description: 'Team ID', optional: true },
thread_ts: { type: 'string', description: 'Thread parent timestamp', optional: true },
parent_user_id: { type: 'string', description: 'User ID of thread parent', optional: true },
reply_count: { type: 'number', description: 'Number of thread replies', optional: true },
reply_users_count: {
type: 'number',
description: 'Number of users who replied',
optional: true,
},
latest_reply: { type: 'string', description: 'Timestamp of latest reply', optional: true },
subtype: { type: 'string', description: 'Message subtype', optional: true },
reactions: {
type: 'array',
description: 'Array of reactions on this message',
items: {
type: 'object',
properties: {
name: { type: 'string', description: 'Emoji name' },
count: { type: 'number', description: 'Number of reactions' },
users: {
type: 'array',
description: 'User IDs who reacted',
items: { type: 'string' },
},
},
},
},
is_starred: { type: 'boolean', description: 'Whether message is starred', optional: true },
pinned_to: {
type: 'array',
description: 'Channel IDs where message is pinned',
items: { type: 'string' },
optional: true,
},
files: {
type: 'array',
description: 'Files attached to message',
items: {
type: 'object',
properties: {
id: { type: 'string', description: 'File ID' },
name: { type: 'string', description: 'File name' },
mimetype: { type: 'string', description: 'MIME type' },
size: { type: 'number', description: 'File size in bytes' },
url_private: { type: 'string', description: 'Private download URL' },
permalink: { type: 'string', description: 'Permanent link to file' },
},
},
},
attachments: {
type: 'array',
description: 'Legacy attachments',
items: { type: 'object' },
},
blocks: { type: 'array', description: 'Block Kit blocks', items: { type: 'object' } },
edited: {
type: 'object',
description: 'Edit information if message was edited',
properties: {
user: { type: 'string', description: 'User ID who edited' },
ts: { type: 'string', description: 'Edit timestamp' },
},
optional: true,
},
permalink: { type: 'string', description: 'Permanent link to message', optional: true },
},
},
},
}
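
The tool resolves a single message by pinning `conversations.history` to one timestamp (`oldest` plus `inclusive=true` and `limit=1`). A minimal standalone sketch of that call, with placeholder token and IDs:

```ts
async function getSlackMessage(token: string, channel: string, ts: string) {
  const url = new URL('https://slack.com/api/conversations.history')
  url.searchParams.set('channel', channel)
  url.searchParams.set('oldest', ts)
  url.searchParams.set('inclusive', 'true')
  url.searchParams.set('limit', '1')

  const res = await fetch(url, { headers: { Authorization: `Bearer ${token}` } })
  const data = await res.json()
  if (!data.ok) throw new Error(data.error ?? 'Slack API error')
  if (!data.messages?.length) throw new Error('Message not found')
  return data.messages[0]
}

getSlackMessage('xoxb-placeholder', 'C1234567890', '1405894322.002768')
  .then((msg) => console.log(msg.text))
  .catch(console.error)
```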

View File

@@ -0,0 +1,224 @@
import type { SlackGetThreadParams, SlackGetThreadResponse } from '@/tools/slack/types'
import type { ToolConfig } from '@/tools/types'
export const slackGetThreadTool: ToolConfig<SlackGetThreadParams, SlackGetThreadResponse> = {
id: 'slack_get_thread',
name: 'Slack Get Thread',
description:
'Retrieve an entire thread including the parent message and all replies. Useful for getting full conversation context.',
version: '1.0.0',
oauth: {
required: true,
provider: 'slack',
},
params: {
authMethod: {
type: 'string',
required: false,
visibility: 'user-only',
description: 'Authentication method: oauth or bot_token',
},
botToken: {
type: 'string',
required: false,
visibility: 'user-only',
description: 'Bot token for Custom Bot',
},
accessToken: {
type: 'string',
required: false,
visibility: 'hidden',
description: 'OAuth access token or bot token for Slack API',
},
channel: {
type: 'string',
required: true,
visibility: 'user-only',
description: 'Slack channel ID (e.g., C1234567890)',
},
threadTs: {
type: 'string',
required: true,
visibility: 'user-or-llm',
description: 'Thread timestamp (thread_ts) to retrieve (e.g., 1405894322.002768)',
},
limit: {
type: 'number',
required: false,
visibility: 'user-or-llm',
description: 'Maximum number of messages to return (default: 100, max: 200)',
},
},
request: {
url: (params: SlackGetThreadParams) => {
const url = new URL('https://slack.com/api/conversations.replies')
url.searchParams.append('channel', params.channel?.trim() ?? '')
url.searchParams.append('ts', params.threadTs?.trim() ?? '')
url.searchParams.append('inclusive', 'true')
const limit = params.limit ? Math.min(Number(params.limit), 200) : 100
url.searchParams.append('limit', String(limit))
return url.toString()
},
method: 'GET',
headers: (params: SlackGetThreadParams) => ({
'Content-Type': 'application/json',
Authorization: `Bearer ${params.accessToken || params.botToken}`,
}),
},
transformResponse: async (response: Response) => {
const data = await response.json()
if (!data.ok) {
if (data.error === 'missing_scope') {
throw new Error(
'Missing required permissions. Please reconnect your Slack account with the necessary scopes (channels:history, groups:history).'
)
}
if (data.error === 'invalid_auth') {
throw new Error('Invalid authentication. Please check your Slack credentials.')
}
if (data.error === 'channel_not_found') {
throw new Error('Channel not found. Please check the channel ID.')
}
if (data.error === 'thread_not_found') {
throw new Error('Thread not found. Please check the thread timestamp.')
}
throw new Error(data.error || 'Failed to get thread from Slack')
}
const rawMessages = data.messages || []
if (rawMessages.length === 0) {
throw new Error('Thread not found')
}
const messages = rawMessages.map((msg: any) => ({
type: msg.type ?? 'message',
ts: msg.ts,
text: msg.text ?? '',
user: msg.user ?? null,
bot_id: msg.bot_id ?? null,
username: msg.username ?? null,
channel: msg.channel ?? null,
team: msg.team ?? null,
thread_ts: msg.thread_ts ?? null,
parent_user_id: msg.parent_user_id ?? null,
reply_count: msg.reply_count ?? null,
reply_users_count: msg.reply_users_count ?? null,
latest_reply: msg.latest_reply ?? null,
subscribed: msg.subscribed ?? null,
last_read: msg.last_read ?? null,
unread_count: msg.unread_count ?? null,
subtype: msg.subtype ?? null,
reactions: msg.reactions ?? [],
is_starred: msg.is_starred ?? false,
pinned_to: msg.pinned_to ?? [],
files: (msg.files ?? []).map((f: any) => ({
id: f.id,
name: f.name,
mimetype: f.mimetype,
size: f.size,
url_private: f.url_private ?? null,
permalink: f.permalink ?? null,
mode: f.mode ?? null,
})),
attachments: msg.attachments ?? [],
blocks: msg.blocks ?? [],
edited: msg.edited ?? null,
permalink: msg.permalink ?? null,
}))
// First message is always the parent
const parentMessage = messages[0]
// Remaining messages are replies
const replies = messages.slice(1)
return {
success: true,
output: {
parentMessage,
replies,
messages,
replyCount: replies.length,
hasMore: data.has_more ?? false,
},
}
},
outputs: {
parentMessage: {
type: 'object',
description: 'The thread parent message',
properties: {
type: { type: 'string', description: 'Message type' },
ts: { type: 'string', description: 'Message timestamp' },
text: { type: 'string', description: 'Message text content' },
user: { type: 'string', description: 'User ID who sent the message' },
bot_id: { type: 'string', description: 'Bot ID if sent by a bot', optional: true },
username: { type: 'string', description: 'Display username', optional: true },
reply_count: { type: 'number', description: 'Total number of thread replies' },
reply_users_count: { type: 'number', description: 'Number of users who replied' },
latest_reply: { type: 'string', description: 'Timestamp of latest reply' },
reactions: {
type: 'array',
description: 'Array of reactions on the parent message',
items: {
type: 'object',
properties: {
name: { type: 'string', description: 'Emoji name' },
count: { type: 'number', description: 'Number of reactions' },
users: {
type: 'array',
description: 'User IDs who reacted',
items: { type: 'string' },
},
},
},
},
files: {
type: 'array',
description: 'Files attached to the parent message',
items: {
type: 'object',
properties: {
id: { type: 'string', description: 'File ID' },
name: { type: 'string', description: 'File name' },
mimetype: { type: 'string', description: 'MIME type' },
size: { type: 'number', description: 'File size in bytes' },
},
},
},
},
},
replies: {
type: 'array',
description: 'Array of reply messages in the thread (excluding the parent)',
items: {
type: 'object',
properties: {
ts: { type: 'string', description: 'Message timestamp' },
text: { type: 'string', description: 'Message text content' },
user: { type: 'string', description: 'User ID who sent the reply' },
reactions: { type: 'array', description: 'Reactions on the reply' },
files: { type: 'array', description: 'Files attached to the reply' },
},
},
},
messages: {
type: 'array',
description: 'All messages in the thread (parent + replies) in chronological order',
items: { type: 'object' },
},
replyCount: {
type: 'number',
description: 'Number of replies returned in this response',
},
hasMore: {
type: 'boolean',
description: 'Whether there are more messages in the thread (pagination needed)',
},
},
}
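
`conversations.replies` returns the parent message first, followed by replies in chronological order, with `has_more` signalling that cursor pagination is needed. A minimal sketch of the same lookup, with placeholders:

```ts
async function getSlackThread(token: string, channel: string, threadTs: string, limit = 100) {
  const url = new URL('https://slack.com/api/conversations.replies')
  url.searchParams.set('channel', channel)
  url.searchParams.set('ts', threadTs)
  url.searchParams.set('inclusive', 'true')
  url.searchParams.set('limit', String(Math.min(limit, 200))) // same cap as the tool

  const res = await fetch(url, { headers: { Authorization: `Bearer ${token}` } })
  const data = await res.json()
  if (!data.ok) throw new Error(data.error ?? 'Slack API error')

  const [parent, ...replies] = data.messages ?? []
  return { parent, replies, hasMore: Boolean(data.has_more) }
}
```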

View File

@@ -2,6 +2,8 @@ import { slackAddReactionTool } from '@/tools/slack/add_reaction'
import { slackCanvasTool } from '@/tools/slack/canvas'
import { slackDeleteMessageTool } from '@/tools/slack/delete_message'
import { slackDownloadTool } from '@/tools/slack/download'
import { slackGetMessageTool } from '@/tools/slack/get_message'
import { slackGetThreadTool } from '@/tools/slack/get_thread'
import { slackGetUserTool } from '@/tools/slack/get_user'
import { slackListChannelsTool } from '@/tools/slack/list_channels'
import { slackListMembersTool } from '@/tools/slack/list_members'
@@ -22,4 +24,6 @@ export {
slackListMembersTool,
slackListUsersTool,
slackGetUserTool,
slackGetMessageTool,
slackGetThreadTool,
}

View File

@@ -71,6 +71,17 @@ export interface SlackGetUserParams extends SlackBaseParams {
userId: string
}
export interface SlackGetMessageParams extends SlackBaseParams {
channel: string
timestamp: string
}
export interface SlackGetThreadParams extends SlackBaseParams {
channel: string
threadTs: string
limit?: number
}
export interface SlackMessageResponse extends ToolResponse {
output: {
// Legacy properties for backward compatibility
@@ -305,6 +316,22 @@ export interface SlackGetUserResponse extends ToolResponse {
}
}
export interface SlackGetMessageResponse extends ToolResponse {
output: {
message: SlackMessage
}
}
export interface SlackGetThreadResponse extends ToolResponse {
output: {
parentMessage: SlackMessage
replies: SlackMessage[]
messages: SlackMessage[]
replyCount: number
hasMore: boolean
}
}
export type SlackResponse =
| SlackCanvasResponse
| SlackMessageReaderResponse
@@ -317,3 +344,5 @@ export type SlackResponse =
| SlackListMembersResponse
| SlackListUsersResponse
| SlackGetUserResponse
| SlackGetMessageResponse
| SlackGetThreadResponse

View File

@@ -0,0 +1,128 @@
import { gzipSync } from 'zlib'
import { createLogger } from '@sim/logger'
import type { TinybirdEventsParams, TinybirdEventsResponse } from '@/tools/tinybird/types'
import type { ToolConfig } from '@/tools/types'
const logger = createLogger('tinybird-events')
export const eventsTool: ToolConfig<TinybirdEventsParams, TinybirdEventsResponse> = {
id: 'tinybird_events',
name: 'Tinybird Events',
description:
'Send events to a Tinybird Data Source using the Events API. Supports JSON and NDJSON formats with optional gzip compression.',
version: '1.0.0',
errorExtractor: 'nested-error-object',
params: {
base_url: {
type: 'string',
required: true,
visibility: 'user-only',
description:
'Tinybird API base URL (e.g., https://api.tinybird.co or https://api.us-east.tinybird.co)',
},
datasource: {
type: 'string',
required: true,
visibility: 'user-or-llm',
description: 'Name of the Tinybird Data Source to send events to',
},
data: {
type: 'string',
required: true,
visibility: 'user-or-llm',
description:
'Data to send as NDJSON (newline-delimited JSON) or JSON string. Each event should be a valid JSON object.',
},
wait: {
type: 'boolean',
required: false,
visibility: 'user-only',
description:
'Wait for database acknowledgment before responding. Enables safer retries but introduces latency. Defaults to false.',
},
format: {
type: 'string',
required: false,
visibility: 'user-only',
description: 'Format of the events data: "ndjson" (default) or "json"',
},
compression: {
type: 'string',
required: false,
visibility: 'user-only',
description: 'Compression format: "none" (default) or "gzip"',
},
token: {
type: 'string',
required: true,
visibility: 'user-only',
description: 'Tinybird API Token with DATASOURCE:APPEND or DATASOURCE:CREATE scope',
},
},
request: {
url: (params) => {
const baseUrl = params.base_url.endsWith('/') ? params.base_url.slice(0, -1) : params.base_url
const url = new URL(`${baseUrl}/v0/events`)
url.searchParams.set('name', params.datasource)
if (params.wait) {
url.searchParams.set('wait', 'true')
}
return url.toString()
},
method: 'POST',
headers: (params) => {
const headers: Record<string, string> = {
Authorization: `Bearer ${params.token}`,
}
if (params.compression === 'gzip') {
headers['Content-Encoding'] = 'gzip'
}
if (params.format === 'json') {
headers['Content-Type'] = 'application/json'
} else {
headers['Content-Type'] = 'application/x-ndjson'
}
return headers
},
body: (params) => {
const data = params.data
if (params.compression === 'gzip') {
return gzipSync(Buffer.from(data, 'utf-8'))
}
return data
},
},
transformResponse: async (response: Response) => {
const data = await response.json()
logger.info('Successfully sent events to Tinybird', {
successful: data.successful_rows,
quarantined: data.quarantined_rows,
})
return {
success: true,
output: {
successful_rows: data.successful_rows ?? 0,
quarantined_rows: data.quarantined_rows ?? 0,
},
}
},
outputs: {
successful_rows: {
type: 'number',
description: 'Number of rows successfully ingested',
},
quarantined_rows: {
type: 'number',
description: 'Number of rows quarantined (failed validation)',
},
},
}
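
A hedged sketch of the request the tool issues: newline-delimited JSON posted to `/v0/events?name=<datasource>`, optionally with `wait=true`. Host, token, and datasource below are placeholders.

```ts
async function sendTinybirdEvents(baseUrl: string, token: string, datasource: string, rows: object[]) {
  const url = new URL(`${baseUrl.replace(/\/$/, '')}/v0/events`)
  url.searchParams.set('name', datasource)
  url.searchParams.set('wait', 'true') // wait for DB acknowledgment: safer retries, higher latency

  const body = rows.map((r) => JSON.stringify(r)).join('\n') // NDJSON: one JSON object per line

  const res = await fetch(url, {
    method: 'POST',
    headers: { Authorization: `Bearer ${token}`, 'Content-Type': 'application/x-ndjson' },
    body,
  })
  return res.json() // { successful_rows, quarantined_rows, ... }
}

sendTinybirdEvents('https://api.tinybird.co', 'p.placeholder_token', 'page_views', [
  { timestamp: new Date().toISOString(), path: '/', user_id: 'u1' },
]).then(console.log)
```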

View File

@@ -0,0 +1,5 @@
import { eventsTool } from '@/tools/tinybird/events'
import { queryTool } from '@/tools/tinybird/query'
export const tinybirdEventsTool = eventsTool
export const tinybirdQueryTool = queryTool

View File

@@ -0,0 +1,139 @@
import { createLogger } from '@sim/logger'
import type { TinybirdQueryParams, TinybirdQueryResponse } from '@/tools/tinybird/types'
import type { ToolConfig } from '@/tools/types'
const logger = createLogger('tinybird-query')
/**
* Tinybird Query Tool
*
* Executes SQL queries against Tinybird and returns results in the format specified in the query.
* - FORMAT JSON: Returns structured data with rows/statistics metadata
* - FORMAT CSV/TSV/etc: Returns raw text string
*
* The tool automatically detects the response format based on Content-Type headers.
*/
export const queryTool: ToolConfig<TinybirdQueryParams, TinybirdQueryResponse> = {
id: 'tinybird_query',
name: 'Tinybird Query',
description: 'Execute SQL queries against Tinybird Pipes and Data Sources using the Query API.',
version: '1.0.0',
errorExtractor: 'nested-error-object',
params: {
base_url: {
type: 'string',
required: true,
visibility: 'user-only',
description: 'Tinybird API base URL (e.g., https://api.tinybird.co)',
},
query: {
type: 'string',
required: true,
visibility: 'user-or-llm',
description:
'SQL query to execute. Specify your desired output format (e.g., FORMAT JSON, FORMAT CSV, FORMAT TSV). JSON format provides structured data, while other formats return raw text.',
},
pipeline: {
type: 'string',
required: false,
visibility: 'user-or-llm',
description: 'Optional pipe name. When provided, enables SELECT * FROM _ syntax',
},
token: {
type: 'string',
required: true,
visibility: 'user-only',
description: 'Tinybird API Token with PIPE:READ scope',
},
},
request: {
url: (params) => {
const baseUrl = params.base_url.endsWith('/') ? params.base_url.slice(0, -1) : params.base_url
return `${baseUrl}/v0/sql`
},
method: 'POST',
headers: (params) => ({
'Content-Type': 'application/x-www-form-urlencoded',
Authorization: `Bearer ${params.token}`,
}),
body: (params) => {
const searchParams = new URLSearchParams()
searchParams.set('q', params.query)
if (params.pipeline) {
searchParams.set('pipeline', params.pipeline)
}
return searchParams.toString()
},
},
transformResponse: async (response: Response) => {
const responseText = await response.text()
const contentType = response.headers.get('content-type') || ''
// Check if response is JSON based on content-type or try parsing
const isJson = contentType.includes('application/json') || contentType.includes('text/json')
if (isJson) {
try {
const data = JSON.parse(responseText)
logger.info('Successfully executed Tinybird query (JSON)', {
rows: data.rows,
elapsed: data.statistics?.elapsed,
})
return {
success: true,
output: {
data: data.data || [],
rows: data.rows || 0,
statistics: data.statistics
? {
elapsed: data.statistics.elapsed,
rows_read: data.statistics.rows_read,
bytes_read: data.statistics.bytes_read,
}
: undefined,
},
}
} catch (parseError) {
logger.error('Failed to parse JSON response', {
contentType,
parseError: parseError instanceof Error ? parseError.message : String(parseError),
})
throw new Error(
`Invalid JSON response: ${parseError instanceof Error ? parseError.message : 'Parse error'}`
)
}
}
// For non-JSON formats (CSV, TSV, etc.), return as raw text
logger.info('Successfully executed Tinybird query (non-JSON)', { contentType })
return {
success: true,
output: {
data: responseText,
rows: undefined,
statistics: undefined,
},
}
},
outputs: {
data: {
type: 'json',
description:
'Query result data. For FORMAT JSON: array of objects. For other formats (CSV, TSV, etc.): raw text string.',
},
rows: {
type: 'number',
description: 'Number of rows returned (only available with FORMAT JSON)',
},
statistics: {
type: 'json',
description:
'Query execution statistics - elapsed time, rows read, bytes read (only available with FORMAT JSON)',
},
},
}
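
A hedged sketch of the underlying Query API call: the SQL is form-encoded in the request body, and the `FORMAT` clause in the query decides whether the response parses as structured JSON or comes back as raw text.

```ts
async function runTinybirdQuery(baseUrl: string, token: string, sql: string) {
  const res = await fetch(`${baseUrl.replace(/\/$/, '')}/v0/sql`, {
    method: 'POST',
    headers: {
      Authorization: `Bearer ${token}`,
      'Content-Type': 'application/x-www-form-urlencoded',
    },
    body: new URLSearchParams({ q: sql }).toString(),
  })
  const text = await res.text()
  // FORMAT JSON responses parse into { data, rows, statistics };
  // CSV/TSV and friends come back as plain text.
  return (res.headers.get('content-type') ?? '').includes('json') ? JSON.parse(text) : text
}

runTinybirdQuery(
  'https://api.tinybird.co',
  'p.placeholder_token',
  'SELECT count() AS events FROM page_views FORMAT JSON'
).then(console.log)
```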

View File

@@ -0,0 +1,59 @@
import type { ToolResponse } from '@/tools/types'
/**
* Base parameters for Tinybird API tools
*/
export interface TinybirdBaseParams {
token: string
}
/**
* Parameters for sending events to Tinybird
*/
export interface TinybirdEventsParams extends TinybirdBaseParams {
base_url: string
datasource: string
data: string
wait?: boolean
format?: 'ndjson' | 'json'
compression?: 'none' | 'gzip'
}
/**
* Response from sending events to Tinybird
*/
export interface TinybirdEventsResponse extends ToolResponse {
output: {
successful_rows: number
quarantined_rows: number
}
}
/**
* Parameters for querying Tinybird
*/
export interface TinybirdQueryParams extends TinybirdBaseParams {
base_url: string
query: string
pipeline?: string
}
/**
* Response from querying Tinybird
*/
export interface TinybirdQueryResponse extends ToolResponse {
output: {
data: unknown[] | string
rows?: number
statistics?: {
elapsed: number
rows_read: number
bytes_read: number
}
}
}
/**
* Union type for all possible Tinybird responses
*/
export type TinybirdResponse = TinybirdEventsResponse | TinybirdQueryResponse

View File

@@ -3,6 +3,7 @@ import { getBaseUrl } from '@/lib/core/utils/urls'
import { AGENT, isCustomTool } from '@/executor/constants'
import { useCustomToolsStore } from '@/stores/custom-tools'
import { useEnvironmentStore } from '@/stores/settings/environment'
import { extractErrorMessage } from '@/tools/error-extractors'
import { tools } from '@/tools/registry'
import type { TableRow, ToolConfig, ToolResponse } from '@/tools/types'
@@ -162,14 +163,22 @@ export async function executeRequest(
const externalResponse = await fetch(url, { method, headers, body })
if (!externalResponse.ok) {
let errorContent
let errorData: any
try {
errorContent = await externalResponse.json()
errorData = await externalResponse.json()
} catch (_e) {
errorContent = { message: externalResponse.statusText }
try {
errorData = await externalResponse.text()
} catch (_e2) {
errorData = null
}
}
const error = errorContent.message || `${toolId} API error: ${externalResponse.statusText}`
const error = extractErrorMessage({
status: externalResponse.status,
statusText: externalResponse.statusText,
data: errorData,
})
logger.error(`${toolId} error:`, { error })
throw new Error(error)
}

View File

@@ -96,23 +96,3 @@ export function buildMeetingOutputs(): Record<string, TriggerOutput> {
},
} as Record<string, TriggerOutput>
}
/**
* Build output schema for generic webhook events
*/
export function buildGenericOutputs(): Record<string, TriggerOutput> {
return {
payload: {
type: 'object',
description: 'Raw webhook payload',
},
headers: {
type: 'object',
description: 'Request headers',
},
timestamp: {
type: 'string',
description: 'ISO8601 received timestamp',
},
} as Record<string, TriggerOutput>
}

View File

@@ -1,6 +1,6 @@
import { CirclebackIcon } from '@/components/icons'
import type { TriggerConfig } from '@/triggers/types'
import { buildGenericOutputs, circlebackSetupInstructions, circlebackTriggerOptions } from './utils'
import { buildMeetingOutputs, circlebackSetupInstructions, circlebackTriggerOptions } from './utils'
export const circlebackWebhookTrigger: TriggerConfig = {
id: 'circleback_webhook',
@@ -74,7 +74,7 @@ export const circlebackWebhookTrigger: TriggerConfig = {
},
],
outputs: buildGenericOutputs(),
outputs: buildMeetingOutputs(),
webhook: {
method: 'POST',

View File

@@ -31,8 +31,14 @@ export const TRIGGER_PERSISTED_SUBBLOCK_IDS: string[] = [
/**
* Trigger-related subblock IDs that represent runtime metadata. They should remain
* in the workflow state but must not be modified or cleared by diff operations.
*
* Note: 'triggerConfig' is included because it's an aggregate of individual trigger
* field subblocks. Those individual fields are compared separately, so comparing
* triggerConfig would be redundant. Additionally, the client populates triggerConfig
* with default values from the trigger definition on load, which aren't present in
* the deployed state, causing false positive change detection.
*/
export const TRIGGER_RUNTIME_SUBBLOCK_IDS: string[] = ['webhookId', 'triggerPath']
export const TRIGGER_RUNTIME_SUBBLOCK_IDS: string[] = ['webhookId', 'triggerPath', 'triggerConfig']
/**
* Maximum number of consecutive failures before a trigger (schedule/webhook) is auto-disabled.

View File

@@ -116,6 +116,11 @@ export const githubIssueClosedTrigger: TriggerConfig = {
],
outputs: {
event_type: {
type: 'string',
description:
'GitHub event type from X-GitHub-Event header (e.g., issues, pull_request, push)',
},
action: {
type: 'string',
description: 'Action performed (opened, closed, reopened, edited, etc.)',

View File

@@ -117,6 +117,10 @@ export const githubIssueCommentTrigger: TriggerConfig = {
],
outputs: {
event_type: {
type: 'string',
description: 'GitHub event type from X-GitHub-Event header (e.g., issue_comment)',
},
action: {
type: 'string',
description: 'Action performed (created, edited, deleted)',

View File

@@ -137,6 +137,11 @@ export const githubIssueOpenedTrigger: TriggerConfig = {
],
outputs: {
event_type: {
type: 'string',
description:
'GitHub event type from X-GitHub-Event header (e.g., issues, pull_request, push)',
},
action: {
type: 'string',
description: 'Action performed (opened, closed, reopened, edited, etc.)',

View File

@@ -117,6 +117,10 @@ export const githubPRClosedTrigger: TriggerConfig = {
],
outputs: {
event_type: {
type: 'string',
description: 'GitHub event type from X-GitHub-Event header (e.g., pull_request)',
},
action: {
type: 'string',
description: 'Action performed (opened, closed, synchronize, reopened, edited, etc.)',

View File

@@ -117,6 +117,10 @@ export const githubPRCommentTrigger: TriggerConfig = {
],
outputs: {
event_type: {
type: 'string',
description: 'GitHub event type from X-GitHub-Event header (e.g., issue_comment)',
},
action: {
type: 'string',
description: 'Action performed (created, edited, deleted)',

View File

@@ -116,6 +116,10 @@ export const githubPRMergedTrigger: TriggerConfig = {
],
outputs: {
event_type: {
type: 'string',
description: 'GitHub event type from X-GitHub-Event header (e.g., pull_request)',
},
action: {
type: 'string',
description: 'Action performed (opened, closed, synchronize, reopened, edited, etc.)',

View File

@@ -116,6 +116,10 @@ export const githubPROpenedTrigger: TriggerConfig = {
],
outputs: {
event_type: {
type: 'string',
description: 'GitHub event type from X-GitHub-Event header (e.g., pull_request)',
},
action: {
type: 'string',
description: 'Action performed (opened, closed, synchronize, reopened, edited, etc.)',

View File

@@ -117,6 +117,10 @@ export const githubPRReviewedTrigger: TriggerConfig = {
],
outputs: {
event_type: {
type: 'string',
description: 'GitHub event type from X-GitHub-Event header (e.g., pull_request_review)',
},
action: {
type: 'string',
description: 'Action performed (submitted, edited, dismissed)',

View File

@@ -116,6 +116,14 @@ export const githubPushTrigger: TriggerConfig = {
],
outputs: {
event_type: {
type: 'string',
description: 'GitHub event type from X-GitHub-Event header (e.g., push)',
},
branch: {
type: 'string',
description: 'Branch name derived from ref (e.g., main from refs/heads/main)',
},
ref: {
type: 'string',
description: 'Git reference that was pushed (e.g., refs/heads/main)',

View File

@@ -116,6 +116,10 @@ export const githubReleasePublishedTrigger: TriggerConfig = {
],
outputs: {
event_type: {
type: 'string',
description: 'GitHub event type from X-GitHub-Event header (e.g., release)',
},
action: {
type: 'string',
description:

View File

@@ -117,6 +117,10 @@ export const githubWorkflowRunTrigger: TriggerConfig = {
],
outputs: {
event_type: {
type: 'string',
description: 'GitHub event type from X-GitHub-Event header (e.g., workflow_run)',
},
action: {
type: 'string',
description: 'Action performed (requested, in_progress, completed)',

View File

@@ -265,11 +265,6 @@ function buildBaseWebhookOutputs(): Record<string, TriggerOutput> {
},
},
},
webhook: {
type: 'json',
description: 'Webhook metadata including provider, path, and raw payload',
},
}
}

View File

@@ -1,7 +1,7 @@
import { LemlistIcon } from '@/components/icons'
import { buildTriggerSubBlocks } from '@/triggers'
import {
buildActivityOutputs,
buildEmailBouncedOutputs,
buildLemlistExtraFields,
lemlistSetupInstructions,
lemlistTriggerOptions,
@@ -27,7 +27,7 @@ export const lemlistEmailBouncedTrigger: TriggerConfig = {
extraFields: buildLemlistExtraFields('lemlist_email_bounced'),
}),
outputs: buildActivityOutputs(),
outputs: buildEmailBouncedOutputs(),
webhook: {
method: 'POST',

View File

@@ -1,7 +1,7 @@
import { LemlistIcon } from '@/components/icons'
import { buildTriggerSubBlocks } from '@/triggers'
import {
buildActivityOutputs,
buildEmailClickedOutputs,
buildLemlistExtraFields,
lemlistSetupInstructions,
lemlistTriggerOptions,
@@ -27,7 +27,7 @@ export const lemlistEmailClickedTrigger: TriggerConfig = {
extraFields: buildLemlistExtraFields('lemlist_email_clicked'),
}),
outputs: buildActivityOutputs(),
outputs: buildEmailClickedOutputs(),
webhook: {
method: 'POST',

View File

@@ -1,7 +1,7 @@
import { LemlistIcon } from '@/components/icons'
import { buildTriggerSubBlocks } from '@/triggers'
import {
buildActivityOutputs,
buildEmailOpenedOutputs,
buildLemlistExtraFields,
lemlistSetupInstructions,
lemlistTriggerOptions,
@@ -27,7 +27,7 @@ export const lemlistEmailOpenedTrigger: TriggerConfig = {
extraFields: buildLemlistExtraFields('lemlist_email_opened'),
}),
outputs: buildActivityOutputs(),
outputs: buildEmailOpenedOutputs(),
webhook: {
method: 'POST',

View File

@@ -1,7 +1,7 @@
import { LemlistIcon } from '@/components/icons'
import { buildTriggerSubBlocks } from '@/triggers'
import {
buildEmailReplyOutputs,
buildEmailRepliedOutputs,
buildLemlistExtraFields,
lemlistSetupInstructions,
lemlistTriggerOptions,
@@ -30,7 +30,7 @@ export const lemlistEmailRepliedTrigger: TriggerConfig = {
extraFields: buildLemlistExtraFields('lemlist_email_replied'),
}),
outputs: buildEmailReplyOutputs(),
outputs: buildEmailRepliedOutputs(),
webhook: {
method: 'POST',

View File

@@ -1,7 +1,7 @@
import { LemlistIcon } from '@/components/icons'
import { buildTriggerSubBlocks } from '@/triggers'
import {
buildActivityOutputs,
buildEmailSentOutputs,
buildLemlistExtraFields,
lemlistSetupInstructions,
lemlistTriggerOptions,
@@ -27,7 +27,7 @@ export const lemlistEmailSentTrigger: TriggerConfig = {
extraFields: buildLemlistExtraFields('lemlist_email_sent'),
}),
outputs: buildActivityOutputs(),
outputs: buildEmailSentOutputs(),
webhook: {
method: 'POST',

View File

@@ -1,7 +1,7 @@
import { LemlistIcon } from '@/components/icons'
import { buildTriggerSubBlocks } from '@/triggers'
import {
buildActivityOutputs,
buildInterestOutputs,
buildLemlistExtraFields,
lemlistSetupInstructions,
lemlistTriggerOptions,
@@ -27,7 +27,7 @@ export const lemlistInterestedTrigger: TriggerConfig = {
extraFields: buildLemlistExtraFields('lemlist_interested'),
}),
outputs: buildActivityOutputs(),
outputs: buildInterestOutputs(),
webhook: {
method: 'POST',

View File

@@ -2,7 +2,7 @@ import { LemlistIcon } from '@/components/icons'
import { buildTriggerSubBlocks } from '@/triggers'
import {
buildLemlistExtraFields,
buildLinkedInReplyOutputs,
buildLinkedInRepliedOutputs,
lemlistSetupInstructions,
lemlistTriggerOptions,
} from '@/triggers/lemlist/utils'
@@ -27,7 +27,7 @@ export const lemlistLinkedInRepliedTrigger: TriggerConfig = {
extraFields: buildLemlistExtraFields('lemlist_linkedin_replied'),
}),
outputs: buildLinkedInReplyOutputs(),
outputs: buildLinkedInRepliedOutputs(),
webhook: {
method: 'POST',

View File

@@ -1,7 +1,7 @@
import { LemlistIcon } from '@/components/icons'
import { buildTriggerSubBlocks } from '@/triggers'
import {
buildActivityOutputs,
buildInterestOutputs,
buildLemlistExtraFields,
lemlistSetupInstructions,
lemlistTriggerOptions,
@@ -27,7 +27,7 @@ export const lemlistNotInterestedTrigger: TriggerConfig = {
extraFields: buildLemlistExtraFields('lemlist_not_interested'),
}),
outputs: buildActivityOutputs(),
outputs: buildInterestOutputs(),
webhook: {
method: 'POST',

View File

@@ -66,203 +66,254 @@ export function buildLemlistExtraFields(triggerId: string) {
}
/**
* Base activity outputs shared across all Lemlist triggers
* Core fields present in ALL Lemlist webhook payloads
* See: https://help.lemlist.com/en/articles/9423940-use-the-api-to-list-activity-types
*/
function buildBaseActivityOutputs(): Record<string, TriggerOutput> {
const coreOutputs = {
_id: {
type: 'string',
description: 'Unique activity identifier',
},
type: {
type: 'string',
description: 'Activity type (e.g., emailsSent, emailsReplied)',
},
createdAt: {
type: 'string',
description: 'Activity creation timestamp (ISO 8601)',
},
teamId: {
type: 'string',
description: 'Lemlist team identifier',
},
leadId: {
type: 'string',
description: 'Lead identifier',
},
campaignId: {
type: 'string',
description: 'Campaign identifier',
},
campaignName: {
type: 'string',
description: 'Campaign name',
},
} as const
/**
* Lead fields present in webhook payloads
*/
const leadOutputs = {
email: {
type: 'string',
description: 'Lead email address',
},
firstName: {
type: 'string',
description: 'Lead first name',
},
lastName: {
type: 'string',
description: 'Lead last name',
},
companyName: {
type: 'string',
description: 'Lead company name',
},
linkedinUrl: {
type: 'string',
description: 'Lead LinkedIn profile URL',
},
} as const
/**
* Sequence/campaign tracking fields for email activities
*/
const sequenceOutputs = {
sequenceId: {
type: 'string',
description: 'Sequence identifier',
},
sequenceStep: {
type: 'number',
description: 'Current step in the sequence (0-indexed)',
},
totalSequenceStep: {
type: 'number',
description: 'Total number of steps in the sequence',
},
isFirst: {
type: 'boolean',
description: 'Whether this is the first activity of this type for this step',
},
} as const
/**
* Sender information fields
*/
const senderOutputs = {
sendUserId: {
type: 'string',
description: 'Sender user identifier',
},
sendUserEmail: {
type: 'string',
description: 'Sender email address',
},
sendUserName: {
type: 'string',
description: 'Sender display name',
},
} as const
/**
* Email content fields
*/
const emailContentOutputs = {
subject: {
type: 'string',
description: 'Email subject line',
},
text: {
type: 'string',
description: 'Email body content (HTML)',
},
messageId: {
type: 'string',
description: 'Email message ID (RFC 2822 format)',
},
emailId: {
type: 'string',
description: 'Lemlist email identifier',
},
} as const
/**
* Build outputs for email sent events
*/
export function buildEmailSentOutputs(): Record<string, TriggerOutput> {
return {
type: {
type: 'string',
description: 'Activity type (emailsReplied, linkedinReplied, interested, emailsOpened, etc.)',
},
_id: {
type: 'string',
description: 'Unique activity identifier',
},
leadId: {
type: 'string',
description: 'Associated lead ID',
},
campaignId: {
type: 'string',
description: 'Campaign ID',
},
campaignName: {
type: 'string',
description: 'Campaign name',
},
sequenceId: {
type: 'string',
description: 'Sequence ID within the campaign',
},
stepId: {
type: 'string',
description: 'Step ID that triggered this activity',
},
createdAt: {
type: 'string',
description: 'When the activity occurred (ISO 8601)',
},
}
...coreOutputs,
...leadOutputs,
...sequenceOutputs,
...senderOutputs,
...emailContentOutputs,
} as Record<string, TriggerOutput>
}
/**
* Lead outputs - information about the lead
* Build outputs for email replied events
*/
function buildLeadOutputs(): Record<string, TriggerOutput> {
export function buildEmailRepliedOutputs(): Record<string, TriggerOutput> {
return {
lead: {
_id: {
type: 'string',
description: 'Lead unique identifier',
},
email: {
type: 'string',
description: 'Lead email address',
},
firstName: {
type: 'string',
description: 'Lead first name',
},
lastName: {
type: 'string',
description: 'Lead last name',
},
companyName: {
type: 'string',
description: 'Lead company name',
},
phone: {
type: 'string',
description: 'Lead phone number',
},
linkedinUrl: {
type: 'string',
description: 'Lead LinkedIn profile URL',
},
picture: {
type: 'string',
description: 'Lead profile picture URL',
},
icebreaker: {
type: 'string',
description: 'Personalized icebreaker text',
},
timezone: {
type: 'string',
description: 'Lead timezone (e.g., America/New_York)',
},
isUnsubscribed: {
type: 'boolean',
description: 'Whether the lead is unsubscribed',
},
},
}
...coreOutputs,
...leadOutputs,
...sequenceOutputs,
...senderOutputs,
...emailContentOutputs,
} as Record<string, TriggerOutput>
}
/**
* Standard activity outputs (activity + lead data)
* Build outputs for email opened events
*/
export function buildActivityOutputs(): Record<string, TriggerOutput> {
export function buildEmailOpenedOutputs(): Record<string, TriggerOutput> {
return {
...buildBaseActivityOutputs(),
...buildLeadOutputs(),
webhook: {
type: 'json',
description: 'Full webhook payload with all activity-specific data',
},
}
}
/**
* Email-specific outputs (includes message content for replies)
*/
export function buildEmailReplyOutputs(): Record<string, TriggerOutput> {
return {
...buildBaseActivityOutputs(),
...buildLeadOutputs(),
...coreOutputs,
...leadOutputs,
...sequenceOutputs,
...senderOutputs,
messageId: {
type: 'string',
description: 'Email message ID',
description: 'Email message ID that was opened',
},
subject: {
type: 'string',
description: 'Email subject line',
},
text: {
type: 'string',
description: 'Email reply text content',
},
html: {
type: 'string',
description: 'Email reply HTML content',
},
sentAt: {
type: 'string',
description: 'When the reply was sent',
},
webhook: {
type: 'json',
description: 'Full webhook payload with all email data',
},
}
} as Record<string, TriggerOutput>
}
/**
* LinkedIn-specific outputs (includes message content)
* Build outputs for email clicked events
*/
export function buildLinkedInReplyOutputs(): Record<string, TriggerOutput> {
export function buildEmailClickedOutputs(): Record<string, TriggerOutput> {
return {
...buildBaseActivityOutputs(),
...buildLeadOutputs(),
...coreOutputs,
...leadOutputs,
...sequenceOutputs,
...senderOutputs,
messageId: {
type: 'string',
description: 'LinkedIn message ID',
description: 'Email message ID containing the clicked link',
},
text: {
clickedUrl: {
type: 'string',
description: 'LinkedIn message text content',
description: 'URL that was clicked',
},
sentAt: {
type: 'string',
description: 'When the message was sent',
},
webhook: {
type: 'json',
description: 'Full webhook payload with all LinkedIn data',
},
}
} as Record<string, TriggerOutput>
}
/**
* All outputs for generic webhook (activity + lead + all possible fields)
* Build outputs for email bounced events
*/
export function buildAllOutputs(): Record<string, TriggerOutput> {
export function buildEmailBouncedOutputs(): Record<string, TriggerOutput> {
return {
...buildBaseActivityOutputs(),
...buildLeadOutputs(),
...coreOutputs,
...leadOutputs,
...sequenceOutputs,
...senderOutputs,
messageId: {
type: 'string',
description: 'Message ID (for email/LinkedIn events)',
description: 'Email message ID that bounced',
},
subject: {
errorMessage: {
type: 'string',
description: 'Email subject (for email events)',
description: 'Bounce error message',
},
} as Record<string, TriggerOutput>
}
/**
* Build outputs for LinkedIn replied events
*/
export function buildLinkedInRepliedOutputs(): Record<string, TriggerOutput> {
return {
...coreOutputs,
...leadOutputs,
...sequenceOutputs,
text: {
type: 'string',
description: 'Message text content',
description: 'LinkedIn message content',
},
html: {
type: 'string',
description: 'Message HTML content (for email events)',
},
sentAt: {
type: 'string',
description: 'When the message was sent',
},
webhook: {
type: 'json',
description: 'Full webhook payload with all data',
},
}
} as Record<string, TriggerOutput>
}
/**
* Build outputs for interested/not interested events
*/
export function buildInterestOutputs(): Record<string, TriggerOutput> {
return {
...coreOutputs,
...leadOutputs,
...sequenceOutputs,
} as Record<string, TriggerOutput>
}
/**
* Build outputs for generic webhook (all events)
* Includes all possible fields across event types
*/
export function buildLemlistOutputs(): Record<string, TriggerOutput> {
return {
...coreOutputs,
...leadOutputs,
...sequenceOutputs,
...senderOutputs,
...emailContentOutputs,
clickedUrl: {
type: 'string',
description: 'URL that was clicked (for emailsClicked events)',
},
errorMessage: {
type: 'string',
description: 'Error message (for bounce/failed events)',
},
} as Record<string, TriggerOutput>
}

View File

@@ -1,8 +1,8 @@
import { LemlistIcon } from '@/components/icons'
import { buildTriggerSubBlocks } from '@/triggers'
import {
- buildAllOutputs,
buildLemlistExtraFields,
+ buildLemlistOutputs,
lemlistSetupInstructions,
lemlistTriggerOptions,
} from '@/triggers/lemlist/utils'
@@ -27,7 +27,7 @@ export const lemlistWebhookTrigger: TriggerConfig = {
extraFields: buildLemlistExtraFields('lemlist_webhook'),
}),
- outputs: buildAllOutputs(),
+ outputs: buildLemlistOutputs(),
webhook: {
method: 'POST',

View File

@@ -110,6 +110,7 @@ export const telegramWebhookTrigger: TriggerConfig = {
},
sender: {
id: { type: 'number', description: 'Sender user ID' },
username: { type: 'string', description: 'Sender username (if available)' },
firstName: { type: 'string', description: 'Sender first name' },
lastName: { type: 'string', description: 'Sender last name' },
languageCode: { type: 'string', description: 'Sender language code (if available)' },

View File

@@ -136,6 +136,8 @@ export const typeformWebhookTrigger: TriggerConfig = {
'Array of respondent answers (only includes answered questions). Each answer contains type, value, and field reference.',
},
definition: {
description:
'Form definition (only included when "Include Form Definition" is enabled in trigger settings)',
id: {
type: 'string',
description: 'Form ID',

View File

@@ -96,10 +96,6 @@ export const webflowCollectionItemChangedTrigger: TriggerConfig = {
type: 'string',
description: 'The site ID where the event occurred',
},
- workspaceId: {
- type: 'string',
- description: 'The workspace ID where the event occurred',
- },
collectionId: {
type: 'string',
description: 'The collection ID where the item was changed',

View File

@@ -109,10 +109,6 @@ export const webflowCollectionItemCreatedTrigger: TriggerConfig = {
type: 'string',
description: 'The site ID where the event occurred',
},
- workspaceId: {
- type: 'string',
- description: 'The workspace ID where the event occurred',
- },
collectionId: {
type: 'string',
description: 'The collection ID where the item was created',

View File

@@ -97,10 +97,6 @@ export const webflowCollectionItemDeletedTrigger: TriggerConfig = {
type: 'string',
description: 'The site ID where the event occurred',
},
- workspaceId: {
- type: 'string',
- description: 'The workspace ID where the event occurred',
- },
collectionId: {
type: 'string',
description: 'The collection ID where the item was deleted',

View File

@@ -76,9 +76,9 @@ export const webflowFormSubmissionTrigger: TriggerConfig = {
type: 'string',
description: 'The site ID where the form was submitted',
},
- workspaceId: {
+ formId: {
type: 'string',
- description: 'The workspace ID where the event occurred',
+ description: 'The form ID',
},
name: {
type: 'string',

View File

@@ -134,6 +134,7 @@
"groq-sdk": "^0.15.0",
"html-to-image": "1.11.13",
"html-to-text": "^9.0.5",
"idb-keyval": "6.2.2",
"imapflow": "1.2.4",
"input-otp": "^1.4.2",
"ioredis": "^5.6.0",
@@ -2311,6 +2312,8 @@
"iconv-lite": ["iconv-lite@0.6.3", "", { "dependencies": { "safer-buffer": ">= 2.1.2 < 3.0.0" } }, "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw=="],
"idb-keyval": ["idb-keyval@6.2.2", "", {}, "sha512-yjD9nARJ/jb1g+CvD0tlhUHOrJ9Sy0P8T9MF3YaLlHnSRpwPfpTX0XIvpmw3gAJUmEu3FiICLBDPXVwyEvrleg=="],
"ieee754": ["ieee754@1.2.1", "", {}, "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA=="],
"image-size": ["image-size@2.0.2", "", { "bin": { "image-size": "bin/image-size.js" } }, "sha512-IRqXKlaXwgSMAMtpNzZa1ZAe8m+Sa1770Dhk8VkSsP9LS+iHD62Zd8FQKs8fbPiagBE7BzoFX23cxFnwshpV6w=="],

View File

@@ -0,0 +1,2 @@
DROP INDEX "account_user_provider_account_unique";--> statement-breakpoint
CREATE UNIQUE INDEX "account_user_provider_unique" ON "account" USING btree ("user_id","provider_id");

File diff suppressed because it is too large

View File

@@ -974,6 +974,13 @@
"when": 1768260112533,
"tag": "0139_late_cargill",
"breakpoints": true
},
{
"idx": 140,
"version": "7",
"when": 1768366574848,
"tag": "0140_fuzzy_the_twelve",
"breakpoints": true
}
]
}

View File

@@ -89,10 +89,9 @@ export const account = pgTable(
table.accountId,
table.providerId
),
- uniqueUserProviderAccount: uniqueIndex('account_user_provider_account_unique').on(
+ uniqueUserProvider: uniqueIndex('account_user_provider_unique').on(
table.userId,
- table.providerId,
- table.accountId
+ table.providerId
),
})
)