Mirror of https://github.com/simstudioai/sim.git (synced 2026-01-14 01:18:15 -05:00)

Compare commits

7 commits: fix/add-de ... fix/consol

| Author | SHA1 | Date |
| --- | --- | --- |
| | d4b04e1a76 | |
| | 4b026ad54d | |
| | f6b7c15dc4 | |
| | 70ed19fcdb | |
| | d6e4c91e81 | |
| | e3fa40af11 | |
| | 6e0055f847 | |
@@ -1855,17 +1855,25 @@ export function LinearIcon(props: React.SVGProps<SVGSVGElement>) {

export function LemlistIcon(props: SVGProps<SVGSVGElement>) {
return (
<svg
{...props}
xmlns='http://www.w3.org/2000/svg'
viewBox='0 0 24 24'
width='24'
height='24'
fill='none'
>
<rect width='24' height='24' rx='4' fill='#316BFF' />
<path d='M7 6h2v9h5v2H7V6Z' fill='white' />
<circle cx='17' cy='8' r='2' fill='white' />
<svg {...props} xmlns='http://www.w3.org/2000/svg' viewBox='0 0 180 181' fill='none'>
<path
fillRule='evenodd'
clipRule='evenodd'
d='M32.0524 0.919922H147.948C165.65 0.919922 180 15.2703 180 32.9723V148.867C180 166.57 165.65 180.92 147.948 180.92H32.0524C14.3504 180.92 0 166.57 0 148.867V32.9723C0 15.2703 14.3504 0.919922 32.0524 0.919922ZM119.562 82.8879H85.0826C82.4732 82.8879 80.3579 85.0032 80.3579 87.6126V94.2348C80.3579 96.8442 82.4732 98.9595 85.0826 98.9595H119.562C122.171 98.9595 124.286 96.8442 124.286 94.2348V87.6126C124.286 85.0032 122.171 82.8879 119.562 82.8879ZM85.0826 49.1346H127.061C129.67 49.1346 131.785 51.2499 131.785 53.8593V60.4815C131.785 63.0909 129.67 65.2062 127.061 65.2062H85.0826C82.4732 65.2062 80.3579 63.0909 80.3579 60.4815V53.8593C80.3579 51.2499 82.4732 49.1346 85.0826 49.1346ZM131.785 127.981V121.358C131.785 118.75 129.669 116.634 127.061 116.634H76.5706C69.7821 116.634 64.2863 111.138 64.2863 104.349V53.8593C64.2863 51.2513 62.1697 49.1346 59.5616 49.1346H52.9395C50.3314 49.1346 48.2147 51.2513 48.2147 53.8593V114.199C48.8497 124.133 56.7873 132.07 66.7205 132.705H127.061C129.669 132.705 131.785 130.589 131.785 127.981Z'
fill='#316BFF'
/>
<path
d='M85.0826 49.1346H127.061C129.67 49.1346 131.785 51.2499 131.785 53.8593V60.4815C131.785 63.0909 129.67 65.2062 127.061 65.2062H85.0826C82.4732 65.2062 80.3579 63.0909 80.3579 60.4815V53.8593C80.3579 51.2499 82.4732 49.1346 85.0826 49.1346Z'
fill='white'
/>
<path
d='M85.0826 82.8879H119.562C122.171 82.8879 124.286 85.0032 124.286 87.6126V94.2348C124.286 96.8442 122.171 98.9595 119.562 98.9595H85.0826C82.4732 98.9595 80.3579 96.8442 80.3579 94.2348V87.6126C80.3579 85.0032 82.4732 82.8879 85.0826 82.8879Z'
fill='white'
/>
<path
d='M131.785 121.358V127.981C131.785 130.589 129.669 132.705 127.061 132.705H66.7205C56.7873 132.07 48.8497 124.133 48.2147 114.199V53.8593C48.2147 51.2513 50.3314 49.1346 52.9395 49.1346H59.5616C62.1697 49.1346 64.2863 51.2513 64.2863 53.8593V104.349C64.2863 111.138 69.7821 116.634 76.5706 116.634H127.061C129.669 116.634 131.785 118.75 131.785 121.358Z'
fill='white'
/>
</svg>
)
}
@@ -44,6 +44,8 @@ Send a message to an external A2A-compatible agent.

| `message` | string | Yes | Message to send to the agent |
| `taskId` | string | No | Task ID for continuing an existing task |
| `contextId` | string | No | Context ID for conversation continuity |
| `data` | string | No | Structured data to include with the message \(JSON string\) |
| `files` | array | No | Files to include with the message |
| `apiKey` | string | No | API key for authentication |
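For illustration only, a minimal `a2a_send_message` input assembled from the parameters above might look like the sketch below. All values are placeholders: the `agentUrl` field comes from the request schema later in this diff, and the `files` entry follows the `FileInputSchema` shape shown there as well.

```ts
// Hypothetical input for a2a_send_message (values are placeholders).
const input = {
  agentUrl: 'https://agents.example.com/api/a2a/serve/agent-123', // assumed endpoint shape
  message: 'Summarize the attached report',
  contextId: 'ctx-42', // optional: keeps the conversation going
  data: JSON.stringify({ priority: 'high' }), // optional JSON string, becomes a DataPart
  files: [
    // follows FileInputSchema: { type, data, name, mime? }
    { type: 'url', name: 'report.pdf', data: 'https://example.com/report.pdf', mime: 'application/pdf' },
  ],
  apiKey: process.env.A2A_API_KEY, // optional
}
```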
#### Output

@@ -208,8 +210,3 @@ Delete the push notification webhook configuration for a task.

| `success` | boolean | Whether deletion was successful |

## Notes

- Category: `tools`
- Type: `a2a`
@@ -49,8 +49,7 @@ Retrieves lead information by email address or lead ID.

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Lemlist API key |
| `email` | string | No | Lead email address \(use either email or id\) |
| `id` | string | No | Lead ID \(use either email or id\) |
| `leadIdentifier` | string | Yes | Lead email address or lead ID |

#### Output
@@ -124,6 +124,45 @@ Read the latest messages from Slack channels. Retrieve conversation history with

| --------- | ---- | ----------- |
| `messages` | array | Array of message objects from the channel |

### `slack_get_message`

Retrieve a specific message by its timestamp. Useful for getting a thread parent message.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `authMethod` | string | No | Authentication method: oauth or bot_token |
| `botToken` | string | No | Bot token for Custom Bot |
| `channel` | string | Yes | Slack channel ID \(e.g., C1234567890\) |
| `timestamp` | string | Yes | Message timestamp to retrieve \(e.g., 1405894322.002768\) |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `message` | object | The retrieved message object |

### `slack_get_thread`

Retrieve an entire thread including the parent message and all replies. Useful for getting full conversation context.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `authMethod` | string | No | Authentication method: oauth or bot_token |
| `botToken` | string | No | Bot token for Custom Bot |
| `channel` | string | Yes | Slack channel ID \(e.g., C1234567890\) |
| `threadTs` | string | Yes | Thread timestamp \(thread_ts\) to retrieve \(e.g., 1405894322.002768\) |
| `limit` | number | No | Maximum number of messages to return \(default: 100, max: 200\) |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `parentMessage` | object | The thread parent message |

### `slack_list_channels`

List all channels in a Slack workspace. Returns public and private channels the bot has access to.
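The `timestamp` and `threadTs` values above use Slack's `seconds.microseconds` format. As a rough sketch (the helper name is hypothetical), the permalink-to-timestamp rule described by the wand prompts later in this diff (strip the leading `p`, insert a decimal point after the tenth digit) could be implemented as:

```ts
// Hypothetical helper: convert a Slack message permalink into a message timestamp.
// e.g. https://slack.com/archives/C123/p1405894322002768 -> '1405894322.002768'
function permalinkToTimestamp(url: string): string | null {
  const match = url.match(/\/p(\d{10})(\d{6})(?:\D|$)/)
  return match ? `${match[1]}.${match[2]}` : null
}
```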
@@ -1,150 +0,0 @@
|
||||
import type {
|
||||
Artifact,
|
||||
Message,
|
||||
Task,
|
||||
TaskArtifactUpdateEvent,
|
||||
TaskState,
|
||||
TaskStatusUpdateEvent,
|
||||
} from '@a2a-js/sdk'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { createA2AClient, extractTextContent, isTerminalState } from '@/lib/a2a/utils'
|
||||
import { checkHybridAuth } from '@/lib/auth/hybrid'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
const logger = createLogger('A2ASendMessageStreamAPI')
|
||||
|
||||
const A2ASendMessageStreamSchema = z.object({
|
||||
agentUrl: z.string().min(1, 'Agent URL is required'),
|
||||
message: z.string().min(1, 'Message is required'),
|
||||
taskId: z.string().optional(),
|
||||
contextId: z.string().optional(),
|
||||
apiKey: z.string().optional(),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const requestId = generateRequestId()
|
||||
|
||||
try {
|
||||
const authResult = await checkHybridAuth(request, { requireWorkflowId: false })
|
||||
|
||||
if (!authResult.success) {
|
||||
logger.warn(
|
||||
`[${requestId}] Unauthorized A2A send message stream attempt: ${authResult.error}`
|
||||
)
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: authResult.error || 'Authentication required',
|
||||
},
|
||||
{ status: 401 }
|
||||
)
|
||||
}
|
||||
|
||||
logger.info(
|
||||
`[${requestId}] Authenticated A2A send message stream request via ${authResult.authType}`,
|
||||
{
|
||||
userId: authResult.userId,
|
||||
}
|
||||
)
|
||||
|
||||
const body = await request.json()
|
||||
const validatedData = A2ASendMessageStreamSchema.parse(body)
|
||||
|
||||
logger.info(`[${requestId}] Sending A2A streaming message`, {
|
||||
agentUrl: validatedData.agentUrl,
|
||||
hasTaskId: !!validatedData.taskId,
|
||||
hasContextId: !!validatedData.contextId,
|
||||
})
|
||||
|
||||
const client = await createA2AClient(validatedData.agentUrl, validatedData.apiKey)
|
||||
|
||||
const message: Message = {
|
||||
kind: 'message',
|
||||
messageId: crypto.randomUUID(),
|
||||
role: 'user',
|
||||
parts: [{ kind: 'text', text: validatedData.message }],
|
||||
...(validatedData.taskId && { taskId: validatedData.taskId }),
|
||||
...(validatedData.contextId && { contextId: validatedData.contextId }),
|
||||
}
|
||||
|
||||
const stream = client.sendMessageStream({ message })
|
||||
|
||||
let taskId = ''
|
||||
let contextId: string | undefined
|
||||
let state: TaskState = 'working'
|
||||
let content = ''
|
||||
let artifacts: Artifact[] = []
|
||||
let history: Message[] = []
|
||||
|
||||
for await (const event of stream) {
|
||||
if (event.kind === 'message') {
|
||||
const msg = event as Message
|
||||
content = extractTextContent(msg)
|
||||
taskId = msg.taskId || taskId
|
||||
contextId = msg.contextId || contextId
|
||||
state = 'completed'
|
||||
} else if (event.kind === 'task') {
|
||||
const task = event as Task
|
||||
taskId = task.id
|
||||
contextId = task.contextId
|
||||
state = task.status.state
|
||||
artifacts = task.artifacts || []
|
||||
history = task.history || []
|
||||
const lastAgentMessage = history.filter((m) => m.role === 'agent').pop()
|
||||
if (lastAgentMessage) {
|
||||
content = extractTextContent(lastAgentMessage)
|
||||
}
|
||||
} else if ('status' in event) {
|
||||
const statusEvent = event as TaskStatusUpdateEvent
|
||||
state = statusEvent.status.state
|
||||
} else if ('artifact' in event) {
|
||||
const artifactEvent = event as TaskArtifactUpdateEvent
|
||||
artifacts.push(artifactEvent.artifact)
|
||||
}
|
||||
}
|
||||
|
||||
logger.info(`[${requestId}] A2A streaming message completed`, {
|
||||
taskId,
|
||||
state,
|
||||
artifactCount: artifacts.length,
|
||||
})
|
||||
|
||||
return NextResponse.json({
|
||||
success: isTerminalState(state) && state !== 'failed',
|
||||
output: {
|
||||
content,
|
||||
taskId,
|
||||
contextId,
|
||||
state,
|
||||
artifacts,
|
||||
history,
|
||||
},
|
||||
})
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: 'Invalid request data',
|
||||
details: error.errors,
|
||||
},
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
logger.error(`[${requestId}] Error in A2A streaming:`, error)
|
||||
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: error instanceof Error ? error.message : 'Streaming failed',
|
||||
},
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -1,4 +1,4 @@
|
||||
import type { Message, Task } from '@a2a-js/sdk'
|
||||
import type { DataPart, FilePart, Message, Part, Task, TextPart } from '@a2a-js/sdk'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
@@ -10,11 +10,20 @@ export const dynamic = 'force-dynamic'
|
||||
|
||||
const logger = createLogger('A2ASendMessageAPI')
|
||||
|
||||
const FileInputSchema = z.object({
|
||||
type: z.enum(['file', 'url']),
|
||||
data: z.string(),
|
||||
name: z.string(),
|
||||
mime: z.string().optional(),
|
||||
})
|
||||
|
||||
const A2ASendMessageSchema = z.object({
|
||||
agentUrl: z.string().min(1, 'Agent URL is required'),
|
||||
message: z.string().min(1, 'Message is required'),
|
||||
taskId: z.string().optional(),
|
||||
contextId: z.string().optional(),
|
||||
data: z.string().optional(),
|
||||
files: z.array(FileInputSchema).optional(),
|
||||
apiKey: z.string().optional(),
|
||||
})
|
||||
|
||||
@@ -51,18 +60,100 @@ export async function POST(request: NextRequest) {
|
||||
hasContextId: !!validatedData.contextId,
|
||||
})
|
||||
|
||||
const client = await createA2AClient(validatedData.agentUrl, validatedData.apiKey)
|
||||
let client
|
||||
try {
|
||||
client = await createA2AClient(validatedData.agentUrl, validatedData.apiKey)
|
||||
logger.info(`[${requestId}] A2A client created successfully`)
|
||||
} catch (clientError) {
|
||||
logger.error(`[${requestId}] Failed to create A2A client:`, clientError)
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: `Failed to connect to agent: ${clientError instanceof Error ? clientError.message : 'Unknown error'}`,
|
||||
},
|
||||
{ status: 502 }
|
||||
)
|
||||
}
|
||||
|
||||
const parts: Part[] = []
|
||||
|
||||
const textPart: TextPart = { kind: 'text', text: validatedData.message }
|
||||
parts.push(textPart)
|
||||
|
||||
if (validatedData.data) {
|
||||
try {
|
||||
const parsedData = JSON.parse(validatedData.data)
|
||||
const dataPart: DataPart = { kind: 'data', data: parsedData }
|
||||
parts.push(dataPart)
|
||||
} catch (parseError) {
|
||||
logger.warn(`[${requestId}] Failed to parse data as JSON, skipping DataPart`, {
|
||||
error: parseError instanceof Error ? parseError.message : String(parseError),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
if (validatedData.files && validatedData.files.length > 0) {
|
||||
for (const file of validatedData.files) {
|
||||
if (file.type === 'url') {
|
||||
const filePart: FilePart = {
|
||||
kind: 'file',
|
||||
file: {
|
||||
name: file.name,
|
||||
mimeType: file.mime,
|
||||
uri: file.data,
|
||||
},
|
||||
}
|
||||
parts.push(filePart)
|
||||
} else if (file.type === 'file') {
|
||||
let bytes = file.data
|
||||
let mimeType = file.mime
|
||||
|
||||
if (file.data.startsWith('data:')) {
|
||||
const match = file.data.match(/^data:([^;]+);base64,(.+)$/)
|
||||
if (match) {
|
||||
mimeType = mimeType || match[1]
|
||||
bytes = match[2]
|
||||
} else {
|
||||
bytes = file.data
|
||||
}
|
||||
}
|
||||
|
||||
const filePart: FilePart = {
|
||||
kind: 'file',
|
||||
file: {
|
||||
name: file.name,
|
||||
mimeType: mimeType || 'application/octet-stream',
|
||||
bytes,
|
||||
},
|
||||
}
|
||||
parts.push(filePart)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const message: Message = {
|
||||
kind: 'message',
|
||||
messageId: crypto.randomUUID(),
|
||||
role: 'user',
|
||||
parts: [{ kind: 'text', text: validatedData.message }],
|
||||
parts,
|
||||
...(validatedData.taskId && { taskId: validatedData.taskId }),
|
||||
...(validatedData.contextId && { contextId: validatedData.contextId }),
|
||||
}
|
||||
|
||||
const result = await client.sendMessage({ message })
|
||||
let result
|
||||
try {
|
||||
result = await client.sendMessage({ message })
|
||||
logger.info(`[${requestId}] A2A sendMessage completed`, { resultKind: result?.kind })
|
||||
} catch (sendError) {
|
||||
logger.error(`[${requestId}] Failed to send A2A message:`, sendError)
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: `Failed to send message: ${sendError instanceof Error ? sendError.message : 'Unknown error'}`,
|
||||
},
|
||||
{ status: 502 }
|
||||
)
|
||||
}
|
||||
|
||||
if (result.kind === 'message') {
|
||||
const responseMessage = result as Message
|
||||
|
||||
@@ -2,13 +2,6 @@ import { createSession, createWorkspaceRecord, loggerMock } from '@sim/testing'
|
||||
import { NextRequest } from 'next/server'
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
|
||||
/**
|
||||
* Tests for workspace invitation by ID API route
|
||||
* Tests GET (details + token acceptance), DELETE (cancellation)
|
||||
*
|
||||
* @vitest-environment node
|
||||
*/
|
||||
|
||||
const mockGetSession = vi.fn()
|
||||
const mockHasWorkspaceAdminAccess = vi.fn()
|
||||
|
||||
@@ -227,7 +220,7 @@ describe('Workspace Invitation [invitationId] API Route', () => {
|
||||
expect(response.headers.get('location')).toBe('https://test.sim.ai/workspace/workspace-456/w')
|
||||
})
|
||||
|
||||
it('should redirect to error page when invitation expired', async () => {
|
||||
it('should redirect to error page with token preserved when invitation expired', async () => {
|
||||
const session = createSession({
|
||||
userId: mockUser.id,
|
||||
email: 'invited@example.com',
|
||||
@@ -250,12 +243,13 @@ describe('Workspace Invitation [invitationId] API Route', () => {
|
||||
const response = await GET(request, { params })
|
||||
|
||||
expect(response.status).toBe(307)
|
||||
expect(response.headers.get('location')).toBe(
|
||||
'https://test.sim.ai/invite/invitation-789?error=expired'
|
||||
const location = response.headers.get('location')
|
||||
expect(location).toBe(
|
||||
'https://test.sim.ai/invite/invitation-789?error=expired&token=token-abc123'
|
||||
)
|
||||
})
|
||||
|
||||
it('should redirect to error page when email mismatch', async () => {
|
||||
it('should redirect to error page with token preserved when email mismatch', async () => {
|
||||
const session = createSession({
|
||||
userId: mockUser.id,
|
||||
email: 'wrong@example.com',
|
||||
@@ -277,12 +271,13 @@ describe('Workspace Invitation [invitationId] API Route', () => {
|
||||
const response = await GET(request, { params })
|
||||
|
||||
expect(response.status).toBe(307)
|
||||
expect(response.headers.get('location')).toBe(
|
||||
'https://test.sim.ai/invite/invitation-789?error=email-mismatch'
|
||||
const location = response.headers.get('location')
|
||||
expect(location).toBe(
|
||||
'https://test.sim.ai/invite/invitation-789?error=email-mismatch&token=token-abc123'
|
||||
)
|
||||
})
|
||||
|
||||
it('should return 404 when invitation not found', async () => {
|
||||
it('should return 404 when invitation not found (without token)', async () => {
|
||||
const session = createSession({ userId: mockUser.id, email: mockUser.email })
|
||||
mockGetSession.mockResolvedValue(session)
|
||||
dbSelectResults = [[]]
|
||||
@@ -296,6 +291,189 @@ describe('Workspace Invitation [invitationId] API Route', () => {
|
||||
expect(response.status).toBe(404)
|
||||
expect(data).toEqual({ error: 'Invitation not found or has expired' })
|
||||
})
|
||||
|
||||
it('should redirect to error page with token preserved when invitation not found (with token)', async () => {
|
||||
const session = createSession({ userId: mockUser.id, email: mockUser.email })
|
||||
mockGetSession.mockResolvedValue(session)
|
||||
dbSelectResults = [[]]
|
||||
|
||||
const request = new NextRequest(
|
||||
'http://localhost/api/workspaces/invitations/non-existent?token=some-invalid-token'
|
||||
)
|
||||
const params = Promise.resolve({ invitationId: 'non-existent' })
|
||||
|
||||
const response = await GET(request, { params })
|
||||
|
||||
expect(response.status).toBe(307)
|
||||
const location = response.headers.get('location')
|
||||
expect(location).toBe(
|
||||
'https://test.sim.ai/invite/non-existent?error=invalid-token&token=some-invalid-token'
|
||||
)
|
||||
})
|
||||
|
||||
it('should redirect to error page with token preserved when invitation already processed', async () => {
|
||||
const session = createSession({
|
||||
userId: mockUser.id,
|
||||
email: 'invited@example.com',
|
||||
name: mockUser.name,
|
||||
})
|
||||
mockGetSession.mockResolvedValue(session)
|
||||
|
||||
const acceptedInvitation = {
|
||||
...mockInvitation,
|
||||
status: 'accepted',
|
||||
}
|
||||
|
||||
dbSelectResults = [[acceptedInvitation], [mockWorkspace]]
|
||||
|
||||
const request = new NextRequest(
|
||||
'http://localhost/api/workspaces/invitations/token-abc123?token=token-abc123'
|
||||
)
|
||||
const params = Promise.resolve({ invitationId: 'token-abc123' })
|
||||
|
||||
const response = await GET(request, { params })
|
||||
|
||||
expect(response.status).toBe(307)
|
||||
const location = response.headers.get('location')
|
||||
expect(location).toBe(
|
||||
'https://test.sim.ai/invite/invitation-789?error=already-processed&token=token-abc123'
|
||||
)
|
||||
})
|
||||
|
||||
it('should redirect to error page with token preserved when workspace not found', async () => {
|
||||
const session = createSession({
|
||||
userId: mockUser.id,
|
||||
email: 'invited@example.com',
|
||||
name: mockUser.name,
|
||||
})
|
||||
mockGetSession.mockResolvedValue(session)
|
||||
|
||||
dbSelectResults = [[mockInvitation], []]
|
||||
|
||||
const request = new NextRequest(
|
||||
'http://localhost/api/workspaces/invitations/token-abc123?token=token-abc123'
|
||||
)
|
||||
const params = Promise.resolve({ invitationId: 'token-abc123' })
|
||||
|
||||
const response = await GET(request, { params })
|
||||
|
||||
expect(response.status).toBe(307)
|
||||
const location = response.headers.get('location')
|
||||
expect(location).toBe(
|
||||
'https://test.sim.ai/invite/invitation-789?error=workspace-not-found&token=token-abc123'
|
||||
)
|
||||
})
|
||||
|
||||
it('should redirect to error page with token preserved when user not found', async () => {
|
||||
const session = createSession({
|
||||
userId: mockUser.id,
|
||||
email: 'invited@example.com',
|
||||
name: mockUser.name,
|
||||
})
|
||||
mockGetSession.mockResolvedValue(session)
|
||||
|
||||
dbSelectResults = [[mockInvitation], [mockWorkspace], []]
|
||||
|
||||
const request = new NextRequest(
|
||||
'http://localhost/api/workspaces/invitations/token-abc123?token=token-abc123'
|
||||
)
|
||||
const params = Promise.resolve({ invitationId: 'token-abc123' })
|
||||
|
||||
const response = await GET(request, { params })
|
||||
|
||||
expect(response.status).toBe(307)
|
||||
const location = response.headers.get('location')
|
||||
expect(location).toBe(
|
||||
'https://test.sim.ai/invite/invitation-789?error=user-not-found&token=token-abc123'
|
||||
)
|
||||
})
|
||||
|
||||
it('should URL encode special characters in token when preserving in error redirects', async () => {
|
||||
const session = createSession({
|
||||
userId: mockUser.id,
|
||||
email: 'wrong@example.com',
|
||||
name: mockUser.name,
|
||||
})
|
||||
mockGetSession.mockResolvedValue(session)
|
||||
|
||||
dbSelectResults = [
|
||||
[mockInvitation],
|
||||
[mockWorkspace],
|
||||
[{ ...mockUser, email: 'wrong@example.com' }],
|
||||
]
|
||||
|
||||
const specialToken = 'token+with/special=chars&more'
|
||||
const request = new NextRequest(
|
||||
`http://localhost/api/workspaces/invitations/token-abc123?token=${encodeURIComponent(specialToken)}`
|
||||
)
|
||||
const params = Promise.resolve({ invitationId: 'token-abc123' })
|
||||
|
||||
const response = await GET(request, { params })
|
||||
|
||||
expect(response.status).toBe(307)
|
||||
const location = response.headers.get('location')
|
||||
expect(location).toContain('error=email-mismatch')
|
||||
expect(location).toContain(`token=${encodeURIComponent(specialToken)}`)
|
||||
})
|
||||
})
|
||||
|
||||
describe('Token Preservation - Full Flow Scenario', () => {
|
||||
it('should preserve token through email mismatch so user can retry with correct account', async () => {
|
||||
const wrongSession = createSession({
|
||||
userId: 'wrong-user',
|
||||
email: 'wrong@example.com',
|
||||
name: 'Wrong User',
|
||||
})
|
||||
mockGetSession.mockResolvedValue(wrongSession)
|
||||
|
||||
dbSelectResults = [
|
||||
[mockInvitation],
|
||||
[mockWorkspace],
|
||||
[{ id: 'wrong-user', email: 'wrong@example.com' }],
|
||||
]
|
||||
|
||||
const request1 = new NextRequest(
|
||||
'http://localhost/api/workspaces/invitations/token-abc123?token=token-abc123'
|
||||
)
|
||||
const params1 = Promise.resolve({ invitationId: 'token-abc123' })
|
||||
|
||||
const response1 = await GET(request1, { params: params1 })
|
||||
|
||||
expect(response1.status).toBe(307)
|
||||
const location1 = response1.headers.get('location')
|
||||
expect(location1).toBe(
|
||||
'https://test.sim.ai/invite/invitation-789?error=email-mismatch&token=token-abc123'
|
||||
)
|
||||
|
||||
vi.clearAllMocks()
|
||||
dbSelectCallIndex = 0
|
||||
|
||||
const correctSession = createSession({
|
||||
userId: mockUser.id,
|
||||
email: 'invited@example.com',
|
||||
name: mockUser.name,
|
||||
})
|
||||
mockGetSession.mockResolvedValue(correctSession)
|
||||
|
||||
dbSelectResults = [
|
||||
[mockInvitation],
|
||||
[mockWorkspace],
|
||||
[{ ...mockUser, email: 'invited@example.com' }],
|
||||
[],
|
||||
]
|
||||
|
||||
const request2 = new NextRequest(
|
||||
'http://localhost/api/workspaces/invitations/token-abc123?token=token-abc123'
|
||||
)
|
||||
const params2 = Promise.resolve({ invitationId: 'token-abc123' })
|
||||
|
||||
const response2 = await GET(request2, { params: params2 })
|
||||
|
||||
expect(response2.status).toBe(307)
|
||||
expect(response2.headers.get('location')).toBe(
|
||||
'https://test.sim.ai/workspace/workspace-456/w'
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe('DELETE /api/workspaces/invitations/[invitationId]', () => {
|
||||
|
||||
@@ -31,7 +31,6 @@ export async function GET(
|
||||
const isAcceptFlow = !!token // If token is provided, this is an acceptance flow
|
||||
|
||||
if (!session?.user?.id) {
|
||||
// For token-based acceptance flows, redirect to login
|
||||
if (isAcceptFlow) {
|
||||
return NextResponse.redirect(new URL(`/invite/${invitationId}?token=${token}`, getBaseUrl()))
|
||||
}
|
||||
@@ -51,8 +50,9 @@ export async function GET(
|
||||
|
||||
if (!invitation) {
|
||||
if (isAcceptFlow) {
|
||||
const tokenParam = token ? `&token=${encodeURIComponent(token)}` : ''
|
||||
return NextResponse.redirect(
|
||||
new URL(`/invite/${invitationId}?error=invalid-token`, getBaseUrl())
|
||||
new URL(`/invite/${invitationId}?error=invalid-token${tokenParam}`, getBaseUrl())
|
||||
)
|
||||
}
|
||||
return NextResponse.json({ error: 'Invitation not found or has expired' }, { status: 404 })
|
||||
@@ -60,8 +60,9 @@ export async function GET(
|
||||
|
||||
if (new Date() > new Date(invitation.expiresAt)) {
|
||||
if (isAcceptFlow) {
|
||||
const tokenParam = token ? `&token=${encodeURIComponent(token)}` : ''
|
||||
return NextResponse.redirect(
|
||||
new URL(`/invite/${invitation.id}?error=expired`, getBaseUrl())
|
||||
new URL(`/invite/${invitation.id}?error=expired${tokenParam}`, getBaseUrl())
|
||||
)
|
||||
}
|
||||
return NextResponse.json({ error: 'Invitation has expired' }, { status: 400 })
|
||||
@@ -75,17 +76,20 @@ export async function GET(
|
||||
|
||||
if (!workspaceDetails) {
|
||||
if (isAcceptFlow) {
|
||||
const tokenParam = token ? `&token=${encodeURIComponent(token)}` : ''
|
||||
return NextResponse.redirect(
|
||||
new URL(`/invite/${invitation.id}?error=workspace-not-found`, getBaseUrl())
|
||||
new URL(`/invite/${invitation.id}?error=workspace-not-found${tokenParam}`, getBaseUrl())
|
||||
)
|
||||
}
|
||||
return NextResponse.json({ error: 'Workspace not found' }, { status: 404 })
|
||||
}
|
||||
|
||||
if (isAcceptFlow) {
|
||||
const tokenParam = token ? `&token=${encodeURIComponent(token)}` : ''
|
||||
|
||||
if (invitation.status !== ('pending' as WorkspaceInvitationStatus)) {
|
||||
return NextResponse.redirect(
|
||||
new URL(`/invite/${invitation.id}?error=already-processed`, getBaseUrl())
|
||||
new URL(`/invite/${invitation.id}?error=already-processed${tokenParam}`, getBaseUrl())
|
||||
)
|
||||
}
|
||||
|
||||
@@ -100,7 +104,7 @@ export async function GET(
|
||||
|
||||
if (!userData) {
|
||||
return NextResponse.redirect(
|
||||
new URL(`/invite/${invitation.id}?error=user-not-found`, getBaseUrl())
|
||||
new URL(`/invite/${invitation.id}?error=user-not-found${tokenParam}`, getBaseUrl())
|
||||
)
|
||||
}
|
||||
|
||||
@@ -108,7 +112,7 @@ export async function GET(
|
||||
|
||||
if (!isValidMatch) {
|
||||
return NextResponse.redirect(
|
||||
new URL(`/invite/${invitation.id}?error=email-mismatch`, getBaseUrl())
|
||||
new URL(`/invite/${invitation.id}?error=email-mismatch${tokenParam}`, getBaseUrl())
|
||||
)
|
||||
}
|
||||
|
||||
|
||||
@@ -178,23 +178,25 @@ export default function Invite() {
|
||||
|
||||
useEffect(() => {
|
||||
const errorReason = searchParams.get('error')
|
||||
const isNew = searchParams.get('new') === 'true'
|
||||
setIsNewUser(isNew)
|
||||
|
||||
const tokenFromQuery = searchParams.get('token')
|
||||
if (tokenFromQuery) {
|
||||
setToken(tokenFromQuery)
|
||||
sessionStorage.setItem('inviteToken', tokenFromQuery)
|
||||
} else {
|
||||
const storedToken = sessionStorage.getItem('inviteToken')
|
||||
if (storedToken && storedToken !== inviteId) {
|
||||
setToken(storedToken)
|
||||
}
|
||||
}
|
||||
|
||||
if (errorReason) {
|
||||
setError(getInviteError(errorReason))
|
||||
setIsLoading(false)
|
||||
return
|
||||
}
|
||||
|
||||
const isNew = searchParams.get('new') === 'true'
|
||||
setIsNewUser(isNew)
|
||||
|
||||
const tokenFromQuery = searchParams.get('token')
|
||||
const effectiveToken = tokenFromQuery || inviteId
|
||||
|
||||
if (effectiveToken) {
|
||||
setToken(effectiveToken)
|
||||
sessionStorage.setItem('inviteToken', effectiveToken)
|
||||
}
|
||||
}, [searchParams, inviteId])
|
||||
|
||||
useEffect(() => {
|
||||
@@ -203,7 +205,6 @@ export default function Invite() {
|
||||
async function fetchInvitationDetails() {
|
||||
setIsLoading(true)
|
||||
try {
|
||||
// Fetch invitation details using the invitation ID from the URL path
|
||||
const workspaceInviteResponse = await fetch(`/api/workspaces/invitations/${inviteId}`, {
|
||||
method: 'GET',
|
||||
})
|
||||
@@ -220,7 +221,6 @@ export default function Invite() {
|
||||
return
|
||||
}
|
||||
|
||||
// Handle workspace invitation errors with specific status codes
|
||||
if (!workspaceInviteResponse.ok && workspaceInviteResponse.status !== 404) {
|
||||
const errorCode = parseApiError(null, workspaceInviteResponse.status)
|
||||
const errorData = await workspaceInviteResponse.json().catch(() => ({}))
|
||||
@@ -229,7 +229,6 @@ export default function Invite() {
|
||||
error: errorData,
|
||||
})
|
||||
|
||||
// Refine error code based on response body if available
|
||||
if (errorData.error) {
|
||||
const refinedCode = parseApiError(errorData.error, workspaceInviteResponse.status)
|
||||
setError(getInviteError(refinedCode))
|
||||
@@ -254,13 +253,11 @@ export default function Invite() {
|
||||
if (data) {
|
||||
setInvitationType('organization')
|
||||
|
||||
// Check if user is already in an organization BEFORE showing the invitation
|
||||
const activeOrgResponse = await client.organization
|
||||
.getFullOrganization()
|
||||
.catch(() => ({ data: null }))
|
||||
|
||||
if (activeOrgResponse?.data) {
|
||||
// User is already in an organization
|
||||
setCurrentOrgName(activeOrgResponse.data.name)
|
||||
setError(getInviteError('already-in-organization'))
|
||||
setIsLoading(false)
|
||||
@@ -289,7 +286,6 @@ export default function Invite() {
|
||||
throw { code: 'invalid-invitation' }
|
||||
}
|
||||
} catch (orgErr: any) {
|
||||
// If this is our structured error, use it directly
|
||||
if (orgErr.code) {
|
||||
throw orgErr
|
||||
}
|
||||
@@ -316,7 +312,6 @@ export default function Invite() {
|
||||
window.location.href = `/api/workspaces/invitations/${encodeURIComponent(inviteId)}?token=${encodeURIComponent(token || '')}`
|
||||
} else {
|
||||
try {
|
||||
// Get the organizationId from invitation details
|
||||
const orgId = invitationDetails?.data?.organizationId
|
||||
|
||||
if (!orgId) {
|
||||
@@ -325,7 +320,6 @@ export default function Invite() {
|
||||
return
|
||||
}
|
||||
|
||||
// Use our custom API endpoint that handles Pro usage snapshot
|
||||
const response = await fetch(`/api/organizations/${orgId}/invitations/${inviteId}`, {
|
||||
method: 'PUT',
|
||||
headers: {
|
||||
@@ -347,7 +341,6 @@ export default function Invite() {
|
||||
return
|
||||
}
|
||||
|
||||
// Set the organization as active
|
||||
await client.organization.setActive({
|
||||
organizationId: orgId,
|
||||
})
|
||||
@@ -360,7 +353,6 @@ export default function Invite() {
|
||||
} catch (err: any) {
|
||||
logger.error('Error accepting invitation:', err)
|
||||
|
||||
// Reset accepted state on error
|
||||
setAccepted(false)
|
||||
|
||||
const errorCode = parseApiError(err)
|
||||
@@ -371,7 +363,9 @@ export default function Invite() {
|
||||
}
|
||||
|
||||
const getCallbackUrl = () => {
|
||||
return `/invite/${inviteId}${token && token !== inviteId ? `?token=${token}` : ''}`
|
||||
const effectiveToken =
|
||||
token || sessionStorage.getItem('inviteToken') || searchParams.get('token')
|
||||
return `/invite/${inviteId}${effectiveToken && effectiveToken !== inviteId ? `?token=${effectiveToken}` : ''}`
|
||||
}
|
||||
|
||||
if (!session?.user && !isPending) {
|
||||
@@ -435,7 +429,6 @@ export default function Invite() {
|
||||
if (error) {
|
||||
const callbackUrl = encodeURIComponent(getCallbackUrl())
|
||||
|
||||
// Special handling for already in organization
|
||||
if (error.code === 'already-in-organization') {
|
||||
return (
|
||||
<InviteLayout>
|
||||
@@ -463,7 +456,6 @@ export default function Invite() {
|
||||
)
|
||||
}
|
||||
|
||||
// Handle email mismatch - user needs to sign in with a different account
|
||||
if (error.code === 'email-mismatch') {
|
||||
return (
|
||||
<InviteLayout>
|
||||
@@ -490,7 +482,6 @@ export default function Invite() {
|
||||
)
|
||||
}
|
||||
|
||||
// Handle auth-related errors - prompt user to sign in
|
||||
if (error.requiresAuth) {
|
||||
return (
|
||||
<InviteLayout>
|
||||
@@ -518,7 +509,6 @@ export default function Invite() {
|
||||
)
|
||||
}
|
||||
|
||||
// Handle retryable errors
|
||||
const actions: Array<{
|
||||
label: string
|
||||
onClick: () => void
|
||||
@@ -550,7 +540,6 @@ export default function Invite() {
|
||||
)
|
||||
}
|
||||
|
||||
// Show success only if accepted AND no error
|
||||
if (accepted && !error) {
|
||||
return (
|
||||
<InviteLayout>
|
||||
|
||||
@@ -221,7 +221,9 @@ export function Chat() {
|
||||
exportChatCSV,
|
||||
} = useChatStore()
|
||||
|
||||
const { entries } = useTerminalConsoleStore()
|
||||
const hasConsoleHydrated = useTerminalConsoleStore((state) => state._hasHydrated)
|
||||
const entriesFromStore = useTerminalConsoleStore((state) => state.entries)
|
||||
const entries = hasConsoleHydrated ? entriesFromStore : []
|
||||
const { isExecuting } = useExecutionStore()
|
||||
const { handleRunWorkflow, handleCancelExecution } = useWorkflowExecution()
|
||||
const { data: session } = useSession()
|
||||
@@ -531,35 +533,6 @@ export function Chat() {
|
||||
return
|
||||
}
|
||||
|
||||
if (
|
||||
selectedOutputs.length > 0 &&
|
||||
'logs' in result &&
|
||||
Array.isArray(result.logs) &&
|
||||
activeWorkflowId
|
||||
) {
|
||||
const additionalOutputs: string[] = []
|
||||
|
||||
for (const outputId of selectedOutputs) {
|
||||
const blockId = extractBlockIdFromOutputId(outputId)
|
||||
const path = extractPathFromOutputId(outputId, blockId)
|
||||
|
||||
if (path === 'content') continue
|
||||
|
||||
const outputValue = extractOutputFromLogs(result.logs as BlockLog[], outputId)
|
||||
if (outputValue !== undefined) {
|
||||
const formattedValue =
|
||||
typeof outputValue === 'string' ? outputValue : JSON.stringify(outputValue)
|
||||
if (formattedValue) {
|
||||
additionalOutputs.push(`**${path}:** ${formattedValue}`)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (additionalOutputs.length > 0) {
|
||||
appendMessageContent(responseMessageId, `\n\n${additionalOutputs.join('\n\n')}`)
|
||||
}
|
||||
}
|
||||
|
||||
finalizeMessageStream(responseMessageId)
|
||||
} else if (contentChunk) {
|
||||
accumulatedContent += contentChunk
|
||||
|
||||
@@ -320,12 +320,14 @@ export function Terminal() {
|
||||
} = useTerminalStore()
|
||||
const isExpanded = useTerminalStore((state) => state.terminalHeight > NEAR_MIN_THRESHOLD)
|
||||
const { activeWorkflowId } = useWorkflowRegistry()
|
||||
const hasConsoleHydrated = useTerminalConsoleStore((state) => state._hasHydrated)
|
||||
const workflowEntriesSelector = useCallback(
|
||||
(state: { entries: ConsoleEntry[] }) =>
|
||||
state.entries.filter((entry) => entry.workflowId === activeWorkflowId),
|
||||
[activeWorkflowId]
|
||||
)
|
||||
const entries = useTerminalConsoleStore(useShallow(workflowEntriesSelector))
|
||||
const entriesFromStore = useTerminalConsoleStore(useShallow(workflowEntriesSelector))
|
||||
const entries = hasConsoleHydrated ? entriesFromStore : []
|
||||
const clearWorkflowConsole = useTerminalConsoleStore((state) => state.clearWorkflowConsole)
|
||||
const exportConsoleCSV = useTerminalConsoleStore((state) => state.exportConsoleCSV)
|
||||
const [selectedEntry, setSelectedEntry] = useState<ConsoleEntry | null>(null)
|
||||
|
||||
@@ -98,6 +98,23 @@ export const A2ABlock: BlockConfig<A2AResponse> = {
|
||||
condition: { field: 'operation', value: 'a2a_send_message' },
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
id: 'data',
|
||||
title: 'Data (JSON)',
|
||||
type: 'code',
|
||||
placeholder: '{\n "key": "value"\n}',
|
||||
description: 'Structured data to include with the message (DataPart)',
|
||||
condition: { field: 'operation', value: 'a2a_send_message' },
|
||||
},
|
||||
{
|
||||
id: 'files',
|
||||
title: 'Files',
|
||||
type: 'file-upload',
|
||||
placeholder: 'Upload files to send',
|
||||
description: 'Files to include with the message (FilePart)',
|
||||
condition: { field: 'operation', value: 'a2a_send_message' },
|
||||
multiple: true,
|
||||
},
|
||||
{
|
||||
id: 'taskId',
|
||||
title: 'Task ID',
|
||||
@@ -208,6 +225,14 @@ export const A2ABlock: BlockConfig<A2AResponse> = {
|
||||
type: 'string',
|
||||
description: 'Context ID for conversation continuity',
|
||||
},
|
||||
data: {
|
||||
type: 'json',
|
||||
description: 'Structured data to include with the message',
|
||||
},
|
||||
files: {
|
||||
type: 'array',
|
||||
description: 'Files to include with the message',
|
||||
},
|
||||
historyLength: {
|
||||
type: 'number',
|
||||
description: 'Number of history messages to include',
|
||||
|
||||
@@ -26,6 +26,8 @@ export const SlackBlock: BlockConfig<SlackResponse> = {
|
||||
{ label: 'Send Message', id: 'send' },
|
||||
{ label: 'Create Canvas', id: 'canvas' },
|
||||
{ label: 'Read Messages', id: 'read' },
|
||||
{ label: 'Get Message', id: 'get_message' },
|
||||
{ label: 'Get Thread', id: 'get_thread' },
|
||||
{ label: 'List Channels', id: 'list_channels' },
|
||||
{ label: 'List Channel Members', id: 'list_members' },
|
||||
{ label: 'List Users', id: 'list_users' },
|
||||
@@ -316,6 +318,68 @@ export const SlackBlock: BlockConfig<SlackResponse> = {
|
||||
},
|
||||
required: true,
|
||||
},
|
||||
// Get Message specific fields
|
||||
{
|
||||
id: 'getMessageTimestamp',
|
||||
title: 'Message Timestamp',
|
||||
type: 'short-input',
|
||||
placeholder: 'Message timestamp (e.g., 1405894322.002768)',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'get_message',
|
||||
},
|
||||
required: true,
|
||||
wandConfig: {
|
||||
enabled: true,
|
||||
prompt: `Extract or generate a Slack message timestamp from the user's input.
|
||||
Slack message timestamps are in the format: XXXXXXXXXX.XXXXXX (seconds.microseconds since Unix epoch).
|
||||
Examples:
|
||||
- "1405894322.002768" -> 1405894322.002768 (already a valid timestamp)
|
||||
- "thread_ts from the trigger" -> The user wants to reference a variable, output the original text
|
||||
- A URL like "https://slack.com/archives/C123/p1405894322002768" -> Extract 1405894322.002768 (remove 'p' prefix, add decimal after 10th digit)
|
||||
|
||||
If the input looks like a reference to another block's output (contains < and >) or a variable, return it as-is.
|
||||
Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
|
||||
placeholder: 'Paste a Slack message URL or timestamp...',
|
||||
generationType: 'timestamp',
|
||||
},
|
||||
},
|
||||
// Get Thread specific fields
|
||||
{
|
||||
id: 'getThreadTimestamp',
|
||||
title: 'Thread Timestamp',
|
||||
type: 'short-input',
|
||||
placeholder: 'Thread timestamp (thread_ts, e.g., 1405894322.002768)',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'get_thread',
|
||||
},
|
||||
required: true,
|
||||
wandConfig: {
|
||||
enabled: true,
|
||||
prompt: `Extract or generate a Slack thread timestamp from the user's input.
|
||||
Slack thread timestamps (thread_ts) are in the format: XXXXXXXXXX.XXXXXX (seconds.microseconds since Unix epoch).
|
||||
Examples:
|
||||
- "1405894322.002768" -> 1405894322.002768 (already a valid timestamp)
|
||||
- "thread_ts from the trigger" -> The user wants to reference a variable, output the original text
|
||||
- A URL like "https://slack.com/archives/C123/p1405894322002768" -> Extract 1405894322.002768 (remove 'p' prefix, add decimal after 10th digit)
|
||||
|
||||
If the input looks like a reference to another block's output (contains < and >) or a variable, return it as-is.
|
||||
Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
|
||||
placeholder: 'Paste a Slack thread URL or thread_ts...',
|
||||
generationType: 'timestamp',
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'threadLimit',
|
||||
title: 'Message Limit',
|
||||
type: 'short-input',
|
||||
placeholder: '100',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'get_thread',
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'oldest',
|
||||
title: 'Oldest Timestamp',
|
||||
@@ -430,6 +494,8 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
|
||||
'slack_message',
|
||||
'slack_canvas',
|
||||
'slack_message_reader',
|
||||
'slack_get_message',
|
||||
'slack_get_thread',
|
||||
'slack_list_channels',
|
||||
'slack_list_members',
|
||||
'slack_list_users',
|
||||
@@ -448,6 +514,10 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
|
||||
return 'slack_canvas'
|
||||
case 'read':
|
||||
return 'slack_message_reader'
|
||||
case 'get_message':
|
||||
return 'slack_get_message'
|
||||
case 'get_thread':
|
||||
return 'slack_get_thread'
|
||||
case 'list_channels':
|
||||
return 'slack_list_channels'
|
||||
case 'list_members':
|
||||
@@ -498,6 +568,9 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
|
||||
includeDeleted,
|
||||
userLimit,
|
||||
userId,
|
||||
getMessageTimestamp,
|
||||
getThreadTimestamp,
|
||||
threadLimit,
|
||||
...rest
|
||||
} = params
|
||||
|
||||
@@ -574,6 +647,27 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
|
||||
break
|
||||
}
|
||||
|
||||
case 'get_message':
|
||||
if (!getMessageTimestamp) {
|
||||
throw new Error('Message timestamp is required for get message operation')
|
||||
}
|
||||
baseParams.timestamp = getMessageTimestamp
|
||||
break
|
||||
|
||||
case 'get_thread': {
|
||||
if (!getThreadTimestamp) {
|
||||
throw new Error('Thread timestamp is required for get thread operation')
|
||||
}
|
||||
baseParams.threadTs = getThreadTimestamp
|
||||
if (threadLimit) {
|
||||
const parsedLimit = Number.parseInt(threadLimit, 10)
|
||||
if (!Number.isNaN(parsedLimit) && parsedLimit > 0) {
|
||||
baseParams.limit = Math.min(parsedLimit, 200)
|
||||
}
|
||||
}
|
||||
break
|
||||
}
|
||||
|
||||
case 'list_channels': {
|
||||
baseParams.includePrivate = includePrivate !== 'false'
|
||||
baseParams.excludeArchived = true
|
||||
@@ -679,6 +773,14 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
|
||||
userLimit: { type: 'string', description: 'Maximum number of users to return' },
|
||||
// Get User inputs
|
||||
userId: { type: 'string', description: 'User ID to look up' },
|
||||
// Get Message inputs
|
||||
getMessageTimestamp: { type: 'string', description: 'Message timestamp to retrieve' },
|
||||
// Get Thread inputs
|
||||
getThreadTimestamp: { type: 'string', description: 'Thread timestamp to retrieve' },
|
||||
threadLimit: {
|
||||
type: 'string',
|
||||
description: 'Maximum number of messages to return from thread',
|
||||
},
|
||||
},
|
||||
outputs: {
|
||||
// slack_message outputs (send operation)
|
||||
@@ -706,6 +808,24 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
|
||||
'Array of message objects with comprehensive properties: text, user, timestamp, reactions, threads, files, attachments, blocks, stars, pins, and edit history',
|
||||
},
|
||||
|
||||
// slack_get_thread outputs (get_thread operation)
|
||||
parentMessage: {
|
||||
type: 'json',
|
||||
description: 'The thread parent message with all properties',
|
||||
},
|
||||
replies: {
|
||||
type: 'json',
|
||||
description: 'Array of reply messages in the thread (excluding the parent)',
|
||||
},
|
||||
replyCount: {
|
||||
type: 'number',
|
||||
description: 'Number of replies returned in this response',
|
||||
},
|
||||
hasMore: {
|
||||
type: 'boolean',
|
||||
description: 'Whether there are more messages in the thread',
|
||||
},
|
||||
|
||||
// slack_list_channels outputs (list_channels operation)
|
||||
channels: {
|
||||
type: 'json',
|
||||
|
||||
@@ -22,7 +22,7 @@ import { useUndoRedoStore } from '@/stores/undo-redo'
|
||||
import { useWorkflowDiffStore } from '@/stores/workflow-diff/store'
|
||||
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
||||
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
|
||||
import { mergeSubblockState, normalizeName } from '@/stores/workflows/utils'
|
||||
import { filterNewEdges, mergeSubblockState, normalizeName } from '@/stores/workflows/utils'
|
||||
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
|
||||
import type { BlockState, Loop, Parallel, Position } from '@/stores/workflows/workflow/types'
|
||||
|
||||
@@ -242,7 +242,10 @@ export function useCollaborativeWorkflow() {
|
||||
case EDGES_OPERATIONS.BATCH_ADD_EDGES: {
|
||||
const { edges } = payload
|
||||
if (Array.isArray(edges) && edges.length > 0) {
|
||||
workflowStore.batchAddEdges(edges)
|
||||
const newEdges = filterNewEdges(edges, workflowStore.edges)
|
||||
if (newEdges.length > 0) {
|
||||
workflowStore.batchAddEdges(newEdges)
|
||||
}
|
||||
}
|
||||
break
|
||||
}
|
||||
@@ -976,6 +979,9 @@ export function useCollaborativeWorkflow() {
|
||||
|
||||
if (edges.length === 0) return false
|
||||
|
||||
const newEdges = filterNewEdges(edges, workflowStore.edges)
|
||||
if (newEdges.length === 0) return false
|
||||
|
||||
const operationId = crypto.randomUUID()
|
||||
|
||||
addToQueue({
|
||||
@@ -983,16 +989,16 @@ export function useCollaborativeWorkflow() {
|
||||
operation: {
|
||||
operation: EDGES_OPERATIONS.BATCH_ADD_EDGES,
|
||||
target: OPERATION_TARGETS.EDGES,
|
||||
payload: { edges },
|
||||
payload: { edges: newEdges },
|
||||
},
|
||||
workflowId: activeWorkflowId || '',
|
||||
userId: session?.user?.id || 'unknown',
|
||||
})
|
||||
|
||||
workflowStore.batchAddEdges(edges)
|
||||
workflowStore.batchAddEdges(newEdges)
|
||||
|
||||
if (!options?.skipUndoRedo) {
|
||||
edges.forEach((edge) => undoRedo.recordAddEdge(edge.id))
|
||||
newEdges.forEach((edge) => undoRedo.recordAddEdge(edge.id))
|
||||
}
|
||||
|
||||
return true
|
||||
|
||||
@@ -36,9 +36,10 @@ class ApiKeyInterceptor implements CallInterceptor {

/**
 * Create an A2A client from an agent URL with optional API key authentication
 *
 * The agent URL should be the full endpoint URL (e.g., /api/a2a/serve/{agentId}).
 * We pass an empty path to createFromUrl so it uses the URL directly for agent card
 * discovery (GET on the URL) instead of appending .well-known/agent-card.json.
 * Supports both standard A2A agents (agent card at /.well-known/agent.json)
 * and Sim Studio agents (agent card at root URL via GET).
 *
 * Tries standard path first, falls back to root URL for compatibility.
 */
export async function createA2AClient(agentUrl: string, apiKey?: string): Promise<Client> {
  const factoryOptions = apiKey

@@ -49,6 +50,18 @@ export async function createA2AClient(agentUrl: string, apiKey?: string): Promis
      })
    : ClientFactoryOptions.default
  const factory = new ClientFactory(factoryOptions)

  // Try standard A2A path first (/.well-known/agent.json)
  try {
    return await factory.createFromUrl(agentUrl, '/.well-known/agent.json')
  } catch (standardError) {
    logger.debug('Standard agent card path failed, trying root URL', {
      agentUrl,
      error: standardError instanceof Error ? standardError.message : String(standardError),
    })
  }

  // Fall back to root URL (Sim Studio compatibility)
  return factory.createFromUrl(agentUrl, '')
}
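As a usage sketch, mirroring the send-message route earlier in this diff rather than prescribing anything beyond what is shown there, the helper combines with `sendMessage` like so:

```ts
import type { Message } from '@a2a-js/sdk'
import { createA2AClient, extractTextContent } from '@/lib/a2a/utils'

// Sketch: send one text message to an agent and return the text of a direct reply.
async function askAgent(agentUrl: string, text: string, apiKey?: string) {
  const client = await createA2AClient(agentUrl, apiKey)
  const message: Message = {
    kind: 'message',
    messageId: crypto.randomUUID(),
    role: 'user',
    parts: [{ kind: 'text', text }],
  }
  const result = await client.sendMessage({ message })
  // The route above handles the Task case in full; here we only unwrap a direct Message reply.
  return result.kind === 'message' ? extractTextContent(result as Message) : result
}
```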
@@ -656,7 +656,7 @@ export const auth = betterAuth({
|
||||
const now = new Date()
|
||||
|
||||
return {
|
||||
id: profile.id.toString(),
|
||||
id: `${profile.id.toString()}-${crypto.randomUUID()}`,
|
||||
name: profile.name || profile.login,
|
||||
email: profile.email,
|
||||
image: profile.avatar_url,
|
||||
@@ -962,7 +962,7 @@ export const auth = betterAuth({
|
||||
const now = new Date()
|
||||
|
||||
return {
|
||||
id: uniqueId,
|
||||
id: `${uniqueId}-${crypto.randomUUID()}`,
|
||||
name: 'Wealthbox User',
|
||||
email: `${uniqueId}@wealthbox.user`,
|
||||
emailVerified: false,
|
||||
@@ -1016,7 +1016,7 @@ export const auth = betterAuth({
|
||||
const user = data.data
|
||||
|
||||
return {
|
||||
id: user.id.toString(),
|
||||
id: `${user.id.toString()}-${crypto.randomUUID()}`,
|
||||
name: user.name,
|
||||
email: user.email,
|
||||
emailVerified: user.activated,
|
||||
@@ -1108,7 +1108,7 @@ export const auth = betterAuth({
|
||||
})
|
||||
|
||||
return {
|
||||
id: data.user_id || data.hub_id.toString(),
|
||||
id: `${data.user_id || data.hub_id.toString()}-${crypto.randomUUID()}`,
|
||||
name: data.user || 'HubSpot User',
|
||||
email: data.user || `hubspot-${data.hub_id}@hubspot.com`,
|
||||
emailVerified: true,
|
||||
@@ -1162,7 +1162,7 @@ export const auth = betterAuth({
|
||||
const data = await response.json()
|
||||
|
||||
return {
|
||||
id: data.user_id || data.sub,
|
||||
id: `${data.user_id || data.sub}-${crypto.randomUUID()}`,
|
||||
name: data.name || 'Salesforce User',
|
||||
email: data.email || `salesforce-${data.user_id}@salesforce.com`,
|
||||
emailVerified: data.email_verified || true,
|
||||
@@ -1221,7 +1221,7 @@ export const auth = betterAuth({
|
||||
const now = new Date()
|
||||
|
||||
return {
|
||||
id: profile.data.id,
|
||||
id: `${profile.data.id}-${crypto.randomUUID()}`,
|
||||
name: profile.data.name || 'X User',
|
||||
email: `${profile.data.username}@x.com`,
|
||||
image: profile.data.profile_image_url,
|
||||
@@ -1295,7 +1295,7 @@ export const auth = betterAuth({
|
||||
const now = new Date()
|
||||
|
||||
return {
|
||||
id: profile.account_id,
|
||||
id: `${profile.account_id}-${crypto.randomUUID()}`,
|
||||
name: profile.name || profile.display_name || 'Confluence User',
|
||||
email: profile.email || `${profile.account_id}@atlassian.com`,
|
||||
image: profile.picture || undefined,
|
||||
@@ -1406,7 +1406,7 @@ export const auth = betterAuth({
|
||||
const now = new Date()
|
||||
|
||||
return {
|
||||
id: profile.account_id,
|
||||
id: `${profile.account_id}-${crypto.randomUUID()}`,
|
||||
name: profile.name || profile.display_name || 'Jira User',
|
||||
email: profile.email || `${profile.account_id}@atlassian.com`,
|
||||
image: profile.picture || undefined,
|
||||
@@ -1456,7 +1456,7 @@ export const auth = betterAuth({
|
||||
const now = new Date()
|
||||
|
||||
return {
|
||||
id: data.id,
|
||||
id: `${data.id}-${crypto.randomUUID()}`,
|
||||
name: data.email ? data.email.split('@')[0] : 'Airtable User',
|
||||
email: data.email || `${data.id}@airtable.user`,
|
||||
emailVerified: !!data.email,
|
||||
@@ -1505,7 +1505,7 @@ export const auth = betterAuth({
|
||||
const now = new Date()
|
||||
|
||||
return {
|
||||
id: profile.bot?.owner?.user?.id || profile.id,
|
||||
id: `${profile.bot?.owner?.user?.id || profile.id}-${crypto.randomUUID()}`,
|
||||
name: profile.name || profile.bot?.owner?.user?.name || 'Notion User',
|
||||
email: profile.person?.email || `${profile.id}@notion.user`,
|
||||
emailVerified: !!profile.person?.email,
|
||||
@@ -1572,7 +1572,7 @@ export const auth = betterAuth({
|
||||
const now = new Date()
|
||||
|
||||
return {
|
||||
id: data.id,
|
||||
id: `${data.id}-${crypto.randomUUID()}`,
|
||||
name: data.name || 'Reddit User',
|
||||
email: `${data.name}@reddit.user`,
|
||||
image: data.icon_img || undefined,
|
||||
@@ -1644,7 +1644,7 @@ export const auth = betterAuth({
|
||||
const viewer = data.viewer
|
||||
|
||||
return {
|
||||
id: viewer.id,
|
||||
id: `${viewer.id}-${crypto.randomUUID()}`,
|
||||
email: viewer.email,
|
||||
name: viewer.name,
|
||||
emailVerified: true,
|
||||
@@ -1707,7 +1707,7 @@ export const auth = betterAuth({
|
||||
const data = await response.json()
|
||||
|
||||
return {
|
||||
id: data.account_id,
|
||||
id: `${data.account_id}-${crypto.randomUUID()}`,
|
||||
email: data.email,
|
||||
name: data.name?.display_name || data.email,
|
||||
emailVerified: data.email_verified || false,
|
||||
@@ -1758,7 +1758,7 @@ export const auth = betterAuth({
|
||||
const now = new Date()
|
||||
|
||||
return {
|
||||
id: profile.gid,
|
||||
id: `${profile.gid}-${crypto.randomUUID()}`,
|
||||
name: profile.name || 'Asana User',
|
||||
email: profile.email || `${profile.gid}@asana.user`,
|
||||
image: profile.photo?.image_128x128 || undefined,
|
||||
@@ -1834,7 +1834,7 @@ export const auth = betterAuth({
|
||||
logger.info('Slack credential identifier', { teamId, userId, uniqueId, teamName })
|
||||
|
||||
return {
|
||||
id: uniqueId,
|
||||
id: `${uniqueId}-${crypto.randomUUID()}`,
|
||||
name: teamName,
|
||||
email: `${teamId}-${userId}@slack.bot`,
|
||||
emailVerified: false,
|
||||
@@ -1884,7 +1884,7 @@ export const auth = betterAuth({
|
||||
const uniqueId = `webflow-${userId}`
|
||||
|
||||
return {
|
||||
id: uniqueId,
|
||||
id: `${uniqueId}-${crypto.randomUUID()}`,
|
||||
name: data.user_name || 'Webflow User',
|
||||
email: `${uniqueId.replace(/[^a-zA-Z0-9]/g, '')}@webflow.user`,
|
||||
emailVerified: false,
|
||||
@@ -1931,7 +1931,7 @@ export const auth = betterAuth({
|
||||
const profile = await response.json()
|
||||
|
||||
return {
|
||||
id: profile.sub,
|
||||
id: `${profile.sub}-${crypto.randomUUID()}`,
|
||||
name: profile.name || 'LinkedIn User',
|
||||
email: profile.email || `${profile.sub}@linkedin.user`,
|
||||
emailVerified: profile.email_verified || true,
|
||||
@@ -1993,7 +1993,7 @@ export const auth = betterAuth({
|
||||
const profile = await response.json()
|
||||
|
||||
return {
|
||||
id: profile.id,
|
||||
id: `${profile.id}-${crypto.randomUUID()}`,
|
||||
name:
|
||||
`${profile.first_name || ''} ${profile.last_name || ''}`.trim() || 'Zoom User',
|
||||
email: profile.email || `${profile.id}@zoom.user`,
|
||||
@@ -2060,7 +2060,7 @@ export const auth = betterAuth({
|
||||
const profile = await response.json()
|
||||
|
||||
return {
|
||||
id: profile.id,
|
||||
id: `${profile.id}-${crypto.randomUUID()}`,
|
||||
name: profile.display_name || 'Spotify User',
|
||||
email: profile.email || `${profile.id}@spotify.user`,
|
||||
emailVerified: true,
|
||||
@@ -2108,7 +2108,7 @@ export const auth = betterAuth({
|
||||
const profile = await response.json()
|
||||
|
||||
return {
|
||||
id: profile.ID?.toString() || profile.id?.toString(),
|
||||
id: `${profile.ID?.toString() || profile.id?.toString()}-${crypto.randomUUID()}`,
|
||||
name: profile.display_name || profile.username || 'WordPress User',
|
||||
email: profile.email || `${profile.username}@wordpress.com`,
|
||||
emailVerified: profile.email_verified || false,
|
||||
|
||||
@@ -104,6 +104,7 @@

    "groq-sdk": "^0.15.0",
    "html-to-image": "1.11.13",
    "html-to-text": "^9.0.5",
    "idb-keyval": "6.2.2",
    "imapflow": "1.2.4",
    "input-otp": "^1.4.2",
    "ioredis": "^5.6.0",
@@ -1,2 +1,3 @@

export { indexedDBStorage } from './storage'
export { useTerminalConsoleStore } from './store'
export type { ConsoleEntry, ConsoleStore, ConsoleUpdate } from './types'

apps/sim/stores/terminal/console/storage.ts (new file, 79 lines)

@@ -0,0 +1,79 @@
import { createLogger } from '@sim/logger'
import { del, get, set } from 'idb-keyval'
import type { StateStorage } from 'zustand/middleware'

const logger = createLogger('ConsoleStorage')

const STORE_KEY = 'terminal-console-store'
const MIGRATION_KEY = 'terminal-console-store-migrated'

/**
 * Promise that resolves when migration is complete.
 * Used to ensure getItem waits for migration before reading.
 */
let migrationPromise: Promise<void> | null = null

/**
 * Migrates existing console data from localStorage to IndexedDB.
 * Runs once on first load, then marks migration as complete.
 */
async function migrateFromLocalStorage(): Promise<void> {
  if (typeof window === 'undefined') return

  try {
    const migrated = await get<boolean>(MIGRATION_KEY)
    if (migrated) return

    const localData = localStorage.getItem(STORE_KEY)
    if (localData) {
      await set(STORE_KEY, localData)
      localStorage.removeItem(STORE_KEY)
      logger.info('Migrated console store to IndexedDB')
    }

    await set(MIGRATION_KEY, true)
  } catch (error) {
    logger.warn('Migration from localStorage failed', { error })
  }
}

if (typeof window !== 'undefined') {
  migrationPromise = migrateFromLocalStorage()
}

export const indexedDBStorage: StateStorage = {
  getItem: async (name: string): Promise<string | null> => {
    if (typeof window === 'undefined') return null

    // Ensure migration completes before reading
    if (migrationPromise) {
      await migrationPromise
    }

    try {
      const value = await get<string>(name)
      return value ?? null
    } catch (error) {
      logger.warn('IndexedDB read failed', { name, error })
      return null
    }
  },

  setItem: async (name: string, value: string): Promise<void> => {
    if (typeof window === 'undefined') return
    try {
      await set(name, value)
    } catch (error) {
      logger.warn('IndexedDB write failed', { name, error })
    }
  },

  removeItem: async (name: string): Promise<void> => {
    if (typeof window === 'undefined') return
    try {
      await del(name)
    } catch (error) {
      logger.warn('IndexedDB delete failed', { name, error })
    }
  },
}
@@ -1,18 +1,22 @@
import { createLogger } from '@sim/logger'
import { create } from 'zustand'
import { devtools, persist } from 'zustand/middleware'
import { createJSONStorage, devtools, persist } from 'zustand/middleware'
import { redactApiKeys } from '@/lib/core/security/redaction'
import type { NormalizedBlockOutput } from '@/executor/types'
import { useExecutionStore } from '@/stores/execution'
import { useNotificationStore } from '@/stores/notifications'
import { useGeneralStore } from '@/stores/settings/general'
import { indexedDBStorage } from '@/stores/terminal/console/storage'
import type { ConsoleEntry, ConsoleStore, ConsoleUpdate } from '@/stores/terminal/console/types'

const logger = createLogger('TerminalConsoleStore')

/**
* Updates a NormalizedBlockOutput with new content
* Maximum number of console entries to keep per workflow.
* Keeps the stored data size reasonable and improves performance.
*/
const MAX_ENTRIES_PER_WORKFLOW = 500

const updateBlockOutput = (
existingOutput: NormalizedBlockOutput | undefined,
contentUpdate: string
@@ -23,9 +27,6 @@ const updateBlockOutput = (
}
}

/**
* Checks if output represents a streaming object that should be skipped
*/
const isStreamingOutput = (output: any): boolean => {
if (typeof ReadableStream !== 'undefined' && output instanceof ReadableStream) {
return true
@@ -44,9 +45,6 @@ const isStreamingOutput = (output: any): boolean => {
)
}

/**
* Checks if entry should be skipped to prevent duplicates
*/
const shouldSkipEntry = (output: any): boolean => {
if (typeof output !== 'object' || !output) {
return false
@@ -69,6 +67,9 @@ export const useTerminalConsoleStore = create<ConsoleStore>()(
(set, get) => ({
entries: [],
isOpen: false,
_hasHydrated: false,

setHasHydrated: (hasHydrated) => set({ _hasHydrated: hasHydrated }),

addConsole: (entry: Omit<ConsoleEntry, 'id' | 'timestamp'>) => {
set((state) => {
@@ -94,7 +95,15 @@ export const useTerminalConsoleStore = create<ConsoleStore>()(
timestamp: new Date().toISOString(),
}

return { entries: [newEntry, ...state.entries] }
const newEntries = [newEntry, ...state.entries]
const workflowCounts = new Map<string, number>()
const trimmedEntries = newEntries.filter((entry) => {
const count = workflowCounts.get(entry.workflowId) || 0
if (count >= MAX_ENTRIES_PER_WORKFLOW) return false
workflowCounts.set(entry.workflowId, count + 1)
return true
})
return { entries: trimmedEntries }
})

const newEntry = get().entries[0]
@@ -130,10 +139,6 @@ export const useTerminalConsoleStore = create<ConsoleStore>()(
return newEntry
},

/**
* Clears console entries for a specific workflow and clears the run path
* @param workflowId - The workflow ID to clear entries for
*/
clearWorkflowConsole: (workflowId: string) => {
set((state) => ({
entries: state.entries.filter((entry) => entry.workflowId !== workflowId),
@@ -148,9 +153,6 @@ export const useTerminalConsoleStore = create<ConsoleStore>()(
return
}

/**
* Formats a value for CSV export
*/
const formatCSVValue = (value: any): string => {
if (value === null || value === undefined) {
return ''
@@ -297,7 +299,34 @@ export const useTerminalConsoleStore = create<ConsoleStore>()(
}),
{
name: 'terminal-console-store',
storage: createJSONStorage(() => indexedDBStorage),
partialize: (state) => ({
entries: state.entries,
isOpen: state.isOpen,
}),
onRehydrateStorage: () => (_state, error) => {
if (error) {
logger.error('Failed to rehydrate console store', { error })
}
},
merge: (persistedState, currentState) => {
const persisted = persistedState as Partial<ConsoleStore> | undefined
return {
...currentState,
entries: persisted?.entries ?? currentState.entries,
isOpen: persisted?.isOpen ?? currentState.isOpen,
}
},
}
)
)
)

if (typeof window !== 'undefined') {
if (useTerminalConsoleStore.persist.hasHydrated()) {
useTerminalConsoleStore.setState({ _hasHydrated: true })
}
useTerminalConsoleStore.persist.onFinishHydration(() => {
useTerminalConsoleStore.setState({ _hasHydrated: true })
})
}
@@ -1,9 +1,6 @@
import type { NormalizedBlockOutput } from '@/executor/types'
import type { SubflowType } from '@/stores/workflows/workflow/types'

/**
* Console entry for terminal logs
*/
export interface ConsoleEntry {
id: string
timestamp: string
@@ -25,9 +22,6 @@ export interface ConsoleEntry {
iterationType?: SubflowType
}

/**
* Console update payload for partial updates
*/
export interface ConsoleUpdate {
content?: string
output?: Partial<NormalizedBlockOutput>
@@ -40,9 +34,6 @@ export interface ConsoleUpdate {
input?: any
}

/**
* Console store state and actions
*/
export interface ConsoleStore {
entries: ConsoleEntry[]
isOpen: boolean
@@ -52,4 +43,6 @@ export interface ConsoleStore {
getWorkflowEntries: (workflowId: string) => ConsoleEntry[]
toggleConsole: () => void
updateConsole: (blockId: string, update: string | ConsoleUpdate, executionId?: string) => void
_hasHydrated: boolean
setHasHydrated: (hasHydrated: boolean) => void
}
@@ -1,5 +1,19 @@
import type { Edge } from 'reactflow'
import { v4 as uuidv4 } from 'uuid'

export function filterNewEdges(edgesToAdd: Edge[], currentEdges: Edge[]): Edge[] {
return edgesToAdd.filter((edge) => {
if (edge.source === edge.target) return false
return !currentEdges.some(
(e) =>
e.source === edge.source &&
e.sourceHandle === edge.sourceHandle &&
e.target === edge.target &&
e.targetHandle === edge.targetHandle
)
})
}

import { getBlockOutputs } from '@/lib/workflows/blocks/block-outputs'
import { getBlock } from '@/blocks'
import { normalizeName } from '@/executor/constants'
@@ -297,7 +297,7 @@ describe('workflow store', () => {
expectEdgeConnects(edges, 'block-1', 'block-2')
})

it('should not add duplicate edges', () => {
it('should not add duplicate connections', () => {
const { addBlock, batchAddEdges } = useWorkflowStore.getState()

addBlock('block-1', 'starter', 'Start', { x: 0, y: 0 })
@@ -309,17 +309,6 @@ describe('workflow store', () => {
const state = useWorkflowStore.getState()
expectEdgeCount(state, 1)
})

it('should prevent self-referencing edges', () => {
const { addBlock, batchAddEdges } = useWorkflowStore.getState()

addBlock('block-1', 'function', 'Self', { x: 0, y: 0 })

batchAddEdges([{ id: 'e1', source: 'block-1', target: 'block-1' }])

const state = useWorkflowStore.getState()
expectEdgeCount(state, 0)
})
})

describe('batchRemoveEdges', () => {
@@ -9,7 +9,12 @@ import { getBlock } from '@/blocks'
import type { SubBlockConfig } from '@/blocks/types'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
import { getUniqueBlockName, mergeSubblockState, normalizeName } from '@/stores/workflows/utils'
import {
filterNewEdges,
getUniqueBlockName,
mergeSubblockState,
normalizeName,
} from '@/stores/workflows/utils'
import type {
Position,
SubBlockState,
@@ -496,29 +501,11 @@ export const useWorkflowStore = create<WorkflowStore>()(

batchAddEdges: (edges: Edge[]) => {
const currentEdges = get().edges
const filtered = filterNewEdges(edges, currentEdges)
const newEdges = [...currentEdges]
const existingEdgeIds = new Set(currentEdges.map((e) => e.id))

for (const edge of edges) {
// Skip if edge ID already exists
if (existingEdgeIds.has(edge.id)) continue

// Skip self-referencing edges
if (edge.source === edge.target) continue

// Skip if identical connection already exists (same ports)
const connectionExists = newEdges.some(
(e) =>
e.source === edge.source &&
e.sourceHandle === edge.sourceHandle &&
e.target === edge.target &&
e.targetHandle === edge.targetHandle
)
if (connectionExists) continue

// Skip if would create a cycle
for (const edge of filtered) {
if (wouldCreateCycle([...newEdges], edge.source, edge.target)) continue

newEdges.push({
id: edge.id || crypto.randomUUID(),
source: edge.source,
@@ -528,7 +515,6 @@ export const useWorkflowStore = create<WorkflowStore>()(
type: edge.type || 'default',
data: edge.data || {},
})
existingEdgeIds.add(edge.id)
}

const blocks = get().blocks
@@ -30,11 +30,14 @@ export const a2aCancelTaskTool: ToolConfig<A2ACancelTaskParams, A2ACancelTaskRes
headers: () => ({
'Content-Type': 'application/json',
}),
body: (params: A2ACancelTaskParams) => ({
agentUrl: params.agentUrl,
taskId: params.taskId,
apiKey: params.apiKey,
}),
body: (params: A2ACancelTaskParams) => {
const body: Record<string, string> = {
agentUrl: params.agentUrl,
taskId: params.taskId,
}
if (params.apiKey) body.apiKey = params.apiKey
return body
},
},

transformResponse: async (response: Response) => {
@@ -38,12 +38,16 @@ export const a2aDeletePushNotificationTool: ToolConfig<
headers: () => ({
'Content-Type': 'application/json',
}),
body: (params) => ({
agentUrl: params.agentUrl,
taskId: params.taskId,
pushNotificationConfigId: params.pushNotificationConfigId,
apiKey: params.apiKey,
}),
body: (params) => {
const body: Record<string, string> = {
agentUrl: params.agentUrl,
taskId: params.taskId,
}
if (params.pushNotificationConfigId)
body.pushNotificationConfigId = params.pushNotificationConfigId
if (params.apiKey) body.apiKey = params.apiKey
return body
},
},

transformResponse: async (response: Response) => {
@@ -25,10 +25,13 @@ export const a2aGetAgentCardTool: ToolConfig<A2AGetAgentCardParams, A2AGetAgentC
headers: () => ({
'Content-Type': 'application/json',
}),
body: (params) => ({
agentUrl: params.agentUrl,
apiKey: params.apiKey,
}),
body: (params) => {
const body: Record<string, string> = {
agentUrl: params.agentUrl,
}
if (params.apiKey) body.apiKey = params.apiKey
return body
},
},

transformResponse: async (response: Response) => {
@@ -33,11 +33,14 @@ export const a2aGetPushNotificationTool: ToolConfig<
headers: () => ({
'Content-Type': 'application/json',
}),
body: (params) => ({
agentUrl: params.agentUrl,
taskId: params.taskId,
apiKey: params.apiKey,
}),
body: (params) => {
const body: Record<string, string> = {
agentUrl: params.agentUrl,
taskId: params.taskId,
}
if (params.apiKey) body.apiKey = params.apiKey
return body
},
},

transformResponse: async (response: Response) => {
@@ -34,12 +34,15 @@ export const a2aGetTaskTool: ToolConfig<A2AGetTaskParams, A2AGetTaskResponse> =
headers: () => ({
'Content-Type': 'application/json',
}),
body: (params: A2AGetTaskParams) => ({
agentUrl: params.agentUrl,
taskId: params.taskId,
apiKey: params.apiKey,
historyLength: params.historyLength,
}),
body: (params: A2AGetTaskParams) => {
const body: Record<string, string | number> = {
agentUrl: params.agentUrl,
taskId: params.taskId,
}
if (params.apiKey) body.apiKey = params.apiKey
if (params.historyLength) body.historyLength = params.historyLength
return body
},
},

transformResponse: async (response: Response) => {
@@ -5,7 +5,6 @@ import { a2aGetPushNotificationTool } from './get_push_notification'
import { a2aGetTaskTool } from './get_task'
import { a2aResubscribeTool } from './resubscribe'
import { a2aSendMessageTool } from './send_message'
import { a2aSendMessageStreamTool } from './send_message_stream'
import { a2aSetPushNotificationTool } from './set_push_notification'

export {
@@ -16,6 +15,5 @@ export {
a2aGetTaskTool,
a2aResubscribeTool,
a2aSendMessageTool,
a2aSendMessageStreamTool,
a2aSetPushNotificationTool,
}
@@ -30,11 +30,14 @@ export const a2aResubscribeTool: ToolConfig<A2AResubscribeParams, A2AResubscribe
headers: () => ({
'Content-Type': 'application/json',
}),
body: (params: A2AResubscribeParams) => ({
agentUrl: params.agentUrl,
taskId: params.taskId,
apiKey: params.apiKey,
}),
body: (params: A2AResubscribeParams) => {
const body: Record<string, string> = {
agentUrl: params.agentUrl,
taskId: params.taskId,
}
if (params.apiKey) body.apiKey = params.apiKey
return body
},
},

transformResponse: async (response) => {
@@ -26,6 +26,14 @@ export const a2aSendMessageTool: ToolConfig<A2ASendMessageParams, A2ASendMessage
type: 'string',
description: 'Context ID for conversation continuity',
},
data: {
type: 'string',
description: 'Structured data to include with the message (JSON string)',
},
files: {
type: 'array',
description: 'Files to include with the message',
},
apiKey: {
type: 'string',
description: 'API key for authentication',
@@ -35,7 +43,21 @@ export const a2aSendMessageTool: ToolConfig<A2ASendMessageParams, A2ASendMessage
request: {
url: '/api/tools/a2a/send-message',
method: 'POST',
headers: () => ({}),
headers: () => ({
'Content-Type': 'application/json',
}),
body: (params) => {
const body: Record<string, unknown> = {
agentUrl: params.agentUrl,
message: params.message,
}
if (params.taskId) body.taskId = params.taskId
if (params.contextId) body.contextId = params.contextId
if (params.data) body.data = params.data
if (params.files && params.files.length > 0) body.files = params.files
if (params.apiKey) body.apiKey = params.apiKey
return body
},
},

transformResponse: async (response: Response) => {
@@ -1,81 +0,0 @@
import type { ToolConfig } from '@/tools/types'
import type { A2ASendMessageParams, A2ASendMessageResponse } from './types'

export const a2aSendMessageStreamTool: ToolConfig<A2ASendMessageParams, A2ASendMessageResponse> = {
id: 'a2a_send_message_stream',
name: 'A2A Send Message (Streaming)',
description: 'Send a message to an external A2A-compatible agent with real-time streaming.',
version: '1.0.0',

params: {
agentUrl: {
type: 'string',
required: true,
description: 'The A2A agent endpoint URL',
},
message: {
type: 'string',
required: true,
description: 'Message to send to the agent',
},
taskId: {
type: 'string',
description: 'Task ID for continuing an existing task',
},
contextId: {
type: 'string',
description: 'Context ID for conversation continuity',
},
apiKey: {
type: 'string',
description: 'API key for authentication',
},
},

request: {
url: '/api/tools/a2a/send-message-stream',
method: 'POST',
headers: () => ({
'Content-Type': 'application/json',
}),
body: (params) => ({
agentUrl: params.agentUrl,
message: params.message,
taskId: params.taskId,
contextId: params.contextId,
apiKey: params.apiKey,
}),
},

transformResponse: async (response: Response) => {
const data = await response.json()
return data
},

outputs: {
content: {
type: 'string',
description: 'The text response from the agent',
},
taskId: {
type: 'string',
description: 'Task ID for follow-up interactions',
},
contextId: {
type: 'string',
description: 'Context ID for conversation continuity',
},
state: {
type: 'string',
description: 'Task state',
},
artifacts: {
type: 'array',
description: 'Structured output artifacts',
},
history: {
type: 'array',
description: 'Full message history',
},
},
}
@@ -42,13 +42,16 @@ export const a2aSetPushNotificationTool: ToolConfig<
headers: () => ({
'Content-Type': 'application/json',
}),
body: (params: A2ASetPushNotificationParams) => ({
agentUrl: params.agentUrl,
taskId: params.taskId,
webhookUrl: params.webhookUrl,
token: params.token,
apiKey: params.apiKey,
}),
body: (params: A2ASetPushNotificationParams) => {
const body: Record<string, string> = {
agentUrl: params.agentUrl,
taskId: params.taskId,
webhookUrl: params.webhookUrl,
}
if (params.token) body.token = params.token
if (params.apiKey) body.apiKey = params.apiKey
return body
},
},

transformResponse: async (response: Response) => {
@@ -25,11 +25,20 @@ export interface A2AGetAgentCardResponse extends ToolResponse {
}
}

export interface A2ASendMessageFileInput {
type: 'file' | 'url'
data: string
name: string
mime?: string
}

export interface A2ASendMessageParams {
agentUrl: string
message: string
taskId?: string
contextId?: string
data?: string
files?: A2ASendMessageFileInput[]
apiKey?: string
}
@@ -5,7 +5,6 @@ import {
a2aGetPushNotificationTool,
a2aGetTaskTool,
a2aResubscribeTool,
a2aSendMessageStreamTool,
a2aSendMessageTool,
a2aSetPushNotificationTool,
} from '@/tools/a2a'
@@ -1180,6 +1179,8 @@ import {
slackCanvasTool,
slackDeleteMessageTool,
slackDownloadTool,
slackGetMessageTool,
slackGetThreadTool,
slackGetUserTool,
slackListChannelsTool,
slackListMembersTool,
@@ -1541,7 +1542,6 @@ export const tools: Record<string, ToolConfig> = {
a2a_get_task: a2aGetTaskTool,
a2a_resubscribe: a2aResubscribeTool,
a2a_send_message: a2aSendMessageTool,
a2a_send_message_stream: a2aSendMessageStreamTool,
a2a_set_push_notification: a2aSetPushNotificationTool,
arxiv_search: arxivSearchTool,
arxiv_get_paper: arxivGetPaperTool,
@@ -1731,6 +1731,8 @@ export const tools: Record<string, ToolConfig> = {
slack_list_members: slackListMembersTool,
slack_list_users: slackListUsersTool,
slack_get_user: slackGetUserTool,
slack_get_message: slackGetMessageTool,
slack_get_thread: slackGetThreadTool,
slack_canvas: slackCanvasTool,
slack_download: slackDownloadTool,
slack_update_message: slackUpdateMessageTool,
213 apps/sim/tools/slack/get_message.ts Normal file
@@ -0,0 +1,213 @@
import type { SlackGetMessageParams, SlackGetMessageResponse } from '@/tools/slack/types'
import type { ToolConfig } from '@/tools/types'

export const slackGetMessageTool: ToolConfig<SlackGetMessageParams, SlackGetMessageResponse> = {
id: 'slack_get_message',
name: 'Slack Get Message',
description:
'Retrieve a specific message by its timestamp. Useful for getting a thread parent message.',
version: '1.0.0',

oauth: {
required: true,
provider: 'slack',
},

params: {
authMethod: {
type: 'string',
required: false,
visibility: 'user-only',
description: 'Authentication method: oauth or bot_token',
},
botToken: {
type: 'string',
required: false,
visibility: 'user-only',
description: 'Bot token for Custom Bot',
},
accessToken: {
type: 'string',
required: false,
visibility: 'hidden',
description: 'OAuth access token or bot token for Slack API',
},
channel: {
type: 'string',
required: true,
visibility: 'user-only',
description: 'Slack channel ID (e.g., C1234567890)',
},
timestamp: {
type: 'string',
required: true,
visibility: 'user-or-llm',
description: 'Message timestamp to retrieve (e.g., 1405894322.002768)',
},
},

request: {
url: (params: SlackGetMessageParams) => {
const url = new URL('https://slack.com/api/conversations.history')
url.searchParams.append('channel', params.channel?.trim() ?? '')
url.searchParams.append('oldest', params.timestamp?.trim() ?? '')
url.searchParams.append('limit', '1')
url.searchParams.append('inclusive', 'true')
return url.toString()
},
method: 'GET',
headers: (params: SlackGetMessageParams) => ({
'Content-Type': 'application/json',
Authorization: `Bearer ${params.accessToken || params.botToken}`,
}),
},

transformResponse: async (response: Response) => {
const data = await response.json()

if (!data.ok) {
if (data.error === 'missing_scope') {
throw new Error(
'Missing required permissions. Please reconnect your Slack account with the necessary scopes (channels:history, groups:history).'
)
}
if (data.error === 'invalid_auth') {
throw new Error('Invalid authentication. Please check your Slack credentials.')
}
if (data.error === 'channel_not_found') {
throw new Error('Channel not found. Please check the channel ID.')
}
throw new Error(data.error || 'Failed to get message from Slack')
}

const messages = data.messages || []
if (messages.length === 0) {
throw new Error('Message not found')
}

const msg = messages[0]
const message = {
type: msg.type ?? 'message',
ts: msg.ts,
text: msg.text ?? '',
user: msg.user ?? null,
bot_id: msg.bot_id ?? null,
username: msg.username ?? null,
channel: msg.channel ?? null,
team: msg.team ?? null,
thread_ts: msg.thread_ts ?? null,
parent_user_id: msg.parent_user_id ?? null,
reply_count: msg.reply_count ?? null,
reply_users_count: msg.reply_users_count ?? null,
latest_reply: msg.latest_reply ?? null,
subscribed: msg.subscribed ?? null,
last_read: msg.last_read ?? null,
unread_count: msg.unread_count ?? null,
subtype: msg.subtype ?? null,
reactions: msg.reactions ?? [],
is_starred: msg.is_starred ?? false,
pinned_to: msg.pinned_to ?? [],
files: (msg.files ?? []).map((f: any) => ({
id: f.id,
name: f.name,
mimetype: f.mimetype,
size: f.size,
url_private: f.url_private ?? null,
permalink: f.permalink ?? null,
mode: f.mode ?? null,
})),
attachments: msg.attachments ?? [],
blocks: msg.blocks ?? [],
edited: msg.edited ?? null,
permalink: msg.permalink ?? null,
}

return {
success: true,
output: {
message,
},
}
},

outputs: {
message: {
type: 'object',
description: 'The retrieved message object',
properties: {
type: { type: 'string', description: 'Message type' },
ts: { type: 'string', description: 'Message timestamp' },
text: { type: 'string', description: 'Message text content' },
user: { type: 'string', description: 'User ID who sent the message' },
bot_id: { type: 'string', description: 'Bot ID if sent by a bot', optional: true },
username: { type: 'string', description: 'Display username', optional: true },
channel: { type: 'string', description: 'Channel ID', optional: true },
team: { type: 'string', description: 'Team ID', optional: true },
thread_ts: { type: 'string', description: 'Thread parent timestamp', optional: true },
parent_user_id: { type: 'string', description: 'User ID of thread parent', optional: true },
reply_count: { type: 'number', description: 'Number of thread replies', optional: true },
reply_users_count: {
type: 'number',
description: 'Number of users who replied',
optional: true,
},
latest_reply: { type: 'string', description: 'Timestamp of latest reply', optional: true },
subtype: { type: 'string', description: 'Message subtype', optional: true },
reactions: {
type: 'array',
description: 'Array of reactions on this message',
items: {
type: 'object',
properties: {
name: { type: 'string', description: 'Emoji name' },
count: { type: 'number', description: 'Number of reactions' },
users: {
type: 'array',
description: 'User IDs who reacted',
items: { type: 'string' },
},
},
},
},
is_starred: { type: 'boolean', description: 'Whether message is starred', optional: true },
pinned_to: {
type: 'array',
description: 'Channel IDs where message is pinned',
items: { type: 'string' },
optional: true,
},
files: {
type: 'array',
description: 'Files attached to message',
items: {
type: 'object',
properties: {
id: { type: 'string', description: 'File ID' },
name: { type: 'string', description: 'File name' },
mimetype: { type: 'string', description: 'MIME type' },
size: { type: 'number', description: 'File size in bytes' },
url_private: { type: 'string', description: 'Private download URL' },
permalink: { type: 'string', description: 'Permanent link to file' },
},
},
},
attachments: {
type: 'array',
description: 'Legacy attachments',
items: { type: 'object' },
},
blocks: { type: 'array', description: 'Block Kit blocks', items: { type: 'object' } },
edited: {
type: 'object',
description: 'Edit information if message was edited',
properties: {
user: { type: 'string', description: 'User ID who edited' },
ts: { type: 'string', description: 'Edit timestamp' },
},
optional: true,
},
permalink: { type: 'string', description: 'Permanent link to message', optional: true },
},
},
},
}
224 apps/sim/tools/slack/get_thread.ts Normal file
@@ -0,0 +1,224 @@
import type { SlackGetThreadParams, SlackGetThreadResponse } from '@/tools/slack/types'
import type { ToolConfig } from '@/tools/types'

export const slackGetThreadTool: ToolConfig<SlackGetThreadParams, SlackGetThreadResponse> = {
id: 'slack_get_thread',
name: 'Slack Get Thread',
description:
'Retrieve an entire thread including the parent message and all replies. Useful for getting full conversation context.',
version: '1.0.0',

oauth: {
required: true,
provider: 'slack',
},

params: {
authMethod: {
type: 'string',
required: false,
visibility: 'user-only',
description: 'Authentication method: oauth or bot_token',
},
botToken: {
type: 'string',
required: false,
visibility: 'user-only',
description: 'Bot token for Custom Bot',
},
accessToken: {
type: 'string',
required: false,
visibility: 'hidden',
description: 'OAuth access token or bot token for Slack API',
},
channel: {
type: 'string',
required: true,
visibility: 'user-only',
description: 'Slack channel ID (e.g., C1234567890)',
},
threadTs: {
type: 'string',
required: true,
visibility: 'user-or-llm',
description: 'Thread timestamp (thread_ts) to retrieve (e.g., 1405894322.002768)',
},
limit: {
type: 'number',
required: false,
visibility: 'user-or-llm',
description: 'Maximum number of messages to return (default: 100, max: 200)',
},
},

request: {
url: (params: SlackGetThreadParams) => {
const url = new URL('https://slack.com/api/conversations.replies')
url.searchParams.append('channel', params.channel?.trim() ?? '')
url.searchParams.append('ts', params.threadTs?.trim() ?? '')
url.searchParams.append('inclusive', 'true')
const limit = params.limit ? Math.min(Number(params.limit), 200) : 100
url.searchParams.append('limit', String(limit))
return url.toString()
},
method: 'GET',
headers: (params: SlackGetThreadParams) => ({
'Content-Type': 'application/json',
Authorization: `Bearer ${params.accessToken || params.botToken}`,
}),
},

transformResponse: async (response: Response) => {
const data = await response.json()

if (!data.ok) {
if (data.error === 'missing_scope') {
throw new Error(
'Missing required permissions. Please reconnect your Slack account with the necessary scopes (channels:history, groups:history).'
)
}
if (data.error === 'invalid_auth') {
throw new Error('Invalid authentication. Please check your Slack credentials.')
}
if (data.error === 'channel_not_found') {
throw new Error('Channel not found. Please check the channel ID.')
}
if (data.error === 'thread_not_found') {
throw new Error('Thread not found. Please check the thread timestamp.')
}
throw new Error(data.error || 'Failed to get thread from Slack')
}

const rawMessages = data.messages || []
if (rawMessages.length === 0) {
throw new Error('Thread not found')
}

const messages = rawMessages.map((msg: any) => ({
type: msg.type ?? 'message',
ts: msg.ts,
text: msg.text ?? '',
user: msg.user ?? null,
bot_id: msg.bot_id ?? null,
username: msg.username ?? null,
channel: msg.channel ?? null,
team: msg.team ?? null,
thread_ts: msg.thread_ts ?? null,
parent_user_id: msg.parent_user_id ?? null,
reply_count: msg.reply_count ?? null,
reply_users_count: msg.reply_users_count ?? null,
latest_reply: msg.latest_reply ?? null,
subscribed: msg.subscribed ?? null,
last_read: msg.last_read ?? null,
unread_count: msg.unread_count ?? null,
subtype: msg.subtype ?? null,
reactions: msg.reactions ?? [],
is_starred: msg.is_starred ?? false,
pinned_to: msg.pinned_to ?? [],
files: (msg.files ?? []).map((f: any) => ({
id: f.id,
name: f.name,
mimetype: f.mimetype,
size: f.size,
url_private: f.url_private ?? null,
permalink: f.permalink ?? null,
mode: f.mode ?? null,
})),
attachments: msg.attachments ?? [],
blocks: msg.blocks ?? [],
edited: msg.edited ?? null,
permalink: msg.permalink ?? null,
}))

// First message is always the parent
const parentMessage = messages[0]
// Remaining messages are replies
const replies = messages.slice(1)

return {
success: true,
output: {
parentMessage,
replies,
messages,
replyCount: replies.length,
hasMore: data.has_more ?? false,
},
}
},

outputs: {
parentMessage: {
type: 'object',
description: 'The thread parent message',
properties: {
type: { type: 'string', description: 'Message type' },
ts: { type: 'string', description: 'Message timestamp' },
text: { type: 'string', description: 'Message text content' },
user: { type: 'string', description: 'User ID who sent the message' },
bot_id: { type: 'string', description: 'Bot ID if sent by a bot', optional: true },
username: { type: 'string', description: 'Display username', optional: true },
reply_count: { type: 'number', description: 'Total number of thread replies' },
reply_users_count: { type: 'number', description: 'Number of users who replied' },
latest_reply: { type: 'string', description: 'Timestamp of latest reply' },
reactions: {
type: 'array',
description: 'Array of reactions on the parent message',
items: {
type: 'object',
properties: {
name: { type: 'string', description: 'Emoji name' },
count: { type: 'number', description: 'Number of reactions' },
users: {
type: 'array',
description: 'User IDs who reacted',
items: { type: 'string' },
},
},
},
},
files: {
type: 'array',
description: 'Files attached to the parent message',
items: {
type: 'object',
properties: {
id: { type: 'string', description: 'File ID' },
name: { type: 'string', description: 'File name' },
mimetype: { type: 'string', description: 'MIME type' },
size: { type: 'number', description: 'File size in bytes' },
},
},
},
},
},
replies: {
type: 'array',
description: 'Array of reply messages in the thread (excluding the parent)',
items: {
type: 'object',
properties: {
ts: { type: 'string', description: 'Message timestamp' },
text: { type: 'string', description: 'Message text content' },
user: { type: 'string', description: 'User ID who sent the reply' },
reactions: { type: 'array', description: 'Reactions on the reply' },
files: { type: 'array', description: 'Files attached to the reply' },
},
},
},
messages: {
type: 'array',
description: 'All messages in the thread (parent + replies) in chronological order',
items: { type: 'object' },
},
replyCount: {
type: 'number',
description: 'Number of replies returned in this response',
},
hasMore: {
type: 'boolean',
description: 'Whether there are more messages in the thread (pagination needed)',
},
},
}
@@ -2,6 +2,8 @@ import { slackAddReactionTool } from '@/tools/slack/add_reaction'
import { slackCanvasTool } from '@/tools/slack/canvas'
import { slackDeleteMessageTool } from '@/tools/slack/delete_message'
import { slackDownloadTool } from '@/tools/slack/download'
import { slackGetMessageTool } from '@/tools/slack/get_message'
import { slackGetThreadTool } from '@/tools/slack/get_thread'
import { slackGetUserTool } from '@/tools/slack/get_user'
import { slackListChannelsTool } from '@/tools/slack/list_channels'
import { slackListMembersTool } from '@/tools/slack/list_members'
@@ -22,4 +24,6 @@ export {
slackListMembersTool,
slackListUsersTool,
slackGetUserTool,
slackGetMessageTool,
slackGetThreadTool,
}
@@ -71,6 +71,17 @@ export interface SlackGetUserParams extends SlackBaseParams {
userId: string
}

export interface SlackGetMessageParams extends SlackBaseParams {
channel: string
timestamp: string
}

export interface SlackGetThreadParams extends SlackBaseParams {
channel: string
threadTs: string
limit?: number
}

export interface SlackMessageResponse extends ToolResponse {
output: {
// Legacy properties for backward compatibility
@@ -305,6 +316,22 @@ export interface SlackGetUserResponse extends ToolResponse {
}
}

export interface SlackGetMessageResponse extends ToolResponse {
output: {
message: SlackMessage
}
}

export interface SlackGetThreadResponse extends ToolResponse {
output: {
parentMessage: SlackMessage
replies: SlackMessage[]
messages: SlackMessage[]
replyCount: number
hasMore: boolean
}
}

export type SlackResponse =
| SlackCanvasResponse
| SlackMessageReaderResponse
@@ -317,3 +344,5 @@ export type SlackResponse =
| SlackListMembersResponse
| SlackListUsersResponse
| SlackGetUserResponse
| SlackGetMessageResponse
| SlackGetThreadResponse
4 bun.lock
@@ -1,5 +1,6 @@
{
"lockfileVersion": 1,
"configVersion": 0,
"workspaces": {
"": {
"name": "simstudio",
@@ -133,6 +134,7 @@
"groq-sdk": "^0.15.0",
"html-to-image": "1.11.13",
"html-to-text": "^9.0.5",
"idb-keyval": "6.2.2",
"imapflow": "1.2.4",
"input-otp": "^1.4.2",
"ioredis": "^5.6.0",
@@ -2310,6 +2312,8 @@

"iconv-lite": ["iconv-lite@0.6.3", "", { "dependencies": { "safer-buffer": ">= 2.1.2 < 3.0.0" } }, "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw=="],

"idb-keyval": ["idb-keyval@6.2.2", "", {}, "sha512-yjD9nARJ/jb1g+CvD0tlhUHOrJ9Sy0P8T9MF3YaLlHnSRpwPfpTX0XIvpmw3gAJUmEu3FiICLBDPXVwyEvrleg=="],

"ieee754": ["ieee754@1.2.1", "", {}, "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA=="],

"image-size": ["image-size@2.0.2", "", { "bin": { "image-size": "bin/image-size.js" } }, "sha512-IRqXKlaXwgSMAMtpNzZa1ZAe8m+Sa1770Dhk8VkSsP9LS+iHD62Zd8FQKs8fbPiagBE7BzoFX23cxFnwshpV6w=="],
2 packages/db/migrations/0140_fuzzy_the_twelve.sql Normal file
@@ -0,0 +1,2 @@
DROP INDEX "account_user_provider_account_unique";--> statement-breakpoint
CREATE UNIQUE INDEX "account_user_provider_unique" ON "account" USING btree ("user_id","provider_id");
10239 packages/db/migrations/meta/0140_snapshot.json Normal file
File diff suppressed because it is too large
@@ -974,6 +974,13 @@
"when": 1768260112533,
"tag": "0139_late_cargill",
"breakpoints": true
},
{
"idx": 140,
"version": "7",
"when": 1768366574848,
"tag": "0140_fuzzy_the_twelve",
"breakpoints": true
}
]
}
@@ -89,10 +89,9 @@ export const account = pgTable(
table.accountId,
table.providerId
),
uniqueUserProviderAccount: uniqueIndex('account_user_provider_account_unique').on(
uniqueUserProvider: uniqueIndex('account_user_provider_unique').on(
table.userId,
table.providerId,
table.accountId
table.providerId
),
})
)