mirror of
https://github.com/simstudioai/sim.git
synced 2026-04-28 03:00:29 -04:00
v0.6.27: new triggers, mothership improvements, files archive, queueing improvements, posthog, secrets mutations
This commit is contained in:
@@ -3,63 +3,57 @@ name: add-trigger
|
||||
description: Create or update Sim webhook triggers using the generic trigger builder, service-specific setup instructions, outputs, and registry wiring. Use when working in `apps/sim/triggers/{service}/` or adding webhook support to an integration.
|
||||
---
|
||||
|
||||
# Add Trigger Skill
|
||||
# Add Trigger
|
||||
|
||||
You are an expert at creating webhook triggers for Sim. You understand the trigger system, the generic `buildTriggerSubBlocks` helper, and how triggers connect to blocks.
|
||||
|
||||
## Your Task
|
||||
|
||||
When the user asks you to create triggers for a service:
|
||||
1. Research what webhook events the service supports
|
||||
2. Create the trigger files using the generic builder
|
||||
3. Create a provider handler if custom auth, formatting, or subscriptions are needed
|
||||
4. Register triggers and connect them to the block
|
||||
|
||||
## Directory Structure
|
||||
|
||||
```
|
||||
apps/sim/triggers/{service}/
|
||||
├── index.ts # Barrel exports
|
||||
├── utils.ts # Service-specific helpers (trigger options, setup instructions, extra fields)
|
||||
├── utils.ts # Service-specific helpers (options, instructions, extra fields, outputs)
|
||||
├── {event_a}.ts # Primary trigger (includes dropdown)
|
||||
├── {event_b}.ts # Secondary trigger (no dropdown)
|
||||
├── {event_c}.ts # Secondary trigger (no dropdown)
|
||||
└── webhook.ts # Generic webhook trigger (optional, for "all events")
|
||||
|
||||
apps/sim/lib/webhooks/
|
||||
├── provider-subscription-utils.ts # Shared subscription helpers (getProviderConfig, getNotificationUrl)
|
||||
├── providers/
|
||||
│ ├── {service}.ts # Provider handler (auth, formatInput, matchEvent, subscriptions)
|
||||
│ ├── types.ts # WebhookProviderHandler interface
|
||||
│ ├── utils.ts # Shared helpers (createHmacVerifier, verifyTokenAuth, skipByEventTypes)
|
||||
│ └── registry.ts # Handler map + default handler
|
||||
```
|
||||
|
||||
## Step 1: Create utils.ts
|
||||
## Step 1: Create `utils.ts`
|
||||
|
||||
This file contains service-specific helpers used by all triggers.
|
||||
This file contains all service-specific helpers used by triggers.
|
||||
|
||||
```typescript
|
||||
import type { SubBlockConfig } from '@/blocks/types'
|
||||
import type { TriggerOutput } from '@/triggers/types'
|
||||
|
||||
/**
|
||||
* Dropdown options for the trigger type selector.
|
||||
* These appear in the primary trigger's dropdown.
|
||||
*/
|
||||
export const {service}TriggerOptions = [
|
||||
{ label: 'Event A', id: '{service}_event_a' },
|
||||
{ label: 'Event B', id: '{service}_event_b' },
|
||||
{ label: 'Event C', id: '{service}_event_c' },
|
||||
{ label: 'Generic Webhook (All Events)', id: '{service}_webhook' },
|
||||
]
|
||||
|
||||
/**
|
||||
* Generates HTML setup instructions for the trigger.
|
||||
* Displayed to users to help them configure webhooks in the external service.
|
||||
*/
|
||||
export function {service}SetupInstructions(eventType: string): string {
|
||||
const instructions = [
|
||||
'Copy the <strong>Webhook URL</strong> above',
|
||||
'Go to <strong>{Service} Settings > Webhooks</strong>',
|
||||
'Click <strong>Add Webhook</strong>',
|
||||
'Paste the webhook URL',
|
||||
`Select the <strong>${eventType}</strong> event type`,
|
||||
'Save the webhook configuration',
|
||||
'Paste the webhook URL and save',
|
||||
'Click "Save" above to activate your trigger',
|
||||
]
|
||||
|
||||
return instructions
|
||||
.map((instruction, index) =>
|
||||
`<div class="mb-3"><strong>${index + 1}.</strong> ${instruction}</div>`
|
||||
@@ -67,10 +61,6 @@ export function {service}SetupInstructions(eventType: string): string {
|
||||
.join('')
|
||||
}
|
||||
|
||||
/**
|
||||
* Service-specific extra fields to add to triggers.
|
||||
* These are inserted between webhookUrl and triggerSave.
|
||||
*/
|
||||
export function build{Service}ExtraFields(triggerId: string): SubBlockConfig[] {
|
||||
return [
|
||||
{
|
||||
@@ -78,53 +68,34 @@ export function build{Service}ExtraFields(triggerId: string): SubBlockConfig[] {
|
||||
title: 'Project ID (Optional)',
|
||||
type: 'short-input',
|
||||
placeholder: 'Leave empty for all projects',
|
||||
description: 'Optionally filter to a specific project',
|
||||
mode: 'trigger',
|
||||
condition: { field: 'selectedTriggerId', value: triggerId },
|
||||
},
|
||||
]
|
||||
}
|
||||
|
||||
/**
|
||||
* Build outputs for this trigger type.
|
||||
* Outputs define what data is available to downstream blocks.
|
||||
*/
|
||||
export function build{Service}Outputs(): Record<string, TriggerOutput> {
|
||||
return {
|
||||
eventType: { type: 'string', description: 'The type of event that triggered this workflow' },
|
||||
resourceId: { type: 'string', description: 'ID of the affected resource' },
|
||||
timestamp: { type: 'string', description: 'When the event occurred (ISO 8601)' },
|
||||
// Nested outputs for complex data
|
||||
resource: {
|
||||
id: { type: 'string', description: 'Resource ID' },
|
||||
name: { type: 'string', description: 'Resource name' },
|
||||
status: { type: 'string', description: 'Current status' },
|
||||
},
|
||||
webhook: { type: 'json', description: 'Full webhook payload' },
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Step 2: Create the Primary Trigger
|
||||
## Step 2: Create Trigger Files
|
||||
|
||||
The **primary trigger** is the first one listed. It MUST include `includeDropdown: true` so users can switch between trigger types.
|
||||
**Primary trigger** — MUST include `includeDropdown: true`:
|
||||
|
||||
```typescript
|
||||
import { {Service}Icon } from '@/components/icons'
|
||||
import { buildTriggerSubBlocks } from '@/triggers'
|
||||
import {
|
||||
build{Service}ExtraFields,
|
||||
build{Service}Outputs,
|
||||
{service}SetupInstructions,
|
||||
{service}TriggerOptions,
|
||||
} from '@/triggers/{service}/utils'
|
||||
import { build{Service}ExtraFields, build{Service}Outputs, {service}SetupInstructions, {service}TriggerOptions } from '@/triggers/{service}/utils'
|
||||
import type { TriggerConfig } from '@/triggers/types'
|
||||
|
||||
/**
|
||||
* {Service} Event A Trigger
|
||||
*
|
||||
* This is the PRIMARY trigger - it includes the dropdown for selecting trigger type.
|
||||
*/
|
||||
export const {service}EventATrigger: TriggerConfig = {
|
||||
id: '{service}_event_a',
|
||||
name: '{Service} Event A',
|
||||
@@ -132,496 +103,222 @@ export const {service}EventATrigger: TriggerConfig = {
|
||||
description: 'Trigger workflow when Event A occurs',
|
||||
version: '1.0.0',
|
||||
icon: {Service}Icon,
|
||||
|
||||
subBlocks: buildTriggerSubBlocks({
|
||||
triggerId: '{service}_event_a',
|
||||
triggerOptions: {service}TriggerOptions,
|
||||
includeDropdown: true, // PRIMARY TRIGGER - includes dropdown
|
||||
setupInstructions: {service}SetupInstructions('Event A'),
|
||||
extraFields: build{Service}ExtraFields('{service}_event_a'),
|
||||
}),
|
||||
|
||||
outputs: build{Service}Outputs(),
|
||||
|
||||
webhook: {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
},
|
||||
webhook: { method: 'POST', headers: { 'Content-Type': 'application/json' } },
|
||||
}
|
||||
```
|
||||
|
||||
## Step 3: Create Secondary Triggers
|
||||
|
||||
Secondary triggers do NOT include the dropdown (it's already in the primary trigger).
|
||||
**Secondary triggers** — NO `includeDropdown` (it's already in the primary):
|
||||
|
||||
```typescript
|
||||
import { {Service}Icon } from '@/components/icons'
|
||||
import { buildTriggerSubBlocks } from '@/triggers'
|
||||
import {
|
||||
build{Service}ExtraFields,
|
||||
build{Service}Outputs,
|
||||
{service}SetupInstructions,
|
||||
{service}TriggerOptions,
|
||||
} from '@/triggers/{service}/utils'
|
||||
import type { TriggerConfig } from '@/triggers/types'
|
||||
|
||||
/**
|
||||
* {Service} Event B Trigger
|
||||
*/
|
||||
export const {service}EventBTrigger: TriggerConfig = {
|
||||
id: '{service}_event_b',
|
||||
name: '{Service} Event B',
|
||||
provider: '{service}',
|
||||
description: 'Trigger workflow when Event B occurs',
|
||||
version: '1.0.0',
|
||||
icon: {Service}Icon,
|
||||
|
||||
subBlocks: buildTriggerSubBlocks({
|
||||
triggerId: '{service}_event_b',
|
||||
triggerOptions: {service}TriggerOptions,
|
||||
// NO includeDropdown - secondary trigger
|
||||
setupInstructions: {service}SetupInstructions('Event B'),
|
||||
extraFields: build{Service}ExtraFields('{service}_event_b'),
|
||||
}),
|
||||
|
||||
outputs: build{Service}Outputs(),
|
||||
|
||||
webhook: {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
},
|
||||
// Same as above but: id: '{service}_event_b', no includeDropdown
|
||||
}
|
||||
```
|
||||
|
||||
## Step 4: Create index.ts Barrel Export
|
||||
## Step 3: Register and Wire
|
||||
|
||||
### `apps/sim/triggers/{service}/index.ts`
|
||||
|
||||
```typescript
|
||||
export { {service}EventATrigger } from './event_a'
|
||||
export { {service}EventBTrigger } from './event_b'
|
||||
export { {service}EventCTrigger } from './event_c'
|
||||
export { {service}WebhookTrigger } from './webhook'
|
||||
```
|
||||
|
||||
## Step 5: Register Triggers
|
||||
|
||||
### Trigger Registry (`apps/sim/triggers/registry.ts`)
|
||||
### `apps/sim/triggers/registry.ts`
|
||||
|
||||
```typescript
|
||||
// Add import
|
||||
import {
|
||||
{service}EventATrigger,
|
||||
{service}EventBTrigger,
|
||||
{service}EventCTrigger,
|
||||
{service}WebhookTrigger,
|
||||
} from '@/triggers/{service}'
|
||||
import { {service}EventATrigger, {service}EventBTrigger } from '@/triggers/{service}'
|
||||
|
||||
// Add to TRIGGER_REGISTRY
|
||||
export const TRIGGER_REGISTRY: TriggerRegistry = {
|
||||
// ... existing triggers ...
|
||||
// ... existing ...
|
||||
{service}_event_a: {service}EventATrigger,
|
||||
{service}_event_b: {service}EventBTrigger,
|
||||
{service}_event_c: {service}EventCTrigger,
|
||||
{service}_webhook: {service}WebhookTrigger,
|
||||
}
|
||||
```
|
||||
|
||||
## Step 6: Connect Triggers to Block
|
||||
|
||||
In the block file (`apps/sim/blocks/blocks/{service}.ts`):
|
||||
### Block file (`apps/sim/blocks/blocks/{service}.ts`)
|
||||
|
||||
```typescript
|
||||
import { {Service}Icon } from '@/components/icons'
|
||||
import { getTrigger } from '@/triggers'
|
||||
import type { BlockConfig } from '@/blocks/types'
|
||||
|
||||
export const {Service}Block: BlockConfig = {
|
||||
type: '{service}',
|
||||
name: '{Service}',
|
||||
// ... other config ...
|
||||
|
||||
// Enable triggers and list available trigger IDs
|
||||
// ...
|
||||
triggers: {
|
||||
enabled: true,
|
||||
available: [
|
||||
'{service}_event_a',
|
||||
'{service}_event_b',
|
||||
'{service}_event_c',
|
||||
'{service}_webhook',
|
||||
],
|
||||
available: ['{service}_event_a', '{service}_event_b'],
|
||||
},
|
||||
|
||||
subBlocks: [
|
||||
// Regular tool subBlocks first
|
||||
{ id: 'operation', /* ... */ },
|
||||
{ id: 'credential', /* ... */ },
|
||||
// ... other tool fields ...
|
||||
|
||||
// Then spread ALL trigger subBlocks
|
||||
// Regular tool subBlocks first...
|
||||
...getTrigger('{service}_event_a').subBlocks,
|
||||
...getTrigger('{service}_event_b').subBlocks,
|
||||
...getTrigger('{service}_event_c').subBlocks,
|
||||
...getTrigger('{service}_webhook').subBlocks,
|
||||
],
|
||||
|
||||
// ... tools config ...
|
||||
}
|
||||
```
|
||||
|
||||
## Automatic Webhook Registration (Preferred)
|
||||
## Provider Handler
|
||||
|
||||
If the service's API supports programmatic webhook creation, implement automatic webhook registration instead of requiring users to manually configure webhooks. This provides a much better user experience.
|
||||
All provider-specific webhook logic lives in a single handler file: `apps/sim/lib/webhooks/providers/{service}.ts`.
|
||||
|
||||
### When to Use Automatic Registration
|
||||
### When to Create a Handler
|
||||
|
||||
Check the service's API documentation for endpoints like:
|
||||
- `POST /webhooks` or `POST /hooks` - Create webhook
|
||||
- `DELETE /webhooks/{id}` - Delete webhook
|
||||
| Behavior | Method | Examples |
|
||||
|---|---|---|
|
||||
| HMAC signature auth | `verifyAuth` via `createHmacVerifier` | Ashby, Jira, Linear, Typeform |
|
||||
| Custom token auth | `verifyAuth` via `verifyTokenAuth` | Generic, Google Forms |
|
||||
| Event filtering | `matchEvent` | GitHub, Jira, Attio, HubSpot |
|
||||
| Idempotency dedup | `extractIdempotencyId` | Slack, Stripe, Linear, Jira |
|
||||
| Custom input formatting | `formatInput` | Slack, Teams, Attio, Ashby |
|
||||
| Auto webhook creation | `createSubscription` | Ashby, Grain, Calendly, Airtable |
|
||||
| Auto webhook deletion | `deleteSubscription` | Ashby, Grain, Calendly, Airtable |
|
||||
| Challenge/verification | `handleChallenge` | Slack, WhatsApp, Teams |
|
||||
| Custom success response | `formatSuccessResponse` | Slack, Twilio Voice, Teams |
|
||||
|
||||
Services that support this pattern include: Grain, Lemlist, Calendly, Airtable, Webflow, Typeform, etc.
|
||||
If none apply, you don't need a handler. The default handler provides bearer token auth.
|
||||
|
||||
### Implementation Steps
|
||||
|
||||
#### 1. Add API Key to Extra Fields
|
||||
|
||||
Update your `build{Service}ExtraFields` function to include an API key field:
|
||||
### Example Handler
|
||||
|
||||
```typescript
|
||||
export function build{Service}ExtraFields(triggerId: string): SubBlockConfig[] {
|
||||
return [
|
||||
{
|
||||
id: 'apiKey',
|
||||
title: 'API Key',
|
||||
type: 'short-input',
|
||||
placeholder: 'Enter your {Service} API key',
|
||||
description: 'Required to create the webhook in {Service}.',
|
||||
password: true,
|
||||
required: true,
|
||||
mode: 'trigger',
|
||||
condition: { field: 'selectedTriggerId', value: triggerId },
|
||||
},
|
||||
// Other optional fields (e.g., campaign filter, project filter)
|
||||
{
|
||||
id: 'projectId',
|
||||
title: 'Project ID (Optional)',
|
||||
type: 'short-input',
|
||||
placeholder: 'Leave empty for all projects',
|
||||
mode: 'trigger',
|
||||
condition: { field: 'selectedTriggerId', value: triggerId },
|
||||
},
|
||||
]
|
||||
import crypto from 'crypto'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { safeCompare } from '@/lib/core/security/encryption'
|
||||
import type { EventMatchContext, FormatInputContext, FormatInputResult, WebhookProviderHandler } from '@/lib/webhooks/providers/types'
|
||||
import { createHmacVerifier } from '@/lib/webhooks/providers/utils'
|
||||
|
||||
const logger = createLogger('WebhookProvider:{Service}')
|
||||
|
||||
function validate{Service}Signature(secret: string, signature: string, body: string): boolean {
|
||||
if (!secret || !signature || !body) return false
|
||||
const computed = crypto.createHmac('sha256', secret).update(body, 'utf8').digest('hex')
|
||||
return safeCompare(computed, signature)
|
||||
}
|
||||
```
|
||||
|
||||
#### 2. Update Setup Instructions for Automatic Creation
|
||||
export const {service}Handler: WebhookProviderHandler = {
|
||||
verifyAuth: createHmacVerifier({
|
||||
configKey: 'webhookSecret',
|
||||
headerName: 'X-{Service}-Signature',
|
||||
validateFn: validate{Service}Signature,
|
||||
providerLabel: '{Service}',
|
||||
}),
|
||||
|
||||
Change instructions to indicate automatic webhook creation:
|
||||
async matchEvent({ body, requestId, providerConfig }: EventMatchContext) {
|
||||
const triggerId = providerConfig.triggerId as string | undefined
|
||||
if (triggerId && triggerId !== '{service}_webhook') {
|
||||
const { is{Service}EventMatch } = await import('@/triggers/{service}/utils')
|
||||
if (!is{Service}EventMatch(triggerId, body as Record<string, unknown>)) return false
|
||||
}
|
||||
return true
|
||||
},
|
||||
|
||||
```typescript
|
||||
export function {service}SetupInstructions(eventType: string): string {
|
||||
const instructions = [
|
||||
'Enter your {Service} API Key above.',
|
||||
'You can find your API key in {Service} at <strong>Settings > API</strong>.',
|
||||
`Click <strong>"Save Configuration"</strong> to automatically create the webhook in {Service} for <strong>${eventType}</strong> events.`,
|
||||
'The webhook will be automatically deleted when you remove this trigger.',
|
||||
]
|
||||
|
||||
return instructions
|
||||
.map((instruction, index) =>
|
||||
`<div class="mb-3"><strong>${index + 1}.</strong> ${instruction}</div>`
|
||||
)
|
||||
.join('')
|
||||
}
|
||||
```
|
||||
|
||||
#### 3. Add Webhook Creation to API Route
|
||||
|
||||
In `apps/sim/app/api/webhooks/route.ts`, add provider-specific logic after the database save:
|
||||
|
||||
```typescript
|
||||
// --- {Service} specific logic ---
|
||||
if (savedWebhook && provider === '{service}') {
|
||||
logger.info(`[${requestId}] {Service} provider detected. Creating webhook subscription.`)
|
||||
try {
|
||||
const result = await create{Service}WebhookSubscription(
|
||||
{
|
||||
id: savedWebhook.id,
|
||||
path: savedWebhook.path,
|
||||
providerConfig: savedWebhook.providerConfig,
|
||||
async formatInput({ body }: FormatInputContext): Promise<FormatInputResult> {
|
||||
const b = body as Record<string, unknown>
|
||||
return {
|
||||
input: {
|
||||
eventType: b.type,
|
||||
resourceId: (b.data as Record<string, unknown>)?.id || '',
|
||||
resource: b.data,
|
||||
},
|
||||
requestId
|
||||
)
|
||||
|
||||
if (result) {
|
||||
// Update the webhook record with the external webhook ID
|
||||
const updatedConfig = {
|
||||
...(savedWebhook.providerConfig as Record<string, any>),
|
||||
externalId: result.id,
|
||||
}
|
||||
await db
|
||||
.update(webhook)
|
||||
.set({
|
||||
providerConfig: updatedConfig,
|
||||
updatedAt: new Date(),
|
||||
})
|
||||
.where(eq(webhook.id, savedWebhook.id))
|
||||
|
||||
savedWebhook.providerConfig = updatedConfig
|
||||
logger.info(`[${requestId}] Successfully created {Service} webhook`, {
|
||||
externalHookId: result.id,
|
||||
webhookId: savedWebhook.id,
|
||||
})
|
||||
}
|
||||
} catch (err) {
|
||||
logger.error(
|
||||
`[${requestId}] Error creating {Service} webhook subscription, rolling back webhook`,
|
||||
err
|
||||
)
|
||||
await db.delete(webhook).where(eq(webhook.id, savedWebhook.id))
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: 'Failed to create webhook in {Service}',
|
||||
details: err instanceof Error ? err.message : 'Unknown error',
|
||||
},
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
},
|
||||
|
||||
extractIdempotencyId(body: unknown) {
|
||||
const obj = body as Record<string, unknown>
|
||||
return obj.id && obj.type ? `${obj.type}:${obj.id}` : null
|
||||
},
|
||||
}
|
||||
// --- End {Service} specific logic ---
|
||||
```
|
||||
|
||||
Then add the helper function at the end of the file:
|
||||
### Register the Handler
|
||||
|
||||
In `apps/sim/lib/webhooks/providers/registry.ts`:
|
||||
|
||||
```typescript
|
||||
async function create{Service}WebhookSubscription(
|
||||
webhookData: any,
|
||||
requestId: string
|
||||
): Promise<{ id: string } | undefined> {
|
||||
try {
|
||||
const { path, providerConfig } = webhookData
|
||||
const { apiKey, triggerId, projectId } = providerConfig || {}
|
||||
import { {service}Handler } from '@/lib/webhooks/providers/{service}'
|
||||
|
||||
if (!apiKey) {
|
||||
throw new Error('{Service} API Key is required.')
|
||||
}
|
||||
const PROVIDER_HANDLERS: Record<string, WebhookProviderHandler> = {
|
||||
// ... existing (alphabetical) ...
|
||||
{service}: {service}Handler,
|
||||
}
|
||||
```
|
||||
|
||||
// Map trigger IDs to service event types
|
||||
const eventTypeMap: Record<string, string | undefined> = {
|
||||
{service}_event_a: 'eventA',
|
||||
{service}_event_b: 'eventB',
|
||||
{service}_webhook: undefined, // Generic - no filter
|
||||
}
|
||||
## Output Alignment (Critical)
|
||||
|
||||
const eventType = eventTypeMap[triggerId]
|
||||
const notificationUrl = `${getBaseUrl()}/api/webhooks/trigger/${path}`
|
||||
There are two sources of truth that **MUST be aligned**:
|
||||
|
||||
const requestBody: Record<string, any> = {
|
||||
url: notificationUrl,
|
||||
}
|
||||
1. **Trigger `outputs`** — schema defining what fields SHOULD be available (UI tag dropdown)
|
||||
2. **`formatInput` on the handler** — implementation that transforms raw payload into actual data
|
||||
|
||||
if (eventType) {
|
||||
requestBody.eventType = eventType
|
||||
}
|
||||
If they differ: the tag dropdown shows fields that don't exist, or actual data has fields users can't discover.
|
||||
|
||||
if (projectId) {
|
||||
requestBody.projectId = projectId
|
||||
}
|
||||
**Rules for `formatInput`:**
|
||||
- Return `{ input: { ... } }` where inner keys match trigger `outputs` exactly
|
||||
- Return `{ input: ..., skip: { message: '...' } }` to skip execution
|
||||
- No wrapper objects or duplication
|
||||
- Use `null` for missing optional data
|
||||
|
||||
const response = await fetch('https://api.{service}.com/webhooks', {
|
||||
## Automatic Webhook Registration
|
||||
|
||||
If the service API supports programmatic webhook creation, implement `createSubscription` and `deleteSubscription` on the handler. The orchestration layer calls these automatically — **no code touches `route.ts`, `provider-subscriptions.ts`, or `deploy.ts`**.
|
||||
|
||||
```typescript
|
||||
import { getNotificationUrl, getProviderConfig } from '@/lib/webhooks/provider-subscription-utils'
|
||||
import type { DeleteSubscriptionContext, SubscriptionContext, SubscriptionResult } from '@/lib/webhooks/providers/types'
|
||||
|
||||
export const {service}Handler: WebhookProviderHandler = {
|
||||
async createSubscription(ctx: SubscriptionContext): Promise<SubscriptionResult | undefined> {
|
||||
const config = getProviderConfig(ctx.webhook)
|
||||
const apiKey = config.apiKey as string
|
||||
if (!apiKey) throw new Error('{Service} API Key is required.')
|
||||
|
||||
const res = await fetch('https://api.{service}.com/webhooks', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
Authorization: `Bearer ${apiKey}`,
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
body: JSON.stringify(requestBody),
|
||||
headers: { Authorization: `Bearer ${apiKey}`, 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({ url: getNotificationUrl(ctx.webhook) }),
|
||||
})
|
||||
|
||||
const responseBody = await response.json()
|
||||
if (!res.ok) throw new Error(`{Service} error: ${res.status}`)
|
||||
const { id } = (await res.json()) as { id: string }
|
||||
return { providerConfigUpdates: { externalId: id } }
|
||||
},
|
||||
|
||||
if (!response.ok) {
|
||||
const errorMessage = responseBody.message || 'Unknown API error'
|
||||
let userFriendlyMessage = 'Failed to create webhook in {Service}'
|
||||
|
||||
if (response.status === 401) {
|
||||
userFriendlyMessage = 'Invalid API Key. Please verify and try again.'
|
||||
} else if (errorMessage) {
|
||||
userFriendlyMessage = `{Service} error: ${errorMessage}`
|
||||
}
|
||||
|
||||
throw new Error(userFriendlyMessage)
|
||||
}
|
||||
|
||||
return { id: responseBody.id }
|
||||
} catch (error: any) {
|
||||
logger.error(`Exception during {Service} webhook creation`, { error: error.message })
|
||||
throw error
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
#### 4. Add Webhook Deletion to Provider Subscriptions
|
||||
|
||||
In `apps/sim/lib/webhooks/provider-subscriptions.ts`:
|
||||
|
||||
1. Add a logger:
|
||||
```typescript
|
||||
const {service}Logger = createLogger('{Service}Webhook')
|
||||
```
|
||||
|
||||
2. Add the delete function:
|
||||
```typescript
|
||||
export async function delete{Service}Webhook(webhook: any, requestId: string): Promise<void> {
|
||||
try {
|
||||
const config = getProviderConfig(webhook)
|
||||
const apiKey = config.apiKey as string | undefined
|
||||
const externalId = config.externalId as string | undefined
|
||||
|
||||
if (!apiKey || !externalId) {
|
||||
{service}Logger.warn(`[${requestId}] Missing apiKey or externalId, skipping cleanup`)
|
||||
return
|
||||
}
|
||||
|
||||
const response = await fetch(`https://api.{service}.com/webhooks/${externalId}`, {
|
||||
async deleteSubscription(ctx: DeleteSubscriptionContext): Promise<void> {
|
||||
const config = getProviderConfig(ctx.webhook)
|
||||
const { apiKey, externalId } = config as { apiKey?: string; externalId?: string }
|
||||
if (!apiKey || !externalId) return
|
||||
await fetch(`https://api.{service}.com/webhooks/${externalId}`, {
|
||||
method: 'DELETE',
|
||||
headers: {
|
||||
Authorization: `Bearer ${apiKey}`,
|
||||
},
|
||||
})
|
||||
|
||||
if (!response.ok && response.status !== 404) {
|
||||
{service}Logger.warn(`[${requestId}] Failed to delete webhook (non-fatal): ${response.status}`)
|
||||
} else {
|
||||
{service}Logger.info(`[${requestId}] Successfully deleted webhook ${externalId}`)
|
||||
}
|
||||
} catch (error) {
|
||||
{service}Logger.warn(`[${requestId}] Error deleting webhook (non-fatal)`, error)
|
||||
}
|
||||
headers: { Authorization: `Bearer ${apiKey}` },
|
||||
}).catch(() => {})
|
||||
},
|
||||
}
|
||||
```
|
||||
|
||||
3. Add to `cleanupExternalWebhook`:
|
||||
```typescript
|
||||
export async function cleanupExternalWebhook(...): Promise<void> {
|
||||
// ... existing providers ...
|
||||
} else if (webhook.provider === '{service}') {
|
||||
await delete{Service}Webhook(webhook, requestId)
|
||||
}
|
||||
}
|
||||
```
|
||||
**Key points:**
|
||||
- Throw from `createSubscription` — orchestration rolls back the DB webhook
|
||||
- Never throw from `deleteSubscription` — log non-fatally
|
||||
- Return `{ providerConfigUpdates: { externalId } }` — orchestration merges into `providerConfig`
|
||||
- Add `apiKey` field to `build{Service}ExtraFields` with `password: true`
|
||||
|
||||
### Key Points for Automatic Registration
|
||||
|
||||
- **API Key visibility**: Always use `password: true` for API key fields
|
||||
- **Error handling**: Roll back the database webhook if external creation fails
|
||||
- **External ID storage**: Save the external webhook ID in `providerConfig.externalId`
|
||||
- **Graceful cleanup**: Don't fail webhook deletion if cleanup fails (use non-fatal logging)
|
||||
- **User-friendly errors**: Map HTTP status codes to helpful error messages
|
||||
|
||||
## The buildTriggerSubBlocks Helper
|
||||
|
||||
This is the generic helper from `@/triggers` that creates consistent trigger subBlocks.
|
||||
|
||||
### Function Signature
|
||||
|
||||
```typescript
|
||||
interface BuildTriggerSubBlocksOptions {
|
||||
triggerId: string // e.g., 'service_event_a'
|
||||
triggerOptions: Array<{ label: string; id: string }> // Dropdown options
|
||||
includeDropdown?: boolean // true only for primary trigger
|
||||
setupInstructions: string // HTML instructions
|
||||
extraFields?: SubBlockConfig[] // Service-specific fields
|
||||
webhookPlaceholder?: string // Custom placeholder text
|
||||
}
|
||||
|
||||
function buildTriggerSubBlocks(options: BuildTriggerSubBlocksOptions): SubBlockConfig[]
|
||||
```
|
||||
|
||||
### What It Creates
|
||||
|
||||
The helper creates this structure:
|
||||
1. **Dropdown** (only if `includeDropdown: true`) - Trigger type selector
|
||||
2. **Webhook URL** - Read-only field with copy button
|
||||
3. **Extra Fields** - Your service-specific fields (filters, options, etc.)
|
||||
4. **Save Button** - Activates the trigger
|
||||
5. **Instructions** - Setup guide for users
|
||||
|
||||
All fields automatically have:
|
||||
- `mode: 'trigger'` - Only shown in trigger mode
|
||||
- `condition: { field: 'selectedTriggerId', value: triggerId }` - Only shown when this trigger is selected
|
||||
|
||||
## Trigger Outputs & Webhook Input Formatting
|
||||
|
||||
### Important: Two Sources of Truth
|
||||
|
||||
There are two related but separate concerns:
|
||||
|
||||
1. **Trigger `outputs`** - Schema/contract defining what fields SHOULD be available. Used by UI for tag dropdown.
|
||||
2. **`formatWebhookInput`** - Implementation that transforms raw webhook payload into actual data. Located in `apps/sim/lib/webhooks/utils.server.ts`.
|
||||
|
||||
**These MUST be aligned.** The fields returned by `formatWebhookInput` should match what's defined in trigger `outputs`. If they differ:
|
||||
- Tag dropdown shows fields that don't exist (broken variable resolution)
|
||||
- Or actual data has fields not shown in dropdown (users can't discover them)
|
||||
|
||||
### When to Add a formatWebhookInput Handler
|
||||
|
||||
- **Simple providers**: If the raw webhook payload structure already matches your outputs, you don't need a handler. The generic fallback returns `body` directly.
|
||||
- **Complex providers**: If you need to transform, flatten, extract nested data, compute fields, or handle conditional logic, add a handler.
|
||||
|
||||
### Adding a Handler
|
||||
|
||||
In `apps/sim/lib/webhooks/utils.server.ts`, add a handler block:
|
||||
|
||||
```typescript
|
||||
if (foundWebhook.provider === '{service}') {
|
||||
// Transform raw webhook body to match trigger outputs
|
||||
return {
|
||||
eventType: body.type,
|
||||
resourceId: body.data?.id || '',
|
||||
timestamp: body.created_at,
|
||||
resource: body.data,
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
**Key rules:**
|
||||
- Return fields that match your trigger `outputs` definition exactly
|
||||
- No wrapper objects like `webhook: { data: ... }` or `{service}: { ... }`
|
||||
- No duplication (don't spread body AND add individual fields)
|
||||
- Use `null` for missing optional data, not empty objects with empty strings
|
||||
|
||||
### Verify Alignment
|
||||
|
||||
Run the alignment checker:
|
||||
```bash
|
||||
bun scripts/check-trigger-alignment.ts {service}
|
||||
```
|
||||
|
||||
## Trigger Outputs
|
||||
## Trigger Outputs Schema
|
||||
|
||||
Trigger outputs use the same schema as block outputs (NOT tool outputs).
|
||||
|
||||
**Supported:**
|
||||
- `type` and `description` for simple fields
|
||||
- Nested object structure for complex data
|
||||
|
||||
**NOT Supported:**
|
||||
- `optional: true` (tool outputs only)
|
||||
- `items` property (tool outputs only)
|
||||
**Supported:** `type` + `description` for leaf fields, nested objects for complex data.
|
||||
**NOT supported:** `optional: true`, `items` (those are tool-output-only features).
|
||||
|
||||
```typescript
|
||||
export function buildOutputs(): Record<string, TriggerOutput> {
|
||||
return {
|
||||
// Simple fields
|
||||
eventType: { type: 'string', description: 'Event type' },
|
||||
timestamp: { type: 'string', description: 'When it occurred' },
|
||||
|
||||
// Complex data - use type: 'json'
|
||||
payload: { type: 'json', description: 'Full event payload' },
|
||||
|
||||
// Nested structure
|
||||
resource: {
|
||||
id: { type: 'string', description: 'Resource ID' },
|
||||
name: { type: 'string', description: 'Resource name' },
|
||||
@@ -630,79 +327,32 @@ export function buildOutputs(): Record<string, TriggerOutput> {
|
||||
}
|
||||
```
|
||||
|
||||
## Generic Webhook Trigger Pattern
|
||||
## Checklist
|
||||
|
||||
For services with many event types, create a generic webhook that accepts all events:
|
||||
|
||||
```typescript
|
||||
export const {service}WebhookTrigger: TriggerConfig = {
|
||||
id: '{service}_webhook',
|
||||
name: '{Service} Webhook (All Events)',
|
||||
// ...
|
||||
|
||||
subBlocks: buildTriggerSubBlocks({
|
||||
triggerId: '{service}_webhook',
|
||||
triggerOptions: {service}TriggerOptions,
|
||||
setupInstructions: {service}SetupInstructions('All Events'),
|
||||
extraFields: [
|
||||
// Event type filter (optional)
|
||||
{
|
||||
id: 'eventTypes',
|
||||
title: 'Event Types',
|
||||
type: 'dropdown',
|
||||
multiSelect: true,
|
||||
options: [
|
||||
{ label: 'Event A', id: 'event_a' },
|
||||
{ label: 'Event B', id: 'event_b' },
|
||||
],
|
||||
placeholder: 'Leave empty for all events',
|
||||
mode: 'trigger',
|
||||
condition: { field: 'selectedTriggerId', value: '{service}_webhook' },
|
||||
},
|
||||
// Plus any other service-specific fields
|
||||
...build{Service}ExtraFields('{service}_webhook'),
|
||||
],
|
||||
}),
|
||||
}
|
||||
```
|
||||
|
||||
## Checklist Before Finishing
|
||||
|
||||
### Utils
|
||||
- [ ] Created `{service}TriggerOptions` array with all trigger IDs
|
||||
- [ ] Created `{service}SetupInstructions` function with clear steps
|
||||
- [ ] Created `build{Service}ExtraFields` for service-specific fields
|
||||
- [ ] Created output builders for each trigger type
|
||||
|
||||
### Triggers
|
||||
- [ ] Primary trigger has `includeDropdown: true`
|
||||
- [ ] Secondary triggers do NOT have `includeDropdown`
|
||||
### Trigger Definition
|
||||
- [ ] Created `utils.ts` with options, instructions, extra fields, and output builders
|
||||
- [ ] Primary trigger has `includeDropdown: true`; secondary triggers do NOT
|
||||
- [ ] All triggers use `buildTriggerSubBlocks` helper
|
||||
- [ ] All triggers have proper outputs defined
|
||||
- [ ] Created `index.ts` barrel export
|
||||
|
||||
### Registration
|
||||
- [ ] All triggers imported in `triggers/registry.ts`
|
||||
- [ ] All triggers added to `TRIGGER_REGISTRY`
|
||||
- [ ] Block has `triggers.enabled: true`
|
||||
- [ ] Block has all trigger IDs in `triggers.available`
|
||||
- [ ] All triggers in `triggers/registry.ts` → `TRIGGER_REGISTRY`
|
||||
- [ ] Block has `triggers.enabled: true` and lists all trigger IDs in `triggers.available`
|
||||
- [ ] Block spreads all trigger subBlocks: `...getTrigger('id').subBlocks`
|
||||
|
||||
### Automatic Webhook Registration (if supported)
|
||||
- [ ] Added API key field to `build{Service}ExtraFields` with `password: true`
|
||||
- [ ] Updated setup instructions for automatic webhook creation
|
||||
- [ ] Added provider-specific logic to `apps/sim/app/api/webhooks/route.ts`
|
||||
- [ ] Added `create{Service}WebhookSubscription` helper function
|
||||
- [ ] Added `delete{Service}Webhook` function to `provider-subscriptions.ts`
|
||||
- [ ] Added provider to `cleanupExternalWebhook` function
|
||||
### Provider Handler (if needed)
|
||||
- [ ] Handler file at `apps/sim/lib/webhooks/providers/{service}.ts`
|
||||
- [ ] Registered in `providers/registry.ts` (alphabetical)
|
||||
- [ ] Signature validator is a private function inside the handler file
|
||||
- [ ] `formatInput` output keys match trigger `outputs` exactly
|
||||
- [ ] Event matching uses dynamic `await import()` for trigger utils
|
||||
|
||||
### Webhook Input Formatting
|
||||
- [ ] Added handler in `apps/sim/lib/webhooks/utils.server.ts` (if custom formatting needed)
|
||||
- [ ] Handler returns fields matching trigger `outputs` exactly
|
||||
- [ ] Run `bunx scripts/check-trigger-alignment.ts {service}` to verify alignment
|
||||
### Auto Registration (if supported)
|
||||
- [ ] `createSubscription` and `deleteSubscription` on the handler
|
||||
- [ ] NO changes to `route.ts`, `provider-subscriptions.ts`, or `deploy.ts`
|
||||
- [ ] API key field uses `password: true`
|
||||
|
||||
### Testing
|
||||
- [ ] Run `bun run type-check` to verify no TypeScript errors
|
||||
- [ ] Restart dev server to pick up new triggers
|
||||
- [ ] Test trigger UI shows correctly in the block
|
||||
- [ ] Test automatic webhook creation works (if applicable)
|
||||
- [ ] `bun run type-check` passes
|
||||
- [ ] Manually verify `formatInput` output keys match trigger `outputs` keys
|
||||
- [ ] Trigger UI shows correctly in the block
|
||||
|
||||
212
.agents/skills/validate-trigger/SKILL.md
Normal file
212
.agents/skills/validate-trigger/SKILL.md
Normal file
@@ -0,0 +1,212 @@
|
||||
---
|
||||
name: validate-trigger
|
||||
description: Audit an existing Sim webhook trigger against the service's webhook API docs and repository conventions, then report and fix issues across trigger definitions, provider handler, output alignment, registration, and security. Use when validating or repairing a trigger under `apps/sim/triggers/{service}/` or `apps/sim/lib/webhooks/providers/{service}.ts`.
|
||||
---
|
||||
|
||||
# Validate Trigger
|
||||
|
||||
You are an expert auditor for Sim webhook triggers. Your job is to validate that an existing trigger implementation is correct, complete, secure, and aligned across all layers.
|
||||
|
||||
## Your Task
|
||||
|
||||
1. Read the service's webhook/API documentation (via WebFetch)
|
||||
2. Read every trigger file, provider handler, and registry entry
|
||||
3. Cross-reference against the API docs and Sim conventions
|
||||
4. Report all issues grouped by severity (critical, warning, suggestion)
|
||||
5. Fix all issues after reporting them
|
||||
|
||||
## Step 1: Gather All Files
|
||||
|
||||
Read **every** file for the trigger — do not skip any:
|
||||
|
||||
```
|
||||
apps/sim/triggers/{service}/ # All trigger files, utils.ts, index.ts
|
||||
apps/sim/lib/webhooks/providers/{service}.ts # Provider handler (if exists)
|
||||
apps/sim/lib/webhooks/providers/registry.ts # Handler registry
|
||||
apps/sim/triggers/registry.ts # Trigger registry
|
||||
apps/sim/blocks/blocks/{service}.ts # Block definition (trigger wiring)
|
||||
```
|
||||
|
||||
Also read for reference:
|
||||
```
|
||||
apps/sim/lib/webhooks/providers/types.ts # WebhookProviderHandler interface
|
||||
apps/sim/lib/webhooks/providers/utils.ts # Shared helpers (createHmacVerifier, etc.)
|
||||
apps/sim/lib/webhooks/provider-subscription-utils.ts # Subscription helpers
|
||||
apps/sim/lib/webhooks/processor.ts # Central webhook processor
|
||||
```
|
||||
|
||||
## Step 2: Pull API Documentation
|
||||
|
||||
Fetch the service's official webhook documentation. This is the **source of truth** for:
|
||||
- Webhook event types and payload shapes
|
||||
- Signature/auth verification method (HMAC algorithm, header names, secret format)
|
||||
- Challenge/verification handshake requirements
|
||||
- Webhook subscription API (create/delete endpoints, if applicable)
|
||||
- Retry behavior and delivery guarantees
|
||||
|
||||
## Step 3: Validate Trigger Definitions
|
||||
|
||||
### utils.ts
|
||||
- [ ] `{service}TriggerOptions` lists all trigger IDs accurately
|
||||
- [ ] `{service}SetupInstructions` provides clear, correct steps for the service
|
||||
- [ ] `build{Service}ExtraFields` includes relevant filter/config fields with correct `condition`
|
||||
- [ ] Output builders expose all meaningful fields from the webhook payload
|
||||
- [ ] Output builders do NOT use `optional: true` or `items` (tool-output-only features)
|
||||
- [ ] Nested output objects correctly model the payload structure
|
||||
|
||||
### Trigger Files
|
||||
- [ ] Exactly one primary trigger has `includeDropdown: true`
|
||||
- [ ] All secondary triggers do NOT have `includeDropdown`
|
||||
- [ ] All triggers use `buildTriggerSubBlocks` helper (not hand-rolled subBlocks)
|
||||
- [ ] Every trigger's `id` matches the convention `{service}_{event_name}`
|
||||
- [ ] Every trigger's `provider` matches the service name used in the handler registry
|
||||
- [ ] `index.ts` barrel exports all triggers
|
||||
|
||||
### Trigger ↔ Provider Alignment (CRITICAL)
|
||||
- [ ] Every trigger ID referenced in `matchEvent` logic exists in `{service}TriggerOptions`
|
||||
- [ ] Event matching logic in the provider correctly maps trigger IDs to service event types
|
||||
- [ ] Event matching logic in `is{Service}EventMatch` (if exists) correctly identifies events per the API docs
|
||||
|
||||
## Step 4: Validate Provider Handler
|
||||
|
||||
### Auth Verification
|
||||
- [ ] `verifyAuth` correctly validates webhook signatures per the service's documentation
|
||||
- [ ] HMAC algorithm matches (SHA-1, SHA-256, SHA-512)
|
||||
- [ ] Signature header name matches the API docs exactly
|
||||
- [ ] Signature format is handled (raw hex, `sha256=` prefix, base64, etc.)
|
||||
- [ ] Uses `safeCompare` for timing-safe comparison (no `===`)
|
||||
- [ ] If `webhookSecret` is required, handler rejects when it's missing (fail-closed)
|
||||
- [ ] Signature is computed over raw body (not parsed JSON)
|
||||
|
||||
### Event Matching
|
||||
- [ ] `matchEvent` returns `boolean` (not `NextResponse` or other values)
|
||||
- [ ] Challenge/verification events are excluded from matching (e.g., `endpoint.url_validation`)
|
||||
- [ ] When `triggerId` is a generic webhook ID, all events pass through
|
||||
- [ ] When `triggerId` is specific, only matching events pass
|
||||
- [ ] Event matching logic uses dynamic `await import()` for trigger utils (avoids circular deps)
|
||||
|
||||
### formatInput (CRITICAL)
|
||||
- [ ] Every key in the `formatInput` return matches a key in the trigger `outputs` schema
|
||||
- [ ] Every key in the trigger `outputs` schema is populated by `formatInput`
|
||||
- [ ] No extra undeclared keys that users can't discover in the UI
|
||||
- [ ] No wrapper objects (`webhook: { ... }`, `{service}: { ... }`)
|
||||
- [ ] Nested output paths exist at the correct depth (e.g., `resource.id` actually has `resource: { id: ... }`)
|
||||
- [ ] `null` is used for missing optional fields (not empty strings or empty objects)
|
||||
- [ ] Returns `{ input: { ... } }` — not a bare object
|
||||
|
||||
### Idempotency
|
||||
- [ ] `extractIdempotencyId` returns a stable, unique key per delivery
|
||||
- [ ] Uses provider-specific delivery IDs when available (e.g., `X-Request-Id`, `Linear-Delivery`, `svix-id`)
|
||||
- [ ] Falls back to content-based ID (e.g., `${type}:${id}`) when no delivery header exists
|
||||
- [ ] Does NOT include timestamps in the idempotency key (would break dedup on retries)
|
||||
|
||||
### Challenge Handling (if applicable)
|
||||
- [ ] `handleChallenge` correctly implements the service's URL verification handshake
|
||||
- [ ] Returns the expected response format per the API docs
|
||||
- [ ] Env-backed secrets are resolved via `resolveEnvVarsInObject` if needed
|
||||
|
||||
## Step 5: Validate Automatic Subscription Lifecycle
|
||||
|
||||
If the service supports programmatic webhook creation:
|
||||
|
||||
### createSubscription
|
||||
- [ ] Calls the correct API endpoint to create a webhook
|
||||
- [ ] Sends the correct event types/filters
|
||||
- [ ] Passes the notification URL from `getNotificationUrl(ctx.webhook)`
|
||||
- [ ] Returns `{ providerConfigUpdates: { externalId } }` with the external webhook ID
|
||||
- [ ] Throws on failure (orchestration handles rollback)
|
||||
- [ ] Provides user-friendly error messages (401 → "Invalid API Key", etc.)
|
||||
|
||||
### deleteSubscription
|
||||
- [ ] Calls the correct API endpoint to delete the webhook
|
||||
- [ ] Handles 404 gracefully (webhook already deleted)
|
||||
- [ ] Never throws — catches errors and logs non-fatally
|
||||
- [ ] Skips gracefully when `apiKey` or `externalId` is missing
|
||||
|
||||
### Orchestration Isolation
|
||||
- [ ] NO provider-specific logic in `route.ts`, `provider-subscriptions.ts`, or `deploy.ts`
|
||||
- [ ] All subscription logic lives on the handler (`createSubscription`/`deleteSubscription`)
|
||||
|
||||
## Step 6: Validate Registration and Block Wiring
|
||||
|
||||
### Trigger Registry (`triggers/registry.ts`)
|
||||
- [ ] All triggers are imported and registered
|
||||
- [ ] Registry keys match trigger IDs exactly
|
||||
- [ ] No orphaned entries (triggers that don't exist)
|
||||
|
||||
### Provider Handler Registry (`providers/registry.ts`)
|
||||
- [ ] Handler is imported and registered (if handler exists)
|
||||
- [ ] Registry key matches the `provider` field on the trigger configs
|
||||
- [ ] Entries are in alphabetical order
|
||||
|
||||
### Block Wiring (`blocks/blocks/{service}.ts`)
|
||||
- [ ] Block has `triggers.enabled: true`
|
||||
- [ ] `triggers.available` lists all trigger IDs
|
||||
- [ ] All trigger subBlocks are spread into `subBlocks`: `...getTrigger('id').subBlocks`
|
||||
- [ ] No trigger IDs in `triggers.available` that aren't in the registry
|
||||
- [ ] No trigger subBlocks spread that aren't in `triggers.available`
|
||||
|
||||
## Step 7: Validate Security
|
||||
|
||||
- [ ] Webhook secrets are never logged (not even at debug level)
|
||||
- [ ] Auth verification runs before any event processing
|
||||
- [ ] No secret comparison uses `===` (must use `safeCompare` or `crypto.timingSafeEqual`)
|
||||
- [ ] Timestamp/replay protection is reasonable (not too tight for retries, not too loose for security)
|
||||
- [ ] Raw body is used for signature verification (not re-serialized JSON)
|
||||
|
||||
## Step 8: Report and Fix
|
||||
|
||||
### Report Format
|
||||
|
||||
Group findings by severity:
|
||||
|
||||
**Critical** (runtime errors, security issues, or data loss):
|
||||
- Wrong HMAC algorithm or header name
|
||||
- `formatInput` keys don't match trigger `outputs`
|
||||
- Missing `verifyAuth` when the service sends signed webhooks
|
||||
- `matchEvent` returns non-boolean values
|
||||
- Provider-specific logic leaking into shared orchestration files
|
||||
- Trigger IDs mismatch between trigger files, registry, and block
|
||||
- `createSubscription` calling wrong API endpoint
|
||||
- Auth comparison using `===` instead of `safeCompare`
|
||||
|
||||
**Warning** (convention violations or usability issues):
|
||||
- Missing `extractIdempotencyId` when the service provides delivery IDs
|
||||
- Timestamps in idempotency keys (breaks dedup on retries)
|
||||
- Missing challenge handling when the service requires URL verification
|
||||
- Output schema missing fields that `formatInput` returns (undiscoverable data)
|
||||
- Overly tight timestamp skew window that rejects legitimate retries
|
||||
- `matchEvent` not filtering challenge/verification events
|
||||
- Setup instructions missing important steps
|
||||
|
||||
**Suggestion** (minor improvements):
|
||||
- More specific output field descriptions
|
||||
- Additional output fields that could be exposed
|
||||
- Better error messages in `createSubscription`
|
||||
- Logging improvements
|
||||
|
||||
### Fix All Issues
|
||||
|
||||
After reporting, fix every **critical** and **warning** issue. Apply **suggestions** where they don't add unnecessary complexity.
|
||||
|
||||
### Validation Output
|
||||
|
||||
After fixing, confirm:
|
||||
1. `bun run type-check` passes
|
||||
2. Re-read all modified files to verify fixes are correct
|
||||
3. Provider handler tests pass (if they exist): `bun test {service}`
|
||||
|
||||
## Checklist Summary
|
||||
|
||||
- [ ] Read all trigger files, provider handler, types, registries, and block
|
||||
- [ ] Pulled and read official webhook/API documentation
|
||||
- [ ] Validated trigger definitions: options, instructions, extra fields, outputs
|
||||
- [ ] Validated primary/secondary trigger distinction (`includeDropdown`)
|
||||
- [ ] Validated provider handler: auth, matchEvent, formatInput, idempotency
|
||||
- [ ] Validated output alignment: every `outputs` key ↔ every `formatInput` key
|
||||
- [ ] Validated subscription lifecycle: createSubscription, deleteSubscription, no shared-file edits
|
||||
- [ ] Validated registration: trigger registry, handler registry, block wiring
|
||||
- [ ] Validated security: safe comparison, no secret logging, replay protection
|
||||
- [ ] Reported all issues grouped by severity
|
||||
- [ ] Fixed all critical and warning issues
|
||||
- [ ] `bun run type-check` passes after fixes
|
||||
@@ -3,63 +3,57 @@ description: Create webhook triggers for a Sim integration using the generic tri
|
||||
argument-hint: <service-name>
|
||||
---
|
||||
|
||||
# Add Trigger Skill
|
||||
# Add Trigger
|
||||
|
||||
You are an expert at creating webhook triggers for Sim. You understand the trigger system, the generic `buildTriggerSubBlocks` helper, and how triggers connect to blocks.
|
||||
|
||||
## Your Task
|
||||
|
||||
When the user asks you to create triggers for a service:
|
||||
1. Research what webhook events the service supports
|
||||
2. Create the trigger files using the generic builder
|
||||
3. Register triggers and connect them to the block
|
||||
3. Create a provider handler if custom auth, formatting, or subscriptions are needed
|
||||
4. Register triggers and connect them to the block
|
||||
|
||||
## Directory Structure
|
||||
|
||||
```
|
||||
apps/sim/triggers/{service}/
|
||||
├── index.ts # Barrel exports
|
||||
├── utils.ts # Service-specific helpers (trigger options, setup instructions, extra fields)
|
||||
├── utils.ts # Service-specific helpers (options, instructions, extra fields, outputs)
|
||||
├── {event_a}.ts # Primary trigger (includes dropdown)
|
||||
├── {event_b}.ts # Secondary trigger (no dropdown)
|
||||
├── {event_c}.ts # Secondary trigger (no dropdown)
|
||||
└── webhook.ts # Generic webhook trigger (optional, for "all events")
|
||||
|
||||
apps/sim/lib/webhooks/
|
||||
├── provider-subscription-utils.ts # Shared subscription helpers (getProviderConfig, getNotificationUrl)
|
||||
├── providers/
|
||||
│ ├── {service}.ts # Provider handler (auth, formatInput, matchEvent, subscriptions)
|
||||
│ ├── types.ts # WebhookProviderHandler interface
|
||||
│ ├── utils.ts # Shared helpers (createHmacVerifier, verifyTokenAuth, skipByEventTypes)
|
||||
│ └── registry.ts # Handler map + default handler
|
||||
```
|
||||
|
||||
## Step 1: Create utils.ts
|
||||
## Step 1: Create `utils.ts`
|
||||
|
||||
This file contains service-specific helpers used by all triggers.
|
||||
This file contains all service-specific helpers used by triggers.
|
||||
|
||||
```typescript
|
||||
import type { SubBlockConfig } from '@/blocks/types'
|
||||
import type { TriggerOutput } from '@/triggers/types'
|
||||
|
||||
/**
|
||||
* Dropdown options for the trigger type selector.
|
||||
* These appear in the primary trigger's dropdown.
|
||||
*/
|
||||
export const {service}TriggerOptions = [
|
||||
{ label: 'Event A', id: '{service}_event_a' },
|
||||
{ label: 'Event B', id: '{service}_event_b' },
|
||||
{ label: 'Event C', id: '{service}_event_c' },
|
||||
{ label: 'Generic Webhook (All Events)', id: '{service}_webhook' },
|
||||
]
|
||||
|
||||
/**
|
||||
* Generates HTML setup instructions for the trigger.
|
||||
* Displayed to users to help them configure webhooks in the external service.
|
||||
*/
|
||||
export function {service}SetupInstructions(eventType: string): string {
|
||||
const instructions = [
|
||||
'Copy the <strong>Webhook URL</strong> above',
|
||||
'Go to <strong>{Service} Settings > Webhooks</strong>',
|
||||
'Click <strong>Add Webhook</strong>',
|
||||
'Paste the webhook URL',
|
||||
`Select the <strong>${eventType}</strong> event type`,
|
||||
'Save the webhook configuration',
|
||||
'Paste the webhook URL and save',
|
||||
'Click "Save" above to activate your trigger',
|
||||
]
|
||||
|
||||
return instructions
|
||||
.map((instruction, index) =>
|
||||
`<div class="mb-3"><strong>${index + 1}.</strong> ${instruction}</div>`
|
||||
@@ -67,10 +61,6 @@ export function {service}SetupInstructions(eventType: string): string {
|
||||
.join('')
|
||||
}
|
||||
|
||||
/**
|
||||
* Service-specific extra fields to add to triggers.
|
||||
* These are inserted between webhookUrl and triggerSave.
|
||||
*/
|
||||
export function build{Service}ExtraFields(triggerId: string): SubBlockConfig[] {
|
||||
return [
|
||||
{
|
||||
@@ -78,53 +68,34 @@ export function build{Service}ExtraFields(triggerId: string): SubBlockConfig[] {
|
||||
title: 'Project ID (Optional)',
|
||||
type: 'short-input',
|
||||
placeholder: 'Leave empty for all projects',
|
||||
description: 'Optionally filter to a specific project',
|
||||
mode: 'trigger',
|
||||
condition: { field: 'selectedTriggerId', value: triggerId },
|
||||
},
|
||||
]
|
||||
}
|
||||
|
||||
/**
|
||||
* Build outputs for this trigger type.
|
||||
* Outputs define what data is available to downstream blocks.
|
||||
*/
|
||||
export function build{Service}Outputs(): Record<string, TriggerOutput> {
|
||||
return {
|
||||
eventType: { type: 'string', description: 'The type of event that triggered this workflow' },
|
||||
eventType: { type: 'string', description: 'The type of event' },
|
||||
resourceId: { type: 'string', description: 'ID of the affected resource' },
|
||||
timestamp: { type: 'string', description: 'When the event occurred (ISO 8601)' },
|
||||
// Nested outputs for complex data
|
||||
resource: {
|
||||
id: { type: 'string', description: 'Resource ID' },
|
||||
name: { type: 'string', description: 'Resource name' },
|
||||
status: { type: 'string', description: 'Current status' },
|
||||
},
|
||||
webhook: { type: 'json', description: 'Full webhook payload' },
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Step 2: Create the Primary Trigger
|
||||
## Step 2: Create Trigger Files
|
||||
|
||||
The **primary trigger** is the first one listed. It MUST include `includeDropdown: true` so users can switch between trigger types.
|
||||
**Primary trigger** — MUST include `includeDropdown: true`:
|
||||
|
||||
```typescript
|
||||
import { {Service}Icon } from '@/components/icons'
|
||||
import { buildTriggerSubBlocks } from '@/triggers'
|
||||
import {
|
||||
build{Service}ExtraFields,
|
||||
build{Service}Outputs,
|
||||
{service}SetupInstructions,
|
||||
{service}TriggerOptions,
|
||||
} from '@/triggers/{service}/utils'
|
||||
import { build{Service}ExtraFields, build{Service}Outputs, {service}SetupInstructions, {service}TriggerOptions } from '@/triggers/{service}/utils'
|
||||
import type { TriggerConfig } from '@/triggers/types'
|
||||
|
||||
/**
|
||||
* {Service} Event A Trigger
|
||||
*
|
||||
* This is the PRIMARY trigger - it includes the dropdown for selecting trigger type.
|
||||
*/
|
||||
export const {service}EventATrigger: TriggerConfig = {
|
||||
id: '{service}_event_a',
|
||||
name: '{Service} Event A',
|
||||
@@ -132,496 +103,222 @@ export const {service}EventATrigger: TriggerConfig = {
|
||||
description: 'Trigger workflow when Event A occurs',
|
||||
version: '1.0.0',
|
||||
icon: {Service}Icon,
|
||||
|
||||
subBlocks: buildTriggerSubBlocks({
|
||||
triggerId: '{service}_event_a',
|
||||
triggerOptions: {service}TriggerOptions,
|
||||
includeDropdown: true, // PRIMARY TRIGGER - includes dropdown
|
||||
includeDropdown: true,
|
||||
setupInstructions: {service}SetupInstructions('Event A'),
|
||||
extraFields: build{Service}ExtraFields('{service}_event_a'),
|
||||
}),
|
||||
|
||||
outputs: build{Service}Outputs(),
|
||||
|
||||
webhook: {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
},
|
||||
webhook: { method: 'POST', headers: { 'Content-Type': 'application/json' } },
|
||||
}
|
||||
```
|
||||
|
||||
## Step 3: Create Secondary Triggers
|
||||
|
||||
Secondary triggers do NOT include the dropdown (it's already in the primary trigger).
|
||||
**Secondary triggers** — NO `includeDropdown` (it's already in the primary):
|
||||
|
||||
```typescript
|
||||
import { {Service}Icon } from '@/components/icons'
|
||||
import { buildTriggerSubBlocks } from '@/triggers'
|
||||
import {
|
||||
build{Service}ExtraFields,
|
||||
build{Service}Outputs,
|
||||
{service}SetupInstructions,
|
||||
{service}TriggerOptions,
|
||||
} from '@/triggers/{service}/utils'
|
||||
import type { TriggerConfig } from '@/triggers/types'
|
||||
|
||||
/**
|
||||
* {Service} Event B Trigger
|
||||
*/
|
||||
export const {service}EventBTrigger: TriggerConfig = {
|
||||
id: '{service}_event_b',
|
||||
name: '{Service} Event B',
|
||||
provider: '{service}',
|
||||
description: 'Trigger workflow when Event B occurs',
|
||||
version: '1.0.0',
|
||||
icon: {Service}Icon,
|
||||
|
||||
subBlocks: buildTriggerSubBlocks({
|
||||
triggerId: '{service}_event_b',
|
||||
triggerOptions: {service}TriggerOptions,
|
||||
// NO includeDropdown - secondary trigger
|
||||
setupInstructions: {service}SetupInstructions('Event B'),
|
||||
extraFields: build{Service}ExtraFields('{service}_event_b'),
|
||||
}),
|
||||
|
||||
outputs: build{Service}Outputs(),
|
||||
|
||||
webhook: {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
},
|
||||
// Same as above but: id: '{service}_event_b', no includeDropdown
|
||||
}
|
||||
```
|
||||
|
||||
## Step 4: Create index.ts Barrel Export
|
||||
## Step 3: Register and Wire
|
||||
|
||||
### `apps/sim/triggers/{service}/index.ts`
|
||||
|
||||
```typescript
|
||||
export { {service}EventATrigger } from './event_a'
|
||||
export { {service}EventBTrigger } from './event_b'
|
||||
export { {service}EventCTrigger } from './event_c'
|
||||
export { {service}WebhookTrigger } from './webhook'
|
||||
```
|
||||
|
||||
## Step 5: Register Triggers
|
||||
|
||||
### Trigger Registry (`apps/sim/triggers/registry.ts`)
|
||||
### `apps/sim/triggers/registry.ts`
|
||||
|
||||
```typescript
|
||||
// Add import
|
||||
import {
|
||||
{service}EventATrigger,
|
||||
{service}EventBTrigger,
|
||||
{service}EventCTrigger,
|
||||
{service}WebhookTrigger,
|
||||
} from '@/triggers/{service}'
|
||||
import { {service}EventATrigger, {service}EventBTrigger } from '@/triggers/{service}'
|
||||
|
||||
// Add to TRIGGER_REGISTRY
|
||||
export const TRIGGER_REGISTRY: TriggerRegistry = {
|
||||
// ... existing triggers ...
|
||||
// ... existing ...
|
||||
{service}_event_a: {service}EventATrigger,
|
||||
{service}_event_b: {service}EventBTrigger,
|
||||
{service}_event_c: {service}EventCTrigger,
|
||||
{service}_webhook: {service}WebhookTrigger,
|
||||
}
|
||||
```
|
||||
|
||||
## Step 6: Connect Triggers to Block
|
||||
|
||||
In the block file (`apps/sim/blocks/blocks/{service}.ts`):
|
||||
### Block file (`apps/sim/blocks/blocks/{service}.ts`)
|
||||
|
||||
```typescript
|
||||
import { {Service}Icon } from '@/components/icons'
|
||||
import { getTrigger } from '@/triggers'
|
||||
import type { BlockConfig } from '@/blocks/types'
|
||||
|
||||
export const {Service}Block: BlockConfig = {
|
||||
type: '{service}',
|
||||
name: '{Service}',
|
||||
// ... other config ...
|
||||
|
||||
// Enable triggers and list available trigger IDs
|
||||
// ...
|
||||
triggers: {
|
||||
enabled: true,
|
||||
available: [
|
||||
'{service}_event_a',
|
||||
'{service}_event_b',
|
||||
'{service}_event_c',
|
||||
'{service}_webhook',
|
||||
],
|
||||
available: ['{service}_event_a', '{service}_event_b'],
|
||||
},
|
||||
|
||||
subBlocks: [
|
||||
// Regular tool subBlocks first
|
||||
{ id: 'operation', /* ... */ },
|
||||
{ id: 'credential', /* ... */ },
|
||||
// ... other tool fields ...
|
||||
|
||||
// Then spread ALL trigger subBlocks
|
||||
// Regular tool subBlocks first...
|
||||
...getTrigger('{service}_event_a').subBlocks,
|
||||
...getTrigger('{service}_event_b').subBlocks,
|
||||
...getTrigger('{service}_event_c').subBlocks,
|
||||
...getTrigger('{service}_webhook').subBlocks,
|
||||
],
|
||||
|
||||
// ... tools config ...
|
||||
}
|
||||
```
|
||||
|
||||
## Automatic Webhook Registration (Preferred)
|
||||
## Provider Handler
|
||||
|
||||
If the service's API supports programmatic webhook creation, implement automatic webhook registration instead of requiring users to manually configure webhooks. This provides a much better user experience.
|
||||
All provider-specific webhook logic lives in a single handler file: `apps/sim/lib/webhooks/providers/{service}.ts`.
|
||||
|
||||
### When to Use Automatic Registration
|
||||
### When to Create a Handler
|
||||
|
||||
Check the service's API documentation for endpoints like:
|
||||
- `POST /webhooks` or `POST /hooks` - Create webhook
|
||||
- `DELETE /webhooks/{id}` - Delete webhook
|
||||
| Behavior | Method | Examples |
|
||||
|---|---|---|
|
||||
| HMAC signature auth | `verifyAuth` via `createHmacVerifier` | Ashby, Jira, Linear, Typeform |
|
||||
| Custom token auth | `verifyAuth` via `verifyTokenAuth` | Generic, Google Forms |
|
||||
| Event filtering | `matchEvent` | GitHub, Jira, Attio, HubSpot |
|
||||
| Idempotency dedup | `extractIdempotencyId` | Slack, Stripe, Linear, Jira |
|
||||
| Custom input formatting | `formatInput` | Slack, Teams, Attio, Ashby |
|
||||
| Auto webhook creation | `createSubscription` | Ashby, Grain, Calendly, Airtable |
|
||||
| Auto webhook deletion | `deleteSubscription` | Ashby, Grain, Calendly, Airtable |
|
||||
| Challenge/verification | `handleChallenge` | Slack, WhatsApp, Teams |
|
||||
| Custom success response | `formatSuccessResponse` | Slack, Twilio Voice, Teams |
|
||||
|
||||
Services that support this pattern include: Grain, Lemlist, Calendly, Airtable, Webflow, Typeform, etc.
|
||||
If none apply, you don't need a handler. The default handler provides bearer token auth.
|
||||
|
||||
### Implementation Steps
|
||||
|
||||
#### 1. Add API Key to Extra Fields
|
||||
|
||||
Update your `build{Service}ExtraFields` function to include an API key field:
|
||||
### Example Handler
|
||||
|
||||
```typescript
|
||||
export function build{Service}ExtraFields(triggerId: string): SubBlockConfig[] {
|
||||
return [
|
||||
{
|
||||
id: 'apiKey',
|
||||
title: 'API Key',
|
||||
type: 'short-input',
|
||||
placeholder: 'Enter your {Service} API key',
|
||||
description: 'Required to create the webhook in {Service}.',
|
||||
password: true,
|
||||
required: true,
|
||||
mode: 'trigger',
|
||||
condition: { field: 'selectedTriggerId', value: triggerId },
|
||||
},
|
||||
// Other optional fields (e.g., campaign filter, project filter)
|
||||
{
|
||||
id: 'projectId',
|
||||
title: 'Project ID (Optional)',
|
||||
type: 'short-input',
|
||||
placeholder: 'Leave empty for all projects',
|
||||
mode: 'trigger',
|
||||
condition: { field: 'selectedTriggerId', value: triggerId },
|
||||
},
|
||||
]
|
||||
import crypto from 'crypto'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { safeCompare } from '@/lib/core/security/encryption'
|
||||
import type { EventMatchContext, FormatInputContext, FormatInputResult, WebhookProviderHandler } from '@/lib/webhooks/providers/types'
|
||||
import { createHmacVerifier } from '@/lib/webhooks/providers/utils'
|
||||
|
||||
const logger = createLogger('WebhookProvider:{Service}')
|
||||
|
||||
function validate{Service}Signature(secret: string, signature: string, body: string): boolean {
|
||||
if (!secret || !signature || !body) return false
|
||||
const computed = crypto.createHmac('sha256', secret).update(body, 'utf8').digest('hex')
|
||||
return safeCompare(computed, signature)
|
||||
}
|
||||
```
|
||||
|
||||
#### 2. Update Setup Instructions for Automatic Creation
|
||||
export const {service}Handler: WebhookProviderHandler = {
|
||||
verifyAuth: createHmacVerifier({
|
||||
configKey: 'webhookSecret',
|
||||
headerName: 'X-{Service}-Signature',
|
||||
validateFn: validate{Service}Signature,
|
||||
providerLabel: '{Service}',
|
||||
}),
|
||||
|
||||
Change instructions to indicate automatic webhook creation:
|
||||
async matchEvent({ body, requestId, providerConfig }: EventMatchContext) {
|
||||
const triggerId = providerConfig.triggerId as string | undefined
|
||||
if (triggerId && triggerId !== '{service}_webhook') {
|
||||
const { is{Service}EventMatch } = await import('@/triggers/{service}/utils')
|
||||
if (!is{Service}EventMatch(triggerId, body as Record<string, unknown>)) return false
|
||||
}
|
||||
return true
|
||||
},
|
||||
|
||||
```typescript
|
||||
export function {service}SetupInstructions(eventType: string): string {
|
||||
const instructions = [
|
||||
'Enter your {Service} API Key above.',
|
||||
'You can find your API key in {Service} at <strong>Settings > API</strong>.',
|
||||
`Click <strong>"Save Configuration"</strong> to automatically create the webhook in {Service} for <strong>${eventType}</strong> events.`,
|
||||
'The webhook will be automatically deleted when you remove this trigger.',
|
||||
]
|
||||
|
||||
return instructions
|
||||
.map((instruction, index) =>
|
||||
`<div class="mb-3"><strong>${index + 1}.</strong> ${instruction}</div>`
|
||||
)
|
||||
.join('')
|
||||
}
|
||||
```
|
||||
|
||||
#### 3. Add Webhook Creation to API Route
|
||||
|
||||
In `apps/sim/app/api/webhooks/route.ts`, add provider-specific logic after the database save:
|
||||
|
||||
```typescript
|
||||
// --- {Service} specific logic ---
|
||||
if (savedWebhook && provider === '{service}') {
|
||||
logger.info(`[${requestId}] {Service} provider detected. Creating webhook subscription.`)
|
||||
try {
|
||||
const result = await create{Service}WebhookSubscription(
|
||||
{
|
||||
id: savedWebhook.id,
|
||||
path: savedWebhook.path,
|
||||
providerConfig: savedWebhook.providerConfig,
|
||||
async formatInput({ body }: FormatInputContext): Promise<FormatInputResult> {
|
||||
const b = body as Record<string, unknown>
|
||||
return {
|
||||
input: {
|
||||
eventType: b.type,
|
||||
resourceId: (b.data as Record<string, unknown>)?.id || '',
|
||||
resource: b.data,
|
||||
},
|
||||
requestId
|
||||
)
|
||||
|
||||
if (result) {
|
||||
// Update the webhook record with the external webhook ID
|
||||
const updatedConfig = {
|
||||
...(savedWebhook.providerConfig as Record<string, any>),
|
||||
externalId: result.id,
|
||||
}
|
||||
await db
|
||||
.update(webhook)
|
||||
.set({
|
||||
providerConfig: updatedConfig,
|
||||
updatedAt: new Date(),
|
||||
})
|
||||
.where(eq(webhook.id, savedWebhook.id))
|
||||
|
||||
savedWebhook.providerConfig = updatedConfig
|
||||
logger.info(`[${requestId}] Successfully created {Service} webhook`, {
|
||||
externalHookId: result.id,
|
||||
webhookId: savedWebhook.id,
|
||||
})
|
||||
}
|
||||
} catch (err) {
|
||||
logger.error(
|
||||
`[${requestId}] Error creating {Service} webhook subscription, rolling back webhook`,
|
||||
err
|
||||
)
|
||||
await db.delete(webhook).where(eq(webhook.id, savedWebhook.id))
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: 'Failed to create webhook in {Service}',
|
||||
details: err instanceof Error ? err.message : 'Unknown error',
|
||||
},
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
},
|
||||
|
||||
extractIdempotencyId(body: unknown) {
|
||||
const obj = body as Record<string, unknown>
|
||||
return obj.id && obj.type ? `${obj.type}:${obj.id}` : null
|
||||
},
|
||||
}
|
||||
// --- End {Service} specific logic ---
|
||||
```
|
||||
|
||||
Then add the helper function at the end of the file:
|
||||
### Register the Handler
|
||||
|
||||
In `apps/sim/lib/webhooks/providers/registry.ts`:
|
||||
|
||||
```typescript
|
||||
async function create{Service}WebhookSubscription(
|
||||
webhookData: any,
|
||||
requestId: string
|
||||
): Promise<{ id: string } | undefined> {
|
||||
try {
|
||||
const { path, providerConfig } = webhookData
|
||||
const { apiKey, triggerId, projectId } = providerConfig || {}
|
||||
import { {service}Handler } from '@/lib/webhooks/providers/{service}'
|
||||
|
||||
if (!apiKey) {
|
||||
throw new Error('{Service} API Key is required.')
|
||||
}
|
||||
const PROVIDER_HANDLERS: Record<string, WebhookProviderHandler> = {
|
||||
// ... existing (alphabetical) ...
|
||||
{service}: {service}Handler,
|
||||
}
|
||||
```
|
||||
|
||||
// Map trigger IDs to service event types
|
||||
const eventTypeMap: Record<string, string | undefined> = {
|
||||
{service}_event_a: 'eventA',
|
||||
{service}_event_b: 'eventB',
|
||||
{service}_webhook: undefined, // Generic - no filter
|
||||
}
|
||||
## Output Alignment (Critical)
|
||||
|
||||
const eventType = eventTypeMap[triggerId]
|
||||
const notificationUrl = `${getBaseUrl()}/api/webhooks/trigger/${path}`
|
||||
There are two sources of truth that **MUST be aligned**:
|
||||
|
||||
const requestBody: Record<string, any> = {
|
||||
url: notificationUrl,
|
||||
}
|
||||
1. **Trigger `outputs`** — schema defining what fields SHOULD be available (UI tag dropdown)
|
||||
2. **`formatInput` on the handler** — implementation that transforms raw payload into actual data
|
||||
|
||||
if (eventType) {
|
||||
requestBody.eventType = eventType
|
||||
}
|
||||
If they differ: the tag dropdown shows fields that don't exist, or actual data has fields users can't discover.
|
||||
|
||||
if (projectId) {
|
||||
requestBody.projectId = projectId
|
||||
}
|
||||
**Rules for `formatInput`:**
|
||||
- Return `{ input: { ... } }` where inner keys match trigger `outputs` exactly
|
||||
- Return `{ input: ..., skip: { message: '...' } }` to skip execution
|
||||
- No wrapper objects or duplication
|
||||
- Use `null` for missing optional data
|
||||
|
||||
const response = await fetch('https://api.{service}.com/webhooks', {
|
||||
## Automatic Webhook Registration
|
||||
|
||||
If the service API supports programmatic webhook creation, implement `createSubscription` and `deleteSubscription` on the handler. The orchestration layer calls these automatically — **no code touches `route.ts`, `provider-subscriptions.ts`, or `deploy.ts`**.
|
||||
|
||||
```typescript
|
||||
import { getNotificationUrl, getProviderConfig } from '@/lib/webhooks/provider-subscription-utils'
|
||||
import type { DeleteSubscriptionContext, SubscriptionContext, SubscriptionResult } from '@/lib/webhooks/providers/types'
|
||||
|
||||
export const {service}Handler: WebhookProviderHandler = {
|
||||
async createSubscription(ctx: SubscriptionContext): Promise<SubscriptionResult | undefined> {
|
||||
const config = getProviderConfig(ctx.webhook)
|
||||
const apiKey = config.apiKey as string
|
||||
if (!apiKey) throw new Error('{Service} API Key is required.')
|
||||
|
||||
const res = await fetch('https://api.{service}.com/webhooks', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
Authorization: `Bearer ${apiKey}`,
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
body: JSON.stringify(requestBody),
|
||||
headers: { Authorization: `Bearer ${apiKey}`, 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({ url: getNotificationUrl(ctx.webhook) }),
|
||||
})
|
||||
|
||||
const responseBody = await response.json()
|
||||
if (!res.ok) throw new Error(`{Service} error: ${res.status}`)
|
||||
const { id } = (await res.json()) as { id: string }
|
||||
return { providerConfigUpdates: { externalId: id } }
|
||||
},
|
||||
|
||||
if (!response.ok) {
|
||||
const errorMessage = responseBody.message || 'Unknown API error'
|
||||
let userFriendlyMessage = 'Failed to create webhook in {Service}'
|
||||
|
||||
if (response.status === 401) {
|
||||
userFriendlyMessage = 'Invalid API Key. Please verify and try again.'
|
||||
} else if (errorMessage) {
|
||||
userFriendlyMessage = `{Service} error: ${errorMessage}`
|
||||
}
|
||||
|
||||
throw new Error(userFriendlyMessage)
|
||||
}
|
||||
|
||||
return { id: responseBody.id }
|
||||
} catch (error: any) {
|
||||
logger.error(`Exception during {Service} webhook creation`, { error: error.message })
|
||||
throw error
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
#### 4. Add Webhook Deletion to Provider Subscriptions
|
||||
|
||||
In `apps/sim/lib/webhooks/provider-subscriptions.ts`:
|
||||
|
||||
1. Add a logger:
|
||||
```typescript
|
||||
const {service}Logger = createLogger('{Service}Webhook')
|
||||
```
|
||||
|
||||
2. Add the delete function:
|
||||
```typescript
|
||||
export async function delete{Service}Webhook(webhook: any, requestId: string): Promise<void> {
|
||||
try {
|
||||
const config = getProviderConfig(webhook)
|
||||
const apiKey = config.apiKey as string | undefined
|
||||
const externalId = config.externalId as string | undefined
|
||||
|
||||
if (!apiKey || !externalId) {
|
||||
{service}Logger.warn(`[${requestId}] Missing apiKey or externalId, skipping cleanup`)
|
||||
return
|
||||
}
|
||||
|
||||
const response = await fetch(`https://api.{service}.com/webhooks/${externalId}`, {
|
||||
async deleteSubscription(ctx: DeleteSubscriptionContext): Promise<void> {
|
||||
const config = getProviderConfig(ctx.webhook)
|
||||
const { apiKey, externalId } = config as { apiKey?: string; externalId?: string }
|
||||
if (!apiKey || !externalId) return
|
||||
await fetch(`https://api.{service}.com/webhooks/${externalId}`, {
|
||||
method: 'DELETE',
|
||||
headers: {
|
||||
Authorization: `Bearer ${apiKey}`,
|
||||
},
|
||||
})
|
||||
|
||||
if (!response.ok && response.status !== 404) {
|
||||
{service}Logger.warn(`[${requestId}] Failed to delete webhook (non-fatal): ${response.status}`)
|
||||
} else {
|
||||
{service}Logger.info(`[${requestId}] Successfully deleted webhook ${externalId}`)
|
||||
}
|
||||
} catch (error) {
|
||||
{service}Logger.warn(`[${requestId}] Error deleting webhook (non-fatal)`, error)
|
||||
}
|
||||
headers: { Authorization: `Bearer ${apiKey}` },
|
||||
}).catch(() => {})
|
||||
},
|
||||
}
|
||||
```
|
||||
|
||||
3. Add to `cleanupExternalWebhook`:
|
||||
```typescript
|
||||
export async function cleanupExternalWebhook(...): Promise<void> {
|
||||
// ... existing providers ...
|
||||
} else if (webhook.provider === '{service}') {
|
||||
await delete{Service}Webhook(webhook, requestId)
|
||||
}
|
||||
}
|
||||
```
|
||||
**Key points:**
|
||||
- Throw from `createSubscription` — orchestration rolls back the DB webhook
|
||||
- Never throw from `deleteSubscription` — log non-fatally
|
||||
- Return `{ providerConfigUpdates: { externalId } }` — orchestration merges into `providerConfig`
|
||||
- Add `apiKey` field to `build{Service}ExtraFields` with `password: true`
|
||||
|
||||
### Key Points for Automatic Registration
|
||||
|
||||
- **API Key visibility**: Always use `password: true` for API key fields
|
||||
- **Error handling**: Roll back the database webhook if external creation fails
|
||||
- **External ID storage**: Save the external webhook ID in `providerConfig.externalId`
|
||||
- **Graceful cleanup**: Don't fail webhook deletion if cleanup fails (use non-fatal logging)
|
||||
- **User-friendly errors**: Map HTTP status codes to helpful error messages
|
||||
|
||||
## The buildTriggerSubBlocks Helper
|
||||
|
||||
This is the generic helper from `@/triggers` that creates consistent trigger subBlocks.
|
||||
|
||||
### Function Signature
|
||||
|
||||
```typescript
|
||||
interface BuildTriggerSubBlocksOptions {
|
||||
triggerId: string // e.g., 'service_event_a'
|
||||
triggerOptions: Array<{ label: string; id: string }> // Dropdown options
|
||||
includeDropdown?: boolean // true only for primary trigger
|
||||
setupInstructions: string // HTML instructions
|
||||
extraFields?: SubBlockConfig[] // Service-specific fields
|
||||
webhookPlaceholder?: string // Custom placeholder text
|
||||
}
|
||||
|
||||
function buildTriggerSubBlocks(options: BuildTriggerSubBlocksOptions): SubBlockConfig[]
|
||||
```
|
||||
|
||||
### What It Creates
|
||||
|
||||
The helper creates this structure:
|
||||
1. **Dropdown** (only if `includeDropdown: true`) - Trigger type selector
|
||||
2. **Webhook URL** - Read-only field with copy button
|
||||
3. **Extra Fields** - Your service-specific fields (filters, options, etc.)
|
||||
4. **Save Button** - Activates the trigger
|
||||
5. **Instructions** - Setup guide for users
|
||||
|
||||
All fields automatically have:
|
||||
- `mode: 'trigger'` - Only shown in trigger mode
|
||||
- `condition: { field: 'selectedTriggerId', value: triggerId }` - Only shown when this trigger is selected
|
||||
|
||||
## Trigger Outputs & Webhook Input Formatting
|
||||
|
||||
### Important: Two Sources of Truth
|
||||
|
||||
There are two related but separate concerns:
|
||||
|
||||
1. **Trigger `outputs`** - Schema/contract defining what fields SHOULD be available. Used by UI for tag dropdown.
|
||||
2. **`formatWebhookInput`** - Implementation that transforms raw webhook payload into actual data. Located in `apps/sim/lib/webhooks/utils.server.ts`.
|
||||
|
||||
**These MUST be aligned.** The fields returned by `formatWebhookInput` should match what's defined in trigger `outputs`. If they differ:
|
||||
- Tag dropdown shows fields that don't exist (broken variable resolution)
|
||||
- Or actual data has fields not shown in dropdown (users can't discover them)
|
||||
|
||||
### When to Add a formatWebhookInput Handler
|
||||
|
||||
- **Simple providers**: If the raw webhook payload structure already matches your outputs, you don't need a handler. The generic fallback returns `body` directly.
|
||||
- **Complex providers**: If you need to transform, flatten, extract nested data, compute fields, or handle conditional logic, add a handler.
|
||||
|
||||
### Adding a Handler
|
||||
|
||||
In `apps/sim/lib/webhooks/utils.server.ts`, add a handler block:
|
||||
|
||||
```typescript
|
||||
if (foundWebhook.provider === '{service}') {
|
||||
// Transform raw webhook body to match trigger outputs
|
||||
return {
|
||||
eventType: body.type,
|
||||
resourceId: body.data?.id || '',
|
||||
timestamp: body.created_at,
|
||||
resource: body.data,
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
**Key rules:**
|
||||
- Return fields that match your trigger `outputs` definition exactly
|
||||
- No wrapper objects like `webhook: { data: ... }` or `{service}: { ... }`
|
||||
- No duplication (don't spread body AND add individual fields)
|
||||
- Use `null` for missing optional data, not empty objects with empty strings
|
||||
|
||||
### Verify Alignment
|
||||
|
||||
Run the alignment checker:
|
||||
```bash
|
||||
bunx scripts/check-trigger-alignment.ts {service}
|
||||
```
|
||||
|
||||
## Trigger Outputs
|
||||
## Trigger Outputs Schema
|
||||
|
||||
Trigger outputs use the same schema as block outputs (NOT tool outputs).
|
||||
|
||||
**Supported:**
|
||||
- `type` and `description` for simple fields
|
||||
- Nested object structure for complex data
|
||||
|
||||
**NOT Supported:**
|
||||
- `optional: true` (tool outputs only)
|
||||
- `items` property (tool outputs only)
|
||||
**Supported:** `type` + `description` for leaf fields, nested objects for complex data.
|
||||
**NOT supported:** `optional: true`, `items` (those are tool-output-only features).
|
||||
|
||||
```typescript
|
||||
export function buildOutputs(): Record<string, TriggerOutput> {
|
||||
return {
|
||||
// Simple fields
|
||||
eventType: { type: 'string', description: 'Event type' },
|
||||
timestamp: { type: 'string', description: 'When it occurred' },
|
||||
|
||||
// Complex data - use type: 'json'
|
||||
payload: { type: 'json', description: 'Full event payload' },
|
||||
|
||||
// Nested structure
|
||||
resource: {
|
||||
id: { type: 'string', description: 'Resource ID' },
|
||||
name: { type: 'string', description: 'Resource name' },
|
||||
@@ -630,79 +327,32 @@ export function buildOutputs(): Record<string, TriggerOutput> {
|
||||
}
|
||||
```
|
||||
|
||||
## Generic Webhook Trigger Pattern
|
||||
## Checklist
|
||||
|
||||
For services with many event types, create a generic webhook that accepts all events:
|
||||
|
||||
```typescript
|
||||
export const {service}WebhookTrigger: TriggerConfig = {
|
||||
id: '{service}_webhook',
|
||||
name: '{Service} Webhook (All Events)',
|
||||
// ...
|
||||
|
||||
subBlocks: buildTriggerSubBlocks({
|
||||
triggerId: '{service}_webhook',
|
||||
triggerOptions: {service}TriggerOptions,
|
||||
setupInstructions: {service}SetupInstructions('All Events'),
|
||||
extraFields: [
|
||||
// Event type filter (optional)
|
||||
{
|
||||
id: 'eventTypes',
|
||||
title: 'Event Types',
|
||||
type: 'dropdown',
|
||||
multiSelect: true,
|
||||
options: [
|
||||
{ label: 'Event A', id: 'event_a' },
|
||||
{ label: 'Event B', id: 'event_b' },
|
||||
],
|
||||
placeholder: 'Leave empty for all events',
|
||||
mode: 'trigger',
|
||||
condition: { field: 'selectedTriggerId', value: '{service}_webhook' },
|
||||
},
|
||||
// Plus any other service-specific fields
|
||||
...build{Service}ExtraFields('{service}_webhook'),
|
||||
],
|
||||
}),
|
||||
}
|
||||
```
|
||||
|
||||
## Checklist Before Finishing
|
||||
|
||||
### Utils
|
||||
- [ ] Created `{service}TriggerOptions` array with all trigger IDs
|
||||
- [ ] Created `{service}SetupInstructions` function with clear steps
|
||||
- [ ] Created `build{Service}ExtraFields` for service-specific fields
|
||||
- [ ] Created output builders for each trigger type
|
||||
|
||||
### Triggers
|
||||
- [ ] Primary trigger has `includeDropdown: true`
|
||||
- [ ] Secondary triggers do NOT have `includeDropdown`
|
||||
### Trigger Definition
|
||||
- [ ] Created `utils.ts` with options, instructions, extra fields, and output builders
|
||||
- [ ] Primary trigger has `includeDropdown: true`; secondary triggers do NOT
|
||||
- [ ] All triggers use `buildTriggerSubBlocks` helper
|
||||
- [ ] All triggers have proper outputs defined
|
||||
- [ ] Created `index.ts` barrel export
|
||||
|
||||
### Registration
|
||||
- [ ] All triggers imported in `triggers/registry.ts`
|
||||
- [ ] All triggers added to `TRIGGER_REGISTRY`
|
||||
- [ ] Block has `triggers.enabled: true`
|
||||
- [ ] Block has all trigger IDs in `triggers.available`
|
||||
- [ ] All triggers in `triggers/registry.ts` → `TRIGGER_REGISTRY`
|
||||
- [ ] Block has `triggers.enabled: true` and lists all trigger IDs in `triggers.available`
|
||||
- [ ] Block spreads all trigger subBlocks: `...getTrigger('id').subBlocks`
|
||||
|
||||
### Automatic Webhook Registration (if supported)
|
||||
- [ ] Added API key field to `build{Service}ExtraFields` with `password: true`
|
||||
- [ ] Updated setup instructions for automatic webhook creation
|
||||
- [ ] Added provider-specific logic to `apps/sim/app/api/webhooks/route.ts`
|
||||
- [ ] Added `create{Service}WebhookSubscription` helper function
|
||||
- [ ] Added `delete{Service}Webhook` function to `provider-subscriptions.ts`
|
||||
- [ ] Added provider to `cleanupExternalWebhook` function
|
||||
### Provider Handler (if needed)
|
||||
- [ ] Handler file at `apps/sim/lib/webhooks/providers/{service}.ts`
|
||||
- [ ] Registered in `providers/registry.ts` (alphabetical)
|
||||
- [ ] Signature validator is a private function inside the handler file
|
||||
- [ ] `formatInput` output keys match trigger `outputs` exactly
|
||||
- [ ] Event matching uses dynamic `await import()` for trigger utils
|
||||
|
||||
### Webhook Input Formatting
|
||||
- [ ] Added handler in `apps/sim/lib/webhooks/utils.server.ts` (if custom formatting needed)
|
||||
- [ ] Handler returns fields matching trigger `outputs` exactly
|
||||
- [ ] Run `bunx scripts/check-trigger-alignment.ts {service}` to verify alignment
|
||||
### Auto Registration (if supported)
|
||||
- [ ] `createSubscription` and `deleteSubscription` on the handler
|
||||
- [ ] NO changes to `route.ts`, `provider-subscriptions.ts`, or `deploy.ts`
|
||||
- [ ] API key field uses `password: true`
|
||||
|
||||
### Testing
|
||||
- [ ] Run `bun run type-check` to verify no TypeScript errors
|
||||
- [ ] Restart dev server to pick up new triggers
|
||||
- [ ] Test trigger UI shows correctly in the block
|
||||
- [ ] Test automatic webhook creation works (if applicable)
|
||||
- [ ] `bun run type-check` passes
|
||||
- [ ] Manually verify `formatInput` output keys match trigger `outputs` keys
|
||||
- [ ] Trigger UI shows correctly in the block
|
||||
|
||||
212
.claude/commands/validate-trigger.md
Normal file
212
.claude/commands/validate-trigger.md
Normal file
@@ -0,0 +1,212 @@
|
||||
---
|
||||
description: Validate an existing Sim webhook trigger against provider API docs and repository conventions
|
||||
argument-hint: <service-name> [api-docs-url]
|
||||
---
|
||||
|
||||
# Validate Trigger
|
||||
|
||||
You are an expert auditor for Sim webhook triggers. Your job is to validate that an existing trigger implementation is correct, complete, secure, and aligned across all layers.
|
||||
|
||||
## Your Task
|
||||
|
||||
1. Read the service's webhook/API documentation (via WebFetch)
|
||||
2. Read every trigger file, provider handler, and registry entry
|
||||
3. Cross-reference against the API docs and Sim conventions
|
||||
4. Report all issues grouped by severity (critical, warning, suggestion)
|
||||
5. Fix all issues after reporting them
|
||||
|
||||
## Step 1: Gather All Files
|
||||
|
||||
Read **every** file for the trigger — do not skip any:
|
||||
|
||||
```
|
||||
apps/sim/triggers/{service}/ # All trigger files, utils.ts, index.ts
|
||||
apps/sim/lib/webhooks/providers/{service}.ts # Provider handler (if exists)
|
||||
apps/sim/lib/webhooks/providers/registry.ts # Handler registry
|
||||
apps/sim/triggers/registry.ts # Trigger registry
|
||||
apps/sim/blocks/blocks/{service}.ts # Block definition (trigger wiring)
|
||||
```
|
||||
|
||||
Also read for reference:
|
||||
```
|
||||
apps/sim/lib/webhooks/providers/types.ts # WebhookProviderHandler interface
|
||||
apps/sim/lib/webhooks/providers/utils.ts # Shared helpers (createHmacVerifier, etc.)
|
||||
apps/sim/lib/webhooks/provider-subscription-utils.ts # Subscription helpers
|
||||
apps/sim/lib/webhooks/processor.ts # Central webhook processor
|
||||
```
|
||||
|
||||
## Step 2: Pull API Documentation
|
||||
|
||||
Fetch the service's official webhook documentation. This is the **source of truth** for:
|
||||
- Webhook event types and payload shapes
|
||||
- Signature/auth verification method (HMAC algorithm, header names, secret format)
|
||||
- Challenge/verification handshake requirements
|
||||
- Webhook subscription API (create/delete endpoints, if applicable)
|
||||
- Retry behavior and delivery guarantees
|
||||
|
||||
## Step 3: Validate Trigger Definitions
|
||||
|
||||
### utils.ts
|
||||
- [ ] `{service}TriggerOptions` lists all trigger IDs accurately
|
||||
- [ ] `{service}SetupInstructions` provides clear, correct steps for the service
|
||||
- [ ] `build{Service}ExtraFields` includes relevant filter/config fields with correct `condition`
|
||||
- [ ] Output builders expose all meaningful fields from the webhook payload
|
||||
- [ ] Output builders do NOT use `optional: true` or `items` (tool-output-only features)
|
||||
- [ ] Nested output objects correctly model the payload structure
|
||||
|
||||
### Trigger Files
|
||||
- [ ] Exactly one primary trigger has `includeDropdown: true`
|
||||
- [ ] All secondary triggers do NOT have `includeDropdown`
|
||||
- [ ] All triggers use `buildTriggerSubBlocks` helper (not hand-rolled subBlocks)
|
||||
- [ ] Every trigger's `id` matches the convention `{service}_{event_name}`
|
||||
- [ ] Every trigger's `provider` matches the service name used in the handler registry
|
||||
- [ ] `index.ts` barrel exports all triggers
|
||||
|
||||
### Trigger ↔ Provider Alignment (CRITICAL)
|
||||
- [ ] Every trigger ID referenced in `matchEvent` logic exists in `{service}TriggerOptions`
|
||||
- [ ] Event matching logic in the provider correctly maps trigger IDs to service event types
|
||||
- [ ] Event matching logic in `is{Service}EventMatch` (if exists) correctly identifies events per the API docs
|
||||
|
||||
## Step 4: Validate Provider Handler
|
||||
|
||||
### Auth Verification
|
||||
- [ ] `verifyAuth` correctly validates webhook signatures per the service's documentation
|
||||
- [ ] HMAC algorithm matches (SHA-1, SHA-256, SHA-512)
|
||||
- [ ] Signature header name matches the API docs exactly
|
||||
- [ ] Signature format is handled (raw hex, `sha256=` prefix, base64, etc.)
|
||||
- [ ] Uses `safeCompare` for timing-safe comparison (no `===`)
|
||||
- [ ] If `webhookSecret` is required, handler rejects when it's missing (fail-closed)
|
||||
- [ ] Signature is computed over raw body (not parsed JSON)
|
||||
|
||||
### Event Matching
|
||||
- [ ] `matchEvent` returns `boolean` (not `NextResponse` or other values)
|
||||
- [ ] Challenge/verification events are excluded from matching (e.g., `endpoint.url_validation`)
|
||||
- [ ] When `triggerId` is a generic webhook ID, all events pass through
|
||||
- [ ] When `triggerId` is specific, only matching events pass
|
||||
- [ ] Event matching logic uses dynamic `await import()` for trigger utils (avoids circular deps)
|
||||
|
||||
### formatInput (CRITICAL)
|
||||
- [ ] Every key in the `formatInput` return matches a key in the trigger `outputs` schema
|
||||
- [ ] Every key in the trigger `outputs` schema is populated by `formatInput`
|
||||
- [ ] No extra undeclared keys that users can't discover in the UI
|
||||
- [ ] No wrapper objects (`webhook: { ... }`, `{service}: { ... }`)
|
||||
- [ ] Nested output paths exist at the correct depth (e.g., `resource.id` actually has `resource: { id: ... }`)
|
||||
- [ ] `null` is used for missing optional fields (not empty strings or empty objects)
|
||||
- [ ] Returns `{ input: { ... } }` — not a bare object
|
||||
|
||||
### Idempotency
|
||||
- [ ] `extractIdempotencyId` returns a stable, unique key per delivery
|
||||
- [ ] Uses provider-specific delivery IDs when available (e.g., `X-Request-Id`, `Linear-Delivery`, `svix-id`)
|
||||
- [ ] Falls back to content-based ID (e.g., `${type}:${id}`) when no delivery header exists
|
||||
- [ ] Does NOT include timestamps in the idempotency key (would break dedup on retries)
|
||||
|
||||
### Challenge Handling (if applicable)
|
||||
- [ ] `handleChallenge` correctly implements the service's URL verification handshake
|
||||
- [ ] Returns the expected response format per the API docs
|
||||
- [ ] Env-backed secrets are resolved via `resolveEnvVarsInObject` if needed
|
||||
|
||||
## Step 5: Validate Automatic Subscription Lifecycle
|
||||
|
||||
If the service supports programmatic webhook creation:
|
||||
|
||||
### createSubscription
|
||||
- [ ] Calls the correct API endpoint to create a webhook
|
||||
- [ ] Sends the correct event types/filters
|
||||
- [ ] Passes the notification URL from `getNotificationUrl(ctx.webhook)`
|
||||
- [ ] Returns `{ providerConfigUpdates: { externalId } }` with the external webhook ID
|
||||
- [ ] Throws on failure (orchestration handles rollback)
|
||||
- [ ] Provides user-friendly error messages (401 → "Invalid API Key", etc.)
|
||||
|
||||
### deleteSubscription
|
||||
- [ ] Calls the correct API endpoint to delete the webhook
|
||||
- [ ] Handles 404 gracefully (webhook already deleted)
|
||||
- [ ] Never throws — catches errors and logs non-fatally
|
||||
- [ ] Skips gracefully when `apiKey` or `externalId` is missing
|
||||
|
||||
### Orchestration Isolation
|
||||
- [ ] NO provider-specific logic in `route.ts`, `provider-subscriptions.ts`, or `deploy.ts`
|
||||
- [ ] All subscription logic lives on the handler (`createSubscription`/`deleteSubscription`)
|
||||
|
||||
## Step 6: Validate Registration and Block Wiring
|
||||
|
||||
### Trigger Registry (`triggers/registry.ts`)
|
||||
- [ ] All triggers are imported and registered
|
||||
- [ ] Registry keys match trigger IDs exactly
|
||||
- [ ] No orphaned entries (triggers that don't exist)
|
||||
|
||||
### Provider Handler Registry (`providers/registry.ts`)
|
||||
- [ ] Handler is imported and registered (if handler exists)
|
||||
- [ ] Registry key matches the `provider` field on the trigger configs
|
||||
- [ ] Entries are in alphabetical order
|
||||
|
||||
### Block Wiring (`blocks/blocks/{service}.ts`)
|
||||
- [ ] Block has `triggers.enabled: true`
|
||||
- [ ] `triggers.available` lists all trigger IDs
|
||||
- [ ] All trigger subBlocks are spread into `subBlocks`: `...getTrigger('id').subBlocks`
|
||||
- [ ] No trigger IDs in `triggers.available` that aren't in the registry
|
||||
- [ ] No trigger subBlocks spread that aren't in `triggers.available`
|
||||
|
||||
## Step 7: Validate Security
|
||||
|
||||
- [ ] Webhook secrets are never logged (not even at debug level)
|
||||
- [ ] Auth verification runs before any event processing
|
||||
- [ ] No secret comparison uses `===` (must use `safeCompare` or `crypto.timingSafeEqual`)
|
||||
- [ ] Timestamp/replay protection is reasonable (not too tight for retries, not too loose for security)
|
||||
- [ ] Raw body is used for signature verification (not re-serialized JSON)
|
||||
|
||||
## Step 8: Report and Fix
|
||||
|
||||
### Report Format
|
||||
|
||||
Group findings by severity:
|
||||
|
||||
**Critical** (runtime errors, security issues, or data loss):
|
||||
- Wrong HMAC algorithm or header name
|
||||
- `formatInput` keys don't match trigger `outputs`
|
||||
- Missing `verifyAuth` when the service sends signed webhooks
|
||||
- `matchEvent` returns non-boolean values
|
||||
- Provider-specific logic leaking into shared orchestration files
|
||||
- Trigger IDs mismatch between trigger files, registry, and block
|
||||
- `createSubscription` calling wrong API endpoint
|
||||
- Auth comparison using `===` instead of `safeCompare`
|
||||
|
||||
**Warning** (convention violations or usability issues):
|
||||
- Missing `extractIdempotencyId` when the service provides delivery IDs
|
||||
- Timestamps in idempotency keys (breaks dedup on retries)
|
||||
- Missing challenge handling when the service requires URL verification
|
||||
- Output schema missing fields that `formatInput` returns (undiscoverable data)
|
||||
- Overly tight timestamp skew window that rejects legitimate retries
|
||||
- `matchEvent` not filtering challenge/verification events
|
||||
- Setup instructions missing important steps
|
||||
|
||||
**Suggestion** (minor improvements):
|
||||
- More specific output field descriptions
|
||||
- Additional output fields that could be exposed
|
||||
- Better error messages in `createSubscription`
|
||||
- Logging improvements
|
||||
|
||||
### Fix All Issues
|
||||
|
||||
After reporting, fix every **critical** and **warning** issue. Apply **suggestions** where they don't add unnecessary complexity.
|
||||
|
||||
### Validation Output
|
||||
|
||||
After fixing, confirm:
|
||||
1. `bun run type-check` passes
|
||||
2. Re-read all modified files to verify fixes are correct
|
||||
3. Provider handler tests pass (if they exist): `bun test {service}`
|
||||
|
||||
## Checklist Summary
|
||||
|
||||
- [ ] Read all trigger files, provider handler, types, registries, and block
|
||||
- [ ] Pulled and read official webhook/API documentation
|
||||
- [ ] Validated trigger definitions: options, instructions, extra fields, outputs
|
||||
- [ ] Validated primary/secondary trigger distinction (`includeDropdown`)
|
||||
- [ ] Validated provider handler: auth, matchEvent, formatInput, idempotency
|
||||
- [ ] Validated output alignment: every `outputs` key ↔ every `formatInput` key
|
||||
- [ ] Validated subscription lifecycle: createSubscription, deleteSubscription, no shared-file edits
|
||||
- [ ] Validated registration: trigger registry, handler registry, block wiring
|
||||
- [ ] Validated security: safe comparison, no secret logging, replay protection
|
||||
- [ ] Reported all issues grouped by severity
|
||||
- [ ] Fixed all critical and warning issues
|
||||
- [ ] `bun run type-check` passes after fixes
|
||||
@@ -2132,7 +2132,15 @@ export function Mem0Icon(props: SVGProps<SVGSVGElement>) {
|
||||
|
||||
export function ExtendIcon(props: SVGProps<SVGSVGElement>) {
|
||||
return (
|
||||
<svg {...props} xmlns='http://www.w3.org/2000/svg' viewBox='0 0 33 18' fill='none'>
|
||||
<svg {...props} xmlns='http://www.w3.org/2000/svg' viewBox='0 0 33 24' fill='none'>
|
||||
<path
|
||||
d='M6.3782 13.7746L4.28874 14.8056C4.11382 14.8899 4.11382 15.1367 4.28874 15.2211L15.8924 20.9462C16.1423 21.068 16.433 21.068 16.6797 20.9462L28.2864 15.2211C28.4582 15.1367 28.4582 14.8899 28.2864 14.8056L26.2 13.7746C27.3838 13.1937 28.5145 12.6378 29.4578 12.1787C30.2605 12.5722 31.0666 12.9689 31.8693 13.3625C32.3003 13.5749 32.5721 14.0123 32.5721 14.4932V15.5426C32.5721 16.0204 32.3003 16.4609 31.8693 16.6733C31.8693 16.6733 19.5816 22.7016 17.5542 23.6887C16.7296 24.0916 15.8955 24.1103 15.0615 23.7043C12.8123 22.6078 1.9646 17.2857 0.705842 16.6672C0.274806 16.4579 0 16.0174 0 15.5395V14.4899C4.1552e-05 14.012 0.271779 13.5715 0.702792 13.3591C1.43993 12.9968 2.2584 12.5973 3.12047 12.1756C4.06685 12.641 5.19446 13.1937 6.3782 13.7746Z'
|
||||
fill='currentColor'
|
||||
/>
|
||||
<path
|
||||
d='M15.1021 6.30652C15.9017 5.92234 16.717 5.9348 17.5103 6.32207C20.1715 7.62145 22.8297 8.92398 25.4878 10.2265L22.249 11.8257L16.6797 9.07681C16.433 8.955 16.1423 8.955 15.8924 9.07681L10.3262 11.8257L7.0874 10.2265C11.2142 8.20664 15.0743 6.3201 15.1021 6.30652Z'
|
||||
fill='currentColor'
|
||||
/>
|
||||
<path
|
||||
clipRule='evenodd'
|
||||
d='M16.2893 0C16.6984 1.91708e-05 17.1074 0.0970011 17.5103 0.293745C22.3018 2.63326 27.0841 4.98521 31.8693 7.33722C32.3003 7.54649 32.5721 7.9868 32.5721 8.46461V9.51422C32.5721 9.99522 32.3004 10.4357 31.8693 10.645C31.8693 10.645 19.5816 16.6732 17.5542 17.6634C17.1357 17.8696 16.692 17.9727 16.2859 17.9727C15.8799 17.9727 15.4707 17.8758 15.0615 17.6759C12.8124 16.5795 1.9646 11.2604 0.705842 10.6419C0.274826 10.4295 2.31482e-05 9.99216 0 9.51117V8.46461C4.59913e-05 7.98366 0.271816 7.54656 0.702792 7.33417C5.8977 4.7819 15.0599 0.301869 15.1021 0.281239C15.4957 0.0938275 15.8801 0 16.2893 0ZM16.2859 2.96124C16.1516 2.96126 16.0173 2.98909 15.8924 3.05153L4.28874 8.77696C4.11382 8.86442 4.11382 9.10831 4.28874 9.19577L15.8924 14.9209C16.0173 14.9802 16.1516 15.0115 16.2859 15.0115C16.4202 15.0115 16.5548 14.9802 16.6797 14.9209L28.2864 9.19577C28.4582 9.10831 28.4582 8.86442 28.2864 8.77696L16.6797 3.05153C16.5548 2.98906 16.4202 2.96124 16.2859 2.96124Z'
|
||||
@@ -5930,6 +5938,33 @@ export function PulseIcon(props: SVGProps<SVGSVGElement>) {
|
||||
)
|
||||
}
|
||||
|
||||
export function SixtyfourIcon(props: SVGProps<SVGSVGElement>) {
|
||||
return (
|
||||
<svg {...props} viewBox='0 0 158 143' fill='none' xmlns='http://www.w3.org/2000/svg'>
|
||||
<path
|
||||
d='M32.3952 141.17L31.637 140.73V142.481L31.8417 142.603L32.3952 142.921L32.9487 142.603L33.1534 142.481V140.73L32.3952 141.17Z'
|
||||
fill='currentColor'
|
||||
/>
|
||||
<path
|
||||
d='M33.1534 140.73V142.603H31.637V140.73L32.3952 141.17L33.1534 140.73Z'
|
||||
fill='currentColor'
|
||||
/>
|
||||
<path
|
||||
d='M93.3271 105.608V106.564L94.0854 106.996L94.8436 106.564V105.608H93.3271Z'
|
||||
fill='currentColor'
|
||||
/>
|
||||
<path
|
||||
d='M94.8436 105.608V106.564L94.0854 106.996L93.3271 106.564V105.608H94.8436Z'
|
||||
fill='currentColor'
|
||||
/>
|
||||
<path
|
||||
d='M125.681 12.9895L94.836 30.755L63.9909 12.9895L32.3951 31.1872H32.3875V68.8565L0.79933 87.0542H0.791748V124.723L31.6369 142.481V140.73L2.30822 123.844V89.6701L31.6369 106.564V140.73L32.3951 141.17L33.1533 140.73V106.564L62.482 89.6701V123.844L33.1533 140.73V142.481L63.2402 125.163L93.3271 142.481L93.5318 142.603L94.0853 142.921L94.6388 142.603L94.8436 142.481L125.689 124.723V87.0542L126.235 86.7357L126.439 86.6144L157.284 68.8565V31.1872L125.681 12.9895ZM63.2326 84.8629L33.904 67.9769V33.8031L63.2326 50.6967V84.8629ZM64.7491 50.6967L94.0777 33.8031V67.9769L64.7491 84.8629V50.6967ZM124.172 123.844L94.8436 140.73V106.564L94.0853 106.996L93.3271 106.564V140.73L63.9985 123.844V89.6701L93.3271 106.564V105.608H94.8436V106.564L124.172 89.6701V123.844ZM124.923 84.8629L95.5942 67.9769V33.8031L124.923 50.6891V84.8629ZM155.768 67.9769L126.439 84.8629V50.6967L155.768 33.8031V67.9769Z'
|
||||
fill='currentColor'
|
||||
/>
|
||||
</svg>
|
||||
)
|
||||
}
|
||||
|
||||
export function SimilarwebIcon(props: SVGProps<SVGSVGElement>) {
|
||||
return (
|
||||
<svg
|
||||
|
||||
@@ -154,6 +154,7 @@ import {
|
||||
SftpIcon,
|
||||
ShopifyIcon,
|
||||
SimilarwebIcon,
|
||||
SixtyfourIcon,
|
||||
SlackIcon,
|
||||
SmtpIcon,
|
||||
SQSIcon,
|
||||
@@ -283,7 +284,7 @@ export const blockTypeToIconMap: Record<string, IconComponent> = {
|
||||
langsmith: LangsmithIcon,
|
||||
launchdarkly: LaunchDarklyIcon,
|
||||
lemlist: LemlistIcon,
|
||||
linear: LinearIcon,
|
||||
linear_v2: LinearIcon,
|
||||
linkedin: LinkedInIcon,
|
||||
linkup: LinkupIcon,
|
||||
loops: LoopsIcon,
|
||||
@@ -340,6 +341,7 @@ export const blockTypeToIconMap: Record<string, IconComponent> = {
|
||||
sharepoint: MicrosoftSharepointIcon,
|
||||
shopify: ShopifyIcon,
|
||||
similarweb: SimilarwebIcon,
|
||||
sixtyfour: SixtyfourIcon,
|
||||
slack: SlackIcon,
|
||||
smtp: SmtpIcon,
|
||||
sqs: SQSIcon,
|
||||
|
||||
@@ -93,17 +93,36 @@ Access resume data in downstream blocks using `<blockId.resumeInput.fieldName>`.
|
||||
<Tab>
|
||||
### REST API
|
||||
|
||||
Programmatically resume workflows:
|
||||
Programmatically resume workflows using the resume endpoint. The `contextId` is available from the block's `resumeEndpoint` output or from the paused execution detail.
|
||||
|
||||
```bash
|
||||
POST /api/workflows/{workflowId}/executions/{executionId}/resume/{blockId}
|
||||
POST /api/resume/{workflowId}/{executionId}/{contextId}
|
||||
Content-Type: application/json
|
||||
|
||||
{
|
||||
"approved": true,
|
||||
"comments": "Looks good to proceed"
|
||||
"input": {
|
||||
"approved": true,
|
||||
"comments": "Looks good to proceed"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
The response includes a new `executionId` for the resumed execution:
|
||||
|
||||
```json
|
||||
{
|
||||
"status": "started",
|
||||
"executionId": "<resumeExecutionId>",
|
||||
"message": "Resume execution started."
|
||||
}
|
||||
```
|
||||
|
||||
To poll execution progress after resuming, connect to the SSE stream:
|
||||
|
||||
```bash
|
||||
GET /api/workflows/{workflowId}/executions/{resumeExecutionId}/stream
|
||||
```
|
||||
|
||||
Build custom approval UIs or integrate with existing systems.
|
||||
</Tab>
|
||||
<Tab>
|
||||
|
||||
@@ -6,7 +6,7 @@ description: Interact with Linear issues, projects, and more
|
||||
import { BlockInfoCard } from "@/components/ui/block-info-card"
|
||||
|
||||
<BlockInfoCard
|
||||
type="linear"
|
||||
type="linear_v2"
|
||||
color="#5E6AD2"
|
||||
/>
|
||||
|
||||
|
||||
@@ -150,6 +150,7 @@
|
||||
"sharepoint",
|
||||
"shopify",
|
||||
"similarweb",
|
||||
"sixtyfour",
|
||||
"slack",
|
||||
"smtp",
|
||||
"sqs",
|
||||
|
||||
128
apps/docs/content/docs/en/tools/sixtyfour.mdx
Normal file
128
apps/docs/content/docs/en/tools/sixtyfour.mdx
Normal file
@@ -0,0 +1,128 @@
|
||||
---
|
||||
title: Sixtyfour AI
|
||||
description: Enrich leads and companies with AI-powered research
|
||||
---
|
||||
|
||||
import { BlockInfoCard } from "@/components/ui/block-info-card"
|
||||
|
||||
<BlockInfoCard
|
||||
type="sixtyfour"
|
||||
color="#000000"
|
||||
/>
|
||||
|
||||
## Usage Instructions
|
||||
|
||||
Find emails, phone numbers, and enrich lead or company data with contact information, social profiles, and detailed research using Sixtyfour AI.
|
||||
|
||||
|
||||
|
||||
## Tools
|
||||
|
||||
### `sixtyfour_find_phone`
|
||||
|
||||
Find phone numbers for a lead using Sixtyfour AI.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | Sixtyfour API key |
|
||||
| `name` | string | Yes | Full name of the person |
|
||||
| `company` | string | No | Company name |
|
||||
| `linkedinUrl` | string | No | LinkedIn profile URL |
|
||||
| `domain` | string | No | Company website domain |
|
||||
| `email` | string | No | Email address |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `name` | string | Name of the person |
|
||||
| `company` | string | Company name |
|
||||
| `phone` | string | Phone number\(s\) found |
|
||||
| `linkedinUrl` | string | LinkedIn profile URL |
|
||||
|
||||
### `sixtyfour_find_email`
|
||||
|
||||
Find email addresses for a lead using Sixtyfour AI.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | Sixtyfour API key |
|
||||
| `name` | string | Yes | Full name of the person |
|
||||
| `company` | string | No | Company name |
|
||||
| `linkedinUrl` | string | No | LinkedIn profile URL |
|
||||
| `domain` | string | No | Company website domain |
|
||||
| `phone` | string | No | Phone number |
|
||||
| `title` | string | No | Job title |
|
||||
| `mode` | string | No | Email discovery mode: PROFESSIONAL \(default\) or PERSONAL |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `name` | string | Name of the person |
|
||||
| `company` | string | Company name |
|
||||
| `title` | string | Job title |
|
||||
| `phone` | string | Phone number |
|
||||
| `linkedinUrl` | string | LinkedIn profile URL |
|
||||
| `emails` | json | Professional email addresses found |
|
||||
| ↳ `address` | string | Email address |
|
||||
| ↳ `status` | string | Validation status \(OK or UNKNOWN\) |
|
||||
| ↳ `type` | string | Email type \(COMPANY or PERSONAL\) |
|
||||
| `personalEmails` | json | Personal email addresses found \(only in PERSONAL mode\) |
|
||||
| ↳ `address` | string | Email address |
|
||||
| ↳ `status` | string | Validation status \(OK or UNKNOWN\) |
|
||||
| ↳ `type` | string | Email type \(COMPANY or PERSONAL\) |
|
||||
|
||||
### `sixtyfour_enrich_lead`
|
||||
|
||||
Enrich lead information with contact details, social profiles, and company data using Sixtyfour AI.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | Sixtyfour API key |
|
||||
| `leadInfo` | string | Yes | Lead information as JSON object with key-value pairs \(e.g. name, company, title, linkedin\) |
|
||||
| `struct` | string | Yes | Fields to collect as JSON object. Keys are field names, values are descriptions \(e.g. \{"email": "The individual\'s email address", "phone": "Phone number"\}\) |
|
||||
| `researchPlan` | string | No | Optional research plan to guide enrichment strategy |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `notes` | string | Research notes about the lead |
|
||||
| `structuredData` | json | Enriched lead data matching the requested struct fields |
|
||||
| `references` | json | Source URLs and descriptions used for enrichment |
|
||||
| `confidenceScore` | number | Quality score for the returned data \(0-10\) |
|
||||
|
||||
### `sixtyfour_enrich_company`
|
||||
|
||||
Enrich company data with additional information and find associated people using Sixtyfour AI.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | Sixtyfour API key |
|
||||
| `targetCompany` | string | Yes | Company data as JSON object \(e.g. \{"name": "Acme Inc", "domain": "acme.com"\}\) |
|
||||
| `struct` | string | Yes | Fields to collect as JSON object. Keys are field names, values are descriptions \(e.g. \{"website": "Company website URL", "num_employees": "Employee count"\}\) |
|
||||
| `findPeople` | boolean | No | Whether to find people associated with the company |
|
||||
| `fullOrgChart` | boolean | No | Whether to retrieve the full organizational chart |
|
||||
| `researchPlan` | string | No | Optional strategy describing how the agent should search for information |
|
||||
| `peopleFocusPrompt` | string | No | Description of people to find \(roles, responsibilities\) |
|
||||
| `leadStruct` | string | No | Custom schema for returned lead data as JSON object |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `notes` | string | Research notes about the company |
|
||||
| `structuredData` | json | Enriched company data matching the requested struct fields |
|
||||
| `references` | json | Source URLs and descriptions used for enrichment |
|
||||
| `confidenceScore` | number | Quality score for the returned data \(0-10\) |
|
||||
|
||||
|
||||
@@ -97,49 +97,49 @@ export function SetNewPasswordForm({
|
||||
}: SetNewPasswordFormProps) {
|
||||
const [password, setPassword] = useState('')
|
||||
const [confirmPassword, setConfirmPassword] = useState('')
|
||||
const [validationMessage, setValidationMessage] = useState('')
|
||||
const [validationMessages, setValidationMessages] = useState<string[]>([])
|
||||
const [showPassword, setShowPassword] = useState(false)
|
||||
const [showConfirmPassword, setShowConfirmPassword] = useState(false)
|
||||
|
||||
const handleSubmit = async (e: React.FormEvent) => {
|
||||
e.preventDefault()
|
||||
|
||||
const errors: string[] = []
|
||||
|
||||
if (password.length < 8) {
|
||||
setValidationMessage('Password must be at least 8 characters long')
|
||||
return
|
||||
errors.push('Password must be at least 8 characters long')
|
||||
}
|
||||
|
||||
if (password.length > 100) {
|
||||
setValidationMessage('Password must not exceed 100 characters')
|
||||
return
|
||||
errors.push('Password must not exceed 100 characters')
|
||||
}
|
||||
|
||||
if (!/[A-Z]/.test(password)) {
|
||||
setValidationMessage('Password must contain at least one uppercase letter')
|
||||
return
|
||||
errors.push('Password must contain at least one uppercase letter')
|
||||
}
|
||||
|
||||
if (!/[a-z]/.test(password)) {
|
||||
setValidationMessage('Password must contain at least one lowercase letter')
|
||||
return
|
||||
errors.push('Password must contain at least one lowercase letter')
|
||||
}
|
||||
|
||||
if (!/[0-9]/.test(password)) {
|
||||
setValidationMessage('Password must contain at least one number')
|
||||
return
|
||||
errors.push('Password must contain at least one number')
|
||||
}
|
||||
|
||||
if (!/[^A-Za-z0-9]/.test(password)) {
|
||||
setValidationMessage('Password must contain at least one special character')
|
||||
return
|
||||
errors.push('Password must contain at least one special character')
|
||||
}
|
||||
|
||||
if (password !== confirmPassword) {
|
||||
setValidationMessage('Passwords do not match')
|
||||
errors.push('Passwords do not match')
|
||||
}
|
||||
|
||||
if (errors.length > 0) {
|
||||
setValidationMessages(errors)
|
||||
return
|
||||
}
|
||||
|
||||
setValidationMessage('')
|
||||
setValidationMessages([])
|
||||
onSubmit(password)
|
||||
}
|
||||
|
||||
@@ -162,7 +162,10 @@ export function SetNewPasswordForm({
|
||||
onChange={(e) => setPassword(e.target.value)}
|
||||
required
|
||||
placeholder='Enter new password'
|
||||
className={cn('pr-10', validationMessage && 'border-red-500 focus:border-red-500')}
|
||||
className={cn(
|
||||
'pr-10',
|
||||
validationMessages.length > 0 && 'border-red-500 focus:border-red-500'
|
||||
)}
|
||||
/>
|
||||
<button
|
||||
type='button'
|
||||
@@ -190,7 +193,10 @@ export function SetNewPasswordForm({
|
||||
onChange={(e) => setConfirmPassword(e.target.value)}
|
||||
required
|
||||
placeholder='Confirm new password'
|
||||
className={cn('pr-10', validationMessage && 'border-red-500 focus:border-red-500')}
|
||||
className={cn(
|
||||
'pr-10',
|
||||
validationMessages.length > 0 && 'border-red-500 focus:border-red-500'
|
||||
)}
|
||||
/>
|
||||
<button
|
||||
type='button'
|
||||
@@ -203,9 +209,11 @@ export function SetNewPasswordForm({
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{validationMessage && (
|
||||
{validationMessages.length > 0 && (
|
||||
<div className='mt-1 space-y-1 text-red-400 text-xs'>
|
||||
<p>{validationMessage}</p>
|
||||
{validationMessages.map((error, index) => (
|
||||
<p key={index}>{error}</p>
|
||||
))}
|
||||
</div>
|
||||
)}
|
||||
|
||||
|
||||
@@ -228,18 +228,6 @@ function SignupFormContent({
|
||||
emailValidationErrors.length > 0 ||
|
||||
errors.length > 0
|
||||
) {
|
||||
if (nameValidationErrors.length > 0) {
|
||||
setNameErrors([nameValidationErrors[0]])
|
||||
setShowNameValidationError(true)
|
||||
}
|
||||
if (emailValidationErrors.length > 0) {
|
||||
setEmailErrors([emailValidationErrors[0]])
|
||||
setShowEmailValidationError(true)
|
||||
}
|
||||
if (errors.length > 0) {
|
||||
setPasswordErrors([errors[0]])
|
||||
setShowValidationError(true)
|
||||
}
|
||||
setIsLoading(false)
|
||||
return
|
||||
}
|
||||
@@ -261,6 +249,9 @@ function SignupFormContent({
|
||||
widget.execute()
|
||||
token = await widget.getResponsePromise()
|
||||
} catch {
|
||||
captureEvent(posthog, 'signup_failed', {
|
||||
error_code: 'captcha_client_failure',
|
||||
})
|
||||
setFormError('Captcha verification failed. Please try again.')
|
||||
setIsLoading(false)
|
||||
return
|
||||
@@ -284,7 +275,9 @@ function SignupFormContent({
|
||||
logger.error('Signup error:', ctx.error)
|
||||
const errorMessage: string[] = ['Failed to create account']
|
||||
|
||||
let errorCode = 'unknown'
|
||||
if (ctx.error.code?.includes('USER_ALREADY_EXISTS')) {
|
||||
errorCode = 'user_already_exists'
|
||||
errorMessage.push(
|
||||
'An account with this email already exists. Please sign in instead.'
|
||||
)
|
||||
@@ -293,24 +286,30 @@ function SignupFormContent({
|
||||
ctx.error.code?.includes('BAD_REQUEST') ||
|
||||
ctx.error.message?.includes('Email and password sign up is not enabled')
|
||||
) {
|
||||
errorCode = 'signup_disabled'
|
||||
errorMessage.push('Email signup is currently disabled.')
|
||||
setEmailError(errorMessage[0])
|
||||
} else if (ctx.error.code?.includes('INVALID_EMAIL')) {
|
||||
errorCode = 'invalid_email'
|
||||
errorMessage.push('Please enter a valid email address.')
|
||||
setEmailError(errorMessage[0])
|
||||
} else if (ctx.error.code?.includes('PASSWORD_TOO_SHORT')) {
|
||||
errorCode = 'password_too_short'
|
||||
errorMessage.push('Password must be at least 8 characters long.')
|
||||
setPasswordErrors(errorMessage)
|
||||
setShowValidationError(true)
|
||||
} else if (ctx.error.code?.includes('PASSWORD_TOO_LONG')) {
|
||||
errorCode = 'password_too_long'
|
||||
errorMessage.push('Password must be less than 128 characters long.')
|
||||
setPasswordErrors(errorMessage)
|
||||
setShowValidationError(true)
|
||||
} else if (ctx.error.code?.includes('network')) {
|
||||
errorCode = 'network_error'
|
||||
errorMessage.push('Network error. Please check your connection and try again.')
|
||||
setPasswordErrors(errorMessage)
|
||||
setShowValidationError(true)
|
||||
} else if (ctx.error.code?.includes('rate limit')) {
|
||||
errorCode = 'rate_limited'
|
||||
errorMessage.push('Too many requests. Please wait a moment before trying again.')
|
||||
setPasswordErrors(errorMessage)
|
||||
setShowValidationError(true)
|
||||
@@ -318,6 +317,8 @@ function SignupFormContent({
|
||||
setPasswordErrors(errorMessage)
|
||||
setShowValidationError(true)
|
||||
}
|
||||
|
||||
captureEvent(posthog, 'signup_failed', { error_code: errorCode })
|
||||
},
|
||||
}
|
||||
)
|
||||
@@ -400,7 +401,7 @@ function SignupFormContent({
|
||||
/>
|
||||
<div
|
||||
className={cn(
|
||||
'absolute right-0 left-0 z-10 grid transition-[grid-template-rows] duration-200 ease-out',
|
||||
'grid transition-[grid-template-rows] duration-200 ease-out',
|
||||
showNameValidationError && nameErrors.length > 0
|
||||
? 'grid-rows-[1fr]'
|
||||
: 'grid-rows-[0fr]'
|
||||
@@ -438,7 +439,7 @@ function SignupFormContent({
|
||||
/>
|
||||
<div
|
||||
className={cn(
|
||||
'absolute right-0 left-0 z-10 grid transition-[grid-template-rows] duration-200 ease-out',
|
||||
'grid transition-[grid-template-rows] duration-200 ease-out',
|
||||
(showEmailValidationError && emailErrors.length > 0) ||
|
||||
(emailError && !showEmailValidationError)
|
||||
? 'grid-rows-[1fr]'
|
||||
@@ -497,7 +498,7 @@ function SignupFormContent({
|
||||
</div>
|
||||
<div
|
||||
className={cn(
|
||||
'absolute right-0 left-0 z-10 grid transition-[grid-template-rows] duration-200 ease-out',
|
||||
'grid transition-[grid-template-rows] duration-200 ease-out',
|
||||
showValidationError && passwordErrors.length > 0
|
||||
? 'grid-rows-[1fr]'
|
||||
: 'grid-rows-[0fr]'
|
||||
|
||||
@@ -13,6 +13,7 @@ import {
|
||||
Textarea,
|
||||
} from '@/components/emcn'
|
||||
import { Check } from '@/components/emcn/icons'
|
||||
import { captureClientEvent } from '@/lib/posthog/client'
|
||||
import {
|
||||
DEMO_REQUEST_COMPANY_SIZE_OPTIONS,
|
||||
type DemoRequestPayload,
|
||||
@@ -163,6 +164,9 @@ export function DemoRequestModal({ children, theme = 'dark' }: DemoRequestModalP
|
||||
}
|
||||
|
||||
setSubmitSuccess(true)
|
||||
captureClientEvent('landing_demo_request_submitted', {
|
||||
company_size: parsed.data.companySize,
|
||||
})
|
||||
} catch (error) {
|
||||
setSubmitError(
|
||||
error instanceof Error
|
||||
|
||||
@@ -3,7 +3,9 @@
|
||||
import { useCallback, useRef, useState } from 'react'
|
||||
import { ArrowUp } from 'lucide-react'
|
||||
import Link from 'next/link'
|
||||
import { captureClientEvent } from '@/lib/posthog/client'
|
||||
import { useLandingSubmit } from '@/app/(landing)/components/landing-preview/components/landing-preview-panel/landing-preview-panel'
|
||||
import { trackLandingCta } from '@/app/(landing)/landing-analytics'
|
||||
import { useAnimatedPlaceholder } from '@/hooks/use-animated-placeholder'
|
||||
|
||||
const MAX_HEIGHT = 120
|
||||
@@ -21,6 +23,7 @@ export function FooterCTA() {
|
||||
|
||||
const handleSubmit = useCallback(() => {
|
||||
if (isEmpty) return
|
||||
captureClientEvent('landing_prompt_submitted', {})
|
||||
landingSubmit(inputValue)
|
||||
}, [isEmpty, inputValue, landingSubmit])
|
||||
|
||||
@@ -94,12 +97,22 @@ export function FooterCTA() {
|
||||
target='_blank'
|
||||
rel='noopener noreferrer'
|
||||
className={`${CTA_BUTTON} border-[var(--landing-border-strong)] text-[var(--landing-text)] transition-colors hover:bg-[var(--landing-bg-elevated)]`}
|
||||
onClick={() =>
|
||||
trackLandingCta({
|
||||
label: 'Docs',
|
||||
section: 'footer_cta',
|
||||
destination: 'https://docs.sim.ai',
|
||||
})
|
||||
}
|
||||
>
|
||||
Docs
|
||||
</a>
|
||||
<Link
|
||||
href='/signup'
|
||||
className={`${CTA_BUTTON} gap-2 border-white bg-white text-black transition-colors hover:border-[#E0E0E0] hover:bg-[#E0E0E0]`}
|
||||
onClick={() =>
|
||||
trackLandingCta({ label: 'Get started', section: 'footer_cta', destination: '/signup' })
|
||||
}
|
||||
>
|
||||
Get started
|
||||
</Link>
|
||||
|
||||
@@ -3,6 +3,7 @@
|
||||
import dynamic from 'next/dynamic'
|
||||
import Link from 'next/link'
|
||||
import { DemoRequestModal } from '@/app/(landing)/components/demo-request/demo-request-modal'
|
||||
import { trackLandingCta } from '@/app/(landing)/landing-analytics'
|
||||
|
||||
const LandingPreview = dynamic(
|
||||
() =>
|
||||
@@ -57,6 +58,9 @@ export default function Hero() {
|
||||
type='button'
|
||||
className={`${CTA_BASE} border-[var(--landing-border-strong)] bg-transparent text-[var(--landing-text)] transition-colors hover:bg-[var(--landing-bg-elevated)]`}
|
||||
aria-label='Get a demo'
|
||||
onClick={() =>
|
||||
trackLandingCta({ label: 'Get a demo', section: 'hero', destination: 'demo_modal' })
|
||||
}
|
||||
>
|
||||
Get a demo
|
||||
</button>
|
||||
@@ -65,6 +69,9 @@ export default function Hero() {
|
||||
href='/signup'
|
||||
className={`${CTA_BASE} gap-2 border-white bg-white text-black transition-colors hover:border-[#E0E0E0] hover:bg-[#E0E0E0]`}
|
||||
aria-label='Get started with Sim'
|
||||
onClick={() =>
|
||||
trackLandingCta({ label: 'Get started', section: 'hero', destination: '/signup' })
|
||||
}
|
||||
>
|
||||
Get started
|
||||
</Link>
|
||||
|
||||
@@ -5,6 +5,7 @@ import { AnimatePresence, motion } from 'framer-motion'
|
||||
import { ArrowUp, Table } from 'lucide-react'
|
||||
import { Blimp, Checkbox, ChevronDown } from '@/components/emcn'
|
||||
import { TypeBoolean, TypeNumber, TypeText } from '@/components/emcn/icons'
|
||||
import { captureClientEvent } from '@/lib/posthog/client'
|
||||
import { useLandingSubmit } from '@/app/(landing)/components/landing-preview/components/landing-preview-panel/landing-preview-panel'
|
||||
import { EASE_OUT } from '@/app/(landing)/components/landing-preview/components/landing-preview-workflow/workflow-data'
|
||||
import { useAnimatedPlaceholder } from '@/hooks/use-animated-placeholder'
|
||||
@@ -151,6 +152,7 @@ export const LandingPreviewHome = memo(function LandingPreviewHome({
|
||||
|
||||
const handleSubmit = useCallback(() => {
|
||||
if (isEmpty) return
|
||||
captureClientEvent('landing_prompt_submitted', {})
|
||||
landingSubmit(inputValue)
|
||||
}, [isEmpty, inputValue, landingSubmit])
|
||||
|
||||
|
||||
@@ -9,6 +9,7 @@ import { createPortal } from 'react-dom'
|
||||
import { Blimp, BubbleChatPreview, ChevronDown, MoreHorizontal, Play } from '@/components/emcn'
|
||||
import { AgentIcon, HubspotIcon, OpenAIIcon, SalesforceIcon } from '@/components/icons'
|
||||
import { LandingPromptStorage } from '@/lib/core/utils/browser-storage'
|
||||
import { captureClientEvent } from '@/lib/posthog/client'
|
||||
import {
|
||||
EASE_OUT,
|
||||
type EditorPromptData,
|
||||
@@ -147,6 +148,7 @@ export const LandingPreviewPanel = memo(function LandingPreviewPanel({
|
||||
|
||||
const handleSubmit = useCallback(() => {
|
||||
if (isEmpty) return
|
||||
captureClientEvent('landing_prompt_submitted', {})
|
||||
landingSubmit(inputValue)
|
||||
}, [isEmpty, inputValue, landingSubmit])
|
||||
|
||||
|
||||
@@ -13,6 +13,7 @@ import {
|
||||
} from '@/app/(landing)/components/navbar/components/blog-dropdown'
|
||||
import { DocsDropdown } from '@/app/(landing)/components/navbar/components/docs-dropdown'
|
||||
import { GitHubStars } from '@/app/(landing)/components/navbar/components/github-stars'
|
||||
import { trackLandingCta } from '@/app/(landing)/landing-analytics'
|
||||
import { getBrandConfig } from '@/ee/whitelabeling'
|
||||
|
||||
type DropdownId = 'docs' | 'blog' | null
|
||||
@@ -212,6 +213,13 @@ export default function Navbar({ logoOnly = false, blogPosts = [] }: NavbarProps
|
||||
href='/workspace'
|
||||
className='inline-flex h-[30px] items-center gap-[7px] rounded-[5px] border border-[var(--white)] bg-[var(--white)] px-[9px] text-[13.5px] text-black transition-colors hover:border-[#E0E0E0] hover:bg-[#E0E0E0]'
|
||||
aria-label='Go to app'
|
||||
onClick={() =>
|
||||
trackLandingCta({
|
||||
label: 'Go to App',
|
||||
section: 'navbar',
|
||||
destination: '/workspace',
|
||||
})
|
||||
}
|
||||
>
|
||||
Go to App
|
||||
</Link>
|
||||
@@ -221,6 +229,9 @@ export default function Navbar({ logoOnly = false, blogPosts = [] }: NavbarProps
|
||||
href='/login'
|
||||
className='inline-flex h-[30px] items-center rounded-[5px] border border-[var(--landing-border-strong)] px-[9px] text-[13.5px] text-[var(--landing-text)] transition-colors hover:bg-[var(--landing-bg-elevated)]'
|
||||
aria-label='Log in'
|
||||
onClick={() =>
|
||||
trackLandingCta({ label: 'Log in', section: 'navbar', destination: '/login' })
|
||||
}
|
||||
>
|
||||
Log in
|
||||
</Link>
|
||||
@@ -228,6 +239,13 @@ export default function Navbar({ logoOnly = false, blogPosts = [] }: NavbarProps
|
||||
href='/signup'
|
||||
className='inline-flex h-[30px] items-center gap-[7px] rounded-[5px] border border-[var(--white)] bg-[var(--white)] px-2.5 text-[13.5px] text-black transition-colors hover:border-[#E0E0E0] hover:bg-[#E0E0E0]'
|
||||
aria-label='Get started with Sim'
|
||||
onClick={() =>
|
||||
trackLandingCta({
|
||||
label: 'Get started',
|
||||
section: 'navbar',
|
||||
destination: '/signup',
|
||||
})
|
||||
}
|
||||
>
|
||||
Get started
|
||||
</Link>
|
||||
@@ -303,7 +321,14 @@ export default function Navbar({ logoOnly = false, blogPosts = [] }: NavbarProps
|
||||
<Link
|
||||
href='/workspace'
|
||||
className='flex h-[32px] items-center justify-center rounded-[5px] border border-[var(--white)] bg-[var(--white)] text-[14px] text-black transition-colors active:bg-[#E0E0E0]'
|
||||
onClick={() => setMobileMenuOpen(false)}
|
||||
onClick={() => {
|
||||
trackLandingCta({
|
||||
label: 'Go to App',
|
||||
section: 'navbar',
|
||||
destination: '/workspace',
|
||||
})
|
||||
setMobileMenuOpen(false)
|
||||
}}
|
||||
aria-label='Go to app'
|
||||
>
|
||||
Go to App
|
||||
@@ -313,7 +338,10 @@ export default function Navbar({ logoOnly = false, blogPosts = [] }: NavbarProps
|
||||
<Link
|
||||
href='/login'
|
||||
className='flex h-[32px] items-center justify-center rounded-[5px] border border-[var(--landing-border-strong)] text-[14px] text-[var(--landing-text)] transition-colors active:bg-[var(--landing-bg-elevated)]'
|
||||
onClick={() => setMobileMenuOpen(false)}
|
||||
onClick={() => {
|
||||
trackLandingCta({ label: 'Log in', section: 'navbar', destination: '/login' })
|
||||
setMobileMenuOpen(false)
|
||||
}}
|
||||
aria-label='Log in'
|
||||
>
|
||||
Log in
|
||||
@@ -321,7 +349,14 @@ export default function Navbar({ logoOnly = false, blogPosts = [] }: NavbarProps
|
||||
<Link
|
||||
href='/signup'
|
||||
className='flex h-[32px] items-center justify-center rounded-[5px] border border-[var(--white)] bg-[var(--white)] text-[14px] text-black transition-colors active:bg-[#E0E0E0]'
|
||||
onClick={() => setMobileMenuOpen(false)}
|
||||
onClick={() => {
|
||||
trackLandingCta({
|
||||
label: 'Get started',
|
||||
section: 'navbar',
|
||||
destination: '/signup',
|
||||
})
|
||||
setMobileMenuOpen(false)
|
||||
}}
|
||||
aria-label='Get started with Sim'
|
||||
>
|
||||
Get started
|
||||
|
||||
@@ -3,6 +3,7 @@
|
||||
import Link from 'next/link'
|
||||
import { Badge } from '@/components/emcn'
|
||||
import { DemoRequestModal } from '@/app/(landing)/components/demo-request/demo-request-modal'
|
||||
import { trackLandingCta } from '@/app/(landing)/landing-analytics'
|
||||
|
||||
interface PricingTier {
|
||||
id: string
|
||||
@@ -150,6 +151,13 @@ function PricingCard({ tier }: PricingCardProps) {
|
||||
<button
|
||||
type='button'
|
||||
className='flex h-[32px] w-full items-center justify-center rounded-[5px] border border-[var(--landing-border-light)] bg-transparent px-2.5 font-[430] font-season text-[14px] text-[var(--landing-text-dark)] transition-colors hover:bg-[var(--landing-bg-hover)]'
|
||||
onClick={() =>
|
||||
trackLandingCta({
|
||||
label: tier.cta.label,
|
||||
section: 'pricing',
|
||||
destination: 'demo_modal',
|
||||
})
|
||||
}
|
||||
>
|
||||
{tier.cta.label}
|
||||
</button>
|
||||
@@ -158,6 +166,13 @@ function PricingCard({ tier }: PricingCardProps) {
|
||||
<Link
|
||||
href={tier.cta.href || '/signup'}
|
||||
className='flex h-[32px] w-full items-center justify-center rounded-[5px] border border-[#1D1D1D] bg-[#1D1D1D] px-2.5 font-[430] font-season text-[14px] text-white transition-colors hover:border-[var(--landing-border)] hover:bg-[var(--landing-bg-elevated)]'
|
||||
onClick={() =>
|
||||
trackLandingCta({
|
||||
label: tier.cta.label,
|
||||
section: 'pricing',
|
||||
destination: tier.cta.href || '/signup',
|
||||
})
|
||||
}
|
||||
>
|
||||
{tier.cta.label}
|
||||
</Link>
|
||||
@@ -165,6 +180,13 @@ function PricingCard({ tier }: PricingCardProps) {
|
||||
<Link
|
||||
href={tier.cta.href || '/signup'}
|
||||
className='flex h-[32px] w-full items-center justify-center rounded-[5px] border border-[var(--landing-border-light)] px-2.5 font-[430] font-season text-[14px] text-[var(--landing-text-dark)] transition-colors hover:bg-[var(--landing-bg-hover)]'
|
||||
onClick={() =>
|
||||
trackLandingCta({
|
||||
label: tier.cta.label,
|
||||
section: 'pricing',
|
||||
destination: tier.cta.href || '/signup',
|
||||
})
|
||||
}
|
||||
>
|
||||
{tier.cta.label}
|
||||
</Link>
|
||||
|
||||
@@ -154,6 +154,7 @@ import {
|
||||
SftpIcon,
|
||||
ShopifyIcon,
|
||||
SimilarwebIcon,
|
||||
SixtyfourIcon,
|
||||
SlackIcon,
|
||||
SmtpIcon,
|
||||
SQSIcon,
|
||||
@@ -283,7 +284,7 @@ export const blockTypeToIconMap: Record<string, IconComponent> = {
|
||||
langsmith: LangsmithIcon,
|
||||
launchdarkly: LaunchDarklyIcon,
|
||||
lemlist: LemlistIcon,
|
||||
linear: LinearIcon,
|
||||
linear_v2: LinearIcon,
|
||||
linkedin: LinkedInIcon,
|
||||
linkup: LinkupIcon,
|
||||
loops: LoopsIcon,
|
||||
@@ -340,6 +341,7 @@ export const blockTypeToIconMap: Record<string, IconComponent> = {
|
||||
sharepoint: MicrosoftSharepointIcon,
|
||||
shopify: ShopifyIcon,
|
||||
similarweb: SimilarwebIcon,
|
||||
sixtyfour: SixtyfourIcon,
|
||||
slack: SlackIcon,
|
||||
smtp: SmtpIcon,
|
||||
sqs: SQSIcon,
|
||||
|
||||
@@ -324,7 +324,7 @@
|
||||
"longDescription": "Search across your synced data sources using Airweave. Supports semantic search with hybrid, neural, or keyword retrieval strategies. Optionally generate AI-powered answers from search results.",
|
||||
"bgColor": "#6366F1",
|
||||
"iconName": "AirweaveIcon",
|
||||
"docsUrl": "https://docs.airweave.ai",
|
||||
"docsUrl": "https://docs.sim.ai/tools/airweave",
|
||||
"operations": [],
|
||||
"operationCount": 0,
|
||||
"triggers": [],
|
||||
@@ -4015,8 +4015,14 @@
|
||||
}
|
||||
],
|
||||
"operationCount": 12,
|
||||
"triggers": [],
|
||||
"triggerCount": 0,
|
||||
"triggers": [
|
||||
{
|
||||
"id": "gmail_poller",
|
||||
"name": "Gmail Email Trigger",
|
||||
"description": "Triggers when new emails are received in Gmail (requires Gmail credentials)"
|
||||
}
|
||||
],
|
||||
"triggerCount": 1,
|
||||
"authType": "oauth",
|
||||
"category": "tools",
|
||||
"integrationType": "email",
|
||||
@@ -4106,8 +4112,19 @@
|
||||
}
|
||||
],
|
||||
"operationCount": 18,
|
||||
"triggers": [],
|
||||
"triggerCount": 0,
|
||||
"triggers": [
|
||||
{
|
||||
"id": "gong_webhook",
|
||||
"name": "Gong Webhook",
|
||||
"description": "Generic webhook trigger for all Gong events"
|
||||
},
|
||||
{
|
||||
"id": "gong_call_completed",
|
||||
"name": "Gong Call Completed",
|
||||
"description": "Trigger workflow when a call is completed and processed in Gong"
|
||||
}
|
||||
],
|
||||
"triggerCount": 2,
|
||||
"authType": "none",
|
||||
"category": "tools",
|
||||
"integrationType": "sales-intelligence",
|
||||
@@ -5253,8 +5270,49 @@
|
||||
}
|
||||
],
|
||||
"operationCount": 11,
|
||||
"triggers": [],
|
||||
"triggerCount": 0,
|
||||
"triggers": [
|
||||
{
|
||||
"id": "greenhouse_candidate_hired",
|
||||
"name": "Greenhouse Candidate Hired",
|
||||
"description": "Trigger workflow when a candidate is hired"
|
||||
},
|
||||
{
|
||||
"id": "greenhouse_new_application",
|
||||
"name": "Greenhouse New Application",
|
||||
"description": "Trigger workflow when a new application is submitted"
|
||||
},
|
||||
{
|
||||
"id": "greenhouse_candidate_stage_change",
|
||||
"name": "Greenhouse Candidate Stage Change",
|
||||
"description": "Trigger workflow when a candidate changes interview stages"
|
||||
},
|
||||
{
|
||||
"id": "greenhouse_candidate_rejected",
|
||||
"name": "Greenhouse Candidate Rejected",
|
||||
"description": "Trigger workflow when a candidate is rejected"
|
||||
},
|
||||
{
|
||||
"id": "greenhouse_offer_created",
|
||||
"name": "Greenhouse Offer Created",
|
||||
"description": "Trigger workflow when a new offer is created"
|
||||
},
|
||||
{
|
||||
"id": "greenhouse_job_created",
|
||||
"name": "Greenhouse Job Created",
|
||||
"description": "Trigger workflow when a new job is created"
|
||||
},
|
||||
{
|
||||
"id": "greenhouse_job_updated",
|
||||
"name": "Greenhouse Job Updated",
|
||||
"description": "Trigger workflow when a job is updated"
|
||||
},
|
||||
{
|
||||
"id": "greenhouse_webhook",
|
||||
"name": "Greenhouse Webhook (Endpoint Events)",
|
||||
"description": "Trigger on whichever event types you select for this URL in Greenhouse. Sim does not filter deliveries for this trigger."
|
||||
}
|
||||
],
|
||||
"triggerCount": 8,
|
||||
"authType": "api-key",
|
||||
"category": "tools",
|
||||
"integrationType": "hr",
|
||||
@@ -5517,6 +5575,11 @@
|
||||
"name": "HubSpot Contact Deleted",
|
||||
"description": "Trigger workflow when a contact is deleted in HubSpot"
|
||||
},
|
||||
{
|
||||
"id": "hubspot_contact_merged",
|
||||
"name": "HubSpot Contact Merged",
|
||||
"description": "Trigger workflow when contacts are merged in HubSpot"
|
||||
},
|
||||
{
|
||||
"id": "hubspot_contact_privacy_deleted",
|
||||
"name": "HubSpot Contact Privacy Deleted",
|
||||
@@ -5527,6 +5590,11 @@
|
||||
"name": "HubSpot Contact Property Changed",
|
||||
"description": "Trigger workflow when any property of a contact is updated in HubSpot"
|
||||
},
|
||||
{
|
||||
"id": "hubspot_contact_restored",
|
||||
"name": "HubSpot Contact Restored",
|
||||
"description": "Trigger workflow when a deleted contact is restored in HubSpot"
|
||||
},
|
||||
{
|
||||
"id": "hubspot_company_created",
|
||||
"name": "HubSpot Company Created",
|
||||
@@ -5537,11 +5605,21 @@
|
||||
"name": "HubSpot Company Deleted",
|
||||
"description": "Trigger workflow when a company is deleted in HubSpot"
|
||||
},
|
||||
{
|
||||
"id": "hubspot_company_merged",
|
||||
"name": "HubSpot Company Merged",
|
||||
"description": "Trigger workflow when companies are merged in HubSpot"
|
||||
},
|
||||
{
|
||||
"id": "hubspot_company_property_changed",
|
||||
"name": "HubSpot Company Property Changed",
|
||||
"description": "Trigger workflow when any property of a company is updated in HubSpot"
|
||||
},
|
||||
{
|
||||
"id": "hubspot_company_restored",
|
||||
"name": "HubSpot Company Restored",
|
||||
"description": "Trigger workflow when a deleted company is restored in HubSpot"
|
||||
},
|
||||
{
|
||||
"id": "hubspot_conversation_creation",
|
||||
"name": "HubSpot Conversation Creation",
|
||||
@@ -5577,11 +5655,21 @@
|
||||
"name": "HubSpot Deal Deleted",
|
||||
"description": "Trigger workflow when a deal is deleted in HubSpot"
|
||||
},
|
||||
{
|
||||
"id": "hubspot_deal_merged",
|
||||
"name": "HubSpot Deal Merged",
|
||||
"description": "Trigger workflow when deals are merged in HubSpot"
|
||||
},
|
||||
{
|
||||
"id": "hubspot_deal_property_changed",
|
||||
"name": "HubSpot Deal Property Changed",
|
||||
"description": "Trigger workflow when any property of a deal is updated in HubSpot"
|
||||
},
|
||||
{
|
||||
"id": "hubspot_deal_restored",
|
||||
"name": "HubSpot Deal Restored",
|
||||
"description": "Trigger workflow when a deleted deal is restored in HubSpot"
|
||||
},
|
||||
{
|
||||
"id": "hubspot_ticket_created",
|
||||
"name": "HubSpot Ticket Created",
|
||||
@@ -5592,13 +5680,28 @@
|
||||
"name": "HubSpot Ticket Deleted",
|
||||
"description": "Trigger workflow when a ticket is deleted in HubSpot"
|
||||
},
|
||||
{
|
||||
"id": "hubspot_ticket_merged",
|
||||
"name": "HubSpot Ticket Merged",
|
||||
"description": "Trigger workflow when tickets are merged in HubSpot"
|
||||
},
|
||||
{
|
||||
"id": "hubspot_ticket_property_changed",
|
||||
"name": "HubSpot Ticket Property Changed",
|
||||
"description": "Trigger workflow when any property of a ticket is updated in HubSpot"
|
||||
},
|
||||
{
|
||||
"id": "hubspot_ticket_restored",
|
||||
"name": "HubSpot Ticket Restored",
|
||||
"description": "Trigger workflow when a deleted ticket is restored in HubSpot"
|
||||
},
|
||||
{
|
||||
"id": "hubspot_webhook",
|
||||
"name": "HubSpot Webhook (All Events)",
|
||||
"description": "Trigger workflow on any HubSpot webhook event"
|
||||
}
|
||||
],
|
||||
"triggerCount": 18,
|
||||
"triggerCount": 27,
|
||||
"authType": "oauth",
|
||||
"category": "tools",
|
||||
"integrationType": "crm",
|
||||
@@ -6077,8 +6180,39 @@
|
||||
}
|
||||
],
|
||||
"operationCount": 31,
|
||||
"triggers": [],
|
||||
"triggerCount": 0,
|
||||
"triggers": [
|
||||
{
|
||||
"id": "intercom_conversation_created",
|
||||
"name": "Intercom Conversation Created",
|
||||
"description": "Trigger workflow when a new conversation is created in Intercom"
|
||||
},
|
||||
{
|
||||
"id": "intercom_conversation_reply",
|
||||
"name": "Intercom Conversation Reply",
|
||||
"description": "Trigger workflow when someone replies to an Intercom conversation"
|
||||
},
|
||||
{
|
||||
"id": "intercom_conversation_closed",
|
||||
"name": "Intercom Conversation Closed",
|
||||
"description": "Trigger workflow when a conversation is closed in Intercom"
|
||||
},
|
||||
{
|
||||
"id": "intercom_contact_created",
|
||||
"name": "Intercom Contact Created",
|
||||
"description": "Trigger workflow when a new lead is created in Intercom"
|
||||
},
|
||||
{
|
||||
"id": "intercom_user_created",
|
||||
"name": "Intercom User Created",
|
||||
"description": "Trigger workflow when a new user is created in Intercom"
|
||||
},
|
||||
{
|
||||
"id": "intercom_webhook",
|
||||
"name": "Intercom Webhook (All Events)",
|
||||
"description": "Trigger workflow on any Intercom webhook event"
|
||||
}
|
||||
],
|
||||
"triggerCount": 6,
|
||||
"authType": "api-key",
|
||||
"category": "tools",
|
||||
"integrationType": "customer-support",
|
||||
@@ -6731,7 +6865,7 @@
|
||||
"tags": ["sales-engagement", "email-marketing", "automation"]
|
||||
},
|
||||
{
|
||||
"type": "linear",
|
||||
"type": "linear_v2",
|
||||
"slug": "linear",
|
||||
"name": "Linear",
|
||||
"description": "Interact with Linear issues, projects, and more",
|
||||
@@ -7056,79 +7190,79 @@
|
||||
"operationCount": 78,
|
||||
"triggers": [
|
||||
{
|
||||
"id": "linear_issue_created",
|
||||
"id": "linear_issue_created_v2",
|
||||
"name": "Linear Issue Created",
|
||||
"description": "Trigger workflow when a new issue is created in Linear"
|
||||
},
|
||||
{
|
||||
"id": "linear_issue_updated",
|
||||
"id": "linear_issue_updated_v2",
|
||||
"name": "Linear Issue Updated",
|
||||
"description": "Trigger workflow when an issue is updated in Linear"
|
||||
},
|
||||
{
|
||||
"id": "linear_issue_removed",
|
||||
"id": "linear_issue_removed_v2",
|
||||
"name": "Linear Issue Removed",
|
||||
"description": "Trigger workflow when an issue is removed/deleted in Linear"
|
||||
},
|
||||
{
|
||||
"id": "linear_comment_created",
|
||||
"id": "linear_comment_created_v2",
|
||||
"name": "Linear Comment Created",
|
||||
"description": "Trigger workflow when a new comment is created in Linear"
|
||||
},
|
||||
{
|
||||
"id": "linear_comment_updated",
|
||||
"id": "linear_comment_updated_v2",
|
||||
"name": "Linear Comment Updated",
|
||||
"description": "Trigger workflow when a comment is updated in Linear"
|
||||
},
|
||||
{
|
||||
"id": "linear_project_created",
|
||||
"id": "linear_project_created_v2",
|
||||
"name": "Linear Project Created",
|
||||
"description": "Trigger workflow when a new project is created in Linear"
|
||||
},
|
||||
{
|
||||
"id": "linear_project_updated",
|
||||
"id": "linear_project_updated_v2",
|
||||
"name": "Linear Project Updated",
|
||||
"description": "Trigger workflow when a project is updated in Linear"
|
||||
},
|
||||
{
|
||||
"id": "linear_cycle_created",
|
||||
"id": "linear_cycle_created_v2",
|
||||
"name": "Linear Cycle Created",
|
||||
"description": "Trigger workflow when a new cycle is created in Linear"
|
||||
},
|
||||
{
|
||||
"id": "linear_cycle_updated",
|
||||
"id": "linear_cycle_updated_v2",
|
||||
"name": "Linear Cycle Updated",
|
||||
"description": "Trigger workflow when a cycle is updated in Linear"
|
||||
},
|
||||
{
|
||||
"id": "linear_label_created",
|
||||
"id": "linear_label_created_v2",
|
||||
"name": "Linear Label Created",
|
||||
"description": "Trigger workflow when a new label is created in Linear"
|
||||
},
|
||||
{
|
||||
"id": "linear_label_updated",
|
||||
"id": "linear_label_updated_v2",
|
||||
"name": "Linear Label Updated",
|
||||
"description": "Trigger workflow when a label is updated in Linear"
|
||||
},
|
||||
{
|
||||
"id": "linear_project_update_created",
|
||||
"id": "linear_project_update_created_v2",
|
||||
"name": "Linear Project Update Created",
|
||||
"description": "Trigger workflow when a new project update is posted in Linear"
|
||||
},
|
||||
{
|
||||
"id": "linear_customer_request_created",
|
||||
"id": "linear_customer_request_created_v2",
|
||||
"name": "Linear Customer Request Created",
|
||||
"description": "Trigger workflow when a new customer request is created in Linear"
|
||||
},
|
||||
{
|
||||
"id": "linear_customer_request_updated",
|
||||
"id": "linear_customer_request_updated_v2",
|
||||
"name": "Linear Customer Request Updated",
|
||||
"description": "Trigger workflow when a customer request is updated in Linear"
|
||||
},
|
||||
{
|
||||
"id": "linear_webhook",
|
||||
"id": "linear_webhook_v2",
|
||||
"name": "Linear Webhook",
|
||||
"description": "Trigger workflow from any Linear webhook event"
|
||||
"description": "Trigger workflow from Linear events you select when creating the webhook in Linear (not guaranteed to be every model or event type)."
|
||||
}
|
||||
],
|
||||
"triggerCount": 15,
|
||||
@@ -8138,8 +8272,54 @@
|
||||
"docsUrl": "https://docs.sim.ai/tools/notion",
|
||||
"operations": [],
|
||||
"operationCount": 0,
|
||||
"triggers": [],
|
||||
"triggerCount": 0,
|
||||
"triggers": [
|
||||
{
|
||||
"id": "notion_page_created",
|
||||
"name": "Notion Page Created",
|
||||
"description": "Trigger workflow when a new page is created in Notion"
|
||||
},
|
||||
{
|
||||
"id": "notion_page_properties_updated",
|
||||
"name": "Notion Page Properties Updated",
|
||||
"description": "Trigger workflow when page properties are modified in Notion"
|
||||
},
|
||||
{
|
||||
"id": "notion_page_content_updated",
|
||||
"name": "Notion Page Content Updated",
|
||||
"description": "Trigger workflow when page content is changed in Notion"
|
||||
},
|
||||
{
|
||||
"id": "notion_page_deleted",
|
||||
"name": "Notion Page Deleted",
|
||||
"description": "Trigger workflow when a page is deleted in Notion"
|
||||
},
|
||||
{
|
||||
"id": "notion_database_created",
|
||||
"name": "Notion Database Created",
|
||||
"description": "Trigger workflow when a new database is created in Notion"
|
||||
},
|
||||
{
|
||||
"id": "notion_database_schema_updated",
|
||||
"name": "Notion Database Schema Updated",
|
||||
"description": "Trigger workflow when a database schema is modified in Notion"
|
||||
},
|
||||
{
|
||||
"id": "notion_database_deleted",
|
||||
"name": "Notion Database Deleted",
|
||||
"description": "Trigger workflow when a database is deleted in Notion"
|
||||
},
|
||||
{
|
||||
"id": "notion_comment_created",
|
||||
"name": "Notion Comment Created",
|
||||
"description": "Trigger workflow when a comment or suggested edit is added in Notion"
|
||||
},
|
||||
{
|
||||
"id": "notion_webhook",
|
||||
"name": "Notion Webhook (All Events)",
|
||||
"description": "Trigger workflow on any Notion webhook event"
|
||||
}
|
||||
],
|
||||
"triggerCount": 9,
|
||||
"authType": "oauth",
|
||||
"category": "tools",
|
||||
"integrationType": "documents",
|
||||
@@ -8406,8 +8586,14 @@
|
||||
}
|
||||
],
|
||||
"operationCount": 9,
|
||||
"triggers": [],
|
||||
"triggerCount": 0,
|
||||
"triggers": [
|
||||
{
|
||||
"id": "outlook_poller",
|
||||
"name": "Outlook Email Trigger",
|
||||
"description": "Triggers when new emails are received in Outlook (requires Microsoft credentials)"
|
||||
}
|
||||
],
|
||||
"triggerCount": 1,
|
||||
"authType": "oauth",
|
||||
"category": "tools",
|
||||
"integrationType": "email",
|
||||
@@ -9428,8 +9614,49 @@
|
||||
}
|
||||
],
|
||||
"operationCount": 8,
|
||||
"triggers": [],
|
||||
"triggerCount": 0,
|
||||
"triggers": [
|
||||
{
|
||||
"id": "resend_email_sent",
|
||||
"name": "Resend Email Sent",
|
||||
"description": "Trigger workflow when an email is sent"
|
||||
},
|
||||
{
|
||||
"id": "resend_email_delivered",
|
||||
"name": "Resend Email Delivered",
|
||||
"description": "Trigger workflow when an email is delivered"
|
||||
},
|
||||
{
|
||||
"id": "resend_email_bounced",
|
||||
"name": "Resend Email Bounced",
|
||||
"description": "Trigger workflow when an email bounces"
|
||||
},
|
||||
{
|
||||
"id": "resend_email_complained",
|
||||
"name": "Resend Email Complained",
|
||||
"description": "Trigger workflow when an email is marked as spam"
|
||||
},
|
||||
{
|
||||
"id": "resend_email_opened",
|
||||
"name": "Resend Email Opened",
|
||||
"description": "Trigger workflow when an email is opened"
|
||||
},
|
||||
{
|
||||
"id": "resend_email_clicked",
|
||||
"name": "Resend Email Clicked",
|
||||
"description": "Trigger workflow when a link in an email is clicked"
|
||||
},
|
||||
{
|
||||
"id": "resend_email_failed",
|
||||
"name": "Resend Email Failed",
|
||||
"description": "Trigger workflow when an email fails to send"
|
||||
},
|
||||
{
|
||||
"id": "resend_webhook",
|
||||
"name": "Resend Webhook (All Events)",
|
||||
"description": "Trigger on Resend webhook events we subscribe to (email lifecycle, contacts, domains—see Resend docs). Flattened email fields may be null for non-email events; use <code>data</code> for the full payload."
|
||||
}
|
||||
],
|
||||
"triggerCount": 8,
|
||||
"authType": "none",
|
||||
"category": "tools",
|
||||
"integrationType": "email",
|
||||
@@ -10175,8 +10402,39 @@
|
||||
}
|
||||
],
|
||||
"operationCount": 35,
|
||||
"triggers": [],
|
||||
"triggerCount": 0,
|
||||
"triggers": [
|
||||
{
|
||||
"id": "salesforce_record_created",
|
||||
"name": "Salesforce Record Created",
|
||||
"description": "Trigger workflow when a Salesforce record is created"
|
||||
},
|
||||
{
|
||||
"id": "salesforce_record_updated",
|
||||
"name": "Salesforce Record Updated",
|
||||
"description": "Trigger workflow when a Salesforce record is updated"
|
||||
},
|
||||
{
|
||||
"id": "salesforce_record_deleted",
|
||||
"name": "Salesforce Record Deleted",
|
||||
"description": "Trigger workflow when a Salesforce record is deleted"
|
||||
},
|
||||
{
|
||||
"id": "salesforce_opportunity_stage_changed",
|
||||
"name": "Salesforce Opportunity Stage Changed",
|
||||
"description": "Trigger workflow when an opportunity stage changes"
|
||||
},
|
||||
{
|
||||
"id": "salesforce_case_status_changed",
|
||||
"name": "Salesforce Case Status Changed",
|
||||
"description": "Trigger workflow when a case status changes"
|
||||
},
|
||||
{
|
||||
"id": "salesforce_webhook",
|
||||
"name": "Salesforce Webhook (All Events)",
|
||||
"description": "Trigger workflow on any Salesforce webhook event"
|
||||
}
|
||||
],
|
||||
"triggerCount": 6,
|
||||
"authType": "oauth",
|
||||
"category": "tools",
|
||||
"integrationType": "crm",
|
||||
@@ -10639,6 +10897,41 @@
|
||||
"integrationType": "analytics",
|
||||
"tags": ["marketing", "data-analytics", "seo"]
|
||||
},
|
||||
{
|
||||
"type": "sixtyfour",
|
||||
"slug": "sixtyfour-ai",
|
||||
"name": "Sixtyfour AI",
|
||||
"description": "Enrich leads and companies with AI-powered research",
|
||||
"longDescription": "Find emails, phone numbers, and enrich lead or company data with contact information, social profiles, and detailed research using Sixtyfour AI.",
|
||||
"bgColor": "#000000",
|
||||
"iconName": "SixtyfourIcon",
|
||||
"docsUrl": "https://docs.sim.ai/tools/sixtyfour",
|
||||
"operations": [
|
||||
{
|
||||
"name": "Find Phone",
|
||||
"description": "Find phone numbers for a lead using Sixtyfour AI."
|
||||
},
|
||||
{
|
||||
"name": "Find Email",
|
||||
"description": "Find email addresses for a lead using Sixtyfour AI."
|
||||
},
|
||||
{
|
||||
"name": "Enrich Lead",
|
||||
"description": "Enrich lead information with contact details, social profiles, and company data using Sixtyfour AI."
|
||||
},
|
||||
{
|
||||
"name": "Enrich Company",
|
||||
"description": "Enrich company data with additional information and find associated people using Sixtyfour AI."
|
||||
}
|
||||
],
|
||||
"operationCount": 4,
|
||||
"triggers": [],
|
||||
"triggerCount": 0,
|
||||
"authType": "api-key",
|
||||
"category": "tools",
|
||||
"integrationType": "sales-intelligence",
|
||||
"tags": ["enrichment", "sales-engagement"]
|
||||
},
|
||||
{
|
||||
"type": "slack",
|
||||
"slug": "slack",
|
||||
@@ -11930,8 +12223,49 @@
|
||||
}
|
||||
],
|
||||
"operationCount": 50,
|
||||
"triggers": [],
|
||||
"triggerCount": 0,
|
||||
"triggers": [
|
||||
{
|
||||
"id": "vercel_deployment_created",
|
||||
"name": "Vercel Deployment Created",
|
||||
"description": "Trigger workflow when a new deployment is created"
|
||||
},
|
||||
{
|
||||
"id": "vercel_deployment_ready",
|
||||
"name": "Vercel Deployment Ready",
|
||||
"description": "Trigger workflow when a deployment is ready to serve traffic"
|
||||
},
|
||||
{
|
||||
"id": "vercel_deployment_error",
|
||||
"name": "Vercel Deployment Error",
|
||||
"description": "Trigger workflow when a deployment fails"
|
||||
},
|
||||
{
|
||||
"id": "vercel_deployment_canceled",
|
||||
"name": "Vercel Deployment Canceled",
|
||||
"description": "Trigger workflow when a deployment is canceled"
|
||||
},
|
||||
{
|
||||
"id": "vercel_project_created",
|
||||
"name": "Vercel Project Created",
|
||||
"description": "Trigger workflow when a new project is created"
|
||||
},
|
||||
{
|
||||
"id": "vercel_project_removed",
|
||||
"name": "Vercel Project Removed",
|
||||
"description": "Trigger workflow when a project is removed"
|
||||
},
|
||||
{
|
||||
"id": "vercel_domain_created",
|
||||
"name": "Vercel Domain Created",
|
||||
"description": "Trigger workflow when a domain is created"
|
||||
},
|
||||
{
|
||||
"id": "vercel_webhook",
|
||||
"name": "Vercel Webhook (Common Events)",
|
||||
"description": "Trigger on a curated set of common Vercel events (deployments, projects, domains, edge config). Pick a specific trigger to listen to one event type only."
|
||||
}
|
||||
],
|
||||
"triggerCount": 8,
|
||||
"authType": "api-key",
|
||||
"category": "tools",
|
||||
"integrationType": "developer-tools",
|
||||
@@ -12733,8 +13067,39 @@
|
||||
}
|
||||
],
|
||||
"operationCount": 10,
|
||||
"triggers": [],
|
||||
"triggerCount": 0,
|
||||
"triggers": [
|
||||
{
|
||||
"id": "zoom_meeting_started",
|
||||
"name": "Zoom Meeting Started",
|
||||
"description": "Trigger workflow when a Zoom meeting starts"
|
||||
},
|
||||
{
|
||||
"id": "zoom_meeting_ended",
|
||||
"name": "Zoom Meeting Ended",
|
||||
"description": "Trigger workflow when a Zoom meeting ends"
|
||||
},
|
||||
{
|
||||
"id": "zoom_participant_joined",
|
||||
"name": "Zoom Participant Joined",
|
||||
"description": "Trigger workflow when a participant joins a Zoom meeting"
|
||||
},
|
||||
{
|
||||
"id": "zoom_participant_left",
|
||||
"name": "Zoom Participant Left",
|
||||
"description": "Trigger workflow when a participant leaves a Zoom meeting"
|
||||
},
|
||||
{
|
||||
"id": "zoom_recording_completed",
|
||||
"name": "Zoom Recording Completed",
|
||||
"description": "Trigger workflow when a Zoom cloud recording is completed"
|
||||
},
|
||||
{
|
||||
"id": "zoom_webhook",
|
||||
"name": "Zoom Webhook (All Events)",
|
||||
"description": "Trigger workflow on any Zoom webhook event"
|
||||
}
|
||||
],
|
||||
"triggerCount": 6,
|
||||
"authType": "oauth",
|
||||
"category": "tools",
|
||||
"integrationType": "communication",
|
||||
|
||||
@@ -2,7 +2,8 @@
|
||||
|
||||
import { useEffect } from 'react'
|
||||
import { usePostHog } from 'posthog-js/react'
|
||||
import { captureEvent } from '@/lib/posthog/client'
|
||||
import { captureClientEvent, captureEvent } from '@/lib/posthog/client'
|
||||
import type { PostHogEventMap } from '@/lib/posthog/events'
|
||||
|
||||
export function LandingAnalytics() {
|
||||
const posthog = usePostHog()
|
||||
@@ -13,3 +14,11 @@ export function LandingAnalytics() {
|
||||
|
||||
return null
|
||||
}
|
||||
|
||||
/**
|
||||
* Fire-and-forget tracker for landing page CTA clicks.
|
||||
* Uses the non-hook client so it works in any click handler without requiring a PostHog provider ref.
|
||||
*/
|
||||
export function trackLandingCta(props: PostHogEventMap['landing_cta_clicked']): void {
|
||||
captureClientEvent('landing_cta_clicked', props)
|
||||
}
|
||||
|
||||
@@ -68,7 +68,7 @@ describe('Reset Password API Route', () => {
|
||||
|
||||
it('should handle missing token', async () => {
|
||||
const req = createMockRequest('POST', {
|
||||
newPassword: 'newSecurePassword123',
|
||||
newPassword: 'newSecurePassword123!',
|
||||
})
|
||||
|
||||
const response = await POST(req)
|
||||
@@ -97,7 +97,7 @@ describe('Reset Password API Route', () => {
|
||||
it('should handle empty token', async () => {
|
||||
const req = createMockRequest('POST', {
|
||||
token: '',
|
||||
newPassword: 'newSecurePassword123',
|
||||
newPassword: 'newSecurePassword123!',
|
||||
})
|
||||
|
||||
const response = await POST(req)
|
||||
@@ -119,7 +119,11 @@ describe('Reset Password API Route', () => {
|
||||
const data = await response.json()
|
||||
|
||||
expect(response.status).toBe(400)
|
||||
expect(data.message).toBe('Password must be at least 8 characters long')
|
||||
expect(data.message).toContain('Password must be at least 8 characters long')
|
||||
expect(data.message).toContain('Password must contain at least one uppercase letter')
|
||||
expect(data.message).toContain('Password must contain at least one lowercase letter')
|
||||
expect(data.message).toContain('Password must contain at least one number')
|
||||
expect(data.message).toContain('Password must contain at least one special character')
|
||||
|
||||
expect(mockResetPassword).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
@@ -26,8 +26,7 @@ export async function POST(request: NextRequest) {
|
||||
const validationResult = resetPasswordSchema.safeParse(body)
|
||||
|
||||
if (!validationResult.success) {
|
||||
const firstError = validationResult.error.errors[0]
|
||||
const errorMessage = firstError?.message || 'Invalid request data'
|
||||
const errorMessage = validationResult.error.errors.map((e) => e.message).join(' ')
|
||||
|
||||
logger.warn('Invalid password reset request data', {
|
||||
errors: validationResult.error.format(),
|
||||
|
||||
@@ -15,13 +15,19 @@ import type { ChatResource, ResourceType } from '@/lib/copilot/resources'
|
||||
|
||||
const logger = createLogger('CopilotChatResourcesAPI')
|
||||
|
||||
const VALID_RESOURCE_TYPES = new Set<ResourceType>(['table', 'file', 'workflow', 'knowledgebase'])
|
||||
const GENERIC_TITLES = new Set(['Table', 'File', 'Workflow', 'Knowledge Base'])
|
||||
const VALID_RESOURCE_TYPES = new Set<ResourceType>([
|
||||
'table',
|
||||
'file',
|
||||
'workflow',
|
||||
'knowledgebase',
|
||||
'folder',
|
||||
])
|
||||
const GENERIC_TITLES = new Set(['Table', 'File', 'Workflow', 'Knowledge Base', 'Folder'])
|
||||
|
||||
const AddResourceSchema = z.object({
|
||||
chatId: z.string(),
|
||||
resource: z.object({
|
||||
type: z.enum(['table', 'file', 'workflow', 'knowledgebase']),
|
||||
type: z.enum(['table', 'file', 'workflow', 'knowledgebase', 'folder']),
|
||||
id: z.string(),
|
||||
title: z.string(),
|
||||
}),
|
||||
@@ -29,7 +35,7 @@ const AddResourceSchema = z.object({
|
||||
|
||||
const RemoveResourceSchema = z.object({
|
||||
chatId: z.string(),
|
||||
resourceType: z.enum(['table', 'file', 'workflow', 'knowledgebase']),
|
||||
resourceType: z.enum(['table', 'file', 'workflow', 'knowledgebase', 'folder']),
|
||||
resourceId: z.string(),
|
||||
})
|
||||
|
||||
@@ -37,7 +43,7 @@ const ReorderResourcesSchema = z.object({
|
||||
chatId: z.string(),
|
||||
resources: z.array(
|
||||
z.object({
|
||||
type: z.enum(['table', 'file', 'workflow', 'knowledgebase']),
|
||||
type: z.enum(['table', 'file', 'workflow', 'knowledgebase', 'folder']),
|
||||
id: z.string(),
|
||||
title: z.string(),
|
||||
})
|
||||
|
||||
@@ -88,6 +88,7 @@ const ChatMessageSchema = z.object({
|
||||
'docs',
|
||||
'table',
|
||||
'file',
|
||||
'folder',
|
||||
]),
|
||||
label: z.string(),
|
||||
chatId: z.string().optional(),
|
||||
@@ -99,6 +100,7 @@ const ChatMessageSchema = z.object({
|
||||
executionId: z.string().optional(),
|
||||
tableId: z.string().optional(),
|
||||
fileId: z.string().optional(),
|
||||
folderId: z.string().optional(),
|
||||
})
|
||||
)
|
||||
.optional(),
|
||||
|
||||
@@ -10,7 +10,7 @@ import { decryptSecret, encryptSecret } from '@/lib/core/security/encryption'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import { generateId } from '@/lib/core/utils/uuid'
|
||||
import { syncPersonalEnvCredentialsForUser } from '@/lib/credentials/environment'
|
||||
import type { EnvironmentVariable } from '@/stores/settings/environment'
|
||||
import type { EnvironmentVariable } from '@/lib/environment/api'
|
||||
|
||||
const logger = createLogger('EnvironmentAPI')
|
||||
|
||||
|
||||
58
apps/sim/app/api/folders/[id]/restore/route.ts
Normal file
58
apps/sim/app/api/folders/[id]/restore/route.ts
Normal file
@@ -0,0 +1,58 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { captureServerEvent } from '@/lib/posthog/server'
|
||||
import { performRestoreFolder } from '@/lib/workflows/orchestration/folder-lifecycle'
|
||||
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
|
||||
|
||||
const logger = createLogger('RestoreFolderAPI')
|
||||
|
||||
export async function POST(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
|
||||
const { id: folderId } = await params
|
||||
|
||||
try {
|
||||
const session = await getSession()
|
||||
if (!session?.user?.id) {
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const body = await request.json().catch(() => ({}))
|
||||
const workspaceId = body.workspaceId as string | undefined
|
||||
|
||||
if (!workspaceId) {
|
||||
return NextResponse.json({ error: 'Workspace ID is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
const permission = await getUserEntityPermissions(session.user.id, 'workspace', workspaceId)
|
||||
if (permission !== 'admin' && permission !== 'write') {
|
||||
return NextResponse.json({ error: 'Insufficient permissions' }, { status: 403 })
|
||||
}
|
||||
|
||||
const result = await performRestoreFolder({
|
||||
folderId,
|
||||
workspaceId,
|
||||
userId: session.user.id,
|
||||
})
|
||||
|
||||
if (!result.success) {
|
||||
return NextResponse.json({ error: result.error }, { status: 400 })
|
||||
}
|
||||
|
||||
logger.info(`Restored folder ${folderId}`, { restoredItems: result.restoredItems })
|
||||
|
||||
captureServerEvent(
|
||||
session.user.id,
|
||||
'folder_restored',
|
||||
{ folder_id: folderId, workspace_id: workspaceId },
|
||||
{ groups: { workspace: workspaceId } }
|
||||
)
|
||||
|
||||
return NextResponse.json({ success: true, restoredItems: result.restoredItems })
|
||||
} catch (error) {
|
||||
logger.error(`Error restoring folder ${folderId}`, error)
|
||||
return NextResponse.json(
|
||||
{ error: error instanceof Error ? error.message : 'Internal server error' },
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -1,7 +1,7 @@
|
||||
import { db } from '@sim/db'
|
||||
import { workflow, workflowFolder } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { and, asc, eq, isNull, min } from 'drizzle-orm'
|
||||
import { and, asc, eq, isNotNull, isNull, min } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
|
||||
@@ -47,12 +47,16 @@ export async function GET(request: NextRequest) {
|
||||
return NextResponse.json({ error: 'Access denied to this workspace' }, { status: 403 })
|
||||
}
|
||||
|
||||
// If user has workspace permissions, fetch ALL folders in the workspace
|
||||
// This allows shared workspace members to see folders created by other users
|
||||
const scope = searchParams.get('scope') ?? 'active'
|
||||
const archivedFilter =
|
||||
scope === 'archived'
|
||||
? isNotNull(workflowFolder.archivedAt)
|
||||
: isNull(workflowFolder.archivedAt)
|
||||
|
||||
const folders = await db
|
||||
.select()
|
||||
.from(workflowFolder)
|
||||
.where(eq(workflowFolder.workspaceId, workspaceId))
|
||||
.where(and(eq(workflowFolder.workspaceId, workspaceId), archivedFilter))
|
||||
.orderBy(asc(workflowFolder.sortOrder), asc(workflowFolder.createdAt))
|
||||
|
||||
return NextResponse.json({ folders })
|
||||
|
||||
@@ -36,7 +36,7 @@ const FileAttachmentSchema = z.object({
|
||||
})
|
||||
|
||||
const ResourceAttachmentSchema = z.object({
|
||||
type: z.enum(['workflow', 'table', 'file', 'knowledgebase']),
|
||||
type: z.enum(['workflow', 'table', 'file', 'knowledgebase', 'folder']),
|
||||
id: z.string().min(1),
|
||||
title: z.string().optional(),
|
||||
active: z.boolean().optional(),
|
||||
@@ -66,6 +66,7 @@ const MothershipMessageSchema = z.object({
|
||||
'docs',
|
||||
'table',
|
||||
'file',
|
||||
'folder',
|
||||
]),
|
||||
label: z.string(),
|
||||
chatId: z.string().optional(),
|
||||
@@ -77,6 +78,7 @@ const MothershipMessageSchema = z.object({
|
||||
executionId: z.string().optional(),
|
||||
tableId: z.string().optional(),
|
||||
fileId: z.string().optional(),
|
||||
folderId: z.string().optional(),
|
||||
})
|
||||
)
|
||||
.optional(),
|
||||
@@ -224,6 +226,7 @@ export async function POST(req: NextRequest) {
|
||||
...(c.knowledgeId && { knowledgeId: c.knowledgeId }),
|
||||
...(c.tableId && { tableId: c.tableId }),
|
||||
...(c.fileId && { fileId: c.fileId }),
|
||||
...(c.folderId && { folderId: c.folderId }),
|
||||
})),
|
||||
}),
|
||||
}
|
||||
|
||||
@@ -3,6 +3,7 @@ import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { AuthType } from '@/lib/auth/hybrid'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import { generateId } from '@/lib/core/utils/uuid'
|
||||
import { setExecutionMeta } from '@/lib/execution/event-buffer'
|
||||
import { preprocessExecution } from '@/lib/execution/preprocessing'
|
||||
import { PauseResumeManager } from '@/lib/workflows/executor/human-in-the-loop-manager'
|
||||
import { getWorkspaceBilledAccountUserId } from '@/lib/workspaces/utils'
|
||||
@@ -125,14 +126,43 @@ export async function POST(
|
||||
})
|
||||
}
|
||||
|
||||
PauseResumeManager.startResumeExecution({
|
||||
await setExecutionMeta(enqueueResult.resumeExecutionId, {
|
||||
status: 'active',
|
||||
userId,
|
||||
workflowId,
|
||||
})
|
||||
|
||||
const resumeArgs = {
|
||||
resumeEntryId: enqueueResult.resumeEntryId,
|
||||
resumeExecutionId: enqueueResult.resumeExecutionId,
|
||||
pausedExecution: enqueueResult.pausedExecution,
|
||||
contextId: enqueueResult.contextId,
|
||||
resumeInput: enqueueResult.resumeInput,
|
||||
userId: enqueueResult.userId,
|
||||
}).catch((error) => {
|
||||
}
|
||||
|
||||
const isApiCaller = access.auth?.authType === AuthType.API_KEY
|
||||
|
||||
if (isApiCaller) {
|
||||
const result = await PauseResumeManager.startResumeExecution(resumeArgs)
|
||||
|
||||
return NextResponse.json({
|
||||
success: result.success,
|
||||
status: result.status ?? (result.success ? 'completed' : 'failed'),
|
||||
executionId: enqueueResult.resumeExecutionId,
|
||||
output: result.output,
|
||||
error: result.error,
|
||||
metadata: result.metadata
|
||||
? {
|
||||
duration: result.metadata.duration,
|
||||
startTime: result.metadata.startTime,
|
||||
endTime: result.metadata.endTime,
|
||||
}
|
||||
: undefined,
|
||||
})
|
||||
}
|
||||
|
||||
PauseResumeManager.startResumeExecution(resumeArgs).catch((error) => {
|
||||
logger.error('Failed to start resume execution', {
|
||||
workflowId,
|
||||
parentExecutionId: executionId,
|
||||
|
||||
@@ -16,13 +16,9 @@ import {
|
||||
createExternalWebhookSubscription,
|
||||
shouldRecreateExternalWebhookSubscription,
|
||||
} from '@/lib/webhooks/provider-subscriptions'
|
||||
import { getProviderHandler } from '@/lib/webhooks/providers'
|
||||
import { mergeNonUserFields } from '@/lib/webhooks/utils'
|
||||
import {
|
||||
configureGmailPolling,
|
||||
configureOutlookPolling,
|
||||
configureRssPolling,
|
||||
syncWebhooksForCredentialSet,
|
||||
} from '@/lib/webhooks/utils.server'
|
||||
import { syncWebhooksForCredentialSet } from '@/lib/webhooks/utils.server'
|
||||
import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils'
|
||||
import { extractCredentialSetId, isCredentialSetValue } from '@/executor/constants'
|
||||
|
||||
@@ -348,7 +344,6 @@ export async function POST(request: NextRequest) {
|
||||
workflowRecord.workspaceId || undefined
|
||||
)
|
||||
|
||||
// --- Credential Set Handling ---
|
||||
// For credential sets, we fan out to create one webhook per credential at save time.
|
||||
// This applies to all OAuth-based triggers, not just polling ones.
|
||||
// Check for credentialSetId directly (frontend may already extract it) or credential set value in credential fields
|
||||
@@ -402,16 +397,13 @@ export async function POST(request: NextRequest) {
|
||||
)
|
||||
}
|
||||
|
||||
const needsConfiguration = provider === 'gmail' || provider === 'outlook'
|
||||
const providerHandler = getProviderHandler(provider)
|
||||
|
||||
if (needsConfiguration) {
|
||||
const configureFunc =
|
||||
provider === 'gmail' ? configureGmailPolling : configureOutlookPolling
|
||||
if (providerHandler.configurePolling) {
|
||||
const configureErrors: string[] = []
|
||||
|
||||
for (const wh of syncResult.webhooks) {
|
||||
if (wh.isNew) {
|
||||
// Fetch the webhook data for configuration
|
||||
const webhookRows = await db
|
||||
.select()
|
||||
.from(webhook)
|
||||
@@ -419,7 +411,10 @@ export async function POST(request: NextRequest) {
|
||||
.limit(1)
|
||||
|
||||
if (webhookRows.length > 0) {
|
||||
const success = await configureFunc(webhookRows[0], requestId)
|
||||
const success = await providerHandler.configurePolling({
|
||||
webhook: webhookRows[0],
|
||||
requestId,
|
||||
})
|
||||
if (!success) {
|
||||
configureErrors.push(
|
||||
`Failed to configure webhook for credential ${wh.credentialId}`
|
||||
@@ -436,7 +431,6 @@ export async function POST(request: NextRequest) {
|
||||
configureErrors.length > 0 &&
|
||||
configureErrors.length === syncResult.webhooks.length
|
||||
) {
|
||||
// All configurations failed - roll back
|
||||
logger.error(`[${requestId}] All webhook configurations failed, rolling back`)
|
||||
for (const wh of syncResult.webhooks) {
|
||||
await db.delete(webhook).where(eq(webhook.id, wh.id))
|
||||
@@ -488,8 +482,6 @@ export async function POST(request: NextRequest) {
|
||||
}
|
||||
}
|
||||
}
|
||||
// --- End Credential Set Handling ---
|
||||
|
||||
let externalSubscriptionCreated = false
|
||||
const createTempWebhookData = (providerConfigOverride = resolvedProviderConfig) => ({
|
||||
id: targetWebhookId || generateShortId(),
|
||||
@@ -629,115 +621,49 @@ export async function POST(request: NextRequest) {
|
||||
}
|
||||
}
|
||||
|
||||
// --- Gmail/Outlook webhook setup (these don't require external subscriptions, configure after DB save) ---
|
||||
if (savedWebhook && provider === 'gmail') {
|
||||
logger.info(`[${requestId}] Gmail provider detected. Setting up Gmail webhook configuration.`)
|
||||
try {
|
||||
const success = await configureGmailPolling(savedWebhook, requestId)
|
||||
if (savedWebhook) {
|
||||
const pollingHandler = getProviderHandler(provider)
|
||||
if (pollingHandler.configurePolling) {
|
||||
logger.info(
|
||||
`[${requestId}] ${provider} provider detected. Setting up polling configuration.`
|
||||
)
|
||||
try {
|
||||
const success = await pollingHandler.configurePolling({
|
||||
webhook: savedWebhook,
|
||||
requestId,
|
||||
})
|
||||
|
||||
if (!success) {
|
||||
logger.error(`[${requestId}] Failed to configure Gmail polling, rolling back webhook`)
|
||||
if (!success) {
|
||||
logger.error(
|
||||
`[${requestId}] Failed to configure ${provider} polling, rolling back webhook`
|
||||
)
|
||||
await revertSavedWebhook(savedWebhook, existingWebhook, requestId)
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: `Failed to configure ${provider} polling`,
|
||||
details: 'Please check your account permissions and try again',
|
||||
},
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
|
||||
logger.info(`[${requestId}] Successfully configured ${provider} polling`)
|
||||
} catch (err) {
|
||||
logger.error(
|
||||
`[${requestId}] Error setting up ${provider} webhook configuration, rolling back webhook`,
|
||||
err
|
||||
)
|
||||
await revertSavedWebhook(savedWebhook, existingWebhook, requestId)
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: 'Failed to configure Gmail polling',
|
||||
details: 'Please check your Gmail account permissions and try again',
|
||||
error: `Failed to configure ${provider} webhook`,
|
||||
details: err instanceof Error ? err.message : 'Unknown error',
|
||||
},
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
|
||||
logger.info(`[${requestId}] Successfully configured Gmail polling`)
|
||||
} catch (err) {
|
||||
logger.error(
|
||||
`[${requestId}] Error setting up Gmail webhook configuration, rolling back webhook`,
|
||||
err
|
||||
)
|
||||
await revertSavedWebhook(savedWebhook, existingWebhook, requestId)
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: 'Failed to configure Gmail webhook',
|
||||
details: err instanceof Error ? err.message : 'Unknown error',
|
||||
},
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
// --- End Gmail specific logic ---
|
||||
|
||||
// --- Outlook webhook setup ---
|
||||
if (savedWebhook && provider === 'outlook') {
|
||||
logger.info(
|
||||
`[${requestId}] Outlook provider detected. Setting up Outlook webhook configuration.`
|
||||
)
|
||||
try {
|
||||
const success = await configureOutlookPolling(savedWebhook, requestId)
|
||||
|
||||
if (!success) {
|
||||
logger.error(`[${requestId}] Failed to configure Outlook polling, rolling back webhook`)
|
||||
await revertSavedWebhook(savedWebhook, existingWebhook, requestId)
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: 'Failed to configure Outlook polling',
|
||||
details: 'Please check your Outlook account permissions and try again',
|
||||
},
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
|
||||
logger.info(`[${requestId}] Successfully configured Outlook polling`)
|
||||
} catch (err) {
|
||||
logger.error(
|
||||
`[${requestId}] Error setting up Outlook webhook configuration, rolling back webhook`,
|
||||
err
|
||||
)
|
||||
await revertSavedWebhook(savedWebhook, existingWebhook, requestId)
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: 'Failed to configure Outlook webhook',
|
||||
details: err instanceof Error ? err.message : 'Unknown error',
|
||||
},
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
// --- End Outlook specific logic ---
|
||||
|
||||
// --- RSS webhook setup ---
|
||||
if (savedWebhook && provider === 'rss') {
|
||||
logger.info(`[${requestId}] RSS provider detected. Setting up RSS webhook configuration.`)
|
||||
try {
|
||||
const success = await configureRssPolling(savedWebhook, requestId)
|
||||
|
||||
if (!success) {
|
||||
logger.error(`[${requestId}] Failed to configure RSS polling, rolling back webhook`)
|
||||
await revertSavedWebhook(savedWebhook, existingWebhook, requestId)
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: 'Failed to configure RSS polling',
|
||||
details: 'Please try again',
|
||||
},
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
|
||||
logger.info(`[${requestId}] Successfully configured RSS polling`)
|
||||
} catch (err) {
|
||||
logger.error(
|
||||
`[${requestId}] Error setting up RSS webhook configuration, rolling back webhook`,
|
||||
err
|
||||
)
|
||||
await revertSavedWebhook(savedWebhook, existingWebhook, requestId)
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: 'Failed to configure RSS webhook',
|
||||
details: err instanceof Error ? err.message : 'Unknown error',
|
||||
},
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
// --- End RSS specific logic ---
|
||||
|
||||
if (!targetWebhookId && savedWebhook) {
|
||||
try {
|
||||
|
||||
@@ -97,7 +97,6 @@ const {
|
||||
handleSlackChallengeMock,
|
||||
processWhatsAppDeduplicationMock,
|
||||
processGenericDeduplicationMock,
|
||||
fetchAndProcessAirtablePayloadsMock,
|
||||
processWebhookMock,
|
||||
executeMock,
|
||||
getWorkspaceBilledAccountUserIdMock,
|
||||
@@ -109,7 +108,6 @@ const {
|
||||
handleSlackChallengeMock: vi.fn().mockReturnValue(null),
|
||||
processWhatsAppDeduplicationMock: vi.fn().mockResolvedValue(null),
|
||||
processGenericDeduplicationMock: vi.fn().mockResolvedValue(null),
|
||||
fetchAndProcessAirtablePayloadsMock: vi.fn().mockResolvedValue(undefined),
|
||||
processWebhookMock: vi.fn().mockResolvedValue(new Response('Webhook processed', { status: 200 })),
|
||||
executeMock: vi.fn().mockResolvedValue({
|
||||
success: true,
|
||||
@@ -156,10 +154,8 @@ vi.mock('@/background/logs-webhook-delivery', () => ({
|
||||
vi.mock('@/lib/webhooks/utils', () => ({
|
||||
handleWhatsAppVerification: handleWhatsAppVerificationMock,
|
||||
handleSlackChallenge: handleSlackChallengeMock,
|
||||
verifyProviderWebhook: vi.fn().mockReturnValue(null),
|
||||
processWhatsAppDeduplication: processWhatsAppDeduplicationMock,
|
||||
processGenericDeduplication: processGenericDeduplicationMock,
|
||||
fetchAndProcessAirtablePayloads: fetchAndProcessAirtablePayloadsMock,
|
||||
processWebhook: processWebhookMock,
|
||||
}))
|
||||
|
||||
|
||||
@@ -76,7 +76,7 @@ async function handleWebhookPost(
|
||||
|
||||
const { body, rawBody } = parseResult
|
||||
|
||||
const challengeResponse = await handleProviderChallenges(body, request, requestId, path)
|
||||
const challengeResponse = await handleProviderChallenges(body, request, requestId, path, rawBody)
|
||||
if (challengeResponse) {
|
||||
return challengeResponse
|
||||
}
|
||||
@@ -87,7 +87,7 @@ async function handleWebhookPost(
|
||||
if (webhooksForPath.length === 0) {
|
||||
const verificationResponse = await handlePreLookupWebhookVerification(
|
||||
request.method,
|
||||
body,
|
||||
body as Record<string, unknown> | undefined,
|
||||
requestId,
|
||||
path
|
||||
)
|
||||
|
||||
@@ -41,7 +41,7 @@ import {
|
||||
} from '@/lib/uploads/utils/user-file-base64.server'
|
||||
import { executeWorkflowCore } from '@/lib/workflows/executor/execution-core'
|
||||
import { type ExecutionEvent, encodeSSEEvent } from '@/lib/workflows/executor/execution-events'
|
||||
import { PauseResumeManager } from '@/lib/workflows/executor/human-in-the-loop-manager'
|
||||
import { handlePostExecutionPauseState } from '@/lib/workflows/executor/pause-persistence'
|
||||
import {
|
||||
DIRECT_WORKFLOW_JOB_NAME,
|
||||
type QueuedWorkflowExecutionPayload,
|
||||
@@ -903,6 +903,8 @@ async function handleExecutePost(
|
||||
abortSignal: timeoutController.signal,
|
||||
})
|
||||
|
||||
await handlePostExecutionPauseState({ result, workflowId, executionId, loggingSession })
|
||||
|
||||
if (
|
||||
result.status === 'cancelled' &&
|
||||
timeoutController.isTimedOut() &&
|
||||
@@ -1359,31 +1361,7 @@ async function handleExecutePost(
|
||||
runFromBlock: resolvedRunFromBlock,
|
||||
})
|
||||
|
||||
if (result.status === 'paused') {
|
||||
if (!result.snapshotSeed) {
|
||||
reqLogger.error('Missing snapshot seed for paused execution')
|
||||
await loggingSession.markAsFailed('Missing snapshot seed for paused execution')
|
||||
} else {
|
||||
try {
|
||||
await PauseResumeManager.persistPauseResult({
|
||||
workflowId,
|
||||
executionId,
|
||||
pausePoints: result.pausePoints || [],
|
||||
snapshotSeed: result.snapshotSeed,
|
||||
executorUserId: result.metadata?.userId,
|
||||
})
|
||||
} catch (pauseError) {
|
||||
reqLogger.error('Failed to persist pause result', {
|
||||
error: pauseError instanceof Error ? pauseError.message : String(pauseError),
|
||||
})
|
||||
await loggingSession.markAsFailed(
|
||||
`Failed to persist pause state: ${pauseError instanceof Error ? pauseError.message : String(pauseError)}`
|
||||
)
|
||||
}
|
||||
}
|
||||
} else {
|
||||
await PauseResumeManager.processQueuedResumes(executionId)
|
||||
}
|
||||
await handlePostExecutionPauseState({ result, workflowId, executionId, loggingSession })
|
||||
|
||||
if (result.status === 'cancelled') {
|
||||
if (timeoutController.isTimedOut() && timeoutController.timeoutMs) {
|
||||
@@ -1422,25 +1400,42 @@ async function handleExecutePost(
|
||||
return
|
||||
}
|
||||
|
||||
sendEvent({
|
||||
type: 'execution:completed',
|
||||
timestamp: new Date().toISOString(),
|
||||
executionId,
|
||||
workflowId,
|
||||
data: {
|
||||
success: result.success,
|
||||
output: includeFileBase64
|
||||
? await hydrateUserFilesWithBase64(result.output, {
|
||||
requestId,
|
||||
executionId,
|
||||
maxBytes: base64MaxBytes,
|
||||
})
|
||||
: result.output,
|
||||
duration: result.metadata?.duration || 0,
|
||||
startTime: result.metadata?.startTime || startTime.toISOString(),
|
||||
endTime: result.metadata?.endTime || new Date().toISOString(),
|
||||
},
|
||||
})
|
||||
const sseOutput = includeFileBase64
|
||||
? await hydrateUserFilesWithBase64(result.output, {
|
||||
requestId,
|
||||
executionId,
|
||||
maxBytes: base64MaxBytes,
|
||||
})
|
||||
: result.output
|
||||
|
||||
if (result.status === 'paused') {
|
||||
sendEvent({
|
||||
type: 'execution:paused',
|
||||
timestamp: new Date().toISOString(),
|
||||
executionId,
|
||||
workflowId,
|
||||
data: {
|
||||
output: sseOutput,
|
||||
duration: result.metadata?.duration || 0,
|
||||
startTime: result.metadata?.startTime || startTime.toISOString(),
|
||||
endTime: result.metadata?.endTime || new Date().toISOString(),
|
||||
},
|
||||
})
|
||||
} else {
|
||||
sendEvent({
|
||||
type: 'execution:completed',
|
||||
timestamp: new Date().toISOString(),
|
||||
executionId,
|
||||
workflowId,
|
||||
data: {
|
||||
success: result.success,
|
||||
output: sseOutput,
|
||||
duration: result.metadata?.duration || 0,
|
||||
startTime: result.metadata?.startTime || startTime.toISOString(),
|
||||
endTime: result.metadata?.endTime || new Date().toISOString(),
|
||||
},
|
||||
})
|
||||
}
|
||||
finalMetaStatus = 'complete'
|
||||
} catch (error: unknown) {
|
||||
const isTimeout = isTimeoutError(error) || timeoutController.isTimedOut()
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { checkHybridAuth } from '@/lib/auth/hybrid'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { SSE_HEADERS } from '@/lib/core/utils/sse'
|
||||
import {
|
||||
type ExecutionStreamStatus,
|
||||
@@ -29,14 +29,14 @@ export async function GET(
|
||||
const { id: workflowId, executionId } = await params
|
||||
|
||||
try {
|
||||
const auth = await checkHybridAuth(req, { requireWorkflowId: false })
|
||||
if (!auth.success || !auth.userId) {
|
||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
||||
const session = await getSession()
|
||||
if (!session?.user?.id) {
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const workflowAuthorization = await authorizeWorkflowByWorkspacePermission({
|
||||
workflowId,
|
||||
userId: auth.userId,
|
||||
userId: session.user.id,
|
||||
action: 'read',
|
||||
})
|
||||
if (!workflowAuthorization.allowed) {
|
||||
@@ -46,16 +46,6 @@ export async function GET(
|
||||
)
|
||||
}
|
||||
|
||||
if (
|
||||
auth.apiKeyType === 'workspace' &&
|
||||
workflowAuthorization.workflow?.workspaceId !== auth.workspaceId
|
||||
) {
|
||||
return NextResponse.json(
|
||||
{ error: 'API key is not authorized for this workspace' },
|
||||
{ status: 403 }
|
||||
)
|
||||
}
|
||||
|
||||
const meta = await getExecutionMeta(executionId)
|
||||
if (!meta) {
|
||||
return NextResponse.json({ error: 'Execution buffer not found or expired' }, { status: 404 })
|
||||
|
||||
@@ -6,7 +6,7 @@ import { PostHogProvider } from '@/app/_shell/providers/posthog-provider'
|
||||
import { generateBrandedMetadata, generateThemeCSS } from '@/ee/whitelabeling'
|
||||
import '@/app/_styles/globals.css'
|
||||
import { OneDollarStats } from '@/components/analytics/onedollarstats'
|
||||
import { isReactGrabEnabled, isReactScanEnabled } from '@/lib/core/config/feature-flags'
|
||||
import { isHosted, isReactGrabEnabled, isReactScanEnabled } from '@/lib/core/config/feature-flags'
|
||||
import { HydrationErrorHandler } from '@/app/_shell/hydration-error-handler'
|
||||
import { QueryProvider } from '@/app/_shell/providers/query-provider'
|
||||
import { SessionProvider } from '@/app/_shell/providers/session-provider'
|
||||
@@ -25,6 +25,9 @@ export const viewport: Viewport = {
|
||||
|
||||
export const metadata: Metadata = generateBrandedMetadata()
|
||||
|
||||
const GTM_ID = 'GTM-T7PHSRX5' as const
|
||||
const GA_ID = 'G-DR7YBE70VS' as const
|
||||
|
||||
export default function RootLayout({ children }: { children: React.ReactNode }) {
|
||||
const themeCSS = generateThemeCSS()
|
||||
|
||||
@@ -208,9 +211,54 @@ export default function RootLayout({ children }: { children: React.ReactNode })
|
||||
<link rel='dns-prefetch' href='https://assets.onedollarstats.com' />
|
||||
<script defer src='https://assets.onedollarstats.com/stonks.js' />
|
||||
|
||||
{/* Google Tag Manager — hosted only */}
|
||||
{isHosted && (
|
||||
<Script
|
||||
id='gtm'
|
||||
strategy='afterInteractive'
|
||||
dangerouslySetInnerHTML={{
|
||||
__html: `(function(w,d,s,l,i){w[l]=w[l]||[];w[l].push({'gtm.start':
|
||||
new Date().getTime(),event:'gtm.js'});var f=d.getElementsByTagName(s)[0],
|
||||
j=d.createElement(s),dl=l!='dataLayer'?'&l='+l:'';j.async=true;j.src=
|
||||
'https://www.googletagmanager.com/gtm.js?id='+i+dl;f.parentNode.insertBefore(j,f);
|
||||
})(window,document,'script','dataLayer','${GTM_ID}');`,
|
||||
}}
|
||||
/>
|
||||
)}
|
||||
|
||||
{/* Google Analytics (gtag.js) — hosted only */}
|
||||
{isHosted && (
|
||||
<>
|
||||
<Script
|
||||
id='gtag-src'
|
||||
src={`https://www.googletagmanager.com/gtag/js?id=${GA_ID}`}
|
||||
strategy='afterInteractive'
|
||||
/>
|
||||
<Script
|
||||
id='gtag-init'
|
||||
strategy='afterInteractive'
|
||||
dangerouslySetInnerHTML={{
|
||||
__html: `window.dataLayer=window.dataLayer||[];function gtag(){dataLayer.push(arguments);}gtag('js',new Date());gtag('config','${GA_ID}');`,
|
||||
}}
|
||||
/>
|
||||
</>
|
||||
)}
|
||||
|
||||
<PublicEnvScript />
|
||||
</head>
|
||||
<body className={`${season.variable} font-season`} suppressHydrationWarning>
|
||||
{/* Google Tag Manager (noscript) — hosted only */}
|
||||
{isHosted && (
|
||||
<noscript>
|
||||
<iframe
|
||||
src={`https://www.googletagmanager.com/ns.html?id=${GTM_ID}`}
|
||||
title='Google Tag Manager'
|
||||
height='0'
|
||||
width='0'
|
||||
className='invisible hidden'
|
||||
/>
|
||||
</noscript>
|
||||
)}
|
||||
<HydrationErrorHandler />
|
||||
<OneDollarStats />
|
||||
<PostHogProvider>
|
||||
|
||||
@@ -6,6 +6,7 @@ import { useRouter } from 'next/navigation'
|
||||
import {
|
||||
Badge,
|
||||
Button,
|
||||
Code,
|
||||
Input,
|
||||
Label,
|
||||
Table,
|
||||
@@ -155,7 +156,6 @@ function getBlockNameFromSnapshot(
|
||||
const parsed = JSON.parse(executionSnapshot.snapshot)
|
||||
const workflowState = parsed?.workflow
|
||||
if (!workflowState?.blocks || !Array.isArray(workflowState.blocks)) return null
|
||||
// Blocks are stored as an array of serialized blocks with id and metadata.name
|
||||
const block = workflowState.blocks.find((b: { id: string }) => b.id === blockId)
|
||||
return block?.metadata?.name || null
|
||||
} catch {
|
||||
@@ -163,6 +163,47 @@ function getBlockNameFromSnapshot(
|
||||
}
|
||||
}
|
||||
|
||||
function renderStructuredValuePreview(value: unknown) {
|
||||
if (value === null || value === undefined) {
|
||||
return <span style={{ fontSize: '12px', color: 'var(--text-muted)' }}>—</span>
|
||||
}
|
||||
|
||||
if (typeof value === 'object') {
|
||||
return (
|
||||
<div style={{ minWidth: '220px' }}>
|
||||
<Code.Viewer
|
||||
code={JSON.stringify(value, null, 2)}
|
||||
language='json'
|
||||
wrapText
|
||||
className='max-h-[220px]'
|
||||
/>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
const stringValue = String(value)
|
||||
return (
|
||||
<div
|
||||
style={{
|
||||
display: 'inline-flex',
|
||||
maxWidth: '100%',
|
||||
borderRadius: '6px',
|
||||
border: '1px solid var(--border)',
|
||||
background: 'var(--surface-5)',
|
||||
padding: '4px 8px',
|
||||
whiteSpace: 'pre-wrap',
|
||||
wordBreak: 'break-word',
|
||||
fontFamily: 'var(--font-mono, monospace)',
|
||||
fontSize: '12px',
|
||||
lineHeight: '16px',
|
||||
color: 'var(--text-primary)',
|
||||
}}
|
||||
>
|
||||
{stringValue}
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
export default function ResumeExecutionPage({
|
||||
params,
|
||||
initialExecutionDetail,
|
||||
@@ -874,8 +915,11 @@ export default function ResumeExecutionPage({
|
||||
<Tooltip.Trigger asChild>
|
||||
<Button
|
||||
variant='outline'
|
||||
size='sm'
|
||||
onClick={refreshExecutionDetail}
|
||||
disabled={refreshingExecution}
|
||||
className='gap-1.5 px-2.5'
|
||||
aria-label='Refresh execution details'
|
||||
>
|
||||
<RefreshCw
|
||||
style={{
|
||||
@@ -884,6 +928,7 @@ export default function ResumeExecutionPage({
|
||||
animation: refreshingExecution ? 'spin 1s linear infinite' : undefined,
|
||||
}}
|
||||
/>
|
||||
Refresh
|
||||
</Button>
|
||||
</Tooltip.Trigger>
|
||||
<Tooltip.Content>Refresh</Tooltip.Content>
|
||||
@@ -1123,11 +1168,7 @@ export default function ResumeExecutionPage({
|
||||
<TableRow key={row.id}>
|
||||
<TableCell>{row.name}</TableCell>
|
||||
<TableCell>{row.type}</TableCell>
|
||||
<TableCell>
|
||||
<code style={{ fontSize: '12px' }}>
|
||||
{formatStructureValue(row.value)}
|
||||
</code>
|
||||
</TableCell>
|
||||
<TableCell>{renderStructuredValuePreview(row.value)}</TableCell>
|
||||
</TableRow>
|
||||
))}
|
||||
</TableBody>
|
||||
@@ -1243,6 +1284,8 @@ export default function ResumeExecutionPage({
|
||||
}}
|
||||
placeholder='{"example": "value"}'
|
||||
rows={6}
|
||||
spellCheck={false}
|
||||
className='min-h-[180px] border-[var(--border-1)] bg-[var(--surface-3)] font-mono text-[12px] leading-5'
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
@@ -1267,10 +1310,10 @@ export default function ResumeExecutionPage({
|
||||
{/* Footer */}
|
||||
<div
|
||||
style={{
|
||||
marginTop: '32px',
|
||||
padding: '16px',
|
||||
maxWidth: '1200px',
|
||||
margin: '24px auto 0',
|
||||
padding: '0 24px 24px',
|
||||
textAlign: 'center',
|
||||
borderTop: '1px solid var(--border)',
|
||||
fontSize: '13px',
|
||||
color: 'var(--text-muted)',
|
||||
}}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
'use client'
|
||||
|
||||
import { useLayoutEffect, useRef } from 'react'
|
||||
import { useCallback, useLayoutEffect, useRef } from 'react'
|
||||
import { cn } from '@/lib/core/utils/cn'
|
||||
import { MessageActions } from '@/app/workspace/[workspaceId]/components'
|
||||
import { ChatMessageAttachments } from '@/app/workspace/[workspaceId]/home/components/chat-message-attachments'
|
||||
@@ -99,6 +99,18 @@ export function MothershipChat({
|
||||
const hasMessages = messages.length > 0
|
||||
const initialScrollDoneRef = useRef(false)
|
||||
|
||||
const messageQueueRef = useRef(messageQueue)
|
||||
messageQueueRef.current = messageQueue
|
||||
const onSendQueuedMessageRef = useRef(onSendQueuedMessage)
|
||||
onSendQueuedMessageRef.current = onSendQueuedMessage
|
||||
|
||||
const handleEnterWhileEmpty = useCallback(() => {
|
||||
const topMessage = messageQueueRef.current[0]
|
||||
if (!topMessage) return false
|
||||
void onSendQueuedMessageRef.current(topMessage.id)
|
||||
return true
|
||||
}, [])
|
||||
|
||||
useLayoutEffect(() => {
|
||||
if (!hasMessages) {
|
||||
initialScrollDoneRef.current = false
|
||||
@@ -197,6 +209,7 @@ export function MothershipChat({
|
||||
onContextAdd={onContextAdd}
|
||||
editValue={editValue}
|
||||
onEditValueConsumed={onEditValueConsumed}
|
||||
onEnterWhileEmpty={handleEnterWhileEmpty}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@@ -24,6 +24,7 @@ import type {
|
||||
MothershipResource,
|
||||
MothershipResourceType,
|
||||
} from '@/app/workspace/[workspaceId]/home/types'
|
||||
import { useFolders } from '@/hooks/queries/folders'
|
||||
import { useKnowledgeBasesQuery } from '@/hooks/queries/kb/knowledge'
|
||||
import { useTablesList } from '@/hooks/queries/tables'
|
||||
import { useWorkflows } from '@/hooks/queries/workflows'
|
||||
@@ -51,6 +52,7 @@ export function useAvailableResources(
|
||||
const { data: tables = [] } = useTablesList(workspaceId)
|
||||
const { data: files = [] } = useWorkspaceFiles(workspaceId)
|
||||
const { data: knowledgeBases } = useKnowledgeBasesQuery(workspaceId)
|
||||
const { data: folders = [] } = useFolders(workspaceId)
|
||||
|
||||
return useMemo(
|
||||
() => [
|
||||
@@ -63,6 +65,14 @@ export function useAvailableResources(
|
||||
isOpen: existingKeys.has(`workflow:${w.id}`),
|
||||
})),
|
||||
},
|
||||
{
|
||||
type: 'folder' as const,
|
||||
items: folders.map((f) => ({
|
||||
id: f.id,
|
||||
name: f.name,
|
||||
isOpen: existingKeys.has(`folder:${f.id}`),
|
||||
})),
|
||||
},
|
||||
{
|
||||
type: 'table' as const,
|
||||
items: tables.map((t) => ({
|
||||
@@ -88,7 +98,7 @@ export function useAvailableResources(
|
||||
})),
|
||||
},
|
||||
],
|
||||
[workflows, tables, files, knowledgeBases, existingKeys]
|
||||
[workflows, folders, tables, files, knowledgeBases, existingKeys]
|
||||
)
|
||||
}
|
||||
|
||||
@@ -206,7 +216,7 @@ export function AddResourceDropdown({
|
||||
)
|
||||
})
|
||||
) : (
|
||||
<div className='px-2 py-[5px] text-center font-medium text-[var(--text-tertiary)] text-caption'>
|
||||
<div className='px-2 py-1.5 text-center font-medium text-[var(--text-tertiary)] text-caption'>
|
||||
No results
|
||||
</div>
|
||||
)
|
||||
|
||||
@@ -5,7 +5,13 @@ import { createLogger } from '@sim/logger'
|
||||
import { Square } from 'lucide-react'
|
||||
import { useRouter } from 'next/navigation'
|
||||
import { Button, PlayOutline, Skeleton, Tooltip } from '@/components/emcn'
|
||||
import { Download, FileX, SquareArrowUpRight, WorkflowX } from '@/components/emcn/icons'
|
||||
import {
|
||||
Download,
|
||||
FileX,
|
||||
Folder as FolderIcon,
|
||||
SquareArrowUpRight,
|
||||
WorkflowX,
|
||||
} from '@/components/emcn/icons'
|
||||
import {
|
||||
cancelRunToolExecution,
|
||||
markRunToolManuallyStopped,
|
||||
@@ -37,6 +43,7 @@ import {
|
||||
import { Table } from '@/app/workspace/[workspaceId]/tables/[tableId]/components'
|
||||
import { useUsageLimits } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/hooks'
|
||||
import { useWorkflowExecution } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-workflow-execution'
|
||||
import { useFolders } from '@/hooks/queries/folders'
|
||||
import { useWorkflows } from '@/hooks/queries/workflows'
|
||||
import { useWorkspaceFiles } from '@/hooks/queries/workspace-files'
|
||||
import { useSettingsNavigation } from '@/hooks/use-settings-navigation'
|
||||
@@ -147,6 +154,9 @@ export const ResourceContent = memo(function ResourceContent({
|
||||
/>
|
||||
)
|
||||
|
||||
case 'folder':
|
||||
return <EmbeddedFolder key={resource.id} workspaceId={workspaceId} folderId={resource.id} />
|
||||
|
||||
case 'generic':
|
||||
return (
|
||||
<GenericResourceContent key={resource.id} data={genericResourceData ?? { entries: [] }} />
|
||||
@@ -172,6 +182,7 @@ export function ResourceActions({ workspaceId, resource }: ResourceActionsProps)
|
||||
return (
|
||||
<EmbeddedKnowledgeBaseActions workspaceId={workspaceId} knowledgeBaseId={resource.id} />
|
||||
)
|
||||
case 'folder':
|
||||
case 'generic':
|
||||
return null
|
||||
default:
|
||||
@@ -450,6 +461,72 @@ function EmbeddedFile({ workspaceId, fileId, previewMode, streamingContent }: Em
|
||||
)
|
||||
}
|
||||
|
||||
interface EmbeddedFolderProps {
|
||||
workspaceId: string
|
||||
folderId: string
|
||||
}
|
||||
|
||||
function EmbeddedFolder({ workspaceId, folderId }: EmbeddedFolderProps) {
|
||||
const { data: folderList, isPending: isFoldersPending } = useFolders(workspaceId)
|
||||
const { data: workflowList = [] } = useWorkflows(workspaceId)
|
||||
|
||||
const folder = useMemo(
|
||||
() => (folderList ?? []).find((f) => f.id === folderId),
|
||||
[folderList, folderId]
|
||||
)
|
||||
|
||||
const folderWorkflows = useMemo(
|
||||
() => workflowList.filter((w) => w.folderId === folderId),
|
||||
[workflowList, folderId]
|
||||
)
|
||||
|
||||
if (isFoldersPending) return LOADING_SKELETON
|
||||
|
||||
if (!folder) {
|
||||
return (
|
||||
<div className='flex h-full flex-col items-center justify-center gap-3'>
|
||||
<FolderIcon className='h-[32px] w-[32px] text-[var(--text-icon)]' />
|
||||
<div className='flex flex-col items-center gap-1'>
|
||||
<h2 className='font-medium text-[20px] text-[var(--text-primary)]'>Folder not found</h2>
|
||||
<p className='text-[var(--text-body)] text-small'>
|
||||
This folder may have been deleted or moved
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
return (
|
||||
<div className='flex h-full flex-col overflow-y-auto p-6'>
|
||||
<h2 className='mb-4 font-medium text-[16px] text-[var(--text-primary)]'>{folder.name}</h2>
|
||||
{folderWorkflows.length === 0 ? (
|
||||
<p className='text-[13px] text-[var(--text-muted)]'>No workflows in this folder</p>
|
||||
) : (
|
||||
<div className='flex flex-col gap-1'>
|
||||
{folderWorkflows.map((w) => (
|
||||
<button
|
||||
key={w.id}
|
||||
type='button'
|
||||
onClick={() => window.open(`/workspace/${workspaceId}/w/${w.id}`, '_blank')}
|
||||
className='flex items-center gap-2 rounded-[6px] px-3 py-2 text-left transition-colors hover:bg-[var(--surface-4)]'
|
||||
>
|
||||
<div
|
||||
className='h-[12px] w-[12px] flex-shrink-0 rounded-[3px] border-[2px]'
|
||||
style={{
|
||||
backgroundColor: w.color,
|
||||
borderColor: `${w.color}60`,
|
||||
backgroundClip: 'padding-box',
|
||||
}}
|
||||
/>
|
||||
<span className='truncate text-[13px] text-[var(--text-primary)]'>{w.name}</span>
|
||||
</button>
|
||||
))}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
function extractFileContent(raw: string): string {
|
||||
const marker = '"content":'
|
||||
const idx = raw.indexOf(marker)
|
||||
|
||||
@@ -6,6 +6,7 @@ import { useParams } from 'next/navigation'
|
||||
import {
|
||||
Database,
|
||||
File as FileIcon,
|
||||
Folder as FolderIcon,
|
||||
Table as TableIcon,
|
||||
TerminalWindow,
|
||||
} from '@/components/emcn/icons'
|
||||
@@ -18,6 +19,7 @@ import type {
|
||||
} from '@/app/workspace/[workspaceId]/home/types'
|
||||
import { knowledgeKeys } from '@/hooks/queries/kb/knowledge'
|
||||
import { tableKeys } from '@/hooks/queries/tables'
|
||||
import { folderKeys } from '@/hooks/queries/utils/folder-keys'
|
||||
import { invalidateWorkflowLists } from '@/hooks/queries/utils/invalidate-workflow-lists'
|
||||
import { useWorkflows } from '@/hooks/queries/workflows'
|
||||
import { workspaceFilesKeys } from '@/hooks/queries/workspace-files'
|
||||
@@ -140,6 +142,15 @@ export const RESOURCE_REGISTRY: Record<MothershipResourceType, ResourceTypeConfi
|
||||
),
|
||||
renderDropdownItem: (props) => <IconDropdownItem {...props} icon={Database} />,
|
||||
},
|
||||
folder: {
|
||||
type: 'folder',
|
||||
label: 'Folders',
|
||||
icon: FolderIcon,
|
||||
renderTabIcon: (_resource, className) => (
|
||||
<FolderIcon className={cn(className, 'text-[var(--text-icon)]')} />
|
||||
),
|
||||
renderDropdownItem: (props) => <IconDropdownItem {...props} icon={FolderIcon} />,
|
||||
},
|
||||
} as const
|
||||
|
||||
export const RESOURCE_TYPES = Object.values(RESOURCE_REGISTRY)
|
||||
@@ -171,6 +182,9 @@ const RESOURCE_INVALIDATORS: Record<
|
||||
qc.invalidateQueries({ queryKey: knowledgeKeys.detail(id) })
|
||||
qc.invalidateQueries({ queryKey: knowledgeKeys.tagDefinitions(id) })
|
||||
},
|
||||
folder: (qc) => {
|
||||
qc.invalidateQueries({ queryKey: folderKeys.lists() })
|
||||
},
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -23,6 +23,7 @@ import type {
|
||||
MothershipResource,
|
||||
MothershipResourceType,
|
||||
} from '@/app/workspace/[workspaceId]/home/types'
|
||||
import { useFolders } from '@/hooks/queries/folders'
|
||||
import { useKnowledgeBasesQuery } from '@/hooks/queries/kb/knowledge'
|
||||
import { useTablesList } from '@/hooks/queries/tables'
|
||||
import {
|
||||
@@ -57,6 +58,7 @@ function useResourceNameLookup(workspaceId: string): Map<string, string> {
|
||||
const { data: tables = [] } = useTablesList(workspaceId)
|
||||
const { data: files = [] } = useWorkspaceFiles(workspaceId)
|
||||
const { data: knowledgeBases } = useKnowledgeBasesQuery(workspaceId)
|
||||
const { data: folders = [] } = useFolders(workspaceId)
|
||||
|
||||
return useMemo(() => {
|
||||
const map = new Map<string, string>()
|
||||
@@ -64,8 +66,9 @@ function useResourceNameLookup(workspaceId: string): Map<string, string> {
|
||||
for (const t of tables) map.set(`table:${t.id}`, t.name)
|
||||
for (const f of files) map.set(`file:${f.id}`, f.name)
|
||||
for (const kb of knowledgeBases ?? []) map.set(`knowledgebase:${kb.id}`, kb.name)
|
||||
for (const folder of folders) map.set(`folder:${folder.id}`, folder.name)
|
||||
return map
|
||||
}, [workflows, tables, files, knowledgeBases])
|
||||
}, [workflows, tables, files, knowledgeBases, folders])
|
||||
}
|
||||
|
||||
interface ResourceTabsProps {
|
||||
|
||||
@@ -87,6 +87,8 @@ export function mapResourceToContext(resource: MothershipResource): ChatContext
|
||||
return { kind: 'table', tableId: resource.id, label: resource.title }
|
||||
case 'file':
|
||||
return { kind: 'file', fileId: resource.id, label: resource.title }
|
||||
case 'folder':
|
||||
return { kind: 'folder', folderId: resource.id, label: resource.title }
|
||||
default:
|
||||
return { kind: 'docs', label: resource.title }
|
||||
}
|
||||
|
||||
@@ -183,7 +183,7 @@ export const PlusMenuDropdown = React.memo(
|
||||
)
|
||||
})
|
||||
) : (
|
||||
<div className='px-2 py-[5px] text-center font-medium text-[12px] text-[var(--text-tertiary)]'>
|
||||
<div className='px-2 py-1.5 text-center font-medium text-[12px] text-[var(--text-tertiary)]'>
|
||||
No results
|
||||
</div>
|
||||
)
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
import type React from 'react'
|
||||
import { useCallback, useEffect, useLayoutEffect, useMemo, useRef, useState } from 'react'
|
||||
import { useParams } from 'next/navigation'
|
||||
import { Database, Table as TableIcon } from '@/components/emcn/icons'
|
||||
import { Database, Folder as FolderIcon, Table as TableIcon } from '@/components/emcn/icons'
|
||||
import { getDocumentIcon } from '@/components/icons/document-icons'
|
||||
import { useSession } from '@/lib/auth/auth-client'
|
||||
import { cn } from '@/lib/core/utils/cn'
|
||||
@@ -108,6 +108,7 @@ interface UserInputProps {
|
||||
isInitialView?: boolean
|
||||
userId?: string
|
||||
onContextAdd?: (context: ChatContext) => void
|
||||
onEnterWhileEmpty?: () => boolean
|
||||
}
|
||||
|
||||
export function UserInput({
|
||||
@@ -120,6 +121,7 @@ export function UserInput({
|
||||
isInitialView = true,
|
||||
userId,
|
||||
onContextAdd,
|
||||
onEnterWhileEmpty,
|
||||
}: UserInputProps) {
|
||||
const { workspaceId } = useParams<{ workspaceId: string }>()
|
||||
const { data: workflowsById = {} } = useWorkflowMap(workspaceId)
|
||||
@@ -175,6 +177,7 @@ export function UserInput({
|
||||
if (ctx.kind === 'knowledge' && ctx.knowledgeId) keys.add(`knowledgebase:${ctx.knowledgeId}`)
|
||||
if (ctx.kind === 'table' && ctx.tableId) keys.add(`table:${ctx.tableId}`)
|
||||
if (ctx.kind === 'file' && ctx.fileId) keys.add(`file:${ctx.fileId}`)
|
||||
if (ctx.kind === 'folder' && ctx.folderId) keys.add(`folder:${ctx.folderId}`)
|
||||
}
|
||||
return keys
|
||||
}, [contextManagement.selectedContexts])
|
||||
@@ -207,6 +210,10 @@ export function UserInput({
|
||||
filesRef.current = files
|
||||
const contextRef = useRef(contextManagement)
|
||||
contextRef.current = contextManagement
|
||||
const onEnterWhileEmptyRef = useRef(onEnterWhileEmpty)
|
||||
onEnterWhileEmptyRef.current = onEnterWhileEmpty
|
||||
const isSendingRef = useRef(isSending)
|
||||
isSendingRef.current = isSending
|
||||
|
||||
useEffect(() => {
|
||||
return () => {
|
||||
@@ -447,6 +454,10 @@ export function UserInput({
|
||||
(e: React.KeyboardEvent<HTMLTextAreaElement>) => {
|
||||
if (e.key === 'Enter' && !e.shiftKey && !e.nativeEvent.isComposing) {
|
||||
e.preventDefault()
|
||||
if (isSendingRef.current && !valueRef.current.trim()) {
|
||||
onEnterWhileEmptyRef.current?.()
|
||||
return
|
||||
}
|
||||
handleSubmit()
|
||||
return
|
||||
}
|
||||
@@ -663,6 +674,9 @@ export function UserInput({
|
||||
mentionIconNode = <FileDocIcon className={iconClasses} />
|
||||
break
|
||||
}
|
||||
case 'folder':
|
||||
mentionIconNode = <FolderIcon className={iconClasses} />
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
|
||||
import { useMemo } from 'react'
|
||||
import { useParams } from 'next/navigation'
|
||||
import { Database, Table as TableIcon } from '@/components/emcn/icons'
|
||||
import { Database, Folder as FolderIcon, Table as TableIcon } from '@/components/emcn/icons'
|
||||
import { getDocumentIcon } from '@/components/icons/document-icons'
|
||||
import type { ChatMessageContext } from '@/app/workspace/[workspaceId]/home/types'
|
||||
import { useWorkflows } from '@/hooks/queries/workflows'
|
||||
@@ -81,6 +81,9 @@ function MentionHighlight({ context }: { context: ChatMessageContext }) {
|
||||
icon = <FileDocIcon className={iconClasses} />
|
||||
break
|
||||
}
|
||||
case 'folder':
|
||||
icon = <FolderIcon className={iconClasses} />
|
||||
break
|
||||
}
|
||||
|
||||
return (
|
||||
|
||||
@@ -195,6 +195,7 @@ export function Home({ chatId }: HomeProps = {}) {
|
||||
} else {
|
||||
url.searchParams.delete('resource')
|
||||
}
|
||||
url.hash = ''
|
||||
window.history.replaceState(null, '', url.toString())
|
||||
}, [activeResourceId])
|
||||
|
||||
|
||||
@@ -294,6 +294,7 @@ function mapStoredMessage(msg: TaskStoredMessage): ChatMessage {
|
||||
...(c.knowledgeId && { knowledgeId: c.knowledgeId }),
|
||||
...(c.tableId && { tableId: c.tableId }),
|
||||
...(c.fileId && { fileId: c.fileId }),
|
||||
...(c.folderId && { folderId: c.folderId }),
|
||||
}))
|
||||
}
|
||||
|
||||
@@ -1953,6 +1954,7 @@ export function useChat(
|
||||
...('knowledgeId' in c && c.knowledgeId ? { knowledgeId: c.knowledgeId } : {}),
|
||||
...('tableId' in c && c.tableId ? { tableId: c.tableId } : {}),
|
||||
...('fileId' in c && c.fileId ? { fileId: c.fileId } : {}),
|
||||
...('folderId' in c && c.folderId ? { folderId: c.folderId } : {}),
|
||||
}))
|
||||
|
||||
setMessages((prev) => [
|
||||
|
||||
@@ -266,6 +266,7 @@ export interface ChatMessageContext {
|
||||
knowledgeId?: string
|
||||
tableId?: string
|
||||
fileId?: string
|
||||
folderId?: string
|
||||
}
|
||||
|
||||
export interface ChatMessage {
|
||||
|
||||
@@ -30,6 +30,7 @@ import {
|
||||
type PendingCredentialCreateRequest,
|
||||
readPendingCredentialCreateRequest,
|
||||
} from '@/lib/credentials/client-state'
|
||||
import type { WorkspaceEnvironmentData } from '@/lib/environment/api'
|
||||
import { getUserColor } from '@/lib/workspaces/colors'
|
||||
import { isValidEnvVarName } from '@/executor/constants'
|
||||
import {
|
||||
@@ -48,9 +49,9 @@ import {
|
||||
useSavePersonalEnvironment,
|
||||
useUpsertWorkspaceEnvironment,
|
||||
useWorkspaceEnvironment,
|
||||
type WorkspaceEnvironmentData,
|
||||
} from '@/hooks/queries/environment'
|
||||
import { useWorkspacePermissionsQuery } from '@/hooks/queries/workspace'
|
||||
import { useSettingsDirtyStore } from '@/stores/settings/dirty/store'
|
||||
|
||||
const logger = createLogger('SecretsManager')
|
||||
|
||||
@@ -323,7 +324,6 @@ export function CredentialsManager() {
|
||||
const [selectedDescriptionDraft, setSelectedDescriptionDraft] = useState('')
|
||||
const [copyIdSuccess, setCopyIdSuccess] = useState(false)
|
||||
const [detailsError, setDetailsError] = useState<string | null>(null)
|
||||
const [isSavingDetails, setIsSavingDetails] = useState(false)
|
||||
const [showDetailUnsavedChanges, setShowDetailUnsavedChanges] = useState(false)
|
||||
const [memberUserId, setMemberUserId] = useState('')
|
||||
const [memberRole, setMemberRole] = useState<WorkspaceCredentialRole>('member')
|
||||
@@ -350,8 +350,9 @@ export function CredentialsManager() {
|
||||
[envCredentials, selectedCredentialId]
|
||||
)
|
||||
|
||||
if (selectedCredential?.id !== prevSelectedCredentialId) {
|
||||
setPrevSelectedCredentialId(selectedCredential?.id ?? null)
|
||||
const currentCredentialId = selectedCredential?.id ?? null
|
||||
if (currentCredentialId !== prevSelectedCredentialId) {
|
||||
setPrevSelectedCredentialId(currentCredentialId)
|
||||
if (!selectedCredential) {
|
||||
setSelectedDescriptionDraft('')
|
||||
setSelectedDisplayNameDraft('')
|
||||
@@ -474,9 +475,23 @@ export function CredentialsManager() {
|
||||
return personalInvalid || workspaceInvalid
|
||||
}, [envVars, newWorkspaceRows])
|
||||
|
||||
const isListSaving =
|
||||
savePersonalMutation.isPending ||
|
||||
upsertWorkspaceMutation.isPending ||
|
||||
removeWorkspaceMutation.isPending
|
||||
|
||||
hasChangesRef.current = hasChanges
|
||||
shouldBlockNavRef.current = hasChanges || isDetailsDirty
|
||||
|
||||
const setNavGuardDirty = useSettingsDirtyStore((s) => s.setDirty)
|
||||
const resetNavGuard = useSettingsDirtyStore((s) => s.reset)
|
||||
|
||||
useEffect(() => {
|
||||
setNavGuardDirty(hasChanges || isDetailsDirty)
|
||||
}, [hasChanges, isDetailsDirty, setNavGuardDirty])
|
||||
|
||||
useEffect(() => () => resetNavGuard(), [resetNavGuard])
|
||||
|
||||
// --- Effects ---
|
||||
useEffect(() => {
|
||||
if (hasSavedRef.current) return
|
||||
@@ -652,12 +667,12 @@ export function CredentialsManager() {
|
||||
)
|
||||
|
||||
const handleBackAttempt = useCallback(() => {
|
||||
if (isDetailsDirty && !isSavingDetails) {
|
||||
if (isDetailsDirty && !updateCredential.isPending) {
|
||||
setShowDetailUnsavedChanges(true)
|
||||
} else {
|
||||
setSelectedCredentialId(null)
|
||||
}
|
||||
}, [isDetailsDirty, isSavingDetails])
|
||||
}, [isDetailsDirty, updateCredential.isPending])
|
||||
|
||||
const handleDiscardDetailChanges = useCallback(() => {
|
||||
setShowDetailUnsavedChanges(false)
|
||||
@@ -667,9 +682,9 @@ export function CredentialsManager() {
|
||||
}, [selectedCredential])
|
||||
|
||||
const handleSaveDetails = useCallback(async () => {
|
||||
if (!selectedCredential || !isSelectedAdmin || !isDetailsDirty) return
|
||||
if (!selectedCredential || !isSelectedAdmin || !isDetailsDirty || updateCredential.isPending)
|
||||
return
|
||||
setDetailsError(null)
|
||||
setIsSavingDetails(true)
|
||||
|
||||
try {
|
||||
if (isDisplayNameDirty || isDescriptionDirty) {
|
||||
@@ -683,8 +698,6 @@ export function CredentialsManager() {
|
||||
const message = error instanceof Error ? error.message : 'Failed to save changes'
|
||||
setDetailsError(message)
|
||||
logger.error('Failed to save secret details', error)
|
||||
} finally {
|
||||
setIsSavingDetails(false)
|
||||
}
|
||||
}, [
|
||||
selectedCredential,
|
||||
@@ -906,6 +919,8 @@ export function CredentialsManager() {
|
||||
const handleCancel = resetToSaved
|
||||
|
||||
const handleSave = useCallback(async () => {
|
||||
if (isListSaving) return
|
||||
|
||||
const prevInitialVars = [...initialVarsRef.current]
|
||||
const prevInitialWorkspaceVars = { ...initialWorkspaceVarsRef.current }
|
||||
|
||||
@@ -964,6 +979,7 @@ export function CredentialsManager() {
|
||||
logger.error('Failed to save environment variables:', error)
|
||||
}
|
||||
}, [
|
||||
isListSaving,
|
||||
envVars,
|
||||
workspaceVars,
|
||||
newWorkspaceRows,
|
||||
@@ -975,6 +991,7 @@ export function CredentialsManager() {
|
||||
|
||||
const handleDiscardAndNavigate = useCallback(() => {
|
||||
shouldBlockNavRef.current = false
|
||||
resetNavGuard()
|
||||
resetToSaved()
|
||||
setSelectedCredentialId(null)
|
||||
|
||||
@@ -983,7 +1000,7 @@ export function CredentialsManager() {
|
||||
pendingNavigationUrlRef.current = null
|
||||
router.push(url)
|
||||
}
|
||||
}, [router, resetToSaved])
|
||||
}, [router, resetToSaved, resetNavGuard])
|
||||
|
||||
const renderEnvVarRow = useCallback(
|
||||
(envVar: UIEnvironmentVariable, originalIndex: number) => {
|
||||
@@ -1316,9 +1333,9 @@ export function CredentialsManager() {
|
||||
<Button
|
||||
variant='primary'
|
||||
onClick={handleSaveDetails}
|
||||
disabled={!isDetailsDirty || isSavingDetails}
|
||||
disabled={!isDetailsDirty || updateCredential.isPending}
|
||||
>
|
||||
{isSavingDetails ? 'Saving...' : 'Save'}
|
||||
{updateCredential.isPending ? 'Saving...' : 'Save'}
|
||||
</Button>
|
||||
)}
|
||||
</div>
|
||||
@@ -1416,11 +1433,13 @@ export function CredentialsManager() {
|
||||
<Tooltip.Trigger asChild>
|
||||
<Button
|
||||
onClick={handleSave}
|
||||
disabled={isLoading || !hasChanges || hasConflicts || hasInvalidKeys}
|
||||
disabled={
|
||||
isLoading || !hasChanges || hasConflicts || hasInvalidKeys || isListSaving
|
||||
}
|
||||
variant='primary'
|
||||
className={`${hasConflicts || hasInvalidKeys ? 'cursor-not-allowed opacity-50' : ''}`}
|
||||
>
|
||||
Save
|
||||
{isListSaving ? 'Saving...' : 'Save'}
|
||||
</Button>
|
||||
</Tooltip.Trigger>
|
||||
{hasConflicts && <Tooltip.Content>Resolve all conflicts before saving</Tooltip.Content>}
|
||||
|
||||
@@ -54,6 +54,7 @@ import {
|
||||
} from '@/hooks/queries/oauth/oauth-connections'
|
||||
import { useWorkspacePermissionsQuery } from '@/hooks/queries/workspace'
|
||||
import { useOAuthReturnRouter } from '@/hooks/use-oauth-return'
|
||||
import { useSettingsDirtyStore } from '@/stores/settings/dirty/store'
|
||||
|
||||
const logger = createLogger('IntegrationsManager')
|
||||
|
||||
@@ -246,12 +247,20 @@ export function IntegrationsManager() {
|
||||
}, [selectedCredential, selectedDisplayNameDraft])
|
||||
|
||||
const isDetailsDirty = isDescriptionDirty || isDisplayNameDirty
|
||||
const [isSavingDetails, setIsSavingDetails] = useState(false)
|
||||
|
||||
const setNavGuardDirty = useSettingsDirtyStore((s) => s.setDirty)
|
||||
const resetNavGuard = useSettingsDirtyStore((s) => s.reset)
|
||||
|
||||
useEffect(() => {
|
||||
setNavGuardDirty(isDetailsDirty)
|
||||
}, [isDetailsDirty, setNavGuardDirty])
|
||||
|
||||
useEffect(() => () => resetNavGuard(), [resetNavGuard])
|
||||
|
||||
const handleSaveDetails = async () => {
|
||||
if (!selectedCredential || !isSelectedAdmin || !isDetailsDirty) return
|
||||
if (!selectedCredential || !isSelectedAdmin || !isDetailsDirty || updateCredential.isPending)
|
||||
return
|
||||
setDetailsError(null)
|
||||
setIsSavingDetails(true)
|
||||
|
||||
try {
|
||||
if (isDisplayNameDirty || isDescriptionDirty) {
|
||||
@@ -263,26 +272,22 @@ export function IntegrationsManager() {
|
||||
if (isDisplayNameDirty) setSelectedDisplayNameDraft((v) => v.trim())
|
||||
if (isDescriptionDirty) setSelectedDescriptionDraft((v) => v.trim())
|
||||
}
|
||||
|
||||
await refetchCredentials()
|
||||
} catch (error: unknown) {
|
||||
const message = error instanceof Error ? error.message : 'Failed to save changes'
|
||||
setDetailsError(message)
|
||||
logger.error('Failed to save credential details', error)
|
||||
} finally {
|
||||
setIsSavingDetails(false)
|
||||
}
|
||||
}
|
||||
|
||||
const handleBackAttempt = useCallback(() => {
|
||||
if (isDetailsDirty && !isSavingDetails) {
|
||||
if (isDetailsDirty && !updateCredential.isPending) {
|
||||
setShowUnsavedChangesAlert(true)
|
||||
} else {
|
||||
setSelectedCredentialId(null)
|
||||
setSelectedDescriptionDraft('')
|
||||
setSelectedDisplayNameDraft('')
|
||||
}
|
||||
}, [isDetailsDirty, isSavingDetails])
|
||||
}, [isDetailsDirty, updateCredential.isPending])
|
||||
|
||||
const handleDiscardChanges = useCallback(() => {
|
||||
setShowUnsavedChangesAlert(false)
|
||||
@@ -1430,9 +1435,9 @@ export function IntegrationsManager() {
|
||||
<Button
|
||||
variant='primary'
|
||||
onClick={handleSaveDetails}
|
||||
disabled={!isDetailsDirty || isSavingDetails}
|
||||
disabled={!isDetailsDirty || updateCredential.isPending}
|
||||
>
|
||||
{isSavingDetails ? 'Saving...' : 'Save'}
|
||||
{updateCredential.isPending ? 'Saving...' : 'Save'}
|
||||
</Button>
|
||||
)}
|
||||
</div>
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
'use client'
|
||||
|
||||
import { useMemo, useState } from 'react'
|
||||
import { Search } from 'lucide-react'
|
||||
import { Folder, Search } from 'lucide-react'
|
||||
import { useParams, useRouter } from 'next/navigation'
|
||||
import { Button, Combobox, SModalTabs, SModalTabsList, SModalTabsTrigger } from '@/components/emcn'
|
||||
import { Input } from '@/components/ui'
|
||||
@@ -9,6 +9,7 @@ import { formatDate } from '@/lib/core/utils/formatting'
|
||||
import { RESOURCE_REGISTRY } from '@/app/workspace/[workspaceId]/home/components/mothership-view/components/resource-registry'
|
||||
import type { MothershipResourceType } from '@/app/workspace/[workspaceId]/home/types'
|
||||
import { DeletedItemSkeleton } from '@/app/workspace/[workspaceId]/settings/components/recently-deleted/deleted-item-skeleton'
|
||||
import { useFolders, useRestoreFolder } from '@/hooks/queries/folders'
|
||||
import { useKnowledgeBasesQuery, useRestoreKnowledgeBase } from '@/hooks/queries/kb/knowledge'
|
||||
import { useRestoreTable, useTablesList } from '@/hooks/queries/tables'
|
||||
import { useRestoreWorkflow, useWorkflows } from '@/hooks/queries/workflows'
|
||||
@@ -29,10 +30,12 @@ function getResourceHref(
|
||||
return `${base}/knowledge/${id}`
|
||||
case 'file':
|
||||
return `${base}/files`
|
||||
case 'folder':
|
||||
return `${base}/w`
|
||||
}
|
||||
}
|
||||
|
||||
type ResourceType = 'all' | 'workflow' | 'table' | 'knowledge' | 'file'
|
||||
type ResourceType = 'all' | 'workflow' | 'table' | 'knowledge' | 'file' | 'folder'
|
||||
|
||||
type SortColumn = 'deleted' | 'name' | 'type'
|
||||
|
||||
@@ -51,7 +54,9 @@ const SORT_OPTIONS: { column: SortColumn; direction: 'asc' | 'desc'; label: stri
|
||||
|
||||
const ICON_CLASS = 'h-[14px] w-[14px]'
|
||||
|
||||
const RESOURCE_TYPE_TO_MOTHERSHIP: Record<Exclude<ResourceType, 'all'>, MothershipResourceType> = {
|
||||
const RESOURCE_TYPE_TO_MOTHERSHIP: Partial<
|
||||
Record<Exclude<ResourceType, 'all'>, MothershipResourceType>
|
||||
> = {
|
||||
workflow: 'workflow',
|
||||
table: 'table',
|
||||
knowledge: 'knowledgebase',
|
||||
@@ -70,6 +75,7 @@ interface DeletedResource {
|
||||
const TABS: { id: ResourceType; label: string }[] = [
|
||||
{ id: 'all', label: 'All' },
|
||||
{ id: 'workflow', label: 'Workflows' },
|
||||
{ id: 'folder', label: 'Folders' },
|
||||
{ id: 'table', label: 'Tables' },
|
||||
{ id: 'knowledge', label: 'Knowledge Bases' },
|
||||
{ id: 'file', label: 'Files' },
|
||||
@@ -77,6 +83,7 @@ const TABS: { id: ResourceType; label: string }[] = [
|
||||
|
||||
const TYPE_LABEL: Record<Exclude<ResourceType, 'all'>, string> = {
|
||||
workflow: 'Workflow',
|
||||
folder: 'Folder',
|
||||
table: 'Table',
|
||||
knowledge: 'Knowledge Base',
|
||||
file: 'File',
|
||||
@@ -97,7 +104,13 @@ function ResourceIcon({ resource }: { resource: DeletedResource }) {
|
||||
)
|
||||
}
|
||||
|
||||
if (resource.type === 'folder') {
|
||||
const color = resource.color ?? '#6B7280'
|
||||
return <Folder className={ICON_CLASS} style={{ color }} />
|
||||
}
|
||||
|
||||
const mothershipType = RESOURCE_TYPE_TO_MOTHERSHIP[resource.type]
|
||||
if (!mothershipType) return null
|
||||
const config = RESOURCE_REGISTRY[mothershipType]
|
||||
return (
|
||||
<>
|
||||
@@ -120,23 +133,30 @@ export function RecentlyDeleted() {
|
||||
const [restoredItems, setRestoredItems] = useState<Map<string, DeletedResource>>(new Map())
|
||||
|
||||
const workflowsQuery = useWorkflows(workspaceId, { scope: 'archived' })
|
||||
const foldersQuery = useFolders(workspaceId, { scope: 'archived' })
|
||||
const tablesQuery = useTablesList(workspaceId, 'archived')
|
||||
const knowledgeQuery = useKnowledgeBasesQuery(workspaceId, { scope: 'archived' })
|
||||
const filesQuery = useWorkspaceFiles(workspaceId, 'archived')
|
||||
|
||||
const restoreWorkflow = useRestoreWorkflow()
|
||||
const restoreFolder = useRestoreFolder()
|
||||
const restoreTable = useRestoreTable()
|
||||
const restoreKnowledgeBase = useRestoreKnowledgeBase()
|
||||
const restoreWorkspaceFile = useRestoreWorkspaceFile()
|
||||
|
||||
const isLoading =
|
||||
workflowsQuery.isLoading ||
|
||||
foldersQuery.isLoading ||
|
||||
tablesQuery.isLoading ||
|
||||
knowledgeQuery.isLoading ||
|
||||
filesQuery.isLoading
|
||||
|
||||
const error =
|
||||
workflowsQuery.error || tablesQuery.error || knowledgeQuery.error || filesQuery.error
|
||||
workflowsQuery.error ||
|
||||
foldersQuery.error ||
|
||||
tablesQuery.error ||
|
||||
knowledgeQuery.error ||
|
||||
filesQuery.error
|
||||
|
||||
const resources = useMemo<DeletedResource[]>(() => {
|
||||
const items: DeletedResource[] = []
|
||||
@@ -152,6 +172,17 @@ export function RecentlyDeleted() {
|
||||
})
|
||||
}
|
||||
|
||||
for (const folder of foldersQuery.data ?? []) {
|
||||
items.push({
|
||||
id: folder.id,
|
||||
name: folder.name,
|
||||
type: 'folder',
|
||||
deletedAt: folder.archivedAt ? new Date(folder.archivedAt) : new Date(folder.updatedAt),
|
||||
workspaceId: folder.workspaceId,
|
||||
color: folder.color,
|
||||
})
|
||||
}
|
||||
|
||||
for (const t of tablesQuery.data ?? []) {
|
||||
items.push({
|
||||
id: t.id,
|
||||
@@ -193,6 +224,7 @@ export function RecentlyDeleted() {
|
||||
return items
|
||||
}, [
|
||||
workflowsQuery.data,
|
||||
foldersQuery.data,
|
||||
tablesQuery.data,
|
||||
knowledgeQuery.data,
|
||||
filesQuery.data,
|
||||
@@ -250,6 +282,12 @@ export function RecentlyDeleted() {
|
||||
{ onSettled, onSuccess }
|
||||
)
|
||||
break
|
||||
case 'folder':
|
||||
restoreFolder.mutate(
|
||||
{ folderId: resource.id, workspaceId: resource.workspaceId },
|
||||
{ onSettled, onSuccess }
|
||||
)
|
||||
break
|
||||
case 'table':
|
||||
restoreTable.mutate(resource.id, { onSettled, onSuccess })
|
||||
break
|
||||
|
||||
@@ -10,11 +10,8 @@ import {
|
||||
} from '@/components/emcn'
|
||||
import { cn } from '@/lib/core/utils/cn'
|
||||
import { writePendingCredentialCreateRequest } from '@/lib/credentials/client-state'
|
||||
import {
|
||||
usePersonalEnvironment,
|
||||
useWorkspaceEnvironment,
|
||||
type WorkspaceEnvironmentData,
|
||||
} from '@/hooks/queries/environment'
|
||||
import type { WorkspaceEnvironmentData } from '@/lib/environment/api'
|
||||
import { usePersonalEnvironment, useWorkspaceEnvironment } from '@/hooks/queries/environment'
|
||||
import { useSettingsNavigation } from '@/hooks/use-settings-navigation'
|
||||
|
||||
/**
|
||||
|
||||
@@ -210,7 +210,7 @@ function WorkflowToolDeployBadge({
|
||||
workflowId: string
|
||||
onDeploySuccess?: () => void
|
||||
}) {
|
||||
const { data, isLoading } = useDeploymentInfo(workflowId)
|
||||
const { data, isLoading } = useDeploymentInfo(workflowId, { refetchOnMount: 'always' })
|
||||
const { mutate, isPending: isDeploying } = useDeployWorkflow()
|
||||
const userPermissions = useUserPermissionsContext()
|
||||
|
||||
@@ -1021,13 +1021,13 @@ export const ToolInput = memo(function ToolInput({
|
||||
[isPreview, disabled, selectedTools, setStoreValue]
|
||||
)
|
||||
|
||||
const [previewExpanded, setPreviewExpanded] = useState<Record<number, boolean>>({})
|
||||
const [localExpanded, setLocalExpanded] = useState<Record<number, boolean>>({})
|
||||
|
||||
const toggleToolExpansion = (toolIndex: number) => {
|
||||
if ((isPreview && !allowExpandInPreview) || disabled) return
|
||||
if (isPreview && !allowExpandInPreview) return
|
||||
|
||||
if (isPreview) {
|
||||
setPreviewExpanded((prev) => ({
|
||||
if (isPreview || disabled) {
|
||||
setLocalExpanded((prev) => ({
|
||||
...prev,
|
||||
[toolIndex]: !(prev[toolIndex] ?? !!selectedTools[toolIndex]?.isExpanded),
|
||||
}))
|
||||
@@ -1689,8 +1689,8 @@ export const ToolInput = memo(function ToolInput({
|
||||
const hasToolBody = hasOperations || hasParams
|
||||
|
||||
const isExpandedForDisplay = hasToolBody
|
||||
? isPreview
|
||||
? (previewExpanded[toolIndex] ?? !!tool.isExpanded)
|
||||
? isPreview || disabled
|
||||
? (localExpanded[toolIndex] ?? !!tool.isExpanded)
|
||||
: !!tool.isExpanded
|
||||
: false
|
||||
|
||||
|
||||
@@ -1,77 +1,18 @@
|
||||
import { useCallback, useMemo } from 'react'
|
||||
import type { CanonicalModeOverrides } from '@/lib/workflows/subblocks/visibility'
|
||||
import {
|
||||
buildCanonicalIndex,
|
||||
evaluateSubBlockCondition,
|
||||
isSubBlockFeatureEnabled,
|
||||
isSubBlockHidden,
|
||||
isSubBlockVisibleForMode,
|
||||
resolveDependencyValue,
|
||||
} from '@/lib/workflows/subblocks/visibility'
|
||||
import type { BlockConfig, SubBlockConfig, SubBlockType } from '@/blocks/types'
|
||||
import { useWorkspaceCredential } from '@/hooks/queries/credentials'
|
||||
import { usePermissionConfig } from '@/hooks/use-permission-config'
|
||||
import { useReactiveConditions } from '@/hooks/use-reactive-conditions'
|
||||
import { useWorkflowDiffStore } from '@/stores/workflow-diff'
|
||||
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
|
||||
import { mergeSubblockState } from '@/stores/workflows/utils'
|
||||
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
|
||||
|
||||
/**
|
||||
* Evaluates reactive conditions for subblocks. Always calls the same hooks
|
||||
* regardless of whether a reactive condition exists (Rules of Hooks).
|
||||
*
|
||||
* Returns a Set of subblock IDs that should be hidden.
|
||||
*/
|
||||
function useReactiveConditions(
|
||||
subBlocks: SubBlockConfig[],
|
||||
blockId: string,
|
||||
activeWorkflowId: string | null,
|
||||
canonicalModeOverrides?: CanonicalModeOverrides
|
||||
): Set<string> {
|
||||
const reactiveSubBlock = useMemo(() => subBlocks.find((sb) => sb.reactiveCondition), [subBlocks])
|
||||
const reactiveCond = reactiveSubBlock?.reactiveCondition
|
||||
|
||||
const canonicalIndex = useMemo(() => buildCanonicalIndex(subBlocks), [subBlocks])
|
||||
|
||||
// Resolve watchFields through canonical index to get the active credential value
|
||||
const watchedCredentialId = useSubBlockStore(
|
||||
useCallback(
|
||||
(state) => {
|
||||
if (!reactiveCond || !activeWorkflowId) return ''
|
||||
const blockValues = state.workflowValues[activeWorkflowId]?.[blockId] ?? {}
|
||||
for (const field of reactiveCond.watchFields) {
|
||||
const val = resolveDependencyValue(
|
||||
field,
|
||||
blockValues,
|
||||
canonicalIndex,
|
||||
canonicalModeOverrides
|
||||
)
|
||||
if (val && typeof val === 'string') return val
|
||||
}
|
||||
return ''
|
||||
},
|
||||
[reactiveCond, activeWorkflowId, blockId, canonicalIndex, canonicalModeOverrides]
|
||||
)
|
||||
)
|
||||
|
||||
// Always call useWorkspaceCredential (stable hook count), disable when not needed
|
||||
const { data: credential } = useWorkspaceCredential(
|
||||
watchedCredentialId || undefined,
|
||||
Boolean(reactiveCond && watchedCredentialId)
|
||||
)
|
||||
|
||||
return useMemo(() => {
|
||||
const hidden = new Set<string>()
|
||||
if (!reactiveSubBlock || !reactiveCond) return hidden
|
||||
|
||||
const conditionMet = credential?.type === reactiveCond.requiredType
|
||||
if (!conditionMet) {
|
||||
hidden.add(reactiveSubBlock.id)
|
||||
}
|
||||
return hidden
|
||||
}, [reactiveSubBlock, reactiveCond, credential?.type])
|
||||
}
|
||||
|
||||
/**
|
||||
* Custom hook for computing subblock layout in the editor panel.
|
||||
* Determines which subblocks should be visible based on mode, conditions, and feature flags.
|
||||
|
||||
@@ -11,8 +11,9 @@ export interface UseChildWorkflowReturn {
|
||||
|
||||
/**
|
||||
* Manages child workflow deployment status for workflow selector blocks.
|
||||
* Uses the shared useDeploymentInfo query (same source of truth as the
|
||||
* editor header's Deploy button) for consistent deployment detection.
|
||||
* Uses useDeploymentInfo which computes needsRedeployment server-side via
|
||||
* hasWorkflowChanged — the same comparison the deploy button uses — so the
|
||||
* badge stays aligned with the child workflow's Live/Update header.
|
||||
*/
|
||||
export function useChildWorkflow(
|
||||
blockId: string,
|
||||
@@ -39,7 +40,8 @@ export function useChildWorkflow(
|
||||
}
|
||||
|
||||
const { data, isPending } = useDeploymentInfo(
|
||||
isWorkflowSelector ? (childWorkflowId ?? null) : null
|
||||
isWorkflowSelector ? (childWorkflowId ?? null) : null,
|
||||
{ refetchOnMount: 'always' }
|
||||
)
|
||||
|
||||
const childIsDeployed = data?.isDeployed ?? null
|
||||
|
||||
@@ -47,6 +47,7 @@ import { useReactivateSchedule, useScheduleInfo } from '@/hooks/queries/schedule
|
||||
import { useSkills } from '@/hooks/queries/skills'
|
||||
import { useTablesList } from '@/hooks/queries/tables'
|
||||
import { useWorkflowMap } from '@/hooks/queries/workflows'
|
||||
import { useReactiveConditions } from '@/hooks/use-reactive-conditions'
|
||||
import { useSelectorDisplayName } from '@/hooks/use-selector-display-name'
|
||||
import { useVariablesStore } from '@/stores/variables/store'
|
||||
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
|
||||
@@ -942,6 +943,13 @@ export const WorkflowBlock = memo(function WorkflowBlock({
|
||||
const canonicalIndex = useMemo(() => buildCanonicalIndex(config.subBlocks), [config.subBlocks])
|
||||
const canonicalModeOverrides = currentStoreBlock?.data?.canonicalModes
|
||||
|
||||
const hiddenByReactiveCondition = useReactiveConditions(
|
||||
config.subBlocks,
|
||||
id,
|
||||
activeWorkflowId,
|
||||
canonicalModeOverrides
|
||||
)
|
||||
|
||||
const subBlockRowsData = useMemo(() => {
|
||||
const rows: SubBlockConfig[][] = []
|
||||
let currentRow: SubBlockConfig[] = []
|
||||
@@ -979,6 +987,7 @@ export const WorkflowBlock = memo(function WorkflowBlock({
|
||||
const visibleSubBlocks = config.subBlocks.filter((block) => {
|
||||
if (block.hidden) return false
|
||||
if (block.hideFromPreview) return false
|
||||
if (hiddenByReactiveCondition.has(block.id)) return false
|
||||
if (!isSubBlockFeatureEnabled(block)) return false
|
||||
if (isSubBlockHidden(block)) return false
|
||||
|
||||
@@ -1047,6 +1056,7 @@ export const WorkflowBlock = memo(function WorkflowBlock({
|
||||
canonicalModeOverrides,
|
||||
userPermissions.canEdit,
|
||||
canonicalIndex,
|
||||
hiddenByReactiveCondition,
|
||||
blockSubBlockValues,
|
||||
activeWorkflowId,
|
||||
])
|
||||
|
||||
@@ -134,7 +134,7 @@ const WorkflowEdgeComponent = ({
|
||||
position: 'absolute',
|
||||
transform: `translate(-50%, -50%) translate(${labelX}px,${labelY}px)`,
|
||||
pointerEvents: 'all',
|
||||
zIndex: 100,
|
||||
zIndex: 1011,
|
||||
}}
|
||||
onClick={(e) => {
|
||||
e.preventDefault()
|
||||
|
||||
@@ -2772,10 +2772,37 @@ const WorkflowContent = React.memo(
|
||||
(changes: NodeChange[]) => {
|
||||
const hasSelectionChange = changes.some((c) => c.type === 'select')
|
||||
setDisplayNodes((currentNodes) => {
|
||||
const updated = applyNodeChanges(changes, currentNodes)
|
||||
// Filter out cross-context selection changes before applying so that
|
||||
// nodes at a different nesting level never appear selected, even for
|
||||
// a single frame.
|
||||
let changesToApply = changes
|
||||
if (hasSelectionChange) {
|
||||
const currentlySelected = currentNodes.filter((n) => n.selected)
|
||||
// Only filter on additive multi-select (shift-click), not replacement
|
||||
// clicks. A replacement click includes deselections of currently selected
|
||||
// nodes; a shift-click only adds selections.
|
||||
const isReplacementClick = changes.some(
|
||||
(c) =>
|
||||
c.type === 'select' &&
|
||||
'selected' in c &&
|
||||
!c.selected &&
|
||||
currentlySelected.some((n) => n.id === c.id)
|
||||
)
|
||||
if (!isReplacementClick && currentlySelected.length > 0) {
|
||||
const selectionContext = getNodeSelectionContextId(currentlySelected[0], blocks)
|
||||
changesToApply = changes.filter((c) => {
|
||||
if (c.type !== 'select' || !('selected' in c) || !c.selected) return true
|
||||
const node = currentNodes.find((n) => n.id === c.id)
|
||||
if (!node) return true
|
||||
return getNodeSelectionContextId(node, blocks) === selectionContext
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
const updated = applyNodeChanges(changesToApply, currentNodes)
|
||||
if (!hasSelectionChange) return updated
|
||||
|
||||
const preferredNodeId = [...changes]
|
||||
const preferredNodeId = [...changesToApply]
|
||||
.reverse()
|
||||
.find(
|
||||
(change): change is NodeChange & { id: string; selected: boolean } =>
|
||||
@@ -3271,12 +3298,17 @@ const WorkflowContent = React.memo(
|
||||
previousPositions: multiNodeDragStartRef.current,
|
||||
})
|
||||
|
||||
// Process parent updates using shared helper
|
||||
executeBatchParentUpdate(
|
||||
selectedNodes,
|
||||
potentialParentId,
|
||||
'Batch moved nodes to new parent'
|
||||
)
|
||||
// Only reparent when an actual drag changed the target container.
|
||||
// onNodeDragStart sets both potentialParentId and dragStartParentId to the
|
||||
// clicked node's current parent; they only diverge when onNodeDrag detects
|
||||
// the selection being dragged over a different container.
|
||||
if (potentialParentId !== dragStartParentId) {
|
||||
executeBatchParentUpdate(
|
||||
selectedNodes,
|
||||
potentialParentId,
|
||||
'Batch moved nodes to new parent'
|
||||
)
|
||||
}
|
||||
|
||||
// Clear drag start state
|
||||
setDragStartPosition(null)
|
||||
@@ -3687,6 +3719,20 @@ const WorkflowContent = React.memo(
|
||||
const handleNodeClick = useCallback(
|
||||
(event: React.MouseEvent, node: Node) => {
|
||||
const isMultiSelect = event.shiftKey || event.metaKey || event.ctrlKey
|
||||
|
||||
// Ignore shift-clicks on nodes at a different nesting level
|
||||
if (isMultiSelect) {
|
||||
const clickedContext = getNodeSelectionContextId(node, blocks)
|
||||
const currentlySelected = getNodes().filter((n) => n.selected)
|
||||
if (currentlySelected.length > 0) {
|
||||
const selectionContext = getNodeSelectionContextId(currentlySelected[0], blocks)
|
||||
if (clickedContext !== selectionContext) {
|
||||
usePanelEditorStore.getState().clearCurrentBlock()
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
setDisplayNodes((currentNodes) => {
|
||||
const updated = currentNodes.map((currentNode) => ({
|
||||
...currentNode,
|
||||
@@ -3699,7 +3745,7 @@ const WorkflowContent = React.memo(
|
||||
return resolveSelectionConflicts(updated, blocks, isMultiSelect ? node.id : undefined)
|
||||
})
|
||||
},
|
||||
[blocks]
|
||||
[blocks, getNodes]
|
||||
)
|
||||
|
||||
/** Handles edge selection with container context tracking and Shift-click multi-selection. */
|
||||
@@ -3808,9 +3854,17 @@ const WorkflowContent = React.memo(
|
||||
(targetNode?.zIndex ?? 21) + 1
|
||||
)
|
||||
|
||||
// Edges inside subflows need a z-index above the container's body area
|
||||
// (which has pointer-events: auto) so they're directly clickable.
|
||||
// Derive from the container's depth-based zIndex (+1) so the edge sits
|
||||
// just above its parent container but below canvas blocks (z-21+) and
|
||||
// child blocks (z-1000).
|
||||
const containerNode = parentLoopId ? nodeMap.get(parentLoopId) : null
|
||||
const baseZIndex = containerNode ? (containerNode.zIndex ?? 0) + 1 : 0
|
||||
|
||||
return {
|
||||
...edge,
|
||||
zIndex: connectedToElevated ? elevatedZIndex : 0,
|
||||
zIndex: connectedToElevated ? elevatedZIndex : baseZIndex,
|
||||
data: {
|
||||
...edge.data,
|
||||
isSelected: selectedEdges.has(edgeContextId),
|
||||
|
||||
@@ -1,9 +1,18 @@
|
||||
'use client'
|
||||
|
||||
import { useCallback, useMemo } from 'react'
|
||||
import { useCallback, useMemo, useState } from 'react'
|
||||
import { useQueryClient } from '@tanstack/react-query'
|
||||
import { useParams, usePathname, useRouter } from 'next/navigation'
|
||||
import { ChevronDown, Skeleton } from '@/components/emcn'
|
||||
import {
|
||||
Button,
|
||||
ChevronDown,
|
||||
Modal,
|
||||
ModalBody,
|
||||
ModalContent,
|
||||
ModalFooter,
|
||||
ModalHeader,
|
||||
Skeleton,
|
||||
} from '@/components/emcn'
|
||||
import { useSession } from '@/lib/auth/auth-client'
|
||||
import { getSubscriptionAccessState } from '@/lib/billing/client'
|
||||
import { isHosted } from '@/lib/core/config/feature-flags'
|
||||
@@ -23,6 +32,7 @@ import { useOrganizations } from '@/hooks/queries/organization'
|
||||
import { prefetchSubscriptionData, useSubscriptionData } from '@/hooks/queries/subscription'
|
||||
import { usePermissionConfig } from '@/hooks/use-permission-config'
|
||||
import { useSettingsNavigation } from '@/hooks/use-settings-navigation'
|
||||
import { useSettingsDirtyStore } from '@/stores/settings/dirty/store'
|
||||
|
||||
const SKELETON_SECTIONS = [3, 2, 2] as const
|
||||
|
||||
@@ -41,6 +51,13 @@ export function SettingsSidebar({
|
||||
const router = useRouter()
|
||||
|
||||
const queryClient = useQueryClient()
|
||||
|
||||
const requestNavigation = useSettingsDirtyStore((s) => s.requestNavigation)
|
||||
const confirmNavigation = useSettingsDirtyStore((s) => s.confirmNavigation)
|
||||
const cancelNavigation = useSettingsDirtyStore((s) => s.cancelNavigation)
|
||||
const isDirty = useSettingsDirtyStore((s) => s.isDirty)
|
||||
const [showDiscardDialog, setShowDiscardDialog] = useState(false)
|
||||
|
||||
const { data: session, isPending: sessionLoading } = useSession()
|
||||
const { data: organizationsData, isLoading: orgsLoading } = useOrganizations()
|
||||
const { data: generalSettings } = useGeneralSettings()
|
||||
@@ -180,8 +197,27 @@ export function SettingsSidebar({
|
||||
const { popSettingsReturnUrl, getSettingsHref } = useSettingsNavigation()
|
||||
|
||||
const handleBack = useCallback(() => {
|
||||
if (isDirty) {
|
||||
setShowDiscardDialog(true)
|
||||
return
|
||||
}
|
||||
router.push(popSettingsReturnUrl(`/workspace/${workspaceId}/home`))
|
||||
}, [router, popSettingsReturnUrl, workspaceId])
|
||||
}, [router, popSettingsReturnUrl, workspaceId, isDirty])
|
||||
|
||||
const handleConfirmDiscard = useCallback(() => {
|
||||
const section = confirmNavigation()
|
||||
setShowDiscardDialog(false)
|
||||
if (section) {
|
||||
router.replace(getSettingsHref({ section }), { scroll: false })
|
||||
} else {
|
||||
router.push(popSettingsReturnUrl(`/workspace/${workspaceId}/home`))
|
||||
}
|
||||
}, [confirmNavigation, router, getSettingsHref, popSettingsReturnUrl, workspaceId])
|
||||
|
||||
const handleCancelDiscard = useCallback(() => {
|
||||
cancelNavigation()
|
||||
setShowDiscardDialog(false)
|
||||
}, [cancelNavigation])
|
||||
|
||||
return (
|
||||
<>
|
||||
@@ -286,11 +322,15 @@ export function SettingsSidebar({
|
||||
className={itemClassName}
|
||||
onMouseEnter={() => handlePrefetch(item.id)}
|
||||
onFocus={() => handlePrefetch(item.id)}
|
||||
onClick={() =>
|
||||
router.replace(getSettingsHref({ section: item.id as SettingsSection }), {
|
||||
scroll: false,
|
||||
})
|
||||
}
|
||||
onClick={() => {
|
||||
const section = item.id as SettingsSection
|
||||
if (section === activeSection) return
|
||||
if (!requestNavigation(section)) {
|
||||
setShowDiscardDialog(true)
|
||||
return
|
||||
}
|
||||
router.replace(getSettingsHref({ section }), { scroll: false })
|
||||
}}
|
||||
>
|
||||
{content}
|
||||
</button>
|
||||
@@ -312,6 +352,25 @@ export function SettingsSidebar({
|
||||
})
|
||||
)}
|
||||
</div>
|
||||
|
||||
<Modal open={showDiscardDialog} onOpenChange={(open) => !open && handleCancelDiscard()}>
|
||||
<ModalContent size='sm'>
|
||||
<ModalHeader>Unsaved Changes</ModalHeader>
|
||||
<ModalBody>
|
||||
<p className='text-[var(--text-secondary)]'>
|
||||
You have unsaved changes. Are you sure you want to discard them?
|
||||
</p>
|
||||
</ModalBody>
|
||||
<ModalFooter>
|
||||
<Button variant='default' onClick={handleCancelDiscard}>
|
||||
Keep Editing
|
||||
</Button>
|
||||
<Button variant='destructive' onClick={handleConfirmDiscard}>
|
||||
Discard Changes
|
||||
</Button>
|
||||
</ModalFooter>
|
||||
</ModalContent>
|
||||
</Modal>
|
||||
</>
|
||||
)
|
||||
}
|
||||
|
||||
@@ -64,7 +64,7 @@ export function DeleteModal({
|
||||
title = 'Delete Workspace'
|
||||
}
|
||||
|
||||
const restorableTypes = new Set<string>(['workflow'])
|
||||
const restorableTypes = new Set<string>(['workflow', 'folder', 'mixed'])
|
||||
|
||||
const renderDescription = () => {
|
||||
if (itemType === 'workflow') {
|
||||
@@ -113,8 +113,7 @@ export function DeleteModal({
|
||||
</span>
|
||||
?{' '}
|
||||
<span className='text-[var(--text-error)]'>
|
||||
This will permanently remove all workflows, logs, and knowledge bases within these
|
||||
folders.
|
||||
All workflows and contents within these folders will be archived.
|
||||
</span>
|
||||
</>
|
||||
)
|
||||
@@ -125,7 +124,7 @@ export function DeleteModal({
|
||||
Are you sure you want to delete{' '}
|
||||
<span className='font-medium text-[var(--text-primary)]'>{displayNames[0]}</span>?{' '}
|
||||
<span className='text-[var(--text-error)]'>
|
||||
This will permanently remove all associated workflows, logs, and knowledge bases.
|
||||
All associated workflows and contents will be archived.
|
||||
</span>
|
||||
</>
|
||||
)
|
||||
@@ -134,7 +133,7 @@ export function DeleteModal({
|
||||
<>
|
||||
Are you sure you want to delete this folder?{' '}
|
||||
<span className='text-[var(--text-error)]'>
|
||||
This will permanently remove all associated workflows, logs, and knowledge bases.
|
||||
All associated workflows and contents will be archived.
|
||||
</span>
|
||||
</>
|
||||
)
|
||||
@@ -186,8 +185,7 @@ export function DeleteModal({
|
||||
</span>
|
||||
?{' '}
|
||||
<span className='text-[var(--text-error)]'>
|
||||
This will permanently remove all selected workflows and folders, including their
|
||||
contents.
|
||||
All selected workflows and folders, including their contents, will be archived.
|
||||
</span>
|
||||
</>
|
||||
)
|
||||
@@ -196,8 +194,7 @@ export function DeleteModal({
|
||||
<>
|
||||
Are you sure you want to delete the selected items?{' '}
|
||||
<span className='text-[var(--text-error)]'>
|
||||
This will permanently remove all selected workflows and folders, including their
|
||||
contents.
|
||||
All selected workflows and folders, including their contents, will be archived.
|
||||
</span>
|
||||
</>
|
||||
)
|
||||
@@ -238,7 +235,7 @@ export function DeleteModal({
|
||||
You can restore it from Recently Deleted in Settings.
|
||||
</span>
|
||||
) : (
|
||||
<span className='text-[var(--text-error)]'>This action cannot be undone.</span>
|
||||
<span className='text-[var(--text-tertiary)]'>This action cannot be undone.</span>
|
||||
)}
|
||||
</p>
|
||||
</ModalBody>
|
||||
|
||||
@@ -614,7 +614,7 @@ export function InviteModal({ open, onOpenChange, workspaceName }: InviteModalPr
|
||||
{memberToRemove?.email}
|
||||
</span>{' '}
|
||||
from this workspace?{' '}
|
||||
<span className='text-[var(--text-error)]'>This action cannot be undone.</span>
|
||||
<span className='text-[var(--text-tertiary)]'>This action cannot be undone.</span>
|
||||
</p>
|
||||
</ModalBody>
|
||||
<ModalFooter>
|
||||
@@ -646,7 +646,7 @@ export function InviteModal({ open, onOpenChange, workspaceName }: InviteModalPr
|
||||
<span className='font-medium text-[var(--text-primary)]'>
|
||||
{invitationToRemove?.email}
|
||||
</span>
|
||||
? <span className='text-[var(--text-error)]'>This action cannot be undone.</span>
|
||||
? <span className='text-[var(--text-tertiary)]'>This action cannot be undone.</span>
|
||||
</p>
|
||||
</ModalBody>
|
||||
<ModalFooter>
|
||||
|
||||
@@ -668,7 +668,7 @@ export function WorkspaceHeader({
|
||||
Are you sure you want to leave{' '}
|
||||
<span className='font-base text-[var(--text-primary)]'>{leaveTarget?.name}</span>? You
|
||||
will lose access to all workflows and data in this workspace.{' '}
|
||||
<span className='text-[var(--text-error)]'>This action cannot be undone.</span>
|
||||
<span className='text-[var(--text-tertiary)]'>This action cannot be undone.</span>
|
||||
</p>
|
||||
</ModalBody>
|
||||
<ModalFooter>
|
||||
|
||||
@@ -166,6 +166,7 @@ export function SocketProvider({ children, user }: SocketProviderProps) {
|
||||
const positionUpdateTimeouts = useRef<Map<string, number>>(new Map())
|
||||
const isRejoiningRef = useRef<boolean>(false)
|
||||
const pendingPositionUpdates = useRef<Map<string, any>>(new Map())
|
||||
const deletedWorkflowIdRef = useRef<string | null>(null)
|
||||
|
||||
const generateSocketToken = async (): Promise<string> => {
|
||||
const res = await fetch('/api/auth/socket-token', {
|
||||
@@ -371,6 +372,7 @@ export function SocketProvider({ children, user }: SocketProviderProps) {
|
||||
|
||||
socketInstance.on('workflow-deleted', (data) => {
|
||||
logger.warn(`Workflow ${data.workflowId} has been deleted`)
|
||||
deletedWorkflowIdRef.current = data.workflowId
|
||||
setCurrentWorkflowId((current) => {
|
||||
if (current === data.workflowId) {
|
||||
setPresenceUsers([])
|
||||
@@ -500,7 +502,11 @@ export function SocketProvider({ children, user }: SocketProviderProps) {
|
||||
if (error?.type === 'SESSION_ERROR') {
|
||||
const workflowId = urlWorkflowIdRef.current
|
||||
|
||||
if (workflowId && !isRejoiningRef.current) {
|
||||
if (
|
||||
workflowId &&
|
||||
!isRejoiningRef.current &&
|
||||
deletedWorkflowIdRef.current !== workflowId
|
||||
) {
|
||||
isRejoiningRef.current = true
|
||||
logger.info(`Session expired, rejoining workflow: ${workflowId}`)
|
||||
socketInstance.emit('join-workflow', {
|
||||
@@ -552,13 +558,25 @@ export function SocketProvider({ children, user }: SocketProviderProps) {
|
||||
const hydrationPhase = useWorkflowRegistryStore((s) => s.hydration.phase)
|
||||
|
||||
useEffect(() => {
|
||||
if (!socket || !isConnected || !urlWorkflowId) return
|
||||
if (!socket || !isConnected || !urlWorkflowId) {
|
||||
if (!urlWorkflowId) {
|
||||
deletedWorkflowIdRef.current = null
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
if (hydrationPhase === 'creating') return
|
||||
|
||||
// Skip if already in the correct room
|
||||
if (currentWorkflowId === urlWorkflowId) return
|
||||
|
||||
// Prevent rejoining a workflow that was just deleted. The URL param may
|
||||
// still reference the old workflow while router.push() propagates.
|
||||
if (deletedWorkflowIdRef.current === urlWorkflowId) {
|
||||
return
|
||||
}
|
||||
deletedWorkflowIdRef.current = null
|
||||
|
||||
logger.info(
|
||||
`URL workflow changed from ${currentWorkflowId} to ${urlWorkflowId}, switching rooms`
|
||||
)
|
||||
|
||||
@@ -13,7 +13,7 @@ import {
|
||||
executeWorkflowCore,
|
||||
wasExecutionFinalizedByCore,
|
||||
} from '@/lib/workflows/executor/execution-core'
|
||||
import { PauseResumeManager } from '@/lib/workflows/executor/human-in-the-loop-manager'
|
||||
import { handlePostExecutionPauseState } from '@/lib/workflows/executor/pause-persistence'
|
||||
import {
|
||||
blockExistsInDeployment,
|
||||
loadDeployedWorkflowState,
|
||||
@@ -237,33 +237,13 @@ async function runWorkflowExecution({
|
||||
timeoutMs: timeoutController.timeoutMs,
|
||||
})
|
||||
await loggingSession.markAsFailed(timeoutErrorMessage)
|
||||
} else if (executionResult.status === 'paused') {
|
||||
if (!executionResult.snapshotSeed) {
|
||||
logger.error(`[${requestId}] Missing snapshot seed for paused execution`, {
|
||||
executionId,
|
||||
})
|
||||
await loggingSession.markAsFailed('Missing snapshot seed for paused execution')
|
||||
} else {
|
||||
try {
|
||||
await PauseResumeManager.persistPauseResult({
|
||||
workflowId: payload.workflowId,
|
||||
executionId,
|
||||
pausePoints: executionResult.pausePoints || [],
|
||||
snapshotSeed: executionResult.snapshotSeed,
|
||||
executorUserId: executionResult.metadata?.userId,
|
||||
})
|
||||
} catch (pauseError) {
|
||||
logger.error(`[${requestId}] Failed to persist pause result`, {
|
||||
executionId,
|
||||
error: pauseError instanceof Error ? pauseError.message : String(pauseError),
|
||||
})
|
||||
await loggingSession.markAsFailed(
|
||||
`Failed to persist pause state: ${pauseError instanceof Error ? pauseError.message : String(pauseError)}`
|
||||
)
|
||||
}
|
||||
}
|
||||
} else {
|
||||
await PauseResumeManager.processQueuedResumes(executionId)
|
||||
await handlePostExecutionPauseState({
|
||||
result: executionResult,
|
||||
workflowId: payload.workflowId,
|
||||
executionId,
|
||||
loggingSession,
|
||||
})
|
||||
}
|
||||
|
||||
await loggingSession.waitForPostExecution()
|
||||
|
||||
@@ -7,22 +7,22 @@ import type { AsyncExecutionCorrelation } from '@/lib/core/async-jobs/types'
|
||||
import { createTimeoutAbortController, getTimeoutErrorMessage } from '@/lib/core/execution-limits'
|
||||
import { IdempotencyService, webhookIdempotency } from '@/lib/core/idempotency'
|
||||
import { generateId } from '@/lib/core/utils/uuid'
|
||||
import { processExecutionFiles } from '@/lib/execution/files'
|
||||
import { preprocessExecution } from '@/lib/execution/preprocessing'
|
||||
import { LoggingSession } from '@/lib/logs/execution/logging-session'
|
||||
import { buildTraceSpans } from '@/lib/logs/execution/trace-spans/trace-spans'
|
||||
import { WebhookAttachmentProcessor } from '@/lib/webhooks/attachment-processor'
|
||||
import { fetchAndProcessAirtablePayloads, formatWebhookInput } from '@/lib/webhooks/utils.server'
|
||||
import { getProviderHandler } from '@/lib/webhooks/providers'
|
||||
import {
|
||||
executeWorkflowCore,
|
||||
wasExecutionFinalizedByCore,
|
||||
} from '@/lib/workflows/executor/execution-core'
|
||||
import { PauseResumeManager } from '@/lib/workflows/executor/human-in-the-loop-manager'
|
||||
import { handlePostExecutionPauseState } from '@/lib/workflows/executor/pause-persistence'
|
||||
import { loadDeployedWorkflowState } from '@/lib/workflows/persistence/utils'
|
||||
import { resolveOAuthAccountId } from '@/app/api/auth/oauth/utils'
|
||||
import { getBlock } from '@/blocks'
|
||||
import { ExecutionSnapshot } from '@/executor/execution/snapshot'
|
||||
import type { ExecutionMetadata } from '@/executor/execution/types'
|
||||
import type { ExecutionResult } from '@/executor/types'
|
||||
import { hasExecutionResult } from '@/executor/utils/errors'
|
||||
import { safeAssign } from '@/tools/safe-assign'
|
||||
import { getTrigger, isTriggerValid } from '@/triggers'
|
||||
@@ -48,12 +48,12 @@ export function buildWebhookCorrelation(
|
||||
}
|
||||
|
||||
/**
|
||||
* Process trigger outputs based on their schema definitions
|
||||
* Finds outputs marked as 'file' or 'file[]' and uploads them to execution storage
|
||||
* Process trigger outputs based on their schema definitions.
|
||||
* Finds outputs marked as 'file' or 'file[]' and uploads them to execution storage.
|
||||
*/
|
||||
async function processTriggerFileOutputs(
|
||||
input: any,
|
||||
triggerOutputs: Record<string, any>,
|
||||
input: unknown,
|
||||
triggerOutputs: Record<string, unknown>,
|
||||
context: {
|
||||
workspaceId: string
|
||||
workflowId: string
|
||||
@@ -62,29 +62,31 @@ async function processTriggerFileOutputs(
|
||||
userId?: string
|
||||
},
|
||||
path = ''
|
||||
): Promise<any> {
|
||||
): Promise<unknown> {
|
||||
if (!input || typeof input !== 'object') {
|
||||
return input
|
||||
}
|
||||
|
||||
const processed: any = Array.isArray(input) ? [] : {}
|
||||
const processed = (Array.isArray(input) ? [] : {}) as Record<string, unknown>
|
||||
|
||||
for (const [key, value] of Object.entries(input)) {
|
||||
const currentPath = path ? `${path}.${key}` : key
|
||||
const outputDef = triggerOutputs[key]
|
||||
const val: any = value
|
||||
const outputDef = triggerOutputs[key] as Record<string, unknown> | undefined
|
||||
const val = value as Record<string, unknown>
|
||||
|
||||
// If this field is marked as file or file[], process it
|
||||
if (outputDef?.type === 'file[]' && Array.isArray(val)) {
|
||||
try {
|
||||
processed[key] = await WebhookAttachmentProcessor.processAttachments(val as any, context)
|
||||
processed[key] = await WebhookAttachmentProcessor.processAttachments(
|
||||
val as unknown as Parameters<typeof WebhookAttachmentProcessor.processAttachments>[0],
|
||||
context
|
||||
)
|
||||
} catch (error) {
|
||||
processed[key] = []
|
||||
}
|
||||
} else if (outputDef?.type === 'file' && val) {
|
||||
try {
|
||||
const [processedFile] = await WebhookAttachmentProcessor.processAttachments(
|
||||
[val as any],
|
||||
[val] as unknown as Parameters<typeof WebhookAttachmentProcessor.processAttachments>[0],
|
||||
context
|
||||
)
|
||||
processed[key] = processedFile
|
||||
@@ -98,18 +100,20 @@ async function processTriggerFileOutputs(
|
||||
(outputDef.type === 'object' || outputDef.type === 'json') &&
|
||||
outputDef.properties
|
||||
) {
|
||||
// Explicit object schema with properties - recurse into properties
|
||||
processed[key] = await processTriggerFileOutputs(
|
||||
val,
|
||||
outputDef.properties,
|
||||
outputDef.properties as Record<string, unknown>,
|
||||
context,
|
||||
currentPath
|
||||
)
|
||||
} else if (outputDef && typeof outputDef === 'object' && !outputDef.type) {
|
||||
// Nested object in schema (flat pattern) - recurse with the nested schema
|
||||
processed[key] = await processTriggerFileOutputs(val, outputDef, context, currentPath)
|
||||
processed[key] = await processTriggerFileOutputs(
|
||||
val,
|
||||
outputDef as Record<string, unknown>,
|
||||
context,
|
||||
currentPath
|
||||
)
|
||||
} else {
|
||||
// Not a file output - keep as is
|
||||
processed[key] = val
|
||||
}
|
||||
}
|
||||
@@ -125,7 +129,7 @@ export type WebhookExecutionPayload = {
|
||||
requestId?: string
|
||||
correlation?: AsyncExecutionCorrelation
|
||||
provider: string
|
||||
body: any
|
||||
body: unknown
|
||||
headers: Record<string, string>
|
||||
path: string
|
||||
blockId?: string
|
||||
@@ -164,9 +168,6 @@ export async function executeWebhookJob(payload: WebhookExecutionPayload) {
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolve the account userId for a credential
|
||||
*/
|
||||
async function resolveCredentialAccountUserId(credentialId: string): Promise<string | undefined> {
|
||||
const resolved = await resolveOAuthAccountId(credentialId)
|
||||
if (!resolved) {
|
||||
@@ -180,6 +181,42 @@ async function resolveCredentialAccountUserId(credentialId: string): Promise<str
|
||||
return credentialRecord?.userId
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle execution result status (timeout, pause, resume).
|
||||
* Shared between all provider paths to eliminate duplication.
|
||||
*/
|
||||
async function handleExecutionResult(
|
||||
executionResult: ExecutionResult,
|
||||
ctx: {
|
||||
loggingSession: LoggingSession
|
||||
timeoutController: ReturnType<typeof createTimeoutAbortController>
|
||||
requestId: string
|
||||
executionId: string
|
||||
workflowId: string
|
||||
}
|
||||
) {
|
||||
if (
|
||||
executionResult.status === 'cancelled' &&
|
||||
ctx.timeoutController.isTimedOut() &&
|
||||
ctx.timeoutController.timeoutMs
|
||||
) {
|
||||
const timeoutErrorMessage = getTimeoutErrorMessage(null, ctx.timeoutController.timeoutMs)
|
||||
logger.info(`[${ctx.requestId}] Webhook execution timed out`, {
|
||||
timeoutMs: ctx.timeoutController.timeoutMs,
|
||||
})
|
||||
await ctx.loggingSession.markAsFailed(timeoutErrorMessage)
|
||||
} else {
|
||||
await handlePostExecutionPauseState({
|
||||
result: executionResult,
|
||||
workflowId: ctx.workflowId,
|
||||
executionId: ctx.executionId,
|
||||
loggingSession: ctx.loggingSession,
|
||||
})
|
||||
}
|
||||
|
||||
await ctx.loggingSession.waitForPostExecution()
|
||||
}
|
||||
|
||||
async function executeWebhookJobInternal(
|
||||
payload: WebhookExecutionPayload,
|
||||
correlation: AsyncExecutionCorrelation
|
||||
@@ -192,7 +229,6 @@ async function executeWebhookJobInternal(
|
||||
requestId
|
||||
)
|
||||
|
||||
// Resolve workflow record, billing actor, subscription, and timeout
|
||||
const preprocessResult = await preprocessExecution({
|
||||
workflowId: payload.workflowId,
|
||||
userId: payload.userId,
|
||||
@@ -221,14 +257,13 @@ async function executeWebhookJobInternal(
|
||||
throw new Error(`Workflow ${payload.workflowId} has no associated workspace`)
|
||||
}
|
||||
|
||||
const workflowVariables = (workflowRecord.variables as Record<string, any>) || {}
|
||||
const workflowVariables = (workflowRecord.variables as Record<string, unknown>) || {}
|
||||
const asyncTimeout = executionTimeout?.async ?? 120_000
|
||||
const timeoutController = createTimeoutAbortController(asyncTimeout)
|
||||
|
||||
let deploymentVersionId: string | undefined
|
||||
|
||||
try {
|
||||
// Parallelize workflow state, webhook record, and credential resolution
|
||||
const [workflowData, webhookRows, resolvedCredentialUserId] = await Promise.all([
|
||||
loadDeployedWorkflowState(payload.workflowId, workspaceId),
|
||||
db.select().from(webhook).where(eq(webhook.id, payload.webhookId)).limit(1),
|
||||
@@ -255,134 +290,38 @@ async function executeWebhookJobInternal(
|
||||
? (workflowData.deploymentVersionId as string)
|
||||
: undefined
|
||||
|
||||
// Handle special Airtable case
|
||||
if (payload.provider === 'airtable') {
|
||||
logger.info(`[${requestId}] Processing Airtable webhook via fetchAndProcessAirtablePayloads`)
|
||||
const handler = getProviderHandler(payload.provider)
|
||||
|
||||
const webhookRecord = webhookRows[0]
|
||||
if (!webhookRecord) {
|
||||
throw new Error(`Webhook record not found: ${payload.webhookId}`)
|
||||
let input: Record<string, unknown> | null = null
|
||||
let skipMessage: string | undefined
|
||||
|
||||
const webhookRecord = webhookRows[0]
|
||||
if (!webhookRecord) {
|
||||
throw new Error(`Webhook record not found: ${payload.webhookId}`)
|
||||
}
|
||||
|
||||
if (handler.formatInput) {
|
||||
const result = await handler.formatInput({
|
||||
webhook: webhookRecord,
|
||||
workflow: { id: payload.workflowId, userId: payload.userId },
|
||||
body: payload.body,
|
||||
headers: payload.headers,
|
||||
requestId,
|
||||
})
|
||||
input = result.input as Record<string, unknown> | null
|
||||
skipMessage = result.skip?.message
|
||||
} else {
|
||||
input = payload.body as Record<string, unknown> | null
|
||||
}
|
||||
|
||||
if (!input && handler.handleEmptyInput) {
|
||||
const skipResult = handler.handleEmptyInput(requestId)
|
||||
if (skipResult) {
|
||||
skipMessage = skipResult.message
|
||||
}
|
||||
}
|
||||
|
||||
const webhookData = {
|
||||
id: payload.webhookId,
|
||||
provider: payload.provider,
|
||||
providerConfig: webhookRecord.providerConfig,
|
||||
}
|
||||
|
||||
const mockWorkflow = {
|
||||
id: payload.workflowId,
|
||||
userId: payload.userId,
|
||||
}
|
||||
|
||||
const airtableInput = await fetchAndProcessAirtablePayloads(
|
||||
webhookData,
|
||||
mockWorkflow,
|
||||
requestId
|
||||
)
|
||||
|
||||
if (airtableInput) {
|
||||
logger.info(`[${requestId}] Executing workflow with Airtable changes`)
|
||||
|
||||
const metadata: ExecutionMetadata = {
|
||||
requestId,
|
||||
executionId,
|
||||
workflowId: payload.workflowId,
|
||||
workspaceId,
|
||||
userId: payload.userId,
|
||||
sessionUserId: undefined,
|
||||
workflowUserId: workflowRecord.userId,
|
||||
triggerType: payload.provider || 'webhook',
|
||||
triggerBlockId: payload.blockId,
|
||||
useDraftState: false,
|
||||
startTime: new Date().toISOString(),
|
||||
isClientSession: false,
|
||||
credentialAccountUserId,
|
||||
correlation,
|
||||
workflowStateOverride: {
|
||||
blocks,
|
||||
edges,
|
||||
loops: loops || {},
|
||||
parallels: parallels || {},
|
||||
deploymentVersionId,
|
||||
},
|
||||
}
|
||||
|
||||
const snapshot = new ExecutionSnapshot(
|
||||
metadata,
|
||||
workflowRecord,
|
||||
airtableInput,
|
||||
workflowVariables,
|
||||
[]
|
||||
)
|
||||
|
||||
const executionResult = await executeWorkflowCore({
|
||||
snapshot,
|
||||
callbacks: {},
|
||||
loggingSession,
|
||||
includeFileBase64: true,
|
||||
base64MaxBytes: undefined,
|
||||
abortSignal: timeoutController.signal,
|
||||
})
|
||||
|
||||
if (
|
||||
executionResult.status === 'cancelled' &&
|
||||
timeoutController.isTimedOut() &&
|
||||
timeoutController.timeoutMs
|
||||
) {
|
||||
const timeoutErrorMessage = getTimeoutErrorMessage(null, timeoutController.timeoutMs)
|
||||
logger.info(`[${requestId}] Airtable webhook execution timed out`, {
|
||||
timeoutMs: timeoutController.timeoutMs,
|
||||
})
|
||||
await loggingSession.markAsFailed(timeoutErrorMessage)
|
||||
} else if (executionResult.status === 'paused') {
|
||||
if (!executionResult.snapshotSeed) {
|
||||
logger.error(`[${requestId}] Missing snapshot seed for paused execution`, {
|
||||
executionId,
|
||||
})
|
||||
await loggingSession.markAsFailed('Missing snapshot seed for paused execution')
|
||||
} else {
|
||||
try {
|
||||
await PauseResumeManager.persistPauseResult({
|
||||
workflowId: payload.workflowId,
|
||||
executionId,
|
||||
pausePoints: executionResult.pausePoints || [],
|
||||
snapshotSeed: executionResult.snapshotSeed,
|
||||
executorUserId: executionResult.metadata?.userId,
|
||||
})
|
||||
} catch (pauseError) {
|
||||
logger.error(`[${requestId}] Failed to persist pause result`, {
|
||||
executionId,
|
||||
error: pauseError instanceof Error ? pauseError.message : String(pauseError),
|
||||
})
|
||||
await loggingSession.markAsFailed(
|
||||
`Failed to persist pause state: ${pauseError instanceof Error ? pauseError.message : String(pauseError)}`
|
||||
)
|
||||
}
|
||||
}
|
||||
} else {
|
||||
await PauseResumeManager.processQueuedResumes(executionId)
|
||||
}
|
||||
|
||||
await loggingSession.waitForPostExecution()
|
||||
|
||||
logger.info(`[${requestId}] Airtable webhook execution completed`, {
|
||||
success: executionResult.success,
|
||||
workflowId: payload.workflowId,
|
||||
})
|
||||
|
||||
return {
|
||||
success: executionResult.success,
|
||||
workflowId: payload.workflowId,
|
||||
executionId,
|
||||
output: executionResult.output,
|
||||
executedAt: new Date().toISOString(),
|
||||
provider: payload.provider,
|
||||
}
|
||||
}
|
||||
// No changes to process
|
||||
logger.info(`[${requestId}] No Airtable changes to process`)
|
||||
|
||||
if (skipMessage) {
|
||||
await loggingSession.safeStart({
|
||||
userId: payload.userId,
|
||||
workspaceId,
|
||||
@@ -397,7 +336,7 @@ async function executeWebhookJobInternal(
|
||||
await loggingSession.safeComplete({
|
||||
endedAt: new Date().toISOString(),
|
||||
totalDurationMs: 0,
|
||||
finalOutput: { message: 'No Airtable changes to process' },
|
||||
finalOutput: { message: skipMessage },
|
||||
traceSpans: [],
|
||||
})
|
||||
|
||||
@@ -405,61 +344,11 @@ async function executeWebhookJobInternal(
|
||||
success: true,
|
||||
workflowId: payload.workflowId,
|
||||
executionId,
|
||||
output: { message: 'No Airtable changes to process' },
|
||||
output: { message: skipMessage },
|
||||
executedAt: new Date().toISOString(),
|
||||
}
|
||||
}
|
||||
|
||||
// Format input for standard webhooks
|
||||
const actualWebhook =
|
||||
webhookRows.length > 0
|
||||
? webhookRows[0]
|
||||
: {
|
||||
provider: payload.provider,
|
||||
blockId: payload.blockId,
|
||||
providerConfig: {},
|
||||
}
|
||||
|
||||
const mockWorkflow = {
|
||||
id: payload.workflowId,
|
||||
userId: payload.userId,
|
||||
}
|
||||
const mockRequest = {
|
||||
headers: new Map(Object.entries(payload.headers)),
|
||||
} as any
|
||||
|
||||
const input = await formatWebhookInput(actualWebhook, mockWorkflow, payload.body, mockRequest)
|
||||
|
||||
if (!input && payload.provider === 'whatsapp') {
|
||||
logger.info(`[${requestId}] No messages in WhatsApp payload, skipping execution`)
|
||||
|
||||
await loggingSession.safeStart({
|
||||
userId: payload.userId,
|
||||
workspaceId,
|
||||
variables: {},
|
||||
triggerData: {
|
||||
isTest: false,
|
||||
correlation,
|
||||
},
|
||||
deploymentVersionId,
|
||||
})
|
||||
|
||||
await loggingSession.safeComplete({
|
||||
endedAt: new Date().toISOString(),
|
||||
totalDurationMs: 0,
|
||||
finalOutput: { message: 'No messages in WhatsApp payload' },
|
||||
traceSpans: [],
|
||||
})
|
||||
return {
|
||||
success: true,
|
||||
workflowId: payload.workflowId,
|
||||
executionId,
|
||||
output: { message: 'No messages in WhatsApp payload' },
|
||||
executedAt: new Date().toISOString(),
|
||||
}
|
||||
}
|
||||
|
||||
// Process trigger file outputs based on schema
|
||||
if (input && payload.blockId && blocks[payload.blockId]) {
|
||||
try {
|
||||
const triggerBlock = blocks[payload.blockId]
|
||||
@@ -502,49 +391,20 @@ async function executeWebhookJobInternal(
|
||||
}
|
||||
}
|
||||
|
||||
// Process generic webhook files based on inputFormat
|
||||
if (input && payload.provider === 'generic' && payload.blockId && blocks[payload.blockId]) {
|
||||
if (input && handler.processInputFiles && payload.blockId && blocks[payload.blockId]) {
|
||||
try {
|
||||
const triggerBlock = blocks[payload.blockId]
|
||||
|
||||
if (triggerBlock?.subBlocks?.inputFormat?.value) {
|
||||
const inputFormat = triggerBlock.subBlocks.inputFormat.value as unknown as Array<{
|
||||
name: string
|
||||
type: 'string' | 'number' | 'boolean' | 'object' | 'array' | 'file[]'
|
||||
}>
|
||||
|
||||
const fileFields = inputFormat.filter((field) => field.type === 'file[]')
|
||||
|
||||
if (fileFields.length > 0 && typeof input === 'object' && input !== null) {
|
||||
const executionContext = {
|
||||
workspaceId,
|
||||
workflowId: payload.workflowId,
|
||||
executionId,
|
||||
}
|
||||
|
||||
for (const fileField of fileFields) {
|
||||
const fieldValue = input[fileField.name]
|
||||
|
||||
if (fieldValue && typeof fieldValue === 'object') {
|
||||
const uploadedFiles = await processExecutionFiles(
|
||||
fieldValue,
|
||||
executionContext,
|
||||
requestId,
|
||||
payload.userId
|
||||
)
|
||||
|
||||
if (uploadedFiles.length > 0) {
|
||||
input[fileField.name] = uploadedFiles
|
||||
logger.info(
|
||||
`[${requestId}] Successfully processed ${uploadedFiles.length} file(s) for field: ${fileField.name}`
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
await handler.processInputFiles({
|
||||
input,
|
||||
blocks,
|
||||
blockId: payload.blockId,
|
||||
workspaceId,
|
||||
workflowId: payload.workflowId,
|
||||
executionId,
|
||||
requestId,
|
||||
userId: payload.userId,
|
||||
})
|
||||
} catch (error) {
|
||||
logger.error(`[${requestId}] Error processing generic webhook files:`, error)
|
||||
logger.error(`[${requestId}] Error processing provider-specific files:`, error)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -589,49 +449,17 @@ async function executeWebhookJobInternal(
|
||||
callbacks: {},
|
||||
loggingSession,
|
||||
includeFileBase64: true,
|
||||
base64MaxBytes: undefined,
|
||||
abortSignal: timeoutController.signal,
|
||||
})
|
||||
|
||||
if (
|
||||
executionResult.status === 'cancelled' &&
|
||||
timeoutController.isTimedOut() &&
|
||||
timeoutController.timeoutMs
|
||||
) {
|
||||
const timeoutErrorMessage = getTimeoutErrorMessage(null, timeoutController.timeoutMs)
|
||||
logger.info(`[${requestId}] Webhook execution timed out`, {
|
||||
timeoutMs: timeoutController.timeoutMs,
|
||||
})
|
||||
await loggingSession.markAsFailed(timeoutErrorMessage)
|
||||
} else if (executionResult.status === 'paused') {
|
||||
if (!executionResult.snapshotSeed) {
|
||||
logger.error(`[${requestId}] Missing snapshot seed for paused execution`, {
|
||||
executionId,
|
||||
})
|
||||
await loggingSession.markAsFailed('Missing snapshot seed for paused execution')
|
||||
} else {
|
||||
try {
|
||||
await PauseResumeManager.persistPauseResult({
|
||||
workflowId: payload.workflowId,
|
||||
executionId,
|
||||
pausePoints: executionResult.pausePoints || [],
|
||||
snapshotSeed: executionResult.snapshotSeed,
|
||||
executorUserId: executionResult.metadata?.userId,
|
||||
})
|
||||
} catch (pauseError) {
|
||||
logger.error(`[${requestId}] Failed to persist pause result`, {
|
||||
executionId,
|
||||
error: pauseError instanceof Error ? pauseError.message : String(pauseError),
|
||||
})
|
||||
await loggingSession.markAsFailed(
|
||||
`Failed to persist pause state: ${pauseError instanceof Error ? pauseError.message : String(pauseError)}`
|
||||
)
|
||||
}
|
||||
}
|
||||
} else {
|
||||
await PauseResumeManager.processQueuedResumes(executionId)
|
||||
}
|
||||
|
||||
await loggingSession.waitForPostExecution()
|
||||
await handleExecutionResult(executionResult, {
|
||||
loggingSession,
|
||||
timeoutController,
|
||||
requestId,
|
||||
executionId,
|
||||
workflowId: payload.workflowId,
|
||||
})
|
||||
|
||||
logger.info(`[${requestId}] Webhook execution completed`, {
|
||||
success: executionResult.success,
|
||||
|
||||
@@ -10,7 +10,7 @@ import {
|
||||
executeWorkflowCore,
|
||||
wasExecutionFinalizedByCore,
|
||||
} from '@/lib/workflows/executor/execution-core'
|
||||
import { PauseResumeManager } from '@/lib/workflows/executor/human-in-the-loop-manager'
|
||||
import { handlePostExecutionPauseState } from '@/lib/workflows/executor/pause-persistence'
|
||||
import { ExecutionSnapshot } from '@/executor/execution/snapshot'
|
||||
import type { ExecutionMetadata } from '@/executor/execution/types'
|
||||
import { hasExecutionResult } from '@/executor/utils/errors'
|
||||
@@ -148,33 +148,8 @@ export async function executeWorkflowJob(payload: WorkflowExecutionPayload) {
|
||||
timeoutMs: timeoutController.timeoutMs,
|
||||
})
|
||||
await loggingSession.markAsFailed(timeoutErrorMessage)
|
||||
} else if (result.status === 'paused') {
|
||||
if (!result.snapshotSeed) {
|
||||
logger.error(`[${requestId}] Missing snapshot seed for paused execution`, {
|
||||
executionId,
|
||||
})
|
||||
await loggingSession.markAsFailed('Missing snapshot seed for paused execution')
|
||||
} else {
|
||||
try {
|
||||
await PauseResumeManager.persistPauseResult({
|
||||
workflowId,
|
||||
executionId,
|
||||
pausePoints: result.pausePoints || [],
|
||||
snapshotSeed: result.snapshotSeed,
|
||||
executorUserId: result.metadata?.userId,
|
||||
})
|
||||
} catch (pauseError) {
|
||||
logger.error(`[${requestId}] Failed to persist pause result`, {
|
||||
executionId,
|
||||
error: pauseError instanceof Error ? pauseError.message : String(pauseError),
|
||||
})
|
||||
await loggingSession.markAsFailed(
|
||||
`Failed to persist pause state: ${pauseError instanceof Error ? pauseError.message : String(pauseError)}`
|
||||
)
|
||||
}
|
||||
}
|
||||
} else {
|
||||
await PauseResumeManager.processQueuedResumes(executionId)
|
||||
await handlePostExecutionPauseState({ result, workflowId, executionId, loggingSession })
|
||||
}
|
||||
|
||||
await loggingSession.waitForPostExecution()
|
||||
|
||||
@@ -10,7 +10,7 @@ export const AirweaveBlock: BlockConfig<AirweaveSearchResponse> = {
|
||||
authMode: AuthMode.ApiKey,
|
||||
longDescription:
|
||||
'Search across your synced data sources using Airweave. Supports semantic search with hybrid, neural, or keyword retrieval strategies. Optionally generate AI-powered answers from search results.',
|
||||
docsLink: 'https://docs.airweave.ai',
|
||||
docsLink: 'https://docs.sim.ai/tools/airweave',
|
||||
category: 'tools',
|
||||
integrationType: IntegrationType.Search,
|
||||
tags: ['vector-search', 'knowledge-base'],
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import { GongIcon } from '@/components/icons'
|
||||
import { AuthMode, type BlockConfig, IntegrationType } from '@/blocks/types'
|
||||
import type { GongResponse } from '@/tools/gong/types'
|
||||
import { getTrigger } from '@/triggers'
|
||||
|
||||
export const GongBlock: BlockConfig<GongResponse> = {
|
||||
type: 'gong',
|
||||
@@ -15,7 +16,10 @@ export const GongBlock: BlockConfig<GongResponse> = {
|
||||
tags: ['meeting', 'sales-engagement', 'speech-to-text'],
|
||||
bgColor: '#8039DF',
|
||||
icon: GongIcon,
|
||||
triggerAllowed: true,
|
||||
subBlocks: [
|
||||
...getTrigger('gong_webhook').subBlocks,
|
||||
...getTrigger('gong_call_completed').subBlocks,
|
||||
{
|
||||
id: 'operation',
|
||||
title: 'Operation',
|
||||
@@ -568,4 +572,8 @@ Return ONLY the timestamp string in ISO 8601 format - no explanations, no quotes
|
||||
description: 'Gong API response data',
|
||||
},
|
||||
},
|
||||
triggers: {
|
||||
enabled: true,
|
||||
available: ['gong_webhook', 'gong_call_completed'],
|
||||
},
|
||||
}
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import { GreenhouseIcon } from '@/components/icons'
|
||||
import { AuthMode, type BlockConfig, IntegrationType } from '@/blocks/types'
|
||||
import type { GreenhouseResponse } from '@/tools/greenhouse/types'
|
||||
import { getTrigger } from '@/triggers'
|
||||
|
||||
export const GreenhouseBlock: BlockConfig<GreenhouseResponse> = {
|
||||
type: 'greenhouse',
|
||||
@@ -16,6 +17,20 @@ export const GreenhouseBlock: BlockConfig<GreenhouseResponse> = {
|
||||
icon: GreenhouseIcon,
|
||||
authMode: AuthMode.ApiKey,
|
||||
|
||||
triggers: {
|
||||
enabled: true,
|
||||
available: [
|
||||
'greenhouse_candidate_hired',
|
||||
'greenhouse_new_application',
|
||||
'greenhouse_candidate_stage_change',
|
||||
'greenhouse_candidate_rejected',
|
||||
'greenhouse_offer_created',
|
||||
'greenhouse_job_created',
|
||||
'greenhouse_job_updated',
|
||||
'greenhouse_webhook',
|
||||
],
|
||||
},
|
||||
|
||||
subBlocks: [
|
||||
{
|
||||
id: 'operation',
|
||||
@@ -291,6 +306,17 @@ Return ONLY the ISO 8601 timestamp - no explanations, no extra text.`,
|
||||
required: true,
|
||||
password: true,
|
||||
},
|
||||
|
||||
// ── Trigger subBlocks ──
|
||||
|
||||
...getTrigger('greenhouse_candidate_hired').subBlocks,
|
||||
...getTrigger('greenhouse_new_application').subBlocks,
|
||||
...getTrigger('greenhouse_candidate_stage_change').subBlocks,
|
||||
...getTrigger('greenhouse_candidate_rejected').subBlocks,
|
||||
...getTrigger('greenhouse_offer_created').subBlocks,
|
||||
...getTrigger('greenhouse_job_created').subBlocks,
|
||||
...getTrigger('greenhouse_job_updated').subBlocks,
|
||||
...getTrigger('greenhouse_webhook').subBlocks,
|
||||
],
|
||||
|
||||
tools: {
|
||||
|
||||
@@ -985,11 +985,15 @@ Return ONLY the JSON array of property names - no explanations, no markdown, no
|
||||
},
|
||||
...getTrigger('hubspot_contact_created').subBlocks.slice(1),
|
||||
...getTrigger('hubspot_contact_deleted').subBlocks.slice(1),
|
||||
...getTrigger('hubspot_contact_merged').subBlocks.slice(1),
|
||||
...getTrigger('hubspot_contact_privacy_deleted').subBlocks.slice(1),
|
||||
...getTrigger('hubspot_contact_property_changed').subBlocks.slice(1),
|
||||
...getTrigger('hubspot_contact_restored').subBlocks.slice(1),
|
||||
...getTrigger('hubspot_company_created').subBlocks.slice(1),
|
||||
...getTrigger('hubspot_company_deleted').subBlocks.slice(1),
|
||||
...getTrigger('hubspot_company_merged').subBlocks.slice(1),
|
||||
...getTrigger('hubspot_company_property_changed').subBlocks.slice(1),
|
||||
...getTrigger('hubspot_company_restored').subBlocks.slice(1),
|
||||
...getTrigger('hubspot_conversation_creation').subBlocks.slice(1),
|
||||
...getTrigger('hubspot_conversation_deletion').subBlocks.slice(1),
|
||||
...getTrigger('hubspot_conversation_new_message').subBlocks.slice(1),
|
||||
@@ -997,10 +1001,15 @@ Return ONLY the JSON array of property names - no explanations, no markdown, no
|
||||
...getTrigger('hubspot_conversation_property_changed').subBlocks.slice(1),
|
||||
...getTrigger('hubspot_deal_created').subBlocks.slice(1),
|
||||
...getTrigger('hubspot_deal_deleted').subBlocks.slice(1),
|
||||
...getTrigger('hubspot_deal_merged').subBlocks.slice(1),
|
||||
...getTrigger('hubspot_deal_property_changed').subBlocks.slice(1),
|
||||
...getTrigger('hubspot_deal_restored').subBlocks.slice(1),
|
||||
...getTrigger('hubspot_ticket_created').subBlocks.slice(1),
|
||||
...getTrigger('hubspot_ticket_deleted').subBlocks.slice(1),
|
||||
...getTrigger('hubspot_ticket_merged').subBlocks.slice(1),
|
||||
...getTrigger('hubspot_ticket_property_changed').subBlocks.slice(1),
|
||||
...getTrigger('hubspot_ticket_restored').subBlocks.slice(1),
|
||||
...getTrigger('hubspot_webhook').subBlocks.slice(1),
|
||||
],
|
||||
tools: {
|
||||
access: [
|
||||
@@ -1329,11 +1338,15 @@ Return ONLY the JSON array of property names - no explanations, no markdown, no
|
||||
available: [
|
||||
'hubspot_contact_created',
|
||||
'hubspot_contact_deleted',
|
||||
'hubspot_contact_merged',
|
||||
'hubspot_contact_privacy_deleted',
|
||||
'hubspot_contact_property_changed',
|
||||
'hubspot_contact_restored',
|
||||
'hubspot_company_created',
|
||||
'hubspot_company_deleted',
|
||||
'hubspot_company_merged',
|
||||
'hubspot_company_property_changed',
|
||||
'hubspot_company_restored',
|
||||
'hubspot_conversation_creation',
|
||||
'hubspot_conversation_deletion',
|
||||
'hubspot_conversation_new_message',
|
||||
@@ -1341,10 +1354,15 @@ Return ONLY the JSON array of property names - no explanations, no markdown, no
|
||||
'hubspot_conversation_property_changed',
|
||||
'hubspot_deal_created',
|
||||
'hubspot_deal_deleted',
|
||||
'hubspot_deal_merged',
|
||||
'hubspot_deal_property_changed',
|
||||
'hubspot_deal_restored',
|
||||
'hubspot_ticket_created',
|
||||
'hubspot_ticket_deleted',
|
||||
'hubspot_ticket_merged',
|
||||
'hubspot_ticket_property_changed',
|
||||
'hubspot_ticket_restored',
|
||||
'hubspot_webhook',
|
||||
],
|
||||
},
|
||||
}
|
||||
|
||||
@@ -2,6 +2,7 @@ import { IntercomIcon } from '@/components/icons'
|
||||
import type { BlockConfig } from '@/blocks/types'
|
||||
import { AuthMode, IntegrationType } from '@/blocks/types'
|
||||
import { createVersionedToolSelector } from '@/blocks/utils'
|
||||
import { getTrigger } from '@/triggers'
|
||||
|
||||
export const IntercomBlock: BlockConfig = {
|
||||
type: 'intercom',
|
||||
@@ -1409,6 +1410,26 @@ export const IntercomV2Block: BlockConfig = {
|
||||
integrationType: IntegrationType.CustomerSupport,
|
||||
tags: ['customer-support', 'messaging'],
|
||||
hideFromToolbar: false,
|
||||
subBlocks: [
|
||||
...IntercomBlock.subBlocks,
|
||||
...getTrigger('intercom_conversation_created').subBlocks,
|
||||
...getTrigger('intercom_conversation_reply').subBlocks,
|
||||
...getTrigger('intercom_conversation_closed').subBlocks,
|
||||
...getTrigger('intercom_contact_created').subBlocks,
|
||||
...getTrigger('intercom_user_created').subBlocks,
|
||||
...getTrigger('intercom_webhook').subBlocks,
|
||||
],
|
||||
triggers: {
|
||||
enabled: true,
|
||||
available: [
|
||||
'intercom_conversation_created',
|
||||
'intercom_conversation_reply',
|
||||
'intercom_conversation_closed',
|
||||
'intercom_contact_created',
|
||||
'intercom_user_created',
|
||||
'intercom_webhook',
|
||||
],
|
||||
},
|
||||
tools: {
|
||||
...IntercomBlock.tools,
|
||||
access: [
|
||||
|
||||
@@ -8,8 +8,9 @@ import { getTrigger } from '@/triggers'
|
||||
|
||||
export const LinearBlock: BlockConfig<LinearResponse> = {
|
||||
type: 'linear',
|
||||
name: 'Linear',
|
||||
name: 'Linear (Legacy)',
|
||||
description: 'Interact with Linear issues, projects, and more',
|
||||
hideFromToolbar: true,
|
||||
authMode: AuthMode.OAuth,
|
||||
triggerAllowed: true,
|
||||
longDescription:
|
||||
@@ -2543,3 +2544,62 @@ Return ONLY the date string in YYYY-MM-DD format - no explanations, no quotes, n
|
||||
],
|
||||
},
|
||||
}
|
||||
|
||||
/**
|
||||
* Linear V2 Block
|
||||
*
|
||||
* Uses automatic webhook registration via the Linear GraphQL API.
|
||||
* Inherits all tool operations from the legacy block.
|
||||
*/
|
||||
export const LinearV2Block: BlockConfig<LinearResponse> = {
|
||||
...LinearBlock,
|
||||
type: 'linear_v2',
|
||||
name: 'Linear',
|
||||
hideFromToolbar: false,
|
||||
subBlocks: [
|
||||
...LinearBlock.subBlocks.filter(
|
||||
(sb) =>
|
||||
!sb.id?.startsWith('webhookUrlDisplay') &&
|
||||
!sb.id?.startsWith('webhookSecret') &&
|
||||
!sb.id?.startsWith('triggerSave') &&
|
||||
!sb.id?.startsWith('triggerInstructions') &&
|
||||
!sb.id?.startsWith('selectedTriggerId')
|
||||
),
|
||||
// V2 Trigger SubBlocks
|
||||
...getTrigger('linear_issue_created_v2').subBlocks,
|
||||
...getTrigger('linear_issue_updated_v2').subBlocks,
|
||||
...getTrigger('linear_issue_removed_v2').subBlocks,
|
||||
...getTrigger('linear_comment_created_v2').subBlocks,
|
||||
...getTrigger('linear_comment_updated_v2').subBlocks,
|
||||
...getTrigger('linear_project_created_v2').subBlocks,
|
||||
...getTrigger('linear_project_updated_v2').subBlocks,
|
||||
...getTrigger('linear_cycle_created_v2').subBlocks,
|
||||
...getTrigger('linear_cycle_updated_v2').subBlocks,
|
||||
...getTrigger('linear_label_created_v2').subBlocks,
|
||||
...getTrigger('linear_label_updated_v2').subBlocks,
|
||||
...getTrigger('linear_project_update_created_v2').subBlocks,
|
||||
...getTrigger('linear_customer_request_created_v2').subBlocks,
|
||||
...getTrigger('linear_customer_request_updated_v2').subBlocks,
|
||||
...getTrigger('linear_webhook_v2').subBlocks,
|
||||
],
|
||||
triggers: {
|
||||
enabled: true,
|
||||
available: [
|
||||
'linear_issue_created_v2',
|
||||
'linear_issue_updated_v2',
|
||||
'linear_issue_removed_v2',
|
||||
'linear_comment_created_v2',
|
||||
'linear_comment_updated_v2',
|
||||
'linear_project_created_v2',
|
||||
'linear_project_updated_v2',
|
||||
'linear_cycle_created_v2',
|
||||
'linear_cycle_updated_v2',
|
||||
'linear_label_created_v2',
|
||||
'linear_label_updated_v2',
|
||||
'linear_project_update_created_v2',
|
||||
'linear_customer_request_created_v2',
|
||||
'linear_customer_request_updated_v2',
|
||||
'linear_webhook_v2',
|
||||
],
|
||||
},
|
||||
}
|
||||
|
||||
@@ -3,6 +3,7 @@ import type { BlockConfig } from '@/blocks/types'
|
||||
import { AuthMode, IntegrationType } from '@/blocks/types'
|
||||
import { createVersionedToolSelector } from '@/blocks/utils'
|
||||
import type { NotionResponse } from '@/tools/notion/types'
|
||||
import { getTrigger } from '@/triggers'
|
||||
|
||||
// Legacy block - hidden from toolbar
|
||||
export const NotionBlock: BlockConfig<NotionResponse> = {
|
||||
@@ -436,7 +437,34 @@ export const NotionV2Block: BlockConfig<any> = {
|
||||
bgColor: '#181C1E',
|
||||
icon: NotionIcon,
|
||||
hideFromToolbar: false,
|
||||
subBlocks: NotionBlock.subBlocks,
|
||||
subBlocks: [
|
||||
...NotionBlock.subBlocks,
|
||||
|
||||
// Trigger subBlocks
|
||||
...getTrigger('notion_page_created').subBlocks,
|
||||
...getTrigger('notion_page_properties_updated').subBlocks,
|
||||
...getTrigger('notion_page_content_updated').subBlocks,
|
||||
...getTrigger('notion_page_deleted').subBlocks,
|
||||
...getTrigger('notion_database_created').subBlocks,
|
||||
...getTrigger('notion_database_schema_updated').subBlocks,
|
||||
...getTrigger('notion_database_deleted').subBlocks,
|
||||
...getTrigger('notion_comment_created').subBlocks,
|
||||
...getTrigger('notion_webhook').subBlocks,
|
||||
],
|
||||
triggers: {
|
||||
enabled: true,
|
||||
available: [
|
||||
'notion_page_created',
|
||||
'notion_page_properties_updated',
|
||||
'notion_page_content_updated',
|
||||
'notion_page_deleted',
|
||||
'notion_database_created',
|
||||
'notion_database_schema_updated',
|
||||
'notion_database_deleted',
|
||||
'notion_comment_created',
|
||||
'notion_webhook',
|
||||
],
|
||||
},
|
||||
tools: {
|
||||
access: [
|
||||
'notion_read_v2',
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import { ResendIcon } from '@/components/icons'
|
||||
import type { BlockConfig } from '@/blocks/types'
|
||||
import { AuthMode, IntegrationType } from '@/blocks/types'
|
||||
import { getTrigger } from '@/triggers'
|
||||
|
||||
export const ResendBlock: BlockConfig = {
|
||||
type: 'resend',
|
||||
@@ -16,6 +17,20 @@ export const ResendBlock: BlockConfig = {
|
||||
icon: ResendIcon,
|
||||
authMode: AuthMode.ApiKey,
|
||||
|
||||
triggers: {
|
||||
enabled: true,
|
||||
available: [
|
||||
'resend_email_sent',
|
||||
'resend_email_delivered',
|
||||
'resend_email_bounced',
|
||||
'resend_email_complained',
|
||||
'resend_email_opened',
|
||||
'resend_email_clicked',
|
||||
'resend_email_failed',
|
||||
'resend_webhook',
|
||||
],
|
||||
},
|
||||
|
||||
subBlocks: [
|
||||
{
|
||||
id: 'operation',
|
||||
@@ -221,6 +236,15 @@ Return ONLY the email body - no explanations, no extra text.`,
|
||||
condition: { field: 'operation', value: ['get_contact', 'update_contact', 'delete_contact'] },
|
||||
required: true,
|
||||
},
|
||||
|
||||
...getTrigger('resend_email_sent').subBlocks,
|
||||
...getTrigger('resend_email_delivered').subBlocks,
|
||||
...getTrigger('resend_email_bounced').subBlocks,
|
||||
...getTrigger('resend_email_complained').subBlocks,
|
||||
...getTrigger('resend_email_opened').subBlocks,
|
||||
...getTrigger('resend_email_clicked').subBlocks,
|
||||
...getTrigger('resend_email_failed').subBlocks,
|
||||
...getTrigger('resend_webhook').subBlocks,
|
||||
],
|
||||
|
||||
tools: {
|
||||
|
||||
@@ -3,6 +3,7 @@ import { getScopesForService } from '@/lib/oauth/utils'
|
||||
import type { BlockConfig } from '@/blocks/types'
|
||||
import { AuthMode, IntegrationType } from '@/blocks/types'
|
||||
import type { SalesforceResponse } from '@/tools/salesforce/types'
|
||||
import { getTrigger } from '@/triggers'
|
||||
|
||||
export const SalesforceBlock: BlockConfig<SalesforceResponse> = {
|
||||
type: 'salesforce',
|
||||
@@ -17,6 +18,17 @@ export const SalesforceBlock: BlockConfig<SalesforceResponse> = {
|
||||
tags: ['sales-engagement', 'customer-support'],
|
||||
bgColor: '#E0E0E0',
|
||||
icon: SalesforceIcon,
|
||||
triggers: {
|
||||
enabled: true,
|
||||
available: [
|
||||
'salesforce_record_created',
|
||||
'salesforce_record_updated',
|
||||
'salesforce_record_deleted',
|
||||
'salesforce_opportunity_stage_changed',
|
||||
'salesforce_case_status_changed',
|
||||
'salesforce_webhook',
|
||||
],
|
||||
},
|
||||
subBlocks: [
|
||||
{
|
||||
id: 'operation',
|
||||
@@ -511,6 +523,12 @@ Return ONLY the date string in YYYY-MM-DD format - no explanations, no quotes, n
|
||||
],
|
||||
},
|
||||
},
|
||||
...getTrigger('salesforce_record_created').subBlocks,
|
||||
...getTrigger('salesforce_record_updated').subBlocks,
|
||||
...getTrigger('salesforce_record_deleted').subBlocks,
|
||||
...getTrigger('salesforce_opportunity_stage_changed').subBlocks,
|
||||
...getTrigger('salesforce_case_status_changed').subBlocks,
|
||||
...getTrigger('salesforce_webhook').subBlocks,
|
||||
],
|
||||
tools: {
|
||||
access: [
|
||||
|
||||
292
apps/sim/blocks/blocks/sixtyfour.ts
Normal file
292
apps/sim/blocks/blocks/sixtyfour.ts
Normal file
@@ -0,0 +1,292 @@
|
||||
import { SixtyfourIcon } from '@/components/icons'
|
||||
import { AuthMode, type BlockConfig, IntegrationType } from '@/blocks/types'
|
||||
|
||||
export const SixtyfourBlock: BlockConfig = {
|
||||
type: 'sixtyfour',
|
||||
name: 'Sixtyfour AI',
|
||||
description: 'Enrich leads and companies with AI-powered research',
|
||||
longDescription:
|
||||
'Find emails, phone numbers, and enrich lead or company data with contact information, social profiles, and detailed research using Sixtyfour AI.',
|
||||
docsLink: 'https://docs.sim.ai/tools/sixtyfour',
|
||||
category: 'tools',
|
||||
integrationType: IntegrationType.SalesIntelligence,
|
||||
tags: ['enrichment', 'sales-engagement'],
|
||||
bgColor: '#000000',
|
||||
icon: SixtyfourIcon,
|
||||
authMode: AuthMode.ApiKey,
|
||||
|
||||
subBlocks: [
|
||||
{
|
||||
id: 'operation',
|
||||
title: 'Operation',
|
||||
type: 'dropdown',
|
||||
options: [
|
||||
{ label: 'Find Phone', id: 'find_phone' },
|
||||
{ label: 'Find Email', id: 'find_email' },
|
||||
{ label: 'Enrich Lead', id: 'enrich_lead' },
|
||||
{ label: 'Enrich Company', id: 'enrich_company' },
|
||||
],
|
||||
value: () => 'find_phone',
|
||||
},
|
||||
{
|
||||
id: 'apiKey',
|
||||
title: 'API Key',
|
||||
type: 'short-input',
|
||||
required: true,
|
||||
placeholder: 'Enter your Sixtyfour API key',
|
||||
password: true,
|
||||
},
|
||||
{
|
||||
id: 'name',
|
||||
title: 'Name',
|
||||
type: 'short-input',
|
||||
placeholder: 'Full name of the person',
|
||||
required: { field: 'operation', value: ['find_phone', 'find_email'] },
|
||||
condition: { field: 'operation', value: ['find_phone', 'find_email'] },
|
||||
},
|
||||
{
|
||||
id: 'company',
|
||||
title: 'Company',
|
||||
type: 'short-input',
|
||||
placeholder: 'Company name',
|
||||
condition: { field: 'operation', value: ['find_phone', 'find_email'] },
|
||||
},
|
||||
{
|
||||
id: 'linkedinUrl',
|
||||
title: 'LinkedIn URL',
|
||||
type: 'short-input',
|
||||
placeholder: 'https://linkedin.com/in/johndoe',
|
||||
condition: { field: 'operation', value: ['find_phone', 'find_email'] },
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
id: 'domain',
|
||||
title: 'Domain',
|
||||
type: 'short-input',
|
||||
placeholder: 'example.com',
|
||||
condition: { field: 'operation', value: ['find_phone', 'find_email'] },
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
id: 'emailInput',
|
||||
title: 'Email',
|
||||
type: 'short-input',
|
||||
placeholder: 'Email address',
|
||||
condition: { field: 'operation', value: 'find_phone' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
id: 'phoneInput',
|
||||
title: 'Phone',
|
||||
type: 'short-input',
|
||||
placeholder: 'Phone number',
|
||||
condition: { field: 'operation', value: 'find_email' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
id: 'title',
|
||||
title: 'Job Title',
|
||||
type: 'short-input',
|
||||
placeholder: 'Job title',
|
||||
condition: { field: 'operation', value: 'find_email' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
id: 'mode',
|
||||
title: 'Mode',
|
||||
type: 'dropdown',
|
||||
options: [
|
||||
{ label: 'Professional', id: 'PROFESSIONAL' },
|
||||
{ label: 'Personal', id: 'PERSONAL' },
|
||||
],
|
||||
value: () => 'PROFESSIONAL',
|
||||
condition: { field: 'operation', value: 'find_email' },
|
||||
},
|
||||
{
|
||||
id: 'leadInfo',
|
||||
title: 'Lead Info',
|
||||
type: 'long-input',
|
||||
placeholder:
|
||||
'{"name": "John Doe", "company": "Acme Inc", "title": "CEO", "linkedin": "https://linkedin.com/in/johndoe"}',
|
||||
required: { field: 'operation', value: 'enrich_lead' },
|
||||
condition: { field: 'operation', value: 'enrich_lead' },
|
||||
},
|
||||
{
|
||||
id: 'leadStruct',
|
||||
title: 'Fields to Collect',
|
||||
type: 'long-input',
|
||||
placeholder:
|
||||
'{"email": "Email address", "phone": "Phone number", "company": "Company name", "title": "Job title"}',
|
||||
required: { field: 'operation', value: 'enrich_lead' },
|
||||
condition: { field: 'operation', value: 'enrich_lead' },
|
||||
},
|
||||
{
|
||||
id: 'leadResearchPlan',
|
||||
title: 'Research Plan',
|
||||
type: 'long-input',
|
||||
placeholder: 'Optional guidance for the enrichment agent',
|
||||
condition: { field: 'operation', value: 'enrich_lead' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
id: 'targetCompany',
|
||||
title: 'Company Info',
|
||||
type: 'long-input',
|
||||
placeholder: '{"name": "Acme Inc", "domain": "acme.com", "industry": "Technology"}',
|
||||
required: { field: 'operation', value: 'enrich_company' },
|
||||
condition: { field: 'operation', value: 'enrich_company' },
|
||||
},
|
||||
{
|
||||
id: 'companyStruct',
|
||||
title: 'Fields to Collect',
|
||||
type: 'long-input',
|
||||
placeholder:
|
||||
'{"website": "Company website URL", "num_employees": "Employee count", "address": "Company address"}',
|
||||
required: { field: 'operation', value: 'enrich_company' },
|
||||
condition: { field: 'operation', value: 'enrich_company' },
|
||||
},
|
||||
{
|
||||
id: 'findPeople',
|
||||
title: 'Find People',
|
||||
type: 'switch',
|
||||
condition: { field: 'operation', value: 'enrich_company' },
|
||||
},
|
||||
{
|
||||
id: 'peopleFocusPrompt',
|
||||
title: 'People Focus',
|
||||
type: 'short-input',
|
||||
placeholder: 'e.g. Find the VP of Marketing and the CTO',
|
||||
condition: { field: 'operation', value: 'enrich_company' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
id: 'fullOrgChart',
|
||||
title: 'Full Org Chart',
|
||||
type: 'switch',
|
||||
condition: { field: 'operation', value: 'enrich_company' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
id: 'companyLeadStruct',
|
||||
title: 'Lead Schema',
|
||||
type: 'long-input',
|
||||
placeholder: '{"name": "Full name", "email": "Email", "title": "Job title"}',
|
||||
condition: { field: 'operation', value: 'enrich_company' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
id: 'companyResearchPlan',
|
||||
title: 'Research Plan',
|
||||
type: 'long-input',
|
||||
placeholder: 'Optional guidance for the enrichment agent',
|
||||
condition: { field: 'operation', value: 'enrich_company' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
],
|
||||
|
||||
tools: {
|
||||
access: [
|
||||
'sixtyfour_find_phone',
|
||||
'sixtyfour_find_email',
|
||||
'sixtyfour_enrich_lead',
|
||||
'sixtyfour_enrich_company',
|
||||
],
|
||||
config: {
|
||||
tool: (params) => `sixtyfour_${params.operation}`,
|
||||
params: (params) => {
|
||||
const result: Record<string, unknown> = {}
|
||||
|
||||
if (params.operation === 'find_phone') {
|
||||
if (params.emailInput) result.email = params.emailInput
|
||||
} else if (params.operation === 'find_email') {
|
||||
if (params.phoneInput) result.phone = params.phoneInput
|
||||
} else if (params.operation === 'enrich_lead') {
|
||||
result.leadInfo = params.leadInfo
|
||||
result.struct = params.leadStruct
|
||||
if (params.leadResearchPlan) result.researchPlan = params.leadResearchPlan
|
||||
} else if (params.operation === 'enrich_company') {
|
||||
result.targetCompany = params.targetCompany
|
||||
result.struct = params.companyStruct
|
||||
if (params.findPeople !== undefined) result.findPeople = Boolean(params.findPeople)
|
||||
if (params.fullOrgChart !== undefined) result.fullOrgChart = Boolean(params.fullOrgChart)
|
||||
if (params.peopleFocusPrompt) result.peopleFocusPrompt = params.peopleFocusPrompt
|
||||
if (params.companyLeadStruct) result.leadStruct = params.companyLeadStruct
|
||||
if (params.companyResearchPlan) result.researchPlan = params.companyResearchPlan
|
||||
}
|
||||
|
||||
return result
|
||||
},
|
||||
},
|
||||
},
|
||||
|
||||
inputs: {
|
||||
operation: { type: 'string', description: 'Operation to perform' },
|
||||
apiKey: { type: 'string', description: 'Sixtyfour API key' },
|
||||
name: { type: 'string', description: 'Person name' },
|
||||
company: { type: 'string', description: 'Company name' },
|
||||
linkedinUrl: { type: 'string', description: 'LinkedIn URL' },
|
||||
domain: { type: 'string', description: 'Company domain' },
|
||||
emailInput: { type: 'string', description: 'Email address (find phone)' },
|
||||
phoneInput: { type: 'string', description: 'Phone number (find email)' },
|
||||
title: { type: 'string', description: 'Job title' },
|
||||
mode: { type: 'string', description: 'Email mode (PROFESSIONAL or PERSONAL)' },
|
||||
leadInfo: { type: 'string', description: 'Lead information JSON' },
|
||||
leadStruct: { type: 'string', description: 'Fields to collect for lead' },
|
||||
leadResearchPlan: { type: 'string', description: 'Research plan for lead enrichment' },
|
||||
targetCompany: { type: 'string', description: 'Company information JSON' },
|
||||
companyStruct: { type: 'string', description: 'Fields to collect for company' },
|
||||
findPeople: { type: 'boolean', description: 'Find associated people' },
|
||||
fullOrgChart: { type: 'boolean', description: 'Retrieve full org chart' },
|
||||
peopleFocusPrompt: { type: 'string', description: 'People focus description' },
|
||||
companyLeadStruct: { type: 'string', description: 'Lead schema for company enrichment' },
|
||||
companyResearchPlan: { type: 'string', description: 'Research plan for company enrichment' },
|
||||
},
|
||||
|
||||
outputs: {
|
||||
name: {
|
||||
type: 'string',
|
||||
description: 'Name of the person (find_phone, find_email)',
|
||||
},
|
||||
company: {
|
||||
type: 'string',
|
||||
description: 'Company name (find_phone, find_email)',
|
||||
},
|
||||
phone: {
|
||||
type: 'string',
|
||||
description: 'Phone number(s) found (find_phone)',
|
||||
},
|
||||
linkedinUrl: {
|
||||
type: 'string',
|
||||
description: 'LinkedIn profile URL (find_phone, find_email)',
|
||||
},
|
||||
title: {
|
||||
type: 'string',
|
||||
description: 'Job title (find_email)',
|
||||
},
|
||||
emails: {
|
||||
type: 'json',
|
||||
description: 'Email addresses found with validation status and type (find_email)',
|
||||
},
|
||||
personalEmails: {
|
||||
type: 'json',
|
||||
description: 'Personal email addresses found in PERSONAL mode (find_email)',
|
||||
},
|
||||
notes: {
|
||||
type: 'string',
|
||||
description: 'Research notes (enrich_lead, enrich_company)',
|
||||
},
|
||||
structuredData: {
|
||||
type: 'json',
|
||||
description:
|
||||
'Enriched data matching the requested struct fields (enrich_lead, enrich_company)',
|
||||
},
|
||||
references: {
|
||||
type: 'json',
|
||||
description: 'Source URLs and descriptions used for enrichment (enrich_lead, enrich_company)',
|
||||
},
|
||||
confidenceScore: {
|
||||
type: 'number',
|
||||
description: 'Quality score for the returned data, 0-10 (enrich_lead, enrich_company)',
|
||||
},
|
||||
},
|
||||
}
|
||||
@@ -1,6 +1,7 @@
|
||||
import { VercelIcon } from '@/components/icons'
|
||||
import type { BlockConfig } from '@/blocks/types'
|
||||
import { AuthMode, IntegrationType } from '@/blocks/types'
|
||||
import { getTrigger } from '@/triggers'
|
||||
|
||||
export const VercelBlock: BlockConfig = {
|
||||
type: 'vercel',
|
||||
@@ -15,6 +16,19 @@ export const VercelBlock: BlockConfig = {
|
||||
bgColor: '#171717',
|
||||
icon: VercelIcon,
|
||||
authMode: AuthMode.ApiKey,
|
||||
triggers: {
|
||||
enabled: true,
|
||||
available: [
|
||||
'vercel_deployment_created',
|
||||
'vercel_deployment_ready',
|
||||
'vercel_deployment_error',
|
||||
'vercel_deployment_canceled',
|
||||
'vercel_project_created',
|
||||
'vercel_project_removed',
|
||||
'vercel_domain_created',
|
||||
'vercel_webhook',
|
||||
],
|
||||
},
|
||||
subBlocks: [
|
||||
{
|
||||
id: 'operation',
|
||||
@@ -649,6 +663,16 @@ export const VercelBlock: BlockConfig = {
|
||||
},
|
||||
mode: 'advanced',
|
||||
},
|
||||
|
||||
// === Trigger subBlocks ===
|
||||
...getTrigger('vercel_deployment_created').subBlocks,
|
||||
...getTrigger('vercel_deployment_ready').subBlocks,
|
||||
...getTrigger('vercel_deployment_error').subBlocks,
|
||||
...getTrigger('vercel_deployment_canceled').subBlocks,
|
||||
...getTrigger('vercel_project_created').subBlocks,
|
||||
...getTrigger('vercel_project_removed').subBlocks,
|
||||
...getTrigger('vercel_domain_created').subBlocks,
|
||||
...getTrigger('vercel_webhook').subBlocks,
|
||||
],
|
||||
tools: {
|
||||
access: [
|
||||
|
||||
@@ -3,6 +3,7 @@ import { getScopesForService } from '@/lib/oauth/utils'
|
||||
import type { BlockConfig } from '@/blocks/types'
|
||||
import { AuthMode, IntegrationType } from '@/blocks/types'
|
||||
import type { ZoomResponse } from '@/tools/zoom/types'
|
||||
import { getTrigger } from '@/triggers'
|
||||
|
||||
export const ZoomBlock: BlockConfig<ZoomResponse> = {
|
||||
type: 'zoom',
|
||||
@@ -17,6 +18,17 @@ export const ZoomBlock: BlockConfig<ZoomResponse> = {
|
||||
tags: ['meeting', 'calendar', 'scheduling'],
|
||||
bgColor: '#2D8CFF',
|
||||
icon: ZoomIcon,
|
||||
triggers: {
|
||||
enabled: true,
|
||||
available: [
|
||||
'zoom_meeting_started',
|
||||
'zoom_meeting_ended',
|
||||
'zoom_participant_joined',
|
||||
'zoom_participant_left',
|
||||
'zoom_recording_completed',
|
||||
'zoom_webhook',
|
||||
],
|
||||
},
|
||||
subBlocks: [
|
||||
{
|
||||
id: 'operation',
|
||||
@@ -440,6 +452,12 @@ Return ONLY the date string - no explanations, no quotes, no extra text.`,
|
||||
value: ['zoom_delete_meeting'],
|
||||
},
|
||||
},
|
||||
...getTrigger('zoom_meeting_started').subBlocks,
|
||||
...getTrigger('zoom_meeting_ended').subBlocks,
|
||||
...getTrigger('zoom_participant_joined').subBlocks,
|
||||
...getTrigger('zoom_participant_left').subBlocks,
|
||||
...getTrigger('zoom_recording_completed').subBlocks,
|
||||
...getTrigger('zoom_webhook').subBlocks,
|
||||
],
|
||||
tools: {
|
||||
access: [
|
||||
|
||||
@@ -102,7 +102,7 @@ import { KnowledgeBlock } from '@/blocks/blocks/knowledge'
|
||||
import { LangsmithBlock } from '@/blocks/blocks/langsmith'
|
||||
import { LaunchDarklyBlock } from '@/blocks/blocks/launchdarkly'
|
||||
import { LemlistBlock } from '@/blocks/blocks/lemlist'
|
||||
import { LinearBlock } from '@/blocks/blocks/linear'
|
||||
import { LinearBlock, LinearV2Block } from '@/blocks/blocks/linear'
|
||||
import { LinkedInBlock } from '@/blocks/blocks/linkedin'
|
||||
import { LinkupBlock } from '@/blocks/blocks/linkup'
|
||||
import { LoopsBlock } from '@/blocks/blocks/loops'
|
||||
@@ -171,6 +171,7 @@ import { SftpBlock } from '@/blocks/blocks/sftp'
|
||||
import { SharepointBlock } from '@/blocks/blocks/sharepoint'
|
||||
import { ShopifyBlock } from '@/blocks/blocks/shopify'
|
||||
import { SimilarwebBlock } from '@/blocks/blocks/similarweb'
|
||||
import { SixtyfourBlock } from '@/blocks/blocks/sixtyfour'
|
||||
import { SlackBlock } from '@/blocks/blocks/slack'
|
||||
import { SmtpBlock } from '@/blocks/blocks/smtp'
|
||||
import { SpotifyBlock } from '@/blocks/blocks/spotify'
|
||||
@@ -337,6 +338,7 @@ export const registry: Record<string, BlockConfig> = {
|
||||
launchdarkly: LaunchDarklyBlock,
|
||||
lemlist: LemlistBlock,
|
||||
linear: LinearBlock,
|
||||
linear_v2: LinearV2Block,
|
||||
linkedin: LinkedInBlock,
|
||||
linkup: LinkupBlock,
|
||||
loops: LoopsBlock,
|
||||
@@ -407,6 +409,7 @@ export const registry: Record<string, BlockConfig> = {
|
||||
sharepoint: SharepointBlock,
|
||||
shopify: ShopifyBlock,
|
||||
similarweb: SimilarwebBlock,
|
||||
sixtyfour: SixtyfourBlock,
|
||||
slack: SlackBlock,
|
||||
smtp: SmtpBlock,
|
||||
spotify: SpotifyBlock,
|
||||
|
||||
26
apps/sim/components/emcn/icons/folder.tsx
Normal file
26
apps/sim/components/emcn/icons/folder.tsx
Normal file
@@ -0,0 +1,26 @@
|
||||
import type { SVGProps } from 'react'
|
||||
|
||||
/**
|
||||
* Folder icon component
|
||||
* @param props - SVG properties including className, fill, etc.
|
||||
*/
|
||||
export function Folder(props: SVGProps<SVGSVGElement>) {
|
||||
return (
|
||||
<svg
|
||||
width='15'
|
||||
height='13'
|
||||
viewBox='0 0 15 13'
|
||||
fill='none'
|
||||
xmlns='http://www.w3.org/2000/svg'
|
||||
aria-hidden='true'
|
||||
{...props}
|
||||
>
|
||||
<path
|
||||
d='M4.32234e-07 5.83339V3.79628C4.32234e-07 3.19982 -0.000206684 2.71995 0.0338546 2.33339C0.0685083 1.94027 0.141749 1.59614 0.317058 1.28196C0.542977 0.877129 0.87707 0.543036 1.2819 0.317117C1.59608 0.141808 1.94021 0.0685674 2.33333 0.0339137C2.71989 -0.000147559 3.19976 5.9557e-05 3.79622 5.9557e-05C4.53268 5.9264e-05 5.03054 -0.0078558 5.47526 0.158914C6.46893 0.531571 6.86678 1.44909 7.19141 2.09837L7.47591 2.66673H10.3333C11.025 2.66673 11.5814 2.66637 12.0267 2.71165C12.4803 2.75779 12.874 2.85548 13.222 3.08795C13.495 3.27035 13.7297 3.50508 13.9121 3.77805C14.1446 4.12607 14.2423 4.51976 14.2884 4.97337C14.3337 5.41867 14.3333 5.97505 14.3333 6.66673C14.3333 7.82671 14.3338 8.73433 14.2604 9.45579C14.1862 10.1855 14.0323 10.7801 13.6875 11.2963C13.4078 11.7148 13.0481 12.0746 12.6296 12.3542C12.1134 12.6991 11.5188 12.8529 10.7891 12.9271C10.0676 13.0005 9.15998 13.0001 8 13.0001H7.16667C5.6096 13.0001 4.39144 13.0013 3.44271 12.8738C2.47955 12.7443 1.71959 12.4736 1.12305 11.877C0.526507 11.2805 0.255796 10.5205 0.126303 9.55735C-0.00122168 8.60861 4.32234e-07 7.39046 4.32234e-07 5.83339ZM1 5.83339C1 7.41888 1.00132 8.55789 1.11784 9.42454C1.23243 10.2767 1.45034 10.7902 1.83008 11.17C2.20982 11.5497 2.72339 11.7676 3.57552 11.8822C4.44217 11.9987 5.58118 12.0001 7.16667 12.0001H8C9.18079 12.0001 10.029 11.9994 10.6882 11.9324C11.3387 11.8661 11.7498 11.7396 12.0742 11.5228C12.3836 11.3161 12.6494 11.0503 12.8561 10.7409C13.0729 10.4165 13.1994 10.0054 13.2656 9.35488C13.3327 8.69577 13.3333 7.84752 13.3333 6.66673C13.3333 5.9541 13.3326 5.45727 13.2936 5.07428C13.2555 4.69972 13.1852 4.48976 13.0807 4.33339C12.9713 4.16961 12.8305 4.02877 12.6667 3.91933C12.5103 3.81488 12.3003 3.74454 11.9258 3.70644C11.5428 3.66748 11.046 3.66673 10.3333 3.66673H5.16667C4.89052 3.66673 4.66667 3.44287 4.66667 3.16673C4.66667 2.89058 4.89052 2.66673 5.16667 2.66673H6.35742L6.29688 2.54563C5.92188 1.79565 5.68045 1.30454 5.1237 1.09576C4.88932 1.00791 4.6112 1.00006 
3.79622 1.00006C3.18196 1.00006 2.75368 1.00072 2.42122 1.03001C2.09531 1.05874 1.90901 1.11196 1.76888 1.19016C1.52605 1.3257 1.32564 1.52611 1.1901 1.76894C1.1119 1.90907 1.05868 2.09537 1.02995 2.42128C1.00066 2.75373 1 3.18202 1 3.79628V5.83339Z'
|
||||
fill='currentColor'
|
||||
stroke='currentColor'
|
||||
strokeWidth='0.3'
|
||||
/>
|
||||
</svg>
|
||||
)
|
||||
}
|
||||
@@ -30,6 +30,7 @@ export { Eye } from './eye'
|
||||
export { File } from './file'
|
||||
export { FileX } from './file-x'
|
||||
export { Fingerprint } from './fingerprint'
|
||||
export { Folder } from './folder'
|
||||
export { FolderCode } from './folder-code'
|
||||
export { FolderPlus } from './folder-plus'
|
||||
export { Hammer } from './hammer'
|
||||
|
||||
@@ -2132,7 +2132,15 @@ export function Mem0Icon(props: SVGProps<SVGSVGElement>) {
|
||||
|
||||
export function ExtendIcon(props: SVGProps<SVGSVGElement>) {
|
||||
return (
|
||||
<svg {...props} xmlns='http://www.w3.org/2000/svg' viewBox='0 0 33 18' fill='none'>
|
||||
<svg {...props} xmlns='http://www.w3.org/2000/svg' viewBox='0 0 33 24' fill='none'>
|
||||
<path
|
||||
d='M6.3782 13.7746L4.28874 14.8056C4.11382 14.8899 4.11382 15.1367 4.28874 15.2211L15.8924 20.9462C16.1423 21.068 16.433 21.068 16.6797 20.9462L28.2864 15.2211C28.4582 15.1367 28.4582 14.8899 28.2864 14.8056L26.2 13.7746C27.3838 13.1937 28.5145 12.6378 29.4578 12.1787C30.2605 12.5722 31.0666 12.9689 31.8693 13.3625C32.3003 13.5749 32.5721 14.0123 32.5721 14.4932V15.5426C32.5721 16.0204 32.3003 16.4609 31.8693 16.6733C31.8693 16.6733 19.5816 22.7016 17.5542 23.6887C16.7296 24.0916 15.8955 24.1103 15.0615 23.7043C12.8123 22.6078 1.9646 17.2857 0.705842 16.6672C0.274806 16.4579 0 16.0174 0 15.5395V14.4899C4.1552e-05 14.012 0.271779 13.5715 0.702792 13.3591C1.43993 12.9968 2.2584 12.5973 3.12047 12.1756C4.06685 12.641 5.19446 13.1937 6.3782 13.7746Z'
|
||||
fill='currentColor'
|
||||
/>
|
||||
<path
|
||||
d='M15.1021 6.30652C15.9017 5.92234 16.717 5.9348 17.5103 6.32207C20.1715 7.62145 22.8297 8.92398 25.4878 10.2265L22.249 11.8257L16.6797 9.07681C16.433 8.955 16.1423 8.955 15.8924 9.07681L10.3262 11.8257L7.0874 10.2265C11.2142 8.20664 15.0743 6.3201 15.1021 6.30652Z'
|
||||
fill='currentColor'
|
||||
/>
|
||||
<path
|
||||
clipRule='evenodd'
|
||||
d='M16.2893 0C16.6984 1.91708e-05 17.1074 0.0970011 17.5103 0.293745C22.3018 2.63326 27.0841 4.98521 31.8693 7.33722C32.3003 7.54649 32.5721 7.9868 32.5721 8.46461V9.51422C32.5721 9.99522 32.3004 10.4357 31.8693 10.645C31.8693 10.645 19.5816 16.6732 17.5542 17.6634C17.1357 17.8696 16.692 17.9727 16.2859 17.9727C15.8799 17.9727 15.4707 17.8758 15.0615 17.6759C12.8124 16.5795 1.9646 11.2604 0.705842 10.6419C0.274826 10.4295 2.31482e-05 9.99216 0 9.51117V8.46461C4.59913e-05 7.98366 0.271816 7.54656 0.702792 7.33417C5.8977 4.7819 15.0599 0.301869 15.1021 0.281239C15.4957 0.0938275 15.8801 0 16.2893 0ZM16.2859 2.96124C16.1516 2.96126 16.0173 2.98909 15.8924 3.05153L4.28874 8.77696C4.11382 8.86442 4.11382 9.10831 4.28874 9.19577L15.8924 14.9209C16.0173 14.9802 16.1516 15.0115 16.2859 15.0115C16.4202 15.0115 16.5548 14.9802 16.6797 14.9209L28.2864 9.19577C28.4582 9.10831 28.4582 8.86442 28.2864 8.77696L16.6797 3.05153C16.5548 2.98906 16.4202 2.96124 16.2859 2.96124Z'
|
||||
@@ -5930,6 +5938,33 @@ export function PulseIcon(props: SVGProps<SVGSVGElement>) {
|
||||
)
|
||||
}
|
||||
|
||||
export function SixtyfourIcon(props: SVGProps<SVGSVGElement>) {
|
||||
return (
|
||||
<svg {...props} viewBox='0 0 158 143' fill='none' xmlns='http://www.w3.org/2000/svg'>
|
||||
<path
|
||||
d='M32.3952 141.17L31.637 140.73V142.481L31.8417 142.603L32.3952 142.921L32.9487 142.603L33.1534 142.481V140.73L32.3952 141.17Z'
|
||||
fill='currentColor'
|
||||
/>
|
||||
<path
|
||||
d='M33.1534 140.73V142.603H31.637V140.73L32.3952 141.17L33.1534 140.73Z'
|
||||
fill='currentColor'
|
||||
/>
|
||||
<path
|
||||
d='M93.3271 105.608V106.564L94.0854 106.996L94.8436 106.564V105.608H93.3271Z'
|
||||
fill='currentColor'
|
||||
/>
|
||||
<path
|
||||
d='M94.8436 105.608V106.564L94.0854 106.996L93.3271 106.564V105.608H94.8436Z'
|
||||
fill='currentColor'
|
||||
/>
|
||||
<path
|
||||
d='M125.681 12.9895L94.836 30.755L63.9909 12.9895L32.3951 31.1872H32.3875V68.8565L0.79933 87.0542H0.791748V124.723L31.6369 142.481V140.73L2.30822 123.844V89.6701L31.6369 106.564V140.73L32.3951 141.17L33.1533 140.73V106.564L62.482 89.6701V123.844L33.1533 140.73V142.481L63.2402 125.163L93.3271 142.481L93.5318 142.603L94.0853 142.921L94.6388 142.603L94.8436 142.481L125.689 124.723V87.0542L126.235 86.7357L126.439 86.6144L157.284 68.8565V31.1872L125.681 12.9895ZM63.2326 84.8629L33.904 67.9769V33.8031L63.2326 50.6967V84.8629ZM64.7491 50.6967L94.0777 33.8031V67.9769L64.7491 84.8629V50.6967ZM124.172 123.844L94.8436 140.73V106.564L94.0853 106.996L93.3271 106.564V140.73L63.9985 123.844V89.6701L93.3271 106.564V105.608H94.8436V106.564L124.172 89.6701V123.844ZM124.923 84.8629L95.5942 67.9769V33.8031L124.923 50.6891V84.8629ZM155.768 67.9769L126.439 84.8629V50.6967L155.768 33.8031V67.9769Z'
|
||||
fill='currentColor'
|
||||
/>
|
||||
</svg>
|
||||
)
|
||||
}
|
||||
|
||||
export function SimilarwebIcon(props: SVGProps<SVGSVGElement>) {
|
||||
return (
|
||||
<svg
|
||||
|
||||
@@ -1290,7 +1290,7 @@ export function AccessControl() {
|
||||
<span className='text-[var(--text-error)]'>
|
||||
All members will be removed from this group.
|
||||
</span>{' '}
|
||||
<span className='text-[var(--text-error)]'>This action cannot be undone.</span>
|
||||
<span className='text-[var(--text-tertiary)]'>This action cannot be undone.</span>
|
||||
</p>
|
||||
</ModalBody>
|
||||
<ModalFooter>
|
||||
|
||||
@@ -223,6 +223,45 @@ export function useUpdateWorkspaceCredential() {
|
||||
}
|
||||
return response.json()
|
||||
},
|
||||
onMutate: async (variables) => {
|
||||
await queryClient.cancelQueries({
|
||||
queryKey: workspaceCredentialKeys.detail(variables.credentialId),
|
||||
})
|
||||
await queryClient.cancelQueries({ queryKey: workspaceCredentialKeys.lists() })
|
||||
|
||||
const previousLists = queryClient.getQueriesData<WorkspaceCredential[]>({
|
||||
queryKey: workspaceCredentialKeys.lists(),
|
||||
})
|
||||
|
||||
queryClient.setQueriesData<WorkspaceCredential[]>(
|
||||
{ queryKey: workspaceCredentialKeys.lists() },
|
||||
(old) => {
|
||||
if (!old) return old
|
||||
return old.map((cred) =>
|
||||
cred.id === variables.credentialId
|
||||
? {
|
||||
...cred,
|
||||
...(variables.displayName !== undefined
|
||||
? { displayName: variables.displayName }
|
||||
: {}),
|
||||
...(variables.description !== undefined
|
||||
? { description: variables.description ?? null }
|
||||
: {}),
|
||||
}
|
||||
: cred
|
||||
)
|
||||
}
|
||||
)
|
||||
|
||||
return { previousLists }
|
||||
},
|
||||
onError: (_err, _variables, context) => {
|
||||
if (context?.previousLists) {
|
||||
for (const [queryKey, data] of context.previousLists) {
|
||||
queryClient.setQueryData(queryKey, data)
|
||||
}
|
||||
}
|
||||
},
|
||||
onSettled: (_data, _error, variables) => {
|
||||
queryClient.invalidateQueries({
|
||||
queryKey: workspaceCredentialKeys.detail(variables.credentialId),
|
||||
|
||||
@@ -83,13 +83,17 @@ async function fetchDeploymentInfo(
|
||||
* Hook to fetch deployment info for a workflow.
|
||||
* Provides isDeployed status, deployedAt timestamp, apiKey info, and needsRedeployment flag.
|
||||
*/
|
||||
export function useDeploymentInfo(workflowId: string | null, options?: { enabled?: boolean }) {
|
||||
export function useDeploymentInfo(
|
||||
workflowId: string | null,
|
||||
options?: { enabled?: boolean; refetchOnMount?: boolean | 'always' }
|
||||
) {
|
||||
return useQuery({
|
||||
queryKey: deploymentKeys.info(workflowId),
|
||||
queryFn: ({ signal }) => fetchDeploymentInfo(workflowId!, signal),
|
||||
enabled: Boolean(workflowId) && (options?.enabled ?? true),
|
||||
staleTime: 30 * 1000, // 30 seconds
|
||||
placeholderData: keepPreviousData,
|
||||
...(options?.refetchOnMount !== undefined && { refetchOnMount: options.refetchOnMount }),
|
||||
})
|
||||
}
|
||||
|
||||
|
||||
@@ -1,13 +1,9 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { keepPreviousData, useMutation, useQuery, useQueryClient } from '@tanstack/react-query'
|
||||
import type { WorkspaceEnvironmentData } from '@/lib/environment/api'
|
||||
import type { EnvironmentVariable, WorkspaceEnvironmentData } from '@/lib/environment/api'
|
||||
import { fetchPersonalEnvironment, fetchWorkspaceEnvironment } from '@/lib/environment/api'
|
||||
import { workspaceCredentialKeys } from '@/hooks/queries/credentials'
|
||||
import { API_ENDPOINTS } from '@/stores/constants'
|
||||
import type { EnvironmentVariable } from '@/stores/settings/environment'
|
||||
|
||||
export type { WorkspaceEnvironmentData } from '@/lib/environment/api'
|
||||
export type { EnvironmentVariable } from '@/stores/settings/environment'
|
||||
|
||||
const logger = createLogger('EnvironmentQueries')
|
||||
|
||||
@@ -27,8 +23,7 @@ export function usePersonalEnvironment() {
|
||||
return useQuery({
|
||||
queryKey: environmentKeys.personal(),
|
||||
queryFn: ({ signal }) => fetchPersonalEnvironment(signal),
|
||||
staleTime: 60 * 1000, // 1 minute
|
||||
placeholderData: keepPreviousData,
|
||||
staleTime: 60 * 1000,
|
||||
})
|
||||
}
|
||||
|
||||
|
||||
@@ -2,7 +2,7 @@ import { createLogger } from '@sim/logger'
|
||||
import { keepPreviousData, useMutation, useQuery, useQueryClient } from '@tanstack/react-query'
|
||||
import { generateId } from '@/lib/core/utils/uuid'
|
||||
import { getFolderMap } from '@/hooks/queries/utils/folder-cache'
|
||||
import { folderKeys } from '@/hooks/queries/utils/folder-keys'
|
||||
import { type FolderQueryScope, folderKeys } from '@/hooks/queries/utils/folder-keys'
|
||||
import { invalidateWorkflowLists } from '@/hooks/queries/utils/invalidate-workflow-lists'
|
||||
import {
|
||||
createOptimisticMutationHandlers,
|
||||
@@ -26,11 +26,16 @@ function mapFolder(folder: any): WorkflowFolder {
|
||||
sortOrder: folder.sortOrder,
|
||||
createdAt: new Date(folder.createdAt),
|
||||
updatedAt: new Date(folder.updatedAt),
|
||||
archivedAt: folder.archivedAt ? new Date(folder.archivedAt) : null,
|
||||
}
|
||||
}
|
||||
|
||||
async function fetchFolders(workspaceId: string, signal?: AbortSignal): Promise<WorkflowFolder[]> {
|
||||
const response = await fetch(`/api/folders?workspaceId=${workspaceId}`, { signal })
|
||||
async function fetchFolders(
|
||||
workspaceId: string,
|
||||
scope: FolderQueryScope = 'active',
|
||||
signal?: AbortSignal
|
||||
): Promise<WorkflowFolder[]> {
|
||||
const response = await fetch(`/api/folders?workspaceId=${workspaceId}&scope=${scope}`, { signal })
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error('Failed to fetch folders')
|
||||
@@ -40,10 +45,11 @@ async function fetchFolders(workspaceId: string, signal?: AbortSignal): Promise<
|
||||
return folders.map(mapFolder)
|
||||
}
|
||||
|
||||
export function useFolders(workspaceId?: string) {
|
||||
export function useFolders(workspaceId?: string, options?: { scope?: FolderQueryScope }) {
|
||||
const scope = options?.scope ?? 'active'
|
||||
return useQuery({
|
||||
queryKey: folderKeys.list(workspaceId),
|
||||
queryFn: ({ signal }) => fetchFolders(workspaceId as string, signal),
|
||||
queryKey: folderKeys.list(workspaceId, scope),
|
||||
queryFn: ({ signal }) => fetchFolders(workspaceId as string, scope, signal),
|
||||
enabled: Boolean(workspaceId),
|
||||
placeholderData: keepPreviousData,
|
||||
staleTime: 60 * 1000,
|
||||
@@ -53,7 +59,7 @@ export function useFolders(workspaceId?: string) {
|
||||
export function useFolderMap(workspaceId?: string) {
|
||||
return useQuery({
|
||||
queryKey: folderKeys.list(workspaceId),
|
||||
queryFn: ({ signal }) => fetchFolders(workspaceId as string, signal),
|
||||
queryFn: ({ signal }) => fetchFolders(workspaceId as string, 'active', signal),
|
||||
enabled: Boolean(workspaceId),
|
||||
placeholderData: keepPreviousData,
|
||||
staleTime: 60 * 1000,
|
||||
@@ -158,6 +164,7 @@ export function useCreateFolder() {
|
||||
),
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
archivedAt: null,
|
||||
}
|
||||
},
|
||||
(variables) => variables.id ?? generateId()
|
||||
@@ -223,7 +230,37 @@ export function useDeleteFolderMutation() {
|
||||
return response.json()
|
||||
},
|
||||
onSettled: (_data, _error, variables) => {
|
||||
queryClient.invalidateQueries({ queryKey: folderKeys.list(variables.workspaceId) })
|
||||
queryClient.invalidateQueries({ queryKey: folderKeys.lists() })
|
||||
return invalidateWorkflowLists(queryClient, variables.workspaceId, ['active', 'archived'])
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
interface RestoreFolderVariables {
|
||||
workspaceId: string
|
||||
folderId: string
|
||||
}
|
||||
|
||||
export function useRestoreFolder() {
|
||||
const queryClient = useQueryClient()
|
||||
|
||||
return useMutation({
|
||||
mutationFn: async ({ workspaceId, folderId }: RestoreFolderVariables) => {
|
||||
const response = await fetch(`/api/folders/${folderId}/restore`, {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({ workspaceId }),
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
const error = await response.json().catch(() => ({}))
|
||||
throw new Error(error.error || 'Failed to restore folder')
|
||||
}
|
||||
|
||||
return response.json()
|
||||
},
|
||||
onSettled: (_data, _error, variables) => {
|
||||
queryClient.invalidateQueries({ queryKey: folderKeys.lists() })
|
||||
return invalidateWorkflowLists(queryClient, variables.workspaceId, ['active', 'archived'])
|
||||
},
|
||||
})
|
||||
@@ -258,6 +295,7 @@ export function useDuplicateFolderMutation() {
|
||||
),
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
archivedAt: null,
|
||||
}
|
||||
},
|
||||
(variables) => variables.newId ?? generateId()
|
||||
|
||||
@@ -48,6 +48,7 @@ export interface TaskStoredMessageContext {
|
||||
knowledgeId?: string
|
||||
tableId?: string
|
||||
fileId?: string
|
||||
folderId?: string
|
||||
}
|
||||
|
||||
export interface TaskStoredMessage {
|
||||
|
||||
@@ -1,5 +1,8 @@
|
||||
export type FolderQueryScope = 'active' | 'archived'
|
||||
|
||||
export const folderKeys = {
|
||||
all: ['folders'] as const,
|
||||
lists: () => [...folderKeys.all, 'list'] as const,
|
||||
list: (workspaceId: string | undefined) => [...folderKeys.lists(), workspaceId ?? ''] as const,
|
||||
list: (workspaceId: string | undefined, scope: FolderQueryScope = 'active') =>
|
||||
[...folderKeys.lists(), workspaceId ?? '', scope] as const,
|
||||
}
|
||||
|
||||
@@ -9,6 +9,7 @@ import type {
|
||||
ExecutionCompletedData,
|
||||
ExecutionErrorData,
|
||||
ExecutionEvent,
|
||||
ExecutionPausedData,
|
||||
ExecutionStartedData,
|
||||
StreamChunkData,
|
||||
StreamDoneData,
|
||||
@@ -74,6 +75,9 @@ export async function processSSEStream(
|
||||
case 'execution:completed':
|
||||
callbacks.onExecutionCompleted?.(event.data)
|
||||
break
|
||||
case 'execution:paused':
|
||||
callbacks.onExecutionPaused?.(event.data)
|
||||
break
|
||||
case 'execution:error':
|
||||
callbacks.onExecutionError?.(event.data)
|
||||
break
|
||||
@@ -114,6 +118,7 @@ export async function processSSEStream(
|
||||
export interface ExecutionStreamCallbacks {
|
||||
onExecutionStarted?: (data: ExecutionStartedData) => void
|
||||
onExecutionCompleted?: (data: ExecutionCompletedData) => void
|
||||
onExecutionPaused?: (data: ExecutionPausedData) => void
|
||||
onExecutionError?: (data: ExecutionErrorData) => void
|
||||
onExecutionCancelled?: (data: ExecutionCancelledData) => void
|
||||
onBlockStarted?: (data: BlockStartedData) => void
|
||||
|
||||
62
apps/sim/hooks/use-reactive-conditions.ts
Normal file
62
apps/sim/hooks/use-reactive-conditions.ts
Normal file
@@ -0,0 +1,62 @@
|
||||
import { useCallback, useMemo } from 'react'
|
||||
import type { CanonicalModeOverrides } from '@/lib/workflows/subblocks/visibility'
|
||||
import { buildCanonicalIndex, resolveDependencyValue } from '@/lib/workflows/subblocks/visibility'
|
||||
import type { SubBlockConfig } from '@/blocks/types'
|
||||
import { useWorkspaceCredential } from '@/hooks/queries/credentials'
|
||||
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
|
||||
|
||||
/**
|
||||
* Evaluates reactive conditions for subblocks. Always calls the same hooks
|
||||
* regardless of whether a reactive condition exists (Rules of Hooks).
|
||||
*
|
||||
* Returns a Set of subblock IDs that should be hidden.
|
||||
*/
|
||||
export function useReactiveConditions(
|
||||
subBlocks: SubBlockConfig[],
|
||||
blockId: string,
|
||||
activeWorkflowId: string | null,
|
||||
canonicalModeOverrides?: CanonicalModeOverrides
|
||||
): Set<string> {
|
||||
const reactiveSubBlock = useMemo(() => subBlocks.find((sb) => sb.reactiveCondition), [subBlocks])
|
||||
const reactiveCond = reactiveSubBlock?.reactiveCondition
|
||||
|
||||
const canonicalIndex = useMemo(() => buildCanonicalIndex(subBlocks), [subBlocks])
|
||||
|
||||
// Resolve watchFields through canonical index to get the active credential value
|
||||
const watchedCredentialId = useSubBlockStore(
|
||||
useCallback(
|
||||
(state) => {
|
||||
if (!reactiveCond || !activeWorkflowId) return ''
|
||||
const blockValues = state.workflowValues[activeWorkflowId]?.[blockId] ?? {}
|
||||
for (const field of reactiveCond.watchFields) {
|
||||
const val = resolveDependencyValue(
|
||||
field,
|
||||
blockValues,
|
||||
canonicalIndex,
|
||||
canonicalModeOverrides
|
||||
)
|
||||
if (val && typeof val === 'string') return val
|
||||
}
|
||||
return ''
|
||||
},
|
||||
[reactiveCond, activeWorkflowId, blockId, canonicalIndex, canonicalModeOverrides]
|
||||
)
|
||||
)
|
||||
|
||||
// Always call useWorkspaceCredential (stable hook count), disable when not needed
|
||||
const { data: credential } = useWorkspaceCredential(
|
||||
watchedCredentialId || undefined,
|
||||
Boolean(reactiveCond && watchedCredentialId)
|
||||
)
|
||||
|
||||
return useMemo(() => {
|
||||
const hidden = new Set<string>()
|
||||
if (!reactiveSubBlock || !reactiveCond) return hidden
|
||||
|
||||
const conditionMet = credential?.type === reactiveCond.requiredType
|
||||
if (!conditionMet) {
|
||||
hidden.add(reactiveSubBlock.id)
|
||||
}
|
||||
return hidden
|
||||
}, [reactiveSubBlock, reactiveCond, credential?.type])
|
||||
}
|
||||
@@ -264,7 +264,7 @@ export interface ParsedSSEChunk {
|
||||
/** Final success flag if this chunk contains the final event */
|
||||
finalSuccess?: boolean
|
||||
/** Terminal task state if known */
|
||||
terminalState?: 'completed' | 'failed' | 'canceled'
|
||||
terminalState?: 'completed' | 'failed' | 'canceled' | 'input-required'
|
||||
/** Final artifacts if present on terminal event */
|
||||
finalArtifacts?: Artifact[]
|
||||
/** Whether this chunk indicates the stream is done */
|
||||
@@ -326,6 +326,15 @@ export function parseWorkflowSSEChunk(chunk: string): ParsedSSEChunk {
|
||||
result.finalSuccess = parsed.data?.success !== false
|
||||
result.terminalState = result.finalSuccess ? 'completed' : 'failed'
|
||||
result.isDone = true
|
||||
} else if (parsed.type === 'execution:paused') {
|
||||
if (parsed.data?.output?.content) {
|
||||
result.finalContent = parsed.data.output.content
|
||||
} else if (parsed.data?.output) {
|
||||
result.finalContent = JSON.stringify(parsed.data.output)
|
||||
}
|
||||
result.finalSuccess = true
|
||||
result.terminalState = 'input-required'
|
||||
result.isDone = true
|
||||
} else if (parsed.type === 'execution:cancelled') {
|
||||
result.finalSuccess = false
|
||||
result.terminalState = 'canceled'
|
||||
|
||||
@@ -67,6 +67,7 @@ export const AuditAction = {
|
||||
FOLDER_CREATED: 'folder.created',
|
||||
FOLDER_DELETED: 'folder.deleted',
|
||||
FOLDER_DUPLICATED: 'folder.duplicated',
|
||||
FOLDER_RESTORED: 'folder.restored',
|
||||
|
||||
// Forms
|
||||
FORM_CREATED: 'form.created',
|
||||
|
||||
@@ -320,17 +320,46 @@ async function reportCompletion(
|
||||
data?: Record<string, unknown>
|
||||
): Promise<void> {
|
||||
try {
|
||||
const body = JSON.stringify({
|
||||
toolCallId,
|
||||
status,
|
||||
message: message || (status === 'success' ? 'Tool completed' : 'Tool failed'),
|
||||
...(data ? { data } : {}),
|
||||
})
|
||||
const res = await fetch(COPILOT_CONFIRM_API_PATH, {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({
|
||||
toolCallId,
|
||||
status,
|
||||
message: message || (status === 'success' ? 'Tool completed' : 'Tool failed'),
|
||||
...(data ? { data } : {}),
|
||||
}),
|
||||
body,
|
||||
})
|
||||
if (!res.ok) {
|
||||
// Next.js silently truncates request bodies beyond its body size limit (default 10MB),
|
||||
// corrupting the JSON and causing a server-side parse error (500). When the request fails
|
||||
// and the payload is large, retry without logs (the largest field) to fit under the limit.
|
||||
const LARGE_PAYLOAD_THRESHOLD = 10 * 1024 * 1024
|
||||
const bodySize = new Blob([body]).size
|
||||
if (!res.ok && data && bodySize > LARGE_PAYLOAD_THRESHOLD) {
|
||||
const { logs: _logs, ...dataWithoutLogs } = data
|
||||
logger.warn('[RunTool] reportCompletion failed with large payload, retrying without logs', {
|
||||
toolCallId,
|
||||
status: res.status,
|
||||
bodySize,
|
||||
})
|
||||
const retryRes = await fetch(COPILOT_CONFIRM_API_PATH, {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({
|
||||
toolCallId,
|
||||
status,
|
||||
message: message || (status === 'success' ? 'Tool completed' : 'Tool failed'),
|
||||
data: dataWithoutLogs,
|
||||
}),
|
||||
})
|
||||
if (!retryRes.ok) {
|
||||
logger.warn('[RunTool] reportCompletion retry also failed', {
|
||||
toolCallId,
|
||||
status: retryRes.status,
|
||||
})
|
||||
}
|
||||
} else if (!res.ok) {
|
||||
logger.warn('[RunTool] reportCompletion failed', { toolCallId, status: res.status })
|
||||
}
|
||||
} catch (err) {
|
||||
|
||||
@@ -33,6 +33,7 @@ export type AgentContextType =
|
||||
| 'templates'
|
||||
| 'workflow_block'
|
||||
| 'docs'
|
||||
| 'folder'
|
||||
| 'active_resource'
|
||||
|
||||
export interface AgentContext {
|
||||
@@ -178,6 +179,11 @@ export async function processContextsServer(
|
||||
if (!result) return null
|
||||
return { type: 'file', tag: ctx.label ? `@${ctx.label}` : '@', content: result.content }
|
||||
}
|
||||
if (ctx.kind === 'folder' && 'folderId' in ctx && ctx.folderId && currentWorkspaceId) {
|
||||
const result = await resolveFolderResource(ctx.folderId, currentWorkspaceId)
|
||||
if (!result) return null
|
||||
return { type: 'folder', tag: ctx.label ? `@${ctx.label}` : '@', content: result.content }
|
||||
}
|
||||
if (ctx.kind === 'docs') {
|
||||
try {
|
||||
const { searchDocumentationServerTool } = await import(
|
||||
@@ -776,6 +782,9 @@ export async function resolveActiveResourceContext(
|
||||
case 'file': {
|
||||
return await resolveFileResource(resourceId, workspaceId)
|
||||
}
|
||||
case 'folder': {
|
||||
return await resolveFolderResource(resourceId, workspaceId)
|
||||
}
|
||||
default:
|
||||
return null
|
||||
}
|
||||
@@ -812,3 +821,31 @@ async function resolveFileResource(
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
||||
async function resolveFolderResource(
|
||||
folderId: string,
|
||||
workspaceId: string
|
||||
): Promise<AgentContext | null> {
|
||||
try {
|
||||
const { workflowFolder, workflow } = await import('@sim/db/schema')
|
||||
const [folder] = await db
|
||||
.select({ id: workflowFolder.id, name: workflowFolder.name })
|
||||
.from(workflowFolder)
|
||||
.where(and(eq(workflowFolder.id, folderId), eq(workflowFolder.workspaceId, workspaceId)))
|
||||
.limit(1)
|
||||
if (!folder) return null
|
||||
|
||||
const workflows = await db
|
||||
.select({ id: workflow.id, name: workflow.name })
|
||||
.from(workflow)
|
||||
.where(and(eq(workflow.folderId, folderId), eq(workflow.workspaceId, workspaceId)))
|
||||
|
||||
const workflowList = workflows.map((w) => `- ${w.name} (id: ${w.id})`).join('\n')
|
||||
const content = `Folder: ${folder.name} (id: ${folder.id})\nWorkflows:\n${workflowList || '(empty)'}`
|
||||
|
||||
return { type: 'active_resource', tag: '@active_resource', content }
|
||||
} catch (error) {
|
||||
logger.error('Failed to resolve folder resource', { folderId, error })
|
||||
return null
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,4 +1,10 @@
|
||||
export type MothershipResourceType = 'table' | 'file' | 'workflow' | 'knowledgebase' | 'generic'
|
||||
export type MothershipResourceType =
|
||||
| 'table'
|
||||
| 'file'
|
||||
| 'workflow'
|
||||
| 'knowledgebase'
|
||||
| 'folder'
|
||||
| 'generic'
|
||||
|
||||
export interface MothershipResource {
|
||||
type: MothershipResourceType
|
||||
@@ -11,4 +17,5 @@ export const VFS_DIR_TO_RESOURCE: Record<string, MothershipResourceType> = {
|
||||
files: 'file',
|
||||
workflows: 'workflow',
|
||||
knowledgebases: 'knowledgebase',
|
||||
folders: 'folder',
|
||||
} as const
|
||||
|
||||
@@ -41,7 +41,7 @@ export async function persistChatResources(
|
||||
|
||||
const existing = Array.isArray(chat.resources) ? (chat.resources as MothershipResource[]) : []
|
||||
const map = new Map<string, MothershipResource>()
|
||||
const GENERIC = new Set(['Table', 'File', 'Workflow', 'Knowledge Base'])
|
||||
const GENERIC = new Set(['Table', 'File', 'Workflow', 'Knowledge Base', 'Folder'])
|
||||
|
||||
for (const r of existing) {
|
||||
map.set(`${r.type}:${r.id}`, r)
|
||||
|
||||
@@ -5,6 +5,7 @@ import { getCopilotToolDescription } from '@/lib/copilot/tool-descriptions'
|
||||
import type { BaseServerTool } from '@/lib/copilot/tools/server/base-tool'
|
||||
import { GetBlocksMetadataInput, GetBlocksMetadataResult } from '@/lib/copilot/tools/shared/schemas'
|
||||
import { getAllowedIntegrationsFromEnv, isHosted } from '@/lib/core/config/feature-flags'
|
||||
import { getServiceAccountProviderForProviderId } from '@/lib/oauth/utils'
|
||||
import { registry as blockRegistry } from '@/blocks/registry'
|
||||
import { AuthMode, type BlockConfig, isHiddenFromDisplay } from '@/blocks/types'
|
||||
import { getUserPermissionConfig } from '@/ee/access-control/utils/permission-check'
|
||||
@@ -342,6 +343,20 @@ function transformBlockMetadata(metadata: CopilotBlockMetadata): any {
|
||||
service: metadata.id, // e.g., 'gmail', 'slack', etc.
|
||||
description: `OAuth authentication required for ${metadata.name}`,
|
||||
}
|
||||
|
||||
// Check if this service also supports service account credentials
|
||||
const oauthSubBlock = metadata.inputSchema?.find(
|
||||
(sb: CopilotSubblockMetadata) => sb.type === 'oauth-input' && sb.serviceId
|
||||
)
|
||||
if (oauthSubBlock?.serviceId) {
|
||||
const serviceAccountProviderId = getServiceAccountProviderForProviderId(
|
||||
oauthSubBlock.serviceId
|
||||
)
|
||||
if (serviceAccountProviderId) {
|
||||
transformed.requiredCredentials.serviceAccountType = serviceAccountProviderId
|
||||
transformed.requiredCredentials.description = `OAuth or service account authentication supported for ${metadata.name}`
|
||||
}
|
||||
}
|
||||
} else if (metadata.authType === 'API Key') {
|
||||
transformed.requiredCredentials = {
|
||||
type: 'api_key',
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user