Compare commits


1 Commit

| Author | SHA1 | Message | Date |
|--------|------|---------|------|
| Emir Karabeg | a5a554ae67 | feat(a2a): a2a added | 2026-01-08 20:34:11 -08:00 |
1121 changed files with 23691 additions and 68555 deletions


@@ -1,57 +1,60 @@
---
description: Testing patterns with Vitest and @sim/testing
description: Testing patterns with Vitest
globs: ["apps/sim/**/*.test.ts", "apps/sim/**/*.test.tsx"]
---
# Testing Patterns
Use Vitest. Test files: `feature.ts` → `feature.test.ts`
Use Vitest. Test files live next to source: `feature.ts` → `feature.test.ts`
## Structure
```typescript
/**
* Tests for [feature name]
*
* @vitest-environment node
*/
import { databaseMock, loggerMock } from '@sim/testing'
import { describe, expect, it, vi } from 'vitest'
vi.mock('@sim/db', () => databaseMock)
// 1. Mocks BEFORE imports
vi.mock('@sim/db', () => ({ db: { select: vi.fn() } }))
vi.mock('@sim/logger', () => loggerMock)
// 2. Imports AFTER mocks
import { describe, expect, it, vi, beforeEach, afterEach } from 'vitest'
import { createSession, loggerMock } from '@sim/testing'
import { myFunction } from '@/lib/feature'
describe('myFunction', () => {
beforeEach(() => vi.clearAllMocks())
it.concurrent('isolated tests run in parallel', () => { ... })
it('should do something', () => {
expect(myFunction()).toBe(expected)
})
it.concurrent('runs in parallel', () => { ... })
})
```
## @sim/testing Package
Always prefer over local mocks.
```typescript
// Factories - create test data
import { createBlock, createWorkflow, createSession } from '@sim/testing'
| Category | Utilities |
|----------|-----------|
| **Mocks** | `loggerMock`, `databaseMock`, `setupGlobalFetchMock()` |
| **Factories** | `createSession()`, `createWorkflowRecord()`, `createBlock()`, `createExecutorContext()` |
| **Builders** | `WorkflowBuilder`, `ExecutionContextBuilder` |
| **Assertions** | `expectWorkflowAccessGranted()`, `expectBlockExecuted()` |
// Mocks - pre-configured mocks
import { loggerMock, databaseMock, fetchMock } from '@sim/testing'
// Builders - fluent API for complex objects
import { ExecutionBuilder, WorkflowBuilder } from '@sim/testing'
```
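The builders expose a fluent API for assembling complex test objects. Below is a minimal sketch of how a factory and a builder might be combined in a test; the exact `WorkflowBuilder` method names are assumptions for illustration, not taken from the package:
```typescript
import { describe, expect, it } from 'vitest'
import { createBlock, WorkflowBuilder } from '@sim/testing'

describe('workflow fixtures', () => {
  it('builds a workflow from factory-created blocks', () => {
    // Hypothetical fluent usage - method and property names are illustrative only
    const workflow = new WorkflowBuilder()
      .withName('triage')
      .withBlock(createBlock({ type: 'router' }))
      .build()

    expect(workflow.blocks).toHaveLength(1)
  })
})
```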
## Rules
1. `@vitest-environment node` directive at file top
2. `vi.mock()` calls before importing mocked modules
3. `@sim/testing` utilities over local mocks
4. `it.concurrent` for isolated tests (no shared mutable state)
2. **Mocks before imports** - `vi.mock()` calls must come first
3. Use `@sim/testing` factories over manual test data
4. `it.concurrent` for independent tests (faster)
5. `beforeEach(() => vi.clearAllMocks())` to reset state
## Hoisted Mocks
For mutable mock references:
```typescript
const mockFn = vi.hoisted(() => vi.fn())
vi.mock('@/lib/module', () => ({ myFunction: mockFn }))
mockFn.mockResolvedValue({ data: 'test' })
```
6. Group related tests with nested `describe` blocks
7. Test file naming: `*.test.ts` (not `*.spec.ts`)


@@ -173,13 +173,13 @@ Use Vitest. Test files: `feature.ts` → `feature.test.ts`
/**
* @vitest-environment node
*/
import { databaseMock, loggerMock } from '@sim/testing'
// Mocks BEFORE imports
vi.mock('@sim/db', () => ({ db: { select: vi.fn() } }))
// Imports AFTER mocks
import { describe, expect, it, vi } from 'vitest'
vi.mock('@sim/db', () => databaseMock)
vi.mock('@sim/logger', () => loggerMock)
import { myFunction } from '@/lib/feature'
import { createSession, loggerMock } from '@sim/testing'
describe('feature', () => {
beforeEach(() => vi.clearAllMocks())
@@ -187,7 +187,7 @@ describe('feature', () => {
})
```
Use `@sim/testing` mocks/factories over local test data. See `.cursor/rules/sim-testing.mdc` for details.
Use `@sim/testing` factories over manual test data.
## Utils Rules


@@ -4575,22 +4575,3 @@ export function FirefliesIcon(props: SVGProps<SVGSVGElement>) {
</svg>
)
}
export function BedrockIcon(props: SVGProps<SVGSVGElement>) {
return (
<svg {...props} viewBox='0 0 24 24' xmlns='http://www.w3.org/2000/svg'>
<defs>
<linearGradient id='bedrock_gradient' x1='80%' x2='20%' y1='20%' y2='80%'>
<stop offset='0%' stopColor='#6350FB' />
<stop offset='50%' stopColor='#3D8FFF' />
<stop offset='100%' stopColor='#9AD8F8' />
</linearGradient>
</defs>
<path
d='M13.05 15.513h3.08c.214 0 .389.177.389.394v1.82a1.704 1.704 0 011.296 1.661c0 .943-.755 1.708-1.685 1.708-.931 0-1.686-.765-1.686-1.708 0-.807.554-1.484 1.297-1.662v-1.425h-2.69v4.663a.395.395 0 01-.188.338l-2.69 1.641a.385.385 0 01-.405-.002l-4.926-3.086a.395.395 0 01-.185-.336V16.3L2.196 14.87A.395.395 0 012 14.555L2 14.528V9.406c0-.14.073-.27.192-.34l2.465-1.462V4.448c0-.129.062-.249.165-.322l.021-.014L9.77 1.058a.385.385 0 01.407 0l2.69 1.675a.395.395 0 01.185.336V7.6h3.856V5.683a1.704 1.704 0 01-1.296-1.662c0-.943.755-1.708 1.685-1.708.931 0 1.685.765 1.685 1.708 0 .807-.553 1.484-1.296 1.662v2.311a.391.391 0 01-.389.394h-4.245v1.806h6.624a1.69 1.69 0 011.64-1.313c.93 0 1.685.764 1.685 1.707 0 .943-.754 1.708-1.685 1.708a1.69 1.69 0 01-1.64-1.314H13.05v1.937h4.953l.915 1.18a1.66 1.66 0 01.84-.227c.931 0 1.685.764 1.685 1.707 0 .943-.754 1.708-1.685 1.708-.93 0-1.685-.765-1.685-1.708 0-.346.102-.668.276-.937l-.724-.935H13.05v1.806zM9.973 1.856L7.93 3.122V6.09h-.778V3.604L5.435 4.669v2.945l2.11 1.36L9.712 7.61V5.334h.778V7.83c0 .136-.07.263-.184.335L7.963 9.638v2.081l1.422 1.009-.446.646-1.406-.998-1.53 1.005-.423-.66 1.605-1.055v-1.99L5.038 8.29l-2.26 1.34v1.676l1.972-1.189.398.677-2.37 1.429V14.3l2.166 1.258 2.27-1.368.397.677-2.176 1.311V19.3l1.876 1.175 2.365-1.426.398.678-2.017 1.216 1.918 1.201 2.298-1.403v-5.78l-4.758 2.893-.4-.675 5.158-3.136V3.289L9.972 1.856zM16.13 18.47a.913.913 0 00-.908.92c0 .507.406.918.908.918a.913.913 0 00.907-.919.913.913 0 00-.907-.92zm3.63-3.81a.913.913 0 00-.908.92c0 .508.406.92.907.92a.913.913 0 00.908-.92.913.913 0 00-.908-.92zm1.555-4.99a.913.913 0 00-.908.92c0 .507.407.918.908.918a.913.913 0 00.907-.919.913.913 0 00-.907-.92zM17.296 3.1a.913.913 0 00-.907.92c0 .508.406.92.907.92a.913.913 0 00.908-.92.913.913 0 00-.908-.92z'
fill='url(#bedrock_gradient)'
fillRule='nonzero'
/>
</svg>
)
}


@@ -137,7 +137,7 @@ export const blockTypeToIconMap: Record<string, IconComponent> = {
circleback: CirclebackIcon,
clay: ClayIcon,
confluence: ConfluenceIcon,
cursor_v2: CursorIcon,
cursor: CursorIcon,
datadog: DatadogIcon,
discord: DiscordIcon,
dropbox: DropboxIcon,
@@ -149,10 +149,10 @@ export const blockTypeToIconMap: Record<string, IconComponent> = {
file: DocumentIcon,
firecrawl: FirecrawlIcon,
fireflies: FirefliesIcon,
github_v2: GithubIcon,
github: GithubIcon,
gitlab: GitLabIcon,
gmail_v2: GmailIcon,
google_calendar_v2: GoogleCalendarIcon,
gmail: GmailIcon,
google_calendar: GoogleCalendarIcon,
google_docs: GoogleDocsIcon,
google_drive: GoogleDriveIcon,
google_forms: GoogleFormsIcon,
@@ -170,7 +170,7 @@ export const blockTypeToIconMap: Record<string, IconComponent> = {
image_generator: ImageIcon,
imap: MailServerIcon,
incidentio: IncidentioIcon,
intercom_v2: IntercomIcon,
intercom: IntercomIcon,
jina: JinaAIIcon,
jira: JiraIcon,
jira_service_management: JiraServiceManagementIcon,
@@ -190,7 +190,7 @@ export const blockTypeToIconMap: Record<string, IconComponent> = {
mongodb: MongoDBIcon,
mysql: MySQLIcon,
neo4j: Neo4jIcon,
notion_v2: NotionIcon,
notion: NotionIcon,
onedrive: MicrosoftOneDriveIcon,
openai: OpenAIIcon,
outlook: OutlookIcon,
@@ -226,6 +226,7 @@ export const blockTypeToIconMap: Record<string, IconComponent> = {
supabase: SupabaseIcon,
tavily: TavilyIcon,
telegram: TelegramIcon,
thinking: BrainIcon,
translate: TranslateIcon,
trello: TrelloIcon,
tts: TTSIcon,


@@ -1,76 +0,0 @@
---
title: Enterprise
description: Enterprise features for organizations with advanced security and compliance requirements
---
import { Callout } from 'fumadocs-ui/components/callout'
Sim Studio Enterprise provides advanced features for organizations with heightened security, compliance, and management requirements.
---
## Bring Your Own Key (BYOK)
Use your own API keys for AI model providers instead of Sim Studio's hosted keys.
### Supported Providers
| Provider | Usage |
|----------|-------|
| OpenAI | Knowledge Base embeddings, Agent block |
| Anthropic | Agent block |
| Google | Agent block |
| Mistral | Knowledge Base OCR |
### Setup
1. Navigate to **Settings** → **BYOK** in your workspace
2. Click **Add Key** for your provider
3. Enter your API key and save
<Callout type="warn">
BYOK keys are stored encrypted. Only organization admins and owners can manage keys.
</Callout>
When configured, workflows use your key instead of Sim Studio's hosted keys. If removed, workflows automatically fall back to the hosted keys.
---
## Single Sign-On (SSO)
Enterprise authentication with SAML 2.0 and OIDC support for centralized identity management.
### Supported Providers
- Okta
- Azure AD / Entra ID
- Google Workspace
- OneLogin
- Any SAML 2.0 or OIDC provider
### Setup
1. Navigate to **Settings** → **SSO** in your workspace
2. Choose your identity provider
3. Configure the connection using your IdP's metadata
4. Enable SSO for your organization
<Callout type="info">
Once SSO is enabled, team members authenticate through your identity provider instead of email/password.
</Callout>
---
## Self-Hosted
For self-hosted deployments, enterprise features can be enabled via environment variables:
| Variable | Description |
|----------|-------------|
| `SSO_ENABLED`, `NEXT_PUBLIC_SSO_ENABLED` | Single Sign-On with SAML/OIDC |
| `CREDENTIAL_SETS_ENABLED`, `NEXT_PUBLIC_CREDENTIAL_SETS_ENABLED` | Polling Groups for email triggers |
<Callout type="warn">
BYOK is only available on hosted Sim Studio. Self-hosted deployments configure AI provider keys directly via environment variables.
</Callout>


@@ -49,40 +49,40 @@ The model breakdown shows:
<Tabs items={['Hosted Models', 'Bring Your Own API Key']}>
<Tab>
**Hosted Models** - Sim provides API keys with a 1.4x pricing multiplier for Agent blocks:
**Hosted Models** - Sim provides API keys with a 2x pricing multiplier:
**OpenAI**
| Model | Base Price (Input/Output) | Hosted Price (Input/Output) |
|-------|---------------------------|----------------------------|
| GPT-5.1 | $1.25 / $10.00 | $1.75 / $14.00 |
| GPT-5 | $1.25 / $10.00 | $1.75 / $14.00 |
| GPT-5 Mini | $0.25 / $2.00 | $0.35 / $2.80 |
| GPT-5 Nano | $0.05 / $0.40 | $0.07 / $0.56 |
| GPT-4o | $2.50 / $10.00 | $3.50 / $14.00 |
| GPT-4.1 | $2.00 / $8.00 | $2.80 / $11.20 |
| GPT-4.1 Mini | $0.40 / $1.60 | $0.56 / $2.24 |
| GPT-4.1 Nano | $0.10 / $0.40 | $0.14 / $0.56 |
| o1 | $15.00 / $60.00 | $21.00 / $84.00 |
| o3 | $2.00 / $8.00 | $2.80 / $11.20 |
| o4 Mini | $1.10 / $4.40 | $1.54 / $6.16 |
| GPT-5.1 | 1,25 $ / 10,00 $ | 2,50 $ / 20,00 $ |
| GPT-5 | 1,25 $ / 10,00 $ | 2,50 $ / 20,00 $ |
| GPT-5 Mini | 0,25 $ / 2,00 $ | 0,50 $ / 4,00 $ |
| GPT-5 Nano | 0,05 $ / 0,40 $ | 0,10 $ / 0,80 $ |
| GPT-4o | 2,50 $ / 10,00 $ | 5,00 $ / 20,00 $ |
| GPT-4.1 | 2,00 $ / 8,00 $ | 4,00 $ / 16,00 $ |
| GPT-4.1 Mini | 0,40 $ / 1,60 $ | 0,80 $ / 3,20 $ |
| GPT-4.1 Nano | 0,10 $ / 0,40 $ | 0,20 $ / 0,80 $ |
| o1 | 15,00 $ / 60,00 $ | 30,00 $ / 120,00 $ |
| o3 | 2,00 $ / 8,00 $ | 4,00 $ / 16,00 $ |
| o4 Mini | 1,10 $ / 4,40 $ | 2,20 $ / 8,80 $ |
**Anthropic**
| Model | Base Price (Input/Output) | Hosted Price (Input/Output) |
|-------|---------------------------|----------------------------|
| Claude Opus 4.5 | $5.00 / $25.00 | $7.00 / $35.00 |
| Claude Opus 4.1 | $15.00 / $75.00 | $21.00 / $105.00 |
| Claude Sonnet 4.5 | $3.00 / $15.00 | $4.20 / $21.00 |
| Claude Sonnet 4.0 | $3.00 / $15.00 | $4.20 / $21.00 |
| Claude Haiku 4.5 | $1.00 / $5.00 | $1.40 / $7.00 |
| Claude Opus 4.5 | 5,00 $ / 25,00 $ | 10,00 $ / 50,00 $ |
| Claude Opus 4.1 | 15,00 $ / 75,00 $ | 30,00 $ / 150,00 $ |
| Claude Sonnet 4.5 | 3,00 $ / 15,00 $ | 6,00 $ / 30,00 $ |
| Claude Sonnet 4.0 | 3,00 $ / 15,00 $ | 6,00 $ / 30,00 $ |
| Claude Haiku 4.5 | 1,00 $ / 5,00 $ | 2,00 $ / 10,00 $ |
**Google**
| Model | Base Price (Input/Output) | Hosted Price (Input/Output) |
|-------|---------------------------|----------------------------|
| Gemini 3 Pro Preview | $2.00 / $12.00 | $2.80 / $16.80 |
| Gemini 2.5 Pro | $1.25 / $10.00 | $1.75 / $14.00 |
| Gemini 2.5 Flash | $0.30 / $2.50 | $0.42 / $3.50 |
| Gemini 3 Pro Preview | 2,00 $ / 12,00 $ | 4,00 $ / 24,00 $ |
| Gemini 2.5 Pro | 1,25 $ / 10,00 $ | 2,50 $ / 20,00 $ |
| Gemini 2.5 Flash | 0,30 $ / 2,50 $ | 0,60 $ / 5,00 $ |
*The 1.4x multiplier covers infrastructure and API management costs.*
*The 2x multiplier covers infrastructure and API management costs.*
</Tab>
<Tab>


@@ -2,15 +2,16 @@
title: Router
---
import { Callout } from 'fumadocs-ui/components/callout'
import { Tab, Tabs } from 'fumadocs-ui/components/tabs'
import { Image } from '@/components/ui/image'
The Router block uses AI to intelligently route workflows based on content analysis. Unlike Condition blocks that use simple rules, Routers understand context and intent. Each route you define creates a separate output port, allowing you to connect different paths to different downstream blocks.
The Router block uses AI to intelligently route workflows based on content analysis. Unlike Condition blocks that use simple rules, Routers understand context and intent.
<div className="flex justify-center">
<Image
src="/static/blocks/router.png"
alt="Router Block with Multiple Route Ports"
alt="Router Block with Multiple Paths"
width={500}
height={400}
className="my-6"
@@ -31,23 +32,21 @@ The Router block uses AI to intelligently route workflows based on content analy
## Configuration Options
### Context
### Content/Prompt
The context that the Router will analyze to make routing decisions. This is the input data that gets evaluated against your route descriptions. It can be:
The content or prompt that the Router will analyze to make routing decisions. This can be:
- A direct user query or input
- Output from a previous block
- A system-generated message
- Any text content that needs intelligent routing
### Routes
### Target Blocks
Define the possible paths that the Router can take. Each route consists of:
The possible destination blocks that the Router can select from. The Router will automatically detect connected blocks, but you can also:
- **Route Title**: A name for the route (e.g., "Sales", "Support", "Technical")
- **Route Description**: A clear description of when this route should be selected (e.g., "Route here when the query is about pricing, purchasing, or sales inquiries")
Each route you add creates a **separate output port** on the Router block. Connect each port to the appropriate downstream block for that route.
- Customize the descriptions of target blocks to improve routing accuracy
- Specify routing criteria for each target block
- Exclude certain blocks from being considered as routing targets
### Model Selection
@@ -67,9 +66,8 @@ Your API key for the selected LLM provider. This is securely stored and used for
## Outputs
- **`<router.context>`**: The context that was analyzed
- **`<router.selectedRoute>`**: The ID of the selected route
- **`<router.selected_path>`**: Details of the chosen destination block
- **`<router.prompt>`**: Summary of the routing prompt
- **`<router.selected_path>`**: Chosen destination block
- **`<router.tokens>`**: Token usage statistics
- **`<router.cost>`**: Estimated routing cost
- **`<router.model>`**: Model used for decision-making
@@ -77,43 +75,26 @@ Your API key for the selected LLM provider. This is securely stored and used for
## Example Use Cases
**Customer Support Triage** - Route tickets to specialized departments
```
Input (Ticket) → Router
├── [Sales Route] → Agent (Sales Team)
├── [Technical Route] → Agent (Engineering)
└── [Billing Route] → Agent (Finance)
Input (Ticket) → Router → Agent (Engineering) or Agent (Finance)
```
**Content Classification** - Classify and route user-generated content
```
Input (Feedback) → Router
├── [Product Feedback] → Workflow (Product Team)
└── [Bug Report] → Workflow (Technical Team)
Input (Feedback) → Router → Workflow (Product) or Workflow (Technical)
```
**Lead Qualification** - Route leads based on qualification criteria
```
Input (Lead) → Router
├── [Enterprise] → Agent (Enterprise Sales)
└── [Self-serve] → Workflow (Automated Onboarding)
Input (Lead) → Router → Agent (Enterprise Sales) or Workflow (Self-serve)
```
## Error Handling
When the Router cannot determine an appropriate route for the given context, it will route to the **error path** instead of arbitrarily selecting a route. This happens when:
- The context doesn't clearly match any of the defined route descriptions
- The AI determines that none of the available routes are appropriate
## Best Practices
- **Write clear route descriptions**: Each route description should clearly explain when that route should be selected. Be specific about the criteria.
- **Make routes mutually exclusive**: When possible, ensure route descriptions don't overlap to prevent ambiguous routing decisions.
- **Connect an error path**: Handle cases where no route matches by connecting an error handler for graceful fallback behavior.
- **Use descriptive route titles**: Route titles appear in the workflow canvas, so make them meaningful for readability.
- **Test with diverse inputs**: Ensure the Router handles various input types, edge cases, and unexpected content.
- **Monitor routing performance**: Review routing decisions regularly and refine route descriptions based on actual usage patterns.
- **Choose appropriate models**: Use models with strong reasoning capabilities for complex routing decisions.
- **Provide clear target descriptions**: Help the Router understand when to select each destination with specific, detailed descriptions
- **Use specific routing criteria**: Define clear conditions and examples for each path to improve accuracy
- **Implement fallback paths**: Connect a default destination for when no specific path is appropriate
- **Test with diverse inputs**: Ensure the Router handles various input types, edge cases, and unexpected content
- **Monitor routing performance**: Review routing decisions regularly and refine criteria based on actual usage patterns
- **Choose appropriate models**: Use models with strong reasoning capabilities for complex routing decisions


@@ -1,120 +0,0 @@
---
title: Enterprise
description: Enterprise features for business organizations
---
import { Callout } from 'fumadocs-ui/components/callout'
Sim Studio Enterprise provides advanced features for organizations with enhanced security, compliance, and management requirements.
---
## Access Control
Define permission groups to control what features and integrations team members can use.
### Features
- **Allowed Model Providers** - Restrict which AI providers users can access (OpenAI, Anthropic, Google, etc.)
- **Allowed Blocks** - Control which workflow blocks are available
- **Platform Settings** - Hide Knowledge Base, disable MCP tools, or disable custom tools
### Setup
1. Navigate to **Settings** → **Access Control** in your workspace
2. Create a permission group with your desired restrictions
3. Add team members to the permission group
<Callout type="info">
Users not assigned to any permission group have full access. Permission restrictions are enforced at both UI and execution time.
</Callout>
---
## Bring Your Own Key (BYOK)
Use your own API keys for AI model providers instead of Sim Studio's hosted keys.
### Supported Providers
| Provider | Usage |
|----------|-------|
| OpenAI | Knowledge Base embeddings, Agent block |
| Anthropic | Agent block |
| Google | Agent block |
| Mistral | Knowledge Base OCR |
### Setup
1. Navigate to **Settings** → **BYOK** in your workspace
2. Click **Add Key** for your provider
3. Enter your API key and save
<Callout type="warn">
BYOK keys are encrypted at rest. Only organization admins and owners can manage keys.
</Callout>
When configured, workflows use your key instead of Sim Studio's hosted keys. If removed, workflows automatically fall back to hosted keys.
---
## Single Sign-On (SSO)
Enterprise authentication with SAML 2.0 and OIDC support for centralized identity management.
### Supported Providers
- Okta
- Azure AD / Entra ID
- Google Workspace
- OneLogin
- Any SAML 2.0 or OIDC provider
### Setup
1. Navigate to **Settings** → **SSO** in your workspace
2. Choose your identity provider
3. Configure the connection using your IdP's metadata
4. Enable SSO for your organization
<Callout type="info">
Once SSO is enabled, team members authenticate through your identity provider instead of email/password.
</Callout>
---
## Self-Hosted Configuration
For self-hosted deployments, enterprise features can be enabled via environment variables without requiring billing.
### Environment Variables
| Variable | Description |
|----------|-------------|
| `ORGANIZATIONS_ENABLED`, `NEXT_PUBLIC_ORGANIZATIONS_ENABLED` | Enable team/organization management |
| `ACCESS_CONTROL_ENABLED`, `NEXT_PUBLIC_ACCESS_CONTROL_ENABLED` | Permission groups for access restrictions |
| `SSO_ENABLED`, `NEXT_PUBLIC_SSO_ENABLED` | Single Sign-On with SAML/OIDC |
| `CREDENTIAL_SETS_ENABLED`, `NEXT_PUBLIC_CREDENTIAL_SETS_ENABLED` | Polling Groups for email triggers |
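As a rough illustration, a self-hosted deployment could gate these features on the variables above. The sketch below assumes simple `'true'` string flags and is not Sim Studio's actual flag-parsing code:
```typescript
// Illustrative only: read the enterprise feature flags listed above.
// Sim Studio's real flag handling may differ.
function flag(name: string): boolean {
  return process.env[name] === 'true'
}

export const enterpriseFeatures = {
  organizations: flag('ORGANIZATIONS_ENABLED'),
  accessControl: flag('ACCESS_CONTROL_ENABLED'),
  sso: flag('SSO_ENABLED'),
  credentialSets: flag('CREDENTIAL_SETS_ENABLED'),
}
```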
### Organization Management
When billing is disabled, use the Admin API to manage organizations:
```bash
# Create an organization
curl -X POST https://your-instance/api/v1/admin/organizations \
-H "x-admin-key: YOUR_ADMIN_API_KEY" \
-H "Content-Type: application/json" \
-d '{"name": "My Organization", "ownerId": "user-id-here"}'
# Add a member
curl -X POST https://your-instance/api/v1/admin/organizations/{orgId}/members \
-H "x-admin-key: YOUR_ADMIN_API_KEY" \
-H "Content-Type: application/json" \
-d '{"userId": "user-id-here", "role": "admin"}'
```
### Notes
- Enabling `ACCESS_CONTROL_ENABLED` automatically enables organizations, as access control requires organization membership.
- BYOK is only available on hosted Sim Studio. Self-hosted deployments configure AI provider keys directly via environment variables.


@@ -48,40 +48,40 @@ The model breakdown shows:
<Tabs items={['Hosted Models', 'Bring Your Own API Key']}>
<Tab>
**Hosted Models** - Sim provides API keys with a 1.4x pricing multiplier for Agent blocks:
**Hosted Models** - Sim provides API keys with a 2x pricing multiplier:
**OpenAI**
| Model | Base Price (Input/Output) | Hosted Price (Input/Output) |
|-------|---------------------------|----------------------------|
| GPT-5.1 | $1.25 / $10.00 | $1.75 / $14.00 |
| GPT-5 | $1.25 / $10.00 | $1.75 / $14.00 |
| GPT-5 Mini | $0.25 / $2.00 | $0.35 / $2.80 |
| GPT-5 Nano | $0.05 / $0.40 | $0.07 / $0.56 |
| GPT-4o | $2.50 / $10.00 | $3.50 / $14.00 |
| GPT-4.1 | $2.00 / $8.00 | $2.80 / $11.20 |
| GPT-4.1 Mini | $0.40 / $1.60 | $0.56 / $2.24 |
| GPT-4.1 Nano | $0.10 / $0.40 | $0.14 / $0.56 |
| o1 | $15.00 / $60.00 | $21.00 / $84.00 |
| o3 | $2.00 / $8.00 | $2.80 / $11.20 |
| o4 Mini | $1.10 / $4.40 | $1.54 / $6.16 |
| GPT-5.1 | $1.25 / $10.00 | $2.50 / $20.00 |
| GPT-5 | $1.25 / $10.00 | $2.50 / $20.00 |
| GPT-5 Mini | $0.25 / $2.00 | $0.50 / $4.00 |
| GPT-5 Nano | $0.05 / $0.40 | $0.10 / $0.80 |
| GPT-4o | $2.50 / $10.00 | $5.00 / $20.00 |
| GPT-4.1 | $2.00 / $8.00 | $4.00 / $16.00 |
| GPT-4.1 Mini | $0.40 / $1.60 | $0.80 / $3.20 |
| GPT-4.1 Nano | $0.10 / $0.40 | $0.20 / $0.80 |
| o1 | $15.00 / $60.00 | $30.00 / $120.00 |
| o3 | $2.00 / $8.00 | $4.00 / $16.00 |
| o4 Mini | $1.10 / $4.40 | $2.20 / $8.80 |
**Anthropic**
| Model | Base Price (Input/Output) | Hosted Price (Input/Output) |
|-------|---------------------------|----------------------------|
| Claude Opus 4.5 | $5.00 / $25.00 | $7.00 / $35.00 |
| Claude Opus 4.1 | $15.00 / $75.00 | $21.00 / $105.00 |
| Claude Sonnet 4.5 | $3.00 / $15.00 | $4.20 / $21.00 |
| Claude Sonnet 4.0 | $3.00 / $15.00 | $4.20 / $21.00 |
| Claude Haiku 4.5 | $1.00 / $5.00 | $1.40 / $7.00 |
| Claude Opus 4.5 | $5.00 / $25.00 | $10.00 / $50.00 |
| Claude Opus 4.1 | $15.00 / $75.00 | $30.00 / $150.00 |
| Claude Sonnet 4.5 | $3.00 / $15.00 | $6.00 / $30.00 |
| Claude Sonnet 4.0 | $3.00 / $15.00 | $6.00 / $30.00 |
| Claude Haiku 4.5 | $1.00 / $5.00 | $2.00 / $10.00 |
**Google**
| Model | Base Price (Input/Output) | Hosted Price (Input/Output) |
|-------|---------------------------|----------------------------|
| Gemini 3 Pro Preview | $2.00 / $12.00 | $2.80 / $16.80 |
| Gemini 2.5 Pro | $1.25 / $10.00 | $1.75 / $14.00 |
| Gemini 2.5 Flash | $0.30 / $2.50 | $0.42 / $3.50 |
| Gemini 3 Pro Preview | $2.00 / $12.00 | $4.00 / $24.00 |
| Gemini 2.5 Pro | $1.25 / $10.00 | $2.50 / $20.00 |
| Gemini 2.5 Flash | $0.30 / $2.50 | $0.60 / $5.00 |
*The 1.4x multiplier covers infrastructure and API management costs.*
*The 2x multiplier covers infrastructure and API management costs.*
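As a quick sanity check against the table above, the hosted price is simply the base provider price doubled:
```typescript
// Hosted price = base price x 2 (the hosted-model multiplier).
// Example from the table: GPT-5.1 input tokens, $1.25 base -> $2.50 hosted.
const HOSTED_MULTIPLIER = 2
const gpt51InputBase = 1.25
const gpt51InputHosted = gpt51InputBase * HOSTED_MULTIPLIER // 2.5
```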
</Tab>
<Tab>


@@ -1,136 +0,0 @@
---
title: Form Deployment
---
import { Callout } from 'fumadocs-ui/components/callout'
import { Tab, Tabs } from 'fumadocs-ui/components/tabs'
Deploy your workflow as an embeddable form that users can fill out on your website or share via link. Form submissions trigger your workflow with the `form` trigger type.
## Overview
Form deployment turns your workflow's Input Format into a responsive form that can be:
- Shared via a direct link (e.g., `https://sim.ai/form/my-survey`)
- Embedded in any website using an iframe
When a user submits the form, it triggers your workflow with the form data.
<Callout type="info">
Forms derive their fields from your workflow's Start block Input Format. Each field becomes a form input with the appropriate type.
</Callout>
## Creating a Form
1. Open your workflow and click **Deploy**
2. Select the **Form** tab
3. Configure:
- **URL**: Unique identifier (e.g., `contact-form` → `sim.ai/form/contact-form`)
- **Title**: Form heading
- **Description**: Optional subtitle
- **Form Fields**: Customize labels and descriptions for each field
- **Authentication**: Public, password-protected, or email whitelist
- **Thank You Message**: Shown after submission
4. Click **Launch**
## Field Type Mapping
| Input Format Type | Form Field |
|------------------|------------|
| `string` | Text input |
| `number` | Number input |
| `boolean` | Toggle switch |
| `object` | JSON editor |
| `array` | JSON array editor |
| `files` | File upload |
## Access Control
| Mode | Description |
|------|-------------|
| **Public** | Anyone with the link can submit |
| **Password** | Users must enter a password |
| **Email Whitelist** | Only specified emails/domains can submit |
For email whitelist:
- Exact: `user@example.com`
- Domain: `@example.com` (all emails from domain)
## Embedding
### Direct Link
```
https://sim.ai/form/your-identifier
```
### Iframe
```html
<iframe
src="https://sim.ai/form/your-identifier"
width="100%"
height="600"
frameborder="0"
title="Form"
></iframe>
```
## API Submission
Submit forms programmatically:
<Tabs items={['cURL', 'TypeScript']}>
<Tab value="cURL">
```bash
curl -X POST https://sim.ai/api/form/your-identifier \
-H "Content-Type: application/json" \
-d '{
"formData": {
"name": "John Doe",
"email": "john@example.com"
}
}'
```
</Tab>
<Tab value="TypeScript">
```typescript
const response = await fetch('https://sim.ai/api/form/your-identifier', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
formData: {
name: 'John Doe',
email: 'john@example.com'
}
})
});
const result = await response.json();
// { success: true, data: { executionId: '...' } }
```
</Tab>
</Tabs>
### Protected Forms
For password-protected forms:
```bash
curl -X POST https://sim.ai/api/form/your-identifier \
-H "Content-Type: application/json" \
-d '{ "password": "secret", "formData": { "name": "John" } }'
```
For email-protected forms:
```bash
curl -X POST https://sim.ai/api/form/your-identifier \
-H "Content-Type: application/json" \
-d '{ "email": "allowed@example.com", "formData": { "name": "John" } }'
```
## Troubleshooting
**"No input fields configured"** - Add Input Format fields to your Start block.
**Form not loading in iframe** - Check your site's CSP allows iframes from `sim.ai`.
**Submissions failing** - Verify the identifier is correct and required fields are filled.


@@ -1,3 +1,3 @@
{
"pages": ["index", "basics", "api", "form", "logging", "costs"]
"pages": ["index", "basics", "api", "logging", "costs"]
}


@@ -15,7 +15,6 @@
"permissions",
"sdks",
"self-hosting",
"./enterprise/index",
"./keyboard-shortcuts/index"
],
"defaultOpen": false


@@ -61,9 +61,7 @@ Search Apollo
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `people` | json | Array of people matching the search criteria |
| `page` | number | Current page number |
| `per_page` | number | Results per page |
| `total_entries` | number | Total matching entries |
| `metadata` | json | Pagination information including page, per_page, and total_entries |
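Judging from that description, the consolidated `metadata` object for the Apollo search tools presumably carries the former pagination fields. A sketch of the inferred shape (not a verified schema):
```typescript
// Inferred from the description above; the exact schema is an assumption.
interface ApolloSearchMetadata {
  page: number          // current page number
  per_page: number      // results per page
  total_entries: number // total matching entries
}
```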
### `apollo_people_enrich`
@@ -88,7 +86,7 @@ Enrich data for a single person using Apollo
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `person` | json | Enriched person data from Apollo |
| `enriched` | boolean | Whether the person was successfully enriched |
| `metadata` | json | Enrichment metadata including enriched status |
### `apollo_people_bulk_enrich`
@@ -108,8 +106,7 @@ Enrich data for up to 10 people at once using Apollo
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `people` | json | Array of enriched people data |
| `total` | number | Total number of people processed |
| `enriched` | number | Number of people successfully enriched |
| `metadata` | json | Bulk enrichment metadata including total and enriched counts |
### `apollo_organization_search`
@@ -132,9 +129,7 @@ Search Apollo
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `organizations` | json | Array of organizations matching the search criteria |
| `page` | number | Current page number |
| `per_page` | number | Results per page |
| `total_entries` | number | Total matching entries |
| `metadata` | json | Pagination information including page, per_page, and total_entries |
### `apollo_organization_enrich`
@@ -153,7 +148,7 @@ Enrich data for a single organization using Apollo
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `organization` | json | Enriched organization data from Apollo |
| `enriched` | boolean | Whether the organization was successfully enriched |
| `metadata` | json | Enrichment metadata including enriched status |
### `apollo_organization_bulk_enrich`
@@ -171,8 +166,7 @@ Enrich data for up to 10 organizations at once using Apollo
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `organizations` | json | Array of enriched organization data |
| `total` | number | Total number of organizations processed |
| `enriched` | number | Number of organizations successfully enriched |
| `metadata` | json | Bulk enrichment metadata including total and enriched counts |
### `apollo_contact_create`
@@ -195,7 +189,7 @@ Create a new contact in your Apollo database
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `contact` | json | Created contact data from Apollo |
| `created` | boolean | Whether the contact was successfully created |
| `metadata` | json | Creation metadata including created status |
### `apollo_contact_update`
@@ -219,7 +213,7 @@ Update an existing contact in your Apollo database
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `contact` | json | Updated contact data from Apollo |
| `updated` | boolean | Whether the contact was successfully updated |
| `metadata` | json | Update metadata including updated status |
### `apollo_contact_search`
@@ -240,9 +234,7 @@ Search your team
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `contacts` | json | Array of contacts matching the search criteria |
| `page` | number | Current page number |
| `per_page` | number | Results per page |
| `total_entries` | number | Total matching entries |
| `metadata` | json | Pagination information including page, per_page, and total_entries |
### `apollo_contact_bulk_create`
@@ -262,9 +254,7 @@ Create up to 100 contacts at once in your Apollo database. Supports deduplicatio
| --------- | ---- | ----------- |
| `created_contacts` | json | Array of newly created contacts |
| `existing_contacts` | json | Array of existing contacts \(when deduplication is enabled\) |
| `total_submitted` | number | Total number of contacts submitted |
| `created` | number | Number of contacts successfully created |
| `existing` | number | Number of existing contacts found |
| `metadata` | json | Bulk creation metadata including counts of created and existing contacts |
### `apollo_contact_bulk_update`
@@ -283,9 +273,7 @@ Update up to 100 existing contacts at once in your Apollo database. Each contact
| --------- | ---- | ----------- |
| `updated_contacts` | json | Array of successfully updated contacts |
| `failed_contacts` | json | Array of contacts that failed to update |
| `total_submitted` | number | Total number of contacts submitted |
| `updated` | number | Number of contacts successfully updated |
| `failed` | number | Number of contacts that failed to update |
| `metadata` | json | Bulk update metadata including counts of updated and failed contacts |
### `apollo_account_create`
@@ -306,7 +294,7 @@ Create a new account (company) in your Apollo database
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `account` | json | Created account data from Apollo |
| `created` | boolean | Whether the account was successfully created |
| `metadata` | json | Creation metadata including created status |
### `apollo_account_update`
@@ -328,7 +316,7 @@ Update an existing account in your Apollo database
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `account` | json | Updated account data from Apollo |
| `updated` | boolean | Whether the account was successfully updated |
| `metadata` | json | Update metadata including updated status |
### `apollo_account_search`
@@ -350,9 +338,7 @@ Search your team
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `accounts` | json | Array of accounts matching the search criteria |
| `page` | number | Current page number |
| `per_page` | number | Results per page |
| `total_entries` | number | Total matching entries |
| `metadata` | json | Pagination information including page, per_page, and total_entries |
### `apollo_account_bulk_create`
@@ -371,9 +357,7 @@ Create up to 100 accounts at once in your Apollo database. Note: Apollo does not
| --------- | ---- | ----------- |
| `created_accounts` | json | Array of newly created accounts |
| `failed_accounts` | json | Array of accounts that failed to create |
| `total_submitted` | number | Total number of accounts submitted |
| `created` | number | Number of accounts successfully created |
| `failed` | number | Number of accounts that failed to create |
| `metadata` | json | Bulk creation metadata including counts of created and failed accounts |
### `apollo_account_bulk_update`
@@ -392,9 +376,7 @@ Update up to 1000 existing accounts at once in your Apollo database (higher limi
| --------- | ---- | ----------- |
| `updated_accounts` | json | Array of successfully updated accounts |
| `failed_accounts` | json | Array of accounts that failed to update |
| `total_submitted` | number | Total number of accounts submitted |
| `updated` | number | Number of accounts successfully updated |
| `failed` | number | Number of accounts that failed to update |
| `metadata` | json | Bulk update metadata including counts of updated and failed accounts |
### `apollo_opportunity_create`
@@ -418,7 +400,7 @@ Create a new deal for an account in your Apollo database (master key required)
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `opportunity` | json | Created opportunity data from Apollo |
| `created` | boolean | Whether the opportunity was successfully created |
| `metadata` | json | Creation metadata including created status |
### `apollo_opportunity_search`
@@ -441,9 +423,7 @@ Search and list all deals/opportunities in your team
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `opportunities` | json | Array of opportunities matching the search criteria |
| `page` | number | Current page number |
| `per_page` | number | Results per page |
| `total_entries` | number | Total matching entries |
| `metadata` | json | Pagination information including page, per_page, and total_entries |
### `apollo_opportunity_get`
@@ -461,7 +441,7 @@ Retrieve complete details of a specific deal/opportunity by ID
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `opportunity` | json | Complete opportunity data from Apollo |
| `found` | boolean | Whether the opportunity was found |
| `metadata` | json | Retrieval metadata including found status |
### `apollo_opportunity_update`
@@ -485,7 +465,7 @@ Update an existing deal/opportunity in your Apollo database
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `opportunity` | json | Updated opportunity data from Apollo |
| `updated` | boolean | Whether the opportunity was successfully updated |
| `metadata` | json | Update metadata including updated status |
### `apollo_sequence_search`
@@ -506,9 +486,7 @@ Search for sequences/campaigns in your team
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `sequences` | json | Array of sequences/campaigns matching the search criteria |
| `page` | number | Current page number |
| `per_page` | number | Results per page |
| `total_entries` | number | Total matching entries |
| `metadata` | json | Pagination information including page, per_page, and total_entries |
### `apollo_sequence_add_contacts`
@@ -529,8 +507,7 @@ Add contacts to an Apollo sequence
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `contacts_added` | json | Array of contact IDs added to the sequence |
| `sequence_id` | string | ID of the sequence contacts were added to |
| `total_added` | number | Total number of contacts added |
| `metadata` | json | Sequence metadata including sequence_id and total_added count |
### `apollo_task_create`
@@ -553,7 +530,7 @@ Create a new task in Apollo
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `task` | json | Created task data from Apollo |
| `created` | boolean | Whether the task was successfully created |
| `metadata` | json | Creation metadata including created status |
### `apollo_task_search`
@@ -575,9 +552,7 @@ Search for tasks in Apollo
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `tasks` | json | Array of tasks matching the search criteria |
| `page` | number | Current page number |
| `per_page` | number | Results per page |
| `total_entries` | number | Total matching entries |
| `metadata` | json | Pagination information including page, per_page, and total_entries |
### `apollo_email_accounts`
@@ -594,7 +569,7 @@ Get list of team
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `email_accounts` | json | Array of team email accounts linked in Apollo |
| `total` | number | Total count of email accounts |
| `metadata` | json | Metadata including total count of email accounts |


@@ -0,0 +1,186 @@
---
title: Cursor
description: Launch and manage Cursor cloud agents to work on GitHub repositories
---
import { BlockInfoCard } from "@/components/ui/block-info-card"
<BlockInfoCard
type="cursor"
color="#1E1E1E"
/>
{/* MANUAL-CONTENT-START:intro */}
[Cursor](https://www.cursor.so/) is an AI IDE and cloud-based platform that lets you launch and manage powerful AI agents able to work directly on your GitHub repositories. Cursor agents can automate development tasks, enhance your team's productivity, and collaborate with you by making code changes, responding to natural language instructions, and maintaining conversation history about their activities.
With Cursor, you can:
- **Launch cloud agents for codebases**: Instantly create new AI agents that work on your repositories in the cloud
- **Delegate coding tasks using natural language**: Guide agents with written instructions, amendments, and clarifications
- **Monitor progress and outputs**: Retrieve agent status, view detailed results, and inspect current or completed tasks
- **Access full conversation history**: Review all prompts and AI responses for transparency and auditability
- **Control and manage agent lifecycle**: List active agents, terminate agents, and manage API-based agent launches and follow-ups
In Sim, the Cursor integration enables your agents and workflows to interact programmatically with Cursor cloud agents. This means you can use Sim to:
- List all cloud agents and browse their current state (`cursor_list_agents`)
- Retrieve up-to-date status and outputs for any agent (`cursor_get_agent`)
- View the full conversation history for any coding agent (`cursor_get_conversation`)
- Add follow-up instructions or new prompts to a running agent
- Manage and terminate agents as needed
This integration helps you combine the flexible intelligence of Sim agents with the powerful development automation capabilities of Cursor, making it possible to scale AI-driven development across your projects.
{/* MANUAL-CONTENT-END */}
## Usage Instructions
Interact with the Cursor Cloud Agents API to launch AI agents that can work on your GitHub repositories. Supports launching agents, adding follow-up instructions, checking status, viewing conversations, and managing the agent lifecycle.
## Tools
### `cursor_list_agents`
List all cloud agents for the authenticated user with optional pagination.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Cursor API key |
| `limit` | number | No | Number of agents to return \(default: 20, max: 100\) |
| `cursor` | string | No | Pagination cursor from previous response |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `content` | string | Human-readable list of agents |
| `metadata` | object | Agent list metadata |
### `cursor_get_agent`
Retrieve the current status and results of a cloud agent.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Cursor API key |
| `agentId` | string | Yes | Unique identifier for the cloud agent \(e.g., bc_abc123\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `content` | string | Human-readable agent details |
| `metadata` | object | Agent metadata |
### `cursor_get_conversation`
Retrieve the conversation history of a cloud agent, including all user prompts and assistant responses.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Cursor API key |
| `agentId` | string | Yes | Unique identifier for the cloud agent \(e.g., bc_abc123\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `content` | string | Human-readable conversation history |
| `metadata` | object | Conversation metadata |
### `cursor_launch_agent`
Start a new cloud agent to work on a GitHub repository with the given instructions.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Cursor API key |
| `repository` | string | Yes | GitHub repository URL \(e.g., https://github.com/your-org/your-repo\) |
| `ref` | string | No | Branch, tag, or commit to work from \(defaults to default branch\) |
| `promptText` | string | Yes | The instruction text for the agent |
| `promptImages` | string | No | JSON array of image objects with base64 data and dimensions |
| `model` | string | No | Model to use \(leave empty for auto-selection\) |
| `branchName` | string | No | Custom branch name for the agent to use |
| `autoCreatePr` | boolean | No | Automatically create a PR when the agent finishes |
| `openAsCursorGithubApp` | boolean | No | Open the PR as the Cursor GitHub App |
| `skipReviewerRequest` | boolean | No | Skip requesting reviewers on the PR |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `content` | string | Success message with agent details |
| `metadata` | object | Launch result metadata |
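For orientation, the documented inputs could be assembled into a typed payload like the one below; the interface name and shape are illustrative, not part of the Cursor or Sim APIs:
```typescript
// Illustrative payload mirroring the cursor_launch_agent input table above.
interface CursorLaunchAgentParams {
  apiKey: string
  repository: string     // e.g. https://github.com/your-org/your-repo
  promptText: string     // instruction text for the agent
  ref?: string           // branch, tag, or commit (defaults to the default branch)
  branchName?: string    // custom branch name for the agent to use
  model?: string         // leave empty for auto-selection
  autoCreatePr?: boolean // automatically open a PR when the agent finishes
}

const launchParams: CursorLaunchAgentParams = {
  apiKey: process.env.CURSOR_API_KEY ?? '',
  repository: 'https://github.com/your-org/your-repo',
  promptText: 'Fix the failing unit tests and open a PR.',
  autoCreatePr: true,
}
```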
### `cursor_add_followup`
Add a follow-up instruction to an existing cloud agent.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Cursor API key |
| `agentId` | string | Yes | Unique identifier for the cloud agent \(e.g., bc_abc123\) |
| `followupPromptText` | string | Yes | The follow-up instruction text for the agent |
| `promptImages` | string | No | JSON array of image objects with base64 data and dimensions \(max 5\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `content` | string | Success message |
| `metadata` | object | Result metadata |
### `cursor_stop_agent`
Stop a running cloud agent. This pauses the agent without deleting it.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Cursor API key |
| `agentId` | string | Yes | Unique identifier for the cloud agent \(e.g., bc_abc123\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `content` | string | Success message |
| `metadata` | object | Result metadata |
### `cursor_delete_agent`
Permanently delete a cloud agent. This action cannot be undone.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Cursor API key |
| `agentId` | string | Yes | Unique identifier for the cloud agent \(e.g., bc_abc123\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `content` | string | Success message |
| `metadata` | object | Result metadata |
## Notes
- Category: `tools`
- Type: `cursor`


@@ -1,183 +0,0 @@
---
title: Cursor
description: Agent identifier
---
import { BlockInfoCard } from "@/components/ui/block-info-card"
<BlockInfoCard
type="cursor_v2"
color="#F5F5F5"
/>
## Tools
### `cursor_list_agents_v2`
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `id` | string | Agent identifier |
| `url` | string | Agent URL \(launch operation\) |
| `name` | string | Agent name |
| `status` | string | Agent status |
| `source` | json | Agent source repository info |
| `target` | json | Agent target branch/PR info |
| `summary` | string | Agent summary |
| `createdAt` | string | Agent creation timestamp |
| `agents` | json | Array of agent objects \(list operation\) |
| `nextCursor` | string | Pagination cursor \(list operation\) |
| `messages` | json | Conversation messages \(get conversation operation\) |
### `cursor_get_agent_v2`
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `id` | string | Agent identifier |
| `url` | string | Agent URL \(launch operation\) |
| `name` | string | Agent name |
| `status` | string | Agent status |
| `source` | json | Agent source repository info |
| `target` | json | Agent target branch/PR info |
| `summary` | string | Agent summary |
| `createdAt` | string | Agent creation timestamp |
| `agents` | json | Array of agent objects \(list operation\) |
| `nextCursor` | string | Pagination cursor \(list operation\) |
| `messages` | json | Conversation messages \(get conversation operation\) |
### `cursor_get_conversation_v2`
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `id` | string | Agent identifier |
| `url` | string | Agent URL \(launch operation\) |
| `name` | string | Agent name |
| `status` | string | Agent status |
| `source` | json | Agent source repository info |
| `target` | json | Agent target branch/PR info |
| `summary` | string | Agent summary |
| `createdAt` | string | Agent creation timestamp |
| `agents` | json | Array of agent objects \(list operation\) |
| `nextCursor` | string | Pagination cursor \(list operation\) |
| `messages` | json | Conversation messages \(get conversation operation\) |
### `cursor_launch_agent_v2`
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `id` | string | Agent identifier |
| `url` | string | Agent URL \(launch operation\) |
| `name` | string | Agent name |
| `status` | string | Agent status |
| `source` | json | Agent source repository info |
| `target` | json | Agent target branch/PR info |
| `summary` | string | Agent summary |
| `createdAt` | string | Agent creation timestamp |
| `agents` | json | Array of agent objects \(list operation\) |
| `nextCursor` | string | Pagination cursor \(list operation\) |
| `messages` | json | Conversation messages \(get conversation operation\) |
### `cursor_add_followup_v2`
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `id` | string | Agent identifier |
| `url` | string | Agent URL \(launch operation\) |
| `name` | string | Agent name |
| `status` | string | Agent status |
| `source` | json | Agent source repository info |
| `target` | json | Agent target branch/PR info |
| `summary` | string | Agent summary |
| `createdAt` | string | Agent creation timestamp |
| `agents` | json | Array of agent objects \(list operation\) |
| `nextCursor` | string | Pagination cursor \(list operation\) |
| `messages` | json | Conversation messages \(get conversation operation\) |
### `cursor_stop_agent_v2`
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `id` | string | Agent identifier |
| `url` | string | Agent URL \(launch operation\) |
| `name` | string | Agent name |
| `status` | string | Agent status |
| `source` | json | Agent source repository info |
| `target` | json | Agent target branch/PR info |
| `summary` | string | Agent summary |
| `createdAt` | string | Agent creation timestamp |
| `agents` | json | Array of agent objects \(list operation\) |
| `nextCursor` | string | Pagination cursor \(list operation\) |
| `messages` | json | Conversation messages \(get conversation operation\) |
### `cursor_delete_agent_v2`
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `id` | string | Agent identifier |
| `url` | string | Agent URL \(launch operation\) |
| `name` | string | Agent name |
| `status` | string | Agent status |
| `source` | json | Agent source repository info |
| `target` | json | Agent target branch/PR info |
| `summary` | string | Agent summary |
| `createdAt` | string | Agent creation timestamp |
| `agents` | json | Array of agent objects \(list operation\) |
| `nextCursor` | string | Pagination cursor \(list operation\) |
| `messages` | json | Conversation messages \(get conversation operation\) |
## Notes
- Category: `misc`
- Type: `cursor_v2`

File diff suppressed because it is too large.


@@ -1,20 +0,0 @@
---
title: GitHub
description: Operation result data (API-aligned)
---
import { BlockInfoCard } from "@/components/ui/block-info-card"
<BlockInfoCard
type="github_v2"
color="#F5F5F5"
/>
## Notes
- Category: `misc`
- Type: `github_v2`


@@ -0,0 +1,280 @@
---
title: Gmail
description: Send, read, search, and move Gmail messages or trigger workflows from Gmail events
---
import { BlockInfoCard } from "@/components/ui/block-info-card"
<BlockInfoCard
type="gmail"
color="#E0E0E0"
/>
{/* MANUAL-CONTENT-START:intro */}
[Gmail](https://gmail.com) is Google's popular email service that provides a robust platform for sending, receiving, and managing email communications. With over 1.8 billion active users worldwide, Gmail offers a feature-rich experience with powerful search capabilities, organizational tools, and integration options.
With Gmail, you can:
- **Send and receive emails**: Communicate with contacts through a clean, intuitive interface
- **Organize messages**: Use labels, folders, and filters to keep your inbox organized
- **Search efficiently**: Find specific messages quickly with Google's powerful search technology
- **Automate workflows**: Create filters and rules to automatically process incoming emails
- **Access from anywhere**: Use Gmail across devices with synchronized content and settings
- **Integrate with other services**: Connect with Google Calendar, Drive, and other productivity tools
In Sim, the Gmail integration enables your agents to fully manage emails programmatically with comprehensive automation capabilities. This allows for powerful automation scenarios such as sending notifications, processing incoming messages, extracting information from emails, and managing communication workflows at scale. Your agents can:
- **Compose and send**: Create personalized emails with attachments and send to recipients
- **Read and search**: Find specific messages using Gmail's query syntax and extract content
- **Organize intelligently**: Mark messages as read/unread, archive or unarchive emails, and manage labels
- **Clean up inbox**: Delete messages, move emails between labels, and maintain inbox zero
- **Trigger workflows**: Listen for new emails in real-time, enabling responsive workflows that react to incoming messages
This integration bridges the gap between your AI workflows and email communications, enabling seamless interaction with one of the world's most widely used communication platforms. Whether you're automating customer support responses, processing receipts, managing subscriptions, or coordinating team communications, the Gmail integration provides all the tools you need for comprehensive email automation.
{/* MANUAL-CONTENT-END */}
## Usage Instructions
Integrate Gmail into the workflow. Can send, read, search, and move emails. Can be used in trigger mode to trigger a workflow when a new email is received.
## Tools
### `gmail_send`
Send emails using Gmail
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `to` | string | Yes | Recipient email address |
| `subject` | string | No | Email subject |
| `body` | string | Yes | Email body content |
| `contentType` | string | No | Content type for the email body \(text or html\) |
| `threadId` | string | No | Thread ID to reply to \(for threading\) |
| `replyToMessageId` | string | No | Gmail message ID to reply to - use the "id" field from Gmail Read results \(not the RFC "messageId"\) |
| `cc` | string | No | CC recipients \(comma-separated\) |
| `bcc` | string | No | BCC recipients \(comma-separated\) |
| `attachments` | file[] | No | Files to attach to the email |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `content` | string | Success message |
| `metadata` | object | Email metadata |
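As a reference, the documented `gmail_send` inputs map onto a payload roughly like this; the type name is illustrative, and the optional fields follow the table above:
```typescript
// Illustrative payload mirroring the gmail_send input table above.
interface GmailSendParams {
  to: string                // recipient email address
  body: string              // email body content
  subject?: string
  contentType?: 'text' | 'html'
  cc?: string               // comma-separated
  bcc?: string              // comma-separated
  threadId?: string         // thread to reply into
  replyToMessageId?: string // the "id" field from Gmail Read results
}

const sendParams: GmailSendParams = {
  to: 'recipient@example.com',
  subject: 'Weekly report',
  body: '<p>Here is this week\'s summary.</p>',
  contentType: 'html',
}
```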
### `gmail_draft`
Draft emails using Gmail
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `to` | string | Yes | Recipient email address |
| `subject` | string | No | Email subject |
| `body` | string | Yes | Email body content |
| `contentType` | string | No | Content type for the email body \(text or html\) |
| `threadId` | string | No | Thread ID to reply to \(for threading\) |
| `replyToMessageId` | string | No | Gmail message ID to reply to - use the "id" field from Gmail Read results \(not the RFC "messageId"\) |
| `cc` | string | No | CC recipients \(comma-separated\) |
| `bcc` | string | No | BCC recipients \(comma-separated\) |
| `attachments` | file[] | No | Files to attach to the email draft |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `content` | string | Success message |
| `metadata` | object | Draft metadata |
### `gmail_read`
Read emails from Gmail
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `messageId` | string | No | ID of the message to read |
| `folder` | string | No | Folder/label to read emails from |
| `unreadOnly` | boolean | No | Only retrieve unread messages |
| `maxResults` | number | No | Maximum number of messages to retrieve \(default: 1, max: 10\) |
| `includeAttachments` | boolean | No | Download and include email attachments |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `content` | string | Text content of the email |
| `metadata` | json | Metadata of the email |
| `attachments` | file[] | Attachments of the email |
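A sketch of a `gmail_read` input that polls for unread mail; the folder name is Gmail's standard INBOX label, and the limit follows the documented default and maximum.
```typescript
// Illustrative gmail_read input: fetch up to 5 unread messages from the
// inbox and download their attachments (default maxResults is 1, max 10).
const readInput = {
  folder: 'INBOX',
  unreadOnly: true,
  maxResults: 5,
  includeAttachments: true,
}
```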
### `gmail_search`
Search emails in Gmail
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `query` | string | Yes | Search query for emails |
| `maxResults` | number | No | Maximum number of results to return |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `content` | string | Search results summary |
| `metadata` | object | Search metadata |
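The `query` parameter accepts Gmail's standard search operators. A minimal sketch with an illustrative query:
```typescript
// Illustrative gmail_search input using Gmail's standard query operators.
const searchInput = {
  query: 'from:billing@example.com has:attachment newer_than:7d',
  maxResults: 10,
}
```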
### `gmail_move`
Move emails between Gmail labels/folders
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `messageId` | string | Yes | ID of the message to move |
| `addLabelIds` | string | Yes | Comma-separated label IDs to add \(e.g., INBOX, Label_123\) |
| `removeLabelIds` | string | No | Comma-separated label IDs to remove \(e.g., INBOX, SPAM\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `content` | string | Success message |
| `metadata` | object | Email metadata |
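Label IDs in `gmail_move` are comma-separated strings. In the sketch below, `INBOX` is a Gmail system label, while the message ID and `Label_123` are placeholders for real IDs.
```typescript
// Illustrative gmail_move input: file a message under a custom label
// and take it out of the inbox. Non-system IDs are placeholders.
const moveInput = {
  messageId: '18f2c3a9d4e5b6f7',
  addLabelIds: 'Label_123',
  removeLabelIds: 'INBOX',
}
```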
### `gmail_mark_read`
Mark a Gmail message as read
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `messageId` | string | Yes | ID of the message to mark as read |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `content` | string | Success message |
| `metadata` | object | Email metadata |
### `gmail_mark_unread`
Mark a Gmail message as unread
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `messageId` | string | Yes | ID of the message to mark as unread |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `content` | string | Success message |
| `metadata` | object | Email metadata |
### `gmail_archive`
Archive a Gmail message (remove from inbox)
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `messageId` | string | Yes | ID of the message to archive |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `content` | string | Success message |
| `metadata` | object | Email metadata |
### `gmail_unarchive`
Unarchive a Gmail message (move back to inbox)
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `messageId` | string | Yes | ID of the message to unarchive |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `content` | string | Success message |
| `metadata` | object | Email metadata |
### `gmail_delete`
Delete a Gmail message (move to trash)
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `messageId` | string | Yes | ID of the message to delete |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `content` | string | Success message |
| `metadata` | object | Email metadata |
### `gmail_add_label`
Add label(s) to a Gmail message
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `messageId` | string | Yes | ID of the message to add labels to |
| `labelIds` | string | Yes | Comma-separated label IDs to add \(e.g., INBOX, Label_123\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `content` | string | Success message |
| `metadata` | object | Email metadata |
### `gmail_remove_label`
Remove label(s) from a Gmail message
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `messageId` | string | Yes | ID of the message to remove labels from |
| `labelIds` | string | Yes | Comma-separated label IDs to remove \(e.g., INBOX, Label_123\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `content` | string | Success message |
| `metadata` | object | Email metadata |
## Notes
- Category: `tools`
- Type: `gmail`

View File

@@ -1,394 +0,0 @@
---
title: Gmail
description: Gmail message ID
---
import { BlockInfoCard } from "@/components/ui/block-info-card"
<BlockInfoCard
type="gmail_v2"
color="#F5F5F5"
/>
## Tools
### `gmail_send_v2`
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `id` | string | Gmail message ID |
| `threadId` | string | Gmail thread ID |
| `labelIds` | array | Email label IDs |
| `from` | string | Sender |
| `to` | string | To |
| `subject` | string | Subject |
| `date` | string | Date |
| `body` | string | Email body text \(best-effort\) |
| `results` | json | Search/read summary results |
| `attachments` | json | Downloaded attachments \(if enabled\) |
| `email_id` | string | Gmail message ID |
| `thread_id` | string | Gmail thread ID |
| `cc` | string | CC recipients \(comma-separated\) |
| `body_text` | string | Plain text email body |
| `body_html` | string | HTML email body |
| `labels` | string | Email labels \(comma-separated\) |
| `has_attachments` | boolean | Whether email has attachments |
| `raw_email` | json | Complete raw email data from Gmail API \(if enabled\) |
| `timestamp` | string | Event timestamp |
### `gmail_draft_v2`
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `id` | string | Gmail message ID |
| `threadId` | string | Gmail thread ID |
| `labelIds` | array | Email label IDs |
| `from` | string | Sender |
| `to` | string | To |
| `subject` | string | Subject |
| `date` | string | Date |
| `body` | string | Email body text \(best-effort\) |
| `results` | json | Search/read summary results |
| `attachments` | json | Downloaded attachments \(if enabled\) |
| `email_id` | string | Gmail message ID |
| `thread_id` | string | Gmail thread ID |
| `cc` | string | CC recipients \(comma-separated\) |
| `body_text` | string | Plain text email body |
| `body_html` | string | HTML email body |
| `labels` | string | Email labels \(comma-separated\) |
| `has_attachments` | boolean | Whether email has attachments |
| `raw_email` | json | Complete raw email data from Gmail API \(if enabled\) |
| `timestamp` | string | Event timestamp |
### `gmail_read_v2`
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `id` | string | Gmail message ID |
| `threadId` | string | Gmail thread ID |
| `labelIds` | array | Email label IDs |
| `from` | string | Sender |
| `to` | string | To |
| `subject` | string | Subject |
| `date` | string | Date |
| `body` | string | Email body text \(best-effort\) |
| `results` | json | Search/read summary results |
| `attachments` | json | Downloaded attachments \(if enabled\) |
| `email_id` | string | Gmail message ID |
| `thread_id` | string | Gmail thread ID |
| `cc` | string | CC recipients \(comma-separated\) |
| `body_text` | string | Plain text email body |
| `body_html` | string | HTML email body |
| `labels` | string | Email labels \(comma-separated\) |
| `has_attachments` | boolean | Whether email has attachments |
| `raw_email` | json | Complete raw email data from Gmail API \(if enabled\) |
| `timestamp` | string | Event timestamp |
### `gmail_search_v2`
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `id` | string | Gmail message ID |
| `threadId` | string | Gmail thread ID |
| `labelIds` | array | Email label IDs |
| `from` | string | Sender |
| `to` | string | To |
| `subject` | string | Subject |
| `date` | string | Date |
| `body` | string | Email body text \(best-effort\) |
| `results` | json | Search/read summary results |
| `attachments` | json | Downloaded attachments \(if enabled\) |
| `email_id` | string | Gmail message ID |
| `thread_id` | string | Gmail thread ID |
| `cc` | string | CC recipients \(comma-separated\) |
| `body_text` | string | Plain text email body |
| `body_html` | string | HTML email body |
| `labels` | string | Email labels \(comma-separated\) |
| `has_attachments` | boolean | Whether email has attachments |
| `raw_email` | json | Complete raw email data from Gmail API \(if enabled\) |
| `timestamp` | string | Event timestamp |
### `gmail_move_v2`
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `id` | string | Gmail message ID |
| `threadId` | string | Gmail thread ID |
| `labelIds` | array | Email label IDs |
| `from` | string | Sender |
| `to` | string | To |
| `subject` | string | Subject |
| `date` | string | Date |
| `body` | string | Email body text \(best-effort\) |
| `results` | json | Search/read summary results |
| `attachments` | json | Downloaded attachments \(if enabled\) |
| `email_id` | string | Gmail message ID |
| `thread_id` | string | Gmail thread ID |
| `cc` | string | CC recipients \(comma-separated\) |
| `body_text` | string | Plain text email body |
| `body_html` | string | HTML email body |
| `labels` | string | Email labels \(comma-separated\) |
| `has_attachments` | boolean | Whether email has attachments |
| `raw_email` | json | Complete raw email data from Gmail API \(if enabled\) |
| `timestamp` | string | Event timestamp |
### `gmail_mark_read_v2`
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `id` | string | Gmail message ID |
| `threadId` | string | Gmail thread ID |
| `labelIds` | array | Email label IDs |
| `from` | string | Sender |
| `to` | string | To |
| `subject` | string | Subject |
| `date` | string | Date |
| `body` | string | Email body text \(best-effort\) |
| `results` | json | Search/read summary results |
| `attachments` | json | Downloaded attachments \(if enabled\) |
| `email_id` | string | Gmail message ID |
| `thread_id` | string | Gmail thread ID |
| `cc` | string | CC recipients \(comma-separated\) |
| `body_text` | string | Plain text email body |
| `body_html` | string | HTML email body |
| `labels` | string | Email labels \(comma-separated\) |
| `has_attachments` | boolean | Whether email has attachments |
| `raw_email` | json | Complete raw email data from Gmail API \(if enabled\) |
| `timestamp` | string | Event timestamp |
### `gmail_mark_unread_v2`
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `id` | string | Gmail message ID |
| `threadId` | string | Gmail thread ID |
| `labelIds` | array | Email label IDs |
| `from` | string | Sender |
| `to` | string | To |
| `subject` | string | Subject |
| `date` | string | Date |
| `body` | string | Email body text \(best-effort\) |
| `results` | json | Search/read summary results |
| `attachments` | json | Downloaded attachments \(if enabled\) |
| `email_id` | string | Gmail message ID |
| `thread_id` | string | Gmail thread ID |
| `cc` | string | CC recipients \(comma-separated\) |
| `body_text` | string | Plain text email body |
| `body_html` | string | HTML email body |
| `labels` | string | Email labels \(comma-separated\) |
| `has_attachments` | boolean | Whether email has attachments |
| `raw_email` | json | Complete raw email data from Gmail API \(if enabled\) |
| `timestamp` | string | Event timestamp |
### `gmail_archive_v2`
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `id` | string | Gmail message ID |
| `threadId` | string | Gmail thread ID |
| `labelIds` | array | Email label IDs |
| `from` | string | Sender |
| `to` | string | To |
| `subject` | string | Subject |
| `date` | string | Date |
| `body` | string | Email body text \(best-effort\) |
| `results` | json | Search/read summary results |
| `attachments` | json | Downloaded attachments \(if enabled\) |
| `email_id` | string | Gmail message ID |
| `thread_id` | string | Gmail thread ID |
| `cc` | string | CC recipients \(comma-separated\) |
| `body_text` | string | Plain text email body |
| `body_html` | string | HTML email body |
| `labels` | string | Email labels \(comma-separated\) |
| `has_attachments` | boolean | Whether email has attachments |
| `raw_email` | json | Complete raw email data from Gmail API \(if enabled\) |
| `timestamp` | string | Event timestamp |
### `gmail_unarchive_v2`
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `id` | string | Gmail message ID |
| `threadId` | string | Gmail thread ID |
| `labelIds` | array | Email label IDs |
| `from` | string | Sender |
| `to` | string | To |
| `subject` | string | Subject |
| `date` | string | Date |
| `body` | string | Email body text \(best-effort\) |
| `results` | json | Search/read summary results |
| `attachments` | json | Downloaded attachments \(if enabled\) |
| `email_id` | string | Gmail message ID |
| `thread_id` | string | Gmail thread ID |
| `cc` | string | CC recipients \(comma-separated\) |
| `body_text` | string | Plain text email body |
| `body_html` | string | HTML email body |
| `labels` | string | Email labels \(comma-separated\) |
| `has_attachments` | boolean | Whether email has attachments |
| `raw_email` | json | Complete raw email data from Gmail API \(if enabled\) |
| `timestamp` | string | Event timestamp |
### `gmail_delete_v2`
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `id` | string | Gmail message ID |
| `threadId` | string | Gmail thread ID |
| `labelIds` | array | Email label IDs |
| `from` | string | Sender |
| `to` | string | To |
| `subject` | string | Subject |
| `date` | string | Date |
| `body` | string | Email body text \(best-effort\) |
| `results` | json | Search/read summary results |
| `attachments` | json | Downloaded attachments \(if enabled\) |
| `email_id` | string | Gmail message ID |
| `thread_id` | string | Gmail thread ID |
| `cc` | string | CC recipients \(comma-separated\) |
| `body_text` | string | Plain text email body |
| `body_html` | string | HTML email body |
| `labels` | string | Email labels \(comma-separated\) |
| `has_attachments` | boolean | Whether email has attachments |
| `raw_email` | json | Complete raw email data from Gmail API \(if enabled\) |
| `timestamp` | string | Event timestamp |
### `gmail_add_label_v2`
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `id` | string | Gmail message ID |
| `threadId` | string | Gmail thread ID |
| `labelIds` | array | Email label IDs |
| `from` | string | Sender |
| `to` | string | To |
| `subject` | string | Subject |
| `date` | string | Date |
| `body` | string | Email body text \(best-effort\) |
| `results` | json | Search/read summary results |
| `attachments` | json | Downloaded attachments \(if enabled\) |
| `email_id` | string | Gmail message ID |
| `thread_id` | string | Gmail thread ID |
| `cc` | string | CC recipients \(comma-separated\) |
| `body_text` | string | Plain text email body |
| `body_html` | string | HTML email body |
| `labels` | string | Email labels \(comma-separated\) |
| `has_attachments` | boolean | Whether email has attachments |
| `raw_email` | json | Complete raw email data from Gmail API \(if enabled\) |
| `timestamp` | string | Event timestamp |
### `gmail_remove_label_v2`
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `id` | string | Gmail message ID |
| `threadId` | string | Gmail thread ID |
| `labelIds` | array | Email label IDs |
| `from` | string | Sender |
| `to` | string | To |
| `subject` | string | Subject |
| `date` | string | Date |
| `body` | string | Email body text \(best-effort\) |
| `results` | json | Search/read summary results |
| `attachments` | json | Downloaded attachments \(if enabled\) |
| `email_id` | string | Gmail message ID |
| `thread_id` | string | Gmail thread ID |
| `cc` | string | CC recipients \(comma-separated\) |
| `body_text` | string | Plain text email body |
| `body_html` | string | HTML email body |
| `labels` | string | Email labels \(comma-separated\) |
| `has_attachments` | boolean | Whether email has attachments |
| `raw_email` | json | Complete raw email data from Gmail API \(if enabled\) |
| `timestamp` | string | Event timestamp |
## Notes
- Category: `misc`
- Type: `gmail_v2`

View File

@@ -0,0 +1,146 @@
---
title: Google Calendar
description: Manage Google Calendar events
---
import { BlockInfoCard } from "@/components/ui/block-info-card"
<BlockInfoCard
type="google_calendar"
color="#E0E0E0"
/>
{/* MANUAL-CONTENT-START:intro */}
[Google Calendar](https://calendar.google.com) is Google's powerful calendar and scheduling service that provides a comprehensive platform for managing events, meetings, and appointments. With seamless integration across Google's ecosystem and widespread adoption, Google Calendar offers robust features for both personal and professional scheduling needs.
With Google Calendar, you can:
- **Create and manage events**: Schedule meetings, appointments, and reminders with detailed information
- **Send calendar invites**: Automatically notify and coordinate with attendees through email invitations
- **Natural language event creation**: Quickly add events using conversational language like "Meeting with John tomorrow at 3pm"
- **View and search events**: Easily find and access your scheduled events across multiple calendars
- **Manage multiple calendars**: Organize different types of events across various calendars
In Sim, the Google Calendar integration enables your agents to programmatically create, read, and manage calendar events. This allows for powerful automation scenarios such as scheduling meetings, sending calendar invites, checking availability, and managing event details. Your agents can create events with natural language input, send automated calendar invitations to attendees, retrieve event information, and list upcoming events. This integration bridges the gap between your AI workflows and calendar management, enabling seamless scheduling automation and coordination with one of the world's most widely used calendar platforms.
{/* MANUAL-CONTENT-END */}
## Usage Instructions
Integrate Google Calendar into the workflow. Can create, read, update, and list calendar events.
## Tools
### `google_calendar_create`
Create a new event in Google Calendar
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `calendarId` | string | No | Calendar ID \(defaults to primary\) |
| `summary` | string | Yes | Event title/summary |
| `description` | string | No | Event description |
| `location` | string | No | Event location |
| `startDateTime` | string | Yes | Start date and time. MUST include timezone offset \(e.g., 2025-06-03T10:00:00-08:00\) OR provide timeZone parameter |
| `endDateTime` | string | Yes | End date and time. MUST include timezone offset \(e.g., 2025-06-03T11:00:00-08:00\) OR provide timeZone parameter |
| `timeZone` | string | No | Time zone \(e.g., America/Los_Angeles\). Required if datetime does not include offset. Defaults to America/Los_Angeles if not provided. |
| `attendees` | array | No | Array of attendee email addresses |
| `sendUpdates` | string | No | How to send updates to attendees: all, externalOnly, or none |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `content` | string | Event creation confirmation message |
| `metadata` | json | Created event metadata including ID, status, and details |
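A minimal `google_calendar_create` sketch; the datetimes carry an explicit offset as the table requires, and the title, location, and addresses are placeholders.
```typescript
// Illustrative google_calendar_create input. The datetimes include a
// timezone offset; alternatively omit the offset and pass timeZone.
const createEventInput = {
  summary: 'Design review',
  description: 'Review Q3 mockups',
  location: 'Conference Room B',
  startDateTime: '2025-06-03T10:00:00-08:00',
  endDateTime: '2025-06-03T11:00:00-08:00',
  attendees: ['alice@example.com', 'bob@example.com'],
  sendUpdates: 'all', // all, externalOnly, or none
}
```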
### `google_calendar_list`
List events from Google Calendar
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `calendarId` | string | No | Calendar ID \(defaults to primary\) |
| `timeMin` | string | No | Lower bound for events \(RFC3339 timestamp, e.g., 2025-06-03T00:00:00Z\) |
| `timeMax` | string | No | Upper bound for events \(RFC3339 timestamp, e.g., 2025-06-04T00:00:00Z\) |
| `orderBy` | string | No | Order of events returned \(startTime or updated\) |
| `showDeleted` | boolean | No | Include deleted events |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `content` | string | Summary of found events count |
| `metadata` | json | List of events with pagination tokens and event details |
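A sketch of a `google_calendar_list` input bounding the query to a single day with RFC3339 timestamps; values are illustrative.
```typescript
// Illustrative google_calendar_list input: events for one day,
// ordered by start time.
const listEventsInput = {
  calendarId: 'primary',
  timeMin: '2025-06-03T00:00:00Z',
  timeMax: '2025-06-04T00:00:00Z',
  orderBy: 'startTime',
  showDeleted: false,
}
```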
### `google_calendar_get`
Get a specific event from Google Calendar
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `calendarId` | string | No | Calendar ID \(defaults to primary\) |
| `eventId` | string | Yes | Event ID to retrieve |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `content` | string | Event retrieval confirmation message |
| `metadata` | json | Event details including ID, status, times, and attendees |
### `google_calendar_quick_add`
Create events from natural language text
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `calendarId` | string | No | Calendar ID \(defaults to primary\) |
| `text` | string | Yes | Natural language text describing the event \(e.g., "Meeting with John tomorrow at 3pm"\) |
| `attendees` | array | No | Array of attendee email addresses \(comma-separated string also accepted\) |
| `sendUpdates` | string | No | How to send updates to attendees: all, externalOnly, or none |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `content` | string | Event creation confirmation message from natural language |
| `metadata` | json | Created event metadata including parsed details |
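A minimal `google_calendar_quick_add` sketch; the event details are parsed from the natural-language `text`, and the attendee address is a placeholder.
```typescript
// Illustrative google_calendar_quick_add input.
const quickAddInput = {
  text: 'Meeting with John tomorrow at 3pm',
  attendees: ['john@example.com'],
  sendUpdates: 'all',
}
```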
### `google_calendar_invite`
Invite attendees to an existing Google Calendar event
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `calendarId` | string | No | Calendar ID \(defaults to primary\) |
| `eventId` | string | Yes | Event ID to invite attendees to |
| `attendees` | array | Yes | Array of attendee email addresses to invite |
| `sendUpdates` | string | No | How to send updates to attendees: all, externalOnly, or none |
| `replaceExisting` | boolean | No | Whether to replace existing attendees or add to them \(defaults to false\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `content` | string | Attendee invitation confirmation message with email delivery status |
| `metadata` | json | Updated event metadata including attendee list and details |
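A sketch of a `google_calendar_invite` input that adds attendees without replacing the existing list; the event ID and addresses are placeholders.
```typescript
// Illustrative google_calendar_invite input.
const inviteInput = {
  eventId: 'abc123def456', // placeholder event ID
  attendees: ['dana@example.com', 'eve@example.com'],
  replaceExisting: false,  // add to, rather than replace, current attendees
  sendUpdates: 'all',
}
```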
## Notes
- Category: `tools`
- Type: `google_calendar`

View File

@@ -1,157 +0,0 @@
---
title: Google Calendar
description: Event ID
---
import { BlockInfoCard } from "@/components/ui/block-info-card"
<BlockInfoCard
type="google_calendar_v2"
color="#F5F5F5"
/>
## Tools
### `google_calendar_create_v2`
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `id` | string | Event ID |
| `htmlLink` | string | Event link |
| `status` | string | Event status |
| `summary` | string | Event title |
| `description` | string | Event description |
| `location` | string | Event location |
| `start` | json | Event start |
| `end` | json | Event end |
| `attendees` | json | Event attendees |
| `creator` | json | Event creator |
| `organizer` | json | Event organizer |
| `events` | json | List of events \(list operation\) |
| `nextPageToken` | string | Next page token |
| `nextSyncToken` | string | Next sync token |
| `timeZone` | string | Calendar time zone |
### `google_calendar_list_v2`
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `id` | string | Event ID |
| `htmlLink` | string | Event link |
| `status` | string | Event status |
| `summary` | string | Event title |
| `description` | string | Event description |
| `location` | string | Event location |
| `start` | json | Event start |
| `end` | json | Event end |
| `attendees` | json | Event attendees |
| `creator` | json | Event creator |
| `organizer` | json | Event organizer |
| `events` | json | List of events \(list operation\) |
| `nextPageToken` | string | Next page token |
| `nextSyncToken` | string | Next sync token |
| `timeZone` | string | Calendar time zone |
### `google_calendar_get_v2`
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `id` | string | Event ID |
| `htmlLink` | string | Event link |
| `status` | string | Event status |
| `summary` | string | Event title |
| `description` | string | Event description |
| `location` | string | Event location |
| `start` | json | Event start |
| `end` | json | Event end |
| `attendees` | json | Event attendees |
| `creator` | json | Event creator |
| `organizer` | json | Event organizer |
| `events` | json | List of events \(list operation\) |
| `nextPageToken` | string | Next page token |
| `nextSyncToken` | string | Next sync token |
| `timeZone` | string | Calendar time zone |
### `google_calendar_quick_add_v2`
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `id` | string | Event ID |
| `htmlLink` | string | Event link |
| `status` | string | Event status |
| `summary` | string | Event title |
| `description` | string | Event description |
| `location` | string | Event location |
| `start` | json | Event start |
| `end` | json | Event end |
| `attendees` | json | Event attendees |
| `creator` | json | Event creator |
| `organizer` | json | Event organizer |
| `events` | json | List of events \(list operation\) |
| `nextPageToken` | string | Next page token |
| `nextSyncToken` | string | Next sync token |
| `timeZone` | string | Calendar time zone |
### `google_calendar_invite_v2`
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `id` | string | Event ID |
| `htmlLink` | string | Event link |
| `status` | string | Event status |
| `summary` | string | Event title |
| `description` | string | Event description |
| `location` | string | Event location |
| `start` | json | Event start |
| `end` | json | Event end |
| `attendees` | json | Event attendees |
| `creator` | json | Event creator |
| `organizer` | json | Event organizer |
| `events` | json | List of events \(list operation\) |
| `nextPageToken` | string | Next page token |
| `nextSyncToken` | string | Next sync token |
| `timeZone` | string | Calendar time zone |
## Notes
- Category: `misc`
- Type: `google_calendar_v2`

View File

@@ -48,7 +48,7 @@ Integrate Google Drive into the workflow. Can create, upload, and list files.
### `google_drive_upload`
Upload a file to Google Drive with complete metadata returned
Upload a file to Google Drive
#### Input
@@ -65,11 +65,11 @@ Upload a file to Google Drive with complete metadata returned
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `file` | object | Complete uploaded file metadata from Google Drive |
| `file` | json | Uploaded file metadata including ID, name, and links |
### `google_drive_create_folder`
Create a new folder in Google Drive with complete metadata returned
Create a new folder in Google Drive
#### Input
@@ -83,11 +83,11 @@ Create a new folder in Google Drive with complete metadata returned
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `file` | object | Complete created folder metadata from Google Drive |
| `file` | json | Created folder metadata including ID, name, and parent information |
### `google_drive_download`
Download a file from Google Drive with complete metadata (exports Google Workspace files automatically)
Download a file from Google Drive (exports Google Workspace files automatically)
#### Input
@@ -96,17 +96,16 @@ Download a file from Google Drive with complete metadata (exports Google Workspa
| `fileId` | string | Yes | The ID of the file to download |
| `mimeType` | string | No | The MIME type to export Google Workspace files to \(optional\) |
| `fileName` | string | No | Optional filename override |
| `includeRevisions` | boolean | No | Whether to include revision history in the metadata \(default: true, returns first 100 revisions\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `file` | object | Downloaded file data |
| `file` | file | Downloaded file stored in execution files |
### `google_drive_list`
List files and folders in Google Drive with complete metadata
List files and folders in Google Drive
#### Input
@@ -122,7 +121,7 @@ List files and folders in Google Drive with complete metadata
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `files` | array | Array of file metadata objects from Google Drive |
| `files` | json | Array of file metadata objects from the specified folder |

View File

@@ -162,7 +162,6 @@ Create a webhook to receive recording events
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Grain API key \(Personal Access Token\) |
| `hookUrl` | string | Yes | Webhook endpoint URL \(must respond 2xx\) |
| `hookType` | string | Yes | Type of webhook: "recording_added" or "upload_status" |
| `filterBeforeDatetime` | string | No | Filter: recordings before this date |
| `filterAfterDatetime` | string | No | Filter: recordings after this date |
| `filterParticipantScope` | string | No | Filter: "internal" or "external" |
@@ -179,7 +178,6 @@ Create a webhook to receive recording events
| `id` | string | Hook UUID |
| `enabled` | boolean | Whether hook is active |
| `hook_url` | string | The webhook URL |
| `hook_type` | string | Type of hook: recording_added or upload_status |
| `filter` | object | Applied filters |
| `include` | object | Included fields |
| `inserted_at` | string | ISO8601 creation timestamp |

View File

@@ -51,7 +51,7 @@ Retrieve all users from HubSpot account
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `users` | array | Array of HubSpot user objects |
| `totalItems` | number | Total number of users returned |
| `metadata` | object | Operation metadata |
| `success` | boolean | Operation success status |
### `hubspot_list_contacts`
@@ -73,7 +73,7 @@ Retrieve all contacts from HubSpot account with pagination support
| --------- | ---- | ----------- |
| `contacts` | array | Array of HubSpot contact objects |
| `paging` | object | Pagination information |
| `metadata` | object | Metadata with totalReturned and hasMore |
| `metadata` | object | Operation metadata |
| `success` | boolean | Operation success status |
### `hubspot_get_contact`
@@ -94,7 +94,7 @@ Retrieve a single contact by ID or email from HubSpot
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `contact` | object | HubSpot contact object with properties |
| `contactId` | string | The retrieved contact ID |
| `metadata` | object | Operation metadata |
| `success` | boolean | Operation success status |
### `hubspot_create_contact`
@@ -113,7 +113,7 @@ Create a new contact in HubSpot. Requires at least one of: email, firstname, or
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `contact` | object | Created HubSpot contact object |
| `contactId` | string | The created contact ID |
| `metadata` | object | Operation metadata |
| `success` | boolean | Operation success status |
### `hubspot_update_contact`
@@ -133,7 +133,7 @@ Update an existing contact in HubSpot by ID or email
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `contact` | object | Updated HubSpot contact object |
| `contactId` | string | The updated contact ID |
| `metadata` | object | Operation metadata |
| `success` | boolean | Operation success status |
### `hubspot_search_contacts`
@@ -158,7 +158,7 @@ Search for contacts in HubSpot using filters, sorting, and queries
| `contacts` | array | Array of matching HubSpot contact objects |
| `total` | number | Total number of matching contacts |
| `paging` | object | Pagination information |
| `metadata` | object | Metadata with totalReturned and hasMore |
| `metadata` | object | Operation metadata |
| `success` | boolean | Operation success status |
### `hubspot_list_companies`
@@ -180,7 +180,7 @@ Retrieve all companies from HubSpot account with pagination support
| --------- | ---- | ----------- |
| `companies` | array | Array of HubSpot company objects |
| `paging` | object | Pagination information |
| `metadata` | object | Metadata with totalReturned and hasMore |
| `metadata` | object | Operation metadata |
| `success` | boolean | Operation success status |
### `hubspot_get_company`
@@ -201,7 +201,7 @@ Retrieve a single company by ID or domain from HubSpot
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `company` | object | HubSpot company object with properties |
| `companyId` | string | The retrieved company ID |
| `metadata` | object | Operation metadata |
| `success` | boolean | Operation success status |
### `hubspot_create_company`
@@ -220,7 +220,7 @@ Create a new company in HubSpot
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `company` | object | Created HubSpot company object |
| `companyId` | string | The created company ID |
| `metadata` | object | Operation metadata |
| `success` | boolean | Operation success status |
### `hubspot_update_company`
@@ -240,7 +240,7 @@ Update an existing company in HubSpot by ID or domain
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `company` | object | Updated HubSpot company object |
| `companyId` | string | The updated company ID |
| `metadata` | object | Operation metadata |
| `success` | boolean | Operation success status |
### `hubspot_search_companies`
@@ -265,7 +265,7 @@ Search for companies in HubSpot using filters, sorting, and queries
| `companies` | array | Array of matching HubSpot company objects |
| `total` | number | Total number of matching companies |
| `paging` | object | Pagination information |
| `metadata` | object | Metadata with totalReturned and hasMore |
| `metadata` | object | Operation metadata |
| `success` | boolean | Operation success status |
### `hubspot_list_deals`
@@ -287,7 +287,7 @@ Retrieve all deals from HubSpot account with pagination support
| --------- | ---- | ----------- |
| `deals` | array | Array of HubSpot deal objects |
| `paging` | object | Pagination information |
| `metadata` | object | Metadata with totalReturned and hasMore |
| `metadata` | object | Operation metadata |
| `success` | boolean | Operation success status |

View File

@@ -0,0 +1,365 @@
---
title: Intercom
description: Manage contacts, companies, conversations, tickets, and messages in Intercom
---
import { BlockInfoCard } from "@/components/ui/block-info-card"
<BlockInfoCard
type="intercom"
color="#E0E0E0"
/>
{/* MANUAL-CONTENT-START:intro */}
[Intercom](https://www.intercom.com/) is a leading customer communications platform that enables you to manage and automate your interactions with contacts, companies, conversations, tickets, and messages—all in one place. The Intercom integration in Sim lets your agents programmatically manage customer relationships, support requests, and conversations directly from your automated workflows.
With the Intercom tools, you can:
- **Contact Management:** Create, get, update, list, search, and delete contacts—automate your CRM processes and keep your customer records up-to-date.
- **Company Management:** Create new companies, retrieve company details, and list all companies related to your users or business clients.
- **Conversation Handling:** Get, list, reply to, and search through conversations—allowing agents to track ongoing support threads, provide answers, and automate follow-up actions.
- **Ticket Management:** Create and retrieve tickets programmatically, helping you automate customer service, support issue tracking, and workflow escalations.
- **Send Messages:** Trigger messages to users or leads for onboarding, support, or marketing, all from within your workflow automation.
By integrating Intercom tools into Sim, you empower your workflows to communicate directly with your users, automate customer support processes, manage leads, and streamline communications at scale. Whether you need to create new contacts, keep customer data synchronized, manage support tickets, or send personalized engagement messages, the Intercom tools provide a comprehensive way to manage customer interactions as part of your intelligent automations.
{/* MANUAL-CONTENT-END */}
## Usage Instructions
Integrate Intercom into the workflow. Can create, get, update, list, search, and delete contacts; create, get, and list companies; get, list, reply to, and search conversations; create and get tickets; and create messages.
## Tools
### `intercom_create_contact`
Create a new contact in Intercom with email, external_id, or role
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `role` | string | No | The role of the contact. Accepts 'user' or 'lead'. Defaults to 'lead' if not specified. |
| `email` | string | No | The contact's email address |
| `external_id` | string | No | A unique identifier for the contact provided by the client |
| `phone` | string | No | The contact's phone number |
| `name` | string | No | The contact's name |
| `avatar` | string | No | An avatar image URL for the contact |
| `signed_up_at` | number | No | The time the user signed up as a Unix timestamp |
| `last_seen_at` | number | No | The time the user was last seen as a Unix timestamp |
| `owner_id` | string | No | The id of an admin that has been assigned account ownership of the contact |
| `unsubscribed_from_emails` | boolean | No | Whether the contact is unsubscribed from emails |
| `custom_attributes` | string | No | Custom attributes as JSON object \(e.g., \{"attribute_name": "value"\}\) |
| `company_id` | string | No | Company ID to associate the contact with during creation |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `contact` | object | Created contact object |
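A minimal `intercom_create_contact` sketch; note that `custom_attributes` is passed as a JSON string, and all identifiers and addresses here are placeholders.
```typescript
// Illustrative intercom_create_contact input. role defaults to 'lead'
// when omitted; custom_attributes is a JSON string.
const createContactInput = {
  role: 'user',
  email: 'alice@example.com',
  name: 'Alice Example',
  external_id: 'crm-10042', // placeholder external ID
  custom_attributes: '{"plan": "pro", "seats": 5}',
}
```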
### `intercom_get_contact`
Get a single contact by ID from Intercom
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `contactId` | string | Yes | Contact ID to retrieve |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `contact` | object | Contact object |
### `intercom_update_contact`
Update an existing contact in Intercom
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `contactId` | string | Yes | Contact ID to update |
| `role` | string | No | The role of the contact. Accepts 'user' or 'lead'. |
| `external_id` | string | No | A unique identifier for the contact provided by the client |
| `email` | string | No | The contact's email address |
| `phone` | string | No | The contact's phone number |
| `name` | string | No | The contact's name |
| `avatar` | string | No | An avatar image URL for the contact |
| `signed_up_at` | number | No | The time the user signed up as a Unix timestamp |
| `last_seen_at` | number | No | The time the user was last seen as a Unix timestamp |
| `owner_id` | string | No | The id of an admin that has been assigned account ownership of the contact |
| `unsubscribed_from_emails` | boolean | No | Whether the contact is unsubscribed from emails |
| `custom_attributes` | string | No | Custom attributes as JSON object \(e.g., \{"attribute_name": "value"\}\) |
| `company_id` | string | No | Company ID to associate the contact with |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `contact` | object | Updated contact object |
### `intercom_list_contacts`
List all contacts from Intercom with pagination support
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `per_page` | number | No | Number of results per page \(max: 150\) |
| `starting_after` | string | No | Cursor for pagination - ID to start after |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `contacts` | array | Array of contact objects |
### `intercom_search_contacts`
Search for contacts in Intercom using a query
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `query` | string | Yes | Search query \(e.g., \{"field":"email","operator":"=","value":"user@example.com"\}\) |
| `per_page` | number | No | Number of results per page \(max: 150\) |
| `starting_after` | string | No | Cursor for pagination |
| `sort_field` | string | No | Field to sort by \(e.g., "name", "created_at", "last_seen_at"\) |
| `sort_order` | string | No | Sort order: "ascending" or "descending" |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `contacts` | array | Array of matching contact objects |
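A sketch of an `intercom_search_contacts` input following the query shape shown in the table; the email value is illustrative.
```typescript
// Illustrative intercom_search_contacts input.
const searchContactsInput = {
  query: '{"field":"email","operator":"=","value":"user@example.com"}',
  per_page: 50,
  sort_field: 'last_seen_at',
  sort_order: 'descending',
}
```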
### `intercom_delete_contact`
Delete a contact from Intercom by ID
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `contactId` | string | Yes | Contact ID to delete |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `id` | string | ID of deleted contact |
| `deleted` | boolean | Whether the contact was deleted |
| `metadata` | object | Operation metadata |
### `intercom_create_company`
Create or update a company in Intercom
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `company_id` | string | Yes | Your unique identifier for the company |
| `name` | string | No | The name of the company |
| `website` | string | No | The company website |
| `plan` | string | No | The company plan name |
| `size` | number | No | The number of employees in the company |
| `industry` | string | No | The industry the company operates in |
| `monthly_spend` | number | No | How much revenue the company generates for your business. Note: This field truncates floats to whole integers \(e.g., 155.98 becomes 155\) |
| `custom_attributes` | string | No | Custom attributes as JSON object |
| `remote_created_at` | number | No | The time the company was created by you as a Unix timestamp |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `company` | object | Created or updated company object |
### `intercom_get_company`
Retrieve a single company by ID from Intercom
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `companyId` | string | Yes | Company ID to retrieve |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `company` | object | Company object |
### `intercom_list_companies`
List all companies from Intercom with pagination support. Note: This endpoint has a limit of 10,000 companies that can be returned using pagination. For datasets larger than 10,000 companies, use the Scroll API instead.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `per_page` | number | No | Number of results per page |
| `page` | number | No | Page number |
| `starting_after` | string | No | Cursor for pagination \(preferred over page-based pagination\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `companies` | array | Array of company objects |
### `intercom_get_conversation`
Retrieve a single conversation by ID from Intercom
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `conversationId` | string | Yes | Conversation ID to retrieve |
| `display_as` | string | No | Set to "plaintext" to retrieve messages in plain text |
| `include_translations` | boolean | No | When true, conversation parts will be translated to the detected language of the conversation |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `conversation` | object | Conversation object |
### `intercom_list_conversations`
List all conversations from Intercom with pagination support
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `per_page` | number | No | Number of results per page \(max: 150\) |
| `starting_after` | string | No | Cursor for pagination |
| `sort` | string | No | Field to sort by \(e.g., "waiting_since", "updated_at", "created_at"\) |
| `order` | string | No | Sort order: "asc" \(ascending\) or "desc" \(descending\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `conversations` | array | Array of conversation objects |
### `intercom_reply_conversation`
Reply to a conversation as an admin in Intercom
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `conversationId` | string | Yes | Conversation ID to reply to |
| `message_type` | string | Yes | Message type: "comment" or "note" |
| `body` | string | Yes | The text body of the reply |
| `admin_id` | string | No | The ID of the admin authoring the reply. If not provided, a default admin \(Operator/Fin\) will be used. |
| `attachment_urls` | string | No | Comma-separated list of image URLs \(max 10\) |
| `created_at` | number | No | Unix timestamp for when the reply was created. If not provided, current time is used. |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `conversation` | object | Updated conversation object |
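A minimal `intercom_reply_conversation` sketch posting an admin comment with one attachment; the conversation ID, admin ID, and URL are placeholders.
```typescript
// Illustrative intercom_reply_conversation input.
const replyConversationInput = {
  conversationId: '123456789',       // placeholder conversation ID
  message_type: 'comment',           // 'comment' or 'note'
  body: 'Thanks for the report, a fix ships this week.',
  admin_id: '987654',                // placeholder admin ID
  attachment_urls: 'https://example.com/screenshot.png',
}
```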
### `intercom_search_conversations`
Search for conversations in Intercom using a query
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `query` | string | Yes | Search query as JSON object |
| `per_page` | number | No | Number of results per page \(max: 150\) |
| `starting_after` | string | No | Cursor for pagination |
| `sort_field` | string | No | Field to sort by \(e.g., "created_at", "updated_at"\) |
| `sort_order` | string | No | Sort order: "ascending" or "descending" |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `conversations` | array | Array of matching conversation objects |
### `intercom_create_ticket`
Create a new ticket in Intercom
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `ticket_type_id` | string | Yes | The ID of the ticket type |
| `contacts` | string | Yes | JSON array of contact identifiers \(e.g., \[\{"id": "contact_id"\}\]\) |
| `ticket_attributes` | string | Yes | JSON object with ticket attributes including _default_title_ and _default_description_ |
| `company_id` | string | No | Company ID to associate the ticket with |
| `created_at` | number | No | Unix timestamp for when the ticket was created. If not provided, current time is used. |
| `conversation_to_link_id` | string | No | ID of an existing conversation to link to this ticket |
| `disable_notifications` | boolean | No | When true, suppresses notifications when the ticket is created |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `ticket` | object | Created ticket object |
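A sketch of an `intercom_create_ticket` input; `contacts` and `ticket_attributes` are JSON strings, `_default_title_` and `_default_description_` are the built-in attribute keys mentioned above, and the ticket type and contact IDs are placeholders.
```typescript
// Illustrative intercom_create_ticket input.
const createTicketInput = {
  ticket_type_id: '42',                             // placeholder type ID
  contacts: '[{"id": "670c1a2b3c4d5e6f7a8b9c0d"}]', // placeholder contact ID
  ticket_attributes:
    '{"_default_title_": "Billing discrepancy", "_default_description_": "Customer was charged twice in May."}',
}
```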
### `intercom_get_ticket`
Retrieve a single ticket by ID from Intercom
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `ticketId` | string | Yes | Ticket ID to retrieve |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `ticket` | object | Ticket object |
### `intercom_create_message`
Create and send a new admin-initiated message in Intercom
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `message_type` | string | Yes | Message type: "inapp" for in-app messages or "email" for email messages |
| `template` | string | Yes | Message template style: "plain" for plain text or "personal" for personalized style |
| `subject` | string | No | The subject of the message \(for email type\) |
| `body` | string | Yes | The body of the message |
| `from_type` | string | Yes | Sender type: "admin" |
| `from_id` | string | Yes | The ID of the admin sending the message |
| `to_type` | string | Yes | Recipient type: "contact" |
| `to_id` | string | Yes | The ID of the contact receiving the message |
| `created_at` | number | No | Unix timestamp for when the message was created. If not provided, current time is used. |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `message` | object | Created message object |
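A minimal `intercom_create_message` sketch for an admin-initiated in-app message; the admin and contact IDs are placeholders.
```typescript
// Illustrative intercom_create_message input.
const createMessageInput = {
  message_type: 'inapp', // 'inapp' or 'email'
  template: 'plain',     // 'plain' or 'personal'
  body: 'Welcome aboard! Let us know if you need help getting set up.',
  from_type: 'admin',
  from_id: '987654',                  // placeholder admin ID
  to_type: 'contact',
  to_id: '670c1a2b3c4d5e6f7a8b9c0d',  // placeholder contact ID
}
```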
## Notes
- Category: `tools`
- Type: `intercom`

View File

@@ -1,486 +0,0 @@
---
title: Intercom
description: Contact object with id, type, role, email, phone, name, external_id, created_at, updated_at
---
import { BlockInfoCard } from "@/components/ui/block-info-card"
<BlockInfoCard
type="intercom_v2"
color="#F5F5F5"
/>
## Tools
### `intercom_create_contact_v2`
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `contact` | json | Contact object with id, type, role, email, phone, name, external_id, created_at, updated_at |
| `contactId` | string | ID of the contact \(for create/update operations\) |
| `contacts` | array | Array of contacts \(for list/search operations\) |
| `company` | json | Company object with id, company_id, name, website |
| `companyId` | string | ID of the company \(for create operations\) |
| `companies` | array | Array of companies \(for list operations\) |
| `conversation` | json | Conversation object with id, title, state, open |
| `conversationId` | string | ID of the conversation \(for reply operations\) |
| `conversations` | array | Array of conversations \(for list/search operations\) |
| `ticket` | json | Ticket object with id, ticket_id, ticket_state |
| `ticketId` | string | ID of the ticket \(for create operations\) |
| `message` | json | Message object with id, type |
| `messageId` | string | ID of the message \(for create operations\) |
| `total_count` | number | Total count \(for list/search operations\) |
| `pages` | json | Pagination info with page, per_page, total_pages |
| `id` | string | ID of the deleted item \(for delete operations\) |
| `deleted` | boolean | Whether the item was deleted \(for delete operations\) |
### `intercom_get_contact_v2`
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `contact` | json | Contact object with id, type, role, email, phone, name, external_id, created_at, updated_at |
| `contactId` | string | ID of the contact \(for create/update operations\) |
| `contacts` | array | Array of contacts \(for list/search operations\) |
| `company` | json | Company object with id, company_id, name, website |
| `companyId` | string | ID of the company \(for create operations\) |
| `companies` | array | Array of companies \(for list operations\) |
| `conversation` | json | Conversation object with id, title, state, open |
| `conversationId` | string | ID of the conversation \(for reply operations\) |
| `conversations` | array | Array of conversations \(for list/search operations\) |
| `ticket` | json | Ticket object with id, ticket_id, ticket_state |
| `ticketId` | string | ID of the ticket \(for create operations\) |
| `message` | json | Message object with id, type |
| `messageId` | string | ID of the message \(for create operations\) |
| `total_count` | number | Total count \(for list/search operations\) |
| `pages` | json | Pagination info with page, per_page, total_pages |
| `id` | string | ID of the deleted item \(for delete operations\) |
| `deleted` | boolean | Whether the item was deleted \(for delete operations\) |
### `intercom_update_contact_v2`
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `contact` | json | Contact object with id, type, role, email, phone, name, external_id, created_at, updated_at |
| `contactId` | string | ID of the contact \(for create/update operations\) |
| `contacts` | array | Array of contacts \(for list/search operations\) |
| `company` | json | Company object with id, company_id, name, website |
| `companyId` | string | ID of the company \(for create operations\) |
| `companies` | array | Array of companies \(for list operations\) |
| `conversation` | json | Conversation object with id, title, state, open |
| `conversationId` | string | ID of the conversation \(for reply operations\) |
| `conversations` | array | Array of conversations \(for list/search operations\) |
| `ticket` | json | Ticket object with id, ticket_id, ticket_state |
| `ticketId` | string | ID of the ticket \(for create operations\) |
| `message` | json | Message object with id, type |
| `messageId` | string | ID of the message \(for create operations\) |
| `total_count` | number | Total count \(for list/search operations\) |
| `pages` | json | Pagination info with page, per_page, total_pages |
| `id` | string | ID of the deleted item \(for delete operations\) |
| `deleted` | boolean | Whether the item was deleted \(for delete operations\) |
### `intercom_list_contacts_v2`
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `contact` | json | Contact object with id, type, role, email, phone, name, external_id, created_at, updated_at |
| `contactId` | string | ID of the contact \(for create/update operations\) |
| `contacts` | array | Array of contacts \(for list/search operations\) |
| `company` | json | Company object with id, company_id, name, website |
| `companyId` | string | ID of the company \(for create operations\) |
| `companies` | array | Array of companies \(for list operations\) |
| `conversation` | json | Conversation object with id, title, state, open |
| `conversationId` | string | ID of the conversation \(for reply operations\) |
| `conversations` | array | Array of conversations \(for list/search operations\) |
| `ticket` | json | Ticket object with id, ticket_id, ticket_state |
| `ticketId` | string | ID of the ticket \(for create operations\) |
| `message` | json | Message object with id, type |
| `messageId` | string | ID of the message \(for create operations\) |
| `total_count` | number | Total count \(for list/search operations\) |
| `pages` | json | Pagination info with page, per_page, total_pages |
| `id` | string | ID of the deleted item \(for delete operations\) |
| `deleted` | boolean | Whether the item was deleted \(for delete operations\) |
### `intercom_search_contacts_v2`
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `contact` | json | Contact object with id, type, role, email, phone, name, external_id, created_at, updated_at |
| `contactId` | string | ID of the contact \(for create/update operations\) |
| `contacts` | array | Array of contacts \(for list/search operations\) |
| `company` | json | Company object with id, company_id, name, website |
| `companyId` | string | ID of the company \(for create operations\) |
| `companies` | array | Array of companies \(for list operations\) |
| `conversation` | json | Conversation object with id, title, state, open |
| `conversationId` | string | ID of the conversation \(for reply operations\) |
| `conversations` | array | Array of conversations \(for list/search operations\) |
| `ticket` | json | Ticket object with id, ticket_id, ticket_state |
| `ticketId` | string | ID of the ticket \(for create operations\) |
| `message` | json | Message object with id, type |
| `messageId` | string | ID of the message \(for create operations\) |
| `total_count` | number | Total count \(for list/search operations\) |
| `pages` | json | Pagination info with page, per_page, total_pages |
| `id` | string | ID of the deleted item \(for delete operations\) |
| `deleted` | boolean | Whether the item was deleted \(for delete operations\) |
### `intercom_delete_contact_v2`
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `contact` | json | Contact object with id, type, role, email, phone, name, external_id, created_at, updated_at |
| `contactId` | string | ID of the contact \(for create/update operations\) |
| `contacts` | array | Array of contacts \(for list/search operations\) |
| `company` | json | Company object with id, company_id, name, website |
| `companyId` | string | ID of the company \(for create operations\) |
| `companies` | array | Array of companies \(for list operations\) |
| `conversation` | json | Conversation object with id, title, state, open |
| `conversationId` | string | ID of the conversation \(for reply operations\) |
| `conversations` | array | Array of conversations \(for list/search operations\) |
| `ticket` | json | Ticket object with id, ticket_id, ticket_state |
| `ticketId` | string | ID of the ticket \(for create operations\) |
| `message` | json | Message object with id, type |
| `messageId` | string | ID of the message \(for create operations\) |
| `total_count` | number | Total count \(for list/search operations\) |
| `pages` | json | Pagination info with page, per_page, total_pages |
| `id` | string | ID of the deleted item \(for delete operations\) |
| `deleted` | boolean | Whether the item was deleted \(for delete operations\) |
### `intercom_create_company_v2`
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `contact` | json | Contact object with id, type, role, email, phone, name, external_id, created_at, updated_at |
| `contactId` | string | ID of the contact \(for create/update operations\) |
| `contacts` | array | Array of contacts \(for list/search operations\) |
| `company` | json | Company object with id, company_id, name, website |
| `companyId` | string | ID of the company \(for create operations\) |
| `companies` | array | Array of companies \(for list operations\) |
| `conversation` | json | Conversation object with id, title, state, open |
| `conversationId` | string | ID of the conversation \(for reply operations\) |
| `conversations` | array | Array of conversations \(for list/search operations\) |
| `ticket` | json | Ticket object with id, ticket_id, ticket_state |
| `ticketId` | string | ID of the ticket \(for create operations\) |
| `message` | json | Message object with id, type |
| `messageId` | string | ID of the message \(for create operations\) |
| `total_count` | number | Total count \(for list/search operations\) |
| `pages` | json | Pagination info with page, per_page, total_pages |
| `id` | string | ID of the deleted item \(for delete operations\) |
| `deleted` | boolean | Whether the item was deleted \(for delete operations\) |
### `intercom_get_company_v2`
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `contact` | json | Contact object with id, type, role, email, phone, name, external_id, created_at, updated_at |
| `contactId` | string | ID of the contact \(for create/update operations\) |
| `contacts` | array | Array of contacts \(for list/search operations\) |
| `company` | json | Company object with id, company_id, name, website |
| `companyId` | string | ID of the company \(for create operations\) |
| `companies` | array | Array of companies \(for list operations\) |
| `conversation` | json | Conversation object with id, title, state, open |
| `conversationId` | string | ID of the conversation \(for reply operations\) |
| `conversations` | array | Array of conversations \(for list/search operations\) |
| `ticket` | json | Ticket object with id, ticket_id, ticket_state |
| `ticketId` | string | ID of the ticket \(for create operations\) |
| `message` | json | Message object with id, type |
| `messageId` | string | ID of the message \(for create operations\) |
| `total_count` | number | Total count \(for list/search operations\) |
| `pages` | json | Pagination info with page, per_page, total_pages |
| `id` | string | ID of the deleted item \(for delete operations\) |
| `deleted` | boolean | Whether the item was deleted \(for delete operations\) |
### `intercom_list_companies_v2`
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `contact` | json | Contact object with id, type, role, email, phone, name, external_id, created_at, updated_at |
| `contactId` | string | ID of the contact \(for create/update operations\) |
| `contacts` | array | Array of contacts \(for list/search operations\) |
| `company` | json | Company object with id, company_id, name, website |
| `companyId` | string | ID of the company \(for create operations\) |
| `companies` | array | Array of companies \(for list operations\) |
| `conversation` | json | Conversation object with id, title, state, open |
| `conversationId` | string | ID of the conversation \(for reply operations\) |
| `conversations` | array | Array of conversations \(for list/search operations\) |
| `ticket` | json | Ticket object with id, ticket_id, ticket_state |
| `ticketId` | string | ID of the ticket \(for create operations\) |
| `message` | json | Message object with id, type |
| `messageId` | string | ID of the message \(for create operations\) |
| `total_count` | number | Total count \(for list/search operations\) |
| `pages` | json | Pagination info with page, per_page, total_pages |
| `id` | string | ID of the deleted item \(for delete operations\) |
| `deleted` | boolean | Whether the item was deleted \(for delete operations\) |
### `intercom_get_conversation_v2`
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `contact` | json | Contact object with id, type, role, email, phone, name, external_id, created_at, updated_at |
| `contactId` | string | ID of the contact \(for create/update operations\) |
| `contacts` | array | Array of contacts \(for list/search operations\) |
| `company` | json | Company object with id, company_id, name, website |
| `companyId` | string | ID of the company \(for create operations\) |
| `companies` | array | Array of companies \(for list operations\) |
| `conversation` | json | Conversation object with id, title, state, open |
| `conversationId` | string | ID of the conversation \(for reply operations\) |
| `conversations` | array | Array of conversations \(for list/search operations\) |
| `ticket` | json | Ticket object with id, ticket_id, ticket_state |
| `ticketId` | string | ID of the ticket \(for create operations\) |
| `message` | json | Message object with id, type |
| `messageId` | string | ID of the message \(for create operations\) |
| `total_count` | number | Total count \(for list/search operations\) |
| `pages` | json | Pagination info with page, per_page, total_pages |
| `id` | string | ID of the deleted item \(for delete operations\) |
| `deleted` | boolean | Whether the item was deleted \(for delete operations\) |
### `intercom_list_conversations_v2`
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `contact` | json | Contact object with id, type, role, email, phone, name, external_id, created_at, updated_at |
| `contactId` | string | ID of the contact \(for create/update operations\) |
| `contacts` | array | Array of contacts \(for list/search operations\) |
| `company` | json | Company object with id, company_id, name, website |
| `companyId` | string | ID of the company \(for create operations\) |
| `companies` | array | Array of companies \(for list operations\) |
| `conversation` | json | Conversation object with id, title, state, open |
| `conversationId` | string | ID of the conversation \(for reply operations\) |
| `conversations` | array | Array of conversations \(for list/search operations\) |
| `ticket` | json | Ticket object with id, ticket_id, ticket_state |
| `ticketId` | string | ID of the ticket \(for create operations\) |
| `message` | json | Message object with id, type |
| `messageId` | string | ID of the message \(for create operations\) |
| `total_count` | number | Total count \(for list/search operations\) |
| `pages` | json | Pagination info with page, per_page, total_pages |
| `id` | string | ID of the deleted item \(for delete operations\) |
| `deleted` | boolean | Whether the item was deleted \(for delete operations\) |
### `intercom_reply_conversation_v2`
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `contact` | json | Contact object with id, type, role, email, phone, name, external_id, created_at, updated_at |
| `contactId` | string | ID of the contact \(for create/update operations\) |
| `contacts` | array | Array of contacts \(for list/search operations\) |
| `company` | json | Company object with id, company_id, name, website |
| `companyId` | string | ID of the company \(for create operations\) |
| `companies` | array | Array of companies \(for list operations\) |
| `conversation` | json | Conversation object with id, title, state, open |
| `conversationId` | string | ID of the conversation \(for reply operations\) |
| `conversations` | array | Array of conversations \(for list/search operations\) |
| `ticket` | json | Ticket object with id, ticket_id, ticket_state |
| `ticketId` | string | ID of the ticket \(for create operations\) |
| `message` | json | Message object with id, type |
| `messageId` | string | ID of the message \(for create operations\) |
| `total_count` | number | Total count \(for list/search operations\) |
| `pages` | json | Pagination info with page, per_page, total_pages |
| `id` | string | ID of the deleted item \(for delete operations\) |
| `deleted` | boolean | Whether the item was deleted \(for delete operations\) |
### `intercom_search_conversations_v2`
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `contact` | json | Contact object with id, type, role, email, phone, name, external_id, created_at, updated_at |
| `contactId` | string | ID of the contact \(for create/update operations\) |
| `contacts` | array | Array of contacts \(for list/search operations\) |
| `company` | json | Company object with id, company_id, name, website |
| `companyId` | string | ID of the company \(for create operations\) |
| `companies` | array | Array of companies \(for list operations\) |
| `conversation` | json | Conversation object with id, title, state, open |
| `conversationId` | string | ID of the conversation \(for reply operations\) |
| `conversations` | array | Array of conversations \(for list/search operations\) |
| `ticket` | json | Ticket object with id, ticket_id, ticket_state |
| `ticketId` | string | ID of the ticket \(for create operations\) |
| `message` | json | Message object with id, type |
| `messageId` | string | ID of the message \(for create operations\) |
| `total_count` | number | Total count \(for list/search operations\) |
| `pages` | json | Pagination info with page, per_page, total_pages |
| `id` | string | ID of the deleted item \(for delete operations\) |
| `deleted` | boolean | Whether the item was deleted \(for delete operations\) |
### `intercom_create_ticket_v2`
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `contact` | json | Contact object with id, type, role, email, phone, name, external_id, created_at, updated_at |
| `contactId` | string | ID of the contact \(for create/update operations\) |
| `contacts` | array | Array of contacts \(for list/search operations\) |
| `company` | json | Company object with id, company_id, name, website |
| `companyId` | string | ID of the company \(for create operations\) |
| `companies` | array | Array of companies \(for list operations\) |
| `conversation` | json | Conversation object with id, title, state, open |
| `conversationId` | string | ID of the conversation \(for reply operations\) |
| `conversations` | array | Array of conversations \(for list/search operations\) |
| `ticket` | json | Ticket object with id, ticket_id, ticket_state |
| `ticketId` | string | ID of the ticket \(for create operations\) |
| `message` | json | Message object with id, type |
| `messageId` | string | ID of the message \(for create operations\) |
| `total_count` | number | Total count \(for list/search operations\) |
| `pages` | json | Pagination info with page, per_page, total_pages |
| `id` | string | ID of the deleted item \(for delete operations\) |
| `deleted` | boolean | Whether the item was deleted \(for delete operations\) |
### `intercom_get_ticket_v2`
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `contact` | json | Contact object with id, type, role, email, phone, name, external_id, created_at, updated_at |
| `contactId` | string | ID of the contact \(for create/update operations\) |
| `contacts` | array | Array of contacts \(for list/search operations\) |
| `company` | json | Company object with id, company_id, name, website |
| `companyId` | string | ID of the company \(for create operations\) |
| `companies` | array | Array of companies \(for list operations\) |
| `conversation` | json | Conversation object with id, title, state, open |
| `conversationId` | string | ID of the conversation \(for reply operations\) |
| `conversations` | array | Array of conversations \(for list/search operations\) |
| `ticket` | json | Ticket object with id, ticket_id, ticket_state |
| `ticketId` | string | ID of the ticket \(for create operations\) |
| `message` | json | Message object with id, type |
| `messageId` | string | ID of the message \(for create operations\) |
| `total_count` | number | Total count \(for list/search operations\) |
| `pages` | json | Pagination info with page, per_page, total_pages |
| `id` | string | ID of the deleted item \(for delete operations\) |
| `deleted` | boolean | Whether the item was deleted \(for delete operations\) |
### `intercom_create_message_v2`
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `contact` | json | Contact object with id, type, role, email, phone, name, external_id, created_at, updated_at |
| `contactId` | string | ID of the contact \(for create/update operations\) |
| `contacts` | array | Array of contacts \(for list/search operations\) |
| `company` | json | Company object with id, company_id, name, website |
| `companyId` | string | ID of the company \(for create operations\) |
| `companies` | array | Array of companies \(for list operations\) |
| `conversation` | json | Conversation object with id, title, state, open |
| `conversationId` | string | ID of the conversation \(for reply operations\) |
| `conversations` | array | Array of conversations \(for list/search operations\) |
| `ticket` | json | Ticket object with id, ticket_id, ticket_state |
| `ticketId` | string | ID of the ticket \(for create operations\) |
| `message` | json | Message object with id, type |
| `messageId` | string | ID of the message \(for create operations\) |
| `total_count` | number | Total count \(for list/search operations\) |
| `pages` | json | Pagination info with page, per_page, total_pages |
| `id` | string | ID of the deleted item \(for delete operations\) |
| `deleted` | boolean | Whether the item was deleted \(for delete operations\) |
## Notes
- Category: `misc`
- Type: `intercom_v2`

View File

@@ -851,6 +851,24 @@ List all status updates for a project in Linear
| --------- | ---- | ----------- |
| `updates` | array | Array of project updates |
### `linear_create_project_link`
Add an external link to a project in Linear
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `projectId` | string | Yes | Project ID to add link to |
| `url` | string | Yes | URL of the external link |
| `label` | string | No | Link label/title |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `link` | object | The created project link |
### `linear_list_notifications`
List notifications for the current user in Linear
@@ -1228,6 +1246,7 @@ Create a new project label in Linear
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `projectId` | string | Yes | The project for this label |
| `name` | string | Yes | Project label name |
| `color` | string | No | Label color \(hex code\) |
| `description` | string | No | Label description |
@@ -1405,12 +1424,12 @@ Create a new project status in Linear
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `projectId` | string | Yes | The project to create the status for |
| `name` | string | Yes | Project status name |
| `type` | string | Yes | Status type: "backlog", "planned", "started", "paused", "completed", or "canceled" |
| `color` | string | Yes | Status color \(hex code\) |
| `position` | number | Yes | Position in status list \(e.g. 0, 1, 2...\) |
| `description` | string | No | Status description |
| `indefinite` | boolean | No | Whether the status is indefinite |
| `position` | number | No | Position in status list |
#### Output

View File

@@ -70,8 +70,8 @@ Retrieve a list of audiences (lists) from Mailchimp
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether the audiences were successfully retrieved |
| `output` | object | Audiences data |
| `success` | boolean | Operation success status |
| `output` | object | Audiences data and metadata |
### `mailchimp_get_audience`
@@ -88,8 +88,8 @@ Retrieve details of a specific audience (list) from Mailchimp
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether the audience was successfully retrieved |
| `output` | object | Audience data |
| `success` | boolean | Operation success status |
| `output` | object | Audience data and metadata |
### `mailchimp_create_audience`
@@ -150,7 +150,8 @@ Delete an audience (list) from Mailchimp
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether the audience was successfully deleted |
| `success` | boolean | Operation success status |
| `output` | object | Deletion confirmation |
### `mailchimp_get_members`
@@ -170,8 +171,8 @@ Retrieve a list of members from a Mailchimp audience
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether the members were successfully retrieved |
| `output` | object | Members data |
| `success` | boolean | Operation success status |
| `output` | object | Members data and metadata |
### `mailchimp_get_member`
@@ -189,8 +190,8 @@ Retrieve details of a specific member from a Mailchimp audience
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether the member was successfully retrieved |
| `output` | object | Member data |
| `success` | boolean | Operation success status |
| `output` | object | Member data and metadata |
### `mailchimp_add_member`
@@ -276,7 +277,8 @@ Delete a member from a Mailchimp audience
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether the member was successfully deleted |
| `success` | boolean | Operation success status |
| `output` | object | Deletion confirmation |
### `mailchimp_archive_member`
@@ -336,8 +338,8 @@ Retrieve a list of campaigns from Mailchimp
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether the campaigns were successfully retrieved |
| `output` | object | Campaigns data |
| `success` | boolean | Operation success status |
| `output` | object | Campaigns data and metadata |
### `mailchimp_get_campaign`
@@ -354,8 +356,8 @@ Retrieve details of a specific campaign from Mailchimp
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether the campaign was successfully retrieved |
| `output` | object | Campaign data |
| `success` | boolean | Operation success status |
| `output` | object | Campaign data and metadata |
### `mailchimp_create_campaign`
@@ -412,7 +414,8 @@ Delete a campaign from Mailchimp
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether the campaign was successfully deleted |
| `success` | boolean | Operation success status |
| `output` | object | Deletion confirmation |
### `mailchimp_send_campaign`
@@ -448,7 +451,8 @@ Schedule a Mailchimp campaign to be sent at a specific time
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether the campaign was successfully scheduled |
| `success` | boolean | Operation success status |
| `output` | object | Schedule confirmation |
### `mailchimp_unschedule_campaign`
@@ -501,7 +505,7 @@ Retrieve the HTML and plain-text content for a Mailchimp campaign
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether the campaign content was successfully retrieved |
| `success` | boolean | Operation success status |
| `output` | object | Campaign content data |
### `mailchimp_set_campaign_content`
@@ -541,8 +545,8 @@ Retrieve a list of automations from Mailchimp
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether the automations were successfully retrieved |
| `output` | object | Automations data |
| `success` | boolean | Operation success status |
| `output` | object | Automations data and metadata |
### `mailchimp_get_automation`
@@ -559,8 +563,8 @@ Retrieve details of a specific automation from Mailchimp
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether the automation was successfully retrieved |
| `output` | object | Automation data |
| `success` | boolean | Operation success status |
| `output` | object | Automation data and metadata |
### `mailchimp_start_automation`
@@ -634,8 +638,8 @@ Retrieve a list of templates from Mailchimp
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether the templates were successfully retrieved |
| `output` | object | Templates data |
| `success` | boolean | Operation success status |
| `output` | object | Templates data and metadata |
### `mailchimp_get_template`
@@ -652,8 +656,8 @@ Retrieve details of a specific template from Mailchimp
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether the template was successfully retrieved |
| `output` | object | Template data |
| `success` | boolean | Operation success status |
| `output` | object | Template data and metadata |
### `mailchimp_create_template`
@@ -709,7 +713,8 @@ Delete a template from Mailchimp
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether the template was successfully deleted |
| `success` | boolean | Operation success status |
| `output` | object | Deletion confirmation |
### `mailchimp_get_campaign_reports`
@@ -727,8 +732,8 @@ Retrieve a list of campaign reports from Mailchimp
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether the campaign reports were successfully retrieved |
| `output` | object | Campaign reports data |
| `success` | boolean | Operation success status |
| `output` | object | Campaign reports data and metadata |
### `mailchimp_get_campaign_report`
@@ -745,8 +750,8 @@ Retrieve the report for a specific campaign from Mailchimp
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether the campaign report was successfully retrieved |
| `output` | object | Campaign report data |
| `success` | boolean | Operation success status |
| `output` | object | Campaign report data and metadata |
### `mailchimp_get_segments`
@@ -765,8 +770,8 @@ Retrieve a list of segments from a Mailchimp audience
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether the segments were successfully retrieved |
| `output` | object | Segments data |
| `success` | boolean | Operation success status |
| `output` | object | Segments data and metadata |
### `mailchimp_get_segment`
@@ -784,8 +789,8 @@ Retrieve details of a specific segment from a Mailchimp audience
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether the segment was successfully retrieved |
| `output` | object | Segment data |
| `success` | boolean | Operation success status |
| `output` | object | Segment data and metadata |
### `mailchimp_create_segment`
@@ -844,7 +849,8 @@ Delete a segment from a Mailchimp audience
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether the segment was successfully deleted |
| `success` | boolean | Operation success status |
| `output` | object | Deletion confirmation |
### `mailchimp_get_segment_members`
@@ -864,8 +870,8 @@ Retrieve members of a specific segment from a Mailchimp audience
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether the segment members were successfully retrieved |
| `output` | object | Segment members data |
| `success` | boolean | Operation success status |
| `output` | object | Segment members data and metadata |
### `mailchimp_add_segment_member`
@@ -923,8 +929,8 @@ Retrieve tags associated with a member in a Mailchimp audience
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether the member tags were successfully retrieved |
| `output` | object | Member tags data |
| `success` | boolean | Operation success status |
| `output` | object | Member tags data and metadata |
### `mailchimp_add_member_tags`
@@ -983,8 +989,8 @@ Retrieve a list of merge fields from a Mailchimp audience
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether the merge fields were successfully retrieved |
| `output` | object | Merge fields data |
| `success` | boolean | Operation success status |
| `output` | object | Merge fields data and metadata |
### `mailchimp_get_merge_field`
@@ -1002,8 +1008,8 @@ Retrieve details of a specific merge field from a Mailchimp audience
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether the merge field was successfully retrieved |
| `output` | object | Merge field data |
| `success` | boolean | Operation success status |
| `output` | object | Merge field data and metadata |
### `mailchimp_create_merge_field`
@@ -1061,7 +1067,8 @@ Delete a merge field from a Mailchimp audience
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether the merge field was successfully deleted |
| `success` | boolean | Operation success status |
| `output` | object | Deletion confirmation |
### `mailchimp_get_interest_categories`
@@ -1080,8 +1087,8 @@ Retrieve a list of interest categories from a Mailchimp audience
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether the interest categories were successfully retrieved |
| `output` | object | Interest categories data |
| `success` | boolean | Operation success status |
| `output` | object | Interest categories data and metadata |
### `mailchimp_get_interest_category`
@@ -1099,8 +1106,8 @@ Retrieve details of a specific interest category from a Mailchimp audience
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether the interest category was successfully retrieved |
| `output` | object | Interest category data |
| `success` | boolean | Operation success status |
| `output` | object | Interest category data and metadata |
### `mailchimp_create_interest_category`
@@ -1158,7 +1165,8 @@ Delete an interest category from a Mailchimp audience
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether the interest category was successfully deleted |
| `success` | boolean | Operation success status |
| `output` | object | Deletion confirmation |
### `mailchimp_get_interests`
@@ -1178,8 +1186,8 @@ Retrieve a list of interests from an interest category in a Mailchimp audience
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether the interests were successfully retrieved |
| `output` | object | Interests data |
| `success` | boolean | Operation success status |
| `output` | object | Interests data and metadata |
### `mailchimp_get_interest`
@@ -1198,8 +1206,8 @@ Retrieve details of a specific interest from an interest category in a Mailchimp
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether the interest was successfully retrieved |
| `output` | object | Interest data |
| `success` | boolean | Operation success status |
| `output` | object | Interest data and metadata |
### `mailchimp_create_interest`
@@ -1259,7 +1267,8 @@ Delete an interest from an interest category in a Mailchimp audience
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether the interest was successfully deleted |
| `success` | boolean | Operation success status |
| `output` | object | Deletion confirmation |
### `mailchimp_get_landing_pages`
@@ -1277,8 +1286,8 @@ Retrieve a list of landing pages from Mailchimp
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether the landing pages were successfully retrieved |
| `output` | object | Landing pages data |
| `success` | boolean | Operation success status |
| `output` | object | Landing pages data and metadata |
### `mailchimp_get_landing_page`
@@ -1295,8 +1304,8 @@ Retrieve details of a specific landing page from Mailchimp
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether the landing page was successfully retrieved |
| `output` | object | Landing page data |
| `success` | boolean | Operation success status |
| `output` | object | Landing page data and metadata |
### `mailchimp_create_landing_page`
@@ -1351,7 +1360,8 @@ Delete a landing page from Mailchimp
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether the landing page was successfully deleted |
| `success` | boolean | Operation success status |
| `output` | object | Deletion confirmation |
### `mailchimp_publish_landing_page`
@@ -1405,8 +1415,8 @@ Retrieve a list of batch operations from Mailchimp
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether the batch operations were successfully retrieved |
| `output` | object | Batch operations data |
| `success` | boolean | Operation success status |
| `output` | object | Batch operations data and metadata |
### `mailchimp_get_batch_operation`
@@ -1423,8 +1433,8 @@ Retrieve details of a specific batch operation from Mailchimp
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether the batch operation was successfully retrieved |
| `output` | object | Batch operation data |
| `success` | boolean | Operation success status |
| `output` | object | Batch operation data and metadata |
### `mailchimp_create_batch_operation`
@@ -1459,7 +1469,8 @@ Delete a batch operation from Mailchimp
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether the batch operation was successfully deleted |
| `success` | boolean | Operation success status |
| `output` | object | Deletion confirmation |

View File

@@ -12,7 +12,7 @@
"circleback",
"clay",
"confluence",
"cursor_v2",
"cursor",
"datadog",
"discord",
"dropbox",
@@ -24,10 +24,10 @@
"file",
"firecrawl",
"fireflies",
"github_v2",
"github",
"gitlab",
"gmail_v2",
"google_calendar_v2",
"gmail",
"google_calendar",
"google_docs",
"google_drive",
"google_forms",
@@ -45,7 +45,7 @@
"image_generator",
"imap",
"incidentio",
"intercom_v2",
"intercom",
"jina",
"jira",
"jira_service_management",
@@ -65,7 +65,7 @@
"mongodb",
"mysql",
"neo4j",
"notion_v2",
"notion",
"onedrive",
"openai",
"outlook",
@@ -101,6 +101,7 @@
"supabase",
"tavily",
"telegram",
"thinking",
"translate",
"trello",
"tts",

View File

@@ -0,0 +1,180 @@
---
title: Notion
description: Manage Notion pages
---
import { BlockInfoCard } from "@/components/ui/block-info-card"
<BlockInfoCard
type="notion"
color="#181C1E"
/>
{/* MANUAL-CONTENT-START:intro */}
[Notion](https://www.notion.so) is an all-in-one workspace that combines notes, documents, wikis, and project management tools into a single platform. It offers a flexible and customizable environment where users can create, organize, and collaborate on content in various formats.
With Notion, you can:
- **Create versatile content**: Build documents, wikis, databases, kanban boards, calendars, and more
- **Organize information**: Structure content hierarchically with nested pages and powerful databases
- **Collaborate seamlessly**: Share workspaces and pages with team members for real-time collaboration
- **Customize your workspace**: Design your ideal workflow with flexible templates and building blocks
- **Connect information**: Link between pages and databases to create a knowledge network
- **Access anywhere**: Use Notion across web, desktop, and mobile platforms with automatic syncing
In Sim, the Notion integration enables your agents to interact directly with your Notion workspace programmatically. This allows for powerful automation scenarios such as knowledge management, content creation, and information retrieval. Your agents can:
- **Read Notion pages**: Extract content and metadata from any Notion page.
- **Read Notion databases**: Retrieve database structure and information.
- **Write to pages**: Append new content to existing Notion pages.
- **Create new pages**: Generate new Notion pages under a parent page, with custom titles and content.
- **Query databases**: Search and filter database entries using advanced filter and sort criteria.
- **Search workspace**: Search across your entire Notion workspace for pages or databases matching specific queries.
- **Create new databases**: Programmatically create new databases with custom properties and structure.
This integration bridges the gap between your AI workflows and your knowledge base, enabling seamless documentation and information management. By connecting Sim with Notion, you can automate documentation processes, maintain up-to-date information repositories, generate reports, and organize information intelligently, all through your agents.
{/* MANUAL-CONTENT-END */}
## Usage Instructions
Integrate Notion into the workflow. Can read a page, read a database, create a page, create a database, append content, query a database, and search the workspace.
## Tools
### `notion_read`
Read content from a Notion page
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `pageId` | string | Yes | The ID of the Notion page to read |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `content` | string | Page content in markdown format with headers, paragraphs, lists, and todos |
| `metadata` | object | Page metadata including title, URL, and timestamps |
### `notion_read_database`
Read database information and structure from Notion
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `databaseId` | string | Yes | The ID of the Notion database to read |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `content` | string | Database information including title, properties schema, and metadata |
| `metadata` | object | Database metadata including title, ID, URL, timestamps, and properties schema |
### `notion_write`
Append content to a Notion page
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `pageId` | string | Yes | The ID of the Notion page to append content to |
| `content` | string | Yes | The content to append to the page |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `content` | string | Success message confirming content was appended to page |
### `notion_create_page`
Create a new page in Notion
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `parentId` | string | Yes | ID of the parent page |
| `title` | string | No | Title of the new page |
| `content` | string | No | Optional content to add to the page upon creation |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `content` | string | Success message confirming page creation |
| `metadata` | object | Page metadata including title, page ID, URL, and timestamps |
### `notion_query_database`
Query and filter Notion database entries with advanced filtering
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `databaseId` | string | Yes | The ID of the database to query |
| `filter` | string | No | Filter conditions as JSON \(optional; see the example below\) |
| `sorts` | string | No | Sort criteria as JSON array \(optional\) |
| `pageSize` | number | No | Number of results to return \(default: 100, max: 100\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `content` | string | Formatted list of database entries with their properties |
| `metadata` | object | Query metadata including total results count, pagination info, and raw results array |
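As a rough illustration of the `filter` and `sorts` inputs, here is a minimal sketch of a query payload. The database ID and property names (`Status`, `Due Date`) are placeholders, and the filter/sort shapes assume this tool forwards them to Notion's standard database query format:
```typescript
// Hypothetical notion_query_database input (placeholder ID and property names;
// filter/sort shapes assume Notion's standard database query format)
const queryDatabaseInput = {
  databaseId: '00000000-0000-0000-0000-000000000000', // placeholder database ID
  // Filter conditions passed as a JSON string
  filter: JSON.stringify({
    property: 'Status',
    select: { equals: 'In Progress' },
  }),
  // Sort criteria passed as a JSON array string
  sorts: JSON.stringify([{ property: 'Due Date', direction: 'ascending' }]),
  pageSize: 25,
}
```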
### `notion_search`
Search across all pages and databases in Notion workspace
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `query` | string | No | Search terms \(leave empty to get all pages\) |
| `filterType` | string | No | Filter by object type: page, database, or leave empty for all |
| `pageSize` | number | No | Number of results to return \(default: 100, max: 100\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `content` | string | Formatted list of search results including pages and databases |
| `metadata` | object | Search metadata including total results count, pagination info, and raw results array |
### `notion_create_database`
Create a new database in Notion with custom properties
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `parentId` | string | Yes | ID of the parent page where the database will be created |
| `title` | string | Yes | Title for the new database |
| `properties` | string | No | Database properties as JSON object \(optional, will create a default "Name" property if empty; see the example below\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `content` | string | Success message with database details and properties list |
| `metadata` | object | Database metadata including ID, title, URL, creation time, and properties schema |
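To make the `properties` parameter more concrete, the sketch below builds a small schema. The column names are invented, and the property shapes assume the tool passes this JSON through to Notion's standard database property format:
```typescript
// Hypothetical notion_create_database input (invented column names;
// property shapes assume Notion's standard database property schema)
const createDatabaseInput = {
  parentId: '00000000-0000-0000-0000-000000000000', // placeholder parent page ID
  title: 'Task Tracker',
  // Properties passed as a JSON object string; "Name" acts as the title column
  properties: JSON.stringify({
    Name: { title: {} },
    Status: { select: { options: [{ name: 'Todo' }, { name: 'Done' }] } },
    'Due Date': { date: {} },
  }),
}
```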
## Notes
- Category: `tools`
- Type: `notion`

View File

@@ -1,194 +0,0 @@
---
title: Notion
description: Manage Notion pages
---
import { BlockInfoCard } from "@/components/ui/block-info-card"
<BlockInfoCard
type="notion_v2"
color="#181C1E"
/>
## Usage Instructions
Integrate with Notion into the workflow. Can read page, read database, create page, create database, append content, query database, and search workspace.
## Tools
### `notion_read_v2`
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `content` | string | Page content in markdown format |
| `title` | string | Page or database title |
| `url` | string | Notion URL |
| `id` | string | Page or database ID |
| `created_time` | string | Creation timestamp |
| `last_edited_time` | string | Last edit timestamp |
| `results` | array | Array of results from query or search |
| `has_more` | boolean | Whether more results are available |
| `next_cursor` | string | Cursor for pagination |
| `total_results` | number | Number of results returned |
| `properties` | json | Database properties schema |
| `appended` | boolean | Whether content was successfully appended |
### `notion_read_database_v2`
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `content` | string | Page content in markdown format |
| `title` | string | Page or database title |
| `url` | string | Notion URL |
| `id` | string | Page or database ID |
| `created_time` | string | Creation timestamp |
| `last_edited_time` | string | Last edit timestamp |
| `results` | array | Array of results from query or search |
| `has_more` | boolean | Whether more results are available |
| `next_cursor` | string | Cursor for pagination |
| `total_results` | number | Number of results returned |
| `properties` | json | Database properties schema |
| `appended` | boolean | Whether content was successfully appended |
### `notion_write_v2`
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `content` | string | Page content in markdown format |
| `title` | string | Page or database title |
| `url` | string | Notion URL |
| `id` | string | Page or database ID |
| `created_time` | string | Creation timestamp |
| `last_edited_time` | string | Last edit timestamp |
| `results` | array | Array of results from query or search |
| `has_more` | boolean | Whether more results are available |
| `next_cursor` | string | Cursor for pagination |
| `total_results` | number | Number of results returned |
| `properties` | json | Database properties schema |
| `appended` | boolean | Whether content was successfully appended |
### `notion_create_page_v2`
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `content` | string | Page content in markdown format |
| `title` | string | Page or database title |
| `url` | string | Notion URL |
| `id` | string | Page or database ID |
| `created_time` | string | Creation timestamp |
| `last_edited_time` | string | Last edit timestamp |
| `results` | array | Array of results from query or search |
| `has_more` | boolean | Whether more results are available |
| `next_cursor` | string | Cursor for pagination |
| `total_results` | number | Number of results returned |
| `properties` | json | Database properties schema |
| `appended` | boolean | Whether content was successfully appended |
### `notion_query_database_v2`
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `content` | string | Page content in markdown format |
| `title` | string | Page or database title |
| `url` | string | Notion URL |
| `id` | string | Page or database ID |
| `created_time` | string | Creation timestamp |
| `last_edited_time` | string | Last edit timestamp |
| `results` | array | Array of results from query or search |
| `has_more` | boolean | Whether more results are available |
| `next_cursor` | string | Cursor for pagination |
| `total_results` | number | Number of results returned |
| `properties` | json | Database properties schema |
| `appended` | boolean | Whether content was successfully appended |
### `notion_search_v2`
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `content` | string | Page content in markdown format |
| `title` | string | Page or database title |
| `url` | string | Notion URL |
| `id` | string | Page or database ID |
| `created_time` | string | Creation timestamp |
| `last_edited_time` | string | Last edit timestamp |
| `results` | array | Array of results from query or search |
| `has_more` | boolean | Whether more results are available |
| `next_cursor` | string | Cursor for pagination |
| `total_results` | number | Number of results returned |
| `properties` | json | Database properties schema |
| `appended` | boolean | Whether content was successfully appended |
### `notion_create_database_v2`
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `content` | string | Page content in markdown format |
| `title` | string | Page or database title |
| `url` | string | Notion URL |
| `id` | string | Page or database ID |
| `created_time` | string | Creation timestamp |
| `last_edited_time` | string | Last edit timestamp |
| `results` | array | Array of results from query or search |
| `has_more` | boolean | Whether more results are available |
| `next_cursor` | string | Cursor for pagination |
| `total_results` | number | Number of results returned |
| `properties` | json | Database properties schema |
| `appended` | boolean | Whether content was successfully appended |
## Notes
- Category: `tools`
- Type: `notion_v2`

View File

@@ -55,7 +55,7 @@ Retrieve all deals from Pipedrive with optional filters
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `deals` | array | Array of deal objects from Pipedrive |
| `metadata` | object | Pagination metadata for the response |
| `metadata` | object | Operation metadata |
| `success` | boolean | Operation success status |
### `pipedrive_get_deal`
@@ -73,6 +73,7 @@ Retrieve detailed information about a specific deal
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `deal` | object | Deal object with full details |
| `metadata` | object | Operation metadata |
| `success` | boolean | Operation success status |
### `pipedrive_create_deal`
@@ -98,6 +99,7 @@ Create a new deal in Pipedrive
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `deal` | object | The created deal object |
| `metadata` | object | Operation metadata |
| `success` | boolean | Operation success status |
### `pipedrive_update_deal`
@@ -120,6 +122,7 @@ Update an existing deal in Pipedrive
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `deal` | object | The updated deal object |
| `metadata` | object | Operation metadata |
| `success` | boolean | Operation success status |
### `pipedrive_get_files`
@@ -140,7 +143,7 @@ Retrieve files from Pipedrive with optional filters
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `files` | array | Array of file objects from Pipedrive |
| `total_items` | number | Total number of files returned |
| `metadata` | object | Operation metadata |
| `success` | boolean | Operation success status |
### `pipedrive_get_mail_messages`
@@ -159,7 +162,7 @@ Retrieve mail threads from Pipedrive mailbox
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `messages` | array | Array of mail thread objects from Pipedrive mailbox |
| `total_items` | number | Total number of mail threads returned |
| `metadata` | object | Operation metadata |
| `success` | boolean | Operation success status |
### `pipedrive_get_mail_thread`
@@ -177,7 +180,7 @@ Retrieve all messages from a specific mail thread
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `messages` | array | Array of mail message objects from the thread |
| `metadata` | object | Thread and pagination metadata |
| `metadata` | object | Operation metadata including thread ID |
| `success` | boolean | Operation success status |
### `pipedrive_get_pipelines`
@@ -198,7 +201,7 @@ Retrieve all pipelines from Pipedrive
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `pipelines` | array | Array of pipeline objects from Pipedrive |
| `total_items` | number | Total number of pipelines returned |
| `metadata` | object | Operation metadata |
| `success` | boolean | Operation success status |
### `pipedrive_get_pipeline_deals`
@@ -219,7 +222,7 @@ Retrieve all deals in a specific pipeline
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `deals` | array | Array of deal objects from the pipeline |
| `metadata` | object | Pipeline and pagination metadata |
| `metadata` | object | Operation metadata including pipeline ID |
| `success` | boolean | Operation success status |
### `pipedrive_get_projects`
@@ -240,7 +243,7 @@ Retrieve all projects or a specific project from Pipedrive
| --------- | ---- | ----------- |
| `projects` | array | Array of project objects \(when listing all\) |
| `project` | object | Single project object \(when project_id is provided\) |
| `total_items` | number | Total number of projects returned |
| `metadata` | object | Operation metadata |
| `success` | boolean | Operation success status |
### `pipedrive_create_project`
@@ -261,6 +264,7 @@ Create a new project in Pipedrive
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `project` | object | The created project object |
| `metadata` | object | Operation metadata |
| `success` | boolean | Operation success status |
### `pipedrive_get_activities`
@@ -283,7 +287,7 @@ Retrieve activities (tasks) from Pipedrive with optional filters
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `activities` | array | Array of activity objects from Pipedrive |
| `total_items` | number | Total number of activities returned |
| `metadata` | object | Operation metadata |
| `success` | boolean | Operation success status |
### `pipedrive_create_activity`
@@ -309,6 +313,7 @@ Create a new activity (task) in Pipedrive
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `activity` | object | The created activity object |
| `metadata` | object | Operation metadata |
| `success` | boolean | Operation success status |
### `pipedrive_update_activity`
@@ -332,6 +337,7 @@ Update an existing activity (task) in Pipedrive
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `activity` | object | The updated activity object |
| `metadata` | object | Operation metadata |
| `success` | boolean | Operation success status |
### `pipedrive_get_leads`
@@ -355,7 +361,7 @@ Retrieve all leads or a specific lead from Pipedrive
| --------- | ---- | ----------- |
| `leads` | array | Array of lead objects \(when listing all\) |
| `lead` | object | Single lead object \(when lead_id is provided\) |
| `total_items` | number | Total number of leads returned |
| `metadata` | object | Operation metadata |
| `success` | boolean | Operation success status |
### `pipedrive_create_lead`
@@ -380,6 +386,7 @@ Create a new lead in Pipedrive
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `lead` | object | The created lead object |
| `metadata` | object | Operation metadata |
| `success` | boolean | Operation success status |
### `pipedrive_update_lead`
@@ -405,6 +412,7 @@ Update an existing lead in Pipedrive
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `lead` | object | The updated lead object |
| `metadata` | object | Operation metadata |
| `success` | boolean | Operation success status |
### `pipedrive_delete_lead`
@@ -422,6 +430,7 @@ Delete a specific lead from Pipedrive
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `data` | object | Deletion confirmation data |
| `metadata` | object | Operation metadata |
| `success` | boolean | Operation success status |

View File

@@ -77,7 +77,31 @@ Capture multiple events at once in PostHog. Use this for bulk event ingestion to
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `status` | string | Status message indicating whether the batch was captured successfully |
| `events_processed` | number | Number of events processed in the batch |
| `eventsProcessed` | number | Number of events processed in the batch |
### `posthog_list_events`
List events in PostHog. Note: This endpoint is deprecated but kept for backwards compatibility. For production use, prefer the Query endpoint with HogQL.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `personalApiKey` | string | Yes | PostHog Personal API Key \(for authenticated API access\) |
| `region` | string | No | PostHog region: us \(default\) or eu |
| `projectId` | string | Yes | PostHog Project ID |
| `limit` | number | No | Number of events to return \(default: 100, max: 100\) |
| `offset` | number | No | Number of events to skip for pagination |
| `event` | string | No | Filter by specific event name |
| `distinctId` | string | No | Filter by specific distinct_id |
| `before` | string | No | ISO 8601 timestamp - only return events before this time |
| `after` | string | No | ISO 8601 timestamp - only return events after this time |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `events` | array | List of events with their properties and metadata |
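For orientation, a minimal sketch of a `posthog_list_events` input follows; every value is a placeholder, and only parameters from the table above are used:
```typescript
// Hypothetical posthog_list_events input (all values are placeholders)
const listEventsInput = {
  personalApiKey: 'your-personal-api-key', // placeholder personal API key
  region: 'us',
  projectId: '12345',
  event: '$pageview', // filter by a specific event name
  after: '2024-01-01T00:00:00Z', // ISO 8601: only events after this time
  limit: 50,
}
```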
### `posthog_list_persons`
@@ -600,9 +624,9 @@ Evaluate feature flags for a specific user or group. This is a public endpoint t
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `feature_flags` | object | Feature flag evaluations \(key-value pairs where values are boolean or string variants\) |
| `feature_flag_payloads` | object | Additional payloads attached to feature flags |
| `errors_while_computing_flags` | boolean | Whether there were errors while computing flags |
| `featureFlags` | object | Feature flag evaluations \(key-value pairs where values are boolean or string variants\) |
| `featureFlagPayloads` | object | Additional payloads attached to feature flags |
| `errorsWhileComputingFlags` | boolean | Whether there were errors while computing flags |
### `posthog_list_experiments`

View File

@@ -259,7 +259,7 @@ Get lead(s) from Salesforce
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Operation success status |
| `success` | boolean | Success status |
| `output` | object | Lead data |
### `salesforce_create_lead`
@@ -286,8 +286,8 @@ Create a new lead
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Operation success status |
| `output` | object | Created lead data |
| `success` | boolean | Success |
| `output` | object | Created lead |
### `salesforce_update_lead`
@@ -314,8 +314,8 @@ Update an existing lead
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Operation success status |
| `output` | object | Updated lead data |
| `success` | boolean | Success |
| `output` | object | Updated lead |
### `salesforce_delete_lead`
@@ -333,8 +333,8 @@ Delete a lead
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Operation success status |
| `output` | object | Deleted lead data |
| `success` | boolean | Success |
| `output` | object | Deleted lead |
### `salesforce_get_opportunities`
@@ -355,7 +355,7 @@ Get opportunity(ies) from Salesforce
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Operation success status |
| `success` | boolean | Success |
| `output` | object | Opportunity data |
### `salesforce_create_opportunity`
@@ -380,8 +380,8 @@ Create a new opportunity
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Operation success status |
| `output` | object | Created opportunity data |
| `success` | boolean | Success |
| `output` | object | Created opportunity |
### `salesforce_update_opportunity`
@@ -406,8 +406,8 @@ Update an existing opportunity
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Operation success status |
| `output` | object | Updated opportunity data |
| `success` | boolean | Success |
| `output` | object | Updated opportunity |
### `salesforce_delete_opportunity`
@@ -425,8 +425,8 @@ Delete an opportunity
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Operation success status |
| `output` | object | Deleted opportunity data |
| `success` | boolean | Success |
| `output` | object | Deleted opportunity |
### `salesforce_get_cases`
@@ -447,7 +447,7 @@ Get case(s) from Salesforce
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Operation success status |
| `success` | boolean | Success |
| `output` | object | Case data |
### `salesforce_create_case`
@@ -472,8 +472,8 @@ Create a new case
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Operation success status |
| `output` | object | Created case data |
| `success` | boolean | Success |
| `output` | object | Created case |
### `salesforce_update_case`
@@ -495,8 +495,8 @@ Update an existing case
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Operation success status |
| `output` | object | Updated case data |
| `success` | boolean | Success |
| `output` | object | Updated case |
### `salesforce_delete_case`
@@ -514,8 +514,8 @@ Delete a case
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Operation success status |
| `output` | object | Deleted case data |
| `success` | boolean | Success |
| `output` | object | Deleted case |
### `salesforce_get_tasks`
@@ -536,7 +536,7 @@ Get task(s) from Salesforce
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Operation success status |
| `success` | boolean | Success |
| `output` | object | Task data |
### `salesforce_create_task`
@@ -561,8 +561,8 @@ Create a new task
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Operation success status |
| `output` | object | Created task data |
| `success` | boolean | Success |
| `output` | object | Created task |
### `salesforce_update_task`
@@ -585,8 +585,8 @@ Update an existing task
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Operation success status |
| `output` | object | Updated task data |
| `success` | boolean | Success |
| `output` | object | Updated task |
### `salesforce_delete_task`
@@ -604,8 +604,8 @@ Delete a task
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Operation success status |
| `output` | object | Deleted task data |
| `success` | boolean | Success |
| `output` | object | Deleted task |
### `salesforce_list_reports`
@@ -624,7 +624,7 @@ Get a list of reports accessible by the current user
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Operation success status |
| `success` | boolean | Success status |
| `output` | object | Reports data |
### `salesforce_get_report`
@@ -643,7 +643,7 @@ Get metadata and describe information for a specific report
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Operation success status |
| `success` | boolean | Success status |
| `output` | object | Report metadata |
### `salesforce_run_report`
@@ -664,7 +664,7 @@ Execute a report and retrieve the results
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Operation success status |
| `success` | boolean | Success status |
| `output` | object | Report results |
### `salesforce_list_report_types`
@@ -682,7 +682,7 @@ Get a list of available report types
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Operation success status |
| `success` | boolean | Success status |
| `output` | object | Report types data |
### `salesforce_list_dashboards`
@@ -701,7 +701,7 @@ Get a list of dashboards accessible by the current user
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Operation success status |
| `success` | boolean | Success status |
| `output` | object | Dashboards data |
### `salesforce_get_dashboard`
@@ -720,7 +720,7 @@ Get details and results for a specific dashboard
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Operation success status |
| `success` | boolean | Success status |
| `output` | object | Dashboard data |
### `salesforce_refresh_dashboard`
@@ -739,7 +739,7 @@ Refresh a dashboard to get the latest data
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Operation success status |
| `success` | boolean | Success status |
| `output` | object | Refreshed dashboard data |
### `salesforce_query`
@@ -758,7 +758,7 @@ Execute a custom SOQL query to retrieve data from Salesforce
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Operation success status |
| `success` | boolean | Success status |
| `output` | object | Query results |
### `salesforce_query_more`
@@ -777,7 +777,7 @@ Retrieve additional query results using the nextRecordsUrl from a previous query
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Operation success status |
| `success` | boolean | Success status |
| `output` | object | Query results |
### `salesforce_describe_object`
@@ -796,7 +796,7 @@ Get metadata and field information for a Salesforce object
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Operation success status |
| `success` | boolean | Success status |
| `output` | object | Object metadata |
### `salesforce_list_objects`
@@ -814,7 +814,7 @@ Get a list of all available Salesforce objects
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Operation success status |
| `success` | boolean | Success status |
| `output` | object | Objects list |

View File

@@ -282,6 +282,7 @@ Permanently delete a customer
| --------- | ---- | ----------- |
| `deleted` | boolean | Whether the customer was deleted |
| `id` | string | The ID of the deleted customer |
| `metadata` | json | Deletion metadata |
### `stripe_list_customers`
@@ -540,6 +541,7 @@ Permanently delete a draft invoice
| --------- | ---- | ----------- |
| `deleted` | boolean | Whether the invoice was deleted |
| `id` | string | The ID of the deleted invoice |
| `metadata` | json | Deletion metadata |
### `stripe_finalize_invoice`
@@ -854,6 +856,7 @@ Permanently delete a product
| --------- | ---- | ----------- |
| `deleted` | boolean | Whether the product was deleted |
| `id` | string | The ID of the deleted product |
| `metadata` | json | Deletion metadata |
### `stripe_list_products`

View File

@@ -0,0 +1,56 @@
---
title: Thinking
description: Forces model to outline its thought process.
---
import { BlockInfoCard } from "@/components/ui/block-info-card"
<BlockInfoCard
type="thinking"
color="#181C1E"
/>
{/* MANUAL-CONTENT-START:intro */}
The Thinking tool encourages AI models to engage in explicit reasoning before responding to complex queries. By providing a dedicated space for step-by-step analysis, this tool helps models break down problems, consider multiple perspectives, and arrive at more thoughtful conclusions.
Research has shown that prompting language models to "think step by step" can significantly improve their reasoning capabilities. According to [Anthropic's research on Claude's Think tool](https://www.anthropic.com/engineering/claude-think-tool), when models are given space to work through their reasoning explicitly, they demonstrate:
- **Improved problem-solving**: Breaking complex problems into manageable steps
- **Enhanced accuracy**: Reducing errors by carefully working through each component of a problem
- **Greater transparency**: Making the model's reasoning process visible and auditable
- **More nuanced responses**: Considering multiple angles before arriving at conclusions
In Sim, the Thinking tool creates a structured opportunity for your agents to engage in this kind of deliberate reasoning. By incorporating thinking steps into your workflows, you can help your agents tackle complex tasks more effectively, avoid common reasoning pitfalls, and produce higher-quality outputs. This is particularly valuable for tasks involving multi-step reasoning, complex decision-making, or situations where accuracy is critical.
{/* MANUAL-CONTENT-END */}
## Usage Instructions
Adds a step where the model explicitly outlines its thought process before proceeding. This can improve reasoning quality by encouraging step-by-step analysis.
## Tools
### `thinking_tool`
Processes a provided thought/instruction, making it available for subsequent steps.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `thought` | string | Yes | Your internal reasoning, analysis, or thought process. Use this to think through the problem step by step before responding. |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `acknowledgedThought` | string | The thought that was processed and acknowledged |
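For illustration, a minimal sketch of how a `thinking_tool` call might look from a workflow's perspective. The `thought` input and `acknowledgedThought` output come from the tables above; the `runTool` helper is a hypothetical name used only for this sketch and is not part of Sim's documented API.
```typescript
// Hypothetical helper; only the input/output shape matches the tables above.
const result = await runTool('thinking_tool', {
  thought:
    'Break the request into steps: 1) identify the data source, 2) validate inputs, 3) draft the response.',
})

// The tool acknowledges the reasoning so downstream steps can reference it.
console.log(result.acknowledgedThought)
```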
## Notes
- Category: `tools`
- Type: `thinking`

View File

@@ -53,9 +53,6 @@ Send a chat completion request to any supported LLM provider
| `vertexProject` | string | No | Google Cloud project ID for Vertex AI |
| `vertexLocation` | string | No | Google Cloud location for Vertex AI \(defaults to us-central1\) |
| `vertexCredential` | string | No | Google Cloud OAuth credential ID for Vertex AI |
| `bedrockAccessKeyId` | string | No | AWS Access Key ID for Bedrock |
| `bedrockSecretKey` | string | No | AWS Secret Access Key for Bedrock |
| `bedrockRegion` | string | No | AWS region for Bedrock \(defaults to us-east-1\) |
#### Output

View File

@@ -78,6 +78,7 @@ Retrieve a list of tickets from Zendesk with optional filtering
| --------- | ---- | ----------- |
| `tickets` | array | Array of ticket objects |
| `paging` | object | Pagination information |
| `metadata` | object | Operation metadata |
### `zendesk_get_ticket`
@@ -97,7 +98,7 @@ Get a single ticket by ID from Zendesk
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `ticket` | object | Ticket object |
| `ticket_id` | number | The ticket ID |
| `metadata` | object | Operation metadata |
### `zendesk_create_ticket`
@@ -126,7 +127,7 @@ Create a new ticket in Zendesk with support for custom fields
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `ticket` | object | Created ticket object |
| `ticket_id` | number | The created ticket ID |
| `metadata` | object | Operation metadata |
### `zendesk_create_tickets_bulk`
@@ -145,8 +146,8 @@ Create multiple tickets in Zendesk at once (max 100)
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `job_status` | object | Job status object |
| `job_id` | string | The bulk operation job ID |
| `jobStatus` | object | Job status object |
| `metadata` | object | Operation metadata |
### `zendesk_update_ticket`
@@ -175,7 +176,7 @@ Update an existing ticket in Zendesk with support for custom fields
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `ticket` | object | Updated ticket object |
| `ticket_id` | number | The updated ticket ID |
| `metadata` | object | Operation metadata |
### `zendesk_update_tickets_bulk`
@@ -199,8 +200,8 @@ Update multiple tickets in Zendesk at once (max 100)
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `job_status` | object | Job status object |
| `job_id` | string | The bulk operation job ID |
| `jobStatus` | object | Job status object |
| `metadata` | object | Operation metadata |
### `zendesk_delete_ticket`
@@ -220,7 +221,7 @@ Delete a ticket from Zendesk
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `deleted` | boolean | Deletion success |
| `ticket_id` | string | The deleted ticket ID |
| `metadata` | object | Operation metadata |
### `zendesk_merge_tickets`
@@ -241,9 +242,8 @@ Merge multiple tickets into a target ticket
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `job_status` | object | Job status object |
| `job_id` | string | The merge job ID |
| `target_ticket_id` | string | The target ticket ID that tickets were merged into |
| `jobStatus` | object | Job status object |
| `metadata` | object | Operation metadata |
### `zendesk_get_users`
@@ -267,6 +267,7 @@ Retrieve a list of users from Zendesk with optional filtering
| --------- | ---- | ----------- |
| `users` | array | Array of user objects |
| `paging` | object | Pagination information |
| `metadata` | object | Operation metadata |
### `zendesk_get_user`
@@ -286,7 +287,7 @@ Get a single user by ID from Zendesk
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `user` | object | User object |
| `user_id` | number | The user ID |
| `metadata` | object | Operation metadata |
### `zendesk_get_current_user`
@@ -305,7 +306,7 @@ Get the currently authenticated user from Zendesk
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `user` | object | Current user object |
| `user_id` | number | The current user ID |
| `metadata` | object | Operation metadata |
### `zendesk_search_users`
@@ -329,6 +330,7 @@ Search for users in Zendesk using a query string
| --------- | ---- | ----------- |
| `users` | array | Array of user objects |
| `paging` | object | Pagination information |
| `metadata` | object | Operation metadata |
### `zendesk_create_user`
@@ -355,7 +357,7 @@ Create a new user in Zendesk
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `user` | object | Created user object |
| `user_id` | number | The created user ID |
| `metadata` | object | Operation metadata |
### `zendesk_create_users_bulk`
@@ -374,8 +376,8 @@ Create multiple users in Zendesk using bulk import
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `job_status` | object | Job status object |
| `job_id` | string | The bulk operation job ID |
| `jobStatus` | object | Job status object |
| `metadata` | object | Operation metadata |
### `zendesk_update_user`
@@ -402,8 +404,8 @@ Update an existing user in Zendesk
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `user` | json | Updated user object |
| `user_id` | number | The updated user ID |
| `user` | object | Updated user object |
| `metadata` | object | Operation metadata |
### `zendesk_update_users_bulk`
@@ -422,8 +424,8 @@ Update multiple users in Zendesk using bulk update
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `job_status` | object | Job status object |
| `job_id` | string | The bulk operation job ID |
| `jobStatus` | object | Job status object |
| `metadata` | object | Operation metadata |
### `zendesk_delete_user`
@@ -443,7 +445,7 @@ Delete a user from Zendesk
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `deleted` | boolean | Deletion success |
| `user_id` | string | The deleted user ID |
| `metadata` | object | Operation metadata |
### `zendesk_get_organizations`
@@ -465,6 +467,7 @@ Retrieve a list of organizations from Zendesk
| --------- | ---- | ----------- |
| `organizations` | array | Array of organization objects |
| `paging` | object | Pagination information |
| `metadata` | object | Operation metadata |
### `zendesk_get_organization`
@@ -483,8 +486,8 @@ Get a single organization by ID from Zendesk
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `organization` | json | Organization object |
| `organization_id` | number | The organization ID |
| `organization` | object | Organization object |
| `metadata` | object | Operation metadata |
### `zendesk_autocomplete_organizations`
@@ -507,6 +510,7 @@ Autocomplete organizations in Zendesk by name prefix (for name matching/autocomp
| --------- | ---- | ----------- |
| `organizations` | array | Array of organization objects |
| `paging` | object | Pagination information |
| `metadata` | object | Operation metadata |
### `zendesk_create_organization`
@@ -530,8 +534,8 @@ Create a new organization in Zendesk
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `organization` | json | Created organization object |
| `organization_id` | number | The created organization ID |
| `organization` | object | Created organization object |
| `metadata` | object | Operation metadata |
### `zendesk_create_organizations_bulk`
@@ -550,8 +554,8 @@ Create multiple organizations in Zendesk using bulk import
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `job_status` | object | Job status object |
| `job_id` | string | The bulk operation job ID |
| `jobStatus` | object | Job status object |
| `metadata` | object | Operation metadata |
### `zendesk_update_organization`
@@ -576,8 +580,8 @@ Update an existing organization in Zendesk
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `organization` | json | Updated organization object |
| `organization_id` | number | The updated organization ID |
| `organization` | object | Updated organization object |
| `metadata` | object | Operation metadata |
### `zendesk_delete_organization`
@@ -596,8 +600,8 @@ Delete an organization from Zendesk
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `deleted` | boolean | Whether the organization was successfully deleted |
| `organization_id` | string | The deleted organization ID |
| `deleted` | boolean | Deletion success |
| `metadata` | object | Operation metadata |
### `zendesk_search`
@@ -622,6 +626,7 @@ Unified search across tickets, users, and organizations in Zendesk
| --------- | ---- | ----------- |
| `results` | array | Array of result objects |
| `paging` | object | Pagination information |
| `metadata` | object | Operation metadata |
### `zendesk_search_count`
@@ -641,6 +646,7 @@ Count the number of search results matching a query in Zendesk
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `count` | number | Number of matching results |
| `metadata` | object | Operation metadata |

View File

@@ -44,7 +44,7 @@ Reference structured values downstream with expressions such as <code>&lt;start.
## How it behaves per entry point
<Tabs items={['Editor run', 'Deploy to API', 'Deploy to chat', 'Deploy to form']}>
<Tabs items={['Editor run', 'Deploy to API', 'Deploy to chat']}>
<Tab>
When you click <strong>Run</strong> in the editor, the Start block renders the Input Format as a form. Default values make it easy to retest without retyping data. Submitting the form triggers the workflow immediately and the values become available on <code>&lt;start.fieldName&gt;</code> (for example <code>&lt;start.sampleField&gt;</code>).
@@ -64,13 +64,6 @@ Reference structured values downstream with expressions such as <code>&lt;start.
If you launch chat with additional structured context (for example from an embed), it merges into the corresponding <code>&lt;start.fieldName&gt;</code> outputs, keeping downstream blocks consistent with API and manual runs.
</Tab>
<Tab>
Form deployments render the Input Format as a standalone, embeddable form page. Each field becomes a form input with appropriate UI controls—text inputs for strings, number inputs for numbers, toggle switches for booleans, and file upload zones for files.
When a user submits the form, values become available on <code>&lt;start.fieldName&gt;</code> just like other entry points. The workflow executes with trigger type <code>form</code>, and submitters see a customizable thank-you message upon completion.
Forms can be embedded via iframe or shared as direct links, making them ideal for surveys, contact forms, and data collection workflows.
</Tab>
</Tabs>
## Referencing Start data downstream

View File

@@ -1,76 +0,0 @@
---
title: Enterprise
description: Funciones enterprise para organizaciones con requisitos avanzados
de seguridad y cumplimiento
---
import { Callout } from 'fumadocs-ui/components/callout'
Sim Studio Enterprise proporciona funciones avanzadas para organizaciones con requisitos mejorados de seguridad, cumplimiento y gestión.
---
## Bring Your Own Key (BYOK)
Usa tus propias claves API para proveedores de modelos de IA en lugar de las claves alojadas de Sim Studio.
### Proveedores compatibles
| Proveedor | Uso |
|----------|-------|
| OpenAI | Embeddings de base de conocimiento, bloque Agent |
| Anthropic | Bloque Agent |
| Google | Bloque Agent |
| Mistral | OCR de base de conocimiento |
### Configuración
1. Navega a **Configuración** → **BYOK** en tu espacio de trabajo
2. Haz clic en **Añadir clave** para tu proveedor
3. Introduce tu clave API y guarda
<Callout type="warn">
Las claves BYOK están cifradas en reposo. Solo los administradores y propietarios de la organización pueden gestionar las claves.
</Callout>
Cuando está configurado, los flujos de trabajo usan tu clave en lugar de las claves alojadas de Sim Studio. Si se elimina, los flujos de trabajo vuelven automáticamente a las claves alojadas.
---
## Single Sign-On (SSO)
Autenticación enterprise con soporte SAML 2.0 y OIDC para gestión centralizada de identidades.
### Proveedores compatibles
- Okta
- Azure AD / Entra ID
- Google Workspace
- OneLogin
- Cualquier proveedor SAML 2.0 u OIDC
### Configuración
1. Navega a **Configuración** → **SSO** en tu espacio de trabajo
2. Elige tu proveedor de identidad
3. Configura la conexión usando los metadatos de tu IdP
4. Activa SSO para tu organización
<Callout type="info">
Una vez que SSO está activado, los miembros del equipo se autentican a través de tu proveedor de identidad en lugar de correo electrónico/contraseña.
</Callout>
---
## Self-Hosted
Para implementaciones self-hosted, las funciones enterprise se pueden activar mediante variables de entorno:
| Variable | Descripción |
|----------|-------------|
| `SSO_ENABLED`, `NEXT_PUBLIC_SSO_ENABLED` | Inicio de sesión único con SAML/OIDC |
| `CREDENTIAL_SETS_ENABLED`, `NEXT_PUBLIC_CREDENTIAL_SETS_ENABLED` | Grupos de sondeo para activadores de correo electrónico |
<Callout type="warn">
BYOK solo está disponible en Sim Studio alojado. Las implementaciones autoalojadas configuran las claves de proveedor de IA directamente a través de variables de entorno.
</Callout>

View File

@@ -49,40 +49,40 @@ El desglose del modelo muestra:
<Tabs items={['Modelos alojados', 'Trae tu propia clave API']}>
<Tab>
**Modelos alojados** - Sim proporciona claves API con un multiplicador de precios de 1.4x para bloques de agente:
**Modelos alojados** - Sim proporciona claves API con un multiplicador de precio de 2x:
**OpenAI**
| Modelo | Precio base (entrada/salida) | Precio alojado (entrada/salida) |
|-------|---------------------------|----------------------------|
| GPT-5.1 | $1.25 / $10.00 | $1.75 / $14.00 |
| GPT-5 | $1.25 / $10.00 | $1.75 / $14.00 |
| GPT-5 Mini | $0.25 / $2.00 | $0.35 / $2.80 |
| GPT-5 Nano | $0.05 / $0.40 | $0.07 / $0.56 |
| GPT-4o | $2.50 / $10.00 | $3.50 / $14.00 |
| GPT-4.1 | $2.00 / $8.00 | $2.80 / $11.20 |
| GPT-4.1 Mini | $0.40 / $1.60 | $0.56 / $2.24 |
| GPT-4.1 Nano | $0.10 / $0.40 | $0.14 / $0.56 |
| o1 | $15.00 / $60.00 | $21.00 / $84.00 |
| o3 | $2.00 / $8.00 | $2.80 / $11.20 |
| o4 Mini | $1.10 / $4.40 | $1.54 / $6.16 |
| GPT-5.1 | $1.25 / $10.00 | $2.50 / $20.00 |
| GPT-5 | $1.25 / $10.00 | $2.50 / $20.00 |
| GPT-5 Mini | $0.25 / $2.00 | $0.50 / $4.00 |
| GPT-5 Nano | $0.05 / $0.40 | $0.10 / $0.80 |
| GPT-4o | $2.50 / $10.00 | $5.00 / $20.00 |
| GPT-4.1 | $2.00 / $8.00 | $4.00 / $16.00 |
| GPT-4.1 Mini | $0.40 / $1.60 | $0.80 / $3.20 |
| GPT-4.1 Nano | $0.10 / $0.40 | $0.20 / $0.80 |
| o1 | $15.00 / $60.00 | $30.00 / $120.00 |
| o3 | $2.00 / $8.00 | $4.00 / $16.00 |
| o4 Mini | $1.10 / $4.40 | $2.20 / $8.80 |
**Anthropic**
| Modelo | Precio base (entrada/salida) | Precio alojado (entrada/salida) |
|-------|---------------------------|----------------------------|
| Claude Opus 4.5 | $5.00 / $25.00 | $7.00 / $35.00 |
| Claude Opus 4.1 | $15.00 / $75.00 | $21.00 / $105.00 |
| Claude Sonnet 4.5 | $3.00 / $15.00 | $4.20 / $21.00 |
| Claude Sonnet 4.0 | $3.00 / $15.00 | $4.20 / $21.00 |
| Claude Haiku 4.5 | $1.00 / $5.00 | $1.40 / $7.00 |
| Claude Opus 4.5 | $5.00 / $25.00 | $10.00 / $50.00 |
| Claude Opus 4.1 | $15.00 / $75.00 | $30.00 / $150.00 |
| Claude Sonnet 4.5 | $3.00 / $15.00 | $6.00 / $30.00 |
| Claude Sonnet 4.0 | $3.00 / $15.00 | $6.00 / $30.00 |
| Claude Haiku 4.5 | $1.00 / $5.00 | $2.00 / $10.00 |
**Google**
| Modelo | Precio base (entrada/salida) | Precio alojado (entrada/salida) |
|-------|---------------------------|----------------------------|
| Gemini 3 Pro Preview | $2.00 / $12.00 | $2.80 / $16.80 |
| Gemini 2.5 Pro | $1.25 / $10.00 | $1.75 / $14.00 |
| Gemini 2.5 Flash | $0.30 / $2.50 | $0.42 / $3.50 |
| Gemini 3 Pro Preview | $2.00 / $12.00 | $4.00 / $24.00 |
| Gemini 2.5 Pro | $1.25 / $10.00 | $2.50 / $20.00 |
| Gemini 2.5 Flash | $0.30 / $2.50 | $0.60 / $5.00 |
*El multiplicador de 1.4x cubre los costos de infraestructura y gestión de API.*
*El multiplicador 2x cubre los costos de infraestructura y gestión de API.*
</Tab>
<Tab>

View File

@@ -1,76 +0,0 @@
---
title: Entreprise
description: Fonctionnalités entreprise pour les organisations ayant des
exigences avancées en matière de sécurité et de conformité
---
import { Callout } from 'fumadocs-ui/components/callout'
Sim Studio Entreprise fournit des fonctionnalités avancées pour les organisations ayant des exigences renforcées en matière de sécurité, de conformité et de gestion.
---
## Apportez votre propre clé (BYOK)
Utilisez vos propres clés API pour les fournisseurs de modèles IA au lieu des clés hébergées par Sim Studio.
### Fournisseurs pris en charge
| Fournisseur | Utilisation |
|----------|-------|
| OpenAI | Embeddings de base de connaissances, bloc Agent |
| Anthropic | Bloc Agent |
| Google | Bloc Agent |
| Mistral | OCR de base de connaissances |
### Configuration
1. Accédez à **Paramètres** → **BYOK** dans votre espace de travail
2. Cliquez sur **Ajouter une clé** pour votre fournisseur
3. Saisissez votre clé API et enregistrez
<Callout type="warn">
Les clés BYOK sont chiffrées au repos. Seuls les administrateurs et propriétaires de l'organisation peuvent gérer les clés.
</Callout>
Une fois configurés, les workflows utilisent votre clé au lieu des clés hébergées par Sim Studio. Si elle est supprimée, les workflows basculent automatiquement vers les clés hébergées.
---
## Authentification unique (SSO)
Authentification entreprise avec prise en charge de SAML 2.0 et OIDC pour une gestion centralisée des identités.
### Fournisseurs pris en charge
- Okta
- Azure AD / Entra ID
- Google Workspace
- OneLogin
- Tout fournisseur SAML 2.0 ou OIDC
### Configuration
1. Accédez à **Paramètres** → **SSO** dans votre espace de travail
2. Choisissez votre fournisseur d'identité
3. Configurez la connexion en utilisant les métadonnées de votre IdP
4. Activez le SSO pour votre organisation
<Callout type="info">
Une fois le SSO activé, les membres de l'équipe s'authentifient via votre fournisseur d'identité au lieu d'utiliser un email/mot de passe.
</Callout>
---
## Auto-hébergé
Pour les déploiements auto-hébergés, les fonctionnalités entreprise peuvent être activées via des variables d'environnement :
| Variable | Description |
|----------|-------------|
| `SSO_ENABLED`, `NEXT_PUBLIC_SSO_ENABLED` | Authentification unique avec SAML/OIDC |
| `CREDENTIAL_SETS_ENABLED`, `NEXT_PUBLIC_CREDENTIAL_SETS_ENABLED` | Groupes de sondage pour les déclencheurs d'e-mail |
<Callout type="warn">
BYOK est uniquement disponible sur Sim Studio hébergé. Les déploiements auto-hébergés configurent les clés de fournisseur d'IA directement via les variables d'environnement.
</Callout>

View File

@@ -49,40 +49,40 @@ La répartition des modèles montre :
<Tabs items={['Modèles hébergés', 'Apportez votre propre clé API']}>
<Tab>
**Modèles hébergés** - Sim fournit des clés API avec un multiplicateur de prix de 1,4x pour les blocs Agent :
**Modèles hébergés** - Sim fournit des clés API avec un multiplicateur de prix de 2x :
**OpenAI**
| Modèle | Prix de base (entrée/sortie) | Prix hébergé (entrée/sortie) |
|-------|---------------------------|----------------------------|
| GPT-5.1 | 1,25 $ / 10,00 $ | 1,75 $ / 14,00 $ |
| GPT-5 | 1,25 $ / 10,00 $ | 1,75 $ / 14,00 $ |
| GPT-5 Mini | 0,25 $ / 2,00 $ | 0,35 $ / 2,80 $ |
| GPT-5 Nano | 0,05 $ / 0,40 $ | 0,07 $ / 0,56 $ |
| GPT-4o | 2,50 $ / 10,00 $ | 3,50 $ / 14,00 $ |
| GPT-4.1 | 2,00 $ / 8,00 $ | 2,80 $ / 11,20 $ |
| GPT-4.1 Mini | 0,40 $ / 1,60 $ | 0,56 $ / 2,24 $ |
| GPT-4.1 Nano | 0,10 $ / 0,40 $ | 0,14 $ / 0,56 $ |
| o1 | 15,00 $ / 60,00 $ | 21,00 $ / 84,00 $ |
| o3 | 2,00 $ / 8,00 $ | 2,80 $ / 11,20 $ |
| o4 Mini | 1,10 $ / 4,40 $ | 1,54 $ / 6,16 $ |
| GPT-5.1 | 1,25 $ / 10,00 $ | 2,50 $ / 20,00 $ |
| GPT-5 | 1,25 $ / 10,00 $ | 2,50 $ / 20,00 $ |
| GPT-5 Mini | 0,25 $ / 2,00 $ | 0,50 $ / 4,00 $ |
| GPT-5 Nano | 0,05 $ / 0,40 $ | 0,10 $ / 0,80 $ |
| GPT-4o | 2,50 $ / 10,00 $ | 5,00 $ / 20,00 $ |
| GPT-4.1 | 2,00 $ / 8,00 $ | 4,00 $ / 16,00 $ |
| GPT-4.1 Mini | 0,40 $ / 1,60 $ | 0,80 $ / 3,20 $ |
| GPT-4.1 Nano | 0,10 $ / 0,40 $ | 0,20 $ / 0,80 $ |
| o1 | 15,00 $ / 60,00 $ | 30,00 $ / 120,00 $ |
| o3 | 2,00 $ / 8,00 $ | 4,00 $ / 16,00 $ |
| o4 Mini | 1,10 $ / 4,40 $ | 2,20 $ / 8,80 $ |
**Anthropic**
| Modèle | Prix de base (entrée/sortie) | Prix hébergé (entrée/sortie) |
|-------|---------------------------|----------------------------|
| Claude Opus 4.5 | 5,00 $ / 25,00 $ | 7,00 $ / 35,00 $ |
| Claude Opus 4.1 | 15,00 $ / 75,00 $ | 21,00 $ / 105,00 $ |
| Claude Sonnet 4.5 | 3,00 $ / 15,00 $ | 4,20 $ / 21,00 $ |
| Claude Sonnet 4.0 | 3,00 $ / 15,00 $ | 4,20 $ / 21,00 $ |
| Claude Haiku 4.5 | 1,00 $ / 5,00 $ | 1,40 $ / 7,00 $ |
| Claude Opus 4.5 | 5,00 $ / 25,00 $ | 10,00 $ / 50,00 $ |
| Claude Opus 4.1 | 15,00 $ / 75,00 $ | 30,00 $ / 150,00 $ |
| Claude Sonnet 4.5 | 3,00 $ / 15,00 $ | 6,00 $ / 30,00 $ |
| Claude Sonnet 4.0 | 3,00 $ / 15,00 $ | 6,00 $ / 30,00 $ |
| Claude Haiku 4.5 | 1,00 $ / 5,00 $ | 2,00 $ / 10,00 $ |
**Google**
| Modèle | Prix de base (entrée/sortie) | Prix hébergé (entrée/sortie) |
|-------|---------------------------|----------------------------|
| Gemini 3 Pro Preview | 2,00 $ / 12,00 $ | 2,80 $ / 16,80 $ |
| Gemini 2.5 Pro | 1,25 $ / 10,00 $ | 1,75 $ / 14,00 $ |
| Gemini 2.5 Flash | 0,30 $ / 2,50 $ | 0,42 $ / 3,50 $ |
| Gemini 3 Pro Preview | 2,00 $ / 12,00 $ | 4,00 $ / 24,00 $ |
| Gemini 2.5 Pro | 1,25 $ / 10,00 $ | 2,50 $ / 20,00 $ |
| Gemini 2.5 Flash | 0,30 $ / 2,50 $ | 0,60 $ / 5,00 $ |
*Le multiplicateur de 1,4x couvre les coûts d'infrastructure et de gestion des API.*
*Le multiplicateur 2x couvre les coûts d'infrastructure et de gestion des API.*
</Tab>
<Tab>

View File

@@ -1,75 +0,0 @@
---
title: エンタープライズ
description: 高度なセキュリティとコンプライアンス要件を持つ組織向けのエンタープライズ機能
---
import { Callout } from 'fumadocs-ui/components/callout'
Sim Studio Enterpriseは、強化されたセキュリティ、コンプライアンス、管理要件を持つ組織向けの高度な機能を提供します。
---
## Bring Your Own Key (BYOK)
Sim Studioのホストキーの代わりに、AIモデルプロバイダー用の独自のAPIキーを使用できます。
### 対応プロバイダー
| プロバイダー | 用途 |
|----------|-------|
| OpenAI | ナレッジベースの埋め込み、エージェントブロック |
| Anthropic | エージェントブロック |
| Google | エージェントブロック |
| Mistral | ナレッジベースOCR |
### セットアップ
1. ワークスペースの**設定** → **BYOK**に移動します
2. プロバイダーの**キーを追加**をクリックします
3. APIキーを入力して保存します
<Callout type="warn">
BYOKキーは保存時に暗号化されます。組織の管理者とオーナーのみがキーを管理できます。
</Callout>
設定すると、ワークフローはSim Studioのホストキーの代わりに独自のキーを使用します。削除すると、ワークフローは自動的にホストキーにフォールバックします。
---
## シングルサインオン (SSO)
集中型IDマネジメントのためのSAML 2.0およびOIDCサポートを備えたエンタープライズ認証。
### 対応プロバイダー
- Okta
- Azure AD / Entra ID
- Google Workspace
- OneLogin
- SAML 2.0またはOIDCに対応する任意のプロバイダー
### セットアップ
1. ワークスペースの**設定** → **SSO**に移動します
2. IDプロバイダーを選択します
3. IdPのメタデータを使用して接続を設定します
4. 組織のSSOを有効にします
<Callout type="info">
SSOを有効にすると、チームメンバーはメール/パスワードの代わりにIDプロバイダーを通じて認証します。
</Callout>
---
## セルフホスト
セルフホストデプロイメントの場合、エンタープライズ機能は環境変数を介して有効にできます:
| 変数 | 説明 |
|----------|-------------|
| `SSO_ENABLED`、`NEXT_PUBLIC_SSO_ENABLED` | SAML/OIDCによるシングルサインオン |
| `CREDENTIAL_SETS_ENABLED`、`NEXT_PUBLIC_CREDENTIAL_SETS_ENABLED` | メールトリガー用のポーリンググループ |
<Callout type="warn">
BYOKはホスト型Sim Studioでのみ利用可能です。セルフホスト型デプロイメントでは、環境変数を介してAIプロバイダーキーを直接設定します。
</Callout>

View File

@@ -47,42 +47,42 @@ AIブロックを使用するワークフローでは、ログで詳細なコス
## 料金オプション
<Tabs items={['ホステッドモデル', '独自のAPIキーを使用']}>
<Tabs items={['Hosted Models', 'Bring Your Own API Key']}>
<Tab>
**ホステッドモデル** - Simは、エージェントブロック用に1.4倍の価格乗数を適用したAPIキーを提供します:
**ホステッドモデル** - Simは2倍の価格乗数APIキーを提供します
**OpenAI**
| モデル | 基本価格(入力/出力) | ホステッド価格(入力/出力) |
|-------|---------------------------|----------------------------|
| GPT-5.1 | $1.25 / $10.00 | $1.75 / $14.00 |
| GPT-5 | $1.25 / $10.00 | $1.75 / $14.00 |
| GPT-5 Mini | $0.25 / $2.00 | $0.35 / $2.80 |
| GPT-5 Nano | $0.05 / $0.40 | $0.07 / $0.56 |
| GPT-4o | $2.50 / $10.00 | $3.50 / $14.00 |
| GPT-4.1 | $2.00 / $8.00 | $2.80 / $11.20 |
| GPT-4.1 Mini | $0.40 / $1.60 | $0.56 / $2.24 |
| GPT-4.1 Nano | $0.10 / $0.40 | $0.14 / $0.56 |
| o1 | $15.00 / $60.00 | $21.00 / $84.00 |
| o3 | $2.00 / $8.00 | $2.80 / $11.20 |
| o4 Mini | $1.10 / $4.40 | $1.54 / $6.16 |
| GPT-5.1 | $1.25 / $10.00 | $2.50 / $20.00 |
| GPT-5 | $1.25 / $10.00 | $2.50 / $20.00 |
| GPT-5 Mini | $0.25 / $2.00 | $0.50 / $4.00 |
| GPT-5 Nano | $0.05 / $0.40 | $0.10 / $0.80 |
| GPT-4o | $2.50 / $10.00 | $5.00 / $20.00 |
| GPT-4.1 | $2.00 / $8.00 | $4.00 / $16.00 |
| GPT-4.1 Mini | $0.40 / $1.60 | $0.80 / $3.20 |
| GPT-4.1 Nano | $0.10 / $0.40 | $0.20 / $0.80 |
| o1 | $15.00 / $60.00 | $30.00 / $120.00 |
| o3 | $2.00 / $8.00 | $4.00 / $16.00 |
| o4 Mini | $1.10 / $4.40 | $2.20 / $8.80 |
**Anthropic**
| モデル | 基本価格(入力/出力) | ホステッド価格(入力/出力) |
|-------|---------------------------|----------------------------|
| Claude Opus 4.5 | $5.00 / $25.00 | $7.00 / $35.00 |
| Claude Opus 4.1 | $15.00 / $75.00 | $21.00 / $105.00 |
| Claude Sonnet 4.5 | $3.00 / $15.00 | $4.20 / $21.00 |
| Claude Sonnet 4.0 | $3.00 / $15.00 | $4.20 / $21.00 |
| Claude Haiku 4.5 | $1.00 / $5.00 | $1.40 / $7.00 |
| Claude Opus 4.5 | $5.00 / $25.00 | $10.00 / $50.00 |
| Claude Opus 4.1 | $15.00 / $75.00 | $30.00 / $150.00 |
| Claude Sonnet 4.5 | $3.00 / $15.00 | $6.00 / $30.00 |
| Claude Sonnet 4.0 | $3.00 / $15.00 | $6.00 / $30.00 |
| Claude Haiku 4.5 | $1.00 / $5.00 | $2.00 / $10.00 |
**Google**
| モデル | 基本価格(入力/出力) | ホステッド価格(入力/出力) |
|-------|---------------------------|----------------------------|
| Gemini 3 Pro Preview | $2.00 / $12.00 | $2.80 / $16.80 |
| Gemini 2.5 Pro | $1.25 / $10.00 | $1.75 / $14.00 |
| Gemini 2.5 Flash | $0.30 / $2.50 | $0.42 / $3.50 |
| Gemini 3 Pro Preview | $2.00 / $12.00 | $4.00 / $24.00 |
| Gemini 2.5 Pro | $1.25 / $10.00 | $2.50 / $20.00 |
| Gemini 2.5 Flash | $0.30 / $2.50 | $0.60 / $5.00 |
*1.4倍の乗数は、インフラストラクチャとAPI管理コストをカバーします。*
*2倍の乗数は、インフラストラクチャとAPI管理コストをカバーします。*
</Tab>
<Tab>

View File

@@ -1,75 +0,0 @@
---
title: 企业版
description: 为具有高级安全性和合规性需求的组织提供企业级功能
---
import { Callout } from 'fumadocs-ui/components/callout'
Sim Studio 企业版为需要更高安全性、合规性和管理能力的组织提供高级功能。
---
## 自带密钥BYOK
使用您自己的 API 密钥对接 AI 模型服务商,而不是使用 Sim Studio 托管的密钥。
### 支持的服务商
| Provider | Usage |
|----------|-------|
| OpenAI | 知识库嵌入、Agent 模块 |
| Anthropic | Agent 模块 |
| Google | Agent 模块 |
| Mistral | 知识库 OCR |
### 配置方法
1. 在您的工作区进入 **设置** → **BYOK**
2. 为您的服务商点击 **添加密钥**
3. 输入您的 API 密钥并保存
<Callout type="warn">
BYOK 密钥静态加密存储。仅组织管理员和所有者可管理密钥。
</Callout>
配置后,工作流将使用您的密钥而非 Sim Studio 托管密钥。如移除,工作流会自动切换回托管密钥。
---
## 单点登录SSO
企业级身份认证,支持 SAML 2.0 和 OIDC实现集中式身份管理。
### 支持的服务商
- Okta
- Azure AD / Entra ID
- Google Workspace
- OneLogin
- 任何 SAML 2.0 或 OIDC 服务商
### 配置方法
1. 在您的工作区进入 **设置** → **SSO**
2. 选择您的身份提供商
3. 使用 IdP 元数据配置连接
4. 为您的组织启用 SSO
<Callout type="info">
启用 SSO 后,团队成员将通过您的身份提供商进行身份验证,而不再使用邮箱/密码。
</Callout>
---
## 自主部署
对于自主部署场景,可通过环境变量启用企业功能:
| 变量 | 描述 |
|----------|-------------|
| `SSO_ENABLED``NEXT_PUBLIC_SSO_ENABLED` | 使用 SAML/OIDC 的单点登录 |
| `CREDENTIAL_SETS_ENABLED``NEXT_PUBLIC_CREDENTIAL_SETS_ENABLED` | 用于邮件触发器的轮询组 |
<Callout type="warn">
BYOK 仅适用于托管版 Sim Studio。自托管部署需通过环境变量直接配置 AI 提供商密钥。
</Callout>

View File

@@ -47,42 +47,42 @@ totalCost = baseExecutionCharge + modelCost
## 定价选项
<Tabs items={['托管模型', '自带 API Key']}>
<Tabs items={[ '托管模型', '自带 API 密钥' ]}>
<Tab>
**托管模型** - Sim 为 Agent 模块提供 API Key价格乘以 1.4 倍:
**托管模型** - Sim 提供 API 密钥,价格为基础价格的 2 倍:
**OpenAI**
| 模型 | 基础价格(输入/输出) | 托管价格(输入/输出) |
|-------|---------------------------|----------------------------|
| GPT-5.1 | $1.25 / $10.00 | $1.75 / $14.00 |
| GPT-5 | $1.25 / $10.00 | $1.75 / $14.00 |
| GPT-5 Mini | $0.25 / $2.00 | $0.35 / $2.80 |
| GPT-5 Nano | $0.05 / $0.40 | $0.07 / $0.56 |
| GPT-4o | $2.50 / $10.00 | $3.50 / $14.00 |
| GPT-4.1 | $2.00 / $8.00 | $2.80 / $11.20 |
| GPT-4.1 Mini | $0.40 / $1.60 | $0.56 / $2.24 |
| GPT-4.1 Nano | $0.10 / $0.40 | $0.14 / $0.56 |
| o1 | $15.00 / $60.00 | $21.00 / $84.00 |
| o3 | $2.00 / $8.00 | $2.80 / $11.20 |
| o4 Mini | $1.10 / $4.40 | $1.54 / $6.16 |
| GPT-5.1 | $1.25 / $10.00 | $2.50 / $20.00 |
| GPT-5 | $1.25 / $10.00 | $2.50 / $20.00 |
| GPT-5 Mini | $0.25 / $2.00 | $0.50 / $4.00 |
| GPT-5 Nano | $0.05 / $0.40 | $0.10 / $0.80 |
| GPT-4o | $2.50 / $10.00 | $5.00 / $20.00 |
| GPT-4.1 | $2.00 / $8.00 | $4.00 / $16.00 |
| GPT-4.1 Mini | $0.40 / $1.60 | $0.80 / $3.20 |
| GPT-4.1 Nano | $0.10 / $0.40 | $0.20 / $0.80 |
| o1 | $15.00 / $60.00 | $30.00 / $120.00 |
| o3 | $2.00 / $8.00 | $4.00 / $16.00 |
| o4 Mini | $1.10 / $4.40 | $2.20 / $8.80 |
**Anthropic**
| 模型 | 基础价格(输入/输出) | 托管价格(输入/输出) |
|-------|---------------------------|----------------------------|
| Claude Opus 4.5 | $5.00 / $25.00 | $7.00 / $35.00 |
| Claude Opus 4.1 | $15.00 / $75.00 | $21.00 / $105.00 |
| Claude Sonnet 4.5 | $3.00 / $15.00 | $4.20 / $21.00 |
| Claude Sonnet 4.0 | $3.00 / $15.00 | $4.20 / $21.00 |
| Claude Haiku 4.5 | $1.00 / $5.00 | $1.40 / $7.00 |
| Claude Opus 4.5 | $5.00 / $25.00 | $10.00 / $50.00 |
| Claude Opus 4.1 | $15.00 / $75.00 | $30.00 / $150.00 |
| Claude Sonnet 4.5 | $3.00 / $15.00 | $6.00 / $30.00 |
| Claude Sonnet 4.0 | $3.00 / $15.00 | $6.00 / $30.00 |
| Claude Haiku 4.5 | $1.00 / $5.00 | $2.00 / $10.00 |
**Google**
| 模型 | 基础价格(输入/输出) | 托管价格(输入/输出) |
|-------|---------------------------|----------------------------|
| Gemini 3 Pro Preview | $2.00 / $12.00 | $2.80 / $16.80 |
| Gemini 2.5 Pro | $1.25 / $10.00 | $1.75 / $14.00 |
| Gemini 2.5 Flash | $0.30 / $2.50 | $0.42 / $3.50 |
| Gemini 3 Pro Preview | $2.00 / $12.00 | $4.00 / $24.00 |
| Gemini 2.5 Pro | $1.25 / $10.00 | $2.50 / $20.00 |
| Gemini 2.5 Flash | $0.30 / $2.50 | $0.60 / $5.00 |
*1.4系数涵盖了基础设施和 API 管理成本。*
*2 倍系数涵盖了基础设施和 API 管理成本。*
</Tab>
<Tab>

View File

@@ -4581,11 +4581,11 @@ checksums:
content/10: d19c8c67f52eb08b6a49c0969a9c8b86
content/11: 4024a36e0d9479ff3191fb9cd2b2e365
content/12: 0396a1e5d9548207f56e6b6cae85a542
content/13: 68f90237f86be125224c56a2643904a3
content/14: e854781f0fbf6f397a3ac682e892a993
content/15: 2340c44af715fb8ca58f43151515aae1
content/16: fc7ae93bff492d80f4b6f16e762e05fa
content/17: 8a46692d5df3fed9f94d59dfc3fb7e0a
content/13: 4bfdeac5ad21c75209dcdfde85aa52b0
content/14: 35df9a16b866dbe4bb9fc1d7aee42711
content/15: 135c044066cea8cc0e22f06d67754ec5
content/16: 6882b91e30548d7d331388c26cf2e948
content/17: 29aed7061148ae46fa6ec8bcbc857c3d
content/18: e0571c88ea5bcd4305a6f5772dcbed98
content/19: 83fc31418ff454a5e06b290e3708ef32
content/20: 4392b5939a6d5774fb080cad1ee1dbb8
@@ -50308,30 +50308,3 @@ checksums:
content/68: ba6b5020ed971cd7ffc7f0423650dfbf
content/69: b3f310d5ef115bea5a8b75bf25d7ea9a
content/70: 0362be478aa7ba4b6d1ebde0bd83e83a
f5bc5f89ed66818f4c485c554bf26eea:
meta/title: c70474271708e5b27392fde87462fa26
meta/description: 7b47db7fbb818c180b99354b912a72b3
content/0: 232be69c8f3053a40f695f9c9dcb3f2e
content/1: a4a62a6e782e18bd863546dfcf2aec1c
content/2: 51adf33450cab2ef392e93147386647c
content/3: ada515cf6e2e0f9d3f57f720f79699d3
content/4: d5e8b9f64d855675588845dc4124c491
content/5: 3acf1f0551f6097ca6159e66f5c8da1a
content/6: 6a6e277ded1a063ec2c2067abb519088
content/7: 6debcd334c3310480cbe6feab87f37b5
content/8: 0e3372052a2b3a1c43d853d6ed269d69
content/9: 90063613714128f4e61e9588e2d2c735
content/10: 182154179fe2a8b6b73fde0d04e0bf4c
content/11: 51adf33450cab2ef392e93147386647c
content/12: 73c3e8a5d36d6868fdb455fcb3d6074c
content/13: 30cd8f1d6197bce560a091ba19d0392a
content/14: 3acf1f0551f6097ca6159e66f5c8da1a
content/15: 997deef758698d207be9382c45301ad6
content/16: 6debcd334c3310480cbe6feab87f37b5
content/17: e26c8c2dffd70baef0253720c1511886
content/18: a99eba53979531f1c974cf653c346909
content/19: 51adf33450cab2ef392e93147386647c
content/20: ca3ec889fb218b8b130959ff04baa659
content/21: 306617201cf63b42f09bb72c9722e048
content/22: 4b48ba3f10b043f74b70edeb4ad87080
content/23: c8531bd570711abc1963d8b5dcf9deef

Binary file not shown.

Before: 11 KiB → After: 33 KiB

View File

@@ -1,100 +0,0 @@
'use client'
import { forwardRef, useState } from 'react'
import { ArrowRight, ChevronRight, Loader2 } from 'lucide-react'
import { Button, type ButtonProps as EmcnButtonProps } from '@/components/emcn'
import { cn } from '@/lib/core/utils/cn'
import { useBrandedButtonClass } from '@/hooks/use-branded-button-class'
export interface BrandedButtonProps extends Omit<EmcnButtonProps, 'variant' | 'size'> {
/** Shows loading spinner and disables button */
loading?: boolean
/** Text to show when loading (appends "..." automatically) */
loadingText?: string
/** Show arrow animation on hover (default: true) */
showArrow?: boolean
/** Make button full width (default: true) */
fullWidth?: boolean
}
/**
* Branded button for auth and status pages.
* Automatically detects whitelabel customization and applies appropriate styling.
*
* @example
* ```tsx
* // Primary branded button with arrow
* <BrandedButton onClick={handleSubmit}>Sign In</BrandedButton>
*
* // Loading state
* <BrandedButton loading loadingText="Signing in">Sign In</BrandedButton>
*
* // Without arrow animation
* <BrandedButton showArrow={false}>Continue</BrandedButton>
* ```
*/
export const BrandedButton = forwardRef<HTMLButtonElement, BrandedButtonProps>(
(
{
children,
loading = false,
loadingText,
showArrow = true,
fullWidth = true,
className,
disabled,
onMouseEnter,
onMouseLeave,
...props
},
ref
) => {
const buttonClass = useBrandedButtonClass()
const [isHovered, setIsHovered] = useState(false)
const handleMouseEnter = (e: React.MouseEvent<HTMLButtonElement>) => {
setIsHovered(true)
onMouseEnter?.(e)
}
const handleMouseLeave = (e: React.MouseEvent<HTMLButtonElement>) => {
setIsHovered(false)
onMouseLeave?.(e)
}
return (
<Button
ref={ref}
variant='branded'
size='branded'
disabled={disabled || loading}
onMouseEnter={handleMouseEnter}
onMouseLeave={handleMouseLeave}
className={cn(buttonClass, 'group', fullWidth && 'w-full', className)}
{...props}
>
{loading ? (
<span className='flex items-center gap-2'>
<Loader2 className='h-4 w-4 animate-spin' />
{loadingText ? `${loadingText}...` : children}
</span>
) : showArrow ? (
<span className='flex items-center gap-1'>
{children}
<span className='inline-flex transition-transform duration-200 group-hover:translate-x-0.5'>
{isHovered ? (
<ArrowRight className='h-4 w-4' aria-hidden='true' />
) : (
<ChevronRight className='h-4 w-4' aria-hidden='true' />
)}
</span>
</span>
) : (
children
)}
</Button>
)
}
)
BrandedButton.displayName = 'BrandedButton'

View File

@@ -34,7 +34,7 @@ export function SSOLoginButton({
}
const primaryBtnClasses = cn(
primaryClassName || 'branded-button-gradient',
primaryClassName || 'auth-button-gradient',
'flex w-full items-center justify-center gap-2 rounded-[10px] border font-medium text-[15px] text-white transition-all duration-200'
)

View File

@@ -1,74 +0,0 @@
'use client'
import type { ReactNode } from 'react'
import { inter } from '@/app/_styles/fonts/inter/inter'
import { soehne } from '@/app/_styles/fonts/soehne/soehne'
import AuthBackground from '@/app/(auth)/components/auth-background'
import Nav from '@/app/(landing)/components/nav/nav'
import { SupportFooter } from './support-footer'
export interface StatusPageLayoutProps {
/** Page title displayed prominently */
title: string
/** Description text below the title */
description: string | ReactNode
/** Content to render below the title/description (usually buttons) */
children?: ReactNode
/** Whether to show the support footer (default: true) */
showSupportFooter?: boolean
/** Whether to hide the nav bar (useful for embedded forms) */
hideNav?: boolean
}
/**
* Unified layout for status/error pages (404, form unavailable, chat error, etc.).
* Uses AuthBackground and Nav for consistent styling with auth pages.
*
* @example
* ```tsx
* <StatusPageLayout
* title="Page Not Found"
* description="The page you're looking for doesn't exist."
* >
* <BrandedButton onClick={() => router.push('/')}>Return to Home</BrandedButton>
* </StatusPageLayout>
* ```
*/
export function StatusPageLayout({
title,
description,
children,
showSupportFooter = true,
hideNav = false,
}: StatusPageLayoutProps) {
return (
<AuthBackground>
<main className='relative flex min-h-screen flex-col text-foreground'>
{!hideNav && <Nav hideAuthButtons={true} variant='auth' />}
<div className='relative z-30 flex flex-1 items-center justify-center px-4 pb-24'>
<div className='w-full max-w-lg px-4'>
<div className='flex flex-col items-center justify-center'>
<div className='space-y-1 text-center'>
<h1
className={`${soehne.className} font-medium text-[32px] text-black tracking-tight`}
>
{title}
</h1>
<p className={`${inter.className} font-[380] text-[16px] text-muted-foreground`}>
{description}
</p>
</div>
{children && (
<div className={`${inter.className} mt-8 w-full max-w-[410px] space-y-3`}>
{children}
</div>
)}
</div>
</div>
</div>
{showSupportFooter && <SupportFooter position='absolute' />}
</main>
</AuthBackground>
)
}

View File

@@ -1,40 +0,0 @@
'use client'
import { useBrandConfig } from '@/lib/branding/branding'
import { inter } from '@/app/_styles/fonts/inter/inter'
export interface SupportFooterProps {
/** Position style - 'fixed' for pages without AuthLayout, 'absolute' for pages with AuthLayout */
position?: 'fixed' | 'absolute'
}
/**
* Support footer component for auth and status pages.
* Displays a "Need help? Contact support" link using branded support email.
*
* @example
* ```tsx
* // Fixed position (for standalone pages)
* <SupportFooter />
*
* // Absolute position (for pages using AuthLayout)
* <SupportFooter position="absolute" />
* ```
*/
export function SupportFooter({ position = 'fixed' }: SupportFooterProps) {
const brandConfig = useBrandConfig()
return (
<div
className={`${inter.className} auth-text-muted right-0 bottom-0 left-0 z-50 pb-8 text-center font-[340] text-[13px] leading-relaxed ${position}`}
>
Need help?{' '}
<a
href={`mailto:${brandConfig.supportEmail}`}
className='auth-link underline-offset-4 transition hover:underline'
>
Contact support
</a>
</div>
)
}

View File

@@ -105,7 +105,7 @@ export default function LoginPage({
const [password, setPassword] = useState('')
const [passwordErrors, setPasswordErrors] = useState<string[]>([])
const [showValidationError, setShowValidationError] = useState(false)
const [buttonClass, setButtonClass] = useState('branded-button-gradient')
const [buttonClass, setButtonClass] = useState('auth-button-gradient')
const [isButtonHovered, setIsButtonHovered] = useState(false)
const [callbackUrl, setCallbackUrl] = useState('/workspace')
@@ -146,9 +146,9 @@ export default function LoginPage({
const brandAccent = computedStyle.getPropertyValue('--brand-accent-hex').trim()
if (brandAccent && brandAccent !== '#6f3dfa') {
setButtonClass('branded-button-custom')
setButtonClass('auth-button-custom')
} else {
setButtonClass('branded-button-gradient')
setButtonClass('auth-button-gradient')
}
}

View File

@@ -27,7 +27,7 @@ export function RequestResetForm({
statusMessage,
className,
}: RequestResetFormProps) {
const [buttonClass, setButtonClass] = useState('branded-button-gradient')
const [buttonClass, setButtonClass] = useState('auth-button-gradient')
const [isButtonHovered, setIsButtonHovered] = useState(false)
useEffect(() => {
@@ -36,9 +36,9 @@ export function RequestResetForm({
const brandAccent = computedStyle.getPropertyValue('--brand-accent-hex').trim()
if (brandAccent && brandAccent !== '#6f3dfa') {
setButtonClass('branded-button-custom')
setButtonClass('auth-button-custom')
} else {
setButtonClass('branded-button-gradient')
setButtonClass('auth-button-gradient')
}
}
@@ -138,7 +138,7 @@ export function SetNewPasswordForm({
const [validationMessage, setValidationMessage] = useState('')
const [showPassword, setShowPassword] = useState(false)
const [showConfirmPassword, setShowConfirmPassword] = useState(false)
const [buttonClass, setButtonClass] = useState('branded-button-gradient')
const [buttonClass, setButtonClass] = useState('auth-button-gradient')
const [isButtonHovered, setIsButtonHovered] = useState(false)
useEffect(() => {
@@ -147,9 +147,9 @@ export function SetNewPasswordForm({
const brandAccent = computedStyle.getPropertyValue('--brand-accent-hex').trim()
if (brandAccent && brandAccent !== '#6f3dfa') {
setButtonClass('branded-button-custom')
setButtonClass('auth-button-custom')
} else {
setButtonClass('branded-button-gradient')
setButtonClass('auth-button-gradient')
}
}

View File

@@ -95,7 +95,7 @@ function SignupFormContent({
const [showEmailValidationError, setShowEmailValidationError] = useState(false)
const [redirectUrl, setRedirectUrl] = useState('')
const [isInviteFlow, setIsInviteFlow] = useState(false)
const [buttonClass, setButtonClass] = useState('branded-button-gradient')
const [buttonClass, setButtonClass] = useState('auth-button-gradient')
const [isButtonHovered, setIsButtonHovered] = useState(false)
const [name, setName] = useState('')
@@ -132,9 +132,9 @@ function SignupFormContent({
const brandAccent = computedStyle.getPropertyValue('--brand-accent-hex').trim()
if (brandAccent && brandAccent !== '#6f3dfa') {
setButtonClass('branded-button-custom')
setButtonClass('auth-button-custom')
} else {
setButtonClass('branded-button-gradient')
setButtonClass('auth-button-gradient')
}
}

View File

@@ -57,7 +57,7 @@ export default function SSOForm() {
const [email, setEmail] = useState('')
const [emailErrors, setEmailErrors] = useState<string[]>([])
const [showEmailValidationError, setShowEmailValidationError] = useState(false)
const [buttonClass, setButtonClass] = useState('branded-button-gradient')
const [buttonClass, setButtonClass] = useState('auth-button-gradient')
const [callbackUrl, setCallbackUrl] = useState('/workspace')
useEffect(() => {
@@ -96,9 +96,9 @@ export default function SSOForm() {
const brandAccent = computedStyle.getPropertyValue('--brand-accent-hex').trim()
if (brandAccent && brandAccent !== '#6f3dfa') {
setButtonClass('branded-button-custom')
setButtonClass('auth-button-custom')
} else {
setButtonClass('branded-button-gradient')
setButtonClass('auth-button-gradient')
}
}

View File

@@ -58,7 +58,7 @@ function VerificationForm({
setCountdown(30)
}
const [buttonClass, setButtonClass] = useState('branded-button-gradient')
const [buttonClass, setButtonClass] = useState('auth-button-gradient')
useEffect(() => {
const checkCustomBrand = () => {
@@ -66,9 +66,9 @@ function VerificationForm({
const brandAccent = computedStyle.getPropertyValue('--brand-accent-hex').trim()
if (brandAccent && brandAccent !== '#6f3dfa') {
setButtonClass('branded-button-custom')
setButtonClass('auth-button-custom')
} else {
setButtonClass('branded-button-gradient')
setButtonClass('auth-button-gradient')
}
}

View File

@@ -767,7 +767,7 @@ export default function PrivacyPolicy() {
privacy@sim.ai
</Link>
</li>
<li>Mailing Address: Sim, 80 Langton St, San Francisco, CA 94103, USA</li>
<li>Mailing Address: Sim, 80 Langton St, San Francisco, CA 94133, USA</li>
</ul>
<p>We will respond to your request within a reasonable timeframe.</p>
</section>

View File

@@ -0,0 +1,13 @@
export default function Head() {
return (
<>
<link rel='canonical' href='https://sim.ai/studio' />
<link
rel='alternate'
type='application/rss+xml'
title='Sim Studio'
href='https://sim.ai/studio/rss.xml'
/>
</>
)
}

View File

@@ -2,7 +2,6 @@
import type React from 'react'
import { createContext, useCallback, useEffect, useMemo, useState } from 'react'
import { useQueryClient } from '@tanstack/react-query'
import posthog from 'posthog-js'
import { client } from '@/lib/auth/auth-client'
@@ -36,15 +35,12 @@ export function SessionProvider({ children }: { children: React.ReactNode }) {
const [data, setData] = useState<AppSession>(null)
const [isPending, setIsPending] = useState(true)
const [error, setError] = useState<Error | null>(null)
const queryClient = useQueryClient()
const loadSession = useCallback(async (bypassCache = false) => {
const loadSession = useCallback(async () => {
try {
setIsPending(true)
setError(null)
const res = bypassCache
? await client.getSession({ query: { disableCookieCache: true } })
: await client.getSession()
const res = await client.getSession()
setData(res?.data ?? null)
} catch (e) {
setError(e instanceof Error ? e : new Error('Failed to fetch session'))
@@ -54,25 +50,8 @@ export function SessionProvider({ children }: { children: React.ReactNode }) {
}, [])
useEffect(() => {
// Check if user was redirected after plan upgrade
const params = new URLSearchParams(window.location.search)
const wasUpgraded = params.get('upgraded') === 'true'
if (wasUpgraded) {
params.delete('upgraded')
const newUrl = params.toString()
? `${window.location.pathname}?${params.toString()}`
: window.location.pathname
window.history.replaceState({}, '', newUrl)
}
loadSession(wasUpgraded).then(() => {
if (wasUpgraded) {
queryClient.invalidateQueries({ queryKey: ['organizations'] })
queryClient.invalidateQueries({ queryKey: ['subscription'] })
}
})
}, [loadSession, queryClient])
loadSession()
}, [loadSession])
useEffect(() => {
if (isPending || typeof posthog.identify !== 'function') {

View File

@@ -22,13 +22,12 @@ export function ThemeProvider({ children, ...props }: ThemeProviderProps) {
pathname.startsWith('/changelog') ||
pathname.startsWith('/chat') ||
pathname.startsWith('/studio') ||
pathname.startsWith('/resume') ||
pathname.startsWith('/form')
pathname.startsWith('/resume')
return (
<NextThemesProvider
attribute='class'
defaultTheme='dark'
defaultTheme='system'
enableSystem
disableTransitionOnChange
storageKey='sim-theme'

View File

@@ -42,40 +42,6 @@
animation: dash-animation 1.5s linear infinite !important;
}
/**
* React Flow selection box styling
* Uses brand-secondary color for selection highlighting
*/
.react-flow__selection {
background: rgba(51, 180, 255, 0.08) !important;
border: 1px solid var(--brand-secondary) !important;
}
.react-flow__nodesselection-rect,
.react-flow__nodesselection {
background: transparent !important;
border: none !important;
pointer-events: none !important;
}
/**
* Selected node ring indicator
* Uses a pseudo-element overlay to match the original behavior (absolute inset-0 z-40)
*/
.react-flow__node.selected > div > div {
position: relative;
}
.react-flow__node.selected > div > div::after {
content: "";
position: absolute;
inset: 0;
z-index: 40;
border-radius: 8px;
box-shadow: 0 0 0 1.75px var(--brand-secondary);
pointer-events: none;
}
/**
* Color tokens - single source of truth for all colors
* Light mode: Warm theme
@@ -587,25 +553,27 @@ input[type="search"]::-ms-clear {
animation: placeholder-pulse 1.5s ease-in-out infinite;
}
.branded-button-gradient {
background: linear-gradient(to bottom, #8357ff, #6f3dfa) !important;
border-color: #6f3dfa !important;
box-shadow: inset 0 2px 4px 0 #9b77ff !important;
.auth-button-gradient {
background: linear-gradient(to bottom, var(--brand-primary-hex), var(--brand-400)) !important;
border-color: var(--brand-400) !important;
box-shadow: inset 0 2px 4px 0 var(--brand-400) !important;
}
.branded-button-gradient:hover {
background: linear-gradient(to bottom, #8357ff, #6f3dfa) !important;
.auth-button-gradient:hover {
background: linear-gradient(to bottom, var(--brand-primary-hex), var(--brand-400)) !important;
opacity: 0.9;
}
.branded-button-custom {
.auth-button-custom {
background: var(--brand-primary-hex) !important;
border-color: var(--brand-primary-hex) !important;
box-shadow: inset 0 2px 4px 0 rgba(0, 0, 0, 0.1) !important;
}
.branded-button-custom:hover {
.auth-button-custom:hover {
background: var(--brand-primary-hover-hex) !important;
border-color: var(--brand-primary-hover-hex) !important;
opacity: 1;
}
/**

View File

@@ -0,0 +1,272 @@
/**
* A2A Agent Card Endpoint
*
* Returns the Agent Card (discovery document) for an A2A agent.
* Also supports CRUD operations for managing agents.
*/
import { db } from '@sim/db'
import { a2aAgent, workflow } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { generateAgentCard, generateSkillsFromWorkflow } from '@/lib/a2a/agent-card'
import type { AgentAuthentication, AgentCapabilities, AgentSkill } from '@/lib/a2a/types'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/persistence/utils'
const logger = createLogger('A2AAgentCardAPI')
export const dynamic = 'force-dynamic'
interface RouteParams {
agentId: string
}
/**
* GET - Returns the Agent Card for discovery
*/
export async function GET(request: NextRequest, { params }: { params: Promise<RouteParams> }) {
const { agentId } = await params
try {
const [agent] = await db
.select({
agent: a2aAgent,
workflow: workflow,
})
.from(a2aAgent)
.innerJoin(workflow, eq(a2aAgent.workflowId, workflow.id))
.where(eq(a2aAgent.id, agentId))
.limit(1)
if (!agent) {
return NextResponse.json({ error: 'Agent not found' }, { status: 404 })
}
if (!agent.agent.isPublished) {
// Check if requester has access (for preview)
const auth = await checkHybridAuth(request, { requireWorkflowId: false })
if (!auth.success) {
return NextResponse.json({ error: 'Agent not published' }, { status: 404 })
}
}
const agentCard = generateAgentCard(
{
id: agent.agent.id,
name: agent.agent.name,
description: agent.agent.description,
version: agent.agent.version,
capabilities: agent.agent.capabilities as AgentCapabilities,
skills: agent.agent.skills as AgentSkill[],
authentication: agent.agent.authentication as AgentAuthentication,
},
{
id: agent.workflow.id,
name: agent.workflow.name,
description: agent.workflow.description,
}
)
return NextResponse.json(agentCard, {
headers: {
'Content-Type': 'application/json',
'Cache-Control': agent.agent.isPublished ? 'public, max-age=3600' : 'private, no-cache',
},
})
} catch (error) {
logger.error('Error getting Agent Card:', error)
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
}
}
/**
* PUT - Update an agent
*/
export async function PUT(request: NextRequest, { params }: { params: Promise<RouteParams> }) {
const { agentId } = await params
try {
const auth = await checkHybridAuth(request, { requireWorkflowId: false })
if (!auth.success || !auth.userId) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
const [existingAgent] = await db
.select()
.from(a2aAgent)
.where(eq(a2aAgent.id, agentId))
.limit(1)
if (!existingAgent) {
return NextResponse.json({ error: 'Agent not found' }, { status: 404 })
}
const body = await request.json()
// Update agent
const [updatedAgent] = await db
.update(a2aAgent)
.set({
name: body.name ?? existingAgent.name,
description: body.description ?? existingAgent.description,
version: body.version ?? existingAgent.version,
capabilities: body.capabilities ?? existingAgent.capabilities,
skills: body.skills ?? existingAgent.skills,
authentication: body.authentication ?? existingAgent.authentication,
isPublished: body.isPublished ?? existingAgent.isPublished,
publishedAt:
body.isPublished && !existingAgent.isPublished ? new Date() : existingAgent.publishedAt,
updatedAt: new Date(),
})
.where(eq(a2aAgent.id, agentId))
.returning()
logger.info(`Updated A2A agent: ${agentId}`)
return NextResponse.json({ success: true, agent: updatedAgent })
} catch (error) {
logger.error('Error updating agent:', error)
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
}
}
/**
* DELETE - Delete an agent
*/
export async function DELETE(request: NextRequest, { params }: { params: Promise<RouteParams> }) {
const { agentId } = await params
try {
const auth = await checkHybridAuth(request, { requireWorkflowId: false })
if (!auth.success || !auth.userId) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
const [existingAgent] = await db
.select()
.from(a2aAgent)
.where(eq(a2aAgent.id, agentId))
.limit(1)
if (!existingAgent) {
return NextResponse.json({ error: 'Agent not found' }, { status: 404 })
}
await db.delete(a2aAgent).where(eq(a2aAgent.id, agentId))
logger.info(`Deleted A2A agent: ${agentId}`)
return NextResponse.json({ success: true })
} catch (error) {
logger.error('Error deleting agent:', error)
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
}
}
/**
* POST - Publish/unpublish an agent
*/
export async function POST(request: NextRequest, { params }: { params: Promise<RouteParams> }) {
const { agentId } = await params
try {
const auth = await checkHybridAuth(request, { requireWorkflowId: false })
if (!auth.success || !auth.userId) {
logger.warn('A2A agent publish auth failed:', { error: auth.error, hasUserId: !!auth.userId })
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}
const [existingAgent] = await db
.select()
.from(a2aAgent)
.where(eq(a2aAgent.id, agentId))
.limit(1)
if (!existingAgent) {
return NextResponse.json({ error: 'Agent not found' }, { status: 404 })
}
const body = await request.json()
const action = body.action as 'publish' | 'unpublish' | 'refresh'
if (action === 'publish') {
// Verify workflow is deployed
const [wf] = await db
.select({ isDeployed: workflow.isDeployed })
.from(workflow)
.where(eq(workflow.id, existingAgent.workflowId))
.limit(1)
if (!wf?.isDeployed) {
return NextResponse.json(
{ error: 'Workflow must be deployed before publishing agent' },
{ status: 400 }
)
}
await db
.update(a2aAgent)
.set({
isPublished: true,
publishedAt: new Date(),
updatedAt: new Date(),
})
.where(eq(a2aAgent.id, agentId))
logger.info(`Published A2A agent: ${agentId}`)
return NextResponse.json({ success: true, isPublished: true })
}
if (action === 'unpublish') {
await db
.update(a2aAgent)
.set({
isPublished: false,
updatedAt: new Date(),
})
.where(eq(a2aAgent.id, agentId))
logger.info(`Unpublished A2A agent: ${agentId}`)
return NextResponse.json({ success: true, isPublished: false })
}
if (action === 'refresh') {
// Refresh skills from workflow
const workflowData = await loadWorkflowFromNormalizedTables(existingAgent.workflowId)
if (!workflowData) {
return NextResponse.json({ error: 'Failed to load workflow' }, { status: 500 })
}
const [wf] = await db
.select({ name: workflow.name, description: workflow.description })
.from(workflow)
.where(eq(workflow.id, existingAgent.workflowId))
.limit(1)
const skills = generateSkillsFromWorkflow(
existingAgent.workflowId,
wf?.name || existingAgent.name,
wf?.description,
workflowData.blocks
)
await db
.update(a2aAgent)
.set({
skills,
updatedAt: new Date(),
})
.where(eq(a2aAgent.id, agentId))
logger.info(`Refreshed skills for A2A agent: ${agentId}`)
return NextResponse.json({ success: true, skills })
}
return NextResponse.json({ error: 'Invalid action' }, { status: 400 })
} catch (error) {
logger.error('Error with agent action:', error)
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
}
}
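
As a quick orientation for the handlers above, here is a hedged client-side sketch. The diff does not show where this route is mounted, so the `/api/a2a/agents/{agentId}` path, the base URL, and the `X-API-Key` header are assumptions; only the response semantics (cached card for published agents, `action: 'publish'` requiring a deployed workflow) come from the code above.

```typescript
// Hypothetical client sketch for the Agent Card / CRUD route above.
// The mount path (/api/a2a/agents/[agentId]), base URL, and API key header
// are assumptions; the diff does not show the route's file path.
const SIM_BASE_URL = 'https://sim.example.com'
const AGENT_ID = 'replace-with-agent-id'
const API_KEY = 'replace-with-api-key'

// GET returns the Agent Card. Published agents are served with a 1-hour public
// cache; unpublished agents require auth and are returned with no-cache.
async function fetchAgentCard(): Promise<unknown> {
  const res = await fetch(`${SIM_BASE_URL}/api/a2a/agents/${AGENT_ID}`, {
    headers: { 'X-API-Key': API_KEY },
  })
  if (!res.ok) throw new Error(`Agent Card request failed: ${res.status}`)
  return res.json()
}

// POST with { action: 'publish' } flips isPublished, but only after the handler
// has verified that the backing workflow is deployed.
async function publishAgent(): Promise<void> {
  const res = await fetch(`${SIM_BASE_URL}/api/a2a/agents/${AGENT_ID}`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json', 'X-API-Key': API_KEY },
    body: JSON.stringify({ action: 'publish' }),
  })
  if (!res.ok) throw new Error(`Publish failed: ${res.status}`)
}
```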

View File

@@ -0,0 +1,197 @@
/**
* A2A Agents List Endpoint
*
* List and create A2A agents for a workspace.
*/
import { db } from '@sim/db'
import { a2aAgent, workflow, workspace } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq, sql } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { v4 as uuidv4 } from 'uuid'
import { generateSkillsFromWorkflow } from '@/lib/a2a/agent-card'
import { A2A_DEFAULT_CAPABILITIES } from '@/lib/a2a/constants'
import { sanitizeAgentName } from '@/lib/a2a/utils'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/persistence/utils'
import { hasValidStartBlockInState } from '@/lib/workflows/triggers/trigger-utils'
const logger = createLogger('A2AAgentsAPI')
export const dynamic = 'force-dynamic'
/**
* GET - List all A2A agents for a workspace
*/
export async function GET(request: NextRequest) {
try {
const auth = await checkHybridAuth(request, { requireWorkflowId: false })
if (!auth.success || !auth.userId) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
const { searchParams } = new URL(request.url)
const workspaceId = searchParams.get('workspaceId')
if (!workspaceId) {
return NextResponse.json({ error: 'workspaceId is required' }, { status: 400 })
}
// Verify workspace access
const [ws] = await db
.select({ id: workspace.id })
.from(workspace)
.where(eq(workspace.id, workspaceId))
.limit(1)
if (!ws) {
return NextResponse.json({ error: 'Workspace not found' }, { status: 404 })
}
// Get agents with workflow info
const agents = await db
.select({
id: a2aAgent.id,
workspaceId: a2aAgent.workspaceId,
workflowId: a2aAgent.workflowId,
name: a2aAgent.name,
description: a2aAgent.description,
version: a2aAgent.version,
capabilities: a2aAgent.capabilities,
skills: a2aAgent.skills,
authentication: a2aAgent.authentication,
isPublished: a2aAgent.isPublished,
publishedAt: a2aAgent.publishedAt,
createdAt: a2aAgent.createdAt,
updatedAt: a2aAgent.updatedAt,
workflowName: workflow.name,
workflowDescription: workflow.description,
isDeployed: workflow.isDeployed,
taskCount: sql<number>`(
SELECT COUNT(*)::int
FROM "a2a_task"
WHERE "a2a_task"."agent_id" = "a2a_agent"."id"
)`.as('task_count'),
})
.from(a2aAgent)
.leftJoin(workflow, eq(a2aAgent.workflowId, workflow.id))
.where(eq(a2aAgent.workspaceId, workspaceId))
.orderBy(a2aAgent.createdAt)
logger.info(`Listed ${agents.length} A2A agents for workspace ${workspaceId}`)
return NextResponse.json({ success: true, agents })
} catch (error) {
logger.error('Error listing agents:', error)
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
}
}
/**
* POST - Create a new A2A agent from a workflow
*/
export async function POST(request: NextRequest) {
try {
const auth = await checkHybridAuth(request, { requireWorkflowId: false })
if (!auth.success || !auth.userId) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
const body = await request.json()
const { workspaceId, workflowId, name, description, capabilities, authentication } = body
if (!workspaceId || !workflowId) {
return NextResponse.json(
{ error: 'workspaceId and workflowId are required' },
{ status: 400 }
)
}
// Verify workflow exists and belongs to workspace
const [wf] = await db
.select({
id: workflow.id,
name: workflow.name,
description: workflow.description,
workspaceId: workflow.workspaceId,
isDeployed: workflow.isDeployed,
})
.from(workflow)
.where(and(eq(workflow.id, workflowId), eq(workflow.workspaceId, workspaceId)))
.limit(1)
if (!wf) {
return NextResponse.json(
{ error: 'Workflow not found or does not belong to workspace' },
{ status: 404 }
)
}
// Check if agent already exists for this workflow
const [existing] = await db
.select({ id: a2aAgent.id })
.from(a2aAgent)
.where(and(eq(a2aAgent.workspaceId, workspaceId), eq(a2aAgent.workflowId, workflowId)))
.limit(1)
if (existing) {
return NextResponse.json(
{ error: 'An agent already exists for this workflow' },
{ status: 409 }
)
}
// Verify workflow has a start block
const workflowData = await loadWorkflowFromNormalizedTables(workflowId)
if (!workflowData || !hasValidStartBlockInState(workflowData)) {
return NextResponse.json(
{ error: 'Workflow must have a Start block to be exposed as an A2A agent' },
{ status: 400 }
)
}
// Generate skills from workflow
const skills = generateSkillsFromWorkflow(
workflowId,
name || wf.name,
description || wf.description,
workflowData.blocks
)
// Create agent
const agentId = uuidv4()
const agentName = name || sanitizeAgentName(wf.name)
const [agent] = await db
.insert(a2aAgent)
.values({
id: agentId,
workspaceId,
workflowId,
createdBy: auth.userId,
name: agentName,
description: description || wf.description,
version: '1.0.0',
capabilities: {
...A2A_DEFAULT_CAPABILITIES,
...capabilities,
},
skills,
authentication: authentication || {
schemes: ['bearer', 'apiKey'],
},
isPublished: false,
createdAt: new Date(),
updatedAt: new Date(),
})
.returning()
logger.info(`Created A2A agent ${agentId} for workflow ${workflowId}`)
return NextResponse.json({ success: true, agent }, { status: 201 })
} catch (error) {
logger.error('Error creating agent:', error)
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
}
}
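
A rough sketch of the create flow implemented above; the route path and auth header are assumptions (not visible in this diff), while the required fields, optional fallbacks, and the 409 behavior mirror the handler.

```typescript
// Hypothetical sketch: create an A2A agent for a workflow in a workspace.
// Route path (/api/a2a/agents) and X-API-Key auth are assumptions.
async function createA2AAgent(workspaceId: string, workflowId: string) {
  const res = await fetch('https://sim.example.com/api/a2a/agents', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json', 'X-API-Key': 'replace-with-api-key' },
    body: JSON.stringify({
      workspaceId,
      workflowId,
      // Optional: name falls back to a sanitized workflow name,
      // description falls back to the workflow description.
      name: 'Support Triage Agent',
      description: 'Routes inbound tickets to the right workflow path',
    }),
  })
  if (res.status === 409) throw new Error('An agent already exists for this workflow')
  if (!res.ok) throw new Error(`Create failed: ${res.status}`)
  const { agent } = await res.json()
  return agent // agent.skills are generated from the workflow's blocks
}
```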

View File

@@ -0,0 +1,739 @@
/**
* A2A Serve Endpoint - Implements A2A protocol for workflow agents
*
* Handles JSON-RPC 2.0 requests for:
* - tasks/send: Create or continue a task
* - tasks/get: Query task status
* - tasks/cancel: Cancel a running task
* - tasks/sendSubscribe: SSE streaming for real-time updates
*/
import { db } from '@sim/db'
import { a2aAgent, a2aTask, workflow } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { A2A_DEFAULT_TIMEOUT, A2A_METHODS } from '@/lib/a2a/constants'
import {
A2AErrorCode,
type Artifact,
type Task,
type TaskCancelParams,
type TaskMessage,
type TaskQueryParams,
type TaskSendParams,
type TaskState,
} from '@/lib/a2a/types'
import {
createAgentMessage,
createTaskStatus,
extractTextContent,
formatTaskResponse,
generateTaskId,
isTerminalState,
} from '@/lib/a2a/utils'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { SSE_HEADERS } from '@/lib/core/utils/sse'
import { getBaseUrl } from '@/lib/core/utils/urls'
const logger = createLogger('A2AServeAPI')
export const dynamic = 'force-dynamic'
export const runtime = 'nodejs'
interface RouteParams {
agentId: string
}
interface JSONRPCRequest {
jsonrpc: '2.0'
id: string | number
method: string
params?: unknown
}
interface JSONRPCResponse {
jsonrpc: '2.0'
id: string | number | null
result?: unknown
error?: {
code: number
message: string
data?: unknown
}
}
function createResponse(id: string | number | null, result: unknown): JSONRPCResponse {
return { jsonrpc: '2.0', id, result }
}
function createError(
id: string | number | null,
code: number,
message: string,
data?: unknown
): JSONRPCResponse {
return { jsonrpc: '2.0', id, error: { code, message, data } }
}
function isJSONRPCRequest(obj: unknown): obj is JSONRPCRequest {
if (!obj || typeof obj !== 'object') return false
const r = obj as Record<string, unknown>
return r.jsonrpc === '2.0' && typeof r.method === 'string' && r.id !== undefined
}
/**
* GET - Returns the Agent Card (discovery document)
*
* This allows clients to discover the agent's capabilities by calling GET on the serve endpoint.
*/
export async function GET(request: NextRequest, { params }: { params: Promise<RouteParams> }) {
const { agentId } = await params
try {
const [agent] = await db
.select({
id: a2aAgent.id,
name: a2aAgent.name,
description: a2aAgent.description,
version: a2aAgent.version,
capabilities: a2aAgent.capabilities,
skills: a2aAgent.skills,
authentication: a2aAgent.authentication,
isPublished: a2aAgent.isPublished,
})
.from(a2aAgent)
.where(eq(a2aAgent.id, agentId))
.limit(1)
if (!agent) {
return NextResponse.json({ error: 'Agent not found' }, { status: 404 })
}
if (!agent.isPublished) {
return NextResponse.json({ error: 'Agent not published' }, { status: 404 })
}
const baseUrl = getBaseUrl()
// Return full Agent Card for discovery
return NextResponse.json(
{
name: agent.name,
description: agent.description,
url: `${baseUrl}/api/a2a/serve/${agent.id}`,
version: agent.version,
documentationUrl: `${baseUrl}/docs/a2a`,
provider: {
organization: 'Sim Studio',
url: baseUrl,
},
capabilities: agent.capabilities,
skills: agent.skills,
authentication: agent.authentication,
defaultInputModes: ['text', 'data'],
defaultOutputModes: ['text', 'data'],
},
{
headers: {
'Content-Type': 'application/json',
'Cache-Control': 'public, max-age=3600',
},
}
)
} catch (error) {
logger.error('Error getting Agent Card:', error)
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
}
}
/**
* POST - Handle JSON-RPC requests
*/
export async function POST(request: NextRequest, { params }: { params: Promise<RouteParams> }) {
const { agentId } = await params
try {
// Verify agent exists and is published
const [agent] = await db
.select({
id: a2aAgent.id,
name: a2aAgent.name,
workflowId: a2aAgent.workflowId,
workspaceId: a2aAgent.workspaceId,
isPublished: a2aAgent.isPublished,
capabilities: a2aAgent.capabilities,
})
.from(a2aAgent)
.where(eq(a2aAgent.id, agentId))
.limit(1)
if (!agent) {
return NextResponse.json(
createError(null, A2AErrorCode.AGENT_UNAVAILABLE, 'Agent not found'),
{ status: 404 }
)
}
if (!agent.isPublished) {
return NextResponse.json(
createError(null, A2AErrorCode.AGENT_UNAVAILABLE, 'Agent not published'),
{ status: 404 }
)
}
// Auth check
const auth = await checkHybridAuth(request, { requireWorkflowId: false })
if (!auth.success || !auth.userId) {
return NextResponse.json(
createError(null, A2AErrorCode.AUTHENTICATION_REQUIRED, 'Unauthorized'),
{ status: 401 }
)
}
// Verify workflow is deployed
const [wf] = await db
.select({ isDeployed: workflow.isDeployed })
.from(workflow)
.where(eq(workflow.id, agent.workflowId))
.limit(1)
if (!wf?.isDeployed) {
return NextResponse.json(
createError(null, A2AErrorCode.AGENT_UNAVAILABLE, 'Workflow is not deployed'),
{ status: 400 }
)
}
// Parse JSON-RPC request
const body = await request.json()
if (!isJSONRPCRequest(body)) {
return NextResponse.json(
createError(null, A2AErrorCode.INVALID_REQUEST, 'Invalid JSON-RPC request'),
{ status: 400 }
)
}
const { id, method, params: rpcParams } = body
const apiKey =
request.headers.get('X-API-Key') ||
request.headers.get('Authorization')?.replace('Bearer ', '')
logger.info(`A2A request: ${method} for agent ${agentId}`)
switch (method) {
case A2A_METHODS.TASKS_SEND:
return handleTaskSend(id, agent, rpcParams as TaskSendParams, apiKey)
case A2A_METHODS.TASKS_GET:
return handleTaskGet(id, rpcParams as TaskQueryParams)
case A2A_METHODS.TASKS_CANCEL:
return handleTaskCancel(id, rpcParams as TaskCancelParams)
case A2A_METHODS.TASKS_SEND_SUBSCRIBE:
return handleTaskSendSubscribe(request, id, agent, rpcParams as TaskSendParams, apiKey)
default:
return NextResponse.json(
createError(id, A2AErrorCode.METHOD_NOT_FOUND, `Method not found: ${method}`),
{ status: 404 }
)
}
} catch (error) {
logger.error('Error handling A2A request:', error)
return NextResponse.json(createError(null, A2AErrorCode.INTERNAL_ERROR, 'Internal error'), {
status: 500,
})
}
}
/**
* Handle tasks/send - Create or continue a task
*/
async function handleTaskSend(
id: string | number,
agent: {
id: string
name: string
workflowId: string
workspaceId: string
},
params: TaskSendParams,
apiKey?: string | null
): Promise<NextResponse> {
if (!params?.message) {
return NextResponse.json(createError(id, A2AErrorCode.INVALID_PARAMS, 'Message is required'), {
status: 400,
})
}
const taskId = params.id || generateTaskId()
const contextId = params.contextId
// Check if task exists (continuation)
let existingTask: typeof a2aTask.$inferSelect | null = null
if (params.id) {
const [found] = await db.select().from(a2aTask).where(eq(a2aTask.id, params.id)).limit(1)
existingTask = found || null
if (!existingTask) {
return NextResponse.json(createError(id, A2AErrorCode.TASK_NOT_FOUND, 'Task not found'), {
status: 404,
})
}
if (isTerminalState(existingTask.status as TaskState)) {
return NextResponse.json(
createError(id, A2AErrorCode.TASK_ALREADY_COMPLETE, 'Task already in terminal state'),
{ status: 400 }
)
}
}
// Get existing history or start fresh
const history: TaskMessage[] = existingTask?.messages
? (existingTask.messages as TaskMessage[])
: []
// Add the new user message
history.push(params.message)
// Create or update task
if (existingTask) {
await db
.update(a2aTask)
.set({
status: 'working',
messages: history,
updatedAt: new Date(),
})
.where(eq(a2aTask.id, taskId))
} else {
await db.insert(a2aTask).values({
id: taskId,
agentId: agent.id,
sessionId: contextId || null,
status: 'working',
messages: history,
metadata: params.metadata || {},
createdAt: new Date(),
updatedAt: new Date(),
})
}
// Execute the workflow
const executeUrl = `${getBaseUrl()}/api/workflows/${agent.workflowId}/execute`
const headers: Record<string, string> = { 'Content-Type': 'application/json' }
if (apiKey) headers['X-API-Key'] = apiKey
logger.info(`Executing workflow ${agent.workflowId} for A2A task ${taskId}`)
try {
// Extract text content from the TaskMessage for easier workflow consumption
const messageText = extractTextContent(params.message)
const response = await fetch(executeUrl, {
method: 'POST',
headers,
body: JSON.stringify({
input: messageText,
triggerType: 'api',
}),
signal: AbortSignal.timeout(A2A_DEFAULT_TIMEOUT),
})
const executeResult = await response.json()
// Determine final state
const finalState: TaskState = response.ok ? 'completed' : 'failed'
// Create agent response message
const agentContent =
executeResult.output?.content ||
(typeof executeResult.output === 'object'
? JSON.stringify(executeResult.output)
: String(executeResult.output || executeResult.error || 'Task completed'))
const agentMessage = createAgentMessage(agentContent)
history.push(agentMessage)
// Extract artifacts if present
const artifacts = executeResult.output?.artifacts || []
// Update task with result
await db
.update(a2aTask)
.set({
status: finalState,
messages: history,
artifacts,
executionId: executeResult.metadata?.executionId,
completedAt: new Date(),
updatedAt: new Date(),
})
.where(eq(a2aTask.id, taskId))
const task: Task = {
id: taskId,
contextId: contextId || undefined,
status: createTaskStatus(finalState),
history,
artifacts,
metadata: params.metadata,
kind: 'task',
}
return NextResponse.json(createResponse(id, task))
} catch (error) {
logger.error(`Error executing workflow for task ${taskId}:`, error)
// Mark task as failed
const errorMessage = error instanceof Error ? error.message : 'Workflow execution failed'
await db
.update(a2aTask)
.set({
status: 'failed',
updatedAt: new Date(),
completedAt: new Date(),
})
.where(eq(a2aTask.id, taskId))
return NextResponse.json(createError(id, A2AErrorCode.INTERNAL_ERROR, errorMessage), {
status: 500,
})
}
}
/**
* Handle tasks/get - Query task status
*/
async function handleTaskGet(id: string | number, params: TaskQueryParams): Promise<NextResponse> {
if (!params?.id) {
return NextResponse.json(createError(id, A2AErrorCode.INVALID_PARAMS, 'Task ID is required'), {
status: 400,
})
}
// Validate historyLength if provided
const historyLength =
params.historyLength !== undefined && params.historyLength >= 0
? params.historyLength
: undefined
const [task] = await db.select().from(a2aTask).where(eq(a2aTask.id, params.id)).limit(1)
if (!task) {
return NextResponse.json(createError(id, A2AErrorCode.TASK_NOT_FOUND, 'Task not found'), {
status: 404,
})
}
const result = formatTaskResponse(
{
id: task.id,
contextId: task.sessionId || undefined,
status: createTaskStatus(task.status as TaskState),
history: task.messages as TaskMessage[],
artifacts: (task.artifacts as Artifact[]) || [],
metadata: (task.metadata as Record<string, unknown>) || {},
kind: 'task',
},
historyLength
)
return NextResponse.json(createResponse(id, result))
}
/**
* Handle tasks/cancel - Cancel a running task
*/
async function handleTaskCancel(
id: string | number,
params: TaskCancelParams
): Promise<NextResponse> {
if (!params?.id) {
return NextResponse.json(createError(id, A2AErrorCode.INVALID_PARAMS, 'Task ID is required'), {
status: 400,
})
}
const [task] = await db.select().from(a2aTask).where(eq(a2aTask.id, params.id)).limit(1)
if (!task) {
return NextResponse.json(createError(id, A2AErrorCode.TASK_NOT_FOUND, 'Task not found'), {
status: 404,
})
}
if (isTerminalState(task.status as TaskState)) {
return NextResponse.json(
createError(id, A2AErrorCode.TASK_ALREADY_COMPLETE, 'Task already in terminal state'),
{ status: 400 }
)
}
await db
.update(a2aTask)
.set({
status: 'canceled',
updatedAt: new Date(),
completedAt: new Date(),
})
.where(eq(a2aTask.id, params.id))
const result: Task = {
id: task.id,
contextId: task.sessionId || undefined,
status: createTaskStatus('canceled'),
history: task.messages as TaskMessage[],
artifacts: (task.artifacts as Artifact[]) || [],
kind: 'task',
}
return NextResponse.json(createResponse(id, result))
}
/**
* Handle tasks/sendSubscribe - SSE streaming
*/
async function handleTaskSendSubscribe(
request: NextRequest,
id: string | number,
agent: {
id: string
name: string
workflowId: string
workspaceId: string
},
params: TaskSendParams,
apiKey?: string | null
): Promise<NextResponse> {
if (!params?.message) {
return NextResponse.json(createError(id, A2AErrorCode.INVALID_PARAMS, 'Message is required'), {
status: 400,
})
}
const contextId = params.contextId
// Get existing task or prepare for new one
let history: TaskMessage[] = []
let existingTask: typeof a2aTask.$inferSelect | null = null
if (params.id) {
const [found] = await db.select().from(a2aTask).where(eq(a2aTask.id, params.id)).limit(1)
existingTask = found || null
if (!existingTask) {
return NextResponse.json(createError(id, A2AErrorCode.TASK_NOT_FOUND, 'Task not found'), {
status: 404,
})
}
if (isTerminalState(existingTask.status as TaskState)) {
return NextResponse.json(
createError(id, A2AErrorCode.TASK_ALREADY_COMPLETE, 'Task already in terminal state'),
{ status: 400 }
)
}
history = existingTask.messages as TaskMessage[]
}
const taskId = params.id || generateTaskId()
history.push(params.message)
// Create or update task record
if (existingTask) {
await db
.update(a2aTask)
.set({
status: 'working',
messages: history,
updatedAt: new Date(),
})
.where(eq(a2aTask.id, taskId))
} else {
await db.insert(a2aTask).values({
id: taskId,
agentId: agent.id,
sessionId: contextId || null,
status: 'working',
messages: history,
metadata: params.metadata || {},
createdAt: new Date(),
updatedAt: new Date(),
})
}
// Create SSE stream
const encoder = new TextEncoder()
const stream = new ReadableStream({
async start(controller) {
const sendEvent = (event: string, data: unknown) => {
try {
controller.enqueue(encoder.encode(`event: ${event}\ndata: ${JSON.stringify(data)}\n\n`))
} catch (error) {
logger.error('Error sending SSE event:', error)
}
}
// Send initial status
sendEvent('task:status', {
id: taskId,
status: { state: 'working', timestamp: new Date().toISOString() },
})
try {
// Execute workflow with streaming
const executeUrl = `${getBaseUrl()}/api/workflows/${agent.workflowId}/execute`
const headers: Record<string, string> = {
'Content-Type': 'application/json',
'X-Stream-Response': 'true',
}
if (apiKey) headers['X-API-Key'] = apiKey
// Extract text content from the TaskMessage for easier workflow consumption
const messageText = extractTextContent(params.message)
const response = await fetch(executeUrl, {
method: 'POST',
headers,
body: JSON.stringify({
input: messageText,
triggerType: 'api',
stream: true,
}),
signal: AbortSignal.timeout(A2A_DEFAULT_TIMEOUT),
})
if (!response.ok) {
let errorMessage = 'Workflow execution failed'
try {
const errorResult = await response.json()
errorMessage = errorResult.error || errorMessage
} catch {
// Response may not be JSON
}
throw new Error(errorMessage)
}
// Check content type to determine response handling
const contentType = response.headers.get('content-type') || ''
const isStreamingResponse =
contentType.includes('text/event-stream') || contentType.includes('text/plain')
if (response.body && isStreamingResponse) {
// Handle streaming response - forward chunks
const reader = response.body.getReader()
const decoder = new TextDecoder()
let fullContent = ''
while (true) {
const { done, value } = await reader.read()
if (done) break
const chunk = decoder.decode(value, { stream: true })
fullContent += chunk
// Forward chunk as message event
sendEvent('task:message', {
id: taskId,
chunk,
})
}
// Create final agent message
const agentMessage = createAgentMessage(fullContent || 'Task completed')
history.push(agentMessage)
// Update task
await db
.update(a2aTask)
.set({
status: 'completed',
messages: history,
completedAt: new Date(),
updatedAt: new Date(),
})
.where(eq(a2aTask.id, taskId))
sendEvent('task:status', {
id: taskId,
status: { state: 'completed', timestamp: new Date().toISOString() },
final: true,
})
} else {
// Handle JSON response (non-streaming workflow)
const result = await response.json()
const content =
result.output?.content ||
(typeof result.output === 'object'
? JSON.stringify(result.output)
: String(result.output || 'Task completed'))
// Send the complete content as a single message
sendEvent('task:message', {
id: taskId,
chunk: content,
})
const agentMessage = createAgentMessage(content)
history.push(agentMessage)
const artifacts = (result.output?.artifacts as Artifact[]) || []
// Update task with result
await db
.update(a2aTask)
.set({
status: 'completed',
messages: history,
artifacts,
executionId: result.metadata?.executionId,
completedAt: new Date(),
updatedAt: new Date(),
})
.where(eq(a2aTask.id, taskId))
sendEvent('task:status', {
id: taskId,
status: { state: 'completed', timestamp: new Date().toISOString() },
final: true,
})
}
} catch (error) {
logger.error(`Streaming error for task ${taskId}:`, error)
await db
.update(a2aTask)
.set({
status: 'failed',
completedAt: new Date(),
updatedAt: new Date(),
})
.where(eq(a2aTask.id, taskId))
sendEvent('error', {
code: A2AErrorCode.INTERNAL_ERROR,
message: error instanceof Error ? error.message : 'Streaming failed',
})
} finally {
sendEvent('task:done', { id: taskId })
controller.close()
}
},
})
return new NextResponse(stream, {
headers: {
...SSE_HEADERS,
'X-Task-Id': taskId,
},
})
}
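
To make the JSON-RPC surface above concrete, here is a minimal `tasks/send` client sketch. The base URL, agent id, and API key are placeholders, and the exact `TaskMessage` part shape is not shown in this diff, so the `parts` structure below follows the public A2A spec and should be treated as an assumption. For streaming, `tasks/sendSubscribe` returns an SSE stream instead, emitting `task:status`, `task:message`, `error`, and `task:done` events as shown in the handler.

```typescript
// Hypothetical tasks/send call against POST /api/a2a/serve/{agentId}.
// Base URL, agent id, and API key are placeholders; the message part shape
// follows the public A2A spec and is not confirmed by this diff.
interface JSONRPCResult<T> {
  jsonrpc: '2.0'
  id: string | number | null
  result?: T
  error?: { code: number; message: string; data?: unknown }
}

async function sendTask(text: string) {
  const res = await fetch('https://sim.example.com/api/a2a/serve/AGENT_ID', {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      Authorization: 'Bearer replace-with-api-key',
    },
    body: JSON.stringify({
      jsonrpc: '2.0',
      id: crypto.randomUUID(),
      method: 'tasks/send',
      params: {
        // Omit `id` to start a new task, or pass an existing task id to continue it.
        message: { role: 'user', parts: [{ kind: 'text', text }] },
      },
    }),
  })
  const rpc = (await res.json()) as JSONRPCResult<{
    id: string
    status: { state: string }
    history: unknown[]
  }>
  if (rpc.error) throw new Error(`A2A error ${rpc.error.code}: ${rpc.error.message}`)
  return rpc.result // completed Task with history, artifacts, and final status
}
```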

View File

@@ -1,8 +1,7 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { auth, getSession } from '@/lib/auth'
import { hasSSOAccess } from '@/lib/billing'
import { auth } from '@/lib/auth'
import { env } from '@/lib/core/config/env'
import { REDACTED_MARKER } from '@/lib/core/security/redaction'
@@ -64,22 +63,10 @@ const ssoRegistrationSchema = z.discriminatedUnion('providerType', [
export async function POST(request: NextRequest) {
try {
// SSO plugin must be enabled in Better Auth
if (!env.SSO_ENABLED) {
return NextResponse.json({ error: 'SSO is not enabled' }, { status: 400 })
}
// Check plan access (enterprise) or env var override
const session = await getSession()
if (!session?.user?.id) {
return NextResponse.json({ error: 'Authentication required' }, { status: 401 })
}
const hasAccess = await hasSSOAccess(session.user.id)
if (!hasAccess) {
return NextResponse.json({ error: 'SSO requires an Enterprise plan' }, { status: 403 })
}
const rawBody = await request.json()
const parseResult = ssoRegistrationSchema.safeParse(rawBody)

View File

@@ -7,11 +7,10 @@ import type { NextRequest } from 'next/server'
import { z } from 'zod'
import { renderOTPEmail } from '@/components/emails'
import { getRedisClient } from '@/lib/core/config/redis'
import { addCorsHeaders } from '@/lib/core/security/deployment'
import { getStorageMethod } from '@/lib/core/storage'
import { generateRequestId } from '@/lib/core/utils/request'
import { sendEmail } from '@/lib/messaging/email/mailer'
import { setChatAuthCookie } from '@/app/api/chat/utils'
import { addCorsHeaders, setChatAuthCookie } from '@/app/api/chat/utils'
import { createErrorResponse, createSuccessResponse } from '@/app/api/workflows/utils'
const logger = createLogger('ChatOtpAPI')

View File

@@ -3,7 +3,6 @@
*
* @vitest-environment node
*/
import { loggerMock } from '@sim/testing'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import { createMockRequest } from '@/app/api/__test-utils__/utils'
@@ -121,8 +120,14 @@ describe('Chat Identifier API Route', () => {
validateAuthToken: vi.fn().mockReturnValue(true),
}))
// Mock logger - use loggerMock from @sim/testing
vi.doMock('@sim/logger', () => loggerMock)
vi.doMock('@sim/logger', () => ({
createLogger: vi.fn().mockReturnValue({
debug: vi.fn(),
info: vi.fn(),
warn: vi.fn(),
error: vi.fn(),
}),
}))
vi.doMock('@sim/db', () => {
const mockSelect = vi.fn().mockImplementation((fields) => {

View File

@@ -5,12 +5,16 @@ import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { addCorsHeaders, validateAuthToken } from '@/lib/core/security/deployment'
import { generateRequestId } from '@/lib/core/utils/request'
import { preprocessExecution } from '@/lib/execution/preprocessing'
import { LoggingSession } from '@/lib/logs/execution/logging-session'
import { ChatFiles } from '@/lib/uploads'
import { setChatAuthCookie, validateChatAuth } from '@/app/api/chat/utils'
import {
addCorsHeaders,
setChatAuthCookie,
validateAuthToken,
validateChatAuth,
} from '@/app/api/chat/utils'
import { createErrorResponse, createSuccessResponse } from '@/app/api/workflows/utils'
const logger = createLogger('ChatIdentifierAPI')
@@ -249,7 +253,7 @@ export async function POST(
userId: deployment.userId,
workspaceId,
isDeployed: workflowRecord?.isDeployed ?? false,
variables: (workflowRecord?.variables as Record<string, unknown>) ?? undefined,
variables: workflowRecord?.variables || {},
}
const stream = await createStreamingResponse({

View File

@@ -1,10 +1,9 @@
import { NextRequest } from 'next/server'
/**
* Tests for chat edit API route
*
* @vitest-environment node
*/
import { loggerMock } from '@sim/testing'
import { NextRequest } from 'next/server'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
vi.mock('@/lib/core/config/feature-flags', () => ({
@@ -51,8 +50,14 @@ describe('Chat Edit API Route', () => {
chat: { id: 'id', identifier: 'identifier', userId: 'userId' },
}))
// Mock logger - use loggerMock from @sim/testing
vi.doMock('@sim/logger', () => loggerMock)
vi.doMock('@sim/logger', () => ({
createLogger: vi.fn().mockReturnValue({
info: vi.fn(),
error: vi.fn(),
warn: vi.fn(),
debug: vi.fn(),
}),
}))
vi.doMock('@/app/api/workflows/utils', () => ({
createSuccessResponse: mockCreateSuccessResponse.mockImplementation((data) => {

View File

@@ -1,4 +1,3 @@
import { databaseMock, loggerMock } from '@sim/testing'
import type { NextResponse } from 'next/server'
/**
* Tests for chat API utils
@@ -6,9 +5,14 @@ import type { NextResponse } from 'next/server'
* @vitest-environment node
*/
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import { env } from '@/lib/core/config/env'
vi.mock('@sim/db', () => databaseMock)
vi.mock('@sim/logger', () => loggerMock)
vi.mock('@sim/db', () => ({
db: {
select: vi.fn(),
update: vi.fn(),
},
}))
vi.mock('@/lib/logs/execution/logging-session', () => ({
LoggingSession: vi.fn().mockImplementation(() => ({
@@ -48,10 +52,19 @@ vi.mock('@/lib/core/config/feature-flags', () => ({
describe('Chat API Utils', () => {
beforeEach(() => {
vi.doMock('@sim/logger', () => ({
createLogger: vi.fn().mockReturnValue({
info: vi.fn(),
error: vi.fn(),
warn: vi.fn(),
debug: vi.fn(),
}),
}))
vi.stubGlobal('process', {
...process,
env: {
...process.env,
...env,
NODE_ENV: 'development',
},
})
@@ -62,8 +75,8 @@ describe('Chat API Utils', () => {
})
describe('Auth token utils', () => {
it.concurrent('should validate auth tokens', async () => {
const { validateAuthToken } = await import('@/lib/core/security/deployment')
it('should validate auth tokens', async () => {
const { validateAuthToken } = await import('@/app/api/chat/utils')
const chatId = 'test-chat-id'
const type = 'password'
@@ -79,8 +92,8 @@ describe('Chat API Utils', () => {
expect(isInvalidChat).toBe(false)
})
it.concurrent('should reject expired tokens', async () => {
const { validateAuthToken } = await import('@/lib/core/security/deployment')
it('should reject expired tokens', async () => {
const { validateAuthToken } = await import('@/app/api/chat/utils')
const chatId = 'test-chat-id'
const expiredToken = Buffer.from(
@@ -123,7 +136,7 @@ describe('Chat API Utils', () => {
describe('CORS handling', () => {
it('should add CORS headers for localhost in development', async () => {
const { addCorsHeaders } = await import('@/lib/core/security/deployment')
const { addCorsHeaders } = await import('@/app/api/chat/utils')
const mockRequest = {
headers: {
@@ -330,7 +343,7 @@ describe('Chat API Utils', () => {
})
describe('Execution Result Processing', () => {
it.concurrent('should process logs regardless of overall success status', () => {
it('should process logs regardless of overall success status', () => {
const executionResult = {
success: false,
output: {},
@@ -368,7 +381,7 @@ describe('Chat API Utils', () => {
expect(executionResult.logs[1].error).toBe('Agent 2 failed')
})
it.concurrent('should handle ExecutionResult vs StreamingExecution types correctly', () => {
it('should handle ExecutionResult vs StreamingExecution types correctly', () => {
const executionResult = {
success: true,
output: { content: 'test' },

View File

@@ -1,25 +1,17 @@
import { createHash } from 'crypto'
import { db } from '@sim/db'
import { chat, workflow } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import type { NextRequest, NextResponse } from 'next/server'
import {
isEmailAllowed,
setDeploymentAuthCookie,
validateAuthToken,
} from '@/lib/core/security/deployment'
import { isDev } from '@/lib/core/config/feature-flags'
import { decryptSecret } from '@/lib/core/security/encryption'
import { hasAdminPermission } from '@/lib/workspaces/permissions/utils'
const logger = createLogger('ChatAuthUtils')
export function setChatAuthCookie(
response: NextResponse,
chatId: string,
type: string,
encryptedPassword?: string | null
): void {
setDeploymentAuthCookie(response, 'chat', chatId, type, encryptedPassword)
function hashPassword(encryptedPassword: string): string {
return createHash('sha256').update(encryptedPassword).digest('hex').substring(0, 8)
}
/**
@@ -90,6 +82,77 @@ export async function checkChatAccess(
return { hasAccess: false }
}
function encryptAuthToken(chatId: string, type: string, encryptedPassword?: string | null): string {
const pwHash = encryptedPassword ? hashPassword(encryptedPassword) : ''
return Buffer.from(`${chatId}:${type}:${Date.now()}:${pwHash}`).toString('base64')
}
export function validateAuthToken(
token: string,
chatId: string,
encryptedPassword?: string | null
): boolean {
try {
const decoded = Buffer.from(token, 'base64').toString()
const parts = decoded.split(':')
const [storedId, _type, timestamp, storedPwHash] = parts
if (storedId !== chatId) {
return false
}
const createdAt = Number.parseInt(timestamp)
const now = Date.now()
const expireTime = 24 * 60 * 60 * 1000
if (now - createdAt > expireTime) {
return false
}
if (encryptedPassword) {
const currentPwHash = hashPassword(encryptedPassword)
if (storedPwHash !== currentPwHash) {
return false
}
}
return true
} catch (_e) {
return false
}
}
export function setChatAuthCookie(
response: NextResponse,
chatId: string,
type: string,
encryptedPassword?: string | null
): void {
const token = encryptAuthToken(chatId, type, encryptedPassword)
response.cookies.set({
name: `chat_auth_${chatId}`,
value: token,
httpOnly: true,
secure: !isDev,
sameSite: 'lax',
path: '/',
maxAge: 60 * 60 * 24,
})
}
export function addCorsHeaders(response: NextResponse, request: NextRequest) {
const origin = request.headers.get('origin') || ''
if (isDev && origin.includes('localhost')) {
response.headers.set('Access-Control-Allow-Origin', origin)
response.headers.set('Access-Control-Allow-Credentials', 'true')
response.headers.set('Access-Control-Allow-Methods', 'GET, POST, OPTIONS')
response.headers.set('Access-Control-Allow-Headers', 'Content-Type, X-Requested-With')
}
return response
}
export async function validateChatAuth(
requestId: string,
deployment: any,
@@ -168,7 +231,12 @@ export async function validateChatAuth(
const allowedEmails = deployment.allowedEmails || []
if (isEmailAllowed(email, allowedEmails)) {
if (allowedEmails.includes(email)) {
return { authorized: false, error: 'otp_required' }
}
const domain = email.split('@')[1]
if (domain && allowedEmails.some((allowed: string) => allowed === `@${domain}`)) {
return { authorized: false, error: 'otp_required' }
}
@@ -202,7 +270,12 @@ export async function validateChatAuth(
const allowedEmails = deployment.allowedEmails || []
if (isEmailAllowed(email, allowedEmails)) {
if (allowedEmails.includes(email)) {
return { authorized: true }
}
const domain = email.split('@')[1]
if (domain && allowedEmails.some((allowed: string) => allowed === `@${domain}`)) {
return { authorized: true }
}
@@ -223,7 +296,12 @@ export async function validateChatAuth(
const allowedEmails = deployment.allowedEmails || []
if (isEmailAllowed(userEmail, allowedEmails)) {
if (allowedEmails.includes(userEmail)) {
return { authorized: true }
}
const domain = userEmail.split('@')[1]
if (domain && allowedEmails.some((allowed: string) => allowed === `@${domain}`)) {
return { authorized: true }
}
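
The allowlist checks above accept either an exact email or a bare `@domain` entry; as a compact illustration of that rule (a standalone helper, not part of the route):

```typescript
// Illustrative helper mirroring the allowlist rule used in validateChatAuth:
// an entry matches if it equals the email exactly, or if it is "@domain"
// and the email's domain matches.
function emailMatchesAllowlist(email: string, allowedEmails: string[]): boolean {
  if (allowedEmails.includes(email)) return true
  const domain = email.split('@')[1]
  return Boolean(domain) && allowedEmails.some((allowed) => allowed === `@${domain}`)
}

// emailMatchesAllowlist('dev@acme.com', ['@acme.com'])    -> true
// emailMatchesAllowlist('dev@acme.com', ['ops@acme.com']) -> false
```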

View File

@@ -17,30 +17,25 @@ const logger = createLogger('CopilotChatUpdateAPI')
const UpdateMessagesSchema = z.object({
chatId: z.string(),
messages: z.array(
z
.object({
id: z.string(),
role: z.enum(['user', 'assistant', 'system']),
content: z.string(),
timestamp: z.string(),
toolCalls: z.array(z.any()).optional(),
contentBlocks: z.array(z.any()).optional(),
fileAttachments: z
.array(
z.object({
id: z.string(),
key: z.string(),
filename: z.string(),
media_type: z.string(),
size: z.number(),
})
)
.optional(),
contexts: z.array(z.any()).optional(),
citations: z.array(z.any()).optional(),
errorType: z.string().optional(),
})
.passthrough() // Preserve any additional fields for future compatibility
z.object({
id: z.string(),
role: z.enum(['user', 'assistant']),
content: z.string(),
timestamp: z.string(),
toolCalls: z.array(z.any()).optional(),
contentBlocks: z.array(z.any()).optional(),
fileAttachments: z
.array(
z.object({
id: z.string(),
key: z.string(),
filename: z.string(),
media_type: z.string(),
size: z.number(),
})
)
.optional(),
})
),
planArtifact: z.string().nullable().optional(),
config: z
@@ -62,19 +57,6 @@ export async function POST(req: NextRequest) {
}
const body = await req.json()
// Debug: Log what we received
const lastMsg = body.messages?.[body.messages.length - 1]
if (lastMsg?.role === 'assistant') {
logger.info(`[${tracker.requestId}] Received messages to save`, {
messageCount: body.messages?.length,
lastMsgId: lastMsg.id,
lastMsgContentLength: lastMsg.content?.length || 0,
lastMsgContentBlockCount: lastMsg.contentBlocks?.length || 0,
lastMsgContentBlockTypes: lastMsg.contentBlocks?.map((b: any) => b?.type) || [],
})
}
const { chatId, messages, planArtifact, config } = UpdateMessagesSchema.parse(body)
// Verify that the chat belongs to the user
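
For reference, a request body that satisfies the stricter of the two message schemas in this hunk might look like the sketch below; ids and timestamps are placeholders, and optional fields (`toolCalls`, `fileAttachments`, `config`) are mostly omitted.

```typescript
// Hypothetical payload for UpdateMessagesSchema; all ids and values are placeholders.
const updateMessagesPayload = {
  chatId: 'chat_123',
  messages: [
    {
      id: 'msg_1',
      role: 'user' as const,
      content: 'Summarize the failing workflow run',
      timestamp: new Date().toISOString(),
    },
    {
      id: 'msg_2',
      role: 'assistant' as const,
      content: 'The run failed at the HTTP block with a 401 response.',
      timestamp: new Date().toISOString(),
      contentBlocks: [], // optional
    },
  ],
  planArtifact: null, // optional
}
```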

View File

@@ -0,0 +1,50 @@
/**
* @deprecated This route is not currently in use
* @remarks Kept for reference - may be removed in future cleanup
*/
import { db } from '@sim/db'
import { copilotChats } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
const logger = createLogger('UpdateChatTitleAPI')
const UpdateTitleSchema = z.object({
chatId: z.string(),
title: z.string(),
})
export async function POST(request: NextRequest) {
try {
const session = await getSession()
if (!session?.user?.id) {
return NextResponse.json({ success: false, error: 'Unauthorized' }, { status: 401 })
}
const body = await request.json()
const parsed = UpdateTitleSchema.parse(body)
// Update the chat title
await db
.update(copilotChats)
.set({
title: parsed.title,
updatedAt: new Date(),
})
.where(eq(copilotChats.id, parsed.chatId))
logger.info('Chat title updated', { chatId: parsed.chatId, title: parsed.title })
return NextResponse.json({ success: true })
} catch (error) {
logger.error('Error updating chat title:', error)
return NextResponse.json(
{ success: false, error: 'Failed to update chat title' },
{ status: 500 }
)
}
}

View File

@@ -0,0 +1,134 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { getCopilotModel } from '@/lib/copilot/config'
import { SIM_AGENT_API_URL_DEFAULT } from '@/lib/copilot/constants'
import type { CopilotProviderConfig } from '@/lib/copilot/types'
import { env } from '@/lib/core/config/env'
const logger = createLogger('ContextUsageAPI')
const SIM_AGENT_API_URL = env.SIM_AGENT_API_URL || SIM_AGENT_API_URL_DEFAULT
const ContextUsageRequestSchema = z.object({
chatId: z.string(),
model: z.string(),
workflowId: z.string(),
provider: z.any().optional(),
})
/**
* POST /api/copilot/context-usage
* Fetch context usage from sim-agent API
*/
export async function POST(req: NextRequest) {
try {
logger.info('[Context Usage API] Request received')
const session = await getSession()
if (!session?.user?.id) {
logger.warn('[Context Usage API] No session/user ID')
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
const body = await req.json()
logger.info('[Context Usage API] Request body', body)
const parsed = ContextUsageRequestSchema.safeParse(body)
if (!parsed.success) {
logger.warn('[Context Usage API] Invalid request body', parsed.error.errors)
return NextResponse.json(
{ error: 'Invalid request body', details: parsed.error.errors },
{ status: 400 }
)
}
const { chatId, model, workflowId, provider } = parsed.data
const userId = session.user.id // Get userId from session, not from request
logger.info('[Context Usage API] Request validated', { chatId, model, userId, workflowId })
// Build provider config similar to chat route
let providerConfig: CopilotProviderConfig | undefined = provider
if (!providerConfig) {
const defaults = getCopilotModel('chat')
const modelToUse = env.COPILOT_MODEL || defaults.model
const providerEnv = env.COPILOT_PROVIDER as any
if (providerEnv) {
if (providerEnv === 'azure-openai') {
providerConfig = {
provider: 'azure-openai',
model: modelToUse,
apiKey: env.AZURE_OPENAI_API_KEY,
apiVersion: env.AZURE_OPENAI_API_VERSION,
endpoint: env.AZURE_OPENAI_ENDPOINT,
}
} else if (providerEnv === 'vertex') {
providerConfig = {
provider: 'vertex',
model: modelToUse,
apiKey: env.COPILOT_API_KEY,
vertexProject: env.VERTEX_PROJECT,
vertexLocation: env.VERTEX_LOCATION,
}
} else {
providerConfig = {
provider: providerEnv,
model: modelToUse,
apiKey: env.COPILOT_API_KEY,
}
}
}
}
// Call sim-agent API
const requestPayload = {
chatId,
model,
userId,
workflowId,
...(providerConfig ? { provider: providerConfig } : {}),
}
logger.info('[Context Usage API] Calling sim-agent', {
url: `${SIM_AGENT_API_URL}/api/get-context-usage`,
payload: requestPayload,
})
const simAgentResponse = await fetch(`${SIM_AGENT_API_URL}/api/get-context-usage`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
...(env.COPILOT_API_KEY ? { 'x-api-key': env.COPILOT_API_KEY } : {}),
},
body: JSON.stringify(requestPayload),
})
logger.info('[Context Usage API] Sim-agent response', {
status: simAgentResponse.status,
ok: simAgentResponse.ok,
})
if (!simAgentResponse.ok) {
const errorText = await simAgentResponse.text().catch(() => '')
logger.warn('[Context Usage API] Sim agent request failed', {
status: simAgentResponse.status,
error: errorText,
})
return NextResponse.json(
{ error: 'Failed to fetch context usage from sim-agent' },
{ status: simAgentResponse.status }
)
}
const data = await simAgentResponse.json()
logger.info('[Context Usage API] Sim-agent data received', data)
return NextResponse.json(data)
} catch (error) {
logger.error('Error fetching context usage:', error)
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
}
}
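
A hedged sketch of how the browser side might call this route; the path follows the doc comment above, the session cookie supplies auth, and the response shape is whatever sim-agent returns, since the handler forwards it verbatim.

```typescript
// Hypothetical client call for POST /api/copilot/context-usage.
// Field names match ContextUsageRequestSchema; the response is typed loosely
// because the handler passes sim-agent's JSON through unchanged.
async function fetchContextUsage(chatId: string, model: string, workflowId: string) {
  const res = await fetch('/api/copilot/context-usage', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    credentials: 'include', // getSession() reads the auth cookie server-side
    body: JSON.stringify({ chatId, model, workflowId }),
  })
  if (!res.ok) throw new Error(`Context usage request failed: ${res.status}`)
  return (await res.json()) as Record<string, unknown>
}
```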

View File

@@ -5,7 +5,6 @@ import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getEmailSubject, renderPollingGroupInvitationEmail } from '@/components/emails'
import { getSession } from '@/lib/auth'
import { hasCredentialSetsAccess } from '@/lib/billing'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { sendEmail } from '@/lib/messaging/email/mailer'
@@ -46,15 +45,6 @@ export async function POST(
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
// Check plan access (team/enterprise) or env var override
const hasAccess = await hasCredentialSetsAccess(session.user.id)
if (!hasAccess) {
return NextResponse.json(
{ error: 'Credential sets require a Team or Enterprise plan' },
{ status: 403 }
)
}
const { id, invitationId } = await params
try {

View File

@@ -6,7 +6,6 @@ import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getEmailSubject, renderPollingGroupInvitationEmail } from '@/components/emails'
import { getSession } from '@/lib/auth'
import { hasCredentialSetsAccess } from '@/lib/billing'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { sendEmail } from '@/lib/messaging/email/mailer'
@@ -48,15 +47,6 @@ export async function GET(req: NextRequest, { params }: { params: Promise<{ id:
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
// Check plan access (team/enterprise) or env var override
const hasAccess = await hasCredentialSetsAccess(session.user.id)
if (!hasAccess) {
return NextResponse.json(
{ error: 'Credential sets require a Team or Enterprise plan' },
{ status: 403 }
)
}
const { id } = await params
const result = await getCredentialSetWithAccess(id, session.user.id)
@@ -79,15 +69,6 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
// Check plan access (team/enterprise) or env var override
const hasAccess = await hasCredentialSetsAccess(session.user.id)
if (!hasAccess) {
return NextResponse.json(
{ error: 'Credential sets require a Team or Enterprise plan' },
{ status: 403 }
)
}
const { id } = await params
try {
@@ -197,15 +178,6 @@ export async function DELETE(req: NextRequest, { params }: { params: Promise<{ i
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
// Check plan access (team/enterprise) or env var override
const hasAccess = await hasCredentialSetsAccess(session.user.id)
if (!hasAccess) {
return NextResponse.json(
{ error: 'Credential sets require a Team or Enterprise plan' },
{ status: 403 }
)
}
const { id } = await params
const { searchParams } = new URL(req.url)
const invitationId = searchParams.get('invitationId')

View File

@@ -4,7 +4,6 @@ import { createLogger } from '@sim/logger'
import { and, eq, inArray } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { hasCredentialSetsAccess } from '@/lib/billing'
import { syncAllWebhooksForCredentialSet } from '@/lib/webhooks/utils.server'
const logger = createLogger('CredentialSetMembers')
@@ -40,15 +39,6 @@ export async function GET(req: NextRequest, { params }: { params: Promise<{ id:
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
// Check plan access (team/enterprise) or env var override
const hasAccess = await hasCredentialSetsAccess(session.user.id)
if (!hasAccess) {
return NextResponse.json(
{ error: 'Credential sets require a Team or Enterprise plan' },
{ status: 403 }
)
}
const { id } = await params
const result = await getCredentialSetWithAccess(id, session.user.id)
@@ -120,15 +110,6 @@ export async function DELETE(req: NextRequest, { params }: { params: Promise<{ i
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
// Check plan access (team/enterprise) or env var override
const hasAccess = await hasCredentialSetsAccess(session.user.id)
if (!hasAccess) {
return NextResponse.json(
{ error: 'Credential sets require a Team or Enterprise plan' },
{ status: 403 }
)
}
const { id } = await params
const { searchParams } = new URL(req.url)
const memberId = searchParams.get('memberId')

View File

@@ -5,7 +5,6 @@ import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { hasCredentialSetsAccess } from '@/lib/billing'
const logger = createLogger('CredentialSet')
@@ -50,15 +49,6 @@ export async function GET(req: NextRequest, { params }: { params: Promise<{ id:
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
// Check plan access (team/enterprise) or env var override
const hasAccess = await hasCredentialSetsAccess(session.user.id)
if (!hasAccess) {
return NextResponse.json(
{ error: 'Credential sets require a Team or Enterprise plan' },
{ status: 403 }
)
}
const { id } = await params
const result = await getCredentialSetWithAccess(id, session.user.id)
@@ -76,15 +66,6 @@ export async function PUT(req: NextRequest, { params }: { params: Promise<{ id:
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
// Check plan access (team/enterprise) or env var override
const hasAccess = await hasCredentialSetsAccess(session.user.id)
if (!hasAccess) {
return NextResponse.json(
{ error: 'Credential sets require a Team or Enterprise plan' },
{ status: 403 }
)
}
const { id } = await params
try {
@@ -148,15 +129,6 @@ export async function DELETE(req: NextRequest, { params }: { params: Promise<{ i
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
// Check plan access (team/enterprise) or env var override
const hasAccess = await hasCredentialSetsAccess(session.user.id)
if (!hasAccess) {
return NextResponse.json(
{ error: 'Credential sets require a Team or Enterprise plan' },
{ status: 403 }
)
}
const { id } = await params
try {

View File

@@ -5,7 +5,6 @@ import { and, count, desc, eq } from 'drizzle-orm'
import { NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { hasCredentialSetsAccess } from '@/lib/billing'
const logger = createLogger('CredentialSets')
@@ -23,15 +22,6 @@ export async function GET(req: Request) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
// Check plan access (team/enterprise) or env var override
const hasAccess = await hasCredentialSetsAccess(session.user.id)
if (!hasAccess) {
return NextResponse.json(
{ error: 'Credential sets require a Team or Enterprise plan' },
{ status: 403 }
)
}
const { searchParams } = new URL(req.url)
const organizationId = searchParams.get('organizationId')
@@ -95,15 +85,6 @@ export async function POST(req: Request) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
// Check plan access (team/enterprise) or env var override
const hasAccess = await hasCredentialSetsAccess(session.user.id)
if (!hasAccess) {
return NextResponse.json(
{ error: 'Credential sets require a Team or Enterprise plan' },
{ status: 403 }
)
}
try {
const body = await req.json()
const { organizationId, name, description, providerId } = createCredentialSetSchema.parse(body)

View File

@@ -7,7 +7,7 @@ import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { decryptSecret, encryptSecret } from '@/lib/core/security/encryption'
import { generateRequestId } from '@/lib/core/utils/request'
import type { EnvironmentVariable } from '@/stores/settings/environment'
import type { EnvironmentVariable } from '@/stores/settings/environment/types'
const logger = createLogger('EnvironmentAPI')

View File

@@ -1,414 +0,0 @@
import { randomUUID } from 'crypto'
import { db } from '@sim/db'
import { form, workflow, workflowBlocks } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { addCorsHeaders, validateAuthToken } from '@/lib/core/security/deployment'
import { generateRequestId } from '@/lib/core/utils/request'
import { preprocessExecution } from '@/lib/execution/preprocessing'
import { LoggingSession } from '@/lib/logs/execution/logging-session'
import { createStreamingResponse } from '@/lib/workflows/streaming/streaming'
import { setFormAuthCookie, validateFormAuth } from '@/app/api/form/utils'
import { createErrorResponse, createSuccessResponse } from '@/app/api/workflows/utils'
const logger = createLogger('FormIdentifierAPI')
const formPostBodySchema = z.object({
formData: z.record(z.unknown()).optional(),
password: z.string().optional(),
email: z.string().email('Invalid email format').optional().or(z.literal('')),
})
export const dynamic = 'force-dynamic'
export const runtime = 'nodejs'
/**
* Get the input format schema from the workflow's start block
*/
async function getWorkflowInputSchema(workflowId: string): Promise<any[]> {
try {
const blocks = await db
.select()
.from(workflowBlocks)
.where(eq(workflowBlocks.workflowId, workflowId))
// Find the start block (starter or start_trigger type)
const startBlock = blocks.find(
(block) => block.type === 'starter' || block.type === 'start_trigger'
)
if (!startBlock) {
return []
}
// Extract inputFormat from subBlocks
const subBlocks = startBlock.subBlocks as Record<string, any> | null
if (!subBlocks?.inputFormat?.value) {
return []
}
return Array.isArray(subBlocks.inputFormat.value) ? subBlocks.inputFormat.value : []
} catch (error) {
logger.error('Error fetching workflow input schema:', error)
return []
}
}
export async function POST(
request: NextRequest,
{ params }: { params: Promise<{ identifier: string }> }
) {
const { identifier } = await params
const requestId = generateRequestId()
try {
logger.debug(`[${requestId}] Processing form submission for identifier: ${identifier}`)
let parsedBody
try {
const rawBody = await request.json()
const validation = formPostBodySchema.safeParse(rawBody)
if (!validation.success) {
const errorMessage = validation.error.errors
.map((err) => `${err.path.join('.')}: ${err.message}`)
.join(', ')
logger.warn(`[${requestId}] Validation error: ${errorMessage}`)
return addCorsHeaders(
createErrorResponse(`Invalid request body: ${errorMessage}`, 400),
request
)
}
parsedBody = validation.data
} catch (_error) {
return addCorsHeaders(createErrorResponse('Invalid request body', 400), request)
}
const deploymentResult = await db
.select({
id: form.id,
workflowId: form.workflowId,
userId: form.userId,
isActive: form.isActive,
authType: form.authType,
password: form.password,
allowedEmails: form.allowedEmails,
customizations: form.customizations,
})
.from(form)
.where(eq(form.identifier, identifier))
.limit(1)
if (deploymentResult.length === 0) {
logger.warn(`[${requestId}] Form not found for identifier: ${identifier}`)
return addCorsHeaders(createErrorResponse('Form not found', 404), request)
}
const deployment = deploymentResult[0]
if (!deployment.isActive) {
logger.warn(`[${requestId}] Form is not active: ${identifier}`)
const [workflowRecord] = await db
.select({ workspaceId: workflow.workspaceId })
.from(workflow)
.where(eq(workflow.id, deployment.workflowId))
.limit(1)
const workspaceId = workflowRecord?.workspaceId
if (!workspaceId) {
logger.warn(`[${requestId}] Cannot log: workflow ${deployment.workflowId} has no workspace`)
return addCorsHeaders(
createErrorResponse('This form is currently unavailable', 403),
request
)
}
const executionId = randomUUID()
const loggingSession = new LoggingSession(
deployment.workflowId,
executionId,
'form',
requestId
)
await loggingSession.safeStart({
userId: deployment.userId,
workspaceId,
variables: {},
})
await loggingSession.safeCompleteWithError({
error: {
message: 'This form is currently unavailable. The form has been disabled.',
stackTrace: undefined,
},
traceSpans: [],
})
return addCorsHeaders(createErrorResponse('This form is currently unavailable', 403), request)
}
const authResult = await validateFormAuth(requestId, deployment, request, parsedBody)
if (!authResult.authorized) {
return addCorsHeaders(
createErrorResponse(authResult.error || 'Authentication required', 401),
request
)
}
const { formData, password, email } = parsedBody
// If only authentication credentials provided (no form data), just return authenticated
if ((password || email) && !formData) {
const response = addCorsHeaders(createSuccessResponse({ authenticated: true }), request)
setFormAuthCookie(response, deployment.id, deployment.authType, deployment.password)
return response
}
if (!formData || Object.keys(formData).length === 0) {
return addCorsHeaders(createErrorResponse('No form data provided', 400), request)
}
const executionId = randomUUID()
const loggingSession = new LoggingSession(deployment.workflowId, executionId, 'form', requestId)
const preprocessResult = await preprocessExecution({
workflowId: deployment.workflowId,
userId: deployment.userId,
triggerType: 'form',
executionId,
requestId,
checkRateLimit: true,
checkDeployment: true,
loggingSession,
})
if (!preprocessResult.success) {
logger.warn(`[${requestId}] Preprocessing failed: ${preprocessResult.error?.message}`)
return addCorsHeaders(
createErrorResponse(
preprocessResult.error?.message || 'Failed to process request',
preprocessResult.error?.statusCode || 500
),
request
)
}
const { actorUserId, workflowRecord } = preprocessResult
const workspaceOwnerId = actorUserId!
const workspaceId = workflowRecord?.workspaceId
if (!workspaceId) {
logger.error(`[${requestId}] Workflow ${deployment.workflowId} has no workspaceId`)
return addCorsHeaders(
createErrorResponse('Workflow has no associated workspace', 500),
request
)
}
try {
const workflowForExecution = {
id: deployment.workflowId,
userId: deployment.userId,
workspaceId,
isDeployed: workflowRecord?.isDeployed ?? false,
variables: (workflowRecord?.variables ?? {}) as Record<string, unknown>,
}
// Pass form data as the workflow input
const workflowInput = {
input: formData,
...formData, // Spread form fields at top level for convenience
}
// Execute workflow using streaming (for consistency with chat)
const stream = await createStreamingResponse({
requestId,
workflow: workflowForExecution,
input: workflowInput,
executingUserId: workspaceOwnerId,
streamConfig: {
selectedOutputs: [],
isSecureMode: true,
workflowTriggerType: 'api', // Use 'api' type since form is similar
},
executionId,
})
// For forms, we don't stream back - we wait for completion and return success
// Consume the stream to wait for completion
const reader = stream.getReader()
let lastOutput: any = null
try {
while (true) {
const { done, value } = await reader.read()
if (done) break
// Parse SSE data if present
const text = new TextDecoder().decode(value)
const lines = text.split('\n')
for (const line of lines) {
if (line.startsWith('data: ')) {
try {
const data = JSON.parse(line.slice(6))
if (data.type === 'complete' || data.output) {
lastOutput = data.output || data
}
} catch {
// Ignore parse errors
}
}
}
}
} finally {
reader.releaseLock()
}
logger.info(`[${requestId}] Form submission successful for ${identifier}`)
// Return success with customizations for thank you screen
const customizations = deployment.customizations as Record<string, any> | null
return addCorsHeaders(
createSuccessResponse({
success: true,
executionId,
thankYouTitle: customizations?.thankYouTitle || 'Thank you!',
thankYouMessage:
customizations?.thankYouMessage || 'Your response has been submitted successfully.',
}),
request
)
} catch (error: any) {
logger.error(`[${requestId}] Error processing form submission:`, error)
return addCorsHeaders(
createErrorResponse(error.message || 'Failed to process form submission', 500),
request
)
}
} catch (error: any) {
logger.error(`[${requestId}] Error processing form submission:`, error)
return addCorsHeaders(
createErrorResponse(error.message || 'Failed to process form submission', 500),
request
)
}
}
export async function GET(
request: NextRequest,
{ params }: { params: Promise<{ identifier: string }> }
) {
const { identifier } = await params
const requestId = generateRequestId()
try {
logger.debug(`[${requestId}] Fetching form info for identifier: ${identifier}`)
const deploymentResult = await db
.select({
id: form.id,
title: form.title,
description: form.description,
customizations: form.customizations,
isActive: form.isActive,
workflowId: form.workflowId,
authType: form.authType,
password: form.password,
allowedEmails: form.allowedEmails,
showBranding: form.showBranding,
})
.from(form)
.where(eq(form.identifier, identifier))
.limit(1)
if (deploymentResult.length === 0) {
logger.warn(`[${requestId}] Form not found for identifier: ${identifier}`)
return addCorsHeaders(createErrorResponse('Form not found', 404), request)
}
const deployment = deploymentResult[0]
if (!deployment.isActive) {
logger.warn(`[${requestId}] Form is not active: ${identifier}`)
return addCorsHeaders(createErrorResponse('This form is currently unavailable', 403), request)
}
// Get the workflow's input schema
const inputSchema = await getWorkflowInputSchema(deployment.workflowId)
const cookieName = `form_auth_${deployment.id}`
const authCookie = request.cookies.get(cookieName)
// If authenticated (via cookie), return full form config
if (
deployment.authType !== 'public' &&
authCookie &&
validateAuthToken(authCookie.value, deployment.id, deployment.password)
) {
return addCorsHeaders(
createSuccessResponse({
id: deployment.id,
title: deployment.title,
description: deployment.description,
customizations: deployment.customizations,
authType: deployment.authType,
showBranding: deployment.showBranding,
inputSchema,
}),
request
)
}
// Check authentication requirement
const authResult = await validateFormAuth(requestId, deployment, request)
if (!authResult.authorized) {
// Return limited info for auth required forms
logger.info(
`[${requestId}] Authentication required for form: ${identifier}, type: ${deployment.authType}`
)
return addCorsHeaders(
NextResponse.json(
{
success: false,
error: authResult.error || 'Authentication required',
authType: deployment.authType,
title: deployment.title,
customizations: {
primaryColor: (deployment.customizations as any)?.primaryColor,
logoUrl: (deployment.customizations as any)?.logoUrl,
},
},
{ status: 401 }
),
request
)
}
return addCorsHeaders(
createSuccessResponse({
id: deployment.id,
title: deployment.title,
description: deployment.description,
customizations: deployment.customizations,
authType: deployment.authType,
showBranding: deployment.showBranding,
inputSchema,
}),
request
)
} catch (error: any) {
logger.error(`[${requestId}] Error fetching form info:`, error)
return addCorsHeaders(
createErrorResponse(error.message || 'Failed to fetch form information', 500),
request
)
}
}
export async function OPTIONS(request: NextRequest) {
return addCorsHeaders(new NextResponse(null, { status: 204 }), request)
}
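
Note: the deleted route above accepted a JSON body of `{ formData, password?, email? }` and replied with the thank-you customizations once the workflow run finished. A minimal client sketch of that contract, assuming the handler was mounted at `/api/form/[identifier]` and that `createSuccessResponse` returned its payload directly as the JSON body (neither is stated in this diff):

```typescript
// Hypothetical caller for the deleted form submission endpoint; path and response shape are assumptions.
async function submitForm(
  identifier: string,
  formData: Record<string, unknown>,
  password?: string
) {
  const res = await fetch(`/api/form/${identifier}`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    credentials: 'include', // reuses the form_auth_<formId> cookie set after password/email auth
    body: JSON.stringify({ formData, password }),
  })
  if (!res.ok) throw new Error(`Form submission failed with status ${res.status}`)
  return (await res.json()) as {
    executionId?: string
    thankYouTitle?: string
    thankYouMessage?: string
  }
}
```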

View File

@@ -1,233 +0,0 @@
import { db } from '@sim/db'
import { form } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import type { NextRequest } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { encryptSecret } from '@/lib/core/security/encryption'
import { checkFormAccess, DEFAULT_FORM_CUSTOMIZATIONS } from '@/app/api/form/utils'
import { createErrorResponse, createSuccessResponse } from '@/app/api/workflows/utils'
const logger = createLogger('FormManageAPI')
const fieldConfigSchema = z.object({
name: z.string(),
type: z.string(),
label: z.string(),
description: z.string().optional(),
required: z.boolean().optional(),
})
const updateFormSchema = z.object({
identifier: z
.string()
.min(1, 'Identifier is required')
.max(100, 'Identifier must be 100 characters or less')
.regex(/^[a-z0-9-]+$/, 'Identifier can only contain lowercase letters, numbers, and hyphens')
.optional(),
title: z
.string()
.min(1, 'Title is required')
.max(200, 'Title must be 200 characters or less')
.optional(),
description: z.string().max(1000, 'Description must be 1000 characters or less').optional(),
customizations: z
.object({
primaryColor: z.string().optional(),
welcomeMessage: z
.string()
.max(500, 'Welcome message must be 500 characters or less')
.optional(),
thankYouTitle: z
.string()
.max(100, 'Thank you title must be 100 characters or less')
.optional(),
thankYouMessage: z
.string()
.max(500, 'Thank you message must be 500 characters or less')
.optional(),
logoUrl: z.string().url('Logo URL must be a valid URL').optional().or(z.literal('')),
fieldConfigs: z.array(fieldConfigSchema).optional(),
})
.optional(),
authType: z.enum(['public', 'password', 'email']).optional(),
password: z
.string()
.min(6, 'Password must be at least 6 characters')
.optional()
.or(z.literal('')),
allowedEmails: z.array(z.string()).optional(),
showBranding: z.boolean().optional(),
isActive: z.boolean().optional(),
})
export async function GET(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
try {
const session = await getSession()
if (!session) {
return createErrorResponse('Unauthorized', 401)
}
const { id } = await params
const { hasAccess, form: formRecord } = await checkFormAccess(id, session.user.id)
if (!hasAccess || !formRecord) {
return createErrorResponse('Form not found or access denied', 404)
}
const { password: _password, ...formWithoutPassword } = formRecord
return createSuccessResponse({
form: {
...formWithoutPassword,
hasPassword: !!formRecord.password,
},
})
} catch (error: any) {
logger.error('Error fetching form:', error)
return createErrorResponse(error.message || 'Failed to fetch form', 500)
}
}
export async function PATCH(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
try {
const session = await getSession()
if (!session) {
return createErrorResponse('Unauthorized', 401)
}
const { id } = await params
const { hasAccess, form: formRecord } = await checkFormAccess(id, session.user.id)
if (!hasAccess || !formRecord) {
return createErrorResponse('Form not found or access denied', 404)
}
const body = await request.json()
try {
const validatedData = updateFormSchema.parse(body)
const {
identifier,
title,
description,
customizations,
authType,
password,
allowedEmails,
showBranding,
isActive,
} = validatedData
if (identifier && identifier !== formRecord.identifier) {
const existingIdentifier = await db
.select()
.from(form)
.where(eq(form.identifier, identifier))
.limit(1)
if (existingIdentifier.length > 0) {
return createErrorResponse('Identifier already in use', 400)
}
}
if (authType === 'password' && !password && !formRecord.password) {
return createErrorResponse('Password is required when using password protection', 400)
}
if (
authType === 'email' &&
(!allowedEmails || allowedEmails.length === 0) &&
(!formRecord.allowedEmails || (formRecord.allowedEmails as string[]).length === 0)
) {
return createErrorResponse(
'At least one email or domain is required when using email access control',
400
)
}
const updateData: Record<string, any> = {
updatedAt: new Date(),
}
if (identifier !== undefined) updateData.identifier = identifier
if (title !== undefined) updateData.title = title
if (description !== undefined) updateData.description = description
if (showBranding !== undefined) updateData.showBranding = showBranding
if (isActive !== undefined) updateData.isActive = isActive
if (authType !== undefined) updateData.authType = authType
if (allowedEmails !== undefined) updateData.allowedEmails = allowedEmails
if (customizations !== undefined) {
const existingCustomizations = (formRecord.customizations as Record<string, any>) || {}
updateData.customizations = {
...DEFAULT_FORM_CUSTOMIZATIONS,
...existingCustomizations,
...customizations,
}
}
if (password) {
const { encrypted } = await encryptSecret(password)
updateData.password = encrypted
} else if (authType && authType !== 'password') {
updateData.password = null
}
await db.update(form).set(updateData).where(eq(form.id, id))
logger.info(`Form ${id} updated successfully`)
return createSuccessResponse({
message: 'Form updated successfully',
})
} catch (validationError) {
if (validationError instanceof z.ZodError) {
const errorMessage = validationError.errors[0]?.message || 'Invalid request data'
return createErrorResponse(errorMessage, 400, 'VALIDATION_ERROR')
}
throw validationError
}
} catch (error: any) {
logger.error('Error updating form:', error)
return createErrorResponse(error.message || 'Failed to update form', 500)
}
}
export async function DELETE(
request: NextRequest,
{ params }: { params: Promise<{ id: string }> }
) {
try {
const session = await getSession()
if (!session) {
return createErrorResponse('Unauthorized', 401)
}
const { id } = await params
const { hasAccess, form: formRecord } = await checkFormAccess(id, session.user.id)
if (!hasAccess || !formRecord) {
return createErrorResponse('Form not found or access denied', 404)
}
await db.update(form).set({ isActive: false, updatedAt: new Date() }).where(eq(form.id, id))
logger.info(`Form ${id} deleted (soft delete)`)
return createSuccessResponse({
message: 'Form deleted successfully',
})
} catch (error: any) {
logger.error('Error deleting form:', error)
return createErrorResponse(error.message || 'Failed to delete form', 500)
}
}

View File

@@ -1,214 +0,0 @@
import { db } from '@sim/db'
import { form } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import type { NextRequest } from 'next/server'
import { v4 as uuidv4 } from 'uuid'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { isDev } from '@/lib/core/config/feature-flags'
import { encryptSecret } from '@/lib/core/security/encryption'
import { getEmailDomain } from '@/lib/core/utils/urls'
import { deployWorkflow } from '@/lib/workflows/persistence/utils'
import {
checkWorkflowAccessForFormCreation,
DEFAULT_FORM_CUSTOMIZATIONS,
} from '@/app/api/form/utils'
import { createErrorResponse, createSuccessResponse } from '@/app/api/workflows/utils'
const logger = createLogger('FormAPI')
const fieldConfigSchema = z.object({
name: z.string(),
type: z.string(),
label: z.string(),
description: z.string().optional(),
required: z.boolean().optional(),
})
const formSchema = z.object({
workflowId: z.string().min(1, 'Workflow ID is required'),
identifier: z
.string()
.min(1, 'Identifier is required')
.max(100, 'Identifier must be 100 characters or less')
.regex(/^[a-z0-9-]+$/, 'Identifier can only contain lowercase letters, numbers, and hyphens'),
title: z.string().min(1, 'Title is required').max(200, 'Title must be 200 characters or less'),
description: z.string().max(1000, 'Description must be 1000 characters or less').optional(),
customizations: z
.object({
primaryColor: z.string().optional(),
welcomeMessage: z
.string()
.max(500, 'Welcome message must be 500 characters or less')
.optional(),
thankYouTitle: z
.string()
.max(100, 'Thank you title must be 100 characters or less')
.optional(),
thankYouMessage: z
.string()
.max(500, 'Thank you message must be 500 characters or less')
.optional(),
logoUrl: z.string().url('Logo URL must be a valid URL').optional().or(z.literal('')),
fieldConfigs: z.array(fieldConfigSchema).optional(),
})
.optional(),
authType: z.enum(['public', 'password', 'email']).default('public'),
password: z
.string()
.min(6, 'Password must be at least 6 characters')
.optional()
.or(z.literal('')),
allowedEmails: z.array(z.string()).optional().default([]),
showBranding: z.boolean().optional().default(true),
})
export async function GET(request: NextRequest) {
try {
const session = await getSession()
if (!session) {
return createErrorResponse('Unauthorized', 401)
}
const deployments = await db.select().from(form).where(eq(form.userId, session.user.id))
return createSuccessResponse({ deployments })
} catch (error: any) {
logger.error('Error fetching form deployments:', error)
return createErrorResponse(error.message || 'Failed to fetch form deployments', 500)
}
}
export async function POST(request: NextRequest) {
try {
const session = await getSession()
if (!session) {
return createErrorResponse('Unauthorized', 401)
}
const body = await request.json()
try {
const validatedData = formSchema.parse(body)
const {
workflowId,
identifier,
title,
description = '',
customizations,
authType = 'public',
password,
allowedEmails = [],
showBranding = true,
} = validatedData
if (authType === 'password' && !password) {
return createErrorResponse('Password is required when using password protection', 400)
}
if (authType === 'email' && (!Array.isArray(allowedEmails) || allowedEmails.length === 0)) {
return createErrorResponse(
'At least one email or domain is required when using email access control',
400
)
}
const existingIdentifier = await db
.select()
.from(form)
.where(eq(form.identifier, identifier))
.limit(1)
if (existingIdentifier.length > 0) {
return createErrorResponse('Identifier already in use', 400)
}
const { hasAccess, workflow: workflowRecord } = await checkWorkflowAccessForFormCreation(
workflowId,
session.user.id
)
if (!hasAccess || !workflowRecord) {
return createErrorResponse('Workflow not found or access denied', 404)
}
const result = await deployWorkflow({
workflowId,
deployedBy: session.user.id,
})
if (!result.success) {
return createErrorResponse(result.error || 'Failed to deploy workflow', 500)
}
logger.info(
`${workflowRecord.isDeployed ? 'Redeployed' : 'Auto-deployed'} workflow ${workflowId} for form (v${result.version})`
)
let encryptedPassword = null
if (authType === 'password' && password) {
const { encrypted } = await encryptSecret(password)
encryptedPassword = encrypted
}
const id = uuidv4()
logger.info('Creating form deployment with values:', {
workflowId,
identifier,
title,
authType,
hasPassword: !!encryptedPassword,
emailCount: allowedEmails?.length || 0,
showBranding,
})
const mergedCustomizations = {
...DEFAULT_FORM_CUSTOMIZATIONS,
...(customizations || {}),
}
await db.insert(form).values({
id,
workflowId,
userId: session.user.id,
identifier,
title,
description: description || '',
customizations: mergedCustomizations,
isActive: true,
authType,
password: encryptedPassword,
allowedEmails: authType === 'email' ? allowedEmails : [],
showBranding,
createdAt: new Date(),
updatedAt: new Date(),
})
const baseDomain = getEmailDomain()
const protocol = isDev ? 'http' : 'https'
const formUrl = `${protocol}://${baseDomain}/form/${identifier}`
logger.info(`Form "${title}" deployed successfully at ${formUrl}`)
return createSuccessResponse({
id,
formUrl,
message: 'Form deployment created successfully',
})
} catch (validationError) {
if (validationError instanceof z.ZodError) {
const errorMessage = validationError.errors[0]?.message || 'Invalid request data'
return createErrorResponse(errorMessage, 400, 'VALIDATION_ERROR')
}
throw validationError
}
} catch (error: any) {
logger.error('Error creating form deployment:', error)
return createErrorResponse(error.message || 'Failed to create form deployment', 500)
}
}

View File

@@ -1,367 +0,0 @@
/**
 * Tests for form API utils
 *
 * @vitest-environment node
 */
import { databaseMock, loggerMock } from '@sim/testing'
import type { NextResponse } from 'next/server'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
vi.mock('@sim/db', () => databaseMock)
vi.mock('@sim/logger', () => loggerMock)
const mockDecryptSecret = vi.fn()
vi.mock('@/lib/core/security/encryption', () => ({
decryptSecret: mockDecryptSecret,
}))
vi.mock('@/lib/core/config/feature-flags', () => ({
isDev: true,
isHosted: false,
isProd: false,
}))
vi.mock('@/lib/workspaces/permissions/utils', () => ({
hasAdminPermission: vi.fn(),
}))
describe('Form API Utils', () => {
afterEach(() => {
vi.clearAllMocks()
})
describe('Auth token utils', () => {
it.concurrent('should validate auth tokens', async () => {
const { validateAuthToken } = await import('@/lib/core/security/deployment')
const formId = 'test-form-id'
const type = 'password'
const token = Buffer.from(`${formId}:${type}:${Date.now()}`).toString('base64')
expect(typeof token).toBe('string')
expect(token.length).toBeGreaterThan(0)
const isValid = validateAuthToken(token, formId)
expect(isValid).toBe(true)
const isInvalidForm = validateAuthToken(token, 'wrong-form-id')
expect(isInvalidForm).toBe(false)
})
it.concurrent('should reject expired tokens', async () => {
const { validateAuthToken } = await import('@/lib/core/security/deployment')
const formId = 'test-form-id'
const expiredToken = Buffer.from(
`${formId}:password:${Date.now() - 25 * 60 * 60 * 1000}`
).toString('base64')
const isValid = validateAuthToken(expiredToken, formId)
expect(isValid).toBe(false)
})
it.concurrent('should validate tokens with password hash', async () => {
const { validateAuthToken } = await import('@/lib/core/security/deployment')
const crypto = await import('crypto')
const formId = 'test-form-id'
const encryptedPassword = 'encrypted-password-value'
const pwHash = crypto
.createHash('sha256')
.update(encryptedPassword)
.digest('hex')
.substring(0, 8)
const token = Buffer.from(`${formId}:password:${Date.now()}:${pwHash}`).toString('base64')
const isValid = validateAuthToken(token, formId, encryptedPassword)
expect(isValid).toBe(true)
const isInvalidPassword = validateAuthToken(token, formId, 'different-password')
expect(isInvalidPassword).toBe(false)
})
})
describe('Cookie handling', () => {
it('should set auth cookie correctly', async () => {
const { setFormAuthCookie } = await import('@/app/api/form/utils')
const mockSet = vi.fn()
const mockResponse = {
cookies: {
set: mockSet,
},
} as unknown as NextResponse
const formId = 'test-form-id'
const type = 'password'
setFormAuthCookie(mockResponse, formId, type)
expect(mockSet).toHaveBeenCalledWith({
name: `form_auth_${formId}`,
value: expect.any(String),
httpOnly: true,
secure: false, // Development mode
sameSite: 'lax',
path: '/',
maxAge: 60 * 60 * 24,
})
})
})
describe('CORS handling', () => {
it.concurrent('should add CORS headers for any origin', async () => {
const { addCorsHeaders } = await import('@/lib/core/security/deployment')
const mockRequest = {
headers: {
get: vi.fn().mockReturnValue('http://localhost:3000'),
},
} as any
const mockResponse = {
headers: {
set: vi.fn(),
},
} as unknown as NextResponse
addCorsHeaders(mockResponse, mockRequest)
expect(mockResponse.headers.set).toHaveBeenCalledWith(
'Access-Control-Allow-Origin',
'http://localhost:3000'
)
expect(mockResponse.headers.set).toHaveBeenCalledWith(
'Access-Control-Allow-Credentials',
'true'
)
expect(mockResponse.headers.set).toHaveBeenCalledWith(
'Access-Control-Allow-Methods',
'GET, POST, OPTIONS'
)
expect(mockResponse.headers.set).toHaveBeenCalledWith(
'Access-Control-Allow-Headers',
'Content-Type, X-Requested-With'
)
})
it.concurrent('should not set CORS headers when no origin', async () => {
const { addCorsHeaders } = await import('@/lib/core/security/deployment')
const mockRequest = {
headers: {
get: vi.fn().mockReturnValue(''),
},
} as any
const mockResponse = {
headers: {
set: vi.fn(),
},
} as unknown as NextResponse
addCorsHeaders(mockResponse, mockRequest)
expect(mockResponse.headers.set).not.toHaveBeenCalled()
})
})
describe('Form auth validation', () => {
beforeEach(async () => {
vi.clearAllMocks()
mockDecryptSecret.mockResolvedValue({ decrypted: 'correct-password' })
})
it('should allow access to public forms', async () => {
const { validateFormAuth } = await import('@/app/api/form/utils')
const deployment = {
id: 'form-id',
authType: 'public',
}
const mockRequest = {
cookies: {
get: vi.fn().mockReturnValue(null),
},
} as any
const result = await validateFormAuth('request-id', deployment, mockRequest)
expect(result.authorized).toBe(true)
})
it('should request password auth for GET requests', async () => {
const { validateFormAuth } = await import('@/app/api/form/utils')
const deployment = {
id: 'form-id',
authType: 'password',
}
const mockRequest = {
method: 'GET',
cookies: {
get: vi.fn().mockReturnValue(null),
},
} as any
const result = await validateFormAuth('request-id', deployment, mockRequest)
expect(result.authorized).toBe(false)
expect(result.error).toBe('auth_required_password')
})
it('should validate password for POST requests', async () => {
const { validateFormAuth } = await import('@/app/api/form/utils')
const { decryptSecret } = await import('@/lib/core/security/encryption')
const deployment = {
id: 'form-id',
authType: 'password',
password: 'encrypted-password',
}
const mockRequest = {
method: 'POST',
cookies: {
get: vi.fn().mockReturnValue(null),
},
} as any
const parsedBody = {
password: 'correct-password',
}
const result = await validateFormAuth('request-id', deployment, mockRequest, parsedBody)
expect(decryptSecret).toHaveBeenCalledWith('encrypted-password')
expect(result.authorized).toBe(true)
})
it('should reject incorrect password', async () => {
const { validateFormAuth } = await import('@/app/api/form/utils')
const deployment = {
id: 'form-id',
authType: 'password',
password: 'encrypted-password',
}
const mockRequest = {
method: 'POST',
cookies: {
get: vi.fn().mockReturnValue(null),
},
} as any
const parsedBody = {
password: 'wrong-password',
}
const result = await validateFormAuth('request-id', deployment, mockRequest, parsedBody)
expect(result.authorized).toBe(false)
expect(result.error).toBe('Invalid password')
})
it('should request email auth for email-protected forms', async () => {
const { validateFormAuth } = await import('@/app/api/form/utils')
const deployment = {
id: 'form-id',
authType: 'email',
allowedEmails: ['user@example.com', '@company.com'],
}
const mockRequest = {
method: 'GET',
cookies: {
get: vi.fn().mockReturnValue(null),
},
} as any
const result = await validateFormAuth('request-id', deployment, mockRequest)
expect(result.authorized).toBe(false)
expect(result.error).toBe('auth_required_email')
})
it('should check allowed emails for email auth', async () => {
const { validateFormAuth } = await import('@/app/api/form/utils')
const deployment = {
id: 'form-id',
authType: 'email',
allowedEmails: ['user@example.com', '@company.com'],
}
const mockRequest = {
method: 'POST',
cookies: {
get: vi.fn().mockReturnValue(null),
},
} as any
// Exact email match should authorize
const result1 = await validateFormAuth('request-id', deployment, mockRequest, {
email: 'user@example.com',
})
expect(result1.authorized).toBe(true)
// Domain match should authorize
const result2 = await validateFormAuth('request-id', deployment, mockRequest, {
email: 'other@company.com',
})
expect(result2.authorized).toBe(true)
// Unknown email should not authorize
const result3 = await validateFormAuth('request-id', deployment, mockRequest, {
email: 'user@unknown.com',
})
expect(result3.authorized).toBe(false)
expect(result3.error).toBe('Email not authorized for this form')
})
it('should require password when formData is present without password', async () => {
const { validateFormAuth } = await import('@/app/api/form/utils')
const deployment = {
id: 'form-id',
authType: 'password',
password: 'encrypted-password',
}
const mockRequest = {
method: 'POST',
cookies: {
get: vi.fn().mockReturnValue(null),
},
} as any
const parsedBody = {
formData: { field1: 'value1' },
// No password provided
}
const result = await validateFormAuth('request-id', deployment, mockRequest, parsedBody)
expect(result.authorized).toBe(false)
expect(result.error).toBe('auth_required_password')
})
})
describe('Default customizations', () => {
it.concurrent('should have correct default values', async () => {
const { DEFAULT_FORM_CUSTOMIZATIONS } = await import('@/app/api/form/utils')
expect(DEFAULT_FORM_CUSTOMIZATIONS).toEqual({
welcomeMessage: '',
thankYouTitle: 'Thank you!',
thankYouMessage: 'Your response has been submitted successfully.',
})
})
})
})
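
These deleted tests fix the token contract for `validateAuthToken` without showing its implementation: a base64-encoded `formId:type:timestamp[:pwHash]` string, a lifetime of roughly 24 hours (the expiry test uses a 25-hour-old timestamp), and an optional hash that is the first 8 hex characters of SHA-256 over the encrypted password. A sketch of that inferred contract follows; the real helper in `@/lib/core/security/deployment` may differ in details:

```typescript
import { createHash } from 'crypto'

// Reconstructed from the test assertions above; NOT the actual implementation.
const TOKEN_TTL_MS = 24 * 60 * 60 * 1000

function checkAuthToken(token: string, formId: string, encryptedPassword?: string | null): boolean {
  const [id, , issuedAt, pwHash] = Buffer.from(token, 'base64').toString('utf8').split(':')
  if (id !== formId) return false
  if (Date.now() - Number(issuedAt) > TOKEN_TTL_MS) return false
  if (encryptedPassword) {
    const expected = createHash('sha256').update(encryptedPassword).digest('hex').substring(0, 8)
    if (pwHash !== expected) return false
  }
  return true
}
```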

View File

@@ -1,204 +0,0 @@
import { db } from '@sim/db'
import { form, workflow } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import type { NextRequest, NextResponse } from 'next/server'
import {
isEmailAllowed,
setDeploymentAuthCookie,
validateAuthToken,
} from '@/lib/core/security/deployment'
import { decryptSecret } from '@/lib/core/security/encryption'
import { hasAdminPermission } from '@/lib/workspaces/permissions/utils'
const logger = createLogger('FormAuthUtils')
export function setFormAuthCookie(
response: NextResponse,
formId: string,
type: string,
encryptedPassword?: string | null
): void {
setDeploymentAuthCookie(response, 'form', formId, type, encryptedPassword)
}
/**
* Check if user has permission to create a form for a specific workflow
* Either the user owns the workflow directly OR has admin permission for the workflow's workspace
*/
export async function checkWorkflowAccessForFormCreation(
workflowId: string,
userId: string
): Promise<{ hasAccess: boolean; workflow?: any }> {
const workflowData = await db.select().from(workflow).where(eq(workflow.id, workflowId)).limit(1)
if (workflowData.length === 0) {
return { hasAccess: false }
}
const workflowRecord = workflowData[0]
if (workflowRecord.userId === userId) {
return { hasAccess: true, workflow: workflowRecord }
}
if (workflowRecord.workspaceId) {
const hasAdmin = await hasAdminPermission(userId, workflowRecord.workspaceId)
if (hasAdmin) {
return { hasAccess: true, workflow: workflowRecord }
}
}
return { hasAccess: false }
}
/**
* Check if user has access to view/edit/delete a specific form
* Either the user owns the form directly OR has admin permission for the workflow's workspace
*/
export async function checkFormAccess(
formId: string,
userId: string
): Promise<{ hasAccess: boolean; form?: any }> {
const formData = await db
.select({
form: form,
workflowWorkspaceId: workflow.workspaceId,
})
.from(form)
.innerJoin(workflow, eq(form.workflowId, workflow.id))
.where(eq(form.id, formId))
.limit(1)
if (formData.length === 0) {
return { hasAccess: false }
}
const { form: formRecord, workflowWorkspaceId } = formData[0]
if (formRecord.userId === userId) {
return { hasAccess: true, form: formRecord }
}
if (workflowWorkspaceId) {
const hasAdmin = await hasAdminPermission(userId, workflowWorkspaceId)
if (hasAdmin) {
return { hasAccess: true, form: formRecord }
}
}
return { hasAccess: false }
}
export async function validateFormAuth(
requestId: string,
deployment: any,
request: NextRequest,
parsedBody?: any
): Promise<{ authorized: boolean; error?: string }> {
const authType = deployment.authType || 'public'
if (authType === 'public') {
return { authorized: true }
}
const cookieName = `form_auth_${deployment.id}`
const authCookie = request.cookies.get(cookieName)
if (authCookie && validateAuthToken(authCookie.value, deployment.id, deployment.password)) {
return { authorized: true }
}
if (authType === 'password') {
if (request.method === 'GET') {
return { authorized: false, error: 'auth_required_password' }
}
try {
if (!parsedBody) {
return { authorized: false, error: 'Password is required' }
}
const { password, formData } = parsedBody
if (formData && !password) {
return { authorized: false, error: 'auth_required_password' }
}
if (!password) {
return { authorized: false, error: 'Password is required' }
}
if (!deployment.password) {
logger.error(`[${requestId}] No password set for password-protected form: ${deployment.id}`)
return { authorized: false, error: 'Authentication configuration error' }
}
const { decrypted } = await decryptSecret(deployment.password)
if (password !== decrypted) {
return { authorized: false, error: 'Invalid password' }
}
return { authorized: true }
} catch (error) {
logger.error(`[${requestId}] Error validating password:`, error)
return { authorized: false, error: 'Authentication error' }
}
}
if (authType === 'email') {
if (request.method === 'GET') {
return { authorized: false, error: 'auth_required_email' }
}
try {
if (!parsedBody) {
return { authorized: false, error: 'Email is required' }
}
const { email, formData } = parsedBody
if (formData && !email) {
return { authorized: false, error: 'auth_required_email' }
}
if (!email) {
return { authorized: false, error: 'Email is required' }
}
const allowedEmails: string[] = deployment.allowedEmails || []
if (isEmailAllowed(email, allowedEmails)) {
return { authorized: true }
}
return { authorized: false, error: 'Email not authorized for this form' }
} catch (error) {
logger.error(`[${requestId}] Error validating email:`, error)
return { authorized: false, error: 'Authentication error' }
}
}
return { authorized: false, error: 'Unsupported authentication type' }
}
/**
* Form customizations interface
*/
export interface FormCustomizations {
primaryColor?: string
welcomeMessage?: string
thankYouTitle?: string
thankYouMessage?: string
logoUrl?: string
}
/**
* Default form customizations
* Note: primaryColor is intentionally undefined to allow thank you screen to use its green default
*/
export const DEFAULT_FORM_CUSTOMIZATIONS: FormCustomizations = {
welcomeMessage: '',
thankYouTitle: 'Thank you!',
thankYouMessage: 'Your response has been submitted successfully.',
}
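
`isEmailAllowed` is imported from `@/lib/core/security/deployment` but not shown in this diff. The deleted tests earlier indicate it accepts either an exact address (`user@example.com`) or a whole domain written with a leading `@` (`@company.com`). A short sketch of that matching rule under those assumptions:

```typescript
// Assumed semantics reconstructed from the test expectations; the real helper may normalize differently.
function emailAllowed(email: string, allowed: string[]): boolean {
  const candidate = email.trim().toLowerCase()
  return allowed.some((entry) => {
    const rule = entry.trim().toLowerCase()
    return rule.startsWith('@') ? candidate.endsWith(rule) : candidate === rule
  })
}

// emailAllowed('other@company.com', ['user@example.com', '@company.com']) // true (domain match)
// emailAllowed('user@unknown.com', ['user@example.com', '@company.com'])  // false
```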

View File

@@ -1,71 +0,0 @@
import { db } from '@sim/db'
import { form } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import type { NextRequest } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { createErrorResponse, createSuccessResponse } from '@/app/api/workflows/utils'
const logger = createLogger('FormValidateAPI')
const validateQuerySchema = z.object({
identifier: z
.string()
.min(1, 'Identifier is required')
.regex(/^[a-z0-9-]+$/, 'Identifier can only contain lowercase letters, numbers, and hyphens')
.max(100, 'Identifier must be 100 characters or less'),
})
/**
* GET endpoint to validate form identifier availability
*/
export async function GET(request: NextRequest) {
try {
const session = await getSession()
if (!session?.user?.id) {
return createErrorResponse('Unauthorized', 401)
}
const { searchParams } = new URL(request.url)
const identifier = searchParams.get('identifier')
const validation = validateQuerySchema.safeParse({ identifier })
if (!validation.success) {
const errorMessage = validation.error.errors[0]?.message || 'Invalid identifier'
logger.warn(`Validation error: ${errorMessage}`)
if (identifier && !/^[a-z0-9-]+$/.test(identifier)) {
return createSuccessResponse({
available: false,
error: errorMessage,
})
}
return createErrorResponse(errorMessage, 400)
}
const { identifier: validatedIdentifier } = validation.data
const existingForm = await db
.select({ id: form.id })
.from(form)
.where(eq(form.identifier, validatedIdentifier))
.limit(1)
const isAvailable = existingForm.length === 0
logger.debug(
`Identifier "${validatedIdentifier}" availability check: ${isAvailable ? 'available' : 'taken'}`
)
return createSuccessResponse({
available: isAvailable,
error: isAvailable ? null : 'This identifier is already in use',
})
} catch (error: unknown) {
const message = error instanceof Error ? error.message : 'Failed to validate identifier'
logger.error('Error validating form identifier:', error)
return createErrorResponse(message, 500)
}
}
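
For completeness, a hypothetical caller of the deleted availability check; the route path is an assumption (the diff does not show the file location) and the endpoint requires an authenticated session:

```typescript
// Path is a guess based on the surrounding form API routes.
async function isIdentifierAvailable(identifier: string): Promise<boolean> {
  const res = await fetch(`/api/form/validate?identifier=${encodeURIComponent(identifier)}`, {
    credentials: 'include',
  })
  if (!res.ok) throw new Error(`Availability check failed with status ${res.status}`)
  const data = (await res.json()) as { available: boolean; error?: string | null }
  return data.available
}
```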

View File

@@ -3,7 +3,6 @@
*
* @vitest-environment node
*/
import { loggerMock } from '@sim/testing'
import { NextRequest } from 'next/server'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import { createMockRequest } from '@/app/api/__test-utils__/utils'
@@ -83,7 +82,14 @@ vi.mock('@/lib/execution/isolated-vm', () => ({
}),
}))
vi.mock('@sim/logger', () => loggerMock)
vi.mock('@sim/logger', () => ({
createLogger: vi.fn(() => ({
info: vi.fn(),
error: vi.fn(),
warn: vi.fn(),
debug: vi.fn(),
})),
}))
vi.mock('@/lib/execution/e2b', () => ({
executeInE2B: vi.fn(),

View File

@@ -21,6 +21,7 @@ export async function POST(req: NextRequest) {
const requestId = generateRequestId()
try {
// Get user session
const session = await getSession()
if (!session?.user?.email) {
logger.warn(`[${requestId}] Unauthorized help request attempt`)
@@ -29,20 +30,20 @@ export async function POST(req: NextRequest) {
const email = session.user.email
// Handle multipart form data
const formData = await req.formData()
// Extract form fields
const subject = formData.get('subject') as string
const message = formData.get('message') as string
const type = formData.get('type') as string
const workflowId = formData.get('workflowId') as string | null
const workspaceId = formData.get('workspaceId') as string
const userAgent = formData.get('userAgent') as string | null
logger.info(`[${requestId}] Processing help request`, {
type,
email: `${email.substring(0, 3)}***`, // Log partial email for privacy
})
// Validate the form data
const validationResult = helpFormSchema.safeParse({
subject,
message,
@@ -59,6 +60,7 @@ export async function POST(req: NextRequest) {
)
}
// Extract images
const images: { filename: string; content: Buffer; contentType: string }[] = []
for (const [key, value] of formData.entries()) {
@@ -79,14 +81,10 @@ export async function POST(req: NextRequest) {
logger.debug(`[${requestId}] Help request includes ${images.length} images`)
const userId = session.user.id
// Prepare email content
let emailText = `
Type: ${type}
From: ${email}
User ID: ${userId}
Workspace ID: ${workspaceId ?? 'N/A'}
Workflow ID: ${workflowId ?? 'N/A'}
Browser: ${userAgent ?? 'N/A'}
${message}
`
@@ -117,6 +115,7 @@ ${message}
logger.info(`[${requestId}] Help request email sent successfully`)
// Send confirmation email to the user
try {
const confirmationHtml = await renderHelpConfirmationEmail(
type as 'bug' | 'feedback' | 'feature_request' | 'other',

View File

@@ -4,15 +4,18 @@
*
* @vitest-environment node
*/
import { createEnvMock, createMockLogger } from '@sim/testing'
import { createEnvMock } from '@sim/testing'
import { beforeEach, describe, expect, it, vi } from 'vitest'
const loggerMock = vi.hoisted(() => ({
createLogger: () => createMockLogger(),
}))
vi.mock('drizzle-orm')
vi.mock('@sim/logger', () => loggerMock)
vi.mock('@sim/logger', () => ({
createLogger: vi.fn(() => ({
info: vi.fn(),
debug: vi.fn(),
warn: vi.fn(),
error: vi.fn(),
})),
}))
vi.mock('@sim/db')
vi.mock('@/lib/knowledge/documents/utils', () => ({
retryWithExponentialBackoff: (fn: any) => fn(),

View File

@@ -1,166 +0,0 @@
import { db } from '@sim/db'
import { member, permissionGroup, permissionGroupMember } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq, inArray } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { hasAccessControlAccess } from '@/lib/billing'
const logger = createLogger('PermissionGroupBulkMembers')
async function getPermissionGroupWithAccess(groupId: string, userId: string) {
const [group] = await db
.select({
id: permissionGroup.id,
organizationId: permissionGroup.organizationId,
})
.from(permissionGroup)
.where(eq(permissionGroup.id, groupId))
.limit(1)
if (!group) return null
const [membership] = await db
.select({ role: member.role })
.from(member)
.where(and(eq(member.userId, userId), eq(member.organizationId, group.organizationId)))
.limit(1)
if (!membership) return null
return { group, role: membership.role }
}
const bulkAddSchema = z.object({
userIds: z.array(z.string()).optional(),
addAllOrgMembers: z.boolean().optional(),
})
export async function POST(req: NextRequest, { params }: { params: Promise<{ id: string }> }) {
const session = await getSession()
if (!session?.user?.id) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
const { id } = await params
try {
const hasAccess = await hasAccessControlAccess(session.user.id)
if (!hasAccess) {
return NextResponse.json(
{ error: 'Access Control is an Enterprise feature' },
{ status: 403 }
)
}
const result = await getPermissionGroupWithAccess(id, session.user.id)
if (!result) {
return NextResponse.json({ error: 'Permission group not found' }, { status: 404 })
}
if (result.role !== 'admin' && result.role !== 'owner') {
return NextResponse.json({ error: 'Admin or owner permissions required' }, { status: 403 })
}
const body = await req.json()
const { userIds, addAllOrgMembers } = bulkAddSchema.parse(body)
let targetUserIds: string[] = []
if (addAllOrgMembers) {
const orgMembers = await db
.select({ userId: member.userId })
.from(member)
.where(eq(member.organizationId, result.group.organizationId))
targetUserIds = orgMembers.map((m) => m.userId)
} else if (userIds && userIds.length > 0) {
const validMembers = await db
.select({ userId: member.userId })
.from(member)
.where(
and(
eq(member.organizationId, result.group.organizationId),
inArray(member.userId, userIds)
)
)
targetUserIds = validMembers.map((m) => m.userId)
}
if (targetUserIds.length === 0) {
return NextResponse.json({ added: 0, moved: 0 })
}
const existingMemberships = await db
.select({
id: permissionGroupMember.id,
userId: permissionGroupMember.userId,
permissionGroupId: permissionGroupMember.permissionGroupId,
})
.from(permissionGroupMember)
.where(inArray(permissionGroupMember.userId, targetUserIds))
const alreadyInThisGroup = new Set(
existingMemberships.filter((m) => m.permissionGroupId === id).map((m) => m.userId)
)
const usersToAdd = targetUserIds.filter((uid) => !alreadyInThisGroup.has(uid))
if (usersToAdd.length === 0) {
return NextResponse.json({ added: 0, moved: 0 })
}
const membershipsToDelete = existingMemberships.filter(
(m) => m.permissionGroupId !== id && usersToAdd.includes(m.userId)
)
const movedCount = membershipsToDelete.length
await db.transaction(async (tx) => {
if (membershipsToDelete.length > 0) {
await tx.delete(permissionGroupMember).where(
inArray(
permissionGroupMember.id,
membershipsToDelete.map((m) => m.id)
)
)
}
const newMembers = usersToAdd.map((userId) => ({
id: crypto.randomUUID(),
permissionGroupId: id,
userId,
assignedBy: session.user.id,
assignedAt: new Date(),
}))
await tx.insert(permissionGroupMember).values(newMembers)
})
logger.info('Bulk added members to permission group', {
permissionGroupId: id,
addedCount: usersToAdd.length,
movedCount,
assignedBy: session.user.id,
})
return NextResponse.json({ added: usersToAdd.length, moved: movedCount })
} catch (error) {
if (error instanceof z.ZodError) {
return NextResponse.json({ error: error.errors[0].message }, { status: 400 })
}
if (
error instanceof Error &&
error.message.includes('permission_group_member_user_id_unique')
) {
return NextResponse.json(
{ error: 'One or more users are already in a permission group' },
{ status: 409 }
)
}
logger.error('Error bulk adding members to permission group', error)
return NextResponse.json({ error: 'Failed to add members' }, { status: 500 })
}
}

View File

@@ -1,229 +0,0 @@
import { db } from '@sim/db'
import { member, permissionGroup, permissionGroupMember, user } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { hasAccessControlAccess } from '@/lib/billing'
const logger = createLogger('PermissionGroupMembers')
async function getPermissionGroupWithAccess(groupId: string, userId: string) {
const [group] = await db
.select({
id: permissionGroup.id,
organizationId: permissionGroup.organizationId,
})
.from(permissionGroup)
.where(eq(permissionGroup.id, groupId))
.limit(1)
if (!group) return null
const [membership] = await db
.select({ role: member.role })
.from(member)
.where(and(eq(member.userId, userId), eq(member.organizationId, group.organizationId)))
.limit(1)
if (!membership) return null
return { group, role: membership.role }
}
export async function GET(req: NextRequest, { params }: { params: Promise<{ id: string }> }) {
const session = await getSession()
if (!session?.user?.id) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
const { id } = await params
const result = await getPermissionGroupWithAccess(id, session.user.id)
if (!result) {
return NextResponse.json({ error: 'Permission group not found' }, { status: 404 })
}
const members = await db
.select({
id: permissionGroupMember.id,
userId: permissionGroupMember.userId,
assignedAt: permissionGroupMember.assignedAt,
userName: user.name,
userEmail: user.email,
userImage: user.image,
})
.from(permissionGroupMember)
.leftJoin(user, eq(permissionGroupMember.userId, user.id))
.where(eq(permissionGroupMember.permissionGroupId, id))
return NextResponse.json({ members })
}
const addMemberSchema = z.object({
userId: z.string().min(1),
})
export async function POST(req: NextRequest, { params }: { params: Promise<{ id: string }> }) {
const session = await getSession()
if (!session?.user?.id) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
const { id } = await params
try {
const hasAccess = await hasAccessControlAccess(session.user.id)
if (!hasAccess) {
return NextResponse.json(
{ error: 'Access Control is an Enterprise feature' },
{ status: 403 }
)
}
const result = await getPermissionGroupWithAccess(id, session.user.id)
if (!result) {
return NextResponse.json({ error: 'Permission group not found' }, { status: 404 })
}
if (result.role !== 'admin' && result.role !== 'owner') {
return NextResponse.json({ error: 'Admin or owner permissions required' }, { status: 403 })
}
const body = await req.json()
const { userId } = addMemberSchema.parse(body)
const [orgMember] = await db
.select({ id: member.id })
.from(member)
.where(and(eq(member.userId, userId), eq(member.organizationId, result.group.organizationId)))
.limit(1)
if (!orgMember) {
return NextResponse.json(
{ error: 'User is not a member of this organization' },
{ status: 400 }
)
}
const [existingMembership] = await db
.select({
id: permissionGroupMember.id,
permissionGroupId: permissionGroupMember.permissionGroupId,
})
.from(permissionGroupMember)
.where(eq(permissionGroupMember.userId, userId))
.limit(1)
if (existingMembership?.permissionGroupId === id) {
return NextResponse.json(
{ error: 'User is already in this permission group' },
{ status: 409 }
)
}
const newMember = await db.transaction(async (tx) => {
if (existingMembership) {
await tx
.delete(permissionGroupMember)
.where(eq(permissionGroupMember.id, existingMembership.id))
}
const memberData = {
id: crypto.randomUUID(),
permissionGroupId: id,
userId,
assignedBy: session.user.id,
assignedAt: new Date(),
}
await tx.insert(permissionGroupMember).values(memberData)
return memberData
})
logger.info('Added member to permission group', {
permissionGroupId: id,
userId,
assignedBy: session.user.id,
})
return NextResponse.json({ member: newMember }, { status: 201 })
} catch (error) {
if (error instanceof z.ZodError) {
return NextResponse.json({ error: error.errors[0].message }, { status: 400 })
}
if (
error instanceof Error &&
error.message.includes('permission_group_member_user_id_unique')
) {
return NextResponse.json({ error: 'User is already in a permission group' }, { status: 409 })
}
logger.error('Error adding member to permission group', error)
return NextResponse.json({ error: 'Failed to add member' }, { status: 500 })
}
}
export async function DELETE(req: NextRequest, { params }: { params: Promise<{ id: string }> }) {
const session = await getSession()
if (!session?.user?.id) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
const { id } = await params
const { searchParams } = new URL(req.url)
const memberId = searchParams.get('memberId')
if (!memberId) {
return NextResponse.json({ error: 'memberId is required' }, { status: 400 })
}
try {
const hasAccess = await hasAccessControlAccess(session.user.id)
if (!hasAccess) {
return NextResponse.json(
{ error: 'Access Control is an Enterprise feature' },
{ status: 403 }
)
}
const result = await getPermissionGroupWithAccess(id, session.user.id)
if (!result) {
return NextResponse.json({ error: 'Permission group not found' }, { status: 404 })
}
if (result.role !== 'admin' && result.role !== 'owner') {
return NextResponse.json({ error: 'Admin or owner permissions required' }, { status: 403 })
}
const [memberToRemove] = await db
.select()
.from(permissionGroupMember)
.where(
and(eq(permissionGroupMember.id, memberId), eq(permissionGroupMember.permissionGroupId, id))
)
.limit(1)
if (!memberToRemove) {
return NextResponse.json({ error: 'Member not found' }, { status: 404 })
}
await db.delete(permissionGroupMember).where(eq(permissionGroupMember.id, memberId))
logger.info('Removed member from permission group', {
permissionGroupId: id,
memberId,
userId: session.user.id,
})
return NextResponse.json({ success: true })
} catch (error) {
logger.error('Error removing member from permission group', error)
return NextResponse.json({ error: 'Failed to remove member' }, { status: 500 })
}
}

View File

@@ -1,212 +0,0 @@
import { db } from '@sim/db'
import { member, permissionGroup, permissionGroupMember } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { hasAccessControlAccess } from '@/lib/billing'
import {
type PermissionGroupConfig,
parsePermissionGroupConfig,
} from '@/lib/permission-groups/types'
const logger = createLogger('PermissionGroup')
const configSchema = z.object({
allowedIntegrations: z.array(z.string()).nullable().optional(),
allowedModelProviders: z.array(z.string()).nullable().optional(),
hideTraceSpans: z.boolean().optional(),
hideKnowledgeBaseTab: z.boolean().optional(),
hideCopilot: z.boolean().optional(),
hideApiKeysTab: z.boolean().optional(),
hideEnvironmentTab: z.boolean().optional(),
hideFilesTab: z.boolean().optional(),
disableMcpTools: z.boolean().optional(),
disableCustomTools: z.boolean().optional(),
hideTemplates: z.boolean().optional(),
})
const updateSchema = z.object({
name: z.string().trim().min(1).max(100).optional(),
description: z.string().max(500).nullable().optional(),
config: configSchema.optional(),
})
async function getPermissionGroupWithAccess(groupId: string, userId: string) {
const [group] = await db
.select({
id: permissionGroup.id,
organizationId: permissionGroup.organizationId,
name: permissionGroup.name,
description: permissionGroup.description,
config: permissionGroup.config,
createdBy: permissionGroup.createdBy,
createdAt: permissionGroup.createdAt,
updatedAt: permissionGroup.updatedAt,
})
.from(permissionGroup)
.where(eq(permissionGroup.id, groupId))
.limit(1)
if (!group) return null
const [membership] = await db
.select({ role: member.role })
.from(member)
.where(and(eq(member.userId, userId), eq(member.organizationId, group.organizationId)))
.limit(1)
if (!membership) return null
return { group, role: membership.role }
}
export async function GET(req: NextRequest, { params }: { params: Promise<{ id: string }> }) {
const session = await getSession()
if (!session?.user?.id) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
const { id } = await params
const result = await getPermissionGroupWithAccess(id, session.user.id)
if (!result) {
return NextResponse.json({ error: 'Permission group not found' }, { status: 404 })
}
return NextResponse.json({
permissionGroup: {
...result.group,
config: parsePermissionGroupConfig(result.group.config),
},
})
}
export async function PUT(req: NextRequest, { params }: { params: Promise<{ id: string }> }) {
const session = await getSession()
if (!session?.user?.id) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
const { id } = await params
try {
const hasAccess = await hasAccessControlAccess(session.user.id)
if (!hasAccess) {
return NextResponse.json(
{ error: 'Access Control is an Enterprise feature' },
{ status: 403 }
)
}
const result = await getPermissionGroupWithAccess(id, session.user.id)
if (!result) {
return NextResponse.json({ error: 'Permission group not found' }, { status: 404 })
}
if (result.role !== 'admin' && result.role !== 'owner') {
return NextResponse.json({ error: 'Admin or owner permissions required' }, { status: 403 })
}
const body = await req.json()
const updates = updateSchema.parse(body)
if (updates.name) {
const existingGroup = await db
.select({ id: permissionGroup.id })
.from(permissionGroup)
.where(
and(
eq(permissionGroup.organizationId, result.group.organizationId),
eq(permissionGroup.name, updates.name)
)
)
.limit(1)
if (existingGroup.length > 0 && existingGroup[0].id !== id) {
return NextResponse.json(
{ error: 'A permission group with this name already exists' },
{ status: 409 }
)
}
}
const currentConfig = parsePermissionGroupConfig(result.group.config)
const newConfig: PermissionGroupConfig = updates.config
? { ...currentConfig, ...updates.config }
: currentConfig
await db
.update(permissionGroup)
.set({
...(updates.name !== undefined && { name: updates.name }),
...(updates.description !== undefined && { description: updates.description }),
config: newConfig,
updatedAt: new Date(),
})
.where(eq(permissionGroup.id, id))
const [updated] = await db
.select()
.from(permissionGroup)
.where(eq(permissionGroup.id, id))
.limit(1)
return NextResponse.json({
permissionGroup: {
...updated,
config: parsePermissionGroupConfig(updated.config),
},
})
} catch (error) {
if (error instanceof z.ZodError) {
return NextResponse.json({ error: error.errors[0].message }, { status: 400 })
}
logger.error('Error updating permission group', error)
return NextResponse.json({ error: 'Failed to update permission group' }, { status: 500 })
}
}
export async function DELETE(req: NextRequest, { params }: { params: Promise<{ id: string }> }) {
const session = await getSession()
if (!session?.user?.id) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
const { id } = await params
try {
const hasAccess = await hasAccessControlAccess(session.user.id)
if (!hasAccess) {
return NextResponse.json(
{ error: 'Access Control is an Enterprise feature' },
{ status: 403 }
)
}
const result = await getPermissionGroupWithAccess(id, session.user.id)
if (!result) {
return NextResponse.json({ error: 'Permission group not found' }, { status: 404 })
}
if (result.role !== 'admin' && result.role !== 'owner') {
return NextResponse.json({ error: 'Admin or owner permissions required' }, { status: 403 })
}
await db.delete(permissionGroupMember).where(eq(permissionGroupMember.permissionGroupId, id))
await db.delete(permissionGroup).where(eq(permissionGroup.id, id))
logger.info('Deleted permission group', { permissionGroupId: id, userId: session.user.id })
return NextResponse.json({ success: true })
} catch (error) {
logger.error('Error deleting permission group', error)
return NextResponse.json({ error: 'Failed to delete permission group' }, { status: 500 })
}
}
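
As a usage sketch for the handlers above: a PUT request whose body matches `updateSchema`, with a partial `config` that the handler merges over the stored one. The `/api/permission-groups/[id]` path and the example field values are assumptions; only the body shape comes from the schema in this diff.

```typescript
// Sketch only: path and example values are assumed; the body shape follows updateSchema.
async function updatePermissionGroup(groupId: string) {
  const res = await fetch(`/api/permission-groups/${groupId}`, {
    method: 'PUT',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
      name: 'Support Engineers',
      description: 'Restricted access for support staff',
      config: {
        // Partial config: the handler merges this over the parsed current config
        allowedIntegrations: ['slack', 'gmail'],
        disableCustomTools: true,
      },
    }),
  })
  if (res.status === 409) throw new Error('A permission group with this name already exists')
  if (!res.ok) throw new Error('Failed to update permission group')
  const { permissionGroup } = await res.json()
  return permissionGroup
}
```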

View File

@@ -1,185 +0,0 @@
import { db } from '@sim/db'
import { member, organization, permissionGroup, permissionGroupMember, user } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, count, desc, eq } from 'drizzle-orm'
import { NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { hasAccessControlAccess } from '@/lib/billing'
import {
DEFAULT_PERMISSION_GROUP_CONFIG,
type PermissionGroupConfig,
parsePermissionGroupConfig,
} from '@/lib/permission-groups/types'
const logger = createLogger('PermissionGroups')
const configSchema = z.object({
allowedIntegrations: z.array(z.string()).nullable().optional(),
allowedModelProviders: z.array(z.string()).nullable().optional(),
hideTraceSpans: z.boolean().optional(),
hideKnowledgeBaseTab: z.boolean().optional(),
hideCopilot: z.boolean().optional(),
hideApiKeysTab: z.boolean().optional(),
hideEnvironmentTab: z.boolean().optional(),
hideFilesTab: z.boolean().optional(),
disableMcpTools: z.boolean().optional(),
disableCustomTools: z.boolean().optional(),
hideTemplates: z.boolean().optional(),
})
const createSchema = z.object({
organizationId: z.string().min(1),
name: z.string().trim().min(1).max(100),
description: z.string().max(500).optional(),
config: configSchema.optional(),
})
export async function GET(req: Request) {
const session = await getSession()
if (!session?.user?.id) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
const { searchParams } = new URL(req.url)
const organizationId = searchParams.get('organizationId')
if (!organizationId) {
return NextResponse.json({ error: 'organizationId is required' }, { status: 400 })
}
const membership = await db
.select({ id: member.id, role: member.role })
.from(member)
.where(and(eq(member.userId, session.user.id), eq(member.organizationId, organizationId)))
.limit(1)
if (membership.length === 0) {
return NextResponse.json({ error: 'Forbidden' }, { status: 403 })
}
const groups = await db
.select({
id: permissionGroup.id,
name: permissionGroup.name,
description: permissionGroup.description,
config: permissionGroup.config,
createdBy: permissionGroup.createdBy,
createdAt: permissionGroup.createdAt,
updatedAt: permissionGroup.updatedAt,
creatorName: user.name,
creatorEmail: user.email,
})
.from(permissionGroup)
.leftJoin(user, eq(permissionGroup.createdBy, user.id))
.where(eq(permissionGroup.organizationId, organizationId))
.orderBy(desc(permissionGroup.createdAt))
const groupsWithCounts = await Promise.all(
groups.map(async (group) => {
const [memberCount] = await db
.select({ count: count() })
.from(permissionGroupMember)
.where(eq(permissionGroupMember.permissionGroupId, group.id))
return {
...group,
config: parsePermissionGroupConfig(group.config),
memberCount: memberCount?.count ?? 0,
}
})
)
return NextResponse.json({ permissionGroups: groupsWithCounts })
}
export async function POST(req: Request) {
const session = await getSession()
if (!session?.user?.id) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
try {
const hasAccess = await hasAccessControlAccess(session.user.id)
if (!hasAccess) {
return NextResponse.json(
{ error: 'Access Control is an Enterprise feature' },
{ status: 403 }
)
}
const body = await req.json()
const { organizationId, name, description, config } = createSchema.parse(body)
const membership = await db
.select({ id: member.id, role: member.role })
.from(member)
.where(and(eq(member.userId, session.user.id), eq(member.organizationId, organizationId)))
.limit(1)
const role = membership[0]?.role
if (membership.length === 0 || (role !== 'admin' && role !== 'owner')) {
return NextResponse.json({ error: 'Admin or owner permissions required' }, { status: 403 })
}
const orgExists = await db
.select({ id: organization.id })
.from(organization)
.where(eq(organization.id, organizationId))
.limit(1)
if (orgExists.length === 0) {
return NextResponse.json({ error: 'Organization not found' }, { status: 404 })
}
const existingGroup = await db
.select({ id: permissionGroup.id })
.from(permissionGroup)
.where(
and(eq(permissionGroup.organizationId, organizationId), eq(permissionGroup.name, name))
)
.limit(1)
if (existingGroup.length > 0) {
return NextResponse.json(
{ error: 'A permission group with this name already exists' },
{ status: 409 }
)
}
const groupConfig: PermissionGroupConfig = {
...DEFAULT_PERMISSION_GROUP_CONFIG,
...config,
}
const now = new Date()
const newGroup = {
id: crypto.randomUUID(),
organizationId,
name,
description: description || null,
config: groupConfig,
createdBy: session.user.id,
createdAt: now,
updatedAt: now,
}
await db.insert(permissionGroup).values(newGroup)
logger.info('Created permission group', {
permissionGroupId: newGroup.id,
organizationId,
userId: session.user.id,
})
return NextResponse.json({ permissionGroup: newGroup }, { status: 201 })
} catch (error) {
if (error instanceof z.ZodError) {
return NextResponse.json({ error: error.errors[0].message }, { status: 400 })
}
logger.error('Error creating permission group', error)
return NextResponse.json({ error: 'Failed to create permission group' }, { status: 500 })
}
}
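
A sketch of a POST body that satisfies `createSchema` above. The collection path (`/api/permission-groups`) is an assumption; the required `organizationId` and optional `description`/`config` fields come from the schema, and the 201/409 handling mirrors the handler's responses.

```typescript
// Hypothetical create call; only the body shape is taken from createSchema.
async function createPermissionGroup(organizationId: string) {
  const res = await fetch('/api/permission-groups', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
      organizationId,
      name: 'Read-only Analysts',
      description: 'No custom tools, no environment access',
      config: {
        disableCustomTools: true,
        hideEnvironmentTab: true,
      },
    }),
  })
  if (res.status === 409) throw new Error('A permission group with this name already exists')
  if (!res.ok) throw new Error('Failed to create permission group')
  return (await res.json()).permissionGroup // 201 with the inserted row
}
```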

View File

@@ -1,72 +0,0 @@
import { db } from '@sim/db'
import { member, permissionGroup, permissionGroupMember } from '@sim/db/schema'
import { and, eq } from 'drizzle-orm'
import { NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { isOrganizationOnEnterprisePlan } from '@/lib/billing'
import { parsePermissionGroupConfig } from '@/lib/permission-groups/types'
export async function GET(req: Request) {
const session = await getSession()
if (!session?.user?.id) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
const { searchParams } = new URL(req.url)
const organizationId = searchParams.get('organizationId')
if (!organizationId) {
return NextResponse.json({ error: 'organizationId is required' }, { status: 400 })
}
const [membership] = await db
.select({ id: member.id })
.from(member)
.where(and(eq(member.userId, session.user.id), eq(member.organizationId, organizationId)))
.limit(1)
if (!membership) {
return NextResponse.json({ error: 'Not a member of this organization' }, { status: 403 })
}
// Short-circuit: if org is not on enterprise plan, ignore permission configs
const isEnterprise = await isOrganizationOnEnterprisePlan(organizationId)
if (!isEnterprise) {
return NextResponse.json({
permissionGroupId: null,
groupName: null,
config: null,
})
}
const [groupMembership] = await db
.select({
permissionGroupId: permissionGroupMember.permissionGroupId,
config: permissionGroup.config,
groupName: permissionGroup.name,
})
.from(permissionGroupMember)
.innerJoin(permissionGroup, eq(permissionGroupMember.permissionGroupId, permissionGroup.id))
.where(
and(
eq(permissionGroupMember.userId, session.user.id),
eq(permissionGroup.organizationId, organizationId)
)
)
.limit(1)
if (!groupMembership) {
return NextResponse.json({
permissionGroupId: null,
groupName: null,
config: null,
})
}
return NextResponse.json({
permissionGroupId: groupMembership.permissionGroupId,
groupName: groupMembership.groupName,
config: parsePermissionGroupConfig(groupMembership.config),
})
}
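
A sketch of how a client might fetch the caller's effective permission config from the handler above. The route path (shown here as `/api/permission-groups/me`) is an assumption; the `organizationId` query parameter and the null-when-not-enterprise response shape come from the code in this diff.

```typescript
// Hypothetical fetch; the path is assumed, the response shape mirrors the handler above.
interface MyPermissionConfigResponse {
  permissionGroupId: string | null
  groupName: string | null
  config: Record<string, unknown> | null // parsed PermissionGroupConfig when present
}

async function fetchMyPermissionConfig(
  organizationId: string
): Promise<MyPermissionConfigResponse> {
  const res = await fetch(
    `/api/permission-groups/me?organizationId=${encodeURIComponent(organizationId)}`
  )
  if (!res.ok) throw new Error('Failed to load permission config')
  return res.json()
}
```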

View File

@@ -4,8 +4,8 @@ import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import type { NextRequest } from 'next/server'
import { env } from '@/lib/core/config/env'
import { validateAuthToken } from '@/lib/core/security/deployment'
import { validateAlphanumericId } from '@/lib/core/security/input-validation'
import { validateAuthToken } from '@/app/api/chat/utils'
const logger = createLogger('ProxyTTSStreamAPI')

View File

@@ -3,7 +3,6 @@
*
* @vitest-environment node
*/
import { loggerMock } from '@sim/testing'
import { NextRequest } from 'next/server'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
@@ -44,7 +43,14 @@ vi.mock('@/lib/core/utils/request', () => ({
generateRequestId: () => 'test-request-id',
}))
vi.mock('@sim/logger', () => loggerMock)
vi.mock('@sim/logger', () => ({
createLogger: () => ({
info: vi.fn(),
warn: vi.fn(),
error: vi.fn(),
debug: vi.fn(),
}),
}))
import { PUT } from './route'

View File

@@ -3,7 +3,6 @@
*
* @vitest-environment node
*/
import { loggerMock } from '@sim/testing'
import { NextRequest } from 'next/server'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
@@ -41,7 +40,13 @@ vi.mock('@/lib/core/utils/request', () => ({
generateRequestId: () => 'test-request-id',
}))
vi.mock('@sim/logger', () => loggerMock)
vi.mock('@sim/logger', () => ({
createLogger: () => ({
info: vi.fn(),
warn: vi.fn(),
error: vi.fn(),
}),
}))
import { GET } from '@/app/api/schedules/route'

View File

@@ -10,7 +10,6 @@ import {
extractRequiredCredentials,
sanitizeCredentials,
} from '@/lib/workflows/credentials/credential-extractor'
import type { WorkflowState } from '@/stores/workflows/workflow/types'
const logger = createLogger('TemplateByIdAPI')
@@ -190,12 +189,12 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
.where(eq(workflow.id, template.workflowId))
.limit(1)
const currentState: Partial<WorkflowState> = {
const currentState = {
blocks: normalizedData.blocks,
edges: normalizedData.edges,
loops: normalizedData.loops,
parallels: normalizedData.parallels,
variables: (workflowRecord?.variables as WorkflowState['variables']) ?? undefined,
variables: workflowRecord?.variables || undefined,
lastSaved: Date.now(),
}

Some files were not shown because too many files have changed in this diff