mirror of
https://github.com/simstudioai/sim.git
synced 2026-01-13 08:57:55 -05:00
Compare commits
37 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
034ad8331d | ||
|
|
56458625b7 | ||
|
|
f93a946272 | ||
|
|
f2950c7060 | ||
|
|
88b4a1fe6e | ||
|
|
2512767dde | ||
|
|
b5f55b7c63 | ||
|
|
684ad5aeec | ||
|
|
a3dff1027f | ||
|
|
0aec9ef571 | ||
|
|
cb4db20a5f | ||
|
|
4941b5224b | ||
|
|
7f18d96d32 | ||
|
|
e347486f50 | ||
|
|
e21cc1132b | ||
|
|
ab32a19cf4 | ||
|
|
ead2413b95 | ||
|
|
9a16e7c20f | ||
|
|
283a521614 | ||
|
|
92fabe785d | ||
|
|
3ed177520a | ||
|
|
baa54b4c97 | ||
|
|
a11d452d7b | ||
|
|
6262503b89 | ||
|
|
67440432bf | ||
|
|
47eb060311 | ||
|
|
fd76e98f0e | ||
|
|
1dbd16115f | ||
|
|
38e827b61a | ||
|
|
1f5e8a41f8 | ||
|
|
796f73ee01 | ||
|
|
d3d6012d5c | ||
|
|
860610b4c2 | ||
|
|
05bbf34265 | ||
|
|
753600ed60 | ||
|
|
4da43d937c | ||
|
|
9502227fd4 |
@@ -1,60 +1,57 @@
|
||||
---
|
||||
description: Testing patterns with Vitest
|
||||
description: Testing patterns with Vitest and @sim/testing
|
||||
globs: ["apps/sim/**/*.test.ts", "apps/sim/**/*.test.tsx"]
|
||||
---
|
||||
|
||||
# Testing Patterns
|
||||
|
||||
Use Vitest. Test files live next to source: `feature.ts` → `feature.test.ts`
|
||||
Use Vitest. Test files: `feature.ts` → `feature.test.ts`
|
||||
|
||||
## Structure
|
||||
|
||||
```typescript
|
||||
/**
|
||||
* Tests for [feature name]
|
||||
*
|
||||
* @vitest-environment node
|
||||
*/
|
||||
import { databaseMock, loggerMock } from '@sim/testing'
|
||||
import { describe, expect, it, vi } from 'vitest'
|
||||
|
||||
// 1. Mocks BEFORE imports
|
||||
vi.mock('@sim/db', () => ({ db: { select: vi.fn() } }))
|
||||
vi.mock('@sim/db', () => databaseMock)
|
||||
vi.mock('@sim/logger', () => loggerMock)
|
||||
|
||||
// 2. Imports AFTER mocks
|
||||
import { describe, expect, it, vi, beforeEach, afterEach } from 'vitest'
|
||||
import { createSession, loggerMock } from '@sim/testing'
|
||||
import { myFunction } from '@/lib/feature'
|
||||
|
||||
describe('myFunction', () => {
|
||||
beforeEach(() => vi.clearAllMocks())
|
||||
|
||||
it('should do something', () => {
|
||||
expect(myFunction()).toBe(expected)
|
||||
})
|
||||
|
||||
it.concurrent('runs in parallel', () => { ... })
|
||||
it.concurrent('isolated tests run in parallel', () => { ... })
|
||||
})
|
||||
```
|
||||
|
||||
## @sim/testing Package
|
||||
|
||||
```typescript
|
||||
// Factories - create test data
|
||||
import { createBlock, createWorkflow, createSession } from '@sim/testing'
|
||||
Always prefer over local mocks.
|
||||
|
||||
// Mocks - pre-configured mocks
|
||||
import { loggerMock, databaseMock, fetchMock } from '@sim/testing'
|
||||
|
||||
// Builders - fluent API for complex objects
|
||||
import { ExecutionBuilder, WorkflowBuilder } from '@sim/testing'
|
||||
```
|
||||
| Category | Utilities |
|
||||
|----------|-----------|
|
||||
| **Mocks** | `loggerMock`, `databaseMock`, `setupGlobalFetchMock()` |
|
||||
| **Factories** | `createSession()`, `createWorkflowRecord()`, `createBlock()`, `createExecutorContext()` |
|
||||
| **Builders** | `WorkflowBuilder`, `ExecutionContextBuilder` |
|
||||
| **Assertions** | `expectWorkflowAccessGranted()`, `expectBlockExecuted()` |
|
||||
|
||||
## Rules
|
||||
|
||||
1. `@vitest-environment node` directive at file top
|
||||
2. **Mocks before imports** - `vi.mock()` calls must come first
|
||||
3. Use `@sim/testing` factories over manual test data
|
||||
4. `it.concurrent` for independent tests (faster)
|
||||
2. `vi.mock()` calls before importing mocked modules
|
||||
3. `@sim/testing` utilities over local mocks
|
||||
4. `it.concurrent` for isolated tests (no shared mutable state)
|
||||
5. `beforeEach(() => vi.clearAllMocks())` to reset state
|
||||
6. Group related tests with nested `describe` blocks
|
||||
7. Test file naming: `*.test.ts` (not `*.spec.ts`)
|
||||
|
||||
## Hoisted Mocks
|
||||
|
||||
For mutable mock references:
|
||||
|
||||
```typescript
|
||||
const mockFn = vi.hoisted(() => vi.fn())
|
||||
vi.mock('@/lib/module', () => ({ myFunction: mockFn }))
|
||||
mockFn.mockResolvedValue({ data: 'test' })
|
||||
```
|
||||
|
||||
14
CLAUDE.md
14
CLAUDE.md
@@ -173,13 +173,13 @@ Use Vitest. Test files: `feature.ts` → `feature.test.ts`
|
||||
/**
|
||||
* @vitest-environment node
|
||||
*/
|
||||
|
||||
// Mocks BEFORE imports
|
||||
vi.mock('@sim/db', () => ({ db: { select: vi.fn() } }))
|
||||
|
||||
// Imports AFTER mocks
|
||||
import { databaseMock, loggerMock } from '@sim/testing'
|
||||
import { describe, expect, it, vi } from 'vitest'
|
||||
import { createSession, loggerMock } from '@sim/testing'
|
||||
|
||||
vi.mock('@sim/db', () => databaseMock)
|
||||
vi.mock('@sim/logger', () => loggerMock)
|
||||
|
||||
import { myFunction } from '@/lib/feature'
|
||||
|
||||
describe('feature', () => {
|
||||
beforeEach(() => vi.clearAllMocks())
|
||||
@@ -187,7 +187,7 @@ describe('feature', () => {
|
||||
})
|
||||
```
|
||||
|
||||
Use `@sim/testing` factories over manual test data.
|
||||
Use `@sim/testing` mocks/factories over local test data. See `.cursor/rules/sim-testing.mdc` for details.
|
||||
|
||||
## Utils Rules
|
||||
|
||||
|
||||
@@ -4061,6 +4061,31 @@ export function McpIcon(props: SVGProps<SVGSVGElement>) {
|
||||
)
|
||||
}
|
||||
|
||||
export function A2AIcon(props: SVGProps<SVGSVGElement>) {
|
||||
return (
|
||||
<svg {...props} viewBox='0 0 860 860' fill='none' xmlns='http://www.w3.org/2000/svg'>
|
||||
<circle cx='544' cy='307' r='27' fill='currentColor' />
|
||||
<circle cx='154' cy='307' r='27' fill='currentColor' />
|
||||
<circle cx='706' cy='307' r='27' fill='currentColor' />
|
||||
<circle cx='316' cy='307' r='27' fill='currentColor' />
|
||||
<path
|
||||
d='M336.5 191.003H162C97.6588 191.003 45.5 243.162 45.5 307.503C45.5 371.844 97.6442 424.003 161.985 424.003C206.551 424.003 256.288 424.003 296.5 424.003C487.5 424.003 374 191.005 569 191.001C613.886 191 658.966 191 698.025 191C762.366 191.001 814.5 243.16 814.5 307.501C814.5 371.843 762.34 424.003 697.998 424.003H523.5'
|
||||
stroke='currentColor'
|
||||
strokeWidth='48'
|
||||
strokeLinecap='round'
|
||||
/>
|
||||
<path
|
||||
d='M256 510.002C270.359 510.002 282 521.643 282 536.002C282 550.361 270.359 562.002 256 562.002H148C133.641 562.002 122 550.361 122 536.002C122 521.643 133.641 510.002 148 510.002H256ZM712 510.002C726.359 510.002 738 521.643 738 536.002C738 550.361 726.359 562.002 712 562.002H360C345.641 562.002 334 550.361 334 536.002C334 521.643 345.641 510.002 360 510.002H712Z'
|
||||
fill='currentColor'
|
||||
/>
|
||||
<path
|
||||
d='M444 628.002C458.359 628.002 470 639.643 470 654.002C470 668.361 458.359 680.002 444 680.002H100C85.6406 680.002 74 668.361 74 654.002C74 639.643 85.6406 628.002 100 628.002H444ZM548 628.002C562.359 628.002 574 639.643 574 654.002C574 668.361 562.359 680.002 548 680.002C533.641 680.002 522 668.361 522 654.002C522 639.643 533.641 628.002 548 628.002ZM760 628.002C774.359 628.002 786 639.643 786 654.002C786 668.361 774.359 680.002 760 680.002H652C637.641 680.002 626 668.361 626 654.002C626 639.643 637.641 628.002 652 628.002H760Z'
|
||||
fill='currentColor'
|
||||
/>
|
||||
</svg>
|
||||
)
|
||||
}
|
||||
|
||||
export function WordpressIcon(props: SVGProps<SVGSVGElement>) {
|
||||
return (
|
||||
<svg {...props} xmlns='http://www.w3.org/2000/svg' viewBox='0 0 25.925 25.925'>
|
||||
@@ -4575,3 +4600,22 @@ export function FirefliesIcon(props: SVGProps<SVGSVGElement>) {
|
||||
</svg>
|
||||
)
|
||||
}
|
||||
|
||||
export function BedrockIcon(props: SVGProps<SVGSVGElement>) {
|
||||
return (
|
||||
<svg {...props} viewBox='0 0 24 24' xmlns='http://www.w3.org/2000/svg'>
|
||||
<defs>
|
||||
<linearGradient id='bedrock_gradient' x1='80%' x2='20%' y1='20%' y2='80%'>
|
||||
<stop offset='0%' stopColor='#6350FB' />
|
||||
<stop offset='50%' stopColor='#3D8FFF' />
|
||||
<stop offset='100%' stopColor='#9AD8F8' />
|
||||
</linearGradient>
|
||||
</defs>
|
||||
<path
|
||||
d='M13.05 15.513h3.08c.214 0 .389.177.389.394v1.82a1.704 1.704 0 011.296 1.661c0 .943-.755 1.708-1.685 1.708-.931 0-1.686-.765-1.686-1.708 0-.807.554-1.484 1.297-1.662v-1.425h-2.69v4.663a.395.395 0 01-.188.338l-2.69 1.641a.385.385 0 01-.405-.002l-4.926-3.086a.395.395 0 01-.185-.336V16.3L2.196 14.87A.395.395 0 012 14.555L2 14.528V9.406c0-.14.073-.27.192-.34l2.465-1.462V4.448c0-.129.062-.249.165-.322l.021-.014L9.77 1.058a.385.385 0 01.407 0l2.69 1.675a.395.395 0 01.185.336V7.6h3.856V5.683a1.704 1.704 0 01-1.296-1.662c0-.943.755-1.708 1.685-1.708.931 0 1.685.765 1.685 1.708 0 .807-.553 1.484-1.296 1.662v2.311a.391.391 0 01-.389.394h-4.245v1.806h6.624a1.69 1.69 0 011.64-1.313c.93 0 1.685.764 1.685 1.707 0 .943-.754 1.708-1.685 1.708a1.69 1.69 0 01-1.64-1.314H13.05v1.937h4.953l.915 1.18a1.66 1.66 0 01.84-.227c.931 0 1.685.764 1.685 1.707 0 .943-.754 1.708-1.685 1.708-.93 0-1.685-.765-1.685-1.708 0-.346.102-.668.276-.937l-.724-.935H13.05v1.806zM9.973 1.856L7.93 3.122V6.09h-.778V3.604L5.435 4.669v2.945l2.11 1.36L9.712 7.61V5.334h.778V7.83c0 .136-.07.263-.184.335L7.963 9.638v2.081l1.422 1.009-.446.646-1.406-.998-1.53 1.005-.423-.66 1.605-1.055v-1.99L5.038 8.29l-2.26 1.34v1.676l1.972-1.189.398.677-2.37 1.429V14.3l2.166 1.258 2.27-1.368.397.677-2.176 1.311V19.3l1.876 1.175 2.365-1.426.398.678-2.017 1.216 1.918 1.201 2.298-1.403v-5.78l-4.758 2.893-.4-.675 5.158-3.136V3.289L9.972 1.856zM16.13 18.47a.913.913 0 00-.908.92c0 .507.406.918.908.918a.913.913 0 00.907-.919.913.913 0 00-.907-.92zm3.63-3.81a.913.913 0 00-.908.92c0 .508.406.92.907.92a.913.913 0 00.908-.92.913.913 0 00-.908-.92zm1.555-4.99a.913.913 0 00-.908.92c0 .507.407.918.908.918a.913.913 0 00.907-.919.913.913 0 00-.907-.92zM17.296 3.1a.913.913 0 00-.907.92c0 .508.406.92.907.92a.913.913 0 00.908-.92.913.913 0 00-.908-.92z'
|
||||
fill='url(#bedrock_gradient)'
|
||||
fillRule='nonzero'
|
||||
/>
|
||||
</svg>
|
||||
)
|
||||
}
|
||||
|
||||
@@ -4,6 +4,7 @@
|
||||
|
||||
import type { ComponentType, SVGProps } from 'react'
|
||||
import {
|
||||
A2AIcon,
|
||||
AhrefsIcon,
|
||||
AirtableIcon,
|
||||
ApifyIcon,
|
||||
@@ -126,6 +127,7 @@ import {
|
||||
type IconComponent = ComponentType<SVGProps<SVGSVGElement>>
|
||||
|
||||
export const blockTypeToIconMap: Record<string, IconComponent> = {
|
||||
a2a: A2AIcon,
|
||||
ahrefs: AhrefsIcon,
|
||||
airtable: AirtableIcon,
|
||||
apify: ApifyIcon,
|
||||
|
||||
@@ -49,40 +49,40 @@ Die Modellaufschlüsselung zeigt:
|
||||
|
||||
<Tabs items={['Hosted Models', 'Bring Your Own API Key']}>
|
||||
<Tab>
|
||||
**Gehostete Modelle** - Sim stellt API-Schlüssel mit einem 2-fachen Preismultiplikator bereit:
|
||||
**Hosted Models** - Sim bietet API-Schlüssel mit einem 1,4-fachen Preismultiplikator für Agent-Blöcke:
|
||||
|
||||
**OpenAI**
|
||||
| Modell | Basispreis (Eingabe/Ausgabe) | Gehosteter Preis (Eingabe/Ausgabe) |
|
||||
| Modell | Basispreis (Eingabe/Ausgabe) | Hosted-Preis (Eingabe/Ausgabe) |
|
||||
|-------|---------------------------|----------------------------|
|
||||
| GPT-5.1 | 1,25 $ / 10,00 $ | 2,50 $ / 20,00 $ |
|
||||
| GPT-5 | 1,25 $ / 10,00 $ | 2,50 $ / 20,00 $ |
|
||||
| GPT-5 Mini | 0,25 $ / 2,00 $ | 0,50 $ / 4,00 $ |
|
||||
| GPT-5 Nano | 0,05 $ / 0,40 $ | 0,10 $ / 0,80 $ |
|
||||
| GPT-4o | 2,50 $ / 10,00 $ | 5,00 $ / 20,00 $ |
|
||||
| GPT-4.1 | 2,00 $ / 8,00 $ | 4,00 $ / 16,00 $ |
|
||||
| GPT-4.1 Mini | 0,40 $ / 1,60 $ | 0,80 $ / 3,20 $ |
|
||||
| GPT-4.1 Nano | 0,10 $ / 0,40 $ | 0,20 $ / 0,80 $ |
|
||||
| o1 | 15,00 $ / 60,00 $ | 30,00 $ / 120,00 $ |
|
||||
| o3 | 2,00 $ / 8,00 $ | 4,00 $ / 16,00 $ |
|
||||
| o4 Mini | 1,10 $ / 4,40 $ | 2,20 $ / 8,80 $ |
|
||||
| GPT-5.1 | $1.25 / $10.00 | $1.75 / $14.00 |
|
||||
| GPT-5 | $1.25 / $10.00 | $1.75 / $14.00 |
|
||||
| GPT-5 Mini | $0.25 / $2.00 | $0.35 / $2.80 |
|
||||
| GPT-5 Nano | $0.05 / $0.40 | $0.07 / $0.56 |
|
||||
| GPT-4o | $2.50 / $10.00 | $3.50 / $14.00 |
|
||||
| GPT-4.1 | $2.00 / $8.00 | $2.80 / $11.20 |
|
||||
| GPT-4.1 Mini | $0.40 / $1.60 | $0.56 / $2.24 |
|
||||
| GPT-4.1 Nano | $0.10 / $0.40 | $0.14 / $0.56 |
|
||||
| o1 | $15.00 / $60.00 | $21.00 / $84.00 |
|
||||
| o3 | $2.00 / $8.00 | $2.80 / $11.20 |
|
||||
| o4 Mini | $1.10 / $4.40 | $1.54 / $6.16 |
|
||||
|
||||
**Anthropic**
|
||||
| Modell | Basispreis (Eingabe/Ausgabe) | Gehosteter Preis (Eingabe/Ausgabe) |
|
||||
| Modell | Basispreis (Eingabe/Ausgabe) | Hosted-Preis (Eingabe/Ausgabe) |
|
||||
|-------|---------------------------|----------------------------|
|
||||
| Claude Opus 4.5 | 5,00 $ / 25,00 $ | 10,00 $ / 50,00 $ |
|
||||
| Claude Opus 4.1 | 15,00 $ / 75,00 $ | 30,00 $ / 150,00 $ |
|
||||
| Claude Sonnet 4.5 | 3,00 $ / 15,00 $ | 6,00 $ / 30,00 $ |
|
||||
| Claude Sonnet 4.0 | 3,00 $ / 15,00 $ | 6,00 $ / 30,00 $ |
|
||||
| Claude Haiku 4.5 | 1,00 $ / 5,00 $ | 2,00 $ / 10,00 $ |
|
||||
| Claude Opus 4.5 | $5.00 / $25.00 | $7.00 / $35.00 |
|
||||
| Claude Opus 4.1 | $15.00 / $75.00 | $21.00 / $105.00 |
|
||||
| Claude Sonnet 4.5 | $3.00 / $15.00 | $4.20 / $21.00 |
|
||||
| Claude Sonnet 4.0 | $3.00 / $15.00 | $4.20 / $21.00 |
|
||||
| Claude Haiku 4.5 | $1.00 / $5.00 | $1.40 / $7.00 |
|
||||
|
||||
**Google**
|
||||
| Modell | Basispreis (Eingabe/Ausgabe) | Gehosteter Preis (Eingabe/Ausgabe) |
|
||||
| Modell | Basispreis (Eingabe/Ausgabe) | Hosted-Preis (Eingabe/Ausgabe) |
|
||||
|-------|---------------------------|----------------------------|
|
||||
| Gemini 3 Pro Preview | 2,00 $ / 12,00 $ | 4,00 $ / 24,00 $ |
|
||||
| Gemini 2.5 Pro | 1,25 $ / 10,00 $ | 2,50 $ / 20,00 $ |
|
||||
| Gemini 2.5 Flash | 0,30 $ / 2,50 $ | 0,60 $ / 5,00 $ |
|
||||
| Gemini 3 Pro Preview | $2.00 / $12.00 | $2.80 / $16.80 |
|
||||
| Gemini 2.5 Pro | $1.25 / $10.00 | $1.75 / $14.00 |
|
||||
| Gemini 2.5 Flash | $0.30 / $2.50 | $0.42 / $3.50 |
|
||||
|
||||
*Der 2x-Multiplikator deckt Infrastruktur- und API-Verwaltungskosten ab.*
|
||||
*Der 1,4-fache Multiplikator deckt Infrastruktur- und API-Verwaltungskosten ab.*
|
||||
</Tab>
|
||||
|
||||
<Tab>
|
||||
|
||||
@@ -2,16 +2,15 @@
|
||||
title: Router
|
||||
---
|
||||
|
||||
import { Callout } from 'fumadocs-ui/components/callout'
|
||||
import { Tab, Tabs } from 'fumadocs-ui/components/tabs'
|
||||
import { Image } from '@/components/ui/image'
|
||||
|
||||
The Router block uses AI to intelligently route workflows based on content analysis. Unlike Condition blocks that use simple rules, Routers understand context and intent.
|
||||
The Router block uses AI to intelligently route workflows based on content analysis. Unlike Condition blocks that use simple rules, Routers understand context and intent. Each route you define creates a separate output port, allowing you to connect different paths to different downstream blocks.
|
||||
|
||||
<div className="flex justify-center">
|
||||
<Image
|
||||
src="/static/blocks/router.png"
|
||||
alt="Router Block with Multiple Paths"
|
||||
alt="Router Block with Multiple Route Ports"
|
||||
width={500}
|
||||
height={400}
|
||||
className="my-6"
|
||||
@@ -32,21 +31,23 @@ The Router block uses AI to intelligently route workflows based on content analy
|
||||
|
||||
## Configuration Options
|
||||
|
||||
### Content/Prompt
|
||||
### Context
|
||||
|
||||
The content or prompt that the Router will analyze to make routing decisions. This can be:
|
||||
The context that the Router will analyze to make routing decisions. This is the input data that gets evaluated against your route descriptions. It can be:
|
||||
|
||||
- A direct user query or input
|
||||
- Output from a previous block
|
||||
- A system-generated message
|
||||
- Any text content that needs intelligent routing
|
||||
|
||||
### Target Blocks
|
||||
### Routes
|
||||
|
||||
The possible destination blocks that the Router can select from. The Router will automatically detect connected blocks, but you can also:
|
||||
Define the possible paths that the Router can take. Each route consists of:
|
||||
|
||||
- Customize the descriptions of target blocks to improve routing accuracy
|
||||
- Specify routing criteria for each target block
|
||||
- Exclude certain blocks from being considered as routing targets
|
||||
- **Route Title**: A name for the route (e.g., "Sales", "Support", "Technical")
|
||||
- **Route Description**: A clear description of when this route should be selected (e.g., "Route here when the query is about pricing, purchasing, or sales inquiries")
|
||||
|
||||
Each route you add creates a **separate output port** on the Router block. Connect each port to the appropriate downstream block for that route.
|
||||
|
||||
### Model Selection
|
||||
|
||||
@@ -66,8 +67,9 @@ Your API key for the selected LLM provider. This is securely stored and used for
|
||||
|
||||
## Outputs
|
||||
|
||||
- **`<router.prompt>`**: Summary of the routing prompt
|
||||
- **`<router.selected_path>`**: Chosen destination block
|
||||
- **`<router.context>`**: The context that was analyzed
|
||||
- **`<router.selectedRoute>`**: The ID of the selected route
|
||||
- **`<router.selected_path>`**: Details of the chosen destination block
|
||||
- **`<router.tokens>`**: Token usage statistics
|
||||
- **`<router.cost>`**: Estimated routing cost
|
||||
- **`<router.model>`**: Model used for decision-making
|
||||
@@ -75,26 +77,43 @@ Your API key for the selected LLM provider. This is securely stored and used for
|
||||
## Example Use Cases
|
||||
|
||||
**Customer Support Triage** - Route tickets to specialized departments
|
||||
|
||||
```
|
||||
Input (Ticket) → Router → Agent (Engineering) or Agent (Finance)
|
||||
Input (Ticket) → Router
|
||||
├── [Sales Route] → Agent (Sales Team)
|
||||
├── [Technical Route] → Agent (Engineering)
|
||||
└── [Billing Route] → Agent (Finance)
|
||||
```
|
||||
|
||||
**Content Classification** - Classify and route user-generated content
|
||||
|
||||
```
|
||||
Input (Feedback) → Router → Workflow (Product) or Workflow (Technical)
|
||||
Input (Feedback) → Router
|
||||
├── [Product Feedback] → Workflow (Product Team)
|
||||
└── [Bug Report] → Workflow (Technical Team)
|
||||
```
|
||||
|
||||
**Lead Qualification** - Route leads based on qualification criteria
|
||||
|
||||
```
|
||||
Input (Lead) → Router → Agent (Enterprise Sales) or Workflow (Self-serve)
|
||||
Input (Lead) → Router
|
||||
├── [Enterprise] → Agent (Enterprise Sales)
|
||||
└── [Self-serve] → Workflow (Automated Onboarding)
|
||||
```
|
||||
|
||||
## Error Handling
|
||||
|
||||
When the Router cannot determine an appropriate route for the given context, it will route to the **error path** instead of arbitrarily selecting a route. This happens when:
|
||||
|
||||
- The context doesn't clearly match any of the defined route descriptions
|
||||
- The AI determines that none of the available routes are appropriate
|
||||
|
||||
## Best Practices
|
||||
|
||||
- **Provide clear target descriptions**: Help the Router understand when to select each destination with specific, detailed descriptions
|
||||
- **Use specific routing criteria**: Define clear conditions and examples for each path to improve accuracy
|
||||
- **Implement fallback paths**: Connect a default destination for when no specific path is appropriate
|
||||
- **Test with diverse inputs**: Ensure the Router handles various input types, edge cases, and unexpected content
|
||||
- **Monitor routing performance**: Review routing decisions regularly and refine criteria based on actual usage patterns
|
||||
- **Choose appropriate models**: Use models with strong reasoning capabilities for complex routing decisions
|
||||
- **Write clear route descriptions**: Each route description should clearly explain when that route should be selected. Be specific about the criteria.
|
||||
- **Make routes mutually exclusive**: When possible, ensure route descriptions don't overlap to prevent ambiguous routing decisions.
|
||||
- **Connect an error path**: Handle cases where no route matches by connecting an error handler for graceful fallback behavior.
|
||||
- **Use descriptive route titles**: Route titles appear in the workflow canvas, so make them meaningful for readability.
|
||||
- **Test with diverse inputs**: Ensure the Router handles various input types, edge cases, and unexpected content.
|
||||
- **Monitor routing performance**: Review routing decisions regularly and refine route descriptions based on actual usage patterns.
|
||||
- **Choose appropriate models**: Use models with strong reasoning capabilities for complex routing decisions.
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
---
|
||||
title: Enterprise
|
||||
description: Enterprise features for organizations with advanced security and compliance requirements
|
||||
description: Enterprise features for business organizations
|
||||
---
|
||||
|
||||
import { Callout } from 'fumadocs-ui/components/callout'
|
||||
@@ -9,6 +9,28 @@ Sim Studio Enterprise provides advanced features for organizations with enhanced
|
||||
|
||||
---
|
||||
|
||||
## Access Control
|
||||
|
||||
Define permission groups to control what features and integrations team members can use.
|
||||
|
||||
### Features
|
||||
|
||||
- **Allowed Model Providers** - Restrict which AI providers users can access (OpenAI, Anthropic, Google, etc.)
|
||||
- **Allowed Blocks** - Control which workflow blocks are available
|
||||
- **Platform Settings** - Hide Knowledge Base, disable MCP tools, or disable custom tools
|
||||
|
||||
### Setup
|
||||
|
||||
1. Navigate to **Settings** → **Access Control** in your workspace
|
||||
2. Create a permission group with your desired restrictions
|
||||
3. Add team members to the permission group
|
||||
|
||||
<Callout type="info">
|
||||
Users not assigned to any permission group have full access. Permission restrictions are enforced at both UI and execution time.
|
||||
</Callout>
|
||||
|
||||
---
|
||||
|
||||
## Bring Your Own Key (BYOK)
|
||||
|
||||
Use your own API keys for AI model providers instead of Sim Studio's hosted keys.
|
||||
@@ -61,15 +83,38 @@ Enterprise authentication with SAML 2.0 and OIDC support for centralized identit
|
||||
|
||||
---
|
||||
|
||||
## Self-Hosted
|
||||
## Self-Hosted Configuration
|
||||
|
||||
For self-hosted deployments, enterprise features can be enabled via environment variables:
|
||||
For self-hosted deployments, enterprise features can be enabled via environment variables without requiring billing.
|
||||
|
||||
### Environment Variables
|
||||
|
||||
| Variable | Description |
|
||||
|----------|-------------|
|
||||
| `ORGANIZATIONS_ENABLED`, `NEXT_PUBLIC_ORGANIZATIONS_ENABLED` | Enable team/organization management |
|
||||
| `ACCESS_CONTROL_ENABLED`, `NEXT_PUBLIC_ACCESS_CONTROL_ENABLED` | Permission groups for access restrictions |
|
||||
| `SSO_ENABLED`, `NEXT_PUBLIC_SSO_ENABLED` | Single Sign-On with SAML/OIDC |
|
||||
| `CREDENTIAL_SETS_ENABLED`, `NEXT_PUBLIC_CREDENTIAL_SETS_ENABLED` | Polling Groups for email triggers |
|
||||
|
||||
<Callout type="warn">
|
||||
BYOK is only available on hosted Sim Studio. Self-hosted deployments configure AI provider keys directly via environment variables.
|
||||
</Callout>
|
||||
### Organization Management
|
||||
|
||||
When billing is disabled, use the Admin API to manage organizations:
|
||||
|
||||
```bash
|
||||
# Create an organization
|
||||
curl -X POST https://your-instance/api/v1/admin/organizations \
|
||||
-H "x-admin-key: YOUR_ADMIN_API_KEY" \
|
||||
-H "Content-Type: application/json" \
|
||||
-d '{"name": "My Organization", "ownerId": "user-id-here"}'
|
||||
|
||||
# Add a member
|
||||
curl -X POST https://your-instance/api/v1/admin/organizations/{orgId}/members \
|
||||
-H "x-admin-key: YOUR_ADMIN_API_KEY" \
|
||||
-H "Content-Type: application/json" \
|
||||
-d '{"userId": "user-id-here", "role": "admin"}'
|
||||
```
|
||||
|
||||
### Notes
|
||||
|
||||
- Enabling `ACCESS_CONTROL_ENABLED` automatically enables organizations, as access control requires organization membership.
|
||||
- BYOK is only available on hosted Sim Studio. Self-hosted deployments configure AI provider keys directly via environment variables.
|
||||
|
||||
@@ -48,40 +48,40 @@ The model breakdown shows:
|
||||
|
||||
<Tabs items={['Hosted Models', 'Bring Your Own API Key']}>
|
||||
<Tab>
|
||||
**Hosted Models** - Sim provides API keys with a 2x pricing multiplier:
|
||||
**Hosted Models** - Sim provides API keys with a 1.4x pricing multiplier for Agent blocks:
|
||||
|
||||
**OpenAI**
|
||||
| Model | Base Price (Input/Output) | Hosted Price (Input/Output) |
|
||||
|-------|---------------------------|----------------------------|
|
||||
| GPT-5.1 | $1.25 / $10.00 | $2.50 / $20.00 |
|
||||
| GPT-5 | $1.25 / $10.00 | $2.50 / $20.00 |
|
||||
| GPT-5 Mini | $0.25 / $2.00 | $0.50 / $4.00 |
|
||||
| GPT-5 Nano | $0.05 / $0.40 | $0.10 / $0.80 |
|
||||
| GPT-4o | $2.50 / $10.00 | $5.00 / $20.00 |
|
||||
| GPT-4.1 | $2.00 / $8.00 | $4.00 / $16.00 |
|
||||
| GPT-4.1 Mini | $0.40 / $1.60 | $0.80 / $3.20 |
|
||||
| GPT-4.1 Nano | $0.10 / $0.40 | $0.20 / $0.80 |
|
||||
| o1 | $15.00 / $60.00 | $30.00 / $120.00 |
|
||||
| o3 | $2.00 / $8.00 | $4.00 / $16.00 |
|
||||
| o4 Mini | $1.10 / $4.40 | $2.20 / $8.80 |
|
||||
| GPT-5.1 | $1.25 / $10.00 | $1.75 / $14.00 |
|
||||
| GPT-5 | $1.25 / $10.00 | $1.75 / $14.00 |
|
||||
| GPT-5 Mini | $0.25 / $2.00 | $0.35 / $2.80 |
|
||||
| GPT-5 Nano | $0.05 / $0.40 | $0.07 / $0.56 |
|
||||
| GPT-4o | $2.50 / $10.00 | $3.50 / $14.00 |
|
||||
| GPT-4.1 | $2.00 / $8.00 | $2.80 / $11.20 |
|
||||
| GPT-4.1 Mini | $0.40 / $1.60 | $0.56 / $2.24 |
|
||||
| GPT-4.1 Nano | $0.10 / $0.40 | $0.14 / $0.56 |
|
||||
| o1 | $15.00 / $60.00 | $21.00 / $84.00 |
|
||||
| o3 | $2.00 / $8.00 | $2.80 / $11.20 |
|
||||
| o4 Mini | $1.10 / $4.40 | $1.54 / $6.16 |
|
||||
|
||||
**Anthropic**
|
||||
| Model | Base Price (Input/Output) | Hosted Price (Input/Output) |
|
||||
|-------|---------------------------|----------------------------|
|
||||
| Claude Opus 4.5 | $5.00 / $25.00 | $10.00 / $50.00 |
|
||||
| Claude Opus 4.1 | $15.00 / $75.00 | $30.00 / $150.00 |
|
||||
| Claude Sonnet 4.5 | $3.00 / $15.00 | $6.00 / $30.00 |
|
||||
| Claude Sonnet 4.0 | $3.00 / $15.00 | $6.00 / $30.00 |
|
||||
| Claude Haiku 4.5 | $1.00 / $5.00 | $2.00 / $10.00 |
|
||||
| Claude Opus 4.5 | $5.00 / $25.00 | $7.00 / $35.00 |
|
||||
| Claude Opus 4.1 | $15.00 / $75.00 | $21.00 / $105.00 |
|
||||
| Claude Sonnet 4.5 | $3.00 / $15.00 | $4.20 / $21.00 |
|
||||
| Claude Sonnet 4.0 | $3.00 / $15.00 | $4.20 / $21.00 |
|
||||
| Claude Haiku 4.5 | $1.00 / $5.00 | $1.40 / $7.00 |
|
||||
|
||||
**Google**
|
||||
| Model | Base Price (Input/Output) | Hosted Price (Input/Output) |
|
||||
|-------|---------------------------|----------------------------|
|
||||
| Gemini 3 Pro Preview | $2.00 / $12.00 | $4.00 / $24.00 |
|
||||
| Gemini 2.5 Pro | $1.25 / $10.00 | $2.50 / $20.00 |
|
||||
| Gemini 2.5 Flash | $0.30 / $2.50 | $0.60 / $5.00 |
|
||||
| Gemini 3 Pro Preview | $2.00 / $12.00 | $2.80 / $16.80 |
|
||||
| Gemini 2.5 Pro | $1.25 / $10.00 | $1.75 / $14.00 |
|
||||
| Gemini 2.5 Flash | $0.30 / $2.50 | $0.42 / $3.50 |
|
||||
|
||||
*The 2x multiplier covers infrastructure and API management costs.*
|
||||
*The 1.4x multiplier covers infrastructure and API management costs.*
|
||||
</Tab>
|
||||
|
||||
<Tab>
|
||||
|
||||
136
apps/docs/content/docs/en/execution/form.mdx
Normal file
136
apps/docs/content/docs/en/execution/form.mdx
Normal file
@@ -0,0 +1,136 @@
|
||||
---
|
||||
title: Form Deployment
|
||||
---
|
||||
|
||||
import { Callout } from 'fumadocs-ui/components/callout'
|
||||
import { Tab, Tabs } from 'fumadocs-ui/components/tabs'
|
||||
|
||||
Deploy your workflow as an embeddable form that users can fill out on your website or share via link. Form submissions trigger your workflow with the `form` trigger type.
|
||||
|
||||
## Overview
|
||||
|
||||
Form deployment turns your workflow's Input Format into a responsive form that can be:
|
||||
- Shared via a direct link (e.g., `https://sim.ai/form/my-survey`)
|
||||
- Embedded in any website using an iframe
|
||||
|
||||
When a user submits the form, it triggers your workflow with the form data.
|
||||
|
||||
<Callout type="info">
|
||||
Forms derive their fields from your workflow's Start block Input Format. Each field becomes a form input with the appropriate type.
|
||||
</Callout>
|
||||
|
||||
## Creating a Form
|
||||
|
||||
1. Open your workflow and click **Deploy**
|
||||
2. Select the **Form** tab
|
||||
3. Configure:
|
||||
- **URL**: Unique identifier (e.g., `contact-form` → `sim.ai/form/contact-form`)
|
||||
- **Title**: Form heading
|
||||
- **Description**: Optional subtitle
|
||||
- **Form Fields**: Customize labels and descriptions for each field
|
||||
- **Authentication**: Public, password-protected, or email whitelist
|
||||
- **Thank You Message**: Shown after submission
|
||||
4. Click **Launch**
|
||||
|
||||
## Field Type Mapping
|
||||
|
||||
| Input Format Type | Form Field |
|
||||
|------------------|------------|
|
||||
| `string` | Text input |
|
||||
| `number` | Number input |
|
||||
| `boolean` | Toggle switch |
|
||||
| `object` | JSON editor |
|
||||
| `array` | JSON array editor |
|
||||
| `files` | File upload |
|
||||
|
||||
## Access Control
|
||||
|
||||
| Mode | Description |
|
||||
|------|-------------|
|
||||
| **Public** | Anyone with the link can submit |
|
||||
| **Password** | Users must enter a password |
|
||||
| **Email Whitelist** | Only specified emails/domains can submit |
|
||||
|
||||
For email whitelist:
|
||||
- Exact: `user@example.com`
|
||||
- Domain: `@example.com` (all emails from domain)
|
||||
|
||||
## Embedding
|
||||
|
||||
### Direct Link
|
||||
|
||||
```
|
||||
https://sim.ai/form/your-identifier
|
||||
```
|
||||
|
||||
### Iframe
|
||||
|
||||
```html
|
||||
<iframe
|
||||
src="https://sim.ai/form/your-identifier"
|
||||
width="100%"
|
||||
height="600"
|
||||
frameborder="0"
|
||||
title="Form"
|
||||
></iframe>
|
||||
```
|
||||
|
||||
## API Submission
|
||||
|
||||
Submit forms programmatically:
|
||||
|
||||
<Tabs items={['cURL', 'TypeScript']}>
|
||||
<Tab value="cURL">
|
||||
```bash
|
||||
curl -X POST https://sim.ai/api/form/your-identifier \
|
||||
-H "Content-Type: application/json" \
|
||||
-d '{
|
||||
"formData": {
|
||||
"name": "John Doe",
|
||||
"email": "john@example.com"
|
||||
}
|
||||
}'
|
||||
```
|
||||
</Tab>
|
||||
<Tab value="TypeScript">
|
||||
```typescript
|
||||
const response = await fetch('https://sim.ai/api/form/your-identifier', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({
|
||||
formData: {
|
||||
name: 'John Doe',
|
||||
email: 'john@example.com'
|
||||
}
|
||||
})
|
||||
});
|
||||
|
||||
const result = await response.json();
|
||||
// { success: true, data: { executionId: '...' } }
|
||||
```
|
||||
</Tab>
|
||||
</Tabs>
|
||||
|
||||
### Protected Forms
|
||||
|
||||
For password-protected forms:
|
||||
```bash
|
||||
curl -X POST https://sim.ai/api/form/your-identifier \
|
||||
-H "Content-Type: application/json" \
|
||||
-d '{ "password": "secret", "formData": { "name": "John" } }'
|
||||
```
|
||||
|
||||
For email-protected forms:
|
||||
```bash
|
||||
curl -X POST https://sim.ai/api/form/your-identifier \
|
||||
-H "Content-Type: application/json" \
|
||||
-d '{ "email": "allowed@example.com", "formData": { "name": "John" } }'
|
||||
```
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
**"No input fields configured"** - Add Input Format fields to your Start block.
|
||||
|
||||
**Form not loading in iframe** - Check your site's CSP allows iframes from `sim.ai`.
|
||||
|
||||
**Submissions failing** - Verify the identifier is correct and required fields are filled.
|
||||
@@ -1,3 +1,3 @@
|
||||
{
|
||||
"pages": ["index", "basics", "api", "logging", "costs"]
|
||||
"pages": ["index", "basics", "api", "form", "logging", "costs"]
|
||||
}
|
||||
|
||||
240
apps/docs/content/docs/en/tools/a2a.mdx
Normal file
240
apps/docs/content/docs/en/tools/a2a.mdx
Normal file
@@ -0,0 +1,240 @@
|
||||
---
|
||||
title: A2A
|
||||
description: Interact with external A2A-compatible agents
|
||||
---
|
||||
|
||||
import { BlockInfoCard } from "@/components/ui/block-info-card"
|
||||
|
||||
<BlockInfoCard
|
||||
type="a2a"
|
||||
color="#4151B5"
|
||||
/>
|
||||
|
||||
{/* MANUAL-CONTENT-START:intro */}
|
||||
The A2A (Agent-to-Agent) protocol enables Sim to interact with external AI agents and systems that implement A2A-compatible APIs. With A2A, you can connect Sim’s automations and workflows to remote agents—such as LLM-powered bots, microservices, and other AI-based tools—using a standardized messaging format.
|
||||
|
||||
Using the A2A tools in Sim, you can:
|
||||
|
||||
- **Send Messages to External Agents**: Communicate directly with remote agents, providing prompts, commands, or data.
|
||||
- **Receive and Stream Responses**: Get structured responses, artifacts, or real-time updates from the agent as the task progresses.
|
||||
- **Continue Conversations or Tasks**: Carry on multi-turn conversations or workflows by referencing task and context IDs.
|
||||
- **Integrate Third-Party AI and Automation**: Leverage external A2A-compatible services as part of your Sim workflows.
|
||||
|
||||
These features allow you to build advanced workflows that combine Sim’s native capabilities with the intelligence and automation of external AIs or custom agents. To use A2A integrations, you’ll need the external agent’s endpoint URL and, if required, an API key or credentials.
|
||||
{/* MANUAL-CONTENT-END */}
|
||||
|
||||
|
||||
## Usage Instructions
|
||||
|
||||
Use the A2A (Agent-to-Agent) protocol to interact with external AI agents.
|
||||
|
||||
|
||||
|
||||
## Tools
|
||||
|
||||
### `a2a_send_message`
|
||||
|
||||
Send a message to an external A2A-compatible agent.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `agentUrl` | string | Yes | The A2A agent endpoint URL |
|
||||
| `message` | string | Yes | Message to send to the agent |
|
||||
| `taskId` | string | No | Task ID for continuing an existing task |
|
||||
| `contextId` | string | No | Context ID for conversation continuity |
|
||||
| `apiKey` | string | No | API key for authentication |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `content` | string | The text response from the agent |
|
||||
| `taskId` | string | Task ID for follow-up interactions |
|
||||
| `contextId` | string | Context ID for conversation continuity |
|
||||
| `state` | string | Task state |
|
||||
| `artifacts` | array | Structured output artifacts |
|
||||
| `history` | array | Full message history |
|
||||
|
||||
### `a2a_send_message_stream`
|
||||
|
||||
Send a message to an external A2A-compatible agent with real-time streaming.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `agentUrl` | string | Yes | The A2A agent endpoint URL |
|
||||
| `message` | string | Yes | Message to send to the agent |
|
||||
| `taskId` | string | No | Task ID for continuing an existing task |
|
||||
| `contextId` | string | No | Context ID for conversation continuity |
|
||||
| `apiKey` | string | No | API key for authentication |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `content` | string | The text response from the agent |
|
||||
| `taskId` | string | Task ID for follow-up interactions |
|
||||
| `contextId` | string | Context ID for conversation continuity |
|
||||
| `state` | string | Task state |
|
||||
| `artifacts` | array | Structured output artifacts |
|
||||
| `history` | array | Full message history |
|
||||
|
||||
### `a2a_get_task`
|
||||
|
||||
Query the status of an existing A2A task.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `agentUrl` | string | Yes | The A2A agent endpoint URL |
|
||||
| `taskId` | string | Yes | Task ID to query |
|
||||
| `apiKey` | string | No | API key for authentication |
|
||||
| `historyLength` | number | No | Number of history messages to include |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `taskId` | string | Task ID |
|
||||
| `contextId` | string | Context ID |
|
||||
| `state` | string | Task state |
|
||||
| `artifacts` | array | Output artifacts |
|
||||
| `history` | array | Message history |
|
||||
|
||||
### `a2a_cancel_task`
|
||||
|
||||
Cancel a running A2A task.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `agentUrl` | string | Yes | The A2A agent endpoint URL |
|
||||
| `taskId` | string | Yes | Task ID to cancel |
|
||||
| `apiKey` | string | No | API key for authentication |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `cancelled` | boolean | Whether cancellation was successful |
|
||||
| `state` | string | Task state after cancellation |
|
||||
|
||||
### `a2a_get_agent_card`
|
||||
|
||||
Fetch the Agent Card (discovery document) for an A2A agent.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `agentUrl` | string | Yes | The A2A agent endpoint URL |
|
||||
| `apiKey` | string | No | API key for authentication \(if required\) |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `name` | string | Agent name |
|
||||
| `description` | string | Agent description |
|
||||
| `url` | string | Agent endpoint URL |
|
||||
| `version` | string | Agent version |
|
||||
| `capabilities` | object | Agent capabilities \(streaming, pushNotifications, etc.\) |
|
||||
| `skills` | array | Skills the agent can perform |
|
||||
| `defaultInputModes` | array | Default input modes \(text, file, data\) |
|
||||
| `defaultOutputModes` | array | Default output modes \(text, file, data\) |
|
||||
|
||||
### `a2a_resubscribe`
|
||||
|
||||
Reconnect to an ongoing A2A task stream after connection interruption.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `agentUrl` | string | Yes | The A2A agent endpoint URL |
|
||||
| `taskId` | string | Yes | Task ID to resubscribe to |
|
||||
| `apiKey` | string | No | API key for authentication |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `taskId` | string | Task ID |
|
||||
| `contextId` | string | Context ID |
|
||||
| `state` | string | Current task state |
|
||||
| `isRunning` | boolean | Whether the task is still running |
|
||||
| `artifacts` | array | Output artifacts |
|
||||
| `history` | array | Message history |
|
||||
|
||||
### `a2a_set_push_notification`
|
||||
|
||||
Configure a webhook to receive task update notifications.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `agentUrl` | string | Yes | The A2A agent endpoint URL |
|
||||
| `taskId` | string | Yes | Task ID to configure notifications for |
|
||||
| `webhookUrl` | string | Yes | HTTPS webhook URL to receive notifications |
|
||||
| `token` | string | No | Token for webhook validation |
|
||||
| `apiKey` | string | No | API key for authentication |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `url` | string | Configured webhook URL |
|
||||
| `token` | string | Token for webhook validation |
|
||||
| `success` | boolean | Whether configuration was successful |
|
||||
|
||||
### `a2a_get_push_notification`
|
||||
|
||||
Get the push notification webhook configuration for a task.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `agentUrl` | string | Yes | The A2A agent endpoint URL |
|
||||
| `taskId` | string | Yes | Task ID to get notification config for |
|
||||
| `apiKey` | string | No | API key for authentication |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `url` | string | Configured webhook URL |
|
||||
| `token` | string | Token for webhook validation |
|
||||
| `exists` | boolean | Whether a push notification config exists |
|
||||
|
||||
### `a2a_delete_push_notification`
|
||||
|
||||
Delete the push notification webhook configuration for a task.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `agentUrl` | string | Yes | The A2A agent endpoint URL |
|
||||
| `taskId` | string | Yes | Task ID to delete notification config for |
|
||||
| `pushNotificationConfigId` | string | No | Push notification configuration ID to delete \(optional - server can derive from taskId\) |
|
||||
| `apiKey` | string | No | API key for authentication |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `success` | boolean | Whether deletion was successful |
|
||||
|
||||
|
||||
|
||||
## Notes
|
||||
|
||||
- Category: `tools`
|
||||
- Type: `a2a`
|
||||
@@ -37,7 +37,7 @@ This integration empowers Sim agents to automate data management tasks within yo
|
||||
|
||||
## Usage Instructions
|
||||
|
||||
Integrate Amazon DynamoDB into workflows. Supports Get, Put, Query, Scan, Update, and Delete operations on DynamoDB tables.
|
||||
Integrate Amazon DynamoDB into workflows. Supports Get, Put, Query, Scan, Update, Delete, and Introspect operations on DynamoDB tables.
|
||||
|
||||
|
||||
|
||||
@@ -185,6 +185,27 @@ Delete an item from a DynamoDB table
|
||||
| --------- | ---- | ----------- |
|
||||
| `message` | string | Operation status message |
|
||||
|
||||
### `dynamodb_introspect`
|
||||
|
||||
Introspect DynamoDB to list tables or get detailed schema information for a specific table
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `region` | string | Yes | AWS region \(e.g., us-east-1\) |
|
||||
| `accessKeyId` | string | Yes | AWS access key ID |
|
||||
| `secretAccessKey` | string | Yes | AWS secret access key |
|
||||
| `tableName` | string | No | Optional table name to get detailed schema. If not provided, lists all tables. |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `message` | string | Operation status message |
|
||||
| `tables` | array | List of table names in the region |
|
||||
| `tableDetails` | object | Detailed schema information for a specific table |
|
||||
|
||||
|
||||
|
||||
## Notes
|
||||
|
||||
@@ -362,6 +362,29 @@ Get comprehensive statistics about the Elasticsearch cluster.
|
||||
| `nodes` | object | Node statistics including count and versions |
|
||||
| `indices` | object | Index statistics including document count and store size |
|
||||
|
||||
### `elasticsearch_list_indices`
|
||||
|
||||
List all indices in the Elasticsearch cluster with their health, status, and statistics.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `deploymentType` | string | Yes | Deployment type: self_hosted or cloud |
|
||||
| `host` | string | No | Elasticsearch host URL \(for self-hosted\) |
|
||||
| `cloudId` | string | No | Elastic Cloud ID \(for cloud deployments\) |
|
||||
| `authMethod` | string | Yes | Authentication method: api_key or basic_auth |
|
||||
| `apiKey` | string | No | Elasticsearch API key |
|
||||
| `username` | string | No | Username for basic auth |
|
||||
| `password` | string | No | Password for basic auth |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `message` | string | Summary message about the indices |
|
||||
| `indices` | json | Array of index information objects |
|
||||
|
||||
|
||||
|
||||
## Notes
|
||||
|
||||
@@ -48,7 +48,7 @@ Integrate Google Drive into the workflow. Can create, upload, and list files.
|
||||
|
||||
### `google_drive_upload`
|
||||
|
||||
Upload a file to Google Drive
|
||||
Upload a file to Google Drive with complete metadata returned
|
||||
|
||||
#### Input
|
||||
|
||||
@@ -65,11 +65,11 @@ Upload a file to Google Drive
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `file` | json | Uploaded file metadata including ID, name, and links |
|
||||
| `file` | object | Complete uploaded file metadata from Google Drive |
|
||||
|
||||
### `google_drive_create_folder`
|
||||
|
||||
Create a new folder in Google Drive
|
||||
Create a new folder in Google Drive with complete metadata returned
|
||||
|
||||
#### Input
|
||||
|
||||
@@ -83,11 +83,11 @@ Create a new folder in Google Drive
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `file` | json | Created folder metadata including ID, name, and parent information |
|
||||
| `file` | object | Complete created folder metadata from Google Drive |
|
||||
|
||||
### `google_drive_download`
|
||||
|
||||
Download a file from Google Drive (exports Google Workspace files automatically)
|
||||
Download a file from Google Drive with complete metadata (exports Google Workspace files automatically)
|
||||
|
||||
#### Input
|
||||
|
||||
@@ -96,16 +96,17 @@ Download a file from Google Drive (exports Google Workspace files automatically)
|
||||
| `fileId` | string | Yes | The ID of the file to download |
|
||||
| `mimeType` | string | No | The MIME type to export Google Workspace files to \(optional\) |
|
||||
| `fileName` | string | No | Optional filename override |
|
||||
| `includeRevisions` | boolean | No | Whether to include revision history in the metadata \(default: true, returns first 100 revisions\) |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `file` | file | Downloaded file stored in execution files |
|
||||
| `file` | object | Downloaded file data |
|
||||
|
||||
### `google_drive_list`
|
||||
|
||||
List files and folders in Google Drive
|
||||
List files and folders in Google Drive with complete metadata
|
||||
|
||||
#### Input
|
||||
|
||||
@@ -121,7 +122,7 @@ List files and folders in Google Drive
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `files` | json | Array of file metadata objects from the specified folder |
|
||||
| `files` | array | Array of file metadata objects from Google Drive |
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -162,6 +162,7 @@ Create a webhook to receive recording events
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | Grain API key \(Personal Access Token\) |
|
||||
| `hookUrl` | string | Yes | Webhook endpoint URL \(must respond 2xx\) |
|
||||
| `hookType` | string | Yes | Type of webhook: "recording_added" or "upload_status" |
|
||||
| `filterBeforeDatetime` | string | No | Filter: recordings before this date |
|
||||
| `filterAfterDatetime` | string | No | Filter: recordings after this date |
|
||||
| `filterParticipantScope` | string | No | Filter: "internal" or "external" |
|
||||
@@ -178,6 +179,7 @@ Create a webhook to receive recording events
|
||||
| `id` | string | Hook UUID |
|
||||
| `enabled` | boolean | Whether hook is active |
|
||||
| `hook_url` | string | The webhook URL |
|
||||
| `hook_type` | string | Type of hook: recording_added or upload_status |
|
||||
| `filter` | object | Applied filters |
|
||||
| `include` | object | Included fields |
|
||||
| `inserted_at` | string | ISO8601 creation timestamp |
|
||||
|
||||
@@ -851,24 +851,6 @@ List all status updates for a project in Linear
|
||||
| --------- | ---- | ----------- |
|
||||
| `updates` | array | Array of project updates |
|
||||
|
||||
### `linear_create_project_link`
|
||||
|
||||
Add an external link to a project in Linear
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `projectId` | string | Yes | Project ID to add link to |
|
||||
| `url` | string | Yes | URL of the external link |
|
||||
| `label` | string | No | Link label/title |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `link` | object | The created project link |
|
||||
|
||||
### `linear_list_notifications`
|
||||
|
||||
List notifications for the current user in Linear
|
||||
@@ -1246,7 +1228,6 @@ Create a new project label in Linear
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `projectId` | string | Yes | The project for this label |
|
||||
| `name` | string | Yes | Project label name |
|
||||
| `color` | string | No | Label color \(hex code\) |
|
||||
| `description` | string | No | Label description |
|
||||
@@ -1424,12 +1405,12 @@ Create a new project status in Linear
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `projectId` | string | Yes | The project to create the status for |
|
||||
| `name` | string | Yes | Project status name |
|
||||
| `type` | string | Yes | Status type: "backlog", "planned", "started", "paused", "completed", or "canceled" |
|
||||
| `color` | string | Yes | Status color \(hex code\) |
|
||||
| `position` | number | Yes | Position in status list \(e.g. 0, 1, 2...\) |
|
||||
| `description` | string | No | Status description |
|
||||
| `indefinite` | boolean | No | Whether the status is indefinite |
|
||||
| `position` | number | No | Position in status list |
|
||||
|
||||
#### Output
|
||||
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
{
|
||||
"pages": [
|
||||
"index",
|
||||
"a2a",
|
||||
"ahrefs",
|
||||
"airtable",
|
||||
"apify",
|
||||
|
||||
@@ -172,6 +172,30 @@ Execute MongoDB aggregation pipeline
|
||||
| `documents` | array | Array of documents returned from aggregation |
|
||||
| `documentCount` | number | Number of documents returned |
|
||||
|
||||
### `mongodb_introspect`
|
||||
|
||||
Introspect MongoDB database to list databases, collections, and indexes
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `host` | string | Yes | MongoDB server hostname or IP address |
|
||||
| `port` | number | Yes | MongoDB server port \(default: 27017\) |
|
||||
| `database` | string | No | Database name to introspect \(optional - if not provided, lists all databases\) |
|
||||
| `username` | string | No | MongoDB username |
|
||||
| `password` | string | No | MongoDB password |
|
||||
| `authSource` | string | No | Authentication database |
|
||||
| `ssl` | string | No | SSL connection mode \(disabled, required, preferred\) |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `message` | string | Operation status message |
|
||||
| `databases` | array | Array of database names |
|
||||
| `collections` | array | Array of collection info with name, type, document count, and indexes |
|
||||
|
||||
|
||||
|
||||
## Notes
|
||||
|
||||
@@ -157,6 +157,29 @@ Execute raw SQL query on MySQL database
|
||||
| `rows` | array | Array of rows returned from the query |
|
||||
| `rowCount` | number | Number of rows affected |
|
||||
|
||||
### `mysql_introspect`
|
||||
|
||||
Introspect MySQL database schema to retrieve table structures, columns, and relationships
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `host` | string | Yes | MySQL server hostname or IP address |
|
||||
| `port` | number | Yes | MySQL server port \(default: 3306\) |
|
||||
| `database` | string | Yes | Database name to connect to |
|
||||
| `username` | string | Yes | Database username |
|
||||
| `password` | string | Yes | Database password |
|
||||
| `ssl` | string | No | SSL connection mode \(disabled, required, preferred\) |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `message` | string | Operation status message |
|
||||
| `tables` | array | Array of table schemas with columns, keys, and indexes |
|
||||
| `databases` | array | List of available databases on the server |
|
||||
|
||||
|
||||
|
||||
## Notes
|
||||
|
||||
@@ -168,6 +168,33 @@ Execute arbitrary Cypher queries on Neo4j graph database for complex operations
|
||||
| `recordCount` | number | Number of records returned |
|
||||
| `summary` | json | Execution summary with timing and counters |
|
||||
|
||||
### `neo4j_introspect`
|
||||
|
||||
Introspect a Neo4j database to discover its schema including node labels, relationship types, properties, constraints, and indexes.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `host` | string | Yes | Neo4j server hostname or IP address |
|
||||
| `port` | number | Yes | Neo4j server port \(default: 7687 for Bolt protocol\) |
|
||||
| `database` | string | Yes | Database name to connect to |
|
||||
| `username` | string | Yes | Neo4j username |
|
||||
| `password` | string | Yes | Neo4j password |
|
||||
| `encryption` | string | No | Connection encryption mode \(enabled, disabled\) |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `message` | string | Operation status message |
|
||||
| `labels` | array | Array of node labels in the database |
|
||||
| `relationshipTypes` | array | Array of relationship types in the database |
|
||||
| `nodeSchemas` | array | Array of node schemas with their properties |
|
||||
| `relationshipSchemas` | array | Array of relationship schemas with their properties |
|
||||
| `constraints` | array | Array of database constraints |
|
||||
| `indexes` | array | Array of database indexes |
|
||||
|
||||
|
||||
|
||||
## Notes
|
||||
|
||||
@@ -157,6 +157,30 @@ Execute raw SQL query on PostgreSQL database
|
||||
| `rows` | array | Array of rows returned from the query |
|
||||
| `rowCount` | number | Number of rows affected |
|
||||
|
||||
### `postgresql_introspect`
|
||||
|
||||
Introspect PostgreSQL database schema to retrieve table structures, columns, and relationships
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `host` | string | Yes | PostgreSQL server hostname or IP address |
|
||||
| `port` | number | Yes | PostgreSQL server port \(default: 5432\) |
|
||||
| `database` | string | Yes | Database name to connect to |
|
||||
| `username` | string | Yes | Database username |
|
||||
| `password` | string | Yes | Database password |
|
||||
| `ssl` | string | No | SSL connection mode \(disabled, required, preferred\) |
|
||||
| `schema` | string | No | Schema to introspect \(default: public\) |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `message` | string | Operation status message |
|
||||
| `tables` | array | Array of table schemas with columns, keys, and indexes |
|
||||
| `schemas` | array | List of available schemas in the database |
|
||||
|
||||
|
||||
|
||||
## Notes
|
||||
|
||||
@@ -79,30 +79,6 @@ Capture multiple events at once in PostHog. Use this for bulk event ingestion to
|
||||
| `status` | string | Status message indicating whether the batch was captured successfully |
|
||||
| `eventsProcessed` | number | Number of events processed in the batch |
|
||||
|
||||
### `posthog_list_events`
|
||||
|
||||
List events in PostHog. Note: This endpoint is deprecated but kept for backwards compatibility. For production use, prefer the Query endpoint with HogQL.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `personalApiKey` | string | Yes | PostHog Personal API Key \(for authenticated API access\) |
|
||||
| `region` | string | No | PostHog region: us \(default\) or eu |
|
||||
| `projectId` | string | Yes | PostHog Project ID |
|
||||
| `limit` | number | No | Number of events to return \(default: 100, max: 100\) |
|
||||
| `offset` | number | No | Number of events to skip for pagination |
|
||||
| `event` | string | No | Filter by specific event name |
|
||||
| `distinctId` | string | No | Filter by specific distinct_id |
|
||||
| `before` | string | No | ISO 8601 timestamp - only return events before this time |
|
||||
| `after` | string | No | ISO 8601 timestamp - only return events after this time |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `events` | array | List of events with their properties and metadata |
|
||||
|
||||
### `posthog_list_persons`
|
||||
|
||||
List persons (users) in PostHog. Returns user profiles with their properties and distinct IDs.
|
||||
|
||||
@@ -165,6 +165,32 @@ Execute raw SQL on Amazon RDS using the Data API
|
||||
| `rows` | array | Array of rows returned or affected |
|
||||
| `rowCount` | number | Number of rows affected |
|
||||
|
||||
### `rds_introspect`
|
||||
|
||||
Introspect Amazon RDS Aurora database schema to retrieve table structures, columns, and relationships
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `region` | string | Yes | AWS region \(e.g., us-east-1\) |
|
||||
| `accessKeyId` | string | Yes | AWS access key ID |
|
||||
| `secretAccessKey` | string | Yes | AWS secret access key |
|
||||
| `resourceArn` | string | Yes | ARN of the Aurora DB cluster |
|
||||
| `secretArn` | string | Yes | ARN of the Secrets Manager secret containing DB credentials |
|
||||
| `database` | string | No | Database name \(optional\) |
|
||||
| `schema` | string | No | Schema to introspect \(default: public for PostgreSQL, database name for MySQL\) |
|
||||
| `engine` | string | No | Database engine \(aurora-postgresql or aurora-mysql\). Auto-detected if not provided. |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `message` | string | Operation status message |
|
||||
| `engine` | string | Detected database engine type |
|
||||
| `tables` | array | Array of table schemas with columns, keys, and indexes |
|
||||
| `schemas` | array | List of available schemas in the database |
|
||||
|
||||
|
||||
|
||||
## Notes
|
||||
|
||||
@@ -261,6 +261,25 @@ Call a PostgreSQL function in Supabase
|
||||
| `message` | string | Operation status message |
|
||||
| `results` | json | Result returned from the function |
|
||||
|
||||
### `supabase_introspect`
|
||||
|
||||
Introspect Supabase database schema to get table structures, columns, and relationships
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `projectId` | string | Yes | Your Supabase project ID \(e.g., jdrkgepadsdopsntdlom\) |
|
||||
| `schema` | string | No | Database schema to introspect \(defaults to all user schemas, commonly "public"\) |
|
||||
| `apiKey` | string | Yes | Your Supabase service role secret key |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `message` | string | Operation status message |
|
||||
| `tables` | array | Array of table schemas with columns, keys, and indexes |
|
||||
|
||||
### `supabase_storage_upload`
|
||||
|
||||
Upload a file to a Supabase storage bucket
|
||||
|
||||
@@ -53,6 +53,9 @@ Send a chat completion request to any supported LLM provider
|
||||
| `vertexProject` | string | No | Google Cloud project ID for Vertex AI |
|
||||
| `vertexLocation` | string | No | Google Cloud location for Vertex AI \(defaults to us-central1\) |
|
||||
| `vertexCredential` | string | No | Google Cloud OAuth credential ID for Vertex AI |
|
||||
| `bedrockAccessKeyId` | string | No | AWS Access Key ID for Bedrock |
|
||||
| `bedrockSecretKey` | string | No | AWS Secret Access Key for Bedrock |
|
||||
| `bedrockRegion` | string | No | AWS region for Bedrock \(defaults to us-east-1\) |
|
||||
|
||||
#### Output
|
||||
|
||||
|
||||
@@ -44,7 +44,7 @@ Reference structured values downstream with expressions such as <code><start.
|
||||
|
||||
## How it behaves per entry point
|
||||
|
||||
<Tabs items={['Editor run', 'Deploy to API', 'Deploy to chat']}>
|
||||
<Tabs items={['Editor run', 'Deploy to API', 'Deploy to chat', 'Deploy to form']}>
|
||||
<Tab>
|
||||
When you click <strong>Run</strong> in the editor, the Start block renders the Input Format as a form. Default values make it easy to retest without retyping data. Submitting the form triggers the workflow immediately and the values become available on <code><start.fieldName></code> (for example <code><start.sampleField></code>).
|
||||
|
||||
@@ -64,6 +64,13 @@ Reference structured values downstream with expressions such as <code><start.
|
||||
|
||||
If you launch chat with additional structured context (for example from an embed), it merges into the corresponding <code><start.fieldName></code> outputs, keeping downstream blocks consistent with API and manual runs.
|
||||
</Tab>
|
||||
<Tab>
|
||||
Form deployments render the Input Format as a standalone, embeddable form page. Each field becomes a form input with appropriate UI controls—text inputs for strings, number inputs for numbers, toggle switches for booleans, and file upload zones for files.
|
||||
|
||||
When a user submits the form, values become available on <code><start.fieldName></code> just like other entry points. The workflow executes with trigger type <code>form</code>, and submitters see a customizable thank-you message upon completion.
|
||||
|
||||
Forms can be embedded via iframe or shared as direct links, making them ideal for surveys, contact forms, and data collection workflows.
|
||||
</Tab>
|
||||
</Tabs>
|
||||
|
||||
## Referencing Start data downstream
|
||||
|
||||
@@ -49,40 +49,40 @@ El desglose del modelo muestra:
|
||||
|
||||
<Tabs items={['Modelos alojados', 'Trae tu propia clave API']}>
|
||||
<Tab>
|
||||
**Modelos alojados** - Sim proporciona claves API con un multiplicador de precio de 2x:
|
||||
**Modelos alojados** - Sim proporciona claves API con un multiplicador de precios de 1.4x para bloques de agente:
|
||||
|
||||
**OpenAI**
|
||||
| Modelo | Precio base (entrada/salida) | Precio alojado (entrada/salida) |
|
||||
|-------|---------------------------|----------------------------|
|
||||
| GPT-5.1 | $1.25 / $10.00 | $2.50 / $20.00 |
|
||||
| GPT-5 | $1.25 / $10.00 | $2.50 / $20.00 |
|
||||
| GPT-5 Mini | $0.25 / $2.00 | $0.50 / $4.00 |
|
||||
| GPT-5 Nano | $0.05 / $0.40 | $0.10 / $0.80 |
|
||||
| GPT-4o | $2.50 / $10.00 | $5.00 / $20.00 |
|
||||
| GPT-4.1 | $2.00 / $8.00 | $4.00 / $16.00 |
|
||||
| GPT-4.1 Mini | $0.40 / $1.60 | $0.80 / $3.20 |
|
||||
| GPT-4.1 Nano | $0.10 / $0.40 | $0.20 / $0.80 |
|
||||
| o1 | $15.00 / $60.00 | $30.00 / $120.00 |
|
||||
| o3 | $2.00 / $8.00 | $4.00 / $16.00 |
|
||||
| o4 Mini | $1.10 / $4.40 | $2.20 / $8.80 |
|
||||
| GPT-5.1 | $1.25 / $10.00 | $1.75 / $14.00 |
|
||||
| GPT-5 | $1.25 / $10.00 | $1.75 / $14.00 |
|
||||
| GPT-5 Mini | $0.25 / $2.00 | $0.35 / $2.80 |
|
||||
| GPT-5 Nano | $0.05 / $0.40 | $0.07 / $0.56 |
|
||||
| GPT-4o | $2.50 / $10.00 | $3.50 / $14.00 |
|
||||
| GPT-4.1 | $2.00 / $8.00 | $2.80 / $11.20 |
|
||||
| GPT-4.1 Mini | $0.40 / $1.60 | $0.56 / $2.24 |
|
||||
| GPT-4.1 Nano | $0.10 / $0.40 | $0.14 / $0.56 |
|
||||
| o1 | $15.00 / $60.00 | $21.00 / $84.00 |
|
||||
| o3 | $2.00 / $8.00 | $2.80 / $11.20 |
|
||||
| o4 Mini | $1.10 / $4.40 | $1.54 / $6.16 |
|
||||
|
||||
**Anthropic**
|
||||
| Modelo | Precio base (entrada/salida) | Precio alojado (entrada/salida) |
|
||||
|-------|---------------------------|----------------------------|
|
||||
| Claude Opus 4.5 | $5.00 / $25.00 | $10.00 / $50.00 |
|
||||
| Claude Opus 4.1 | $15.00 / $75.00 | $30.00 / $150.00 |
|
||||
| Claude Sonnet 4.5 | $3.00 / $15.00 | $6.00 / $30.00 |
|
||||
| Claude Sonnet 4.0 | $3.00 / $15.00 | $6.00 / $30.00 |
|
||||
| Claude Haiku 4.5 | $1.00 / $5.00 | $2.00 / $10.00 |
|
||||
| Claude Opus 4.5 | $5.00 / $25.00 | $7.00 / $35.00 |
|
||||
| Claude Opus 4.1 | $15.00 / $75.00 | $21.00 / $105.00 |
|
||||
| Claude Sonnet 4.5 | $3.00 / $15.00 | $4.20 / $21.00 |
|
||||
| Claude Sonnet 4.0 | $3.00 / $15.00 | $4.20 / $21.00 |
|
||||
| Claude Haiku 4.5 | $1.00 / $5.00 | $1.40 / $7.00 |
|
||||
|
||||
**Google**
|
||||
| Modelo | Precio base (entrada/salida) | Precio alojado (entrada/salida) |
|
||||
|-------|---------------------------|----------------------------|
|
||||
| Gemini 3 Pro Preview | $2.00 / $12.00 | $4.00 / $24.00 |
|
||||
| Gemini 2.5 Pro | $1.25 / $10.00 | $2.50 / $20.00 |
|
||||
| Gemini 2.5 Flash | $0.30 / $2.50 | $0.60 / $5.00 |
|
||||
| Gemini 3 Pro Preview | $2.00 / $12.00 | $2.80 / $16.80 |
|
||||
| Gemini 2.5 Pro | $1.25 / $10.00 | $1.75 / $14.00 |
|
||||
| Gemini 2.5 Flash | $0.30 / $2.50 | $0.42 / $3.50 |
|
||||
|
||||
*El multiplicador 2x cubre los costos de infraestructura y gestión de API.*
|
||||
*El multiplicador de 1.4x cubre los costos de infraestructura y gestión de API.*
|
||||
</Tab>
|
||||
|
||||
<Tab>
|
||||
|
||||
@@ -49,40 +49,40 @@ La répartition des modèles montre :
|
||||
|
||||
<Tabs items={['Modèles hébergés', 'Apportez votre propre clé API']}>
|
||||
<Tab>
|
||||
**Modèles hébergés** - Sim fournit des clés API avec un multiplicateur de prix de 2x :
|
||||
**Modèles hébergés** - Sim fournit des clés API avec un multiplicateur de prix de 1,4x pour les blocs Agent :
|
||||
|
||||
**OpenAI**
|
||||
| Modèle | Prix de base (entrée/sortie) | Prix hébergé (entrée/sortie) |
|
||||
|-------|---------------------------|----------------------------|
|
||||
| GPT-5.1 | 1,25 $ / 10,00 $ | 2,50 $ / 20,00 $ |
|
||||
| GPT-5 | 1,25 $ / 10,00 $ | 2,50 $ / 20,00 $ |
|
||||
| GPT-5 Mini | 0,25 $ / 2,00 $ | 0,50 $ / 4,00 $ |
|
||||
| GPT-5 Nano | 0,05 $ / 0,40 $ | 0,10 $ / 0,80 $ |
|
||||
| GPT-4o | 2,50 $ / 10,00 $ | 5,00 $ / 20,00 $ |
|
||||
| GPT-4.1 | 2,00 $ / 8,00 $ | 4,00 $ / 16,00 $ |
|
||||
| GPT-4.1 Mini | 0,40 $ / 1,60 $ | 0,80 $ / 3,20 $ |
|
||||
| GPT-4.1 Nano | 0,10 $ / 0,40 $ | 0,20 $ / 0,80 $ |
|
||||
| o1 | 15,00 $ / 60,00 $ | 30,00 $ / 120,00 $ |
|
||||
| o3 | 2,00 $ / 8,00 $ | 4,00 $ / 16,00 $ |
|
||||
| o4 Mini | 1,10 $ / 4,40 $ | 2,20 $ / 8,80 $ |
|
||||
| GPT-5.1 | 1,25 $ / 10,00 $ | 1,75 $ / 14,00 $ |
|
||||
| GPT-5 | 1,25 $ / 10,00 $ | 1,75 $ / 14,00 $ |
|
||||
| GPT-5 Mini | 0,25 $ / 2,00 $ | 0,35 $ / 2,80 $ |
|
||||
| GPT-5 Nano | 0,05 $ / 0,40 $ | 0,07 $ / 0,56 $ |
|
||||
| GPT-4o | 2,50 $ / 10,00 $ | 3,50 $ / 14,00 $ |
|
||||
| GPT-4.1 | 2,00 $ / 8,00 $ | 2,80 $ / 11,20 $ |
|
||||
| GPT-4.1 Mini | 0,40 $ / 1,60 $ | 0,56 $ / 2,24 $ |
|
||||
| GPT-4.1 Nano | 0,10 $ / 0,40 $ | 0,14 $ / 0,56 $ |
|
||||
| o1 | 15,00 $ / 60,00 $ | 21,00 $ / 84,00 $ |
|
||||
| o3 | 2,00 $ / 8,00 $ | 2,80 $ / 11,20 $ |
|
||||
| o4 Mini | 1,10 $ / 4,40 $ | 1,54 $ / 6,16 $ |
|
||||
|
||||
**Anthropic**
|
||||
| Modèle | Prix de base (entrée/sortie) | Prix hébergé (entrée/sortie) |
|
||||
|-------|---------------------------|----------------------------|
|
||||
| Claude Opus 4.5 | 5,00 $ / 25,00 $ | 10,00 $ / 50,00 $ |
|
||||
| Claude Opus 4.1 | 15,00 $ / 75,00 $ | 30,00 $ / 150,00 $ |
|
||||
| Claude Sonnet 4.5 | 3,00 $ / 15,00 $ | 6,00 $ / 30,00 $ |
|
||||
| Claude Sonnet 4.0 | 3,00 $ / 15,00 $ | 6,00 $ / 30,00 $ |
|
||||
| Claude Haiku 4.5 | 1,00 $ / 5,00 $ | 2,00 $ / 10,00 $ |
|
||||
| Claude Opus 4.5 | 5,00 $ / 25,00 $ | 7,00 $ / 35,00 $ |
|
||||
| Claude Opus 4.1 | 15,00 $ / 75,00 $ | 21,00 $ / 105,00 $ |
|
||||
| Claude Sonnet 4.5 | 3,00 $ / 15,00 $ | 4,20 $ / 21,00 $ |
|
||||
| Claude Sonnet 4.0 | 3,00 $ / 15,00 $ | 4,20 $ / 21,00 $ |
|
||||
| Claude Haiku 4.5 | 1,00 $ / 5,00 $ | 1,40 $ / 7,00 $ |
|
||||
|
||||
**Google**
|
||||
| Modèle | Prix de base (entrée/sortie) | Prix hébergé (entrée/sortie) |
|
||||
|-------|---------------------------|----------------------------|
|
||||
| Gemini 3 Pro Preview | 2,00 $ / 12,00 $ | 4,00 $ / 24,00 $ |
|
||||
| Gemini 2.5 Pro | 1,25 $ / 10,00 $ | 2,50 $ / 20,00 $ |
|
||||
| Gemini 2.5 Flash | 0,30 $ / 2,50 $ | 0,60 $ / 5,00 $ |
|
||||
| Gemini 3 Pro Preview | 2,00 $ / 12,00 $ | 2,80 $ / 16,80 $ |
|
||||
| Gemini 2.5 Pro | 1,25 $ / 10,00 $ | 1,75 $ / 14,00 $ |
|
||||
| Gemini 2.5 Flash | 0,30 $ / 2,50 $ | 0,42 $ / 3,50 $ |
|
||||
|
||||
*Le multiplicateur 2x couvre les coûts d'infrastructure et de gestion des API.*
|
||||
*Le multiplicateur de 1,4x couvre les coûts d'infrastructure et de gestion des API.*
|
||||
</Tab>
|
||||
|
||||
<Tab>
|
||||
|
||||
@@ -47,42 +47,42 @@ AIブロックを使用するワークフローでは、ログで詳細なコス
|
||||
|
||||
## 料金オプション
|
||||
|
||||
<Tabs items={['Hosted Models', 'Bring Your Own API Key']}>
|
||||
<Tabs items={['ホステッドモデル', '独自のAPIキーを使用']}>
|
||||
<Tab>
|
||||
**ホステッドモデル** - Simは2倍の価格乗数でAPIキーを提供します:
|
||||
**ホステッドモデル** - Simは、エージェントブロック用に1.4倍の価格乗数を適用したAPIキーを提供します:
|
||||
|
||||
**OpenAI**
|
||||
| モデル | 基本価格(入力/出力) | ホステッド価格(入力/出力) |
|
||||
|-------|---------------------------|----------------------------|
|
||||
| GPT-5.1 | $1.25 / $10.00 | $2.50 / $20.00 |
|
||||
| GPT-5 | $1.25 / $10.00 | $2.50 / $20.00 |
|
||||
| GPT-5 Mini | $0.25 / $2.00 | $0.50 / $4.00 |
|
||||
| GPT-5 Nano | $0.05 / $0.40 | $0.10 / $0.80 |
|
||||
| GPT-4o | $2.50 / $10.00 | $5.00 / $20.00 |
|
||||
| GPT-4.1 | $2.00 / $8.00 | $4.00 / $16.00 |
|
||||
| GPT-4.1 Mini | $0.40 / $1.60 | $0.80 / $3.20 |
|
||||
| GPT-4.1 Nano | $0.10 / $0.40 | $0.20 / $0.80 |
|
||||
| o1 | $15.00 / $60.00 | $30.00 / $120.00 |
|
||||
| o3 | $2.00 / $8.00 | $4.00 / $16.00 |
|
||||
| o4 Mini | $1.10 / $4.40 | $2.20 / $8.80 |
|
||||
| GPT-5.1 | $1.25 / $10.00 | $1.75 / $14.00 |
|
||||
| GPT-5 | $1.25 / $10.00 | $1.75 / $14.00 |
|
||||
| GPT-5 Mini | $0.25 / $2.00 | $0.35 / $2.80 |
|
||||
| GPT-5 Nano | $0.05 / $0.40 | $0.07 / $0.56 |
|
||||
| GPT-4o | $2.50 / $10.00 | $3.50 / $14.00 |
|
||||
| GPT-4.1 | $2.00 / $8.00 | $2.80 / $11.20 |
|
||||
| GPT-4.1 Mini | $0.40 / $1.60 | $0.56 / $2.24 |
|
||||
| GPT-4.1 Nano | $0.10 / $0.40 | $0.14 / $0.56 |
|
||||
| o1 | $15.00 / $60.00 | $21.00 / $84.00 |
|
||||
| o3 | $2.00 / $8.00 | $2.80 / $11.20 |
|
||||
| o4 Mini | $1.10 / $4.40 | $1.54 / $6.16 |
|
||||
|
||||
**Anthropic**
|
||||
| モデル | 基本価格(入力/出力) | ホステッド価格(入力/出力) |
|
||||
|-------|---------------------------|----------------------------|
|
||||
| Claude Opus 4.5 | $5.00 / $25.00 | $10.00 / $50.00 |
|
||||
| Claude Opus 4.1 | $15.00 / $75.00 | $30.00 / $150.00 |
|
||||
| Claude Sonnet 4.5 | $3.00 / $15.00 | $6.00 / $30.00 |
|
||||
| Claude Sonnet 4.0 | $3.00 / $15.00 | $6.00 / $30.00 |
|
||||
| Claude Haiku 4.5 | $1.00 / $5.00 | $2.00 / $10.00 |
|
||||
| Claude Opus 4.5 | $5.00 / $25.00 | $7.00 / $35.00 |
|
||||
| Claude Opus 4.1 | $15.00 / $75.00 | $21.00 / $105.00 |
|
||||
| Claude Sonnet 4.5 | $3.00 / $15.00 | $4.20 / $21.00 |
|
||||
| Claude Sonnet 4.0 | $3.00 / $15.00 | $4.20 / $21.00 |
|
||||
| Claude Haiku 4.5 | $1.00 / $5.00 | $1.40 / $7.00 |
|
||||
|
||||
**Google**
|
||||
| モデル | 基本価格(入力/出力) | ホステッド価格(入力/出力) |
|
||||
|-------|---------------------------|----------------------------|
|
||||
| Gemini 3 Pro Preview | $2.00 / $12.00 | $4.00 / $24.00 |
|
||||
| Gemini 2.5 Pro | $1.25 / $10.00 | $2.50 / $20.00 |
|
||||
| Gemini 2.5 Flash | $0.30 / $2.50 | $0.60 / $5.00 |
|
||||
| Gemini 3 Pro Preview | $2.00 / $12.00 | $2.80 / $16.80 |
|
||||
| Gemini 2.5 Pro | $1.25 / $10.00 | $1.75 / $14.00 |
|
||||
| Gemini 2.5 Flash | $0.30 / $2.50 | $0.42 / $3.50 |
|
||||
|
||||
*2倍の乗数は、インフラストラクチャとAPI管理コストをカバーします。*
|
||||
*1.4倍の乗数は、インフラストラクチャとAPI管理のコストをカバーします。*
|
||||
</Tab>
|
||||
|
||||
<Tab>
|
||||
|
||||
@@ -47,42 +47,42 @@ totalCost = baseExecutionCharge + modelCost
|
||||
|
||||
## 定价选项
|
||||
|
||||
<Tabs items={[ '托管模型', '自带 API 密钥' ]}>
|
||||
<Tabs items={['托管模型', '自带 API Key']}>
|
||||
<Tab>
|
||||
**托管模型** - Sim 提供 API 密钥,价格为基础价格的 2 倍:
|
||||
**托管模型** - Sim 为 Agent 模块提供 API Key,价格乘以 1.4 倍:
|
||||
|
||||
**OpenAI**
|
||||
| 模型 | 基础价格(输入/输出) | 托管价格(输入/输出) |
|
||||
|-------|---------------------------|----------------------------|
|
||||
| GPT-5.1 | $1.25 / $10.00 | $2.50 / $20.00 |
|
||||
| GPT-5 | $1.25 / $10.00 | $2.50 / $20.00 |
|
||||
| GPT-5 Mini | $0.25 / $2.00 | $0.50 / $4.00 |
|
||||
| GPT-5 Nano | $0.05 / $0.40 | $0.10 / $0.80 |
|
||||
| GPT-4o | $2.50 / $10.00 | $5.00 / $20.00 |
|
||||
| GPT-4.1 | $2.00 / $8.00 | $4.00 / $16.00 |
|
||||
| GPT-4.1 Mini | $0.40 / $1.60 | $0.80 / $3.20 |
|
||||
| GPT-4.1 Nano | $0.10 / $0.40 | $0.20 / $0.80 |
|
||||
| o1 | $15.00 / $60.00 | $30.00 / $120.00 |
|
||||
| o3 | $2.00 / $8.00 | $4.00 / $16.00 |
|
||||
| o4 Mini | $1.10 / $4.40 | $2.20 / $8.80 |
|
||||
| GPT-5.1 | $1.25 / $10.00 | $1.75 / $14.00 |
|
||||
| GPT-5 | $1.25 / $10.00 | $1.75 / $14.00 |
|
||||
| GPT-5 Mini | $0.25 / $2.00 | $0.35 / $2.80 |
|
||||
| GPT-5 Nano | $0.05 / $0.40 | $0.07 / $0.56 |
|
||||
| GPT-4o | $2.50 / $10.00 | $3.50 / $14.00 |
|
||||
| GPT-4.1 | $2.00 / $8.00 | $2.80 / $11.20 |
|
||||
| GPT-4.1 Mini | $0.40 / $1.60 | $0.56 / $2.24 |
|
||||
| GPT-4.1 Nano | $0.10 / $0.40 | $0.14 / $0.56 |
|
||||
| o1 | $15.00 / $60.00 | $21.00 / $84.00 |
|
||||
| o3 | $2.00 / $8.00 | $2.80 / $11.20 |
|
||||
| o4 Mini | $1.10 / $4.40 | $1.54 / $6.16 |
|
||||
|
||||
**Anthropic**
|
||||
| 模型 | 基础价格(输入/输出) | 托管价格(输入/输出) |
|
||||
|-------|---------------------------|----------------------------|
|
||||
| Claude Opus 4.5 | $5.00 / $25.00 | $10.00 / $50.00 |
|
||||
| Claude Opus 4.1 | $15.00 / $75.00 | $30.00 / $150.00 |
|
||||
| Claude Sonnet 4.5 | $3.00 / $15.00 | $6.00 / $30.00 |
|
||||
| Claude Sonnet 4.0 | $3.00 / $15.00 | $6.00 / $30.00 |
|
||||
| Claude Haiku 4.5 | $1.00 / $5.00 | $2.00 / $10.00 |
|
||||
| Claude Opus 4.5 | $5.00 / $25.00 | $7.00 / $35.00 |
|
||||
| Claude Opus 4.1 | $15.00 / $75.00 | $21.00 / $105.00 |
|
||||
| Claude Sonnet 4.5 | $3.00 / $15.00 | $4.20 / $21.00 |
|
||||
| Claude Sonnet 4.0 | $3.00 / $15.00 | $4.20 / $21.00 |
|
||||
| Claude Haiku 4.5 | $1.00 / $5.00 | $1.40 / $7.00 |
|
||||
|
||||
**Google**
|
||||
| 模型 | 基础价格(输入/输出) | 托管价格(输入/输出) |
|
||||
|-------|---------------------------|----------------------------|
|
||||
| Gemini 3 Pro Preview | $2.00 / $12.00 | $4.00 / $24.00 |
|
||||
| Gemini 2.5 Pro | $1.25 / $10.00 | $2.50 / $20.00 |
|
||||
| Gemini 2.5 Flash | $0.30 / $2.50 | $0.60 / $5.00 |
|
||||
| Gemini 3 Pro Preview | $2.00 / $12.00 | $2.80 / $16.80 |
|
||||
| Gemini 2.5 Pro | $1.25 / $10.00 | $1.75 / $14.00 |
|
||||
| Gemini 2.5 Flash | $0.30 / $2.50 | $0.42 / $3.50 |
|
||||
|
||||
*2 倍系数涵盖了基础设施和 API 管理成本。*
|
||||
*1.4 倍的系数涵盖了基础设施和 API 管理成本。*
|
||||
</Tab>
|
||||
|
||||
<Tab>
|
||||
|
||||
@@ -4581,11 +4581,11 @@ checksums:
|
||||
content/10: d19c8c67f52eb08b6a49c0969a9c8b86
|
||||
content/11: 4024a36e0d9479ff3191fb9cd2b2e365
|
||||
content/12: 0396a1e5d9548207f56e6b6cae85a542
|
||||
content/13: 4bfdeac5ad21c75209dcdfde85aa52b0
|
||||
content/14: 35df9a16b866dbe4bb9fc1d7aee42711
|
||||
content/15: 135c044066cea8cc0e22f06d67754ec5
|
||||
content/16: 6882b91e30548d7d331388c26cf2e948
|
||||
content/17: 29aed7061148ae46fa6ec8bcbc857c3d
|
||||
content/13: 68f90237f86be125224c56a2643904a3
|
||||
content/14: e854781f0fbf6f397a3ac682e892a993
|
||||
content/15: 2340c44af715fb8ca58f43151515aae1
|
||||
content/16: fc7ae93bff492d80f4b6f16e762e05fa
|
||||
content/17: 8a46692d5df3fed9f94d59dfc3fb7e0a
|
||||
content/18: e0571c88ea5bcd4305a6f5772dcbed98
|
||||
content/19: 83fc31418ff454a5e06b290e3708ef32
|
||||
content/20: 4392b5939a6d5774fb080cad1ee1dbb8
|
||||
|
||||
Binary file not shown.
|
Before Width: | Height: | Size: 33 KiB After Width: | Height: | Size: 11 KiB |
100
apps/sim/app/(auth)/components/branded-button.tsx
Normal file
100
apps/sim/app/(auth)/components/branded-button.tsx
Normal file
@@ -0,0 +1,100 @@
|
||||
'use client'
|
||||
|
||||
import { forwardRef, useState } from 'react'
|
||||
import { ArrowRight, ChevronRight, Loader2 } from 'lucide-react'
|
||||
import { Button, type ButtonProps as EmcnButtonProps } from '@/components/emcn'
|
||||
import { cn } from '@/lib/core/utils/cn'
|
||||
import { useBrandedButtonClass } from '@/hooks/use-branded-button-class'
|
||||
|
||||
export interface BrandedButtonProps extends Omit<EmcnButtonProps, 'variant' | 'size'> {
|
||||
/** Shows loading spinner and disables button */
|
||||
loading?: boolean
|
||||
/** Text to show when loading (appends "..." automatically) */
|
||||
loadingText?: string
|
||||
/** Show arrow animation on hover (default: true) */
|
||||
showArrow?: boolean
|
||||
/** Make button full width (default: true) */
|
||||
fullWidth?: boolean
|
||||
}
|
||||
|
||||
/**
|
||||
* Branded button for auth and status pages.
|
||||
* Automatically detects whitelabel customization and applies appropriate styling.
|
||||
*
|
||||
* @example
|
||||
* ```tsx
|
||||
* // Primary branded button with arrow
|
||||
* <BrandedButton onClick={handleSubmit}>Sign In</BrandedButton>
|
||||
*
|
||||
* // Loading state
|
||||
* <BrandedButton loading loadingText="Signing in">Sign In</BrandedButton>
|
||||
*
|
||||
* // Without arrow animation
|
||||
* <BrandedButton showArrow={false}>Continue</BrandedButton>
|
||||
* ```
|
||||
*/
|
||||
export const BrandedButton = forwardRef<HTMLButtonElement, BrandedButtonProps>(
|
||||
(
|
||||
{
|
||||
children,
|
||||
loading = false,
|
||||
loadingText,
|
||||
showArrow = true,
|
||||
fullWidth = true,
|
||||
className,
|
||||
disabled,
|
||||
onMouseEnter,
|
||||
onMouseLeave,
|
||||
...props
|
||||
},
|
||||
ref
|
||||
) => {
|
||||
const buttonClass = useBrandedButtonClass()
|
||||
const [isHovered, setIsHovered] = useState(false)
|
||||
|
||||
const handleMouseEnter = (e: React.MouseEvent<HTMLButtonElement>) => {
|
||||
setIsHovered(true)
|
||||
onMouseEnter?.(e)
|
||||
}
|
||||
|
||||
const handleMouseLeave = (e: React.MouseEvent<HTMLButtonElement>) => {
|
||||
setIsHovered(false)
|
||||
onMouseLeave?.(e)
|
||||
}
|
||||
|
||||
return (
|
||||
<Button
|
||||
ref={ref}
|
||||
variant='branded'
|
||||
size='branded'
|
||||
disabled={disabled || loading}
|
||||
onMouseEnter={handleMouseEnter}
|
||||
onMouseLeave={handleMouseLeave}
|
||||
className={cn(buttonClass, 'group', fullWidth && 'w-full', className)}
|
||||
{...props}
|
||||
>
|
||||
{loading ? (
|
||||
<span className='flex items-center gap-2'>
|
||||
<Loader2 className='h-4 w-4 animate-spin' />
|
||||
{loadingText ? `${loadingText}...` : children}
|
||||
</span>
|
||||
) : showArrow ? (
|
||||
<span className='flex items-center gap-1'>
|
||||
{children}
|
||||
<span className='inline-flex transition-transform duration-200 group-hover:translate-x-0.5'>
|
||||
{isHovered ? (
|
||||
<ArrowRight className='h-4 w-4' aria-hidden='true' />
|
||||
) : (
|
||||
<ChevronRight className='h-4 w-4' aria-hidden='true' />
|
||||
)}
|
||||
</span>
|
||||
</span>
|
||||
) : (
|
||||
children
|
||||
)}
|
||||
</Button>
|
||||
)
|
||||
}
|
||||
)
|
||||
|
||||
BrandedButton.displayName = 'BrandedButton'
|
||||
@@ -34,7 +34,7 @@ export function SSOLoginButton({
|
||||
}
|
||||
|
||||
const primaryBtnClasses = cn(
|
||||
primaryClassName || 'auth-button-gradient',
|
||||
primaryClassName || 'branded-button-gradient',
|
||||
'flex w-full items-center justify-center gap-2 rounded-[10px] border font-medium text-[15px] text-white transition-all duration-200'
|
||||
)
|
||||
|
||||
|
||||
74
apps/sim/app/(auth)/components/status-page-layout.tsx
Normal file
74
apps/sim/app/(auth)/components/status-page-layout.tsx
Normal file
@@ -0,0 +1,74 @@
|
||||
'use client'
|
||||
|
||||
import type { ReactNode } from 'react'
|
||||
import { inter } from '@/app/_styles/fonts/inter/inter'
|
||||
import { soehne } from '@/app/_styles/fonts/soehne/soehne'
|
||||
import AuthBackground from '@/app/(auth)/components/auth-background'
|
||||
import Nav from '@/app/(landing)/components/nav/nav'
|
||||
import { SupportFooter } from './support-footer'
|
||||
|
||||
export interface StatusPageLayoutProps {
|
||||
/** Page title displayed prominently */
|
||||
title: string
|
||||
/** Description text below the title */
|
||||
description: string | ReactNode
|
||||
/** Content to render below the title/description (usually buttons) */
|
||||
children?: ReactNode
|
||||
/** Whether to show the support footer (default: true) */
|
||||
showSupportFooter?: boolean
|
||||
/** Whether to hide the nav bar (useful for embedded forms) */
|
||||
hideNav?: boolean
|
||||
}
|
||||
|
||||
/**
|
||||
* Unified layout for status/error pages (404, form unavailable, chat error, etc.).
|
||||
* Uses AuthBackground and Nav for consistent styling with auth pages.
|
||||
*
|
||||
* @example
|
||||
* ```tsx
|
||||
* <StatusPageLayout
|
||||
* title="Page Not Found"
|
||||
* description="The page you're looking for doesn't exist."
|
||||
* >
|
||||
* <BrandedButton onClick={() => router.push('/')}>Return to Home</BrandedButton>
|
||||
* </StatusPageLayout>
|
||||
* ```
|
||||
*/
|
||||
export function StatusPageLayout({
|
||||
title,
|
||||
description,
|
||||
children,
|
||||
showSupportFooter = true,
|
||||
hideNav = false,
|
||||
}: StatusPageLayoutProps) {
|
||||
return (
|
||||
<AuthBackground>
|
||||
<main className='relative flex min-h-screen flex-col text-foreground'>
|
||||
{!hideNav && <Nav hideAuthButtons={true} variant='auth' />}
|
||||
<div className='relative z-30 flex flex-1 items-center justify-center px-4 pb-24'>
|
||||
<div className='w-full max-w-lg px-4'>
|
||||
<div className='flex flex-col items-center justify-center'>
|
||||
<div className='space-y-1 text-center'>
|
||||
<h1
|
||||
className={`${soehne.className} font-medium text-[32px] text-black tracking-tight`}
|
||||
>
|
||||
{title}
|
||||
</h1>
|
||||
<p className={`${inter.className} font-[380] text-[16px] text-muted-foreground`}>
|
||||
{description}
|
||||
</p>
|
||||
</div>
|
||||
|
||||
{children && (
|
||||
<div className={`${inter.className} mt-8 w-full max-w-[410px] space-y-3`}>
|
||||
{children}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
{showSupportFooter && <SupportFooter position='absolute' />}
|
||||
</main>
|
||||
</AuthBackground>
|
||||
)
|
||||
}
|
||||
40
apps/sim/app/(auth)/components/support-footer.tsx
Normal file
40
apps/sim/app/(auth)/components/support-footer.tsx
Normal file
@@ -0,0 +1,40 @@
|
||||
'use client'
|
||||
|
||||
import { useBrandConfig } from '@/lib/branding/branding'
|
||||
import { inter } from '@/app/_styles/fonts/inter/inter'
|
||||
|
||||
export interface SupportFooterProps {
|
||||
/** Position style - 'fixed' for pages without AuthLayout, 'absolute' for pages with AuthLayout */
|
||||
position?: 'fixed' | 'absolute'
|
||||
}
|
||||
|
||||
/**
|
||||
* Support footer component for auth and status pages.
|
||||
* Displays a "Need help? Contact support" link using branded support email.
|
||||
*
|
||||
* @example
|
||||
* ```tsx
|
||||
* // Fixed position (for standalone pages)
|
||||
* <SupportFooter />
|
||||
*
|
||||
* // Absolute position (for pages using AuthLayout)
|
||||
* <SupportFooter position="absolute" />
|
||||
* ```
|
||||
*/
|
||||
export function SupportFooter({ position = 'fixed' }: SupportFooterProps) {
|
||||
const brandConfig = useBrandConfig()
|
||||
|
||||
return (
|
||||
<div
|
||||
className={`${inter.className} auth-text-muted right-0 bottom-0 left-0 z-50 pb-8 text-center font-[340] text-[13px] leading-relaxed ${position}`}
|
||||
>
|
||||
Need help?{' '}
|
||||
<a
|
||||
href={`mailto:${brandConfig.supportEmail}`}
|
||||
className='auth-link underline-offset-4 transition hover:underline'
|
||||
>
|
||||
Contact support
|
||||
</a>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
@@ -105,7 +105,7 @@ export default function LoginPage({
|
||||
const [password, setPassword] = useState('')
|
||||
const [passwordErrors, setPasswordErrors] = useState<string[]>([])
|
||||
const [showValidationError, setShowValidationError] = useState(false)
|
||||
const [buttonClass, setButtonClass] = useState('auth-button-gradient')
|
||||
const [buttonClass, setButtonClass] = useState('branded-button-gradient')
|
||||
const [isButtonHovered, setIsButtonHovered] = useState(false)
|
||||
|
||||
const [callbackUrl, setCallbackUrl] = useState('/workspace')
|
||||
@@ -146,9 +146,9 @@ export default function LoginPage({
|
||||
const brandAccent = computedStyle.getPropertyValue('--brand-accent-hex').trim()
|
||||
|
||||
if (brandAccent && brandAccent !== '#6f3dfa') {
|
||||
setButtonClass('auth-button-custom')
|
||||
setButtonClass('branded-button-custom')
|
||||
} else {
|
||||
setButtonClass('auth-button-gradient')
|
||||
setButtonClass('branded-button-gradient')
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -27,7 +27,7 @@ export function RequestResetForm({
|
||||
statusMessage,
|
||||
className,
|
||||
}: RequestResetFormProps) {
|
||||
const [buttonClass, setButtonClass] = useState('auth-button-gradient')
|
||||
const [buttonClass, setButtonClass] = useState('branded-button-gradient')
|
||||
const [isButtonHovered, setIsButtonHovered] = useState(false)
|
||||
|
||||
useEffect(() => {
|
||||
@@ -36,9 +36,9 @@ export function RequestResetForm({
|
||||
const brandAccent = computedStyle.getPropertyValue('--brand-accent-hex').trim()
|
||||
|
||||
if (brandAccent && brandAccent !== '#6f3dfa') {
|
||||
setButtonClass('auth-button-custom')
|
||||
setButtonClass('branded-button-custom')
|
||||
} else {
|
||||
setButtonClass('auth-button-gradient')
|
||||
setButtonClass('branded-button-gradient')
|
||||
}
|
||||
}
|
||||
|
||||
@@ -138,7 +138,7 @@ export function SetNewPasswordForm({
|
||||
const [validationMessage, setValidationMessage] = useState('')
|
||||
const [showPassword, setShowPassword] = useState(false)
|
||||
const [showConfirmPassword, setShowConfirmPassword] = useState(false)
|
||||
const [buttonClass, setButtonClass] = useState('auth-button-gradient')
|
||||
const [buttonClass, setButtonClass] = useState('branded-button-gradient')
|
||||
const [isButtonHovered, setIsButtonHovered] = useState(false)
|
||||
|
||||
useEffect(() => {
|
||||
@@ -147,9 +147,9 @@ export function SetNewPasswordForm({
|
||||
const brandAccent = computedStyle.getPropertyValue('--brand-accent-hex').trim()
|
||||
|
||||
if (brandAccent && brandAccent !== '#6f3dfa') {
|
||||
setButtonClass('auth-button-custom')
|
||||
setButtonClass('branded-button-custom')
|
||||
} else {
|
||||
setButtonClass('auth-button-gradient')
|
||||
setButtonClass('branded-button-gradient')
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -95,7 +95,7 @@ function SignupFormContent({
|
||||
const [showEmailValidationError, setShowEmailValidationError] = useState(false)
|
||||
const [redirectUrl, setRedirectUrl] = useState('')
|
||||
const [isInviteFlow, setIsInviteFlow] = useState(false)
|
||||
const [buttonClass, setButtonClass] = useState('auth-button-gradient')
|
||||
const [buttonClass, setButtonClass] = useState('branded-button-gradient')
|
||||
const [isButtonHovered, setIsButtonHovered] = useState(false)
|
||||
|
||||
const [name, setName] = useState('')
|
||||
@@ -132,9 +132,9 @@ function SignupFormContent({
|
||||
const brandAccent = computedStyle.getPropertyValue('--brand-accent-hex').trim()
|
||||
|
||||
if (brandAccent && brandAccent !== '#6f3dfa') {
|
||||
setButtonClass('auth-button-custom')
|
||||
setButtonClass('branded-button-custom')
|
||||
} else {
|
||||
setButtonClass('auth-button-gradient')
|
||||
setButtonClass('branded-button-gradient')
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -57,7 +57,7 @@ export default function SSOForm() {
|
||||
const [email, setEmail] = useState('')
|
||||
const [emailErrors, setEmailErrors] = useState<string[]>([])
|
||||
const [showEmailValidationError, setShowEmailValidationError] = useState(false)
|
||||
const [buttonClass, setButtonClass] = useState('auth-button-gradient')
|
||||
const [buttonClass, setButtonClass] = useState('branded-button-gradient')
|
||||
const [callbackUrl, setCallbackUrl] = useState('/workspace')
|
||||
|
||||
useEffect(() => {
|
||||
@@ -96,9 +96,9 @@ export default function SSOForm() {
|
||||
const brandAccent = computedStyle.getPropertyValue('--brand-accent-hex').trim()
|
||||
|
||||
if (brandAccent && brandAccent !== '#6f3dfa') {
|
||||
setButtonClass('auth-button-custom')
|
||||
setButtonClass('branded-button-custom')
|
||||
} else {
|
||||
setButtonClass('auth-button-gradient')
|
||||
setButtonClass('branded-button-gradient')
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -58,7 +58,7 @@ function VerificationForm({
|
||||
setCountdown(30)
|
||||
}
|
||||
|
||||
const [buttonClass, setButtonClass] = useState('auth-button-gradient')
|
||||
const [buttonClass, setButtonClass] = useState('branded-button-gradient')
|
||||
|
||||
useEffect(() => {
|
||||
const checkCustomBrand = () => {
|
||||
@@ -66,9 +66,9 @@ function VerificationForm({
|
||||
const brandAccent = computedStyle.getPropertyValue('--brand-accent-hex').trim()
|
||||
|
||||
if (brandAccent && brandAccent !== '#6f3dfa') {
|
||||
setButtonClass('auth-button-custom')
|
||||
setButtonClass('branded-button-custom')
|
||||
} else {
|
||||
setButtonClass('auth-button-gradient')
|
||||
setButtonClass('branded-button-gradient')
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -767,7 +767,7 @@ export default function PrivacyPolicy() {
|
||||
privacy@sim.ai
|
||||
</Link>
|
||||
</li>
|
||||
<li>Mailing Address: Sim, 80 Langton St, San Francisco, CA 94133, USA</li>
|
||||
<li>Mailing Address: Sim, 80 Langton St, San Francisco, CA 94103, USA</li>
|
||||
</ul>
|
||||
<p>We will respond to your request within a reasonable timeframe.</p>
|
||||
</section>
|
||||
|
||||
@@ -1,13 +0,0 @@
|
||||
export default function Head() {
|
||||
return (
|
||||
<>
|
||||
<link rel='canonical' href='https://sim.ai/studio' />
|
||||
<link
|
||||
rel='alternate'
|
||||
type='application/rss+xml'
|
||||
title='Sim Studio'
|
||||
href='https://sim.ai/studio/rss.xml'
|
||||
/>
|
||||
</>
|
||||
)
|
||||
}
|
||||
@@ -2,6 +2,7 @@
|
||||
|
||||
import type React from 'react'
|
||||
import { createContext, useCallback, useEffect, useMemo, useState } from 'react'
|
||||
import { useQueryClient } from '@tanstack/react-query'
|
||||
import posthog from 'posthog-js'
|
||||
import { client } from '@/lib/auth/auth-client'
|
||||
|
||||
@@ -35,12 +36,15 @@ export function SessionProvider({ children }: { children: React.ReactNode }) {
|
||||
const [data, setData] = useState<AppSession>(null)
|
||||
const [isPending, setIsPending] = useState(true)
|
||||
const [error, setError] = useState<Error | null>(null)
|
||||
const queryClient = useQueryClient()
|
||||
|
||||
const loadSession = useCallback(async () => {
|
||||
const loadSession = useCallback(async (bypassCache = false) => {
|
||||
try {
|
||||
setIsPending(true)
|
||||
setError(null)
|
||||
const res = await client.getSession()
|
||||
const res = bypassCache
|
||||
? await client.getSession({ query: { disableCookieCache: true } })
|
||||
: await client.getSession()
|
||||
setData(res?.data ?? null)
|
||||
} catch (e) {
|
||||
setError(e instanceof Error ? e : new Error('Failed to fetch session'))
|
||||
@@ -50,8 +54,25 @@ export function SessionProvider({ children }: { children: React.ReactNode }) {
|
||||
}, [])
|
||||
|
||||
useEffect(() => {
|
||||
loadSession()
|
||||
}, [loadSession])
|
||||
// Check if user was redirected after plan upgrade
|
||||
const params = new URLSearchParams(window.location.search)
|
||||
const wasUpgraded = params.get('upgraded') === 'true'
|
||||
|
||||
if (wasUpgraded) {
|
||||
params.delete('upgraded')
|
||||
const newUrl = params.toString()
|
||||
? `${window.location.pathname}?${params.toString()}`
|
||||
: window.location.pathname
|
||||
window.history.replaceState({}, '', newUrl)
|
||||
}
|
||||
|
||||
loadSession(wasUpgraded).then(() => {
|
||||
if (wasUpgraded) {
|
||||
queryClient.invalidateQueries({ queryKey: ['organizations'] })
|
||||
queryClient.invalidateQueries({ queryKey: ['subscription'] })
|
||||
}
|
||||
})
|
||||
}, [loadSession, queryClient])
|
||||
|
||||
useEffect(() => {
|
||||
if (isPending || typeof posthog.identify !== 'function') {
|
||||
|
||||
@@ -22,12 +22,13 @@ export function ThemeProvider({ children, ...props }: ThemeProviderProps) {
|
||||
pathname.startsWith('/changelog') ||
|
||||
pathname.startsWith('/chat') ||
|
||||
pathname.startsWith('/studio') ||
|
||||
pathname.startsWith('/resume')
|
||||
pathname.startsWith('/resume') ||
|
||||
pathname.startsWith('/form')
|
||||
|
||||
return (
|
||||
<NextThemesProvider
|
||||
attribute='class'
|
||||
defaultTheme='system'
|
||||
defaultTheme='dark'
|
||||
enableSystem
|
||||
disableTransitionOnChange
|
||||
storageKey='sim-theme'
|
||||
|
||||
@@ -42,6 +42,40 @@
|
||||
animation: dash-animation 1.5s linear infinite !important;
|
||||
}
|
||||
|
||||
/**
|
||||
* React Flow selection box styling
|
||||
* Uses brand-secondary color for selection highlighting
|
||||
*/
|
||||
.react-flow__selection {
|
||||
background: rgba(51, 180, 255, 0.08) !important;
|
||||
border: 1px solid var(--brand-secondary) !important;
|
||||
}
|
||||
|
||||
.react-flow__nodesselection-rect,
|
||||
.react-flow__nodesselection {
|
||||
background: transparent !important;
|
||||
border: none !important;
|
||||
pointer-events: none !important;
|
||||
}
|
||||
|
||||
/**
|
||||
* Selected node ring indicator
|
||||
* Uses a pseudo-element overlay to match the original behavior (absolute inset-0 z-40)
|
||||
*/
|
||||
.react-flow__node.selected > div > div {
|
||||
position: relative;
|
||||
}
|
||||
|
||||
.react-flow__node.selected > div > div::after {
|
||||
content: "";
|
||||
position: absolute;
|
||||
inset: 0;
|
||||
z-index: 40;
|
||||
border-radius: 8px;
|
||||
box-shadow: 0 0 0 1.75px var(--brand-secondary);
|
||||
pointer-events: none;
|
||||
}
|
||||
|
||||
/**
|
||||
* Color tokens - single source of truth for all colors
|
||||
* Light mode: Warm theme
|
||||
@@ -553,27 +587,25 @@ input[type="search"]::-ms-clear {
|
||||
animation: placeholder-pulse 1.5s ease-in-out infinite;
|
||||
}
|
||||
|
||||
.auth-button-gradient {
|
||||
background: linear-gradient(to bottom, var(--brand-primary-hex), var(--brand-400)) !important;
|
||||
border-color: var(--brand-400) !important;
|
||||
box-shadow: inset 0 2px 4px 0 var(--brand-400) !important;
|
||||
.branded-button-gradient {
|
||||
background: linear-gradient(to bottom, #8357ff, #6f3dfa) !important;
|
||||
border-color: #6f3dfa !important;
|
||||
box-shadow: inset 0 2px 4px 0 #9b77ff !important;
|
||||
}
|
||||
|
||||
.auth-button-gradient:hover {
|
||||
background: linear-gradient(to bottom, var(--brand-primary-hex), var(--brand-400)) !important;
|
||||
.branded-button-gradient:hover {
|
||||
background: linear-gradient(to bottom, #8357ff, #6f3dfa) !important;
|
||||
opacity: 0.9;
|
||||
}
|
||||
|
||||
.auth-button-custom {
|
||||
.branded-button-custom {
|
||||
background: var(--brand-primary-hex) !important;
|
||||
border-color: var(--brand-primary-hex) !important;
|
||||
box-shadow: inset 0 2px 4px 0 rgba(0, 0, 0, 0.1) !important;
|
||||
}
|
||||
|
||||
.auth-button-custom:hover {
|
||||
.branded-button-custom:hover {
|
||||
background: var(--brand-primary-hover-hex) !important;
|
||||
border-color: var(--brand-primary-hover-hex) !important;
|
||||
opacity: 1;
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
269
apps/sim/app/api/a2a/agents/[agentId]/route.ts
Normal file
269
apps/sim/app/api/a2a/agents/[agentId]/route.ts
Normal file
@@ -0,0 +1,269 @@
|
||||
/**
|
||||
* A2A Agent Card Endpoint
|
||||
*
|
||||
* Returns the Agent Card (discovery document) for an A2A agent.
|
||||
* Also supports CRUD operations for managing agents.
|
||||
*/
|
||||
|
||||
import { db } from '@sim/db'
|
||||
import { a2aAgent, workflow } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { eq } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { generateAgentCard, generateSkillsFromWorkflow } from '@/lib/a2a/agent-card'
|
||||
import type { AgentCapabilities, AgentSkill } from '@/lib/a2a/types'
|
||||
import { checkHybridAuth } from '@/lib/auth/hybrid'
|
||||
import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/persistence/utils'
|
||||
|
||||
const logger = createLogger('A2AAgentCardAPI')
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
interface RouteParams {
|
||||
agentId: string
|
||||
}
|
||||
|
||||
/**
|
||||
* GET - Returns the Agent Card for discovery
|
||||
*/
|
||||
export async function GET(request: NextRequest, { params }: { params: Promise<RouteParams> }) {
|
||||
const { agentId } = await params
|
||||
|
||||
try {
|
||||
const [agent] = await db
|
||||
.select({
|
||||
agent: a2aAgent,
|
||||
workflow: workflow,
|
||||
})
|
||||
.from(a2aAgent)
|
||||
.innerJoin(workflow, eq(a2aAgent.workflowId, workflow.id))
|
||||
.where(eq(a2aAgent.id, agentId))
|
||||
.limit(1)
|
||||
|
||||
if (!agent) {
|
||||
return NextResponse.json({ error: 'Agent not found' }, { status: 404 })
|
||||
}
|
||||
|
||||
if (!agent.agent.isPublished) {
|
||||
const auth = await checkHybridAuth(request, { requireWorkflowId: false })
|
||||
if (!auth.success) {
|
||||
return NextResponse.json({ error: 'Agent not published' }, { status: 404 })
|
||||
}
|
||||
}
|
||||
|
||||
const agentCard = generateAgentCard(
|
||||
{
|
||||
id: agent.agent.id,
|
||||
name: agent.agent.name,
|
||||
description: agent.agent.description,
|
||||
version: agent.agent.version,
|
||||
capabilities: agent.agent.capabilities as AgentCapabilities,
|
||||
skills: agent.agent.skills as AgentSkill[],
|
||||
},
|
||||
{
|
||||
id: agent.workflow.id,
|
||||
name: agent.workflow.name,
|
||||
description: agent.workflow.description,
|
||||
}
|
||||
)
|
||||
|
||||
return NextResponse.json(agentCard, {
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'Cache-Control': agent.agent.isPublished ? 'public, max-age=3600' : 'private, no-cache',
|
||||
},
|
||||
})
|
||||
} catch (error) {
|
||||
logger.error('Error getting Agent Card:', error)
|
||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* PUT - Update an agent
|
||||
*/
|
||||
export async function PUT(request: NextRequest, { params }: { params: Promise<RouteParams> }) {
|
||||
const { agentId } = await params
|
||||
|
||||
try {
|
||||
const auth = await checkHybridAuth(request, { requireWorkflowId: false })
|
||||
if (!auth.success || !auth.userId) {
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const [existingAgent] = await db
|
||||
.select()
|
||||
.from(a2aAgent)
|
||||
.where(eq(a2aAgent.id, agentId))
|
||||
.limit(1)
|
||||
|
||||
if (!existingAgent) {
|
||||
return NextResponse.json({ error: 'Agent not found' }, { status: 404 })
|
||||
}
|
||||
|
||||
const body = await request.json()
|
||||
|
||||
let skills = body.skills ?? existingAgent.skills
|
||||
if (body.skillTags !== undefined) {
|
||||
const agentName = body.name ?? existingAgent.name
|
||||
const agentDescription = body.description ?? existingAgent.description
|
||||
skills = generateSkillsFromWorkflow(agentName, agentDescription, body.skillTags)
|
||||
}
|
||||
|
||||
const [updatedAgent] = await db
|
||||
.update(a2aAgent)
|
||||
.set({
|
||||
name: body.name ?? existingAgent.name,
|
||||
description: body.description ?? existingAgent.description,
|
||||
version: body.version ?? existingAgent.version,
|
||||
capabilities: body.capabilities ?? existingAgent.capabilities,
|
||||
skills,
|
||||
authentication: body.authentication ?? existingAgent.authentication,
|
||||
isPublished: body.isPublished ?? existingAgent.isPublished,
|
||||
publishedAt:
|
||||
body.isPublished && !existingAgent.isPublished ? new Date() : existingAgent.publishedAt,
|
||||
updatedAt: new Date(),
|
||||
})
|
||||
.where(eq(a2aAgent.id, agentId))
|
||||
.returning()
|
||||
|
||||
logger.info(`Updated A2A agent: ${agentId}`)
|
||||
|
||||
return NextResponse.json({ success: true, agent: updatedAgent })
|
||||
} catch (error) {
|
||||
logger.error('Error updating agent:', error)
|
||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* DELETE - Delete an agent
|
||||
*/
|
||||
export async function DELETE(request: NextRequest, { params }: { params: Promise<RouteParams> }) {
|
||||
const { agentId } = await params
|
||||
|
||||
try {
|
||||
const auth = await checkHybridAuth(request, { requireWorkflowId: false })
|
||||
if (!auth.success || !auth.userId) {
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const [existingAgent] = await db
|
||||
.select()
|
||||
.from(a2aAgent)
|
||||
.where(eq(a2aAgent.id, agentId))
|
||||
.limit(1)
|
||||
|
||||
if (!existingAgent) {
|
||||
return NextResponse.json({ error: 'Agent not found' }, { status: 404 })
|
||||
}
|
||||
|
||||
await db.delete(a2aAgent).where(eq(a2aAgent.id, agentId))
|
||||
|
||||
logger.info(`Deleted A2A agent: ${agentId}`)
|
||||
|
||||
return NextResponse.json({ success: true })
|
||||
} catch (error) {
|
||||
logger.error('Error deleting agent:', error)
|
||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* POST - Publish/unpublish an agent
|
||||
*/
|
||||
export async function POST(request: NextRequest, { params }: { params: Promise<RouteParams> }) {
|
||||
const { agentId } = await params
|
||||
|
||||
try {
|
||||
const auth = await checkHybridAuth(request, { requireWorkflowId: false })
|
||||
if (!auth.success || !auth.userId) {
|
||||
logger.warn('A2A agent publish auth failed:', { error: auth.error, hasUserId: !!auth.userId })
|
||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const [existingAgent] = await db
|
||||
.select()
|
||||
.from(a2aAgent)
|
||||
.where(eq(a2aAgent.id, agentId))
|
||||
.limit(1)
|
||||
|
||||
if (!existingAgent) {
|
||||
return NextResponse.json({ error: 'Agent not found' }, { status: 404 })
|
||||
}
|
||||
|
||||
const body = await request.json()
|
||||
const action = body.action as 'publish' | 'unpublish' | 'refresh'
|
||||
|
||||
if (action === 'publish') {
|
||||
const [wf] = await db
|
||||
.select({ isDeployed: workflow.isDeployed })
|
||||
.from(workflow)
|
||||
.where(eq(workflow.id, existingAgent.workflowId))
|
||||
.limit(1)
|
||||
|
||||
if (!wf?.isDeployed) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Workflow must be deployed before publishing agent' },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
await db
|
||||
.update(a2aAgent)
|
||||
.set({
|
||||
isPublished: true,
|
||||
publishedAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
})
|
||||
.where(eq(a2aAgent.id, agentId))
|
||||
|
||||
logger.info(`Published A2A agent: ${agentId}`)
|
||||
return NextResponse.json({ success: true, isPublished: true })
|
||||
}
|
||||
|
||||
if (action === 'unpublish') {
|
||||
await db
|
||||
.update(a2aAgent)
|
||||
.set({
|
||||
isPublished: false,
|
||||
updatedAt: new Date(),
|
||||
})
|
||||
.where(eq(a2aAgent.id, agentId))
|
||||
|
||||
logger.info(`Unpublished A2A agent: ${agentId}`)
|
||||
return NextResponse.json({ success: true, isPublished: false })
|
||||
}
|
||||
|
||||
if (action === 'refresh') {
|
||||
const workflowData = await loadWorkflowFromNormalizedTables(existingAgent.workflowId)
|
||||
if (!workflowData) {
|
||||
return NextResponse.json({ error: 'Failed to load workflow' }, { status: 500 })
|
||||
}
|
||||
|
||||
const [wf] = await db
|
||||
.select({ name: workflow.name, description: workflow.description })
|
||||
.from(workflow)
|
||||
.where(eq(workflow.id, existingAgent.workflowId))
|
||||
.limit(1)
|
||||
|
||||
const skills = generateSkillsFromWorkflow(wf?.name || existingAgent.name, wf?.description)
|
||||
|
||||
await db
|
||||
.update(a2aAgent)
|
||||
.set({
|
||||
skills,
|
||||
updatedAt: new Date(),
|
||||
})
|
||||
.where(eq(a2aAgent.id, agentId))
|
||||
|
||||
logger.info(`Refreshed skills for A2A agent: ${agentId}`)
|
||||
return NextResponse.json({ success: true, skills })
|
||||
}
|
||||
|
||||
return NextResponse.json({ error: 'Invalid action' }, { status: 400 })
|
||||
} catch (error) {
|
||||
logger.error('Error with agent action:', error)
|
||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
186
apps/sim/app/api/a2a/agents/route.ts
Normal file
186
apps/sim/app/api/a2a/agents/route.ts
Normal file
@@ -0,0 +1,186 @@
|
||||
/**
|
||||
* A2A Agents List Endpoint
|
||||
*
|
||||
* List and create A2A agents for a workspace.
|
||||
*/
|
||||
|
||||
import { db } from '@sim/db'
|
||||
import { a2aAgent, workflow } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { and, eq, sql } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { v4 as uuidv4 } from 'uuid'
|
||||
import { generateSkillsFromWorkflow } from '@/lib/a2a/agent-card'
|
||||
import { A2A_DEFAULT_CAPABILITIES } from '@/lib/a2a/constants'
|
||||
import { sanitizeAgentName } from '@/lib/a2a/utils'
|
||||
import { checkHybridAuth } from '@/lib/auth/hybrid'
|
||||
import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/persistence/utils'
|
||||
import { hasValidStartBlockInState } from '@/lib/workflows/triggers/trigger-utils'
|
||||
import { getWorkspaceById } from '@/lib/workspaces/permissions/utils'
|
||||
|
||||
const logger = createLogger('A2AAgentsAPI')
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
/**
|
||||
* GET - List all A2A agents for a workspace
|
||||
*/
|
||||
export async function GET(request: NextRequest) {
|
||||
try {
|
||||
const auth = await checkHybridAuth(request, { requireWorkflowId: false })
|
||||
if (!auth.success || !auth.userId) {
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const { searchParams } = new URL(request.url)
|
||||
const workspaceId = searchParams.get('workspaceId')
|
||||
|
||||
if (!workspaceId) {
|
||||
return NextResponse.json({ error: 'workspaceId is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
const ws = await getWorkspaceById(workspaceId)
|
||||
if (!ws) {
|
||||
return NextResponse.json({ error: 'Workspace not found' }, { status: 404 })
|
||||
}
|
||||
|
||||
const agents = await db
|
||||
.select({
|
||||
id: a2aAgent.id,
|
||||
workspaceId: a2aAgent.workspaceId,
|
||||
workflowId: a2aAgent.workflowId,
|
||||
name: a2aAgent.name,
|
||||
description: a2aAgent.description,
|
||||
version: a2aAgent.version,
|
||||
capabilities: a2aAgent.capabilities,
|
||||
skills: a2aAgent.skills,
|
||||
authentication: a2aAgent.authentication,
|
||||
isPublished: a2aAgent.isPublished,
|
||||
publishedAt: a2aAgent.publishedAt,
|
||||
createdAt: a2aAgent.createdAt,
|
||||
updatedAt: a2aAgent.updatedAt,
|
||||
workflowName: workflow.name,
|
||||
workflowDescription: workflow.description,
|
||||
isDeployed: workflow.isDeployed,
|
||||
taskCount: sql<number>`(
|
||||
SELECT COUNT(*)::int
|
||||
FROM "a2a_task"
|
||||
WHERE "a2a_task"."agent_id" = "a2a_agent"."id"
|
||||
)`.as('task_count'),
|
||||
})
|
||||
.from(a2aAgent)
|
||||
.leftJoin(workflow, eq(a2aAgent.workflowId, workflow.id))
|
||||
.where(eq(a2aAgent.workspaceId, workspaceId))
|
||||
.orderBy(a2aAgent.createdAt)
|
||||
|
||||
logger.info(`Listed ${agents.length} A2A agents for workspace ${workspaceId}`)
|
||||
|
||||
return NextResponse.json({ success: true, agents })
|
||||
} catch (error) {
|
||||
logger.error('Error listing agents:', error)
|
||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* POST - Create a new A2A agent from a workflow
|
||||
*/
|
||||
export async function POST(request: NextRequest) {
|
||||
try {
|
||||
const auth = await checkHybridAuth(request, { requireWorkflowId: false })
|
||||
if (!auth.success || !auth.userId) {
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const body = await request.json()
|
||||
const { workspaceId, workflowId, name, description, capabilities, authentication, skillTags } =
|
||||
body
|
||||
|
||||
if (!workspaceId || !workflowId) {
|
||||
return NextResponse.json(
|
||||
{ error: 'workspaceId and workflowId are required' },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const [wf] = await db
|
||||
.select({
|
||||
id: workflow.id,
|
||||
name: workflow.name,
|
||||
description: workflow.description,
|
||||
workspaceId: workflow.workspaceId,
|
||||
isDeployed: workflow.isDeployed,
|
||||
})
|
||||
.from(workflow)
|
||||
.where(and(eq(workflow.id, workflowId), eq(workflow.workspaceId, workspaceId)))
|
||||
.limit(1)
|
||||
|
||||
if (!wf) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Workflow not found or does not belong to workspace' },
|
||||
{ status: 404 }
|
||||
)
|
||||
}
|
||||
|
||||
const [existing] = await db
|
||||
.select({ id: a2aAgent.id })
|
||||
.from(a2aAgent)
|
||||
.where(and(eq(a2aAgent.workspaceId, workspaceId), eq(a2aAgent.workflowId, workflowId)))
|
||||
.limit(1)
|
||||
|
||||
if (existing) {
|
||||
return NextResponse.json(
|
||||
{ error: 'An agent already exists for this workflow' },
|
||||
{ status: 409 }
|
||||
)
|
||||
}
|
||||
|
||||
const workflowData = await loadWorkflowFromNormalizedTables(workflowId)
|
||||
if (!workflowData || !hasValidStartBlockInState(workflowData)) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Workflow must have a Start block to be exposed as an A2A agent' },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const skills = generateSkillsFromWorkflow(
|
||||
name || wf.name,
|
||||
description || wf.description,
|
||||
skillTags
|
||||
)
|
||||
|
||||
const agentId = uuidv4()
|
||||
const agentName = name || sanitizeAgentName(wf.name)
|
||||
|
||||
const [agent] = await db
|
||||
.insert(a2aAgent)
|
||||
.values({
|
||||
id: agentId,
|
||||
workspaceId,
|
||||
workflowId,
|
||||
createdBy: auth.userId,
|
||||
name: agentName,
|
||||
description: description || wf.description,
|
||||
version: '1.0.0',
|
||||
capabilities: {
|
||||
...A2A_DEFAULT_CAPABILITIES,
|
||||
...capabilities,
|
||||
},
|
||||
skills,
|
||||
authentication: authentication || {
|
||||
schemes: ['bearer', 'apiKey'],
|
||||
},
|
||||
isPublished: false,
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
})
|
||||
.returning()
|
||||
|
||||
logger.info(`Created A2A agent ${agentId} for workflow ${workflowId}`)
|
||||
|
||||
return NextResponse.json({ success: true, agent }, { status: 201 })
|
||||
} catch (error) {
|
||||
logger.error('Error creating agent:', error)
|
||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
1211
apps/sim/app/api/a2a/serve/[agentId]/route.ts
Normal file
1211
apps/sim/app/api/a2a/serve/[agentId]/route.ts
Normal file
File diff suppressed because it is too large
Load Diff
166
apps/sim/app/api/a2a/serve/[agentId]/utils.ts
Normal file
166
apps/sim/app/api/a2a/serve/[agentId]/utils.ts
Normal file
@@ -0,0 +1,166 @@
|
||||
import type { Artifact, Message, PushNotificationConfig, Task, TaskState } from '@a2a-js/sdk'
|
||||
import { v4 as uuidv4 } from 'uuid'
|
||||
import { generateInternalToken } from '@/lib/auth/internal'
|
||||
import { getBaseUrl } from '@/lib/core/utils/urls'
|
||||
|
||||
/** A2A v0.3 JSON-RPC method names */
|
||||
export const A2A_METHODS = {
|
||||
MESSAGE_SEND: 'message/send',
|
||||
MESSAGE_STREAM: 'message/stream',
|
||||
TASKS_GET: 'tasks/get',
|
||||
TASKS_CANCEL: 'tasks/cancel',
|
||||
TASKS_RESUBSCRIBE: 'tasks/resubscribe',
|
||||
PUSH_NOTIFICATION_SET: 'tasks/pushNotificationConfig/set',
|
||||
PUSH_NOTIFICATION_GET: 'tasks/pushNotificationConfig/get',
|
||||
PUSH_NOTIFICATION_DELETE: 'tasks/pushNotificationConfig/delete',
|
||||
} as const
|
||||
|
||||
/** A2A v0.3 error codes */
|
||||
export const A2A_ERROR_CODES = {
|
||||
PARSE_ERROR: -32700,
|
||||
INVALID_REQUEST: -32600,
|
||||
METHOD_NOT_FOUND: -32601,
|
||||
INVALID_PARAMS: -32602,
|
||||
INTERNAL_ERROR: -32603,
|
||||
TASK_NOT_FOUND: -32001,
|
||||
TASK_ALREADY_COMPLETE: -32002,
|
||||
AGENT_UNAVAILABLE: -32003,
|
||||
AUTHENTICATION_REQUIRED: -32004,
|
||||
} as const
|
||||
|
||||
export interface JSONRPCRequest {
|
||||
jsonrpc: '2.0'
|
||||
id: string | number
|
||||
method: string
|
||||
params?: unknown
|
||||
}
|
||||
|
||||
export interface JSONRPCResponse {
|
||||
jsonrpc: '2.0'
|
||||
id: string | number | null
|
||||
result?: unknown
|
||||
error?: {
|
||||
code: number
|
||||
message: string
|
||||
data?: unknown
|
||||
}
|
||||
}
|
||||
|
||||
export interface MessageSendParams {
|
||||
message: Message
|
||||
configuration?: {
|
||||
acceptedOutputModes?: string[]
|
||||
historyLength?: number
|
||||
pushNotificationConfig?: PushNotificationConfig
|
||||
}
|
||||
}
|
||||
|
||||
export interface TaskIdParams {
|
||||
id: string
|
||||
historyLength?: number
|
||||
}
|
||||
|
||||
export interface PushNotificationSetParams {
|
||||
id: string
|
||||
pushNotificationConfig: PushNotificationConfig
|
||||
}
|
||||
|
||||
export function createResponse(id: string | number | null, result: unknown): JSONRPCResponse {
|
||||
return { jsonrpc: '2.0', id, result }
|
||||
}
|
||||
|
||||
export function createError(
|
||||
id: string | number | null,
|
||||
code: number,
|
||||
message: string,
|
||||
data?: unknown
|
||||
): JSONRPCResponse {
|
||||
return { jsonrpc: '2.0', id, error: { code, message, data } }
|
||||
}
|
||||
|
||||
export function isJSONRPCRequest(obj: unknown): obj is JSONRPCRequest {
|
||||
if (!obj || typeof obj !== 'object') return false
|
||||
const r = obj as Record<string, unknown>
|
||||
return r.jsonrpc === '2.0' && typeof r.method === 'string' && r.id !== undefined
|
||||
}
|
||||
|
||||
export function generateTaskId(): string {
|
||||
return uuidv4()
|
||||
}
|
||||
|
||||
export function createTaskStatus(state: TaskState): { state: TaskState; timestamp: string } {
|
||||
return { state, timestamp: new Date().toISOString() }
|
||||
}
|
||||
|
||||
export function formatTaskResponse(task: Task, historyLength?: number): Task {
|
||||
if (historyLength !== undefined && task.history) {
|
||||
return {
|
||||
...task,
|
||||
history: task.history.slice(-historyLength),
|
||||
}
|
||||
}
|
||||
return task
|
||||
}
|
||||
|
||||
export interface ExecuteRequestConfig {
|
||||
workflowId: string
|
||||
apiKey?: string | null
|
||||
stream?: boolean
|
||||
}
|
||||
|
||||
export interface ExecuteRequestResult {
|
||||
url: string
|
||||
headers: Record<string, string>
|
||||
useInternalAuth: boolean
|
||||
}
|
||||
|
||||
export async function buildExecuteRequest(
|
||||
config: ExecuteRequestConfig
|
||||
): Promise<ExecuteRequestResult> {
|
||||
const url = `${getBaseUrl()}/api/workflows/${config.workflowId}/execute`
|
||||
const headers: Record<string, string> = { 'Content-Type': 'application/json' }
|
||||
let useInternalAuth = false
|
||||
|
||||
if (config.apiKey) {
|
||||
headers['X-API-Key'] = config.apiKey
|
||||
} else {
|
||||
const internalToken = await generateInternalToken()
|
||||
headers.Authorization = `Bearer ${internalToken}`
|
||||
useInternalAuth = true
|
||||
}
|
||||
|
||||
if (config.stream) {
|
||||
headers['X-Stream-Response'] = 'true'
|
||||
}
|
||||
|
||||
return { url, headers, useInternalAuth }
|
||||
}
|
||||
|
||||
export function extractAgentContent(executeResult: {
|
||||
output?: { content?: string; [key: string]: unknown }
|
||||
error?: string
|
||||
}): string {
|
||||
return (
|
||||
executeResult.output?.content ||
|
||||
(typeof executeResult.output === 'object'
|
||||
? JSON.stringify(executeResult.output)
|
||||
: String(executeResult.output || executeResult.error || 'Task completed'))
|
||||
)
|
||||
}
|
||||
|
||||
export function buildTaskResponse(params: {
|
||||
taskId: string
|
||||
contextId: string
|
||||
state: TaskState
|
||||
history: Message[]
|
||||
artifacts?: Artifact[]
|
||||
}): Task {
|
||||
return {
|
||||
kind: 'task',
|
||||
id: params.taskId,
|
||||
contextId: params.contextId,
|
||||
status: createTaskStatus(params.state),
|
||||
history: params.history,
|
||||
artifacts: params.artifacts || [],
|
||||
}
|
||||
}
|
||||
@@ -7,10 +7,11 @@ import type { NextRequest } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { renderOTPEmail } from '@/components/emails'
|
||||
import { getRedisClient } from '@/lib/core/config/redis'
|
||||
import { addCorsHeaders } from '@/lib/core/security/deployment'
|
||||
import { getStorageMethod } from '@/lib/core/storage'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import { sendEmail } from '@/lib/messaging/email/mailer'
|
||||
import { addCorsHeaders, setChatAuthCookie } from '@/app/api/chat/utils'
|
||||
import { setChatAuthCookie } from '@/app/api/chat/utils'
|
||||
import { createErrorResponse, createSuccessResponse } from '@/app/api/workflows/utils'
|
||||
|
||||
const logger = createLogger('ChatOtpAPI')
|
||||
|
||||
@@ -3,6 +3,7 @@
|
||||
*
|
||||
* @vitest-environment node
|
||||
*/
|
||||
import { loggerMock } from '@sim/testing'
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
import { createMockRequest } from '@/app/api/__test-utils__/utils'
|
||||
|
||||
@@ -120,14 +121,8 @@ describe('Chat Identifier API Route', () => {
|
||||
validateAuthToken: vi.fn().mockReturnValue(true),
|
||||
}))
|
||||
|
||||
vi.doMock('@sim/logger', () => ({
|
||||
createLogger: vi.fn().mockReturnValue({
|
||||
debug: vi.fn(),
|
||||
info: vi.fn(),
|
||||
warn: vi.fn(),
|
||||
error: vi.fn(),
|
||||
}),
|
||||
}))
|
||||
// Mock logger - use loggerMock from @sim/testing
|
||||
vi.doMock('@sim/logger', () => loggerMock)
|
||||
|
||||
vi.doMock('@sim/db', () => {
|
||||
const mockSelect = vi.fn().mockImplementation((fields) => {
|
||||
|
||||
@@ -5,16 +5,12 @@ import { createLogger } from '@sim/logger'
|
||||
import { eq } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { addCorsHeaders, validateAuthToken } from '@/lib/core/security/deployment'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import { preprocessExecution } from '@/lib/execution/preprocessing'
|
||||
import { LoggingSession } from '@/lib/logs/execution/logging-session'
|
||||
import { ChatFiles } from '@/lib/uploads'
|
||||
import {
|
||||
addCorsHeaders,
|
||||
setChatAuthCookie,
|
||||
validateAuthToken,
|
||||
validateChatAuth,
|
||||
} from '@/app/api/chat/utils'
|
||||
import { setChatAuthCookie, validateChatAuth } from '@/app/api/chat/utils'
|
||||
import { createErrorResponse, createSuccessResponse } from '@/app/api/workflows/utils'
|
||||
|
||||
const logger = createLogger('ChatIdentifierAPI')
|
||||
@@ -253,7 +249,7 @@ export async function POST(
|
||||
userId: deployment.userId,
|
||||
workspaceId,
|
||||
isDeployed: workflowRecord?.isDeployed ?? false,
|
||||
variables: workflowRecord?.variables || {},
|
||||
variables: (workflowRecord?.variables as Record<string, unknown>) ?? undefined,
|
||||
}
|
||||
|
||||
const stream = await createStreamingResponse({
|
||||
|
||||
@@ -1,9 +1,10 @@
|
||||
import { NextRequest } from 'next/server'
|
||||
/**
|
||||
* Tests for chat edit API route
|
||||
*
|
||||
* @vitest-environment node
|
||||
*/
|
||||
import { loggerMock } from '@sim/testing'
|
||||
import { NextRequest } from 'next/server'
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
|
||||
vi.mock('@/lib/core/config/feature-flags', () => ({
|
||||
@@ -50,14 +51,8 @@ describe('Chat Edit API Route', () => {
|
||||
chat: { id: 'id', identifier: 'identifier', userId: 'userId' },
|
||||
}))
|
||||
|
||||
vi.doMock('@sim/logger', () => ({
|
||||
createLogger: vi.fn().mockReturnValue({
|
||||
info: vi.fn(),
|
||||
error: vi.fn(),
|
||||
warn: vi.fn(),
|
||||
debug: vi.fn(),
|
||||
}),
|
||||
}))
|
||||
// Mock logger - use loggerMock from @sim/testing
|
||||
vi.doMock('@sim/logger', () => loggerMock)
|
||||
|
||||
vi.doMock('@/app/api/workflows/utils', () => ({
|
||||
createSuccessResponse: mockCreateSuccessResponse.mockImplementation((data) => {
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
import { databaseMock, loggerMock } from '@sim/testing'
|
||||
import type { NextResponse } from 'next/server'
|
||||
/**
|
||||
* Tests for chat API utils
|
||||
@@ -5,14 +6,9 @@ import type { NextResponse } from 'next/server'
|
||||
* @vitest-environment node
|
||||
*/
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
import { env } from '@/lib/core/config/env'
|
||||
|
||||
vi.mock('@sim/db', () => ({
|
||||
db: {
|
||||
select: vi.fn(),
|
||||
update: vi.fn(),
|
||||
},
|
||||
}))
|
||||
vi.mock('@sim/db', () => databaseMock)
|
||||
vi.mock('@sim/logger', () => loggerMock)
|
||||
|
||||
vi.mock('@/lib/logs/execution/logging-session', () => ({
|
||||
LoggingSession: vi.fn().mockImplementation(() => ({
|
||||
@@ -52,19 +48,10 @@ vi.mock('@/lib/core/config/feature-flags', () => ({
|
||||
|
||||
describe('Chat API Utils', () => {
|
||||
beforeEach(() => {
|
||||
vi.doMock('@sim/logger', () => ({
|
||||
createLogger: vi.fn().mockReturnValue({
|
||||
info: vi.fn(),
|
||||
error: vi.fn(),
|
||||
warn: vi.fn(),
|
||||
debug: vi.fn(),
|
||||
}),
|
||||
}))
|
||||
|
||||
vi.stubGlobal('process', {
|
||||
...process,
|
||||
env: {
|
||||
...env,
|
||||
...process.env,
|
||||
NODE_ENV: 'development',
|
||||
},
|
||||
})
|
||||
@@ -75,8 +62,8 @@ describe('Chat API Utils', () => {
|
||||
})
|
||||
|
||||
describe('Auth token utils', () => {
|
||||
it('should validate auth tokens', async () => {
|
||||
const { validateAuthToken } = await import('@/app/api/chat/utils')
|
||||
it.concurrent('should validate auth tokens', async () => {
|
||||
const { validateAuthToken } = await import('@/lib/core/security/deployment')
|
||||
|
||||
const chatId = 'test-chat-id'
|
||||
const type = 'password'
|
||||
@@ -92,8 +79,8 @@ describe('Chat API Utils', () => {
|
||||
expect(isInvalidChat).toBe(false)
|
||||
})
|
||||
|
||||
it('should reject expired tokens', async () => {
|
||||
const { validateAuthToken } = await import('@/app/api/chat/utils')
|
||||
it.concurrent('should reject expired tokens', async () => {
|
||||
const { validateAuthToken } = await import('@/lib/core/security/deployment')
|
||||
|
||||
const chatId = 'test-chat-id'
|
||||
const expiredToken = Buffer.from(
|
||||
@@ -136,7 +123,7 @@ describe('Chat API Utils', () => {
|
||||
|
||||
describe('CORS handling', () => {
|
||||
it('should add CORS headers for localhost in development', async () => {
|
||||
const { addCorsHeaders } = await import('@/app/api/chat/utils')
|
||||
const { addCorsHeaders } = await import('@/lib/core/security/deployment')
|
||||
|
||||
const mockRequest = {
|
||||
headers: {
|
||||
@@ -343,7 +330,7 @@ describe('Chat API Utils', () => {
|
||||
})
|
||||
|
||||
describe('Execution Result Processing', () => {
|
||||
it('should process logs regardless of overall success status', () => {
|
||||
it.concurrent('should process logs regardless of overall success status', () => {
|
||||
const executionResult = {
|
||||
success: false,
|
||||
output: {},
|
||||
@@ -381,7 +368,7 @@ describe('Chat API Utils', () => {
|
||||
expect(executionResult.logs[1].error).toBe('Agent 2 failed')
|
||||
})
|
||||
|
||||
it('should handle ExecutionResult vs StreamingExecution types correctly', () => {
|
||||
it.concurrent('should handle ExecutionResult vs StreamingExecution types correctly', () => {
|
||||
const executionResult = {
|
||||
success: true,
|
||||
output: { content: 'test' },
|
||||
|
||||
@@ -1,17 +1,25 @@
|
||||
import { createHash } from 'crypto'
|
||||
import { db } from '@sim/db'
|
||||
import { chat, workflow } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { eq } from 'drizzle-orm'
|
||||
import type { NextRequest, NextResponse } from 'next/server'
|
||||
import { isDev } from '@/lib/core/config/feature-flags'
|
||||
import {
|
||||
isEmailAllowed,
|
||||
setDeploymentAuthCookie,
|
||||
validateAuthToken,
|
||||
} from '@/lib/core/security/deployment'
|
||||
import { decryptSecret } from '@/lib/core/security/encryption'
|
||||
import { hasAdminPermission } from '@/lib/workspaces/permissions/utils'
|
||||
|
||||
const logger = createLogger('ChatAuthUtils')
|
||||
|
||||
function hashPassword(encryptedPassword: string): string {
|
||||
return createHash('sha256').update(encryptedPassword).digest('hex').substring(0, 8)
|
||||
export function setChatAuthCookie(
|
||||
response: NextResponse,
|
||||
chatId: string,
|
||||
type: string,
|
||||
encryptedPassword?: string | null
|
||||
): void {
|
||||
setDeploymentAuthCookie(response, 'chat', chatId, type, encryptedPassword)
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -82,77 +90,6 @@ export async function checkChatAccess(
|
||||
return { hasAccess: false }
|
||||
}
|
||||
|
||||
function encryptAuthToken(chatId: string, type: string, encryptedPassword?: string | null): string {
|
||||
const pwHash = encryptedPassword ? hashPassword(encryptedPassword) : ''
|
||||
return Buffer.from(`${chatId}:${type}:${Date.now()}:${pwHash}`).toString('base64')
|
||||
}
|
||||
|
||||
export function validateAuthToken(
|
||||
token: string,
|
||||
chatId: string,
|
||||
encryptedPassword?: string | null
|
||||
): boolean {
|
||||
try {
|
||||
const decoded = Buffer.from(token, 'base64').toString()
|
||||
const parts = decoded.split(':')
|
||||
const [storedId, _type, timestamp, storedPwHash] = parts
|
||||
|
||||
if (storedId !== chatId) {
|
||||
return false
|
||||
}
|
||||
|
||||
const createdAt = Number.parseInt(timestamp)
|
||||
const now = Date.now()
|
||||
const expireTime = 24 * 60 * 60 * 1000
|
||||
|
||||
if (now - createdAt > expireTime) {
|
||||
return false
|
||||
}
|
||||
|
||||
if (encryptedPassword) {
|
||||
const currentPwHash = hashPassword(encryptedPassword)
|
||||
if (storedPwHash !== currentPwHash) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
return true
|
||||
} catch (_e) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
export function setChatAuthCookie(
|
||||
response: NextResponse,
|
||||
chatId: string,
|
||||
type: string,
|
||||
encryptedPassword?: string | null
|
||||
): void {
|
||||
const token = encryptAuthToken(chatId, type, encryptedPassword)
|
||||
response.cookies.set({
|
||||
name: `chat_auth_${chatId}`,
|
||||
value: token,
|
||||
httpOnly: true,
|
||||
secure: !isDev,
|
||||
sameSite: 'lax',
|
||||
path: '/',
|
||||
maxAge: 60 * 60 * 24,
|
||||
})
|
||||
}
|
||||
|
||||
export function addCorsHeaders(response: NextResponse, request: NextRequest) {
|
||||
const origin = request.headers.get('origin') || ''
|
||||
|
||||
if (isDev && origin.includes('localhost')) {
|
||||
response.headers.set('Access-Control-Allow-Origin', origin)
|
||||
response.headers.set('Access-Control-Allow-Credentials', 'true')
|
||||
response.headers.set('Access-Control-Allow-Methods', 'GET, POST, OPTIONS')
|
||||
response.headers.set('Access-Control-Allow-Headers', 'Content-Type, X-Requested-With')
|
||||
}
|
||||
|
||||
return response
|
||||
}
|
||||
|
||||
export async function validateChatAuth(
|
||||
requestId: string,
|
||||
deployment: any,
|
||||
@@ -231,12 +168,7 @@ export async function validateChatAuth(
|
||||
|
||||
const allowedEmails = deployment.allowedEmails || []
|
||||
|
||||
if (allowedEmails.includes(email)) {
|
||||
return { authorized: false, error: 'otp_required' }
|
||||
}
|
||||
|
||||
const domain = email.split('@')[1]
|
||||
if (domain && allowedEmails.some((allowed: string) => allowed === `@${domain}`)) {
|
||||
if (isEmailAllowed(email, allowedEmails)) {
|
||||
return { authorized: false, error: 'otp_required' }
|
||||
}
|
||||
|
||||
@@ -270,12 +202,7 @@ export async function validateChatAuth(
|
||||
|
||||
const allowedEmails = deployment.allowedEmails || []
|
||||
|
||||
if (allowedEmails.includes(email)) {
|
||||
return { authorized: true }
|
||||
}
|
||||
|
||||
const domain = email.split('@')[1]
|
||||
if (domain && allowedEmails.some((allowed: string) => allowed === `@${domain}`)) {
|
||||
if (isEmailAllowed(email, allowedEmails)) {
|
||||
return { authorized: true }
|
||||
}
|
||||
|
||||
@@ -296,12 +223,7 @@ export async function validateChatAuth(
|
||||
|
||||
const allowedEmails = deployment.allowedEmails || []
|
||||
|
||||
if (allowedEmails.includes(userEmail)) {
|
||||
return { authorized: true }
|
||||
}
|
||||
|
||||
const domain = userEmail.split('@')[1]
|
||||
if (domain && allowedEmails.some((allowed: string) => allowed === `@${domain}`)) {
|
||||
if (isEmailAllowed(userEmail, allowedEmails)) {
|
||||
return { authorized: true }
|
||||
}
|
||||
|
||||
|
||||
@@ -802,49 +802,29 @@ export async function POST(req: NextRequest) {
|
||||
toolNames: toolCalls.map((tc) => tc?.name).filter(Boolean),
|
||||
})
|
||||
|
||||
// Save messages to database after streaming completes (including aborted messages)
|
||||
// NOTE: Messages are saved by the client via update-messages endpoint with full contentBlocks.
|
||||
// Server only updates conversationId here to avoid overwriting client's richer save.
|
||||
if (currentChat) {
|
||||
const updatedMessages = [...conversationHistory, userMessage]
|
||||
|
||||
// Save assistant message if there's any content or tool calls (even partial from abort)
|
||||
if (assistantContent.trim() || toolCalls.length > 0) {
|
||||
const assistantMessage = {
|
||||
id: crypto.randomUUID(),
|
||||
role: 'assistant',
|
||||
content: assistantContent,
|
||||
timestamp: new Date().toISOString(),
|
||||
...(toolCalls.length > 0 && { toolCalls }),
|
||||
}
|
||||
updatedMessages.push(assistantMessage)
|
||||
logger.info(
|
||||
`[${tracker.requestId}] Saving assistant message with content (${assistantContent.length} chars) and ${toolCalls.length} tool calls`
|
||||
)
|
||||
} else {
|
||||
logger.info(
|
||||
`[${tracker.requestId}] No assistant content or tool calls to save (aborted before response)`
|
||||
)
|
||||
}
|
||||
|
||||
// Persist only a safe conversationId to avoid continuing from a state that expects tool outputs
|
||||
const previousConversationId = currentChat?.conversationId as string | undefined
|
||||
const responseId = lastSafeDoneResponseId || previousConversationId || undefined
|
||||
|
||||
// Update chat in database immediately (without title)
|
||||
await db
|
||||
.update(copilotChats)
|
||||
.set({
|
||||
messages: updatedMessages,
|
||||
updatedAt: new Date(),
|
||||
...(responseId ? { conversationId: responseId } : {}),
|
||||
})
|
||||
.where(eq(copilotChats.id, actualChatId!))
|
||||
if (responseId) {
|
||||
await db
|
||||
.update(copilotChats)
|
||||
.set({
|
||||
updatedAt: new Date(),
|
||||
conversationId: responseId,
|
||||
})
|
||||
.where(eq(copilotChats.id, actualChatId!))
|
||||
|
||||
logger.info(`[${tracker.requestId}] Updated chat ${actualChatId} with new messages`, {
|
||||
messageCount: updatedMessages.length,
|
||||
savedUserMessage: true,
|
||||
savedAssistantMessage: assistantContent.trim().length > 0,
|
||||
updatedConversationId: responseId || null,
|
||||
})
|
||||
logger.info(
|
||||
`[${tracker.requestId}] Updated conversationId for chat ${actualChatId}`,
|
||||
{
|
||||
updatedConversationId: responseId,
|
||||
}
|
||||
)
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error(`[${tracker.requestId}] Error processing stream:`, error)
|
||||
|
||||
@@ -17,25 +17,30 @@ const logger = createLogger('CopilotChatUpdateAPI')
|
||||
const UpdateMessagesSchema = z.object({
|
||||
chatId: z.string(),
|
||||
messages: z.array(
|
||||
z.object({
|
||||
id: z.string(),
|
||||
role: z.enum(['user', 'assistant']),
|
||||
content: z.string(),
|
||||
timestamp: z.string(),
|
||||
toolCalls: z.array(z.any()).optional(),
|
||||
contentBlocks: z.array(z.any()).optional(),
|
||||
fileAttachments: z
|
||||
.array(
|
||||
z.object({
|
||||
id: z.string(),
|
||||
key: z.string(),
|
||||
filename: z.string(),
|
||||
media_type: z.string(),
|
||||
size: z.number(),
|
||||
})
|
||||
)
|
||||
.optional(),
|
||||
})
|
||||
z
|
||||
.object({
|
||||
id: z.string(),
|
||||
role: z.enum(['user', 'assistant', 'system']),
|
||||
content: z.string(),
|
||||
timestamp: z.string(),
|
||||
toolCalls: z.array(z.any()).optional(),
|
||||
contentBlocks: z.array(z.any()).optional(),
|
||||
fileAttachments: z
|
||||
.array(
|
||||
z.object({
|
||||
id: z.string(),
|
||||
key: z.string(),
|
||||
filename: z.string(),
|
||||
media_type: z.string(),
|
||||
size: z.number(),
|
||||
})
|
||||
)
|
||||
.optional(),
|
||||
contexts: z.array(z.any()).optional(),
|
||||
citations: z.array(z.any()).optional(),
|
||||
errorType: z.string().optional(),
|
||||
})
|
||||
.passthrough() // Preserve any additional fields for future compatibility
|
||||
),
|
||||
planArtifact: z.string().nullable().optional(),
|
||||
config: z
|
||||
@@ -57,8 +62,33 @@ export async function POST(req: NextRequest) {
|
||||
}
|
||||
|
||||
const body = await req.json()
|
||||
|
||||
// Debug: Log what we received
|
||||
const lastMsg = body.messages?.[body.messages.length - 1]
|
||||
if (lastMsg?.role === 'assistant') {
|
||||
logger.info(`[${tracker.requestId}] Received messages to save`, {
|
||||
messageCount: body.messages?.length,
|
||||
lastMsgId: lastMsg.id,
|
||||
lastMsgContentLength: lastMsg.content?.length || 0,
|
||||
lastMsgContentBlockCount: lastMsg.contentBlocks?.length || 0,
|
||||
lastMsgContentBlockTypes: lastMsg.contentBlocks?.map((b: any) => b?.type) || [],
|
||||
})
|
||||
}
|
||||
|
||||
const { chatId, messages, planArtifact, config } = UpdateMessagesSchema.parse(body)
|
||||
|
||||
// Debug: Log what we're about to save
|
||||
const lastMsgParsed = messages[messages.length - 1]
|
||||
if (lastMsgParsed?.role === 'assistant') {
|
||||
logger.info(`[${tracker.requestId}] Parsed messages to save`, {
|
||||
messageCount: messages.length,
|
||||
lastMsgId: lastMsgParsed.id,
|
||||
lastMsgContentLength: lastMsgParsed.content?.length || 0,
|
||||
lastMsgContentBlockCount: lastMsgParsed.contentBlocks?.length || 0,
|
||||
lastMsgContentBlockTypes: lastMsgParsed.contentBlocks?.map((b: any) => b?.type) || [],
|
||||
})
|
||||
}
|
||||
|
||||
// Verify that the chat belongs to the user
|
||||
const [chat] = await db
|
||||
.select()
|
||||
|
||||
@@ -1,50 +0,0 @@
|
||||
/**
|
||||
* @deprecated This route is not currently in use
|
||||
* @remarks Kept for reference - may be removed in future cleanup
|
||||
*/
|
||||
|
||||
import { db } from '@sim/db'
|
||||
import { copilotChats } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { eq } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { getSession } from '@/lib/auth'
|
||||
|
||||
const logger = createLogger('UpdateChatTitleAPI')
|
||||
|
||||
const UpdateTitleSchema = z.object({
|
||||
chatId: z.string(),
|
||||
title: z.string(),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
try {
|
||||
const session = await getSession()
|
||||
if (!session?.user?.id) {
|
||||
return NextResponse.json({ success: false, error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const body = await request.json()
|
||||
const parsed = UpdateTitleSchema.parse(body)
|
||||
|
||||
// Update the chat title
|
||||
await db
|
||||
.update(copilotChats)
|
||||
.set({
|
||||
title: parsed.title,
|
||||
updatedAt: new Date(),
|
||||
})
|
||||
.where(eq(copilotChats.id, parsed.chatId))
|
||||
|
||||
logger.info('Chat title updated', { chatId: parsed.chatId, title: parsed.title })
|
||||
|
||||
return NextResponse.json({ success: true })
|
||||
} catch (error) {
|
||||
logger.error('Error updating chat title:', error)
|
||||
return NextResponse.json(
|
||||
{ success: false, error: 'Failed to update chat title' },
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -1,134 +0,0 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { getCopilotModel } from '@/lib/copilot/config'
|
||||
import { SIM_AGENT_API_URL_DEFAULT } from '@/lib/copilot/constants'
|
||||
import type { CopilotProviderConfig } from '@/lib/copilot/types'
|
||||
import { env } from '@/lib/core/config/env'
|
||||
|
||||
const logger = createLogger('ContextUsageAPI')
|
||||
|
||||
const SIM_AGENT_API_URL = env.SIM_AGENT_API_URL || SIM_AGENT_API_URL_DEFAULT
|
||||
|
||||
const ContextUsageRequestSchema = z.object({
|
||||
chatId: z.string(),
|
||||
model: z.string(),
|
||||
workflowId: z.string(),
|
||||
provider: z.any().optional(),
|
||||
})
|
||||
|
||||
/**
|
||||
* POST /api/copilot/context-usage
|
||||
* Fetch context usage from sim-agent API
|
||||
*/
|
||||
export async function POST(req: NextRequest) {
|
||||
try {
|
||||
logger.info('[Context Usage API] Request received')
|
||||
|
||||
const session = await getSession()
|
||||
if (!session?.user?.id) {
|
||||
logger.warn('[Context Usage API] No session/user ID')
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const body = await req.json()
|
||||
logger.info('[Context Usage API] Request body', body)
|
||||
|
||||
const parsed = ContextUsageRequestSchema.safeParse(body)
|
||||
|
||||
if (!parsed.success) {
|
||||
logger.warn('[Context Usage API] Invalid request body', parsed.error.errors)
|
||||
return NextResponse.json(
|
||||
{ error: 'Invalid request body', details: parsed.error.errors },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const { chatId, model, workflowId, provider } = parsed.data
|
||||
const userId = session.user.id // Get userId from session, not from request
|
||||
|
||||
logger.info('[Context Usage API] Request validated', { chatId, model, userId, workflowId })
|
||||
|
||||
// Build provider config similar to chat route
|
||||
let providerConfig: CopilotProviderConfig | undefined = provider
|
||||
if (!providerConfig) {
|
||||
const defaults = getCopilotModel('chat')
|
||||
const modelToUse = env.COPILOT_MODEL || defaults.model
|
||||
const providerEnv = env.COPILOT_PROVIDER as any
|
||||
|
||||
if (providerEnv) {
|
||||
if (providerEnv === 'azure-openai') {
|
||||
providerConfig = {
|
||||
provider: 'azure-openai',
|
||||
model: modelToUse,
|
||||
apiKey: env.AZURE_OPENAI_API_KEY,
|
||||
apiVersion: env.AZURE_OPENAI_API_VERSION,
|
||||
endpoint: env.AZURE_OPENAI_ENDPOINT,
|
||||
}
|
||||
} else if (providerEnv === 'vertex') {
|
||||
providerConfig = {
|
||||
provider: 'vertex',
|
||||
model: modelToUse,
|
||||
apiKey: env.COPILOT_API_KEY,
|
||||
vertexProject: env.VERTEX_PROJECT,
|
||||
vertexLocation: env.VERTEX_LOCATION,
|
||||
}
|
||||
} else {
|
||||
providerConfig = {
|
||||
provider: providerEnv,
|
||||
model: modelToUse,
|
||||
apiKey: env.COPILOT_API_KEY,
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Call sim-agent API
|
||||
const requestPayload = {
|
||||
chatId,
|
||||
model,
|
||||
userId,
|
||||
workflowId,
|
||||
...(providerConfig ? { provider: providerConfig } : {}),
|
||||
}
|
||||
|
||||
logger.info('[Context Usage API] Calling sim-agent', {
|
||||
url: `${SIM_AGENT_API_URL}/api/get-context-usage`,
|
||||
payload: requestPayload,
|
||||
})
|
||||
|
||||
const simAgentResponse = await fetch(`${SIM_AGENT_API_URL}/api/get-context-usage`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
...(env.COPILOT_API_KEY ? { 'x-api-key': env.COPILOT_API_KEY } : {}),
|
||||
},
|
||||
body: JSON.stringify(requestPayload),
|
||||
})
|
||||
|
||||
logger.info('[Context Usage API] Sim-agent response', {
|
||||
status: simAgentResponse.status,
|
||||
ok: simAgentResponse.ok,
|
||||
})
|
||||
|
||||
if (!simAgentResponse.ok) {
|
||||
const errorText = await simAgentResponse.text().catch(() => '')
|
||||
logger.warn('[Context Usage API] Sim agent request failed', {
|
||||
status: simAgentResponse.status,
|
||||
error: errorText,
|
||||
})
|
||||
return NextResponse.json(
|
||||
{ error: 'Failed to fetch context usage from sim-agent' },
|
||||
{ status: simAgentResponse.status }
|
||||
)
|
||||
}
|
||||
|
||||
const data = await simAgentResponse.json()
|
||||
logger.info('[Context Usage API] Sim-agent data received', data)
|
||||
return NextResponse.json(data)
|
||||
} catch (error) {
|
||||
logger.error('Error fetching context usage:', error)
|
||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
@@ -7,7 +7,7 @@ import { z } from 'zod'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { decryptSecret, encryptSecret } from '@/lib/core/security/encryption'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import type { EnvironmentVariable } from '@/stores/settings/environment/types'
|
||||
import type { EnvironmentVariable } from '@/stores/settings/environment'
|
||||
|
||||
const logger = createLogger('EnvironmentAPI')
|
||||
|
||||
|
||||
414
apps/sim/app/api/form/[identifier]/route.ts
Normal file
414
apps/sim/app/api/form/[identifier]/route.ts
Normal file
@@ -0,0 +1,414 @@
|
||||
import { randomUUID } from 'crypto'
|
||||
import { db } from '@sim/db'
|
||||
import { form, workflow, workflowBlocks } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { eq } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { addCorsHeaders, validateAuthToken } from '@/lib/core/security/deployment'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import { preprocessExecution } from '@/lib/execution/preprocessing'
|
||||
import { LoggingSession } from '@/lib/logs/execution/logging-session'
|
||||
import { createStreamingResponse } from '@/lib/workflows/streaming/streaming'
|
||||
import { setFormAuthCookie, validateFormAuth } from '@/app/api/form/utils'
|
||||
import { createErrorResponse, createSuccessResponse } from '@/app/api/workflows/utils'
|
||||
|
||||
const logger = createLogger('FormIdentifierAPI')
|
||||
|
||||
const formPostBodySchema = z.object({
|
||||
formData: z.record(z.unknown()).optional(),
|
||||
password: z.string().optional(),
|
||||
email: z.string().email('Invalid email format').optional().or(z.literal('')),
|
||||
})
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
export const runtime = 'nodejs'
|
||||
|
||||
/**
|
||||
* Get the input format schema from the workflow's start block
|
||||
*/
|
||||
async function getWorkflowInputSchema(workflowId: string): Promise<any[]> {
|
||||
try {
|
||||
const blocks = await db
|
||||
.select()
|
||||
.from(workflowBlocks)
|
||||
.where(eq(workflowBlocks.workflowId, workflowId))
|
||||
|
||||
// Find the start block (starter or start_trigger type)
|
||||
const startBlock = blocks.find(
|
||||
(block) => block.type === 'starter' || block.type === 'start_trigger'
|
||||
)
|
||||
|
||||
if (!startBlock) {
|
||||
return []
|
||||
}
|
||||
|
||||
// Extract inputFormat from subBlocks
|
||||
const subBlocks = startBlock.subBlocks as Record<string, any> | null
|
||||
if (!subBlocks?.inputFormat?.value) {
|
||||
return []
|
||||
}
|
||||
|
||||
return Array.isArray(subBlocks.inputFormat.value) ? subBlocks.inputFormat.value : []
|
||||
} catch (error) {
|
||||
logger.error('Error fetching workflow input schema:', error)
|
||||
return []
|
||||
}
|
||||
}
|
||||
|
||||
export async function POST(
|
||||
request: NextRequest,
|
||||
{ params }: { params: Promise<{ identifier: string }> }
|
||||
) {
|
||||
const { identifier } = await params
|
||||
const requestId = generateRequestId()
|
||||
|
||||
try {
|
||||
logger.debug(`[${requestId}] Processing form submission for identifier: ${identifier}`)
|
||||
|
||||
let parsedBody
|
||||
try {
|
||||
const rawBody = await request.json()
|
||||
const validation = formPostBodySchema.safeParse(rawBody)
|
||||
|
||||
if (!validation.success) {
|
||||
const errorMessage = validation.error.errors
|
||||
.map((err) => `${err.path.join('.')}: ${err.message}`)
|
||||
.join(', ')
|
||||
logger.warn(`[${requestId}] Validation error: ${errorMessage}`)
|
||||
return addCorsHeaders(
|
||||
createErrorResponse(`Invalid request body: ${errorMessage}`, 400),
|
||||
request
|
||||
)
|
||||
}
|
||||
|
||||
parsedBody = validation.data
|
||||
} catch (_error) {
|
||||
return addCorsHeaders(createErrorResponse('Invalid request body', 400), request)
|
||||
}
|
||||
|
||||
const deploymentResult = await db
|
||||
.select({
|
||||
id: form.id,
|
||||
workflowId: form.workflowId,
|
||||
userId: form.userId,
|
||||
isActive: form.isActive,
|
||||
authType: form.authType,
|
||||
password: form.password,
|
||||
allowedEmails: form.allowedEmails,
|
||||
customizations: form.customizations,
|
||||
})
|
||||
.from(form)
|
||||
.where(eq(form.identifier, identifier))
|
||||
.limit(1)
|
||||
|
||||
if (deploymentResult.length === 0) {
|
||||
logger.warn(`[${requestId}] Form not found for identifier: ${identifier}`)
|
||||
return addCorsHeaders(createErrorResponse('Form not found', 404), request)
|
||||
}
|
||||
|
||||
const deployment = deploymentResult[0]
|
||||
|
||||
if (!deployment.isActive) {
|
||||
logger.warn(`[${requestId}] Form is not active: ${identifier}`)
|
||||
|
||||
const [workflowRecord] = await db
|
||||
.select({ workspaceId: workflow.workspaceId })
|
||||
.from(workflow)
|
||||
.where(eq(workflow.id, deployment.workflowId))
|
||||
.limit(1)
|
||||
|
||||
const workspaceId = workflowRecord?.workspaceId
|
||||
if (!workspaceId) {
|
||||
logger.warn(`[${requestId}] Cannot log: workflow ${deployment.workflowId} has no workspace`)
|
||||
return addCorsHeaders(
|
||||
createErrorResponse('This form is currently unavailable', 403),
|
||||
request
|
||||
)
|
||||
}
|
||||
|
||||
const executionId = randomUUID()
|
||||
const loggingSession = new LoggingSession(
|
||||
deployment.workflowId,
|
||||
executionId,
|
||||
'form',
|
||||
requestId
|
||||
)
|
||||
|
||||
await loggingSession.safeStart({
|
||||
userId: deployment.userId,
|
||||
workspaceId,
|
||||
variables: {},
|
||||
})
|
||||
|
||||
await loggingSession.safeCompleteWithError({
|
||||
error: {
|
||||
message: 'This form is currently unavailable. The form has been disabled.',
|
||||
stackTrace: undefined,
|
||||
},
|
||||
traceSpans: [],
|
||||
})
|
||||
|
||||
return addCorsHeaders(createErrorResponse('This form is currently unavailable', 403), request)
|
||||
}
|
||||
|
||||
const authResult = await validateFormAuth(requestId, deployment, request, parsedBody)
|
||||
if (!authResult.authorized) {
|
||||
return addCorsHeaders(
|
||||
createErrorResponse(authResult.error || 'Authentication required', 401),
|
||||
request
|
||||
)
|
||||
}
|
||||
|
||||
const { formData, password, email } = parsedBody
|
||||
|
||||
// If only authentication credentials provided (no form data), just return authenticated
|
||||
if ((password || email) && !formData) {
|
||||
const response = addCorsHeaders(createSuccessResponse({ authenticated: true }), request)
|
||||
setFormAuthCookie(response, deployment.id, deployment.authType, deployment.password)
|
||||
return response
|
||||
}
|
||||
|
||||
if (!formData || Object.keys(formData).length === 0) {
|
||||
return addCorsHeaders(createErrorResponse('No form data provided', 400), request)
|
||||
}
|
||||
|
||||
const executionId = randomUUID()
|
||||
const loggingSession = new LoggingSession(deployment.workflowId, executionId, 'form', requestId)
|
||||
|
||||
const preprocessResult = await preprocessExecution({
|
||||
workflowId: deployment.workflowId,
|
||||
userId: deployment.userId,
|
||||
triggerType: 'form',
|
||||
executionId,
|
||||
requestId,
|
||||
checkRateLimit: true,
|
||||
checkDeployment: true,
|
||||
loggingSession,
|
||||
})
|
||||
|
||||
if (!preprocessResult.success) {
|
||||
logger.warn(`[${requestId}] Preprocessing failed: ${preprocessResult.error?.message}`)
|
||||
return addCorsHeaders(
|
||||
createErrorResponse(
|
||||
preprocessResult.error?.message || 'Failed to process request',
|
||||
preprocessResult.error?.statusCode || 500
|
||||
),
|
||||
request
|
||||
)
|
||||
}
|
||||
|
||||
const { actorUserId, workflowRecord } = preprocessResult
|
||||
const workspaceOwnerId = actorUserId!
|
||||
const workspaceId = workflowRecord?.workspaceId
|
||||
if (!workspaceId) {
|
||||
logger.error(`[${requestId}] Workflow ${deployment.workflowId} has no workspaceId`)
|
||||
return addCorsHeaders(
|
||||
createErrorResponse('Workflow has no associated workspace', 500),
|
||||
request
|
||||
)
|
||||
}
|
||||
|
||||
try {
|
||||
const workflowForExecution = {
|
||||
id: deployment.workflowId,
|
||||
userId: deployment.userId,
|
||||
workspaceId,
|
||||
isDeployed: workflowRecord?.isDeployed ?? false,
|
||||
variables: (workflowRecord?.variables ?? {}) as Record<string, unknown>,
|
||||
}
|
||||
|
||||
// Pass form data as the workflow input
|
||||
const workflowInput = {
|
||||
input: formData,
|
||||
...formData, // Spread form fields at top level for convenience
|
||||
}
|
||||
|
||||
// Execute workflow using streaming (for consistency with chat)
|
||||
const stream = await createStreamingResponse({
|
||||
requestId,
|
||||
workflow: workflowForExecution,
|
||||
input: workflowInput,
|
||||
executingUserId: workspaceOwnerId,
|
||||
streamConfig: {
|
||||
selectedOutputs: [],
|
||||
isSecureMode: true,
|
||||
workflowTriggerType: 'api', // Use 'api' type since form is similar
|
||||
},
|
||||
executionId,
|
||||
})
|
||||
|
||||
// For forms, we don't stream back - we wait for completion and return success
|
||||
// Consume the stream to wait for completion
|
||||
const reader = stream.getReader()
|
||||
let lastOutput: any = null
|
||||
|
||||
try {
|
||||
while (true) {
|
||||
const { done, value } = await reader.read()
|
||||
if (done) break
|
||||
|
||||
// Parse SSE data if present
|
||||
const text = new TextDecoder().decode(value)
|
||||
const lines = text.split('\n')
|
||||
for (const line of lines) {
|
||||
if (line.startsWith('data: ')) {
|
||||
try {
|
||||
const data = JSON.parse(line.slice(6))
|
||||
if (data.type === 'complete' || data.output) {
|
||||
lastOutput = data.output || data
|
||||
}
|
||||
} catch {
|
||||
// Ignore parse errors
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
reader.releaseLock()
|
||||
}
|
||||
|
||||
logger.info(`[${requestId}] Form submission successful for ${identifier}`)
|
||||
|
||||
// Return success with customizations for thank you screen
|
||||
const customizations = deployment.customizations as Record<string, any> | null
|
||||
return addCorsHeaders(
|
||||
createSuccessResponse({
|
||||
success: true,
|
||||
executionId,
|
||||
thankYouTitle: customizations?.thankYouTitle || 'Thank you!',
|
||||
thankYouMessage:
|
||||
customizations?.thankYouMessage || 'Your response has been submitted successfully.',
|
||||
}),
|
||||
request
|
||||
)
|
||||
} catch (error: any) {
|
||||
logger.error(`[${requestId}] Error processing form submission:`, error)
|
||||
return addCorsHeaders(
|
||||
createErrorResponse(error.message || 'Failed to process form submission', 500),
|
||||
request
|
||||
)
|
||||
}
|
||||
} catch (error: any) {
|
||||
logger.error(`[${requestId}] Error processing form submission:`, error)
|
||||
return addCorsHeaders(
|
||||
createErrorResponse(error.message || 'Failed to process form submission', 500),
|
||||
request
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
export async function GET(
|
||||
request: NextRequest,
|
||||
{ params }: { params: Promise<{ identifier: string }> }
|
||||
) {
|
||||
const { identifier } = await params
|
||||
const requestId = generateRequestId()
|
||||
|
||||
try {
|
||||
logger.debug(`[${requestId}] Fetching form info for identifier: ${identifier}`)
|
||||
|
||||
const deploymentResult = await db
|
||||
.select({
|
||||
id: form.id,
|
||||
title: form.title,
|
||||
description: form.description,
|
||||
customizations: form.customizations,
|
||||
isActive: form.isActive,
|
||||
workflowId: form.workflowId,
|
||||
authType: form.authType,
|
||||
password: form.password,
|
||||
allowedEmails: form.allowedEmails,
|
||||
showBranding: form.showBranding,
|
||||
})
|
||||
.from(form)
|
||||
.where(eq(form.identifier, identifier))
|
||||
.limit(1)
|
||||
|
||||
if (deploymentResult.length === 0) {
|
||||
logger.warn(`[${requestId}] Form not found for identifier: ${identifier}`)
|
||||
return addCorsHeaders(createErrorResponse('Form not found', 404), request)
|
||||
}
|
||||
|
||||
const deployment = deploymentResult[0]
|
||||
|
||||
if (!deployment.isActive) {
|
||||
logger.warn(`[${requestId}] Form is not active: ${identifier}`)
|
||||
return addCorsHeaders(createErrorResponse('This form is currently unavailable', 403), request)
|
||||
}
|
||||
|
||||
// Get the workflow's input schema
|
||||
const inputSchema = await getWorkflowInputSchema(deployment.workflowId)
|
||||
|
||||
const cookieName = `form_auth_${deployment.id}`
|
||||
const authCookie = request.cookies.get(cookieName)
|
||||
|
||||
// If authenticated (via cookie), return full form config
|
||||
if (
|
||||
deployment.authType !== 'public' &&
|
||||
authCookie &&
|
||||
validateAuthToken(authCookie.value, deployment.id, deployment.password)
|
||||
) {
|
||||
return addCorsHeaders(
|
||||
createSuccessResponse({
|
||||
id: deployment.id,
|
||||
title: deployment.title,
|
||||
description: deployment.description,
|
||||
customizations: deployment.customizations,
|
||||
authType: deployment.authType,
|
||||
showBranding: deployment.showBranding,
|
||||
inputSchema,
|
||||
}),
|
||||
request
|
||||
)
|
||||
}
|
||||
|
||||
// Check authentication requirement
|
||||
const authResult = await validateFormAuth(requestId, deployment, request)
|
||||
if (!authResult.authorized) {
|
||||
// Return limited info for auth required forms
|
||||
logger.info(
|
||||
`[${requestId}] Authentication required for form: ${identifier}, type: ${deployment.authType}`
|
||||
)
|
||||
return addCorsHeaders(
|
||||
NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: authResult.error || 'Authentication required',
|
||||
authType: deployment.authType,
|
||||
title: deployment.title,
|
||||
customizations: {
|
||||
primaryColor: (deployment.customizations as any)?.primaryColor,
|
||||
logoUrl: (deployment.customizations as any)?.logoUrl,
|
||||
},
|
||||
},
|
||||
{ status: 401 }
|
||||
),
|
||||
request
|
||||
)
|
||||
}
|
||||
|
||||
return addCorsHeaders(
|
||||
createSuccessResponse({
|
||||
id: deployment.id,
|
||||
title: deployment.title,
|
||||
description: deployment.description,
|
||||
customizations: deployment.customizations,
|
||||
authType: deployment.authType,
|
||||
showBranding: deployment.showBranding,
|
||||
inputSchema,
|
||||
}),
|
||||
request
|
||||
)
|
||||
} catch (error: any) {
|
||||
logger.error(`[${requestId}] Error fetching form info:`, error)
|
||||
return addCorsHeaders(
|
||||
createErrorResponse(error.message || 'Failed to fetch form information', 500),
|
||||
request
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
export async function OPTIONS(request: NextRequest) {
|
||||
return addCorsHeaders(new NextResponse(null, { status: 204 }), request)
|
||||
}
|
||||
233
apps/sim/app/api/form/manage/[id]/route.ts
Normal file
233
apps/sim/app/api/form/manage/[id]/route.ts
Normal file
@@ -0,0 +1,233 @@
|
||||
import { db } from '@sim/db'
|
||||
import { form } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { eq } from 'drizzle-orm'
|
||||
import type { NextRequest } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { encryptSecret } from '@/lib/core/security/encryption'
|
||||
import { checkFormAccess, DEFAULT_FORM_CUSTOMIZATIONS } from '@/app/api/form/utils'
|
||||
import { createErrorResponse, createSuccessResponse } from '@/app/api/workflows/utils'
|
||||
|
||||
const logger = createLogger('FormManageAPI')
|
||||
|
||||
const fieldConfigSchema = z.object({
|
||||
name: z.string(),
|
||||
type: z.string(),
|
||||
label: z.string(),
|
||||
description: z.string().optional(),
|
||||
required: z.boolean().optional(),
|
||||
})
|
||||
|
||||
const updateFormSchema = z.object({
|
||||
identifier: z
|
||||
.string()
|
||||
.min(1, 'Identifier is required')
|
||||
.max(100, 'Identifier must be 100 characters or less')
|
||||
.regex(/^[a-z0-9-]+$/, 'Identifier can only contain lowercase letters, numbers, and hyphens')
|
||||
.optional(),
|
||||
title: z
|
||||
.string()
|
||||
.min(1, 'Title is required')
|
||||
.max(200, 'Title must be 200 characters or less')
|
||||
.optional(),
|
||||
description: z.string().max(1000, 'Description must be 1000 characters or less').optional(),
|
||||
customizations: z
|
||||
.object({
|
||||
primaryColor: z.string().optional(),
|
||||
welcomeMessage: z
|
||||
.string()
|
||||
.max(500, 'Welcome message must be 500 characters or less')
|
||||
.optional(),
|
||||
thankYouTitle: z
|
||||
.string()
|
||||
.max(100, 'Thank you title must be 100 characters or less')
|
||||
.optional(),
|
||||
thankYouMessage: z
|
||||
.string()
|
||||
.max(500, 'Thank you message must be 500 characters or less')
|
||||
.optional(),
|
||||
logoUrl: z.string().url('Logo URL must be a valid URL').optional().or(z.literal('')),
|
||||
fieldConfigs: z.array(fieldConfigSchema).optional(),
|
||||
})
|
||||
.optional(),
|
||||
authType: z.enum(['public', 'password', 'email']).optional(),
|
||||
password: z
|
||||
.string()
|
||||
.min(6, 'Password must be at least 6 characters')
|
||||
.optional()
|
||||
.or(z.literal('')),
|
||||
allowedEmails: z.array(z.string()).optional(),
|
||||
showBranding: z.boolean().optional(),
|
||||
isActive: z.boolean().optional(),
|
||||
})
|
||||
|
||||
export async function GET(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
|
||||
try {
|
||||
const session = await getSession()
|
||||
|
||||
if (!session) {
|
||||
return createErrorResponse('Unauthorized', 401)
|
||||
}
|
||||
|
||||
const { id } = await params
|
||||
|
||||
const { hasAccess, form: formRecord } = await checkFormAccess(id, session.user.id)
|
||||
|
||||
if (!hasAccess || !formRecord) {
|
||||
return createErrorResponse('Form not found or access denied', 404)
|
||||
}
|
||||
|
||||
const { password: _password, ...formWithoutPassword } = formRecord
|
||||
|
||||
return createSuccessResponse({
|
||||
form: {
|
||||
...formWithoutPassword,
|
||||
hasPassword: !!formRecord.password,
|
||||
},
|
||||
})
|
||||
} catch (error: any) {
|
||||
logger.error('Error fetching form:', error)
|
||||
return createErrorResponse(error.message || 'Failed to fetch form', 500)
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * PATCH /api/form/[id] — partially update a form deployment.
 *
 * Requires a session and form access (owner or workspace admin, via checkFormAccess).
 * Validates the body with updateFormSchema, enforces identifier uniqueness and
 * auth-type invariants, then writes only the fields that were actually supplied.
 *
 * Responses: 200 success · 400 validation/conflict · 401 no session ·
 * 404 missing form or no access · 500 unexpected.
 */
export async function PATCH(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
  try {
    const session = await getSession()

    if (!session) {
      return createErrorResponse('Unauthorized', 401)
    }

    const { id } = await params

    const { hasAccess, form: formRecord } = await checkFormAccess(id, session.user.id)

    if (!hasAccess || !formRecord) {
      return createErrorResponse('Form not found or access denied', 404)
    }

    const body = await request.json()

    // Inner try scopes Zod errors so they map to 400 rather than the outer 500.
    try {
      const validatedData = updateFormSchema.parse(body)

      const {
        identifier,
        title,
        description,
        customizations,
        authType,
        password,
        allowedEmails,
        showBranding,
        isActive,
      } = validatedData

      // Changing the identifier requires the new value to be globally unique.
      if (identifier && identifier !== formRecord.identifier) {
        const existingIdentifier = await db
          .select()
          .from(form)
          .where(eq(form.identifier, identifier))
          .limit(1)

        if (existingIdentifier.length > 0) {
          return createErrorResponse('Identifier already in use', 400)
        }
      }

      // Password auth needs a password — either supplied now or already stored.
      if (authType === 'password' && !password && !formRecord.password) {
        return createErrorResponse('Password is required when using password protection', 400)
      }

      // Email auth needs at least one allowed email/domain — new or already stored.
      if (
        authType === 'email' &&
        (!allowedEmails || allowedEmails.length === 0) &&
        (!formRecord.allowedEmails || (formRecord.allowedEmails as string[]).length === 0)
      ) {
        return createErrorResponse(
          'At least one email or domain is required when using email access control',
          400
        )
      }

      // Build a partial update: only keys present in the request body are written.
      const updateData: Record<string, any> = {
        updatedAt: new Date(),
      }

      if (identifier !== undefined) updateData.identifier = identifier
      if (title !== undefined) updateData.title = title
      if (description !== undefined) updateData.description = description
      if (showBranding !== undefined) updateData.showBranding = showBranding
      if (isActive !== undefined) updateData.isActive = isActive
      if (authType !== undefined) updateData.authType = authType
      if (allowedEmails !== undefined) updateData.allowedEmails = allowedEmails

      // Merge customizations: defaults < values already stored < this request.
      if (customizations !== undefined) {
        const existingCustomizations = (formRecord.customizations as Record<string, any>) || {}
        updateData.customizations = {
          ...DEFAULT_FORM_CUSTOMIZATIONS,
          ...existingCustomizations,
          ...customizations,
        }
      }

      // Encrypt a newly supplied password; clear it when moving off password auth.
      if (password) {
        const { encrypted } = await encryptSecret(password)
        updateData.password = encrypted
      } else if (authType && authType !== 'password') {
        updateData.password = null
      }

      await db.update(form).set(updateData).where(eq(form.id, id))

      logger.info(`Form ${id} updated successfully`)

      return createSuccessResponse({
        message: 'Form updated successfully',
      })
    } catch (validationError) {
      if (validationError instanceof z.ZodError) {
        const errorMessage = validationError.errors[0]?.message || 'Invalid request data'
        return createErrorResponse(errorMessage, 400, 'VALIDATION_ERROR')
      }
      throw validationError
    }
  } catch (error: any) {
    logger.error('Error updating form:', error)
    return createErrorResponse(error.message || 'Failed to update form', 500)
  }
}
|
||||
|
||||
export async function DELETE(
|
||||
request: NextRequest,
|
||||
{ params }: { params: Promise<{ id: string }> }
|
||||
) {
|
||||
try {
|
||||
const session = await getSession()
|
||||
|
||||
if (!session) {
|
||||
return createErrorResponse('Unauthorized', 401)
|
||||
}
|
||||
|
||||
const { id } = await params
|
||||
|
||||
const { hasAccess, form: formRecord } = await checkFormAccess(id, session.user.id)
|
||||
|
||||
if (!hasAccess || !formRecord) {
|
||||
return createErrorResponse('Form not found or access denied', 404)
|
||||
}
|
||||
|
||||
await db.update(form).set({ isActive: false, updatedAt: new Date() }).where(eq(form.id, id))
|
||||
|
||||
logger.info(`Form ${id} deleted (soft delete)`)
|
||||
|
||||
return createSuccessResponse({
|
||||
message: 'Form deleted successfully',
|
||||
})
|
||||
} catch (error: any) {
|
||||
logger.error('Error deleting form:', error)
|
||||
return createErrorResponse(error.message || 'Failed to delete form', 500)
|
||||
}
|
||||
}
|
||||
214
apps/sim/app/api/form/route.ts
Normal file
214
apps/sim/app/api/form/route.ts
Normal file
@@ -0,0 +1,214 @@
|
||||
import { db } from '@sim/db'
|
||||
import { form } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { eq } from 'drizzle-orm'
|
||||
import type { NextRequest } from 'next/server'
|
||||
import { v4 as uuidv4 } from 'uuid'
|
||||
import { z } from 'zod'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { isDev } from '@/lib/core/config/feature-flags'
|
||||
import { encryptSecret } from '@/lib/core/security/encryption'
|
||||
import { getEmailDomain } from '@/lib/core/utils/urls'
|
||||
import { deployWorkflow } from '@/lib/workflows/persistence/utils'
|
||||
import {
|
||||
checkWorkflowAccessForFormCreation,
|
||||
DEFAULT_FORM_CUSTOMIZATIONS,
|
||||
} from '@/app/api/form/utils'
|
||||
import { createErrorResponse, createSuccessResponse } from '@/app/api/workflows/utils'
|
||||
|
||||
const logger = createLogger('FormAPI')
|
||||
|
||||
const fieldConfigSchema = z.object({
|
||||
name: z.string(),
|
||||
type: z.string(),
|
||||
label: z.string(),
|
||||
description: z.string().optional(),
|
||||
required: z.boolean().optional(),
|
||||
})
|
||||
|
||||
const formSchema = z.object({
|
||||
workflowId: z.string().min(1, 'Workflow ID is required'),
|
||||
identifier: z
|
||||
.string()
|
||||
.min(1, 'Identifier is required')
|
||||
.max(100, 'Identifier must be 100 characters or less')
|
||||
.regex(/^[a-z0-9-]+$/, 'Identifier can only contain lowercase letters, numbers, and hyphens'),
|
||||
title: z.string().min(1, 'Title is required').max(200, 'Title must be 200 characters or less'),
|
||||
description: z.string().max(1000, 'Description must be 1000 characters or less').optional(),
|
||||
customizations: z
|
||||
.object({
|
||||
primaryColor: z.string().optional(),
|
||||
welcomeMessage: z
|
||||
.string()
|
||||
.max(500, 'Welcome message must be 500 characters or less')
|
||||
.optional(),
|
||||
thankYouTitle: z
|
||||
.string()
|
||||
.max(100, 'Thank you title must be 100 characters or less')
|
||||
.optional(),
|
||||
thankYouMessage: z
|
||||
.string()
|
||||
.max(500, 'Thank you message must be 500 characters or less')
|
||||
.optional(),
|
||||
logoUrl: z.string().url('Logo URL must be a valid URL').optional().or(z.literal('')),
|
||||
fieldConfigs: z.array(fieldConfigSchema).optional(),
|
||||
})
|
||||
.optional(),
|
||||
authType: z.enum(['public', 'password', 'email']).default('public'),
|
||||
password: z
|
||||
.string()
|
||||
.min(6, 'Password must be at least 6 characters')
|
||||
.optional()
|
||||
.or(z.literal('')),
|
||||
allowedEmails: z.array(z.string()).optional().default([]),
|
||||
showBranding: z.boolean().optional().default(true),
|
||||
})
|
||||
|
||||
export async function GET(request: NextRequest) {
|
||||
try {
|
||||
const session = await getSession()
|
||||
|
||||
if (!session) {
|
||||
return createErrorResponse('Unauthorized', 401)
|
||||
}
|
||||
|
||||
const deployments = await db.select().from(form).where(eq(form.userId, session.user.id))
|
||||
|
||||
return createSuccessResponse({ deployments })
|
||||
} catch (error: any) {
|
||||
logger.error('Error fetching form deployments:', error)
|
||||
return createErrorResponse(error.message || 'Failed to fetch form deployments', 500)
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * POST /api/form — create a new form deployment for a workflow.
 *
 * Flow: session check → schema validation → auth-type invariants →
 * identifier uniqueness → workflow access check → (re)deploy workflow →
 * encrypt password if needed → insert the form row → return its public URL.
 *
 * Responses: 200 success · 400 validation/conflict · 401 no session ·
 * 404 workflow missing/no access · 500 deploy or unexpected failure.
 */
export async function POST(request: NextRequest) {
  try {
    const session = await getSession()

    if (!session) {
      return createErrorResponse('Unauthorized', 401)
    }

    const body = await request.json()

    // Inner try scopes Zod errors so they map to 400 rather than the outer 500.
    try {
      const validatedData = formSchema.parse(body)

      const {
        workflowId,
        identifier,
        title,
        description = '',
        customizations,
        authType = 'public',
        password,
        allowedEmails = [],
        showBranding = true,
      } = validatedData

      // Password auth must come with a password at creation time.
      if (authType === 'password' && !password) {
        return createErrorResponse('Password is required when using password protection', 400)
      }

      // Email auth must come with at least one allowed email/domain.
      if (authType === 'email' && (!Array.isArray(allowedEmails) || allowedEmails.length === 0)) {
        return createErrorResponse(
          'At least one email or domain is required when using email access control',
          400
        )
      }

      // Identifiers are global across all forms — reject duplicates up front.
      // NOTE(review): check-then-insert is racy unless the column has a unique
      // constraint in the DB — confirm against the schema.
      const existingIdentifier = await db
        .select()
        .from(form)
        .where(eq(form.identifier, identifier))
        .limit(1)

      if (existingIdentifier.length > 0) {
        return createErrorResponse('Identifier already in use', 400)
      }

      const { hasAccess, workflow: workflowRecord } = await checkWorkflowAccessForFormCreation(
        workflowId,
        session.user.id
      )

      if (!hasAccess || !workflowRecord) {
        return createErrorResponse('Workflow not found or access denied', 404)
      }

      // Forms serve a deployed workflow, so deploy (or redeploy) it now.
      const result = await deployWorkflow({
        workflowId,
        deployedBy: session.user.id,
      })

      if (!result.success) {
        return createErrorResponse(result.error || 'Failed to deploy workflow', 500)
      }

      logger.info(
        `${workflowRecord.isDeployed ? 'Redeployed' : 'Auto-deployed'} workflow ${workflowId} for form (v${result.version})`
      )

      // Only ever persist an encrypted password, and only for password auth.
      let encryptedPassword = null
      if (authType === 'password' && password) {
        const { encrypted } = await encryptSecret(password)
        encryptedPassword = encrypted
      }

      const id = uuidv4()

      logger.info('Creating form deployment with values:', {
        workflowId,
        identifier,
        title,
        authType,
        hasPassword: !!encryptedPassword,
        emailCount: allowedEmails?.length || 0,
        showBranding,
      })

      // Request-supplied customizations layered over the shared defaults.
      const mergedCustomizations = {
        ...DEFAULT_FORM_CUSTOMIZATIONS,
        ...(customizations || {}),
      }

      await db.insert(form).values({
        id,
        workflowId,
        userId: session.user.id,
        identifier,
        title,
        description: description || '',
        customizations: mergedCustomizations,
        isActive: true,
        authType,
        password: encryptedPassword,
        // allowedEmails only makes sense for email auth; stored empty otherwise.
        allowedEmails: authType === 'email' ? allowedEmails : [],
        showBranding,
        createdAt: new Date(),
        updatedAt: new Date(),
      })

      // Build the shareable URL (http in dev, https otherwise).
      const baseDomain = getEmailDomain()
      const protocol = isDev ? 'http' : 'https'
      const formUrl = `${protocol}://${baseDomain}/form/${identifier}`

      logger.info(`Form "${title}" deployed successfully at ${formUrl}`)

      return createSuccessResponse({
        id,
        formUrl,
        message: 'Form deployment created successfully',
      })
    } catch (validationError) {
      if (validationError instanceof z.ZodError) {
        const errorMessage = validationError.errors[0]?.message || 'Invalid request data'
        return createErrorResponse(errorMessage, 400, 'VALIDATION_ERROR')
      }
      throw validationError
    }
  } catch (error: any) {
    logger.error('Error creating form deployment:', error)
    return createErrorResponse(error.message || 'Failed to create form deployment', 500)
  }
}
|
||||
367
apps/sim/app/api/form/utils.test.ts
Normal file
367
apps/sim/app/api/form/utils.test.ts
Normal file
@@ -0,0 +1,367 @@
|
||||
import { databaseMock, loggerMock } from '@sim/testing'
|
||||
import type { NextResponse } from 'next/server'
|
||||
/**
|
||||
* Tests for form API utils
|
||||
*
|
||||
* @vitest-environment node
|
||||
*/
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
|
||||
vi.mock('@sim/db', () => databaseMock)
|
||||
vi.mock('@sim/logger', () => loggerMock)
|
||||
|
||||
const mockDecryptSecret = vi.fn()
|
||||
|
||||
vi.mock('@/lib/core/security/encryption', () => ({
|
||||
decryptSecret: mockDecryptSecret,
|
||||
}))
|
||||
|
||||
vi.mock('@/lib/core/config/feature-flags', () => ({
|
||||
isDev: true,
|
||||
isHosted: false,
|
||||
isProd: false,
|
||||
}))
|
||||
|
||||
vi.mock('@/lib/workspaces/permissions/utils', () => ({
|
||||
hasAdminPermission: vi.fn(),
|
||||
}))
|
||||
|
||||
// Test suite for the form auth/customization utilities. Modules under test are
// imported dynamically inside each test so the vi.mock factories above apply.
describe('Form API Utils', () => {
  afterEach(() => {
    vi.clearAllMocks()
  })

  describe('Auth token utils', () => {
    it.concurrent('should validate auth tokens', async () => {
      const { validateAuthToken } = await import('@/lib/core/security/deployment')

      const formId = 'test-form-id'
      const type = 'password'

      // Token format: base64("<formId>:<type>:<issuedAtMs>")
      const token = Buffer.from(`${formId}:${type}:${Date.now()}`).toString('base64')
      expect(typeof token).toBe('string')
      expect(token.length).toBeGreaterThan(0)

      const isValid = validateAuthToken(token, formId)
      expect(isValid).toBe(true)

      // A token minted for one form must not authorize another.
      const isInvalidForm = validateAuthToken(token, 'wrong-form-id')
      expect(isInvalidForm).toBe(false)
    })

    it.concurrent('should reject expired tokens', async () => {
      const { validateAuthToken } = await import('@/lib/core/security/deployment')

      const formId = 'test-form-id'
      // Timestamp 25h in the past — beyond the token lifetime.
      const expiredToken = Buffer.from(
        `${formId}:password:${Date.now() - 25 * 60 * 60 * 1000}`
      ).toString('base64')

      const isValid = validateAuthToken(expiredToken, formId)
      expect(isValid).toBe(false)
    })

    it.concurrent('should validate tokens with password hash', async () => {
      const { validateAuthToken } = await import('@/lib/core/security/deployment')
      const crypto = await import('crypto')

      const formId = 'test-form-id'
      const encryptedPassword = 'encrypted-password-value'
      // Tokens bind to the stored secret via the first 8 hex chars of its sha256.
      const pwHash = crypto
        .createHash('sha256')
        .update(encryptedPassword)
        .digest('hex')
        .substring(0, 8)

      const token = Buffer.from(`${formId}:password:${Date.now()}:${pwHash}`).toString('base64')

      const isValid = validateAuthToken(token, formId, encryptedPassword)
      expect(isValid).toBe(true)

      // Changing the stored password invalidates previously issued tokens.
      const isInvalidPassword = validateAuthToken(token, formId, 'different-password')
      expect(isInvalidPassword).toBe(false)
    })
  })

  describe('Cookie handling', () => {
    it('should set auth cookie correctly', async () => {
      const { setFormAuthCookie } = await import('@/app/api/form/utils')

      const mockSet = vi.fn()
      const mockResponse = {
        cookies: {
          set: mockSet,
        },
      } as unknown as NextResponse

      const formId = 'test-form-id'
      const type = 'password'

      setFormAuthCookie(mockResponse, formId, type)

      expect(mockSet).toHaveBeenCalledWith({
        name: `form_auth_${formId}`,
        value: expect.any(String),
        httpOnly: true,
        secure: false, // Development mode
        sameSite: 'lax',
        path: '/',
        maxAge: 60 * 60 * 24,
      })
    })
  })

  describe('CORS handling', () => {
    it.concurrent('should add CORS headers for any origin', async () => {
      const { addCorsHeaders } = await import('@/lib/core/security/deployment')

      const mockRequest = {
        headers: {
          get: vi.fn().mockReturnValue('http://localhost:3000'),
        },
      } as any

      const mockResponse = {
        headers: {
          set: vi.fn(),
        },
      } as unknown as NextResponse

      addCorsHeaders(mockResponse, mockRequest)

      // The request's Origin is echoed back (credentialed CORS can't use '*').
      expect(mockResponse.headers.set).toHaveBeenCalledWith(
        'Access-Control-Allow-Origin',
        'http://localhost:3000'
      )
      expect(mockResponse.headers.set).toHaveBeenCalledWith(
        'Access-Control-Allow-Credentials',
        'true'
      )
      expect(mockResponse.headers.set).toHaveBeenCalledWith(
        'Access-Control-Allow-Methods',
        'GET, POST, OPTIONS'
      )
      expect(mockResponse.headers.set).toHaveBeenCalledWith(
        'Access-Control-Allow-Headers',
        'Content-Type, X-Requested-With'
      )
    })

    it.concurrent('should not set CORS headers when no origin', async () => {
      const { addCorsHeaders } = await import('@/lib/core/security/deployment')

      const mockRequest = {
        headers: {
          get: vi.fn().mockReturnValue(''),
        },
      } as any

      const mockResponse = {
        headers: {
          set: vi.fn(),
        },
      } as unknown as NextResponse

      addCorsHeaders(mockResponse, mockRequest)

      expect(mockResponse.headers.set).not.toHaveBeenCalled()
    })
  })

  describe('Form auth validation', () => {
    beforeEach(async () => {
      vi.clearAllMocks()
      // All password checks in this group decrypt to 'correct-password'.
      mockDecryptSecret.mockResolvedValue({ decrypted: 'correct-password' })
    })

    it('should allow access to public forms', async () => {
      const { validateFormAuth } = await import('@/app/api/form/utils')

      const deployment = {
        id: 'form-id',
        authType: 'public',
      }

      const mockRequest = {
        cookies: {
          get: vi.fn().mockReturnValue(null),
        },
      } as any

      const result = await validateFormAuth('request-id', deployment, mockRequest)

      expect(result.authorized).toBe(true)
    })

    it('should request password auth for GET requests', async () => {
      const { validateFormAuth } = await import('@/app/api/form/utils')

      const deployment = {
        id: 'form-id',
        authType: 'password',
      }

      const mockRequest = {
        method: 'GET',
        cookies: {
          get: vi.fn().mockReturnValue(null),
        },
      } as any

      const result = await validateFormAuth('request-id', deployment, mockRequest)

      // GETs never carry credentials — the marker tells the UI to prompt.
      expect(result.authorized).toBe(false)
      expect(result.error).toBe('auth_required_password')
    })

    it('should validate password for POST requests', async () => {
      const { validateFormAuth } = await import('@/app/api/form/utils')
      const { decryptSecret } = await import('@/lib/core/security/encryption')

      const deployment = {
        id: 'form-id',
        authType: 'password',
        password: 'encrypted-password',
      }

      const mockRequest = {
        method: 'POST',
        cookies: {
          get: vi.fn().mockReturnValue(null),
        },
      } as any

      const parsedBody = {
        password: 'correct-password',
      }

      const result = await validateFormAuth('request-id', deployment, mockRequest, parsedBody)

      // The stored secret is decrypted, then compared with the supplied password.
      expect(decryptSecret).toHaveBeenCalledWith('encrypted-password')
      expect(result.authorized).toBe(true)
    })

    it('should reject incorrect password', async () => {
      const { validateFormAuth } = await import('@/app/api/form/utils')

      const deployment = {
        id: 'form-id',
        authType: 'password',
        password: 'encrypted-password',
      }

      const mockRequest = {
        method: 'POST',
        cookies: {
          get: vi.fn().mockReturnValue(null),
        },
      } as any

      const parsedBody = {
        password: 'wrong-password',
      }

      const result = await validateFormAuth('request-id', deployment, mockRequest, parsedBody)

      expect(result.authorized).toBe(false)
      expect(result.error).toBe('Invalid password')
    })

    it('should request email auth for email-protected forms', async () => {
      const { validateFormAuth } = await import('@/app/api/form/utils')

      const deployment = {
        id: 'form-id',
        authType: 'email',
        allowedEmails: ['user@example.com', '@company.com'],
      }

      const mockRequest = {
        method: 'GET',
        cookies: {
          get: vi.fn().mockReturnValue(null),
        },
      } as any

      const result = await validateFormAuth('request-id', deployment, mockRequest)

      expect(result.authorized).toBe(false)
      expect(result.error).toBe('auth_required_email')
    })

    it('should check allowed emails for email auth', async () => {
      const { validateFormAuth } = await import('@/app/api/form/utils')

      const deployment = {
        id: 'form-id',
        authType: 'email',
        allowedEmails: ['user@example.com', '@company.com'],
      }

      const mockRequest = {
        method: 'POST',
        cookies: {
          get: vi.fn().mockReturnValue(null),
        },
      } as any

      // Exact email match should authorize
      const result1 = await validateFormAuth('request-id', deployment, mockRequest, {
        email: 'user@example.com',
      })
      expect(result1.authorized).toBe(true)

      // Domain match should authorize
      const result2 = await validateFormAuth('request-id', deployment, mockRequest, {
        email: 'other@company.com',
      })
      expect(result2.authorized).toBe(true)

      // Unknown email should not authorize
      const result3 = await validateFormAuth('request-id', deployment, mockRequest, {
        email: 'user@unknown.com',
      })
      expect(result3.authorized).toBe(false)
      expect(result3.error).toBe('Email not authorized for this form')
    })

    it('should require password when formData is present without password', async () => {
      const { validateFormAuth } = await import('@/app/api/form/utils')

      const deployment = {
        id: 'form-id',
        authType: 'password',
        password: 'encrypted-password',
      }

      const mockRequest = {
        method: 'POST',
        cookies: {
          get: vi.fn().mockReturnValue(null),
        },
      } as any

      const parsedBody = {
        formData: { field1: 'value1' },
        // No password provided
      }

      const result = await validateFormAuth('request-id', deployment, mockRequest, parsedBody)

      // A bare submission re-triggers the auth prompt instead of a hard error.
      expect(result.authorized).toBe(false)
      expect(result.error).toBe('auth_required_password')
    })
  })

  describe('Default customizations', () => {
    it.concurrent('should have correct default values', async () => {
      const { DEFAULT_FORM_CUSTOMIZATIONS } = await import('@/app/api/form/utils')

      expect(DEFAULT_FORM_CUSTOMIZATIONS).toEqual({
        welcomeMessage: '',
        thankYouTitle: 'Thank you!',
        thankYouMessage: 'Your response has been submitted successfully.',
      })
    })
  })
})
|
||||
204
apps/sim/app/api/form/utils.ts
Normal file
204
apps/sim/app/api/form/utils.ts
Normal file
@@ -0,0 +1,204 @@
|
||||
import { db } from '@sim/db'
|
||||
import { form, workflow } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { eq } from 'drizzle-orm'
|
||||
import type { NextRequest, NextResponse } from 'next/server'
|
||||
import {
|
||||
isEmailAllowed,
|
||||
setDeploymentAuthCookie,
|
||||
validateAuthToken,
|
||||
} from '@/lib/core/security/deployment'
|
||||
import { decryptSecret } from '@/lib/core/security/encryption'
|
||||
import { hasAdminPermission } from '@/lib/workspaces/permissions/utils'
|
||||
|
||||
const logger = createLogger('FormAuthUtils')

/**
 * Set the auth cookie for a form deployment after a successful credential check.
 * Thin wrapper over the shared deployment-cookie helper with the kind fixed to 'form'.
 *
 * @param response          response the cookie is attached to
 * @param formId            form deployment id (becomes part of the cookie name)
 * @param type              auth type the cookie represents (e.g. 'password', 'email')
 * @param encryptedPassword stored encrypted password, when the token should bind to it
 */
export function setFormAuthCookie(
  response: NextResponse,
  formId: string,
  type: string,
  encryptedPassword?: string | null
): void {
  setDeploymentAuthCookie(response, 'form', formId, type, encryptedPassword)
}
|
||||
|
||||
/**
|
||||
* Check if user has permission to create a form for a specific workflow
|
||||
* Either the user owns the workflow directly OR has admin permission for the workflow's workspace
|
||||
*/
|
||||
export async function checkWorkflowAccessForFormCreation(
|
||||
workflowId: string,
|
||||
userId: string
|
||||
): Promise<{ hasAccess: boolean; workflow?: any }> {
|
||||
const workflowData = await db.select().from(workflow).where(eq(workflow.id, workflowId)).limit(1)
|
||||
|
||||
if (workflowData.length === 0) {
|
||||
return { hasAccess: false }
|
||||
}
|
||||
|
||||
const workflowRecord = workflowData[0]
|
||||
|
||||
if (workflowRecord.userId === userId) {
|
||||
return { hasAccess: true, workflow: workflowRecord }
|
||||
}
|
||||
|
||||
if (workflowRecord.workspaceId) {
|
||||
const hasAdmin = await hasAdminPermission(userId, workflowRecord.workspaceId)
|
||||
if (hasAdmin) {
|
||||
return { hasAccess: true, workflow: workflowRecord }
|
||||
}
|
||||
}
|
||||
|
||||
return { hasAccess: false }
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if user has access to view/edit/delete a specific form
|
||||
* Either the user owns the form directly OR has admin permission for the workflow's workspace
|
||||
*/
|
||||
export async function checkFormAccess(
|
||||
formId: string,
|
||||
userId: string
|
||||
): Promise<{ hasAccess: boolean; form?: any }> {
|
||||
const formData = await db
|
||||
.select({
|
||||
form: form,
|
||||
workflowWorkspaceId: workflow.workspaceId,
|
||||
})
|
||||
.from(form)
|
||||
.innerJoin(workflow, eq(form.workflowId, workflow.id))
|
||||
.where(eq(form.id, formId))
|
||||
.limit(1)
|
||||
|
||||
if (formData.length === 0) {
|
||||
return { hasAccess: false }
|
||||
}
|
||||
|
||||
const { form: formRecord, workflowWorkspaceId } = formData[0]
|
||||
|
||||
if (formRecord.userId === userId) {
|
||||
return { hasAccess: true, form: formRecord }
|
||||
}
|
||||
|
||||
if (workflowWorkspaceId) {
|
||||
const hasAdmin = await hasAdminPermission(userId, workflowWorkspaceId)
|
||||
if (hasAdmin) {
|
||||
return { hasAccess: true, form: formRecord }
|
||||
}
|
||||
}
|
||||
|
||||
return { hasAccess: false }
|
||||
}
|
||||
|
||||
/**
 * Validate a request against a form deployment's auth settings.
 *
 * Resolution order:
 *   1. 'public' forms are always authorized.
 *   2. A valid `form_auth_<id>` cookie from a prior login authorizes any method.
 *   3. Otherwise 'password'/'email' forms must supply credentials in the POST
 *      body; GETs receive an `auth_required_*` marker so the UI can prompt.
 *
 * @param requestId  correlation id used only in log lines
 * @param deployment form row (id, authType, password, allowedEmails are read)
 * @param request    incoming request (method and cookies are read)
 * @param parsedBody optional pre-parsed JSON body ({ password | email, formData })
 * @returns { authorized, error? } — error is a machine-readable marker or message
 */
export async function validateFormAuth(
  requestId: string,
  deployment: any,
  request: NextRequest,
  parsedBody?: any
): Promise<{ authorized: boolean; error?: string }> {
  const authType = deployment.authType || 'public'

  if (authType === 'public') {
    return { authorized: true }
  }

  // A previously issued auth cookie short-circuits re-authentication.
  const cookieName = `form_auth_${deployment.id}`
  const authCookie = request.cookies.get(cookieName)

  if (authCookie && validateAuthToken(authCookie.value, deployment.id, deployment.password)) {
    return { authorized: true }
  }

  if (authType === 'password') {
    if (request.method === 'GET') {
      return { authorized: false, error: 'auth_required_password' }
    }

    try {
      if (!parsedBody) {
        return { authorized: false, error: 'Password is required' }
      }

      const { password, formData } = parsedBody

      // A submission without credentials re-triggers the auth prompt.
      if (formData && !password) {
        return { authorized: false, error: 'auth_required_password' }
      }

      if (!password) {
        return { authorized: false, error: 'Password is required' }
      }

      // Misconfiguration guard: password auth with no stored secret.
      if (!deployment.password) {
        logger.error(`[${requestId}] No password set for password-protected form: ${deployment.id}`)
        return { authorized: false, error: 'Authentication configuration error' }
      }

      // Compare the supplied password against the decrypted stored secret.
      const { decrypted } = await decryptSecret(deployment.password)
      if (password !== decrypted) {
        return { authorized: false, error: 'Invalid password' }
      }

      return { authorized: true }
    } catch (error) {
      logger.error(`[${requestId}] Error validating password:`, error)
      return { authorized: false, error: 'Authentication error' }
    }
  }

  if (authType === 'email') {
    if (request.method === 'GET') {
      return { authorized: false, error: 'auth_required_email' }
    }

    try {
      if (!parsedBody) {
        return { authorized: false, error: 'Email is required' }
      }

      const { email, formData } = parsedBody

      // A submission without credentials re-triggers the auth prompt.
      if (formData && !email) {
        return { authorized: false, error: 'auth_required_email' }
      }

      if (!email) {
        return { authorized: false, error: 'Email is required' }
      }

      // allowedEmails holds exact addresses and '@domain' entries; matching is
      // delegated to isEmailAllowed.
      const allowedEmails: string[] = deployment.allowedEmails || []

      if (isEmailAllowed(email, allowedEmails)) {
        return { authorized: true }
      }

      return { authorized: false, error: 'Email not authorized for this form' }
    } catch (error) {
      logger.error(`[${requestId}] Error validating email:`, error)
      return { authorized: false, error: 'Authentication error' }
    }
  }

  return { authorized: false, error: 'Unsupported authentication type' }
}
|
||||
|
||||
/**
|
||||
* Form customizations interface
|
||||
*/
|
||||
export interface FormCustomizations {
|
||||
primaryColor?: string
|
||||
welcomeMessage?: string
|
||||
thankYouTitle?: string
|
||||
thankYouMessage?: string
|
||||
logoUrl?: string
|
||||
}
|
||||
|
||||
/**
|
||||
* Default form customizations
|
||||
* Note: primaryColor is intentionally undefined to allow thank you screen to use its green default
|
||||
*/
|
||||
export const DEFAULT_FORM_CUSTOMIZATIONS: FormCustomizations = {
|
||||
welcomeMessage: '',
|
||||
thankYouTitle: 'Thank you!',
|
||||
thankYouMessage: 'Your response has been submitted successfully.',
|
||||
}
|
||||
71
apps/sim/app/api/form/validate/route.ts
Normal file
71
apps/sim/app/api/form/validate/route.ts
Normal file
@@ -0,0 +1,71 @@
|
||||
import { db } from '@sim/db'
|
||||
import { form } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { eq } from 'drizzle-orm'
|
||||
import type { NextRequest } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { createErrorResponse, createSuccessResponse } from '@/app/api/workflows/utils'
|
||||
|
||||
const logger = createLogger('FormValidateAPI')
|
||||
|
||||
const validateQuerySchema = z.object({
|
||||
identifier: z
|
||||
.string()
|
||||
.min(1, 'Identifier is required')
|
||||
.regex(/^[a-z0-9-]+$/, 'Identifier can only contain lowercase letters, numbers, and hyphens')
|
||||
.max(100, 'Identifier must be 100 characters or less'),
|
||||
})
|
||||
|
||||
/**
|
||||
* GET endpoint to validate form identifier availability
|
||||
*/
|
||||
export async function GET(request: NextRequest) {
|
||||
try {
|
||||
const session = await getSession()
|
||||
if (!session?.user?.id) {
|
||||
return createErrorResponse('Unauthorized', 401)
|
||||
}
|
||||
const { searchParams } = new URL(request.url)
|
||||
const identifier = searchParams.get('identifier')
|
||||
|
||||
const validation = validateQuerySchema.safeParse({ identifier })
|
||||
|
||||
if (!validation.success) {
|
||||
const errorMessage = validation.error.errors[0]?.message || 'Invalid identifier'
|
||||
logger.warn(`Validation error: ${errorMessage}`)
|
||||
|
||||
if (identifier && !/^[a-z0-9-]+$/.test(identifier)) {
|
||||
return createSuccessResponse({
|
||||
available: false,
|
||||
error: errorMessage,
|
||||
})
|
||||
}
|
||||
|
||||
return createErrorResponse(errorMessage, 400)
|
||||
}
|
||||
|
||||
const { identifier: validatedIdentifier } = validation.data
|
||||
|
||||
const existingForm = await db
|
||||
.select({ id: form.id })
|
||||
.from(form)
|
||||
.where(eq(form.identifier, validatedIdentifier))
|
||||
.limit(1)
|
||||
|
||||
const isAvailable = existingForm.length === 0
|
||||
|
||||
logger.debug(
|
||||
`Identifier "${validatedIdentifier}" availability check: ${isAvailable ? 'available' : 'taken'}`
|
||||
)
|
||||
|
||||
return createSuccessResponse({
|
||||
available: isAvailable,
|
||||
error: isAvailable ? null : 'This identifier is already in use',
|
||||
})
|
||||
} catch (error: unknown) {
|
||||
const message = error instanceof Error ? error.message : 'Failed to validate identifier'
|
||||
logger.error('Error validating form identifier:', error)
|
||||
return createErrorResponse(message, 500)
|
||||
}
|
||||
}
|
||||
@@ -3,6 +3,7 @@
|
||||
*
|
||||
* @vitest-environment node
|
||||
*/
|
||||
import { loggerMock } from '@sim/testing'
|
||||
import { NextRequest } from 'next/server'
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
import { createMockRequest } from '@/app/api/__test-utils__/utils'
|
||||
@@ -82,14 +83,7 @@ vi.mock('@/lib/execution/isolated-vm', () => ({
|
||||
}),
|
||||
}))
|
||||
|
||||
vi.mock('@sim/logger', () => ({
|
||||
createLogger: vi.fn(() => ({
|
||||
info: vi.fn(),
|
||||
error: vi.fn(),
|
||||
warn: vi.fn(),
|
||||
debug: vi.fn(),
|
||||
})),
|
||||
}))
|
||||
vi.mock('@sim/logger', () => loggerMock)
|
||||
|
||||
vi.mock('@/lib/execution/e2b', () => ({
|
||||
executeInE2B: vi.fn(),
|
||||
|
||||
@@ -21,7 +21,6 @@ export async function POST(req: NextRequest) {
|
||||
const requestId = generateRequestId()
|
||||
|
||||
try {
|
||||
// Get user session
|
||||
const session = await getSession()
|
||||
if (!session?.user?.email) {
|
||||
logger.warn(`[${requestId}] Unauthorized help request attempt`)
|
||||
@@ -30,20 +29,20 @@ export async function POST(req: NextRequest) {
|
||||
|
||||
const email = session.user.email
|
||||
|
||||
// Handle multipart form data
|
||||
const formData = await req.formData()
|
||||
|
||||
// Extract form fields
|
||||
const subject = formData.get('subject') as string
|
||||
const message = formData.get('message') as string
|
||||
const type = formData.get('type') as string
|
||||
const workflowId = formData.get('workflowId') as string | null
|
||||
const workspaceId = formData.get('workspaceId') as string
|
||||
const userAgent = formData.get('userAgent') as string | null
|
||||
|
||||
logger.info(`[${requestId}] Processing help request`, {
|
||||
type,
|
||||
email: `${email.substring(0, 3)}***`, // Log partial email for privacy
|
||||
})
|
||||
|
||||
// Validate the form data
|
||||
const validationResult = helpFormSchema.safeParse({
|
||||
subject,
|
||||
message,
|
||||
@@ -60,7 +59,6 @@ export async function POST(req: NextRequest) {
|
||||
)
|
||||
}
|
||||
|
||||
// Extract images
|
||||
const images: { filename: string; content: Buffer; contentType: string }[] = []
|
||||
|
||||
for (const [key, value] of formData.entries()) {
|
||||
@@ -81,10 +79,14 @@ export async function POST(req: NextRequest) {
|
||||
|
||||
logger.debug(`[${requestId}] Help request includes ${images.length} images`)
|
||||
|
||||
// Prepare email content
|
||||
const userId = session.user.id
|
||||
let emailText = `
|
||||
Type: ${type}
|
||||
From: ${email}
|
||||
User ID: ${userId}
|
||||
Workspace ID: ${workspaceId ?? 'N/A'}
|
||||
Workflow ID: ${workflowId ?? 'N/A'}
|
||||
Browser: ${userAgent ?? 'N/A'}
|
||||
|
||||
${message}
|
||||
`
|
||||
@@ -115,7 +117,6 @@ ${message}
|
||||
|
||||
logger.info(`[${requestId}] Help request email sent successfully`)
|
||||
|
||||
// Send confirmation email to the user
|
||||
try {
|
||||
const confirmationHtml = await renderHelpConfirmationEmail(
|
||||
type as 'bug' | 'feedback' | 'feature_request' | 'other',
|
||||
|
||||
@@ -4,18 +4,15 @@
|
||||
*
|
||||
* @vitest-environment node
|
||||
*/
|
||||
import { createEnvMock } from '@sim/testing'
|
||||
import { createEnvMock, createMockLogger } from '@sim/testing'
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
|
||||
vi.mock('drizzle-orm')
|
||||
vi.mock('@sim/logger', () => ({
|
||||
createLogger: vi.fn(() => ({
|
||||
info: vi.fn(),
|
||||
debug: vi.fn(),
|
||||
warn: vi.fn(),
|
||||
error: vi.fn(),
|
||||
})),
|
||||
const loggerMock = vi.hoisted(() => ({
|
||||
createLogger: () => createMockLogger(),
|
||||
}))
|
||||
|
||||
vi.mock('drizzle-orm')
|
||||
vi.mock('@sim/logger', () => loggerMock)
|
||||
vi.mock('@sim/db')
|
||||
vi.mock('@/lib/knowledge/documents/utils', () => ({
|
||||
retryWithExponentialBackoff: (fn: any) => fn(),
|
||||
|
||||
@@ -1,11 +1,12 @@
|
||||
import { db } from '@sim/db'
|
||||
import { memory, permissions, workspace } from '@sim/db/schema'
|
||||
import { memory } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { and, eq } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { checkHybridAuth } from '@/lib/auth/hybrid'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import { checkWorkspaceAccess } from '@/lib/workspaces/permissions/utils'
|
||||
|
||||
const logger = createLogger('MemoryByIdAPI')
|
||||
|
||||
@@ -29,46 +30,6 @@ const memoryPutBodySchema = z.object({
|
||||
workspaceId: z.string().uuid('Invalid workspace ID format'),
|
||||
})
|
||||
|
||||
async function checkWorkspaceAccess(
|
||||
workspaceId: string,
|
||||
userId: string
|
||||
): Promise<{ hasAccess: boolean; canWrite: boolean }> {
|
||||
const [workspaceRow] = await db
|
||||
.select({ ownerId: workspace.ownerId })
|
||||
.from(workspace)
|
||||
.where(eq(workspace.id, workspaceId))
|
||||
.limit(1)
|
||||
|
||||
if (!workspaceRow) {
|
||||
return { hasAccess: false, canWrite: false }
|
||||
}
|
||||
|
||||
if (workspaceRow.ownerId === userId) {
|
||||
return { hasAccess: true, canWrite: true }
|
||||
}
|
||||
|
||||
const [permissionRow] = await db
|
||||
.select({ permissionType: permissions.permissionType })
|
||||
.from(permissions)
|
||||
.where(
|
||||
and(
|
||||
eq(permissions.userId, userId),
|
||||
eq(permissions.entityType, 'workspace'),
|
||||
eq(permissions.entityId, workspaceId)
|
||||
)
|
||||
)
|
||||
.limit(1)
|
||||
|
||||
if (!permissionRow) {
|
||||
return { hasAccess: false, canWrite: false }
|
||||
}
|
||||
|
||||
return {
|
||||
hasAccess: true,
|
||||
canWrite: permissionRow.permissionType === 'write' || permissionRow.permissionType === 'admin',
|
||||
}
|
||||
}
|
||||
|
||||
async function validateMemoryAccess(
|
||||
request: NextRequest,
|
||||
workspaceId: string,
|
||||
@@ -86,8 +47,8 @@ async function validateMemoryAccess(
|
||||
}
|
||||
}
|
||||
|
||||
const { hasAccess, canWrite } = await checkWorkspaceAccess(workspaceId, authResult.userId)
|
||||
if (!hasAccess) {
|
||||
const access = await checkWorkspaceAccess(workspaceId, authResult.userId)
|
||||
if (!access.exists || !access.hasAccess) {
|
||||
return {
|
||||
error: NextResponse.json(
|
||||
{ success: false, error: { message: 'Workspace not found' } },
|
||||
@@ -96,7 +57,7 @@ async function validateMemoryAccess(
|
||||
}
|
||||
}
|
||||
|
||||
if (action === 'write' && !canWrite) {
|
||||
if (action === 'write' && !access.canWrite) {
|
||||
return {
|
||||
error: NextResponse.json(
|
||||
{ success: false, error: { message: 'Write access denied' } },
|
||||
|
||||
@@ -1,56 +1,17 @@
|
||||
import { db } from '@sim/db'
|
||||
import { memory, permissions, workspace } from '@sim/db/schema'
|
||||
import { memory } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { and, eq, isNull, like } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { checkHybridAuth } from '@/lib/auth/hybrid'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import { checkWorkspaceAccess } from '@/lib/workspaces/permissions/utils'
|
||||
|
||||
const logger = createLogger('MemoryAPI')
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
export const runtime = 'nodejs'
|
||||
|
||||
async function checkWorkspaceAccess(
|
||||
workspaceId: string,
|
||||
userId: string
|
||||
): Promise<{ hasAccess: boolean; canWrite: boolean }> {
|
||||
const [workspaceRow] = await db
|
||||
.select({ ownerId: workspace.ownerId })
|
||||
.from(workspace)
|
||||
.where(eq(workspace.id, workspaceId))
|
||||
.limit(1)
|
||||
|
||||
if (!workspaceRow) {
|
||||
return { hasAccess: false, canWrite: false }
|
||||
}
|
||||
|
||||
if (workspaceRow.ownerId === userId) {
|
||||
return { hasAccess: true, canWrite: true }
|
||||
}
|
||||
|
||||
const [permissionRow] = await db
|
||||
.select({ permissionType: permissions.permissionType })
|
||||
.from(permissions)
|
||||
.where(
|
||||
and(
|
||||
eq(permissions.userId, userId),
|
||||
eq(permissions.entityType, 'workspace'),
|
||||
eq(permissions.entityId, workspaceId)
|
||||
)
|
||||
)
|
||||
.limit(1)
|
||||
|
||||
if (!permissionRow) {
|
||||
return { hasAccess: false, canWrite: false }
|
||||
}
|
||||
|
||||
return {
|
||||
hasAccess: true,
|
||||
canWrite: permissionRow.permissionType === 'write' || permissionRow.permissionType === 'admin',
|
||||
}
|
||||
}
|
||||
|
||||
export async function GET(request: NextRequest) {
|
||||
const requestId = generateRequestId()
|
||||
|
||||
@@ -76,8 +37,14 @@ export async function GET(request: NextRequest) {
|
||||
)
|
||||
}
|
||||
|
||||
const { hasAccess } = await checkWorkspaceAccess(workspaceId, authResult.userId)
|
||||
if (!hasAccess) {
|
||||
const access = await checkWorkspaceAccess(workspaceId, authResult.userId)
|
||||
if (!access.exists) {
|
||||
return NextResponse.json(
|
||||
{ success: false, error: { message: 'Workspace not found' } },
|
||||
{ status: 404 }
|
||||
)
|
||||
}
|
||||
if (!access.hasAccess) {
|
||||
return NextResponse.json(
|
||||
{ success: false, error: { message: 'Access denied to this workspace' } },
|
||||
{ status: 403 }
|
||||
@@ -155,15 +122,21 @@ export async function POST(request: NextRequest) {
|
||||
)
|
||||
}
|
||||
|
||||
const { hasAccess, canWrite } = await checkWorkspaceAccess(workspaceId, authResult.userId)
|
||||
if (!hasAccess) {
|
||||
const access = await checkWorkspaceAccess(workspaceId, authResult.userId)
|
||||
if (!access.exists) {
|
||||
return NextResponse.json(
|
||||
{ success: false, error: { message: 'Workspace not found' } },
|
||||
{ status: 404 }
|
||||
)
|
||||
}
|
||||
if (!access.hasAccess) {
|
||||
return NextResponse.json(
|
||||
{ success: false, error: { message: 'Access denied to this workspace' } },
|
||||
{ status: 403 }
|
||||
)
|
||||
}
|
||||
|
||||
if (!canWrite) {
|
||||
if (!access.canWrite) {
|
||||
return NextResponse.json(
|
||||
{ success: false, error: { message: 'Write access denied to this workspace' } },
|
||||
{ status: 403 }
|
||||
@@ -282,15 +255,21 @@ export async function DELETE(request: NextRequest) {
|
||||
)
|
||||
}
|
||||
|
||||
const { hasAccess, canWrite } = await checkWorkspaceAccess(workspaceId, authResult.userId)
|
||||
if (!hasAccess) {
|
||||
const access = await checkWorkspaceAccess(workspaceId, authResult.userId)
|
||||
if (!access.exists) {
|
||||
return NextResponse.json(
|
||||
{ success: false, error: { message: 'Workspace not found' } },
|
||||
{ status: 404 }
|
||||
)
|
||||
}
|
||||
if (!access.hasAccess) {
|
||||
return NextResponse.json(
|
||||
{ success: false, error: { message: 'Access denied to this workspace' } },
|
||||
{ status: 403 }
|
||||
)
|
||||
}
|
||||
|
||||
if (!canWrite) {
|
||||
if (!access.canWrite) {
|
||||
return NextResponse.json(
|
||||
{ success: false, error: { message: 'Write access denied to this workspace' } },
|
||||
{ status: 403 }
|
||||
|
||||
166
apps/sim/app/api/permission-groups/[id]/members/bulk/route.ts
Normal file
166
apps/sim/app/api/permission-groups/[id]/members/bulk/route.ts
Normal file
@@ -0,0 +1,166 @@
|
||||
import { db } from '@sim/db'
|
||||
import { member, permissionGroup, permissionGroupMember } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { and, eq, inArray } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { hasAccessControlAccess } from '@/lib/billing'
|
||||
|
||||
const logger = createLogger('PermissionGroupBulkMembers')
|
||||
|
||||
async function getPermissionGroupWithAccess(groupId: string, userId: string) {
|
||||
const [group] = await db
|
||||
.select({
|
||||
id: permissionGroup.id,
|
||||
organizationId: permissionGroup.organizationId,
|
||||
})
|
||||
.from(permissionGroup)
|
||||
.where(eq(permissionGroup.id, groupId))
|
||||
.limit(1)
|
||||
|
||||
if (!group) return null
|
||||
|
||||
const [membership] = await db
|
||||
.select({ role: member.role })
|
||||
.from(member)
|
||||
.where(and(eq(member.userId, userId), eq(member.organizationId, group.organizationId)))
|
||||
.limit(1)
|
||||
|
||||
if (!membership) return null
|
||||
|
||||
return { group, role: membership.role }
|
||||
}
|
||||
|
||||
const bulkAddSchema = z.object({
|
||||
userIds: z.array(z.string()).optional(),
|
||||
addAllOrgMembers: z.boolean().optional(),
|
||||
})
|
||||
|
||||
export async function POST(req: NextRequest, { params }: { params: Promise<{ id: string }> }) {
|
||||
const session = await getSession()
|
||||
|
||||
if (!session?.user?.id) {
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const { id } = await params
|
||||
|
||||
try {
|
||||
const hasAccess = await hasAccessControlAccess(session.user.id)
|
||||
if (!hasAccess) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Access Control is an Enterprise feature' },
|
||||
{ status: 403 }
|
||||
)
|
||||
}
|
||||
|
||||
const result = await getPermissionGroupWithAccess(id, session.user.id)
|
||||
|
||||
if (!result) {
|
||||
return NextResponse.json({ error: 'Permission group not found' }, { status: 404 })
|
||||
}
|
||||
|
||||
if (result.role !== 'admin' && result.role !== 'owner') {
|
||||
return NextResponse.json({ error: 'Admin or owner permissions required' }, { status: 403 })
|
||||
}
|
||||
|
||||
const body = await req.json()
|
||||
const { userIds, addAllOrgMembers } = bulkAddSchema.parse(body)
|
||||
|
||||
let targetUserIds: string[] = []
|
||||
|
||||
if (addAllOrgMembers) {
|
||||
const orgMembers = await db
|
||||
.select({ userId: member.userId })
|
||||
.from(member)
|
||||
.where(eq(member.organizationId, result.group.organizationId))
|
||||
|
||||
targetUserIds = orgMembers.map((m) => m.userId)
|
||||
} else if (userIds && userIds.length > 0) {
|
||||
const validMembers = await db
|
||||
.select({ userId: member.userId })
|
||||
.from(member)
|
||||
.where(
|
||||
and(
|
||||
eq(member.organizationId, result.group.organizationId),
|
||||
inArray(member.userId, userIds)
|
||||
)
|
||||
)
|
||||
|
||||
targetUserIds = validMembers.map((m) => m.userId)
|
||||
}
|
||||
|
||||
if (targetUserIds.length === 0) {
|
||||
return NextResponse.json({ added: 0, moved: 0 })
|
||||
}
|
||||
|
||||
const existingMemberships = await db
|
||||
.select({
|
||||
id: permissionGroupMember.id,
|
||||
userId: permissionGroupMember.userId,
|
||||
permissionGroupId: permissionGroupMember.permissionGroupId,
|
||||
})
|
||||
.from(permissionGroupMember)
|
||||
.where(inArray(permissionGroupMember.userId, targetUserIds))
|
||||
|
||||
const alreadyInThisGroup = new Set(
|
||||
existingMemberships.filter((m) => m.permissionGroupId === id).map((m) => m.userId)
|
||||
)
|
||||
const usersToAdd = targetUserIds.filter((uid) => !alreadyInThisGroup.has(uid))
|
||||
|
||||
if (usersToAdd.length === 0) {
|
||||
return NextResponse.json({ added: 0, moved: 0 })
|
||||
}
|
||||
|
||||
const membershipsToDelete = existingMemberships.filter(
|
||||
(m) => m.permissionGroupId !== id && usersToAdd.includes(m.userId)
|
||||
)
|
||||
const movedCount = membershipsToDelete.length
|
||||
|
||||
await db.transaction(async (tx) => {
|
||||
if (membershipsToDelete.length > 0) {
|
||||
await tx.delete(permissionGroupMember).where(
|
||||
inArray(
|
||||
permissionGroupMember.id,
|
||||
membershipsToDelete.map((m) => m.id)
|
||||
)
|
||||
)
|
||||
}
|
||||
|
||||
const newMembers = usersToAdd.map((userId) => ({
|
||||
id: crypto.randomUUID(),
|
||||
permissionGroupId: id,
|
||||
userId,
|
||||
assignedBy: session.user.id,
|
||||
assignedAt: new Date(),
|
||||
}))
|
||||
|
||||
await tx.insert(permissionGroupMember).values(newMembers)
|
||||
})
|
||||
|
||||
logger.info('Bulk added members to permission group', {
|
||||
permissionGroupId: id,
|
||||
addedCount: usersToAdd.length,
|
||||
movedCount,
|
||||
assignedBy: session.user.id,
|
||||
})
|
||||
|
||||
return NextResponse.json({ added: usersToAdd.length, moved: movedCount })
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
return NextResponse.json({ error: error.errors[0].message }, { status: 400 })
|
||||
}
|
||||
if (
|
||||
error instanceof Error &&
|
||||
error.message.includes('permission_group_member_user_id_unique')
|
||||
) {
|
||||
return NextResponse.json(
|
||||
{ error: 'One or more users are already in a permission group' },
|
||||
{ status: 409 }
|
||||
)
|
||||
}
|
||||
logger.error('Error bulk adding members to permission group', error)
|
||||
return NextResponse.json({ error: 'Failed to add members' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
229
apps/sim/app/api/permission-groups/[id]/members/route.ts
Normal file
229
apps/sim/app/api/permission-groups/[id]/members/route.ts
Normal file
@@ -0,0 +1,229 @@
|
||||
import { db } from '@sim/db'
|
||||
import { member, permissionGroup, permissionGroupMember, user } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { and, eq } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { hasAccessControlAccess } from '@/lib/billing'
|
||||
|
||||
const logger = createLogger('PermissionGroupMembers')
|
||||
|
||||
async function getPermissionGroupWithAccess(groupId: string, userId: string) {
|
||||
const [group] = await db
|
||||
.select({
|
||||
id: permissionGroup.id,
|
||||
organizationId: permissionGroup.organizationId,
|
||||
})
|
||||
.from(permissionGroup)
|
||||
.where(eq(permissionGroup.id, groupId))
|
||||
.limit(1)
|
||||
|
||||
if (!group) return null
|
||||
|
||||
const [membership] = await db
|
||||
.select({ role: member.role })
|
||||
.from(member)
|
||||
.where(and(eq(member.userId, userId), eq(member.organizationId, group.organizationId)))
|
||||
.limit(1)
|
||||
|
||||
if (!membership) return null
|
||||
|
||||
return { group, role: membership.role }
|
||||
}
|
||||
|
||||
export async function GET(req: NextRequest, { params }: { params: Promise<{ id: string }> }) {
|
||||
const session = await getSession()
|
||||
|
||||
if (!session?.user?.id) {
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const { id } = await params
|
||||
const result = await getPermissionGroupWithAccess(id, session.user.id)
|
||||
|
||||
if (!result) {
|
||||
return NextResponse.json({ error: 'Permission group not found' }, { status: 404 })
|
||||
}
|
||||
|
||||
const members = await db
|
||||
.select({
|
||||
id: permissionGroupMember.id,
|
||||
userId: permissionGroupMember.userId,
|
||||
assignedAt: permissionGroupMember.assignedAt,
|
||||
userName: user.name,
|
||||
userEmail: user.email,
|
||||
userImage: user.image,
|
||||
})
|
||||
.from(permissionGroupMember)
|
||||
.leftJoin(user, eq(permissionGroupMember.userId, user.id))
|
||||
.where(eq(permissionGroupMember.permissionGroupId, id))
|
||||
|
||||
return NextResponse.json({ members })
|
||||
}
|
||||
|
||||
const addMemberSchema = z.object({
|
||||
userId: z.string().min(1),
|
||||
})
|
||||
|
||||
export async function POST(req: NextRequest, { params }: { params: Promise<{ id: string }> }) {
|
||||
const session = await getSession()
|
||||
|
||||
if (!session?.user?.id) {
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const { id } = await params
|
||||
|
||||
try {
|
||||
const hasAccess = await hasAccessControlAccess(session.user.id)
|
||||
if (!hasAccess) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Access Control is an Enterprise feature' },
|
||||
{ status: 403 }
|
||||
)
|
||||
}
|
||||
|
||||
const result = await getPermissionGroupWithAccess(id, session.user.id)
|
||||
|
||||
if (!result) {
|
||||
return NextResponse.json({ error: 'Permission group not found' }, { status: 404 })
|
||||
}
|
||||
|
||||
if (result.role !== 'admin' && result.role !== 'owner') {
|
||||
return NextResponse.json({ error: 'Admin or owner permissions required' }, { status: 403 })
|
||||
}
|
||||
|
||||
const body = await req.json()
|
||||
const { userId } = addMemberSchema.parse(body)
|
||||
|
||||
const [orgMember] = await db
|
||||
.select({ id: member.id })
|
||||
.from(member)
|
||||
.where(and(eq(member.userId, userId), eq(member.organizationId, result.group.organizationId)))
|
||||
.limit(1)
|
||||
|
||||
if (!orgMember) {
|
||||
return NextResponse.json(
|
||||
{ error: 'User is not a member of this organization' },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const [existingMembership] = await db
|
||||
.select({
|
||||
id: permissionGroupMember.id,
|
||||
permissionGroupId: permissionGroupMember.permissionGroupId,
|
||||
})
|
||||
.from(permissionGroupMember)
|
||||
.where(eq(permissionGroupMember.userId, userId))
|
||||
.limit(1)
|
||||
|
||||
if (existingMembership?.permissionGroupId === id) {
|
||||
return NextResponse.json(
|
||||
{ error: 'User is already in this permission group' },
|
||||
{ status: 409 }
|
||||
)
|
||||
}
|
||||
|
||||
const newMember = await db.transaction(async (tx) => {
|
||||
if (existingMembership) {
|
||||
await tx
|
||||
.delete(permissionGroupMember)
|
||||
.where(eq(permissionGroupMember.id, existingMembership.id))
|
||||
}
|
||||
|
||||
const memberData = {
|
||||
id: crypto.randomUUID(),
|
||||
permissionGroupId: id,
|
||||
userId,
|
||||
assignedBy: session.user.id,
|
||||
assignedAt: new Date(),
|
||||
}
|
||||
|
||||
await tx.insert(permissionGroupMember).values(memberData)
|
||||
return memberData
|
||||
})
|
||||
|
||||
logger.info('Added member to permission group', {
|
||||
permissionGroupId: id,
|
||||
userId,
|
||||
assignedBy: session.user.id,
|
||||
})
|
||||
|
||||
return NextResponse.json({ member: newMember }, { status: 201 })
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
return NextResponse.json({ error: error.errors[0].message }, { status: 400 })
|
||||
}
|
||||
if (
|
||||
error instanceof Error &&
|
||||
error.message.includes('permission_group_member_user_id_unique')
|
||||
) {
|
||||
return NextResponse.json({ error: 'User is already in a permission group' }, { status: 409 })
|
||||
}
|
||||
logger.error('Error adding member to permission group', error)
|
||||
return NextResponse.json({ error: 'Failed to add member' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
|
||||
export async function DELETE(req: NextRequest, { params }: { params: Promise<{ id: string }> }) {
|
||||
const session = await getSession()
|
||||
|
||||
if (!session?.user?.id) {
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const { id } = await params
|
||||
const { searchParams } = new URL(req.url)
|
||||
const memberId = searchParams.get('memberId')
|
||||
|
||||
if (!memberId) {
|
||||
return NextResponse.json({ error: 'memberId is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
try {
|
||||
const hasAccess = await hasAccessControlAccess(session.user.id)
|
||||
if (!hasAccess) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Access Control is an Enterprise feature' },
|
||||
{ status: 403 }
|
||||
)
|
||||
}
|
||||
|
||||
const result = await getPermissionGroupWithAccess(id, session.user.id)
|
||||
|
||||
if (!result) {
|
||||
return NextResponse.json({ error: 'Permission group not found' }, { status: 404 })
|
||||
}
|
||||
|
||||
if (result.role !== 'admin' && result.role !== 'owner') {
|
||||
return NextResponse.json({ error: 'Admin or owner permissions required' }, { status: 403 })
|
||||
}
|
||||
|
||||
const [memberToRemove] = await db
|
||||
.select()
|
||||
.from(permissionGroupMember)
|
||||
.where(
|
||||
and(eq(permissionGroupMember.id, memberId), eq(permissionGroupMember.permissionGroupId, id))
|
||||
)
|
||||
.limit(1)
|
||||
|
||||
if (!memberToRemove) {
|
||||
return NextResponse.json({ error: 'Member not found' }, { status: 404 })
|
||||
}
|
||||
|
||||
await db.delete(permissionGroupMember).where(eq(permissionGroupMember.id, memberId))
|
||||
|
||||
logger.info('Removed member from permission group', {
|
||||
permissionGroupId: id,
|
||||
memberId,
|
||||
userId: session.user.id,
|
||||
})
|
||||
|
||||
return NextResponse.json({ success: true })
|
||||
} catch (error) {
|
||||
logger.error('Error removing member from permission group', error)
|
||||
return NextResponse.json({ error: 'Failed to remove member' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
212
apps/sim/app/api/permission-groups/[id]/route.ts
Normal file
212
apps/sim/app/api/permission-groups/[id]/route.ts
Normal file
@@ -0,0 +1,212 @@
|
||||
import { db } from '@sim/db'
|
||||
import { member, permissionGroup, permissionGroupMember } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { and, eq } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { hasAccessControlAccess } from '@/lib/billing'
|
||||
import {
|
||||
type PermissionGroupConfig,
|
||||
parsePermissionGroupConfig,
|
||||
} from '@/lib/permission-groups/types'
|
||||
|
||||
const logger = createLogger('PermissionGroup')
|
||||
|
||||
const configSchema = z.object({
|
||||
allowedIntegrations: z.array(z.string()).nullable().optional(),
|
||||
allowedModelProviders: z.array(z.string()).nullable().optional(),
|
||||
hideTraceSpans: z.boolean().optional(),
|
||||
hideKnowledgeBaseTab: z.boolean().optional(),
|
||||
hideCopilot: z.boolean().optional(),
|
||||
hideApiKeysTab: z.boolean().optional(),
|
||||
hideEnvironmentTab: z.boolean().optional(),
|
||||
hideFilesTab: z.boolean().optional(),
|
||||
disableMcpTools: z.boolean().optional(),
|
||||
disableCustomTools: z.boolean().optional(),
|
||||
hideTemplates: z.boolean().optional(),
|
||||
})
|
||||
|
||||
const updateSchema = z.object({
|
||||
name: z.string().trim().min(1).max(100).optional(),
|
||||
description: z.string().max(500).nullable().optional(),
|
||||
config: configSchema.optional(),
|
||||
})
|
||||
|
||||
async function getPermissionGroupWithAccess(groupId: string, userId: string) {
|
||||
const [group] = await db
|
||||
.select({
|
||||
id: permissionGroup.id,
|
||||
organizationId: permissionGroup.organizationId,
|
||||
name: permissionGroup.name,
|
||||
description: permissionGroup.description,
|
||||
config: permissionGroup.config,
|
||||
createdBy: permissionGroup.createdBy,
|
||||
createdAt: permissionGroup.createdAt,
|
||||
updatedAt: permissionGroup.updatedAt,
|
||||
})
|
||||
.from(permissionGroup)
|
||||
.where(eq(permissionGroup.id, groupId))
|
||||
.limit(1)
|
||||
|
||||
if (!group) return null
|
||||
|
||||
const [membership] = await db
|
||||
.select({ role: member.role })
|
||||
.from(member)
|
||||
.where(and(eq(member.userId, userId), eq(member.organizationId, group.organizationId)))
|
||||
.limit(1)
|
||||
|
||||
if (!membership) return null
|
||||
|
||||
return { group, role: membership.role }
|
||||
}
|
||||
|
||||
export async function GET(req: NextRequest, { params }: { params: Promise<{ id: string }> }) {
|
||||
const session = await getSession()
|
||||
|
||||
if (!session?.user?.id) {
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const { id } = await params
|
||||
const result = await getPermissionGroupWithAccess(id, session.user.id)
|
||||
|
||||
if (!result) {
|
||||
return NextResponse.json({ error: 'Permission group not found' }, { status: 404 })
|
||||
}
|
||||
|
||||
return NextResponse.json({
|
||||
permissionGroup: {
|
||||
...result.group,
|
||||
config: parsePermissionGroupConfig(result.group.config),
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
export async function PUT(req: NextRequest, { params }: { params: Promise<{ id: string }> }) {
|
||||
const session = await getSession()
|
||||
|
||||
if (!session?.user?.id) {
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const { id } = await params
|
||||
|
||||
try {
|
||||
const hasAccess = await hasAccessControlAccess(session.user.id)
|
||||
if (!hasAccess) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Access Control is an Enterprise feature' },
|
||||
{ status: 403 }
|
||||
)
|
||||
}
|
||||
|
||||
const result = await getPermissionGroupWithAccess(id, session.user.id)
|
||||
|
||||
if (!result) {
|
||||
return NextResponse.json({ error: 'Permission group not found' }, { status: 404 })
|
||||
}
|
||||
|
||||
if (result.role !== 'admin' && result.role !== 'owner') {
|
||||
return NextResponse.json({ error: 'Admin or owner permissions required' }, { status: 403 })
|
||||
}
|
||||
|
||||
const body = await req.json()
|
||||
const updates = updateSchema.parse(body)
|
||||
|
||||
if (updates.name) {
|
||||
const existingGroup = await db
|
||||
.select({ id: permissionGroup.id })
|
||||
.from(permissionGroup)
|
||||
.where(
|
||||
and(
|
||||
eq(permissionGroup.organizationId, result.group.organizationId),
|
||||
eq(permissionGroup.name, updates.name)
|
||||
)
|
||||
)
|
||||
.limit(1)
|
||||
|
||||
if (existingGroup.length > 0 && existingGroup[0].id !== id) {
|
||||
return NextResponse.json(
|
||||
{ error: 'A permission group with this name already exists' },
|
||||
{ status: 409 }
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
const currentConfig = parsePermissionGroupConfig(result.group.config)
|
||||
const newConfig: PermissionGroupConfig = updates.config
|
||||
? { ...currentConfig, ...updates.config }
|
||||
: currentConfig
|
||||
|
||||
await db
|
||||
.update(permissionGroup)
|
||||
.set({
|
||||
...(updates.name !== undefined && { name: updates.name }),
|
||||
...(updates.description !== undefined && { description: updates.description }),
|
||||
config: newConfig,
|
||||
updatedAt: new Date(),
|
||||
})
|
||||
.where(eq(permissionGroup.id, id))
|
||||
|
||||
const [updated] = await db
|
||||
.select()
|
||||
.from(permissionGroup)
|
||||
.where(eq(permissionGroup.id, id))
|
||||
.limit(1)
|
||||
|
||||
return NextResponse.json({
|
||||
permissionGroup: {
|
||||
...updated,
|
||||
config: parsePermissionGroupConfig(updated.config),
|
||||
},
|
||||
})
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
return NextResponse.json({ error: error.errors[0].message }, { status: 400 })
|
||||
}
|
||||
logger.error('Error updating permission group', error)
|
||||
return NextResponse.json({ error: 'Failed to update permission group' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
|
||||
export async function DELETE(req: NextRequest, { params }: { params: Promise<{ id: string }> }) {
|
||||
const session = await getSession()
|
||||
|
||||
if (!session?.user?.id) {
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const { id } = await params
|
||||
|
||||
try {
|
||||
const hasAccess = await hasAccessControlAccess(session.user.id)
|
||||
if (!hasAccess) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Access Control is an Enterprise feature' },
|
||||
{ status: 403 }
|
||||
)
|
||||
}
|
||||
|
||||
const result = await getPermissionGroupWithAccess(id, session.user.id)
|
||||
|
||||
if (!result) {
|
||||
return NextResponse.json({ error: 'Permission group not found' }, { status: 404 })
|
||||
}
|
||||
|
||||
if (result.role !== 'admin' && result.role !== 'owner') {
|
||||
return NextResponse.json({ error: 'Admin or owner permissions required' }, { status: 403 })
|
||||
}
|
||||
|
||||
await db.delete(permissionGroupMember).where(eq(permissionGroupMember.permissionGroupId, id))
|
||||
await db.delete(permissionGroup).where(eq(permissionGroup.id, id))
|
||||
|
||||
logger.info('Deleted permission group', { permissionGroupId: id, userId: session.user.id })
|
||||
|
||||
return NextResponse.json({ success: true })
|
||||
} catch (error) {
|
||||
logger.error('Error deleting permission group', error)
|
||||
return NextResponse.json({ error: 'Failed to delete permission group' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
185
apps/sim/app/api/permission-groups/route.ts
Normal file
185
apps/sim/app/api/permission-groups/route.ts
Normal file
@@ -0,0 +1,185 @@
|
||||
import { db } from '@sim/db'
|
||||
import { member, organization, permissionGroup, permissionGroupMember, user } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { and, count, desc, eq } from 'drizzle-orm'
|
||||
import { NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { hasAccessControlAccess } from '@/lib/billing'
|
||||
import {
|
||||
DEFAULT_PERMISSION_GROUP_CONFIG,
|
||||
type PermissionGroupConfig,
|
||||
parsePermissionGroupConfig,
|
||||
} from '@/lib/permission-groups/types'
|
||||
|
||||
const logger = createLogger('PermissionGroups')
|
||||
|
||||
const configSchema = z.object({
|
||||
allowedIntegrations: z.array(z.string()).nullable().optional(),
|
||||
allowedModelProviders: z.array(z.string()).nullable().optional(),
|
||||
hideTraceSpans: z.boolean().optional(),
|
||||
hideKnowledgeBaseTab: z.boolean().optional(),
|
||||
hideCopilot: z.boolean().optional(),
|
||||
hideApiKeysTab: z.boolean().optional(),
|
||||
hideEnvironmentTab: z.boolean().optional(),
|
||||
hideFilesTab: z.boolean().optional(),
|
||||
disableMcpTools: z.boolean().optional(),
|
||||
disableCustomTools: z.boolean().optional(),
|
||||
hideTemplates: z.boolean().optional(),
|
||||
})
|
||||
|
||||
const createSchema = z.object({
|
||||
organizationId: z.string().min(1),
|
||||
name: z.string().trim().min(1).max(100),
|
||||
description: z.string().max(500).optional(),
|
||||
config: configSchema.optional(),
|
||||
})
|
||||
|
||||
export async function GET(req: Request) {
|
||||
const session = await getSession()
|
||||
|
||||
if (!session?.user?.id) {
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const { searchParams } = new URL(req.url)
|
||||
const organizationId = searchParams.get('organizationId')
|
||||
|
||||
if (!organizationId) {
|
||||
return NextResponse.json({ error: 'organizationId is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
const membership = await db
|
||||
.select({ id: member.id, role: member.role })
|
||||
.from(member)
|
||||
.where(and(eq(member.userId, session.user.id), eq(member.organizationId, organizationId)))
|
||||
.limit(1)
|
||||
|
||||
if (membership.length === 0) {
|
||||
return NextResponse.json({ error: 'Forbidden' }, { status: 403 })
|
||||
}
|
||||
|
||||
const groups = await db
|
||||
.select({
|
||||
id: permissionGroup.id,
|
||||
name: permissionGroup.name,
|
||||
description: permissionGroup.description,
|
||||
config: permissionGroup.config,
|
||||
createdBy: permissionGroup.createdBy,
|
||||
createdAt: permissionGroup.createdAt,
|
||||
updatedAt: permissionGroup.updatedAt,
|
||||
creatorName: user.name,
|
||||
creatorEmail: user.email,
|
||||
})
|
||||
.from(permissionGroup)
|
||||
.leftJoin(user, eq(permissionGroup.createdBy, user.id))
|
||||
.where(eq(permissionGroup.organizationId, organizationId))
|
||||
.orderBy(desc(permissionGroup.createdAt))
|
||||
|
||||
const groupsWithCounts = await Promise.all(
|
||||
groups.map(async (group) => {
|
||||
const [memberCount] = await db
|
||||
.select({ count: count() })
|
||||
.from(permissionGroupMember)
|
||||
.where(eq(permissionGroupMember.permissionGroupId, group.id))
|
||||
|
||||
return {
|
||||
...group,
|
||||
config: parsePermissionGroupConfig(group.config),
|
||||
memberCount: memberCount?.count ?? 0,
|
||||
}
|
||||
})
|
||||
)
|
||||
|
||||
return NextResponse.json({ permissionGroups: groupsWithCounts })
|
||||
}
|
||||
|
||||
export async function POST(req: Request) {
|
||||
const session = await getSession()
|
||||
|
||||
if (!session?.user?.id) {
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
try {
|
||||
const hasAccess = await hasAccessControlAccess(session.user.id)
|
||||
if (!hasAccess) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Access Control is an Enterprise feature' },
|
||||
{ status: 403 }
|
||||
)
|
||||
}
|
||||
|
||||
const body = await req.json()
|
||||
const { organizationId, name, description, config } = createSchema.parse(body)
|
||||
|
||||
const membership = await db
|
||||
.select({ id: member.id, role: member.role })
|
||||
.from(member)
|
||||
.where(and(eq(member.userId, session.user.id), eq(member.organizationId, organizationId)))
|
||||
.limit(1)
|
||||
|
||||
const role = membership[0]?.role
|
||||
if (membership.length === 0 || (role !== 'admin' && role !== 'owner')) {
|
||||
return NextResponse.json({ error: 'Admin or owner permissions required' }, { status: 403 })
|
||||
}
|
||||
|
||||
const orgExists = await db
|
||||
.select({ id: organization.id })
|
||||
.from(organization)
|
||||
.where(eq(organization.id, organizationId))
|
||||
.limit(1)
|
||||
|
||||
if (orgExists.length === 0) {
|
||||
return NextResponse.json({ error: 'Organization not found' }, { status: 404 })
|
||||
}
|
||||
|
||||
const existingGroup = await db
|
||||
.select({ id: permissionGroup.id })
|
||||
.from(permissionGroup)
|
||||
.where(
|
||||
and(eq(permissionGroup.organizationId, organizationId), eq(permissionGroup.name, name))
|
||||
)
|
||||
.limit(1)
|
||||
|
||||
if (existingGroup.length > 0) {
|
||||
return NextResponse.json(
|
||||
{ error: 'A permission group with this name already exists' },
|
||||
{ status: 409 }
|
||||
)
|
||||
}
|
||||
|
||||
const groupConfig: PermissionGroupConfig = {
|
||||
...DEFAULT_PERMISSION_GROUP_CONFIG,
|
||||
...config,
|
||||
}
|
||||
|
||||
const now = new Date()
|
||||
const newGroup = {
|
||||
id: crypto.randomUUID(),
|
||||
organizationId,
|
||||
name,
|
||||
description: description || null,
|
||||
config: groupConfig,
|
||||
createdBy: session.user.id,
|
||||
createdAt: now,
|
||||
updatedAt: now,
|
||||
}
|
||||
|
||||
await db.insert(permissionGroup).values(newGroup)
|
||||
|
||||
logger.info('Created permission group', {
|
||||
permissionGroupId: newGroup.id,
|
||||
organizationId,
|
||||
userId: session.user.id,
|
||||
})
|
||||
|
||||
return NextResponse.json({ permissionGroup: newGroup }, { status: 201 })
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
return NextResponse.json({ error: error.errors[0].message }, { status: 400 })
|
||||
}
|
||||
logger.error('Error creating permission group', error)
|
||||
return NextResponse.json({ error: 'Failed to create permission group' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
72
apps/sim/app/api/permission-groups/user/route.ts
Normal file
72
apps/sim/app/api/permission-groups/user/route.ts
Normal file
@@ -0,0 +1,72 @@
|
||||
import { db } from '@sim/db'
|
||||
import { member, permissionGroup, permissionGroupMember } from '@sim/db/schema'
|
||||
import { and, eq } from 'drizzle-orm'
|
||||
import { NextResponse } from 'next/server'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { isOrganizationOnEnterprisePlan } from '@/lib/billing'
|
||||
import { parsePermissionGroupConfig } from '@/lib/permission-groups/types'
|
||||
|
||||
export async function GET(req: Request) {
|
||||
const session = await getSession()
|
||||
|
||||
if (!session?.user?.id) {
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const { searchParams } = new URL(req.url)
|
||||
const organizationId = searchParams.get('organizationId')
|
||||
|
||||
if (!organizationId) {
|
||||
return NextResponse.json({ error: 'organizationId is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
const [membership] = await db
|
||||
.select({ id: member.id })
|
||||
.from(member)
|
||||
.where(and(eq(member.userId, session.user.id), eq(member.organizationId, organizationId)))
|
||||
.limit(1)
|
||||
|
||||
if (!membership) {
|
||||
return NextResponse.json({ error: 'Not a member of this organization' }, { status: 403 })
|
||||
}
|
||||
|
||||
// Short-circuit: if org is not on enterprise plan, ignore permission configs
|
||||
const isEnterprise = await isOrganizationOnEnterprisePlan(organizationId)
|
||||
if (!isEnterprise) {
|
||||
return NextResponse.json({
|
||||
permissionGroupId: null,
|
||||
groupName: null,
|
||||
config: null,
|
||||
})
|
||||
}
|
||||
|
||||
const [groupMembership] = await db
|
||||
.select({
|
||||
permissionGroupId: permissionGroupMember.permissionGroupId,
|
||||
config: permissionGroup.config,
|
||||
groupName: permissionGroup.name,
|
||||
})
|
||||
.from(permissionGroupMember)
|
||||
.innerJoin(permissionGroup, eq(permissionGroupMember.permissionGroupId, permissionGroup.id))
|
||||
.where(
|
||||
and(
|
||||
eq(permissionGroupMember.userId, session.user.id),
|
||||
eq(permissionGroup.organizationId, organizationId)
|
||||
)
|
||||
)
|
||||
.limit(1)
|
||||
|
||||
if (!groupMembership) {
|
||||
return NextResponse.json({
|
||||
permissionGroupId: null,
|
||||
groupName: null,
|
||||
config: null,
|
||||
})
|
||||
}
|
||||
|
||||
return NextResponse.json({
|
||||
permissionGroupId: groupMembership.permissionGroupId,
|
||||
groupName: groupMembership.groupName,
|
||||
config: parsePermissionGroupConfig(groupMembership.config),
|
||||
})
|
||||
}
|
||||
@@ -4,8 +4,8 @@ import { createLogger } from '@sim/logger'
|
||||
import { eq } from 'drizzle-orm'
|
||||
import type { NextRequest } from 'next/server'
|
||||
import { env } from '@/lib/core/config/env'
|
||||
import { validateAuthToken } from '@/lib/core/security/deployment'
|
||||
import { validateAlphanumericId } from '@/lib/core/security/input-validation'
|
||||
import { validateAuthToken } from '@/app/api/chat/utils'
|
||||
|
||||
const logger = createLogger('ProxyTTSStreamAPI')
|
||||
|
||||
|
||||
@@ -3,6 +3,7 @@
|
||||
*
|
||||
* @vitest-environment node
|
||||
*/
|
||||
import { loggerMock } from '@sim/testing'
|
||||
import { NextRequest } from 'next/server'
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
|
||||
@@ -43,14 +44,7 @@ vi.mock('@/lib/core/utils/request', () => ({
|
||||
generateRequestId: () => 'test-request-id',
|
||||
}))
|
||||
|
||||
vi.mock('@sim/logger', () => ({
|
||||
createLogger: () => ({
|
||||
info: vi.fn(),
|
||||
warn: vi.fn(),
|
||||
error: vi.fn(),
|
||||
debug: vi.fn(),
|
||||
}),
|
||||
}))
|
||||
vi.mock('@sim/logger', () => loggerMock)
|
||||
|
||||
import { PUT } from './route'
|
||||
|
||||
|
||||
@@ -3,6 +3,7 @@
|
||||
*
|
||||
* @vitest-environment node
|
||||
*/
|
||||
import { loggerMock } from '@sim/testing'
|
||||
import { NextRequest } from 'next/server'
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
|
||||
@@ -40,13 +41,7 @@ vi.mock('@/lib/core/utils/request', () => ({
|
||||
generateRequestId: () => 'test-request-id',
|
||||
}))
|
||||
|
||||
vi.mock('@sim/logger', () => ({
|
||||
createLogger: () => ({
|
||||
info: vi.fn(),
|
||||
warn: vi.fn(),
|
||||
error: vi.fn(),
|
||||
}),
|
||||
}))
|
||||
vi.mock('@sim/logger', () => loggerMock)
|
||||
|
||||
import { GET } from '@/app/api/schedules/route'
|
||||
|
||||
|
||||
@@ -10,6 +10,7 @@ import {
|
||||
extractRequiredCredentials,
|
||||
sanitizeCredentials,
|
||||
} from '@/lib/workflows/credentials/credential-extractor'
|
||||
import type { WorkflowState } from '@/stores/workflows/workflow/types'
|
||||
|
||||
const logger = createLogger('TemplateByIdAPI')
|
||||
|
||||
@@ -189,12 +190,12 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
|
||||
.where(eq(workflow.id, template.workflowId))
|
||||
.limit(1)
|
||||
|
||||
const currentState = {
|
||||
const currentState: Partial<WorkflowState> = {
|
||||
blocks: normalizedData.blocks,
|
||||
edges: normalizedData.edges,
|
||||
loops: normalizedData.loops,
|
||||
parallels: normalizedData.parallels,
|
||||
variables: workflowRecord?.variables || undefined,
|
||||
variables: (workflowRecord?.variables as WorkflowState['variables']) ?? undefined,
|
||||
lastSaved: Date.now(),
|
||||
}
|
||||
|
||||
|
||||
@@ -7,7 +7,10 @@ import { v4 as uuidv4 } from 'uuid'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import { getBaseUrl } from '@/lib/core/utils/urls'
|
||||
import { regenerateWorkflowStateIds } from '@/lib/workflows/persistence/utils'
|
||||
import {
|
||||
type RegenerateStateInput,
|
||||
regenerateWorkflowStateIds,
|
||||
} from '@/lib/workflows/persistence/utils'
|
||||
|
||||
const logger = createLogger('TemplateUseAPI')
|
||||
|
||||
@@ -104,9 +107,10 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
|
||||
// Step 2: Regenerate IDs when creating a copy (not when connecting/editing template)
|
||||
// When connecting to template (edit mode), keep original IDs
|
||||
// When using template (copy mode), regenerate all IDs to avoid conflicts
|
||||
const templateState = templateData.state as RegenerateStateInput
|
||||
const workflowState = connectToTemplate
|
||||
? templateData.state
|
||||
: regenerateWorkflowStateIds(templateData.state)
|
||||
? templateState
|
||||
: regenerateWorkflowStateIds(templateState)
|
||||
|
||||
// Step 3: Save the workflow state using the existing state endpoint (like imports do)
|
||||
// Ensure variables in state are remapped for the new workflow as well
|
||||
|
||||
85
apps/sim/app/api/tools/a2a/cancel-task/route.ts
Normal file
85
apps/sim/app/api/tools/a2a/cancel-task/route.ts
Normal file
@@ -0,0 +1,85 @@
|
||||
import type { Task } from '@a2a-js/sdk'
|
||||
import { ClientFactory } from '@a2a-js/sdk/client'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { checkHybridAuth } from '@/lib/auth/hybrid'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
|
||||
const logger = createLogger('A2ACancelTaskAPI')
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
const A2ACancelTaskSchema = z.object({
|
||||
agentUrl: z.string().min(1, 'Agent URL is required'),
|
||||
taskId: z.string().min(1, 'Task ID is required'),
|
||||
apiKey: z.string().optional(),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const requestId = generateRequestId()
|
||||
|
||||
try {
|
||||
const authResult = await checkHybridAuth(request, { requireWorkflowId: false })
|
||||
|
||||
if (!authResult.success) {
|
||||
logger.warn(`[${requestId}] Unauthorized A2A cancel task attempt`)
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: authResult.error || 'Authentication required',
|
||||
},
|
||||
{ status: 401 }
|
||||
)
|
||||
}
|
||||
|
||||
const body = await request.json()
|
||||
const validatedData = A2ACancelTaskSchema.parse(body)
|
||||
|
||||
logger.info(`[${requestId}] Canceling A2A task`, {
|
||||
agentUrl: validatedData.agentUrl,
|
||||
taskId: validatedData.taskId,
|
||||
})
|
||||
|
||||
const factory = new ClientFactory()
|
||||
const client = await factory.createFromUrl(validatedData.agentUrl)
|
||||
|
||||
const task = (await client.cancelTask({ id: validatedData.taskId })) as Task
|
||||
|
||||
logger.info(`[${requestId}] Successfully canceled A2A task`, {
|
||||
taskId: validatedData.taskId,
|
||||
state: task.status.state,
|
||||
})
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
output: {
|
||||
cancelled: true,
|
||||
state: task.status.state,
|
||||
},
|
||||
})
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
logger.warn(`[${requestId}] Invalid A2A cancel task request`, {
|
||||
errors: error.errors,
|
||||
})
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: 'Invalid request data',
|
||||
details: error.errors,
|
||||
},
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
logger.error(`[${requestId}] Error canceling A2A task:`, error)
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: error instanceof Error ? error.message : 'Failed to cancel task',
|
||||
},
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
95
apps/sim/app/api/tools/a2a/delete-push-notification/route.ts
Normal file
95
apps/sim/app/api/tools/a2a/delete-push-notification/route.ts
Normal file
@@ -0,0 +1,95 @@
|
||||
import { ClientFactory } from '@a2a-js/sdk/client'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { checkHybridAuth } from '@/lib/auth/hybrid'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
const logger = createLogger('A2ADeletePushNotificationAPI')
|
||||
|
||||
const A2ADeletePushNotificationSchema = z.object({
|
||||
agentUrl: z.string().min(1, 'Agent URL is required'),
|
||||
taskId: z.string().min(1, 'Task ID is required'),
|
||||
pushNotificationConfigId: z.string().optional(),
|
||||
apiKey: z.string().optional(),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const requestId = generateRequestId()
|
||||
|
||||
try {
|
||||
const authResult = await checkHybridAuth(request, { requireWorkflowId: false })
|
||||
|
||||
if (!authResult.success) {
|
||||
logger.warn(
|
||||
`[${requestId}] Unauthorized A2A delete push notification attempt: ${authResult.error}`
|
||||
)
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: authResult.error || 'Authentication required',
|
||||
},
|
||||
{ status: 401 }
|
||||
)
|
||||
}
|
||||
|
||||
logger.info(
|
||||
`[${requestId}] Authenticated A2A delete push notification request via ${authResult.authType}`,
|
||||
{
|
||||
userId: authResult.userId,
|
||||
}
|
||||
)
|
||||
|
||||
const body = await request.json()
|
||||
const validatedData = A2ADeletePushNotificationSchema.parse(body)
|
||||
|
||||
logger.info(`[${requestId}] Deleting A2A push notification config`, {
|
||||
agentUrl: validatedData.agentUrl,
|
||||
taskId: validatedData.taskId,
|
||||
pushNotificationConfigId: validatedData.pushNotificationConfigId,
|
||||
})
|
||||
|
||||
const factory = new ClientFactory()
|
||||
const client = await factory.createFromUrl(validatedData.agentUrl)
|
||||
|
||||
await client.deleteTaskPushNotificationConfig({
|
||||
id: validatedData.taskId,
|
||||
pushNotificationConfigId: validatedData.pushNotificationConfigId || validatedData.taskId,
|
||||
})
|
||||
|
||||
logger.info(`[${requestId}] Push notification config deleted successfully`, {
|
||||
taskId: validatedData.taskId,
|
||||
})
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
output: {
|
||||
success: true,
|
||||
},
|
||||
})
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: 'Invalid request data',
|
||||
details: error.errors,
|
||||
},
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
logger.error(`[${requestId}] Error deleting A2A push notification:`, error)
|
||||
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: error instanceof Error ? error.message : 'Failed to delete push notification',
|
||||
},
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
93
apps/sim/app/api/tools/a2a/get-agent-card/route.ts
Normal file
93
apps/sim/app/api/tools/a2a/get-agent-card/route.ts
Normal file
@@ -0,0 +1,93 @@
|
||||
import { ClientFactory } from '@a2a-js/sdk/client'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { checkHybridAuth } from '@/lib/auth/hybrid'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
const logger = createLogger('A2AGetAgentCardAPI')
|
||||
|
||||
const A2AGetAgentCardSchema = z.object({
|
||||
agentUrl: z.string().min(1, 'Agent URL is required'),
|
||||
apiKey: z.string().optional(),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const requestId = generateRequestId()
|
||||
|
||||
try {
|
||||
const authResult = await checkHybridAuth(request, { requireWorkflowId: false })
|
||||
|
||||
if (!authResult.success) {
|
||||
logger.warn(`[${requestId}] Unauthorized A2A get agent card attempt: ${authResult.error}`)
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: authResult.error || 'Authentication required',
|
||||
},
|
||||
{ status: 401 }
|
||||
)
|
||||
}
|
||||
|
||||
logger.info(
|
||||
`[${requestId}] Authenticated A2A get agent card request via ${authResult.authType}`,
|
||||
{
|
||||
userId: authResult.userId,
|
||||
}
|
||||
)
|
||||
|
||||
const body = await request.json()
|
||||
const validatedData = A2AGetAgentCardSchema.parse(body)
|
||||
|
||||
logger.info(`[${requestId}] Fetching Agent Card`, {
|
||||
agentUrl: validatedData.agentUrl,
|
||||
})
|
||||
|
||||
const factory = new ClientFactory()
|
||||
const client = await factory.createFromUrl(validatedData.agentUrl)
|
||||
|
||||
const agentCard = await client.getAgentCard()
|
||||
|
||||
logger.info(`[${requestId}] Agent Card fetched successfully`, {
|
||||
agentName: agentCard.name,
|
||||
})
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
output: {
|
||||
name: agentCard.name,
|
||||
description: agentCard.description,
|
||||
url: agentCard.url,
|
||||
version: agentCard.protocolVersion,
|
||||
capabilities: agentCard.capabilities,
|
||||
skills: agentCard.skills,
|
||||
defaultInputModes: agentCard.defaultInputModes,
|
||||
defaultOutputModes: agentCard.defaultOutputModes,
|
||||
},
|
||||
})
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: 'Invalid request data',
|
||||
details: error.errors,
|
||||
},
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
logger.error(`[${requestId}] Error fetching Agent Card:`, error)
|
||||
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: error instanceof Error ? error.message : 'Failed to fetch Agent Card',
|
||||
},
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
116
apps/sim/app/api/tools/a2a/get-push-notification/route.ts
Normal file
116
apps/sim/app/api/tools/a2a/get-push-notification/route.ts
Normal file
@@ -0,0 +1,116 @@
|
||||
import { ClientFactory } from '@a2a-js/sdk/client'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { checkHybridAuth } from '@/lib/auth/hybrid'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
const logger = createLogger('A2AGetPushNotificationAPI')
|
||||
|
||||
const A2AGetPushNotificationSchema = z.object({
|
||||
agentUrl: z.string().min(1, 'Agent URL is required'),
|
||||
taskId: z.string().min(1, 'Task ID is required'),
|
||||
apiKey: z.string().optional(),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const requestId = generateRequestId()
|
||||
|
||||
try {
|
||||
const authResult = await checkHybridAuth(request, { requireWorkflowId: false })
|
||||
|
||||
if (!authResult.success) {
|
||||
logger.warn(
|
||||
`[${requestId}] Unauthorized A2A get push notification attempt: ${authResult.error}`
|
||||
)
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: authResult.error || 'Authentication required',
|
||||
},
|
||||
{ status: 401 }
|
||||
)
|
||||
}
|
||||
|
||||
logger.info(
|
||||
`[${requestId}] Authenticated A2A get push notification request via ${authResult.authType}`,
|
||||
{
|
||||
userId: authResult.userId,
|
||||
}
|
||||
)
|
||||
|
||||
const body = await request.json()
|
||||
const validatedData = A2AGetPushNotificationSchema.parse(body)
|
||||
|
||||
logger.info(`[${requestId}] Getting push notification config`, {
|
||||
agentUrl: validatedData.agentUrl,
|
||||
taskId: validatedData.taskId,
|
||||
})
|
||||
|
||||
const factory = new ClientFactory()
|
||||
const client = await factory.createFromUrl(validatedData.agentUrl)
|
||||
|
||||
const result = await client.getTaskPushNotificationConfig({
|
||||
id: validatedData.taskId,
|
||||
})
|
||||
|
||||
if (!result || !result.pushNotificationConfig) {
|
||||
logger.info(`[${requestId}] No push notification config found for task`, {
|
||||
taskId: validatedData.taskId,
|
||||
})
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
output: {
|
||||
exists: false,
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
logger.info(`[${requestId}] Push notification config retrieved successfully`, {
|
||||
taskId: validatedData.taskId,
|
||||
})
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
output: {
|
||||
url: result.pushNotificationConfig.url,
|
||||
token: result.pushNotificationConfig.token,
|
||||
exists: true,
|
||||
},
|
||||
})
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: 'Invalid request data',
|
||||
details: error.errors,
|
||||
},
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
if (error instanceof Error && error.message.includes('not found')) {
|
||||
logger.info(`[${requestId}] Task not found, returning exists: false`)
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
output: {
|
||||
exists: false,
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
logger.error(`[${requestId}] Error getting A2A push notification:`, error)
|
||||
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: error instanceof Error ? error.message : 'Failed to get push notification',
|
||||
},
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
96
apps/sim/app/api/tools/a2a/get-task/route.ts
Normal file
96
apps/sim/app/api/tools/a2a/get-task/route.ts
Normal file
@@ -0,0 +1,96 @@
|
||||
import type { Task } from '@a2a-js/sdk'
|
||||
import { ClientFactory } from '@a2a-js/sdk/client'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { checkHybridAuth } from '@/lib/auth/hybrid'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
const logger = createLogger('A2AGetTaskAPI')
|
||||
|
||||
const A2AGetTaskSchema = z.object({
|
||||
agentUrl: z.string().min(1, 'Agent URL is required'),
|
||||
taskId: z.string().min(1, 'Task ID is required'),
|
||||
apiKey: z.string().optional(),
|
||||
historyLength: z.number().optional(),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const requestId = generateRequestId()
|
||||
|
||||
try {
|
||||
const authResult = await checkHybridAuth(request, { requireWorkflowId: false })
|
||||
|
||||
if (!authResult.success) {
|
||||
logger.warn(`[${requestId}] Unauthorized A2A get task attempt: ${authResult.error}`)
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: authResult.error || 'Authentication required',
|
||||
},
|
||||
{ status: 401 }
|
||||
)
|
||||
}
|
||||
|
||||
logger.info(`[${requestId}] Authenticated A2A get task request via ${authResult.authType}`, {
|
||||
userId: authResult.userId,
|
||||
})
|
||||
|
||||
const body = await request.json()
|
||||
const validatedData = A2AGetTaskSchema.parse(body)
|
||||
|
||||
logger.info(`[${requestId}] Getting A2A task`, {
|
||||
agentUrl: validatedData.agentUrl,
|
||||
taskId: validatedData.taskId,
|
||||
historyLength: validatedData.historyLength,
|
||||
})
|
||||
|
||||
const factory = new ClientFactory()
|
||||
const client = await factory.createFromUrl(validatedData.agentUrl)
|
||||
|
||||
const task = (await client.getTask({
|
||||
id: validatedData.taskId,
|
||||
historyLength: validatedData.historyLength,
|
||||
})) as Task
|
||||
|
||||
logger.info(`[${requestId}] Successfully retrieved A2A task`, {
|
||||
taskId: task.id,
|
||||
state: task.status.state,
|
||||
})
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
output: {
|
||||
taskId: task.id,
|
||||
contextId: task.contextId,
|
||||
state: task.status.state,
|
||||
artifacts: task.artifacts,
|
||||
history: task.history,
|
||||
},
|
||||
})
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: 'Invalid request data',
|
||||
details: error.errors,
|
||||
},
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
logger.error(`[${requestId}] Error getting A2A task:`, error)
|
||||
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: error instanceof Error ? error.message : 'Failed to get task',
|
||||
},
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
121
apps/sim/app/api/tools/a2a/resubscribe/route.ts
Normal file
121
apps/sim/app/api/tools/a2a/resubscribe/route.ts
Normal file
@@ -0,0 +1,121 @@
|
||||
import type {
|
||||
Artifact,
|
||||
Message,
|
||||
Task,
|
||||
TaskArtifactUpdateEvent,
|
||||
TaskState,
|
||||
TaskStatusUpdateEvent,
|
||||
} from '@a2a-js/sdk'
|
||||
import { ClientFactory } from '@a2a-js/sdk/client'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { extractTextContent, isTerminalState } from '@/lib/a2a/utils'
|
||||
import { checkHybridAuth } from '@/lib/auth/hybrid'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
|
||||
const logger = createLogger('A2AResubscribeAPI')
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
const A2AResubscribeSchema = z.object({
|
||||
agentUrl: z.string().min(1, 'Agent URL is required'),
|
||||
taskId: z.string().min(1, 'Task ID is required'),
|
||||
apiKey: z.string().optional(),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const requestId = generateRequestId()
|
||||
|
||||
try {
|
||||
const authResult = await checkHybridAuth(request, { requireWorkflowId: false })
|
||||
|
||||
if (!authResult.success) {
|
||||
logger.warn(`[${requestId}] Unauthorized A2A resubscribe attempt`)
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: authResult.error || 'Authentication required',
|
||||
},
|
||||
{ status: 401 }
|
||||
)
|
||||
}
|
||||
|
||||
const body = await request.json()
|
||||
const validatedData = A2AResubscribeSchema.parse(body)
|
||||
|
||||
const factory = new ClientFactory()
|
||||
const client = await factory.createFromUrl(validatedData.agentUrl)
|
||||
|
||||
const stream = client.resubscribeTask({ id: validatedData.taskId })
|
||||
|
||||
let taskId = validatedData.taskId
|
||||
let contextId: string | undefined
|
||||
let state: TaskState = 'working'
|
||||
let content = ''
|
||||
let artifacts: Artifact[] = []
|
||||
let history: Message[] = []
|
||||
|
||||
for await (const event of stream) {
|
||||
if (event.kind === 'message') {
|
||||
const msg = event as Message
|
||||
content = extractTextContent(msg)
|
||||
taskId = msg.taskId || taskId
|
||||
contextId = msg.contextId || contextId
|
||||
state = 'completed'
|
||||
} else if (event.kind === 'task') {
|
||||
const task = event as Task
|
||||
taskId = task.id
|
||||
contextId = task.contextId
|
||||
state = task.status.state
|
||||
artifacts = task.artifacts || []
|
||||
history = task.history || []
|
||||
const lastAgentMessage = history.filter((m) => m.role === 'agent').pop()
|
||||
if (lastAgentMessage) {
|
||||
content = extractTextContent(lastAgentMessage)
|
||||
}
|
||||
} else if ('status' in event) {
|
||||
const statusEvent = event as TaskStatusUpdateEvent
|
||||
state = statusEvent.status.state
|
||||
} else if ('artifact' in event) {
|
||||
const artifactEvent = event as TaskArtifactUpdateEvent
|
||||
artifacts.push(artifactEvent.artifact)
|
||||
}
|
||||
}
|
||||
|
||||
logger.info(`[${requestId}] Successfully resubscribed to A2A task ${taskId}`)
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
output: {
|
||||
taskId,
|
||||
contextId,
|
||||
state,
|
||||
isRunning: !isTerminalState(state),
|
||||
artifacts,
|
||||
history,
|
||||
},
|
||||
})
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
logger.warn(`[${requestId}] Invalid A2A resubscribe data`, { errors: error.errors })
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: 'Invalid request data',
|
||||
details: error.errors,
|
||||
},
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
logger.error(`[${requestId}] Error resubscribing to A2A task:`, error)
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: error instanceof Error ? error.message : 'Failed to resubscribe',
|
||||
},
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
152
apps/sim/app/api/tools/a2a/send-message-stream/route.ts
Normal file
152
apps/sim/app/api/tools/a2a/send-message-stream/route.ts
Normal file
@@ -0,0 +1,152 @@
|
||||
import type {
|
||||
Artifact,
|
||||
Message,
|
||||
Task,
|
||||
TaskArtifactUpdateEvent,
|
||||
TaskState,
|
||||
TaskStatusUpdateEvent,
|
||||
} from '@a2a-js/sdk'
|
||||
import { ClientFactory } from '@a2a-js/sdk/client'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { extractTextContent, isTerminalState } from '@/lib/a2a/utils'
|
||||
import { checkHybridAuth } from '@/lib/auth/hybrid'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
const logger = createLogger('A2ASendMessageStreamAPI')
|
||||
|
||||
const A2ASendMessageStreamSchema = z.object({
|
||||
agentUrl: z.string().min(1, 'Agent URL is required'),
|
||||
message: z.string().min(1, 'Message is required'),
|
||||
taskId: z.string().optional(),
|
||||
contextId: z.string().optional(),
|
||||
apiKey: z.string().optional(),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const requestId = generateRequestId()
|
||||
|
||||
try {
|
||||
const authResult = await checkHybridAuth(request, { requireWorkflowId: false })
|
||||
|
||||
if (!authResult.success) {
|
||||
logger.warn(
|
||||
`[${requestId}] Unauthorized A2A send message stream attempt: ${authResult.error}`
|
||||
)
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: authResult.error || 'Authentication required',
|
||||
},
|
||||
{ status: 401 }
|
||||
)
|
||||
}
|
||||
|
||||
logger.info(
|
||||
`[${requestId}] Authenticated A2A send message stream request via ${authResult.authType}`,
|
||||
{
|
||||
userId: authResult.userId,
|
||||
}
|
||||
)
|
||||
|
||||
const body = await request.json()
|
||||
const validatedData = A2ASendMessageStreamSchema.parse(body)
|
||||
|
||||
logger.info(`[${requestId}] Sending A2A streaming message`, {
|
||||
agentUrl: validatedData.agentUrl,
|
||||
hasTaskId: !!validatedData.taskId,
|
||||
hasContextId: !!validatedData.contextId,
|
||||
})
|
||||
|
||||
const factory = new ClientFactory()
|
||||
const client = await factory.createFromUrl(validatedData.agentUrl)
|
||||
|
||||
const message: Message = {
|
||||
kind: 'message',
|
||||
messageId: crypto.randomUUID(),
|
||||
role: 'user',
|
||||
parts: [{ kind: 'text', text: validatedData.message }],
|
||||
...(validatedData.taskId && { taskId: validatedData.taskId }),
|
||||
...(validatedData.contextId && { contextId: validatedData.contextId }),
|
||||
}
|
||||
|
||||
const stream = client.sendMessageStream({ message })
|
||||
|
||||
let taskId = ''
|
||||
let contextId: string | undefined
|
||||
let state: TaskState = 'working'
|
||||
let content = ''
|
||||
let artifacts: Artifact[] = []
|
||||
let history: Message[] = []
|
||||
|
||||
for await (const event of stream) {
|
||||
if (event.kind === 'message') {
|
||||
const msg = event as Message
|
||||
content = extractTextContent(msg)
|
||||
taskId = msg.taskId || taskId
|
||||
contextId = msg.contextId || contextId
|
||||
state = 'completed'
|
||||
} else if (event.kind === 'task') {
|
||||
const task = event as Task
|
||||
taskId = task.id
|
||||
contextId = task.contextId
|
||||
state = task.status.state
|
||||
artifacts = task.artifacts || []
|
||||
history = task.history || []
|
||||
const lastAgentMessage = history.filter((m) => m.role === 'agent').pop()
|
||||
if (lastAgentMessage) {
|
||||
content = extractTextContent(lastAgentMessage)
|
||||
}
|
||||
} else if ('status' in event) {
|
||||
const statusEvent = event as TaskStatusUpdateEvent
|
||||
state = statusEvent.status.state
|
||||
} else if ('artifact' in event) {
|
||||
const artifactEvent = event as TaskArtifactUpdateEvent
|
||||
artifacts.push(artifactEvent.artifact)
|
||||
}
|
||||
}
|
||||
|
||||
logger.info(`[${requestId}] A2A streaming message completed`, {
|
||||
taskId,
|
||||
state,
|
||||
artifactCount: artifacts.length,
|
||||
})
|
||||
|
||||
return NextResponse.json({
|
||||
success: isTerminalState(state) && state !== 'failed',
|
||||
output: {
|
||||
content,
|
||||
taskId,
|
||||
contextId,
|
||||
state,
|
||||
artifacts,
|
||||
history,
|
||||
},
|
||||
})
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: 'Invalid request data',
|
||||
details: error.errors,
|
||||
},
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
logger.error(`[${requestId}] Error in A2A streaming:`, error)
|
||||
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: error instanceof Error ? error.message : 'Streaming failed',
|
||||
},
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
128
apps/sim/app/api/tools/a2a/send-message/route.ts
Normal file
128
apps/sim/app/api/tools/a2a/send-message/route.ts
Normal file
@@ -0,0 +1,128 @@
|
||||
import type { Message, Task } from '@a2a-js/sdk'
|
||||
import { ClientFactory } from '@a2a-js/sdk/client'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { extractTextContent, isTerminalState } from '@/lib/a2a/utils'
|
||||
import { checkHybridAuth } from '@/lib/auth/hybrid'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
const logger = createLogger('A2ASendMessageAPI')
|
||||
|
||||
const A2ASendMessageSchema = z.object({
|
||||
agentUrl: z.string().min(1, 'Agent URL is required'),
|
||||
message: z.string().min(1, 'Message is required'),
|
||||
taskId: z.string().optional(),
|
||||
contextId: z.string().optional(),
|
||||
apiKey: z.string().optional(),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const requestId = generateRequestId()
|
||||
|
||||
try {
|
||||
const authResult = await checkHybridAuth(request, { requireWorkflowId: false })
|
||||
|
||||
if (!authResult.success) {
|
||||
logger.warn(`[${requestId}] Unauthorized A2A send message attempt: ${authResult.error}`)
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: authResult.error || 'Authentication required',
|
||||
},
|
||||
{ status: 401 }
|
||||
)
|
||||
}
|
||||
|
||||
logger.info(
|
||||
`[${requestId}] Authenticated A2A send message request via ${authResult.authType}`,
|
||||
{
|
||||
userId: authResult.userId,
|
||||
}
|
||||
)
|
||||
|
||||
const body = await request.json()
|
||||
const validatedData = A2ASendMessageSchema.parse(body)
|
||||
|
||||
logger.info(`[${requestId}] Sending A2A message`, {
|
||||
agentUrl: validatedData.agentUrl,
|
||||
hasTaskId: !!validatedData.taskId,
|
||||
hasContextId: !!validatedData.contextId,
|
||||
})
|
||||
|
||||
const factory = new ClientFactory()
|
||||
const client = await factory.createFromUrl(validatedData.agentUrl)
|
||||
|
||||
const message: Message = {
|
||||
kind: 'message',
|
||||
messageId: crypto.randomUUID(),
|
||||
role: 'user',
|
||||
parts: [{ kind: 'text', text: validatedData.message }],
|
||||
...(validatedData.taskId && { taskId: validatedData.taskId }),
|
||||
...(validatedData.contextId && { contextId: validatedData.contextId }),
|
||||
}
|
||||
|
||||
const result = await client.sendMessage({ message })
|
||||
|
||||
if (result.kind === 'message') {
|
||||
const responseMessage = result as Message
|
||||
|
||||
logger.info(`[${requestId}] A2A message sent successfully (message response)`)
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
output: {
|
||||
content: extractTextContent(responseMessage),
|
||||
taskId: responseMessage.taskId || '',
|
||||
contextId: responseMessage.contextId,
|
||||
state: 'completed',
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
const task = result as Task
|
||||
const lastAgentMessage = task.history?.filter((m) => m.role === 'agent').pop()
|
||||
const content = lastAgentMessage ? extractTextContent(lastAgentMessage) : ''
|
||||
|
||||
logger.info(`[${requestId}] A2A message sent successfully (task response)`, {
|
||||
taskId: task.id,
|
||||
state: task.status.state,
|
||||
})
|
||||
|
||||
return NextResponse.json({
|
||||
success: isTerminalState(task.status.state) && task.status.state !== 'failed',
|
||||
output: {
|
||||
content,
|
||||
taskId: task.id,
|
||||
contextId: task.contextId,
|
||||
state: task.status.state,
|
||||
artifacts: task.artifacts,
|
||||
history: task.history,
|
||||
},
|
||||
})
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: 'Invalid request data',
|
||||
details: error.errors,
|
||||
},
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
logger.error(`[${requestId}] Error sending A2A message:`, error)
|
||||
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: error instanceof Error ? error.message : 'Internal server error',
|
||||
},
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
94
apps/sim/app/api/tools/a2a/set-push-notification/route.ts
Normal file
94
apps/sim/app/api/tools/a2a/set-push-notification/route.ts
Normal file
@@ -0,0 +1,94 @@
|
||||
import { ClientFactory } from '@a2a-js/sdk/client'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { checkHybridAuth } from '@/lib/auth/hybrid'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
const logger = createLogger('A2ASetPushNotificationAPI')
|
||||
|
||||
const A2ASetPushNotificationSchema = z.object({
|
||||
agentUrl: z.string().min(1, 'Agent URL is required'),
|
||||
taskId: z.string().min(1, 'Task ID is required'),
|
||||
webhookUrl: z.string().min(1, 'Webhook URL is required'),
|
||||
token: z.string().optional(),
|
||||
apiKey: z.string().optional(),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const requestId = generateRequestId()
|
||||
|
||||
try {
|
||||
const authResult = await checkHybridAuth(request, { requireWorkflowId: false })
|
||||
|
||||
if (!authResult.success) {
|
||||
logger.warn(`[${requestId}] Unauthorized A2A set push notification attempt`, {
|
||||
error: authResult.error || 'Authentication required',
|
||||
})
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: authResult.error || 'Authentication required',
|
||||
},
|
||||
{ status: 401 }
|
||||
)
|
||||
}
|
||||
|
||||
const body = await request.json()
|
||||
const validatedData = A2ASetPushNotificationSchema.parse(body)
|
||||
|
||||
logger.info(`[${requestId}] A2A set push notification request`, {
|
||||
agentUrl: validatedData.agentUrl,
|
||||
taskId: validatedData.taskId,
|
||||
webhookUrl: validatedData.webhookUrl,
|
||||
})
|
||||
|
||||
const factory = new ClientFactory()
|
||||
const client = await factory.createFromUrl(validatedData.agentUrl)
|
||||
|
||||
const result = await client.setTaskPushNotificationConfig({
|
||||
taskId: validatedData.taskId,
|
||||
pushNotificationConfig: {
|
||||
url: validatedData.webhookUrl,
|
||||
token: validatedData.token,
|
||||
},
|
||||
})
|
||||
|
||||
logger.info(`[${requestId}] A2A set push notification successful`, {
|
||||
taskId: validatedData.taskId,
|
||||
})
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
output: {
|
||||
url: result.pushNotificationConfig.url,
|
||||
token: result.pushNotificationConfig.token,
|
||||
success: true,
|
||||
},
|
||||
})
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: 'Invalid request data',
|
||||
details: error.errors,
|
||||
},
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
logger.error(`[${requestId}] Error setting A2A push notification:`, error)
|
||||
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: error instanceof Error ? error.message : 'Failed to set push notification',
|
||||
},
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -1,14 +1,14 @@
|
||||
import { NextRequest } from 'next/server'
|
||||
/**
|
||||
* Tests for custom tools API routes
|
||||
*
|
||||
* @vitest-environment node
|
||||
*/
|
||||
import { loggerMock } from '@sim/testing'
|
||||
import { NextRequest } from 'next/server'
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
import { createMockRequest } from '@/app/api/__test-utils__/utils'
|
||||
|
||||
describe('Custom Tools API Routes', () => {
|
||||
// Sample data for testing
|
||||
const sampleTools = [
|
||||
{
|
||||
id: 'tool-1',
|
||||
@@ -66,7 +66,6 @@ describe('Custom Tools API Routes', () => {
|
||||
},
|
||||
]
|
||||
|
||||
// Mock implementation stubs
|
||||
const mockSelect = vi.fn()
|
||||
const mockFrom = vi.fn()
|
||||
const mockWhere = vi.fn()
|
||||
@@ -82,13 +81,9 @@ describe('Custom Tools API Routes', () => {
|
||||
beforeEach(() => {
|
||||
vi.resetModules()
|
||||
|
||||
// Reset all mock implementations
|
||||
mockSelect.mockReturnValue({ from: mockFrom })
|
||||
mockFrom.mockReturnValue({ where: mockWhere })
|
||||
// where() can be called with orderBy(), limit(), or directly awaited
|
||||
// Create a mock query builder that supports all patterns
|
||||
mockWhere.mockImplementation((condition) => {
|
||||
// Return an object that is both awaitable and has orderBy() and limit() methods
|
||||
const queryBuilder = {
|
||||
orderBy: mockOrderBy,
|
||||
limit: mockLimit,
|
||||
@@ -101,7 +96,6 @@ describe('Custom Tools API Routes', () => {
|
||||
return queryBuilder
|
||||
})
|
||||
mockOrderBy.mockImplementation(() => {
|
||||
// orderBy returns an awaitable query builder
|
||||
const queryBuilder = {
|
||||
limit: mockLimit,
|
||||
then: (resolve: (value: typeof sampleTools) => void) => {
|
||||
@@ -119,7 +113,6 @@ describe('Custom Tools API Routes', () => {
|
||||
mockSet.mockReturnValue({ where: mockWhere })
|
||||
mockDelete.mockReturnValue({ where: mockWhere })
|
||||
|
||||
// Mock database
|
||||
vi.doMock('@sim/db', () => ({
|
||||
db: {
|
||||
select: mockSelect,
|
||||
@@ -127,14 +120,11 @@ describe('Custom Tools API Routes', () => {
|
||||
update: mockUpdate,
|
||||
delete: mockDelete,
|
||||
transaction: vi.fn().mockImplementation(async (callback) => {
|
||||
// Execute the callback with a transaction object that has the same methods
|
||||
// Create transaction-specific mocks that follow the same pattern
|
||||
const txMockSelect = vi.fn().mockReturnValue({ from: mockFrom })
|
||||
const txMockInsert = vi.fn().mockReturnValue({ values: mockValues })
|
||||
const txMockUpdate = vi.fn().mockReturnValue({ set: mockSet })
|
||||
const txMockDelete = vi.fn().mockReturnValue({ where: mockWhere })
|
||||
|
||||
// Transaction where() should also support the query builder pattern with orderBy
|
||||
const txMockOrderBy = vi.fn().mockImplementation(() => {
|
||||
const queryBuilder = {
|
||||
limit: mockLimit,
|
||||
@@ -160,7 +150,6 @@ describe('Custom Tools API Routes', () => {
|
||||
return queryBuilder
|
||||
})
|
||||
|
||||
// Update mockFrom to return txMockWhere for transaction queries
|
||||
const txMockFrom = vi.fn().mockReturnValue({ where: txMockWhere })
|
||||
txMockSelect.mockReturnValue({ from: txMockFrom })
|
||||
|
||||
@@ -174,7 +163,6 @@ describe('Custom Tools API Routes', () => {
|
||||
},
|
||||
}))
|
||||
|
||||
// Mock schema
|
||||
vi.doMock('@sim/db/schema', () => ({
|
||||
customTools: {
|
||||
id: 'id',
|
||||
@@ -189,12 +177,10 @@ describe('Custom Tools API Routes', () => {
|
||||
},
|
||||
}))
|
||||
|
||||
// Mock authentication
|
||||
vi.doMock('@/lib/auth', () => ({
|
||||
getSession: vi.fn().mockResolvedValue(mockSession),
|
||||
}))
|
||||
|
||||
// Mock hybrid auth
|
||||
vi.doMock('@/lib/auth/hybrid', () => ({
|
||||
checkHybridAuth: vi.fn().mockResolvedValue({
|
||||
success: true,
|
||||
@@ -203,22 +189,12 @@ describe('Custom Tools API Routes', () => {
|
||||
}),
|
||||
}))
|
||||
|
||||
// Mock permissions
|
||||
vi.doMock('@/lib/workspaces/permissions/utils', () => ({
|
||||
getUserEntityPermissions: vi.fn().mockResolvedValue('admin'),
|
||||
}))
|
||||
|
||||
// Mock logger
|
||||
vi.doMock('@sim/logger', () => ({
|
||||
createLogger: vi.fn().mockReturnValue({
|
||||
info: vi.fn(),
|
||||
error: vi.fn(),
|
||||
warn: vi.fn(),
|
||||
debug: vi.fn(),
|
||||
}),
|
||||
}))
|
||||
vi.doMock('@sim/logger', () => loggerMock)
|
||||
|
||||
// Mock drizzle-orm functions
|
||||
vi.doMock('drizzle-orm', async () => {
|
||||
const actual = await vi.importActual('drizzle-orm')
|
||||
return {
|
||||
@@ -232,12 +208,10 @@ describe('Custom Tools API Routes', () => {
|
||||
}
|
||||
})
|
||||
|
||||
// Mock utils
|
||||
vi.doMock('@/lib/core/utils/request', () => ({
|
||||
generateRequestId: vi.fn().mockReturnValue('test-request-id'),
|
||||
}))
|
||||
|
||||
// Mock custom tools operations
|
||||
vi.doMock('@/lib/workflows/custom-tools/operations', () => ({
|
||||
upsertCustomTools: vi.fn().mockResolvedValue(sampleTools),
|
||||
}))
|
||||
@@ -252,29 +226,23 @@ describe('Custom Tools API Routes', () => {
|
||||
*/
|
||||
describe('GET /api/tools/custom', () => {
|
||||
it('should return tools for authenticated user with workspaceId', async () => {
|
||||
// Create mock request with workspaceId
|
||||
const req = new NextRequest(
|
||||
'http://localhost:3000/api/tools/custom?workspaceId=workspace-123'
|
||||
)
|
||||
|
||||
// Simulate DB returning tools with orderBy chain
|
||||
mockWhere.mockReturnValueOnce({
|
||||
orderBy: mockOrderBy.mockReturnValueOnce(Promise.resolve(sampleTools)),
|
||||
})
|
||||
|
||||
// Import handler after mocks are set up
|
||||
const { GET } = await import('@/app/api/tools/custom/route')
|
||||
|
||||
// Call the handler
|
||||
const response = await GET(req)
|
||||
const data = await response.json()
|
||||
|
||||
// Verify response
|
||||
expect(response.status).toBe(200)
|
||||
expect(data).toHaveProperty('data')
|
||||
expect(data.data).toEqual(sampleTools)
|
||||
|
||||
// Verify DB query
|
||||
expect(mockSelect).toHaveBeenCalled()
|
||||
expect(mockFrom).toHaveBeenCalled()
|
||||
expect(mockWhere).toHaveBeenCalled()
|
||||
@@ -282,12 +250,10 @@ describe('Custom Tools API Routes', () => {
|
||||
})
|
||||
|
||||
it('should handle unauthorized access', async () => {
|
||||
// Create mock request
|
||||
const req = new NextRequest(
|
||||
'http://localhost:3000/api/tools/custom?workspaceId=workspace-123'
|
||||
)
|
||||
|
||||
// Mock hybrid auth to return unauthorized
|
||||
vi.doMock('@/lib/auth/hybrid', () => ({
|
||||
checkHybridAuth: vi.fn().mockResolvedValue({
|
||||
success: false,
|
||||
@@ -295,26 +261,20 @@ describe('Custom Tools API Routes', () => {
|
||||
}),
|
||||
}))
|
||||
|
||||
// Import handler after mocks are set up
|
||||
const { GET } = await import('@/app/api/tools/custom/route')
|
||||
|
||||
// Call the handler
|
||||
const response = await GET(req)
|
||||
const data = await response.json()
|
||||
|
||||
// Verify response
|
||||
expect(response.status).toBe(401)
|
||||
expect(data).toHaveProperty('error', 'Unauthorized')
|
||||
})
|
||||
|
||||
it('should handle workflowId parameter', async () => {
|
||||
// Create mock request with workflowId parameter
|
||||
const req = new NextRequest('http://localhost:3000/api/tools/custom?workflowId=workflow-123')
|
||||
|
||||
// Mock workflow lookup to return workspaceId (for limit(1) call)
|
||||
mockLimit.mockResolvedValueOnce([{ workspaceId: 'workspace-123' }])
|
||||
|
||||
// Mock the where() call for fetching tools (returns awaitable query builder)
|
||||
mockWhere.mockImplementationOnce((condition) => {
|
||||
const queryBuilder = {
|
||||
limit: mockLimit,
|
||||
@@ -327,18 +287,14 @@ describe('Custom Tools API Routes', () => {
|
||||
return queryBuilder
|
||||
})
|
||||
|
||||
// Import handler after mocks are set up
|
||||
const { GET } = await import('@/app/api/tools/custom/route')
|
||||
|
||||
// Call the handler
|
||||
const response = await GET(req)
|
||||
const data = await response.json()
|
||||
|
||||
// Verify response
|
||||
expect(response.status).toBe(200)
|
||||
expect(data).toHaveProperty('data')
|
||||
|
||||
// Verify DB query was called
|
||||
expect(mockWhere).toHaveBeenCalled()
|
||||
})
|
||||
})
|
||||
@@ -348,7 +304,6 @@ describe('Custom Tools API Routes', () => {
|
||||
*/
|
||||
describe('POST /api/tools/custom', () => {
|
||||
it('should reject unauthorized requests', async () => {
|
||||
// Mock hybrid auth to return unauthorized
|
||||
vi.doMock('@/lib/auth/hybrid', () => ({
|
||||
checkHybridAuth: vi.fn().mockResolvedValue({
|
||||
success: false,
|
||||
@@ -356,39 +311,29 @@ describe('Custom Tools API Routes', () => {
|
||||
}),
|
||||
}))
|
||||
|
||||
// Create mock request
|
||||
const req = createMockRequest('POST', { tools: [], workspaceId: 'workspace-123' })
|
||||
|
||||
// Import handler after mocks are set up
|
||||
const { POST } = await import('@/app/api/tools/custom/route')
|
||||
|
||||
// Call the handler
|
||||
const response = await POST(req)
|
||||
const data = await response.json()
|
||||
|
||||
// Verify response
|
||||
expect(response.status).toBe(401)
|
||||
expect(data).toHaveProperty('error', 'Unauthorized')
|
||||
})
|
||||
|
||||
it('should validate request data', async () => {
|
||||
// Create invalid tool data (missing required fields)
|
||||
const invalidTool = {
|
||||
// Missing title, schema
|
||||
code: 'return "invalid";',
|
||||
}
|
||||
|
||||
// Create mock request with invalid tool and workspaceId
|
||||
const req = createMockRequest('POST', { tools: [invalidTool], workspaceId: 'workspace-123' })
|
||||
|
||||
// Import handler after mocks are set up
|
||||
const { POST } = await import('@/app/api/tools/custom/route')
|
||||
|
||||
// Call the handler
|
||||
const response = await POST(req)
|
||||
const data = await response.json()
|
||||
|
||||
// Verify response
|
||||
expect(response.status).toBe(400)
|
||||
expect(data).toHaveProperty('error', 'Invalid request data')
|
||||
expect(data).toHaveProperty('details')
|
||||
@@ -400,96 +345,74 @@ describe('Custom Tools API Routes', () => {
|
||||
*/
|
||||
describe('DELETE /api/tools/custom', () => {
|
||||
it('should delete a workspace-scoped tool by ID', async () => {
|
||||
// Mock finding existing workspace-scoped tool
|
||||
mockLimit.mockResolvedValueOnce([sampleTools[0]])
|
||||
|
||||
// Create mock request with ID and workspaceId parameters
|
||||
const req = new NextRequest(
|
||||
'http://localhost:3000/api/tools/custom?id=tool-1&workspaceId=workspace-123'
|
||||
)
|
||||
|
||||
// Import handler after mocks are set up
|
||||
const { DELETE } = await import('@/app/api/tools/custom/route')
|
||||
|
||||
// Call the handler
|
||||
const response = await DELETE(req)
|
||||
const data = await response.json()
|
||||
|
||||
// Verify response
|
||||
expect(response.status).toBe(200)
|
||||
expect(data).toHaveProperty('success', true)
|
||||
|
||||
// Verify delete was called with correct parameters
|
||||
expect(mockDelete).toHaveBeenCalled()
|
||||
expect(mockWhere).toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('should reject requests missing tool ID', async () => {
|
||||
// Create mock request without ID parameter
|
||||
const req = createMockRequest('DELETE')
|
||||
|
||||
// Import handler after mocks are set up
|
||||
const { DELETE } = await import('@/app/api/tools/custom/route')
|
||||
|
||||
// Call the handler
|
||||
const response = await DELETE(req)
|
||||
const data = await response.json()
|
||||
|
||||
// Verify response
|
||||
expect(response.status).toBe(400)
|
||||
expect(data).toHaveProperty('error', 'Tool ID is required')
|
||||
})
|
||||
|
||||
it('should handle tool not found', async () => {
|
||||
// Mock tool not found
|
||||
mockLimit.mockResolvedValueOnce([])
|
||||
|
||||
// Create mock request with non-existent ID
|
||||
const req = new NextRequest('http://localhost:3000/api/tools/custom?id=non-existent')
|
||||
|
||||
// Import handler after mocks are set up
|
||||
const { DELETE } = await import('@/app/api/tools/custom/route')
|
||||
|
||||
// Call the handler
|
||||
const response = await DELETE(req)
|
||||
const data = await response.json()
|
||||
|
||||
// Verify response
|
||||
expect(response.status).toBe(404)
|
||||
expect(data).toHaveProperty('error', 'Tool not found')
|
||||
})
|
||||
|
||||
it('should prevent unauthorized deletion of user-scoped tool', async () => {
|
||||
// Mock hybrid auth for the DELETE request
|
||||
vi.doMock('@/lib/auth/hybrid', () => ({
|
||||
checkHybridAuth: vi.fn().mockResolvedValue({
|
||||
success: true,
|
||||
userId: 'user-456', // Different user
|
||||
userId: 'user-456',
|
||||
authType: 'session',
|
||||
}),
|
||||
}))
|
||||
|
||||
// Mock finding user-scoped tool (no workspaceId) that belongs to user-123
|
||||
const userScopedTool = { ...sampleTools[0], workspaceId: null, userId: 'user-123' }
|
||||
mockLimit.mockResolvedValueOnce([userScopedTool])
|
||||
|
||||
// Create mock request (no workspaceId for user-scoped tool)
|
||||
const req = new NextRequest('http://localhost:3000/api/tools/custom?id=tool-1')
|
||||
|
||||
// Import handler after mocks are set up
|
||||
const { DELETE } = await import('@/app/api/tools/custom/route')
|
||||
|
||||
// Call the handler
|
||||
const response = await DELETE(req)
|
||||
const data = await response.json()
|
||||
|
||||
// Verify response
|
||||
expect(response.status).toBe(403)
|
||||
expect(data).toHaveProperty('error', 'Access denied')
|
||||
})
|
||||
|
||||
it('should reject unauthorized requests', async () => {
|
||||
// Mock hybrid auth to return unauthorized
|
||||
vi.doMock('@/lib/auth/hybrid', () => ({
|
||||
checkHybridAuth: vi.fn().mockResolvedValue({
|
||||
success: false,
|
||||
@@ -497,17 +420,13 @@ describe('Custom Tools API Routes', () => {
|
||||
}),
|
||||
}))
|
||||
|
||||
// Create mock request
|
||||
const req = new NextRequest('http://localhost:3000/api/tools/custom?id=tool-1')
|
||||
|
||||
// Import handler after mocks are set up
|
||||
const { DELETE } = await import('@/app/api/tools/custom/route')
|
||||
|
||||
// Call the handler
|
||||
const response = await DELETE(req)
|
||||
const data = await response.json()
|
||||
|
||||
// Verify response
|
||||
expect(response.status).toBe(401)
|
||||
expect(data).toHaveProperty('error', 'Unauthorized')
|
||||
})
|
||||
|
||||
73
apps/sim/app/api/tools/dynamodb/introspect/route.ts
Normal file
73
apps/sim/app/api/tools/dynamodb/introspect/route.ts
Normal file
@@ -0,0 +1,73 @@
|
||||
import { randomUUID } from 'crypto'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { createRawDynamoDBClient, describeTable, listTables } from '@/app/api/tools/dynamodb/utils'
|
||||
|
||||
const logger = createLogger('DynamoDBIntrospectAPI')
|
||||
|
||||
const IntrospectSchema = z.object({
|
||||
region: z.string().min(1, 'AWS region is required'),
|
||||
accessKeyId: z.string().min(1, 'AWS access key ID is required'),
|
||||
secretAccessKey: z.string().min(1, 'AWS secret access key is required'),
|
||||
tableName: z.string().optional(),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const requestId = randomUUID().slice(0, 8)
|
||||
|
||||
try {
|
||||
const body = await request.json()
|
||||
const params = IntrospectSchema.parse(body)
|
||||
|
||||
logger.info(`[${requestId}] Introspecting DynamoDB in region ${params.region}`)
|
||||
|
||||
const client = createRawDynamoDBClient({
|
||||
region: params.region,
|
||||
accessKeyId: params.accessKeyId,
|
||||
secretAccessKey: params.secretAccessKey,
|
||||
})
|
||||
|
||||
try {
|
||||
const { tables } = await listTables(client)
|
||||
|
||||
if (params.tableName) {
|
||||
logger.info(`[${requestId}] Describing table: ${params.tableName}`)
|
||||
const { tableDetails } = await describeTable(client, params.tableName)
|
||||
|
||||
logger.info(`[${requestId}] Table description completed for '${params.tableName}'`)
|
||||
|
||||
return NextResponse.json({
|
||||
message: `Table '${params.tableName}' described successfully.`,
|
||||
tables,
|
||||
tableDetails,
|
||||
})
|
||||
}
|
||||
|
||||
logger.info(`[${requestId}] Listed ${tables.length} tables`)
|
||||
|
||||
return NextResponse.json({
|
||||
message: `Found ${tables.length} table(s) in region '${params.region}'.`,
|
||||
tables,
|
||||
})
|
||||
} finally {
|
||||
client.destroy()
|
||||
}
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
|
||||
return NextResponse.json(
|
||||
{ error: 'Invalid request data', details: error.errors },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred'
|
||||
logger.error(`[${requestId}] DynamoDB introspection failed:`, error)
|
||||
|
||||
return NextResponse.json(
|
||||
{ error: `DynamoDB introspection failed: ${errorMessage}` },
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -1,4 +1,4 @@
|
||||
import { DynamoDBClient } from '@aws-sdk/client-dynamodb'
|
||||
import { DescribeTableCommand, DynamoDBClient, ListTablesCommand } from '@aws-sdk/client-dynamodb'
|
||||
import {
|
||||
DeleteCommand,
|
||||
DynamoDBDocumentClient,
|
||||
@@ -8,7 +8,7 @@ import {
|
||||
ScanCommand,
|
||||
UpdateCommand,
|
||||
} from '@aws-sdk/lib-dynamodb'
|
||||
import type { DynamoDBConnectionConfig } from '@/tools/dynamodb/types'
|
||||
import type { DynamoDBConnectionConfig, DynamoDBTableSchema } from '@/tools/dynamodb/types'
|
||||
|
||||
export function createDynamoDBClient(config: DynamoDBConnectionConfig): DynamoDBDocumentClient {
|
||||
const client = new DynamoDBClient({
|
||||
@@ -172,3 +172,99 @@ export async function deleteItem(
|
||||
await client.send(command)
|
||||
return { success: true }
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a raw DynamoDB client for operations that don't require DocumentClient
|
||||
*/
|
||||
export function createRawDynamoDBClient(config: DynamoDBConnectionConfig): DynamoDBClient {
|
||||
return new DynamoDBClient({
|
||||
region: config.region,
|
||||
credentials: {
|
||||
accessKeyId: config.accessKeyId,
|
||||
secretAccessKey: config.secretAccessKey,
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Lists all DynamoDB tables in the configured region
|
||||
*/
|
||||
export async function listTables(client: DynamoDBClient): Promise<{ tables: string[] }> {
|
||||
const tables: string[] = []
|
||||
let exclusiveStartTableName: string | undefined
|
||||
|
||||
do {
|
||||
const command = new ListTablesCommand({
|
||||
ExclusiveStartTableName: exclusiveStartTableName,
|
||||
})
|
||||
|
||||
const response = await client.send(command)
|
||||
if (response.TableNames) {
|
||||
tables.push(...response.TableNames)
|
||||
}
|
||||
exclusiveStartTableName = response.LastEvaluatedTableName
|
||||
} while (exclusiveStartTableName)
|
||||
|
||||
return { tables }
|
||||
}
|
||||
|
||||
/**
|
||||
* Describes a specific DynamoDB table and returns its schema information
|
||||
*/
|
||||
export async function describeTable(
|
||||
client: DynamoDBClient,
|
||||
tableName: string
|
||||
): Promise<{ tableDetails: DynamoDBTableSchema }> {
|
||||
const command = new DescribeTableCommand({
|
||||
TableName: tableName,
|
||||
})
|
||||
|
||||
const response = await client.send(command)
|
||||
const table = response.Table
|
||||
|
||||
if (!table) {
|
||||
throw new Error(`Table '${tableName}' not found`)
|
||||
}
|
||||
|
||||
const tableDetails: DynamoDBTableSchema = {
|
||||
tableName: table.TableName || tableName,
|
||||
tableStatus: table.TableStatus || 'UNKNOWN',
|
||||
keySchema:
|
||||
table.KeySchema?.map((key) => ({
|
||||
attributeName: key.AttributeName || '',
|
||||
keyType: (key.KeyType as 'HASH' | 'RANGE') || 'HASH',
|
||||
})) || [],
|
||||
attributeDefinitions:
|
||||
table.AttributeDefinitions?.map((attr) => ({
|
||||
attributeName: attr.AttributeName || '',
|
||||
attributeType: (attr.AttributeType as 'S' | 'N' | 'B') || 'S',
|
||||
})) || [],
|
||||
globalSecondaryIndexes:
|
||||
table.GlobalSecondaryIndexes?.map((gsi) => ({
|
||||
indexName: gsi.IndexName || '',
|
||||
keySchema:
|
||||
gsi.KeySchema?.map((key) => ({
|
||||
attributeName: key.AttributeName || '',
|
||||
keyType: (key.KeyType as 'HASH' | 'RANGE') || 'HASH',
|
||||
})) || [],
|
||||
projectionType: gsi.Projection?.ProjectionType || 'ALL',
|
||||
indexStatus: gsi.IndexStatus || 'UNKNOWN',
|
||||
})) || [],
|
||||
localSecondaryIndexes:
|
||||
table.LocalSecondaryIndexes?.map((lsi) => ({
|
||||
indexName: lsi.IndexName || '',
|
||||
keySchema:
|
||||
lsi.KeySchema?.map((key) => ({
|
||||
attributeName: key.AttributeName || '',
|
||||
keyType: (key.KeyType as 'HASH' | 'RANGE') || 'HASH',
|
||||
})) || [],
|
||||
projectionType: lsi.Projection?.ProjectionType || 'ALL',
|
||||
indexStatus: 'ACTIVE',
|
||||
})) || [],
|
||||
itemCount: Number(table.ItemCount) || 0,
|
||||
tableSizeBytes: Number(table.TableSizeBytes) || 0,
|
||||
billingMode: table.BillingModeSummary?.BillingMode || 'PROVISIONED',
|
||||
}
|
||||
|
||||
return { tableDetails }
|
||||
}
|
||||
|
||||
73
apps/sim/app/api/tools/mongodb/introspect/route.ts
Normal file
73
apps/sim/app/api/tools/mongodb/introspect/route.ts
Normal file
@@ -0,0 +1,73 @@
|
||||
import { randomUUID } from 'crypto'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { createMongoDBConnection, executeIntrospect } from '../utils'
|
||||
|
||||
const logger = createLogger('MongoDBIntrospectAPI')
|
||||
|
||||
const IntrospectSchema = z.object({
|
||||
host: z.string().min(1, 'Host is required'),
|
||||
port: z.coerce.number().int().positive('Port must be a positive integer'),
|
||||
database: z.string().optional(),
|
||||
username: z.string().optional(),
|
||||
password: z.string().optional(),
|
||||
authSource: z.string().optional(),
|
||||
ssl: z.enum(['disabled', 'required', 'preferred']).default('preferred'),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const requestId = randomUUID().slice(0, 8)
|
||||
let client = null
|
||||
|
||||
try {
|
||||
const body = await request.json()
|
||||
const params = IntrospectSchema.parse(body)
|
||||
|
||||
logger.info(
|
||||
`[${requestId}] Introspecting MongoDB at ${params.host}:${params.port}${params.database ? `/${params.database}` : ''}`
|
||||
)
|
||||
|
||||
client = await createMongoDBConnection({
|
||||
host: params.host,
|
||||
port: params.port,
|
||||
database: params.database || 'admin',
|
||||
username: params.username,
|
||||
password: params.password,
|
||||
authSource: params.authSource,
|
||||
ssl: params.ssl,
|
||||
})
|
||||
|
||||
const result = await executeIntrospect(client, params.database)
|
||||
|
||||
logger.info(
|
||||
`[${requestId}] Introspection completed: ${result.databases.length} databases, ${result.collections.length} collections`
|
||||
)
|
||||
|
||||
return NextResponse.json({
|
||||
message: result.message,
|
||||
databases: result.databases,
|
||||
collections: result.collections,
|
||||
})
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
|
||||
return NextResponse.json(
|
||||
{ error: 'Invalid request data', details: error.errors },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred'
|
||||
logger.error(`[${requestId}] MongoDB introspect failed:`, error)
|
||||
|
||||
return NextResponse.json(
|
||||
{ error: `MongoDB introspect failed: ${errorMessage}` },
|
||||
{ status: 500 }
|
||||
)
|
||||
} finally {
|
||||
if (client) {
|
||||
await client.close()
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,5 +1,5 @@
|
||||
import { MongoClient } from 'mongodb'
|
||||
import type { MongoDBConnectionConfig } from '@/tools/mongodb/types'
|
||||
import type { MongoDBCollectionInfo, MongoDBConnectionConfig } from '@/tools/mongodb/types'
|
||||
|
||||
export async function createMongoDBConnection(config: MongoDBConnectionConfig) {
|
||||
const credentials =
|
||||
@@ -129,3 +129,59 @@ export function sanitizeCollectionName(name: string): string {
|
||||
}
|
||||
return name
|
||||
}
|
||||
|
||||
/**
|
||||
* Introspect MongoDB to get databases, collections, and indexes
|
||||
*/
|
||||
export async function executeIntrospect(
|
||||
client: MongoClient,
|
||||
database?: string
|
||||
): Promise<{
|
||||
message: string
|
||||
databases: string[]
|
||||
collections: MongoDBCollectionInfo[]
|
||||
}> {
|
||||
const databases: string[] = []
|
||||
const collections: MongoDBCollectionInfo[] = []
|
||||
|
||||
if (database) {
|
||||
databases.push(database)
|
||||
const db = client.db(database)
|
||||
const collectionList = await db.listCollections().toArray()
|
||||
|
||||
for (const collInfo of collectionList) {
|
||||
const coll = db.collection(collInfo.name)
|
||||
const indexes = await coll.indexes()
|
||||
const documentCount = await coll.estimatedDocumentCount()
|
||||
|
||||
collections.push({
|
||||
name: collInfo.name,
|
||||
type: collInfo.type || 'collection',
|
||||
documentCount,
|
||||
indexes: indexes.map((idx) => ({
|
||||
name: idx.name || '',
|
||||
key: idx.key as Record<string, number>,
|
||||
unique: idx.unique || false,
|
||||
sparse: idx.sparse,
|
||||
})),
|
||||
})
|
||||
}
|
||||
} else {
|
||||
const admin = client.db().admin()
|
||||
const dbList = await admin.listDatabases()
|
||||
|
||||
for (const dbInfo of dbList.databases) {
|
||||
databases.push(dbInfo.name)
|
||||
}
|
||||
}
|
||||
|
||||
const message = database
|
||||
? `Found ${collections.length} collections in database '${database}'`
|
||||
: `Found ${databases.length} databases`
|
||||
|
||||
return {
|
||||
message,
|
||||
databases,
|
||||
collections,
|
||||
}
|
||||
}
|
||||
|
||||
70
apps/sim/app/api/tools/mysql/introspect/route.ts
Normal file
70
apps/sim/app/api/tools/mysql/introspect/route.ts
Normal file
@@ -0,0 +1,70 @@
|
||||
import { randomUUID } from 'crypto'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { createMySQLConnection, executeIntrospect } from '@/app/api/tools/mysql/utils'
|
||||
|
||||
const logger = createLogger('MySQLIntrospectAPI')
|
||||
|
||||
const IntrospectSchema = z.object({
|
||||
host: z.string().min(1, 'Host is required'),
|
||||
port: z.coerce.number().int().positive('Port must be a positive integer'),
|
||||
database: z.string().min(1, 'Database name is required'),
|
||||
username: z.string().min(1, 'Username is required'),
|
||||
password: z.string().min(1, 'Password is required'),
|
||||
ssl: z.enum(['disabled', 'required', 'preferred']).default('preferred'),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const requestId = randomUUID().slice(0, 8)
|
||||
|
||||
try {
|
||||
const body = await request.json()
|
||||
const params = IntrospectSchema.parse(body)
|
||||
|
||||
logger.info(
|
||||
`[${requestId}] Introspecting MySQL schema on ${params.host}:${params.port}/${params.database}`
|
||||
)
|
||||
|
||||
const connection = await createMySQLConnection({
|
||||
host: params.host,
|
||||
port: params.port,
|
||||
database: params.database,
|
||||
username: params.username,
|
||||
password: params.password,
|
||||
ssl: params.ssl,
|
||||
})
|
||||
|
||||
try {
|
||||
const result = await executeIntrospect(connection, params.database)
|
||||
|
||||
logger.info(
|
||||
`[${requestId}] Introspection completed successfully, found ${result.tables.length} tables`
|
||||
)
|
||||
|
||||
return NextResponse.json({
|
||||
message: `Schema introspection completed. Found ${result.tables.length} table(s) in database '${params.database}'.`,
|
||||
tables: result.tables,
|
||||
databases: result.databases,
|
||||
})
|
||||
} finally {
|
||||
await connection.end()
|
||||
}
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
|
||||
return NextResponse.json(
|
||||
{ error: 'Invalid request data', details: error.errors },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred'
|
||||
logger.error(`[${requestId}] MySQL introspection failed:`, error)
|
||||
|
||||
return NextResponse.json(
|
||||
{ error: `MySQL introspection failed: ${errorMessage}` },
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -166,3 +166,146 @@ function sanitizeSingleIdentifier(identifier: string): string {
|
||||
|
||||
return `\`${cleaned}\``
|
||||
}
|
||||
|
||||
export interface MySQLIntrospectionResult {
|
||||
tables: Array<{
|
||||
name: string
|
||||
database: string
|
||||
columns: Array<{
|
||||
name: string
|
||||
type: string
|
||||
nullable: boolean
|
||||
default: string | null
|
||||
isPrimaryKey: boolean
|
||||
isForeignKey: boolean
|
||||
autoIncrement: boolean
|
||||
references?: {
|
||||
table: string
|
||||
column: string
|
||||
}
|
||||
}>
|
||||
primaryKey: string[]
|
||||
foreignKeys: Array<{
|
||||
column: string
|
||||
referencesTable: string
|
||||
referencesColumn: string
|
||||
}>
|
||||
indexes: Array<{
|
||||
name: string
|
||||
columns: string[]
|
||||
unique: boolean
|
||||
}>
|
||||
}>
|
||||
databases: string[]
|
||||
}
|
||||
|
||||
export async function executeIntrospect(
|
||||
connection: mysql.Connection,
|
||||
databaseName: string
|
||||
): Promise<MySQLIntrospectionResult> {
|
||||
const [databasesRows] = await connection.execute<mysql.RowDataPacket[]>(
|
||||
`SELECT SCHEMA_NAME FROM INFORMATION_SCHEMA.SCHEMATA
|
||||
WHERE SCHEMA_NAME NOT IN ('mysql', 'information_schema', 'performance_schema', 'sys')
|
||||
ORDER BY SCHEMA_NAME`
|
||||
)
|
||||
const databases = databasesRows.map((row) => row.SCHEMA_NAME)
|
||||
|
||||
const [tablesRows] = await connection.execute<mysql.RowDataPacket[]>(
|
||||
`SELECT TABLE_NAME FROM INFORMATION_SCHEMA.TABLES
|
||||
WHERE TABLE_SCHEMA = ? AND TABLE_TYPE = 'BASE TABLE'
|
||||
ORDER BY TABLE_NAME`,
|
||||
[databaseName]
|
||||
)
|
||||
|
||||
const tables = []
|
||||
|
||||
for (const tableRow of tablesRows) {
|
||||
const tableName = tableRow.TABLE_NAME
|
||||
|
||||
const [columnsRows] = await connection.execute<mysql.RowDataPacket[]>(
|
||||
`SELECT COLUMN_NAME, DATA_TYPE, COLUMN_TYPE, IS_NULLABLE, COLUMN_DEFAULT, EXTRA
|
||||
FROM INFORMATION_SCHEMA.COLUMNS
|
||||
WHERE TABLE_SCHEMA = ? AND TABLE_NAME = ?
|
||||
ORDER BY ORDINAL_POSITION`,
|
||||
[databaseName, tableName]
|
||||
)
|
||||
|
||||
const [pkRows] = await connection.execute<mysql.RowDataPacket[]>(
|
||||
`SELECT COLUMN_NAME FROM INFORMATION_SCHEMA.KEY_COLUMN_USAGE
|
||||
WHERE TABLE_SCHEMA = ? AND TABLE_NAME = ? AND CONSTRAINT_NAME = 'PRIMARY'
|
||||
ORDER BY ORDINAL_POSITION`,
|
||||
[databaseName, tableName]
|
||||
)
|
||||
const primaryKeyColumns = pkRows.map((row) => row.COLUMN_NAME)
|
||||
|
||||
const [fkRows] = await connection.execute<mysql.RowDataPacket[]>(
|
||||
`SELECT kcu.COLUMN_NAME, kcu.REFERENCED_TABLE_NAME, kcu.REFERENCED_COLUMN_NAME
|
||||
FROM INFORMATION_SCHEMA.KEY_COLUMN_USAGE kcu
|
||||
WHERE kcu.TABLE_SCHEMA = ? AND kcu.TABLE_NAME = ? AND kcu.REFERENCED_TABLE_NAME IS NOT NULL`,
|
||||
[databaseName, tableName]
|
||||
)
|
||||
|
||||
const foreignKeys = fkRows.map((row) => ({
|
||||
column: row.COLUMN_NAME,
|
||||
referencesTable: row.REFERENCED_TABLE_NAME,
|
||||
referencesColumn: row.REFERENCED_COLUMN_NAME,
|
||||
}))
|
||||
|
||||
const fkColumnSet = new Set(foreignKeys.map((fk) => fk.column))
|
||||
|
||||
const [indexRows] = await connection.execute<mysql.RowDataPacket[]>(
|
||||
`SELECT INDEX_NAME, COLUMN_NAME, SEQ_IN_INDEX, NON_UNIQUE
|
||||
FROM INFORMATION_SCHEMA.STATISTICS
|
||||
WHERE TABLE_SCHEMA = ? AND TABLE_NAME = ? AND INDEX_NAME != 'PRIMARY'
|
||||
ORDER BY INDEX_NAME, SEQ_IN_INDEX`,
|
||||
[databaseName, tableName]
|
||||
)
|
||||
|
||||
const indexMap = new Map<string, { name: string; columns: string[]; unique: boolean }>()
|
||||
for (const row of indexRows) {
|
||||
const indexName = row.INDEX_NAME
|
||||
if (!indexMap.has(indexName)) {
|
||||
indexMap.set(indexName, {
|
||||
name: indexName,
|
||||
columns: [],
|
||||
unique: row.NON_UNIQUE === 0,
|
||||
})
|
||||
}
|
||||
indexMap.get(indexName)!.columns.push(row.COLUMN_NAME)
|
||||
}
|
||||
const indexes = Array.from(indexMap.values())
|
||||
|
||||
const columns = columnsRows.map((col) => {
|
||||
const columnName = col.COLUMN_NAME
|
||||
const fk = foreignKeys.find((f) => f.column === columnName)
|
||||
const isAutoIncrement = col.EXTRA?.toLowerCase().includes('auto_increment') || false
|
||||
|
||||
return {
|
||||
name: columnName,
|
||||
type: col.COLUMN_TYPE || col.DATA_TYPE,
|
||||
nullable: col.IS_NULLABLE === 'YES',
|
||||
default: col.COLUMN_DEFAULT,
|
||||
isPrimaryKey: primaryKeyColumns.includes(columnName),
|
||||
isForeignKey: fkColumnSet.has(columnName),
|
||||
autoIncrement: isAutoIncrement,
|
||||
...(fk && {
|
||||
references: {
|
||||
table: fk.referencesTable,
|
||||
column: fk.referencesColumn,
|
||||
},
|
||||
}),
|
||||
}
|
||||
})
|
||||
|
||||
tables.push({
|
||||
name: tableName,
|
||||
database: databaseName,
|
||||
columns,
|
||||
primaryKey: primaryKeyColumns,
|
||||
foreignKeys,
|
||||
indexes,
|
||||
})
|
||||
}
|
||||
|
||||
return { tables, databases }
|
||||
}
|
||||
|
||||
199
apps/sim/app/api/tools/neo4j/introspect/route.ts
Normal file
199
apps/sim/app/api/tools/neo4j/introspect/route.ts
Normal file
@@ -0,0 +1,199 @@
|
||||
import { randomUUID } from 'crypto'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { createNeo4jDriver } from '@/app/api/tools/neo4j/utils'
|
||||
import type { Neo4jNodeSchema, Neo4jRelationshipSchema } from '@/tools/neo4j/types'
|
||||
|
||||
const logger = createLogger('Neo4jIntrospectAPI')
|
||||
|
||||
const IntrospectSchema = z.object({
|
||||
host: z.string().min(1, 'Host is required'),
|
||||
port: z.coerce.number().int().positive('Port must be a positive integer'),
|
||||
database: z.string().min(1, 'Database name is required'),
|
||||
username: z.string().min(1, 'Username is required'),
|
||||
password: z.string().min(1, 'Password is required'),
|
||||
encryption: z.enum(['enabled', 'disabled']).default('disabled'),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const requestId = randomUUID().slice(0, 8)
|
||||
let driver = null
|
||||
let session = null
|
||||
|
||||
try {
|
||||
const body = await request.json()
|
||||
const params = IntrospectSchema.parse(body)
|
||||
|
||||
logger.info(
|
||||
`[${requestId}] Introspecting Neo4j database at ${params.host}:${params.port}/${params.database}`
|
||||
)
|
||||
|
||||
driver = await createNeo4jDriver({
|
||||
host: params.host,
|
||||
port: params.port,
|
||||
database: params.database,
|
||||
username: params.username,
|
||||
password: params.password,
|
||||
encryption: params.encryption,
|
||||
})
|
||||
|
||||
session = driver.session({ database: params.database })
|
||||
|
||||
const labelsResult = await session.run(
|
||||
'CALL db.labels() YIELD label RETURN label ORDER BY label'
|
||||
)
|
||||
const labels: string[] = labelsResult.records.map((record) => record.get('label') as string)
|
||||
|
||||
const relationshipTypesResult = await session.run(
|
||||
'CALL db.relationshipTypes() YIELD relationshipType RETURN relationshipType ORDER BY relationshipType'
|
||||
)
|
||||
const relationshipTypes: string[] = relationshipTypesResult.records.map(
|
||||
(record) => record.get('relationshipType') as string
|
||||
)
|
||||
|
||||
const nodeSchemas: Neo4jNodeSchema[] = []
|
||||
try {
|
||||
const nodePropertiesResult = await session.run(
|
||||
'CALL db.schema.nodeTypeProperties() YIELD nodeLabels, propertyName, propertyTypes RETURN nodeLabels, propertyName, propertyTypes'
|
||||
)
|
||||
|
||||
const nodePropertiesMap = new Map<string, Array<{ name: string; types: string[] }>>()
|
||||
|
||||
for (const record of nodePropertiesResult.records) {
|
||||
const nodeLabels = record.get('nodeLabels') as string[]
|
||||
const propertyName = record.get('propertyName') as string
|
||||
const propertyTypes = record.get('propertyTypes') as string[]
|
||||
|
||||
const labelKey = nodeLabels.join(':')
|
||||
if (!nodePropertiesMap.has(labelKey)) {
|
||||
nodePropertiesMap.set(labelKey, [])
|
||||
}
|
||||
nodePropertiesMap.get(labelKey)!.push({ name: propertyName, types: propertyTypes })
|
||||
}
|
||||
|
||||
for (const [labelKey, properties] of nodePropertiesMap) {
|
||||
nodeSchemas.push({
|
||||
label: labelKey,
|
||||
properties,
|
||||
})
|
||||
}
|
||||
} catch (nodePropsError) {
|
||||
logger.warn(
|
||||
`[${requestId}] Could not fetch node properties (may not be supported in this Neo4j version): ${nodePropsError}`
|
||||
)
|
||||
}
|
||||
|
||||
const relationshipSchemas: Neo4jRelationshipSchema[] = []
|
||||
try {
|
||||
const relPropertiesResult = await session.run(
|
||||
'CALL db.schema.relTypeProperties() YIELD relationshipType, propertyName, propertyTypes RETURN relationshipType, propertyName, propertyTypes'
|
||||
)
|
||||
|
||||
const relPropertiesMap = new Map<string, Array<{ name: string; types: string[] }>>()
|
||||
|
||||
for (const record of relPropertiesResult.records) {
|
||||
const relType = record.get('relationshipType') as string
|
||||
const propertyName = record.get('propertyName') as string | null
|
||||
const propertyTypes = record.get('propertyTypes') as string[]
|
||||
|
||||
if (!relPropertiesMap.has(relType)) {
|
||||
relPropertiesMap.set(relType, [])
|
||||
}
|
||||
if (propertyName) {
|
||||
relPropertiesMap.get(relType)!.push({ name: propertyName, types: propertyTypes })
|
||||
}
|
||||
}
|
||||
|
||||
for (const [relType, properties] of relPropertiesMap) {
|
||||
relationshipSchemas.push({
|
||||
type: relType,
|
||||
properties,
|
||||
})
|
||||
}
|
||||
} catch (relPropsError) {
|
||||
logger.warn(
|
||||
`[${requestId}] Could not fetch relationship properties (may not be supported in this Neo4j version): ${relPropsError}`
|
||||
)
|
||||
}
|
||||
|
||||
const constraints: Array<{
|
||||
name: string
|
||||
type: string
|
||||
entityType: string
|
||||
properties: string[]
|
||||
}> = []
|
||||
try {
|
||||
const constraintsResult = await session.run('SHOW CONSTRAINTS')
|
||||
|
||||
for (const record of constraintsResult.records) {
|
||||
const name = record.get('name') as string
|
||||
const type = record.get('type') as string
|
||||
const entityType = record.get('entityType') as string
|
||||
const properties = (record.get('properties') as string[]) || []
|
||||
|
||||
constraints.push({ name, type, entityType, properties })
|
||||
}
|
||||
} catch (constraintsError) {
|
||||
logger.warn(
|
||||
`[${requestId}] Could not fetch constraints (may not be supported in this Neo4j version): ${constraintsError}`
|
||||
)
|
||||
}
|
||||
|
||||
const indexes: Array<{ name: string; type: string; entityType: string; properties: string[] }> =
|
||||
[]
|
||||
try {
|
||||
const indexesResult = await session.run('SHOW INDEXES')
|
||||
|
||||
for (const record of indexesResult.records) {
|
||||
const name = record.get('name') as string
|
||||
const type = record.get('type') as string
|
||||
const entityType = record.get('entityType') as string
|
||||
const properties = (record.get('properties') as string[]) || []
|
||||
|
||||
indexes.push({ name, type, entityType, properties })
|
||||
}
|
||||
} catch (indexesError) {
|
||||
logger.warn(
|
||||
`[${requestId}] Could not fetch indexes (may not be supported in this Neo4j version): ${indexesError}`
|
||||
)
|
||||
}
|
||||
|
||||
logger.info(
|
||||
`[${requestId}] Introspection completed: ${labels.length} labels, ${relationshipTypes.length} relationship types, ${constraints.length} constraints, ${indexes.length} indexes`
|
||||
)
|
||||
|
||||
return NextResponse.json({
|
||||
message: `Database introspection completed: found ${labels.length} labels, ${relationshipTypes.length} relationship types, ${nodeSchemas.length} node schemas, ${relationshipSchemas.length} relationship schemas, ${constraints.length} constraints, ${indexes.length} indexes`,
|
||||
labels,
|
||||
relationshipTypes,
|
||||
nodeSchemas,
|
||||
relationshipSchemas,
|
||||
constraints,
|
||||
indexes,
|
||||
})
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
|
||||
return NextResponse.json(
|
||||
{ error: 'Invalid request data', details: error.errors },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred'
|
||||
logger.error(`[${requestId}] Neo4j introspection failed:`, error)
|
||||
|
||||
return NextResponse.json(
|
||||
{ error: `Neo4j introspection failed: ${errorMessage}` },
|
||||
{ status: 500 }
|
||||
)
|
||||
} finally {
|
||||
if (session) {
|
||||
await session.close()
|
||||
}
|
||||
if (driver) {
|
||||
await driver.close()
|
||||
}
|
||||
}
|
||||
}
|
||||
71
apps/sim/app/api/tools/postgresql/introspect/route.ts
Normal file
71
apps/sim/app/api/tools/postgresql/introspect/route.ts
Normal file
@@ -0,0 +1,71 @@
|
||||
import { randomUUID } from 'crypto'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { createPostgresConnection, executeIntrospect } from '@/app/api/tools/postgresql/utils'
|
||||
|
||||
const logger = createLogger('PostgreSQLIntrospectAPI')
|
||||
|
||||
const IntrospectSchema = z.object({
|
||||
host: z.string().min(1, 'Host is required'),
|
||||
port: z.coerce.number().int().positive('Port must be a positive integer'),
|
||||
database: z.string().min(1, 'Database name is required'),
|
||||
username: z.string().min(1, 'Username is required'),
|
||||
password: z.string().min(1, 'Password is required'),
|
||||
ssl: z.enum(['disabled', 'required', 'preferred']).default('preferred'),
|
||||
schema: z.string().default('public'),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const requestId = randomUUID().slice(0, 8)
|
||||
|
||||
try {
|
||||
const body = await request.json()
|
||||
const params = IntrospectSchema.parse(body)
|
||||
|
||||
logger.info(
|
||||
`[${requestId}] Introspecting PostgreSQL schema on ${params.host}:${params.port}/${params.database}`
|
||||
)
|
||||
|
||||
const sql = createPostgresConnection({
|
||||
host: params.host,
|
||||
port: params.port,
|
||||
database: params.database,
|
||||
username: params.username,
|
||||
password: params.password,
|
||||
ssl: params.ssl,
|
||||
})
|
||||
|
||||
try {
|
||||
const result = await executeIntrospect(sql, params.schema)
|
||||
|
||||
logger.info(
|
||||
`[${requestId}] Introspection completed successfully, found ${result.tables.length} tables`
|
||||
)
|
||||
|
||||
return NextResponse.json({
|
||||
message: `Schema introspection completed. Found ${result.tables.length} table(s) in schema '${params.schema}'.`,
|
||||
tables: result.tables,
|
||||
schemas: result.schemas,
|
||||
})
|
||||
} finally {
|
||||
await sql.end()
|
||||
}
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
|
||||
return NextResponse.json(
|
||||
{ error: 'Invalid request data', details: error.errors },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred'
|
||||
logger.error(`[${requestId}] PostgreSQL introspection failed:`, error)
|
||||
|
||||
return NextResponse.json(
|
||||
{ error: `PostgreSQL introspection failed: ${errorMessage}` },
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user