mirror of https://github.com/simstudioai/sim.git
synced 2026-02-05 04:05:14 -05:00

Compare commits: fix/onedri... → feat/copil... (19 commits)

- 8b87a07508
- 728463ace7
- 8cea43d926
- 2198a6caae
- 0ba8c0ad29
- a9e2f4a82e
- 1e99f45590
- 1ec0ec4c1a
- 2ce78e8c60
- ea759b2d00
- 09d7fc671f
- ea210a56a8
- c05b70be1e
- 413c53208e
- b8ccd71423
- 8f556684a6
- a391019995
- d6179e7691
- e68e653d5c
@@ -206,15 +206,10 @@ export const {Service}Block: BlockConfig = {
 }
 ```
 
-**Critical Canonical Param Rules:**
-- `canonicalParamId` must NOT match any subblock's `id` in the block
-- `canonicalParamId` must be unique per operation/condition context
-- Only use `canonicalParamId` to link basic/advanced alternatives for the same logical parameter
-- `mode` only controls UI visibility, NOT serialization. Without `canonicalParamId`, both basic and advanced field values would be sent
-- Every subblock `id` must be unique within the block. Duplicate IDs cause conflicts even with different conditions
-- **Required consistency:** If one subblock in a canonical group has `required: true`, ALL subblocks in that group must have `required: true` (prevents bypassing validation by switching modes)
-- **Inputs section:** Must list canonical param IDs (e.g., `fileId`), NOT raw subblock IDs (e.g., `fileSelector`, `manualFileId`)
-- **Params function:** Must use canonical param IDs, NOT raw subblock IDs (raw IDs are deleted after canonical transformation)
+**Critical:**
+- `canonicalParamId` must NOT match any other subblock's `id`, must be unique per block, and should only be used to link basic/advanced alternatives for the same parameter.
+- `mode` only controls UI visibility, NOT serialization. Without `canonicalParamId`, both basic and advanced field values would be sent.
+- Every subblock `id` must be unique within the block. Duplicate IDs cause conflicts even with different conditions.
 
 ## Step 4: Add Icon
 
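The "Inputs section" and "Params function" rules above can be illustrated with a minimal sketch. Only the canonical id `fileId` survives serialization; the raw subblock ids do not. The surrounding `inputs`/`tools.config.params` field names are assumptions for illustration, not confirmed from the diff:

```typescript
// Minimal sketch (assumed BlockConfig fragment): reference the canonical id,
// never the raw subblock ids `fileSelector` / `manualFileId`.
inputs: {
  fileId: { type: 'string', description: 'ID of the file to operate on' },
},
tools: {
  config: {
    params: (params: Record<string, any>) => ({
      // Raw subblock ids are deleted after the canonical transformation,
      // so only `params.fileId` is available here.
      fileId: params.fileId,
    }),
  },
},
```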
@@ -157,36 +157,6 @@ dependsOn: { all: ['authMethod'], any: ['credential', 'botToken'] }
 - `'both'` - Show in both modes (default)
 - `'trigger'` - Only when block is used as trigger
 
-### `canonicalParamId` - Link basic/advanced alternatives
-
-Use to map multiple UI inputs to a single logical parameter:
-
-```typescript
-// Basic mode: Visual selector
-{
-  id: 'fileSelector',
-  type: 'file-selector',
-  mode: 'basic',
-  canonicalParamId: 'fileId',
-  required: true,
-},
-// Advanced mode: Manual input
-{
-  id: 'manualFileId',
-  type: 'short-input',
-  mode: 'advanced',
-  canonicalParamId: 'fileId',
-  required: true,
-},
-```
-
-**Critical Rules:**
-- `canonicalParamId` must NOT match any subblock's `id`
-- `canonicalParamId` must be unique per operation/condition context
-- **Required consistency:** All subblocks in a canonical group must have the same `required` status
-- **Inputs section:** Must list canonical param IDs (e.g., `fileId`), NOT raw subblock IDs
-- **Params function:** Must use canonical param IDs (raw IDs are deleted after canonical transformation)
-
 **Register in `blocks/registry.ts`:**
 
 ```typescript
@@ -155,36 +155,6 @@ dependsOn: { all: ['authMethod'], any: ['credential', 'botToken'] }
 - `'both'` - Show in both modes (default)
 - `'trigger'` - Only when block is used as trigger
 
-### `canonicalParamId` - Link basic/advanced alternatives
-
-Use to map multiple UI inputs to a single logical parameter:
-
-```typescript
-// Basic mode: Visual selector
-{
-  id: 'fileSelector',
-  type: 'file-selector',
-  mode: 'basic',
-  canonicalParamId: 'fileId',
-  required: true,
-},
-// Advanced mode: Manual input
-{
-  id: 'manualFileId',
-  type: 'short-input',
-  mode: 'advanced',
-  canonicalParamId: 'fileId',
-  required: true,
-},
-```
-
-**Critical Rules:**
-- `canonicalParamId` must NOT match any subblock's `id`
-- `canonicalParamId` must be unique per operation/condition context
-- **Required consistency:** All subblocks in a canonical group must have the same `required` status
-- **Inputs section:** Must list canonical param IDs (e.g., `fileId`), NOT raw subblock IDs
-- **Params function:** Must use canonical param IDs (raw IDs are deleted after canonical transformation)
-
 **Register in `blocks/registry.ts`:**
 
 ```typescript
@@ -163,9 +163,9 @@ export const blockTypeToIconMap: Record<string, IconComponent> = {
   elevenlabs: ElevenLabsIcon,
   enrich: EnrichSoIcon,
   exa: ExaAIIcon,
-  file_v3: DocumentIcon,
+  file_v2: DocumentIcon,
   firecrawl: FirecrawlIcon,
-  fireflies_v2: FirefliesIcon,
+  fireflies: FirefliesIcon,
   github_v2: GithubIcon,
   gitlab: GitLabIcon,
   gmail_v2: GmailIcon,
@@ -177,7 +177,7 @@ export const blockTypeToIconMap: Record<string, IconComponent> = {
   google_maps: GoogleMapsIcon,
   google_search: GoogleIcon,
   google_sheets_v2: GoogleSheetsIcon,
-  google_slides_v2: GoogleSlidesIcon,
+  google_slides: GoogleSlidesIcon,
   google_vault: GoogleVaultIcon,
   grafana: GrafanaIcon,
   grain: GrainIcon,
@@ -206,7 +206,7 @@ export const blockTypeToIconMap: Record<string, IconComponent> = {
   microsoft_excel_v2: MicrosoftExcelIcon,
   microsoft_planner: MicrosoftPlannerIcon,
   microsoft_teams: MicrosoftTeamsIcon,
-  mistral_parse_v3: MistralIcon,
+  mistral_parse_v2: MistralIcon,
   mongodb: MongoDBIcon,
   mysql: MySQLIcon,
   neo4j: Neo4jIcon,
@@ -221,11 +221,11 @@ export const blockTypeToIconMap: Record<string, IconComponent> = {
   polymarket: PolymarketIcon,
   postgresql: PostgresIcon,
   posthog: PosthogIcon,
-  pulse_v2: PulseIcon,
+  pulse: PulseIcon,
   qdrant: QdrantIcon,
   rds: RDSIcon,
   reddit: RedditIcon,
-  reducto_v2: ReductoIcon,
+  reducto: ReductoIcon,
   resend: ResendIcon,
   s3: S3Icon,
   salesforce: SalesforceIcon,
@@ -244,11 +244,11 @@ export const blockTypeToIconMap: Record<string, IconComponent> = {
   ssh: SshIcon,
   stagehand: StagehandIcon,
   stripe: StripeIcon,
-  stt_v2: STTIcon,
+  stt: STTIcon,
   supabase: SupabaseIcon,
   tavily: TavilyIcon,
   telegram: TelegramIcon,
-  textract_v2: TextractIcon,
+  textract: TextractIcon,
   tinybird: TinybirdIcon,
   translate: TranslateIcon,
   trello: TrelloIcon,
@@ -257,7 +257,7 @@ export const blockTypeToIconMap: Record<string, IconComponent> = {
   twilio_voice: TwilioIcon,
   typeform: TypeformIcon,
   video_generator_v2: VideoIcon,
-  vision_v2: EyeIcon,
+  vision: EyeIcon,
   wealthbox: WealthboxIcon,
   webflow: WebflowIcon,
   whatsapp: WhatsAppIcon,
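The hunks above only rename keys in `blockTypeToIconMap`, which is a plain `Record<string, IconComponent>` keyed by block type. A minimal lookup sketch (the helper and the fallback are illustrative, not part of the repository):

```typescript
// Illustrative helper: resolve an icon for a block type, with a fallback
// for types that are not registered in the map.
function getBlockIcon(
  map: Record<string, IconComponent>,
  blockType: string,
  fallback: IconComponent
): IconComponent {
  return map[blockType] ?? fallback
}

// e.g. getBlockIcon(blockTypeToIconMap, 'file_v2', DocumentIcon)
```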
@@ -6,7 +6,7 @@ description: Mehrere Dateien lesen und parsen
 import { BlockInfoCard } from "@/components/ui/block-info-card"
 
 <BlockInfoCard
-  type="file_v3"
+  type="file"
   color="#40916C"
 />
 
@@ -6,7 +6,7 @@ description: Interagieren Sie mit Fireflies.ai-Besprechungstranskripten und -auf
 import { BlockInfoCard } from "@/components/ui/block-info-card"
 
 <BlockInfoCard
-  type="fireflies_v2"
+  type="fireflies"
   color="#100730"
 />
 
@@ -6,7 +6,7 @@ description: Text aus PDF-Dokumenten extrahieren
 import { BlockInfoCard } from "@/components/ui/block-info-card"
 
 <BlockInfoCard
-  type="mistral_parse_v3"
+  type="mistral_parse"
   color="#000000"
 />
 
@@ -49,25 +49,10 @@ Retrieve content from Confluence pages using the Confluence API.
 
 | Parameter | Type | Description |
 | --------- | ---- | ----------- |
-| `ts` | string | ISO 8601 timestamp of the operation |
+| `ts` | string | Timestamp of retrieval |
 | `pageId` | string | Confluence page ID |
-| `title` | string | Page title |
 | `content` | string | Page content with HTML tags stripped |
-| `status` | string | Page status \(current, archived, trashed, draft\) |
-| `spaceId` | string | ID of the space containing the page |
-| `parentId` | string | ID of the parent page |
-| `authorId` | string | Account ID of the page author |
-| `createdAt` | string | ISO 8601 timestamp when the page was created |
-| `url` | string | URL to view the page in Confluence |
-| `body` | object | Raw page body content in storage format |
-| ↳ `value` | string | The content value in the specified format |
-| ↳ `representation` | string | Content representation type |
-| `version` | object | Page version information |
-| ↳ `number` | number | Version number |
-| ↳ `message` | string | Version message |
-| ↳ `minorEdit` | boolean | Whether this is a minor edit |
-| ↳ `authorId` | string | Account ID of the version author |
-| ↳ `createdAt` | string | ISO 8601 timestamp of version creation |
+| `title` | string | Page title |
 
 ### `confluence_update`
 
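The richer page-retrieval output removed in the hunk above nests `body` and `version` objects under the page fields. A sketch of that shape as a TypeScript interface, with names taken from the table (the interface itself is illustrative, not part of the codebase):

```typescript
// Sketch of the page-retrieval output documented in the removed rows above.
interface ConfluencePageOutput {
  ts: string // ISO 8601 timestamp of the operation
  pageId: string
  title: string
  content: string // page content with HTML tags stripped
  status?: 'current' | 'archived' | 'trashed' | 'draft'
  spaceId?: string
  parentId?: string
  authorId?: string
  createdAt?: string
  url?: string
  body?: { value: string; representation: string }
  version?: {
    number: number
    message?: string
    minorEdit: boolean
    authorId: string
    createdAt: string
  }
}
```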
@@ -91,25 +76,6 @@ Update a Confluence page using the Confluence API.
 | `ts` | string | Timestamp of update |
 | `pageId` | string | Confluence page ID |
 | `title` | string | Updated page title |
-| `status` | string | Page status |
-| `spaceId` | string | Space ID |
-| `body` | object | Page body content in storage format |
-| ↳ `storage` | object | Body in storage format \(Confluence markup\) |
-| ↳ `value` | string | The content value in the specified format |
-| ↳ `representation` | string | Content representation type |
-| ↳ `view` | object | Body in view format \(rendered HTML\) |
-| ↳ `value` | string | The content value in the specified format |
-| ↳ `representation` | string | Content representation type |
-| ↳ `atlas_doc_format` | object | Body in Atlassian Document Format \(ADF\) |
-| ↳ `value` | string | The content value in the specified format |
-| ↳ `representation` | string | Content representation type |
-| `version` | object | Page version information |
-| ↳ `number` | number | Version number |
-| ↳ `message` | string | Version message |
-| ↳ `minorEdit` | boolean | Whether this is a minor edit |
-| ↳ `authorId` | string | Account ID of the version author |
-| ↳ `createdAt` | string | ISO 8601 timestamp of version creation |
-| `url` | string | URL to view the page in Confluence |
 | `success` | boolean | Update operation success status |
 
 ### `confluence_create_page`
@@ -134,30 +100,11 @@ Create a new page in a Confluence space.
 | `ts` | string | Timestamp of creation |
 | `pageId` | string | Created page ID |
 | `title` | string | Page title |
-| `status` | string | Page status |
-| `spaceId` | string | Space ID |
-| `parentId` | string | Parent page ID |
-| `body` | object | Page body content |
-| ↳ `storage` | object | Body in storage format \(Confluence markup\) |
-| ↳ `value` | string | The content value in the specified format |
-| ↳ `representation` | string | Content representation type |
-| ↳ `view` | object | Body in view format \(rendered HTML\) |
-| ↳ `value` | string | The content value in the specified format |
-| ↳ `representation` | string | Content representation type |
-| ↳ `atlas_doc_format` | object | Body in Atlassian Document Format \(ADF\) |
-| ↳ `value` | string | The content value in the specified format |
-| ↳ `representation` | string | Content representation type |
-| `version` | object | Page version information |
-| ↳ `number` | number | Version number |
-| ↳ `message` | string | Version message |
-| ↳ `minorEdit` | boolean | Whether this is a minor edit |
-| ↳ `authorId` | string | Account ID of the version author |
-| ↳ `createdAt` | string | ISO 8601 timestamp of version creation |
 | `url` | string | Page URL |
 
 ### `confluence_delete_page`
 
-Delete a Confluence page. By default moves to trash; use purge=true to permanently delete.
+Delete a Confluence page (moves it to trash where it can be restored).
 
 #### Input
 
@@ -165,7 +112,6 @@ Delete a Confluence page. By default moves to trash; use purge=true to permanent
 | --------- | ---- | -------- | ----------- |
 | `domain` | string | Yes | Your Confluence domain \(e.g., yourcompany.atlassian.net\) |
 | `pageId` | string | Yes | Confluence page ID to delete |
-| `purge` | boolean | No | If true, permanently deletes the page instead of moving to trash \(default: false\) |
 | `cloudId` | string | No | Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain. |
 
 #### Output
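The removed `purge` input above toggles between trashing and permanently deleting a page. A hedged sketch of the parameter object, with placeholder values and the object shape assumed from the input table:

```typescript
// Illustrative params for the delete-page tool; only the field names come
// from the table above, the values are placeholders.
const deletePageParams = {
  domain: 'yourcompany.atlassian.net',
  pageId: '123456',
  purge: false, // false (default): move to trash; true: permanently delete
}
```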
@@ -176,229 +122,6 @@ Delete a Confluence page. By default moves to trash; use purge=true to permanent
 | `pageId` | string | Deleted page ID |
 | `deleted` | boolean | Deletion status |
 
-### `confluence_list_pages_in_space`
-
-List all pages within a specific Confluence space. Supports pagination and filtering by status.
-
-#### Input
-
-| Parameter | Type | Required | Description |
-| --------- | ---- | -------- | ----------- |
-| `domain` | string | Yes | Your Confluence domain \(e.g., yourcompany.atlassian.net\) |
-| `spaceId` | string | Yes | The ID of the Confluence space to list pages from |
-| `limit` | number | No | Maximum number of pages to return \(default: 50, max: 250\) |
-| `status` | string | No | Filter pages by status: current, archived, trashed, or draft |
-| `bodyFormat` | string | No | Format for page body content: storage, atlas_doc_format, or view. If not specified, body is not included. |
-| `cursor` | string | No | Pagination cursor from previous response to get the next page of results |
-| `cloudId` | string | No | Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain. |
-
-#### Output
-
-| Parameter | Type | Description |
-| --------- | ---- | ----------- |
-| `ts` | string | ISO 8601 timestamp of the operation |
-| `pages` | array | Array of pages in the space |
-| ↳ `id` | string | Unique page identifier |
-| ↳ `title` | string | Page title |
-| ↳ `status` | string | Page status \(e.g., current, archived, trashed, draft\) |
-| ↳ `spaceId` | string | ID of the space containing the page |
-| ↳ `parentId` | string | ID of the parent page \(null if top-level\) |
-| ↳ `authorId` | string | Account ID of the page author |
-| ↳ `createdAt` | string | ISO 8601 timestamp when the page was created |
-| ↳ `version` | object | Page version information |
-| ↳ `number` | number | Version number |
-| ↳ `message` | string | Version message |
-| ↳ `minorEdit` | boolean | Whether this is a minor edit |
-| ↳ `authorId` | string | Account ID of the version author |
-| ↳ `createdAt` | string | ISO 8601 timestamp of version creation |
-| ↳ `body` | object | Page body content \(if bodyFormat was specified\) |
-| ↳ `storage` | object | Body in storage format \(Confluence markup\) |
-| ↳ `value` | string | The content value in the specified format |
-| ↳ `representation` | string | Content representation type |
-| ↳ `view` | object | Body in view format \(rendered HTML\) |
-| ↳ `value` | string | The content value in the specified format |
-| ↳ `representation` | string | Content representation type |
-| ↳ `atlas_doc_format` | object | Body in Atlassian Document Format \(ADF\) |
-| ↳ `value` | string | The content value in the specified format |
-| ↳ `representation` | string | Content representation type |
-| ↳ `webUrl` | string | URL to view the page in Confluence |
-| `nextCursor` | string | Cursor for fetching the next page of results |
-
-### `confluence_get_page_children`
-
-Get all child pages of a specific Confluence page. Useful for navigating page hierarchies.
-
-#### Input
-
-| Parameter | Type | Required | Description |
-| --------- | ---- | -------- | ----------- |
-| `domain` | string | Yes | Your Confluence domain \(e.g., yourcompany.atlassian.net\) |
-| `pageId` | string | Yes | The ID of the parent page to get children from |
-| `limit` | number | No | Maximum number of child pages to return \(default: 50, max: 250\) |
-| `cursor` | string | No | Pagination cursor from previous response to get the next page of results |
-| `cloudId` | string | No | Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain. |
-
-#### Output
-
-| Parameter | Type | Description |
-| --------- | ---- | ----------- |
-| `ts` | string | ISO 8601 timestamp of the operation |
-| `parentId` | string | ID of the parent page |
-| `children` | array | Array of child pages |
-| ↳ `id` | string | Child page ID |
-| ↳ `title` | string | Child page title |
-| ↳ `status` | string | Page status |
-| ↳ `spaceId` | string | Space ID |
-| ↳ `childPosition` | number | Position among siblings |
-| ↳ `webUrl` | string | URL to view the page |
-| `nextCursor` | string | Cursor for fetching the next page of results |
-
-### `confluence_get_page_ancestors`
-
-Get the ancestor (parent) pages of a specific Confluence page. Returns the full hierarchy from the page up to the root.
-
-#### Input
-
-| Parameter | Type | Required | Description |
-| --------- | ---- | -------- | ----------- |
-| `domain` | string | Yes | Your Confluence domain \(e.g., yourcompany.atlassian.net\) |
-| `pageId` | string | Yes | The ID of the page to get ancestors for |
-| `limit` | number | No | Maximum number of ancestors to return \(default: 25, max: 250\) |
-| `cloudId` | string | No | Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain. |
-
-#### Output
-
-| Parameter | Type | Description |
-| --------- | ---- | ----------- |
-| `ts` | string | ISO 8601 timestamp of the operation |
-| `pageId` | string | ID of the page whose ancestors were retrieved |
-| `ancestors` | array | Array of ancestor pages, ordered from direct parent to root |
-| ↳ `id` | string | Ancestor page ID |
-| ↳ `title` | string | Ancestor page title |
-| ↳ `status` | string | Page status |
-| ↳ `spaceId` | string | Space ID |
-| ↳ `webUrl` | string | URL to view the page |
-
-### `confluence_list_page_versions`
-
-List all versions (revision history) of a Confluence page.
-
-#### Input
-
-| Parameter | Type | Required | Description |
-| --------- | ---- | -------- | ----------- |
-| `domain` | string | Yes | Your Confluence domain \(e.g., yourcompany.atlassian.net\) |
-| `pageId` | string | Yes | The ID of the page to get versions for |
-| `limit` | number | No | Maximum number of versions to return \(default: 50, max: 250\) |
-| `cursor` | string | No | Pagination cursor from previous response |
-| `cloudId` | string | No | Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain. |
-
-#### Output
-
-| Parameter | Type | Description |
-| --------- | ---- | ----------- |
-| `ts` | string | ISO 8601 timestamp of the operation |
-| `pageId` | string | ID of the page |
-| `versions` | array | Array of page versions |
-| ↳ `number` | number | Version number |
-| ↳ `message` | string | Version message |
-| ↳ `minorEdit` | boolean | Whether this is a minor edit |
-| ↳ `authorId` | string | Account ID of the version author |
-| ↳ `createdAt` | string | ISO 8601 timestamp of version creation |
-| `nextCursor` | string | Cursor for fetching the next page of results |
-
-### `confluence_get_page_version`
-
-Get details about a specific version of a Confluence page.
-
-#### Input
-
-| Parameter | Type | Required | Description |
-| --------- | ---- | -------- | ----------- |
-| `domain` | string | Yes | Your Confluence domain \(e.g., yourcompany.atlassian.net\) |
-| `pageId` | string | Yes | The ID of the page |
-| `versionNumber` | number | Yes | The version number to retrieve \(e.g., 1, 2, 3\) |
-| `cloudId` | string | No | Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain. |
-
-#### Output
-
-| Parameter | Type | Description |
-| --------- | ---- | ----------- |
-| `ts` | string | ISO 8601 timestamp of the operation |
-| `pageId` | string | ID of the page |
-| `version` | object | Detailed version information |
-| ↳ `number` | number | Version number |
-| ↳ `message` | string | Version message |
-| ↳ `minorEdit` | boolean | Whether this is a minor edit |
-| ↳ `authorId` | string | Account ID of the version author |
-| ↳ `createdAt` | string | ISO 8601 timestamp of version creation |
-| ↳ `contentTypeModified` | boolean | Whether the content type was modified in this version |
-| ↳ `collaborators` | array | List of collaborator account IDs for this version |
-| ↳ `prevVersion` | number | Previous version number |
-| ↳ `nextVersion` | number | Next version number |
-
-### `confluence_list_page_properties`
-
-List all custom properties (metadata) attached to a Confluence page.
-
-#### Input
-
-| Parameter | Type | Required | Description |
-| --------- | ---- | -------- | ----------- |
-| `domain` | string | Yes | Your Confluence domain \(e.g., yourcompany.atlassian.net\) |
-| `pageId` | string | Yes | The ID of the page to list properties from |
-| `limit` | number | No | Maximum number of properties to return \(default: 50, max: 250\) |
-| `cursor` | string | No | Pagination cursor from previous response |
-| `cloudId` | string | No | Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain. |
-
-#### Output
-
-| Parameter | Type | Description |
-| --------- | ---- | ----------- |
-| `ts` | string | ISO 8601 timestamp of the operation |
-| `pageId` | string | ID of the page |
-| `properties` | array | Array of content properties |
-| ↳ `id` | string | Property ID |
-| ↳ `key` | string | Property key |
-| ↳ `value` | json | Property value \(can be any JSON\) |
-| ↳ `version` | object | Version information |
-| ↳ `number` | number | Version number |
-| ↳ `message` | string | Version message |
-| ↳ `minorEdit` | boolean | Whether this is a minor edit |
-| ↳ `authorId` | string | Account ID of the version author |
-| ↳ `createdAt` | string | ISO 8601 timestamp of version creation |
-| `nextCursor` | string | Cursor for fetching the next page of results |
-
-### `confluence_create_page_property`
-
-Create a new custom property (metadata) on a Confluence page.
-
-#### Input
-
-| Parameter | Type | Required | Description |
-| --------- | ---- | -------- | ----------- |
-| `domain` | string | Yes | Your Confluence domain \(e.g., yourcompany.atlassian.net\) |
-| `pageId` | string | Yes | The ID of the page to add the property to |
-| `key` | string | Yes | The key/name for the property |
-| `value` | json | Yes | The value for the property \(can be any JSON value\) |
-| `cloudId` | string | No | Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain. |
-
-#### Output
-
-| Parameter | Type | Description |
-| --------- | ---- | ----------- |
-| `ts` | string | ISO 8601 timestamp of the operation |
-| `pageId` | string | ID of the page |
-| `propertyId` | string | ID of the created property |
-| `key` | string | Property key |
-| `value` | json | Property value |
-| `version` | object | Version information |
-| ↳ `number` | number | Version number |
-| ↳ `message` | string | Version message |
-| ↳ `minorEdit` | boolean | Whether this is a minor edit |
-| ↳ `authorId` | string | Account ID of the version author |
-| ↳ `createdAt` | string | ISO 8601 timestamp of version creation |
-
 ### `confluence_search`
 
 Search for content across Confluence pages, blog posts, and other content.
@@ -432,211 +155,6 @@ Search for content across Confluence pages, blog posts, and other content.
 | ↳ `lastModified` | string | ISO 8601 timestamp of last modification |
 | ↳ `entityType` | string | Entity type identifier \(e.g., content, space\) |
 
-### `confluence_search_in_space`
-
-Search for content within a specific Confluence space. Optionally filter by text query and content type.
-
-#### Input
-
-| Parameter | Type | Required | Description |
-| --------- | ---- | -------- | ----------- |
-| `domain` | string | Yes | Your Confluence domain \(e.g., yourcompany.atlassian.net\) |
-| `spaceKey` | string | Yes | The key of the Confluence space to search in \(e.g., "ENG", "HR"\) |
-| `query` | string | No | Text search query. If not provided, returns all content in the space. |
-| `contentType` | string | No | Filter by content type: page, blogpost, attachment, or comment |
-| `limit` | number | No | Maximum number of results to return \(default: 25, max: 250\) |
-| `cloudId` | string | No | Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain. |
-
-#### Output
-
-| Parameter | Type | Description |
-| --------- | ---- | ----------- |
-| `ts` | string | ISO 8601 timestamp of the operation |
-| `spaceKey` | string | The space key that was searched |
-| `totalSize` | number | Total number of matching results |
-| `results` | array | Array of search results |
-| ↳ `id` | string | Unique content identifier |
-| ↳ `title` | string | Content title |
-| ↳ `type` | string | Content type \(e.g., page, blogpost, attachment, comment\) |
-| ↳ `status` | string | Content status \(e.g., current\) |
-| ↳ `url` | string | URL to view the content in Confluence |
-| ↳ `excerpt` | string | Text excerpt matching the search query |
-| ↳ `spaceKey` | string | Key of the space containing the content |
-| ↳ `space` | object | Space information for the content |
-| ↳ `id` | string | Space identifier |
-| ↳ `key` | string | Space key |
-| ↳ `name` | string | Space name |
-| ↳ `lastModified` | string | ISO 8601 timestamp of last modification |
-| ↳ `entityType` | string | Entity type identifier \(e.g., content, space\) |
-
-### `confluence_list_blogposts`
-
-List all blog posts across all accessible Confluence spaces.
-
-#### Input
-
-| Parameter | Type | Required | Description |
-| --------- | ---- | -------- | ----------- |
-| `domain` | string | Yes | Your Confluence domain \(e.g., yourcompany.atlassian.net\) |
-| `limit` | number | No | Maximum number of blog posts to return \(default: 25, max: 250\) |
-| `status` | string | No | Filter by status: current, archived, trashed, or draft |
-| `sort` | string | No | Sort order: created-date, -created-date, modified-date, -modified-date, title, -title |
-| `cursor` | string | No | Pagination cursor from previous response |
-| `cloudId` | string | No | Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain. |
-
-#### Output
-
-| Parameter | Type | Description |
-| --------- | ---- | ----------- |
-| `ts` | string | ISO 8601 timestamp of the operation |
-| `blogPosts` | array | Array of blog posts |
-| ↳ `id` | string | Blog post ID |
-| ↳ `title` | string | Blog post title |
-| ↳ `status` | string | Blog post status |
-| ↳ `spaceId` | string | Space ID |
-| ↳ `authorId` | string | Author account ID |
-| ↳ `createdAt` | string | Creation timestamp |
-| ↳ `version` | object | Version information |
-| ↳ `number` | number | Version number |
-| ↳ `message` | string | Version message |
-| ↳ `minorEdit` | boolean | Whether this is a minor edit |
-| ↳ `authorId` | string | Account ID of the version author |
-| ↳ `createdAt` | string | ISO 8601 timestamp of version creation |
-| ↳ `webUrl` | string | URL to view the blog post |
-| `nextCursor` | string | Cursor for fetching the next page of results |
-
-### `confluence_get_blogpost`
-
-Get a specific Confluence blog post by ID, including its content.
-
-#### Input
-
-| Parameter | Type | Required | Description |
-| --------- | ---- | -------- | ----------- |
-| `domain` | string | Yes | Your Confluence domain \(e.g., yourcompany.atlassian.net\) |
-| `blogPostId` | string | Yes | The ID of the blog post to retrieve |
-| `bodyFormat` | string | No | Format for blog post body: storage, atlas_doc_format, or view |
-| `cloudId` | string | No | Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain. |
-
-#### Output
-
-| Parameter | Type | Description |
-| --------- | ---- | ----------- |
-| `ts` | string | ISO 8601 timestamp of the operation |
-| `id` | string | Blog post ID |
-| `title` | string | Blog post title |
-| `status` | string | Blog post status |
-| `spaceId` | string | Space ID |
-| `authorId` | string | Author account ID |
-| `createdAt` | string | Creation timestamp |
-| `version` | object | Version information |
-| ↳ `number` | number | Version number |
-| ↳ `message` | string | Version message |
-| ↳ `minorEdit` | boolean | Whether this is a minor edit |
-| ↳ `authorId` | string | Account ID of the version author |
-| ↳ `createdAt` | string | ISO 8601 timestamp of version creation |
-| `body` | object | Blog post body content in requested format\(s\) |
-| ↳ `storage` | object | Body in storage format \(Confluence markup\) |
-| ↳ `value` | string | The content value in the specified format |
-| ↳ `representation` | string | Content representation type |
-| ↳ `view` | object | Body in view format \(rendered HTML\) |
-| ↳ `value` | string | The content value in the specified format |
-| ↳ `representation` | string | Content representation type |
-| ↳ `atlas_doc_format` | object | Body in Atlassian Document Format \(ADF\) |
-| ↳ `value` | string | The content value in the specified format |
-| ↳ `representation` | string | Content representation type |
-| `webUrl` | string | URL to view the blog post |
-
-### `confluence_create_blogpost`
-
-Create a new blog post in a Confluence space.
-
-#### Input
-
-| Parameter | Type | Required | Description |
-| --------- | ---- | -------- | ----------- |
-| `domain` | string | Yes | Your Confluence domain \(e.g., yourcompany.atlassian.net\) |
-| `spaceId` | string | Yes | The ID of the space to create the blog post in |
-| `title` | string | Yes | Title of the blog post |
-| `content` | string | Yes | Blog post content in Confluence storage format \(HTML\) |
-| `status` | string | No | Blog post status: current \(default\) or draft |
-| `cloudId` | string | No | Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain. |
-
-#### Output
-
-| Parameter | Type | Description |
-| --------- | ---- | ----------- |
-| `ts` | string | ISO 8601 timestamp of the operation |
-| `id` | string | Created blog post ID |
-| `title` | string | Blog post title |
-| `status` | string | Blog post status |
-| `spaceId` | string | Space ID |
-| `authorId` | string | Author account ID |
-| `body` | object | Blog post body content |
-| ↳ `storage` | object | Body in storage format \(Confluence markup\) |
-| ↳ `value` | string | The content value in the specified format |
-| ↳ `representation` | string | Content representation type |
-| ↳ `view` | object | Body in view format \(rendered HTML\) |
-| ↳ `value` | string | The content value in the specified format |
-| ↳ `representation` | string | Content representation type |
-| ↳ `atlas_doc_format` | object | Body in Atlassian Document Format \(ADF\) |
-| ↳ `value` | string | The content value in the specified format |
-| ↳ `representation` | string | Content representation type |
-| `version` | object | Blog post version information |
-| ↳ `number` | number | Version number |
-| ↳ `message` | string | Version message |
-| ↳ `minorEdit` | boolean | Whether this is a minor edit |
-| ↳ `authorId` | string | Account ID of the version author |
-| ↳ `createdAt` | string | ISO 8601 timestamp of version creation |
-| `webUrl` | string | URL to view the blog post |
-
-### `confluence_list_blogposts_in_space`
-
-List all blog posts within a specific Confluence space.
-
-#### Input
-
-| Parameter | Type | Required | Description |
-| --------- | ---- | -------- | ----------- |
-| `domain` | string | Yes | Your Confluence domain \(e.g., yourcompany.atlassian.net\) |
-| `spaceId` | string | Yes | The ID of the Confluence space to list blog posts from |
-| `limit` | number | No | Maximum number of blog posts to return \(default: 25, max: 250\) |
-| `status` | string | No | Filter by status: current, archived, trashed, or draft |
-| `bodyFormat` | string | No | Format for blog post body: storage, atlas_doc_format, or view |
-| `cursor` | string | No | Pagination cursor from previous response |
-| `cloudId` | string | No | Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain. |
-
-#### Output
-
-| Parameter | Type | Description |
-| --------- | ---- | ----------- |
-| `ts` | string | ISO 8601 timestamp of the operation |
-| `blogPosts` | array | Array of blog posts in the space |
-| ↳ `id` | string | Blog post ID |
-| ↳ `title` | string | Blog post title |
-| ↳ `status` | string | Blog post status |
-| ↳ `spaceId` | string | Space ID |
-| ↳ `authorId` | string | Author account ID |
-| ↳ `createdAt` | string | Creation timestamp |
-| ↳ `version` | object | Version information |
-| ↳ `number` | number | Version number |
-| ↳ `message` | string | Version message |
-| ↳ `minorEdit` | boolean | Whether this is a minor edit |
-| ↳ `authorId` | string | Account ID of the version author |
-| ↳ `createdAt` | string | ISO 8601 timestamp of version creation |
-| ↳ `body` | object | Blog post body content |
-| ↳ `storage` | object | Body in storage format \(Confluence markup\) |
-| ↳ `value` | string | The content value in the specified format |
-| ↳ `representation` | string | Content representation type |
-| ↳ `view` | object | Body in view format \(rendered HTML\) |
-| ↳ `value` | string | The content value in the specified format |
-| ↳ `representation` | string | Content representation type |
-| ↳ `atlas_doc_format` | object | Body in Atlassian Document Format \(ADF\) |
-| ↳ `value` | string | The content value in the specified format |
-| ↳ `representation` | string | Content representation type |
-| ↳ `webUrl` | string | URL to view the blog post |
-| `nextCursor` | string | Cursor for fetching the next page of results |
-
 ### `confluence_create_comment`
 
 Add a comment to a Confluence page.
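The list-style tools removed above all follow the same `limit`/`cursor`/`nextCursor` pagination pattern. A hedged sketch of how a caller might drain all pages; `listBlogPosts` is a stand-in for whatever function actually invokes the tool:

```typescript
// Cursor pagination sketch (illustrative only): pass `nextCursor` from one
// response as `cursor` on the next request until it comes back empty.
async function fetchAllBlogPosts(
  listBlogPosts: (args: { limit: number; cursor?: string }) => Promise<{
    blogPosts: unknown[]
    nextCursor?: string
  }>
): Promise<unknown[]> {
  const all: unknown[] = []
  let cursor: string | undefined
  do {
    const page = await listBlogPosts({ limit: 250, cursor })
    all.push(...page.blogPosts)
    cursor = page.nextCursor // undefined when there are no more pages
  } while (cursor)
  return all
}
```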
@@ -669,8 +187,6 @@ List all comments on a Confluence page.
 | `domain` | string | Yes | Your Confluence domain \(e.g., yourcompany.atlassian.net\) |
 | `pageId` | string | Yes | Confluence page ID to list comments from |
 | `limit` | number | No | Maximum number of comments to return \(default: 25\) |
-| `bodyFormat` | string | No | Format for the comment body: storage, atlas_doc_format, view, or export_view \(default: storage\) |
-| `cursor` | string | No | Pagination cursor from previous response |
 | `cloudId` | string | No | Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain. |
 
 #### Output
@@ -696,7 +212,6 @@ List all comments on a Confluence page.
 | ↳ `minorEdit` | boolean | Whether this is a minor edit |
 | ↳ `authorId` | string | Account ID of the version author |
 | ↳ `createdAt` | string | ISO 8601 timestamp of version creation |
-| `nextCursor` | string | Cursor for fetching the next page of results |
 
 ### `confluence_update_comment`
 
@@ -776,8 +291,7 @@ List all attachments on a Confluence page.
 | --------- | ---- | -------- | ----------- |
 | `domain` | string | Yes | Your Confluence domain \(e.g., yourcompany.atlassian.net\) |
 | `pageId` | string | Yes | Confluence page ID to list attachments from |
-| `limit` | number | No | Maximum number of attachments to return \(default: 50, max: 250\) |
-| `cursor` | string | No | Pagination cursor from previous response |
+| `limit` | number | No | Maximum number of attachments to return \(default: 25\) |
 | `cloudId` | string | No | Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain. |
 
 #### Output
@@ -802,7 +316,6 @@ List all attachments on a Confluence page.
 | ↳ `minorEdit` | boolean | Whether this is a minor edit |
 | ↳ `authorId` | string | Account ID of the version author |
 | ↳ `createdAt` | string | ISO 8601 timestamp of version creation |
-| `nextCursor` | string | Cursor for fetching the next page of results |
 
 ### `confluence_delete_attachment`
 
@@ -834,8 +347,6 @@ List all labels on a Confluence page.
 | --------- | ---- | -------- | ----------- |
 | `domain` | string | Yes | Your Confluence domain \(e.g., yourcompany.atlassian.net\) |
 | `pageId` | string | Yes | Confluence page ID to list labels from |
-| `limit` | number | No | Maximum number of labels to return \(default: 25, max: 250\) |
-| `cursor` | string | No | Pagination cursor from previous response |
 | `cloudId` | string | No | Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain. |
 
 #### Output
@@ -847,30 +358,6 @@ List all labels on a Confluence page.
 | ↳ `id` | string | Unique label identifier |
 | ↳ `name` | string | Label name |
 | ↳ `prefix` | string | Label prefix/type \(e.g., global, my, team\) |
-| `nextCursor` | string | Cursor for fetching the next page of results |
 
-### `confluence_add_label`
-
-Add a label to a Confluence page for organization and categorization.
-
-#### Input
-
-| Parameter | Type | Required | Description |
-| --------- | ---- | -------- | ----------- |
-| `domain` | string | Yes | Your Confluence domain \(e.g., yourcompany.atlassian.net\) |
-| `pageId` | string | Yes | Confluence page ID to add the label to |
-| `labelName` | string | Yes | Name of the label to add |
-| `prefix` | string | No | Label prefix: global \(default\), my, team, or system |
-| `cloudId` | string | No | Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain. |
-
-#### Output
-
-| Parameter | Type | Description |
-| --------- | ---- | ----------- |
-| `ts` | string | ISO 8601 timestamp of the operation |
-| `pageId` | string | Page ID that the label was added to |
-| `labelName` | string | Name of the added label |
-| `labelId` | string | ID of the added label |
-
 ### `confluence_get_space`
 
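The removed `confluence_add_label` input table above maps directly onto a small parameter object. A hedged sketch with placeholder values; only the field names come from the table:

```typescript
// Illustrative params for the removed add-label tool documented above.
const addLabelParams = {
  domain: 'yourcompany.atlassian.net',
  pageId: '123456',
  labelName: 'meeting-notes',
  prefix: 'global', // global (default), my, team, or system
}
```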
@@ -888,19 +375,13 @@ Get details about a specific Confluence space.
 
 | Parameter | Type | Description |
 | --------- | ---- | ----------- |
-| `ts` | string | ISO 8601 timestamp of the operation |
+| `ts` | string | Timestamp of retrieval |
 | `spaceId` | string | Space ID |
 | `name` | string | Space name |
 | `key` | string | Space key |
-| `type` | string | Space type \(global, personal\) |
-| `status` | string | Space status \(current, archived\) |
-| `url` | string | URL to view the space in Confluence |
-| `authorId` | string | Account ID of the space creator |
-| `createdAt` | string | ISO 8601 timestamp when the space was created |
-| `homepageId` | string | ID of the space homepage |
-| `description` | object | Space description content |
-| ↳ `value` | string | Description text content |
-| ↳ `representation` | string | Content representation format \(e.g., plain, view, storage\) |
+| `type` | string | Space type |
+| `status` | string | Space status |
+| `url` | string | Space URL |
 
 ### `confluence_list_spaces`
 
@@ -911,8 +392,7 @@ List all Confluence spaces accessible to the user.
 | Parameter | Type | Required | Description |
 | --------- | ---- | -------- | ----------- |
 | `domain` | string | Yes | Your Confluence domain \(e.g., yourcompany.atlassian.net\) |
-| `limit` | number | No | Maximum number of spaces to return \(default: 25, max: 250\) |
-| `cursor` | string | No | Pagination cursor from previous response |
+| `limit` | number | No | Maximum number of spaces to return \(default: 25\) |
 | `cloudId` | string | No | Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain. |
 
 #### Output
@@ -932,6 +412,5 @@ List all Confluence spaces accessible to the user.
 | ↳ `description` | object | Space description |
 | ↳ `value` | string | Description text content |
 | ↳ `representation` | string | Content representation format \(e.g., plain, view, storage\) |
-| `nextCursor` | string | Cursor for fetching the next page of results |
 
 
@@ -63,7 +63,6 @@ Send a message to a Discord channel
 | Parameter | Type | Description |
 | --------- | ---- | ----------- |
 | `message` | string | Success or error message |
-| `files` | file[] | Files attached to the message |
 | `data` | object | Discord message data |
 | ↳ `id` | string | Message ID |
 | ↳ `content` | string | Message content |
@@ -43,8 +43,7 @@ Upload a file to Dropbox
 | Parameter | Type | Required | Description |
 | --------- | ---- | -------- | ----------- |
 | `path` | string | Yes | The path in Dropbox where the file should be saved \(e.g., /folder/document.pdf\) |
-| `file` | file | No | The file to upload \(UserFile object\) |
-| `fileContent` | string | No | Legacy: base64 encoded file content |
+| `fileContent` | string | Yes | The base64 encoded content of the file to upload |
 | `fileName` | string | No | Optional filename \(used if path is a folder\) |
 | `mode` | string | No | Write mode: add \(default\) or overwrite |
 | `autorename` | boolean | No | If true, rename the file if there is a conflict |
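On the newer side of the hunk above, `fileContent` is a required base64 string rather than a UserFile object. A hedged sketch of preparing that parameter with the Node.js Buffer API; the helper and file paths are illustrative:

```typescript
// Illustrative only: build the base64 `fileContent` for a Dropbox upload.
import { readFile } from 'node:fs/promises'

async function toDropboxUploadParams(localPath: string, dropboxPath: string) {
  const bytes = await readFile(localPath)
  return {
    path: dropboxPath, // e.g. '/folder/document.pdf'
    fileContent: bytes.toString('base64'),
    mode: 'add', // or 'overwrite'
    autorename: true,
  }
}
```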
@@ -67,7 +66,7 @@ Upload a file to Dropbox
 
 ### `dropbox_download`
 
-Download a file from Dropbox with metadata and content
+Download a file from Dropbox and get a temporary link
 
 #### Input
 
@@ -79,8 +78,11 @@ Download a file from Dropbox with metadata and content
 
 | Parameter | Type | Description |
 | --------- | ---- | ----------- |
-| `file` | file | Downloaded file stored in execution files |
-| `metadata` | json | The file metadata |
+| `file` | object | The file metadata |
+| ↳ `id` | string | Unique identifier for the file |
+| ↳ `name` | string | Name of the file |
+| ↳ `path_display` | string | Display path of the file |
+| ↳ `size` | number | Size of the file in bytes |
 | `temporaryLink` | string | Temporary link to download the file \(valid for ~4 hours\) |
 | `content` | string | Base64 encoded file content \(if fetched\) |
 
@@ -6,7 +6,7 @@ description: Read and parse multiple files
 import { BlockInfoCard } from "@/components/ui/block-info-card"
 
 <BlockInfoCard
-  type="file_v3"
+  type="file_v2"
   color="#40916C"
 />
 
@@ -27,7 +27,7 @@ The File Parser tool is particularly useful for scenarios where your agents need
 
 ## Usage Instructions
 
-Upload files directly or import from external URLs to get UserFile objects for use in other blocks.
+Integrate File into the workflow. Can upload a file manually or insert a file url.
 
 
 
@@ -41,15 +41,14 @@ Parse one or more uploaded files or files from URLs (text, PDF, CSV, images, etc
 
 | Parameter | Type | Required | Description |
 | --------- | ---- | -------- | ----------- |
-| `filePath` | string | No | Path to the file\(s\). Can be a single path, URL, or an array of paths. |
-| `file` | file | No | Uploaded file\(s\) to parse |
+| `filePath` | string | Yes | Path to the file\(s\). Can be a single path, URL, or an array of paths. |
 | `fileType` | string | No | Type of file to parse \(auto-detected if not specified\) |
 
 #### Output
 
 | Parameter | Type | Description |
 | --------- | ---- | ----------- |
-| `files` | file[] | Parsed files as UserFile objects |
-| `combinedContent` | string | Combined content of all parsed files |
+| `files` | array | Array of parsed files with content, metadata, and file properties |
+| `combinedContent` | string | All file contents merged into a single text string |
 
 
@@ -6,7 +6,7 @@ description: Interact with Fireflies.ai meeting transcripts and recordings

import { BlockInfoCard } from "@/components/ui/block-info-card"

<BlockInfoCard
-  type="fireflies_v2"
+  type="fireflies"
  color="#100730"
/>
@@ -692,7 +692,6 @@ Get the content of a file from a GitHub repository. Supports files up to 1MB. Co

| `download_url` | string | Direct download URL |
| `git_url` | string | Git blob API URL |
| `_links` | json | Related links |
-| `file` | file | Downloaded file stored in execution files |

### `github_create_file`
@@ -291,7 +291,11 @@ Download a file from Google Drive with complete metadata (exports Google Workspa

| Parameter | Type | Description |
| --------- | ---- | ----------- |
-| `file` | file | Downloaded file stored in execution files |
+| `file` | object | Downloaded file data |
+| ↳ `name` | string | File name |
+| ↳ `mimeType` | string | MIME type of the file |
+| ↳ `data` | string | File content as base64-encoded string |
+| ↳ `size` | number | File size in bytes |
| `metadata` | object | Complete file metadata from Google Drive |
| ↳ `id` | string | Google Drive file ID |
| ↳ `kind` | string | Resource type identifier |
@@ -6,7 +6,7 @@ description: Read, write, and create presentations

import { BlockInfoCard } from "@/components/ui/block-info-card"

<BlockInfoCard
-  type="google_slides_v2"
+  type="google_slides"
  color="#E0E0E0"
/>
@@ -333,28 +333,6 @@ Get all attachments from a Jira issue

| `issueKey` | string | Issue key |
| `attachments` | array | Array of attachments with id, filename, size, mimeType, created, author |

-### `jira_add_attachment`
-
-Add attachments to a Jira issue
-
-#### Input
-
-| Parameter | Type | Required | Description |
-| --------- | ---- | -------- | ----------- |
-| `domain` | string | Yes | Your Jira domain \(e.g., yourcompany.atlassian.net\) |
-| `issueKey` | string | Yes | Jira issue key to add attachments to \(e.g., PROJ-123\) |
-| `files` | file[] | Yes | Files to attach to the Jira issue |
-| `cloudId` | string | No | Jira Cloud ID for the instance. If not provided, it will be fetched using the domain. |
-
-#### Output
-
-| Parameter | Type | Description |
-| --------- | ---- | ----------- |
-| `ts` | string | Timestamp of the operation |
-| `issueKey` | string | Issue key |
-| `attachmentIds` | json | IDs of uploaded attachments |
-| `files` | file[] | Uploaded attachment files |

### `jira_delete_attachment`

Delete an attachment from a Jira issue
@@ -1022,8 +1022,7 @@ Add an attachment to an issue in Linear

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `issueId` | string | Yes | Issue ID to attach to |
-| `url` | string | No | URL of the attachment |
-| `file` | file | No | File to attach |
+| `url` | string | Yes | URL of the attachment |
| `title` | string | Yes | Attachment title |
| `subtitle` | string | No | Attachment subtitle/description |
@@ -81,7 +81,6 @@ Write or update content in a Microsoft Teams chat

| `createdTime` | string | Timestamp when message was created |
| `url` | string | Web URL to the message |
| `updatedContent` | boolean | Whether content was successfully updated |
-| `files` | file[] | Files attached to the message |

### `microsoft_teams_read_channel`

@@ -133,7 +132,6 @@ Write or send a message to a Microsoft Teams channel

| `createdTime` | string | Timestamp when message was created |
| `url` | string | Web URL to the message |
| `updatedContent` | boolean | Whether content was successfully updated |
-| `files` | file[] | Files attached to the message |

### `microsoft_teams_update_chat_message`
@@ -6,7 +6,7 @@ description: Extract text from PDF documents

import { BlockInfoCard } from "@/components/ui/block-info-card"

<BlockInfoCard
-  type="mistral_parse_v3"
+  type="mistral_parse_v2"
  color="#000000"
/>
@@ -35,12 +35,13 @@ Integrate Mistral Parse into the workflow. Can extract text from uploaded PDF do

### `mistral_parser`

+Parse PDF documents using Mistral OCR API

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
-| `filePath` | string | No | URL to a PDF document to be processed |
-| `file` | file | No | Document file to be processed |
+| `filePath` | string | Yes | URL to a PDF document to be processed |
| `fileUpload` | object | No | File upload data from file-upload component |
| `resultType` | string | No | Type of parsed result \(markdown, text, or json\). Defaults to markdown. |
| `includeImageBase64` | boolean | No | Include base64-encoded images in the response |
@@ -54,8 +55,27 @@ Integrate Mistral Parse into the workflow. Can extract text from uploaded PDF do

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `pages` | array | Array of page objects from Mistral OCR |
-| `model` | string | Mistral OCR model identifier |
-| `usage_info` | json | Usage statistics from the API |
-| `document_annotation` | string | Structured annotation data |
+| ↳ `index` | number | Page index \(zero-based\) |
+| ↳ `markdown` | string | Extracted markdown content |
+| ↳ `images` | array | Images extracted from this page with bounding boxes |
+| ↳ `id` | string | Image identifier \(e.g., img-0.jpeg\) |
+| ↳ `top_left_x` | number | Top-left X coordinate in pixels |
+| ↳ `top_left_y` | number | Top-left Y coordinate in pixels |
+| ↳ `bottom_right_x` | number | Bottom-right X coordinate in pixels |
+| ↳ `bottom_right_y` | number | Bottom-right Y coordinate in pixels |
+| ↳ `image_base64` | string | Base64-encoded image data \(when include_image_base64=true\) |
+| ↳ `dimensions` | object | Page dimensions |
+| ↳ `dpi` | number | Dots per inch |
+| ↳ `height` | number | Page height in pixels |
+| ↳ `width` | number | Page width in pixels |
+| ↳ `tables` | array | Extracted tables as HTML/markdown \(when table_format is set\). Referenced via placeholders like \[tbl-0.html\] |
+| ↳ `hyperlinks` | array | Array of URL strings detected in the page \(e.g., \["https://...", "mailto:..."\]\) |
+| ↳ `header` | string | Page header content \(when extract_header=true\) |
+| ↳ `footer` | string | Page footer content \(when extract_footer=true\) |
+| `model` | string | Mistral OCR model identifier \(e.g., mistral-ocr-latest\) |
+| `usage_info` | object | Usage and processing statistics |
+| ↳ `pages_processed` | number | Total number of pages processed |
+| ↳ `doc_size_bytes` | number | Document file size in bytes |
+| `document_annotation` | string | Structured annotation data as JSON string \(when applicable\) |
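The expanded output table corresponds to a nested response roughly like the following sketch. Field names mirror the table above; the values and the variable name are invented for illustration, and the exact shape is defined by the Mistral OCR API, not by this example.

```typescript
// Illustrative shape of the mistral_parser output described in the table above.
const ocrResult = {
  pages: [
    {
      index: 0, // zero-based page index
      markdown: '# Invoice 42\n\nTotal due: $1,250.00',
      images: [
        {
          id: 'img-0.jpeg',
          top_left_x: 120,
          top_left_y: 80,
          bottom_right_x: 480,
          bottom_right_y: 320,
        },
      ],
      dimensions: { dpi: 200, height: 2200, width: 1700 },
    },
  ],
  model: 'mistral-ocr-latest',
  usage_info: { pages_processed: 1, doc_size_bytes: 204800 },
}
```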
@@ -113,26 +113,6 @@ Create a new page in Notion

| `last_edited_time` | string | ISO 8601 last edit timestamp |
| `title` | string | Page title |

-### `notion_update_page`
-
-Update properties of a Notion page
-
-#### Input
-
-| Parameter | Type | Required | Description |
-| --------- | ---- | -------- | ----------- |
-| `pageId` | string | Yes | The UUID of the Notion page to update |
-| `properties` | json | Yes | JSON object of properties to update |
-
-#### Output
-
-| Parameter | Type | Description |
-| --------- | ---- | ----------- |
-| `id` | string | Page UUID |
-| `url` | string | Notion page URL |
-| `last_edited_time` | string | ISO 8601 last edit timestamp |
-| `title` | string | Page title |

### `notion_query_database`

Query and filter Notion database entries with advanced filtering
@@ -152,7 +152,6 @@ Retrieve files from Pipedrive with optional filters

| `person_id` | string | No | Filter files by person ID \(e.g., "456"\) |
| `org_id` | string | No | Filter files by organization ID \(e.g., "789"\) |
| `limit` | string | No | Number of results to return \(e.g., "50", default: 100, max: 500\) |
-| `downloadFiles` | boolean | No | Download file contents into file outputs |

#### Output

@@ -169,7 +168,6 @@ Retrieve files from Pipedrive with optional filters

| ↳ `person_id` | number | Associated person ID |
| ↳ `org_id` | number | Associated organization ID |
| ↳ `url` | string | File download URL |
-| `downloadedFiles` | file[] | Downloaded files from Pipedrive |
| `total_items` | number | Total number of files returned |
| `success` | boolean | Operation success status |
@@ -6,7 +6,7 @@ description: Extract text from documents using Pulse OCR

import { BlockInfoCard } from "@/components/ui/block-info-card"

<BlockInfoCard
-  type="pulse_v2"
+  type="pulse"
  color="#E0E0E0"
/>

@@ -31,7 +31,7 @@ If you need accurate, scalable, and developer-friendly document parsing capabili

## Usage Instructions

-Integrate Pulse into the workflow. Extract text from PDF documents, images, and Office files via upload or file references.
+Integrate Pulse into the workflow. Extract text from PDF documents, images, and Office files via URL or upload.

@@ -39,12 +39,13 @@ Integrate Pulse into the workflow. Extract text from PDF documents, images, and

### `pulse_parser`

+Parse documents (PDF, images, Office docs) using Pulse OCR API

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
-| `filePath` | string | No | URL to a document to be processed |
-| `file` | file | No | Document file to be processed |
+| `filePath` | string | Yes | URL to a document to be processed |
| `fileUpload` | object | No | File upload data from file-upload component |
| `pages` | string | No | Page range to process \(1-indexed, e.g., "1-2,5"\) |
| `extractFigure` | boolean | No | Enable figure extraction from the document |
@@ -56,6 +57,16 @@ Integrate Pulse into the workflow. Extract text from PDF documents, images, and

#### Output

-This tool does not produce any outputs.
+| Parameter | Type | Description |
+| --------- | ---- | ----------- |
+| `markdown` | string | Extracted content in markdown format |
+| `page_count` | number | Number of pages in the document |
+| `job_id` | string | Unique job identifier |
+| `bounding_boxes` | json | Bounding box layout information |
+| `extraction_url` | string | URL for extraction results \(for large documents\) |
+| `html` | string | HTML content if requested |
+| `structured_output` | json | Structured output if schema was provided |
+| `chunks` | json | Chunked content if chunking was enabled |
+| `figures` | json | Extracted figures if figure extraction was enabled |
@@ -6,7 +6,7 @@ description: Extract text from PDF documents

import { BlockInfoCard } from "@/components/ui/block-info-card"

<BlockInfoCard
-  type="reducto_v2"
+  type="reducto"
  color="#5c0c5c"
/>

@@ -29,7 +29,7 @@ Looking for reliable and scalable PDF parsing? Reducto is optimized for develope

## Usage Instructions

-Integrate Reducto Parse into the workflow. Can extract text from uploaded PDF documents or file references.
+Integrate Reducto Parse into the workflow. Can extract text from uploaded PDF documents, or from a URL.

@@ -37,12 +37,13 @@ Integrate Reducto Parse into the workflow. Can extract text from uploaded PDF do

### `reducto_parser`

+Parse PDF documents using Reducto OCR API

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
-| `filePath` | string | No | URL to a PDF document to be processed |
-| `file` | file | No | Document file to be processed |
+| `filePath` | string | Yes | URL to a PDF document to be processed |
| `fileUpload` | object | No | File upload data from file-upload component |
| `pages` | array | No | Specific pages to process \(1-indexed page numbers\) |
| `tableOutputFormat` | string | No | Table output format \(html or markdown\). Defaults to markdown. |
@@ -50,6 +51,13 @@ Integrate Reducto Parse into the workflow. Can extract text from uploaded PDF do

#### Output

-This tool does not produce any outputs.
+| Parameter | Type | Description |
+| --------- | ---- | ----------- |
+| `job_id` | string | Unique identifier for the processing job |
+| `duration` | number | Processing time in seconds |
+| `usage` | json | Resource consumption data |
+| `result` | json | Parsed document content with chunks and blocks |
+| `pdf_url` | string | Storage URL of converted PDF |
+| `studio_link` | string | Link to Reducto studio interface |
@@ -78,7 +78,6 @@ Retrieve an object from an AWS S3 bucket

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `url` | string | Pre-signed URL for downloading the S3 object |
-| `file` | file | Downloaded file stored in execution files |
| `metadata` | object | File metadata including type, size, name, and last modified date |

### `s3_list_objects`
@@ -62,7 +62,7 @@ Send an email using SendGrid API

| `bcc` | string | No | BCC email address |
| `replyTo` | string | No | Reply-to email address |
| `replyToName` | string | No | Reply-to name |
-| `attachments` | file[] | No | Files to attach to the email \(UserFile objects\) |
+| `attachments` | file[] | No | Files to attach to the email as an array of attachment objects |
| `templateId` | string | No | SendGrid template ID to use |
| `dynamicTemplateData` | json | No | JSON object of dynamic template data |
@@ -97,7 +97,6 @@ Download a file from a remote SFTP server

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether the download was successful |
-| `file` | file | Downloaded file stored in execution files |
| `fileName` | string | Name of the downloaded file |
| `content` | string | File content \(text or base64 encoded\) |
| `size` | number | File size in bytes |
@@ -144,7 +144,6 @@ Send messages to Slack channels or direct messages. Supports Slack mrkdwn format

| `ts` | string | Message timestamp |
| `channel` | string | Channel ID where message was sent |
| `fileCount` | number | Number of files uploaded \(when files are attached\) |
-| `files` | file[] | Files attached to the message |

### `slack_canvas`
@@ -170,7 +170,6 @@ Download a file from a remote SSH server

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `downloaded` | boolean | Whether the file was downloaded successfully |
-| `file` | file | Downloaded file stored in execution files |
| `fileContent` | string | File content \(base64 encoded for binary files\) |
| `fileName` | string | Name of the downloaded file |
| `remotePath` | string | Source path on the remote server |
@@ -6,7 +6,7 @@ description: Convert speech to text using AI

import { BlockInfoCard } from "@/components/ui/block-info-card"

<BlockInfoCard
-  type="stt_v2"
+  type="stt"
  color="#181C1E"
/>

@@ -50,6 +50,8 @@ Transcribe audio and video files to text using leading AI providers. Supports mu

### `stt_whisper`

+Transcribe audio to text using OpenAI Whisper

#### Input

| Parameter | Type | Required | Description |
@@ -69,10 +71,22 @@ Transcribe audio and video files to text using leading AI providers. Supports mu

#### Output

-This tool does not produce any outputs.
+| Parameter | Type | Description |
+| --------- | ---- | ----------- |
+| `transcript` | string | Full transcribed text |
+| `segments` | array | Timestamped segments |
+| ↳ `text` | string | Transcribed text for this segment |
+| ↳ `start` | number | Start time in seconds |
+| ↳ `end` | number | End time in seconds |
+| ↳ `speaker` | string | Speaker identifier \(if diarization enabled\) |
+| ↳ `confidence` | number | Confidence score \(0-1\) |
+| `language` | string | Detected or specified language |
+| `duration` | number | Audio duration in seconds |

### `stt_deepgram`

+Transcribe audio to text using Deepgram

#### Input

| Parameter | Type | Required | Description |
@@ -89,10 +103,23 @@ This tool does not produce any outputs.

#### Output

-This tool does not produce any outputs.
+| Parameter | Type | Description |
+| --------- | ---- | ----------- |
+| `transcript` | string | Full transcribed text |
+| `segments` | array | Timestamped segments with speaker labels |
+| ↳ `text` | string | Transcribed text for this segment |
+| ↳ `start` | number | Start time in seconds |
+| ↳ `end` | number | End time in seconds |
+| ↳ `speaker` | string | Speaker identifier \(if diarization enabled\) |
+| ↳ `confidence` | number | Confidence score \(0-1\) |
+| `language` | string | Detected or specified language |
+| `duration` | number | Audio duration in seconds |
+| `confidence` | number | Overall confidence score |

### `stt_elevenlabs`

+Transcribe audio to text using ElevenLabs

#### Input

| Parameter | Type | Required | Description |
@@ -108,10 +135,18 @@ This tool does not produce any outputs.

#### Output

-This tool does not produce any outputs.
+| Parameter | Type | Description |
+| --------- | ---- | ----------- |
+| `transcript` | string | Full transcribed text |
+| `segments` | array | Timestamped segments |
+| `language` | string | Detected or specified language |
+| `duration` | number | Audio duration in seconds |
+| `confidence` | number | Overall confidence score |

### `stt_assemblyai`

+Transcribe audio to text using AssemblyAI with advanced NLP features

#### Input

| Parameter | Type | Required | Description |
@@ -132,10 +167,35 @@ This tool does not produce any outputs.

#### Output

-This tool does not produce any outputs.
+| Parameter | Type | Description |
+| --------- | ---- | ----------- |
+| `transcript` | string | Full transcribed text |
+| `segments` | array | Timestamped segments with speaker labels |
+| ↳ `text` | string | Transcribed text for this segment |
+| ↳ `start` | number | Start time in seconds |
+| ↳ `end` | number | End time in seconds |
+| ↳ `speaker` | string | Speaker identifier \(if diarization enabled\) |
+| ↳ `confidence` | number | Confidence score \(0-1\) |
+| `language` | string | Detected or specified language |
+| `duration` | number | Audio duration in seconds |
+| `confidence` | number | Overall confidence score |
+| `sentiment` | array | Sentiment analysis results |
+| ↳ `text` | string | Text that was analyzed |
+| ↳ `sentiment` | string | Sentiment \(POSITIVE, NEGATIVE, NEUTRAL\) |
+| ↳ `confidence` | number | Confidence score |
+| ↳ `start` | number | Start time in milliseconds |
+| ↳ `end` | number | End time in milliseconds |
+| `entities` | array | Detected entities |
+| ↳ `entity_type` | string | Entity type \(e.g., person_name, location, organization\) |
+| ↳ `text` | string | Entity text |
+| ↳ `start` | number | Start time in milliseconds |
+| ↳ `end` | number | End time in milliseconds |
+| `summary` | string | Auto-generated summary |

### `stt_gemini`

+Transcribe audio to text using Google Gemini with multimodal capabilities

#### Input

| Parameter | Type | Required | Description |
@@ -151,6 +211,12 @@ This tool does not produce any outputs.

#### Output

-This tool does not produce any outputs.
+| Parameter | Type | Description |
+| --------- | ---- | ----------- |
+| `transcript` | string | Full transcribed text |
+| `segments` | array | Timestamped segments |
+| `language` | string | Detected or specified language |
+| `duration` | number | Audio duration in seconds |
+| `confidence` | number | Overall confidence score |
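The STT output tables above all share the same core shape. A transcription result described by those tables might look roughly like the sketch below; field names come from the docs, while the values and the variable name are invented for illustration.

```typescript
// Illustrative transcription result matching the STT output tables above.
const transcription = {
  transcript: 'Welcome to the quarterly review. Revenue grew twelve percent.',
  segments: [
    { text: 'Welcome to the quarterly review.', start: 0.0, end: 2.4, speaker: 'A', confidence: 0.97 },
    { text: 'Revenue grew twelve percent.', start: 2.4, end: 4.8, speaker: 'A', confidence: 0.95 },
  ],
  language: 'en', // detected or specified language
  duration: 4.8, // audio duration in seconds
  confidence: 0.96, // overall confidence score
}
```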
@@ -354,7 +354,6 @@ Send documents (PDF, ZIP, DOC, etc.) to Telegram channels or users through the T

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `message` | string | Success or error message |
-| `files` | file[] | Files attached to the message |
| `data` | object | Telegram message data including document |
| ↳ `message_id` | number | Unique Telegram message identifier |
| ↳ `from` | object | Information about the sender |
@@ -6,7 +6,7 @@ description: Extract text, tables, and forms from documents

import { BlockInfoCard } from "@/components/ui/block-info-card"

<BlockInfoCard
-  type="textract_v2"
+  type="textract"
  color="linear-gradient(135deg, #055F4E 0%, #56C0A7 100%)"
/>

@@ -35,6 +35,8 @@ Integrate AWS Textract into your workflow to extract text, tables, forms, and ke

### `textract_parser`

+Parse documents using AWS Textract OCR and document analysis

#### Input

| Parameter | Type | Required | Description |

@@ -44,8 +46,8 @@ Integrate AWS Textract into your workflow to extract text, tables, forms, and ke

| `region` | string | Yes | AWS region for Textract service \(e.g., us-east-1\) |
| `processingMode` | string | No | Document type: single-page or multi-page. Defaults to single-page. |
| `filePath` | string | No | URL to a document to be processed \(JPEG, PNG, or single-page PDF\). |
-| `file` | file | No | Document file to be processed \(JPEG, PNG, or single-page PDF\). |
| `s3Uri` | string | No | S3 URI for multi-page processing \(s3://bucket/key\). |
+| `fileUpload` | object | No | File upload data from file-upload component |
| `featureTypes` | array | No | Feature types to detect: TABLES, FORMS, QUERIES, SIGNATURES, LAYOUT. If not specified, only text detection is performed. |
| `items` | string | No | Feature type |
| `queries` | array | No | Custom queries to extract specific information. Only used when featureTypes includes QUERIES. |
@@ -56,6 +58,39 @@ Integrate AWS Textract into your workflow to extract text, tables, forms, and ke

#### Output

-This tool does not produce any outputs.
+| Parameter | Type | Description |
+| --------- | ---- | ----------- |
+| `blocks` | array | Array of Block objects containing detected text, tables, forms, and other elements |
+| ↳ `BlockType` | string | Type of block \(PAGE, LINE, WORD, TABLE, CELL, KEY_VALUE_SET, etc.\) |
+| ↳ `Id` | string | Unique identifier for the block |
+| ↳ `Text` | string | The text content \(for LINE and WORD blocks\) |
+| ↳ `TextType` | string | Type of text \(PRINTED or HANDWRITING\) |
+| ↳ `Confidence` | number | Confidence score \(0-100\) |
+| ↳ `Page` | number | Page number |
+| ↳ `Geometry` | object | Location and bounding box information |
+| ↳ `BoundingBox` | object | Height as ratio of document height |
+| ↳ `Height` | number | Height as ratio of document height |
+| ↳ `Left` | number | Left position as ratio of document width |
+| ↳ `Top` | number | Top position as ratio of document height |
+| ↳ `Width` | number | Width as ratio of document width |
+| ↳ `Polygon` | array | Polygon coordinates |
+| ↳ `X` | number | X coordinate |
+| ↳ `Y` | number | Y coordinate |
+| ↳ `Relationships` | array | Relationships to other blocks |
+| ↳ `Type` | string | Relationship type \(CHILD, VALUE, ANSWER, etc.\) |
+| ↳ `Ids` | array | IDs of related blocks |
+| ↳ `EntityTypes` | array | Entity types for KEY_VALUE_SET \(KEY or VALUE\) |
+| ↳ `SelectionStatus` | string | For checkboxes: SELECTED or NOT_SELECTED |
+| ↳ `RowIndex` | number | Row index for table cells |
+| ↳ `ColumnIndex` | number | Column index for table cells |
+| ↳ `RowSpan` | number | Row span for merged cells |
+| ↳ `ColumnSpan` | number | Column span for merged cells |
+| ↳ `Query` | object | Query information for QUERY blocks |
+| ↳ `Text` | string | Query text |
+| ↳ `Alias` | string | Query alias |
+| ↳ `Pages` | array | Pages to search |
+| `documentMetadata` | object | Metadata about the analyzed document |
+| ↳ `pages` | number | Number of pages in the document |
+| `modelVersion` | string | Version of the Textract model used for processing |
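For orientation, a single entry in the `blocks` array described above follows the AWS Textract Block model. The sketch below shows one plausible LINE block; the field names follow the table, while the values are invented.

```typescript
// Illustrative textract_parser result matching the output table above.
const textractResult = {
  blocks: [
    {
      BlockType: 'LINE', // PAGE, LINE, WORD, TABLE, CELL, KEY_VALUE_SET, ...
      Id: 'a1b2c3d4',
      Text: 'Total due: $1,250.00',
      TextType: 'PRINTED',
      Confidence: 99.1, // 0-100
      Page: 1,
      Geometry: {
        BoundingBox: { Height: 0.02, Left: 0.12, Top: 0.4, Width: 0.3 },
        Polygon: [
          { X: 0.12, Y: 0.4 },
          { X: 0.42, Y: 0.4 },
          { X: 0.42, Y: 0.42 },
          { X: 0.12, Y: 0.42 },
        ],
      },
      Relationships: [{ Type: 'CHILD', Ids: ['w1', 'w2', 'w3'] }],
    },
  ],
  documentMetadata: { pages: 1 },
  modelVersion: '1.0',
}
```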
@@ -122,7 +122,6 @@ Retrieve call recording information and transcription (if enabled via TwiML).

| `channels` | number | Number of channels \(1 for mono, 2 for dual\) |
| `source` | string | How the recording was created |
| `mediaUrl` | string | URL to download the recording media file |
-| `file` | file | Downloaded recording media file |
| `price` | string | Cost of the recording |
| `priceUnit` | string | Currency of the price |
| `uri` | string | Relative URI of the recording resource |
@@ -75,7 +75,6 @@ Download files uploaded in Typeform responses

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `fileUrl` | string | Direct download URL for the uploaded file |
-| `file` | file | Downloaded file stored in execution files |
| `contentType` | string | MIME type of the uploaded file |
| `filename` | string | Original filename of the uploaded file |
@@ -57,14 +57,14 @@ Generate videos using Runway Gen-4 with world consistency and visual references

| `duration` | number | No | Video duration in seconds \(5 or 10, default: 5\) |
| `aspectRatio` | string | No | Aspect ratio: 16:9 \(landscape\), 9:16 \(portrait\), or 1:1 \(square\) |
| `resolution` | string | No | Video resolution \(720p output\). Note: Gen-4 Turbo outputs at 720p natively |
-| `visualReference` | file | Yes | Reference image REQUIRED for Gen-4 \(UserFile object\). Gen-4 only supports image-to-video, not text-only generation |
+| `visualReference` | json | Yes | Reference image REQUIRED for Gen-4 \(UserFile object\). Gen-4 only supports image-to-video, not text-only generation |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `videoUrl` | string | Generated video URL |
-| `videoFile` | file | Video file object with metadata |
+| `videoFile` | json | Video file object with metadata |
| `duration` | number | Video duration in seconds |
| `width` | number | Video width in pixels |
| `height` | number | Video height in pixels |

@@ -93,7 +93,7 @@ Generate videos using Google Veo 3/3.1 with native audio generation

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `videoUrl` | string | Generated video URL |
-| `videoFile` | file | Video file object with metadata |
+| `videoFile` | json | Video file object with metadata |
| `duration` | number | Video duration in seconds |
| `width` | number | Video width in pixels |
| `height` | number | Video height in pixels |

@@ -123,7 +123,7 @@ Generate videos using Luma Dream Machine with advanced camera controls

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `videoUrl` | string | Generated video URL |
-| `videoFile` | file | Video file object with metadata |
+| `videoFile` | json | Video file object with metadata |
| `duration` | number | Video duration in seconds |
| `width` | number | Video width in pixels |
| `height` | number | Video height in pixels |

@@ -151,7 +151,7 @@ Generate videos using MiniMax Hailuo through MiniMax Platform API with advanced

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `videoUrl` | string | Generated video URL |
-| `videoFile` | file | Video file object with metadata |
+| `videoFile` | json | Video file object with metadata |
| `duration` | number | Video duration in seconds |
| `width` | number | Video width in pixels |
| `height` | number | Video height in pixels |

@@ -181,7 +181,7 @@ Generate videos using Fal.ai platform with access to multiple models including V

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `videoUrl` | string | Generated video URL |
-| `videoFile` | file | Video file object with metadata |
+| `videoFile` | json | Video file object with metadata |
| `duration` | number | Video duration in seconds |
| `width` | number | Video width in pixels |
| `height` | number | Video height in pixels |
@@ -6,7 +6,7 @@ description: Analyze images with vision models

import { BlockInfoCard } from "@/components/ui/block-info-card"

<BlockInfoCard
-  type="vision_v2"
+  type="vision"
  color="#4D5FFF"
/>

@@ -35,6 +35,8 @@ Integrate Vision into the workflow. Can analyze images with vision models.

### `vision_tool`

+Process and analyze images using advanced vision models. Capable of understanding image content, extracting text, identifying objects, and providing detailed visual descriptions.

#### Input

| Parameter | Type | Required | Description |

@@ -47,6 +49,14 @@ Integrate Vision into the workflow. Can analyze images with vision models.

#### Output

-This tool does not produce any outputs.
+| Parameter | Type | Description |
+| --------- | ---- | ----------- |
+| `content` | string | The analyzed content and description of the image |
+| `model` | string | The vision model that was used for analysis |
+| `tokens` | number | Total tokens used for the analysis |
+| `usage` | object | Detailed token usage breakdown |
+| ↳ `input_tokens` | number | Tokens used for input processing |
+| ↳ `output_tokens` | number | Tokens used for response generation |
+| ↳ `total_tokens` | number | Total tokens consumed |
@@ -335,7 +335,6 @@ Get all recordings for a specific Zoom meeting

| `meetingId` | string | Yes | The meeting ID or meeting UUID \(e.g., "1234567890" or "4444AAABBBccccc12345=="\) |
| `includeFolderItems` | boolean | No | Include items within a folder |
| `ttl` | number | No | Time to live for download URLs in seconds \(max 604800\) |
-| `downloadFiles` | boolean | No | Download recording files into file outputs |

#### Output

@@ -365,7 +364,6 @@ Get all recordings for a specific Zoom meeting

| ↳ `download_url` | string | URL to download the recording |
| ↳ `status` | string | Recording status |
| ↳ `recording_type` | string | Type of recording \(shared_screen, audio_only, etc.\) |
-| `files` | file[] | Downloaded recording files |

### `zoom_delete_recording`
@@ -6,7 +6,7 @@ description: Leer y analizar múltiples archivos

import { BlockInfoCard } from "@/components/ui/block-info-card"

<BlockInfoCard
-  type="file_v3"
+  type="file"
  color="#40916C"
/>

@@ -6,7 +6,7 @@ description: Interactúa con transcripciones y grabaciones de reuniones de Firef

import { BlockInfoCard } from "@/components/ui/block-info-card"

<BlockInfoCard
-  type="fireflies_v2"
+  type="fireflies"
  color="#100730"
/>

@@ -6,7 +6,7 @@ description: Extraer texto de documentos PDF

import { BlockInfoCard } from "@/components/ui/block-info-card"

<BlockInfoCard
-  type="mistral_parse_v3"
+  type="mistral_parse"
  color="#000000"
/>

@@ -6,7 +6,7 @@ description: Lire et analyser plusieurs fichiers

import { BlockInfoCard } from "@/components/ui/block-info-card"

<BlockInfoCard
-  type="file_v3"
+  type="file"
  color="#40916C"
/>

@@ -7,7 +7,7 @@ description: Interagissez avec les transcriptions et enregistrements de réunion

import { BlockInfoCard } from "@/components/ui/block-info-card"

<BlockInfoCard
-  type="fireflies_v2"
+  type="fireflies"
  color="#100730"
/>

@@ -6,7 +6,7 @@ description: Extraire du texte à partir de documents PDF

import { BlockInfoCard } from "@/components/ui/block-info-card"

<BlockInfoCard
-  type="mistral_parse_v3"
+  type="mistral_parse"
  color="#000000"
/>

@@ -6,7 +6,7 @@ description: 複数のファイルを読み込んで解析する

import { BlockInfoCard } from "@/components/ui/block-info-card"

<BlockInfoCard
-  type="file_v3"
+  type="file"
  color="#40916C"
/>

@@ -6,7 +6,7 @@ description: Fireflies.aiの会議文字起こしと録画を操作

import { BlockInfoCard } from "@/components/ui/block-info-card"

<BlockInfoCard
-  type="fireflies_v2"
+  type="fireflies"
  color="#100730"
/>

@@ -6,7 +6,7 @@ description: PDFドキュメントからテキストを抽出する

import { BlockInfoCard } from "@/components/ui/block-info-card"

<BlockInfoCard
-  type="mistral_parse_v3"
+  type="mistral_parse"
  color="#000000"
/>

@@ -6,7 +6,7 @@ description: 读取并解析多个文件

import { BlockInfoCard } from "@/components/ui/block-info-card"

<BlockInfoCard
-  type="file_v3"
+  type="file"
  color="#40916C"
/>

@@ -6,7 +6,7 @@ description: 与 Fireflies.ai 会议转录和录音进行交互

import { BlockInfoCard } from "@/components/ui/block-info-card"

<BlockInfoCard
-  type="fireflies_v2"
+  type="fireflies"
  color="#100730"
/>

@@ -6,7 +6,7 @@ description: 从 PDF 文档中提取文本

import { BlockInfoCard } from "@/components/ui/block-info-card"

<BlockInfoCard
-  type="mistral_parse_v3"
+  type="mistral_parse"
  color="#000000"
/>
@@ -1,7 +1,7 @@

'use client'

+import { useBrandConfig } from '@/lib/branding/branding'
import { inter } from '@/app/_styles/fonts/inter/inter'
-import { useBrandConfig } from '@/ee/whitelabeling'

export interface SupportFooterProps {
  /** Position style - 'fixed' for pages without AuthLayout, 'absolute' for pages with AuthLayout */

@@ -7,10 +7,10 @@ import Image from 'next/image'

import Link from 'next/link'
import { useRouter } from 'next/navigation'
import { GithubIcon } from '@/components/icons'
+import { useBrandConfig } from '@/lib/branding/branding'
import { isHosted } from '@/lib/core/config/feature-flags'
import { soehne } from '@/app/_styles/fonts/soehne/soehne'
import { getFormattedGitHubStars } from '@/app/(landing)/actions/github'
-import { useBrandConfig } from '@/ee/whitelabeling'
import { useBrandedButtonClass } from '@/hooks/use-branded-button-class'

const logger = createLogger('nav')
@@ -14,6 +14,7 @@ import {

  parseWorkflowSSEChunk,
} from '@/lib/a2a/utils'
import { checkHybridAuth } from '@/lib/auth/hybrid'
+import { getBrandConfig } from '@/lib/branding/branding'
import { acquireLock, getRedisClient, releaseLock } from '@/lib/core/config/redis'
import { validateUrlWithDNS } from '@/lib/core/security/input-validation.server'
import { SSE_HEADERS } from '@/lib/core/utils/sse'

@@ -34,7 +35,6 @@ import {

  type PushNotificationSetParams,
  type TaskIdParams,
} from '@/app/api/a2a/serve/[agentId]/utils'
-import { getBrandConfig } from '@/ee/whitelabeling'

const logger = createLogger('A2AServeAPI')
@@ -7,8 +7,14 @@ import { z } from 'zod'
 import { getSession } from '@/lib/auth'
 import { generateChatTitle } from '@/lib/copilot/chat-title'
 import { getCopilotModel } from '@/lib/copilot/config'
-import { SIM_AGENT_API_URL_DEFAULT, SIM_AGENT_VERSION } from '@/lib/copilot/constants'
+import { SIM_AGENT_VERSION } from '@/lib/copilot/constants'
 import { COPILOT_MODEL_IDS, COPILOT_REQUEST_MODES } from '@/lib/copilot/models'
+import { orchestrateCopilotStream } from '@/lib/copilot/orchestrator'
+import {
+  createStreamEventWriter,
+  resetStreamBuffer,
+  setStreamMeta,
+} from '@/lib/copilot/orchestrator/stream-buffer'
 import {
   authenticateCopilotRequestSessionOnly,
   createBadRequestResponse,
@@ -21,13 +27,12 @@ import type { CopilotProviderConfig } from '@/lib/copilot/types'
 import { env } from '@/lib/core/config/env'
 import { CopilotFiles } from '@/lib/uploads'
 import { createFileContent } from '@/lib/uploads/utils/file-utils'
+import { resolveWorkflowIdForUser } from '@/lib/workflows/utils'
 import { tools } from '@/tools/registry'
 import { getLatestVersionTools, stripVersionSuffix } from '@/tools/utils'

 const logger = createLogger('CopilotChatAPI')

-const SIM_AGENT_API_URL = env.SIM_AGENT_API_URL || SIM_AGENT_API_URL_DEFAULT
-
 const FileAttachmentSchema = z.object({
   id: z.string(),
   key: z.string(),
@@ -40,7 +45,8 @@ const ChatMessageSchema = z.object({
   message: z.string().min(1, 'Message is required'),
   userMessageId: z.string().optional(), // ID from frontend for the user message
   chatId: z.string().optional(),
-  workflowId: z.string().min(1, 'Workflow ID is required'),
+  workflowId: z.string().optional(),
+  workflowName: z.string().optional(),
   model: z.enum(COPILOT_MODEL_IDS).optional().default('claude-4.5-opus'),
   mode: z.enum(COPILOT_REQUEST_MODES).optional().default('agent'),
   prefetch: z.boolean().optional(),
@@ -100,7 +106,8 @@ export async function POST(req: NextRequest) {
       message,
       userMessageId,
       chatId,
-      workflowId,
+      workflowId: providedWorkflowId,
+      workflowName,
       model,
       mode,
       prefetch,
@@ -113,6 +120,20 @@ export async function POST(req: NextRequest) {
       contexts,
       commands,
     } = ChatMessageSchema.parse(body)

+    // Resolve workflowId - if not provided, use first workflow or find by name
+    const resolved = await resolveWorkflowIdForUser(
+      authenticatedUserId,
+      providedWorkflowId,
+      workflowName
+    )
+    if (!resolved) {
+      return createBadRequestResponse(
+        'No workflows found. Create a workflow first or provide a valid workflowId.'
+      )
+    }
+    const workflowId = resolved.workflowId
+
     // Ensure we have a consistent user message ID for this request
     const userMessageIdToUse = userMessageId || crypto.randomUUID()
     try {
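With `workflowId` now optional and `workflowName` accepted, a request can omit both and the handler falls back to the user's first workflow. A minimal sketch of a request body under the updated schema; the `/api/copilot/chat` path follows from the route's location, and the `stream` flag mirrors its use later in the handler, so treat both as assumptions:

```typescript
// Sketch only: field names come from ChatMessageSchema above; values are illustrative.
const res = await fetch('/api/copilot/chat', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    message: 'Add a Slack notification after the agent step',
    workflowName: 'Lead Triage', // optional; omit workflowId/workflowName to use the first workflow
    stream: true,
  }),
})
```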
@@ -465,77 +486,53 @@ export async function POST(req: NextRequest) {
       })
     } catch {}

-    const simAgentResponse = await fetch(`${SIM_AGENT_API_URL}/api/chat-completion-streaming`, {
-      method: 'POST',
-      headers: {
-        'Content-Type': 'application/json',
-        ...(env.COPILOT_API_KEY ? { 'x-api-key': env.COPILOT_API_KEY } : {}),
-      },
-      body: JSON.stringify(requestPayload),
-    })
-
-    if (!simAgentResponse.ok) {
-      if (simAgentResponse.status === 401 || simAgentResponse.status === 402) {
-        // Rethrow status only; client will render appropriate assistant message
-        return new NextResponse(null, { status: simAgentResponse.status })
-      }
-
-      const errorText = await simAgentResponse.text().catch(() => '')
-      logger.error(`[${tracker.requestId}] Sim agent API error:`, {
-        status: simAgentResponse.status,
-        error: errorText,
-      })
-
-      return NextResponse.json(
-        { error: `Sim agent API error: ${simAgentResponse.statusText}` },
-        { status: simAgentResponse.status }
-      )
-    }
-
-    // If streaming is requested, forward the stream and update chat later
-    if (stream && simAgentResponse.body) {
-      // Create user message to save
-      const userMessage = {
-        id: userMessageIdToUse, // Consistent ID used for request and persistence
-        role: 'user',
-        content: message,
-        timestamp: new Date().toISOString(),
-        ...(fileAttachments && fileAttachments.length > 0 && { fileAttachments }),
-        ...(Array.isArray(contexts) && contexts.length > 0 && { contexts }),
-        ...(Array.isArray(contexts) &&
-          contexts.length > 0 && {
-            contentBlocks: [{ type: 'contexts', contexts: contexts as any, timestamp: Date.now() }],
-          }),
-      }
-
-      // Create a pass-through stream that captures the response
+    if (stream) {
+      const streamId = userMessageIdToUse
+      let eventWriter: ReturnType<typeof createStreamEventWriter> | null = null
+      let clientDisconnected = false
+
       const transformedStream = new ReadableStream({
         async start(controller) {
           const encoder = new TextEncoder()
-          let assistantContent = ''
-          const toolCalls: any[] = []
-          let buffer = ''
-          const isFirstDone = true
-          let responseIdFromStart: string | undefined
-          let responseIdFromDone: string | undefined
-          // Track tool call progress to identify a safe done event
-          const announcedToolCallIds = new Set<string>()
-          const startedToolExecutionIds = new Set<string>()
-          const completedToolExecutionIds = new Set<string>()
-          let lastDoneResponseId: string | undefined
-          let lastSafeDoneResponseId: string | undefined
-
-          // Send chatId as first event
-          if (actualChatId) {
-            const chatIdEvent = `data: ${JSON.stringify({
-              type: 'chat_id',
-              chatId: actualChatId,
-            })}\n\n`
-            controller.enqueue(encoder.encode(chatIdEvent))
-            logger.debug(`[${tracker.requestId}] Sent initial chatId event to client`)
+          await resetStreamBuffer(streamId)
+          await setStreamMeta(streamId, { status: 'active', userId: authenticatedUserId })
+          eventWriter = createStreamEventWriter(streamId)
+
+          const shouldFlushEvent = (event: Record<string, any>) =>
+            event.type === 'tool_call' ||
+            event.type === 'tool_result' ||
+            event.type === 'tool_error' ||
+            event.type === 'subagent_end' ||
+            event.type === 'structured_result' ||
+            event.type === 'subagent_result' ||
+            event.type === 'done' ||
+            event.type === 'error'
+
+          const pushEvent = async (event: Record<string, any>) => {
+            if (!eventWriter) return
+            const entry = await eventWriter.write(event)
+            if (shouldFlushEvent(event)) {
+              await eventWriter.flush()
+            }
+            const payload = {
+              ...event,
+              eventId: entry.eventId,
+              streamId,
+            }
+            try {
+              if (!clientDisconnected) {
+                controller.enqueue(encoder.encode(`data: ${JSON.stringify(payload)}\n\n`))
+              }
+            } catch {
+              clientDisconnected = true
+              await eventWriter.flush()
+            }
+          }
+
+          if (actualChatId) {
+            await pushEvent({ type: 'chat_id', chatId: actualChatId })
           }

-          // Start title generation in parallel if needed
           if (actualChatId && !currentChat?.title && conversationHistory.length === 0) {
             generateChatTitle(message)
               .then(async (title) => {
@@ -547,311 +544,64 @@ export async function POST(req: NextRequest) {
                   updatedAt: new Date(),
                 })
                 .where(eq(copilotChats.id, actualChatId!))
-                const titleEvent = `data: ${JSON.stringify({
-                  type: 'title_updated',
-                  title: title,
-                })}\n\n`
-                controller.enqueue(encoder.encode(titleEvent))
-                logger.info(`[${tracker.requestId}] Generated and saved title: ${title}`)
+                await pushEvent({ type: 'title_updated', title })
                 }
               })
               .catch((error) => {
                 logger.error(`[${tracker.requestId}] Title generation failed:`, error)
               })
-          } else {
-            logger.debug(`[${tracker.requestId}] Skipping title generation`)
           }

-          // Forward the sim agent stream and capture assistant response
-          const reader = simAgentResponse.body!.getReader()
-          const decoder = new TextDecoder()
-
           try {
-            while (true) {
-              const { done, value } = await reader.read()
-              if (done) {
-                break
-              }
-
-              // Decode and parse SSE events for logging and capturing content
-              const decodedChunk = decoder.decode(value, { stream: true })
-              buffer += decodedChunk
-
-              const lines = buffer.split('\n')
-              buffer = lines.pop() || '' // Keep incomplete line in buffer
-
-              for (const line of lines) {
-                if (line.trim() === '') continue // Skip empty lines
-
-                if (line.startsWith('data: ') && line.length > 6) {
-                  try {
-                    const jsonStr = line.slice(6)
-
-                    // Check if the JSON string is unusually large (potential streaming issue)
-                    if (jsonStr.length > 50000) {
-                      // 50KB limit
-                      logger.warn(`[${tracker.requestId}] Large SSE event detected`, {
-                        size: jsonStr.length,
-                        preview: `${jsonStr.substring(0, 100)}...`,
-                      })
-                    }
-
-                    const event = JSON.parse(jsonStr)
-
-                    // Log different event types comprehensively
-                    switch (event.type) {
-                      case 'content':
-                        if (event.data) {
-                          assistantContent += event.data
-                        }
-                        break
-
-                      case 'reasoning':
-                        logger.debug(
-                          `[${tracker.requestId}] Reasoning chunk received (${(event.data || event.content || '').length} chars)`
-                        )
-                        break
-
-                      case 'tool_call':
-                        if (!event.data?.partial) {
-                          toolCalls.push(event.data)
-                          if (event.data?.id) {
-                            announcedToolCallIds.add(event.data.id)
-                          }
-                        }
-                        break
-
-                      case 'tool_generating':
-                        if (event.toolCallId) {
-                          startedToolExecutionIds.add(event.toolCallId)
-                        }
-                        break
-
-                      case 'tool_result':
-                        if (event.toolCallId) {
-                          completedToolExecutionIds.add(event.toolCallId)
-                        }
-                        break
-
-                      case 'tool_error':
-                        logger.error(`[${tracker.requestId}] Tool error:`, {
-                          toolCallId: event.toolCallId,
-                          toolName: event.toolName,
-                          error: event.error,
-                          success: event.success,
-                        })
-                        if (event.toolCallId) {
-                          completedToolExecutionIds.add(event.toolCallId)
-                        }
-                        break
-
-                      case 'start':
-                        if (event.data?.responseId) {
-                          responseIdFromStart = event.data.responseId
-                        }
-                        break
-
-                      case 'done':
-                        if (event.data?.responseId) {
-                          responseIdFromDone = event.data.responseId
-                          lastDoneResponseId = responseIdFromDone
-
-                          // Mark this done as safe only if no tool call is currently in progress or pending
-                          const announced = announcedToolCallIds.size
-                          const completed = completedToolExecutionIds.size
-                          const started = startedToolExecutionIds.size
-                          const hasToolInProgress = announced > completed || started > completed
-                          if (!hasToolInProgress) {
-                            lastSafeDoneResponseId = responseIdFromDone
-                          }
-                        }
-                        break
-
-                      case 'error':
-                        break
-
-                      default:
-                    }
-
-                    // Emit to client: rewrite 'error' events into user-friendly assistant message
-                    if (event?.type === 'error') {
-                      try {
-                        const displayMessage: string =
-                          (event?.data && (event.data.displayMessage as string)) ||
-                          'Sorry, I encountered an error. Please try again.'
-                        const formatted = `_${displayMessage}_`
-                        // Accumulate so it persists to DB as assistant content
-                        assistantContent += formatted
-                        // Send as content chunk
-                        try {
-                          controller.enqueue(
-                            encoder.encode(
-                              `data: ${JSON.stringify({ type: 'content', data: formatted })}\n\n`
-                            )
-                          )
-                        } catch (enqueueErr) {
-                          reader.cancel()
-                          break
-                        }
-                        // Then close this response cleanly for the client
-                        try {
-                          controller.enqueue(
-                            encoder.encode(`data: ${JSON.stringify({ type: 'done' })}\n\n`)
-                          )
-                        } catch (enqueueErr) {
-                          reader.cancel()
-                          break
-                        }
-                      } catch {}
-                      // Do not forward the original error event
-                    } else {
-                      // Forward original event to client
-                      try {
-                        controller.enqueue(encoder.encode(`data: ${jsonStr}\n\n`))
-                      } catch (enqueueErr) {
-                        reader.cancel()
-                        break
-                      }
-                    }
-                  } catch (e) {
-                    // Enhanced error handling for large payloads and parsing issues
-                    const lineLength = line.length
-                    const isLargePayload = lineLength > 10000
-
-                    if (isLargePayload) {
-                      logger.error(
-                        `[${tracker.requestId}] Failed to parse large SSE event (${lineLength} chars)`,
-                        {
-                          error: e,
-                          preview: `${line.substring(0, 200)}...`,
-                          size: lineLength,
-                        }
-                      )
-                    } else {
-                      logger.warn(
-                        `[${tracker.requestId}] Failed to parse SSE event: "${line.substring(0, 200)}..."`,
-                        e
-                      )
-                    }
-                  }
-                } else if (line.trim() && line !== 'data: [DONE]') {
-                  logger.debug(`[${tracker.requestId}] Non-SSE line from sim agent: "${line}"`)
-                }
-              }
-            }
-
-            // Process any remaining buffer
-            if (buffer.trim()) {
-              logger.debug(`[${tracker.requestId}] Processing remaining buffer: "${buffer}"`)
-              if (buffer.startsWith('data: ')) {
-                try {
-                  const jsonStr = buffer.slice(6)
-                  const event = JSON.parse(jsonStr)
-                  if (event.type === 'content' && event.data) {
-                    assistantContent += event.data
-                  }
-                  // Forward remaining event, applying same error rewrite behavior
-                  if (event?.type === 'error') {
-                    const displayMessage: string =
-                      (event?.data && (event.data.displayMessage as string)) ||
-                      'Sorry, I encountered an error. Please try again.'
-                    const formatted = `_${displayMessage}_`
-                    assistantContent += formatted
-                    try {
-                      controller.enqueue(
-                        encoder.encode(
-                          `data: ${JSON.stringify({ type: 'content', data: formatted })}\n\n`
-                        )
-                      )
-                      controller.enqueue(
-                        encoder.encode(`data: ${JSON.stringify({ type: 'done' })}\n\n`)
-                      )
-                    } catch (enqueueErr) {
-                      reader.cancel()
-                    }
-                  } else {
-                    try {
-                      controller.enqueue(encoder.encode(`data: ${jsonStr}\n\n`))
-                    } catch (enqueueErr) {
-                      reader.cancel()
-                    }
-                  }
-                } catch (e) {
-                  logger.warn(`[${tracker.requestId}] Failed to parse final buffer: "${buffer}"`)
-                }
-              }
-            }
-
-            // Log final streaming summary
-            logger.info(`[${tracker.requestId}] Streaming complete summary:`, {
-              totalContentLength: assistantContent.length,
-              toolCallsCount: toolCalls.length,
-              hasContent: assistantContent.length > 0,
-              toolNames: toolCalls.map((tc) => tc?.name).filter(Boolean),
+            const result = await orchestrateCopilotStream(requestPayload, {
+              userId: authenticatedUserId,
+              workflowId,
+              chatId: actualChatId,
+              autoExecuteTools: true,
+              interactive: true,
+              onEvent: async (event) => {
+                await pushEvent(event)
+              },
             })

-            // NOTE: Messages are saved by the client via update-messages endpoint with full contentBlocks.
-            // Server only updates conversationId here to avoid overwriting client's richer save.
-            if (currentChat) {
-              // Persist only a safe conversationId to avoid continuing from a state that expects tool outputs
-              const previousConversationId = currentChat?.conversationId as string | undefined
-              const responseId = lastSafeDoneResponseId || previousConversationId || undefined
-
-              if (responseId) {
-                await db
-                  .update(copilotChats)
-                  .set({
-                    updatedAt: new Date(),
-                    conversationId: responseId,
-                  })
-                  .where(eq(copilotChats.id, actualChatId!))
-
-                logger.info(
-                  `[${tracker.requestId}] Updated conversationId for chat ${actualChatId}`,
-                  {
-                    updatedConversationId: responseId,
-                  }
-                )
-              }
+            if (currentChat && result.conversationId) {
+              await db
+                .update(copilotChats)
+                .set({
+                  updatedAt: new Date(),
+                  conversationId: result.conversationId,
+                })
+                .where(eq(copilotChats.id, actualChatId!))
             }
+            await eventWriter.close()
+            await setStreamMeta(streamId, { status: 'complete', userId: authenticatedUserId })
           } catch (error) {
-            logger.error(`[${tracker.requestId}] Error processing stream:`, error)
-
-            // Send an error event to the client before closing so it knows what happened
-            try {
-              const errorMessage =
-                error instanceof Error && error.message === 'terminated'
-                  ? 'Connection to AI service was interrupted. Please try again.'
-                  : 'An unexpected error occurred while processing the response.'
-              const encoder = new TextEncoder()
-
-              // Send error as content so it shows in the chat
-              controller.enqueue(
-                encoder.encode(
-                  `data: ${JSON.stringify({ type: 'content', data: `\n\n_${errorMessage}_` })}\n\n`
-                )
-              )
-              // Send done event to properly close the stream on client
-              controller.enqueue(encoder.encode(`data: ${JSON.stringify({ type: 'done' })}\n\n`))
-            } catch (enqueueError) {
-              // Stream might already be closed, that's ok
-              logger.warn(
-                `[${tracker.requestId}] Could not send error event to client:`,
-                enqueueError
-              )
-            }
+            logger.error(`[${tracker.requestId}] Orchestration error:`, error)
+            await eventWriter.close()
+            await setStreamMeta(streamId, {
+              status: 'error',
+              userId: authenticatedUserId,
+              error: error instanceof Error ? error.message : 'Stream error',
+            })
+            await pushEvent({
+              type: 'error',
+              data: {
+                displayMessage: 'An unexpected error occurred while processing the response.',
+              },
+            })
           } finally {
-            try {
-              controller.close()
-            } catch {
-              // Controller might already be closed
-            }
+            controller.close()
+          }
+        },
+        async cancel() {
+          clientDisconnected = true
+          if (eventWriter) {
+            await eventWriter.flush()
           }
         },
       })

-      const response = new Response(transformedStream, {
+      return new Response(transformedStream, {
         headers: {
           'Content-Type': 'text/event-stream',
           'Cache-Control': 'no-cache',
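In the hunk above, every event is written to the stream buffer before it is forwarded, and the SSE payload the client receives is the original event plus `eventId` and `streamId`. A rough sketch of that envelope as a client-side type; the field names come from `pushEvent`, while the listed event types are indicative rather than exhaustive:

```typescript
// Sketch of the SSE frame shape produced by pushEvent; not an exhaustive event list.
interface CopilotStreamFrame {
  type: 'content' | 'tool_call' | 'tool_result' | 'tool_error' | 'done' | 'error' | string
  eventId: number // assigned by the stream buffer, monotonically increasing
  streamId: string // equals userMessageIdToUse for the originating request
  [key: string]: unknown
}
```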
@@ -859,43 +609,31 @@ export async function POST(req: NextRequest) {
           'X-Accel-Buffering': 'no',
         },
       })
-
-      logger.info(`[${tracker.requestId}] Returning streaming response to client`, {
-        duration: tracker.getDuration(),
-        chatId: actualChatId,
-        headers: {
-          'Content-Type': 'text/event-stream',
-          'Cache-Control': 'no-cache',
-          Connection: 'keep-alive',
-        },
-      })
-
-      return response
     }

-    // For non-streaming responses
-    const responseData = await simAgentResponse.json()
-    logger.info(`[${tracker.requestId}] Non-streaming response from sim agent:`, {
+    const nonStreamingResult = await orchestrateCopilotStream(requestPayload, {
+      userId: authenticatedUserId,
+      workflowId,
+      chatId: actualChatId,
+      autoExecuteTools: true,
+      interactive: true,
+    })
+
+    const responseData = {
+      content: nonStreamingResult.content,
+      toolCalls: nonStreamingResult.toolCalls,
+      model: selectedModel,
+      provider: providerConfig?.provider || env.COPILOT_PROVIDER || 'openai',
+    }
+
+    logger.info(`[${tracker.requestId}] Non-streaming response from orchestrator:`, {
       hasContent: !!responseData.content,
       contentLength: responseData.content?.length || 0,
       model: responseData.model,
       provider: responseData.provider,
       toolCallsCount: responseData.toolCalls?.length || 0,
-      hasTokens: !!responseData.tokens,
     })
-
-    // Log tool calls if present
-    if (responseData.toolCalls?.length > 0) {
-      responseData.toolCalls.forEach((toolCall: any) => {
-        logger.info(`[${tracker.requestId}] Tool call in response:`, {
-          id: toolCall.id,
-          name: toolCall.name,
-          success: toolCall.success,
-          result: `${JSON.stringify(toolCall.result).substring(0, 200)}...`,
-        })
-      })
-    }

     // Save messages if we have a chat
     if (currentChat && responseData.content) {
       const userMessage = {
@@ -947,6 +685,9 @@ export async function POST(req: NextRequest) {
         .set({
           messages: updatedMessages,
           updatedAt: new Date(),
+          ...(nonStreamingResult.conversationId
+            ? { conversationId: nonStreamingResult.conversationId }
+            : {}),
         })
         .where(eq(copilotChats.id, actualChatId!))
     }
@@ -998,10 +739,7 @@ export async function GET(req: NextRequest) {
   try {
     const { searchParams } = new URL(req.url)
     const workflowId = searchParams.get('workflowId')
+    const chatId = searchParams.get('chatId')
-    if (!workflowId) {
-      return createBadRequestResponse('workflowId is required')
-    }
-
     // Get authenticated user using consolidated helper
     const { userId: authenticatedUserId, isAuthenticated } =
@@ -1010,6 +748,47 @@ export async function GET(req: NextRequest) {
       return createUnauthorizedResponse()
     }
+
+    // If chatId is provided, fetch a single chat
+    if (chatId) {
+      const [chat] = await db
+        .select({
+          id: copilotChats.id,
+          title: copilotChats.title,
+          model: copilotChats.model,
+          messages: copilotChats.messages,
+          planArtifact: copilotChats.planArtifact,
+          config: copilotChats.config,
+          createdAt: copilotChats.createdAt,
+          updatedAt: copilotChats.updatedAt,
+        })
+        .from(copilotChats)
+        .where(and(eq(copilotChats.id, chatId), eq(copilotChats.userId, authenticatedUserId)))
+        .limit(1)
+
+      if (!chat) {
+        return NextResponse.json({ success: false, error: 'Chat not found' }, { status: 404 })
+      }
+
+      const transformedChat = {
+        id: chat.id,
+        title: chat.title,
+        model: chat.model,
+        messages: Array.isArray(chat.messages) ? chat.messages : [],
+        messageCount: Array.isArray(chat.messages) ? chat.messages.length : 0,
+        planArtifact: chat.planArtifact || null,
+        config: chat.config || null,
+        createdAt: chat.createdAt,
+        updatedAt: chat.updatedAt,
+      }
+
+      logger.info(`Retrieved chat ${chatId}`)
+      return NextResponse.json({ success: true, chat: transformedChat })
+    }
+
+    if (!workflowId) {
+      return createBadRequestResponse('workflowId or chatId is required')
+    }
+
     // Fetch chats for this user and workflow
     const chats = await db
       .select({
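Since the GET handler now accepts `chatId`, a single chat can be fetched directly. A hedged sketch of a caller; the route path is inferred from the file's location, and `chatId` is assumed to be in scope:

```typescript
// Sketch: fetch one chat; the handler returns { success, chat } or a 404.
const res = await fetch(`/api/copilot/chat?chatId=${chatId}`)
const body = await res.json()
if (body.success) {
  console.log(body.chat.title, body.chat.messageCount)
}
```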
apps/sim/app/api/copilot/chat/stream/route.ts (new file, 130 lines)
@@ -0,0 +1,130 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import {
  getStreamMeta,
  readStreamEvents,
  type StreamMeta,
} from '@/lib/copilot/orchestrator/stream-buffer'
import { authenticateCopilotRequestSessionOnly } from '@/lib/copilot/request-helpers'
import { SSE_HEADERS } from '@/lib/core/utils/sse'

const logger = createLogger('CopilotChatStreamAPI')
const POLL_INTERVAL_MS = 250
const MAX_STREAM_MS = 10 * 60 * 1000

function encodeEvent(event: Record<string, any>): Uint8Array {
  return new TextEncoder().encode(`data: ${JSON.stringify(event)}\n\n`)
}

export async function GET(request: NextRequest) {
  const { userId: authenticatedUserId, isAuthenticated } =
    await authenticateCopilotRequestSessionOnly()

  if (!isAuthenticated || !authenticatedUserId) {
    return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
  }

  const url = new URL(request.url)
  const streamId = url.searchParams.get('streamId') || ''
  const fromParam = url.searchParams.get('from') || '0'
  const fromEventId = Number(fromParam || 0)
  // If batch=true, return buffered events as JSON instead of SSE
  const batchMode = url.searchParams.get('batch') === 'true'
  const toParam = url.searchParams.get('to')
  const toEventId = toParam ? Number(toParam) : undefined

  if (!streamId) {
    return NextResponse.json({ error: 'streamId is required' }, { status: 400 })
  }

  const meta = (await getStreamMeta(streamId)) as StreamMeta | null
  logger.info('[Resume] Stream lookup', {
    streamId,
    fromEventId,
    toEventId,
    batchMode,
    hasMeta: !!meta,
    metaStatus: meta?.status,
  })
  if (!meta) {
    return NextResponse.json({ error: 'Stream not found' }, { status: 404 })
  }
  if (meta.userId && meta.userId !== authenticatedUserId) {
    return NextResponse.json({ error: 'Unauthorized' }, { status: 403 })
  }

  // Batch mode: return all buffered events as JSON
  if (batchMode) {
    const events = await readStreamEvents(streamId, fromEventId)
    const filteredEvents = toEventId ? events.filter((e) => e.eventId <= toEventId) : events
    logger.info('[Resume] Batch response', {
      streamId,
      fromEventId,
      toEventId,
      eventCount: filteredEvents.length,
    })
    return NextResponse.json({
      success: true,
      events: filteredEvents,
      status: meta.status,
    })
  }

  const startTime = Date.now()

  const stream = new ReadableStream({
    async start(controller) {
      let lastEventId = Number.isFinite(fromEventId) ? fromEventId : 0

      const flushEvents = async () => {
        const events = await readStreamEvents(streamId, lastEventId)
        if (events.length > 0) {
          logger.info('[Resume] Flushing events', {
            streamId,
            fromEventId: lastEventId,
            eventCount: events.length,
          })
        }
        for (const entry of events) {
          lastEventId = entry.eventId
          const payload = {
            ...entry.event,
            eventId: entry.eventId,
            streamId: entry.streamId,
          }
          controller.enqueue(encodeEvent(payload))
        }
      }

      try {
        await flushEvents()

        while (Date.now() - startTime < MAX_STREAM_MS) {
          const currentMeta = await getStreamMeta(streamId)
          if (!currentMeta) break

          await flushEvents()

          if (currentMeta.status === 'complete' || currentMeta.status === 'error') {
            break
          }

          if (request.signal.aborted) {
            break
          }

          await new Promise((resolve) => setTimeout(resolve, POLL_INTERVAL_MS))
        }
      } catch (error) {
        logger.warn('Stream replay failed', {
          streamId,
          error: error instanceof Error ? error.message : String(error),
        })
      } finally {
        controller.close()
      }
    },
  })

  return new Response(stream, { headers: SSE_HEADERS })
}
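The new stream route replays buffered events either as SSE (`from` is the last event id already seen) or, with `batch=true`, as a JSON array. A hedged client sketch of the batch form; the `handleCopilotEvent` helper is hypothetical:

```typescript
// Sketch: resuming a copilot stream after a disconnect using batch mode.
// SSE form: GET /api/copilot/chat/stream?streamId=<id>&from=<lastEventId>
const res = await fetch(
  `/api/copilot/chat/stream?streamId=${streamId}&from=${lastEventId}&batch=true`
)
const { events, status } = await res.json()
for (const entry of events) {
  handleCopilotEvent(entry) // hypothetical consumer; each entry carries its eventId
}
if (status === 'complete' || status === 'error') {
  // nothing more will be buffered for this streamId
}
```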
apps/sim/app/api/mcp/copilot/route.ts (new file, 413 lines)
@@ -0,0 +1,413 @@
import {
  type CallToolResult,
  ErrorCode,
  type InitializeResult,
  isJSONRPCNotification,
  isJSONRPCRequest,
  type JSONRPCError,
  type JSONRPCMessage,
  type JSONRPCResponse,
  type ListToolsResult,
  type RequestId,
} from '@modelcontextprotocol/sdk/types.js'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { getCopilotModel } from '@/lib/copilot/config'
import { orchestrateSubagentStream } from '@/lib/copilot/orchestrator/subagent'
import {
  executeToolServerSide,
  prepareExecutionContext,
} from '@/lib/copilot/orchestrator/tool-executor'
import { DIRECT_TOOL_DEFS, SUBAGENT_TOOL_DEFS } from '@/lib/copilot/tools/mcp/definitions'

const logger = createLogger('CopilotMcpAPI')

export const dynamic = 'force-dynamic'

/**
 * MCP Server instructions that guide LLMs on how to use the Sim copilot tools.
 * This is included in the initialize response to help external LLMs understand
 * the workflow lifecycle and best practices.
 */
const MCP_SERVER_INSTRUCTIONS = `
## Sim Workflow Copilot - Usage Guide

You are interacting with Sim's workflow automation platform. These tools orchestrate specialized AI agents that build workflows. Follow these guidelines carefully.

---

## Platform Knowledge

Sim is a workflow automation platform. Workflows are visual pipelines of blocks.

### Block Types

**Core Logic:**
- **Agent** - The heart of Sim (LLM block with tools, memory, structured output, knowledge bases)
- **Function** - JavaScript code execution
- **Condition** - If/else branching
- **Router** - AI-powered content-based routing
- **Loop** - While/do-while iteration
- **Parallel** - Simultaneous execution
- **API** - HTTP requests

**Integrations (3rd Party):**
- OAuth: Slack, Gmail, Google Calendar, Sheets, Outlook, Linear, GitHub, Notion
- API: Stripe, Twilio, SendGrid, any REST API

### The Agent Block

The Agent block is the core of intelligent workflows:
- **Tools** - Add integrations, custom tools, web search to give it capabilities
- **Memory** - Multi-turn conversations with persistent context
- **Structured Output** - JSON schema for reliable parsing
- **Knowledge Bases** - RAG-powered document retrieval

**Design principle:** Put tools INSIDE agents rather than using standalone tool blocks.

### Triggers

| Type | Description |
|------|-------------|
| Manual/Chat | User sends message in UI (start block: input, files, conversationId) |
| API | REST endpoint with custom input schema |
| Webhook | External services POST to trigger URL |
| Schedule | Cron-based (hourly, daily, weekly) |

### Deployments

| Type | Trigger | Use Case |
|------|---------|----------|
| API | Start block | REST endpoint for programmatic access |
| Chat | Start block | Managed chat UI with auth options |
| MCP | Start block | Expose as MCP tool for AI agents |
| General | Schedule/Webhook | Activate triggers to run automatically |

**Undeployed workflows only run in the builder UI.**

### Variable Syntax

Reference outputs from previous blocks: \`<blockname.field>\`
Reference environment variables: \`{{ENV_VAR_NAME}}\`

Rules:
- Block names must be lowercase, no spaces, no special characters
- Use dot notation for nested fields: \`<blockname.field.subfield>\`

---

## Workflow Lifecycle

1. **Create**: For NEW workflows, FIRST call create_workflow to get a workflowId
2. **Plan**: Use copilot_plan with the workflowId to plan the workflow
3. **Edit**: Use copilot_edit with the workflowId AND the plan to build the workflow
4. **Deploy**: ALWAYS deploy after building using copilot_deploy before testing/running
5. **Test**: Use copilot_test to verify the workflow works correctly
6. **Share**: Provide the user with the workflow URL after completion

---

## CRITICAL: Always Pass workflowId

- For NEW workflows: Call create_workflow FIRST, then use the returned workflowId
- For EXISTING workflows: Pass the workflowId to all copilot tools
- copilot_plan, copilot_edit, copilot_deploy, copilot_test, copilot_debug all REQUIRE workflowId

---

## CRITICAL: How to Handle Plans

The copilot_plan tool returns a structured plan object. You MUST:

1. **Do NOT modify the plan**: Pass the plan object EXACTLY as returned to copilot_edit
2. **Do NOT interpret or summarize the plan**: The edit agent needs the raw plan data
3. **Pass the plan in the context.plan field**: \`{ "context": { "plan": <plan_object> } }\`
4. **Include ALL plan data**: Block configurations, connections, credentials, everything

Example flow:
\`\`\`
1. copilot_plan({ request: "build a workflow...", workflowId: "abc123" })
   -> Returns: { "plan": { "blocks": [...], "connections": [...], ... } }

2. copilot_edit({
     workflowId: "abc123",
     message: "Execute the plan",
     context: { "plan": <EXACT plan object from step 1> }
   })
\`\`\`

**Why this matters**: The plan contains technical details (block IDs, field mappings, API schemas) that the edit agent needs verbatim. Summarizing or rephrasing loses critical information.

---

## CRITICAL: Error Handling

**If the user says "doesn't work", "broke", "failed", "error" → ALWAYS use copilot_debug FIRST.**

Don't guess. Don't plan. Debug first to find the actual problem.

---

## Important Rules

- ALWAYS deploy a workflow before attempting to run or test it
- Workflows must be deployed to have an "active deployment" for execution
- After building, call copilot_deploy with the appropriate deployment type (api, chat, or mcp)
- Return the workflow URL to the user so they can access it in Sim

---

## Quick Operations (use direct tools)
- list_workflows, list_workspaces, list_folders, get_workflow: Fast database queries
- create_workflow: Create new workflow and get workflowId (CALL THIS FIRST for new workflows)
- create_folder: Create new resources

## Workflow Building (use copilot tools)
- copilot_plan: Plan workflow changes (REQUIRES workflowId) - returns a plan object
- copilot_edit: Execute the plan (REQUIRES workflowId AND plan from copilot_plan)
- copilot_deploy: Deploy workflows (REQUIRES workflowId)
- copilot_test: Test workflow execution (REQUIRES workflowId)
- copilot_debug: Diagnose errors (REQUIRES workflowId) - USE THIS FIRST for issues
`

function createResponse(id: RequestId, result: unknown): JSONRPCResponse {
  return {
    jsonrpc: '2.0',
    id,
    result: result as JSONRPCResponse['result'],
  }
}

function createError(id: RequestId, code: ErrorCode | number, message: string): JSONRPCError {
  return {
    jsonrpc: '2.0',
    id,
    error: { code, message },
  }
}

export async function GET() {
  return NextResponse.json({
    name: 'copilot-subagents',
    version: '1.0.0',
    protocolVersion: '2024-11-05',
    capabilities: { tools: {} },
  })
}

export async function POST(request: NextRequest) {
  try {
    const auth = await checkHybridAuth(request, { requireWorkflowId: false })
    if (!auth.success || !auth.userId) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    const body = (await request.json()) as JSONRPCMessage

    if (isJSONRPCNotification(body)) {
      return new NextResponse(null, { status: 202 })
    }

    if (!isJSONRPCRequest(body)) {
      return NextResponse.json(
        createError(0, ErrorCode.InvalidRequest, 'Invalid JSON-RPC message'),
        { status: 400 }
      )
    }

    const { id, method, params } = body

    switch (method) {
      case 'initialize': {
        const result: InitializeResult = {
          protocolVersion: '2024-11-05',
          capabilities: { tools: {} },
          serverInfo: { name: 'sim-copilot', version: '1.0.0' },
          instructions: MCP_SERVER_INSTRUCTIONS,
        }
        return NextResponse.json(createResponse(id, result))
      }
      case 'ping':
        return NextResponse.json(createResponse(id, {}))
      case 'tools/list':
        return handleToolsList(id)
      case 'tools/call':
        return handleToolsCall(
          id,
          params as { name: string; arguments?: Record<string, unknown> },
          auth.userId
        )
      default:
        return NextResponse.json(
          createError(id, ErrorCode.MethodNotFound, `Method not found: ${method}`),
          { status: 404 }
        )
    }
  } catch (error) {
    logger.error('Error handling MCP request', { error })
    return NextResponse.json(createError(0, ErrorCode.InternalError, 'Internal error'), {
      status: 500,
    })
  }
}

async function handleToolsList(id: RequestId): Promise<NextResponse> {
  const directTools = DIRECT_TOOL_DEFS.map((tool) => ({
    name: tool.name,
    description: tool.description,
    inputSchema: tool.inputSchema,
  }))

  const subagentTools = SUBAGENT_TOOL_DEFS.map((tool) => ({
    name: tool.name,
    description: tool.description,
    inputSchema: tool.inputSchema,
  }))

  const result: ListToolsResult = {
    tools: [...directTools, ...subagentTools],
  }

  return NextResponse.json(createResponse(id, result))
}

async function handleToolsCall(
  id: RequestId,
  params: { name: string; arguments?: Record<string, unknown> },
  userId: string
): Promise<NextResponse> {
  const args = params.arguments || {}

  // Check if this is a direct tool (fast, no LLM)
  const directTool = DIRECT_TOOL_DEFS.find((tool) => tool.name === params.name)
  if (directTool) {
    return handleDirectToolCall(id, directTool, args, userId)
  }

  // Check if this is a subagent tool (uses LLM orchestration)
  const subagentTool = SUBAGENT_TOOL_DEFS.find((tool) => tool.name === params.name)
  if (subagentTool) {
    return handleSubagentToolCall(id, subagentTool, args, userId)
  }

  return NextResponse.json(
    createError(id, ErrorCode.MethodNotFound, `Tool not found: ${params.name}`),
    { status: 404 }
  )
}

async function handleDirectToolCall(
  id: RequestId,
  toolDef: (typeof DIRECT_TOOL_DEFS)[number],
  args: Record<string, unknown>,
  userId: string
): Promise<NextResponse> {
  try {
    const execContext = await prepareExecutionContext(userId, (args.workflowId as string) || '')

    const toolCall = {
      id: crypto.randomUUID(),
      name: toolDef.toolId,
      status: 'pending' as const,
      params: args as Record<string, any>,
      startTime: Date.now(),
    }

    const result = await executeToolServerSide(toolCall, execContext)

    const response: CallToolResult = {
      content: [
        {
          type: 'text',
          text: JSON.stringify(result.output ?? result, null, 2),
        },
      ],
      isError: !result.success,
    }

    return NextResponse.json(createResponse(id, response))
  } catch (error) {
    logger.error('Direct tool execution failed', { tool: toolDef.name, error })
    return NextResponse.json(
      createError(id, ErrorCode.InternalError, `Tool execution failed: ${error}`),
      { status: 500 }
    )
  }
}

async function handleSubagentToolCall(
  id: RequestId,
  toolDef: (typeof SUBAGENT_TOOL_DEFS)[number],
  args: Record<string, unknown>,
  userId: string
): Promise<NextResponse> {
  const requestText =
    (args.request as string) ||
    (args.message as string) ||
    (args.error as string) ||
    JSON.stringify(args)

  const context = (args.context as Record<string, unknown>) || {}
  if (args.plan && !context.plan) {
    context.plan = args.plan
  }

  const { model } = getCopilotModel('chat')

  const result = await orchestrateSubagentStream(
    toolDef.agentId,
    {
      message: requestText,
      workflowId: args.workflowId,
      workspaceId: args.workspaceId,
      context,
      model,
      // Signal to the copilot backend that this is a headless request
      // so it can enforce workflowId requirements on tools
      headless: true,
    },
    {
      userId,
      workflowId: args.workflowId as string | undefined,
      workspaceId: args.workspaceId as string | undefined,
    }
  )

  // When a respond tool (plan_respond, edit_respond, etc.) was used,
  // return only the structured result - not the full result with all internal tool calls.
  // This provides clean output for MCP consumers.
  let responseData: unknown
  if (result.structuredResult) {
    responseData = {
      success: result.structuredResult.success ?? result.success,
      type: result.structuredResult.type,
      summary: result.structuredResult.summary,
      data: result.structuredResult.data,
    }
  } else if (result.error) {
    responseData = {
      success: false,
      error: result.error,
      errors: result.errors,
    }
  } else {
    // Fallback: return content if no structured result
    responseData = {
      success: result.success,
      content: result.content,
    }
  }

  const response: CallToolResult = {
    content: [
      {
        type: 'text',
        text: JSON.stringify(responseData, null, 2),
      },
    ],
    isError: !result.success,
  }

  return NextResponse.json(createResponse(id, response))
}
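The MCP route speaks JSON-RPC 2.0 over POST. A hedged sketch of listing tools and invoking `copilot_plan`; authentication headers are omitted because what `checkHybridAuth` expects is not shown in this diff:

```typescript
// Sketch only: JSON-RPC payloads mirror the handlers above; auth is elided.
await fetch('/api/mcp/copilot', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({ jsonrpc: '2.0', id: 1, method: 'tools/list' }),
})

await fetch('/api/mcp/copilot', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    jsonrpc: '2.0',
    id: 2,
    method: 'tools/call',
    params: {
      name: 'copilot_plan',
      arguments: { workflowId: 'abc123', request: 'Summarize new leads every morning' },
    },
  }),
})
```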
@@ -21,8 +21,7 @@ export async function GET(request: NextRequest) {
     const accessToken = searchParams.get('accessToken')
     const pageId = searchParams.get('pageId')
     const providedCloudId = searchParams.get('cloudId')
-    const limit = searchParams.get('limit') || '50'
-    const cursor = searchParams.get('cursor')
+    const limit = searchParams.get('limit') || '25'

     if (!domain) {
       return NextResponse.json({ error: 'Domain is required' }, { status: 400 })
@@ -48,12 +47,7 @@ export async function GET(request: NextRequest) {
       return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 })
     }

-    const queryParams = new URLSearchParams()
-    queryParams.append('limit', String(Math.min(Number(limit), 250)))
-    if (cursor) {
-      queryParams.append('cursor', cursor)
-    }
-    const url = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/api/v2/pages/${pageId}/attachments?${queryParams.toString()}`
+    const url = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/api/v2/pages/${pageId}/attachments?limit=${limit}`

     const response = await fetch(url, {
       method: 'GET',
@@ -83,20 +77,9 @@ export async function GET(request: NextRequest) {
       fileSize: attachment.fileSize || 0,
       mediaType: attachment.mediaType || '',
       downloadUrl: attachment.downloadLink || attachment._links?.download || '',
-      status: attachment.status ?? null,
-      webuiUrl: attachment._links?.webui ?? null,
-      pageId: attachment.pageId ?? null,
-      blogPostId: attachment.blogPostId ?? null,
-      comment: attachment.comment ?? null,
-      version: attachment.version ?? null,
     }))

-    return NextResponse.json({
-      attachments,
-      nextCursor: data._links?.next
-        ? new URL(data._links.next, 'https://placeholder').searchParams.get('cursor')
-        : null,
-    })
+    return NextResponse.json({ attachments })
   } catch (error) {
     logger.error('Error listing Confluence attachments:', error)
     return NextResponse.json(
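With cursor pagination dropped, the attachments route now forwards only `limit` (default 25) and returns a bare `attachments` array. A hedged sketch of a caller; the internal route path is an assumption, and the parameter names come from the hunk above:

```typescript
// Sketch: the route path is assumed; domain/accessToken/pageId are placeholders.
const params = new URLSearchParams({ domain, accessToken, pageId, limit: '25' })
const res = await fetch(`/api/tools/confluence/attachments?${params}`)
const { attachments } = await res.json()
```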
@@ -1,285 +0,0 @@ (deleted file)
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
import { validateAlphanumericId, validateJiraCloudId } from '@/lib/core/security/input-validation'
import { getConfluenceCloudId } from '@/tools/confluence/utils'

const logger = createLogger('ConfluenceBlogPostsAPI')

export const dynamic = 'force-dynamic'

const getBlogPostSchema = z
  .object({
    domain: z.string().min(1, 'Domain is required'),
    accessToken: z.string().min(1, 'Access token is required'),
    cloudId: z.string().optional(),
    blogPostId: z.string().min(1, 'Blog post ID is required'),
    bodyFormat: z.string().optional(),
  })
  .refine(
    (data) => {
      const validation = validateAlphanumericId(data.blogPostId, 'blogPostId', 255)
      return validation.isValid
    },
    (data) => {
      const validation = validateAlphanumericId(data.blogPostId, 'blogPostId', 255)
      return { message: validation.error || 'Invalid blog post ID', path: ['blogPostId'] }
    }
  )

const createBlogPostSchema = z.object({
  domain: z.string().min(1, 'Domain is required'),
  accessToken: z.string().min(1, 'Access token is required'),
  cloudId: z.string().optional(),
  spaceId: z.string().min(1, 'Space ID is required'),
  title: z.string().min(1, 'Title is required'),
  content: z.string().min(1, 'Content is required'),
  status: z.enum(['current', 'draft']).optional(),
})

/**
 * List all blog posts or get a specific blog post
 */
export async function GET(request: NextRequest) {
  try {
    const auth = await checkSessionOrInternalAuth(request)
    if (!auth.success || !auth.userId) {
      return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
    }

    const { searchParams } = new URL(request.url)
    const domain = searchParams.get('domain')
    const accessToken = searchParams.get('accessToken')
    const providedCloudId = searchParams.get('cloudId')
    const limit = searchParams.get('limit') || '25'
    const status = searchParams.get('status')
    const sortOrder = searchParams.get('sort')
    const cursor = searchParams.get('cursor')

    if (!domain) {
      return NextResponse.json({ error: 'Domain is required' }, { status: 400 })
    }

    if (!accessToken) {
      return NextResponse.json({ error: 'Access token is required' }, { status: 400 })
    }

    const cloudId = providedCloudId || (await getConfluenceCloudId(domain, accessToken))

    const cloudIdValidation = validateJiraCloudId(cloudId, 'cloudId')
    if (!cloudIdValidation.isValid) {
      return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 })
    }

    const queryParams = new URLSearchParams()
    queryParams.append('limit', String(Math.min(Number(limit), 250)))

    if (status) {
      queryParams.append('status', status)
    }

    if (sortOrder) {
      queryParams.append('sort', sortOrder)
    }

    if (cursor) {
      queryParams.append('cursor', cursor)
    }

    const url = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/api/v2/blogposts?${queryParams.toString()}`

    const response = await fetch(url, {
      method: 'GET',
      headers: {
        Accept: 'application/json',
        Authorization: `Bearer ${accessToken}`,
      },
    })

    if (!response.ok) {
      const errorData = await response.json().catch(() => null)
      logger.error('Confluence API error response:', {
        status: response.status,
        statusText: response.statusText,
        error: JSON.stringify(errorData, null, 2),
      })
      const errorMessage = errorData?.message || `Failed to list blog posts (${response.status})`
      return NextResponse.json({ error: errorMessage }, { status: response.status })
    }

    const data = await response.json()

    const blogPosts = (data.results || []).map((post: any) => ({
      id: post.id,
      title: post.title,
      status: post.status ?? null,
      spaceId: post.spaceId ?? null,
      authorId: post.authorId ?? null,
      createdAt: post.createdAt ?? null,
      version: post.version ?? null,
      webUrl: post._links?.webui ?? null,
    }))

    return NextResponse.json({
      blogPosts,
      nextCursor: data._links?.next
        ? new URL(data._links.next, 'https://placeholder').searchParams.get('cursor')
        : null,
    })
  } catch (error) {
    logger.error('Error listing blog posts:', error)
    return NextResponse.json(
      { error: (error as Error).message || 'Internal server error' },
      { status: 500 }
    )
  }
}

/**
 * Get a specific blog post by ID
 */
export async function POST(request: NextRequest) {
  try {
    const auth = await checkSessionOrInternalAuth(request)
    if (!auth.success || !auth.userId) {
      return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
    }

    const body = await request.json()

    // Check if this is a create or get request
    if (body.title && body.content && body.spaceId) {
      // Create blog post
      const validation = createBlogPostSchema.safeParse(body)
      if (!validation.success) {
        const firstError = validation.error.errors[0]
        return NextResponse.json({ error: firstError.message }, { status: 400 })
      }

      const {
        domain,
        accessToken,
        cloudId: providedCloudId,
        spaceId,
        title,
        content,
        status,
      } = validation.data

      const cloudId = providedCloudId || (await getConfluenceCloudId(domain, accessToken))

      const cloudIdValidation = validateJiraCloudId(cloudId, 'cloudId')
      if (!cloudIdValidation.isValid) {
        return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 })
      }

      const url = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/api/v2/blogposts`

      const createBody = {
        spaceId,
        status: status || 'current',
        title,
        body: {
          representation: 'storage',
          value: content,
        },
      }

      const response = await fetch(url, {
        method: 'POST',
        headers: {
          Accept: 'application/json',
          'Content-Type': 'application/json',
          Authorization: `Bearer ${accessToken}`,
        },
        body: JSON.stringify(createBody),
      })

      if (!response.ok) {
        const errorData = await response.json().catch(() => null)
        logger.error('Confluence API error response:', {
          status: response.status,
          statusText: response.statusText,
          error: JSON.stringify(errorData, null, 2),
        })
        const errorMessage = errorData?.message || `Failed to create blog post (${response.status})`
        return NextResponse.json({ error: errorMessage }, { status: response.status })
      }

      const data = await response.json()
      return NextResponse.json({
        id: data.id,
        title: data.title,
        spaceId: data.spaceId,
        webUrl: data._links?.webui ?? null,
      })
    }

    // Get blog post by ID
    const validation = getBlogPostSchema.safeParse(body)
    if (!validation.success) {
      const firstError = validation.error.errors[0]
      return NextResponse.json({ error: firstError.message }, { status: 400 })
    }

    const {
      domain,
      accessToken,
      cloudId: providedCloudId,
      blogPostId,
      bodyFormat,
    } = validation.data

    const cloudId = providedCloudId || (await getConfluenceCloudId(domain, accessToken))

    const cloudIdValidation = validateJiraCloudId(cloudId, 'cloudId')
    if (!cloudIdValidation.isValid) {
      return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 })
    }

    const queryParams = new URLSearchParams()
    if (bodyFormat) {
      queryParams.append('body-format', bodyFormat)
    }

    const url = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/api/v2/blogposts/${blogPostId}${queryParams.toString() ? `?${queryParams.toString()}` : ''}`

    const response = await fetch(url, {
      method: 'GET',
      headers: {
        Accept: 'application/json',
        Authorization: `Bearer ${accessToken}`,
      },
    })

    if (!response.ok) {
      const errorData = await response.json().catch(() => null)
      logger.error('Confluence API error response:', {
        status: response.status,
        statusText: response.statusText,
        error: JSON.stringify(errorData, null, 2),
      })
      const errorMessage = errorData?.message || `Failed to get blog post (${response.status})`
      return NextResponse.json({ error: errorMessage }, { status: response.status })
|
|
||||||
}
|
|
||||||
|
|
||||||
const data = await response.json()
|
|
||||||
return NextResponse.json({
|
|
||||||
id: data.id,
|
|
||||||
title: data.title,
|
|
||||||
status: data.status ?? null,
|
|
||||||
spaceId: data.spaceId ?? null,
|
|
||||||
authorId: data.authorId ?? null,
|
|
||||||
createdAt: data.createdAt ?? null,
|
|
||||||
version: data.version ?? null,
|
|
||||||
body: data.body ?? null,
|
|
||||||
webUrl: data._links?.webui ?? null,
|
|
||||||
})
|
|
||||||
} catch (error) {
|
|
||||||
logger.error('Error with blog post operation:', error)
|
|
||||||
return NextResponse.json(
|
|
||||||
{ error: (error as Error).message || 'Internal server error' },
|
|
||||||
{ status: 500 }
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -105,8 +105,6 @@ export async function GET(request: NextRequest) {
|
|||||||
const pageId = searchParams.get('pageId')
|
const pageId = searchParams.get('pageId')
|
||||||
const providedCloudId = searchParams.get('cloudId')
|
const providedCloudId = searchParams.get('cloudId')
|
||||||
const limit = searchParams.get('limit') || '25'
|
const limit = searchParams.get('limit') || '25'
|
||||||
const bodyFormat = searchParams.get('bodyFormat') || 'storage'
|
|
||||||
const cursor = searchParams.get('cursor')
|
|
||||||
|
|
||||||
if (!domain) {
|
if (!domain) {
|
||||||
return NextResponse.json({ error: 'Domain is required' }, { status: 400 })
|
return NextResponse.json({ error: 'Domain is required' }, { status: 400 })
|
||||||
@@ -132,13 +130,7 @@ export async function GET(request: NextRequest) {
|
|||||||
return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 })
|
return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 })
|
||||||
}
|
}
|
||||||
|
|
||||||
const queryParams = new URLSearchParams()
|
const url = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/api/v2/pages/${pageId}/footer-comments?limit=${limit}`
|
||||||
queryParams.append('limit', String(Math.min(Number(limit), 250)))
|
|
||||||
queryParams.append('body-format', bodyFormat)
|
|
||||||
if (cursor) {
|
|
||||||
queryParams.append('cursor', cursor)
|
|
||||||
}
|
|
||||||
const url = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/api/v2/pages/${pageId}/footer-comments?${queryParams.toString()}`
|
|
||||||
|
|
||||||
const response = await fetch(url, {
|
const response = await fetch(url, {
|
||||||
method: 'GET',
|
method: 'GET',
|
||||||
@@ -162,31 +154,14 @@ export async function GET(request: NextRequest) {
|
|||||||
|
|
||||||
const data = await response.json()
|
const data = await response.json()
|
||||||
|
|
||||||
const comments = (data.results || []).map((comment: any) => {
|
const comments = (data.results || []).map((comment: any) => ({
|
||||||
const bodyValue = comment.body?.storage?.value || comment.body?.view?.value || ''
|
id: comment.id,
|
||||||
return {
|
body: comment.body?.storage?.value || comment.body?.view?.value || '',
|
||||||
id: comment.id,
|
createdAt: comment.createdAt || '',
|
||||||
body: {
|
authorId: comment.authorId || '',
|
||||||
value: bodyValue,
|
}))
|
||||||
representation: bodyFormat,
|
|
||||||
},
|
|
||||||
createdAt: comment.createdAt || '',
|
|
||||||
authorId: comment.authorId || '',
|
|
||||||
status: comment.status ?? null,
|
|
||||||
title: comment.title ?? null,
|
|
||||||
pageId: comment.pageId ?? null,
|
|
||||||
blogPostId: comment.blogPostId ?? null,
|
|
||||||
parentCommentId: comment.parentCommentId ?? null,
|
|
||||||
version: comment.version ?? null,
|
|
||||||
}
|
|
||||||
})
|
|
||||||
|
|
||||||
return NextResponse.json({
|
return NextResponse.json({ comments })
|
||||||
comments,
|
|
||||||
nextCursor: data._links?.next
|
|
||||||
? new URL(data._links.next, 'https://placeholder').searchParams.get('cursor')
|
|
||||||
: null,
|
|
||||||
})
|
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
logger.error('Error listing Confluence comments:', error)
|
logger.error('Error listing Confluence comments:', error)
|
||||||
return NextResponse.json(
|
return NextResponse.json(
|
||||||
|
|||||||
@@ -22,7 +22,6 @@ export async function POST(request: NextRequest) {
|
|||||||
cloudId: providedCloudId,
|
cloudId: providedCloudId,
|
||||||
pageId,
|
pageId,
|
||||||
labelName,
|
labelName,
|
||||||
prefix: labelPrefix,
|
|
||||||
} = await request.json()
|
} = await request.json()
|
||||||
|
|
||||||
if (!domain) {
|
if (!domain) {
|
||||||
@@ -53,14 +52,12 @@ export async function POST(request: NextRequest) {
|
|||||||
return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 })
|
return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 })
|
||||||
}
|
}
|
||||||
|
|
||||||
const url = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/rest/api/content/${pageId}/label`
|
const url = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/api/v2/pages/${pageId}/labels`
|
||||||
|
|
||||||
const body = [
|
const body = {
|
||||||
{
|
prefix: 'global',
|
||||||
prefix: labelPrefix || 'global',
|
name: labelName,
|
||||||
name: labelName,
|
}
|
||||||
},
|
|
||||||
]
|
|
||||||
|
|
||||||
const response = await fetch(url, {
|
const response = await fetch(url, {
|
||||||
method: 'POST',
|
method: 'POST',
|
||||||
@@ -85,14 +82,7 @@ export async function POST(request: NextRequest) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
const data = await response.json()
|
const data = await response.json()
|
||||||
const addedLabel = data.results?.[0] || data[0] || data
|
return NextResponse.json({ ...data, pageId, labelName })
|
||||||
return NextResponse.json({
|
|
||||||
id: addedLabel.id ?? '',
|
|
||||||
name: addedLabel.name ?? labelName,
|
|
||||||
prefix: addedLabel.prefix ?? labelPrefix ?? 'global',
|
|
||||||
pageId,
|
|
||||||
labelName,
|
|
||||||
})
|
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
logger.error('Error adding Confluence label:', error)
|
logger.error('Error adding Confluence label:', error)
|
||||||
return NextResponse.json(
|
return NextResponse.json(
|
||||||
@@ -115,8 +105,6 @@ export async function GET(request: NextRequest) {
|
|||||||
const accessToken = searchParams.get('accessToken')
|
const accessToken = searchParams.get('accessToken')
|
||||||
const pageId = searchParams.get('pageId')
|
const pageId = searchParams.get('pageId')
|
||||||
const providedCloudId = searchParams.get('cloudId')
|
const providedCloudId = searchParams.get('cloudId')
|
||||||
const limit = searchParams.get('limit') || '25'
|
|
||||||
const cursor = searchParams.get('cursor')
|
|
||||||
|
|
||||||
if (!domain) {
|
if (!domain) {
|
||||||
return NextResponse.json({ error: 'Domain is required' }, { status: 400 })
|
return NextResponse.json({ error: 'Domain is required' }, { status: 400 })
|
||||||
@@ -142,12 +130,7 @@ export async function GET(request: NextRequest) {
|
|||||||
return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 })
|
return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 })
|
||||||
}
|
}
|
||||||
|
|
||||||
const queryParams = new URLSearchParams()
|
const url = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/api/v2/pages/${pageId}/labels`
|
||||||
queryParams.append('limit', String(Math.min(Number(limit), 250)))
|
|
||||||
if (cursor) {
|
|
||||||
queryParams.append('cursor', cursor)
|
|
||||||
}
|
|
||||||
const url = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/api/v2/pages/${pageId}/labels?${queryParams.toString()}`
|
|
||||||
|
|
||||||
const response = await fetch(url, {
|
const response = await fetch(url, {
|
||||||
method: 'GET',
|
method: 'GET',
|
||||||
@@ -177,12 +160,7 @@ export async function GET(request: NextRequest) {
|
|||||||
prefix: label.prefix || 'global',
|
prefix: label.prefix || 'global',
|
||||||
}))
|
}))
|
||||||
|
|
||||||
return NextResponse.json({
|
return NextResponse.json({ labels })
|
||||||
labels,
|
|
||||||
nextCursor: data._links?.next
|
|
||||||
? new URL(data._links.next, 'https://placeholder').searchParams.get('cursor')
|
|
||||||
: null,
|
|
||||||
})
|
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
logger.error('Error listing Confluence labels:', error)
|
logger.error('Error listing Confluence labels:', error)
|
||||||
return NextResponse.json(
|
return NextResponse.json(
|
||||||
|
|||||||
@@ -1,96 +0,0 @@
|
|||||||
import { createLogger } from '@sim/logger'
|
|
||||||
import { type NextRequest, NextResponse } from 'next/server'
|
|
||||||
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
|
|
||||||
import { validateAlphanumericId, validateJiraCloudId } from '@/lib/core/security/input-validation'
|
|
||||||
import { getConfluenceCloudId } from '@/tools/confluence/utils'
|
|
||||||
|
|
||||||
const logger = createLogger('ConfluencePageAncestorsAPI')
|
|
||||||
|
|
||||||
export const dynamic = 'force-dynamic'
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Get ancestors (parent pages) of a specific Confluence page.
|
|
||||||
* Uses GET /wiki/api/v2/pages/{id}/ancestors
|
|
||||||
*/
|
|
||||||
export async function POST(request: NextRequest) {
|
|
||||||
try {
|
|
||||||
const auth = await checkSessionOrInternalAuth(request)
|
|
||||||
if (!auth.success || !auth.userId) {
|
|
||||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const body = await request.json()
|
|
||||||
const { domain, accessToken, pageId, cloudId: providedCloudId, limit = 25 } = body
|
|
||||||
|
|
||||||
if (!domain) {
|
|
||||||
return NextResponse.json({ error: 'Domain is required' }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!accessToken) {
|
|
||||||
return NextResponse.json({ error: 'Access token is required' }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!pageId) {
|
|
||||||
return NextResponse.json({ error: 'Page ID is required' }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const pageIdValidation = validateAlphanumericId(pageId, 'pageId', 255)
|
|
||||||
if (!pageIdValidation.isValid) {
|
|
||||||
return NextResponse.json({ error: pageIdValidation.error }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const cloudId = providedCloudId || (await getConfluenceCloudId(domain, accessToken))
|
|
||||||
|
|
||||||
const cloudIdValidation = validateJiraCloudId(cloudId, 'cloudId')
|
|
||||||
if (!cloudIdValidation.isValid) {
|
|
||||||
return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const queryParams = new URLSearchParams()
|
|
||||||
queryParams.append('limit', String(Math.min(limit, 250)))
|
|
||||||
|
|
||||||
const url = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/api/v2/pages/${pageId}/ancestors?${queryParams.toString()}`
|
|
||||||
|
|
||||||
logger.info(`Fetching ancestors for page ${pageId}`)
|
|
||||||
|
|
||||||
const response = await fetch(url, {
|
|
||||||
method: 'GET',
|
|
||||||
headers: {
|
|
||||||
Accept: 'application/json',
|
|
||||||
Authorization: `Bearer ${accessToken}`,
|
|
||||||
},
|
|
||||||
})
|
|
||||||
|
|
||||||
if (!response.ok) {
|
|
||||||
const errorData = await response.json().catch(() => null)
|
|
||||||
logger.error('Confluence API error response:', {
|
|
||||||
status: response.status,
|
|
||||||
statusText: response.statusText,
|
|
||||||
error: JSON.stringify(errorData, null, 2),
|
|
||||||
})
|
|
||||||
const errorMessage = errorData?.message || `Failed to get page ancestors (${response.status})`
|
|
||||||
return NextResponse.json({ error: errorMessage }, { status: response.status })
|
|
||||||
}
|
|
||||||
|
|
||||||
const data = await response.json()
|
|
||||||
|
|
||||||
const ancestors = (data.results || []).map((page: any) => ({
|
|
||||||
id: page.id,
|
|
||||||
title: page.title,
|
|
||||||
status: page.status ?? null,
|
|
||||||
spaceId: page.spaceId ?? null,
|
|
||||||
webUrl: page._links?.webui ?? null,
|
|
||||||
}))
|
|
||||||
|
|
||||||
return NextResponse.json({
|
|
||||||
ancestors,
|
|
||||||
pageId,
|
|
||||||
})
|
|
||||||
} catch (error) {
|
|
||||||
logger.error('Error getting page ancestors:', error)
|
|
||||||
return NextResponse.json(
|
|
||||||
{ error: (error as Error).message || 'Internal server error' },
|
|
||||||
{ status: 500 }
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,104 +0,0 @@
|
|||||||
import { createLogger } from '@sim/logger'
|
|
||||||
import { type NextRequest, NextResponse } from 'next/server'
|
|
||||||
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
|
|
||||||
import { validateAlphanumericId, validateJiraCloudId } from '@/lib/core/security/input-validation'
|
|
||||||
import { getConfluenceCloudId } from '@/tools/confluence/utils'
|
|
||||||
|
|
||||||
const logger = createLogger('ConfluencePageChildrenAPI')
|
|
||||||
|
|
||||||
export const dynamic = 'force-dynamic'
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Get child pages of a specific Confluence page.
|
|
||||||
* Uses GET /wiki/api/v2/pages/{id}/children
|
|
||||||
*/
|
|
||||||
export async function POST(request: NextRequest) {
|
|
||||||
try {
|
|
||||||
const auth = await checkSessionOrInternalAuth(request)
|
|
||||||
if (!auth.success || !auth.userId) {
|
|
||||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const body = await request.json()
|
|
||||||
const { domain, accessToken, pageId, cloudId: providedCloudId, limit = 50, cursor } = body
|
|
||||||
|
|
||||||
if (!domain) {
|
|
||||||
return NextResponse.json({ error: 'Domain is required' }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!accessToken) {
|
|
||||||
return NextResponse.json({ error: 'Access token is required' }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!pageId) {
|
|
||||||
return NextResponse.json({ error: 'Page ID is required' }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const pageIdValidation = validateAlphanumericId(pageId, 'pageId', 255)
|
|
||||||
if (!pageIdValidation.isValid) {
|
|
||||||
return NextResponse.json({ error: pageIdValidation.error }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const cloudId = providedCloudId || (await getConfluenceCloudId(domain, accessToken))
|
|
||||||
|
|
||||||
const cloudIdValidation = validateJiraCloudId(cloudId, 'cloudId')
|
|
||||||
if (!cloudIdValidation.isValid) {
|
|
||||||
return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const queryParams = new URLSearchParams()
|
|
||||||
queryParams.append('limit', String(Math.min(limit, 250)))
|
|
||||||
|
|
||||||
if (cursor) {
|
|
||||||
queryParams.append('cursor', cursor)
|
|
||||||
}
|
|
||||||
|
|
||||||
const url = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/api/v2/pages/${pageId}/children?${queryParams.toString()}`
|
|
||||||
|
|
||||||
logger.info(`Fetching child pages for page ${pageId}`)
|
|
||||||
|
|
||||||
const response = await fetch(url, {
|
|
||||||
method: 'GET',
|
|
||||||
headers: {
|
|
||||||
Accept: 'application/json',
|
|
||||||
Authorization: `Bearer ${accessToken}`,
|
|
||||||
},
|
|
||||||
})
|
|
||||||
|
|
||||||
if (!response.ok) {
|
|
||||||
const errorData = await response.json().catch(() => null)
|
|
||||||
logger.error('Confluence API error response:', {
|
|
||||||
status: response.status,
|
|
||||||
statusText: response.statusText,
|
|
||||||
error: JSON.stringify(errorData, null, 2),
|
|
||||||
})
|
|
||||||
const errorMessage = errorData?.message || `Failed to get child pages (${response.status})`
|
|
||||||
return NextResponse.json({ error: errorMessage }, { status: response.status })
|
|
||||||
}
|
|
||||||
|
|
||||||
const data = await response.json()
|
|
||||||
|
|
||||||
const children = (data.results || []).map((page: any) => ({
|
|
||||||
id: page.id,
|
|
||||||
title: page.title,
|
|
||||||
status: page.status ?? null,
|
|
||||||
spaceId: page.spaceId ?? null,
|
|
||||||
childPosition: page.childPosition ?? null,
|
|
||||||
webUrl: page._links?.webui ?? null,
|
|
||||||
}))
|
|
||||||
|
|
||||||
return NextResponse.json({
|
|
||||||
children,
|
|
||||||
parentId: pageId,
|
|
||||||
nextCursor: data._links?.next
|
|
||||||
? new URL(data._links.next, 'https://placeholder').searchParams.get('cursor')
|
|
||||||
: null,
|
|
||||||
})
|
|
||||||
} catch (error) {
|
|
||||||
logger.error('Error getting child pages:', error)
|
|
||||||
return NextResponse.json(
|
|
||||||
{ error: (error as Error).message || 'Internal server error' },
|
|
||||||
{ status: 500 }
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,365 +0,0 @@
|
|||||||
import { createLogger } from '@sim/logger'
|
|
||||||
import { type NextRequest, NextResponse } from 'next/server'
|
|
||||||
import { z } from 'zod'
|
|
||||||
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
|
|
||||||
import { validateAlphanumericId, validateJiraCloudId } from '@/lib/core/security/input-validation'
|
|
||||||
import { getConfluenceCloudId } from '@/tools/confluence/utils'
|
|
||||||
|
|
||||||
const logger = createLogger('ConfluencePagePropertiesAPI')
|
|
||||||
|
|
||||||
export const dynamic = 'force-dynamic'
|
|
||||||
|
|
||||||
const createPropertySchema = z.object({
|
|
||||||
domain: z.string().min(1, 'Domain is required'),
|
|
||||||
accessToken: z.string().min(1, 'Access token is required'),
|
|
||||||
cloudId: z.string().optional(),
|
|
||||||
pageId: z.string().min(1, 'Page ID is required'),
|
|
||||||
key: z.string().min(1, 'Property key is required'),
|
|
||||||
value: z.any(),
|
|
||||||
})
|
|
||||||
|
|
||||||
const updatePropertySchema = z.object({
|
|
||||||
domain: z.string().min(1, 'Domain is required'),
|
|
||||||
accessToken: z.string().min(1, 'Access token is required'),
|
|
||||||
cloudId: z.string().optional(),
|
|
||||||
pageId: z.string().min(1, 'Page ID is required'),
|
|
||||||
propertyId: z.string().min(1, 'Property ID is required'),
|
|
||||||
key: z.string().min(1, 'Property key is required'),
|
|
||||||
value: z.any(),
|
|
||||||
versionNumber: z.number().min(1, 'Version number is required'),
|
|
||||||
})
|
|
||||||
|
|
||||||
const deletePropertySchema = z.object({
|
|
||||||
domain: z.string().min(1, 'Domain is required'),
|
|
||||||
accessToken: z.string().min(1, 'Access token is required'),
|
|
||||||
cloudId: z.string().optional(),
|
|
||||||
pageId: z.string().min(1, 'Page ID is required'),
|
|
||||||
propertyId: z.string().min(1, 'Property ID is required'),
|
|
||||||
})
|
|
||||||
|
|
||||||
/**
|
|
||||||
* List all content properties on a page.
|
|
||||||
*/
|
|
||||||
export async function GET(request: NextRequest) {
|
|
||||||
try {
|
|
||||||
const auth = await checkSessionOrInternalAuth(request)
|
|
||||||
if (!auth.success || !auth.userId) {
|
|
||||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const { searchParams } = new URL(request.url)
|
|
||||||
const domain = searchParams.get('domain')
|
|
||||||
const accessToken = searchParams.get('accessToken')
|
|
||||||
const pageId = searchParams.get('pageId')
|
|
||||||
const providedCloudId = searchParams.get('cloudId')
|
|
||||||
const limit = searchParams.get('limit') || '50'
|
|
||||||
const cursor = searchParams.get('cursor')
|
|
||||||
|
|
||||||
if (!domain) {
|
|
||||||
return NextResponse.json({ error: 'Domain is required' }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!accessToken) {
|
|
||||||
return NextResponse.json({ error: 'Access token is required' }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!pageId) {
|
|
||||||
return NextResponse.json({ error: 'Page ID is required' }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const pageIdValidation = validateAlphanumericId(pageId, 'pageId', 255)
|
|
||||||
if (!pageIdValidation.isValid) {
|
|
||||||
return NextResponse.json({ error: pageIdValidation.error }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const cloudId = providedCloudId || (await getConfluenceCloudId(domain, accessToken))
|
|
||||||
|
|
||||||
const cloudIdValidation = validateJiraCloudId(cloudId, 'cloudId')
|
|
||||||
if (!cloudIdValidation.isValid) {
|
|
||||||
return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const queryParams = new URLSearchParams()
|
|
||||||
queryParams.append('limit', String(Math.min(Number(limit), 250)))
|
|
||||||
if (cursor) {
|
|
||||||
queryParams.append('cursor', cursor)
|
|
||||||
}
|
|
||||||
const url = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/api/v2/pages/${pageId}/properties?${queryParams.toString()}`
|
|
||||||
|
|
||||||
const response = await fetch(url, {
|
|
||||||
method: 'GET',
|
|
||||||
headers: {
|
|
||||||
Accept: 'application/json',
|
|
||||||
Authorization: `Bearer ${accessToken}`,
|
|
||||||
},
|
|
||||||
})
|
|
||||||
|
|
||||||
if (!response.ok) {
|
|
||||||
const errorData = await response.json().catch(() => null)
|
|
||||||
logger.error('Confluence API error response:', {
|
|
||||||
status: response.status,
|
|
||||||
statusText: response.statusText,
|
|
||||||
error: JSON.stringify(errorData, null, 2),
|
|
||||||
})
|
|
||||||
const errorMessage =
|
|
||||||
errorData?.message || `Failed to list page properties (${response.status})`
|
|
||||||
return NextResponse.json({ error: errorMessage }, { status: response.status })
|
|
||||||
}
|
|
||||||
|
|
||||||
const data = await response.json()
|
|
||||||
|
|
||||||
const properties = (data.results || []).map((prop: any) => ({
|
|
||||||
id: prop.id,
|
|
||||||
key: prop.key,
|
|
||||||
value: prop.value ?? null,
|
|
||||||
version: prop.version ?? null,
|
|
||||||
}))
|
|
||||||
|
|
||||||
return NextResponse.json({
|
|
||||||
properties,
|
|
||||||
pageId,
|
|
||||||
nextCursor: data._links?.next
|
|
||||||
? new URL(data._links.next, 'https://placeholder').searchParams.get('cursor')
|
|
||||||
: null,
|
|
||||||
})
|
|
||||||
} catch (error) {
|
|
||||||
logger.error('Error listing page properties:', error)
|
|
||||||
return NextResponse.json(
|
|
||||||
{ error: (error as Error).message || 'Internal server error' },
|
|
||||||
{ status: 500 }
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Create a new content property on a page.
|
|
||||||
*/
|
|
||||||
export async function POST(request: NextRequest) {
|
|
||||||
try {
|
|
||||||
const auth = await checkSessionOrInternalAuth(request)
|
|
||||||
if (!auth.success || !auth.userId) {
|
|
||||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const body = await request.json()
|
|
||||||
|
|
||||||
const validation = createPropertySchema.safeParse(body)
|
|
||||||
if (!validation.success) {
|
|
||||||
const firstError = validation.error.errors[0]
|
|
||||||
return NextResponse.json({ error: firstError.message }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const { domain, accessToken, cloudId: providedCloudId, pageId, key, value } = validation.data
|
|
||||||
|
|
||||||
const pageIdValidation = validateAlphanumericId(pageId, 'pageId', 255)
|
|
||||||
if (!pageIdValidation.isValid) {
|
|
||||||
return NextResponse.json({ error: pageIdValidation.error }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const cloudId = providedCloudId || (await getConfluenceCloudId(domain, accessToken))
|
|
||||||
|
|
||||||
const cloudIdValidation = validateJiraCloudId(cloudId, 'cloudId')
|
|
||||||
if (!cloudIdValidation.isValid) {
|
|
||||||
return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const url = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/api/v2/pages/${pageId}/properties`
|
|
||||||
|
|
||||||
const response = await fetch(url, {
|
|
||||||
method: 'POST',
|
|
||||||
headers: {
|
|
||||||
Accept: 'application/json',
|
|
||||||
'Content-Type': 'application/json',
|
|
||||||
Authorization: `Bearer ${accessToken}`,
|
|
||||||
},
|
|
||||||
body: JSON.stringify({ key, value }),
|
|
||||||
})
|
|
||||||
|
|
||||||
if (!response.ok) {
|
|
||||||
const errorData = await response.json().catch(() => null)
|
|
||||||
logger.error('Confluence API error response:', {
|
|
||||||
status: response.status,
|
|
||||||
statusText: response.statusText,
|
|
||||||
error: JSON.stringify(errorData, null, 2),
|
|
||||||
})
|
|
||||||
const errorMessage =
|
|
||||||
errorData?.message || `Failed to create page property (${response.status})`
|
|
||||||
return NextResponse.json({ error: errorMessage }, { status: response.status })
|
|
||||||
}
|
|
||||||
|
|
||||||
const data = await response.json()
|
|
||||||
return NextResponse.json({
|
|
||||||
id: data.id,
|
|
||||||
key: data.key,
|
|
||||||
value: data.value,
|
|
||||||
version: data.version,
|
|
||||||
pageId,
|
|
||||||
})
|
|
||||||
} catch (error) {
|
|
||||||
logger.error('Error creating page property:', error)
|
|
||||||
return NextResponse.json(
|
|
||||||
{ error: (error as Error).message || 'Internal server error' },
|
|
||||||
{ status: 500 }
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Update a content property on a page.
|
|
||||||
*/
|
|
||||||
export async function PUT(request: NextRequest) {
|
|
||||||
try {
|
|
||||||
const auth = await checkSessionOrInternalAuth(request)
|
|
||||||
if (!auth.success || !auth.userId) {
|
|
||||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const body = await request.json()
|
|
||||||
|
|
||||||
const validation = updatePropertySchema.safeParse(body)
|
|
||||||
if (!validation.success) {
|
|
||||||
const firstError = validation.error.errors[0]
|
|
||||||
return NextResponse.json({ error: firstError.message }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const {
|
|
||||||
domain,
|
|
||||||
accessToken,
|
|
||||||
cloudId: providedCloudId,
|
|
||||||
pageId,
|
|
||||||
propertyId,
|
|
||||||
key,
|
|
||||||
value,
|
|
||||||
versionNumber,
|
|
||||||
} = validation.data
|
|
||||||
|
|
||||||
const pageIdValidation = validateAlphanumericId(pageId, 'pageId', 255)
|
|
||||||
if (!pageIdValidation.isValid) {
|
|
||||||
return NextResponse.json({ error: pageIdValidation.error }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const propertyIdValidation = validateAlphanumericId(propertyId, 'propertyId', 255)
|
|
||||||
if (!propertyIdValidation.isValid) {
|
|
||||||
return NextResponse.json({ error: propertyIdValidation.error }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const cloudId = providedCloudId || (await getConfluenceCloudId(domain, accessToken))
|
|
||||||
|
|
||||||
const cloudIdValidation = validateJiraCloudId(cloudId, 'cloudId')
|
|
||||||
if (!cloudIdValidation.isValid) {
|
|
||||||
return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const url = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/api/v2/pages/${pageId}/properties/${propertyId}`
|
|
||||||
|
|
||||||
const response = await fetch(url, {
|
|
||||||
method: 'PUT',
|
|
||||||
headers: {
|
|
||||||
Accept: 'application/json',
|
|
||||||
'Content-Type': 'application/json',
|
|
||||||
Authorization: `Bearer ${accessToken}`,
|
|
||||||
},
|
|
||||||
body: JSON.stringify({
|
|
||||||
key,
|
|
||||||
value,
|
|
||||||
version: { number: versionNumber },
|
|
||||||
}),
|
|
||||||
})
|
|
||||||
|
|
||||||
if (!response.ok) {
|
|
||||||
const errorData = await response.json().catch(() => null)
|
|
||||||
logger.error('Confluence API error response:', {
|
|
||||||
status: response.status,
|
|
||||||
statusText: response.statusText,
|
|
||||||
error: JSON.stringify(errorData, null, 2),
|
|
||||||
})
|
|
||||||
const errorMessage =
|
|
||||||
errorData?.message || `Failed to update page property (${response.status})`
|
|
||||||
return NextResponse.json({ error: errorMessage }, { status: response.status })
|
|
||||||
}
|
|
||||||
|
|
||||||
const data = await response.json()
|
|
||||||
return NextResponse.json({
|
|
||||||
id: data.id,
|
|
||||||
key: data.key,
|
|
||||||
value: data.value,
|
|
||||||
version: data.version,
|
|
||||||
pageId,
|
|
||||||
})
|
|
||||||
} catch (error) {
|
|
||||||
logger.error('Error updating page property:', error)
|
|
||||||
return NextResponse.json(
|
|
||||||
{ error: (error as Error).message || 'Internal server error' },
|
|
||||||
{ status: 500 }
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Delete a content property from a page.
|
|
||||||
*/
|
|
||||||
export async function DELETE(request: NextRequest) {
|
|
||||||
try {
|
|
||||||
const auth = await checkSessionOrInternalAuth(request)
|
|
||||||
if (!auth.success || !auth.userId) {
|
|
||||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const body = await request.json()
|
|
||||||
|
|
||||||
const validation = deletePropertySchema.safeParse(body)
|
|
||||||
if (!validation.success) {
|
|
||||||
const firstError = validation.error.errors[0]
|
|
||||||
return NextResponse.json({ error: firstError.message }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const { domain, accessToken, cloudId: providedCloudId, pageId, propertyId } = validation.data
|
|
||||||
|
|
||||||
const pageIdValidation = validateAlphanumericId(pageId, 'pageId', 255)
|
|
||||||
if (!pageIdValidation.isValid) {
|
|
||||||
return NextResponse.json({ error: pageIdValidation.error }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const propertyIdValidation = validateAlphanumericId(propertyId, 'propertyId', 255)
|
|
||||||
if (!propertyIdValidation.isValid) {
|
|
||||||
return NextResponse.json({ error: propertyIdValidation.error }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const cloudId = providedCloudId || (await getConfluenceCloudId(domain, accessToken))
|
|
||||||
|
|
||||||
const cloudIdValidation = validateJiraCloudId(cloudId, 'cloudId')
|
|
||||||
if (!cloudIdValidation.isValid) {
|
|
||||||
return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const url = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/api/v2/pages/${pageId}/properties/${propertyId}`
|
|
||||||
|
|
||||||
const response = await fetch(url, {
|
|
||||||
method: 'DELETE',
|
|
||||||
headers: {
|
|
||||||
Accept: 'application/json',
|
|
||||||
Authorization: `Bearer ${accessToken}`,
|
|
||||||
},
|
|
||||||
})
|
|
||||||
|
|
||||||
if (!response.ok) {
|
|
||||||
const errorData = await response.json().catch(() => null)
|
|
||||||
logger.error('Confluence API error response:', {
|
|
||||||
status: response.status,
|
|
||||||
statusText: response.statusText,
|
|
||||||
error: JSON.stringify(errorData, null, 2),
|
|
||||||
})
|
|
||||||
const errorMessage =
|
|
||||||
errorData?.message || `Failed to delete page property (${response.status})`
|
|
||||||
return NextResponse.json({ error: errorMessage }, { status: response.status })
|
|
||||||
}
|
|
||||||
|
|
||||||
return NextResponse.json({ propertyId, pageId, deleted: true })
|
|
||||||
} catch (error) {
|
|
||||||
logger.error('Error deleting page property:', error)
|
|
||||||
return NextResponse.json(
|
|
||||||
{ error: (error as Error).message || 'Internal server error' },
|
|
||||||
{ status: 500 }
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,151 +0,0 @@
|
|||||||
import { createLogger } from '@sim/logger'
|
|
||||||
import { type NextRequest, NextResponse } from 'next/server'
|
|
||||||
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
|
|
||||||
import { validateAlphanumericId, validateJiraCloudId } from '@/lib/core/security/input-validation'
|
|
||||||
import { getConfluenceCloudId } from '@/tools/confluence/utils'
|
|
||||||
|
|
||||||
const logger = createLogger('ConfluencePageVersionsAPI')
|
|
||||||
|
|
||||||
export const dynamic = 'force-dynamic'
|
|
||||||
|
|
||||||
/**
|
|
||||||
* List all versions of a page or get a specific version.
|
|
||||||
* Uses GET /wiki/api/v2/pages/{id}/versions
|
|
||||||
* and GET /wiki/api/v2/pages/{page-id}/versions/{version-number}
|
|
||||||
*/
|
|
||||||
export async function POST(request: NextRequest) {
|
|
||||||
try {
|
|
||||||
const auth = await checkSessionOrInternalAuth(request)
|
|
||||||
if (!auth.success || !auth.userId) {
|
|
||||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const body = await request.json()
|
|
||||||
const {
|
|
||||||
domain,
|
|
||||||
accessToken,
|
|
||||||
pageId,
|
|
||||||
versionNumber,
|
|
||||||
cloudId: providedCloudId,
|
|
||||||
limit = 50,
|
|
||||||
cursor,
|
|
||||||
} = body
|
|
||||||
|
|
||||||
if (!domain) {
|
|
||||||
return NextResponse.json({ error: 'Domain is required' }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!accessToken) {
|
|
||||||
return NextResponse.json({ error: 'Access token is required' }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!pageId) {
|
|
||||||
return NextResponse.json({ error: 'Page ID is required' }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const pageIdValidation = validateAlphanumericId(pageId, 'pageId', 255)
|
|
||||||
if (!pageIdValidation.isValid) {
|
|
||||||
return NextResponse.json({ error: pageIdValidation.error }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const cloudId = providedCloudId || (await getConfluenceCloudId(domain, accessToken))
|
|
||||||
|
|
||||||
const cloudIdValidation = validateJiraCloudId(cloudId, 'cloudId')
|
|
||||||
if (!cloudIdValidation.isValid) {
|
|
||||||
return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
// If versionNumber is provided, get specific version
|
|
||||||
if (versionNumber !== undefined && versionNumber !== null) {
|
|
||||||
const url = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/api/v2/pages/${pageId}/versions/${versionNumber}`
|
|
||||||
|
|
||||||
logger.info(`Fetching version ${versionNumber} for page ${pageId}`)
|
|
||||||
|
|
||||||
const response = await fetch(url, {
|
|
||||||
method: 'GET',
|
|
||||||
headers: {
|
|
||||||
Accept: 'application/json',
|
|
||||||
Authorization: `Bearer ${accessToken}`,
|
|
||||||
},
|
|
||||||
})
|
|
||||||
|
|
||||||
if (!response.ok) {
|
|
||||||
const errorData = await response.json().catch(() => null)
|
|
||||||
logger.error('Confluence API error response:', {
|
|
||||||
status: response.status,
|
|
||||||
statusText: response.statusText,
|
|
||||||
error: JSON.stringify(errorData, null, 2),
|
|
||||||
})
|
|
||||||
const errorMessage = errorData?.message || `Failed to get page version (${response.status})`
|
|
||||||
return NextResponse.json({ error: errorMessage }, { status: response.status })
|
|
||||||
}
|
|
||||||
|
|
||||||
const data = await response.json()
|
|
||||||
|
|
||||||
return NextResponse.json({
|
|
||||||
version: {
|
|
||||||
number: data.number,
|
|
||||||
message: data.message ?? null,
|
|
||||||
minorEdit: data.minorEdit ?? false,
|
|
||||||
authorId: data.authorId ?? null,
|
|
||||||
createdAt: data.createdAt ?? null,
|
|
||||||
},
|
|
||||||
pageId,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
// List all versions
|
|
||||||
const queryParams = new URLSearchParams()
|
|
||||||
queryParams.append('limit', String(Math.min(limit, 250)))
|
|
||||||
|
|
||||||
if (cursor) {
|
|
||||||
queryParams.append('cursor', cursor)
|
|
||||||
}
|
|
||||||
|
|
||||||
const url = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/api/v2/pages/${pageId}/versions?${queryParams.toString()}`
|
|
||||||
|
|
||||||
logger.info(`Fetching versions for page ${pageId}`)
|
|
||||||
|
|
||||||
const response = await fetch(url, {
|
|
||||||
method: 'GET',
|
|
||||||
headers: {
|
|
||||||
Accept: 'application/json',
|
|
||||||
Authorization: `Bearer ${accessToken}`,
|
|
||||||
},
|
|
||||||
})
|
|
||||||
|
|
||||||
if (!response.ok) {
|
|
||||||
const errorData = await response.json().catch(() => null)
|
|
||||||
logger.error('Confluence API error response:', {
|
|
||||||
status: response.status,
|
|
||||||
statusText: response.statusText,
|
|
||||||
error: JSON.stringify(errorData, null, 2),
|
|
||||||
})
|
|
||||||
const errorMessage = errorData?.message || `Failed to list page versions (${response.status})`
|
|
||||||
return NextResponse.json({ error: errorMessage }, { status: response.status })
|
|
||||||
}
|
|
||||||
|
|
||||||
const data = await response.json()
|
|
||||||
|
|
||||||
const versions = (data.results || []).map((version: any) => ({
|
|
||||||
number: version.number,
|
|
||||||
message: version.message ?? null,
|
|
||||||
minorEdit: version.minorEdit ?? false,
|
|
||||||
authorId: version.authorId ?? null,
|
|
||||||
createdAt: version.createdAt ?? null,
|
|
||||||
}))
|
|
||||||
|
|
||||||
return NextResponse.json({
|
|
||||||
versions,
|
|
||||||
pageId,
|
|
||||||
nextCursor: data._links?.next
|
|
||||||
? new URL(data._links.next, 'https://placeholder').searchParams.get('cursor')
|
|
||||||
: null,
|
|
||||||
})
|
|
||||||
} catch (error) {
|
|
||||||
logger.error('Error with page versions:', error)
|
|
||||||
return NextResponse.json(
|
|
||||||
{ error: (error as Error).message || 'Internal server error' },
|
|
||||||
{ status: 500 }
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -62,7 +62,6 @@ const deletePageSchema = z
|
|||||||
accessToken: z.string().min(1, 'Access token is required'),
|
accessToken: z.string().min(1, 'Access token is required'),
|
||||||
cloudId: z.string().optional(),
|
cloudId: z.string().optional(),
|
||||||
pageId: z.string().min(1, 'Page ID is required'),
|
pageId: z.string().min(1, 'Page ID is required'),
|
||||||
purge: z.boolean().optional(),
|
|
||||||
})
|
})
|
||||||
.refine(
|
.refine(
|
||||||
(data) => {
|
(data) => {
|
||||||
@@ -99,7 +98,7 @@ export async function POST(request: NextRequest) {
|
|||||||
return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 })
|
return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 })
|
||||||
}
|
}
|
||||||
|
|
||||||
const url = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/api/v2/pages/${pageId}?body-format=storage`
|
const url = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/api/v2/pages/${pageId}?expand=body.storage,body.view,body.atlas_doc_format`
|
||||||
|
|
||||||
const response = await fetch(url, {
|
const response = await fetch(url, {
|
||||||
method: 'GET',
|
method: 'GET',
|
||||||
@@ -131,18 +130,16 @@ export async function POST(request: NextRequest) {
|
|||||||
id: data.id,
|
id: data.id,
|
||||||
title: data.title,
|
title: data.title,
|
||||||
body: {
|
body: {
|
||||||
storage: {
|
view: {
|
||||||
value: data.body?.storage?.value ?? null,
|
value:
|
||||||
representation: 'storage',
|
data.body?.storage?.value ||
|
||||||
|
data.body?.view?.value ||
|
||||||
|
data.body?.atlas_doc_format?.value ||
|
||||||
|
data.content || // try alternative fields
|
||||||
|
data.description ||
|
||||||
|
`Content for page ${data.title}`, // fallback content
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
status: data.status ?? null,
|
|
||||||
spaceId: data.spaceId ?? null,
|
|
||||||
parentId: data.parentId ?? null,
|
|
||||||
authorId: data.authorId ?? null,
|
|
||||||
createdAt: data.createdAt ?? null,
|
|
||||||
version: data.version ?? null,
|
|
||||||
_links: data._links ?? null,
|
|
||||||
})
|
})
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
logger.error('Error fetching Confluence page:', error)
|
logger.error('Error fetching Confluence page:', error)
|
||||||
@@ -277,7 +274,7 @@ export async function DELETE(request: NextRequest) {
|
|||||||
return NextResponse.json({ error: firstError.message }, { status: 400 })
|
return NextResponse.json({ error: firstError.message }, { status: 400 })
|
||||||
}
|
}
|
||||||
|
|
||||||
const { domain, accessToken, cloudId: providedCloudId, pageId, purge } = validation.data
|
const { domain, accessToken, cloudId: providedCloudId, pageId } = validation.data
|
||||||
|
|
||||||
const cloudId = providedCloudId || (await getConfluenceCloudId(domain, accessToken))
|
const cloudId = providedCloudId || (await getConfluenceCloudId(domain, accessToken))
|
||||||
|
|
||||||
@@ -286,12 +283,7 @@ export async function DELETE(request: NextRequest) {
|
|||||||
return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 })
|
return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 })
|
||||||
}
|
}
|
||||||
|
|
||||||
const queryParams = new URLSearchParams()
|
const url = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/api/v2/pages/${pageId}`
|
||||||
if (purge) {
|
|
||||||
queryParams.append('purge', 'true')
|
|
||||||
}
|
|
||||||
const queryString = queryParams.toString()
|
|
||||||
const url = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/api/v2/pages/${pageId}${queryString ? `?${queryString}` : ''}`
|
|
||||||
|
|
||||||
const response = await fetch(url, {
|
const response = await fetch(url, {
|
||||||
method: 'DELETE',
|
method: 'DELETE',
|
||||||
|
|||||||
@@ -32,6 +32,7 @@ export async function POST(request: NextRequest) {
|
|||||||
return NextResponse.json({ error: 'Access token is required' }, { status: 400 })
|
return NextResponse.json({ error: 'Access token is required' }, { status: 400 })
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Use provided cloudId or fetch it if not provided
|
||||||
const cloudId = providedCloudId || (await getConfluenceCloudId(domain, accessToken))
|
const cloudId = providedCloudId || (await getConfluenceCloudId(domain, accessToken))
|
||||||
|
|
||||||
const cloudIdValidation = validateJiraCloudId(cloudId, 'cloudId')
|
const cloudIdValidation = validateJiraCloudId(cloudId, 'cloudId')
|
||||||
@@ -39,6 +40,7 @@ export async function POST(request: NextRequest) {
|
|||||||
return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 })
|
return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 })
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Build the URL with query parameters
|
||||||
const baseUrl = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/api/v2/pages`
|
const baseUrl = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/api/v2/pages`
|
||||||
const queryParams = new URLSearchParams()
|
const queryParams = new URLSearchParams()
|
||||||
|
|
||||||
@@ -55,6 +57,7 @@ export async function POST(request: NextRequest) {
|
|||||||
|
|
||||||
logger.info(`Fetching Confluence pages from: ${url}`)
|
logger.info(`Fetching Confluence pages from: ${url}`)
|
||||||
|
|
||||||
|
// Make the request to Confluence API with OAuth Bearer token
|
||||||
const response = await fetch(url, {
|
const response = await fetch(url, {
|
||||||
method: 'GET',
|
method: 'GET',
|
||||||
headers: {
|
headers: {
|
||||||
@@ -76,6 +79,7 @@ export async function POST(request: NextRequest) {
|
|||||||
} catch (e) {
|
} catch (e) {
|
||||||
logger.error('Could not parse error response as JSON:', e)
|
logger.error('Could not parse error response as JSON:', e)
|
||||||
|
|
||||||
|
// Try to get the response text for more context
|
||||||
try {
|
try {
|
||||||
const text = await response.text()
|
const text = await response.text()
|
||||||
logger.error('Response text:', text)
|
logger.error('Response text:', text)
|
||||||
|
|||||||
@@ -1,120 +0,0 @@
|
|||||||
import { createLogger } from '@sim/logger'
|
|
||||||
import { type NextRequest, NextResponse } from 'next/server'
|
|
||||||
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
|
|
||||||
import { validateAlphanumericId, validateJiraCloudId } from '@/lib/core/security/input-validation'
|
|
||||||
import { getConfluenceCloudId } from '@/tools/confluence/utils'
|
|
||||||
|
|
||||||
const logger = createLogger('ConfluenceSearchInSpaceAPI')
|
|
||||||
|
|
||||||
export const dynamic = 'force-dynamic'
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Search for content within a specific Confluence space using CQL.
|
|
||||||
*/
|
|
||||||
export async function POST(request: NextRequest) {
|
|
||||||
try {
|
|
||||||
const auth = await checkSessionOrInternalAuth(request)
|
|
||||||
if (!auth.success || !auth.userId) {
|
|
||||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const body = await request.json()
|
|
||||||
const {
|
|
||||||
domain,
|
|
||||||
accessToken,
|
|
||||||
spaceKey,
|
|
||||||
query,
|
|
||||||
cloudId: providedCloudId,
|
|
||||||
limit = 25,
|
|
||||||
contentType,
|
|
||||||
} = body
|
|
||||||
|
|
||||||
if (!domain) {
|
|
||||||
return NextResponse.json({ error: 'Domain is required' }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!accessToken) {
|
|
||||||
return NextResponse.json({ error: 'Access token is required' }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!spaceKey) {
|
|
||||||
return NextResponse.json({ error: 'Space key is required' }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const spaceKeyValidation = validateAlphanumericId(spaceKey, 'spaceKey', 255)
|
|
||||||
if (!spaceKeyValidation.isValid) {
|
|
||||||
return NextResponse.json({ error: spaceKeyValidation.error }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const cloudId = providedCloudId || (await getConfluenceCloudId(domain, accessToken))
|
|
||||||
|
|
||||||
const cloudIdValidation = validateJiraCloudId(cloudId, 'cloudId')
|
|
||||||
if (!cloudIdValidation.isValid) {
|
|
||||||
return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const escapeCqlValue = (value: string) => value.replace(/"/g, '\\"')
|
|
||||||
|
|
||||||
let cql = `space = "${escapeCqlValue(spaceKey)}"`
|
|
||||||
|
|
||||||
if (query) {
|
|
||||||
cql += ` AND text ~ "${escapeCqlValue(query)}"`
|
|
||||||
}
|
|
||||||
|
|
||||||
if (contentType) {
|
|
||||||
cql += ` AND type = "${escapeCqlValue(contentType)}"`
|
|
||||||
}
|
|
||||||
|
|
||||||
const searchParams = new URLSearchParams({
|
|
||||||
cql,
|
|
||||||
limit: String(Math.min(limit, 250)),
|
|
||||||
})
|
|
||||||
|
|
||||||
const url = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/rest/api/search?${searchParams.toString()}`
|
|
||||||
|
|
||||||
logger.info(`Searching in space ${spaceKey} with CQL: ${cql}`)
|
|
||||||
|
|
||||||
const response = await fetch(url, {
|
|
||||||
method: 'GET',
|
|
||||||
headers: {
|
|
||||||
Accept: 'application/json',
|
|
||||||
Authorization: `Bearer ${accessToken}`,
|
|
||||||
},
|
|
||||||
})
|
|
||||||
|
|
||||||
if (!response.ok) {
|
|
||||||
const errorData = await response.json().catch(() => null)
|
|
||||||
logger.error('Confluence API error response:', {
|
|
||||||
status: response.status,
|
|
||||||
statusText: response.statusText,
|
|
||||||
error: JSON.stringify(errorData, null, 2),
|
|
||||||
})
|
|
||||||
const errorMessage = errorData?.message || `Failed to search in space (${response.status})`
|
|
||||||
return NextResponse.json({ error: errorMessage }, { status: response.status })
|
|
||||||
}
|
|
||||||
|
|
||||||
const data = await response.json()
|
|
||||||
|
|
||||||
const results = (data.results || []).map((result: any) => ({
|
|
||||||
id: result.content?.id ?? result.id,
|
|
||||||
title: result.content?.title ?? result.title,
|
|
||||||
type: result.content?.type ?? result.type,
|
|
||||||
status: result.content?.status ?? null,
|
|
||||||
url: result.url ?? result._links?.webui ?? '',
|
|
||||||
excerpt: result.excerpt ?? '',
|
|
||||||
lastModified: result.lastModified ?? null,
|
|
||||||
}))
|
|
||||||
|
|
||||||
return NextResponse.json({
|
|
||||||
results,
|
|
||||||
spaceKey,
|
|
||||||
totalSize: data.totalSize ?? results.length,
|
|
||||||
})
|
|
||||||
} catch (error) {
|
|
||||||
logger.error('Error searching in space:', error)
|
|
||||||
return NextResponse.json(
|
|
||||||
{ error: (error as Error).message || 'Internal server error' },
|
|
||||||
{ status: 500 }
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -42,10 +42,8 @@ export async function POST(request: NextRequest) {
|
|||||||
return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 })
|
return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 })
|
||||||
}
|
}
|
||||||
|
|
||||||
const escapeCqlValue = (value: string) => value.replace(/"/g, '\\"')
|
|
||||||
|
|
||||||
const searchParams = new URLSearchParams({
|
const searchParams = new URLSearchParams({
|
||||||
cql: `text ~ "${escapeCqlValue(query)}"`,
|
cql: `text ~ "${query}"`,
|
||||||
limit: limit.toString(),
|
limit: limit.toString(),
|
||||||
})
|
})
|
||||||
|
|
||||||
@@ -72,27 +70,13 @@ export async function POST(request: NextRequest) {
|
|||||||
|
|
||||||
const data = await response.json()
|
const data = await response.json()
|
||||||
|
|
||||||
const results = (data.results || []).map((result: any) => {
|
const results = (data.results || []).map((result: any) => ({
|
||||||
const spaceData = result.resultGlobalContainer || result.content?.space
|
id: result.content?.id || result.id,
|
||||||
return {
|
title: result.content?.title || result.title,
|
||||||
id: result.content?.id || result.id,
|
type: result.content?.type || result.type,
|
||||||
title: result.content?.title || result.title,
|
url: result.url || result._links?.webui || '',
|
||||||
type: result.content?.type || result.type,
|
excerpt: result.excerpt || '',
|
||||||
url: result.url || result._links?.webui || '',
|
}))
|
||||||
excerpt: result.excerpt || '',
|
|
||||||
status: result.content?.status ?? null,
|
|
||||||
spaceKey: result.resultGlobalContainer?.key ?? result.content?.space?.key ?? null,
|
|
||||||
space: spaceData
|
|
||||||
? {
|
|
||||||
id: spaceData.id ?? null,
|
|
||||||
key: spaceData.key ?? null,
|
|
||||||
name: spaceData.name ?? spaceData.title ?? null,
|
|
||||||
}
|
|
||||||
: null,
|
|
||||||
lastModified: result.lastModified ?? result.content?.history?.lastUpdated?.when ?? null,
|
|
||||||
entityType: result.entityType ?? null,
|
|
||||||
}
|
|
||||||
})
|
|
||||||
|
|
||||||
return NextResponse.json({ results })
|
return NextResponse.json({ results })
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
|
|||||||
@@ -1,124 +0,0 @@
-import { createLogger } from '@sim/logger'
-import { type NextRequest, NextResponse } from 'next/server'
-import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
-import { validateAlphanumericId, validateJiraCloudId } from '@/lib/core/security/input-validation'
-import { getConfluenceCloudId } from '@/tools/confluence/utils'
-
-const logger = createLogger('ConfluenceSpaceBlogPostsAPI')
-
-export const dynamic = 'force-dynamic'
-
-/**
- * List all blog posts in a specific Confluence space.
- * Uses GET /wiki/api/v2/spaces/{id}/blogposts
- */
-export async function POST(request: NextRequest) {
-  try {
-    const auth = await checkSessionOrInternalAuth(request)
-    if (!auth.success || !auth.userId) {
-      return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
-    }
-
-    const body = await request.json()
-    const {
-      domain,
-      accessToken,
-      spaceId,
-      cloudId: providedCloudId,
-      limit = 25,
-      status,
-      bodyFormat,
-      cursor,
-    } = body
-
-    if (!domain) {
-      return NextResponse.json({ error: 'Domain is required' }, { status: 400 })
-    }
-
-    if (!accessToken) {
-      return NextResponse.json({ error: 'Access token is required' }, { status: 400 })
-    }
-
-    if (!spaceId) {
-      return NextResponse.json({ error: 'Space ID is required' }, { status: 400 })
-    }
-
-    const spaceIdValidation = validateAlphanumericId(spaceId, 'spaceId', 255)
-    if (!spaceIdValidation.isValid) {
-      return NextResponse.json({ error: spaceIdValidation.error }, { status: 400 })
-    }
-
-    const cloudId = providedCloudId || (await getConfluenceCloudId(domain, accessToken))
-
-    const cloudIdValidation = validateJiraCloudId(cloudId, 'cloudId')
-    if (!cloudIdValidation.isValid) {
-      return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 })
-    }
-
-    const queryParams = new URLSearchParams()
-    queryParams.append('limit', String(Math.min(limit, 250)))
-
-    if (status) {
-      queryParams.append('status', status)
-    }
-
-    if (bodyFormat) {
-      queryParams.append('body-format', bodyFormat)
-    }
-
-    if (cursor) {
-      queryParams.append('cursor', cursor)
-    }
-
-    const url = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/api/v2/spaces/${spaceId}/blogposts?${queryParams.toString()}`
-
-    logger.info(`Fetching blog posts in space ${spaceId}`)
-
-    const response = await fetch(url, {
-      method: 'GET',
-      headers: {
-        Accept: 'application/json',
-        Authorization: `Bearer ${accessToken}`,
-      },
-    })
-
-    if (!response.ok) {
-      const errorData = await response.json().catch(() => null)
-      logger.error('Confluence API error response:', {
-        status: response.status,
-        statusText: response.statusText,
-        error: JSON.stringify(errorData, null, 2),
-      })
-      const errorMessage =
-        errorData?.message || `Failed to list blog posts in space (${response.status})`
-      return NextResponse.json({ error: errorMessage }, { status: response.status })
-    }
-
-    const data = await response.json()
-
-    const blogPosts = (data.results || []).map((post: any) => ({
-      id: post.id,
-      title: post.title,
-      status: post.status ?? null,
-      spaceId: post.spaceId ?? null,
-      authorId: post.authorId ?? null,
-      createdAt: post.createdAt ?? null,
-      version: post.version ?? null,
-      body: post.body ?? null,
-      webUrl: post._links?.webui ?? null,
-    }))
-
-    return NextResponse.json({
-      blogPosts,
-      nextCursor: data._links?.next
-        ? new URL(data._links.next, 'https://placeholder').searchParams.get('cursor')
-        : null,
-    })
-  } catch (error) {
-    logger.error('Error listing blog posts in space:', error)
-    return NextResponse.json(
-      { error: (error as Error).message || 'Internal server error' },
-      { status: 500 }
-    )
-  }
-}
@@ -1,125 +0,0 @@
-import { createLogger } from '@sim/logger'
-import { type NextRequest, NextResponse } from 'next/server'
-import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
-import { validateAlphanumericId, validateJiraCloudId } from '@/lib/core/security/input-validation'
-import { getConfluenceCloudId } from '@/tools/confluence/utils'
-
-const logger = createLogger('ConfluenceSpacePagesAPI')
-
-export const dynamic = 'force-dynamic'
-
-/**
- * List all pages in a specific Confluence space.
- * Uses GET /wiki/api/v2/spaces/{id}/pages
- */
-export async function POST(request: NextRequest) {
-  try {
-    const auth = await checkSessionOrInternalAuth(request)
-    if (!auth.success || !auth.userId) {
-      return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
-    }
-
-    const body = await request.json()
-    const {
-      domain,
-      accessToken,
-      spaceId,
-      cloudId: providedCloudId,
-      limit = 50,
-      status,
-      bodyFormat,
-      cursor,
-    } = body
-
-    if (!domain) {
-      return NextResponse.json({ error: 'Domain is required' }, { status: 400 })
-    }
-
-    if (!accessToken) {
-      return NextResponse.json({ error: 'Access token is required' }, { status: 400 })
-    }
-
-    if (!spaceId) {
-      return NextResponse.json({ error: 'Space ID is required' }, { status: 400 })
-    }
-
-    const spaceIdValidation = validateAlphanumericId(spaceId, 'spaceId', 255)
-    if (!spaceIdValidation.isValid) {
-      return NextResponse.json({ error: spaceIdValidation.error }, { status: 400 })
-    }
-
-    const cloudId = providedCloudId || (await getConfluenceCloudId(domain, accessToken))
-
-    const cloudIdValidation = validateJiraCloudId(cloudId, 'cloudId')
-    if (!cloudIdValidation.isValid) {
-      return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 })
-    }
-
-    const queryParams = new URLSearchParams()
-    queryParams.append('limit', String(Math.min(limit, 250)))
-
-    if (status) {
-      queryParams.append('status', status)
-    }
-
-    if (bodyFormat) {
-      queryParams.append('body-format', bodyFormat)
-    }
-
-    if (cursor) {
-      queryParams.append('cursor', cursor)
-    }
-
-    const url = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/api/v2/spaces/${spaceId}/pages?${queryParams.toString()}`
-
-    logger.info(`Fetching pages in space ${spaceId}`)
-
-    const response = await fetch(url, {
-      method: 'GET',
-      headers: {
-        Accept: 'application/json',
-        Authorization: `Bearer ${accessToken}`,
-      },
-    })
-
-    if (!response.ok) {
-      const errorData = await response.json().catch(() => null)
-      logger.error('Confluence API error response:', {
-        status: response.status,
-        statusText: response.statusText,
-        error: JSON.stringify(errorData, null, 2),
-      })
-      const errorMessage =
-        errorData?.message || `Failed to list pages in space (${response.status})`
-      return NextResponse.json({ error: errorMessage }, { status: response.status })
-    }
-
-    const data = await response.json()
-
-    const pages = (data.results || []).map((page: any) => ({
-      id: page.id,
-      title: page.title,
-      status: page.status ?? null,
-      spaceId: page.spaceId ?? null,
-      parentId: page.parentId ?? null,
-      authorId: page.authorId ?? null,
-      createdAt: page.createdAt ?? null,
-      version: page.version ?? null,
-      body: page.body ?? null,
-      webUrl: page._links?.webui ?? null,
-    }))
-
-    return NextResponse.json({
-      pages,
-      nextCursor: data._links?.next
-        ? new URL(data._links.next, 'https://placeholder').searchParams.get('cursor')
-        : null,
-    })
-  } catch (error) {
-    logger.error('Error listing pages in space:', error)
-    return NextResponse.json(
-      { error: (error as Error).message || 'Internal server error' },
-      { status: 500 }
-    )
-  }
-}
@@ -21,7 +21,6 @@ export async function GET(request: NextRequest) {
   const accessToken = searchParams.get('accessToken')
   const providedCloudId = searchParams.get('cloudId')
   const limit = searchParams.get('limit') || '25'
-  const cursor = searchParams.get('cursor')

   if (!domain) {
     return NextResponse.json({ error: 'Domain is required' }, { status: 400 })
@@ -38,12 +37,7 @@ export async function GET(request: NextRequest) {
      return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 })
    }

-    const queryParams = new URLSearchParams()
-    queryParams.append('limit', String(Math.min(Number(limit), 250)))
-    if (cursor) {
-      queryParams.append('cursor', cursor)
-    }
-    const url = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/api/v2/spaces?${queryParams.toString()}`
+    const url = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/api/v2/spaces?limit=${limit}`

    const response = await fetch(url, {
      method: 'GET',
@@ -73,18 +67,9 @@ export async function GET(request: NextRequest) {
      key: space.key,
      type: space.type,
      status: space.status,
-      authorId: space.authorId ?? null,
-      createdAt: space.createdAt ?? null,
-      homepageId: space.homepageId ?? null,
-      description: space.description ?? null,
    }))

-    return NextResponse.json({
-      spaces,
-      nextCursor: data._links?.next
-        ? new URL(data._links.next, 'https://placeholder').searchParams.get('cursor')
-        : null,
-    })
+    return NextResponse.json({ spaces })
  } catch (error) {
    logger.error('Error listing Confluence spaces:', error)
    return NextResponse.json(
apps/sim/app/api/v1/copilot/chat/route.ts (new file, 116 lines)
@@ -0,0 +1,116 @@
+import { createLogger } from '@sim/logger'
+import { type NextRequest, NextResponse } from 'next/server'
+import { z } from 'zod'
+import { getCopilotModel } from '@/lib/copilot/config'
+import { SIM_AGENT_VERSION } from '@/lib/copilot/constants'
+import { COPILOT_REQUEST_MODES } from '@/lib/copilot/models'
+import { orchestrateCopilotStream } from '@/lib/copilot/orchestrator'
+import { resolveWorkflowIdForUser } from '@/lib/workflows/utils'
+import { authenticateV1Request } from '@/app/api/v1/auth'
+
+const logger = createLogger('CopilotHeadlessAPI')
+
+const RequestSchema = z.object({
+  message: z.string().min(1, 'message is required'),
+  workflowId: z.string().optional(),
+  workflowName: z.string().optional(),
+  chatId: z.string().optional(),
+  mode: z.enum(COPILOT_REQUEST_MODES).optional().default('agent'),
+  model: z.string().optional(),
+  autoExecuteTools: z.boolean().optional().default(true),
+  timeout: z.number().optional().default(300000),
+})
+
+/**
+ * POST /api/v1/copilot/chat
+ * Headless copilot endpoint for server-side orchestration.
+ *
+ * workflowId is optional - if not provided:
+ * - If workflowName is provided, finds that workflow
+ * - Otherwise uses the user's first workflow as context
+ * - The copilot can still operate on any workflow using list_user_workflows
+ */
+export async function POST(req: NextRequest) {
+  const auth = await authenticateV1Request(req)
+  if (!auth.authenticated || !auth.userId) {
+    return NextResponse.json(
+      { success: false, error: auth.error || 'Unauthorized' },
+      { status: 401 }
+    )
+  }
+
+  try {
+    const body = await req.json()
+    const parsed = RequestSchema.parse(body)
+    const defaults = getCopilotModel('chat')
+    const selectedModel = parsed.model || defaults.model
+
+    // Resolve workflow ID
+    const resolved = await resolveWorkflowIdForUser(
+      auth.userId,
+      parsed.workflowId,
+      parsed.workflowName
+    )
+    if (!resolved) {
+      return NextResponse.json(
+        {
+          success: false,
+          error: 'No workflows found. Create a workflow first or provide a valid workflowId.',
+        },
+        { status: 400 }
+      )
+    }
+
+    // Transform mode to transport mode (same as client API)
+    // build and agent both map to 'agent' on the backend
+    const effectiveMode = parsed.mode === 'agent' ? 'build' : parsed.mode
+    const transportMode = effectiveMode === 'build' ? 'agent' : effectiveMode
+
+    // Always generate a chatId - required for artifacts system to work with subagents
+    const chatId = parsed.chatId || crypto.randomUUID()
+
+    const requestPayload = {
+      message: parsed.message,
+      workflowId: resolved.workflowId,
+      userId: auth.userId,
+      stream: true,
+      streamToolCalls: true,
+      model: selectedModel,
+      mode: transportMode,
+      messageId: crypto.randomUUID(),
+      version: SIM_AGENT_VERSION,
+      headless: true, // Enable cross-workflow operations via workflowId params
+      chatId,
+    }
+
+    const result = await orchestrateCopilotStream(requestPayload, {
+      userId: auth.userId,
+      workflowId: resolved.workflowId,
+      chatId,
+      autoExecuteTools: parsed.autoExecuteTools,
+      timeout: parsed.timeout,
+      interactive: false,
+    })
+
+    return NextResponse.json({
+      success: result.success,
+      content: result.content,
+      toolCalls: result.toolCalls,
+      chatId: result.chatId || chatId, // Return the chatId for conversation continuity
+      conversationId: result.conversationId,
+      error: result.error,
+    })
+  } catch (error) {
+    if (error instanceof z.ZodError) {
+      return NextResponse.json(
+        { success: false, error: 'Invalid request', details: error.errors },
+        { status: 400 }
+      )
+    }
+
+    logger.error('Headless copilot request failed', {
+      error: error instanceof Error ? error.message : String(error),
+    })
+    return NextResponse.json({ success: false, error: 'Internal server error' }, { status: 500 })
+  }
+}
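For reference, a minimal sketch of how a headless client might call this new endpoint. The base URL, the API-key header name, and the environment variable are assumptions for illustration; the real authentication contract is whatever `authenticateV1Request` accepts, and only `message` is required by `RequestSchema`.

```typescript
// Hypothetical client call for POST /api/v1/copilot/chat (URL and auth header are assumptions).
async function runHeadlessCopilot(message: string, workflowName?: string) {
  const res = await fetch('https://sim.example.com/api/v1/copilot/chat', {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      // Assumed API-key style header; use whatever your deployment's v1 auth expects.
      'x-api-key': process.env.SIM_API_KEY ?? '',
    },
    body: JSON.stringify({
      message, // required
      workflowName, // optional: resolved via resolveWorkflowIdForUser
      mode: 'agent', // defaults to 'agent'
      autoExecuteTools: true,
    }),
  })
  if (!res.ok) throw new Error(`Copilot request failed: ${res.status}`)
  // Shape mirrors the route's response: { success, content, toolCalls, chatId, conversationId, error }
  return res.json()
}
```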
@@ -3,8 +3,8 @@
 import Image from 'next/image'
 import Link from 'next/link'
 import { GithubIcon } from '@/components/icons'
+import { useBrandConfig } from '@/lib/branding/branding'
 import { inter } from '@/app/_styles/fonts/inter/inter'
-import { useBrandConfig } from '@/ee/whitelabeling'

 interface ChatHeaderProps {
   chatConfig: {
@@ -1,8 +1,8 @@
 'use client'

 import Image from 'next/image'
+import { useBrandConfig } from '@/lib/branding/branding'
 import { inter } from '@/app/_styles/fonts/inter/inter'
-import { useBrandConfig } from '@/ee/whitelabeling'

 export function PoweredBySim() {
   const brandConfig = useBrandConfig()
@@ -2,12 +2,9 @@ import type { Metadata, Viewport } from 'next'
 import Script from 'next/script'
 import { PublicEnvScript } from 'next-runtime-env'
 import { BrandedLayout } from '@/components/branded-layout'
+import { generateThemeCSS } from '@/lib/branding/inject-theme'
+import { generateBrandedMetadata, generateStructuredData } from '@/lib/branding/metadata'
 import { PostHogProvider } from '@/app/_shell/providers/posthog-provider'
-import {
-  generateBrandedMetadata,
-  generateStructuredData,
-  generateThemeCSS,
-} from '@/ee/whitelabeling'
 import '@/app/_styles/globals.css'
 import { OneDollarStats } from '@/components/analytics/onedollarstats'
 import { isReactGrabEnabled, isReactScanEnabled } from '@/lib/core/config/feature-flags'
@@ -1,5 +1,5 @@
 import type { MetadataRoute } from 'next'
-import { getBrandConfig } from '@/ee/whitelabeling'
+import { getBrandConfig } from '@/lib/branding/branding'

 export default function manifest(): MetadataRoute.Manifest {
   const brand = getBrandConfig()
@@ -24,8 +24,8 @@ import {
   SelectTrigger,
   SelectValue,
 } from '@/components/ui/select'
+import { useBrandConfig } from '@/lib/branding/branding'
 import Nav from '@/app/(landing)/components/nav/nav'
-import { useBrandConfig } from '@/ee/whitelabeling'
 import type { ResumeStatus } from '@/executor/types'

 interface ResumeLinks {
@@ -1,6 +1,7 @@
 'use client'

 import { memo, useEffect, useMemo, useRef, useState } from 'react'
+import { createLogger } from '@sim/logger'
 import clsx from 'clsx'
 import { ChevronUp, LayoutList } from 'lucide-react'
 import Editor from 'react-simple-code-editor'
@@ -1257,99 +1258,42 @@ function shouldShowRunSkipButtons(toolCall: CopilotToolCall): boolean {
   return false
 }

+const toolCallLogger = createLogger('CopilotToolCall')
+
+async function sendToolDecision(
+  toolCallId: string,
+  status: 'accepted' | 'rejected' | 'background'
+) {
+  try {
+    await fetch('/api/copilot/confirm', {
+      method: 'POST',
+      headers: { 'Content-Type': 'application/json' },
+      body: JSON.stringify({ toolCallId, status }),
+    })
+  } catch (error) {
+    toolCallLogger.warn('Failed to send tool decision', {
+      toolCallId,
+      status,
+      error: error instanceof Error ? error.message : String(error),
+    })
+  }
+}
+
 async function handleRun(
   toolCall: CopilotToolCall,
   setToolCallState: any,
   onStateChange?: any,
   editedParams?: any
 ) {
-  const instance = getClientTool(toolCall.id)
-  if (!instance && isIntegrationTool(toolCall.name)) {
-    onStateChange?.('executing')
-    try {
-      await useCopilotStore.getState().executeIntegrationTool(toolCall.id)
-    } catch (e) {
-      setToolCallState(toolCall, 'error', { error: e instanceof Error ? e.message : String(e) })
-      onStateChange?.('error')
-      try {
-        await fetch('/api/copilot/tools/mark-complete', {
-          method: 'POST',
-          headers: { 'Content-Type': 'application/json' },
-          body: JSON.stringify({
-            id: toolCall.id,
-            name: toolCall.name,
-            status: 500,
-            message: e instanceof Error ? e.message : 'Tool execution failed',
-            data: { error: e instanceof Error ? e.message : String(e) },
-          }),
-        })
-      } catch {
-        console.error('[handleRun] Failed to notify backend of tool error:', toolCall.id)
-      }
-    }
-    return
-  }
-
-  if (!instance) return
-  try {
-    const mergedParams =
-      editedParams ||
-      (toolCall as any).params ||
-      (toolCall as any).parameters ||
-      (toolCall as any).input ||
-      {}
-    await instance.handleAccept?.(mergedParams)
-    onStateChange?.('executing')
-  } catch (e) {
-    setToolCallState(toolCall, 'error', { error: e instanceof Error ? e.message : String(e) })
-  }
+  setToolCallState(toolCall, 'executing', editedParams ? { params: editedParams } : undefined)
+  onStateChange?.('executing')
+  await sendToolDecision(toolCall.id, 'accepted')
 }

 async function handleSkip(toolCall: CopilotToolCall, setToolCallState: any, onStateChange?: any) {
-  const instance = getClientTool(toolCall.id)
-
-  if (!instance && isIntegrationTool(toolCall.name)) {
-    setToolCallState(toolCall, 'rejected')
-    onStateChange?.('rejected')
-
-    let notified = false
-    for (let attempt = 0; attempt < 3 && !notified; attempt++) {
-      try {
-        const res = await fetch('/api/copilot/tools/mark-complete', {
-          method: 'POST',
-          headers: { 'Content-Type': 'application/json' },
-          body: JSON.stringify({
-            id: toolCall.id,
-            name: toolCall.name,
-            status: 400,
-            message: 'Tool execution skipped by user',
-            data: { skipped: true, reason: 'user_skipped' },
-          }),
-        })
-        if (res.ok) {
-          notified = true
-        }
-      } catch (e) {
-        if (attempt < 2) {
-          await new Promise((resolve) => setTimeout(resolve, 500))
-        }
-      }
-    }
-
-    if (!notified) {
-      console.error('[handleSkip] Failed to notify backend after 3 attempts:', toolCall.id)
-    }
-    return
-  }
-
-  if (instance) {
-    try {
-      await instance.handleReject?.()
-    } catch {}
-  }
   setToolCallState(toolCall, 'rejected')
   onStateChange?.('rejected')
+  await sendToolDecision(toolCall.id, 'rejected')
 }

 function getDisplayName(toolCall: CopilotToolCall): string {
@@ -1509,7 +1453,7 @@ export function ToolCall({
   // Check if this integration tool is auto-allowed
   // Subscribe to autoAllowedTools so we re-render when it changes
   const autoAllowedTools = useCopilotStore((s) => s.autoAllowedTools)
-  const { removeAutoAllowedTool } = useCopilotStore()
+  const { removeAutoAllowedTool, setToolCallState } = useCopilotStore()
   const isAutoAllowed = isIntegrationTool(toolCall.name) && autoAllowedTools.includes(toolCall.name)

   // Update edited params when toolCall params change (deep comparison to avoid resetting user edits on ref change)
@@ -2211,16 +2155,9 @@ export function ToolCall({
           <div className='mt-[10px]'>
             <Button
               onClick={async () => {
-                try {
-                  const instance = getClientTool(toolCall.id)
-                  instance?.setState?.((ClientToolCallState as any).background)
-                  await instance?.markToolComplete?.(
-                    200,
-                    'The user has chosen to move the workflow execution to the background. Check back with them later to know when the workflow execution is complete'
-                  )
-                  forceUpdate({})
-                  onStateChange?.('background')
-                } catch {}
+                setToolCallState(toolCall, ClientToolCallState.background)
+                onStateChange?.('background')
+                await sendToolDecision(toolCall.id, 'background')
               }}
               variant='tertiary'
               title='Move to Background'
@@ -2232,21 +2169,9 @@ export function ToolCall({
           <div className='mt-[10px]'>
             <Button
               onClick={async () => {
-                try {
-                  const instance = getClientTool(toolCall.id)
-                  const elapsedSeconds = instance?.getElapsedSeconds?.() || 0
-                  instance?.setState?.((ClientToolCallState as any).background, {
-                    result: { _elapsedSeconds: elapsedSeconds },
-                  })
-                  const { updateToolCallParams } = useCopilotStore.getState()
-                  updateToolCallParams?.(toolCall.id, { _elapsedSeconds: Math.round(elapsedSeconds) })
-                  await instance?.markToolComplete?.(
-                    200,
-                    `User woke you up after ${Math.round(elapsedSeconds)} seconds`
-                  )
-                  forceUpdate({})
-                  onStateChange?.('background')
-                } catch {}
+                setToolCallState(toolCall, ClientToolCallState.background)
+                onStateChange?.('background')
+                await sendToolDecision(toolCall.id, 'background')
               }}
               variant='tertiary'
               title='Wake'
@@ -114,6 +114,7 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(({ panelWidth }, ref
     clearPlanArtifact,
     savePlanArtifact,
     loadAutoAllowedTools,
+    resumeActiveStream,
   } = useCopilotStore()

   // Initialize copilot
@@ -126,6 +127,7 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(({ panelWidth }, ref
     loadAutoAllowedTools,
     currentChat,
     isSendingMessage,
+    resumeActiveStream,
   })

   // Handle scroll management (80px stickiness for copilot)
@@ -421,8 +423,8 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(({ panelWidth }, ref
         </div>
       </div>

-      {/* Show loading state until fully initialized */}
-      {!isInitialized ? (
+      {/* Show loading state until fully initialized, but skip if actively streaming (resume case) */}
+      {!isInitialized && !isSendingMessage ? (
         <div className='flex h-full w-full items-center justify-center'>
           <div className='flex flex-col items-center gap-3'>
             <p className='text-muted-foreground text-sm'>Loading copilot</p>
@@ -14,6 +14,7 @@ interface UseCopilotInitializationProps {
   loadAutoAllowedTools: () => Promise<void>
   currentChat: any
   isSendingMessage: boolean
+  resumeActiveStream: () => Promise<boolean>
 }

 /**
@@ -32,11 +33,13 @@ export function useCopilotInitialization(props: UseCopilotInitializationProps) {
     loadAutoAllowedTools,
     currentChat,
     isSendingMessage,
+    resumeActiveStream,
   } = props

   const [isInitialized, setIsInitialized] = useState(false)
   const lastWorkflowIdRef = useRef<string | null>(null)
   const hasMountedRef = useRef(false)
+  const hasResumedRef = useRef(false)

   /** Initialize on mount - loads chats if needed. Never loads during streaming */
   useEffect(() => {
@@ -105,6 +108,16 @@ export function useCopilotInitialization(props: UseCopilotInitializationProps) {
     isSendingMessage,
   ])

+  /** Try to resume active stream on mount - runs early, before waiting for chats */
+  useEffect(() => {
+    if (hasResumedRef.current || isSendingMessage) return
+    hasResumedRef.current = true
+    // Resume immediately on mount - don't wait for isInitialized
+    resumeActiveStream().catch((err) => {
+      logger.warn('[Copilot] Failed to resume active stream', err)
+    })
+  }, [isSendingMessage, resumeActiveStream])
+
   /** Load auto-allowed tools once on mount - runs immediately, independent of workflow */
   const hasLoadedAutoAllowedToolsRef = useRef(false)
   useEffect(() => {
@@ -74,12 +74,6 @@ const SCOPE_DESCRIPTIONS: Record<string, string> = {
   'write:label:confluence': 'Add and remove labels',
   'search:confluence': 'Search Confluence content',
   'readonly:content.attachment:confluence': 'View attachments',
-  'read:blogpost:confluence': 'View Confluence blog posts',
-  'write:blogpost:confluence': 'Create and update Confluence blog posts',
-  'read:content.property:confluence': 'View properties on Confluence content',
-  'write:content.property:confluence': 'Create and manage content properties',
-  'read:hierarchical-content:confluence': 'View page hierarchy (children and ancestors)',
-  'read:content.metadata:confluence': 'View content metadata (required for ancestors)',
   'read:me': 'Read profile information',
   'database.read': 'Read database',
   'database.write': 'Write to database',
@@ -364,7 +358,6 @@ export function OAuthRequiredModal({
       logger.info('Linking OAuth2:', {
         providerId,
         requiredScopes,
-        hasNewScopes: newScopes.length > 0,
       })

       if (providerId === 'trello') {
@@ -34,103 +34,6 @@ interface UploadedFile {
   type: string
 }

-interface SingleFileSelectorProps {
-  file: UploadedFile
-  options: Array<{ label: string; value: string; disabled?: boolean }>
-  selectedValue: string
-  inputValue: string
-  onInputChange: (value: string) => void
-  onClear: (e: React.MouseEvent) => void
-  onOpenChange: (open: boolean) => void
-  disabled: boolean
-  isLoading: boolean
-  formatFileSize: (bytes: number) => string
-  truncateMiddle: (text: string, start?: number, end?: number) => string
-  isDeleting: boolean
-}
-
-/**
- * Single file selector component that shows the selected file with both
- * a clear button (X) and a chevron to change the selection.
- * Follows the same pattern as SelectorCombobox for consistency.
- */
-function SingleFileSelector({
-  file,
-  options,
-  selectedValue,
-  inputValue,
-  onInputChange,
-  onClear,
-  onOpenChange,
-  disabled,
-  isLoading,
-  formatFileSize,
-  truncateMiddle,
-  isDeleting,
-}: SingleFileSelectorProps) {
-  const displayLabel = `${truncateMiddle(file.name, 20, 12)} (${formatFileSize(file.size)})`
-  const [localInputValue, setLocalInputValue] = useState(displayLabel)
-  const [isEditing, setIsEditing] = useState(false)
-
-  // Sync display label when file changes
-  useEffect(() => {
-    if (!isEditing) {
-      setLocalInputValue(displayLabel)
-    }
-  }, [displayLabel, isEditing])
-
-  return (
-    <div className='relative w-full'>
-      <Combobox
-        options={options}
-        value={localInputValue}
-        selectedValue={selectedValue}
-        onChange={(newValue) => {
-          // Check if user selected an option
-          const matched = options.find((opt) => opt.value === newValue || opt.label === newValue)
-          if (matched) {
-            setIsEditing(false)
-            setLocalInputValue(displayLabel)
-            onInputChange(matched.value)
-            return
-          }
-          // User is typing to search
-          setIsEditing(true)
-          setLocalInputValue(newValue)
-        }}
-        onOpenChange={(open) => {
-          if (!open) {
-            setIsEditing(false)
-            setLocalInputValue(displayLabel)
-          }
-          onOpenChange(open)
-        }}
-        placeholder={isLoading ? 'Loading files...' : 'Select or upload file'}
-        disabled={disabled || isDeleting}
-        editable={true}
-        filterOptions={isEditing}
-        isLoading={isLoading}
-        inputProps={{
-          className: 'pr-[60px]',
-        }}
-      />
-      <Button
-        type='button'
-        variant='ghost'
-        className='-translate-y-1/2 absolute top-1/2 right-[28px] z-10 h-6 w-6 p-0'
-        onClick={onClear}
-        disabled={isDeleting}
-      >
-        {isDeleting ? (
-          <div className='h-4 w-4 animate-spin rounded-full border-[1.5px] border-current border-t-transparent' />
-        ) : (
-          <X className='h-4 w-4 opacity-50 hover:opacity-100' />
-        )}
-      </Button>
-    </div>
-  )
-}
-
 interface UploadingFile {
   id: string
   name: string
@@ -597,7 +500,6 @@ export function FileUpload({
   const hasFiles = filesArray.length > 0
   const isUploading = uploadingFiles.length > 0

-  // Options for multiple file mode (filters out already selected files)
   const comboboxOptions = useMemo(
     () => [
       { label: 'Upload New File', value: '__upload_new__' },
@@ -614,43 +516,10 @@ export function FileUpload({
     [availableWorkspaceFiles, acceptedTypes]
   )

-  // Options for single file mode (includes all files, selected one will be highlighted)
-  const singleFileOptions = useMemo(
-    () => [
-      { label: 'Upload New File', value: '__upload_new__' },
-      ...workspaceFiles.map((file) => {
-        const isAccepted =
-          !acceptedTypes || acceptedTypes === '*' || isFileTypeAccepted(file.type, acceptedTypes)
-        return {
-          label: file.name,
-          value: file.id,
-          disabled: !isAccepted,
-        }
-      }),
-    ],
-    [workspaceFiles, acceptedTypes]
-  )
-
-  // Find the selected file's workspace ID for highlighting in single file mode
-  const selectedFileId = useMemo(() => {
-    if (!hasFiles || multiple) return ''
-    const currentFile = filesArray[0]
-    if (!currentFile) return ''
-    // Match by key or path
-    const matchedWorkspaceFile = workspaceFiles.find(
-      (wf) =>
-        wf.key === currentFile.key ||
-        wf.name === currentFile.name ||
-        currentFile.path?.includes(wf.key)
-    )
-    return matchedWorkspaceFile?.id || ''
-  }, [filesArray, workspaceFiles, hasFiles, multiple])
-
   const handleComboboxChange = (value: string) => {
     setInputValue(value)

-    // Look in full workspaceFiles list (not filtered) to allow re-selecting same file in single mode
-    const selectedFile = workspaceFiles.find((file) => file.id === value)
+    const selectedFile = availableWorkspaceFiles.find((file) => file.id === value)
     const isAcceptedType =
       selectedFile &&
       (!acceptedTypes ||
@@ -690,17 +559,16 @@ export function FileUpload({
       {/* Error message */}
       {uploadError && <div className='mb-2 text-red-600 text-sm'>{uploadError}</div>}

-      {/* File list with consistent spacing - only show for multiple mode or when uploading */}
-      {((hasFiles && multiple) || isUploading) && (
+      {/* File list with consistent spacing */}
+      {(hasFiles || isUploading) && (
         <div className={cn('space-y-2', multiple && 'mb-2')}>
-          {/* Only show files that aren't currently uploading (for multiple mode only) */}
-          {multiple &&
-            filesArray.map((file) => {
-              const isCurrentlyUploading = uploadingFiles.some(
-                (uploadingFile) => uploadingFile.name === file.name
-              )
-              return !isCurrentlyUploading && renderFileItem(file)
-            })}
+          {/* Only show files that aren't currently uploading */}
+          {filesArray.map((file) => {
+            const isCurrentlyUploading = uploadingFiles.some(
+              (uploadingFile) => uploadingFile.name === file.name
+            )
+            return !isCurrentlyUploading && renderFileItem(file)
+          })}
           {isUploading && (
             <>
               {uploadingFiles.map(renderUploadingItem)}
@@ -736,26 +604,6 @@ export function FileUpload({
         />
       )}

-      {/* Single file mode with file selected: show combobox-style UI with X and chevron */}
-      {hasFiles && !multiple && !isUploading && (
-        <SingleFileSelector
-          file={filesArray[0]}
-          options={singleFileOptions}
-          selectedValue={selectedFileId}
-          inputValue={inputValue}
-          onInputChange={handleComboboxChange}
-          onClear={(e) => handleRemoveFile(filesArray[0], e)}
-          onOpenChange={(open) => {
-            if (open) void loadWorkspaceFiles()
-          }}
-          disabled={disabled}
-          isLoading={loadingWorkspaceFiles}
-          formatFileSize={formatFileSize}
-          truncateMiddle={truncateMiddle}
-          isDeleting={deletingFiles[filesArray[0]?.path || '']}
-        />
-      )}
-
       {/* Show dropdown selector if no files and not uploading */}
       {!hasFiles && !isUploading && (
         <Combobox
@@ -1,7 +1,6 @@
 import type React from 'react'
 import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
-import { X } from 'lucide-react'
-import { Button, Combobox as EditableCombobox } from '@/components/emcn/components'
+import { Combobox as EditableCombobox } from '@/components/emcn/components'
 import { SubBlockInputController } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/sub-block-input-controller'
 import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-sub-block-value'
 import type { SubBlockConfig } from '@/blocks/types'
@@ -109,20 +108,6 @@ export function SelectorCombobox({
     [setStoreValue, onOptionChange, readOnly, disabled]
   )

-  const handleClear = useCallback(
-    (e: React.MouseEvent) => {
-      e.preventDefault()
-      e.stopPropagation()
-      if (readOnly || disabled) return
-      setStoreValue(null)
-      setInputValue('')
-      onOptionChange?.('')
-    },
-    [setStoreValue, onOptionChange, readOnly, disabled]
-  )
-
-  const showClearButton = Boolean(activeValue) && !disabled && !readOnly
-
   return (
     <div className='w-full'>
       <SubBlockInputController
@@ -134,49 +119,36 @@ export function SelectorCombobox({
         isPreview={isPreview}
       >
         {({ ref, onDrop, onDragOver }) => (
-          <div className='relative w-full'>
-            <EditableCombobox
-              options={comboboxOptions}
-              value={allowSearch ? inputValue : selectedLabel}
-              selectedValue={activeValue ?? ''}
-              onChange={(newValue) => {
-                const matched = optionMap.get(newValue)
-                if (matched) {
-                  setInputValue(matched.label)
-                  setIsEditing(false)
-                  handleSelection(matched.id)
-                  return
-                }
-                if (allowSearch) {
-                  setInputValue(newValue)
-                  setIsEditing(true)
-                  setSearchTerm(newValue)
-                }
-              }}
-              placeholder={placeholder || subBlock.placeholder || 'Select an option'}
-              disabled={disabled || readOnly}
-              editable={allowSearch}
-              filterOptions={allowSearch}
-              inputRef={ref as React.RefObject<HTMLInputElement>}
-              inputProps={{
-                onDrop: onDrop as (e: React.DragEvent<HTMLInputElement>) => void,
-                onDragOver: onDragOver as (e: React.DragEvent<HTMLInputElement>) => void,
-                className: showClearButton ? 'pr-[60px]' : undefined,
-              }}
-              isLoading={isLoading}
-              error={error instanceof Error ? error.message : null}
-            />
-            {showClearButton && (
-              <Button
-                type='button'
-                variant='ghost'
-                className='-translate-y-1/2 absolute top-1/2 right-[28px] z-10 h-6 w-6 p-0'
-                onClick={handleClear}
-              >
-                <X className='h-4 w-4 opacity-50 hover:opacity-100' />
-              </Button>
-            )}
-          </div>
+          <EditableCombobox
+            options={comboboxOptions}
+            value={allowSearch ? inputValue : selectedLabel}
+            selectedValue={activeValue ?? ''}
+            onChange={(newValue) => {
+              const matched = optionMap.get(newValue)
+              if (matched) {
+                setInputValue(matched.label)
+                setIsEditing(false)
+                handleSelection(matched.id)
+                return
+              }
+              if (allowSearch) {
+                setInputValue(newValue)
+                setIsEditing(true)
+                setSearchTerm(newValue)
+              }
+            }}
+            placeholder={placeholder || subBlock.placeholder || 'Select an option'}
+            disabled={disabled || readOnly}
+            editable={allowSearch}
+            filterOptions={allowSearch}
+            inputRef={ref as React.RefObject<HTMLInputElement>}
+            inputProps={{
+              onDrop: onDrop as (e: React.DragEvent<HTMLInputElement>) => void,
+              onDragOver: onDragOver as (e: React.DragEvent<HTMLInputElement>) => void,
+            }}
+            isLoading={isLoading}
+            error={error instanceof Error ? error.message : null}
+          />
         )}
       </SubBlockInputController>
     </div>
@@ -100,7 +100,7 @@ const BlockRow = memo(function BlockRow({
     >
       <div className='flex min-w-0 flex-1 items-center gap-[8px]'>
         <div
-          className='flex h-[14px] w-[14px] flex-shrink-0 items-center justify-center rounded-[4px]'
+          className='relative flex h-[14px] w-[14px] flex-shrink-0 items-center justify-center overflow-hidden rounded-[4px]'
          style={{ background: bgColor }}
        >
          {BlockIcon && <BlockIcon className='h-[9px] w-[9px] text-white' />}
@@ -276,7 +276,7 @@ const SubflowNodeRow = memo(function SubflowNodeRow({
     >
       <div className='flex min-w-0 flex-1 items-center gap-[8px]'>
         <div
-          className='flex h-[14px] w-[14px] flex-shrink-0 items-center justify-center rounded-[4px]'
+          className='relative flex h-[14px] w-[14px] flex-shrink-0 items-center justify-center overflow-hidden rounded-[4px]'
          style={{ background: bgColor }}
        >
          {BlockIcon && <BlockIcon className='h-[9px] w-[9px] text-white' />}
@@ -19,11 +19,11 @@ import {
 import { Input, Skeleton } from '@/components/ui'
 import { signOut, useSession } from '@/lib/auth/auth-client'
 import { ANONYMOUS_USER_ID } from '@/lib/auth/constants'
+import { useBrandConfig } from '@/lib/branding/branding'
 import { getEnv, isTruthy } from '@/lib/core/config/env'
 import { isHosted } from '@/lib/core/config/feature-flags'
 import { getBaseUrl } from '@/lib/core/utils/urls'
 import { useProfilePictureUpload } from '@/app/workspace/[workspaceId]/w/components/sidebar/components/settings-modal/hooks/use-profile-picture-upload'
-import { useBrandConfig } from '@/ee/whitelabeling'
 import { useGeneralSettings, useUpdateGeneralSetting } from '@/hooks/queries/general-settings'
 import { useUpdateUserProfile, useUserProfile } from '@/hooks/queries/user-profile'
 import { clearUserData } from '@/stores'
@@ -397,7 +397,7 @@ export function UsageIndicator({ onClick }: UsageIndicatorProps) {
     return () => window.clearInterval(interval)
   }, [isHovered, pillCount, startAnimationIndex])

-  if (isLoading && !subscriptionData) {
+  if (isLoading) {
     return (
       <div className='flex flex-shrink-0 flex-col gap-[8px] border-t px-[13.5px] pt-[8px] pb-[10px]'>
         <div className='flex h-[18px] items-center justify-between'>
@@ -649,394 +649,4 @@ describe('Blocks Module', () => {
     }
   })
 })

-describe('Canonical Param Validation', () => {
-  /**
-   * Helper to serialize a condition for comparison
-   */
-  function serializeCondition(condition: unknown): string {
-    if (!condition) return ''
-    return JSON.stringify(condition)
-  }
-
-  it('should not have canonicalParamId that matches any subBlock id within the same block', () => {
-    const blocks = getAllBlocks()
-    const errors: string[] = []
-
-    for (const block of blocks) {
-      const allSubBlockIds = new Set(block.subBlocks.map((sb) => sb.id))
-      const canonicalParamIds = new Set(
-        block.subBlocks.filter((sb) => sb.canonicalParamId).map((sb) => sb.canonicalParamId)
-      )
-
-      for (const canonicalId of canonicalParamIds) {
-        if (allSubBlockIds.has(canonicalId!)) {
-          // Check if the matching subBlock also has a canonicalParamId pointing to itself
-          const matchingSubBlock = block.subBlocks.find(
-            (sb) => sb.id === canonicalId && !sb.canonicalParamId
-          )
-          if (matchingSubBlock) {
-            errors.push(
-              `Block "${block.type}": canonicalParamId "${canonicalId}" clashes with subBlock id "${canonicalId}"`
-            )
-          }
-        }
-      }
-    }
-
-    if (errors.length > 0) {
-      throw new Error(`Canonical param ID clashes detected:\n${errors.join('\n')}`)
-    }
-  })
-
-  it('should have unique subBlock IDs within the same condition context', () => {
-    const blocks = getAllBlocks()
-    const errors: string[] = []
-
-    for (const block of blocks) {
-      // Group subBlocks by their condition (only for static/JSON conditions, not functions)
-      const subBlocksByCondition = new Map<
-        string,
-        Array<{ id: string; mode?: string; hasCanonical: boolean }>
-      >()
-
-      for (const subBlock of block.subBlocks) {
-        // Skip subBlocks with function conditions - we can't evaluate them statically
-        // These are valid when the function returns different conditions at runtime
-        if (typeof subBlock.condition === 'function') {
-          continue
-        }
-
-        const conditionKey = serializeCondition(subBlock.condition)
-        if (!subBlocksByCondition.has(conditionKey)) {
-          subBlocksByCondition.set(conditionKey, [])
-        }
-        subBlocksByCondition.get(conditionKey)!.push({
-          id: subBlock.id,
-          mode: subBlock.mode,
-          hasCanonical: Boolean(subBlock.canonicalParamId),
-        })
-      }
-
-      // Check for duplicate IDs within the same condition (excluding canonical pairs and mode swaps)
-      for (const [conditionKey, subBlocks] of subBlocksByCondition) {
-        const idCounts = new Map<string, number>()
-        for (const sb of subBlocks) {
-          idCounts.set(sb.id, (idCounts.get(sb.id) || 0) + 1)
-        }
-
-        for (const [id, count] of idCounts) {
-          if (count > 1) {
-            const duplicates = subBlocks.filter((sb) => sb.id === id)
-
-            // Categorize modes
-            const basicModes = duplicates.filter(
-              (sb) => !sb.mode || sb.mode === 'basic' || sb.mode === 'both'
-            )
-            const advancedModes = duplicates.filter((sb) => sb.mode === 'advanced')
-            const triggerModes = duplicates.filter((sb) => sb.mode === 'trigger')
-
-            // Valid pattern 1: basic/advanced mode swap (with or without canonicalParamId)
-            if (
-              basicModes.length === 1 &&
-              advancedModes.length === 1 &&
-              triggerModes.length === 0
-            ) {
-              continue // This is a valid basic/advanced mode swap pair
-            }
-
-            // Valid pattern 2: basic/trigger mode separation (trigger version for trigger mode)
-            // One basic/both + one or more trigger versions is valid
-            if (
-              basicModes.length <= 1 &&
-              advancedModes.length === 0 &&
-              triggerModes.length >= 1
-            ) {
-              continue // This is a valid pattern where trigger mode has its own subBlock
-            }
-
-            // Valid pattern 3: All duplicates have canonicalParamId (they form a canonical group)
-            const allHaveCanonical = duplicates.every((sb) => sb.hasCanonical)
-            if (allHaveCanonical) {
-              continue // Validated separately by canonical pair tests
-            }
-
-            // Invalid: duplicates without proper pairing
-            const condition = conditionKey || '(no condition)'
-            const modeBreakdown = duplicates.map((d) => d.mode || 'basic/both').join(', ')
-            errors.push(
-              `Block "${block.type}": Duplicate subBlock id "${id}" with condition ${condition} (count: ${count}, modes: ${modeBreakdown})`
-            )
-          }
-        }
-      }
-    }
-
-    if (errors.length > 0) {
-      throw new Error(`Duplicate subBlock IDs detected:\n${errors.join('\n')}`)
-    }
-  })
-
-  it('should have properly formed canonical pairs (matching conditions)', () => {
-    const blocks = getAllBlocks()
-    const errors: string[] = []
-
-    for (const block of blocks) {
-      // Group subBlocks by canonicalParamId
-      const canonicalGroups = new Map<
-        string,
-        Array<{ id: string; mode?: string; condition: unknown; isStaticCondition: boolean }>
-      >()
-
-      for (const subBlock of block.subBlocks) {
-        if (subBlock.canonicalParamId) {
-          if (!canonicalGroups.has(subBlock.canonicalParamId)) {
-            canonicalGroups.set(subBlock.canonicalParamId, [])
-          }
-          canonicalGroups.get(subBlock.canonicalParamId)!.push({
-            id: subBlock.id,
-            mode: subBlock.mode,
-            condition: subBlock.condition,
-            isStaticCondition: typeof subBlock.condition !== 'function',
-          })
-        }
-      }
-
-      // Validate each canonical group
-      for (const [canonicalId, members] of canonicalGroups) {
-        // Only validate condition matching for static conditions
-        const staticMembers = members.filter((m) => m.isStaticCondition)
-        if (staticMembers.length > 1) {
-          const conditions = staticMembers.map((m) => serializeCondition(m.condition))
-          const uniqueConditions = new Set(conditions)
-
-          if (uniqueConditions.size > 1) {
-            errors.push(
-              `Block "${block.type}": Canonical param "${canonicalId}" has members with different conditions: ${[...uniqueConditions].join(' vs ')}`
-            )
-          }
-        }
-
-        // Check for proper basic/advanced pairing
-        const basicMembers = members.filter((m) => !m.mode || m.mode === 'basic')
-        const advancedMembers = members.filter((m) => m.mode === 'advanced')
-
-        if (basicMembers.length > 1) {
-          errors.push(
-            `Block "${block.type}": Canonical param "${canonicalId}" has ${basicMembers.length} basic mode members (should have at most 1)`
-          )
-        }
-
-        if (basicMembers.length === 0 && advancedMembers.length === 0) {
-          errors.push(
-            `Block "${block.type}": Canonical param "${canonicalId}" has no basic or advanced mode members`
-          )
-        }
-      }
-    }
-
-    if (errors.length > 0) {
-      throw new Error(`Canonical pair validation errors:\n${errors.join('\n')}`)
-    }
-  })
-
-  it('should have unique canonicalParamIds per operation/condition context', () => {
-    const blocks = getAllBlocks()
-    const errors: string[] = []
-
-    for (const block of blocks) {
-      // Group by condition + canonicalParamId to detect same canonical used for different operations
-      const canonicalByCondition = new Map<string, Set<string>>()
-
-      for (const subBlock of block.subBlocks) {
-        if (subBlock.canonicalParamId) {
-          // Skip function conditions - we can't evaluate them statically
-          if (typeof subBlock.condition === 'function') {
-            continue
-          }
-          const conditionKey = serializeCondition(subBlock.condition)
-          if (!canonicalByCondition.has(subBlock.canonicalParamId)) {
-            canonicalByCondition.set(subBlock.canonicalParamId, new Set())
-          }
-          canonicalByCondition.get(subBlock.canonicalParamId)!.add(conditionKey)
-        }
-      }
-
-      // Check that each canonicalParamId is only used for one condition
-      for (const [canonicalId, conditions] of canonicalByCondition) {
-        if (conditions.size > 1) {
-          errors.push(
-            `Block "${block.type}": Canonical param "${canonicalId}" is used across ${conditions.size} different conditions. Each operation should have its own unique canonicalParamId.`
-          )
-        }
-      }
-    }
-
-    if (errors.length > 0) {
-      throw new Error(`Canonical param reuse across conditions:\n${errors.join('\n')}`)
-    }
-  })
-
-  it('should have inputs containing canonical param IDs instead of raw subBlock IDs', () => {
-    const blocks = getAllBlocks()
-    const errors: string[] = []
-
-    for (const block of blocks) {
-      if (!block.inputs) continue
-
-      // Find all canonical groups (subBlocks with canonicalParamId)
-      const canonicalGroups = new Map<string, string[]>()
-      for (const subBlock of block.subBlocks) {
-        if (subBlock.canonicalParamId) {
-          if (!canonicalGroups.has(subBlock.canonicalParamId)) {
-            canonicalGroups.set(subBlock.canonicalParamId, [])
-          }
-          canonicalGroups.get(subBlock.canonicalParamId)!.push(subBlock.id)
-        }
-      }
-
-      const inputKeys = Object.keys(block.inputs)
-
-      for (const [canonicalId, rawSubBlockIds] of canonicalGroups) {
-        // Check that the canonical param ID is in inputs
-        if (!inputKeys.includes(canonicalId)) {
-          errors.push(
-            `Block "${block.type}": inputs section is missing canonical param "${canonicalId}"`
-          )
-        }
-
-        // Check that raw subBlock IDs are NOT in inputs (they get deleted after transformation)
|
|
||||||
for (const rawId of rawSubBlockIds) {
|
|
||||||
if (rawId !== canonicalId && inputKeys.includes(rawId)) {
|
|
||||||
errors.push(
|
|
||||||
`Block "${block.type}": inputs section contains raw subBlock id "${rawId}" which should be replaced by canonical param "${canonicalId}"`
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (errors.length > 0) {
|
|
||||||
throw new Error(`Inputs section validation errors:\n${errors.join('\n')}`)
|
|
||||||
}
|
|
||||||
})
|
|
||||||
|
|
||||||
it('should have params function using canonical IDs instead of raw subBlock IDs', () => {
|
|
||||||
const blocks = getAllBlocks()
|
|
||||||
const errors: string[] = []
|
|
||||||
|
|
||||||
for (const block of blocks) {
|
|
||||||
// Check if block has a params function
|
|
||||||
const paramsFunc = block.tools?.config?.params
|
|
||||||
if (!paramsFunc || typeof paramsFunc !== 'function') continue
|
|
||||||
|
|
||||||
// Get the function source code, stripping comments to avoid false positives
|
|
||||||
const rawFuncSource = paramsFunc.toString()
|
|
||||||
// Remove single-line comments (// ...) and multi-line comments (/* ... */)
|
|
||||||
const funcSource = rawFuncSource
|
|
||||||
.replace(/\/\/[^\n]*/g, '') // Remove single-line comments
|
|
||||||
.replace(/\/\*[\s\S]*?\*\//g, '') // Remove multi-line comments
|
|
||||||
|
|
||||||
// Find all canonical groups (subBlocks with canonicalParamId)
|
|
||||||
const canonicalGroups = new Map<string, string[]>()
|
|
||||||
for (const subBlock of block.subBlocks) {
|
|
||||||
if (subBlock.canonicalParamId) {
|
|
||||||
if (!canonicalGroups.has(subBlock.canonicalParamId)) {
|
|
||||||
canonicalGroups.set(subBlock.canonicalParamId, [])
|
|
||||||
}
|
|
||||||
canonicalGroups.get(subBlock.canonicalParamId)!.push(subBlock.id)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Check for raw subBlock IDs being used in the params function
|
|
||||||
for (const [canonicalId, rawSubBlockIds] of canonicalGroups) {
|
|
||||||
for (const rawId of rawSubBlockIds) {
|
|
||||||
// Skip if the rawId is the same as the canonicalId (self-referential, which is allowed in some cases)
|
|
||||||
if (rawId === canonicalId) continue
|
|
||||||
|
|
||||||
// Check if the params function references the raw subBlock ID
|
|
||||||
// Look for patterns like: params.rawId, { rawId }, destructuring rawId
|
|
||||||
const patterns = [
|
|
||||||
new RegExp(`params\\.${rawId}\\b`), // params.rawId
|
|
||||||
new RegExp(`\\{[^}]*\\b${rawId}\\b[^}]*\\}\\s*=\\s*params`), // { rawId } = params
|
|
||||||
new RegExp(`\\b${rawId}\\s*[,}]`), // rawId in destructuring
|
|
||||||
]
|
|
||||||
|
|
||||||
for (const pattern of patterns) {
|
|
||||||
if (pattern.test(funcSource)) {
|
|
||||||
errors.push(
|
|
||||||
`Block "${block.type}": params function references raw subBlock id "${rawId}" which is deleted after canonical transformation. Use canonical param "${canonicalId}" instead.`
|
|
||||||
)
|
|
||||||
break
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (errors.length > 0) {
|
|
||||||
throw new Error(`Params function validation errors:\n${errors.join('\n')}`)
|
|
||||||
}
|
|
||||||
})
|
|
||||||
|
|
||||||
it('should have consistent required status across canonical param groups', () => {
|
|
||||||
const blocks = getAllBlocks()
|
|
||||||
const errors: string[] = []
|
|
||||||
|
|
||||||
for (const block of blocks) {
|
|
||||||
// Find all canonical groups (subBlocks with canonicalParamId)
|
|
||||||
const canonicalGroups = new Map<string, typeof block.subBlocks>()
|
|
||||||
for (const subBlock of block.subBlocks) {
|
|
||||||
if (subBlock.canonicalParamId) {
|
|
||||||
if (!canonicalGroups.has(subBlock.canonicalParamId)) {
|
|
||||||
canonicalGroups.set(subBlock.canonicalParamId, [])
|
|
||||||
}
|
|
||||||
canonicalGroups.get(subBlock.canonicalParamId)!.push(subBlock)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// For each canonical group, check that required status is consistent
|
|
||||||
for (const [canonicalId, subBlocks] of canonicalGroups) {
|
|
||||||
if (subBlocks.length < 2) continue // Single subblock, no consistency check needed
|
|
||||||
|
|
||||||
// Get required status for each subblock (handling both boolean and condition object)
|
|
||||||
const requiredStatuses = subBlocks.map((sb) => {
|
|
||||||
// If required is a condition object or function, we can't statically determine it
|
|
||||||
// so we skip those cases
|
|
||||||
if (typeof sb.required === 'object' || typeof sb.required === 'function') {
|
|
||||||
return 'dynamic'
|
|
||||||
}
|
|
||||||
return sb.required === true ? 'required' : 'optional'
|
|
||||||
})
|
|
||||||
|
|
||||||
// Filter out dynamic cases
|
|
||||||
const staticStatuses = requiredStatuses.filter((s) => s !== 'dynamic')
|
|
||||||
if (staticStatuses.length < 2) continue // Not enough static statuses to compare
|
|
||||||
|
|
||||||
// Check if all static statuses are the same
|
|
||||||
const hasRequired = staticStatuses.includes('required')
|
|
||||||
const hasOptional = staticStatuses.includes('optional')
|
|
||||||
|
|
||||||
if (hasRequired && hasOptional) {
|
|
||||||
const requiredSubBlocks = subBlocks
|
|
||||||
.filter((sb, i) => requiredStatuses[i] === 'required')
|
|
||||||
.map((sb) => `${sb.id} (${sb.mode || 'both'})`)
|
|
||||||
const optionalSubBlocks = subBlocks
|
|
||||||
.filter((sb, i) => requiredStatuses[i] === 'optional')
|
|
||||||
.map((sb) => `${sb.id} (${sb.mode || 'both'})`)
|
|
||||||
|
|
||||||
errors.push(
|
|
||||||
`Block "${block.type}": canonical param "${canonicalId}" has inconsistent required status. ` +
|
|
||||||
`Required: [${requiredSubBlocks.join(', ')}], Optional: [${optionalSubBlocks.join(', ')}]. ` +
|
|
||||||
`All subBlocks in a canonical group should have the same required status.`
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (errors.length > 0) {
|
|
||||||
throw new Error(`Required status consistency errors:\n${errors.join('\n')}`)
|
|
||||||
}
|
|
||||||
})
|
|
||||||
})
|
|
||||||
})
|
})
|
||||||
|
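The removed tests above lean on a `serializeCondition()` helper defined earlier in the test file, outside this excerpt. A minimal sketch of what such a helper could look like, assuming static conditions are plain objects — an illustration, not the actual implementation:

```typescript
// Hypothetical sketch only; the real helper lives earlier in the test file and may differ.
function serializeCondition(condition: unknown): string {
  // Static (non-function) conditions are plain objects, so a deterministic JSON string
  // is enough to compare whether two subBlocks are gated by the same condition.
  if (condition === undefined || condition === null) return '(no condition)'
  return JSON.stringify(condition)
}
```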
@@ -216,8 +216,8 @@ export const A2ABlock: BlockConfig<A2AResponse> = {
     config: {
       tool: (params) => params.operation as string,
       params: (params) => {
-        const { files, ...rest } = params
-        const normalizedFiles = normalizeFileInput(files)
+        const { fileUpload, fileReference, ...rest } = params
+        const normalizedFiles = normalizeFileInput(fileUpload || fileReference || params.files)
         return {
           ...rest,
           ...(normalizedFiles && { files: normalizedFiles }),
@@ -252,7 +252,15 @@ export const A2ABlock: BlockConfig<A2AResponse> = {
     },
     files: {
       type: 'array',
-      description: 'Files to include with the message (canonical param)',
+      description: 'Files to include with the message',
+    },
+    fileUpload: {
+      type: 'array',
+      description: 'Uploaded files (basic mode)',
+    },
+    fileReference: {
+      type: 'json',
+      description: 'File reference from previous blocks (advanced mode)',
     },
     historyLength: {
       type: 'number',

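The new A2A params mapping feeds whichever of `fileUpload`, `fileReference`, or `files` is present into `normalizeFileInput`. That helper is defined elsewhere in the repo and is not part of this diff; the call sites above only assume roughly the following contract (sketch with assumed types — the real UserFile shape is not shown here):

```typescript
// Rough sketch of the assumed contract, not the repo's actual implementation.
type FileLike = Record<string, unknown>

function normalizeFileInput(
  input: unknown,
  options: { single?: boolean } = {}
): FileLike | FileLike[] | undefined {
  // Empty or missing input yields undefined so callers can spread it away.
  if (input === undefined || input === null || input === '') return undefined
  const asArray = Array.isArray(input) ? input : [input]
  const files = asArray.filter((f): f is FileLike => typeof f === 'object' && f !== null)
  if (files.length === 0) return undefined
  // { single: true } callers (attachments, audio uploads) want one file, not an array.
  return options.single ? files[0] : files
}
```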
@@ -75,12 +75,6 @@ export const ConfluenceBlock: BlockConfig<ConfluenceResponse> = {
         'search:confluence',
         'read:me',
         'offline_access',
-        'read:blogpost:confluence',
-        'write:blogpost:confluence',
-        'read:content.property:confluence',
-        'write:content.property:confluence',
-        'read:hierarchical-content:confluence',
-        'read:content.metadata:confluence',
       ],
       placeholder: 'Select Confluence account',
       required: true,
@@ -94,19 +88,6 @@ export const ConfluenceBlock: BlockConfig<ConfluenceResponse> = {
       placeholder: 'Select Confluence page',
       dependsOn: ['credential', 'domain'],
       mode: 'basic',
-      required: {
-        field: 'operation',
-        value: [
-          'read',
-          'update',
-          'delete',
-          'create_comment',
-          'list_comments',
-          'list_attachments',
-          'list_labels',
-          'upload_attachment',
-        ],
-      },
     },
     {
       id: 'manualPageId',
@@ -115,26 +96,14 @@ export const ConfluenceBlock: BlockConfig<ConfluenceResponse> = {
       canonicalParamId: 'pageId',
       placeholder: 'Enter Confluence page ID',
       mode: 'advanced',
-      required: {
-        field: 'operation',
-        value: [
-          'read',
-          'update',
-          'delete',
-          'create_comment',
-          'list_comments',
-          'list_attachments',
-          'list_labels',
-          'upload_attachment',
-        ],
-      },
     },
     {
       id: 'spaceId',
       title: 'Space ID',
       type: 'short-input',
       placeholder: 'Enter Confluence space ID',
-      required: { field: 'operation', value: ['create', 'get_space'] },
+      required: true,
+      condition: { field: 'operation', value: ['create', 'get_space'] },
     },
     {
       id: 'title',
@@ -289,6 +258,7 @@ export const ConfluenceBlock: BlockConfig<ConfluenceResponse> = {
         const {
           credential,
          pageId,
+          manualPageId,
          operation,
          attachmentFile,
          attachmentFileName,
@@ -296,7 +266,28 @@ export const ConfluenceBlock: BlockConfig<ConfluenceResponse> = {
          ...rest
        } = params
 
-        const effectivePageId = pageId ? String(pageId).trim() : ''
+        const effectivePageId = (pageId || manualPageId || '').trim()
 
+        const requiresPageId = [
+          'read',
+          'update',
+          'delete',
+          'create_comment',
+          'list_comments',
+          'list_attachments',
+          'list_labels',
+          'upload_attachment',
+        ]
+
+        const requiresSpaceId = ['create', 'get_space']
+
+        if (requiresPageId.includes(operation) && !effectivePageId) {
+          throw new Error('Page ID is required. Please select a page or enter a page ID manually.')
+        }
+
+        if (requiresSpaceId.includes(operation) && !rest.spaceId) {
+          throw new Error('Space ID is required for this operation.')
+        }
+
        if (operation === 'upload_attachment') {
          return {
@@ -323,7 +314,8 @@ export const ConfluenceBlock: BlockConfig<ConfluenceResponse> = {
     operation: { type: 'string', description: 'Operation to perform' },
     domain: { type: 'string', description: 'Confluence domain' },
     credential: { type: 'string', description: 'Confluence access token' },
-    pageId: { type: 'string', description: 'Page identifier (canonical param)' },
+    pageId: { type: 'string', description: 'Page identifier' },
+    manualPageId: { type: 'string', description: 'Manual page identifier' },
     spaceId: { type: 'string', description: 'Space identifier' },
     title: { type: 'string', description: 'Page title' },
     content: { type: 'string', description: 'Page content' },
@@ -332,7 +324,7 @@ export const ConfluenceBlock: BlockConfig<ConfluenceResponse> = {
     comment: { type: 'string', description: 'Comment text' },
     commentId: { type: 'string', description: 'Comment identifier' },
     attachmentId: { type: 'string', description: 'Attachment identifier' },
-    attachmentFile: { type: 'json', description: 'File to upload as attachment (canonical param)' },
+    attachmentFile: { type: 'json', description: 'File to upload as attachment' },
     attachmentFileName: { type: 'string', description: 'Custom file name for attachment' },
     attachmentComment: { type: 'string', description: 'Comment for the attachment' },
     labelName: { type: 'string', description: 'Label name' },
@@ -342,7 +334,6 @@ export const ConfluenceBlock: BlockConfig<ConfluenceResponse> = {
     ts: { type: 'string', description: 'Timestamp' },
     pageId: { type: 'string', description: 'Page identifier' },
     content: { type: 'string', description: 'Page content' },
-    body: { type: 'json', description: 'Page body with storage format' },
     title: { type: 'string', description: 'Page title' },
     url: { type: 'string', description: 'Page or resource URL' },
     success: { type: 'boolean', description: 'Operation success status' },

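With the `required` objects replaced by explicit runtime checks, the Confluence params function now throws when a page-scoped operation runs without a page ID. A regression-test sketch for that guard — the import path and the way the params function is obtained are assumptions, modelled on the validation tests removed earlier in this change:

```typescript
// Sketch only; module path and invocation shape are assumed, not taken from this diff.
import { describe, expect, it } from 'vitest'
import { ConfluenceBlock } from './confluence' // assumed path

describe('ConfluenceBlock params guard', () => {
  it('throws when a page-scoped operation has no page ID', () => {
    const params = ConfluenceBlock.tools!.config!.params!
    // 'read' is in requiresPageId, and neither pageId nor manualPageId is provided.
    expect(() => params({ operation: 'read', credential: 'token' })).toThrow(
      'Page ID is required'
    )
  })
})
```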
@@ -380,46 +371,31 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
|||||||
title: 'Operation',
|
title: 'Operation',
|
||||||
type: 'dropdown',
|
type: 'dropdown',
|
||||||
options: [
|
options: [
|
||||||
// Page Operations
|
|
||||||
{ label: 'Read Page', id: 'read' },
|
{ label: 'Read Page', id: 'read' },
|
||||||
{ label: 'Create Page', id: 'create' },
|
{ label: 'Create Page', id: 'create' },
|
||||||
{ label: 'Update Page', id: 'update' },
|
{ label: 'Update Page', id: 'update' },
|
||||||
{ label: 'Delete Page', id: 'delete' },
|
{ label: 'Delete Page', id: 'delete' },
|
||||||
{ label: 'List Pages in Space', id: 'list_pages_in_space' },
|
|
||||||
{ label: 'Get Page Children', id: 'get_page_children' },
|
|
||||||
{ label: 'Get Page Ancestors', id: 'get_page_ancestors' },
|
|
||||||
// Version Operations
|
|
||||||
{ label: 'List Page Versions', id: 'list_page_versions' },
|
|
||||||
{ label: 'Get Page Version', id: 'get_page_version' },
|
|
||||||
// Page Property Operations
|
|
||||||
{ label: 'List Page Properties', id: 'list_page_properties' },
|
|
||||||
{ label: 'Create Page Property', id: 'create_page_property' },
|
|
||||||
// Search Operations
|
|
||||||
{ label: 'Search Content', id: 'search' },
|
{ label: 'Search Content', id: 'search' },
|
||||||
{ label: 'Search in Space', id: 'search_in_space' },
|
|
||||||
// Blog Post Operations
|
|
||||||
{ label: 'List Blog Posts', id: 'list_blogposts' },
|
|
||||||
{ label: 'Get Blog Post', id: 'get_blogpost' },
|
|
||||||
{ label: 'Create Blog Post', id: 'create_blogpost' },
|
|
||||||
{ label: 'List Blog Posts in Space', id: 'list_blogposts_in_space' },
|
|
||||||
// Comment Operations
|
|
||||||
{ label: 'Create Comment', id: 'create_comment' },
|
{ label: 'Create Comment', id: 'create_comment' },
|
||||||
{ label: 'List Comments', id: 'list_comments' },
|
{ label: 'List Comments', id: 'list_comments' },
|
||||||
{ label: 'Update Comment', id: 'update_comment' },
|
{ label: 'Update Comment', id: 'update_comment' },
|
||||||
{ label: 'Delete Comment', id: 'delete_comment' },
|
{ label: 'Delete Comment', id: 'delete_comment' },
|
||||||
// Attachment Operations
|
|
||||||
{ label: 'Upload Attachment', id: 'upload_attachment' },
|
{ label: 'Upload Attachment', id: 'upload_attachment' },
|
||||||
{ label: 'List Attachments', id: 'list_attachments' },
|
{ label: 'List Attachments', id: 'list_attachments' },
|
||||||
{ label: 'Delete Attachment', id: 'delete_attachment' },
|
{ label: 'Delete Attachment', id: 'delete_attachment' },
|
||||||
// Label Operations
|
|
||||||
{ label: 'List Labels', id: 'list_labels' },
|
{ label: 'List Labels', id: 'list_labels' },
|
||||||
{ label: 'Add Label', id: 'add_label' },
|
|
||||||
// Space Operations
|
|
||||||
{ label: 'Get Space', id: 'get_space' },
|
{ label: 'Get Space', id: 'get_space' },
|
||||||
{ label: 'List Spaces', id: 'list_spaces' },
|
{ label: 'List Spaces', id: 'list_spaces' },
|
||||||
],
|
],
|
||||||
value: () => 'read',
|
value: () => 'read',
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
id: 'domain',
|
||||||
|
title: 'Domain',
|
||||||
|
type: 'short-input',
|
||||||
|
placeholder: 'Enter Confluence domain (e.g., simstudio.atlassian.net)',
|
||||||
|
required: true,
|
||||||
|
},
|
||||||
{
|
{
|
||||||
id: 'credential',
|
id: 'credential',
|
||||||
title: 'Confluence Account',
|
title: 'Confluence Account',
|
||||||
@@ -448,23 +424,10 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
|||||||
'search:confluence',
|
'search:confluence',
|
||||||
'read:me',
|
'read:me',
|
||||||
'offline_access',
|
'offline_access',
|
||||||
'read:blogpost:confluence',
|
|
||||||
'write:blogpost:confluence',
|
|
||||||
'read:content.property:confluence',
|
|
||||||
'write:content.property:confluence',
|
|
||||||
'read:hierarchical-content:confluence',
|
|
||||||
'read:content.metadata:confluence',
|
|
||||||
],
|
],
|
||||||
placeholder: 'Select Confluence account',
|
placeholder: 'Select Confluence account',
|
||||||
required: true,
|
required: true,
|
||||||
},
|
},
|
||||||
{
|
|
||||||
id: 'domain',
|
|
||||||
title: 'Domain',
|
|
||||||
type: 'short-input',
|
|
||||||
placeholder: 'Enter Confluence domain (e.g., simstudio.atlassian.net)',
|
|
||||||
required: true,
|
|
||||||
},
|
|
||||||
{
|
{
|
||||||
id: 'pageId',
|
id: 'pageId',
|
||||||
title: 'Select Page',
|
title: 'Select Page',
|
||||||
@@ -474,40 +437,6 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
|||||||
placeholder: 'Select Confluence page',
|
placeholder: 'Select Confluence page',
|
||||||
dependsOn: ['credential', 'domain'],
|
dependsOn: ['credential', 'domain'],
|
||||||
mode: 'basic',
|
mode: 'basic',
|
||||||
condition: {
|
|
||||||
field: 'operation',
|
|
||||||
value: [
|
|
||||||
'list_pages_in_space',
|
|
||||||
'list_blogposts',
|
|
||||||
'get_blogpost',
|
|
||||||
'list_blogposts_in_space',
|
|
||||||
'search',
|
|
||||||
'search_in_space',
|
|
||||||
'get_space',
|
|
||||||
'list_spaces',
|
|
||||||
],
|
|
||||||
not: true,
|
|
||||||
},
|
|
||||||
required: {
|
|
||||||
field: 'operation',
|
|
||||||
value: [
|
|
||||||
'read',
|
|
||||||
'update',
|
|
||||||
'delete',
|
|
||||||
'create_comment',
|
|
||||||
'list_comments',
|
|
||||||
'list_attachments',
|
|
||||||
'list_labels',
|
|
||||||
'upload_attachment',
|
|
||||||
'add_label',
|
|
||||||
'get_page_children',
|
|
||||||
'get_page_ancestors',
|
|
||||||
'list_page_versions',
|
|
||||||
'get_page_version',
|
|
||||||
'list_page_properties',
|
|
||||||
'create_page_property',
|
|
||||||
],
|
|
||||||
},
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
id: 'manualPageId',
|
id: 'manualPageId',
|
||||||
@@ -516,40 +445,6 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
|||||||
canonicalParamId: 'pageId',
|
canonicalParamId: 'pageId',
|
||||||
placeholder: 'Enter Confluence page ID',
|
placeholder: 'Enter Confluence page ID',
|
||||||
mode: 'advanced',
|
mode: 'advanced',
|
||||||
condition: {
|
|
||||||
field: 'operation',
|
|
||||||
value: [
|
|
||||||
'list_pages_in_space',
|
|
||||||
'list_blogposts',
|
|
||||||
'get_blogpost',
|
|
||||||
'list_blogposts_in_space',
|
|
||||||
'search',
|
|
||||||
'search_in_space',
|
|
||||||
'get_space',
|
|
||||||
'list_spaces',
|
|
||||||
],
|
|
||||||
not: true,
|
|
||||||
},
|
|
||||||
required: {
|
|
||||||
field: 'operation',
|
|
||||||
value: [
|
|
||||||
'read',
|
|
||||||
'update',
|
|
||||||
'delete',
|
|
||||||
'create_comment',
|
|
||||||
'list_comments',
|
|
||||||
'list_attachments',
|
|
||||||
'list_labels',
|
|
||||||
'upload_attachment',
|
|
||||||
'add_label',
|
|
||||||
'get_page_children',
|
|
||||||
'get_page_ancestors',
|
|
||||||
'list_page_versions',
|
|
||||||
'get_page_version',
|
|
||||||
'list_page_properties',
|
|
||||||
'create_page_property',
|
|
||||||
],
|
|
||||||
},
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
id: 'spaceId',
|
id: 'spaceId',
|
||||||
@@ -557,63 +452,21 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
|||||||
type: 'short-input',
|
type: 'short-input',
|
||||||
placeholder: 'Enter Confluence space ID',
|
placeholder: 'Enter Confluence space ID',
|
||||||
required: true,
|
required: true,
|
||||||
condition: {
|
condition: { field: 'operation', value: ['create', 'get_space'] },
|
||||||
field: 'operation',
|
|
||||||
value: [
|
|
||||||
'create',
|
|
||||||
'get_space',
|
|
||||||
'list_pages_in_space',
|
|
||||||
'search_in_space',
|
|
||||||
'create_blogpost',
|
|
||||||
'list_blogposts_in_space',
|
|
||||||
],
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
id: 'blogPostId',
|
|
||||||
title: 'Blog Post ID',
|
|
||||||
type: 'short-input',
|
|
||||||
placeholder: 'Enter blog post ID',
|
|
||||||
required: true,
|
|
||||||
condition: { field: 'operation', value: 'get_blogpost' },
|
|
||||||
},
|
|
||||||
{
|
|
||||||
id: 'versionNumber',
|
|
||||||
title: 'Version Number',
|
|
||||||
type: 'short-input',
|
|
||||||
placeholder: 'Enter version number',
|
|
||||||
required: true,
|
|
||||||
condition: { field: 'operation', value: 'get_page_version' },
|
|
||||||
},
|
|
||||||
{
|
|
||||||
id: 'propertyKey',
|
|
||||||
title: 'Property Key',
|
|
||||||
type: 'short-input',
|
|
||||||
placeholder: 'Enter property key/name',
|
|
||||||
required: true,
|
|
||||||
condition: { field: 'operation', value: 'create_page_property' },
|
|
||||||
},
|
|
||||||
{
|
|
||||||
id: 'propertyValue',
|
|
||||||
title: 'Property Value',
|
|
||||||
type: 'long-input',
|
|
||||||
placeholder: 'Enter property value (JSON supported)',
|
|
||||||
required: true,
|
|
||||||
condition: { field: 'operation', value: 'create_page_property' },
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
id: 'title',
|
id: 'title',
|
||||||
title: 'Title',
|
title: 'Title',
|
||||||
type: 'short-input',
|
type: 'short-input',
|
||||||
placeholder: 'Enter title',
|
placeholder: 'Enter title for the page',
|
||||||
condition: { field: 'operation', value: ['create', 'update', 'create_blogpost'] },
|
condition: { field: 'operation', value: ['create', 'update'] },
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
id: 'content',
|
id: 'content',
|
||||||
title: 'Content',
|
title: 'Content',
|
||||||
type: 'long-input',
|
type: 'long-input',
|
||||||
placeholder: 'Enter content',
|
placeholder: 'Enter content for the page',
|
||||||
condition: { field: 'operation', value: ['create', 'update', 'create_blogpost'] },
|
condition: { field: 'operation', value: ['create', 'update'] },
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
id: 'parentId',
|
id: 'parentId',
|
||||||
@@ -628,7 +481,7 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
|||||||
type: 'short-input',
|
type: 'short-input',
|
||||||
placeholder: 'Enter search query',
|
placeholder: 'Enter search query',
|
||||||
required: true,
|
required: true,
|
||||||
condition: { field: 'operation', value: ['search', 'search_in_space'] },
|
condition: { field: 'operation', value: 'search' },
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
id: 'comment',
|
id: 'comment',
|
||||||
@@ -662,7 +515,6 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
|||||||
placeholder: 'Select file to upload',
|
placeholder: 'Select file to upload',
|
||||||
condition: { field: 'operation', value: 'upload_attachment' },
|
condition: { field: 'operation', value: 'upload_attachment' },
|
||||||
mode: 'basic',
|
mode: 'basic',
|
||||||
required: { field: 'operation', value: 'upload_attachment' },
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
id: 'attachmentFileReference',
|
id: 'attachmentFileReference',
|
||||||
@@ -672,7 +524,6 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
|||||||
placeholder: 'Reference file from previous blocks',
|
placeholder: 'Reference file from previous blocks',
|
||||||
condition: { field: 'operation', value: 'upload_attachment' },
|
condition: { field: 'operation', value: 'upload_attachment' },
|
||||||
mode: 'advanced',
|
mode: 'advanced',
|
||||||
required: { field: 'operation', value: 'upload_attachment' },
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
id: 'attachmentFileName',
|
id: 'attachmentFileName',
|
||||||
@@ -694,140 +545,40 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
|||||||
type: 'short-input',
|
type: 'short-input',
|
||||||
placeholder: 'Enter label name',
|
placeholder: 'Enter label name',
|
||||||
required: true,
|
required: true,
|
||||||
condition: { field: 'operation', value: 'add_label' },
|
condition: { field: 'operation', value: ['add_label', 'remove_label'] },
|
||||||
},
|
|
||||||
{
|
|
||||||
id: 'labelPrefix',
|
|
||||||
title: 'Label Prefix',
|
|
||||||
type: 'dropdown',
|
|
||||||
options: [
|
|
||||||
{ label: 'Global (default)', id: 'global' },
|
|
||||||
{ label: 'My', id: 'my' },
|
|
||||||
{ label: 'Team', id: 'team' },
|
|
||||||
{ label: 'System', id: 'system' },
|
|
||||||
],
|
|
||||||
value: () => 'global',
|
|
||||||
condition: { field: 'operation', value: 'add_label' },
|
|
||||||
},
|
|
||||||
{
|
|
||||||
id: 'blogPostStatus',
|
|
||||||
title: 'Status',
|
|
||||||
type: 'dropdown',
|
|
||||||
options: [
|
|
||||||
{ label: 'Published (current)', id: 'current' },
|
|
||||||
{ label: 'Draft', id: 'draft' },
|
|
||||||
],
|
|
||||||
value: () => 'current',
|
|
||||||
condition: { field: 'operation', value: 'create_blogpost' },
|
|
||||||
},
|
|
||||||
{
|
|
||||||
id: 'purge',
|
|
||||||
title: 'Permanently Delete',
|
|
||||||
type: 'switch',
|
|
||||||
condition: { field: 'operation', value: 'delete' },
|
|
||||||
},
|
|
||||||
{
|
|
||||||
id: 'bodyFormat',
|
|
||||||
title: 'Body Format',
|
|
||||||
type: 'dropdown',
|
|
||||||
options: [
|
|
||||||
{ label: 'Storage (default)', id: 'storage' },
|
|
||||||
{ label: 'Atlas Doc Format', id: 'atlas_doc_format' },
|
|
||||||
{ label: 'View', id: 'view' },
|
|
||||||
{ label: 'Export View', id: 'export_view' },
|
|
||||||
],
|
|
||||||
value: () => 'storage',
|
|
||||||
condition: { field: 'operation', value: 'list_comments' },
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
id: 'limit',
|
id: 'limit',
|
||||||
title: 'Limit',
|
title: 'Limit',
|
||||||
type: 'short-input',
|
type: 'short-input',
|
||||||
placeholder: 'Enter maximum number of results (default: 50, max: 250)',
|
placeholder: 'Enter maximum number of results (default: 25)',
|
||||||
condition: {
|
condition: {
|
||||||
field: 'operation',
|
field: 'operation',
|
||||||
value: [
|
value: ['search', 'list_comments', 'list_attachments', 'list_spaces'],
|
||||||
'search',
|
|
||||||
'search_in_space',
|
|
||||||
'list_comments',
|
|
||||||
'list_attachments',
|
|
||||||
'list_spaces',
|
|
||||||
'list_pages_in_space',
|
|
||||||
'list_blogposts',
|
|
||||||
'list_blogposts_in_space',
|
|
||||||
'get_page_children',
|
|
||||||
'list_page_versions',
|
|
||||||
'list_page_properties',
|
|
||||||
'list_labels',
|
|
||||||
],
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
id: 'cursor',
|
|
||||||
title: 'Pagination Cursor',
|
|
||||||
type: 'short-input',
|
|
||||||
placeholder: 'Enter cursor from previous response (optional)',
|
|
||||||
condition: {
|
|
||||||
field: 'operation',
|
|
||||||
value: [
|
|
||||||
'list_comments',
|
|
||||||
'list_attachments',
|
|
||||||
'list_spaces',
|
|
||||||
'list_pages_in_space',
|
|
||||||
'list_blogposts',
|
|
||||||
'list_blogposts_in_space',
|
|
||||||
'get_page_children',
|
|
||||||
'list_page_versions',
|
|
||||||
'list_page_properties',
|
|
||||||
'list_labels',
|
|
||||||
],
|
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
],
|
],
|
||||||
tools: {
|
tools: {
|
||||||
access: [
|
access: [
|
||||||
// Page Tools
|
|
||||||
'confluence_retrieve',
|
'confluence_retrieve',
|
||||||
'confluence_update',
|
'confluence_update',
|
||||||
'confluence_create_page',
|
'confluence_create_page',
|
||||||
'confluence_delete_page',
|
'confluence_delete_page',
|
||||||
'confluence_list_pages_in_space',
|
|
||||||
'confluence_get_page_children',
|
|
||||||
'confluence_get_page_ancestors',
|
|
||||||
// Version Tools
|
|
||||||
'confluence_list_page_versions',
|
|
||||||
'confluence_get_page_version',
|
|
||||||
// Property Tools
|
|
||||||
'confluence_list_page_properties',
|
|
||||||
'confluence_create_page_property',
|
|
||||||
// Search Tools
|
|
||||||
'confluence_search',
|
'confluence_search',
|
||||||
'confluence_search_in_space',
|
|
||||||
// Blog Post Tools
|
|
||||||
'confluence_list_blogposts',
|
|
||||||
'confluence_get_blogpost',
|
|
||||||
'confluence_create_blogpost',
|
|
||||||
'confluence_list_blogposts_in_space',
|
|
||||||
// Comment Tools
|
|
||||||
'confluence_create_comment',
|
'confluence_create_comment',
|
||||||
'confluence_list_comments',
|
'confluence_list_comments',
|
||||||
'confluence_update_comment',
|
'confluence_update_comment',
|
||||||
'confluence_delete_comment',
|
'confluence_delete_comment',
|
||||||
// Attachment Tools
|
|
||||||
'confluence_upload_attachment',
|
'confluence_upload_attachment',
|
||||||
'confluence_list_attachments',
|
'confluence_list_attachments',
|
||||||
'confluence_delete_attachment',
|
'confluence_delete_attachment',
|
||||||
// Label Tools
|
|
||||||
'confluence_list_labels',
|
'confluence_list_labels',
|
||||||
'confluence_add_label',
|
|
||||||
// Space Tools
|
|
||||||
'confluence_get_space',
|
'confluence_get_space',
|
||||||
'confluence_list_spaces',
|
'confluence_list_spaces',
|
||||||
],
|
],
|
||||||
config: {
|
config: {
|
||||||
tool: (params) => {
|
tool: (params) => {
|
||||||
switch (params.operation) {
|
switch (params.operation) {
|
||||||
// Page Operations
|
|
||||||
case 'read':
|
case 'read':
|
||||||
return 'confluence_retrieve'
|
return 'confluence_retrieve'
|
||||||
case 'create':
|
case 'create':
|
||||||
@@ -836,37 +587,8 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
|||||||
return 'confluence_update'
|
return 'confluence_update'
|
||||||
case 'delete':
|
case 'delete':
|
||||||
return 'confluence_delete_page'
|
return 'confluence_delete_page'
|
||||||
case 'list_pages_in_space':
|
|
||||||
return 'confluence_list_pages_in_space'
|
|
||||||
case 'get_page_children':
|
|
||||||
return 'confluence_get_page_children'
|
|
||||||
case 'get_page_ancestors':
|
|
||||||
return 'confluence_get_page_ancestors'
|
|
||||||
// Version Operations
|
|
||||||
case 'list_page_versions':
|
|
||||||
return 'confluence_list_page_versions'
|
|
||||||
case 'get_page_version':
|
|
||||||
return 'confluence_get_page_version'
|
|
||||||
// Property Operations
|
|
||||||
case 'list_page_properties':
|
|
||||||
return 'confluence_list_page_properties'
|
|
||||||
case 'create_page_property':
|
|
||||||
return 'confluence_create_page_property'
|
|
||||||
// Search Operations
|
|
||||||
case 'search':
|
case 'search':
|
||||||
return 'confluence_search'
|
return 'confluence_search'
|
||||||
case 'search_in_space':
|
|
||||||
return 'confluence_search_in_space'
|
|
||||||
// Blog Post Operations
|
|
||||||
case 'list_blogposts':
|
|
||||||
return 'confluence_list_blogposts'
|
|
||||||
case 'get_blogpost':
|
|
||||||
return 'confluence_get_blogpost'
|
|
||||||
case 'create_blogpost':
|
|
||||||
return 'confluence_create_blogpost'
|
|
||||||
case 'list_blogposts_in_space':
|
|
||||||
return 'confluence_list_blogposts_in_space'
|
|
||||||
// Comment Operations
|
|
||||||
case 'create_comment':
|
case 'create_comment':
|
||||||
return 'confluence_create_comment'
|
return 'confluence_create_comment'
|
||||||
case 'list_comments':
|
case 'list_comments':
|
||||||
@@ -875,19 +597,14 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
|||||||
return 'confluence_update_comment'
|
return 'confluence_update_comment'
|
||||||
case 'delete_comment':
|
case 'delete_comment':
|
||||||
return 'confluence_delete_comment'
|
return 'confluence_delete_comment'
|
||||||
// Attachment Operations
|
|
||||||
case 'upload_attachment':
|
case 'upload_attachment':
|
||||||
return 'confluence_upload_attachment'
|
return 'confluence_upload_attachment'
|
||||||
case 'list_attachments':
|
case 'list_attachments':
|
||||||
return 'confluence_list_attachments'
|
return 'confluence_list_attachments'
|
||||||
case 'delete_attachment':
|
case 'delete_attachment':
|
||||||
return 'confluence_delete_attachment'
|
return 'confluence_delete_attachment'
|
||||||
// Label Operations
|
|
||||||
case 'list_labels':
|
case 'list_labels':
|
||||||
return 'confluence_list_labels'
|
return 'confluence_list_labels'
|
||||||
case 'add_label':
|
|
||||||
return 'confluence_add_label'
|
|
||||||
// Space Operations
|
|
||||||
case 'get_space':
|
case 'get_space':
|
||||||
return 'confluence_get_space'
|
return 'confluence_get_space'
|
||||||
case 'list_spaces':
|
case 'list_spaces':
|
||||||
@@ -900,104 +617,42 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
|||||||
const {
|
const {
|
||||||
credential,
|
credential,
|
||||||
pageId,
|
pageId,
|
||||||
|
manualPageId,
|
||||||
operation,
|
operation,
|
||||||
|
attachmentFileUpload,
|
||||||
|
attachmentFileReference,
|
||||||
attachmentFile,
|
attachmentFile,
|
||||||
attachmentFileName,
|
attachmentFileName,
|
||||||
attachmentComment,
|
attachmentComment,
|
||||||
blogPostId,
|
|
||||||
versionNumber,
|
|
||||||
propertyKey,
|
|
||||||
propertyValue,
|
|
||||||
labelPrefix,
|
|
||||||
blogPostStatus,
|
|
||||||
purge,
|
|
||||||
bodyFormat,
|
|
||||||
cursor,
|
|
||||||
...rest
|
...rest
|
||||||
} = params
|
} = params
|
||||||
|
|
||||||
// Use canonical param (serializer already handles basic/advanced mode)
|
const effectivePageId = (pageId || manualPageId || '').trim()
|
||||||
const effectivePageId = pageId ? String(pageId).trim() : ''
|
|
||||||
|
|
||||||
if (operation === 'add_label') {
|
const requiresPageId = [
|
||||||
return {
|
'read',
|
||||||
credential,
|
'update',
|
||||||
pageId: effectivePageId,
|
'delete',
|
||||||
operation,
|
'create_comment',
|
||||||
prefix: labelPrefix || 'global',
|
'list_comments',
|
||||||
...rest,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (operation === 'create_blogpost') {
|
|
||||||
return {
|
|
||||||
credential,
|
|
||||||
operation,
|
|
||||||
status: blogPostStatus || 'current',
|
|
||||||
...rest,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (operation === 'delete') {
|
|
||||||
return {
|
|
||||||
credential,
|
|
||||||
pageId: effectivePageId,
|
|
||||||
operation,
|
|
||||||
purge: purge || false,
|
|
||||||
...rest,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (operation === 'list_comments') {
|
|
||||||
return {
|
|
||||||
credential,
|
|
||||||
pageId: effectivePageId,
|
|
||||||
operation,
|
|
||||||
bodyFormat: bodyFormat || 'storage',
|
|
||||||
cursor: cursor || undefined,
|
|
||||||
...rest,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Operations that support cursor pagination
|
|
||||||
const supportsCursor = [
|
|
||||||
'list_attachments',
|
'list_attachments',
|
||||||
'list_spaces',
|
|
||||||
'list_pages_in_space',
|
|
||||||
'list_blogposts',
|
|
||||||
'list_blogposts_in_space',
|
|
||||||
'get_page_children',
|
|
||||||
'list_page_versions',
|
|
||||||
'list_page_properties',
|
|
||||||
'list_labels',
|
'list_labels',
|
||||||
|
'upload_attachment',
|
||||||
]
|
]
|
||||||
|
|
||||||
if (supportsCursor.includes(operation) && cursor) {
|
const requiresSpaceId = ['create', 'get_space']
|
||||||
return {
|
|
||||||
credential,
|
if (requiresPageId.includes(operation) && !effectivePageId) {
|
||||||
pageId: effectivePageId || undefined,
|
throw new Error('Page ID is required. Please select a page or enter a page ID manually.')
|
||||||
operation,
|
|
||||||
cursor,
|
|
||||||
...rest,
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if (operation === 'create_page_property') {
|
if (requiresSpaceId.includes(operation) && !rest.spaceId) {
|
||||||
if (!propertyKey) {
|
throw new Error('Space ID is required for this operation.')
|
||||||
throw new Error('Property key is required for this operation.')
|
|
||||||
}
|
|
||||||
return {
|
|
||||||
credential,
|
|
||||||
pageId: effectivePageId,
|
|
||||||
operation,
|
|
||||||
key: propertyKey,
|
|
||||||
value: propertyValue,
|
|
||||||
...rest,
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if (operation === 'upload_attachment') {
|
if (operation === 'upload_attachment') {
|
||||||
const normalizedFile = normalizeFileInput(attachmentFile, { single: true })
|
const fileInput = attachmentFileUpload || attachmentFileReference || attachmentFile
|
||||||
|
const normalizedFile = normalizeFileInput(fileInput, { single: true })
|
||||||
if (!normalizedFile) {
|
if (!normalizedFile) {
|
||||||
throw new Error('File is required for upload attachment operation.')
|
throw new Error('File is required for upload attachment operation.')
|
||||||
}
|
}
|
||||||
@@ -1015,8 +670,6 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
|||||||
return {
|
return {
|
||||||
credential,
|
credential,
|
||||||
pageId: effectivePageId || undefined,
|
pageId: effectivePageId || undefined,
|
||||||
blogPostId: blogPostId || undefined,
|
|
||||||
versionNumber: versionNumber ? Number.parseInt(String(versionNumber), 10) : undefined,
|
|
||||||
operation,
|
operation,
|
||||||
...rest,
|
...rest,
|
||||||
}
|
}
|
||||||
@@ -1027,79 +680,22 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
|||||||
operation: { type: 'string', description: 'Operation to perform' },
|
operation: { type: 'string', description: 'Operation to perform' },
|
||||||
domain: { type: 'string', description: 'Confluence domain' },
|
domain: { type: 'string', description: 'Confluence domain' },
|
||||||
credential: { type: 'string', description: 'Confluence access token' },
|
credential: { type: 'string', description: 'Confluence access token' },
|
||||||
pageId: { type: 'string', description: 'Page identifier (canonical param)' },
|
pageId: { type: 'string', description: 'Page identifier' },
|
||||||
|
manualPageId: { type: 'string', description: 'Manual page identifier' },
|
||||||
spaceId: { type: 'string', description: 'Space identifier' },
|
spaceId: { type: 'string', description: 'Space identifier' },
|
||||||
blogPostId: { type: 'string', description: 'Blog post identifier' },
|
title: { type: 'string', description: 'Page title' },
|
||||||
versionNumber: { type: 'number', description: 'Page version number' },
|
content: { type: 'string', description: 'Page content' },
|
||||||
propertyKey: { type: 'string', description: 'Property key/name' },
|
|
||||||
propertyValue: { type: 'json', description: 'Property value (JSON)' },
|
|
||||||
title: { type: 'string', description: 'Page or blog post title' },
|
|
||||||
content: { type: 'string', description: 'Page or blog post content' },
|
|
||||||
parentId: { type: 'string', description: 'Parent page identifier' },
|
parentId: { type: 'string', description: 'Parent page identifier' },
|
||||||
query: { type: 'string', description: 'Search query' },
|
query: { type: 'string', description: 'Search query' },
|
||||||
comment: { type: 'string', description: 'Comment text' },
|
comment: { type: 'string', description: 'Comment text' },
|
||||||
commentId: { type: 'string', description: 'Comment identifier' },
|
commentId: { type: 'string', description: 'Comment identifier' },
|
||||||
attachmentId: { type: 'string', description: 'Attachment identifier' },
|
attachmentId: { type: 'string', description: 'Attachment identifier' },
|
||||||
attachmentFile: { type: 'json', description: 'File to upload as attachment (canonical param)' },
|
attachmentFile: { type: 'json', description: 'File to upload as attachment' },
|
||||||
|
attachmentFileUpload: { type: 'json', description: 'Uploaded file (basic mode)' },
|
||||||
|
attachmentFileReference: { type: 'json', description: 'File reference (advanced mode)' },
|
||||||
attachmentFileName: { type: 'string', description: 'Custom file name for attachment' },
|
attachmentFileName: { type: 'string', description: 'Custom file name for attachment' },
|
||||||
attachmentComment: { type: 'string', description: 'Comment for the attachment' },
|
attachmentComment: { type: 'string', description: 'Comment for the attachment' },
|
||||||
labelName: { type: 'string', description: 'Label name' },
|
labelName: { type: 'string', description: 'Label name' },
|
||||||
labelPrefix: { type: 'string', description: 'Label prefix (global, my, team, system)' },
|
|
||||||
blogPostStatus: { type: 'string', description: 'Blog post status (current or draft)' },
|
|
||||||
purge: { type: 'boolean', description: 'Permanently delete instead of moving to trash' },
|
|
||||||
bodyFormat: { type: 'string', description: 'Body format for comments' },
|
|
||||||
limit: { type: 'number', description: 'Maximum number of results' },
|
limit: { type: 'number', description: 'Maximum number of results' },
|
||||||
cursor: { type: 'string', description: 'Pagination cursor from previous response' },
|
|
||||||
},
|
|
||||||
outputs: {
|
|
||||||
ts: { type: 'string', description: 'Timestamp' },
|
|
||||||
pageId: { type: 'string', description: 'Page identifier' },
|
|
||||||
content: { type: 'string', description: 'Page content' },
|
|
||||||
body: { type: 'json', description: 'Page body with storage format' },
|
|
||||||
title: { type: 'string', description: 'Page title' },
|
|
||||||
url: { type: 'string', description: 'Page or resource URL' },
|
|
||||||
success: { type: 'boolean', description: 'Operation success status' },
|
|
||||||
deleted: { type: 'boolean', description: 'Deletion status' },
|
|
||||||
added: { type: 'boolean', description: 'Addition status' },
|
|
||||||
removed: { type: 'boolean', description: 'Removal status' },
|
|
||||||
updated: { type: 'boolean', description: 'Update status' },
|
|
||||||
// Search & List Results
|
|
||||||
results: { type: 'array', description: 'Search results' },
|
|
||||||
pages: { type: 'array', description: 'List of pages' },
|
|
||||||
children: { type: 'array', description: 'List of child pages' },
|
|
||||||
ancestors: { type: 'array', description: 'List of ancestor pages' },
|
|
||||||
// Comment Results
|
|
||||||
comments: { type: 'array', description: 'List of comments' },
|
|
||||||
commentId: { type: 'string', description: 'Comment identifier' },
|
|
||||||
// Attachment Results
|
|
||||||
attachments: { type: 'array', description: 'List of attachments' },
|
|
||||||
attachmentId: { type: 'string', description: 'Attachment identifier' },
|
|
||||||
fileSize: { type: 'number', description: 'Attachment file size in bytes' },
|
|
||||||
mediaType: { type: 'string', description: 'Attachment MIME type' },
|
|
||||||
downloadUrl: { type: 'string', description: 'Attachment download URL' },
|
|
||||||
// Label Results
|
|
||||||
labels: { type: 'array', description: 'List of labels' },
|
|
||||||
labelName: { type: 'string', description: 'Label name' },
|
|
||||||
// Space Results
|
|
||||||
spaces: { type: 'array', description: 'List of spaces' },
|
|
||||||
spaceId: { type: 'string', description: 'Space identifier' },
|
|
||||||
name: { type: 'string', description: 'Space name' },
|
|
||||||
key: { type: 'string', description: 'Space key' },
|
|
||||||
type: { type: 'string', description: 'Space or content type' },
|
|
||||||
status: { type: 'string', description: 'Space status' },
|
|
||||||
// Blog Post Results
|
|
||||||
blogPosts: { type: 'array', description: 'List of blog posts' },
|
|
||||||
blogPostId: { type: 'string', description: 'Blog post identifier' },
|
|
||||||
// Version Results
|
|
||||||
versions: { type: 'array', description: 'List of page versions' },
|
|
||||||
version: { type: 'json', description: 'Version information' },
|
|
||||||
versionNumber: { type: 'number', description: 'Version number' },
|
|
||||||
// Property Results
|
|
||||||
properties: { type: 'array', description: 'List of page properties' },
|
|
||||||
propertyId: { type: 'string', description: 'Property identifier' },
|
|
||||||
propertyKey: { type: 'string', description: 'Property key' },
|
|
||||||
propertyValue: { type: 'json', description: 'Property value' },
|
|
||||||
// Pagination
|
|
||||||
nextCursor: { type: 'string', description: 'Cursor for fetching next page of results' },
|
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -584,7 +584,7 @@ export const DiscordBlock: BlockConfig<DiscordResponse> = {
           ...commonParams,
           channelId: params.channelId,
           content: params.content,
-          files: normalizeFileInput(params.files),
+          files: normalizeFileInput(params.attachmentFiles || params.files),
         }
       }
     case 'discord_get_messages':
@@ -773,7 +773,8 @@ export const DiscordBlock: BlockConfig<DiscordResponse> = {
     nick: { type: 'string', description: 'Member nickname' },
     reason: { type: 'string', description: 'Reason for moderation action' },
     archived: { type: 'string', description: 'Archive status (true/false)' },
-    files: { type: 'array', description: 'Files to attach (canonical param)' },
+    attachmentFiles: { type: 'json', description: 'Files to attach (UI upload)' },
+    files: { type: 'array', description: 'Files to attach (UserFile array)' },
     limit: { type: 'number', description: 'Message limit' },
     autoArchiveDuration: { type: 'number', description: 'Thread auto-archive duration in minutes' },
     channelType: { type: 'number', description: 'Discord channel type (0=text, 2=voice, etc.)' },

@@ -317,8 +317,12 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
        params.maxResults = Number(params.maxResults)
      }
 
-      // Normalize file input for upload operation - use canonical 'file' param
-      const normalizedFile = normalizeFileInput(params.file, { single: true })
+      // Normalize file input for upload operation
+      // Check all possible field IDs: uploadFile (basic), fileRef (advanced), fileContent (legacy)
+      const normalizedFile = normalizeFileInput(
+        params.uploadFile || params.fileRef || params.fileContent,
+        { single: true }
+      )
      if (normalizedFile) {
        params.file = normalizedFile
      }
@@ -357,7 +361,10 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
     path: { type: 'string', description: 'Path in Dropbox' },
     autorename: { type: 'boolean', description: 'Auto-rename on conflict' },
     // Upload inputs
-    file: { type: 'json', description: 'File to upload (canonical param)' },
+    uploadFile: { type: 'json', description: 'Uploaded file (UserFile)' },
+    file: { type: 'json', description: 'File to upload (UserFile object)' },
+    fileRef: { type: 'json', description: 'File reference from previous block' },
+    fileContent: { type: 'string', description: 'Legacy: base64 encoded file content' },
     fileName: { type: 'string', description: 'Optional filename' },
     mode: { type: 'string', description: 'Write mode: add or overwrite' },
     mute: { type: 'boolean', description: 'Mute notifications' },

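The Dropbox mapping above, like the Discord, file-parser, and Fireflies changes in this set, resolves its file input by taking the first defined of several raw field IDs (`uploadFile`, then `fileRef`, then the legacy `fileContent`). The blocks inline this with `||` chains; the same idea as a standalone helper, shown only for illustration and not part of the diff:

```typescript
// Illustration of the "first defined value wins" fallback the blocks above inline with ||.
// Note: the || chains in the diff also skip other falsy values such as '' and 0.
function firstDefined<T>(...candidates: Array<T | undefined | null>): T | undefined {
  for (const candidate of candidates) {
    if (candidate !== undefined && candidate !== null) {
      return candidate
    }
  }
  return undefined
}

// e.g. const fileInput = firstDefined(params.uploadFile, params.fileRef, params.fileContent)
```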
@@ -194,8 +194,7 @@ export const FileV2Block: BlockConfig<FileParserOutput> = {
        fallbackToolId: 'file_parser_v2',
      }),
      params: (params) => {
-        // Use canonical 'fileInput' param directly
-        const fileInput = params.fileInput
+        const fileInput = params.file || params.filePath || params.fileInput
        if (!fileInput) {
          logger.error('No file input provided')
          throw new Error('File is required')
@@ -229,7 +228,9 @@ export const FileV2Block: BlockConfig<FileParserOutput> = {
    },
  },
  inputs: {
-    fileInput: { type: 'json', description: 'File input (canonical param)' },
+    fileInput: { type: 'json', description: 'File input (upload or URL reference)' },
+    filePath: { type: 'string', description: 'File URL (advanced mode)' },
+    file: { type: 'json', description: 'Uploaded file data (basic mode)' },
    fileType: { type: 'string', description: 'File type' },
  },
  outputs: {

@@ -282,8 +283,7 @@ export const FileV3Block: BlockConfig<FileParserV3Output> = {
    config: {
      tool: () => 'file_parser_v3',
      params: (params) => {
-        // Use canonical 'fileInput' param directly
-        const fileInput = params.fileInput
+        const fileInput = params.fileInput ?? params.file ?? params.fileUrl ?? params.filePath
        if (!fileInput) {
          logger.error('No file input provided')
          throw new Error('File input is required')
@@ -321,7 +321,9 @@ export const FileV3Block: BlockConfig<FileParserV3Output> = {
    },
  },
  inputs: {
-    fileInput: { type: 'json', description: 'File input (canonical param)' },
+    fileInput: { type: 'json', description: 'File input (upload or URL)' },
+    fileUrl: { type: 'string', description: 'External file URL (advanced mode)' },
+    file: { type: 'json', description: 'Uploaded file data (basic mode)' },
    fileType: { type: 'string', description: 'File type' },
  },
  outputs: {

@@ -461,11 +461,12 @@ Return ONLY the summary text - no quotes, no labels.`,
 return baseParams

 case 'fireflies_upload_audio': {
-// Support both file upload and URL - use canonical 'audioFile' param
+// Support both file upload and URL
 const audioUrl = params.audioUrl?.trim()
 const audioFile = params.audioFile
+const audioFileReference = params.audioFileReference

-if (!audioUrl && !audioFile) {
+if (!audioUrl && !audioFile && !audioFileReference) {
 throw new Error('Either audio file or audio URL is required.')
 }

@@ -473,6 +474,7 @@ Return ONLY the summary text - no quotes, no labels.`,
 ...baseParams,
 audioUrl: audioUrl || undefined,
 audioFile: audioFile || undefined,
+audioFileReference: audioFileReference || undefined,
 title: params.title?.trim() || undefined,
 language: params.language?.trim() || undefined,
 attendees: params.attendees?.trim() || undefined,
@@ -546,7 +548,8 @@ Return ONLY the summary text - no quotes, no labels.`,
 hostEmail: { type: 'string', description: 'Filter by host email' },
 participants: { type: 'string', description: 'Filter by participants (comma-separated)' },
 limit: { type: 'number', description: 'Maximum results to return' },
-audioFile: { type: 'json', description: 'Audio/video file (canonical param)' },
+audioFile: { type: 'json', description: 'Audio/video file (UserFile)' },
+audioFileReference: { type: 'json', description: 'Audio/video file reference' },
 audioUrl: { type: 'string', description: 'Public URL to audio file' },
 title: { type: 'string', description: 'Meeting title' },
 language: { type: 'string', description: 'Language code for transcription' },
@@ -617,8 +620,9 @@ export const FirefliesV2Block: BlockConfig<FirefliesResponse> = {
 }

 if (params.operation === 'fireflies_upload_audio') {
-// Use canonical 'audioFile' param directly
-const audioFile = normalizeFileInput(params.audioFile, { single: true })
+const audioFile = normalizeFileInput(params.audioFile || params.audioFileReference, {
+single: true,
+})
 if (!audioFile) {
 throw new Error('Audio file is required.')
 }
@@ -631,6 +635,7 @@ export const FirefliesV2Block: BlockConfig<FirefliesResponse> = {
 ...params,
 audioUrl,
 audioFile: undefined,
+audioFileReference: undefined,
 })
 }

@@ -638,5 +643,8 @@ export const FirefliesV2Block: BlockConfig<FirefliesResponse> = {
 },
 },
 },
-inputs: firefliesV2Inputs,
+inputs: {
+...firefliesV2Inputs,
+audioFileReference: { type: 'json', description: 'Audio/video file reference' },
+},
 }

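The Fireflies hunks above lean on a `normalizeFileInput` helper whose implementation is not part of this diff. As a rough, hypothetical sketch of the shape the call sites appear to assume (single-item mode, tolerating missing input), it could look like the following; the `UserFileLike` type and the body are assumptions, not repo code.

```typescript
// Hypothetical shapes; the real UserFile type and helper live elsewhere in the repo.
interface UserFileLike {
  name: string
  url: string
}

function normalizeFileInput(
  input: UserFileLike | UserFileLike[] | null | undefined,
  options: { single?: boolean } = {}
): UserFileLike | UserFileLike[] | undefined {
  if (!input) return undefined
  const asArray = Array.isArray(input) ? input : [input]
  if (asArray.length === 0) return undefined
  return options.single ? asArray[0] : asArray
}

// Mirrors the upload-audio call site: prefer the uploaded file, fall back to the reference.
const params = {
  audioFile: undefined,
  audioFileReference: { name: 'call.mp3', url: 's3://bucket/call.mp3' },
}
const audioFile = normalizeFileInput(params.audioFile || params.audioFileReference, { single: true })
if (!audioFile) {
  throw new Error('Audio file is required.')
}
```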
@@ -362,10 +362,10 @@ Return ONLY the search query - no explanations, no extra text.`,
 },
 // Add/Remove Label - Label selector (basic mode)
 {
-id: 'labelSelector',
+id: 'labelManagement',
 title: 'Label',
 type: 'folder-selector',
-canonicalParamId: 'manageLabelId',
+canonicalParamId: 'labelIds',
 serviceId: 'gmail',
 requiredScopes: ['https://www.googleapis.com/auth/gmail.labels'],
 placeholder: 'Select label',
@@ -376,10 +376,10 @@ Return ONLY the search query - no explanations, no extra text.`,
 },
 // Add/Remove Label - Manual label input (advanced mode)
 {
-id: 'manualLabelId',
+id: 'manualLabelManagement',
 title: 'Label',
 type: 'short-input',
-canonicalParamId: 'manageLabelId',
+canonicalParamId: 'labelIds',
 placeholder: 'Enter label ID (e.g., INBOX, Label_123)',
 mode: 'advanced',
 condition: { field: 'operation', value: ['add_label_gmail', 'remove_label_gmail'] },
@@ -408,33 +408,38 @@ Return ONLY the search query - no explanations, no extra text.`,
 const {
 credential,
 folder,
-addLabelIds,
-removeLabelIds,
+manualFolder,
+destinationLabel,
+manualDestinationLabel,
+sourceLabel,
+manualSourceLabel,
 moveMessageId,
 actionMessageId,
 labelActionMessageId,
-manageLabelId,
+labelManagement,
+manualLabelManagement,
+attachmentFiles,
 attachments,
 ...rest
 } = params

-// Use canonical 'folder' param directly
-const effectiveFolder = folder ? String(folder).trim() : ''
+// Handle both selector and manual folder input
+const effectiveFolder = (folder || manualFolder || '').trim()

 if (rest.operation === 'read_gmail') {
 rest.folder = effectiveFolder || 'INBOX'
 }

-// Handle move operation - use canonical params addLabelIds and removeLabelIds
+// Handle move operation
 if (rest.operation === 'move_gmail') {
 if (moveMessageId) {
 rest.messageId = moveMessageId
 }
-if (addLabelIds) {
-rest.addLabelIds = String(addLabelIds).trim()
+if (!rest.addLabelIds) {
+rest.addLabelIds = (destinationLabel || manualDestinationLabel || '').trim()
 }
-if (removeLabelIds) {
-rest.removeLabelIds = String(removeLabelIds).trim()
+if (!rest.removeLabelIds) {
+rest.removeLabelIds = (sourceLabel || manualSourceLabel || '').trim()
 }
 }

@@ -457,13 +462,13 @@ Return ONLY the search query - no explanations, no extra text.`,
 if (labelActionMessageId) {
 rest.messageId = labelActionMessageId
 }
-if (manageLabelId) {
-rest.labelIds = String(manageLabelId).trim()
+if (!rest.labelIds) {
+rest.labelIds = (labelManagement || manualLabelManagement || '').trim()
 }
 }

-// Normalize attachments for send/draft operations - use canonical 'attachments' param
-const normalizedAttachments = normalizeFileInput(attachments)
+// Normalize attachments for send/draft operations
+const normalizedAttachments = normalizeFileInput(attachmentFiles || attachments)

 return {
 ...rest,
@@ -488,9 +493,10 @@ Return ONLY the search query - no explanations, no extra text.`,
 },
 cc: { type: 'string', description: 'CC recipients (comma-separated)' },
 bcc: { type: 'string', description: 'BCC recipients (comma-separated)' },
-attachments: { type: 'array', description: 'Files to attach (canonical param)' },
+attachments: { type: 'array', description: 'Files to attach (UserFile array)' },
 // Read operation inputs
-folder: { type: 'string', description: 'Gmail folder (canonical param)' },
+folder: { type: 'string', description: 'Gmail folder' },
+manualFolder: { type: 'string', description: 'Manual folder name' },
 readMessageId: { type: 'string', description: 'Message identifier for reading specific email' },
 unreadOnly: { type: 'boolean', description: 'Unread messages only' },
 includeAttachments: { type: 'boolean', description: 'Include email attachments' },
@@ -499,16 +505,18 @@ Return ONLY the search query - no explanations, no extra text.`,
 maxResults: { type: 'number', description: 'Maximum results' },
 // Move operation inputs
 moveMessageId: { type: 'string', description: 'Message ID to move' },
-addLabelIds: { type: 'string', description: 'Label IDs to add (canonical param)' },
-removeLabelIds: { type: 'string', description: 'Label IDs to remove (canonical param)' },
+destinationLabel: { type: 'string', description: 'Destination label ID' },
+manualDestinationLabel: { type: 'string', description: 'Manual destination label ID' },
+sourceLabel: { type: 'string', description: 'Source label ID to remove' },
+manualSourceLabel: { type: 'string', description: 'Manual source label ID' },
+addLabelIds: { type: 'string', description: 'Label IDs to add' },
+removeLabelIds: { type: 'string', description: 'Label IDs to remove' },
 // Action operation inputs
 actionMessageId: { type: 'string', description: 'Message ID for actions' },
 labelActionMessageId: { type: 'string', description: 'Message ID for label actions' },
-manageLabelId: {
-type: 'string',
-description: 'Label ID for add/remove operations (canonical param)',
-},
-labelIds: { type: 'string', description: 'Label IDs to monitor (trigger)' },
+labelManagement: { type: 'string', description: 'Label ID for management' },
+manualLabelManagement: { type: 'string', description: 'Manual label ID' },
+labelIds: { type: 'string', description: 'Label IDs for add/remove operations' },
 },
 outputs: {
 // Tool outputs

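A minimal sketch of the selector-or-manual fallback used throughout the Gmail `params` function above. The field names follow the diff; the helper itself is illustrative and not part of the repository.

```typescript
// Pick the selector value when present, otherwise the manually entered value.
// Returns undefined instead of an empty string so callers can treat it as "not set".
function pickLabelId(selectorValue?: string, manualValue?: string): string | undefined {
  const resolved = (selectorValue || manualValue || '').trim()
  return resolved || undefined
}

// e.g. add/remove label: basic mode supplies labelManagement, advanced mode manualLabelManagement.
console.log(pickLabelId(undefined, ' Label_123 ')) // 'Label_123'
console.log(pickLabelId('INBOX', undefined))       // 'INBOX'
console.log(pickLabelId(undefined, undefined))     // undefined
```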
@@ -517,17 +517,21 @@ Return ONLY the natural language event text - no explanations.`,
 attendees,
 replaceExisting,
 calendarId,
-destinationCalendarId,
+manualCalendarId,
+destinationCalendar,
+manualDestinationCalendarId,
 ...rest
 } = params

-// Use canonical 'calendarId' param directly
-const effectiveCalendarId = calendarId ? String(calendarId).trim() : ''
+// Handle calendar ID (selector or manual)
+const effectiveCalendarId = (calendarId || manualCalendarId || '').trim()

-// Use canonical 'destinationCalendarId' param directly
-const effectiveDestinationCalendarId = destinationCalendarId
-? String(destinationCalendarId).trim()
-: ''
+// Handle destination calendar ID for move operation (selector or manual)
+const effectiveDestinationCalendarId = (
+destinationCalendar ||
+manualDestinationCalendarId ||
+''
+).trim()

 const processedParams: Record<string, any> = {
 ...rest,
@@ -585,7 +589,8 @@ Return ONLY the natural language event text - no explanations.`,
 inputs: {
 operation: { type: 'string', description: 'Operation to perform' },
 credential: { type: 'string', description: 'Google Calendar access token' },
-calendarId: { type: 'string', description: 'Calendar identifier (canonical param)' },
+calendarId: { type: 'string', description: 'Calendar identifier' },
+manualCalendarId: { type: 'string', description: 'Manual calendar identifier' },

 // Create/Update operation inputs
 summary: { type: 'string', description: 'Event title' },
@@ -604,10 +609,8 @@ Return ONLY the natural language event text - no explanations.`,
 eventId: { type: 'string', description: 'Event identifier' },

 // Move operation inputs
-destinationCalendarId: {
-type: 'string',
-description: 'Destination calendar ID (canonical param)',
-},
+destinationCalendar: { type: 'string', description: 'Destination calendar selector' },
+manualDestinationCalendarId: { type: 'string', description: 'Manual destination calendar ID' },

 // List Calendars operation inputs
 minAccessRole: { type: 'string', description: 'Minimum access role filter' },

@@ -157,10 +157,11 @@ Return ONLY the document content - no explanations, no extra text.`,
 }
 },
 params: (params) => {
-const { credential, documentId, folderId, ...rest } = params
+const { credential, documentId, manualDocumentId, folderSelector, folderId, ...rest } =
+params

-const effectiveDocumentId = documentId ? String(documentId).trim() : ''
-const effectiveFolderId = folderId ? String(folderId).trim() : ''
+const effectiveDocumentId = (documentId || manualDocumentId || '').trim()
+const effectiveFolderId = (folderSelector || folderId || '').trim()

 return {
 ...rest,
@@ -174,9 +175,11 @@ Return ONLY the document content - no explanations, no extra text.`,
 inputs: {
 operation: { type: 'string', description: 'Operation to perform' },
 credential: { type: 'string', description: 'Google Docs access token' },
-documentId: { type: 'string', description: 'Document identifier (canonical param)' },
+documentId: { type: 'string', description: 'Document identifier' },
+manualDocumentId: { type: 'string', description: 'Manual document identifier' },
 title: { type: 'string', description: 'Document title' },
-folderId: { type: 'string', description: 'Parent folder identifier (canonical param)' },
+folderSelector: { type: 'string', description: 'Selected folder' },
+folderId: { type: 'string', description: 'Folder identifier' },
 content: { type: 'string', description: 'Document content' },
 },
 outputs: {

@@ -121,10 +121,10 @@ Return ONLY the file content - no explanations, no markdown code blocks, no extr
 required: false,
 },
 {
-id: 'uploadFolderSelector',
+id: 'folderSelector',
 title: 'Select Parent Folder',
 type: 'file-selector',
-canonicalParamId: 'uploadFolderId',
+canonicalParamId: 'folderId',
 serviceId: 'google-drive',
 requiredScopes: [
 'https://www.googleapis.com/auth/drive.file',
@@ -137,10 +137,10 @@ Return ONLY the file content - no explanations, no markdown code blocks, no extr
 condition: { field: 'operation', value: ['create_file', 'upload'] },
 },
 {
-id: 'uploadManualFolderId',
+id: 'manualFolderId',
 title: 'Parent Folder ID',
 type: 'short-input',
-canonicalParamId: 'uploadFolderId',
+canonicalParamId: 'folderId',
 placeholder: 'Enter parent folder ID (leave empty for root folder)',
 mode: 'advanced',
 condition: { field: 'operation', value: ['create_file', 'upload'] },
@@ -193,10 +193,10 @@ Return ONLY the file content - no explanations, no markdown code blocks, no extr
 required: true,
 },
 {
-id: 'createFolderParentSelector',
+id: 'folderSelector',
 title: 'Select Parent Folder',
 type: 'file-selector',
-canonicalParamId: 'createFolderParentId',
+canonicalParamId: 'folderId',
 serviceId: 'google-drive',
 requiredScopes: [
 'https://www.googleapis.com/auth/drive.file',
@@ -210,20 +210,20 @@ Return ONLY the file content - no explanations, no markdown code blocks, no extr
 },
 // Manual Folder ID input (advanced mode)
 {
-id: 'createFolderManualParentId',
+id: 'manualFolderId',
 title: 'Parent Folder ID',
 type: 'short-input',
-canonicalParamId: 'createFolderParentId',
+canonicalParamId: 'folderId',
 placeholder: 'Enter parent folder ID (leave empty for root folder)',
 mode: 'advanced',
 condition: { field: 'operation', value: 'create_folder' },
 },
 // List Fields - Folder Selector (basic mode)
 {
-id: 'listFolderSelector',
+id: 'folderSelector',
 title: 'Select Folder',
 type: 'file-selector',
-canonicalParamId: 'listFolderId',
+canonicalParamId: 'folderId',
 serviceId: 'google-drive',
 requiredScopes: [
 'https://www.googleapis.com/auth/drive.file',
@@ -237,10 +237,10 @@ Return ONLY the file content - no explanations, no markdown code blocks, no extr
 },
 // Manual Folder ID input (advanced mode)
 {
-id: 'listManualFolderId',
+id: 'manualFolderId',
 title: 'Folder ID',
 type: 'short-input',
-canonicalParamId: 'listFolderId',
+canonicalParamId: 'folderId',
 placeholder: 'Enter folder ID (leave empty for root folder)',
 mode: 'advanced',
 condition: { field: 'operation', value: 'list' },
@@ -279,10 +279,10 @@ Return ONLY the query string - no explanations, no quotes around the whole thing
 },
 // Download File Fields - File Selector (basic mode)
 {
-id: 'downloadFileSelector',
+id: 'fileSelector',
 title: 'Select File',
 type: 'file-selector',
-canonicalParamId: 'downloadFileId',
+canonicalParamId: 'fileId',
 serviceId: 'google-drive',
 requiredScopes: [
 'https://www.googleapis.com/auth/drive.file',
@@ -292,14 +292,13 @@ Return ONLY the query string - no explanations, no quotes around the whole thing
 mode: 'basic',
 dependsOn: ['credential'],
 condition: { field: 'operation', value: 'download' },
-required: true,
 },
 // Manual File ID input (advanced mode)
 {
-id: 'downloadManualFileId',
+id: 'manualFileId',
 title: 'File ID',
 type: 'short-input',
-canonicalParamId: 'downloadFileId',
+canonicalParamId: 'fileId',
 placeholder: 'Enter file ID',
 mode: 'advanced',
 condition: { field: 'operation', value: 'download' },
@@ -340,10 +339,10 @@ Return ONLY the query string - no explanations, no quotes around the whole thing
 },
 // Get File Info Fields
 {
-id: 'getFileSelector',
+id: 'fileSelector',
 title: 'Select File',
 type: 'file-selector',
-canonicalParamId: 'getFileId',
+canonicalParamId: 'fileId',
 serviceId: 'google-drive',
 requiredScopes: [
 'https://www.googleapis.com/auth/drive.file',
@@ -353,13 +352,12 @@ Return ONLY the query string - no explanations, no quotes around the whole thing
 mode: 'basic',
 dependsOn: ['credential'],
 condition: { field: 'operation', value: 'get_file' },
-required: true,
 },
 {
-id: 'getManualFileId',
+id: 'manualFileId',
 title: 'File ID',
 type: 'short-input',
-canonicalParamId: 'getFileId',
+canonicalParamId: 'fileId',
 placeholder: 'Enter file ID',
 mode: 'advanced',
 condition: { field: 'operation', value: 'get_file' },
@@ -367,10 +365,10 @@ Return ONLY the query string - no explanations, no quotes around the whole thing
 },
 // Copy File Fields
 {
-id: 'copyFileSelector',
+id: 'fileSelector',
 title: 'Select File to Copy',
 type: 'file-selector',
-canonicalParamId: 'copyFileId',
+canonicalParamId: 'fileId',
 serviceId: 'google-drive',
 requiredScopes: [
 'https://www.googleapis.com/auth/drive.file',
@@ -380,13 +378,12 @@ Return ONLY the query string - no explanations, no quotes around the whole thing
 mode: 'basic',
 dependsOn: ['credential'],
 condition: { field: 'operation', value: 'copy' },
-required: true,
 },
 {
-id: 'copyManualFileId',
+id: 'manualFileId',
 title: 'File ID',
 type: 'short-input',
-canonicalParamId: 'copyFileId',
+canonicalParamId: 'fileId',
 placeholder: 'Enter file ID to copy',
 mode: 'advanced',
 condition: { field: 'operation', value: 'copy' },
@@ -400,10 +397,10 @@ Return ONLY the query string - no explanations, no quotes around the whole thing
 condition: { field: 'operation', value: 'copy' },
 },
 {
-id: 'copyDestFolderSelector',
+id: 'folderSelector',
 title: 'Destination Folder',
 type: 'file-selector',
-canonicalParamId: 'copyDestFolderId',
+canonicalParamId: 'destinationFolderId',
 serviceId: 'google-drive',
 requiredScopes: [
 'https://www.googleapis.com/auth/drive.file',
@@ -416,20 +413,20 @@ Return ONLY the query string - no explanations, no quotes around the whole thing
 condition: { field: 'operation', value: 'copy' },
 },
 {
-id: 'copyManualDestFolderId',
+id: 'manualDestinationFolderId',
 title: 'Destination Folder ID',
 type: 'short-input',
-canonicalParamId: 'copyDestFolderId',
+canonicalParamId: 'destinationFolderId',
 placeholder: 'Enter destination folder ID (optional)',
 mode: 'advanced',
 condition: { field: 'operation', value: 'copy' },
 },
 // Update File Fields
 {
-id: 'updateFileSelector',
+id: 'fileSelector',
 title: 'Select File to Update',
 type: 'file-selector',
-canonicalParamId: 'updateFileId',
+canonicalParamId: 'fileId',
 serviceId: 'google-drive',
 requiredScopes: [
 'https://www.googleapis.com/auth/drive.file',
@@ -439,13 +436,12 @@ Return ONLY the query string - no explanations, no quotes around the whole thing
 mode: 'basic',
 dependsOn: ['credential'],
 condition: { field: 'operation', value: 'update' },
-required: true,
 },
 {
-id: 'updateManualFileId',
+id: 'manualFileId',
 title: 'File ID',
 type: 'short-input',
-canonicalParamId: 'updateFileId',
+canonicalParamId: 'fileId',
 placeholder: 'Enter file ID to update',
 mode: 'advanced',
 condition: { field: 'operation', value: 'update' },
@@ -504,10 +500,10 @@ Return ONLY the description text - no explanations, no quotes, no extra text.`,
 },
 // Trash File Fields
 {
-id: 'trashFileSelector',
+id: 'fileSelector',
 title: 'Select File to Trash',
 type: 'file-selector',
-canonicalParamId: 'trashFileId',
+canonicalParamId: 'fileId',
 serviceId: 'google-drive',
 requiredScopes: [
 'https://www.googleapis.com/auth/drive.file',
@@ -517,13 +513,12 @@ Return ONLY the description text - no explanations, no quotes, no extra text.`,
 mode: 'basic',
 dependsOn: ['credential'],
 condition: { field: 'operation', value: 'trash' },
-required: true,
 },
 {
-id: 'trashManualFileId',
+id: 'manualFileId',
 title: 'File ID',
 type: 'short-input',
-canonicalParamId: 'trashFileId',
+canonicalParamId: 'fileId',
 placeholder: 'Enter file ID to trash',
 mode: 'advanced',
 condition: { field: 'operation', value: 'trash' },
@@ -531,10 +526,10 @@ Return ONLY the description text - no explanations, no quotes, no extra text.`,
 },
 // Delete File Fields
 {
-id: 'deleteFileSelector',
+id: 'fileSelector',
 title: 'Select File to Delete',
 type: 'file-selector',
-canonicalParamId: 'deleteFileId',
+canonicalParamId: 'fileId',
 serviceId: 'google-drive',
 requiredScopes: [
 'https://www.googleapis.com/auth/drive.file',
@@ -544,13 +539,12 @@ Return ONLY the description text - no explanations, no quotes, no extra text.`,
 mode: 'basic',
 dependsOn: ['credential'],
 condition: { field: 'operation', value: 'delete' },
-required: true,
 },
 {
-id: 'deleteManualFileId',
+id: 'manualFileId',
 title: 'File ID',
 type: 'short-input',
-canonicalParamId: 'deleteFileId',
+canonicalParamId: 'fileId',
 placeholder: 'Enter file ID to permanently delete',
 mode: 'advanced',
 condition: { field: 'operation', value: 'delete' },
@@ -558,10 +552,10 @@ Return ONLY the description text - no explanations, no quotes, no extra text.`,
 },
 // Share File Fields
 {
-id: 'shareFileSelector',
+id: 'fileSelector',
 title: 'Select File to Share',
 type: 'file-selector',
-canonicalParamId: 'shareFileId',
+canonicalParamId: 'fileId',
 serviceId: 'google-drive',
 requiredScopes: [
 'https://www.googleapis.com/auth/drive.file',
@@ -571,13 +565,12 @@ Return ONLY the description text - no explanations, no quotes, no extra text.`,
 mode: 'basic',
 dependsOn: ['credential'],
 condition: { field: 'operation', value: 'share' },
-required: true,
 },
 {
-id: 'shareManualFileId',
+id: 'manualFileId',
 title: 'File ID',
 type: 'short-input',
-canonicalParamId: 'shareFileId',
+canonicalParamId: 'fileId',
 placeholder: 'Enter file ID to share',
 mode: 'advanced',
 condition: { field: 'operation', value: 'share' },
@@ -672,10 +665,10 @@ Return ONLY the message text - no subject line, no greetings/signatures, no extr
 },
 // Unshare (Remove Permission) Fields
 {
-id: 'unshareFileSelector',
+id: 'fileSelector',
 title: 'Select File',
 type: 'file-selector',
-canonicalParamId: 'unshareFileId',
+canonicalParamId: 'fileId',
 serviceId: 'google-drive',
 requiredScopes: [
 'https://www.googleapis.com/auth/drive.file',
@@ -685,13 +678,12 @@ Return ONLY the message text - no subject line, no greetings/signatures, no extr
 mode: 'basic',
 dependsOn: ['credential'],
 condition: { field: 'operation', value: 'unshare' },
-required: true,
 },
 {
-id: 'unshareManualFileId',
+id: 'manualFileId',
 title: 'File ID',
 type: 'short-input',
-canonicalParamId: 'unshareFileId',
+canonicalParamId: 'fileId',
 placeholder: 'Enter file ID',
 mode: 'advanced',
 condition: { field: 'operation', value: 'unshare' },
@@ -707,10 +699,10 @@ Return ONLY the message text - no subject line, no greetings/signatures, no extr
 },
 // List Permissions Fields
 {
-id: 'listPermissionsFileSelector',
+id: 'fileSelector',
 title: 'Select File',
 type: 'file-selector',
-canonicalParamId: 'listPermissionsFileId',
+canonicalParamId: 'fileId',
 serviceId: 'google-drive',
 requiredScopes: [
 'https://www.googleapis.com/auth/drive.file',
@@ -720,13 +712,12 @@ Return ONLY the message text - no subject line, no greetings/signatures, no extr
 mode: 'basic',
 dependsOn: ['credential'],
 condition: { field: 'operation', value: 'list_permissions' },
-required: true,
 },
 {
-id: 'listPermissionsManualFileId',
+id: 'manualFileId',
 title: 'File ID',
 type: 'short-input',
-canonicalParamId: 'listPermissionsFileId',
+canonicalParamId: 'fileId',
 placeholder: 'Enter file ID',
 mode: 'advanced',
 condition: { field: 'operation', value: 'list_permissions' },
@@ -787,23 +778,13 @@ Return ONLY the message text - no subject line, no greetings/signatures, no extr
 params: (params) => {
 const {
 credential,
-// Folder canonical params (per-operation)
-uploadFolderId,
-createFolderParentId,
-listFolderId,
-copyDestFolderId,
-// File canonical params (per-operation)
-downloadFileId,
-getFileId,
-copyFileId,
-updateFileId,
-trashFileId,
-deleteFileId,
-shareFileId,
-unshareFileId,
-listPermissionsFileId,
-// File upload
+folderSelector,
+manualFolderId,
+manualDestinationFolderId,
+fileSelector,
+manualFileId,
 file,
+fileUpload,
 mimeType,
 shareType,
 starred,
@@ -812,58 +793,19 @@ Return ONLY the message text - no subject line, no greetings/signatures, no extr
 } = params

 // Normalize file input - handles both basic (file-upload) and advanced (short-input) modes
-const normalizedFile = normalizeFileInput(file, { single: true })
+const normalizedFile = normalizeFileInput(file ?? fileUpload, { single: true })

-// Resolve folderId based on operation
-let effectiveFolderId: string | undefined
-switch (params.operation) {
-case 'create_file':
-case 'upload':
-effectiveFolderId = uploadFolderId?.trim() || undefined
-break
-case 'create_folder':
-effectiveFolderId = createFolderParentId?.trim() || undefined
-break
-case 'list':
-effectiveFolderId = listFolderId?.trim() || undefined
-break
-}
+// Use folderSelector if provided, otherwise use manualFolderId
+const effectiveFolderId = (folderSelector || manualFolderId || '').trim()

-// Resolve fileId based on operation
-let effectiveFileId: string | undefined
-switch (params.operation) {
-case 'download':
-effectiveFileId = downloadFileId?.trim() || undefined
-break
-case 'get_file':
-effectiveFileId = getFileId?.trim() || undefined
-break
-case 'copy':
-effectiveFileId = copyFileId?.trim() || undefined
-break
-case 'update':
-effectiveFileId = updateFileId?.trim() || undefined
-break
-case 'trash':
-effectiveFileId = trashFileId?.trim() || undefined
-break
-case 'delete':
-effectiveFileId = deleteFileId?.trim() || undefined
-break
-case 'share':
-effectiveFileId = shareFileId?.trim() || undefined
-break
-case 'unshare':
-effectiveFileId = unshareFileId?.trim() || undefined
-break
-case 'list_permissions':
-effectiveFileId = listPermissionsFileId?.trim() || undefined
-break
-}
+// Use fileSelector if provided, otherwise use manualFileId
+const effectiveFileId = (fileSelector || manualFileId || '').trim()

-// Resolve destinationFolderId for copy operation
+// Use folderSelector for destination or manualDestinationFolderId for copy operation
 const effectiveDestinationFolderId =
-params.operation === 'copy' ? copyDestFolderId?.trim() || undefined : undefined
+params.operation === 'copy'
+? (folderSelector || manualDestinationFolderId || '').trim()
+: undefined

 // Convert starred dropdown to boolean
 const starredValue = starred === 'true' ? true : starred === 'false' ? false : undefined
@@ -874,9 +816,9 @@ Return ONLY the message text - no subject line, no greetings/signatures, no extr

 return {
 credential,
-folderId: effectiveFolderId,
-fileId: effectiveFileId,
-destinationFolderId: effectiveDestinationFolderId,
+folderId: effectiveFolderId || undefined,
+fileId: effectiveFileId || undefined,
+destinationFolderId: effectiveDestinationFolderId || undefined,
 file: normalizedFile,
 pageSize: rest.pageSize ? Number.parseInt(rest.pageSize as string, 10) : undefined,
 mimeType: mimeType,
@@ -892,21 +834,13 @@ Return ONLY the message text - no subject line, no greetings/signatures, no extr
 inputs: {
 operation: { type: 'string', description: 'Operation to perform' },
 credential: { type: 'string', description: 'Google Drive access token' },
-// Folder canonical params (per-operation)
-uploadFolderId: { type: 'string', description: 'Parent folder for upload/create' },
-createFolderParentId: { type: 'string', description: 'Parent folder for create folder' },
-listFolderId: { type: 'string', description: 'Folder to list files from' },
-copyDestFolderId: { type: 'string', description: 'Destination folder for copy' },
-// File canonical params (per-operation)
-downloadFileId: { type: 'string', description: 'File to download' },
-getFileId: { type: 'string', description: 'File to get info for' },
-copyFileId: { type: 'string', description: 'File to copy' },
-updateFileId: { type: 'string', description: 'File to update' },
-trashFileId: { type: 'string', description: 'File to trash' },
-deleteFileId: { type: 'string', description: 'File to delete' },
-shareFileId: { type: 'string', description: 'File to share' },
-unshareFileId: { type: 'string', description: 'File to unshare' },
-listPermissionsFileId: { type: 'string', description: 'File to list permissions for' },
+// File selection inputs
+fileSelector: { type: 'string', description: 'Selected file' },
+manualFileId: { type: 'string', description: 'Manual file identifier' },
+// Folder selection inputs
+folderSelector: { type: 'string', description: 'Selected folder' },
+manualFolderId: { type: 'string', description: 'Manual folder identifier' },
+manualDestinationFolderId: { type: 'string', description: 'Destination folder for copy' },
 // Upload and Create inputs
 fileName: { type: 'string', description: 'File or folder name' },
 file: { type: 'json', description: 'File to upload (UserFile object)' },

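The Google Drive hunks replace the per-operation switch with shared `fileSelector`/`manualFileId` and `folderSelector`/`manualFolderId` pairs. A small illustrative helper, not part of the diff, that captures that resolution in one place:

```typescript
// Resolve a Drive identifier from a visual selector (basic mode) or a manual text field (advanced mode).
function resolveId(selector?: string, manual?: string): string | undefined {
  const value = (selector || manual || '').trim()
  return value || undefined
}

// Shape of the resolved values the params function returns (illustrative subset).
interface ResolvedDriveParams {
  fileId?: string
  folderId?: string
  destinationFolderId?: string
}

function resolveDriveIds(params: Record<string, string | undefined>): ResolvedDriveParams {
  return {
    fileId: resolveId(params.fileSelector, params.manualFileId),
    folderId: resolveId(params.folderSelector, params.manualFolderId),
    destinationFolderId:
      params.operation === 'copy'
        ? resolveId(params.folderSelector, params.manualDestinationFolderId)
        : undefined,
  }
}

console.log(resolveDriveIds({ operation: 'download', manualFileId: ' abc123 ' }))
// { fileId: 'abc123', folderId: undefined, destinationFolderId: undefined }
```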
@@ -47,11 +47,10 @@ export const GoogleFormsBlock: BlockConfig = {
 },
 // Form selector (basic mode)
 {
-id: 'formSelector',
+id: 'formId',
 title: 'Select Form',
 type: 'file-selector',
 canonicalParamId: 'formId',
-required: true,
 serviceId: 'google-forms',
 requiredScopes: [],
 mimeType: 'application/vnd.google-apps.form',
@@ -235,7 +234,8 @@ Example for "Add a required multiple choice question about favorite color":
 const {
 credential,
 operation,
-formId, // Canonical param from formSelector (basic) or manualFormId (advanced)
+formId,
+manualFormId,
 responseId,
 pageSize,
 title,
@@ -252,10 +252,11 @@ Example for "Add a required multiple choice question about favorite color":
 } = params

 const baseParams = { ...rest, credential }
-const effectiveFormId = formId ? String(formId).trim() : undefined
+const effectiveFormId = (formId || manualFormId || '').toString().trim() || undefined

 switch (operation) {
 case 'get_responses':
+if (!effectiveFormId) throw new Error('Form ID is required.')
 return {
 ...baseParams,
 formId: effectiveFormId,
@@ -264,8 +265,10 @@ Example for "Add a required multiple choice question about favorite color":
 }
 case 'get_form':
 case 'list_watches':
+if (!effectiveFormId) throw new Error('Form ID is required.')
 return { ...baseParams, formId: effectiveFormId }
 case 'create_form':
+if (!title) throw new Error('Form title is required.')
 return {
 ...baseParams,
 title: String(title).trim(),
@@ -273,6 +276,8 @@ Example for "Add a required multiple choice question about favorite color":
 unpublished: unpublished ?? false,
 }
 case 'batch_update':
+if (!effectiveFormId) throw new Error('Form ID is required.')
+if (!requests) throw new Error('Update requests are required.')
 return {
 ...baseParams,
 formId: effectiveFormId,
@@ -280,6 +285,7 @@ Example for "Add a required multiple choice question about favorite color":
 includeFormInResponse: includeFormInResponse ?? false,
 }
 case 'set_publish_settings':
+if (!effectiveFormId) throw new Error('Form ID is required.')
 return {
 ...baseParams,
 formId: effectiveFormId,
@@ -287,6 +293,9 @@ Example for "Add a required multiple choice question about favorite color":
 isAcceptingResponses: isAcceptingResponses,
 }
 case 'create_watch':
+if (!effectiveFormId) throw new Error('Form ID is required.')
+if (!eventType) throw new Error('Event type is required.')
+if (!topicName) throw new Error('Pub/Sub topic is required.')
 return {
 ...baseParams,
 formId: effectiveFormId,
@@ -296,6 +305,8 @@ Example for "Add a required multiple choice question about favorite color":
 }
 case 'delete_watch':
 case 'renew_watch':
+if (!effectiveFormId) throw new Error('Form ID is required.')
+if (!watchId) throw new Error('Watch ID is required.')
 return {
 ...baseParams,
 formId: effectiveFormId,
@@ -310,7 +321,8 @@ Example for "Add a required multiple choice question about favorite color":
 inputs: {
 operation: { type: 'string', description: 'Operation to perform' },
 credential: { type: 'string', description: 'Google OAuth credential' },
-formId: { type: 'string', description: 'Google Form ID' },
+formId: { type: 'string', description: 'Google Form ID (from selector)' },
+manualFormId: { type: 'string', description: 'Google Form ID (manual entry)' },
 responseId: { type: 'string', description: 'Specific response ID' },
 pageSize: { type: 'string', description: 'Max responses to retrieve' },
 title: { type: 'string', description: 'Form title for creation' },

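The Google Forms hunks add explicit guards before each operation. A compact sketch of that guard pattern follows; the operation fields come from the diff, while the `requireParam` helper is an illustration rather than repo code.

```typescript
// Throw a descriptive error when a required parameter is missing or blank.
function requireParam(value: string | undefined, message: string): string {
  const trimmed = (value ?? '').trim()
  if (!trimmed) throw new Error(message)
  return trimmed
}

function buildWatchParams(params: { formId?: string; eventType?: string; topicName?: string }) {
  // Mirrors the create_watch branch: every required field is validated up front.
  return {
    formId: requireParam(params.formId, 'Form ID is required.'),
    eventType: requireParam(params.eventType, 'Event type is required.'),
    topicName: requireParam(params.topicName, 'Pub/Sub topic is required.'),
  }
}

console.log(buildWatchParams({ formId: 'abc', eventType: 'RESPONSES', topicName: 'projects/p/topics/t' }))
```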
@@ -246,11 +246,11 @@ Return ONLY the JSON array - no explanations, no markdown, no extra text.`,
 }
 },
 params: (params) => {
-const { credential, values, spreadsheetId, ...rest } = params
+const { credential, values, spreadsheetId, manualSpreadsheetId, ...rest } = params

 const parsedValues = values ? JSON.parse(values as string) : undefined

-const effectiveSpreadsheetId = spreadsheetId ? String(spreadsheetId).trim() : ''
+const effectiveSpreadsheetId = (spreadsheetId || manualSpreadsheetId || '').trim()

 if (!effectiveSpreadsheetId) {
 throw new Error('Spreadsheet ID is required.')
@@ -268,7 +268,8 @@ Return ONLY the JSON array - no explanations, no markdown, no extra text.`,
 inputs: {
 operation: { type: 'string', description: 'Operation to perform' },
 credential: { type: 'string', description: 'Google Sheets access token' },
-spreadsheetId: { type: 'string', description: 'Spreadsheet identifier (canonical param)' },
+spreadsheetId: { type: 'string', description: 'Spreadsheet identifier' },
+manualSpreadsheetId: { type: 'string', description: 'Manual spreadsheet identifier' },
 range: { type: 'string', description: 'Cell range' },
 values: { type: 'string', description: 'Cell values data' },
 valueInputOption: { type: 'string', description: 'Value input option' },
@@ -718,7 +719,9 @@ Return ONLY the JSON array - no explanations, no markdown, no extra text.`,
 credential,
 values,
 spreadsheetId,
+manualSpreadsheetId,
 sheetName,
+manualSheetName,
 cellRange,
 title,
 sheetTitles,
@@ -743,7 +746,9 @@ Return ONLY the JSON array - no explanations, no markdown, no extra text.`,
 }
 }

-const effectiveSpreadsheetId = spreadsheetId ? String(spreadsheetId).trim() : ''
+const effectiveSpreadsheetId = (
+(spreadsheetId || manualSpreadsheetId || '') as string
+).trim()

 if (!effectiveSpreadsheetId) {
 throw new Error('Spreadsheet ID is required.')
@@ -799,7 +804,7 @@ Return ONLY the JSON array - no explanations, no markdown, no extra text.`,
 }

 // Handle read/write/update/append/clear operations (require sheet name)
-const effectiveSheetName = sheetName ? String(sheetName).trim() : ''
+const effectiveSheetName = ((sheetName || manualSheetName || '') as string).trim()

 if (!effectiveSheetName) {
 throw new Error('Sheet name is required. Please select or enter a sheet name.')
@@ -821,8 +826,10 @@ Return ONLY the JSON array - no explanations, no markdown, no extra text.`,
 inputs: {
 operation: { type: 'string', description: 'Operation to perform' },
 credential: { type: 'string', description: 'Google Sheets access token' },
-spreadsheetId: { type: 'string', description: 'Spreadsheet identifier (canonical param)' },
-sheetName: { type: 'string', description: 'Name of the sheet/tab (canonical param)' },
+spreadsheetId: { type: 'string', description: 'Spreadsheet identifier' },
+manualSpreadsheetId: { type: 'string', description: 'Manual spreadsheet identifier' },
+sheetName: { type: 'string', description: 'Name of the sheet/tab' },
+manualSheetName: { type: 'string', description: 'Manual sheet name entry' },
 cellRange: { type: 'string', description: 'Cell range (e.g., A1:D10)' },
 values: { type: 'string', description: 'Cell values data' },
 valueInputOption: { type: 'string', description: 'Value input option' },

@@ -664,6 +664,8 @@ Return ONLY the text content - no explanations, no markdown formatting markers,
 const {
 credential,
 presentationId,
+manualPresentationId,
+folderSelector,
 folderId,
 slideIndex,
 createContent,
@@ -673,8 +675,8 @@ Return ONLY the text content - no explanations, no markdown formatting markers,
 ...rest
 } = params

-const effectivePresentationId = presentationId ? String(presentationId).trim() : ''
-const effectiveFolderId = folderId ? String(folderId).trim() : ''
+const effectivePresentationId = (presentationId || manualPresentationId || '').trim()
+const effectiveFolderId = (folderSelector || folderId || '').trim()

 const result: Record<string, any> = {
 ...rest,
@@ -800,13 +802,15 @@ Return ONLY the text content - no explanations, no markdown formatting markers,
 inputs: {
 operation: { type: 'string', description: 'Operation to perform' },
 credential: { type: 'string', description: 'Google Slides access token' },
-presentationId: { type: 'string', description: 'Presentation identifier (canonical param)' },
+presentationId: { type: 'string', description: 'Presentation identifier' },
+manualPresentationId: { type: 'string', description: 'Manual presentation identifier' },
 // Write operation
 slideIndex: { type: 'number', description: 'Slide index to write to' },
 content: { type: 'string', description: 'Slide content' },
 // Create operation
 title: { type: 'string', description: 'Presentation title' },
-folderId: { type: 'string', description: 'Parent folder identifier (canonical param)' },
+folderSelector: { type: 'string', description: 'Selected folder' },
+folderId: { type: 'string', description: 'Folder identifier' },
 createContent: { type: 'string', description: 'Initial slide content' },
 // Replace all text operation
 findText: { type: 'string', description: 'Text to find' },
@@ -822,6 +826,8 @@ Return ONLY the text content - no explanations, no markdown formatting markers,
 placeholderIdMappings: { type: 'string', description: 'JSON array of placeholder ID mappings' },
 // Add image operation
 pageObjectId: { type: 'string', description: 'Slide object ID for image' },
+imageFile: { type: 'json', description: 'Uploaded image (UserFile)' },
+imageUrl: { type: 'string', description: 'Image URL or reference' },
 imageSource: { type: 'json', description: 'Image source (file or URL)' },
 imageWidth: { type: 'number', description: 'Image width in points' },
 imageHeight: { type: 'number', description: 'Image height in points' },
@@ -930,12 +936,11 @@ const googleSlidesV2SubBlocks = (GoogleSlidesBlock.subBlocks || []).flatMap((sub
|
|||||||
})
|
})
|
||||||
|
|
||||||
const googleSlidesV2Inputs = GoogleSlidesBlock.inputs
|
const googleSlidesV2Inputs = GoogleSlidesBlock.inputs
|
||||||
? {
|
? Object.fromEntries(
|
||||||
...Object.fromEntries(
|
Object.entries(GoogleSlidesBlock.inputs).filter(
|
||||||
Object.entries(GoogleSlidesBlock.inputs).filter(([key]) => key !== 'imageSource')
|
([key]) => key !== 'imageUrl' && key !== 'imageSource'
|
||||||
),
|
)
|
||||||
imageFile: { type: 'json', description: 'Image source (file or URL)' },
|
)
|
||||||
}
|
|
||||||
: {}
|
: {}
|
||||||
|
|
||||||
export const GoogleSlidesV2Block: BlockConfig<GoogleSlidesResponse> = {
|
export const GoogleSlidesV2Block: BlockConfig<GoogleSlidesResponse> = {
|
||||||
@@ -956,7 +961,8 @@ export const GoogleSlidesV2Block: BlockConfig<GoogleSlidesResponse> = {
|
|||||||
}
|
}
|
||||||
|
|
||||||
if (params.operation === 'add_image') {
|
if (params.operation === 'add_image') {
|
||||||
const fileObject = normalizeFileInput(params.imageFile, { single: true })
|
const imageInput = params.imageFile || params.imageFileReference || params.imageSource
|
||||||
|
const fileObject = normalizeFileInput(imageInput, { single: true })
|
||||||
if (!fileObject) {
|
if (!fileObject) {
|
||||||
throw new Error('Image file is required.')
|
throw new Error('Image file is required.')
|
||||||
}
|
}
|
||||||
@@ -968,6 +974,8 @@ export const GoogleSlidesV2Block: BlockConfig<GoogleSlidesResponse> = {
|
|||||||
return baseParams({
|
return baseParams({
|
||||||
...params,
|
...params,
|
||||||
imageUrl,
|
imageUrl,
|
||||||
|
imageFileReference: undefined,
|
||||||
|
imageSource: undefined,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -975,5 +983,8 @@ export const GoogleSlidesV2Block: BlockConfig<GoogleSlidesResponse> = {
|
|||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
inputs: googleSlidesV2Inputs,
|
inputs: {
|
||||||
|
...googleSlidesV2Inputs,
|
||||||
|
imageFileReference: { type: 'json', description: 'Image file reference' },
|
||||||
|
},
|
||||||
}
|
}
|
||||||
|
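
The `googleSlidesV2Inputs` change above is a plain key filter over the base block's input map. A standalone sketch of that pattern, with the input-definition type simplified (the `filterInputs` name is ours, not the repository's):

```typescript
// Sketch of the filtering pattern used for googleSlidesV2Inputs above:
// copy a block's input map while dropping keys the V2 block redefines.
type InputDef = { type: string; description: string }

function filterInputs(
  inputs: Record<string, InputDef>,
  excluded: readonly string[]
): Record<string, InputDef> {
  return Object.fromEntries(
    Object.entries(inputs).filter(([key]) => !excluded.includes(key))
  )
}

// e.g. filterInputs(GoogleSlidesBlock.inputs ?? {}, ['imageUrl', 'imageSource'])
```
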
@@ -106,7 +106,6 @@ export const JiraBlock: BlockConfig<JiraResponse> = {
 placeholder: 'Select Jira project',
 dependsOn: ['credential', 'domain'],
 mode: 'basic',
-required: { field: 'operation', value: ['write', 'update', 'read-bulk'] },
 },
 // Manual project ID input (advanced mode)
 {
@@ -117,7 +116,6 @@ export const JiraBlock: BlockConfig<JiraResponse> = {
 placeholder: 'Enter Jira project ID',
 dependsOn: ['credential', 'domain'],
 mode: 'advanced',
-required: { field: 'operation', value: ['write', 'update', 'read-bulk'] },
 },
 // Issue selector (basic mode)
 {
@@ -150,28 +148,6 @@ export const JiraBlock: BlockConfig<JiraResponse> = {
 'remove_watcher',
 ],
 },
-required: {
-field: 'operation',
-value: [
-'read',
-'update',
-'delete',
-'assign',
-'transition',
-'add_comment',
-'get_comments',
-'update_comment',
-'delete_comment',
-'get_attachments',
-'add_attachment',
-'add_worklog',
-'get_worklogs',
-'update_worklog',
-'delete_worklog',
-'add_watcher',
-'remove_watcher',
-],
-},
 mode: 'basic',
 },
 // Manual issue key input (advanced mode)
@@ -204,28 +180,6 @@ export const JiraBlock: BlockConfig<JiraResponse> = {
 'remove_watcher',
 ],
 },
-required: {
-field: 'operation',
-value: [
-'read',
-'update',
-'delete',
-'assign',
-'transition',
-'add_comment',
-'get_comments',
-'update_comment',
-'delete_comment',
-'get_attachments',
-'add_attachment',
-'add_worklog',
-'get_worklogs',
-'update_worklog',
-'delete_worklog',
-'add_watcher',
-'remove_watcher',
-],
-},
 mode: 'advanced',
 },
 {
@@ -661,9 +615,8 @@ Return ONLY the comment text - no explanations.`,
 ],
 config: {
 tool: (params) => {
-// Use canonical param IDs (raw subBlock IDs are deleted after serialization)
-const effectiveProjectId = params.projectId ? String(params.projectId).trim() : ''
-const effectiveIssueKey = params.issueKey ? String(params.issueKey).trim() : ''
+const effectiveProjectId = (params.projectId || params.manualProjectId || '').trim()
+const effectiveIssueKey = (params.issueKey || params.manualIssueKey || '').trim()

 switch (params.operation) {
 case 'read':
@@ -723,11 +676,11 @@ Return ONLY the comment text - no explanations.`,
 }
 },
 params: (params) => {
-const { credential, projectId, issueKey, ...rest } = params
+const { credential, projectId, manualProjectId, issueKey, manualIssueKey, ...rest } = params

-// Use canonical param IDs (raw subBlock IDs are deleted after serialization)
-const effectiveProjectId = projectId ? String(projectId).trim() : ''
-const effectiveIssueKey = issueKey ? String(issueKey).trim() : ''
+// Use the selected IDs or the manually entered ones
+const effectiveProjectId = (projectId || manualProjectId || '').trim()
+const effectiveIssueKey = (issueKey || manualIssueKey || '').trim()

 const baseParams = {
 credential,
@@ -736,6 +689,11 @@ Return ONLY the comment text - no explanations.`,

 switch (params.operation) {
 case 'write': {
+if (!effectiveProjectId) {
+throw new Error(
+'Project ID is required. Please select a project or enter a project ID manually.'
+)
+}
 // Parse comma-separated strings into arrays
 const parseCommaSeparated = (value: string | undefined): string[] | undefined => {
 if (!value || value.trim() === '') return undefined
@@ -768,6 +726,16 @@ Return ONLY the comment text - no explanations.`,
 }
 }
 case 'update': {
+if (!effectiveProjectId) {
+throw new Error(
+'Project ID is required. Please select a project or enter a project ID manually.'
+)
+}
+if (!effectiveIssueKey) {
+throw new Error(
+'Issue Key is required. Please select an issue or enter an issue key manually.'
+)
+}
 const updateParams = {
 projectId: effectiveProjectId,
 issueKey: effectiveIssueKey,
@@ -780,20 +748,40 @@ Return ONLY the comment text - no explanations.`,
 }
 }
 case 'read': {
+// Check for project ID from either source
+const projectForRead = (params.projectId || params.manualProjectId || '').trim()
+const issueForRead = (params.issueKey || params.manualIssueKey || '').trim()
+
+if (!issueForRead) {
+throw new Error(
+'Select a project to read issues, or provide an issue key to read a single issue.'
+)
+}
 return {
 ...baseParams,
-issueKey: effectiveIssueKey,
+issueKey: issueForRead,
 // Include projectId if available for context
-...(effectiveProjectId && { projectId: effectiveProjectId }),
+...(projectForRead && { projectId: projectForRead }),
 }
 }
 case 'read-bulk': {
+// Check both projectId and manualProjectId directly from params
+const finalProjectId = params.projectId || params.manualProjectId || ''
+
+if (!finalProjectId) {
+throw new Error(
+'Project ID is required. Please select a project or enter a project ID manually.'
+)
+}
 return {
 ...baseParams,
-projectId: effectiveProjectId.trim(),
+projectId: finalProjectId.trim(),
 }
 }
 case 'delete': {
+if (!effectiveIssueKey) {
+throw new Error('Issue Key is required to delete an issue.')
+}
 return {
 ...baseParams,
 issueKey: effectiveIssueKey,
@@ -801,6 +789,9 @@ Return ONLY the comment text - no explanations.`,
 }
 }
 case 'assign': {
+if (!effectiveIssueKey) {
+throw new Error('Issue Key is required to assign an issue.')
+}
 return {
 ...baseParams,
 issueKey: effectiveIssueKey,
@@ -808,6 +799,9 @@ Return ONLY the comment text - no explanations.`,
 }
 }
 case 'transition': {
+if (!effectiveIssueKey) {
+throw new Error('Issue Key is required to transition an issue.')
+}
 return {
 ...baseParams,
 issueKey: effectiveIssueKey,
@@ -823,6 +817,9 @@ Return ONLY the comment text - no explanations.`,
 }
 }
 case 'add_comment': {
+if (!effectiveIssueKey) {
+throw new Error('Issue Key is required to add a comment.')
+}
 return {
 ...baseParams,
 issueKey: effectiveIssueKey,
@@ -830,6 +827,9 @@ Return ONLY the comment text - no explanations.`,
 }
 }
 case 'get_comments': {
+if (!effectiveIssueKey) {
+throw new Error('Issue Key is required to get comments.')
+}
 return {
 ...baseParams,
 issueKey: effectiveIssueKey,
@@ -837,6 +837,9 @@ Return ONLY the comment text - no explanations.`,
 }
 }
 case 'update_comment': {
+if (!effectiveIssueKey) {
+throw new Error('Issue Key is required to update a comment.')
+}
 return {
 ...baseParams,
 issueKey: effectiveIssueKey,
@@ -845,6 +848,9 @@ Return ONLY the comment text - no explanations.`,
 }
 }
 case 'delete_comment': {
+if (!effectiveIssueKey) {
+throw new Error('Issue Key is required to delete a comment.')
+}
 return {
 ...baseParams,
 issueKey: effectiveIssueKey,
@@ -852,13 +858,19 @@ Return ONLY the comment text - no explanations.`,
 }
 }
 case 'get_attachments': {
+if (!effectiveIssueKey) {
+throw new Error('Issue Key is required to get attachments.')
+}
 return {
 ...baseParams,
 issueKey: effectiveIssueKey,
 }
 }
 case 'add_attachment': {
-const normalizedFiles = normalizeFileInput(params.files)
+if (!effectiveIssueKey) {
+throw new Error('Issue Key is required to add attachments.')
+}
+const normalizedFiles = normalizeFileInput(params.attachmentFiles || params.files)
 if (!normalizedFiles || normalizedFiles.length === 0) {
 throw new Error('At least one attachment file is required.')
 }
@@ -875,6 +887,9 @@ Return ONLY the comment text - no explanations.`,
 }
 }
 case 'add_worklog': {
+if (!effectiveIssueKey) {
+throw new Error('Issue Key is required to add a worklog.')
+}
 return {
 ...baseParams,
 issueKey: effectiveIssueKey,
@@ -886,6 +901,9 @@ Return ONLY the comment text - no explanations.`,
 }
 }
 case 'get_worklogs': {
+if (!effectiveIssueKey) {
+throw new Error('Issue Key is required to get worklogs.')
+}
 return {
 ...baseParams,
 issueKey: effectiveIssueKey,
@@ -893,6 +911,9 @@ Return ONLY the comment text - no explanations.`,
 }
 }
 case 'update_worklog': {
+if (!effectiveIssueKey) {
+throw new Error('Issue Key is required to update a worklog.')
+}
 return {
 ...baseParams,
 issueKey: effectiveIssueKey,
@@ -905,6 +926,9 @@ Return ONLY the comment text - no explanations.`,
 }
 }
 case 'delete_worklog': {
+if (!effectiveIssueKey) {
+throw new Error('Issue Key is required to delete a worklog.')
+}
 return {
 ...baseParams,
 issueKey: effectiveIssueKey,
@@ -927,6 +951,9 @@ Return ONLY the comment text - no explanations.`,
 }
 }
 case 'add_watcher': {
+if (!effectiveIssueKey) {
+throw new Error('Issue Key is required to add a watcher.')
+}
 return {
 ...baseParams,
 issueKey: effectiveIssueKey,
@@ -934,6 +961,9 @@ Return ONLY the comment text - no explanations.`,
 }
 }
 case 'remove_watcher': {
+if (!effectiveIssueKey) {
+throw new Error('Issue Key is required to remove a watcher.')
+}
 return {
 ...baseParams,
 issueKey: effectiveIssueKey,
@@ -960,8 +990,10 @@ Return ONLY the comment text - no explanations.`,
 operation: { type: 'string', description: 'Operation to perform' },
 domain: { type: 'string', description: 'Jira domain' },
 credential: { type: 'string', description: 'Jira access token' },
-issueKey: { type: 'string', description: 'Issue key identifier (canonical param)' },
-projectId: { type: 'string', description: 'Project identifier (canonical param)' },
+issueKey: { type: 'string', description: 'Issue key identifier' },
+projectId: { type: 'string', description: 'Project identifier' },
+manualProjectId: { type: 'string', description: 'Manual project identifier' },
+manualIssueKey: { type: 'string', description: 'Manual issue key' },
 // Update/Write operation inputs
 summary: { type: 'string', description: 'Issue summary' },
 description: { type: 'string', description: 'Issue description' },
@@ -992,7 +1024,8 @@ Return ONLY the comment text - no explanations.`,
 commentBody: { type: 'string', description: 'Text content for comment operations' },
 commentId: { type: 'string', description: 'Comment ID for update/delete operations' },
 // Attachment operation inputs
-files: { type: 'array', description: 'Files to attach (canonical param)' },
+attachmentFiles: { type: 'json', description: 'Files to attach (UI upload)' },
+files: { type: 'array', description: 'Files to attach (UserFile array)' },
 attachmentId: { type: 'string', description: 'Attachment ID for delete operation' },
 // Worklog operation inputs
 timeSpentSeconds: {
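
The Jira hunks above add the same `if (!effectiveIssueKey) throw ...` guard to more than a dozen operations. A hypothetical helper that expresses that check once is sketched below; it is not part of this diff and is shown only to make the repeated pattern explicit:

```typescript
// Hypothetical helper (not in the diff): validate and return the issue key
// that the per-operation guards above each check inline.
function requireIssueKey(effectiveIssueKey: string, action: string): string {
  if (!effectiveIssueKey) {
    throw new Error(`Issue Key is required to ${action}.`)
  }
  return effectiveIssueKey
}

// e.g. issueKey: requireIssueKey(effectiveIssueKey, 'add a comment')
```
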
@@ -1476,9 +1476,9 @@ Return ONLY the date string in YYYY-MM-DD format - no explanations, no quotes, n
 return params.operation || 'linear_read_issues'
 },
 params: (params) => {
-// Use canonical param IDs (raw subBlock IDs are deleted after serialization)
-const effectiveTeamId = params.teamId ? String(params.teamId).trim() : ''
-const effectiveProjectId = params.projectId ? String(params.projectId).trim() : ''
+// Handle both selector and manual inputs
+const effectiveTeamId = (params.teamId || params.manualTeamId || '').trim()
+const effectiveProjectId = (params.projectId || params.manualProjectId || '').trim()

 // Base params that most operations need
 const baseParams: Record<string, any> = {
@@ -1774,11 +1774,16 @@ Return ONLY the date string in YYYY-MM-DD format - no explanations, no quotes, n
 if (!params.issueId?.trim()) {
 throw new Error('Issue ID is required.')
 }
-// Normalize file input - use canonical param 'file' (raw subBlock IDs are deleted after serialization)
-const attachmentFile = normalizeFileInput(params.file, {
-single: true,
-errorMessage: 'Attachment file must be a single file.',
-})
+// Normalize file inputs - handles JSON stringified values from advanced mode
+const attachmentFile =
+normalizeFileInput(params.attachmentFileUpload, {
+single: true,
+errorMessage: 'Attachment file must be a single file.',
+}) ||
+normalizeFileInput(params.file, {
+single: true,
+errorMessage: 'Attachment file must be a single file.',
+})
 const attachmentUrl =
 params.url?.trim() ||
 (attachmentFile ? (attachmentFile as { url?: string }).url : undefined)
@@ -2256,8 +2261,10 @@ Return ONLY the date string in YYYY-MM-DD format - no explanations, no quotes, n
 inputs: {
 operation: { type: 'string', description: 'Operation to perform' },
 credential: { type: 'string', description: 'Linear access token' },
-teamId: { type: 'string', description: 'Linear team identifier (canonical param)' },
-projectId: { type: 'string', description: 'Linear project identifier (canonical param)' },
+teamId: { type: 'string', description: 'Linear team identifier' },
+projectId: { type: 'string', description: 'Linear project identifier' },
+manualTeamId: { type: 'string', description: 'Manual team identifier' },
+manualProjectId: { type: 'string', description: 'Manual project identifier' },
 issueId: { type: 'string', description: 'Issue identifier' },
 title: { type: 'string', description: 'Title' },
 description: { type: 'string', description: 'Description' },
@@ -2287,7 +2294,8 @@ Return ONLY the date string in YYYY-MM-DD format - no explanations, no quotes, n
 endDate: { type: 'string', description: 'End date' },
 targetDate: { type: 'string', description: 'Target date' },
 url: { type: 'string', description: 'URL' },
-file: { type: 'json', description: 'File to attach (canonical param)' },
+attachmentFileUpload: { type: 'json', description: 'File to attach (UI upload)' },
+file: { type: 'json', description: 'File to attach (UserFile)' },
 attachmentTitle: { type: 'string', description: 'Attachment title' },
 attachmentId: { type: 'string', description: 'Attachment identifier' },
 relationType: { type: 'string', description: 'Relation type' },
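
The Linear attachment change above tries the UI upload first and falls back to the manual `file` input. A condensed sketch of that order, with `normalizeFileInput`'s option shape inferred from the calls in this diff and passed in as a parameter so nothing about its real signature is assumed beyond what the hunk shows:

```typescript
// Illustrative wrapper around the fallback order used above. The normalize
// function is injected; its behaviour is assumed from the calls in this diff.
type MaybeFile = { url?: string } | undefined
type NormalizeFn = (value: unknown, opts: { single: true; errorMessage: string }) => MaybeFile

function resolveLinearAttachment(
  params: { attachmentFileUpload?: unknown; file?: unknown; url?: string },
  normalize: NormalizeFn
): { attachmentFile: MaybeFile; attachmentUrl?: string } {
  const opts: { single: true; errorMessage: string } = {
    single: true,
    errorMessage: 'Attachment file must be a single file.',
  }
  const attachmentFile =
    normalize(params.attachmentFileUpload, opts) || normalize(params.file, opts)
  const attachmentUrl = params.url?.trim() || attachmentFile?.url
  return { attachmentFile, attachmentUrl }
}
```
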
Some files were not shown because too many files have changed in this diff.