Mirror of https://github.com/simstudioai/sim.git (synced 2026-01-27 15:58:11 -05:00)

Compare commits: staging...fix/hitl-o, 1 commit (b69274eebf)
@@ -101,6 +101,7 @@ import {
ShopifyIcon,
SlackIcon,
SmtpIcon,
SpotifyIcon,
SQSIcon,
SshIcon,
STTIcon,
@@ -181,7 +182,7 @@ export const blockTypeToIconMap: Record<string, IconComponent> = {
jina: JinaAIIcon,
jira: JiraIcon,
jira_service_management: JiraServiceManagementIcon,
kalshi_v2: KalshiIcon,
kalshi: KalshiIcon,
knowledge: PackageSearchIcon,
langsmith: LangsmithIcon,
lemlist: LemlistIcon,
@@ -228,6 +229,7 @@ export const blockTypeToIconMap: Record<string, IconComponent> = {
shopify: ShopifyIcon,
slack: SlackIcon,
smtp: SmtpIcon,
spotify: SpotifyIcon,
sqs: SQSIcon,
ssh: SshIcon,
stagehand: StagehandIcon,
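The three hunks above add the `SQSIcon` import, register `sqs` in `blockTypeToIconMap`, and touch the `kalshi` / `kalshi_v2` entry. A minimal TypeScript sketch of the mapping pattern, using stub components and a hypothetical `getBlockIcon` helper that is not part of this change:

```ts
import type { ComponentType } from 'react'

// Stub icon components standing in for the real SVG icons imported above.
type IconComponent = ComponentType<{ className?: string }>
const SQSIcon: IconComponent = () => null
const KalshiIcon: IconComponent = () => null
const PackageSearchIcon: IconComponent = () => null

// Same shape as the map the diff edits: block type string -> icon component.
const blockTypeToIconMap: Record<string, IconComponent> = {
  kalshi: KalshiIcon, // the kalshi / kalshi_v2 key is what the second hunk touches
  sqs: SQSIcon, // registered alongside the new SQSIcon import
  knowledge: PackageSearchIcon,
}

// Hypothetical helper (not in the diff): fall back to a generic icon
// when a block type has no registered entry.
export function getBlockIcon(blockType: string): IconComponent {
  return blockTypeToIconMap[blockType] ?? PackageSearchIcon
}
```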
@@ -47,7 +47,6 @@ Runs a browser automation task using BrowserUse
| `save_browser_data` | boolean | No | Whether to save browser data |
| `model` | string | No | LLM model to use \(default: gpt-4o\) |
| `apiKey` | string | Yes | API key for BrowserUse API |
| `profile_id` | string | No | Browser profile ID for persistent sessions \(cookies, login state\) |
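The rows above cover the optional BrowserUse settings plus the required API key. A hedged sketch of an input object built from them; the `task` field and the concrete values are assumptions, since the rest of the Input table sits outside this hunk:

```ts
// Hypothetical BrowserUse input assembled from the rows above.
// Only the four listed parameters come from this hunk.
const browserUseInput = {
  task: 'Open example.com and return the page title', // assumed field
  save_browser_data: false, // optional boolean
  model: 'gpt-4o', // optional, default per the table
  apiKey: process.env.BROWSERUSE_API_KEY ?? '', // required
  profile_id: 'profile-123', // optional: reuse cookies / login state (example ID)
}
```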

#### Output
@@ -647,42 +647,6 @@ Retrieve a single ticket by ID from Intercom. Returns API-aligned fields only.
| `ticketId` | string | ID of the retrieved ticket |
| `success` | boolean | Operation success status |

### `intercom_update_ticket`

Update a ticket in Intercom (change state, assignment, attributes)

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `ticketId` | string | Yes | The ID of the ticket to update |
| `ticket_attributes` | string | No | JSON object with ticket attributes \(e.g., \{"_default_title_":"New Title","_default_description_":"Updated description"\}\) |
| `open` | boolean | No | Set to false to close the ticket, true to keep it open |
| `is_shared` | boolean | No | Whether the ticket is visible to users |
| `snoozed_until` | number | No | Unix timestamp for when the ticket should reopen |
| `admin_id` | string | No | The ID of the admin performing the update \(needed for workflows and attribution\) |
| `assignee_id` | string | No | The ID of the admin or team to assign the ticket to. Set to "0" to unassign. |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `ticket` | object | The updated ticket object |
| ↳ `id` | string | Unique identifier for the ticket |
| ↳ `type` | string | Object type \(ticket\) |
| ↳ `ticket_id` | string | Ticket ID shown in Intercom UI |
| ↳ `ticket_state` | string | State of the ticket |
| ↳ `ticket_attributes` | object | Attributes of the ticket |
| ↳ `open` | boolean | Whether the ticket is open |
| ↳ `is_shared` | boolean | Whether the ticket is visible to users |
| ↳ `snoozed_until` | number | Unix timestamp when ticket will reopen |
| ↳ `admin_assignee_id` | string | ID of assigned admin |
| ↳ `team_assignee_id` | string | ID of assigned team |
| ↳ `created_at` | number | Unix timestamp when ticket was created |
| ↳ `updated_at` | number | Unix timestamp when ticket was last updated |
| `ticketId` | string | ID of the updated ticket |
| `ticket_state` | string | Current state of the ticket |
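A hedged sketch of an `intercom_update_ticket` input assembled from the Input table above. Note that `ticket_attributes` is passed as a JSON string rather than an object, and `snoozed_until` is a Unix timestamp; the IDs are example values:

```ts
// Sketch of an intercom_update_ticket input following the Input table above.
const updateTicketInput = {
  ticketId: '12345', // example ID
  ticket_attributes: JSON.stringify({
    _default_title_: 'New Title',
    _default_description_: 'Updated description',
  }), // JSON string, as documented
  open: true, // false would close the ticket
  snoozed_until: Math.floor(Date.now() / 1000) + 24 * 60 * 60, // reopen in 24 hours
  admin_id: '991234', // example ID, used for attribution
  assignee_id: '0', // "0" unassigns the ticket
}
```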
### `intercom_create_message`

Create and send a new admin-initiated message in Intercom. Returns API-aligned fields only.

@@ -716,340 +680,4 @@ Create and send a new admin-initiated message in Intercom. Returns API-aligned f
| `messageId` | string | ID of the created message |
| `success` | boolean | Operation success status |

### `intercom_list_admins`

Fetch a list of all admins for the workspace

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `admins` | array | Array of admin objects |
| ↳ `id` | string | Unique identifier for the admin |
| ↳ `type` | string | Object type \(admin\) |
| ↳ `name` | string | Name of the admin |
| ↳ `email` | string | Email of the admin |
| ↳ `job_title` | string | Job title of the admin |
| ↳ `away_mode_enabled` | boolean | Whether admin is in away mode |
| ↳ `away_mode_reassign` | boolean | Whether to reassign conversations when away |
| ↳ `has_inbox_seat` | boolean | Whether admin has a paid inbox seat |
| ↳ `team_ids` | array | List of team IDs the admin belongs to |
| ↳ `avatar` | object | Avatar information |
| ↳ `email_verified` | boolean | Whether email is verified |
| `type` | string | Object type \(admin.list\) |

### `intercom_close_conversation`

Close a conversation in Intercom

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `conversationId` | string | Yes | The ID of the conversation to close |
| `admin_id` | string | Yes | The ID of the admin performing the action |
| `body` | string | No | Optional closing message to add to the conversation |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `conversation` | object | The closed conversation object |
| ↳ `id` | string | Unique identifier for the conversation |
| ↳ `type` | string | Object type \(conversation\) |
| ↳ `state` | string | State of the conversation \(closed\) |
| ↳ `open` | boolean | Whether the conversation is open \(false\) |
| ↳ `read` | boolean | Whether the conversation has been read |
| ↳ `created_at` | number | Unix timestamp when conversation was created |
| ↳ `updated_at` | number | Unix timestamp when conversation was last updated |
| `conversationId` | string | ID of the closed conversation |
| `state` | string | State of the conversation \(closed\) |

### `intercom_open_conversation`

Open a closed or snoozed conversation in Intercom

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `conversationId` | string | Yes | The ID of the conversation to open |
| `admin_id` | string | Yes | The ID of the admin performing the action |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `conversation` | object | The opened conversation object |
| ↳ `id` | string | Unique identifier for the conversation |
| ↳ `type` | string | Object type \(conversation\) |
| ↳ `state` | string | State of the conversation \(open\) |
| ↳ `open` | boolean | Whether the conversation is open \(true\) |
| ↳ `read` | boolean | Whether the conversation has been read |
| ↳ `created_at` | number | Unix timestamp when conversation was created |
| ↳ `updated_at` | number | Unix timestamp when conversation was last updated |
| `conversationId` | string | ID of the opened conversation |
| `state` | string | State of the conversation \(open\) |

### `intercom_snooze_conversation`

Snooze a conversation to reopen at a future time

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `conversationId` | string | Yes | The ID of the conversation to snooze |
| `admin_id` | string | Yes | The ID of the admin performing the action |
| `snoozed_until` | number | Yes | Unix timestamp for when the conversation should reopen |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `conversation` | object | The snoozed conversation object |
| ↳ `id` | string | Unique identifier for the conversation |
| ↳ `type` | string | Object type \(conversation\) |
| ↳ `state` | string | State of the conversation \(snoozed\) |
| ↳ `open` | boolean | Whether the conversation is open |
| ↳ `snoozed_until` | number | Unix timestamp when conversation will reopen |
| ↳ `created_at` | number | Unix timestamp when conversation was created |
| ↳ `updated_at` | number | Unix timestamp when conversation was last updated |
| `conversationId` | string | ID of the snoozed conversation |
| `state` | string | State of the conversation \(snoozed\) |
| `snoozed_until` | number | Unix timestamp when conversation will reopen |
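`snoozed_until` is documented as a Unix timestamp; assuming second precision, as elsewhere in the Intercom API, it can be derived from the current time. The IDs below are example values:

```ts
// Reopen the conversation four hours from now. Date.now() is in milliseconds,
// so divide by 1000 to get a Unix timestamp in seconds (assumed precision).
const FOUR_HOURS_MS = 4 * 60 * 60 * 1000
const snoozeInput = {
  conversationId: '170123456789', // example ID
  admin_id: '991234', // example ID
  snoozed_until: Math.floor((Date.now() + FOUR_HOURS_MS) / 1000),
}
```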
### `intercom_assign_conversation`

Assign a conversation to an admin or team in Intercom

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `conversationId` | string | Yes | The ID of the conversation to assign |
| `admin_id` | string | Yes | The ID of the admin performing the assignment |
| `assignee_id` | string | Yes | The ID of the admin or team to assign the conversation to. Set to "0" to unassign. |
| `body` | string | No | Optional message to add when assigning \(e.g., "Passing to the support team"\) |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `conversation` | object | The assigned conversation object |
| ↳ `id` | string | Unique identifier for the conversation |
| ↳ `type` | string | Object type \(conversation\) |
| ↳ `state` | string | State of the conversation |
| ↳ `open` | boolean | Whether the conversation is open |
| ↳ `admin_assignee_id` | number | ID of the assigned admin |
| ↳ `team_assignee_id` | string | ID of the assigned team |
| ↳ `created_at` | number | Unix timestamp when conversation was created |
| ↳ `updated_at` | number | Unix timestamp when conversation was last updated |
| `conversationId` | string | ID of the assigned conversation |
| `admin_assignee_id` | number | ID of the assigned admin |
| `team_assignee_id` | string | ID of the assigned team |

### `intercom_list_tags`

Fetch a list of all tags in the workspace

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `tags` | array | Array of tag objects |
| ↳ `id` | string | Unique identifier for the tag |
| ↳ `type` | string | Object type \(tag\) |
| ↳ `name` | string | Name of the tag |
| `type` | string | Object type \(list\) |

### `intercom_create_tag`

Create a new tag or update an existing tag name

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `name` | string | Yes | The name of the tag. Will create a new tag if not found, or update the name if id is provided. |
| `id` | string | No | The ID of an existing tag to update. Omit to create a new tag. |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `id` | string | Unique identifier for the tag |
| `name` | string | Name of the tag |
| `type` | string | Object type \(tag\) |

### `intercom_tag_contact`

Add a tag to a specific contact

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `contactId` | string | Yes | The ID of the contact to tag |
| `tagId` | string | Yes | The ID of the tag to apply |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `id` | string | Unique identifier for the tag |
| `name` | string | Name of the tag |
| `type` | string | Object type \(tag\) |

### `intercom_untag_contact`

Remove a tag from a specific contact

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `contactId` | string | Yes | The ID of the contact to untag |
| `tagId` | string | Yes | The ID of the tag to remove |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `id` | string | Unique identifier for the tag that was removed |
| `name` | string | Name of the tag that was removed |
| `type` | string | Object type \(tag\) |

### `intercom_tag_conversation`

Add a tag to a specific conversation

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `conversationId` | string | Yes | The ID of the conversation to tag |
| `tagId` | string | Yes | The ID of the tag to apply |
| `admin_id` | string | Yes | The ID of the admin applying the tag |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `id` | string | Unique identifier for the tag |
| `name` | string | Name of the tag |
| `type` | string | Object type \(tag\) |

### `intercom_create_note`

Add a note to a specific contact

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `contactId` | string | Yes | The ID of the contact to add the note to |
| `body` | string | Yes | The text content of the note |
| `admin_id` | string | No | The ID of the admin creating the note |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `id` | string | Unique identifier for the note |
| `body` | string | The text content of the note |
| `created_at` | number | Unix timestamp when the note was created |
| `type` | string | Object type \(note\) |
| `author` | object | The admin who created the note |
| ↳ `type` | string | Author type \(admin\) |
| ↳ `id` | string | Author ID |
| ↳ `name` | string | Author name |
| ↳ `email` | string | Author email |
| `contact` | object | The contact the note was created for |
| ↳ `type` | string | Contact type |
| ↳ `id` | string | Contact ID |

### `intercom_create_event`

Track a custom event for a contact in Intercom

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `event_name` | string | Yes | The name of the event \(e.g., "order-completed"\). Use past-tense verb-noun format for readability. |
| `created_at` | number | No | Unix timestamp for when the event occurred. Strongly recommended for uniqueness. |
| `user_id` | string | No | Your identifier for the user \(external_id\) |
| `email` | string | No | Email address of the user. Use only if your app uses email to uniquely identify users. |
| `id` | string | No | The Intercom contact ID |
| `metadata` | string | No | JSON object with up to 10 metadata key-value pairs about the event \(e.g., \{"order_value": 99.99\}\) |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `accepted` | boolean | Whether the event was accepted \(202 Accepted\) |
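A hedged sketch of an `intercom_create_event` input following the table above; `metadata` is a JSON string limited to 10 key-value pairs, and the identifiers are example values:

```ts
// Sketch of an intercom_create_event input following the table above.
const createEventInput = {
  event_name: 'order-completed', // past-tense verb-noun, per the guidance above
  created_at: Math.floor(Date.now() / 1000), // recommended for uniqueness
  user_id: 'user-8675309', // your external_id (example value)
  metadata: JSON.stringify({ order_value: 99.99, currency: 'USD' }), // up to 10 pairs
}
```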
### `intercom_attach_contact_to_company`

Attach a contact to a company in Intercom

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `contactId` | string | Yes | The ID of the contact to attach to the company |
| `companyId` | string | Yes | The ID of the company to attach the contact to |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `company` | object | The company object the contact was attached to |
| ↳ `id` | string | Unique identifier for the company |
| ↳ `type` | string | Object type \(company\) |
| ↳ `company_id` | string | The company_id you defined |
| ↳ `name` | string | Name of the company |
| ↳ `created_at` | number | Unix timestamp when company was created |
| ↳ `updated_at` | number | Unix timestamp when company was updated |
| ↳ `user_count` | number | Number of users in the company |
| ↳ `session_count` | number | Number of sessions |
| ↳ `monthly_spend` | number | Monthly spend amount |
| ↳ `plan` | object | Company plan details |
| `companyId` | string | ID of the company |
| `name` | string | Name of the company |

### `intercom_detach_contact_from_company`

Remove a contact from a company in Intercom

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `contactId` | string | Yes | The ID of the contact to detach from the company |
| `companyId` | string | Yes | The ID of the company to detach the contact from |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `company` | object | The company object the contact was detached from |
| ↳ `id` | string | Unique identifier for the company |
| ↳ `type` | string | Object type \(company\) |
| ↳ `company_id` | string | The company_id you defined |
| ↳ `name` | string | Name of the company |
| `companyId` | string | ID of the company |
| `name` | string | Name of the company |
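The Intercom tag tools above compose naturally: list existing tags, create the tag if it is missing, then apply it to a conversation. A sketch under the assumption of a generic `runTool` helper, which stands in for however the workflow actually invokes these tools:

```ts
// `runTool` is a stand-in for however the workflow invokes these tools;
// the field names follow the tables above.
declare function runTool(name: string, input: Record<string, unknown>): Promise<any>

async function tagConversation(conversationId: string, tagName: string, adminId: string) {
  const { tags } = await runTool('intercom_list_tags', {})
  let tag = tags.find((t: { name: string }) => t.name === tagName)
  if (!tag) {
    // Creates a new tag when no id is supplied.
    tag = await runTool('intercom_create_tag', { name: tagName })
  }
  // Tagging a conversation requires the acting admin's ID.
  return runTool('intercom_tag_conversation', {
    conversationId,
    tagId: tag.id,
    admin_id: adminId,
  })
}
```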
@@ -6,7 +6,7 @@ description: Access prediction markets and trade on Kalshi
import { BlockInfoCard } from "@/components/ui/block-info-card"

<BlockInfoCard
type="kalshi_v2"
type="kalshi"
color="#09C285"
/>

@@ -36,7 +36,7 @@ Integrate Kalshi prediction markets into the workflow. Can get markets, market,

### `kalshi_get_markets`

Retrieve a list of prediction markets from Kalshi with all filtering options (V2 - full API response)
Retrieve a list of prediction markets from Kalshi with optional filtering

#### Input

@@ -52,12 +52,12 @@ Retrieve a list of prediction markets from Kalshi with all filtering options (V2

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `markets` | array | Array of market objects with all API fields |
| `cursor` | string | Pagination cursor for fetching more results |
| `markets` | array | Array of market objects |
| `paging` | object | Pagination cursor for fetching more results |
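The output change above replaces the flat `cursor` field with a `paging` object. A hedged pagination sketch; the `cursor` field inside `paging`, and passing it back as an input, are assumptions, as is the `runTool` helper:

```ts
// `runTool` and the `cursor` field inside `paging` are assumptions; the table
// only says `paging` carries the pagination cursor.
declare function runTool(name: string, input: Record<string, unknown>): Promise<any>

async function fetchAllMarkets(): Promise<any[]> {
  const markets: any[] = []
  let cursor: string | undefined
  do {
    // Passing the cursor back as an input parameter is also assumed here.
    const page = await runTool('kalshi_get_markets', cursor ? { cursor } : {})
    markets.push(...page.markets)
    cursor = page.paging?.cursor
  } while (cursor)
  return markets
}
```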
### `kalshi_get_market`

Retrieve details of a specific prediction market by ticker (V2 - full API response)
Retrieve details of a specific prediction market by ticker

#### Input

@@ -69,62 +69,11 @@ Retrieve details of a specific prediction market by ticker (V2 - full API respon

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `market` | object | Market object with all API fields |
| ↳ `ticker` | string | Market ticker |
| ↳ `event_ticker` | string | Event ticker |
| ↳ `market_type` | string | Market type |
| ↳ `title` | string | Market title |
| ↳ `subtitle` | string | Market subtitle |
| ↳ `yes_sub_title` | string | Yes outcome subtitle |
| ↳ `no_sub_title` | string | No outcome subtitle |
| ↳ `open_time` | string | Market open time |
| ↳ `close_time` | string | Market close time |
| ↳ `expected_expiration_time` | string | Expected expiration time |
| ↳ `expiration_time` | string | Expiration time |
| ↳ `latest_expiration_time` | string | Latest expiration time |
| ↳ `settlement_timer_seconds` | number | Settlement timer in seconds |
| ↳ `status` | string | Market status |
| ↳ `response_price_units` | string | Response price units |
| ↳ `notional_value` | number | Notional value |
| ↳ `tick_size` | number | Tick size |
| ↳ `yes_bid` | number | Current yes bid price |
| ↳ `yes_ask` | number | Current yes ask price |
| ↳ `no_bid` | number | Current no bid price |
| ↳ `no_ask` | number | Current no ask price |
| ↳ `last_price` | number | Last trade price |
| ↳ `previous_yes_bid` | number | Previous yes bid |
| ↳ `previous_yes_ask` | number | Previous yes ask |
| ↳ `previous_price` | number | Previous price |
| ↳ `volume` | number | Total volume |
| ↳ `volume_24h` | number | 24-hour volume |
| ↳ `liquidity` | number | Market liquidity |
| ↳ `open_interest` | number | Open interest |
| ↳ `result` | string | Market result |
| ↳ `cap_strike` | number | Cap strike |
| ↳ `floor_strike` | number | Floor strike |
| ↳ `can_close_early` | boolean | Can close early |
| ↳ `expiration_value` | string | Expiration value |
| ↳ `category` | string | Market category |
| ↳ `risk_limit_cents` | number | Risk limit in cents |
| ↳ `strike_type` | string | Strike type |
| ↳ `rules_primary` | string | Primary rules |
| ↳ `rules_secondary` | string | Secondary rules |
| ↳ `settlement_source_url` | string | Settlement source URL |
| ↳ `custom_strike` | object | Custom strike object |
| ↳ `underlying` | string | Underlying asset |
| ↳ `settlement_value` | number | Settlement value |
| ↳ `cfd_contract_size` | number | CFD contract size |
| ↳ `yes_fee_fp` | number | Yes fee \(fixed-point\) |
| ↳ `no_fee_fp` | number | No fee \(fixed-point\) |
| ↳ `last_price_fp` | number | Last price \(fixed-point\) |
| ↳ `yes_bid_fp` | number | Yes bid \(fixed-point\) |
| ↳ `yes_ask_fp` | number | Yes ask \(fixed-point\) |
| ↳ `no_bid_fp` | number | No bid \(fixed-point\) |
| ↳ `no_ask_fp` | number | No ask \(fixed-point\) |
| `market` | object | Market object with details |

### `kalshi_get_events`

Retrieve a list of events from Kalshi with optional filtering (V2 - exact API response)
Retrieve a list of events from Kalshi with optional filtering

#### Input

@@ -141,12 +90,11 @@ Retrieve a list of events from Kalshi with optional filtering (V2 - exact API re
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `events` | array | Array of event objects |
| `milestones` | array | Array of milestone objects \(if requested\) |
| `cursor` | string | Pagination cursor for fetching more results |
| `paging` | object | Pagination cursor for fetching more results |

### `kalshi_get_event`

Retrieve details of a specific event by ticker (V2 - exact API response)
Retrieve details of a specific event by ticker

#### Input

@@ -159,23 +107,11 @@ Retrieve details of a specific event by ticker (V2 - exact API response)

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `event` | object | Event object with full details matching Kalshi API response |
| ↳ `event_ticker` | string | Event ticker |
| ↳ `series_ticker` | string | Series ticker |
| ↳ `title` | string | Event title |
| ↳ `sub_title` | string | Event subtitle |
| ↳ `mutually_exclusive` | boolean | Mutually exclusive markets |
| ↳ `category` | string | Event category |
| ↳ `collateral_return_type` | string | Collateral return type |
| ↳ `strike_date` | string | Strike date |
| ↳ `strike_period` | string | Strike period |
| ↳ `available_on_brokers` | boolean | Available on brokers |
| ↳ `product_metadata` | object | Product metadata |
| ↳ `markets` | array | Nested markets \(if requested\) |
| `event` | object | Event object with details |

### `kalshi_get_balance`

Retrieve your account balance and portfolio value from Kalshi (V2 - exact API response)
Retrieve your account balance and portfolio value from Kalshi

#### Input

@@ -189,12 +125,11 @@ Retrieve your account balance and portfolio value from Kalshi (V2 - exact API re
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `balance` | number | Account balance in cents |
| `portfolio_value` | number | Portfolio value in cents |
| `updated_ts` | number | Unix timestamp of last update \(milliseconds\) |
| `portfolioValue` | number | Portfolio value in cents |
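Both `balance` and `portfolioValue` are reported in cents, so divide by 100 for display:

```ts
// Example values; both fields are documented in cents.
const balance = 125_050 // $1,250.50
const portfolioValue = 98_200 // $982.00
const toDollars = (cents: number) => (cents / 100).toFixed(2)
console.log(`balance $${toDollars(balance)}, portfolio $${toDollars(portfolioValue)}`)
```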
### `kalshi_get_positions`

Retrieve your open positions from Kalshi (V2 - exact API response)
Retrieve your open positions from Kalshi

#### Input

@@ -212,13 +147,12 @@ Retrieve your open positions from Kalshi (V2 - exact API response)

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `market_positions` | array | Array of market position objects |
| `event_positions` | array | Array of event position objects |
| `cursor` | string | Pagination cursor for fetching more results |
| `positions` | array | Array of position objects |
| `paging` | object | Pagination cursor for fetching more results |

### `kalshi_get_orders`

Retrieve your orders from Kalshi with optional filtering (V2 with full API response)
Retrieve your orders from Kalshi with optional filtering

#### Input

@@ -236,12 +170,12 @@ Retrieve your orders from Kalshi with optional filtering (V2 with full API respo

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `orders` | array | Array of order objects with full API response fields |
| `cursor` | string | Pagination cursor for fetching more results |
| `orders` | array | Array of order objects |
| `paging` | object | Pagination cursor for fetching more results |

### `kalshi_get_order`

Retrieve details of a specific order by ID from Kalshi (V2 with full API response)
Retrieve details of a specific order by ID from Kalshi

#### Input

@@ -255,44 +189,11 @@ Retrieve details of a specific order by ID from Kalshi (V2 with full API respons

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `order` | object | Order object with full API response fields |
| ↳ `order_id` | string | Order ID |
| ↳ `user_id` | string | User ID |
| ↳ `client_order_id` | string | Client order ID |
| ↳ `ticker` | string | Market ticker |
| ↳ `side` | string | Order side \(yes/no\) |
| ↳ `action` | string | Action \(buy/sell\) |
| ↳ `type` | string | Order type \(limit/market\) |
| ↳ `status` | string | Order status \(resting/canceled/executed\) |
| ↳ `yes_price` | number | Yes price in cents |
| ↳ `no_price` | number | No price in cents |
| ↳ `yes_price_dollars` | string | Yes price in dollars |
| ↳ `no_price_dollars` | string | No price in dollars |
| ↳ `fill_count` | number | Filled contract count |
| ↳ `fill_count_fp` | string | Filled count \(fixed-point\) |
| ↳ `remaining_count` | number | Remaining contracts |
| ↳ `remaining_count_fp` | string | Remaining count \(fixed-point\) |
| ↳ `initial_count` | number | Initial contract count |
| ↳ `initial_count_fp` | string | Initial count \(fixed-point\) |
| ↳ `taker_fees` | number | Taker fees in cents |
| ↳ `maker_fees` | number | Maker fees in cents |
| ↳ `taker_fees_dollars` | string | Taker fees in dollars |
| ↳ `maker_fees_dollars` | string | Maker fees in dollars |
| ↳ `taker_fill_cost` | number | Taker fill cost in cents |
| ↳ `maker_fill_cost` | number | Maker fill cost in cents |
| ↳ `taker_fill_cost_dollars` | string | Taker fill cost in dollars |
| ↳ `maker_fill_cost_dollars` | string | Maker fill cost in dollars |
| ↳ `queue_position` | number | Queue position \(deprecated\) |
| ↳ `expiration_time` | string | Order expiration time |
| ↳ `created_time` | string | Order creation time |
| ↳ `last_update_time` | string | Last update time |
| ↳ `self_trade_prevention_type` | string | Self-trade prevention type |
| ↳ `order_group_id` | string | Order group ID |
| ↳ `cancel_order_on_pause` | boolean | Cancel on market pause |
| `order` | object | Order object with details |

### `kalshi_get_orderbook`

Retrieve the orderbook (yes and no bids) for a specific market (V2 - includes depth and fp fields)
Retrieve the orderbook (yes and no bids) for a specific market

#### Input

@@ -304,18 +205,11 @@ Retrieve the orderbook (yes and no bids) for a specific market (V2 - includes de

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `orderbook` | object | Orderbook with yes/no bids \(legacy integer counts\) |
| ↳ `yes` | array | Yes side bids as tuples \[price_cents, count\] |
| ↳ `no` | array | No side bids as tuples \[price_cents, count\] |
| ↳ `yes_dollars` | array | Yes side bids as tuples \[dollars_string, count\] |
| ↳ `no_dollars` | array | No side bids as tuples \[dollars_string, count\] |
| `orderbook_fp` | object | Orderbook with fixed-point counts \(preferred\) |
| ↳ `yes_dollars` | array | Yes side bids as tuples \[dollars_string, fp_count_string\] |
| ↳ `no_dollars` | array | No side bids as tuples \[dollars_string, fp_count_string\] |
| `orderbook` | object | Orderbook with yes/no bids and asks |
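The legacy `orderbook` object lists each side as `[price_cents, count]` tuples. A small sketch that picks the best (highest) bid per side; the `100 - no_bid` implied yes ask is the usual binary-market identity, stated here as an assumption rather than something in the table:

```ts
// Each side is an array of [price_cents, count] tuples (example data below).
type Level = [number, number]

function bestBid(levels: Level[]): Level | undefined {
  // Best bid is the level with the highest price.
  return levels.reduce<Level | undefined>(
    (best, lvl) => (best === undefined || lvl[0] > best[0] ? lvl : best),
    undefined,
  )
}

const orderbook = {
  yes: [[47, 120], [46, 300]] as Level[],
  no: [[52, 80]] as Level[],
}
const bestYesBid = bestBid(orderbook.yes)?.[0] // 47 cents
// Assumed binary-market identity: the best no bid implies a yes ask of 100 - no_bid.
const impliedYesAsk = 100 - (bestBid(orderbook.no)?.[0] ?? 0) // 48 cents
```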
### `kalshi_get_trades`

Retrieve recent trades with additional filtering options (V2 - includes trade_id and count_fp)
Retrieve recent trades across all markets

#### Input

@@ -328,12 +222,12 @@ Retrieve recent trades with additional filtering options (V2 - includes trade_id

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `trades` | array | Array of trade objects with trade_id and count_fp |
| `cursor` | string | Pagination cursor for fetching more results |
| `trades` | array | Array of trade objects |
| `paging` | object | Pagination cursor for fetching more results |

### `kalshi_get_candlesticks`

Retrieve OHLC candlestick data for a specific market (V2 - full API response)
Retrieve OHLC candlestick data for a specific market

#### Input

@@ -349,8 +243,7 @@ Retrieve OHLC candlestick data for a specific market (V2 - full API response)

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `ticker` | string | Market ticker |
| `candlesticks` | array | Array of OHLC candlestick data with nested bid/ask/price objects |
| `candlesticks` | array | Array of OHLC candlestick data |

### `kalshi_get_fills`

@@ -373,12 +266,12 @@ Retrieve your portfolio

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `fills` | array | Array of fill/trade objects with all API fields |
| `cursor` | string | Pagination cursor for fetching more results |
| `fills` | array | Array of fill/trade objects |
| `paging` | object | Pagination cursor for fetching more results |

### `kalshi_get_series_by_ticker`

Retrieve details of a specific market series by ticker (V2 - exact API response)
Retrieve details of a specific market series by ticker

#### Input

@@ -390,25 +283,11 @@ Retrieve details of a specific market series by ticker (V2 - exact API response)

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `series` | object | Series object with full details matching Kalshi API response |
| ↳ `ticker` | string | Series ticker |
| ↳ `title` | string | Series title |
| ↳ `frequency` | string | Event frequency |
| ↳ `category` | string | Series category |
| ↳ `tags` | array | Series tags |
| ↳ `settlement_sources` | array | Settlement sources |
| ↳ `contract_url` | string | Contract URL |
| ↳ `contract_terms_url` | string | Contract terms URL |
| ↳ `fee_type` | string | Fee type |
| ↳ `fee_multiplier` | number | Fee multiplier |
| ↳ `additional_prohibitions` | array | Additional prohibitions |
| ↳ `product_metadata` | object | Product metadata |
| ↳ `volume` | number | Series volume |
| ↳ `volume_fp` | number | Volume \(fixed-point\) |
| `series` | object | Series object with details |

### `kalshi_get_exchange_status`

Retrieve the current status of the Kalshi exchange (V2 - exact API response)
Retrieve the current status of the Kalshi exchange (trading and exchange activity)

#### Input

@@ -419,13 +298,11 @@ Retrieve the current status of the Kalshi exchange (V2 - exact API response)

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `exchange_active` | boolean | Whether the exchange is active |
| `trading_active` | boolean | Whether trading is active |
| `exchange_estimated_resume_time` | string | Estimated time when exchange will resume \(if inactive\) |
| `status` | object | Exchange status with trading_active and exchange_active flags |

### `kalshi_create_order`

Create a new order on a Kalshi prediction market (V2 with full API response)
Create a new order on a Kalshi prediction market

#### Input

@@ -455,44 +332,11 @@ Create a new order on a Kalshi prediction market (V2 with full API response)

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `order` | object | The created order object with full API response fields |
| ↳ `order_id` | string | Order ID |
| ↳ `user_id` | string | User ID |
| ↳ `client_order_id` | string | Client order ID |
| ↳ `ticker` | string | Market ticker |
| ↳ `side` | string | Order side \(yes/no\) |
| ↳ `action` | string | Action \(buy/sell\) |
| ↳ `type` | string | Order type \(limit/market\) |
| ↳ `status` | string | Order status \(resting/canceled/executed\) |
| ↳ `yes_price` | number | Yes price in cents |
| ↳ `no_price` | number | No price in cents |
| ↳ `yes_price_dollars` | string | Yes price in dollars |
| ↳ `no_price_dollars` | string | No price in dollars |
| ↳ `fill_count` | number | Filled contract count |
| ↳ `fill_count_fp` | string | Filled count \(fixed-point\) |
| ↳ `remaining_count` | number | Remaining contracts |
| ↳ `remaining_count_fp` | string | Remaining count \(fixed-point\) |
| ↳ `initial_count` | number | Initial contract count |
| ↳ `initial_count_fp` | string | Initial count \(fixed-point\) |
| ↳ `taker_fees` | number | Taker fees in cents |
| ↳ `maker_fees` | number | Maker fees in cents |
| ↳ `taker_fees_dollars` | string | Taker fees in dollars |
| ↳ `maker_fees_dollars` | string | Maker fees in dollars |
| ↳ `taker_fill_cost` | number | Taker fill cost in cents |
| ↳ `maker_fill_cost` | number | Maker fill cost in cents |
| ↳ `taker_fill_cost_dollars` | string | Taker fill cost in dollars |
| ↳ `maker_fill_cost_dollars` | string | Maker fill cost in dollars |
| ↳ `queue_position` | number | Queue position \(deprecated\) |
| ↳ `expiration_time` | string | Order expiration time |
| ↳ `created_time` | string | Order creation time |
| ↳ `last_update_time` | string | Last update time |
| ↳ `self_trade_prevention_type` | string | Self-trade prevention type |
| ↳ `order_group_id` | string | Order group ID |
| ↳ `cancel_order_on_pause` | boolean | Cancel on market pause |
| `order` | object | The created order object |

### `kalshi_cancel_order`

Cancel an existing order on Kalshi (V2 with full API response)
Cancel an existing order on Kalshi

#### Input

@@ -506,46 +350,12 @@ Cancel an existing order on Kalshi (V2 with full API response)

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `order` | object | The canceled order object with full API response fields |
| ↳ `order_id` | string | Order ID |
| ↳ `user_id` | string | User ID |
| ↳ `client_order_id` | string | Client order ID |
| ↳ `ticker` | string | Market ticker |
| ↳ `side` | string | Order side \(yes/no\) |
| ↳ `action` | string | Action \(buy/sell\) |
| ↳ `type` | string | Order type \(limit/market\) |
| ↳ `status` | string | Order status \(resting/canceled/executed\) |
| ↳ `yes_price` | number | Yes price in cents |
| ↳ `no_price` | number | No price in cents |
| ↳ `yes_price_dollars` | string | Yes price in dollars |
| ↳ `no_price_dollars` | string | No price in dollars |
| ↳ `fill_count` | number | Filled contract count |
| ↳ `fill_count_fp` | string | Filled count \(fixed-point\) |
| ↳ `remaining_count` | number | Remaining contracts |
| ↳ `remaining_count_fp` | string | Remaining count \(fixed-point\) |
| ↳ `initial_count` | number | Initial contract count |
| ↳ `initial_count_fp` | string | Initial count \(fixed-point\) |
| ↳ `taker_fees` | number | Taker fees in cents |
| ↳ `maker_fees` | number | Maker fees in cents |
| ↳ `taker_fees_dollars` | string | Taker fees in dollars |
| ↳ `maker_fees_dollars` | string | Maker fees in dollars |
| ↳ `taker_fill_cost` | number | Taker fill cost in cents |
| ↳ `maker_fill_cost` | number | Maker fill cost in cents |
| ↳ `taker_fill_cost_dollars` | string | Taker fill cost in dollars |
| ↳ `maker_fill_cost_dollars` | string | Maker fill cost in dollars |
| ↳ `queue_position` | number | Queue position \(deprecated\) |
| ↳ `expiration_time` | string | Order expiration time |
| ↳ `created_time` | string | Order creation time |
| ↳ `last_update_time` | string | Last update time |
| ↳ `self_trade_prevention_type` | string | Self-trade prevention type |
| ↳ `order_group_id` | string | Order group ID |
| ↳ `cancel_order_on_pause` | boolean | Cancel on market pause |
| `reduced_by` | number | Number of contracts canceled |
| `reduced_by_fp` | string | Number of contracts canceled in fixed-point format |
| `order` | object | The canceled order object |
| `reducedBy` | number | Number of contracts canceled |

### `kalshi_amend_order`

Modify the price or quantity of an existing order on Kalshi (V2 with full API response)
Modify the price or quantity of an existing order on Kalshi

#### Input

@@ -569,63 +379,6 @@ Modify the price or quantity of an existing order on Kalshi (V2 with full API re

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `old_order` | object | The original order object before amendment |
| ↳ `order_id` | string | Order ID |
| ↳ `user_id` | string | User ID |
| ↳ `ticker` | string | Market ticker |
| ↳ `event_ticker` | string | Event ticker |
| ↳ `status` | string | Order status |
| ↳ `side` | string | Order side \(yes/no\) |
| ↳ `type` | string | Order type \(limit/market\) |
| ↳ `yes_price` | number | Yes price in cents |
| ↳ `no_price` | number | No price in cents |
| ↳ `action` | string | Action \(buy/sell\) |
| ↳ `count` | number | Number of contracts |
| ↳ `remaining_count` | number | Remaining contracts |
| ↳ `created_time` | string | Order creation time |
| ↳ `expiration_time` | string | Order expiration time |
| ↳ `order_group_id` | string | Order group ID |
| ↳ `client_order_id` | string | Client order ID |
| ↳ `place_count` | number | Place count |
| ↳ `decrease_count` | number | Decrease count |
| ↳ `queue_position` | number | Queue position |
| ↳ `maker_fill_count` | number | Maker fill count |
| ↳ `taker_fill_count` | number | Taker fill count |
| ↳ `maker_fees` | number | Maker fees |
| ↳ `taker_fees` | number | Taker fees |
| ↳ `last_update_time` | string | Last update time |
| ↳ `take_profit_order_id` | string | Take profit order ID |
| ↳ `stop_loss_order_id` | string | Stop loss order ID |
| ↳ `amend_count` | number | Amend count |
| ↳ `amend_taker_fill_count` | number | Amend taker fill count |
| `order` | object | The amended order object with full API response fields |
| ↳ `order_id` | string | Order ID |
| ↳ `user_id` | string | User ID |
| ↳ `ticker` | string | Market ticker |
| ↳ `event_ticker` | string | Event ticker |
| ↳ `status` | string | Order status |
| ↳ `side` | string | Order side \(yes/no\) |
| ↳ `type` | string | Order type \(limit/market\) |
| ↳ `yes_price` | number | Yes price in cents |
| ↳ `no_price` | number | No price in cents |
| ↳ `action` | string | Action \(buy/sell\) |
| ↳ `count` | number | Number of contracts |
| ↳ `remaining_count` | number | Remaining contracts |
| ↳ `created_time` | string | Order creation time |
| ↳ `expiration_time` | string | Order expiration time |
| ↳ `order_group_id` | string | Order group ID |
| ↳ `client_order_id` | string | Client order ID |
| ↳ `place_count` | number | Place count |
| ↳ `decrease_count` | number | Decrease count |
| ↳ `queue_position` | number | Queue position |
| ↳ `maker_fill_count` | number | Maker fill count |
| ↳ `taker_fill_count` | number | Taker fill count |
| ↳ `maker_fees` | number | Maker fees |
| ↳ `taker_fees` | number | Taker fees |
| ↳ `last_update_time` | string | Last update time |
| ↳ `take_profit_order_id` | string | Take profit order ID |
| ↳ `stop_loss_order_id` | string | Stop loss order ID |
| ↳ `amend_count` | number | Amend count |
| ↳ `amend_taker_fill_count` | number | Amend taker fill count |
| `order` | object | The amended order object |
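`kalshi_amend_order` returns both `old_order` and the amended `order`, so the effect of an amendment can be read by diffing the two. The field names below come from the tables above; the diffing helper itself is illustrative:

```ts
// Field names come from the old_order / order tables above; the helper is illustrative.
type AmendFields = {
  yes_price?: number
  no_price?: number
  count?: number
  remaining_count?: number
}

function amendedFields(oldOrder: AmendFields, newOrder: AmendFields): Partial<AmendFields> {
  const keys: (keyof AmendFields)[] = ['yes_price', 'no_price', 'count', 'remaining_count']
  const changed: Partial<AmendFields> = {}
  for (const key of keys) {
    if (oldOrder[key] !== newOrder[key]) changed[key] = newOrder[key]
  }
  return changed
}

// amendedFields({ yes_price: 47, count: 10 }, { yes_price: 45, count: 10 })
// -> { yes_price: 45 }
```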
@@ -97,6 +97,7 @@
"shopify",
"slack",
"smtp",
"spotify",
"sqs",
"ssh",
"stagehand",

@@ -29,7 +29,7 @@ By using these documented API endpoints, you can seamlessly integrate Polymarket

## Usage Instructions

Integrate Polymarket prediction markets into the workflow. Can get markets, market, events, event, tags, series, orderbook, price, midpoint, price history, last trade price, spread, tick size, positions, trades, activity, leaderboard, holders, and search.
Integrate Polymarket prediction markets into the workflow. Can get markets, market, events, event, tags, series, orderbook, price, midpoint, price history, last trade price, spread, tick size, positions, trades, and search.

@@ -43,7 +43,7 @@ Retrieve a list of prediction markets from Polymarket with optional filtering

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `closed` | string | No | Filter by closed status \(true/false\). Use false for open markets only. |
| `closed` | string | No | Filter by closed status \(true/false\). Use false for active markets only. |
| `order` | string | No | Sort field \(e.g., volumeNum, liquidityNum, startDate, endDate, createdAt\) |
| `ascending` | string | No | Sort direction \(true for ascending, false for descending\) |
| `tagId` | string | No | Filter by tag ID |

@@ -55,21 +55,6 @@ Retrieve a list of prediction markets from Polymarket with optional filtering

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `markets` | array | Array of market objects |
| ↳ `id` | string | Market ID |
| ↳ `question` | string | Market question |
| ↳ `conditionId` | string | Condition ID |
| ↳ `slug` | string | Market slug |
| ↳ `endDate` | string | End date |
| ↳ `image` | string | Market image URL |
| ↳ `outcomes` | string | Outcomes JSON string |
| ↳ `outcomePrices` | string | Outcome prices JSON string |
| ↳ `volume` | string | Total volume |
| ↳ `liquidity` | string | Total liquidity |
| ↳ `active` | boolean | Whether market is active |
| ↳ `closed` | boolean | Whether market is closed |
| ↳ `volumeNum` | number | Volume as number |
| ↳ `liquidityNum` | number | Liquidity as number |
| ↳ `clobTokenIds` | array | CLOB token IDs |
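In the Polymarket market objects above, `outcomes` and `outcomePrices` are JSON-encoded strings rather than arrays, so they need to be parsed before use. A small sketch with illustrative values:

```ts
// Illustrative market object; outcomes and outcomePrices are JSON strings.
const market = {
  question: 'Will it rain tomorrow?',
  outcomes: '["Yes","No"]',
  outcomePrices: '["0.62","0.38"]',
}

const outcomes: string[] = JSON.parse(market.outcomes)
const prices: number[] = JSON.parse(market.outcomePrices).map(Number)
const priced = outcomes.map((name, i) => ({ name, price: prices[i] }))
// -> [{ name: 'Yes', price: 0.62 }, { name: 'No', price: 0.38 }]
```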
### `polymarket_get_market`
|
||||
|
||||
@@ -87,28 +72,6 @@ Retrieve details of a specific prediction market by ID or slug
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `market` | object | Market object with details |
|
||||
| ↳ `id` | string | Market ID |
|
||||
| ↳ `question` | string | Market question |
|
||||
| ↳ `conditionId` | string | Condition ID |
|
||||
| ↳ `slug` | string | Market slug |
|
||||
| ↳ `resolutionSource` | string | Resolution source |
|
||||
| ↳ `endDate` | string | End date |
|
||||
| ↳ `startDate` | string | Start date |
|
||||
| ↳ `image` | string | Market image URL |
|
||||
| ↳ `icon` | string | Market icon URL |
|
||||
| ↳ `description` | string | Market description |
|
||||
| ↳ `outcomes` | string | Outcomes JSON string |
|
||||
| ↳ `outcomePrices` | string | Outcome prices JSON string |
|
||||
| ↳ `volume` | string | Total volume |
|
||||
| ↳ `liquidity` | string | Total liquidity |
|
||||
| ↳ `active` | boolean | Whether market is active |
|
||||
| ↳ `closed` | boolean | Whether market is closed |
|
||||
| ↳ `archived` | boolean | Whether market is archived |
|
||||
| ↳ `volumeNum` | number | Volume as number |
|
||||
| ↳ `liquidityNum` | number | Liquidity as number |
|
||||
| ↳ `clobTokenIds` | array | CLOB token IDs |
|
||||
| ↳ `acceptingOrders` | boolean | Whether accepting orders |
|
||||
| ↳ `negRisk` | boolean | Whether negative risk |
|
||||
|
||||
### `polymarket_get_events`
|
||||
|
||||
@@ -118,7 +81,7 @@ Retrieve a list of events from Polymarket with optional filtering
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `closed` | string | No | Filter by closed status \(true/false\). Use false for open events only. |
|
||||
| `closed` | string | No | Filter by closed status \(true/false\). Use false for active events only. |
|
||||
| `order` | string | No | Sort field \(e.g., volume, liquidity, startDate, endDate, createdAt\) |
|
||||
| `ascending` | string | No | Sort direction \(true for ascending, false for descending\) |
|
||||
| `tagId` | string | No | Filter by tag ID |
|
||||
@@ -130,21 +93,6 @@ Retrieve a list of events from Polymarket with optional filtering
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `events` | array | Array of event objects |
|
||||
| ↳ `id` | string | Event ID |
|
||||
| ↳ `ticker` | string | Event ticker |
|
||||
| ↳ `slug` | string | Event slug |
|
||||
| ↳ `title` | string | Event title |
|
||||
| ↳ `description` | string | Event description |
|
||||
| ↳ `startDate` | string | Start date |
|
||||
| ↳ `endDate` | string | End date |
|
||||
| ↳ `image` | string | Event image URL |
|
||||
| ↳ `icon` | string | Event icon URL |
|
||||
| ↳ `active` | boolean | Whether event is active |
|
||||
| ↳ `closed` | boolean | Whether event is closed |
|
||||
| ↳ `archived` | boolean | Whether event is archived |
|
||||
| ↳ `liquidity` | number | Total liquidity |
|
||||
| ↳ `volume` | number | Total volume |
|
||||
| ↳ `markets` | array | Array of markets in this event |
|
||||
|
||||
### `polymarket_get_event`
|
||||
|
||||
@@ -162,24 +110,6 @@ Retrieve details of a specific event by ID or slug
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `event` | object | Event object with details |
|
||||
| ↳ `id` | string | Event ID |
|
||||
| ↳ `ticker` | string | Event ticker |
|
||||
| ↳ `slug` | string | Event slug |
|
||||
| ↳ `title` | string | Event title |
|
||||
| ↳ `description` | string | Event description |
|
||||
| ↳ `startDate` | string | Start date |
|
||||
| ↳ `creationDate` | string | Creation date |
|
||||
| ↳ `endDate` | string | End date |
|
||||
| ↳ `image` | string | Event image URL |
|
||||
| ↳ `icon` | string | Event icon URL |
|
||||
| ↳ `active` | boolean | Whether event is active |
|
||||
| ↳ `closed` | boolean | Whether event is closed |
|
||||
| ↳ `archived` | boolean | Whether event is archived |
|
||||
| ↳ `liquidity` | number | Total liquidity |
|
||||
| ↳ `volume` | number | Total volume |
|
||||
| ↳ `openInterest` | number | Open interest |
|
||||
| ↳ `commentCount` | number | Comment count |
|
||||
| ↳ `markets` | array | Array of markets in this event |
|
||||
|
||||
### `polymarket_get_tags`
|
||||
|
||||
@@ -196,12 +126,7 @@ Retrieve available tags for filtering markets from Polymarket
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `tags` | array | Array of tag objects |
|
||||
| ↳ `id` | string | Tag ID |
|
||||
| ↳ `label` | string | Tag label |
|
||||
| ↳ `slug` | string | Tag slug |
|
||||
| ↳ `createdAt` | string | Creation timestamp |
|
||||
| ↳ `updatedAt` | string | Last update timestamp |
|
||||
| `tags` | array | Array of tag objects with id, label, and slug |
|
||||
|
||||
### `polymarket_search`
|
||||
|
||||
@@ -213,28 +138,13 @@ Search for markets, events, and profiles on Polymarket
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `query` | string | Yes | Search query term |
|
||||
| `limit` | string | No | Number of results per page \(max 50\) |
|
||||
| `page` | string | No | Page number for pagination \(1-indexed\) |
|
||||
| `cache` | string | No | Enable caching \(true/false\) |
|
||||
| `eventsStatus` | string | No | Filter events by status |
|
||||
| `limitPerType` | string | No | Limit results per type \(markets, events, profiles\) |
|
||||
| `eventsTag` | string | No | Filter by event tags \(comma-separated\) |
|
||||
| `sort` | string | No | Sort field |
|
||||
| `ascending` | string | No | Sort direction \(true for ascending, false for descending\) |
|
||||
| `searchTags` | string | No | Include tags in search results \(true/false\) |
|
||||
| `searchProfiles` | string | No | Include profiles in search results \(true/false\) |
|
||||
| `recurrence` | string | No | Filter by recurrence type |
|
||||
| `excludeTagId` | string | No | Exclude events with these tag IDs \(comma-separated\) |
|
||||
| `keepClosedMarkets` | string | No | Include closed markets in results \(0 or 1\) |
|
||||
| `offset` | string | No | Pagination offset |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `results` | object | Search results containing markets, events, tags, and profiles arrays |
|
||||
| ↳ `markets` | array | Array of matching market objects |
|
||||
| ↳ `events` | array | Array of matching event objects |
|
||||
| ↳ `tags` | array | Array of matching tag objects |
|
||||
| ↳ `profiles` | array | Array of matching profile objects |
|
||||
| `results` | object | Search results containing markets, events, and profiles arrays |
|
||||
|
||||
### `polymarket_get_series`
|
||||
|
||||
@@ -252,21 +162,6 @@ Retrieve series (related market groups) from Polymarket
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `series` | array | Array of series objects |
|
||||
| ↳ `id` | string | Series ID |
|
||||
| ↳ `ticker` | string | Series ticker |
|
||||
| ↳ `slug` | string | Series slug |
|
||||
| ↳ `title` | string | Series title |
|
||||
| ↳ `seriesType` | string | Series type |
|
||||
| ↳ `recurrence` | string | Recurrence pattern |
|
||||
| ↳ `image` | string | Series image URL |
|
||||
| ↳ `icon` | string | Series icon URL |
|
||||
| ↳ `active` | boolean | Whether series is active |
|
||||
| ↳ `closed` | boolean | Whether series is closed |
|
||||
| ↳ `archived` | boolean | Whether series is archived |
|
||||
| ↳ `featured` | boolean | Whether series is featured |
|
||||
| ↳ `volume` | number | Total volume |
|
||||
| ↳ `liquidity` | number | Total liquidity |
|
||||
| ↳ `eventCount` | number | Number of events in series |
|
||||
|
||||
### `polymarket_get_series_by_id`
|
||||
|
||||
@@ -283,23 +178,6 @@ Retrieve a specific series (related market group) by ID from Polymarket
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `series` | object | Series object with details |
|
||||
| ↳ `id` | string | Series ID |
|
||||
| ↳ `ticker` | string | Series ticker |
|
||||
| ↳ `slug` | string | Series slug |
|
||||
| ↳ `title` | string | Series title |
|
||||
| ↳ `seriesType` | string | Series type |
|
||||
| ↳ `recurrence` | string | Recurrence pattern |
|
||||
| ↳ `image` | string | Series image URL |
|
||||
| ↳ `icon` | string | Series icon URL |
|
||||
| ↳ `active` | boolean | Whether series is active |
|
||||
| ↳ `closed` | boolean | Whether series is closed |
|
||||
| ↳ `archived` | boolean | Whether series is archived |
|
||||
| ↳ `featured` | boolean | Whether series is featured |
|
||||
| ↳ `volume` | number | Total volume |
|
||||
| ↳ `liquidity` | number | Total liquidity |
|
||||
| ↳ `commentCount` | number | Comment count |
|
||||
| ↳ `eventCount` | number | Number of events in series |
|
||||
| ↳ `events` | array | Array of events in this series |
|
||||
|
||||
### `polymarket_get_orderbook`

@@ -316,21 +194,6 @@ Retrieve the order book summary for a specific token

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `orderbook` | object | Order book with bids and asks arrays |
| ↳ `market` | string | Market identifier |
| ↳ `asset_id` | string | Asset token ID |
| ↳ `hash` | string | Order book hash |
| ↳ `timestamp` | string | Timestamp |
| ↳ `bids` | array | Bid orders |
| ↳ `price` | string | Bid price |
| ↳ `size` | string | Bid size |
| ↳ `price` | string | Ask price |
| ↳ `size` | string | Ask size |
| ↳ `asks` | array | Ask orders |
| ↳ `price` | string | Ask price |
| ↳ `size` | string | Ask size |
| ↳ `min_order_size` | string | Minimum order size |
| ↳ `tick_size` | string | Tick size |
| ↳ `neg_risk` | boolean | Whether negative risk |

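The bid/ask levels nest one step deeper than the table can show, so the hedged sketch below models the order book shape and derives a mid-price. It assumes `bids` and `asks` are sorted best-first, which the table does not state; the type and function names are illustrative.

```typescript
// Hypothetical types for the orderbook output documented above.
interface OrderbookLevel {
  price: string
  size: string
}

interface PolymarketOrderbook {
  market: string
  asset_id: string
  hash: string
  timestamp: string
  bids: OrderbookLevel[]
  asks: OrderbookLevel[]
  min_order_size: string
  tick_size: string
  neg_risk: boolean
}

// Mid-price from the best bid and ask, assuming levels are sorted best-first.
function midPrice(book: PolymarketOrderbook): number | undefined {
  const bestBid = book.bids[0]
  const bestAsk = book.asks[0]
  if (!bestBid || !bestAsk) return undefined
  return (Number(bestBid.price) + Number(bestAsk.price)) / 2
}
```
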
### `polymarket_get_price`

@@ -383,9 +246,7 @@ Retrieve historical price data for a specific market token

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `history` | array | Array of price history entries |
| ↳ `t` | number | Unix timestamp |
| ↳ `p` | number | Price at timestamp |
| `history` | array | Array of price history entries with timestamp \(t\) and price \(p\) |

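Assuming each history entry carries `t` (a Unix timestamp, taken here as seconds) and `p` (a price), a consumer might reshape the series like this; the helper name is illustrative.

```typescript
// Convert the documented { t, p } entries into (Date, price) pairs for charting.
type PricePoint = { t: number; p: number }

function toSeries(history: PricePoint[]): Array<[Date, number]> {
  // Assumes `t` is Unix seconds; multiply by 1000 for the JavaScript Date constructor.
  return history.map(({ t, p }) => [new Date(t * 1000), p])
}
```
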
### `polymarket_get_last_trade_price`

@@ -402,7 +263,6 @@ Retrieve the last trade price for a specific token

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `price` | string | Last trade price |
| `side` | string | Side of the last trade \(BUY or SELL\) |

### `polymarket_get_spread`

@@ -418,8 +278,7 @@ Retrieve the bid-ask spread for a specific token

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `spread` | object | Spread value between bid and ask |
| ↳ `spread` | string | The spread value |
| `spread` | object | Bid-ask spread with bid and ask prices |

### `polymarket_get_tick_size`

@@ -446,47 +305,13 @@ Retrieve user positions from Polymarket

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `user` | string | Yes | User wallet address |
| `market` | string | No | Condition IDs to filter positions \(comma-separated, mutually exclusive with eventId\) |
| `eventId` | string | No | Event ID to filter positions \(mutually exclusive with market\) |
| `sizeThreshold` | string | No | Minimum position size threshold \(default: 1\) |
| `redeemable` | string | No | Filter for redeemable positions only \(true/false\) |
| `mergeable` | string | No | Filter for mergeable positions only \(true/false\) |
| `sortBy` | string | No | Sort field \(TOKENS, CURRENT, INITIAL, CASHPNL, PERCENTPNL, TITLE, RESOLVING, PRICE, AVGPRICE\) |
| `sortDirection` | string | No | Sort direction \(ASC or DESC\) |
| `title` | string | No | Search filter by title |
| `limit` | string | No | Number of results per page |
| `offset` | string | No | Pagination offset |
| `market` | string | No | Optional market ID to filter positions |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `positions` | array | Array of position objects |
| ↳ `proxyWallet` | string | Proxy wallet address |
| ↳ `asset` | string | Asset token ID |
| ↳ `conditionId` | string | Condition ID |
| ↳ `size` | number | Position size |
| ↳ `avgPrice` | number | Average price |
| ↳ `initialValue` | number | Initial value |
| ↳ `currentValue` | number | Current value |
| ↳ `cashPnl` | number | Cash profit/loss |
| ↳ `percentPnl` | number | Percent profit/loss |
| ↳ `totalBought` | number | Total bought |
| ↳ `realizedPnl` | number | Realized profit/loss |
| ↳ `percentRealizedPnl` | number | Percent realized profit/loss |
| ↳ `curPrice` | number | Current price |
| ↳ `redeemable` | boolean | Whether position is redeemable |
| ↳ `mergeable` | boolean | Whether position is mergeable |
| ↳ `title` | string | Market title |
| ↳ `slug` | string | Market slug |
| ↳ `icon` | string | Market icon URL |
| ↳ `eventSlug` | string | Event slug |
| ↳ `outcome` | string | Outcome name |
| ↳ `outcomeIndex` | number | Outcome index |
| ↳ `oppositeOutcome` | string | Opposite outcome name |
| ↳ `oppositeAsset` | string | Opposite asset token ID |
| ↳ `endDate` | string | End date |
| ↳ `negativeRisk` | boolean | Whether negative risk |

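A workflow step consuming this output might roll the documented fields up into a quick P&L summary; the sketch below is illustrative only and uses just the field names listed above.

```typescript
// Hypothetical aggregation over the documented position fields.
interface PolymarketPosition {
  title: string
  outcome: string
  size: number
  avgPrice: number
  curPrice: number
  initialValue: number
  currentValue: number
  cashPnl: number
  percentPnl: number
  redeemable: boolean
}

function summarizePositions(positions: PolymarketPosition[]) {
  const totalInitial = positions.reduce((sum, p) => sum + p.initialValue, 0)
  const totalCurrent = positions.reduce((sum, p) => sum + p.currentValue, 0)
  return {
    totalInitial,
    totalCurrent,
    unrealizedPnl: totalCurrent - totalInitial, // current minus initial value
    redeemableCount: positions.filter((p) => p.redeemable).length,
  }
}
```
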
### `polymarket_get_trades`

@@ -497,13 +322,8 @@ Retrieve trade history from Polymarket

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `user` | string | No | User wallet address to filter trades |
| `market` | string | No | Market/condition ID to filter trades \(mutually exclusive with eventId\) |
| `eventId` | string | No | Event ID to filter trades \(mutually exclusive with market\) |
| `side` | string | No | Trade direction filter \(BUY or SELL\) |
| `takerOnly` | string | No | Filter for taker trades only \(true/false, default: true\) |
| `filterType` | string | No | Filter type \(CASH or TOKENS\) - requires filterAmount |
| `filterAmount` | string | No | Filter amount threshold - requires filterType |
| `limit` | string | No | Number of results per page \(default: 100, max: 10000\) |
| `market` | string | No | Market ID to filter trades |
| `limit` | string | No | Number of results per page \(max 50\) |
| `offset` | string | No | Pagination offset \(skip this many results\) |

#### Output

@@ -511,141 +331,5 @@ Retrieve trade history from Polymarket

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `trades` | array | Array of trade objects |
| ↳ `proxyWallet` | string | Proxy wallet address |
| ↳ `side` | string | Trade side \(BUY or SELL\) |
| ↳ `asset` | string | Asset token ID |
| ↳ `conditionId` | string | Condition ID |
| ↳ `size` | number | Trade size |
| ↳ `price` | number | Trade price |
| ↳ `timestamp` | number | Unix timestamp |
| ↳ `title` | string | Market title |
| ↳ `slug` | string | Market slug |
| ↳ `icon` | string | Market icon URL |
| ↳ `eventSlug` | string | Event slug |
| ↳ `outcome` | string | Outcome name |
| ↳ `outcomeIndex` | number | Outcome index |
| ↳ `name` | string | Trader name |
| ↳ `pseudonym` | string | Trader pseudonym |
| ↳ `bio` | string | Trader bio |
| ↳ `profileImage` | string | Profile image URL |
| ↳ `profileImageOptimized` | string | Optimized profile image URL |
| ↳ `transactionHash` | string | Transaction hash |

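As a small consumption example, the sketch below nets BUY and SELL sizes per market slug using only the documented trade fields; the type and helper names are assumptions.

```typescript
// Net position change per market slug from the documented trade entries.
interface PolymarketTrade {
  slug: string
  side: 'BUY' | 'SELL'
  size: number
  price: number
  timestamp: number
}

function netSizeBySlug(trades: PolymarketTrade[]): Record<string, number> {
  const net: Record<string, number> = {}
  for (const t of trades) {
    const signed = t.side === 'BUY' ? t.size : -t.size
    net[t.slug] = (net[t.slug] ?? 0) + signed
  }
  return net
}
```
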
### `polymarket_get_activity`

Retrieve on-chain activity for a user including trades, splits, merges, redemptions, rewards, and conversions

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `user` | string | Yes | User wallet address \(0x-prefixed\) |
| `limit` | string | No | Maximum results \(default: 100, max: 500\) |
| `offset` | string | No | Pagination offset \(default: 0, max: 10000\) |
| `market` | string | No | Comma-separated condition IDs \(mutually exclusive with eventId\) |
| `eventId` | string | No | Comma-separated event IDs \(mutually exclusive with market\) |
| `type` | string | No | Activity type filter: TRADE, SPLIT, MERGE, REDEEM, REWARD, CONVERSION, MAKER_REBATE |
| `start` | number | No | Start timestamp \(Unix seconds\) |
| `end` | number | No | End timestamp \(Unix seconds\) |
| `sortBy` | string | No | Sort field: TIMESTAMP, TOKENS, or CASH \(default: TIMESTAMP\) |
| `sortDirection` | string | No | Sort direction: ASC or DESC \(default: DESC\) |
| `side` | string | No | Trade side filter: BUY or SELL \(only applies to trades\) |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `activity` | array | Array of activity entries |
| ↳ `proxyWallet` | string | User proxy wallet address |
| ↳ `timestamp` | number | Unix timestamp of activity |
| ↳ `conditionId` | string | Market condition ID |
| ↳ `type` | string | Activity type \(TRADE, SPLIT, MERGE, REDEEM, REWARD, CONVERSION\) |
| ↳ `size` | number | Size in tokens |
| ↳ `usdcSize` | number | Size in USDC |
| ↳ `transactionHash` | string | Blockchain transaction hash |
| ↳ `price` | number | Price \(for trades\) |
| ↳ `asset` | string | Asset/token ID |
| ↳ `side` | string | Trade side \(BUY/SELL\) |
| ↳ `outcomeIndex` | number | Outcome index |
| ↳ `title` | string | Market title |
| ↳ `slug` | string | Market slug |
| ↳ `icon` | string | Market icon URL |
| ↳ `eventSlug` | string | Event slug |
| ↳ `outcome` | string | Outcome name |
| ↳ `name` | string | User display name |
| ↳ `pseudonym` | string | User pseudonym |
| ↳ `bio` | string | User bio |
| ↳ `profileImage` | string | User profile image URL |
| ↳ `profileImageOptimized` | string | Optimized profile image URL |

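The `type` and `timestamp` fields make it easy to slice this feed; the hedged sketch below keeps only TRADE entries inside a time window and assumes timestamps are Unix seconds, as stated in the input table. Names are illustrative.

```typescript
// Filter the documented activity entries to trades within [start, end] (Unix seconds).
interface ActivityEntry {
  type: 'TRADE' | 'SPLIT' | 'MERGE' | 'REDEEM' | 'REWARD' | 'CONVERSION'
  timestamp: number
  usdcSize: number
  side?: 'BUY' | 'SELL'
}

function tradesBetween(activity: ActivityEntry[], start: number, end: number): ActivityEntry[] {
  return activity.filter((a) => a.type === 'TRADE' && a.timestamp >= start && a.timestamp <= end)
}
```
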
### `polymarket_get_leaderboard`

Retrieve trader leaderboard rankings by profit/loss or volume

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `category` | string | No | Category filter: OVERALL, POLITICS, SPORTS, CRYPTO, CULTURE, MENTIONS, WEATHER, ECONOMICS, TECH, FINANCE \(default: OVERALL\) |
| `timePeriod` | string | No | Time period: DAY, WEEK, MONTH, ALL \(default: DAY\) |
| `orderBy` | string | No | Order by: PNL or VOL \(default: PNL\) |
| `limit` | string | No | Number of results \(1-50, default: 25\) |
| `offset` | string | No | Pagination offset \(0-1000, default: 0\) |
| `user` | string | No | Filter by specific user wallet address |
| `userName` | string | No | Filter by username |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `leaderboard` | array | Array of leaderboard entries |
| ↳ `rank` | string | Leaderboard rank position |
| ↳ `proxyWallet` | string | User proxy wallet address |
| ↳ `userName` | string | User display name |
| ↳ `vol` | number | Trading volume |
| ↳ `pnl` | number | Profit and loss |
| ↳ `profileImage` | string | User profile image URL |
| ↳ `xUsername` | string | Twitter/X username |
| ↳ `verifiedBadge` | boolean | Whether user has verified badge |

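For reference, an input object matching the table above might look like the sketch below; the concrete values are placeholders, and all values are passed as strings, matching the documented types.

```typescript
// Illustrative leaderboard request values mirroring the parameter table above.
const leaderboardParams = {
  category: 'CRYPTO', // OVERALL, POLITICS, SPORTS, CRYPTO, ...
  timePeriod: 'WEEK', // DAY, WEEK, MONTH, ALL
  orderBy: 'VOL', // PNL or VOL
  limit: '10', // 1-50, default 25
  offset: '0', // 0-1000, default 0
}
```
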
### `polymarket_get_holders`

Retrieve top holders of a specific market token

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `market` | string | Yes | Comma-separated list of condition IDs |
| `limit` | string | No | Number of holders to return \(0-20, default: 20\) |
| `minBalance` | string | No | Minimum balance threshold \(default: 1\) |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `holders` | array | Array of market holder groups by token |
| ↳ `token` | string | Token/asset ID |
| ↳ `holders` | array | Array of holders for this token |
| ↳ `proxyWallet` | string | Holder wallet address |
| ↳ `bio` | string | Holder bio |
| ↳ `asset` | string | Asset ID |
| ↳ `pseudonym` | string | Holder pseudonym |
| ↳ `amount` | number | Amount held |
| ↳ `displayUsernamePublic` | boolean | Whether username is publicly displayed |
| ↳ `outcomeIndex` | number | Outcome index |
| ↳ `name` | string | Holder display name |
| ↳ `profileImage` | string | Profile image URL |
| ↳ `profileImageOptimized` | string | Optimized profile image URL |

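Because the holder output is grouped per token, a small sketch of the nested shape can help; the interface names below are illustrative, and only documented fields are used.

```typescript
// Hypothetical types for the nested holders output: one group per token,
// each containing the documented holder entries.
interface MarketHolder {
  proxyWallet: string
  name: string
  pseudonym: string
  amount: number
  outcomeIndex: number
  displayUsernamePublic: boolean
  profileImage: string
}

interface HolderGroup {
  token: string
  holders: MarketHolder[]
}

// Example: total amount held by the returned holders of each token.
function totalsByToken(groups: HolderGroup[]): Record<string, number> {
  return Object.fromEntries(
    groups.map((g) => [g.token, g.holders.reduce((sum, h) => sum + h.amount, 0)])
  )
}
```
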
1551
apps/docs/content/docs/en/tools/spotify.mdx
Normal file
File diff suppressed because it is too large
@@ -299,7 +299,7 @@ Upload a file to a Supabase storage bucket

| `bucket` | string | Yes | The name of the storage bucket |
| `fileName` | string | Yes | The name of the file \(e.g., "document.pdf", "image.jpg"\) |
| `path` | string | No | Optional folder path \(e.g., "folder/subfolder/"\) |
| `fileData` | json | Yes | File to upload - UserFile object \(basic mode\) or string content \(advanced mode: base64 or plain text\). Supports data URLs. |
| `fileContent` | string | Yes | The file content \(base64 encoded for binary files, or plain text\) |
| `contentType` | string | No | MIME type of the file \(e.g., "image/jpeg", "text/plain"\) |
| `upsert` | boolean | No | If true, overwrites existing file \(default: false\) |
| `apiKey` | string | Yes | Your Supabase service role secret key |

@@ -309,7 +309,7 @@ Upload a file to a Supabase storage bucket

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `message` | string | Operation status message |
| `results` | object | Upload result including file path, bucket, and public URL |
| `results` | object | Upload result including file path and metadata |

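For comparison, the equivalent direct call with `@supabase/supabase-js` looks roughly like the sketch below; the tool wraps similar behavior, but the project URL, bucket, and file are placeholders and the internals may differ.

```typescript
// Minimal sketch of a storage upload with the official client; values are placeholders.
import { createClient } from '@supabase/supabase-js'

const supabase = createClient(
  'https://your-project.supabase.co',
  process.env.SUPABASE_SERVICE_ROLE_KEY! // service role secret key, as in `apiKey` above
)

async function uploadDocument(content: string) {
  const { data, error } = await supabase.storage
    .from('documents') // bucket
    .upload('folder/subfolder/document.pdf', content, {
      contentType: 'application/pdf',
      upsert: false, // matches the `upsert` parameter above
    })
  if (error) throw error
  return data?.path
}
```
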
### `supabase_storage_download`

@@ -10,7 +10,6 @@ describe('OAuth Token API Routes', () => {
const mockGetUserId = vi.fn()
const mockGetCredential = vi.fn()
const mockRefreshTokenIfNeeded = vi.fn()
const mockGetOAuthToken = vi.fn()
const mockAuthorizeCredentialUse = vi.fn()
const mockCheckHybridAuth = vi.fn()

@@ -30,7 +29,6 @@ describe('OAuth Token API Routes', () => {
getUserId: mockGetUserId,
getCredential: mockGetCredential,
refreshTokenIfNeeded: mockRefreshTokenIfNeeded,
getOAuthToken: mockGetOAuthToken,
}))

vi.doMock('@sim/logger', () => ({
@@ -232,140 +230,6 @@ describe('OAuth Token API Routes', () => {
expect(response.status).toBe(401)
expect(data).toHaveProperty('error', 'Failed to refresh access token')
})

describe('credentialAccountUserId + providerId path', () => {
it('should reject unauthenticated requests', async () => {
mockCheckHybridAuth.mockResolvedValueOnce({
success: false,
error: 'Authentication required',
})

const req = createMockRequest('POST', {
credentialAccountUserId: 'target-user-id',
providerId: 'google',
})

const { POST } = await import('@/app/api/auth/oauth/token/route')

const response = await POST(req)
const data = await response.json()

expect(response.status).toBe(401)
expect(data).toHaveProperty('error', 'User not authenticated')
expect(mockGetOAuthToken).not.toHaveBeenCalled()
})

it('should reject API key authentication', async () => {
mockCheckHybridAuth.mockResolvedValueOnce({
success: true,
authType: 'api_key',
userId: 'test-user-id',
})

const req = createMockRequest('POST', {
credentialAccountUserId: 'test-user-id',
providerId: 'google',
})

const { POST } = await import('@/app/api/auth/oauth/token/route')

const response = await POST(req)
const data = await response.json()

expect(response.status).toBe(401)
expect(data).toHaveProperty('error', 'User not authenticated')
expect(mockGetOAuthToken).not.toHaveBeenCalled()
})

it('should reject internal JWT authentication', async () => {
mockCheckHybridAuth.mockResolvedValueOnce({
success: true,
authType: 'internal_jwt',
userId: 'test-user-id',
})

const req = createMockRequest('POST', {
credentialAccountUserId: 'test-user-id',
providerId: 'google',
})

const { POST } = await import('@/app/api/auth/oauth/token/route')

const response = await POST(req)
const data = await response.json()

expect(response.status).toBe(401)
expect(data).toHaveProperty('error', 'User not authenticated')
expect(mockGetOAuthToken).not.toHaveBeenCalled()
})

it('should reject requests for other users credentials', async () => {
mockCheckHybridAuth.mockResolvedValueOnce({
success: true,
authType: 'session',
userId: 'attacker-user-id',
})

const req = createMockRequest('POST', {
credentialAccountUserId: 'victim-user-id',
providerId: 'google',
})

const { POST } = await import('@/app/api/auth/oauth/token/route')

const response = await POST(req)
const data = await response.json()

expect(response.status).toBe(403)
expect(data).toHaveProperty('error', 'Unauthorized')
expect(mockGetOAuthToken).not.toHaveBeenCalled()
})

it('should allow session-authenticated users to access their own credentials', async () => {
mockCheckHybridAuth.mockResolvedValueOnce({
success: true,
authType: 'session',
userId: 'test-user-id',
})
mockGetOAuthToken.mockResolvedValueOnce('valid-access-token')

const req = createMockRequest('POST', {
credentialAccountUserId: 'test-user-id',
providerId: 'google',
})

const { POST } = await import('@/app/api/auth/oauth/token/route')

const response = await POST(req)
const data = await response.json()

expect(response.status).toBe(200)
expect(data).toHaveProperty('accessToken', 'valid-access-token')
expect(mockGetOAuthToken).toHaveBeenCalledWith('test-user-id', 'google')
})

it('should return 404 when credential not found for user', async () => {
mockCheckHybridAuth.mockResolvedValueOnce({
success: true,
authType: 'session',
userId: 'test-user-id',
})
mockGetOAuthToken.mockResolvedValueOnce(null)

const req = createMockRequest('POST', {
credentialAccountUserId: 'test-user-id',
providerId: 'nonexistent-provider',
})

const { POST } = await import('@/app/api/auth/oauth/token/route')

const response = await POST(req)
const data = await response.json()

expect(response.status).toBe(404)
expect(data.error).toContain('No credential found')
})
})
})

/**

@@ -71,22 +71,6 @@ export async function POST(request: NextRequest) {
providerId,
})

const auth = await checkHybridAuth(request, { requireWorkflowId: false })
if (!auth.success || auth.authType !== 'session' || !auth.userId) {
logger.warn(`[${requestId}] Unauthorized request for credentialAccountUserId path`, {
success: auth.success,
authType: auth.authType,
})
return NextResponse.json({ error: 'User not authenticated' }, { status: 401 })
}

if (auth.userId !== credentialAccountUserId) {
logger.warn(
`[${requestId}] User ${auth.userId} attempted to access credentials for ${credentialAccountUserId}`
)
return NextResponse.json({ error: 'Unauthorized' }, { status: 403 })
}

try {
const accessToken = await getOAuthToken(credentialAccountUserId, providerId)
if (!accessToken) {

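For context, a session-authenticated client would hit this path roughly as follows; the endpoint path comes from the route import in the tests above, the body fields mirror those tests, and the helper is a sketch rather than code from this change.

```typescript
// Sketch of a client-side call to the token route. Only a browser session (cookie)
// is accepted on this path; API-key and internal-JWT callers receive 401, and a
// mismatched credentialAccountUserId receives 403.
async function fetchOwnAccessToken(
  credentialAccountUserId: string, // must match the session user
  providerId: string // e.g. 'google'
): Promise<string> {
  const res = await fetch('/api/auth/oauth/token', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ credentialAccountUserId, providerId }),
  })
  if (!res.ok) throw new Error(`Token request failed: ${res.status}`)
  const data = await res.json()
  return data.accessToken
}
```
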
@@ -26,9 +26,8 @@ vi.mock('@/serializer', () => ({
Serializer: vi.fn(),
}))

vi.mock('@/lib/workflows/subblocks', () => ({
mergeSubblockStateWithValues: vi.fn().mockReturnValue({}),
mergeSubBlockValues: vi.fn().mockReturnValue({}),
vi.mock('@/stores/workflows/server-utils', () => ({
mergeSubblockState: vi.fn().mockReturnValue({}),
}))

const mockDecryptSecret = vi.fn()

@@ -1,19 +1,13 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { NextResponse } from 'next/server'
import { validateAlphanumericId } from '@/lib/core/security/input-validation'

export const dynamic = 'force-dynamic'

const logger = createLogger('AsanaAddCommentAPI')

export async function POST(request: NextRequest) {
export async function POST(request: Request) {
try {
const auth = await checkInternalAuth(request)
if (!auth.success || !auth.userId) {
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}

const { accessToken, taskGid, text } = await request.json()

if (!accessToken) {

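The guard shown in this hunk repeats across the route files that follow. Purely as an illustration (not part of this diff), the pattern could be captured once in a shared helper like the sketch below, assuming `checkInternalAuth` returns `{ success, userId, error }` as it is used here.

```typescript
// Hypothetical consolidation of the repeated auth guard; not part of this change set.
import { NextResponse } from 'next/server'
import { checkInternalAuth } from '@/lib/auth/hybrid'

async function requireInternalAuth(request: Request): Promise<NextResponse | null> {
  const auth = await checkInternalAuth(request)
  if (!auth.success || !auth.userId) {
    return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
  }
  return null // caller proceeds only when no error response is returned
}

// Usage inside a handler:
//   const denied = await requireInternalAuth(request)
//   if (denied) return denied
```
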
@@ -1,19 +1,13 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||
import { NextResponse } from 'next/server'
|
||||
import { validateAlphanumericId } from '@/lib/core/security/input-validation'
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
const logger = createLogger('AsanaCreateTaskAPI')
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
export async function POST(request: Request) {
|
||||
try {
|
||||
const auth = await checkInternalAuth(request)
|
||||
if (!auth.success || !auth.userId) {
|
||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const { accessToken, workspace, name, notes, assignee, due_on } = await request.json()
|
||||
|
||||
if (!accessToken) {
|
||||
|
||||
@@ -1,19 +1,13 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||
import { NextResponse } from 'next/server'
|
||||
import { validateAlphanumericId } from '@/lib/core/security/input-validation'
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
const logger = createLogger('AsanaGetProjectsAPI')
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
export async function POST(request: Request) {
|
||||
try {
|
||||
const auth = await checkInternalAuth(request)
|
||||
if (!auth.success || !auth.userId) {
|
||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const { accessToken, workspace } = await request.json()
|
||||
|
||||
if (!accessToken) {
|
||||
|
||||
@@ -1,19 +1,13 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||
import { NextResponse } from 'next/server'
|
||||
import { validateAlphanumericId } from '@/lib/core/security/input-validation'
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
const logger = createLogger('AsanaGetTaskAPI')
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
export async function POST(request: Request) {
|
||||
try {
|
||||
const auth = await checkInternalAuth(request)
|
||||
if (!auth.success || !auth.userId) {
|
||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const { accessToken, taskGid, workspace, project, limit } = await request.json()
|
||||
|
||||
if (!accessToken) {
|
||||
|
||||
@@ -1,19 +1,13 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||
import { NextResponse } from 'next/server'
|
||||
import { validateAlphanumericId } from '@/lib/core/security/input-validation'
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
const logger = createLogger('AsanaSearchTasksAPI')
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
export async function POST(request: Request) {
|
||||
try {
|
||||
const auth = await checkInternalAuth(request)
|
||||
if (!auth.success || !auth.userId) {
|
||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const { accessToken, workspace, text, assignee, projects, completed } = await request.json()
|
||||
|
||||
if (!accessToken) {
|
||||
|
||||
@@ -1,19 +1,13 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||
import { NextResponse } from 'next/server'
|
||||
import { validateAlphanumericId } from '@/lib/core/security/input-validation'
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
const logger = createLogger('AsanaUpdateTaskAPI')
|
||||
|
||||
export async function PUT(request: NextRequest) {
|
||||
export async function PUT(request: Request) {
|
||||
try {
|
||||
const auth = await checkInternalAuth(request)
|
||||
if (!auth.success || !auth.userId) {
|
||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const { accessToken, taskGid, name, notes, assignee, completed, due_on } = await request.json()
|
||||
|
||||
if (!accessToken) {
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
|
||||
import { NextResponse } from 'next/server'
|
||||
import { validateAlphanumericId, validateJiraCloudId } from '@/lib/core/security/input-validation'
|
||||
import { getConfluenceCloudId } from '@/tools/confluence/utils'
|
||||
|
||||
@@ -9,13 +8,8 @@ const logger = createLogger('ConfluenceAttachmentAPI')
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
// Delete an attachment
|
||||
export async function DELETE(request: NextRequest) {
|
||||
export async function DELETE(request: Request) {
|
||||
try {
|
||||
const auth = await checkSessionOrInternalAuth(request)
|
||||
if (!auth.success || !auth.userId) {
|
||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const { domain, accessToken, cloudId: providedCloudId, attachmentId } = await request.json()
|
||||
|
||||
if (!domain) {
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
|
||||
import { NextResponse } from 'next/server'
|
||||
import { validateAlphanumericId, validateJiraCloudId } from '@/lib/core/security/input-validation'
|
||||
import { getConfluenceCloudId } from '@/tools/confluence/utils'
|
||||
|
||||
@@ -9,13 +8,8 @@ const logger = createLogger('ConfluenceAttachmentsAPI')
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
// List attachments on a page
|
||||
export async function GET(request: NextRequest) {
|
||||
export async function GET(request: Request) {
|
||||
try {
|
||||
const auth = await checkSessionOrInternalAuth(request)
|
||||
if (!auth.success || !auth.userId) {
|
||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const { searchParams } = new URL(request.url)
|
||||
const domain = searchParams.get('domain')
|
||||
const accessToken = searchParams.get('accessToken')
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
|
||||
import { validateAlphanumericId, validateJiraCloudId } from '@/lib/core/security/input-validation'
|
||||
import { getConfluenceCloudId } from '@/tools/confluence/utils'
|
||||
|
||||
@@ -47,13 +46,8 @@ const deleteCommentSchema = z
|
||||
)
|
||||
|
||||
// Update a comment
|
||||
export async function PUT(request: NextRequest) {
|
||||
export async function PUT(request: Request) {
|
||||
try {
|
||||
const auth = await checkSessionOrInternalAuth(request)
|
||||
if (!auth.success || !auth.userId) {
|
||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const body = await request.json()
|
||||
|
||||
const validation = putCommentSchema.safeParse(body)
|
||||
@@ -134,13 +128,8 @@ export async function PUT(request: NextRequest) {
|
||||
}
|
||||
|
||||
// Delete a comment
|
||||
export async function DELETE(request: NextRequest) {
|
||||
export async function DELETE(request: Request) {
|
||||
try {
|
||||
const auth = await checkSessionOrInternalAuth(request)
|
||||
if (!auth.success || !auth.userId) {
|
||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const body = await request.json()
|
||||
|
||||
const validation = deleteCommentSchema.safeParse(body)
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
|
||||
import { NextResponse } from 'next/server'
|
||||
import { validateAlphanumericId, validateJiraCloudId } from '@/lib/core/security/input-validation'
|
||||
import { getConfluenceCloudId } from '@/tools/confluence/utils'
|
||||
|
||||
@@ -9,13 +8,8 @@ const logger = createLogger('ConfluenceCommentsAPI')
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
// Create a comment
|
||||
export async function POST(request: NextRequest) {
|
||||
export async function POST(request: Request) {
|
||||
try {
|
||||
const auth = await checkSessionOrInternalAuth(request)
|
||||
if (!auth.success || !auth.userId) {
|
||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const { domain, accessToken, cloudId: providedCloudId, pageId, comment } = await request.json()
|
||||
|
||||
if (!domain) {
|
||||
@@ -92,13 +86,8 @@ export async function POST(request: NextRequest) {
|
||||
}
|
||||
|
||||
// List comments on a page
|
||||
export async function GET(request: NextRequest) {
|
||||
export async function GET(request: Request) {
|
||||
try {
|
||||
const auth = await checkSessionOrInternalAuth(request)
|
||||
if (!auth.success || !auth.userId) {
|
||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const { searchParams } = new URL(request.url)
|
||||
const domain = searchParams.get('domain')
|
||||
const accessToken = searchParams.get('accessToken')
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
|
||||
import { NextResponse } from 'next/server'
|
||||
import { validateAlphanumericId, validateJiraCloudId } from '@/lib/core/security/input-validation'
|
||||
import { getConfluenceCloudId } from '@/tools/confluence/utils'
|
||||
|
||||
@@ -8,13 +7,8 @@ const logger = createLogger('ConfluenceCreatePageAPI')
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
export async function POST(request: Request) {
|
||||
try {
|
||||
const auth = await checkSessionOrInternalAuth(request)
|
||||
if (!auth.success || !auth.userId) {
|
||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const {
|
||||
domain,
|
||||
accessToken,
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
|
||||
import { NextResponse } from 'next/server'
|
||||
import { validateAlphanumericId, validateJiraCloudId } from '@/lib/core/security/input-validation'
|
||||
import { getConfluenceCloudId } from '@/tools/confluence/utils'
|
||||
|
||||
@@ -9,13 +8,8 @@ const logger = createLogger('ConfluenceLabelsAPI')
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
// Add a label to a page
|
||||
export async function POST(request: NextRequest) {
|
||||
export async function POST(request: Request) {
|
||||
try {
|
||||
const auth = await checkSessionOrInternalAuth(request)
|
||||
if (!auth.success || !auth.userId) {
|
||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const {
|
||||
domain,
|
||||
accessToken,
|
||||
@@ -93,13 +87,8 @@ export async function POST(request: NextRequest) {
|
||||
}
|
||||
|
||||
// List labels on a page
|
||||
export async function GET(request: NextRequest) {
|
||||
export async function GET(request: Request) {
|
||||
try {
|
||||
const auth = await checkSessionOrInternalAuth(request)
|
||||
if (!auth.success || !auth.userId) {
|
||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const { searchParams } = new URL(request.url)
|
||||
const domain = searchParams.get('domain')
|
||||
const accessToken = searchParams.get('accessToken')
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
|
||||
import { validateAlphanumericId, validateJiraCloudId } from '@/lib/core/security/input-validation'
|
||||
import { getConfluenceCloudId } from '@/tools/confluence/utils'
|
||||
|
||||
@@ -74,13 +73,8 @@ const deletePageSchema = z
|
||||
}
|
||||
)
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
export async function POST(request: Request) {
|
||||
try {
|
||||
const auth = await checkSessionOrInternalAuth(request)
|
||||
if (!auth.success || !auth.userId) {
|
||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const body = await request.json()
|
||||
|
||||
const validation = postPageSchema.safeParse(body)
|
||||
@@ -150,13 +144,8 @@ export async function POST(request: NextRequest) {
|
||||
}
|
||||
}
|
||||
|
||||
export async function PUT(request: NextRequest) {
|
||||
export async function PUT(request: Request) {
|
||||
try {
|
||||
const auth = await checkSessionOrInternalAuth(request)
|
||||
if (!auth.success || !auth.userId) {
|
||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const body = await request.json()
|
||||
|
||||
const validation = putPageSchema.safeParse(body)
|
||||
@@ -259,13 +248,8 @@ export async function PUT(request: NextRequest) {
|
||||
}
|
||||
}
|
||||
|
||||
export async function DELETE(request: NextRequest) {
|
||||
export async function DELETE(request: Request) {
|
||||
try {
|
||||
const auth = await checkSessionOrInternalAuth(request)
|
||||
if (!auth.success || !auth.userId) {
|
||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const body = await request.json()
|
||||
|
||||
const validation = deletePageSchema.safeParse(body)
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
|
||||
import { NextResponse } from 'next/server'
|
||||
import { validateJiraCloudId } from '@/lib/core/security/input-validation'
|
||||
import { getConfluenceCloudId } from '@/tools/confluence/utils'
|
||||
|
||||
@@ -9,13 +8,8 @@ const logger = createLogger('ConfluencePagesAPI')
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
// List pages or search pages
|
||||
export async function POST(request: NextRequest) {
|
||||
export async function POST(request: Request) {
|
||||
try {
|
||||
const auth = await checkSessionOrInternalAuth(request)
|
||||
if (!auth.success || !auth.userId) {
|
||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const {
|
||||
domain,
|
||||
accessToken,
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
|
||||
import { NextResponse } from 'next/server'
|
||||
import { validateJiraCloudId } from '@/lib/core/security/input-validation'
|
||||
import { getConfluenceCloudId } from '@/tools/confluence/utils'
|
||||
|
||||
@@ -8,13 +7,8 @@ export const dynamic = 'force-dynamic'
|
||||
|
||||
const logger = createLogger('Confluence Search')
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
export async function POST(request: Request) {
|
||||
try {
|
||||
const auth = await checkSessionOrInternalAuth(request)
|
||||
if (!auth.success || !auth.userId) {
|
||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const {
|
||||
domain,
|
||||
accessToken,
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
|
||||
import { NextResponse } from 'next/server'
|
||||
import { validateAlphanumericId, validateJiraCloudId } from '@/lib/core/security/input-validation'
|
||||
import { getConfluenceCloudId } from '@/tools/confluence/utils'
|
||||
|
||||
@@ -9,13 +8,8 @@ const logger = createLogger('ConfluenceSpaceAPI')
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
// Get a specific space
|
||||
export async function GET(request: NextRequest) {
|
||||
export async function GET(request: Request) {
|
||||
try {
|
||||
const auth = await checkSessionOrInternalAuth(request)
|
||||
if (!auth.success || !auth.userId) {
|
||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const { searchParams } = new URL(request.url)
|
||||
const domain = searchParams.get('domain')
|
||||
const accessToken = searchParams.get('accessToken')
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
|
||||
import { NextResponse } from 'next/server'
|
||||
import { validateJiraCloudId } from '@/lib/core/security/input-validation'
|
||||
import { getConfluenceCloudId } from '@/tools/confluence/utils'
|
||||
|
||||
@@ -9,13 +8,8 @@ const logger = createLogger('ConfluenceSpacesAPI')
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
// List all spaces
|
||||
export async function GET(request: NextRequest) {
|
||||
export async function GET(request: Request) {
|
||||
try {
|
||||
const auth = await checkSessionOrInternalAuth(request)
|
||||
if (!auth.success || !auth.userId) {
|
||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const { searchParams } = new URL(request.url)
|
||||
const domain = searchParams.get('domain')
|
||||
const accessToken = searchParams.get('accessToken')
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
|
||||
import { validateAlphanumericId, validateJiraCloudId } from '@/lib/core/security/input-validation'
|
||||
import { processSingleFileToUserFile } from '@/lib/uploads/utils/file-utils'
|
||||
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
|
||||
@@ -12,11 +11,6 @@ export const dynamic = 'force-dynamic'
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
try {
|
||||
const auth = await checkSessionOrInternalAuth(request)
|
||||
if (!auth.success || !auth.userId) {
|
||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const body = await request.json()
|
||||
const { domain, accessToken, cloudId: providedCloudId, pageId, file, fileName, comment } = body
|
||||
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||
import { NextResponse } from 'next/server'
|
||||
import { validateNumericId } from '@/lib/core/security/input-validation'
|
||||
|
||||
interface DiscordChannel {
|
||||
@@ -14,12 +13,7 @@ export const dynamic = 'force-dynamic'
|
||||
|
||||
const logger = createLogger('DiscordChannelsAPI')
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const auth = await checkInternalAuth(request)
|
||||
if (!auth.success || !auth.userId) {
|
||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
export async function POST(request: Request) {
|
||||
try {
|
||||
const { botToken, serverId, channelId } = await request.json()
|
||||
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||
import { NextResponse } from 'next/server'
|
||||
import { validateNumericId } from '@/lib/core/security/input-validation'
|
||||
|
||||
interface DiscordServer {
|
||||
@@ -13,12 +12,7 @@ export const dynamic = 'force-dynamic'
|
||||
|
||||
const logger = createLogger('DiscordServersAPI')
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const auth = await checkInternalAuth(request)
|
||||
if (!auth.success || !auth.userId) {
|
||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
export async function POST(request: Request) {
|
||||
try {
|
||||
const { botToken, serverId } = await request.json()
|
||||
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { authorizeCredentialUse } from '@/lib/auth/credential-access'
|
||||
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
|
||||
import { validateAlphanumericId } from '@/lib/core/security/input-validation'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import { refreshAccessTokenIfNeeded } from '@/app/api/auth/oauth/utils'
|
||||
@@ -16,11 +15,6 @@ export async function GET(request: NextRequest) {
|
||||
const requestId = generateRequestId()
|
||||
logger.info(`[${requestId}] Google Drive file request received`)
|
||||
|
||||
const auth = await checkSessionOrInternalAuth(request)
|
||||
if (!auth.success || !auth.userId) {
|
||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
try {
|
||||
const { searchParams } = new URL(request.url)
|
||||
const credentialId = searchParams.get('credentialId')
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { authorizeCredentialUse } from '@/lib/auth/credential-access'
|
||||
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
|
||||
import { validateAlphanumericId } from '@/lib/core/security/input-validation'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import { refreshAccessTokenIfNeeded } from '@/app/api/auth/oauth/utils'
|
||||
@@ -73,12 +73,14 @@ export async function GET(request: NextRequest) {
|
||||
const requestId = generateRequestId()
|
||||
logger.info(`[${requestId}] Google Drive files request received`)
|
||||
|
||||
const auth = await checkSessionOrInternalAuth(request)
|
||||
if (!auth.success || !auth.userId) {
|
||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
try {
|
||||
const session = await getSession()
|
||||
|
||||
if (!session?.user?.id) {
|
||||
logger.warn(`[${requestId}] Unauthenticated request rejected`)
|
||||
return NextResponse.json({ error: 'User not authenticated' }, { status: 401 })
|
||||
}
|
||||
|
||||
const { searchParams } = new URL(request.url)
|
||||
const credentialId = searchParams.get('credentialId')
|
||||
const mimeType = searchParams.get('mimeType')
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||
import { createDynamoDBClient, deleteItem } from '@/app/api/tools/dynamodb/utils'
|
||||
|
||||
const DeleteSchema = z.object({
|
||||
@@ -14,13 +13,8 @@ const DeleteSchema = z.object({
|
||||
conditionExpression: z.string().optional(),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
export async function POST(request: Request) {
|
||||
try {
|
||||
const auth = await checkInternalAuth(request)
|
||||
if (!auth.success || !auth.userId) {
|
||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const body = await request.json()
|
||||
const validatedData = DeleteSchema.parse(body)
|
||||
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||
import { createDynamoDBClient, getItem } from '@/app/api/tools/dynamodb/utils'
|
||||
|
||||
const GetSchema = z.object({
|
||||
@@ -20,13 +19,8 @@ const GetSchema = z.object({
|
||||
}),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
export async function POST(request: Request) {
|
||||
try {
|
||||
const auth = await checkInternalAuth(request)
|
||||
if (!auth.success || !auth.userId) {
|
||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const body = await request.json()
|
||||
const validatedData = GetSchema.parse(body)
|
||||
|
||||
|
||||
@@ -2,7 +2,6 @@ import { randomUUID } from 'crypto'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||
import { createRawDynamoDBClient, describeTable, listTables } from '@/app/api/tools/dynamodb/utils'
|
||||
|
||||
const logger = createLogger('DynamoDBIntrospectAPI')
|
||||
@@ -18,11 +17,6 @@ export async function POST(request: NextRequest) {
|
||||
const requestId = randomUUID().slice(0, 8)
|
||||
|
||||
try {
|
||||
const auth = await checkInternalAuth(request)
|
||||
if (!auth.success || !auth.userId) {
|
||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const body = await request.json()
|
||||
const params = IntrospectSchema.parse(body)
|
||||
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||
import { createDynamoDBClient, putItem } from '@/app/api/tools/dynamodb/utils'
|
||||
|
||||
const PutSchema = z.object({
|
||||
@@ -13,13 +12,8 @@ const PutSchema = z.object({
|
||||
}),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
export async function POST(request: Request) {
|
||||
try {
|
||||
const auth = await checkInternalAuth(request)
|
||||
if (!auth.success || !auth.userId) {
|
||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const body = await request.json()
|
||||
const validatedData = PutSchema.parse(body)
|
||||
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||
import { createDynamoDBClient, queryItems } from '@/app/api/tools/dynamodb/utils'
|
||||
|
||||
const QuerySchema = z.object({
|
||||
@@ -16,13 +15,8 @@ const QuerySchema = z.object({
|
||||
limit: z.number().positive().optional(),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
export async function POST(request: Request) {
|
||||
try {
|
||||
const auth = await checkInternalAuth(request)
|
||||
if (!auth.success || !auth.userId) {
|
||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const body = await request.json()
|
||||
const validatedData = QuerySchema.parse(body)
|
||||
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||
import { createDynamoDBClient, scanItems } from '@/app/api/tools/dynamodb/utils'
|
||||
|
||||
const ScanSchema = z.object({
|
||||
@@ -15,13 +14,8 @@ const ScanSchema = z.object({
|
||||
limit: z.number().positive().optional(),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
export async function POST(request: Request) {
|
||||
try {
|
||||
const auth = await checkInternalAuth(request)
|
||||
if (!auth.success || !auth.userId) {
|
||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const body = await request.json()
|
||||
const validatedData = ScanSchema.parse(body)
|
||||
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||
import { createDynamoDBClient, updateItem } from '@/app/api/tools/dynamodb/utils'
|
||||
|
||||
const UpdateSchema = z.object({
|
||||
@@ -17,13 +16,8 @@ const UpdateSchema = z.object({
|
||||
conditionExpression: z.string().optional(),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
export async function POST(request: Request) {
|
||||
try {
|
||||
const auth = await checkInternalAuth(request)
|
||||
if (!auth.success || !auth.userId) {
|
||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const body = await request.json()
|
||||
const validatedData = UpdateSchema.parse(body)
|
||||
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { authorizeCredentialUse } from '@/lib/auth/credential-access'
|
||||
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import { refreshAccessTokenIfNeeded } from '@/app/api/auth/oauth/utils'
|
||||
|
||||
@@ -30,11 +29,6 @@ export async function GET(request: NextRequest) {
|
||||
const requestId = generateRequestId()
|
||||
logger.info(`[${requestId}] Google Sheets sheets request received`)
|
||||
|
||||
const auth = await checkSessionOrInternalAuth(request)
|
||||
if (!auth.success || !auth.userId) {
|
||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
try {
|
||||
const { searchParams } = new URL(request.url)
|
||||
const credentialId = searchParams.get('credentialId')
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
|
||||
import { NextResponse } from 'next/server'
|
||||
import { validateJiraCloudId, validateJiraIssueKey } from '@/lib/core/security/input-validation'
|
||||
import { getJiraCloudId } from '@/tools/jira/utils'
|
||||
|
||||
@@ -8,13 +7,8 @@ export const dynamic = 'force-dynamic'
|
||||
|
||||
const logger = createLogger('JiraIssueAPI')
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
export async function POST(request: Request) {
|
||||
try {
|
||||
const auth = await checkSessionOrInternalAuth(request)
|
||||
if (!auth.success || !auth.userId) {
|
||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const { domain, accessToken, issueId, cloudId: providedCloudId } = await request.json()
|
||||
if (!domain) {
|
||||
logger.error('Missing domain in request')
|
||||
|
||||
@@ -1,6 +1,5 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
import { NextResponse } from 'next/server'
import { validateAlphanumericId, validateJiraCloudId } from '@/lib/core/security/input-validation'
import { getJiraCloudId } from '@/tools/jira/utils'

@@ -27,13 +26,8 @@ const validateRequiredParams = (domain: string | null, accessToken: string | nul
return null
}

export async function POST(request: NextRequest) {
export async function POST(request: Request) {
try {
const auth = await checkSessionOrInternalAuth(request)
if (!auth.success || !auth.userId) {
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}

const { domain, accessToken, issueKeys = [], cloudId: providedCloudId } = await request.json()

const validationError = validateRequiredParams(domain || null, accessToken || null)
@@ -107,13 +101,8 @@ export async function POST(request: NextRequest) {
}
}

export async function GET(request: NextRequest) {
export async function GET(request: Request) {
try {
const auth = await checkSessionOrInternalAuth(request)
if (!auth.success || !auth.userId) {
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}

const url = new URL(request.url)
const domain = url.searchParams.get('domain')?.trim()
const accessToken = url.searchParams.get('accessToken')

@@ -1,6 +1,5 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
import { NextResponse } from 'next/server'
import { validateAlphanumericId, validateJiraCloudId } from '@/lib/core/security/input-validation'
import { getJiraCloudId } from '@/tools/jira/utils'

@@ -8,13 +7,8 @@ export const dynamic = 'force-dynamic'

const logger = createLogger('JiraProjectsAPI')

export async function GET(request: NextRequest) {
export async function GET(request: Request) {
try {
const auth = await checkSessionOrInternalAuth(request)
if (!auth.success || !auth.userId) {
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}

const url = new URL(request.url)
const domain = url.searchParams.get('domain')?.trim()
const accessToken = url.searchParams.get('accessToken')
@@ -104,13 +98,8 @@ export async function GET(request: NextRequest) {
}
}

export async function POST(request: NextRequest) {
export async function POST(request: Request) {
try {
const auth = await checkSessionOrInternalAuth(request)
if (!auth.success || !auth.userId) {
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}

const { domain, accessToken, projectId, cloudId: providedCloudId } = await request.json()

if (!domain) {

@@ -1,7 +1,6 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { NextResponse } from 'next/server'
import { z } from 'zod'
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
import { validateJiraCloudId, validateJiraIssueKey } from '@/lib/core/security/input-validation'
import { getJiraCloudId } from '@/tools/jira/utils'

@@ -22,13 +21,8 @@ const jiraUpdateSchema = z.object({
cloudId: z.string().optional(),
})

export async function PUT(request: NextRequest) {
export async function PUT(request: Request) {
try {
const auth = await checkSessionOrInternalAuth(request)
if (!auth.success || !auth.userId) {
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}

const body = await request.json()
const validation = jiraUpdateSchema.safeParse(body)


@@ -1,6 +1,5 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
import { NextResponse } from 'next/server'
import { validateAlphanumericId, validateJiraCloudId } from '@/lib/core/security/input-validation'
import { getJiraCloudId } from '@/tools/jira/utils'

@@ -8,13 +7,8 @@ export const dynamic = 'force-dynamic'

const logger = createLogger('JiraWriteAPI')

export async function POST(request: NextRequest) {
export async function POST(request: Request) {
try {
const auth = await checkSessionOrInternalAuth(request)
if (!auth.success || !auth.userId) {
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}

const {
domain,
accessToken,

@@ -1,6 +1,5 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { NextResponse } from 'next/server'
import {
validateAlphanumericId,
validateEnum,
@@ -16,12 +15,7 @@ const logger = createLogger('JsmApprovalsAPI')
const VALID_ACTIONS = ['get', 'answer'] as const
const VALID_DECISIONS = ['approve', 'decline'] as const

export async function POST(request: NextRequest) {
const auth = await checkInternalAuth(request)
if (!auth.success || !auth.userId) {
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}

export async function POST(request: Request) {
try {
const body = await request.json()
const {

@@ -1,6 +1,5 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { NextResponse } from 'next/server'
import { validateJiraCloudId, validateJiraIssueKey } from '@/lib/core/security/input-validation'
import { getJiraCloudId, getJsmApiBaseUrl, getJsmHeaders } from '@/tools/jsm/utils'

@@ -8,12 +7,7 @@ export const dynamic = 'force-dynamic'

const logger = createLogger('JsmCommentAPI')

export async function POST(request: NextRequest) {
const auth = await checkInternalAuth(request)
if (!auth.success || !auth.userId) {
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}

export async function POST(request: Request) {
try {
const {
domain,

@@ -1,6 +1,5 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { NextResponse } from 'next/server'
import { validateJiraCloudId, validateJiraIssueKey } from '@/lib/core/security/input-validation'
import { getJiraCloudId, getJsmApiBaseUrl, getJsmHeaders } from '@/tools/jsm/utils'

@@ -8,12 +7,7 @@ export const dynamic = 'force-dynamic'

const logger = createLogger('JsmCommentsAPI')

export async function POST(request: NextRequest) {
const auth = await checkInternalAuth(request)
if (!auth.success || !auth.userId) {
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}

export async function POST(request: Request) {
try {
const body = await request.json()
const {

@@ -1,6 +1,5 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { NextResponse } from 'next/server'
import { validateAlphanumericId, validateJiraCloudId } from '@/lib/core/security/input-validation'
import { getJiraCloudId, getJsmApiBaseUrl, getJsmHeaders } from '@/tools/jsm/utils'

@@ -8,12 +7,7 @@ export const dynamic = 'force-dynamic'

const logger = createLogger('JsmCustomersAPI')

export async function POST(request: NextRequest) {
const auth = await checkInternalAuth(request)
if (!auth.success || !auth.userId) {
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}

export async function POST(request: Request) {
try {
const body = await request.json()
const {

@@ -1,6 +1,5 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { NextResponse } from 'next/server'
import {
validateAlphanumericId,
validateEnum,
@@ -14,12 +13,7 @@ const logger = createLogger('JsmOrganizationAPI')

const VALID_ACTIONS = ['create', 'add_to_service_desk'] as const

export async function POST(request: NextRequest) {
const auth = await checkInternalAuth(request)
if (!auth.success || !auth.userId) {
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}

export async function POST(request: Request) {
try {
const body = await request.json()
const {

@@ -1,6 +1,5 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { NextResponse } from 'next/server'
import { validateAlphanumericId, validateJiraCloudId } from '@/lib/core/security/input-validation'
import { getJiraCloudId, getJsmApiBaseUrl, getJsmHeaders } from '@/tools/jsm/utils'

@@ -8,12 +7,7 @@ export const dynamic = 'force-dynamic'

const logger = createLogger('JsmOrganizationsAPI')

export async function POST(request: NextRequest) {
const auth = await checkInternalAuth(request)
if (!auth.success || !auth.userId) {
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}

export async function POST(request: Request) {
try {
const body = await request.json()
const { domain, accessToken, cloudId: cloudIdParam, serviceDeskId, start, limit } = body

@@ -1,6 +1,5 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { NextResponse } from 'next/server'
import {
validateEnum,
validateJiraCloudId,
@@ -14,12 +13,7 @@ const logger = createLogger('JsmParticipantsAPI')

const VALID_ACTIONS = ['get', 'add'] as const

export async function POST(request: NextRequest) {
const auth = await checkInternalAuth(request)
if (!auth.success || !auth.userId) {
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}

export async function POST(request: Request) {
try {
const body = await request.json()
const {

@@ -1,6 +1,5 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { NextResponse } from 'next/server'
import { validateAlphanumericId, validateJiraCloudId } from '@/lib/core/security/input-validation'
import { getJiraCloudId, getJsmApiBaseUrl, getJsmHeaders } from '@/tools/jsm/utils'

@@ -8,12 +7,7 @@ export const dynamic = 'force-dynamic'

const logger = createLogger('JsmQueuesAPI')

export async function POST(request: NextRequest) {
const auth = await checkInternalAuth(request)
if (!auth.success || !auth.userId) {
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}

export async function POST(request: Request) {
try {
const body = await request.json()
const {

@@ -1,6 +1,5 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { NextResponse } from 'next/server'
import {
validateAlphanumericId,
validateJiraCloudId,
@@ -12,12 +11,7 @@ export const dynamic = 'force-dynamic'

const logger = createLogger('JsmRequestAPI')

export async function POST(request: NextRequest) {
const auth = await checkInternalAuth(request)
if (!auth.success || !auth.userId) {
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}

export async function POST(request: Request) {
try {
const body = await request.json()
const {

@@ -1,6 +1,5 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { NextResponse } from 'next/server'
import { validateAlphanumericId, validateJiraCloudId } from '@/lib/core/security/input-validation'
import { getJiraCloudId, getJsmApiBaseUrl, getJsmHeaders } from '@/tools/jsm/utils'

@@ -8,12 +7,7 @@ export const dynamic = 'force-dynamic'

const logger = createLogger('JsmRequestsAPI')

export async function POST(request: NextRequest) {
const auth = await checkInternalAuth(request)
if (!auth.success || !auth.userId) {
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}

export async function POST(request: Request) {
try {
const body = await request.json()
const {

@@ -1,6 +1,5 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { NextResponse } from 'next/server'
import { validateAlphanumericId, validateJiraCloudId } from '@/lib/core/security/input-validation'
import { getJiraCloudId, getJsmApiBaseUrl, getJsmHeaders } from '@/tools/jsm/utils'

@@ -8,12 +7,7 @@ export const dynamic = 'force-dynamic'

const logger = createLogger('JsmRequestTypesAPI')

export async function POST(request: NextRequest) {
const auth = await checkInternalAuth(request)
if (!auth.success || !auth.userId) {
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}

export async function POST(request: Request) {
try {
const body = await request.json()
const { domain, accessToken, cloudId: cloudIdParam, serviceDeskId, start, limit } = body

@@ -1,6 +1,5 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { NextResponse } from 'next/server'
import { validateJiraCloudId } from '@/lib/core/security/input-validation'
import { getJiraCloudId, getJsmApiBaseUrl, getJsmHeaders } from '@/tools/jsm/utils'

@@ -8,12 +7,7 @@ export const dynamic = 'force-dynamic'

const logger = createLogger('JsmServiceDesksAPI')

export async function POST(request: NextRequest) {
const auth = await checkInternalAuth(request)
if (!auth.success || !auth.userId) {
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}

export async function POST(request: Request) {
try {
const body = await request.json()
const { domain, accessToken, cloudId: cloudIdParam, start, limit } = body

@@ -1,6 +1,5 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { NextResponse } from 'next/server'
import { validateJiraCloudId, validateJiraIssueKey } from '@/lib/core/security/input-validation'
import { getJiraCloudId, getJsmApiBaseUrl, getJsmHeaders } from '@/tools/jsm/utils'

@@ -8,12 +7,7 @@ export const dynamic = 'force-dynamic'

const logger = createLogger('JsmSlaAPI')

export async function POST(request: NextRequest) {
const auth = await checkInternalAuth(request)
if (!auth.success || !auth.userId) {
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}

export async function POST(request: Request) {
try {
const body = await request.json()
const { domain, accessToken, cloudId: cloudIdParam, issueIdOrKey, start, limit } = body

@@ -1,6 +1,5 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { NextResponse } from 'next/server'
import {
validateAlphanumericId,
validateJiraCloudId,
@@ -12,12 +11,7 @@ export const dynamic = 'force-dynamic'

const logger = createLogger('JsmTransitionAPI')

export async function POST(request: NextRequest) {
const auth = await checkInternalAuth(request)
if (!auth.success || !auth.userId) {
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}

export async function POST(request: Request) {
try {
const {
domain,

@@ -1,6 +1,5 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { NextResponse } from 'next/server'
import { validateJiraCloudId, validateJiraIssueKey } from '@/lib/core/security/input-validation'
import { getJiraCloudId, getJsmApiBaseUrl, getJsmHeaders } from '@/tools/jsm/utils'

@@ -8,12 +7,7 @@ export const dynamic = 'force-dynamic'

const logger = createLogger('JsmTransitionsAPI')

export async function POST(request: NextRequest) {
const auth = await checkInternalAuth(request)
if (!auth.success || !auth.userId) {
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}

export async function POST(request: Request) {
try {
const body = await request.json()
const { domain, accessToken, cloudId: cloudIdParam, issueIdOrKey } = body

@@ -2,7 +2,6 @@ import { randomUUID } from 'crypto'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { createMongoDBConnection, sanitizeCollectionName, validateFilter } from '../utils'

const logger = createLogger('MongoDBDeleteAPI')
@@ -41,12 +40,6 @@ export async function POST(request: NextRequest) {
const requestId = randomUUID().slice(0, 8)
let client = null

const auth = await checkInternalAuth(request)
if (!auth.success || !auth.userId) {
logger.warn(`[${requestId}] Unauthorized MongoDB delete attempt`)
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}

try {
const body = await request.json()
const params = DeleteSchema.parse(body)

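The MongoDB, Neo4j, RDS, and SQS route hunks that follow share the same handler preamble: a short request ID, optional client/driver/session locals, and, on one side of each hunk, an internal-auth guard that returns 401 and logs a warning before the body is parsed or any connection is opened. A minimal sketch of that guarded shape is shown below; the guard helper, logger factory, and warning message are taken from the diff, while the schema fields and the connection/query logic are hypothetical stand-ins for the real `../utils` helpers.

```ts
// Sketch of the guard placement shown in the database-route hunks below.
// `checkInternalAuth` and `createLogger` are taken from the diff; DeleteSchema's
// fields and the connection handling are illustrative placeholders.
import { randomUUID } from 'crypto'
import { createLogger } from '@sim/logger'
import { NextResponse, type NextRequest } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'

const logger = createLogger('MongoDBDeleteAPI')
const DeleteSchema = z.object({ collection: z.string(), filter: z.record(z.unknown()) })

export async function POST(request: NextRequest) {
  const requestId = randomUUID().slice(0, 8)
  let client = null

  // Reject unauthenticated callers before touching the body or opening a connection.
  const auth = await checkInternalAuth(request)
  if (!auth.success || !auth.userId) {
    logger.warn(`[${requestId}] Unauthorized MongoDB delete attempt`)
    return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
  }

  try {
    const params = DeleteSchema.parse(await request.json())
    // ...open `client`, run the delete with `params`, build the response...
    return NextResponse.json({ success: true })
  } catch (error) {
    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  } finally {
    // ...close `client` if it was opened...
  }
}
```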
@@ -2,7 +2,6 @@ import { randomUUID } from 'crypto'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { createMongoDBConnection, sanitizeCollectionName, validatePipeline } from '../utils'

const logger = createLogger('MongoDBExecuteAPI')
@@ -33,12 +32,6 @@ export async function POST(request: NextRequest) {
const requestId = randomUUID().slice(0, 8)
let client = null

const auth = await checkInternalAuth(request)
if (!auth.success || !auth.userId) {
logger.warn(`[${requestId}] Unauthorized MongoDB execute attempt`)
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}

try {
const body = await request.json()
const params = ExecuteSchema.parse(body)

@@ -2,7 +2,6 @@ import { randomUUID } from 'crypto'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { createMongoDBConnection, sanitizeCollectionName } from '../utils'

const logger = createLogger('MongoDBInsertAPI')
@@ -38,12 +37,6 @@ export async function POST(request: NextRequest) {
const requestId = randomUUID().slice(0, 8)
let client = null

const auth = await checkInternalAuth(request)
if (!auth.success || !auth.userId) {
logger.warn(`[${requestId}] Unauthorized MongoDB insert attempt`)
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}

try {
const body = await request.json()
const params = InsertSchema.parse(body)

@@ -2,7 +2,6 @@ import { randomUUID } from 'crypto'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { createMongoDBConnection, executeIntrospect } from '../utils'

const logger = createLogger('MongoDBIntrospectAPI')
@@ -21,12 +20,6 @@ export async function POST(request: NextRequest) {
const requestId = randomUUID().slice(0, 8)
let client = null

const auth = await checkInternalAuth(request)
if (!auth.success || !auth.userId) {
logger.warn(`[${requestId}] Unauthorized MongoDB introspect attempt`)
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}

try {
const body = await request.json()
const params = IntrospectSchema.parse(body)

@@ -2,7 +2,6 @@ import { randomUUID } from 'crypto'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { createMongoDBConnection, sanitizeCollectionName, validateFilter } from '../utils'

const logger = createLogger('MongoDBQueryAPI')
@@ -50,12 +49,6 @@ export async function POST(request: NextRequest) {
const requestId = randomUUID().slice(0, 8)
let client = null

const auth = await checkInternalAuth(request)
if (!auth.success || !auth.userId) {
logger.warn(`[${requestId}] Unauthorized MongoDB query attempt`)
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}

try {
const body = await request.json()
const params = QuerySchema.parse(body)

@@ -2,7 +2,6 @@ import { randomUUID } from 'crypto'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { createMongoDBConnection, sanitizeCollectionName, validateFilter } from '../utils'

const logger = createLogger('MongoDBUpdateAPI')
@@ -60,12 +59,6 @@ export async function POST(request: NextRequest) {
const requestId = randomUUID().slice(0, 8)
let client = null

const auth = await checkInternalAuth(request)
if (!auth.success || !auth.userId) {
logger.warn(`[${requestId}] Unauthorized MongoDB update attempt`)
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}

try {
const body = await request.json()
const params = UpdateSchema.parse(body)

@@ -2,7 +2,6 @@ import { randomUUID } from 'crypto'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import {
convertNeo4jTypesToJSON,
createNeo4jDriver,
@@ -27,12 +26,6 @@ export async function POST(request: NextRequest) {
let driver = null
let session = null

const auth = await checkInternalAuth(request)
if (!auth.success || !auth.userId) {
logger.warn(`[${requestId}] Unauthorized Neo4j create attempt`)
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}

try {
const body = await request.json()
const params = CreateSchema.parse(body)

@@ -2,7 +2,6 @@ import { randomUUID } from 'crypto'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { createNeo4jDriver, validateCypherQuery } from '@/app/api/tools/neo4j/utils'

const logger = createLogger('Neo4jDeleteAPI')
@@ -24,12 +23,6 @@ export async function POST(request: NextRequest) {
let driver = null
let session = null

const auth = await checkInternalAuth(request)
if (!auth.success || !auth.userId) {
logger.warn(`[${requestId}] Unauthorized Neo4j delete attempt`)
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}

try {
const body = await request.json()
const params = DeleteSchema.parse(body)

@@ -2,7 +2,6 @@ import { randomUUID } from 'crypto'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import {
convertNeo4jTypesToJSON,
createNeo4jDriver,
@@ -27,12 +26,6 @@ export async function POST(request: NextRequest) {
let driver = null
let session = null

const auth = await checkInternalAuth(request)
if (!auth.success || !auth.userId) {
logger.warn(`[${requestId}] Unauthorized Neo4j execute attempt`)
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}

try {
const body = await request.json()
const params = ExecuteSchema.parse(body)

@@ -2,7 +2,6 @@ import { randomUUID } from 'crypto'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { createNeo4jDriver } from '@/app/api/tools/neo4j/utils'
import type { Neo4jNodeSchema, Neo4jRelationshipSchema } from '@/tools/neo4j/types'

@@ -22,12 +21,6 @@ export async function POST(request: NextRequest) {
let driver = null
let session = null

const auth = await checkInternalAuth(request)
if (!auth.success || !auth.userId) {
logger.warn(`[${requestId}] Unauthorized Neo4j introspect attempt`)
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}

try {
const body = await request.json()
const params = IntrospectSchema.parse(body)

@@ -2,7 +2,6 @@ import { randomUUID } from 'crypto'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import {
convertNeo4jTypesToJSON,
createNeo4jDriver,
@@ -27,12 +26,6 @@ export async function POST(request: NextRequest) {
let driver = null
let session = null

const auth = await checkInternalAuth(request)
if (!auth.success || !auth.userId) {
logger.warn(`[${requestId}] Unauthorized Neo4j merge attempt`)
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}

try {
const body = await request.json()
const params = MergeSchema.parse(body)

@@ -2,7 +2,6 @@ import { randomUUID } from 'crypto'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import {
convertNeo4jTypesToJSON,
createNeo4jDriver,
@@ -27,12 +26,6 @@ export async function POST(request: NextRequest) {
let driver = null
let session = null

const auth = await checkInternalAuth(request)
if (!auth.success || !auth.userId) {
logger.warn(`[${requestId}] Unauthorized Neo4j query attempt`)
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}

try {
const body = await request.json()
const params = QuerySchema.parse(body)

@@ -2,7 +2,6 @@ import { randomUUID } from 'crypto'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import {
convertNeo4jTypesToJSON,
createNeo4jDriver,
@@ -27,12 +26,6 @@ export async function POST(request: NextRequest) {
let driver = null
let session = null

const auth = await checkInternalAuth(request)
if (!auth.success || !auth.userId) {
logger.warn(`[${requestId}] Unauthorized Neo4j update attempt`)
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}

try {
const body = await request.json()
const params = UpdateSchema.parse(body)

@@ -2,7 +2,6 @@ import { randomUUID } from 'crypto'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { createRdsClient, executeDelete } from '@/app/api/tools/rds/utils'

const logger = createLogger('RDSDeleteAPI')
@@ -23,11 +22,6 @@ const DeleteSchema = z.object({
export async function POST(request: NextRequest) {
const requestId = randomUUID().slice(0, 8)

const auth = await checkInternalAuth(request)
if (!auth.success || !auth.userId) {
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}

try {
const body = await request.json()
const params = DeleteSchema.parse(body)

@@ -2,7 +2,6 @@ import { randomUUID } from 'crypto'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { createRdsClient, executeStatement } from '@/app/api/tools/rds/utils'

const logger = createLogger('RDSExecuteAPI')
@@ -20,11 +19,6 @@ const ExecuteSchema = z.object({
export async function POST(request: NextRequest) {
const requestId = randomUUID().slice(0, 8)

const auth = await checkInternalAuth(request)
if (!auth.success || !auth.userId) {
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}

try {
const body = await request.json()
const params = ExecuteSchema.parse(body)

@@ -2,7 +2,6 @@ import { randomUUID } from 'crypto'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { createRdsClient, executeInsert } from '@/app/api/tools/rds/utils'

const logger = createLogger('RDSInsertAPI')
@@ -23,11 +22,6 @@ const InsertSchema = z.object({
export async function POST(request: NextRequest) {
const requestId = randomUUID().slice(0, 8)

const auth = await checkInternalAuth(request)
if (!auth.success || !auth.userId) {
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}

try {
const body = await request.json()
const params = InsertSchema.parse(body)

@@ -2,7 +2,6 @@ import { randomUUID } from 'crypto'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { createRdsClient, executeIntrospect, type RdsEngine } from '@/app/api/tools/rds/utils'

const logger = createLogger('RDSIntrospectAPI')
@@ -21,11 +20,6 @@ const IntrospectSchema = z.object({
export async function POST(request: NextRequest) {
const requestId = randomUUID().slice(0, 8)

const auth = await checkInternalAuth(request)
if (!auth.success || !auth.userId) {
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}

try {
const body = await request.json()
const params = IntrospectSchema.parse(body)

@@ -2,7 +2,6 @@ import { randomUUID } from 'crypto'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { createRdsClient, executeStatement, validateQuery } from '@/app/api/tools/rds/utils'

const logger = createLogger('RDSQueryAPI')
@@ -20,11 +19,6 @@ const QuerySchema = z.object({
export async function POST(request: NextRequest) {
const requestId = randomUUID().slice(0, 8)

const auth = await checkInternalAuth(request)
if (!auth.success || !auth.userId) {
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}

try {
const body = await request.json()
const params = QuerySchema.parse(body)

@@ -2,7 +2,6 @@ import { randomUUID } from 'crypto'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { createRdsClient, executeUpdate } from '@/app/api/tools/rds/utils'

const logger = createLogger('RDSUpdateAPI')
@@ -26,11 +25,6 @@ const UpdateSchema = z.object({
export async function POST(request: NextRequest) {
const requestId = randomUUID().slice(0, 8)

const auth = await checkInternalAuth(request)
if (!auth.success || !auth.userId) {
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}

try {
const body = await request.json()
const params = UpdateSchema.parse(body)

@@ -2,7 +2,6 @@ import { randomUUID } from 'crypto'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { createSqsClient, sendMessage } from '../utils'

const logger = createLogger('SQSSendMessageAPI')
@@ -22,11 +21,6 @@ const SendMessageSchema = z.object({
export async function POST(request: NextRequest) {
const requestId = randomUUID().slice(0, 8)

const auth = await checkInternalAuth(request)
if (!auth.success || !auth.userId) {
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}

try {
const body = await request.json()
const params = SendMessageSchema.parse(body)

@@ -1,7 +1,6 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { env } from '@/lib/core/config/env'
import { isSensitiveKey, REDACTED_MARKER } from '@/lib/core/security/redaction'
import { ensureZodObject, normalizeUrl } from '@/app/api/tools/stagehand/utils'
@@ -92,11 +91,6 @@ function substituteVariables(text: string, variables: Record<string, string> | u
}

export async function POST(request: NextRequest) {
const auth = await checkInternalAuth(request)
if (!auth.success || !auth.userId) {
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}

let stagehand: StagehandType | null = null

try {

@@ -1,7 +1,6 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { env } from '@/lib/core/config/env'
import { ensureZodObject, normalizeUrl } from '@/app/api/tools/stagehand/utils'

@@ -23,11 +22,6 @@ const requestSchema = z.object({
})

export async function POST(request: NextRequest) {
const auth = await checkInternalAuth(request)
if (!auth.success || !auth.userId) {
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}

let stagehand: StagehandType | null = null

try {

@@ -58,6 +58,7 @@ export function ExecutionSnapshot({
onClose = () => {},
}: ExecutionSnapshotProps) {
const { data, isLoading, error } = useExecutionSnapshot(executionId)
const lastExecutionIdRef = useRef<string | null>(null)

const [isMenuOpen, setIsMenuOpen] = useState(false)
const [menuPosition, setMenuPosition] = useState({ x: 0, y: 0 })
@@ -81,6 +82,12 @@ export function ExecutionSnapshot({

const workflowState = data?.workflowState as WorkflowState | undefined

// Track execution ID changes for key reset
const executionKey = executionId !== lastExecutionIdRef.current ? executionId : undefined
if (executionId !== lastExecutionIdRef.current) {
lastExecutionIdRef.current = executionId
}

const renderContent = () => {
if (isLoading) {
return (
@@ -145,7 +152,7 @@ export function ExecutionSnapshot({

return (
<Preview
key={executionId}
key={executionKey}
workflowState={workflowState}
traceSpans={traceSpans}
className={className}

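The ExecutionSnapshot hunk above swaps a direct `key={executionId}` on the Preview for a ref-guarded `executionKey`. Extracted on its own, the mechanism is sketched below; the hook name is illustrative and the logic simply mirrors the diff: a key value is produced only on the render where `executionId` differs from the previously seen value.

```ts
// Minimal sketch of the ref-guarded key computation from the ExecutionSnapshot hunk above.
// `useRemountKey` is a hypothetical name; only the ref/key logic comes from the diff.
import { useRef } from 'react'

function useRemountKey(executionId: string): string | undefined {
  const lastExecutionIdRef = useRef<string | null>(null)

  // Produce a key only on the render where the id has changed since last seen.
  const executionKey = executionId !== lastExecutionIdRef.current ? executionId : undefined
  if (executionId !== lastExecutionIdRef.current) {
    lastExecutionIdRef.current = executionId
  }

  return executionKey // passed as `key` to the preview element, as in the hunk
}
```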
@@ -203,9 +203,7 @@ export function ApiInfoModal({ open, onOpenChange, workflowId }: ApiInfoModalPro
<span className='block truncate font-medium text-[14px] text-[var(--text-tertiary)]'>
{field.name}
</span>
<Badge variant='type' size='sm'>
{field.type || 'string'}
</Badge>
<Badge size='sm'>{field.type || 'string'}</Badge>
</div>
</div>
<div className='border-[var(--border-1)] border-t px-[10px] pt-[6px] pb-[10px]'>

@@ -511,9 +511,7 @@ export function McpDeploy({
<span className='block truncate font-medium text-[14px] text-[var(--text-tertiary)]'>
{field.name}
</span>
<Badge variant='type' size='sm'>
{field.type}
</Badge>
<Badge size='sm'>{field.type}</Badge>
</div>
</div>
<div className='border-[var(--border-1)] border-t px-[10px] pt-[6px] pb-[10px]'>

@@ -39,8 +39,6 @@ import { normalizeName } from '@/executor/constants'
import { createEnvVarPattern, createReferencePattern } from '@/executor/utils/reference-validation'
import { useTagSelection } from '@/hooks/kb/use-tag-selection'
import { createShouldHighlightEnvVar, useAvailableEnvVarKeys } from '@/hooks/use-available-env-vars'
import { useCodeUndoRedo } from '@/hooks/use-code-undo-redo'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'

const logger = createLogger('Code')

@@ -214,6 +212,7 @@ export const Code = memo(function Code({
const handleStreamStartRef = useRef<() => void>(() => {})
const handleGeneratedContentRef = useRef<(generatedCode: string) => void>(() => {})
const handleStreamChunkRef = useRef<(chunk: string) => void>(() => {})
const hasEditedSinceFocusRef = useRef(false)
const codeRef = useRef(code)
codeRef.current = code

@@ -221,12 +220,8 @@ export const Code = memo(function Code({
const emitTagSelection = useTagSelection(blockId, subBlockId)
const [languageValue] = useSubBlockValue<string>(blockId, 'language')
const availableEnvVars = useAvailableEnvVarKeys(workspaceId)
const blockType = useWorkflowStore(
useCallback((state) => state.blocks?.[blockId]?.type, [blockId])
)

const effectiveLanguage = (languageValue as 'javascript' | 'python' | 'json') || language
const isFunctionCode = blockType === 'function' && subBlockId === 'code'

const trimmedCode = code.trim()
const containsReferencePlaceholders =
@@ -301,15 +296,6 @@ export const Code = memo(function Code({
const updatePromptValue = wandHook?.updatePromptValue || (() => {})
const cancelGeneration = wandHook?.cancelGeneration || (() => {})

const { recordChange, recordReplace, flushPending, startSession, undo, redo } = useCodeUndoRedo({
blockId,
subBlockId,
value: code,
enabled: isFunctionCode,
isReadOnly: readOnly || disabled || isPreview,
isStreaming: isAiStreaming,
})

const [storeValue, setStoreValue] = useSubBlockValue(blockId, subBlockId, false, {
isStreaming: isAiStreaming,
onStreamingEnd: () => {
@@ -361,10 +347,9 @@ export const Code = memo(function Code({
setCode(generatedCode)
if (!isPreview && !disabled) {
setStoreValue(generatedCode)
recordReplace(generatedCode)
}
}
}, [disabled, isPreview, recordReplace, setStoreValue])
}, [isPreview, disabled, setStoreValue])

useEffect(() => {
if (!editorRef.current) return
@@ -507,7 +492,7 @@ export const Code = memo(function Code({

setCode(newValue)
setStoreValue(newValue)
recordChange(newValue)
hasEditedSinceFocusRef.current = true
const newCursorPosition = dropPosition + 1
setCursorPosition(newCursorPosition)

@@ -536,7 +521,7 @@ export const Code = memo(function Code({
if (!isPreview && !readOnly) {
setCode(newValue)
emitTagSelection(newValue)
recordChange(newValue)
hasEditedSinceFocusRef.current = true
}
setShowTags(false)
setActiveSourceBlockId(null)
@@ -554,7 +539,7 @@ export const Code = memo(function Code({
if (!isPreview && !readOnly) {
setCode(newValue)
emitTagSelection(newValue)
recordChange(newValue)
hasEditedSinceFocusRef.current = true
}
setShowEnvVars(false)

@@ -640,9 +625,9 @@ export const Code = memo(function Code({
const handleValueChange = useCallback(
(newCode: string) => {
if (!isAiStreaming && !isPreview && !disabled && !readOnly) {
hasEditedSinceFocusRef.current = true
setCode(newCode)
setStoreValue(newCode)
recordChange(newCode)

const textarea = editorRef.current?.querySelector('textarea')
if (textarea) {
@@ -661,7 +646,7 @@ export const Code = memo(function Code({
}
}
},
[isAiStreaming, isPreview, disabled, readOnly, recordChange, setStoreValue]
[isAiStreaming, isPreview, disabled, readOnly, setStoreValue]
)

const handleKeyDown = useCallback(
@@ -672,39 +657,21 @@ export const Code = memo(function Code({
}
if (isAiStreaming) {
e.preventDefault()
return
}
if (!isFunctionCode) return
const isUndo = (e.key === 'z' || e.key === 'Z') && (e.metaKey || e.ctrlKey) && !e.shiftKey
const isRedo =
((e.key === 'z' || e.key === 'Z') && (e.metaKey || e.ctrlKey) && e.shiftKey) ||
(e.key === 'y' && (e.metaKey || e.ctrlKey))
if (isUndo) {
if (e.key === 'z' && (e.metaKey || e.ctrlKey) && !hasEditedSinceFocusRef.current) {
e.preventDefault()
e.stopPropagation()
undo()
return
}
if (isRedo) {
e.preventDefault()
e.stopPropagation()
redo()
}
},
[isAiStreaming, isFunctionCode, redo, undo]
[isAiStreaming]
)

const handleEditorFocus = useCallback(() => {
startSession(codeRef.current)
hasEditedSinceFocusRef.current = false
if (!isPreview && !disabled && !readOnly && codeRef.current.trim() === '') {
setShowTags(true)
setCursorPosition(0)
}
}, [disabled, isPreview, readOnly, startSession])

const handleEditorBlur = useCallback(() => {
flushPending()
}, [flushPending])
}, [isPreview, disabled, readOnly])

/**
* Renders the line numbers, aligned with wrapped visual lines and highlighting the active line.
@@ -824,7 +791,6 @@ export const Code = memo(function Code({
onValueChange={handleValueChange}
onKeyDown={handleKeyDown}
onFocus={handleEditorFocus}
onBlur={handleEditorBlur}
highlight={highlightCode}
{...getCodeEditorProps({ isStreaming: isAiStreaming, isPreview, disabled })}
/>

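The Code editor hunks above differ in whether the key handler recognizes undo/redo shortcuts and forwards them to the `useCodeUndoRedo` hook. The shortcut detection itself, pulled out of the component, is roughly the sketch below; `makeShortcutHandler` is a hypothetical wrapper, and the `undo`/`redo` callbacks stand in for whatever history hook the editor wires up.

```ts
// Sketch of the Cmd/Ctrl+Z, Cmd/Ctrl+Shift+Z, and Ctrl/Cmd+Y detection from the hunks above.
function makeShortcutHandler(undo: () => void, redo: () => void) {
  return (e: KeyboardEvent) => {
    const mod = e.metaKey || e.ctrlKey
    const isUndo = (e.key === 'z' || e.key === 'Z') && mod && !e.shiftKey
    const isRedo =
      ((e.key === 'z' || e.key === 'Z') && mod && e.shiftKey) || (e.key === 'y' && mod)

    if (isUndo) {
      // Stop the browser/native textarea history from firing and use the hook instead.
      e.preventDefault()
      e.stopPropagation()
      undo()
      return
    }
    if (isRedo) {
      e.preventDefault()
      e.stopPropagation()
      redo()
    }
  }
}
```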
@@ -245,9 +245,7 @@ export function DocumentTagEntry({
{tag.collapsed ? tag.tagName || `Tag ${index + 1}` : `Tag ${index + 1}`}
</span>
{tag.collapsed && tag.tagName && (
<Badge variant='type' size='sm'>
{FIELD_TYPE_LABELS[tag.fieldType] || 'Text'}
</Badge>
<Badge size='sm'>{FIELD_TYPE_LABELS[tag.fieldType] || 'Text'}</Badge>
)}
</div>
<div className='flex items-center gap-[8px] pl-[8px]' onClick={(e) => e.stopPropagation()}>

@@ -223,11 +223,7 @@ function InputMappingField({
<span className='block truncate font-medium text-[14px] text-[var(--text-tertiary)]'>
{fieldName}
</span>
{fieldType && (
<Badge variant='type' size='sm'>
{fieldType}
</Badge>
)}
{fieldType && <Badge size='sm'>{fieldType}</Badge>}
</div>
</div>


@@ -238,9 +238,7 @@ export function KnowledgeTagFilters({
{filter.collapsed ? filter.tagName || `Filter ${index + 1}` : `Filter ${index + 1}`}
</span>
{filter.collapsed && filter.tagName && (
<Badge variant='type' size='sm'>
{FIELD_TYPE_LABELS[filter.fieldType] || 'Text'}
</Badge>
<Badge size='sm'>{FIELD_TYPE_LABELS[filter.fieldType] || 'Text'}</Badge>
)}
</div>
<div className='flex items-center gap-[8px] pl-[8px]' onClick={(e) => e.stopPropagation()}>

@@ -310,11 +310,7 @@ export function FieldFormat({
<span className='block truncate font-medium text-[14px] text-[var(--text-tertiary)]'>
{field.name || `${title} ${index + 1}`}
</span>
{field.name && showType && (
<Badge variant='type' size='sm'>
{field.type}
</Badge>
)}
{field.name && showType && <Badge size='sm'>{field.type}</Badge>}
</div>
<div className='flex items-center gap-[8px] pl-[8px]' onClick={(e) => e.stopPropagation()}>
<Button variant='ghost' onClick={addField} disabled={isReadOnly} className='h-auto p-0'>

@@ -345,11 +345,7 @@ export function VariablesInput({
<span className='block truncate font-medium text-[14px] text-[var(--text-tertiary)]'>
{assignment.variableName || `Variable ${index + 1}`}
</span>
{assignment.variableName && (
<Badge variant='type' size='sm'>
{assignment.type}
</Badge>
)}
{assignment.variableName && <Badge size='sm'>{assignment.type}</Badge>}
</div>
<div
className='flex items-center gap-[8px] pl-[8px]'

@@ -796,7 +796,6 @@ export const Terminal = memo(function Terminal() {
const terminalRef = useRef<HTMLElement>(null)
const prevEntriesLengthRef = useRef(0)
const prevWorkflowEntriesLengthRef = useRef(0)
const hasInitializedEntriesRef = useRef(false)
const isTerminalFocusedRef = useRef(false)
const lastExpandedHeightRef = useRef<number>(DEFAULT_EXPANDED_HEIGHT)
const setTerminalHeight = useTerminalStore((state) => state.setTerminalHeight)
@@ -1008,33 +1007,12 @@ export const Terminal = memo(function Terminal() {
return JSON.stringify(outputData, null, 2)
}, [outputData])

/**
* Reset entry tracking when switching workflows to ensure auto-open
* works correctly for each workflow independently.
*/
useEffect(() => {
hasInitializedEntriesRef.current = false
}, [activeWorkflowId])

/**
* Auto-open the terminal on new entries when "Open on run" is enabled.
* This mirrors the header toggle behavior by using expandToLastHeight,
* ensuring we always get the same smooth height transition.
*
* Skips the initial sync after console hydration to avoid auto-opening
* when persisted entries are restored on page refresh.
*/
useEffect(() => {
if (!hasConsoleHydrated) {
return
}

if (!hasInitializedEntriesRef.current) {
hasInitializedEntriesRef.current = true
prevWorkflowEntriesLengthRef.current = allWorkflowEntries.length
return
}

if (!openOnRun) {
prevWorkflowEntriesLengthRef.current = allWorkflowEntries.length
return
@@ -1048,14 +1026,7 @@ export const Terminal = memo(function Terminal() {
}

prevWorkflowEntriesLengthRef.current = currentLength
}, [
allWorkflowEntries.length,
expandToLastHeight,
openOnRun,
isExpanded,
hasConsoleHydrated,
activeWorkflowId,
])
}, [allWorkflowEntries.length, expandToLastHeight, openOnRun, isExpanded])

/**
* Handle row click - toggle if clicking same entry

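The Terminal hunk above documents an auto-open effect that skips the first sync after console hydration and only reacts to entries added afterwards, resetting its tracking when the active workflow changes. A standalone sketch of that gating is shown below; the store and callback names follow the diff, the hook name is hypothetical, and the growth comparison in the middle is an assumption, since the diff elides that part of the effect body.

```ts
// Sketch of the hydration-gated, ref-tracked auto-open behavior described in the hunk above.
import { useEffect, useRef } from 'react'

function useAutoOpenOnRun(options: {
  hasConsoleHydrated: boolean
  openOnRun: boolean
  isExpanded: boolean
  entryCount: number
  expandToLastHeight: () => void
}) {
  const { hasConsoleHydrated, openOnRun, isExpanded, entryCount, expandToLastHeight } = options
  const hasInitializedRef = useRef(false)
  const prevCountRef = useRef(0)

  useEffect(() => {
    if (!hasConsoleHydrated) return

    // First sync after hydration only records the restored entry count (no auto-open).
    if (!hasInitializedRef.current) {
      hasInitializedRef.current = true
      prevCountRef.current = entryCount
      return
    }

    if (!openOnRun) {
      prevCountRef.current = entryCount
      return
    }

    // Assumed condition: a growing count means a run produced new output, so open the panel.
    if (entryCount > prevCountRef.current && !isExpanded) {
      expandToLastHeight()
    }
    prevCountRef.current = entryCount
  }, [hasConsoleHydrated, openOnRun, isExpanded, entryCount, expandToLastHeight])
}
```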
@@ -66,6 +66,7 @@ import { useWorkspaceEnvironment } from '@/hooks/queries/environment'
|
||||
import { useAutoConnect, useSnapToGridSize } from '@/hooks/queries/general-settings'
|
||||
import { useCanvasViewport } from '@/hooks/use-canvas-viewport'
|
||||
import { useCollaborativeWorkflow } from '@/hooks/use-collaborative-workflow'
|
||||
import { usePermissionConfig } from '@/hooks/use-permission-config'
|
||||
import { useStreamCleanup } from '@/hooks/use-stream-cleanup'
|
||||
import { useCanvasModeStore } from '@/stores/canvas-mode'
|
||||
import { useChatStore } from '@/stores/chat/store'
|
||||
@@ -98,6 +99,26 @@ const logger = createLogger('Workflow')
|
||||
|
||||
const DEFAULT_PASTE_OFFSET = { x: 50, y: 50 }
|
||||
|
||||
/**
|
||||
* Gets the center of the current viewport in flow coordinates
|
||||
*/
|
||||
function getViewportCenter(
|
||||
screenToFlowPosition: (pos: { x: number; y: number }) => { x: number; y: number }
|
||||
): { x: number; y: number } {
|
||||
const flowContainer = document.querySelector('.react-flow')
|
||||
if (!flowContainer) {
|
||||
return screenToFlowPosition({
|
||||
x: window.innerWidth / 2,
|
||||
y: window.innerHeight / 2,
|
||||
})
|
||||
}
|
||||
const rect = flowContainer.getBoundingClientRect()
|
||||
return screenToFlowPosition({
|
||||
x: rect.width / 2,
|
||||
y: rect.height / 2,
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculates the offset to paste blocks at viewport center
|
||||
*/
|
||||
@@ -105,7 +126,7 @@ function calculatePasteOffset(
|
||||
clipboard: {
|
||||
blocks: Record<string, { position: { x: number; y: number }; type: string; height?: number }>
|
||||
} | null,
|
||||
viewportCenter: { x: number; y: number }
|
||||
screenToFlowPosition: (pos: { x: number; y: number }) => { x: number; y: number }
|
||||
): { x: number; y: number } {
|
||||
if (!clipboard) return DEFAULT_PASTE_OFFSET
|
||||
|
||||
@@ -134,6 +155,8 @@ function calculatePasteOffset(
|
||||
)
|
||||
const clipboardCenter = { x: (minX + maxX) / 2, y: (minY + maxY) / 2 }
|
||||
|
||||
const viewportCenter = getViewportCenter(screenToFlowPosition)
|
||||
|
||||
return {
|
||||
x: viewportCenter.x - clipboardCenter.x,
|
||||
y: viewportCenter.y - clipboardCenter.y,
|
||||
@@ -243,7 +266,7 @@ const WorkflowContent = React.memo(() => {
|
||||
const router = useRouter()
|
||||
const reactFlowInstance = useReactFlow()
|
||||
const { screenToFlowPosition, getNodes, setNodes, getIntersectingNodes } = reactFlowInstance
|
||||
const { fitViewToBounds, getViewportCenter } = useCanvasViewport(reactFlowInstance)
|
||||
const { fitViewToBounds } = useCanvasViewport(reactFlowInstance)
|
||||
const { emitCursorUpdate } = useSocket()
|
||||
|
||||
const workspaceId = params.workspaceId as string
|
||||
@@ -315,6 +338,8 @@ const WorkflowContent = React.memo(() => {
|
||||
const isVariablesOpen = useVariablesStore((state) => state.isOpen)
|
||||
const isChatOpen = useChatStore((state) => state.isChatOpen)
|
||||
|
||||
// Permission config for invitation control
|
||||
const { isInvitationsDisabled } = usePermissionConfig()
|
||||
const snapGrid: [number, number] = useMemo(
|
||||
() => [snapToGridSize, snapToGridSize],
|
||||
[snapToGridSize]
|
||||
@@ -876,125 +901,11 @@ const WorkflowContent = React.memo(() => {
|
||||
* Consolidates shared logic for context paste, duplicate, and keyboard paste.
|
||||
*/
|
||||
const executePasteOperation = useCallback(
|
||||
(
|
||||
operation: 'paste' | 'duplicate',
|
||||
pasteOffset: { x: number; y: number },
|
||||
targetContainer?: {
|
||||
loopId: string
|
||||
loopPosition: { x: number; y: number }
|
||||
dimensions: { width: number; height: number }
|
||||
} | null,
|
||||
pasteTargetPosition?: { x: number; y: number }
|
||||
) => {
|
||||
// For context menu paste into a subflow, calculate offset to center blocks at click position
|
||||
// Skip click-position centering if blocks came from inside a subflow (relative coordinates)
|
||||
let effectiveOffset = pasteOffset
|
||||
if (targetContainer && pasteTargetPosition && clipboard) {
|
||||
const clipboardBlocks = Object.values(clipboard.blocks)
|
||||
// Only use click-position centering for top-level blocks (absolute coordinates)
|
||||
// Blocks with parentId have relative positions that can't be mixed with absolute click position
|
||||
const hasNestedBlocks = clipboardBlocks.some((b) => b.data?.parentId)
|
||||
if (clipboardBlocks.length > 0 && !hasNestedBlocks) {
|
||||
const minX = Math.min(...clipboardBlocks.map((b) => b.position.x))
|
||||
const maxX = Math.max(
|
||||
...clipboardBlocks.map((b) => b.position.x + BLOCK_DIMENSIONS.FIXED_WIDTH)
|
||||
)
|
||||
const minY = Math.min(...clipboardBlocks.map((b) => b.position.y))
|
||||
const maxY = Math.max(
|
||||
...clipboardBlocks.map((b) => b.position.y + BLOCK_DIMENSIONS.MIN_HEIGHT)
|
||||
)
|
||||
const clipboardCenter = { x: (minX + maxX) / 2, y: (minY + maxY) / 2 }
|
||||
effectiveOffset = {
|
||||
x: pasteTargetPosition.x - clipboardCenter.x,
|
||||
y: pasteTargetPosition.y - clipboardCenter.y,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const pasteData = preparePasteData(effectiveOffset)
|
||||
(operation: 'paste' | 'duplicate', pasteOffset: { x: number; y: number }) => {
|
||||
const pasteData = preparePasteData(pasteOffset)
|
||||
if (!pasteData) return
|
||||
|
||||
let pastedBlocksArray = Object.values(pasteData.blocks)
|
||||
|
||||
// If pasting into a subflow, adjust blocks to be children of that subflow
|
||||
if (targetContainer) {
|
||||
// Check if any pasted block is a trigger - triggers cannot be in subflows
|
||||
const hasTrigger = pastedBlocksArray.some((b) => TriggerUtils.isTriggerBlock(b))
|
||||
if (hasTrigger) {
|
||||
addNotification({
|
||||
level: 'error',
|
||||
message: 'Triggers cannot be placed inside loop or parallel subflows.',
|
||||
workflowId: activeWorkflowId || undefined,
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
// Check if any pasted block is a subflow - subflows cannot be nested
|
||||
const hasSubflow = pastedBlocksArray.some((b) => b.type === 'loop' || b.type === 'parallel')
|
||||
if (hasSubflow) {
|
||||
addNotification({
|
||||
level: 'error',
|
||||
message: 'Subflows cannot be nested inside other subflows.',
|
||||
workflowId: activeWorkflowId || undefined,
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
// Adjust each block's position to be relative to the container and set parentId
|
||||
pastedBlocksArray = pastedBlocksArray.map((block) => {
|
||||
// For blocks already nested (have parentId), positions are already relative - use as-is
// For top-level blocks, convert absolute position to relative by subtracting container position
const wasNested = Boolean(block.data?.parentId)
const relativePosition = wasNested
? { x: block.position.x, y: block.position.y }
: {
x: block.position.x - targetContainer.loopPosition.x,
y: block.position.y - targetContainer.loopPosition.y,
}
// Clamp position to keep block inside container (below header)
const clampedPosition = {
x: Math.max(
CONTAINER_DIMENSIONS.LEFT_PADDING,
Math.min(
relativePosition.x,
targetContainer.dimensions.width -
BLOCK_DIMENSIONS.FIXED_WIDTH -
CONTAINER_DIMENSIONS.RIGHT_PADDING
)
),
y: Math.max(
CONTAINER_DIMENSIONS.HEADER_HEIGHT + CONTAINER_DIMENSIONS.TOP_PADDING,
Math.min(
relativePosition.y,
targetContainer.dimensions.height -
BLOCK_DIMENSIONS.MIN_HEIGHT -
CONTAINER_DIMENSIONS.BOTTOM_PADDING
)
),
}
return {
...block,
position: clampedPosition,
data: {
...block.data,
parentId: targetContainer.loopId,
extent: 'parent',
},
}
})
// Update pasteData.blocks with the modified blocks
pasteData.blocks = pastedBlocksArray.reduce(
(acc, block) => {
acc[block.id] = block
return acc
},
{} as Record<string, (typeof pastedBlocksArray)[0]>
)
}
const pastedBlocksArray = Object.values(pasteData.blocks)
const validation = validateTriggerPaste(pastedBlocksArray, blocks, operation)
if (!validation.isValid) {
addNotification({
@@ -1015,46 +926,21 @@ const WorkflowContent = React.memo(() => {
pasteData.parallels,
pasteData.subBlockValues
)
// Resize container if we pasted into a subflow
if (targetContainer) {
resizeLoopNodesWrapper()
}
},
[
preparePasteData,
blocks,
clipboard,
addNotification,
activeWorkflowId,
collaborativeBatchAddBlocks,
setPendingSelection,
resizeLoopNodesWrapper,
]
)
const handleContextPaste = useCallback(() => {
if (!hasClipboard()) return
// Convert context menu position to flow coordinates and check if inside a subflow
const flowPosition = screenToFlowPosition(contextMenuPosition)
const targetContainer = isPointInLoopNode(flowPosition)
executePasteOperation(
'paste',
calculatePasteOffset(clipboard, getViewportCenter()),
targetContainer,
flowPosition // Pass the click position so blocks are centered at where user right-clicked
)
}, [
hasClipboard,
executePasteOperation,
clipboard,
getViewportCenter,
screenToFlowPosition,
contextMenuPosition,
isPointInLoopNode,
])
executePasteOperation('paste', calculatePasteOffset(clipboard, screenToFlowPosition))
}, [hasClipboard, executePasteOperation, clipboard, screenToFlowPosition])
const handleContextDuplicate = useCallback(() => {
copyBlocks(contextMenuBlocks.map((b) => b.id))
@@ -1120,6 +1006,10 @@ const WorkflowContent = React.memo(() => {
setIsChatOpen(!isChatOpen)
}, [])
const handleContextInvite = useCallback(() => {
window.dispatchEvent(new CustomEvent('open-invite-modal'))
}, [])
useEffect(() => {
let cleanup: (() => void) | null = null
@@ -1164,7 +1054,7 @@ const WorkflowContent = React.memo(() => {
} else if ((event.ctrlKey || event.metaKey) && event.key === 'v') {
if (effectivePermissions.canEdit && hasClipboard()) {
event.preventDefault()
executePasteOperation('paste', calculatePasteOffset(clipboard, getViewportCenter()))
executePasteOperation('paste', calculatePasteOffset(clipboard, screenToFlowPosition))
}
}
}
@@ -1184,7 +1074,7 @@ const WorkflowContent = React.memo(() => {
hasClipboard,
effectivePermissions.canEdit,
clipboard,
getViewportCenter,
screenToFlowPosition,
executePasteOperation,
])
@@ -1617,7 +1507,7 @@ const WorkflowContent = React.memo(() => {
if (!type) return
if (type === 'connectionBlock') return
const basePosition = getViewportCenter()
const basePosition = getViewportCenter(screenToFlowPosition)
if (type === 'loop' || type === 'parallel') {
const id = crypto.randomUUID()
@@ -1686,7 +1576,7 @@ const WorkflowContent = React.memo(() => {
)
}
}, [
getViewportCenter,
screenToFlowPosition,
blocks,
addBlock,
effectivePermissions.canEdit,
@@ -185,16 +185,6 @@ function ServerDetailView({ workspaceId, serverId, onBack }: ServerDetailViewPro
|
||||
return `claude mcp add "${safeName}" --url "${mcpServerUrl}" --header "X-API-Key:$SIM_API_KEY"`
|
||||
}
|
||||
|
||||
// Cursor supports direct URL configuration (no mcp-remote needed)
|
||||
if (client === 'cursor') {
|
||||
const cursorConfig = isPublic
|
||||
? { url: mcpServerUrl }
|
||||
: { url: mcpServerUrl, headers: { 'X-API-Key': '$SIM_API_KEY' } }
|
||||
|
||||
return JSON.stringify({ mcpServers: { [safeName]: cursorConfig } }, null, 2)
|
||||
}
|
||||
|
||||
// Claude Desktop and VS Code still use mcp-remote (stdio transport)
|
||||
const mcpRemoteArgs = isPublic
|
||||
? ['-y', 'mcp-remote', mcpServerUrl]
|
||||
: ['-y', 'mcp-remote', mcpServerUrl, '--header', 'X-API-Key:$SIM_API_KEY']
|
||||
@@ -275,8 +265,14 @@ function ServerDetailView({ workspaceId, serverId, onBack }: ServerDetailViewPro
|
||||
.replace(/[^a-z0-9-]/g, '')
|
||||
|
||||
const config = isPublic
|
||||
? { url: mcpServerUrl }
|
||||
: { url: mcpServerUrl, headers: { 'X-API-Key': '$SIM_API_KEY' } }
|
||||
? {
|
||||
command: 'npx',
|
||||
args: ['-y', 'mcp-remote', mcpServerUrl],
|
||||
}
|
||||
: {
|
||||
command: 'npx',
|
||||
args: ['-y', 'mcp-remote', mcpServerUrl, '--header', 'X-API-Key:$SIM_API_KEY'],
|
||||
}
|
||||
|
||||
const base64Config = btoa(JSON.stringify(config))
|
||||
return `cursor://anysphere.cursor-deeplink/mcp/install?name=${encodeURIComponent(safeName)}&config=${encodeURIComponent(base64Config)}`
|
||||
@@ -611,9 +607,7 @@ function ServerDetailView({ workspaceId, serverId, onBack }: ServerDetailViewPro
|
||||
<span className='block truncate font-medium text-[14px] text-[var(--text-tertiary)]'>
|
||||
{name}
|
||||
</span>
|
||||
<Badge variant='type' size='sm'>
|
||||
{prop.type || 'any'}
|
||||
</Badge>
|
||||
<Badge size='sm'>{prop.type || 'any'}</Badge>
|
||||
</div>
|
||||
</div>
|
||||
<div className='border-[var(--border-1)] border-t px-[10px] pt-[6px] pb-[10px]'>
|
||||
|
||||
@@ -162,5 +162,9 @@ export const HumanInTheLoopBlock: BlockConfig<ResponseBlockOutput> = {
|
||||
type: 'string',
|
||||
description: 'Resume API endpoint URL for direct curl requests',
|
||||
},
|
||||
response: { type: 'json', description: 'Display data shown to the approver' },
|
||||
submission: { type: 'json', description: 'Form submission data from the approver' },
|
||||
resumeInput: { type: 'json', description: 'Raw input data submitted when resuming' },
|
||||
submittedAt: { type: 'string', description: 'ISO timestamp when the workflow was resumed' },
|
||||
},
|
||||
}
|
||||
|
||||
@@ -36,22 +36,7 @@ export const IntercomBlock: BlockConfig = {
|
||||
{ label: 'Search Conversations', id: 'search_conversations' },
|
||||
{ label: 'Create Ticket', id: 'create_ticket' },
|
||||
{ label: 'Get Ticket', id: 'get_ticket' },
|
||||
{ label: 'Update Ticket', id: 'update_ticket' },
|
||||
{ label: 'Create Message', id: 'create_message' },
|
||||
{ label: 'List Admins', id: 'list_admins' },
|
||||
{ label: 'Close Conversation', id: 'close_conversation' },
|
||||
{ label: 'Open Conversation', id: 'open_conversation' },
|
||||
{ label: 'Snooze Conversation', id: 'snooze_conversation' },
|
||||
{ label: 'Assign Conversation', id: 'assign_conversation' },
|
||||
{ label: 'List Tags', id: 'list_tags' },
|
||||
{ label: 'Create Tag', id: 'create_tag' },
|
||||
{ label: 'Tag Contact', id: 'tag_contact' },
|
||||
{ label: 'Untag Contact', id: 'untag_contact' },
|
||||
{ label: 'Tag Conversation', id: 'tag_conversation' },
|
||||
{ label: 'Create Note', id: 'create_note' },
|
||||
{ label: 'Create Event', id: 'create_event' },
|
||||
{ label: 'Attach Contact to Company', id: 'attach_contact_to_company' },
|
||||
{ label: 'Detach Contact from Company', id: 'detach_contact_from_company' },
|
||||
],
|
||||
value: () => 'create_contact',
|
||||
},
|
||||
@@ -399,15 +384,7 @@ Return ONLY the numeric timestamp - no explanations, no quotes, no extra text.`,
|
||||
required: true,
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: [
|
||||
'get_conversation',
|
||||
'reply_conversation',
|
||||
'close_conversation',
|
||||
'open_conversation',
|
||||
'snooze_conversation',
|
||||
'assign_conversation',
|
||||
'tag_conversation',
|
||||
],
|
||||
value: ['get_conversation', 'reply_conversation'],
|
||||
},
|
||||
},
|
||||
{
|
||||
@@ -500,20 +477,11 @@ Return ONLY the message text - no explanations.`,
|
||||
id: 'admin_id',
|
||||
title: 'Admin ID',
|
||||
type: 'short-input',
|
||||
placeholder: 'ID of the admin performing the action',
|
||||
placeholder: 'ID of the admin sending the message',
|
||||
required: true,
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: [
|
||||
'reply_conversation',
|
||||
'close_conversation',
|
||||
'open_conversation',
|
||||
'snooze_conversation',
|
||||
'assign_conversation',
|
||||
'tag_conversation',
|
||||
'create_note',
|
||||
'update_ticket',
|
||||
],
|
||||
value: ['reply_conversation'],
|
||||
},
|
||||
},
|
||||
{
|
||||
@@ -558,7 +526,7 @@ Return ONLY the numeric timestamp - no explanations, no quotes, no extra text.`,
|
||||
required: true,
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: ['get_ticket', 'update_ticket'],
|
||||
value: ['get_ticket'],
|
||||
},
|
||||
},
|
||||
{
|
||||
@@ -831,307 +799,6 @@ Return ONLY the numeric timestamp - no explanations, no quotes, no extra text.`,
|
||||
value: ['list_companies'],
|
||||
},
|
||||
},
|
||||
// Close/Open conversation body
|
||||
{
|
||||
id: 'close_body',
|
||||
title: 'Closing Message',
|
||||
type: 'long-input',
|
||||
placeholder: 'Optional message to add when closing',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: ['close_conversation'],
|
||||
},
|
||||
},
|
||||
// Snooze conversation
|
||||
{
|
||||
id: 'snoozed_until',
|
||||
title: 'Snooze Until',
|
||||
type: 'short-input',
|
||||
placeholder: 'Unix timestamp when conversation should reopen',
|
||||
required: true,
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: ['snooze_conversation'],
|
||||
},
|
||||
wandConfig: {
|
||||
enabled: true,
|
||||
prompt: `Generate a Unix timestamp in seconds based on the user's description.
|
||||
The timestamp should be a Unix epoch time in seconds (10 digits).
|
||||
Examples:
|
||||
- "tomorrow" -> Tomorrow at 09:00:00 as Unix timestamp
|
||||
- "in 2 hours" -> Current time plus 7200 seconds
|
||||
- "next Monday" -> Next Monday at 09:00:00 as Unix timestamp
|
||||
|
||||
Return ONLY the numeric timestamp - no explanations, no quotes, no extra text.`,
|
||||
placeholder: 'Describe when to unsnooze (e.g., "tomorrow", "in 2 hours")...',
|
||||
generationType: 'timestamp',
|
||||
},
|
||||
},
|
||||
// Assign conversation
|
||||
{
|
||||
id: 'assignee_id',
|
||||
title: 'Assignee ID',
|
||||
type: 'short-input',
|
||||
placeholder: 'Admin or team ID to assign to (0 to unassign)',
|
||||
required: true,
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: ['assign_conversation'],
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'assign_body',
|
||||
title: 'Assignment Message',
|
||||
type: 'long-input',
|
||||
placeholder: 'Optional message when assigning',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: ['assign_conversation'],
|
||||
},
|
||||
},
|
||||
// Update ticket fields
|
||||
{
|
||||
id: 'update_ticket_attributes',
|
||||
title: 'Ticket Attributes',
|
||||
type: 'long-input',
|
||||
placeholder: 'JSON object with ticket attributes to update',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: ['update_ticket'],
|
||||
},
|
||||
wandConfig: {
|
||||
enabled: true,
|
||||
prompt: `Generate a JSON object for Intercom ticket attributes based on the user's description.
|
||||
Example: {"_default_title_": "Updated title", "_default_description_": "Updated description"}
|
||||
|
||||
Return ONLY the JSON object - no explanations or markdown formatting.`,
|
||||
placeholder: 'Describe the ticket updates (e.g., "change title to Bug Fixed")...',
|
||||
generationType: 'json-object',
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'ticket_open',
|
||||
title: 'Ticket Open',
|
||||
type: 'dropdown',
|
||||
options: [
|
||||
{ label: 'Keep Open', id: 'true' },
|
||||
{ label: 'Close Ticket', id: 'false' },
|
||||
],
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: ['update_ticket'],
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'ticket_is_shared',
|
||||
title: 'Ticket Visible to Users',
|
||||
type: 'dropdown',
|
||||
options: [
|
||||
{ label: 'Yes', id: 'true' },
|
||||
{ label: 'No', id: 'false' },
|
||||
],
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: ['update_ticket'],
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'ticket_snoozed_until',
|
||||
title: 'Snooze Ticket Until',
|
||||
type: 'short-input',
|
||||
placeholder: 'Unix timestamp when ticket should reopen',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: ['update_ticket'],
|
||||
},
|
||||
wandConfig: {
|
||||
enabled: true,
|
||||
prompt: `Generate a Unix timestamp in seconds based on the user's description.
|
||||
Examples:
|
||||
- "tomorrow" -> Tomorrow at 09:00:00 as Unix timestamp
|
||||
- "next week" -> 7 days from now
|
||||
|
||||
Return ONLY the numeric timestamp.`,
|
||||
placeholder: 'Describe when to unsnooze (e.g., "tomorrow")...',
|
||||
generationType: 'timestamp',
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'ticket_assignee_id',
|
||||
title: 'Ticket Assignee ID',
|
||||
type: 'short-input',
|
||||
placeholder: 'Admin or team ID to assign to (0 to unassign)',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: ['update_ticket'],
|
||||
},
|
||||
},
|
||||
// Tag fields
|
||||
{
|
||||
id: 'tagId',
|
||||
title: 'Tag ID',
|
||||
type: 'short-input',
|
||||
placeholder: 'ID of the tag',
|
||||
required: true,
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: ['tag_contact', 'untag_contact', 'tag_conversation'],
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'tag_name',
|
||||
title: 'Tag Name',
|
||||
type: 'short-input',
|
||||
placeholder: 'Name of the tag to create',
|
||||
required: true,
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: ['create_tag'],
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'tag_id_update',
|
||||
title: 'Tag ID (for update)',
|
||||
type: 'short-input',
|
||||
placeholder: 'ID of existing tag to update (leave empty to create new)',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: ['create_tag'],
|
||||
},
|
||||
},
|
||||
// Contact ID for tag/untag/note operations
|
||||
{
|
||||
id: 'tag_contact_id',
|
||||
title: 'Contact ID',
|
||||
type: 'short-input',
|
||||
placeholder: 'ID of the contact',
|
||||
required: true,
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: [
|
||||
'tag_contact',
|
||||
'untag_contact',
|
||||
'create_note',
|
||||
'attach_contact_to_company',
|
||||
'detach_contact_from_company',
|
||||
],
|
||||
},
|
||||
},
|
||||
// Note fields
|
||||
{
|
||||
id: 'note_body',
|
||||
title: 'Note Content',
|
||||
type: 'long-input',
|
||||
placeholder: 'Text content of the note',
|
||||
required: true,
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: ['create_note'],
|
||||
},
|
||||
wandConfig: {
|
||||
enabled: true,
|
||||
prompt: `Generate a note for Intercom based on the user's description.
|
||||
The note should be clear, professional, and capture the key information.
|
||||
|
||||
Return ONLY the note text - no explanations.`,
|
||||
placeholder: 'Describe the note content (e.g., "customer requested callback")...',
|
||||
},
|
||||
},
|
||||
// Event fields
|
||||
{
|
||||
id: 'event_name',
|
||||
title: 'Event Name',
|
||||
type: 'short-input',
|
||||
placeholder: 'Event name (e.g., order-completed)',
|
||||
required: true,
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: ['create_event'],
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'event_user_id',
|
||||
title: 'User ID',
|
||||
type: 'short-input',
|
||||
placeholder: 'Your identifier for the user',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: ['create_event'],
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'event_email',
|
||||
title: 'User Email',
|
||||
type: 'short-input',
|
||||
placeholder: 'Email address of the user',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: ['create_event'],
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'event_contact_id',
|
||||
title: 'Contact ID',
|
||||
type: 'short-input',
|
||||
placeholder: 'Intercom contact ID',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: ['create_event'],
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'event_metadata',
|
||||
title: 'Event Metadata',
|
||||
type: 'long-input',
|
||||
placeholder: 'JSON object with event metadata (max 10 keys)',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: ['create_event'],
|
||||
},
|
||||
wandConfig: {
|
||||
enabled: true,
|
||||
prompt: `Generate a JSON object for Intercom event metadata based on the user's description.
|
||||
The object should contain key-value pairs (max 10 keys).
|
||||
Example: {"order_value": 99.99, "items": 3, "coupon_used": true}
|
||||
|
||||
Return ONLY the JSON object - no explanations or markdown formatting.`,
|
||||
placeholder: 'Describe the event data (e.g., "order value $50, 2 items")...',
|
||||
generationType: 'json-object',
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'event_created_at',
|
||||
title: 'Event Time',
|
||||
type: 'short-input',
|
||||
placeholder: 'Unix timestamp when event occurred',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: ['create_event'],
|
||||
},
|
||||
wandConfig: {
|
||||
enabled: true,
|
||||
prompt: `Generate a Unix timestamp in seconds based on the user's description.
|
||||
Examples:
|
||||
- "now" -> Current Unix timestamp
|
||||
- "5 minutes ago" -> Current time minus 300 seconds
|
||||
|
||||
Return ONLY the numeric timestamp.`,
|
||||
placeholder: 'Describe when the event occurred (e.g., "now")...',
|
||||
generationType: 'timestamp',
|
||||
},
|
||||
},
|
||||
// Company attachment fields
|
||||
{
|
||||
id: 'attach_company_id',
|
||||
title: 'Company ID',
|
||||
type: 'short-input',
|
||||
placeholder: 'ID of the company to attach/detach',
|
||||
required: true,
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: ['attach_contact_to_company', 'detach_contact_from_company'],
|
||||
},
|
||||
},
|
||||
],
|
||||
tools: {
|
||||
access: [
|
||||
@@ -1151,21 +818,6 @@ Return ONLY the numeric timestamp.`,
|
||||
'intercom_create_ticket',
|
||||
'intercom_get_ticket',
|
||||
'intercom_create_message',
|
||||
'intercom_update_ticket_v2',
|
||||
'intercom_list_admins_v2',
|
||||
'intercom_close_conversation_v2',
|
||||
'intercom_open_conversation_v2',
|
||||
'intercom_snooze_conversation_v2',
|
||||
'intercom_assign_conversation_v2',
|
||||
'intercom_list_tags_v2',
|
||||
'intercom_create_tag_v2',
|
||||
'intercom_tag_contact_v2',
|
||||
'intercom_untag_contact_v2',
|
||||
'intercom_tag_conversation_v2',
|
||||
'intercom_create_note_v2',
|
||||
'intercom_create_event_v2',
|
||||
'intercom_attach_contact_to_company_v2',
|
||||
'intercom_detach_contact_from_company_v2',
|
||||
],
|
||||
config: {
|
||||
tool: (params) => {
|
||||
@@ -1202,36 +854,6 @@ Return ONLY the numeric timestamp.`,
|
||||
return 'intercom_get_ticket'
|
||||
case 'create_message':
|
||||
return 'intercom_create_message'
|
||||
case 'update_ticket':
|
||||
return 'intercom_update_ticket_v2'
|
||||
case 'list_admins':
|
||||
return 'intercom_list_admins_v2'
|
||||
case 'close_conversation':
|
||||
return 'intercom_close_conversation_v2'
|
||||
case 'open_conversation':
|
||||
return 'intercom_open_conversation_v2'
|
||||
case 'snooze_conversation':
|
||||
return 'intercom_snooze_conversation_v2'
|
||||
case 'assign_conversation':
|
||||
return 'intercom_assign_conversation_v2'
|
||||
case 'list_tags':
|
||||
return 'intercom_list_tags_v2'
|
||||
case 'create_tag':
|
||||
return 'intercom_create_tag_v2'
|
||||
case 'tag_contact':
|
||||
return 'intercom_tag_contact_v2'
|
||||
case 'untag_contact':
|
||||
return 'intercom_untag_contact_v2'
|
||||
case 'tag_conversation':
|
||||
return 'intercom_tag_conversation_v2'
|
||||
case 'create_note':
|
||||
return 'intercom_create_note_v2'
|
||||
case 'create_event':
|
||||
return 'intercom_create_event_v2'
|
||||
case 'attach_contact_to_company':
|
||||
return 'intercom_attach_contact_to_company_v2'
|
||||
case 'detach_contact_from_company':
|
||||
return 'intercom_detach_contact_from_company_v2'
|
||||
default:
|
||||
throw new Error(`Unknown operation: ${params.operation}`)
|
||||
}
|
||||
@@ -1248,23 +870,6 @@ Return ONLY the numeric timestamp.`,
|
||||
message_created_at,
|
||||
include_translations,
|
||||
disable_notifications,
|
||||
close_body,
|
||||
assign_body,
|
||||
tag_contact_id,
|
||||
attach_company_id,
|
||||
update_ticket_attributes,
|
||||
ticket_open,
|
||||
ticket_is_shared,
|
||||
ticket_snoozed_until,
|
||||
ticket_assignee_id,
|
||||
tag_name,
|
||||
tag_id_update,
|
||||
note_body,
|
||||
event_user_id,
|
||||
event_email,
|
||||
event_contact_id,
|
||||
event_metadata,
|
||||
event_created_at,
|
||||
...rest
|
||||
} = params
|
||||
const cleanParams: Record<string, any> = {}
|
||||
@@ -1292,7 +897,7 @@ Return ONLY the numeric timestamp.`,
|
||||
cleanParams.created_at = Number(reply_created_at)
|
||||
}
|
||||
|
||||
// Map ticket fields for create_ticket
|
||||
// Map ticket fields
|
||||
if (operation === 'create_ticket') {
|
||||
if (ticket_company_id) cleanParams.company_id = ticket_company_id
|
||||
if (ticket_created_at) cleanParams.created_at = Number(ticket_created_at)
|
||||
@@ -1315,71 +920,6 @@ Return ONLY the numeric timestamp.`,
|
||||
cleanParams.include_translations = include_translations === 'true'
|
||||
}
|
||||
|
||||
// Map close_body to body for close_conversation
|
||||
if (operation === 'close_conversation' && close_body) {
|
||||
cleanParams.body = close_body
|
||||
}
|
||||
|
||||
// Map assign_body to body for assign_conversation
|
||||
if (operation === 'assign_conversation' && assign_body) {
|
||||
cleanParams.body = assign_body
|
||||
}
|
||||
|
||||
// Map tag_contact_id to contactId for tag/note/company attachment operations
|
||||
if (
|
||||
[
|
||||
'tag_contact',
|
||||
'untag_contact',
|
||||
'create_note',
|
||||
'attach_contact_to_company',
|
||||
'detach_contact_from_company',
|
||||
].includes(operation) &&
|
||||
tag_contact_id
|
||||
) {
|
||||
cleanParams.contactId = tag_contact_id
|
||||
}
|
||||
|
||||
// Map attach_company_id to companyId for company attachment operations
|
||||
if (
|
||||
['attach_contact_to_company', 'detach_contact_from_company'].includes(operation) &&
|
||||
attach_company_id
|
||||
) {
|
||||
cleanParams.companyId = attach_company_id
|
||||
}
|
||||
|
||||
// Map update_ticket fields
|
||||
if (operation === 'update_ticket') {
|
||||
if (update_ticket_attributes) cleanParams.ticket_attributes = update_ticket_attributes
|
||||
if (ticket_open !== undefined && ticket_open !== '') {
|
||||
cleanParams.open = ticket_open === 'true'
|
||||
}
|
||||
if (ticket_is_shared !== undefined && ticket_is_shared !== '') {
|
||||
cleanParams.is_shared = ticket_is_shared === 'true'
|
||||
}
|
||||
if (ticket_snoozed_until) cleanParams.snoozed_until = Number(ticket_snoozed_until)
|
||||
if (ticket_assignee_id) cleanParams.assignee_id = ticket_assignee_id
|
||||
}
|
||||
|
||||
// Map tag fields for create_tag
|
||||
if (operation === 'create_tag') {
|
||||
if (tag_name) cleanParams.name = tag_name
|
||||
if (tag_id_update) cleanParams.id = tag_id_update
|
||||
}
|
||||
|
||||
// Map note_body to body for create_note
|
||||
if (operation === 'create_note' && note_body) {
|
||||
cleanParams.body = note_body
|
||||
}
|
||||
|
||||
// Map event fields for create_event
|
||||
if (operation === 'create_event') {
|
||||
if (event_user_id) cleanParams.user_id = event_user_id
|
||||
if (event_email) cleanParams.email = event_email
|
||||
if (event_contact_id) cleanParams.id = event_contact_id
|
||||
if (event_metadata) cleanParams.metadata = event_metadata
|
||||
if (event_created_at) cleanParams.created_at = Number(event_created_at)
|
||||
}
|
||||
|
||||
Object.entries(rest).forEach(([key, value]) => {
|
||||
if (value !== undefined && value !== null && value !== '') {
|
||||
cleanParams[key] = value
|
||||
@@ -1423,22 +963,7 @@ export const IntercomV2Block: BlockConfig = {
|
||||
'intercom_search_conversations_v2',
|
||||
'intercom_create_ticket_v2',
|
||||
'intercom_get_ticket_v2',
|
||||
'intercom_update_ticket_v2',
|
||||
'intercom_create_message_v2',
|
||||
'intercom_list_admins_v2',
|
||||
'intercom_close_conversation_v2',
|
||||
'intercom_open_conversation_v2',
|
||||
'intercom_snooze_conversation_v2',
|
||||
'intercom_assign_conversation_v2',
|
||||
'intercom_list_tags_v2',
|
||||
'intercom_create_tag_v2',
|
||||
'intercom_tag_contact_v2',
|
||||
'intercom_untag_contact_v2',
|
||||
'intercom_tag_conversation_v2',
|
||||
'intercom_create_note_v2',
|
||||
'intercom_create_event_v2',
|
||||
'intercom_attach_contact_to_company_v2',
|
||||
'intercom_detach_contact_from_company_v2',
|
||||
],
|
||||
config: {
|
||||
tool: createVersionedToolSelector({
|
||||
@@ -1474,38 +999,8 @@ export const IntercomV2Block: BlockConfig = {
|
||||
return 'intercom_create_ticket'
|
||||
case 'get_ticket':
|
||||
return 'intercom_get_ticket'
|
||||
case 'update_ticket':
|
||||
return 'intercom_update_ticket'
|
||||
case 'create_message':
|
||||
return 'intercom_create_message'
|
||||
case 'list_admins':
|
||||
return 'intercom_list_admins'
|
||||
case 'close_conversation':
|
||||
return 'intercom_close_conversation'
|
||||
case 'open_conversation':
|
||||
return 'intercom_open_conversation'
|
||||
case 'snooze_conversation':
|
||||
return 'intercom_snooze_conversation'
|
||||
case 'assign_conversation':
|
||||
return 'intercom_assign_conversation'
|
||||
case 'list_tags':
|
||||
return 'intercom_list_tags'
|
||||
case 'create_tag':
|
||||
return 'intercom_create_tag'
|
||||
case 'tag_contact':
|
||||
return 'intercom_tag_contact'
|
||||
case 'untag_contact':
|
||||
return 'intercom_untag_contact'
|
||||
case 'tag_conversation':
|
||||
return 'intercom_tag_conversation'
|
||||
case 'create_note':
|
||||
return 'intercom_create_note'
|
||||
case 'create_event':
|
||||
return 'intercom_create_event'
|
||||
case 'attach_contact_to_company':
|
||||
return 'intercom_attach_contact_to_company'
|
||||
case 'detach_contact_from_company':
|
||||
return 'intercom_detach_contact_from_company'
|
||||
default:
|
||||
return 'intercom_create_contact'
|
||||
}
|
||||
@@ -1513,158 +1008,7 @@ export const IntercomV2Block: BlockConfig = {
|
||||
suffix: '_v2',
|
||||
fallbackToolId: 'intercom_create_contact_v2',
|
||||
}),
|
||||
params: (params) => {
|
||||
const {
|
||||
operation,
|
||||
message_type_msg,
|
||||
company_name,
|
||||
contact_company_id,
|
||||
reply_created_at,
|
||||
ticket_company_id,
|
||||
ticket_created_at,
|
||||
message_created_at,
|
||||
include_translations,
|
||||
disable_notifications,
|
||||
close_body,
|
||||
assign_body,
|
||||
tag_contact_id,
|
||||
attach_company_id,
|
||||
update_ticket_attributes,
|
||||
ticket_open,
|
||||
ticket_is_shared,
|
||||
ticket_snoozed_until,
|
||||
ticket_assignee_id,
|
||||
tag_name,
|
||||
tag_id_update,
|
||||
note_body,
|
||||
event_user_id,
|
||||
event_email,
|
||||
event_contact_id,
|
||||
event_metadata,
|
||||
event_created_at,
|
||||
...rest
|
||||
} = params
|
||||
const cleanParams: Record<string, any> = {}
|
||||
|
||||
// Special mapping for message_type in create_message
|
||||
if (operation === 'create_message' && message_type_msg) {
|
||||
cleanParams.message_type = message_type_msg
|
||||
}
|
||||
|
||||
// Special mapping for company name
|
||||
if (operation === 'create_company' && company_name) {
|
||||
cleanParams.name = company_name
|
||||
}
|
||||
|
||||
// Map contact_company_id to company_id for contact operations
|
||||
if (
|
||||
(operation === 'create_contact' || operation === 'update_contact') &&
|
||||
contact_company_id
|
||||
) {
|
||||
cleanParams.company_id = contact_company_id
|
||||
}
|
||||
|
||||
// Map reply_created_at to created_at for reply_conversation
|
||||
if (operation === 'reply_conversation' && reply_created_at) {
|
||||
cleanParams.created_at = Number(reply_created_at)
|
||||
}
|
||||
|
||||
// Map ticket fields for create_ticket
|
||||
if (operation === 'create_ticket') {
|
||||
if (ticket_company_id) cleanParams.company_id = ticket_company_id
|
||||
if (ticket_created_at) cleanParams.created_at = Number(ticket_created_at)
|
||||
if (disable_notifications !== undefined && disable_notifications !== '') {
|
||||
cleanParams.disable_notifications = disable_notifications === 'true'
|
||||
}
|
||||
}
|
||||
|
||||
// Map message_created_at to created_at for create_message
|
||||
if (operation === 'create_message' && message_created_at) {
|
||||
cleanParams.created_at = Number(message_created_at)
|
||||
}
|
||||
|
||||
// Convert include_translations string to boolean for get_conversation
|
||||
if (
|
||||
operation === 'get_conversation' &&
|
||||
include_translations !== undefined &&
|
||||
include_translations !== ''
|
||||
) {
|
||||
cleanParams.include_translations = include_translations === 'true'
|
||||
}
|
||||
|
||||
// Map close_body to body for close_conversation
|
||||
if (operation === 'close_conversation' && close_body) {
|
||||
cleanParams.body = close_body
|
||||
}
|
||||
|
||||
// Map assign_body to body for assign_conversation
|
||||
if (operation === 'assign_conversation' && assign_body) {
|
||||
cleanParams.body = assign_body
|
||||
}
|
||||
|
||||
// Map tag_contact_id to contactId for tag/note/company attachment operations
|
||||
if (
|
||||
[
|
||||
'tag_contact',
|
||||
'untag_contact',
|
||||
'create_note',
|
||||
'attach_contact_to_company',
|
||||
'detach_contact_from_company',
|
||||
].includes(operation) &&
|
||||
tag_contact_id
|
||||
) {
|
||||
cleanParams.contactId = tag_contact_id
|
||||
}
|
||||
|
||||
// Map attach_company_id to companyId for company attachment operations
|
||||
if (
|
||||
['attach_contact_to_company', 'detach_contact_from_company'].includes(operation) &&
|
||||
attach_company_id
|
||||
) {
|
||||
cleanParams.companyId = attach_company_id
|
||||
}
|
||||
|
||||
// Map update_ticket fields
|
||||
if (operation === 'update_ticket') {
|
||||
if (update_ticket_attributes) cleanParams.ticket_attributes = update_ticket_attributes
|
||||
if (ticket_open !== undefined && ticket_open !== '') {
|
||||
cleanParams.open = ticket_open === 'true'
|
||||
}
|
||||
if (ticket_is_shared !== undefined && ticket_is_shared !== '') {
|
||||
cleanParams.is_shared = ticket_is_shared === 'true'
|
||||
}
|
||||
if (ticket_snoozed_until) cleanParams.snoozed_until = Number(ticket_snoozed_until)
|
||||
if (ticket_assignee_id) cleanParams.assignee_id = ticket_assignee_id
|
||||
}
|
||||
|
||||
// Map tag fields for create_tag
|
||||
if (operation === 'create_tag') {
|
||||
if (tag_name) cleanParams.name = tag_name
|
||||
if (tag_id_update) cleanParams.id = tag_id_update
|
||||
}
|
||||
|
||||
// Map note_body to body for create_note
|
||||
if (operation === 'create_note' && note_body) {
|
||||
cleanParams.body = note_body
|
||||
}
|
||||
|
||||
// Map event fields for create_event
|
||||
if (operation === 'create_event') {
|
||||
if (event_user_id) cleanParams.user_id = event_user_id
|
||||
if (event_email) cleanParams.email = event_email
|
||||
if (event_contact_id) cleanParams.id = event_contact_id
|
||||
if (event_metadata) cleanParams.metadata = event_metadata
|
||||
if (event_created_at) cleanParams.created_at = Number(event_created_at)
|
||||
}
|
||||
|
||||
Object.entries(rest).forEach(([key, value]) => {
|
||||
if (value !== undefined && value !== null && value !== '') {
|
||||
cleanParams[key] = value
|
||||
}
|
||||
})
|
||||
|
||||
return cleanParams
|
||||
},
|
||||
params: IntercomBlock.tools!.config!.params,
|
||||
},
|
||||
},
|
||||
outputs: {
|
||||
@@ -1687,23 +1031,10 @@ export const IntercomV2Block: BlockConfig = {
|
||||
type: 'array',
|
||||
description: 'Array of conversations (for list/search operations)',
|
||||
},
|
||||
state: { type: 'string', description: 'Conversation state (for close/open/snooze operations)' },
|
||||
ticket: { type: 'json', description: 'Ticket object with id, ticket_id, ticket_state' },
|
||||
ticketId: { type: 'string', description: 'ID of the ticket (for create/update operations)' },
|
||||
ticket_state: { type: 'string', description: 'Ticket state (for update_ticket operation)' },
|
||||
ticketId: { type: 'string', description: 'ID of the ticket (for create operations)' },
|
||||
message: { type: 'json', description: 'Message object with id, type' },
|
||||
messageId: { type: 'string', description: 'ID of the message (for create operations)' },
|
||||
admins: { type: 'array', description: 'Array of admin objects (for list_admins operation)' },
|
||||
tags: { type: 'array', description: 'Array of tag objects (for list_tags operation)' },
|
||||
tag: { type: 'json', description: 'Tag object with id and name (for tag operations)' },
|
||||
tagId: { type: 'string', description: 'ID of the tag (for create_tag operation)' },
|
||||
note: { type: 'json', description: 'Note object with id and body (for create_note operation)' },
|
||||
noteId: { type: 'string', description: 'ID of the note (for create_note operation)' },
|
||||
event_name: {
|
||||
type: 'string',
|
||||
description: 'Name of the tracked event (for create_event operation)',
|
||||
},
|
||||
name: { type: 'string', description: 'Name of the resource (for various operations)' },
|
||||
total_count: { type: 'number', description: 'Total count (for list/search operations)' },
|
||||
pages: { type: 'json', description: 'Pagination info with page, per_page, total_pages' },
|
||||
id: { type: 'string', description: 'ID of the deleted item (for delete operations)' },
|
||||
|
||||
@@ -1,18 +1,16 @@
|
||||
import { KalshiIcon } from '@/components/icons'
|
||||
import type { BlockConfig } from '@/blocks/types'
|
||||
import { AuthMode } from '@/blocks/types'
|
||||
import { createVersionedToolSelector } from '@/blocks/utils'
|
||||
|
||||
export const KalshiBlock: BlockConfig = {
|
||||
type: 'kalshi',
|
||||
name: 'Kalshi (Legacy)',
|
||||
name: 'Kalshi',
|
||||
description: 'Access prediction markets and trade on Kalshi',
|
||||
longDescription:
|
||||
'Integrate Kalshi prediction markets into the workflow. Can get markets, market, events, event, balance, positions, orders, orderbook, trades, candlesticks, fills, series, exchange status, and place/cancel/amend trades.',
|
||||
docsLink: 'https://docs.sim.ai/tools/kalshi',
|
||||
authMode: AuthMode.ApiKey,
|
||||
category: 'tools',
|
||||
hideFromToolbar: true,
|
||||
bgColor: '#09C285',
|
||||
icon: KalshiIcon,
|
||||
subBlocks: [
|
||||
@@ -351,14 +349,8 @@ Return ONLY the numeric timestamp (seconds since Unix epoch) - no explanations,
|
||||
id: 'count',
|
||||
title: 'Contracts',
|
||||
type: 'short-input',
|
||||
placeholder: 'Number of contracts (or use countFp)',
|
||||
condition: { field: 'operation', value: ['create_order'] },
|
||||
},
|
||||
{
|
||||
id: 'countFp',
|
||||
title: 'Contracts (Fixed-Point)',
|
||||
type: 'short-input',
|
||||
placeholder: 'Fixed-point count (e.g., "10.50")',
|
||||
placeholder: 'Number of contracts',
|
||||
required: true,
|
||||
condition: { field: 'operation', value: ['create_order'] },
|
||||
},
|
||||
{
|
||||
@@ -682,143 +674,3 @@ Return ONLY the numeric timestamp (seconds since Unix epoch) - no explanations,
|
||||
paging: { type: 'json', description: 'Pagination cursor for fetching more results' },
|
||||
},
|
||||
}
|
||||
|
||||
export const KalshiV2Block: BlockConfig = {
|
||||
...KalshiBlock,
|
||||
type: 'kalshi_v2',
|
||||
name: 'Kalshi',
|
||||
description: 'Access prediction markets and trade on Kalshi',
|
||||
longDescription:
|
||||
'Integrate Kalshi prediction markets into the workflow. Can get markets, market, events, event, balance, positions, orders, orderbook, trades, candlesticks, fills, series, exchange status, and place/cancel/amend trades.',
|
||||
hideFromToolbar: false,
|
||||
tools: {
|
||||
...KalshiBlock.tools,
|
||||
access: [
|
||||
'kalshi_get_markets_v2',
|
||||
'kalshi_get_market_v2',
|
||||
'kalshi_get_events_v2',
|
||||
'kalshi_get_event_v2',
|
||||
'kalshi_get_balance_v2',
|
||||
'kalshi_get_positions_v2',
|
||||
'kalshi_get_orders_v2',
|
||||
'kalshi_get_order_v2',
|
||||
'kalshi_get_orderbook_v2',
|
||||
'kalshi_get_trades_v2',
|
||||
'kalshi_get_candlesticks_v2',
|
||||
'kalshi_get_fills_v2',
|
||||
'kalshi_get_series_by_ticker_v2',
|
||||
'kalshi_get_exchange_status_v2',
|
||||
'kalshi_create_order_v2',
|
||||
'kalshi_cancel_order_v2',
|
||||
'kalshi_amend_order_v2',
|
||||
],
|
||||
config: {
|
||||
...KalshiBlock.tools!.config,
|
||||
tool: createVersionedToolSelector({
|
||||
baseToolSelector: (params) => {
|
||||
switch (params.operation) {
|
||||
case 'get_markets':
|
||||
return 'kalshi_get_markets'
|
||||
case 'get_market':
|
||||
return 'kalshi_get_market'
|
||||
case 'get_events':
|
||||
return 'kalshi_get_events'
|
||||
case 'get_event':
|
||||
return 'kalshi_get_event'
|
||||
case 'get_balance':
|
||||
return 'kalshi_get_balance'
|
||||
case 'get_positions':
|
||||
return 'kalshi_get_positions'
|
||||
case 'get_orders':
|
||||
return 'kalshi_get_orders'
|
||||
case 'get_order':
|
||||
return 'kalshi_get_order'
|
||||
case 'get_orderbook':
|
||||
return 'kalshi_get_orderbook'
|
||||
case 'get_trades':
|
||||
return 'kalshi_get_trades'
|
||||
case 'get_candlesticks':
|
||||
return 'kalshi_get_candlesticks'
|
||||
case 'get_fills':
|
||||
return 'kalshi_get_fills'
|
||||
case 'get_series_by_ticker':
|
||||
return 'kalshi_get_series_by_ticker'
|
||||
case 'get_exchange_status':
|
||||
return 'kalshi_get_exchange_status'
|
||||
case 'create_order':
|
||||
return 'kalshi_create_order'
|
||||
case 'cancel_order':
|
||||
return 'kalshi_cancel_order'
|
||||
case 'amend_order':
|
||||
return 'kalshi_amend_order'
|
||||
default:
|
||||
return 'kalshi_get_markets'
|
||||
}
|
||||
},
|
||||
suffix: '_v2',
|
||||
fallbackToolId: 'kalshi_get_markets_v2',
|
||||
}),
|
||||
},
|
||||
},
|
||||
outputs: {
|
||||
// List operations (V2 uses snake_case and flat cursor)
|
||||
markets: { type: 'json', description: 'Array of market objects (get_markets)' },
|
||||
events: { type: 'json', description: 'Array of event objects (get_events)' },
|
||||
orders: { type: 'json', description: 'Array of order objects (get_orders)' },
|
||||
market_positions: {
|
||||
type: 'json',
|
||||
description: 'Array of market position objects (get_positions)',
|
||||
},
|
||||
event_positions: {
|
||||
type: 'json',
|
||||
description: 'Array of event position objects (get_positions)',
|
||||
},
|
||||
fills: { type: 'json', description: 'Array of fill objects (get_fills)' },
|
||||
trades: { type: 'json', description: 'Array of trade objects (get_trades)' },
|
||||
candlesticks: {
|
||||
type: 'json',
|
||||
description: 'Array of candlestick data with yes_bid/yes_ask/price nested objects',
|
||||
},
|
||||
milestones: {
|
||||
type: 'json',
|
||||
description: 'Array of milestone objects (get_events with milestones)',
|
||||
},
|
||||
// Single item operations
|
||||
market: { type: 'json', description: 'Single market object (get_market)' },
|
||||
event: { type: 'json', description: 'Single event object (get_event)' },
|
||||
order: {
|
||||
type: 'json',
|
||||
description: 'Order object with _dollars and _fp fields (get_order, create_order, etc.)',
|
||||
},
|
||||
series: { type: 'json', description: 'Series object (get_series_by_ticker)' },
|
||||
// Account operations
|
||||
balance: { type: 'number', description: 'Account balance in cents (get_balance)' },
|
||||
portfolio_value: { type: 'number', description: 'Portfolio value in cents (get_balance)' },
|
||||
updated_ts: { type: 'number', description: 'Unix timestamp of last update (get_balance)' },
|
||||
// Orderbook (V2 uses tuple arrays)
|
||||
orderbook: {
|
||||
type: 'json',
|
||||
description: 'Orderbook with yes/no/yes_dollars/no_dollars tuple arrays',
|
||||
},
|
||||
orderbook_fp: {
|
||||
type: 'json',
|
||||
description: 'Fixed-point orderbook with yes_dollars/no_dollars tuple arrays',
|
||||
},
|
||||
// Exchange status
|
||||
exchange_status: {
|
||||
type: 'string',
|
||||
description: 'Exchange status string (get_exchange_status)',
|
||||
},
|
||||
trading_active: { type: 'boolean', description: 'Trading active flag (get_exchange_status)' },
|
||||
// Cancel order specific
|
||||
reduced_by: { type: 'number', description: 'Number of contracts reduced (cancel_order)' },
|
||||
reduced_by_fp: {
|
||||
type: 'string',
|
||||
description: 'Contracts reduced in fixed-point (cancel_order)',
|
||||
},
|
||||
// Candlesticks ticker
|
||||
ticker: { type: 'string', description: 'Market ticker (get_candlesticks)' },
|
||||
// Pagination (flat cursor instead of nested paging object)
|
||||
cursor: { type: 'string', description: 'Pagination cursor for fetching more results' },
|
||||
},
|
||||
}
|
||||
|
||||
@@ -6,7 +6,7 @@ export const PolymarketBlock: BlockConfig = {
|
||||
name: 'Polymarket',
|
||||
description: 'Access prediction markets data from Polymarket',
|
||||
longDescription:
|
||||
'Integrate Polymarket prediction markets into the workflow. Can get markets, market, events, event, tags, series, orderbook, price, midpoint, price history, last trade price, spread, tick size, positions, trades, activity, leaderboard, holders, and search.',
|
||||
'Integrate Polymarket prediction markets into the workflow. Can get markets, market, events, event, tags, series, orderbook, price, midpoint, price history, last trade price, spread, tick size, positions, trades, and search.',
|
||||
docsLink: 'https://docs.sim.ai/tools/polymarket',
|
||||
category: 'tools',
|
||||
bgColor: '#4C82FB',
|
||||
@@ -34,9 +34,6 @@ export const PolymarketBlock: BlockConfig = {
|
||||
{ label: 'Get Tick Size', id: 'get_tick_size' },
|
||||
{ label: 'Get Positions', id: 'get_positions' },
|
||||
{ label: 'Get Trades', id: 'get_trades' },
|
||||
{ label: 'Get Activity', id: 'get_activity' },
|
||||
{ label: 'Get Leaderboard', id: 'get_leaderboard' },
|
||||
{ label: 'Get Market Holders', id: 'get_holders' },
|
||||
],
|
||||
value: () => 'get_markets',
|
||||
},
|
||||
@@ -104,281 +101,14 @@ export const PolymarketBlock: BlockConfig = {
|
||||
placeholder: 'Wallet address (optional filter)',
|
||||
condition: { field: 'operation', value: ['get_trades'] },
|
||||
},
|
||||
// Market/Event filter for positions and trades
|
||||
// Market filter for positions and trades
|
||||
{
|
||||
id: 'market',
|
||||
title: 'Condition ID',
|
||||
title: 'Market ID',
|
||||
type: 'short-input',
|
||||
placeholder: 'Condition ID filter (comma-separated)',
|
||||
placeholder: 'Market ID (optional filter)',
|
||||
condition: { field: 'operation', value: ['get_positions', 'get_trades'] },
|
||||
},
|
||||
{
|
||||
id: 'positionEventId',
|
||||
title: 'Event ID',
|
||||
type: 'short-input',
|
||||
placeholder: 'Event ID filter (alternative to Condition ID)',
|
||||
condition: { field: 'operation', value: ['get_positions', 'get_trades'] },
|
||||
},
|
||||
// Positions-specific filters
|
||||
{
|
||||
id: 'sizeThreshold',
|
||||
title: 'Size Threshold',
|
||||
type: 'short-input',
|
||||
placeholder: 'Minimum position size (default: 1)',
|
||||
condition: { field: 'operation', value: ['get_positions'] },
|
||||
},
|
||||
{
|
||||
id: 'redeemable',
|
||||
title: 'Redeemable',
|
||||
type: 'dropdown',
|
||||
options: [
|
||||
{ label: 'All', id: '' },
|
||||
{ label: 'Redeemable Only', id: 'true' },
|
||||
{ label: 'Non-Redeemable Only', id: 'false' },
|
||||
],
|
||||
condition: { field: 'operation', value: ['get_positions'] },
|
||||
},
|
||||
{
|
||||
id: 'mergeable',
|
||||
title: 'Mergeable',
|
||||
type: 'dropdown',
|
||||
options: [
|
||||
{ label: 'All', id: '' },
|
||||
{ label: 'Mergeable Only', id: 'true' },
|
||||
{ label: 'Non-Mergeable Only', id: 'false' },
|
||||
],
|
||||
condition: { field: 'operation', value: ['get_positions'] },
|
||||
},
|
||||
{
|
||||
id: 'positionSortBy',
|
||||
title: 'Sort By',
|
||||
type: 'dropdown',
|
||||
options: [
|
||||
{ label: 'Default', id: '' },
|
||||
{ label: 'Tokens', id: 'TOKENS' },
|
||||
{ label: 'Current Value', id: 'CURRENT' },
|
||||
{ label: 'Initial Value', id: 'INITIAL' },
|
||||
{ label: 'Cash P&L', id: 'CASHPNL' },
|
||||
{ label: 'Percent P&L', id: 'PERCENTPNL' },
|
||||
{ label: 'Title', id: 'TITLE' },
|
||||
{ label: 'Price', id: 'PRICE' },
|
||||
{ label: 'Avg Price', id: 'AVGPRICE' },
|
||||
],
|
||||
condition: { field: 'operation', value: ['get_positions'] },
|
||||
},
|
||||
{
|
||||
id: 'positionSortDirection',
|
||||
title: 'Sort Direction',
|
||||
type: 'dropdown',
|
||||
options: [
|
||||
{ label: 'Descending', id: 'DESC' },
|
||||
{ label: 'Ascending', id: 'ASC' },
|
||||
],
|
||||
condition: { field: 'operation', value: ['get_positions'] },
|
||||
},
|
||||
{
|
||||
id: 'positionTitle',
|
||||
title: 'Title Filter',
|
||||
type: 'short-input',
|
||||
placeholder: 'Search by title',
|
||||
condition: { field: 'operation', value: ['get_positions'] },
|
||||
},
|
||||
// Trades-specific filters
|
||||
{
|
||||
id: 'tradeSide',
|
||||
title: 'Trade Side',
|
||||
type: 'dropdown',
|
||||
options: [
|
||||
{ label: 'All', id: '' },
|
||||
{ label: 'Buy', id: 'BUY' },
|
||||
{ label: 'Sell', id: 'SELL' },
|
||||
],
|
||||
condition: { field: 'operation', value: ['get_trades'] },
|
||||
},
|
||||
{
|
||||
id: 'takerOnly',
|
||||
title: 'Taker Only',
|
||||
type: 'dropdown',
|
||||
options: [
|
||||
{ label: 'Yes (default)', id: 'true' },
|
||||
{ label: 'No', id: 'false' },
|
||||
],
|
||||
condition: { field: 'operation', value: ['get_trades'] },
|
||||
},
|
||||
{
|
||||
id: 'filterType',
|
||||
title: 'Filter Type',
|
||||
type: 'dropdown',
|
||||
options: [
|
||||
{ label: 'None', id: '' },
|
||||
{ label: 'Cash', id: 'CASH' },
|
||||
{ label: 'Tokens', id: 'TOKENS' },
|
||||
],
|
||||
condition: { field: 'operation', value: ['get_trades'] },
|
||||
},
|
||||
{
|
||||
id: 'filterAmount',
|
||||
title: 'Filter Amount',
|
||||
type: 'short-input',
|
||||
placeholder: 'Minimum amount threshold',
|
||||
condition: { field: 'operation', value: ['get_trades'] },
|
||||
},
|
||||
// Activity-specific fields
|
||||
{
|
||||
id: 'activityUser',
|
||||
title: 'User Wallet Address',
|
||||
type: 'short-input',
|
||||
placeholder: 'Wallet address (0x-prefixed)',
|
||||
required: true,
|
||||
condition: { field: 'operation', value: ['get_activity'] },
|
||||
},
|
||||
{
|
||||
id: 'activityType',
|
||||
title: 'Activity Type',
|
||||
type: 'dropdown',
|
||||
options: [
|
||||
{ label: 'All', id: '' },
|
||||
{ label: 'Trade', id: 'TRADE' },
|
||||
{ label: 'Split', id: 'SPLIT' },
|
||||
{ label: 'Merge', id: 'MERGE' },
|
||||
{ label: 'Redeem', id: 'REDEEM' },
|
||||
{ label: 'Reward', id: 'REWARD' },
|
||||
{ label: 'Conversion', id: 'CONVERSION' },
|
||||
{ label: 'Maker Rebate', id: 'MAKER_REBATE' },
|
||||
],
|
||||
condition: { field: 'operation', value: ['get_activity'] },
|
||||
},
|
||||
{
|
||||
id: 'activityMarket',
|
||||
title: 'Condition ID',
|
||||
type: 'short-input',
|
||||
placeholder: 'Condition ID filter (comma-separated)',
|
||||
condition: { field: 'operation', value: ['get_activity'] },
|
||||
},
|
||||
{
|
||||
id: 'activityEventId',
|
||||
title: 'Event ID',
|
||||
type: 'short-input',
|
||||
placeholder: 'Event ID filter (comma-separated)',
|
||||
condition: { field: 'operation', value: ['get_activity'] },
|
||||
},
|
||||
{
|
||||
id: 'activitySide',
|
||||
title: 'Trade Side',
|
||||
type: 'dropdown',
|
||||
options: [
|
||||
{ label: 'All', id: '' },
|
||||
{ label: 'Buy', id: 'BUY' },
|
||||
{ label: 'Sell', id: 'SELL' },
|
||||
],
|
||||
condition: { field: 'operation', value: ['get_activity'] },
|
||||
},
|
||||
{
|
||||
id: 'activitySortBy',
|
||||
title: 'Sort By',
|
||||
type: 'dropdown',
|
||||
options: [
|
||||
{ label: 'Timestamp', id: 'TIMESTAMP' },
|
||||
{ label: 'Tokens', id: 'TOKENS' },
|
||||
{ label: 'Cash', id: 'CASH' },
|
||||
],
|
||||
condition: { field: 'operation', value: ['get_activity'] },
|
||||
},
|
||||
{
|
||||
id: 'activitySortDirection',
|
||||
title: 'Sort Direction',
|
||||
type: 'dropdown',
|
||||
options: [
|
||||
{ label: 'Descending', id: 'DESC' },
|
||||
{ label: 'Ascending', id: 'ASC' },
|
||||
],
|
||||
condition: { field: 'operation', value: ['get_activity'] },
|
||||
},
|
||||
{
|
||||
id: 'activityStart',
|
||||
title: 'Start Timestamp',
|
||||
type: 'short-input',
|
||||
placeholder: 'Unix timestamp (seconds)',
|
||||
condition: { field: 'operation', value: ['get_activity'] },
|
||||
},
|
||||
{
|
||||
id: 'activityEnd',
|
||||
title: 'End Timestamp',
|
||||
type: 'short-input',
|
||||
placeholder: 'Unix timestamp (seconds)',
|
||||
condition: { field: 'operation', value: ['get_activity'] },
|
||||
},
|
||||
// Leaderboard-specific fields
|
||||
{
|
||||
id: 'leaderboardCategory',
|
||||
title: 'Category',
|
||||
type: 'dropdown',
|
||||
options: [
|
||||
{ label: 'Overall', id: 'OVERALL' },
|
||||
{ label: 'Politics', id: 'POLITICS' },
|
||||
{ label: 'Sports', id: 'SPORTS' },
|
||||
{ label: 'Crypto', id: 'CRYPTO' },
|
||||
{ label: 'Culture', id: 'CULTURE' },
|
||||
{ label: 'Mentions', id: 'MENTIONS' },
|
||||
{ label: 'Weather', id: 'WEATHER' },
|
||||
{ label: 'Economics', id: 'ECONOMICS' },
|
||||
{ label: 'Tech', id: 'TECH' },
|
||||
{ label: 'Finance', id: 'FINANCE' },
|
||||
],
|
||||
condition: { field: 'operation', value: ['get_leaderboard'] },
|
||||
},
|
||||
{
|
||||
id: 'leaderboardTimePeriod',
|
||||
title: 'Time Period',
|
||||
type: 'dropdown',
|
||||
options: [
|
||||
{ label: 'Day', id: 'DAY' },
|
||||
{ label: 'Week', id: 'WEEK' },
|
||||
{ label: 'Month', id: 'MONTH' },
|
||||
{ label: 'All Time', id: 'ALL' },
|
||||
],
|
||||
condition: { field: 'operation', value: ['get_leaderboard'] },
|
||||
},
|
||||
{
|
||||
id: 'leaderboardOrderBy',
|
||||
title: 'Order By',
|
||||
type: 'dropdown',
|
||||
options: [
|
||||
{ label: 'Profit/Loss', id: 'PNL' },
|
||||
{ label: 'Volume', id: 'VOL' },
|
||||
],
|
||||
condition: { field: 'operation', value: ['get_leaderboard'] },
|
||||
},
|
||||
{
|
||||
id: 'leaderboardUser',
|
||||
title: 'User Address',
|
||||
type: 'short-input',
|
||||
placeholder: 'Filter by specific user wallet',
|
||||
condition: { field: 'operation', value: ['get_leaderboard'] },
|
||||
},
|
||||
{
|
||||
id: 'leaderboardUserName',
|
||||
title: 'Username',
|
||||
type: 'short-input',
|
||||
placeholder: 'Filter by username',
|
||||
condition: { field: 'operation', value: ['get_leaderboard'] },
|
||||
},
|
||||
// Market Holders-specific fields
|
||||
{
|
||||
id: 'holdersMarket',
|
||||
title: 'Condition ID',
|
||||
type: 'short-input',
|
||||
placeholder: 'Condition ID (comma-separated)',
|
||||
required: true,
|
||||
condition: { field: 'operation', value: ['get_holders'] },
|
||||
},
|
||||
{
|
||||
id: 'holdersMinBalance',
|
||||
title: 'Min Balance',
|
||||
type: 'short-input',
|
||||
placeholder: 'Minimum balance threshold (default: 1)',
|
||||
condition: { field: 'operation', value: ['get_holders'] },
|
||||
},
|
||||
// Token ID for CLOB operations
|
||||
{
|
||||
id: 'tokenId',
|
||||
@@ -475,11 +205,11 @@ Return ONLY the Unix timestamp as a number - no explanations, no quotes, no extr
|
||||
// Filters for list operations
|
||||
{
|
||||
id: 'closed',
|
||||
title: 'Closed Status',
|
||||
title: 'Status',
|
||||
type: 'dropdown',
|
||||
options: [
|
||||
{ label: 'All', id: '' },
|
||||
{ label: 'Open Only', id: 'false' },
|
||||
{ label: 'Active Only', id: 'false' },
|
||||
{ label: 'Closed Only', id: 'true' },
|
||||
],
|
||||
condition: { field: 'operation', value: ['get_markets', 'get_events'] },
|
||||
@@ -539,18 +269,7 @@ Return ONLY the Unix timestamp as a number - no explanations, no quotes, no extr
|
||||
placeholder: 'Number of results (max 50)',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: [
|
||||
'get_markets',
|
||||
'get_events',
|
||||
'get_tags',
|
||||
'search',
|
||||
'get_series',
|
||||
'get_trades',
|
||||
'get_positions',
|
||||
'get_activity',
|
||||
'get_leaderboard',
|
||||
'get_holders',
|
||||
],
|
||||
value: ['get_markets', 'get_events', 'get_tags', 'search', 'get_series', 'get_trades'],
|
||||
},
|
||||
},
|
||||
{
|
||||
@@ -560,25 +279,9 @@ Return ONLY the Unix timestamp as a number - no explanations, no quotes, no extr
|
||||
placeholder: 'Pagination offset',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: [
|
||||
'get_markets',
|
||||
'get_events',
|
||||
'get_tags',
|
||||
'get_series',
|
||||
'get_trades',
|
||||
'get_positions',
|
||||
'get_activity',
|
||||
'get_leaderboard',
|
||||
],
|
||||
value: ['get_markets', 'get_events', 'get_tags', 'search', 'get_series', 'get_trades'],
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'page',
|
||||
title: 'Page',
|
||||
type: 'short-input',
|
||||
placeholder: 'Page number (1-indexed)',
|
||||
condition: { field: 'operation', value: ['search'] },
|
||||
},
|
||||
],
|
||||
tools: {
|
||||
access: [
|
||||
@@ -599,9 +302,6 @@ Return ONLY the Unix timestamp as a number - no explanations, no quotes, no extr
|
||||
'polymarket_get_tick_size',
|
||||
'polymarket_get_positions',
|
||||
'polymarket_get_trades',
|
||||
'polymarket_get_activity',
|
||||
'polymarket_get_leaderboard',
|
||||
'polymarket_get_holders',
|
||||
],
|
||||
config: {
|
||||
tool: (params) => {
|
||||
@@ -640,49 +340,12 @@ Return ONLY the Unix timestamp as a number - no explanations, no quotes, no extr
|
||||
return 'polymarket_get_positions'
|
||||
case 'get_trades':
|
||||
return 'polymarket_get_trades'
|
||||
case 'get_activity':
|
||||
return 'polymarket_get_activity'
|
||||
case 'get_leaderboard':
|
||||
return 'polymarket_get_leaderboard'
|
||||
case 'get_holders':
|
||||
return 'polymarket_get_holders'
|
||||
default:
|
||||
return 'polymarket_get_markets'
|
||||
}
|
||||
},
|
||||
params: (params) => {
|
||||
const {
|
||||
operation,
|
||||
marketSlug,
|
||||
eventSlug,
|
||||
orderEvents,
|
||||
order,
|
||||
positionEventId,
|
||||
tradeSide,
|
||||
positionSortBy,
|
||||
positionSortDirection,
|
||||
positionTitle,
|
||||
// Activity params
|
||||
activityUser,
|
||||
activityType,
|
||||
activityMarket,
|
||||
activityEventId,
|
||||
activitySide,
|
||||
activitySortBy,
|
||||
activitySortDirection,
|
||||
activityStart,
|
||||
activityEnd,
|
||||
// Leaderboard params
|
||||
leaderboardCategory,
|
||||
leaderboardTimePeriod,
|
||||
leaderboardOrderBy,
|
||||
leaderboardUser,
|
||||
leaderboardUserName,
|
||||
// Holders params
|
||||
holdersMarket,
|
||||
holdersMinBalance,
|
||||
...rest
|
||||
} = params
|
||||
const { operation, marketSlug, eventSlug, orderEvents, order, ...rest } = params
|
||||
const cleanParams: Record<string, any> = {}
|
||||
|
||||
// Map marketSlug to slug for get_market
|
||||
@@ -702,51 +365,6 @@ Return ONLY the Unix timestamp as a number - no explanations, no quotes, no extr
cleanParams.order = orderEvents
}

// Map positionEventId to eventId for positions and trades
if ((operation === 'get_positions' || operation === 'get_trades') && positionEventId) {
cleanParams.eventId = positionEventId
}

// Map tradeSide to side for trades
if (operation === 'get_trades' && tradeSide) {
cleanParams.side = tradeSide
}

// Map position-specific fields
if (operation === 'get_positions') {
if (positionSortBy) cleanParams.sortBy = positionSortBy
if (positionSortDirection) cleanParams.sortDirection = positionSortDirection
if (positionTitle) cleanParams.title = positionTitle
}

// Map activity-specific fields
if (operation === 'get_activity') {
if (activityUser) cleanParams.user = activityUser
if (activityType) cleanParams.type = activityType
if (activityMarket) cleanParams.market = activityMarket
if (activityEventId) cleanParams.eventId = activityEventId
if (activitySide) cleanParams.side = activitySide
if (activitySortBy) cleanParams.sortBy = activitySortBy
if (activitySortDirection) cleanParams.sortDirection = activitySortDirection
if (activityStart) cleanParams.start = Number(activityStart)
if (activityEnd) cleanParams.end = Number(activityEnd)
}

// Map leaderboard-specific fields
if (operation === 'get_leaderboard') {
if (leaderboardCategory) cleanParams.category = leaderboardCategory
if (leaderboardTimePeriod) cleanParams.timePeriod = leaderboardTimePeriod
if (leaderboardOrderBy) cleanParams.orderBy = leaderboardOrderBy
if (leaderboardUser) cleanParams.user = leaderboardUser
if (leaderboardUserName) cleanParams.userName = leaderboardUserName
}

// Map holders-specific fields
if (operation === 'get_holders') {
if (holdersMarket) cleanParams.market = holdersMarket
if (holdersMinBalance) cleanParams.minBalance = holdersMinBalance
}

// Convert numeric fields from string to number for get_price_history
if (operation === 'get_price_history') {
if (rest.fidelity) cleanParams.fidelity = Number(rest.fidelity)
@@ -776,55 +394,13 @@ Return ONLY the Unix timestamp as a number - no explanations, no quotes, no extr
seriesId: { type: 'string', description: 'Series ID' },
query: { type: 'string', description: 'Search query' },
user: { type: 'string', description: 'User wallet address' },
market: { type: 'string', description: 'Condition ID filter' },
positionEventId: { type: 'string', description: 'Event ID filter for positions/trades' },
market: { type: 'string', description: 'Market ID filter' },
tokenId: { type: 'string', description: 'CLOB Token ID' },
side: { type: 'string', description: 'Order side (buy/sell)' },
interval: { type: 'string', description: 'Price history interval' },
fidelity: { type: 'number', description: 'Data resolution in minutes' },
startTs: { type: 'number', description: 'Start timestamp (Unix)' },
endTs: { type: 'number', description: 'End timestamp (Unix)' },
// Positions-specific inputs
sizeThreshold: { type: 'string', description: 'Minimum position size threshold' },
redeemable: { type: 'string', description: 'Filter by redeemable status' },
mergeable: { type: 'string', description: 'Filter by mergeable status' },
positionSortBy: { type: 'string', description: 'Sort positions by field' },
positionSortDirection: { type: 'string', description: 'Sort direction (ASC/DESC)' },
positionTitle: { type: 'string', description: 'Filter positions by title' },
// Trades-specific inputs
tradeSide: { type: 'string', description: 'Filter trades by side (BUY/SELL)' },
takerOnly: { type: 'string', description: 'Filter to taker trades only' },
filterType: { type: 'string', description: 'Trade filter type (CASH/TOKENS)' },
filterAmount: { type: 'string', description: 'Minimum trade amount threshold' },
// List operation filters
closed: { type: 'string', description: 'Filter by closed status' },
order: { type: 'string', description: 'Sort field for markets' },
orderEvents: { type: 'string', description: 'Sort field for events' },
ascending: { type: 'string', description: 'Sort order (true/false)' },
tagId: { type: 'string', description: 'Filter by tag ID' },
// Pagination
limit: { type: 'string', description: 'Number of results per page' },
offset: { type: 'string', description: 'Pagination offset' },
page: { type: 'string', description: 'Page number for search' },
// Activity-specific inputs
activityUser: { type: 'string', description: 'User wallet address for activity' },
activityType: { type: 'string', description: 'Activity type filter' },
activityMarket: { type: 'string', description: 'Condition ID filter for activity' },
activityEventId: { type: 'string', description: 'Event ID filter for activity' },
activitySide: { type: 'string', description: 'Trade side filter for activity' },
activitySortBy: { type: 'string', description: 'Sort field for activity' },
activitySortDirection: { type: 'string', description: 'Sort direction for activity' },
activityStart: { type: 'string', description: 'Start timestamp for activity' },
activityEnd: { type: 'string', description: 'End timestamp for activity' },
// Leaderboard-specific inputs
leaderboardCategory: { type: 'string', description: 'Leaderboard category' },
leaderboardTimePeriod: { type: 'string', description: 'Leaderboard time period' },
leaderboardOrderBy: { type: 'string', description: 'Leaderboard order by field' },
leaderboardUser: { type: 'string', description: 'Filter leaderboard by user' },
leaderboardUserName: { type: 'string', description: 'Filter leaderboard by username' },
// Holders-specific inputs
holdersMarket: { type: 'string', description: 'Condition ID for holders lookup' },
holdersMinBalance: { type: 'string', description: 'Minimum balance threshold' },
},
outputs: {
// List operations
@@ -846,19 +422,11 @@ Return ONLY the Unix timestamp as a number - no explanations, no quotes, no extr
description: 'Search results with markets, events, profiles (search)',
},
// CLOB operations
orderbook: {
type: 'json',
description: 'Order book with bids and asks (get_orderbook)',
},
orderbook: { type: 'json', description: 'Order book with bids and asks (get_orderbook)' },
price: { type: 'string', description: 'Market price (get_price, get_last_trade_price)' },
side: { type: 'string', description: 'Last trade side - BUY or SELL (get_last_trade_price)' },
midpoint: { type: 'string', description: 'Midpoint price (get_midpoint)' },
history: { type: 'json', description: 'Price history entries (get_price_history)' },
spread: { type: 'json', description: 'Spread value object (get_spread)' },
spread: { type: 'json', description: 'Bid-ask spread (get_spread)' },
tickSize: { type: 'string', description: 'Minimum tick size (get_tick_size)' },
// Data API operations
activity: { type: 'json', description: 'Array of user activity entries (get_activity)' },
leaderboard: { type: 'json', description: 'Array of leaderboard entries (get_leaderboard)' },
holders: { type: 'json', description: 'Array of market holder groups (get_holders)' },
},
}
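For reference, a minimal sketch of how the two resolvers above combine for the get_activity operation. This is an illustration only, not part of the diff; selectTool and buildParams are hypothetical stand-ins for the block's tools.config.tool and tools.config.params functions, using only the mappings shown in the hunks above:

// Hypothetical stand-ins (sketch, not repo code) for tools.config.tool / tools.config.params
const selectTool = (operation: string): string =>
  operation === 'get_activity' ? 'polymarket_get_activity' : 'polymarket_get_markets'

const buildParams = (params: Record<string, any>): Record<string, any> => {
  const cleanParams: Record<string, any> = {}
  if (params.operation === 'get_activity') {
    // Block inputs arrive as strings; timestamps are converted to numbers, as in the diff above
    if (params.activityUser) cleanParams.user = params.activityUser
    if (params.activityType) cleanParams.type = params.activityType
    if (params.activityStart) cleanParams.start = Number(params.activityStart)
    if (params.activityEnd) cleanParams.end = Number(params.activityEnd)
  }
  return cleanParams
}

// selectTool('get_activity')                                        => 'polymarket_get_activity'
// buildParams({ operation: 'get_activity', activityUser: '0xabc', activityStart: '1700000000' })
//                                                                   => { user: '0xabc', start: 1700000000 }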
@@ -58,7 +58,7 @@ import { IntercomBlock, IntercomV2Block } from '@/blocks/blocks/intercom'
import { JinaBlock } from '@/blocks/blocks/jina'
import { JiraBlock } from '@/blocks/blocks/jira'
import { JiraServiceManagementBlock } from '@/blocks/blocks/jira_service_management'
import { KalshiBlock, KalshiV2Block } from '@/blocks/blocks/kalshi'
import { KalshiBlock } from '@/blocks/blocks/kalshi'
import { KnowledgeBlock } from '@/blocks/blocks/knowledge'
import { LangsmithBlock } from '@/blocks/blocks/langsmith'
import { LemlistBlock } from '@/blocks/blocks/lemlist'
@@ -222,7 +222,6 @@ export const registry: Record<string, BlockConfig> = {
jira: JiraBlock,
jira_service_management: JiraServiceManagementBlock,
kalshi: KalshiBlock,
kalshi_v2: KalshiV2Block,
knowledge: KnowledgeBlock,
langsmith: LangsmithBlock,
lemlist: LemlistBlock,