Mirror of https://github.com/simstudioai/sim.git (synced 2026-01-11 07:58:06 -05:00)

Compare commits: improvemen...SIM-514-us (11 commits)

| SHA1 |
| --- |
| f81c0ba9bf |
| 6c10f31a40 |
| 896e9674c2 |
| f2450d3c26 |
| cfbe4a4790 |
| 1f22d7a9ec |
| 2259bfcb8f |
| 85af046754 |
| 57f3697dd5 |
| a15ac7360d |
| 93217438ef |
@@ -2452,56 +2452,6 @@ export const GeminiIcon = (props: SVGProps<SVGSVGElement>) => (
</svg>
)

export const VertexIcon = (props: SVGProps<SVGSVGElement>) => (
<svg
{...props}
id='standard_product_icon'
xmlns='http://www.w3.org/2000/svg'
version='1.1'
viewBox='0 0 512 512'
>
<g id='bounding_box'>
<rect width='512' height='512' fill='none' />
</g>
<g id='art'>
<path
d='M128,244.99c-8.84,0-16-7.16-16-16v-95.97c0-8.84,7.16-16,16-16s16,7.16,16,16v95.97c0,8.84-7.16,16-16,16Z'
fill='#ea4335'
/>
<path
d='M256,458c-2.98,0-5.97-.83-8.59-2.5l-186-122c-7.46-4.74-9.65-14.63-4.91-22.09,4.75-7.46,14.64-9.65,22.09-4.91l177.41,116.53,177.41-116.53c7.45-4.74,17.34-2.55,22.09,4.91,4.74,7.46,2.55,17.34-4.91,22.09l-186,122c-2.62,1.67-5.61,2.5-8.59,2.5Z'
fill='#fbbc04'
/>
<path
d='M256,388.03c-8.84,0-16-7.16-16-16v-73.06c0-8.84,7.16-16,16-16s16,7.16,16,16v73.06c0,8.84-7.16,16-16,16Z'
fill='#34a853'
/>
<circle cx='128' cy='70' r='16' fill='#ea4335' />
<circle cx='128' cy='292' r='16' fill='#ea4335' />
<path
d='M384.23,308.01c-8.82,0-15.98-7.14-16-15.97l-.23-94.01c-.02-8.84,7.13-16.02,15.97-16.03h.04c8.82,0,15.98,7.14,16,15.97l.23,94.01c.02,8.84-7.13,16.02-15.97,16.03h-.04Z'
fill='#4285f4'
/>
<circle cx='384' cy='70' r='16' fill='#4285f4' />
<circle cx='384' cy='134' r='16' fill='#4285f4' />
<path
d='M320,220.36c-8.84,0-16-7.16-16-16v-103.02c0-8.84,7.16-16,16-16s16,7.16,16,16v103.02c0,8.84-7.16,16-16,16Z'
fill='#fbbc04'
/>
<circle cx='256' cy='171' r='16' fill='#34a853' />
<circle cx='256' cy='235' r='16' fill='#34a853' />
<circle cx='320' cy='265' r='16' fill='#fbbc04' />
<circle cx='320' cy='329' r='16' fill='#fbbc04' />
<path
d='M192,217.36c-8.84,0-16-7.16-16-16v-100.02c0-8.84,7.16-16,16-16s16,7.16,16,16v100.02c0,8.84-7.16,16-16,16Z'
fill='#fbbc04'
/>
<circle cx='192' cy='265' r='16' fill='#fbbc04' />
<circle cx='192' cy='329' r='16' fill='#fbbc04' />
</g>
</svg>
)

export const CerebrasIcon = (props: SVGProps<SVGSVGElement>) => (
<svg
{...props}
@@ -3387,14 +3337,17 @@ export function SalesforceIcon(props: SVGProps<SVGSVGElement>) {

export function ServiceNowIcon(props: SVGProps<SVGSVGElement>) {
return (
<svg {...props} xmlns='http://www.w3.org/2000/svg' viewBox='0 0 71.1 63.6'>
<svg
{...props}
xmlns='http://www.w3.org/2000/svg'
viewBox='0 0 1570 1403'
width='48'
height='48'
>
<path
fill='#62d84e'
fillRule='evenodd'
clipRule='evenodd'
fill='#62D84E'
d='M35.8,0C16.1,0,0,15.9,0,35.6c0,9.8,4,19.3,11.2,26c2.5,2.4,6.4,2.6,9.2,0.5c9-6.7,21.4-6.7,30.4,0
c2.8,2.1,6.7,1.9,9.2-0.5C74.3,48,74.9,25.4,61.3,11.1C54.7,4.1,45.4,0.1,35.8,0 M35.6,53.5C26,53.8,18,46.2,17.8,36.7
c0-0.3,0-0.6,0-0.9c0-9.8,8-17.8,17.8-17.8s17.8,8,17.8,17.8c0.3,9.6-7.3,17.5-16.8,17.8C36.2,53.5,35.9,53.5,35.6,53.5'
d='M1228.4 138.9c129.2 88.9 228.9 214.3 286.3 360.2 57.5 145.8 70 305.5 36 458.5S1437.8 1250 1324 1357.9c-13.3 12.9-28.8 23.4-45.8 30.8-17 7.5-35.2 11.9-53.7 12.9-18.5 1.1-37.1-1.1-54.8-6.6-17.7-5.4-34.3-13.9-49.1-25.2-48.2-35.9-101.8-63.8-158.8-82.6-57.1-18.9-116.7-28.5-176.8-28.5s-119.8 9.6-176.8 28.5c-57 18.8-110.7 46.7-158.9 82.6-14.6 11.2-31 19.8-48.6 25.3s-36 7.8-54.4 6.8c-18.4-.9-36.5-5.1-53.4-12.4s-32.4-17.5-45.8-30.2C132.5 1251 53 1110.8 19 956.8s-20.9-314.6 37.6-461c58.5-146.5 159.6-272 290.3-360.3S631.8.1 789.6.5c156.8 1.3 309.6 49.6 438.8 138.4m-291.8 1014c48.2-19.2 92-48 128.7-84.6 36.7-36.7 65.5-80.4 84.7-128.6 19.2-48.1 28.4-99.7 27-151.5 0-103.9-41.3-203.5-114.8-277S889 396.4 785 396.4s-203.7 41.3-277.2 114.8S393 684.3 393 788.2c-1.4 51.8 7.8 103.4 27 151.5 19.2 48.2 48 91.9 84.7 128.6 36.7 36.6 80.5 65.4 128.6 84.6 48.2 19.2 99.8 28.4 151.7 27 51.8 1.4 103.4-7.8 151.6-27'
/>
</svg>
)

@@ -120,117 +120,117 @@ import {
type IconComponent = ComponentType<SVGProps<SVGSVGElement>>

export const blockTypeToIconMap: Record<string, IconComponent> = {
zoom: ZoomIcon,
zep: ZepIcon,
calendly: CalendlyIcon,
mailchimp: MailchimpIcon,
postgresql: PostgresIcon,
twilio_voice: TwilioIcon,
elasticsearch: ElasticsearchIcon,
rds: RDSIcon,
translate: TranslateIcon,
dynamodb: DynamoDBIcon,
wordpress: WordpressIcon,
tavily: TavilyIcon,
zendesk: ZendeskIcon,
youtube: YouTubeIcon,
x: xIcon,
wordpress: WordpressIcon,
wikipedia: WikipediaIcon,
whatsapp: WhatsAppIcon,
webflow: WebflowIcon,
wealthbox: WealthboxIcon,
vision: EyeIcon,
video_generator: VideoIcon,
typeform: TypeformIcon,
twilio_voice: TwilioIcon,
twilio_sms: TwilioIcon,
tts: TTSIcon,
trello: TrelloIcon,
translate: TranslateIcon,
thinking: BrainIcon,
telegram: TelegramIcon,
tavily: TavilyIcon,
supabase: SupabaseIcon,
stt: STTIcon,
stripe: StripeIcon,
stagehand: StagehandIcon,
ssh: SshIcon,
sqs: SQSIcon,
spotify: SpotifyIcon,
smtp: SmtpIcon,
slack: SlackIcon,
shopify: ShopifyIcon,
sharepoint: MicrosoftSharepointIcon,
sftp: SftpIcon,
servicenow: ServiceNowIcon,
serper: SerperIcon,
sentry: SentryIcon,
sendgrid: SendgridIcon,
search: SearchIcon,
salesforce: SalesforceIcon,
s3: S3Icon,
resend: ResendIcon,
reddit: RedditIcon,
rds: RDSIcon,
qdrant: QdrantIcon,
posthog: PosthogIcon,
postgresql: PostgresIcon,
polymarket: PolymarketIcon,
pipedrive: PipedriveIcon,
pinecone: PineconeIcon,
perplexity: PerplexityIcon,
parallel_ai: ParallelIcon,
outlook: OutlookIcon,
openai: OpenAIIcon,
onedrive: MicrosoftOneDriveIcon,
notion: NotionIcon,
neo4j: Neo4jIcon,
mysql: MySQLIcon,
mongodb: MongoDBIcon,
mistral_parse: MistralIcon,
microsoft_teams: MicrosoftTeamsIcon,
microsoft_planner: MicrosoftPlannerIcon,
microsoft_excel: MicrosoftExcelIcon,
memory: BrainIcon,
mem0: Mem0Icon,
mailgun: MailgunIcon,
mailchimp: MailchimpIcon,
linkup: LinkupIcon,
linkedin: LinkedInIcon,
linear: LinearIcon,
knowledge: PackageSearchIcon,
kalshi: KalshiIcon,
jira: JiraIcon,
jina: JinaAIIcon,
intercom: IntercomIcon,
incidentio: IncidentioIcon,
image_generator: ImageIcon,
hunter: HunterIOIcon,
huggingface: HuggingFaceIcon,
hubspot: HubspotIcon,
grafana: GrafanaIcon,
google_vault: GoogleVaultIcon,
google_slides: GoogleSlidesIcon,
google_sheets: GoogleSheetsIcon,
google_groups: GoogleGroupsIcon,
google_forms: GoogleFormsIcon,
google_drive: GoogleDriveIcon,
google_docs: GoogleDocsIcon,
google_calendar: GoogleCalendarIcon,
google_search: GoogleIcon,
gmail: GmailIcon,
gitlab: GitLabIcon,
github: GithubIcon,
firecrawl: FirecrawlIcon,
file: DocumentIcon,
exa: ExaAIIcon,
elevenlabs: ElevenLabsIcon,
elasticsearch: ElasticsearchIcon,
dynamodb: DynamoDBIcon,
duckduckgo: DuckDuckGoIcon,
dropbox: DropboxIcon,
discord: DiscordIcon,
datadog: DatadogIcon,
cursor: CursorIcon,
vision: EyeIcon,
zoom: ZoomIcon,
confluence: ConfluenceIcon,
clay: ClayIcon,
calendly: CalendlyIcon,
browser_use: BrowserUseIcon,
asana: AsanaIcon,
arxiv: ArxivIcon,
webflow: WebflowIcon,
pinecone: PineconeIcon,
apollo: ApolloIcon,
servicenow: ServiceNowIcon,
whatsapp: WhatsAppIcon,
typeform: TypeformIcon,
qdrant: QdrantIcon,
shopify: ShopifyIcon,
asana: AsanaIcon,
sqs: SQSIcon,
apify: ApifyIcon,
memory: BrainIcon,
gitlab: GitLabIcon,
polymarket: PolymarketIcon,
serper: SerperIcon,
linear: LinearIcon,
exa: ExaAIIcon,
telegram: TelegramIcon,
salesforce: SalesforceIcon,
hubspot: HubspotIcon,
hunter: HunterIOIcon,
linkup: LinkupIcon,
mongodb: MongoDBIcon,
airtable: AirtableIcon,
discord: DiscordIcon,
ahrefs: AhrefsIcon,
neo4j: Neo4jIcon,
tts: TTSIcon,
jina: JinaAIIcon,
google_docs: GoogleDocsIcon,
perplexity: PerplexityIcon,
google_search: GoogleIcon,
x: xIcon,
kalshi: KalshiIcon,
google_calendar: GoogleCalendarIcon,
zep: ZepIcon,
posthog: PosthogIcon,
grafana: GrafanaIcon,
google_slides: GoogleSlidesIcon,
microsoft_planner: MicrosoftPlannerIcon,
thinking: BrainIcon,
pipedrive: PipedriveIcon,
dropbox: DropboxIcon,
stagehand: StagehandIcon,
google_forms: GoogleFormsIcon,
file: DocumentIcon,
mistral_parse: MistralIcon,
gmail: GmailIcon,
openai: OpenAIIcon,
outlook: OutlookIcon,
incidentio: IncidentioIcon,
onedrive: MicrosoftOneDriveIcon,
resend: ResendIcon,
google_vault: GoogleVaultIcon,
sharepoint: MicrosoftSharepointIcon,
huggingface: HuggingFaceIcon,
sendgrid: SendgridIcon,
video_generator: VideoIcon,
smtp: SmtpIcon,
google_groups: GoogleGroupsIcon,
mailgun: MailgunIcon,
clay: ClayIcon,
jira: JiraIcon,
search: SearchIcon,
linkedin: LinkedInIcon,
wealthbox: WealthboxIcon,
notion: NotionIcon,
elevenlabs: ElevenLabsIcon,
microsoft_teams: MicrosoftTeamsIcon,
github: GithubIcon,
sftp: SftpIcon,
ssh: SshIcon,
google_drive: GoogleDriveIcon,
sentry: SentryIcon,
reddit: RedditIcon,
parallel_ai: ParallelIcon,
spotify: SpotifyIcon,
stripe: StripeIcon,
s3: S3Icon,
trello: TrelloIcon,
mem0: Mem0Icon,
knowledge: PackageSearchIcon,
intercom: IntercomIcon,
twilio_sms: TwilioIcon,
duckduckgo: DuckDuckGoIcon,
slack: SlackIcon,
datadog: DatadogIcon,
microsoft_excel: MicrosoftExcelIcon,
image_generator: ImageIcon,
google_sheets: GoogleSheetsIcon,
wikipedia: WikipediaIcon,
cursor: CursorIcon,
firecrawl: FirecrawlIcon,
mysql: MySQLIcon,
browser_use: BrowserUseIcon,
stt: STTIcon,
}
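
A minimal illustrative sketch of how a map like this is typically consumed; the `getBlockIcon` helper and its fallback argument are assumptions for illustration, not identifiers taken from this diff or repository:

```tsx
import type { ComponentType, SVGProps } from 'react'

// Hypothetical lookup helper: resolve the icon component registered for a
// block type, falling back to a caller-supplied generic icon when the type
// has no entry in blockTypeToIconMap.
type IconComponent = ComponentType<SVGProps<SVGSVGElement>>

const getBlockIcon = (blockType: string, fallback: IconComponent): IconComponent =>
  blockTypeToIconMap[blockType] ?? fallback
```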

@@ -111,24 +111,26 @@ Verschiedene Blocktypen erzeugen unterschiedliche Ausgabestrukturen. Hier ist, w

```json
{
"content": "Original content passed through",
"conditionResult": true,
"selectedPath": {
"blockId": "2acd9007-27e8-4510-a487-73d3b825e7c1",
"blockType": "agent",
"blockTitle": "Follow-up Agent"
},
"selectedOption": "condition-1"
"selectedConditionId": "condition-1"
}
```

### Ausgabefelder des Condition-Blocks

- **content**: Der ursprüngliche, durchgeleitete Inhalt
- **conditionResult**: Boolesches Ergebnis der Bedingungsauswertung
- **selectedPath**: Informationen über den ausgewählten Pfad
- **blockId**: ID des nächsten Blocks im ausgewählten Pfad
- **blockType**: Typ des nächsten Blocks
- **blockTitle**: Titel des nächsten Blocks
- **selectedOption**: ID der ausgewählten Bedingung
- **selectedConditionId**: ID der ausgewählten Bedingung

</Tab>
<Tab>

@@ -1,6 +1,7 @@
---
title: ServiceNow
description: ServiceNow-Datensätze erstellen, lesen, aktualisieren und löschen
description: Erstellen, lesen, aktualisieren, löschen und Massenimport von
ServiceNow-Datensätzen
---

import { BlockInfoCard } from "@/components/ui/block-info-card"
@@ -10,36 +11,22 @@ import { BlockInfoCard } from "@/components/ui/block-info-card"
color="#032D42"
/>

{/* MANUAL-CONTENT-START:intro */}
[ServiceNow](https://www.servicenow.com/) ist eine leistungsstarke Cloud-Plattform zur Optimierung und Automatisierung von IT-Service-Management (ITSM), Workflows und Geschäftsprozessen in Ihrem Unternehmen. ServiceNow ermöglicht Ihnen die Verwaltung von Vorfällen, Anfragen, Aufgaben, Benutzern und mehr über seine umfangreiche API.
## Nutzungsanleitung

Mit ServiceNow können Sie:

- **IT-Workflows automatisieren**: Datensätze in jeder ServiceNow-Tabelle erstellen, lesen, aktualisieren und löschen, z. B. Vorfälle, Aufgaben, Änderungsanfragen und Benutzer.
- **Systeme integrieren**: ServiceNow mit Ihren anderen Tools und Prozessen für nahtlose Automatisierung verbinden.
- **Eine einzige Informationsquelle pflegen**: Alle Ihre Service- und Betriebsdaten organisiert und zugänglich halten.
- **Betriebliche Effizienz steigern**: Manuelle Arbeit reduzieren und Servicequalität mit anpassbaren Workflows und Automatisierung verbessern.

In Sim ermöglicht die ServiceNow-Integration Ihren Agenten, direkt mit Ihrer ServiceNow-Instanz als Teil ihrer Workflows zu interagieren. Agenten können Datensätze in jeder ServiceNow-Tabelle erstellen, lesen, aktualisieren oder löschen und Ticket- oder Benutzerdaten für ausgefeilte Automatisierung und Entscheidungsfindung nutzen. Diese Integration verbindet Ihre Workflow-Automatisierung und IT-Betrieb und befähigt Ihre Agenten, Serviceanfragen, Vorfälle, Benutzer und Assets ohne manuelle Eingriffe zu verwalten. Durch die Verbindung von Sim mit ServiceNow können Sie Service-Management-Aufgaben automatisieren, Reaktionszeiten verbessern und konsistenten, sicheren Zugriff auf die wichtigen Servicedaten Ihres Unternehmens gewährleisten.
{/* MANUAL-CONTENT-END */}

## Nutzungsanweisungen

Integrieren Sie ServiceNow in Ihren Workflow. Erstellen, lesen, aktualisieren und löschen Sie Datensätze in jeder ServiceNow-Tabelle, einschließlich Vorfälle, Aufgaben, Änderungsanfragen, Benutzer und mehr.
Integrieren Sie ServiceNow in Ihren Workflow. Kann Datensätze in jeder ServiceNow-Tabelle erstellen, lesen, aktualisieren und löschen (Vorfälle, Aufgaben, Benutzer usw.). Unterstützt Massenimport-Operationen für Datenmigration und ETL.

## Tools

### `servicenow_create_record`

Einen neuen Datensatz in einer ServiceNow-Tabelle erstellen
Erstellen eines neuen Datensatzes in einer ServiceNow-Tabelle

#### Eingabe

| Parameter | Typ | Erforderlich | Beschreibung |
| --------- | ---- | -------- | ----------- |
| `instanceUrl` | string | Ja | ServiceNow-Instanz-URL \(z. B. https://instance.service-now.com\) |
| `username` | string | Ja | ServiceNow-Benutzername |
| `password` | string | Ja | ServiceNow-Passwort |
| `credential` | string | Nein | ServiceNow OAuth-Anmeldeinformations-ID |
| `tableName` | string | Ja | Tabellenname \(z. B. incident, task, sys_user\) |
| `fields` | json | Ja | Felder, die für den Datensatz festgelegt werden sollen \(JSON-Objekt\) |

@@ -52,15 +39,14 @@ Einen neuen Datensatz in einer ServiceNow-Tabelle erstellen

### `servicenow_read_record`

Datensätze aus einer ServiceNow-Tabelle lesen
Lesen von Datensätzen aus einer ServiceNow-Tabelle

#### Eingabe

| Parameter | Typ | Erforderlich | Beschreibung |
| --------- | ---- | -------- | ----------- |
| `instanceUrl` | string | Ja | ServiceNow-Instanz-URL \(z. B. https://instance.service-now.com\) |
| `username` | string | Ja | ServiceNow-Benutzername |
| `password` | string | Ja | ServiceNow-Passwort |
| `instanceUrl` | string | Nein | ServiceNow-Instanz-URL \(automatisch aus OAuth erkannt, falls nicht angegeben\) |
| `credential` | string | Nein | ServiceNow OAuth-Anmeldeinformations-ID |
| `tableName` | string | Ja | Tabellenname |
| `sysId` | string | Nein | Spezifische Datensatz-sys_id |
| `number` | string | Nein | Datensatznummer \(z. B. INC0010001\) |
@@ -83,11 +69,10 @@ Einen bestehenden Datensatz in einer ServiceNow-Tabelle aktualisieren

| Parameter | Typ | Erforderlich | Beschreibung |
| --------- | ---- | -------- | ----------- |
| `instanceUrl` | string | Ja | ServiceNow-Instanz-URL \(z. B. https://instance.service-now.com\) |
| `username` | string | Ja | ServiceNow-Benutzername |
| `password` | string | Ja | ServiceNow-Passwort |
| `instanceUrl` | string | Nein | ServiceNow-Instanz-URL \(wird automatisch aus OAuth erkannt, falls nicht angegeben\) |
| `credential` | string | Nein | ServiceNow-OAuth-Credential-ID |
| `tableName` | string | Ja | Tabellenname |
| `sysId` | string | Ja | Datensatz-sys_id zum Aktualisieren |
| `sysId` | string | Ja | Sys_id des zu aktualisierenden Datensatzes |
| `fields` | json | Ja | Zu aktualisierende Felder \(JSON-Objekt\) |

#### Ausgabe
@@ -105,11 +90,10 @@ Einen Datensatz aus einer ServiceNow-Tabelle löschen

| Parameter | Typ | Erforderlich | Beschreibung |
| --------- | ---- | -------- | ----------- |
| `instanceUrl` | string | Ja | ServiceNow-Instanz-URL \(z. B. https://instance.service-now.com\) |
| `username` | string | Ja | ServiceNow-Benutzername |
| `password` | string | Ja | ServiceNow-Passwort |
| `instanceUrl` | string | Nein | ServiceNow-Instanz-URL \(wird automatisch aus OAuth erkannt, falls nicht angegeben\) |
| `credential` | string | Nein | ServiceNow-OAuth-Credential-ID |
| `tableName` | string | Ja | Tabellenname |
| `sysId` | string | Ja | Datensatz-sys_id zum Löschen |
| `sysId` | string | Ja | Sys_id des zu löschenden Datensatzes |

#### Ausgabe

@@ -39,16 +39,14 @@ Senden Sie eine Chat-Completion-Anfrage an jeden unterstützten LLM-Anbieter

| Parameter | Typ | Erforderlich | Beschreibung |
| --------- | ---- | -------- | ----------- |
| `model` | string | Ja | Das zu verwendende Modell \(z. B. gpt-4o, claude-sonnet-4-5, gemini-2.0-flash\) |
| `systemPrompt` | string | Nein | System-Prompt zur Festlegung des Verhaltens des Assistenten |
| `context` | string | Ja | Die Benutzernachricht oder der Kontext, der an das Modell gesendet werden soll |
| `apiKey` | string | Nein | API-Schlüssel für den Anbieter \(verwendet Plattform-Schlüssel, falls nicht für gehostete Modelle angegeben\) |
| `temperature` | number | Nein | Temperatur für die Antwortgenerierung \(0-2\) |
| `maxTokens` | number | Nein | Maximale Anzahl von Tokens in der Antwort |
| `model` | string | Ja | Das zu verwendende Modell (z.B. gpt-4o, claude-sonnet-4-5, gemini-2.0-flash) |
| `systemPrompt` | string | Nein | System-Prompt zur Festlegung des Assistentenverhaltens |
| `context` | string | Ja | Die Benutzernachricht oder der Kontext, der an das Modell gesendet wird |
| `apiKey` | string | Nein | API-Schlüssel für den Anbieter (verwendet den Plattformschlüssel, wenn für gehostete Modelle nicht angegeben) |
| `temperature` | number | Nein | Temperatur für die Antwortgenerierung (0-2) |
| `maxTokens` | number | Nein | Maximale Tokens in der Antwort |
| `azureEndpoint` | string | Nein | Azure OpenAI-Endpunkt-URL |
| `azureApiVersion` | string | Nein | Azure OpenAI-API-Version |
| `vertexProject` | string | Nein | Google Cloud-Projekt-ID für Vertex AI |
| `vertexLocation` | string | Nein | Google Cloud-Standort für Vertex AI \(Standard: us-central1\) |
| `azureApiVersion` | string | Nein | Azure OpenAI API-Version |

#### Ausgabe

@@ -106,24 +106,26 @@ Different block types produce different output structures. Here's what you can e
<Tab>
```json
{
"content": "Original content passed through",
"conditionResult": true,
"selectedPath": {
"blockId": "2acd9007-27e8-4510-a487-73d3b825e7c1",
"blockType": "agent",
"blockTitle": "Follow-up Agent"
},
"selectedOption": "condition-1"
"selectedConditionId": "condition-1"
}
```

### Condition Block Output Fields

- **content**: The original content passed through
- **conditionResult**: Boolean result of the condition evaluation
- **selectedPath**: Information about the selected path
- **blockId**: ID of the next block in the selected path
- **blockType**: Type of the next block
- **blockTitle**: Title of the next block
- **selectedOption**: ID of the selected condition
- **selectedConditionId**: ID of the selected condition
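
The `selectedOption` → `selectedConditionId` rename shown in this hunk is the kind of change a downstream consumer may want to absorb defensively. A minimal sketch, assuming a loosely typed block output; the `ConditionOutput` shape and helper below are illustrative, not code from this repository:

```ts
// Prefer the new field name but fall back to the legacy one, so callers
// written against either version of the output keep working.
interface ConditionOutput {
  content: string
  conditionResult: boolean
  selectedConditionId?: string
  selectedOption?: string // legacy field name
}

const getSelectedConditionId = (output: ConditionOutput): string | undefined =>
  output.selectedConditionId ?? output.selectedOption
```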

</Tab>
<Tab>

@@ -1,6 +1,6 @@
---
title: ServiceNow
description: Create, read, update, and delete ServiceNow records
description: Create, read, update, delete, and bulk import ServiceNow records
---

import { BlockInfoCard } from "@/components/ui/block-info-card"
@@ -10,23 +10,9 @@ import { BlockInfoCard } from "@/components/ui/block-info-card"
color="#032D42"
/>

{/* MANUAL-CONTENT-START:intro */}
[ServiceNow](https://www.servicenow.com/) is a powerful cloud platform designed to streamline and automate IT service management (ITSM), workflows, and business processes across your organization. ServiceNow enables you to manage incidents, requests, tasks, users, and more using its extensive API.

With ServiceNow, you can:

- **Automate IT workflows**: Create, read, update, and delete records in any ServiceNow table, such as incidents, tasks, change requests, and users.
- **Integrate systems**: Connect ServiceNow with your other tools and processes for seamless automation.
- **Maintain a single source of truth**: Keep all your service and operations data organized and accessible.
- **Drive operational efficiency**: Reduce manual work and improve service quality with customizable workflows and automation.

In Sim, the ServiceNow integration enables your agents to interact directly with your ServiceNow instance as part of their workflows. Agents can create, read, update, or delete records in any ServiceNow table and leverage ticket or user data for sophisticated automation and decision-making. This integration bridges your workflow automation and IT operations, empowering your agents to manage service requests, incidents, users, and assets without manual intervention. By connecting Sim with ServiceNow, you can automate service management tasks, improve response times, and ensure consistent, secure access to your organization's vital service data.
{/* MANUAL-CONTENT-END */}

## Usage Instructions

Integrate ServiceNow into your workflow. Create, read, update, and delete records in any ServiceNow table including incidents, tasks, change requests, users, and more.
Integrate ServiceNow into your workflow. Can create, read, update, and delete records in any ServiceNow table (incidents, tasks, users, etc.). Supports bulk import operations for data migration and ETL.

@@ -41,8 +27,7 @@ Create a new record in a ServiceNow table
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `instanceUrl` | string | Yes | ServiceNow instance URL \(e.g., https://instance.service-now.com\) |
| `username` | string | Yes | ServiceNow username |
| `password` | string | Yes | ServiceNow password |
| `credential` | string | No | ServiceNow OAuth credential ID |
| `tableName` | string | Yes | Table name \(e.g., incident, task, sys_user\) |
| `fields` | json | Yes | Fields to set on the record \(JSON object\) |

@@ -61,9 +46,8 @@ Read records from a ServiceNow table

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `instanceUrl` | string | Yes | ServiceNow instance URL \(e.g., https://instance.service-now.com\) |
| `username` | string | Yes | ServiceNow username |
| `password` | string | Yes | ServiceNow password |
| `instanceUrl` | string | No | ServiceNow instance URL \(auto-detected from OAuth if not provided\) |
| `credential` | string | No | ServiceNow OAuth credential ID |
| `tableName` | string | Yes | Table name |
| `sysId` | string | No | Specific record sys_id |
| `number` | string | No | Record number \(e.g., INC0010001\) |
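
For orientation only: the read parameters above line up with ServiceNow's standard Table API, so an equivalent raw request looks roughly like the sketch below (basic-auth flavour, placeholder instance URL and credentials; this is not how Sim invokes the tool internally):

```ts
// Illustrative direct call to the ServiceNow Table API using the same inputs
// the tool documents (tableName, query, limit, fields).
async function readIncidents(instanceUrl: string, username: string, password: string) {
  const params = new URLSearchParams({
    sysparm_query: 'active=true^priority=1',            // encoded query
    sysparm_limit: '10',                                 // limit
    sysparm_fields: 'number,short_description,sys_id',  // fields to return
  })
  const res = await fetch(`${instanceUrl}/api/now/table/incident?${params}`, {
    headers: {
      Accept: 'application/json',
      Authorization: 'Basic ' + Buffer.from(`${username}:${password}`).toString('base64'),
    },
  })
  const { result } = await res.json() // ServiceNow wraps records in `result`
  return result
}
```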
@@ -86,9 +70,8 @@ Update an existing record in a ServiceNow table

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `instanceUrl` | string | Yes | ServiceNow instance URL \(e.g., https://instance.service-now.com\) |
| `username` | string | Yes | ServiceNow username |
| `password` | string | Yes | ServiceNow password |
| `instanceUrl` | string | No | ServiceNow instance URL \(auto-detected from OAuth if not provided\) |
| `credential` | string | No | ServiceNow OAuth credential ID |
| `tableName` | string | Yes | Table name |
| `sysId` | string | Yes | Record sys_id to update |
| `fields` | json | Yes | Fields to update \(JSON object\) |
@@ -108,9 +91,8 @@ Delete a record from a ServiceNow table

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `instanceUrl` | string | Yes | ServiceNow instance URL \(e.g., https://instance.service-now.com\) |
| `username` | string | Yes | ServiceNow username |
| `password` | string | Yes | ServiceNow password |
| `instanceUrl` | string | No | ServiceNow instance URL \(auto-detected from OAuth if not provided\) |
| `credential` | string | No | ServiceNow OAuth credential ID |
| `tableName` | string | Yes | Table name |
| `sysId` | string | Yes | Record sys_id to delete |

@@ -50,8 +50,6 @@ Send a chat completion request to any supported LLM provider
| `maxTokens` | number | No | Maximum tokens in the response |
| `azureEndpoint` | string | No | Azure OpenAI endpoint URL |
| `azureApiVersion` | string | No | Azure OpenAI API version |
| `vertexProject` | string | No | Google Cloud project ID for Vertex AI |
| `vertexLocation` | string | No | Google Cloud location for Vertex AI \(defaults to us-central1\) |

#### Output

@@ -111,24 +111,26 @@ Diferentes tipos de bloques producen diferentes estructuras de salida. Esto es l
|
||||
|
||||
```json
|
||||
{
|
||||
"content": "Original content passed through",
|
||||
"conditionResult": true,
|
||||
"selectedPath": {
|
||||
"blockId": "2acd9007-27e8-4510-a487-73d3b825e7c1",
|
||||
"blockType": "agent",
|
||||
"blockTitle": "Follow-up Agent"
|
||||
},
|
||||
"selectedOption": "condition-1"
|
||||
"selectedConditionId": "condition-1"
|
||||
}
|
||||
```
|
||||
|
||||
### Campos de salida del bloque de condición
|
||||
|
||||
- **conditionResult**: resultado booleano de la evaluación de la condición
|
||||
- **selectedPath**: información sobre la ruta seleccionada
|
||||
- **content**: El contenido original que se transmite
|
||||
- **conditionResult**: Resultado booleano de la evaluación de la condición
|
||||
- **selectedPath**: Información sobre la ruta seleccionada
|
||||
- **blockId**: ID del siguiente bloque en la ruta seleccionada
|
||||
- **blockType**: tipo del siguiente bloque
|
||||
- **blockTitle**: título del siguiente bloque
|
||||
- **selectedOption**: ID de la condición seleccionada
|
||||
- **blockType**: Tipo del siguiente bloque
|
||||
- **blockTitle**: Título del siguiente bloque
|
||||
- **selectedConditionId**: ID de la condición seleccionada
|
||||
|
||||
</Tab>
|
||||
<Tab>
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
---
|
||||
title: ServiceNow
|
||||
description: Crear, leer, actualizar y eliminar registros de ServiceNow
|
||||
description: Crea, lee, actualiza, elimina e importa masivamente registros de ServiceNow
|
||||
---
|
||||
|
||||
import { BlockInfoCard } from "@/components/ui/block-info-card"
|
||||
@@ -10,37 +10,23 @@ import { BlockInfoCard } from "@/components/ui/block-info-card"
|
||||
color="#032D42"
|
||||
/>
|
||||
|
||||
{/* MANUAL-CONTENT-START:intro */}
|
||||
[ServiceNow](https://www.servicenow.com/) es una potente plataforma en la nube diseñada para optimizar y automatizar la gestión de servicios de TI (ITSM), flujos de trabajo y procesos empresariales en toda tu organización. ServiceNow te permite gestionar incidencias, solicitudes, tareas, usuarios y más utilizando su amplia API.
|
||||
|
||||
Con ServiceNow, puedes:
|
||||
|
||||
- **Automatizar flujos de trabajo de TI**: crear, leer, actualizar y eliminar registros en cualquier tabla de ServiceNow, como incidencias, tareas, solicitudes de cambio y usuarios.
|
||||
- **Integrar sistemas**: conectar ServiceNow con tus otras herramientas y procesos para una automatización fluida.
|
||||
- **Mantener una única fuente de verdad**: mantener todos tus datos de servicio y operaciones organizados y accesibles.
|
||||
- **Impulsar la eficiencia operativa**: reducir el trabajo manual y mejorar la calidad del servicio con flujos de trabajo personalizables y automatización.
|
||||
|
||||
En Sim, la integración de ServiceNow permite que tus agentes interactúen directamente con tu instancia de ServiceNow como parte de sus flujos de trabajo. Los agentes pueden crear, leer, actualizar o eliminar registros en cualquier tabla de ServiceNow y aprovechar datos de tickets o usuarios para automatización y toma de decisiones sofisticadas. Esta integración conecta tu automatización de flujos de trabajo y operaciones de TI, permitiendo que tus agentes gestionen solicitudes de servicio, incidencias, usuarios y activos sin intervención manual. Al conectar Sim con ServiceNow, puedes automatizar tareas de gestión de servicios, mejorar los tiempos de respuesta y garantizar un acceso consistente y seguro a los datos de servicio vitales de tu organización.
|
||||
{/* MANUAL-CONTENT-END */}
|
||||
|
||||
## Instrucciones de uso
|
||||
|
||||
Integra ServiceNow en tu flujo de trabajo. Crea, lee, actualiza y elimina registros en cualquier tabla de ServiceNow, incluyendo incidencias, tareas, solicitudes de cambio, usuarios y más.
|
||||
Integra ServiceNow en tu flujo de trabajo. Puede crear, leer, actualizar y eliminar registros en cualquier tabla de ServiceNow (incidentes, tareas, usuarios, etc.). Admite operaciones de importación masiva para migración de datos y ETL.
|
||||
|
||||
## Herramientas
|
||||
|
||||
### `servicenow_create_record`
|
||||
|
||||
Crear un nuevo registro en una tabla de ServiceNow
|
||||
Crea un nuevo registro en una tabla de ServiceNow
|
||||
|
||||
#### Entrada
|
||||
|
||||
| Parámetro | Tipo | Requerido | Descripción |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `instanceUrl` | string | Sí | URL de la instancia de ServiceNow \(p. ej., https://instance.service-now.com\) |
|
||||
| `username` | string | Sí | Nombre de usuario de ServiceNow |
|
||||
| `password` | string | Sí | Contraseña de ServiceNow |
|
||||
| `tableName` | string | Sí | Nombre de la tabla \(p. ej., incident, task, sys_user\) |
|
||||
| `instanceUrl` | string | Sí | URL de la instancia de ServiceNow \(ej., https://instance.service-now.com\) |
|
||||
| `credential` | string | No | ID de credencial OAuth de ServiceNow |
|
||||
| `tableName` | string | Sí | Nombre de la tabla \(ej., incident, task, sys_user\) |
|
||||
| `fields` | json | Sí | Campos a establecer en el registro \(objeto JSON\) |
|
||||
|
||||
#### Salida
|
||||
@@ -52,19 +38,18 @@ Crear un nuevo registro en una tabla de ServiceNow
|
||||
|
||||
### `servicenow_read_record`
|
||||
|
||||
Leer registros de una tabla de ServiceNow
|
||||
Lee registros de una tabla de ServiceNow
|
||||
|
||||
#### Entrada
|
||||
|
||||
| Parámetro | Tipo | Requerido | Descripción |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `instanceUrl` | string | Sí | URL de la instancia de ServiceNow \(p. ej., https://instance.service-now.com\) |
|
||||
| `username` | string | Sí | Nombre de usuario de ServiceNow |
|
||||
| `password` | string | Sí | Contraseña de ServiceNow |
|
||||
| `instanceUrl` | string | No | URL de la instancia de ServiceNow \(detectada automáticamente desde OAuth si no se proporciona\) |
|
||||
| `credential` | string | No | ID de credencial OAuth de ServiceNow |
|
||||
| `tableName` | string | Sí | Nombre de la tabla |
|
||||
| `sysId` | string | No | sys_id del registro específico |
|
||||
| `number` | string | No | Número de registro \(p. ej., INC0010001\) |
|
||||
| `query` | string | No | Cadena de consulta codificada \(p. ej., "active=true^priority=1"\) |
|
||||
| `sysId` | string | No | sys_id específico del registro |
|
||||
| `number` | string | No | Número de registro \(ej., INC0010001\) |
|
||||
| `query` | string | No | Cadena de consulta codificada \(ej., "active=true^priority=1"\) |
|
||||
| `limit` | number | No | Número máximo de registros a devolver |
|
||||
| `fields` | string | No | Lista de campos separados por comas a devolver |
|
||||
|
||||
@@ -77,15 +62,14 @@ Leer registros de una tabla de ServiceNow
|
||||
|
||||
### `servicenow_update_record`
|
||||
|
||||
Actualiza un registro existente en una tabla de ServiceNow
|
||||
Actualizar un registro existente en una tabla de ServiceNow
|
||||
|
||||
#### Entrada
|
||||
|
||||
| Parámetro | Tipo | Requerido | Descripción |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `instanceUrl` | string | Sí | URL de la instancia de ServiceNow \(ej., https://instance.service-now.com\) |
|
||||
| `username` | string | Sí | Nombre de usuario de ServiceNow |
|
||||
| `password` | string | Sí | Contraseña de ServiceNow |
|
||||
| `instanceUrl` | string | No | URL de la instancia de ServiceNow \(detectada automáticamente desde OAuth si no se proporciona\) |
|
||||
| `credential` | string | No | ID de credencial OAuth de ServiceNow |
|
||||
| `tableName` | string | Sí | Nombre de la tabla |
|
||||
| `sysId` | string | Sí | sys_id del registro a actualizar |
|
||||
| `fields` | json | Sí | Campos a actualizar \(objeto JSON\) |
|
||||
@@ -99,15 +83,14 @@ Actualiza un registro existente en una tabla de ServiceNow
|
||||
|
||||
### `servicenow_delete_record`
|
||||
|
||||
Elimina un registro de una tabla de ServiceNow
|
||||
Eliminar un registro de una tabla de ServiceNow
|
||||
|
||||
#### Entrada
|
||||
|
||||
| Parámetro | Tipo | Requerido | Descripción |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `instanceUrl` | string | Sí | URL de la instancia de ServiceNow \(ej., https://instance.service-now.com\) |
|
||||
| `username` | string | Sí | Nombre de usuario de ServiceNow |
|
||||
| `password` | string | Sí | Contraseña de ServiceNow |
|
||||
| `instanceUrl` | string | No | URL de la instancia de ServiceNow \(detectada automáticamente desde OAuth si no se proporciona\) |
|
||||
| `credential` | string | No | ID de credencial OAuth de ServiceNow |
|
||||
| `tableName` | string | Sí | Nombre de la tabla |
|
||||
| `sysId` | string | Sí | sys_id del registro a eliminar |
|
||||
|
||||
|
||||
@@ -37,18 +37,16 @@ Envía una solicitud de completado de chat a cualquier proveedor de LLM compatib
|
||||
|
||||
#### Entrada
|
||||
|
||||
| Parámetro | Tipo | Requerido | Descripción |
|
||||
| Parámetro | Tipo | Obligatorio | Descripción |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `model` | string | Sí | El modelo a utilizar \(ej., gpt-4o, claude-sonnet-4-5, gemini-2.0-flash\) |
|
||||
| `model` | string | Sí | El modelo a utilizar \(p. ej., gpt-4o, claude-sonnet-4-5, gemini-2.0-flash\) |
|
||||
| `systemPrompt` | string | No | Prompt del sistema para establecer el comportamiento del asistente |
|
||||
| `context` | string | Sí | El mensaje del usuario o contexto a enviar al modelo |
|
||||
| `apiKey` | string | No | Clave API del proveedor \(usa la clave de la plataforma si no se proporciona para modelos alojados\) |
|
||||
| `context` | string | Sí | El mensaje del usuario o contexto para enviar al modelo |
|
||||
| `apiKey` | string | No | Clave API para el proveedor \(usa la clave de la plataforma si no se proporciona para modelos alojados\) |
|
||||
| `temperature` | number | No | Temperatura para la generación de respuestas \(0-2\) |
|
||||
| `maxTokens` | number | No | Tokens máximos en la respuesta |
|
||||
| `azureEndpoint` | string | No | URL del endpoint de Azure OpenAI |
|
||||
| `azureApiVersion` | string | No | Versión de la API de Azure OpenAI |
|
||||
| `vertexProject` | string | No | ID del proyecto de Google Cloud para Vertex AI |
|
||||
| `vertexLocation` | string | No | Ubicación de Google Cloud para Vertex AI \(por defecto us-central1\) |
|
||||
|
||||
#### Salida
|
||||
|
||||
|
||||
@@ -111,24 +111,26 @@ Différents types de blocs produisent différentes structures de sortie. Voici c
|
||||
|
||||
```json
|
||||
{
|
||||
"content": "Original content passed through",
|
||||
"conditionResult": true,
|
||||
"selectedPath": {
|
||||
"blockId": "2acd9007-27e8-4510-a487-73d3b825e7c1",
|
||||
"blockType": "agent",
|
||||
"blockTitle": "Follow-up Agent"
|
||||
},
|
||||
"selectedOption": "condition-1"
|
||||
"selectedConditionId": "condition-1"
|
||||
}
|
||||
```
|
||||
|
||||
### Champs de sortie du bloc de condition
|
||||
|
||||
- **content** : le contenu original transmis
|
||||
- **conditionResult** : résultat booléen de l'évaluation de la condition
|
||||
- **selectedPath** : informations sur le chemin sélectionné
|
||||
- **blockId** : ID du bloc suivant dans le chemin sélectionné
|
||||
- **blockType** : type du bloc suivant
|
||||
- **blockTitle** : titre du bloc suivant
|
||||
- **selectedOption** : ID de la condition sélectionnée
|
||||
- **selectedConditionId** : ID de la condition sélectionnée
|
||||
|
||||
</Tab>
|
||||
<Tab>
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
---
|
||||
title: ServiceNow
|
||||
description: Créer, lire, mettre à jour et supprimer des enregistrements ServiceNow
|
||||
description: Créer, lire, mettre à jour, supprimer et importer en masse des
|
||||
enregistrements ServiceNow
|
||||
---
|
||||
|
||||
import { BlockInfoCard } from "@/components/ui/block-info-card"
|
||||
@@ -10,22 +11,9 @@ import { BlockInfoCard } from "@/components/ui/block-info-card"
|
||||
color="#032D42"
|
||||
/>
|
||||
|
||||
{/* MANUAL-CONTENT-START:intro */}
|
||||
[ServiceNow](https://www.servicenow.com/) est une plateforme cloud puissante conçue pour rationaliser et automatiser la gestion des services informatiques (ITSM), les workflows et les processus métier au sein de votre organisation. ServiceNow vous permet de gérer les incidents, les demandes, les tâches, les utilisateurs et bien plus encore grâce à son API étendue.
|
||||
|
||||
Avec ServiceNow, vous pouvez :
|
||||
|
||||
- **Automatiser les workflows informatiques** : créer, lire, mettre à jour et supprimer des enregistrements dans n'importe quelle table ServiceNow, tels que les incidents, les tâches, les demandes de changement et les utilisateurs.
|
||||
- **Intégrer les systèmes** : connecter ServiceNow avec vos autres outils et processus pour une automatisation transparente.
|
||||
- **Maintenir une source unique de vérité** : garder toutes vos données de service et d'exploitation organisées et accessibles.
|
||||
- **Améliorer l'efficacité opérationnelle** : réduire le travail manuel et améliorer la qualité du service grâce à des workflows personnalisables et à l'automatisation.
|
||||
|
||||
Dans Sim, l'intégration ServiceNow permet à vos agents d'interagir directement avec votre instance ServiceNow dans le cadre de leurs workflows. Les agents peuvent créer, lire, mettre à jour ou supprimer des enregistrements dans n'importe quelle table ServiceNow et exploiter les données de tickets ou d'utilisateurs pour une automatisation et une prise de décision sophistiquées. Cette intégration relie votre automatisation de workflow et vos opérations informatiques, permettant à vos agents de gérer les demandes de service, les incidents, les utilisateurs et les actifs sans intervention manuelle. En connectant Sim avec ServiceNow, vous pouvez automatiser les tâches de gestion des services, améliorer les temps de réponse et garantir un accès cohérent et sécurisé aux données de service vitales de votre organisation.
|
||||
{/* MANUAL-CONTENT-END */}
|
||||
|
||||
## Instructions d'utilisation
|
||||
|
||||
Intégrez ServiceNow dans votre workflow. Créez, lisez, mettez à jour et supprimez des enregistrements dans n'importe quelle table ServiceNow, y compris les incidents, les tâches, les demandes de changement, les utilisateurs et bien plus encore.
|
||||
Intégrez ServiceNow dans votre flux de travail. Permet de créer, lire, mettre à jour et supprimer des enregistrements dans n'importe quelle table ServiceNow (incidents, tâches, utilisateurs, etc.). Prend en charge les opérations d'importation en masse pour la migration de données et l'ETL.
|
||||
|
||||
## Outils
|
||||
|
||||
@@ -37,11 +25,10 @@ Créer un nouvel enregistrement dans une table ServiceNow
|
||||
|
||||
| Paramètre | Type | Requis | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `instanceUrl` | string | Oui | URL de l'instance ServiceNow (par ex., https://instance.service-now.com) |
|
||||
| `username` | string | Oui | Nom d'utilisateur ServiceNow |
|
||||
| `password` | string | Oui | Mot de passe ServiceNow |
|
||||
| `tableName` | string | Oui | Nom de la table (par ex., incident, task, sys_user) |
|
||||
| `fields` | json | Oui | Champs à définir sur l'enregistrement (objet JSON) |
|
||||
| `instanceUrl` | string | Oui | URL de l'instance ServiceNow \(par exemple, https://instance.service-now.com\) |
|
||||
| `credential` | string | Non | ID d'identification OAuth ServiceNow |
|
||||
| `tableName` | string | Oui | Nom de la table \(par exemple, incident, task, sys_user\) |
|
||||
| `fields` | json | Oui | Champs à définir sur l'enregistrement \(objet JSON\) |
|
||||
|
||||
#### Sortie
|
||||
|
||||
@@ -58,21 +45,20 @@ Lire des enregistrements d'une table ServiceNow
|
||||
|
||||
| Paramètre | Type | Requis | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `instanceUrl` | string | Oui | URL de l'instance ServiceNow (par ex., https://instance.service-now.com) |
|
||||
| `username` | string | Oui | Nom d'utilisateur ServiceNow |
|
||||
| `password` | string | Oui | Mot de passe ServiceNow |
|
||||
| `instanceUrl` | string | Non | URL de l'instance ServiceNow \(détectée automatiquement depuis OAuth si non fournie\) |
|
||||
| `credential` | string | Non | ID d'identification OAuth ServiceNow |
|
||||
| `tableName` | string | Oui | Nom de la table |
|
||||
| `sysId` | string | Non | sys_id d'enregistrement spécifique |
|
||||
| `number` | string | Non | Numéro d'enregistrement (par ex., INC0010001) |
|
||||
| `query` | string | Non | Chaîne de requête encodée (par ex., "active=true^priority=1") |
|
||||
| `sysId` | string | Non | sys_id spécifique de l'enregistrement |
|
||||
| `number` | string | Non | Numéro d'enregistrement \(par exemple, INC0010001\) |
|
||||
| `query` | string | Non | Chaîne de requête encodée \(par exemple, "active=true^priority=1"\) |
|
||||
| `limit` | number | Non | Nombre maximum d'enregistrements à retourner |
|
||||
| `fields` | string | Non | Liste de champs à retourner, séparés par des virgules |
|
||||
| `fields` | string | Non | Liste de champs séparés par des virgules à retourner |
|
||||
|
||||
#### Sortie
|
||||
|
||||
| Paramètre | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `records` | array | Tableau d'enregistrements ServiceNow |
|
||||
| `records` | array | Tableau des enregistrements ServiceNow |
|
||||
| `metadata` | json | Métadonnées de l'opération |
|
||||
|
||||
### `servicenow_update_record`
|
||||
@@ -83,12 +69,11 @@ Mettre à jour un enregistrement existant dans une table ServiceNow
|
||||
|
||||
| Paramètre | Type | Requis | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `instanceUrl` | string | Oui | URL de l'instance ServiceNow \(par exemple, https://instance.service-now.com\) |
|
||||
| `username` | string | Oui | Nom d'utilisateur ServiceNow |
|
||||
| `password` | string | Oui | Mot de passe ServiceNow |
|
||||
| `instanceUrl` | string | Non | URL de l'instance ServiceNow (détectée automatiquement depuis OAuth si non fournie) |
|
||||
| `credential` | string | Non | ID des identifiants OAuth ServiceNow |
|
||||
| `tableName` | string | Oui | Nom de la table |
|
||||
| `sysId` | string | Oui | sys_id de l'enregistrement à mettre à jour |
|
||||
| `fields` | json | Oui | Champs à mettre à jour \(objet JSON\) |
|
||||
| `fields` | json | Oui | Champs à mettre à jour (objet JSON) |
|
||||
|
||||
#### Sortie
|
||||
|
||||
@@ -105,9 +90,8 @@ Supprimer un enregistrement d'une table ServiceNow
|
||||
|
||||
| Paramètre | Type | Requis | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `instanceUrl` | string | Oui | URL de l'instance ServiceNow \(par exemple, https://instance.service-now.com\) |
|
||||
| `username` | string | Oui | Nom d'utilisateur ServiceNow |
|
||||
| `password` | string | Oui | Mot de passe ServiceNow |
|
||||
| `instanceUrl` | string | Non | URL de l'instance ServiceNow (détectée automatiquement depuis OAuth si non fournie) |
|
||||
| `credential` | string | Non | ID des identifiants OAuth ServiceNow |
|
||||
| `tableName` | string | Oui | Nom de la table |
|
||||
| `sysId` | string | Oui | sys_id de l'enregistrement à supprimer |
|
||||
|
||||
@@ -118,7 +102,7 @@ Supprimer un enregistrement d'une table ServiceNow
|
||||
| `success` | boolean | Indique si la suppression a réussi |
|
||||
| `metadata` | json | Métadonnées de l'opération |
|
||||
|
||||
## Remarques
|
||||
## Notes
|
||||
|
||||
- Catégorie : `tools`
|
||||
- Type : `servicenow`
|
||||
|
||||
@@ -37,18 +37,16 @@ Envoyez une requête de complétion de chat à n'importe quel fournisseur de LLM
|
||||
|
||||
#### Entrée
|
||||
|
||||
| Paramètre | Type | Requis | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `model` | string | Oui | Le modèle à utiliser \(par exemple, gpt-4o, claude-sonnet-4-5, gemini-2.0-flash\) |
|
||||
| `systemPrompt` | string | Non | Prompt système pour définir le comportement de l'assistant |
|
||||
| `context` | string | Oui | Le message utilisateur ou le contexte à envoyer au modèle |
|
||||
| `apiKey` | string | Non | Clé API pour le fournisseur \(utilise la clé de la plateforme si non fournie pour les modèles hébergés\) |
|
||||
| `temperature` | number | Non | Température pour la génération de réponse \(0-2\) |
|
||||
| `maxTokens` | number | Non | Nombre maximum de tokens dans la réponse |
|
||||
| `azureEndpoint` | string | Non | URL du point de terminaison Azure OpenAI |
|
||||
| `azureApiVersion` | string | Non | Version de l'API Azure OpenAI |
|
||||
| `vertexProject` | string | Non | ID du projet Google Cloud pour Vertex AI |
|
||||
| `vertexLocation` | string | Non | Emplacement Google Cloud pour Vertex AI \(par défaut us-central1\) |
|
||||
| Paramètre | Type | Obligatoire | Description |
|
||||
| --------- | ---- | ---------- | ----------- |
|
||||
| `model` | chaîne | Oui | Le modèle à utiliser (ex. : gpt-4o, claude-sonnet-4-5, gemini-2.0-flash) |
|
||||
| `systemPrompt` | chaîne | Non | Instruction système pour définir le comportement de l'assistant |
|
||||
| `context` | chaîne | Oui | Le message utilisateur ou le contexte à envoyer au modèle |
|
||||
| `apiKey` | chaîne | Non | Clé API pour le fournisseur (utilise la clé de plateforme si non fournie pour les modèles hébergés) |
|
||||
| `temperature` | nombre | Non | Température pour la génération de réponse (0-2) |
|
||||
| `maxTokens` | nombre | Non | Nombre maximum de tokens dans la réponse |
|
||||
| `azureEndpoint` | chaîne | Non | URL du point de terminaison Azure OpenAI |
|
||||
| `azureApiVersion` | chaîne | Non | Version de l'API Azure OpenAI |
|
||||
|
||||
#### Sortie
|
||||
|
||||
|
||||
@@ -110,24 +110,26 @@ import { Tab, Tabs } from 'fumadocs-ui/components/tabs'
|
||||
|
||||
```json
|
||||
{
|
||||
"content": "Original content passed through",
|
||||
"conditionResult": true,
|
||||
"selectedPath": {
|
||||
"blockId": "2acd9007-27e8-4510-a487-73d3b825e7c1",
|
||||
"blockType": "agent",
|
||||
"blockTitle": "Follow-up Agent"
|
||||
},
|
||||
"selectedOption": "condition-1"
|
||||
"selectedConditionId": "condition-1"
|
||||
}
|
||||
```
|
||||
|
||||
### 条件ブロックの出力フィールド
|
||||
|
||||
- **content**: そのまま渡される元のコンテンツ
|
||||
- **conditionResult**: 条件評価の真偽値結果
|
||||
- **selectedPath**: 選択されたパスに関する情報
|
||||
- **blockId**: 選択されたパスの次のブロックのID
|
||||
- **blockType**: 次のブロックのタイプ
|
||||
- **blockTitle**: 次のブロックのタイトル
|
||||
- **selectedOption**: 選択された条件のID
|
||||
- **selectedConditionId**: 選択された条件のID
|
||||
|
||||
</Tab>
|
||||
<Tab>
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
---
|
||||
title: ServiceNow
|
||||
description: ServiceNowレコードの作成、読み取り、更新、削除
|
||||
description: ServiceNowレコードの作成、読み取り、更新、削除、一括インポート
|
||||
---
|
||||
|
||||
import { BlockInfoCard } from "@/components/ui/block-info-card"
|
||||
@@ -10,22 +10,9 @@ import { BlockInfoCard } from "@/components/ui/block-info-card"
|
||||
color="#032D42"
|
||||
/>
|
||||
|
||||
{/* MANUAL-CONTENT-START:intro */}
|
||||
[ServiceNow](https://www.servicenow.com/)は、組織全体のITサービス管理(ITSM)、ワークフロー、ビジネスプロセスを効率化し自動化するために設計された強力なクラウドプラットフォームです。ServiceNowを使用すると、広範なAPIを使用してインシデント、リクエスト、タスク、ユーザーなどを管理できます。
|
||||
|
||||
ServiceNowでは、次のことができます。
|
||||
|
||||
- **ITワークフローの自動化**: インシデント、タスク、変更リクエスト、ユーザーなど、任意のServiceNowテーブルのレコードを作成、読み取り、更新、削除します。
|
||||
- **システムの統合**: ServiceNowを他のツールやプロセスと接続して、シームレスな自動化を実現します。
|
||||
- **単一の信頼できる情報源の維持**: すべてのサービスおよび運用データを整理してアクセス可能な状態に保ちます。
|
||||
- **運用効率の向上**: カスタマイズ可能なワークフローと自動化により、手作業を削減し、サービス品質を向上させます。
|
||||
|
||||
Simでは、ServiceNow統合により、エージェントがワークフローの一部としてServiceNowインスタンスと直接やり取りできるようになります。エージェントは、任意のServiceNowテーブルのレコードを作成、読み取り、更新、削除でき、チケットやユーザーデータを活用して高度な自動化と意思決定を行うことができます。この統合により、ワークフロー自動化とIT運用が橋渡しされ、エージェントは手動介入なしでサービスリクエスト、インシデント、ユーザー、資産を管理できるようになります。SimとServiceNowを接続することで、サービス管理タスクを自動化し、応答時間を改善し、組織の重要なサービスデータへの一貫性のある安全なアクセスを確保できます。
|
||||
{/* MANUAL-CONTENT-END */}
|
||||
|
||||
## 使用方法
|
||||
|
||||
ServiceNowをワークフローに統合します。インシデント、タスク、変更リクエスト、ユーザーなど、任意のServiceNowテーブルのレコードを作成、読み取り、更新、削除します。
|
||||
ServiceNowをワークフローに統合します。任意のServiceNowテーブル(インシデント、タスク、ユーザーなど)のレコードを作成、読み取り、更新、削除できます。データ移行とETLのための一括インポート操作をサポートします。
|
||||
|
||||
## ツール
|
||||
|
||||
@@ -37,11 +24,10 @@ ServiceNowテーブルに新しいレコードを作成
|
||||
|
||||
| パラメータ | 型 | 必須 | 説明 |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `instanceUrl` | string | はい | ServiceNowインスタンスURL(例: https://instance.service-now.com) |
|
||||
| `username` | string | はい | ServiceNowユーザー名 |
|
||||
| `password` | string | はい | ServiceNowパスワード |
|
||||
| `tableName` | string | はい | テーブル名(例: incident、task、sys_user) |
|
||||
| `fields` | json | はい | レコードに設定するフィールド(JSONオブジェクト) |
|
||||
| `instanceUrl` | string | はい | ServiceNowインスタンスURL(例:https://instance.service-now.com) |
|
||||
| `credential` | string | いいえ | ServiceNow OAuth認証情報ID |
|
||||
| `tableName` | string | はい | テーブル名(例:incident、task、sys_user) |
|
||||
| `fields` | json | はい | レコードに設定するフィールド(JSONオブジェクト) |
|
||||
|
||||
#### 出力
|
||||
|
||||
@@ -52,20 +38,19 @@ ServiceNowテーブルに新しいレコードを作成
|
||||
|
||||
### `servicenow_read_record`
|
||||
|
||||
ServiceNowテーブルからレコードを読み取ります
|
||||
ServiceNowテーブルからレコードを読み取り
|
||||
|
||||
#### 入力
|
||||
|
||||
| パラメータ | 型 | 必須 | 説明 |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `instanceUrl` | string | はい | ServiceNowインスタンスURL(例: https://instance.service-now.com) |
|
||||
| `username` | string | はい | ServiceNowユーザー名 |
|
||||
| `password` | string | はい | ServiceNowパスワード |
|
||||
| `instanceUrl` | string | いいえ | ServiceNowインスタンスURL(指定されていない場合はOAuthから自動検出) |
|
||||
| `credential` | string | いいえ | ServiceNow OAuth認証情報ID |
|
||||
| `tableName` | string | はい | テーブル名 |
|
||||
| `sysId` | string | いいえ | 特定のレコードのsys_id |
|
||||
| `number` | string | いいえ | レコード番号(例: INC0010001) |
|
||||
| `query` | string | いいえ | エンコードされたクエリ文字列(例: "active=true^priority=1") |
|
||||
| `limit` | number | いいえ | 返すレコードの最大数 |
|
||||
| `sysId` | string | いいえ | 特定のレコードsys_id |
|
||||
| `number` | string | いいえ | レコード番号(例:INC0010001) |
|
||||
| `query` | string | いいえ | エンコードされたクエリ文字列(例:"active=true^priority=1") |
|
||||
| `limit` | number | いいえ | 返す最大レコード数 |
|
||||
| `fields` | string | いいえ | 返すフィールドのカンマ区切りリスト |
|
||||
|
||||
#### 出力
|
||||
@@ -77,18 +62,17 @@ ServiceNowテーブルからレコードを読み取ります
|
||||
|
||||
### `servicenow_update_record`
|
||||
|
||||
ServiceNowテーブル内の既存のレコードを更新
|
||||
ServiceNowテーブル内の既存のレコードを更新します
|
||||
|
||||
#### 入力
|
||||
|
||||
| パラメータ | 型 | 必須 | 説明 |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `instanceUrl` | string | はい | ServiceNowインスタンスURL(例:https://instance.service-now.com) |
|
||||
| `username` | string | はい | ServiceNowユーザー名 |
|
||||
| `password` | string | はい | ServiceNowパスワード |
|
||||
| `instanceUrl` | string | いいえ | ServiceNowインスタンスURL(指定されていない場合はOAuthから自動検出) |
|
||||
| `credential` | string | いいえ | ServiceNow OAuth認証情報ID |
|
||||
| `tableName` | string | はい | テーブル名 |
|
||||
| `sysId` | string | はい | 更新するレコードのsys_id |
|
||||
| `fields` | json | はい | 更新するフィールド(JSONオブジェクト) |
|
||||
| `fields` | json | はい | 更新するフィールド(JSONオブジェクト) |
|
||||
|
||||
#### 出力
|
||||
|
||||
@@ -99,15 +83,14 @@ ServiceNowテーブル内の既存のレコードを更新
|
||||
|
||||
### `servicenow_delete_record`
|
||||
|
||||
ServiceNowテーブルからレコードを削除
|
||||
ServiceNowテーブルからレコードを削除します
|
||||
|
||||
#### 入力
|
||||
|
||||
| パラメータ | 型 | 必須 | 説明 |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `instanceUrl` | string | はい | ServiceNowインスタンスURL(例:https://instance.service-now.com) |
|
||||
| `username` | string | はい | ServiceNowユーザー名 |
|
||||
| `password` | string | はい | ServiceNowパスワード |
|
||||
| `instanceUrl` | string | いいえ | ServiceNowインスタンスURL(指定されていない場合はOAuthから自動検出) |
|
||||
| `credential` | string | いいえ | ServiceNow OAuth認証情報ID |
|
||||
| `tableName` | string | はい | テーブル名 |
|
||||
| `sysId` | string | はい | 削除するレコードのsys_id |
|
||||
|
||||
@@ -118,7 +101,7 @@ ServiceNowテーブルからレコードを削除
|
||||
| `success` | boolean | 削除が成功したかどうか |
|
||||
| `metadata` | json | 操作メタデータ |
|
||||
|
||||
## 注意事項
|
||||
## 注記
|
||||
|
||||
- カテゴリ: `tools`
|
||||
- カテゴリー: `tools`
|
||||
- タイプ: `servicenow`
|
||||
|
||||
@@ -42,13 +42,11 @@ import { BlockInfoCard } from "@/components/ui/block-info-card"
|
||||
| `model` | string | はい | 使用するモデル(例:gpt-4o、claude-sonnet-4-5、gemini-2.0-flash) |
|
||||
| `systemPrompt` | string | いいえ | アシスタントの動作を設定するシステムプロンプト |
|
||||
| `context` | string | はい | モデルに送信するユーザーメッセージまたはコンテキスト |
|
||||
| `apiKey` | string | いいえ | プロバイダーのAPIキー(ホストされたモデルの場合、提供されない場合はプラットフォームキーを使用) |
|
||||
| `apiKey` | string | いいえ | プロバイダーのAPIキー(ホストされたモデルの場合、提供されなければプラットフォームキーを使用) |
|
||||
| `temperature` | number | いいえ | レスポンス生成の温度(0-2) |
|
||||
| `maxTokens` | number | いいえ | レスポンスの最大トークン数 |
|
||||
| `azureEndpoint` | string | いいえ | Azure OpenAIエンドポイントURL |
|
||||
| `azureApiVersion` | string | いいえ | Azure OpenAI APIバージョン |
|
||||
| `vertexProject` | string | いいえ | Vertex AI用のGoogle CloudプロジェクトID |
|
||||
| `vertexLocation` | string | いいえ | Vertex AI用のGoogle Cloudロケーション(デフォルトはus-central1) |
|
||||
|
||||
#### 出力
|
||||
|
||||
|
||||
@@ -110,24 +110,26 @@ import { Tab, Tabs } from 'fumadocs-ui/components/tabs'
|
||||
|
||||
```json
|
||||
{
|
||||
"content": "Original content passed through",
|
||||
"conditionResult": true,
|
||||
"selectedPath": {
|
||||
"blockId": "2acd9007-27e8-4510-a487-73d3b825e7c1",
|
||||
"blockType": "agent",
|
||||
"blockTitle": "Follow-up Agent"
|
||||
},
|
||||
"selectedOption": "condition-1"
|
||||
"selectedConditionId": "condition-1"
|
||||
}
|
||||
```
|
||||
|
||||
### 条件模块输出字段
|
||||
|
||||
- **conditionResult**:条件判断的布尔值结果
|
||||
- **selectedPath**:所选路径的信息
|
||||
- **blockId**:所选路径下一个区块的 ID
|
||||
- **blockType**:下一个区块的类型
|
||||
- **blockTitle**:下一个区块的标题
|
||||
- **selectedOption**:所选条件的 ID
|
||||
- **content**:传递的原始内容
|
||||
- **conditionResult**:条件评估的布尔结果
|
||||
- **selectedPath**:关于选定路径的信息
|
||||
- **blockId**:选定路径中下一个模块的 ID
|
||||
- **blockType**:下一个模块的类型
|
||||
- **blockTitle**:下一个模块的标题
|
||||
- **selectedConditionId**:选定条件的 ID
|
||||
|
||||
</Tab>
|
||||
<Tab>
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
---
|
||||
title: ServiceNow
|
||||
description: 创建、读取、更新和删除 ServiceNow 记录
|
||||
description: 创建、读取、更新、删除及批量导入 ServiceNow 记录
|
||||
---
|
||||
|
||||
import { BlockInfoCard } from "@/components/ui/block-info-card"
|
||||
@@ -10,22 +10,9 @@ import { BlockInfoCard } from "@/components/ui/block-info-card"
|
||||
color="#032D42"
|
||||
/>
|
||||
|
||||
{/* MANUAL-CONTENT-START:intro */}
|
||||
[ServiceNow](https://www.servicenow.com/) 是一款强大的云平台,旨在简化和自动化 IT 服务管理(ITSM)、工作流以及企业各类业务流程。ServiceNow 让您能够通过其强大的 API 管理事件、请求、任务、用户等多种内容。
|
||||
|
||||
使用 ServiceNow,您可以:
|
||||
|
||||
- **自动化 IT 工作流**:在任意 ServiceNow 表中创建、读取、更新和删除记录,如事件、任务、变更请求和用户等。
|
||||
- **集成系统**:将 ServiceNow 与您的其他工具和流程连接,实现无缝自动化。
|
||||
- **维护单一数据源**:让所有服务和运营数据井然有序,便于访问。
|
||||
- **提升运营效率**:通过可定制的工作流和自动化,减少手动操作,提高服务质量。
|
||||
|
||||
在 Sim 中,ServiceNow 集成让您的代理能够在工作流中直接与 ServiceNow 实例交互。代理可以在任意 ServiceNow 表中创建、读取、更新或删除记录,并利用工单或用户数据实现复杂的自动化和决策。这一集成将您的工作流自动化与 IT 运维无缝衔接,使代理能够自动化管理服务请求、事件、用户和资产,无需人工干预。通过将 Sim 与 ServiceNow 连接,您可以自动化服务管理任务、提升响应速度,并确保对组织关键服务数据的持续、安全访问。
|
||||
{/* MANUAL-CONTENT-END */}
|
||||
|
||||
## 使用说明
|
||||
|
||||
将 ServiceNow 集成到您的工作流中。在任意 ServiceNow 表(包括事件、任务、变更请求、用户等)中创建、读取、更新和删除记录。
|
||||
将 ServiceNow 集成到您的工作流程中。可在任意 ServiceNow 表(如事件、任务、用户等)中创建、读取、更新和删除记录。支持批量导入操作,便于数据迁移和 ETL。
|
||||
|
||||
## 工具
|
||||
|
||||
@@ -35,17 +22,16 @@ import { BlockInfoCard } from "@/components/ui/block-info-card"
|
||||
|
||||
#### 输入
|
||||
|
||||
| 参数 | 类型 | 是否必填 | 描述 |
|
||||
| 参数 | 类型 | 必填 | 说明 |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `instanceUrl` | string | 是 | ServiceNow 实例 URL(例如:https://instance.service-now.com) |
|
||||
| `username` | string | 是 | ServiceNow 用户名 |
|
||||
| `password` | string | 是 | ServiceNow 密码 |
|
||||
| `credential` | string | 否 | ServiceNow OAuth 凭证 ID |
|
||||
| `tableName` | string | 是 | 表名(例如:incident、task、sys_user) |
|
||||
| `fields` | json | 是 | 记录中要设置的字段(JSON 对象) |
|
||||
| `fields` | json | 是 | 要设置在记录上的字段(JSON 对象) |
|
||||
|
||||
#### 输出
|
||||
|
||||
| 参数 | 类型 | 描述 |
|
||||
| 参数 | 类型 | 说明 |
|
||||
| --------- | ---- | ----------- |
|
||||
| `record` | json | 创建的 ServiceNow 记录,包含 sys_id 及其他字段 |
|
||||
| `metadata` | json | 操作元数据 |
|
||||
@@ -56,11 +42,10 @@ import { BlockInfoCard } from "@/components/ui/block-info-card"
|
||||
|
||||
#### 输入
|
||||
|
||||
| 参数 | 类型 | 是否必填 | 描述 |
|
||||
| 参数 | 类型 | 必填 | 说明 |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `instanceUrl` | string | 是 | ServiceNow 实例 URL(例如:https://instance.service-now.com) |
|
||||
| `username` | string | 是 | ServiceNow 用户名 |
|
||||
| `password` | string | 是 | ServiceNow 密码 |
|
||||
| `instanceUrl` | string | 否 | ServiceNow 实例 URL(如未提供,将通过 OAuth 自动检测) |
|
||||
| `credential` | string | 否 | ServiceNow OAuth 凭证 ID |
|
||||
| `tableName` | string | 是 | 表名 |
|
||||
| `sysId` | string | 否 | 指定记录 sys_id |
|
||||
| `number` | string | 否 | 记录编号(例如:INC0010001) |
|
||||
@@ -70,7 +55,7 @@ import { BlockInfoCard } from "@/components/ui/block-info-card"
|
||||
|
||||
#### 输出
|
||||
|
||||
| 参数 | 类型 | 说明 |
|
||||
| 参数 | 类型 | 描述 |
|
||||
| --------- | ---- | ----------- |
|
||||
| `records` | array | ServiceNow 记录数组 |
|
||||
| `metadata` | json | 操作元数据 |
|
||||
@@ -81,18 +66,17 @@ import { BlockInfoCard } from "@/components/ui/block-info-card"
|
||||
|
||||
#### 输入
|
||||
|
||||
| 参数 | 类型 | 必填 | 说明 |
|
||||
| 参数 | 类型 | 是否必填 | 描述 |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `instanceUrl` | string | 是 | ServiceNow 实例 URL(例如:https://instance.service-now.com) |
|
||||
| `username` | string | 是 | ServiceNow 用户名 |
|
||||
| `password` | string | 是 | ServiceNow 密码 |
|
||||
| `instanceUrl` | string | 否 | ServiceNow 实例 URL(如果未提供,将通过 OAuth 自动检测) |
|
||||
| `credential` | string | 否 | ServiceNow OAuth 凭证 ID |
|
||||
| `tableName` | string | 是 | 表名 |
|
||||
| `sysId` | string | 是 | 要更新的记录 sys_id |
|
||||
| `fields` | json | 是 | 要更新的字段(JSON 对象) |
|
||||
|
||||
#### 输出
|
||||
|
||||
| 参数 | 类型 | 说明 |
|
||||
| 参数 | 类型 | 描述 |
|
||||
| --------- | ---- | ----------- |
|
||||
| `record` | json | 已更新的 ServiceNow 记录 |
|
||||
| `metadata` | json | 操作元数据 |
|
||||
@@ -103,11 +87,10 @@ import { BlockInfoCard } from "@/components/ui/block-info-card"
|
||||
|
||||
#### 输入
|
||||
|
||||
| 参数 | 类型 | 必填 | 说明 |
|
||||
| 参数 | 类型 | 是否必填 | 描述 |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `instanceUrl` | string | 是 | ServiceNow 实例 URL(例如:https://instance.service-now.com) |
|
||||
| `username` | string | 是 | ServiceNow 用户名 |
|
||||
| `password` | string | 是 | ServiceNow 密码 |
|
||||
| `instanceUrl` | string | 否 | ServiceNow 实例 URL(如果未提供,将通过 OAuth 自动检测) |
|
||||
| `credential` | string | 否 | ServiceNow OAuth 凭证 ID |
|
||||
| `tableName` | string | 是 | 表名 |
|
||||
| `sysId` | string | 是 | 要删除的记录 sys_id |
|
||||
|
||||
@@ -118,7 +101,7 @@ import { BlockInfoCard } from "@/components/ui/block-info-card"
|
||||
| `success` | boolean | 删除是否成功 |
|
||||
| `metadata` | json | 操作元数据 |
|
||||
|
||||
## 备注
|
||||
## 注意事项
|
||||
|
||||
- 分类:`tools`
|
||||
- 类型:`servicenow`
|
||||
|
||||
@@ -37,18 +37,16 @@ import { BlockInfoCard } from "@/components/ui/block-info-card"
|
||||
|
||||
#### 输入
|
||||
|
||||
| 参数 | 类型 | 必填 | 说明 |
|
||||
| 参数 | 类型 | 必需 | 描述 |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `model` | string | 是 | 要使用的模型(例如 gpt-4o、claude-sonnet-4-5、gemini-2.0-flash) |
|
||||
| `systemPrompt` | string | 否 | 设置助手行为的 system prompt |
|
||||
| `context` | string | 是 | 发送给模型的用户消息或上下文 |
|
||||
| `apiKey` | string | 否 | 提供方的 API key(如未提供,托管模型将使用平台密钥) |
|
||||
| `temperature` | number | 否 | 响应生成的 temperature(0-2) |
|
||||
| `maxTokens` | number | 否 | 响应中的最大 tokens 数 |
|
||||
| `azureEndpoint` | string | 否 | Azure OpenAI endpoint URL |
|
||||
| `model` | string | 是 | 要使用的模型 \(例如,gpt-4o、claude-sonnet-4-5、gemini-2.0-flash\) |
|
||||
| `systemPrompt` | string | 否 | 设置助手行为的系统提示 |
|
||||
| `context` | string | 是 | 要发送给模型的用户消息或上下文 |
|
||||
| `apiKey` | string | 否 | 提供商的 API 密钥 \(如果未为托管模型提供,则使用平台密钥\) |
|
||||
| `temperature` | number | 否 | 响应生成的温度 \(0-2\) |
|
||||
| `maxTokens` | number | 否 | 响应的最大令牌数 |
|
||||
| `azureEndpoint` | string | 否 | Azure OpenAI 端点 URL |
|
||||
| `azureApiVersion` | string | 否 | Azure OpenAI API 版本 |
|
||||
| `vertexProject` | string | 否 | Vertex AI 的 Google Cloud 项目 ID |
|
||||
| `vertexLocation` | string | 否 | Vertex AI 的 Google Cloud 区域(默认为 us-central1) |
|
||||
|
||||
#### 输出
|
||||
|
||||
|
||||
@@ -557,7 +557,7 @@ checksums:
|
||||
content/8: 6325adefb6e1520835225285b18b6a45
|
||||
content/9: b7fa85fce9c7476fe132df189e27dac1
|
||||
content/10: 371d0e46b4bd2c23f559b8bc112f6955
|
||||
content/11: 7ad14ccfe548588081626cfe769ad492
|
||||
content/11: 985f435f721b00df4d13fa0a5552684c
|
||||
content/12: bcadfc362b69078beee0088e5936c98b
|
||||
content/13: 6af66efd0da20944a87fdb8d9defa358
|
||||
content/14: b3f310d5ef115bea5a8b75bf25d7ea9a
|
||||
@@ -4811,9 +4811,9 @@ checksums:
|
||||
content/19: 85547efea8ae0e8170ac4e2030f6be25
|
||||
content/20: 25c56dcdc4af1516c3fbf9d82d96b48d
|
||||
content/21: 56dbe63da14a319cd520ab1615c94be7
|
||||
content/22: e039f6c905c8aa148cc3e7af19f05239
|
||||
content/22: e092cde0c92ef09c642a62636e7e3ae3
|
||||
content/23: c7004f5db8f7134d7e3a36a1916691a2
|
||||
content/24: 26555018b90fc8fb3ac65cece15f3966
|
||||
content/24: bbc26961050b132b9bc4f14ba11f407a
|
||||
content/25: 56dbe63da14a319cd520ab1615c94be7
|
||||
content/26: 3e835ecc38acf2c76179034360d41670
|
||||
content/27: a13bbc3dac7388e1ef4e9cbafdcc8241
|
||||
@@ -49824,39 +49824,35 @@ checksums:
|
||||
content/474: 27c398e669b297cea076e4ce4cc0c5eb
|
||||
9a28da736b42bf8de55126d4c06b6150:
|
||||
meta/title: 418d5c8a18ad73520b38765741601f32
|
||||
meta/description: 41cb31abf94297849fb8a4023cf0211d
|
||||
meta/description: 2b5a9723c7a45d2be5001d5d056b7c7b
|
||||
content/0: 1b031fb0c62c46b177aeed5c3d3f8f80
|
||||
content/1: e72670f88454b5b1c955b029de5fa8b5
|
||||
content/2: d586e5af506d99add847369c0accfb4d
|
||||
content/3: a2ce9ed4954ab55bcebed927cec8e890
|
||||
content/4: 5fc7b723a6adcf201e8deb3f5ed9a9e3
|
||||
content/5: a78981875c359a3343f26ed4d115f899
|
||||
content/6: 821e6394b0a953e2b0842b04ae8f3105
|
||||
content/7: 56a538eaccb1158fb1f7a01cc32f7331
|
||||
content/8: 9c8aa3f09c9b2bd50ea4cdff3598ea4e
|
||||
content/9: 263633aee6db9332de806ae50d87de05
|
||||
content/10: 5a7e2171e5f73fec5eae21a50e5de661
|
||||
content/11: 371d0e46b4bd2c23f559b8bc112f6955
|
||||
content/12: 5905ef5d0db0354c08394acb0b5cda4b
|
||||
content/13: bcadfc362b69078beee0088e5936c98b
|
||||
content/14: d81ef802f80143282cf4e534561a9570
|
||||
content/15: 02233e6212003c1d121424cfd8b86b62
|
||||
content/16: efe2c6dd368708de68a1addbfdb11b0c
|
||||
content/17: 371d0e46b4bd2c23f559b8bc112f6955
|
||||
content/18: 2722e8bee100e7bc4590fa02710e9508
|
||||
content/19: bcadfc362b69078beee0088e5936c98b
|
||||
content/20: 953f353184dc27db1f20156db2a9ad90
|
||||
content/21: 2011e87d0555cd0ab133ef2d35e7a37b
|
||||
content/22: dbf08acb413d845ec419e45b1f986bdb
|
||||
content/23: 371d0e46b4bd2c23f559b8bc112f6955
|
||||
content/24: afc35de2990ed0e9bb8f98dc1b9609ce
|
||||
content/25: bcadfc362b69078beee0088e5936c98b
|
||||
content/26: c06a5bb458242baa23d34957034c2fe7
|
||||
content/27: ff043e912417bc29ac7c64520160c07d
|
||||
content/28: 9c2175ab469cb6ff9e62bc8bdcf7621d
|
||||
content/29: 371d0e46b4bd2c23f559b8bc112f6955
|
||||
content/30: 20e6bddad8e7f34a3d09e5b0c5678c13
|
||||
content/31: bcadfc362b69078beee0088e5936c98b
|
||||
content/32: fd0f38eb3fe5cf95be366a4ff6b4fb90
|
||||
content/33: b3f310d5ef115bea5a8b75bf25d7ea9a
|
||||
content/34: 4a7b2c644e487f3d12b6a6b54f8c6773
|
||||
content/2: 821e6394b0a953e2b0842b04ae8f3105
|
||||
content/3: 7fa671d05a60d4f25b4980405c2c7278
|
||||
content/4: 9c8aa3f09c9b2bd50ea4cdff3598ea4e
|
||||
content/5: 263633aee6db9332de806ae50d87de05
|
||||
content/6: 5a7e2171e5f73fec5eae21a50e5de661
|
||||
content/7: 371d0e46b4bd2c23f559b8bc112f6955
|
||||
content/8: 10d2d4eccb4b8923f048980dc16e43e1
|
||||
content/9: bcadfc362b69078beee0088e5936c98b
|
||||
content/10: d81ef802f80143282cf4e534561a9570
|
||||
content/11: 02233e6212003c1d121424cfd8b86b62
|
||||
content/12: efe2c6dd368708de68a1addbfdb11b0c
|
||||
content/13: 371d0e46b4bd2c23f559b8bc112f6955
|
||||
content/14: 0f3295854b7de5dbfab1ebd2a130b498
|
||||
content/15: bcadfc362b69078beee0088e5936c98b
|
||||
content/16: 953f353184dc27db1f20156db2a9ad90
|
||||
content/17: 2011e87d0555cd0ab133ef2d35e7a37b
|
||||
content/18: dbf08acb413d845ec419e45b1f986bdb
|
||||
content/19: 371d0e46b4bd2c23f559b8bc112f6955
|
||||
content/20: 3a8417b390ec7d3d55b1920c721e9006
|
||||
content/21: bcadfc362b69078beee0088e5936c98b
|
||||
content/22: c06a5bb458242baa23d34957034c2fe7
|
||||
content/23: ff043e912417bc29ac7c64520160c07d
|
||||
content/24: 9c2175ab469cb6ff9e62bc8bdcf7621d
|
||||
content/25: 371d0e46b4bd2c23f559b8bc112f6955
|
||||
content/26: 67e6ba04cf67f92e714ed94e7483dec5
|
||||
content/27: bcadfc362b69078beee0088e5936c98b
|
||||
content/28: fd0f38eb3fe5cf95be366a4ff6b4fb90
|
||||
content/29: b3f310d5ef115bea5a8b75bf25d7ea9a
|
||||
content/30: 4a7b2c644e487f3d12b6a6b54f8c6773
|
||||
|
||||
@@ -4,7 +4,7 @@
|
||||
"private": true,
|
||||
"license": "Apache-2.0",
|
||||
"scripts": {
|
||||
"dev": "next dev --port 3001",
|
||||
"dev": "next dev --port 7322",
|
||||
"build": "fumadocs-mdx && NODE_OPTIONS='--max-old-space-size=8192' next build",
|
||||
"start": "next start",
|
||||
"postinstall": "fumadocs-mdx",
|
||||
|
||||
@@ -70,7 +70,6 @@ export const FOOTER_TOOLS = [
|
||||
'Salesforce',
|
||||
'SendGrid',
|
||||
'Serper',
|
||||
'ServiceNow',
|
||||
'SharePoint',
|
||||
'Slack',
|
||||
'Smtp',
|
||||
|
||||
@@ -2,6 +2,7 @@ import { Suspense } from 'react'
|
||||
import dynamic from 'next/dynamic'
|
||||
import { Background, Footer, Nav, StructuredData } from '@/app/(landing)/components'
|
||||
|
||||
// Lazy load heavy components for better initial load performance
|
||||
const Hero = dynamic(() => import('@/app/(landing)/components/hero/hero'), {
|
||||
loading: () => <div className='h-[600px] animate-pulse bg-gray-50' />,
|
||||
})
|
||||
|
||||
@@ -38,6 +38,7 @@ vi.mock('@/lib/logs/console/logger', () => ({
|
||||
}))
|
||||
|
||||
import { db } from '@sim/db'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { refreshOAuthToken } from '@/lib/oauth/oauth'
|
||||
import {
|
||||
getCredential,
|
||||
@@ -48,6 +49,7 @@ import {
|
||||
|
||||
const mockDb = db as any
|
||||
const mockRefreshOAuthToken = refreshOAuthToken as any
|
||||
const mockLogger = (createLogger as any)()
|
||||
|
||||
describe('OAuth Utils', () => {
|
||||
beforeEach(() => {
|
||||
@@ -85,6 +87,7 @@ describe('OAuth Utils', () => {
|
||||
const userId = await getUserId('request-id')
|
||||
|
||||
expect(userId).toBeUndefined()
|
||||
expect(mockLogger.warn).toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('should return undefined if workflow is not found', async () => {
|
||||
@@ -93,6 +96,7 @@ describe('OAuth Utils', () => {
|
||||
const userId = await getUserId('request-id', 'nonexistent-workflow-id')
|
||||
|
||||
expect(userId).toBeUndefined()
|
||||
expect(mockLogger.warn).toHaveBeenCalled()
|
||||
})
|
||||
})
|
||||
|
||||
@@ -117,6 +121,7 @@ describe('OAuth Utils', () => {
|
||||
const credential = await getCredential('request-id', 'nonexistent-id', 'test-user-id')
|
||||
|
||||
expect(credential).toBeUndefined()
|
||||
expect(mockLogger.warn).toHaveBeenCalled()
|
||||
})
|
||||
})
|
||||
|
||||
@@ -134,6 +139,7 @@ describe('OAuth Utils', () => {
|
||||
|
||||
expect(mockRefreshOAuthToken).not.toHaveBeenCalled()
|
||||
expect(result).toEqual({ accessToken: 'valid-token', refreshed: false })
|
||||
expect(mockLogger.info).toHaveBeenCalledWith(expect.stringContaining('Access token is valid'))
|
||||
})
|
||||
|
||||
it('should refresh token when expired', async () => {
|
||||
@@ -153,10 +159,13 @@ describe('OAuth Utils', () => {
|
||||
|
||||
const result = await refreshTokenIfNeeded('request-id', mockCredential, 'credential-id')
|
||||
|
||||
expect(mockRefreshOAuthToken).toHaveBeenCalledWith('google', 'refresh-token')
|
||||
expect(mockRefreshOAuthToken).toHaveBeenCalledWith('google', 'refresh-token', undefined)
|
||||
expect(mockDb.update).toHaveBeenCalled()
|
||||
expect(mockDb.set).toHaveBeenCalled()
|
||||
expect(result).toEqual({ accessToken: 'new-token', refreshed: true })
|
||||
expect(mockLogger.info).toHaveBeenCalledWith(
|
||||
expect.stringContaining('Successfully refreshed')
|
||||
)
|
||||
})
|
||||
|
||||
it('should handle refresh token error', async () => {
|
||||
@@ -173,6 +182,8 @@ describe('OAuth Utils', () => {
|
||||
await expect(
|
||||
refreshTokenIfNeeded('request-id', mockCredential, 'credential-id')
|
||||
).rejects.toThrow('Failed to refresh token')
|
||||
|
||||
expect(mockLogger.error).toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('should not attempt refresh if no refresh token', async () => {
|
||||
@@ -228,7 +239,7 @@ describe('OAuth Utils', () => {
|
||||
|
||||
const token = await refreshAccessTokenIfNeeded('credential-id', 'test-user-id', 'request-id')
|
||||
|
||||
expect(mockRefreshOAuthToken).toHaveBeenCalledWith('google', 'refresh-token')
|
||||
expect(mockRefreshOAuthToken).toHaveBeenCalledWith('google', 'refresh-token', undefined)
|
||||
expect(mockDb.update).toHaveBeenCalled()
|
||||
expect(mockDb.set).toHaveBeenCalled()
|
||||
expect(token).toBe('new-token')
|
||||
@@ -240,6 +251,7 @@ describe('OAuth Utils', () => {
|
||||
const token = await refreshAccessTokenIfNeeded('nonexistent-id', 'test-user-id', 'request-id')
|
||||
|
||||
expect(token).toBeNull()
|
||||
expect(mockLogger.warn).toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('should return null if refresh fails', async () => {
|
||||
@@ -258,6 +270,7 @@ describe('OAuth Utils', () => {
|
||||
const token = await refreshAccessTokenIfNeeded('credential-id', 'test-user-id', 'request-id')
|
||||
|
||||
expect(token).toBeNull()
|
||||
expect(mockLogger.error).toHaveBeenCalled()
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
@@ -132,7 +132,14 @@ export async function getOAuthToken(userId: string, providerId: string): Promise
|
||||
|
||||
try {
|
||||
// Use the existing refreshOAuthToken function
|
||||
const refreshResult = await refreshOAuthToken(providerId, credential.refreshToken!)
|
||||
// For ServiceNow, pass the instance URL (stored in idToken) for the token endpoint
|
||||
const instanceUrl =
|
||||
providerId === 'servicenow' ? (credential.idToken ?? undefined) : undefined
|
||||
const refreshResult = await refreshOAuthToken(
|
||||
providerId,
|
||||
credential.refreshToken!,
|
||||
instanceUrl
|
||||
)
|
||||
|
||||
if (!refreshResult) {
|
||||
logger.error(`Failed to refresh token for user ${userId}, provider ${providerId}`, {
|
||||
@@ -215,9 +222,13 @@ export async function refreshAccessTokenIfNeeded(
|
||||
if (shouldRefresh) {
|
||||
logger.info(`[${requestId}] Token expired, attempting to refresh for credential`)
|
||||
try {
|
||||
// For ServiceNow, pass the instance URL (stored in idToken) for the token endpoint
|
||||
const instanceUrl =
|
||||
credential.providerId === 'servicenow' ? (credential.idToken ?? undefined) : undefined
|
||||
const refreshedToken = await refreshOAuthToken(
|
||||
credential.providerId,
|
||||
credential.refreshToken!
|
||||
credential.refreshToken!,
|
||||
instanceUrl
|
||||
)
|
||||
|
||||
if (!refreshedToken) {
|
||||
@@ -289,7 +300,14 @@ export async function refreshTokenIfNeeded(
|
||||
}
|
||||
|
||||
try {
|
||||
const refreshResult = await refreshOAuthToken(credential.providerId, credential.refreshToken!)
|
||||
// For ServiceNow, pass the instance URL (stored in idToken) for the token endpoint
|
||||
const instanceUrl =
|
||||
credential.providerId === 'servicenow' ? (credential.idToken ?? undefined) : undefined
|
||||
const refreshResult = await refreshOAuthToken(
|
||||
credential.providerId,
|
||||
credential.refreshToken!,
|
||||
instanceUrl
|
||||
)
|
||||
|
||||
if (!refreshResult) {
|
||||
logger.error(`[${requestId}] Failed to refresh token for credential`)
|
||||
|
||||
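For reference, a minimal sketch of the call-site pattern this hunk introduces: the ServiceNow instance URL kept in `idToken` is threaded through as an optional third argument to `refreshOAuthToken`, while other providers keep passing `undefined`. The types below are illustrative assumptions, not the repository's actual definitions.

```typescript
// Sketch only: shows the new optional third argument to refreshOAuthToken.
// StoredCredential and RefreshFn are assumptions for illustration,
// not the repository's exact definitions.
interface StoredCredential {
  providerId: string
  refreshToken: string | null
  idToken: string | null // for ServiceNow this holds the instance URL
}

type RefreshFn = (
  providerId: string,
  refreshToken: string,
  instanceUrl?: string
) => Promise<{ accessToken: string } | null>

async function refreshCredential(credential: StoredCredential, refreshOAuthToken: RefreshFn) {
  // ServiceNow tokens are refreshed against the instance's own /oauth_token.do endpoint,
  // so the instance URL (stored in idToken) is forwarded; other providers pass undefined.
  const instanceUrl =
    credential.providerId === 'servicenow' ? (credential.idToken ?? undefined) : undefined
  return refreshOAuthToken(credential.providerId, credential.refreshToken!, instanceUrl)
}
```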
166  apps/sim/app/api/auth/oauth2/callback/servicenow/route.ts  (Normal file)
@@ -0,0 +1,166 @@
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { env } from '@/lib/core/config/env'
|
||||
import { getBaseUrl } from '@/lib/core/utils/urls'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
|
||||
const logger = createLogger('ServiceNowCallback')
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
export async function GET(request: NextRequest) {
|
||||
const baseUrl = getBaseUrl()
|
||||
|
||||
try {
|
||||
const session = await getSession()
|
||||
if (!session?.user?.id) {
|
||||
return NextResponse.redirect(`${baseUrl}/workspace?error=unauthorized`)
|
||||
}
|
||||
|
||||
const { searchParams } = request.nextUrl
|
||||
const code = searchParams.get('code')
|
||||
const state = searchParams.get('state')
|
||||
const error = searchParams.get('error')
|
||||
const errorDescription = searchParams.get('error_description')
|
||||
|
||||
// Handle OAuth errors from ServiceNow
|
||||
if (error) {
|
||||
logger.error('ServiceNow OAuth error:', { error, errorDescription })
|
||||
return NextResponse.redirect(
|
||||
`${baseUrl}/workspace?error=servicenow_auth_error&message=${encodeURIComponent(errorDescription || error)}`
|
||||
)
|
||||
}
|
||||
|
||||
const storedState = request.cookies.get('servicenow_oauth_state')?.value
|
||||
const storedInstanceUrl = request.cookies.get('servicenow_instance_url')?.value
|
||||
|
||||
const clientId = env.SERVICENOW_CLIENT_ID
|
||||
const clientSecret = env.SERVICENOW_CLIENT_SECRET
|
||||
|
||||
if (!clientId || !clientSecret) {
|
||||
logger.error('ServiceNow credentials not configured')
|
||||
return NextResponse.redirect(`${baseUrl}/workspace?error=servicenow_config_error`)
|
||||
}
|
||||
|
||||
// Validate state parameter
|
||||
if (!state || state !== storedState) {
|
||||
logger.error('State mismatch in ServiceNow OAuth callback')
|
||||
return NextResponse.redirect(`${baseUrl}/workspace?error=servicenow_state_mismatch`)
|
||||
}
|
||||
|
||||
// Validate authorization code
|
||||
if (!code) {
|
||||
logger.error('No code received from ServiceNow')
|
||||
return NextResponse.redirect(`${baseUrl}/workspace?error=servicenow_no_code`)
|
||||
}
|
||||
|
||||
// Validate instance URL
|
||||
if (!storedInstanceUrl) {
|
||||
logger.error('No instance URL stored')
|
||||
return NextResponse.redirect(`${baseUrl}/workspace?error=servicenow_no_instance`)
|
||||
}
|
||||
|
||||
const redirectUri = `${baseUrl}/api/auth/oauth2/callback/servicenow`
|
||||
|
||||
// Exchange authorization code for access token
|
||||
const tokenResponse = await fetch(`${storedInstanceUrl}/oauth_token.do`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/x-www-form-urlencoded',
|
||||
},
|
||||
body: new URLSearchParams({
|
||||
grant_type: 'authorization_code',
|
||||
code: code,
|
||||
redirect_uri: redirectUri,
|
||||
client_id: clientId,
|
||||
client_secret: clientSecret,
|
||||
}).toString(),
|
||||
})
|
||||
|
||||
if (!tokenResponse.ok) {
|
||||
const errorText = await tokenResponse.text()
|
||||
logger.error('Failed to exchange code for token:', {
|
||||
status: tokenResponse.status,
|
||||
body: errorText,
|
||||
})
|
||||
return NextResponse.redirect(`${baseUrl}/workspace?error=servicenow_token_error`)
|
||||
}
|
||||
|
||||
const tokenData = await tokenResponse.json()
|
||||
const accessToken = tokenData.access_token
|
||||
const refreshToken = tokenData.refresh_token
|
||||
const expiresIn = tokenData.expires_in
|
||||
// ServiceNow always grants 'useraccount' scope but returns empty string
|
||||
const scope = tokenData.scope || 'useraccount'
|
||||
|
||||
logger.info('ServiceNow token exchange successful:', {
|
||||
hasAccessToken: !!accessToken,
|
||||
hasRefreshToken: !!refreshToken,
|
||||
expiresIn,
|
||||
})
|
||||
|
||||
if (!accessToken) {
|
||||
logger.error('No access token in response')
|
||||
return NextResponse.redirect(`${baseUrl}/workspace?error=servicenow_no_token`)
|
||||
}
|
||||
|
||||
// Redirect to store endpoint with token data in cookies
|
||||
const storeUrl = new URL(`${baseUrl}/api/auth/oauth2/servicenow/store`)
|
||||
|
||||
const response = NextResponse.redirect(storeUrl)
|
||||
|
||||
// Store token data in secure cookies for the store endpoint
|
||||
response.cookies.set('servicenow_pending_token', accessToken, {
|
||||
httpOnly: true,
|
||||
secure: process.env.NODE_ENV === 'production',
|
||||
sameSite: 'lax',
|
||||
maxAge: 60, // 1 minute
|
||||
path: '/',
|
||||
})
|
||||
|
||||
if (refreshToken) {
|
||||
response.cookies.set('servicenow_pending_refresh_token', refreshToken, {
|
||||
httpOnly: true,
|
||||
secure: process.env.NODE_ENV === 'production',
|
||||
sameSite: 'lax',
|
||||
maxAge: 60,
|
||||
path: '/',
|
||||
})
|
||||
}
|
||||
|
||||
response.cookies.set('servicenow_pending_instance', storedInstanceUrl, {
|
||||
httpOnly: true,
|
||||
secure: process.env.NODE_ENV === 'production',
|
||||
sameSite: 'lax',
|
||||
maxAge: 60,
|
||||
path: '/',
|
||||
})
|
||||
|
||||
response.cookies.set('servicenow_pending_scope', scope || '', {
|
||||
httpOnly: true,
|
||||
secure: process.env.NODE_ENV === 'production',
|
||||
sameSite: 'lax',
|
||||
maxAge: 60,
|
||||
path: '/',
|
||||
})
|
||||
|
||||
if (expiresIn) {
|
||||
response.cookies.set('servicenow_pending_expires_in', expiresIn.toString(), {
|
||||
httpOnly: true,
|
||||
secure: process.env.NODE_ENV === 'production',
|
||||
sameSite: 'lax',
|
||||
maxAge: 60,
|
||||
path: '/',
|
||||
})
|
||||
}
|
||||
|
||||
// Clean up OAuth state cookies
|
||||
response.cookies.delete('servicenow_oauth_state')
|
||||
response.cookies.delete('servicenow_instance_url')
|
||||
|
||||
return response
|
||||
} catch (error) {
|
||||
logger.error('Error in ServiceNow OAuth callback:', error)
|
||||
return NextResponse.redirect(`${baseUrl}/workspace?error=servicenow_callback_error`)
|
||||
}
|
||||
}
|
||||
142  apps/sim/app/api/auth/oauth2/servicenow/store/route.ts  (Normal file)
@@ -0,0 +1,142 @@
|
||||
import { db } from '@sim/db'
|
||||
import { account } from '@sim/db/schema'
|
||||
import { and, eq } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { getBaseUrl } from '@/lib/core/utils/urls'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { safeAccountInsert } from '@/app/api/auth/oauth/utils'
|
||||
|
||||
const logger = createLogger('ServiceNowStore')
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
export async function GET(request: NextRequest) {
|
||||
const baseUrl = getBaseUrl()
|
||||
|
||||
try {
|
||||
const session = await getSession()
|
||||
if (!session?.user?.id) {
|
||||
logger.warn('Unauthorized attempt to store ServiceNow token')
|
||||
return NextResponse.redirect(`${baseUrl}/workspace?error=unauthorized`)
|
||||
}
|
||||
|
||||
// Retrieve token data from cookies
|
||||
const accessToken = request.cookies.get('servicenow_pending_token')?.value
|
||||
const refreshToken = request.cookies.get('servicenow_pending_refresh_token')?.value
|
||||
const instanceUrl = request.cookies.get('servicenow_pending_instance')?.value
|
||||
const scope = request.cookies.get('servicenow_pending_scope')?.value
|
||||
const expiresInStr = request.cookies.get('servicenow_pending_expires_in')?.value
|
||||
|
||||
if (!accessToken || !instanceUrl) {
|
||||
logger.error('Missing token or instance URL in cookies')
|
||||
return NextResponse.redirect(`${baseUrl}/workspace?error=servicenow_missing_data`)
|
||||
}
|
||||
|
||||
// Validate the token by fetching user info from ServiceNow
|
||||
const userResponse = await fetch(
|
||||
`${instanceUrl}/api/now/table/sys_user?sysparm_query=user_name=${encodeURIComponent('javascript:gs.getUserName()')}&sysparm_limit=1`,
|
||||
{
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
Accept: 'application/json',
|
||||
},
|
||||
}
|
||||
)
|
||||
|
||||
// Alternative: Use the instance info endpoint instead
|
||||
let accountIdentifier = instanceUrl
|
||||
let userInfo: Record<string, unknown> | null = null
|
||||
|
||||
// Try to get current user info
|
||||
try {
|
||||
const whoamiResponse = await fetch(`${instanceUrl}/api/now/ui/user/current_user`, {
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
Accept: 'application/json',
|
||||
},
|
||||
})
|
||||
|
||||
if (whoamiResponse.ok) {
|
||||
const whoamiData = await whoamiResponse.json()
|
||||
userInfo = whoamiData.result
|
||||
if (userInfo?.user_sys_id) {
|
||||
accountIdentifier = userInfo.user_sys_id as string
|
||||
} else if (userInfo?.user_name) {
|
||||
accountIdentifier = userInfo.user_name as string
|
||||
}
|
||||
logger.info('Retrieved ServiceNow user info', { accountIdentifier })
|
||||
}
|
||||
} catch (e) {
|
||||
logger.warn('Could not retrieve ServiceNow user info, using instance URL as identifier')
|
||||
}
|
||||
|
||||
// Calculate expiration time
|
||||
const now = new Date()
|
||||
const expiresIn = expiresInStr ? Number.parseInt(expiresInStr, 10) : 3600 // Default to 1 hour
|
||||
const accessTokenExpiresAt = new Date(now.getTime() + expiresIn * 1000)
|
||||
|
||||
// Check for existing ServiceNow account for this user
|
||||
const existing = await db.query.account.findFirst({
|
||||
where: and(eq(account.userId, session.user.id), eq(account.providerId, 'servicenow')),
|
||||
})
|
||||
|
||||
// ServiceNow always grants 'useraccount' scope but returns empty string
|
||||
const effectiveScope = scope?.trim() ? scope : 'useraccount'
|
||||
|
||||
const accountData = {
|
||||
accessToken: accessToken,
|
||||
refreshToken: refreshToken || null,
|
||||
accountId: accountIdentifier,
|
||||
scope: effectiveScope,
|
||||
updatedAt: now,
|
||||
accessTokenExpiresAt: accessTokenExpiresAt,
|
||||
idToken: instanceUrl, // Store instance URL in idToken for API calls
|
||||
}
|
||||
|
||||
if (existing) {
|
||||
await db.update(account).set(accountData).where(eq(account.id, existing.id))
|
||||
logger.info('Updated existing ServiceNow account', { accountId: existing.id })
|
||||
} else {
|
||||
await safeAccountInsert(
|
||||
{
|
||||
id: `servicenow_${session.user.id}_${Date.now()}`,
|
||||
userId: session.user.id,
|
||||
providerId: 'servicenow',
|
||||
accountId: accountData.accountId,
|
||||
accessToken: accountData.accessToken,
|
||||
refreshToken: accountData.refreshToken || undefined,
|
||||
accessTokenExpiresAt: accountData.accessTokenExpiresAt,
|
||||
scope: accountData.scope,
|
||||
idToken: accountData.idToken,
|
||||
createdAt: now,
|
||||
updatedAt: now,
|
||||
},
|
||||
{ provider: 'ServiceNow', identifier: instanceUrl }
|
||||
)
|
||||
logger.info('Created new ServiceNow account')
|
||||
}
|
||||
|
||||
// Get return URL from cookie
|
||||
const returnUrl = request.cookies.get('servicenow_return_url')?.value
|
||||
|
||||
const redirectUrl = returnUrl || `${baseUrl}/workspace`
|
||||
const finalUrl = new URL(redirectUrl)
|
||||
finalUrl.searchParams.set('servicenow_connected', 'true')
|
||||
|
||||
const response = NextResponse.redirect(finalUrl.toString())
|
||||
|
||||
// Clean up all ServiceNow cookies
|
||||
response.cookies.delete('servicenow_pending_token')
|
||||
response.cookies.delete('servicenow_pending_refresh_token')
|
||||
response.cookies.delete('servicenow_pending_instance')
|
||||
response.cookies.delete('servicenow_pending_scope')
|
||||
response.cookies.delete('servicenow_pending_expires_in')
|
||||
response.cookies.delete('servicenow_return_url')
|
||||
|
||||
return response
|
||||
} catch (error) {
|
||||
logger.error('Error storing ServiceNow token:', error)
|
||||
return NextResponse.redirect(`${baseUrl}/workspace?error=servicenow_store_error`)
|
||||
}
|
||||
}
|
||||
264  apps/sim/app/api/auth/servicenow/authorize/route.ts  (Normal file)
@@ -0,0 +1,264 @@
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { env } from '@/lib/core/config/env'
|
||||
import { getBaseUrl } from '@/lib/core/utils/urls'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
|
||||
const logger = createLogger('ServiceNowAuthorize')
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
/**
|
||||
* ServiceNow OAuth scopes
|
||||
* useraccount - Default scope for user account access
|
||||
* Note: ServiceNow always returns 'useraccount' in OAuth responses regardless of requested scopes.
|
||||
* Table API permissions are configured at the OAuth application level in ServiceNow.
|
||||
*/
|
||||
const SERVICENOW_SCOPES = 'useraccount'
|
||||
|
||||
/**
|
||||
* Validates a ServiceNow instance URL format
|
||||
*/
|
||||
function isValidInstanceUrl(url: string): boolean {
|
||||
try {
|
||||
const parsed = new URL(url)
|
||||
return (
|
||||
parsed.protocol === 'https:' &&
|
||||
(parsed.hostname.endsWith('.service-now.com') || parsed.hostname.endsWith('.servicenow.com'))
|
||||
)
|
||||
} catch {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
export async function GET(request: NextRequest) {
|
||||
try {
|
||||
const session = await getSession()
|
||||
if (!session?.user?.id) {
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const clientId = env.SERVICENOW_CLIENT_ID
|
||||
|
||||
if (!clientId) {
|
||||
logger.error('SERVICENOW_CLIENT_ID not configured')
|
||||
return NextResponse.json({ error: 'ServiceNow client ID not configured' }, { status: 500 })
|
||||
}
|
||||
|
||||
const instanceUrl = request.nextUrl.searchParams.get('instanceUrl')
|
||||
const returnUrl = request.nextUrl.searchParams.get('returnUrl')
|
||||
|
||||
if (!instanceUrl) {
|
||||
const returnUrlParam = returnUrl ? encodeURIComponent(returnUrl) : ''
|
||||
return new NextResponse(
|
||||
`<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<title>Connect ServiceNow Instance</title>
|
||||
<meta charset="utf-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1">
|
||||
<style>
|
||||
body {
|
||||
font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, sans-serif;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
height: 100vh;
|
||||
margin: 0;
|
||||
background: linear-gradient(135deg, #81B5A1 0%, #5A8A75 100%);
|
||||
}
|
||||
.container {
|
||||
background: white;
|
||||
padding: 2rem;
|
||||
border-radius: 12px;
|
||||
box-shadow: 0 10px 40px rgba(0,0,0,0.1);
|
||||
text-align: center;
|
||||
max-width: 450px;
|
||||
width: 90%;
|
||||
}
|
||||
h2 {
|
||||
color: #111827;
|
||||
margin: 0 0 0.5rem 0;
|
||||
}
|
||||
p {
|
||||
color: #6b7280;
|
||||
margin: 0 0 1.5rem 0;
|
||||
}
|
||||
input {
|
||||
width: 100%;
|
||||
padding: 0.75rem;
|
||||
border: 1px solid #d1d5db;
|
||||
border-radius: 8px;
|
||||
font-size: 1rem;
|
||||
margin-bottom: 1rem;
|
||||
box-sizing: border-box;
|
||||
}
|
||||
input:focus {
|
||||
outline: none;
|
||||
border-color: #81B5A1;
|
||||
box-shadow: 0 0 0 3px rgba(129, 181, 161, 0.2);
|
||||
}
|
||||
button {
|
||||
width: 100%;
|
||||
padding: 0.75rem;
|
||||
background: #81B5A1;
|
||||
color: white;
|
||||
border: none;
|
||||
border-radius: 8px;
|
||||
font-size: 1rem;
|
||||
cursor: pointer;
|
||||
font-weight: 500;
|
||||
}
|
||||
button:hover {
|
||||
background: #6A9A87;
|
||||
}
|
||||
.help {
|
||||
font-size: 0.875rem;
|
||||
color: #9ca3af;
|
||||
margin-top: 1rem;
|
||||
}
|
||||
.error {
|
||||
color: #dc2626;
|
||||
font-size: 0.875rem;
|
||||
margin-bottom: 1rem;
|
||||
display: none;
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<div class="container">
|
||||
<h2>Connect Your ServiceNow Instance</h2>
|
||||
<p>Enter your ServiceNow instance URL to continue</p>
|
||||
<div id="error" class="error"></div>
|
||||
<form onsubmit="handleSubmit(event)">
|
||||
<input
|
||||
type="text"
|
||||
id="instanceUrl"
|
||||
placeholder="https://mycompany.service-now.com"
|
||||
required
|
||||
/>
|
||||
<button type="submit">Connect Instance</button>
|
||||
</form>
|
||||
<p class="help">Your instance URL looks like: https://yourcompany.service-now.com</p>
|
||||
</div>
|
||||
|
||||
<script>
|
||||
const returnUrl = '${returnUrlParam}';
|
||||
function handleSubmit(e) {
|
||||
e.preventDefault();
|
||||
const errorEl = document.getElementById('error');
|
||||
let instanceUrl = document.getElementById('instanceUrl').value.trim();
|
||||
|
||||
// Ensure https:// prefix
|
||||
if (!instanceUrl.startsWith('https://') && !instanceUrl.startsWith('http://')) {
|
||||
instanceUrl = 'https://' + instanceUrl;
|
||||
}
|
||||
|
||||
// Validate the URL format
|
||||
try {
|
||||
const parsed = new URL(instanceUrl);
|
||||
if (!parsed.hostname.endsWith('.service-now.com') && !parsed.hostname.endsWith('.servicenow.com')) {
|
||||
errorEl.textContent = 'Please enter a valid ServiceNow instance URL (e.g., https://yourcompany.service-now.com)';
|
||||
errorEl.style.display = 'block';
|
||||
return;
|
||||
}
|
||||
// Clean the URL (remove trailing slashes, paths)
|
||||
instanceUrl = parsed.origin;
|
||||
} catch {
|
||||
errorEl.textContent = 'Please enter a valid URL';
|
||||
errorEl.style.display = 'block';
|
||||
return;
|
||||
}
|
||||
|
||||
let url = window.location.pathname + '?instanceUrl=' + encodeURIComponent(instanceUrl);
|
||||
if (returnUrl) {
|
||||
url += '&returnUrl=' + returnUrl;
|
||||
}
|
||||
window.location.href = url;
|
||||
}
|
||||
</script>
|
||||
</body>
|
||||
</html>`,
|
||||
{
|
||||
headers: {
|
||||
'Content-Type': 'text/html; charset=utf-8',
|
||||
'Cache-Control': 'no-store, no-cache, must-revalidate',
|
||||
},
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
// Validate instance URL
|
||||
if (!isValidInstanceUrl(instanceUrl)) {
|
||||
logger.error('Invalid ServiceNow instance URL:', { instanceUrl })
|
||||
return NextResponse.json(
|
||||
{
|
||||
error:
|
||||
'Invalid ServiceNow instance URL. Must be a valid .service-now.com or .servicenow.com domain.',
|
||||
},
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
// Clean the instance URL
|
||||
const parsedUrl = new URL(instanceUrl)
|
||||
const cleanInstanceUrl = parsedUrl.origin
|
||||
|
||||
const baseUrl = getBaseUrl()
|
||||
const redirectUri = `${baseUrl}/api/auth/oauth2/callback/servicenow`
|
||||
|
||||
const state = crypto.randomUUID()
|
||||
|
||||
// ServiceNow OAuth authorization URL
|
||||
const oauthUrl =
|
||||
`${cleanInstanceUrl}/oauth_auth.do?` +
|
||||
new URLSearchParams({
|
||||
response_type: 'code',
|
||||
client_id: clientId,
|
||||
redirect_uri: redirectUri,
|
||||
state: state,
|
||||
scope: SERVICENOW_SCOPES,
|
||||
}).toString()
|
||||
|
||||
logger.info('Initiating ServiceNow OAuth:', {
|
||||
instanceUrl: cleanInstanceUrl,
|
||||
requestedScopes: SERVICENOW_SCOPES,
|
||||
redirectUri,
|
||||
returnUrl: returnUrl || 'not specified',
|
||||
})
|
||||
|
||||
const response = NextResponse.redirect(oauthUrl)
|
||||
|
||||
// Store state and instance URL in cookies for validation in callback
|
||||
response.cookies.set('servicenow_oauth_state', state, {
|
||||
httpOnly: true,
|
||||
secure: process.env.NODE_ENV === 'production',
|
||||
sameSite: 'lax',
|
||||
maxAge: 60 * 10, // 10 minutes
|
||||
path: '/',
|
||||
})
|
||||
|
||||
response.cookies.set('servicenow_instance_url', cleanInstanceUrl, {
|
||||
httpOnly: true,
|
||||
secure: process.env.NODE_ENV === 'production',
|
||||
sameSite: 'lax',
|
||||
maxAge: 60 * 10,
|
||||
path: '/',
|
||||
})
|
||||
|
||||
if (returnUrl) {
|
||||
response.cookies.set('servicenow_return_url', returnUrl, {
|
||||
httpOnly: true,
|
||||
secure: process.env.NODE_ENV === 'production',
|
||||
sameSite: 'lax',
|
||||
maxAge: 60 * 10,
|
||||
path: '/',
|
||||
})
|
||||
}
|
||||
|
||||
return response
|
||||
} catch (error) {
|
||||
logger.error('Error initiating ServiceNow authorization:', error)
|
||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
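As a usage sketch (with a placeholder instance and return path), a client starts the flow by navigating to the authorize route above with `instanceUrl` and an optional `returnUrl`; omitting `instanceUrl` serves the HTML prompt rendered by the handler.

```typescript
// Sketch: kicking off the ServiceNow OAuth flow from the browser.
// The instance URL and return path below are placeholders.
const instanceUrl = 'https://mycompany.service-now.com'
const returnUrl = '/workspace'

const authorizeUrl =
  '/api/auth/servicenow/authorize?' +
  new URLSearchParams({ instanceUrl, returnUrl }).toString()

// The route validates the instance URL, sets the state/instance cookies,
// and redirects to <instance>/oauth_auth.do; ServiceNow then returns to the callback route.
window.location.href = authorizeUrl
```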
129  apps/sim/app/api/mcp/discover/route.ts  (Normal file)
@@ -0,0 +1,129 @@
|
||||
import { db } from '@sim/db'
|
||||
import { permissions, workflowMcpServer, workspace } from '@sim/db/schema'
|
||||
import { and, eq, sql } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { checkHybridAuth } from '@/lib/auth/hybrid'
|
||||
import { getBaseUrl } from '@/lib/core/utils/urls'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
|
||||
const logger = createLogger('McpDiscoverAPI')
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
/**
|
||||
* GET - Discover all published MCP servers available to the authenticated user
|
||||
*
|
||||
* This endpoint allows external MCP clients to discover available servers
|
||||
* using just their API key, without needing to know workspace IDs.
|
||||
*
|
||||
* Authentication: API Key (X-API-Key header) or Session
|
||||
*
|
||||
* Returns all published MCP servers from workspaces the user has access to.
|
||||
*/
|
||||
export async function GET(request: NextRequest) {
|
||||
try {
|
||||
// Authenticate the request
|
||||
const auth = await checkHybridAuth(request, { requireWorkflowId: false })
|
||||
|
||||
if (!auth.success || !auth.userId) {
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: 'Authentication required. Provide X-API-Key header with your Sim API key.',
|
||||
},
|
||||
{ status: 401 }
|
||||
)
|
||||
}
|
||||
|
||||
const userId = auth.userId
|
||||
|
||||
// Get all workspaces the user has access to via permissions table
|
||||
const userWorkspacePermissions = await db
|
||||
.select({ entityId: permissions.entityId })
|
||||
.from(permissions)
|
||||
.where(and(eq(permissions.userId, userId), eq(permissions.entityType, 'workspace')))
|
||||
|
||||
const workspaceIds = userWorkspacePermissions.map((w) => w.entityId)
|
||||
|
||||
if (workspaceIds.length === 0) {
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
servers: [],
|
||||
message: 'No workspaces found for this user',
|
||||
})
|
||||
}
|
||||
|
||||
// Get all published MCP servers from user's workspaces with tool count
|
||||
const servers = await db
|
||||
.select({
|
||||
id: workflowMcpServer.id,
|
||||
name: workflowMcpServer.name,
|
||||
description: workflowMcpServer.description,
|
||||
workspaceId: workflowMcpServer.workspaceId,
|
||||
workspaceName: workspace.name,
|
||||
isPublished: workflowMcpServer.isPublished,
|
||||
publishedAt: workflowMcpServer.publishedAt,
|
||||
toolCount: sql<number>`(
|
||||
SELECT COUNT(*)::int
|
||||
FROM "workflow_mcp_tool"
|
||||
WHERE "workflow_mcp_tool"."server_id" = "workflow_mcp_server"."id"
|
||||
)`.as('tool_count'),
|
||||
})
|
||||
.from(workflowMcpServer)
|
||||
.leftJoin(workspace, eq(workflowMcpServer.workspaceId, workspace.id))
|
||||
.where(
|
||||
and(
|
||||
eq(workflowMcpServer.isPublished, true),
|
||||
sql`${workflowMcpServer.workspaceId} IN ${workspaceIds}`
|
||||
)
|
||||
)
|
||||
.orderBy(workflowMcpServer.name)
|
||||
|
||||
const baseUrl = getBaseUrl()
|
||||
|
||||
// Format response with connection URLs
|
||||
const formattedServers = servers.map((server) => ({
|
||||
id: server.id,
|
||||
name: server.name,
|
||||
description: server.description,
|
||||
workspace: {
|
||||
id: server.workspaceId,
|
||||
name: server.workspaceName,
|
||||
},
|
||||
toolCount: server.toolCount || 0,
|
||||
publishedAt: server.publishedAt,
|
||||
urls: {
|
||||
http: `${baseUrl}/api/mcp/serve/${server.id}`,
|
||||
sse: `${baseUrl}/api/mcp/serve/${server.id}/sse`,
|
||||
},
|
||||
}))
|
||||
|
||||
logger.info(`User ${userId} discovered ${formattedServers.length} MCP servers`)
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
servers: formattedServers,
|
||||
authentication: {
|
||||
method: 'API Key',
|
||||
header: 'X-API-Key',
|
||||
description: 'Include your Sim API key in the X-API-Key header for all MCP requests',
|
||||
},
|
||||
usage: {
|
||||
listTools: {
|
||||
method: 'POST',
|
||||
body: '{"jsonrpc":"2.0","id":1,"method":"tools/list"}',
|
||||
},
|
||||
callTool: {
|
||||
method: 'POST',
|
||||
body: '{"jsonrpc":"2.0","id":1,"method":"tools/call","params":{"name":"TOOL_NAME","arguments":{}}}',
|
||||
},
|
||||
},
|
||||
})
|
||||
} catch (error) {
|
||||
logger.error('Error discovering MCP servers:', error)
|
||||
return NextResponse.json(
|
||||
{ success: false, error: 'Failed to discover MCP servers' },
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
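A minimal client-side sketch of calling this endpoint, assuming a placeholder base URL; the `X-API-Key` header and the response shape (`servers[].urls.http` / `urls.sse`) follow the handler above.

```typescript
// Sketch: discovering published MCP servers with a Sim API key.
async function discoverServers(baseUrl: string, apiKey: string) {
  const res = await fetch(`${baseUrl}/api/mcp/discover`, {
    headers: { 'X-API-Key': apiKey },
  })
  if (!res.ok) throw new Error(`Discover failed: ${res.status}`)
  const body = await res.json()
  // Each server entry includes the HTTP and SSE serve URLs plus a tool count.
  return body.servers as Array<{
    id: string
    name: string
    toolCount: number
    urls: { http: string; sse: string }
  }>
}
```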
360  apps/sim/app/api/mcp/serve/[serverId]/route.ts  (Normal file)
@@ -0,0 +1,360 @@
|
||||
import { db } from '@sim/db'
|
||||
import { workflow, workflowMcpServer, workflowMcpTool } from '@sim/db/schema'
|
||||
import { eq } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { checkHybridAuth } from '@/lib/auth/hybrid'
|
||||
import { getBaseUrl } from '@/lib/core/utils/urls'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
|
||||
const logger = createLogger('WorkflowMcpServeAPI')
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
interface RouteParams {
|
||||
serverId: string
|
||||
}
|
||||
|
||||
/**
|
||||
* MCP JSON-RPC Request
|
||||
*/
|
||||
interface JsonRpcRequest {
|
||||
jsonrpc: '2.0'
|
||||
id: string | number
|
||||
method: string
|
||||
params?: Record<string, unknown>
|
||||
}
|
||||
|
||||
/**
|
||||
* MCP JSON-RPC Response
|
||||
*/
|
||||
interface JsonRpcResponse {
|
||||
jsonrpc: '2.0'
|
||||
id: string | number
|
||||
result?: unknown
|
||||
error?: {
|
||||
code: number
|
||||
message: string
|
||||
data?: unknown
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Create JSON-RPC success response
|
||||
*/
|
||||
function createJsonRpcResponse(id: string | number, result: unknown): JsonRpcResponse {
|
||||
return {
|
||||
jsonrpc: '2.0',
|
||||
id,
|
||||
result,
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Create JSON-RPC error response
|
||||
*/
|
||||
function createJsonRpcError(
|
||||
id: string | number,
|
||||
code: number,
|
||||
message: string,
|
||||
data?: unknown
|
||||
): JsonRpcResponse {
|
||||
return {
|
||||
jsonrpc: '2.0',
|
||||
id,
|
||||
error: { code, message, data },
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate that the server exists and is published
|
||||
*/
|
||||
async function validateServer(serverId: string) {
|
||||
const [server] = await db
|
||||
.select({
|
||||
id: workflowMcpServer.id,
|
||||
name: workflowMcpServer.name,
|
||||
workspaceId: workflowMcpServer.workspaceId,
|
||||
isPublished: workflowMcpServer.isPublished,
|
||||
})
|
||||
.from(workflowMcpServer)
|
||||
.where(eq(workflowMcpServer.id, serverId))
|
||||
.limit(1)
|
||||
|
||||
return server
|
||||
}
|
||||
|
||||
/**
|
||||
* GET - Server info and capabilities (MCP initialize)
|
||||
*/
|
||||
export async function GET(request: NextRequest, { params }: { params: Promise<RouteParams> }) {
|
||||
const { serverId } = await params
|
||||
|
||||
try {
|
||||
const server = await validateServer(serverId)
|
||||
|
||||
if (!server) {
|
||||
return NextResponse.json({ error: 'Server not found' }, { status: 404 })
|
||||
}
|
||||
|
||||
if (!server.isPublished) {
|
||||
return NextResponse.json({ error: 'Server is not published' }, { status: 403 })
|
||||
}
|
||||
|
||||
// Return server capabilities
|
||||
return NextResponse.json({
|
||||
name: server.name,
|
||||
version: '1.0.0',
|
||||
protocolVersion: '2024-11-05',
|
||||
capabilities: {
|
||||
tools: {},
|
||||
},
|
||||
instructions: `This MCP server exposes workflow tools from Sim Studio. Each tool executes a deployed workflow.`,
|
||||
})
|
||||
} catch (error) {
|
||||
logger.error('Error getting MCP server info:', error)
|
||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* POST - Handle MCP JSON-RPC requests
|
||||
*/
|
||||
export async function POST(request: NextRequest, { params }: { params: Promise<RouteParams> }) {
|
||||
const { serverId } = await params
|
||||
|
||||
try {
|
||||
// Validate server
|
||||
const server = await validateServer(serverId)
|
||||
|
||||
if (!server) {
|
||||
return NextResponse.json({ error: 'Server not found' }, { status: 404 })
|
||||
}
|
||||
|
||||
if (!server.isPublished) {
|
||||
return NextResponse.json({ error: 'Server is not published' }, { status: 403 })
|
||||
}
|
||||
|
||||
// Authenticate the request
|
||||
const auth = await checkHybridAuth(request, { requireWorkflowId: false })
|
||||
if (!auth.success || !auth.userId) {
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
// Parse JSON-RPC request
|
||||
const body = await request.json()
|
||||
const rpcRequest = body as JsonRpcRequest
|
||||
|
||||
if (rpcRequest.jsonrpc !== '2.0' || !rpcRequest.method) {
|
||||
return NextResponse.json(createJsonRpcError(rpcRequest?.id || 0, -32600, 'Invalid Request'), {
|
||||
status: 400,
|
||||
})
|
||||
}
|
||||
|
||||
// Handle different MCP methods
|
||||
switch (rpcRequest.method) {
|
||||
case 'initialize':
|
||||
return NextResponse.json(
|
||||
createJsonRpcResponse(rpcRequest.id, {
|
||||
protocolVersion: '2024-11-05',
|
||||
capabilities: {
|
||||
tools: {},
|
||||
},
|
||||
serverInfo: {
|
||||
name: server.name,
|
||||
version: '1.0.0',
|
||||
},
|
||||
})
|
||||
)
|
||||
|
||||
case 'tools/list':
|
||||
return handleToolsList(rpcRequest, serverId)
|
||||
|
||||
case 'tools/call': {
|
||||
// Get the API key from the request to forward to the workflow execute call
|
||||
const apiKey =
|
||||
request.headers.get('X-API-Key') ||
|
||||
request.headers.get('Authorization')?.replace('Bearer ', '')
|
||||
return handleToolsCall(rpcRequest, serverId, auth.userId, server.workspaceId, apiKey)
|
||||
}
|
||||
|
||||
case 'ping':
|
||||
return NextResponse.json(createJsonRpcResponse(rpcRequest.id, {}))
|
||||
|
||||
default:
|
||||
return NextResponse.json(
|
||||
createJsonRpcError(rpcRequest.id, -32601, `Method not found: ${rpcRequest.method}`),
|
||||
{ status: 404 }
|
||||
)
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error('Error handling MCP request:', error)
|
||||
return NextResponse.json(createJsonRpcError(0, -32603, 'Internal error'), { status: 500 })
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle tools/list method
|
||||
*/
|
||||
async function handleToolsList(
|
||||
rpcRequest: JsonRpcRequest,
|
||||
serverId: string
|
||||
): Promise<NextResponse> {
|
||||
try {
|
||||
const tools = await db
|
||||
.select({
|
||||
id: workflowMcpTool.id,
|
||||
toolName: workflowMcpTool.toolName,
|
||||
toolDescription: workflowMcpTool.toolDescription,
|
||||
parameterSchema: workflowMcpTool.parameterSchema,
|
||||
isEnabled: workflowMcpTool.isEnabled,
|
||||
workflowId: workflowMcpTool.workflowId,
|
||||
})
|
||||
.from(workflowMcpTool)
|
||||
.where(eq(workflowMcpTool.serverId, serverId))
|
||||
|
||||
const mcpTools = tools
|
||||
.filter((tool) => tool.isEnabled)
|
||||
.map((tool) => ({
|
||||
name: tool.toolName,
|
||||
description: tool.toolDescription || `Execute workflow tool: ${tool.toolName}`,
|
||||
inputSchema: tool.parameterSchema || {
|
||||
type: 'object',
|
||||
properties: {
|
||||
input: {
|
||||
type: 'object',
|
||||
description: 'Input data for the workflow',
|
||||
},
|
||||
},
|
||||
},
|
||||
}))
|
||||
|
||||
return NextResponse.json(createJsonRpcResponse(rpcRequest.id, { tools: mcpTools }))
|
||||
} catch (error) {
|
||||
logger.error('Error listing tools:', error)
|
||||
return NextResponse.json(createJsonRpcError(rpcRequest.id, -32603, 'Failed to list tools'), {
|
||||
status: 500,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle tools/call method
|
||||
*/
|
||||
async function handleToolsCall(
|
||||
rpcRequest: JsonRpcRequest,
|
||||
serverId: string,
|
||||
userId: string,
|
||||
workspaceId: string,
|
||||
apiKey?: string | null
|
||||
): Promise<NextResponse> {
|
||||
try {
|
||||
const params = rpcRequest.params as
|
||||
| { name: string; arguments?: Record<string, unknown> }
|
||||
| undefined
|
||||
|
||||
if (!params?.name) {
|
||||
return NextResponse.json(
|
||||
createJsonRpcError(rpcRequest.id, -32602, 'Invalid params: tool name required'),
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
// Find the tool
|
||||
const [tool] = await db
|
||||
.select({
|
||||
id: workflowMcpTool.id,
|
||||
toolName: workflowMcpTool.toolName,
|
||||
workflowId: workflowMcpTool.workflowId,
|
||||
isEnabled: workflowMcpTool.isEnabled,
|
||||
})
|
||||
.from(workflowMcpTool)
|
||||
.where(eq(workflowMcpTool.serverId, serverId))
|
||||
.then((tools) => tools.filter((t) => t.toolName === params.name))
|
||||
|
||||
if (!tool) {
|
||||
return NextResponse.json(
|
||||
createJsonRpcError(rpcRequest.id, -32602, `Tool not found: ${params.name}`),
|
||||
{ status: 404 }
|
||||
)
|
||||
}
|
||||
|
||||
if (!tool.isEnabled) {
|
||||
return NextResponse.json(
|
||||
createJsonRpcError(rpcRequest.id, -32602, `Tool is disabled: ${params.name}`),
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
// Verify workflow is still deployed
|
||||
const [workflowRecord] = await db
|
||||
.select({ id: workflow.id, isDeployed: workflow.isDeployed })
|
||||
.from(workflow)
|
||||
.where(eq(workflow.id, tool.workflowId))
|
||||
.limit(1)
|
||||
|
||||
if (!workflowRecord || !workflowRecord.isDeployed) {
|
||||
return NextResponse.json(
|
||||
createJsonRpcError(rpcRequest.id, -32603, 'Workflow is not deployed'),
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
// Execute the workflow
|
||||
const baseUrl = getBaseUrl()
|
||||
const executeUrl = `${baseUrl}/api/workflows/${tool.workflowId}/execute`
|
||||
|
||||
logger.info(`Executing workflow ${tool.workflowId} via MCP tool ${params.name}`)
|
||||
|
||||
// Build headers for the internal execute call
|
||||
const executeHeaders: Record<string, string> = {
|
||||
'Content-Type': 'application/json',
|
||||
}
|
||||
|
||||
// Forward the API key for authentication
|
||||
if (apiKey) {
|
||||
executeHeaders['X-API-Key'] = apiKey
|
||||
}
|
||||
|
||||
const executeResponse = await fetch(executeUrl, {
|
||||
method: 'POST',
|
||||
headers: executeHeaders,
|
||||
body: JSON.stringify({
|
||||
input: params.arguments || {},
|
||||
triggerType: 'mcp',
|
||||
}),
|
||||
})
|
||||
|
||||
const executeResult = await executeResponse.json()
|
||||
|
||||
if (!executeResponse.ok) {
|
||||
return NextResponse.json(
|
||||
createJsonRpcError(
|
||||
rpcRequest.id,
|
||||
-32603,
|
||||
executeResult.error || 'Workflow execution failed'
|
||||
),
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
|
||||
// Format response for MCP
|
||||
const content = [
|
||||
{
|
||||
type: 'text',
|
||||
text: JSON.stringify(executeResult.output || executeResult, null, 2),
|
||||
},
|
||||
]
|
||||
|
||||
return NextResponse.json(
|
||||
createJsonRpcResponse(rpcRequest.id, {
|
||||
content,
|
||||
isError: !executeResult.success,
|
||||
})
|
||||
)
|
||||
} catch (error) {
|
||||
logger.error('Error calling tool:', error)
|
||||
return NextResponse.json(createJsonRpcError(rpcRequest.id, -32603, 'Tool execution failed'), {
|
||||
status: 500,
|
||||
})
|
||||
}
|
||||
}
|
||||
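For reference, a minimal sketch of driving this endpoint as an MCP client over plain HTTP, assuming `serveUrl` is one of the `urls.http` values returned by the discover endpoint; the request bodies mirror the `tools/list` and `tools/call` shapes handled above.

```typescript
// Sketch: calling a published workflow tool through the MCP serve endpoint.
async function callWorkflowTool(serveUrl: string, apiKey: string, toolName: string) {
  const headers = { 'Content-Type': 'application/json', 'X-API-Key': apiKey }

  // 1) List the tools the server exposes.
  const listRes = await fetch(serveUrl, {
    method: 'POST',
    headers,
    body: JSON.stringify({ jsonrpc: '2.0', id: 1, method: 'tools/list' }),
  })
  const listBody = await listRes.json()
  console.log(listBody.result.tools.map((t: { name: string }) => t.name))

  // 2) Invoke one tool; `arguments` is forwarded as the workflow's input payload.
  const callRes = await fetch(serveUrl, {
    method: 'POST',
    headers,
    body: JSON.stringify({
      jsonrpc: '2.0',
      id: 2,
      method: 'tools/call',
      params: { name: toolName, arguments: {} },
    }),
  })
  // Response shape: { jsonrpc, id, result: { content: [{ type: 'text', text }], isError } }
  return callRes.json()
}
```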
197  apps/sim/app/api/mcp/serve/[serverId]/sse/route.ts  (Normal file)
@@ -0,0 +1,197 @@
/**
 * MCP SSE/HTTP Endpoint
 *
 * Implements MCP protocol using the official @modelcontextprotocol/sdk
 * with a Next.js-compatible transport adapter.
 */

import { db } from '@sim/db'
import { workflowMcpServer } from '@sim/db/schema'
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { SSE_HEADERS } from '@/lib/core/utils/sse'
import { createLogger } from '@/lib/logs/console/logger'
import { createMcpSseStream, handleMcpRequest } from '@/lib/mcp/workflow-mcp-server'

const logger = createLogger('WorkflowMcpSSE')

export const dynamic = 'force-dynamic'
export const runtime = 'nodejs'

interface RouteParams {
  serverId: string
}

/**
 * Validate that the server exists and is published
 */
async function validateServer(serverId: string) {
  const [server] = await db
    .select({
      id: workflowMcpServer.id,
      name: workflowMcpServer.name,
      workspaceId: workflowMcpServer.workspaceId,
      isPublished: workflowMcpServer.isPublished,
    })
    .from(workflowMcpServer)
    .where(eq(workflowMcpServer.id, serverId))
    .limit(1)

  return server
}

/**
 * GET - SSE endpoint for MCP protocol
 * Establishes a Server-Sent Events connection for MCP notifications
 */
export async function GET(request: NextRequest, { params }: { params: Promise<RouteParams> }) {
  const { serverId } = await params

  try {
    // Validate server exists and is published
    const server = await validateServer(serverId)

    if (!server) {
      return NextResponse.json({ error: 'Server not found' }, { status: 404 })
    }

    if (!server.isPublished) {
      return NextResponse.json({ error: 'Server is not published' }, { status: 403 })
    }

    // Check authentication
    const auth = await checkHybridAuth(request, { requireWorkflowId: false })
    if (!auth.success || !auth.userId) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    const apiKey =
      request.headers.get('X-API-Key') ||
      request.headers.get('Authorization')?.replace('Bearer ', '')

    // Create SSE stream using the SDK-based server
    const stream = createMcpSseStream({
      serverId,
      serverName: server.name,
      userId: auth.userId,
      workspaceId: server.workspaceId,
      apiKey,
    })

    return new NextResponse(stream, {
      headers: {
        ...SSE_HEADERS,
        'X-MCP-Server-Id': serverId,
        'X-MCP-Server-Name': server.name,
      },
    })
  } catch (error) {
    logger.error('Error establishing SSE connection:', error)
    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}

/**
 * POST - Handle MCP JSON-RPC messages
 * This is the primary endpoint for MCP protocol messages using the SDK
 */
export async function POST(request: NextRequest, { params }: { params: Promise<RouteParams> }) {
  const { serverId } = await params

  try {
    // Validate server
    const server = await validateServer(serverId)

    if (!server) {
      return NextResponse.json(
        {
          jsonrpc: '2.0',
          id: null,
          error: { code: -32000, message: 'Server not found' },
        },
        { status: 404 }
      )
    }

    if (!server.isPublished) {
      return NextResponse.json(
        {
          jsonrpc: '2.0',
          id: null,
          error: { code: -32000, message: 'Server is not published' },
        },
        { status: 403 }
      )
    }

    // Check authentication
    const auth = await checkHybridAuth(request, { requireWorkflowId: false })
    if (!auth.success || !auth.userId) {
      return NextResponse.json(
        {
          jsonrpc: '2.0',
          id: null,
          error: { code: -32000, message: 'Unauthorized' },
        },
        { status: 401 }
      )
    }

    const apiKey =
      request.headers.get('X-API-Key') ||
      request.headers.get('Authorization')?.replace('Bearer ', '')

    // Handle the request using the SDK-based server
    return handleMcpRequest(
      {
        serverId,
        serverName: server.name,
        userId: auth.userId,
        workspaceId: server.workspaceId,
        apiKey,
      },
      request
    )
  } catch (error) {
    logger.error('Error handling MCP POST request:', error)
    return NextResponse.json(
      {
        jsonrpc: '2.0',
        id: null,
        error: { code: -32603, message: 'Internal error' },
      },
      { status: 500 }
    )
  }
}

/**
 * DELETE - Handle session termination
 * MCP clients may send DELETE to end a session
 */
export async function DELETE(request: NextRequest, { params }: { params: Promise<RouteParams> }) {
  const { serverId } = await params

  try {
    // Validate server exists
    const server = await validateServer(serverId)

    if (!server) {
      return NextResponse.json({ error: 'Server not found' }, { status: 404 })
    }

    // Check authentication
    const auth = await checkHybridAuth(request, { requireWorkflowId: false })
    if (!auth.success || !auth.userId) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    logger.info(`MCP session terminated for server ${serverId}`)

    return new NextResponse(null, { status: 204 })
  } catch (error) {
    logger.error('Error handling MCP DELETE request:', error)
    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}
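As a usage sketch (not part of the diff), an MCP client could open the notification stream with an API key; the header names below are the ones the GET handler actually reads, while the Accept header is conventional for SSE.

// Sketch: open the SSE notification stream for a published server.
const sse = await fetch(`${baseUrl}/api/mcp/serve/${serverId}/sse`, {
  headers: { Accept: 'text/event-stream', 'X-API-Key': apiKey },
})
console.log(sse.headers.get('X-MCP-Server-Name')) // server name echoed back by the route
// JSON-RPC messages (initialize, tools/list, tools/call) are POSTed to the same URL.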
150
apps/sim/app/api/mcp/workflow-servers/[id]/publish/route.ts
Normal file
@@ -0,0 +1,150 @@
|
||||
import { db } from '@sim/db'
|
||||
import { workflowMcpServer, workflowMcpTool } from '@sim/db/schema'
|
||||
import { and, eq } from 'drizzle-orm'
|
||||
import type { NextRequest } from 'next/server'
|
||||
import { getBaseUrl } from '@/lib/core/utils/urls'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { withMcpAuth } from '@/lib/mcp/middleware'
|
||||
import { createMcpErrorResponse, createMcpSuccessResponse } from '@/lib/mcp/utils'
|
||||
|
||||
const logger = createLogger('WorkflowMcpServerPublishAPI')
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
interface RouteParams {
|
||||
id: string
|
||||
}
|
||||
|
||||
/**
|
||||
* POST - Publish a workflow MCP server (make it accessible via OAuth)
|
||||
*/
|
||||
export const POST = withMcpAuth<RouteParams>('admin')(
|
||||
async (request: NextRequest, { userId, workspaceId, requestId }, { params }) => {
|
||||
try {
|
||||
const { id: serverId } = await params
|
||||
|
||||
logger.info(`[${requestId}] Publishing workflow MCP server: ${serverId}`)
|
||||
|
||||
const [existingServer] = await db
|
||||
.select({ id: workflowMcpServer.id, isPublished: workflowMcpServer.isPublished })
|
||||
.from(workflowMcpServer)
|
||||
.where(
|
||||
and(eq(workflowMcpServer.id, serverId), eq(workflowMcpServer.workspaceId, workspaceId))
|
||||
)
|
||||
.limit(1)
|
||||
|
||||
if (!existingServer) {
|
||||
return createMcpErrorResponse(new Error('Server not found'), 'Server not found', 404)
|
||||
}
|
||||
|
||||
if (existingServer.isPublished) {
|
||||
return createMcpErrorResponse(
|
||||
new Error('Server is already published'),
|
||||
'Server is already published',
|
||||
400
|
||||
)
|
||||
}
|
||||
|
||||
// Check if server has at least one tool
|
||||
const tools = await db
|
||||
.select({ id: workflowMcpTool.id })
|
||||
.from(workflowMcpTool)
|
||||
.where(eq(workflowMcpTool.serverId, serverId))
|
||||
.limit(1)
|
||||
|
||||
if (tools.length === 0) {
|
||||
return createMcpErrorResponse(
|
||||
new Error(
|
||||
'Cannot publish server without any tools. Add at least one workflow as a tool first.'
|
||||
),
|
||||
'Server has no tools',
|
||||
400
|
||||
)
|
||||
}
|
||||
|
||||
const [updatedServer] = await db
|
||||
.update(workflowMcpServer)
|
||||
.set({
|
||||
isPublished: true,
|
||||
publishedAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
})
|
||||
.where(eq(workflowMcpServer.id, serverId))
|
||||
.returning()
|
||||
|
||||
const baseUrl = getBaseUrl()
|
||||
const mcpServerUrl = `${baseUrl}/api/mcp/serve/${serverId}/sse`
|
||||
|
||||
logger.info(`[${requestId}] Successfully published workflow MCP server: ${serverId}`)
|
||||
|
||||
return createMcpSuccessResponse({
|
||||
server: updatedServer,
|
||||
mcpServerUrl,
|
||||
message: 'Server published successfully. External MCP clients can now connect using OAuth.',
|
||||
})
|
||||
} catch (error) {
|
||||
logger.error(`[${requestId}] Error publishing workflow MCP server:`, error)
|
||||
return createMcpErrorResponse(
|
||||
error instanceof Error ? error : new Error('Failed to publish workflow MCP server'),
|
||||
'Failed to publish workflow MCP server',
|
||||
500
|
||||
)
|
||||
}
|
||||
}
|
||||
)
|
||||
|
||||
/**
|
||||
* DELETE - Unpublish a workflow MCP server
|
||||
*/
|
||||
export const DELETE = withMcpAuth<RouteParams>('admin')(
|
||||
async (request: NextRequest, { userId, workspaceId, requestId }, { params }) => {
|
||||
try {
|
||||
const { id: serverId } = await params
|
||||
|
||||
logger.info(`[${requestId}] Unpublishing workflow MCP server: ${serverId}`)
|
||||
|
||||
const [existingServer] = await db
|
||||
.select({ id: workflowMcpServer.id, isPublished: workflowMcpServer.isPublished })
|
||||
.from(workflowMcpServer)
|
||||
.where(
|
||||
and(eq(workflowMcpServer.id, serverId), eq(workflowMcpServer.workspaceId, workspaceId))
|
||||
)
|
||||
.limit(1)
|
||||
|
||||
if (!existingServer) {
|
||||
return createMcpErrorResponse(new Error('Server not found'), 'Server not found', 404)
|
||||
}
|
||||
|
||||
if (!existingServer.isPublished) {
|
||||
return createMcpErrorResponse(
|
||||
new Error('Server is not published'),
|
||||
'Server is not published',
|
||||
400
|
||||
)
|
||||
}
|
||||
|
||||
const [updatedServer] = await db
|
||||
.update(workflowMcpServer)
|
||||
.set({
|
||||
isPublished: false,
|
||||
updatedAt: new Date(),
|
||||
})
|
||||
.where(eq(workflowMcpServer.id, serverId))
|
||||
.returning()
|
||||
|
||||
logger.info(`[${requestId}] Successfully unpublished workflow MCP server: ${serverId}`)
|
||||
|
||||
return createMcpSuccessResponse({
|
||||
server: updatedServer,
|
||||
message: 'Server unpublished successfully. External MCP clients can no longer connect.',
|
||||
})
|
||||
} catch (error) {
|
||||
logger.error(`[${requestId}] Error unpublishing workflow MCP server:`, error)
|
||||
return createMcpErrorResponse(
|
||||
error instanceof Error ? error : new Error('Failed to unpublish workflow MCP server'),
|
||||
'Failed to unpublish workflow MCP server',
|
||||
500
|
||||
)
|
||||
}
|
||||
}
|
||||
)
|
||||
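A sketch of how the publish endpoint above might be called; the API-key header is an assumption, since the credentials accepted by withMcpAuth are not shown in this diff.

// Sketch: publish a server, then read back the public MCP URL.
const res = await fetch(`${baseUrl}/api/mcp/workflow-servers/${serverId}/publish`, {
  method: 'POST',
  headers: { 'X-API-Key': apiKey }, // assumption: withMcpAuth accepts an API key here
})
const body = await res.json() // includes mcpServerUrl (`${baseUrl}/api/mcp/serve/${serverId}/sse`); exact envelope depends on createMcpSuccessResponse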
157
apps/sim/app/api/mcp/workflow-servers/[id]/route.ts
Normal file
@@ -0,0 +1,157 @@
|
||||
import { db } from '@sim/db'
|
||||
import { workflowMcpServer, workflowMcpTool } from '@sim/db/schema'
|
||||
import { and, eq } from 'drizzle-orm'
|
||||
import type { NextRequest } from 'next/server'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { getParsedBody, withMcpAuth } from '@/lib/mcp/middleware'
|
||||
import { createMcpErrorResponse, createMcpSuccessResponse } from '@/lib/mcp/utils'
|
||||
|
||||
const logger = createLogger('WorkflowMcpServerAPI')
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
interface RouteParams {
|
||||
id: string
|
||||
}
|
||||
|
||||
/**
|
||||
* GET - Get a specific workflow MCP server with its tools
|
||||
*/
|
||||
export const GET = withMcpAuth<RouteParams>('read')(
|
||||
async (request: NextRequest, { userId, workspaceId, requestId }, { params }) => {
|
||||
try {
|
||||
const { id: serverId } = await params
|
||||
|
||||
logger.info(`[${requestId}] Getting workflow MCP server: ${serverId}`)
|
||||
|
||||
const [server] = await db
|
||||
.select({
|
||||
id: workflowMcpServer.id,
|
||||
workspaceId: workflowMcpServer.workspaceId,
|
||||
createdBy: workflowMcpServer.createdBy,
|
||||
name: workflowMcpServer.name,
|
||||
description: workflowMcpServer.description,
|
||||
isPublished: workflowMcpServer.isPublished,
|
||||
publishedAt: workflowMcpServer.publishedAt,
|
||||
createdAt: workflowMcpServer.createdAt,
|
||||
updatedAt: workflowMcpServer.updatedAt,
|
||||
})
|
||||
.from(workflowMcpServer)
|
||||
.where(
|
||||
and(eq(workflowMcpServer.id, serverId), eq(workflowMcpServer.workspaceId, workspaceId))
|
||||
)
|
||||
.limit(1)
|
||||
|
||||
if (!server) {
|
||||
return createMcpErrorResponse(new Error('Server not found'), 'Server not found', 404)
|
||||
}
|
||||
|
||||
const tools = await db
|
||||
.select()
|
||||
.from(workflowMcpTool)
|
||||
.where(eq(workflowMcpTool.serverId, serverId))
|
||||
|
||||
logger.info(
|
||||
`[${requestId}] Found workflow MCP server: ${server.name} with ${tools.length} tools`
|
||||
)
|
||||
|
||||
return createMcpSuccessResponse({ server, tools })
|
||||
} catch (error) {
|
||||
logger.error(`[${requestId}] Error getting workflow MCP server:`, error)
|
||||
return createMcpErrorResponse(
|
||||
error instanceof Error ? error : new Error('Failed to get workflow MCP server'),
|
||||
'Failed to get workflow MCP server',
|
||||
500
|
||||
)
|
||||
}
|
||||
}
|
||||
)
|
||||
|
||||
/**
|
||||
* PATCH - Update a workflow MCP server
|
||||
*/
|
||||
export const PATCH = withMcpAuth<RouteParams>('write')(
|
||||
async (request: NextRequest, { userId, workspaceId, requestId }, { params }) => {
|
||||
try {
|
||||
const { id: serverId } = await params
|
||||
const body = getParsedBody(request) || (await request.json())
|
||||
|
||||
logger.info(`[${requestId}] Updating workflow MCP server: ${serverId}`)
|
||||
|
||||
const [existingServer] = await db
|
||||
.select({ id: workflowMcpServer.id })
|
||||
.from(workflowMcpServer)
|
||||
.where(
|
||||
and(eq(workflowMcpServer.id, serverId), eq(workflowMcpServer.workspaceId, workspaceId))
|
||||
)
|
||||
.limit(1)
|
||||
|
||||
if (!existingServer) {
|
||||
return createMcpErrorResponse(new Error('Server not found'), 'Server not found', 404)
|
||||
}
|
||||
|
||||
const updateData: Record<string, unknown> = {
|
||||
updatedAt: new Date(),
|
||||
}
|
||||
|
||||
if (body.name !== undefined) {
|
||||
updateData.name = body.name.trim()
|
||||
}
|
||||
if (body.description !== undefined) {
|
||||
updateData.description = body.description?.trim() || null
|
||||
}
|
||||
|
||||
const [updatedServer] = await db
|
||||
.update(workflowMcpServer)
|
||||
.set(updateData)
|
||||
.where(eq(workflowMcpServer.id, serverId))
|
||||
.returning()
|
||||
|
||||
logger.info(`[${requestId}] Successfully updated workflow MCP server: ${serverId}`)
|
||||
|
||||
return createMcpSuccessResponse({ server: updatedServer })
|
||||
} catch (error) {
|
||||
logger.error(`[${requestId}] Error updating workflow MCP server:`, error)
|
||||
return createMcpErrorResponse(
|
||||
error instanceof Error ? error : new Error('Failed to update workflow MCP server'),
|
||||
'Failed to update workflow MCP server',
|
||||
500
|
||||
)
|
||||
}
|
||||
}
|
||||
)
|
||||
|
||||
/**
|
||||
* DELETE - Delete a workflow MCP server and all its tools
|
||||
*/
|
||||
export const DELETE = withMcpAuth<RouteParams>('admin')(
|
||||
async (request: NextRequest, { userId, workspaceId, requestId }, { params }) => {
|
||||
try {
|
||||
const { id: serverId } = await params
|
||||
|
||||
logger.info(`[${requestId}] Deleting workflow MCP server: ${serverId}`)
|
||||
|
||||
const [deletedServer] = await db
|
||||
.delete(workflowMcpServer)
|
||||
.where(
|
||||
and(eq(workflowMcpServer.id, serverId), eq(workflowMcpServer.workspaceId, workspaceId))
|
||||
)
|
||||
.returning()
|
||||
|
||||
if (!deletedServer) {
|
||||
return createMcpErrorResponse(new Error('Server not found'), 'Server not found', 404)
|
||||
}
|
||||
|
||||
logger.info(`[${requestId}] Successfully deleted workflow MCP server: ${serverId}`)
|
||||
|
||||
return createMcpSuccessResponse({ message: `Server ${serverId} deleted successfully` })
|
||||
} catch (error) {
|
||||
logger.error(`[${requestId}] Error deleting workflow MCP server:`, error)
|
||||
return createMcpErrorResponse(
|
||||
error instanceof Error ? error : new Error('Failed to delete workflow MCP server'),
|
||||
'Failed to delete workflow MCP server',
|
||||
500
|
||||
)
|
||||
}
|
||||
}
|
||||
)
|
||||
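A usage sketch for the PATCH handler above; field names are taken from the handler, and the API-key header is again an assumption about withMcpAuth.

// Sketch: rename a workflow MCP server and update its description.
await fetch(`${baseUrl}/api/mcp/workflow-servers/${serverId}`, {
  method: 'PATCH',
  headers: { 'Content-Type': 'application/json', 'X-API-Key': apiKey },
  body: JSON.stringify({ name: 'Support workflows', description: 'Tools for the support team' }),
})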
@@ -0,0 +1,178 @@
|
||||
import { db } from '@sim/db'
|
||||
import { workflowMcpServer, workflowMcpTool } from '@sim/db/schema'
|
||||
import { and, eq } from 'drizzle-orm'
|
||||
import type { NextRequest } from 'next/server'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { getParsedBody, withMcpAuth } from '@/lib/mcp/middleware'
|
||||
import { createMcpErrorResponse, createMcpSuccessResponse } from '@/lib/mcp/utils'
|
||||
|
||||
const logger = createLogger('WorkflowMcpToolAPI')
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
interface RouteParams {
|
||||
id: string
|
||||
toolId: string
|
||||
}
|
||||
|
||||
/**
|
||||
* GET - Get a specific tool
|
||||
*/
|
||||
export const GET = withMcpAuth<RouteParams>('read')(
|
||||
async (request: NextRequest, { userId, workspaceId, requestId }, { params }) => {
|
||||
try {
|
||||
const { id: serverId, toolId } = await params
|
||||
|
||||
logger.info(`[${requestId}] Getting tool ${toolId} from server ${serverId}`)
|
||||
|
||||
// Verify server exists and belongs to workspace
|
||||
const [server] = await db
|
||||
.select({ id: workflowMcpServer.id })
|
||||
.from(workflowMcpServer)
|
||||
.where(
|
||||
and(eq(workflowMcpServer.id, serverId), eq(workflowMcpServer.workspaceId, workspaceId))
|
||||
)
|
||||
.limit(1)
|
||||
|
||||
if (!server) {
|
||||
return createMcpErrorResponse(new Error('Server not found'), 'Server not found', 404)
|
||||
}
|
||||
|
||||
const [tool] = await db
|
||||
.select()
|
||||
.from(workflowMcpTool)
|
||||
.where(and(eq(workflowMcpTool.id, toolId), eq(workflowMcpTool.serverId, serverId)))
|
||||
.limit(1)
|
||||
|
||||
if (!tool) {
|
||||
return createMcpErrorResponse(new Error('Tool not found'), 'Tool not found', 404)
|
||||
}
|
||||
|
||||
return createMcpSuccessResponse({ tool })
|
||||
} catch (error) {
|
||||
logger.error(`[${requestId}] Error getting tool:`, error)
|
||||
return createMcpErrorResponse(
|
||||
error instanceof Error ? error : new Error('Failed to get tool'),
|
||||
'Failed to get tool',
|
||||
500
|
||||
)
|
||||
}
|
||||
}
|
||||
)
|
||||
|
||||
/**
|
||||
* PATCH - Update a tool's configuration
|
||||
*/
|
||||
export const PATCH = withMcpAuth<RouteParams>('write')(
|
||||
async (request: NextRequest, { userId, workspaceId, requestId }, { params }) => {
|
||||
try {
|
||||
const { id: serverId, toolId } = await params
|
||||
const body = getParsedBody(request) || (await request.json())
|
||||
|
||||
logger.info(`[${requestId}] Updating tool ${toolId} in server ${serverId}`)
|
||||
|
||||
// Verify server exists and belongs to workspace
|
||||
const [server] = await db
|
||||
.select({ id: workflowMcpServer.id })
|
||||
.from(workflowMcpServer)
|
||||
.where(
|
||||
and(eq(workflowMcpServer.id, serverId), eq(workflowMcpServer.workspaceId, workspaceId))
|
||||
)
|
||||
.limit(1)
|
||||
|
||||
if (!server) {
|
||||
return createMcpErrorResponse(new Error('Server not found'), 'Server not found', 404)
|
||||
}
|
||||
|
||||
const [existingTool] = await db
|
||||
.select({ id: workflowMcpTool.id })
|
||||
.from(workflowMcpTool)
|
||||
.where(and(eq(workflowMcpTool.id, toolId), eq(workflowMcpTool.serverId, serverId)))
|
||||
.limit(1)
|
||||
|
||||
if (!existingTool) {
|
||||
return createMcpErrorResponse(new Error('Tool not found'), 'Tool not found', 404)
|
||||
}
|
||||
|
||||
const updateData: Record<string, unknown> = {
|
||||
updatedAt: new Date(),
|
||||
}
|
||||
|
||||
if (body.toolName !== undefined) {
|
||||
updateData.toolName = body.toolName.trim()
|
||||
}
|
||||
if (body.toolDescription !== undefined) {
|
||||
updateData.toolDescription = body.toolDescription?.trim() || null
|
||||
}
|
||||
if (body.parameterSchema !== undefined) {
|
||||
updateData.parameterSchema = body.parameterSchema
|
||||
}
|
||||
if (body.isEnabled !== undefined) {
|
||||
updateData.isEnabled = body.isEnabled
|
||||
}
|
||||
|
||||
const [updatedTool] = await db
|
||||
.update(workflowMcpTool)
|
||||
.set(updateData)
|
||||
.where(eq(workflowMcpTool.id, toolId))
|
||||
.returning()
|
||||
|
||||
logger.info(`[${requestId}] Successfully updated tool ${toolId}`)
|
||||
|
||||
return createMcpSuccessResponse({ tool: updatedTool })
|
||||
} catch (error) {
|
||||
logger.error(`[${requestId}] Error updating tool:`, error)
|
||||
return createMcpErrorResponse(
|
||||
error instanceof Error ? error : new Error('Failed to update tool'),
|
||||
'Failed to update tool',
|
||||
500
|
||||
)
|
||||
}
|
||||
}
|
||||
)
|
||||
|
||||
/**
|
||||
* DELETE - Remove a tool from an MCP server
|
||||
*/
|
||||
export const DELETE = withMcpAuth<RouteParams>('write')(
|
||||
async (request: NextRequest, { userId, workspaceId, requestId }, { params }) => {
|
||||
try {
|
||||
const { id: serverId, toolId } = await params
|
||||
|
||||
logger.info(`[${requestId}] Deleting tool ${toolId} from server ${serverId}`)
|
||||
|
||||
// Verify server exists and belongs to workspace
|
||||
const [server] = await db
|
||||
.select({ id: workflowMcpServer.id })
|
||||
.from(workflowMcpServer)
|
||||
.where(
|
||||
and(eq(workflowMcpServer.id, serverId), eq(workflowMcpServer.workspaceId, workspaceId))
|
||||
)
|
||||
.limit(1)
|
||||
|
||||
if (!server) {
|
||||
return createMcpErrorResponse(new Error('Server not found'), 'Server not found', 404)
|
||||
}
|
||||
|
||||
const [deletedTool] = await db
|
||||
.delete(workflowMcpTool)
|
||||
.where(and(eq(workflowMcpTool.id, toolId), eq(workflowMcpTool.serverId, serverId)))
|
||||
.returning()
|
||||
|
||||
if (!deletedTool) {
|
||||
return createMcpErrorResponse(new Error('Tool not found'), 'Tool not found', 404)
|
||||
}
|
||||
|
||||
logger.info(`[${requestId}] Successfully deleted tool ${toolId}`)
|
||||
|
||||
return createMcpSuccessResponse({ message: `Tool ${toolId} deleted successfully` })
|
||||
} catch (error) {
|
||||
logger.error(`[${requestId}] Error deleting tool:`, error)
|
||||
return createMcpErrorResponse(
|
||||
error instanceof Error ? error : new Error('Failed to delete tool'),
|
||||
'Failed to delete tool',
|
||||
500
|
||||
)
|
||||
}
|
||||
}
|
||||
)
|
||||
226
apps/sim/app/api/mcp/workflow-servers/[id]/tools/route.ts
Normal file
@@ -0,0 +1,226 @@
|
||||
import { db } from '@sim/db'
|
||||
import { workflow, workflowMcpServer, workflowMcpTool } from '@sim/db/schema'
|
||||
import { and, eq } from 'drizzle-orm'
|
||||
import type { NextRequest } from 'next/server'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { getParsedBody, withMcpAuth } from '@/lib/mcp/middleware'
|
||||
import { createMcpErrorResponse, createMcpSuccessResponse } from '@/lib/mcp/utils'
|
||||
import { sanitizeToolName } from '@/lib/mcp/workflow-tool-schema'
|
||||
import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/persistence/utils'
|
||||
import { hasValidStartBlockInState } from '@/lib/workflows/triggers/trigger-utils'
|
||||
|
||||
const logger = createLogger('WorkflowMcpToolsAPI')
|
||||
|
||||
/**
|
||||
* Check if a workflow has a valid start block by loading from database
|
||||
*/
|
||||
async function hasValidStartBlock(workflowId: string): Promise<boolean> {
|
||||
try {
|
||||
const normalizedData = await loadWorkflowFromNormalizedTables(workflowId)
|
||||
return hasValidStartBlockInState(normalizedData)
|
||||
} catch (error) {
|
||||
logger.warn('Error checking for start block:', error)
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
interface RouteParams {
|
||||
id: string
|
||||
}
|
||||
|
||||
/**
|
||||
* GET - List all tools for a workflow MCP server
|
||||
*/
|
||||
export const GET = withMcpAuth<RouteParams>('read')(
|
||||
async (request: NextRequest, { userId, workspaceId, requestId }, { params }) => {
|
||||
try {
|
||||
const { id: serverId } = await params
|
||||
|
||||
logger.info(`[${requestId}] Listing tools for workflow MCP server: ${serverId}`)
|
||||
|
||||
// Verify server exists and belongs to workspace
|
||||
const [server] = await db
|
||||
.select({ id: workflowMcpServer.id })
|
||||
.from(workflowMcpServer)
|
||||
.where(
|
||||
and(eq(workflowMcpServer.id, serverId), eq(workflowMcpServer.workspaceId, workspaceId))
|
||||
)
|
||||
.limit(1)
|
||||
|
||||
if (!server) {
|
||||
return createMcpErrorResponse(new Error('Server not found'), 'Server not found', 404)
|
||||
}
|
||||
|
||||
// Get tools with workflow details
|
||||
const tools = await db
|
||||
.select({
|
||||
id: workflowMcpTool.id,
|
||||
serverId: workflowMcpTool.serverId,
|
||||
workflowId: workflowMcpTool.workflowId,
|
||||
toolName: workflowMcpTool.toolName,
|
||||
toolDescription: workflowMcpTool.toolDescription,
|
||||
parameterSchema: workflowMcpTool.parameterSchema,
|
||||
isEnabled: workflowMcpTool.isEnabled,
|
||||
createdAt: workflowMcpTool.createdAt,
|
||||
updatedAt: workflowMcpTool.updatedAt,
|
||||
workflowName: workflow.name,
|
||||
workflowDescription: workflow.description,
|
||||
isDeployed: workflow.isDeployed,
|
||||
})
|
||||
.from(workflowMcpTool)
|
||||
.leftJoin(workflow, eq(workflowMcpTool.workflowId, workflow.id))
|
||||
.where(eq(workflowMcpTool.serverId, serverId))
|
||||
|
||||
logger.info(`[${requestId}] Found ${tools.length} tools for server ${serverId}`)
|
||||
|
||||
return createMcpSuccessResponse({ tools })
|
||||
} catch (error) {
|
||||
logger.error(`[${requestId}] Error listing tools:`, error)
|
||||
return createMcpErrorResponse(
|
||||
error instanceof Error ? error : new Error('Failed to list tools'),
|
||||
'Failed to list tools',
|
||||
500
|
||||
)
|
||||
}
|
||||
}
|
||||
)
|
||||
|
||||
/**
|
||||
* POST - Add a workflow as a tool to an MCP server
|
||||
*/
|
||||
export const POST = withMcpAuth<RouteParams>('write')(
|
||||
async (request: NextRequest, { userId, workspaceId, requestId }, { params }) => {
|
||||
try {
|
||||
const { id: serverId } = await params
|
||||
const body = getParsedBody(request) || (await request.json())
|
||||
|
||||
logger.info(`[${requestId}] Adding tool to workflow MCP server: ${serverId}`, {
|
||||
workflowId: body.workflowId,
|
||||
})
|
||||
|
||||
if (!body.workflowId) {
|
||||
return createMcpErrorResponse(
|
||||
new Error('Missing required field: workflowId'),
|
||||
'Missing required field',
|
||||
400
|
||||
)
|
||||
}
|
||||
|
||||
// Verify server exists and belongs to workspace
|
||||
const [server] = await db
|
||||
.select({ id: workflowMcpServer.id })
|
||||
.from(workflowMcpServer)
|
||||
.where(
|
||||
and(eq(workflowMcpServer.id, serverId), eq(workflowMcpServer.workspaceId, workspaceId))
|
||||
)
|
||||
.limit(1)
|
||||
|
||||
if (!server) {
|
||||
return createMcpErrorResponse(new Error('Server not found'), 'Server not found', 404)
|
||||
}
|
||||
|
||||
// Verify workflow exists and is deployed
|
||||
const [workflowRecord] = await db
|
||||
.select({
|
||||
id: workflow.id,
|
||||
name: workflow.name,
|
||||
description: workflow.description,
|
||||
isDeployed: workflow.isDeployed,
|
||||
workspaceId: workflow.workspaceId,
|
||||
})
|
||||
.from(workflow)
|
||||
.where(eq(workflow.id, body.workflowId))
|
||||
.limit(1)
|
||||
|
||||
if (!workflowRecord) {
|
||||
return createMcpErrorResponse(new Error('Workflow not found'), 'Workflow not found', 404)
|
||||
}
|
||||
|
||||
// Verify workflow belongs to the same workspace
|
||||
if (workflowRecord.workspaceId !== workspaceId) {
|
||||
return createMcpErrorResponse(
|
||||
new Error('Workflow does not belong to this workspace'),
|
||||
'Access denied',
|
||||
403
|
||||
)
|
||||
}
|
||||
|
||||
if (!workflowRecord.isDeployed) {
|
||||
return createMcpErrorResponse(
|
||||
new Error('Workflow must be deployed before adding as a tool'),
|
||||
'Workflow not deployed',
|
||||
400
|
||||
)
|
||||
}
|
||||
|
||||
// Verify workflow has a valid start block
|
||||
const hasStartBlock = await hasValidStartBlock(body.workflowId)
|
||||
if (!hasStartBlock) {
|
||||
return createMcpErrorResponse(
|
||||
new Error('Workflow must have a Start block to be used as an MCP tool'),
|
||||
'No start block found',
|
||||
400
|
||||
)
|
||||
}
|
||||
|
||||
// Check if tool already exists for this workflow
|
||||
const [existingTool] = await db
|
||||
.select({ id: workflowMcpTool.id })
|
||||
.from(workflowMcpTool)
|
||||
.where(
|
||||
and(
|
||||
eq(workflowMcpTool.serverId, serverId),
|
||||
eq(workflowMcpTool.workflowId, body.workflowId)
|
||||
)
|
||||
)
|
||||
.limit(1)
|
||||
|
||||
if (existingTool) {
|
||||
return createMcpErrorResponse(
|
||||
new Error('This workflow is already added as a tool to this server'),
|
||||
'Tool already exists',
|
||||
409
|
||||
)
|
||||
}
|
||||
|
||||
// Generate tool name and description
|
||||
const toolName = body.toolName?.trim() || sanitizeToolName(workflowRecord.name)
|
||||
const toolDescription =
|
||||
body.toolDescription?.trim() ||
|
||||
workflowRecord.description ||
|
||||
`Execute ${workflowRecord.name} workflow`
|
||||
|
||||
// Create the tool
|
||||
const toolId = crypto.randomUUID()
|
||||
const [tool] = await db
|
||||
.insert(workflowMcpTool)
|
||||
.values({
|
||||
id: toolId,
|
||||
serverId,
|
||||
workflowId: body.workflowId,
|
||||
toolName,
|
||||
toolDescription,
|
||||
parameterSchema: body.parameterSchema || {},
|
||||
isEnabled: true,
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
})
|
||||
.returning()
|
||||
|
||||
logger.info(
|
||||
`[${requestId}] Successfully added tool ${toolName} (workflow: ${body.workflowId}) to server ${serverId}`
|
||||
)
|
||||
|
||||
return createMcpSuccessResponse({ tool }, 201)
|
||||
} catch (error) {
|
||||
logger.error(`[${requestId}] Error adding tool:`, error)
|
||||
return createMcpErrorResponse(
|
||||
error instanceof Error ? error : new Error('Failed to add tool'),
|
||||
'Failed to add tool',
|
||||
500
|
||||
)
|
||||
}
|
||||
}
|
||||
)
|
||||
107
apps/sim/app/api/mcp/workflow-servers/route.ts
Normal file
@@ -0,0 +1,107 @@
|
||||
import { db } from '@sim/db'
|
||||
import { workflowMcpServer } from '@sim/db/schema'
|
||||
import { eq, sql } from 'drizzle-orm'
|
||||
import type { NextRequest } from 'next/server'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { getParsedBody, withMcpAuth } from '@/lib/mcp/middleware'
|
||||
import { createMcpErrorResponse, createMcpSuccessResponse } from '@/lib/mcp/utils'
|
||||
|
||||
const logger = createLogger('WorkflowMcpServersAPI')
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
/**
|
||||
* GET - List all workflow MCP servers for the workspace
|
||||
*/
|
||||
export const GET = withMcpAuth('read')(
|
||||
async (request: NextRequest, { userId, workspaceId, requestId }) => {
|
||||
try {
|
||||
logger.info(`[${requestId}] Listing workflow MCP servers for workspace ${workspaceId}`)
|
||||
|
||||
const servers = await db
|
||||
.select({
|
||||
id: workflowMcpServer.id,
|
||||
workspaceId: workflowMcpServer.workspaceId,
|
||||
createdBy: workflowMcpServer.createdBy,
|
||||
name: workflowMcpServer.name,
|
||||
description: workflowMcpServer.description,
|
||||
isPublished: workflowMcpServer.isPublished,
|
||||
publishedAt: workflowMcpServer.publishedAt,
|
||||
createdAt: workflowMcpServer.createdAt,
|
||||
updatedAt: workflowMcpServer.updatedAt,
|
||||
toolCount: sql<number>`(
|
||||
SELECT COUNT(*)::int
|
||||
FROM "workflow_mcp_tool"
|
||||
WHERE "workflow_mcp_tool"."server_id" = "workflow_mcp_server"."id"
|
||||
)`.as('tool_count'),
|
||||
})
|
||||
.from(workflowMcpServer)
|
||||
.where(eq(workflowMcpServer.workspaceId, workspaceId))
|
||||
|
||||
logger.info(
|
||||
`[${requestId}] Listed ${servers.length} workflow MCP servers for workspace ${workspaceId}`
|
||||
)
|
||||
return createMcpSuccessResponse({ servers })
|
||||
} catch (error) {
|
||||
logger.error(`[${requestId}] Error listing workflow MCP servers:`, error)
|
||||
return createMcpErrorResponse(
|
||||
error instanceof Error ? error : new Error('Failed to list workflow MCP servers'),
|
||||
'Failed to list workflow MCP servers',
|
||||
500
|
||||
)
|
||||
}
|
||||
}
|
||||
)
|
||||
|
||||
/**
|
||||
* POST - Create a new workflow MCP server
|
||||
*/
|
||||
export const POST = withMcpAuth('write')(
|
||||
async (request: NextRequest, { userId, workspaceId, requestId }) => {
|
||||
try {
|
||||
const body = getParsedBody(request) || (await request.json())
|
||||
|
||||
logger.info(`[${requestId}] Creating workflow MCP server:`, {
|
||||
name: body.name,
|
||||
workspaceId,
|
||||
})
|
||||
|
||||
if (!body.name) {
|
||||
return createMcpErrorResponse(
|
||||
new Error('Missing required field: name'),
|
||||
'Missing required field',
|
||||
400
|
||||
)
|
||||
}
|
||||
|
||||
const serverId = crypto.randomUUID()
|
||||
|
||||
const [server] = await db
|
||||
.insert(workflowMcpServer)
|
||||
.values({
|
||||
id: serverId,
|
||||
workspaceId,
|
||||
createdBy: userId,
|
||||
name: body.name.trim(),
|
||||
description: body.description?.trim() || null,
|
||||
isPublished: false,
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
})
|
||||
.returning()
|
||||
|
||||
logger.info(
|
||||
`[${requestId}] Successfully created workflow MCP server: ${body.name} (ID: ${serverId})`
|
||||
)
|
||||
|
||||
return createMcpSuccessResponse({ server }, 201)
|
||||
} catch (error) {
|
||||
logger.error(`[${requestId}] Error creating workflow MCP server:`, error)
|
||||
return createMcpErrorResponse(
|
||||
error instanceof Error ? error : new Error('Failed to create workflow MCP server'),
|
||||
'Failed to create workflow MCP server',
|
||||
500
|
||||
)
|
||||
}
|
||||
}
|
||||
)
|
||||
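Putting the two routes above together, a hedged end-to-end sketch: create a server, then expose a deployed workflow as a tool on it. Identifiers are placeholders, the auth header is an assumption, and the shape of the success payload depends on createMcpSuccessResponse.

// Sketch: create a server, then add a deployed workflow as a tool.
const created = await fetch(`${baseUrl}/api/mcp/workflow-servers`, {
  method: 'POST',
  headers: { 'Content-Type': 'application/json', 'X-API-Key': apiKey },
  body: JSON.stringify({ name: 'My MCP server', description: 'Workflows as tools' }),
})
// The new server id is returned in the success payload.
await fetch(`${baseUrl}/api/mcp/workflow-servers/${serverId}/tools`, {
  method: 'POST',
  headers: { 'Content-Type': 'application/json', 'X-API-Key': apiKey },
  body: JSON.stringify({ workflowId, toolName: 'run_support_triage' }), // workflow must be deployed and have a Start block
})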
@@ -1,17 +1,121 @@
|
||||
import { db, workflow, workflowDeploymentVersion } from '@sim/db'
|
||||
import { db, workflow, workflowDeploymentVersion, workflowMcpTool } from '@sim/db'
|
||||
import { and, desc, eq } from 'drizzle-orm'
|
||||
import type { NextRequest } from 'next/server'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { deployWorkflow } from '@/lib/workflows/persistence/utils'
|
||||
import {
|
||||
extractInputFormatFromBlocks,
|
||||
generateToolInputSchema,
|
||||
} from '@/lib/mcp/workflow-tool-schema'
|
||||
import { deployWorkflow, loadWorkflowFromNormalizedTables } from '@/lib/workflows/persistence/utils'
|
||||
import { hasValidStartBlockInState } from '@/lib/workflows/triggers/trigger-utils'
|
||||
import { validateWorkflowPermissions } from '@/lib/workflows/utils'
|
||||
import { createErrorResponse, createSuccessResponse } from '@/app/api/workflows/utils'
|
||||
|
||||
const logger = createLogger('WorkflowDeployAPI')
|
||||
|
||||
/**
|
||||
* Check if a workflow has a valid start block by loading from database
|
||||
*/
|
||||
async function hasValidStartBlock(workflowId: string): Promise<boolean> {
|
||||
try {
|
||||
const normalizedData = await loadWorkflowFromNormalizedTables(workflowId)
|
||||
return hasValidStartBlockInState(normalizedData)
|
||||
} catch (error) {
|
||||
logger.warn('Error checking for start block:', error)
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
export const runtime = 'nodejs'
|
||||
|
||||
/**
|
||||
* Extract input format from workflow blocks and generate MCP tool parameter schema
|
||||
*/
|
||||
async function generateMcpToolSchema(workflowId: string): Promise<Record<string, unknown>> {
|
||||
try {
|
||||
const normalizedData = await loadWorkflowFromNormalizedTables(workflowId)
|
||||
if (!normalizedData?.blocks) {
|
||||
return { type: 'object', properties: {} }
|
||||
}
|
||||
|
||||
const inputFormat = extractInputFormatFromBlocks(normalizedData.blocks)
|
||||
if (!inputFormat || inputFormat.length === 0) {
|
||||
return { type: 'object', properties: {} }
|
||||
}
|
||||
|
||||
return generateToolInputSchema(inputFormat) as unknown as Record<string, unknown>
|
||||
} catch (error) {
|
||||
logger.warn('Error generating MCP tool schema:', error)
|
||||
return { type: 'object', properties: {} }
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Update all MCP tools that reference this workflow with the latest parameter schema.
|
||||
* If the workflow no longer has a start block, remove all MCP tools.
|
||||
*/
|
||||
async function syncMcpToolsOnDeploy(workflowId: string, requestId: string): Promise<void> {
|
||||
try {
|
||||
// Get all MCP tools that use this workflow
|
||||
const tools = await db
|
||||
.select({ id: workflowMcpTool.id })
|
||||
.from(workflowMcpTool)
|
||||
.where(eq(workflowMcpTool.workflowId, workflowId))
|
||||
|
||||
if (tools.length === 0) {
|
||||
logger.debug(`[${requestId}] No MCP tools to sync for workflow: ${workflowId}`)
|
||||
return
|
||||
}
|
||||
|
||||
// Check if workflow still has a valid start block
|
||||
const hasStart = await hasValidStartBlock(workflowId)
|
||||
if (!hasStart) {
|
||||
// No start block - remove all MCP tools for this workflow
|
||||
await db.delete(workflowMcpTool).where(eq(workflowMcpTool.workflowId, workflowId))
|
||||
|
||||
logger.info(
|
||||
`[${requestId}] Removed ${tools.length} MCP tool(s) - workflow no longer has a start block: ${workflowId}`
|
||||
)
|
||||
return
|
||||
}
|
||||
|
||||
// Generate the latest parameter schema
|
||||
const parameterSchema = await generateMcpToolSchema(workflowId)
|
||||
|
||||
// Update all tools with the new schema
|
||||
await db
|
||||
.update(workflowMcpTool)
|
||||
.set({
|
||||
parameterSchema,
|
||||
updatedAt: new Date(),
|
||||
})
|
||||
.where(eq(workflowMcpTool.workflowId, workflowId))
|
||||
|
||||
logger.info(`[${requestId}] Synced ${tools.length} MCP tool(s) for workflow: ${workflowId}`)
|
||||
} catch (error) {
|
||||
logger.error(`[${requestId}] Error syncing MCP tools:`, error)
|
||||
// Don't throw - this is a non-critical operation
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove all MCP tools that reference this workflow when undeploying
|
||||
*/
|
||||
async function removeMcpToolsOnUndeploy(workflowId: string, requestId: string): Promise<void> {
|
||||
try {
|
||||
const result = await db
|
||||
.delete(workflowMcpTool)
|
||||
.where(eq(workflowMcpTool.workflowId, workflowId))
|
||||
|
||||
logger.info(`[${requestId}] Removed MCP tools for undeployed workflow: ${workflowId}`)
|
||||
} catch (error) {
|
||||
logger.error(`[${requestId}] Error removing MCP tools:`, error)
|
||||
// Don't throw - this is a non-critical operation
|
||||
}
|
||||
}
|
||||
|
||||
export async function GET(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
|
||||
const requestId = generateRequestId()
|
||||
const { id } = await params
|
||||
@@ -119,6 +223,9 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
|
||||
|
||||
logger.info(`[${requestId}] Workflow deployed successfully: ${id}`)
|
||||
|
||||
// Sync MCP tools with the latest parameter schema
|
||||
await syncMcpToolsOnDeploy(id, requestId)
|
||||
|
||||
const responseApiKeyInfo = workflowData!.workspaceId
|
||||
? 'Workspace API keys'
|
||||
: 'Personal API keys'
|
||||
@@ -167,6 +274,9 @@ export async function DELETE(
|
||||
.where(eq(workflow.id, id))
|
||||
})
|
||||
|
||||
// Remove all MCP tools that reference this workflow
|
||||
await removeMcpToolsOnUndeploy(id, requestId)
|
||||
|
||||
logger.info(`[${requestId}] Workflow undeployed successfully: ${id}`)
|
||||
|
||||
// Track workflow undeployment
|
||||
|
||||
@@ -1,8 +1,13 @@
|
||||
import { db, workflow, workflowDeploymentVersion } from '@sim/db'
|
||||
import { db, workflow, workflowDeploymentVersion, workflowMcpTool } from '@sim/db'
|
||||
import { and, eq } from 'drizzle-orm'
|
||||
import type { NextRequest } from 'next/server'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import {
|
||||
extractInputFormatFromBlocks,
|
||||
generateToolInputSchema,
|
||||
} from '@/lib/mcp/workflow-tool-schema'
|
||||
import { hasValidStartBlockInState } from '@/lib/workflows/triggers/trigger-utils'
|
||||
import { validateWorkflowPermissions } from '@/lib/workflows/utils'
|
||||
import { createErrorResponse, createSuccessResponse } from '@/app/api/workflows/utils'
|
||||
|
||||
@@ -11,6 +16,80 @@ const logger = createLogger('WorkflowActivateDeploymentAPI')
|
||||
export const dynamic = 'force-dynamic'
|
||||
export const runtime = 'nodejs'
|
||||
|
||||
/**
|
||||
* Extract input format from a deployment version state and generate MCP tool parameter schema
|
||||
*/
|
||||
function generateMcpToolSchemaFromState(state: any): Record<string, unknown> {
|
||||
try {
|
||||
if (!state?.blocks) {
|
||||
return { type: 'object', properties: {} }
|
||||
}
|
||||
|
||||
const inputFormat = extractInputFormatFromBlocks(state.blocks)
|
||||
if (!inputFormat || inputFormat.length === 0) {
|
||||
return { type: 'object', properties: {} }
|
||||
}
|
||||
|
||||
return generateToolInputSchema(inputFormat) as unknown as Record<string, unknown>
|
||||
} catch (error) {
|
||||
logger.warn('Error generating MCP tool schema from state:', error)
|
||||
return { type: 'object', properties: {} }
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Sync MCP tools when activating a deployment version.
|
||||
* If the version has no start block, remove all MCP tools.
|
||||
*/
|
||||
async function syncMcpToolsOnVersionActivate(
|
||||
workflowId: string,
|
||||
versionState: any,
|
||||
requestId: string
|
||||
): Promise<void> {
|
||||
try {
|
||||
// Get all MCP tools that use this workflow
|
||||
const tools = await db
|
||||
.select({ id: workflowMcpTool.id })
|
||||
.from(workflowMcpTool)
|
||||
.where(eq(workflowMcpTool.workflowId, workflowId))
|
||||
|
||||
if (tools.length === 0) {
|
||||
logger.debug(`[${requestId}] No MCP tools to sync for workflow: ${workflowId}`)
|
||||
return
|
||||
}
|
||||
|
||||
// Check if the activated version has a valid start block
|
||||
if (!hasValidStartBlockInState(versionState)) {
|
||||
// No start block - remove all MCP tools for this workflow
|
||||
await db.delete(workflowMcpTool).where(eq(workflowMcpTool.workflowId, workflowId))
|
||||
|
||||
logger.info(
|
||||
`[${requestId}] Removed ${tools.length} MCP tool(s) - activated version has no start block: ${workflowId}`
|
||||
)
|
||||
return
|
||||
}
|
||||
|
||||
// Generate the parameter schema from the activated version's state
|
||||
const parameterSchema = generateMcpToolSchemaFromState(versionState)
|
||||
|
||||
// Update all tools with the new schema
|
||||
await db
|
||||
.update(workflowMcpTool)
|
||||
.set({
|
||||
parameterSchema,
|
||||
updatedAt: new Date(),
|
||||
})
|
||||
.where(eq(workflowMcpTool.workflowId, workflowId))
|
||||
|
||||
logger.info(
|
||||
`[${requestId}] Synced ${tools.length} MCP tool(s) for workflow version activation: ${workflowId}`
|
||||
)
|
||||
} catch (error) {
|
||||
logger.error(`[${requestId}] Error syncing MCP tools on version activate:`, error)
|
||||
// Don't throw - this is a non-critical operation
|
||||
}
|
||||
}
|
||||
|
||||
export async function POST(
|
||||
request: NextRequest,
|
||||
{ params }: { params: Promise<{ id: string; version: string }> }
|
||||
@@ -31,6 +110,18 @@ export async function POST(
|
||||
|
||||
const now = new Date()
|
||||
|
||||
// Get the state of the version being activated for MCP tool sync
|
||||
const [versionData] = await db
|
||||
.select({ state: workflowDeploymentVersion.state })
|
||||
.from(workflowDeploymentVersion)
|
||||
.where(
|
||||
and(
|
||||
eq(workflowDeploymentVersion.workflowId, id),
|
||||
eq(workflowDeploymentVersion.version, versionNum)
|
||||
)
|
||||
)
|
||||
.limit(1)
|
||||
|
||||
await db.transaction(async (tx) => {
|
||||
await tx
|
||||
.update(workflowDeploymentVersion)
|
||||
@@ -65,6 +156,11 @@ export async function POST(
|
||||
await tx.update(workflow).set(updateData).where(eq(workflow.id, id))
|
||||
})
|
||||
|
||||
// Sync MCP tools with the activated version's parameter schema
|
||||
if (versionData?.state) {
|
||||
await syncMcpToolsOnVersionActivate(id, versionData.state, requestId)
|
||||
}
|
||||
|
||||
return createSuccessResponse({ success: true, deployedAt: now })
|
||||
} catch (error: any) {
|
||||
logger.error(`[${requestId}] Error activating deployment for workflow: ${id}`, error)
|
||||
|
||||
@@ -1,10 +1,15 @@
|
||||
import { db, workflow, workflowDeploymentVersion } from '@sim/db'
|
||||
import { db, workflow, workflowDeploymentVersion, workflowMcpTool } from '@sim/db'
|
||||
import { and, eq } from 'drizzle-orm'
|
||||
import type { NextRequest } from 'next/server'
|
||||
import { env } from '@/lib/core/config/env'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import {
|
||||
extractInputFormatFromBlocks,
|
||||
generateToolInputSchema,
|
||||
} from '@/lib/mcp/workflow-tool-schema'
|
||||
import { saveWorkflowToNormalizedTables } from '@/lib/workflows/persistence/utils'
|
||||
import { hasValidStartBlockInState } from '@/lib/workflows/triggers/trigger-utils'
|
||||
import { validateWorkflowPermissions } from '@/lib/workflows/utils'
|
||||
import { createErrorResponse, createSuccessResponse } from '@/app/api/workflows/utils'
|
||||
|
||||
@@ -13,6 +18,80 @@ const logger = createLogger('RevertToDeploymentVersionAPI')
|
||||
export const dynamic = 'force-dynamic'
|
||||
export const runtime = 'nodejs'
|
||||
|
||||
/**
|
||||
* Extract input format from a deployment version state and generate MCP tool parameter schema
|
||||
*/
|
||||
function generateMcpToolSchemaFromState(state: any): Record<string, unknown> {
|
||||
try {
|
||||
if (!state?.blocks) {
|
||||
return { type: 'object', properties: {} }
|
||||
}
|
||||
|
||||
const inputFormat = extractInputFormatFromBlocks(state.blocks)
|
||||
if (!inputFormat || inputFormat.length === 0) {
|
||||
return { type: 'object', properties: {} }
|
||||
}
|
||||
|
||||
return generateToolInputSchema(inputFormat) as unknown as Record<string, unknown>
|
||||
} catch (error) {
|
||||
logger.warn('Error generating MCP tool schema from state:', error)
|
||||
return { type: 'object', properties: {} }
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Sync MCP tools when reverting to a deployment version.
|
||||
* If the version has no start block, remove all MCP tools.
|
||||
*/
|
||||
async function syncMcpToolsOnRevert(
|
||||
workflowId: string,
|
||||
versionState: any,
|
||||
requestId: string
|
||||
): Promise<void> {
|
||||
try {
|
||||
// Get all MCP tools that use this workflow
|
||||
const tools = await db
|
||||
.select({ id: workflowMcpTool.id })
|
||||
.from(workflowMcpTool)
|
||||
.where(eq(workflowMcpTool.workflowId, workflowId))
|
||||
|
||||
if (tools.length === 0) {
|
||||
logger.debug(`[${requestId}] No MCP tools to sync for workflow: ${workflowId}`)
|
||||
return
|
||||
}
|
||||
|
||||
// Check if the reverted version has a valid start block
|
||||
if (!hasValidStartBlockInState(versionState)) {
|
||||
// No start block - remove all MCP tools for this workflow
|
||||
await db.delete(workflowMcpTool).where(eq(workflowMcpTool.workflowId, workflowId))
|
||||
|
||||
logger.info(
|
||||
`[${requestId}] Removed ${tools.length} MCP tool(s) - reverted version has no start block: ${workflowId}`
|
||||
)
|
||||
return
|
||||
}
|
||||
|
||||
// Generate the parameter schema from the reverted version's state
|
||||
const parameterSchema = generateMcpToolSchemaFromState(versionState)
|
||||
|
||||
// Update all tools with the new schema
|
||||
await db
|
||||
.update(workflowMcpTool)
|
||||
.set({
|
||||
parameterSchema,
|
||||
updatedAt: new Date(),
|
||||
})
|
||||
.where(eq(workflowMcpTool.workflowId, workflowId))
|
||||
|
||||
logger.info(
|
||||
`[${requestId}] Synced ${tools.length} MCP tool(s) for workflow revert: ${workflowId}`
|
||||
)
|
||||
} catch (error) {
|
||||
logger.error(`[${requestId}] Error syncing MCP tools on revert:`, error)
|
||||
// Don't throw - this is a non-critical operation
|
||||
}
|
||||
}
|
||||
|
||||
export async function POST(
|
||||
request: NextRequest,
|
||||
{ params }: { params: Promise<{ id: string; version: string }> }
|
||||
@@ -87,6 +166,9 @@ export async function POST(
|
||||
.set({ lastSynced: new Date(), updatedAt: new Date() })
|
||||
.where(eq(workflow.id, id))
|
||||
|
||||
// Sync MCP tools with the reverted version's parameter schema
|
||||
await syncMcpToolsOnRevert(id, deployedState, requestId)
|
||||
|
||||
try {
|
||||
const socketServerUrl = env.SOCKET_SERVER_URL || 'http://localhost:3002'
|
||||
await fetch(`${socketServerUrl}/api/workflow-reverted`, {
|
||||
|
||||
@@ -30,7 +30,7 @@ const logger = createLogger('WorkflowExecuteAPI')
|
||||
|
||||
const ExecuteWorkflowSchema = z.object({
|
||||
selectedOutputs: z.array(z.string()).optional().default([]),
|
||||
triggerType: z.enum(['api', 'webhook', 'schedule', 'manual', 'chat']).optional(),
|
||||
triggerType: z.enum(['api', 'webhook', 'schedule', 'manual', 'chat', 'mcp']).optional(),
|
||||
stream: z.boolean().optional(),
|
||||
useDraftState: z.boolean().optional(),
|
||||
input: z.any().optional(),
|
||||
@@ -227,7 +227,7 @@ type AsyncExecutionParams = {
|
||||
workflowId: string
|
||||
userId: string
|
||||
input: any
|
||||
triggerType: 'api' | 'webhook' | 'schedule' | 'manual' | 'chat'
|
||||
triggerType: 'api' | 'webhook' | 'schedule' | 'manual' | 'chat' | 'mcp'
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -370,14 +370,15 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
||||
})
|
||||
|
||||
const executionId = uuidv4()
|
||||
type LoggingTriggerType = 'api' | 'webhook' | 'schedule' | 'manual' | 'chat'
|
||||
type LoggingTriggerType = 'api' | 'webhook' | 'schedule' | 'manual' | 'chat' | 'mcp'
|
||||
let loggingTriggerType: LoggingTriggerType = 'manual'
|
||||
if (
|
||||
triggerType === 'api' ||
|
||||
triggerType === 'chat' ||
|
||||
triggerType === 'webhook' ||
|
||||
triggerType === 'schedule' ||
|
||||
triggerType === 'manual'
|
||||
triggerType === 'manual' ||
|
||||
triggerType === 'mcp'
|
||||
) {
|
||||
loggingTriggerType = triggerType as LoggingTriggerType
|
||||
}
|
||||
|
||||
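The schema change above adds 'mcp' as an accepted trigger type. For context, a sketch of the internal execute request the MCP tools/call handler now issues (input values are placeholders):

// Sketch of the internal execute call made on behalf of an MCP tool invocation.
await fetch(`${baseUrl}/api/workflows/${workflowId}/execute`, {
  method: 'POST',
  headers: { 'Content-Type': 'application/json', 'X-API-Key': apiKey },
  body: JSON.stringify({ input: { query: 'hello' }, triggerType: 'mcp' }), // validated by ExecuteWorkflowSchema
})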
@@ -1,111 +0,0 @@
|
||||
import { db } from '@sim/db'
|
||||
import { workflow, workflowFolder } from '@sim/db/schema'
|
||||
import { eq } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { loadBulkWorkflowsFromNormalizedTables } from '@/lib/workflows/persistence/utils'
|
||||
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
|
||||
|
||||
const logger = createLogger('WorkspaceExportAPI')
|
||||
|
||||
/**
|
||||
* GET /api/workspaces/[id]/export
|
||||
* Export all workspace data (workflows with states, folders) in a single request.
|
||||
* Much more efficient than fetching each workflow individually.
|
||||
*/
|
||||
export async function GET(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
|
||||
const startTime = Date.now()
|
||||
const { id: workspaceId } = await params
|
||||
|
||||
try {
|
||||
const session = await getSession()
|
||||
if (!session?.user?.id) {
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
// Check if user has access to this workspace
|
||||
const userPermission = await getUserEntityPermissions(session.user.id, 'workspace', workspaceId)
|
||||
if (!userPermission) {
|
||||
return NextResponse.json({ error: 'Workspace not found or access denied' }, { status: 404 })
|
||||
}
|
||||
|
||||
// Fetch all workflows and folders in parallel (2 queries)
|
||||
const [workflows, folders] = await Promise.all([
|
||||
db.select().from(workflow).where(eq(workflow.workspaceId, workspaceId)),
|
||||
db.select().from(workflowFolder).where(eq(workflowFolder.workspaceId, workspaceId)),
|
||||
])
|
||||
|
||||
const workflowIds = workflows.map((w) => w.id)
|
||||
|
||||
// Bulk load all workflow states (3 queries total via inArray)
|
||||
const workflowStates = await loadBulkWorkflowsFromNormalizedTables(workflowIds)
|
||||
|
||||
// Build export data
|
||||
const workflowsExport = workflows.map((w) => {
|
||||
const state = workflowStates.get(w.id)
|
||||
|
||||
// Build the workflow state with defaults if no normalized data
|
||||
const workflowState = state
|
||||
? {
|
||||
blocks: state.blocks,
|
||||
edges: state.edges,
|
||||
loops: state.loops,
|
||||
parallels: state.parallels,
|
||||
lastSaved: Date.now(),
|
||||
isDeployed: w.isDeployed || false,
|
||||
deployedAt: w.deployedAt,
|
||||
}
|
||||
: {
|
||||
blocks: {},
|
||||
edges: [],
|
||||
loops: {},
|
||||
parallels: {},
|
||||
lastSaved: Date.now(),
|
||||
isDeployed: w.isDeployed || false,
|
||||
deployedAt: w.deployedAt,
|
||||
}
|
||||
|
||||
// Extract variables from workflow record
|
||||
const variables = Object.values((w.variables as Record<string, any>) || {}).map((v: any) => ({
|
||||
id: v.id,
|
||||
name: v.name,
|
||||
type: v.type,
|
||||
value: v.value,
|
||||
}))
|
||||
|
||||
return {
|
||||
workflow: {
|
||||
id: w.id,
|
||||
name: w.name,
|
||||
description: w.description,
|
||||
color: w.color,
|
||||
folderId: w.folderId,
|
||||
},
|
||||
state: workflowState,
|
||||
variables,
|
||||
}
|
||||
})
|
||||
|
||||
const foldersExport = folders.map((f) => ({
|
||||
id: f.id,
|
||||
name: f.name,
|
||||
parentId: f.parentId,
|
||||
}))
|
||||
|
||||
const elapsed = Date.now() - startTime
|
||||
logger.info(`Exported workspace ${workspaceId} in ${elapsed}ms`, {
|
||||
workflowsCount: workflowsExport.length,
|
||||
foldersCount: foldersExport.length,
|
||||
})
|
||||
|
||||
return NextResponse.json({
|
||||
workflows: workflowsExport,
|
||||
folders: foldersExport,
|
||||
})
|
||||
} catch (error) {
|
||||
const elapsed = Date.now() - startTime
|
||||
logger.error(`Error exporting workspace ${workspaceId} after ${elapsed}ms:`, error)
|
||||
return NextResponse.json({ error: 'Failed to export workspace' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
@@ -43,7 +43,7 @@ const PRIMARY_BUTTON_STYLES =
|
||||
|
||||
type NotificationType = 'webhook' | 'email' | 'slack'
|
||||
type LogLevel = 'info' | 'error'
|
||||
type TriggerType = 'api' | 'webhook' | 'schedule' | 'manual' | 'chat'
|
||||
type TriggerType = 'api' | 'webhook' | 'schedule' | 'manual' | 'chat' | 'mcp'
|
||||
type AlertRule =
|
||||
| 'none'
|
||||
| 'consecutive_failures'
|
||||
@@ -84,7 +84,7 @@ interface NotificationSettingsProps {
|
||||
}
|
||||
|
||||
const LOG_LEVELS: LogLevel[] = ['info', 'error']
|
||||
const TRIGGER_TYPES: TriggerType[] = ['api', 'webhook', 'schedule', 'manual', 'chat']
|
||||
const TRIGGER_TYPES: TriggerType[] = ['api', 'webhook', 'schedule', 'manual', 'chat', 'mcp']
|
||||
|
||||
function formatAlertConfigLabel(config: {
|
||||
rule: AlertRule
|
||||
@@ -137,7 +137,7 @@ export function NotificationSettings({
|
||||
workflowIds: [] as string[],
|
||||
allWorkflows: true,
|
||||
levelFilter: ['info', 'error'] as LogLevel[],
|
||||
triggerFilter: ['api', 'webhook', 'schedule', 'manual', 'chat'] as TriggerType[],
|
||||
triggerFilter: ['api', 'webhook', 'schedule', 'manual', 'chat', 'mcp'] as TriggerType[],
|
||||
includeFinalOutput: false,
|
||||
includeTraceSpans: false,
|
||||
includeRateLimits: false,
|
||||
@@ -207,7 +207,7 @@ export function NotificationSettings({
|
||||
workflowIds: [],
|
||||
allWorkflows: true,
|
||||
levelFilter: ['info', 'error'],
|
||||
triggerFilter: ['api', 'webhook', 'schedule', 'manual', 'chat'],
|
||||
triggerFilter: ['api', 'webhook', 'schedule', 'manual', 'chat', 'mcp'],
|
||||
includeFinalOutput: false,
|
||||
includeTraceSpans: false,
|
||||
includeRateLimits: false,
|
||||
|
||||
@@ -21,7 +21,7 @@ import { useFolderStore } from '@/stores/folders/store'
|
||||
import { useFilterStore } from '@/stores/logs/filters/store'
|
||||
import { AutocompleteSearch } from './components/search'
|
||||
|
||||
const CORE_TRIGGER_TYPES = ['manual', 'api', 'schedule', 'chat', 'webhook'] as const
|
||||
const CORE_TRIGGER_TYPES = ['manual', 'api', 'schedule', 'chat', 'webhook', 'mcp'] as const
|
||||
|
||||
const TIME_RANGE_OPTIONS: ComboboxOption[] = [
|
||||
{ value: 'All time', label: 'All time' },
|
||||
|
||||
@@ -4,7 +4,7 @@ import { Badge } from '@/components/emcn'
|
||||
import { getIntegrationMetadata } from '@/lib/logs/get-trigger-options'
|
||||
import { getBlock } from '@/blocks/registry'
|
||||
|
||||
const CORE_TRIGGER_TYPES = ['manual', 'api', 'schedule', 'chat', 'webhook'] as const
|
||||
const CORE_TRIGGER_TYPES = ['manual', 'api', 'schedule', 'chat', 'webhook', 'mcp'] as const
|
||||
const RUNNING_COLOR = '#22c55e' as const
|
||||
const PENDING_COLOR = '#f59e0b' as const
|
||||
|
||||
|
||||
@@ -101,9 +101,6 @@ const ACTION_VERBS = [
|
||||
'Generated',
|
||||
'Rendering',
|
||||
'Rendered',
|
||||
'Sleeping',
|
||||
'Slept',
|
||||
'Resumed',
|
||||
] as const
|
||||
|
||||
/**
|
||||
@@ -583,11 +580,6 @@ export function ToolCall({ toolCall: toolCallProp, toolCallId, onStateChange }:
|
||||
(toolCall.state === (ClientToolCallState.executing as any) ||
|
||||
toolCall.state === ('executing' as any))
|
||||
|
||||
const showWake =
|
||||
toolCall.name === 'sleep' &&
|
||||
(toolCall.state === (ClientToolCallState.executing as any) ||
|
||||
toolCall.state === ('executing' as any))
|
||||
|
||||
const handleStateChange = (state: any) => {
|
||||
forceUpdate({})
|
||||
onStateChange?.(state)
|
||||
@@ -1110,37 +1102,6 @@ export function ToolCall({ toolCall: toolCallProp, toolCallId, onStateChange }:
|
||||
Move to Background
|
||||
</Button>
|
||||
</div>
|
||||
) : showWake ? (
|
||||
<div className='mt-[8px]'>
|
||||
<Button
|
||||
onClick={async () => {
|
||||
try {
|
||||
const instance = getClientTool(toolCall.id)
|
||||
// Get elapsed seconds before waking
|
||||
const elapsedSeconds = instance?.getElapsedSeconds?.() || 0
|
||||
// Transition to background state locally so UI updates immediately
|
||||
// Pass elapsed seconds in the result so dynamic text can use it
|
||||
instance?.setState?.((ClientToolCallState as any).background, {
|
||||
result: { _elapsedSeconds: elapsedSeconds },
|
||||
})
|
||||
// Update the tool call params in the store to include elapsed time for display
|
||||
const { updateToolCallParams } = useCopilotStore.getState()
|
||||
updateToolCallParams?.(toolCall.id, { _elapsedSeconds: Math.round(elapsedSeconds) })
|
||||
await instance?.markToolComplete?.(
|
||||
200,
|
||||
`User woke you up after ${Math.round(elapsedSeconds)} seconds`
|
||||
)
|
||||
// Optionally force a re-render; store should sync state from server
|
||||
forceUpdate({})
|
||||
onStateChange?.('background')
|
||||
} catch {}
|
||||
}}
|
||||
variant='primary'
|
||||
title='Wake'
|
||||
>
|
||||
Wake
|
||||
</Button>
|
||||
</div>
|
||||
) : null}
|
||||
</div>
|
||||
)
|
||||
|
||||
@@ -0,0 +1,861 @@
|
||||
'use client'
|
||||
|
||||
import { useCallback, useEffect, useMemo, useState } from 'react'
|
||||
import {
|
||||
AlertTriangle,
|
||||
ChevronDown,
|
||||
ChevronRight,
|
||||
Plus,
|
||||
RefreshCw,
|
||||
Server,
|
||||
Trash2,
|
||||
} from 'lucide-react'
|
||||
import { useParams } from 'next/navigation'
|
||||
import {
|
||||
Badge,
|
||||
Button,
|
||||
Input as EmcnInput,
|
||||
Label,
|
||||
Popover,
|
||||
PopoverContent,
|
||||
PopoverItem,
|
||||
PopoverTrigger,
|
||||
} from '@/components/emcn'
|
||||
import { Skeleton } from '@/components/ui'
|
||||
import { cn } from '@/lib/core/utils/cn'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { generateToolInputSchema, sanitizeToolName } from '@/lib/mcp/workflow-tool-schema'
|
||||
import {
|
||||
useAddWorkflowMcpTool,
|
||||
useDeleteWorkflowMcpTool,
|
||||
useUpdateWorkflowMcpTool,
|
||||
useWorkflowMcpServers,
|
||||
useWorkflowMcpTools,
|
||||
type WorkflowMcpServer,
|
||||
type WorkflowMcpTool,
|
||||
} from '@/hooks/queries/workflow-mcp-servers'
|
||||
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
|
||||
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
|
||||
|
||||
const logger = createLogger('McpToolDeploy')
|
||||
|
||||
interface McpToolDeployProps {
|
||||
workflowId: string
|
||||
workflowName: string
|
||||
workflowDescription?: string | null
|
||||
isDeployed: boolean
|
||||
onAddedToServer?: () => void
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract input format from workflow blocks using SubBlockStore
|
||||
* The actual input format values are stored in useSubBlockStore, not directly in the block structure
|
||||
*/
|
||||
function extractInputFormat(
|
||||
blocks: Record<string, unknown>
|
||||
): Array<{ name: string; type: string }> {
|
||||
// Find the starter block
|
||||
for (const [blockId, block] of Object.entries(blocks)) {
|
||||
if (!block || typeof block !== 'object') continue
|
||||
|
||||
const blockObj = block as Record<string, unknown>
|
||||
const blockType = blockObj.type
|
||||
|
||||
// Check for all possible start/trigger block types
|
||||
if (
|
||||
blockType === 'starter' ||
|
||||
blockType === 'start' ||
|
||||
blockType === 'start_trigger' || // This is the unified start block type
|
||||
blockType === 'api' ||
|
||||
blockType === 'api_trigger' ||
|
||||
blockType === 'input_trigger'
|
||||
) {
|
||||
// Get the inputFormat value from the SubBlockStore (where the actual values are stored)
|
||||
const inputFormatValue = useSubBlockStore.getState().getValue(blockId, 'inputFormat')
|
||||
|
||||
if (Array.isArray(inputFormatValue) && inputFormatValue.length > 0) {
|
||||
return inputFormatValue
|
||||
.filter(
|
||||
(field: unknown): field is { name: string; type: string } =>
|
||||
field !== null &&
|
||||
typeof field === 'object' &&
|
||||
'name' in field &&
|
||||
typeof (field as { name: unknown }).name === 'string' &&
|
||||
(field as { name: string }).name.trim() !== ''
|
||||
)
|
||||
.map((field) => ({
|
||||
name: field.name.trim(),
|
||||
type: field.type || 'string',
|
||||
}))
|
||||
}
|
||||
|
||||
// Fallback: try to get from block's subBlocks structure (for backwards compatibility)
|
||||
const subBlocks = blockObj.subBlocks as Record<string, unknown> | undefined
|
||||
if (subBlocks?.inputFormat) {
|
||||
const inputFormatSubBlock = subBlocks.inputFormat as Record<string, unknown>
|
||||
const value = inputFormatSubBlock.value
|
||||
if (Array.isArray(value) && value.length > 0) {
|
||||
return value
|
||||
.filter(
|
||||
(field: unknown): field is { name: string; type: string } =>
|
||||
field !== null &&
|
||||
typeof field === 'object' &&
|
||||
'name' in field &&
|
||||
typeof (field as { name: unknown }).name === 'string' &&
|
||||
(field as { name: string }).name.trim() !== ''
|
||||
)
|
||||
.map((field) => ({
|
||||
name: field.name.trim(),
|
||||
type: field.type || 'string',
|
||||
}))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return []
|
||||
}
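// A minimal example of the expected return shape (hypothetical field values, assuming a
// starter block whose inputFormat sub-block holds two fields):
//   extractInputFormat(blocks)
//   // => [{ name: 'city', type: 'string' }, { name: 'days', type: 'number' }]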
|
||||
|
||||
/**
|
||||
* Generate JSON Schema from input format using the shared utility
|
||||
* Optionally applies custom descriptions from the UI
|
||||
*/
|
||||
function generateParameterSchema(
|
||||
inputFormat: Array<{ name: string; type: string }>,
|
||||
customDescriptions?: Record<string, string>
|
||||
): Record<string, unknown> {
|
||||
// Convert to InputFormatField with descriptions
|
||||
const fieldsWithDescriptions = inputFormat.map((field) => ({
|
||||
...field,
|
||||
description: customDescriptions?.[field.name]?.trim() || undefined,
|
||||
}))
|
||||
return generateToolInputSchema(fieldsWithDescriptions) as unknown as Record<string, unknown>
|
||||
}
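// A sketch of the resulting schema shape (assumed JSON-Schema-style output from
// generateToolInputSchema; only `properties` is read back by getToolParameterNames below):
//   generateParameterSchema([{ name: 'city', type: 'string' }], { city: 'Destination city' })
//   // => { type: 'object', properties: { city: { type: 'string', description: 'Destination city' } }, ... }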
|
||||
|
||||
/**
|
||||
* Extract parameter names from a tool's parameter schema
|
||||
*/
|
||||
function getToolParameterNames(schema: Record<string, unknown>): string[] {
|
||||
const properties = schema.properties as Record<string, unknown> | undefined
|
||||
if (!properties) return []
|
||||
return Object.keys(properties)
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if the tool's parameters differ from the current workflow's input format
|
||||
*/
|
||||
function hasParameterMismatch(
|
||||
tool: WorkflowMcpTool,
|
||||
currentInputFormat: Array<{ name: string; type: string }>
|
||||
): boolean {
|
||||
const toolParams = getToolParameterNames(tool.parameterSchema as Record<string, unknown>)
|
||||
const currentParams = currentInputFormat.map((f) => f.name)
|
||||
|
||||
if (toolParams.length !== currentParams.length) return true
|
||||
|
||||
const toolParamSet = new Set(toolParams)
|
||||
for (const param of currentParams) {
|
||||
if (!toolParamSet.has(param)) return true
|
||||
}
|
||||
|
||||
return false
|
||||
}
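// Example (illustrative values): a tool published with only a `location` parameter while the
// workflow's input format now also defines `units` reports a mismatch, which drives the
// "Needs Update" badge below:
//   hasParameterMismatch(tool, [{ name: 'location', type: 'string' }, { name: 'units', type: 'string' }])
//   // => true when the published tool's schema only lists `location`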
|
||||
|
||||
/**
|
||||
* Component to query tools for a single server and report back via callback.
|
||||
* This pattern avoids calling hooks in a loop.
|
||||
*/
|
||||
function ServerToolsQuery({
|
||||
workspaceId,
|
||||
server,
|
||||
workflowId,
|
||||
onData,
|
||||
}: {
|
||||
workspaceId: string
|
||||
server: WorkflowMcpServer
|
||||
workflowId: string
|
||||
onData: (serverId: string, tool: WorkflowMcpTool | null, isLoading: boolean) => void
|
||||
}) {
|
||||
const { data: tools, isLoading } = useWorkflowMcpTools(workspaceId, server.id)
|
||||
|
||||
useEffect(() => {
|
||||
const tool = tools?.find((t) => t.workflowId === workflowId) || null
|
||||
onData(server.id, tool, isLoading)
|
||||
}, [tools, isLoading, workflowId, server.id, onData])
|
||||
|
||||
return null // This component doesn't render anything
|
||||
}
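// Usage sketch: McpToolDeploy renders one <ServerToolsQuery /> per server, so each server gets
// its own useWorkflowMcpTools hook instance and reports back through onData; the results are
// collected in serverToolsMap rather than calling hooks inside a loop.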
|
||||
|
||||
interface ToolOnServerProps {
|
||||
server: WorkflowMcpServer
|
||||
tool: WorkflowMcpTool
|
||||
workspaceId: string
|
||||
currentInputFormat: Array<{ name: string; type: string }>
|
||||
currentParameterSchema: Record<string, unknown>
|
||||
workflowDescription: string | null | undefined
|
||||
onRemoved: (serverId: string) => void
|
||||
onUpdated: () => void
|
||||
}
|
||||
|
||||
function ToolOnServer({
|
||||
server,
|
||||
tool,
|
||||
workspaceId,
|
||||
currentInputFormat,
|
||||
currentParameterSchema,
|
||||
workflowDescription,
|
||||
onRemoved,
|
||||
onUpdated,
|
||||
}: ToolOnServerProps) {
|
||||
const deleteToolMutation = useDeleteWorkflowMcpTool()
|
||||
const updateToolMutation = useUpdateWorkflowMcpTool()
|
||||
const [showConfirm, setShowConfirm] = useState(false)
|
||||
const [showDetails, setShowDetails] = useState(false)
|
||||
|
||||
const needsUpdate = hasParameterMismatch(tool, currentInputFormat)
|
||||
const toolParams = getToolParameterNames(tool.parameterSchema as Record<string, unknown>)
|
||||
|
||||
const handleRemove = async () => {
|
||||
try {
|
||||
await deleteToolMutation.mutateAsync({
|
||||
workspaceId,
|
||||
serverId: server.id,
|
||||
toolId: tool.id,
|
||||
})
|
||||
onRemoved(server.id)
|
||||
} catch (error) {
|
||||
logger.error('Failed to remove tool:', error)
|
||||
}
|
||||
}
|
||||
|
||||
const handleUpdate = async () => {
|
||||
try {
|
||||
await updateToolMutation.mutateAsync({
|
||||
workspaceId,
|
||||
serverId: server.id,
|
||||
toolId: tool.id,
|
||||
toolDescription: workflowDescription || `Execute workflow`,
|
||||
parameterSchema: currentParameterSchema,
|
||||
})
|
||||
onUpdated()
|
||||
logger.info(`Updated tool ${tool.id} with new parameters`)
|
||||
} catch (error) {
|
||||
logger.error('Failed to update tool:', error)
|
||||
}
|
||||
}
|
||||
|
||||
if (showConfirm) {
|
||||
return (
|
||||
<div className='flex items-center justify-between rounded-[6px] border border-[var(--text-error)]/30 bg-[var(--surface-3)] px-[10px] py-[8px]'>
|
||||
<span className='text-[12px] text-[var(--text-secondary)]'>Remove from {server.name}?</span>
|
||||
<div className='flex items-center gap-[4px]'>
|
||||
<Button
|
||||
variant='ghost'
|
||||
onClick={() => setShowConfirm(false)}
|
||||
className='h-[24px] px-[8px] text-[11px]'
|
||||
disabled={deleteToolMutation.isPending}
|
||||
>
|
||||
Cancel
|
||||
</Button>
|
||||
<Button
|
||||
variant='ghost'
|
||||
onClick={handleRemove}
|
||||
className='h-[24px] px-[8px] text-[11px] text-[var(--text-error)] hover:text-[var(--text-error)]'
|
||||
disabled={deleteToolMutation.isPending}
|
||||
>
|
||||
{deleteToolMutation.isPending ? 'Removing...' : 'Remove'}
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
return (
|
||||
<div className='rounded-[6px] border bg-[var(--surface-3)]'>
|
||||
<div
|
||||
className='flex cursor-pointer items-center justify-between px-[10px] py-[8px]'
|
||||
onClick={() => setShowDetails(!showDetails)}
|
||||
>
|
||||
<div className='flex items-center gap-[8px]'>
|
||||
{showDetails ? (
|
||||
<ChevronDown className='h-[12px] w-[12px] text-[var(--text-tertiary)]' />
|
||||
) : (
|
||||
<ChevronRight className='h-[12px] w-[12px] text-[var(--text-tertiary)]' />
|
||||
)}
|
||||
<span className='text-[13px] text-[var(--text-primary)]'>{server.name}</span>
|
||||
{server.isPublished && (
|
||||
<Badge variant='outline' className='text-[10px]'>
|
||||
Published
|
||||
</Badge>
|
||||
)}
|
||||
{needsUpdate && (
|
||||
<Badge
|
||||
variant='outline'
|
||||
className='border-amber-500/50 bg-amber-500/10 text-[10px] text-amber-500'
|
||||
>
|
||||
<AlertTriangle className='mr-[4px] h-[10px] w-[10px]' />
|
||||
Needs Update
|
||||
</Badge>
|
||||
)}
|
||||
</div>
|
||||
<div className='flex items-center gap-[4px]' onClick={(e) => e.stopPropagation()}>
|
||||
{needsUpdate && (
|
||||
<Button
|
||||
variant='ghost'
|
||||
onClick={handleUpdate}
|
||||
disabled={updateToolMutation.isPending}
|
||||
className='h-[24px] px-[8px] text-[11px] text-amber-500 hover:text-amber-600'
|
||||
>
|
||||
<RefreshCw
|
||||
className={cn(
|
||||
'mr-[4px] h-[10px] w-[10px]',
|
||||
updateToolMutation.isPending && 'animate-spin'
|
||||
)}
|
||||
/>
|
||||
{updateToolMutation.isPending ? 'Updating...' : 'Update'}
|
||||
</Button>
|
||||
)}
|
||||
<Button
|
||||
variant='ghost'
|
||||
onClick={() => setShowConfirm(true)}
|
||||
className='h-[24px] w-[24px] p-0 text-[var(--text-tertiary)] hover:text-[var(--text-error)]'
|
||||
>
|
||||
<Trash2 className='h-[12px] w-[12px]' />
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{showDetails && (
|
||||
<div className='border-[var(--border)] border-t px-[10px] py-[8px]'>
|
||||
<div className='flex flex-col gap-[6px]'>
|
||||
<div className='flex items-center justify-between'>
|
||||
<span className='text-[11px] text-[var(--text-muted)]'>Tool Name</span>
|
||||
<span className='font-mono text-[11px] text-[var(--text-secondary)]'>
|
||||
{tool.toolName}
|
||||
</span>
|
||||
</div>
|
||||
<div className='flex items-start justify-between gap-[8px]'>
|
||||
<span className='flex-shrink-0 text-[11px] text-[var(--text-muted)]'>
|
||||
Description
|
||||
</span>
|
||||
<span className='text-right text-[11px] text-[var(--text-secondary)]'>
|
||||
{tool.toolDescription || '—'}
|
||||
</span>
|
||||
</div>
|
||||
<div className='flex items-start justify-between gap-[8px]'>
|
||||
<span className='flex-shrink-0 text-[11px] text-[var(--text-muted)]'>
|
||||
Parameters ({toolParams.length})
|
||||
</span>
|
||||
<div className='flex flex-wrap justify-end gap-[4px]'>
|
||||
{toolParams.length === 0 ? (
|
||||
<span className='text-[11px] text-[var(--text-muted)]'>None</span>
|
||||
) : (
|
||||
toolParams.map((param) => (
|
||||
<Badge key={param} variant='outline' className='text-[9px]'>
|
||||
{param}
|
||||
</Badge>
|
||||
))
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
export function McpToolDeploy({
|
||||
workflowId,
|
||||
workflowName,
|
||||
workflowDescription,
|
||||
isDeployed,
|
||||
onAddedToServer,
|
||||
}: McpToolDeployProps) {
|
||||
const params = useParams()
|
||||
const workspaceId = params.workspaceId as string
|
||||
|
||||
const {
|
||||
data: servers = [],
|
||||
isLoading: isLoadingServers,
|
||||
refetch: refetchServers,
|
||||
} = useWorkflowMcpServers(workspaceId)
|
||||
const addToolMutation = useAddWorkflowMcpTool()
|
||||
|
||||
// Get workflow blocks
|
||||
const blocks = useWorkflowStore((state) => state.blocks)
|
||||
|
||||
// Find the starter block ID to subscribe to its inputFormat changes
|
||||
const starterBlockId = useMemo(() => {
|
||||
for (const [blockId, block] of Object.entries(blocks)) {
|
||||
if (!block || typeof block !== 'object') continue
|
||||
const blockType = (block as { type?: string }).type
|
||||
// Check for all possible start/trigger block types
|
||||
if (
|
||||
blockType === 'starter' ||
|
||||
blockType === 'start' ||
|
||||
blockType === 'start_trigger' || // This is the unified start block type
|
||||
blockType === 'api' ||
|
||||
blockType === 'api_trigger' ||
|
||||
blockType === 'input_trigger'
|
||||
) {
|
||||
return blockId
|
||||
}
|
||||
}
|
||||
return null
|
||||
}, [blocks])
|
||||
|
||||
// Subscribe to the inputFormat value in SubBlockStore for reactivity
|
||||
// Use workflowId prop directly (not activeWorkflowId from registry) to ensure we get the correct workflow's data
|
||||
const subBlockValues = useSubBlockStore((state) =>
|
||||
workflowId ? (state.workflowValues[workflowId] ?? {}) : {}
|
||||
)
|
||||
|
||||
// Extract and normalize input format - now reactive to SubBlockStore changes
|
||||
const inputFormat = useMemo(() => {
|
||||
// First try to get from SubBlockStore (where runtime values are stored)
|
||||
if (starterBlockId && subBlockValues[starterBlockId]) {
|
||||
const inputFormatValue = subBlockValues[starterBlockId].inputFormat
|
||||
|
||||
if (Array.isArray(inputFormatValue) && inputFormatValue.length > 0) {
|
||||
const filtered = inputFormatValue
|
||||
.filter(
|
||||
(field: unknown): field is { name: string; type: string } =>
|
||||
field !== null &&
|
||||
typeof field === 'object' &&
|
||||
'name' in field &&
|
||||
typeof (field as { name: unknown }).name === 'string' &&
|
||||
(field as { name: string }).name.trim() !== ''
|
||||
)
|
||||
.map((field) => ({
|
||||
name: field.name.trim(),
|
||||
type: field.type || 'string',
|
||||
}))
|
||||
if (filtered.length > 0) {
|
||||
return filtered
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Fallback: try to get from block structure (for initial load or backwards compatibility)
|
||||
if (starterBlockId && blocks[starterBlockId]) {
|
||||
const startBlock = blocks[starterBlockId]
|
||||
const subBlocksValue = startBlock?.subBlocks?.inputFormat?.value as unknown
|
||||
|
||||
if (Array.isArray(subBlocksValue) && subBlocksValue.length > 0) {
|
||||
const validFields: Array<{ name: string; type: string }> = []
|
||||
for (const field of subBlocksValue) {
|
||||
if (
|
||||
field !== null &&
|
||||
typeof field === 'object' &&
|
||||
'name' in field &&
|
||||
typeof field.name === 'string' &&
|
||||
field.name.trim() !== ''
|
||||
) {
|
||||
validFields.push({
|
||||
name: field.name.trim(),
|
||||
type: typeof field.type === 'string' ? field.type : 'string',
|
||||
})
|
||||
}
|
||||
}
|
||||
if (validFields.length > 0) {
|
||||
return validFields
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Last fallback: use extractInputFormat helper
|
||||
return extractInputFormat(blocks)
|
||||
}, [starterBlockId, subBlockValues, blocks])
|
||||
|
||||
const [selectedServer, setSelectedServer] = useState<WorkflowMcpServer | null>(null)
|
||||
const [toolName, setToolName] = useState('')
|
||||
const [toolDescription, setToolDescription] = useState('')
|
||||
const [showServerSelector, setShowServerSelector] = useState(false)
|
||||
const [showParameterSchema, setShowParameterSchema] = useState(false)
|
||||
// Track custom descriptions for each parameter
|
||||
const [parameterDescriptions, setParameterDescriptions] = useState<Record<string, string>>({})
|
||||
|
||||
const parameterSchema = useMemo(
|
||||
() => generateParameterSchema(inputFormat, parameterDescriptions),
|
||||
[inputFormat, parameterDescriptions]
|
||||
)
|
||||
|
||||
// Track tools data from each server using state instead of hooks in a loop
|
||||
const [serverToolsMap, setServerToolsMap] = useState<
|
||||
Record<string, { tool: WorkflowMcpTool | null; isLoading: boolean }>
|
||||
>({})
|
||||
|
||||
// Stable callback to handle tool data from ServerToolsQuery components
|
||||
const handleServerToolData = useCallback(
|
||||
(serverId: string, tool: WorkflowMcpTool | null, isLoading: boolean) => {
|
||||
setServerToolsMap((prev) => {
|
||||
// Only update if data has changed to prevent infinite loops
|
||||
const existing = prev[serverId]
|
||||
if (existing?.tool?.id === tool?.id && existing?.isLoading === isLoading) {
|
||||
return prev
|
||||
}
|
||||
return {
|
||||
...prev,
|
||||
[serverId]: { tool, isLoading },
|
||||
}
|
||||
})
|
||||
},
|
||||
[]
|
||||
)
|
||||
|
||||
// Find which servers already have this workflow as a tool and get the tool info
|
||||
const serversWithThisWorkflow = useMemo(() => {
|
||||
const result: Array<{ server: WorkflowMcpServer; tool: WorkflowMcpTool }> = []
|
||||
for (const server of servers) {
|
||||
const toolInfo = serverToolsMap[server.id]
|
||||
if (toolInfo?.tool) {
|
||||
result.push({ server, tool: toolInfo.tool })
|
||||
}
|
||||
}
|
||||
return result
|
||||
}, [servers, serverToolsMap])
|
||||
|
||||
// Check if any tools need updating
|
||||
const toolsNeedingUpdate = useMemo(() => {
|
||||
return serversWithThisWorkflow.filter(({ tool }) => hasParameterMismatch(tool, inputFormat))
|
||||
}, [serversWithThisWorkflow, inputFormat])
|
||||
|
||||
// Load existing parameter descriptions from the first deployed tool
|
||||
useEffect(() => {
|
||||
if (serversWithThisWorkflow.length > 0) {
|
||||
const existingTool = serversWithThisWorkflow[0].tool
|
||||
const schema = existingTool.parameterSchema as Record<string, unknown> | undefined
|
||||
const properties = schema?.properties as Record<string, { description?: string }> | undefined
|
||||
|
||||
if (properties) {
|
||||
const descriptions: Record<string, string> = {}
|
||||
for (const [name, prop] of Object.entries(properties)) {
|
||||
// Only use description if it differs from the field name (i.e., it's custom)
|
||||
if (
|
||||
prop.description &&
|
||||
prop.description !== name &&
|
||||
prop.description !== 'Array of file objects'
|
||||
) {
|
||||
descriptions[name] = prop.description
|
||||
}
|
||||
}
|
||||
if (Object.keys(descriptions).length > 0) {
|
||||
setParameterDescriptions(descriptions)
|
||||
}
|
||||
}
|
||||
}
|
||||
}, [serversWithThisWorkflow])
|
||||
|
||||
// Reset form when selected server changes
|
||||
useEffect(() => {
|
||||
if (selectedServer) {
|
||||
setToolName(sanitizeToolName(workflowName))
|
||||
setToolDescription(workflowDescription || `Execute ${workflowName} workflow`)
|
||||
}
|
||||
}, [selectedServer, workflowName, workflowDescription])
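// Sketch of the expected sanitization (assumed behavior, consistent with the
// "lowercase letters, numbers, and underscores only" hint shown in the form below):
//   sanitizeToolName('Book Flight Search') // => 'book_flight_search'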
|
||||
|
||||
const handleAddTool = useCallback(async () => {
|
||||
if (!selectedServer || !toolName.trim()) return
|
||||
|
||||
try {
|
||||
await addToolMutation.mutateAsync({
|
||||
workspaceId,
|
||||
serverId: selectedServer.id,
|
||||
workflowId,
|
||||
toolName: toolName.trim(),
|
||||
toolDescription: toolDescription.trim() || undefined,
|
||||
parameterSchema,
|
||||
})
|
||||
|
||||
setSelectedServer(null)
|
||||
setToolName('')
|
||||
setToolDescription('')
|
||||
|
||||
// Refetch servers to update tool count
|
||||
refetchServers()
|
||||
onAddedToServer?.()
|
||||
|
||||
logger.info(`Added workflow ${workflowId} as tool to server ${selectedServer.id}`)
|
||||
} catch (error) {
|
||||
logger.error('Failed to add tool:', error)
|
||||
}
|
||||
}, [
|
||||
selectedServer,
|
||||
toolName,
|
||||
toolDescription,
|
||||
workspaceId,
|
||||
workflowId,
|
||||
parameterSchema,
|
||||
addToolMutation,
|
||||
refetchServers,
|
||||
onAddedToServer,
|
||||
])
|
||||
|
||||
const handleToolChanged = useCallback(
|
||||
(removedServerId?: string) => {
|
||||
// If a tool was removed from a specific server, clear just that entry
|
||||
// The ServerToolsQuery component will re-query and update the map
|
||||
if (removedServerId) {
|
||||
setServerToolsMap((prev) => {
|
||||
const next = { ...prev }
|
||||
delete next[removedServerId]
|
||||
return next
|
||||
})
|
||||
}
|
||||
refetchServers()
|
||||
},
|
||||
[refetchServers]
|
||||
)
|
||||
|
||||
const availableServers = useMemo(() => {
|
||||
const addedServerIds = new Set(serversWithThisWorkflow.map((s) => s.server.id))
|
||||
return servers.filter((server) => !addedServerIds.has(server.id))
|
||||
}, [servers, serversWithThisWorkflow])
|
||||
|
||||
if (!isDeployed) {
|
||||
return (
|
||||
<div className='flex h-full flex-col items-center justify-center gap-[12px] text-center'>
|
||||
<Server className='h-[32px] w-[32px] text-[var(--text-muted)]' />
|
||||
<div className='flex flex-col gap-[4px]'>
|
||||
<p className='text-[14px] text-[var(--text-primary)]'>Deploy workflow first</p>
|
||||
<p className='text-[13px] text-[var(--text-muted)]'>
|
||||
You need to deploy your workflow before adding it as an MCP tool.
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
if (isLoadingServers) {
|
||||
return (
|
||||
<div className='flex flex-col gap-[16px]'>
|
||||
<Skeleton className='h-[60px] w-full' />
|
||||
<Skeleton className='h-[40px] w-full' />
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
if (servers.length === 0) {
|
||||
return (
|
||||
<div className='flex h-full flex-col items-center justify-center gap-[12px] text-center'>
|
||||
<Server className='h-[32px] w-[32px] text-[var(--text-muted)]' />
|
||||
<div className='flex flex-col gap-[4px]'>
|
||||
<p className='text-[14px] text-[var(--text-primary)]'>No MCP servers yet</p>
|
||||
<p className='text-[13px] text-[var(--text-muted)]'>
|
||||
Create a Workflow MCP Server in Settings → Workflow MCP Servers first.
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
return (
|
||||
<div className='flex flex-col gap-[16px]'>
|
||||
{/* Query tools for each server using separate components to follow Rules of Hooks */}
|
||||
{servers.map((server) => (
|
||||
<ServerToolsQuery
|
||||
key={server.id}
|
||||
workspaceId={workspaceId}
|
||||
server={server}
|
||||
workflowId={workflowId}
|
||||
onData={handleServerToolData}
|
||||
/>
|
||||
))}
|
||||
|
||||
<div className='flex flex-col gap-[4px]'>
|
||||
<p className='text-[13px] text-[var(--text-secondary)]'>
|
||||
Add this workflow as an MCP tool to make it callable by external MCP clients like Cursor
|
||||
or Claude Desktop.
|
||||
</p>
|
||||
</div>
|
||||
|
||||
{/* Update Warning */}
|
||||
{toolsNeedingUpdate.length > 0 && (
|
||||
<div className='flex items-center gap-[8px] rounded-[6px] border border-amber-500/30 bg-amber-500/10 px-[10px] py-[8px]'>
|
||||
<AlertTriangle className='h-[14px] w-[14px] flex-shrink-0 text-amber-500' />
|
||||
<p className='text-[12px] text-amber-600 dark:text-amber-400'>
|
||||
{toolsNeedingUpdate.length} server{toolsNeedingUpdate.length > 1 ? 's have' : ' has'}{' '}
|
||||
outdated tool definitions. Click "Update" on each to sync with current parameters.
|
||||
</p>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Parameter Schema Preview */}
|
||||
<div className='flex flex-col gap-[8px]'>
|
||||
<button
|
||||
type='button'
|
||||
onClick={() => setShowParameterSchema(!showParameterSchema)}
|
||||
className='flex items-center gap-[6px] text-left'
|
||||
>
|
||||
{showParameterSchema ? (
|
||||
<ChevronDown className='h-[12px] w-[12px] text-[var(--text-tertiary)]' />
|
||||
) : (
|
||||
<ChevronRight className='h-[12px] w-[12px] text-[var(--text-tertiary)]' />
|
||||
)}
|
||||
<Label className='cursor-pointer text-[13px] text-[var(--text-primary)]'>
|
||||
Current Tool Parameters ({inputFormat.length})
|
||||
</Label>
|
||||
</button>
|
||||
|
||||
{showParameterSchema && (
|
||||
<div className='rounded-[6px] border bg-[var(--surface-4)] p-[12px]'>
|
||||
{inputFormat.length === 0 ? (
|
||||
<p className='text-[12px] text-[var(--text-muted)]'>
|
||||
No parameters defined. Add input fields in the Starter block to define tool
|
||||
parameters.
|
||||
</p>
|
||||
) : (
|
||||
<div className='flex flex-col gap-[12px]'>
|
||||
{inputFormat.map((field, index) => (
|
||||
<div key={index} className='flex flex-col gap-[6px]'>
|
||||
<div className='flex items-center justify-between'>
|
||||
<span className='font-mono text-[12px] text-[var(--text-primary)]'>
|
||||
{field.name}
|
||||
</span>
|
||||
<Badge variant='outline' className='text-[10px]'>
|
||||
{field.type}
|
||||
</Badge>
|
||||
</div>
|
||||
<EmcnInput
|
||||
value={parameterDescriptions[field.name] || ''}
|
||||
onChange={(e) =>
|
||||
setParameterDescriptions((prev) => ({
|
||||
...prev,
|
||||
[field.name]: e.target.value,
|
||||
}))
|
||||
}
|
||||
placeholder={`Describe what "${field.name}" is for...`}
|
||||
className='h-[32px] text-[12px]'
|
||||
/>
|
||||
</div>
|
||||
))}
|
||||
<p className='text-[11px] text-[var(--text-muted)]'>
|
||||
Descriptions help MCP clients understand what each parameter is for.
|
||||
</p>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
|
||||
{/* Servers with this workflow */}
|
||||
{serversWithThisWorkflow.length > 0 && (
|
||||
<div className='flex flex-col gap-[8px]'>
|
||||
<Label className='text-[13px] text-[var(--text-primary)]'>
|
||||
Added to ({serversWithThisWorkflow.length})
|
||||
</Label>
|
||||
<div className='flex flex-col gap-[6px]'>
|
||||
{serversWithThisWorkflow.map(({ server, tool }) => (
|
||||
<ToolOnServer
|
||||
key={server.id}
|
||||
server={server}
|
||||
tool={tool}
|
||||
workspaceId={workspaceId}
|
||||
currentInputFormat={inputFormat}
|
||||
currentParameterSchema={parameterSchema}
|
||||
workflowDescription={workflowDescription}
|
||||
onRemoved={(serverId) => handleToolChanged(serverId)}
|
||||
onUpdated={() => handleToolChanged()}
|
||||
/>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Add to new server */}
|
||||
{availableServers.length > 0 ? (
|
||||
<>
|
||||
<div className='flex flex-col gap-[8px]'>
|
||||
<Label className='text-[13px] text-[var(--text-primary)]'>Add to Server</Label>
|
||||
<Popover open={showServerSelector} onOpenChange={setShowServerSelector}>
|
||||
<PopoverTrigger asChild>
|
||||
<Button
|
||||
variant='default'
|
||||
className='h-[36px] w-full justify-between border bg-[var(--surface-3)]'
|
||||
>
|
||||
<span className={cn(!selectedServer && 'text-[var(--text-muted)]')}>
|
||||
{selectedServer?.name || 'Choose a server...'}
|
||||
</span>
|
||||
<ChevronDown className='h-[14px] w-[14px] text-[var(--text-tertiary)]' />
|
||||
</Button>
|
||||
</PopoverTrigger>
|
||||
<PopoverContent
|
||||
side='bottom'
|
||||
align='start'
|
||||
sideOffset={4}
|
||||
className='w-[var(--radix-popover-trigger-width)]'
|
||||
border
|
||||
>
|
||||
{availableServers.map((server) => (
|
||||
<PopoverItem
|
||||
key={server.id}
|
||||
onClick={() => {
|
||||
setSelectedServer(server)
|
||||
setShowServerSelector(false)
|
||||
}}
|
||||
>
|
||||
<Server className='mr-[8px] h-[14px] w-[14px] text-[var(--text-tertiary)]' />
|
||||
<span>{server.name}</span>
|
||||
{server.isPublished && (
|
||||
<Badge variant='outline' className='ml-auto text-[10px]'>
|
||||
Published
|
||||
</Badge>
|
||||
)}
|
||||
</PopoverItem>
|
||||
))}
|
||||
</PopoverContent>
|
||||
</Popover>
|
||||
</div>
|
||||
|
||||
{selectedServer && (
|
||||
<>
|
||||
<div className='flex flex-col gap-[8px]'>
|
||||
<Label className='text-[13px] text-[var(--text-primary)]'>Tool Name</Label>
|
||||
<EmcnInput
|
||||
value={toolName}
|
||||
onChange={(e) => setToolName(e.target.value)}
|
||||
placeholder='e.g., book_flight'
|
||||
className='h-[36px]'
|
||||
/>
|
||||
<p className='text-[11px] text-[var(--text-muted)]'>
|
||||
Use lowercase letters, numbers, and underscores only.
|
||||
</p>
|
||||
</div>
|
||||
|
||||
<div className='flex flex-col gap-[8px]'>
|
||||
<Label className='text-[13px] text-[var(--text-primary)]'>Description</Label>
|
||||
<EmcnInput
|
||||
value={toolDescription}
|
||||
onChange={(e) => setToolDescription(e.target.value)}
|
||||
placeholder='Describe what this tool does...'
|
||||
className='h-[36px]'
|
||||
/>
|
||||
</div>
|
||||
|
||||
<Button
|
||||
variant='primary'
|
||||
onClick={handleAddTool}
|
||||
disabled={addToolMutation.isPending || !toolName.trim()}
|
||||
className='!bg-[var(--brand-tertiary-2)] !text-[var(--text-inverse)] hover:!bg-[var(--brand-tertiary-2)]/90'
|
||||
>
|
||||
<Plus className='mr-[6px] h-[14px] w-[14px]' />
|
||||
{addToolMutation.isPending ? 'Adding...' : 'Add to Server'}
|
||||
</Button>
|
||||
|
||||
{addToolMutation.isError && (
|
||||
<p className='text-[12px] text-[var(--text-error)]'>
|
||||
{addToolMutation.error?.message || 'Failed to add tool'}
|
||||
</p>
|
||||
)}
|
||||
</>
|
||||
)}
|
||||
</>
|
||||
) : serversWithThisWorkflow.length > 0 ? (
|
||||
<p className='text-[13px] text-[var(--text-muted)]'>
|
||||
This workflow has been added to all available servers.
|
||||
</p>
|
||||
) : null}
|
||||
</div>
|
||||
)
|
||||
}
|
||||
@@ -24,6 +24,7 @@ import type { WorkflowState } from '@/stores/workflows/workflow/types'
|
||||
import { ApiDeploy } from './components/api/api'
|
||||
import { ChatDeploy, type ExistingChat } from './components/chat/chat'
|
||||
import { GeneralDeploy } from './components/general/general'
|
||||
import { McpToolDeploy } from './components/mcp-tool/mcp-tool'
|
||||
import { TemplateDeploy } from './components/template/template'
|
||||
|
||||
const logger = createLogger('DeployModal')
|
||||
@@ -49,7 +50,7 @@ interface WorkflowDeploymentInfo {
|
||||
needsRedeployment: boolean
|
||||
}
|
||||
|
||||
type TabView = 'general' | 'api' | 'chat' | 'template'
|
||||
type TabView = 'general' | 'api' | 'chat' | 'template' | 'mcp-tool'
|
||||
|
||||
export function DeployModal({
|
||||
open,
|
||||
@@ -552,6 +553,7 @@ export function DeployModal({
|
||||
<ModalTabsTrigger value='api'>API</ModalTabsTrigger>
|
||||
<ModalTabsTrigger value='chat'>Chat</ModalTabsTrigger>
|
||||
<ModalTabsTrigger value='template'>Template</ModalTabsTrigger>
|
||||
<ModalTabsTrigger value='mcp-tool'>MCP Tool</ModalTabsTrigger>
|
||||
</ModalTabsList>
|
||||
|
||||
<ModalBody className='min-h-0 flex-1'>
|
||||
@@ -610,6 +612,17 @@ export function DeployModal({
|
||||
/>
|
||||
)}
|
||||
</ModalTabsContent>
|
||||
|
||||
<ModalTabsContent value='mcp-tool'>
|
||||
{workflowId && (
|
||||
<McpToolDeploy
|
||||
workflowId={workflowId}
|
||||
workflowName={workflowMetadata?.name || 'Workflow'}
|
||||
workflowDescription={workflowMetadata?.description}
|
||||
isDeployed={isDeployed}
|
||||
/>
|
||||
)}
|
||||
</ModalTabsContent>
|
||||
</ModalBody>
|
||||
</ModalTabs>
|
||||
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
'use client'
|
||||
|
||||
import { useMemo } from 'react'
|
||||
import { Check } from 'lucide-react'
|
||||
import { Button, Modal, ModalBody, ModalContent, ModalFooter, ModalHeader } from '@/components/emcn'
|
||||
import { client } from '@/lib/auth/auth-client'
|
||||
@@ -316,28 +315,14 @@ export function OAuthRequiredModal({
|
||||
}
|
||||
}
|
||||
|
||||
  const newScopesSet = useMemo(
    () =>
      new Set(
        (newScopes || []).filter(
          (scope) => !scope.includes('userinfo.email') && !scope.includes('userinfo.profile')
        )
      ),
    [newScopes]
  )

  const displayScopes = useMemo(() => {
    const filtered = requiredScopes.filter(
      (scope) => !scope.includes('userinfo.email') && !scope.includes('userinfo.profile')
    )
    return filtered.sort((a, b) => {
      const aIsNew = newScopesSet.has(a)
      const bIsNew = newScopesSet.has(b)
      if (aIsNew && !bIsNew) return -1
      if (!aIsNew && bIsNew) return 1
      return 0
    })
  }, [requiredScopes, newScopesSet])

  const displayScopes = requiredScopes.filter(
    (scope) => !scope.includes('userinfo.email') && !scope.includes('userinfo.profile')
  )

  const newScopesSet = new Set(
    (newScopes || []).filter(
      (scope) => !scope.includes('userinfo.email') && !scope.includes('userinfo.profile')
    )
  )
|
||||
|
||||
const handleConnectDirectly = async () => {
|
||||
try {
|
||||
@@ -362,6 +347,13 @@ export function OAuthRequiredModal({
|
||||
return
|
||||
}
|
||||
|
||||
if (providerId === 'servicenow') {
|
||||
// Pass the current URL so we can redirect back after OAuth
|
||||
const returnUrl = encodeURIComponent(window.location.href)
|
||||
window.location.href = `/api/auth/servicenow/authorize?returnUrl=${returnUrl}`
|
||||
return
|
||||
}
|
||||
|
||||
await client.oauth2.link({
|
||||
providerId,
|
||||
callbackURL: window.location.href,
|
||||
|
||||
@@ -12,7 +12,6 @@ import {
|
||||
parseProvider,
|
||||
} from '@/lib/oauth'
|
||||
import { OAuthRequiredModal } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/credential-selector/components/oauth-required-modal'
|
||||
import { useDependsOnGate } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-depends-on-gate'
|
||||
import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-sub-block-value'
|
||||
import type { SubBlockConfig } from '@/blocks/types'
|
||||
import { useOAuthCredentialDetail, useOAuthCredentials } from '@/hooks/queries/oauth-credentials'
|
||||
@@ -46,14 +45,10 @@ export function CredentialSelector({
|
||||
const label = subBlock.placeholder || 'Select credential'
|
||||
const serviceId = subBlock.serviceId || ''
|
||||
|
||||
const { depsSatisfied, dependsOn } = useDependsOnGate(blockId, subBlock, { disabled, isPreview })
|
||||
const hasDependencies = dependsOn.length > 0
|
||||
|
||||
const effectiveDisabled = disabled || (hasDependencies && !depsSatisfied)
|
||||
|
||||
const effectiveValue = isPreview && previewValue !== undefined ? previewValue : storeValue
|
||||
const selectedId = typeof effectiveValue === 'string' ? effectiveValue : ''
|
||||
|
||||
// serviceId is now the canonical identifier - derive provider from it
|
||||
const effectiveProviderId = useMemo(
|
||||
() => getProviderIdFromServiceId(serviceId) as OAuthProvider,
|
||||
[serviceId]
|
||||
@@ -135,7 +130,7 @@ export function CredentialSelector({
|
||||
const needsUpdate =
|
||||
hasSelection &&
|
||||
missingRequiredScopes.length > 0 &&
|
||||
!effectiveDisabled &&
|
||||
!disabled &&
|
||||
!isPreview &&
|
||||
!credentialsLoading
|
||||
|
||||
@@ -235,10 +230,8 @@ export function CredentialSelector({
|
||||
selectedValue={selectedId}
|
||||
onChange={handleComboboxChange}
|
||||
onOpenChange={handleOpenChange}
|
||||
placeholder={
|
||||
hasDependencies && !depsSatisfied ? 'Fill in required fields above first' : label
|
||||
}
|
||||
disabled={effectiveDisabled}
|
||||
placeholder={label}
|
||||
disabled={disabled}
|
||||
editable={true}
|
||||
filterOptions={true}
|
||||
isLoading={credentialsLoading}
|
||||
|
||||
@@ -85,11 +85,11 @@ export function ShortInput({
|
||||
const persistSubBlockValueRef = useRef<(value: string) => void>(() => {})
|
||||
|
||||
const justPastedRef = useRef(false)
|
||||
|
||||
const webhookManagement = useWebhookManagement({
|
||||
blockId,
|
||||
triggerId: undefined,
|
||||
isPreview,
|
||||
useWebhookUrl,
|
||||
})
|
||||
|
||||
const wandHook = useWand({
|
||||
|
||||
@@ -40,8 +40,6 @@ import { useSelectorDisplayName } from '@/hooks/use-selector-display-name'
|
||||
import { useVariablesStore } from '@/stores/panel/variables/store'
|
||||
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
||||
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
|
||||
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
|
||||
import { wouldCreateCycle } from '@/stores/workflows/workflow/utils'
|
||||
|
||||
const logger = createLogger('WorkflowBlock')
|
||||
|
||||
@@ -846,11 +844,7 @@ export const WorkflowBlock = memo(function WorkflowBlock({
|
||||
data-handleid='target'
|
||||
isConnectableStart={false}
|
||||
isConnectableEnd={true}
|
||||
isValidConnection={(connection) => {
|
||||
if (connection.source === id) return false
|
||||
const edges = useWorkflowStore.getState().edges
|
||||
return !wouldCreateCycle(edges, connection.source!, connection.target!)
|
||||
}}
|
||||
isValidConnection={(connection) => connection.source !== id}
|
||||
/>
|
||||
)}
|
||||
|
||||
@@ -1051,11 +1045,7 @@ export const WorkflowBlock = memo(function WorkflowBlock({
|
||||
data-handleid={`condition-${cond.id}`}
|
||||
isConnectableStart={true}
|
||||
isConnectableEnd={false}
|
||||
isValidConnection={(connection) => {
|
||||
if (connection.target === id) return false
|
||||
const edges = useWorkflowStore.getState().edges
|
||||
return !wouldCreateCycle(edges, connection.source!, connection.target!)
|
||||
}}
|
||||
isValidConnection={(connection) => connection.target !== id}
|
||||
/>
|
||||
)
|
||||
})}
|
||||
@@ -1074,11 +1064,7 @@ export const WorkflowBlock = memo(function WorkflowBlock({
|
||||
data-handleid='error'
|
||||
isConnectableStart={true}
|
||||
isConnectableEnd={false}
|
||||
isValidConnection={(connection) => {
|
||||
if (connection.target === id) return false
|
||||
const edges = useWorkflowStore.getState().edges
|
||||
return !wouldCreateCycle(edges, connection.source!, connection.target!)
|
||||
}}
|
||||
isValidConnection={(connection) => connection.target !== id}
|
||||
/>
|
||||
</>
|
||||
)}
|
||||
@@ -1095,11 +1081,7 @@ export const WorkflowBlock = memo(function WorkflowBlock({
|
||||
data-handleid='source'
|
||||
isConnectableStart={true}
|
||||
isConnectableEnd={false}
|
||||
isValidConnection={(connection) => {
|
||||
if (connection.target === id) return false
|
||||
const edges = useWorkflowStore.getState().edges
|
||||
return !wouldCreateCycle(edges, connection.source!, connection.target!)
|
||||
}}
|
||||
isValidConnection={(connection) => connection.target !== id}
|
||||
/>
|
||||
|
||||
{shouldShowDefaultHandles && (
|
||||
@@ -1118,11 +1100,7 @@ export const WorkflowBlock = memo(function WorkflowBlock({
|
||||
data-handleid='error'
|
||||
isConnectableStart={true}
|
||||
isConnectableEnd={false}
|
||||
isValidConnection={(connection) => {
|
||||
if (connection.target === id) return false
|
||||
const edges = useWorkflowStore.getState().edges
|
||||
return !wouldCreateCycle(edges, connection.source!, connection.target!)
|
||||
}}
|
||||
isValidConnection={(connection) => connection.target !== id}
|
||||
/>
|
||||
)}
|
||||
</>
|
||||
|
||||
@@ -1642,6 +1642,11 @@ const WorkflowContent = React.memo(() => {
|
||||
const onConnect = useCallback(
|
||||
(connection: any) => {
|
||||
if (connection.source && connection.target) {
|
||||
// Prevent self-connections
|
||||
if (connection.source === connection.target) {
|
||||
return
|
||||
}
|
||||
|
||||
// Check if connecting nodes across container boundaries
|
||||
const sourceNode = getNodes().find((n) => n.id === connection.source)
|
||||
const targetNode = getNodes().find((n) => n.id === connection.target)
|
||||
|
||||
@@ -9,3 +9,4 @@ export { MCP } from './mcp/mcp'
|
||||
export { SSO } from './sso/sso'
|
||||
export { Subscription } from './subscription/subscription'
|
||||
export { TeamManagement } from './team-management/team-management'
|
||||
export { WorkflowMcpServers } from './workflow-mcp-servers/workflow-mcp-servers'
|
||||
|
||||
@@ -0,0 +1,591 @@
|
||||
'use client'
|
||||
|
||||
import { useCallback, useMemo, useState } from 'react'
|
||||
import { Check, ChevronLeft, Clipboard, Globe, Plus, Search, Server, Trash2 } from 'lucide-react'
|
||||
import { useParams } from 'next/navigation'
|
||||
import {
|
||||
Badge,
|
||||
Button,
|
||||
Input as EmcnInput,
|
||||
Modal,
|
||||
ModalBody,
|
||||
ModalContent,
|
||||
ModalFooter,
|
||||
ModalHeader,
|
||||
} from '@/components/emcn'
|
||||
import { Input, Skeleton } from '@/components/ui'
|
||||
import { cn } from '@/lib/core/utils/cn'
|
||||
import { getBaseUrl } from '@/lib/core/utils/urls'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import {
|
||||
useCreateWorkflowMcpServer,
|
||||
useDeleteWorkflowMcpServer,
|
||||
useDeleteWorkflowMcpTool,
|
||||
usePublishWorkflowMcpServer,
|
||||
useUnpublishWorkflowMcpServer,
|
||||
useWorkflowMcpServer,
|
||||
useWorkflowMcpServers,
|
||||
type WorkflowMcpServer,
|
||||
type WorkflowMcpTool,
|
||||
} from '@/hooks/queries/workflow-mcp-servers'
|
||||
|
||||
const logger = createLogger('WorkflowMcpServers')
|
||||
|
||||
function ServerSkeleton() {
|
||||
return (
|
||||
<div className='flex items-center justify-between gap-[12px] rounded-[8px] border bg-[var(--surface-3)] p-[12px]'>
|
||||
<div className='flex min-w-0 flex-col justify-center gap-[4px]'>
|
||||
<Skeleton className='h-[14px] w-[120px]' />
|
||||
<Skeleton className='h-[12px] w-[80px]' />
|
||||
</div>
|
||||
<Skeleton className='h-[28px] w-[60px] rounded-[4px]' />
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
interface ServerListItemProps {
|
||||
server: WorkflowMcpServer
|
||||
onViewDetails: () => void
|
||||
onDelete: () => void
|
||||
isDeleting: boolean
|
||||
}
|
||||
|
||||
function ServerListItem({ server, onViewDetails, onDelete, isDeleting }: ServerListItemProps) {
|
||||
return (
|
||||
<div
|
||||
className='flex items-center justify-between gap-[12px] rounded-[8px] border bg-[var(--surface-3)] p-[12px] transition-colors hover:bg-[var(--surface-4)]'
|
||||
role='button'
|
||||
tabIndex={0}
|
||||
onClick={onViewDetails}
|
||||
onKeyDown={(e) => e.key === 'Enter' && onViewDetails()}
|
||||
>
|
||||
<div className='flex min-w-0 flex-1 items-center gap-[10px]'>
|
||||
<Server className='h-[16px] w-[16px] flex-shrink-0 text-[var(--text-tertiary)]' />
|
||||
<div className='flex min-w-0 flex-col gap-[2px]'>
|
||||
<div className='flex items-center gap-[8px]'>
|
||||
<span className='truncate font-medium text-[14px] text-[var(--text-primary)]'>
|
||||
{server.name}
|
||||
</span>
|
||||
{server.isPublished && (
|
||||
<Badge variant='outline' className='flex-shrink-0 text-[10px]'>
|
||||
<Globe className='mr-[4px] h-[10px] w-[10px]' />
|
||||
Published
|
||||
</Badge>
|
||||
)}
|
||||
</div>
|
||||
<span className='text-[12px] text-[var(--text-tertiary)]'>
|
||||
{server.toolCount || 0} tool{(server.toolCount || 0) !== 1 ? 's' : ''}
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
<Button
|
||||
variant='ghost'
|
||||
onClick={(e) => {
|
||||
e.stopPropagation()
|
||||
onDelete()
|
||||
}}
|
||||
disabled={isDeleting}
|
||||
className='h-[28px] px-[8px]'
|
||||
>
|
||||
{isDeleting ? 'Deleting...' : 'Delete'}
|
||||
</Button>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
interface ServerDetailViewProps {
|
||||
workspaceId: string
|
||||
serverId: string
|
||||
onBack: () => void
|
||||
}
|
||||
|
||||
function ServerDetailView({ workspaceId, serverId, onBack }: ServerDetailViewProps) {
|
||||
const { data, isLoading, error } = useWorkflowMcpServer(workspaceId, serverId)
|
||||
const publishMutation = usePublishWorkflowMcpServer()
|
||||
const unpublishMutation = useUnpublishWorkflowMcpServer()
|
||||
const deleteToolMutation = useDeleteWorkflowMcpTool()
|
||||
const [copiedUrl, setCopiedUrl] = useState(false)
|
||||
const [toolToDelete, setToolToDelete] = useState<WorkflowMcpTool | null>(null)
|
||||
|
||||
const mcpServerUrl = useMemo(() => {
|
||||
if (!data?.server?.isPublished) return null
|
||||
return `${getBaseUrl()}/api/mcp/serve/${serverId}/sse`
|
||||
}, [data?.server?.isPublished, serverId])
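// Example of the published URL shape (hypothetical host and server id, following the template
// above): https://sim.example.com/api/mcp/serve/srv_abc123/sse — the endpoint external MCP
// clients such as Cursor or Claude Desktop are pointed at once the server is published.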
|
||||
|
||||
const handlePublish = async () => {
|
||||
try {
|
||||
await publishMutation.mutateAsync({ workspaceId, serverId })
|
||||
} catch (error) {
|
||||
logger.error('Failed to publish server:', error)
|
||||
}
|
||||
}
|
||||
|
||||
const handleUnpublish = async () => {
|
||||
try {
|
||||
await unpublishMutation.mutateAsync({ workspaceId, serverId })
|
||||
} catch (error) {
|
||||
logger.error('Failed to unpublish server:', error)
|
||||
}
|
||||
}
|
||||
|
||||
const handleCopyUrl = () => {
|
||||
if (mcpServerUrl) {
|
||||
navigator.clipboard.writeText(mcpServerUrl)
|
||||
setCopiedUrl(true)
|
||||
setTimeout(() => setCopiedUrl(false), 2000)
|
||||
}
|
||||
}
|
||||
|
||||
const handleDeleteTool = async () => {
|
||||
if (!toolToDelete) return
|
||||
try {
|
||||
await deleteToolMutation.mutateAsync({
|
||||
workspaceId,
|
||||
serverId,
|
||||
toolId: toolToDelete.id,
|
||||
})
|
||||
setToolToDelete(null)
|
||||
} catch (error) {
|
||||
logger.error('Failed to delete tool:', error)
|
||||
}
|
||||
}
|
||||
|
||||
if (isLoading) {
|
||||
return (
|
||||
<div className='flex h-full flex-col gap-[16px]'>
|
||||
<Skeleton className='h-[24px] w-[200px]' />
|
||||
<Skeleton className='h-[100px] w-full' />
|
||||
<Skeleton className='h-[150px] w-full' />
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
if (error || !data) {
|
||||
return (
|
||||
<div className='flex h-full flex-col items-center justify-center gap-[8px]'>
|
||||
<p className='text-[13px] text-[var(--text-error)]'>Failed to load server details</p>
|
||||
<Button variant='default' onClick={onBack}>
|
||||
Go Back
|
||||
</Button>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
const { server, tools } = data
|
||||
|
||||
return (
|
||||
<>
|
||||
<div className='flex h-full flex-col gap-[16px]'>
|
||||
<div className='min-h-0 flex-1 overflow-y-auto'>
|
||||
<div className='flex flex-col gap-[16px]'>
|
||||
<div className='flex flex-col gap-[8px]'>
|
||||
<span className='font-medium text-[13px] text-[var(--text-primary)]'>
|
||||
Server Name
|
||||
</span>
|
||||
<p className='text-[14px] text-[var(--text-secondary)]'>{server.name}</p>
|
||||
</div>
|
||||
|
||||
{server.description && (
|
||||
<div className='flex flex-col gap-[8px]'>
|
||||
<span className='font-medium text-[13px] text-[var(--text-primary)]'>
|
||||
Description
|
||||
</span>
|
||||
<p className='text-[14px] text-[var(--text-secondary)]'>{server.description}</p>
|
||||
</div>
|
||||
)}
|
||||
|
||||
<div className='flex flex-col gap-[8px]'>
|
||||
<span className='font-medium text-[13px] text-[var(--text-primary)]'>Status</span>
|
||||
<div className='flex items-center gap-[8px]'>
|
||||
{server.isPublished ? (
|
||||
<>
|
||||
<Badge variant='outline' className='text-[12px]'>
|
||||
<Globe className='mr-[4px] h-[12px] w-[12px]' />
|
||||
Published
|
||||
</Badge>
|
||||
<Button
|
||||
variant='ghost'
|
||||
onClick={handleUnpublish}
|
||||
disabled={unpublishMutation.isPending}
|
||||
className='h-[28px] text-[12px]'
|
||||
>
|
||||
{unpublishMutation.isPending ? 'Unpublishing...' : 'Unpublish'}
|
||||
</Button>
|
||||
</>
|
||||
) : (
|
||||
<>
|
||||
<span className='text-[14px] text-[var(--text-tertiary)]'>Not Published</span>
|
||||
<Button
|
||||
variant='default'
|
||||
onClick={handlePublish}
|
||||
disabled={publishMutation.isPending || tools.length === 0}
|
||||
className='h-[28px] text-[12px]'
|
||||
>
|
||||
{publishMutation.isPending ? 'Publishing...' : 'Publish'}
|
||||
</Button>
|
||||
</>
|
||||
)}
|
||||
</div>
|
||||
{publishMutation.isError && (
|
||||
<p className='text-[12px] text-[var(--text-error)]'>
|
||||
{publishMutation.error?.message || 'Failed to publish'}
|
||||
</p>
|
||||
)}
|
||||
</div>
|
||||
|
||||
{mcpServerUrl && (
|
||||
<div className='flex flex-col gap-[8px]'>
|
||||
<span className='font-medium text-[13px] text-[var(--text-primary)]'>
|
||||
MCP Server URL
|
||||
</span>
|
||||
<div className='flex items-center gap-[8px]'>
|
||||
<code className='flex-1 truncate rounded-[4px] bg-[var(--surface-5)] px-[8px] py-[6px] font-mono text-[12px] text-[var(--text-secondary)]'>
|
||||
{mcpServerUrl}
|
||||
</code>
|
||||
<Button variant='ghost' onClick={handleCopyUrl} className='h-[32px] w-[32px] p-0'>
|
||||
{copiedUrl ? (
|
||||
<Check className='h-[14px] w-[14px]' />
|
||||
) : (
|
||||
<Clipboard className='h-[14px] w-[14px]' />
|
||||
)}
|
||||
</Button>
|
||||
</div>
|
||||
<p className='text-[11px] text-[var(--text-tertiary)]'>
|
||||
Use this URL to connect external MCP clients like Cursor or Claude Desktop.
|
||||
</p>
|
||||
</div>
|
||||
)}
|
||||
|
||||
<div className='flex flex-col gap-[8px]'>
|
||||
<span className='font-medium text-[13px] text-[var(--text-primary)]'>
|
||||
Tools ({tools.length})
|
||||
</span>
|
||||
{tools.length === 0 ? (
|
||||
<p className='text-[13px] text-[var(--text-muted)]'>
|
||||
No tools added yet. Deploy a workflow and add it as a tool from the deploy modal.
|
||||
</p>
|
||||
) : (
|
||||
<div className='flex flex-col gap-[8px]'>
|
||||
{tools.map((tool) => (
|
||||
<div
|
||||
key={tool.id}
|
||||
className='flex items-center justify-between rounded-[6px] border bg-[var(--surface-3)] px-[10px] py-[8px]'
|
||||
>
|
||||
<div className='flex min-w-0 flex-col gap-[2px]'>
|
||||
<p className='font-medium text-[13px] text-[var(--text-primary)]'>
|
||||
{tool.toolName}
|
||||
</p>
|
||||
{tool.toolDescription && (
|
||||
<p className='truncate text-[12px] text-[var(--text-tertiary)]'>
|
||||
{tool.toolDescription}
|
||||
</p>
|
||||
)}
|
||||
{tool.workflowName && (
|
||||
<p className='text-[11px] text-[var(--text-muted)]'>
|
||||
Workflow: {tool.workflowName}
|
||||
</p>
|
||||
)}
|
||||
</div>
|
||||
<Button
|
||||
variant='ghost'
|
||||
onClick={() => setToolToDelete(tool)}
|
||||
className='h-[24px] w-[24px] p-0 text-[var(--text-tertiary)] hover:text-[var(--text-error)]'
|
||||
>
|
||||
<Trash2 className='h-[14px] w-[14px]' />
|
||||
</Button>
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className='mt-auto flex items-center justify-end'>
|
||||
<Button
|
||||
onClick={onBack}
|
||||
variant='primary'
|
||||
className='!bg-[var(--brand-tertiary-2)] !text-[var(--text-inverse)] hover:!bg-[var(--brand-tertiary-2)]/90'
|
||||
>
|
||||
<ChevronLeft className='mr-[4px] h-[14px] w-[14px]' />
|
||||
Back
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<Modal open={!!toolToDelete} onOpenChange={(open) => !open && setToolToDelete(null)}>
|
||||
<ModalContent className='w-[400px]'>
|
||||
<ModalHeader>Remove Tool</ModalHeader>
|
||||
<ModalBody>
|
||||
<p className='text-[12px] text-[var(--text-tertiary)]'>
|
||||
Are you sure you want to remove{' '}
|
||||
<span className='font-medium text-[var(--text-primary)]'>
|
||||
{toolToDelete?.toolName}
|
||||
</span>{' '}
|
||||
from this server?
|
||||
</p>
|
||||
</ModalBody>
|
||||
<ModalFooter>
|
||||
<Button variant='default' onClick={() => setToolToDelete(null)}>
|
||||
Cancel
|
||||
</Button>
|
||||
<Button
|
||||
variant='primary'
|
||||
onClick={handleDeleteTool}
|
||||
disabled={deleteToolMutation.isPending}
|
||||
className='!bg-[var(--text-error)] !text-white hover:!bg-[var(--text-error)]/90'
|
||||
>
|
||||
{deleteToolMutation.isPending ? 'Removing...' : 'Remove'}
|
||||
</Button>
|
||||
</ModalFooter>
|
||||
</ModalContent>
|
||||
</Modal>
|
||||
</>
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Workflow MCP Servers settings component.
|
||||
* Allows users to create and manage MCP servers that expose workflows as tools.
|
||||
*/
|
||||
export function WorkflowMcpServers() {
|
||||
const params = useParams()
|
||||
const workspaceId = params.workspaceId as string
|
||||
|
||||
const { data: servers = [], isLoading, error } = useWorkflowMcpServers(workspaceId)
|
||||
const createServerMutation = useCreateWorkflowMcpServer()
|
||||
const deleteServerMutation = useDeleteWorkflowMcpServer()
|
||||
|
||||
const [searchTerm, setSearchTerm] = useState('')
|
||||
const [showAddForm, setShowAddForm] = useState(false)
|
||||
const [formData, setFormData] = useState({ name: '', description: '' })
|
||||
const [selectedServerId, setSelectedServerId] = useState<string | null>(null)
|
||||
const [serverToDelete, setServerToDelete] = useState<WorkflowMcpServer | null>(null)
|
||||
const [deletingServers, setDeletingServers] = useState<Set<string>>(new Set())
|
||||
|
||||
const filteredServers = useMemo(() => {
|
||||
if (!searchTerm.trim()) return servers
|
||||
const search = searchTerm.toLowerCase()
|
||||
return servers.filter(
|
||||
(server) =>
|
||||
server.name.toLowerCase().includes(search) ||
|
||||
server.description?.toLowerCase().includes(search)
|
||||
)
|
||||
}, [servers, searchTerm])
|
||||
|
||||
const resetForm = useCallback(() => {
|
||||
setFormData({ name: '', description: '' })
|
||||
setShowAddForm(false)
|
||||
}, [])
|
||||
|
||||
const handleCreateServer = async () => {
|
||||
if (!formData.name.trim()) return
|
||||
|
||||
try {
|
||||
await createServerMutation.mutateAsync({
|
||||
workspaceId,
|
||||
name: formData.name.trim(),
|
||||
description: formData.description.trim() || undefined,
|
||||
})
|
||||
resetForm()
|
||||
} catch (error) {
|
||||
logger.error('Failed to create server:', error)
|
||||
}
|
||||
}
|
||||
|
||||
const handleDeleteServer = async () => {
|
||||
if (!serverToDelete) return
|
||||
|
||||
setDeletingServers((prev) => new Set(prev).add(serverToDelete.id))
|
||||
setServerToDelete(null)
|
||||
|
||||
try {
|
||||
await deleteServerMutation.mutateAsync({
|
||||
workspaceId,
|
||||
serverId: serverToDelete.id,
|
||||
})
|
||||
} catch (error) {
|
||||
logger.error('Failed to delete server:', error)
|
||||
} finally {
|
||||
setDeletingServers((prev) => {
|
||||
const next = new Set(prev)
|
||||
next.delete(serverToDelete.id)
|
||||
return next
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
const hasServers = servers.length > 0
|
||||
const showEmptyState = !hasServers && !showAddForm
|
||||
const showNoResults = searchTerm.trim() && filteredServers.length === 0 && hasServers
|
||||
const isFormValid = formData.name.trim().length > 0
|
||||
|
||||
// Show detail view if a server is selected
|
||||
if (selectedServerId) {
|
||||
return (
|
||||
<ServerDetailView
|
||||
workspaceId={workspaceId}
|
||||
serverId={selectedServerId}
|
||||
onBack={() => setSelectedServerId(null)}
|
||||
/>
|
||||
)
|
||||
}
|
||||
|
||||
return (
|
||||
<>
|
||||
<div className='flex h-full flex-col gap-[16px]'>
|
||||
<div className='flex items-center gap-[8px]'>
|
||||
<div
|
||||
className={cn(
|
||||
'flex flex-1 items-center gap-[8px] rounded-[8px] border bg-[var(--surface-6)] px-[8px] py-[5px]',
|
||||
isLoading && 'opacity-50'
|
||||
)}
|
||||
>
|
||||
<Search
|
||||
className='h-[14px] w-[14px] flex-shrink-0 text-[var(--text-tertiary)]'
|
||||
strokeWidth={2}
|
||||
/>
|
||||
<Input
|
||||
placeholder='Search servers...'
|
||||
value={searchTerm}
|
||||
onChange={(e) => setSearchTerm(e.target.value)}
|
||||
disabled={isLoading}
|
||||
className='h-auto flex-1 border-0 bg-transparent p-0 font-base leading-none placeholder:text-[var(--text-tertiary)] focus-visible:ring-0 focus-visible:ring-offset-0 disabled:cursor-not-allowed disabled:opacity-100'
|
||||
/>
|
||||
</div>
|
||||
<Button
|
||||
onClick={() => setShowAddForm(true)}
|
||||
disabled={isLoading}
|
||||
variant='primary'
|
||||
className='!bg-[var(--brand-tertiary-2)] !text-[var(--text-inverse)] hover:!bg-[var(--brand-tertiary-2)]/90'
|
||||
>
|
||||
<Plus className='mr-[6px] h-[13px] w-[13px]' />
|
||||
Add
|
||||
</Button>
|
||||
</div>
|
||||
|
||||
{showAddForm && (
|
||||
<div className='rounded-[8px] border bg-[var(--surface-3)] p-[12px]'>
|
||||
<div className='flex flex-col gap-[12px]'>
|
||||
<div className='flex flex-col gap-[6px]'>
|
||||
<label
|
||||
htmlFor='mcp-server-name'
|
||||
className='font-medium text-[13px] text-[var(--text-secondary)]'
|
||||
>
|
||||
Server Name
|
||||
</label>
|
||||
<EmcnInput
|
||||
id='mcp-server-name'
|
||||
placeholder='e.g., My Workflow Tools'
|
||||
value={formData.name}
|
||||
onChange={(e) => setFormData((prev) => ({ ...prev, name: e.target.value }))}
|
||||
className='h-9'
|
||||
/>
|
||||
</div>
|
||||
|
||||
<div className='flex flex-col gap-[6px]'>
|
||||
<label
|
||||
htmlFor='mcp-server-description'
|
||||
className='font-medium text-[13px] text-[var(--text-secondary)]'
|
||||
>
|
||||
Description (optional)
|
||||
</label>
|
||||
<EmcnInput
|
||||
id='mcp-server-description'
|
||||
placeholder='Describe what this server provides...'
|
||||
value={formData.description}
|
||||
onChange={(e) =>
|
||||
setFormData((prev) => ({ ...prev, description: e.target.value }))
|
||||
}
|
||||
className='h-9'
|
||||
/>
|
||||
</div>
|
||||
|
||||
<div className='flex items-center justify-end gap-[8px] pt-[4px]'>
|
||||
<Button variant='ghost' onClick={resetForm}>
|
||||
Cancel
|
||||
</Button>
|
||||
<Button
|
||||
onClick={handleCreateServer}
|
||||
disabled={!isFormValid || createServerMutation.isPending}
|
||||
className='!bg-[var(--brand-tertiary-2)] !text-[var(--text-inverse)] hover:!bg-[var(--brand-tertiary-2)]/90'
|
||||
>
|
||||
{createServerMutation.isPending ? 'Creating...' : 'Create Server'}
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
<div className='min-h-0 flex-1 overflow-y-auto'>
|
||||
{error ? (
|
||||
<div className='flex h-full flex-col items-center justify-center gap-[8px]'>
|
||||
<p className='text-[#DC2626] text-[11px] leading-tight dark:text-[#F87171]'>
|
||||
{error instanceof Error ? error.message : 'Failed to load servers'}
|
||||
</p>
|
||||
</div>
|
||||
) : isLoading ? (
|
||||
<div className='flex flex-col gap-[8px]'>
|
||||
<ServerSkeleton />
|
||||
<ServerSkeleton />
|
||||
</div>
|
||||
) : showEmptyState ? (
|
||||
<div className='flex h-full flex-col items-center justify-center gap-[8px] text-center'>
|
||||
<Server className='h-[32px] w-[32px] text-[var(--text-muted)]' />
|
||||
<p className='text-[13px] text-[var(--text-muted)]'>
|
||||
No workflow MCP servers yet.
|
||||
<br />
|
||||
Create one to expose your workflows as MCP tools.
|
||||
</p>
|
||||
</div>
|
||||
) : (
|
||||
<div className='flex flex-col gap-[8px]'>
|
||||
{filteredServers.map((server) => (
|
||||
<ServerListItem
|
||||
key={server.id}
|
||||
server={server}
|
||||
onViewDetails={() => setSelectedServerId(server.id)}
|
||||
onDelete={() => setServerToDelete(server)}
|
||||
isDeleting={deletingServers.has(server.id)}
|
||||
/>
|
||||
))}
|
||||
{showNoResults && (
|
||||
<div className='py-[16px] text-center text-[13px] text-[var(--text-muted)]'>
|
||||
No servers found matching "{searchTerm}"
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<Modal open={!!serverToDelete} onOpenChange={(open) => !open && setServerToDelete(null)}>
|
||||
<ModalContent className='w-[400px]'>
|
||||
<ModalHeader>Delete MCP Server</ModalHeader>
|
||||
<ModalBody>
|
||||
<p className='text-[12px] text-[var(--text-tertiary)]'>
|
||||
Are you sure you want to delete{' '}
|
||||
<span className='font-medium text-[var(--text-primary)]'>{serverToDelete?.name}</span>
|
||||
?{' '}
|
||||
<span className='text-[var(--text-error)]'>
|
||||
This will remove all tools and cannot be undone.
|
||||
</span>
|
||||
</p>
|
||||
</ModalBody>
|
||||
<ModalFooter>
|
||||
<Button variant='default' onClick={() => setServerToDelete(null)}>
|
||||
Cancel
|
||||
</Button>
|
||||
<Button
|
||||
variant='primary'
|
||||
onClick={handleDeleteServer}
|
||||
className='!bg-[var(--text-error)] !text-white hover:!bg-[var(--text-error)]/90'
|
||||
>
|
||||
Delete
|
||||
</Button>
|
||||
</ModalFooter>
|
||||
</ModalContent>
|
||||
</Modal>
|
||||
</>
|
||||
)
|
||||
}
|
||||
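A note on the deletion flow in the component above: it keeps an in-flight set (deletingServers) separate from the shared TanStack Query mutation state, so each row can show its own pending indicator while one mutation object is reused. A minimal sketch of that pattern with illustrative names (not code from this diff):

// Sketch only: per-item pending state layered over a shared async action.
import { useState } from 'react'

export function usePerItemPending(run: (id: string) => Promise<void>) {
  const [pending, setPending] = useState<Set<string>>(new Set())

  const runFor = async (id: string) => {
    setPending((prev) => new Set(prev).add(id))
    try {
      await run(id)
    } finally {
      setPending((prev) => {
        const next = new Set(prev)
        next.delete(id)
        return next
      })
    }
  }

  return { runFor, isPending: (id: string) => pending.has(id) }
}
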
@@ -4,7 +4,7 @@ import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
import * as DialogPrimitive from '@radix-ui/react-dialog'
import * as VisuallyHidden from '@radix-ui/react-visually-hidden'
import { useQueryClient } from '@tanstack/react-query'
import { Files, LogIn, Settings, User, Users, Wrench } from 'lucide-react'
import { Files, LogIn, Server, Settings, User, Users, Wrench } from 'lucide-react'
import {
Card,
Connections,
@@ -40,6 +40,7 @@ import {
SSO,
Subscription,
TeamManagement,
WorkflowMcpServers,
} from '@/app/workspace/[workspaceId]/w/components/sidebar/components/settings-modal/components'
import { TemplateProfile } from '@/app/workspace/[workspaceId]/w/components/sidebar/components/settings-modal/components/template-profile/template-profile'
import { generalSettingsKeys, useGeneralSettings } from '@/hooks/queries/general-settings'
@@ -69,6 +70,7 @@ type SettingsSection =
| 'copilot'
| 'mcp'
| 'custom-tools'
| 'workflow-mcp-servers'

type NavigationSection = 'account' | 'subscription' | 'tools' | 'system'

@@ -112,6 +114,7 @@ const allNavigationItems: NavigationItem[] = [
{ id: 'integrations', label: 'Integrations', icon: Connections, section: 'tools' },
{ id: 'custom-tools', label: 'Custom Tools', icon: Wrench, section: 'tools' },
{ id: 'mcp', label: 'MCPs', icon: McpIcon, section: 'tools' },
{ id: 'workflow-mcp-servers', label: 'Workflow MCP Servers', icon: Server, section: 'tools' },
{ id: 'environment', label: 'Environment', icon: FolderCode, section: 'system' },
{ id: 'apikeys', label: 'API Keys', icon: Key, section: 'system' },
{
@@ -459,6 +462,7 @@ export function SettingsModal({ open, onOpenChange }: SettingsModalProps) {
{activeSection === 'copilot' && <Copilot />}
{activeSection === 'mcp' && <MCP initialServerId={pendingMcpServerId} />}
{activeSection === 'custom-tools' && <CustomTools />}
{activeSection === 'workflow-mcp-servers' && <WorkflowMcpServers />}
</SModalMainBody>
</SModalMain>
</SModalContent>

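The settings-modal hunk above follows the existing pattern for registering a section: extend the SettingsSection union, append a NavigationItem, and render the component when it is active. A compressed sketch of those three touch points (shapes simplified, not the real modal types):

// Illustrative only - simplified shapes mirroring the registration pattern above.
type SettingsSection = 'mcp' | 'custom-tools' | 'workflow-mcp-servers'

interface NavigationItem {
  id: SettingsSection
  label: string
  section: 'tools' | 'system'
}

const navigationItems: NavigationItem[] = [
  { id: 'custom-tools', label: 'Custom Tools', section: 'tools' },
  { id: 'mcp', label: 'MCPs', section: 'tools' },
  { id: 'workflow-mcp-servers', label: 'Workflow MCP Servers', section: 'tools' },
]

// In the modal body: {activeSection === 'workflow-mcp-servers' && <WorkflowMcpServers />}
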
@@ -1,6 +1,9 @@
|
||||
import { useCallback, useState } from 'react'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { exportWorkspaceToZip } from '@/lib/workflows/operations/import-export'
|
||||
import {
|
||||
exportWorkspaceToZip,
|
||||
type WorkflowExportData,
|
||||
} from '@/lib/workflows/operations/import-export'
|
||||
|
||||
const logger = createLogger('useExportWorkspace')
|
||||
|
||||
@@ -15,7 +18,8 @@ interface UseExportWorkspaceProps {
|
||||
* Hook for managing workspace export to ZIP.
|
||||
*
|
||||
* Handles:
|
||||
* - Fetching all workflows and folders from workspace via bulk export endpoint
|
||||
* - Fetching all workflows and folders from workspace
|
||||
* - Fetching workflow states and variables
|
||||
* - Creating ZIP file with all workspace data
|
||||
* - Downloading the ZIP file
|
||||
* - Loading state management
|
||||
@@ -38,13 +42,74 @@ export function useExportWorkspace({ onSuccess }: UseExportWorkspaceProps = {})
|
||||
try {
|
||||
logger.info('Exporting workspace', { workspaceId })
|
||||
|
||||
// Single API call to get all workspace data (workflows with states + folders)
|
||||
const response = await fetch(`/api/workspaces/${workspaceId}/export`)
|
||||
if (!response.ok) {
|
||||
throw new Error('Failed to export workspace')
|
||||
// Fetch all workflows in workspace
|
||||
const workflowsResponse = await fetch(`/api/workflows?workspaceId=${workspaceId}`)
|
||||
if (!workflowsResponse.ok) {
|
||||
throw new Error('Failed to fetch workflows')
|
||||
}
|
||||
const { data: workflows } = await workflowsResponse.json()
|
||||
|
||||
// Fetch all folders in workspace
|
||||
const foldersResponse = await fetch(`/api/folders?workspaceId=${workspaceId}`)
|
||||
if (!foldersResponse.ok) {
|
||||
throw new Error('Failed to fetch folders')
|
||||
}
|
||||
const foldersData = await foldersResponse.json()
|
||||
|
||||
// Export each workflow
|
||||
const workflowsToExport: WorkflowExportData[] = []
|
||||
|
||||
for (const workflow of workflows) {
|
||||
try {
|
||||
const workflowResponse = await fetch(`/api/workflows/${workflow.id}`)
|
||||
if (!workflowResponse.ok) {
|
||||
logger.warn(`Failed to fetch workflow ${workflow.id}`)
|
||||
continue
|
||||
}
|
||||
|
||||
const { data: workflowData } = await workflowResponse.json()
|
||||
if (!workflowData?.state) {
|
||||
logger.warn(`Workflow ${workflow.id} has no state`)
|
||||
continue
|
||||
}
|
||||
|
||||
const variablesResponse = await fetch(`/api/workflows/${workflow.id}/variables`)
|
||||
let workflowVariables: any[] = []
|
||||
if (variablesResponse.ok) {
|
||||
const variablesData = await variablesResponse.json()
|
||||
workflowVariables = Object.values(variablesData?.data || {}).map((v: any) => ({
|
||||
id: v.id,
|
||||
name: v.name,
|
||||
type: v.type,
|
||||
value: v.value,
|
||||
}))
|
||||
}
|
||||
|
||||
workflowsToExport.push({
|
||||
workflow: {
|
||||
id: workflow.id,
|
||||
name: workflow.name,
|
||||
description: workflow.description,
|
||||
color: workflow.color,
|
||||
folderId: workflow.folderId,
|
||||
},
|
||||
state: workflowData.state,
|
||||
variables: workflowVariables,
|
||||
})
|
||||
} catch (error) {
|
||||
logger.error(`Failed to export workflow ${workflow.id}:`, error)
|
||||
}
|
||||
}
|
||||
|
||||
const { workflows: workflowsToExport, folders: foldersToExport } = await response.json()
|
||||
const foldersToExport: Array<{
|
||||
id: string
|
||||
name: string
|
||||
parentId: string | null
|
||||
}> = (foldersData.folders || []).map((folder: any) => ({
|
||||
id: folder.id,
|
||||
name: folder.name,
|
||||
parentId: folder.parentId,
|
||||
}))
|
||||
|
||||
const zipBlob = await exportWorkspaceToZip(
|
||||
workspaceName,
|
||||
|
||||
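For context on the export hunk above: each workflow is bundled into a WorkflowExportData entry built from the workflow row, its deployed state, and its variables. The interface below is reconstructed from the fields pushed in that loop, not copied from '@/lib/workflows/operations/import-export', so treat it as an approximation; the variables fetch mirrors the endpoint used in the diff:

// Sketch of the per-workflow export shape assembled above (interface is a reconstruction).
interface WorkflowExportData {
  workflow: {
    id: string
    name: string
    description?: string
    color?: string
    folderId?: string | null
  }
  state: unknown
  variables: Array<{ id: string; name: string; type: string; value: unknown }>
}

async function fetchWorkflowVariables(workflowId: string): Promise<WorkflowExportData['variables']> {
  const res = await fetch(`/api/workflows/${workflowId}/variables`)
  if (!res.ok) return []
  const body = await res.json()
  // The variables endpoint returns a keyed object; flatten it to the exported array shape.
  return Object.values(body?.data ?? {}).map((v: any) => ({
    id: v.id,
    name: v.name,
    type: v.type,
    value: v.value,
  }))
}
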
@@ -14,7 +14,7 @@ export type WorkflowExecutionPayload = {
workflowId: string
userId: string
input?: any
triggerType?: 'api' | 'webhook' | 'schedule' | 'manual' | 'chat'
triggerType?: 'api' | 'webhook' | 'schedule' | 'manual' | 'chat' | 'mcp'
metadata?: Record<string, any>
}

@@ -4,13 +4,14 @@ import type { BlockConfig } from '@/blocks/types'
interface ConditionBlockOutput {
success: boolean
output: {
content: string
conditionResult: boolean
selectedPath: {
blockId: string
blockType: string
blockTitle: string
}
selectedOption: string
selectedConditionId: string
}
}

@@ -39,8 +40,9 @@ export const ConditionBlock: BlockConfig<ConditionBlockOutput> = {
},
inputs: {},
outputs: {
content: { type: 'string', description: 'Condition evaluation content' },
conditionResult: { type: 'boolean', description: 'Condition result' },
selectedPath: { type: 'json', description: 'Selected execution path' },
selectedOption: { type: 'string', description: 'Selected condition option ID' },
selectedConditionId: { type: 'string', description: 'Selected condition identifier' },
},
}

@@ -155,6 +155,15 @@ export const ScheduleBlock: BlockConfig = {
condition: { field: 'scheduleType', value: ['minutes', 'hourly'], not: true },
},

{
id: 'inputFormat',
title: 'Input Format',
type: 'input-format',
description:
'Define input parameters that will be available when the schedule triggers. Use Value to set default values for scheduled executions.',
mode: 'trigger',
},

{
id: 'scheduleSave',
type: 'schedule-save',

@@ -1,13 +1,15 @@
|
||||
import { ServiceNowIcon } from '@/components/icons'
|
||||
import type { BlockConfig } from '@/blocks/types'
|
||||
import { AuthMode } from '@/blocks/types'
|
||||
import type { ServiceNowResponse } from '@/tools/servicenow/types'
|
||||
|
||||
export const ServiceNowBlock: BlockConfig<ServiceNowResponse> = {
|
||||
type: 'servicenow',
|
||||
name: 'ServiceNow',
|
||||
description: 'Create, read, update, and delete ServiceNow records',
|
||||
description: 'Create, read, update, delete, and bulk import ServiceNow records',
|
||||
authMode: AuthMode.OAuth,
|
||||
longDescription:
|
||||
'Integrate ServiceNow into your workflow. Create, read, update, and delete records in any ServiceNow table including incidents, tasks, change requests, users, and more.',
|
||||
'Integrate ServiceNow into your workflow. Can create, read, update, and delete records in any ServiceNow table (incidents, tasks, users, etc.). Supports bulk import operations for data migration and ETL.',
|
||||
docsLink: 'https://docs.sim.ai/tools/servicenow',
|
||||
category: 'tools',
|
||||
bgColor: '#032D42',
|
||||
@@ -19,12 +21,12 @@ export const ServiceNowBlock: BlockConfig<ServiceNowResponse> = {
|
||||
title: 'Operation',
|
||||
type: 'dropdown',
|
||||
options: [
|
||||
{ label: 'Create Record', id: 'servicenow_create_record' },
|
||||
{ label: 'Read Records', id: 'servicenow_read_record' },
|
||||
{ label: 'Update Record', id: 'servicenow_update_record' },
|
||||
{ label: 'Delete Record', id: 'servicenow_delete_record' },
|
||||
{ label: 'Create Record', id: 'create' },
|
||||
{ label: 'Read Records', id: 'read' },
|
||||
{ label: 'Update Record', id: 'update' },
|
||||
{ label: 'Delete Record', id: 'delete' },
|
||||
],
|
||||
value: () => 'servicenow_read_record',
|
||||
value: () => 'read',
|
||||
},
|
||||
// Instance URL
|
||||
{
|
||||
@@ -33,26 +35,17 @@ export const ServiceNowBlock: BlockConfig<ServiceNowResponse> = {
|
||||
type: 'short-input',
|
||||
placeholder: 'https://instance.service-now.com',
|
||||
required: true,
|
||||
description: 'Your ServiceNow instance URL (e.g., https://yourcompany.service-now.com)',
|
||||
description: 'Your ServiceNow instance URL',
|
||||
},
|
||||
// Username
|
||||
// OAuth Credential
|
||||
{
|
||||
id: 'username',
|
||||
title: 'Username',
|
||||
type: 'short-input',
|
||||
placeholder: 'Enter your ServiceNow username',
|
||||
id: 'credential',
|
||||
title: 'ServiceNow Account',
|
||||
type: 'oauth-input',
|
||||
serviceId: 'servicenow',
|
||||
requiredScopes: ['useraccount'],
|
||||
placeholder: 'Select ServiceNow account',
|
||||
required: true,
|
||||
description: 'ServiceNow user with web service access',
|
||||
},
|
||||
// Password
|
||||
{
|
||||
id: 'password',
|
||||
title: 'Password',
|
||||
type: 'short-input',
|
||||
placeholder: 'Enter your ServiceNow password',
|
||||
password: true,
|
||||
required: true,
|
||||
description: 'Password for the ServiceNow user',
|
||||
},
|
||||
// Table Name
|
||||
{
|
||||
@@ -70,7 +63,7 @@ export const ServiceNowBlock: BlockConfig<ServiceNowResponse> = {
|
||||
type: 'code',
|
||||
language: 'json',
|
||||
placeholder: '{\n "short_description": "Issue description",\n "priority": "1"\n}',
|
||||
condition: { field: 'operation', value: 'servicenow_create_record' },
|
||||
condition: { field: 'operation', value: 'create' },
|
||||
required: true,
|
||||
wandConfig: {
|
||||
enabled: true,
|
||||
@@ -103,21 +96,21 @@ Output: {"short_description": "Network outage", "description": "Network connecti
|
||||
title: 'Record sys_id',
|
||||
type: 'short-input',
|
||||
placeholder: 'Specific record sys_id (optional)',
|
||||
condition: { field: 'operation', value: 'servicenow_read_record' },
|
||||
condition: { field: 'operation', value: 'read' },
|
||||
},
|
||||
{
|
||||
id: 'number',
|
||||
title: 'Record Number',
|
||||
type: 'short-input',
|
||||
placeholder: 'e.g., INC0010001 (optional)',
|
||||
condition: { field: 'operation', value: 'servicenow_read_record' },
|
||||
condition: { field: 'operation', value: 'read' },
|
||||
},
|
||||
{
|
||||
id: 'query',
|
||||
title: 'Query String',
|
||||
type: 'short-input',
|
||||
placeholder: 'active=true^priority=1',
|
||||
condition: { field: 'operation', value: 'servicenow_read_record' },
|
||||
condition: { field: 'operation', value: 'read' },
|
||||
description: 'ServiceNow encoded query string',
|
||||
},
|
||||
{
|
||||
@@ -125,14 +118,14 @@ Output: {"short_description": "Network outage", "description": "Network connecti
|
||||
title: 'Limit',
|
||||
type: 'short-input',
|
||||
placeholder: '10',
|
||||
condition: { field: 'operation', value: 'servicenow_read_record' },
|
||||
condition: { field: 'operation', value: 'read' },
|
||||
},
|
||||
{
|
||||
id: 'fields',
|
||||
title: 'Fields to Return',
|
||||
type: 'short-input',
|
||||
placeholder: 'number,short_description,priority',
|
||||
condition: { field: 'operation', value: 'servicenow_read_record' },
|
||||
condition: { field: 'operation', value: 'read' },
|
||||
description: 'Comma-separated list of fields',
|
||||
},
|
||||
// Update-specific: sysId and fields
|
||||
@@ -141,7 +134,7 @@ Output: {"short_description": "Network outage", "description": "Network connecti
|
||||
title: 'Record sys_id',
|
||||
type: 'short-input',
|
||||
placeholder: 'Record sys_id to update',
|
||||
condition: { field: 'operation', value: 'servicenow_update_record' },
|
||||
condition: { field: 'operation', value: 'update' },
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
@@ -150,7 +143,7 @@ Output: {"short_description": "Network outage", "description": "Network connecti
|
||||
type: 'code',
|
||||
language: 'json',
|
||||
placeholder: '{\n "state": "2",\n "assigned_to": "user.sys_id"\n}',
|
||||
condition: { field: 'operation', value: 'servicenow_update_record' },
|
||||
condition: { field: 'operation', value: 'update' },
|
||||
required: true,
|
||||
wandConfig: {
|
||||
enabled: true,
|
||||
@@ -182,7 +175,7 @@ Output: {"state": "2", "assigned_to": "john.doe", "work_notes": "Assigned and st
|
||||
title: 'Record sys_id',
|
||||
type: 'short-input',
|
||||
placeholder: 'Record sys_id to delete',
|
||||
condition: { field: 'operation', value: 'servicenow_delete_record' },
|
||||
condition: { field: 'operation', value: 'delete' },
|
||||
required: true,
|
||||
},
|
||||
],
|
||||
@@ -194,26 +187,60 @@ Output: {"state": "2", "assigned_to": "john.doe", "work_notes": "Assigned and st
|
||||
'servicenow_delete_record',
|
||||
],
|
||||
config: {
|
||||
tool: (params) => params.operation,
|
||||
tool: (params) => {
|
||||
switch (params.operation) {
|
||||
case 'create':
|
||||
return 'servicenow_create_record'
|
||||
case 'read':
|
||||
return 'servicenow_read_record'
|
||||
case 'update':
|
||||
return 'servicenow_update_record'
|
||||
case 'delete':
|
||||
return 'servicenow_delete_record'
|
||||
default:
|
||||
throw new Error(`Invalid ServiceNow operation: ${params.operation}`)
|
||||
}
|
||||
},
|
||||
params: (params) => {
|
||||
const { operation, fields, ...rest } = params
|
||||
const isCreateOrUpdate =
|
||||
operation === 'servicenow_create_record' || operation === 'servicenow_update_record'
|
||||
const { operation, fields, records, credential, ...rest } = params
|
||||
|
||||
if (fields && isCreateOrUpdate) {
|
||||
const parsedFields = typeof fields === 'string' ? JSON.parse(fields) : fields
|
||||
return { ...rest, fields: parsedFields }
|
||||
// Parse JSON fields if provided
|
||||
let parsedFields: Record<string, any> | undefined
|
||||
if (fields && (operation === 'create' || operation === 'update')) {
|
||||
try {
|
||||
parsedFields = typeof fields === 'string' ? JSON.parse(fields) : fields
|
||||
} catch (error) {
|
||||
throw new Error(
|
||||
`Invalid JSON in fields: ${error instanceof Error ? error.message : String(error)}`
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
return rest
|
||||
// Validate OAuth credential
|
||||
if (!credential) {
|
||||
throw new Error('ServiceNow account credential is required')
|
||||
}
|
||||
|
||||
// Build params
|
||||
const baseParams: Record<string, any> = {
|
||||
...rest,
|
||||
credential,
|
||||
}
|
||||
|
||||
if (operation === 'create' || operation === 'update') {
|
||||
return {
|
||||
...baseParams,
|
||||
fields: parsedFields,
|
||||
}
|
||||
}
|
||||
return baseParams
|
||||
},
|
||||
},
|
||||
},
|
||||
inputs: {
|
||||
operation: { type: 'string', description: 'Operation to perform' },
|
||||
instanceUrl: { type: 'string', description: 'ServiceNow instance URL' },
|
||||
username: { type: 'string', description: 'ServiceNow username' },
|
||||
password: { type: 'string', description: 'ServiceNow password' },
|
||||
credential: { type: 'string', description: 'ServiceNow OAuth credential ID' },
|
||||
tableName: { type: 'string', description: 'Table name' },
|
||||
sysId: { type: 'string', description: 'Record sys_id' },
|
||||
number: { type: 'string', description: 'Record number' },
|
||||
|
||||
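The ServiceNow block above now keeps short operation ids in the UI ('create', 'read', 'update', 'delete'), maps them to the prefixed tool ids at execution time, and validates the JSON fields before forwarding them. A stripped-down sketch of those two pieces, with the same ids as the diff but simplified surrounding types:

// Sketch of the operation-to-tool mapping and fields parsing used by the block (types simplified).
type ServiceNowOperation = 'create' | 'read' | 'update' | 'delete'

function toolForOperation(operation: ServiceNowOperation): string {
  switch (operation) {
    case 'create':
      return 'servicenow_create_record'
    case 'read':
      return 'servicenow_read_record'
    case 'update':
      return 'servicenow_update_record'
    case 'delete':
      return 'servicenow_delete_record'
  }
}

function parseFields(fields: unknown): Record<string, unknown> {
  try {
    return typeof fields === 'string' ? JSON.parse(fields) : (fields as Record<string, unknown>)
  } catch (error) {
    throw new Error(`Invalid JSON in fields: ${error instanceof Error ? error.message : String(error)}`)
  }
}
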
@@ -3387,14 +3387,17 @@ export function SalesforceIcon(props: SVGProps<SVGSVGElement>) {
|
||||
|
||||
export function ServiceNowIcon(props: SVGProps<SVGSVGElement>) {
|
||||
return (
|
||||
<svg {...props} xmlns='http://www.w3.org/2000/svg' viewBox='0 0 71.1 63.6'>
|
||||
<svg
|
||||
{...props}
|
||||
xmlns='http://www.w3.org/2000/svg'
|
||||
viewBox='0 0 1570 1403'
|
||||
width='48'
|
||||
height='48'
|
||||
>
|
||||
<path
|
||||
fill='#62d84e'
|
||||
fillRule='evenodd'
|
||||
clipRule='evenodd'
|
||||
fill='#62D84E'
|
||||
d='M35.8,0C16.1,0,0,15.9,0,35.6c0,9.8,4,19.3,11.2,26c2.5,2.4,6.4,2.6,9.2,0.5c9-6.7,21.4-6.7,30.4,0
|
||||
c2.8,2.1,6.7,1.9,9.2-0.5C74.3,48,74.9,25.4,61.3,11.1C54.7,4.1,45.4,0.1,35.8,0 M35.6,53.5C26,53.8,18,46.2,17.8,36.7
|
||||
c0-0.3,0-0.6,0-0.9c0-9.8,8-17.8,17.8-17.8s17.8,8,17.8,17.8c0.3,9.6-7.3,17.5-16.8,17.8C36.2,53.5,35.9,53.5,35.6,53.5'
|
||||
d='M1228.4 138.9c129.2 88.9 228.9 214.3 286.3 360.2 57.5 145.8 70 305.5 36 458.5S1437.8 1250 1324 1357.9c-13.3 12.9-28.8 23.4-45.8 30.8-17 7.5-35.2 11.9-53.7 12.9-18.5 1.1-37.1-1.1-54.8-6.6-17.7-5.4-34.3-13.9-49.1-25.2-48.2-35.9-101.8-63.8-158.8-82.6-57.1-18.9-116.7-28.5-176.8-28.5s-119.8 9.6-176.8 28.5c-57 18.8-110.7 46.7-158.9 82.6-14.6 11.2-31 19.8-48.6 25.3s-36 7.8-54.4 6.8c-18.4-.9-36.5-5.1-53.4-12.4s-32.4-17.5-45.8-30.2C132.5 1251 53 1110.8 19 956.8s-20.9-314.6 37.6-461c58.5-146.5 159.6-272 290.3-360.3S631.8.1 789.6.5c156.8 1.3 309.6 49.6 438.8 138.4m-291.8 1014c48.2-19.2 92-48 128.7-84.6 36.7-36.7 65.5-80.4 84.7-128.6 19.2-48.1 28.4-99.7 27-151.5 0-103.9-41.3-203.5-114.8-277S889 396.4 785 396.4s-203.7 41.3-277.2 114.8S393 684.3 393 788.2c-1.4 51.8 7.8 103.4 27 151.5 19.2 48.2 48 91.9 84.7 128.6 36.7 36.6 80.5 65.4 128.6 84.6 48.2 19.2 99.8 28.4 151.7 27 51.8 1.4 103.4-7.8 151.6-27'
|
||||
/>
|
||||
</svg>
|
||||
)
|
||||
|
||||
@@ -1,47 +1,11 @@
|
||||
import '@/executor/__test-utils__/mock-dependencies'
|
||||
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
import { BlockType } from '@/executor/constants'
|
||||
import { ConditionBlockHandler } from '@/executor/handlers/condition/condition-handler'
|
||||
import type { BlockState, ExecutionContext } from '@/executor/types'
|
||||
import type { SerializedBlock, SerializedWorkflow } from '@/serializer/types'
|
||||
|
||||
vi.mock('@/lib/logs/console/logger', () => ({
|
||||
createLogger: vi.fn(() => ({
|
||||
info: vi.fn(),
|
||||
error: vi.fn(),
|
||||
warn: vi.fn(),
|
||||
debug: vi.fn(),
|
||||
})),
|
||||
}))
|
||||
|
||||
vi.mock('@/lib/core/utils/request', () => ({
|
||||
generateRequestId: vi.fn(() => 'test-request-id'),
|
||||
}))
|
||||
|
||||
vi.mock('@/lib/execution/isolated-vm', () => ({
|
||||
executeInIsolatedVM: vi.fn(),
|
||||
}))
|
||||
|
||||
import { executeInIsolatedVM } from '@/lib/execution/isolated-vm'
|
||||
|
||||
const mockExecuteInIsolatedVM = executeInIsolatedVM as ReturnType<typeof vi.fn>
|
||||
|
||||
function simulateIsolatedVMExecution(
|
||||
code: string,
|
||||
contextVariables: Record<string, unknown>
|
||||
): { result: unknown; stdout: string; error?: { message: string; name: string } } {
|
||||
try {
|
||||
const fn = new Function(...Object.keys(contextVariables), code)
|
||||
const result = fn(...Object.values(contextVariables))
|
||||
return { result, stdout: '' }
|
||||
} catch (error: any) {
|
||||
return {
|
||||
result: null,
|
||||
stdout: '',
|
||||
error: { message: error.message, name: error.name || 'Error' },
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
describe('ConditionBlockHandler', () => {
|
||||
let handler: ConditionBlockHandler
|
||||
let mockBlock: SerializedBlock
|
||||
@@ -54,6 +18,7 @@ describe('ConditionBlockHandler', () => {
|
||||
let mockPathTracker: any
|
||||
|
||||
beforeEach(() => {
|
||||
// Define blocks first
|
||||
mockSourceBlock = {
|
||||
id: 'source-block-1',
|
||||
metadata: { id: 'source', name: 'Source Block' },
|
||||
@@ -68,7 +33,7 @@ describe('ConditionBlockHandler', () => {
|
||||
metadata: { id: BlockType.CONDITION, name: 'Test Condition' },
|
||||
position: { x: 50, y: 50 },
|
||||
config: { tool: BlockType.CONDITION, params: {} },
|
||||
inputs: { conditions: 'json' },
|
||||
inputs: { conditions: 'json' }, // Corrected based on previous step
|
||||
outputs: {},
|
||||
enabled: true,
|
||||
}
|
||||
@@ -91,6 +56,7 @@ describe('ConditionBlockHandler', () => {
|
||||
enabled: true,
|
||||
}
|
||||
|
||||
// Then define workflow using the block objects
|
||||
mockWorkflow = {
|
||||
blocks: [mockSourceBlock, mockBlock, mockTargetBlock1, mockTargetBlock2],
|
||||
connections: [
|
||||
@@ -118,6 +84,7 @@ describe('ConditionBlockHandler', () => {
|
||||
|
||||
handler = new ConditionBlockHandler(mockPathTracker, mockResolver)
|
||||
|
||||
// Define mock context *after* workflow and blocks are set up
|
||||
mockContext = {
|
||||
workflowId: 'test-workflow-id',
|
||||
blockStates: new Map<string, BlockState>([
|
||||
@@ -132,7 +99,7 @@ describe('ConditionBlockHandler', () => {
|
||||
]),
|
||||
blockLogs: [],
|
||||
metadata: { duration: 0 },
|
||||
environmentVariables: {},
|
||||
environmentVariables: {}, // Now set the context's env vars
|
||||
decisions: { router: new Map(), condition: new Map() },
|
||||
loopExecutions: new Map(),
|
||||
executedBlocks: new Set([mockSourceBlock.id]),
|
||||
@@ -141,11 +108,11 @@ describe('ConditionBlockHandler', () => {
|
||||
completedLoops: new Set(),
|
||||
}
|
||||
|
||||
// Reset mocks using vi
|
||||
vi.clearAllMocks()
|
||||
|
||||
mockExecuteInIsolatedVM.mockImplementation(async ({ code, contextVariables }) => {
|
||||
return simulateIsolatedVMExecution(code, contextVariables)
|
||||
})
|
||||
// Default mock implementations - Removed as it's in the shared mock now
|
||||
// mockResolver.resolveBlockReferences.mockImplementation((value) => value)
|
||||
})
|
||||
|
||||
it('should handle condition blocks', () => {
|
||||
@@ -170,9 +137,11 @@ describe('ConditionBlockHandler', () => {
|
||||
blockType: 'target',
|
||||
blockTitle: 'Target Block 1',
|
||||
},
|
||||
selectedConditionId: 'cond1',
|
||||
selectedOption: 'cond1',
|
||||
}
|
||||
|
||||
// Mock the full resolution pipeline
|
||||
mockResolver.resolveVariableReferences.mockReturnValue('context.value > 5')
|
||||
mockResolver.resolveBlockReferences.mockReturnValue('context.value > 5')
|
||||
mockResolver.resolveEnvVariables.mockReturnValue('context.value > 5')
|
||||
@@ -209,9 +178,11 @@ describe('ConditionBlockHandler', () => {
|
||||
blockType: 'target',
|
||||
blockTitle: 'Target Block 2',
|
||||
},
|
||||
selectedConditionId: 'else1',
|
||||
selectedOption: 'else1',
|
||||
}
|
||||
|
||||
// Mock the full resolution pipeline
|
||||
mockResolver.resolveVariableReferences.mockReturnValue('context.value < 0')
|
||||
mockResolver.resolveBlockReferences.mockReturnValue('context.value < 0')
|
||||
mockResolver.resolveEnvVariables.mockReturnValue('context.value < 0')
|
||||
@@ -236,7 +207,7 @@ describe('ConditionBlockHandler', () => {
|
||||
const inputs = { conditions: '{ "invalid json ' }
|
||||
|
||||
await expect(handler.execute(mockContext, mockBlock, inputs)).rejects.toThrow(
|
||||
/^Invalid conditions format:/
|
||||
/^Invalid conditions format: Unterminated string.*/
|
||||
)
|
||||
})
|
||||
|
||||
@@ -247,6 +218,7 @@ describe('ConditionBlockHandler', () => {
|
||||
]
|
||||
const inputs = { conditions: JSON.stringify(conditions) }
|
||||
|
||||
// Mock the full resolution pipeline
|
||||
mockResolver.resolveVariableReferences.mockReturnValue('{{source-block-1.value}} > 5')
|
||||
mockResolver.resolveBlockReferences.mockReturnValue('10 > 5')
|
||||
mockResolver.resolveEnvVariables.mockReturnValue('10 > 5')
|
||||
@@ -273,6 +245,7 @@ describe('ConditionBlockHandler', () => {
|
||||
]
|
||||
const inputs = { conditions: JSON.stringify(conditions) }
|
||||
|
||||
// Mock the full resolution pipeline for variable resolution
|
||||
mockResolver.resolveVariableReferences.mockReturnValue('"john" !== null')
|
||||
mockResolver.resolveBlockReferences.mockReturnValue('"john" !== null')
|
||||
mockResolver.resolveEnvVariables.mockReturnValue('"john" !== null')
|
||||
@@ -299,6 +272,7 @@ describe('ConditionBlockHandler', () => {
|
||||
]
|
||||
const inputs = { conditions: JSON.stringify(conditions) }
|
||||
|
||||
// Mock the full resolution pipeline for env variable resolution
|
||||
mockResolver.resolveVariableReferences.mockReturnValue('{{POOP}} === "hi"')
|
||||
mockResolver.resolveBlockReferences.mockReturnValue('{{POOP}} === "hi"')
|
||||
mockResolver.resolveEnvVariables.mockReturnValue('"hi" === "hi"')
|
||||
@@ -326,6 +300,7 @@ describe('ConditionBlockHandler', () => {
|
||||
const inputs = { conditions: JSON.stringify(conditions) }
|
||||
|
||||
const resolutionError = new Error('Could not resolve reference: invalid-ref')
|
||||
// Mock the pipeline to throw at the variable resolution stage
|
||||
mockResolver.resolveVariableReferences.mockImplementation(() => {
|
||||
throw resolutionError
|
||||
})
|
||||
@@ -342,6 +317,7 @@ describe('ConditionBlockHandler', () => {
|
||||
]
|
||||
const inputs = { conditions: JSON.stringify(conditions) }
|
||||
|
||||
// Mock the full resolution pipeline
|
||||
mockResolver.resolveVariableReferences.mockReturnValue(
|
||||
'context.nonExistentProperty.doSomething()'
|
||||
)
|
||||
@@ -349,7 +325,7 @@ describe('ConditionBlockHandler', () => {
|
||||
mockResolver.resolveEnvVariables.mockReturnValue('context.nonExistentProperty.doSomething()')
|
||||
|
||||
await expect(handler.execute(mockContext, mockBlock, inputs)).rejects.toThrow(
|
||||
/Evaluation error in condition "if".*doSomething/
|
||||
/^Evaluation error in condition "if": Evaluation error in condition: Cannot read properties of undefined \(reading 'doSomething'\)\. \(Resolved: context\.nonExistentProperty\.doSomething\(\)\)$/
|
||||
)
|
||||
})
|
||||
|
||||
@@ -357,6 +333,7 @@ describe('ConditionBlockHandler', () => {
|
||||
const conditions = [{ id: 'cond1', title: 'if', value: 'true' }]
|
||||
const inputs = { conditions: JSON.stringify(conditions) }
|
||||
|
||||
// Create a new context with empty blockStates instead of trying to delete from readonly map
|
||||
const contextWithoutSource = {
|
||||
...mockContext,
|
||||
blockStates: new Map<string, BlockState>(),
|
||||
@@ -369,7 +346,7 @@ describe('ConditionBlockHandler', () => {
|
||||
const result = await handler.execute(contextWithoutSource, mockBlock, inputs)
|
||||
|
||||
expect(result).toHaveProperty('conditionResult', true)
|
||||
expect(result).toHaveProperty('selectedOption', 'cond1')
|
||||
expect(result).toHaveProperty('selectedConditionId', 'cond1')
|
||||
})
|
||||
|
||||
it('should throw error if target block is missing', async () => {
|
||||
@@ -378,6 +355,7 @@ describe('ConditionBlockHandler', () => {
|
||||
|
||||
mockContext.workflow!.blocks = [mockSourceBlock, mockBlock, mockTargetBlock2]
|
||||
|
||||
// Mock the full resolution pipeline
|
||||
mockResolver.resolveVariableReferences.mockReturnValue('true')
|
||||
mockResolver.resolveBlockReferences.mockReturnValue('true')
|
||||
mockResolver.resolveEnvVariables.mockReturnValue('true')
|
||||
@@ -403,6 +381,7 @@ describe('ConditionBlockHandler', () => {
|
||||
},
|
||||
]
|
||||
|
||||
// Mock the full resolution pipeline
|
||||
mockResolver.resolveVariableReferences
|
||||
.mockReturnValueOnce('false')
|
||||
.mockReturnValueOnce('context.value === 99')
|
||||
@@ -415,9 +394,12 @@ describe('ConditionBlockHandler', () => {
|
||||
|
||||
const result = await handler.execute(mockContext, mockBlock, inputs)
|
||||
|
||||
// Should return success with no path selected (branch ends gracefully)
|
||||
expect((result as any).conditionResult).toBe(false)
|
||||
expect((result as any).selectedPath).toBeNull()
|
||||
expect((result as any).selectedConditionId).toBeNull()
|
||||
expect((result as any).selectedOption).toBeNull()
|
||||
// Decision should not be set when no condition matches
|
||||
expect(mockContext.decisions.condition.has(mockBlock.id)).toBe(false)
|
||||
})
|
||||
|
||||
@@ -428,6 +410,7 @@ describe('ConditionBlockHandler', () => {
|
||||
]
|
||||
const inputs = { conditions: JSON.stringify(conditions) }
|
||||
|
||||
// Mock the full resolution pipeline
|
||||
mockResolver.resolveVariableReferences.mockReturnValue('context.item === "apple"')
|
||||
mockResolver.resolveBlockReferences.mockReturnValue('context.item === "apple"')
|
||||
mockResolver.resolveEnvVariables.mockReturnValue('context.item === "apple"')
|
||||
@@ -435,6 +418,6 @@ describe('ConditionBlockHandler', () => {
|
||||
const result = await handler.execute(mockContext, mockBlock, inputs)
|
||||
|
||||
expect(mockContext.decisions.condition.get(mockBlock.id)).toBe('else1')
|
||||
expect((result as any).selectedOption).toBe('else1')
|
||||
expect((result as any).selectedConditionId).toBe('else1')
|
||||
})
|
||||
})
|
||||
|
||||
@@ -1,5 +1,3 @@
import { generateRequestId } from '@/lib/core/utils/request'
import { executeInIsolatedVM } from '@/lib/execution/isolated-vm'
import { createLogger } from '@/lib/logs/console/logger'
import type { BlockOutput } from '@/blocks/types'
import { BlockType, CONDITION, DEFAULTS, EDGE } from '@/executor/constants'
@@ -8,8 +6,6 @@ import type { SerializedBlock } from '@/serializer/types'

const logger = createLogger('ConditionBlockHandler')

const CONDITION_TIMEOUT_MS = 5000

/**
 * Evaluates a single condition expression with variable/block reference resolution
 * Returns true if condition is met, false otherwise
@@ -39,32 +35,11 @@ export async function evaluateConditionExpression(
}

try {
const requestId = generateRequestId()

const code = `return Boolean(${resolvedConditionValue})`

const result = await executeInIsolatedVM({
code,
params: {},
envVars: {},
contextVariables: { context: evalContext },
timeoutMs: CONDITION_TIMEOUT_MS,
requestId,
})

if (result.error) {
logger.error(`Failed to evaluate condition: ${result.error.message}`, {
originalCondition: conditionExpression,
resolvedCondition: resolvedConditionValue,
evalContext,
error: result.error,
})
throw new Error(
`Evaluation error in condition: ${result.error.message}. (Resolved: ${resolvedConditionValue})`
)
}

return Boolean(result.result)
const conditionMet = new Function(
'context',
`with(context) { return ${resolvedConditionValue} }`
)(evalContext)
return Boolean(conditionMet)
} catch (evalError: any) {
logger.error(`Failed to evaluate condition: ${evalError.message}`, {
originalCondition: conditionExpression,
@@ -112,11 +87,13 @@ export class ConditionBlockHandler implements BlockHandler {
block
)

// Handle case where no condition matched and no else exists - branch ends gracefully
if (!selectedConnection || !selectedCondition) {
return {
...((sourceOutput as any) || {}),
conditionResult: false,
selectedPath: null,
selectedConditionId: null,
selectedOption: null,
}
}
@@ -138,6 +115,7 @@
blockTitle: targetBlock.metadata?.name || DEFAULTS.BLOCK_TITLE,
},
selectedOption: selectedCondition.id,
selectedConditionId: selectedCondition.id,
}
}

@@ -228,12 +206,14 @@
if (elseConnection) {
return { selectedConnection: elseConnection, selectedCondition: elseCondition }
}
// Else exists but has no connection - treat as no match, branch ends
logger.info(`No condition matched and else has no connection - branch ending`, {
blockId: block.id,
})
return { selectedConnection: null, selectedCondition: null }
}

// No condition matched and no else exists - branch ends gracefully
logger.info(`No condition matched and no else block - branch ending`, { blockId: block.id })
return { selectedConnection: null, selectedCondition: null }
}

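The handler hunk above replaces the isolated-vm call with a direct new Function evaluation wrapped in with(context), which is synchronous and drops the per-evaluation timeout that CONDITION_TIMEOUT_MS provided. A minimal standalone sketch of that evaluation path (error handling trimmed, names illustrative):

// Sketch of the Function-based condition evaluation shown in the diff (no sandbox, no timeout).
function evaluateResolvedCondition(
  resolvedConditionValue: string,
  evalContext: Record<string, unknown>
): boolean {
  const conditionMet = new Function(
    'context',
    `with(context) { return ${resolvedConditionValue} }`
  )(evalContext)
  return Boolean(conditionMet)
}

// e.g. evaluateResolvedCondition('value > 5', { value: 10 }) === true
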
@@ -1,5 +1,3 @@
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import { executeInIsolatedVM } from '@/lib/execution/isolated-vm'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { buildLoopIndexCondition, DEFAULTS, EDGE } from '@/executor/constants'
|
||||
import type { DAG } from '@/executor/dag/builder'
|
||||
@@ -19,8 +17,6 @@ import type { SerializedLoop } from '@/serializer/types'
|
||||
|
||||
const logger = createLogger('LoopOrchestrator')
|
||||
|
||||
const LOOP_CONDITION_TIMEOUT_MS = 5000
|
||||
|
||||
export type LoopRoute = typeof EDGE.LOOP_CONTINUE | typeof EDGE.LOOP_EXIT
|
||||
|
||||
export interface LoopContinuationResult {
|
||||
@@ -116,10 +112,7 @@ export class LoopOrchestrator {
|
||||
scope.currentIterationOutputs.set(baseId, output)
|
||||
}
|
||||
|
||||
async evaluateLoopContinuation(
|
||||
ctx: ExecutionContext,
|
||||
loopId: string
|
||||
): Promise<LoopContinuationResult> {
|
||||
evaluateLoopContinuation(ctx: ExecutionContext, loopId: string): LoopContinuationResult {
|
||||
const scope = ctx.loopExecutions?.get(loopId)
|
||||
if (!scope) {
|
||||
logger.error('Loop scope not found during continuation evaluation', { loopId })
|
||||
@@ -130,6 +123,7 @@ export class LoopOrchestrator {
|
||||
}
|
||||
}
|
||||
|
||||
// Check for cancellation
|
||||
if (ctx.isCancelled) {
|
||||
logger.info('Loop execution cancelled', { loopId, iteration: scope.iteration })
|
||||
return this.createExitResult(ctx, loopId, scope)
|
||||
@@ -146,7 +140,7 @@ export class LoopOrchestrator {
|
||||
|
||||
scope.currentIterationOutputs.clear()
|
||||
|
||||
if (!(await this.evaluateCondition(ctx, scope, scope.iteration + 1))) {
|
||||
if (!this.evaluateCondition(ctx, scope, scope.iteration + 1)) {
|
||||
return this.createExitResult(ctx, loopId, scope)
|
||||
}
|
||||
|
||||
@@ -179,11 +173,7 @@ export class LoopOrchestrator {
|
||||
}
|
||||
}
|
||||
|
||||
private async evaluateCondition(
|
||||
ctx: ExecutionContext,
|
||||
scope: LoopScope,
|
||||
iteration?: number
|
||||
): Promise<boolean> {
|
||||
private evaluateCondition(ctx: ExecutionContext, scope: LoopScope, iteration?: number): boolean {
|
||||
if (!scope.condition) {
|
||||
logger.warn('No condition defined for loop')
|
||||
return false
|
||||
@@ -194,7 +184,7 @@ export class LoopOrchestrator {
|
||||
scope.iteration = iteration
|
||||
}
|
||||
|
||||
const result = await this.evaluateWhileCondition(ctx, scope.condition, scope)
|
||||
const result = this.evaluateWhileCondition(ctx, scope.condition, scope)
|
||||
|
||||
if (iteration !== undefined) {
|
||||
scope.iteration = currentIteration
|
||||
@@ -233,6 +223,7 @@ export class LoopOrchestrator {
|
||||
const loopNodes = loopConfig.nodes
|
||||
const allLoopNodeIds = new Set([sentinelStartId, sentinelEndId, ...loopNodes])
|
||||
|
||||
// Clear deactivated edges for loop nodes so error/success edges can be re-evaluated
|
||||
if (this.edgeManager) {
|
||||
this.edgeManager.clearDeactivatedEdgesForNodes(allLoopNodeIds)
|
||||
}
|
||||
@@ -272,7 +263,7 @@ export class LoopOrchestrator {
|
||||
*
|
||||
* @returns true if the loop should execute, false if it should be skipped
|
||||
*/
|
||||
async evaluateInitialCondition(ctx: ExecutionContext, loopId: string): Promise<boolean> {
|
||||
evaluateInitialCondition(ctx: ExecutionContext, loopId: string): boolean {
|
||||
const scope = ctx.loopExecutions?.get(loopId)
|
||||
if (!scope) {
|
||||
logger.warn('Loop scope not found for initial condition evaluation', { loopId })
|
||||
@@ -309,7 +300,7 @@ export class LoopOrchestrator {
|
||||
return false
|
||||
}
|
||||
|
||||
const result = await this.evaluateWhileCondition(ctx, scope.condition, scope)
|
||||
const result = this.evaluateWhileCondition(ctx, scope.condition, scope)
|
||||
logger.info('While loop initial condition evaluation', {
|
||||
loopId,
|
||||
condition: scope.condition,
|
||||
@@ -336,11 +327,11 @@ export class LoopOrchestrator {
|
||||
return undefined
|
||||
}
|
||||
|
||||
private async evaluateWhileCondition(
|
||||
private evaluateWhileCondition(
|
||||
ctx: ExecutionContext,
|
||||
condition: string,
|
||||
scope: LoopScope
|
||||
): Promise<boolean> {
|
||||
): boolean {
|
||||
if (!condition) {
|
||||
return false
|
||||
}
|
||||
@@ -352,6 +343,7 @@ export class LoopOrchestrator {
|
||||
workflowVariables: ctx.workflowVariables,
|
||||
})
|
||||
|
||||
// Use generic utility for smart variable reference replacement
|
||||
const evaluatedCondition = replaceValidReferences(condition, (match) => {
|
||||
const resolved = this.resolver.resolveSingleReference(ctx, '', match, scope)
|
||||
logger.info('Resolved variable reference in loop condition', {
|
||||
@@ -360,9 +352,11 @@ export class LoopOrchestrator {
|
||||
resolvedType: typeof resolved,
|
||||
})
|
||||
if (resolved !== undefined) {
|
||||
// For booleans and numbers, return as-is (no quotes)
|
||||
if (typeof resolved === 'boolean' || typeof resolved === 'number') {
|
||||
return String(resolved)
|
||||
}
|
||||
// For strings that represent booleans, return without quotes
|
||||
if (typeof resolved === 'string') {
|
||||
const lower = resolved.toLowerCase().trim()
|
||||
if (lower === 'true' || lower === 'false') {
|
||||
@@ -370,33 +364,13 @@ export class LoopOrchestrator {
|
||||
}
|
||||
return `"${resolved}"`
|
||||
}
|
||||
// For other types, stringify them
|
||||
return JSON.stringify(resolved)
|
||||
}
|
||||
return match
|
||||
})
|
||||
|
||||
const requestId = generateRequestId()
|
||||
const code = `return Boolean(${evaluatedCondition})`
|
||||
|
||||
const vmResult = await executeInIsolatedVM({
|
||||
code,
|
||||
params: {},
|
||||
envVars: {},
|
||||
contextVariables: {},
|
||||
timeoutMs: LOOP_CONDITION_TIMEOUT_MS,
|
||||
requestId,
|
||||
})
|
||||
|
||||
if (vmResult.error) {
|
||||
logger.error('Failed to evaluate loop condition', {
|
||||
condition,
|
||||
evaluatedCondition,
|
||||
error: vmResult.error,
|
||||
})
|
||||
return false
|
||||
}
|
||||
|
||||
const result = Boolean(vmResult.result)
|
||||
const result = Boolean(new Function(`return (${evaluatedCondition})`)())
|
||||
|
||||
logger.info('Loop condition evaluation result', {
|
||||
originalCondition: condition,
|
||||
|
||||
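In the loop orchestrator hunk above, the while-condition path becomes synchronous: references are textually substituted into the expression and the result is evaluated with new Function. The sketch below shows that substitute-then-evaluate flow; replaceReferences is a hypothetical stand-in for the real replaceValidReferences/resolveSingleReference pair:

// Sketch: resolve {{refs}} to literals, then evaluate the boolean expression in-process.
function replaceReferences(condition: string, resolve: (ref: string) => unknown): string {
  return condition.replace(/\{\{([^}]+)\}\}/g, (_match, ref) => {
    const resolved = resolve(ref.trim())
    if (typeof resolved === 'boolean' || typeof resolved === 'number') return String(resolved)
    if (typeof resolved === 'string') {
      const lower = resolved.toLowerCase().trim()
      return lower === 'true' || lower === 'false' ? lower : `"${resolved}"`
    }
    return JSON.stringify(resolved)
  })
}

function evaluateLoopCondition(condition: string, resolve: (ref: string) => unknown): boolean {
  const evaluated = replaceReferences(condition, resolve)
  return Boolean(new Function(`return (${evaluated})`)())
}

// e.g. evaluateLoopCondition('{{loop.index}} < 3', () => 1) === true
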
@@ -68,7 +68,7 @@ export class NodeExecutionOrchestrator {
}

if (node.metadata.isSentinel) {
const output = await this.handleSentinel(ctx, node)
const output = this.handleSentinel(ctx, node)
const isFinalOutput = node.outgoingEdges.size === 0
return {
nodeId,
@@ -86,17 +86,14 @@
}
}

private async handleSentinel(
ctx: ExecutionContext,
node: DAGNode
): Promise<NormalizedBlockOutput> {
private handleSentinel(ctx: ExecutionContext, node: DAGNode): NormalizedBlockOutput {
const sentinelType = node.metadata.sentinelType
const loopId = node.metadata.loopId

switch (sentinelType) {
case 'start': {
if (loopId) {
const shouldExecute = await this.loopOrchestrator.evaluateInitialCondition(ctx, loopId)
const shouldExecute = this.loopOrchestrator.evaluateInitialCondition(ctx, loopId)
if (!shouldExecute) {
logger.info('While loop initial condition false, skipping loop body', { loopId })
return {
@@ -115,7 +112,7 @@
return { shouldExit: true, selectedRoute: EDGE.LOOP_EXIT }
}

const continuationResult = await this.loopOrchestrator.evaluateLoopContinuation(ctx, loopId)
const continuationResult = this.loopOrchestrator.evaluateLoopContinuation(ctx, loopId)

if (continuationResult.shouldContinue) {
return {

@@ -83,7 +83,7 @@ export interface NormalizedBlockOutput {
blockType?: string
blockTitle?: string
}
selectedOption?: string
selectedConditionId?: string
conditionResult?: boolean
result?: any
stdout?: string

@@ -18,7 +18,7 @@ export const notificationKeys = {

type NotificationType = 'webhook' | 'email' | 'slack'
type LogLevel = 'info' | 'error'
type TriggerType = 'api' | 'webhook' | 'schedule' | 'manual' | 'chat'
type TriggerType = 'api' | 'webhook' | 'schedule' | 'manual' | 'chat' | 'mcp'

type AlertRuleType =
| 'consecutive_failures'

@@ -28,7 +28,7 @@ export interface ServiceInfo extends OAuthServiceConfig {
function defineServices(): ServiceInfo[] {
const servicesList: ServiceInfo[] = []

Object.entries(OAUTH_PROVIDERS).forEach(([_providerKey, provider]) => {
Object.values(OAUTH_PROVIDERS).forEach((provider) => {
Object.values(provider.services).forEach((service) => {
servicesList.push({
...service,
@@ -142,6 +142,13 @@ export function useConnectOAuthService() {
return { success: true }
}

// ServiceNow requires a custom OAuth flow with instance URL input
if (providerId === 'servicenow') {
const returnUrl = encodeURIComponent(callbackURL)
window.location.href = `/api/auth/servicenow/authorize?returnUrl=${returnUrl}`
return { success: true }
}

await client.oauth2.link({
providerId,
callbackURL,

apps/sim/hooks/queries/workflow-mcp-servers.ts (new file, 508 lines)
@@ -0,0 +1,508 @@
|
||||
import { keepPreviousData, useMutation, useQuery, useQueryClient } from '@tanstack/react-query'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
|
||||
const logger = createLogger('WorkflowMcpServerQueries')
|
||||
|
||||
/**
|
||||
* Query key factories for Workflow MCP Server queries
|
||||
*/
|
||||
export const workflowMcpServerKeys = {
|
||||
all: ['workflow-mcp-servers'] as const,
|
||||
servers: (workspaceId: string) => [...workflowMcpServerKeys.all, 'servers', workspaceId] as const,
|
||||
server: (workspaceId: string, serverId: string) =>
|
||||
[...workflowMcpServerKeys.servers(workspaceId), serverId] as const,
|
||||
tools: (workspaceId: string, serverId: string) =>
|
||||
[...workflowMcpServerKeys.server(workspaceId, serverId), 'tools'] as const,
|
||||
}
|
||||
|
||||
/**
|
||||
* Workflow MCP Server Types
|
||||
*/
|
||||
export interface WorkflowMcpServer {
|
||||
id: string
|
||||
workspaceId: string
|
||||
createdBy: string
|
||||
name: string
|
||||
description: string | null
|
||||
isPublished: boolean
|
||||
publishedAt: string | null
|
||||
createdAt: string
|
||||
updatedAt: string
|
||||
toolCount?: number
|
||||
}
|
||||
|
||||
export interface WorkflowMcpTool {
|
||||
id: string
|
||||
serverId: string
|
||||
workflowId: string
|
||||
toolName: string
|
||||
toolDescription: string | null
|
||||
parameterSchema: Record<string, unknown>
|
||||
isEnabled: boolean
|
||||
createdAt: string
|
||||
updatedAt: string
|
||||
workflowName?: string
|
||||
workflowDescription?: string | null
|
||||
isDeployed?: boolean
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetch workflow MCP servers for a workspace
|
||||
*/
|
||||
async function fetchWorkflowMcpServers(workspaceId: string): Promise<WorkflowMcpServer[]> {
|
||||
const response = await fetch(`/api/mcp/workflow-servers?workspaceId=${workspaceId}`)
|
||||
|
||||
if (response.status === 404) {
|
||||
return []
|
||||
}
|
||||
|
||||
const data = await response.json()
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(data.error || 'Failed to fetch workflow MCP servers')
|
||||
}
|
||||
|
||||
return data.data?.servers || []
|
||||
}
|
||||
|
||||
/**
|
||||
* Hook to fetch workflow MCP servers
|
||||
*/
|
||||
export function useWorkflowMcpServers(workspaceId: string) {
|
||||
return useQuery({
|
||||
queryKey: workflowMcpServerKeys.servers(workspaceId),
|
||||
queryFn: () => fetchWorkflowMcpServers(workspaceId),
|
||||
enabled: !!workspaceId,
|
||||
retry: false,
|
||||
staleTime: 60 * 1000,
|
||||
placeholderData: keepPreviousData,
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetch a single workflow MCP server with its tools
|
||||
*/
|
||||
async function fetchWorkflowMcpServer(
|
||||
workspaceId: string,
|
||||
serverId: string
|
||||
): Promise<{ server: WorkflowMcpServer; tools: WorkflowMcpTool[] }> {
|
||||
const response = await fetch(`/api/mcp/workflow-servers/${serverId}?workspaceId=${workspaceId}`)
|
||||
|
||||
const data = await response.json()
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(data.error || 'Failed to fetch workflow MCP server')
|
||||
}
|
||||
|
||||
return {
|
||||
server: data.data?.server,
|
||||
tools: data.data?.tools || [],
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Hook to fetch a single workflow MCP server
|
||||
*/
|
||||
export function useWorkflowMcpServer(workspaceId: string, serverId: string | null) {
|
||||
return useQuery({
|
||||
queryKey: workflowMcpServerKeys.server(workspaceId, serverId || ''),
|
||||
queryFn: () => fetchWorkflowMcpServer(workspaceId, serverId!),
|
||||
enabled: !!workspaceId && !!serverId,
|
||||
retry: false,
|
||||
staleTime: 30 * 1000,
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetch tools for a workflow MCP server
|
||||
*/
|
||||
async function fetchWorkflowMcpTools(
|
||||
workspaceId: string,
|
||||
serverId: string
|
||||
): Promise<WorkflowMcpTool[]> {
|
||||
const response = await fetch(
|
||||
`/api/mcp/workflow-servers/${serverId}/tools?workspaceId=${workspaceId}`
|
||||
)
|
||||
|
||||
if (response.status === 404) {
|
||||
return []
|
||||
}
|
||||
|
||||
const data = await response.json()
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(data.error || 'Failed to fetch workflow MCP tools')
|
||||
}
|
||||
|
||||
return data.data?.tools || []
|
||||
}
|
||||
|
||||
/**
|
||||
* Hook to fetch tools for a workflow MCP server
|
||||
*/
|
||||
export function useWorkflowMcpTools(workspaceId: string, serverId: string | null) {
|
||||
return useQuery({
|
||||
queryKey: workflowMcpServerKeys.tools(workspaceId, serverId || ''),
|
||||
queryFn: () => fetchWorkflowMcpTools(workspaceId, serverId!),
|
||||
enabled: !!workspaceId && !!serverId,
|
||||
retry: false,
|
||||
staleTime: 30 * 1000,
|
||||
placeholderData: keepPreviousData,
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Create workflow MCP server mutation
|
||||
*/
|
||||
interface CreateWorkflowMcpServerParams {
|
||||
workspaceId: string
|
||||
name: string
|
||||
description?: string
|
||||
}
|
||||
|
||||
export function useCreateWorkflowMcpServer() {
|
||||
const queryClient = useQueryClient()
|
||||
|
||||
return useMutation({
|
||||
mutationFn: async ({ workspaceId, name, description }: CreateWorkflowMcpServerParams) => {
|
||||
const response = await fetch('/api/mcp/workflow-servers', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({ workspaceId, name, description }),
|
||||
})
|
||||
|
||||
const data = await response.json()
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(data.error || 'Failed to create workflow MCP server')
|
||||
}
|
||||
|
||||
logger.info(`Created workflow MCP server: ${name}`)
|
||||
return data.data?.server as WorkflowMcpServer
|
||||
},
|
||||
onSuccess: (_data, variables) => {
|
||||
queryClient.invalidateQueries({
|
||||
queryKey: workflowMcpServerKeys.servers(variables.workspaceId),
|
||||
})
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Update workflow MCP server mutation
|
||||
*/
|
||||
interface UpdateWorkflowMcpServerParams {
|
||||
workspaceId: string
|
||||
serverId: string
|
||||
name?: string
|
||||
description?: string
|
||||
}
|
||||
|
||||
export function useUpdateWorkflowMcpServer() {
|
||||
const queryClient = useQueryClient()
|
||||
|
||||
return useMutation({
|
||||
mutationFn: async ({
|
||||
workspaceId,
|
||||
serverId,
|
||||
name,
|
||||
description,
|
||||
}: UpdateWorkflowMcpServerParams) => {
|
||||
const response = await fetch(
|
||||
`/api/mcp/workflow-servers/${serverId}?workspaceId=${workspaceId}`,
|
||||
{
|
||||
method: 'PATCH',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({ name, description }),
|
||||
}
|
||||
)
|
||||
|
||||
const data = await response.json()
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(data.error || 'Failed to update workflow MCP server')
|
||||
}
|
||||
|
||||
logger.info(`Updated workflow MCP server: ${serverId}`)
|
||||
return data.data?.server as WorkflowMcpServer
|
||||
},
|
||||
onSuccess: (_data, variables) => {
|
||||
queryClient.invalidateQueries({
|
||||
queryKey: workflowMcpServerKeys.servers(variables.workspaceId),
|
||||
})
|
||||
queryClient.invalidateQueries({
|
||||
queryKey: workflowMcpServerKeys.server(variables.workspaceId, variables.serverId),
|
||||
})
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete workflow MCP server mutation
|
||||
*/
|
||||
interface DeleteWorkflowMcpServerParams {
|
||||
workspaceId: string
|
||||
serverId: string
|
||||
}
|
||||
|
||||
export function useDeleteWorkflowMcpServer() {
|
||||
const queryClient = useQueryClient()
|
||||
|
||||
return useMutation({
|
||||
mutationFn: async ({ workspaceId, serverId }: DeleteWorkflowMcpServerParams) => {
|
||||
const response = await fetch(
|
||||
`/api/mcp/workflow-servers/${serverId}?workspaceId=${workspaceId}`,
|
||||
{
|
||||
method: 'DELETE',
|
||||
}
|
||||
)
|
||||
|
||||
const data = await response.json()
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(data.error || 'Failed to delete workflow MCP server')
|
||||
}
|
||||
|
||||
logger.info(`Deleted workflow MCP server: ${serverId}`)
|
||||
return data
|
||||
},
|
||||
onSuccess: (_data, variables) => {
|
||||
queryClient.invalidateQueries({
|
||||
queryKey: workflowMcpServerKeys.servers(variables.workspaceId),
|
||||
})
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Publish workflow MCP server mutation
|
||||
*/
|
||||
interface PublishWorkflowMcpServerParams {
|
||||
workspaceId: string
|
||||
serverId: string
|
||||
}
|
||||
|
||||
export interface PublishWorkflowMcpServerResult {
|
||||
server: WorkflowMcpServer
|
||||
mcpServerUrl: string
|
||||
message: string
|
||||
}
|
||||
|
||||
export function usePublishWorkflowMcpServer() {
|
||||
const queryClient = useQueryClient()
|
||||
|
||||
return useMutation({
|
||||
mutationFn: async ({
|
||||
workspaceId,
|
||||
serverId,
|
||||
}: PublishWorkflowMcpServerParams): Promise<PublishWorkflowMcpServerResult> => {
|
||||
const response = await fetch(
|
||||
`/api/mcp/workflow-servers/${serverId}/publish?workspaceId=${workspaceId}`,
|
||||
{
|
||||
method: 'POST',
|
||||
}
|
||||
)
|
||||
|
||||
const data = await response.json()
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(data.error || 'Failed to publish workflow MCP server')
|
||||
}
|
||||
|
||||
logger.info(`Published workflow MCP server: ${serverId}`)
|
||||
return data.data
|
||||
},
|
||||
onSuccess: (_data, variables) => {
|
||||
queryClient.invalidateQueries({
|
||||
queryKey: workflowMcpServerKeys.servers(variables.workspaceId),
|
||||
})
|
||||
queryClient.invalidateQueries({
|
||||
queryKey: workflowMcpServerKeys.server(variables.workspaceId, variables.serverId),
|
||||
})
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Unpublish workflow MCP server mutation
|
||||
*/
|
||||
export function useUnpublishWorkflowMcpServer() {
|
||||
const queryClient = useQueryClient()
|
||||
|
||||
return useMutation({
|
||||
mutationFn: async ({ workspaceId, serverId }: PublishWorkflowMcpServerParams) => {
|
||||
const response = await fetch(
|
||||
`/api/mcp/workflow-servers/${serverId}/publish?workspaceId=${workspaceId}`,
|
||||
{
|
||||
method: 'DELETE',
|
||||
}
|
||||
)
|
||||
|
||||
const data = await response.json()
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(data.error || 'Failed to unpublish workflow MCP server')
|
||||
}
|
||||
|
||||
logger.info(`Unpublished workflow MCP server: ${serverId}`)
|
||||
return data.data
|
||||
},
|
||||
onSuccess: (_data, variables) => {
|
||||
queryClient.invalidateQueries({
|
||||
queryKey: workflowMcpServerKeys.servers(variables.workspaceId),
|
||||
})
|
||||
queryClient.invalidateQueries({
|
||||
queryKey: workflowMcpServerKeys.server(variables.workspaceId, variables.serverId),
|
||||
})
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Add tool to workflow MCP server mutation
|
||||
*/
|
||||
interface AddWorkflowMcpToolParams {
|
||||
workspaceId: string
|
||||
serverId: string
|
||||
workflowId: string
|
||||
toolName?: string
|
||||
toolDescription?: string
|
||||
parameterSchema?: Record<string, unknown>
|
||||
}
|
||||
|
||||
export function useAddWorkflowMcpTool() {
|
||||
const queryClient = useQueryClient()
|
||||
|
||||
return useMutation({
|
||||
mutationFn: async ({
|
||||
workspaceId,
|
||||
serverId,
|
||||
workflowId,
|
||||
toolName,
|
||||
toolDescription,
|
||||
parameterSchema,
|
||||
}: AddWorkflowMcpToolParams) => {
|
||||
const response = await fetch(
|
||||
`/api/mcp/workflow-servers/${serverId}/tools?workspaceId=${workspaceId}`,
|
||||
{
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({ workflowId, toolName, toolDescription, parameterSchema }),
|
||||
}
|
||||
)
|
||||
|
||||
const data = await response.json()
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(data.error || 'Failed to add tool to workflow MCP server')
|
||||
}
|
||||
|
||||
logger.info(`Added tool to workflow MCP server: ${serverId}`)
|
||||
return data.data?.tool as WorkflowMcpTool
|
||||
},
|
||||
onSuccess: (_data, variables) => {
|
||||
queryClient.invalidateQueries({
|
||||
queryKey: workflowMcpServerKeys.servers(variables.workspaceId),
|
||||
})
|
||||
queryClient.invalidateQueries({
|
||||
queryKey: workflowMcpServerKeys.server(variables.workspaceId, variables.serverId),
|
||||
})
|
||||
queryClient.invalidateQueries({
|
||||
queryKey: workflowMcpServerKeys.tools(variables.workspaceId, variables.serverId),
|
||||
})
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Update tool mutation
|
||||
*/
|
||||
interface UpdateWorkflowMcpToolParams {
|
||||
workspaceId: string
|
||||
serverId: string
|
||||
toolId: string
|
||||
toolName?: string
|
||||
toolDescription?: string
|
||||
parameterSchema?: Record<string, unknown>
|
||||
isEnabled?: boolean
|
||||
}
|
||||
|
||||
export function useUpdateWorkflowMcpTool() {
|
||||
const queryClient = useQueryClient()
|
||||
|
||||
return useMutation({
|
||||
mutationFn: async ({
|
||||
workspaceId,
|
||||
serverId,
|
||||
toolId,
|
||||
...updates
|
||||
}: UpdateWorkflowMcpToolParams) => {
|
||||
const response = await fetch(
|
||||
`/api/mcp/workflow-servers/${serverId}/tools/${toolId}?workspaceId=${workspaceId}`,
|
||||
{
|
||||
method: 'PATCH',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify(updates),
|
||||
}
|
||||
)
|
||||
|
||||
const data = await response.json()
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(data.error || 'Failed to update tool')
|
||||
}
|
||||
|
||||
logger.info(`Updated tool ${toolId} in workflow MCP server: ${serverId}`)
|
||||
return data.data?.tool as WorkflowMcpTool
|
||||
},
|
||||
onSuccess: (_data, variables) => {
|
||||
queryClient.invalidateQueries({
|
||||
queryKey: workflowMcpServerKeys.tools(variables.workspaceId, variables.serverId),
|
||||
})
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete tool mutation
|
||||
*/
|
||||
interface DeleteWorkflowMcpToolParams {
|
||||
workspaceId: string
|
||||
serverId: string
|
||||
toolId: string
|
||||
}
|
||||
|
||||
export function useDeleteWorkflowMcpTool() {
|
||||
const queryClient = useQueryClient()
|
||||
|
||||
return useMutation({
|
||||
mutationFn: async ({ workspaceId, serverId, toolId }: DeleteWorkflowMcpToolParams) => {
|
||||
const response = await fetch(
|
||||
`/api/mcp/workflow-servers/${serverId}/tools/${toolId}?workspaceId=${workspaceId}`,
|
||||
{
|
||||
method: 'DELETE',
|
||||
}
|
||||
)
|
||||
|
||||
const data = await response.json()
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(data.error || 'Failed to delete tool')
|
||||
}
|
||||
|
||||
logger.info(`Deleted tool ${toolId} from workflow MCP server: ${serverId}`)
|
||||
return data
|
||||
},
|
||||
onSuccess: (_data, variables) => {
|
||||
queryClient.invalidateQueries({
|
||||
queryKey: workflowMcpServerKeys.servers(variables.workspaceId),
|
||||
})
|
||||
queryClient.invalidateQueries({
|
||||
queryKey: workflowMcpServerKeys.server(variables.workspaceId, variables.serverId),
|
||||
})
|
||||
queryClient.invalidateQueries({
|
||||
queryKey: workflowMcpServerKeys.tools(variables.workspaceId, variables.serverId),
|
||||
})
|
||||
},
|
||||
})
|
||||
}
|
||||
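As a usage sketch (not part of the diff): a settings panel might chain the create and publish mutations defined above. The import path, the component shape, and the assumption that the returned WorkflowMcpServer exposes an id field are mine, not the PR's.

// Hypothetical wiring of the mutations above (import path assumed).
import { useCreateWorkflowMcpServer, usePublishWorkflowMcpServer } from '@/hooks/use-workflow-mcp-servers'

function PublishWorkflowServerButton({ workspaceId }: { workspaceId: string }) {
  const createServer = useCreateWorkflowMcpServer()
  const publishServer = usePublishWorkflowMcpServer()

  const handlePublish = async () => {
    // Create the server, then publish it; the onSuccess handlers above invalidate the server queries.
    const server = await createServer.mutateAsync({ workspaceId, name: 'My Workflows' })
    const { mcpServerUrl } = await publishServer.mutateAsync({ workspaceId, serverId: server.id })
    console.log('MCP server available at', mcpServerUrl)
  }

  return <button onClick={handlePublish}>Publish MCP server</button>
}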
@@ -14,6 +14,7 @@ interface UseWebhookManagementProps {
|
||||
blockId: string
|
||||
triggerId?: string
|
||||
isPreview?: boolean
|
||||
useWebhookUrl?: boolean
|
||||
}
|
||||
|
||||
interface WebhookManagementState {
|
||||
@@ -90,6 +91,7 @@ export function useWebhookManagement({
|
||||
blockId,
|
||||
triggerId,
|
||||
isPreview = false,
|
||||
useWebhookUrl = false,
|
||||
}: UseWebhookManagementProps): WebhookManagementState {
|
||||
const params = useParams()
|
||||
const workflowId = params.workflowId as string
|
||||
@@ -134,7 +136,6 @@ export function useWebhookManagement({
|
||||
const currentlyLoading = store.loadingWebhooks.has(blockId)
|
||||
const alreadyChecked = store.checkedWebhooks.has(blockId)
|
||||
const currentWebhookId = store.getValue(blockId, 'webhookId')
|
||||
|
||||
if (currentlyLoading || (alreadyChecked && currentWebhookId)) {
|
||||
return
|
||||
}
|
||||
@@ -205,7 +206,9 @@ export function useWebhookManagement({
|
||||
}
|
||||
}
|
||||
|
||||
loadWebhookOrGenerateUrl()
|
||||
if (useWebhookUrl) {
|
||||
loadWebhookOrGenerateUrl()
|
||||
}
|
||||
}, [isPreview, triggerId, workflowId, blockId])
|
||||
|
||||
const createWebhook = async (
|
||||
|
||||
@@ -110,20 +110,28 @@ export const auth = betterAuth({
|
||||
account: {
|
||||
create: {
|
||||
before: async (account) => {
|
||||
// Only one credential per (userId, providerId) is allowed
|
||||
// If the user reconnects (even with a different external account), replace the existing one
|
||||
const existing = await db.query.account.findFirst({
|
||||
where: and(
|
||||
eq(schema.account.userId, account.userId),
|
||||
eq(schema.account.providerId, account.providerId)
|
||||
eq(schema.account.providerId, account.providerId),
|
||||
eq(schema.account.accountId, account.accountId)
|
||||
),
|
||||
})
|
||||
|
||||
if (existing) {
|
||||
logger.warn(
|
||||
'[databaseHooks.account.create.before] Duplicate account detected, updating existing',
|
||||
{
|
||||
existingId: existing.id,
|
||||
userId: account.userId,
|
||||
providerId: account.providerId,
|
||||
accountId: account.accountId,
|
||||
}
|
||||
)
|
||||
|
||||
await db
|
||||
.update(schema.account)
|
||||
.set({
|
||||
accountId: account.accountId,
|
||||
accessToken: account.accessToken,
|
||||
refreshToken: account.refreshToken,
|
||||
idToken: account.idToken,
|
||||
@@ -725,17 +733,17 @@ export const auth = betterAuth({
|
||||
scopes: ['login', 'data'],
|
||||
responseType: 'code',
|
||||
redirectURI: `${getBaseUrl()}/api/auth/oauth2/callback/wealthbox`,
|
||||
getUserInfo: async (_tokens) => {
|
||||
getUserInfo: async (tokens) => {
|
||||
try {
|
||||
logger.info('Creating Wealthbox user profile from token data')
|
||||
|
||||
const uniqueId = 'wealthbox-user'
|
||||
const uniqueId = `wealthbox-${Date.now()}`
|
||||
const now = new Date()
|
||||
|
||||
return {
|
||||
id: uniqueId,
|
||||
name: 'Wealthbox User',
|
||||
email: `${uniqueId}@wealthbox.user`,
|
||||
email: `${uniqueId.replace(/[^a-zA-Z0-9]/g, '')}@wealthbox.user`,
|
||||
emailVerified: false,
|
||||
createdAt: now,
|
||||
updatedAt: now,
|
||||
@@ -1647,42 +1655,33 @@ export const auth = betterAuth({
|
||||
redirectURI: `${getBaseUrl()}/api/auth/oauth2/callback/slack`,
|
||||
getUserInfo: async (tokens) => {
|
||||
try {
|
||||
const response = await fetch('https://slack.com/api/auth.test', {
|
||||
headers: {
|
||||
Authorization: `Bearer ${tokens.accessToken}`,
|
||||
},
|
||||
})
|
||||
logger.info('Creating Slack bot profile from token data')
|
||||
|
||||
if (!response.ok) {
|
||||
logger.error('Slack auth.test failed', {
|
||||
status: response.status,
|
||||
statusText: response.statusText,
|
||||
})
|
||||
return null
|
||||
// Extract user identifier from tokens if possible
|
||||
let userId = 'slack-bot'
|
||||
if (tokens.idToken) {
|
||||
try {
|
||||
const decodedToken = JSON.parse(
|
||||
Buffer.from(tokens.idToken.split('.')[1], 'base64').toString()
|
||||
)
|
||||
if (decodedToken.sub) {
|
||||
userId = decodedToken.sub
|
||||
}
|
||||
} catch (e) {
|
||||
logger.warn('Failed to decode Slack ID token', { error: e })
|
||||
}
|
||||
}
|
||||
|
||||
const data = await response.json()
|
||||
|
||||
if (!data.ok) {
|
||||
logger.error('Slack auth.test returned error', { error: data.error })
|
||||
return null
|
||||
}
|
||||
|
||||
const teamId = data.team_id || 'unknown'
|
||||
const userId = data.user_id || data.bot_id || 'bot'
|
||||
const teamName = data.team || 'Slack Workspace'
|
||||
|
||||
const uniqueId = `${teamId}-${userId}`
|
||||
|
||||
logger.info('Slack credential identifier', { teamId, userId, uniqueId, teamName })
|
||||
const uniqueId = `${userId}-${Date.now()}`
|
||||
const now = new Date()
|
||||
|
||||
return {
|
||||
id: uniqueId,
|
||||
name: teamName,
|
||||
email: `${teamId}-${userId}@slack.bot`,
|
||||
name: 'Slack Bot',
|
||||
email: `${uniqueId.replace(/[^a-zA-Z0-9]/g, '')}@slack.bot`,
|
||||
emailVerified: false,
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
createdAt: now,
|
||||
updatedAt: now,
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error('Error creating Slack bot profile:', { error })
|
||||
@@ -1723,7 +1722,7 @@ export const auth = betterAuth({
|
||||
const data = await response.json()
|
||||
const now = new Date()
|
||||
|
||||
const userId = data.user_id || 'user'
|
||||
const userId = data.user_id || `webflow-${Date.now()}`
|
||||
const uniqueId = `webflow-${userId}`
|
||||
|
||||
return {
|
||||
|
||||
@@ -33,7 +33,6 @@ export const ToolIds = z.enum([
|
||||
'knowledge_base',
|
||||
'manage_custom_tool',
|
||||
'manage_mcp_tool',
|
||||
'sleep',
|
||||
])
|
||||
export type ToolId = z.infer<typeof ToolIds>
|
||||
|
||||
@@ -253,14 +252,6 @@ export const ToolArgSchemas = {
|
||||
.optional()
|
||||
.describe('Required for add and edit operations. The MCP server configuration.'),
|
||||
}),
|
||||
|
||||
sleep: z.object({
|
||||
seconds: z
|
||||
.number()
|
||||
.min(0)
|
||||
.max(180)
|
||||
.describe('The number of seconds to sleep (0-180, max 3 minutes)'),
|
||||
}),
|
||||
} as const
|
||||
export type ToolArgSchemaMap = typeof ToolArgSchemas
|
||||
|
||||
@@ -327,7 +318,6 @@ export const ToolSSESchemas = {
|
||||
knowledge_base: toolCallSSEFor('knowledge_base', ToolArgSchemas.knowledge_base),
|
||||
manage_custom_tool: toolCallSSEFor('manage_custom_tool', ToolArgSchemas.manage_custom_tool),
|
||||
manage_mcp_tool: toolCallSSEFor('manage_mcp_tool', ToolArgSchemas.manage_mcp_tool),
|
||||
sleep: toolCallSSEFor('sleep', ToolArgSchemas.sleep),
|
||||
} as const
|
||||
export type ToolSSESchemaMap = typeof ToolSSESchemas
|
||||
|
||||
@@ -562,11 +552,6 @@ export const ToolResultSchemas = {
|
||||
serverName: z.string().optional(),
|
||||
message: z.string().optional(),
|
||||
}),
|
||||
sleep: z.object({
|
||||
success: z.boolean(),
|
||||
seconds: z.number(),
|
||||
message: z.string().optional(),
|
||||
}),
|
||||
} as const
|
||||
export type ToolResultSchemaMap = typeof ToolResultSchemas
|
||||
|
||||
|
||||
@@ -1,144 +0,0 @@
|
||||
import { Loader2, MinusCircle, Moon, XCircle } from 'lucide-react'
|
||||
import {
|
||||
BaseClientTool,
|
||||
type BaseClientToolMetadata,
|
||||
ClientToolCallState,
|
||||
} from '@/lib/copilot/tools/client/base-tool'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
|
||||
/** Maximum sleep duration in seconds (3 minutes) */
|
||||
const MAX_SLEEP_SECONDS = 180
|
||||
|
||||
/** Track sleep start times for calculating elapsed time on wake */
|
||||
const sleepStartTimes: Record<string, number> = {}
|
||||
|
||||
interface SleepArgs {
|
||||
seconds?: number
|
||||
}
|
||||
|
||||
/**
|
||||
* Format seconds into a human-readable duration string
|
||||
*/
|
||||
function formatDuration(seconds: number): string {
|
||||
if (seconds >= 60) {
|
||||
return `${Math.round(seconds / 60)} minute${seconds >= 120 ? 's' : ''}`
|
||||
}
|
||||
return `${seconds} second${seconds !== 1 ? 's' : ''}`
|
||||
}
|
||||
|
||||
export class SleepClientTool extends BaseClientTool {
|
||||
static readonly id = 'sleep'
|
||||
|
||||
constructor(toolCallId: string) {
|
||||
super(toolCallId, SleepClientTool.id, SleepClientTool.metadata)
|
||||
}
|
||||
|
||||
static readonly metadata: BaseClientToolMetadata = {
|
||||
displayNames: {
|
||||
[ClientToolCallState.generating]: { text: 'Preparing to sleep', icon: Loader2 },
|
||||
[ClientToolCallState.pending]: { text: 'Sleeping', icon: Loader2 },
|
||||
[ClientToolCallState.executing]: { text: 'Sleeping', icon: Loader2 },
|
||||
[ClientToolCallState.success]: { text: 'Finished sleeping', icon: Moon },
|
||||
[ClientToolCallState.error]: { text: 'Sleep interrupted', icon: XCircle },
|
||||
[ClientToolCallState.rejected]: { text: 'Sleep skipped', icon: MinusCircle },
|
||||
[ClientToolCallState.aborted]: { text: 'Sleep aborted', icon: MinusCircle },
|
||||
[ClientToolCallState.background]: { text: 'Resumed', icon: Moon },
|
||||
},
|
||||
// No interrupt - auto-execute immediately
|
||||
getDynamicText: (params, state) => {
|
||||
const seconds = params?.seconds
|
||||
if (typeof seconds === 'number' && seconds > 0) {
|
||||
const displayTime = formatDuration(seconds)
|
||||
switch (state) {
|
||||
case ClientToolCallState.success:
|
||||
return `Slept for ${displayTime}`
|
||||
case ClientToolCallState.executing:
|
||||
case ClientToolCallState.pending:
|
||||
return `Sleeping for ${displayTime}`
|
||||
case ClientToolCallState.generating:
|
||||
return `Preparing to sleep for ${displayTime}`
|
||||
case ClientToolCallState.error:
|
||||
return `Failed to sleep for ${displayTime}`
|
||||
case ClientToolCallState.rejected:
|
||||
return `Skipped sleeping for ${displayTime}`
|
||||
case ClientToolCallState.aborted:
|
||||
return `Aborted sleeping for ${displayTime}`
|
||||
case ClientToolCallState.background: {
|
||||
// Calculate elapsed time from when sleep started
|
||||
const elapsedSeconds = params?._elapsedSeconds
|
||||
if (typeof elapsedSeconds === 'number' && elapsedSeconds > 0) {
|
||||
return `Resumed after ${formatDuration(Math.round(elapsedSeconds))}`
|
||||
}
|
||||
return 'Resumed early'
|
||||
}
|
||||
}
|
||||
}
|
||||
return undefined
|
||||
},
|
||||
}
|
||||
|
||||
/**
|
||||
* Get elapsed seconds since sleep started
|
||||
*/
|
||||
getElapsedSeconds(): number {
|
||||
const startTime = sleepStartTimes[this.toolCallId]
|
||||
if (!startTime) return 0
|
||||
return (Date.now() - startTime) / 1000
|
||||
}
|
||||
|
||||
async handleReject(): Promise<void> {
|
||||
await super.handleReject()
|
||||
this.setState(ClientToolCallState.rejected)
|
||||
}
|
||||
|
||||
async handleAccept(args?: SleepArgs): Promise<void> {
|
||||
const logger = createLogger('SleepClientTool')
|
||||
|
||||
// Use a timeout slightly longer than max sleep (3 minutes + buffer)
|
||||
const timeoutMs = (MAX_SLEEP_SECONDS + 30) * 1000
|
||||
|
||||
await this.executeWithTimeout(async () => {
|
||||
const params = args || {}
|
||||
logger.debug('handleAccept() called', {
|
||||
toolCallId: this.toolCallId,
|
||||
state: this.getState(),
|
||||
hasArgs: !!args,
|
||||
seconds: params.seconds,
|
||||
})
|
||||
|
||||
// Validate and clamp seconds
|
||||
let seconds = typeof params.seconds === 'number' ? params.seconds : 0
|
||||
if (seconds < 0) seconds = 0
|
||||
if (seconds > MAX_SLEEP_SECONDS) seconds = MAX_SLEEP_SECONDS
|
||||
|
||||
logger.debug('Starting sleep', { seconds })
|
||||
|
||||
// Track start time for elapsed calculation
|
||||
sleepStartTimes[this.toolCallId] = Date.now()
|
||||
|
||||
this.setState(ClientToolCallState.executing)
|
||||
|
||||
try {
|
||||
// Sleep for the specified duration
|
||||
await new Promise((resolve) => setTimeout(resolve, seconds * 1000))
|
||||
|
||||
logger.debug('Sleep completed successfully')
|
||||
this.setState(ClientToolCallState.success)
|
||||
await this.markToolComplete(200, `Slept for ${seconds} seconds`)
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : String(error)
|
||||
logger.error('Sleep failed', { error: message })
|
||||
this.setState(ClientToolCallState.error)
|
||||
await this.markToolComplete(500, message)
|
||||
} finally {
|
||||
// Clean up start time tracking
|
||||
delete sleepStartTimes[this.toolCallId]
|
||||
}
|
||||
}, timeoutMs)
|
||||
}
|
||||
|
||||
async execute(args?: SleepArgs): Promise<void> {
|
||||
// Auto-execute without confirmation - go straight to executing
|
||||
await this.handleAccept(args)
|
||||
}
|
||||
}
|
||||
@@ -8,7 +8,6 @@ import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { getBlockOutputs } from '@/lib/workflows/blocks/block-outputs'
|
||||
import { extractAndPersistCustomTools } from '@/lib/workflows/persistence/custom-tools-persistence'
|
||||
import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/persistence/utils'
|
||||
import { isValidKey } from '@/lib/workflows/sanitization/key-validation'
|
||||
import { validateWorkflowState } from '@/lib/workflows/sanitization/validation'
|
||||
import { getAllBlocks, getBlock } from '@/blocks/registry'
|
||||
import type { SubBlockConfig } from '@/blocks/types'
|
||||
@@ -851,18 +850,13 @@ function applyOperationsToWorkflowState(
|
||||
* Reorder operations to ensure correct execution sequence:
|
||||
* 1. delete - Remove blocks first to free up IDs and clean state
|
||||
* 2. extract_from_subflow - Extract blocks from subflows before modifications
|
||||
* 3. add - Create new blocks (sorted by connection dependencies)
|
||||
* 3. add - Create new blocks so they exist before being referenced
|
||||
* 4. insert_into_subflow - Insert blocks into subflows (sorted by parent dependency)
|
||||
* 5. edit - Edit existing blocks last, so connections to newly added blocks work
|
||||
*
|
||||
* This ordering is CRITICAL: operations may reference blocks being added/inserted
|
||||
* in the same batch. Without proper ordering, target blocks wouldn't exist yet.
|
||||
*
|
||||
* For add operations, we use a two-pass approach:
|
||||
* - Pass 1: Create all blocks (without connections)
|
||||
* - Pass 2: Add all connections (now all blocks exist)
|
||||
* This ensures that if block A connects to block B, and both are being added,
|
||||
* B will exist when we try to create the edge from A to B.
|
||||
* This ordering is CRITICAL: edit operations may reference blocks being added
|
||||
* in the same batch (e.g., connecting block A to newly added block B).
|
||||
* Without proper ordering, the target block wouldn't exist yet.
|
||||
*/
|
||||
const deletes = operations.filter((op) => op.operation_type === 'delete')
|
||||
const extracts = operations.filter((op) => op.operation_type === 'extract_from_subflow')
|
||||
@@ -874,8 +868,6 @@ function applyOperationsToWorkflowState(
|
||||
// This handles cases where a loop/parallel is being added along with its children
|
||||
const sortedInserts = topologicalSortInserts(inserts, adds)
|
||||
|
||||
// We'll process add operations in two passes (handled in the switch statement below)
|
||||
// This is tracked via a separate flag to know which pass we're in
|
||||
const orderedOperations: EditWorkflowOperation[] = [
|
||||
...deletes,
|
||||
...extracts,
|
||||
@@ -885,46 +877,15 @@ function applyOperationsToWorkflowState(
|
||||
]
|
||||
|
||||
logger.info('Operations after reordering:', {
|
||||
totalOperations: orderedOperations.length,
|
||||
deleteCount: deletes.length,
|
||||
extractCount: extracts.length,
|
||||
addCount: adds.length,
|
||||
insertCount: sortedInserts.length,
|
||||
editCount: edits.length,
|
||||
operationOrder: orderedOperations.map(
|
||||
order: orderedOperations.map(
|
||||
(op) =>
|
||||
`${op.operation_type}:${op.block_id}${op.params?.subflowId ? `(parent:${op.params.subflowId})` : ''}`
|
||||
),
|
||||
})
|
||||
|
||||
// Two-pass processing for add operations:
|
||||
// Pass 1: Create all blocks (without connections)
|
||||
// Pass 2: Add all connections (all blocks now exist)
|
||||
const addOperationsWithConnections: Array<{
|
||||
blockId: string
|
||||
connections: Record<string, any>
|
||||
}> = []
|
||||
|
||||
for (const operation of orderedOperations) {
|
||||
const { operation_type, block_id, params } = operation
|
||||
|
||||
// CRITICAL: Validate block_id is a valid string and not "undefined"
|
||||
// This prevents undefined keys from being set in the workflow state
|
||||
if (!isValidKey(block_id)) {
|
||||
logSkippedItem(skippedItems, {
|
||||
type: 'missing_required_params',
|
||||
operationType: operation_type,
|
||||
blockId: String(block_id || 'invalid'),
|
||||
reason: `Invalid block_id "${block_id}" (type: ${typeof block_id}) - operation skipped. Block IDs must be valid non-empty strings.`,
|
||||
})
|
||||
logger.error('Invalid block_id detected in operation', {
|
||||
operation_type,
|
||||
block_id,
|
||||
block_id_type: typeof block_id,
|
||||
})
|
||||
continue
|
||||
}
|
||||
|
||||
logger.debug(`Executing operation: ${operation_type} for block ${block_id}`, {
|
||||
params: params ? Object.keys(params) : [],
|
||||
currentBlockCount: Object.keys(modifiedState.blocks).length,
|
||||
@@ -1167,22 +1128,6 @@ function applyOperationsToWorkflowState(
|
||||
|
||||
// Add new nested blocks
|
||||
Object.entries(params.nestedNodes).forEach(([childId, childBlock]: [string, any]) => {
|
||||
// Validate childId is a valid string
|
||||
if (!isValidKey(childId)) {
|
||||
logSkippedItem(skippedItems, {
|
||||
type: 'missing_required_params',
|
||||
operationType: 'add_nested_node',
|
||||
blockId: String(childId || 'invalid'),
|
||||
reason: `Invalid childId "${childId}" in nestedNodes - child block skipped`,
|
||||
})
|
||||
logger.error('Invalid childId detected in nestedNodes', {
|
||||
parentBlockId: block_id,
|
||||
childId,
|
||||
childId_type: typeof childId,
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
const childBlockState = createBlockFromParams(
|
||||
childId,
|
||||
childBlock,
|
||||
@@ -1415,22 +1360,6 @@ function applyOperationsToWorkflowState(
|
||||
// Handle nested nodes (for loops/parallels created from scratch)
|
||||
if (params.nestedNodes) {
|
||||
Object.entries(params.nestedNodes).forEach(([childId, childBlock]: [string, any]) => {
|
||||
// Validate childId is a valid string
|
||||
if (!isValidKey(childId)) {
|
||||
logSkippedItem(skippedItems, {
|
||||
type: 'missing_required_params',
|
||||
operationType: 'add_nested_node',
|
||||
blockId: String(childId || 'invalid'),
|
||||
reason: `Invalid childId "${childId}" in nestedNodes - child block skipped`,
|
||||
})
|
||||
logger.error('Invalid childId detected in nestedNodes', {
|
||||
parentBlockId: block_id,
|
||||
childId,
|
||||
childId_type: typeof childId,
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
const childBlockState = createBlockFromParams(
|
||||
childId,
|
||||
childBlock,
|
||||
@@ -1439,22 +1368,21 @@ function applyOperationsToWorkflowState(
|
||||
)
|
||||
modifiedState.blocks[childId] = childBlockState
|
||||
|
||||
// Defer connection processing to ensure all blocks exist first
|
||||
if (childBlock.connections) {
|
||||
addOperationsWithConnections.push({
|
||||
blockId: childId,
|
||||
connections: childBlock.connections,
|
||||
})
|
||||
addConnectionsAsEdges(
|
||||
modifiedState,
|
||||
childId,
|
||||
childBlock.connections,
|
||||
logger,
|
||||
skippedItems
|
||||
)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
// Defer connection processing to ensure all blocks exist first (pass 2)
|
||||
// Add connections as edges
|
||||
if (params.connections) {
|
||||
addOperationsWithConnections.push({
|
||||
blockId: block_id,
|
||||
connections: params.connections,
|
||||
})
|
||||
addConnectionsAsEdges(modifiedState, block_id, params.connections, logger, skippedItems)
|
||||
}
|
||||
break
|
||||
}
|
||||
@@ -1578,18 +1506,13 @@ function applyOperationsToWorkflowState(
|
||||
modifiedState.blocks[block_id] = newBlock
|
||||
}
|
||||
|
||||
// Defer connection processing to ensure all blocks exist first
|
||||
// This is particularly important when multiple blocks are being inserted
|
||||
// and they have connections to each other
|
||||
// Add/update connections as edges
|
||||
if (params.connections) {
|
||||
// Remove existing edges from this block first
|
||||
// Remove existing edges from this block
|
||||
modifiedState.edges = modifiedState.edges.filter((edge: any) => edge.source !== block_id)
|
||||
|
||||
// Add to deferred connections list
|
||||
addOperationsWithConnections.push({
|
||||
blockId: block_id,
|
||||
connections: params.connections,
|
||||
})
|
||||
// Add new connections
|
||||
addConnectionsAsEdges(modifiedState, block_id, params.connections, logger, skippedItems)
|
||||
}
|
||||
break
|
||||
}
|
||||
@@ -1639,34 +1562,6 @@ function applyOperationsToWorkflowState(
|
||||
}
|
||||
}
|
||||
|
||||
// Pass 2: Add all deferred connections from add/insert operations
|
||||
// Now all blocks exist (from add, insert, and edit operations), so connections can be safely created
|
||||
// This ensures that if block A connects to block B, and both are being added/inserted,
|
||||
// B will exist when we create the edge from A to B
|
||||
if (addOperationsWithConnections.length > 0) {
|
||||
logger.info('Processing deferred connections from add/insert operations', {
|
||||
deferredConnectionCount: addOperationsWithConnections.length,
|
||||
totalBlocks: Object.keys(modifiedState.blocks).length,
|
||||
})
|
||||
|
||||
for (const { blockId, connections } of addOperationsWithConnections) {
|
||||
// Verify the source block still exists (it might have been deleted by a later operation)
|
||||
if (!modifiedState.blocks[blockId]) {
|
||||
logger.warn('Source block no longer exists for deferred connection', {
|
||||
blockId,
|
||||
availableBlocks: Object.keys(modifiedState.blocks),
|
||||
})
|
||||
continue
|
||||
}
|
||||
|
||||
addConnectionsAsEdges(modifiedState, blockId, connections, logger, skippedItems)
|
||||
}
|
||||
|
||||
logger.info('Finished processing deferred connections', {
|
||||
totalEdges: modifiedState.edges.length,
|
||||
})
|
||||
}
|
||||
|
||||
// Regenerate loops and parallels after modifications
|
||||
modifiedState.loops = generateLoopBlocks(modifiedState.blocks)
|
||||
modifiedState.parallels = generateParallelBlocks(modifiedState.blocks)
|
||||
|
||||
@@ -237,6 +237,8 @@ export const env = createEnv({
|
||||
WORDPRESS_CLIENT_SECRET: z.string().optional(), // WordPress.com OAuth client secret
|
||||
SPOTIFY_CLIENT_ID: z.string().optional(), // Spotify OAuth client ID
|
||||
SPOTIFY_CLIENT_SECRET: z.string().optional(), // Spotify OAuth client secret
|
||||
SERVICENOW_CLIENT_ID: z.string().optional(), // ServiceNow OAuth client ID
|
||||
SERVICENOW_CLIENT_SECRET: z.string().optional(), // ServiceNow OAuth client secret
|
||||
|
||||
// E2B Remote Code Execution
|
||||
E2B_ENABLED: z.string().optional(), // Enable E2B remote code execution
|
||||
@@ -290,8 +292,13 @@ export const env = createEnv({
|
||||
|
||||
// Billing
|
||||
NEXT_PUBLIC_BILLING_ENABLED: z.boolean().optional(), // Enable billing enforcement and usage tracking (client-side)
|
||||
|
||||
// Google Services - For client-side Google integrations
|
||||
NEXT_PUBLIC_GOOGLE_CLIENT_ID: z.string().optional(), // Google OAuth client ID for browser auth
|
||||
|
||||
// Analytics & Tracking
|
||||
NEXT_PUBLIC_GOOGLE_API_KEY: z.string().optional(), // Google API key for client-side API calls
|
||||
NEXT_PUBLIC_GOOGLE_PROJECT_NUMBER: z.string().optional(), // Google project number for Drive picker
|
||||
NEXT_PUBLIC_POSTHOG_ENABLED: z.boolean().optional(), // Enable PostHog analytics (client-side)
|
||||
NEXT_PUBLIC_POSTHOG_KEY: z.string().optional(), // PostHog project API key
|
||||
|
||||
@@ -331,6 +338,9 @@ export const env = createEnv({
|
||||
experimental__runtimeEnv: {
|
||||
NEXT_PUBLIC_APP_URL: process.env.NEXT_PUBLIC_APP_URL,
|
||||
NEXT_PUBLIC_BILLING_ENABLED: process.env.NEXT_PUBLIC_BILLING_ENABLED,
|
||||
NEXT_PUBLIC_GOOGLE_CLIENT_ID: process.env.NEXT_PUBLIC_GOOGLE_CLIENT_ID,
|
||||
NEXT_PUBLIC_GOOGLE_API_KEY: process.env.NEXT_PUBLIC_GOOGLE_API_KEY,
|
||||
NEXT_PUBLIC_GOOGLE_PROJECT_NUMBER: process.env.NEXT_PUBLIC_GOOGLE_PROJECT_NUMBER,
|
||||
NEXT_PUBLIC_SOCKET_URL: process.env.NEXT_PUBLIC_SOCKET_URL,
|
||||
NEXT_PUBLIC_BRAND_NAME: process.env.NEXT_PUBLIC_BRAND_NAME,
|
||||
NEXT_PUBLIC_BRAND_LOGO_URL: process.env.NEXT_PUBLIC_BRAND_LOGO_URL,
|
||||
|
||||
@@ -1,7 +1,14 @@
|
||||
import { env } from '@/lib/core/config/env'
|
||||
import type { TokenBucketConfig } from './storage'
|
||||
|
||||
export type TriggerType = 'api' | 'webhook' | 'schedule' | 'manual' | 'chat' | 'api-endpoint'
|
||||
export type TriggerType =
|
||||
| 'api'
|
||||
| 'webhook'
|
||||
| 'schedule'
|
||||
| 'manual'
|
||||
| 'chat'
|
||||
| 'mcp'
|
||||
| 'api-endpoint'
|
||||
|
||||
export type RateLimitCounterType = 'sync' | 'async' | 'api-endpoint'
|
||||
|
||||
|
||||
@@ -204,17 +204,12 @@ async function ensureWorker(): Promise<void> {
|
||||
|
||||
import('node:child_process').then(({ spawn }) => {
|
||||
worker = spawn('node', [workerPath], {
|
||||
stdio: ['ignore', 'pipe', 'pipe', 'ipc'],
|
||||
stdio: ['ignore', 'pipe', 'inherit', 'ipc'],
|
||||
serialization: 'json',
|
||||
})
|
||||
|
||||
worker.on('message', handleWorkerMessage)
|
||||
|
||||
let stderrData = ''
|
||||
worker.stderr?.on('data', (data: Buffer) => {
|
||||
stderrData += data.toString()
|
||||
})
|
||||
|
||||
const startTimeout = setTimeout(() => {
|
||||
worker?.kill()
|
||||
worker = null
|
||||
@@ -237,42 +232,20 @@ async function ensureWorker(): Promise<void> {
|
||||
}
|
||||
worker.on('message', readyHandler)
|
||||
|
||||
worker.on('exit', (code) => {
|
||||
worker.on('exit', () => {
|
||||
if (workerIdleTimeout) {
|
||||
clearTimeout(workerIdleTimeout)
|
||||
workerIdleTimeout = null
|
||||
}
|
||||
|
||||
const wasStartupFailure = !workerReady && workerReadyPromise
|
||||
|
||||
worker = null
|
||||
workerReady = false
|
||||
workerReadyPromise = null
|
||||
|
||||
let errorMessage = 'Worker process exited unexpectedly'
|
||||
if (stderrData.includes('isolated_vm') || stderrData.includes('MODULE_NOT_FOUND')) {
|
||||
errorMessage =
|
||||
'Code execution requires the isolated-vm native module which failed to load. ' +
|
||||
'This usually means the module needs to be rebuilt for your Node.js version. ' +
|
||||
'Please run: cd node_modules/isolated-vm && npm rebuild'
|
||||
logger.error('isolated-vm module failed to load', { stderr: stderrData })
|
||||
} else if (stderrData) {
|
||||
errorMessage = `Worker process failed: ${stderrData.slice(0, 500)}`
|
||||
logger.error('Worker process failed', { stderr: stderrData })
|
||||
}
|
||||
|
||||
if (wasStartupFailure) {
|
||||
clearTimeout(startTimeout)
|
||||
reject(new Error(errorMessage))
|
||||
return
|
||||
}
|
||||
|
||||
for (const [id, pending] of pendingExecutions) {
|
||||
clearTimeout(pending.timeout)
|
||||
pending.resolve({
|
||||
result: null,
|
||||
stdout: '',
|
||||
error: { message: errorMessage, name: 'WorkerError' },
|
||||
error: { message: 'Worker process exited unexpectedly', name: 'WorkerError' },
|
||||
})
|
||||
pendingExecutions.delete(id)
|
||||
}
|
||||
|
||||
@@ -108,7 +108,7 @@ export interface PreprocessExecutionOptions {
|
||||
// Required fields
|
||||
workflowId: string
|
||||
userId: string // The authenticated user ID
|
||||
triggerType: 'manual' | 'api' | 'webhook' | 'schedule' | 'chat'
|
||||
triggerType: 'manual' | 'api' | 'webhook' | 'schedule' | 'chat' | 'mcp'
|
||||
executionId: string
|
||||
requestId: string
|
||||
|
||||
|
||||
@@ -36,6 +36,7 @@ export function getTriggerOptions(): TriggerOption[] {
|
||||
{ value: 'schedule', label: 'Schedule', color: '#059669' },
|
||||
{ value: 'chat', label: 'Chat', color: '#7c3aed' },
|
||||
{ value: 'webhook', label: 'Webhook', color: '#ea580c' },
|
||||
{ value: 'mcp', label: 'MCP', color: '#dc2626' },
|
||||
]
|
||||
|
||||
for (const trigger of triggers) {
|
||||
|
||||
apps/sim/lib/mcp/serve-auth.ts (new file, 115 lines)
@@ -0,0 +1,115 @@
|
||||
import { db } from '@sim/db'
|
||||
import { workflowMcpServer } from '@sim/db/schema'
|
||||
import { eq } from 'drizzle-orm'
|
||||
import type { NextRequest } from 'next/server'
|
||||
import { checkHybridAuth } from '@/lib/auth/hybrid'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
|
||||
const logger = createLogger('McpServeAuth')
|
||||
|
||||
export interface McpServeAuthResult {
|
||||
success: boolean
|
||||
userId?: string
|
||||
workspaceId?: string
|
||||
error?: string
|
||||
}
|
||||
|
||||
/**
|
||||
* Validates authentication for accessing a workflow MCP server.
|
||||
*
|
||||
* Authentication can be done via:
|
||||
* 1. API Key (X-API-Key header) - for programmatic access
|
||||
* 2. Session cookie - for logged-in users
|
||||
*
|
||||
* The user must have at least read access to the workspace that owns the server.
|
||||
*/
|
||||
export async function validateMcpServeAuth(
|
||||
request: NextRequest,
|
||||
serverId: string
|
||||
): Promise<McpServeAuthResult> {
|
||||
try {
|
||||
// First, get the server to find its workspace
|
||||
const [server] = await db
|
||||
.select({
|
||||
id: workflowMcpServer.id,
|
||||
workspaceId: workflowMcpServer.workspaceId,
|
||||
isPublished: workflowMcpServer.isPublished,
|
||||
})
|
||||
.from(workflowMcpServer)
|
||||
.where(eq(workflowMcpServer.id, serverId))
|
||||
.limit(1)
|
||||
|
||||
if (!server) {
|
||||
return { success: false, error: 'Server not found' }
|
||||
}
|
||||
|
||||
if (!server.isPublished) {
|
||||
return { success: false, error: 'Server is not published' }
|
||||
}
|
||||
|
||||
// Check authentication using hybrid auth (supports both session and API key)
|
||||
const auth = await checkHybridAuth(request, { requireWorkflowId: false })
|
||||
|
||||
if (!auth.success || !auth.userId) {
|
||||
return { success: false, error: auth.error || 'Authentication required' }
|
||||
}
|
||||
|
||||
return {
|
||||
success: true,
|
||||
userId: auth.userId,
|
||||
workspaceId: server.workspaceId,
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error('Error validating MCP serve auth:', error)
|
||||
return {
|
||||
success: false,
|
||||
error: 'Authentication validation failed',
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get connection instructions for an MCP server.
|
||||
* This provides the information users need to connect their MCP clients.
|
||||
*/
|
||||
export function getMcpServerConnectionInfo(
|
||||
serverId: string,
|
||||
serverName: string,
|
||||
baseUrl: string
|
||||
): {
|
||||
sseUrl: string
|
||||
httpUrl: string
|
||||
authHeader: string
|
||||
instructions: string
|
||||
} {
|
||||
const sseUrl = `${baseUrl}/api/mcp/serve/${serverId}/sse`
|
||||
const httpUrl = `${baseUrl}/api/mcp/serve/${serverId}`
|
||||
|
||||
return {
|
||||
sseUrl,
|
||||
httpUrl,
|
||||
authHeader: 'X-API-Key: YOUR_SIM_API_KEY',
|
||||
instructions: `
|
||||
To connect to this MCP server from Cursor or Claude Desktop:
|
||||
|
||||
1. Get your Sim API key from Settings -> API Keys
|
||||
2. Configure your MCP client with:
|
||||
- Server URL: ${sseUrl}
|
||||
- Authentication: Add header "X-API-Key" with your API key
|
||||
|
||||
For Cursor, add to your MCP configuration:
|
||||
{
|
||||
"mcpServers": {
|
||||
"${serverName.toLowerCase().replace(/\s+/g, '-')}": {
|
||||
"url": "${sseUrl}",
|
||||
"headers": {
|
||||
"X-API-Key": "YOUR_SIM_API_KEY"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
For Claude Desktop, configure similarly in your settings.
|
||||
`.trim(),
|
||||
}
|
||||
}
|
||||
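Worked example (values hypothetical): calling the helper above with a server id of 'abc123', name 'Support Tools', and base URL 'https://sim.example.com' yields:

const info = getMcpServerConnectionInfo('abc123', 'Support Tools', 'https://sim.example.com')
// info.sseUrl  === 'https://sim.example.com/api/mcp/serve/abc123/sse'
// info.httpUrl === 'https://sim.example.com/api/mcp/serve/abc123'
// and the generated Cursor config keys the server as "support-tools"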
apps/sim/lib/mcp/workflow-mcp-server.ts (new file, 399 lines)
@@ -0,0 +1,399 @@
|
||||
/**
|
||||
* Workflow MCP Server
|
||||
*
|
||||
* Creates an MCP server using the official @modelcontextprotocol/sdk
|
||||
* that exposes workflows as tools via a Next.js-compatible transport.
|
||||
*/
|
||||
|
||||
import { McpServer } from '@modelcontextprotocol/sdk/server/mcp.js'
|
||||
import type { Transport } from '@modelcontextprotocol/sdk/shared/transport.js'
|
||||
import type { JSONRPCMessage } from '@modelcontextprotocol/sdk/types.js'
|
||||
import { db } from '@sim/db'
|
||||
import { workflow, workflowMcpTool } from '@sim/db/schema'
|
||||
import { eq } from 'drizzle-orm'
|
||||
import { z } from 'zod'
|
||||
import { getBaseUrl } from '@/lib/core/utils/urls'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { fileItemZodSchema } from '@/lib/mcp/workflow-tool-schema'
|
||||
|
||||
const logger = createLogger('WorkflowMcpServer')
|
||||
|
||||
/**
|
||||
* Convert stored JSON schema to Zod schema.
|
||||
* Uses fileItemZodSchema from workflow-tool-schema for file arrays.
|
||||
*/
|
||||
function jsonSchemaToZodShape(schema: Record<string, unknown> | null): z.ZodRawShape | undefined {
|
||||
if (!schema || schema.type !== 'object') {
|
||||
return undefined
|
||||
}
|
||||
|
||||
const properties = schema.properties as
|
||||
| Record<string, { type: string; description?: string; items?: unknown }>
|
||||
| undefined
|
||||
if (!properties || Object.keys(properties).length === 0) {
|
||||
return undefined
|
||||
}
|
||||
|
||||
const shape: z.ZodRawShape = {}
|
||||
const required = (schema.required as string[] | undefined) || []
|
||||
|
||||
for (const [key, prop] of Object.entries(properties)) {
|
||||
let zodType: z.ZodTypeAny
|
||||
|
||||
// Check if this array has items (file arrays have items.type === 'object')
|
||||
const hasObjectItems =
|
||||
prop.type === 'array' &&
|
||||
prop.items &&
|
||||
typeof prop.items === 'object' &&
|
||||
(prop.items as Record<string, unknown>).type === 'object'
|
||||
|
||||
switch (prop.type) {
|
||||
case 'string':
|
||||
zodType = z.string()
|
||||
break
|
||||
case 'number':
|
||||
zodType = z.number()
|
||||
break
|
||||
case 'boolean':
|
||||
zodType = z.boolean()
|
||||
break
|
||||
case 'array':
|
||||
if (hasObjectItems) {
|
||||
// File arrays - use the shared file item schema
|
||||
zodType = z.array(fileItemZodSchema)
|
||||
} else {
|
||||
zodType = z.array(z.any())
|
||||
}
|
||||
break
|
||||
case 'object':
|
||||
zodType = z.record(z.any())
|
||||
break
|
||||
default:
|
||||
zodType = z.any()
|
||||
}
|
||||
|
||||
if (prop.description) {
|
||||
zodType = zodType.describe(prop.description)
|
||||
}
|
||||
|
||||
if (!required.includes(key)) {
|
||||
zodType = zodType.optional()
|
||||
}
|
||||
|
||||
shape[key] = zodType
|
||||
}
|
||||
|
||||
return Object.keys(shape).length > 0 ? shape : undefined
|
||||
}
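For illustration (the property names here are made up), a stored parameter schema maps to a Zod shape roughly like this:

// Input, as stored in workflowMcpTool.parameterSchema:
//   { type: 'object', properties: { query: { type: 'string', description: 'Search text' }, limit: { type: 'number' } }, required: ['query'] }
// Output of jsonSchemaToZodShape, roughly:
//   { query: z.string().describe('Search text'), limit: z.number().optional() }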
|
||||
|
||||
interface WorkflowTool {
|
||||
id: string
|
||||
toolName: string
|
||||
toolDescription: string | null
|
||||
parameterSchema: Record<string, unknown> | null
|
||||
workflowId: string
|
||||
isEnabled: boolean
|
||||
}
|
||||
|
||||
interface ServerContext {
|
||||
serverId: string
|
||||
serverName: string
|
||||
userId: string
|
||||
workspaceId: string
|
||||
apiKey?: string | null
|
||||
}
|
||||
|
||||
/**
|
||||
* A simple transport for handling single request/response cycles in Next.js.
|
||||
* This transport is designed for stateless request handling where each
|
||||
* request creates a new server instance.
|
||||
*/
|
||||
class NextJsTransport implements Transport {
|
||||
private responseMessage: JSONRPCMessage | null = null
|
||||
private resolveResponse: ((message: JSONRPCMessage) => void) | null = null
|
||||
|
||||
onclose?: () => void
|
||||
onerror?: (error: Error) => void
|
||||
onmessage?: (message: JSONRPCMessage) => void
|
||||
|
||||
async start(): Promise<void> {
|
||||
// No-op for stateless transport
|
||||
}
|
||||
|
||||
async close(): Promise<void> {
|
||||
this.onclose?.()
|
||||
}
|
||||
|
||||
async send(message: JSONRPCMessage): Promise<void> {
|
||||
this.responseMessage = message
|
||||
this.resolveResponse?.(message)
|
||||
}
|
||||
|
||||
/**
|
||||
* Injects a message into the transport as if it had been received from the client
|
||||
*/
|
||||
receiveMessage(message: JSONRPCMessage): void {
|
||||
this.onmessage?.(message)
|
||||
}
|
||||
|
||||
/**
|
||||
* Waits for the server to send a response
|
||||
*/
|
||||
waitForResponse(): Promise<JSONRPCMessage> {
|
||||
if (this.responseMessage) {
|
||||
return Promise.resolve(this.responseMessage)
|
||||
}
|
||||
return new Promise((resolve) => {
|
||||
this.resolveResponse = resolve
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates and configures an MCP server with workflow tools
|
||||
*/
|
||||
async function createConfiguredMcpServer(context: ServerContext): Promise<McpServer> {
|
||||
const { serverId, serverName, apiKey } = context
|
||||
|
||||
// Create the MCP server using the SDK
|
||||
const server = new McpServer({
|
||||
name: serverName,
|
||||
version: '1.0.0',
|
||||
})
|
||||
|
||||
// Load tools from the database
|
||||
const tools = await db
|
||||
.select({
|
||||
id: workflowMcpTool.id,
|
||||
toolName: workflowMcpTool.toolName,
|
||||
toolDescription: workflowMcpTool.toolDescription,
|
||||
parameterSchema: workflowMcpTool.parameterSchema,
|
||||
workflowId: workflowMcpTool.workflowId,
|
||||
isEnabled: workflowMcpTool.isEnabled,
|
||||
})
|
||||
.from(workflowMcpTool)
|
||||
.where(eq(workflowMcpTool.serverId, serverId))
|
||||
|
||||
// Register each enabled tool
|
||||
for (const tool of tools.filter((t) => t.isEnabled)) {
|
||||
const zodSchema = jsonSchemaToZodShape(tool.parameterSchema as Record<string, unknown> | null)
|
||||
|
||||
if (zodSchema) {
|
||||
// Tool with parameters - callback receives (args, extra)
|
||||
server.tool(
|
||||
tool.toolName,
|
||||
tool.toolDescription || `Execute workflow: ${tool.toolName}`,
|
||||
zodSchema,
|
||||
async (args) => {
|
||||
return executeWorkflowTool(tool as WorkflowTool, args, apiKey)
|
||||
}
|
||||
)
|
||||
} else {
|
||||
// Tool without parameters - callback only receives (extra)
|
||||
server.tool(
|
||||
tool.toolName,
|
||||
tool.toolDescription || `Execute workflow: ${tool.toolName}`,
|
||||
async () => {
|
||||
return executeWorkflowTool(tool as WorkflowTool, {}, apiKey)
|
||||
}
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
logger.info(
|
||||
`Created MCP server "${serverName}" with ${tools.filter((t) => t.isEnabled).length} tools`
|
||||
)
|
||||
|
||||
return server
|
||||
}
|
||||
|
||||
/**
|
||||
* Executes a workflow tool and returns the result
|
||||
*/
|
||||
async function executeWorkflowTool(
|
||||
tool: WorkflowTool,
|
||||
args: Record<string, unknown>,
|
||||
apiKey?: string | null
|
||||
): Promise<{
|
||||
content: Array<{ type: 'text'; text: string }>
|
||||
isError?: boolean
|
||||
}> {
|
||||
logger.info(`Executing workflow ${tool.workflowId} via MCP tool ${tool.toolName}`)
|
||||
|
||||
try {
|
||||
// Verify workflow is deployed
|
||||
const [workflowRecord] = await db
|
||||
.select({ id: workflow.id, isDeployed: workflow.isDeployed })
|
||||
.from(workflow)
|
||||
.where(eq(workflow.id, tool.workflowId))
|
||||
.limit(1)
|
||||
|
||||
if (!workflowRecord || !workflowRecord.isDeployed) {
|
||||
return {
|
||||
content: [{ type: 'text', text: JSON.stringify({ error: 'Workflow is not deployed' }) }],
|
||||
isError: true,
|
||||
}
|
||||
}
|
||||
|
||||
// Execute the workflow
|
||||
const baseUrl = getBaseUrl()
|
||||
const executeUrl = `${baseUrl}/api/workflows/${tool.workflowId}/execute`
|
||||
|
||||
const executeHeaders: Record<string, string> = {
|
||||
'Content-Type': 'application/json',
|
||||
}
|
||||
|
||||
if (apiKey) {
|
||||
executeHeaders['X-API-Key'] = apiKey
|
||||
}
|
||||
|
||||
const executeResponse = await fetch(executeUrl, {
|
||||
method: 'POST',
|
||||
headers: executeHeaders,
|
||||
body: JSON.stringify({
|
||||
input: args,
|
||||
triggerType: 'mcp',
|
||||
}),
|
||||
})
|
||||
|
||||
const executeResult = await executeResponse.json()
|
||||
|
||||
if (!executeResponse.ok) {
|
||||
return {
|
||||
content: [
|
||||
{
|
||||
type: 'text',
|
||||
text: JSON.stringify({ error: executeResult.error || 'Workflow execution failed' }),
|
||||
},
|
||||
],
|
||||
isError: true,
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
content: [
|
||||
{
|
||||
type: 'text',
|
||||
text: JSON.stringify(executeResult.output || executeResult, null, 2),
|
||||
},
|
||||
],
|
||||
isError: !executeResult.success,
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error(`Error executing workflow ${tool.workflowId}:`, error)
|
||||
return {
|
||||
content: [{ type: 'text', text: JSON.stringify({ error: 'Tool execution failed' }) }],
|
||||
isError: true,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Handles an MCP JSON-RPC request using the SDK
|
||||
*/
|
||||
export async function handleMcpRequest(
|
||||
context: ServerContext,
|
||||
request: Request
|
||||
): Promise<Response> {
|
||||
try {
|
||||
// Parse the incoming JSON-RPC message
|
||||
const body = await request.json()
|
||||
const message = body as JSONRPCMessage
|
||||
|
||||
// Create transport and server
|
||||
const transport = new NextJsTransport()
|
||||
const server = await createConfiguredMcpServer(context)
|
||||
|
||||
// Connect server to transport
|
||||
await server.connect(transport)
|
||||
|
||||
// Inject the received message
|
||||
transport.receiveMessage(message)
|
||||
|
||||
// Wait for the response
|
||||
const response = await transport.waitForResponse()
|
||||
|
||||
// Clean up
|
||||
await server.close()
|
||||
|
||||
return new Response(JSON.stringify(response), {
|
||||
status: 200,
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'X-MCP-Server-Name': context.serverName,
|
||||
},
|
||||
})
|
||||
} catch (error) {
|
||||
logger.error('Error handling MCP request:', error)
|
||||
|
||||
return new Response(
|
||||
JSON.stringify({
|
||||
jsonrpc: '2.0',
|
||||
id: null,
|
||||
error: {
|
||||
code: -32603,
|
||||
message: 'Internal error',
|
||||
},
|
||||
}),
|
||||
{
|
||||
status: 500,
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
}
|
||||
)
|
||||
}
|
||||
}
|
||||
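A minimal sketch of how a serve route might tie validateMcpServeAuth and handleMcpRequest together. The route path, params shape, and the server-name lookup are assumptions; the actual route handler is not included in this excerpt.

// Hypothetical apps/sim/app/api/mcp/serve/[serverId]/route.ts
import type { NextRequest } from 'next/server'
import { validateMcpServeAuth } from '@/lib/mcp/serve-auth'
import { handleMcpRequest } from '@/lib/mcp/workflow-mcp-server'

export async function POST(request: NextRequest, { params }: { params: { serverId: string } }) {
  const auth = await validateMcpServeAuth(request, params.serverId)
  if (!auth.success || !auth.userId || !auth.workspaceId) {
    return new Response(JSON.stringify({ error: auth.error }), { status: 401 })
  }

  // handleMcpRequest builds a per-request McpServer and returns the JSON-RPC response.
  return handleMcpRequest(
    {
      serverId: params.serverId,
      serverName: 'workflow-server', // in practice, read from the workflowMcpServer record
      userId: auth.userId,
      workspaceId: auth.workspaceId,
      apiKey: request.headers.get('x-api-key'),
    },
    request
  )
}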
|
||||
/**
|
||||
* Creates an SSE stream for MCP notifications (used for GET requests)
|
||||
*/
|
||||
export function createMcpSseStream(context: ServerContext): ReadableStream<Uint8Array> {
|
||||
const encoder = new TextEncoder()
|
||||
let isStreamClosed = false
|
||||
|
||||
return new ReadableStream({
|
||||
async start(controller) {
|
||||
const sendEvent = (event: string, data: unknown) => {
|
||||
if (isStreamClosed) return
|
||||
try {
|
||||
const message = `event: ${event}\ndata: ${JSON.stringify(data)}\n\n`
|
||||
controller.enqueue(encoder.encode(message))
|
||||
} catch {
|
||||
isStreamClosed = true
|
||||
}
|
||||
}
|
||||
|
||||
// Send initial connection event
|
||||
sendEvent('open', { type: 'connection', status: 'connected' })
|
||||
|
||||
// Send server capabilities
|
||||
sendEvent('message', {
|
||||
jsonrpc: '2.0',
|
||||
method: 'notifications/initialized',
|
||||
params: {
|
||||
protocolVersion: '2024-11-05',
|
||||
capabilities: {
|
||||
tools: {},
|
||||
},
|
||||
serverInfo: {
|
||||
name: context.serverName,
|
||||
version: '1.0.0',
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
// Keep connection alive with periodic pings
|
||||
const pingInterval = setInterval(() => {
|
||||
if (isStreamClosed) {
|
||||
clearInterval(pingInterval)
|
||||
return
|
||||
}
|
||||
sendEvent('ping', { timestamp: Date.now() })
|
||||
}, 30000)
|
||||
},
|
||||
|
||||
cancel() {
|
||||
isStreamClosed = true
|
||||
logger.info(`SSE connection closed for server ${context.serverId}`)
|
||||
},
|
||||
})
|
||||
}
|
||||
apps/sim/lib/mcp/workflow-tool-schema.ts (new file, 247 lines)
@@ -0,0 +1,247 @@
|
||||
import { z } from 'zod'
|
||||
import type { InputFormatField } from '@/lib/workflows/types'
|
||||
|
||||
/**
|
||||
* MCP Tool Schema following the JSON Schema specification
|
||||
*/
|
||||
export interface McpToolInputSchema {
|
||||
type: 'object'
|
||||
properties: Record<string, McpToolProperty>
|
||||
required?: string[]
|
||||
}
|
||||
|
||||
export interface McpToolProperty {
|
||||
type: string
|
||||
description?: string
|
||||
items?: McpToolProperty
|
||||
properties?: Record<string, McpToolProperty>
|
||||
}
|
||||
|
||||
export interface McpToolDefinition {
|
||||
name: string
|
||||
description: string
|
||||
inputSchema: McpToolInputSchema
|
||||
}
|
||||
|
||||
/**
|
||||
* File item Zod schema for MCP file inputs.
|
||||
* This is the single source of truth for file structure.
|
||||
*/
|
||||
export const fileItemZodSchema = z.object({
|
||||
name: z.string().describe('File name'),
|
||||
data: z.string().describe('Base64 encoded file content'),
|
||||
mimeType: z.string().describe('MIME type of the file'),
|
||||
})
|
||||
|
||||
/**
|
||||
* Convert InputFormatField type to Zod schema
|
||||
*/
|
||||
function fieldTypeToZod(fieldType: string | undefined, isRequired: boolean): z.ZodTypeAny {
|
||||
let zodType: z.ZodTypeAny
|
||||
|
||||
switch (fieldType) {
|
||||
case 'string':
|
||||
zodType = z.string()
|
||||
break
|
||||
case 'number':
|
||||
zodType = z.number()
|
||||
break
|
||||
case 'boolean':
|
||||
zodType = z.boolean()
|
||||
break
|
||||
case 'object':
|
||||
zodType = z.record(z.any())
|
||||
break
|
||||
case 'array':
|
||||
zodType = z.array(z.any())
|
||||
break
|
||||
case 'files':
|
||||
zodType = z.array(fileItemZodSchema)
|
||||
break
|
||||
default:
|
||||
zodType = z.string()
|
||||
}
|
||||
|
||||
return isRequired ? zodType : zodType.optional()
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate Zod schema shape from InputFormatField array.
|
||||
* This is used directly by the MCP server for tool registration.
|
||||
*/
|
||||
export function generateToolZodSchema(inputFormat: InputFormatField[]): z.ZodRawShape | undefined {
|
||||
if (!inputFormat || inputFormat.length === 0) {
|
||||
return undefined
|
||||
}
|
||||
|
||||
const shape: z.ZodRawShape = {}
|
||||
|
||||
for (const field of inputFormat) {
|
||||
if (!field.name) continue
|
||||
|
||||
const zodType = fieldTypeToZod(field.type, true)
|
||||
shape[field.name] = field.name ? zodType.describe(field.name) : zodType
|
||||
}
|
||||
|
||||
return Object.keys(shape).length > 0 ? shape : undefined
|
||||
}
|
||||
|
||||
/**
|
||||
* Map InputFormatField type to JSON Schema type (for database storage)
|
||||
*/
|
||||
function mapFieldTypeToJsonSchemaType(fieldType: string | undefined): string {
|
||||
switch (fieldType) {
|
||||
case 'string':
|
||||
return 'string'
|
||||
case 'number':
|
||||
return 'number'
|
||||
case 'boolean':
|
||||
return 'boolean'
|
||||
case 'object':
|
||||
return 'object'
|
||||
case 'array':
|
||||
return 'array'
|
||||
case 'files':
|
||||
return 'array'
|
||||
default:
|
||||
return 'string'
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Sanitize a workflow name to be a valid MCP tool name.
|
||||
* Tool names should be lowercase, alphanumeric with underscores.
|
||||
*/
|
||||
export function sanitizeToolName(name: string): string {
|
||||
return (
|
||||
name
|
||||
.toLowerCase()
|
||||
.replace(/[^a-z0-9\s_-]/g, '')
|
||||
.replace(/[\s-]+/g, '_')
|
||||
.replace(/_+/g, '_')
|
||||
.replace(/^_|_$/g, '')
|
||||
.substring(0, 64) || 'workflow_tool'
|
||||
)
|
||||
}
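A few hand-worked examples of the sanitization above (the input names are hypothetical):

sanitizeToolName('Send Invoice - v2')  // 'send_invoice_v2'
sanitizeToolName('  My Workflow!  ')   // 'my_workflow'
sanitizeToolName('🔥')                  // 'workflow_tool' (nothing survives, fallback applies)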
|
||||
|
||||
/**
|
||||
* Generate MCP tool input schema from InputFormatField array.
|
||||
* This converts the workflow's input format definition to JSON Schema format
|
||||
* that MCP clients can use to understand tool parameters.
|
||||
*/
|
||||
export function generateToolInputSchema(inputFormat: InputFormatField[]): McpToolInputSchema {
|
||||
const properties: Record<string, McpToolProperty> = {}
|
||||
const required: string[] = []
|
||||
|
||||
for (const field of inputFormat) {
|
||||
if (!field.name) continue
|
||||
|
||||
const fieldName = field.name
|
||||
const fieldType = mapFieldTypeToJsonSchemaType(field.type)
|
||||
|
||||
const property: McpToolProperty = {
|
||||
type: fieldType,
|
||||
// Use custom description if provided, otherwise use field name
|
||||
description: field.description?.trim() || fieldName,
|
||||
}
|
||||
|
||||
// Handle array types
|
||||
if (fieldType === 'array') {
|
||||
if (field.type === 'files') {
|
||||
property.items = {
|
||||
type: 'object',
|
||||
properties: {
|
||||
name: { type: 'string', description: 'File name' },
|
||||
url: { type: 'string', description: 'File URL' },
|
||||
type: { type: 'string', description: 'MIME type' },
|
||||
size: { type: 'number', description: 'File size in bytes' },
|
||||
},
|
||||
}
|
||||
// Use custom description if provided, otherwise use default
|
||||
if (!field.description?.trim()) {
|
||||
property.description = 'Array of file objects'
|
||||
}
|
||||
} else {
|
||||
property.items = { type: 'string' }
|
||||
}
|
||||
}
|
||||
|
||||
properties[fieldName] = property
|
||||
|
||||
// All fields are considered required by default
|
||||
// (in the future, we could add an optional flag to InputFormatField)
|
||||
required.push(fieldName)
|
||||
}
|
||||
|
||||
return {
|
||||
type: 'object',
|
||||
properties,
|
||||
required: required.length > 0 ? required : undefined,
|
||||
}
|
||||
}
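As a sketch (the field names are hypothetical), an input format with one string field and one files field produces:

const schema = generateToolInputSchema([
  { name: 'query', type: 'string', description: 'Search text' },
  { name: 'attachments', type: 'files' },
] as InputFormatField[])
// => {
//   type: 'object',
//   properties: {
//     query: { type: 'string', description: 'Search text' },
//     attachments: { type: 'array', description: 'Array of file objects', items: { type: 'object', properties: { ... } } },
//   },
//   required: ['query', 'attachments'],
// }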
|
||||
|
||||
/**
|
||||
* Generate a complete MCP tool definition from workflow metadata and input format.
|
||||
*/
|
||||
export function generateToolDefinition(
|
||||
workflowName: string,
|
||||
workflowDescription: string | undefined | null,
|
||||
inputFormat: InputFormatField[],
|
||||
customToolName?: string,
|
||||
customDescription?: string
|
||||
): McpToolDefinition {
|
||||
return {
|
||||
name: customToolName || sanitizeToolName(workflowName),
|
||||
description: customDescription || workflowDescription || `Execute ${workflowName} workflow`,
|
||||
inputSchema: generateToolInputSchema(inputFormat),
|
||||
}
|
||||
}

/**
 * Valid start block types that can have input format
 */
const VALID_START_BLOCK_TYPES = [
  'starter',
  'start',
  'start_trigger',
  'api',
  'api_trigger',
  'input_trigger',
]

/**
 * Extract input format from a workflow's blocks.
 * Looks for any valid start block and extracts its inputFormat configuration.
 */
export function extractInputFormatFromBlocks(
  blocks: Record<string, unknown>
): InputFormatField[] | null {
  // Look for any valid start block
  for (const [, block] of Object.entries(blocks)) {
    if (!block || typeof block !== 'object') continue

    const blockObj = block as Record<string, unknown>
    const blockType = blockObj.type as string

    if (VALID_START_BLOCK_TYPES.includes(blockType)) {
      // Try to get inputFormat from subBlocks
      const subBlocks = blockObj.subBlocks as Record<string, unknown> | undefined
      if (subBlocks?.inputFormat) {
        const inputFormatSubBlock = subBlocks.inputFormat as Record<string, unknown>
        const value = inputFormatSubBlock.value
        if (Array.isArray(value)) {
          return value as InputFormatField[]
        }
      }

      // Try legacy config.params.inputFormat
      const config = blockObj.config as Record<string, unknown> | undefined
      const params = config?.params as Record<string, unknown> | undefined
      if (params?.inputFormat && Array.isArray(params.inputFormat)) {
        return params.inputFormat as InputFormatField[]
      }
    }
  }

  return null
}
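
// --- Illustrative sketch (editor addition, not part of this diff) ---
// A minimal, hypothetical blocks record showing where the input format is read
// from; the block shape is assumed from the lookups above (subBlocks.inputFormat.value).
//
// const blocks = {
//   'block-1': {
//     type: 'api_trigger',
//     subBlocks: {
//       inputFormat: { value: [{ name: 'query', type: 'string' }] },
//     },
//   },
// }
//
// extractInputFormatFromBlocks(blocks) // -> [{ name: 'query', type: 'string' }]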
@@ -29,6 +29,7 @@ import {
  PipedriveIcon,
  RedditIcon,
  SalesforceIcon,
  ServiceNowIcon,
  ShopifyIcon,
  SlackIcon,
  SpotifyIcon,
@@ -69,6 +70,7 @@ export type OAuthProvider =
  | 'salesforce'
  | 'linkedin'
  | 'shopify'
  | 'servicenow'
  | 'zoom'
  | 'wordpress'
  | 'spotify'
@@ -111,6 +113,7 @@ export type OAuthService =
  | 'salesforce'
  | 'linkedin'
  | 'shopify'
  | 'servicenow'
  | 'zoom'
  | 'wordpress'
  | 'spotify'
@@ -618,6 +621,23 @@ export const OAUTH_PROVIDERS: Record<string, OAuthProviderConfig> = {
    },
    defaultService: 'shopify',
  },
  servicenow: {
    id: 'servicenow',
    name: 'ServiceNow',
    icon: (props) => ServiceNowIcon(props),
    services: {
      servicenow: {
        id: 'servicenow',
        name: 'ServiceNow',
        description: 'Manage incidents, tasks, and records in your ServiceNow instance.',
        providerId: 'servicenow',
        icon: (props) => ServiceNowIcon(props),
        baseProviderIcon: (props) => ServiceNowIcon(props),
        scopes: ['useraccount'],
      },
    },
    defaultService: 'servicenow',
  },
  slack: {
    id: 'slack',
    name: 'Slack',
@@ -1487,6 +1507,21 @@ function getProviderAuthConfig(provider: string): ProviderAuthConfig {
        supportsRefreshTokenRotation: false,
      }
    }
    case 'servicenow': {
      // ServiceNow OAuth - token endpoint is instance-specific
      // This is a placeholder; actual token endpoint is set during authorization
      const { clientId, clientSecret } = getCredentials(
        env.SERVICENOW_CLIENT_ID,
        env.SERVICENOW_CLIENT_SECRET
      )
      return {
        tokenEndpoint: '', // Instance-specific, set during authorization
        clientId,
        clientSecret,
        useBasicAuth: false,
        supportsRefreshTokenRotation: true,
      }
    }
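    // --- Illustrative sketch (editor addition, not part of this diff) ---
    // Because the ServiceNow token endpoint is instance-specific, callers derive it
    // from the stored instance URL at refresh time; refreshOAuthToken below performs
    // exactly this construction. The instance URL here is a hypothetical value:
    //
    //   const instanceUrl = 'https://dev12345.service-now.com/'
    //   const tokenEndpoint = `${instanceUrl.replace(/\/$/, '')}/oauth_token.do`
    //   // -> 'https://dev12345.service-now.com/oauth_token.do'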
    case 'zoom': {
      const { clientId, clientSecret } = getCredentials(env.ZOOM_CLIENT_ID, env.ZOOM_CLIENT_SECRET)
      return {
@@ -1565,20 +1600,36 @@ function buildAuthRequest(
 * This is a server-side utility function to refresh OAuth tokens
 * @param providerId The provider ID (e.g., 'google-drive')
 * @param refreshToken The refresh token to use
 * @param instanceUrl Optional instance URL for providers with instance-specific endpoints (e.g., ServiceNow)
 * @returns Object containing the new access token and expiration time in seconds, or null if refresh failed
 */
export async function refreshOAuthToken(
  providerId: string,
  refreshToken: string
  refreshToken: string,
  instanceUrl?: string
): Promise<{ accessToken: string; expiresIn: number; refreshToken: string } | null> {
  try {
    // Get the provider from the providerId (e.g., 'google-drive' -> 'google')
    const provider = providerId.split('-')[0]

    // Get provider configuration
    const config = getProviderAuthConfig(provider)

    // For ServiceNow, the token endpoint is instance-specific
    let tokenEndpoint = config.tokenEndpoint
    if (provider === 'servicenow') {
      if (!instanceUrl) {
        logger.error('ServiceNow token refresh requires instance URL')
        return null
      }
      tokenEndpoint = `${instanceUrl.replace(/\/$/, '')}/oauth_token.do`
    }

    // Build authentication request
    const { headers, bodyParams } = buildAuthRequest(config, refreshToken)

    const response = await fetch(config.tokenEndpoint, {
    // Refresh the token
    const response = await fetch(tokenEndpoint, {
      method: 'POST',
      headers,
      body: new URLSearchParams(bodyParams).toString(),
@@ -1588,6 +1639,7 @@ export async function refreshOAuthToken(
      const errorText = await response.text()
      let errorData = errorText

      // Try to parse the error as JSON for better diagnostics
      try {
        errorData = JSON.parse(errorText)
      } catch (_e) {
@@ -1611,14 +1663,18 @@ export async function refreshOAuthToken(

    const data = await response.json()

    // Extract token and expiration (different providers may use different field names)
    const accessToken = data.access_token

    // Handle refresh token rotation for providers that support it
    let newRefreshToken = null
    if (config.supportsRefreshTokenRotation && data.refresh_token) {
      newRefreshToken = data.refresh_token
      logger.info(`Received new refresh token from ${provider}`)
    }

    // Get expiration time - use provider's value or default to 1 hour (3600 seconds)
    // Different providers use different names for this field
    const expiresIn = data.expires_in || data.expiresIn || 3600

    if (!accessToken) {

@@ -2,7 +2,6 @@ import type { Edge } from 'reactflow'
import { v4 as uuidv4 } from 'uuid'
import { createLogger } from '@/lib/logs/console/logger'
import type { BlockWithDiff } from '@/lib/workflows/diff/types'
import { isValidKey } from '@/lib/workflows/sanitization/key-validation'
import { mergeSubblockState } from '@/stores/workflows/utils'
import type { BlockState, WorkflowState } from '@/stores/workflows/workflow/types'

@@ -538,17 +537,6 @@ export class WorkflowDiffEngine {

    // First pass: build ID mappings
    for (const [proposedId, proposedBlock] of Object.entries(proposedState.blocks)) {
      // CRITICAL: Skip invalid block IDs to prevent "undefined" keys in workflow state
      if (!isValidKey(proposedId)) {
        logger.error('Invalid proposedId detected in proposed state', {
          proposedId,
          proposedId_type: typeof proposedId,
          blockType: proposedBlock?.type,
          blockName: proposedBlock?.name,
        })
        continue
      }

      const key = `${proposedBlock.type}:${proposedBlock.name}`

      // Check if this block exists in current state by type:name
@@ -564,31 +552,7 @@ export class WorkflowDiffEngine {

    // Second pass: build final blocks with mapped IDs
    for (const [proposedId, proposedBlock] of Object.entries(proposedState.blocks)) {
      // CRITICAL: Skip invalid block IDs to prevent "undefined" keys in workflow state
      if (!isValidKey(proposedId)) {
        logger.error('Invalid proposedId detected in proposed state (second pass)', {
          proposedId,
          proposedId_type: typeof proposedId,
          blockType: proposedBlock?.type,
          blockName: proposedBlock?.name,
        })
        continue
      }

      const finalId = idMap[proposedId]

      // CRITICAL: Validate finalId before using as key
      if (!isValidKey(finalId)) {
        logger.error('Invalid finalId generated from idMap', {
          proposedId,
          finalId,
          finalId_type: typeof finalId,
          blockType: proposedBlock?.type,
          blockName: proposedBlock?.name,
        })
        continue
      }

      const key = `${proposedBlock.type}:${proposedBlock.name}`
      const existingBlock = existingBlockMap[key]?.block

@@ -653,8 +617,6 @@ export class WorkflowDiffEngine {
    const { generateLoopBlocks, generateParallelBlocks } = await import(
      '@/stores/workflows/workflow/utils'
    )

    // Build the proposed state
    const finalProposedState: WorkflowState = {
      blocks: finalBlocks,
      edges: finalEdges,
@@ -663,9 +625,6 @@ export class WorkflowDiffEngine {
      lastSaved: Date.now(),
    }

    // Use the proposed state directly - validation happens at the source
    const fullyCleanedState = finalProposedState

    // Transfer block heights from baseline workflow for better measurements in diff view
    // If editing on top of diff, this transfers from the diff (which already has good heights)
    // Otherwise transfers from original workflow
@@ -735,7 +694,7 @@ export class WorkflowDiffEngine {
      '@/lib/workflows/autolayout/constants'
    )

    const layoutedBlocks = applyTargetedLayout(finalBlocks, fullyCleanedState.edges, {
    const layoutedBlocks = applyTargetedLayout(finalBlocks, finalProposedState.edges, {
      changedBlockIds: impactedBlockArray,
      horizontalSpacing: DEFAULT_HORIZONTAL_SPACING,
      verticalSpacing: DEFAULT_VERTICAL_SPACING,
@@ -783,7 +742,7 @@ export class WorkflowDiffEngine {

    const layoutResult = applyNativeAutoLayout(
      finalBlocks,
      fullyCleanedState.edges,
      finalProposedState.edges,
      DEFAULT_LAYOUT_OPTIONS
    )

@@ -865,7 +824,7 @@ export class WorkflowDiffEngine {
    })

    // Create edge identifiers for proposed state
    fullyCleanedState.edges.forEach((edge) => {
    finalEdges.forEach((edge) => {
      const edgeId = `${edge.source}-${edge.sourceHandle || 'source'}-${edge.target}-${edge.targetHandle || 'target'}`
      proposedEdgeSet.add(edgeId)
    })
@@ -904,21 +863,21 @@ export class WorkflowDiffEngine {
      }
    }

    // Apply diff markers to blocks in the fully cleaned state
    // Apply diff markers to blocks
    if (computed) {
      for (const id of computed.new_blocks || []) {
        if (fullyCleanedState.blocks[id]) {
          ;(fullyCleanedState.blocks[id] as any).is_diff = 'new'
        if (finalBlocks[id]) {
          finalBlocks[id].is_diff = 'new'
        }
      }
      for (const id of computed.edited_blocks || []) {
        if (fullyCleanedState.blocks[id]) {
          ;(fullyCleanedState.blocks[id] as any).is_diff = 'edited'
        if (finalBlocks[id]) {
          finalBlocks[id].is_diff = 'edited'

          // Also mark specific subblocks that changed
          if (computed.field_diffs?.[id]) {
            const fieldDiff = computed.field_diffs[id]
            const block = fullyCleanedState.blocks[id]
            const block = finalBlocks[id]

            // Apply diff markers to changed subblocks
            for (const changedField of fieldDiff.changed_fields) {
@@ -930,12 +889,12 @@ export class WorkflowDiffEngine {
        }
      }
    }
    // Note: We don't remove deleted blocks from fullyCleanedState, just mark them
    // Note: We don't remove deleted blocks from finalBlocks, just mark them
    }

    // Store the diff with the fully sanitized state
    // Store the diff
    this.currentDiff = {
      proposedState: fullyCleanedState,
      proposedState: finalProposedState,
      diffAnalysis: computed,
      metadata: {
        source: 'workflow_state',
@@ -944,10 +903,10 @@ export class WorkflowDiffEngine {
    }

    logger.info('Successfully created diff from workflow state', {
      blockCount: Object.keys(fullyCleanedState.blocks).length,
      edgeCount: fullyCleanedState.edges.length,
      hasLoops: Object.keys(fullyCleanedState.loops || {}).length > 0,
      hasParallels: Object.keys(fullyCleanedState.parallels || {}).length > 0,
      blockCount: Object.keys(finalProposedState.blocks).length,
      edgeCount: finalProposedState.edges.length,
      hasLoops: Object.keys(finalProposedState.loops || {}).length > 0,
      hasParallels: Object.keys(finalProposedState.parallels || {}).length > 0,
      newBlocks: computed?.new_blocks?.length || 0,
      editedBlocks: computed?.edited_blocks?.length || 0,
      deletedBlocks: computed?.deleted_blocks?.length || 0,
@@ -1137,17 +1096,6 @@ export function stripWorkflowDiffMarkers(state: WorkflowState): WorkflowState {
  const cleanBlocks: Record<string, BlockState> = {}

  for (const [blockId, block] of Object.entries(state.blocks || {})) {
    // Validate block ID at the source - skip invalid IDs
    if (!isValidKey(blockId)) {
      logger.error('Invalid blockId detected in stripWorkflowDiffMarkers', {
        blockId,
        blockId_type: typeof blockId,
        blockType: block?.type,
        blockName: block?.name,
      })
      continue
    }

    const cleanBlock: BlockState = structuredClone(block)
    const blockWithDiff = cleanBlock as BlockState & BlockWithDiff
    blockWithDiff.is_diff = undefined

@@ -9,7 +9,7 @@ import {
  workflowSubflows,
} from '@sim/db'
import type { InferSelectModel } from 'drizzle-orm'
import { and, desc, eq, inArray, sql } from 'drizzle-orm'
import { and, desc, eq, sql } from 'drizzle-orm'
import type { Edge } from 'reactflow'
import { v4 as uuidv4 } from 'uuid'
import { createLogger } from '@/lib/logs/console/logger'
@@ -602,178 +602,6 @@ export async function deployWorkflow(params: {
  }
}

/**
 * Bulk load workflow states for multiple workflows in a single set of queries.
 * Much more efficient than calling loadWorkflowFromNormalizedTables for each workflow.
 */
export async function loadBulkWorkflowsFromNormalizedTables(
  workflowIds: string[]
): Promise<Map<string, NormalizedWorkflowData>> {
  const result = new Map<string, NormalizedWorkflowData>()

  if (workflowIds.length === 0) {
    return result
  }

  try {
    // Load all components for all workflows in parallel (just 3 queries total)
    const [allBlocks, allEdges, allSubflows] = await Promise.all([
      db.select().from(workflowBlocks).where(inArray(workflowBlocks.workflowId, workflowIds)),
      db.select().from(workflowEdges).where(inArray(workflowEdges.workflowId, workflowIds)),
      db.select().from(workflowSubflows).where(inArray(workflowSubflows.workflowId, workflowIds)),
    ])

    // Group blocks by workflow
    const blocksByWorkflow = new Map<string, typeof allBlocks>()
    for (const block of allBlocks) {
      const existing = blocksByWorkflow.get(block.workflowId) || []
      existing.push(block)
      blocksByWorkflow.set(block.workflowId, existing)
    }

    // Group edges by workflow
    const edgesByWorkflow = new Map<string, typeof allEdges>()
    for (const edge of allEdges) {
      const existing = edgesByWorkflow.get(edge.workflowId) || []
      existing.push(edge)
      edgesByWorkflow.set(edge.workflowId, existing)
    }

    // Group subflows by workflow
    const subflowsByWorkflow = new Map<string, typeof allSubflows>()
    for (const subflow of allSubflows) {
      const existing = subflowsByWorkflow.get(subflow.workflowId) || []
      existing.push(subflow)
      subflowsByWorkflow.set(subflow.workflowId, existing)
    }

    // Process each workflow
    for (const workflowId of workflowIds) {
      const blocks = blocksByWorkflow.get(workflowId) || []
      const edges = edgesByWorkflow.get(workflowId) || []
      const subflows = subflowsByWorkflow.get(workflowId) || []

      // Skip workflows with no blocks (not migrated yet)
      if (blocks.length === 0) {
        continue
      }

      // Convert blocks to the expected format
      const blocksMap: Record<string, BlockState> = {}
      blocks.forEach((block) => {
        const blockData = block.data || {}

        const assembled: BlockState = {
          id: block.id,
          type: block.type,
          name: block.name,
          position: {
            x: Number(block.positionX),
            y: Number(block.positionY),
          },
          enabled: block.enabled,
          horizontalHandles: block.horizontalHandles,
          advancedMode: block.advancedMode,
          triggerMode: block.triggerMode,
          height: Number(block.height),
          subBlocks: (block.subBlocks as BlockState['subBlocks']) || {},
          outputs: (block.outputs as BlockState['outputs']) || {},
          data: blockData,
        }

        blocksMap[block.id] = assembled
      })

      // Sanitize any invalid custom tools in agent blocks
      const { blocks: sanitizedBlocks } = sanitizeAgentToolsInBlocks(blocksMap)

      // Migrate old agent block format to new messages array format
      const migratedBlocks = migrateAgentBlocksToMessagesFormat(sanitizedBlocks)

      // Convert edges to the expected format
      const edgesArray: Edge[] = edges.map((edge) => ({
        id: edge.id,
        source: edge.sourceBlockId,
        target: edge.targetBlockId,
        sourceHandle: edge.sourceHandle ?? undefined,
        targetHandle: edge.targetHandle ?? undefined,
        type: 'default',
        data: {},
      }))

      // Convert subflows to loops and parallels
      const loops: Record<string, Loop> = {}
      const parallels: Record<string, Parallel> = {}

      subflows.forEach((subflow) => {
        const config = (subflow.config ?? {}) as Partial<Loop & Parallel>

        if (subflow.type === SUBFLOW_TYPES.LOOP) {
          const loopType =
            (config as Loop).loopType === 'for' ||
            (config as Loop).loopType === 'forEach' ||
            (config as Loop).loopType === 'while' ||
            (config as Loop).loopType === 'doWhile'
              ? (config as Loop).loopType
              : 'for'

          const loop: Loop = {
            id: subflow.id,
            nodes: Array.isArray((config as Loop).nodes) ? (config as Loop).nodes : [],
            iterations:
              typeof (config as Loop).iterations === 'number' ? (config as Loop).iterations : 1,
            loopType,
            forEachItems: (config as Loop).forEachItems ?? '',
            whileCondition: (config as Loop).whileCondition ?? '',
            doWhileCondition: (config as Loop).doWhileCondition ?? '',
          }
          loops[subflow.id] = loop

          // Sync block.data with loop config
          if (migratedBlocks[subflow.id]) {
            const block = migratedBlocks[subflow.id]
            migratedBlocks[subflow.id] = {
              ...block,
              data: {
                ...block.data,
                collection: loop.forEachItems ?? block.data?.collection ?? '',
                whileCondition: loop.whileCondition ?? block.data?.whileCondition ?? '',
                doWhileCondition: loop.doWhileCondition ?? block.data?.doWhileCondition ?? '',
              },
            }
          }
        } else if (subflow.type === SUBFLOW_TYPES.PARALLEL) {
          const parallel: Parallel = {
            id: subflow.id,
            nodes: Array.isArray((config as Parallel).nodes) ? (config as Parallel).nodes : [],
            count: typeof (config as Parallel).count === 'number' ? (config as Parallel).count : 5,
            distribution: (config as Parallel).distribution ?? '',
            parallelType:
              (config as Parallel).parallelType === 'count' ||
              (config as Parallel).parallelType === 'collection'
                ? (config as Parallel).parallelType
                : 'count',
          }
          parallels[subflow.id] = parallel
        }
      })

      result.set(workflowId, {
        blocks: migratedBlocks,
        edges: edgesArray,
        loops,
        parallels,
        isFromNormalizedTables: true,
      })
    }

    return result
  } catch (error) {
    logger.error('Error bulk loading workflows from normalized tables:', error)
    return result
  }
}
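
// --- Illustrative sketch (editor addition, not part of this diff) ---
// The bulk loader above (which this diff appears to remove, per the hunk header)
// fanned three inArray() queries out into per-workflow groups with a Map. The
// grouping idiom it relied on, shown with hypothetical rows:
//
// const rows = [{ workflowId: 'w1' }, { workflowId: 'w2' }, { workflowId: 'w1' }]
// const byWorkflow = new Map<string, typeof rows>()
// for (const row of rows) {
//   const existing = byWorkflow.get(row.workflowId) || []
//   existing.push(row)
//   byWorkflow.set(row.workflowId, existing)
// }
// // byWorkflow.get('w1')?.length -> 2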

/**
 * Regenerates all IDs in a workflow state to avoid conflicts when duplicating or using templates
 * Returns a new state with all IDs regenerated and references updated

@@ -1,9 +0,0 @@
/**
 * Checks if a key is valid (not undefined, null, empty, or literal "undefined"/"null")
 * Use this to validate BEFORE setting a dynamic key on any object.
 */
export function isValidKey(key: unknown): key is string {
  return (
    !!key && typeof key === 'string' && key !== 'undefined' && key !== 'null' && key.trim() !== ''
  )
}
@@ -10,6 +10,43 @@ import { getTrigger } from '@/triggers'

const logger = createLogger('TriggerUtils')

/**
 * Valid start block types that can trigger a workflow
 */
export const VALID_START_BLOCK_TYPES = [
  'starter',
  'start',
  'start_trigger',
  'api',
  'api_trigger',
  'input_trigger',
] as const

export type ValidStartBlockType = (typeof VALID_START_BLOCK_TYPES)[number]

/**
 * Check if a block type is a valid start block type
 */
export function isValidStartBlockType(blockType: string): blockType is ValidStartBlockType {
  return VALID_START_BLOCK_TYPES.includes(blockType as ValidStartBlockType)
}

/**
 * Check if a workflow state has a valid start block
 */
export function hasValidStartBlockInState(state: any): boolean {
  if (!state?.blocks) {
    return false
  }

  const startBlock = Object.values(state.blocks).find((block: any) => {
    const blockType = block?.type
    return isValidStartBlockType(blockType)
  })

  return !!startBlock
}
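
// --- Illustrative sketch (editor addition, not part of this diff) ---
// Hypothetical workflow states, assuming blocks keyed by ID with a `type` field:
//
// hasValidStartBlockInState({ blocks: { b1: { type: 'api_trigger' } } }) // -> true
// hasValidStartBlockInState({ blocks: { b1: { type: 'agent' } } })       // -> false
// isValidStartBlockType('schedule')                                      // -> false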

/**
 * Generates mock data based on the output type definition
 */

@@ -1,6 +1,7 @@
export interface InputFormatField {
  name?: string
  type?: 'string' | 'number' | 'boolean' | 'object' | 'array' | 'files' | string
  description?: string
  value?: unknown
}
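
// --- Illustrative sketch (editor addition, not part of this diff) ---
// A hypothetical field as it might appear in a workflow's input format, using the
// optional `description` that generateToolInputSchema consumes for MCP schemas:
//
// const field: InputFormatField = {
//   name: 'ticketId',
//   type: 'string',
//   description: 'ServiceNow incident sys_id to act on',
// }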

@@ -114,6 +114,7 @@
    "react": "19.2.1",
    "react-colorful": "5.6.1",
    "react-dom": "19.2.1",
    "react-google-drive-picker": "^1.2.2",
    "react-hook-form": "^7.54.2",
    "react-markdown": "^10.1.0",
    "react-simple-code-editor": "^0.14.1",

@@ -60,7 +60,9 @@ const parseTriggerArrayFromURL = (value: string | null): TriggerType[] => {
  if (!value) return []
  return value
    .split(',')
    .filter((t): t is TriggerType => ['chat', 'api', 'webhook', 'manual', 'schedule'].includes(t))
    .filter((t): t is TriggerType =>
      ['chat', 'api', 'webhook', 'manual', 'schedule', 'mcp'].includes(t)
    )
}
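
// --- Illustrative sketch (editor addition, not part of this diff) ---
// With 'mcp' added to the allow-list, a URL filter such as ?triggers=api,mcp,bogus
// now parses with the new trigger kept and unknown values still dropped:
//
// parseTriggerArrayFromURL('api,mcp,bogus') // -> ['api', 'mcp']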

const parseStringArrayFromURL = (value: string | null): string[] => {

@@ -166,7 +166,15 @@ export type TimeRange =
  | 'Past 30 days'
  | 'All time'
export type LogLevel = 'error' | 'info' | 'running' | 'pending' | 'all'
export type TriggerType = 'chat' | 'api' | 'webhook' | 'manual' | 'schedule' | 'all' | string
export type TriggerType =
  | 'chat'
  | 'api'
  | 'webhook'
  | 'manual'
  | 'schedule'
  | 'mcp'
  | 'all'
  | string

export interface FilterState {
  // Workspace context

@@ -32,7 +32,6 @@ import { SearchDocumentationClientTool } from '@/lib/copilot/tools/client/other/
import { SearchErrorsClientTool } from '@/lib/copilot/tools/client/other/search-errors'
import { SearchOnlineClientTool } from '@/lib/copilot/tools/client/other/search-online'
import { SearchPatternsClientTool } from '@/lib/copilot/tools/client/other/search-patterns'
import { SleepClientTool } from '@/lib/copilot/tools/client/other/sleep'
import { createExecutionContext, getTool } from '@/lib/copilot/tools/client/registry'
import { GetCredentialsClientTool } from '@/lib/copilot/tools/client/user/get-credentials'
import { SetEnvironmentVariablesClientTool } from '@/lib/copilot/tools/client/user/set-environment-variables'
@@ -105,7 +104,6 @@ const CLIENT_TOOL_INSTANTIATORS: Record<string, (id: string) => any> = {
  navigate_ui: (id) => new NavigateUIClientTool(id),
  manage_custom_tool: (id) => new ManageCustomToolClientTool(id),
  manage_mcp_tool: (id) => new ManageMcpToolClientTool(id),
  sleep: (id) => new SleepClientTool(id),
}

// Read-only static metadata for class-based tools (no instances)
@@ -143,7 +141,6 @@ export const CLASS_TOOL_METADATA: Record<string, BaseClientToolMetadata | undefi
  navigate_ui: (NavigateUIClientTool as any)?.metadata,
  manage_custom_tool: (ManageCustomToolClientTool as any)?.metadata,
  manage_mcp_tool: (ManageMcpToolClientTool as any)?.metadata,
  sleep: (SleepClientTool as any)?.metadata,
}

function ensureClientToolInstance(toolName: string | undefined, toolCallId: string | undefined) {
@@ -2263,22 +2260,6 @@ export const useCopilotStore = create<CopilotStore>()(
          set({ toolCallsById: map })
        } catch {}
      },

      updateToolCallParams: (toolCallId: string, params: Record<string, any>) => {
        try {
          if (!toolCallId) return
          const map = { ...get().toolCallsById }
          const current = map[toolCallId]
          if (!current) return
          const updatedParams = { ...current.params, ...params }
          map[toolCallId] = {
            ...current,
            params: updatedParams,
            display: resolveToolDisplay(current.name, current.state, toolCallId, updatedParams),
          }
          set({ toolCallsById: map })
        } catch {}
      },
      updatePreviewToolCallState: (
        toolCallState: 'accepted' | 'rejected' | 'error',
        toolCallId?: string

@@ -178,7 +178,6 @@ export interface CopilotActions {
    toolCallId?: string
  ) => void
  setToolCallState: (toolCall: any, newState: ClientToolCallState, options?: any) => void
  updateToolCallParams: (toolCallId: string, params: Record<string, any>) => void
  sendDocsMessage: (query: string, options?: { stream?: boolean; topK?: number }) => Promise<void>
  saveChatMessages: (chatId: string) => Promise<void>

@@ -20,11 +20,7 @@ import type {
  WorkflowState,
  WorkflowStore,
} from '@/stores/workflows/workflow/types'
import {
  generateLoopBlocks,
  generateParallelBlocks,
  wouldCreateCycle,
} from '@/stores/workflows/workflow/utils'
import { generateLoopBlocks, generateParallelBlocks } from '@/stores/workflows/workflow/utils'

const logger = createLogger('WorkflowStore')

@@ -432,15 +428,6 @@ export const useWorkflowStore = create<WorkflowStore>()(
          return
        }

        // Prevent self-connections and cycles
        if (wouldCreateCycle(get().edges, edge.source, edge.target)) {
          logger.warn('Prevented edge that would create a cycle', {
            source: edge.source,
            target: edge.target,
          })
          return
        }

        // Check for duplicate connections
        const isDuplicate = get().edges.some(
          (existingEdge) =>

@@ -1,56 +1,7 @@
import type { Edge } from 'reactflow'
import type { BlockState, Loop, Parallel } from '@/stores/workflows/workflow/types'

const DEFAULT_LOOP_ITERATIONS = 5

/**
 * Check if adding an edge would create a cycle in the graph.
 * Uses depth-first search to detect if the source node is reachable from the target node.
 *
 * @param edges - Current edges in the graph
 * @param sourceId - Source node ID of the proposed edge
 * @param targetId - Target node ID of the proposed edge
 * @returns true if adding this edge would create a cycle
 */
export function wouldCreateCycle(edges: Edge[], sourceId: string, targetId: string): boolean {
  if (sourceId === targetId) {
    return true
  }

  const adjacencyList = new Map<string, string[]>()
  for (const edge of edges) {
    if (!adjacencyList.has(edge.source)) {
      adjacencyList.set(edge.source, [])
    }
    adjacencyList.get(edge.source)!.push(edge.target)
  }

  const visited = new Set<string>()

  function canReachSource(currentNode: string): boolean {
    if (currentNode === sourceId) {
      return true
    }

    if (visited.has(currentNode)) {
      return false
    }

    visited.add(currentNode)

    const neighbors = adjacencyList.get(currentNode) || []
    for (const neighbor of neighbors) {
      if (canReachSource(neighbor)) {
        return true
      }
    }

    return false
  }

  return canReachSource(targetId)
}
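
// --- Illustrative sketch (editor addition, not part of this diff) ---
// The edge objects below are hypothetical; only `source`/`target` matter here.
//
// const edges = [
//   { id: 'e1', source: 'a', target: 'b' },
//   { id: 'e2', source: 'b', target: 'c' },
// ] as Edge[]
//
// wouldCreateCycle(edges, 'c', 'a') // -> true  (c -> a closes a -> b -> c -> a)
// wouldCreateCycle(edges, 'a', 'c') // -> false (c has no path back to a)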

/**
 * Convert UI loop block to executor Loop format
 *

@@ -766,7 +766,6 @@ function validateClientSideParams(
  // Internal parameters that should be excluded from validation
  const internalParamSet = new Set([
    '_context',
    '_toolSchema',
    'workflowId',
    'envVars',
    'workflowVariables',

@@ -1,6 +1,5 @@
import { createLogger } from '@/lib/logs/console/logger'
import type { ServiceNowCreateParams, ServiceNowCreateResponse } from '@/tools/servicenow/types'
import { createBasicAuthHeader } from '@/tools/servicenow/utils'
import type { ToolConfig } from '@/tools/types'

const logger = createLogger('ServiceNowCreateRecordTool')
@@ -11,6 +10,11 @@ export const createRecordTool: ToolConfig<ServiceNowCreateParams, ServiceNowCrea
  description: 'Create a new record in a ServiceNow table',
  version: '1.0.0',

  oauth: {
    required: true,
    provider: 'servicenow',
  },

  params: {
    instanceUrl: {
      type: 'string',
@@ -18,17 +22,11 @@ export const createRecordTool: ToolConfig<ServiceNowCreateParams, ServiceNowCrea
      visibility: 'user-only',
      description: 'ServiceNow instance URL (e.g., https://instance.service-now.com)',
    },
    username: {
    credential: {
      type: 'string',
      required: true,
      visibility: 'user-only',
      description: 'ServiceNow username',
    },
    password: {
      type: 'string',
      required: true,
      visibility: 'user-only',
      description: 'ServiceNow password',
      required: false,
      visibility: 'hidden',
      description: 'ServiceNow OAuth credential ID',
    },
    tableName: {
      type: 'string',
@@ -46,7 +44,8 @@ export const createRecordTool: ToolConfig<ServiceNowCreateParams, ServiceNowCrea

  request: {
    url: (params) => {
      const baseUrl = params.instanceUrl.replace(/\/$/, '')
      // Use instanceUrl if provided, otherwise fall back to idToken (stored instance URL from OAuth)
      const baseUrl = (params.instanceUrl || params.idToken || '').replace(/\/$/, '')
      if (!baseUrl) {
        throw new Error('ServiceNow instance URL is required')
      }
@@ -54,11 +53,11 @@ export const createRecordTool: ToolConfig<ServiceNowCreateParams, ServiceNowCrea
    },
    method: 'POST',
    headers: (params) => {
      if (!params.username || !params.password) {
        throw new Error('ServiceNow username and password are required')
      if (!params.accessToken) {
        throw new Error('OAuth access token is required')
      }
      return {
        Authorization: createBasicAuthHeader(params.username, params.password),
        Authorization: `Bearer ${params.accessToken}`,
        'Content-Type': 'application/json',
        Accept: 'application/json',
      }

@@ -1,6 +1,5 @@
import { createLogger } from '@/lib/logs/console/logger'
import type { ServiceNowDeleteParams, ServiceNowDeleteResponse } from '@/tools/servicenow/types'
import { createBasicAuthHeader } from '@/tools/servicenow/utils'
import type { ToolConfig } from '@/tools/types'

const logger = createLogger('ServiceNowDeleteRecordTool')
@@ -11,24 +10,23 @@ export const deleteRecordTool: ToolConfig<ServiceNowDeleteParams, ServiceNowDele
  description: 'Delete a record from a ServiceNow table',
  version: '1.0.0',

  oauth: {
    required: true,
    provider: 'servicenow',
  },

  params: {
    instanceUrl: {
      type: 'string',
      required: true,
      required: false,
      visibility: 'user-only',
      description: 'ServiceNow instance URL (e.g., https://instance.service-now.com)',
      description: 'ServiceNow instance URL (auto-detected from OAuth if not provided)',
    },
    username: {
    credential: {
      type: 'string',
      required: true,
      visibility: 'user-only',
      description: 'ServiceNow username',
    },
    password: {
      type: 'string',
      required: true,
      visibility: 'user-only',
      description: 'ServiceNow password',
      required: false,
      visibility: 'hidden',
      description: 'ServiceNow OAuth credential ID',
    },
    tableName: {
      type: 'string',
@@ -46,7 +44,8 @@ export const deleteRecordTool: ToolConfig<ServiceNowDeleteParams, ServiceNowDele

  request: {
    url: (params) => {
      const baseUrl = params.instanceUrl.replace(/\/$/, '')
      // Use instanceUrl if provided, otherwise fall back to idToken (stored instance URL from OAuth)
      const baseUrl = (params.instanceUrl || params.idToken || '').replace(/\/$/, '')
      if (!baseUrl) {
        throw new Error('ServiceNow instance URL is required')
      }
@@ -54,11 +53,11 @@ export const deleteRecordTool: ToolConfig<ServiceNowDeleteParams, ServiceNowDele
    },
    method: 'DELETE',
    headers: (params) => {
      if (!params.username || !params.password) {
        throw new Error('ServiceNow username and password are required')
      if (!params.accessToken) {
        throw new Error('OAuth access token is required')
      }
      return {
        Authorization: createBasicAuthHeader(params.username, params.password),
        Authorization: `Bearer ${params.accessToken}`,
        Accept: 'application/json',
      }
    },

@@ -1,6 +1,5 @@
import { createLogger } from '@/lib/logs/console/logger'
import type { ServiceNowReadParams, ServiceNowReadResponse } from '@/tools/servicenow/types'
import { createBasicAuthHeader } from '@/tools/servicenow/utils'
import type { ToolConfig } from '@/tools/types'

const logger = createLogger('ServiceNowReadRecordTool')
@@ -11,24 +10,23 @@ export const readRecordTool: ToolConfig<ServiceNowReadParams, ServiceNowReadResp
  description: 'Read records from a ServiceNow table',
  version: '1.0.0',

  oauth: {
    required: true,
    provider: 'servicenow',
  },

  params: {
    instanceUrl: {
      type: 'string',
      required: true,
      required: false,
      visibility: 'user-only',
      description: 'ServiceNow instance URL (e.g., https://instance.service-now.com)',
      description: 'ServiceNow instance URL (auto-detected from OAuth if not provided)',
    },
    username: {
    credential: {
      type: 'string',
      required: true,
      visibility: 'user-only',
      description: 'ServiceNow username',
    },
    password: {
      type: 'string',
      required: true,
      visibility: 'user-only',
      description: 'ServiceNow password',
      required: false,
      visibility: 'hidden',
      description: 'ServiceNow OAuth credential ID',
    },
    tableName: {
      type: 'string',
@@ -70,7 +68,8 @@ export const readRecordTool: ToolConfig<ServiceNowReadParams, ServiceNowReadResp

  request: {
    url: (params) => {
      const baseUrl = params.instanceUrl.replace(/\/$/, '')
      // Use instanceUrl if provided, otherwise fall back to idToken (stored instance URL from OAuth)
      const baseUrl = (params.instanceUrl || params.idToken || '').replace(/\/$/, '')
      if (!baseUrl) {
        throw new Error('ServiceNow instance URL is required')
      }
@@ -81,13 +80,10 @@ export const readRecordTool: ToolConfig<ServiceNowReadParams, ServiceNowReadResp
      if (params.sysId) {
        url = `${url}/${params.sysId}`
      } else if (params.number) {
        const numberQuery = `number=${params.number}`
        const existingQuery = params.query
        queryParams.append(
          'sysparm_query',
          existingQuery ? `${existingQuery}^${numberQuery}` : numberQuery
        )
      } else if (params.query) {
        queryParams.append('number', params.number)
      }

      if (params.query) {
        queryParams.append('sysparm_query', params.query)
      }
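
      // --- Illustrative sketch (editor addition, not part of this diff) ---
      // With the merge above, number and query compose into a single encoded filter.
      // For hypothetical params = { number: 'INC0010001', query: 'active=true' },
      // the appended sysparm_query value is 'active=true^number=INC0010001'.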

@@ -104,11 +100,11 @@ export const readRecordTool: ToolConfig<ServiceNowReadParams, ServiceNowReadResp
    },
    method: 'GET',
    headers: (params) => {
      if (!params.username || !params.password) {
        throw new Error('ServiceNow username and password are required')
      if (!params.accessToken) {
        throw new Error('OAuth access token is required')
      }
      return {
        Authorization: createBasicAuthHeader(params.username, params.password),
        Authorization: `Bearer ${params.accessToken}`,
        Accept: 'application/json',
      }
    },
