mirror of
https://github.com/simstudioai/sim.git
synced 2026-01-10 23:48:09 -05:00
Compare commits
16 Commits
feat/zapie
...
v0.5.21
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
c27c233da0 | ||
|
|
ebef5f3a27 | ||
|
|
12c4c2d44f | ||
|
|
929a352edb | ||
|
|
6cd078b0fe | ||
|
|
31874939ee | ||
|
|
e157ce5fbc | ||
|
|
774e5d585c | ||
|
|
54cc93743f | ||
|
|
8c32ad4c0d | ||
|
|
1d08796853 | ||
|
|
ebcd243942 | ||
|
|
b7e814b721 | ||
|
|
842ef27ed9 | ||
|
|
31c34b2ea3 | ||
|
|
8f0ef58056 |
@@ -3798,23 +3798,6 @@ export function SshIcon(props: SVGProps<SVGSVGElement>) {
|
||||
)
|
||||
}
|
||||
|
||||
export function SftpIcon(props: SVGProps<SVGSVGElement>) {
|
||||
return (
|
||||
<svg
|
||||
{...props}
|
||||
xmlns='http://www.w3.org/2000/svg'
|
||||
viewBox='0 0 32 32'
|
||||
width='32px'
|
||||
height='32px'
|
||||
>
|
||||
<path
|
||||
d='M 6 3 L 6 29 L 26 29 L 26 9.59375 L 25.71875 9.28125 L 19.71875 3.28125 L 19.40625 3 Z M 8 5 L 18 5 L 18 11 L 24 11 L 24 27 L 8 27 Z M 20 6.4375 L 22.5625 9 L 20 9 Z'
|
||||
fill='currentColor'
|
||||
/>
|
||||
</svg>
|
||||
)
|
||||
}
|
||||
|
||||
export function ApifyIcon(props: SVGProps<SVGSVGElement>) {
|
||||
return (
|
||||
<svg
|
||||
@@ -4146,78 +4129,3 @@ export function CursorIcon(props: SVGProps<SVGSVGElement>) {
|
||||
</svg>
|
||||
)
|
||||
}
|
||||
|
||||
export function DuckDuckGoIcon(props: SVGProps<SVGSVGElement>) {
|
||||
return (
|
||||
<svg {...props} xmlns='http://www.w3.org/2000/svg' viewBox='-108 -108 216 216'>
|
||||
<circle r='108' fill='#d53' />
|
||||
<circle r='96' fill='none' stroke='#ffffff' strokeWidth='7' />
|
||||
<path
|
||||
d='M-32-55C-62-48-51-6-51-6l19 93 7 3M-39-73h-8l11 4s-11 0-11 7c24-1 35 5 35 5'
|
||||
fill='#ddd'
|
||||
/>
|
||||
<path d='M25 95S1 57 1 32c0-47 31-7 31-44S1-58 1-58c-15-19-44-15-44-15l7 4s-7 2-9 4 19-3 28 5c-37 3-31 33-31 33l21 120' />
|
||||
<path d='M25-1l38-10c34 5-29 24-33 23C0 7 9 32 45 24s9 20-24 9C-26 20-1-3 25-1' fill='#fc0' />
|
||||
<path
|
||||
d='M15 78l2-3c22 8 23 11 22-9s0-20-23-3c0-5-13-3-15 0-21-9-23-12-22 2 2 29 1 24 21 14'
|
||||
fill='#6b5'
|
||||
/>
|
||||
<path d='M-1 67v12c1 2 17 2 17-2s-8 3-13 1-2-13-2-13' fill='#4a4' />
|
||||
<path
|
||||
d='M-23-32c-5-6-18-1-15 7 1-4 8-10 15-7m32 0c1-6 11-7 14-1-4-2-10-2-14 1m-33 16a2 2 0 1 1 0 1m-8 3a7 7 0 1 0 0-1m52-6a2 2 0 1 1 0 1m-6 3a6 6 0 1 0 0-1'
|
||||
fill='#148'
|
||||
/>
|
||||
</svg>
|
||||
)
|
||||
}
|
||||
|
||||
export function RssIcon(props: SVGProps<SVGSVGElement>) {
|
||||
return (
|
||||
<svg
|
||||
{...props}
|
||||
width='24'
|
||||
height='24'
|
||||
viewBox='0 0 24 24'
|
||||
fill='none'
|
||||
xmlns='http://www.w3.org/2000/svg'
|
||||
>
|
||||
<path
|
||||
d='M4 11C6.38695 11 8.67613 11.9482 10.364 13.636C12.0518 15.3239 13 17.6131 13 20'
|
||||
stroke='currentColor'
|
||||
strokeWidth='2'
|
||||
strokeLinecap='round'
|
||||
strokeLinejoin='round'
|
||||
/>
|
||||
<path
|
||||
d='M4 4C8.24346 4 12.3131 5.68571 15.3137 8.68629C18.3143 11.6869 20 15.7565 20 20'
|
||||
stroke='currentColor'
|
||||
strokeWidth='2'
|
||||
strokeLinecap='round'
|
||||
strokeLinejoin='round'
|
||||
/>
|
||||
<circle cx='5' cy='19' r='1' fill='currentColor' />
|
||||
</svg>
|
||||
)
|
||||
}
|
||||
|
||||
export function ZapierIcon(props: SVGProps<SVGSVGElement>) {
|
||||
return (
|
||||
<svg
|
||||
{...props}
|
||||
width='800px'
|
||||
height='800px'
|
||||
viewBox='0 0 256 256'
|
||||
version='1.1'
|
||||
xmlns='http://www.w3.org/2000/svg'
|
||||
xmlnsXlink='http://www.w3.org/1999/xlink'
|
||||
>
|
||||
<g>
|
||||
<path
|
||||
d='M128.080089,-0.000183105 C135.311053,0.0131003068 142.422517,0.624138494 149.335663,1.77979593 L149.335663,1.77979593 L149.335663,76.2997796 L202.166953,23.6044907 C208.002065,27.7488446 213.460883,32.3582023 218.507811,37.3926715 C223.557281,42.4271407 228.192318,47.8867213 232.346817,53.7047992 L232.346817,53.7047992 L179.512985,106.400063 L254.227854,106.400063 C255.387249,113.29414 256,120.36111 256,127.587243 L256,127.587243 L256,127.759881 C256,134.986013 255.387249,142.066204 254.227854,148.960282 L254.227854,148.960282 L179.500273,148.960282 L232.346817,201.642324 C228.192318,207.460402 223.557281,212.919983 218.523066,217.954452 L218.523066,217.954452 L218.507811,217.954452 C213.460883,222.988921 208.002065,227.6115 202.182208,231.742607 L202.182208,231.742607 L149.335663,179.04709 L149.335663,253.5672 C142.435229,254.723036 135.323765,255.333244 128.092802,255.348499 L128.092802,255.348499 L127.907197,255.348499 C120.673691,255.333244 113.590195,254.723036 106.677048,253.5672 L106.677048,253.5672 L106.677048,179.04709 L53.8457596,231.742607 C42.1780766,223.466917 31.977435,213.278734 23.6658953,201.642324 L23.6658953,201.642324 L76.4997269,148.960282 L1.78485803,148.960282 C0.612750404,142.052729 0,134.946095 0,127.719963 L0,127.719963 L0,127.349037 C0.0121454869,125.473817 0.134939797,123.182933 0.311311815,120.812834 L0.36577283,120.099764 C0.887996182,113.428547 1.78485803,106.400063 1.78485803,106.400063 L1.78485803,106.400063 L76.4997269,106.400063 L23.6658953,53.7047992 C27.8076812,47.8867213 32.4300059,42.4403618 37.4769335,37.4193681 L37.4769335,37.4193681 L37.5023588,37.3926715 C42.5391163,32.3582023 48.0106469,27.7488446 53.8457596,23.6044907 L53.8457596,23.6044907 L106.677048,76.2997796 L106.677048,1.77979593 C113.590195,0.624138494 120.688946,0.0131003068 127.932622,-0.000183105 L127.932622,-0.000183105 L128.080089,-0.000183105 Z M128.067377,95.7600714 L127.945335,95.7600714 C118.436262,95.7600714 109.32891,97.5001809 100.910584,100.661566 
C97.7553011,109.043534 96.0085811,118.129275 95.9958684,127.613685 L95.9958684,127.733184 C96.0085811,137.217594 97.7553011,146.303589 100.923296,154.685303 C109.32891,157.846943 118.436262,159.587052 127.945335,159.587052 L128.067377,159.587052 C137.576449,159.587052 146.683802,157.846943 155.089415,154.685303 C158.257411,146.290368 160.004131,137.217594 160.004131,127.733184 L160.004131,127.613685 C160.004131,118.129275 158.257411,109.043534 155.089415,100.661566 C146.683802,97.5001809 137.576449,95.7600714 128.067377,95.7600714 Z'
|
||||
fill='#FF4A00'
|
||||
fillRule='nonzero'
|
||||
/>
|
||||
</g>
|
||||
</svg>
|
||||
)
|
||||
}
|
||||
|
||||
@@ -20,7 +20,6 @@ import {
|
||||
DiscordIcon,
|
||||
DocumentIcon,
|
||||
DropboxIcon,
|
||||
DuckDuckGoIcon,
|
||||
DynamoDBIcon,
|
||||
ElasticsearchIcon,
|
||||
ElevenLabsIcon,
|
||||
@@ -86,7 +85,6 @@ import {
|
||||
SendgridIcon,
|
||||
SentryIcon,
|
||||
SerperIcon,
|
||||
SftpIcon,
|
||||
ShopifyIcon,
|
||||
SlackIcon,
|
||||
SmtpIcon,
|
||||
@@ -110,7 +108,6 @@ import {
|
||||
WordpressIcon,
|
||||
xIcon,
|
||||
YouTubeIcon,
|
||||
ZapierIcon,
|
||||
ZendeskIcon,
|
||||
ZepIcon,
|
||||
ZoomIcon,
|
||||
@@ -122,7 +119,6 @@ export const blockTypeToIconMap: Record<string, IconComponent> = {
|
||||
zoom: ZoomIcon,
|
||||
zep: ZepIcon,
|
||||
zendesk: ZendeskIcon,
|
||||
zapier: ZapierIcon,
|
||||
youtube: YouTubeIcon,
|
||||
x: xIcon,
|
||||
wordpress: WordpressIcon,
|
||||
@@ -151,7 +147,6 @@ export const blockTypeToIconMap: Record<string, IconComponent> = {
|
||||
slack: SlackIcon,
|
||||
shopify: ShopifyIcon,
|
||||
sharepoint: MicrosoftSharepointIcon,
|
||||
sftp: SftpIcon,
|
||||
serper: SerperIcon,
|
||||
sentry: SentryIcon,
|
||||
sendgrid: SendgridIcon,
|
||||
@@ -217,7 +212,6 @@ export const blockTypeToIconMap: Record<string, IconComponent> = {
|
||||
elevenlabs: ElevenLabsIcon,
|
||||
elasticsearch: ElasticsearchIcon,
|
||||
dynamodb: DynamoDBIcon,
|
||||
duckduckgo: DuckDuckGoIcon,
|
||||
dropbox: DropboxIcon,
|
||||
discord: DiscordIcon,
|
||||
datadog: DatadogIcon,
|
||||
|
||||
@@ -1,63 +0,0 @@
|
||||
---
|
||||
title: DuckDuckGo
|
||||
description: Suche mit DuckDuckGo
|
||||
---
|
||||
|
||||
import { BlockInfoCard } from "@/components/ui/block-info-card"
|
||||
|
||||
<BlockInfoCard
|
||||
type="duckduckgo"
|
||||
color="#FFFFFF"
|
||||
/>
|
||||
|
||||
{/* MANUAL-CONTENT-START:intro */}
|
||||
[DuckDuckGo](https://duckduckgo.com/) ist eine datenschutzorientierte Websuchmaschine, die sofortige Antworten, Zusammenfassungen, verwandte Themen und mehr liefert – ohne dich oder deine Suchen zu verfolgen. DuckDuckGo macht es einfach, Informationen ohne Benutzerprofilierung oder zielgerichtete Werbung zu finden.
|
||||
|
||||
Mit DuckDuckGo in Sim kannst du:
|
||||
|
||||
- **Im Web suchen**: Finde sofort Antworten, Fakten und Übersichten für eine bestimmte Suchanfrage
|
||||
- **Direkte Antworten erhalten**: Erhalte spezifische Antworten für Berechnungen, Umrechnungen oder Faktenfragen
|
||||
- **Auf Zusammenfassungen zugreifen**: Erhalte kurze Zusammenfassungen oder Beschreibungen für deine Suchthemen
|
||||
- **Verwandte Themen abrufen**: Entdecke Links und Referenzen, die für deine Suche relevant sind
|
||||
- **Ausgabe filtern**: Optional HTML entfernen oder Begriffsklärungen überspringen für sauberere Ergebnisse
|
||||
|
||||
Diese Funktionen ermöglichen es deinen Sim-Agenten, den Zugriff auf aktuelles Webwissen zu automatisieren – vom Auffinden von Fakten in einem Workflow bis hin zur Anreicherung von Dokumenten und Analysen mit aktuellen Informationen. Da DuckDuckGos Instant Answers API offen ist und keinen API-Schlüssel erfordert, lässt sie sich einfach und datenschutzsicher in deine automatisierten Geschäftsprozesse integrieren.
|
||||
{/* MANUAL-CONTENT-END */}
|
||||
|
||||
## Nutzungsanleitung
|
||||
|
||||
Durchsuche das Web mit der DuckDuckGo Instant Answers API. Liefert sofortige Antworten, Zusammenfassungen, verwandte Themen und mehr. Kostenlos nutzbar ohne API-Schlüssel.
|
||||
|
||||
## Tools
|
||||
|
||||
### `duckduckgo_search`
|
||||
|
||||
Durchsuche das Web mit der DuckDuckGo Instant Answers API. Liefert sofortige Antworten, Zusammenfassungen und verwandte Themen für deine Anfrage. Kostenlos nutzbar ohne API-Schlüssel.
|
||||
|
||||
#### Eingabe
|
||||
|
||||
| Parameter | Typ | Erforderlich | Beschreibung |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `query` | string | Ja | Die auszuführende Suchanfrage |
|
||||
| `noHtml` | boolean | Nein | HTML aus Text in Ergebnissen entfernen \(Standard: true\) |
|
||||
| `skipDisambig` | boolean | Nein | Begriffsklärungsergebnisse überspringen \(Standard: false\) |
|
||||
|
||||
#### Ausgabe
|
||||
|
||||
| Parameter | Typ | Beschreibung |
|
||||
| --------- | ---- | ----------- |
|
||||
| `heading` | string | Die Überschrift/der Titel der Sofortantwort |
|
||||
| `abstract` | string | Eine kurze Zusammenfassung des Themas |
|
||||
| `abstractText` | string | Einfache Textversion der Zusammenfassung |
|
||||
| `abstractSource` | string | Die Quelle der Zusammenfassung \(z.B. Wikipedia\) |
|
||||
| `abstractURL` | string | URL zur Quelle der Zusammenfassung |
|
||||
| `image` | string | URL zu einem Bild zum Thema |
|
||||
| `answer` | string | Direkte Antwort, falls verfügbar \(z.B. für Berechnungen\) |
|
||||
| `answerType` | string | Typ der Antwort \(z.B. calc, ip, usw.\) |
|
||||
| `type` | string | Antworttyp: A \(Artikel\), D \(Begriffsklärung\), C \(Kategorie\), N \(Name\), E \(Exklusiv\) |
|
||||
| `relatedTopics` | array | Array verwandter Themen mit URLs und Beschreibungen |
|
||||
|
||||
## Hinweise
|
||||
|
||||
- Kategorie: `tools`
|
||||
- Typ: `duckduckgo`
|
||||
@@ -1,183 +0,0 @@
|
||||
---
|
||||
title: SFTP
|
||||
description: Übertragen Sie Dateien über SFTP (SSH File Transfer Protocol)
|
||||
---
|
||||
|
||||
import { BlockInfoCard } from "@/components/ui/block-info-card"
|
||||
|
||||
<BlockInfoCard
|
||||
type="sftp"
|
||||
color="#2D3748"
|
||||
/>
|
||||
|
||||
{/* MANUAL-CONTENT-START:intro */}
|
||||
[SFTP (SSH File Transfer Protocol)](https://en.wikipedia.org/wiki/SSH_File_Transfer_Protocol) ist ein sicheres Netzwerkprotokoll, das es Ihnen ermöglicht, Dateien auf entfernten Servern hochzuladen, herunterzuladen und zu verwalten. SFTP arbeitet über SSH und ist damit ideal für automatisierte, verschlüsselte Dateiübertragungen und die Fernverwaltung von Dateien in modernen Arbeitsabläufen.
|
||||
|
||||
Mit den in Sim integrierten SFTP-Tools können Sie die Übertragung von Dateien zwischen Ihren KI-Agenten und externen Systemen oder Servern einfach automatisieren. Dies ermöglicht Ihren Agenten, kritische Datenaustausche, Backups, Dokumentenerstellung und die Orchestrierung entfernter Systeme – alles mit robuster Sicherheit – zu verwalten.
|
||||
|
||||
**Wichtige Funktionen, die über SFTP-Tools verfügbar sind:**
|
||||
|
||||
- **Dateien hochladen:** Übertragen Sie nahtlos Dateien jeder Art von Ihrem Workflow auf einen entfernten Server, mit Unterstützung für Passwort- und SSH-Private-Key-Authentifizierung.
|
||||
- **Dateien herunterladen:** Rufen Sie Dateien von entfernten SFTP-Servern direkt zur Verarbeitung, Archivierung oder weiteren Automatisierung ab.
|
||||
- **Dateien auflisten & verwalten:** Verzeichnisse auflisten, Dateien und Ordner löschen oder erstellen und Dateisystemberechtigungen ferngesteuert verwalten.
|
||||
- **Flexible Authentifizierung:** Verbinden Sie sich entweder mit herkömmlichen Passwörtern oder SSH-Schlüsseln, mit Unterstützung für Passphrasen und Berechtigungskontrolle.
|
||||
- **Unterstützung großer Dateien:** Verwalten Sie programmatisch große Datei-Uploads und -Downloads, mit integrierten Größenbeschränkungen für die Sicherheit.
|
||||
|
||||
Durch die Integration von SFTP in Sim können Sie sichere Dateioperationen als Teil jedes Workflows automatisieren, sei es Datenerfassung, Berichterstattung, Wartung entfernter Systeme oder dynamischer Inhaltsaustausch zwischen Plattformen.
|
||||
|
||||
Die folgenden Abschnitte beschreiben die wichtigsten verfügbaren SFTP-Tools:
|
||||
|
||||
- **sftp_upload:** Laden Sie eine oder mehrere Dateien auf einen entfernten Server hoch.
|
||||
- **sftp_download:** Laden Sie Dateien von einem entfernten Server in Ihren Workflow herunter.
|
||||
- **sftp_list:** Listen Sie Verzeichnisinhalte auf einem entfernten SFTP-Server auf.
|
||||
- **sftp_delete:** Löschen Sie Dateien oder Verzeichnisse von einem entfernten Server.
|
||||
- **sftp_create:** Erstellen Sie neue Dateien auf einem entfernten SFTP-Server.
|
||||
- **sftp_mkdir:** Erstellen Sie neue Verzeichnisse aus der Ferne.
|
||||
|
||||
Siehe die Werkzeugdokumentation unten für detaillierte Ein- und Ausgabeparameter für jede Operation.
|
||||
{/* MANUAL-CONTENT-END */}
|
||||
|
||||
## Nutzungsanweisungen
|
||||
|
||||
Dateien auf Remote-Servern über SFTP hochladen, herunterladen, auflisten und verwalten. Unterstützt sowohl Passwort- als auch Private-Key-Authentifizierung für sichere Dateiübertragungen.
|
||||
|
||||
## Werkzeuge
|
||||
|
||||
### `sftp_upload`
|
||||
|
||||
Dateien auf einen Remote-SFTP-Server hochladen
|
||||
|
||||
#### Eingabe
|
||||
|
||||
| Parameter | Typ | Erforderlich | Beschreibung |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `host` | string | Ja | SFTP-Server-Hostname oder IP-Adresse |
|
||||
| `port` | number | Ja | SFTP-Server-Port \(Standard: 22\) |
|
||||
| `username` | string | Ja | SFTP-Benutzername |
|
||||
| `password` | string | Nein | Passwort für die Authentifizierung \(wenn kein Private Key verwendet wird\) |
|
||||
| `privateKey` | string | Nein | Private Key für die Authentifizierung \(OpenSSH-Format\) |
|
||||
| `passphrase` | string | Nein | Passphrase für verschlüsselten Private Key |
|
||||
| `remotePath` | string | Ja | Zielverzeichnis auf dem Remote-Server |
|
||||
| `files` | file[] | Nein | Hochzuladende Dateien |
|
||||
| `fileContent` | string | Nein | Direkter Dateiinhalt zum Hochladen \(für Textdateien\) |
|
||||
| `fileName` | string | Nein | Dateiname bei Verwendung von direktem Inhalt |
|
||||
| `overwrite` | boolean | Nein | Ob bestehende Dateien überschrieben werden sollen \(Standard: true\) |
|
||||
| `permissions` | string | Nein | Dateiberechtigungen \(z.B. 0644\) |
|
||||
|
||||
#### Ausgabe
|
||||
|
||||
| Parameter | Typ | Beschreibung |
|
||||
| --------- | ---- | ----------- |
|
||||
| `success` | boolean | Ob der Upload erfolgreich war |
|
||||
| `uploadedFiles` | json | Array mit Details zu hochgeladenen Dateien \(Name, remotePath, Größe\) |
|
||||
| `message` | string | Statusmeldung des Vorgangs |
|
||||
|
||||
### `sftp_download`
|
||||
|
||||
Datei von einem entfernten SFTP-Server herunterladen
|
||||
|
||||
#### Eingabe
|
||||
|
||||
| Parameter | Typ | Erforderlich | Beschreibung |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `host` | string | Ja | SFTP-Server-Hostname oder IP-Adresse |
|
||||
| `port` | number | Ja | SFTP-Server-Port \(Standard: 22\) |
|
||||
| `username` | string | Ja | SFTP-Benutzername |
|
||||
| `password` | string | Nein | Passwort für die Authentifizierung \(wenn kein privater Schlüssel verwendet wird\) |
|
||||
| `privateKey` | string | Nein | Privater Schlüssel für die Authentifizierung \(OpenSSH-Format\) |
|
||||
| `passphrase` | string | Nein | Passphrase für verschlüsselten privaten Schlüssel |
|
||||
| `remotePath` | string | Ja | Pfad zur Datei auf dem entfernten Server |
|
||||
| `encoding` | string | Nein | Ausgabe-Kodierung: utf-8 für Text, base64 für Binärdaten \(Standard: utf-8\) |
|
||||
|
||||
#### Ausgabe
|
||||
|
||||
| Parameter | Typ | Beschreibung |
|
||||
| --------- | ---- | ----------- |
|
||||
| `success` | boolean | Ob der Download erfolgreich war |
|
||||
| `fileName` | string | Name der heruntergeladenen Datei |
|
||||
| `content` | string | Dateiinhalt \(Text oder base64-kodiert\) |
|
||||
| `size` | number | Dateigröße in Bytes |
|
||||
| `encoding` | string | Inhaltskodierung \(utf-8 oder base64\) |
|
||||
| `message` | string | Statusmeldung des Vorgangs |
|
||||
|
||||
### `sftp_list`
|
||||
|
||||
Dateien und Verzeichnisse auf einem entfernten SFTP-Server auflisten
|
||||
|
||||
#### Eingabe
|
||||
|
||||
| Parameter | Typ | Erforderlich | Beschreibung |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `host` | string | Ja | SFTP-Server-Hostname oder IP-Adresse |
|
||||
| `port` | number | Ja | SFTP-Server-Port \(Standard: 22\) |
|
||||
| `username` | string | Ja | SFTP-Benutzername |
|
||||
| `password` | string | Nein | Passwort für die Authentifizierung \(wenn kein privater Schlüssel verwendet wird\) |
|
||||
| `privateKey` | string | Nein | Privater Schlüssel für die Authentifizierung \(OpenSSH-Format\) |
|
||||
| `passphrase` | string | Nein | Passphrase für verschlüsselten privaten Schlüssel |
|
||||
| `remotePath` | string | Ja | Verzeichnispfad auf dem entfernten Server |
|
||||
| `detailed` | boolean | Nein | Detaillierte Dateiinformationen einschließen \(Größe, Berechtigungen, Änderungsdatum\) |
|
||||
|
||||
#### Ausgabe
|
||||
|
||||
| Parameter | Typ | Beschreibung |
|
||||
| --------- | ---- | ----------- |
|
||||
| `success` | boolean | Ob der Vorgang erfolgreich war |
|
||||
| `path` | string | Verzeichnispfad, der aufgelistet wurde |
|
||||
| `entries` | json | Array von Verzeichniseinträgen mit Name, Typ, Größe, Berechtigungen, modifiedAt |
|
||||
| `count` | number | Anzahl der Einträge im Verzeichnis |
|
||||
| `message` | string | Statusmeldung des Vorgangs |
|
||||
|
||||
### `sftp_delete`
|
||||
|
||||
Löschen einer Datei oder eines Verzeichnisses auf einem entfernten SFTP-Server
|
||||
|
||||
#### Eingabe
|
||||
|
||||
| Parameter | Typ | Erforderlich | Beschreibung |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `host` | string | Ja | SFTP-Server-Hostname oder IP-Adresse |
|
||||
| `port` | number | Ja | SFTP-Server-Port \(Standard: 22\) |
|
||||
| `username` | string | Ja | SFTP-Benutzername |
|
||||
| `password` | string | Nein | Passwort für die Authentifizierung \(wenn kein privater Schlüssel verwendet wird\) |
|
||||
| `privateKey` | string | Nein | Privater Schlüssel für die Authentifizierung \(OpenSSH-Format\) |
|
||||
| `passphrase` | string | Nein | Passphrase für verschlüsselten privaten Schlüssel |
|
||||
| `remotePath` | string | Ja | Pfad zur Datei oder zum Verzeichnis, das gelöscht werden soll |
|
||||
| `recursive` | boolean | Nein | Verzeichnisse rekursiv löschen |
|
||||
|
||||
#### Ausgabe
|
||||
|
||||
| Parameter | Typ | Beschreibung |
|
||||
| --------- | ---- | ----------- |
|
||||
| `success` | boolean | Ob das Löschen erfolgreich war |
|
||||
| `deletedPath` | string | Pfad, der gelöscht wurde |
|
||||
| `message` | string | Statusmeldung des Vorgangs |
|
||||
|
||||
### `sftp_mkdir`
|
||||
|
||||
Ein Verzeichnis auf einem entfernten SFTP-Server erstellen
|
||||
|
||||
#### Eingabe
|
||||
|
||||
| Parameter | Typ | Erforderlich | Beschreibung |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `host` | string | Ja | SFTP-Server-Hostname oder IP-Adresse |
|
||||
| `port` | number | Ja | SFTP-Server-Port \(Standard: 22\) |
|
||||
| `username` | string | Ja | SFTP-Benutzername |
|
||||
| `password` | string | Nein | Passwort für die Authentifizierung \(wenn kein privater Schlüssel verwendet wird\) |
|
||||
| `privateKey` | string | Nein | Privater Schlüssel für die Authentifizierung \(OpenSSH-Format\) |
|
||||
| `passphrase` | string | Nein | Passphrase für verschlüsselten privaten Schlüssel |
|
||||
| `remotePath` | string | Ja | Pfad für das neue Verzeichnis |
|
||||
| `recursive` | boolean | Nein | Übergeordnete Verzeichnisse erstellen, falls sie nicht existieren |
|
||||
|
||||
#### Ausgabe
|
||||
|
||||
| Parameter | Typ | Beschreibung |
|
||||
| --------- | ---- | ----------- |
|
||||
| `success` | boolean | Ob das Verzeichnis erfolgreich erstellt wurde |
|
||||
| `createdPath` | string | Pfad des erstellten Verzeichnisses |
|
||||
| `message` | string | Statusmeldung des Vorgangs |
|
||||
|
||||
## Hinweise
|
||||
|
||||
- Kategorie: `tools`
|
||||
- Typ: `sftp`
|
||||
@@ -7,7 +7,7 @@ import { BlockInfoCard } from "@/components/ui/block-info-card"
|
||||
|
||||
<BlockInfoCard
|
||||
type="smtp"
|
||||
color="#2D3748"
|
||||
color="#4A5568"
|
||||
/>
|
||||
|
||||
{/* MANUAL-CONTENT-START:intro */}
|
||||
|
||||
@@ -1,68 +0,0 @@
|
||||
---
|
||||
title: DuckDuckGo
|
||||
description: Search with DuckDuckGo
|
||||
---
|
||||
|
||||
import { BlockInfoCard } from "@/components/ui/block-info-card"
|
||||
|
||||
<BlockInfoCard
|
||||
type="duckduckgo"
|
||||
color="#FFFFFF"
|
||||
/>
|
||||
|
||||
{/* MANUAL-CONTENT-START:intro */}
|
||||
[DuckDuckGo](https://duckduckgo.com/) is a privacy-focused web search engine that delivers instant answers, abstracts, related topics, and more — without tracking you or your searches. DuckDuckGo makes it easy to find information without any user profiling or targeted ads.
|
||||
|
||||
With DuckDuckGo in Sim, you can:
|
||||
|
||||
- **Search the web**: Instantly find answers, facts, and overviews for a given search query
|
||||
- **Get direct answers**: Retrieve specific responses for calculations, conversions, or factual queries
|
||||
- **Access abstracts**: Receive short summaries or descriptions for your search topics
|
||||
- **Fetch related topics**: Discover links and references relevant to your search
|
||||
- **Filter output**: Optionally remove HTML or skip disambiguation for cleaner results
|
||||
|
||||
These features enable your Sim agents to automate access to fresh web knowledge — from surfacing facts in a workflow, to enriching documents and analysis with up-to-date information. Because DuckDuckGo’s Instant Answers API is open and does not require an API key, it’s simple and privacy-safe to integrate into your automated business processes.
|
||||
{/* MANUAL-CONTENT-END */}
|
||||
|
||||
|
||||
## Usage Instructions
|
||||
|
||||
Search the web using DuckDuckGo Instant Answers API. Returns instant answers, abstracts, related topics, and more. Free to use without an API key.
|
||||
|
||||
|
||||
|
||||
## Tools
|
||||
|
||||
### `duckduckgo_search`
|
||||
|
||||
Search the web using DuckDuckGo Instant Answers API. Returns instant answers, abstracts, and related topics for your query. Free to use without an API key.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `query` | string | Yes | The search query to execute |
|
||||
| `noHtml` | boolean | No | Remove HTML from text in results \(default: true\) |
|
||||
| `skipDisambig` | boolean | No | Skip disambiguation results \(default: false\) |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `heading` | string | The heading/title of the instant answer |
|
||||
| `abstract` | string | A short abstract summary of the topic |
|
||||
| `abstractText` | string | Plain text version of the abstract |
|
||||
| `abstractSource` | string | The source of the abstract \(e.g., Wikipedia\) |
|
||||
| `abstractURL` | string | URL to the source of the abstract |
|
||||
| `image` | string | URL to an image related to the topic |
|
||||
| `answer` | string | Direct answer if available \(e.g., for calculations\) |
|
||||
| `answerType` | string | Type of the answer \(e.g., calc, ip, etc.\) |
|
||||
| `type` | string | Response type: A \(article\), D \(disambiguation\), C \(category\), N \(name\), E \(exclusive\) |
|
||||
| `relatedTopics` | array | Array of related topics with URLs and descriptions |
|
||||
|
||||
|
||||
|
||||
## Notes
|
||||
|
||||
- Category: `tools`
|
||||
- Type: `duckduckgo`
|
||||
@@ -15,7 +15,6 @@
|
||||
"datadog",
|
||||
"discord",
|
||||
"dropbox",
|
||||
"duckduckgo",
|
||||
"dynamodb",
|
||||
"elasticsearch",
|
||||
"elevenlabs",
|
||||
@@ -81,7 +80,6 @@
|
||||
"sendgrid",
|
||||
"sentry",
|
||||
"serper",
|
||||
"sftp",
|
||||
"sharepoint",
|
||||
"shopify",
|
||||
"slack",
|
||||
@@ -110,7 +108,6 @@
|
||||
"wordpress",
|
||||
"x",
|
||||
"youtube",
|
||||
"zapier",
|
||||
"zendesk",
|
||||
"zep",
|
||||
"zoom"
|
||||
|
||||
@@ -1,188 +0,0 @@
|
||||
---
|
||||
title: SFTP
|
||||
description: Transfer files via SFTP (SSH File Transfer Protocol)
|
||||
---
|
||||
|
||||
import { BlockInfoCard } from "@/components/ui/block-info-card"
|
||||
|
||||
<BlockInfoCard
|
||||
type="sftp"
|
||||
color="#2D3748"
|
||||
/>
|
||||
|
||||
{/* MANUAL-CONTENT-START:intro */}
|
||||
[SFTP (SSH File Transfer Protocol)](https://en.wikipedia.org/wiki/SSH_File_Transfer_Protocol) is a secure network protocol that enables you to upload, download, and manage files on remote servers. SFTP operates over SSH, making it ideal for automated, encrypted file transfers and remote file management within modern workflows.
|
||||
|
||||
With SFTP tools integrated into Sim, you can easily automate the movement of files between your AI agents and external systems or servers. This empowers your agents to manage critical data exchanges, backups, document generation, and remote system orchestration—all with robust security.
|
||||
|
||||
**Key functionality available via SFTP tools:**
|
||||
|
||||
- **Upload Files:** Seamlessly transfer files of any type from your workflow to a remote server, with support for both password and SSH private key authentication.
|
||||
- **Download Files:** Retrieve files from remote SFTP servers directly for processing, archiving, or further automation.
|
||||
- **List & Manage Files:** Enumerate directories, delete or create files and folders, and manage file system permissions remotely.
|
||||
- **Flexible Authentication:** Connect using either traditional passwords or SSH keys, with support for passphrases and permissions control.
|
||||
- **Large File Support:** Programmatically manage large file uploads and downloads, with built-in size limits for safety.
|
||||
|
||||
By integrating SFTP into Sim, you can automate secure file operations as part of any workflow, whether it’s data collection, reporting, remote system maintenance, or dynamic content exchange between platforms.
|
||||
|
||||
The sections below describe the key SFTP tools available:
|
||||
|
||||
- **sftp_upload:** Upload one or more files to a remote server.
|
||||
- **sftp_download:** Download files from a remote server to your workflow.
|
||||
- **sftp_list:** List directory contents on a remote SFTP server.
|
||||
- **sftp_delete:** Delete files or directories from a remote server.
|
||||
- **sftp_create:** Create new files on a remote SFTP server.
|
||||
- **sftp_mkdir:** Create new directories remotely.
|
||||
|
||||
See the tool documentation below for detailed input and output parameters for each operation.
|
||||
{/* MANUAL-CONTENT-END */}
|
||||
|
||||
|
||||
## Usage Instructions
|
||||
|
||||
Upload, download, list, and manage files on remote servers via SFTP. Supports both password and private key authentication for secure file transfers.
|
||||
|
||||
|
||||
|
||||
## Tools
|
||||
|
||||
### `sftp_upload`
|
||||
|
||||
Upload files to a remote SFTP server
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `host` | string | Yes | SFTP server hostname or IP address |
|
||||
| `port` | number | Yes | SFTP server port \(default: 22\) |
|
||||
| `username` | string | Yes | SFTP username |
|
||||
| `password` | string | No | Password for authentication \(if not using private key\) |
|
||||
| `privateKey` | string | No | Private key for authentication \(OpenSSH format\) |
|
||||
| `passphrase` | string | No | Passphrase for encrypted private key |
|
||||
| `remotePath` | string | Yes | Destination directory on the remote server |
|
||||
| `files` | file[] | No | Files to upload |
|
||||
| `fileContent` | string | No | Direct file content to upload \(for text files\) |
|
||||
| `fileName` | string | No | File name when using direct content |
|
||||
| `overwrite` | boolean | No | Whether to overwrite existing files \(default: true\) |
|
||||
| `permissions` | string | No | File permissions \(e.g., 0644\) |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `success` | boolean | Whether the upload was successful |
|
||||
| `uploadedFiles` | json | Array of uploaded file details \(name, remotePath, size\) |
|
||||
| `message` | string | Operation status message |
|
||||
|
||||
### `sftp_download`
|
||||
|
||||
Download a file from a remote SFTP server
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `host` | string | Yes | SFTP server hostname or IP address |
|
||||
| `port` | number | Yes | SFTP server port \(default: 22\) |
|
||||
| `username` | string | Yes | SFTP username |
|
||||
| `password` | string | No | Password for authentication \(if not using private key\) |
|
||||
| `privateKey` | string | No | Private key for authentication \(OpenSSH format\) |
|
||||
| `passphrase` | string | No | Passphrase for encrypted private key |
|
||||
| `remotePath` | string | Yes | Path to the file on the remote server |
|
||||
| `encoding` | string | No | Output encoding: utf-8 for text, base64 for binary \(default: utf-8\) |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `success` | boolean | Whether the download was successful |
|
||||
| `fileName` | string | Name of the downloaded file |
|
||||
| `content` | string | File content \(text or base64 encoded\) |
|
||||
| `size` | number | File size in bytes |
|
||||
| `encoding` | string | Content encoding \(utf-8 or base64\) |
|
||||
| `message` | string | Operation status message |
|
||||
|
||||
### `sftp_list`
|
||||
|
||||
List files and directories on a remote SFTP server
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `host` | string | Yes | SFTP server hostname or IP address |
|
||||
| `port` | number | Yes | SFTP server port \(default: 22\) |
|
||||
| `username` | string | Yes | SFTP username |
|
||||
| `password` | string | No | Password for authentication \(if not using private key\) |
|
||||
| `privateKey` | string | No | Private key for authentication \(OpenSSH format\) |
|
||||
| `passphrase` | string | No | Passphrase for encrypted private key |
|
||||
| `remotePath` | string | Yes | Directory path on the remote server |
|
||||
| `detailed` | boolean | No | Include detailed file information \(size, permissions, modified date\) |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `success` | boolean | Whether the operation was successful |
|
||||
| `path` | string | Directory path that was listed |
|
||||
| `entries` | json | Array of directory entries with name, type, size, permissions, modifiedAt |
|
||||
| `count` | number | Number of entries in the directory |
|
||||
| `message` | string | Operation status message |
|
||||
|
||||
### `sftp_delete`
|
||||
|
||||
Delete a file or directory on a remote SFTP server
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `host` | string | Yes | SFTP server hostname or IP address |
|
||||
| `port` | number | Yes | SFTP server port \(default: 22\) |
|
||||
| `username` | string | Yes | SFTP username |
|
||||
| `password` | string | No | Password for authentication \(if not using private key\) |
|
||||
| `privateKey` | string | No | Private key for authentication \(OpenSSH format\) |
|
||||
| `passphrase` | string | No | Passphrase for encrypted private key |
|
||||
| `remotePath` | string | Yes | Path to the file or directory to delete |
|
||||
| `recursive` | boolean | No | Delete directories recursively |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `success` | boolean | Whether the deletion was successful |
|
||||
| `deletedPath` | string | Path that was deleted |
|
||||
| `message` | string | Operation status message |
|
||||
|
||||
### `sftp_mkdir`
|
||||
|
||||
Create a directory on a remote SFTP server
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `host` | string | Yes | SFTP server hostname or IP address |
|
||||
| `port` | number | Yes | SFTP server port \(default: 22\) |
|
||||
| `username` | string | Yes | SFTP username |
|
||||
| `password` | string | No | Password for authentication \(if not using private key\) |
|
||||
| `privateKey` | string | No | Private key for authentication \(OpenSSH format\) |
|
||||
| `passphrase` | string | No | Passphrase for encrypted private key |
|
||||
| `remotePath` | string | Yes | Path for the new directory |
|
||||
| `recursive` | boolean | No | Create parent directories if they do not exist |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `success` | boolean | Whether the directory was created successfully |
|
||||
| `createdPath` | string | Path of the created directory |
|
||||
| `message` | string | Operation status message |
|
||||
|
||||
|
||||
|
||||
## Notes
|
||||
|
||||
- Category: `tools`
|
||||
- Type: `sftp`
|
||||
@@ -7,7 +7,7 @@ import { BlockInfoCard } from "@/components/ui/block-info-card"
|
||||
|
||||
<BlockInfoCard
|
||||
type="smtp"
|
||||
color="#2D3748"
|
||||
color="#4A5568"
|
||||
/>
|
||||
|
||||
{/* MANUAL-CONTENT-START:intro */}
|
||||
|
||||
@@ -1,275 +0,0 @@
|
||||
---
|
||||
title: Zapier
|
||||
description: Execute actions across 7,000+ apps using Zapier AI Actions
|
||||
---
|
||||
|
||||
import { BlockInfoCard } from "@/components/ui/block-info-card"
|
||||
|
||||
<BlockInfoCard
|
||||
type="zapier"
|
||||
color="#FFFFFF"
|
||||
/>
|
||||
|
||||
{/* MANUAL-CONTENT-START:intro */}
|
||||
[Zapier](https://zapier.com/) connects 7,000+ apps and automates workflows without manual coding. The Zapier integration in Sim empowers you to execute, search, build, and manage powerful AI-driven actions across thousands of applications—all with plain English instructions.
|
||||
|
||||
With Zapier AI Actions in Sim, you can:
|
||||
|
||||
- **Execute Actions:** Instantly trigger any stored AI Action in your Zapier account. Launch emails, messages, project updates, document workflows, CRM updates, and much more.
|
||||
- **List Actions:** Retrieve a list of your available AI Actions configured in Zapier. Discover what's possible and find the right tool for your workflow.
|
||||
- **Search Apps:** Find apps in Zapier’s ecosystem by name or keyword. Easily check if the app you need is supported before building automations.
|
||||
- **Find Actions (Guess):** Describe what you want to accomplish in plain English (e.g., "send a Slack message", "create a Google Sheet row"), and let Zapier’s AI suggest matching actions—even across unfamiliar apps or APIs.
|
||||
- **Create Actions:** Programmatically define new AI Actions by specifying the target app, action type (write, read, search), and required parameters, directly from your workflow.
|
||||
|
||||
By combining these capabilities, you can search for apps, define new AI Actions, discover possible automations, list available actions, and execute any workflow—fully automated, with the power of both Sim and Zapier.
|
||||
{/* MANUAL-CONTENT-END */}
|
||||
|
||||
|
||||
## Usage Instructions
|
||||
|
||||
Connect to Zapier AI Actions to execute any of 30,000+ actions across 7,000+ apps. Send emails, create documents, update CRMs, post messages, and more - all through natural language instructions. Requires a Zapier AI Actions API key.
|
||||
|
||||
|
||||
|
||||
## Tools
|
||||
|
||||
### `zapier_execute_action`
|
||||
|
||||
Execute a stored AI Action in Zapier. Runs any of the 30,000+ actions across 7,000+ apps that Zapier supports.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | Zapier AI Actions API key from actions.zapier.com/credentials |
|
||||
| `actionId` | string | Yes | The ID of the AI Action to execute |
|
||||
| `instructions` | string | Yes | Plain English instructions for what the action should do \(e.g., "Send a message about the weekly report to #general"\) |
|
||||
| `previewOnly` | boolean | No | If true, preview the execution without actually running it |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `executionLogId` | string | Unique identifier for this execution \(can be used for feedback\) |
|
||||
| `actionUsed` | string | Name of the action that was executed |
|
||||
| `inputParams` | json | Parameters that were passed to the API |
|
||||
| `resolvedParams` | json | Parameters that the AI resolved for execution |
|
||||
| `results` | json | Results from the action execution |
|
||||
| `resultFieldLabels` | json | Human-readable labels for result fields |
|
||||
| `status` | string | Execution status: success, error, empty, preview, or halted |
|
||||
| `error` | string | Error message if execution failed |
|
||||
|
||||
### `zapier_list_actions`
|
||||
|
||||
List all AI Actions configured in your Zapier account. Returns stored actions that can be executed.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | Zapier AI Actions API key from actions.zapier.com/credentials |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `actions` | json | Array of configured AI Actions with id, description, actionType, app, appLabel, action, actionLabel, params, accountId, authenticationId, needs |
|
||||
| `configurationLink` | string | Link to configure more actions in Zapier |
|
||||
|
||||
### `zapier_search_apps`
|
||||
|
||||
Search for apps available in Zapier. Returns apps with their available action counts.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | Zapier AI Actions API key from actions.zapier.com/credentials |
|
||||
| `query` | string | No | Optional search query to filter apps by name |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `apps` | json | Array of apps with app, name, logoUrl, authType, actions \(raw counts by type\), actionCount, writeActionCount, searchActionCount, readActionCount |
|
||||
|
||||
### `zapier_guess_actions`
|
||||
|
||||
Find relevant Zapier actions using natural language. Searches across 30,000+ actions to find the best matches for your query.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | Zapier AI Actions API key from actions.zapier.com/credentials |
|
||||
| `query` | string | Yes | Natural language description of what you want to do \(e.g., "send a Slack message", "create a Google Doc"\) |
|
||||
| `actionTypes` | array | No | Types of actions to search for: write, search, read. If not specified, returns all types. |
|
||||
| `count` | number | No | Maximum number of results to return \(default: 25\) |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `actions` | json | Array of matching actions with app, action, actionType, name \(combined app/action name\), description, image, and score |
|
||||
|
||||
### `zapier_create_action`
|
||||
|
||||
Create a new stored AI Action in Zapier. The action can then be executed with zapier_execute_action.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | Zapier AI Actions API key from actions.zapier.com/credentials |
|
||||
| `app` | string | Yes | The app identifier \(e.g., "slack", "gmail", "google-docs"\) |
|
||||
| `action` | string | Yes | The action identifier \(e.g., "send_channel_message", "send_email"\) |
|
||||
| `actionType` | string | No | Type of action: write, search, or read. Defaults to write. |
|
||||
| `accountId` | number | No | Zapier account ID |
|
||||
| `authenticationId` | number | No | Authentication ID for the app connection |
|
||||
| `meta` | json | No | Metadata object with params labels, app_label, action_label, authentication_label, app_needs_auth |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `id` | string | The ID of the created AI Action \(use this with execute_action\) |
|
||||
| `description` | string | Description of the action |
|
||||
| `actionType` | string | Type of action \(write, search, read, read_bulk, search_or_write, search_and_write\) |
|
||||
| `app` | string | App identifier |
|
||||
| `appLabel` | string | Human-readable app label from meta |
|
||||
| `action` | string | Action identifier |
|
||||
| `actionLabel` | string | Human-readable action label from meta |
|
||||
|
||||
### `zapier_stateless_execute`
|
||||
|
||||
Execute any Zapier action directly without creating a stored AI Action first. Provide the app, action, and instructions.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | Zapier AI Actions API key from actions.zapier.com/credentials |
|
||||
| `app` | string | Yes | The app to use \(e.g., "SlackAPI", "GoogleSheetsV2API", "GmailV2API"\) |
|
||||
| `action` | string | Yes | The action to run \(e.g., "direct_message", "add_row", "send_email"\) |
|
||||
| `instructions` | string | Yes | Plain English instructions about how to run the action \(e.g., "Send a message saying hello to #general"\) |
|
||||
| `actionType` | string | No | Type of action: write, search, read, read_bulk, search_or_write, search_and_write |
|
||||
| `previewOnly` | boolean | No | If true, preview the execution without actually running it |
|
||||
| `authenticationId` | number | No | Authentication ID for the app connection |
|
||||
| `accountId` | number | No | Zapier account ID |
|
||||
| `providerId` | string | No | Provider ID for AI Actions |
|
||||
| `tokenBudget` | number | No | Max tokens per field \(default: 1000\) |
|
||||
| `skipParamGuessing` | boolean | No | Skip AI parameter guessing |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `executionLogId` | string | Unique identifier for this execution |
|
||||
| `actionUsed` | string | Name of the action that was executed |
|
||||
| `inputParams` | json | Parameters that were passed to the API |
|
||||
| `resolvedParams` | json | Parameters that the AI resolved for execution |
|
||||
| `results` | json | Results from the action execution |
|
||||
| `resultFieldLabels` | json | Human-readable labels for result fields |
|
||||
| `status` | string | Execution status: success, error, empty, preview, or halted |
|
||||
| `error` | string | Error message if execution failed |
|
||||
|
||||
### `zapier_search_app_actions`
|
||||
|
||||
Search for available actions within a specific Zapier app. Returns all actions the app supports.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | Zapier AI Actions API key from actions.zapier.com/credentials |
|
||||
| `app` | string | Yes | The app identifier to search actions for \(e.g., "SlackAPI", "GmailV2API"\) |
|
||||
| `query` | string | No | Optional search query to filter actions by name or description |
|
||||
| `actionTypes` | array | No | Filter by action types: write, search, read, read_bulk, search_or_write, search_and_write. Defaults to write and search. |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `actions` | json | Array of actions with app, action, actionType, displayName, description, relevancyScore, appNeedsAuth, appInfo |
|
||||
|
||||
### `zapier_get_action_details`
|
||||
|
||||
Get detailed information about a specific action including its required inputs (needs) and outputs (gives).
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | Zapier AI Actions API key from actions.zapier.com/credentials |
|
||||
| `app` | string | Yes | The app identifier \(e.g., "SlackAPI", "GmailV2API"\) |
|
||||
| `action` | string | Yes | The action identifier \(e.g., "send_channel_message", "send_email"\) |
|
||||
| `actionType` | string | No | Type of action: write, search, read. Defaults to write. |
|
||||
| `includeNeeds` | boolean | No | Include input requirements \(needs\). Defaults to true. |
|
||||
| `includeGives` | boolean | No | Include output specifications \(gives\). Defaults to false. |
|
||||
| `includeSample` | boolean | No | Include sample execution result. Defaults to false. |
|
||||
| `accountId` | number | No | Zapier account ID |
|
||||
| `authenticationId` | number | No | Authentication ID for the app connection |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `action` | json | Action metadata including type, key, name, noun, and description |
|
||||
| `needs` | json | Array of input requirements with key, type, label, required, helpText, defaultValue, choices, dependsOn |
|
||||
| `gives` | json | Array of output fields with key, label, type, important, sample |
|
||||
| `sample` | json | Sample execution result if requested |
|
||||
| `customNeedsProbability` | number | Probability \(0-1\) that this action has custom/dynamic input fields |
|
||||
|
||||
### `zapier_update_action`
|
||||
|
||||
Update an existing stored AI Action configuration in Zapier.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | Zapier AI Actions API key from actions.zapier.com/credentials |
|
||||
| `actionId` | string | Yes | The ID of the AI Action to update |
|
||||
| `app` | string | Yes | The app identifier \(e.g., "SlackAPI", "GmailV2API"\) |
|
||||
| `action` | string | Yes | The action identifier \(e.g., "send_channel_message", "send_email"\) |
|
||||
| `actionType` | string | No | Type of action: write, search, read, read_bulk, search_or_write, search_and_write |
|
||||
| `accountId` | number | No | Zapier account ID |
|
||||
| `authenticationId` | number | No | Authentication ID for the app connection |
|
||||
| `meta` | json | No | Metadata object with params labels, app_label, action_label, authentication_label, app_needs_auth |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `id` | string | The ID of the updated AI Action |
|
||||
| `description` | string | Description of the action |
|
||||
| `actionType` | string | Type of action |
|
||||
| `app` | string | App identifier |
|
||||
| `appLabel` | string | Human-readable app label |
|
||||
| `action` | string | Action identifier |
|
||||
| `actionLabel` | string | Human-readable action label |
|
||||
|
||||
### `zapier_delete_action`
|
||||
|
||||
Delete a stored AI Action from Zapier.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | Zapier AI Actions API key from actions.zapier.com/credentials |
|
||||
| `actionId` | string | Yes | The ID of the AI Action to delete |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `deleted` | boolean | Whether the action was successfully deleted |
|
||||
| `message` | string | Status message |
|
||||
|
||||
|
||||
|
||||
## Notes
|
||||
|
||||
- Category: `tools`
|
||||
- Type: `zapier`
|
||||
@@ -30,9 +30,6 @@ Use the Start block for everything originating from the editor, deploy-to-API, o
|
||||
<Card title="Schedule" href="/triggers/schedule">
|
||||
Cron or interval based execution
|
||||
</Card>
|
||||
<Card title="RSS Feed" href="/triggers/rss">
|
||||
Monitor RSS and Atom feeds for new content
|
||||
</Card>
|
||||
</Cards>
|
||||
|
||||
## Quick Comparison
|
||||
@@ -42,7 +39,6 @@ Use the Start block for everything originating from the editor, deploy-to-API, o
|
||||
| **Start** | Editor runs, deploy-to-API requests, or chat messages |
|
||||
| **Schedule** | Timer managed in schedule block |
|
||||
| **Webhook** | On inbound HTTP request |
|
||||
| **RSS Feed** | New item published to feed |
|
||||
|
||||
> The Start block always exposes `input`, `conversationId`, and `files` fields. Add custom fields to the input format for additional structured data.
|
||||
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
{
|
||||
"pages": ["index", "start", "schedule", "webhook", "rss"]
|
||||
"pages": ["index", "start", "schedule", "webhook"]
|
||||
}
|
||||
|
||||
@@ -1,49 +0,0 @@
|
||||
---
|
||||
title: RSS Feed
|
||||
---
|
||||
|
||||
import { Callout } from 'fumadocs-ui/components/callout'
|
||||
import { Image } from '@/components/ui/image'
|
||||
|
||||
The RSS Feed block monitors RSS and Atom feeds – when new items are published, your workflow triggers automatically.
|
||||
|
||||
<div className="flex justify-center">
|
||||
<Image
|
||||
src="/static/blocks/rss.png"
|
||||
alt="RSS Feed Block"
|
||||
width={500}
|
||||
height={400}
|
||||
className="my-6"
|
||||
/>
|
||||
</div>
|
||||
|
||||
## Configuration
|
||||
|
||||
1. **Add RSS Feed Block** - Drag the RSS Feed block to start your workflow
|
||||
2. **Enter Feed URL** - Paste the URL of any RSS or Atom feed
|
||||
3. **Deploy** - Deploy your workflow to activate polling
|
||||
|
||||
Once deployed, the feed is checked every minute for new items.
|
||||
|
||||
## Output Fields
|
||||
|
||||
| Field | Type | Description |
|
||||
|-------|------|-------------|
|
||||
| `title` | string | Item title |
|
||||
| `link` | string | Item link |
|
||||
| `pubDate` | string | Publication date |
|
||||
| `item` | object | Raw item with all fields |
|
||||
| `feed` | object | Raw feed metadata |
|
||||
|
||||
Access mapped fields directly (`<rss.title>`) or use the raw objects for any field (`<rss.item.author>`, `<rss.feed.language>`).
|
||||
|
||||
## Use Cases
|
||||
|
||||
- **Content monitoring** - Track blogs, news sites, or competitor updates
|
||||
- **Podcast automation** - Trigger workflows when new episodes drop
|
||||
- **Release tracking** - Monitor GitHub releases, changelogs, or product updates
|
||||
- **Social aggregation** - Collect content from platforms that expose RSS feeds
|
||||
|
||||
<Callout>
|
||||
RSS triggers only fire for items published after you save the trigger. Existing feed items are not processed.
|
||||
</Callout>
|
||||
@@ -1,63 +0,0 @@
|
||||
---
|
||||
title: DuckDuckGo
|
||||
description: Busca con DuckDuckGo
|
||||
---
|
||||
|
||||
import { BlockInfoCard } from "@/components/ui/block-info-card"
|
||||
|
||||
<BlockInfoCard
|
||||
type="duckduckgo"
|
||||
color="#FFFFFF"
|
||||
/>
|
||||
|
||||
{/* MANUAL-CONTENT-START:intro */}
|
||||
[DuckDuckGo](https://duckduckgo.com/) es un motor de búsqueda web enfocado en la privacidad que ofrece respuestas instantáneas, resúmenes, temas relacionados y más, sin rastrear a ti o tus búsquedas. DuckDuckGo facilita encontrar información sin perfiles de usuario ni anuncios dirigidos.
|
||||
|
||||
Con DuckDuckGo en Sim, puedes:
|
||||
|
||||
- **Buscar en la web**: Encuentra instantáneamente respuestas, hechos y resúmenes para una consulta de búsqueda determinada
|
||||
- **Obtener respuestas directas**: Recibe respuestas específicas para cálculos, conversiones o consultas factuales
|
||||
- **Acceder a resúmenes**: Recibe breves sumarios o descripciones para tus temas de búsqueda
|
||||
- **Obtener temas relacionados**: Descubre enlaces y referencias relevantes para tu búsqueda
|
||||
- **Filtrar resultados**: Opcionalmente elimina HTML o evita la desambiguación para obtener resultados más limpios
|
||||
|
||||
Estas características permiten a tus agentes Sim automatizar el acceso a conocimientos web actualizados, desde mostrar hechos en un flujo de trabajo hasta enriquecer documentos y análisis con información actualizada. Como la API de Respuestas Instantáneas de DuckDuckGo es abierta y no requiere una clave API, es simple y segura para la privacidad al integrarla en tus procesos de negocio automatizados.
|
||||
{/* MANUAL-CONTENT-END */}
|
||||
|
||||
## Instrucciones de uso
|
||||
|
||||
Busca en la web usando la API de Respuestas Instantáneas de DuckDuckGo. Devuelve respuestas instantáneas, resúmenes, temas relacionados y más. Uso gratuito sin necesidad de clave API.
|
||||
|
||||
## Herramientas
|
||||
|
||||
### `duckduckgo_search`
|
||||
|
||||
Busca en la web usando la API de Respuestas Instantáneas de DuckDuckGo. Devuelve respuestas instantáneas, resúmenes y temas relacionados para tu consulta. Uso gratuito sin necesidad de clave API.
|
||||
|
||||
#### Entrada
|
||||
|
||||
| Parámetro | Tipo | Obligatorio | Descripción |
|
||||
| --------- | ---- | ----------- | ----------- |
|
||||
| `query` | string | Sí | La consulta de búsqueda a ejecutar |
|
||||
| `noHtml` | boolean | No | Eliminar HTML del texto en los resultados \(predeterminado: true\) |
|
||||
| `skipDisambig` | boolean | No | Omitir resultados de desambiguación \(predeterminado: false\) |
|
||||
|
||||
#### Salida
|
||||
|
||||
| Parámetro | Tipo | Descripción |
|
||||
| --------- | ---- | ----------- |
|
||||
| `heading` | string | El encabezado/título de la respuesta instantánea |
|
||||
| `abstract` | string | Un breve resumen abstracto del tema |
|
||||
| `abstractText` | string | Versión en texto plano del resumen |
|
||||
| `abstractSource` | string | La fuente del resumen \(p. ej., Wikipedia\) |
|
||||
| `abstractURL` | string | URL a la fuente del resumen |
|
||||
| `image` | string | URL a una imagen relacionada con el tema |
|
||||
| `answer` | string | Respuesta directa si está disponible \(p. ej., para cálculos\) |
|
||||
| `answerType` | string | Tipo de respuesta \(p. ej., calc, ip, etc.\) |
|
||||
| `type` | string | Tipo de respuesta: A \(artículo\), D \(desambiguación\), C \(categoría\), N \(nombre\), E \(exclusivo\) |
|
||||
| `relatedTopics` | array | Array de temas relacionados con URLs y descripciones |
|
||||
|
||||
## Notas
|
||||
|
||||
- Categoría: `tools`
|
||||
- Tipo: `duckduckgo`
|
||||
@@ -7,7 +7,7 @@ import { BlockInfoCard } from "@/components/ui/block-info-card"
|
||||
|
||||
<BlockInfoCard
|
||||
type="smtp"
|
||||
color="#2D3748"
|
||||
color="#4A5568"
|
||||
/>
|
||||
|
||||
{/* MANUAL-CONTENT-START:intro */}
|
||||
|
||||
@@ -1,63 +0,0 @@
|
||||
---
|
||||
title: DuckDuckGo
|
||||
description: Recherchez avec DuckDuckGo
|
||||
---
|
||||
|
||||
import { BlockInfoCard } from "@/components/ui/block-info-card"
|
||||
|
||||
<BlockInfoCard
|
||||
type="duckduckgo"
|
||||
color="#FFFFFF"
|
||||
/>
|
||||
|
||||
{/* MANUAL-CONTENT-START:intro */}
|
||||
[DuckDuckGo](https://duckduckgo.com/) est un moteur de recherche web axé sur la confidentialité qui fournit des réponses instantanées, des résumés, des sujets connexes et plus encore — sans vous suivre ni suivre vos recherches. DuckDuckGo facilite la recherche d'informations sans profilage d'utilisateur ni publicités ciblées.
|
||||
|
||||
Avec DuckDuckGo dans Sim, vous pouvez :
|
||||
|
||||
- **Rechercher sur le web** : trouvez instantanément des réponses, des faits et des aperçus pour une requête de recherche donnée
|
||||
- **Obtenir des réponses directes** : recevez des réponses spécifiques pour des calculs, des conversions ou des requêtes factuelles
|
||||
- **Accéder à des résumés** : recevez de courts résumés ou descriptions pour vos sujets de recherche
|
||||
- **Récupérer des sujets connexes** : découvrez des liens et références pertinents pour votre recherche
|
||||
- **Filtrer les résultats** : supprimez éventuellement le HTML ou ignorez la désambiguïsation pour des résultats plus propres
|
||||
|
||||
Ces fonctionnalités permettent à vos agents Sim d'automatiser l'accès à des connaissances web récentes — de la présentation de faits dans un flux de travail à l'enrichissement de documents et d'analyses avec des informations à jour. Comme l'API Instant Answers de DuckDuckGo est ouverte et ne nécessite pas de clé API, elle s'intègre facilement et en toute sécurité dans vos processus d'entreprise automatisés.
|
||||
{/* MANUAL-CONTENT-END */}
|
||||
|
||||
## Instructions d'utilisation
|
||||
|
||||
Recherchez sur le web en utilisant l'API Instant Answers de DuckDuckGo. Renvoie des réponses instantanées, des résumés, des sujets connexes et plus encore. Gratuit à utiliser sans clé API.
|
||||
|
||||
## Outils
|
||||
|
||||
### `duckduckgo_search`
|
||||
|
||||
Recherchez sur le web en utilisant l'API Instant Answers de DuckDuckGo. Renvoie des réponses instantanées, des résumés et des sujets connexes pour votre requête. Gratuit à utiliser sans clé API.
|
||||
|
||||
#### Entrée
|
||||
|
||||
| Paramètre | Type | Obligatoire | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `query` | string | Oui | La requête de recherche à exécuter |
|
||||
| `noHtml` | boolean | Non | Supprimer le HTML du texte dans les résultats \(par défaut : true\) |
|
||||
| `skipDisambig` | boolean | Non | Ignorer les résultats de désambiguïsation \(par défaut : false\) |
|
||||
|
||||
#### Sortie
|
||||
|
||||
| Paramètre | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `heading` | string | Le titre/en-tête de la réponse instantanée |
|
||||
| `abstract` | string | Un court résumé du sujet |
|
||||
| `abstractText` | string | Version en texte brut du résumé |
|
||||
| `abstractSource` | string | La source du résumé \(par exemple, Wikipédia\) |
|
||||
| `abstractURL` | string | URL vers la source du résumé |
|
||||
| `image` | string | URL vers une image liée au sujet |
|
||||
| `answer` | string | Réponse directe si disponible \(par exemple, pour les calculs\) |
|
||||
| `answerType` | string | Type de réponse \(par exemple, calc, ip, etc.\) |
|
||||
| `type` | string | Type de réponse : A \(article\), D \(désambiguïsation\), C \(catégorie\), N \(nom\), E \(exclusif\) |
|
||||
| `relatedTopics` | array | Tableau des sujets connexes avec URLs et descriptions |
|
||||
|
||||
## Notes
|
||||
|
||||
- Catégorie : `tools`
|
||||
- Type : `duckduckgo`
|
||||
@@ -1,183 +0,0 @@
|
||||
---
|
||||
title: SFTP
|
||||
description: Transférer des fichiers via SFTP (Protocole de transfert de fichiers SSH)
|
||||
---
|
||||
|
||||
import { BlockInfoCard } from "@/components/ui/block-info-card"
|
||||
|
||||
<BlockInfoCard
|
||||
type="sftp"
|
||||
color="#2D3748"
|
||||
/>
|
||||
|
||||
{/* MANUAL-CONTENT-START:intro */}
|
||||
[SFTP (Protocole de transfert de fichiers SSH)](https://en.wikipedia.org/wiki/SSH_File_Transfer_Protocol) est un protocole réseau sécurisé qui vous permet de téléverser, télécharger et gérer des fichiers sur des serveurs distants. SFTP fonctionne via SSH, ce qui en fait une solution idéale pour les transferts de fichiers automatisés et chiffrés, ainsi que pour la gestion de fichiers à distance dans les flux de travail modernes.
|
||||
|
||||
Grâce aux outils SFTP intégrés à Sim, vous pouvez facilement automatiser le déplacement de fichiers entre vos agents IA et des systèmes ou serveurs externes. Cela permet à vos agents de gérer les échanges de données critiques, les sauvegardes, la génération de documents et l'orchestration de systèmes distants, le tout avec une sécurité robuste.
|
||||
|
||||
**Fonctionnalités clés disponibles via les outils SFTP :**
|
||||
|
||||
- **Téléversement de fichiers :** Transférez facilement des fichiers de tout type depuis votre flux de travail vers un serveur distant, avec prise en charge de l'authentification par mot de passe et par clé privée SSH.
|
||||
- **Téléchargement de fichiers :** Récupérez des fichiers depuis des serveurs SFTP distants directement pour traitement, archivage ou automatisation supplémentaire.
|
||||
- **Liste et gestion des fichiers :** Énumérez les répertoires, supprimez ou créez des fichiers et dossiers, et gérez les permissions du système de fichiers à distance.
|
||||
- **Authentification flexible :** Connectez-vous en utilisant soit des mots de passe traditionnels, soit des clés SSH, avec prise en charge des phrases secrètes et du contrôle des permissions.
|
||||
- **Prise en charge des fichiers volumineux :** Gérez de manière programmatique les téléversements et téléchargements de fichiers volumineux, avec des limites de taille intégrées pour la sécurité.
|
||||
|
||||
En intégrant SFTP à Sim, vous pouvez automatiser les opérations de fichiers sécurisées dans le cadre de n'importe quel flux de travail, qu'il s'agisse de collecte de données, de rapports, de maintenance de systèmes distants ou d'échange dynamique de contenu entre plateformes.
|
||||
|
||||
Les sections ci-dessous décrivent les principaux outils SFTP disponibles :
|
||||
|
||||
- **sftp_upload :** Téléverser un ou plusieurs fichiers vers un serveur distant.
|
||||
- **sftp_download :** Télécharger des fichiers depuis un serveur distant vers votre flux de travail.
|
||||
- **sftp_list :** Lister le contenu des répertoires sur un serveur SFTP distant.
|
||||
- **sftp_delete :** Supprimer des fichiers ou des répertoires d'un serveur distant.
|
||||
- **sftp_create :** Créer de nouveaux fichiers sur un serveur SFTP distant.
|
||||
- **sftp_mkdir :** Créer de nouveaux répertoires à distance.
|
||||
|
||||
Consultez la documentation de l'outil ci-dessous pour les paramètres d'entrée et de sortie détaillés pour chaque opération.
|
||||
{/* MANUAL-CONTENT-END */}
|
||||
|
||||
## Instructions d'utilisation
|
||||
|
||||
Téléversez, téléchargez, listez et gérez des fichiers sur des serveurs distants via SFTP. Prend en charge l'authentification par mot de passe et par clé privée pour des transferts de fichiers sécurisés.
|
||||
|
||||
## Outils
|
||||
|
||||
### `sftp_upload`
|
||||
|
||||
Téléverser des fichiers vers un serveur SFTP distant
|
||||
|
||||
#### Entrée
|
||||
|
||||
| Paramètre | Type | Obligatoire | Description |
|
||||
| --------- | ---- | ----------- | ----------- |
|
||||
| `host` | string | Oui | Nom d'hôte ou adresse IP du serveur SFTP |
|
||||
| `port` | number | Oui | Port du serveur SFTP \(par défaut : 22\) |
|
||||
| `username` | string | Oui | Nom d'utilisateur SFTP |
|
||||
| `password` | string | Non | Mot de passe pour l'authentification \(si vous n'utilisez pas de clé privée\) |
|
||||
| `privateKey` | string | Non | Clé privée pour l'authentification \(format OpenSSH\) |
|
||||
| `passphrase` | string | Non | Phrase secrète pour la clé privée chiffrée |
|
||||
| `remotePath` | string | Oui | Répertoire de destination sur le serveur distant |
|
||||
| `files` | file[] | Non | Fichiers à téléverser |
|
||||
| `fileContent` | string | Non | Contenu direct du fichier à téléverser \(pour les fichiers texte\) |
|
||||
| `fileName` | string | Non | Nom du fichier lors de l'utilisation du contenu direct |
|
||||
| `overwrite` | boolean | Non | Écraser les fichiers existants \(par défaut : true\) |
|
||||
| `permissions` | string | Non | Permissions du fichier \(ex. 0644\) |
|
||||
|
||||
#### Sortie
|
||||
|
||||
| Paramètre | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `success` | boolean | Si le téléversement a réussi |
|
||||
| `uploadedFiles` | json | Tableau des détails des fichiers téléversés \(nom, chemin distant, taille\) |
|
||||
| `message` | string | Message d'état de l'opération |
|
||||
|
||||
### `sftp_download`
|
||||
|
||||
Télécharger un fichier depuis un serveur SFTP distant
|
||||
|
||||
#### Entrée
|
||||
|
||||
| Paramètre | Type | Obligatoire | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `host` | string | Oui | Nom d'hôte ou adresse IP du serveur SFTP |
|
||||
| `port` | number | Oui | Port du serveur SFTP \(par défaut : 22\) |
|
||||
| `username` | string | Oui | Nom d'utilisateur SFTP |
|
||||
| `password` | string | Non | Mot de passe pour l'authentification \(si vous n'utilisez pas de clé privée\) |
|
||||
| `privateKey` | string | Non | Clé privée pour l'authentification \(format OpenSSH\) |
|
||||
| `passphrase` | string | Non | Phrase secrète pour la clé privée chiffrée |
|
||||
| `remotePath` | string | Oui | Chemin vers le fichier sur le serveur distant |
|
||||
| `encoding` | string | Non | Encodage de sortie : utf-8 pour le texte, base64 pour le binaire \(par défaut : utf-8\) |
|
||||
|
||||
#### Sortie
|
||||
|
||||
| Paramètre | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `success` | boolean | Indique si le téléchargement a réussi |
|
||||
| `fileName` | string | Nom du fichier téléchargé |
|
||||
| `content` | string | Contenu du fichier \(texte ou encodé en base64\) |
|
||||
| `size` | number | Taille du fichier en octets |
|
||||
| `encoding` | string | Encodage du contenu \(utf-8 ou base64\) |
|
||||
| `message` | string | Message d'état de l'opération |
|
||||
|
||||
### `sftp_list`
|
||||
|
||||
Lister les fichiers et répertoires sur un serveur SFTP distant
|
||||
|
||||
#### Entrée
|
||||
|
||||
| Paramètre | Type | Obligatoire | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `host` | string | Oui | Nom d'hôte ou adresse IP du serveur SFTP |
|
||||
| `port` | number | Oui | Port du serveur SFTP \(par défaut : 22\) |
|
||||
| `username` | string | Oui | Nom d'utilisateur SFTP |
|
||||
| `password` | string | Non | Mot de passe pour l'authentification \(si vous n'utilisez pas de clé privée\) |
|
||||
| `privateKey` | string | Non | Clé privée pour l'authentification \(format OpenSSH\) |
|
||||
| `passphrase` | string | Non | Phrase secrète pour la clé privée chiffrée |
|
||||
| `remotePath` | string | Oui | Chemin du répertoire sur le serveur distant |
|
||||
| `detailed` | boolean | Non | Inclure des informations détaillées sur les fichiers \(taille, permissions, date de modification\) |
|
||||
|
||||
#### Sortie
|
||||
|
||||
| Paramètre | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `success` | boolean | Indique si l'opération a réussi |
|
||||
| `path` | string | Chemin du répertoire qui a été listé |
|
||||
| `entries` | json | Tableau des entrées du répertoire avec nom, type, taille, permissions, modifiedAt |
|
||||
| `count` | number | Nombre d'entrées dans le répertoire |
|
||||
| `message` | string | Message d'état de l'opération |
|
||||
|
||||
### `sftp_delete`
|
||||
|
||||
Supprimer un fichier ou un répertoire sur un serveur SFTP distant
|
||||
|
||||
#### Entrée
|
||||
|
||||
| Paramètre | Type | Obligatoire | Description |
|
||||
| --------- | ---- | ---------- | ----------- |
|
||||
| `host` | string | Oui | Nom d'hôte ou adresse IP du serveur SFTP |
|
||||
| `port` | number | Oui | Port du serveur SFTP \(par défaut : 22\) |
|
||||
| `username` | string | Oui | Nom d'utilisateur SFTP |
|
||||
| `password` | string | Non | Mot de passe pour l'authentification \(si vous n'utilisez pas de clé privée\) |
|
||||
| `privateKey` | string | Non | Clé privée pour l'authentification \(format OpenSSH\) |
|
||||
| `passphrase` | string | Non | Phrase secrète pour la clé privée chiffrée |
|
||||
| `remotePath` | string | Oui | Chemin vers le fichier ou le répertoire à supprimer |
|
||||
| `recursive` | boolean | Non | Supprimer les répertoires de façon récursive |
|
||||
|
||||
#### Sortie
|
||||
|
||||
| Paramètre | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `success` | boolean | Indique si la suppression a réussi |
|
||||
| `deletedPath` | string | Chemin qui a été supprimé |
|
||||
| `message` | string | Message d'état de l'opération |
|
||||
|
||||
### `sftp_mkdir`
|
||||
|
||||
Créer un répertoire sur un serveur SFTP distant
|
||||
|
||||
#### Entrée
|
||||
|
||||
| Paramètre | Type | Obligatoire | Description |
|
||||
| --------- | ---- | ----------- | ----------- |
|
||||
| `host` | string | Oui | Nom d'hôte ou adresse IP du serveur SFTP |
|
||||
| `port` | number | Oui | Port du serveur SFTP \(par défaut : 22\) |
|
||||
| `username` | string | Oui | Nom d'utilisateur SFTP |
|
||||
| `password` | string | Non | Mot de passe pour l'authentification \(si vous n'utilisez pas de clé privée\) |
|
||||
| `privateKey` | string | Non | Clé privée pour l'authentification \(format OpenSSH\) |
|
||||
| `passphrase` | string | Non | Phrase secrète pour la clé privée chiffrée |
|
||||
| `remotePath` | string | Oui | Chemin pour le nouveau répertoire |
|
||||
| `recursive` | boolean | Non | Créer les répertoires parents s'ils n'existent pas |
|
||||
|
||||
#### Sortie
|
||||
|
||||
| Paramètre | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `success` | boolean | Indique si le répertoire a été créé avec succès |
|
||||
| `createdPath` | string | Chemin du répertoire créé |
|
||||
| `message` | string | Message d'état de l'opération |
|
||||
|
||||
## Remarques
|
||||
|
||||
- Catégorie : `tools`
|
||||
- Type : `sftp`
|
||||
@@ -7,7 +7,7 @@ import { BlockInfoCard } from "@/components/ui/block-info-card"
|
||||
|
||||
<BlockInfoCard
|
||||
type="smtp"
|
||||
color="#2D3748"
|
||||
color="#4A5568"
|
||||
/>
|
||||
|
||||
{/* MANUAL-CONTENT-START:intro */}
|
||||
|
||||
@@ -1,63 +0,0 @@
|
||||
---
|
||||
title: DuckDuckGo
|
||||
description: DuckDuckGoで検索
|
||||
---
|
||||
|
||||
import { BlockInfoCard } from "@/components/ui/block-info-card"
|
||||
|
||||
<BlockInfoCard
|
||||
type="duckduckgo"
|
||||
color="#FFFFFF"
|
||||
/>
|
||||
|
||||
{/* MANUAL-CONTENT-START:intro */}
|
||||
[DuckDuckGo](https://duckduckgo.com/)は、プライバシーを重視したウェブ検索エンジンで、あなたやあなたの検索を追跡することなく、即時の回答、要約、関連トピックなどを提供します。DuckDuckGoを使えば、ユーザープロファイリングやターゲット広告なしで簡単に情報を見つけることができます。
|
||||
|
||||
SimでDuckDuckGoを使用すると、以下のことができます:
|
||||
|
||||
- **ウェブ検索**: 特定の検索クエリに対して、回答、事実、概要を即座に見つける
|
||||
- **直接的な回答を取得**: 計算、変換、事実に関するクエリに対して特定の回答を取得
|
||||
- **要約にアクセス**: 検索トピックに関する短い要約や説明を受け取る
|
||||
- **関連トピックを取得**: 検索に関連するリンクや参考情報を発見
|
||||
- **出力をフィルタリング**: オプションでHTMLを削除したり、より明確な結果を得るために曖昧さ回避をスキップしたりする
|
||||
|
||||
これらの機能により、Simエージェントは最新のウェブ知識への自動アクセスを可能にします — ワークフローでの事実の表示から、最新情報によるドキュメントや分析の強化まで。DuckDuckGoのインスタントアンサーAPIはオープンでAPIキーを必要としないため、自動化されたビジネスプロセスにプライバシーを保ちながら簡単に統合できます。
|
||||
{/* MANUAL-CONTENT-END */}
|
||||
|
||||
## 使用方法
|
||||
|
||||
DuckDuckGoインスタントアンサーAPIを使用してウェブを検索します。インスタントアンサー、要約、関連トピックなどを返します。APIキーなしで無料で使用できます。
|
||||
|
||||
## ツール
|
||||
|
||||
### `duckduckgo_search`
|
||||
|
||||
DuckDuckGoインスタントアンサーAPIを使用してウェブを検索します。クエリに対するインスタントアンサー、要約、関連トピックを返します。APIキーなしで無料で使用できます。
|
||||
|
||||
#### 入力
|
||||
|
||||
| パラメータ | 型 | 必須 | 説明 |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `query` | string | はい | 実行する検索クエリ |
|
||||
| `noHtml` | boolean | いいえ | 結果のテキストからHTMLを削除する(デフォルト: true) |
|
||||
| `skipDisambig` | boolean | いいえ | 曖昧さ回避の結果をスキップする(デフォルト: false) |
|
||||
|
||||
#### 出力
|
||||
|
||||
| パラメータ | 型 | 説明 |
|
||||
| --------- | ---- | ----------- |
|
||||
| `heading` | string | インスタントアンサーの見出し/タイトル |
|
||||
| `abstract` | string | トピックの短い要約 |
|
||||
| `abstractText` | string | 要約のプレーンテキストバージョン |
|
||||
| `abstractSource` | string | 要約の情報源(例:Wikipedia) |
|
||||
| `abstractURL` | string | 要約の情報源へのURL |
|
||||
| `image` | string | トピックに関連する画像へのURL |
|
||||
| `answer` | string | 利用可能な場合は直接的な回答(例:計算の場合) |
|
||||
| `answerType` | string | 回答のタイプ(例:calc、ipなど) |
|
||||
| `type` | string | レスポンスタイプ:A(記事)、D(曖昧さ回避)、C(カテゴリ)、N(名前)、E(排他的) |
|
||||
| `relatedTopics` | array | URLと説明を含む関連トピックの配列 |
|
||||
|
||||
## 注意事項
|
||||
|
||||
- カテゴリ: `tools`
|
||||
- タイプ: `duckduckgo`
|
||||
@@ -1,183 +0,0 @@
|
||||
---
|
||||
title: SFTP
|
||||
description: SFTP(SSH File Transfer Protocol)を介してファイルを転送
|
||||
---
|
||||
|
||||
import { BlockInfoCard } from "@/components/ui/block-info-card"
|
||||
|
||||
<BlockInfoCard
|
||||
type="sftp"
|
||||
color="#2D3748"
|
||||
/>
|
||||
|
||||
{/* MANUAL-CONTENT-START:intro */}
|
||||
[SFTP(SSH File Transfer Protocol)](https://en.wikipedia.org/wiki/SSH_File_Transfer_Protocol)は、リモートサーバー上でファイルのアップロード、ダウンロード、管理を可能にする安全なネットワークプロトコルです。SFTPはSSH上で動作し、現代のワークフロー内での自動化された暗号化ファイル転送とリモートファイル管理に最適です。
|
||||
|
||||
SimにSFTPツールを統合することで、AIエージェントと外部システムやサーバー間のファイル移動を簡単に自動化できます。これにより、エージェントは重要なデータ交換、バックアップ、ドキュメント生成、リモートシステムのオーケストレーションを堅牢なセキュリティで管理できるようになります。
|
||||
|
||||
**SFTPツールで利用可能な主要機能:**
|
||||
|
||||
- **ファイルのアップロード:** パスワードとSSH秘密鍵認証の両方をサポートし、ワークフローからリモートサーバーへあらゆるタイプのファイルをシームレスに転送。
|
||||
- **ファイルのダウンロード:** リモートSFTPサーバーから直接ファイルを取得し、処理、アーカイブ、または更なる自動化を行う。
|
||||
- **ファイルの一覧表示と管理:** ディレクトリの列挙、ファイルやフォルダの削除または作成、リモートでのファイルシステム権限の管理。
|
||||
- **柔軟な認証:** 従来のパスワードまたはSSH鍵を使用して接続し、パスフレーズと権限制御をサポート。
|
||||
- **大容量ファイルのサポート:** 安全性のための組み込みサイズ制限付きで、大容量ファイルのアップロードとダウンロードをプログラムで管理。
|
||||
|
||||
SimにSFTPを統合することで、データ収集、レポート作成、リモートシステムのメンテナンス、プラットフォーム間の動的コンテンツ交換など、あらゆるワークフローの一部として安全なファイル操作を自動化できます。
|
||||
|
||||
以下のセクションでは、利用可能な主要なSFTPツールについて説明します:
|
||||
|
||||
- **sftp_upload:** 1つまたは複数のファイルをリモートサーバーにアップロード。
|
||||
- **sftp_download:** リモートサーバーからワークフローにファイルをダウンロード。
|
||||
- **sftp_list:** リモートSFTPサーバー上のディレクトリ内容を一覧表示。
|
||||
- **sftp_delete:** リモートサーバーからファイルまたはディレクトリを削除。
|
||||
- **sftp_create:** リモートSFTPサーバー上に新しいファイルを作成。
|
||||
- **sftp_mkdir:** リモートで新しいディレクトリを作成。
|
||||
|
||||
各操作の詳細な入力パラメータと出力パラメータについては、以下のツールドキュメントをご覧ください。
|
||||
{/* MANUAL-CONTENT-END */}
|
||||
|
||||
## 使用方法
|
||||
|
||||
SFTPを介してリモートサーバーにファイルをアップロード、ダウンロード、一覧表示、管理できます。安全なファイル転送のためにパスワード認証と秘密鍵認証の両方をサポートしています。
|
||||
|
||||
## ツール
|
||||
|
||||
### `sftp_upload`
|
||||
|
||||
リモートSFTPサーバーにファイルをアップロードする
|
||||
|
||||
#### 入力
|
||||
|
||||
| パラメータ | 型 | 必須 | 説明 |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `host` | string | はい | SFTPサーバーのホスト名またはIPアドレス |
|
||||
| `port` | number | はい | SFTPサーバーのポート(デフォルト:22) |
|
||||
| `username` | string | はい | SFTPユーザー名 |
|
||||
| `password` | string | いいえ | 認証用パスワード(秘密鍵を使用しない場合) |
|
||||
| `privateKey` | string | いいえ | 認証用秘密鍵(OpenSSH形式) |
|
||||
| `passphrase` | string | いいえ | 暗号化された秘密鍵のパスフレーズ |
|
||||
| `remotePath` | string | はい | リモートサーバー上の宛先ディレクトリ |
|
||||
| `files` | file[] | いいえ | アップロードするファイル |
|
||||
| `fileContent` | string | いいえ | アップロードする直接ファイルコンテンツ(テキストファイル用) |
|
||||
| `fileName` | string | いいえ | 直接コンテンツを使用する場合のファイル名 |
|
||||
| `overwrite` | boolean | いいえ | 既存のファイルを上書きするかどうか(デフォルト:true) |
|
||||
| `permissions` | string | いいえ | ファイルのパーミッション(例:0644) |
|
||||
|
||||
#### 出力
|
||||
|
||||
| パラメータ | 型 | 説明 |
|
||||
| --------- | ---- | ----------- |
|
||||
| `success` | boolean | アップロードが成功したかどうか |
|
||||
| `uploadedFiles` | json | アップロードされたファイルの詳細の配列(名前、リモートパス、サイズ) |
|
||||
| `message` | string | 操作ステータスメッセージ |
|
||||
|
||||
### `sftp_download`
|
||||
|
||||
リモートSFTPサーバーからファイルをダウンロードする
|
||||
|
||||
#### 入力
|
||||
|
||||
| パラメータ | 型 | 必須 | 説明 |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `host` | string | はい | SFTPサーバーのホスト名またはIPアドレス |
|
||||
| `port` | number | はい | SFTPサーバーのポート(デフォルト:22) |
|
||||
| `username` | string | はい | SFTPユーザー名 |
|
||||
| `password` | string | いいえ | 認証用パスワード(秘密鍵を使用しない場合) |
|
||||
| `privateKey` | string | いいえ | 認証用秘密鍵(OpenSSH形式) |
|
||||
| `passphrase` | string | いいえ | 暗号化された秘密鍵のパスフレーズ |
|
||||
| `remotePath` | string | はい | リモートサーバー上のファイルパス |
|
||||
| `encoding` | string | いいえ | 出力エンコーディング:テキストの場合はutf-8、バイナリの場合はbase64(デフォルト:utf-8) |
|
||||
|
||||
#### 出力
|
||||
|
||||
| パラメータ | 型 | 説明 |
|
||||
| --------- | ---- | ----------- |
|
||||
| `success` | boolean | ダウンロードが成功したかどうか |
|
||||
| `fileName` | string | ダウンロードしたファイルの名前 |
|
||||
| `content` | string | ファイルの内容(テキストまたはbase64エンコード) |
|
||||
| `size` | number | ファイルサイズ(バイト) |
|
||||
| `encoding` | string | コンテンツエンコーディング(utf-8またはbase64) |
|
||||
| `message` | string | 操作ステータスメッセージ |
|
||||
|
||||
### `sftp_list`
|
||||
|
||||
リモートSFTPサーバー上のファイルとディレクトリを一覧表示する
|
||||
|
||||
#### 入力
|
||||
|
||||
| パラメータ | 型 | 必須 | 説明 |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `host` | string | はい | SFTPサーバーのホスト名またはIPアドレス |
|
||||
| `port` | number | はい | SFTPサーバーのポート(デフォルト:22) |
|
||||
| `username` | string | はい | SFTPユーザー名 |
|
||||
| `password` | string | いいえ | 認証用パスワード(秘密鍵を使用しない場合) |
|
||||
| `privateKey` | string | いいえ | 認証用秘密鍵(OpenSSH形式) |
|
||||
| `passphrase` | string | いいえ | 暗号化された秘密鍵のパスフレーズ |
|
||||
| `remotePath` | string | はい | リモートサーバー上のディレクトリパス |
|
||||
| `detailed` | boolean | いいえ | 詳細なファイル情報(サイズ、権限、更新日)を含める |
|
||||
|
||||
#### 出力
|
||||
|
||||
| パラメータ | 型 | 説明 |
|
||||
| --------- | ---- | ----------- |
|
||||
| `success` | boolean | 操作が成功したかどうか |
|
||||
| `path` | string | 一覧表示されたディレクトリパス |
|
||||
| `entries` | json | 名前、タイプ、サイズ、権限、更新日時を含むディレクトリエントリの配列 |
|
||||
| `count` | number | ディレクトリ内のエントリ数 |
|
||||
| `message` | string | 操作のステータスメッセージ |
|
||||
|
||||
### `sftp_delete`
|
||||
|
||||
リモートSFTPサーバー上のファイルまたはディレクトリを削除する
|
||||
|
||||
#### 入力
|
||||
|
||||
| パラメータ | 型 | 必須 | 説明 |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `host` | string | はい | SFTPサーバーのホスト名またはIPアドレス |
|
||||
| `port` | number | はい | SFTPサーバーのポート(デフォルト:22) |
|
||||
| `username` | string | はい | SFTPユーザー名 |
|
||||
| `password` | string | いいえ | 認証用パスワード(秘密鍵を使用しない場合) |
|
||||
| `privateKey` | string | いいえ | 認証用の秘密鍵(OpenSSH形式) |
|
||||
| `passphrase` | string | いいえ | 暗号化された秘密鍵のパスフレーズ |
|
||||
| `remotePath` | string | はい | 削除するファイルまたはディレクトリのパス |
|
||||
| `recursive` | boolean | いいえ | ディレクトリを再帰的に削除する |
|
||||
|
||||
#### 出力
|
||||
|
||||
| パラメータ | 型 | 説明 |
|
||||
| --------- | ---- | ----------- |
|
||||
| `success` | boolean | 削除が成功したかどうか |
|
||||
| `deletedPath` | string | 削除されたパス |
|
||||
| `message` | string | 操作のステータスメッセージ |
|
||||
|
||||
### `sftp_mkdir`
|
||||
|
||||
リモートSFTPサーバーにディレクトリを作成する
|
||||
|
||||
#### 入力
|
||||
|
||||
| パラメータ | 型 | 必須 | 説明 |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `host` | string | はい | SFTPサーバーのホスト名またはIPアドレス |
|
||||
| `port` | number | はい | SFTPサーバーのポート(デフォルト:22) |
|
||||
| `username` | string | はい | SFTPユーザー名 |
|
||||
| `password` | string | いいえ | 認証用パスワード(秘密鍵を使用しない場合) |
|
||||
| `privateKey` | string | いいえ | 認証用秘密鍵(OpenSSH形式) |
|
||||
| `passphrase` | string | いいえ | 暗号化された秘密鍵のパスフレーズ |
|
||||
| `remotePath` | string | はい | 新しいディレクトリのパス |
|
||||
| `recursive` | boolean | いいえ | 親ディレクトリが存在しない場合に作成する |
|
||||
|
||||
#### 出力
|
||||
|
||||
| パラメータ | 型 | 説明 |
|
||||
| --------- | ---- | ----------- |
|
||||
| `success` | boolean | ディレクトリが正常に作成されたかどうか |
|
||||
| `createdPath` | string | 作成されたディレクトリのパス |
|
||||
| `message` | string | 操作のステータスメッセージ |
|
||||
|
||||
## 注意事項
|
||||
|
||||
- カテゴリ: `tools`
|
||||
- タイプ: `sftp`
|
||||
@@ -7,7 +7,7 @@ import { BlockInfoCard } from "@/components/ui/block-info-card"
|
||||
|
||||
<BlockInfoCard
|
||||
type="smtp"
|
||||
color="#2D3748"
|
||||
color="#4A5568"
|
||||
/>
|
||||
|
||||
{/* MANUAL-CONTENT-START:intro */}
|
||||
|
||||
@@ -1,63 +0,0 @@
|
||||
---
|
||||
title: DuckDuckGo
|
||||
description: 使用 DuckDuckGo 搜索
|
||||
---
|
||||
|
||||
import { BlockInfoCard } from "@/components/ui/block-info-card"
|
||||
|
||||
<BlockInfoCard
|
||||
type="duckduckgo"
|
||||
color="#FFFFFF"
|
||||
/>
|
||||
|
||||
{/* MANUAL-CONTENT-START:intro */}
|
||||
[DuckDuckGo](https://duckduckgo.com/) 是一个注重隐私的网页搜索引擎,提供即时答案、摘要、相关主题等功能——无需跟踪您或您的搜索记录。DuckDuckGo 让您无需用户画像或定向广告即可轻松获取信息。
|
||||
|
||||
在 Sim 中使用 DuckDuckGo,您可以:
|
||||
|
||||
- **搜索网页**:即时找到答案、事实和搜索查询的概述
|
||||
- **获取直接答案**:检索计算、转换或事实查询的具体响应
|
||||
- **访问摘要**:接收搜索主题的简短总结或描述
|
||||
- **获取相关主题**:发现与搜索相关的链接和参考资料
|
||||
- **过滤输出**:可选择移除 HTML 或跳过歧义消解以获得更简洁的结果
|
||||
|
||||
这些功能使您的 Sim 代理能够自动访问最新的网络知识——从在工作流程中呈现事实,到通过最新信息丰富文档和分析。由于 DuckDuckGo 的即时答案 API 是开放的且不需要 API 密钥,因此集成到您的自动化业务流程中既简单又安全。
|
||||
{/* MANUAL-CONTENT-END */}
|
||||
|
||||
## 使用说明
|
||||
|
||||
使用 DuckDuckGo 即时答案 API 搜索网页。返回即时答案、摘要、相关主题等。无需 API 密钥即可免费使用。
|
||||
|
||||
## 工具
|
||||
|
||||
### `duckduckgo_search`
|
||||
|
||||
使用 DuckDuckGo 即时答案 API 搜索网页。返回查询的即时答案、摘要和相关主题。无需 API 密钥即可免费使用。
|
||||
|
||||
#### 输入
|
||||
|
||||
| 参数 | 类型 | 必需 | 描述 |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `query` | string | 是 | 要执行的搜索查询 |
|
||||
| `noHtml` | boolean | 否 | 从结果文本中移除 HTML \(默认值: true\) |
|
||||
| `skipDisambig` | boolean | 否 | 跳过歧义消解结果 \(默认值: false\) |
|
||||
|
||||
#### 输出
|
||||
|
||||
| 参数 | 类型 | 描述 |
|
||||
| --------- | ---- | ----------- |
|
||||
| `heading` | string | 即时答案的标题 |
|
||||
| `abstract` | string | 主题的简短摘要 |
|
||||
| `abstractText` | string | 摘要的纯文本版本 |
|
||||
| `abstractSource` | string | 摘要的来源(例如,Wikipedia) |
|
||||
| `abstractURL` | string | 摘要来源的 URL |
|
||||
| `image` | string | 与主题相关的图片的 URL |
|
||||
| `answer` | string | 如果可用,直接答案(例如,用于计算) |
|
||||
| `answerType` | string | 答案的类型(例如,calc,ip 等) |
|
||||
| `type` | string | 响应类型:A(文章),D(消歧),C(类别),N(名称),E(独占) |
|
||||
| `relatedTopics` | array | 包含相关主题及其 URL 和描述的数组 |
|
||||
|
||||
## 注意事项
|
||||
|
||||
- 类别:`tools`
|
||||
- 类型:`duckduckgo`
|
||||
@@ -1,183 +0,0 @@
|
||||
---
|
||||
title: SFTP
|
||||
description: 通过 SFTP(SSH 文件传输协议)传输文件
|
||||
---
|
||||
|
||||
import { BlockInfoCard } from "@/components/ui/block-info-card"
|
||||
|
||||
<BlockInfoCard
|
||||
type="sftp"
|
||||
color="#2D3748"
|
||||
/>
|
||||
|
||||
{/* MANUAL-CONTENT-START:intro */}
|
||||
[SFTP(SSH 文件传输协议)](https://en.wikipedia.org/wiki/SSH_File_Transfer_Protocol) 是一种安全的网络协议,可让您在远程服务器上上传、下载和管理文件。SFTP 基于 SSH 运行,非常适合现代工作流程中的自动化加密文件传输和远程文件管理。
|
||||
|
||||
通过将 SFTP 工具集成到 Sim 中,您可以轻松实现 AI 代理与外部系统或服务器之间的文件自动化传输。这使您的代理能够管理关键数据交换、备份、文档生成和远程系统协调——所有这些都具有强大的安全性。
|
||||
|
||||
**通过 SFTP 工具可用的关键功能:**
|
||||
|
||||
- **上传文件:** 无缝地将任何类型的文件从您的工作流程传输到远程服务器,支持密码和 SSH 私钥认证。
|
||||
- **下载文件:** 直接从远程 SFTP 服务器检索文件以进行处理、存档或进一步自动化。
|
||||
- **列出和管理文件:** 枚举目录,删除或创建文件和文件夹,并远程管理文件系统权限。
|
||||
- **灵活的认证:** 使用传统密码或 SSH 密钥连接,支持密码短语和权限控制。
|
||||
- **大文件支持:** 以编程方式管理大文件的上传和下载,并内置大小限制以确保安全。
|
||||
|
||||
通过将 SFTP 集成到 Sim 中,您可以将安全的文件操作自动化为任何工作流程的一部分,无论是数据收集、报告、远程系统维护,还是平台之间的动态内容交换。
|
||||
|
||||
以下部分描述了可用的关键 SFTP 工具:
|
||||
|
||||
- **sftp_upload:** 将一个或多个文件上传到远程服务器。
|
||||
- **sftp_download:** 从远程服务器下载文件到您的工作流程。
|
||||
- **sftp_list:** 列出远程 SFTP 服务器上的目录内容。
|
||||
- **sftp_delete:** 从远程服务器删除文件或目录。
|
||||
- **sftp_create:** 在远程 SFTP 服务器上创建新文件。
|
||||
- **sftp_mkdir:** 远程创建新目录。
|
||||
|
||||
请参阅下面的工具文档,了解每个操作的详细输入和输出参数。
|
||||
{/* MANUAL-CONTENT-END */}
|
||||
|
||||
## 使用说明
|
||||
|
||||
通过 SFTP 上传、下载、列出和管理远程服务器上的文件。支持密码和私钥认证,确保文件传输安全。
|
||||
|
||||
## 工具
|
||||
|
||||
### `sftp_upload`
|
||||
|
||||
将文件上传到远程 SFTP 服务器
|
||||
|
||||
#### 输入
|
||||
|
||||
| 参数 | 类型 | 必需 | 描述 |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `host` | string | 是 | SFTP 服务器主机名或 IP 地址 |
|
||||
| `port` | number | 是 | SFTP 服务器端口 \(默认值: 22\) |
|
||||
| `username` | string | 是 | SFTP 用户名 |
|
||||
| `password` | string | 否 | 用于认证的密码 \(如果未使用私钥\) |
|
||||
| `privateKey` | string | 否 | 用于认证的私钥 \(OpenSSH 格式\) |
|
||||
| `passphrase` | string | 否 | 加密私钥的密码短语 |
|
||||
| `remotePath` | string | 是 | 远程服务器上的目标目录 |
|
||||
| `files` | file[] | 否 | 要上传的文件 |
|
||||
| `fileContent` | string | 否 | 要上传的直接文件内容 \(针对文本文件\) |
|
||||
| `fileName` | string | 否 | 使用直接内容时的文件名 |
|
||||
| `overwrite` | boolean | 否 | 是否覆盖现有文件 \(默认值: true\) |
|
||||
| `permissions` | string | 否 | 文件权限 \(例如: 0644\) |
|
||||
|
||||
#### 输出
|
||||
|
||||
| 参数 | 类型 | 描述 |
|
||||
| --------- | ---- | ----------- |
|
||||
| `success` | boolean | 上传是否成功 |
|
||||
| `uploadedFiles` | json | 上传文件详情数组 \(名称, 远程路径, 大小\) |
|
||||
| `message` | string | 操作状态消息 |
|
||||
|
||||
### `sftp_download`
|
||||
|
||||
从远程 SFTP 服务器下载文件
|
||||
|
||||
#### 输入
|
||||
|
||||
| 参数 | 类型 | 必需 | 描述 |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `host` | string | 是 | SFTP 服务器主机名或 IP 地址 |
|
||||
| `port` | number | 是 | SFTP 服务器端口(默认:22) |
|
||||
| `username` | string | 是 | SFTP 用户名 |
|
||||
| `password` | string | 否 | 用于身份验证的密码(如果未使用私钥) |
|
||||
| `privateKey` | string | 否 | 用于身份验证的私钥(OpenSSH 格式) |
|
||||
| `passphrase` | string | 否 | 加密私钥的密码短语 |
|
||||
| `remotePath` | string | 是 | 远程服务器上文件的路径 |
|
||||
| `encoding` | string | 否 | 输出编码:utf-8 表示文本,base64 表示二进制(默认:utf-8) |
|
||||
|
||||
#### 输出
|
||||
|
||||
| 参数 | 类型 | 描述 |
|
||||
| --------- | ---- | ----------- |
|
||||
| `success` | boolean | 下载是否成功 |
|
||||
| `fileName` | string | 下载文件的名称 |
|
||||
| `content` | string | 文件内容(文本或 base64 编码) |
|
||||
| `size` | number | 文件大小(字节) |
|
||||
| `encoding` | string | 内容编码(utf-8 或 base64) |
|
||||
| `message` | string | 操作状态消息 |
|
||||
|
||||
### `sftp_list`
|
||||
|
||||
列出远程 SFTP 服务器上的文件和目录
|
||||
|
||||
#### 输入
|
||||
|
||||
| 参数 | 类型 | 必需 | 描述 |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `host` | string | 是 | SFTP 服务器主机名或 IP 地址 |
|
||||
| `port` | number | 是 | SFTP 服务器端口(默认:22) |
|
||||
| `username` | string | 是 | SFTP 用户名 |
|
||||
| `password` | string | 否 | 用于身份验证的密码(如果未使用私钥) |
|
||||
| `privateKey` | string | 否 | 用于身份验证的私钥(OpenSSH 格式) |
|
||||
| `passphrase` | string | 否 | 加密私钥的密码短语 |
|
||||
| `remotePath` | string | 是 | 远程服务器上的目录路径 |
|
||||
| `detailed` | boolean | 否 | 是否包含详细的文件信息(大小、权限、修改日期) |
|
||||
|
||||
#### 输出
|
||||
|
||||
| 参数 | 类型 | 描述 |
|
||||
| --------- | ---- | ----------- |
|
||||
| `success` | boolean | 操作是否成功 |
|
||||
| `path` | string | 被列出的目录路径 |
|
||||
| `entries` | json | 包含名称、类型、大小、权限、修改时间的目录条目数组 |
|
||||
| `count` | number | 目录中的条目数量 |
|
||||
| `message` | string | 操作状态消息 |
|
||||
|
||||
### `sftp_delete`
|
||||
|
||||
删除远程 SFTP 服务器上的文件或目录
|
||||
|
||||
#### 输入
|
||||
|
||||
| 参数 | 类型 | 必需 | 描述 |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `host` | string | 是 | SFTP 服务器主机名或 IP 地址 |
|
||||
| `port` | number | 是 | SFTP 服务器端口 \(默认值: 22\) |
|
||||
| `username` | string | 是 | SFTP 用户名 |
|
||||
| `password` | string | 否 | 用于身份验证的密码 \(如果未使用私钥\) |
|
||||
| `privateKey` | string | 否 | 用于身份验证的私钥 \(OpenSSH 格式\) |
|
||||
| `passphrase` | string | 否 | 加密私钥的密码短语 |
|
||||
| `remotePath` | string | 是 | 要删除的文件或目录的路径 |
|
||||
| `recursive` | boolean | 否 | 是否递归删除目录 |
|
||||
|
||||
#### 输出
|
||||
|
||||
| 参数 | 类型 | 描述 |
|
||||
| --------- | ---- | ----------- |
|
||||
| `success` | boolean | 删除是否成功 |
|
||||
| `deletedPath` | string | 被删除的路径 |
|
||||
| `message` | string | 操作状态消息 |
|
||||
|
||||
### `sftp_mkdir`
|
||||
|
||||
在远程 SFTP 服务器上创建一个目录
|
||||
|
||||
#### 输入
|
||||
|
||||
| 参数 | 类型 | 必需 | 描述 |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `host` | string | 是 | SFTP 服务器主机名或 IP 地址 |
|
||||
| `port` | number | 是 | SFTP 服务器端口 \(默认值: 22\) |
|
||||
| `username` | string | 是 | SFTP 用户名 |
|
||||
| `password` | string | 否 | 用于身份验证的密码 \(如果未使用私钥\) |
|
||||
| `privateKey` | string | 否 | 用于身份验证的私钥 \(OpenSSH 格式\) |
|
||||
| `passphrase` | string | 否 | 加密私钥的密码短语 |
|
||||
| `remotePath` | string | 是 | 新目录的路径 |
|
||||
| `recursive` | boolean | 否 | 如果父目录不存在,是否创建父目录 |
|
||||
|
||||
#### 输出
|
||||
|
||||
| 参数 | 类型 | 描述 |
|
||||
| --------- | ---- | ----------- |
|
||||
| `success` | boolean | 目录是否成功创建 |
|
||||
| `createdPath` | string | 创建的目录路径 |
|
||||
| `message` | string | 操作状态消息 |
|
||||
|
||||
## 注意事项
|
||||
|
||||
- 分类: `tools`
|
||||
- 类型: `sftp`
|
||||
@@ -7,7 +7,7 @@ import { BlockInfoCard } from "@/components/ui/block-info-card"
|
||||
|
||||
<BlockInfoCard
|
||||
type="smtp"
|
||||
color="#2D3748"
|
||||
color="#4A5568"
|
||||
/>
|
||||
|
||||
{/* MANUAL-CONTENT-START:intro */}
|
||||
|
||||
@@ -47243,7 +47243,7 @@ checksums:
|
||||
meta/title: cba6e4eab965c94b8973e60e9ea10c05
|
||||
meta/description: 366d196f8f11ecd0e96516bb9181f8d5
|
||||
content/0: 1b031fb0c62c46b177aeed5c3d3f8f80
|
||||
content/1: e152030e17bf42c8c007a7b64082108c
|
||||
content/1: dcf2843a8d5eb40192a44104c9c788a4
|
||||
content/2: 36ce181b1ca29664a1b6ddf4090623ae
|
||||
content/3: 0a9d2b209e2a8b8fadda104bc42ea92e
|
||||
content/4: 646bc61a952c9733ad296f441ae5ed9e
|
||||
@@ -49232,71 +49232,3 @@ checksums:
|
||||
content/52: bcb37c2bc190c3c12e5c721d376909f7
|
||||
content/53: b3f310d5ef115bea5a8b75bf25d7ea9a
|
||||
content/54: dafdefed393d3f02fe15ef832c922450
|
||||
c9be5cc608340116679fe327fbe63480:
|
||||
meta/title: aa4b66dbba98434a4db6d610ca890294
|
||||
meta/description: 257605ee0390330ef9eab6e37af91194
|
||||
content/0: 1b031fb0c62c46b177aeed5c3d3f8f80
|
||||
content/1: 4bf86a30616d0d72abc566853303b76b
|
||||
content/2: 2e930315ec421d2a3b1bfdb4772a1cf1
|
||||
content/3: b3f6c9d26d40474f23c0807242efa241
|
||||
content/4: c7f52e83abe327e76611283536d1eab5
|
||||
content/5: 50fdbcb70ad91301c147b15e3e820ec0
|
||||
content/6: 821e6394b0a953e2b0842b04ae8f3105
|
||||
content/7: d0319d7cb966b70ee0c02a95cff46f93
|
||||
content/8: 9c8aa3f09c9b2bd50ea4cdff3598ea4e
|
||||
content/9: a1fe169a3a18363fb213703cc030bd88
|
||||
content/10: f321c7ba0733abff259e6cb67e28206c
|
||||
content/11: 371d0e46b4bd2c23f559b8bc112f6955
|
||||
content/12: c6f1ef4078a4dd6a275b7d35c4c4111c
|
||||
content/13: bcadfc362b69078beee0088e5936c98b
|
||||
content/14: d4f59eb404e3b9bb1a435017f1a0b59f
|
||||
content/15: b3f310d5ef115bea5a8b75bf25d7ea9a
|
||||
content/16: 724d99e69acb7e708fd374d48bfcc10f
|
||||
c9f7b791abaf0d87cf84a72d272d3b06:
|
||||
meta/title: 7de8ba470a0c9dec4744b3c3cc177649
|
||||
meta/description: 1d912a560e6b4a91dd606e3411636114
|
||||
content/0: 1b031fb0c62c46b177aeed5c3d3f8f80
|
||||
content/1: 0e15635e5f8b9a9e2784d7437016732e
|
||||
content/2: efdaf30231cd82038af969ee2e4d5893
|
||||
content/3: 13e8cc6de77c95b362e47cfb5e784df5
|
||||
content/4: 5a1625be72ab706aa5e5df10b0f14cd8
|
||||
content/5: 24bb72eb803058206443ee6f04961ed0
|
||||
content/6: 82b5a7ad9b8222bab8a7d90e40a7016f
|
||||
content/7: 6a1b76137145b1359c7614aca381e217
|
||||
content/8: 899d98f7957916b99affdca5f5f0b95d
|
||||
content/9: fbc8be7912092ba5bb3939699f353b5b
|
||||
content/10: 821e6394b0a953e2b0842b04ae8f3105
|
||||
content/11: 5e5da9369cae02d9b99d74fa04f946f7
|
||||
content/12: 9c8aa3f09c9b2bd50ea4cdff3598ea4e
|
||||
content/13: c6caf38bc019cd301adff09db02f10ec
|
||||
content/14: 76e738d08d55e3cb175d72a00da780da
|
||||
content/15: 371d0e46b4bd2c23f559b8bc112f6955
|
||||
content/16: 405fb5a3b5ccf556769b7f54038cbafd
|
||||
content/17: bcadfc362b69078beee0088e5936c98b
|
||||
content/18: 7d1fc963936fd278098980231cd741d6
|
||||
content/19: 5d4837312f813cf934b2c9aee8179ec8
|
||||
content/20: bc83de0badce9a1d471c97872ac0b550
|
||||
content/21: 371d0e46b4bd2c23f559b8bc112f6955
|
||||
content/22: 261139b68ea4de9b50743a402db8168d
|
||||
content/23: bcadfc362b69078beee0088e5936c98b
|
||||
content/24: a55ffa4e204bcc53131f42b02ee0f812
|
||||
content/25: d16d2c9c4fa2a6e9c8b308192b0b3dc8
|
||||
content/26: 8eaa96c0ba2fb77c023692a5e4334616
|
||||
content/27: 371d0e46b4bd2c23f559b8bc112f6955
|
||||
content/28: bcf30844e3d152515f817efb953ed5b0
|
||||
content/29: bcadfc362b69078beee0088e5936c98b
|
||||
content/30: f3eddb7e55dcefcc3f971b4836487b45
|
||||
content/31: 601453f757ae944030dbd93f3afd1575
|
||||
content/32: 8a62582ec6c6b17957b70076b5834c08
|
||||
content/33: 371d0e46b4bd2c23f559b8bc112f6955
|
||||
content/34: 1f4f6f2120ce67e63e8b8976759c05a3
|
||||
content/35: bcadfc362b69078beee0088e5936c98b
|
||||
content/36: 0cc0f238ca3ec3d1f3b9f16e04aa8138
|
||||
content/37: d131798eeae12126287a483831da2d83
|
||||
content/38: af12f8b3cc617981fb20e3e7de06f723
|
||||
content/39: 371d0e46b4bd2c23f559b8bc112f6955
|
||||
content/40: 4c4d76bbf61f52f83b6530322fa87d2e
|
||||
content/41: bcadfc362b69078beee0088e5936c98b
|
||||
content/42: dc2cfed837ea55adfa23bd7c87d5299d
|
||||
content/43: b3f310d5ef115bea5a8b75bf25d7ea9a
|
||||
content/44: df2ef65659b8ea0a13916358943f965b
|
||||
|
||||
Binary file not shown.
|
Before Width: | Height: | Size: 19 KiB |
@@ -364,7 +364,7 @@ describe('Chat Identifier API Route', () => {
|
||||
error: {
|
||||
message: 'Workflow is not deployed',
|
||||
statusCode: 403,
|
||||
logCreated: false,
|
||||
logCreated: true,
|
||||
},
|
||||
})
|
||||
|
||||
|
||||
@@ -4,9 +4,7 @@ import { getSession } from '@/lib/auth'
|
||||
import { SIM_AGENT_API_URL_DEFAULT } from '@/lib/copilot/constants'
|
||||
import { env } from '@/lib/core/config/env'
|
||||
|
||||
const GenerateApiKeySchema = z.object({
|
||||
name: z.string().min(1, 'Name is required').max(255, 'Name is too long'),
|
||||
})
|
||||
const GenerateApiKeySchema = z.object({}).optional()
|
||||
|
||||
export async function POST(req: NextRequest) {
|
||||
try {
|
||||
@@ -33,15 +31,13 @@ export async function POST(req: NextRequest) {
|
||||
)
|
||||
}
|
||||
|
||||
const { name } = validationResult.data
|
||||
|
||||
const res = await fetch(`${SIM_AGENT_API_URL}/api/validate-key/generate`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
...(env.COPILOT_API_KEY ? { 'x-api-key': env.COPILOT_API_KEY } : {}),
|
||||
},
|
||||
body: JSON.stringify({ userId, name }),
|
||||
body: JSON.stringify({ userId }),
|
||||
})
|
||||
|
||||
if (!res.ok) {
|
||||
|
||||
@@ -27,9 +27,7 @@ export async function GET(request: NextRequest) {
|
||||
return NextResponse.json({ error: 'Failed to get keys' }, { status: res.status || 500 })
|
||||
}
|
||||
|
||||
const apiKeys = (await res.json().catch(() => null)) as
|
||||
| { id: string; apiKey: string; name?: string; createdAt?: string; lastUsed?: string }[]
|
||||
| null
|
||||
const apiKeys = (await res.json().catch(() => null)) as { id: string; apiKey: string }[] | null
|
||||
|
||||
if (!Array.isArray(apiKeys)) {
|
||||
return NextResponse.json({ error: 'Invalid response from Sim Agent' }, { status: 500 })
|
||||
@@ -39,13 +37,7 @@ export async function GET(request: NextRequest) {
|
||||
const value = typeof k.apiKey === 'string' ? k.apiKey : ''
|
||||
const last6 = value.slice(-6)
|
||||
const displayKey = `•••••${last6}`
|
||||
return {
|
||||
id: k.id,
|
||||
displayKey,
|
||||
name: k.name || null,
|
||||
createdAt: k.createdAt || null,
|
||||
lastUsed: k.lastUsed || null,
|
||||
}
|
||||
return { id: k.id, displayKey }
|
||||
})
|
||||
|
||||
return NextResponse.json({ keys }, { status: 200 })
|
||||
|
||||
@@ -353,10 +353,10 @@ export async function POST(req: NextRequest) {
|
||||
executeLocally: true,
|
||||
},
|
||||
]
|
||||
// Fetch user credentials (OAuth + API keys) - pass workflowId to get workspace env vars
|
||||
// Fetch user credentials (OAuth + API keys)
|
||||
try {
|
||||
const rawCredentials = await getCredentialsServerTool.execute(
|
||||
{ workflowId },
|
||||
{},
|
||||
{ userId: authenticatedUserId }
|
||||
)
|
||||
|
||||
@@ -840,36 +840,9 @@ export async function POST(req: NextRequest) {
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error(`[${tracker.requestId}] Error processing stream:`, error)
|
||||
|
||||
// Send an error event to the client before closing so it knows what happened
|
||||
try {
|
||||
const errorMessage =
|
||||
error instanceof Error && error.message === 'terminated'
|
||||
? 'Connection to AI service was interrupted. Please try again.'
|
||||
: 'An unexpected error occurred while processing the response.'
|
||||
const encoder = new TextEncoder()
|
||||
|
||||
// Send error as content so it shows in the chat
|
||||
controller.enqueue(
|
||||
encoder.encode(
|
||||
`data: ${JSON.stringify({ type: 'content', data: `\n\n_${errorMessage}_` })}\n\n`
|
||||
)
|
||||
)
|
||||
// Send done event to properly close the stream on client
|
||||
controller.enqueue(encoder.encode(`data: ${JSON.stringify({ type: 'done' })}\n\n`))
|
||||
} catch (enqueueError) {
|
||||
// Stream might already be closed, that's ok
|
||||
logger.warn(
|
||||
`[${tracker.requestId}] Could not send error event to client:`,
|
||||
enqueueError
|
||||
)
|
||||
}
|
||||
controller.error(error)
|
||||
} finally {
|
||||
try {
|
||||
controller.close()
|
||||
} catch {
|
||||
// Controller might already be closed
|
||||
}
|
||||
controller.close()
|
||||
}
|
||||
},
|
||||
})
|
||||
|
||||
@@ -1,10 +1,9 @@
|
||||
import { db } from '@sim/db'
|
||||
import { member, organization, subscription } from '@sim/db/schema'
|
||||
import { member, subscription } from '@sim/db/schema'
|
||||
import { and, eq } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { getPlanPricing } from '@/lib/billing/core/billing'
|
||||
import { requireStripeClient } from '@/lib/billing/stripe-client'
|
||||
import { isBillingEnabled } from '@/lib/core/config/environment'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
@@ -173,39 +172,6 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
|
||||
})
|
||||
.where(eq(subscription.id, orgSubscription.id))
|
||||
|
||||
// Update orgUsageLimit to reflect new seat count (seats × basePrice as minimum)
|
||||
const { basePrice } = getPlanPricing('team')
|
||||
const newMinimumLimit = newSeatCount * basePrice
|
||||
|
||||
const orgData = await db
|
||||
.select({ orgUsageLimit: organization.orgUsageLimit })
|
||||
.from(organization)
|
||||
.where(eq(organization.id, organizationId))
|
||||
.limit(1)
|
||||
|
||||
const currentOrgLimit =
|
||||
orgData.length > 0 && orgData[0].orgUsageLimit
|
||||
? Number.parseFloat(orgData[0].orgUsageLimit)
|
||||
: 0
|
||||
|
||||
// Update if new minimum is higher than current limit
|
||||
if (newMinimumLimit > currentOrgLimit) {
|
||||
await db
|
||||
.update(organization)
|
||||
.set({
|
||||
orgUsageLimit: newMinimumLimit.toFixed(2),
|
||||
updatedAt: new Date(),
|
||||
})
|
||||
.where(eq(organization.id, organizationId))
|
||||
|
||||
logger.info('Updated organization usage limit for seat change', {
|
||||
organizationId,
|
||||
newSeatCount,
|
||||
newMinimumLimit,
|
||||
previousLimit: currentOrgLimit,
|
||||
})
|
||||
}
|
||||
|
||||
logger.info('Successfully updated seat count', {
|
||||
organizationId,
|
||||
stripeSubscriptionId: orgSubscription.stripeSubscriptionId,
|
||||
@@ -258,3 +224,74 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
|
||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* GET /api/organizations/[id]/seats
|
||||
* Get current seat information for an organization
|
||||
*/
|
||||
export async function GET(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
|
||||
try {
|
||||
const session = await getSession()
|
||||
|
||||
if (!session?.user?.id) {
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const { id: organizationId } = await params
|
||||
|
||||
// Verify user has access to this organization
|
||||
const memberEntry = await db
|
||||
.select()
|
||||
.from(member)
|
||||
.where(and(eq(member.organizationId, organizationId), eq(member.userId, session.user.id)))
|
||||
.limit(1)
|
||||
|
||||
if (memberEntry.length === 0) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Forbidden - Not a member of this organization' },
|
||||
{ status: 403 }
|
||||
)
|
||||
}
|
||||
|
||||
// Get subscription data
|
||||
const subscriptionRecord = await db
|
||||
.select()
|
||||
.from(subscription)
|
||||
.where(and(eq(subscription.referenceId, organizationId), eq(subscription.status, 'active')))
|
||||
.limit(1)
|
||||
|
||||
if (subscriptionRecord.length === 0) {
|
||||
return NextResponse.json({ error: 'No active subscription found' }, { status: 404 })
|
||||
}
|
||||
|
||||
// Get member count
|
||||
const memberCount = await db
|
||||
.select({ userId: member.userId })
|
||||
.from(member)
|
||||
.where(eq(member.organizationId, organizationId))
|
||||
|
||||
const orgSubscription = subscriptionRecord[0]
|
||||
const maxSeats = orgSubscription.seats || 1
|
||||
const usedSeats = memberCount.length
|
||||
const availableSeats = Math.max(0, maxSeats - usedSeats)
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
data: {
|
||||
maxSeats,
|
||||
usedSeats,
|
||||
availableSeats,
|
||||
plan: orgSubscription.plan,
|
||||
canModifySeats: orgSubscription.plan === 'team',
|
||||
},
|
||||
})
|
||||
} catch (error) {
|
||||
const { id: organizationId } = await params
|
||||
logger.error('Failed to get organization seats', {
|
||||
organizationId,
|
||||
error,
|
||||
})
|
||||
|
||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,188 +0,0 @@
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import type { SFTPWrapper } from 'ssh2'
|
||||
import { z } from 'zod'
|
||||
import { checkHybridAuth } from '@/lib/auth/hybrid'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import {
|
||||
createSftpConnection,
|
||||
getFileType,
|
||||
getSftp,
|
||||
isPathSafe,
|
||||
sanitizePath,
|
||||
sftpIsDirectory,
|
||||
} from '@/app/api/tools/sftp/utils'
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
const logger = createLogger('SftpDeleteAPI')
|
||||
|
||||
const DeleteSchema = z.object({
|
||||
host: z.string().min(1, 'Host is required'),
|
||||
port: z.coerce.number().int().positive().default(22),
|
||||
username: z.string().min(1, 'Username is required'),
|
||||
password: z.string().nullish(),
|
||||
privateKey: z.string().nullish(),
|
||||
passphrase: z.string().nullish(),
|
||||
remotePath: z.string().min(1, 'Remote path is required'),
|
||||
recursive: z.boolean().default(false),
|
||||
})
|
||||
|
||||
/**
|
||||
* Recursively deletes a directory and all its contents
|
||||
*/
|
||||
async function deleteRecursive(sftp: SFTPWrapper, dirPath: string): Promise<void> {
|
||||
const entries = await new Promise<Array<{ filename: string; attrs: any }>>((resolve, reject) => {
|
||||
sftp.readdir(dirPath, (err, list) => {
|
||||
if (err) {
|
||||
reject(err)
|
||||
} else {
|
||||
resolve(list)
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
for (const entry of entries) {
|
||||
if (entry.filename === '.' || entry.filename === '..') continue
|
||||
|
||||
const entryPath = `${dirPath}/${entry.filename}`
|
||||
const entryType = getFileType(entry.attrs)
|
||||
|
||||
if (entryType === 'directory') {
|
||||
await deleteRecursive(sftp, entryPath)
|
||||
} else {
|
||||
await new Promise<void>((resolve, reject) => {
|
||||
sftp.unlink(entryPath, (err) => {
|
||||
if (err) reject(err)
|
||||
else resolve()
|
||||
})
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
await new Promise<void>((resolve, reject) => {
|
||||
sftp.rmdir(dirPath, (err) => {
|
||||
if (err) reject(err)
|
||||
else resolve()
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const requestId = generateRequestId()
|
||||
|
||||
try {
|
||||
const authResult = await checkHybridAuth(request, { requireWorkflowId: false })
|
||||
|
||||
if (!authResult.success) {
|
||||
logger.warn(`[${requestId}] Unauthorized SFTP delete attempt: ${authResult.error}`)
|
||||
return NextResponse.json(
|
||||
{ success: false, error: authResult.error || 'Authentication required' },
|
||||
{ status: 401 }
|
||||
)
|
||||
}
|
||||
|
||||
logger.info(`[${requestId}] Authenticated SFTP delete request via ${authResult.authType}`, {
|
||||
userId: authResult.userId,
|
||||
})
|
||||
|
||||
const body = await request.json()
|
||||
const params = DeleteSchema.parse(body)
|
||||
|
||||
if (!params.password && !params.privateKey) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Either password or privateKey must be provided' },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
if (!isPathSafe(params.remotePath)) {
|
||||
logger.warn(`[${requestId}] Path traversal attempt detected in remotePath`)
|
||||
return NextResponse.json(
|
||||
{ error: 'Invalid remote path: path traversal sequences are not allowed' },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
logger.info(`[${requestId}] Connecting to SFTP server ${params.host}:${params.port}`)
|
||||
|
||||
const client = await createSftpConnection({
|
||||
host: params.host,
|
||||
port: params.port,
|
||||
username: params.username,
|
||||
password: params.password,
|
||||
privateKey: params.privateKey,
|
||||
passphrase: params.passphrase,
|
||||
})
|
||||
|
||||
try {
|
||||
const sftp = await getSftp(client)
|
||||
const remotePath = sanitizePath(params.remotePath)
|
||||
|
||||
logger.info(`[${requestId}] Deleting ${remotePath} (recursive: ${params.recursive})`)
|
||||
|
||||
const isDir = await sftpIsDirectory(sftp, remotePath)
|
||||
|
||||
if (isDir) {
|
||||
if (params.recursive) {
|
||||
await deleteRecursive(sftp, remotePath)
|
||||
} else {
|
||||
await new Promise<void>((resolve, reject) => {
|
||||
sftp.rmdir(remotePath, (err) => {
|
||||
if (err) {
|
||||
if (err.message.includes('not empty')) {
|
||||
reject(
|
||||
new Error(
|
||||
'Directory is not empty. Use recursive: true to delete non-empty directories.'
|
||||
)
|
||||
)
|
||||
} else {
|
||||
reject(err)
|
||||
}
|
||||
} else {
|
||||
resolve()
|
||||
}
|
||||
})
|
||||
})
|
||||
}
|
||||
} else {
|
||||
await new Promise<void>((resolve, reject) => {
|
||||
sftp.unlink(remotePath, (err) => {
|
||||
if (err) {
|
||||
if (err.message.includes('No such file')) {
|
||||
reject(new Error(`File not found: ${remotePath}`))
|
||||
} else {
|
||||
reject(err)
|
||||
}
|
||||
} else {
|
||||
resolve()
|
||||
}
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
logger.info(`[${requestId}] Successfully deleted ${remotePath}`)
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
deletedPath: remotePath,
|
||||
message: `Successfully deleted ${remotePath}`,
|
||||
})
|
||||
} finally {
|
||||
client.end()
|
||||
}
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
|
||||
return NextResponse.json(
|
||||
{ error: 'Invalid request data', details: error.errors },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred'
|
||||
logger.error(`[${requestId}] SFTP delete failed:`, error)
|
||||
|
||||
return NextResponse.json({ error: `SFTP delete failed: ${errorMessage}` }, { status: 500 })
|
||||
}
|
||||
}
|
||||
@@ -1,149 +0,0 @@
|
||||
import path from 'path'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { checkHybridAuth } from '@/lib/auth/hybrid'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { createSftpConnection, getSftp, isPathSafe, sanitizePath } from '@/app/api/tools/sftp/utils'
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
const logger = createLogger('SftpDownloadAPI')
|
||||
|
||||
const DownloadSchema = z.object({
|
||||
host: z.string().min(1, 'Host is required'),
|
||||
port: z.coerce.number().int().positive().default(22),
|
||||
username: z.string().min(1, 'Username is required'),
|
||||
password: z.string().nullish(),
|
||||
privateKey: z.string().nullish(),
|
||||
passphrase: z.string().nullish(),
|
||||
remotePath: z.string().min(1, 'Remote path is required'),
|
||||
encoding: z.enum(['utf-8', 'base64']).default('utf-8'),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const requestId = generateRequestId()
|
||||
|
||||
try {
|
||||
const authResult = await checkHybridAuth(request, { requireWorkflowId: false })
|
||||
|
||||
if (!authResult.success) {
|
||||
logger.warn(`[${requestId}] Unauthorized SFTP download attempt: ${authResult.error}`)
|
||||
return NextResponse.json(
|
||||
{ success: false, error: authResult.error || 'Authentication required' },
|
||||
{ status: 401 }
|
||||
)
|
||||
}
|
||||
|
||||
logger.info(`[${requestId}] Authenticated SFTP download request via ${authResult.authType}`, {
|
||||
userId: authResult.userId,
|
||||
})
|
||||
|
||||
const body = await request.json()
|
||||
const params = DownloadSchema.parse(body)
|
||||
|
||||
if (!params.password && !params.privateKey) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Either password or privateKey must be provided' },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
if (!isPathSafe(params.remotePath)) {
|
||||
logger.warn(`[${requestId}] Path traversal attempt detected in remotePath`)
|
||||
return NextResponse.json(
|
||||
{ error: 'Invalid remote path: path traversal sequences are not allowed' },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
logger.info(`[${requestId}] Connecting to SFTP server ${params.host}:${params.port}`)
|
||||
|
||||
const client = await createSftpConnection({
|
||||
host: params.host,
|
||||
port: params.port,
|
||||
username: params.username,
|
||||
password: params.password,
|
||||
privateKey: params.privateKey,
|
||||
passphrase: params.passphrase,
|
||||
})
|
||||
|
||||
try {
|
||||
const sftp = await getSftp(client)
|
||||
const remotePath = sanitizePath(params.remotePath)
|
||||
|
||||
const stats = await new Promise<{ size: number }>((resolve, reject) => {
|
||||
sftp.stat(remotePath, (err, stats) => {
|
||||
if (err) {
|
||||
if (err.message.includes('No such file')) {
|
||||
reject(new Error(`File not found: ${remotePath}`))
|
||||
} else {
|
||||
reject(err)
|
||||
}
|
||||
} else {
|
||||
resolve(stats)
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
const maxSize = 50 * 1024 * 1024
|
||||
if (stats.size > maxSize) {
|
||||
const sizeMB = (stats.size / (1024 * 1024)).toFixed(2)
|
||||
return NextResponse.json(
|
||||
{ success: false, error: `File size (${sizeMB}MB) exceeds download limit of 50MB` },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
logger.info(`[${requestId}] Downloading file ${remotePath} (${stats.size} bytes)`)
|
||||
|
||||
const chunks: Buffer[] = []
|
||||
await new Promise<void>((resolve, reject) => {
|
||||
const readStream = sftp.createReadStream(remotePath)
|
||||
|
||||
readStream.on('data', (chunk: Buffer) => {
|
||||
chunks.push(chunk)
|
||||
})
|
||||
|
||||
readStream.on('end', () => resolve())
|
||||
readStream.on('error', reject)
|
||||
})
|
||||
|
||||
const buffer = Buffer.concat(chunks)
|
||||
const fileName = path.basename(remotePath)
|
||||
|
||||
let content: string
|
||||
if (params.encoding === 'base64') {
|
||||
content = buffer.toString('base64')
|
||||
} else {
|
||||
content = buffer.toString('utf-8')
|
||||
}
|
||||
|
||||
logger.info(`[${requestId}] Downloaded ${fileName} (${buffer.length} bytes)`)
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
fileName,
|
||||
content,
|
||||
size: buffer.length,
|
||||
encoding: params.encoding,
|
||||
message: `Successfully downloaded ${fileName}`,
|
||||
})
|
||||
} finally {
|
||||
client.end()
|
||||
}
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
|
||||
return NextResponse.json(
|
||||
{ error: 'Invalid request data', details: error.errors },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred'
|
||||
logger.error(`[${requestId}] SFTP download failed:`, error)
|
||||
|
||||
return NextResponse.json({ error: `SFTP download failed: ${errorMessage}` }, { status: 500 })
|
||||
}
|
||||
}
|
||||
@@ -1,156 +0,0 @@
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { checkHybridAuth } from '@/lib/auth/hybrid'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import {
|
||||
createSftpConnection,
|
||||
getFileType,
|
||||
getSftp,
|
||||
isPathSafe,
|
||||
parsePermissions,
|
||||
sanitizePath,
|
||||
} from '@/app/api/tools/sftp/utils'
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
const logger = createLogger('SftpListAPI')
|
||||
|
||||
const ListSchema = z.object({
|
||||
host: z.string().min(1, 'Host is required'),
|
||||
port: z.coerce.number().int().positive().default(22),
|
||||
username: z.string().min(1, 'Username is required'),
|
||||
password: z.string().nullish(),
|
||||
privateKey: z.string().nullish(),
|
||||
passphrase: z.string().nullish(),
|
||||
remotePath: z.string().min(1, 'Remote path is required'),
|
||||
detailed: z.boolean().default(false),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const requestId = generateRequestId()
|
||||
|
||||
try {
|
||||
const authResult = await checkHybridAuth(request, { requireWorkflowId: false })
|
||||
|
||||
if (!authResult.success) {
|
||||
logger.warn(`[${requestId}] Unauthorized SFTP list attempt: ${authResult.error}`)
|
||||
return NextResponse.json(
|
||||
{ success: false, error: authResult.error || 'Authentication required' },
|
||||
{ status: 401 }
|
||||
)
|
||||
}
|
||||
|
||||
logger.info(`[${requestId}] Authenticated SFTP list request via ${authResult.authType}`, {
|
||||
userId: authResult.userId,
|
||||
})
|
||||
|
||||
const body = await request.json()
|
||||
const params = ListSchema.parse(body)
|
||||
|
||||
if (!params.password && !params.privateKey) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Either password or privateKey must be provided' },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
if (!isPathSafe(params.remotePath)) {
|
||||
logger.warn(`[${requestId}] Path traversal attempt detected in remotePath`)
|
||||
return NextResponse.json(
|
||||
{ error: 'Invalid remote path: path traversal sequences are not allowed' },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
logger.info(`[${requestId}] Connecting to SFTP server ${params.host}:${params.port}`)
|
||||
|
||||
const client = await createSftpConnection({
|
||||
host: params.host,
|
||||
port: params.port,
|
||||
username: params.username,
|
||||
password: params.password,
|
||||
privateKey: params.privateKey,
|
||||
passphrase: params.passphrase,
|
||||
})
|
||||
|
||||
try {
|
||||
const sftp = await getSftp(client)
|
||||
const remotePath = sanitizePath(params.remotePath)
|
||||
|
||||
logger.info(`[${requestId}] Listing directory ${remotePath}`)
|
||||
|
||||
const fileList = await new Promise<Array<{ filename: string; longname: string; attrs: any }>>(
|
||||
(resolve, reject) => {
|
||||
sftp.readdir(remotePath, (err, list) => {
|
||||
if (err) {
|
||||
if (err.message.includes('No such file')) {
|
||||
reject(new Error(`Directory not found: ${remotePath}`))
|
||||
} else {
|
||||
reject(err)
|
||||
}
|
||||
} else {
|
||||
resolve(list)
|
||||
}
|
||||
})
|
||||
}
|
||||
)
|
||||
|
||||
const entries = fileList
|
||||
.filter((item) => item.filename !== '.' && item.filename !== '..')
|
||||
.map((item) => {
|
||||
const entry: {
|
||||
name: string
|
||||
type: 'file' | 'directory' | 'symlink' | 'other'
|
||||
size?: number
|
||||
permissions?: string
|
||||
modifiedAt?: string
|
||||
} = {
|
||||
name: item.filename,
|
||||
type: getFileType(item.attrs),
|
||||
}
|
||||
|
||||
if (params.detailed) {
|
||||
entry.size = item.attrs.size
|
||||
entry.permissions = parsePermissions(item.attrs.mode)
|
||||
if (item.attrs.mtime) {
|
||||
entry.modifiedAt = new Date(item.attrs.mtime * 1000).toISOString()
|
||||
}
|
||||
}
|
||||
|
||||
return entry
|
||||
})
|
||||
|
||||
entries.sort((a, b) => {
|
||||
if (a.type === 'directory' && b.type !== 'directory') return -1
|
||||
if (a.type !== 'directory' && b.type === 'directory') return 1
|
||||
return a.name.localeCompare(b.name)
|
||||
})
|
||||
|
||||
logger.info(`[${requestId}] Listed ${entries.length} entries in ${remotePath}`)
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
path: remotePath,
|
||||
entries,
|
||||
count: entries.length,
|
||||
message: `Found ${entries.length} entries in ${remotePath}`,
|
||||
})
|
||||
} finally {
|
||||
client.end()
|
||||
}
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
|
||||
return NextResponse.json(
|
||||
{ error: 'Invalid request data', details: error.errors },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred'
|
||||
logger.error(`[${requestId}] SFTP list failed:`, error)
|
||||
|
||||
return NextResponse.json({ error: `SFTP list failed: ${errorMessage}` }, { status: 500 })
|
||||
}
|
||||
}
|
||||
@@ -1,168 +0,0 @@
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import type { SFTPWrapper } from 'ssh2'
|
||||
import { z } from 'zod'
|
||||
import { checkHybridAuth } from '@/lib/auth/hybrid'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import {
|
||||
createSftpConnection,
|
||||
getSftp,
|
||||
isPathSafe,
|
||||
sanitizePath,
|
||||
sftpExists,
|
||||
} from '@/app/api/tools/sftp/utils'
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
const logger = createLogger('SftpMkdirAPI')
|
||||
|
||||
const MkdirSchema = z.object({
|
||||
host: z.string().min(1, 'Host is required'),
|
||||
port: z.coerce.number().int().positive().default(22),
|
||||
username: z.string().min(1, 'Username is required'),
|
||||
password: z.string().nullish(),
|
||||
privateKey: z.string().nullish(),
|
||||
passphrase: z.string().nullish(),
|
||||
remotePath: z.string().min(1, 'Remote path is required'),
|
||||
recursive: z.boolean().default(false),
|
||||
})
|
||||
|
||||
/**
|
||||
* Creates directory recursively (like mkdir -p)
|
||||
*/
|
||||
async function mkdirRecursive(sftp: SFTPWrapper, dirPath: string): Promise<void> {
|
||||
const parts = dirPath.split('/').filter(Boolean)
|
||||
let currentPath = dirPath.startsWith('/') ? '' : ''
|
||||
|
||||
for (const part of parts) {
|
||||
currentPath = currentPath
|
||||
? `${currentPath}/${part}`
|
||||
: dirPath.startsWith('/')
|
||||
? `/${part}`
|
||||
: part
|
||||
|
||||
const exists = await sftpExists(sftp, currentPath)
|
||||
if (!exists) {
|
||||
await new Promise<void>((resolve, reject) => {
|
||||
sftp.mkdir(currentPath, (err) => {
|
||||
if (err && !err.message.includes('already exists')) {
|
||||
reject(err)
|
||||
} else {
|
||||
resolve()
|
||||
}
|
||||
})
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const requestId = generateRequestId()
|
||||
|
||||
try {
|
||||
const authResult = await checkHybridAuth(request, { requireWorkflowId: false })
|
||||
|
||||
if (!authResult.success) {
|
||||
logger.warn(`[${requestId}] Unauthorized SFTP mkdir attempt: ${authResult.error}`)
|
||||
return NextResponse.json(
|
||||
{ success: false, error: authResult.error || 'Authentication required' },
|
||||
{ status: 401 }
|
||||
)
|
||||
}
|
||||
|
||||
logger.info(`[${requestId}] Authenticated SFTP mkdir request via ${authResult.authType}`, {
|
||||
userId: authResult.userId,
|
||||
})
|
||||
|
||||
const body = await request.json()
|
||||
const params = MkdirSchema.parse(body)
|
||||
|
||||
if (!params.password && !params.privateKey) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Either password or privateKey must be provided' },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
if (!isPathSafe(params.remotePath)) {
|
||||
logger.warn(`[${requestId}] Path traversal attempt detected in remotePath`)
|
||||
return NextResponse.json(
|
||||
{ error: 'Invalid remote path: path traversal sequences are not allowed' },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
logger.info(`[${requestId}] Connecting to SFTP server ${params.host}:${params.port}`)
|
||||
|
||||
const client = await createSftpConnection({
|
||||
host: params.host,
|
||||
port: params.port,
|
||||
username: params.username,
|
||||
password: params.password,
|
||||
privateKey: params.privateKey,
|
||||
passphrase: params.passphrase,
|
||||
})
|
||||
|
||||
try {
|
||||
const sftp = await getSftp(client)
|
||||
const remotePath = sanitizePath(params.remotePath)
|
||||
|
||||
logger.info(
|
||||
`[${requestId}] Creating directory ${remotePath} (recursive: ${params.recursive})`
|
||||
)
|
||||
|
||||
if (params.recursive) {
|
||||
await mkdirRecursive(sftp, remotePath)
|
||||
} else {
|
||||
const exists = await sftpExists(sftp, remotePath)
|
||||
if (exists) {
|
||||
return NextResponse.json(
|
||||
{ error: `Directory already exists: ${remotePath}` },
|
||||
{ status: 409 }
|
||||
)
|
||||
}
|
||||
|
||||
await new Promise<void>((resolve, reject) => {
|
||||
sftp.mkdir(remotePath, (err) => {
|
||||
if (err) {
|
||||
if (err.message.includes('No such file')) {
|
||||
reject(
|
||||
new Error(
|
||||
'Parent directory does not exist. Use recursive: true to create parent directories.'
|
||||
)
|
||||
)
|
||||
} else {
|
||||
reject(err)
|
||||
}
|
||||
} else {
|
||||
resolve()
|
||||
}
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
logger.info(`[${requestId}] Successfully created directory ${remotePath}`)
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
createdPath: remotePath,
|
||||
message: `Successfully created directory ${remotePath}`,
|
||||
})
|
||||
} finally {
|
||||
client.end()
|
||||
}
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
|
||||
return NextResponse.json(
|
||||
{ error: 'Invalid request data', details: error.errors },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred'
|
||||
logger.error(`[${requestId}] SFTP mkdir failed:`, error)
|
||||
|
||||
return NextResponse.json({ error: `SFTP mkdir failed: ${errorMessage}` }, { status: 500 })
|
||||
}
|
||||
}
|
||||
@@ -1,242 +0,0 @@
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { checkHybridAuth } from '@/lib/auth/hybrid'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
|
||||
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
|
||||
import {
|
||||
createSftpConnection,
|
||||
getSftp,
|
||||
isPathSafe,
|
||||
sanitizeFileName,
|
||||
sanitizePath,
|
||||
sftpExists,
|
||||
} from '@/app/api/tools/sftp/utils'
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
const logger = createLogger('SftpUploadAPI')
|
||||
|
||||
const UploadSchema = z.object({
|
||||
host: z.string().min(1, 'Host is required'),
|
||||
port: z.coerce.number().int().positive().default(22),
|
||||
username: z.string().min(1, 'Username is required'),
|
||||
password: z.string().nullish(),
|
||||
privateKey: z.string().nullish(),
|
||||
passphrase: z.string().nullish(),
|
||||
remotePath: z.string().min(1, 'Remote path is required'),
|
||||
files: z
|
||||
.union([z.array(z.any()), z.string(), z.number(), z.null(), z.undefined()])
|
||||
.transform((val) => {
|
||||
if (Array.isArray(val)) return val
|
||||
if (val === null || val === undefined || val === '') return undefined
|
||||
return undefined
|
||||
})
|
||||
.nullish(),
|
||||
fileContent: z.string().nullish(),
|
||||
fileName: z.string().nullish(),
|
||||
overwrite: z.boolean().default(true),
|
||||
permissions: z.string().nullish(),
|
||||
})
|
||||
|
||||
/**
 * POST /api/tools/sftp/upload
 *
 * Uploads files to a remote SFTP server. Two input modes are supported and
 * may be combined in one request:
 *   - `files`: stored-file references, fetched via downloadFileFromStorage
 *   - `fileContent` + `fileName`: inline content (base64 or plain text)
 *
 * Responses: 401 unauthenticated, 400 validation/path/size errors,
 * 409 existing target with overwrite disabled (direct-content mode only),
 * 500 connection or transfer failures.
 */
export async function POST(request: NextRequest) {
  const requestId = generateRequestId()

  try {
    // Accepts either interactive or workflow-scoped credentials.
    const authResult = await checkHybridAuth(request, { requireWorkflowId: false })

    if (!authResult.success) {
      logger.warn(`[${requestId}] Unauthorized SFTP upload attempt: ${authResult.error}`)
      return NextResponse.json(
        { success: false, error: authResult.error || 'Authentication required' },
        { status: 401 }
      )
    }

    logger.info(`[${requestId}] Authenticated SFTP upload request via ${authResult.authType}`, {
      userId: authResult.userId,
    })

    const body = await request.json()
    const params = UploadSchema.parse(body) // ZodError -> 400 in catch below

    // At least one credential is required (the schema allows both to be null).
    if (!params.password && !params.privateKey) {
      return NextResponse.json(
        { error: 'Either password or privateKey must be provided' },
        { status: 400 }
      )
    }

    const hasFiles = params.files && params.files.length > 0
    const hasDirectContent = params.fileContent && params.fileName

    if (!hasFiles && !hasDirectContent) {
      return NextResponse.json(
        { error: 'Either files or fileContent with fileName must be provided' },
        { status: 400 }
      )
    }

    // Reject traversal sequences before any server interaction.
    if (!isPathSafe(params.remotePath)) {
      logger.warn(`[${requestId}] Path traversal attempt detected in remotePath`)
      return NextResponse.json(
        { error: 'Invalid remote path: path traversal sequences are not allowed' },
        { status: 400 }
      )
    }

    logger.info(`[${requestId}] Connecting to SFTP server ${params.host}:${params.port}`)

    const client = await createSftpConnection({
      host: params.host,
      port: params.port,
      username: params.username,
      password: params.password,
      privateKey: params.privateKey,
      passphrase: params.passphrase,
    })

    try {
      const sftp = await getSftp(client)
      const remotePath = sanitizePath(params.remotePath)
      const uploadedFiles: Array<{ name: string; remotePath: string; size: number }> = []

      if (hasFiles) {
        const rawFiles = params.files!
        logger.info(`[${requestId}] Processing ${rawFiles.length} file(s) for upload`)

        const userFiles = processFilesToUserFiles(rawFiles, requestId, logger)

        // 100MB cap on the combined size of all referenced files.
        const totalSize = userFiles.reduce((sum, file) => sum + file.size, 0)
        const maxSize = 100 * 1024 * 1024

        if (totalSize > maxSize) {
          const sizeMB = (totalSize / (1024 * 1024)).toFixed(2)
          return NextResponse.json(
            { success: false, error: `Total file size (${sizeMB}MB) exceeds limit of 100MB` },
            { status: 400 }
          )
        }

        for (const file of userFiles) {
          try {
            logger.info(
              `[${requestId}] Downloading file for upload: ${file.name} (${file.size} bytes)`
            )
            const buffer = await downloadFileFromStorage(file, requestId, logger)

            // Sanitize the filename, join onto the directory, then re-sanitize
            // the combined path.
            const safeFileName = sanitizeFileName(file.name)
            const fullRemotePath = remotePath.endsWith('/')
              ? `${remotePath}${safeFileName}`
              : `${remotePath}/${safeFileName}`

            const sanitizedRemotePath = sanitizePath(fullRemotePath)

            // Stored-file mode skips (rather than fails) existing targets.
            if (!params.overwrite) {
              const exists = await sftpExists(sftp, sanitizedRemotePath)
              if (exists) {
                logger.warn(`[${requestId}] File ${sanitizedRemotePath} already exists, skipping`)
                continue
              }
            }

            await new Promise<void>((resolve, reject) => {
              const writeStream = sftp.createWriteStream(sanitizedRemotePath, {
                // Octal permission string (e.g. "644"); defaults to 0o644.
                mode: params.permissions ? Number.parseInt(params.permissions, 8) : 0o644,
              })

              writeStream.on('error', reject)
              writeStream.on('close', () => resolve())
              writeStream.end(buffer)
            })

            uploadedFiles.push({
              name: safeFileName,
              remotePath: sanitizedRemotePath,
              size: buffer.length,
            })

            logger.info(`[${requestId}] Uploaded ${safeFileName} to ${sanitizedRemotePath}`)
          } catch (error) {
            // One failed file aborts the whole request; files uploaded before
            // the failure remain on the server.
            logger.error(`[${requestId}] Failed to upload file ${file.name}:`, error)
            throw new Error(
              `Failed to upload file "${file.name}": ${error instanceof Error ? error.message : 'Unknown error'}`
            )
          }
        }
      }

      if (hasDirectContent) {
        const safeFileName = sanitizeFileName(params.fileName!)
        const fullRemotePath = remotePath.endsWith('/')
          ? `${remotePath}${safeFileName}`
          : `${remotePath}/${safeFileName}`

        const sanitizedRemotePath = sanitizePath(fullRemotePath)

        // Direct-content mode returns 409 (instead of skipping) on conflict.
        if (!params.overwrite) {
          const exists = await sftpExists(sftp, sanitizedRemotePath)
          if (exists) {
            return NextResponse.json(
              { error: 'File already exists and overwrite is disabled' },
              { status: 409 }
            )
          }
        }

        // Heuristic: treat content as base64 when a decode/re-encode round
        // trip reproduces the input exactly; otherwise fall back to UTF-8.
        // NOTE(review): short plain-text values that happen to be valid
        // base64 (e.g. "abcd") will be decoded as binary — confirm intended.
        let content: Buffer
        try {
          content = Buffer.from(params.fileContent!, 'base64')
          const reEncoded = content.toString('base64')
          if (reEncoded !== params.fileContent) {
            content = Buffer.from(params.fileContent!, 'utf-8')
          }
        } catch {
          content = Buffer.from(params.fileContent!, 'utf-8')
        }

        await new Promise<void>((resolve, reject) => {
          const writeStream = sftp.createWriteStream(sanitizedRemotePath, {
            mode: params.permissions ? Number.parseInt(params.permissions, 8) : 0o644,
          })

          writeStream.on('error', reject)
          writeStream.on('close', () => resolve())
          writeStream.end(content)
        })

        uploadedFiles.push({
          name: safeFileName,
          remotePath: sanitizedRemotePath,
          size: content.length,
        })

        logger.info(`[${requestId}] Uploaded direct content to ${sanitizedRemotePath}`)
      }

      logger.info(`[${requestId}] SFTP upload completed: ${uploadedFiles.length} file(s)`)

      return NextResponse.json({
        success: true,
        uploadedFiles,
        message: `Successfully uploaded ${uploadedFiles.length} file(s)`,
      })
    } finally {
      // Always tear down the SSH connection, including on early returns.
      client.end()
    }
  } catch (error) {
    if (error instanceof z.ZodError) {
      logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
      return NextResponse.json(
        { error: 'Invalid request data', details: error.errors },
        { status: 400 }
      )
    }

    const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred'
    logger.error(`[${requestId}] SFTP upload failed:`, error)

    return NextResponse.json({ error: `SFTP upload failed: ${errorMessage}` }, { status: 500 })
  }
}
|
||||
@@ -1,275 +0,0 @@
|
||||
import { type Attributes, Client, type ConnectConfig, type SFTPWrapper } from 'ssh2'
|
||||
|
||||
// File-type mask and type bits (octal) matching POSIX st_mode layout; used
// to classify entries from SFTP stat results (see getFileType).
const S_IFMT = 0o170000 // mask selecting the file-type bits
const S_IFDIR = 0o040000 // directory
const S_IFREG = 0o100000 // regular file
const S_IFLNK = 0o120000 // symbolic link
|
||||
|
||||
/** Connection parameters for establishing an SSH/SFTP session. */
export interface SftpConnectionConfig {
  host: string
  port: number
  username: string
  // Password auth; either password or privateKey must be supplied.
  password?: string | null
  // OpenSSH-format private key; takes precedence over password when both
  // are non-blank (see createSftpConnection).
  privateKey?: string | null
  // Passphrase for an encrypted private key, if needed.
  passphrase?: string | null
  // NOTE(review): declared but never consumed by createSftpConnection —
  // confirm whether it should map to an ssh2 option or be removed.
  timeout?: number
  // Forwarded to ssh2 ConnectConfig when defined.
  keepaliveInterval?: number
  readyTimeout?: number
}
|
||||
|
||||
/**
|
||||
* Formats SSH/SFTP errors with helpful troubleshooting context
|
||||
*/
|
||||
function formatSftpError(err: Error, config: { host: string; port: number }): Error {
|
||||
const errorMessage = err.message.toLowerCase()
|
||||
const { host, port } = config
|
||||
|
||||
if (errorMessage.includes('econnrefused') || errorMessage.includes('connection refused')) {
|
||||
return new Error(
|
||||
`Connection refused to ${host}:${port}. ` +
|
||||
`Please verify: (1) SSH/SFTP server is running, ` +
|
||||
`(2) Port ${port} is correct, ` +
|
||||
`(3) Firewall allows connections.`
|
||||
)
|
||||
}
|
||||
|
||||
if (errorMessage.includes('econnreset') || errorMessage.includes('connection reset')) {
|
||||
return new Error(
|
||||
`Connection reset by ${host}:${port}. ` +
|
||||
`This usually means: (1) Wrong port number, ` +
|
||||
`(2) Server rejected the connection, ` +
|
||||
`(3) Network/firewall interrupted the connection.`
|
||||
)
|
||||
}
|
||||
|
||||
if (errorMessage.includes('etimedout') || errorMessage.includes('timeout')) {
|
||||
return new Error(
|
||||
`Connection timed out to ${host}:${port}. ` +
|
||||
`Please verify: (1) Host is reachable, ` +
|
||||
`(2) No firewall is blocking the connection, ` +
|
||||
`(3) The SFTP server is responding.`
|
||||
)
|
||||
}
|
||||
|
||||
if (errorMessage.includes('enotfound') || errorMessage.includes('getaddrinfo')) {
|
||||
return new Error(
|
||||
`Could not resolve hostname "${host}". Please verify the hostname or IP address is correct.`
|
||||
)
|
||||
}
|
||||
|
||||
if (errorMessage.includes('authentication') || errorMessage.includes('auth')) {
|
||||
return new Error(
|
||||
`Authentication failed on ${host}:${port}. ` +
|
||||
`Please verify: (1) Username is correct, ` +
|
||||
`(2) Password or private key is valid, ` +
|
||||
`(3) User has SFTP access on the server.`
|
||||
)
|
||||
}
|
||||
|
||||
if (
|
||||
errorMessage.includes('key') &&
|
||||
(errorMessage.includes('parse') || errorMessage.includes('invalid'))
|
||||
) {
|
||||
return new Error(
|
||||
`Invalid private key format. ` +
|
||||
`Please ensure you're using a valid OpenSSH private key ` +
|
||||
`(starts with "-----BEGIN" and ends with "-----END").`
|
||||
)
|
||||
}
|
||||
|
||||
if (errorMessage.includes('host key') || errorMessage.includes('hostkey')) {
|
||||
return new Error(
|
||||
`Host key verification issue for ${host}. ` +
|
||||
`This may be the first connection or the server's key has changed.`
|
||||
)
|
||||
}
|
||||
|
||||
return new Error(`SFTP connection to ${host}:${port} failed: ${err.message}`)
|
||||
}
|
||||
|
||||
/**
 * Creates an SSH connection for SFTP using the provided configuration.
 * Resolves with a ready `Client`; rejects with a troubleshooting-friendly
 * error (see formatSftpError) on validation or connection failure.
 *
 * Credential precedence: a non-blank privateKey wins over a password when
 * both are provided. Uses ssh2 library defaults which align with OpenSSH
 * standards.
 */
export function createSftpConnection(config: SftpConnectionConfig): Promise<Client> {
  return new Promise((resolve, reject) => {
    const client = new Client()
    const port = config.port || 22 // falsy port (0/undefined) falls back to 22
    const host = config.host

    if (!host || host.trim() === '') {
      reject(new Error('Host is required. Please provide a valid hostname or IP address.'))
      return
    }

    // Blank/whitespace-only credentials count as absent.
    const hasPassword = config.password && config.password.trim() !== ''
    const hasPrivateKey = config.privateKey && config.privateKey.trim() !== ''

    if (!hasPassword && !hasPrivateKey) {
      reject(new Error('Authentication required. Please provide either a password or private key.'))
      return
    }

    const connectConfig: ConnectConfig = {
      host: host.trim(),
      port,
      username: config.username,
    }

    // NOTE(review): config.timeout is never applied here — confirm whether
    // it should map to an ssh2 option or be dropped from the interface.
    if (config.readyTimeout !== undefined) {
      connectConfig.readyTimeout = config.readyTimeout
    }
    if (config.keepaliveInterval !== undefined) {
      connectConfig.keepaliveInterval = config.keepaliveInterval
    }

    // Private key takes precedence; passphrase only attached when non-blank.
    if (hasPrivateKey) {
      connectConfig.privateKey = config.privateKey!
      if (config.passphrase && config.passphrase.trim() !== '') {
        connectConfig.passphrase = config.passphrase
      }
    } else if (hasPassword) {
      connectConfig.password = config.password!
    }

    client.on('ready', () => {
      resolve(client)
    })

    // Errors emitted after 'ready' hit an already-settled promise and are
    // effectively ignored by this function.
    client.on('error', (err) => {
      reject(formatSftpError(err, { host, port }))
    })

    try {
      // connect() can also throw synchronously (e.g. bad key material).
      client.connect(connectConfig)
    } catch (err) {
      reject(formatSftpError(err instanceof Error ? err : new Error(String(err)), { host, port }))
    }
  })
}
|
||||
|
||||
/**
|
||||
* Gets SFTP subsystem from SSH client
|
||||
*/
|
||||
export function getSftp(client: Client): Promise<SFTPWrapper> {
|
||||
return new Promise((resolve, reject) => {
|
||||
client.sftp((err, sftp) => {
|
||||
if (err) {
|
||||
reject(new Error(`Failed to start SFTP session: ${err.message}`))
|
||||
} else {
|
||||
resolve(sftp)
|
||||
}
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Sanitizes a remote path to prevent path traversal attacks.
|
||||
* Removes null bytes, normalizes path separators, and collapses traversal sequences.
|
||||
* Based on OWASP Path Traversal prevention guidelines.
|
||||
*/
|
||||
export function sanitizePath(path: string): string {
|
||||
let sanitized = path
|
||||
sanitized = sanitized.replace(/\0/g, '')
|
||||
sanitized = decodeURIComponent(sanitized)
|
||||
sanitized = sanitized.replace(/\\/g, '/')
|
||||
sanitized = sanitized.replace(/\/+/g, '/')
|
||||
sanitized = sanitized.trim()
|
||||
return sanitized
|
||||
}
|
||||
|
||||
/**
|
||||
* Sanitizes a filename to prevent path traversal and injection attacks.
|
||||
* Removes directory traversal sequences, path separators, null bytes, and dangerous patterns.
|
||||
* Based on OWASP Input Validation Cheat Sheet recommendations.
|
||||
*/
|
||||
export function sanitizeFileName(fileName: string): string {
|
||||
let sanitized = fileName
|
||||
sanitized = sanitized.replace(/\0/g, '')
|
||||
|
||||
try {
|
||||
sanitized = decodeURIComponent(sanitized)
|
||||
} catch {
|
||||
// Keep original if decode fails (malformed encoding)
|
||||
}
|
||||
|
||||
sanitized = sanitized.replace(/\.\.[/\\]?/g, '')
|
||||
sanitized = sanitized.replace(/[/\\]/g, '_')
|
||||
sanitized = sanitized.replace(/^\.+/, '')
|
||||
sanitized = sanitized.replace(/[\x00-\x1f\x7f]/g, '')
|
||||
sanitized = sanitized.trim()
|
||||
|
||||
return sanitized || 'unnamed_file'
|
||||
}
|
||||
|
||||
/**
|
||||
* Validates that a path doesn't contain traversal sequences.
|
||||
* Returns true if the path is safe, false if it contains potential traversal attacks.
|
||||
*/
|
||||
export function isPathSafe(path: string): boolean {
|
||||
const normalizedPath = path.replace(/\\/g, '/')
|
||||
|
||||
if (normalizedPath.includes('../') || normalizedPath.includes('..\\')) {
|
||||
return false
|
||||
}
|
||||
|
||||
try {
|
||||
const decoded = decodeURIComponent(normalizedPath)
|
||||
if (decoded.includes('../') || decoded.includes('..\\')) {
|
||||
return false
|
||||
}
|
||||
} catch {
|
||||
return false
|
||||
}
|
||||
|
||||
if (normalizedPath.includes('\0')) {
|
||||
return false
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
/**
|
||||
* Parses file permissions from mode bits to octal string representation.
|
||||
*/
|
||||
export function parsePermissions(mode: number): string {
|
||||
return `0${(mode & 0o777).toString(8)}`
|
||||
}
|
||||
|
||||
/**
|
||||
* Determines file type from SFTP attributes mode bits.
|
||||
*/
|
||||
export function getFileType(attrs: Attributes): 'file' | 'directory' | 'symlink' | 'other' {
|
||||
const fileType = attrs.mode & S_IFMT
|
||||
|
||||
if (fileType === S_IFDIR) return 'directory'
|
||||
if (fileType === S_IFREG) return 'file'
|
||||
if (fileType === S_IFLNK) return 'symlink'
|
||||
return 'other'
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if a path exists on the SFTP server.
|
||||
*/
|
||||
export function sftpExists(sftp: SFTPWrapper, path: string): Promise<boolean> {
|
||||
return new Promise((resolve) => {
|
||||
sftp.stat(path, (err) => {
|
||||
resolve(!err)
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if a path is a directory on the SFTP server.
|
||||
*/
|
||||
export function sftpIsDirectory(sftp: SFTPWrapper, path: string): Promise<boolean> {
|
||||
return new Promise((resolve) => {
|
||||
sftp.stat(path, (err, stats) => {
|
||||
if (err) {
|
||||
resolve(false)
|
||||
} else {
|
||||
resolve(getFileType(stats) === 'directory')
|
||||
}
|
||||
})
|
||||
})
|
||||
}
|
||||
@@ -4,9 +4,9 @@ import { checkServerSideUsageLimits } from '@/lib/billing'
|
||||
import { getHighestPrioritySubscription } from '@/lib/billing/core/subscription'
|
||||
import { getEffectiveCurrentPeriodCost } from '@/lib/billing/core/usage'
|
||||
import { getUserStorageLimit, getUserStorageUsage } from '@/lib/billing/storage'
|
||||
import { RateLimiter } from '@/lib/core/rate-limiter'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { createErrorResponse } from '@/app/api/workflows/utils'
|
||||
import { RateLimiter } from '@/services/queue'
|
||||
|
||||
const logger = createLogger('UsageLimitsAPI')
|
||||
|
||||
|
||||
@@ -13,6 +13,7 @@
|
||||
* GET /api/v1/admin/users/:id - Get user details
|
||||
* GET /api/v1/admin/users/:id/billing - Get user billing info
|
||||
* PATCH /api/v1/admin/users/:id/billing - Update user billing (limit, blocked)
|
||||
* POST /api/v1/admin/users/:id/billing/move-to-org - Move user to organization
|
||||
*
|
||||
* Workspaces:
|
||||
* GET /api/v1/admin/workspaces - List all workspaces
|
||||
@@ -35,7 +36,7 @@
|
||||
* GET /api/v1/admin/organizations/:id - Get organization details
|
||||
* PATCH /api/v1/admin/organizations/:id - Update organization
|
||||
* GET /api/v1/admin/organizations/:id/members - List organization members
|
||||
* POST /api/v1/admin/organizations/:id/members - Add/update member in organization
|
||||
* POST /api/v1/admin/organizations/:id/members - Add member to organization
|
||||
* GET /api/v1/admin/organizations/:id/members/:mid - Get member details
|
||||
* PATCH /api/v1/admin/organizations/:id/members/:mid - Update member role
|
||||
* DELETE /api/v1/admin/organizations/:id/members/:mid - Remove member
|
||||
|
||||
@@ -13,16 +13,13 @@
|
||||
*
|
||||
* Add a user to an organization with full billing logic.
|
||||
* Handles Pro usage snapshot and subscription cancellation like the invitation flow.
|
||||
* If user is already a member, updates their role if different.
|
||||
*
|
||||
* Body:
|
||||
* - userId: string - User ID to add
|
||||
* - role: string - Role ('admin' | 'member')
|
||||
* - skipBillingLogic?: boolean - Skip Pro cancellation (default: false)
|
||||
*
|
||||
* Response: AdminSingleResponse<AdminMember & {
|
||||
* action: 'created' | 'updated' | 'already_member',
|
||||
* billingActions: { proUsageSnapshotted, proCancelledAtPeriodEnd }
|
||||
* }>
|
||||
* Response: AdminSingleResponse<AdminMember>
|
||||
*/
|
||||
|
||||
import { db } from '@sim/db'
|
||||
@@ -132,6 +129,8 @@ export const POST = withAdminAuthParams<RouteParams>(async (request, context) =>
|
||||
return badRequestResponse('role must be "admin" or "member"')
|
||||
}
|
||||
|
||||
const skipBillingLogic = body.skipBillingLogic === true
|
||||
|
||||
const [orgData] = await db
|
||||
.select({ id: organization.id, name: organization.name })
|
||||
.from(organization)
|
||||
@@ -152,71 +151,11 @@ export const POST = withAdminAuthParams<RouteParams>(async (request, context) =>
|
||||
return notFoundResponse('User')
|
||||
}
|
||||
|
||||
const [existingMember] = await db
|
||||
.select({
|
||||
id: member.id,
|
||||
role: member.role,
|
||||
createdAt: member.createdAt,
|
||||
organizationId: member.organizationId,
|
||||
})
|
||||
.from(member)
|
||||
.where(eq(member.userId, body.userId))
|
||||
.limit(1)
|
||||
|
||||
if (existingMember) {
|
||||
if (existingMember.organizationId === organizationId) {
|
||||
if (existingMember.role !== body.role) {
|
||||
await db.update(member).set({ role: body.role }).where(eq(member.id, existingMember.id))
|
||||
|
||||
logger.info(
|
||||
`Admin API: Updated user ${body.userId} role in organization ${organizationId}`,
|
||||
{
|
||||
previousRole: existingMember.role,
|
||||
newRole: body.role,
|
||||
}
|
||||
)
|
||||
|
||||
return singleResponse({
|
||||
id: existingMember.id,
|
||||
userId: body.userId,
|
||||
organizationId,
|
||||
role: body.role,
|
||||
createdAt: existingMember.createdAt.toISOString(),
|
||||
userName: userData.name,
|
||||
userEmail: userData.email,
|
||||
action: 'updated' as const,
|
||||
billingActions: {
|
||||
proUsageSnapshotted: false,
|
||||
proCancelledAtPeriodEnd: false,
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
return singleResponse({
|
||||
id: existingMember.id,
|
||||
userId: body.userId,
|
||||
organizationId,
|
||||
role: existingMember.role,
|
||||
createdAt: existingMember.createdAt.toISOString(),
|
||||
userName: userData.name,
|
||||
userEmail: userData.email,
|
||||
action: 'already_member' as const,
|
||||
billingActions: {
|
||||
proUsageSnapshotted: false,
|
||||
proCancelledAtPeriodEnd: false,
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
return badRequestResponse(
|
||||
`User is already a member of another organization. Users can only belong to one organization at a time.`
|
||||
)
|
||||
}
|
||||
|
||||
const result = await addUserToOrganization({
|
||||
userId: body.userId,
|
||||
organizationId,
|
||||
role: body.role,
|
||||
skipBillingLogic,
|
||||
})
|
||||
|
||||
if (!result.success) {
|
||||
@@ -237,11 +176,11 @@ export const POST = withAdminAuthParams<RouteParams>(async (request, context) =>
|
||||
role: body.role,
|
||||
memberId: result.memberId,
|
||||
billingActions: result.billingActions,
|
||||
skipBillingLogic,
|
||||
})
|
||||
|
||||
return singleResponse({
|
||||
...data,
|
||||
action: 'created' as const,
|
||||
billingActions: {
|
||||
proUsageSnapshotted: result.billingActions.proUsageSnapshotted,
|
||||
proCancelledAtPeriodEnd: result.billingActions.proCancelledAtPeriodEnd,
|
||||
|
||||
@@ -12,6 +12,7 @@
|
||||
* Body:
|
||||
* - name?: string - Organization name
|
||||
* - slug?: string - Organization slug
|
||||
* - orgUsageLimit?: number - Usage limit (null to clear)
|
||||
*
|
||||
* Response: AdminSingleResponse<AdminOrganization>
|
||||
*/
|
||||
@@ -111,10 +112,14 @@ export const PATCH = withAdminAuthParams<RouteParams>(async (request, context) =
|
||||
updateData.slug = body.slug.trim()
|
||||
}
|
||||
|
||||
if (Object.keys(updateData).length === 1) {
|
||||
return badRequestResponse(
|
||||
'No valid fields to update. Use /billing endpoint for orgUsageLimit.'
|
||||
)
|
||||
if (body.orgUsageLimit !== undefined) {
|
||||
if (body.orgUsageLimit === null) {
|
||||
updateData.orgUsageLimit = null
|
||||
} else if (typeof body.orgUsageLimit === 'number' && body.orgUsageLimit >= 0) {
|
||||
updateData.orgUsageLimit = body.orgUsageLimit.toFixed(2)
|
||||
} else {
|
||||
return badRequestResponse('orgUsageLimit must be a non-negative number or null')
|
||||
}
|
||||
}
|
||||
|
||||
const [updated] = await db
|
||||
|
||||
@@ -7,18 +7,17 @@
|
||||
*
|
||||
* PATCH /api/v1/admin/organizations/[id]/seats
|
||||
*
|
||||
* Update organization seat count with Stripe sync (matches user flow).
|
||||
* Update organization seat count (for admin override of enterprise seats).
|
||||
*
|
||||
* Body:
|
||||
* - seats: number - New seat count (positive integer)
|
||||
* - seats: number - New seat count (for enterprise metadata.seats)
|
||||
*
|
||||
* Response: AdminSingleResponse<{ success: true, seats: number, plan: string, stripeUpdated?: boolean }>
|
||||
* Response: AdminSingleResponse<{ success: true, seats: number }>
|
||||
*/
|
||||
|
||||
import { db } from '@sim/db'
|
||||
import { organization, subscription } from '@sim/db/schema'
|
||||
import { and, eq } from 'drizzle-orm'
|
||||
import { requireStripeClient } from '@/lib/billing/stripe-client'
|
||||
import { getOrganizationSeatAnalytics } from '@/lib/billing/validation/seat-management'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { withAdminAuthParams } from '@/app/api/v1/admin/middleware'
|
||||
@@ -106,14 +105,11 @@ export const PATCH = withAdminAuthParams<RouteParams>(async (request, context) =
|
||||
return notFoundResponse('Subscription')
|
||||
}
|
||||
|
||||
const newSeatCount = body.seats
|
||||
let stripeUpdated = false
|
||||
|
||||
if (subData.plan === 'enterprise') {
|
||||
const currentMetadata = (subData.metadata as Record<string, unknown>) || {}
|
||||
const newMetadata = {
|
||||
...currentMetadata,
|
||||
seats: newSeatCount,
|
||||
seats: body.seats,
|
||||
}
|
||||
|
||||
await db
|
||||
@@ -122,72 +118,23 @@ export const PATCH = withAdminAuthParams<RouteParams>(async (request, context) =
|
||||
.where(eq(subscription.id, subData.id))
|
||||
|
||||
logger.info(`Admin API: Updated enterprise seats for organization ${organizationId}`, {
|
||||
seats: newSeatCount,
|
||||
})
|
||||
} else if (subData.plan === 'team') {
|
||||
if (subData.stripeSubscriptionId) {
|
||||
const stripe = requireStripeClient()
|
||||
|
||||
const stripeSubscription = await stripe.subscriptions.retrieve(subData.stripeSubscriptionId)
|
||||
|
||||
if (stripeSubscription.status !== 'active') {
|
||||
return badRequestResponse('Stripe subscription is not active')
|
||||
}
|
||||
|
||||
const subscriptionItem = stripeSubscription.items.data[0]
|
||||
if (!subscriptionItem) {
|
||||
return internalErrorResponse('No subscription item found in Stripe subscription')
|
||||
}
|
||||
|
||||
const currentSeats = subData.seats || 1
|
||||
|
||||
logger.info('Admin API: Updating Stripe subscription quantity', {
|
||||
organizationId,
|
||||
stripeSubscriptionId: subData.stripeSubscriptionId,
|
||||
subscriptionItemId: subscriptionItem.id,
|
||||
currentSeats,
|
||||
newSeatCount,
|
||||
})
|
||||
|
||||
await stripe.subscriptions.update(subData.stripeSubscriptionId, {
|
||||
items: [
|
||||
{
|
||||
id: subscriptionItem.id,
|
||||
quantity: newSeatCount,
|
||||
},
|
||||
],
|
||||
proration_behavior: 'create_prorations',
|
||||
})
|
||||
|
||||
stripeUpdated = true
|
||||
}
|
||||
|
||||
await db
|
||||
.update(subscription)
|
||||
.set({ seats: newSeatCount })
|
||||
.where(eq(subscription.id, subData.id))
|
||||
|
||||
logger.info(`Admin API: Updated team seats for organization ${organizationId}`, {
|
||||
seats: newSeatCount,
|
||||
stripeUpdated,
|
||||
seats: body.seats,
|
||||
})
|
||||
} else {
|
||||
await db
|
||||
.update(subscription)
|
||||
.set({ seats: newSeatCount })
|
||||
.set({ seats: body.seats })
|
||||
.where(eq(subscription.id, subData.id))
|
||||
|
||||
logger.info(`Admin API: Updated seats for organization ${organizationId}`, {
|
||||
seats: newSeatCount,
|
||||
plan: subData.plan,
|
||||
logger.info(`Admin API: Updated team seats for organization ${organizationId}`, {
|
||||
seats: body.seats,
|
||||
})
|
||||
}
|
||||
|
||||
return singleResponse({
|
||||
success: true,
|
||||
seats: newSeatCount,
|
||||
seats: body.seats,
|
||||
plan: subData.plan,
|
||||
stripeUpdated,
|
||||
})
|
||||
} catch (error) {
|
||||
logger.error('Admin API: Failed to update organization seats', { error, organizationId })
|
||||
|
||||
@@ -0,0 +1,160 @@
|
||||
/**
|
||||
* POST /api/v1/admin/users/[id]/billing/move-to-org
|
||||
*
|
||||
* Move a user to an organization with full billing logic.
|
||||
* Enforces single-org constraint, handles Pro snapshot/cancellation.
|
||||
*
|
||||
* Body:
|
||||
* - organizationId: string - Target organization ID
|
||||
* - role?: string - Role in organization ('admin' | 'member'), defaults to 'member'
|
||||
* - skipBillingLogic?: boolean - Skip Pro handling (default: false)
|
||||
*
|
||||
* Response: AdminSingleResponse<{
|
||||
* success: true,
|
||||
* memberId: string,
|
||||
* organizationId: string,
|
||||
* role: string,
|
||||
* action: 'created' | 'updated' | 'already_member',
|
||||
* billingActions: { proUsageSnapshotted, proCancelledAtPeriodEnd }
|
||||
* }>
|
||||
*/
|
||||
|
||||
import { db } from '@sim/db'
|
||||
import { member, organization, user } from '@sim/db/schema'
|
||||
import { eq } from 'drizzle-orm'
|
||||
import { addUserToOrganization } from '@/lib/billing/organizations/membership'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { withAdminAuthParams } from '@/app/api/v1/admin/middleware'
|
||||
import {
|
||||
badRequestResponse,
|
||||
internalErrorResponse,
|
||||
notFoundResponse,
|
||||
singleResponse,
|
||||
} from '@/app/api/v1/admin/responses'
|
||||
|
||||
const logger = createLogger('AdminUserMoveToOrgAPI')
|
||||
|
||||
interface RouteParams {
|
||||
id: string
|
||||
}
|
||||
|
||||
/**
 * Moves a user into an organization (admin-only).
 *
 * Flow: validate body -> verify user and organization exist -> if the user
 * is already a member of the target org, update the role (or no-op) ->
 * otherwise delegate creation to addUserToOrganization, which also performs
 * the billing side effects reported in `billingActions`.
 *
 * NOTE(review): the single-org constraint described in the file header is
 * not checked here for memberships in OTHER organizations — presumably
 * enforced inside addUserToOrganization; confirm.
 */
export const POST = withAdminAuthParams<RouteParams>(async (request, context) => {
  const { id: userId } = await context.params

  try {
    const body = await request.json()

    if (!body.organizationId || typeof body.organizationId !== 'string') {
      return badRequestResponse('organizationId is required')
    }

    // Role defaults to 'member' when omitted.
    const role = body.role || 'member'
    if (!['admin', 'member'].includes(role)) {
      return badRequestResponse('role must be "admin" or "member"')
    }

    // Only an explicit boolean true skips the billing side effects.
    const skipBillingLogic = body.skipBillingLogic === true

    const [userData] = await db
      .select({ id: user.id })
      .from(user)
      .where(eq(user.id, userId))
      .limit(1)

    if (!userData) {
      return notFoundResponse('User')
    }

    const [orgData] = await db
      .select({ id: organization.id, name: organization.name })
      .from(organization)
      .where(eq(organization.id, body.organizationId))
      .limit(1)

    if (!orgData) {
      return notFoundResponse('Organization')
    }

    const existingMemberships = await db
      .select({ id: member.id, organizationId: member.organizationId, role: member.role })
      .from(member)
      .where(eq(member.userId, userId))

    const existingInThisOrg = existingMemberships.find(
      (m) => m.organizationId === body.organizationId
    )
    if (existingInThisOrg) {
      // Already a member of the target org: update role if it differs,
      // otherwise report 'already_member'. No billing actions either way.
      if (existingInThisOrg.role !== role) {
        await db.update(member).set({ role }).where(eq(member.id, existingInThisOrg.id))

        logger.info(
          `Admin API: Updated user ${userId} role in organization ${body.organizationId}`,
          {
            previousRole: existingInThisOrg.role,
            newRole: role,
          }
        )

        return singleResponse({
          success: true,
          memberId: existingInThisOrg.id,
          organizationId: body.organizationId,
          organizationName: orgData.name,
          role,
          action: 'updated',
          billingActions: {
            proUsageSnapshotted: false,
            proCancelledAtPeriodEnd: false,
          },
        })
      }

      return singleResponse({
        success: true,
        memberId: existingInThisOrg.id,
        organizationId: body.organizationId,
        organizationName: orgData.name,
        role: existingInThisOrg.role,
        action: 'already_member',
        billingActions: {
          proUsageSnapshotted: false,
          proCancelledAtPeriodEnd: false,
        },
      })
    }

    // New membership: shared helper handles creation plus billing actions.
    const result = await addUserToOrganization({
      userId,
      organizationId: body.organizationId,
      role,
      skipBillingLogic,
    })

    if (!result.success) {
      return badRequestResponse(result.error || 'Failed to move user to organization')
    }

    logger.info(`Admin API: Moved user ${userId} to organization ${body.organizationId}`, {
      role,
      memberId: result.memberId,
      billingActions: result.billingActions,
      skipBillingLogic,
    })

    return singleResponse({
      success: true,
      memberId: result.memberId,
      organizationId: body.organizationId,
      organizationName: orgData.name,
      role,
      action: 'created',
      billingActions: {
        proUsageSnapshotted: result.billingActions.proUsageSnapshotted,
        proCancelledAtPeriodEnd: result.billingActions.proCancelledAtPeriodEnd,
      },
    })
  } catch (error) {
    logger.error('Admin API: Failed to move user to organization', { error, userId })
    return internalErrorResponse('Failed to move user to organization')
  }
})
|
||||
@@ -1,7 +1,7 @@
|
||||
import { checkServerSideUsageLimits } from '@/lib/billing'
|
||||
import { getHighestPrioritySubscription } from '@/lib/billing/core/subscription'
|
||||
import { getEffectiveCurrentPeriodCost } from '@/lib/billing/core/usage'
|
||||
import { RateLimiter } from '@/lib/core/rate-limiter'
|
||||
import { RateLimiter } from '@/services/queue'
|
||||
|
||||
export interface UserLimits {
|
||||
workflowExecutionRateLimit: {
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { getHighestPrioritySubscription } from '@/lib/billing/core/subscription'
|
||||
import { RateLimiter } from '@/lib/core/rate-limiter/rate-limiter'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { authenticateV1Request } from '@/app/api/v1/auth'
|
||||
import { RateLimiter } from '@/services/queue/RateLimiter'
|
||||
|
||||
const logger = createLogger('V1Middleware')
|
||||
const rateLimiter = new RateLimiter()
|
||||
|
||||
@@ -1,66 +0,0 @@
|
||||
import { nanoid } from 'nanoid'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { verifyCronAuth } from '@/lib/auth/internal'
|
||||
import { acquireLock, releaseLock } from '@/lib/core/config/redis'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { pollRssWebhooks } from '@/lib/webhooks/rss-polling-service'
|
||||
|
||||
const logger = createLogger('RssPollingAPI')
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
export const maxDuration = 180 // Allow up to 3 minutes for polling to complete
|
||||
|
||||
const LOCK_KEY = 'rss-polling-lock'
|
||||
const LOCK_TTL_SECONDS = 180 // Same as maxDuration (3 min)
|
||||
|
||||
export async function GET(request: NextRequest) {
|
||||
const requestId = nanoid()
|
||||
logger.info(`RSS webhook polling triggered (${requestId})`)
|
||||
|
||||
let lockValue: string | undefined
|
||||
|
||||
try {
|
||||
const authError = verifyCronAuth(request, 'RSS webhook polling')
|
||||
if (authError) {
|
||||
return authError
|
||||
}
|
||||
|
||||
lockValue = requestId
|
||||
const locked = await acquireLock(LOCK_KEY, lockValue, LOCK_TTL_SECONDS)
|
||||
|
||||
if (!locked) {
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: true,
|
||||
message: 'Polling already in progress – skipped',
|
||||
requestId,
|
||||
status: 'skip',
|
||||
},
|
||||
{ status: 202 }
|
||||
)
|
||||
}
|
||||
|
||||
const results = await pollRssWebhooks()
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
message: 'RSS polling completed',
|
||||
requestId,
|
||||
status: 'completed',
|
||||
...results,
|
||||
})
|
||||
} catch (error) {
|
||||
logger.error(`Error during RSS polling (${requestId}):`, error)
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
message: 'RSS polling failed',
|
||||
error: error instanceof Error ? error.message : 'Unknown error',
|
||||
requestId,
|
||||
},
|
||||
{ status: 500 }
|
||||
)
|
||||
} finally {
|
||||
await releaseLock(LOCK_KEY).catch(() => {})
|
||||
}
|
||||
}
|
||||
@@ -544,43 +544,6 @@ export async function POST(request: NextRequest) {
|
||||
}
|
||||
// --- End Outlook specific logic ---
|
||||
|
||||
// --- RSS webhook setup ---
|
||||
if (savedWebhook && provider === 'rss') {
|
||||
logger.info(`[${requestId}] RSS provider detected. Setting up RSS webhook configuration.`)
|
||||
try {
|
||||
const { configureRssPolling } = await import('@/lib/webhooks/utils.server')
|
||||
const success = await configureRssPolling(savedWebhook, requestId)
|
||||
|
||||
if (!success) {
|
||||
logger.error(`[${requestId}] Failed to configure RSS polling, rolling back webhook`)
|
||||
await db.delete(webhook).where(eq(webhook.id, savedWebhook.id))
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: 'Failed to configure RSS polling',
|
||||
details: 'Please try again',
|
||||
},
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
|
||||
logger.info(`[${requestId}] Successfully configured RSS polling`)
|
||||
} catch (err) {
|
||||
logger.error(
|
||||
`[${requestId}] Error setting up RSS webhook configuration, rolling back webhook`,
|
||||
err
|
||||
)
|
||||
await db.delete(webhook).where(eq(webhook.id, savedWebhook.id))
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: 'Failed to configure RSS webhook',
|
||||
details: err instanceof Error ? err.message : 'Unknown error',
|
||||
},
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
// --- End RSS specific logic ---
|
||||
|
||||
const status = targetWebhookId ? 200 : 201
|
||||
return NextResponse.json({ webhook: savedWebhook }, { status })
|
||||
} catch (error: any) {
|
||||
|
||||
@@ -69,11 +69,7 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
|
||||
|
||||
let preprocessError: NextResponse | null = null
|
||||
try {
|
||||
// Test webhooks skip deployment check but still enforce rate limits and usage limits
|
||||
// They run on live/draft state to allow testing before deployment
|
||||
preprocessError = await checkWebhookPreprocessing(foundWorkflow, foundWebhook, requestId, {
|
||||
isTestMode: true,
|
||||
})
|
||||
preprocessError = await checkWebhookPreprocessing(foundWorkflow, foundWebhook, requestId)
|
||||
if (preprocessError) {
|
||||
return preprocessError
|
||||
}
|
||||
|
||||
@@ -140,7 +140,7 @@ vi.mock('@/lib/workspaces/utils', async () => {
|
||||
}
|
||||
})
|
||||
|
||||
vi.mock('@/lib/core/rate-limiter', () => ({
|
||||
vi.mock('@/services/queue', () => ({
|
||||
RateLimiter: vi.fn().mockImplementation(() => ({
|
||||
checkRateLimit: vi.fn().mockResolvedValue({
|
||||
allowed: true,
|
||||
|
||||
@@ -395,7 +395,8 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
||||
triggerType: loggingTriggerType,
|
||||
executionId,
|
||||
requestId,
|
||||
checkDeployment: !shouldUseDraftState,
|
||||
checkRateLimit: false, // Manual executions bypass rate limits
|
||||
checkDeployment: !shouldUseDraftState, // Check deployment unless using draft
|
||||
loggingSession,
|
||||
})
|
||||
|
||||
|
||||
@@ -451,6 +451,15 @@ function RunSkipButtons({
|
||||
const actionInProgressRef = useRef(false)
|
||||
const { setToolCallState, addAutoAllowedTool } = useCopilotStore()
|
||||
|
||||
const instance = getClientTool(toolCall.id)
|
||||
const interruptDisplays = instance?.getInterruptDisplays?.()
|
||||
const isIntegration = isIntegrationTool(toolCall.name)
|
||||
|
||||
// For integration tools: Allow, Always Allow, Skip
|
||||
// For client tools with interrupts: Run, Skip (or custom labels)
|
||||
const acceptLabel = isIntegration ? 'Allow' : interruptDisplays?.accept?.text || 'Run'
|
||||
const rejectLabel = interruptDisplays?.reject?.text || 'Skip'
|
||||
|
||||
const onRun = async () => {
|
||||
// Prevent race condition - check ref synchronously
|
||||
if (actionInProgressRef.current) return
|
||||
@@ -498,19 +507,20 @@ function RunSkipButtons({
|
||||
|
||||
if (buttonsHidden) return null
|
||||
|
||||
// Standardized buttons for all interrupt tools: Allow, Always Allow, Skip
|
||||
return (
|
||||
<div className='mt-[12px] flex gap-[6px]'>
|
||||
<Button onClick={onRun} disabled={isProcessing} variant='primary'>
|
||||
{isProcessing ? <Loader2 className='mr-1 h-3 w-3 animate-spin' /> : null}
|
||||
Allow
|
||||
</Button>
|
||||
<Button onClick={onAlwaysAllow} disabled={isProcessing} variant='default'>
|
||||
{isProcessing ? <Loader2 className='mr-1 h-3 w-3 animate-spin' /> : null}
|
||||
Always Allow
|
||||
{acceptLabel}
|
||||
</Button>
|
||||
{isIntegration && (
|
||||
<Button onClick={onAlwaysAllow} disabled={isProcessing} variant='default'>
|
||||
{isProcessing ? <Loader2 className='mr-1 h-3 w-3 animate-spin' /> : null}
|
||||
Always Allow
|
||||
</Button>
|
||||
)}
|
||||
<Button onClick={onSkip} disabled={isProcessing} variant='default'>
|
||||
Skip
|
||||
{rejectLabel}
|
||||
</Button>
|
||||
</div>
|
||||
)
|
||||
|
||||
@@ -190,25 +190,15 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(({ panelWidth }, ref
|
||||
|
||||
/**
|
||||
* Cleanup on component unmount (page refresh, navigation, etc.)
|
||||
* Uses a ref to track sending state to avoid stale closure issues
|
||||
* Note: Parent workflow.tsx also has useStreamCleanup for page-level cleanup
|
||||
*/
|
||||
const isSendingRef = useRef(isSendingMessage)
|
||||
isSendingRef.current = isSendingMessage
|
||||
const abortMessageRef = useRef(abortMessage)
|
||||
abortMessageRef.current = abortMessage
|
||||
|
||||
useEffect(() => {
|
||||
return () => {
|
||||
// Use refs to check current values, not stale closure values
|
||||
if (isSendingRef.current) {
|
||||
abortMessageRef.current()
|
||||
if (isSendingMessage) {
|
||||
abortMessage()
|
||||
logger.info('Aborted active message streaming due to component unmount')
|
||||
}
|
||||
}
|
||||
// Empty deps - only run cleanup on actual unmount, not on re-renders
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
}, [])
|
||||
}, [isSendingMessage, abortMessage])
|
||||
|
||||
/**
|
||||
* Container-level click capture to cancel edit mode when clicking outside the current edit area
|
||||
|
||||
@@ -262,8 +262,6 @@ const SCOPE_DESCRIPTIONS: Record<string, string> = {
|
||||
'sharing.write': 'Share files and folders with others',
|
||||
// WordPress.com scopes
|
||||
global: 'Full access to manage your WordPress.com sites, posts, pages, media, and settings',
|
||||
// Zapier AI Actions scopes
|
||||
'nla:exposed_actions:execute': 'Execute Zapier AI Actions on your behalf',
|
||||
}
|
||||
|
||||
function getScopeDescription(scope: string): string {
|
||||
|
||||
@@ -363,8 +363,6 @@ export function Dropdown({
|
||||
)
|
||||
}, [multiSelect, multiValues, optionMap])
|
||||
|
||||
const isSearchable = subBlockId === 'operation'
|
||||
|
||||
return (
|
||||
<Combobox
|
||||
options={comboboxOptions}
|
||||
@@ -377,6 +375,7 @@ export function Dropdown({
|
||||
editable={false}
|
||||
onOpenChange={(open) => {
|
||||
if (open) {
|
||||
// Fetch options when the dropdown is opened to ensure freshness
|
||||
void fetchOptionsIfNeeded()
|
||||
}
|
||||
}}
|
||||
@@ -384,8 +383,6 @@ export function Dropdown({
|
||||
multiSelect={multiSelect}
|
||||
isLoading={isLoadingOptions}
|
||||
error={fetchError}
|
||||
searchable={isSearchable}
|
||||
searchPlaceholder='Search operations...'
|
||||
/>
|
||||
)
|
||||
}
|
||||
|
||||
@@ -58,7 +58,6 @@ interface CustomToolModalProps {
|
||||
|
||||
export interface CustomTool {
|
||||
type: 'custom-tool'
|
||||
id?: string
|
||||
title: string
|
||||
name: string
|
||||
description: string
|
||||
@@ -434,8 +433,6 @@ try {
|
||||
}
|
||||
}
|
||||
|
||||
let savedToolId: string | undefined
|
||||
|
||||
if (isEditing && toolIdToUpdate) {
|
||||
await updateToolMutation.mutateAsync({
|
||||
workspaceId,
|
||||
@@ -446,9 +443,8 @@ try {
|
||||
code: functionCode || '',
|
||||
},
|
||||
})
|
||||
savedToolId = toolIdToUpdate
|
||||
} else {
|
||||
const result = await createToolMutation.mutateAsync({
|
||||
await createToolMutation.mutateAsync({
|
||||
workspaceId,
|
||||
tool: {
|
||||
title: name,
|
||||
@@ -456,13 +452,10 @@ try {
|
||||
code: functionCode || '',
|
||||
},
|
||||
})
|
||||
// Get the ID from the created tool
|
||||
savedToolId = result?.[0]?.id
|
||||
}
|
||||
|
||||
const customTool: CustomTool = {
|
||||
type: 'custom-tool',
|
||||
id: savedToolId,
|
||||
title: name,
|
||||
name,
|
||||
description,
|
||||
|
||||
@@ -51,10 +51,7 @@ import {
|
||||
import { ToolCredentialSelector } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/tool-input/components/tool-credential-selector'
|
||||
import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-sub-block-value'
|
||||
import { getAllBlocks } from '@/blocks'
|
||||
import {
|
||||
type CustomTool as CustomToolDefinition,
|
||||
useCustomTools,
|
||||
} from '@/hooks/queries/custom-tools'
|
||||
import { useCustomTools } from '@/hooks/queries/custom-tools'
|
||||
import { useWorkflows } from '@/hooks/queries/workflows'
|
||||
import { useMcpTools } from '@/hooks/use-mcp-tools'
|
||||
import { getProviderFromModel, supportsToolUsageControl } from '@/providers/utils'
|
||||
@@ -88,28 +85,21 @@ interface ToolInputProps {
|
||||
|
||||
/**
|
||||
* Represents a tool selected and configured in the workflow
|
||||
*
|
||||
* @remarks
|
||||
* For custom tools (new format), we only store: type, customToolId, usageControl, isExpanded.
|
||||
* Everything else (title, schema, code) is loaded dynamically from the database.
|
||||
* Legacy custom tools with inline schema/code are still supported for backwards compatibility.
|
||||
*/
|
||||
interface StoredTool {
|
||||
/** Block type identifier */
|
||||
type: string
|
||||
/** Display title for the tool (optional for new custom tool format) */
|
||||
title?: string
|
||||
/** Direct tool ID for execution (optional for new custom tool format) */
|
||||
toolId?: string
|
||||
/** Parameter values configured by the user (optional for new custom tool format) */
|
||||
params?: Record<string, string>
|
||||
/** Display title for the tool */
|
||||
title: string
|
||||
/** Direct tool ID for execution */
|
||||
toolId: string
|
||||
/** Parameter values configured by the user */
|
||||
params: Record<string, string>
|
||||
/** Whether the tool details are expanded in UI */
|
||||
isExpanded?: boolean
|
||||
/** Database ID for custom tools (new format - reference only) */
|
||||
customToolId?: string
|
||||
/** Tool schema for custom tools (legacy format - inline) */
|
||||
/** Tool schema for custom tools */
|
||||
schema?: any
|
||||
/** Implementation code for custom tools (legacy format - inline) */
|
||||
/** Implementation code for custom tools */
|
||||
code?: string
|
||||
/** Selected operation for multi-operation tools */
|
||||
operation?: string
|
||||
@@ -117,55 +107,6 @@ interface StoredTool {
|
||||
usageControl?: 'auto' | 'force' | 'none'
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolves a custom tool reference to its full definition
|
||||
*
|
||||
* @remarks
|
||||
* Custom tools can be stored in two formats:
|
||||
* 1. Reference-only (new): { customToolId: "...", usageControl: "auto" } - loads from database
|
||||
* 2. Inline (legacy): { schema: {...}, code: "..." } - uses embedded definition
|
||||
*
|
||||
* @param storedTool - The stored tool reference
|
||||
* @param customToolsList - List of custom tools from the database
|
||||
* @returns The resolved custom tool with full definition, or null if not found
|
||||
*/
|
||||
function resolveCustomToolFromReference(
|
||||
storedTool: StoredTool,
|
||||
customToolsList: CustomToolDefinition[]
|
||||
): { schema: any; code: string; title: string } | null {
|
||||
// If the tool has a customToolId (new reference format), look it up
|
||||
if (storedTool.customToolId) {
|
||||
const customTool = customToolsList.find((t) => t.id === storedTool.customToolId)
|
||||
if (customTool) {
|
||||
return {
|
||||
schema: customTool.schema,
|
||||
code: customTool.code,
|
||||
title: customTool.title,
|
||||
}
|
||||
}
|
||||
// If not found by ID, fall through to try other methods
|
||||
logger.warn(`Custom tool not found by ID: ${storedTool.customToolId}`)
|
||||
}
|
||||
|
||||
// Legacy format: inline schema and code
|
||||
if (storedTool.schema && storedTool.code !== undefined) {
|
||||
return {
|
||||
schema: storedTool.schema,
|
||||
code: storedTool.code,
|
||||
title: storedTool.title || '',
|
||||
}
|
||||
}
|
||||
|
||||
return null
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if a stored custom tool is a reference-only format (no inline code/schema)
|
||||
*/
|
||||
function isCustomToolReference(storedTool: StoredTool): boolean {
|
||||
return storedTool.type === 'custom-tool' && !!storedTool.customToolId && !storedTool.code
|
||||
}
|
||||
|
||||
/**
|
||||
* Generic sync wrapper that synchronizes store values with local component state
|
||||
*
|
||||
@@ -1013,25 +954,18 @@ export function ToolInput({
|
||||
(customTool: CustomTool) => {
|
||||
if (isPreview || disabled) return
|
||||
|
||||
// If the tool has a database ID, store minimal reference
|
||||
// Otherwise, store inline for backwards compatibility
|
||||
const newTool: StoredTool = customTool.id
|
||||
? {
|
||||
type: 'custom-tool',
|
||||
customToolId: customTool.id,
|
||||
usageControl: 'auto',
|
||||
isExpanded: true,
|
||||
}
|
||||
: {
|
||||
type: 'custom-tool',
|
||||
title: customTool.title,
|
||||
toolId: `custom-${customTool.schema?.function?.name || 'unknown'}`,
|
||||
params: {},
|
||||
isExpanded: true,
|
||||
schema: customTool.schema,
|
||||
code: customTool.code || '',
|
||||
usageControl: 'auto',
|
||||
}
|
||||
const customToolId = `custom-${customTool.schema?.function?.name || 'unknown'}`
|
||||
|
||||
const newTool: StoredTool = {
|
||||
type: 'custom-tool',
|
||||
title: customTool.title,
|
||||
toolId: customToolId,
|
||||
params: {},
|
||||
isExpanded: true,
|
||||
schema: customTool.schema,
|
||||
code: customTool.code || '',
|
||||
usageControl: 'auto',
|
||||
}
|
||||
|
||||
setStoreValue([...selectedTools.map((tool) => ({ ...tool, isExpanded: false })), newTool])
|
||||
},
|
||||
@@ -1041,21 +975,12 @@ export function ToolInput({
|
||||
const handleEditCustomTool = useCallback(
|
||||
(toolIndex: number) => {
|
||||
const tool = selectedTools[toolIndex]
|
||||
if (tool.type !== 'custom-tool') return
|
||||
|
||||
// For reference-only tools, we need to resolve the tool from the database
|
||||
// The modal will handle loading the full definition
|
||||
const resolved = resolveCustomToolFromReference(tool, customTools)
|
||||
if (!resolved && !tool.schema) {
|
||||
// Tool not found and no inline definition - can't edit
|
||||
logger.warn('Cannot edit custom tool - not found in database and no inline definition')
|
||||
return
|
||||
}
|
||||
if (tool.type !== 'custom-tool' || !tool.schema) return
|
||||
|
||||
setEditingToolIndex(toolIndex)
|
||||
setCustomToolModalOpen(true)
|
||||
},
|
||||
[selectedTools, customTools]
|
||||
[selectedTools]
|
||||
)
|
||||
|
||||
const handleSaveCustomTool = useCallback(
|
||||
@@ -1063,26 +988,17 @@ export function ToolInput({
|
||||
if (isPreview || disabled) return
|
||||
|
||||
if (editingToolIndex !== null) {
|
||||
const existingTool = selectedTools[editingToolIndex]
|
||||
|
||||
// If the tool has a database ID, convert to minimal reference format
|
||||
// Otherwise keep inline for backwards compatibility
|
||||
const updatedTool: StoredTool = customTool.id
|
||||
? {
|
||||
type: 'custom-tool',
|
||||
customToolId: customTool.id,
|
||||
usageControl: existingTool.usageControl || 'auto',
|
||||
isExpanded: existingTool.isExpanded,
|
||||
}
|
||||
: {
|
||||
...existingTool,
|
||||
title: customTool.title,
|
||||
schema: customTool.schema,
|
||||
code: customTool.code || '',
|
||||
}
|
||||
|
||||
setStoreValue(
|
||||
selectedTools.map((tool, index) => (index === editingToolIndex ? updatedTool : tool))
|
||||
selectedTools.map((tool, index) =>
|
||||
index === editingToolIndex
|
||||
? {
|
||||
...tool,
|
||||
title: customTool.title,
|
||||
schema: customTool.schema,
|
||||
code: customTool.code || '',
|
||||
}
|
||||
: tool
|
||||
)
|
||||
)
|
||||
setEditingToolIndex(null)
|
||||
} else {
|
||||
@@ -1103,15 +1019,8 @@ export function ToolInput({
|
||||
const handleDeleteTool = useCallback(
|
||||
(toolId: string) => {
|
||||
const updatedTools = selectedTools.filter((tool) => {
|
||||
if (tool.type !== 'custom-tool') return true
|
||||
|
||||
// New format: check customToolId
|
||||
if (tool.customToolId === toolId) {
|
||||
return false
|
||||
}
|
||||
|
||||
// Legacy format: check by function name match
|
||||
if (
|
||||
tool.type === 'custom-tool' &&
|
||||
tool.schema?.function?.name &&
|
||||
customTools.some(
|
||||
(customTool) =>
|
||||
@@ -1174,12 +1083,12 @@ export function ToolInput({
|
||||
|
||||
const initialParams = initializeToolParams(newToolId, toolParams.userInputParameters, blockId)
|
||||
|
||||
const oldToolParams = tool.toolId ? getToolParametersConfig(tool.toolId, tool.type) : null
|
||||
const oldToolParams = getToolParametersConfig(tool.toolId, tool.type)
|
||||
const oldParamIds = new Set(oldToolParams?.userInputParameters.map((p) => p.id) || [])
|
||||
const newParamIds = new Set(toolParams.userInputParameters.map((p) => p.id))
|
||||
|
||||
const preservedParams: Record<string, string> = {}
|
||||
Object.entries(tool.params || {}).forEach(([paramId, value]) => {
|
||||
Object.entries(tool.params).forEach(([paramId, value]) => {
|
||||
if (newParamIds.has(paramId) && value) {
|
||||
preservedParams[paramId] = value
|
||||
}
|
||||
@@ -1757,13 +1666,15 @@ export function ToolInput({
|
||||
key={customTool.id}
|
||||
value={customTool.title}
|
||||
onSelect={() => {
|
||||
// Store minimal reference - only ID, usageControl, isExpanded
|
||||
// Everything else (title, toolId, params) loaded dynamically
|
||||
const newTool: StoredTool = {
|
||||
type: 'custom-tool',
|
||||
customToolId: customTool.id,
|
||||
usageControl: 'auto',
|
||||
title: customTool.title,
|
||||
toolId: `custom-${customTool.schema?.function?.name || 'unknown'}`,
|
||||
params: {},
|
||||
isExpanded: true,
|
||||
schema: customTool.schema,
|
||||
code: customTool.code,
|
||||
usageControl: 'auto',
|
||||
}
|
||||
|
||||
setStoreValue([
|
||||
@@ -1846,33 +1757,22 @@ export function ToolInput({
|
||||
// Get the current tool ID (may change based on operation)
|
||||
const currentToolId =
|
||||
!isCustomTool && !isMcpTool
|
||||
? getToolIdForOperation(tool.type, tool.operation) || tool.toolId || ''
|
||||
: tool.toolId || ''
|
||||
? getToolIdForOperation(tool.type, tool.operation) || tool.toolId
|
||||
: tool.toolId
|
||||
|
||||
// Get tool parameters using the new utility with block type for UI components
|
||||
const toolParams =
|
||||
!isCustomTool && !isMcpTool && currentToolId
|
||||
? getToolParametersConfig(currentToolId, tool.type)
|
||||
: null
|
||||
!isCustomTool && !isMcpTool ? getToolParametersConfig(currentToolId, tool.type) : null
|
||||
|
||||
// For custom tools, resolve from reference (new format) or use inline (legacy)
|
||||
const resolvedCustomTool = isCustomTool
|
||||
? resolveCustomToolFromReference(tool, customTools)
|
||||
: null
|
||||
|
||||
// Derive title and schema from resolved tool or inline data
|
||||
const customToolTitle = isCustomTool
|
||||
? tool.title || resolvedCustomTool?.title || 'Unknown Tool'
|
||||
: null
|
||||
const customToolSchema = isCustomTool ? tool.schema || resolvedCustomTool?.schema : null
|
||||
// For custom tools, extract parameters from schema
|
||||
const customToolParams =
|
||||
isCustomTool && customToolSchema?.function?.parameters?.properties
|
||||
? Object.entries(customToolSchema.function.parameters.properties || {}).map(
|
||||
isCustomTool && tool.schema && tool.schema.function?.parameters?.properties
|
||||
? Object.entries(tool.schema.function.parameters.properties || {}).map(
|
||||
([paramId, param]: [string, any]) => ({
|
||||
id: paramId,
|
||||
type: param.type || 'string',
|
||||
description: param.description || '',
|
||||
visibility: (customToolSchema.function.parameters.required?.includes(paramId)
|
||||
visibility: (tool.schema.function.parameters.required?.includes(paramId)
|
||||
? 'user-or-llm'
|
||||
: 'user-only') as 'user-or-llm' | 'user-only' | 'llm-only' | 'hidden',
|
||||
})
|
||||
@@ -1905,12 +1805,9 @@ export function ToolInput({
|
||||
: toolParams?.userInputParameters || []
|
||||
|
||||
// Check if tool requires OAuth
|
||||
const requiresOAuth =
|
||||
!isCustomTool && !isMcpTool && currentToolId && toolRequiresOAuth(currentToolId)
|
||||
const requiresOAuth = !isCustomTool && !isMcpTool && toolRequiresOAuth(currentToolId)
|
||||
const oauthConfig =
|
||||
!isCustomTool && !isMcpTool && currentToolId
|
||||
? getToolOAuthConfig(currentToolId)
|
||||
: null
|
||||
!isCustomTool && !isMcpTool ? getToolOAuthConfig(currentToolId) : null
|
||||
|
||||
// Tools are always expandable so users can access the interface
|
||||
const isExpandedForDisplay = isPreview
|
||||
@@ -1919,7 +1816,7 @@ export function ToolInput({
|
||||
|
||||
return (
|
||||
<div
|
||||
key={`${tool.customToolId || tool.toolId || toolIndex}-${toolIndex}`}
|
||||
key={`${tool.toolId}-${toolIndex}`}
|
||||
className={cn(
|
||||
'group relative flex flex-col overflow-visible rounded-[4px] border border-[var(--border-strong)] bg-[var(--surface-4)] transition-all duration-200 ease-in-out',
|
||||
draggedIndex === toolIndex ? 'scale-95 opacity-40' : '',
|
||||
@@ -1975,7 +1872,7 @@ export function ToolInput({
|
||||
)}
|
||||
</div>
|
||||
<span className='truncate font-medium text-[13px] text-[var(--text-primary)]'>
|
||||
{isCustomTool ? customToolTitle : tool.title}
|
||||
{tool.title}
|
||||
</span>
|
||||
</div>
|
||||
<div className='flex flex-shrink-0 items-center gap-[8px]'>
|
||||
@@ -2071,7 +1968,7 @@ export function ToolInput({
|
||||
</div>
|
||||
<div className='w-full min-w-0'>
|
||||
<ToolCredentialSelector
|
||||
value={tool.params?.credential || ''}
|
||||
value={tool.params.credential || ''}
|
||||
onChange={(value) => handleParamChange(toolIndex, 'credential', value)}
|
||||
provider={oauthConfig.provider as OAuthProvider}
|
||||
requiredScopes={
|
||||
@@ -2119,7 +2016,7 @@ export function ToolInput({
|
||||
const firstParam = params[0] as ToolParameterConfig
|
||||
const groupValue = JSON.stringify(
|
||||
params.reduce(
|
||||
(acc, p) => ({ ...acc, [p.id]: tool.params?.[p.id] === 'true' }),
|
||||
(acc, p) => ({ ...acc, [p.id]: tool.params[p.id] === 'true' }),
|
||||
{}
|
||||
)
|
||||
)
|
||||
@@ -2178,10 +2075,10 @@ export function ToolInput({
|
||||
{param.uiComponent ? (
|
||||
renderParameterInput(
|
||||
param,
|
||||
tool.params?.[param.id] || '',
|
||||
tool.params[param.id] || '',
|
||||
(value) => handleParamChange(toolIndex, param.id, value),
|
||||
toolIndex,
|
||||
tool.params || {}
|
||||
tool.params
|
||||
)
|
||||
) : (
|
||||
<ShortInput
|
||||
@@ -2197,7 +2094,7 @@ export function ToolInput({
|
||||
type: 'short-input',
|
||||
title: param.id,
|
||||
}}
|
||||
value={tool.params?.[param.id] || ''}
|
||||
value={tool.params[param.id] || ''}
|
||||
onChange={(value) =>
|
||||
handleParamChange(toolIndex, param.id, value)
|
||||
}
|
||||
@@ -2370,35 +2267,15 @@ export function ToolInput({
|
||||
blockId={blockId}
|
||||
initialValues={
|
||||
editingToolIndex !== null && selectedTools[editingToolIndex]?.type === 'custom-tool'
|
||||
? (() => {
|
||||
const storedTool = selectedTools[editingToolIndex]
|
||||
// Resolve the full tool definition from reference or inline
|
||||
const resolved = resolveCustomToolFromReference(storedTool, customTools)
|
||||
|
||||
if (resolved) {
|
||||
// Find the database ID
|
||||
const dbTool = storedTool.customToolId
|
||||
? customTools.find((t) => t.id === storedTool.customToolId)
|
||||
: customTools.find(
|
||||
(t) => t.schema?.function?.name === resolved.schema?.function?.name
|
||||
)
|
||||
|
||||
return {
|
||||
id: dbTool?.id,
|
||||
schema: resolved.schema,
|
||||
code: resolved.code,
|
||||
}
|
||||
}
|
||||
|
||||
// Fallback to inline definition (legacy format)
|
||||
return {
|
||||
id: customTools.find(
|
||||
(tool) => tool.schema?.function?.name === storedTool.schema?.function?.name
|
||||
)?.id,
|
||||
schema: storedTool.schema,
|
||||
code: storedTool.code || '',
|
||||
}
|
||||
})()
|
||||
? {
|
||||
id: customTools.find(
|
||||
(tool) =>
|
||||
tool.schema?.function?.name ===
|
||||
selectedTools[editingToolIndex].schema?.function?.name
|
||||
)?.id,
|
||||
schema: selectedTools[editingToolIndex].schema,
|
||||
code: selectedTools[editingToolIndex].code || '',
|
||||
}
|
||||
: undefined
|
||||
}
|
||||
/>
|
||||
|
||||
@@ -1,9 +1,7 @@
|
||||
import { v4 as uuidv4 } from 'uuid'
|
||||
import type { ExecutionResult, StreamingExecution } from '@/executor/types'
|
||||
import { useExecutionStore } from '@/stores/execution/store'
|
||||
import { useTerminalConsoleStore } from '@/stores/terminal'
|
||||
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
||||
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
|
||||
|
||||
export interface WorkflowExecutionOptions {
|
||||
workflowInput?: any
|
||||
@@ -28,11 +26,6 @@ export async function executeWorkflowWithFullLogging(
|
||||
|
||||
const executionId = options.executionId || uuidv4()
|
||||
const { addConsole } = useTerminalConsoleStore.getState()
|
||||
const { setActiveBlocks, setBlockRunStatus, setEdgeRunStatus } = useExecutionStore.getState()
|
||||
const workflowEdges = useWorkflowStore.getState().edges
|
||||
|
||||
// Track active blocks for pulsing animation
|
||||
const activeBlocksSet = new Set<string>()
|
||||
|
||||
const payload: any = {
|
||||
input: options.workflowInput,
|
||||
@@ -88,29 +81,7 @@ export async function executeWorkflowWithFullLogging(
|
||||
const event = JSON.parse(data)
|
||||
|
||||
switch (event.type) {
|
||||
case 'block:started': {
|
||||
// Add block to active set for pulsing animation
|
||||
activeBlocksSet.add(event.data.blockId)
|
||||
setActiveBlocks(new Set(activeBlocksSet))
|
||||
|
||||
// Track edges that led to this block as soon as execution starts
|
||||
const incomingEdges = workflowEdges.filter(
|
||||
(edge) => edge.target === event.data.blockId
|
||||
)
|
||||
incomingEdges.forEach((edge) => {
|
||||
setEdgeRunStatus(edge.id, 'success')
|
||||
})
|
||||
break
|
||||
}
|
||||
|
||||
case 'block:completed':
|
||||
// Remove block from active set
|
||||
activeBlocksSet.delete(event.data.blockId)
|
||||
setActiveBlocks(new Set(activeBlocksSet))
|
||||
|
||||
// Track successful block execution in run path
|
||||
setBlockRunStatus(event.data.blockId, 'success')
|
||||
|
||||
addConsole({
|
||||
input: event.data.input || {},
|
||||
output: event.data.output,
|
||||
@@ -134,13 +105,6 @@ export async function executeWorkflowWithFullLogging(
|
||||
break
|
||||
|
||||
case 'block:error':
|
||||
// Remove block from active set
|
||||
activeBlocksSet.delete(event.data.blockId)
|
||||
setActiveBlocks(new Set(activeBlocksSet))
|
||||
|
||||
// Track failed block execution in run path
|
||||
setBlockRunStatus(event.data.blockId, 'error')
|
||||
|
||||
addConsole({
|
||||
input: event.data.input || {},
|
||||
output: {},
|
||||
@@ -183,8 +147,6 @@ export async function executeWorkflowWithFullLogging(
|
||||
}
|
||||
} finally {
|
||||
reader.releaseLock()
|
||||
// Clear active blocks when execution ends
|
||||
setActiveBlocks(new Set())
|
||||
}
|
||||
|
||||
return executionResult
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
|
||||
import { useMemo, useState } from 'react'
|
||||
import { Check, Copy, Plus, Search } from 'lucide-react'
|
||||
import { Button, Input as EmcnInput } from '@/components/emcn'
|
||||
import { Button } from '@/components/emcn'
|
||||
import {
|
||||
Modal,
|
||||
ModalBody,
|
||||
@@ -28,11 +28,7 @@ function CopilotKeySkeleton() {
|
||||
return (
|
||||
<div className='flex items-center justify-between gap-[12px]'>
|
||||
<div className='flex min-w-0 flex-col justify-center gap-[1px]'>
|
||||
<div className='flex items-center gap-[6px]'>
|
||||
<Skeleton className='h-5 w-[80px]' />
|
||||
<Skeleton className='h-5 w-[140px]' />
|
||||
</div>
|
||||
<Skeleton className='h-5 w-[100px]' />
|
||||
<Skeleton className='h-[13px] w-[120px]' />
|
||||
</div>
|
||||
<Skeleton className='h-[26px] w-[48px] rounded-[6px]' />
|
||||
</div>
|
||||
@@ -48,50 +44,28 @@ export function Copilot() {
|
||||
const generateKey = useGenerateCopilotKey()
|
||||
const deleteKeyMutation = useDeleteCopilotKey()
|
||||
|
||||
const [isCreateDialogOpen, setIsCreateDialogOpen] = useState(false)
|
||||
const [newKeyName, setNewKeyName] = useState('')
|
||||
const [newKey, setNewKey] = useState<string | null>(null)
|
||||
const [showNewKeyDialog, setShowNewKeyDialog] = useState(false)
|
||||
const [newKey, setNewKey] = useState<string | null>(null)
|
||||
const [copySuccess, setCopySuccess] = useState(false)
|
||||
const [deleteKey, setDeleteKey] = useState<CopilotKey | null>(null)
|
||||
const [showDeleteDialog, setShowDeleteDialog] = useState(false)
|
||||
const [searchTerm, setSearchTerm] = useState('')
|
||||
const [createError, setCreateError] = useState<string | null>(null)
|
||||
|
||||
const filteredKeys = useMemo(() => {
|
||||
if (!searchTerm.trim()) return keys
|
||||
const term = searchTerm.toLowerCase()
|
||||
return keys.filter(
|
||||
(key) =>
|
||||
key.name?.toLowerCase().includes(term) || key.displayKey?.toLowerCase().includes(term)
|
||||
)
|
||||
return keys.filter((key) => key.displayKey?.toLowerCase().includes(term))
|
||||
}, [keys, searchTerm])
|
||||
|
||||
const handleCreateKey = async () => {
|
||||
if (!newKeyName.trim()) return
|
||||
|
||||
const trimmedName = newKeyName.trim()
|
||||
const isDuplicate = keys.some((k) => k.name === trimmedName)
|
||||
if (isDuplicate) {
|
||||
setCreateError(
|
||||
`A Copilot API key named "${trimmedName}" already exists. Please choose a different name.`
|
||||
)
|
||||
return
|
||||
}
|
||||
|
||||
setCreateError(null)
|
||||
const onGenerate = async () => {
|
||||
try {
|
||||
const data = await generateKey.mutateAsync({ name: trimmedName })
|
||||
const data = await generateKey.mutateAsync()
|
||||
if (data?.key?.apiKey) {
|
||||
setNewKey(data.key.apiKey)
|
||||
setShowNewKeyDialog(true)
|
||||
setNewKeyName('')
|
||||
setCreateError(null)
|
||||
setIsCreateDialogOpen(false)
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error('Failed to generate copilot API key', { error })
|
||||
setCreateError('Failed to create API key. Please check your connection and try again.')
|
||||
}
|
||||
}
|
||||
|
||||
@@ -114,15 +88,6 @@ export function Copilot() {
|
||||
}
|
||||
}
|
||||
|
||||
const formatDate = (dateString?: string | null) => {
|
||||
if (!dateString) return 'Never'
|
||||
return new Date(dateString).toLocaleDateString('en-US', {
|
||||
year: 'numeric',
|
||||
month: 'short',
|
||||
day: 'numeric',
|
||||
})
|
||||
}
|
||||
|
||||
const hasKeys = keys.length > 0
|
||||
const showEmptyState = !hasKeys
|
||||
const showNoResults = searchTerm.trim() && filteredKeys.length === 0 && keys.length > 0
|
||||
@@ -138,23 +103,20 @@ export function Copilot() {
|
||||
strokeWidth={2}
|
||||
/>
|
||||
<Input
|
||||
placeholder='Search API keys...'
|
||||
placeholder='Search keys...'
|
||||
value={searchTerm}
|
||||
onChange={(e) => setSearchTerm(e.target.value)}
|
||||
className='h-auto flex-1 border-0 bg-transparent p-0 font-base leading-none placeholder:text-[var(--text-tertiary)] focus-visible:ring-0 focus-visible:ring-offset-0'
|
||||
/>
|
||||
</div>
|
||||
<Button
|
||||
onClick={() => {
|
||||
setIsCreateDialogOpen(true)
|
||||
setCreateError(null)
|
||||
}}
|
||||
onClick={onGenerate}
|
||||
variant='primary'
|
||||
disabled={isLoading}
|
||||
disabled={isLoading || generateKey.isPending}
|
||||
className='!bg-[var(--brand-tertiary-2)] !text-[var(--text-inverse)] hover:!bg-[var(--brand-tertiary-2)]/90 disabled:cursor-not-allowed disabled:opacity-60'
|
||||
>
|
||||
<Plus className='mr-[6px] h-[13px] w-[13px]' />
|
||||
Create
|
||||
{generateKey.isPending ? 'Creating...' : 'Create'}
|
||||
</Button>
|
||||
</div>
|
||||
|
||||
@@ -175,15 +137,7 @@ export function Copilot() {
|
||||
{filteredKeys.map((key) => (
|
||||
<div key={key.id} className='flex items-center justify-between gap-[12px]'>
|
||||
<div className='flex min-w-0 flex-col justify-center gap-[1px]'>
|
||||
<div className='flex items-center gap-[6px]'>
|
||||
<span className='max-w-[280px] truncate font-medium text-[14px]'>
|
||||
{key.name || 'Unnamed Key'}
|
||||
</span>
|
||||
<span className='text-[13px] text-[var(--text-secondary)]'>
|
||||
(last used: {formatDate(key.lastUsed).toLowerCase()})
|
||||
</span>
|
||||
</div>
|
||||
<p className='truncate text-[13px] text-[var(--text-muted)]'>
|
||||
<p className='truncate text-[13px] text-[var(--text-primary)]'>
|
||||
{key.displayKey}
|
||||
</p>
|
||||
</div>
|
||||
@@ -201,7 +155,7 @@ export function Copilot() {
|
||||
))}
|
||||
{showNoResults && (
|
||||
<div className='py-[16px] text-center text-[13px] text-[var(--text-muted)]'>
|
||||
No API keys found matching "{searchTerm}"
|
||||
No keys found matching "{searchTerm}"
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
@@ -209,60 +163,6 @@ export function Copilot() {
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Create API Key Dialog */}
|
||||
<Modal open={isCreateDialogOpen} onOpenChange={setIsCreateDialogOpen}>
|
||||
<ModalContent className='w-[400px]'>
|
||||
<ModalHeader>Create new API key</ModalHeader>
|
||||
<ModalBody>
|
||||
<p className='text-[12px] text-[var(--text-tertiary)]'>
|
||||
This key will allow access to Copilot features. Make sure to copy it after creation as
|
||||
you won't be able to see it again.
|
||||
</p>
|
||||
|
||||
<div className='mt-[16px] flex flex-col gap-[8px]'>
|
||||
<p className='font-medium text-[13px] text-[var(--text-secondary)]'>
|
||||
Enter a name for your API key to help you identify it later.
|
||||
</p>
|
||||
<EmcnInput
|
||||
value={newKeyName}
|
||||
onChange={(e) => {
|
||||
setNewKeyName(e.target.value)
|
||||
if (createError) setCreateError(null)
|
||||
}}
|
||||
placeholder='e.g., Development, Production'
|
||||
className='h-9'
|
||||
autoFocus
|
||||
/>
|
||||
{createError && (
|
||||
<p className='text-[11px] text-[var(--text-error)] leading-tight'>{createError}</p>
|
||||
)}
|
||||
</div>
|
||||
</ModalBody>
|
||||
|
||||
<ModalFooter>
|
||||
<Button
|
||||
variant='default'
|
||||
onClick={() => {
|
||||
setIsCreateDialogOpen(false)
|
||||
setNewKeyName('')
|
||||
setCreateError(null)
|
||||
}}
|
||||
>
|
||||
Cancel
|
||||
</Button>
|
||||
<Button
|
||||
type='button'
|
||||
variant='primary'
|
||||
onClick={handleCreateKey}
|
||||
disabled={!newKeyName.trim() || generateKey.isPending}
|
||||
className='!bg-[var(--brand-tertiary-2)] !text-[var(--text-inverse)] hover:!bg-[var(--brand-tertiary-2)]/90'
|
||||
>
|
||||
{generateKey.isPending ? 'Creating...' : 'Create'}
|
||||
</Button>
|
||||
</ModalFooter>
|
||||
</ModalContent>
|
||||
</Modal>
|
||||
|
||||
{/* New API Key Dialog */}
|
||||
<Modal
|
||||
open={showNewKeyDialog}
|
||||
@@ -315,11 +215,7 @@ export function Copilot() {
|
||||
<ModalHeader>Delete API key</ModalHeader>
|
||||
<ModalBody>
|
||||
<p className='text-[12px] text-[var(--text-tertiary)]'>
|
||||
Deleting{' '}
|
||||
<span className='font-medium text-[var(--text-primary)]'>
|
||||
{deleteKey?.name || 'Unnamed Key'}
|
||||
</span>{' '}
|
||||
will immediately revoke access for any integrations using it.{' '}
|
||||
Deleting this API key will immediately revoke access for any integrations using it.{' '}
|
||||
<span className='text-[var(--text-error)]'>This action cannot be undone.</span>
|
||||
</p>
|
||||
</ModalBody>
|
||||
|
||||
@@ -134,7 +134,7 @@ async function executeWebhookJobInternal(
|
||||
const loggingSession = new LoggingSession(
|
||||
payload.workflowId,
|
||||
executionId,
|
||||
payload.provider,
|
||||
payload.provider || 'webhook',
|
||||
requestId
|
||||
)
|
||||
|
||||
|
||||
@@ -11,13 +11,13 @@ import { and, eq, isNull, lte, or, sql } from 'drizzle-orm'
|
||||
import { v4 as uuidv4 } from 'uuid'
|
||||
import { checkUsageStatus } from '@/lib/billing/calculations/usage-monitor'
|
||||
import { getHighestPrioritySubscription } from '@/lib/billing/core/subscription'
|
||||
import { RateLimiter } from '@/lib/core/rate-limiter'
|
||||
import { decryptSecret } from '@/lib/core/security/encryption'
|
||||
import { getBaseUrl } from '@/lib/core/utils/urls'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import type { TraceSpan, WorkflowExecutionLog } from '@/lib/logs/types'
|
||||
import { sendEmail } from '@/lib/messaging/email/mailer'
|
||||
import type { AlertConfig } from '@/lib/notifications/alert-rules'
|
||||
import { RateLimiter } from '@/services/queue'
|
||||
|
||||
const logger = createLogger('WorkspaceNotificationDelivery')
|
||||
|
||||
|
||||
@@ -1,59 +0,0 @@
|
||||
import { DuckDuckGoIcon } from '@/components/icons'
|
||||
import type { BlockConfig } from '@/blocks/types'
|
||||
import type { DuckDuckGoResponse } from '@/tools/duckduckgo/types'
|
||||
|
||||
export const DuckDuckGoBlock: BlockConfig<DuckDuckGoResponse> = {
|
||||
type: 'duckduckgo',
|
||||
name: 'DuckDuckGo',
|
||||
description: 'Search with DuckDuckGo',
|
||||
longDescription:
|
||||
'Search the web using DuckDuckGo Instant Answers API. Returns instant answers, abstracts, related topics, and more. Free to use without an API key.',
|
||||
docsLink: 'https://docs.sim.ai/tools/duckduckgo',
|
||||
category: 'tools',
|
||||
bgColor: '#FFFFFF',
|
||||
icon: DuckDuckGoIcon,
|
||||
subBlocks: [
|
||||
{
|
||||
id: 'query',
|
||||
title: 'Search Query',
|
||||
type: 'long-input',
|
||||
placeholder: 'Enter your search query...',
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
id: 'noHtml',
|
||||
title: 'Remove HTML',
|
||||
type: 'switch',
|
||||
defaultValue: true,
|
||||
},
|
||||
{
|
||||
id: 'skipDisambig',
|
||||
title: 'Skip Disambiguation',
|
||||
type: 'switch',
|
||||
},
|
||||
],
|
||||
tools: {
|
||||
access: ['duckduckgo_search'],
|
||||
config: {
|
||||
tool: () => 'duckduckgo_search',
|
||||
},
|
||||
},
|
||||
inputs: {
|
||||
query: { type: 'string', description: 'Search query terms' },
|
||||
noHtml: { type: 'boolean', description: 'Remove HTML from text in results' },
|
||||
skipDisambig: { type: 'boolean', description: 'Skip disambiguation results' },
|
||||
},
|
||||
outputs: {
|
||||
heading: { type: 'string', description: 'The heading/title of the instant answer' },
|
||||
abstract: { type: 'string', description: 'A short abstract summary of the topic' },
|
||||
abstractText: { type: 'string', description: 'Plain text version of the abstract' },
|
||||
abstractSource: { type: 'string', description: 'The source of the abstract' },
|
||||
abstractURL: { type: 'string', description: 'URL to the source of the abstract' },
|
||||
image: { type: 'string', description: 'URL to an image related to the topic' },
|
||||
answer: { type: 'string', description: 'Direct answer if available' },
|
||||
answerType: { type: 'string', description: 'Type of the answer' },
|
||||
type: { type: 'string', description: 'Response type (A, D, C, N, E)' },
|
||||
relatedTopics: { type: 'json', description: 'Array of related topics' },
|
||||
results: { type: 'json', description: 'Array of external link results' },
|
||||
},
|
||||
}
|
||||
@@ -1,36 +0,0 @@
|
||||
import { RssIcon } from '@/components/icons'
|
||||
import type { BlockConfig } from '@/blocks/types'
|
||||
import { getTrigger } from '@/triggers'
|
||||
|
||||
export const RssBlock: BlockConfig = {
|
||||
type: 'rss',
|
||||
name: 'RSS Feed',
|
||||
description: 'Monitor RSS feeds and trigger workflows when new items are published',
|
||||
longDescription:
|
||||
'Subscribe to any RSS or Atom feed and automatically trigger your workflow when new content is published. Perfect for monitoring blogs, news sites, podcasts, and any content that publishes an RSS feed.',
|
||||
category: 'triggers',
|
||||
bgColor: '#F97316',
|
||||
icon: RssIcon,
|
||||
triggerAllowed: true,
|
||||
|
||||
subBlocks: [...getTrigger('rss_poller').subBlocks],
|
||||
|
||||
tools: {
|
||||
access: [], // Trigger-only for now
|
||||
},
|
||||
|
||||
inputs: {},
|
||||
|
||||
outputs: {
|
||||
title: { type: 'string', description: 'Item title' },
|
||||
link: { type: 'string', description: 'Item link' },
|
||||
pubDate: { type: 'string', description: 'Publication date' },
|
||||
item: { type: 'json', description: 'Raw item object with all fields' },
|
||||
feed: { type: 'json', description: 'Raw feed object with all fields' },
|
||||
},
|
||||
|
||||
triggers: {
|
||||
enabled: true,
|
||||
available: ['rss_poller'],
|
||||
},
|
||||
}
|
||||
@@ -1,306 +0,0 @@
|
||||
import { SftpIcon } from '@/components/icons'
|
||||
import type { BlockConfig } from '@/blocks/types'
|
||||
import { AuthMode } from '@/blocks/types'
|
||||
import type { SftpUploadResult } from '@/tools/sftp/types'
|
||||
|
||||
export const SftpBlock: BlockConfig<SftpUploadResult> = {
|
||||
type: 'sftp',
|
||||
name: 'SFTP',
|
||||
description: 'Transfer files via SFTP (SSH File Transfer Protocol)',
|
||||
longDescription:
|
||||
'Upload, download, list, and manage files on remote servers via SFTP. Supports both password and private key authentication for secure file transfers.',
|
||||
docsLink: 'https://docs.sim.ai/tools/sftp',
|
||||
category: 'tools',
|
||||
bgColor: '#2D3748',
|
||||
icon: SftpIcon,
|
||||
authMode: AuthMode.ApiKey,
|
||||
|
||||
subBlocks: [
|
||||
{
|
||||
id: 'operation',
|
||||
title: 'Operation',
|
||||
type: 'dropdown',
|
||||
options: [
|
||||
{ label: 'Upload Files', id: 'sftp_upload' },
|
||||
{ label: 'Create File', id: 'sftp_create' },
|
||||
{ label: 'Download File', id: 'sftp_download' },
|
||||
{ label: 'List Directory', id: 'sftp_list' },
|
||||
{ label: 'Delete File/Directory', id: 'sftp_delete' },
|
||||
{ label: 'Create Directory', id: 'sftp_mkdir' },
|
||||
],
|
||||
value: () => 'sftp_upload',
|
||||
},
|
||||
|
||||
{
|
||||
id: 'host',
|
||||
title: 'SFTP Host',
|
||||
type: 'short-input',
|
||||
placeholder: 'sftp.example.com or 192.168.1.100',
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
id: 'port',
|
||||
title: 'SFTP Port',
|
||||
type: 'short-input',
|
||||
placeholder: '22',
|
||||
value: () => '22',
|
||||
},
|
||||
{
|
||||
id: 'username',
|
||||
title: 'Username',
|
||||
type: 'short-input',
|
||||
placeholder: 'sftp-user',
|
||||
required: true,
|
||||
},
|
||||
|
||||
{
|
||||
id: 'authMethod',
|
||||
title: 'Authentication Method',
|
||||
type: 'dropdown',
|
||||
options: [
|
||||
{ label: 'Password', id: 'password' },
|
||||
{ label: 'Private Key', id: 'privateKey' },
|
||||
],
|
||||
value: () => 'password',
|
||||
},
|
||||
|
||||
{
|
||||
id: 'password',
|
||||
title: 'Password',
|
||||
type: 'short-input',
|
||||
password: true,
|
||||
placeholder: 'Your SFTP password',
|
||||
condition: { field: 'authMethod', value: 'password' },
|
||||
},
|
||||
|
||||
{
|
||||
id: 'privateKey',
|
||||
title: 'Private Key',
|
||||
type: 'code',
|
||||
placeholder: '-----BEGIN OPENSSH PRIVATE KEY-----\n...',
|
||||
condition: { field: 'authMethod', value: 'privateKey' },
|
||||
},
|
||||
{
|
||||
id: 'passphrase',
|
||||
title: 'Passphrase',
|
||||
type: 'short-input',
|
||||
password: true,
|
||||
placeholder: 'Passphrase for encrypted key (optional)',
|
||||
condition: { field: 'authMethod', value: 'privateKey' },
|
||||
},
|
||||
|
||||
{
|
||||
id: 'remotePath',
|
||||
title: 'Remote Path',
|
||||
type: 'short-input',
|
||||
placeholder: '/home/user/uploads',
|
||||
required: true,
|
||||
},
|
||||
|
||||
{
|
||||
id: 'uploadFiles',
|
||||
title: 'Files to Upload',
|
||||
type: 'file-upload',
|
||||
canonicalParamId: 'files',
|
||||
placeholder: 'Select files to upload',
|
||||
mode: 'basic',
|
||||
multiple: true,
|
||||
required: false,
|
||||
condition: { field: 'operation', value: 'sftp_upload' },
|
||||
},
|
||||
{
|
||||
id: 'files',
|
||||
title: 'File Reference',
|
||||
type: 'short-input',
|
||||
canonicalParamId: 'files',
|
||||
placeholder: 'Reference file from previous block (e.g., {{block_name.file}})',
|
||||
mode: 'advanced',
|
||||
required: false,
|
||||
condition: { field: 'operation', value: 'sftp_upload' },
|
||||
},
|
||||
|
||||
{
|
||||
id: 'overwrite',
|
||||
title: 'Overwrite Existing Files',
|
||||
type: 'switch',
|
||||
defaultValue: true,
|
||||
condition: { field: 'operation', value: ['sftp_upload', 'sftp_create'] },
|
||||
},
|
||||
|
||||
{
|
||||
id: 'permissions',
|
||||
title: 'File Permissions',
|
||||
type: 'short-input',
|
||||
placeholder: '0644',
|
||||
condition: { field: 'operation', value: ['sftp_upload', 'sftp_create'] },
|
||||
mode: 'advanced',
|
||||
},
|
||||
|
||||
{
|
||||
id: 'fileName',
|
||||
title: 'File Name',
|
||||
type: 'short-input',
|
||||
placeholder: 'filename.txt',
|
||||
condition: { field: 'operation', value: 'sftp_create' },
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
id: 'fileContent',
|
||||
title: 'File Content',
|
||||
type: 'code',
|
||||
placeholder: 'Text content to write to the file',
|
||||
condition: { field: 'operation', value: 'sftp_create' },
|
||||
required: true,
|
||||
},
|
||||
|
||||
{
|
||||
id: 'encoding',
|
||||
title: 'Output Encoding',
|
||||
type: 'dropdown',
|
||||
options: [
|
||||
{ label: 'UTF-8 (Text)', id: 'utf-8' },
|
||||
{ label: 'Base64 (Binary)', id: 'base64' },
|
||||
],
|
||||
value: () => 'utf-8',
|
||||
condition: { field: 'operation', value: 'sftp_download' },
|
||||
},
|
||||
|
||||
{
|
||||
id: 'detailed',
|
||||
title: 'Show Detailed Info',
|
||||
type: 'switch',
|
||||
defaultValue: false,
|
||||
condition: { field: 'operation', value: 'sftp_list' },
|
||||
},
|
||||
|
||||
{
|
||||
id: 'recursive',
|
||||
title: 'Recursive Delete',
|
||||
type: 'switch',
|
||||
defaultValue: false,
|
||||
condition: { field: 'operation', value: 'sftp_delete' },
|
||||
},
|
||||
|
||||
{
|
||||
id: 'mkdirRecursive',
|
||||
title: 'Create Parent Directories',
|
||||
type: 'switch',
|
||||
defaultValue: true,
|
||||
condition: { field: 'operation', value: 'sftp_mkdir' },
|
||||
},
|
||||
],
|
||||
|
||||
tools: {
|
||||
access: ['sftp_upload', 'sftp_download', 'sftp_list', 'sftp_delete', 'sftp_mkdir'],
|
||||
config: {
|
||||
tool: (params) => {
|
||||
const operation = params.operation || 'sftp_upload'
|
||||
if (operation === 'sftp_create') return 'sftp_upload'
|
||||
return operation
|
||||
},
|
||||
params: (params) => {
|
||||
const connectionConfig: Record<string, unknown> = {
|
||||
host: params.host,
|
||||
port:
|
||||
typeof params.port === 'string' ? Number.parseInt(params.port, 10) : params.port || 22,
|
||||
username: params.username,
|
||||
}
|
||||
|
||||
if (params.authMethod === 'privateKey') {
|
||||
connectionConfig.privateKey = params.privateKey
|
||||
if (params.passphrase) {
|
||||
connectionConfig.passphrase = params.passphrase
|
||||
}
|
||||
} else {
|
||||
connectionConfig.password = params.password
|
||||
}
|
||||
|
||||
const operation = params.operation || 'sftp_upload'
|
||||
|
||||
switch (operation) {
|
||||
case 'sftp_upload':
|
||||
return {
|
||||
...connectionConfig,
|
||||
remotePath: params.remotePath,
|
||||
files: params.files,
|
||||
overwrite: params.overwrite !== false,
|
||||
permissions: params.permissions,
|
||||
}
|
||||
case 'sftp_create':
|
||||
return {
|
||||
...connectionConfig,
|
||||
remotePath: params.remotePath,
|
||||
fileContent: params.fileContent,
|
||||
fileName: params.fileName,
|
||||
overwrite: params.overwrite !== false,
|
||||
permissions: params.permissions,
|
||||
}
|
||||
case 'sftp_download':
|
||||
return {
|
||||
...connectionConfig,
|
||||
remotePath: params.remotePath,
|
||||
encoding: params.encoding || 'utf-8',
|
||||
}
|
||||
case 'sftp_list':
|
||||
return {
|
||||
...connectionConfig,
|
||||
remotePath: params.remotePath,
|
||||
detailed: params.detailed || false,
|
||||
}
|
||||
case 'sftp_delete':
|
||||
return {
|
||||
...connectionConfig,
|
||||
remotePath: params.remotePath,
|
||||
recursive: params.recursive || false,
|
||||
}
|
||||
case 'sftp_mkdir':
|
||||
return {
|
||||
...connectionConfig,
|
||||
remotePath: params.remotePath,
|
||||
recursive: params.mkdirRecursive !== false,
|
||||
}
|
||||
default:
|
||||
return {
|
||||
...connectionConfig,
|
||||
remotePath: params.remotePath,
|
||||
}
|
||||
}
|
||||
},
|
||||
},
|
||||
},
|
||||
|
||||
inputs: {
|
||||
operation: { type: 'string', description: 'SFTP operation to perform' },
|
||||
host: { type: 'string', description: 'SFTP server hostname' },
|
||||
port: { type: 'number', description: 'SFTP server port' },
|
||||
username: { type: 'string', description: 'SFTP username' },
|
||||
authMethod: { type: 'string', description: 'Authentication method (password or privateKey)' },
|
||||
password: { type: 'string', description: 'Password for authentication' },
|
||||
privateKey: { type: 'string', description: 'Private key for authentication' },
|
||||
passphrase: { type: 'string', description: 'Passphrase for encrypted key' },
|
||||
remotePath: { type: 'string', description: 'Remote path on the SFTP server' },
|
||||
files: { type: 'array', description: 'Files to upload (UserFile array)' },
|
||||
fileContent: { type: 'string', description: 'Direct content to upload' },
|
||||
fileName: { type: 'string', description: 'File name for direct content' },
|
||||
overwrite: { type: 'boolean', description: 'Overwrite existing files' },
|
||||
permissions: { type: 'string', description: 'File permissions (e.g., 0644)' },
|
||||
encoding: { type: 'string', description: 'Output encoding for download' },
|
||||
detailed: { type: 'boolean', description: 'Show detailed file info' },
|
||||
recursive: { type: 'boolean', description: 'Recursive delete' },
|
||||
mkdirRecursive: { type: 'boolean', description: 'Create parent directories' },
|
||||
},
|
||||
|
||||
outputs: {
|
||||
success: { type: 'boolean', description: 'Whether the operation was successful' },
|
||||
uploadedFiles: { type: 'json', description: 'Array of uploaded file details' },
|
||||
fileName: { type: 'string', description: 'Downloaded file name' },
|
||||
content: { type: 'string', description: 'Downloaded file content' },
|
||||
size: { type: 'number', description: 'File size in bytes' },
|
||||
entries: { type: 'json', description: 'Directory listing entries' },
|
||||
count: { type: 'number', description: 'Number of entries' },
|
||||
deletedPath: { type: 'string', description: 'Path that was deleted' },
|
||||
createdPath: { type: 'string', description: 'Directory that was created' },
|
||||
message: { type: 'string', description: 'Operation status message' },
|
||||
error: { type: 'string', description: 'Error message if operation failed' },
|
||||
},
|
||||
}
|
||||
@@ -11,7 +11,7 @@ export const SmtpBlock: BlockConfig<SmtpSendMailResult> = {
|
||||
'Send emails using any SMTP server (Gmail, Outlook, custom servers, etc.). Configure SMTP connection settings and send emails with full control over content, recipients, and attachments.',
|
||||
docsLink: 'https://docs.sim.ai/tools/smtp',
|
||||
category: 'tools',
|
||||
bgColor: '#2D3748',
|
||||
bgColor: '#4A5568',
|
||||
icon: SmtpIcon,
|
||||
authMode: AuthMode.ApiKey,
|
||||
|
||||
|
||||
@@ -1,808 +0,0 @@
|
||||
import { ZapierIcon } from '@/components/icons'
|
||||
import type { BlockConfig } from '@/blocks/types'
|
||||
import { AuthMode } from '@/blocks/types'
|
||||
import type { ZapierResponse } from '@/tools/zapier/types'
|
||||
|
||||
export const ZapierBlock: BlockConfig<ZapierResponse> = {
|
||||
type: 'zapier',
|
||||
name: 'Zapier',
|
||||
description: 'Execute actions across 7,000+ apps using Zapier AI Actions',
|
||||
authMode: AuthMode.OAuth,
|
||||
longDescription:
|
||||
'Connect to Zapier AI Actions to execute any of 30,000+ actions across 7,000+ apps. Send emails, create documents, update CRMs, post messages, and more - all through natural language instructions.',
|
||||
docsLink: 'https://docs.sim.ai/tools/zapier',
|
||||
category: 'tools',
|
||||
bgColor: '#FFFFFF',
|
||||
icon: ZapierIcon,
|
||||
subBlocks: [
|
||||
{
|
||||
id: 'operation',
|
||||
title: 'Operation',
|
||||
type: 'dropdown',
|
||||
options: [
|
||||
{ label: 'Execute Action', id: 'execute' },
|
||||
{ label: 'Stateless Execute', id: 'stateless_execute' },
|
||||
{ label: 'List Actions', id: 'list' },
|
||||
{ label: 'Search Apps', id: 'search_apps' },
|
||||
{ label: 'Search App Actions', id: 'search_app_actions' },
|
||||
{ label: 'Find Actions', id: 'guess' },
|
||||
{ label: 'Get Action Details', id: 'get_action_details' },
|
||||
{ label: 'Create Action', id: 'create' },
|
||||
{ label: 'Update Action', id: 'update' },
|
||||
{ label: 'Delete Action', id: 'delete' },
|
||||
],
|
||||
value: () => 'execute',
|
||||
},
|
||||
{
|
||||
id: 'credential',
|
||||
title: 'Zapier Account',
|
||||
type: 'oauth-input',
|
||||
serviceId: 'zapier',
|
||||
requiredScopes: ['openid', 'nla:exposed_actions:execute'],
|
||||
placeholder: 'Select Zapier account',
|
||||
required: true,
|
||||
},
|
||||
// Execute Action fields
|
||||
{
|
||||
id: 'actionId',
|
||||
title: 'Action ID',
|
||||
type: 'short-input',
|
||||
placeholder: 'Enter the AI Action ID to execute',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'execute',
|
||||
},
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
id: 'instructions',
|
||||
title: 'Instructions',
|
||||
type: 'long-input',
|
||||
placeholder:
|
||||
'Describe what you want to do in plain English (e.g., "Send a message to #general saying hello")',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'execute',
|
||||
},
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
id: 'params',
|
||||
title: 'Parameters',
|
||||
type: 'code',
|
||||
placeholder: '{\n "channel": {"mode": "locked", "value": "#general"}\n}',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'execute',
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'previewOnly',
|
||||
title: 'Preview Mode',
|
||||
type: 'dropdown',
|
||||
options: [
|
||||
{ label: 'Execute', id: 'false' },
|
||||
{ label: 'Preview Only', id: 'true' },
|
||||
],
|
||||
value: () => 'false',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'execute',
|
||||
},
|
||||
},
|
||||
// Search Apps fields
|
||||
{
|
||||
id: 'searchQuery',
|
||||
title: 'Search Query',
|
||||
type: 'short-input',
|
||||
placeholder: 'Enter app name to search (e.g., "slack", "google")',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'search_apps',
|
||||
},
|
||||
},
|
||||
// Guess Actions fields
|
||||
{
|
||||
id: 'guessQuery',
|
||||
title: 'What do you want to do?',
|
||||
type: 'long-input',
|
||||
placeholder:
|
||||
'Describe in plain English (e.g., "send a Slack message", "create a Google Doc")',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'guess',
|
||||
},
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
id: 'actionTypes',
|
||||
title: 'Action Types',
|
||||
type: 'checkbox-list',
|
||||
options: [
|
||||
{ label: 'Write (Create/Send)', id: 'actionTypes_write' },
|
||||
{ label: 'Search (Find)', id: 'actionTypes_search' },
|
||||
{ label: 'Read (Get)', id: 'actionTypes_read' },
|
||||
],
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'guess',
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'resultCount',
|
||||
title: 'Max Results',
|
||||
type: 'short-input',
|
||||
placeholder: '25',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'guess',
|
||||
},
|
||||
},
|
||||
// Create Action fields
|
||||
{
|
||||
id: 'app',
|
||||
title: 'App',
|
||||
type: 'short-input',
|
||||
placeholder: 'App identifier (e.g., "slack", "gmail")',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'create',
|
||||
},
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
id: 'action',
|
||||
title: 'Action',
|
||||
type: 'short-input',
|
||||
placeholder: 'Action identifier (e.g., "send_channel_message")',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'create',
|
||||
},
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
id: 'createActionType',
|
||||
title: 'Action Type',
|
||||
type: 'dropdown',
|
||||
options: [
|
||||
{ label: 'Write', id: 'write' },
|
||||
{ label: 'Search', id: 'search' },
|
||||
{ label: 'Read', id: 'read' },
|
||||
],
|
||||
value: () => 'write',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'create',
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'createParams',
|
||||
title: 'Parameters',
|
||||
type: 'code',
|
||||
placeholder: '{\n "channel": "#general"\n}',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'create',
|
||||
},
|
||||
},
|
||||
// Stateless Execute fields
|
||||
{
|
||||
id: 'statelessApp',
|
||||
title: 'App',
|
||||
type: 'short-input',
|
||||
placeholder: 'App identifier (e.g., "SlackAPI", "GmailV2API")',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'stateless_execute',
|
||||
},
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
id: 'statelessAction',
|
||||
title: 'Action',
|
||||
type: 'short-input',
|
||||
placeholder: 'Action identifier (e.g., "send_channel_message")',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'stateless_execute',
|
||||
},
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
id: 'statelessInstructions',
|
||||
title: 'Instructions',
|
||||
type: 'long-input',
|
||||
placeholder: 'Describe what you want to do in plain English',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'stateless_execute',
|
||||
},
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
id: 'statelessActionType',
|
||||
title: 'Action Type',
|
||||
type: 'dropdown',
|
||||
options: [
|
||||
{ label: 'Write', id: 'write' },
|
||||
{ label: 'Search', id: 'search' },
|
||||
{ label: 'Read', id: 'read' },
|
||||
],
|
||||
value: () => 'write',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'stateless_execute',
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'statelessParams',
|
||||
title: 'Parameters',
|
||||
type: 'code',
|
||||
placeholder: '{\n "channel": {"mode": "locked", "value": "#general"}\n}',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'stateless_execute',
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'statelessPreviewOnly',
|
||||
title: 'Preview Mode',
|
||||
type: 'dropdown',
|
||||
options: [
|
||||
{ label: 'Execute', id: 'false' },
|
||||
{ label: 'Preview Only', id: 'true' },
|
||||
],
|
||||
value: () => 'false',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'stateless_execute',
|
||||
},
|
||||
},
|
||||
// Search App Actions fields
|
||||
{
|
||||
id: 'searchAppActionsApp',
|
||||
title: 'App',
|
||||
type: 'short-input',
|
||||
placeholder: 'App identifier (e.g., "SlackAPI", "GmailV2API")',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'search_app_actions',
|
||||
},
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
id: 'searchAppActionsQuery',
|
||||
title: 'Search Query',
|
||||
type: 'short-input',
|
||||
placeholder: 'Optional: filter actions by name',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'search_app_actions',
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'searchAppActionsTypes',
|
||||
title: 'Action Types',
|
||||
type: 'checkbox-list',
|
||||
options: [
|
||||
{ label: 'Write (Create/Send)', id: 'searchAppActionsTypes_write' },
|
||||
{ label: 'Search (Find)', id: 'searchAppActionsTypes_search' },
|
||||
{ label: 'Read (Get)', id: 'searchAppActionsTypes_read' },
|
||||
],
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'search_app_actions',
|
||||
},
|
||||
},
|
||||
// Get Action Details fields
|
||||
{
|
||||
id: 'detailsApp',
|
||||
title: 'App',
|
||||
type: 'short-input',
|
||||
placeholder: 'App identifier (e.g., "SlackAPI", "GmailV2API")',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'get_action_details',
|
||||
},
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
id: 'detailsAction',
|
||||
title: 'Action',
|
||||
type: 'short-input',
|
||||
placeholder: 'Action identifier (e.g., "send_channel_message")',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'get_action_details',
|
||||
},
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
id: 'detailsActionType',
|
||||
title: 'Action Type',
|
||||
type: 'dropdown',
|
||||
options: [
|
||||
{ label: 'Write', id: 'write' },
|
||||
{ label: 'Search', id: 'search' },
|
||||
{ label: 'Read', id: 'read' },
|
||||
],
|
||||
value: () => 'write',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'get_action_details',
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'includeNeeds',
|
||||
title: 'Include Inputs (Needs)',
|
||||
type: 'dropdown',
|
||||
options: [
|
||||
{ label: 'Yes', id: 'true' },
|
||||
{ label: 'No', id: 'false' },
|
||||
],
|
||||
value: () => 'true',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'get_action_details',
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'includeGives',
|
||||
title: 'Include Outputs (Gives)',
|
||||
type: 'dropdown',
|
||||
options: [
|
||||
{ label: 'Yes', id: 'true' },
|
||||
{ label: 'No', id: 'false' },
|
||||
],
|
||||
value: () => 'false',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'get_action_details',
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'includeSample',
|
||||
title: 'Include Sample',
|
||||
type: 'dropdown',
|
||||
options: [
|
||||
{ label: 'Yes', id: 'true' },
|
||||
{ label: 'No', id: 'false' },
|
||||
],
|
||||
value: () => 'false',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'get_action_details',
|
||||
},
|
||||
},
|
||||
// Update Action fields
|
||||
{
|
||||
id: 'updateActionId',
|
||||
title: 'Action ID',
|
||||
type: 'short-input',
|
||||
placeholder: 'The ID of the AI Action to update',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'update',
|
||||
},
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
id: 'updateApp',
|
||||
title: 'App',
|
||||
type: 'short-input',
|
||||
placeholder: 'App identifier (e.g., "SlackAPI")',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'update',
|
||||
},
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
id: 'updateAction',
|
||||
title: 'Action',
|
||||
type: 'short-input',
|
||||
placeholder: 'Action identifier (e.g., "send_channel_message")',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'update',
|
||||
},
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
id: 'updateActionType',
|
||||
title: 'Action Type',
|
||||
type: 'dropdown',
|
||||
options: [
|
||||
{ label: 'Write', id: 'write' },
|
||||
{ label: 'Search', id: 'search' },
|
||||
{ label: 'Read', id: 'read' },
|
||||
],
|
||||
value: () => 'write',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'update',
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'updateParams',
|
||||
title: 'Parameters',
|
||||
type: 'code',
|
||||
placeholder: '{\n "channel": "#general"\n}',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'update',
|
||||
},
|
||||
},
|
||||
// Delete Action fields
|
||||
{
|
||||
id: 'deleteActionId',
|
||||
title: 'Action ID',
|
||||
type: 'short-input',
|
||||
placeholder: 'The ID of the AI Action to delete',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'delete',
|
||||
},
|
||||
required: true,
|
||||
},
|
||||
],
|
||||
tools: {
|
||||
access: [
|
||||
'zapier_execute_action',
|
||||
'zapier_list_actions',
|
||||
'zapier_search_apps',
|
||||
'zapier_guess_actions',
|
||||
'zapier_create_action',
|
||||
'zapier_stateless_execute',
|
||||
'zapier_search_app_actions',
|
||||
'zapier_get_action_details',
|
||||
'zapier_update_action',
|
||||
'zapier_delete_action',
|
||||
],
|
||||
config: {
|
||||
tool: (params) => {
|
||||
switch (params.operation) {
|
||||
case 'execute':
|
||||
return 'zapier_execute_action'
|
||||
case 'stateless_execute':
|
||||
return 'zapier_stateless_execute'
|
||||
case 'list':
|
||||
return 'zapier_list_actions'
|
||||
case 'search_apps':
|
||||
return 'zapier_search_apps'
|
||||
case 'search_app_actions':
|
||||
return 'zapier_search_app_actions'
|
||||
case 'guess':
|
||||
return 'zapier_guess_actions'
|
||||
case 'get_action_details':
|
||||
return 'zapier_get_action_details'
|
||||
case 'create':
|
||||
return 'zapier_create_action'
|
||||
case 'update':
|
||||
return 'zapier_update_action'
|
||||
case 'delete':
|
||||
return 'zapier_delete_action'
|
||||
default:
|
||||
throw new Error(`Invalid Zapier operation: ${params.operation}`)
|
||||
}
|
||||
},
|
||||
params: (params) => {
|
||||
const {
|
||||
operation,
|
||||
credential,
|
||||
actionId,
|
||||
instructions,
|
||||
params: execParams,
|
||||
previewOnly,
|
||||
searchQuery,
|
||||
guessQuery,
|
||||
resultCount,
|
||||
app,
|
||||
action,
|
||||
createActionType,
|
||||
createParams,
|
||||
statelessApp,
|
||||
statelessAction,
|
||||
statelessInstructions,
|
||||
statelessActionType,
|
||||
statelessParams,
|
||||
statelessPreviewOnly,
|
||||
searchAppActionsApp,
|
||||
searchAppActionsQuery,
|
||||
detailsApp,
|
||||
detailsAction,
|
||||
detailsActionType,
|
||||
includeNeeds,
|
||||
includeGives,
|
||||
includeSample,
|
||||
updateActionId,
|
||||
updateApp,
|
||||
updateAction,
|
||||
updateActionType,
|
||||
updateParams,
|
||||
deleteActionId,
|
||||
} = params
|
||||
|
||||
const baseParams: Record<string, any> = { credential }
|
||||
|
||||
// Helper to parse JSON params
|
||||
const parseJsonParams = (jsonParams: any) => {
|
||||
if (!jsonParams) return undefined
|
||||
try {
|
||||
return typeof jsonParams === 'string' ? JSON.parse(jsonParams) : jsonParams
|
||||
} catch {
|
||||
throw new Error('Invalid JSON in parameters field')
|
||||
}
|
||||
}
|
||||
|
||||
// Helper to collect checkbox-list values
|
||||
// Use truthy check since values may be boolean true or string "true" after serialization
|
||||
const collectActionTypes = (prefix: string) => {
|
||||
const types: string[] = []
|
||||
const writeVal = params[`${prefix}_write`]
|
||||
const searchVal = params[`${prefix}_search`]
|
||||
const readVal = params[`${prefix}_read`]
|
||||
if (writeVal === true || writeVal === 'true') types.push('write')
|
||||
if (searchVal === true || searchVal === 'true') types.push('search')
|
||||
if (readVal === true || readVal === 'true') types.push('read')
|
||||
return types.length > 0 ? types : undefined
|
||||
}
|
||||
|
||||
switch (operation) {
|
||||
case 'execute':
|
||||
baseParams.actionId = actionId
|
||||
baseParams.instructions = instructions
|
||||
baseParams.params = parseJsonParams(execParams)
|
||||
baseParams.previewOnly = previewOnly === 'true'
|
||||
break
|
||||
|
||||
case 'stateless_execute':
|
||||
baseParams.app = statelessApp
|
||||
baseParams.action = statelessAction
|
||||
baseParams.instructions = statelessInstructions
|
||||
baseParams.actionType = statelessActionType || 'write'
|
||||
baseParams.params = parseJsonParams(statelessParams)
|
||||
baseParams.previewOnly = statelessPreviewOnly === 'true'
|
||||
break
|
||||
|
||||
case 'list':
|
||||
break
|
||||
|
||||
case 'search_apps':
|
||||
if (searchQuery) baseParams.query = searchQuery
|
||||
break
|
||||
|
||||
case 'search_app_actions':
|
||||
baseParams.app = searchAppActionsApp
|
||||
if (searchAppActionsQuery) baseParams.query = searchAppActionsQuery
|
||||
baseParams.actionTypes = collectActionTypes('searchAppActionsTypes')
|
||||
break
|
||||
|
||||
case 'guess': {
|
||||
baseParams.query = guessQuery
|
||||
// Checkbox-list values are stored under prefixed option IDs (actionTypes_write, etc.)
|
||||
baseParams.actionTypes = collectActionTypes('actionTypes')
|
||||
if (resultCount) {
|
||||
const count = Number.parseInt(resultCount, 10)
|
||||
if (!Number.isNaN(count)) baseParams.count = count
|
||||
}
|
||||
break
|
||||
}
|
||||
|
||||
case 'get_action_details':
|
||||
baseParams.app = detailsApp
|
||||
baseParams.action = detailsAction
|
||||
baseParams.actionType = detailsActionType || 'write'
|
||||
baseParams.includeNeeds = includeNeeds !== 'false'
|
||||
baseParams.includeGives = includeGives === 'true'
|
||||
baseParams.includeSample = includeSample === 'true'
|
||||
break
|
||||
|
||||
case 'create':
|
||||
baseParams.app = app
|
||||
baseParams.action = action
|
||||
baseParams.actionType = createActionType || 'write'
|
||||
baseParams.params = parseJsonParams(createParams)
|
||||
break
|
||||
|
||||
case 'update':
|
||||
baseParams.actionId = updateActionId
|
||||
baseParams.app = updateApp
|
||||
baseParams.action = updateAction
|
||||
baseParams.actionType = updateActionType || 'write'
|
||||
baseParams.params = parseJsonParams(updateParams)
|
||||
break
|
||||
|
||||
case 'delete':
|
||||
baseParams.actionId = deleteActionId
|
||||
break
|
||||
}
|
||||
|
||||
return baseParams
|
||||
},
|
||||
},
|
||||
},
|
||||
inputs: {
|
||||
operation: { type: 'string', description: 'Operation to perform' },
|
||||
credential: { type: 'string', description: 'Zapier OAuth credential' },
|
||||
// Execute inputs
|
||||
actionId: { type: 'string', description: 'AI Action ID to execute' },
|
||||
instructions: { type: 'string', description: 'Plain English instructions for the action' },
|
||||
params: { type: 'json', description: 'Optional parameter constraints' },
|
||||
previewOnly: { type: 'string', description: 'Whether to preview without executing' },
|
||||
// Stateless execute inputs
|
||||
statelessApp: { type: 'string', description: 'App identifier for stateless execute' },
|
||||
statelessAction: { type: 'string', description: 'Action identifier for stateless execute' },
|
||||
statelessInstructions: { type: 'string', description: 'Instructions for stateless execute' },
|
||||
statelessActionType: { type: 'string', description: 'Action type for stateless execute' },
|
||||
statelessParams: { type: 'json', description: 'Parameters for stateless execute' },
|
||||
statelessPreviewOnly: { type: 'string', description: 'Preview mode for stateless execute' },
|
||||
// Search inputs
|
||||
searchQuery: { type: 'string', description: 'App search query' },
|
||||
// Search app actions inputs
|
||||
searchAppActionsApp: { type: 'string', description: 'App to search actions for' },
|
||||
searchAppActionsQuery: { type: 'string', description: 'Query to filter actions' },
|
||||
searchAppActionsTypes_write: { type: 'boolean', description: 'Include write actions' },
|
||||
searchAppActionsTypes_search: { type: 'boolean', description: 'Include search actions' },
|
||||
searchAppActionsTypes_read: { type: 'boolean', description: 'Include read actions' },
|
||||
// Guess inputs
|
||||
guessQuery: { type: 'string', description: 'Natural language query to find actions' },
|
||||
actionTypes_write: { type: 'boolean', description: 'Include write actions' },
|
||||
actionTypes_search: { type: 'boolean', description: 'Include search actions' },
|
||||
actionTypes_read: { type: 'boolean', description: 'Include read actions' },
|
||||
resultCount: { type: 'string', description: 'Maximum number of results' },
|
||||
// Get action details inputs
|
||||
detailsApp: { type: 'string', description: 'App identifier for action details' },
|
||||
detailsAction: { type: 'string', description: 'Action identifier for action details' },
|
||||
detailsActionType: { type: 'string', description: 'Action type for action details' },
|
||||
includeNeeds: { type: 'string', description: 'Include input requirements' },
|
||||
includeGives: { type: 'string', description: 'Include output specifications' },
|
||||
includeSample: { type: 'string', description: 'Include sample data' },
|
||||
// Create inputs
|
||||
app: { type: 'string', description: 'App identifier' },
|
||||
action: { type: 'string', description: 'Action identifier' },
|
||||
createActionType: { type: 'string', description: 'Type of action to create' },
|
||||
createParams: { type: 'json', description: 'Pre-configured parameters' },
|
||||
// Update inputs
|
||||
updateActionId: { type: 'string', description: 'AI Action ID to update' },
|
||||
updateApp: { type: 'string', description: 'App identifier for update' },
|
||||
updateAction: { type: 'string', description: 'Action identifier for update' },
|
||||
updateActionType: { type: 'string', description: 'Action type for update' },
|
||||
updateParams: { type: 'json', description: 'Parameters for update' },
|
||||
// Delete inputs
|
||||
deleteActionId: { type: 'string', description: 'AI Action ID to delete' },
|
||||
},
|
||||
outputs: {
|
||||
// Execute Action outputs
|
||||
executionLogId: {
|
||||
type: 'string',
|
||||
description: 'Unique identifier for the execution',
|
||||
},
|
||||
actionUsed: {
|
||||
type: 'string',
|
||||
description: 'Name of the action that was executed',
|
||||
},
|
||||
inputParams: {
|
||||
type: 'json',
|
||||
description: 'Parameters passed to the API',
|
||||
},
|
||||
resolvedParams: {
|
||||
type: 'json',
|
||||
description: 'Parameters resolved by AI for execution',
|
||||
},
|
||||
results: {
|
||||
type: 'json',
|
||||
description: 'Results from action execution',
|
||||
},
|
||||
resultFieldLabels: {
|
||||
type: 'json',
|
||||
description: 'Human-readable labels for result fields',
|
||||
},
|
||||
status: {
|
||||
type: 'string',
|
||||
description: 'Execution status (success, error, preview, etc.)',
|
||||
},
|
||||
error: {
|
||||
type: 'string',
|
||||
description: 'Error message if execution failed',
|
||||
},
|
||||
// List Actions outputs
|
||||
actions: {
|
||||
type: 'json',
|
||||
description:
|
||||
'Array of AI Actions with id, description, actionType, app, appLabel, action, actionLabel, params, accountId, authenticationId, configurationLink (list) or guessed actions (find)',
|
||||
},
|
||||
configurationLink: {
|
||||
type: 'string',
|
||||
description: 'Link to configure actions in Zapier (list operation only)',
|
||||
},
|
||||
// Search Apps outputs
|
||||
apps: {
|
||||
type: 'json',
|
||||
description:
|
||||
'Array of apps with app, name, logoUrl, authType, actionCount, writeActionCount, searchActionCount, readActionCount',
|
||||
},
|
||||
// Guess Actions outputs (in addition to 'actions' above)
|
||||
name: {
|
||||
type: 'string',
|
||||
description: 'Combined app and action name (find operation)',
|
||||
},
|
||||
image: {
|
||||
type: 'string',
|
||||
description: 'App logo URL (find operation)',
|
||||
},
|
||||
score: {
|
||||
type: 'number',
|
||||
description: 'Relevance score for guessed actions (find operation)',
|
||||
},
|
||||
// Create Action outputs
|
||||
id: {
|
||||
type: 'string',
|
||||
description: 'ID of the created AI Action',
|
||||
},
|
||||
description: {
|
||||
type: 'string',
|
||||
description: 'Description of the action',
|
||||
},
|
||||
actionType: {
|
||||
type: 'string',
|
||||
description:
|
||||
'Type of action (write, search, read, read_bulk, search_or_write, search_and_write)',
|
||||
},
|
||||
app: {
|
||||
type: 'string',
|
||||
description: 'App identifier',
|
||||
},
|
||||
appLabel: {
|
||||
type: 'string',
|
||||
description: 'Human-readable app label',
|
||||
},
|
||||
action: {
|
||||
type: 'string',
|
||||
description: 'Action identifier',
|
||||
},
|
||||
actionLabel: {
|
||||
type: 'string',
|
||||
description: 'Human-readable action label',
|
||||
},
|
||||
params: {
|
||||
type: 'json',
|
||||
description: 'Configured parameter values',
|
||||
},
|
||||
accountId: {
|
||||
type: 'number',
|
||||
description: 'Zapier account ID',
|
||||
},
|
||||
authenticationId: {
|
||||
type: 'number',
|
||||
description: 'Authentication ID used for the app',
|
||||
},
|
||||
// Get Action Details outputs
|
||||
needs: {
|
||||
type: 'json',
|
||||
description: 'Array of input requirements for the action',
|
||||
},
|
||||
gives: {
|
||||
type: 'json',
|
||||
description: 'Array of output fields from the action',
|
||||
},
|
||||
sample: {
|
||||
type: 'json',
|
||||
description: 'Sample execution result',
|
||||
},
|
||||
customNeedsProbability: {
|
||||
type: 'number',
|
||||
description: 'Probability that action has custom/dynamic input fields',
|
||||
},
|
||||
// Delete Action outputs
|
||||
deleted: {
|
||||
type: 'boolean',
|
||||
description: 'Whether the action was successfully deleted',
|
||||
},
|
||||
message: {
|
||||
type: 'string',
|
||||
description: 'Status message for delete operation',
|
||||
},
|
||||
},
|
||||
}
|
||||
@@ -18,7 +18,6 @@ import { CursorBlock } from '@/blocks/blocks/cursor'
|
||||
import { DatadogBlock } from '@/blocks/blocks/datadog'
|
||||
import { DiscordBlock } from '@/blocks/blocks/discord'
|
||||
import { DropboxBlock } from '@/blocks/blocks/dropbox'
|
||||
import { DuckDuckGoBlock } from '@/blocks/blocks/duckduckgo'
|
||||
import { DynamoDBBlock } from '@/blocks/blocks/dynamodb'
|
||||
import { ElasticsearchBlock } from '@/blocks/blocks/elasticsearch'
|
||||
import { ElevenLabsBlock } from '@/blocks/blocks/elevenlabs'
|
||||
@@ -89,7 +88,6 @@ import { RedditBlock } from '@/blocks/blocks/reddit'
|
||||
import { ResendBlock } from '@/blocks/blocks/resend'
|
||||
import { ResponseBlock } from '@/blocks/blocks/response'
|
||||
import { RouterBlock } from '@/blocks/blocks/router'
|
||||
import { RssBlock } from '@/blocks/blocks/rss'
|
||||
import { S3Block } from '@/blocks/blocks/s3'
|
||||
import { SalesforceBlock } from '@/blocks/blocks/salesforce'
|
||||
import { ScheduleBlock } from '@/blocks/blocks/schedule'
|
||||
@@ -97,7 +95,6 @@ import { SearchBlock } from '@/blocks/blocks/search'
|
||||
import { SendGridBlock } from '@/blocks/blocks/sendgrid'
|
||||
import { SentryBlock } from '@/blocks/blocks/sentry'
|
||||
import { SerperBlock } from '@/blocks/blocks/serper'
|
||||
import { SftpBlock } from '@/blocks/blocks/sftp'
|
||||
import { SharepointBlock } from '@/blocks/blocks/sharepoint'
|
||||
import { ShopifyBlock } from '@/blocks/blocks/shopify'
|
||||
import { SlackBlock } from '@/blocks/blocks/slack'
|
||||
@@ -133,7 +130,6 @@ import { WorkflowBlock } from '@/blocks/blocks/workflow'
|
||||
import { WorkflowInputBlock } from '@/blocks/blocks/workflow_input'
|
||||
import { XBlock } from '@/blocks/blocks/x'
|
||||
import { YouTubeBlock } from '@/blocks/blocks/youtube'
|
||||
import { ZapierBlock } from '@/blocks/blocks/zapier'
|
||||
import { ZendeskBlock } from '@/blocks/blocks/zendesk'
|
||||
import { ZepBlock } from '@/blocks/blocks/zep'
|
||||
import { ZoomBlock } from '@/blocks/blocks/zoom'
|
||||
@@ -161,7 +157,6 @@ export const registry: Record<string, BlockConfig> = {
|
||||
datadog: DatadogBlock,
|
||||
discord: DiscordBlock,
|
||||
dropbox: DropboxBlock,
|
||||
duckduckgo: DuckDuckGoBlock,
|
||||
elevenlabs: ElevenLabsBlock,
|
||||
elasticsearch: ElasticsearchBlock,
|
||||
evaluator: EvaluatorBlock,
|
||||
@@ -231,7 +226,6 @@ export const registry: Record<string, BlockConfig> = {
|
||||
reddit: RedditBlock,
|
||||
resend: ResendBlock,
|
||||
response: ResponseBlock,
|
||||
rss: RssBlock,
|
||||
router: RouterBlock,
|
||||
s3: S3Block,
|
||||
salesforce: SalesforceBlock,
|
||||
@@ -244,7 +238,6 @@ export const registry: Record<string, BlockConfig> = {
|
||||
shopify: ShopifyBlock,
|
||||
slack: SlackBlock,
|
||||
smtp: SmtpBlock,
|
||||
sftp: SftpBlock,
|
||||
ssh: SSHBlock,
|
||||
stagehand: StagehandBlock,
|
||||
stagehand_agent: StagehandAgentBlock,
|
||||
@@ -276,9 +269,8 @@ export const registry: Record<string, BlockConfig> = {
|
||||
workflow_input: WorkflowInputBlock,
|
||||
x: XBlock,
|
||||
youtube: YouTubeBlock,
|
||||
zapier: ZapierBlock,
|
||||
zendesk: ZendeskBlock,
|
||||
zep: ZepBlock,
|
||||
zendesk: ZendeskBlock,
|
||||
zoom: ZoomBlock,
|
||||
}
|
||||
|
||||
|
||||
@@ -3798,23 +3798,6 @@ export function SshIcon(props: SVGProps<SVGSVGElement>) {
|
||||
)
|
||||
}
|
||||
|
||||
export function SftpIcon(props: SVGProps<SVGSVGElement>) {
|
||||
return (
|
||||
<svg
|
||||
{...props}
|
||||
xmlns='http://www.w3.org/2000/svg'
|
||||
viewBox='0 0 32 32'
|
||||
width='32px'
|
||||
height='32px'
|
||||
>
|
||||
<path
|
||||
d='M 6 3 L 6 29 L 26 29 L 26 9.59375 L 25.71875 9.28125 L 19.71875 3.28125 L 19.40625 3 Z M 8 5 L 18 5 L 18 11 L 24 11 L 24 27 L 8 27 Z M 20 6.4375 L 22.5625 9 L 20 9 Z'
|
||||
fill='currentColor'
|
||||
/>
|
||||
</svg>
|
||||
)
|
||||
}
|
||||
|
||||
export function ApifyIcon(props: SVGProps<SVGSVGElement>) {
|
||||
return (
|
||||
<svg
|
||||
@@ -4146,78 +4129,3 @@ export function CursorIcon(props: SVGProps<SVGSVGElement>) {
|
||||
</svg>
|
||||
)
|
||||
}
|
||||
|
||||
export function DuckDuckGoIcon(props: SVGProps<SVGSVGElement>) {
|
||||
return (
|
||||
<svg {...props} xmlns='http://www.w3.org/2000/svg' viewBox='-108 -108 216 216'>
|
||||
<circle r='108' fill='#d53' />
|
||||
<circle r='96' fill='none' stroke='#ffffff' strokeWidth='7' />
|
||||
<path
|
||||
d='M-32-55C-62-48-51-6-51-6l19 93 7 3M-39-73h-8l11 4s-11 0-11 7c24-1 35 5 35 5'
|
||||
fill='#ddd'
|
||||
/>
|
||||
<path d='M25 95S1 57 1 32c0-47 31-7 31-44S1-58 1-58c-15-19-44-15-44-15l7 4s-7 2-9 4 19-3 28 5c-37 3-31 33-31 33l21 120' />
|
||||
<path d='M25-1l38-10c34 5-29 24-33 23C0 7 9 32 45 24s9 20-24 9C-26 20-1-3 25-1' fill='#fc0' />
|
||||
<path
|
||||
d='M15 78l2-3c22 8 23 11 22-9s0-20-23-3c0-5-13-3-15 0-21-9-23-12-22 2 2 29 1 24 21 14'
|
||||
fill='#6b5'
|
||||
/>
|
||||
<path d='M-1 67v12c1 2 17 2 17-2s-8 3-13 1-2-13-2-13' fill='#4a4' />
|
||||
<path
|
||||
d='M-23-32c-5-6-18-1-15 7 1-4 8-10 15-7m32 0c1-6 11-7 14-1-4-2-10-2-14 1m-33 16a2 2 0 1 1 0 1m-8 3a7 7 0 1 0 0-1m52-6a2 2 0 1 1 0 1m-6 3a6 6 0 1 0 0-1'
|
||||
fill='#148'
|
||||
/>
|
||||
</svg>
|
||||
)
|
||||
}
|
||||
|
||||
export function RssIcon(props: SVGProps<SVGSVGElement>) {
|
||||
return (
|
||||
<svg
|
||||
{...props}
|
||||
width='24'
|
||||
height='24'
|
||||
viewBox='0 0 24 24'
|
||||
fill='none'
|
||||
xmlns='http://www.w3.org/2000/svg'
|
||||
>
|
||||
<path
|
||||
d='M4 11C6.38695 11 8.67613 11.9482 10.364 13.636C12.0518 15.3239 13 17.6131 13 20'
|
||||
stroke='currentColor'
|
||||
strokeWidth='2'
|
||||
strokeLinecap='round'
|
||||
strokeLinejoin='round'
|
||||
/>
|
||||
<path
|
||||
d='M4 4C8.24346 4 12.3131 5.68571 15.3137 8.68629C18.3143 11.6869 20 15.7565 20 20'
|
||||
stroke='currentColor'
|
||||
strokeWidth='2'
|
||||
strokeLinecap='round'
|
||||
strokeLinejoin='round'
|
||||
/>
|
||||
<circle cx='5' cy='19' r='1' fill='currentColor' />
|
||||
</svg>
|
||||
)
|
||||
}
|
||||
|
||||
export function ZapierIcon(props: SVGProps<SVGSVGElement>) {
|
||||
return (
|
||||
<svg
|
||||
{...props}
|
||||
width='800px'
|
||||
height='800px'
|
||||
viewBox='0 0 256 256'
|
||||
version='1.1'
|
||||
xmlns='http://www.w3.org/2000/svg'
|
||||
xmlnsXlink='http://www.w3.org/1999/xlink'
|
||||
>
|
||||
<g>
|
||||
<path
|
||||
d='M128.080089,-0.000183105 C135.311053,0.0131003068 142.422517,0.624138494 149.335663,1.77979593 L149.335663,1.77979593 L149.335663,76.2997796 L202.166953,23.6044907 C208.002065,27.7488446 213.460883,32.3582023 218.507811,37.3926715 C223.557281,42.4271407 228.192318,47.8867213 232.346817,53.7047992 L232.346817,53.7047992 L179.512985,106.400063 L254.227854,106.400063 C255.387249,113.29414 256,120.36111 256,127.587243 L256,127.587243 L256,127.759881 C256,134.986013 255.387249,142.066204 254.227854,148.960282 L254.227854,148.960282 L179.500273,148.960282 L232.346817,201.642324 C228.192318,207.460402 223.557281,212.919983 218.523066,217.954452 L218.523066,217.954452 L218.507811,217.954452 C213.460883,222.988921 208.002065,227.6115 202.182208,231.742607 L202.182208,231.742607 L149.335663,179.04709 L149.335663,253.5672 C142.435229,254.723036 135.323765,255.333244 128.092802,255.348499 L128.092802,255.348499 L127.907197,255.348499 C120.673691,255.333244 113.590195,254.723036 106.677048,253.5672 L106.677048,253.5672 L106.677048,179.04709 L53.8457596,231.742607 C42.1780766,223.466917 31.977435,213.278734 23.6658953,201.642324 L23.6658953,201.642324 L76.4997269,148.960282 L1.78485803,148.960282 C0.612750404,142.052729 0,134.946095 0,127.719963 L0,127.719963 L0,127.349037 C0.0121454869,125.473817 0.134939797,123.182933 0.311311815,120.812834 L0.36577283,120.099764 C0.887996182,113.428547 1.78485803,106.400063 1.78485803,106.400063 L1.78485803,106.400063 L76.4997269,106.400063 L23.6658953,53.7047992 C27.8076812,47.8867213 32.4300059,42.4403618 37.4769335,37.4193681 L37.4769335,37.4193681 L37.5023588,37.3926715 C42.5391163,32.3582023 48.0106469,27.7488446 53.8457596,23.6044907 L53.8457596,23.6044907 L106.677048,76.2997796 L106.677048,1.77979593 C113.590195,0.624138494 120.688946,0.0131003068 127.932622,-0.000183105 L127.932622,-0.000183105 L128.080089,-0.000183105 Z M128.067377,95.7600714 L127.945335,95.7600714 C118.436262,95.7600714 109.32891,97.5001809 100.910584,100.661566 
C97.7553011,109.043534 96.0085811,118.129275 95.9958684,127.613685 L95.9958684,127.733184 C96.0085811,137.217594 97.7553011,146.303589 100.923296,154.685303 C109.32891,157.846943 118.436262,159.587052 127.945335,159.587052 L128.067377,159.587052 C137.576449,159.587052 146.683802,157.846943 155.089415,154.685303 C158.257411,146.290368 160.004131,137.217594 160.004131,127.733184 L160.004131,127.613685 C160.004131,118.129275 158.257411,109.043534 155.089415,100.661566 C146.683802,97.5001809 137.576449,95.7600714 128.067377,95.7600714 Z'
|
||||
fill='#FF4A00'
|
||||
fillRule='nonzero'
|
||||
/>
|
||||
</g>
|
||||
</svg>
|
||||
)
|
||||
}
|
||||
|
||||
@@ -158,8 +158,8 @@ export const HTTP = {
|
||||
|
||||
export const AGENT = {
|
||||
DEFAULT_MODEL: 'claude-sonnet-4-5',
|
||||
DEFAULT_FUNCTION_TIMEOUT: 600000, // 10 minutes for custom tool code execution
|
||||
REQUEST_TIMEOUT: 600000, // 10 minutes for LLM API requests
|
||||
DEFAULT_FUNCTION_TIMEOUT: 5000,
|
||||
REQUEST_TIMEOUT: 120000,
|
||||
CUSTOM_TOOL_PREFIX: 'custom_',
|
||||
} as const
|
||||
|
||||
|
||||
@@ -127,8 +127,7 @@ export class AgentBlockHandler implements BlockHandler {
|
||||
})
|
||||
.map(async (tool) => {
|
||||
try {
|
||||
// Handle custom tools - either inline (schema) or reference (customToolId)
|
||||
if (tool.type === 'custom-tool' && (tool.schema || tool.customToolId)) {
|
||||
if (tool.type === 'custom-tool' && tool.schema) {
|
||||
return await this.createCustomTool(ctx, tool)
|
||||
}
|
||||
if (tool.type === 'mcp') {
|
||||
@@ -152,47 +151,24 @@ export class AgentBlockHandler implements BlockHandler {
|
||||
private async createCustomTool(ctx: ExecutionContext, tool: ToolInput): Promise<any> {
|
||||
const userProvidedParams = tool.params || {}
|
||||
|
||||
// Resolve tool definition - either inline or from database reference
|
||||
let schema = tool.schema
|
||||
let code = tool.code
|
||||
let title = tool.title
|
||||
|
||||
// If this is a reference-only tool (has customToolId but no schema), fetch from API
|
||||
if (tool.customToolId && !schema) {
|
||||
const resolved = await this.fetchCustomToolById(ctx, tool.customToolId)
|
||||
if (!resolved) {
|
||||
logger.error(`Custom tool not found: ${tool.customToolId}`)
|
||||
return null
|
||||
}
|
||||
schema = resolved.schema
|
||||
code = resolved.code
|
||||
title = resolved.title
|
||||
}
|
||||
|
||||
// Validate we have the required data
|
||||
if (!schema?.function) {
|
||||
logger.error('Custom tool missing schema:', { customToolId: tool.customToolId, title })
|
||||
return null
|
||||
}
|
||||
|
||||
const { filterSchemaForLLM, mergeToolParameters } = await import('@/tools/params')
|
||||
|
||||
const filteredSchema = filterSchemaForLLM(schema.function.parameters, userProvidedParams)
|
||||
const filteredSchema = filterSchemaForLLM(tool.schema.function.parameters, userProvidedParams)
|
||||
|
||||
const toolId = `${AGENT.CUSTOM_TOOL_PREFIX}${title}`
|
||||
const toolId = `${AGENT.CUSTOM_TOOL_PREFIX}${tool.title}`
|
||||
const base: any = {
|
||||
id: toolId,
|
||||
name: schema.function.name,
|
||||
description: schema.function.description || '',
|
||||
name: tool.schema.function.name,
|
||||
description: tool.schema.function.description || '',
|
||||
params: userProvidedParams,
|
||||
parameters: {
|
||||
...filteredSchema,
|
||||
type: schema.function.parameters.type,
|
||||
type: tool.schema.function.parameters.type,
|
||||
},
|
||||
usageControl: tool.usageControl || 'auto',
|
||||
}
|
||||
|
||||
if (code) {
|
||||
if (tool.code) {
|
||||
base.executeFunction = async (callParams: Record<string, any>) => {
|
||||
const mergedParams = mergeToolParameters(userProvidedParams, callParams)
|
||||
|
||||
@@ -201,7 +177,7 @@ export class AgentBlockHandler implements BlockHandler {
|
||||
const result = await executeTool(
|
||||
'function_execute',
|
||||
{
|
||||
code,
|
||||
code: tool.code,
|
||||
...mergedParams,
|
||||
timeout: tool.timeout ?? AGENT.DEFAULT_FUNCTION_TIMEOUT,
|
||||
envVars: ctx.environmentVariables || {},
|
||||
@@ -229,78 +205,6 @@ export class AgentBlockHandler implements BlockHandler {
|
||||
return base
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetches a custom tool definition from the database by ID
|
||||
* Uses Zustand store in browser, API call on server
|
||||
*/
|
||||
private async fetchCustomToolById(
|
||||
ctx: ExecutionContext,
|
||||
customToolId: string
|
||||
): Promise<{ schema: any; code: string; title: string } | null> {
|
||||
// In browser, use the Zustand store which has cached data from React Query
|
||||
if (typeof window !== 'undefined') {
|
||||
try {
|
||||
const { useCustomToolsStore } = await import('@/stores/custom-tools/store')
|
||||
const tool = useCustomToolsStore.getState().getTool(customToolId)
|
||||
if (tool) {
|
||||
return {
|
||||
schema: tool.schema,
|
||||
code: tool.code || '',
|
||||
title: tool.title,
|
||||
}
|
||||
}
|
||||
logger.warn(`Custom tool not found in store: ${customToolId}`)
|
||||
} catch (error) {
|
||||
logger.error('Error accessing custom tools store:', { error })
|
||||
}
|
||||
}
|
||||
|
||||
// Server-side: fetch from API
|
||||
try {
|
||||
const headers = await buildAuthHeaders()
|
||||
const params: Record<string, string> = {}
|
||||
|
||||
if (ctx.workspaceId) {
|
||||
params.workspaceId = ctx.workspaceId
|
||||
}
|
||||
if (ctx.workflowId) {
|
||||
params.workflowId = ctx.workflowId
|
||||
}
|
||||
|
||||
const url = buildAPIUrl('/api/tools/custom', params)
|
||||
const response = await fetch(url.toString(), {
|
||||
method: 'GET',
|
||||
headers,
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
logger.error(`Failed to fetch custom tools: ${response.status}`)
|
||||
return null
|
||||
}
|
||||
|
||||
const data = await response.json()
|
||||
if (!data.data || !Array.isArray(data.data)) {
|
||||
logger.error('Invalid custom tools API response')
|
||||
return null
|
||||
}
|
||||
|
||||
const tool = data.data.find((t: any) => t.id === customToolId)
|
||||
if (!tool) {
|
||||
logger.warn(`Custom tool not found by ID: ${customToolId}`)
|
||||
return null
|
||||
}
|
||||
|
||||
return {
|
||||
schema: tool.schema,
|
||||
code: tool.code || '',
|
||||
title: tool.title,
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error('Error fetching custom tool:', { customToolId, error })
|
||||
return null
|
||||
}
|
||||
}
|
||||
|
||||
private async createMcpTool(ctx: ExecutionContext, tool: ToolInput): Promise<any> {
|
||||
const { serverId, toolName, ...userProvidedParams } = tool.params || {}
|
||||
|
||||
|
||||
@@ -32,8 +32,6 @@ export interface ToolInput {
|
||||
timeout?: number
|
||||
usageControl?: 'auto' | 'force' | 'none'
|
||||
operation?: string
|
||||
/** Database ID for custom tools (new reference format) */
|
||||
customToolId?: string
|
||||
}
|
||||
|
||||
export interface Message {
|
||||
|
||||
@@ -245,11 +245,9 @@ export class LoopOrchestrator {
|
||||
}
|
||||
|
||||
/**
|
||||
* Evaluates the initial condition for loops at the sentinel start.
|
||||
* - For while loops, the condition must be checked BEFORE the first iteration.
|
||||
* - For forEach loops, skip if the items array is empty.
|
||||
* - For for loops, skip if maxIterations is 0.
|
||||
* - For doWhile loops, always execute at least once.
|
||||
* Evaluates the initial condition for while loops at the sentinel start.
|
||||
* For while loops, the condition must be checked BEFORE the first iteration.
|
||||
* If the condition is false, the loop body should be skipped entirely.
|
||||
*
|
||||
* @returns true if the loop should execute, false if it should be skipped
|
||||
*/
|
||||
@@ -260,47 +258,27 @@ export class LoopOrchestrator {
|
||||
return true
|
||||
}
|
||||
|
||||
// forEach: skip if items array is empty
|
||||
if (scope.loopType === 'forEach') {
|
||||
if (!scope.items || scope.items.length === 0) {
|
||||
logger.info('ForEach loop has empty items, skipping loop body', { loopId })
|
||||
return false
|
||||
}
|
||||
// Only while loops need an initial condition check
|
||||
// - for/forEach: always execute based on iteration count/items
|
||||
// - doWhile: always execute at least once, check condition after
|
||||
// - while: check condition before first iteration
|
||||
if (scope.loopType !== 'while') {
|
||||
return true
|
||||
}
|
||||
|
||||
// for: skip if maxIterations is 0
|
||||
if (scope.loopType === 'for') {
|
||||
if (scope.maxIterations === 0) {
|
||||
logger.info('For loop has 0 iterations, skipping loop body', { loopId })
|
||||
return false
|
||||
}
|
||||
return true
|
||||
if (!scope.condition) {
|
||||
logger.warn('No condition defined for while loop', { loopId })
|
||||
return false
|
||||
}
|
||||
|
||||
// doWhile: always execute at least once
|
||||
if (scope.loopType === 'doWhile') {
|
||||
return true
|
||||
}
|
||||
const result = this.evaluateWhileCondition(ctx, scope.condition, scope)
|
||||
logger.info('While loop initial condition evaluation', {
|
||||
loopId,
|
||||
condition: scope.condition,
|
||||
result,
|
||||
})
|
||||
|
||||
// while: check condition before first iteration
|
||||
if (scope.loopType === 'while') {
|
||||
if (!scope.condition) {
|
||||
logger.warn('No condition defined for while loop', { loopId })
|
||||
return false
|
||||
}
|
||||
|
||||
const result = this.evaluateWhileCondition(ctx, scope.condition, scope)
|
||||
logger.info('While loop initial condition evaluation', {
|
||||
loopId,
|
||||
condition: scope.condition,
|
||||
result,
|
||||
})
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
return true
|
||||
return result
|
||||
}
|
||||
|
||||
shouldExecuteLoopNode(_ctx: ExecutionContext, _nodeId: string, _loopId: string): boolean {
|
||||
|
||||
@@ -38,42 +38,17 @@ export function extractLoopIdFromSentinel(sentinelId: string): string | null {
|
||||
|
||||
/**
|
||||
* Parse distribution items from parallel config
|
||||
* Handles: arrays, JSON strings, objects, and references
|
||||
* Note: References (starting with '<') cannot be resolved at DAG construction time,
|
||||
* they must be resolved at runtime. This function returns [] for references.
|
||||
* Handles: arrays, JSON strings, and references
|
||||
*/
|
||||
export function parseDistributionItems(config: SerializedParallel): any[] {
|
||||
const rawItems = config.distribution ?? []
|
||||
|
||||
// Already an array - return as-is
|
||||
if (Array.isArray(rawItems)) {
|
||||
return rawItems
|
||||
if (typeof rawItems === 'string' && rawItems.startsWith(REFERENCE.START)) {
|
||||
return []
|
||||
}
|
||||
|
||||
// Object - convert to entries array (consistent with loop forEach behavior)
|
||||
if (typeof rawItems === 'object' && rawItems !== null) {
|
||||
return Object.entries(rawItems)
|
||||
}
|
||||
|
||||
// String handling
|
||||
if (typeof rawItems === 'string') {
|
||||
// References cannot be resolved at DAG construction time
|
||||
if (rawItems.startsWith(REFERENCE.START) && rawItems.endsWith(REFERENCE.END)) {
|
||||
return []
|
||||
}
|
||||
|
||||
// Try to parse as JSON
|
||||
try {
|
||||
const normalizedJSON = rawItems.replace(/'/g, '"')
|
||||
const parsed = JSON.parse(normalizedJSON)
|
||||
if (Array.isArray(parsed)) {
|
||||
return parsed
|
||||
}
|
||||
// Parsed to non-array (e.g. object) - convert to entries
|
||||
if (typeof parsed === 'object' && parsed !== null) {
|
||||
return Object.entries(parsed)
|
||||
}
|
||||
return []
|
||||
return JSON.parse(normalizedJSON)
|
||||
} catch (error) {
|
||||
logger.error('Failed to parse distribution items', {
|
||||
rawItems,
|
||||
@@ -82,7 +57,12 @@ export function parseDistributionItems(config: SerializedParallel): any[] {
|
||||
return []
|
||||
}
|
||||
}
|
||||
|
||||
if (Array.isArray(rawItems)) {
|
||||
return rawItems
|
||||
}
|
||||
if (typeof rawItems === 'object' && rawItems !== null) {
|
||||
return [rawItems]
|
||||
}
|
||||
return []
|
||||
}
|
||||
/**
|
||||
|
||||
@@ -98,43 +98,16 @@ export class ParallelResolver implements Resolver {
|
||||
return undefined
|
||||
}
|
||||
|
||||
private getDistributionItems(parallelConfig: any): any[] {
|
||||
const rawItems = parallelConfig.distributionItems || parallelConfig.distribution || []
|
||||
|
||||
// Already an array - return as-is
|
||||
if (Array.isArray(rawItems)) {
|
||||
return rawItems
|
||||
}
|
||||
|
||||
// Object - convert to entries array (consistent with loop forEach behavior)
|
||||
if (typeof rawItems === 'object' && rawItems !== null) {
|
||||
return Object.entries(rawItems)
|
||||
}
|
||||
|
||||
// String handling
|
||||
if (typeof rawItems === 'string') {
|
||||
// Skip references - they should be resolved by the variable resolver
|
||||
if (rawItems.startsWith('<')) {
|
||||
return []
|
||||
}
|
||||
|
||||
// Try to parse as JSON
|
||||
private getDistributionItems(parallelConfig: any): any {
|
||||
let distributionItems = parallelConfig.distributionItems || parallelConfig.distribution || []
|
||||
if (typeof distributionItems === 'string' && !distributionItems.startsWith('<')) {
|
||||
try {
|
||||
const parsed = JSON.parse(rawItems.replace(/'/g, '"'))
|
||||
if (Array.isArray(parsed)) {
|
||||
return parsed
|
||||
}
|
||||
// Parsed to non-array (e.g. object) - convert to entries
|
||||
if (typeof parsed === 'object' && parsed !== null) {
|
||||
return Object.entries(parsed)
|
||||
}
|
||||
return []
|
||||
distributionItems = JSON.parse(distributionItems.replace(/'/g, '"'))
|
||||
} catch (e) {
|
||||
logger.error('Failed to parse distribution items', { rawItems })
|
||||
logger.error('Failed to parse distribution items', { distributionItems })
|
||||
return []
|
||||
}
|
||||
}
|
||||
|
||||
return []
|
||||
return distributionItems
|
||||
}
|
||||
}
|
||||
|
||||
@@ -18,9 +18,6 @@ export const copilotKeysKeys = {
|
||||
export interface CopilotKey {
|
||||
id: string
|
||||
displayKey: string // "•••••{last6}"
|
||||
name: string | null
|
||||
createdAt: string | null
|
||||
lastUsed: string | null
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -61,13 +58,6 @@ export function useCopilotKeys() {
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate key params
|
||||
*/
|
||||
interface GenerateKeyParams {
|
||||
name: string
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate new Copilot API key mutation
|
||||
*/
|
||||
@@ -75,13 +65,12 @@ export function useGenerateCopilotKey() {
|
||||
const queryClient = useQueryClient()
|
||||
|
||||
return useMutation({
|
||||
mutationFn: async ({ name }: GenerateKeyParams): Promise<GenerateKeyResponse> => {
|
||||
mutationFn: async (): Promise<GenerateKeyResponse> => {
|
||||
const response = await fetch('/api/copilot/api-keys/generate', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
body: JSON.stringify({ name }),
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
|
||||
@@ -13,11 +13,12 @@ import {
|
||||
oneTimeToken,
|
||||
organization,
|
||||
} from 'better-auth/plugins'
|
||||
import { eq } from 'drizzle-orm'
|
||||
import { and, eq } from 'drizzle-orm'
|
||||
import { headers } from 'next/headers'
|
||||
import Stripe from 'stripe'
|
||||
import {
|
||||
getEmailSubject,
|
||||
renderInvitationEmail,
|
||||
renderOTPEmail,
|
||||
renderPasswordResetEmail,
|
||||
} from '@/components/emails/render-email'
|
||||
@@ -1847,59 +1848,6 @@ export const auth = betterAuth({
|
||||
}
|
||||
},
|
||||
},
|
||||
|
||||
// Zapier AI Actions provider
|
||||
{
|
||||
providerId: 'zapier',
|
||||
clientId: env.ZAPIER_CLIENT_ID as string,
|
||||
clientSecret: env.ZAPIER_CLIENT_SECRET as string,
|
||||
authorizationUrl: 'https://actions.zapier.com/oauth/authorize/',
|
||||
tokenUrl: 'https://actions.zapier.com/oauth/token/',
|
||||
userInfoUrl: 'https://actions.zapier.com/api/v2/check/',
|
||||
scopes: ['openid', 'nla:exposed_actions:execute'],
|
||||
responseType: 'code',
|
||||
pkce: true,
|
||||
accessType: 'offline',
|
||||
prompt: 'consent',
|
||||
redirectURI: `${getBaseUrl()}/api/auth/oauth2/callback/zapier`,
|
||||
getUserInfo: async (tokens) => {
|
||||
try {
|
||||
logger.info('Fetching Zapier user profile')
|
||||
|
||||
// Zapier's check endpoint returns account info when using OAuth
|
||||
const response = await fetch('https://actions.zapier.com/api/v2/check/', {
|
||||
headers: {
|
||||
Authorization: `Bearer ${tokens.accessToken}`,
|
||||
},
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
logger.error('Failed to fetch Zapier user info', {
|
||||
status: response.status,
|
||||
statusText: response.statusText,
|
||||
})
|
||||
throw new Error('Failed to fetch user info')
|
||||
}
|
||||
|
||||
const data = await response.json()
|
||||
|
||||
// Zapier check endpoint returns account_id and other info
|
||||
const userId = data.account_id || data.user_id || `zapier-${Date.now()}`
|
||||
|
||||
return {
|
||||
id: userId.toString(),
|
||||
name: data.email || 'Zapier User',
|
||||
email: data.email || `${userId}@zapier.user`,
|
||||
emailVerified: !!data.email,
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error('Error in Zapier getUserInfo:', { error })
|
||||
return null
|
||||
}
|
||||
},
|
||||
},
|
||||
],
|
||||
}),
|
||||
// Include SSO plugin when enabled
|
||||
@@ -2120,6 +2068,79 @@ export const auth = betterAuth({
|
||||
|
||||
return hasTeamPlan
|
||||
},
|
||||
// Set a fixed membership limit of 50, but the actual limit will be enforced in the invitation flow
|
||||
membershipLimit: 50,
|
||||
// Validate seat limits before sending invitations
|
||||
beforeInvite: async ({ organization }: { organization: { id: string } }) => {
|
||||
const subscriptions = await db
|
||||
.select()
|
||||
.from(schema.subscription)
|
||||
.where(
|
||||
and(
|
||||
eq(schema.subscription.referenceId, organization.id),
|
||||
eq(schema.subscription.status, 'active')
|
||||
)
|
||||
)
|
||||
|
||||
const teamOrEnterpriseSubscription = subscriptions.find(
|
||||
(sub) => sub.plan === 'team' || sub.plan === 'enterprise'
|
||||
)
|
||||
|
||||
if (!teamOrEnterpriseSubscription) {
|
||||
throw new Error('No active team or enterprise subscription for this organization')
|
||||
}
|
||||
|
||||
const members = await db
|
||||
.select()
|
||||
.from(schema.member)
|
||||
.where(eq(schema.member.organizationId, organization.id))
|
||||
|
||||
const pendingInvites = await db
|
||||
.select()
|
||||
.from(schema.invitation)
|
||||
.where(
|
||||
and(
|
||||
eq(schema.invitation.organizationId, organization.id),
|
||||
eq(schema.invitation.status, 'pending')
|
||||
)
|
||||
)
|
||||
|
||||
const totalCount = members.length + pendingInvites.length
|
||||
const seatLimit = teamOrEnterpriseSubscription.seats || 1
|
||||
|
||||
if (totalCount >= seatLimit) {
|
||||
throw new Error(`Organization has reached its seat limit of ${seatLimit}`)
|
||||
}
|
||||
},
|
||||
sendInvitationEmail: async (data: any) => {
|
||||
try {
|
||||
const { invitation, organization, inviter } = data
|
||||
|
||||
const inviteUrl = `${getBaseUrl()}/invite/${invitation.id}`
|
||||
const inviterName = inviter.user?.name || 'A team member'
|
||||
|
||||
const html = await renderInvitationEmail(
|
||||
inviterName,
|
||||
organization.name,
|
||||
inviteUrl,
|
||||
invitation.email
|
||||
)
|
||||
|
||||
const result = await sendEmail({
|
||||
to: invitation.email,
|
||||
subject: `${inviterName} has invited you to join ${organization.name} on Sim`,
|
||||
html,
|
||||
from: getFromEmailAddress(),
|
||||
emailType: 'transactional',
|
||||
})
|
||||
|
||||
if (!result.success) {
|
||||
logger.error('Failed to send organization invitation email:', result.message)
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error('Error sending invitation email', { error })
|
||||
}
|
||||
},
|
||||
organizationCreation: {
|
||||
afterCreate: async ({ organization, user }) => {
|
||||
logger.info('[organizationCreation.afterCreate] Organization created', {
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import { db } from '@sim/db'
|
||||
import { member, organization, userStats } from '@sim/db/schema'
|
||||
import { and, eq, inArray } from 'drizzle-orm'
|
||||
import { getOrganizationSubscription, getPlanPricing } from '@/lib/billing/core/billing'
|
||||
import { getUserUsageLimit } from '@/lib/billing/core/usage'
|
||||
import { isBillingEnabled } from '@/lib/core/config/environment'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
@@ -107,10 +108,19 @@ export async function checkUsageStatus(userId: string): Promise<UsageData> {
|
||||
)
|
||||
}
|
||||
}
|
||||
// Determine org cap from orgUsageLimit (should always be set for team/enterprise)
|
||||
const orgCap = org.orgUsageLimit ? Number.parseFloat(String(org.orgUsageLimit)) : 0
|
||||
// Determine org cap
|
||||
let orgCap = org.orgUsageLimit ? Number.parseFloat(String(org.orgUsageLimit)) : 0
|
||||
if (!orgCap || Number.isNaN(orgCap)) {
|
||||
logger.warn('Organization missing usage limit', { orgId: org.id })
|
||||
// Fall back to minimum billing amount from Stripe subscription
|
||||
const orgSub = await getOrganizationSubscription(org.id)
|
||||
if (orgSub?.seats) {
|
||||
const { basePrice } = getPlanPricing(orgSub.plan)
|
||||
orgCap = (orgSub.seats ?? 0) * basePrice
|
||||
} else {
|
||||
// If no subscription, use team default
|
||||
const { basePrice } = getPlanPricing('team')
|
||||
orgCap = basePrice // Default to 1 seat minimum
|
||||
}
|
||||
}
|
||||
if (pooledUsage >= orgCap) {
|
||||
isExceeded = true
|
||||
|
||||
@@ -22,56 +22,6 @@ import { getEmailPreferences } from '@/lib/messaging/email/unsubscribe'
|
||||
|
||||
const logger = createLogger('UsageManagement')
|
||||
|
||||
export interface OrgUsageLimitResult {
|
||||
limit: number
|
||||
minimum: number
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculates the effective usage limit for a team or enterprise organization.
|
||||
* - Enterprise: Uses orgUsageLimit directly (fixed pricing)
|
||||
* - Team: Uses orgUsageLimit but never below seats × basePrice
|
||||
*/
|
||||
export async function getOrgUsageLimit(
|
||||
organizationId: string,
|
||||
plan: string,
|
||||
seats: number | null
|
||||
): Promise<OrgUsageLimitResult> {
|
||||
const orgData = await db
|
||||
.select({ orgUsageLimit: organization.orgUsageLimit })
|
||||
.from(organization)
|
||||
.where(eq(organization.id, organizationId))
|
||||
.limit(1)
|
||||
|
||||
const configured =
|
||||
orgData.length > 0 && orgData[0].orgUsageLimit
|
||||
? Number.parseFloat(orgData[0].orgUsageLimit)
|
||||
: null
|
||||
|
||||
if (plan === 'enterprise') {
|
||||
// Enterprise: Use configured limit directly (no per-seat minimum)
|
||||
if (configured !== null) {
|
||||
return { limit: configured, minimum: configured }
|
||||
}
|
||||
logger.warn('Enterprise org missing usage limit', { orgId: organizationId })
|
||||
return { limit: 0, minimum: 0 }
|
||||
}
|
||||
|
||||
const { basePrice } = getPlanPricing(plan)
|
||||
const minimum = (seats ?? 0) * basePrice
|
||||
|
||||
if (configured !== null) {
|
||||
return { limit: Math.max(configured, minimum), minimum }
|
||||
}
|
||||
|
||||
logger.warn('Team org missing usage limit, using seats × basePrice fallback', {
|
||||
orgId: organizationId,
|
||||
seats,
|
||||
minimum,
|
||||
})
|
||||
return { limit: minimum, minimum }
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle new user setup when they join the platform
|
||||
* Creates userStats record with default free credits
|
||||
@@ -137,13 +87,22 @@ export async function getUserUsageData(userId: string): Promise<UsageData> {
|
||||
? Number.parseFloat(stats.currentUsageLimit)
|
||||
: getFreeTierLimit()
|
||||
} else {
|
||||
// Team/Enterprise: Use organization limit
|
||||
const orgLimit = await getOrgUsageLimit(
|
||||
subscription.referenceId,
|
||||
subscription.plan,
|
||||
subscription.seats
|
||||
)
|
||||
limit = orgLimit.limit
|
||||
// Team/Enterprise: Use organization limit but never below minimum (seats × cost per seat)
|
||||
const orgData = await db
|
||||
.select({ orgUsageLimit: organization.orgUsageLimit })
|
||||
.from(organization)
|
||||
.where(eq(organization.id, subscription.referenceId))
|
||||
.limit(1)
|
||||
|
||||
const { basePrice } = getPlanPricing(subscription.plan)
|
||||
const minimum = (subscription.seats ?? 0) * basePrice
|
||||
|
||||
if (orgData.length > 0 && orgData[0].orgUsageLimit) {
|
||||
const configured = Number.parseFloat(orgData[0].orgUsageLimit)
|
||||
limit = Math.max(configured, minimum)
|
||||
} else {
|
||||
limit = minimum
|
||||
}
|
||||
}
|
||||
|
||||
const percentUsed = limit > 0 ? Math.min((currentUsage / limit) * 100, 100) : 0
|
||||
@@ -200,15 +159,24 @@ export async function getUserUsageLimitInfo(userId: string): Promise<UsageLimitI
|
||||
minimumLimit = getPerUserMinimumLimit(subscription)
|
||||
canEdit = canEditUsageLimit(subscription)
|
||||
} else {
|
||||
// Team/Enterprise: Use organization limits
|
||||
const orgLimit = await getOrgUsageLimit(
|
||||
subscription.referenceId,
|
||||
subscription.plan,
|
||||
subscription.seats
|
||||
)
|
||||
currentLimit = orgLimit.limit
|
||||
minimumLimit = orgLimit.minimum
|
||||
canEdit = false
|
||||
// Team/Enterprise: Use organization limits (users cannot edit)
|
||||
const orgData = await db
|
||||
.select({ orgUsageLimit: organization.orgUsageLimit })
|
||||
.from(organization)
|
||||
.where(eq(organization.id, subscription.referenceId))
|
||||
.limit(1)
|
||||
|
||||
const { basePrice } = getPlanPricing(subscription.plan)
|
||||
const minimum = (subscription.seats ?? 0) * basePrice
|
||||
|
||||
if (orgData.length > 0 && orgData[0].orgUsageLimit) {
|
||||
const configured = Number.parseFloat(orgData[0].orgUsageLimit)
|
||||
currentLimit = Math.max(configured, minimum)
|
||||
} else {
|
||||
currentLimit = minimum
|
||||
}
|
||||
minimumLimit = minimum
|
||||
canEdit = false // Team/enterprise members cannot edit limits
|
||||
}
|
||||
|
||||
return {
|
||||
@@ -355,23 +323,27 @@ export async function getUserUsageLimit(userId: string): Promise<number> {
|
||||
|
||||
return Number.parseFloat(userStatsQuery[0].currentUsageLimit)
|
||||
}
|
||||
// Team/Enterprise: Verify org exists then use organization limit
|
||||
const orgExists = await db
|
||||
.select({ id: organization.id })
|
||||
// Team/Enterprise: Use organization limit but never below minimum
|
||||
const orgData = await db
|
||||
.select({ orgUsageLimit: organization.orgUsageLimit })
|
||||
.from(organization)
|
||||
.where(eq(organization.id, subscription.referenceId))
|
||||
.limit(1)
|
||||
|
||||
if (orgExists.length === 0) {
|
||||
if (orgData.length === 0) {
|
||||
throw new Error(`Organization not found: ${subscription.referenceId} for user: ${userId}`)
|
||||
}
|
||||
|
||||
const orgLimit = await getOrgUsageLimit(
|
||||
subscription.referenceId,
|
||||
subscription.plan,
|
||||
subscription.seats
|
||||
)
|
||||
return orgLimit.limit
|
||||
if (orgData[0].orgUsageLimit) {
|
||||
const configured = Number.parseFloat(orgData[0].orgUsageLimit)
|
||||
const { basePrice } = getPlanPricing(subscription.plan)
|
||||
const minimum = (subscription.seats ?? 0) * basePrice
|
||||
return Math.max(configured, minimum)
|
||||
}
|
||||
|
||||
// If org hasn't set a custom limit, use minimum (seats × cost per seat)
|
||||
const { basePrice } = getPlanPricing(subscription.plan)
|
||||
return (subscription.seats ?? 0) * basePrice
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
import { db } from '@sim/db'
|
||||
import * as schema from '@sim/db/schema'
|
||||
import { and, eq } from 'drizzle-orm'
|
||||
import { getPlanPricing } from '@/lib/billing/core/billing'
|
||||
import { syncUsageLimitsFromSubscription } from '@/lib/billing/core/usage'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
|
||||
@@ -146,52 +145,11 @@ export async function syncSubscriptionUsageLimits(subscription: SubscriptionData
|
||||
plan: subscription.plan,
|
||||
})
|
||||
} else {
|
||||
// Organization subscription - set org usage limit and sync member limits
|
||||
const organizationId = subscription.referenceId
|
||||
|
||||
// Set orgUsageLimit for team plans (enterprise is set via webhook with custom pricing)
|
||||
if (subscription.plan === 'team') {
|
||||
const { basePrice } = getPlanPricing(subscription.plan)
|
||||
const seats = subscription.seats ?? 1
|
||||
const orgLimit = seats * basePrice
|
||||
|
||||
// Only set if not already set or if updating to a higher value based on seats
|
||||
const orgData = await db
|
||||
.select({ orgUsageLimit: schema.organization.orgUsageLimit })
|
||||
.from(schema.organization)
|
||||
.where(eq(schema.organization.id, organizationId))
|
||||
.limit(1)
|
||||
|
||||
const currentLimit =
|
||||
orgData.length > 0 && orgData[0].orgUsageLimit
|
||||
? Number.parseFloat(orgData[0].orgUsageLimit)
|
||||
: 0
|
||||
|
||||
// Update if no limit set, or if new seat-based minimum is higher
|
||||
if (currentLimit < orgLimit) {
|
||||
await db
|
||||
.update(schema.organization)
|
||||
.set({
|
||||
orgUsageLimit: orgLimit.toFixed(2),
|
||||
updatedAt: new Date(),
|
||||
})
|
||||
.where(eq(schema.organization.id, organizationId))
|
||||
|
||||
logger.info('Set organization usage limit for team plan', {
|
||||
organizationId,
|
||||
seats,
|
||||
basePrice,
|
||||
orgLimit,
|
||||
previousLimit: currentLimit,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// Sync usage limits for all members
|
||||
// Organization subscription - sync usage limits for all members
|
||||
const members = await db
|
||||
.select({ userId: schema.member.userId })
|
||||
.from(schema.member)
|
||||
.where(eq(schema.member.organizationId, organizationId))
|
||||
.where(eq(schema.member.organizationId, subscription.referenceId))
|
||||
|
||||
if (members.length > 0) {
|
||||
for (const member of members) {
|
||||
@@ -200,7 +158,7 @@ export async function syncSubscriptionUsageLimits(subscription: SubscriptionData
|
||||
} catch (memberError) {
|
||||
logger.error('Failed to sync usage limits for organization member', {
|
||||
userId: member.userId,
|
||||
organizationId,
|
||||
organizationId: subscription.referenceId,
|
||||
subscriptionId: subscription.id,
|
||||
error: memberError,
|
||||
})
|
||||
@@ -208,7 +166,7 @@ export async function syncSubscriptionUsageLimits(subscription: SubscriptionData
|
||||
}
|
||||
|
||||
logger.info('Synced usage limits for organization members', {
|
||||
organizationId,
|
||||
organizationId: subscription.referenceId,
|
||||
memberCount: members.length,
|
||||
subscriptionId: subscription.id,
|
||||
plan: subscription.plan,
|
||||
|
||||
@@ -31,7 +31,6 @@ export const ToolIds = z.enum([
|
||||
'check_deployment_status',
|
||||
'navigate_ui',
|
||||
'knowledge_base',
|
||||
'manage_custom_tool',
|
||||
])
|
||||
export type ToolId = z.infer<typeof ToolIds>
|
||||
|
||||
@@ -188,45 +187,6 @@ export const ToolArgSchemas = {
|
||||
}),
|
||||
|
||||
knowledge_base: KnowledgeBaseArgsSchema,
|
||||
|
||||
manage_custom_tool: z.object({
|
||||
operation: z
|
||||
.enum(['add', 'edit', 'delete'])
|
||||
.describe('The operation to perform: add (create new), edit (update existing), or delete'),
|
||||
toolId: z
|
||||
.string()
|
||||
.optional()
|
||||
.describe(
|
||||
'Required for edit and delete operations. The database ID of the custom tool (e.g., "0robnW7_JUVwZrDkq1mqj"). Use get_workflow_data with data_type "custom_tools" to get the list of tools and their IDs. Do NOT use the function name - use the actual "id" field from the tool.'
|
||||
),
|
||||
title: z
|
||||
.string()
|
||||
.optional()
|
||||
.describe(
|
||||
'The display title of the custom tool. Required for add. Should always be provided for edit/delete so the user knows which tool is being modified.'
|
||||
),
|
||||
schema: z
|
||||
.object({
|
||||
type: z.literal('function'),
|
||||
function: z.object({
|
||||
name: z.string().describe('The function name (camelCase, e.g. getWeather)'),
|
||||
description: z.string().optional().describe('What the function does'),
|
||||
parameters: z.object({
|
||||
type: z.string(),
|
||||
properties: z.record(z.any()),
|
||||
required: z.array(z.string()).optional(),
|
||||
}),
|
||||
}),
|
||||
})
|
||||
.optional()
|
||||
.describe('Required for add. The OpenAI function calling format schema.'),
|
||||
code: z
|
||||
.string()
|
||||
.optional()
|
||||
.describe(
|
||||
'Required for add. The JavaScript function body code. Use {{ENV_VAR}} for environment variables and reference parameters directly by name.'
|
||||
),
|
||||
}),
|
||||
} as const
|
||||
export type ToolArgSchemaMap = typeof ToolArgSchemas
|
||||
|
||||
@@ -291,7 +251,6 @@ export const ToolSSESchemas = {
|
||||
),
|
||||
navigate_ui: toolCallSSEFor('navigate_ui', ToolArgSchemas.navigate_ui),
|
||||
knowledge_base: toolCallSSEFor('knowledge_base', ToolArgSchemas.knowledge_base),
|
||||
manage_custom_tool: toolCallSSEFor('manage_custom_tool', ToolArgSchemas.manage_custom_tool),
|
||||
} as const
|
||||
export type ToolSSESchemaMap = typeof ToolSSESchemas
|
||||
|
||||
@@ -512,13 +471,6 @@ export const ToolResultSchemas = {
|
||||
navigated: z.boolean(),
|
||||
}),
|
||||
knowledge_base: KnowledgeBaseResultSchema,
|
||||
manage_custom_tool: z.object({
|
||||
success: z.boolean(),
|
||||
operation: z.enum(['add', 'edit', 'delete']),
|
||||
toolId: z.string().optional(),
|
||||
title: z.string().optional(),
|
||||
message: z.string().optional(),
|
||||
}),
|
||||
} as const
|
||||
export type ToolResultSchemaMap = typeof ToolResultSchemas
|
||||
|
||||
|
||||
@@ -4,12 +4,6 @@ import { createLogger } from '@/lib/logs/console/logger'
|
||||
|
||||
const baseToolLogger = createLogger('BaseClientTool')
|
||||
|
||||
/** Default timeout for tool execution (5 minutes) */
|
||||
const DEFAULT_TOOL_TIMEOUT_MS = 2 * 60 * 1000
|
||||
|
||||
/** Timeout for tools that run workflows (10 minutes) */
|
||||
export const WORKFLOW_EXECUTION_TIMEOUT_MS = 10 * 60 * 1000
|
||||
|
||||
// Client tool call states used by the new runtime
|
||||
export enum ClientToolCallState {
|
||||
generating = 'generating',
|
||||
@@ -58,8 +52,6 @@ export class BaseClientTool {
|
||||
readonly name: string
|
||||
protected state: ClientToolCallState
|
||||
protected metadata: BaseClientToolMetadata
|
||||
protected isMarkedComplete = false
|
||||
protected timeoutMs: number = DEFAULT_TOOL_TIMEOUT_MS
|
||||
|
||||
constructor(toolCallId: string, name: string, metadata: BaseClientToolMetadata) {
|
||||
this.toolCallId = toolCallId
|
||||
@@ -68,98 +60,14 @@ export class BaseClientTool {
|
||||
this.state = ClientToolCallState.generating
|
||||
}
|
||||
|
||||
/**
|
||||
* Set a custom timeout for this tool (in milliseconds)
|
||||
*/
|
||||
setTimeoutMs(ms: number): void {
|
||||
this.timeoutMs = ms
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if this tool has been marked complete
|
||||
*/
|
||||
hasBeenMarkedComplete(): boolean {
|
||||
return this.isMarkedComplete
|
||||
}
|
||||
|
||||
/**
|
||||
* Ensure the tool is marked complete. If not already marked, marks it with error.
|
||||
* This should be called in finally blocks to prevent leaked tool calls.
|
||||
*/
|
||||
async ensureMarkedComplete(
|
||||
fallbackMessage = 'Tool execution did not complete properly'
|
||||
): Promise<void> {
|
||||
if (!this.isMarkedComplete) {
|
||||
baseToolLogger.warn('Tool was not marked complete, marking with error', {
|
||||
toolCallId: this.toolCallId,
|
||||
toolName: this.name,
|
||||
state: this.state,
|
||||
})
|
||||
await this.markToolComplete(500, fallbackMessage)
|
||||
this.setState(ClientToolCallState.error)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Execute with timeout protection. Wraps the execution in a timeout and ensures
|
||||
* markToolComplete is always called.
|
||||
*/
|
||||
async executeWithTimeout(executeFn: () => Promise<void>, timeoutMs?: number): Promise<void> {
|
||||
const timeout = timeoutMs ?? this.timeoutMs
|
||||
let timeoutId: NodeJS.Timeout | null = null
|
||||
|
||||
try {
|
||||
await Promise.race([
|
||||
executeFn(),
|
||||
new Promise<never>((_, reject) => {
|
||||
timeoutId = setTimeout(() => {
|
||||
reject(new Error(`Tool execution timed out after ${timeout / 1000} seconds`))
|
||||
}, timeout)
|
||||
}),
|
||||
])
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : String(error)
|
||||
baseToolLogger.error('Tool execution failed or timed out', {
|
||||
toolCallId: this.toolCallId,
|
||||
toolName: this.name,
|
||||
error: message,
|
||||
})
|
||||
// Only mark complete if not already marked
|
||||
if (!this.isMarkedComplete) {
|
||||
await this.markToolComplete(500, message)
|
||||
this.setState(ClientToolCallState.error)
|
||||
}
|
||||
} finally {
|
||||
if (timeoutId) clearTimeout(timeoutId)
|
||||
// Ensure tool is always marked complete
|
||||
await this.ensureMarkedComplete()
|
||||
}
|
||||
}
|
||||
|
||||
// Intentionally left empty - specific tools can override
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
||||
async execute(_args?: Record<string, any>): Promise<void> {
|
||||
return
|
||||
}
|
||||
|
||||
/**
|
||||
* Mark a tool as complete on the server (proxies to server-side route).
|
||||
* Once called, the tool is considered complete and won't be marked again.
|
||||
*/
|
||||
// Mark a tool as complete on the server (proxies to server-side route)
|
||||
async markToolComplete(status: number, message?: any, data?: any): Promise<boolean> {
|
||||
// Prevent double-marking
|
||||
if (this.isMarkedComplete) {
|
||||
baseToolLogger.warn('markToolComplete called but tool already marked complete', {
|
||||
toolCallId: this.toolCallId,
|
||||
toolName: this.name,
|
||||
existingState: this.state,
|
||||
attemptedStatus: status,
|
||||
})
|
||||
return true
|
||||
}
|
||||
|
||||
this.isMarkedComplete = true
|
||||
|
||||
try {
|
||||
baseToolLogger.info('markToolComplete called', {
|
||||
toolCallId: this.toolCallId,
|
||||
@@ -170,7 +78,6 @@ export class BaseClientTool {
|
||||
hasData: data !== undefined,
|
||||
})
|
||||
} catch {}
|
||||
|
||||
try {
|
||||
const res = await fetch('/api/copilot/tools/mark-complete', {
|
||||
method: 'POST',
|
||||
@@ -197,11 +104,7 @@ export class BaseClientTool {
|
||||
const json = (await res.json()) as { success?: boolean }
|
||||
return json?.success === true
|
||||
} catch (e) {
|
||||
// Default failure path - but tool is still marked complete locally
|
||||
baseToolLogger.error('Failed to mark tool complete on server', {
|
||||
toolCallId: this.toolCallId,
|
||||
error: e instanceof Error ? e.message : String(e),
|
||||
})
|
||||
// Default failure path
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
@@ -197,15 +197,9 @@ export class EditWorkflowClientTool extends BaseClientTool {
|
||||
|
||||
async execute(args?: EditWorkflowArgs): Promise<void> {
|
||||
const logger = createLogger('EditWorkflowClientTool')
|
||||
|
||||
// Use timeout protection to ensure tool always completes
|
||||
await this.executeWithTimeout(async () => {
|
||||
try {
|
||||
if (this.hasExecuted) {
|
||||
logger.info('execute skipped (already executed)', { toolCallId: this.toolCallId })
|
||||
// Even if skipped, ensure we mark complete
|
||||
if (!this.hasBeenMarkedComplete()) {
|
||||
await this.markToolComplete(200, 'Tool already executed')
|
||||
}
|
||||
return
|
||||
}
|
||||
this.hasExecuted = true
|
||||
@@ -258,136 +252,137 @@ export class EditWorkflowClientTool extends BaseClientTool {
|
||||
}
|
||||
}
|
||||
|
||||
// Fetch with AbortController for timeout support
|
||||
const controller = new AbortController()
|
||||
const fetchTimeout = setTimeout(() => controller.abort(), 60000) // 60s fetch timeout
|
||||
|
||||
try {
|
||||
const res = await fetch('/api/copilot/execute-copilot-server-tool', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({
|
||||
toolName: 'edit_workflow',
|
||||
payload: {
|
||||
operations,
|
||||
workflowId,
|
||||
...(currentUserWorkflow ? { currentUserWorkflow } : {}),
|
||||
},
|
||||
}),
|
||||
signal: controller.signal,
|
||||
})
|
||||
|
||||
clearTimeout(fetchTimeout)
|
||||
|
||||
if (!res.ok) {
|
||||
const errorText = await res.text().catch(() => '')
|
||||
try {
|
||||
const errorJson = JSON.parse(errorText)
|
||||
throw new Error(errorJson.error || errorText || `Server error (${res.status})`)
|
||||
} catch {
|
||||
throw new Error(errorText || `Server error (${res.status})`)
|
||||
}
|
||||
const res = await fetch('/api/copilot/execute-copilot-server-tool', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({
|
||||
toolName: 'edit_workflow',
|
||||
payload: {
|
||||
operations,
|
||||
workflowId,
|
||||
...(currentUserWorkflow ? { currentUserWorkflow } : {}),
|
||||
},
|
||||
}),
|
||||
})
|
||||
if (!res.ok) {
|
||||
const errorText = await res.text().catch(() => '')
|
||||
try {
|
||||
const errorJson = JSON.parse(errorText)
|
||||
throw new Error(errorJson.error || errorText || `Server error (${res.status})`)
|
||||
} catch {
|
||||
throw new Error(errorText || `Server error (${res.status})`)
|
||||
}
|
||||
|
||||
const json = await res.json()
|
||||
const parsed = ExecuteResponseSuccessSchema.parse(json)
|
||||
const result = parsed.result as any
|
||||
this.lastResult = result
|
||||
logger.info('server result parsed', {
|
||||
hasWorkflowState: !!result?.workflowState,
|
||||
blocksCount: result?.workflowState
|
||||
? Object.keys(result.workflowState.blocks || {}).length
|
||||
: 0,
|
||||
hasSkippedItems: !!result?.skippedItems,
|
||||
skippedItemsCount: result?.skippedItems?.length || 0,
|
||||
hasInputValidationErrors: !!result?.inputValidationErrors,
|
||||
inputValidationErrorsCount: result?.inputValidationErrors?.length || 0,
|
||||
})
|
||||
|
||||
// Log skipped items and validation errors for visibility
|
||||
if (result?.skippedItems?.length > 0) {
|
||||
logger.warn('Some operations were skipped during edit_workflow', {
|
||||
skippedItems: result.skippedItems,
|
||||
})
|
||||
}
|
||||
if (result?.inputValidationErrors?.length > 0) {
|
||||
logger.warn('Some inputs were rejected during edit_workflow', {
|
||||
inputValidationErrors: result.inputValidationErrors,
|
||||
})
|
||||
}
|
||||
|
||||
// Update diff directly with workflow state - no YAML conversion needed!
|
||||
if (!result.workflowState) {
|
||||
throw new Error('No workflow state returned from server')
|
||||
}
|
||||
|
||||
let actualDiffWorkflow: WorkflowState | null = null
|
||||
|
||||
if (!this.hasAppliedDiff) {
|
||||
const diffStore = useWorkflowDiffStore.getState()
|
||||
// setProposedChanges applies the state optimistically to the workflow store
|
||||
await diffStore.setProposedChanges(result.workflowState)
|
||||
logger.info('diff proposed changes set for edit_workflow with direct workflow state')
|
||||
this.hasAppliedDiff = true
|
||||
}
|
||||
|
||||
// Read back the applied state from the workflow store
|
||||
const workflowStore = useWorkflowStore.getState()
|
||||
actualDiffWorkflow = workflowStore.getWorkflowState()
|
||||
|
||||
if (!actualDiffWorkflow) {
|
||||
throw new Error('Failed to retrieve workflow state after applying changes')
|
||||
}
|
||||
|
||||
// Get the workflow state that was just applied, merge subblocks, and sanitize
|
||||
// This matches what get_user_workflow would return (the true state after edits were applied)
|
||||
const workflowJson = this.getSanitizedWorkflowJson(actualDiffWorkflow)
|
||||
|
||||
// Build sanitized data including workflow JSON and any skipped/validation info
|
||||
const sanitizedData: Record<string, any> = {}
|
||||
if (workflowJson) {
|
||||
sanitizedData.userWorkflow = workflowJson
|
||||
}
|
||||
|
||||
// Include skipped items and validation errors in the response for LLM feedback
|
||||
if (result?.skippedItems?.length > 0) {
|
||||
sanitizedData.skippedItems = result.skippedItems
|
||||
sanitizedData.skippedItemsMessage = result.skippedItemsMessage
|
||||
}
|
||||
if (result?.inputValidationErrors?.length > 0) {
|
||||
sanitizedData.inputValidationErrors = result.inputValidationErrors
|
||||
sanitizedData.inputValidationMessage = result.inputValidationMessage
|
||||
}
|
||||
|
||||
// Build a message that includes info about skipped items
|
||||
let completeMessage = 'Workflow diff ready for review'
|
||||
if (result?.skippedItems?.length > 0 || result?.inputValidationErrors?.length > 0) {
|
||||
const parts: string[] = []
|
||||
if (result?.skippedItems?.length > 0) {
|
||||
parts.push(`${result.skippedItems.length} operation(s) skipped`)
|
||||
}
|
||||
if (result?.inputValidationErrors?.length > 0) {
|
||||
parts.push(`${result.inputValidationErrors.length} input(s) rejected`)
|
||||
}
|
||||
completeMessage = `Workflow diff ready for review. Note: ${parts.join(', ')}.`
|
||||
}
|
||||
|
||||
// Mark complete early to unblock LLM stream
|
||||
await this.markToolComplete(
|
||||
200,
|
||||
completeMessage,
|
||||
Object.keys(sanitizedData).length > 0 ? sanitizedData : undefined
|
||||
)
|
||||
|
||||
// Move into review state
|
||||
this.setState(ClientToolCallState.review, { result })
|
||||
} catch (fetchError: any) {
|
||||
clearTimeout(fetchTimeout)
|
||||
if (fetchError.name === 'AbortError') {
|
||||
throw new Error('Server request timed out')
|
||||
}
|
||||
throw fetchError
|
||||
}
|
||||
})
|
||||
|
||||
const json = await res.json()
|
||||
const parsed = ExecuteResponseSuccessSchema.parse(json)
|
||||
const result = parsed.result as any
|
||||
this.lastResult = result
|
||||
logger.info('server result parsed', {
|
||||
hasWorkflowState: !!result?.workflowState,
|
||||
blocksCount: result?.workflowState
|
||||
? Object.keys(result.workflowState.blocks || {}).length
|
||||
: 0,
|
||||
hasSkippedItems: !!result?.skippedItems,
|
||||
skippedItemsCount: result?.skippedItems?.length || 0,
|
||||
hasInputValidationErrors: !!result?.inputValidationErrors,
|
||||
inputValidationErrorsCount: result?.inputValidationErrors?.length || 0,
|
||||
})
|
||||
|
||||
// Log skipped items and validation errors for visibility
|
||||
if (result?.skippedItems?.length > 0) {
|
||||
logger.warn('Some operations were skipped during edit_workflow', {
|
||||
skippedItems: result.skippedItems,
|
||||
})
|
||||
}
|
||||
if (result?.inputValidationErrors?.length > 0) {
|
||||
logger.warn('Some inputs were rejected during edit_workflow', {
|
||||
inputValidationErrors: result.inputValidationErrors,
|
||||
})
|
||||
}
|
||||
|
||||
// Update diff directly with workflow state - no YAML conversion needed!
|
||||
// The diff engine may transform the workflow state (e.g., assign new IDs), so we must use
|
||||
// the returned proposedState rather than the original result.workflowState
|
||||
let actualDiffWorkflow: WorkflowState | null = null
|
||||
|
||||
if (result.workflowState) {
|
||||
try {
|
||||
if (!this.hasAppliedDiff) {
|
||||
const diffStore = useWorkflowDiffStore.getState()
|
||||
// setProposedChanges applies the state directly to the workflow store
|
||||
await diffStore.setProposedChanges(result.workflowState)
|
||||
logger.info('diff proposed changes set for edit_workflow with direct workflow state')
|
||||
this.hasAppliedDiff = true
|
||||
|
||||
// Read back the applied state from the workflow store
|
||||
const workflowStore = useWorkflowStore.getState()
|
||||
actualDiffWorkflow = workflowStore.getWorkflowState()
|
||||
} else {
|
||||
logger.info('skipping diff apply (already applied)')
|
||||
// If we already applied, read from workflow store
|
||||
const workflowStore = useWorkflowStore.getState()
|
||||
actualDiffWorkflow = workflowStore.getWorkflowState()
|
||||
}
|
||||
} catch (e) {
|
||||
logger.warn('Failed to set proposed changes in diff store', e as any)
|
||||
throw new Error('Failed to create workflow diff')
|
||||
}
|
||||
} else {
|
||||
throw new Error('No workflow state returned from server')
|
||||
}
|
||||
|
||||
if (!actualDiffWorkflow) {
|
||||
throw new Error('Failed to retrieve workflow from diff store after setting changes')
|
||||
}
|
||||
|
||||
// Get the workflow state that was just applied, merge subblocks, and sanitize
|
||||
// This matches what get_user_workflow would return (the true state after edits were applied)
|
||||
const workflowJson = this.getSanitizedWorkflowJson(actualDiffWorkflow)
|
||||
|
||||
// Build sanitized data including workflow JSON and any skipped/validation info
|
||||
const sanitizedData: Record<string, any> = {}
|
||||
if (workflowJson) {
|
||||
sanitizedData.userWorkflow = workflowJson
|
||||
}
|
||||
|
||||
// Include skipped items and validation errors in the response for LLM feedback
|
||||
if (result?.skippedItems?.length > 0) {
|
||||
sanitizedData.skippedItems = result.skippedItems
|
||||
sanitizedData.skippedItemsMessage = result.skippedItemsMessage
|
||||
}
|
||||
if (result?.inputValidationErrors?.length > 0) {
|
||||
sanitizedData.inputValidationErrors = result.inputValidationErrors
|
||||
sanitizedData.inputValidationMessage = result.inputValidationMessage
|
||||
}
|
||||
|
||||
// Build a message that includes info about skipped items
|
||||
let completeMessage = 'Workflow diff ready for review'
|
||||
if (result?.skippedItems?.length > 0 || result?.inputValidationErrors?.length > 0) {
|
||||
const parts: string[] = []
|
||||
if (result?.skippedItems?.length > 0) {
|
||||
parts.push(`${result.skippedItems.length} operation(s) skipped`)
|
||||
}
|
||||
if (result?.inputValidationErrors?.length > 0) {
|
||||
parts.push(`${result.inputValidationErrors.length} input(s) rejected`)
|
||||
}
|
||||
completeMessage = `Workflow diff ready for review. Note: ${parts.join(', ')}.`
|
||||
}
|
||||
|
||||
// Mark complete early to unblock LLM stream
|
||||
await this.markToolComplete(
|
||||
200,
|
||||
completeMessage,
|
||||
Object.keys(sanitizedData).length > 0 ? sanitizedData : undefined
|
||||
)
|
||||
|
||||
// Move into review state
|
||||
this.setState(ClientToolCallState.review, { result })
|
||||
} catch (error: any) {
|
||||
const message = error instanceof Error ? error.message : String(error)
|
||||
logger.error('execute error', { message })
|
||||
await this.markToolComplete(500, message)
|
||||
this.setState(ClientToolCallState.error)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,387 +0,0 @@
|
||||
import { Check, Loader2, Plus, X, XCircle } from 'lucide-react'
|
||||
import {
|
||||
BaseClientTool,
|
||||
type BaseClientToolMetadata,
|
||||
ClientToolCallState,
|
||||
} from '@/lib/copilot/tools/client/base-tool'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { useCustomToolsStore } from '@/stores/custom-tools/store'
|
||||
import { useCopilotStore } from '@/stores/panel/copilot/store'
|
||||
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
||||
|
||||
interface CustomToolSchema {
|
||||
type: 'function'
|
||||
function: {
|
||||
name: string
|
||||
description?: string
|
||||
parameters: {
|
||||
type: string
|
||||
properties: Record<string, any>
|
||||
required?: string[]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
interface ManageCustomToolArgs {
|
||||
operation: 'add' | 'edit' | 'delete'
|
||||
toolId?: string
|
||||
title?: string
|
||||
schema?: CustomToolSchema
|
||||
code?: string
|
||||
}
|
||||
|
||||
const API_ENDPOINT = '/api/tools/custom'
|
||||
|
||||
/**
|
||||
* Client tool for creating, editing, and deleting custom tools via the copilot.
|
||||
*/
|
||||
export class ManageCustomToolClientTool extends BaseClientTool {
|
||||
static readonly id = 'manage_custom_tool'
|
||||
private currentArgs?: ManageCustomToolArgs
|
||||
|
||||
constructor(toolCallId: string) {
|
||||
super(toolCallId, ManageCustomToolClientTool.id, ManageCustomToolClientTool.metadata)
|
||||
}
|
||||
|
||||
static readonly metadata: BaseClientToolMetadata = {
|
||||
displayNames: {
|
||||
[ClientToolCallState.generating]: {
|
||||
text: 'Managing custom tool',
|
||||
icon: Loader2,
|
||||
},
|
||||
[ClientToolCallState.pending]: { text: 'Manage custom tool?', icon: Plus },
|
||||
[ClientToolCallState.executing]: { text: 'Managing custom tool', icon: Loader2 },
|
||||
[ClientToolCallState.success]: { text: 'Managed custom tool', icon: Check },
|
||||
[ClientToolCallState.error]: { text: 'Failed to manage custom tool', icon: X },
|
||||
[ClientToolCallState.aborted]: {
|
||||
text: 'Aborted managing custom tool',
|
||||
icon: XCircle,
|
||||
},
|
||||
[ClientToolCallState.rejected]: {
|
||||
text: 'Skipped managing custom tool',
|
||||
icon: XCircle,
|
||||
},
|
||||
},
|
||||
interrupt: {
|
||||
accept: { text: 'Allow', icon: Check },
|
||||
reject: { text: 'Skip', icon: XCircle },
|
||||
},
|
||||
getDynamicText: (params, state) => {
|
||||
const operation = params?.operation as 'add' | 'edit' | 'delete' | undefined
|
||||
|
||||
// Return undefined if no operation yet - use static defaults
|
||||
if (!operation) return undefined
|
||||
|
||||
// Get tool name from params, or look it up from the store by toolId
|
||||
let toolName = params?.title || params?.schema?.function?.name
|
||||
if (!toolName && params?.toolId) {
|
||||
try {
|
||||
const tool = useCustomToolsStore.getState().getTool(params.toolId)
|
||||
toolName = tool?.title || tool?.schema?.function?.name
|
||||
} catch {
|
||||
// Ignore errors accessing store
|
||||
}
|
||||
}
|
||||
|
||||
const getActionText = (verb: 'present' | 'past' | 'gerund') => {
|
||||
switch (operation) {
|
||||
case 'add':
|
||||
return verb === 'present' ? 'Create' : verb === 'past' ? 'Created' : 'Creating'
|
||||
case 'edit':
|
||||
return verb === 'present' ? 'Edit' : verb === 'past' ? 'Edited' : 'Editing'
|
||||
case 'delete':
|
||||
return verb === 'present' ? 'Delete' : verb === 'past' ? 'Deleted' : 'Deleting'
|
||||
}
|
||||
}
|
||||
|
||||
// For add: only show tool name in past tense (success)
|
||||
// For edit/delete: always show tool name
|
||||
const shouldShowToolName = (currentState: ClientToolCallState) => {
|
||||
if (operation === 'add') {
|
||||
return currentState === ClientToolCallState.success
|
||||
}
|
||||
return true // edit and delete always show tool name
|
||||
}
|
||||
|
||||
const nameText = shouldShowToolName(state) && toolName ? ` ${toolName}` : ' custom tool'
|
||||
|
||||
switch (state) {
|
||||
case ClientToolCallState.success:
|
||||
return `${getActionText('past')}${nameText}`
|
||||
case ClientToolCallState.executing:
|
||||
return `${getActionText('gerund')}${nameText}`
|
||||
case ClientToolCallState.generating:
|
||||
return `${getActionText('gerund')}${nameText}`
|
||||
case ClientToolCallState.pending:
|
||||
return `${getActionText('present')}${nameText}?`
|
||||
case ClientToolCallState.error:
|
||||
return `Failed to ${getActionText('present')?.toLowerCase()}${nameText}`
|
||||
case ClientToolCallState.aborted:
|
||||
return `Aborted ${getActionText('gerund')?.toLowerCase()}${nameText}`
|
||||
case ClientToolCallState.rejected:
|
||||
return `Skipped ${getActionText('gerund')?.toLowerCase()}${nameText}`
|
||||
}
|
||||
return undefined
|
||||
},
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the tool call args from the copilot store (needed before execute() is called)
|
||||
*/
|
||||
private getArgsFromStore(): ManageCustomToolArgs | undefined {
|
||||
try {
|
||||
const { toolCallsById } = useCopilotStore.getState()
|
||||
const toolCall = toolCallsById[this.toolCallId]
|
||||
return (toolCall as any)?.params as ManageCustomToolArgs | undefined
|
||||
} catch {
|
||||
return undefined
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Override getInterruptDisplays to only show confirmation for edit and delete operations.
|
||||
* Add operations execute directly without confirmation.
|
||||
*/
|
||||
getInterruptDisplays(): BaseClientToolMetadata['interrupt'] | undefined {
|
||||
// Try currentArgs first, then fall back to store (for when called before execute())
|
||||
const args = this.currentArgs || this.getArgsFromStore()
|
||||
const operation = args?.operation
|
||||
if (operation === 'edit' || operation === 'delete') {
|
||||
return this.metadata.interrupt
|
||||
}
|
||||
return undefined
|
||||
}
|
||||
|
||||
async handleReject(): Promise<void> {
|
||||
await super.handleReject()
|
||||
this.setState(ClientToolCallState.rejected)
|
||||
}
|
||||
|
||||
async handleAccept(args?: ManageCustomToolArgs): Promise<void> {
|
||||
const logger = createLogger('ManageCustomToolClientTool')
|
||||
try {
|
||||
this.setState(ClientToolCallState.executing)
|
||||
await this.executeOperation(args, logger)
|
||||
} catch (e: any) {
|
||||
logger.error('execute failed', { message: e?.message })
|
||||
this.setState(ClientToolCallState.error)
|
||||
await this.markToolComplete(500, e?.message || 'Failed to manage custom tool')
|
||||
}
|
||||
}
|
||||
|
||||
async execute(args?: ManageCustomToolArgs): Promise<void> {
|
||||
this.currentArgs = args
|
||||
// For add operation, execute directly without confirmation
|
||||
// For edit/delete, the copilot store will check hasInterrupt() and wait for confirmation
|
||||
if (args?.operation === 'add') {
|
||||
await this.handleAccept(args)
|
||||
}
|
||||
// edit/delete will wait for user confirmation via handleAccept
|
||||
}
|
||||
|
||||
/**
|
||||
* Executes the custom tool operation (add, edit, or delete)
|
||||
*/
|
||||
private async executeOperation(
|
||||
args: ManageCustomToolArgs | undefined,
|
||||
logger: ReturnType<typeof createLogger>
|
||||
): Promise<void> {
|
||||
if (!args?.operation) {
|
||||
throw new Error('Operation is required')
|
||||
}
|
||||
|
||||
const { operation, toolId, title, schema, code } = args
|
||||
|
||||
// Get workspace ID from the workflow registry
|
||||
const { hydration } = useWorkflowRegistry.getState()
|
||||
const workspaceId = hydration.workspaceId
|
||||
if (!workspaceId) {
|
||||
throw new Error('No active workspace found')
|
||||
}
|
||||
|
||||
logger.info(`Executing custom tool operation: ${operation}`, {
|
||||
operation,
|
||||
toolId,
|
||||
title,
|
||||
workspaceId,
|
||||
})
|
||||
|
||||
switch (operation) {
|
||||
case 'add':
|
||||
await this.addCustomTool({ title, schema, code, workspaceId }, logger)
|
||||
break
|
||||
case 'edit':
|
||||
await this.editCustomTool({ toolId, title, schema, code, workspaceId }, logger)
|
||||
break
|
||||
case 'delete':
|
||||
await this.deleteCustomTool({ toolId, workspaceId }, logger)
|
||||
break
|
||||
default:
|
||||
throw new Error(`Unknown operation: ${operation}`)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new custom tool
|
||||
*/
|
||||
private async addCustomTool(
|
||||
params: {
|
||||
title?: string
|
||||
schema?: CustomToolSchema
|
||||
code?: string
|
||||
workspaceId: string
|
||||
},
|
||||
logger: ReturnType<typeof createLogger>
|
||||
): Promise<void> {
|
||||
const { title, schema, code, workspaceId } = params
|
||||
|
||||
if (!title) {
|
||||
throw new Error('Title is required for adding a custom tool')
|
||||
}
|
||||
if (!schema) {
|
||||
throw new Error('Schema is required for adding a custom tool')
|
||||
}
|
||||
if (!code) {
|
||||
throw new Error('Code is required for adding a custom tool')
|
||||
}
|
||||
|
||||
const response = await fetch(API_ENDPOINT, {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({
|
||||
tools: [{ title, schema, code }],
|
||||
workspaceId,
|
||||
}),
|
||||
})
|
||||
|
||||
const data = await response.json()
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(data.error || 'Failed to create custom tool')
|
||||
}
|
||||
|
||||
if (!data.data || !Array.isArray(data.data) || data.data.length === 0) {
|
||||
throw new Error('Invalid API response: missing tool data')
|
||||
}
|
||||
|
||||
const createdTool = data.data[0]
|
||||
logger.info(`Created custom tool: ${title}`, { toolId: createdTool.id })
|
||||
|
||||
this.setState(ClientToolCallState.success)
|
||||
await this.markToolComplete(200, `Created custom tool "${title}"`, {
|
||||
success: true,
|
||||
operation: 'add',
|
||||
toolId: createdTool.id,
|
||||
title,
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Updates an existing custom tool
|
||||
*/
|
||||
private async editCustomTool(
|
||||
params: {
|
||||
toolId?: string
|
||||
title?: string
|
||||
schema?: CustomToolSchema
|
||||
code?: string
|
||||
workspaceId: string
|
||||
},
|
||||
logger: ReturnType<typeof createLogger>
|
||||
): Promise<void> {
|
||||
const { toolId, title, schema, code, workspaceId } = params
|
||||
|
||||
if (!toolId) {
|
||||
throw new Error('Tool ID is required for editing a custom tool')
|
||||
}
|
||||
|
||||
// At least one of title, schema, or code must be provided
|
||||
if (!title && !schema && !code) {
|
||||
throw new Error('At least one of title, schema, or code must be provided for editing')
|
||||
}
|
||||
|
||||
// We need to send the full tool data to the API for updates
|
||||
// First, fetch the existing tool to merge with updates
|
||||
const existingResponse = await fetch(`${API_ENDPOINT}?workspaceId=${workspaceId}`)
|
||||
const existingData = await existingResponse.json()
|
||||
|
||||
if (!existingResponse.ok) {
|
||||
throw new Error(existingData.error || 'Failed to fetch existing tools')
|
||||
}
|
||||
|
||||
const existingTool = existingData.data?.find((t: any) => t.id === toolId)
|
||||
if (!existingTool) {
|
||||
throw new Error(`Tool with ID ${toolId} not found`)
|
||||
}
|
||||
|
||||
// Merge updates with existing tool
|
||||
const updatedTool = {
|
||||
id: toolId,
|
||||
title: title ?? existingTool.title,
|
||||
schema: schema ?? existingTool.schema,
|
||||
code: code ?? existingTool.code,
|
||||
}
|
||||
|
||||
const response = await fetch(API_ENDPOINT, {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({
|
||||
tools: [updatedTool],
|
||||
workspaceId,
|
||||
}),
|
||||
})
|
||||
|
||||
const data = await response.json()
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(data.error || 'Failed to update custom tool')
|
||||
}
|
||||
|
||||
logger.info(`Updated custom tool: ${updatedTool.title}`, { toolId })
|
||||
|
||||
this.setState(ClientToolCallState.success)
|
||||
await this.markToolComplete(200, `Updated custom tool "${updatedTool.title}"`, {
|
||||
success: true,
|
||||
operation: 'edit',
|
||||
toolId,
|
||||
title: updatedTool.title,
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Deletes a custom tool
|
||||
*/
|
||||
private async deleteCustomTool(
|
||||
params: {
|
||||
toolId?: string
|
||||
workspaceId: string
|
||||
},
|
||||
logger: ReturnType<typeof createLogger>
|
||||
): Promise<void> {
|
||||
const { toolId, workspaceId } = params
|
||||
|
||||
if (!toolId) {
|
||||
throw new Error('Tool ID is required for deleting a custom tool')
|
||||
}
|
||||
|
||||
const url = `${API_ENDPOINT}?id=${toolId}&workspaceId=${workspaceId}`
|
||||
const response = await fetch(url, {
|
||||
method: 'DELETE',
|
||||
})
|
||||
|
||||
const data = await response.json()
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(data.error || 'Failed to delete custom tool')
|
||||
}
|
||||
|
||||
logger.info(`Deleted custom tool: ${toolId}`)
|
||||
|
||||
this.setState(ClientToolCallState.success)
|
||||
await this.markToolComplete(200, `Deleted custom tool`, {
|
||||
success: true,
|
||||
operation: 'delete',
|
||||
toolId,
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -4,7 +4,6 @@ import {
|
||||
BaseClientTool,
|
||||
type BaseClientToolMetadata,
|
||||
ClientToolCallState,
|
||||
WORKFLOW_EXECUTION_TIMEOUT_MS,
|
||||
} from '@/lib/copilot/tools/client/base-tool'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { executeWorkflowWithFullLogging } from '@/app/workspace/[workspaceId]/w/[workflowId]/utils'
|
||||
@@ -75,9 +74,7 @@ export class RunWorkflowClientTool extends BaseClientTool {
|
||||
|
||||
async handleAccept(args?: RunWorkflowArgs): Promise<void> {
|
||||
const logger = createLogger('RunWorkflowClientTool')
|
||||
|
||||
// Use longer timeout for workflow execution (10 minutes)
|
||||
await this.executeWithTimeout(async () => {
|
||||
try {
|
||||
const params = args || {}
|
||||
logger.debug('handleAccept() called', {
|
||||
toolCallId: this.toolCallId,
|
||||
@@ -127,54 +124,60 @@ export class RunWorkflowClientTool extends BaseClientTool {
|
||||
toolCallId: this.toolCallId,
|
||||
})
|
||||
|
||||
const result = await executeWorkflowWithFullLogging({
|
||||
workflowInput,
|
||||
executionId,
|
||||
})
|
||||
|
||||
setIsExecuting(false)
|
||||
|
||||
// Determine success for both non-streaming and streaming executions
|
||||
let succeeded = true
|
||||
let errorMessage: string | undefined
|
||||
try {
|
||||
const result = await executeWorkflowWithFullLogging({
|
||||
workflowInput,
|
||||
executionId,
|
||||
})
|
||||
|
||||
// Determine success for both non-streaming and streaming executions
|
||||
let succeeded = true
|
||||
let errorMessage: string | undefined
|
||||
try {
|
||||
if (result && typeof result === 'object' && 'success' in (result as any)) {
|
||||
succeeded = Boolean((result as any).success)
|
||||
if (!succeeded) {
|
||||
errorMessage = (result as any)?.error || (result as any)?.output?.error
|
||||
}
|
||||
} else if (
|
||||
result &&
|
||||
typeof result === 'object' &&
|
||||
'execution' in (result as any) &&
|
||||
(result as any).execution &&
|
||||
typeof (result as any).execution === 'object'
|
||||
) {
|
||||
succeeded = Boolean((result as any).execution.success)
|
||||
if (!succeeded) {
|
||||
errorMessage =
|
||||
(result as any).execution?.error || (result as any).execution?.output?.error
|
||||
}
|
||||
if (result && typeof result === 'object' && 'success' in (result as any)) {
|
||||
succeeded = Boolean((result as any).success)
|
||||
if (!succeeded) {
|
||||
errorMessage = (result as any)?.error || (result as any)?.output?.error
|
||||
}
|
||||
} else if (
|
||||
result &&
|
||||
typeof result === 'object' &&
|
||||
'execution' in (result as any) &&
|
||||
(result as any).execution &&
|
||||
typeof (result as any).execution === 'object'
|
||||
) {
|
||||
succeeded = Boolean((result as any).execution.success)
|
||||
if (!succeeded) {
|
||||
errorMessage =
|
||||
(result as any).execution?.error || (result as any).execution?.output?.error
|
||||
}
|
||||
} catch {}
|
||||
|
||||
if (succeeded) {
|
||||
logger.debug('Workflow execution finished with success')
|
||||
this.setState(ClientToolCallState.success)
|
||||
await this.markToolComplete(
|
||||
200,
|
||||
`Workflow execution completed. Started at: ${executionStartTime}`
|
||||
)
|
||||
} else {
|
||||
const msg = errorMessage || 'Workflow execution failed'
|
||||
logger.error('Workflow execution finished with failure', { message: msg })
|
||||
this.setState(ClientToolCallState.error)
|
||||
await this.markToolComplete(500, msg)
|
||||
}
|
||||
} finally {
|
||||
// Always clean up execution state
|
||||
setIsExecuting(false)
|
||||
} catch {}
|
||||
|
||||
if (succeeded) {
|
||||
logger.debug('Workflow execution finished with success')
|
||||
this.setState(ClientToolCallState.success)
|
||||
await this.markToolComplete(
|
||||
200,
|
||||
`Workflow execution completed. Started at: ${executionStartTime}`
|
||||
)
|
||||
} else {
|
||||
const msg = errorMessage || 'Workflow execution failed'
|
||||
logger.error('Workflow execution finished with failure', { message: msg })
|
||||
this.setState(ClientToolCallState.error)
|
||||
await this.markToolComplete(500, msg)
|
||||
}
|
||||
}, WORKFLOW_EXECUTION_TIMEOUT_MS)
|
||||
} catch (error: any) {
|
||||
const message = error instanceof Error ? error.message : String(error)
|
||||
const failedDependency = typeof message === 'string' && /dependency/i.test(message)
|
||||
const status = failedDependency ? 424 : 500
|
||||
|
||||
logger.error('Run workflow failed', { message })
|
||||
|
||||
this.setState(ClientToolCallState.error)
|
||||
await this.markToolComplete(status, failedDependency ? undefined : message)
|
||||
}
|
||||
}
|
||||
|
||||
async execute(args?: RunWorkflowArgs): Promise<void> {
|
||||
|
||||
@@ -5,7 +5,7 @@ import { jwtDecode } from 'jwt-decode'
|
||||
import { createPermissionError, verifyWorkflowAccess } from '@/lib/copilot/auth/permissions'
|
||||
import type { BaseServerTool } from '@/lib/copilot/tools/server/base-tool'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import { getPersonalAndWorkspaceEnv } from '@/lib/environment/utils'
|
||||
import { getEnvironmentVariableKeys } from '@/lib/environment/utils'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { getAllOAuthServices } from '@/lib/oauth/oauth'
|
||||
import { refreshTokenIfNeeded } from '@/app/api/auth/oauth/utils'
|
||||
@@ -26,13 +26,8 @@ export const getCredentialsServerTool: BaseServerTool<GetCredentialsParams, any>
|
||||
|
||||
const authenticatedUserId = context.userId
|
||||
|
||||
let workspaceId: string | undefined
|
||||
|
||||
if (params?.workflowId) {
|
||||
const { hasAccess, workspaceId: wId } = await verifyWorkflowAccess(
|
||||
authenticatedUserId,
|
||||
params.workflowId
|
||||
)
|
||||
const { hasAccess } = await verifyWorkflowAccess(authenticatedUserId, params.workflowId)
|
||||
|
||||
if (!hasAccess) {
|
||||
const errorMessage = createPermissionError('access credentials in')
|
||||
@@ -42,8 +37,6 @@ export const getCredentialsServerTool: BaseServerTool<GetCredentialsParams, any>
|
||||
})
|
||||
throw new Error(errorMessage)
|
||||
}
|
||||
|
||||
workspaceId = wId
|
||||
}
|
||||
|
||||
const userId = authenticatedUserId
|
||||
@@ -129,23 +122,14 @@ export const getCredentialsServerTool: BaseServerTool<GetCredentialsParams, any>
|
||||
baseProvider: service.baseProvider,
|
||||
}))
|
||||
|
||||
// Fetch environment variables from both personal and workspace
|
||||
const envResult = await getPersonalAndWorkspaceEnv(userId, workspaceId)
|
||||
|
||||
// Get all unique variable names from both personal and workspace
|
||||
const personalVarNames = Object.keys(envResult.personalEncrypted)
|
||||
const workspaceVarNames = Object.keys(envResult.workspaceEncrypted)
|
||||
const allVarNames = [...new Set([...personalVarNames, ...workspaceVarNames])]
|
||||
// Fetch environment variables
|
||||
const envResult = await getEnvironmentVariableKeys(userId)
|
||||
|
||||
logger.info('Fetched credentials', {
|
||||
userId,
|
||||
workspaceId,
|
||||
connectedCount: connectedCredentials.length,
|
||||
notConnectedCount: notConnectedServices.length,
|
||||
personalEnvVarCount: personalVarNames.length,
|
||||
workspaceEnvVarCount: workspaceVarNames.length,
|
||||
totalEnvVarCount: allVarNames.length,
|
||||
conflicts: envResult.conflicts,
|
||||
envVarCount: envResult.count,
|
||||
})
|
||||
|
||||
return {
|
||||
@@ -160,11 +144,8 @@ export const getCredentialsServerTool: BaseServerTool<GetCredentialsParams, any>
|
||||
},
|
||||
},
|
||||
environment: {
|
||||
variableNames: allVarNames,
|
||||
count: allVarNames.length,
|
||||
personalVariables: personalVarNames,
|
||||
workspaceVariables: workspaceVarNames,
|
||||
conflicts: envResult.conflicts,
|
||||
variableNames: envResult.variableNames,
|
||||
count: envResult.count,
|
||||
},
|
||||
}
|
||||
},
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { db } from '@sim/db'
|
||||
import { workspaceEnvironment } from '@sim/db/schema'
|
||||
import { environment } from '@sim/db/schema'
|
||||
import { eq } from 'drizzle-orm'
|
||||
import { z } from 'zod'
|
||||
import { createPermissionError, verifyWorkflowAccess } from '@/lib/copilot/auth/permissions'
|
||||
@@ -52,33 +52,28 @@ export const setEnvironmentVariablesServerTool: BaseServerTool<SetEnvironmentVar
|
||||
const authenticatedUserId = context.userId
|
||||
const { variables, workflowId } = params || ({} as SetEnvironmentVariablesParams)
|
||||
|
||||
if (!workflowId) {
|
||||
throw new Error('workflowId is required to set workspace environment variables')
|
||||
if (workflowId) {
|
||||
const { hasAccess } = await verifyWorkflowAccess(authenticatedUserId, workflowId)
|
||||
|
||||
if (!hasAccess) {
|
||||
const errorMessage = createPermissionError('modify environment variables in')
|
||||
logger.error('Unauthorized attempt to set environment variables', {
|
||||
workflowId,
|
||||
authenticatedUserId,
|
||||
})
|
||||
throw new Error(errorMessage)
|
||||
}
|
||||
}
|
||||
|
||||
const { hasAccess, workspaceId } = await verifyWorkflowAccess(authenticatedUserId, workflowId)
|
||||
|
||||
if (!hasAccess) {
|
||||
const errorMessage = createPermissionError('modify environment variables in')
|
||||
logger.error('Unauthorized attempt to set environment variables', {
|
||||
workflowId,
|
||||
authenticatedUserId,
|
||||
})
|
||||
throw new Error(errorMessage)
|
||||
}
|
||||
|
||||
if (!workspaceId) {
|
||||
throw new Error('Could not determine workspace for this workflow')
|
||||
}
|
||||
const userId = authenticatedUserId
|
||||
|
||||
const normalized = normalizeVariables(variables || {})
|
||||
const { variables: validatedVariables } = EnvVarSchema.parse({ variables: normalized })
|
||||
|
||||
// Fetch existing workspace environment variables
|
||||
const existingData = await db
|
||||
.select()
|
||||
.from(workspaceEnvironment)
|
||||
.where(eq(workspaceEnvironment.workspaceId, workspaceId))
|
||||
.from(environment)
|
||||
.where(eq(environment.userId, userId))
|
||||
.limit(1)
|
||||
const existingEncrypted = (existingData[0]?.variables as Record<string, string>) || {}
|
||||
|
||||
@@ -114,36 +109,26 @@ export const setEnvironmentVariablesServerTool: BaseServerTool<SetEnvironmentVar
|
||||
|
||||
const finalEncrypted = { ...existingEncrypted, ...newlyEncrypted }
|
||||
|
||||
// Save to workspace environment variables
|
||||
await db
|
||||
.insert(workspaceEnvironment)
|
||||
.insert(environment)
|
||||
.values({
|
||||
id: crypto.randomUUID(),
|
||||
workspaceId,
|
||||
userId,
|
||||
variables: finalEncrypted,
|
||||
updatedAt: new Date(),
|
||||
})
|
||||
.onConflictDoUpdate({
|
||||
target: [workspaceEnvironment.workspaceId],
|
||||
target: [environment.userId],
|
||||
set: { variables: finalEncrypted, updatedAt: new Date() },
|
||||
})
|
||||
|
||||
logger.info('Saved workspace environment variables', {
|
||||
workspaceId,
|
||||
workflowId,
|
||||
addedCount: added.length,
|
||||
updatedCount: updated.length,
|
||||
totalCount: Object.keys(finalEncrypted).length,
|
||||
})
|
||||
|
||||
return {
|
||||
message: `Successfully processed ${Object.keys(validatedVariables).length} workspace environment variable(s): ${added.length} added, ${updated.length} updated`,
|
||||
message: `Successfully processed ${Object.keys(validatedVariables).length} environment variable(s): ${added.length} added, ${updated.length} updated`,
|
||||
variableCount: Object.keys(validatedVariables).length,
|
||||
variableNames: Object.keys(validatedVariables),
|
||||
totalVariableCount: Object.keys(finalEncrypted).length,
|
||||
addedVariables: added,
|
||||
updatedVariables: updated,
|
||||
workspaceId,
|
||||
}
|
||||
},
|
||||
}
|
||||
|
||||
@@ -645,29 +645,19 @@ function createBlockFromParams(
|
||||
function normalizeTools(tools: any[]): any[] {
|
||||
return tools.map((tool) => {
|
||||
if (tool.type === 'custom-tool') {
|
||||
// New reference format: minimal fields only
|
||||
if (tool.customToolId && !tool.schema && !tool.code) {
|
||||
return {
|
||||
type: tool.type,
|
||||
customToolId: tool.customToolId,
|
||||
usageControl: tool.usageControl || 'auto',
|
||||
isExpanded: tool.isExpanded ?? true,
|
||||
}
|
||||
}
|
||||
|
||||
// Legacy inline format: include all fields
|
||||
// Reconstruct sanitized custom tool fields
|
||||
const normalized: any = {
|
||||
...tool,
|
||||
params: tool.params || {},
|
||||
isExpanded: tool.isExpanded ?? true,
|
||||
}
|
||||
|
||||
// Ensure schema has proper structure (for inline format)
|
||||
// Ensure schema has proper structure
|
||||
if (normalized.schema?.function) {
|
||||
normalized.schema = {
|
||||
type: 'function',
|
||||
function: {
|
||||
name: normalized.schema.function.name || tool.title, // Preserve name or derive from title
|
||||
name: tool.title, // Derive name from title
|
||||
description: normalized.schema.function.description,
|
||||
parameters: normalized.schema.function.parameters,
|
||||
},
|
||||
|
||||
@@ -230,8 +230,6 @@ export const env = createEnv({
|
||||
ZOOM_CLIENT_SECRET: z.string().optional(), // Zoom OAuth client secret
|
||||
WORDPRESS_CLIENT_ID: z.string().optional(), // WordPress.com OAuth client ID
|
||||
WORDPRESS_CLIENT_SECRET: z.string().optional(), // WordPress.com OAuth client secret
|
||||
ZAPIER_CLIENT_ID: z.string().optional(), // Zapier AI Actions OAuth client ID
|
||||
ZAPIER_CLIENT_SECRET: z.string().optional(), // Zapier AI Actions OAuth client secret
|
||||
|
||||
// E2B Remote Code Execution
|
||||
E2B_ENABLED: z.string().optional(), // Enable E2B remote code execution
|
||||
|
||||
@@ -1,7 +0,0 @@
|
||||
export { RateLimiter } from '@/lib/core/rate-limiter/rate-limiter'
|
||||
export type {
|
||||
RateLimitConfig,
|
||||
SubscriptionPlan,
|
||||
TriggerType,
|
||||
} from '@/lib/core/rate-limiter/types'
|
||||
export { RATE_LIMITS, RateLimitError } from '@/lib/core/rate-limiter/types'
|
||||
@@ -1,309 +0,0 @@
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
import { RateLimiter } from '@/lib/core/rate-limiter/rate-limiter'
|
||||
import { MANUAL_EXECUTION_LIMIT, RATE_LIMITS } from '@/lib/core/rate-limiter/types'
|
||||
|
||||
vi.mock('@sim/db', () => ({
|
||||
db: {
|
||||
select: vi.fn(),
|
||||
insert: vi.fn(),
|
||||
update: vi.fn(),
|
||||
delete: vi.fn(),
|
||||
},
|
||||
}))
|
||||
|
||||
vi.mock('drizzle-orm', () => ({
|
||||
eq: vi.fn((field, value) => ({ field, value })),
|
||||
sql: vi.fn((strings, ...values) => ({ sql: strings.join('?'), values })),
|
||||
and: vi.fn((...conditions) => ({ and: conditions })),
|
||||
}))
|
||||
|
||||
vi.mock('@/lib/core/config/redis', () => ({
|
||||
getRedisClient: vi.fn().mockReturnValue(null),
|
||||
}))
|
||||
|
||||
import { db } from '@sim/db'
|
||||
import { getRedisClient } from '@/lib/core/config/redis'
|
||||
|
||||
describe('RateLimiter', () => {
|
||||
const rateLimiter = new RateLimiter()
|
||||
const testUserId = 'test-user-123'
|
||||
const freeSubscription = { plan: 'free', referenceId: testUserId }
|
||||
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks()
|
||||
vi.mocked(getRedisClient).mockReturnValue(null)
|
||||
})
|
||||
|
||||
describe('checkRateLimitWithSubscription', () => {
|
||||
it('should allow unlimited requests for manual trigger type', async () => {
|
||||
const result = await rateLimiter.checkRateLimitWithSubscription(
|
||||
testUserId,
|
||||
freeSubscription,
|
||||
'manual',
|
||||
false
|
||||
)
|
||||
|
||||
expect(result.allowed).toBe(true)
|
||||
expect(result.remaining).toBe(MANUAL_EXECUTION_LIMIT)
|
||||
expect(result.resetAt).toBeInstanceOf(Date)
|
||||
expect(db.select).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('should allow first API request for sync execution (DB fallback)', async () => {
|
||||
vi.mocked(db.select).mockReturnValue({
|
||||
from: vi.fn().mockReturnValue({
|
||||
where: vi.fn().mockReturnValue({
|
||||
limit: vi.fn().mockResolvedValue([]),
|
||||
}),
|
||||
}),
|
||||
} as any)
|
||||
|
||||
vi.mocked(db.insert).mockReturnValue({
|
||||
values: vi.fn().mockReturnValue({
|
||||
onConflictDoUpdate: vi.fn().mockReturnValue({
|
||||
returning: vi.fn().mockResolvedValue([
|
||||
{
|
||||
syncApiRequests: 1,
|
||||
asyncApiRequests: 0,
|
||||
apiEndpointRequests: 0,
|
||||
windowStart: new Date(),
|
||||
},
|
||||
]),
|
||||
}),
|
||||
}),
|
||||
} as any)
|
||||
|
||||
const result = await rateLimiter.checkRateLimitWithSubscription(
|
||||
testUserId,
|
||||
freeSubscription,
|
||||
'api',
|
||||
false
|
||||
)
|
||||
|
||||
expect(result.allowed).toBe(true)
|
||||
expect(result.remaining).toBe(RATE_LIMITS.free.syncApiExecutionsPerMinute - 1)
|
||||
expect(result.resetAt).toBeInstanceOf(Date)
|
||||
})
|
||||
|
||||
it('should allow first API request for async execution (DB fallback)', async () => {
|
||||
vi.mocked(db.select).mockReturnValue({
|
||||
from: vi.fn().mockReturnValue({
|
||||
where: vi.fn().mockReturnValue({
|
||||
limit: vi.fn().mockResolvedValue([]),
|
||||
}),
|
||||
}),
|
||||
} as any)
|
||||
|
||||
vi.mocked(db.insert).mockReturnValue({
|
||||
values: vi.fn().mockReturnValue({
|
||||
onConflictDoUpdate: vi.fn().mockReturnValue({
|
||||
returning: vi.fn().mockResolvedValue([
|
||||
{
|
||||
syncApiRequests: 0,
|
||||
asyncApiRequests: 1,
|
||||
apiEndpointRequests: 0,
|
||||
windowStart: new Date(),
|
||||
},
|
||||
]),
|
||||
}),
|
||||
}),
|
||||
} as any)
|
||||
|
||||
const result = await rateLimiter.checkRateLimitWithSubscription(
|
||||
testUserId,
|
||||
freeSubscription,
|
||||
'api',
|
||||
true
|
||||
)
|
||||
|
||||
expect(result.allowed).toBe(true)
|
||||
expect(result.remaining).toBe(RATE_LIMITS.free.asyncApiExecutionsPerMinute - 1)
|
||||
expect(result.resetAt).toBeInstanceOf(Date)
|
||||
})
|
||||
|
||||
it('should work for all trigger types except manual (DB fallback)', async () => {
|
||||
const triggerTypes = ['api', 'webhook', 'schedule', 'chat'] as const
|
||||
|
||||
for (const triggerType of triggerTypes) {
|
||||
vi.mocked(db.select).mockReturnValue({
|
||||
from: vi.fn().mockReturnValue({
|
||||
where: vi.fn().mockReturnValue({
|
||||
limit: vi.fn().mockResolvedValue([]),
|
||||
}),
|
||||
}),
|
||||
} as any)
|
||||
|
||||
vi.mocked(db.insert).mockReturnValue({
|
||||
values: vi.fn().mockReturnValue({
|
||||
onConflictDoUpdate: vi.fn().mockReturnValue({
|
||||
returning: vi.fn().mockResolvedValue([
|
||||
{
|
||||
syncApiRequests: 1,
|
||||
asyncApiRequests: 0,
|
||||
apiEndpointRequests: 0,
|
||||
windowStart: new Date(),
|
||||
},
|
||||
]),
|
||||
}),
|
||||
}),
|
||||
} as any)
|
||||
|
||||
const result = await rateLimiter.checkRateLimitWithSubscription(
|
||||
testUserId,
|
||||
freeSubscription,
|
||||
triggerType,
|
||||
false
|
||||
)
|
||||
|
||||
expect(result.allowed).toBe(true)
|
||||
expect(result.remaining).toBe(RATE_LIMITS.free.syncApiExecutionsPerMinute - 1)
|
||||
}
|
||||
})
|
||||
|
||||
it('should use Redis when available', async () => {
|
||||
const mockRedis = {
|
||||
eval: vi.fn().mockResolvedValue(1), // Lua script returns count after INCR
|
||||
}
|
||||
vi.mocked(getRedisClient).mockReturnValue(mockRedis as any)
|
||||
|
||||
const result = await rateLimiter.checkRateLimitWithSubscription(
|
||||
testUserId,
|
||||
freeSubscription,
|
||||
'api',
|
||||
false
|
||||
)
|
||||
|
||||
expect(result.allowed).toBe(true)
|
||||
expect(result.remaining).toBe(RATE_LIMITS.free.syncApiExecutionsPerMinute - 1)
|
||||
expect(mockRedis.eval).toHaveBeenCalled()
|
||||
expect(db.select).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('should deny requests when Redis rate limit exceeded', async () => {
|
||||
const mockRedis = {
|
||||
eval: vi.fn().mockResolvedValue(RATE_LIMITS.free.syncApiExecutionsPerMinute + 1),
|
||||
}
|
||||
vi.mocked(getRedisClient).mockReturnValue(mockRedis as any)
|
||||
|
||||
const result = await rateLimiter.checkRateLimitWithSubscription(
|
||||
testUserId,
|
||||
freeSubscription,
|
||||
'api',
|
||||
false
|
||||
)
|
||||
|
||||
expect(result.allowed).toBe(false)
|
||||
expect(result.remaining).toBe(0)
|
||||
})
|
||||
|
||||
it('should fall back to DB when Redis fails', async () => {
|
||||
const mockRedis = {
|
||||
eval: vi.fn().mockRejectedValue(new Error('Redis connection failed')),
|
||||
}
|
||||
vi.mocked(getRedisClient).mockReturnValue(mockRedis as any)
|
||||
|
||||
vi.mocked(db.select).mockReturnValue({
|
||||
from: vi.fn().mockReturnValue({
|
||||
where: vi.fn().mockReturnValue({
|
||||
limit: vi.fn().mockResolvedValue([]),
|
||||
}),
|
||||
}),
|
||||
} as any)
|
||||
|
||||
vi.mocked(db.insert).mockReturnValue({
|
||||
values: vi.fn().mockReturnValue({
|
||||
onConflictDoUpdate: vi.fn().mockReturnValue({
|
||||
returning: vi.fn().mockResolvedValue([
|
||||
{
|
||||
syncApiRequests: 1,
|
||||
asyncApiRequests: 0,
|
||||
apiEndpointRequests: 0,
|
||||
windowStart: new Date(),
|
||||
},
|
||||
]),
|
||||
}),
|
||||
}),
|
||||
} as any)
|
||||
|
||||
const result = await rateLimiter.checkRateLimitWithSubscription(
|
||||
testUserId,
|
||||
freeSubscription,
|
||||
'api',
|
||||
false
|
||||
)
|
||||
|
||||
expect(result.allowed).toBe(true)
|
||||
expect(db.select).toHaveBeenCalled()
|
||||
})
|
||||
})
|
||||
|
||||
describe('getRateLimitStatusWithSubscription', () => {
|
||||
it('should return unlimited for manual trigger type', async () => {
|
||||
const status = await rateLimiter.getRateLimitStatusWithSubscription(
|
||||
testUserId,
|
||||
freeSubscription,
|
||||
'manual',
|
||||
false
|
||||
)
|
||||
|
||||
expect(status.used).toBe(0)
|
||||
expect(status.limit).toBe(MANUAL_EXECUTION_LIMIT)
|
||||
expect(status.remaining).toBe(MANUAL_EXECUTION_LIMIT)
|
||||
expect(status.resetAt).toBeInstanceOf(Date)
|
||||
})
|
||||
|
||||
it('should return sync API limits for API trigger type (DB fallback)', async () => {
|
||||
vi.mocked(db.select).mockReturnValue({
|
||||
from: vi.fn().mockReturnValue({
|
||||
where: vi.fn().mockReturnValue({
|
||||
limit: vi.fn().mockResolvedValue([]),
|
||||
}),
|
||||
}),
|
||||
} as any)
|
||||
|
||||
const status = await rateLimiter.getRateLimitStatusWithSubscription(
|
||||
testUserId,
|
||||
freeSubscription,
|
||||
'api',
|
||||
false
|
||||
)
|
||||
|
||||
expect(status.used).toBe(0)
|
||||
expect(status.limit).toBe(RATE_LIMITS.free.syncApiExecutionsPerMinute)
|
||||
expect(status.remaining).toBe(RATE_LIMITS.free.syncApiExecutionsPerMinute)
|
||||
expect(status.resetAt).toBeInstanceOf(Date)
|
||||
})
|
||||
|
||||
it('should use Redis for status when available', async () => {
|
||||
const mockRedis = {
|
||||
get: vi.fn().mockResolvedValue('5'),
|
||||
}
|
||||
vi.mocked(getRedisClient).mockReturnValue(mockRedis as any)
|
||||
|
||||
const status = await rateLimiter.getRateLimitStatusWithSubscription(
|
||||
testUserId,
|
||||
freeSubscription,
|
||||
'api',
|
||||
false
|
||||
)
|
||||
|
||||
expect(status.used).toBe(5)
|
||||
expect(status.limit).toBe(RATE_LIMITS.free.syncApiExecutionsPerMinute)
|
||||
expect(status.remaining).toBe(RATE_LIMITS.free.syncApiExecutionsPerMinute - 5)
|
||||
expect(mockRedis.get).toHaveBeenCalled()
|
||||
expect(db.select).not.toHaveBeenCalled()
|
||||
})
|
||||
})
|
||||
|
||||
describe('resetRateLimit', () => {
|
||||
it('should delete rate limit record for user', async () => {
|
||||
vi.mocked(db.delete).mockReturnValue({
|
||||
where: vi.fn().mockResolvedValue({}),
|
||||
} as any)
|
||||
|
||||
await rateLimiter.resetRateLimit(testUserId)
|
||||
|
||||
expect(db.delete).toHaveBeenCalled()
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -1,7 +1,10 @@
|
||||
/**
|
||||
* Execution timeout constants
|
||||
*
|
||||
* DEFAULT_EXECUTION_TIMEOUT_MS: The default timeout for executing user code (10 minutes)
|
||||
* These constants define the timeout values for code execution.
|
||||
* - DEFAULT_EXECUTION_TIMEOUT_MS: The default timeout for executing user code (3 minutes)
|
||||
* - MAX_EXECUTION_DURATION: The maximum duration for the API route (adds 30s buffer for overhead)
|
||||
*/
|
||||
|
||||
export const DEFAULT_EXECUTION_TIMEOUT_MS = 600000 // 10 minutes (600 seconds)
|
||||
export const DEFAULT_EXECUTION_TIMEOUT_MS = 180000 // 3 minutes (180 seconds)
|
||||
export const MAX_EXECUTION_DURATION = 210 // 3.5 minutes (210 seconds) - includes buffer for sandbox creation
|
||||
|
||||
@@ -3,10 +3,10 @@ import { workflow } from '@sim/db/schema'
|
||||
import { eq } from 'drizzle-orm'
|
||||
import { checkServerSideUsageLimits } from '@/lib/billing/calculations/usage-monitor'
|
||||
import { getHighestPrioritySubscription } from '@/lib/billing/core/subscription'
|
||||
import { RateLimiter } from '@/lib/core/rate-limiter/rate-limiter'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { LoggingSession } from '@/lib/logs/execution/logging-session'
|
||||
import { getWorkspaceBilledAccountUserId } from '@/lib/workspaces/utils'
|
||||
import { RateLimiter } from '@/services/queue/RateLimiter'
|
||||
|
||||
const logger = createLogger('ExecutionPreprocessing')
|
||||
|
||||
@@ -228,18 +228,26 @@ export async function preprocessExecution(
|
||||
const workspaceId = workflowRecord.workspaceId || providedWorkspaceId || ''
|
||||
|
||||
// ========== STEP 2: Check Deployment Status ==========
|
||||
// If workflow is not deployed and deployment is required, reject without logging.
|
||||
// No log entry or cost should be created for calls to undeployed workflows
|
||||
// since the workflow was never intended to run.
|
||||
if (checkDeployment && !workflowRecord.isDeployed) {
|
||||
logger.warn(`[${requestId}] Workflow not deployed: ${workflowId}`)
|
||||
|
||||
await logPreprocessingError({
|
||||
workflowId,
|
||||
executionId,
|
||||
triggerType,
|
||||
requestId,
|
||||
userId: workflowRecord.userId || userId,
|
||||
workspaceId,
|
||||
errorMessage: 'Workflow is not deployed. Please deploy the workflow before triggering it.',
|
||||
loggingSession: providedLoggingSession,
|
||||
})
|
||||
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
message: 'Workflow is not deployed',
|
||||
statusCode: 403,
|
||||
logCreated: false,
|
||||
logCreated: true,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import { db } from '@sim/db'
|
||||
import {
|
||||
member,
|
||||
organization,
|
||||
userStats,
|
||||
user as userTable,
|
||||
workflow,
|
||||
@@ -9,11 +10,7 @@ import {
|
||||
import { eq, sql } from 'drizzle-orm'
|
||||
import { v4 as uuidv4 } from 'uuid'
|
||||
import { getHighestPrioritySubscription } from '@/lib/billing/core/subscription'
|
||||
import {
|
||||
checkUsageStatus,
|
||||
getOrgUsageLimit,
|
||||
maybeSendUsageThresholdEmail,
|
||||
} from '@/lib/billing/core/usage'
|
||||
import { checkUsageStatus, maybeSendUsageThresholdEmail } from '@/lib/billing/core/usage'
|
||||
import { checkAndBillOverageThreshold } from '@/lib/billing/threshold-billing'
|
||||
import { isBillingEnabled } from '@/lib/core/config/environment'
|
||||
import { redactApiKeys } from '@/lib/core/security/redaction'
|
||||
@@ -389,8 +386,21 @@ export class ExecutionLogger implements IExecutionLoggerService {
|
||||
limit,
|
||||
})
|
||||
} else if (sub?.referenceId) {
|
||||
// Get org usage limit using shared helper
|
||||
const { limit: orgLimit } = await getOrgUsageLimit(sub.referenceId, sub.plan, sub.seats)
|
||||
let orgLimit = 0
|
||||
const orgRows = await db
|
||||
.select({ orgUsageLimit: organization.orgUsageLimit })
|
||||
.from(organization)
|
||||
.where(eq(organization.id, sub.referenceId))
|
||||
.limit(1)
|
||||
const { getPlanPricing } = await import('@/lib/billing/core/billing')
|
||||
const { basePrice } = getPlanPricing(sub.plan)
|
||||
const minimum = (sub.seats || 1) * basePrice
|
||||
if (orgRows.length > 0 && orgRows[0].orgUsageLimit) {
|
||||
const configured = Number.parseFloat(orgRows[0].orgUsageLimit)
|
||||
orgLimit = Math.max(configured, minimum)
|
||||
} else {
|
||||
orgLimit = minimum
|
||||
}
|
||||
|
||||
const [{ sum: orgUsageBefore }] = await db
|
||||
.select({ sum: sql`COALESCE(SUM(${userStats.currentPeriodCost}), 0)` })
|
||||
|
||||
@@ -37,7 +37,6 @@ import {
|
||||
WebflowIcon,
|
||||
WordpressIcon,
|
||||
xIcon,
|
||||
ZapierIcon,
|
||||
ZoomIcon,
|
||||
} from '@/components/icons'
|
||||
import { env } from '@/lib/core/config/env'
|
||||
@@ -71,7 +70,6 @@ export type OAuthProvider =
|
||||
| 'shopify'
|
||||
| 'zoom'
|
||||
| 'wordpress'
|
||||
| 'zapier'
|
||||
| string
|
||||
|
||||
export type OAuthService =
|
||||
@@ -113,7 +111,6 @@ export type OAuthService =
|
||||
| 'shopify'
|
||||
| 'zoom'
|
||||
| 'wordpress'
|
||||
| 'zapier'
|
||||
export interface OAuthProviderConfig {
|
||||
id: OAuthProvider
|
||||
name: string
|
||||
@@ -894,23 +891,6 @@ export const OAUTH_PROVIDERS: Record<string, OAuthProviderConfig> = {
|
||||
},
|
||||
defaultService: 'wordpress',
|
||||
},
|
||||
zapier: {
|
||||
id: 'zapier',
|
||||
name: 'Zapier',
|
||||
icon: (props) => ZapierIcon(props),
|
||||
services: {
|
||||
zapier: {
|
||||
id: 'zapier',
|
||||
name: 'Zapier AI Actions',
|
||||
description: 'Execute actions across 7,000+ apps using Zapier AI Actions.',
|
||||
providerId: 'zapier',
|
||||
icon: (props) => ZapierIcon(props),
|
||||
baseProviderIcon: (props) => ZapierIcon(props),
|
||||
scopes: ['openid', 'nla:exposed_actions:execute'],
|
||||
},
|
||||
},
|
||||
defaultService: 'zapier',
|
||||
},
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -1490,20 +1470,6 @@ function getProviderAuthConfig(provider: string): ProviderAuthConfig {
|
||||
supportsRefreshTokenRotation: false,
|
||||
}
|
||||
}
|
||||
case 'zapier': {
|
||||
// Zapier AI Actions OAuth - tokens expire after 10 hours
|
||||
const { clientId, clientSecret } = getCredentials(
|
||||
env.ZAPIER_CLIENT_ID,
|
||||
env.ZAPIER_CLIENT_SECRET
|
||||
)
|
||||
return {
|
||||
tokenEndpoint: 'https://actions.zapier.com/oauth/token/',
|
||||
clientId,
|
||||
clientSecret,
|
||||
useBasicAuth: false,
|
||||
supportsRefreshTokenRotation: true,
|
||||
}
|
||||
}
|
||||
default:
|
||||
throw new Error(`Unsupported provider: ${provider}`)
|
||||
}
|
||||
|
||||
@@ -494,17 +494,12 @@ export async function verifyProviderAuth(
|
||||
/**
|
||||
* Run preprocessing checks for webhook execution
|
||||
* This replaces the old checkRateLimits and checkUsageLimits functions
|
||||
*
|
||||
* @param isTestMode - If true, skips deployment check (for test webhooks that run on live/draft state)
|
||||
*/
|
||||
export async function checkWebhookPreprocessing(
|
||||
foundWorkflow: any,
|
||||
foundWebhook: any,
|
||||
requestId: string,
|
||||
options?: { isTestMode?: boolean }
|
||||
requestId: string
|
||||
): Promise<NextResponse | null> {
|
||||
const { isTestMode = false } = options || {}
|
||||
|
||||
try {
|
||||
const executionId = uuidv4()
|
||||
|
||||
@@ -515,7 +510,7 @@ export async function checkWebhookPreprocessing(
|
||||
executionId,
|
||||
requestId,
|
||||
checkRateLimit: true, // Webhooks need rate limiting
|
||||
checkDeployment: !isTestMode, // Test webhooks skip deployment check (run on live state)
|
||||
checkDeployment: true, // Webhooks require deployed workflows
|
||||
workspaceId: foundWorkflow.workspaceId,
|
||||
})
|
||||
|
||||
|
||||
@@ -1,414 +0,0 @@
|
||||
import { db } from '@sim/db'
|
||||
import { webhook, workflow } from '@sim/db/schema'
|
||||
import { and, eq, sql } from 'drizzle-orm'
|
||||
import { nanoid } from 'nanoid'
|
||||
import Parser from 'rss-parser'
|
||||
import { pollingIdempotency } from '@/lib/core/idempotency/service'
|
||||
import { getBaseUrl } from '@/lib/core/utils/urls'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
|
||||
const logger = createLogger('RssPollingService')
|
||||
|
||||
const MAX_CONSECUTIVE_FAILURES = 10
|
||||
const MAX_GUIDS_TO_TRACK = 100 // Track recent guids to prevent duplicates
|
||||
|
||||
interface RssWebhookConfig {
|
||||
feedUrl: string
|
||||
lastCheckedTimestamp?: string
|
||||
lastSeenGuids?: string[]
|
||||
etag?: string
|
||||
lastModified?: string
|
||||
}
|
||||
|
||||
interface RssItem {
|
||||
title?: string
|
||||
link?: string
|
||||
pubDate?: string
|
||||
guid?: string
|
||||
description?: string
|
||||
content?: string
|
||||
contentSnippet?: string
|
||||
author?: string
|
||||
creator?: string
|
||||
categories?: string[]
|
||||
enclosure?: {
|
||||
url: string
|
||||
type?: string
|
||||
length?: string | number
|
||||
}
|
||||
isoDate?: string
|
||||
[key: string]: any
|
||||
}
|
||||
|
||||
interface RssFeed {
|
||||
title?: string
|
||||
link?: string
|
||||
description?: string
|
||||
items: RssItem[]
|
||||
}
|
||||
|
||||
export interface RssWebhookPayload {
|
||||
item: RssItem
|
||||
feed: {
|
||||
title?: string
|
||||
link?: string
|
||||
description?: string
|
||||
}
|
||||
timestamp: string
|
||||
}
|
||||
|
||||
const parser = new Parser({
|
||||
timeout: 30000,
|
||||
headers: {
|
||||
'User-Agent': 'SimStudio/1.0 RSS Poller',
|
||||
},
|
||||
})
|
||||
|
||||
async function markWebhookFailed(webhookId: string) {
|
||||
try {
|
||||
const result = await db
|
||||
.update(webhook)
|
||||
.set({
|
||||
failedCount: sql`COALESCE(${webhook.failedCount}, 0) + 1`,
|
||||
lastFailedAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
})
|
||||
.where(eq(webhook.id, webhookId))
|
||||
.returning({ failedCount: webhook.failedCount })
|
||||
|
||||
const newFailedCount = result[0]?.failedCount || 0
|
||||
const shouldDisable = newFailedCount >= MAX_CONSECUTIVE_FAILURES
|
||||
|
||||
if (shouldDisable) {
|
||||
await db
|
||||
.update(webhook)
|
||||
.set({
|
||||
isActive: false,
|
||||
updatedAt: new Date(),
|
||||
})
|
||||
.where(eq(webhook.id, webhookId))
|
||||
|
||||
logger.warn(
|
||||
`Webhook ${webhookId} auto-disabled after ${MAX_CONSECUTIVE_FAILURES} consecutive failures`
|
||||
)
|
||||
}
|
||||
} catch (err) {
|
||||
logger.error(`Failed to mark webhook ${webhookId} as failed:`, err)
|
||||
}
|
||||
}
|
||||
|
||||
async function markWebhookSuccess(webhookId: string) {
|
||||
try {
|
||||
await db
|
||||
.update(webhook)
|
||||
.set({
|
||||
failedCount: 0,
|
||||
updatedAt: new Date(),
|
||||
})
|
||||
.where(eq(webhook.id, webhookId))
|
||||
} catch (err) {
|
||||
logger.error(`Failed to mark webhook ${webhookId} as successful:`, err)
|
||||
}
|
||||
}
|
||||
|
||||
export async function pollRssWebhooks() {
|
||||
logger.info('Starting RSS webhook polling')
|
||||
|
||||
try {
|
||||
const activeWebhooksResult = await db
|
||||
.select({ webhook })
|
||||
.from(webhook)
|
||||
.innerJoin(workflow, eq(webhook.workflowId, workflow.id))
|
||||
.where(
|
||||
and(eq(webhook.provider, 'rss'), eq(webhook.isActive, true), eq(workflow.isDeployed, true))
|
||||
)
|
||||
|
||||
const activeWebhooks = activeWebhooksResult.map((r) => r.webhook)
|
||||
|
||||
if (!activeWebhooks.length) {
|
||||
logger.info('No active RSS webhooks found')
|
||||
return { total: 0, successful: 0, failed: 0, details: [] }
|
||||
}
|
||||
|
||||
logger.info(`Found ${activeWebhooks.length} active RSS webhooks`)
|
||||
|
||||
const CONCURRENCY = 10
|
||||
const running: Promise<void>[] = []
|
||||
let successCount = 0
|
||||
let failureCount = 0
|
||||
|
||||
const enqueue = async (webhookData: (typeof activeWebhooks)[number]) => {
|
||||
const webhookId = webhookData.id
|
||||
const requestId = nanoid()
|
||||
|
||||
try {
|
||||
const config = webhookData.providerConfig as unknown as RssWebhookConfig
|
||||
|
||||
if (!config?.feedUrl) {
|
||||
logger.error(`[${requestId}] Missing feedUrl for webhook ${webhookId}`)
|
||||
await markWebhookFailed(webhookId)
|
||||
failureCount++
|
||||
return
|
||||
}
|
||||
|
||||
const now = new Date()
|
||||
|
||||
const { feed, items: newItems } = await fetchNewRssItems(config, requestId)
|
||||
|
||||
if (!newItems.length) {
|
||||
await updateWebhookConfig(webhookId, config, now.toISOString(), [])
|
||||
await markWebhookSuccess(webhookId)
|
||||
logger.info(`[${requestId}] No new items found for webhook ${webhookId}`)
|
||||
successCount++
|
||||
return
|
||||
}
|
||||
|
||||
logger.info(`[${requestId}] Found ${newItems.length} new items for webhook ${webhookId}`)
|
||||
|
||||
const { processedCount, failedCount: itemFailedCount } = await processRssItems(
|
||||
newItems,
|
||||
feed,
|
||||
webhookData,
|
||||
requestId
|
||||
)
|
||||
|
||||
// Collect guids from processed items
|
||||
const newGuids = newItems
|
||||
.map((item) => item.guid || item.link || '')
|
||||
.filter((guid) => guid.length > 0)
|
||||
|
||||
await updateWebhookConfig(webhookId, config, now.toISOString(), newGuids)
|
||||
|
||||
if (itemFailedCount > 0 && processedCount === 0) {
|
||||
await markWebhookFailed(webhookId)
|
||||
failureCount++
|
||||
logger.warn(
|
||||
`[${requestId}] All ${itemFailedCount} items failed to process for webhook ${webhookId}`
|
||||
)
|
||||
} else {
|
||||
await markWebhookSuccess(webhookId)
|
||||
successCount++
|
||||
logger.info(
|
||||
`[${requestId}] Successfully processed ${processedCount} items for webhook ${webhookId}${itemFailedCount > 0 ? ` (${itemFailedCount} failed)` : ''}`
|
||||
)
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error(`[${requestId}] Error processing RSS webhook ${webhookId}:`, error)
|
||||
await markWebhookFailed(webhookId)
|
||||
failureCount++
|
||||
}
|
||||
}
|
||||
|
||||
for (const webhookData of activeWebhooks) {
|
||||
const promise = enqueue(webhookData)
|
||||
.then(() => {})
|
||||
.catch((err) => {
|
||||
logger.error('Unexpected error in webhook processing:', err)
|
||||
failureCount++
|
||||
})
|
||||
|
||||
running.push(promise)
|
||||
|
||||
if (running.length >= CONCURRENCY) {
|
||||
const completedIdx = await Promise.race(running.map((p, i) => p.then(() => i)))
|
||||
running.splice(completedIdx, 1)
|
||||
}
|
||||
}
|
||||
|
||||
await Promise.allSettled(running)
|
||||
|
||||
const summary = {
|
||||
total: activeWebhooks.length,
|
||||
successful: successCount,
|
||||
failed: failureCount,
|
||||
details: [],
|
||||
}
|
||||
|
||||
logger.info('RSS polling completed', {
|
||||
total: summary.total,
|
||||
successful: summary.successful,
|
||||
failed: summary.failed,
|
||||
})
|
||||
|
||||
return summary
|
||||
} catch (error) {
|
||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error'
|
||||
logger.error('Error in RSS polling service:', errorMessage)
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
async function fetchNewRssItems(
|
||||
config: RssWebhookConfig,
|
||||
requestId: string
|
||||
): Promise<{ feed: RssFeed; items: RssItem[] }> {
|
||||
try {
|
||||
logger.debug(`[${requestId}] Fetching RSS feed: ${config.feedUrl}`)
|
||||
|
||||
// Parse the RSS feed
|
||||
const feed = await parser.parseURL(config.feedUrl)
|
||||
|
||||
if (!feed.items || !feed.items.length) {
|
||||
logger.debug(`[${requestId}] No items in feed`)
|
||||
return { feed: feed as RssFeed, items: [] }
|
||||
}
|
||||
|
||||
// Filter new items based on timestamp and guids
|
||||
const lastCheckedTime = config.lastCheckedTimestamp
|
||||
? new Date(config.lastCheckedTimestamp)
|
||||
: null
|
||||
const lastSeenGuids = new Set(config.lastSeenGuids || [])
|
||||
|
||||
const newItems = feed.items.filter((item) => {
|
||||
const itemGuid = item.guid || item.link || ''
|
||||
|
||||
// Check if we've already seen this item by guid
|
||||
if (itemGuid && lastSeenGuids.has(itemGuid)) {
|
||||
return false
|
||||
}
|
||||
|
||||
// Check if the item is newer than our last check
|
||||
if (lastCheckedTime && item.isoDate) {
|
||||
const itemDate = new Date(item.isoDate)
|
||||
if (itemDate <= lastCheckedTime) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
return true
|
||||
})
|
||||
|
||||
// Sort by date, newest first
|
||||
newItems.sort((a, b) => {
|
||||
const dateA = a.isoDate ? new Date(a.isoDate).getTime() : 0
|
||||
const dateB = b.isoDate ? new Date(b.isoDate).getTime() : 0
|
||||
return dateB - dateA
|
||||
})
|
||||
|
||||
// Limit to 25 items per poll to prevent overwhelming the system
|
||||
const limitedItems = newItems.slice(0, 25)
|
||||
|
||||
logger.info(
|
||||
`[${requestId}] Found ${newItems.length} new items (processing ${limitedItems.length})`
|
||||
)
|
||||
|
||||
return { feed: feed as RssFeed, items: limitedItems as RssItem[] }
|
||||
} catch (error) {
|
||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error'
|
||||
logger.error(`[${requestId}] Error fetching RSS feed:`, errorMessage)
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
async function processRssItems(
|
||||
items: RssItem[],
|
||||
feed: RssFeed,
|
||||
webhookData: any,
|
||||
requestId: string
|
||||
): Promise<{ processedCount: number; failedCount: number }> {
|
||||
let processedCount = 0
|
||||
let failedCount = 0
|
||||
|
||||
for (const item of items) {
|
||||
try {
|
||||
const itemGuid = item.guid || item.link || `${item.title}-${item.pubDate}`
|
||||
|
||||
await pollingIdempotency.executeWithIdempotency(
|
||||
'rss',
|
||||
`${webhookData.id}:${itemGuid}`,
|
||||
async () => {
|
||||
const payload: RssWebhookPayload = {
|
||||
item: {
|
||||
title: item.title,
|
||||
link: item.link,
|
||||
pubDate: item.pubDate,
|
||||
guid: item.guid,
|
||||
description: item.description,
|
||||
content: item.content,
|
||||
contentSnippet: item.contentSnippet,
|
||||
author: item.author || item.creator,
|
||||
categories: item.categories,
|
||||
enclosure: item.enclosure,
|
||||
isoDate: item.isoDate,
|
||||
},
|
||||
feed: {
|
||||
title: feed.title,
|
||||
link: feed.link,
|
||||
description: feed.description,
|
||||
},
|
||||
timestamp: new Date().toISOString(),
|
||||
}
|
||||
|
||||
const webhookUrl = `${getBaseUrl()}/api/webhooks/trigger/${webhookData.path}`
|
||||
|
||||
const response = await fetch(webhookUrl, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'X-Webhook-Secret': webhookData.secret || '',
|
||||
'User-Agent': 'SimStudio/1.0',
|
||||
},
|
||||
body: JSON.stringify(payload),
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
const errorText = await response.text()
|
||||
logger.error(
|
||||
`[${requestId}] Failed to trigger webhook for item ${itemGuid}:`,
|
||||
response.status,
|
||||
errorText
|
||||
)
|
||||
throw new Error(`Webhook request failed: ${response.status} - ${errorText}`)
|
||||
}
|
||||
|
||||
return {
|
||||
itemGuid,
|
||||
webhookStatus: response.status,
|
||||
processed: true,
|
||||
}
|
||||
}
|
||||
)
|
||||
|
||||
logger.info(
|
||||
`[${requestId}] Successfully processed item ${item.title || itemGuid} for webhook ${webhookData.id}`
|
||||
)
|
||||
processedCount++
|
||||
} catch (error) {
|
||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error'
|
||||
logger.error(`[${requestId}] Error processing item:`, errorMessage)
|
||||
failedCount++
|
||||
}
|
||||
}
|
||||
|
||||
return { processedCount, failedCount }
|
||||
}
|
||||
|
||||
async function updateWebhookConfig(
|
||||
webhookId: string,
|
||||
_config: RssWebhookConfig,
|
||||
timestamp: string,
|
||||
newGuids: string[]
|
||||
) {
|
||||
try {
|
||||
const result = await db.select().from(webhook).where(eq(webhook.id, webhookId))
|
||||
const existingConfig = (result[0]?.providerConfig as Record<string, any>) || {}
|
||||
|
||||
// Merge new guids with existing ones, keeping only the most recent
|
||||
const existingGuids = existingConfig.lastSeenGuids || []
|
||||
const allGuids = [...newGuids, ...existingGuids].slice(0, MAX_GUIDS_TO_TRACK)
|
||||
|
||||
await db
|
||||
.update(webhook)
|
||||
.set({
|
||||
providerConfig: {
|
||||
...existingConfig,
|
||||
lastCheckedTimestamp: timestamp,
|
||||
lastSeenGuids: allGuids,
|
||||
} as any,
|
||||
updatedAt: new Date(),
|
||||
})
|
||||
.where(eq(webhook.id, webhookId))
|
||||
} catch (err) {
|
||||
logger.error(`Failed to update webhook ${webhookId} config:`, err)
|
||||
}
|
||||
}
|
||||
@@ -795,33 +795,6 @@ export async function formatWebhookInput(
|
||||
return body
|
||||
}
|
||||
|
||||
if (foundWebhook.provider === 'rss') {
|
||||
if (body && typeof body === 'object' && 'item' in body) {
|
||||
const item = body.item as Record<string, any>
|
||||
const feed = body.feed as Record<string, any>
|
||||
|
||||
return {
|
||||
title: item?.title,
|
||||
link: item?.link,
|
||||
pubDate: item?.pubDate,
|
||||
item,
|
||||
feed,
|
||||
webhook: {
|
||||
data: {
|
||||
provider: 'rss',
|
||||
path: foundWebhook.path,
|
||||
providerConfig: foundWebhook.providerConfig,
|
||||
payload: body,
|
||||
headers: Object.fromEntries(request.headers.entries()),
|
||||
method: request.method,
|
||||
},
|
||||
},
|
||||
workflowId: foundWorkflow.id,
|
||||
}
|
||||
}
|
||||
return body
|
||||
}
|
||||
|
||||
if (foundWebhook.provider === 'hubspot') {
|
||||
const events = Array.isArray(body) ? body : [body]
|
||||
const event = events[0]
|
||||
@@ -2371,41 +2344,6 @@ export async function configureOutlookPolling(
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Configure RSS polling for a webhook
|
||||
*/
|
||||
export async function configureRssPolling(webhookData: any, requestId: string): Promise<boolean> {
|
||||
const logger = createLogger('RssWebhookSetup')
|
||||
logger.info(`[${requestId}] Setting up RSS polling for webhook ${webhookData.id}`)
|
||||
|
||||
try {
|
||||
const providerConfig = (webhookData.providerConfig as Record<string, any>) || {}
|
||||
const now = new Date()
|
||||
|
||||
await db
|
||||
.update(webhook)
|
||||
.set({
|
||||
providerConfig: {
|
||||
...providerConfig,
|
||||
lastCheckedTimestamp: now.toISOString(),
|
||||
lastSeenGuids: [],
|
||||
setupCompleted: true,
|
||||
},
|
||||
updatedAt: now,
|
||||
})
|
||||
.where(eq(webhook.id, webhookData.id))
|
||||
|
||||
logger.info(`[${requestId}] Successfully configured RSS polling for webhook ${webhookData.id}`)
|
||||
return true
|
||||
} catch (error: any) {
|
||||
logger.error(`[${requestId}] Failed to configure RSS polling`, {
|
||||
webhookId: webhookData.id,
|
||||
error: error.message,
|
||||
})
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
export function convertSquareBracketsToTwiML(twiml: string | undefined): string | undefined {
|
||||
if (!twiml) {
|
||||
return twiml
|
||||
|
||||
@@ -19,32 +19,8 @@ interface CustomTool {
|
||||
usageControl?: string
|
||||
}
|
||||
|
||||
/**
|
||||
* Stored tool format that may contain either reference or inline definition
|
||||
*/
|
||||
interface StoredCustomTool {
|
||||
type: string
|
||||
title?: string
|
||||
toolId?: string
|
||||
customToolId?: string
|
||||
schema?: any
|
||||
code?: string
|
||||
usageControl?: string
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if a stored tool is a reference-only custom tool (no inline definition)
|
||||
*/
|
||||
function isCustomToolReference(tool: StoredCustomTool): boolean {
|
||||
return tool.type === 'custom-tool' && !!tool.customToolId && !tool.code
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract all custom tools from agent blocks in the workflow state
|
||||
*
|
||||
* @remarks
|
||||
* Only extracts tools with inline definitions (legacy format).
|
||||
* Reference-only tools (new format with customToolId) are skipped since they're already in the database.
|
||||
*/
|
||||
export function extractCustomToolsFromWorkflowState(workflowState: any): CustomTool[] {
|
||||
const customToolsMap = new Map<string, CustomTool>()
|
||||
@@ -84,18 +60,14 @@ export function extractCustomToolsFromWorkflowState(workflowState: any): CustomT
|
||||
}
|
||||
|
||||
for (const tool of tools) {
|
||||
if (!tool || typeof tool !== 'object' || tool.type !== 'custom-tool') {
|
||||
continue
|
||||
}
|
||||
|
||||
// Skip reference-only tools - they're already in the database
|
||||
if (isCustomToolReference(tool)) {
|
||||
logger.debug(`Skipping reference-only custom tool: ${tool.title || tool.customToolId}`)
|
||||
continue
|
||||
}
|
||||
|
||||
// Only persist tools with inline definitions (legacy format)
|
||||
if (tool.title && tool.schema?.function && tool.code) {
|
||||
if (
|
||||
tool &&
|
||||
typeof tool === 'object' &&
|
||||
tool.type === 'custom-tool' &&
|
||||
tool.title &&
|
||||
tool.schema?.function &&
|
||||
tool.code
|
||||
) {
|
||||
const toolKey = tool.toolId || tool.title
|
||||
|
||||
if (!customToolsMap.has(toolKey)) {
|
||||
|
||||
@@ -128,16 +128,6 @@ function sanitizeConditions(conditionsJson: string): string {
|
||||
function sanitizeTools(tools: any[]): any[] {
|
||||
return tools.map((tool) => {
|
||||
if (tool.type === 'custom-tool') {
|
||||
// New reference format: minimal fields only
|
||||
if (tool.customToolId && !tool.schema && !tool.code) {
|
||||
return {
|
||||
type: tool.type,
|
||||
customToolId: tool.customToolId,
|
||||
usageControl: tool.usageControl,
|
||||
}
|
||||
}
|
||||
|
||||
// Legacy inline format: include all fields
|
||||
const sanitized: any = {
|
||||
type: tool.type,
|
||||
title: tool.title,
|
||||
@@ -145,19 +135,15 @@ function sanitizeTools(tools: any[]): any[] {
|
||||
usageControl: tool.usageControl,
|
||||
}
|
||||
|
||||
// Include schema for inline format (legacy format)
|
||||
if (tool.schema?.function) {
|
||||
sanitized.schema = {
|
||||
type: tool.schema.type || 'function',
|
||||
function: {
|
||||
name: tool.schema.function.name,
|
||||
description: tool.schema.function.description,
|
||||
parameters: tool.schema.function.parameters,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
// Include code for inline format (legacy format)
|
||||
if (tool.code) {
|
||||
sanitized.code = tool.code
|
||||
}
|
||||
|
||||
@@ -5,9 +5,6 @@ import { getTool } from '@/tools/utils'
|
||||
|
||||
const logger = createLogger('WorkflowValidation')
|
||||
|
||||
/**
|
||||
* Checks if a custom tool has a valid inline schema
|
||||
*/
|
||||
function isValidCustomToolSchema(tool: any): boolean {
|
||||
try {
|
||||
if (!tool || typeof tool !== 'object') return false
|
||||
@@ -30,26 +27,6 @@ function isValidCustomToolSchema(tool: any): boolean {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if a custom tool is a valid reference-only format (new format)
|
||||
*/
|
||||
function isValidCustomToolReference(tool: any): boolean {
|
||||
try {
|
||||
if (!tool || typeof tool !== 'object') return false
|
||||
if (tool.type !== 'custom-tool') return false
|
||||
|
||||
// Reference format: has customToolId but no inline schema/code
|
||||
// This is valid - the tool will be loaded dynamically during execution
|
||||
if (tool.customToolId && typeof tool.customToolId === 'string') {
|
||||
return true
|
||||
}
|
||||
|
||||
return false
|
||||
} catch (_err) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
export function sanitizeAgentToolsInBlocks(blocks: Record<string, any>): {
|
||||
blocks: Record<string, any>
|
||||
warnings: string[]
|
||||
@@ -93,34 +70,24 @@ export function sanitizeAgentToolsInBlocks(blocks: Record<string, any>): {
|
||||
// Allow non-custom tools to pass through as-is
|
||||
if (!tool || typeof tool !== 'object') return false
|
||||
if (tool.type !== 'custom-tool') return true
|
||||
|
||||
// Check if it's a valid reference-only format (new format)
|
||||
if (isValidCustomToolReference(tool)) {
|
||||
return true
|
||||
}
|
||||
|
||||
// Check if it's a valid inline schema format (legacy format)
|
||||
const ok = isValidCustomToolSchema(tool)
|
||||
if (!ok) {
|
||||
logger.warn('Removing invalid custom tool from workflow', {
|
||||
blockId,
|
||||
blockName: block.name,
|
||||
hasCustomToolId: !!tool.customToolId,
|
||||
hasSchema: !!tool.schema,
|
||||
})
|
||||
}
|
||||
return ok
|
||||
})
|
||||
.map((tool: any) => {
|
||||
if (tool.type === 'custom-tool') {
|
||||
// For reference-only tools, ensure usageControl default
|
||||
// Ensure required defaults to avoid client crashes
|
||||
if (!tool.code || typeof tool.code !== 'string') {
|
||||
tool.code = ''
|
||||
}
|
||||
if (!tool.usageControl) {
|
||||
tool.usageControl = 'auto'
|
||||
}
|
||||
// For inline tools (legacy), also ensure code default
|
||||
if (!tool.customToolId && (!tool.code || typeof tool.code !== 'string')) {
|
||||
tool.code = ''
|
||||
}
|
||||
}
|
||||
return tool
|
||||
})
|
||||
|
||||
@@ -115,6 +115,10 @@ export class Serializer {
|
||||
safeParallels
|
||||
)
|
||||
|
||||
if (validateRequired) {
|
||||
this.validateSubflowsBeforeExecution(blocks, safeLoops, safeParallels)
|
||||
}
|
||||
|
||||
return {
|
||||
version: '1.0',
|
||||
blocks: Object.values(blocks).map((block) =>
|
||||
@@ -135,6 +139,99 @@ export class Serializer {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate loop and parallel subflows for required inputs when running in "each/collection" modes
|
||||
*/
|
||||
private validateSubflowsBeforeExecution(
|
||||
blocks: Record<string, BlockState>,
|
||||
loops: Record<string, Loop>,
|
||||
parallels: Record<string, Parallel>
|
||||
): void {
|
||||
// Validate loops in forEach mode
|
||||
Object.values(loops || {}).forEach((loop) => {
|
||||
if (!loop) return
|
||||
if (loop.loopType === 'forEach') {
|
||||
const items = (loop as any).forEachItems
|
||||
|
||||
const hasNonEmptyCollection = (() => {
|
||||
if (items === undefined || items === null) return false
|
||||
if (Array.isArray(items)) return items.length > 0
|
||||
if (typeof items === 'object') return Object.keys(items).length > 0
|
||||
if (typeof items === 'string') {
|
||||
const trimmed = items.trim()
|
||||
if (trimmed.length === 0) return false
|
||||
// If it looks like JSON, parse to confirm non-empty [] / {}
|
||||
if (trimmed.startsWith('[') || trimmed.startsWith('{')) {
|
||||
try {
|
||||
const parsed = JSON.parse(trimmed)
|
||||
if (Array.isArray(parsed)) return parsed.length > 0
|
||||
if (parsed && typeof parsed === 'object') return Object.keys(parsed).length > 0
|
||||
} catch {
|
||||
// Non-JSON or invalid JSON string – allow non-empty string (could be a reference like <start.items>)
|
||||
return true
|
||||
}
|
||||
}
|
||||
// Non-JSON string – allow (may be a variable reference/expression)
|
||||
return true
|
||||
}
|
||||
return false
|
||||
})()
|
||||
|
||||
if (!hasNonEmptyCollection) {
|
||||
const blockName = blocks[loop.id]?.name || 'Loop'
|
||||
const error = new WorkflowValidationError(
|
||||
`${blockName} requires a collection for forEach mode. Provide a non-empty array/object or a variable reference.`,
|
||||
loop.id,
|
||||
'loop',
|
||||
blockName
|
||||
)
|
||||
throw error
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
// Validate parallels in collection mode
|
||||
Object.values(parallels || {}).forEach((parallel) => {
|
||||
if (!parallel) return
|
||||
if ((parallel as any).parallelType === 'collection') {
|
||||
const distribution = (parallel as any).distribution
|
||||
|
||||
const hasNonEmptyDistribution = (() => {
|
||||
if (distribution === undefined || distribution === null) return false
|
||||
if (Array.isArray(distribution)) return distribution.length > 0
|
||||
if (typeof distribution === 'object') return Object.keys(distribution).length > 0
|
||||
if (typeof distribution === 'string') {
|
||||
const trimmed = distribution.trim()
|
||||
if (trimmed.length === 0) return false
|
||||
// If it looks like JSON, parse to confirm non-empty [] / {}
|
||||
if (trimmed.startsWith('[') || trimmed.startsWith('{')) {
|
||||
try {
|
||||
const parsed = JSON.parse(trimmed)
|
||||
if (Array.isArray(parsed)) return parsed.length > 0
|
||||
if (parsed && typeof parsed === 'object') return Object.keys(parsed).length > 0
|
||||
} catch {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
return false
|
||||
})()
|
||||
|
||||
if (!hasNonEmptyDistribution) {
|
||||
const blockName = blocks[parallel.id]?.name || 'Parallel'
|
||||
const error = new WorkflowValidationError(
|
||||
`${blockName} requires a collection for collection mode. Provide a non-empty array/object or a variable reference.`,
|
||||
parallel.id,
|
||||
'parallel',
|
||||
blockName
|
||||
)
|
||||
throw error
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
private serializeBlock(
|
||||
block: BlockState,
|
||||
options: {
|
||||
@@ -351,15 +448,6 @@ export class Serializer {
|
||||
) {
|
||||
params[id] = subBlock.value
|
||||
}
|
||||
|
||||
if (subBlockConfig?.type === 'checkbox-list' && Array.isArray(subBlockConfig.options)) {
|
||||
subBlockConfig.options.forEach((option: { id: string; label: string }) => {
|
||||
const optionSubBlock = block.subBlocks[option.id]
|
||||
if (optionSubBlock !== undefined) {
|
||||
params[option.id] = optionSubBlock.value
|
||||
}
|
||||
})
|
||||
}
|
||||
})
|
||||
|
||||
// Then check for any subBlocks with default values
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user