v0.5.43: export logs, circleback, grain, vertex, code hygiene, schedule improvements

This commit is contained in:
Waleed
2025-12-23 19:19:18 -08:00
committed by GitHub
267 changed files with 19269 additions and 4143 deletions

File diff suppressed because one or more lines are too long

View File

@@ -13,6 +13,7 @@ import {
BrainIcon,
BrowserUseIcon,
CalendlyIcon,
CirclebackIcon,
ClayIcon,
ConfluenceIcon,
CursorIcon,
@@ -40,6 +41,7 @@ import {
GoogleSlidesIcon,
GoogleVaultIcon,
GrafanaIcon,
GrainIcon,
HubspotIcon,
HuggingFaceIcon,
HunterIOIcon,
@@ -128,6 +130,7 @@ export const blockTypeToIconMap: Record<string, IconComponent> = {
asana: AsanaIcon,
browser_use: BrowserUseIcon,
calendly: CalendlyIcon,
circleback: CirclebackIcon,
clay: ClayIcon,
confluence: ConfluenceIcon,
cursor: CursorIcon,
@@ -154,6 +157,7 @@ export const blockTypeToIconMap: Record<string, IconComponent> = {
google_slides: GoogleSlidesIcon,
google_vault: GoogleVaultIcon,
grafana: GrafanaIcon,
grain: GrainIcon,
hubspot: HubspotIcon,
huggingface: HuggingFaceIcon,
hunter: HunterIOIcon,

View File

@@ -0,0 +1,59 @@
---
title: Circleback
description: KI-gestützte Meeting-Notizen und Aufgaben
---
import { BlockInfoCard } from "@/components/ui/block-info-card"
<BlockInfoCard
type="circleback"
color="linear-gradient(180deg, #E0F7FA 0%, #FFFFFF 100%)"
/>
{/* MANUAL-CONTENT-START:intro */}
[Circleback](https://circleback.ai/) ist eine KI-gestützte Plattform, die Meeting-Notizen, Aufgaben, Transkripte und Aufzeichnungen für Ihr Team automatisiert. Wenn ein Meeting abgeschlossen ist, verarbeitet Circleback die Konversation und liefert detaillierte Notizen und Aufgaben sowie ein Transkript und eine Aufzeichnung (sofern verfügbar). Dies hilft Teams dabei, Erkenntnisse effizient zu erfassen, Aufgaben zu verteilen und sicherzustellen, dass nichts übersehen wird – alles nahtlos in Ihre Workflows integriert.
Mit der Sim Circleback-Integration können Sie:
- **Detaillierte Meeting-Notizen und Aufgaben erhalten**: Sammeln Sie automatisch gut formatierte Meeting-Zusammenfassungen und verfolgen Sie umsetzbare Aufgaben, die während Ihrer Anrufe besprochen wurden.
- **Auf vollständige Meeting-Aufzeichnungen und Transkripte zugreifen**: Erhalten Sie die vollständige Konversation und die zugehörige Aufzeichnung, um wichtige Momente einfach zu überprüfen oder mit Kollegen zu teilen.
- **Teilnehmerinformationen und Meeting-Kontext erfassen**: Teilnehmerlisten, Meeting-Metadaten und Tags helfen dabei, Ihre Daten organisiert und umsetzbar zu halten.
- **Erkenntnisse direkt in Ihre Workflows liefern**: Lösen Sie Automatisierungen aus oder senden Sie Circleback-Daten an andere Systeme, sobald ein Meeting beendet ist, mithilfe der leistungsstarken Webhook-Trigger von Sim.
**So funktioniert es in Sim:**
Circleback verwendet Webhook-Trigger: Sobald ein Meeting verarbeitet wurde, werden die Daten automatisch an Ihren Agenten oder Ihre Automatisierung übertragen. Sie können weitere Automatisierungen basierend auf folgenden Ereignissen erstellen:
- Meeting abgeschlossen (alle verarbeiteten Daten verfügbar)
- Neue Notizen (Notizen sind verfügbar, noch bevor das Meeting vollständig verarbeitet ist)
- Raw-Webhook-Integration für erweiterte Anwendungsfälle
**Die folgenden Informationen sind in der Circleback-Meeting-Webhook-Payload verfügbar:**
| Feld | Typ | Beschreibung |
|----------------|---------|----------------------------------------------------|
| `id` | number | Circleback Meeting-ID |
| `name` | string | Meeting-Titel |
| `url` | string | Virtueller Meeting-Link (Zoom, Meet, Teams usw.) |
| `createdAt` | string | Zeitstempel der Meeting-Erstellung |
| `duration` | number | Dauer in Sekunden |
| `recordingUrl` | string | Aufzeichnungs-URL (24 Stunden gültig) |
| `tags` | json | Array von Tags |
| `icalUid` | string | Kalender-Event-ID |
| `attendees` | json | Array von Teilnehmer-Objekten |
| `notes` | string | Meeting-Notizen in Markdown |
| `actionItems` | json | Array von Aufgaben |
| `transcript` | json | Array von Transkript-Segmenten |
| `insights` | json | Vom Nutzer erstellte Insights |
| `meeting` | json | Vollständige Meeting-Daten |
Egal, ob Sie sofortige Zusammenfassungen verteilen, Aufgaben protokollieren oder benutzerdefinierte Workflows erstellen möchten, die durch neue Meeting-Daten ausgelöst werden – Circleback und Sim machen es nahtlos, alles rund um Ihre Meetings automatisch zu verwalten.
{/* MANUAL-CONTENT-END */}
## Nutzungsanleitung
Erhalten Sie Meeting-Notizen, Aufgaben, Transkripte und Aufzeichnungen, wenn Meetings verarbeitet werden. Circleback nutzt Webhooks, um Daten an Ihre Workflows zu übermitteln.
## Hinweise
- Kategorie: `triggers`
- Typ: `circleback`

View File

@@ -0,0 +1,218 @@
---
title: Grain
description: Zugriff auf Meeting-Aufzeichnungen, Transkripte und KI-Zusammenfassungen
---
import { BlockInfoCard } from "@/components/ui/block-info-card"
<BlockInfoCard
type="grain"
color="#F6FAF9"
/>
{/* MANUAL-CONTENT-START:intro */}
[Grain](https://grain.com/) ist eine moderne Plattform zum Erfassen, Speichern und Teilen von Meeting-Aufzeichnungen, Transkripten, Highlights und KI-gestützten Zusammenfassungen. Grain ermöglicht es Teams, Gespräche in umsetzbare Erkenntnisse zu verwandeln und alle über wichtige Momente aus Meetings auf dem Laufenden zu halten.
Mit Grain können Sie:
- **Auf durchsuchbare Aufzeichnungen und Transkripte zugreifen**: Finden und überprüfen Sie jedes Meeting nach Stichwort, Teilnehmer oder Thema.
- **Highlights und Clips teilen**: Erfassen Sie wichtige Momente und teilen Sie kurze Video-/Audio-Highlights in Ihrem Team oder in Workflows.
- **KI-generierte Zusammenfassungen erhalten**: Erstellen Sie automatisch Meeting-Zusammenfassungen, Aktionspunkte und wichtige Erkenntnisse mithilfe der fortschrittlichen KI von Grain.
- **Meetings nach Team oder Typ organisieren**: Taggen und kategorisieren Sie Aufzeichnungen für einfachen Zugriff und Reporting.
Die Sim-Grain-Integration ermöglicht es Ihren Agenten:
- Meeting-Aufzeichnungen und Details nach flexiblen Filtern (Datum/Uhrzeit, Teilnehmer, Team usw.) aufzulisten, zu suchen und abzurufen.
- Auf KI-Zusammenfassungen, Teilnehmer, Highlights und andere Metadaten für Meetings zuzugreifen, um Automatisierungen oder Analysen zu unterstützen.
- Workflows auszulösen, sobald neue Meetings verarbeitet, Zusammenfassungen generiert oder Highlights über Grain-Webhooks erstellt werden.
- Grain-Daten einfach in andere Tools zu überführen oder Teammitglieder zu benachrichtigen, sobald etwas Wichtiges in einem Meeting passiert.
Ob Sie Follow-up-Aktionen automatisieren, wichtige Gespräche dokumentieren oder Erkenntnisse in Ihrer Organisation sichtbar machen möchten – Grain und Sim machen es einfach, Meeting-Intelligence mit Ihren Workflows zu verbinden.
{/* MANUAL-CONTENT-END */}
## Nutzungsanweisungen
Integrieren Sie Grain in Ihren Workflow. Greifen Sie auf Meeting-Aufzeichnungen, Transkripte, Highlights und KI-generierte Zusammenfassungen zu. Kann auch Workflows basierend auf Grain-Webhook-Ereignissen auslösen.
## Tools
### `grain_list_recordings`
Aufzeichnungen von Grain mit optionalen Filtern und Paginierung auflisten
#### Eingabe
| Parameter | Typ | Erforderlich | Beschreibung |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Ja | Grain API-Schlüssel \(Personal Access Token\) |
| `cursor` | string | Nein | Paginierungs-Cursor für nächste Seite |
| `beforeDatetime` | string | Nein | Nur Aufzeichnungen vor diesem ISO8601-Zeitstempel |
| `afterDatetime` | string | Nein | Nur Aufzeichnungen nach diesem ISO8601-Zeitstempel |
| `participantScope` | string | Nein | Filter: "internal" oder "external" |
| `titleSearch` | string | Nein | Suchbegriff zum Filtern nach Aufzeichnungstitel |
| `teamId` | string | Nein | Nach Team-UUID filtern |
| `meetingTypeId` | string | Nein | Nach Meeting-Typ-UUID filtern |
| `includeHighlights` | boolean | Nein | Highlights/Clips in Antwort einschließen |
| `includeParticipants` | boolean | Nein | Teilnehmerliste in Antwort einschließen |
| `includeAiSummary` | boolean | Nein | KI-generierte Zusammenfassung einschließen |
#### Ausgabe
| Parameter | Typ | Beschreibung |
| --------- | ---- | ----------- |
| `recordings` | array | Array von Aufzeichnungsobjekten |
### `grain_get_recording`
Details einer einzelnen Aufzeichnung nach ID abrufen
#### Eingabe
| Parameter | Typ | Erforderlich | Beschreibung |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Ja | Grain API-Schlüssel \(Personal Access Token\) |
| `recordingId` | string | Ja | Die Aufzeichnungs-UUID |
| `includeHighlights` | boolean | Nein | Highlights/Clips einschließen |
| `includeParticipants` | boolean | Nein | Teilnehmerliste einschließen |
| `includeAiSummary` | boolean | Nein | KI-Zusammenfassung einschließen |
| `includeCalendarEvent` | boolean | Nein | Kalenderereignisdaten einschließen |
| `includeHubspot` | boolean | Nein | HubSpot-Verknüpfungen einschließen |
#### Ausgabe
| Parameter | Typ | Beschreibung |
| --------- | ---- | ----------- |
| `id` | string | Aufnahme-UUID |
| `title` | string | Aufnahmetitel |
| `start_datetime` | string | ISO8601-Startzeitstempel |
| `end_datetime` | string | ISO8601-Endzeitstempel |
| `duration_ms` | number | Dauer in Millisekunden |
| `media_type` | string | audio, transcript oder video |
| `source` | string | Aufnahmequelle \(zoom, meet, teams, etc.\) |
| `url` | string | URL zur Ansicht in Grain |
| `thumbnail_url` | string | Vorschaubild-URL |
| `tags` | array | Array von Tag-Strings |
| `teams` | array | Teams, zu denen die Aufnahme gehört |
| `meeting_type` | object | Meeting-Typ-Informationen \(id, name, scope\) |
| `highlights` | array | Highlights \(falls enthalten\) |
| `participants` | array | Teilnehmer \(falls enthalten\) |
| `ai_summary` | object | KI-Zusammenfassungstext \(falls enthalten\) |
| `calendar_event` | object | Kalenderereignisdaten \(falls enthalten\) |
| `hubspot` | object | HubSpot-Verknüpfungen \(falls enthalten\) |
### `grain_get_transcript`
Vollständiges Transkript einer Aufnahme abrufen
#### Eingabe
| Parameter | Typ | Erforderlich | Beschreibung |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Ja | Grain-API-Schlüssel \(Personal Access Token\) |
| `recordingId` | string | Ja | Die Aufnahme-UUID |
#### Ausgabe
| Parameter | Typ | Beschreibung |
| --------- | ---- | ----------- |
| `transcript` | array | Array von Transkriptabschnitten |
### `grain_list_teams`
Alle Teams im Workspace auflisten
#### Eingabe
| Parameter | Typ | Erforderlich | Beschreibung |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Ja | Grain API-Schlüssel \(Personal Access Token\) |
#### Ausgabe
| Parameter | Typ | Beschreibung |
| --------- | ---- | ----------- |
| `teams` | array | Array von Team-Objekten |
### `grain_list_meeting_types`
Alle Meeting-Typen im Workspace auflisten
#### Eingabe
| Parameter | Typ | Erforderlich | Beschreibung |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Ja | Grain API-Schlüssel \(Personal Access Token\) |
#### Ausgabe
| Parameter | Typ | Beschreibung |
| --------- | ---- | ----------- |
| `meeting_types` | array | Array von Meeting-Typ-Objekten |
### `grain_create_hook`
Einen Webhook erstellen, um Aufzeichnungs-Events zu empfangen
#### Eingabe
| Parameter | Typ | Erforderlich | Beschreibung |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Ja | Grain API-Schlüssel \(Personal Access Token\) |
| `hookUrl` | string | Ja | Webhook-Endpunkt-URL \(muss mit 2xx antworten\) |
| `filterBeforeDatetime` | string | Nein | Filter: Aufzeichnungen vor diesem Datum |
| `filterAfterDatetime` | string | Nein | Filter: Aufzeichnungen nach diesem Datum |
| `filterParticipantScope` | string | Nein | Filter: "internal" oder "external" |
| `filterTeamId` | string | Nein | Filter: spezifische Team-UUID |
| `filterMeetingTypeId` | string | Nein | Filter: spezifischer Meeting-Typ |
| `includeHighlights` | boolean | Nein | Highlights in Webhook-Payload einschließen |
| `includeParticipants` | boolean | Nein | Teilnehmer in Webhook-Payload einschließen |
| `includeAiSummary` | boolean | Nein | KI-Zusammenfassung in Webhook-Payload einschließen |
#### Ausgabe
| Parameter | Typ | Beschreibung |
| --------- | ---- | ----------- |
| `id` | string | Hook-UUID |
| `enabled` | boolean | Ob der Hook aktiv ist |
| `hook_url` | string | Die Webhook-URL |
| `filter` | object | Angewendete Filter |
| `include` | object | Enthaltene Felder |
| `inserted_at` | string | ISO8601-Erstellungszeitstempel |
### `grain_list_hooks`
Alle Webhooks für das Konto auflisten
#### Eingabe
| Parameter | Typ | Erforderlich | Beschreibung |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Ja | Grain-API-Schlüssel \(Personal Access Token\) |
#### Ausgabe
| Parameter | Typ | Beschreibung |
| --------- | ---- | ----------- |
| `hooks` | array | Array von Hook-Objekten |
### `grain_delete_hook`
Einen Webhook anhand der ID löschen
#### Eingabe
| Parameter | Typ | Erforderlich | Beschreibung |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Ja | Grain-API-Schlüssel \(Personal Access Token\) |
| `hookId` | string | Ja | Die zu löschende Hook-UUID |
#### Ausgabe
| Parameter | Typ | Beschreibung |
| --------- | ---- | ----------- |
| `success` | boolean | True, wenn der Webhook erfolgreich gelöscht wurde |
## Hinweise
- Kategorie: `tools`
- Typ: `grain`

View File

@@ -61,8 +61,6 @@ Extrahieren und verarbeiten Sie Webinhalte in sauberen, LLM-freundlichen Text mi
| Parameter | Typ | Beschreibung |
| --------- | ---- | ----------- |
| `content` | string | Der extrahierte Inhalt von der URL, verarbeitet zu sauberem, LLM-freundlichem Text |
| `links` | array | Liste der auf der Seite gefundenen Links (wenn gatherLinks oder withLinksummary aktiviert ist) |
| `images` | array | Liste der auf der Seite gefundenen Bilder (wenn withImagesummary aktiviert ist) |
### `jina_search`

View File

@@ -42,13 +42,14 @@ Senden Sie eine Chat-Completion-Anfrage an jeden unterstützten LLM-Anbieter
| `model` | string | Ja | Das zu verwendende Modell \(z. B. gpt-4o, claude-sonnet-4-5, gemini-2.0-flash\) |
| `systemPrompt` | string | Nein | System-Prompt zur Festlegung des Verhaltens des Assistenten |
| `context` | string | Ja | Die Benutzernachricht oder der Kontext, der an das Modell gesendet werden soll |
| `apiKey` | string | Nein | API-Schlüssel für den Anbieter \(verwendet Plattform-Schlüssel, falls nicht für gehostete Modelle angegeben\) |
| `temperature` | number | Nein | Temperatur für die Antwortgenerierung \(0-2\) |
| `apiKey` | string | Nein | API-Schlüssel für den Anbieter \(verwendet den Plattformschlüssel, falls nicht für gehostete Modelle angegeben\) |
| `temperature` | number | Nein | Temperatur für die Antwortgenerierung \(0-2\) |
| `maxTokens` | number | Nein | Maximale Anzahl von Tokens in der Antwort |
| `azureEndpoint` | string | Nein | Azure OpenAI-Endpunkt-URL |
| `azureApiVersion` | string | Nein | Azure OpenAI-API-Version |
| `vertexProject` | string | Nein | Google Cloud-Projekt-ID für Vertex AI |
| `vertexLocation` | string | Nein | Google Cloud-Standort für Vertex AI \(Standard: us-central1\) |
| `vertexCredential` | string | Nein | Google Cloud OAuth-Anmeldeinformations-ID für Vertex AI |
#### Ausgabe

View File

@@ -5,7 +5,6 @@ title: Zeitplan
import { Callout } from 'fumadocs-ui/components/callout'
import { Tab, Tabs } from 'fumadocs-ui/components/tabs'
import { Image } from '@/components/ui/image'
import { Video } from '@/components/ui/video'
Der Zeitplan-Block löst Workflows automatisch nach einem wiederkehrenden Zeitplan zu bestimmten Intervallen oder Zeiten aus.
@@ -21,67 +20,58 @@ Der Zeitplan-Block löst Workflows automatisch nach einem wiederkehrenden Zeitpl
## Zeitplan-Optionen
Konfigurieren Sie, wann Ihr Workflow ausgeführt wird, mit den Dropdown-Optionen:
Konfigurieren Sie, wann Ihr Workflow ausgeführt wird:
<Tabs items={['Einfache Intervalle', 'Cron-Ausdrücke']}>
<Tab>
<ul className="list-disc space-y-1 pl-6">
<li><strong>Alle paar Minuten</strong>: 5, 15, 30 Minuten-Intervalle</li>
<li><strong>Stündlich</strong>: Jede Stunde oder alle paar Stunden</li>
<li><strong>Täglich</strong>: Einmal oder mehrmals pro Tag</li>
<li><strong>Wöchentlich</strong>: Bestimmte Wochentage</li>
<li><strong>Monatlich</strong>: Bestimmte Tage des Monats</li>
<li><strong>Alle X Minuten</strong>: Ausführung in Minutenintervallen (1-1440)</li>
<li><strong>Stündlich</strong>: Ausführung zu einer bestimmten Minute jeder Stunde</li>
<li><strong>Täglich</strong>: Ausführung zu einer bestimmten Uhrzeit jeden Tag</li>
<li><strong>Wöchentlich</strong>: Ausführung an einem bestimmten Tag und einer bestimmten Uhrzeit jede Woche</li>
<li><strong>Monatlich</strong>: Ausführung an einem bestimmten Tag und einer bestimmten Uhrzeit jeden Monat</li>
</ul>
</Tab>
<Tab>
<p>Verwenden Sie Cron-Ausdrücke für erweiterte Zeitplanung:</p>
<p>Verwenden Sie Cron-Ausdrücke für erweiterte Planung:</p>
<div className="text-sm space-y-1">
<div><code>0 9 * * 1-5</code> - Jeden Wochentag um 9 Uhr</div>
<div><code>*/15 * * * *</code> - Alle 15 Minuten</div>
<div><code>0 0 1 * *</code> - Am ersten Tag jedes Monats</div>
<div><code>0 0 1 * *</code> - Erster Tag jedes Monats</div>
</div>
</Tab>
</Tabs>
## Zeitpläne konfigurieren
## Aktivierung
Wenn ein Workflow geplant ist:
- Der Zeitplan wird **aktiv** und zeigt die nächste Ausführungszeit an
- Klicken Sie auf die Schaltfläche **"Geplant"**, um den Zeitplan zu deaktivieren
- Zeitpläne werden nach **3 aufeinanderfolgenden Fehlern** automatisch deaktiviert
Zeitpläne sind an die Workflow-Bereitstellung gebunden:
<div className="flex justify-center">
<Image
src="/static/blocks/schedule-2.png"
alt="Aktiver Zeitplan-Block"
width={500}
height={400}
className="my-6"
/>
</div>
<div className="flex justify-center">
<Image
src="/static/blocks/schedule-3.png"
alt="Deaktivierter Zeitplan"
width={500}
height={350}
className="my-6"
/>
</div>
<div className="flex justify-center">
<Image
src="/static/blocks/schedule-3.png"
alt="Deaktivierter Zeitplan"
width={500}
height={400}
className="my-6"
/>
</div>
Deaktivierte Zeitpläne zeigen an, wann sie zuletzt aktiv waren. Klicken Sie auf das **"Deaktiviert"**-Badge, um den Zeitplan wieder zu aktivieren.
- **Workflow bereitstellen** → Zeitplan wird aktiv und beginnt mit der Ausführung
- **Workflow-Bereitstellung aufheben** → Zeitplan wird entfernt
- **Workflow erneut bereitstellen** → Zeitplan wird mit aktueller Konfiguration neu erstellt
<Callout>
Zeitplan-Blöcke können keine eingehenden Verbindungen empfangen und dienen ausschließlich als Workflow-Auslöser.
Sie müssen Ihren Workflow bereitstellen, damit der Zeitplan mit der Ausführung beginnt. Konfigurieren Sie den Zeitplan-Block und stellen Sie ihn dann über die Symbolleiste bereit.
</Callout>
## Automatische Deaktivierung
Zeitpläne werden nach **10 aufeinanderfolgenden Fehlschlägen** automatisch deaktiviert, um unkontrollierte Fehler zu verhindern. Bei Deaktivierung:
- Erscheint ein Warnhinweis auf dem Zeitplan-Block
- Die Ausführung des Zeitplans wird gestoppt
- Klicken Sie auf den Hinweis, um den Zeitplan zu reaktivieren
<div className="flex justify-center">
<Image
src="/static/blocks/schedule-3.png"
alt="Deaktivierter Zeitplan"
width={500}
height={400}
className="my-6"
/>
</div>
<Callout>
Zeitplan-Blöcke können keine eingehenden Verbindungen empfangen und dienen ausschließlich als Workflow-Einstiegspunkte.
</Callout>

View File

@@ -0,0 +1,64 @@
---
title: Circleback
description: AI-powered meeting notes and action items
---
import { BlockInfoCard } from "@/components/ui/block-info-card"
<BlockInfoCard
type="circleback"
color="linear-gradient(180deg, #E0F7FA 0%, #FFFFFF 100%)"
/>
{/* MANUAL-CONTENT-START:intro */}
[Circleback](https://circleback.ai/) is an AI-powered platform that automates meeting notes, action items, transcripts, and recordings for your team. When a meeting is completed, Circleback processes the conversation and provides detailed notes and action items, along with a transcript and a recording (when available). This helps teams efficiently capture insights, distribute action items, and ensure nothing is missed—all seamlessly integrated into your workflows.
With the Sim Circleback integration, you can:
- **Receive detailed meeting notes and action items**: Automatically collect well-formatted meeting summaries and track actionable tasks discussed during your calls.
- **Access complete meeting recordings and transcripts**: Get the full conversation and the associated recording, making it easy to review key moments or share with colleagues.
- **Capture attendee information and meeting context**: Attendee lists, meeting metadata, and tags help keep your data organized and actionable.
- **Deliver insights directly into your workflows**: Trigger automations or send Circleback data to other systems the moment a meeting is done, using Sim's powerful webhook triggers.
**How it works in Sim:**
Circleback uses webhook triggers: whenever a meeting is processed, data is pushed automatically to your agent or automation. You can build further automations based on:
- Meeting completed (all processed data available)
- New notes (notes ready even before the full meeting is processed)
- Raw webhook integration for advanced use cases
**The following information is available in the Circleback meeting webhook payload:**
| Field | Type | Description |
|----------------|---------|----------------------------------------------------|
| `id` | number | Circleback meeting ID |
| `name` | string | Meeting title |
| `url` | string | Virtual meeting URL (Zoom, Meet, Teams, etc.) |
| `createdAt` | string | Meeting creation timestamp |
| `duration` | number | Duration in seconds |
| `recordingUrl` | string | Recording URL (valid 24 hours) |
| `tags` | json | Array of tags |
| `icalUid` | string | Calendar event ID |
| `attendees` | json | Array of attendee objects |
| `notes` | string | Meeting notes in Markdown |
| `actionItems` | json | Array of action items |
| `transcript` | json | Array of transcript segments |
| `insights` | json | User-created insights |
| `meeting` | json | Full meeting payload |
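For reference, a hypothetical payload might look like the sketch below. Top-level field names follow the table above; the nested shapes of `attendees`, `actionItems`, and `transcript` entries are illustrative assumptions, not Circleback's documented schema.

```typescript
// Hypothetical Circleback webhook payload (values and nested shapes are illustrative only).
const examplePayload = {
  id: 48213,                                    // Circleback meeting ID
  name: "Weekly product sync",                  // meeting title
  url: "https://zoom.us/j/123456789",           // virtual meeting URL
  createdAt: "2025-12-18T17:00:00Z",
  duration: 1800,                               // seconds
  recordingUrl: "https://example.com/recordings/48213", // valid for 24 hours
  tags: ["product", "weekly"],
  icalUid: "abc123@google.com",
  attendees: [{ name: "Ada Lovelace", email: "ada@example.com" }],        // assumed shape
  notes: "## Summary\n- Discussed the v0.5.43 release",                   // Markdown notes
  actionItems: [{ title: "Draft release notes" }],                        // assumed shape
  transcript: [{ speaker: "Ada Lovelace", text: "Let's get started." }],  // assumed shape
  insights: [],
  meeting: {},                                  // full meeting payload
};
```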
Whether you want to distribute instant summaries, log action items, or build custom workflows triggered by new meeting data, Circleback and Sim make it seamless to handle everything related to your meetings—automatically.
{/* MANUAL-CONTENT-END */}
## Usage Instructions
Receive meeting notes, action items, transcripts, and recordings when meetings are processed. Circleback uses webhooks to push data to your workflows.
## Notes
- Category: `triggers`
- Type: `circleback`

View File

@@ -0,0 +1,223 @@
---
title: Grain
description: Access meeting recordings, transcripts, and AI summaries
---
import { BlockInfoCard } from "@/components/ui/block-info-card"
<BlockInfoCard
type="grain"
color="#F6FAF9"
/>
{/* MANUAL-CONTENT-START:intro */}
[Grain](https://grain.com/) is a modern platform for capturing, storing, and sharing meeting recordings, transcripts, highlights, and AI-powered summaries. Grain enables teams to turn conversations into actionable insights and keep everyone aligned on key moments from meetings.
With Grain, you can:
- **Access searchable recordings and transcripts**: Find and review every meeting by keyword, participant, or topic.
- **Share highlights and clips**: Capture important moments and share short video/audio highlights across your team or workflows.
- **Get AI-generated summaries**: Automatically produce meeting summaries, action items, and key insights using Grain's advanced AI.
- **Organize meetings by team or type**: Tag and categorize recordings for easy access and reporting.
The Sim Grain integration empowers your agents to:
- List, search, and retrieve meeting recordings and details by flexible filters (datetime, participant, team, etc.).
- Access AI summaries, participants, highlights, and other metadata for meetings to power automations or analysis.
- Trigger workflows whenever new meetings are processed, summaries are generated, or highlights are created via Grain webhooks.
- Easily bridge Grain data into other tools or notify teammates the moment something important happens in a meeting.
Whether you want to automate follow-up actions, keep records of important conversations, or surface insights across your organization, Grain and Sim make it easy to connect meeting intelligence to your workflows.
{/* MANUAL-CONTENT-END */}
## Usage Instructions
Integrate Grain into your workflow. Access meeting recordings, transcripts, highlights, and AI-generated summaries. Can also trigger workflows based on Grain webhook events.
## Tools
### `grain_list_recordings`
List recordings from Grain with optional filters and pagination
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Grain API key \(Personal Access Token\) |
| `cursor` | string | No | Pagination cursor for next page |
| `beforeDatetime` | string | No | Only recordings before this ISO8601 timestamp |
| `afterDatetime` | string | No | Only recordings after this ISO8601 timestamp |
| `participantScope` | string | No | Filter: "internal" or "external" |
| `titleSearch` | string | No | Search term to filter by recording title |
| `teamId` | string | No | Filter by team UUID |
| `meetingTypeId` | string | No | Filter by meeting type UUID |
| `includeHighlights` | boolean | No | Include highlights/clips in response |
| `includeParticipants` | boolean | No | Include participant list in response |
| `includeAiSummary` | boolean | No | Include AI-generated summary |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `recordings` | array | Array of recording objects |
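As a quick sketch, the inputs above can be captured as a TypeScript type. This is assembled from the parameter table and is not the tool's actual type definition; the token value is a placeholder.

```typescript
// Sketch of grain_list_recordings inputs, derived from the parameter table above.
interface GrainListRecordingsParams {
  apiKey: string;                               // Grain Personal Access Token (required)
  cursor?: string;                              // pagination cursor for the next page
  beforeDatetime?: string;                      // ISO8601 timestamp
  afterDatetime?: string;                       // ISO8601 timestamp
  participantScope?: "internal" | "external";
  titleSearch?: string;
  teamId?: string;                              // team UUID
  meetingTypeId?: string;                       // meeting type UUID
  includeHighlights?: boolean;
  includeParticipants?: boolean;
  includeAiSummary?: boolean;
}

// Example: recordings from the last week, with AI summaries included.
const listParams: GrainListRecordingsParams = {
  apiKey: "YOUR_PERSONAL_ACCESS_TOKEN",
  afterDatetime: "2025-12-16T00:00:00Z",
  includeAiSummary: true,
};
```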
### `grain_get_recording`
Get details of a single recording by ID
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Grain API key \(Personal Access Token\) |
| `recordingId` | string | Yes | The recording UUID |
| `includeHighlights` | boolean | No | Include highlights/clips |
| `includeParticipants` | boolean | No | Include participant list |
| `includeAiSummary` | boolean | No | Include AI summary |
| `includeCalendarEvent` | boolean | No | Include calendar event data |
| `includeHubspot` | boolean | No | Include HubSpot associations |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `id` | string | Recording UUID |
| `title` | string | Recording title |
| `start_datetime` | string | ISO8601 start timestamp |
| `end_datetime` | string | ISO8601 end timestamp |
| `duration_ms` | number | Duration in milliseconds |
| `media_type` | string | audio, transcript, or video |
| `source` | string | Recording source \(zoom, meet, teams, etc.\) |
| `url` | string | URL to view in Grain |
| `thumbnail_url` | string | Thumbnail image URL |
| `tags` | array | Array of tag strings |
| `teams` | array | Teams the recording belongs to |
| `meeting_type` | object | Meeting type info \(id, name, scope\) |
| `highlights` | array | Highlights \(if included\) |
| `participants` | array | Participants \(if included\) |
| `ai_summary` | object | AI summary text \(if included\) |
| `calendar_event` | object | Calendar event data \(if included\) |
| `hubspot` | object | HubSpot associations \(if included\) |
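The fields above can be consumed directly in downstream blocks. A minimal sketch follows, assuming the recording object has already been returned by the tool; the internal structure of `ai_summary` is an assumption (the table only describes it as an object holding the summary).

```typescript
// Illustrative handling of a grain_get_recording result; `recording` stands in for the tool output.
const recording: Record<string, any> = {
  id: "4f3c2a9e-0000-4000-8000-000000000000",
  title: "Customer onboarding call",
  start_datetime: "2025-12-18T17:00:00Z",
  end_datetime: "2025-12-18T17:30:00Z",
  duration_ms: 1_800_000,
  url: "https://grain.com/recordings/example",
  ai_summary: { text: "Discussed rollout timeline and next steps." }, // assumed shape
};

const durationMinutes = Math.round(recording.duration_ms / 60_000);
console.log(`${recording.title} (${durationMinutes} min): ${recording.ai_summary?.text ?? "no summary"}`);
```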
### `grain_get_transcript`
Get the full transcript of a recording
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Grain API key \(Personal Access Token\) |
| `recordingId` | string | Yes | The recording UUID |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `transcript` | array | Array of transcript sections |
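The transcript comes back as an array of sections. The per-section fields are not spelled out here, so the sketch below assumes each section carries a speaker and a text snippet; treat those property names as placeholders.

```typescript
// Joining transcript sections into plain text; the `speaker`/`text` fields are assumptions.
const transcript: Array<Record<string, any>> = [
  { speaker: "Ada Lovelace", text: "Let's start with the release checklist." },
  { speaker: "Grace Hopper", text: "Docs and changelog are ready to ship." },
];

const plainText = transcript
  .map((section) => `${section.speaker ?? "Unknown"}: ${section.text ?? ""}`)
  .join("\n");
console.log(plainText);
```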
### `grain_list_teams`
List all teams in the workspace
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Grain API key \(Personal Access Token\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `teams` | array | Array of team objects |
### `grain_list_meeting_types`
List all meeting types in the workspace
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Grain API key \(Personal Access Token\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `meeting_types` | array | Array of meeting type objects |
### `grain_create_hook`
Create a webhook to receive recording events
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Grain API key \(Personal Access Token\) |
| `hookUrl` | string | Yes | Webhook endpoint URL \(must respond 2xx\) |
| `filterBeforeDatetime` | string | No | Filter: recordings before this date |
| `filterAfterDatetime` | string | No | Filter: recordings after this date |
| `filterParticipantScope` | string | No | Filter: "internal" or "external" |
| `filterTeamId` | string | No | Filter: specific team UUID |
| `filterMeetingTypeId` | string | No | Filter: specific meeting type |
| `includeHighlights` | boolean | No | Include highlights in webhook payload |
| `includeParticipants` | boolean | No | Include participants in webhook payload |
| `includeAiSummary` | boolean | No | Include AI summary in webhook payload |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `id` | string | Hook UUID |
| `enabled` | boolean | Whether hook is active |
| `hook_url` | string | The webhook URL |
| `filter` | object | Applied filters |
| `include` | object | Included fields |
| `inserted_at` | string | ISO8601 creation timestamp |
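Putting the input and output tables together, a hook registration might look like the sketch below. The webhook URL, UUIDs, and the key names inside the returned `filter`/`include` objects are illustrative assumptions.

```typescript
// Hypothetical grain_create_hook input: only external meetings for one team, with AI summaries attached.
const createHookParams = {
  apiKey: "YOUR_PERSONAL_ACCESS_TOKEN",
  hookUrl: "https://example.com/webhooks/grain",    // must respond with a 2xx status
  filterParticipantScope: "external",
  filterTeamId: "0b1d2e3f-0000-4000-8000-000000000000",
  includeAiSummary: true,
};

// Shape of the returned hook record, per the output table (values and nested keys illustrative).
const createdHook = {
  id: "7a8b9c0d-0000-4000-8000-000000000000",
  enabled: true,
  hook_url: "https://example.com/webhooks/grain",
  filter: { participant_scope: "external" },        // applied filters
  include: { ai_summary: true },                     // included fields
  inserted_at: "2025-12-23T19:19:18Z",
};
```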
### `grain_list_hooks`
List all webhooks for the account
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Grain API key \(Personal Access Token\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `hooks` | array | Array of hook objects |
### `grain_delete_hook`
Delete a webhook by ID
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Grain API key \(Personal Access Token\) |
| `hookId` | string | Yes | The hook UUID to delete |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | True when webhook was successfully deleted |
## Notes
- Category: `tools`
- Type: `grain`

View File

@@ -64,8 +64,6 @@ Extract and process web content into clean, LLM-friendly text using Jina AI Read
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `content` | string | The extracted content from the URL, processed into clean, LLM-friendly text |
| `links` | array | List of links found on the page \(when gatherLinks or withLinksummary is enabled\) |
| `images` | array | List of images found on the page \(when withImagesummary is enabled\) |
### `jina_search`

View File

@@ -9,6 +9,7 @@
"asana",
"browser_use",
"calendly",
"circleback",
"clay",
"confluence",
"cursor",
@@ -35,6 +36,7 @@
"google_slides",
"google_vault",
"grafana",
"grain",
"hubspot",
"huggingface",
"hunter",

View File

@@ -52,6 +52,7 @@ Send a chat completion request to any supported LLM provider
| `azureApiVersion` | string | No | Azure OpenAI API version |
| `vertexProject` | string | No | Google Cloud project ID for Vertex AI |
| `vertexLocation` | string | No | Google Cloud location for Vertex AI \(defaults to us-central1\) |
| `vertexCredential` | string | No | Google Cloud OAuth credential ID for Vertex AI |
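As an illustration, a Vertex AI-backed request might combine the parameters above as follows; the project ID, credential ID, and prompt text are placeholders, and only parameters listed in the table are used.

```typescript
// Illustrative chat-completion inputs for a Vertex AI-hosted Gemini model (placeholder values).
const chatParams = {
  model: "gemini-2.0-flash",
  systemPrompt: "You are a concise release-notes assistant.",
  context: "Summarize the changes shipped in v0.5.43.",
  temperature: 0.2,                           // 0-2 range
  maxTokens: 512,
  vertexProject: "my-gcp-project",            // Google Cloud project ID
  vertexLocation: "us-central1",              // defaults to us-central1 when omitted
  vertexCredential: "vertex-credential-id",   // Google Cloud OAuth credential ID
};
```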
#### Output

View File

@@ -5,7 +5,6 @@ title: Schedule
import { Callout } from 'fumadocs-ui/components/callout'
import { Tab, Tabs } from 'fumadocs-ui/components/tabs'
import { Image } from '@/components/ui/image'
import { Video } from '@/components/ui/video'
The Schedule block automatically triggers workflows on a recurring schedule at specified intervals or times.
@@ -21,16 +20,16 @@ The Schedule block automatically triggers workflows on a recurring schedule at s
## Schedule Options
Configure when your workflow runs using the dropdown options:
Configure when your workflow runs:
<Tabs items={['Simple Intervals', 'Cron Expressions']}>
<Tab>
<ul className="list-disc space-y-1 pl-6">
<li><strong>Every few minutes</strong>: 5, 15, 30 minute intervals</li>
<li><strong>Hourly</strong>: Every hour or every few hours</li>
<li><strong>Daily</strong>: Once or multiple times per day</li>
<li><strong>Weekly</strong>: Specific days of the week</li>
<li><strong>Monthly</strong>: Specific days of the month</li>
<li><strong>Every X Minutes</strong>: Run at minute intervals (1-1440)</li>
<li><strong>Hourly</strong>: Run at a specific minute each hour</li>
<li><strong>Daily</strong>: Run at a specific time each day</li>
<li><strong>Weekly</strong>: Run on a specific day and time each week</li>
<li><strong>Monthly</strong>: Run on a specific day and time each month</li>
</ul>
</Tab>
<Tab>
@@ -43,24 +42,25 @@ Configure when your workflow runs using the dropdown options:
</Tab>
</Tabs>
## Configuring Schedules
## Activation
When a workflow is scheduled:
- The schedule becomes **active** and shows the next execution time
- Click the **"Scheduled"** button to deactivate the schedule
- Schedules automatically deactivate after **3 consecutive failures**
Schedules are tied to workflow deployment:
<div className="flex justify-center">
<Image
src="/static/blocks/schedule-2.png"
alt="Active Schedule Block"
width={500}
height={400}
className="my-6"
/>
</div>
- **Deploy workflow** → Schedule becomes active and starts running
- **Undeploy workflow** → Schedule is removed
- **Redeploy workflow** → Schedule is recreated with current configuration
## Disabled Schedules
<Callout>
You must deploy your workflow for the schedule to start running. Configure the schedule block, then deploy from the toolbar.
</Callout>
## Automatic Disabling
Schedules automatically disable after **10 consecutive failures** to prevent runaway errors. When disabled:
- A warning badge appears on the schedule block
- The schedule stops executing
- Click the badge to reactivate the schedule
<div className="flex justify-center">
<Image
@@ -72,8 +72,6 @@ When a workflow is scheduled:
/>
</div>
Disabled schedules show when they were last active. Click the **"Disabled"** badge to reactivate the schedule.
<Callout>
Schedule blocks cannot receive incoming connections and serve as pure workflow triggers.
</Callout>
Schedule blocks cannot receive incoming connections and serve as workflow entry points only.
</Callout>

View File

@@ -0,0 +1,59 @@
---
title: Circleback
description: Notas de reuniones e ítems de acción impulsados por IA
---
import { BlockInfoCard } from "@/components/ui/block-info-card"
<BlockInfoCard
type="circleback"
color="linear-gradient(180deg, #E0F7FA 0%, #FFFFFF 100%)"
/>
{/* MANUAL-CONTENT-START:intro */}
[Circleback](https://circleback.ai/) es una plataforma impulsada por IA que automatiza las notas de reuniones, ítems de acción, transcripciones y grabaciones para tu equipo. Cuando se completa una reunión, Circleback procesa la conversación y proporciona notas detalladas e ítems de acción, junto con una transcripción y una grabación (cuando está disponible). Esto ayuda a los equipos a capturar información de manera eficiente, distribuir ítems de acción y asegurar que no se pierda nada, todo integrado sin problemas en tus flujos de trabajo.
Con la integración de Sim Circleback, puedes:
- **Recibir notas detalladas de reuniones e ítems de acción**: Recopila automáticamente resúmenes de reuniones bien formateados y realiza seguimiento de las tareas accionables discutidas durante tus llamadas.
- **Acceder a grabaciones y transcripciones completas de reuniones**: Obtén la conversación completa y la grabación asociada, facilitando la revisión de momentos clave o compartir con colegas.
- **Capturar información de asistentes y contexto de la reunión**: Las listas de asistentes, metadatos de reuniones y etiquetas ayudan a mantener tus datos organizados y accionables.
- **Entregar información directamente en tus flujos de trabajo**: Activa automatizaciones o envía datos de Circleback a otros sistemas en el momento en que finaliza una reunión, usando los potentes activadores webhook de Sim.
**Cómo funciona en Sim:**
Circleback utiliza activadores webhook: cada vez que se procesa una reunión, los datos se envían automáticamente a tu agente o automatización. Puedes crear más automatizaciones basadas en:
- Reunión completada (todos los datos procesados disponibles)
- Nuevas notas (notas listas incluso antes de que se procese la reunión completa)
- Integración webhook sin procesar para casos de uso avanzados
**La siguiente información está disponible en la carga útil del webhook de reunión de Circleback:**
| Campo | Tipo | Descripción |
|----------------|---------|----------------------------------------------------|
| `id` | number | ID de reunión de Circleback |
| `name` | string | Título de la reunión |
| `url` | string | URL de reunión virtual (Zoom, Meet, Teams, etc.) |
| `createdAt` | string | Marca de tiempo de creación de la reunión |
| `duration` | number | Duración en segundos |
| `recordingUrl` | string | URL de grabación (válida 24 horas) |
| `tags` | json | Array de etiquetas |
| `icalUid` | string | ID de evento de calendario |
| `attendees` | json | Array de objetos de asistentes |
| `notes` | string | Notas de la reunión en Markdown |
| `actionItems` | json | Array de elementos de acción |
| `transcript` | json | Array de segmentos de transcripción |
| `insights` | json | Insights creados por el usuario |
| `meeting` | json | Payload completo de la reunión |
Ya sea que quieras distribuir resúmenes instantáneos, registrar elementos de acción o crear flujos de trabajo personalizados activados por nuevos datos de reuniones, Circleback y Sim hacen que sea sencillo manejar todo lo relacionado con tus reuniones, automáticamente.
{/* MANUAL-CONTENT-END */}
## Instrucciones de uso
Recibe notas de reuniones, elementos de acción, transcripciones y grabaciones cuando se procesen las reuniones. Circleback utiliza webhooks para enviar datos a tus flujos de trabajo.
## Notas
- Categoría: `triggers`
- Tipo: `circleback`

View File

@@ -0,0 +1,218 @@
---
title: Grain
description: Accede a grabaciones de reuniones, transcripciones y resúmenes de IA
---
import { BlockInfoCard } from "@/components/ui/block-info-card"
<BlockInfoCard
type="grain"
color="#F6FAF9"
/>
{/* MANUAL-CONTENT-START:intro */}
[Grain](https://grain.com/) es una plataforma moderna para capturar, almacenar y compartir grabaciones de reuniones, transcripciones, momentos destacados y resúmenes generados por IA. Grain permite a los equipos convertir conversaciones en información procesable y mantener a todos alineados con los momentos clave de las reuniones.
Con Grain, puedes:
- **Acceder a grabaciones y transcripciones con búsqueda**: Encuentra y revisa cada reunión por palabra clave, participante o tema.
- **Compartir momentos destacados y clips**: Captura momentos importantes y comparte fragmentos cortos de video/audio en tu equipo o flujos de trabajo.
- **Obtener resúmenes generados por IA**: Produce automáticamente resúmenes de reuniones, elementos de acción e información clave utilizando la IA avanzada de Grain.
- **Organizar reuniones por equipo o tipo**: Etiqueta y categoriza grabaciones para facilitar el acceso y la generación de informes.
La integración de Sim con Grain permite a tus agentes:
- Listar, buscar y recuperar grabaciones de reuniones y detalles mediante filtros flexibles (fecha y hora, participante, equipo, etc.).
- Acceder a resúmenes de IA, participantes, momentos destacados y otros metadatos de reuniones para impulsar automatizaciones o análisis.
- Activar flujos de trabajo cada vez que se procesen nuevas reuniones, se generen resúmenes o se creen momentos destacados a través de webhooks de Grain.
- Conectar fácilmente los datos de Grain con otras herramientas o notificar a los compañeros de equipo en el momento en que sucede algo importante en una reunión.
Ya sea que desees automatizar acciones de seguimiento, mantener registros de conversaciones importantes o destacar información en toda tu organización, Grain y Sim facilitan la conexión de la inteligencia de reuniones con tus flujos de trabajo.
{/* MANUAL-CONTENT-END */}
## Instrucciones de uso
Integra Grain en tu flujo de trabajo. Accede a grabaciones de reuniones, transcripciones, momentos destacados y resúmenes generados por IA. También puede activar flujos de trabajo basados en eventos de webhook de Grain.
## Herramientas
### `grain_list_recordings`
Lista las grabaciones de Grain con filtros opcionales y paginación
#### Entrada
| Parámetro | Tipo | Requerido | Descripción |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Sí | Clave API de Grain \(token de acceso personal\) |
| `cursor` | string | No | Cursor de paginación para la siguiente página |
| `beforeDatetime` | string | No | Solo grabaciones anteriores a esta marca de tiempo ISO8601 |
| `afterDatetime` | string | No | Solo grabaciones posteriores a esta marca de tiempo ISO8601 |
| `participantScope` | string | No | Filtro: "internal" o "external" |
| `titleSearch` | string | No | Término de búsqueda para filtrar por título de grabación |
| `teamId` | string | No | Filtrar por UUID de equipo |
| `meetingTypeId` | string | No | Filtrar por UUID de tipo de reunión |
| `includeHighlights` | boolean | No | Incluir destacados/clips en la respuesta |
| `includeParticipants` | boolean | No | Incluir lista de participantes en la respuesta |
| `includeAiSummary` | boolean | No | Incluir resumen generado por IA |
#### Salida
| Parámetro | Tipo | Descripción |
| --------- | ---- | ----------- |
| `recordings` | array | Array de objetos de grabación |
### `grain_get_recording`
Obtiene los detalles de una única grabación por ID
#### Entrada
| Parámetro | Tipo | Requerido | Descripción |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Sí | Clave API de Grain \(token de acceso personal\) |
| `recordingId` | string | Sí | El UUID de la grabación |
| `includeHighlights` | boolean | No | Incluir destacados/clips |
| `includeParticipants` | boolean | No | Incluir lista de participantes |
| `includeAiSummary` | boolean | No | Incluir resumen de IA |
| `includeCalendarEvent` | boolean | No | Incluir datos del evento de calendario |
| `includeHubspot` | boolean | No | Incluir asociaciones de HubSpot |
#### Salida
| Parámetro | Tipo | Descripción |
| --------- | ---- | ----------- |
| `id` | string | UUID de la grabación |
| `title` | string | Título de la grabación |
| `start_datetime` | string | Marca de tiempo de inicio ISO8601 |
| `end_datetime` | string | Marca de tiempo de finalización ISO8601 |
| `duration_ms` | number | Duración en milisegundos |
| `media_type` | string | audio, transcript o video |
| `source` | string | Fuente de la grabación \(zoom, meet, teams, etc.\) |
| `url` | string | URL para ver en Grain |
| `thumbnail_url` | string | URL de la imagen en miniatura |
| `tags` | array | Array de cadenas de etiquetas |
| `teams` | array | Equipos a los que pertenece la grabación |
| `meeting_type` | object | Información del tipo de reunión \(id, nombre, alcance\) |
| `highlights` | array | Destacados \(si se incluyen\) |
| `participants` | array | Participantes \(si se incluyen\) |
| `ai_summary` | object | Texto del resumen de IA \(si se incluye\) |
| `calendar_event` | object | Datos del evento de calendario \(si se incluyen\) |
| `hubspot` | object | Asociaciones de HubSpot \(si se incluyen\) |
### `grain_get_transcript`
Obtener la transcripción completa de una grabación
#### Entrada
| Parámetro | Tipo | Requerido | Descripción |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Sí | Clave de API de Grain \(token de acceso personal\) |
| `recordingId` | string | Sí | El UUID de la grabación |
#### Salida
| Parámetro | Tipo | Descripción |
| --------- | ---- | ----------- |
| `transcript` | array | Array de secciones de transcripción |
### `grain_list_teams`
Listar todos los equipos en el espacio de trabajo
#### Entrada
| Parámetro | Tipo | Requerido | Descripción |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Sí | Clave API de Grain \(token de acceso personal\) |
#### Salida
| Parámetro | Tipo | Descripción |
| --------- | ---- | ----------- |
| `teams` | array | Array de objetos de equipo |
### `grain_list_meeting_types`
Listar todos los tipos de reunión en el espacio de trabajo
#### Entrada
| Parámetro | Tipo | Requerido | Descripción |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Sí | Clave API de Grain \(token de acceso personal\) |
#### Salida
| Parámetro | Tipo | Descripción |
| --------- | ---- | ----------- |
| `meeting_types` | array | Array de objetos de tipo de reunión |
### `grain_create_hook`
Crear un webhook para recibir eventos de grabación
#### Entrada
| Parámetro | Tipo | Requerido | Descripción |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Sí | Clave API de Grain \(token de acceso personal\) |
| `hookUrl` | string | Sí | URL del endpoint del webhook \(debe responder 2xx\) |
| `filterBeforeDatetime` | string | No | Filtro: grabaciones antes de esta fecha |
| `filterAfterDatetime` | string | No | Filtro: grabaciones después de esta fecha |
| `filterParticipantScope` | string | No | Filtro: "internal" o "external" |
| `filterTeamId` | string | No | Filtro: UUID de equipo específico |
| `filterMeetingTypeId` | string | No | Filtro: tipo de reunión específico |
| `includeHighlights` | boolean | No | Incluir destacados en la carga del webhook |
| `includeParticipants` | boolean | No | Incluir participantes en la carga del webhook |
| `includeAiSummary` | boolean | No | Incluir resumen de IA en la carga del webhook |
#### Salida
| Parámetro | Tipo | Descripción |
| --------- | ---- | ----------- |
| `id` | string | UUID del hook |
| `enabled` | boolean | Si el hook está activo |
| `hook_url` | string | La URL del webhook |
| `filter` | object | Filtros aplicados |
| `include` | object | Campos incluidos |
| `inserted_at` | string | Marca de tiempo de creación ISO8601 |
### `grain_list_hooks`
Listar todos los webhooks de la cuenta
#### Entrada
| Parámetro | Tipo | Requerido | Descripción |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Sí | Clave API de Grain \(token de acceso personal\) |
#### Salida
| Parámetro | Tipo | Descripción |
| --------- | ---- | ----------- |
| `hooks` | array | Array de objetos hook |
### `grain_delete_hook`
Eliminar un webhook por ID
#### Entrada
| Parámetro | Tipo | Requerido | Descripción |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Sí | Clave API de Grain \(token de acceso personal\) |
| `hookId` | string | Sí | El UUID del hook a eliminar |
#### Salida
| Parámetro | Tipo | Descripción |
| --------- | ---- | ----------- |
| `success` | boolean | Verdadero cuando el webhook se eliminó correctamente |
## Notas
- Categoría: `tools`
- Tipo: `grain`

View File

@@ -61,8 +61,6 @@ Extrae y procesa contenido web en texto limpio y compatible con LLM usando Jina
| Parámetro | Tipo | Descripción |
| --------- | ---- | ----------- |
| `content` | string | El contenido extraído de la URL, procesado en texto limpio y compatible con LLM |
| `links` | array | Lista de enlaces encontrados en la página (cuando gatherLinks o withLinksummary está activado) |
| `images` | array | Lista de imágenes encontradas en la página (cuando withImagesummary está activado) |
### `jina_search`

View File

@@ -38,17 +38,18 @@ Envía una solicitud de completado de chat a cualquier proveedor de LLM compatib
#### Entrada
| Parámetro | Tipo | Requerido | Descripción |
| --------- | ---- | -------- | ----------- |
| `model` | string | Sí | El modelo a utilizar \(ej., gpt-4o, claude-sonnet-4-5, gemini-2.0-flash\) |
| --------- | ---- | --------- | ----------- |
| `model` | string | Sí | El modelo a utilizar \(por ejemplo, gpt-4o, claude-sonnet-4-5, gemini-2.0-flash\) |
| `systemPrompt` | string | No | Prompt del sistema para establecer el comportamiento del asistente |
| `context` | string | Sí | El mensaje del usuario o contexto a enviar al modelo |
| `apiKey` | string | No | Clave API del proveedor \(usa la clave de la plataforma si no se proporciona para modelos alojados\) |
| `temperature` | number | No | Temperatura para la generación de respuestas \(0-2\) |
| `maxTokens` | number | No | Tokens máximos en la respuesta |
| `maxTokens` | number | No | Máximo de tokens en la respuesta |
| `azureEndpoint` | string | No | URL del endpoint de Azure OpenAI |
| `azureApiVersion` | string | No | Versión de la API de Azure OpenAI |
| `vertexProject` | string | No | ID del proyecto de Google Cloud para Vertex AI |
| `vertexLocation` | string | No | Ubicación de Google Cloud para Vertex AI \(por defecto us-central1\) |
| `vertexCredential` | string | No | ID de credencial OAuth de Google Cloud para Vertex AI |
#### Salida

View File

@@ -5,7 +5,6 @@ title: Programación
import { Callout } from 'fumadocs-ui/components/callout'
import { Tab, Tabs } from 'fumadocs-ui/components/tabs'
import { Image } from '@/components/ui/image'
import { Video } from '@/components/ui/video'
El bloque de Programación activa automáticamente flujos de trabajo de forma recurrente en intervalos o momentos específicos.
@@ -21,16 +20,16 @@ El bloque de Programación activa automáticamente flujos de trabajo de forma re
## Opciones de programación
Configura cuándo se ejecuta tu flujo de trabajo utilizando las opciones desplegables:
Configura cuándo se ejecuta tu flujo de trabajo:
<Tabs items={['Intervalos simples', 'Expresiones cron']}>
<Tab>
<ul className="list-disc space-y-1 pl-6">
<li><strong>Cada pocos minutos</strong>: intervalos de 5, 15, 30 minutos</li>
<li><strong>Por hora</strong>: Cada hora o cada pocas horas</li>
<li><strong>Diariamente</strong>: Una o varias veces al día</li>
<li><strong>Semanalmente</strong>: Días específicos de la semana</li>
<li><strong>Mensualmente</strong>: Días específicos del mes</li>
<li><strong>Cada X minutos</strong>: ejecutar en intervalos de minutos (1-1440)</li>
<li><strong>Cada hora</strong>: ejecutar en un minuto específico cada hora</li>
<li><strong>Diariamente</strong>: ejecutar a una hora específica cada día</li>
<li><strong>Semanalmente</strong>: ejecutar en un día y hora específicos cada semana</li>
<li><strong>Mensualmente</strong>: ejecutar en un día y hora específicos cada mes</li>
</ul>
</Tab>
<Tab>
@@ -43,45 +42,36 @@ Configura cuándo se ejecuta tu flujo de trabajo utilizando las opciones despleg
</Tab>
</Tabs>
## Configuración de programaciones
## Activación
Cuando un flujo de trabajo está programado:
- La programación se vuelve **activa** y muestra el próximo tiempo de ejecución
- Haz clic en el botón **"Programado"** para desactivar la programación
- Las programaciones se desactivan automáticamente después de **3 fallos consecutivos**
Las programaciones están vinculadas al despliegue del flujo de trabajo:
<div className="flex justify-center">
<Image
src="/static/blocks/schedule-2.png"
alt="Bloque de programación activo"
width={500}
height={400}
className="my-6"
/>
</div>
<div className="flex justify-center">
<Image
src="/static/blocks/schedule-3.png"
alt="Programación desactivada"
width={500}
height={350}
className="my-6"
/>
</div>
<div className="flex justify-center">
<Image
src="/static/blocks/schedule-3.png"
alt="Programación desactivada"
width={500}
height={400}
className="my-6"
/>
</div>
Las programaciones desactivadas muestran cuándo estuvieron activas por última vez. Haz clic en la insignia **"Desactivado"** para reactivar la programación.
- **Desplegar flujo de trabajo** → la programación se activa y comienza a ejecutarse
- **Retirar flujo de trabajo** → la programación se elimina
- **Redesplegar flujo de trabajo** → la programación se recrea con la configuración actual
<Callout>
Los bloques de programación no pueden recibir conexiones entrantes y funcionan exclusivamente como disparadores de flujos de trabajo.
Debes desplegar tu flujo de trabajo para que la programación comience a ejecutarse. Configura el bloque de programación y luego despliega desde la barra de herramientas.
</Callout>
## Desactivación automática
Las programaciones se desactivan automáticamente después de **10 fallos consecutivos** para evitar errores descontrolados. Cuando se desactiva:
- Aparece una insignia de advertencia en el bloque de programación
- La programación deja de ejecutarse
- Haz clic en la insignia para reactivar la programación
<div className="flex justify-center">
<Image
src="/static/blocks/schedule-3.png"
alt="Programación desactivada"
width={500}
height={400}
className="my-6"
/>
</div>
<Callout>
Los bloques de programación no pueden recibir conexiones entrantes y sirven únicamente como puntos de entrada del flujo de trabajo.
</Callout>

View File

@@ -0,0 +1,59 @@
---
title: Circleback
description: Notes de réunion et tâches générées par IA
---
import { BlockInfoCard } from "@/components/ui/block-info-card"
<BlockInfoCard
type="circleback"
color="linear-gradient(180deg, #E0F7FA 0%, #FFFFFF 100%)"
/>
{/* MANUAL-CONTENT-START:intro */}
[Circleback](https://circleback.ai/) est une plateforme alimentée par IA qui automatise les notes de réunion, les tâches, les transcriptions et les enregistrements pour votre équipe. Lorsqu'une réunion est terminée, Circleback traite la conversation et fournit des notes détaillées et des tâches, accompagnées d'une transcription et d'un enregistrement (lorsque disponible). Cela aide les équipes à capturer efficacement les informations, à distribuer les tâches et à s'assurer que rien n'est oublié, le tout intégré de manière transparente dans vos flux de travail.
Avec l'intégration Sim Circleback, vous pouvez :
- **Recevoir des notes de réunion détaillées et des tâches** : collectez automatiquement des résumés de réunion bien formatés et suivez les tâches discutées lors de vos appels.
- **Accéder aux enregistrements et transcriptions complètes des réunions** : obtenez la conversation complète et l'enregistrement associé, facilitant la révision des moments clés ou le partage avec des collègues.
- **Capturer les informations sur les participants et le contexte de la réunion** : les listes de participants, les métadonnées de réunion et les tags aident à garder vos données organisées et exploitables.
- **Transmettre les informations directement dans vos flux de travail** : déclenchez des automatisations ou envoyez les données Circleback vers d'autres systèmes dès qu'une réunion est terminée, en utilisant les puissants déclencheurs webhook de Sim.
**Comment cela fonctionne dans Sim :**
Circleback utilise des déclencheurs webhook : chaque fois qu'une réunion est traitée, les données sont automatiquement transmises à votre agent ou automatisation. Vous pouvez créer d'autres automatisations basées sur :
- Réunion terminée (toutes les données traitées disponibles)
- Nouvelles notes (notes prêtes avant même que la réunion complète ne soit traitée)
- Intégration webhook brute pour des cas d'usage avancés
**Les informations suivantes sont disponibles dans la charge utile du webhook de réunion Circleback :**
| Champ | Type | Description |
|----------------|---------|----------------------------------------------------|
| `id` | number | ID de réunion Circleback |
| `name` | string | Titre de la réunion |
| `url` | string | URL de réunion virtuelle (Zoom, Meet, Teams, etc.) |
| `createdAt` | string | Horodatage de création de la réunion |
| `duration` | number | Durée en secondes |
| `recordingUrl` | string | URL d'enregistrement (valide 24 heures) |
| `tags` | json | Tableau d'étiquettes |
| `icalUid` | string | ID d'événement de calendrier |
| `attendees` | json | Tableau d'objets participants |
| `notes` | string | Notes de réunion en Markdown |
| `actionItems` | json | Tableau d'éléments d'action |
| `transcript` | json | Tableau de segments de transcription |
| `insights` | json | Informations créées par l'utilisateur |
| `meeting` | json | Charge utile complète de la réunion |
Que vous souhaitiez distribuer des résumés instantanés, enregistrer des éléments d'action ou créer des workflows personnalisés déclenchés par de nouvelles données de réunion, Circleback et Sim facilitent la gestion automatique de tout ce qui concerne vos réunions.
{/* MANUAL-CONTENT-END */}
## Instructions d'utilisation
Recevez les notes de réunion, les éléments d'action, les transcriptions et les enregistrements lorsque les réunions sont traitées. Circleback utilise des webhooks pour transmettre les données à vos workflows.
## Remarques
- Catégorie : `triggers`
- Type : `circleback`

View File

@@ -0,0 +1,218 @@
---
title: Grain
description: Accédez aux enregistrements de réunions, transcriptions et résumés IA
---
import { BlockInfoCard } from "@/components/ui/block-info-card"
<BlockInfoCard
type="grain"
color="#F6FAF9"
/>
{/* MANUAL-CONTENT-START:intro */}
[Grain](https://grain.com/) est une plateforme moderne pour capturer, stocker et partager des enregistrements de réunions, des transcriptions, des moments clés et des résumés générés par IA. Grain permet aux équipes de transformer les conversations en informations exploitables et de maintenir tout le monde aligné sur les moments importants des réunions.
Avec Grain, vous pouvez :
- **Accéder aux enregistrements et transcriptions consultables** : trouvez et consultez chaque réunion par mot-clé, participant ou sujet.
- **Partager des moments clés et des extraits** : capturez les moments importants et partagez de courts extraits vidéo/audio avec votre équipe ou dans vos workflows.
- **Obtenir des résumés générés par IA** : produisez automatiquement des résumés de réunions, des actions à entreprendre et des informations clés grâce à l'IA avancée de Grain.
- **Organiser les réunions par équipe ou par type** : étiquetez et catégorisez les enregistrements pour un accès et un reporting faciles.
L'intégration Sim Grain permet à vos agents de :
- Lister, rechercher et récupérer les enregistrements de réunions et leurs détails selon des filtres flexibles (date/heure, participant, équipe, etc.).
- Accéder aux résumés IA, participants, moments clés et autres métadonnées des réunions pour alimenter des automatisations ou des analyses.
- Déclencher des workflows dès que de nouvelles réunions sont traitées, que des résumés sont générés ou que des moments clés sont créés via les webhooks Grain.
- Connecter facilement les données Grain à d'autres outils ou notifier les membres de l'équipe dès qu'un événement important se produit dans une réunion.
Que vous souhaitiez automatiser les actions de suivi, conserver des traces de conversations importantes ou faire remonter des informations dans toute votre organisation, Grain et Sim facilitent la connexion de l'intelligence des réunions à vos workflows.
{/* MANUAL-CONTENT-END */}
## Instructions d'utilisation
Intégrez Grain dans votre workflow. Accédez aux enregistrements de réunions, transcriptions, moments clés et résumés générés par IA. Peut également déclencher des workflows basés sur les événements webhook de Grain.
## Outils
### `grain_list_recordings`
Liste les enregistrements de Grain avec des filtres optionnels et une pagination
#### Entrée
| Paramètre | Type | Requis | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Oui | Clé API Grain (jeton d'accès personnel) |
| `cursor` | string | Non | Curseur de pagination pour la page suivante |
| `beforeDatetime` | string | Non | Uniquement les enregistrements avant cet horodatage ISO8601 |
| `afterDatetime` | string | Non | Uniquement les enregistrements après cet horodatage ISO8601 |
| `participantScope` | string | Non | Filtre : « internal » ou « external » |
| `titleSearch` | string | Non | Terme de recherche pour filtrer par titre d'enregistrement |
| `teamId` | string | Non | Filtrer par UUID d'équipe |
| `meetingTypeId` | string | Non | Filtrer par UUID de type de réunion |
| `includeHighlights` | boolean | Non | Inclure les moments forts/extraits dans la réponse |
| `includeParticipants` | boolean | Non | Inclure la liste des participants dans la réponse |
| `includeAiSummary` | boolean | Non | Inclure le résumé généré par IA |
#### Sortie
| Paramètre | Type | Description |
| --------- | ---- | ----------- |
| `recordings` | array | Tableau d'objets d'enregistrement |
### `grain_get_recording`
Obtient les détails d'un seul enregistrement par ID
#### Entrée
| Paramètre | Type | Requis | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Oui | Clé API Grain (jeton d'accès personnel) |
| `recordingId` | string | Oui | L'UUID de l'enregistrement |
| `includeHighlights` | boolean | Non | Inclure les moments forts/extraits |
| `includeParticipants` | boolean | Non | Inclure la liste des participants |
| `includeAiSummary` | boolean | Non | Inclure le résumé IA |
| `includeCalendarEvent` | boolean | Non | Inclure les données d'événement de calendrier |
| `includeHubspot` | boolean | Non | Inclure les associations HubSpot |
#### Sortie
| Paramètre | Type | Description |
| --------- | ---- | ----------- |
| `id` | string | UUID de l'enregistrement |
| `title` | string | Titre de l'enregistrement |
| `start_datetime` | string | Horodatage de début ISO8601 |
| `end_datetime` | string | Horodatage de fin ISO8601 |
| `duration_ms` | number | Durée en millisecondes |
| `media_type` | string | audio, transcript ou video |
| `source` | string | Source de l'enregistrement \(zoom, meet, teams, etc.\) |
| `url` | string | URL pour visualiser dans Grain |
| `thumbnail_url` | string | URL de l'image miniature |
| `tags` | array | Tableau de chaînes de tags |
| `teams` | array | Équipes auxquelles appartient l'enregistrement |
| `meeting_type` | object | Informations sur le type de réunion \(id, nom, portée\) |
| `highlights` | array | Points forts \(si inclus\) |
| `participants` | array | Participants \(si inclus\) |
| `ai_summary` | object | Texte du résumé IA \(si inclus\) |
| `calendar_event` | object | Données de l'événement de calendrier \(si incluses\) |
| `hubspot` | object | Associations HubSpot \(si incluses\) |
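Pour fixer les idées, voici une esquisse TypeScript (non officielle) du résultat de `grain_get_recording`, construite uniquement à partir du tableau ci-dessus ; les champs optionnels dépendent des indicateurs `include*` passés en entrée.

```ts
// Esquisse du résultat de grain_get_recording, d'après le tableau ci-dessus.
// Les structures internes (highlights, participants, etc.) ne sont pas détaillées
// sur cette page et sont donc laissées volontairement ouvertes.
interface GrainRecording {
  id: string
  title: string
  start_datetime: string      // ISO8601
  end_datetime: string        // ISO8601
  duration_ms: number
  media_type: 'audio' | 'transcript' | 'video'
  source: string              // zoom, meet, teams, etc.
  url: string                 // lien de visualisation dans Grain
  thumbnail_url?: string
  tags: string[]
  teams: unknown[]
  meeting_type?: { id: string; name: string; scope: string }
  highlights?: unknown[]      // si includeHighlights
  participants?: unknown[]    // si includeParticipants
  ai_summary?: unknown        // si includeAiSummary
  calendar_event?: unknown    // si includeCalendarEvent
  hubspot?: unknown           // si includeHubspot
}

// Exemple : durée lisible d'un enregistrement, en minutes.
const durationMinutes = (rec: GrainRecording) => Math.round(rec.duration_ms / 60_000)
```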
### `grain_get_transcript`
Obtenir la transcription complète d'un enregistrement
#### Entrée
| Paramètre | Type | Requis | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Oui | Clé API Grain \(jeton d'accès personnel\) |
| `recordingId` | string | Oui | UUID de l'enregistrement |
#### Sortie
| Paramètre | Type | Description |
| --------- | ---- | ----------- |
| `transcript` | array | Tableau de sections de transcription |
### `grain_list_teams`
Lister toutes les équipes dans l'espace de travail
#### Entrée
| Paramètre | Type | Requis | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Oui | Clé API Grain (jeton d'accès personnel) |
#### Sortie
| Paramètre | Type | Description |
| --------- | ---- | ----------- |
| `teams` | array | Tableau d'objets équipe |
### `grain_list_meeting_types`
Lister tous les types de réunion dans l'espace de travail
#### Entrée
| Paramètre | Type | Requis | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Oui | Clé API Grain (jeton d'accès personnel) |
#### Sortie
| Paramètre | Type | Description |
| --------- | ---- | ----------- |
| `meeting_types` | array | Tableau d'objets type de réunion |
### `grain_create_hook`
Créer un webhook pour recevoir les événements d'enregistrement
#### Entrée
| Paramètre | Type | Requis | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Oui | Clé API Grain (jeton d'accès personnel) |
| `hookUrl` | string | Oui | URL du point de terminaison webhook (doit répondre 2xx) |
| `filterBeforeDatetime` | string | Non | Filtre : enregistrements avant cette date |
| `filterAfterDatetime` | string | Non | Filtre : enregistrements après cette date |
| `filterParticipantScope` | string | Non | Filtre : « internal » ou « external » |
| `filterTeamId` | string | Non | Filtre : UUID d'équipe spécifique |
| `filterMeetingTypeId` | string | Non | Filtre : type de réunion spécifique |
| `includeHighlights` | boolean | Non | Inclure les moments forts dans la charge utile du webhook |
| `includeParticipants` | boolean | Non | Inclure les participants dans la charge utile du webhook |
| `includeAiSummary` | boolean | Non | Inclure le résumé IA dans la charge utile du webhook |
#### Sortie
| Paramètre | Type | Description |
| --------- | ---- | ----------- |
| `id` | string | UUID du hook |
| `enabled` | boolean | Indique si le hook est actif |
| `hook_url` | string | L'URL du webhook |
| `filter` | object | Filtres appliqués |
| `include` | object | Champs inclus |
| `inserted_at` | string | Horodatage de création ISO8601 |
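L'URL fournie dans `hookUrl` doit répondre par un code 2xx. Voici une esquisse minimale d'un gestionnaire Next.js (App Router) qui accuse réception des événements Grain ; le chemin du fichier et la forme exacte du corps reçu sont des hypothèses.

```ts
// app/api/grain-hook/route.ts — esquisse hypothétique d'un récepteur de webhook Grain.
// Le contenu exact du corps dépend des options include* choisies à la création du hook.
import { NextResponse } from 'next/server'

export async function POST(request: Request) {
  const event = await request.json()

  // Traitement minimal : journaliser l'enregistrement reçu.
  console.log('Événement Grain reçu :', event?.id ?? event)

  // Répondre 2xx rapidement pour que Grain considère la livraison comme réussie.
  return NextResponse.json({ received: true })
}
```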
### `grain_list_hooks`
Lister tous les webhooks du compte
#### Entrée
| Paramètre | Type | Requis | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Oui | Clé API Grain \(jeton d'accès personnel\) |
#### Sortie
| Paramètre | Type | Description |
| --------- | ---- | ----------- |
| `hooks` | array | Tableau d'objets hook |
### `grain_delete_hook`
Supprimer un webhook par ID
#### Entrée
| Paramètre | Type | Requis | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Oui | Clé API Grain \(jeton d'accès personnel\) |
| `hookId` | string | Oui | L'UUID du hook à supprimer |
#### Sortie
| Paramètre | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Vrai lorsque le webhook a été supprimé avec succès |
## Remarques
- Catégorie : `tools`
- Type : `grain`

View File

@@ -61,8 +61,6 @@ Extrayez et traitez le contenu web en texte propre et adapté aux LLM avec Jina
| Paramètre | Type | Description |
| --------- | ---- | ----------- |
| `content` | string | Le contenu extrait de l'URL, traité en texte propre et adapté aux LLM |
| `links` | array | Liste des liens trouvés sur la page (lorsque gatherLinks ou withLinksummary est activé) |
| `images` | array | Liste des images trouvées sur la page (lorsque withImagesummary est activé) |
### `jina_search`

View File

@@ -49,6 +49,7 @@ Envoyez une requête de complétion de chat à n'importe quel fournisseur de LLM
| `azureApiVersion` | string | Non | Version de l'API Azure OpenAI |
| `vertexProject` | string | Non | ID du projet Google Cloud pour Vertex AI |
| `vertexLocation` | string | Non | Emplacement Google Cloud pour Vertex AI \(par défaut us-central1\) |
| `vertexCredential` | string | Non | ID des identifiants OAuth Google Cloud pour Vertex AI |
#### Sortie

View File

@@ -5,7 +5,6 @@ title: Planification
import { Callout } from 'fumadocs-ui/components/callout'
import { Tab, Tabs } from 'fumadocs-ui/components/tabs'
import { Image } from '@/components/ui/image'
import { Video } from '@/components/ui/video'
Le bloc Planification déclenche automatiquement des workflows de manière récurrente à des intervalles ou moments spécifiés.
@@ -21,67 +20,58 @@ Le bloc Planification déclenche automatiquement des workflows de manière récu
## Options de planification
Configurez quand votre workflow s'exécute en utilisant les options du menu déroulant :
Configurez quand votre workflow s'exécute :
<Tabs items={['Intervalles simples', 'Expressions cron']}>
<Tab>
<ul className="list-disc space-y-1 pl-6">
<li><strong>Toutes les quelques minutes</strong> : intervalles de 5, 15, 30 minutes</li>
<li><strong>Toutes les heures</strong> : chaque heure ou toutes les quelques heures</li>
<li><strong>Quotidien</strong> : une ou plusieurs fois par jour</li>
<li><strong>Hebdomadaire</strong> : jours spécifiques de la semaine</li>
<li><strong>Mensuel</strong> : jours spécifiques du mois</li>
<li><strong>Toutes les X minutes</strong> : exécution à intervalles de minutes (1-1440)</li>
<li><strong>Toutes les heures</strong> : exécution à une minute spécifique chaque heure</li>
<li><strong>Quotidien</strong> : exécution à une heure spécifique chaque jour</li>
<li><strong>Hebdomadaire</strong> : exécution un jour et une heure spécifiques chaque semaine</li>
<li><strong>Mensuel</strong> : exécution un jour et une heure spécifiques chaque mois</li>
</ul>
</Tab>
<Tab>
<p>Utilisez des expressions cron pour une planification avancée :</p>
<p>Utilisez les expressions cron pour une planification avancée :</p>
<div className="text-sm space-y-1">
<div><code>0 9 * * 1-5</code> - Chaque jour de semaine à 9h</div>
<div><code>0 9 * * 1-5</code> - Chaque jour de semaine à 9 h</div>
<div><code>*/15 * * * *</code> - Toutes les 15 minutes</div>
<div><code>0 0 1 * *</code> - Premier jour de chaque mois</div>
</div>
</Tab>
</Tabs>
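Pour mémoire, les cinq champs d'une expression cron standard, annotés dans une courte esquisse (hypothèse : syntaxe cron classique à cinq champs, comme dans les exemples ci-dessus) :

```ts
// ┌───────────── minute (0-59)
// │ ┌─────────── heure (0-23)
// │ │ ┌───────── jour du mois (1-31)
// │ │ │ ┌─────── mois (1-12)
// │ │ │ │ ┌───── jour de la semaine (0-6, dimanche = 0)
// │ │ │ │ │
// 0 9 * * 1-5   → chaque jour de semaine à 9 h
const weekdayMornings = '0 9 * * 1-5'
```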
## Configuration des planifications
## Activation
Lorsqu'un workflow est planifié :
- La planification devient **active** et affiche la prochaine heure d'exécution
- Cliquez sur le bouton **"Planifié"** pour désactiver la planification
- Les planifications se désactivent automatiquement après **3 échecs consécutifs**
Les planifications sont liées au déploiement du workflow :
<div className="flex justify-center">
<Image
src="/static/blocks/schedule-2.png"
alt="Bloc de planification actif"
width={500}
height={400}
className="my-6"
/>
</div>
<div className="flex justify-center">
<Image
src="/static/blocks/schedule-3.png"
alt="Planification désactivée"
width={500}
height={350}
className="my-6"
/>
</div>
<div className="flex justify-center">
<Image
src="/static/blocks/schedule-3.png"
alt="Planification désactivée"
width={500}
height={400}
className="my-6"
/>
</div>
Les planifications désactivées indiquent quand elles ont été actives pour la dernière fois. Cliquez sur le badge **"Désactivé"** pour réactiver la planification.
- **Déployer le workflow** → la planification devient active et commence à s'exécuter
- **Annuler le déploiement du workflow** → la planification est supprimée
- **Redéployer le workflow** → la planification est recréée avec la configuration actuelle
<Callout>
Les blocs de planification ne peuvent pas recevoir de connexions entrantes et servent uniquement de déclencheurs de workflow.
Vous devez déployer votre workflow pour que la planification commence à s'exécuter. Configurez le bloc de planification, puis déployez depuis la barre d'outils.
</Callout>
## Désactivation automatique
Les planifications se désactivent automatiquement après **10 échecs consécutifs** pour éviter les erreurs incontrôlées. Lorsqu'elle est désactivée :
- Un badge d'avertissement apparaît sur le bloc de planification
- La planification cesse de s'exécuter
- Cliquez sur le badge pour réactiver la planification
<div className="flex justify-center">
<Image
src="/static/blocks/schedule-3.png"
alt="Planification désactivée"
width={500}
height={400}
className="my-6"
/>
</div>
<Callout>
Les blocs de planification ne peuvent pas recevoir de connexions entrantes et servent uniquement de points d'entrée de workflow.
</Callout>

View File

@@ -0,0 +1,59 @@
---
title: Circleback
description: AI搭載の議事録とアクションアイテム
---
import { BlockInfoCard } from "@/components/ui/block-info-card"
<BlockInfoCard
type="circleback"
color="linear-gradient(180deg, #E0F7FA 0%, #FFFFFF 100%)"
/>
{/* MANUAL-CONTENT-START:intro */}
[Circleback](https://circleback.ai/)は、チームの議事録、アクションアイテム、文字起こし、録音を自動化するAI搭載プラットフォームです。会議が終了すると、Circlebackが会話を処理し、詳細な議事録とアクションアイテム、文字起こしと録音(利用可能な場合)を提供します。これにより、チームは効率的に洞察を記録し、アクションアイテムを配布し、見落としがないことを確認できます。すべてがワークフローにシームレスに統合されます。
Sim Circleback統合により、次のことが可能になります。
- **詳細な議事録とアクションアイテムの受信**: 通話中に議論された実行可能なタスクを追跡し、整形された会議サマリーを自動的に収集します。
- **完全な会議録音と文字起こしへのアクセス**: 会話全体と関連する録音を取得し、重要な瞬間を簡単に確認したり、同僚と共有したりできます。
- **参加者情報と会議コンテキストの記録**: 参加者リスト、会議メタデータ、タグにより、データを整理して実行可能な状態に保ちます。
- **ワークフローに直接洞察を配信**: 会議が終了した瞬間に、Simの強力なWebhookトリガーを使用して、自動化をトリガーしたり、Circlebackデータを他のシステムに送信したりできます。
**Simでの動作方法:**
CirclebackはWebhookトリガーを使用します。会議が処理されるたびに、データが自動的にエージェントまたは自動化にプッシュされます。次の条件に基づいてさらなる自動化を構築できます。
- 会議完了(すべての処理済みデータが利用可能)
- 新しいノート(会議全体が処理される前にノートが準備完了)
- 高度なユースケース向けの生のWebhook統合
**Circleback会議Webhookペイロードでは、次の情報が利用可能です:**
| フィールド | タイプ | 説明 |
|----------------|---------|----------------------------------------------------|
| `id` | number | CirclebackミーティングID |
| `name` | string | ミーティングタイトル |
| `url` | string | バーチャルミーティングURL(Zoom、Meet、Teamsなど) |
| `createdAt` | string | ミーティング作成タイムスタンプ |
| `duration` | number | 秒単位の長さ |
| `recordingUrl` | string | 録画URL(24時間有効) |
| `tags` | json | タグの配列 |
| `icalUid` | string | カレンダーイベントID |
| `attendees` | json | 参加者オブジェクトの配列 |
| `notes` | string | Markdown形式のミーティングノート |
| `actionItems` | json | アクションアイテムの配列 |
| `transcript` | json | トランスクリプトセグメントの配列 |
| `insights` | json | ユーザー作成のインサイト |
| `meeting` | json | 完全なミーティングペイロード |
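参考までに、上記のペイロードをワークフロー内で扱う最小限のTypeScriptスケッチを示します。`actionItems` や `attendees` の内部構造は本ページに記載がないため、以下の `title` や `email` といったフィールド名はあくまで仮定です。

```ts
// Circleback Webhookペイロードを処理する最小限のスケッチ(サブ構造は仮定)。
interface CirclebackPayload {
  id: number
  name: string
  notes: string                                        // Markdown形式のミーティングノート
  actionItems: Array<{ title?: string }>               // 形状は仮定
  attendees: Array<{ name?: string; email?: string }>  // 形状は仮定
}

// 例:アクションアイテムを箇条書きテキストに変換して次のブロックへ渡す。
function formatActionItems(payload: CirclebackPayload): string {
  return payload.actionItems
    .map((item, i) => `${i + 1}. ${item.title ?? '(無題のタスク)'}`)
    .join('\n')
}
```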
即座にサマリーを配信したい場合でも、アクションアイテムを記録したい場合でも、新しいミーティングデータによってトリガーされるカスタムワークフローを構築したい場合でも、CirclebackとSimを使えば、ミーティングに関連するすべてを自動的にシームレスに処理できます。
{/* MANUAL-CONTENT-END */}
## 使用方法
ミーティングが処理されると、ミーティングノート、アクションアイテム、トランスクリプト、録画を受信します。Circlebackはwebhookを使用してワークフローにデータをプッシュします。
## 注意事項
- カテゴリー: `triggers`
- タイプ: `circleback`

View File

@@ -0,0 +1,218 @@
---
title: Grain
description: 会議の録画、文字起こし、AI要約にアクセス
---
import { BlockInfoCard } from "@/components/ui/block-info-card"
<BlockInfoCard
type="grain"
color="#F6FAF9"
/>
{/* MANUAL-CONTENT-START:intro */}
[Grain](https://grain.com/)は、会議の録画、文字起こし、ハイライト、AI搭載の要約を記録、保存、共有するための最新プラットフォームです。Grainを使用すると、チームは会話を実用的なインサイトに変換し、会議の重要な瞬間について全員の認識を一致させることができます。
Grainでできること:
- **検索可能な録画と文字起こしへのアクセス**: キーワード、参加者、トピックで会議を検索して確認できます。
- **ハイライトとクリップの共有**: 重要な瞬間を記録し、短い動画/音声のハイライトをチームやワークフロー全体で共有できます。
- **AI生成の要約を取得**: Grainの高度なAIを使用して、会議の要約、アクションアイテム、主要なインサイトを自動的に作成します。
- **チームやタイプ別に会議を整理**: 録画にタグを付けて分類し、簡単にアクセスしてレポートを作成できます。
Sim Grain統合により、エージェントは次のことが可能になります:
- 柔軟なフィルター(日時、参加者、チームなど)で会議の録画と詳細を一覧表示、検索、取得できます。
- 会議のAI要約、参加者、ハイライト、その他のメタデータにアクセスして、自動化や分析を強化できます。
- Grain Webhookを介して、新しい会議が処理されたとき、要約が生成されたとき、またはハイライトが作成されたときにワークフローをトリガーできます。
- Grainのデータを他のツールに簡単に連携したり、会議で重要なことが発生した瞬間にチームメイトに通知したりできます。
フォローアップアクションを自動化したり、重要な会話の記録を保持したり、組織全体でインサイトを表示したりする場合でも、GrainとSimを使用すると、会議のインテリジェンスをワークフローに簡単に接続できます。
{/* MANUAL-CONTENT-END */}
## 使用方法
Grainをワークフローに統合します。会議の録画、文字起こし、ハイライト、AI生成の要約にアクセスできます。Grain Webhookイベントに基づいてワークフローをトリガーすることもできます。
## ツール
### `grain_list_recordings`
オプションのフィルターとページネーションを使用してGrainから録画を一覧表示
#### 入力
| パラメータ | 型 | 必須 | 説明 |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | はい | Grain APIキー(個人アクセストークン) |
| `cursor` | string | いいえ | 次のページのページネーションカーソル |
| `beforeDatetime` | string | いいえ | このISO8601タイムスタンプより前の録画のみ |
| `afterDatetime` | string | いいえ | このISO8601タイムスタンプより後の録画のみ |
| `participantScope` | string | いいえ | フィルター:「internal」または「external」 |
| `titleSearch` | string | いいえ | 録画タイトルでフィルタリングする検索語 |
| `teamId` | string | いいえ | チームUUIDでフィルタリング |
| `meetingTypeId` | string | いいえ | ミーティングタイプUUIDでフィルタリング |
| `includeHighlights` | boolean | いいえ | レスポンスにハイライト/クリップを含める |
| `includeParticipants` | boolean | いいえ | レスポンスに参加者リストを含める |
| `includeAiSummary` | boolean | いいえ | AI生成サマリーを含める |
#### 出力
| パラメータ | 型 | 説明 |
| --------- | ---- | ----------- |
| `recordings` | array | 録画オブジェクトの配列 |
### `grain_get_recording`
IDで単一の録画の詳細を取得
#### 入力
| パラメータ | 型 | 必須 | 説明 |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | はい | Grain APIキー(個人アクセストークン) |
| `recordingId` | string | はい | 録画UUID |
| `includeHighlights` | boolean | いいえ | ハイライト/クリップを含める |
| `includeParticipants` | boolean | いいえ | 参加者リストを含める |
| `includeAiSummary` | boolean | いいえ | AIサマリーを含める |
| `includeCalendarEvent` | boolean | いいえ | カレンダーイベントデータを含める |
| `includeHubspot` | boolean | いいえ | HubSpot関連付けを含める |
#### 出力
| パラメータ | 型 | 説明 |
| --------- | ---- | ----------- |
| `id` | string | 録画UUID |
| `title` | string | 録画タイトル |
| `start_datetime` | string | ISO8601形式の開始タイムスタンプ |
| `end_datetime` | string | ISO8601形式の終了タイムスタンプ |
| `duration_ms` | number | ミリ秒単位の長さ |
| `media_type` | string | audio、transcript、またはvideo |
| `source` | string | 録画ソース(zoom、meet、teamsなど) |
| `url` | string | Grainで表示するためのURL |
| `thumbnail_url` | string | サムネイル画像URL |
| `tags` | array | タグ文字列の配列 |
| `teams` | array | 録画が属するチーム |
| `meeting_type` | object | ミーティングタイプ情報(id、name、scope) |
| `highlights` | array | ハイライト(含まれる場合) |
| `participants` | array | 参加者(含まれる場合) |
| `ai_summary` | object | AI要約テキスト(含まれる場合) |
| `calendar_event` | object | カレンダーイベントデータ(含まれる場合) |
| `hubspot` | object | HubSpot関連付け(含まれる場合) |
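同様に、上記の出力フィールドのみを使った簡単な利用例です(型は本ページの表に基づく想定であり、公式の型定義ではありません)。

```ts
// grain_get_recording の出力(の一部)を使う想定スケッチ。
interface GrainRecordingSummary {
  title: string
  start_datetime: string   // ISO8601
  duration_ms: number
  url: string              // Grainで表示するためのURL
}

// 例:通知用の1行サマリーを組み立てる。
function recordingLine(rec: GrainRecordingSummary): string {
  const minutes = Math.round(rec.duration_ms / 60_000)
  const startedAt = new Date(rec.start_datetime).toLocaleString()
  return `${rec.title}${startedAt} 開始、約${minutes}分): ${rec.url}`
}
```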
### `grain_get_transcript`
録画の完全なトランスクリプトを取得
#### 入力
| パラメータ | 型 | 必須 | 説明 |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | はい | Grain APIキー(パーソナルアクセストークン) |
| `recordingId` | string | はい | 録画UUID |
#### 出力
| パラメータ | 型 | 説明 |
| --------- | ---- | ----------- |
| `transcript` | array | トランスクリプトセクションの配列 |
### `grain_list_teams`
ワークスペース内のすべてのチームを一覧表示
#### 入力
| パラメータ | 型 | 必須 | 説明 |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | はい | Grain APIキー(パーソナルアクセストークン) |
#### 出力
| パラメータ | 型 | 説明 |
| --------- | ---- | ----------- |
| `teams` | array | チームオブジェクトの配列 |
### `grain_list_meeting_types`
ワークスペース内のすべてのミーティングタイプを一覧表示
#### 入力
| パラメータ | 型 | 必須 | 説明 |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | はい | Grain APIキー(パーソナルアクセストークン) |
#### 出力
| パラメータ | 型 | 説明 |
| --------- | ---- | ----------- |
| `meeting_types` | array | ミーティングタイプオブジェクトの配列 |
### `grain_create_hook`
録画イベントを受信するためのWebhookを作成
#### 入力
| パラメータ | 型 | 必須 | 説明 |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | はい | Grain APIキー(パーソナルアクセストークン) |
| `hookUrl` | string | はい | WebhookエンドポイントURL(2xxを返す必要があります) |
| `filterBeforeDatetime` | string | いいえ | フィルタ: この日付より前の録画 |
| `filterAfterDatetime` | string | いいえ | フィルタ: この日付より後の録画 |
| `filterParticipantScope` | string | いいえ | フィルタ: "internal"または"external" |
| `filterTeamId` | string | いいえ | フィルタ: 特定のチームUUID |
| `filterMeetingTypeId` | string | いいえ | フィルタ: 特定のミーティングタイプ |
| `includeHighlights` | boolean | いいえ | Webhookペイロードにハイライトを含める |
| `includeParticipants` | boolean | いいえ | Webhookペイロードに参加者を含める |
| `includeAiSummary` | boolean | いいえ | WebhookペイロードにAIサマリーを含める |
#### 出力
| パラメータ | 型 | 説明 |
| --------- | ---- | ----------- |
| `id` | string | フックUUID |
| `enabled` | boolean | フックがアクティブかどうか |
| `hook_url` | string | WebフックURL |
| `filter` | object | 適用されたフィルタ |
| `include` | object | 含まれるフィールド |
| `inserted_at` | string | ISO8601形式の作成タイムスタンプ |
### `grain_list_hooks`
アカウントのすべてのWebフックを一覧表示
#### 入力
| パラメータ | 型 | 必須 | 説明 |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | はい | Grain APIキー(個人アクセストークン) |
#### 出力
| パラメータ | 型 | 説明 |
| --------- | ---- | ----------- |
| `hooks` | array | フックオブジェクトの配列 |
### `grain_delete_hook`
IDでWebフックを削除
#### 入力
| パラメータ | 型 | 必須 | 説明 |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | はい | Grain APIキー(個人アクセストークン) |
| `hookId` | string | はい | 削除するフックUUID |
#### 出力
| パラメータ | 型 | 説明 |
| --------- | ---- | ----------- |
| `success` | boolean | Webフックが正常に削除された場合はtrue |
## 注記
- カテゴリ: `tools`
- タイプ: `grain`

View File

@@ -60,9 +60,7 @@ Jina AI Readerを使用してウェブコンテンツを抽出し、LLMフレン
| パラメータ | 型 | 説明 |
| --------- | ---- | ----------- |
| `content` | string | URLから抽出されたコンテンツで、クリーンでLLMフレンドリーなテキストに処理されたもの |
| `links` | array | ページで見つかったリンクのリストgatherLinksまたはwithLinksummaryが有効な場合 |
| `images` | array | ページで見つかった画像のリストwithImagesummaryが有効な場合 |
| `content` | string | URLから抽出されたコンテンツクリーンでLLMフレンドリーなテキストに処理されています |
### `jina_search`

View File

@@ -49,6 +49,7 @@ import { BlockInfoCard } from "@/components/ui/block-info-card"
| `azureApiVersion` | string | いいえ | Azure OpenAI APIバージョン |
| `vertexProject` | string | いいえ | Vertex AI用のGoogle CloudプロジェクトID |
| `vertexLocation` | string | いいえ | Vertex AI用のGoogle Cloudロケーション(デフォルトはus-central1) |
| `vertexCredential` | string | いいえ | Vertex AI用のGoogle Cloud OAuth認証情報ID |
#### 出力

View File

@@ -5,7 +5,6 @@ title: スケジュール
import { Callout } from 'fumadocs-ui/components/callout'
import { Tab, Tabs } from 'fumadocs-ui/components/tabs'
import { Image } from '@/components/ui/image'
import { Video } from '@/components/ui/video'
スケジュールブロックは、指定された間隔または時間に定期的なスケジュールでワークフローを自動的にトリガーします。
@@ -21,20 +20,20 @@ import { Video } from '@/components/ui/video'
## スケジュールオプション
ドロップダウンオプションを使用してワークフローの実行タイミングを設定します:
ワークフローの実行タイミングを設定します:
<Tabs items={['簡単な間隔', 'Cron式']}>
<Tabs items={['Simple Intervals', 'Cron Expressions']}>
<Tab>
<ul className="list-disc space-y-1 pl-6">
<li><strong>分ごと</strong>:5分、15分、30分間隔</li>
<li><strong>毎時</strong>:1時間ごとまたは数時間ごと</li>
<li><strong>毎日</strong>:1日に1回または複数回</li>
<li><strong>毎週</strong>:週の特定の曜日</li>
<li><strong>毎月</strong>:月の特定の日</li>
<li><strong>X分ごと</strong>:分単位の間隔で実行(1〜1440)</li>
<li><strong>毎時</strong>:毎時指定した分に実行</li>
<li><strong>毎日</strong>:毎日指定した時刻に実行</li>
<li><strong>毎週</strong>:毎週指定した曜日と時刻に実行</li>
<li><strong>毎月</strong>:毎月指定した日時に実行</li>
</ul>
</Tab>
<Tab>
<p>高度なスケジューリングにはCron式を使用します</p>
<p>高度なスケジュール設定にはcron式を使用します</p>
<div className="text-sm space-y-1">
<div><code>0 9 * * 1-5</code> - 平日の午前9時</div>
<div><code>*/15 * * * *</code> - 15分ごと</div>
@@ -43,45 +42,36 @@ import { Video } from '@/components/ui/video'
</Tab>
</Tabs>
## スケジュールの設定
## アクティベーション
ワークフローがスケジュールされると
- スケジュールが**有効**になり、次の実行時間が表示されます
- **「スケジュール済み」**ボタンをクリックするとスケジュールを無効にできます
- スケジュールは**3回連続で失敗すると**自動的に無効になります
スケジュールはワークフローのデプロイに連動します
<div className="flex justify-center">
<Image
src="/static/blocks/schedule-2.png"
alt="アクティブなスケジュールブロック"
width={500}
height={400}
className="my-6"
/>
</div>
<div className="flex justify-center">
<Image
src="/static/blocks/schedule-3.png"
alt="無効化されたスケジュール"
width={500}
height={350}
className="my-6"
/>
</div>
<div className="flex justify-center">
<Image
src="/static/blocks/schedule-3.png"
alt="無効化されたスケジュール"
width={500}
height={400}
className="my-6"
/>
</div>
無効化されたスケジュールは、最後に有効だった時間を表示します。**「無効」**バッジをクリックすると、スケジュールを再度有効にできます。
- **ワークフローをデプロイ** → スケジュールが有効になり実行を開始
- **ワークフローをアンデプロイ** → スケジュールが削除
- **ワークフローを再デプロイ** → 現在の設定でスケジュールが再作成
<Callout>
スケジュールブロックは入力接続を受け取ることができず、純粋なワークフロートリガーとして機能します
スケジュールを開始するには、ワークフローをデプロイする必要があります。スケジュールブロックを設定してから、ツールバーからデプロイしてください
</Callout>
## 自動無効化
スケジュールは**10回連続で失敗**すると、エラーの連鎖を防ぐため自動的に無効化されます。無効化されると:
- スケジュールブロックに警告バッジが表示されます
- スケジュールの実行が停止します
- バッジをクリックしてスケジュールを再有効化できます
<div className="flex justify-center">
<Image
src="/static/blocks/schedule-3.png"
alt="無効化されたスケジュール"
width={500}
height={400}
className="my-6"
/>
</div>
<Callout>
スケジュールブロックは入力接続を受け取ることができず、ワークフローのエントリーポイントとしてのみ機能します。
</Callout>

View File

@@ -0,0 +1,58 @@
---
title: Circleback
description: AI 驱动的会议记录与行动项
---
import { BlockInfoCard } from "@/components/ui/block-info-card"
<BlockInfoCard
type="circleback"
color="linear-gradient(180deg, #E0F7FA 0%, #FFFFFF 100%)"
/>
{/* MANUAL-CONTENT-START:intro */}
[Circleback](https://circleback.ai/) 是一个 AI 驱动的平台可为您的团队自动生成会议记录、行动项、文字稿和录音。每当会议结束后Circleback 会处理对话内容,提供详细的会议纪要和行动项,同时附上文字稿和录音(如有)。这有助于团队高效捕捉洞见、分发行动项,并确保不会遗漏任何重要信息——所有内容都能无缝集成到您的工作流程中。
通过 Sim Circleback 集成,您可以:
- **获取详细的会议记录和行动项**:自动收集格式良好的会议摘要,并跟踪通话中讨论的可执行任务。
- **访问完整的会议录音和文字稿**:获取完整对话及相关录音,便于回顾关键时刻或与同事分享。
- **捕捉与会者信息和会议背景**:与会者名单、会议元数据和标签帮助您有序管理和利用数据。
- **将洞见直接推送到您的工作流程**:会议结束后,利用 Sim 强大的 webhook 触发器,自动触发自动化流程或将 Circleback 数据发送到其他系统。
**在 Sim 中的工作方式:**
Circleback 使用 webhook 触发器:每当会议处理完成,数据会自动推送到您的代理或自动化流程。您可以基于以下内容构建更多自动化:
- 会议完成(所有处理数据可用)
- 新会议记录(即使会议尚未全部处理,会议纪要也可提前获取)
- 原始 webhook 集成,适用于高级用例
**Circleback 会议 webhook 有效载荷中包含以下信息:**
| 字段 | 类型 | 描述 |
|----------------|---------|----------------------------------------------------|
| `id` | number | Circleback 会议 ID |
| `name` | string | 会议标题 |
| `url` | string | 虚拟会议 URL(Zoom、Meet、Teams 等) |
| `createdAt` | string | 会议创建时间戳 |
| `duration` | number | 时长(秒) |
| `recordingUrl` | string | 录制文件 URL(有效期 24 小时) |
| `tags` | json | 标签数组 |
| `icalUid` | string | 日历事件 ID |
| `attendees` | json | 参会者对象数组 |
| `notes` | string | Markdown 格式会议记录 |
| `actionItems` | json | 行动项数组 |
| `transcript` | json | 会议记录片段数组 |
| `insights` | json | 用户创建的洞见 |
| `meeting` | json | 完整会议数据 |
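作为参考,下面给出一个仅基于上表字段的 TypeScript 示意代码,演示后续步骤如何使用该载荷;`actionItems` 的内部结构本页并未说明,示例中的 `title`、`assignee` 仅为假设。

```ts
// 基于上表字段的 Circleback 载荷示例(子结构为假设)。
interface CirclebackWebhookPayload {
  id: number
  name: string
  duration: number                                            // 时长(秒)
  notes: string                                               // Markdown 格式会议记录
  actionItems: Array<{ title?: string; assignee?: string }>   // 结构为假设
}

// 示例:把行动项整理成待办列表文本。
function toTodoList(payload: CirclebackWebhookPayload): string {
  return payload.actionItems
    .map((item) => `- ${item.title ?? '未命名任务'}${item.assignee ? `(负责人:${item.assignee}` : ''}`)
    .join('\n')
}
```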
无论你是想分发即时摘要、记录行动项还是基于新会议数据构建自定义工作流Circleback 和 Sim 都能让你自动无缝处理所有与会议相关的事务。
{/* MANUAL-CONTENT-END */}
## 使用说明
当会议被处理时你将收到会议记录、行动项、转录和录音。Circleback 使用 webhook 将数据推送到你的工作流。
## 备注
- 分类:`triggers`
- 类型:`circleback`

View File

@@ -0,0 +1,218 @@
---
title: Grain
description: 访问会议录音、转录文本和 AI 摘要
---
import { BlockInfoCard } from "@/components/ui/block-info-card"
<BlockInfoCard
type="grain"
color="#F6FAF9"
/>
{/* MANUAL-CONTENT-START:intro */}
[Grain](https://grain.com/) 是一个现代化平台,用于捕捉、存储和分享会议录音、转录文本、重点片段以及 AI 驱动的摘要。Grain 帮助团队将对话转化为可执行的洞察,让每个人都能对会议中的关键时刻保持一致。
使用 Grain您可以
- **访问可搜索的录音和转录文本**:可按关键词、参与者或主题查找和回顾每场会议。
- **分享重点片段和剪辑**:捕捉重要时刻,并在团队或工作流中分享短视频/音频片段。
- **获取 AI 生成的摘要**:利用 Grain 的先进 AI 自动生成会议摘要、行动项和关键洞察。
- **按团队或类型组织会议**:为录音打标签和分类,便于访问和报告。
Sim Grain 集成让您的坐席能够:
- 通过灵活的筛选条件(日期时间、参与者、团队等)列出、搜索和获取会议录音及详细信息。
- 获取会议的 AI 摘要、参与者、重点片段及其他元数据,以支持自动化或分析。
- 通过 Grain webhook在新会议被处理、摘要生成或重点片段创建时触发工作流。
- 轻松将 Grain 数据桥接到其他工具,或在会议中有重要事件发生时即时通知团队成员。
无论您是想自动化后续操作、保留重要对话记录还是在组织内挖掘洞察Grain 和 Sim 都能让您轻松将会议智能连接到工作流中。
{/* MANUAL-CONTENT-END */}
## 使用说明
将 Grain 集成到您的工作流中。访问会议录音、转录文本、重点片段和 AI 生成的摘要。还可以基于 Grain webhook 事件触发工作流。
## 工具
### `grain_list_recordings`
从 Grain 获取录音列表,可选过滤和分页
#### 输入
| 参数 | 类型 | 必填 | 说明 |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | 是 | Grain API key(个人访问令牌) |
| `cursor` | string | 否 | 下一页的分页游标 |
| `beforeDatetime` | string | 否 | 仅包含此 ISO8601 时间戳之前的录音 |
| `afterDatetime` | string | 否 | 仅包含此 ISO8601 时间戳之后的录音 |
| `participantScope` | string | 否 | 过滤条件:“internal” 或 “external” |
| `titleSearch` | string | 否 | 按录音标题搜索过滤 |
| `teamId` | string | 否 | 按团队 UUID 过滤 |
| `meetingTypeId` | string | 否 | 按会议类型 UUID 过滤 |
| `includeHighlights` | boolean | 否 | 响应中包含重点/片段 |
| `includeParticipants` | boolean | 否 | 响应中包含参与者列表 |
| `includeAiSummary` | boolean | 否 | 包含 AI 生成的摘要 |
#### 输出
| 参数 | 类型 | 说明 |
| --------- | ---- | ----------- |
| `recordings` | array | 录音对象数组 |
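同样地,下面的 TypeScript 示意代码仅依据上表参数组装 `grain_list_recordings` 的输入;字段名取自上表,仅作说明之用,并非官方 SDK。

```ts
// 依据上表组装 grain_list_recordings 的输入参数(示意)。
interface GrainListRecordingsParams {
  apiKey: string
  cursor?: string
  beforeDatetime?: string          // ISO8601
  afterDatetime?: string           // ISO8601
  participantScope?: 'internal' | 'external'
  titleSearch?: string
  teamId?: string
  meetingTypeId?: string
  includeHighlights?: boolean
  includeParticipants?: boolean
  includeAiSummary?: boolean
}

// 示例:列出最近一周的外部会议录音,并附带 AI 摘要。
const params: GrainListRecordingsParams = {
  apiKey: process.env.GRAIN_API_KEY ?? '',
  afterDatetime: new Date(Date.now() - 7 * 24 * 60 * 60 * 1000).toISOString(),
  participantScope: 'external',
  includeAiSummary: true,
}
```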
### `grain_get_recording`
根据 ID 获取单个录音的详细信息
#### 输入
| 参数 | 类型 | 必填 | 说明 |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | 是 | Grain API key(个人访问令牌) |
| `recordingId` | string | 是 | 录音 UUID |
| `includeHighlights` | boolean | 否 | 包含重点/片段 |
| `includeParticipants` | boolean | 否 | 包含参与者列表 |
| `includeAiSummary` | boolean | 否 | 包含 AI 摘要 |
| `includeCalendarEvent` | boolean | 否 | 包含日历事件数据 |
| `includeHubspot` | boolean | 否 | 包含 HubSpot 关联 |
#### 输出
| 参数 | 类型 | 说明 |
| --------- | ---- | ----------- |
| `id` | string | 录音 UUID |
| `title` | string | 录音标题 |
| `start_datetime` | string | ISO8601 开始时间戳 |
| `end_datetime` | string | ISO8601 结束时间戳 |
| `duration_ms` | number | 持续时间(毫秒) |
| `media_type` | string | 音频、转录或视频 |
| `source` | string | 录音来源(zoom、meet、teams 等) |
| `url` | string | 在 Grain 中查看的 URL |
| `thumbnail_url` | string | 缩略图 URL |
| `tags` | array | 标签字符串数组 |
| `teams` | array | 录音所属团队 |
| `meeting_type` | object | 会议类型信息(id、name、scope) |
| `highlights` | array | 高亮内容(如有) |
| `participants` | array | 参与者(如有) |
| `ai_summary` | object | AI 摘要文本(如有) |
| `calendar_event` | object | 日历事件数据(如有) |
| `hubspot` | object | HubSpot 关联信息(如有) |
### `grain_get_transcript`
获取录音的完整转录文本
#### 输入
| 参数 | 类型 | 必填 | 说明 |
| --------- | ---- | ---- | ----------- |
| `apiKey` | string | 是 | Grain API 密钥(个人访问令牌) |
| `recordingId` | string | 是 | 录音 UUID |
#### 输出
| 参数 | 类型 | 说明 |
| --------- | ---- | ----------- |
| `transcript` | array | 转录片段数组 |
### `grain_list_teams`
列出工作区中的所有团队
#### 输入
| 参数 | 类型 | 必填 | 说明 |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | 是 | Grain API key(个人访问令牌) |
#### 输出
| 参数 | 类型 | 说明 |
| --------- | ---- | ----------- |
| `teams` | array | 团队对象数组 |
### `grain_list_meeting_types`
列出工作区中的所有会议类型
#### 输入
| 参数 | 类型 | 必填 | 说明 |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | 是 | Grain API key(个人访问令牌) |
#### 输出
| 参数 | 类型 | 说明 |
| --------- | ---- | ----------- |
| `meeting_types` | array | 会议类型对象数组 |
### `grain_create_hook`
创建一个 webhook 以接收录制事件
#### 输入
| 参数 | 类型 | 必填 | 说明 |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | 是 | Grain API key(个人访问令牌) |
| `hookUrl` | string | 是 | Webhook endpoint URL(必须响应 2xx) |
| `filterBeforeDatetime` | string | 否 | 筛选:此日期之前的录制 |
| `filterAfterDatetime` | string | 否 | 筛选:此日期之后的录制 |
| `filterParticipantScope` | string | 否 | 筛选:“internal” 或 “external” |
| `filterTeamId` | string | 否 | 筛选:指定团队 UUID |
| `filterMeetingTypeId` | string | 否 | 筛选:指定会议类型 |
| `includeHighlights` | boolean | 否 | 在 webhook 负载中包含重点内容 |
| `includeParticipants` | boolean | 否 | 在 webhook 负载中包含参与者 |
| `includeAiSummary` | boolean | 否 | 在 webhook 负载中包含 AI 摘要 |
#### 输出
| 参数 | 类型 | 说明 |
| --------- | ---- | ----------- |
| `id` | string | Hook UUID |
| `enabled` | boolean | Hook 是否激活 |
| `hook_url` | string | webhook URL |
| `filter` | object | 已应用的过滤器 |
| `include` | object | 包含的字段 |
| `inserted_at` | string | ISO8601 创建时间戳 |
### `grain_list_hooks`
列出该账户下的所有 webhook
#### 输入
| 参数 | 类型 | 必填 | 说明 |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | 是 | Grain API key(个人访问令牌) |
#### 输出
| 参数 | 类型 | 说明 |
| --------- | ---- | ----------- |
| `hooks` | array | Hook 对象数组 |
### `grain_delete_hook`
根据 ID 删除 webhook
#### 输入
| 参数 | 类型 | 必填 | 说明 |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | 是 | Grain API key(个人访问令牌) |
| `hookId` | string | 是 | 要删除的 Hook UUID |
#### 输出
| 参数 | 类型 | 说明 |
| --------- | ---- | ----------- |
| `success` | boolean | webhook 删除成功时为 true |
## 备注
- 分类:`tools`
- 类型:`grain`

View File

@@ -60,9 +60,7 @@ Jina AI Reader 专注于从网页中提取最相关的内容,去除杂乱、
| 参数 | 类型 | 描述 |
| --------- | ---- | ----------- |
| `content` | 字符串 | 从 URL 提取的内容,处理为干净且适合 LLM 的文本 |
| `links` | 数组 | 页面中找到的链接列表(当启用 gatherLinks 或 withLinksummary 时) |
| `images` | 数组 | 页面中找到的图片列表(当启用 withImagesummary 时) |
| `content` | 字符串 | 从 URL 提取的内容,处理为简洁、适合 LLM 的文本 |
### `jina_search`

View File

@@ -39,7 +39,7 @@ import { BlockInfoCard } from "@/components/ui/block-info-card"
| 参数 | 类型 | 必填 | 说明 |
| --------- | ---- | -------- | ----------- |
| `model` | string | 是 | 要使用的模型(例如 gpt-4o、claude-sonnet-4-5、gemini-2.0-flash) |
| `model` | string | 是 | 要使用的模型(例如gpt-4o、claude-sonnet-4-5、gemini-2.0-flash) |
| `systemPrompt` | string | 否 | 设置助手行为的 system prompt |
| `context` | string | 是 | 发送给模型的用户消息或上下文 |
| `apiKey` | string | 否 | 提供方的 API key如未提供托管模型将使用平台密钥 |
@@ -49,6 +49,7 @@ import { BlockInfoCard } from "@/components/ui/block-info-card"
| `azureApiVersion` | string | 否 | Azure OpenAI API 版本 |
| `vertexProject` | string | 否 | Vertex AI 的 Google Cloud 项目 ID |
| `vertexLocation` | string | 否 | Vertex AI 的 Google Cloud 区域(默认为 us-central1) |
| `vertexCredential` | string | 否 | Vertex AI 的 Google Cloud OAuth 凭证 ID |
#### 输出

View File

@@ -5,7 +5,6 @@ title: 计划
import { Callout } from 'fumadocs-ui/components/callout'
import { Tab, Tabs } from 'fumadocs-ui/components/tabs'
import { Image } from '@/components/ui/image'
import { Video } from '@/components/ui/video'
计划模块会在指定的时间间隔或时间点自动触发工作流。
@@ -21,67 +20,58 @@ import { Video } from '@/components/ui/video'
## 计划选项
通过下拉选项配置工作流的运行时间:
配置工作流的运行时间:
<Tabs items={['简单间隔', 'Cron 表达式']}>
<Tab>
<ul className="list-disc space-y-1 pl-6">
<li><strong>每隔几分钟</strong>:5 分钟、15 分钟、30 分钟的间隔</li>
<li><strong>每小时</strong>:每小时或每隔几小时</li>
<li><strong>每天</strong>:每天一次或多次</li>
<li><strong>每周</strong>:一周中的特定日子</li>
<li><strong>每月</strong>:一个月中的特定日子</li>
<li><strong>每 X 分钟</strong>:按分钟间隔运行(1-1440)</li>
<li><strong>每小时</strong>:每小时在指定的分钟运行</li>
<li><strong>每天</strong>:每天在指定时间运行</li>
<li><strong>每周</strong>:每周在指定的星期和时间运行</li>
<li><strong>每月</strong>:每月在指定的日期和时间运行</li>
</ul>
</Tab>
<Tab>
<p>使用 cron 表达式进行高级调度:</p>
<div className="text-sm space-y-1">
<div><code>0 9 * * 1-5</code> - 每个工作日的上午 9 点</div>
<div><code>*/15 * * * *</code> - 每 15 分钟</div>
<div><code>*/15 * * * *</code> - 每 15 分钟一次</div>
<div><code>0 0 1 * *</code> - 每月的第一天</div>
</div>
</Tab>
</Tabs>
## 配置计划
## 启用
当工作流被计划时
- 计划变为**激活**状态,并显示下次执行时间
- 点击 **"已计划"** 按钮以停用计划
- 计划在 **连续失败 3 次** 后会自动停用
计划与工作流部署相关联
<div className="flex justify-center">
<Image
src="/static/blocks/schedule-2.png"
alt="活动计划块"
width={500}
height={400}
className="my-6"
/>
</div>
<div className="flex justify-center">
<Image
src="/static/blocks/schedule-3.png"
alt="停用的计划"
width={500}
height={350}
className="my-6"
/>
</div>
<div className="flex justify-center">
<Image
src="/static/blocks/schedule-3.png"
alt="禁用计划"
width={500}
height={400}
className="my-6"
/>
</div>
已禁用的计划会显示上次激活的时间。点击 **"已禁用"** 徽章以重新激活计划。
- **部署工作流** → 计划激活并开始运行
- **取消部署工作流** → 计划被移除
- **重新部署工作流** → 计划会以当前配置重新创建
<Callout>
计划块无法接收传入连接,仅作为纯工作流触发器
必须先部署工作流,计划才会开始运行。请先配置计划块,然后在工具栏中部署
</Callout>
## 自动禁用
计划在连续 **10 次失败** 后会自动禁用,以防止错误持续发生。禁用后:
- 计划块上会显示警告徽章
- 计划将停止执行
- 点击徽章可重新激活计划
<div className="flex justify-center">
<Image
src="/static/blocks/schedule-3.png"
alt="已禁用的计划"
width={500}
height={400}
className="my-6"
/>
</div>
<Callout>
计划块无法接收传入连接,只能作为工作流的入口点。
</Callout>

View File

@@ -217,19 +217,21 @@ checksums:
content/9: cbca5d806da167603e38e7dc90344e57
fb53ce2c1fc28db4c6c09f5296ff59c6:
meta/title: a75428cb811bc50150cecde090a3a0d5
content/0: c0a142478cc5c515f87d368fa72da818
content/0: e4684b7201c2aed215c82606e9eaa293
content/1: 3fcad3dff5044fbf0c734bab806c437e
content/2: 7c82b7d111a2517b08861c4c0e71eff9
content/3: 1ed1a03c7f922f4b1437594f34ea2afb
content/4: ca43f34465308970910b39fa073e10ec
content/5: f67fd398c98884cf0829682dca6d5d91
content/6: 72a5feaa2b80a1f22d224e311a0e4efe
content/7: 62261cedf5fff6a13220f3f0b6de661b
content/8: e58bf5c8afb239f2606ec5dfba30fc2f
content/9: 35840d3d91271d11c9449e7f316ff280
content/10: 2ff1c8bf00c740f66bce8a4a7f768ca8
content/11: 909f57e2475676b16d90b6605cd3ff43
content/12: 8f5b5d43297c4ff114ca49395878292b
content/4: d34ebf41fb97810c8398b4064520bd7b
content/5: 5025a2d2e9eadc2b91f323b2862b0a1a
content/6: 913f67efd4923e0f70e29640405e34d2
content/7: a706670c6362a1b723ccc3d6720ad6af
content/8: ab4fe131de634064f9a7744a11599434
content/9: 2f6c9564a33ad9f752df55840b0c8e16
content/10: fef34568e5bbd5a50e2a89412f85302c
content/11: b7ae0ecf6fbaa92b049c718720e4007e
content/12: bcd95e6bef30b6f480fee33800928b13
content/13: 2ff1c8bf00c740f66bce8a4a7f768ca8
content/14: 16eb64906b9e981ea3c11525ff5a1c2e
73129cc41f543288d67924faea3172db:
meta/title: 8cbe02c3108a0dbe0586dbc18db04efe
meta/description: 9540ac7731cebd594afa4ce2fb59ab9d
@@ -557,7 +559,7 @@ checksums:
content/8: 6325adefb6e1520835225285b18b6a45
content/9: b7fa85fce9c7476fe132df189e27dac1
content/10: 371d0e46b4bd2c23f559b8bc112f6955
content/11: 7ad14ccfe548588081626cfe769ad492
content/11: a34c59648e0f7218a8e9b72c333366fb
content/12: bcadfc362b69078beee0088e5936c98b
content/13: 6af66efd0da20944a87fdb8d9defa358
content/14: b3f310d5ef115bea5a8b75bf25d7ea9a
@@ -2664,7 +2666,7 @@ checksums:
content/12: 371d0e46b4bd2c23f559b8bc112f6955
content/13: 6ad8fcd98fc25eab726d05f9e9ccc6a4
content/14: bcadfc362b69078beee0088e5936c98b
content/15: 0ac8cd06fceaf16c960de79f7df987ee
content/15: 1eb58de69f18ba555d7f349fed365de5
content/16: c340d51e1b2d05b9b68a79baa8e9481a
content/17: 64d5a97527775c7bfcdcbb418a10ea35
content/18: 371d0e46b4bd2c23f559b8bc112f6955
@@ -49869,3 +49871,84 @@ checksums:
content/32: fd0f38eb3fe5cf95be366a4ff6b4fb90
content/33: b3f310d5ef115bea5a8b75bf25d7ea9a
content/34: 4a7b2c644e487f3d12b6a6b54f8c6773
d75b83c6e1f54ba41b8cd27960256f4e:
meta/title: 63d9b961cc414fe48ed3a117b1849ac0
meta/description: 0828295c4f8482d4ab18ae67cefb3efa
content/0: 1b031fb0c62c46b177aeed5c3d3f8f80
content/1: ce93512e241ca1ac9723d797d937e8d6
content/2: 4539a8e7b9a0b8c570e8b2261e6d53e8
content/3: 05d783b8313bd21464edbc35f72acda7
content/4: aadfc263ce44fb67b5ec899cf7034707
content/5: 7feedc49fa38d45979f4ae3685e2a2e8
content/6: 6d8ac64adb588d4675e8ad779861cf79
content/7: 9b55ef7d0cb63e28ac9aa5b71ca5611e
content/8: 821e6394b0a953e2b0842b04ae8f3105
content/9: 3e3c921ad486b0390454b325a0ecab98
content/10: 9c8aa3f09c9b2bd50ea4cdff3598ea4e
content/11: 3e12916db64b7037df05c733542689b8
content/12: bf76a8fa5e9be0ad03d4a25fc1cd5d2c
content/13: 371d0e46b4bd2c23f559b8bc112f6955
content/14: 7476e5130f17fef0005e9eb79a288a4b
content/15: bcadfc362b69078beee0088e5936c98b
content/16: 921522dc74bcfe253933280a44e32325
content/17: 5c91a98c8c182a86561bdc2bb55d52fb
content/18: 5a003869e25c931a6a39e75f1fbb331e
content/19: 371d0e46b4bd2c23f559b8bc112f6955
content/20: c03a1ad5898fb9592c47f9fef3a443f9
content/21: bcadfc362b69078beee0088e5936c98b
content/22: 966dd802eb6aa5f8a6d37be800aa0476
content/23: d7f931ee4088a41234a19dbc070bbb06
content/24: b7a5a66f81700ac0f58f0f417a090db1
content/25: 371d0e46b4bd2c23f559b8bc112f6955
content/26: 8dddce76764cf0050ac4f8cb88cbf3b7
content/27: bcadfc362b69078beee0088e5936c98b
content/28: b30efde22ffd4646ac11e1b7053b2f71
content/29: e12dd4d7b99e7c7038b8935f48fbed29
content/30: fd9b29ad276abb6ffbb5350d71fb174a
content/31: 371d0e46b4bd2c23f559b8bc112f6955
content/32: 2e9928cbf2e736fc61f08d4339ccae59
content/33: bcadfc362b69078beee0088e5936c98b
content/34: 99b063108e07f3350f6ec02ce632d682
content/35: c1d1369970a7430014aa1f70a75e1b56
content/36: 25adb991028a92365272704d5921c0fe
content/37: 371d0e46b4bd2c23f559b8bc112f6955
content/38: 2e9928cbf2e736fc61f08d4339ccae59
content/39: bcadfc362b69078beee0088e5936c98b
content/40: 852dffd5402c58c35f6abfd6b8046585
content/41: 66a326fe86b5ff7c12f097bae8917018
content/42: a58bde3efd6164d3541047bd97cee6fe
content/43: 371d0e46b4bd2c23f559b8bc112f6955
content/44: 1c98f5538b8b37801da7f5e8c5912219
content/45: bcadfc362b69078beee0088e5936c98b
content/46: a84d2702883c8af99a401582e2192d39
content/47: e3dd3df817017359361432029b0c5ef1
content/48: 557fb6942a695af69e94fbd7692590e6
content/49: 371d0e46b4bd2c23f559b8bc112f6955
content/50: 2e9928cbf2e736fc61f08d4339ccae59
content/51: bcadfc362b69078beee0088e5936c98b
content/52: 69a6fbea11482ab284195984788c9710
content/53: dafa30ae47d52c901b756bd8bd0ae2fd
content/54: 70e8dcde230d0cd4a9b7b18c8df043cd
content/55: 371d0e46b4bd2c23f559b8bc112f6955
content/56: 97a4116e8509aede52ea1801656a7671
content/57: bcadfc362b69078beee0088e5936c98b
content/58: 2b73b3348aa37da99e35d12e892197f2
content/59: b3f310d5ef115bea5a8b75bf25d7ea9a
content/60: 11e67a936d6e434842446342f83b5289
753fe021f7c0fca8dc429f2e971fae5a:
meta/title: b3498307d692252f1286175a18b62e16
meta/description: 5f52444a2ad126633723bb44d06e7638
content/0: 1b031fb0c62c46b177aeed5c3d3f8f80
content/1: 28c5925266bfcf8a373b0490f6e63c8c
content/2: 76482724500904d534bc171470aa5594
content/3: 5e43b6ea89ab6aa913524b5db4c4f2f3
content/4: aa6b3680f93d09752072d278d8d3e6bb
content/5: 49686bd2f0af8b45071a5e11a47df85e
content/6: 4fcff29464aac96e894b0e9da8b7aac5
content/7: 89255fc21a3a429f27d1f2cdfe065235
content/8: 51dac9c1f218035a3f23137588eca5b6
content/9: 2ddb58bd6414897d33c6cb1590558749
content/10: 821e6394b0a953e2b0842b04ae8f3105
content/11: 972721b310d5e3e6e08ec33dc9630f62
content/12: b3f310d5ef115bea5a8b75bf25d7ea9a
content/13: 06a9cbcec05366fe1c873c90c36b4f44

Binary file not shown.


View File

@@ -20,7 +20,7 @@ interface NavProps {
}
export default function Nav({ hideAuthButtons = false, variant = 'landing' }: NavProps = {}) {
const [githubStars, setGithubStars] = useState('18.6k')
const [githubStars, setGithubStars] = useState('24k')
const [isHovered, setIsHovered] = useState(false)
const [isLoginHovered, setIsLoginHovered] = useState(false)
const router = useRouter()

View File

@@ -1,26 +1,42 @@
'use client'
import { type ReactNode, useState } from 'react'
import type { ReactNode } from 'react'
import { QueryClient, QueryClientProvider } from '@tanstack/react-query'
export function QueryProvider({ children }: { children: ReactNode }) {
const [queryClient] = useState(
() =>
new QueryClient({
defaultOptions: {
queries: {
staleTime: 30 * 1000,
gcTime: 5 * 60 * 1000,
refetchOnWindowFocus: false,
retry: 1,
retryOnMount: false,
},
mutations: {
retry: 1,
},
},
})
)
/**
 * Shared QueryClient for client-side use. Call getQueryClient() to get the
 * browser singleton for cache operations outside React components; on the
 * server a fresh client is created per call.
 */
function makeQueryClient() {
return new QueryClient({
defaultOptions: {
queries: {
staleTime: 30 * 1000,
gcTime: 5 * 60 * 1000,
refetchOnWindowFocus: false,
retry: 1,
retryOnMount: false,
},
mutations: {
retry: 1,
},
},
})
}
let browserQueryClient: QueryClient | undefined
export function getQueryClient() {
if (typeof window === 'undefined') {
return makeQueryClient()
}
if (!browserQueryClient) {
browserQueryClient = makeQueryClient()
}
return browserQueryClient
}
export function QueryProvider({ children }: { children: ReactNode }) {
const queryClient = getQueryClient()
return <QueryClientProvider client={queryClient}>{children}</QueryClientProvider>
}
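A brief usage sketch for the new singleton accessor: the import path and the `['workflows']` query key below are assumptions for illustration only.

```ts
// Sketch: using the shared QueryClient outside a React component,
// e.g. after a mutation performed in a plain event handler or store action.
// The import path is an assumption based on where QueryProvider lives.
import { getQueryClient } from '@/app/query-provider'

export async function invalidateWorkflowsCache() {
  // In the browser this is the singleton, so invalidation affects the same
  // cache the components read from; on the server it is a fresh client.
  await getQueryClient().invalidateQueries({ queryKey: ['workflows'] })
}
```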

View File

@@ -70,7 +70,7 @@ describe('OAuth Connections API Route', () => {
})
)
vi.doMock('@/lib/oauth/oauth', () => ({
vi.doMock('@/lib/oauth/utils', () => ({
parseProvider: mockParseProvider,
evaluateScopeCoverage: mockEvaluateScopeCoverage,
}))

View File

@@ -5,8 +5,8 @@ import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { generateRequestId } from '@/lib/core/utils/request'
import { createLogger } from '@/lib/logs/console/logger'
import type { OAuthProvider } from '@/lib/oauth/oauth'
import { evaluateScopeCoverage, parseProvider } from '@/lib/oauth/oauth'
import type { OAuthProvider } from '@/lib/oauth'
import { evaluateScopeCoverage, parseProvider } from '@/lib/oauth'
const logger = createLogger('OAuthConnectionsAPI')

View File

@@ -42,7 +42,7 @@ describe('OAuth Credentials API Route', () => {
getSession: mockGetSession,
}))
vi.doMock('@/lib/oauth/oauth', () => ({
vi.doMock('@/lib/oauth/utils', () => ({
parseProvider: mockParseProvider,
evaluateScopeCoverage: mockEvaluateScopeCoverage,
}))

View File

@@ -7,7 +7,7 @@ import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { createLogger } from '@/lib/logs/console/logger'
import { evaluateScopeCoverage, parseProvider } from '@/lib/oauth/oauth'
import { evaluateScopeCoverage, type OAuthProvider, parseProvider } from '@/lib/oauth'
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
export const dynamic = 'force-dynamic'
@@ -132,7 +132,7 @@ export async function GET(request: NextRequest) {
}
// Parse the provider to get base provider and feature type (if provider is present)
const { baseProvider } = parseProvider(providerParam || 'google-default')
const { baseProvider } = parseProvider((providerParam || 'google') as OAuthProvider)
let accountsData

View File

@@ -26,6 +26,7 @@ vi.mock('@sim/db', () => ({
vi.mock('@/lib/oauth/oauth', () => ({
refreshOAuthToken: vi.fn(),
OAUTH_PROVIDERS: {},
}))
vi.mock('@/lib/logs/console/logger', () => ({
@@ -38,7 +39,7 @@ vi.mock('@/lib/logs/console/logger', () => ({
}))
import { db } from '@sim/db'
import { refreshOAuthToken } from '@/lib/oauth/oauth'
import { refreshOAuthToken } from '@/lib/oauth'
import {
getCredential,
getUserId,

View File

@@ -3,7 +3,7 @@ import { account, workflow } from '@sim/db/schema'
import { and, desc, eq } from 'drizzle-orm'
import { getSession } from '@/lib/auth'
import { createLogger } from '@/lib/logs/console/logger'
import { refreshOAuthToken } from '@/lib/oauth/oauth'
import { refreshOAuthToken } from '@/lib/oauth'
const logger = createLogger('OAuthUtilsAPI')

View File

@@ -10,9 +10,9 @@ import {
createRequestTracker,
createUnauthorizedResponse,
} from '@/lib/copilot/request-helpers'
import { validateUUID } from '@/lib/core/security/input-validation'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { createLogger } from '@/lib/logs/console/logger'
import { isUuidV4 } from '@/executor/constants'
const logger = createLogger('CheckpointRevertAPI')
@@ -87,9 +87,8 @@ export async function POST(request: NextRequest) {
isDeployed: cleanedState.isDeployed,
})
const workflowIdValidation = validateUUID(checkpoint.workflowId, 'workflowId')
if (!workflowIdValidation.isValid) {
logger.error(`[${tracker.requestId}] Invalid workflow ID: ${workflowIdValidation.error}`)
if (!isUuidV4(checkpoint.workflowId)) {
logger.error(`[${tracker.requestId}] Invalid workflow ID format`)
return NextResponse.json({ error: 'Invalid workflow ID format' }, { status: 400 })
}

View File

@@ -14,6 +14,8 @@ import { generateRequestId } from '@/lib/core/utils/request'
import { getEffectiveDecryptedEnv } from '@/lib/environment/utils'
import { createLogger } from '@/lib/logs/console/logger'
import { refreshTokenIfNeeded } from '@/app/api/auth/oauth/utils'
import { REFERENCE } from '@/executor/constants'
import { createEnvVarPattern } from '@/executor/utils/reference-validation'
import { executeTool } from '@/tools'
import { getTool } from '@/tools/utils'
@@ -33,14 +35,18 @@ const ExecuteToolSchema = z.object({
function resolveEnvVarReferences(value: any, envVars: Record<string, string>): any {
if (typeof value === 'string') {
// Check for exact match: entire string is "{{VAR_NAME}}"
const exactMatch = /^\{\{([^}]+)\}\}$/.exec(value)
const exactMatchPattern = new RegExp(
`^\\${REFERENCE.ENV_VAR_START}([^}]+)\\${REFERENCE.ENV_VAR_END}$`
)
const exactMatch = exactMatchPattern.exec(value)
if (exactMatch) {
const envVarName = exactMatch[1].trim()
return envVars[envVarName] ?? value
}
// Check for embedded references: "prefix {{VAR}} suffix"
return value.replace(/\{\{([^}]+)\}\}/g, (match, varName) => {
const envVarPattern = createEnvVarPattern()
return value.replace(envVarPattern, (match, varName) => {
const trimmedName = varName.trim()
return envVars[trimmedName] ?? match
})
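For reference, a small usage sketch of the helper above, assuming `REFERENCE.ENV_VAR_START` / `ENV_VAR_END` are the usual `{{` / `}}` delimiters:

```ts
// Illustrative only — mirrors the two branches in resolveEnvVarReferences above.
const envVars = { API_TOKEN: 'abc123', BASE_URL: 'https://api.example.com' }

resolveEnvVarReferences('{{API_TOKEN}}', envVars)
// -> 'abc123' (exact match: the whole string is a single reference)

resolveEnvVarReferences('Bearer {{API_TOKEN}} via {{BASE_URL}}', envVars)
// -> 'Bearer abc123 via https://api.example.com' (embedded references)

resolveEnvVarReferences('{{MISSING}}', envVars)
// -> '{{MISSING}}' (unknown variables are left untouched)
```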

View File

@@ -14,6 +14,7 @@ import type { StorageConfig } from '@/lib/uploads/core/storage-client'
import { getFileMetadataByKey } from '@/lib/uploads/server/metadata'
import { inferContextFromKey } from '@/lib/uploads/utils/file-utils'
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
import { isUuid } from '@/executor/constants'
const logger = createLogger('FileAuthorization')
@@ -85,9 +86,7 @@ function extractWorkspaceIdFromKey(key: string): string | null {
const parts = key.split('/')
const workspaceId = parts[0]
// Validate UUID format
const UUID_PATTERN = /^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$/i
if (workspaceId && UUID_PATTERN.test(workspaceId)) {
if (workspaceId && isUuid(workspaceId)) {
return workspaceId
}

View File

@@ -1,5 +1,6 @@
import { type NextRequest, NextResponse } from 'next/server'
import { createLogger } from '@/lib/logs/console/logger'
import { sanitizeFileName } from '@/executor/constants'
import '@/lib/uploads/core/setup.server'
import { getSession } from '@/lib/auth'
import type { StorageContext } from '@/lib/uploads/config'
@@ -154,7 +155,7 @@ export async function POST(request: NextRequest) {
logger.info(`Uploading knowledge-base file: ${originalName}`)
const timestamp = Date.now()
const safeFileName = originalName.replace(/\s+/g, '-')
const safeFileName = sanitizeFileName(originalName)
const storageKey = `kb/${timestamp}-${safeFileName}`
const metadata: Record<string, string> = {
@@ -267,9 +268,8 @@ export async function POST(request: NextRequest) {
logger.info(`Uploading ${context} file: ${originalName}`)
// Generate storage key with context prefix and timestamp to ensure uniqueness
const timestamp = Date.now()
const safeFileName = originalName.replace(/\s+/g, '-')
const safeFileName = sanitizeFileName(originalName)
const storageKey = `${context}/${timestamp}-${safeFileName}`
const metadata: Record<string, string> = {

View File

@@ -5,6 +5,7 @@ import { executeInE2B } from '@/lib/execution/e2b'
import { executeInIsolatedVM } from '@/lib/execution/isolated-vm'
import { CodeLanguage, DEFAULT_CODE_LANGUAGE, isValidCodeLanguage } from '@/lib/execution/languages'
import { createLogger } from '@/lib/logs/console/logger'
import { escapeRegExp, normalizeName, REFERENCE } from '@/executor/constants'
import {
createEnvVarPattern,
createWorkflowVariablePattern,
@@ -405,7 +406,7 @@ function resolveWorkflowVariables(
// Find the variable by name (workflowVariables is indexed by ID, values are variable objects)
const foundVariable = Object.entries(workflowVariables).find(
([_, variable]) => (variable.name || '').replace(/\s+/g, '') === variableName
([_, variable]) => normalizeName(variable.name || '') === variableName
)
let variableValue: unknown = ''
@@ -513,31 +514,26 @@ function resolveTagVariables(
): string {
let resolvedCode = code
const tagMatches = resolvedCode.match(/<([a-zA-Z_][a-zA-Z0-9_.]*[a-zA-Z0-9_])>/g) || []
const tagPattern = new RegExp(
`${REFERENCE.START}([a-zA-Z_][a-zA-Z0-9_${REFERENCE.PATH_DELIMITER}]*[a-zA-Z0-9_])${REFERENCE.END}`,
'g'
)
const tagMatches = resolvedCode.match(tagPattern) || []
for (const match of tagMatches) {
const tagName = match.slice(1, -1).trim()
const tagName = match.slice(REFERENCE.START.length, -REFERENCE.END.length).trim()
// Handle nested paths like "getrecord.response.data" or "function1.response.result"
// First try params, then blockData directly, then try with block name mapping
let tagValue = getNestedValue(params, tagName) || getNestedValue(blockData, tagName) || ''
// If not found and the path starts with a block name, try mapping the block name to ID
if (!tagValue && tagName.includes('.')) {
const pathParts = tagName.split('.')
if (!tagValue && tagName.includes(REFERENCE.PATH_DELIMITER)) {
const pathParts = tagName.split(REFERENCE.PATH_DELIMITER)
const normalizedBlockName = pathParts[0] // This should already be normalized like "function1"
// Find the block ID by looking for a block name that normalizes to this value
let blockId = null
for (const [blockName, id] of Object.entries(blockNameMapping)) {
// Apply the same normalization logic as the UI: remove spaces and lowercase
const normalizedName = blockName.replace(/\s+/g, '').toLowerCase()
if (normalizedName === normalizedBlockName) {
blockId = id
break
}
}
// Direct lookup using normalized block name
const blockId = blockNameMapping[normalizedBlockName] ?? null
if (blockId) {
const remainingPath = pathParts.slice(1).join('.')
@@ -617,13 +613,6 @@ function getNestedValue(obj: any, path: string): any {
}, obj)
}
/**
* Escape special regex characters in a string
*/
function escapeRegExp(string: string): string {
return string.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')
}
/**
* Remove one trailing newline from stdout
* This handles the common case where print() or console.log() adds a trailing \n

View File

@@ -1,28 +1,15 @@
import { db } from '@sim/db'
import { permissions, workflow, workflowExecutionLogs } from '@sim/db/schema'
import { and, desc, eq, gte, inArray, lte, type SQL, sql } from 'drizzle-orm'
import { and, desc, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { createLogger } from '@/lib/logs/console/logger'
import { buildFilterConditions, LogFilterParamsSchema } from '@/lib/logs/filters'
const logger = createLogger('LogsExportAPI')
export const revalidate = 0
const ExportParamsSchema = z.object({
level: z.string().optional(),
workflowIds: z.string().optional(),
folderIds: z.string().optional(),
triggers: z.string().optional(),
startDate: z.string().optional(),
endDate: z.string().optional(),
search: z.string().optional(),
workflowName: z.string().optional(),
folderName: z.string().optional(),
workspaceId: z.string(),
})
function escapeCsv(value: any): string {
if (value === null || value === undefined) return ''
const str = String(value)
@@ -41,7 +28,7 @@ export async function GET(request: NextRequest) {
const userId = session.user.id
const { searchParams } = new URL(request.url)
const params = ExportParamsSchema.parse(Object.fromEntries(searchParams.entries()))
const params = LogFilterParamsSchema.parse(Object.fromEntries(searchParams.entries()))
const selectColumns = {
id: workflowExecutionLogs.id,
@@ -57,53 +44,11 @@ export async function GET(request: NextRequest) {
workflowName: workflow.name,
}
let conditions: SQL | undefined = eq(workflowExecutionLogs.workspaceId, params.workspaceId)
if (params.level && params.level !== 'all') {
const levels = params.level.split(',').filter(Boolean)
if (levels.length === 1) {
conditions = and(conditions, eq(workflowExecutionLogs.level, levels[0]))
} else if (levels.length > 1) {
conditions = and(conditions, inArray(workflowExecutionLogs.level, levels))
}
}
if (params.workflowIds) {
const workflowIds = params.workflowIds.split(',').filter(Boolean)
if (workflowIds.length > 0) conditions = and(conditions, inArray(workflow.id, workflowIds))
}
if (params.folderIds) {
const folderIds = params.folderIds.split(',').filter(Boolean)
if (folderIds.length > 0) conditions = and(conditions, inArray(workflow.folderId, folderIds))
}
if (params.triggers) {
const triggers = params.triggers.split(',').filter(Boolean)
if (triggers.length > 0 && !triggers.includes('all')) {
conditions = and(conditions, inArray(workflowExecutionLogs.trigger, triggers))
}
}
if (params.startDate) {
conditions = and(conditions, gte(workflowExecutionLogs.startedAt, new Date(params.startDate)))
}
if (params.endDate) {
conditions = and(conditions, lte(workflowExecutionLogs.startedAt, new Date(params.endDate)))
}
if (params.search) {
const term = `%${params.search}%`
conditions = and(conditions, sql`${workflowExecutionLogs.executionId} ILIKE ${term}`)
}
if (params.workflowName) {
const nameTerm = `%${params.workflowName}%`
conditions = and(conditions, sql`${workflow.name} ILIKE ${nameTerm}`)
}
if (params.folderName) {
const folderTerm = `%${params.folderName}%`
conditions = and(conditions, sql`${workflow.name} ILIKE ${folderTerm}`)
}
const workspaceCondition = eq(workflowExecutionLogs.workspaceId, params.workspaceId)
const filterConditions = buildFilterConditions(params)
const conditions = filterConditions
? and(workspaceCondition, filterConditions)
: workspaceCondition
const header = [
'startedAt',
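A minimal sketch of the shared-filter pattern the export route now relies on: the workspace scope is always applied, and the optional filters parsed by LogFilterParamsSchema are layered on top via buildFilterConditions. The return types are assumptions inferred from the code above.

import { and, eq } from 'drizzle-orm'
import { workflowExecutionLogs } from '@sim/db/schema'
import { buildFilterConditions, LogFilterParamsSchema } from '@/lib/logs/filters'

function buildExportConditions(searchParams: URLSearchParams) {
  // Validate query params with the shared schema (level, workflowIds, dates, search, ...)
  const params = LogFilterParamsSchema.parse(Object.fromEntries(searchParams.entries()))
  // Always scope the query to the requested workspace
  const workspaceCondition = eq(workflowExecutionLogs.workspaceId, params.workspaceId)
  // buildFilterConditions returns a combined condition, or undefined when no filters are set
  const filterConditions = buildFilterConditions(params)
  return filterConditions ? and(workspaceCondition, filterConditions) : workspaceCondition
}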

View File

@@ -6,51 +6,22 @@ import {
workflowDeploymentVersion,
workflowExecutionLogs,
} from '@sim/db/schema'
import {
and,
desc,
eq,
gt,
gte,
inArray,
isNotNull,
isNull,
lt,
lte,
ne,
or,
type SQL,
sql,
} from 'drizzle-orm'
import { and, desc, eq, isNotNull, isNull, or, type SQL, sql } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { generateRequestId } from '@/lib/core/utils/request'
import { createLogger } from '@/lib/logs/console/logger'
import { buildFilterConditions, LogFilterParamsSchema } from '@/lib/logs/filters'
const logger = createLogger('LogsAPI')
export const revalidate = 0
const QueryParamsSchema = z.object({
const QueryParamsSchema = LogFilterParamsSchema.extend({
details: z.enum(['basic', 'full']).optional().default('basic'),
limit: z.coerce.number().optional().default(100),
offset: z.coerce.number().optional().default(0),
level: z.string().optional(),
workflowIds: z.string().optional(),
folderIds: z.string().optional(),
triggers: z.string().optional(),
startDate: z.string().optional(),
endDate: z.string().optional(),
search: z.string().optional(),
workflowName: z.string().optional(),
folderName: z.string().optional(),
executionId: z.string().optional(),
costOperator: z.enum(['=', '>', '<', '>=', '<=', '!=']).optional(),
costValue: z.coerce.number().optional(),
durationOperator: z.enum(['=', '>', '<', '>=', '<=', '!=']).optional(),
durationValue: z.coerce.number().optional(),
workspaceId: z.string(),
})
export async function GET(request: NextRequest) {
@@ -78,6 +49,7 @@ export async function GET(request: NextRequest) {
stateSnapshotId: workflowExecutionLogs.stateSnapshotId,
deploymentVersionId: workflowExecutionLogs.deploymentVersionId,
level: workflowExecutionLogs.level,
status: workflowExecutionLogs.status,
trigger: workflowExecutionLogs.trigger,
startedAt: workflowExecutionLogs.startedAt,
endedAt: workflowExecutionLogs.endedAt,
@@ -107,6 +79,7 @@ export async function GET(request: NextRequest) {
stateSnapshotId: workflowExecutionLogs.stateSnapshotId,
deploymentVersionId: workflowExecutionLogs.deploymentVersionId,
level: workflowExecutionLogs.level,
status: workflowExecutionLogs.status,
trigger: workflowExecutionLogs.trigger,
startedAt: workflowExecutionLogs.startedAt,
endedAt: workflowExecutionLogs.endedAt,
@@ -197,102 +170,11 @@ export async function GET(request: NextRequest) {
}
}
if (params.workflowIds) {
const workflowIds = params.workflowIds.split(',').filter(Boolean)
if (workflowIds.length > 0) {
conditions = and(conditions, inArray(workflow.id, workflowIds))
}
}
if (params.folderIds) {
const folderIds = params.folderIds.split(',').filter(Boolean)
if (folderIds.length > 0) {
conditions = and(conditions, inArray(workflow.folderId, folderIds))
}
}
if (params.triggers) {
const triggers = params.triggers.split(',').filter(Boolean)
if (triggers.length > 0 && !triggers.includes('all')) {
conditions = and(conditions, inArray(workflowExecutionLogs.trigger, triggers))
}
}
if (params.startDate) {
conditions = and(
conditions,
gte(workflowExecutionLogs.startedAt, new Date(params.startDate))
)
}
if (params.endDate) {
conditions = and(conditions, lte(workflowExecutionLogs.startedAt, new Date(params.endDate)))
}
if (params.search) {
const searchTerm = `%${params.search}%`
conditions = and(conditions, sql`${workflowExecutionLogs.executionId} ILIKE ${searchTerm}`)
}
if (params.workflowName) {
const nameTerm = `%${params.workflowName}%`
conditions = and(conditions, sql`${workflow.name} ILIKE ${nameTerm}`)
}
if (params.folderName) {
const folderTerm = `%${params.folderName}%`
conditions = and(conditions, sql`${workflow.name} ILIKE ${folderTerm}`)
}
if (params.executionId) {
conditions = and(conditions, eq(workflowExecutionLogs.executionId, params.executionId))
}
if (params.costOperator && params.costValue !== undefined) {
const costField = sql`(${workflowExecutionLogs.cost}->>'total')::numeric`
switch (params.costOperator) {
case '=':
conditions = and(conditions, sql`${costField} = ${params.costValue}`)
break
case '>':
conditions = and(conditions, sql`${costField} > ${params.costValue}`)
break
case '<':
conditions = and(conditions, sql`${costField} < ${params.costValue}`)
break
case '>=':
conditions = and(conditions, sql`${costField} >= ${params.costValue}`)
break
case '<=':
conditions = and(conditions, sql`${costField} <= ${params.costValue}`)
break
case '!=':
conditions = and(conditions, sql`${costField} != ${params.costValue}`)
break
}
}
if (params.durationOperator && params.durationValue !== undefined) {
const durationField = workflowExecutionLogs.totalDurationMs
switch (params.durationOperator) {
case '=':
conditions = and(conditions, eq(durationField, params.durationValue))
break
case '>':
conditions = and(conditions, gt(durationField, params.durationValue))
break
case '<':
conditions = and(conditions, lt(durationField, params.durationValue))
break
case '>=':
conditions = and(conditions, gte(durationField, params.durationValue))
break
case '<=':
conditions = and(conditions, lte(durationField, params.durationValue))
break
case '!=':
conditions = and(conditions, ne(durationField, params.durationValue))
break
}
// Apply common filters (workflowIds, folderIds, triggers, dates, search, cost, duration)
// Level filtering is handled above with advanced running/pending state logic
const commonFilters = buildFilterConditions(params, { useSimpleLevelFilter: false })
if (commonFilters) {
conditions = and(conditions, commonFilters)
}
const logs = await baseQuery
@@ -379,15 +261,16 @@ export async function GET(request: NextRequest) {
input: 0,
output: 0,
total: 0,
tokens: { prompt: 0, completion: 0, total: 0 },
tokens: { input: 0, output: 0, total: 0 },
})
}
const modelCost = models.get(block.cost.model)
modelCost.input += Number(block.cost.input) || 0
modelCost.output += Number(block.cost.output) || 0
modelCost.total += Number(block.cost.total) || 0
modelCost.tokens.prompt += block.cost.tokens?.prompt || 0
modelCost.tokens.completion += block.cost.tokens?.completion || 0
modelCost.tokens.input += block.cost.tokens?.input || block.cost.tokens?.prompt || 0
modelCost.tokens.output +=
block.cost.tokens?.output || block.cost.tokens?.completion || 0
modelCost.tokens.total += block.cost.tokens?.total || 0
}
}
@@ -399,8 +282,8 @@ export async function GET(request: NextRequest) {
output: totalOutputCost,
tokens: {
total: totalTokens,
prompt: totalPromptTokens,
completion: totalCompletionTokens,
input: totalPromptTokens,
output: totalCompletionTokens,
},
models: Object.fromEntries(models),
}
@@ -451,6 +334,7 @@ export async function GET(request: NextRequest) {
deploymentVersion: log.deploymentVersion ?? null,
deploymentVersionName: log.deploymentVersionName ?? null,
level: log.level,
status: log.status,
duration: log.totalDurationMs ? `${log.totalDurationMs}ms` : null,
trigger: log.trigger,
createdAt: log.startedAt.toISOString(),
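A short sketch of the token-field fallback used in the cost aggregation above: older per-block cost entries may still report { prompt, completion } counters while newer ones report { input, output }. The interface below is an assumption that covers both shapes.

interface TokenCounts {
  prompt?: number
  completion?: number
  input?: number
  output?: number
  total?: number
}

function normalizeTokens(tokens: TokenCounts | undefined) {
  return {
    // Prefer the new field names, fall back to the legacy ones
    input: tokens?.input || tokens?.prompt || 0,
    output: tokens?.output || tokens?.completion || 0,
    total: tokens?.total || 0,
  }
}

// normalizeTokens({ prompt: 12, completion: 30, total: 42 }) -> { input: 12, output: 30, total: 42 }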

View File

@@ -6,6 +6,8 @@ import { getParsedBody, withMcpAuth } from '@/lib/mcp/middleware'
import type { McpServerConfig, McpTransport } from '@/lib/mcp/types'
import { validateMcpServerUrl } from '@/lib/mcp/url-validator'
import { createMcpErrorResponse, createMcpSuccessResponse } from '@/lib/mcp/utils'
import { REFERENCE } from '@/executor/constants'
import { createEnvVarPattern } from '@/executor/utils/reference-validation'
const logger = createLogger('McpServerTestAPI')
@@ -23,12 +25,13 @@ function isUrlBasedTransport(transport: McpTransport): boolean {
* Resolve environment variables in strings
*/
function resolveEnvVars(value: string, envVars: Record<string, string>): string {
const envMatches = value.match(/\{\{([^}]+)\}\}/g)
const envVarPattern = createEnvVarPattern()
const envMatches = value.match(envVarPattern)
if (!envMatches) return value
let resolvedValue = value
for (const match of envMatches) {
const envKey = match.slice(2, -2).trim()
const envKey = match.slice(REFERENCE.ENV_VAR_START.length, -REFERENCE.ENV_VAR_END.length).trim()
const envValue = envVars[envKey]
if (envValue === undefined) {

View File

@@ -1,6 +1,10 @@
import { db } from '@sim/db'
import { account } from '@sim/db/schema'
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { generateRequestId } from '@/lib/core/utils/request'
import { createLogger } from '@/lib/logs/console/logger'
import { refreshTokenIfNeeded } from '@/app/api/auth/oauth/utils'
import type { StreamingExecution } from '@/executor/types'
import { executeProviderRequest } from '@/providers'
import { getApiKey } from '@/providers/utils'
@@ -37,6 +41,7 @@ export async function POST(request: NextRequest) {
azureApiVersion,
vertexProject,
vertexLocation,
vertexCredential,
responseFormat,
workflowId,
workspaceId,
@@ -62,6 +67,7 @@ export async function POST(request: NextRequest) {
hasAzureApiVersion: !!azureApiVersion,
hasVertexProject: !!vertexProject,
hasVertexLocation: !!vertexLocation,
hasVertexCredential: !!vertexCredential,
hasResponseFormat: !!responseFormat,
workflowId,
stream: !!stream,
@@ -76,13 +82,18 @@ export async function POST(request: NextRequest) {
let finalApiKey: string
try {
finalApiKey = getApiKey(provider, model, apiKey)
if (provider === 'vertex' && vertexCredential) {
finalApiKey = await resolveVertexCredential(requestId, vertexCredential)
} else {
finalApiKey = getApiKey(provider, model, apiKey)
}
} catch (error) {
logger.error(`[${requestId}] Failed to get API key:`, {
provider,
model,
error: error instanceof Error ? error.message : String(error),
hasProvidedApiKey: !!apiKey,
hasVertexCredential: !!vertexCredential,
})
return NextResponse.json(
{ error: error instanceof Error ? error.message : 'API key error' },
@@ -165,8 +176,8 @@ export async function POST(request: NextRequest) {
: '',
model: executionData.output?.model,
tokens: executionData.output?.tokens || {
prompt: 0,
completion: 0,
input: 0,
output: 0,
total: 0,
},
// Sanitize any potential Unicode characters in tool calls
@@ -324,3 +335,27 @@ function sanitizeObject(obj: any): any {
return result
}
/**
* Resolves a Vertex AI OAuth credential to an access token
*/
async function resolveVertexCredential(requestId: string, credentialId: string): Promise<string> {
logger.info(`[${requestId}] Resolving Vertex AI credential: ${credentialId}`)
const credential = await db.query.account.findFirst({
where: eq(account.id, credentialId),
})
if (!credential) {
throw new Error(`Vertex AI credential not found: ${credentialId}`)
}
const { accessToken } = await refreshTokenIfNeeded(requestId, credential, credentialId)
if (!accessToken) {
throw new Error('Failed to get Vertex AI access token')
}
logger.info(`[${requestId}] Successfully resolved Vertex AI credential`)
return accessToken
}
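A condensed sketch of the key-selection branch introduced above: requests targeting the Vertex provider may pass an OAuth credential id instead of a raw API key, which resolveVertexCredential (defined just above) exchanges for an access token; every other provider keeps the existing getApiKey lookup.

import { getApiKey } from '@/providers/utils'

async function selectApiKey(
  requestId: string,
  provider: string,
  model: string,
  apiKey?: string,
  vertexCredential?: string
): Promise<string> {
  if (provider === 'vertex' && vertexCredential) {
    // OAuth credential id -> short-lived access token (refreshed if expired)
    return resolveVertexCredential(requestId, vertexCredential)
  }
  // Default path: resolve the provider/model API key as before
  return getApiKey(provider, model, apiKey)
}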

View File

@@ -0,0 +1,652 @@
/**
* Tests for schedule reactivate PUT API route
*
* @vitest-environment node
*/
import { NextRequest } from 'next/server'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
const { mockGetSession, mockGetUserEntityPermissions, mockDbSelect, mockDbUpdate } = vi.hoisted(
() => ({
mockGetSession: vi.fn(),
mockGetUserEntityPermissions: vi.fn(),
mockDbSelect: vi.fn(),
mockDbUpdate: vi.fn(),
})
)
vi.mock('@/lib/auth', () => ({
getSession: mockGetSession,
}))
vi.mock('@/lib/workspaces/permissions/utils', () => ({
getUserEntityPermissions: mockGetUserEntityPermissions,
}))
vi.mock('@sim/db', () => ({
db: {
select: mockDbSelect,
update: mockDbUpdate,
},
}))
vi.mock('@sim/db/schema', () => ({
workflow: { id: 'id', userId: 'userId', workspaceId: 'workspaceId' },
workflowSchedule: { id: 'id', workflowId: 'workflowId', status: 'status' },
}))
vi.mock('drizzle-orm', () => ({
eq: vi.fn(),
}))
vi.mock('@/lib/core/utils/request', () => ({
generateRequestId: () => 'test-request-id',
}))
vi.mock('@/lib/logs/console/logger', () => ({
createLogger: () => ({
info: vi.fn(),
warn: vi.fn(),
error: vi.fn(),
debug: vi.fn(),
}),
}))
import { PUT } from './route'
function createRequest(body: Record<string, unknown>): NextRequest {
return new NextRequest(new URL('http://test/api/schedules/sched-1'), {
method: 'PUT',
body: JSON.stringify(body),
headers: { 'Content-Type': 'application/json' },
})
}
function createParams(id: string): { params: Promise<{ id: string }> } {
return { params: Promise.resolve({ id }) }
}
function mockDbChain(selectResults: unknown[][]) {
let selectCallIndex = 0
mockDbSelect.mockImplementation(() => ({
from: () => ({
where: () => ({
limit: () => selectResults[selectCallIndex++] || [],
}),
}),
}))
mockDbUpdate.mockImplementation(() => ({
set: () => ({
where: vi.fn().mockResolvedValue({}),
}),
}))
}
describe('Schedule PUT API (Reactivate)', () => {
beforeEach(() => {
vi.clearAllMocks()
mockGetSession.mockResolvedValue({ user: { id: 'user-1' } })
mockGetUserEntityPermissions.mockResolvedValue('write')
})
afterEach(() => {
vi.clearAllMocks()
})
describe('Authentication', () => {
it('returns 401 when user is not authenticated', async () => {
mockGetSession.mockResolvedValue(null)
const res = await PUT(createRequest({ action: 'reactivate' }), createParams('sched-1'))
expect(res.status).toBe(401)
const data = await res.json()
expect(data.error).toBe('Unauthorized')
})
})
describe('Request Validation', () => {
it('returns 400 when action is not reactivate', async () => {
mockDbChain([
[{ id: 'sched-1', workflowId: 'wf-1', status: 'disabled' }],
[{ userId: 'user-1', workspaceId: null }],
])
const res = await PUT(createRequest({ action: 'disable' }), createParams('sched-1'))
expect(res.status).toBe(400)
const data = await res.json()
expect(data.error).toBe('Invalid request body')
})
it('returns 400 when action is missing', async () => {
mockDbChain([
[{ id: 'sched-1', workflowId: 'wf-1', status: 'disabled' }],
[{ userId: 'user-1', workspaceId: null }],
])
const res = await PUT(createRequest({}), createParams('sched-1'))
expect(res.status).toBe(400)
const data = await res.json()
expect(data.error).toBe('Invalid request body')
})
})
describe('Schedule Not Found', () => {
it('returns 404 when schedule does not exist', async () => {
mockDbChain([[]])
const res = await PUT(createRequest({ action: 'reactivate' }), createParams('sched-999'))
expect(res.status).toBe(404)
const data = await res.json()
expect(data.error).toBe('Schedule not found')
})
it('returns 404 when workflow does not exist for schedule', async () => {
mockDbChain([[{ id: 'sched-1', workflowId: 'wf-1', status: 'disabled' }], []])
const res = await PUT(createRequest({ action: 'reactivate' }), createParams('sched-1'))
expect(res.status).toBe(404)
const data = await res.json()
expect(data.error).toBe('Workflow not found')
})
})
describe('Authorization', () => {
it('returns 403 when user is not workflow owner', async () => {
mockDbChain([
[{ id: 'sched-1', workflowId: 'wf-1', status: 'disabled' }],
[{ userId: 'other-user', workspaceId: null }],
])
const res = await PUT(createRequest({ action: 'reactivate' }), createParams('sched-1'))
expect(res.status).toBe(403)
const data = await res.json()
expect(data.error).toBe('Not authorized to modify this schedule')
})
it('returns 403 for workspace member with only read permission', async () => {
mockGetUserEntityPermissions.mockResolvedValue('read')
mockDbChain([
[{ id: 'sched-1', workflowId: 'wf-1', status: 'disabled' }],
[{ userId: 'other-user', workspaceId: 'ws-1' }],
])
const res = await PUT(createRequest({ action: 'reactivate' }), createParams('sched-1'))
expect(res.status).toBe(403)
})
it('allows workflow owner to reactivate', async () => {
mockDbChain([
[
{
id: 'sched-1',
workflowId: 'wf-1',
status: 'disabled',
cronExpression: '*/5 * * * *',
timezone: 'UTC',
},
],
[{ userId: 'user-1', workspaceId: null }],
])
const res = await PUT(createRequest({ action: 'reactivate' }), createParams('sched-1'))
expect(res.status).toBe(200)
const data = await res.json()
expect(data.message).toBe('Schedule activated successfully')
})
it('allows workspace member with write permission to reactivate', async () => {
mockGetUserEntityPermissions.mockResolvedValue('write')
mockDbChain([
[
{
id: 'sched-1',
workflowId: 'wf-1',
status: 'disabled',
cronExpression: '*/5 * * * *',
timezone: 'UTC',
},
],
[{ userId: 'other-user', workspaceId: 'ws-1' }],
])
const res = await PUT(createRequest({ action: 'reactivate' }), createParams('sched-1'))
expect(res.status).toBe(200)
})
it('allows workspace admin to reactivate', async () => {
mockGetUserEntityPermissions.mockResolvedValue('admin')
mockDbChain([
[
{
id: 'sched-1',
workflowId: 'wf-1',
status: 'disabled',
cronExpression: '*/5 * * * *',
timezone: 'UTC',
},
],
[{ userId: 'other-user', workspaceId: 'ws-1' }],
])
const res = await PUT(createRequest({ action: 'reactivate' }), createParams('sched-1'))
expect(res.status).toBe(200)
})
})
describe('Schedule State Handling', () => {
it('returns success message when schedule is already active', async () => {
mockDbChain([
[{ id: 'sched-1', workflowId: 'wf-1', status: 'active' }],
[{ userId: 'user-1', workspaceId: null }],
])
const res = await PUT(createRequest({ action: 'reactivate' }), createParams('sched-1'))
expect(res.status).toBe(200)
const data = await res.json()
expect(data.message).toBe('Schedule is already active')
expect(mockDbUpdate).not.toHaveBeenCalled()
})
it('successfully reactivates disabled schedule', async () => {
mockDbChain([
[
{
id: 'sched-1',
workflowId: 'wf-1',
status: 'disabled',
cronExpression: '*/5 * * * *',
timezone: 'UTC',
},
],
[{ userId: 'user-1', workspaceId: null }],
])
const res = await PUT(createRequest({ action: 'reactivate' }), createParams('sched-1'))
expect(res.status).toBe(200)
const data = await res.json()
expect(data.message).toBe('Schedule activated successfully')
expect(data.nextRunAt).toBeDefined()
expect(mockDbUpdate).toHaveBeenCalled()
})
it('returns 400 when schedule has no cron expression', async () => {
mockDbChain([
[
{
id: 'sched-1',
workflowId: 'wf-1',
status: 'disabled',
cronExpression: null,
timezone: 'UTC',
},
],
[{ userId: 'user-1', workspaceId: null }],
])
const res = await PUT(createRequest({ action: 'reactivate' }), createParams('sched-1'))
expect(res.status).toBe(400)
const data = await res.json()
expect(data.error).toBe('Schedule has no cron expression')
})
it('returns 400 when schedule has invalid cron expression', async () => {
mockDbChain([
[
{
id: 'sched-1',
workflowId: 'wf-1',
status: 'disabled',
cronExpression: 'invalid-cron',
timezone: 'UTC',
},
],
[{ userId: 'user-1', workspaceId: null }],
])
const res = await PUT(createRequest({ action: 'reactivate' }), createParams('sched-1'))
expect(res.status).toBe(400)
const data = await res.json()
expect(data.error).toBe('Schedule has invalid cron expression')
})
it('calculates nextRunAt from stored cron expression (every 5 minutes)', async () => {
mockDbChain([
[
{
id: 'sched-1',
workflowId: 'wf-1',
status: 'disabled',
cronExpression: '*/5 * * * *',
timezone: 'UTC',
},
],
[{ userId: 'user-1', workspaceId: null }],
])
const beforeCall = Date.now()
const res = await PUT(createRequest({ action: 'reactivate' }), createParams('sched-1'))
const afterCall = Date.now()
expect(res.status).toBe(200)
const data = await res.json()
const nextRunAt = new Date(data.nextRunAt).getTime()
// nextRunAt should be within 0-5 minutes in the future
expect(nextRunAt).toBeGreaterThan(beforeCall)
expect(nextRunAt).toBeLessThanOrEqual(afterCall + 5 * 60 * 1000 + 1000)
// Should align with 5-minute intervals (minute divisible by 5)
expect(new Date(nextRunAt).getMinutes() % 5).toBe(0)
})
it('calculates nextRunAt from daily cron expression', async () => {
mockDbChain([
[
{
id: 'sched-1',
workflowId: 'wf-1',
status: 'disabled',
cronExpression: '30 14 * * *', // 2:30 PM daily
timezone: 'UTC',
},
],
[{ userId: 'user-1', workspaceId: null }],
])
const beforeCall = Date.now()
const res = await PUT(createRequest({ action: 'reactivate' }), createParams('sched-1'))
expect(res.status).toBe(200)
const data = await res.json()
const nextRunAt = new Date(data.nextRunAt)
// Should be a future date at 14:30 UTC
expect(nextRunAt.getTime()).toBeGreaterThan(beforeCall)
expect(nextRunAt.getUTCHours()).toBe(14)
expect(nextRunAt.getUTCMinutes()).toBe(30)
expect(nextRunAt.getUTCSeconds()).toBe(0)
})
it('calculates nextRunAt from weekly cron expression', async () => {
mockDbChain([
[
{
id: 'sched-1',
workflowId: 'wf-1',
status: 'disabled',
cronExpression: '0 9 * * 1', // Monday at 9:00 AM
timezone: 'UTC',
},
],
[{ userId: 'user-1', workspaceId: null }],
])
const beforeCall = Date.now()
const res = await PUT(createRequest({ action: 'reactivate' }), createParams('sched-1'))
expect(res.status).toBe(200)
const data = await res.json()
const nextRunAt = new Date(data.nextRunAt)
// Should be a future date on Monday at 09:00 UTC
expect(nextRunAt.getTime()).toBeGreaterThan(beforeCall)
expect(nextRunAt.getUTCDay()).toBe(1) // Monday
expect(nextRunAt.getUTCHours()).toBe(9)
expect(nextRunAt.getUTCMinutes()).toBe(0)
})
it('calculates nextRunAt from monthly cron expression', async () => {
mockDbChain([
[
{
id: 'sched-1',
workflowId: 'wf-1',
status: 'disabled',
cronExpression: '0 10 15 * *', // 15th of month at 10:00 AM
timezone: 'UTC',
},
],
[{ userId: 'user-1', workspaceId: null }],
])
const beforeCall = Date.now()
const res = await PUT(createRequest({ action: 'reactivate' }), createParams('sched-1'))
expect(res.status).toBe(200)
const data = await res.json()
const nextRunAt = new Date(data.nextRunAt)
// Should be a future date on the 15th at 10:00 UTC
expect(nextRunAt.getTime()).toBeGreaterThan(beforeCall)
expect(nextRunAt.getUTCDate()).toBe(15)
expect(nextRunAt.getUTCHours()).toBe(10)
expect(nextRunAt.getUTCMinutes()).toBe(0)
})
})
describe('Timezone Handling in Reactivation', () => {
it('calculates nextRunAt with America/New_York timezone', async () => {
mockDbChain([
[
{
id: 'sched-1',
workflowId: 'wf-1',
status: 'disabled',
cronExpression: '0 9 * * *', // 9:00 AM Eastern
timezone: 'America/New_York',
},
],
[{ userId: 'user-1', workspaceId: null }],
])
const beforeCall = Date.now()
const res = await PUT(createRequest({ action: 'reactivate' }), createParams('sched-1'))
expect(res.status).toBe(200)
const data = await res.json()
const nextRunAt = new Date(data.nextRunAt)
// Should be a future date
expect(nextRunAt.getTime()).toBeGreaterThan(beforeCall)
// The exact UTC hour will depend on DST, but it should be 13:00 or 14:00 UTC
const utcHour = nextRunAt.getUTCHours()
expect([13, 14]).toContain(utcHour) // 9 AM ET = 1-2 PM UTC depending on DST
expect(nextRunAt.getUTCMinutes()).toBe(0)
})
it('calculates nextRunAt with Asia/Tokyo timezone', async () => {
mockDbChain([
[
{
id: 'sched-1',
workflowId: 'wf-1',
status: 'disabled',
cronExpression: '30 15 * * *', // 3:30 PM Japan Time
timezone: 'Asia/Tokyo',
},
],
[{ userId: 'user-1', workspaceId: null }],
])
const beforeCall = Date.now()
const res = await PUT(createRequest({ action: 'reactivate' }), createParams('sched-1'))
expect(res.status).toBe(200)
const data = await res.json()
const nextRunAt = new Date(data.nextRunAt)
// Should be a future date
expect(nextRunAt.getTime()).toBeGreaterThan(beforeCall)
// 3:30 PM JST (UTC+9) = 6:30 AM UTC
expect(nextRunAt.getUTCHours()).toBe(6)
expect(nextRunAt.getUTCMinutes()).toBe(30)
})
it('calculates nextRunAt with Europe/London timezone', async () => {
mockDbChain([
[
{
id: 'sched-1',
workflowId: 'wf-1',
status: 'disabled',
cronExpression: '0 12 * * 5', // Friday at noon London time
timezone: 'Europe/London',
},
],
[{ userId: 'user-1', workspaceId: null }],
])
const beforeCall = Date.now()
const res = await PUT(createRequest({ action: 'reactivate' }), createParams('sched-1'))
expect(res.status).toBe(200)
const data = await res.json()
const nextRunAt = new Date(data.nextRunAt)
// Should be a future date on Friday
expect(nextRunAt.getTime()).toBeGreaterThan(beforeCall)
expect(nextRunAt.getUTCDay()).toBe(5) // Friday
// UTC hour depends on BST/GMT (11:00 or 12:00 UTC)
const utcHour = nextRunAt.getUTCHours()
expect([11, 12]).toContain(utcHour)
expect(nextRunAt.getUTCMinutes()).toBe(0)
})
it('uses UTC as default timezone when timezone is not set', async () => {
mockDbChain([
[
{
id: 'sched-1',
workflowId: 'wf-1',
status: 'disabled',
cronExpression: '0 10 * * *', // 10:00 AM
timezone: null,
},
],
[{ userId: 'user-1', workspaceId: null }],
])
const beforeCall = Date.now()
const res = await PUT(createRequest({ action: 'reactivate' }), createParams('sched-1'))
expect(res.status).toBe(200)
const data = await res.json()
const nextRunAt = new Date(data.nextRunAt)
// Should be a future date at 10:00 UTC
expect(nextRunAt.getTime()).toBeGreaterThan(beforeCall)
expect(nextRunAt.getUTCHours()).toBe(10)
expect(nextRunAt.getUTCMinutes()).toBe(0)
})
it('handles minutely schedules with timezone correctly', async () => {
mockDbChain([
[
{
id: 'sched-1',
workflowId: 'wf-1',
status: 'disabled',
cronExpression: '*/10 * * * *', // Every 10 minutes
timezone: 'America/Los_Angeles',
},
],
[{ userId: 'user-1', workspaceId: null }],
])
const beforeCall = Date.now()
const res = await PUT(createRequest({ action: 'reactivate' }), createParams('sched-1'))
expect(res.status).toBe(200)
const data = await res.json()
const nextRunAt = new Date(data.nextRunAt)
// Should be a future date within the next 10 minutes
expect(nextRunAt.getTime()).toBeGreaterThan(beforeCall)
expect(nextRunAt.getTime()).toBeLessThanOrEqual(beforeCall + 10 * 60 * 1000 + 1000)
// Should align with 10-minute intervals
expect(nextRunAt.getMinutes() % 10).toBe(0)
})
it('handles hourly schedules with timezone correctly', async () => {
mockDbChain([
[
{
id: 'sched-1',
workflowId: 'wf-1',
status: 'disabled',
cronExpression: '15 * * * *', // At minute 15 of every hour
timezone: 'America/Chicago',
},
],
[{ userId: 'user-1', workspaceId: null }],
])
const beforeCall = Date.now()
const res = await PUT(createRequest({ action: 'reactivate' }), createParams('sched-1'))
expect(res.status).toBe(200)
const data = await res.json()
const nextRunAt = new Date(data.nextRunAt)
// Should be a future date at minute 15
expect(nextRunAt.getTime()).toBeGreaterThan(beforeCall)
expect(nextRunAt.getMinutes()).toBe(15)
expect(nextRunAt.getSeconds()).toBe(0)
})
it('handles custom cron expressions with complex patterns and timezone', async () => {
mockDbChain([
[
{
id: 'sched-1',
workflowId: 'wf-1',
status: 'disabled',
cronExpression: '0 9 * * 1-5', // Weekdays at 9 AM
timezone: 'America/New_York',
},
],
[{ userId: 'user-1', workspaceId: null }],
])
const beforeCall = Date.now()
const res = await PUT(createRequest({ action: 'reactivate' }), createParams('sched-1'))
expect(res.status).toBe(200)
const data = await res.json()
const nextRunAt = new Date(data.nextRunAt)
// Should be a future date on a weekday (1-5)
expect(nextRunAt.getTime()).toBeGreaterThan(beforeCall)
const dayOfWeek = nextRunAt.getUTCDay()
expect([1, 2, 3, 4, 5]).toContain(dayOfWeek)
})
})
describe('Error Handling', () => {
it('returns 500 when database operation fails', async () => {
mockDbSelect.mockImplementation(() => {
throw new Error('Database connection failed')
})
const res = await PUT(createRequest({ action: 'reactivate' }), createParams('sched-1'))
expect(res.status).toBe(500)
const data = await res.json()
expect(data.error).toBe('Failed to update schedule')
})
})
})
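A minimal client-side sketch of the call these tests exercise; the path and body shape are taken from the requests above, and the success fields (message, nextRunAt) from the assertions, so anything else about the response is an assumption.

async function reactivateSchedule(scheduleId: string): Promise<Date | null> {
  const res = await fetch(`/api/schedules/${scheduleId}`, {
    method: 'PUT',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ action: 'reactivate' }),
  })
  if (!res.ok) {
    // 400 invalid body / bad cron, 401 unauthenticated, 403 no write access, 404 not found
    const { error } = await res.json()
    throw new Error(error ?? `Reactivation failed with status ${res.status}`)
  }
  const data: { message: string; nextRunAt?: string } = await res.json()
  // nextRunAt is present when the schedule was actually reactivated,
  // absent when it was already active
  return data.nextRunAt ? new Date(data.nextRunAt) : null
}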

View File

@@ -6,104 +6,26 @@ import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { generateRequestId } from '@/lib/core/utils/request'
import { createLogger } from '@/lib/logs/console/logger'
import { validateCronExpression } from '@/lib/workflows/schedules/utils'
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
const logger = createLogger('ScheduleAPI')
export const dynamic = 'force-dynamic'
const scheduleActionEnum = z.enum(['reactivate', 'disable'])
const scheduleStatusEnum = z.enum(['active', 'disabled'])
const scheduleUpdateSchema = z
.object({
action: scheduleActionEnum.optional(),
status: scheduleStatusEnum.optional(),
})
.refine((data) => data.action || data.status, {
message: 'Either action or status must be provided',
})
const scheduleUpdateSchema = z.object({
action: z.literal('reactivate'),
})
/**
* Delete a schedule
*/
export async function DELETE(
request: NextRequest,
{ params }: { params: Promise<{ id: string }> }
) {
const requestId = generateRequestId()
try {
const { id } = await params
logger.debug(`[${requestId}] Deleting schedule with ID: ${id}`)
const session = await getSession()
if (!session?.user?.id) {
logger.warn(`[${requestId}] Unauthorized schedule deletion attempt`)
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
// Find the schedule and check ownership
const schedules = await db
.select({
schedule: workflowSchedule,
workflow: {
id: workflow.id,
userId: workflow.userId,
workspaceId: workflow.workspaceId,
},
})
.from(workflowSchedule)
.innerJoin(workflow, eq(workflowSchedule.workflowId, workflow.id))
.where(eq(workflowSchedule.id, id))
.limit(1)
if (schedules.length === 0) {
logger.warn(`[${requestId}] Schedule not found: ${id}`)
return NextResponse.json({ error: 'Schedule not found' }, { status: 404 })
}
const workflowRecord = schedules[0].workflow
// Check authorization - either the user owns the workflow or has write/admin workspace permissions
let isAuthorized = workflowRecord.userId === session.user.id
// If not authorized by ownership and the workflow belongs to a workspace, check workspace permissions
if (!isAuthorized && workflowRecord.workspaceId) {
const userPermission = await getUserEntityPermissions(
session.user.id,
'workspace',
workflowRecord.workspaceId
)
isAuthorized = userPermission === 'write' || userPermission === 'admin'
}
if (!isAuthorized) {
logger.warn(`[${requestId}] Unauthorized schedule deletion attempt for schedule: ${id}`)
return NextResponse.json({ error: 'Unauthorized' }, { status: 403 })
}
// Delete the schedule
await db.delete(workflowSchedule).where(eq(workflowSchedule.id, id))
logger.info(`[${requestId}] Successfully deleted schedule: ${id}`)
return NextResponse.json({ success: true }, { status: 200 })
} catch (error) {
logger.error(`[${requestId}] Error deleting schedule`, error)
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
}
}
/**
* Update a schedule - can be used to reactivate a disabled schedule
* Reactivate a disabled schedule
*/
export async function PUT(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
const requestId = generateRequestId()
try {
const { id } = await params
const scheduleId = id
logger.debug(`[${requestId}] Updating schedule with ID: ${scheduleId}`)
const { id: scheduleId } = await params
logger.debug(`[${requestId}] Reactivating schedule with ID: ${scheduleId}`)
const session = await getSession()
if (!session?.user?.id) {
@@ -115,18 +37,16 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
const validation = scheduleUpdateSchema.safeParse(body)
if (!validation.success) {
const firstError = validation.error.errors[0]
logger.warn(`[${requestId}] Validation error:`, firstError)
return NextResponse.json({ error: firstError.message }, { status: 400 })
return NextResponse.json({ error: 'Invalid request body' }, { status: 400 })
}
const { action, status: requestedStatus } = validation.data
const [schedule] = await db
.select({
id: workflowSchedule.id,
workflowId: workflowSchedule.workflowId,
status: workflowSchedule.status,
cronExpression: workflowSchedule.cronExpression,
timezone: workflowSchedule.timezone,
})
.from(workflowSchedule)
.where(eq(workflowSchedule.id, scheduleId))
@@ -164,57 +84,40 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
return NextResponse.json({ error: 'Not authorized to modify this schedule' }, { status: 403 })
}
if (action === 'reactivate' || (requestedStatus && requestedStatus === 'active')) {
if (schedule.status === 'active') {
return NextResponse.json({ message: 'Schedule is already active' }, { status: 200 })
}
if (schedule.status === 'active') {
return NextResponse.json({ message: 'Schedule is already active' }, { status: 200 })
}
const now = new Date()
const nextRunAt = new Date(now.getTime() + 60 * 1000) // Schedule to run in 1 minute
if (!schedule.cronExpression) {
logger.error(`[${requestId}] Schedule has no cron expression: ${scheduleId}`)
return NextResponse.json({ error: 'Schedule has no cron expression' }, { status: 400 })
}
await db
.update(workflowSchedule)
.set({
status: 'active',
failedCount: 0,
updatedAt: now,
nextRunAt,
})
.where(eq(workflowSchedule.id, scheduleId))
const cronResult = validateCronExpression(schedule.cronExpression, schedule.timezone || 'UTC')
if (!cronResult.isValid || !cronResult.nextRun) {
logger.error(`[${requestId}] Invalid cron expression for schedule: ${scheduleId}`)
return NextResponse.json({ error: 'Schedule has invalid cron expression' }, { status: 400 })
}
logger.info(`[${requestId}] Reactivated schedule: ${scheduleId}`)
const now = new Date()
const nextRunAt = cronResult.nextRun
return NextResponse.json({
message: 'Schedule activated successfully',
await db
.update(workflowSchedule)
.set({
status: 'active',
failedCount: 0,
updatedAt: now,
nextRunAt,
})
}
.where(eq(workflowSchedule.id, scheduleId))
if (action === 'disable' || (requestedStatus && requestedStatus === 'disabled')) {
if (schedule.status === 'disabled') {
return NextResponse.json({ message: 'Schedule is already disabled' }, { status: 200 })
}
logger.info(`[${requestId}] Reactivated schedule: ${scheduleId}`)
const now = new Date()
await db
.update(workflowSchedule)
.set({
status: 'disabled',
updatedAt: now,
nextRunAt: null, // Clear next run time when disabled
})
.where(eq(workflowSchedule.id, scheduleId))
logger.info(`[${requestId}] Disabled schedule: ${scheduleId}`)
return NextResponse.json({
message: 'Schedule disabled successfully',
})
}
logger.warn(`[${requestId}] Unsupported update action for schedule: ${scheduleId}`)
return NextResponse.json({ error: 'Unsupported update action' }, { status: 400 })
return NextResponse.json({
message: 'Schedule activated successfully',
nextRunAt,
})
} catch (error) {
logger.error(`[${requestId}] Error updating schedule`, error)
return NextResponse.json({ error: 'Failed to update schedule' }, { status: 500 })
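A short sketch of how the handler above derives the next run during reactivation, assuming validateCronExpression returns { isValid, nextRun?, error? } as its uses in this diff suggest.

import { validateCronExpression } from '@/lib/workflows/schedules/utils'

function computeNextRunAt(cronExpression: string, timezone: string | null): Date {
  const result = validateCronExpression(cronExpression, timezone || 'UTC')
  if (!result.isValid || !result.nextRun) {
    throw new Error(result.error ?? 'Schedule has invalid cron expression')
  }
  return result.nextRun
}

// computeNextRunAt('*/5 * * * *', 'UTC') -> the next wall-clock minute divisible by 5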

View File

@@ -1,143 +0,0 @@
/**
* Integration tests for schedule status API route
*
* @vitest-environment node
*/
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import { createMockRequest, mockScheduleStatusDb } from '@/app/api/__test-utils__/utils'
// Common mocks
const mockSchedule = {
id: 'schedule-id',
workflowId: 'workflow-id',
status: 'active',
failedCount: 0,
lastRanAt: new Date('2024-01-01T00:00:00.000Z'),
lastFailedAt: null,
nextRunAt: new Date('2024-01-02T00:00:00.000Z'),
}
beforeEach(() => {
vi.resetModules()
vi.doMock('@/lib/logs/console/logger', () => ({
createLogger: () => ({ info: vi.fn(), warn: vi.fn(), error: vi.fn(), debug: vi.fn() }),
}))
vi.doMock('crypto', () => ({
randomUUID: vi.fn(() => 'test-uuid'),
default: { randomUUID: vi.fn(() => 'test-uuid') },
}))
})
afterEach(() => {
vi.clearAllMocks()
})
describe('Schedule Status API Route', () => {
it('returns schedule status successfully', async () => {
mockScheduleStatusDb({}) // default mocks
vi.doMock('@/lib/auth', () => ({
getSession: vi.fn().mockResolvedValue({ user: { id: 'user-id' } }),
}))
const req = createMockRequest('GET')
const { GET } = await import('@/app/api/schedules/[id]/status/route')
const res = await GET(req, { params: Promise.resolve({ id: 'schedule-id' }) })
expect(res.status).toBe(200)
const data = await res.json()
expect(data).toMatchObject({
status: 'active',
failedCount: 0,
nextRunAt: mockSchedule.nextRunAt.toISOString(),
isDisabled: false,
})
})
it('marks disabled schedules with isDisabled = true', async () => {
mockScheduleStatusDb({ schedule: [{ ...mockSchedule, status: 'disabled' }] })
vi.doMock('@/lib/auth', () => ({
getSession: vi.fn().mockResolvedValue({ user: { id: 'user-id' } }),
}))
const req = createMockRequest('GET')
const { GET } = await import('@/app/api/schedules/[id]/status/route')
const res = await GET(req, { params: Promise.resolve({ id: 'schedule-id' }) })
expect(res.status).toBe(200)
const data = await res.json()
expect(data).toHaveProperty('status', 'disabled')
expect(data).toHaveProperty('isDisabled', true)
expect(data).toHaveProperty('lastFailedAt')
})
it('returns 404 if schedule not found', async () => {
mockScheduleStatusDb({ schedule: [] })
vi.doMock('@/lib/auth', () => ({
getSession: vi.fn().mockResolvedValue({ user: { id: 'user-id' } }),
}))
const req = createMockRequest('GET')
const { GET } = await import('@/app/api/schedules/[id]/status/route')
const res = await GET(req, { params: Promise.resolve({ id: 'missing-id' }) })
expect(res.status).toBe(404)
const data = await res.json()
expect(data).toHaveProperty('error', 'Schedule not found')
})
it('returns 404 if related workflow not found', async () => {
mockScheduleStatusDb({ workflow: [] })
vi.doMock('@/lib/auth', () => ({
getSession: vi.fn().mockResolvedValue({ user: { id: 'user-id' } }),
}))
const req = createMockRequest('GET')
const { GET } = await import('@/app/api/schedules/[id]/status/route')
const res = await GET(req, { params: Promise.resolve({ id: 'schedule-id' }) })
expect(res.status).toBe(404)
const data = await res.json()
expect(data).toHaveProperty('error', 'Workflow not found')
})
it('returns 403 when user is not owner of workflow', async () => {
mockScheduleStatusDb({ workflow: [{ userId: 'another-user' }] })
vi.doMock('@/lib/auth', () => ({
getSession: vi.fn().mockResolvedValue({ user: { id: 'user-id' } }),
}))
const req = createMockRequest('GET')
const { GET } = await import('@/app/api/schedules/[id]/status/route')
const res = await GET(req, { params: Promise.resolve({ id: 'schedule-id' }) })
expect(res.status).toBe(403)
const data = await res.json()
expect(data).toHaveProperty('error', 'Not authorized to view this schedule')
})
it('returns 401 when user is not authenticated', async () => {
mockScheduleStatusDb({})
vi.doMock('@/lib/auth', () => ({
getSession: vi.fn().mockResolvedValue(null),
}))
const req = createMockRequest('GET')
const { GET } = await import('@/app/api/schedules/[id]/status/route')
const res = await GET(req, { params: Promise.resolve({ id: 'schedule-id' }) })
expect(res.status).toBe(401)
const data = await res.json()
expect(data).toHaveProperty('error', 'Unauthorized')
})
})

View File

@@ -1,84 +0,0 @@
import { db } from '@sim/db'
import { workflow, workflowSchedule } from '@sim/db/schema'
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { generateRequestId } from '@/lib/core/utils/request'
import { createLogger } from '@/lib/logs/console/logger'
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
const logger = createLogger('ScheduleStatusAPI')
export async function GET(req: NextRequest, { params }: { params: Promise<{ id: string }> }) {
const requestId = generateRequestId()
const { id } = await params
const scheduleId = id
try {
const session = await getSession()
if (!session?.user?.id) {
logger.warn(`[${requestId}] Unauthorized schedule status request`)
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
const [schedule] = await db
.select({
id: workflowSchedule.id,
workflowId: workflowSchedule.workflowId,
status: workflowSchedule.status,
failedCount: workflowSchedule.failedCount,
lastRanAt: workflowSchedule.lastRanAt,
lastFailedAt: workflowSchedule.lastFailedAt,
nextRunAt: workflowSchedule.nextRunAt,
})
.from(workflowSchedule)
.where(eq(workflowSchedule.id, scheduleId))
.limit(1)
if (!schedule) {
logger.warn(`[${requestId}] Schedule not found: ${scheduleId}`)
return NextResponse.json({ error: 'Schedule not found' }, { status: 404 })
}
const [workflowRecord] = await db
.select({ userId: workflow.userId, workspaceId: workflow.workspaceId })
.from(workflow)
.where(eq(workflow.id, schedule.workflowId))
.limit(1)
if (!workflowRecord) {
logger.warn(`[${requestId}] Workflow not found for schedule: ${scheduleId}`)
return NextResponse.json({ error: 'Workflow not found' }, { status: 404 })
}
// Check authorization - either the user owns the workflow or has workspace permissions
let isAuthorized = workflowRecord.userId === session.user.id
// If not authorized by ownership and the workflow belongs to a workspace, check workspace permissions
if (!isAuthorized && workflowRecord.workspaceId) {
const userPermission = await getUserEntityPermissions(
session.user.id,
'workspace',
workflowRecord.workspaceId
)
isAuthorized = userPermission !== null
}
if (!isAuthorized) {
logger.warn(`[${requestId}] User not authorized to view this schedule: ${scheduleId}`)
return NextResponse.json({ error: 'Not authorized to view this schedule' }, { status: 403 })
}
return NextResponse.json({
status: schedule.status,
failedCount: schedule.failedCount,
lastRanAt: schedule.lastRanAt,
lastFailedAt: schedule.lastFailedAt,
nextRunAt: schedule.nextRunAt,
isDisabled: schedule.status === 'disabled',
})
} catch (error) {
logger.error(`[${requestId}] Error retrieving schedule status: ${scheduleId}`, error)
return NextResponse.json({ error: 'Failed to retrieve schedule status' }, { status: 500 })
}
}

View File

@@ -1,43 +1,15 @@
/**
* Integration tests for schedule configuration API route
* Tests for schedule GET API route
*
* @vitest-environment node
*/
import { NextRequest } from 'next/server'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import { createMockRequest, mockExecutionDependencies } from '@/app/api/__test-utils__/utils'
const {
mockGetSession,
mockGetUserEntityPermissions,
mockSelectLimit,
mockInsertValues,
mockOnConflictDoUpdate,
mockInsert,
mockUpdate,
mockDelete,
mockTransaction,
mockRandomUUID,
mockGetScheduleTimeValues,
mockGetSubBlockValue,
mockGenerateCronExpression,
mockCalculateNextRunTime,
mockValidateCronExpression,
} = vi.hoisted(() => ({
const { mockGetSession, mockGetUserEntityPermissions, mockDbSelect } = vi.hoisted(() => ({
mockGetSession: vi.fn(),
mockGetUserEntityPermissions: vi.fn(),
mockSelectLimit: vi.fn(),
mockInsertValues: vi.fn(),
mockOnConflictDoUpdate: vi.fn(),
mockInsert: vi.fn(),
mockUpdate: vi.fn(),
mockDelete: vi.fn(),
mockTransaction: vi.fn(),
mockRandomUUID: vi.fn(),
mockGetScheduleTimeValues: vi.fn(),
mockGetSubBlockValue: vi.fn(),
mockGenerateCronExpression: vi.fn(),
mockCalculateNextRunTime: vi.fn(),
mockValidateCronExpression: vi.fn(),
mockDbSelect: vi.fn(),
}))
vi.mock('@/lib/auth', () => ({
@@ -50,231 +22,136 @@ vi.mock('@/lib/workspaces/permissions/utils', () => ({
vi.mock('@sim/db', () => ({
db: {
select: vi.fn().mockReturnValue({
from: vi.fn().mockReturnValue({
where: vi.fn().mockReturnValue({
limit: mockSelectLimit,
}),
}),
}),
insert: mockInsert,
update: mockUpdate,
delete: mockDelete,
select: mockDbSelect,
},
}))
vi.mock('@sim/db/schema', () => ({
workflow: {
id: 'workflow_id',
userId: 'user_id',
workspaceId: 'workspace_id',
},
workflowSchedule: {
id: 'schedule_id',
workflowId: 'workflow_id',
blockId: 'block_id',
cronExpression: 'cron_expression',
nextRunAt: 'next_run_at',
status: 'status',
},
workflow: { id: 'id', userId: 'userId', workspaceId: 'workspaceId' },
workflowSchedule: { workflowId: 'workflowId', blockId: 'blockId' },
}))
vi.mock('drizzle-orm', () => ({
eq: vi.fn((...args) => ({ type: 'eq', args })),
and: vi.fn((...args) => ({ type: 'and', args })),
}))
vi.mock('crypto', () => ({
randomUUID: mockRandomUUID,
default: {
randomUUID: mockRandomUUID,
},
}))
vi.mock('@/lib/workflows/schedules/utils', () => ({
getScheduleTimeValues: mockGetScheduleTimeValues,
getSubBlockValue: mockGetSubBlockValue,
generateCronExpression: mockGenerateCronExpression,
calculateNextRunTime: mockCalculateNextRunTime,
validateCronExpression: mockValidateCronExpression,
BlockState: {},
eq: vi.fn(),
and: vi.fn(),
}))
vi.mock('@/lib/core/utils/request', () => ({
generateRequestId: vi.fn(() => 'test-request-id'),
generateRequestId: () => 'test-request-id',
}))
vi.mock('@/lib/logs/console/logger', () => ({
createLogger: vi.fn(() => ({
createLogger: () => ({
info: vi.fn(),
warn: vi.fn(),
error: vi.fn(),
debug: vi.fn(),
})),
}),
}))
vi.mock('@/lib/core/telemetry', () => ({
trackPlatformEvent: vi.fn(),
}))
import { GET } from '@/app/api/schedules/route'
import { db } from '@sim/db'
import { POST } from '@/app/api/schedules/route'
function createRequest(url: string): NextRequest {
return new NextRequest(new URL(url), { method: 'GET' })
}
describe('Schedule Configuration API Route', () => {
function mockDbChain(results: any[]) {
let callIndex = 0
mockDbSelect.mockImplementation(() => ({
from: () => ({
where: () => ({
limit: () => results[callIndex++] || [],
}),
}),
}))
}
describe('Schedule GET API', () => {
beforeEach(() => {
vi.clearAllMocks()
;(db as any).transaction = mockTransaction
mockExecutionDependencies()
mockGetSession.mockResolvedValue({
user: {
id: 'user-id',
email: 'test@example.com',
},
})
mockGetUserEntityPermissions.mockResolvedValue('admin')
mockSelectLimit.mockReturnValue([
{
id: 'workflow-id',
userId: 'user-id',
workspaceId: null,
},
])
mockInsertValues.mockImplementation(() => ({
onConflictDoUpdate: mockOnConflictDoUpdate,
}))
mockOnConflictDoUpdate.mockResolvedValue({})
mockInsert.mockReturnValue({
values: mockInsertValues,
})
mockUpdate.mockImplementation(() => ({
set: vi.fn().mockImplementation(() => ({
where: vi.fn().mockResolvedValue([]),
})),
}))
mockDelete.mockImplementation(() => ({
where: vi.fn().mockResolvedValue([]),
}))
mockTransaction.mockImplementation(async (callback) => {
const tx = {
insert: vi.fn().mockReturnValue({
values: mockInsertValues,
}),
}
return callback(tx)
})
mockRandomUUID.mockReturnValue('test-uuid')
mockGetScheduleTimeValues.mockReturnValue({
scheduleTime: '09:30',
minutesInterval: 15,
hourlyMinute: 0,
dailyTime: [9, 30],
weeklyDay: 1,
weeklyTime: [9, 30],
monthlyDay: 1,
monthlyTime: [9, 30],
})
mockGetSubBlockValue.mockImplementation((block: any, id: string) => {
const subBlocks = {
startWorkflow: 'schedule',
scheduleType: 'daily',
scheduleTime: '09:30',
dailyTime: '09:30',
}
return subBlocks[id as keyof typeof subBlocks] || ''
})
mockGenerateCronExpression.mockReturnValue('0 9 * * *')
mockCalculateNextRunTime.mockReturnValue(new Date())
mockValidateCronExpression.mockReturnValue({ isValid: true })
mockGetSession.mockResolvedValue({ user: { id: 'user-1' } })
mockGetUserEntityPermissions.mockResolvedValue('read')
})
afterEach(() => {
vi.clearAllMocks()
})
it('should create a new schedule successfully', async () => {
const req = createMockRequest('POST', {
workflowId: 'workflow-id',
state: {
blocks: {
'starter-id': {
type: 'starter',
subBlocks: {
startWorkflow: { value: 'schedule' },
scheduleType: { value: 'daily' },
scheduleTime: { value: '09:30' },
dailyTime: { value: '09:30' },
},
},
},
edges: [],
loops: {},
},
})
it('returns schedule data for authorized user', async () => {
mockDbChain([
[{ userId: 'user-1', workspaceId: null }],
[{ id: 'sched-1', cronExpression: '0 9 * * *', status: 'active', failedCount: 0 }],
])
const response = await POST(req)
const res = await GET(createRequest('http://test/api/schedules?workflowId=wf-1'))
const data = await res.json()
expect(response).toBeDefined()
expect(response.status).toBe(200)
const responseData = await response.json()
expect(responseData).toHaveProperty('message', 'Schedule updated')
expect(responseData).toHaveProperty('cronExpression', '0 9 * * *')
expect(responseData).toHaveProperty('nextRunAt')
expect(res.status).toBe(200)
expect(data.schedule.cronExpression).toBe('0 9 * * *')
expect(data.isDisabled).toBe(false)
})
it('should handle errors gracefully', async () => {
mockSelectLimit.mockReturnValue([])
it('returns null when no schedule exists', async () => {
mockDbChain([[{ userId: 'user-1', workspaceId: null }], []])
const req = createMockRequest('POST', {
workflowId: 'workflow-id',
state: { blocks: {}, edges: [], loops: {} },
})
const res = await GET(createRequest('http://test/api/schedules?workflowId=wf-1'))
const data = await res.json()
const response = await POST(req)
expect(response.status).toBeGreaterThanOrEqual(400)
const data = await response.json()
expect(data).toHaveProperty('error')
expect(res.status).toBe(200)
expect(data.schedule).toBeNull()
})
it('should require authentication', async () => {
it('requires authentication', async () => {
mockGetSession.mockResolvedValue(null)
const req = createMockRequest('POST', {
workflowId: 'workflow-id',
state: { blocks: {}, edges: [], loops: {} },
})
const res = await GET(createRequest('http://test/api/schedules?workflowId=wf-1'))
const response = await POST(req)
expect(response.status).toBe(401)
const data = await response.json()
expect(data).toHaveProperty('error', 'Unauthorized')
expect(res.status).toBe(401)
})
it('should validate input data', async () => {
const req = createMockRequest('POST', {
workflowId: 'workflow-id',
})
it('requires workflowId parameter', async () => {
const res = await GET(createRequest('http://test/api/schedules'))
const response = await POST(req)
expect(res.status).toBe(400)
})
expect(response.status).toBe(400)
const data = await response.json()
expect(data).toHaveProperty('error', 'Invalid request data')
it('returns 404 for non-existent workflow', async () => {
mockDbChain([[]])
const res = await GET(createRequest('http://test/api/schedules?workflowId=wf-1'))
expect(res.status).toBe(404)
})
it('denies access for unauthorized user', async () => {
mockDbChain([[{ userId: 'other-user', workspaceId: null }]])
const res = await GET(createRequest('http://test/api/schedules?workflowId=wf-1'))
expect(res.status).toBe(403)
})
it('allows workspace members to view', async () => {
mockDbChain([
[{ userId: 'other-user', workspaceId: 'ws-1' }],
[{ id: 'sched-1', status: 'active', failedCount: 0 }],
])
const res = await GET(createRequest('http://test/api/schedules?workflowId=wf-1'))
expect(res.status).toBe(200)
})
it('indicates disabled schedule with failures', async () => {
mockDbChain([
[{ userId: 'user-1', workspaceId: null }],
[{ id: 'sched-1', status: 'disabled', failedCount: 10 }],
])
const res = await GET(createRequest('http://test/api/schedules?workflowId=wf-1'))
const data = await res.json()
expect(res.status).toBe(200)
expect(data.isDisabled).toBe(true)
expect(data.hasFailures).toBe(true)
})
})
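A minimal sketch of the lookup these tests cover; the workflowId query parameter and the response fields (schedule, isDisabled, hasFailures) come from the assertions above, so the full payload shape is an assumption.

async function fetchWorkflowSchedule(workflowId: string) {
  const res = await fetch(`/api/schedules?workflowId=${encodeURIComponent(workflowId)}`)
  if (!res.ok) {
    // 400 missing workflowId, 401 unauthenticated, 403 not authorized, 404 workflow not found
    throw new Error(`Schedule lookup failed with status ${res.status}`)
  }
  const data: {
    schedule: { id: string; cronExpression?: string; status?: string; failedCount?: number } | null
    isDisabled?: boolean
    hasFailures?: boolean
  } = await res.json()
  return data
}

// fetchWorkflowSchedule('wf-1') -> { schedule: { ... } | null, isDisabled, hasFailures }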

View File

@@ -2,61 +2,13 @@ import { db } from '@sim/db'
import { workflow, workflowSchedule } from '@sim/db/schema'
import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { generateRequestId } from '@/lib/core/utils/request'
import { createLogger } from '@/lib/logs/console/logger'
import {
type BlockState,
calculateNextRunTime,
generateCronExpression,
getScheduleTimeValues,
getSubBlockValue,
validateCronExpression,
} from '@/lib/workflows/schedules/utils'
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
const logger = createLogger('ScheduledAPI')
const ScheduleRequestSchema = z.object({
workflowId: z.string(),
blockId: z.string().optional(),
state: z.object({
blocks: z.record(z.any()),
edges: z.array(z.any()),
loops: z.record(z.any()),
}),
})
function hasValidScheduleConfig(
scheduleType: string | undefined,
scheduleValues: ReturnType<typeof getScheduleTimeValues>,
starterBlock: BlockState
): boolean {
switch (scheduleType) {
case 'minutes':
return !!scheduleValues.minutesInterval
case 'hourly':
return scheduleValues.hourlyMinute !== undefined
case 'daily':
return !!scheduleValues.dailyTime[0] || !!scheduleValues.dailyTime[1]
case 'weekly':
return (
!!scheduleValues.weeklyDay &&
(!!scheduleValues.weeklyTime[0] || !!scheduleValues.weeklyTime[1])
)
case 'monthly':
return (
!!scheduleValues.monthlyDay &&
(!!scheduleValues.monthlyTime[0] || !!scheduleValues.monthlyTime[1])
)
case 'custom':
return !!getSubBlockValue(starterBlock, 'cronExpression')
default:
return false
}
}
/**
* Get schedule information for a workflow
*/
@@ -65,11 +17,6 @@ export async function GET(req: NextRequest) {
const url = new URL(req.url)
const workflowId = url.searchParams.get('workflowId')
const blockId = url.searchParams.get('blockId')
const mode = url.searchParams.get('mode')
if (mode && mode !== 'schedule') {
return NextResponse.json({ schedule: null })
}
try {
const session = await getSession()
@@ -145,262 +92,3 @@ export async function GET(req: NextRequest) {
return NextResponse.json({ error: 'Failed to retrieve workflow schedule' }, { status: 500 })
}
}
const saveAttempts = new Map<string, { count: number; resetAt: number }>()
const RATE_LIMIT_WINDOW = 60000 // 1 minute
const RATE_LIMIT_MAX = 10 // 10 saves per minute
/**
* Create or update a schedule for a workflow
*/
export async function POST(req: NextRequest) {
const requestId = generateRequestId()
try {
const session = await getSession()
if (!session?.user?.id) {
logger.warn(`[${requestId}] Unauthorized schedule update attempt`)
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
const now = Date.now()
const userKey = session.user.id
const limit = saveAttempts.get(userKey)
if (limit && limit.resetAt > now) {
if (limit.count >= RATE_LIMIT_MAX) {
logger.warn(`[${requestId}] Rate limit exceeded for user: ${userKey}`)
return NextResponse.json(
{ error: 'Too many save attempts. Please wait a moment and try again.' },
{ status: 429 }
)
}
limit.count++
} else {
saveAttempts.set(userKey, { count: 1, resetAt: now + RATE_LIMIT_WINDOW })
}
const body = await req.json()
const { workflowId, blockId, state } = ScheduleRequestSchema.parse(body)
logger.info(`[${requestId}] Processing schedule update for workflow ${workflowId}`)
const [workflowRecord] = await db
.select({ userId: workflow.userId, workspaceId: workflow.workspaceId })
.from(workflow)
.where(eq(workflow.id, workflowId))
.limit(1)
if (!workflowRecord) {
logger.warn(`[${requestId}] Workflow not found: ${workflowId}`)
return NextResponse.json({ error: 'Workflow not found' }, { status: 404 })
}
let isAuthorized = workflowRecord.userId === session.user.id
if (!isAuthorized && workflowRecord.workspaceId) {
const userPermission = await getUserEntityPermissions(
session.user.id,
'workspace',
workflowRecord.workspaceId
)
isAuthorized = userPermission === 'write' || userPermission === 'admin'
}
if (!isAuthorized) {
logger.warn(
`[${requestId}] User not authorized to modify schedule for workflow: ${workflowId}`
)
return NextResponse.json({ error: 'Not authorized to modify this workflow' }, { status: 403 })
}
let targetBlock: BlockState | undefined
if (blockId) {
targetBlock = Object.values(state.blocks).find((block: any) => block.id === blockId) as
| BlockState
| undefined
} else {
targetBlock = Object.values(state.blocks).find(
(block: any) => block.type === 'starter' || block.type === 'schedule'
) as BlockState | undefined
}
if (!targetBlock) {
logger.warn(`[${requestId}] No starter or schedule block found in workflow ${workflowId}`)
return NextResponse.json(
{ error: 'No starter or schedule block found in workflow' },
{ status: 400 }
)
}
const startWorkflow = getSubBlockValue(targetBlock, 'startWorkflow')
const scheduleType = getSubBlockValue(targetBlock, 'scheduleType')
const scheduleValues = getScheduleTimeValues(targetBlock)
const hasScheduleConfig = hasValidScheduleConfig(scheduleType, scheduleValues, targetBlock)
const isScheduleBlock = targetBlock.type === 'schedule'
const hasValidConfig = isScheduleBlock || (startWorkflow === 'schedule' && hasScheduleConfig)
logger.info(`[${requestId}] Schedule validation debug:`, {
workflowId,
blockId,
blockType: targetBlock.type,
isScheduleBlock,
startWorkflow,
scheduleType,
hasScheduleConfig,
hasValidConfig,
scheduleValues: {
minutesInterval: scheduleValues.minutesInterval,
dailyTime: scheduleValues.dailyTime,
cronExpression: scheduleValues.cronExpression,
},
})
if (!hasValidConfig) {
logger.info(
`[${requestId}] Removing schedule for workflow ${workflowId} - no valid configuration found`
)
const deleteConditions = [eq(workflowSchedule.workflowId, workflowId)]
if (blockId) {
deleteConditions.push(eq(workflowSchedule.blockId, blockId))
}
await db
.delete(workflowSchedule)
.where(deleteConditions.length > 1 ? and(...deleteConditions) : deleteConditions[0])
return NextResponse.json({ message: 'Schedule removed' })
}
if (isScheduleBlock) {
logger.info(`[${requestId}] Processing schedule trigger block for workflow ${workflowId}`)
} else if (startWorkflow !== 'schedule') {
logger.info(
`[${requestId}] Setting workflow to scheduled mode based on schedule configuration`
)
}
logger.debug(`[${requestId}] Schedule type for workflow ${workflowId}: ${scheduleType}`)
let cronExpression: string | null = null
let nextRunAt: Date | undefined
const timezone = getSubBlockValue(targetBlock, 'timezone') || 'UTC'
try {
const defaultScheduleType = scheduleType || 'daily'
const scheduleStartAt = getSubBlockValue(targetBlock, 'scheduleStartAt')
const scheduleTime = getSubBlockValue(targetBlock, 'scheduleTime')
logger.debug(`[${requestId}] Schedule configuration:`, {
type: defaultScheduleType,
timezone,
startDate: scheduleStartAt || 'not specified',
time: scheduleTime || 'not specified',
})
const sanitizedScheduleValues =
defaultScheduleType !== 'custom'
? { ...scheduleValues, cronExpression: null }
: scheduleValues
cronExpression = generateCronExpression(defaultScheduleType, sanitizedScheduleValues)
if (cronExpression) {
const validation = validateCronExpression(cronExpression, timezone)
if (!validation.isValid) {
logger.error(`[${requestId}] Invalid cron expression: ${validation.error}`, {
scheduleType: defaultScheduleType,
cronExpression,
})
return NextResponse.json(
{ error: `Invalid schedule configuration: ${validation.error}` },
{ status: 400 }
)
}
}
nextRunAt = calculateNextRunTime(defaultScheduleType, sanitizedScheduleValues)
logger.debug(
`[${requestId}] Generated cron: ${cronExpression}, next run at: ${nextRunAt.toISOString()}`
)
} catch (error: any) {
logger.error(`[${requestId}] Error generating schedule: ${error}`)
const errorMessage = error?.message || 'Failed to generate schedule'
return NextResponse.json({ error: errorMessage }, { status: 400 })
}
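// Upsert the schedule keyed on (workflowId, blockId); reconfiguring an existing schedule reactivates it and resets its failure count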
const values = {
id: crypto.randomUUID(),
workflowId,
blockId,
cronExpression,
triggerType: 'schedule',
createdAt: new Date(),
updatedAt: new Date(),
nextRunAt,
timezone,
status: 'active', // Ensure new schedules are active
failedCount: 0, // Reset failure count for new schedules
}
const setValues = {
blockId,
cronExpression,
updatedAt: new Date(),
nextRunAt,
timezone,
status: 'active', // Reactivate if previously disabled
failedCount: 0, // Reset failure count on reconfiguration
}
await db.transaction(async (tx) => {
await tx
.insert(workflowSchedule)
.values(values)
.onConflictDoUpdate({
target: [workflowSchedule.workflowId, workflowSchedule.blockId],
set: setValues,
})
})
logger.info(`[${requestId}] Schedule updated for workflow ${workflowId}`, {
nextRunAt: nextRunAt?.toISOString(),
cronExpression,
})
try {
const { trackPlatformEvent } = await import('@/lib/core/telemetry')
trackPlatformEvent('platform.schedule.created', {
'workflow.id': workflowId,
'schedule.type': scheduleType || 'daily',
'schedule.timezone': timezone,
'schedule.is_custom': scheduleType === 'custom',
})
} catch (_e) {
// Silently fail
}
return NextResponse.json({
message: 'Schedule updated',
schedule: { id: values.id },
nextRunAt,
cronExpression,
})
} catch (error: any) {
logger.error(`[${requestId}] Error updating workflow schedule`, error)
if (error instanceof z.ZodError) {
return NextResponse.json(
{ error: 'Invalid request data', details: error.errors },
{ status: 400 }
)
}
const errorMessage = error?.message || 'Failed to update workflow schedule'
return NextResponse.json({ error: errorMessage }, { status: 500 })
}
}
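A minimal client-side sketch of how this handler might be exercised, assuming the route is mounted at /api/schedules and the payload matches the ScheduleRequestSchema parsed above (both are assumptions, not confirmed by this diff):
// Hedged sketch, not part of this commit.
const res = await fetch('/api/schedules', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    workflowId, // UUID of the workflow that owns the schedule
    blockId,    // optional: a specific schedule/starter block
    state,      // current workflow state containing the blocks
  }),
})
if (res.ok) {
  // On success the handler returns the generated cron expression and next run time
  const { cronExpression, nextRunAt } = await res.json()
}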

View File

@@ -1,9 +1,9 @@
import { type NextRequest, NextResponse } from 'next/server'
import { authorizeCredentialUse } from '@/lib/auth/credential-access'
import { validateUUID } from '@/lib/core/security/input-validation'
import { generateRequestId } from '@/lib/core/utils/request'
import { createLogger } from '@/lib/logs/console/logger'
import { refreshAccessTokenIfNeeded } from '@/app/api/auth/oauth/utils'
import { isUuidV4 } from '@/executor/constants'
export const dynamic = 'force-dynamic'
const logger = createLogger('GoogleCalendarAPI')
@@ -35,18 +35,14 @@ export async function GET(request: NextRequest) {
return NextResponse.json({ error: 'Credential ID is required' }, { status: 400 })
}
const credentialValidation = validateUUID(credentialId, 'credentialId')
if (!credentialValidation.isValid) {
if (!isUuidV4(credentialId)) {
logger.warn(`[${requestId}] Invalid credentialId format`, { credentialId })
return NextResponse.json({ error: credentialValidation.error }, { status: 400 })
return NextResponse.json({ error: 'Invalid credential ID format' }, { status: 400 })
}
if (workflowId) {
const workflowValidation = validateUUID(workflowId, 'workflowId')
if (!workflowValidation.isValid) {
logger.warn(`[${requestId}] Invalid workflowId format`, { workflowId })
return NextResponse.json({ error: workflowValidation.error }, { status: 400 })
}
if (workflowId && !isUuidV4(workflowId)) {
logger.warn(`[${requestId}] Invalid workflowId format`, { workflowId })
return NextResponse.json({ error: 'Invalid workflow ID format' }, { status: 400 })
}
const authz = await authorizeCredentialUse(request, { credentialId, workflowId })
if (!authz.ok || !authz.credentialOwnerUserId) {

View File

@@ -87,8 +87,8 @@ export async function POST(request: NextRequest) {
output: 0,
total: SEARCH_TOOL_COST,
tokens: {
prompt: 0,
completion: 0,
input: 0,
output: 0,
total: 0,
},
model: 'search-exa',

View File

@@ -3,7 +3,12 @@ import { and, desc, eq } from 'drizzle-orm'
import type { NextRequest } from 'next/server'
import { generateRequestId } from '@/lib/core/utils/request'
import { createLogger } from '@/lib/logs/console/logger'
import { deployWorkflow } from '@/lib/workflows/persistence/utils'
import { deployWorkflow, loadWorkflowFromNormalizedTables } from '@/lib/workflows/persistence/utils'
import {
createSchedulesForDeploy,
deleteSchedulesForWorkflow,
validateWorkflowSchedules,
} from '@/lib/workflows/schedules'
import { validateWorkflowPermissions } from '@/lib/workflows/utils'
import { createErrorResponse, createSuccessResponse } from '@/app/api/workflows/utils'
@@ -98,13 +103,25 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
return createErrorResponse(error.message, error.status)
}
// Attribution: this route is UI-only; require session user as actor
const actorUserId: string | null = session?.user?.id ?? null
if (!actorUserId) {
logger.warn(`[${requestId}] Unable to resolve actor user for workflow deployment: ${id}`)
return createErrorResponse('Unable to determine deploying user', 400)
}
const normalizedData = await loadWorkflowFromNormalizedTables(id)
if (!normalizedData) {
return createErrorResponse('Failed to load workflow state', 500)
}
const scheduleValidation = validateWorkflowSchedules(normalizedData.blocks)
if (!scheduleValidation.isValid) {
logger.warn(
`[${requestId}] Schedule validation failed for workflow ${id}: ${scheduleValidation.error}`
)
return createErrorResponse(`Invalid schedule configuration: ${scheduleValidation.error}`, 400)
}
const deployResult = await deployWorkflow({
workflowId: id,
deployedBy: actorUserId,
@@ -117,6 +134,23 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
const deployedAt = deployResult.deployedAt!
let scheduleInfo: { scheduleId?: string; cronExpression?: string; nextRunAt?: Date } = {}
const scheduleResult = await createSchedulesForDeploy(id, normalizedData.blocks, db)
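// Schedule creation failures are logged below but do not fail the deploy itself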
if (!scheduleResult.success) {
logger.error(
`[${requestId}] Failed to create schedule for workflow ${id}: ${scheduleResult.error}`
)
} else if (scheduleResult.scheduleId) {
scheduleInfo = {
scheduleId: scheduleResult.scheduleId,
cronExpression: scheduleResult.cronExpression,
nextRunAt: scheduleResult.nextRunAt,
}
logger.info(
`[${requestId}] Schedule created for workflow ${id}: ${scheduleResult.scheduleId}`
)
}
logger.info(`[${requestId}] Workflow deployed successfully: ${id}`)
const responseApiKeyInfo = workflowData!.workspaceId
@@ -127,6 +161,13 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
apiKey: responseApiKeyInfo,
isDeployed: true,
deployedAt,
schedule: scheduleInfo.scheduleId
? {
id: scheduleInfo.scheduleId,
cronExpression: scheduleInfo.cronExpression,
nextRunAt: scheduleInfo.nextRunAt,
}
: undefined,
})
} catch (error: any) {
logger.error(`[${requestId}] Error deploying workflow: ${id}`, {
@@ -156,6 +197,8 @@ export async function DELETE(
}
await db.transaction(async (tx) => {
await deleteSchedulesForWorkflow(id, tx)
await tx
.update(workflowDeploymentVersion)
.set({ isActive: false })
@@ -169,7 +212,6 @@ export async function DELETE(
logger.info(`[${requestId}] Workflow undeployed successfully: ${id}`)
// Track workflow undeployment
try {
const { trackPlatformEvent } = await import('@/lib/core/telemetry')
trackPlatformEvent('platform.workflow.undeployed', {

View File

@@ -22,6 +22,7 @@ import {
import { createStreamingResponse } from '@/lib/workflows/streaming/streaming'
import { createHttpResponseFromBlock, workflowHasResponseBlock } from '@/lib/workflows/utils'
import type { WorkflowExecutionPayload } from '@/background/workflow-execution'
import { normalizeName } from '@/executor/constants'
import { type ExecutionMetadata, ExecutionSnapshot } from '@/executor/execution/snapshot'
import type { StreamingExecution } from '@/executor/types'
import { Serializer } from '@/serializer'
@@ -86,10 +87,9 @@ function resolveOutputIds(
const blockName = outputId.substring(0, dotIndex)
const path = outputId.substring(dotIndex + 1)
const normalizedBlockName = blockName.toLowerCase().replace(/\s+/g, '')
const normalizedBlockName = normalizeName(blockName)
const block = Object.values(blocks).find((b: any) => {
const normalized = (b.name || '').toLowerCase().replace(/\s+/g, '')
return normalized === normalizedBlockName
return normalizeName(b.name || '') === normalizedBlockName
})
if (!block) {
@@ -713,7 +713,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
await PauseResumeManager.processQueuedResumes(executionId)
}
if (result.error === 'Workflow execution was cancelled') {
if (result.status === 'cancelled') {
logger.info(`[${requestId}] Workflow execution was cancelled`)
sendEvent({
type: 'execution:cancelled',

View File

@@ -1,5 +1,5 @@
import { db } from '@sim/db'
import { webhook, workflow, workflowSchedule } from '@sim/db/schema'
import { webhook, workflow } from '@sim/db/schema'
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
@@ -10,12 +10,6 @@ import { createLogger } from '@/lib/logs/console/logger'
import { extractAndPersistCustomTools } from '@/lib/workflows/persistence/custom-tools-persistence'
import { saveWorkflowToNormalizedTables } from '@/lib/workflows/persistence/utils'
import { sanitizeAgentToolsInBlocks } from '@/lib/workflows/sanitization/validation'
import {
calculateNextRunTime,
generateCronExpression,
getScheduleTimeValues,
validateCronExpression,
} from '@/lib/workflows/schedules/utils'
import { getWorkflowAccessContext } from '@/lib/workflows/utils'
import type { BlockState } from '@/stores/workflows/workflow/types'
import { generateLoopBlocks, generateParallelBlocks } from '@/stores/workflows/workflow/utils'
@@ -210,7 +204,6 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
}
await syncWorkflowWebhooks(workflowId, workflowState.blocks)
await syncWorkflowSchedules(workflowId, workflowState.blocks)
// Extract and persist custom tools to database
try {
@@ -318,79 +311,6 @@ async function syncWorkflowWebhooks(
})
}
type ScheduleBlockInput = Parameters<typeof getScheduleTimeValues>[0]
async function syncWorkflowSchedules(
workflowId: string,
blocks: Record<string, any>
): Promise<void> {
await syncBlockResources(workflowId, blocks, {
resourceName: 'schedule',
subBlockId: 'scheduleId',
buildMetadata: buildScheduleMetadata,
applyMetadata: upsertScheduleRecord,
})
}
interface ScheduleMetadata {
cronExpression: string | null
nextRunAt: Date | null
timezone: string
}
function buildScheduleMetadata(block: BlockState): ScheduleMetadata | null {
const scheduleType = getSubBlockValue<string>(block, 'scheduleType') || 'daily'
const scheduleBlock = convertToScheduleBlock(block)
const scheduleValues = getScheduleTimeValues(scheduleBlock)
const sanitizedValues =
scheduleType !== 'custom' ? { ...scheduleValues, cronExpression: null } : scheduleValues
try {
const cronExpression = generateCronExpression(scheduleType, sanitizedValues)
const timezone = scheduleValues.timezone || 'UTC'
if (cronExpression) {
const validation = validateCronExpression(cronExpression, timezone)
if (!validation.isValid) {
logger.warn('Invalid cron expression while syncing schedule', {
blockId: block.id,
cronExpression,
error: validation.error,
})
return null
}
}
const nextRunAt = calculateNextRunTime(scheduleType, sanitizedValues)
return {
cronExpression,
timezone,
nextRunAt,
}
} catch (error) {
logger.error('Failed to build schedule metadata during sync', {
blockId: block.id,
error,
})
return null
}
}
function convertToScheduleBlock(block: BlockState): ScheduleBlockInput {
const subBlocks: ScheduleBlockInput['subBlocks'] = {}
Object.entries(block.subBlocks || {}).forEach(([id, subBlock]) => {
subBlocks[id] = { value: stringifySubBlockValue(subBlock?.value) }
})
return {
type: block.type,
subBlocks,
}
}
interface WebhookMetadata {
triggerPath: string
provider: string | null
@@ -473,58 +393,6 @@ async function upsertWebhookRecord(
})
}
async function upsertScheduleRecord(
workflowId: string,
block: BlockState,
scheduleId: string,
metadata: ScheduleMetadata
): Promise<void> {
const now = new Date()
const [existing] = await db
.select({
id: workflowSchedule.id,
nextRunAt: workflowSchedule.nextRunAt,
})
.from(workflowSchedule)
.where(eq(workflowSchedule.id, scheduleId))
.limit(1)
if (existing) {
await db
.update(workflowSchedule)
.set({
workflowId,
blockId: block.id,
cronExpression: metadata.cronExpression,
nextRunAt: metadata.nextRunAt ?? existing.nextRunAt,
timezone: metadata.timezone,
updatedAt: now,
})
.where(eq(workflowSchedule.id, scheduleId))
return
}
await db.insert(workflowSchedule).values({
id: scheduleId,
workflowId,
blockId: block.id,
cronExpression: metadata.cronExpression,
nextRunAt: metadata.nextRunAt ?? null,
triggerType: 'schedule',
timezone: metadata.timezone,
status: 'active',
failedCount: 0,
createdAt: now,
updatedAt: now,
})
logger.info('Recreated missing schedule after workflow save', {
workflowId,
blockId: block.id,
scheduleId,
})
}
interface BlockResourceSyncConfig<T> {
resourceName: string
subBlockId: string
@@ -573,27 +441,3 @@ async function syncBlockResources<T>(
}
}
}
function stringifySubBlockValue(value: unknown): string {
if (value === undefined || value === null) {
return ''
}
if (typeof value === 'string') {
return value
}
if (typeof value === 'number' || typeof value === 'boolean') {
return String(value)
}
if (value instanceof Date) {
return value.toISOString()
}
try {
return JSON.stringify(value)
} catch {
return String(value)
}
}

View File

@@ -53,7 +53,7 @@ function buildTestPayload(subscription: typeof workspaceNotificationSubscription
totalDurationMs: 5000,
cost: {
total: 0.00123,
tokens: { prompt: 100, completion: 50, total: 150 },
tokens: { input: 100, output: 50, total: 150 },
},
},
links: {

View File

@@ -117,7 +117,7 @@ export default function ChatClient({ identifier }: { identifier: string }) {
const [error, setError] = useState<string | null>(null)
const messagesEndRef = useRef<HTMLDivElement>(null)
const messagesContainerRef = useRef<HTMLDivElement>(null)
const [starCount, setStarCount] = useState('19.4k')
const [starCount, setStarCount] = useState('24k')
const [conversationId, setConversationId] = useState('')
const [showScrollButton, setShowScrollButton] = useState(false)

View File

@@ -131,8 +131,8 @@ function formatExecutionData(executionData: any) {
: null,
tokens: tokens
? {
prompt: tokens.prompt || 0,
completion: tokens.completion || 0,
input: tokens.input || tokens.prompt || 0,
output: tokens.output || tokens.completion || 0,
total: tokens.total || 0,
}
: null,
@@ -347,12 +347,12 @@ function PinnedLogs({
</h4>
<div className='space-y-[4px] rounded-[4px] border border-[var(--border)] bg-[var(--surface-3)] p-[12px] text-[13px]'>
<div className='flex justify-between text-[var(--text-primary)]'>
<span>Prompt:</span>
<span>{formatted.tokens.prompt}</span>
<span>Input:</span>
<span>{formatted.tokens.input}</span>
</div>
<div className='flex justify-between text-[var(--text-primary)]'>
<span>Completion:</span>
<span>{formatted.tokens.completion}</span>
<span>Output:</span>
<span>{formatted.tokens.output}</span>
</div>
<div className='flex justify-between border-[var(--border)] border-t pt-[4px] font-medium text-[var(--text-primary)]'>
<span>Total:</span>
@@ -498,8 +498,8 @@ export function FrozenCanvas({
total: null,
},
tokens: span.tokens || {
prompt: null,
completion: null,
input: null,
output: null,
total: null,
},
modelUsed: span.model || null,

View File

@@ -7,8 +7,12 @@ import { ScrollArea } from '@/components/ui/scroll-area'
import { BASE_EXECUTION_CHARGE } from '@/lib/billing/constants'
import { FileCards, FrozenCanvas, TraceSpans } from '@/app/workspace/[workspaceId]/logs/components'
import { useLogDetailsResize } from '@/app/workspace/[workspaceId]/logs/hooks'
import type { LogStatus } from '@/app/workspace/[workspaceId]/logs/utils'
import { formatDate, StatusBadge, TriggerBadge } from '@/app/workspace/[workspaceId]/logs/utils'
import {
formatDate,
getDisplayStatus,
StatusBadge,
TriggerBadge,
} from '@/app/workspace/[workspaceId]/logs/utils'
import { formatCost } from '@/providers/utils'
import type { WorkflowLog } from '@/stores/logs/filters/types'
import { useLogDetailsUIStore } from '@/stores/logs/store'
@@ -100,14 +104,7 @@ export const LogDetails = memo(function LogDetails({
[log?.createdAt]
)
const logStatus: LogStatus = useMemo(() => {
if (!log) return 'info'
const baseLevel = (log.level || 'info').toLowerCase()
const isError = baseLevel === 'error'
const isPending = !isError && log.hasPendingPause === true
const isRunning = !isError && !isPending && log.duration === null
return isError ? 'error' : isPending ? 'pending' : isRunning ? 'running' : 'info'
}, [log])
const logStatus = useMemo(() => getDisplayStatus(log?.status), [log?.status])
return (
<>
@@ -344,8 +341,8 @@ export const LogDetails = memo(function LogDetails({
Tokens:
</span>
<span className='font-medium text-[12px] text-[var(--text-secondary)]'>
{log.cost?.tokens?.prompt || 0} in / {log.cost?.tokens?.completion || 0}{' '}
out
{log.cost?.tokens?.input || log.cost?.tokens?.prompt || 0} in /{' '}
{log.cost?.tokens?.output || log.cost?.tokens?.completion || 0} out
</span>
</div>
</div>

View File

@@ -6,8 +6,14 @@ import Link from 'next/link'
import { List, type RowComponentProps, useListRef } from 'react-window'
import { Badge, buttonVariants } from '@/components/emcn'
import { cn } from '@/lib/core/utils/cn'
import {
formatDate,
formatDuration,
getDisplayStatus,
StatusBadge,
TriggerBadge,
} from '@/app/workspace/[workspaceId]/logs/utils'
import type { WorkflowLog } from '@/stores/logs/filters/types'
import { formatDate, formatDuration, StatusBadge, TriggerBadge } from '../../utils'
const LOG_ROW_HEIGHT = 44 as const
@@ -25,10 +31,6 @@ interface LogRowProps {
const LogRow = memo(
function LogRow({ log, isSelected, onClick, selectedRowRef }: LogRowProps) {
const formattedDate = useMemo(() => formatDate(log.createdAt), [log.createdAt])
const baseLevel = (log.level || 'info').toLowerCase()
const isError = baseLevel === 'error'
const isPending = !isError && log.hasPendingPause === true
const isRunning = !isError && !isPending && log.duration === null
const handleClick = useCallback(() => onClick(log), [onClick, log])
@@ -54,9 +56,7 @@ const LogRow = memo(
{/* Status */}
<div className='w-[12%] min-w-[100px]'>
<StatusBadge
status={isError ? 'error' : isPending ? 'pending' : isRunning ? 'running' : 'info'}
/>
<StatusBadge status={getDisplayStatus(log.status)} />
</div>
{/* Workflow */}
@@ -93,7 +93,7 @@ const LogRow = memo(
</div>
{/* Resume Link */}
{isPending && log.executionId && (log.workflow?.id || log.workflowId) && (
{log.status === 'pending' && log.executionId && (log.workflow?.id || log.workflowId) && (
<Link
href={`/resume/${log.workflow?.id || log.workflowId}/${log.executionId}`}
target='_blank'
@@ -115,8 +115,7 @@ const LogRow = memo(
return (
prevProps.log.id === nextProps.log.id &&
prevProps.log.duration === nextProps.log.duration &&
prevProps.log.level === nextProps.log.level &&
prevProps.log.hasPendingPause === nextProps.log.hasPendingPause &&
prevProps.log.status === nextProps.log.status &&
prevProps.isSelected === nextProps.isSelected
)
}

View File

@@ -4,6 +4,7 @@ import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
import { AlertCircle, Loader2 } from 'lucide-react'
import { useParams } from 'next/navigation'
import { cn } from '@/lib/core/utils/cn'
import { getStartDateFromTimeRange } from '@/lib/logs/filters'
import { parseQuery, queryToApiParams } from '@/lib/logs/query-parser'
import { useFolders } from '@/hooks/queries/folders'
import { useDashboardLogs, useLogDetail, useLogsList } from '@/hooks/queries/logs'
@@ -136,9 +137,7 @@ export default function Logs() {
const hasStatusChange =
prevLog?.id === updatedLog.id &&
(updatedLog.duration !== prevLog.duration ||
updatedLog.level !== prevLog.level ||
updatedLog.hasPendingPause !== prevLog.hasPendingPause)
(updatedLog.duration !== prevLog.duration || updatedLog.status !== prevLog.status)
if (updatedLog !== selectedLog) {
setSelectedLog(updatedLog)
@@ -262,6 +261,11 @@ export default function Logs() {
if (workflowIds.length > 0) params.set('workflowIds', workflowIds.join(','))
if (folderIds.length > 0) params.set('folderIds', folderIds.join(','))
const startDate = getStartDateFromTimeRange(timeRange)
if (startDate) {
params.set('startDate', startDate.toISOString())
}
const parsed = parseQuery(debouncedSearchQuery)
const extra = queryToApiParams(parsed)
Object.entries(extra).forEach(([k, v]) => params.set(k, v))

View File

@@ -7,8 +7,22 @@ import { getBlock } from '@/blocks/registry'
const CORE_TRIGGER_TYPES = ['manual', 'api', 'schedule', 'chat', 'webhook'] as const
const RUNNING_COLOR = '#22c55e' as const
const PENDING_COLOR = '#f59e0b' as const
export type LogStatus = 'error' | 'pending' | 'running' | 'info' | 'cancelled'
export type LogStatus = 'error' | 'pending' | 'running' | 'info'
export function getDisplayStatus(status: string | null | undefined): LogStatus {
switch (status) {
case 'running':
return 'running'
case 'pending':
return 'pending'
case 'cancelled':
return 'cancelled'
case 'failed':
return 'error'
default:
return 'info'
}
}
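// Illustrative mapping (not part of this commit): getDisplayStatus('failed') -> 'error',
// getDisplayStatus('running') -> 'running', getDisplayStatus('pending') -> 'pending',
// and any unknown or missing status falls back to 'info'.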
/**
* Checks if a hex color is gray/neutral (low saturation) or too light/dark
@@ -77,6 +91,11 @@ export const StatusBadge = React.memo(({ status }: StatusBadgeProps) => {
color: lightenColor(RUNNING_COLOR, 65),
label: 'Running',
},
cancelled: {
bg: 'var(--terminal-status-info-bg)',
color: 'var(--terminal-status-info-color)',
label: 'Cancelled',
},
info: {
bg: 'var(--terminal-status-info-bg)',
color: 'var(--terminal-status-info-color)',
@@ -271,6 +290,7 @@ export interface ExecutionLog {
executionId: string
startedAt: string
level: string
status: string
trigger: string
triggerUserId: string | null
triggerInputs?: unknown
@@ -291,6 +311,7 @@ interface RawLogResponse extends LogWithDuration, LogWithExecutionData {
endedAt?: string
createdAt?: string
level?: string
status?: string
trigger?: string
triggerUserId?: string | null
error?: string
@@ -331,6 +352,7 @@ export function mapToExecutionLog(log: RawLogResponse): ExecutionLog {
executionId: log.executionId,
startedAt,
level: log.level || 'info',
status: log.status || 'completed',
trigger: log.trigger || 'manual',
triggerUserId: log.triggerUserId || null,
triggerInputs: undefined,
@@ -365,6 +387,7 @@ export function mapToExecutionLogAlt(log: RawLogResponse): ExecutionLog {
executionId: log.executionId,
startedAt: log.createdAt || log.startedAt || new Date().toISOString(),
level: log.level || 'info',
status: log.status || 'completed',
trigger: log.trigger || 'manual',
triggerUserId: log.triggerUserId || null,
triggerInputs: undefined,

View File

@@ -1,7 +1,15 @@
'use client'
import { type KeyboardEvent, useCallback, useEffect, useMemo, useRef, useState } from 'react'
import { AlertCircle, ArrowDownToLine, ArrowUp, MoreVertical, Paperclip, X } from 'lucide-react'
import {
AlertCircle,
ArrowDownToLine,
ArrowUp,
MoreVertical,
Paperclip,
Square,
X,
} from 'lucide-react'
import {
Badge,
Button,
@@ -211,7 +219,7 @@ export function Chat() {
const { entries } = useTerminalConsoleStore()
const { isExecuting } = useExecutionStore()
const { handleRunWorkflow } = useWorkflowExecution()
const { handleRunWorkflow, handleCancelExecution } = useWorkflowExecution()
const { data: session } = useSession()
const { addToQueue } = useOperationQueue()
@@ -224,7 +232,7 @@ export function Chat() {
// Refs
const inputRef = useRef<HTMLInputElement>(null)
const timeoutRef = useRef<NodeJS.Timeout | null>(null)
const abortControllerRef = useRef<AbortController | null>(null)
const streamReaderRef = useRef<ReadableStreamDefaultReader<Uint8Array> | null>(null)
// File upload hook
const {
@@ -436,10 +444,28 @@ export function Chat() {
useEffect(() => {
return () => {
timeoutRef.current && clearTimeout(timeoutRef.current)
abortControllerRef.current?.abort()
streamReaderRef.current?.cancel()
}
}, [])
// React to execution cancellation from run button
useEffect(() => {
if (!isExecuting && isStreaming) {
const lastMessage = workflowMessages[workflowMessages.length - 1]
if (lastMessage?.isStreaming) {
streamReaderRef.current?.cancel()
streamReaderRef.current = null
finalizeMessageStream(lastMessage.id)
}
}
}, [isExecuting, isStreaming, workflowMessages, finalizeMessageStream])
const handleStopStreaming = useCallback(() => {
streamReaderRef.current?.cancel()
streamReaderRef.current = null
handleCancelExecution()
}, [handleCancelExecution])
/**
* Processes streaming response from workflow execution
* Reads the stream chunk by chunk and updates the message content in real-time
@@ -449,6 +475,7 @@ export function Chat() {
const processStreamingResponse = useCallback(
async (stream: ReadableStream, responseMessageId: string) => {
const reader = stream.getReader()
streamReaderRef.current = reader
const decoder = new TextDecoder()
let accumulatedContent = ''
let buffer = ''
@@ -509,8 +536,15 @@ export function Chat() {
}
}
} catch (error) {
logger.error('Error processing stream:', error)
if ((error as Error)?.name !== 'AbortError') {
logger.error('Error processing stream:', error)
}
finalizeMessageStream(responseMessageId)
} finally {
// Only clear ref if it's still our reader (prevents clobbering a new stream)
if (streamReaderRef.current === reader) {
streamReaderRef.current = null
}
focusInput(100)
}
},
@@ -590,10 +624,6 @@ export function Chat() {
}
setHistoryIndex(-1)
// Reset abort controller
abortControllerRef.current?.abort()
abortControllerRef.current = new AbortController()
const conversationId = getConversationId(activeWorkflowId)
try {
@@ -1022,22 +1052,31 @@ export function Chat() {
<Paperclip className='!h-3.5 !w-3.5' />
</Badge>
<Button
onClick={handleSendMessage}
disabled={
(!chatMessage.trim() && chatFiles.length === 0) ||
!activeWorkflowId ||
isExecuting
}
className={cn(
'h-[22px] w-[22px] rounded-full p-0 transition-colors',
chatMessage.trim() || chatFiles.length > 0
? '!bg-[var(--c-C0C0C0)] hover:!bg-[var(--c-D0D0D0)]'
: '!bg-[var(--c-C0C0C0)]'
)}
>
<ArrowUp className='h-3.5 w-3.5 text-black' strokeWidth={2.25} />
</Button>
{isStreaming ? (
<Button
onClick={handleStopStreaming}
className='h-[22px] w-[22px] rounded-full p-0 transition-colors !bg-[var(--c-C0C0C0)] hover:!bg-[var(--c-D0D0D0)]'
>
<Square className='h-2.5 w-2.5 fill-black text-black' />
</Button>
) : (
<Button
onClick={handleSendMessage}
disabled={
(!chatMessage.trim() && chatFiles.length === 0) ||
!activeWorkflowId ||
isExecuting
}
className={cn(
'h-[22px] w-[22px] rounded-full p-0 transition-colors',
chatMessage.trim() || chatFiles.length > 0
? '!bg-[var(--c-C0C0C0)] hover:!bg-[var(--c-D0D0D0)]'
: '!bg-[var(--c-C0C0C0)]'
)}
>
<ArrowUp className='h-3.5 w-3.5 text-black' strokeWidth={2.25} />
</Button>
)}
</div>
</div>

View File

@@ -18,6 +18,7 @@ import { getEnv } from '@/lib/core/config/env'
import { createLogger } from '@/lib/logs/console/logger'
import { getInputFormatExample as getInputFormatExampleUtil } from '@/lib/workflows/operations/deployment-utils'
import type { WorkflowDeploymentVersionResponse } from '@/lib/workflows/persistence/utils'
import { startsWithUuid } from '@/executor/constants'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
import type { WorkflowState } from '@/stores/workflows/workflow/types'
@@ -289,10 +290,9 @@ export function DeployModal({
if (!open || selectedStreamingOutputs.length === 0) return
const blocks = Object.values(useWorkflowStore.getState().blocks)
const UUID_REGEX = /^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}/i
const validOutputs = selectedStreamingOutputs.filter((outputId) => {
if (UUID_REGEX.test(outputId)) {
if (startsWithUuid(outputId)) {
const underscoreIndex = outputId.indexOf('_')
if (underscoreIndex === -1) return false
@@ -464,6 +464,8 @@ export function DeployModal({
setDeploymentInfo((prev) => (prev ? { ...prev, needsRedeployment: false } : prev))
} catch (error: unknown) {
logger.error('Error redeploying workflow:', { error })
const errorMessage = error instanceof Error ? error.message : 'Failed to redeploy workflow'
setApiDeployError(errorMessage)
} finally {
setIsSubmitting(false)
}

View File

@@ -1,6 +1,10 @@
import { useCallback, useState } from 'react'
import { createLogger } from '@/lib/logs/console/logger'
import { useNotificationStore } from '@/stores/notifications/store'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { mergeSubblockState } from '@/stores/workflows/utils'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
import { runPreDeployChecks } from './use-predeploy-checks'
const logger = createLogger('useDeployment')
@@ -20,54 +24,94 @@ export function useDeployment({
}: UseDeploymentProps) {
const [isDeploying, setIsDeploying] = useState(false)
const setDeploymentStatus = useWorkflowRegistry((state) => state.setDeploymentStatus)
const addNotification = useNotificationStore((state) => state.addNotification)
const blocks = useWorkflowStore((state) => state.blocks)
const edges = useWorkflowStore((state) => state.edges)
const loops = useWorkflowStore((state) => state.loops)
const parallels = useWorkflowStore((state) => state.parallels)
/**
* Handle initial deployment and open modal
* Handle deploy button click
* First deploy: calls API to deploy, then opens modal on success
* Redeploy: validates client-side, then opens modal if valid
*/
const handleDeployClick = useCallback(async () => {
if (!workflowId) return { success: false, shouldOpenModal: false }
// If undeployed, deploy first then open modal
if (!isDeployed) {
setIsDeploying(true)
try {
const response = await fetch(`/api/workflows/${workflowId}/deploy`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify({
deployChatEnabled: false,
}),
if (isDeployed) {
const liveBlocks = mergeSubblockState(blocks, workflowId)
const checkResult = runPreDeployChecks({
blocks: liveBlocks,
edges,
loops,
parallels,
workflowId,
})
if (!checkResult.passed) {
addNotification({
level: 'error',
message: checkResult.error || 'Pre-deploy validation failed',
workflowId,
})
if (response.ok) {
const responseData = await response.json()
const isDeployedStatus = responseData.isDeployed ?? false
const deployedAtTime = responseData.deployedAt
? new Date(responseData.deployedAt)
: undefined
setDeploymentStatus(
workflowId,
isDeployedStatus,
deployedAtTime,
responseData.apiKey || ''
)
await refetchDeployedState()
return { success: true, shouldOpenModal: true }
}
return { success: false, shouldOpenModal: true }
} catch (error) {
logger.error('Error deploying workflow:', error)
return { success: false, shouldOpenModal: true }
} finally {
setIsDeploying(false)
return { success: false, shouldOpenModal: false }
}
return { success: true, shouldOpenModal: true }
}
// If already deployed, just signal to open modal
return { success: true, shouldOpenModal: true }
}, [workflowId, isDeployed, refetchDeployedState, setDeploymentStatus])
setIsDeploying(true)
try {
const response = await fetch(`/api/workflows/${workflowId}/deploy`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify({
deployChatEnabled: false,
}),
})
if (response.ok) {
const responseData = await response.json()
const isDeployedStatus = responseData.isDeployed ?? false
const deployedAtTime = responseData.deployedAt
? new Date(responseData.deployedAt)
: undefined
setDeploymentStatus(workflowId, isDeployedStatus, deployedAtTime, responseData.apiKey || '')
await refetchDeployedState()
return { success: true, shouldOpenModal: true }
}
const errorData = await response.json()
const errorMessage = errorData.error || 'Failed to deploy workflow'
addNotification({
level: 'error',
message: errorMessage,
workflowId,
})
return { success: false, shouldOpenModal: false }
} catch (error) {
logger.error('Error deploying workflow:', error)
const errorMessage = error instanceof Error ? error.message : 'Failed to deploy workflow'
addNotification({
level: 'error',
message: errorMessage,
workflowId,
})
return { success: false, shouldOpenModal: false }
} finally {
setIsDeploying(false)
}
}, [
workflowId,
isDeployed,
blocks,
edges,
loops,
parallels,
refetchDeployedState,
setDeploymentStatus,
addNotification,
])
return {
isDeploying,

View File

@@ -0,0 +1,65 @@
import type { Edge } from 'reactflow'
import { validateWorkflowSchedules } from '@/lib/workflows/schedules/validation'
import { Serializer } from '@/serializer'
import type { BlockState, Loop, Parallel } from '@/stores/workflows/workflow/types'
export interface PreDeployCheckResult {
passed: boolean
error?: string
}
export interface PreDeployContext {
blocks: Record<string, BlockState>
edges: Edge[]
loops: Record<string, Loop>
parallels: Record<string, Parallel>
workflowId: string
}
type PreDeployCheck = (context: PreDeployContext) => PreDeployCheckResult
/**
* Validates schedule block configuration
*/
const scheduleValidationCheck: PreDeployCheck = ({ blocks }) => {
const result = validateWorkflowSchedules(blocks)
return {
passed: result.isValid,
error: result.error ? `Invalid schedule configuration: ${result.error}` : undefined,
}
}
/**
* Validates required fields using the serializer's validation
*/
const requiredFieldsCheck: PreDeployCheck = ({ blocks, edges, loops, parallels }) => {
try {
const serializer = new Serializer()
serializer.serializeWorkflow(blocks, edges, loops, parallels, true)
return { passed: true }
} catch (error) {
return {
passed: false,
error: error instanceof Error ? error.message : 'Workflow validation failed',
}
}
}
/**
* All pre-deploy checks in execution order
* Add new checks here as needed
*/
const preDeployChecks: PreDeployCheck[] = [scheduleValidationCheck, requiredFieldsCheck]
/**
* Runs all pre-deploy checks and returns the first failure or success
*/
export function runPreDeployChecks(context: PreDeployContext): PreDeployCheckResult {
for (const check of preDeployChecks) {
const result = check(context)
if (!result.passed) {
return result
}
}
return { passed: true }
}
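As a hypothetical illustration of the extension point noted above (the names below are invented, not part of this commit), a new check implements PreDeployCheck and is appended to preDeployChecks:
// Hypothetical example only.
const hasTriggerBlockCheck: PreDeployCheck = ({ blocks }) => {
  const hasTrigger = Object.values(blocks).some(
    (block) => block.type === 'starter' || block.type === 'schedule'
  )
  return {
    passed: hasTrigger,
    error: hasTrigger ? undefined : 'Workflow has no trigger block',
  }
}
// Registered by appending it: [scheduleValidationCheck, requiredFieldsCheck, hasTriggerBlockCheck]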

View File

@@ -7,7 +7,6 @@ import { client } from '@/lib/auth/auth-client'
import { createLogger } from '@/lib/logs/console/logger'
import {
getProviderIdFromServiceId,
getServiceIdFromScopes,
OAUTH_PROVIDERS,
type OAuthProvider,
parseProvider,
@@ -21,7 +20,7 @@ export interface OAuthRequiredModalProps {
provider: OAuthProvider
toolName: string
requiredScopes?: string[]
serviceId?: string
serviceId: string
newScopes?: string[]
}
@@ -301,7 +300,6 @@ export function OAuthRequiredModal({
serviceId,
newScopes = [],
}: OAuthRequiredModalProps) {
const effectiveServiceId = serviceId || getServiceIdFromScopes(provider, requiredScopes)
const { baseProvider } = parseProvider(provider)
const baseProviderConfig = OAUTH_PROVIDERS[baseProvider]
@@ -309,8 +307,8 @@ export function OAuthRequiredModal({
let ProviderIcon = baseProviderConfig?.icon || (() => null)
if (baseProviderConfig) {
for (const service of Object.values(baseProviderConfig.services)) {
if (service.id === effectiveServiceId || service.providerId === provider) {
for (const [key, service] of Object.entries(baseProviderConfig.services)) {
if (key === serviceId || service.providerId === provider) {
providerName = service.name
ProviderIcon = service.icon
break
@@ -343,7 +341,7 @@ export function OAuthRequiredModal({
const handleConnectDirectly = async () => {
try {
const providerId = getProviderIdFromServiceId(effectiveServiceId)
const providerId = getProviderIdFromServiceId(serviceId)
onClose()

View File

@@ -116,7 +116,7 @@ export function CredentialSelector({
setStoreValue('')
}, [invalidSelection, selectedId, effectiveProviderId, setStoreValue])
useCredentialRefreshTriggers(refetchCredentials, effectiveProviderId, provider)
useCredentialRefreshTriggers(refetchCredentials)
const handleOpenChange = useCallback(
(isOpen: boolean) => {
@@ -268,11 +268,7 @@ export function CredentialSelector({
)
}
function useCredentialRefreshTriggers(
refetchCredentials: () => Promise<unknown>,
effectiveProviderId?: string,
provider?: OAuthProvider
) {
function useCredentialRefreshTriggers(refetchCredentials: () => Promise<unknown>) {
useEffect(() => {
const refresh = () => {
void refetchCredentials()
@@ -290,26 +286,12 @@ function useCredentialRefreshTriggers(
}
}
const handleCredentialDisconnected = (event: Event) => {
const customEvent = event as CustomEvent<{ providerId?: string }>
const providerId = customEvent.detail?.providerId
if (
providerId &&
(providerId === effectiveProviderId || (provider && providerId.startsWith(provider)))
) {
refresh()
}
}
document.addEventListener('visibilitychange', handleVisibilityChange)
window.addEventListener('pageshow', handlePageShow)
window.addEventListener('credential-disconnected', handleCredentialDisconnected)
return () => {
document.removeEventListener('visibilitychange', handleVisibilityChange)
window.removeEventListener('pageshow', handlePageShow)
window.removeEventListener('credential-disconnected', handleCredentialDisconnected)
}
}, [refetchCredentials, effectiveProviderId, provider])
}, [refetchCredentials])
}

View File

@@ -21,7 +21,7 @@ export { McpToolSelector } from './mcp-server-modal/mcp-tool-selector'
export { MessagesInput } from './messages-input/messages-input'
export { ProjectSelectorInput } from './project-selector/project-selector-input'
export { ResponseFormat } from './response/response-format'
export { ScheduleSave } from './schedule-save/schedule-save'
export { ScheduleInfo } from './schedule-info/schedule-info'
export { ShortInput } from './short-input/short-input'
export { SlackSelectorInput } from './slack-selector/slack-selector-input'
export { SliderInput } from './slider-input/slider-input'

View File

@@ -0,0 +1,194 @@
import { useCallback, useEffect, useState } from 'react'
import { AlertTriangle } from 'lucide-react'
import { useParams } from 'next/navigation'
import { createLogger } from '@/lib/logs/console/logger'
import { parseCronToHumanReadable } from '@/lib/workflows/schedules/utils'
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
const logger = createLogger('ScheduleInfo')
interface ScheduleInfoProps {
blockId: string
isPreview?: boolean
}
/**
* Schedule status display component.
* Shows the current schedule status, next run time, and last run time.
* Schedule creation/deletion is handled during workflow deploy/undeploy.
*/
export function ScheduleInfo({ blockId, isPreview = false }: ScheduleInfoProps) {
const params = useParams()
const workflowId = params.workflowId as string
const [scheduleStatus, setScheduleStatus] = useState<'active' | 'disabled' | null>(null)
const [nextRunAt, setNextRunAt] = useState<Date | null>(null)
const [lastRanAt, setLastRanAt] = useState<Date | null>(null)
const [failedCount, setFailedCount] = useState<number>(0)
const [isLoadingStatus, setIsLoadingStatus] = useState(true)
const [savedCronExpression, setSavedCronExpression] = useState<string | null>(null)
const [isRedeploying, setIsRedeploying] = useState(false)
const [hasSchedule, setHasSchedule] = useState(false)
const scheduleTimezone = useSubBlockStore((state) => state.getValue(blockId, 'timezone'))
const fetchScheduleStatus = useCallback(async () => {
if (isPreview) return
setIsLoadingStatus(true)
try {
const response = await fetch(`/api/schedules?workflowId=${workflowId}&blockId=${blockId}`)
if (response.ok) {
const data = await response.json()
if (data.schedule) {
setHasSchedule(true)
setScheduleStatus(data.schedule.status)
setNextRunAt(data.schedule.nextRunAt ? new Date(data.schedule.nextRunAt) : null)
setLastRanAt(data.schedule.lastRanAt ? new Date(data.schedule.lastRanAt) : null)
setFailedCount(data.schedule.failedCount || 0)
setSavedCronExpression(data.schedule.cronExpression || null)
} else {
// No schedule exists (workflow not deployed or no schedule block)
setHasSchedule(false)
setScheduleStatus(null)
setNextRunAt(null)
setLastRanAt(null)
setFailedCount(0)
setSavedCronExpression(null)
}
}
} catch (error) {
logger.error('Error fetching schedule status', { error })
} finally {
setIsLoadingStatus(false)
}
}, [workflowId, blockId, isPreview])
useEffect(() => {
if (!isPreview) {
fetchScheduleStatus()
}
}, [isPreview, fetchScheduleStatus])
/**
* Handles redeploying the workflow when schedule is disabled due to failures.
* Redeploying will recreate the schedule with reset failure count.
*/
const handleRedeploy = async () => {
if (isPreview || isRedeploying) return
setIsRedeploying(true)
try {
const response = await fetch(`/api/workflows/${workflowId}/deploy`, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ deployChatEnabled: false }),
})
if (response.ok) {
// Refresh schedule status after redeploy
await fetchScheduleStatus()
logger.info('Workflow redeployed successfully to reset schedule', { workflowId, blockId })
} else {
const errorData = await response.json()
logger.error('Failed to redeploy workflow', { error: errorData.error })
}
} catch (error) {
logger.error('Error redeploying workflow', { error })
} finally {
setIsRedeploying(false)
}
}
// Don't render anything if there's no deployed schedule
if (!hasSchedule && !isLoadingStatus) {
return null
}
return (
<div className='mt-2'>
{isLoadingStatus ? (
<div className='flex items-center gap-2 text-muted-foreground text-sm'>
<div className='h-4 w-4 animate-spin rounded-full border-[1.5px] border-current border-t-transparent' />
Loading schedule status...
</div>
) : (
<div className='space-y-1'>
{/* Failure badge with redeploy action */}
{failedCount >= 10 && scheduleStatus === 'disabled' && (
<button
type='button'
onClick={handleRedeploy}
disabled={isRedeploying}
className='flex w-full cursor-pointer items-center gap-2 rounded-md bg-destructive/10 px-3 py-2 text-left text-destructive text-sm transition-colors hover:bg-destructive/20 disabled:cursor-not-allowed disabled:opacity-50'
>
{isRedeploying ? (
<div className='h-4 w-4 animate-spin rounded-full border-[1.5px] border-current border-t-transparent' />
) : (
<AlertTriangle className='h-4 w-4 flex-shrink-0' />
)}
<span>
{isRedeploying
? 'Redeploying...'
: `Schedule disabled after ${failedCount} failures - Click to redeploy`}
</span>
</button>
)}
{/* Show warning for failed runs under threshold */}
{failedCount > 0 && failedCount < 10 && (
<div className='flex items-center gap-2'>
<span className='text-destructive text-sm'>
{failedCount} failed run{failedCount !== 1 ? 's' : ''}
</span>
</div>
)}
{/* Cron expression human-readable description */}
{savedCronExpression && (
<p className='text-muted-foreground text-sm'>
Runs{' '}
{parseCronToHumanReadable(
savedCronExpression,
scheduleTimezone || 'UTC'
).toLowerCase()}
</p>
)}
{/* Next run time */}
{nextRunAt && (
<p className='text-sm'>
<span className='font-medium'>Next run:</span>{' '}
{nextRunAt.toLocaleString('en-US', {
timeZone: scheduleTimezone || 'UTC',
year: 'numeric',
month: 'numeric',
day: 'numeric',
hour: 'numeric',
minute: '2-digit',
hour12: true,
})}{' '}
{scheduleTimezone || 'UTC'}
</p>
)}
{/* Last ran time */}
{lastRanAt && (
<p className='text-muted-foreground text-sm'>
<span className='font-medium'>Last ran:</span>{' '}
{lastRanAt.toLocaleString('en-US', {
timeZone: scheduleTimezone || 'UTC',
year: 'numeric',
month: 'numeric',
day: 'numeric',
hour: 'numeric',
minute: '2-digit',
hour12: true,
})}{' '}
{scheduleTimezone || 'UTC'}
</p>
)}
</div>
)}
</div>
)
}
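For reference, the response shape consumed from GET /api/schedules above is roughly the following (inferred from the fields read in fetchScheduleStatus; illustrative, not an authoritative contract):
// Inferred shape; illustrative only.
interface ScheduleStatusResponse {
  schedule?: {
    status: 'active' | 'disabled'
    nextRunAt?: string | null
    lastRanAt?: string | null
    failedCount?: number
    cronExpression?: string | null
  }
}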

View File

@@ -1,499 +0,0 @@
import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
import { useParams } from 'next/navigation'
import { Button, Modal, ModalBody, ModalContent, ModalFooter, ModalHeader } from '@/components/emcn'
import { Trash } from '@/components/emcn/icons/trash'
import { Alert, AlertDescription } from '@/components/ui/alert'
import { cn } from '@/lib/core/utils/cn'
import { createLogger } from '@/lib/logs/console/logger'
import { parseCronToHumanReadable } from '@/lib/workflows/schedules/utils'
import { useCollaborativeWorkflow } from '@/hooks/use-collaborative-workflow'
import { useScheduleManagement } from '@/hooks/use-schedule-management'
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
const logger = createLogger('ScheduleSave')
interface ScheduleSaveProps {
blockId: string
isPreview?: boolean
disabled?: boolean
}
type SaveStatus = 'idle' | 'saving' | 'saved' | 'error'
export function ScheduleSave({ blockId, isPreview = false, disabled = false }: ScheduleSaveProps) {
const params = useParams()
const workflowId = params.workflowId as string
const [saveStatus, setSaveStatus] = useState<SaveStatus>('idle')
const [errorMessage, setErrorMessage] = useState<string | null>(null)
const [deleteStatus, setDeleteStatus] = useState<'idle' | 'deleting'>('idle')
const [showDeleteDialog, setShowDeleteDialog] = useState(false)
const [scheduleStatus, setScheduleStatus] = useState<'active' | 'disabled' | null>(null)
const [nextRunAt, setNextRunAt] = useState<Date | null>(null)
const [lastRanAt, setLastRanAt] = useState<Date | null>(null)
const [failedCount, setFailedCount] = useState<number>(0)
const [isLoadingStatus, setIsLoadingStatus] = useState(false)
const [savedCronExpression, setSavedCronExpression] = useState<string | null>(null)
const { collaborativeSetSubblockValue } = useCollaborativeWorkflow()
const { scheduleId, saveConfig, deleteConfig, isSaving } = useScheduleManagement({
blockId,
isPreview,
})
const scheduleType = useSubBlockStore((state) => state.getValue(blockId, 'scheduleType'))
const scheduleMinutesInterval = useSubBlockStore((state) =>
state.getValue(blockId, 'minutesInterval')
)
const scheduleHourlyMinute = useSubBlockStore((state) => state.getValue(blockId, 'hourlyMinute'))
const scheduleDailyTime = useSubBlockStore((state) => state.getValue(blockId, 'dailyTime'))
const scheduleWeeklyDay = useSubBlockStore((state) => state.getValue(blockId, 'weeklyDay'))
const scheduleWeeklyTime = useSubBlockStore((state) => state.getValue(blockId, 'weeklyDayTime'))
const scheduleMonthlyDay = useSubBlockStore((state) => state.getValue(blockId, 'monthlyDay'))
const scheduleMonthlyTime = useSubBlockStore((state) => state.getValue(blockId, 'monthlyTime'))
const scheduleCronExpression = useSubBlockStore((state) =>
state.getValue(blockId, 'cronExpression')
)
const scheduleTimezone = useSubBlockStore((state) => state.getValue(blockId, 'timezone'))
const validateRequiredFields = useCallback((): { valid: boolean; missingFields: string[] } => {
const missingFields: string[] = []
if (!scheduleType) {
missingFields.push('Frequency')
return { valid: false, missingFields }
}
switch (scheduleType) {
case 'minutes': {
const minutesNum = Number(scheduleMinutesInterval)
if (
!scheduleMinutesInterval ||
Number.isNaN(minutesNum) ||
minutesNum < 1 ||
minutesNum > 1440
) {
missingFields.push('Minutes Interval (must be 1-1440)')
}
break
}
case 'hourly': {
const hourlyNum = Number(scheduleHourlyMinute)
if (
scheduleHourlyMinute === null ||
scheduleHourlyMinute === undefined ||
scheduleHourlyMinute === '' ||
Number.isNaN(hourlyNum) ||
hourlyNum < 0 ||
hourlyNum > 59
) {
missingFields.push('Minute (must be 0-59)')
}
break
}
case 'daily':
if (!scheduleDailyTime) {
missingFields.push('Time')
}
break
case 'weekly':
if (!scheduleWeeklyDay) {
missingFields.push('Day of Week')
}
if (!scheduleWeeklyTime) {
missingFields.push('Time')
}
break
case 'monthly': {
const monthlyNum = Number(scheduleMonthlyDay)
if (!scheduleMonthlyDay || Number.isNaN(monthlyNum) || monthlyNum < 1 || monthlyNum > 31) {
missingFields.push('Day of Month (must be 1-31)')
}
if (!scheduleMonthlyTime) {
missingFields.push('Time')
}
break
}
case 'custom':
if (!scheduleCronExpression) {
missingFields.push('Cron Expression')
}
break
}
if (!scheduleTimezone && scheduleType !== 'minutes' && scheduleType !== 'hourly') {
missingFields.push('Timezone')
}
return {
valid: missingFields.length === 0,
missingFields,
}
}, [
scheduleType,
scheduleMinutesInterval,
scheduleHourlyMinute,
scheduleDailyTime,
scheduleWeeklyDay,
scheduleWeeklyTime,
scheduleMonthlyDay,
scheduleMonthlyTime,
scheduleCronExpression,
scheduleTimezone,
])
const requiredSubBlockIds = useMemo(() => {
return [
'scheduleType',
'minutesInterval',
'hourlyMinute',
'dailyTime',
'weeklyDay',
'weeklyDayTime',
'monthlyDay',
'monthlyTime',
'cronExpression',
'timezone',
]
}, [])
const subscribedSubBlockValues = useSubBlockStore(
useCallback(
(state) => {
const values: Record<string, any> = {}
requiredSubBlockIds.forEach((subBlockId) => {
const value = state.getValue(blockId, subBlockId)
if (value !== null && value !== undefined && value !== '') {
values[subBlockId] = value
}
})
return values
},
[blockId, requiredSubBlockIds]
)
)
const previousValuesRef = useRef<Record<string, any>>({})
const validationTimeoutRef = useRef<NodeJS.Timeout | null>(null)
useEffect(() => {
if (saveStatus !== 'error') {
previousValuesRef.current = subscribedSubBlockValues
return
}
const hasChanges = Object.keys(subscribedSubBlockValues).some(
(key) =>
previousValuesRef.current[key] !== (subscribedSubBlockValues as Record<string, any>)[key]
)
if (!hasChanges) {
return
}
if (validationTimeoutRef.current) {
clearTimeout(validationTimeoutRef.current)
}
validationTimeoutRef.current = setTimeout(() => {
const validation = validateRequiredFields()
if (validation.valid) {
setErrorMessage(null)
setSaveStatus('idle')
logger.debug('Error cleared after validation passed', { blockId })
} else {
setErrorMessage(`Missing required fields: ${validation.missingFields.join(', ')}`)
logger.debug('Error message updated', {
blockId,
missingFields: validation.missingFields,
})
}
previousValuesRef.current = subscribedSubBlockValues
}, 300)
return () => {
if (validationTimeoutRef.current) {
clearTimeout(validationTimeoutRef.current)
}
}
}, [blockId, subscribedSubBlockValues, saveStatus, validateRequiredFields])
const fetchScheduleStatus = useCallback(async () => {
if (!scheduleId || isPreview) return
setIsLoadingStatus(true)
try {
const response = await fetch(
`/api/schedules?workflowId=${workflowId}&blockId=${blockId}&mode=schedule`
)
if (response.ok) {
const data = await response.json()
if (data.schedule) {
setScheduleStatus(data.schedule.status)
setNextRunAt(data.schedule.nextRunAt ? new Date(data.schedule.nextRunAt) : null)
setLastRanAt(data.schedule.lastRanAt ? new Date(data.schedule.lastRanAt) : null)
setFailedCount(data.schedule.failedCount || 0)
setSavedCronExpression(data.schedule.cronExpression || null)
}
}
} catch (error) {
logger.error('Error fetching schedule status', { error })
} finally {
setIsLoadingStatus(false)
}
}, [workflowId, blockId, scheduleId, isPreview])
useEffect(() => {
if (scheduleId && !isPreview) {
fetchScheduleStatus()
}
}, [scheduleId, isPreview, fetchScheduleStatus])
const handleSave = async () => {
if (isPreview || disabled) return
setSaveStatus('saving')
setErrorMessage(null)
try {
const validation = validateRequiredFields()
if (!validation.valid) {
setErrorMessage(`Missing required fields: ${validation.missingFields.join(', ')}`)
setSaveStatus('error')
return
}
const result = await saveConfig()
if (!result.success) {
throw new Error('Save config returned false')
}
setSaveStatus('saved')
setErrorMessage(null)
const scheduleIdValue = useSubBlockStore.getState().getValue(blockId, 'scheduleId')
collaborativeSetSubblockValue(blockId, 'scheduleId', scheduleIdValue)
if (result.nextRunAt) {
setNextRunAt(new Date(result.nextRunAt))
setScheduleStatus('active')
}
// Fetch additional status info, then apply cron from save result to prevent stale data
await fetchScheduleStatus()
if (result.cronExpression) {
setSavedCronExpression(result.cronExpression)
}
setTimeout(() => {
setSaveStatus('idle')
}, 2000)
logger.info('Schedule configuration saved successfully', {
blockId,
hasScheduleId: !!scheduleId,
})
} catch (error: any) {
setSaveStatus('error')
setErrorMessage(error.message || 'An error occurred while saving.')
logger.error('Error saving schedule config', { error })
}
}
const handleDelete = async () => {
if (isPreview || disabled) return
setShowDeleteDialog(false)
setDeleteStatus('deleting')
try {
const success = await deleteConfig()
if (!success) {
throw new Error('Failed to delete schedule')
}
setScheduleStatus(null)
setNextRunAt(null)
setLastRanAt(null)
setFailedCount(0)
collaborativeSetSubblockValue(blockId, 'scheduleId', null)
logger.info('Schedule deleted successfully', { blockId })
} catch (error: any) {
setErrorMessage(error.message || 'An error occurred while deleting.')
logger.error('Error deleting schedule', { error })
} finally {
setDeleteStatus('idle')
}
}
const handleDeleteConfirm = () => {
handleDelete()
}
const handleToggleStatus = async () => {
if (!scheduleId || isPreview || disabled) return
try {
const action = scheduleStatus === 'active' ? 'disable' : 'reactivate'
const response = await fetch(`/api/schedules/${scheduleId}`, {
method: 'PUT',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ action }),
})
if (response.ok) {
await fetchScheduleStatus()
logger.info(`Schedule ${action}d successfully`, { scheduleId })
} else {
throw new Error(`Failed to ${action} schedule`)
}
} catch (error: any) {
setErrorMessage(
error.message ||
`An error occurred while ${scheduleStatus === 'active' ? 'disabling' : 'reactivating'} the schedule.`
)
logger.error('Error toggling schedule status', { error })
}
}
return (
<div className='mt-2'>
<div className='flex gap-2'>
<Button
variant='default'
onClick={handleSave}
disabled={disabled || isPreview || isSaving || saveStatus === 'saving' || isLoadingStatus}
className={cn(
'h-9 flex-1 rounded-[8px] transition-all duration-200',
saveStatus === 'saved' && 'bg-green-600 hover:bg-green-700',
saveStatus === 'error' && 'bg-red-600 hover:bg-red-700'
)}
>
{saveStatus === 'saving' && (
<>
<div className='mr-2 h-4 w-4 animate-spin rounded-full border-[1.5px] border-current border-t-transparent' />
Saving...
</>
)}
{saveStatus === 'saved' && 'Saved'}
{saveStatus === 'idle' && (scheduleId ? 'Update Schedule' : 'Save Schedule')}
{saveStatus === 'error' && 'Error'}
</Button>
{scheduleId && (
<Button
variant='default'
onClick={() => setShowDeleteDialog(true)}
disabled={disabled || isPreview || deleteStatus === 'deleting' || isSaving}
className='h-9 rounded-[8px] px-3'
>
{deleteStatus === 'deleting' ? (
<div className='h-4 w-4 animate-spin rounded-full border-[1.5px] border-current border-t-transparent' />
) : (
<Trash className='h-[14px] w-[14px]' />
)}
</Button>
)}
</div>
{errorMessage && (
<Alert variant='destructive' className='mt-2'>
<AlertDescription>{errorMessage}</AlertDescription>
</Alert>
)}
{scheduleId && (scheduleStatus || isLoadingStatus || nextRunAt) && (
<div className='mt-2 space-y-1'>
{isLoadingStatus ? (
<div className='flex items-center gap-2 text-muted-foreground text-sm'>
<div className='h-4 w-4 animate-spin rounded-full border-[1.5px] border-current border-t-transparent' />
Loading schedule status...
</div>
) : (
<>
{failedCount > 0 && (
<div className='flex items-center gap-2'>
<span className='text-destructive text-sm'>
{failedCount} failed run{failedCount !== 1 ? 's' : ''}
</span>
</div>
)}
{savedCronExpression && (
<p className='text-muted-foreground text-sm'>
Runs{' '}
{parseCronToHumanReadable(
savedCronExpression,
scheduleTimezone || 'UTC'
).toLowerCase()}
</p>
)}
{nextRunAt && (
<p className='text-sm'>
<span className='font-medium'>Next run:</span>{' '}
{nextRunAt.toLocaleString('en-US', {
timeZone: scheduleTimezone || 'UTC',
year: 'numeric',
month: 'numeric',
day: 'numeric',
hour: 'numeric',
minute: '2-digit',
hour12: true,
})}{' '}
{scheduleTimezone || 'UTC'}
</p>
)}
{lastRanAt && (
<p className='text-muted-foreground text-sm'>
<span className='font-medium'>Last ran:</span>{' '}
{lastRanAt.toLocaleString('en-US', {
timeZone: scheduleTimezone || 'UTC',
year: 'numeric',
month: 'numeric',
day: 'numeric',
hour: 'numeric',
minute: '2-digit',
hour12: true,
})}{' '}
{scheduleTimezone || 'UTC'}
</p>
)}
</>
)}
</div>
)}
<Modal open={showDeleteDialog} onOpenChange={setShowDeleteDialog}>
<ModalContent size='sm'>
<ModalHeader>Delete Schedule</ModalHeader>
<ModalBody>
<p className='text-[12px] text-[var(--text-tertiary)]'>
Are you sure you want to delete this schedule configuration? This will stop the
workflow from running automatically.{' '}
<span className='text-[var(--text-error)]'>This action cannot be undone.</span>
</p>
</ModalBody>
<ModalFooter>
<Button variant='active' onClick={() => setShowDeleteDialog(false)}>
Cancel
</Button>
<Button
variant='primary'
onClick={handleDeleteConfirm}
className='!bg-[var(--text-error)] !text-white hover:!bg-[var(--text-error)]/90'
>
Delete
</Button>
</ModalFooter>
</ModalContent>
</Modal>
</div>
)
}
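The enable/disable flow above reduces to a single PUT against the schedules API. A minimal standalone sketch, assuming only the `/api/schedules/:id` contract and `{ action }` body shown in `handleToggleStatus` (the `toggleSchedule` helper name is illustrative):

```ts
// Sketch of the toggle call made by handleToggleStatus above.
// Assumes /api/schedules/:id accepts { action: 'disable' | 'reactivate' }.
async function toggleSchedule(scheduleId: string, isActive: boolean): Promise<void> {
  const action = isActive ? 'disable' : 'reactivate'
  const response = await fetch(`/api/schedules/${scheduleId}`, {
    method: 'PUT',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ action }),
  })
  if (!response.ok) {
    throw new Error(`Failed to ${action} schedule`)
  }
}
```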

View File

@@ -3,7 +3,7 @@
import { useEffect, useMemo, useState } from 'react'
import { useParams } from 'next/navigation'
import { Tooltip } from '@/components/emcn'
import { getProviderIdFromServiceId } from '@/lib/oauth/oauth'
import { getProviderIdFromServiceId } from '@/lib/oauth'
import { SelectorCombobox } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/selector-combobox/selector-combobox'
import { useDependsOnGate } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-depends-on-gate'
import { useForeignCredential } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-foreign-credential'

View File

@@ -4,7 +4,6 @@ import { Button, Combobox } from '@/components/emcn/components'
import {
getCanonicalScopesForProvider,
getProviderIdFromServiceId,
getServiceIdFromScopes,
OAUTH_PROVIDERS,
type OAuthProvider,
type OAuthService,
@@ -45,7 +44,7 @@ interface ToolCredentialSelectorProps {
provider: OAuthProvider
requiredScopes?: string[]
label?: string
serviceId?: OAuthService
serviceId: OAuthService
disabled?: boolean
}
@@ -65,15 +64,7 @@ export function ToolCredentialSelector({
const selectedId = value || ''
const effectiveServiceId = useMemo(
() => serviceId || getServiceIdFromScopes(provider, requiredScopes),
[provider, requiredScopes, serviceId]
)
const effectiveProviderId = useMemo(
() => getProviderIdFromServiceId(effectiveServiceId),
[effectiveServiceId]
)
const effectiveProviderId = useMemo(() => getProviderIdFromServiceId(serviceId), [serviceId])
const {
data: credentials = [],
@@ -126,7 +117,7 @@ export function ToolCredentialSelector({
onChange('')
}, [invalidSelection, onChange])
useCredentialRefreshTriggers(refetchCredentials, effectiveProviderId, provider)
useCredentialRefreshTriggers(refetchCredentials)
const handleOpenChange = useCallback(
(isOpen: boolean) => {
@@ -240,18 +231,14 @@ export function ToolCredentialSelector({
toolName={getProviderName(provider)}
requiredScopes={getCanonicalScopesForProvider(effectiveProviderId)}
newScopes={missingRequiredScopes}
serviceId={effectiveServiceId}
serviceId={serviceId}
/>
)}
</>
)
}
function useCredentialRefreshTriggers(
refetchCredentials: () => Promise<unknown>,
effectiveProviderId?: string,
provider?: OAuthProvider
) {
function useCredentialRefreshTriggers(refetchCredentials: () => Promise<unknown>) {
useEffect(() => {
const refresh = () => {
void refetchCredentials()
@@ -269,26 +256,12 @@ function useCredentialRefreshTriggers(
}
}
const handleCredentialDisconnected = (event: Event) => {
const customEvent = event as CustomEvent<{ providerId?: string }>
const providerId = customEvent.detail?.providerId
if (
providerId &&
(providerId === effectiveProviderId || (provider && providerId.startsWith(provider)))
) {
refresh()
}
}
document.addEventListener('visibilitychange', handleVisibilityChange)
window.addEventListener('pageshow', handlePageShow)
window.addEventListener('credential-disconnected', handleCredentialDisconnected)
return () => {
document.removeEventListener('visibilitychange', handleVisibilityChange)
window.removeEventListener('pageshow', handlePageShow)
window.removeEventListener('credential-disconnected', handleCredentialDisconnected)
}
}, [refetchCredentials, effectiveProviderId, provider])
}, [refetchCredentials])
}

View File

@@ -24,7 +24,7 @@ import {
getProviderIdFromServiceId,
type OAuthProvider,
type OAuthService,
} from '@/lib/oauth/oauth'
} from '@/lib/oauth'
import {
CheckboxList,
Code,

View File

@@ -29,7 +29,7 @@ import {
MessagesInput,
ProjectSelectorInput,
ResponseFormat,
ScheduleSave,
ScheduleInfo,
ShortInput,
SlackSelectorInput,
SliderInput,
@@ -592,8 +592,8 @@ function SubBlockComponent({
/>
)
case 'schedule-save':
return <ScheduleSave blockId={blockId} isPreview={isPreview} disabled={disabled} />
case 'schedule-info':
return <ScheduleInfo blockId={blockId} isPreview={isPreview} />
case 'oauth-input':
return (

View File

@@ -7,9 +7,9 @@ import {
} from '@/lib/workflows/sanitization/references'
import { checkTagTrigger } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/tag-dropdown/tag-dropdown'
import { useAccessibleReferencePrefixes } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-accessible-reference-prefixes'
import { normalizeName, REFERENCE } from '@/executor/constants'
import { createEnvVarPattern, createReferencePattern } from '@/executor/utils/reference-validation'
import { useCollaborativeWorkflow } from '@/hooks/use-collaborative-workflow'
import { normalizeName } from '@/stores/workflows/utils'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
import type { BlockState } from '@/stores/workflows/workflow/types'
@@ -89,7 +89,7 @@ export function useSubflowEditor(currentBlock: BlockState | null, currentBlockId
*/
const shouldHighlightReference = useCallback(
(part: string): boolean => {
if (!part.startsWith('<') || !part.endsWith('>')) {
if (!part.startsWith(REFERENCE.START) || !part.endsWith(REFERENCE.END)) {
return false
}
@@ -108,8 +108,8 @@ export function useSubflowEditor(currentBlock: BlockState | null, currentBlockId
return true
}
const inner = reference.slice(1, -1)
const [prefix] = inner.split('.')
const inner = reference.slice(REFERENCE.START.length, -REFERENCE.END.length)
const [prefix] = inner.split(REFERENCE.PATH_DELIMITER)
const normalizedPrefix = normalizeName(prefix)
if (SYSTEM_REFERENCE_PREFIXES.has(normalizedPrefix)) {
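The `REFERENCE` constants imported from `@/executor/constants` are not shown in this diff; going by the literals they replace (`'<'`, `'>'`, `'.'`), a sketch of the prefix extraction with the constant values assumed:

```ts
// Illustrative only: the real constants live in '@/executor/constants';
// the values below are assumed from the literals they replace.
const REFERENCE = { START: '<', END: '>', PATH_DELIMITER: '.' } as const

function extractReferencePrefix(part: string): string | null {
  if (!part.startsWith(REFERENCE.START) || !part.endsWith(REFERENCE.END)) return null
  const inner = part.slice(REFERENCE.START.length, -REFERENCE.END.length)
  const [prefix] = inner.split(REFERENCE.PATH_DELIMITER)
  return prefix // e.g. '<agent1.content>' -> 'agent1'
}
```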

View File

@@ -15,17 +15,15 @@ export interface UseScheduleInfoReturn {
isLoading: boolean
/** Function to reactivate a disabled schedule */
reactivateSchedule: (scheduleId: string) => Promise<void>
/** Function to disable an active schedule */
disableSchedule: (scheduleId: string) => Promise<void>
}
/**
* Custom hook for managing schedule information
* Custom hook for fetching schedule information
*
* @param blockId - The ID of the block
* @param blockType - The type of the block
* @param workflowId - The current workflow ID
* @returns Schedule information state and operations
* @returns Schedule information state and reactivate function
*/
export function useScheduleInfo(
blockId: string,
@@ -44,7 +42,6 @@ export function useScheduleInfo(
const params = new URLSearchParams({
workflowId: wfId,
mode: 'schedule',
blockId,
})
@@ -77,6 +74,7 @@ export function useScheduleInfo(
timezone: scheduleTimezone,
status: schedule.status,
isDisabled: schedule.status === 'disabled',
failedCount: schedule.failedCount || 0,
id: schedule.id,
})
} catch (error) {
@@ -94,14 +92,12 @@ export function useScheduleInfo(
try {
const response = await fetch(`/api/schedules/${scheduleId}`, {
method: 'PUT',
headers: {
'Content-Type': 'application/json',
},
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ action: 'reactivate' }),
})
if (response.ok && workflowId) {
fetchScheduleInfo(workflowId)
await fetchScheduleInfo(workflowId)
} else {
logger.error('Failed to reactivate schedule')
}
@@ -112,29 +108,6 @@ export function useScheduleInfo(
[workflowId, fetchScheduleInfo]
)
const disableSchedule = useCallback(
async (scheduleId: string) => {
try {
const response = await fetch(`/api/schedules/${scheduleId}`, {
method: 'PUT',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify({ action: 'disable' }),
})
if (response.ok && workflowId) {
fetchScheduleInfo(workflowId)
} else {
logger.error('Failed to disable schedule')
}
} catch (error) {
logger.error('Error disabling schedule:', error)
}
},
[workflowId, fetchScheduleInfo]
)
useEffect(() => {
if (blockType === 'schedule' && workflowId) {
fetchScheduleInfo(workflowId)
@@ -143,27 +116,14 @@ export function useScheduleInfo(
setIsLoading(false)
}
const handleScheduleUpdate = (event: CustomEvent) => {
if (event.detail?.workflowId === workflowId && event.detail?.blockId === blockId) {
logger.debug('Schedule update event received, refetching schedule info')
if (blockType === 'schedule') {
fetchScheduleInfo(workflowId)
}
}
}
window.addEventListener('schedule-updated', handleScheduleUpdate as EventListener)
return () => {
setIsLoading(false)
window.removeEventListener('schedule-updated', handleScheduleUpdate as EventListener)
}
}, [blockType, workflowId, blockId, fetchScheduleInfo])
}, [blockType, workflowId, fetchScheduleInfo])
return {
scheduleInfo,
isLoading,
reactivateSchedule,
disableSchedule,
}
}

View File

@@ -24,5 +24,6 @@ export interface ScheduleInfo {
timezone: string
status?: string
isDisabled?: boolean
failedCount?: number
id?: string
}

View File

@@ -564,7 +564,6 @@ export const WorkflowBlock = memo(function WorkflowBlock({
scheduleInfo,
isLoading: isLoadingScheduleInfo,
reactivateSchedule,
disableSchedule,
} = useScheduleInfo(id, type, currentWorkflowId)
const { childWorkflowId, childIsDeployed, childNeedsRedeploy, refetchDeployment } =

View File

@@ -2,7 +2,7 @@ import { useMemo } from 'react'
import { useShallow } from 'zustand/react/shallow'
import { BlockPathCalculator } from '@/lib/workflows/blocks/block-path-calculator'
import { SYSTEM_REFERENCE_PREFIXES } from '@/lib/workflows/sanitization/references'
import { normalizeName } from '@/stores/workflows/utils'
import { normalizeName } from '@/executor/constants'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
import type { Loop, Parallel } from '@/stores/workflows/workflow/types'

View File

@@ -3,6 +3,7 @@ import { createLogger } from '@/lib/logs/console/logger'
import { getBlockOutputs } from '@/lib/workflows/blocks/block-outputs'
import { BlockPathCalculator } from '@/lib/workflows/blocks/block-path-calculator'
import { TriggerUtils } from '@/lib/workflows/triggers/triggers'
import { REFERENCE } from '@/executor/constants'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
@@ -44,7 +45,7 @@ function parseResponseFormatSafely(responseFormatValue: any, blockId: string): a
if (typeof responseFormatValue === 'string') {
const trimmedValue = responseFormatValue.trim()
if (trimmedValue.startsWith('<') && trimmedValue.includes('>')) {
if (trimmedValue.startsWith(REFERENCE.START) && trimmedValue.includes(REFERENCE.END)) {
return trimmedValue
}

View File

@@ -1,6 +1,8 @@
import { useCallback, useRef, useState } from 'react'
import { useQueryClient } from '@tanstack/react-query'
import { createLogger } from '@/lib/logs/console/logger'
import type { GenerationType } from '@/blocks/types'
import { subscriptionKeys } from '@/hooks/queries/subscription'
const logger = createLogger('useWand')
@@ -17,12 +19,10 @@ function buildContextInfo(currentValue?: string, generationType?: string): strin
let contextInfo = `Current content (${contentLength} characters, ${lineCount} lines):\n${currentValue}`
// Add type-specific context analysis
if (generationType) {
switch (generationType) {
case 'javascript-function-body':
case 'typescript-function-body': {
// Analyze code structure
const hasFunction = /function\s+\w+/.test(currentValue)
const hasArrowFunction = /=>\s*{/.test(currentValue)
const hasReturn = /return\s+/.test(currentValue)
@@ -32,7 +32,6 @@ function buildContextInfo(currentValue?: string, generationType?: string): strin
case 'json-schema':
case 'json-object':
// Analyze JSON structure
try {
const parsed = JSON.parse(currentValue)
const keys = Object.keys(parsed)
@@ -77,13 +76,13 @@ export function useWand({
onStreamStart,
onGenerationComplete,
}: UseWandProps) {
const queryClient = useQueryClient()
const [isLoading, setIsLoading] = useState(false)
const [isPromptVisible, setIsPromptVisible] = useState(false)
const [promptInputValue, setPromptInputValue] = useState('')
const [error, setError] = useState<string | null>(null)
const [isStreaming, setIsStreaming] = useState(false)
// Conversation history state
const [conversationHistory, setConversationHistory] = useState<ChatMessage[]>([])
const abortControllerRef = useRef<AbortController | null>(null)
@@ -143,25 +142,20 @@ export function useWand({
abortControllerRef.current = new AbortController()
// Signal the start of streaming to clear previous content
if (onStreamStart) {
onStreamStart()
}
try {
// Build context-aware message
const contextInfo = buildContextInfo(currentValue, wandConfig?.generationType)
// Build the system prompt with context information
let systemPrompt = wandConfig?.prompt || ''
if (systemPrompt.includes('{context}')) {
systemPrompt = systemPrompt.replace('{context}', contextInfo)
}
// User message is just the user's specific request
const userMessage = prompt
// Keep track of the current prompt for history
const currentPrompt = prompt
const response = await fetch('/api/wand', {
@@ -172,9 +166,9 @@ export function useWand({
},
body: JSON.stringify({
prompt: userMessage,
systemPrompt: systemPrompt, // Send the processed system prompt with context
systemPrompt: systemPrompt,
stream: true,
history: wandConfig?.maintainHistory ? conversationHistory : [], // Include history if enabled
history: wandConfig?.maintainHistory ? conversationHistory : [],
}),
signal: abortControllerRef.current.signal,
cache: 'no-store',
@@ -256,6 +250,10 @@ export function useWand({
prompt,
contentLength: accumulatedContent.length,
})
setTimeout(() => {
queryClient.invalidateQueries({ queryKey: subscriptionKeys.user() })
}, 1000)
} catch (error: any) {
if (error.name === 'AbortError') {
logger.debug('Wand generation cancelled')
@@ -276,6 +274,7 @@ export function useWand({
onStreamChunk,
onStreamStart,
onGenerationComplete,
queryClient,
]
)

View File

@@ -1,4 +1,4 @@
import { useCallback, useState } from 'react'
import { useCallback, useRef, useState } from 'react'
import { useQueryClient } from '@tanstack/react-query'
import { v4 as uuidv4 } from 'uuid'
import { createLogger } from '@/lib/logs/console/logger'
@@ -88,9 +88,9 @@ function extractExecutionResult(error: unknown): ExecutionResult | null {
}
export function useWorkflowExecution() {
const queryClient = useQueryClient()
const currentWorkflow = useCurrentWorkflow()
const { activeWorkflowId, workflows } = useWorkflowRegistry()
const queryClient = useQueryClient()
const { toggleConsole, addConsole } = useTerminalConsoleStore()
const { getAllVariables } = useEnvironmentStore()
const { getVariablesByWorkflowId, variables } = useVariablesStore()
@@ -111,6 +111,7 @@ export function useWorkflowExecution() {
} = useExecutionStore()
const [executionResult, setExecutionResult] = useState<ExecutionResult | null>(null)
const executionStream = useExecutionStream()
const currentChatExecutionIdRef = useRef<string | null>(null)
const isViewingDiff = useWorkflowDiffStore((state) => state.isShowingDiff)
/**
@@ -312,13 +313,25 @@ export function useWorkflowExecution() {
// For chat executions, we'll use a streaming approach
if (isChatExecution) {
let isCancelled = false
const executionId = uuidv4()
currentChatExecutionIdRef.current = executionId
const stream = new ReadableStream({
async start(controller) {
const { encodeSSE } = await import('@/lib/core/utils/sse')
const executionId = uuidv4()
const streamedContent = new Map<string, string>()
const streamReadingPromises: Promise<void>[] = []
const safeEnqueue = (data: Uint8Array) => {
if (!isCancelled) {
try {
controller.enqueue(data)
} catch {
isCancelled = true
}
}
}
// Handle file uploads if present
const uploadedFiles: any[] = []
interface UploadErrorCapableInput {
@@ -432,7 +445,7 @@ export function useWorkflowExecution() {
}
}
controller.enqueue(encodeSSE({ blockId, chunk: chunkToSend }))
safeEnqueue(encodeSSE({ blockId, chunk: chunkToSend }))
}
} catch (error) {
logger.error('Error reading from stream:', error)
@@ -485,7 +498,7 @@ export function useWorkflowExecution() {
const separator = streamedContent.size > 0 ? '\n\n' : ''
// Send the non-streaming block output as a chunk
controller.enqueue(encodeSSE({ blockId, chunk: separator + formattedOutput }))
safeEnqueue(encodeSSE({ blockId, chunk: separator + formattedOutput }))
// Track that we've sent output for this block
streamedContent.set(blockId, formattedOutput)
@@ -503,13 +516,8 @@ export function useWorkflowExecution() {
)
// Check if execution was cancelled
if (
result &&
'success' in result &&
!result.success &&
result.error === 'Workflow execution was cancelled'
) {
controller.enqueue(encodeSSE({ event: 'cancelled', data: result }))
if (result && 'status' in result && result.status === 'cancelled') {
safeEnqueue(encodeSSE({ event: 'cancelled', data: result }))
return
}
@@ -563,12 +571,12 @@ export function useWorkflowExecution() {
logger.info(`Processed ${processedCount} blocks for streaming tokenization`)
}
// Invalidate subscription query to update usage
queryClient.invalidateQueries({ queryKey: subscriptionKeys.user() })
queryClient.invalidateQueries({ queryKey: subscriptionKeys.usage() })
// Invalidate subscription queries to update usage
setTimeout(() => {
queryClient.invalidateQueries({ queryKey: subscriptionKeys.user() })
}, 1000)
const { encodeSSE } = await import('@/lib/core/utils/sse')
controller.enqueue(encodeSSE({ event: 'final', data: result }))
safeEnqueue(encodeSSE({ event: 'final', data: result }))
// Note: Logs are already persisted server-side via execution-core.ts
}
} catch (error: any) {
@@ -586,17 +594,23 @@ export function useWorkflowExecution() {
}
// Send the error as final event so downstream handlers can treat it uniformly
const { encodeSSE } = await import('@/lib/core/utils/sse')
controller.enqueue(encodeSSE({ event: 'final', data: errorResult }))
safeEnqueue(encodeSSE({ event: 'final', data: errorResult }))
// Do not error the controller to allow consumers to process the final event
} finally {
controller.close()
setIsExecuting(false)
setIsDebugging(false)
setActiveBlocks(new Set())
if (!isCancelled) {
controller.close()
}
if (currentChatExecutionIdRef.current === executionId) {
setIsExecuting(false)
setIsDebugging(false)
setActiveBlocks(new Set())
}
}
},
cancel() {
isCancelled = true
},
})
return { success: true, stream }
}
@@ -630,9 +644,10 @@ export function useWorkflowExecution() {
;(result.metadata as any).source = 'chat'
}
// Invalidate subscription query to update usage
queryClient.invalidateQueries({ queryKey: subscriptionKeys.user() })
queryClient.invalidateQueries({ queryKey: subscriptionKeys.usage() })
// Invalidate subscription queries to update usage
setTimeout(() => {
queryClient.invalidateQueries({ queryKey: subscriptionKeys.user() })
}, 1000)
}
return result
} catch (error: any) {
@@ -654,6 +669,7 @@ export function useWorkflowExecution() {
setPendingBlocks,
setActiveBlocks,
workflows,
queryClient,
]
)
@@ -1314,7 +1330,10 @@ export function useWorkflowExecution() {
// Cancel the execution stream (server-side)
executionStream.cancel()
// Reset execution state
// Mark current chat execution as superseded so its cleanup won't affect new executions
currentChatExecutionIdRef.current = null
// Reset execution state - this triggers chat stream cleanup via useEffect in chat.tsx
setIsExecuting(false)
setIsDebugging(false)
setActiveBlocks(new Set())

View File

@@ -2236,27 +2236,6 @@ const WorkflowContent = React.memo(() => {
return () => window.removeEventListener('keydown', handleKeyDown)
}, [selectedEdgeInfo, removeEdge, getNodes, removeBlock, effectivePermissions.canEdit])
/** Handles sub-block value updates from custom events. */
useEffect(() => {
const handleSubBlockValueUpdate = (event: CustomEvent) => {
const { blockId, subBlockId, value } = event.detail
if (blockId && subBlockId) {
// Use collaborative function to go through queue system
// This ensures 5-second timeout and error detection work
collaborativeSetSubblockValue(blockId, subBlockId, value)
}
}
window.addEventListener('update-subblock-value', handleSubBlockValueUpdate as EventListener)
return () => {
window.removeEventListener(
'update-subblock-value',
handleSubBlockValueUpdate as EventListener
)
}
}, [collaborativeSetSubblockValue])
return (
<div className='flex h-full w-full flex-col overflow-hidden bg-[var(--bg)]'>
<div className='relative h-full w-full flex-1 bg-[var(--bg)]'>

View File

@@ -16,6 +16,7 @@ import {
import { Trash } from '@/components/emcn/icons/trash'
import { Input, Skeleton } from '@/components/ui'
import { createLogger } from '@/lib/logs/console/logger'
import { isValidEnvVarName } from '@/executor/constants'
import {
usePersonalEnvironment,
useRemoveWorkspaceEnvironment,
@@ -28,7 +29,6 @@ import {
const logger = createLogger('EnvironmentVariables')
const GRID_COLS = 'grid grid-cols-[minmax(0,1fr)_8px_minmax(0,1fr)_auto] items-center'
const ENV_VAR_PATTERN = /^[A-Za-z_][A-Za-z0-9_]*$/
const PRIMARY_BUTTON_STYLES =
'!bg-[var(--brand-tertiary-2)] !text-[var(--text-inverse)] hover:!bg-[var(--brand-tertiary-2)]/90'
@@ -59,7 +59,7 @@ interface UIEnvironmentVariable {
function validateEnvVarKey(key: string): string | undefined {
if (!key) return undefined
if (key.includes(' ')) return 'Spaces are not allowed'
if (!ENV_VAR_PATTERN.test(key)) return 'Only letters, numbers, and underscores allowed'
if (!isValidEnvVarName(key)) return 'Only letters, numbers, and underscores allowed'
return undefined
}
@@ -377,7 +377,7 @@ export function EnvironmentVariables({ registerBeforeLeaveHandler }: Environment
if (equalIndex === -1 || equalIndex === 0) return null
const potentialKey = withoutExport.substring(0, equalIndex).trim()
if (!ENV_VAR_PATTERN.test(potentialKey)) return null
if (!isValidEnvVarName(potentialKey)) return null
let value = withoutExport.substring(equalIndex + 1)

View File

@@ -15,7 +15,7 @@ import {
import { Input, Skeleton } from '@/components/ui'
import { cn } from '@/lib/core/utils/cn'
import { createLogger } from '@/lib/logs/console/logger'
import { OAUTH_PROVIDERS } from '@/lib/oauth/oauth'
import { OAUTH_PROVIDERS } from '@/lib/oauth'
import {
type ServiceInfo,
useConnectOAuthService,

View File

@@ -75,6 +75,16 @@ export function FolderItem({ folder, level, hoverHandlers }: FolderItemProps) {
getFolderIds: () => folder.id,
})
// Folder expand hook - must be declared before callbacks that use expandFolder
const {
isExpanded,
handleToggleExpanded,
expandFolder,
handleKeyDown: handleExpandKeyDown,
} = useFolderExpand({
folderId: folder.id,
})
/**
* Handle create workflow in folder using React Query mutation.
* Generates name and color upfront for optimistic UI updates.
@@ -95,6 +105,8 @@ export function FolderItem({ folder, level, hoverHandlers }: FolderItemProps) {
if (result.id) {
router.push(`/workspace/${workspaceId}/w/${result.id}`)
// Expand the parent folder so the new workflow is visible
expandFolder()
// Scroll to the newly created workflow
window.dispatchEvent(
new CustomEvent(SIDEBAR_SCROLL_EVENT, { detail: { itemId: result.id } })
@@ -104,7 +116,7 @@ export function FolderItem({ folder, level, hoverHandlers }: FolderItemProps) {
// Error already handled by mutation's onError callback
logger.error('Failed to create workflow in folder:', error)
}
}, [createWorkflowMutation, workspaceId, folder.id, router])
}, [createWorkflowMutation, workspaceId, folder.id, router, expandFolder])
/**
* Handle create sub-folder using React Query mutation.
@@ -118,6 +130,8 @@ export function FolderItem({ folder, level, hoverHandlers }: FolderItemProps) {
parentId: folder.id,
})
if (result.id) {
// Expand the parent folder so the new folder is visible
expandFolder()
// Scroll to the newly created folder
window.dispatchEvent(
new CustomEvent(SIDEBAR_SCROLL_EVENT, { detail: { itemId: result.id } })
@@ -126,16 +140,7 @@ export function FolderItem({ folder, level, hoverHandlers }: FolderItemProps) {
} catch (error) {
logger.error('Failed to create folder:', error)
}
}, [createFolderMutation, workspaceId, folder.id])
// Folder expand hook
const {
isExpanded,
handleToggleExpanded,
handleKeyDown: handleExpandKeyDown,
} = useFolderExpand({
folderId: folder.id,
})
}, [createFolderMutation, workspaceId, folder.id, expandFolder])
/**
* Drag start handler - sets folder data for drag operation

View File

@@ -13,7 +13,7 @@ interface UseFolderExpandProps {
* @returns Expansion state and event handlers
*/
export function useFolderExpand({ folderId }: UseFolderExpandProps) {
const { expandedFolders, toggleExpanded } = useFolderStore()
const { expandedFolders, toggleExpanded, setExpanded } = useFolderStore()
const isExpanded = expandedFolders.has(folderId)
/**
@@ -23,6 +23,13 @@ export function useFolderExpand({ folderId }: UseFolderExpandProps) {
toggleExpanded(folderId)
}, [folderId, toggleExpanded])
/**
* Expand the folder (useful when creating items inside)
*/
const expandFolder = useCallback(() => {
setExpanded(folderId, true)
}, [folderId, setExpanded])
/**
* Handle keyboard navigation (Enter/Space)
*/
@@ -39,6 +46,7 @@ export function useFolderExpand({ folderId }: UseFolderExpandProps) {
return {
isExpanded,
handleToggleExpanded,
expandFolder,
handleKeyDown,
}
}

View File

@@ -22,8 +22,10 @@ import {
getScheduleTimeValues,
getSubBlockValue,
} from '@/lib/workflows/schedules/utils'
import { REFERENCE } from '@/executor/constants'
import { type ExecutionMetadata, ExecutionSnapshot } from '@/executor/execution/snapshot'
import type { ExecutionResult } from '@/executor/types'
import { createEnvVarPattern } from '@/executor/utils/reference-validation'
import { mergeSubblockState } from '@/stores/workflows/server-utils'
const logger = createLogger('TriggerScheduleExecution')
@@ -128,17 +130,25 @@ async function ensureBlockVariablesResolvable(
await Promise.all(
Object.values(subBlocks).map(async (subBlock) => {
const value = subBlock.value
if (typeof value !== 'string' || !value.includes('{{') || !value.includes('}}')) {
if (
typeof value !== 'string' ||
!value.includes(REFERENCE.ENV_VAR_START) ||
!value.includes(REFERENCE.ENV_VAR_END)
) {
return
}
const matches = value.match(/{{([^}]+)}}/g)
const envVarPattern = createEnvVarPattern()
const matches = value.match(envVarPattern)
if (!matches) {
return
}
for (const match of matches) {
const varName = match.slice(2, -2)
const varName = match.slice(
REFERENCE.ENV_VAR_START.length,
-REFERENCE.ENV_VAR_END.length
)
const encryptedValue = variables[varName]
if (!encryptedValue) {
throw new Error(`Environment variable "${varName}" was not found`)
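`createEnvVarPattern` comes from `@/executor/utils/reference-validation` and is not shown here; judging by the `/{{([^}]+)}}/g` literal it replaces, a sketch of the lookup it enables (pattern and helper name assumed):

```ts
// Illustrative sketch: mirrors the /{{([^}]+)}}/g regex the new helper replaces.
const createEnvVarPattern = () => /\{\{([^}]+)\}\}/g

function findEnvVarNames(value: string): string[] {
  const matches = value.match(createEnvVarPattern()) ?? []
  // '{{API_KEY}}' -> 'API_KEY'
  return matches.map((m) => m.slice('{{'.length, -'}}'.length))
}
```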
@@ -299,30 +309,22 @@ async function runWorkflowExecution({
}
return { status: 'failure', blocks, executionResult }
} catch (earlyError) {
logger.error(
`[${requestId}] Early failure in scheduled workflow ${payload.workflowId}`,
earlyError
)
} catch (error: unknown) {
logger.error(`[${requestId}] Early failure in scheduled workflow ${payload.workflowId}`, error)
try {
const executionResult = (earlyError as any)?.executionResult as ExecutionResult | undefined
const { traceSpans } = executionResult ? buildTraceSpans(executionResult) : { traceSpans: [] }
const errorWithResult = error as { executionResult?: ExecutionResult }
const executionResult = errorWithResult?.executionResult
const { traceSpans } = executionResult ? buildTraceSpans(executionResult) : { traceSpans: [] }
await loggingSession.safeCompleteWithError({
error: {
message: `Schedule execution failed: ${
earlyError instanceof Error ? earlyError.message : String(earlyError)
}`,
stackTrace: earlyError instanceof Error ? earlyError.stack : undefined,
},
traceSpans,
})
} catch (loggingError) {
logger.error(`[${requestId}] Failed to complete log entry for schedule failure`, loggingError)
}
await loggingSession.safeCompleteWithError({
error: {
message: error instanceof Error ? error.message : String(error),
stackTrace: error instanceof Error ? error.stack : undefined,
},
traceSpans,
})
throw earlyError
throw error
}
}
@@ -563,6 +565,7 @@ export async function executeScheduleJob(payload: ScheduleExecutionPayload) {
updatedAt: now,
nextRunAt,
failedCount: 0,
lastQueuedAt: null,
},
requestId,
`Error updating schedule ${payload.scheduleId} after success`,
@@ -596,8 +599,10 @@ export async function executeScheduleJob(payload: ScheduleExecutionPayload) {
`Error updating schedule ${payload.scheduleId} after failure`,
`Updated schedule ${payload.scheduleId} after failure`
)
} catch (error: any) {
if (error?.message?.includes('Service overloaded')) {
} catch (error: unknown) {
const errorMessage = error instanceof Error ? error.message : String(error)
if (errorMessage.includes('Service overloaded')) {
logger.warn(`[${requestId}] Service overloaded, retrying schedule in 5 minutes`)
const retryDelay = 5 * 60 * 1000
@@ -642,7 +647,7 @@ export async function executeScheduleJob(payload: ScheduleExecutionPayload) {
`Updated schedule ${payload.scheduleId} after execution error`
)
}
} catch (error: any) {
} catch (error: unknown) {
logger.error(`[${requestId}] Error processing schedule ${payload.scheduleId}`, error)
}
}

View File

@@ -536,10 +536,13 @@ async function executeWebhookJobInternal(
executedAt: new Date().toISOString(),
provider: payload.provider,
}
} catch (error: any) {
} catch (error: unknown) {
const errorMessage = error instanceof Error ? error.message : String(error)
const errorStack = error instanceof Error ? error.stack : undefined
logger.error(`[${requestId}] Webhook execution failed`, {
error: error.message,
stack: error.stack,
error: errorMessage,
stack: errorStack,
workflowId: payload.workflowId,
provider: payload.provider,
})
@@ -567,10 +570,11 @@ async function executeWebhookJobInternal(
isTest: payload.testMode === true,
executionTarget: payload.executionTarget || 'deployed',
},
deploymentVersionId, // Pass if available (undefined for early errors)
deploymentVersionId,
})
const executionResult = (error?.executionResult as ExecutionResult | undefined) || {
const errorWithResult = error as { executionResult?: ExecutionResult }
const executionResult = errorWithResult?.executionResult || {
success: false,
output: {},
logs: [],
@@ -581,8 +585,8 @@ async function executeWebhookJobInternal(
endedAt: new Date().toISOString(),
totalDurationMs: 0,
error: {
message: error.message || 'Webhook execution failed',
stackTrace: error.stack,
message: errorMessage || 'Webhook execution failed',
stackTrace: errorStack,
},
traceSpans,
})

View File

@@ -3,10 +3,12 @@ import { v4 as uuidv4 } from 'uuid'
import { preprocessExecution } from '@/lib/execution/preprocessing'
import { createLogger } from '@/lib/logs/console/logger'
import { LoggingSession } from '@/lib/logs/execution/logging-session'
import { buildTraceSpans } from '@/lib/logs/execution/trace-spans/trace-spans'
import { executeWorkflowCore } from '@/lib/workflows/executor/execution-core'
import { PauseResumeManager } from '@/lib/workflows/executor/human-in-the-loop-manager'
import { getWorkflowById } from '@/lib/workflows/utils'
import { type ExecutionMetadata, ExecutionSnapshot } from '@/executor/execution/snapshot'
import type { ExecutionResult } from '@/executor/types'
const logger = createLogger('TriggerWorkflowExecution')
@@ -66,6 +68,12 @@ export async function executeWorkflowJob(payload: WorkflowExecutionPayload) {
logger.info(`[${requestId}] Preprocessing passed. Using actor: ${actorUserId}`)
await loggingSession.safeStart({
userId: actorUserId,
workspaceId,
variables: {},
})
const workflow = await getWorkflowById(workflowId)
if (!workflow) {
throw new Error(`Workflow ${workflowId} not found after preprocessing`)
@@ -131,11 +139,24 @@ export async function executeWorkflowJob(payload: WorkflowExecutionPayload) {
executedAt: new Date().toISOString(),
metadata: payload.metadata,
}
} catch (error: any) {
} catch (error: unknown) {
logger.error(`[${requestId}] Workflow execution failed: ${workflowId}`, {
error: error.message,
error: error instanceof Error ? error.message : String(error),
executionId,
})
const errorWithResult = error as { executionResult?: ExecutionResult }
const executionResult = errorWithResult?.executionResult
const { traceSpans } = executionResult ? buildTraceSpans(executionResult) : { traceSpans: [] }
await loggingSession.safeCompleteWithError({
error: {
message: error instanceof Error ? error.message : String(error),
stackTrace: error instanceof Error ? error.stack : undefined,
},
traceSpans,
})
throw error
}
}

View File

@@ -310,6 +310,7 @@ export const AgentBlock: BlockConfig<AgentResponse> = {
type: 'short-input',
placeholder: 'your-gcp-project-id',
connectionDroppable: false,
required: true,
condition: {
field: 'model',
value: providers.vertex.models,
@@ -321,6 +322,7 @@ export const AgentBlock: BlockConfig<AgentResponse> = {
type: 'short-input',
placeholder: 'us-central1',
connectionDroppable: false,
required: true,
condition: {
field: 'model',
value: providers.vertex.models,

Some files were not shown because too many files have changed in this diff