Compare commits
61 Commits
improvemen
...
improvemen
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
d615c78a30 | ||
|
|
b7f25786ce | ||
|
|
3a9e5f3b78 | ||
|
|
39444fa1a8 | ||
|
|
45ca926e6d | ||
|
|
77ee01747d | ||
|
|
474762d6fb | ||
|
|
0005c3e465 | ||
|
|
fc40b4f7af | ||
|
|
2a7f51a2f6 | ||
|
|
90c3c43607 | ||
|
|
83d813a7cc | ||
|
|
811c736705 | ||
|
|
c6757311af | ||
|
|
b5b12ba2d1 | ||
|
|
0d30676e34 | ||
|
|
36bdccb449 | ||
|
|
f45730a89e | ||
|
|
04cd837e9c | ||
|
|
c23130a26e | ||
|
|
7575cd6f27 | ||
|
|
fbde64f0b0 | ||
|
|
25f7ed20f6 | ||
|
|
261aa3d72d | ||
|
|
9da19e84b7 | ||
|
|
e83afc0a62 | ||
|
|
1720fa8749 | ||
|
|
f3ad7750af | ||
|
|
78b7643e65 | ||
|
|
67cfb21d08 | ||
|
|
1d6975db49 | ||
|
|
837aabca5e | ||
|
|
f9cfca92bf | ||
|
|
25afacb25e | ||
|
|
fcf52ac4d5 | ||
|
|
842200bcf2 | ||
|
|
a0fb889644 | ||
|
|
f526c36fc0 | ||
|
|
e24f31cbce | ||
|
|
3fbd57caf1 | ||
|
|
b5da61377c | ||
|
|
18b7032494 | ||
|
|
b7bbef8620 | ||
|
|
52edbea659 | ||
|
|
d480057fd3 | ||
|
|
c27c233da0 | ||
|
|
ebef5f3a27 | ||
|
|
12c4c2d44f | ||
|
|
929a352edb | ||
|
|
6cd078b0fe | ||
|
|
31874939ee | ||
|
|
e157ce5fbc | ||
|
|
774e5d585c | ||
|
|
54cc93743f | ||
|
|
8c32ad4c0d | ||
|
|
1d08796853 | ||
|
|
ebcd243942 | ||
|
|
b7e814b721 | ||
|
|
842ef27ed9 | ||
|
|
31c34b2ea3 | ||
|
|
8f0ef58056 |
13
.github/workflows/test-build.yml
vendored
@@ -48,6 +48,19 @@ jobs:
|
||||
ENCRYPTION_KEY: '7cf672e460e430c1fba707575c2b0e2ad5a99dddf9b7b7e3b5646e630861db1c' # dummy key for CI only
|
||||
run: bun run test
|
||||
|
||||
- name: Check schema and migrations are in sync
|
||||
working-directory: packages/db
|
||||
run: |
|
||||
bunx drizzle-kit generate --config=./drizzle.config.ts
|
||||
if [ -n "$(git status --porcelain ./migrations)" ]; then
|
||||
echo "❌ Schema and migrations are out of sync!"
|
||||
echo "Run 'cd packages/db && bunx drizzle-kit generate' and commit the new migrations."
|
||||
git status --porcelain ./migrations
|
||||
git diff ./migrations
|
||||
exit 1
|
||||
fi
|
||||
echo "✅ Schema and migrations are in sync"
|
||||
|
||||
- name: Build application
|
||||
env:
|
||||
NODE_OPTIONS: '--no-warnings'
|
||||
|
||||
@@ -188,6 +188,7 @@ DATABASE_URL="postgresql://postgres:your_password@localhost:5432/simstudio"
|
||||
|
||||
Then run the migrations:
|
||||
```bash
|
||||
cd packages/db # Required so drizzle picks correct .env file
|
||||
bunx drizzle-kit migrate --config=./drizzle.config.ts
|
||||
```
|
||||
|
||||
|
||||
23
apps/docs/app/[lang]/not-found.tsx
Normal file
@@ -0,0 +1,23 @@
|
||||
import { DocsBody, DocsPage } from 'fumadocs-ui/page'
|
||||
|
||||
export const metadata = {
|
||||
title: 'Page Not Found',
|
||||
}
|
||||
|
||||
export default function NotFound() {
|
||||
return (
|
||||
<DocsPage>
|
||||
<DocsBody>
|
||||
<div className='flex min-h-[60vh] flex-col items-center justify-center text-center'>
|
||||
<h1 className='mb-4 bg-gradient-to-b from-[#8357FF] to-[#6F3DFA] bg-clip-text font-bold text-8xl text-transparent'>
|
||||
404
|
||||
</h1>
|
||||
<h2 className='mb-2 font-semibold text-2xl text-foreground'>Page Not Found</h2>
|
||||
<p className='text-muted-foreground'>
|
||||
The page you're looking for doesn't exist or has been moved.
|
||||
</p>
|
||||
</div>
|
||||
</DocsBody>
|
||||
</DocsPage>
|
||||
)
|
||||
}
|
||||
@@ -90,14 +90,20 @@ Ein Jira-Issue erstellen
|
||||
|
||||
| Parameter | Typ | Erforderlich | Beschreibung |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `domain` | string | Ja | Ihre Jira-Domain (z.B. ihrfirma.atlassian.net) |
|
||||
| `domain` | string | Ja | Ihre Jira-Domain \(z.B. ihrfirma.atlassian.net\) |
|
||||
| `projectId` | string | Ja | Projekt-ID für das Issue |
|
||||
| `summary` | string | Ja | Zusammenfassung für das Issue |
|
||||
| `description` | string | Nein | Beschreibung für das Issue |
|
||||
| `priority` | string | Nein | Priorität für das Issue |
|
||||
| `assignee` | string | Nein | Bearbeiter für das Issue |
|
||||
| `cloudId` | string | Nein | Jira Cloud-ID für die Instanz. Wenn nicht angegeben, wird sie anhand der Domain abgerufen. |
|
||||
| `issueType` | string | Ja | Art des zu erstellenden Issues (z.B. Task, Story) |
|
||||
| `priority` | string | Nein | Prioritäts-ID oder -Name für das Issue \(z.B. "10000" oder "High"\) |
|
||||
| `assignee` | string | Nein | Account-ID des Bearbeiters für das Issue |
|
||||
| `cloudId` | string | Nein | Jira Cloud-ID für die Instanz. Wenn nicht angegeben, wird sie über die Domain abgerufen. |
|
||||
| `issueType` | string | Ja | Typ des zu erstellenden Issues \(z.B. Task, Story\) |
|
||||
| `labels` | array | Nein | Labels für das Issue \(Array von Label-Namen\) |
|
||||
| `duedate` | string | Nein | Fälligkeitsdatum für das Issue \(Format: YYYY-MM-DD\) |
|
||||
| `reporter` | string | Nein | Account-ID des Melders für das Issue |
|
||||
| `environment` | string | Nein | Umgebungsinformationen für das Issue |
|
||||
| `customFieldId` | string | Nein | Benutzerdefinierte Feld-ID \(z.B. customfield_10001\) |
|
||||
| `customFieldValue` | string | Nein | Wert für das benutzerdefinierte Feld |
|
||||
|
||||
#### Ausgabe
|
||||
|
||||
@@ -107,6 +113,7 @@ Ein Jira-Issue erstellen
|
||||
| `issueKey` | string | Erstellter Issue-Key \(z.B. PROJ-123\) |
|
||||
| `summary` | string | Issue-Zusammenfassung |
|
||||
| `url` | string | URL zum erstellten Issue |
|
||||
| `assigneeId` | string | Account-ID des zugewiesenen Benutzers \(falls zugewiesen\) |
|
||||
|
||||
### `jira_bulk_read`
|
||||
|
||||
@@ -520,6 +527,30 @@ Einen Beobachter von einem Jira-Issue entfernen
|
||||
| `issueKey` | string | Issue-Key |
|
||||
| `watcherAccountId` | string | Account-ID des entfernten Beobachters |
|
||||
|
||||
### `jira_get_users`
|
||||
|
||||
Jira-Benutzer abrufen. Wenn eine Account-ID angegeben wird, wird ein einzelner Benutzer zurückgegeben. Andernfalls wird eine Liste aller Benutzer zurückgegeben.
|
||||
|
||||
#### Eingabe
|
||||
|
||||
| Parameter | Typ | Erforderlich | Beschreibung |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `domain` | string | Ja | Ihre Jira-Domain \(z.B. ihrfirma.atlassian.net\) |
|
||||
| `accountId` | string | Nein | Optionale Account-ID, um einen bestimmten Benutzer abzurufen. Wenn nicht angegeben, werden alle Benutzer zurückgegeben. |
|
||||
| `startAt` | number | Nein | Der Index des ersten zurückzugebenden Benutzers \(für Paginierung, Standard: 0\) |
|
||||
| `maxResults` | number | Nein | Maximale Anzahl der zurückzugebenden Benutzer \(Standard: 50\) |
|
||||
| `cloudId` | string | Nein | Jira Cloud-ID für die Instanz. Wenn nicht angegeben, wird sie anhand der Domain abgerufen. |
|
||||
|
||||
#### Ausgabe
|
||||
|
||||
| Parameter | Typ | Beschreibung |
|
||||
| --------- | ---- | ----------- |
|
||||
| `ts` | string | Zeitstempel der Operation |
|
||||
| `users` | json | Array von Benutzern mit accountId, displayName, emailAddress, active-Status und avatarUrls |
|
||||
| `total` | number | Gesamtanzahl der zurückgegebenen Benutzer |
|
||||
| `startAt` | number | Startindex für Paginierung |
|
||||
| `maxResults` | number | Maximale Ergebnisse pro Seite |
|
||||
|
||||
## Hinweise
|
||||
|
||||
- Kategorie: `tools`
|
||||
|
||||
@@ -97,10 +97,16 @@ Write a Jira issue
|
||||
| `projectId` | string | Yes | Project ID for the issue |
|
||||
| `summary` | string | Yes | Summary for the issue |
|
||||
| `description` | string | No | Description for the issue |
|
||||
| `priority` | string | No | Priority for the issue |
|
||||
| `assignee` | string | No | Assignee for the issue |
|
||||
| `priority` | string | No | Priority ID or name for the issue \(e.g., "10000" or "High"\) |
|
||||
| `assignee` | string | No | Assignee account ID for the issue |
|
||||
| `cloudId` | string | No | Jira Cloud ID for the instance. If not provided, it will be fetched using the domain. |
|
||||
| `issueType` | string | Yes | Type of issue to create \(e.g., Task, Story\) |
|
||||
| `labels` | array | No | Labels for the issue \(array of label names\) |
|
||||
| `duedate` | string | No | Due date for the issue \(format: YYYY-MM-DD\) |
|
||||
| `reporter` | string | No | Reporter account ID for the issue |
|
||||
| `environment` | string | No | Environment information for the issue |
|
||||
| `customFieldId` | string | No | Custom field ID \(e.g., customfield_10001\) |
|
||||
| `customFieldValue` | string | No | Value for the custom field |
|
||||
|
||||
#### Output
|
||||
|
||||
@@ -110,6 +116,7 @@ Write a Jira issue
|
||||
| `issueKey` | string | Created issue key \(e.g., PROJ-123\) |
|
||||
| `summary` | string | Issue summary |
|
||||
| `url` | string | URL to the created issue |
|
||||
| `assigneeId` | string | Account ID of the assigned user \(if assigned\) |
|
||||
|
||||
### `jira_bulk_read`
|
||||
|
||||
@@ -523,6 +530,30 @@ Remove a watcher from a Jira issue
|
||||
| `issueKey` | string | Issue key |
|
||||
| `watcherAccountId` | string | Removed watcher account ID |
|
||||
|
||||
### `jira_get_users`
|
||||
|
||||
Get Jira users. If an account ID is provided, returns a single user. Otherwise, returns a list of all users.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `domain` | string | Yes | Your Jira domain \(e.g., yourcompany.atlassian.net\) |
|
||||
| `accountId` | string | No | Optional account ID to get a specific user. If not provided, returns all users. |
|
||||
| `startAt` | number | No | The index of the first user to return \(for pagination, default: 0\) |
|
||||
| `maxResults` | number | No | Maximum number of users to return \(default: 50\) |
|
||||
| `cloudId` | string | No | Jira Cloud ID for the instance. If not provided, it will be fetched using the domain. |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `ts` | string | Timestamp of the operation |
|
||||
| `users` | json | Array of users with accountId, displayName, emailAddress, active status, and avatarUrls |
|
||||
| `total` | number | Total number of users returned |
|
||||
| `startAt` | number | Pagination start index |
|
||||
| `maxResults` | number | Maximum results per page |
|
||||
|
||||
|
||||
|
||||
## Notes
|
||||
|
||||
@@ -89,24 +89,31 @@ Escribir una incidencia de Jira
|
||||
#### Entrada
|
||||
|
||||
| Parámetro | Tipo | Obligatorio | Descripción |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| --------- | ---- | ----------- | ----------- |
|
||||
| `domain` | string | Sí | Tu dominio de Jira \(p. ej., tuempresa.atlassian.net\) |
|
||||
| `projectId` | string | Sí | ID del proyecto para la incidencia |
|
||||
| `summary` | string | Sí | Resumen de la incidencia |
|
||||
| `description` | string | No | Descripción de la incidencia |
|
||||
| `priority` | string | No | Prioridad de la incidencia |
|
||||
| `assignee` | string | No | Asignado para la incidencia |
|
||||
| `cloudId` | string | No | ID de Jira Cloud para la instancia. Si no se proporciona, se obtendrá utilizando el dominio. |
|
||||
| `priority` | string | No | ID o nombre de prioridad para la incidencia \(p. ej., "10000" o "Alta"\) |
|
||||
| `assignee` | string | No | ID de cuenta del asignado para la incidencia |
|
||||
| `cloudId` | string | No | ID de Jira Cloud para la instancia. Si no se proporciona, se obtendrá usando el dominio. |
|
||||
| `issueType` | string | Sí | Tipo de incidencia a crear \(p. ej., Tarea, Historia\) |
|
||||
| `labels` | array | No | Etiquetas para la incidencia \(array de nombres de etiquetas\) |
|
||||
| `duedate` | string | No | Fecha de vencimiento para la incidencia \(formato: AAAA-MM-DD\) |
|
||||
| `reporter` | string | No | ID de cuenta del informador para la incidencia |
|
||||
| `environment` | string | No | Información del entorno para la incidencia |
|
||||
| `customFieldId` | string | No | ID del campo personalizado \(p. ej., customfield_10001\) |
|
||||
| `customFieldValue` | string | No | Valor para el campo personalizado |
|
||||
|
||||
#### Salida
|
||||
|
||||
| Parámetro | Tipo | Descripción |
|
||||
| --------- | ---- | ----------- |
|
||||
| `ts` | string | Marca de tiempo de la operación |
|
||||
| `issueKey` | string | Clave de la incidencia creada (p. ej., PROJ-123) |
|
||||
| `issueKey` | string | Clave de la incidencia creada \(p. ej., PROJ-123\) |
|
||||
| `summary` | string | Resumen de la incidencia |
|
||||
| `url` | string | URL de la incidencia creada |
|
||||
| `assigneeId` | string | ID de cuenta del usuario asignado \(si está asignado\) |
|
||||
|
||||
### `jira_bulk_read`
|
||||
|
||||
@@ -520,6 +527,30 @@ Eliminar un observador de una incidencia de Jira
|
||||
| `issueKey` | string | Clave de incidencia |
|
||||
| `watcherAccountId` | string | ID de cuenta del observador eliminado |
|
||||
|
||||
### `jira_get_users`
|
||||
|
||||
Obtener usuarios de Jira. Si se proporciona un ID de cuenta, devuelve un solo usuario. De lo contrario, devuelve una lista de todos los usuarios.
|
||||
|
||||
#### Entrada
|
||||
|
||||
| Parámetro | Tipo | Obligatorio | Descripción |
|
||||
| --------- | ---- | ----------- | ----------- |
|
||||
| `domain` | string | Sí | Tu dominio de Jira \(p. ej., tuempresa.atlassian.net\) |
|
||||
| `accountId` | string | No | ID de cuenta opcional para obtener un usuario específico. Si no se proporciona, devuelve todos los usuarios. |
|
||||
| `startAt` | number | No | El índice del primer usuario a devolver \(para paginación, predeterminado: 0\) |
|
||||
| `maxResults` | number | No | Número máximo de usuarios a devolver \(predeterminado: 50\) |
|
||||
| `cloudId` | string | No | ID de Jira Cloud para la instancia. Si no se proporciona, se obtendrá usando el dominio. |
|
||||
|
||||
#### Salida
|
||||
|
||||
| Parámetro | Tipo | Descripción |
|
||||
| --------- | ---- | ----------- |
|
||||
| `ts` | string | Marca de tiempo de la operación |
|
||||
| `users` | json | Array de usuarios con accountId, displayName, emailAddress, estado activo y avatarUrls |
|
||||
| `total` | number | Número total de usuarios devueltos |
|
||||
| `startAt` | number | Índice de inicio de paginación |
|
||||
| `maxResults` | number | Máximo de resultados por página |
|
||||
|
||||
## Notas
|
||||
|
||||
- Categoría: `tools`
|
||||
|
||||
@@ -89,15 +89,21 @@ Rédiger une demande Jira
|
||||
#### Entrée
|
||||
|
||||
| Paramètre | Type | Obligatoire | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `domain` | string | Oui | Votre domaine Jira (ex. : votreentreprise.atlassian.net) |
|
||||
| `projectId` | string | Oui | ID du projet pour la demande |
|
||||
| `summary` | string | Oui | Résumé de la demande |
|
||||
| `description` | string | Non | Description de la demande |
|
||||
| `priority` | string | Non | Priorité de la demande |
|
||||
| `assignee` | string | Non | Assigné de la demande |
|
||||
| `cloudId` | string | Non | ID Jira Cloud pour l'instance. S'il n'est pas fourni, il sera récupéré à l'aide du domaine. |
|
||||
| `issueType` | string | Oui | Type de demande à créer (ex. : Tâche, Story) |
|
||||
| --------- | ---- | ----------- | ----------- |
|
||||
| `domain` | chaîne | Oui | Votre domaine Jira \(ex. : votreentreprise.atlassian.net\) |
|
||||
| `projectId` | chaîne | Oui | ID du projet pour le ticket |
|
||||
| `summary` | chaîne | Oui | Résumé du ticket |
|
||||
| `description` | chaîne | Non | Description du ticket |
|
||||
| `priority` | chaîne | Non | ID ou nom de la priorité du ticket \(ex. : "10000" ou "Haute"\) |
|
||||
| `assignee` | chaîne | Non | ID de compte de l'assigné pour le ticket |
|
||||
| `cloudId` | chaîne | Non | ID Cloud Jira pour l'instance. S'il n'est pas fourni, il sera récupéré à l'aide du domaine. |
|
||||
| `issueType` | chaîne | Oui | Type de ticket à créer \(ex. : tâche, story\) |
|
||||
| `labels` | tableau | Non | Étiquettes pour le ticket \(tableau de noms d'étiquettes\) |
|
||||
| `duedate` | chaîne | Non | Date d'échéance du ticket \(format : AAAA-MM-JJ\) |
|
||||
| `reporter` | chaîne | Non | ID de compte du rapporteur pour le ticket |
|
||||
| `environment` | chaîne | Non | Informations d'environnement pour le ticket |
|
||||
| `customFieldId` | chaîne | Non | ID du champ personnalisé \(ex. : customfield_10001\) |
|
||||
| `customFieldValue` | chaîne | Non | Valeur pour le champ personnalisé |
|
||||
|
||||
#### Sortie
|
||||
|
||||
@@ -107,6 +113,7 @@ Rédiger une demande Jira
|
||||
| `issueKey` | chaîne | Clé du ticket créé \(ex. : PROJ-123\) |
|
||||
| `summary` | chaîne | Résumé du ticket |
|
||||
| `url` | chaîne | URL vers le ticket créé |
|
||||
| `assigneeId` | chaîne | ID de compte de l'utilisateur assigné \(si assigné\) |
|
||||
|
||||
### `jira_bulk_read`
|
||||
|
||||
@@ -520,7 +527,31 @@ Supprimer un observateur d'un ticket Jira
|
||||
| `issueKey` | string | Clé du ticket |
|
||||
| `watcherAccountId` | string | ID du compte observateur supprimé |
|
||||
|
||||
## Notes
|
||||
### `jira_get_users`
|
||||
|
||||
Récupère les utilisateurs Jira. Si un ID de compte est fourni, renvoie un seul utilisateur. Sinon, renvoie une liste de tous les utilisateurs.
|
||||
|
||||
#### Entrée
|
||||
|
||||
| Paramètre | Type | Obligatoire | Description |
|
||||
| --------- | ---- | ----------- | ----------- |
|
||||
| `domain` | chaîne | Oui | Votre domaine Jira \(ex. : votreentreprise.atlassian.net\) |
|
||||
| `accountId` | chaîne | Non | ID de compte optionnel pour obtenir un utilisateur spécifique. S'il n'est pas fourni, renvoie tous les utilisateurs. |
|
||||
| `startAt` | nombre | Non | L'index du premier utilisateur à renvoyer \(pour la pagination, par défaut : 0\) |
|
||||
| `maxResults` | nombre | Non | Nombre maximum d'utilisateurs à renvoyer \(par défaut : 50\) |
|
||||
| `cloudId` | chaîne | Non | ID Cloud Jira pour l'instance. S'il n'est pas fourni, il sera récupéré à l'aide du domaine. |
|
||||
|
||||
#### Sortie
|
||||
|
||||
| Paramètre | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `ts` | chaîne | Horodatage de l'opération |
|
||||
| `users` | json | Tableau d'utilisateurs avec accountId, displayName, emailAddress, statut actif et avatarUrls |
|
||||
| `total` | nombre | Nombre total d'utilisateurs renvoyés |
|
||||
| `startAt` | nombre | Index de début de pagination |
|
||||
| `maxResults` | nombre | Nombre maximum de résultats par page |
|
||||
|
||||
## Remarques
|
||||
|
||||
- Catégorie : `tools`
|
||||
- Type : `jira`
|
||||
|
||||
@@ -94,10 +94,16 @@ Jira課題を作成する
|
||||
| `projectId` | string | はい | 課題のプロジェクトID |
|
||||
| `summary` | string | はい | 課題の要約 |
|
||||
| `description` | string | いいえ | 課題の説明 |
|
||||
| `priority` | string | いいえ | 課題の優先度 |
|
||||
| `assignee` | string | いいえ | 課題の担当者 |
|
||||
| `priority` | string | いいえ | 課題の優先度IDまたは名前(例:「10000」または「高」) |
|
||||
| `assignee` | string | いいえ | 課題の担当者アカウントID |
|
||||
| `cloudId` | string | いいえ | インスタンスのJira Cloud ID。提供されない場合、ドメインを使用して取得されます。 |
|
||||
| `issueType` | string | はい | 作成する課題のタイプ(例:タスク、ストーリー) |
|
||||
| `labels` | array | いいえ | 課題のラベル(ラベル名の配列) |
|
||||
| `duedate` | string | いいえ | 課題の期限(形式:YYYY-MM-DD) |
|
||||
| `reporter` | string | いいえ | 課題の報告者アカウントID |
|
||||
| `environment` | string | いいえ | 課題の環境情報 |
|
||||
| `customFieldId` | string | いいえ | カスタムフィールドID(例:customfield_10001) |
|
||||
| `customFieldValue` | string | いいえ | カスタムフィールドの値 |
|
||||
|
||||
#### 出力
|
||||
|
||||
@@ -106,7 +112,8 @@ Jira課題を作成する
|
||||
| `ts` | string | 操作のタイムスタンプ |
|
||||
| `issueKey` | string | 作成された課題キー(例:PROJ-123) |
|
||||
| `summary` | string | 課題の要約 |
|
||||
| `url` | string | 作成された課題へのURL |
|
||||
| `url` | string | 作成された課題のURL |
|
||||
| `assigneeId` | string | 割り当てられたユーザーのアカウントID(割り当てられている場合) |
|
||||
|
||||
### `jira_bulk_read`
|
||||
|
||||
@@ -520,7 +527,31 @@ Jira課題からウォッチャーを削除する
|
||||
| `issueKey` | string | 課題キー |
|
||||
| `watcherAccountId` | string | 削除されたウォッチャーのアカウントID |
|
||||
|
||||
## 注意事項
|
||||
### `jira_get_users`
|
||||
|
||||
- カテゴリー: `tools`
|
||||
- タイプ: `jira`
|
||||
Jiraユーザーを取得します。アカウントIDが提供された場合、単一のユーザーを返します。それ以外の場合、すべてのユーザーのリストを返します。
|
||||
|
||||
#### 入力
|
||||
|
||||
| パラメータ | 型 | 必須 | 説明 |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `domain` | string | はい | あなたのJiraドメイン(例:yourcompany.atlassian.net) |
|
||||
| `accountId` | string | いいえ | 特定のユーザーを取得するためのオプションのアカウントID。提供されない場合、すべてのユーザーを返します。 |
|
||||
| `startAt` | number | いいえ | 返す最初のユーザーのインデックス(ページネーション用、デフォルト:0) |
|
||||
| `maxResults` | number | いいえ | 返すユーザーの最大数(デフォルト:50) |
|
||||
| `cloudId` | string | いいえ | インスタンスのJira Cloud ID。提供されない場合、ドメインを使用して取得されます。 |
|
||||
|
||||
#### 出力
|
||||
|
||||
| パラメータ | 型 | 説明 |
|
||||
| --------- | ---- | ----------- |
|
||||
| `ts` | string | 操作のタイムスタンプ |
|
||||
| `users` | json | accountId、displayName、emailAddress、activeステータス、avatarUrlsを含むユーザーの配列 |
|
||||
| `total` | number | 返されたユーザーの総数 |
|
||||
| `startAt` | number | ページネーション開始インデックス |
|
||||
| `maxResults` | number | ページあたりの最大結果数 |
|
||||
|
||||
## 注記
|
||||
|
||||
- カテゴリ:`tools`
|
||||
- タイプ:`jira`
|
||||
|
||||
@@ -91,13 +91,19 @@ Jira 的主要功能包括:
|
||||
| 参数 | 类型 | 必需 | 描述 |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `domain` | 字符串 | 是 | 您的 Jira 域名 \(例如:yourcompany.atlassian.net\) |
|
||||
| `projectId` | 字符串 | 是 | 问题的项目 ID |
|
||||
| `summary` | 字符串 | 是 | 问题的摘要 |
|
||||
| `description` | 字符串 | 否 | 问题的描述 |
|
||||
| `priority` | 字符串 | 否 | 问题的优先级 |
|
||||
| `assignee` | 字符串 | 否 | 问题的负责人 |
|
||||
| `cloudId` | 字符串 | 否 | 实例的 Jira 云 ID。如果未提供,将使用域名获取。 |
|
||||
| `issueType` | 字符串 | 是 | 要创建的问题类型 \(例如:任务、故事\) |
|
||||
| `projectId` | 字符串 | 是 | 问题所属项目 ID |
|
||||
| `summary` | 字符串 | 是 | 问题摘要 |
|
||||
| `description` | 字符串 | 否 | 问题描述 |
|
||||
| `priority` | 字符串 | 否 | 问题优先级 ID 或名称 \(例如:“10000”或“High”\) |
|
||||
| `assignee` | 字符串 | 否 | 问题负责人账户 ID |
|
||||
| `cloudId` | 字符串 | 否 | 实例的 Jira Cloud ID。如果未提供,将使用域名获取。 |
|
||||
| `issueType` | 字符串 | 是 | 要创建的问题类型 \(例如:Task、Story\) |
|
||||
| `labels` | 数组 | 否 | 问题标签 \(标签名称数组\) |
|
||||
| `duedate` | 字符串 | 否 | 问题截止日期 \(格式:YYYY-MM-DD\) |
|
||||
| `reporter` | 字符串 | 否 | 问题报告人账户 ID |
|
||||
| `environment` | 字符串 | 否 | 问题环境信息 |
|
||||
| `customFieldId` | 字符串 | 否 | 自定义字段 ID \(例如:customfield_10001\) |
|
||||
| `customFieldValue` | 字符串 | 否 | 自定义字段的值 |
|
||||
|
||||
#### 输出
|
||||
|
||||
@@ -107,6 +113,7 @@ Jira 的主要功能包括:
|
||||
| `issueKey` | 字符串 | 创建的问题键 \(例如:PROJ-123\) |
|
||||
| `summary` | 字符串 | 问题摘要 |
|
||||
| `url` | 字符串 | 创建的问题的 URL |
|
||||
| `assigneeId` | 字符串 | 已分配用户的账户 ID(如已分配) |
|
||||
|
||||
### `jira_bulk_read`
|
||||
|
||||
@@ -520,7 +527,31 @@ Jira 的主要功能包括:
|
||||
| `issueKey` | string | 问题键 |
|
||||
| `watcherAccountId` | string | 移除的观察者账户 ID |
|
||||
|
||||
## 注意事项
|
||||
### `jira_get_users`
|
||||
|
||||
- 类别: `tools`
|
||||
- 类型: `jira`
|
||||
获取 Jira 用户。如果提供了账户 ID,则返回单个用户,否则返回所有用户的列表。
|
||||
|
||||
#### 输入
|
||||
|
||||
| 参数 | 类型 | 必需 | 描述 |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `domain` | 字符串 | 是 | 您的 Jira 域名 \(例如:yourcompany.atlassian.net\) |
|
||||
| `accountId` | 字符串 | 否 | 可选账户 ID,用于获取特定用户。如果未提供,则返回所有用户。 |
|
||||
| `startAt` | 数字 | 否 | 要返回的第一个用户的索引 \(用于分页,默认值:0\) |
|
||||
| `maxResults` | 数字 | 否 | 要返回的最大用户数 \(默认值:50\) |
|
||||
| `cloudId` | 字符串 | 否 | 实例的 Jira Cloud ID。如果未提供,将使用域名获取。 |
|
||||
|
||||
#### 输出
|
||||
|
||||
| 参数 | 类型 | 描述 |
|
||||
| --------- | ---- | ----------- |
|
||||
| `ts` | 字符串 | 操作的时间戳 |
|
||||
| `users` | json | 用户数组,包含 accountId、displayName、emailAddress、active 状态和 avatarUrls |
|
||||
| `total` | 数字 | 返回的用户总数 |
|
||||
| `startAt` | 数字 | 分页起始索引 |
|
||||
| `maxResults` | 数字 | 每页最大结果数 |
|
||||
|
||||
## 备注
|
||||
|
||||
- 分类:`tools`
|
||||
- 类型:`jira`
|
||||
|
||||
@@ -2521,9 +2521,9 @@ checksums:
|
||||
content/22: ef92d95455e378abe4d27a1cdc5e1aed
|
||||
content/23: febd6019055f3754953fd93395d0dbf2
|
||||
content/24: 371d0e46b4bd2c23f559b8bc112f6955
|
||||
content/25: 7ef3f388e5ee9346bac54c771d825f40
|
||||
content/25: caf6acbe2a4495ca055cb9006ce47250
|
||||
content/26: bcadfc362b69078beee0088e5936c98b
|
||||
content/27: e0fa91c45aa780fc03e91df77417f893
|
||||
content/27: 57662dd91f8d1d807377fd48fa0e9142
|
||||
content/28: b463f54cd5fe2458b5842549fbb5e1ce
|
||||
content/29: 55f8c724e1a2463bc29a32518a512c73
|
||||
content/30: 371d0e46b4bd2c23f559b8bc112f6955
|
||||
@@ -2638,8 +2638,14 @@ checksums:
|
||||
content/139: 33fde4c3da4584b51f06183b7b192a78
|
||||
content/140: bcadfc362b69078beee0088e5936c98b
|
||||
content/141: b7451190f100388d999c183958d787a7
|
||||
content/142: b3f310d5ef115bea5a8b75bf25d7ea9a
|
||||
content/143: 4930918f803340baa861bed9cdf789de
|
||||
content/142: d0f9e799e2e5cc62de60668d35fd846f
|
||||
content/143: b19069ff19899fe202217e06e002c447
|
||||
content/144: 371d0e46b4bd2c23f559b8bc112f6955
|
||||
content/145: 480fd62f8d9cc18467e82f4c3f70beea
|
||||
content/146: bcadfc362b69078beee0088e5936c98b
|
||||
content/147: 4e73a65d3b873f3979587e10a0f39e72
|
||||
content/148: b3f310d5ef115bea5a8b75bf25d7ea9a
|
||||
content/149: 4930918f803340baa861bed9cdf789de
|
||||
8f76e389f6226f608571622b015ca6a1:
|
||||
meta/title: ddfe2191ea61b34d8b7cc1d7c19b94ac
|
||||
meta/description: 049ff551f2ebabb15cdea0c71bd8e4eb
|
||||
|
||||
@@ -573,10 +573,10 @@ export default function LoginPage({
|
||||
<Dialog open={forgotPasswordOpen} onOpenChange={setForgotPasswordOpen}>
|
||||
<DialogContent className='auth-card auth-card-shadow max-w-[540px] rounded-[10px] border backdrop-blur-sm'>
|
||||
<DialogHeader>
|
||||
<DialogTitle className='auth-text-primary font-semibold text-xl tracking-tight'>
|
||||
<DialogTitle className='font-semibold text-black text-xl tracking-tight'>
|
||||
Reset Password
|
||||
</DialogTitle>
|
||||
<DialogDescription className='auth-text-secondary text-sm'>
|
||||
<DialogDescription className='text-muted-foreground text-sm'>
|
||||
Enter your email address and we'll send you a link to reset your password if your
|
||||
account exists.
|
||||
</DialogDescription>
|
||||
|
||||
@@ -109,7 +109,7 @@ export default function Footer({ fullWidth = false }: FooterProps) {
|
||||
{FOOTER_BLOCKS.map((block) => (
|
||||
<Link
|
||||
key={block}
|
||||
href={`https://docs.sim.ai/blocks/${block.toLowerCase().replace(' ', '-')}`}
|
||||
href={`https://docs.sim.ai/blocks/${block.toLowerCase().replaceAll(' ', '-')}`}
|
||||
target='_blank'
|
||||
rel='noopener noreferrer'
|
||||
className='text-[14px] text-muted-foreground transition-colors hover:text-foreground'
|
||||
|
||||
@@ -1,8 +1,7 @@
|
||||
import Image from 'next/image'
|
||||
import Link from 'next/link'
|
||||
import { Avatar, AvatarFallback, AvatarImage } from '@/components/ui/avatar'
|
||||
import { getAllPostMeta } from '@/lib/blog/registry'
|
||||
import { soehne } from '@/app/_styles/fonts/soehne/soehne'
|
||||
import { PostGrid } from '@/app/(landing)/studio/post-grid'
|
||||
|
||||
export const revalidate = 3600
|
||||
|
||||
@@ -18,7 +17,6 @@ export default async function StudioIndex({
|
||||
const all = await getAllPostMeta()
|
||||
const filtered = tag ? all.filter((p) => p.tags.includes(tag)) : all
|
||||
|
||||
// Sort to ensure featured post is first on page 1
|
||||
const sorted =
|
||||
pageNum === 1
|
||||
? filtered.sort((a, b) => {
|
||||
@@ -63,69 +61,7 @@ export default async function StudioIndex({
|
||||
</div> */}
|
||||
|
||||
{/* Grid layout for consistent rows */}
|
||||
<div className='grid grid-cols-1 gap-4 md:grid-cols-2 md:gap-6 lg:grid-cols-3'>
|
||||
{posts.map((p, i) => {
|
||||
return (
|
||||
<Link key={p.slug} href={`/studio/${p.slug}`} className='group flex flex-col'>
|
||||
<div className='flex h-full flex-col overflow-hidden rounded-xl border border-gray-200 transition-colors duration-300 hover:border-gray-300'>
|
||||
<Image
|
||||
src={p.ogImage}
|
||||
alt={p.title}
|
||||
width={800}
|
||||
height={450}
|
||||
className='h-48 w-full object-cover'
|
||||
sizes='(max-width: 768px) 100vw, (max-width: 1024px) 50vw, 33vw'
|
||||
loading='lazy'
|
||||
unoptimized
|
||||
/>
|
||||
<div className='flex flex-1 flex-col p-4'>
|
||||
<div className='mb-2 text-gray-600 text-xs'>
|
||||
{new Date(p.date).toLocaleDateString('en-US', {
|
||||
month: 'short',
|
||||
day: 'numeric',
|
||||
year: 'numeric',
|
||||
})}
|
||||
</div>
|
||||
<h3 className='shine-text mb-1 font-medium text-lg leading-tight'>{p.title}</h3>
|
||||
<p className='mb-3 line-clamp-3 flex-1 text-gray-700 text-sm'>{p.description}</p>
|
||||
<div className='flex items-center gap-2'>
|
||||
<div className='-space-x-1.5 flex'>
|
||||
{(p.authors && p.authors.length > 0 ? p.authors : [p.author])
|
||||
.slice(0, 3)
|
||||
.map((author, idx) => (
|
||||
<Avatar key={idx} className='size-4 border border-white'>
|
||||
<AvatarImage src={author?.avatarUrl} alt={author?.name} />
|
||||
<AvatarFallback className='border border-white bg-gray-100 text-[10px] text-gray-600'>
|
||||
{author?.name.slice(0, 2)}
|
||||
</AvatarFallback>
|
||||
</Avatar>
|
||||
))}
|
||||
</div>
|
||||
<span className='text-gray-600 text-xs'>
|
||||
{(p.authors && p.authors.length > 0 ? p.authors : [p.author])
|
||||
.slice(0, 2)
|
||||
.map((a) => a?.name)
|
||||
.join(', ')}
|
||||
{(p.authors && p.authors.length > 0 ? p.authors : [p.author]).length > 2 && (
|
||||
<>
|
||||
{' '}
|
||||
and{' '}
|
||||
{(p.authors && p.authors.length > 0 ? p.authors : [p.author]).length - 2}{' '}
|
||||
other
|
||||
{(p.authors && p.authors.length > 0 ? p.authors : [p.author]).length - 2 >
|
||||
1
|
||||
? 's'
|
||||
: ''}
|
||||
</>
|
||||
)}
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</Link>
|
||||
)
|
||||
})}
|
||||
</div>
|
||||
<PostGrid posts={posts} />
|
||||
|
||||
{totalPages > 1 && (
|
||||
<div className='mt-10 flex items-center justify-center gap-3'>
|
||||
|
||||
90
apps/sim/app/(landing)/studio/post-grid.tsx
Normal file
@@ -0,0 +1,90 @@
|
||||
'use client'
|
||||
|
||||
import Image from 'next/image'
|
||||
import Link from 'next/link'
|
||||
import { Avatar, AvatarFallback, AvatarImage } from '@/components/ui/avatar'
|
||||
|
||||
interface Author {
|
||||
id: string
|
||||
name: string
|
||||
avatarUrl?: string
|
||||
url?: string
|
||||
}
|
||||
|
||||
interface Post {
|
||||
slug: string
|
||||
title: string
|
||||
description: string
|
||||
date: string
|
||||
ogImage: string
|
||||
author: Author
|
||||
authors?: Author[]
|
||||
featured?: boolean
|
||||
}
|
||||
|
||||
export function PostGrid({ posts }: { posts: Post[] }) {
|
||||
return (
|
||||
<div className='grid grid-cols-1 gap-4 md:grid-cols-2 md:gap-6 lg:grid-cols-3'>
|
||||
{posts.map((p, index) => (
|
||||
<Link key={p.slug} href={`/studio/${p.slug}`} className='group flex flex-col'>
|
||||
<div className='flex h-full flex-col overflow-hidden rounded-xl border border-gray-200 transition-colors duration-300 hover:border-gray-300'>
|
||||
{/* Image container with fixed aspect ratio to prevent layout shift */}
|
||||
<div className='relative aspect-video w-full overflow-hidden'>
|
||||
<Image
|
||||
src={p.ogImage}
|
||||
alt={p.title}
|
||||
sizes='(max-width: 768px) 100vw, (max-width: 1024px) 50vw, 33vw'
|
||||
unoptimized
|
||||
priority={index < 6}
|
||||
loading={index < 6 ? undefined : 'lazy'}
|
||||
fill
|
||||
style={{ objectFit: 'cover' }}
|
||||
/>
|
||||
</div>
|
||||
<div className='flex flex-1 flex-col p-4'>
|
||||
<div className='mb-2 text-gray-600 text-xs'>
|
||||
{new Date(p.date).toLocaleDateString('en-US', {
|
||||
month: 'short',
|
||||
day: 'numeric',
|
||||
year: 'numeric',
|
||||
})}
|
||||
</div>
|
||||
<h3 className='shine-text mb-1 font-medium text-lg leading-tight'>{p.title}</h3>
|
||||
<p className='mb-3 line-clamp-3 flex-1 text-gray-700 text-sm'>{p.description}</p>
|
||||
<div className='flex items-center gap-2'>
|
||||
<div className='-space-x-1.5 flex'>
|
||||
{(p.authors && p.authors.length > 0 ? p.authors : [p.author])
|
||||
.slice(0, 3)
|
||||
.map((author, idx) => (
|
||||
<Avatar key={idx} className='size-4 border border-white'>
|
||||
<AvatarImage src={author?.avatarUrl} alt={author?.name} />
|
||||
<AvatarFallback className='border border-white bg-gray-100 text-[10px] text-gray-600'>
|
||||
{author?.name.slice(0, 2)}
|
||||
</AvatarFallback>
|
||||
</Avatar>
|
||||
))}
|
||||
</div>
|
||||
<span className='text-gray-600 text-xs'>
|
||||
{(p.authors && p.authors.length > 0 ? p.authors : [p.author])
|
||||
.slice(0, 2)
|
||||
.map((a) => a?.name)
|
||||
.join(', ')}
|
||||
{(p.authors && p.authors.length > 0 ? p.authors : [p.author]).length > 2 && (
|
||||
<>
|
||||
{' '}
|
||||
and {(p.authors && p.authors.length > 0 ? p.authors : [p.author]).length - 2}{' '}
|
||||
other
|
||||
{(p.authors && p.authors.length > 0 ? p.authors : [p.author]).length - 2 > 1
|
||||
? 's'
|
||||
: ''}
|
||||
</>
|
||||
)}
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</Link>
|
||||
))}
|
||||
</div>
|
||||
)
|
||||
}
|
||||
@@ -12,6 +12,7 @@ export function ThemeProvider({ children, ...props }: ThemeProviderProps) {
|
||||
pathname === '/' ||
|
||||
pathname.startsWith('/login') ||
|
||||
pathname.startsWith('/signup') ||
|
||||
pathname.startsWith('/reset-password') ||
|
||||
pathname.startsWith('/sso') ||
|
||||
pathname.startsWith('/terms') ||
|
||||
pathname.startsWith('/privacy') ||
|
||||
|
||||
@@ -759,3 +759,24 @@ input[type="search"]::-ms-clear {
|
||||
--surface-elevated: #202020;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove backticks from inline code in prose (Tailwind Typography default)
|
||||
*/
|
||||
.prose code::before,
|
||||
.prose code::after {
|
||||
content: none !important;
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove underlines from heading anchor links in prose
|
||||
*/
|
||||
.prose h1 a,
|
||||
.prose h2 a,
|
||||
.prose h3 a,
|
||||
.prose h4 a,
|
||||
.prose h5 a,
|
||||
.prose h6 a {
|
||||
text-decoration: none !important;
|
||||
color: inherit !important;
|
||||
}
|
||||
|
||||
@@ -32,7 +32,17 @@ export async function GET(request: NextRequest) {
|
||||
.from(account)
|
||||
.where(and(...whereConditions))
|
||||
|
||||
return NextResponse.json({ accounts })
|
||||
// Use the user's email as the display name (consistent with credential selector)
|
||||
const userEmail = session.user.email
|
||||
|
||||
const accountsWithDisplayName = accounts.map((acc) => ({
|
||||
id: acc.id,
|
||||
accountId: acc.accountId,
|
||||
providerId: acc.providerId,
|
||||
displayName: userEmail || acc.providerId,
|
||||
}))
|
||||
|
||||
return NextResponse.json({ accounts: accountsWithDisplayName })
|
||||
} catch (error) {
|
||||
logger.error('Failed to fetch accounts', { error })
|
||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
||||
|
||||
@@ -6,6 +6,10 @@
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
import { createMockRequest, setupAuthApiMocks } from '@/app/api/__test-utils__/utils'
|
||||
|
||||
vi.mock('@/lib/core/utils/urls', () => ({
|
||||
getBaseUrl: vi.fn(() => 'https://app.example.com'),
|
||||
}))
|
||||
|
||||
describe('Forget Password API Route', () => {
|
||||
beforeEach(() => {
|
||||
vi.resetModules()
|
||||
@@ -15,7 +19,7 @@ describe('Forget Password API Route', () => {
|
||||
vi.clearAllMocks()
|
||||
})
|
||||
|
||||
it('should send password reset email successfully', async () => {
|
||||
it('should send password reset email successfully with same-origin redirectTo', async () => {
|
||||
setupAuthApiMocks({
|
||||
operations: {
|
||||
forgetPassword: { success: true },
|
||||
@@ -24,7 +28,7 @@ describe('Forget Password API Route', () => {
|
||||
|
||||
const req = createMockRequest('POST', {
|
||||
email: 'test@example.com',
|
||||
redirectTo: 'https://example.com/reset',
|
||||
redirectTo: 'https://app.example.com/reset',
|
||||
})
|
||||
|
||||
const { POST } = await import('@/app/api/auth/forget-password/route')
|
||||
@@ -39,12 +43,36 @@ describe('Forget Password API Route', () => {
|
||||
expect(auth.auth.api.forgetPassword).toHaveBeenCalledWith({
|
||||
body: {
|
||||
email: 'test@example.com',
|
||||
redirectTo: 'https://example.com/reset',
|
||||
redirectTo: 'https://app.example.com/reset',
|
||||
},
|
||||
method: 'POST',
|
||||
})
|
||||
})
|
||||
|
||||
it('should reject external redirectTo URL', async () => {
|
||||
setupAuthApiMocks({
|
||||
operations: {
|
||||
forgetPassword: { success: true },
|
||||
},
|
||||
})
|
||||
|
||||
const req = createMockRequest('POST', {
|
||||
email: 'test@example.com',
|
||||
redirectTo: 'https://evil.com/phishing',
|
||||
})
|
||||
|
||||
const { POST } = await import('@/app/api/auth/forget-password/route')
|
||||
|
||||
const response = await POST(req)
|
||||
const data = await response.json()
|
||||
|
||||
expect(response.status).toBe(400)
|
||||
expect(data.message).toBe('Redirect URL must be a valid same-origin URL')
|
||||
|
||||
const auth = await import('@/lib/auth')
|
||||
expect(auth.auth.api.forgetPassword).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('should send password reset email without redirectTo', async () => {
|
||||
setupAuthApiMocks({
|
||||
operations: {
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { auth } from '@/lib/auth'
|
||||
import { isSameOrigin } from '@/lib/core/utils/validation'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
@@ -13,10 +14,15 @@ const forgetPasswordSchema = z.object({
|
||||
.email('Please provide a valid email address'),
|
||||
redirectTo: z
|
||||
.string()
|
||||
.url('Redirect URL must be a valid URL')
|
||||
.optional()
|
||||
.or(z.literal(''))
|
||||
.transform((val) => (val === '' ? undefined : val)),
|
||||
.transform((val) => (val === '' || val === undefined ? undefined : val))
|
||||
.refine(
|
||||
(val) => val === undefined || (z.string().url().safeParse(val).success && isSameOrigin(val)),
|
||||
{
|
||||
message: 'Redirect URL must be a valid same-origin URL',
|
||||
}
|
||||
),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
|
||||
@@ -20,6 +20,12 @@ export async function POST(request: Request) {
|
||||
cloudId: providedCloudId,
|
||||
issueType,
|
||||
parent,
|
||||
labels,
|
||||
duedate,
|
||||
reporter,
|
||||
environment,
|
||||
customFieldId,
|
||||
customFieldValue,
|
||||
} = await request.json()
|
||||
|
||||
if (!domain) {
|
||||
@@ -94,17 +100,57 @@ export async function POST(request: Request) {
|
||||
}
|
||||
|
||||
if (priority !== undefined && priority !== null && priority !== '') {
|
||||
fields.priority = {
|
||||
name: priority,
|
||||
const isNumericId = /^\d+$/.test(priority)
|
||||
fields.priority = isNumericId ? { id: priority } : { name: priority }
|
||||
}
|
||||
|
||||
if (labels !== undefined && labels !== null && Array.isArray(labels) && labels.length > 0) {
|
||||
fields.labels = labels
|
||||
}
|
||||
|
||||
if (duedate !== undefined && duedate !== null && duedate !== '') {
|
||||
fields.duedate = duedate
|
||||
}
|
||||
|
||||
if (reporter !== undefined && reporter !== null && reporter !== '') {
|
||||
fields.reporter = {
|
||||
id: reporter,
|
||||
}
|
||||
}
|
||||
|
||||
if (assignee !== undefined && assignee !== null && assignee !== '') {
|
||||
fields.assignee = {
|
||||
id: assignee,
|
||||
if (environment !== undefined && environment !== null && environment !== '') {
|
||||
fields.environment = {
|
||||
type: 'doc',
|
||||
version: 1,
|
||||
content: [
|
||||
{
|
||||
type: 'paragraph',
|
||||
content: [
|
||||
{
|
||||
type: 'text',
|
||||
text: environment,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
}
|
||||
}
|
||||
|
||||
if (
|
||||
customFieldId !== undefined &&
|
||||
customFieldId !== null &&
|
||||
customFieldId !== '' &&
|
||||
customFieldValue !== undefined &&
|
||||
customFieldValue !== null &&
|
||||
customFieldValue !== ''
|
||||
) {
|
||||
const fieldId = customFieldId.startsWith('customfield_')
|
||||
? customFieldId
|
||||
: `customfield_${customFieldId}`
|
||||
|
||||
fields[fieldId] = customFieldValue
|
||||
}
|
||||
|
||||
const body = { fields }
|
||||
|
||||
const response = await fetch(url, {
|
||||
@@ -132,16 +178,47 @@ export async function POST(request: Request) {
|
||||
}
|
||||
|
||||
const responseData = await response.json()
|
||||
logger.info('Successfully created Jira issue:', responseData.key)
|
||||
const issueKey = responseData.key || 'unknown'
|
||||
logger.info('Successfully created Jira issue:', issueKey)
|
||||
|
||||
let assigneeId: string | undefined
|
||||
if (assignee !== undefined && assignee !== null && assignee !== '') {
|
||||
const assignUrl = `https://api.atlassian.com/ex/jira/${cloudId}/rest/api/3/issue/${issueKey}/assignee`
|
||||
logger.info('Assigning issue to:', assignee)
|
||||
|
||||
const assignResponse = await fetch(assignUrl, {
|
||||
method: 'PUT',
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
Accept: 'application/json',
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
body: JSON.stringify({
|
||||
accountId: assignee,
|
||||
}),
|
||||
})
|
||||
|
||||
if (!assignResponse.ok) {
|
||||
const assignErrorText = await assignResponse.text()
|
||||
logger.warn('Failed to assign issue (issue was created successfully):', {
|
||||
status: assignResponse.status,
|
||||
error: assignErrorText,
|
||||
})
|
||||
} else {
|
||||
assigneeId = assignee
|
||||
logger.info('Successfully assigned issue to:', assignee)
|
||||
}
|
||||
}
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
output: {
|
||||
ts: new Date().toISOString(),
|
||||
issueKey: responseData.key || 'unknown',
|
||||
issueKey: issueKey,
|
||||
summary: responseData.fields?.summary || 'Issue created',
|
||||
success: true,
|
||||
url: `https://${domain}/browse/${responseData.key}`,
|
||||
url: `https://${domain}/browse/${issueKey}`,
|
||||
...(assigneeId && { assigneeId }),
|
||||
},
|
||||
})
|
||||
} catch (error: any) {
|
||||
|
||||
@@ -11,6 +11,7 @@ import { processInputFileFields } from '@/lib/execution/files'
|
||||
import { preprocessExecution } from '@/lib/execution/preprocessing'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { LoggingSession } from '@/lib/logs/execution/logging-session'
|
||||
import { ALL_TRIGGER_TYPES } from '@/lib/logs/types'
|
||||
import { executeWorkflowCore } from '@/lib/workflows/executor/execution-core'
|
||||
import { type ExecutionEvent, encodeSSEEvent } from '@/lib/workflows/executor/execution-events'
|
||||
import { PauseResumeManager } from '@/lib/workflows/executor/human-in-the-loop-manager'
|
||||
@@ -30,7 +31,7 @@ const logger = createLogger('WorkflowExecuteAPI')
|
||||
|
||||
const ExecuteWorkflowSchema = z.object({
|
||||
selectedOutputs: z.array(z.string()).optional().default([]),
|
||||
triggerType: z.enum(['api', 'webhook', 'schedule', 'manual', 'chat']).optional(),
|
||||
triggerType: z.enum(ALL_TRIGGER_TYPES).optional(),
|
||||
stream: z.boolean().optional(),
|
||||
useDraftState: z.boolean().optional(),
|
||||
input: z.any().optional(),
|
||||
|
||||
@@ -6,13 +6,14 @@ import { z } from 'zod'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { encryptSecret } from '@/lib/core/security/encryption'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { ALL_TRIGGER_TYPES } from '@/lib/logs/types'
|
||||
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
|
||||
import { MAX_EMAIL_RECIPIENTS, MAX_WORKFLOW_IDS } from '../constants'
|
||||
|
||||
const logger = createLogger('WorkspaceNotificationAPI')
|
||||
|
||||
const levelFilterSchema = z.array(z.enum(['info', 'error']))
|
||||
const triggerFilterSchema = z.array(z.enum(['api', 'webhook', 'schedule', 'manual', 'chat']))
|
||||
const triggerFilterSchema = z.array(z.enum(ALL_TRIGGER_TYPES))
|
||||
|
||||
const alertRuleSchema = z.enum([
|
||||
'consecutive_failures',
|
||||
|
||||
@@ -7,6 +7,7 @@ import { z } from 'zod'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { encryptSecret } from '@/lib/core/security/encryption'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { ALL_TRIGGER_TYPES } from '@/lib/logs/types'
|
||||
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
|
||||
import { MAX_EMAIL_RECIPIENTS, MAX_NOTIFICATIONS_PER_TYPE, MAX_WORKFLOW_IDS } from './constants'
|
||||
|
||||
@@ -14,7 +15,7 @@ const logger = createLogger('WorkspaceNotificationsAPI')
|
||||
|
||||
const notificationTypeSchema = z.enum(['webhook', 'email', 'slack'])
|
||||
const levelFilterSchema = z.array(z.enum(['info', 'error']))
|
||||
const triggerFilterSchema = z.array(z.enum(['api', 'webhook', 'schedule', 'manual', 'chat']))
|
||||
const triggerFilterSchema = z.array(z.enum(ALL_TRIGGER_TYPES))
|
||||
|
||||
const alertRuleSchema = z.enum([
|
||||
'consecutive_failures',
|
||||
@@ -80,7 +81,7 @@ const createNotificationSchema = z
|
||||
workflowIds: z.array(z.string()).max(MAX_WORKFLOW_IDS).default([]),
|
||||
allWorkflows: z.boolean().default(false),
|
||||
levelFilter: levelFilterSchema.default(['info', 'error']),
|
||||
triggerFilter: triggerFilterSchema.default(['api', 'webhook', 'schedule', 'manual', 'chat']),
|
||||
triggerFilter: triggerFilterSchema.default([...ALL_TRIGGER_TYPES]),
|
||||
includeFinalOutput: z.boolean().default(false),
|
||||
includeTraceSpans: z.boolean().default(false),
|
||||
includeRateLimits: z.boolean().default(false),
|
||||
|
||||
@@ -104,6 +104,8 @@ export function SlackChannelSelector({
|
||||
disabled={disabled || channels.length === 0}
|
||||
isLoading={isLoading}
|
||||
error={fetchError}
|
||||
searchable
|
||||
searchPlaceholder='Search channels...'
|
||||
/>
|
||||
{selectedChannel && !fetchError && (
|
||||
<p className='text-[12px] text-[var(--text-muted)]'>
|
||||
|
||||
@@ -22,6 +22,7 @@ import { SlackIcon } from '@/components/icons'
|
||||
import { Skeleton } from '@/components/ui'
|
||||
import { cn } from '@/lib/core/utils/cn'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { ALL_TRIGGER_TYPES, type TriggerType } from '@/lib/logs/types'
|
||||
import { quickValidateEmail } from '@/lib/messaging/email/validation'
|
||||
import {
|
||||
type NotificationSubscription,
|
||||
@@ -43,7 +44,6 @@ const PRIMARY_BUTTON_STYLES =
|
||||
|
||||
type NotificationType = 'webhook' | 'email' | 'slack'
|
||||
type LogLevel = 'info' | 'error'
|
||||
type TriggerType = 'api' | 'webhook' | 'schedule' | 'manual' | 'chat'
|
||||
type AlertRule =
|
||||
| 'none'
|
||||
| 'consecutive_failures'
|
||||
@@ -84,7 +84,6 @@ interface NotificationSettingsProps {
|
||||
}
|
||||
|
||||
const LOG_LEVELS: LogLevel[] = ['info', 'error']
|
||||
const TRIGGER_TYPES: TriggerType[] = ['api', 'webhook', 'schedule', 'manual', 'chat']
|
||||
|
||||
function formatAlertConfigLabel(config: {
|
||||
rule: AlertRule
|
||||
@@ -137,7 +136,7 @@ export function NotificationSettings({
|
||||
workflowIds: [] as string[],
|
||||
allWorkflows: true,
|
||||
levelFilter: ['info', 'error'] as LogLevel[],
|
||||
triggerFilter: ['api', 'webhook', 'schedule', 'manual', 'chat'] as TriggerType[],
|
||||
triggerFilter: [...ALL_TRIGGER_TYPES] as TriggerType[],
|
||||
includeFinalOutput: false,
|
||||
includeTraceSpans: false,
|
||||
includeRateLimits: false,
|
||||
@@ -207,7 +206,7 @@ export function NotificationSettings({
|
||||
workflowIds: [],
|
||||
allWorkflows: true,
|
||||
levelFilter: ['info', 'error'],
|
||||
triggerFilter: ['api', 'webhook', 'schedule', 'manual', 'chat'],
|
||||
triggerFilter: [...ALL_TRIGGER_TYPES],
|
||||
includeFinalOutput: false,
|
||||
includeTraceSpans: false,
|
||||
includeRateLimits: false,
|
||||
@@ -768,7 +767,7 @@ export function NotificationSettings({
|
||||
<Combobox
|
||||
options={slackAccounts.map((acc) => ({
|
||||
value: acc.id,
|
||||
label: acc.accountId,
|
||||
label: acc.displayName || 'Slack Workspace',
|
||||
}))}
|
||||
value={formData.slackAccountId}
|
||||
onChange={(value) => {
|
||||
@@ -859,7 +858,7 @@ export function NotificationSettings({
|
||||
<div className='flex flex-col gap-[8px]'>
|
||||
<Label className='text-[var(--text-secondary)]'>Trigger Type Filters</Label>
|
||||
<Combobox
|
||||
options={TRIGGER_TYPES.map((trigger) => ({
|
||||
options={ALL_TRIGGER_TYPES.map((trigger) => ({
|
||||
label: trigger.charAt(0).toUpperCase() + trigger.slice(1),
|
||||
value: trigger,
|
||||
}))}
|
||||
|
||||
@@ -37,6 +37,7 @@ import { useWand } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-
|
||||
import type { GenerationType } from '@/blocks/types'
|
||||
import { createEnvVarPattern, createReferencePattern } from '@/executor/utils/reference-validation'
|
||||
import { useTagSelection } from '@/hooks/use-tag-selection'
|
||||
import { useTextHistory } from '@/hooks/use-text-history'
|
||||
import { normalizeBlockName } from '@/stores/workflows/utils'
|
||||
|
||||
const logger = createLogger('Code')
|
||||
@@ -305,6 +306,20 @@ export function Code({
|
||||
},
|
||||
})
|
||||
|
||||
// Text history for undo/redo with debouncing
|
||||
const textHistory = useTextHistory({
|
||||
blockId,
|
||||
subBlockId,
|
||||
value: code,
|
||||
onChange: (newValue) => {
|
||||
setCode(newValue)
|
||||
if (!isPreview && !disabled) {
|
||||
setStoreValue(newValue)
|
||||
}
|
||||
},
|
||||
disabled: isPreview || disabled || readOnly || isAiStreaming,
|
||||
})
|
||||
|
||||
const getDefaultValueString = () => {
|
||||
if (defaultValue === undefined || defaultValue === null) return ''
|
||||
if (typeof defaultValue === 'string') return defaultValue
|
||||
@@ -348,10 +363,12 @@ export function Code({
|
||||
useEffect(() => {
|
||||
handleStreamStartRef.current = () => {
|
||||
setCode('')
|
||||
lastInternalValueRef.current = ''
|
||||
}
|
||||
|
||||
handleGeneratedContentRef.current = (generatedCode: string) => {
|
||||
setCode(generatedCode)
|
||||
lastInternalValueRef.current = generatedCode
|
||||
if (!isPreview && !disabled) {
|
||||
setStoreValue(generatedCode)
|
||||
}
|
||||
@@ -387,14 +404,21 @@ export function Code({
|
||||
}
|
||||
}, [readOnly])
|
||||
|
||||
// Effects: Sync code with external value
|
||||
// Ref to track the last value we set internally (to avoid sync loops)
|
||||
const lastInternalValueRef = useRef<string>('')
|
||||
|
||||
// Effects: Sync code with external value (only for truly external changes)
|
||||
useEffect(() => {
|
||||
if (isAiStreaming) return
|
||||
const valueString = value?.toString() ?? ''
|
||||
if (valueString !== code) {
|
||||
|
||||
// Only sync if this is a genuine external change, not our own update
|
||||
// This prevents resetting the undo history when we update the store
|
||||
if (valueString !== code && valueString !== lastInternalValueRef.current) {
|
||||
setCode(valueString)
|
||||
lastInternalValueRef.current = valueString
|
||||
}
|
||||
}, [value, code, isAiStreaming])
|
||||
}, [value, isAiStreaming]) // Removed 'code' from dependencies to prevent sync loops
|
||||
|
||||
// Effects: Track active line number for cursor position
|
||||
useEffect(() => {
|
||||
@@ -502,8 +526,9 @@ export function Code({
|
||||
const dropPosition = textarea?.selectionStart ?? code.length
|
||||
const newValue = `${code.slice(0, dropPosition)}<${code.slice(dropPosition)}`
|
||||
|
||||
setCode(newValue)
|
||||
setStoreValue(newValue)
|
||||
// Use textHistory for proper undo tracking
|
||||
textHistory.handleChange(newValue)
|
||||
lastInternalValueRef.current = newValue
|
||||
const newCursorPosition = dropPosition + 1
|
||||
setCursorPosition(newCursorPosition)
|
||||
|
||||
@@ -531,7 +556,9 @@ export function Code({
|
||||
*/
|
||||
const handleTagSelect = (newValue: string) => {
|
||||
if (!isPreview && !readOnly) {
|
||||
setCode(newValue)
|
||||
// Use textHistory for proper undo tracking
|
||||
textHistory.handleChange(newValue)
|
||||
lastInternalValueRef.current = newValue
|
||||
emitTagSelection(newValue)
|
||||
}
|
||||
setShowTags(false)
|
||||
@@ -548,7 +575,9 @@ export function Code({
|
||||
*/
|
||||
const handleEnvVarSelect = (newValue: string) => {
|
||||
if (!isPreview && !readOnly) {
|
||||
setCode(newValue)
|
||||
// Use textHistory for proper undo tracking
|
||||
textHistory.handleChange(newValue)
|
||||
lastInternalValueRef.current = newValue
|
||||
emitTagSelection(newValue)
|
||||
}
|
||||
setShowEnvVars(false)
|
||||
@@ -741,8 +770,10 @@ export function Code({
|
||||
value={code}
|
||||
onValueChange={(newCode) => {
|
||||
if (!isAiStreaming && !isPreview && !disabled && !readOnly) {
|
||||
setCode(newCode)
|
||||
setStoreValue(newCode)
|
||||
// Use textHistory for debounced undo/redo tracking
|
||||
textHistory.handleChange(newCode)
|
||||
// Track this as an internal change to prevent sync loops
|
||||
lastInternalValueRef.current = newCode
|
||||
|
||||
const textarea = editorRef.current?.querySelector('textarea')
|
||||
if (textarea) {
|
||||
@@ -762,6 +793,10 @@ export function Code({
|
||||
}
|
||||
}}
|
||||
onKeyDown={(e) => {
|
||||
// Let text history handle undo/redo first
|
||||
if (textHistory.handleKeyDown(e)) {
|
||||
return
|
||||
}
|
||||
if (e.key === 'Escape') {
|
||||
setShowTags(false)
|
||||
setShowEnvVars(false)
|
||||
@@ -770,6 +805,10 @@ export function Code({
|
||||
e.preventDefault()
|
||||
}
|
||||
}}
|
||||
onBlur={() => {
|
||||
// Commit any pending text history changes on blur
|
||||
textHistory.handleBlur()
|
||||
}}
|
||||
highlight={createHighlightFunction(effectiveLanguage, shouldHighlightReference)}
|
||||
{...getCodeEditorProps({ isStreaming: isAiStreaming, isPreview, disabled })}
|
||||
/>
|
||||
|
||||
@@ -90,6 +90,7 @@ export function ShortInput({
|
||||
blockId,
|
||||
triggerId: undefined,
|
||||
isPreview,
|
||||
useWebhookUrl,
|
||||
})
|
||||
|
||||
const wandHook = useWand({
|
||||
|
||||
@@ -844,8 +844,13 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
|
||||
if (!accessibleBlock) continue
|
||||
|
||||
// Skip the current block - blocks cannot reference their own outputs
|
||||
// Exception: approval blocks can reference their own outputs
|
||||
if (accessibleBlockId === blockId && accessibleBlock.type !== 'approval') continue
|
||||
// Exception: approval and human_in_the_loop blocks can reference their own outputs
|
||||
if (
|
||||
accessibleBlockId === blockId &&
|
||||
accessibleBlock.type !== 'approval' &&
|
||||
accessibleBlock.type !== 'human_in_the_loop'
|
||||
)
|
||||
continue
|
||||
|
||||
const blockConfig = getBlock(accessibleBlock.type)
|
||||
|
||||
@@ -972,6 +977,8 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
|
||||
const allTags = outputPaths.map((path) => `${normalizedBlockName}.${path}`)
|
||||
blockTags = isSelfReference ? allTags.filter((tag) => tag.endsWith('.url')) : allTags
|
||||
}
|
||||
} else if (accessibleBlock.type === 'human_in_the_loop') {
|
||||
blockTags = [`${normalizedBlockName}.url`]
|
||||
} else {
|
||||
const operationValue =
|
||||
mergedSubBlocks?.operation?.value ?? getSubBlockValue(accessibleBlockId, 'operation')
|
||||
@@ -1214,31 +1221,25 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
|
||||
|
||||
let processedTag = tag
|
||||
|
||||
// Check if this is a file property and add [0] automatically
|
||||
// Only include user-accessible fields (matches UserFile interface)
|
||||
const fileProperties = ['id', 'name', 'url', 'size', 'type']
|
||||
const parts = tag.split('.')
|
||||
if (parts.length >= 2 && fileProperties.includes(parts[parts.length - 1])) {
|
||||
const fieldName = parts[parts.length - 2]
|
||||
if (parts.length >= 3 && blockGroup) {
|
||||
const arrayFieldName = parts[1] // e.g., "channels", "files", "users"
|
||||
const block = useWorkflowStore.getState().blocks[blockGroup.blockId]
|
||||
const blockConfig = block ? (getBlock(block.type) ?? null) : null
|
||||
const mergedSubBlocks = getMergedSubBlocks(blockGroup.blockId)
|
||||
|
||||
if (blockGroup) {
|
||||
const block = useWorkflowStore.getState().blocks[blockGroup.blockId]
|
||||
const blockConfig = block ? (getBlock(block.type) ?? null) : null
|
||||
const mergedSubBlocks = getMergedSubBlocks(blockGroup.blockId)
|
||||
const fieldType = getOutputTypeForPath(
|
||||
block,
|
||||
blockConfig,
|
||||
blockGroup.blockId,
|
||||
arrayFieldName,
|
||||
mergedSubBlocks
|
||||
)
|
||||
|
||||
const fieldType = getOutputTypeForPath(
|
||||
block,
|
||||
blockConfig,
|
||||
blockGroup.blockId,
|
||||
fieldName,
|
||||
mergedSubBlocks
|
||||
)
|
||||
|
||||
if (fieldType === 'files') {
|
||||
const blockAndField = parts.slice(0, -1).join('.')
|
||||
const property = parts[parts.length - 1]
|
||||
processedTag = `${blockAndField}[0].${property}`
|
||||
}
|
||||
if (fieldType === 'files' || fieldType === 'array') {
|
||||
const blockName = parts[0]
|
||||
const remainingPath = parts.slice(2).join('.')
|
||||
processedTag = `${blockName}.${arrayFieldName}[0].${remainingPath}`
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import type { ReactElement } from 'react'
|
||||
import { useEffect, useRef, useState } from 'react'
|
||||
import { useCallback, useEffect, useRef, useState } from 'react'
|
||||
import 'prismjs/components/prism-json'
|
||||
import { Wand2 } from 'lucide-react'
|
||||
import Editor from 'react-simple-code-editor'
|
||||
@@ -17,6 +17,7 @@ import {
|
||||
createEnvVarPattern,
|
||||
createWorkflowVariablePattern,
|
||||
} from '@/executor/utils/reference-validation'
|
||||
import { useTextHistoryStore } from '@/stores/text-history'
|
||||
|
||||
interface CodeEditorProps {
|
||||
value: string
|
||||
@@ -33,6 +34,11 @@ interface CodeEditorProps {
|
||||
showWandButton?: boolean
|
||||
onWandClick?: () => void
|
||||
wandButtonDisabled?: boolean
|
||||
/**
|
||||
* Unique identifier for text history. When provided, enables undo/redo functionality.
|
||||
* Format: "blockId:fieldName" e.g. "block-123:schema" or "block-123:code"
|
||||
*/
|
||||
historyId?: string
|
||||
}
|
||||
|
||||
export function CodeEditor({
|
||||
@@ -50,16 +56,125 @@ export function CodeEditor({
|
||||
showWandButton = false,
|
||||
onWandClick,
|
||||
wandButtonDisabled = false,
|
||||
historyId,
|
||||
}: CodeEditorProps) {
|
||||
const [code, setCode] = useState(value)
|
||||
const [visualLineHeights, setVisualLineHeights] = useState<number[]>([])
|
||||
|
||||
const editorRef = useRef<HTMLDivElement>(null)
|
||||
const lastInternalValueRef = useRef<string>(value)
|
||||
const initializedRef = useRef(false)
|
||||
|
||||
// Text history store for undo/redo
|
||||
const textHistoryStore = useTextHistoryStore()
|
||||
|
||||
// Parse historyId into blockId and subBlockId for the store
|
||||
const [historyBlockId, historySubBlockId] = historyId?.split(':') ?? ['', '']
|
||||
const hasHistory = Boolean(historyId && historyBlockId && historySubBlockId)
|
||||
|
||||
// Initialize history on mount
|
||||
useEffect(() => {
|
||||
setCode(value)
|
||||
if (hasHistory && !initializedRef.current) {
|
||||
textHistoryStore.initHistory(historyBlockId, historySubBlockId, value)
|
||||
initializedRef.current = true
|
||||
}
|
||||
}, [hasHistory, historyBlockId, historySubBlockId, value, textHistoryStore])
|
||||
|
||||
// Sync external value changes (but avoid resetting undo history for internal changes)
|
||||
useEffect(() => {
|
||||
if (value !== code && value !== lastInternalValueRef.current) {
|
||||
setCode(value)
|
||||
lastInternalValueRef.current = value
|
||||
}
|
||||
}, [value])
|
||||
|
||||
// Handle value change with history tracking
|
||||
const handleValueChange = useCallback(
|
||||
(newCode: string) => {
|
||||
setCode(newCode)
|
||||
lastInternalValueRef.current = newCode
|
||||
onChange(newCode)
|
||||
|
||||
// Record to history if enabled
|
||||
if (hasHistory) {
|
||||
textHistoryStore.recordChange(historyBlockId, historySubBlockId, newCode)
|
||||
}
|
||||
},
|
||||
[onChange, hasHistory, historyBlockId, historySubBlockId, textHistoryStore]
|
||||
)
|
||||
|
||||
// Handle undo
|
||||
const handleUndo = useCallback(() => {
|
||||
if (!hasHistory) return false
|
||||
|
||||
const previousValue = textHistoryStore.undo(historyBlockId, historySubBlockId)
|
||||
if (previousValue !== null) {
|
||||
setCode(previousValue)
|
||||
lastInternalValueRef.current = previousValue
|
||||
onChange(previousValue)
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}, [hasHistory, historyBlockId, historySubBlockId, textHistoryStore, onChange])
|
||||
|
||||
// Handle redo
|
||||
const handleRedo = useCallback(() => {
|
||||
if (!hasHistory) return false
|
||||
|
||||
const nextValue = textHistoryStore.redo(historyBlockId, historySubBlockId)
|
||||
if (nextValue !== null) {
|
||||
setCode(nextValue)
|
||||
lastInternalValueRef.current = nextValue
|
||||
onChange(nextValue)
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}, [hasHistory, historyBlockId, historySubBlockId, textHistoryStore, onChange])
|
||||
|
||||
// Handle keyboard events for undo/redo
|
||||
const handleKeyDown = useCallback(
|
||||
(e: React.KeyboardEvent) => {
|
||||
if (disabled) return
|
||||
|
||||
const isMod = e.metaKey || e.ctrlKey
|
||||
|
||||
// Undo: Cmd+Z / Ctrl+Z
|
||||
if (isMod && e.key === 'z' && !e.shiftKey && hasHistory) {
|
||||
if (handleUndo()) {
|
||||
e.preventDefault()
|
||||
e.stopPropagation()
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
// Redo: Cmd+Shift+Z / Ctrl+Shift+Z / Ctrl+Y
|
||||
if (hasHistory) {
|
||||
if (
|
||||
(isMod && e.key === 'z' && e.shiftKey) ||
|
||||
(isMod && e.key === 'Z') ||
|
||||
(e.ctrlKey && e.key === 'y')
|
||||
) {
|
||||
if (handleRedo()) {
|
||||
e.preventDefault()
|
||||
e.stopPropagation()
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Call parent's onKeyDown if provided
|
||||
onKeyDown?.(e)
|
||||
},
|
||||
[disabled, hasHistory, handleUndo, handleRedo, onKeyDown]
|
||||
)
|
||||
|
||||
// Handle blur - commit pending history
|
||||
const handleBlur = useCallback(() => {
|
||||
if (hasHistory) {
|
||||
textHistoryStore.commitPending(historyBlockId, historySubBlockId)
|
||||
}
|
||||
}, [hasHistory, historyBlockId, historySubBlockId, textHistoryStore])
|
||||
|
||||
useEffect(() => {
|
||||
if (!editorRef.current) return
|
||||
|
||||
@@ -211,11 +326,9 @@ export function CodeEditor({
|
||||
|
||||
<Editor
|
||||
value={code}
|
||||
onValueChange={(newCode) => {
|
||||
setCode(newCode)
|
||||
onChange(newCode)
|
||||
}}
|
||||
onKeyDown={onKeyDown}
|
||||
onValueChange={handleValueChange}
|
||||
onKeyDown={handleKeyDown}
|
||||
onBlur={handleBlur}
|
||||
highlight={(code) => customHighlight(code)}
|
||||
disabled={disabled}
|
||||
{...getCodeEditorProps({ disabled })}
|
||||
|
||||
@@ -936,6 +936,7 @@ try {
|
||||
gutterClassName='bg-[var(--bg)]'
|
||||
disabled={schemaGeneration.isLoading || schemaGeneration.isStreaming}
|
||||
onKeyDown={handleKeyDown}
|
||||
historyId={`${blockId}:tool-schema`}
|
||||
/>
|
||||
</ModalTabsContent>
|
||||
|
||||
@@ -1018,6 +1019,7 @@ try {
|
||||
disabled={codeGeneration.isLoading || codeGeneration.isStreaming}
|
||||
onKeyDown={handleKeyDown}
|
||||
schemaParameters={schemaParameters}
|
||||
historyId={`${blockId}:tool-code`}
|
||||
/>
|
||||
|
||||
{showEnvVars && (
|
||||
|
||||
@@ -74,6 +74,7 @@ export function TriggerSave({
|
||||
blockId,
|
||||
triggerId: effectiveTriggerId,
|
||||
isPreview,
|
||||
useWebhookUrl: true, // to store the webhook url in the store
|
||||
})
|
||||
|
||||
const triggerConfig = useSubBlockStore((state) => state.getValue(blockId, 'triggerConfig'))
|
||||
|
||||
@@ -6,6 +6,61 @@ import { getBlock } from '@/blocks/registry'
|
||||
|
||||
const logger = createLogger('NodeUtilities')
|
||||
|
||||
/**
|
||||
* Estimates block dimensions based on block type.
|
||||
* Uses subblock count to estimate height for blocks that haven't been measured yet.
|
||||
*
|
||||
* @param blockType - The type of block (e.g., 'condition', 'agent')
|
||||
* @returns Estimated width and height for the block
|
||||
*/
|
||||
export function estimateBlockDimensions(blockType: string): { width: number; height: number } {
|
||||
const blockConfig = getBlock(blockType)
|
||||
const subBlockCount = blockConfig?.subBlocks?.length ?? 3
|
||||
// Many subblocks are conditionally rendered (advanced mode, provider-specific, etc.)
|
||||
// Use roughly half the config count as a reasonable estimate, capped between 3-7 rows
|
||||
const estimatedRows = Math.max(3, Math.min(Math.ceil(subBlockCount / 2), 7))
|
||||
const hasErrorRow = blockType !== 'starter' && blockType !== 'response' ? 1 : 0
|
||||
|
||||
const height =
|
||||
BLOCK_DIMENSIONS.HEADER_HEIGHT +
|
||||
BLOCK_DIMENSIONS.WORKFLOW_CONTENT_PADDING +
|
||||
(estimatedRows + hasErrorRow) * BLOCK_DIMENSIONS.WORKFLOW_ROW_HEIGHT
|
||||
|
||||
return {
|
||||
width: BLOCK_DIMENSIONS.FIXED_WIDTH,
|
||||
height: Math.max(height, BLOCK_DIMENSIONS.MIN_HEIGHT),
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Clamps a position to keep a block fully inside a container's content area.
|
||||
* Content area starts after the header and padding, and ends before the right/bottom padding.
|
||||
*
|
||||
* @param position - Raw position relative to container origin
|
||||
* @param containerDimensions - Container width and height
|
||||
* @param blockDimensions - Block width and height
|
||||
* @returns Clamped position that keeps block inside content area
|
||||
*/
|
||||
export function clampPositionToContainer(
|
||||
position: { x: number; y: number },
|
||||
containerDimensions: { width: number; height: number },
|
||||
blockDimensions: { width: number; height: number }
|
||||
): { x: number; y: number } {
|
||||
const { width: containerWidth, height: containerHeight } = containerDimensions
|
||||
const { width: blockWidth, height: blockHeight } = blockDimensions
|
||||
|
||||
// Content area bounds (where blocks can be placed)
|
||||
const minX = CONTAINER_DIMENSIONS.LEFT_PADDING
|
||||
const minY = CONTAINER_DIMENSIONS.HEADER_HEIGHT + CONTAINER_DIMENSIONS.TOP_PADDING
|
||||
const maxX = containerWidth - CONTAINER_DIMENSIONS.RIGHT_PADDING - blockWidth
|
||||
const maxY = containerHeight - CONTAINER_DIMENSIONS.BOTTOM_PADDING - blockHeight
|
||||
|
||||
return {
|
||||
x: Math.max(minX, Math.min(position.x, Math.max(minX, maxX))),
|
||||
y: Math.max(minY, Math.min(position.y, Math.max(minY, maxY))),
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Hook providing utilities for node position, hierarchy, and dimension calculations
|
||||
*/
|
||||
@@ -21,7 +76,7 @@ export function useNodeUtilities(blocks: Record<string, any>) {
|
||||
|
||||
/**
|
||||
* Get the dimensions of a block.
|
||||
* For regular blocks, estimates height based on block config if not yet measured.
|
||||
* For regular blocks, uses stored height or estimates based on block config.
|
||||
*/
|
||||
const getBlockDimensions = useCallback(
|
||||
(blockId: string): { width: number; height: number } => {
|
||||
@@ -41,32 +96,16 @@ export function useNodeUtilities(blocks: Record<string, any>) {
|
||||
}
|
||||
}
|
||||
|
||||
// Workflow block nodes have fixed visual width
|
||||
const width = BLOCK_DIMENSIONS.FIXED_WIDTH
|
||||
|
||||
// Prefer deterministic height published by the block component; fallback to estimate
|
||||
let height = block.height
|
||||
|
||||
if (!height) {
|
||||
// Estimate height based on block config's subblock count for more accurate initial sizing
|
||||
// This is critical for subflow containers to size correctly before child blocks are measured
|
||||
const blockConfig = getBlock(block.type)
|
||||
const subBlockCount = blockConfig?.subBlocks?.length ?? 3
|
||||
// Many subblocks are conditionally rendered (advanced mode, provider-specific, etc.)
|
||||
// Use roughly half the config count as a reasonable estimate, capped between 3-7 rows
|
||||
const estimatedRows = Math.max(3, Math.min(Math.ceil(subBlockCount / 2), 7))
|
||||
const hasErrorRow = block.type !== 'starter' && block.type !== 'response' ? 1 : 0
|
||||
|
||||
height =
|
||||
BLOCK_DIMENSIONS.HEADER_HEIGHT +
|
||||
BLOCK_DIMENSIONS.WORKFLOW_CONTENT_PADDING +
|
||||
(estimatedRows + hasErrorRow) * BLOCK_DIMENSIONS.WORKFLOW_ROW_HEIGHT
|
||||
if (block.height) {
|
||||
return {
|
||||
width: BLOCK_DIMENSIONS.FIXED_WIDTH,
|
||||
height: Math.max(block.height, BLOCK_DIMENSIONS.MIN_HEIGHT),
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
width,
|
||||
height: Math.max(height, BLOCK_DIMENSIONS.MIN_HEIGHT),
|
||||
}
|
||||
// Use shared estimation utility for blocks without measured height
|
||||
return estimateBlockDimensions(block.type)
|
||||
},
|
||||
[blocks, isContainerType]
|
||||
)
|
||||
@@ -164,29 +203,36 @@ export function useNodeUtilities(blocks: Record<string, any>) {
|
||||
)
|
||||
|
||||
/**
|
||||
* Calculates the relative position of a node to a new parent's content area.
|
||||
* Accounts for header height and padding offsets in container nodes.
|
||||
* Calculates the relative position of a node to a new parent's origin.
|
||||
* React Flow positions children relative to parent origin, so we clamp
|
||||
* to the content area bounds (after header and padding).
|
||||
* @param nodeId ID of the node being repositioned
|
||||
* @param newParentId ID of the new parent
|
||||
* @returns Relative position coordinates {x, y} within the parent's content area
|
||||
* @returns Relative position coordinates {x, y} within the parent
|
||||
*/
|
||||
const calculateRelativePosition = useCallback(
|
||||
(nodeId: string, newParentId: string): { x: number; y: number } => {
|
||||
const nodeAbsPos = getNodeAbsolutePosition(nodeId)
|
||||
const parentAbsPos = getNodeAbsolutePosition(newParentId)
|
||||
const parentNode = getNodes().find((n) => n.id === newParentId)
|
||||
|
||||
// Account for container's header and padding
|
||||
// Children are positioned relative to content area, not container origin
|
||||
const headerHeight = 50
|
||||
const leftPadding = 16
|
||||
const topPadding = 16
|
||||
|
||||
return {
|
||||
x: nodeAbsPos.x - parentAbsPos.x - leftPadding,
|
||||
y: nodeAbsPos.y - parentAbsPos.y - headerHeight - topPadding,
|
||||
// Calculate raw relative position (relative to parent origin)
|
||||
const rawPosition = {
|
||||
x: nodeAbsPos.x - parentAbsPos.x,
|
||||
y: nodeAbsPos.y - parentAbsPos.y,
|
||||
}
|
||||
|
||||
// Get container and block dimensions
|
||||
const containerDimensions = {
|
||||
width: parentNode?.data?.width || CONTAINER_DIMENSIONS.DEFAULT_WIDTH,
|
||||
height: parentNode?.data?.height || CONTAINER_DIMENSIONS.DEFAULT_HEIGHT,
|
||||
}
|
||||
const blockDimensions = getBlockDimensions(nodeId)
|
||||
|
||||
// Clamp position to keep block inside content area
|
||||
return clampPositionToContainer(rawPosition, containerDimensions, blockDimensions)
|
||||
},
|
||||
[getNodeAbsolutePosition]
|
||||
[getNodeAbsolutePosition, getNodes, getBlockDimensions]
|
||||
)
|
||||
|
||||
/**
|
||||
@@ -252,7 +298,11 @@ export function useNodeUtilities(blocks: Record<string, any>) {
|
||||
*/
|
||||
const calculateLoopDimensions = useCallback(
|
||||
(nodeId: string): { width: number; height: number } => {
|
||||
const childNodes = getNodes().filter((node) => node.parentId === nodeId)
|
||||
// Check both React Flow's node.parentId AND blocks store's data.parentId
|
||||
// This ensures we catch children even if React Flow hasn't re-rendered yet
|
||||
const childNodes = getNodes().filter(
|
||||
(node) => node.parentId === nodeId || blocks[node.id]?.data?.parentId === nodeId
|
||||
)
|
||||
if (childNodes.length === 0) {
|
||||
return {
|
||||
width: CONTAINER_DIMENSIONS.DEFAULT_WIDTH,
|
||||
@@ -265,8 +315,11 @@ export function useNodeUtilities(blocks: Record<string, any>) {
|
||||
|
||||
childNodes.forEach((node) => {
|
||||
const { width: nodeWidth, height: nodeHeight } = getBlockDimensions(node.id)
|
||||
maxRight = Math.max(maxRight, node.position.x + nodeWidth)
|
||||
maxBottom = Math.max(maxBottom, node.position.y + nodeHeight)
|
||||
// Use block position from store if available (more up-to-date)
|
||||
const block = blocks[node.id]
|
||||
const position = block?.position || node.position
|
||||
maxRight = Math.max(maxRight, position.x + nodeWidth)
|
||||
maxBottom = Math.max(maxBottom, position.y + nodeHeight)
|
||||
})
|
||||
|
||||
const width = Math.max(
|
||||
@@ -283,7 +336,7 @@ export function useNodeUtilities(blocks: Record<string, any>) {
|
||||
|
||||
return { width, height }
|
||||
},
|
||||
[getNodes, getBlockDimensions]
|
||||
[getNodes, getBlockDimensions, blocks]
|
||||
)
|
||||
|
||||
/**
|
||||
|
||||
@@ -18,7 +18,7 @@ import { useShallow } from 'zustand/react/shallow'
|
||||
import type { OAuthConnectEventDetail } from '@/lib/copilot/tools/client/other/oauth-request-access'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import type { OAuthProvider } from '@/lib/oauth'
|
||||
import { CONTAINER_DIMENSIONS } from '@/lib/workflows/blocks/block-dimensions'
|
||||
import { BLOCK_DIMENSIONS, CONTAINER_DIMENSIONS } from '@/lib/workflows/blocks/block-dimensions'
|
||||
import { TriggerUtils } from '@/lib/workflows/triggers/triggers'
|
||||
import { useWorkspacePermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
|
||||
import {
|
||||
@@ -40,6 +40,10 @@ import {
|
||||
useCurrentWorkflow,
|
||||
useNodeUtilities,
|
||||
} from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks'
|
||||
import {
|
||||
clampPositionToContainer,
|
||||
estimateBlockDimensions,
|
||||
} from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-node-utilities'
|
||||
import { useSocket } from '@/app/workspace/providers/socket-provider'
|
||||
import { getBlock } from '@/blocks'
|
||||
import { isAnnotationOnlyBlock } from '@/executor/constants'
|
||||
@@ -694,17 +698,19 @@ const WorkflowContent = React.memo(() => {
|
||||
return
|
||||
}
|
||||
|
||||
// Calculate position relative to the container's content area
|
||||
// Account for header (50px), left padding (16px), and top padding (16px)
|
||||
const headerHeight = 50
|
||||
const leftPadding = 16
|
||||
const topPadding = 16
|
||||
|
||||
const relativePosition = {
|
||||
x: position.x - containerInfo.loopPosition.x - leftPadding,
|
||||
y: position.y - containerInfo.loopPosition.y - headerHeight - topPadding,
|
||||
// Calculate raw position relative to container origin
|
||||
const rawPosition = {
|
||||
x: position.x - containerInfo.loopPosition.x,
|
||||
y: position.y - containerInfo.loopPosition.y,
|
||||
}
|
||||
|
||||
// Clamp position to keep block inside container's content area
|
||||
const relativePosition = clampPositionToContainer(
|
||||
rawPosition,
|
||||
containerInfo.dimensions,
|
||||
estimateBlockDimensions(data.type)
|
||||
)
|
||||
|
||||
// Capture existing child blocks before adding the new one
|
||||
const existingChildBlocks = Object.values(blocks).filter(
|
||||
(b) => b.data?.parentId === containerInfo.loopId
|
||||
@@ -1910,17 +1916,47 @@ const WorkflowContent = React.memo(() => {
|
||||
})
|
||||
document.body.style.cursor = ''
|
||||
|
||||
// Get the block's current parent (if any)
|
||||
const currentBlock = blocks[node.id]
|
||||
const currentParentId = currentBlock?.data?.parentId
|
||||
|
||||
// Calculate position - clamp if inside a container
|
||||
let finalPosition = node.position
|
||||
if (currentParentId) {
|
||||
// Block is inside a container - clamp position to keep it fully inside
|
||||
const parentNode = getNodes().find((n) => n.id === currentParentId)
|
||||
if (parentNode) {
|
||||
const containerDimensions = {
|
||||
width: parentNode.data?.width || CONTAINER_DIMENSIONS.DEFAULT_WIDTH,
|
||||
height: parentNode.data?.height || CONTAINER_DIMENSIONS.DEFAULT_HEIGHT,
|
||||
}
|
||||
const blockDimensions = {
|
||||
width: BLOCK_DIMENSIONS.FIXED_WIDTH,
|
||||
height: Math.max(
|
||||
currentBlock?.height || BLOCK_DIMENSIONS.MIN_HEIGHT,
|
||||
BLOCK_DIMENSIONS.MIN_HEIGHT
|
||||
),
|
||||
}
|
||||
|
||||
finalPosition = clampPositionToContainer(
|
||||
node.position,
|
||||
containerDimensions,
|
||||
blockDimensions
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
// Emit collaborative position update for the final position
|
||||
// This ensures other users see the smooth final position
|
||||
collaborativeUpdateBlockPosition(node.id, node.position, true)
|
||||
collaborativeUpdateBlockPosition(node.id, finalPosition, true)
|
||||
|
||||
// Record single move entry on drag end to avoid micro-moves
|
||||
const start = getDragStartPosition()
|
||||
if (start && start.id === node.id) {
|
||||
const before = { x: start.x, y: start.y, parentId: start.parentId }
|
||||
const after = {
|
||||
x: node.position.x,
|
||||
y: node.position.y,
|
||||
x: finalPosition.x,
|
||||
y: finalPosition.y,
|
||||
parentId: node.parentId || blocks[node.id]?.data?.parentId,
|
||||
}
|
||||
const moved =
|
||||
|
||||
@@ -43,6 +43,7 @@ export const JiraBlock: BlockConfig<JiraResponse> = {
|
||||
{ label: 'Delete Issue Link', id: 'delete_link' },
|
||||
{ label: 'Add Watcher', id: 'add_watcher' },
|
||||
{ label: 'Remove Watcher', id: 'remove_watcher' },
|
||||
{ label: 'Get Users', id: 'get_users' },
|
||||
],
|
||||
value: () => 'read',
|
||||
},
|
||||
@@ -194,6 +195,71 @@ export const JiraBlock: BlockConfig<JiraResponse> = {
|
||||
dependsOn: ['projectId'],
|
||||
condition: { field: 'operation', value: ['update', 'write'] },
|
||||
},
|
||||
// Write Issue additional fields
|
||||
{
|
||||
id: 'assignee',
|
||||
title: 'Assignee Account ID',
|
||||
type: 'short-input',
|
||||
placeholder: 'Assignee account ID (e.g., 5b109f2e9729b51b54dc274d)',
|
||||
dependsOn: ['projectId'],
|
||||
condition: { field: 'operation', value: 'write' },
|
||||
},
|
||||
{
|
||||
id: 'priority',
|
||||
title: 'Priority',
|
||||
type: 'short-input',
|
||||
placeholder: 'Priority ID or name (e.g., "10000" or "High")',
|
||||
dependsOn: ['projectId'],
|
||||
condition: { field: 'operation', value: 'write' },
|
||||
},
|
||||
{
|
||||
id: 'labels',
|
||||
title: 'Labels',
|
||||
type: 'short-input',
|
||||
placeholder: 'Comma-separated labels (e.g., bug, urgent)',
|
||||
dependsOn: ['projectId'],
|
||||
condition: { field: 'operation', value: 'write' },
|
||||
},
|
||||
{
|
||||
id: 'duedate',
|
||||
title: 'Due Date',
|
||||
type: 'short-input',
|
||||
placeholder: 'YYYY-MM-DD (e.g., 2024-12-31)',
|
||||
dependsOn: ['projectId'],
|
||||
condition: { field: 'operation', value: 'write' },
|
||||
},
|
||||
{
|
||||
id: 'reporter',
|
||||
title: 'Reporter Account ID',
|
||||
type: 'short-input',
|
||||
placeholder: 'Reporter account ID',
|
||||
dependsOn: ['projectId'],
|
||||
condition: { field: 'operation', value: 'write' },
|
||||
},
|
||||
{
|
||||
id: 'environment',
|
||||
title: 'Environment',
|
||||
type: 'long-input',
|
||||
placeholder: 'Environment information (e.g., Production, Staging)',
|
||||
dependsOn: ['projectId'],
|
||||
condition: { field: 'operation', value: 'write' },
|
||||
},
|
||||
{
|
||||
id: 'customFieldId',
|
||||
title: 'Custom Field ID',
|
||||
type: 'short-input',
|
||||
placeholder: 'e.g., customfield_10001 or 10001',
|
||||
dependsOn: ['projectId'],
|
||||
condition: { field: 'operation', value: 'write' },
|
||||
},
|
||||
{
|
||||
id: 'teamUuid',
|
||||
title: 'Team UUID',
|
||||
type: 'short-input',
|
||||
placeholder: 'e.g., b3aa307a-76ea-462d-b6f1-a6e89ce9858a',
|
||||
dependsOn: ['projectId'],
|
||||
condition: { field: 'operation', value: 'write' },
|
||||
},
|
||||
// Delete Issue fields
|
||||
{
|
||||
id: 'deleteSubtasks',
|
||||
@@ -351,6 +417,28 @@ export const JiraBlock: BlockConfig<JiraResponse> = {
|
||||
placeholder: 'Enter link ID to delete',
|
||||
condition: { field: 'operation', value: 'delete_link' },
|
||||
},
|
||||
// Get Users fields
|
||||
{
|
||||
id: 'userAccountId',
|
||||
title: 'Account ID',
|
||||
type: 'short-input',
|
||||
placeholder: 'Enter account ID for specific user',
|
||||
condition: { field: 'operation', value: 'get_users' },
|
||||
},
|
||||
{
|
||||
id: 'usersStartAt',
|
||||
title: 'Start At',
|
||||
type: 'short-input',
|
||||
placeholder: 'Pagination start index (default: 0)',
|
||||
condition: { field: 'operation', value: 'get_users' },
|
||||
},
|
||||
{
|
||||
id: 'usersMaxResults',
|
||||
title: 'Max Results',
|
||||
type: 'short-input',
|
||||
placeholder: 'Maximum users to return (default: 50)',
|
||||
condition: { field: 'operation', value: 'get_users' },
|
||||
},
|
||||
// Trigger SubBlocks
|
||||
...getTrigger('jira_issue_created').subBlocks,
|
||||
...getTrigger('jira_issue_updated').subBlocks,
|
||||
@@ -383,6 +471,7 @@ export const JiraBlock: BlockConfig<JiraResponse> = {
|
||||
'jira_delete_issue_link',
|
||||
'jira_add_watcher',
|
||||
'jira_remove_watcher',
|
||||
'jira_get_users',
|
||||
],
|
||||
config: {
|
||||
tool: (params) => {
|
||||
@@ -438,6 +527,8 @@ export const JiraBlock: BlockConfig<JiraResponse> = {
|
||||
return 'jira_add_watcher'
|
||||
case 'remove_watcher':
|
||||
return 'jira_remove_watcher'
|
||||
case 'get_users':
|
||||
return 'jira_get_users'
|
||||
default:
|
||||
return 'jira_retrieve'
|
||||
}
|
||||
@@ -461,12 +552,29 @@ export const JiraBlock: BlockConfig<JiraResponse> = {
|
||||
'Project ID is required. Please select a project or enter a project ID manually.'
|
||||
)
|
||||
}
|
||||
// Parse comma-separated strings into arrays
|
||||
const parseCommaSeparated = (value: string | undefined): string[] | undefined => {
|
||||
if (!value || value.trim() === '') return undefined
|
||||
return value
|
||||
.split(',')
|
||||
.map((item) => item.trim())
|
||||
.filter((item) => item !== '')
|
||||
}
|
||||
|
||||
const writeParams = {
|
||||
projectId: effectiveProjectId,
|
||||
summary: params.summary || '',
|
||||
description: params.description || '',
|
||||
issueType: params.issueType || 'Task',
|
||||
parent: params.parentIssue ? { key: params.parentIssue } : undefined,
|
||||
assignee: params.assignee || undefined,
|
||||
priority: params.priority || undefined,
|
||||
labels: parseCommaSeparated(params.labels),
|
||||
duedate: params.duedate || undefined,
|
||||
reporter: params.reporter || undefined,
|
||||
environment: params.environment || undefined,
|
||||
customFieldId: params.customFieldId || undefined,
|
||||
customFieldValue: params.customFieldValue || undefined,
|
||||
}
|
||||
return {
|
||||
...baseParams,
|
||||
@@ -704,6 +812,16 @@ export const JiraBlock: BlockConfig<JiraResponse> = {
|
||||
accountId: params.accountId,
|
||||
}
|
||||
}
|
||||
case 'get_users': {
|
||||
return {
|
||||
...baseParams,
|
||||
accountId: params.userAccountId || undefined,
|
||||
startAt: params.usersStartAt ? Number.parseInt(params.usersStartAt) : undefined,
|
||||
maxResults: params.usersMaxResults
|
||||
? Number.parseInt(params.usersMaxResults)
|
||||
: undefined,
|
||||
}
|
||||
}
|
||||
default:
|
||||
return baseParams
|
||||
}
|
||||
@@ -722,6 +840,15 @@ export const JiraBlock: BlockConfig<JiraResponse> = {
|
||||
summary: { type: 'string', description: 'Issue summary' },
|
||||
description: { type: 'string', description: 'Issue description' },
|
||||
issueType: { type: 'string', description: 'Issue type' },
|
||||
// Write operation additional inputs
|
||||
assignee: { type: 'string', description: 'Assignee account ID' },
|
||||
priority: { type: 'string', description: 'Priority ID or name' },
|
||||
labels: { type: 'string', description: 'Comma-separated labels for the issue' },
|
||||
duedate: { type: 'string', description: 'Due date in YYYY-MM-DD format' },
|
||||
reporter: { type: 'string', description: 'Reporter account ID' },
|
||||
environment: { type: 'string', description: 'Environment information' },
|
||||
customFieldId: { type: 'string', description: 'Custom field ID (e.g., customfield_10001)' },
|
||||
customFieldValue: { type: 'string', description: 'Value for the custom field' },
|
||||
// Delete operation inputs
|
||||
deleteSubtasks: { type: 'string', description: 'Whether to delete subtasks (true/false)' },
|
||||
// Assign/Watcher operation inputs
|
||||
@@ -758,6 +885,13 @@ export const JiraBlock: BlockConfig<JiraResponse> = {
|
||||
linkType: { type: 'string', description: 'Type of link (e.g., "Blocks", "Relates")' },
|
||||
linkComment: { type: 'string', description: 'Optional comment for issue link' },
|
||||
linkId: { type: 'string', description: 'Link ID for delete operation' },
|
||||
// Get Users operation inputs
|
||||
userAccountId: {
|
||||
type: 'string',
|
||||
description: 'Account ID for specific user lookup (optional)',
|
||||
},
|
||||
usersStartAt: { type: 'string', description: 'Pagination start index for users' },
|
||||
usersMaxResults: { type: 'string', description: 'Maximum users to return' },
|
||||
},
|
||||
outputs: {
|
||||
// Common outputs across all Jira operations
|
||||
@@ -834,6 +968,12 @@ export const JiraBlock: BlockConfig<JiraResponse> = {
|
||||
// jira_add_watcher, jira_remove_watcher outputs
|
||||
watcherAccountId: { type: 'string', description: 'Watcher account ID' },
|
||||
|
||||
// jira_get_users outputs
|
||||
users: {
|
||||
type: 'json',
|
||||
description: 'Array of users with accountId, displayName, emailAddress, active status',
|
||||
},
|
||||
|
||||
// jira_bulk_read outputs
|
||||
// Note: bulk_read returns an array in the output field, each item contains:
|
||||
// ts, issueKey, summary, description, status, assignee, created, updated
|
||||
|
||||
@@ -3,5 +3,5 @@
|
||||
"name": "Emir Karabeg",
|
||||
"url": "https://x.com/karabegemir",
|
||||
"xHandle": "karabegemir",
|
||||
"avatarUrl": "/studio/authors/emir.png"
|
||||
"avatarUrl": "/studio/authors/emir.jpg"
|
||||
}
|
||||
|
||||
@@ -3,5 +3,5 @@
|
||||
"name": "Siddharth",
|
||||
"url": "https://x.com/sidganesan",
|
||||
"xHandle": "sidganesan",
|
||||
"avatarUrl": "/studio/authors/sid.png"
|
||||
"avatarUrl": "/studio/authors/sid.jpg"
|
||||
}
|
||||
|
||||
@@ -3,5 +3,5 @@
|
||||
"name": "Waleed Latif",
|
||||
"url": "https://x.com/typingwala",
|
||||
"xHandle": "typingwala",
|
||||
"avatarUrl": "/studio/authors/waleed.png"
|
||||
"avatarUrl": "/studio/authors/waleed.jpg"
|
||||
}
|
||||
|
||||
@@ -18,7 +18,7 @@ featured: true
|
||||
draft: false
|
||||
---
|
||||
|
||||

|
||||

|
||||
|
||||
## Why we’re excited
|
||||
|
||||
|
||||
@@ -128,6 +128,8 @@ export const DEFAULTS = {
|
||||
BLOCK_TITLE: 'Untitled Block',
|
||||
WORKFLOW_NAME: 'Workflow',
|
||||
MAX_LOOP_ITERATIONS: 1000,
|
||||
MAX_FOREACH_ITEMS: 1000,
|
||||
MAX_PARALLEL_BRANCHES: 20,
|
||||
MAX_WORKFLOW_DEPTH: 10,
|
||||
EXECUTION_TIME: 0,
|
||||
TOKENS: {
|
||||
|
||||
@@ -4,6 +4,7 @@ import { LoopConstructor } from '@/executor/dag/construction/loops'
|
||||
import { NodeConstructor } from '@/executor/dag/construction/nodes'
|
||||
import { PathConstructor } from '@/executor/dag/construction/paths'
|
||||
import type { DAGEdge, NodeMetadata } from '@/executor/dag/types'
|
||||
import { buildSentinelStartId, extractBaseBlockId } from '@/executor/utils/subflow-utils'
|
||||
import type {
|
||||
SerializedBlock,
|
||||
SerializedLoop,
|
||||
@@ -79,6 +80,9 @@ export class DAGBuilder {
|
||||
}
|
||||
}
|
||||
|
||||
// Validate loop and parallel structure
|
||||
this.validateSubflowStructure(dag)
|
||||
|
||||
logger.info('DAG built', {
|
||||
totalNodes: dag.nodes.size,
|
||||
loopCount: dag.loopConfigs.size,
|
||||
@@ -105,4 +109,43 @@ export class DAGBuilder {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Validates that loops and parallels have proper internal structure.
|
||||
* Throws an error if a loop/parallel has no blocks inside or no connections from start.
|
||||
*/
|
||||
private validateSubflowStructure(dag: DAG): void {
|
||||
for (const [id, config] of dag.loopConfigs) {
|
||||
this.validateSubflow(dag, id, config.nodes, 'Loop')
|
||||
}
|
||||
for (const [id, config] of dag.parallelConfigs) {
|
||||
this.validateSubflow(dag, id, config.nodes, 'Parallel')
|
||||
}
|
||||
}
|
||||
|
||||
private validateSubflow(
|
||||
dag: DAG,
|
||||
id: string,
|
||||
nodes: string[] | undefined,
|
||||
type: 'Loop' | 'Parallel'
|
||||
): void {
|
||||
if (!nodes || nodes.length === 0) {
|
||||
throw new Error(
|
||||
`${type} has no blocks inside. Add at least one block to the ${type.toLowerCase()}.`
|
||||
)
|
||||
}
|
||||
|
||||
const sentinelStartNode = dag.nodes.get(buildSentinelStartId(id))
|
||||
if (!sentinelStartNode) return
|
||||
|
||||
const hasConnections = Array.from(sentinelStartNode.outgoingEdges.values()).some((edge) =>
|
||||
nodes.includes(extractBaseBlockId(edge.target))
|
||||
)
|
||||
|
||||
if (!hasConnections) {
|
||||
throw new Error(
|
||||
`${type} start is not connected to any blocks. Connect a block to the ${type.toLowerCase()} start.`
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -63,8 +63,10 @@ export class DAGExecutor {
|
||||
|
||||
const resolver = new VariableResolver(this.workflow, this.workflowVariables, state)
|
||||
const loopOrchestrator = new LoopOrchestrator(dag, state, resolver)
|
||||
loopOrchestrator.setContextExtensions(this.contextExtensions)
|
||||
const parallelOrchestrator = new ParallelOrchestrator(dag, state)
|
||||
parallelOrchestrator.setResolver(resolver)
|
||||
parallelOrchestrator.setContextExtensions(this.contextExtensions)
|
||||
const allHandlers = createBlockHandlers()
|
||||
const blockExecutor = new BlockExecutor(allHandlers, resolver, this.contextExtensions, state)
|
||||
const edgeManager = new EdgeManager(dag)
|
||||
|
||||
@@ -14,6 +14,8 @@ export interface LoopScope {
|
||||
condition?: string
|
||||
loopType?: 'for' | 'forEach' | 'while' | 'doWhile'
|
||||
skipFirstConditionCheck?: boolean
|
||||
/** Error message if loop validation failed (e.g., exceeded max iterations) */
|
||||
validationError?: string
|
||||
}
|
||||
|
||||
export interface ParallelScope {
|
||||
@@ -23,6 +25,8 @@ export interface ParallelScope {
|
||||
completedCount: number
|
||||
totalExpectedNodes: number
|
||||
items?: any[]
|
||||
/** Error message if parallel validation failed (e.g., exceeded max branches) */
|
||||
validationError?: string
|
||||
}
|
||||
|
||||
export class ExecutionState implements BlockStateController {
|
||||
|
||||
@@ -17,27 +17,32 @@ vi.mock('@/lib/core/utils/request', () => ({
|
||||
generateRequestId: vi.fn(() => 'test-request-id'),
|
||||
}))
|
||||
|
||||
vi.mock('@/lib/execution/isolated-vm', () => ({
|
||||
executeInIsolatedVM: vi.fn(),
|
||||
vi.mock('@/tools', () => ({
|
||||
executeTool: vi.fn(),
|
||||
}))
|
||||
|
||||
import { executeInIsolatedVM } from '@/lib/execution/isolated-vm'
|
||||
import { executeTool } from '@/tools'
|
||||
|
||||
const mockExecuteInIsolatedVM = executeInIsolatedVM as ReturnType<typeof vi.fn>
|
||||
const mockExecuteTool = executeTool as ReturnType<typeof vi.fn>
|
||||
|
||||
function simulateIsolatedVMExecution(
|
||||
code: string,
|
||||
contextVariables: Record<string, unknown>
|
||||
): { result: unknown; stdout: string; error?: { message: string; name: string } } {
|
||||
/**
|
||||
* Simulates what the function_execute tool does when evaluating condition code
|
||||
*/
|
||||
function simulateConditionExecution(code: string): {
|
||||
success: boolean
|
||||
output?: { result: unknown }
|
||||
error?: string
|
||||
} {
|
||||
try {
|
||||
const fn = new Function(...Object.keys(contextVariables), code)
|
||||
const result = fn(...Object.values(contextVariables))
|
||||
return { result, stdout: '' }
|
||||
// The code is in format: "const context = {...};\nreturn Boolean(...)"
|
||||
// We need to execute it and return the result
|
||||
const fn = new Function(code)
|
||||
const result = fn()
|
||||
return { success: true, output: { result } }
|
||||
} catch (error: any) {
|
||||
return {
|
||||
result: null,
|
||||
stdout: '',
|
||||
error: { message: error.message, name: error.name || 'Error' },
|
||||
success: false,
|
||||
error: error.message,
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -143,8 +148,8 @@ describe('ConditionBlockHandler', () => {
|
||||
|
||||
vi.clearAllMocks()
|
||||
|
||||
mockExecuteInIsolatedVM.mockImplementation(async ({ code, contextVariables }) => {
|
||||
return simulateIsolatedVMExecution(code, contextVariables)
|
||||
mockExecuteTool.mockImplementation(async (_toolId: string, params: { code: string }) => {
|
||||
return simulateConditionExecution(params.code)
|
||||
})
|
||||
})
|
||||
|
||||
|
||||
@@ -1,10 +1,9 @@
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import { executeInIsolatedVM } from '@/lib/execution/isolated-vm'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import type { BlockOutput } from '@/blocks/types'
|
||||
import { BlockType, CONDITION, DEFAULTS, EDGE } from '@/executor/constants'
|
||||
import type { BlockHandler, ExecutionContext } from '@/executor/types'
|
||||
import type { SerializedBlock } from '@/serializer/types'
|
||||
import { executeTool } from '@/tools'
|
||||
|
||||
const logger = createLogger('ConditionBlockHandler')
|
||||
|
||||
@@ -39,32 +38,38 @@ export async function evaluateConditionExpression(
|
||||
}
|
||||
|
||||
try {
|
||||
const requestId = generateRequestId()
|
||||
const contextSetup = `const context = ${JSON.stringify(evalContext)};`
|
||||
const code = `${contextSetup}\nreturn Boolean(${resolvedConditionValue})`
|
||||
|
||||
const code = `return Boolean(${resolvedConditionValue})`
|
||||
const result = await executeTool(
|
||||
'function_execute',
|
||||
{
|
||||
code,
|
||||
timeout: CONDITION_TIMEOUT_MS,
|
||||
envVars: {},
|
||||
_context: {
|
||||
workflowId: ctx.workflowId,
|
||||
workspaceId: ctx.workspaceId,
|
||||
},
|
||||
},
|
||||
false,
|
||||
false,
|
||||
ctx
|
||||
)
|
||||
|
||||
const result = await executeInIsolatedVM({
|
||||
code,
|
||||
params: {},
|
||||
envVars: {},
|
||||
contextVariables: { context: evalContext },
|
||||
timeoutMs: CONDITION_TIMEOUT_MS,
|
||||
requestId,
|
||||
})
|
||||
|
||||
if (result.error) {
|
||||
logger.error(`Failed to evaluate condition: ${result.error.message}`, {
|
||||
if (!result.success) {
|
||||
logger.error(`Failed to evaluate condition: ${result.error}`, {
|
||||
originalCondition: conditionExpression,
|
||||
resolvedCondition: resolvedConditionValue,
|
||||
evalContext,
|
||||
error: result.error,
|
||||
})
|
||||
throw new Error(
|
||||
`Evaluation error in condition: ${result.error.message}. (Resolved: ${resolvedConditionValue})`
|
||||
`Evaluation error in condition: ${result.error}. (Resolved: ${resolvedConditionValue})`
|
||||
)
|
||||
}
|
||||
|
||||
return Boolean(result.result)
|
||||
return Boolean(result.output?.result)
|
||||
} catch (evalError: any) {
|
||||
logger.error(`Failed to evaluate condition: ${evalError.message}`, {
|
||||
originalCondition: conditionExpression,
|
||||
|
||||
@@ -5,14 +5,17 @@ import { buildLoopIndexCondition, DEFAULTS, EDGE } from '@/executor/constants'
|
||||
import type { DAG } from '@/executor/dag/builder'
|
||||
import type { EdgeManager } from '@/executor/execution/edge-manager'
|
||||
import type { LoopScope } from '@/executor/execution/state'
|
||||
import type { BlockStateController } from '@/executor/execution/types'
|
||||
import type { BlockStateController, ContextExtensions } from '@/executor/execution/types'
|
||||
import type { ExecutionContext, NormalizedBlockOutput } from '@/executor/types'
|
||||
import type { LoopConfigWithNodes } from '@/executor/types/loop'
|
||||
import { replaceValidReferences } from '@/executor/utils/reference-validation'
|
||||
import {
|
||||
addSubflowErrorLog,
|
||||
buildSentinelEndId,
|
||||
buildSentinelStartId,
|
||||
extractBaseBlockId,
|
||||
resolveArrayInput,
|
||||
validateMaxCount,
|
||||
} from '@/executor/utils/subflow-utils'
|
||||
import type { VariableResolver } from '@/executor/variables/resolver'
|
||||
import type { SerializedLoop } from '@/serializer/types'
|
||||
@@ -32,6 +35,7 @@ export interface LoopContinuationResult {
|
||||
|
||||
export class LoopOrchestrator {
|
||||
private edgeManager: EdgeManager | null = null
|
||||
private contextExtensions: ContextExtensions | null = null
|
||||
|
||||
constructor(
|
||||
private dag: DAG,
|
||||
@@ -39,6 +43,10 @@ export class LoopOrchestrator {
|
||||
private resolver: VariableResolver
|
||||
) {}
|
||||
|
||||
setContextExtensions(contextExtensions: ContextExtensions): void {
|
||||
this.contextExtensions = contextExtensions
|
||||
}
|
||||
|
||||
setEdgeManager(edgeManager: EdgeManager): void {
|
||||
this.edgeManager = edgeManager
|
||||
}
|
||||
@@ -48,7 +56,6 @@ export class LoopOrchestrator {
|
||||
if (!loopConfig) {
|
||||
throw new Error(`Loop config not found: ${loopId}`)
|
||||
}
|
||||
|
||||
const scope: LoopScope = {
|
||||
iteration: 0,
|
||||
currentIterationOutputs: new Map(),
|
||||
@@ -58,15 +65,70 @@ export class LoopOrchestrator {
|
||||
const loopType = loopConfig.loopType
|
||||
|
||||
switch (loopType) {
|
||||
case 'for':
|
||||
case 'for': {
|
||||
scope.loopType = 'for'
|
||||
scope.maxIterations = loopConfig.iterations || DEFAULTS.MAX_LOOP_ITERATIONS
|
||||
const requestedIterations = loopConfig.iterations || DEFAULTS.MAX_LOOP_ITERATIONS
|
||||
|
||||
const iterationError = validateMaxCount(
|
||||
requestedIterations,
|
||||
DEFAULTS.MAX_LOOP_ITERATIONS,
|
||||
'For loop iterations'
|
||||
)
|
||||
if (iterationError) {
|
||||
logger.error(iterationError, { loopId, requestedIterations })
|
||||
this.addLoopErrorLog(ctx, loopId, loopType, iterationError, {
|
||||
iterations: requestedIterations,
|
||||
})
|
||||
scope.maxIterations = 0
|
||||
scope.validationError = iterationError
|
||||
scope.condition = buildLoopIndexCondition(0)
|
||||
ctx.loopExecutions?.set(loopId, scope)
|
||||
throw new Error(iterationError)
|
||||
}
|
||||
|
||||
scope.maxIterations = requestedIterations
|
||||
scope.condition = buildLoopIndexCondition(scope.maxIterations)
|
||||
break
|
||||
}
|
||||
|
||||
case 'forEach': {
|
||||
scope.loopType = 'forEach'
|
||||
const items = this.resolveForEachItems(ctx, loopConfig.forEachItems)
|
||||
let items: any[]
|
||||
try {
|
||||
items = this.resolveForEachItems(ctx, loopConfig.forEachItems)
|
||||
} catch (error) {
|
||||
const errorMessage = `ForEach loop resolution failed: ${error instanceof Error ? error.message : String(error)}`
|
||||
logger.error(errorMessage, { loopId, forEachItems: loopConfig.forEachItems })
|
||||
this.addLoopErrorLog(ctx, loopId, loopType, errorMessage, {
|
||||
forEachItems: loopConfig.forEachItems,
|
||||
})
|
||||
scope.items = []
|
||||
scope.maxIterations = 0
|
||||
scope.validationError = errorMessage
|
||||
scope.condition = buildLoopIndexCondition(0)
|
||||
ctx.loopExecutions?.set(loopId, scope)
|
||||
throw new Error(errorMessage)
|
||||
}
|
||||
|
||||
const sizeError = validateMaxCount(
|
||||
items.length,
|
||||
DEFAULTS.MAX_FOREACH_ITEMS,
|
||||
'ForEach loop collection size'
|
||||
)
|
||||
if (sizeError) {
|
||||
logger.error(sizeError, { loopId, collectionSize: items.length })
|
||||
this.addLoopErrorLog(ctx, loopId, loopType, sizeError, {
|
||||
forEachItems: loopConfig.forEachItems,
|
||||
collectionSize: items.length,
|
||||
})
|
||||
scope.items = []
|
||||
scope.maxIterations = 0
|
||||
scope.validationError = sizeError
|
||||
scope.condition = buildLoopIndexCondition(0)
|
||||
ctx.loopExecutions?.set(loopId, scope)
|
||||
throw new Error(sizeError)
|
||||
}
|
||||
|
||||
scope.items = items
|
||||
scope.maxIterations = items.length
|
||||
scope.item = items[0]
|
||||
@@ -79,15 +141,35 @@ export class LoopOrchestrator {
|
||||
scope.condition = loopConfig.whileCondition
|
||||
break
|
||||
|
||||
case 'doWhile':
|
||||
case 'doWhile': {
|
||||
scope.loopType = 'doWhile'
|
||||
if (loopConfig.doWhileCondition) {
|
||||
scope.condition = loopConfig.doWhileCondition
|
||||
} else {
|
||||
scope.maxIterations = loopConfig.iterations || DEFAULTS.MAX_LOOP_ITERATIONS
|
||||
const requestedIterations = loopConfig.iterations || DEFAULTS.MAX_LOOP_ITERATIONS
|
||||
|
||||
const iterationError = validateMaxCount(
|
||||
requestedIterations,
|
||||
DEFAULTS.MAX_LOOP_ITERATIONS,
|
||||
'Do-While loop iterations'
|
||||
)
|
||||
if (iterationError) {
|
||||
logger.error(iterationError, { loopId, requestedIterations })
|
||||
this.addLoopErrorLog(ctx, loopId, loopType, iterationError, {
|
||||
iterations: requestedIterations,
|
||||
})
|
||||
scope.maxIterations = 0
|
||||
scope.validationError = iterationError
|
||||
scope.condition = buildLoopIndexCondition(0)
|
||||
ctx.loopExecutions?.set(loopId, scope)
|
||||
throw new Error(iterationError)
|
||||
}
|
||||
|
||||
scope.maxIterations = requestedIterations
|
||||
scope.condition = buildLoopIndexCondition(scope.maxIterations)
|
||||
}
|
||||
break
|
||||
}
|
||||
|
||||
default:
|
||||
throw new Error(`Unknown loop type: ${loopType}`)
|
||||
@@ -100,6 +182,23 @@ export class LoopOrchestrator {
|
||||
return scope
|
||||
}
|
||||
|
||||
private addLoopErrorLog(
|
||||
ctx: ExecutionContext,
|
||||
loopId: string,
|
||||
loopType: string,
|
||||
errorMessage: string,
|
||||
inputData?: any
|
||||
): void {
|
||||
addSubflowErrorLog(
|
||||
ctx,
|
||||
loopId,
|
||||
'loop',
|
||||
errorMessage,
|
||||
{ loopType, ...inputData },
|
||||
this.contextExtensions
|
||||
)
|
||||
}
|
||||
|
||||
storeLoopNodeOutput(
|
||||
ctx: ExecutionContext,
|
||||
loopId: string,
|
||||
@@ -412,54 +511,6 @@ export class LoopOrchestrator {
|
||||
}
|
||||
|
||||
private resolveForEachItems(ctx: ExecutionContext, items: any): any[] {
|
||||
if (Array.isArray(items)) {
|
||||
return items
|
||||
}
|
||||
|
||||
if (typeof items === 'object' && items !== null) {
|
||||
return Object.entries(items)
|
||||
}
|
||||
|
||||
if (typeof items === 'string') {
|
||||
if (items.startsWith('<') && items.endsWith('>')) {
|
||||
const resolved = this.resolver.resolveSingleReference(ctx, '', items)
|
||||
if (Array.isArray(resolved)) {
|
||||
return resolved
|
||||
}
|
||||
return []
|
||||
}
|
||||
|
||||
try {
|
||||
const normalized = items.replace(/'/g, '"')
|
||||
const parsed = JSON.parse(normalized)
|
||||
if (Array.isArray(parsed)) {
|
||||
return parsed
|
||||
}
|
||||
return []
|
||||
} catch (error) {
|
||||
logger.error('Failed to parse forEach items', { items, error })
|
||||
return []
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
const resolved = this.resolver.resolveInputs(ctx, 'loop_foreach_items', { items }).items
|
||||
|
||||
if (Array.isArray(resolved)) {
|
||||
return resolved
|
||||
}
|
||||
|
||||
logger.warn('ForEach items did not resolve to array', {
|
||||
items,
|
||||
resolved,
|
||||
})
|
||||
|
||||
return []
|
||||
} catch (error: any) {
|
||||
logger.error('Error resolving forEach items, returning empty array:', {
|
||||
error: error.message,
|
||||
})
|
||||
return []
|
||||
}
|
||||
return resolveArrayInput(ctx, items, this.resolver)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,15 +1,19 @@
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { DEFAULTS } from '@/executor/constants'
|
||||
import type { DAG, DAGNode } from '@/executor/dag/builder'
|
||||
import type { ParallelScope } from '@/executor/execution/state'
|
||||
import type { BlockStateWriter } from '@/executor/execution/types'
|
||||
import type { BlockStateWriter, ContextExtensions } from '@/executor/execution/types'
|
||||
import type { ExecutionContext, NormalizedBlockOutput } from '@/executor/types'
|
||||
import type { ParallelConfigWithNodes } from '@/executor/types/parallel'
|
||||
import {
|
||||
addSubflowErrorLog,
|
||||
buildBranchNodeId,
|
||||
calculateBranchCount,
|
||||
extractBaseBlockId,
|
||||
extractBranchIndex,
|
||||
parseDistributionItems,
|
||||
resolveArrayInput,
|
||||
validateMaxCount,
|
||||
} from '@/executor/utils/subflow-utils'
|
||||
import type { VariableResolver } from '@/executor/variables/resolver'
|
||||
import type { SerializedParallel } from '@/serializer/types'
|
||||
@@ -32,6 +36,7 @@ export interface ParallelAggregationResult {
|
||||
|
||||
export class ParallelOrchestrator {
|
||||
private resolver: VariableResolver | null = null
|
||||
private contextExtensions: ContextExtensions | null = null
|
||||
|
||||
constructor(
|
||||
private dag: DAG,
|
||||
@@ -42,6 +47,10 @@ export class ParallelOrchestrator {
|
||||
this.resolver = resolver
|
||||
}
|
||||
|
||||
setContextExtensions(contextExtensions: ContextExtensions): void {
|
||||
this.contextExtensions = contextExtensions
|
||||
}
|
||||
|
||||
initializeParallelScope(
|
||||
ctx: ExecutionContext,
|
||||
parallelId: string,
|
||||
@@ -49,11 +58,42 @@ export class ParallelOrchestrator {
|
||||
terminalNodesCount = 1
|
||||
): ParallelScope {
|
||||
const parallelConfig = this.dag.parallelConfigs.get(parallelId)
|
||||
const items = parallelConfig ? this.resolveDistributionItems(ctx, parallelConfig) : undefined
|
||||
|
||||
// If we have more items than pre-built branches, expand the DAG
|
||||
let items: any[] | undefined
|
||||
if (parallelConfig) {
|
||||
try {
|
||||
items = this.resolveDistributionItems(ctx, parallelConfig)
|
||||
} catch (error) {
|
||||
const errorMessage = `Parallel distribution resolution failed: ${error instanceof Error ? error.message : String(error)}`
|
||||
logger.error(errorMessage, {
|
||||
parallelId,
|
||||
distribution: parallelConfig.distribution,
|
||||
})
|
||||
this.addParallelErrorLog(ctx, parallelId, errorMessage, {
|
||||
distribution: parallelConfig.distribution,
|
||||
})
|
||||
this.setErrorScope(ctx, parallelId, errorMessage)
|
||||
throw new Error(errorMessage)
|
||||
}
|
||||
}
|
||||
|
||||
const actualBranchCount = items && items.length > totalBranches ? items.length : totalBranches
|
||||
|
||||
const branchError = validateMaxCount(
|
||||
actualBranchCount,
|
||||
DEFAULTS.MAX_PARALLEL_BRANCHES,
|
||||
'Parallel branch count'
|
||||
)
|
||||
if (branchError) {
|
||||
logger.error(branchError, { parallelId, actualBranchCount })
|
||||
this.addParallelErrorLog(ctx, parallelId, branchError, {
|
||||
distribution: parallelConfig?.distribution,
|
||||
branchCount: actualBranchCount,
|
||||
})
|
||||
this.setErrorScope(ctx, parallelId, branchError)
|
||||
throw new Error(branchError)
|
||||
}
|
||||
|
||||
const scope: ParallelScope = {
|
||||
parallelId,
|
||||
totalBranches: actualBranchCount,
|
||||
@@ -108,6 +148,38 @@ export class ParallelOrchestrator {
|
||||
return scope
|
||||
}
|
||||
|
||||
private addParallelErrorLog(
|
||||
ctx: ExecutionContext,
|
||||
parallelId: string,
|
||||
errorMessage: string,
|
||||
inputData?: any
|
||||
): void {
|
||||
addSubflowErrorLog(
|
||||
ctx,
|
||||
parallelId,
|
||||
'parallel',
|
||||
errorMessage,
|
||||
inputData || {},
|
||||
this.contextExtensions
|
||||
)
|
||||
}
|
||||
|
||||
private setErrorScope(ctx: ExecutionContext, parallelId: string, errorMessage: string): void {
|
||||
const scope: ParallelScope = {
|
||||
parallelId,
|
||||
totalBranches: 0,
|
||||
branchOutputs: new Map(),
|
||||
completedCount: 0,
|
||||
totalExpectedNodes: 0,
|
||||
items: [],
|
||||
validationError: errorMessage,
|
||||
}
|
||||
if (!ctx.parallelExecutions) {
|
||||
ctx.parallelExecutions = new Map()
|
||||
}
|
||||
ctx.parallelExecutions.set(parallelId, scope)
|
||||
}
|
||||
|
||||
/**
|
||||
* Dynamically expand the DAG to include additional branch nodes when
|
||||
* the resolved item count exceeds the pre-built branch count.
|
||||
@@ -291,63 +363,11 @@ export class ParallelOrchestrator {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolve distribution items at runtime, handling references like <previousBlock.items>
|
||||
* This mirrors how LoopOrchestrator.resolveForEachItems works.
|
||||
*/
|
||||
private resolveDistributionItems(ctx: ExecutionContext, config: SerializedParallel): any[] {
|
||||
const rawItems = config.distribution
|
||||
|
||||
if (rawItems === undefined || rawItems === null) {
|
||||
if (config.distribution === undefined || config.distribution === null) {
|
||||
return []
|
||||
}
|
||||
|
||||
// Already an array - return as-is
|
||||
if (Array.isArray(rawItems)) {
|
||||
return rawItems
|
||||
}
|
||||
|
||||
// Object - convert to entries array (consistent with loop forEach behavior)
|
||||
if (typeof rawItems === 'object') {
|
||||
return Object.entries(rawItems)
|
||||
}
|
||||
|
||||
// String handling
|
||||
if (typeof rawItems === 'string') {
|
||||
// Resolve references at runtime using the variable resolver
|
||||
if (rawItems.startsWith('<') && rawItems.endsWith('>') && this.resolver) {
|
||||
const resolved = this.resolver.resolveSingleReference(ctx, '', rawItems)
|
||||
if (Array.isArray(resolved)) {
|
||||
return resolved
|
||||
}
|
||||
if (typeof resolved === 'object' && resolved !== null) {
|
||||
return Object.entries(resolved)
|
||||
}
|
||||
logger.warn('Distribution reference did not resolve to array or object', {
|
||||
rawItems,
|
||||
resolved,
|
||||
})
|
||||
return []
|
||||
}
|
||||
|
||||
// Try to parse as JSON
|
||||
try {
|
||||
const normalized = rawItems.replace(/'/g, '"')
|
||||
const parsed = JSON.parse(normalized)
|
||||
if (Array.isArray(parsed)) {
|
||||
return parsed
|
||||
}
|
||||
if (typeof parsed === 'object' && parsed !== null) {
|
||||
return Object.entries(parsed)
|
||||
}
|
||||
return []
|
||||
} catch (error) {
|
||||
logger.error('Failed to parse distribution items', { rawItems, error })
|
||||
return []
|
||||
}
|
||||
}
|
||||
|
||||
return []
|
||||
return resolveArrayInput(ctx, config.distribution, this.resolver)
|
||||
}
|
||||
|
||||
handleParallelBranchCompletion(
|
||||
|
||||
@@ -1,5 +1,8 @@
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { LOOP, PARALLEL, PARSING, REFERENCE } from '@/executor/constants'
|
||||
import type { ContextExtensions } from '@/executor/execution/types'
|
||||
import type { BlockLog, ExecutionContext } from '@/executor/types'
|
||||
import type { VariableResolver } from '@/executor/variables/resolver'
|
||||
import type { SerializedParallel } from '@/serializer/types'
|
||||
|
||||
const logger = createLogger('SubflowUtils')
|
||||
@@ -132,3 +135,131 @@ export function normalizeNodeId(nodeId: string): string {
|
||||
}
|
||||
return nodeId
|
||||
}
|
||||
|
||||
/**
|
||||
* Validates that a count doesn't exceed a maximum limit.
|
||||
* Returns an error message if validation fails, undefined otherwise.
|
||||
*/
|
||||
export function validateMaxCount(count: number, max: number, itemType: string): string | undefined {
|
||||
if (count > max) {
|
||||
return `${itemType} (${count}) exceeds maximum allowed (${max}). Execution blocked.`
|
||||
}
|
||||
return undefined
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolves array input at runtime. Handles arrays, objects, references, and JSON strings.
|
||||
* Used by both loop forEach and parallel distribution resolution.
|
||||
* Throws an error if resolution fails.
|
||||
*/
|
||||
export function resolveArrayInput(
|
||||
ctx: ExecutionContext,
|
||||
items: any,
|
||||
resolver: VariableResolver | null
|
||||
): any[] {
|
||||
if (Array.isArray(items)) {
|
||||
return items
|
||||
}
|
||||
|
||||
if (typeof items === 'object' && items !== null) {
|
||||
return Object.entries(items)
|
||||
}
|
||||
|
||||
if (typeof items === 'string') {
|
||||
if (items.startsWith(REFERENCE.START) && items.endsWith(REFERENCE.END) && resolver) {
|
||||
try {
|
||||
const resolved = resolver.resolveSingleReference(ctx, '', items)
|
||||
if (Array.isArray(resolved)) {
|
||||
return resolved
|
||||
}
|
||||
if (typeof resolved === 'object' && resolved !== null) {
|
||||
return Object.entries(resolved)
|
||||
}
|
||||
throw new Error(`Reference "${items}" did not resolve to an array or object`)
|
||||
} catch (error) {
|
||||
if (error instanceof Error && error.message.startsWith('Reference "')) {
|
||||
throw error
|
||||
}
|
||||
throw new Error(
|
||||
`Failed to resolve reference "${items}": ${error instanceof Error ? error.message : String(error)}`
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
const normalized = items.replace(/'/g, '"')
|
||||
const parsed = JSON.parse(normalized)
|
||||
if (Array.isArray(parsed)) {
|
||||
return parsed
|
||||
}
|
||||
if (typeof parsed === 'object' && parsed !== null) {
|
||||
return Object.entries(parsed)
|
||||
}
|
||||
throw new Error(`Parsed value is not an array or object`)
|
||||
} catch (error) {
|
||||
if (error instanceof Error && error.message.startsWith('Parsed value')) {
|
||||
throw error
|
||||
}
|
||||
throw new Error(`Failed to parse items as JSON: "${items}"`)
|
||||
}
|
||||
}
|
||||
|
||||
if (resolver) {
|
||||
try {
|
||||
const resolved = resolver.resolveInputs(ctx, 'subflow_items', { items }).items
|
||||
if (Array.isArray(resolved)) {
|
||||
return resolved
|
||||
}
|
||||
throw new Error(`Resolved items is not an array`)
|
||||
} catch (error) {
|
||||
if (error instanceof Error && error.message.startsWith('Resolved items')) {
|
||||
throw error
|
||||
}
|
||||
throw new Error(
|
||||
`Failed to resolve items: ${error instanceof Error ? error.message : String(error)}`
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
return []
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates and logs an error for a subflow (loop or parallel).
|
||||
*/
|
||||
export function addSubflowErrorLog(
|
||||
ctx: ExecutionContext,
|
||||
blockId: string,
|
||||
blockType: 'loop' | 'parallel',
|
||||
errorMessage: string,
|
||||
inputData: Record<string, any>,
|
||||
contextExtensions: ContextExtensions | null
|
||||
): void {
|
||||
const now = new Date().toISOString()
|
||||
|
||||
const block = ctx.workflow?.blocks?.find((b) => b.id === blockId)
|
||||
const blockName = block?.metadata?.name || (blockType === 'loop' ? 'Loop' : 'Parallel')
|
||||
|
||||
const blockLog: BlockLog = {
|
||||
blockId,
|
||||
blockName,
|
||||
blockType,
|
||||
startedAt: now,
|
||||
endedAt: now,
|
||||
durationMs: 0,
|
||||
success: false,
|
||||
error: errorMessage,
|
||||
input: inputData,
|
||||
output: { error: errorMessage },
|
||||
...(blockType === 'loop' ? { loopId: blockId } : { parallelId: blockId }),
|
||||
}
|
||||
ctx.blockLogs.push(blockLog)
|
||||
|
||||
if (contextExtensions?.onBlockComplete) {
|
||||
contextExtensions.onBlockComplete(blockId, blockName, blockType, {
|
||||
input: inputData,
|
||||
output: { error: errorMessage },
|
||||
executionTime: 0,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -4,6 +4,7 @@ interface SlackAccount {
|
||||
id: string
|
||||
accountId: string
|
||||
providerId: string
|
||||
displayName?: string
|
||||
}
|
||||
|
||||
interface UseSlackAccountsResult {
|
||||
|
||||
196
apps/sim/hooks/use-text-history.ts
Normal file
@@ -0,0 +1,196 @@
|
||||
import { useCallback, useEffect, useRef } from 'react'
|
||||
import { useTextHistoryStore } from '@/stores/text-history'
|
||||
|
||||
interface UseTextHistoryOptions {
|
||||
/** Block ID for the text field */
|
||||
blockId: string
|
||||
/** Sub-block ID for the text field */
|
||||
subBlockId: string
|
||||
/** Current value of the text field */
|
||||
value: string
|
||||
/** Callback to update the value */
|
||||
onChange: (value: string) => void
|
||||
/** Whether the field is disabled/readonly */
|
||||
disabled?: boolean
|
||||
}
|
||||
|
||||
interface UseTextHistoryResult {
|
||||
/**
|
||||
* Handle text change - records to history with debouncing
|
||||
*/
|
||||
handleChange: (newValue: string) => void
|
||||
|
||||
/**
|
||||
* Handle keyboard events for undo/redo
|
||||
* Returns true if the event was handled
|
||||
*/
|
||||
handleKeyDown: (e: React.KeyboardEvent) => boolean
|
||||
|
||||
/**
|
||||
* Handle blur - commits any pending changes
|
||||
*/
|
||||
handleBlur: () => void
|
||||
|
||||
/**
|
||||
* Undo the last change
|
||||
*/
|
||||
undo: () => void
|
||||
|
||||
/**
|
||||
* Redo the last undone change
|
||||
*/
|
||||
redo: () => void
|
||||
|
||||
/**
|
||||
* Whether undo is available
|
||||
*/
|
||||
canUndo: boolean
|
||||
|
||||
/**
|
||||
* Whether redo is available
|
||||
*/
|
||||
canRedo: boolean
|
||||
}
|
||||
|
||||
/**
|
||||
* Hook for managing text undo/redo history for a specific text field.
|
||||
*
|
||||
* @remarks
|
||||
* - Provides debounced history recording (coalesces rapid changes)
|
||||
* - Handles Cmd+Z/Ctrl+Z for undo, Cmd+Shift+Z/Ctrl+Y for redo
|
||||
* - Commits pending changes on blur to preserve history
|
||||
* - Each blockId:subBlockId pair has its own independent history
|
||||
*
|
||||
* @example
|
||||
* ```tsx
|
||||
* const { handleChange, handleKeyDown, handleBlur } = useTextHistory({
|
||||
* blockId,
|
||||
* subBlockId,
|
||||
* value: code,
|
||||
* onChange: (newCode) => {
|
||||
* setCode(newCode)
|
||||
* setStoreValue(newCode)
|
||||
* },
|
||||
* })
|
||||
*
|
||||
* <textarea
|
||||
* value={code}
|
||||
* onChange={(e) => handleChange(e.target.value)}
|
||||
* onKeyDown={handleKeyDown}
|
||||
* onBlur={handleBlur}
|
||||
* />
|
||||
* ```
|
||||
*/
|
||||
export function useTextHistory({
|
||||
blockId,
|
||||
subBlockId,
|
||||
value,
|
||||
onChange,
|
||||
disabled = false,
|
||||
}: UseTextHistoryOptions): UseTextHistoryResult {
|
||||
const store = useTextHistoryStore()
|
||||
const initializedRef = useRef(false)
|
||||
const lastExternalValueRef = useRef(value)
|
||||
|
||||
// Initialize history on mount
|
||||
useEffect(() => {
|
||||
if (!initializedRef.current && blockId && subBlockId) {
|
||||
store.initHistory(blockId, subBlockId, value)
|
||||
initializedRef.current = true
|
||||
}
|
||||
}, [blockId, subBlockId, value, store])
|
||||
|
||||
// Handle external value changes (e.g., from AI generation or store sync)
|
||||
useEffect(() => {
|
||||
if (value !== lastExternalValueRef.current) {
|
||||
// This is an external change, commit any pending and record the new value
|
||||
store.commitPending(blockId, subBlockId)
|
||||
store.recordChange(blockId, subBlockId, value)
|
||||
store.commitPending(blockId, subBlockId)
|
||||
lastExternalValueRef.current = value
|
||||
}
|
||||
}, [value, blockId, subBlockId, store])
|
||||
|
||||
const handleChange = useCallback(
|
||||
(newValue: string) => {
|
||||
if (disabled) return
|
||||
|
||||
// Update the external value immediately
|
||||
onChange(newValue)
|
||||
lastExternalValueRef.current = newValue
|
||||
|
||||
// Record to history with debouncing
|
||||
store.recordChange(blockId, subBlockId, newValue)
|
||||
},
|
||||
[blockId, subBlockId, onChange, disabled, store]
|
||||
)
|
||||
|
||||
const undo = useCallback(() => {
|
||||
if (disabled) return
|
||||
|
||||
const previousValue = store.undo(blockId, subBlockId)
|
||||
if (previousValue !== null) {
|
||||
onChange(previousValue)
|
||||
lastExternalValueRef.current = previousValue
|
||||
}
|
||||
}, [blockId, subBlockId, onChange, disabled, store])
|
||||
|
||||
const redo = useCallback(() => {
|
||||
if (disabled) return
|
||||
|
||||
const nextValue = store.redo(blockId, subBlockId)
|
||||
if (nextValue !== null) {
|
||||
onChange(nextValue)
|
||||
lastExternalValueRef.current = nextValue
|
||||
}
|
||||
}, [blockId, subBlockId, onChange, disabled, store])
|
||||
|
||||
const handleKeyDown = useCallback(
|
||||
(e: React.KeyboardEvent): boolean => {
|
||||
if (disabled) return false
|
||||
|
||||
const isMod = e.metaKey || e.ctrlKey
|
||||
|
||||
// Undo: Cmd+Z / Ctrl+Z
|
||||
if (isMod && e.key === 'z' && !e.shiftKey) {
|
||||
e.preventDefault()
|
||||
e.stopPropagation()
|
||||
undo()
|
||||
return true
|
||||
}
|
||||
|
||||
// Redo: Cmd+Shift+Z / Ctrl+Shift+Z / Ctrl+Y
|
||||
if (
|
||||
(isMod && e.key === 'z' && e.shiftKey) ||
|
||||
(isMod && e.key === 'Z') ||
|
||||
(e.ctrlKey && e.key === 'y')
|
||||
) {
|
||||
e.preventDefault()
|
||||
e.stopPropagation()
|
||||
redo()
|
||||
return true
|
||||
}
|
||||
|
||||
return false
|
||||
},
|
||||
[disabled, undo, redo]
|
||||
)
|
||||
|
||||
const handleBlur = useCallback(() => {
|
||||
// Commit any pending changes when the field loses focus
|
||||
store.commitPending(blockId, subBlockId)
|
||||
}, [blockId, subBlockId, store])
|
||||
|
||||
const canUndo = store.canUndo(blockId, subBlockId)
|
||||
const canRedo = store.canRedo(blockId, subBlockId)
|
||||
|
||||
return {
|
||||
handleChange,
|
||||
handleKeyDown,
|
||||
handleBlur,
|
||||
undo,
|
||||
redo,
|
||||
canUndo,
|
||||
canRedo,
|
||||
}
|
||||
}
|
||||
@@ -14,6 +14,7 @@ interface UseWebhookManagementProps {
|
||||
blockId: string
|
||||
triggerId?: string
|
||||
isPreview?: boolean
|
||||
useWebhookUrl?: boolean
|
||||
}
|
||||
|
||||
interface WebhookManagementState {
|
||||
@@ -90,6 +91,7 @@ export function useWebhookManagement({
|
||||
blockId,
|
||||
triggerId,
|
||||
isPreview = false,
|
||||
useWebhookUrl = false,
|
||||
}: UseWebhookManagementProps): WebhookManagementState {
|
||||
const params = useParams()
|
||||
const workflowId = params.workflowId as string
|
||||
@@ -204,9 +206,10 @@ export function useWebhookManagement({
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
loadWebhookOrGenerateUrl()
|
||||
}, [isPreview, triggerId, workflowId, blockId])
|
||||
if (useWebhookUrl) {
|
||||
loadWebhookOrGenerateUrl()
|
||||
}
|
||||
}, [isPreview, triggerId, workflowId, blockId, useWebhookUrl])
|
||||
|
||||
const createWebhook = async (
|
||||
effectiveTriggerId: string | undefined,
|
||||
|
||||
@@ -1,5 +1,7 @@
|
||||
'use client'
|
||||
|
||||
import { useState } from 'react'
|
||||
import { Check, Copy } from 'lucide-react'
|
||||
import { Code } from '@/components/emcn'
|
||||
|
||||
interface CodeBlockProps {
|
||||
@@ -8,5 +10,36 @@ interface CodeBlockProps {
|
||||
}
|
||||
|
||||
export function CodeBlock({ code, language }: CodeBlockProps) {
|
||||
return <Code.Viewer code={code} showGutter={true} language={language} />
|
||||
const [copied, setCopied] = useState(false)
|
||||
|
||||
const handleCopy = () => {
|
||||
navigator.clipboard.writeText(code)
|
||||
setCopied(true)
|
||||
setTimeout(() => setCopied(false), 2000)
|
||||
}
|
||||
|
||||
return (
|
||||
<div className='dark w-full overflow-hidden rounded-md border border-[#2a2a2a] bg-[#1F1F1F] text-sm'>
|
||||
<div className='flex items-center justify-between border-[#2a2a2a] border-b px-4 py-1.5'>
|
||||
<span className='text-[#A3A3A3] text-xs'>{language}</span>
|
||||
<button
|
||||
onClick={handleCopy}
|
||||
className='text-[#A3A3A3] transition-colors hover:text-gray-300'
|
||||
title='Copy code'
|
||||
>
|
||||
{copied ? (
|
||||
<Check className='h-3 w-3' strokeWidth={2} />
|
||||
) : (
|
||||
<Copy className='h-3 w-3' strokeWidth={2} />
|
||||
)}
|
||||
</button>
|
||||
</div>
|
||||
<Code.Viewer
|
||||
code={code}
|
||||
showGutter
|
||||
language={language}
|
||||
className='[&_pre]:!pb-0 m-0 rounded-none border-0 bg-transparent'
|
||||
/>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
@@ -67,7 +67,7 @@ export const mdxComponents: MDXRemoteProps['components'] = {
|
||||
a: (props: any) => {
|
||||
const isAnchorLink = props.className?.includes('anchor')
|
||||
if (isAnchorLink) {
|
||||
return <a {...props} />
|
||||
return <a {...props} className={clsx('text-inherit no-underline', props.className)} />
|
||||
}
|
||||
return (
|
||||
<a
|
||||
@@ -113,7 +113,7 @@ export const mdxComponents: MDXRemoteProps['components'] = {
|
||||
const mappedLanguage = languageMap[language.toLowerCase()] || 'javascript'
|
||||
|
||||
return (
|
||||
<div className='my-6'>
|
||||
<div className='not-prose my-6'>
|
||||
<CodeBlock
|
||||
code={typeof codeContent === 'string' ? codeContent.trim() : String(codeContent)}
|
||||
language={mappedLanguage}
|
||||
@@ -129,9 +129,10 @@ export const mdxComponents: MDXRemoteProps['components'] = {
|
||||
<code
|
||||
{...props}
|
||||
className={clsx(
|
||||
'rounded bg-gray-100 px-1.5 py-0.5 font-mono text-[0.9em] text-red-600',
|
||||
'rounded bg-gray-100 px-1.5 py-0.5 font-mono font-normal text-[0.9em] text-red-600',
|
||||
props.className
|
||||
)}
|
||||
style={{ fontWeight: 400 }}
|
||||
/>
|
||||
)
|
||||
}
|
||||
|
||||
@@ -38,7 +38,9 @@ function slugify(text: string): string {
|
||||
}
|
||||
|
||||
async function scanFrontmatters(): Promise<BlogMeta[]> {
|
||||
if (cachedMeta) return cachedMeta
|
||||
if (cachedMeta) {
|
||||
return cachedMeta
|
||||
}
|
||||
await ensureContentDirs()
|
||||
const entries = await fs.readdir(BLOG_DIR).catch(() => [])
|
||||
const authorsMap = await loadAuthors()
|
||||
|
||||
@@ -50,6 +50,8 @@ type SkippedItemType =
|
||||
| 'invalid_block_type'
|
||||
| 'invalid_edge_target'
|
||||
| 'invalid_edge_source'
|
||||
| 'invalid_source_handle'
|
||||
| 'invalid_target_handle'
|
||||
| 'invalid_subblock_field'
|
||||
| 'missing_required_params'
|
||||
| 'invalid_subflow_parent'
|
||||
@@ -734,8 +736,279 @@ function normalizeResponseFormat(value: any): string {
|
||||
}
|
||||
}
|
||||
|
||||
interface EdgeHandleValidationResult {
|
||||
valid: boolean
|
||||
error?: string
|
||||
}
|
||||
|
||||
/**
|
||||
* Helper to add connections as edges for a block
|
||||
* Validates source handle is valid for the block type
|
||||
*/
|
||||
function validateSourceHandleForBlock(
|
||||
sourceHandle: string,
|
||||
sourceBlockType: string,
|
||||
sourceBlock: any
|
||||
): EdgeHandleValidationResult {
|
||||
if (sourceHandle === 'error') {
|
||||
return { valid: true }
|
||||
}
|
||||
|
||||
switch (sourceBlockType) {
|
||||
case 'loop':
|
||||
if (sourceHandle === 'loop-start-source' || sourceHandle === 'loop-end-source') {
|
||||
return { valid: true }
|
||||
}
|
||||
return {
|
||||
valid: false,
|
||||
error: `Invalid source handle "${sourceHandle}" for loop block. Valid handles: loop-start-source, loop-end-source, error`,
|
||||
}
|
||||
|
||||
case 'parallel':
|
||||
if (sourceHandle === 'parallel-start-source' || sourceHandle === 'parallel-end-source') {
|
||||
return { valid: true }
|
||||
}
|
||||
return {
|
||||
valid: false,
|
||||
error: `Invalid source handle "${sourceHandle}" for parallel block. Valid handles: parallel-start-source, parallel-end-source, error`,
|
||||
}
|
||||
|
||||
case 'condition': {
|
||||
if (!sourceHandle.startsWith('condition-')) {
|
||||
return {
|
||||
valid: false,
|
||||
error: `Invalid source handle "${sourceHandle}" for condition block. Must start with "condition-"`,
|
||||
}
|
||||
}
|
||||
|
||||
const conditionsValue = sourceBlock?.subBlocks?.conditions?.value
|
||||
if (!conditionsValue) {
|
||||
return {
|
||||
valid: false,
|
||||
error: `Invalid condition handle "${sourceHandle}" - no conditions defined`,
|
||||
}
|
||||
}
|
||||
|
||||
return validateConditionHandle(sourceHandle, sourceBlock.id, conditionsValue)
|
||||
}
|
||||
|
||||
case 'router':
|
||||
if (sourceHandle === 'source' || sourceHandle.startsWith('router-')) {
|
||||
return { valid: true }
|
||||
}
|
||||
return {
|
||||
valid: false,
|
||||
error: `Invalid source handle "${sourceHandle}" for router block. Valid handles: source, router-{targetId}, error`,
|
||||
}
|
||||
|
||||
default:
|
||||
if (sourceHandle === 'source') {
|
||||
return { valid: true }
|
||||
}
|
||||
return {
|
||||
valid: false,
|
||||
error: `Invalid source handle "${sourceHandle}" for ${sourceBlockType} block. Valid handles: source, error`,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Validates condition handle references a valid condition in the block.
|
||||
* Accepts both internal IDs (condition-blockId-if) and semantic keys (condition-blockId-else-if)
|
||||
*/
|
||||
function validateConditionHandle(
|
||||
sourceHandle: string,
|
||||
blockId: string,
|
||||
conditionsValue: string | any[]
|
||||
): EdgeHandleValidationResult {
|
||||
let conditions: any[]
|
||||
if (typeof conditionsValue === 'string') {
|
||||
try {
|
||||
conditions = JSON.parse(conditionsValue)
|
||||
} catch {
|
||||
return {
|
||||
valid: false,
|
||||
error: `Cannot validate condition handle "${sourceHandle}" - conditions is not valid JSON`,
|
||||
}
|
||||
}
|
||||
} else if (Array.isArray(conditionsValue)) {
|
||||
conditions = conditionsValue
|
||||
} else {
|
||||
return {
|
||||
valid: false,
|
||||
error: `Cannot validate condition handle "${sourceHandle}" - conditions is not an array`,
|
||||
}
|
||||
}
|
||||
|
||||
if (!Array.isArray(conditions) || conditions.length === 0) {
|
||||
return {
|
||||
valid: false,
|
||||
error: `Invalid condition handle "${sourceHandle}" - no conditions defined`,
|
||||
}
|
||||
}
|
||||
|
||||
const validHandles = new Set<string>()
|
||||
const semanticPrefix = `condition-${blockId}-`
|
||||
let elseIfCount = 0
|
||||
|
||||
for (const condition of conditions) {
|
||||
if (condition.id) {
|
||||
validHandles.add(`condition-${condition.id}`)
|
||||
}
|
||||
|
||||
const title = condition.title?.toLowerCase()
|
||||
if (title === 'if') {
|
||||
validHandles.add(`${semanticPrefix}if`)
|
||||
} else if (title === 'else if') {
|
||||
elseIfCount++
|
||||
validHandles.add(
|
||||
elseIfCount === 1 ? `${semanticPrefix}else-if` : `${semanticPrefix}else-if-${elseIfCount}`
|
||||
)
|
||||
} else if (title === 'else') {
|
||||
validHandles.add(`${semanticPrefix}else`)
|
||||
}
|
||||
}
|
||||
|
||||
if (validHandles.has(sourceHandle)) {
|
||||
return { valid: true }
|
||||
}
|
||||
|
||||
const validOptions = Array.from(validHandles).slice(0, 5)
|
||||
const moreCount = validHandles.size - validOptions.length
|
||||
let validOptionsStr = validOptions.join(', ')
|
||||
if (moreCount > 0) {
|
||||
validOptionsStr += `, ... and ${moreCount} more`
|
||||
}
|
||||
|
||||
return {
|
||||
valid: false,
|
||||
error: `Invalid condition handle "${sourceHandle}". Valid handles: ${validOptionsStr}`,
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Validates target handle is valid (must be 'target')
|
||||
*/
|
||||
function validateTargetHandle(targetHandle: string): EdgeHandleValidationResult {
|
||||
if (targetHandle === 'target') {
|
||||
return { valid: true }
|
||||
}
|
||||
return {
|
||||
valid: false,
|
||||
error: `Invalid target handle "${targetHandle}". Expected "target"`,
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a validated edge between two blocks.
|
||||
* Returns true if edge was created, false if skipped due to validation errors.
|
||||
*/
|
||||
function createValidatedEdge(
|
||||
modifiedState: any,
|
||||
sourceBlockId: string,
|
||||
targetBlockId: string,
|
||||
sourceHandle: string,
|
||||
targetHandle: string,
|
||||
operationType: string,
|
||||
logger: ReturnType<typeof createLogger>,
|
||||
skippedItems?: SkippedItem[]
|
||||
): boolean {
|
||||
if (!modifiedState.blocks[targetBlockId]) {
|
||||
logger.warn(`Target block "${targetBlockId}" not found. Edge skipped.`, {
|
||||
sourceBlockId,
|
||||
targetBlockId,
|
||||
sourceHandle,
|
||||
})
|
||||
skippedItems?.push({
|
||||
type: 'invalid_edge_target',
|
||||
operationType,
|
||||
blockId: sourceBlockId,
|
||||
reason: `Edge from "${sourceBlockId}" to "${targetBlockId}" skipped - target block does not exist`,
|
||||
details: { sourceHandle, targetHandle, targetId: targetBlockId },
|
||||
})
|
||||
return false
|
||||
}
|
||||
|
||||
const sourceBlock = modifiedState.blocks[sourceBlockId]
|
||||
if (!sourceBlock) {
|
||||
logger.warn(`Source block "${sourceBlockId}" not found. Edge skipped.`, {
|
||||
sourceBlockId,
|
||||
targetBlockId,
|
||||
})
|
||||
skippedItems?.push({
|
||||
type: 'invalid_edge_source',
|
||||
operationType,
|
||||
blockId: sourceBlockId,
|
||||
reason: `Edge from "${sourceBlockId}" to "${targetBlockId}" skipped - source block does not exist`,
|
||||
details: { sourceHandle, targetHandle, targetId: targetBlockId },
|
||||
})
|
||||
return false
|
||||
}
|
||||
|
||||
const sourceBlockType = sourceBlock.type
|
||||
if (!sourceBlockType) {
|
||||
logger.warn(`Source block "${sourceBlockId}" has no type. Edge skipped.`, {
|
||||
sourceBlockId,
|
||||
targetBlockId,
|
||||
})
|
||||
skippedItems?.push({
|
||||
type: 'invalid_edge_source',
|
||||
operationType,
|
||||
blockId: sourceBlockId,
|
||||
reason: `Edge from "${sourceBlockId}" to "${targetBlockId}" skipped - source block has no type`,
|
||||
details: { sourceHandle, targetHandle, targetId: targetBlockId },
|
||||
})
|
||||
return false
|
||||
}
|
||||
|
||||
const sourceValidation = validateSourceHandleForBlock(sourceHandle, sourceBlockType, sourceBlock)
|
||||
if (!sourceValidation.valid) {
|
||||
logger.warn(`Invalid source handle. Edge skipped.`, {
|
||||
sourceBlockId,
|
||||
targetBlockId,
|
||||
sourceHandle,
|
||||
error: sourceValidation.error,
|
||||
})
|
||||
skippedItems?.push({
|
||||
type: 'invalid_source_handle',
|
||||
operationType,
|
||||
blockId: sourceBlockId,
|
||||
reason: sourceValidation.error || `Invalid source handle "${sourceHandle}"`,
|
||||
details: { sourceHandle, targetHandle, targetId: targetBlockId },
|
||||
})
|
||||
return false
|
||||
}
|
||||
|
||||
const targetValidation = validateTargetHandle(targetHandle)
|
||||
if (!targetValidation.valid) {
|
||||
logger.warn(`Invalid target handle. Edge skipped.`, {
|
||||
sourceBlockId,
|
||||
targetBlockId,
|
||||
targetHandle,
|
||||
error: targetValidation.error,
|
||||
})
|
||||
skippedItems?.push({
|
||||
type: 'invalid_target_handle',
|
||||
operationType,
|
||||
blockId: sourceBlockId,
|
||||
reason: targetValidation.error || `Invalid target handle "${targetHandle}"`,
|
||||
details: { sourceHandle, targetHandle, targetId: targetBlockId },
|
||||
})
|
||||
return false
|
||||
}
|
||||
|
||||
modifiedState.edges.push({
|
||||
id: crypto.randomUUID(),
|
||||
source: sourceBlockId,
|
||||
sourceHandle,
|
||||
target: targetBlockId,
|
||||
targetHandle,
|
||||
type: 'default',
|
||||
})
|
||||
return true
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds connections as edges for a block
|
||||
*/
|
||||
function addConnectionsAsEdges(
|
||||
modifiedState: any,
|
||||
@@ -747,34 +1020,16 @@ function addConnectionsAsEdges(
|
||||
Object.entries(connections).forEach(([sourceHandle, targets]) => {
|
||||
const targetArray = Array.isArray(targets) ? targets : [targets]
|
||||
targetArray.forEach((targetId: string) => {
|
||||
// Validate target block exists - skip edge if target doesn't exist
|
||||
if (!modifiedState.blocks[targetId]) {
|
||||
logger.warn(
|
||||
`Target block "${targetId}" not found when creating connection from "${blockId}". ` +
|
||||
`Edge skipped.`,
|
||||
{
|
||||
sourceBlockId: blockId,
|
||||
targetBlockId: targetId,
|
||||
existingBlocks: Object.keys(modifiedState.blocks),
|
||||
}
|
||||
)
|
||||
skippedItems?.push({
|
||||
type: 'invalid_edge_target',
|
||||
operationType: 'add_edge',
|
||||
blockId: blockId,
|
||||
reason: `Edge from "${blockId}" to "${targetId}" skipped - target block does not exist`,
|
||||
details: { sourceHandle, targetId },
|
||||
})
|
||||
return
|
||||
}
|
||||
modifiedState.edges.push({
|
||||
id: crypto.randomUUID(),
|
||||
source: blockId,
|
||||
createValidatedEdge(
|
||||
modifiedState,
|
||||
blockId,
|
||||
targetId,
|
||||
sourceHandle,
|
||||
target: targetId,
|
||||
targetHandle: 'target',
|
||||
type: 'default',
|
||||
})
|
||||
'target',
|
||||
'add_edge',
|
||||
logger,
|
||||
skippedItems
|
||||
)
|
||||
})
|
||||
})
|
||||
}
|
||||
@@ -1257,67 +1512,44 @@ function applyOperationsToWorkflowState(
|
||||
|
||||
// Handle connections update (convert to edges)
|
||||
if (params?.connections) {
|
||||
// Remove existing edges from this block
|
||||
modifiedState.edges = modifiedState.edges.filter((edge: any) => edge.source !== block_id)
|
||||
|
||||
// Add new edges based on connections
|
||||
Object.entries(params.connections).forEach(([connectionType, targets]) => {
|
||||
if (targets === null) return
|
||||
|
||||
// Map semantic connection names to actual React Flow handle IDs
|
||||
// 'success' in YAML/connections maps to 'source' handle in React Flow
|
||||
const mapConnectionTypeToHandle = (type: string): string => {
|
||||
if (type === 'success') return 'source'
|
||||
if (type === 'error') return 'error'
|
||||
// Conditions and other types pass through as-is
|
||||
return type
|
||||
}
|
||||
|
||||
const actualSourceHandle = mapConnectionTypeToHandle(connectionType)
|
||||
const sourceHandle = mapConnectionTypeToHandle(connectionType)
|
||||
|
||||
const addEdge = (targetBlock: string, targetHandle?: string) => {
|
||||
// Validate target block exists - skip edge if target doesn't exist
|
||||
if (!modifiedState.blocks[targetBlock]) {
|
||||
logger.warn(
|
||||
`Target block "${targetBlock}" not found when creating connection from "${block_id}". ` +
|
||||
`Edge skipped.`,
|
||||
{
|
||||
sourceBlockId: block_id,
|
||||
targetBlockId: targetBlock,
|
||||
existingBlocks: Object.keys(modifiedState.blocks),
|
||||
}
|
||||
)
|
||||
logSkippedItem(skippedItems, {
|
||||
type: 'invalid_edge_target',
|
||||
operationType: 'edit',
|
||||
blockId: block_id,
|
||||
reason: `Edge from "${block_id}" to "${targetBlock}" skipped - target block does not exist`,
|
||||
details: { sourceHandle: actualSourceHandle, targetId: targetBlock },
|
||||
})
|
||||
return
|
||||
}
|
||||
modifiedState.edges.push({
|
||||
id: crypto.randomUUID(),
|
||||
source: block_id,
|
||||
sourceHandle: actualSourceHandle,
|
||||
target: targetBlock,
|
||||
targetHandle: targetHandle || 'target',
|
||||
type: 'default',
|
||||
})
|
||||
const addEdgeForTarget = (targetBlock: string, targetHandle?: string) => {
|
||||
createValidatedEdge(
|
||||
modifiedState,
|
||||
block_id,
|
||||
targetBlock,
|
||||
sourceHandle,
|
||||
targetHandle || 'target',
|
||||
'edit',
|
||||
logger,
|
||||
skippedItems
|
||||
)
|
||||
}
|
||||
|
||||
if (typeof targets === 'string') {
|
||||
addEdge(targets)
|
||||
addEdgeForTarget(targets)
|
||||
} else if (Array.isArray(targets)) {
|
||||
targets.forEach((target: any) => {
|
||||
if (typeof target === 'string') {
|
||||
addEdge(target)
|
||||
addEdgeForTarget(target)
|
||||
} else if (target?.block) {
|
||||
addEdge(target.block, target.handle)
|
||||
addEdgeForTarget(target.block, target.handle)
|
||||
}
|
||||
})
|
||||
} else if (typeof targets === 'object' && (targets as any)?.block) {
|
||||
addEdge((targets as any).block, (targets as any).handle)
|
||||
addEdgeForTarget((targets as any).block, (targets as any).handle)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
@@ -37,8 +37,28 @@ export const isEmailVerificationEnabled = isTruthy(env.EMAIL_VERIFICATION_ENABLE
|
||||
|
||||
/**
|
||||
* Is authentication disabled (for self-hosted deployments behind private networks)
|
||||
* This flag is blocked when isHosted is true.
|
||||
*/
|
||||
export const isAuthDisabled = isTruthy(env.DISABLE_AUTH)
|
||||
export const isAuthDisabled = isTruthy(env.DISABLE_AUTH) && !isHosted
|
||||
|
||||
if (isTruthy(env.DISABLE_AUTH)) {
|
||||
import('@/lib/logs/console/logger')
|
||||
.then(({ createLogger }) => {
|
||||
const logger = createLogger('FeatureFlags')
|
||||
if (isHosted) {
|
||||
logger.error(
|
||||
'DISABLE_AUTH is set but ignored on hosted environment. Authentication remains enabled for security.'
|
||||
)
|
||||
} else {
|
||||
logger.warn(
|
||||
'DISABLE_AUTH is enabled. Authentication is bypassed and all requests use an anonymous session. Only use this in trusted private networks.'
|
||||
)
|
||||
}
|
||||
})
|
||||
.catch(() => {
|
||||
// Fallback during config compilation when logger is unavailable
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Is user registration disabled
|
||||
|
||||
@@ -31,20 +31,25 @@ vi.mock('crypto', () => ({
|
||||
}),
|
||||
}))
|
||||
|
||||
vi.mock('@/lib/core/config/env', () => ({
|
||||
env: {
|
||||
ENCRYPTION_KEY: '0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef',
|
||||
OPENAI_API_KEY_1: 'test-openai-key-1',
|
||||
OPENAI_API_KEY_2: 'test-openai-key-2',
|
||||
OPENAI_API_KEY_3: 'test-openai-key-3',
|
||||
ANTHROPIC_API_KEY_1: 'test-anthropic-key-1',
|
||||
ANTHROPIC_API_KEY_2: 'test-anthropic-key-2',
|
||||
ANTHROPIC_API_KEY_3: 'test-anthropic-key-3',
|
||||
GEMINI_API_KEY_1: 'test-gemini-key-1',
|
||||
GEMINI_API_KEY_2: 'test-gemini-key-2',
|
||||
GEMINI_API_KEY_3: 'test-gemini-key-3',
|
||||
},
|
||||
}))
|
||||
vi.mock('@/lib/core/config/env', async (importOriginal) => {
|
||||
const actual = await importOriginal<typeof import('@/lib/core/config/env')>()
|
||||
return {
|
||||
...actual,
|
||||
env: {
|
||||
...actual.env,
|
||||
ENCRYPTION_KEY: '0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef', // fake key for testing
|
||||
OPENAI_API_KEY_1: 'test-openai-key-1', // fake key for testing
|
||||
OPENAI_API_KEY_2: 'test-openai-key-2', // fake key for testing
|
||||
OPENAI_API_KEY_3: 'test-openai-key-3', // fake key for testing
|
||||
ANTHROPIC_API_KEY_1: 'test-anthropic-key-1', // fake key for testing
|
||||
ANTHROPIC_API_KEY_2: 'test-anthropic-key-2', // fake key for testing
|
||||
ANTHROPIC_API_KEY_3: 'test-anthropic-key-3', // fake key for testing
|
||||
GEMINI_API_KEY_1: 'test-gemini-key-1', // fake key for testing
|
||||
GEMINI_API_KEY_2: 'test-gemini-key-2', // fake key for testing
|
||||
GEMINI_API_KEY_3: 'test-gemini-key-3', // fake key for testing
|
||||
},
|
||||
}
|
||||
})
|
||||
|
||||
afterEach(() => {
|
||||
vi.clearAllMocks()
|
||||
|
||||
@@ -1,3 +1,22 @@
|
||||
import { getBaseUrl } from './urls'
|
||||
|
||||
/**
|
||||
* Checks if a URL is same-origin with the application's base URL.
|
||||
* Used to prevent open redirect vulnerabilities.
|
||||
*
|
||||
* @param url - The URL to validate
|
||||
* @returns True if the URL is same-origin, false otherwise (secure default)
|
||||
*/
|
||||
export function isSameOrigin(url: string): boolean {
|
||||
try {
|
||||
const targetUrl = new URL(url)
|
||||
const appUrl = new URL(getBaseUrl())
|
||||
return targetUrl.origin === appUrl.origin
|
||||
} catch {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Validates a name by removing any characters that could cause issues
|
||||
* with variable references or node naming.
|
||||
|
||||
@@ -81,8 +81,8 @@ export async function emitWorkflowExecutionCompleted(log: WorkflowExecutionLog):
|
||||
)
|
||||
|
||||
for (const subscription of subscriptions) {
|
||||
const levelMatches = subscription.levelFilter?.includes(log.level) ?? true
|
||||
const triggerMatches = subscription.triggerFilter?.includes(log.trigger) ?? true
|
||||
const levelMatches = subscription.levelFilter.includes(log.level)
|
||||
const triggerMatches = subscription.triggerFilter.includes(log.trigger)
|
||||
|
||||
if (!levelMatches || !triggerMatches) {
|
||||
logger.debug(`Skipping subscription ${subscription.id} due to filter mismatch`)
|
||||
@@ -98,6 +98,7 @@ export async function emitWorkflowExecutionCompleted(log: WorkflowExecutionLog):
|
||||
status: log.level === 'error' ? 'error' : 'success',
|
||||
durationMs: log.totalDurationMs || 0,
|
||||
cost: (log.cost as { total?: number })?.total || 0,
|
||||
triggerFilter: subscription.triggerFilter,
|
||||
}
|
||||
|
||||
const shouldAlert = await shouldTriggerAlert(alertConfig, context, subscription.lastAlertAt)
|
||||
|
||||
@@ -471,8 +471,10 @@ function groupIterationBlocks(spans: TraceSpan[]): TraceSpan[] {
|
||||
}
|
||||
})
|
||||
|
||||
// Include loop/parallel spans that have errors (e.g., validation errors that blocked execution)
|
||||
// These won't have iteration children, so they should appear directly in results
|
||||
const nonIterationContainerSpans = normalSpans.filter(
|
||||
(span) => span.type !== 'parallel' && span.type !== 'loop'
|
||||
(span) => (span.type !== 'parallel' && span.type !== 'loop') || span.status === 'error'
|
||||
)
|
||||
|
||||
if (iterationSpans.length > 0) {
|
||||
|
||||
@@ -51,8 +51,11 @@ export interface ExecutionEnvironment {
|
||||
workspaceId: string
|
||||
}
|
||||
|
||||
export const ALL_TRIGGER_TYPES = ['api', 'webhook', 'schedule', 'manual', 'chat'] as const
|
||||
export type TriggerType = (typeof ALL_TRIGGER_TYPES)[number]
|
||||
|
||||
export interface ExecutionTrigger {
|
||||
type: 'api' | 'webhook' | 'schedule' | 'manual' | 'chat' | string
|
||||
type: TriggerType | string
|
||||
source: string
|
||||
data?: Record<string, unknown>
|
||||
timestamp: string
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import { db } from '@sim/db'
|
||||
import { workflowExecutionLogs } from '@sim/db/schema'
|
||||
import { and, avg, count, desc, eq, gte } from 'drizzle-orm'
|
||||
import { and, avg, count, desc, eq, gte, inArray } from 'drizzle-orm'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
|
||||
const logger = createLogger('AlertRules')
|
||||
@@ -135,25 +135,29 @@ export function isInCooldown(lastAlertAt: Date | null): boolean {
|
||||
return new Date() < cooldownEnd
|
||||
}
|
||||
|
||||
/**
|
||||
* Context passed to alert check functions
|
||||
*/
|
||||
export interface AlertCheckContext {
|
||||
workflowId: string
|
||||
executionId: string
|
||||
status: 'success' | 'error'
|
||||
durationMs: number
|
||||
cost: number
|
||||
triggerFilter: string[]
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if consecutive failures threshold is met
|
||||
*/
|
||||
async function checkConsecutiveFailures(workflowId: string, threshold: number): Promise<boolean> {
|
||||
async function checkConsecutiveFailures(
|
||||
workflowId: string,
|
||||
threshold: number,
|
||||
triggerFilter: string[]
|
||||
): Promise<boolean> {
|
||||
const recentLogs = await db
|
||||
.select({ level: workflowExecutionLogs.level })
|
||||
.from(workflowExecutionLogs)
|
||||
.where(eq(workflowExecutionLogs.workflowId, workflowId))
|
||||
.where(
|
||||
and(
|
||||
eq(workflowExecutionLogs.workflowId, workflowId),
|
||||
inArray(workflowExecutionLogs.trigger, triggerFilter)
|
||||
)
|
||||
)
|
||||
.orderBy(desc(workflowExecutionLogs.createdAt))
|
||||
.limit(threshold)
|
||||
|
||||
@@ -162,13 +166,11 @@ async function checkConsecutiveFailures(workflowId: string, threshold: number):
|
||||
return recentLogs.every((log) => log.level === 'error')
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if failure rate exceeds threshold
|
||||
*/
|
||||
async function checkFailureRate(
|
||||
workflowId: string,
|
||||
ratePercent: number,
|
||||
windowHours: number
|
||||
windowHours: number,
|
||||
triggerFilter: string[]
|
||||
): Promise<boolean> {
|
||||
const windowStart = new Date(Date.now() - windowHours * 60 * 60 * 1000)
|
||||
|
||||
@@ -181,7 +183,8 @@ async function checkFailureRate(
|
||||
.where(
|
||||
and(
|
||||
eq(workflowExecutionLogs.workflowId, workflowId),
|
||||
gte(workflowExecutionLogs.createdAt, windowStart)
|
||||
gte(workflowExecutionLogs.createdAt, windowStart),
|
||||
inArray(workflowExecutionLogs.trigger, triggerFilter)
|
||||
)
|
||||
)
|
||||
.orderBy(workflowExecutionLogs.createdAt)
|
||||
@@ -206,14 +209,12 @@ function checkLatencyThreshold(durationMs: number, thresholdMs: number): boolean
|
||||
return durationMs > thresholdMs
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if execution duration is significantly above average
|
||||
*/
|
||||
async function checkLatencySpike(
|
||||
workflowId: string,
|
||||
currentDurationMs: number,
|
||||
spikePercent: number,
|
||||
windowHours: number
|
||||
windowHours: number,
|
||||
triggerFilter: string[]
|
||||
): Promise<boolean> {
|
||||
const windowStart = new Date(Date.now() - windowHours * 60 * 60 * 1000)
|
||||
|
||||
@@ -226,7 +227,8 @@ async function checkLatencySpike(
|
||||
.where(
|
||||
and(
|
||||
eq(workflowExecutionLogs.workflowId, workflowId),
|
||||
gte(workflowExecutionLogs.createdAt, windowStart)
|
||||
gte(workflowExecutionLogs.createdAt, windowStart),
|
||||
inArray(workflowExecutionLogs.trigger, triggerFilter)
|
||||
)
|
||||
)
|
||||
|
||||
@@ -248,13 +250,11 @@ function checkCostThreshold(cost: number, thresholdDollars: number): boolean {
|
||||
return cost > thresholdDollars
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if error count exceeds threshold within window
|
||||
*/
|
||||
async function checkErrorCount(
|
||||
workflowId: string,
|
||||
threshold: number,
|
||||
windowHours: number
|
||||
windowHours: number,
|
||||
triggerFilter: string[]
|
||||
): Promise<boolean> {
|
||||
const windowStart = new Date(Date.now() - windowHours * 60 * 60 * 1000)
|
||||
|
||||
@@ -265,7 +265,8 @@ async function checkErrorCount(
|
||||
and(
|
||||
eq(workflowExecutionLogs.workflowId, workflowId),
|
||||
eq(workflowExecutionLogs.level, 'error'),
|
||||
gte(workflowExecutionLogs.createdAt, windowStart)
|
||||
gte(workflowExecutionLogs.createdAt, windowStart),
|
||||
inArray(workflowExecutionLogs.trigger, triggerFilter)
|
||||
)
|
||||
)
|
||||
|
||||
@@ -273,9 +274,6 @@ async function checkErrorCount(
|
||||
return errorCount >= threshold
|
||||
}
|
||||
|
||||
/**
|
||||
* Evaluates if an alert should be triggered based on the configuration
|
||||
*/
|
||||
export async function shouldTriggerAlert(
|
||||
config: AlertConfig,
|
||||
context: AlertCheckContext,
|
||||
@@ -287,16 +285,21 @@ export async function shouldTriggerAlert(
|
||||
}
|
||||
|
||||
const { rule } = config
|
||||
const { workflowId, status, durationMs, cost } = context
|
||||
const { workflowId, status, durationMs, cost, triggerFilter } = context
|
||||
|
||||
switch (rule) {
|
||||
case 'consecutive_failures':
|
||||
if (status !== 'error') return false
|
||||
return checkConsecutiveFailures(workflowId, config.consecutiveFailures!)
|
||||
return checkConsecutiveFailures(workflowId, config.consecutiveFailures!, triggerFilter)
|
||||
|
||||
case 'failure_rate':
|
||||
if (status !== 'error') return false
|
||||
return checkFailureRate(workflowId, config.failureRatePercent!, config.windowHours!)
|
||||
return checkFailureRate(
|
||||
workflowId,
|
||||
config.failureRatePercent!,
|
||||
config.windowHours!,
|
||||
triggerFilter
|
||||
)
|
||||
|
||||
case 'latency_threshold':
|
||||
return checkLatencyThreshold(durationMs, config.durationThresholdMs!)
|
||||
@@ -306,19 +309,24 @@ export async function shouldTriggerAlert(
|
||||
workflowId,
|
||||
durationMs,
|
||||
config.latencySpikePercent!,
|
||||
config.windowHours!
|
||||
config.windowHours!,
|
||||
triggerFilter
|
||||
)
|
||||
|
||||
case 'cost_threshold':
|
||||
return checkCostThreshold(cost, config.costThresholdDollars!)
|
||||
|
||||
case 'no_activity':
|
||||
// no_activity alerts are handled by the hourly polling job, not execution events
|
||||
return false
|
||||
|
||||
case 'error_count':
|
||||
if (status !== 'error') return false
|
||||
return checkErrorCount(workflowId, config.errorCountThreshold!, config.windowHours!)
|
||||
return checkErrorCount(
|
||||
workflowId,
|
||||
config.errorCountThreshold!,
|
||||
config.windowHours!,
|
||||
triggerFilter
|
||||
)
|
||||
|
||||
default:
|
||||
logger.warn(`Unknown alert rule: ${rule}`)
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import { db } from '@sim/db'
|
||||
import {
|
||||
workflow,
|
||||
workflowDeploymentVersion,
|
||||
workflowExecutionLogs,
|
||||
workspaceNotificationDelivery,
|
||||
workspaceNotificationSubscription,
|
||||
@@ -9,15 +10,81 @@ import { and, eq, gte, inArray, sql } from 'drizzle-orm'
|
||||
import { v4 as uuidv4 } from 'uuid'
|
||||
import { isTriggerDevEnabled } from '@/lib/core/config/feature-flags'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { TRIGGER_TYPES } from '@/lib/workflows/triggers/triggers'
|
||||
import {
|
||||
executeNotificationDelivery,
|
||||
workspaceNotificationDeliveryTask,
|
||||
} from '@/background/workspace-notification-delivery'
|
||||
import type { WorkflowState } from '@/stores/workflows/workflow/types'
|
||||
import type { AlertConfig } from './alert-rules'
|
||||
import { isInCooldown } from './alert-rules'
|
||||
|
||||
const logger = createLogger('InactivityPolling')
|
||||
|
||||
const SCHEDULE_BLOCK_TYPES: string[] = [TRIGGER_TYPES.SCHEDULE]
|
||||
const WEBHOOK_BLOCK_TYPES: string[] = [TRIGGER_TYPES.WEBHOOK, TRIGGER_TYPES.GENERIC_WEBHOOK]
|
||||
|
||||
function deploymentHasTriggerType(
|
||||
deploymentState: Pick<WorkflowState, 'blocks'>,
|
||||
triggerFilter: string[]
|
||||
): boolean {
|
||||
const blocks = deploymentState.blocks
|
||||
if (!blocks) return false
|
||||
|
||||
const alwaysAvailable = ['api', 'manual', 'chat']
|
||||
if (triggerFilter.some((t) => alwaysAvailable.includes(t))) {
|
||||
return true
|
||||
}
|
||||
|
||||
for (const block of Object.values(blocks)) {
|
||||
if (triggerFilter.includes('schedule') && SCHEDULE_BLOCK_TYPES.includes(block.type)) {
|
||||
return true
|
||||
}
|
||||
|
||||
if (triggerFilter.includes('webhook')) {
|
||||
if (WEBHOOK_BLOCK_TYPES.includes(block.type)) {
|
||||
return true
|
||||
}
|
||||
if (block.triggerMode === true) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
async function getWorkflowsWithTriggerTypes(
|
||||
workspaceId: string,
|
||||
triggerFilter: string[]
|
||||
): Promise<Set<string>> {
|
||||
const workflowIds = new Set<string>()
|
||||
|
||||
const deployedWorkflows = await db
|
||||
.select({
|
||||
workflowId: workflow.id,
|
||||
deploymentState: workflowDeploymentVersion.state,
|
||||
})
|
||||
.from(workflow)
|
||||
.innerJoin(
|
||||
workflowDeploymentVersion,
|
||||
and(
|
||||
eq(workflowDeploymentVersion.workflowId, workflow.id),
|
||||
eq(workflowDeploymentVersion.isActive, true)
|
||||
)
|
||||
)
|
||||
.where(and(eq(workflow.workspaceId, workspaceId), eq(workflow.isDeployed, true)))
|
||||
|
||||
for (const w of deployedWorkflows) {
|
||||
const state = w.deploymentState as WorkflowState | null
|
||||
if (state && deploymentHasTriggerType(state, triggerFilter)) {
|
||||
workflowIds.add(w.workflowId)
|
||||
}
|
||||
}
|
||||
|
||||
return workflowIds
|
||||
}
|
||||
|
||||
interface InactivityCheckResult {
|
||||
subscriptionId: string
|
||||
workflowId: string
|
||||
@@ -25,9 +92,6 @@ interface InactivityCheckResult {
|
||||
reason?: string
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks a single workflow for inactivity and triggers notification if needed
|
||||
*/
|
||||
async function checkWorkflowInactivity(
|
||||
subscription: typeof workspaceNotificationSubscription.$inferSelect,
|
||||
workflowId: string,
|
||||
@@ -141,9 +205,6 @@ async function checkWorkflowInactivity(
|
||||
return result
|
||||
}
|
||||
|
||||
/**
|
||||
* Polls all active no_activity subscriptions and triggers alerts as needed
|
||||
*/
|
||||
export async function pollInactivityAlerts(): Promise<{
|
||||
total: number
|
||||
triggered: number
|
||||
@@ -179,19 +240,30 @@ export async function pollInactivityAlerts(): Promise<{
|
||||
continue
|
||||
}
|
||||
|
||||
const triggerFilter = subscription.triggerFilter as string[]
|
||||
if (!triggerFilter || triggerFilter.length === 0) {
|
||||
logger.warn(`Subscription ${subscription.id} has no trigger filter, skipping`)
|
||||
continue
|
||||
}
|
||||
|
||||
const eligibleWorkflowIds = await getWorkflowsWithTriggerTypes(
|
||||
subscription.workspaceId,
|
||||
triggerFilter
|
||||
)
|
||||
|
||||
let workflowIds: string[] = []
|
||||
|
||||
if (subscription.allWorkflows) {
|
||||
const workflows = await db
|
||||
.select({ id: workflow.id })
|
||||
.from(workflow)
|
||||
.where(eq(workflow.workspaceId, subscription.workspaceId))
|
||||
|
||||
workflowIds = workflows.map((w) => w.id)
|
||||
workflowIds = Array.from(eligibleWorkflowIds)
|
||||
} else {
|
||||
workflowIds = subscription.workflowIds || []
|
||||
workflowIds = (subscription.workflowIds || []).filter((id) => eligibleWorkflowIds.has(id))
|
||||
}
|
||||
|
||||
logger.debug(`Checking ${workflowIds.length} workflows for subscription ${subscription.id}`, {
|
||||
triggerFilter,
|
||||
eligibleCount: eligibleWorkflowIds.size,
|
||||
})
|
||||
|
||||
for (const workflowId of workflowIds) {
|
||||
const result = await checkWorkflowInactivity(subscription, workflowId, alertConfig)
|
||||
results.push(result)
|
||||
|
||||
@@ -81,7 +81,11 @@ async function formatTeamsGraphNotification(
|
||||
foundWorkflow: any,
|
||||
request: NextRequest
|
||||
): Promise<any> {
|
||||
const notification = body.value[0]
|
||||
const notification = body.value?.[0]
|
||||
if (!notification) {
|
||||
logger.warn('Received empty Teams notification body')
|
||||
return null
|
||||
}
|
||||
const changeType = notification.changeType || 'created'
|
||||
const resource = notification.resource || ''
|
||||
const subscriptionId = notification.subscriptionId || ''
|
||||
|
||||
@@ -225,6 +225,13 @@ export function getBlockOutputs(
|
||||
return getUnifiedStartOutputs(subBlocks)
|
||||
}
|
||||
|
||||
if (blockType === 'human_in_the_loop') {
|
||||
// For human_in_the_loop, only expose url (inputFormat fields are only available after resume)
|
||||
return {
|
||||
url: { type: 'string', description: 'Resume UI URL' },
|
||||
}
|
||||
}
|
||||
|
||||
if (blockType === 'approval') {
|
||||
// Start with only url (apiUrl commented out - not accessible as output)
|
||||
const pauseResumeOutputs: Record<string, any> = {
|
||||
|
||||
@@ -84,6 +84,7 @@ const nextConfig: NextConfig = {
|
||||
],
|
||||
outputFileTracingIncludes: {
|
||||
'/api/tools/stagehand/*': ['./node_modules/ws/**/*'],
|
||||
'/*': ['./node_modules/sharp/**/*', './node_modules/@img/**/*'],
|
||||
},
|
||||
experimental: {
|
||||
optimizeCss: true,
|
||||
|
||||
BIN
apps/sim/public/studio/authors/emir.jpg
Normal file
|
After Width: | Height: | Size: 32 KiB |
|
Before Width: | Height: | Size: 2.0 MiB |
BIN
apps/sim/public/studio/authors/sid.jpg
Normal file
|
After Width: | Height: | Size: 349 KiB |
|
Before Width: | Height: | Size: 123 KiB |
BIN
apps/sim/public/studio/authors/waleed.jpg
Normal file
|
After Width: | Height: | Size: 33 KiB |
|
Before Width: | Height: | Size: 2.4 MiB |
BIN
apps/sim/public/studio/series-a/team.jpg
Normal file
|
After Width: | Height: | Size: 515 KiB |
|
Before Width: | Height: | Size: 10 MiB |
1
apps/sim/stores/text-history/index.ts
Normal file
@@ -0,0 +1 @@
|
||||
export { useTextHistoryStore } from './store'
|
||||
339
apps/sim/stores/text-history/store.ts
Normal file
@@ -0,0 +1,339 @@
|
||||
import { create } from 'zustand'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
|
||||
const logger = createLogger('TextHistoryStore')
|
||||
|
||||
/**
|
||||
* Default debounce delay in milliseconds.
|
||||
* Changes within this window are coalesced into a single history entry.
|
||||
*/
|
||||
const DEBOUNCE_DELAY_MS = 500
|
||||
|
||||
/**
|
||||
* Maximum number of history entries per text field.
|
||||
*/
|
||||
const MAX_HISTORY_SIZE = 10
|
||||
|
||||
interface TextHistoryEntry {
|
||||
/** The undo/redo stack of text values */
|
||||
stack: string[]
|
||||
/** Current position in the stack (0 = oldest) */
|
||||
index: number
|
||||
/** Pending value that hasn't been committed to history yet */
|
||||
pending: string | null
|
||||
/** Timer ID for debounced commit */
|
||||
debounceTimer: ReturnType<typeof setTimeout> | null
|
||||
/** Timestamp of last change (for coalescing logic) */
|
||||
lastChangeAt: number
|
||||
}
|
||||
|
||||
interface TextHistoryState {
|
||||
/** Map of "blockId:subBlockId" to history entry */
|
||||
histories: Record<string, TextHistoryEntry>
|
||||
|
||||
/**
|
||||
* Records a text change with debouncing.
|
||||
* Multiple rapid changes are coalesced into a single history entry.
|
||||
*/
|
||||
recordChange: (blockId: string, subBlockId: string, value: string) => void
|
||||
|
||||
/**
|
||||
* Immediately commits any pending changes to history.
|
||||
* Call this on blur or before navigation.
|
||||
*/
|
||||
commitPending: (blockId: string, subBlockId: string) => void
|
||||
|
||||
/**
|
||||
* Undo the last text change for a specific field.
|
||||
* @returns The previous value, or null if at the beginning of history
|
||||
*/
|
||||
undo: (blockId: string, subBlockId: string) => string | null
|
||||
|
||||
/**
|
||||
* Redo the last undone text change for a specific field.
|
||||
* @returns The next value, or null if at the end of history
|
||||
*/
|
||||
redo: (blockId: string, subBlockId: string) => string | null
|
||||
|
||||
/**
|
||||
* Check if undo is available for a field.
|
||||
*/
|
||||
canUndo: (blockId: string, subBlockId: string) => boolean
|
||||
|
||||
/**
|
||||
* Check if redo is available for a field.
|
||||
*/
|
||||
canRedo: (blockId: string, subBlockId: string) => boolean
|
||||
|
||||
/**
|
||||
* Initialize history for a field with an initial value.
|
||||
* Called when a text field first mounts.
|
||||
*/
|
||||
initHistory: (blockId: string, subBlockId: string, initialValue: string) => void
|
||||
|
||||
/**
|
||||
* Clear history for a specific field.
|
||||
*/
|
||||
clearHistory: (blockId: string, subBlockId: string) => void
|
||||
|
||||
/**
|
||||
* Clear all history for a block (when block is deleted).
|
||||
*/
|
||||
clearBlockHistory: (blockId: string) => void
|
||||
}
|
||||
|
||||
function getKey(blockId: string, subBlockId: string): string {
|
||||
return `${blockId}:${subBlockId}`
|
||||
}
|
||||
|
||||
function createEmptyEntry(initialValue: string): TextHistoryEntry {
|
||||
return {
|
||||
stack: [initialValue],
|
||||
index: 0,
|
||||
pending: null,
|
||||
debounceTimer: null,
|
||||
lastChangeAt: 0,
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Zustand store implementing per-field text undo/redo with debounced
 * coalescing: rapid keystrokes update a `pending` value and reset a timer;
 * only when the timer fires (or commitPending is called explicitly) does the
 * value become a history stack entry.
 */
export const useTextHistoryStore = create<TextHistoryState>((set, get) => ({
  histories: {},

  initHistory: (blockId: string, subBlockId: string, initialValue: string) => {
    const key = getKey(blockId, subBlockId)
    const state = get()

    // Only initialize if not already present
    if (!state.histories[key]) {
      set({
        histories: {
          ...state.histories,
          [key]: createEmptyEntry(initialValue),
        },
      })
      logger.debug('Initialized text history', { blockId, subBlockId })
    }
  },

  recordChange: (blockId: string, subBlockId: string, value: string) => {
    const key = getKey(blockId, subBlockId)
    const state = get()
    let entry = state.histories[key]

    // Initialize if needed
    // NOTE(review): if initHistory was never called for this field, the
    // baseline snapshot is '' — undoing past the first commit then yields
    // the empty string rather than the field's true initial value. Confirm
    // callers always call initHistory on mount.
    if (!entry) {
      entry = createEmptyEntry('')
    }

    // Clear any existing debounce timer
    if (entry.debounceTimer) {
      clearTimeout(entry.debounceTimer)
    }

    // Set up new debounce timer; commit happens only after input goes quiet
    const timer = setTimeout(() => {
      get().commitPending(blockId, subBlockId)
    }, DEBOUNCE_DELAY_MS)

    // Update entry with pending value
    // (re-read histories via get() so a commit that raced in between the
    // earlier get() and this set() is not clobbered)
    set({
      histories: {
        ...get().histories,
        [key]: {
          ...entry,
          pending: value,
          debounceTimer: timer,
          lastChangeAt: Date.now(),
        },
      },
    })
  },

  commitPending: (blockId: string, subBlockId: string) => {
    const key = getKey(blockId, subBlockId)
    const state = get()
    const entry = state.histories[key]

    // Nothing to commit if the field has no history or no pending edit
    if (!entry || entry.pending === null) {
      return
    }

    // Clear the timer
    if (entry.debounceTimer) {
      clearTimeout(entry.debounceTimer)
    }

    const currentValue = entry.stack[entry.index]

    // Don't commit if value hasn't changed — just drop the pending state
    if (entry.pending === currentValue) {
      set({
        histories: {
          ...state.histories,
          [key]: {
            ...entry,
            pending: null,
            debounceTimer: null,
          },
        },
      })
      return
    }

    // Truncate any redo history (we're branching)
    const newStack = entry.stack.slice(0, entry.index + 1)

    // Add the new value
    newStack.push(entry.pending)

    // Enforce max size (remove oldest entries)
    while (newStack.length > MAX_HISTORY_SIZE) {
      newStack.shift()
    }

    // After a commit the cursor always points at the newest snapshot
    const newIndex = newStack.length - 1

    set({
      histories: {
        ...state.histories,
        [key]: {
          stack: newStack,
          index: newIndex,
          pending: null,
          debounceTimer: null,
          lastChangeAt: entry.lastChangeAt,
        },
      },
    })

    logger.debug('Committed text change to history', {
      blockId,
      subBlockId,
      stackSize: newStack.length,
      index: newIndex,
    })
  },

  undo: (blockId: string, subBlockId: string) => {
    const key = getKey(blockId, subBlockId)
    const state = get()
    const entry = state.histories[key]

    if (!entry) {
      return null
    }

    // Commit any pending changes first so the in-progress edit becomes the
    // top of the stack and is what gets undone
    if (entry.pending !== null) {
      get().commitPending(blockId, subBlockId)
      // Re-fetch after commit (commitPending replaced the entry via set)
      const updatedEntry = get().histories[key]
      if (!updatedEntry || updatedEntry.index <= 0) {
        return null
      }
      const newIndex = updatedEntry.index - 1
      set({
        histories: {
          ...get().histories,
          [key]: {
            ...updatedEntry,
            index: newIndex,
          },
        },
      })
      logger.debug('Text undo', { blockId, subBlockId, newIndex })
      return updatedEntry.stack[newIndex]
    }

    // No pending edit: simply step the cursor back if possible
    if (entry.index <= 0) {
      return null
    }

    const newIndex = entry.index - 1
    set({
      histories: {
        ...state.histories,
        [key]: {
          ...entry,
          index: newIndex,
        },
      },
    })

    logger.debug('Text undo', { blockId, subBlockId, newIndex })
    return entry.stack[newIndex]
  },

  redo: (blockId: string, subBlockId: string) => {
    const key = getKey(blockId, subBlockId)
    const state = get()
    const entry = state.histories[key]

    // Redo is only possible when the cursor sits before the newest snapshot
    if (!entry || entry.index >= entry.stack.length - 1) {
      return null
    }

    const newIndex = entry.index + 1
    set({
      histories: {
        ...state.histories,
        [key]: {
          ...entry,
          index: newIndex,
        },
      },
    })

    logger.debug('Text redo', { blockId, subBlockId, newIndex })
    return entry.stack[newIndex]
  },

  canUndo: (blockId: string, subBlockId: string) => {
    const key = getKey(blockId, subBlockId)
    const entry = get().histories[key]
    if (!entry) return false
    // Can undo if we have pending changes or index > 0
    return entry.pending !== null || entry.index > 0
  },

  canRedo: (blockId: string, subBlockId: string) => {
    const key = getKey(blockId, subBlockId)
    const entry = get().histories[key]
    if (!entry) return false
    return entry.index < entry.stack.length - 1
  },

  clearHistory: (blockId: string, subBlockId: string) => {
    const key = getKey(blockId, subBlockId)
    const state = get()
    const entry = state.histories[key]

    // Cancel any in-flight debounce so it can't commit after removal
    if (entry?.debounceTimer) {
      clearTimeout(entry.debounceTimer)
    }

    // Drop this key while keeping every other field's history
    const { [key]: _, ...rest } = state.histories
    set({ histories: rest })

    logger.debug('Cleared text history', { blockId, subBlockId })
  },

  clearBlockHistory: (blockId: string) => {
    const state = get()
    const prefix = `${blockId}:`
    const newHistories: Record<string, TextHistoryEntry> = {}

    // Keep only entries belonging to other blocks; cancel timers on the
    // entries being discarded so they can't fire later
    for (const [key, entry] of Object.entries(state.histories)) {
      if (key.startsWith(prefix)) {
        if (entry.debounceTimer) {
          clearTimeout(entry.debounceTimer)
        }
      } else {
        newHistories[key] = entry
      }
    }

    set({ histories: newHistories })
    logger.debug('Cleared all text history for block', { blockId })
  },
}))
|
||||
217
apps/sim/tools/jira/get_users.ts
Normal file
@@ -0,0 +1,217 @@
|
||||
import { getJiraCloudId } from '@/tools/jira/utils'
|
||||
import type { ToolConfig, ToolResponse } from '@/tools/types'
|
||||
|
||||
/** Input parameters for the Jira "get users" tool. */
export interface JiraGetUsersParams {
  /** OAuth access token sent as the Bearer credential. */
  accessToken: string
  /** Jira site domain, e.g. yourcompany.atlassian.net. */
  domain: string
  /** When set, fetch only the single user with this account ID. */
  accountId?: string
  /** Pagination offset: index of the first user to return (default 0). */
  startAt?: number
  /** Maximum number of users to return per page (default 50). */
  maxResults?: number
  /** Atlassian cloud ID; resolved from `domain` when omitted. */
  cloudId?: string
}
|
||||
|
||||
/**
 * Normalized Jira user record as returned in this tool's output.
 * Fields mirror the subset of the Jira user payload the tool copies over.
 */
export interface JiraUser {
  /** Stable Atlassian account identifier. */
  accountId: string
  /** Account category (e.g. regular vs app account) — optional in payload. */
  accountType?: string
  /** Whether the account is currently active. */
  active: boolean
  /** Display name shown in Jira. */
  displayName: string
  /** Email address; may be absent depending on privacy settings. */
  emailAddress?: string
  /** Avatar image URLs keyed by pixel size. */
  avatarUrls?: {
    '16x16'?: string
    '24x24'?: string
    '32x32'?: string
    '48x48'?: string
  }
  /** User's time zone identifier, if exposed. */
  timeZone?: string
  /** Self-link URL to the user resource. */
  self?: string
}
|
||||
|
||||
/** Tool response shape for jira_get_users. */
export interface JiraGetUsersResponse extends ToolResponse {
  output: {
    /** ISO-8601 timestamp of when the response was produced. */
    ts: string
    /** Users fetched (a single-element array when accountId was given). */
    users: JiraUser[]
    /** Number of users returned in this response. */
    total?: number
    /** Pagination offset that was requested. */
    startAt?: number
    /** Page size limit that was requested. */
    maxResults?: number
  }
}
|
||||
|
||||
export const jiraGetUsersTool: ToolConfig<JiraGetUsersParams, JiraGetUsersResponse> = {
|
||||
id: 'jira_get_users',
|
||||
name: 'Jira Get Users',
|
||||
description:
|
||||
'Get Jira users. If an account ID is provided, returns a single user. Otherwise, returns a list of all users.',
|
||||
version: '1.0.0',
|
||||
|
||||
oauth: {
|
||||
required: true,
|
||||
provider: 'jira',
|
||||
},
|
||||
|
||||
params: {
|
||||
accessToken: {
|
||||
type: 'string',
|
||||
required: true,
|
||||
visibility: 'hidden',
|
||||
description: 'OAuth access token for Jira',
|
||||
},
|
||||
domain: {
|
||||
type: 'string',
|
||||
required: true,
|
||||
visibility: 'user-only',
|
||||
description: 'Your Jira domain (e.g., yourcompany.atlassian.net)',
|
||||
},
|
||||
accountId: {
|
||||
type: 'string',
|
||||
required: false,
|
||||
visibility: 'user-or-llm',
|
||||
description:
|
||||
'Optional account ID to get a specific user. If not provided, returns all users.',
|
||||
},
|
||||
startAt: {
|
||||
type: 'number',
|
||||
required: false,
|
||||
visibility: 'user-or-llm',
|
||||
description: 'The index of the first user to return (for pagination, default: 0)',
|
||||
},
|
||||
maxResults: {
|
||||
type: 'number',
|
||||
required: false,
|
||||
visibility: 'user-or-llm',
|
||||
description: 'Maximum number of users to return (default: 50)',
|
||||
},
|
||||
cloudId: {
|
||||
type: 'string',
|
||||
required: false,
|
||||
visibility: 'hidden',
|
||||
description:
|
||||
'Jira Cloud ID for the instance. If not provided, it will be fetched using the domain.',
|
||||
},
|
||||
},
|
||||
|
||||
request: {
|
||||
url: (params: JiraGetUsersParams) => {
|
||||
if (params.cloudId) {
|
||||
if (params.accountId) {
|
||||
return `https://api.atlassian.com/ex/jira/${params.cloudId}/rest/api/3/user?accountId=${encodeURIComponent(params.accountId)}`
|
||||
}
|
||||
const queryParams = new URLSearchParams()
|
||||
if (params.startAt !== undefined) queryParams.append('startAt', String(params.startAt))
|
||||
if (params.maxResults !== undefined)
|
||||
queryParams.append('maxResults', String(params.maxResults))
|
||||
const queryString = queryParams.toString()
|
||||
return `https://api.atlassian.com/ex/jira/${params.cloudId}/rest/api/3/users/search${queryString ? `?${queryString}` : ''}`
|
||||
}
|
||||
return 'https://api.atlassian.com/oauth/token/accessible-resources'
|
||||
},
|
||||
method: 'GET',
|
||||
headers: (params: JiraGetUsersParams) => ({
|
||||
Accept: 'application/json',
|
||||
Authorization: `Bearer ${params.accessToken}`,
|
||||
}),
|
||||
},
|
||||
|
||||
transformResponse: async (response: Response, params?: JiraGetUsersParams) => {
|
||||
if (!params?.cloudId) {
|
||||
const cloudId = await getJiraCloudId(params!.domain, params!.accessToken)
|
||||
|
||||
let usersUrl: string
|
||||
if (params!.accountId) {
|
||||
usersUrl = `https://api.atlassian.com/ex/jira/${cloudId}/rest/api/3/user?accountId=${encodeURIComponent(params!.accountId)}`
|
||||
} else {
|
||||
const queryParams = new URLSearchParams()
|
||||
if (params!.startAt !== undefined) queryParams.append('startAt', String(params!.startAt))
|
||||
if (params!.maxResults !== undefined)
|
||||
queryParams.append('maxResults', String(params!.maxResults))
|
||||
const queryString = queryParams.toString()
|
||||
usersUrl = `https://api.atlassian.com/ex/jira/${cloudId}/rest/api/3/users/search${queryString ? `?${queryString}` : ''}`
|
||||
}
|
||||
|
||||
const usersResponse = await fetch(usersUrl, {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
Accept: 'application/json',
|
||||
Authorization: `Bearer ${params!.accessToken}`,
|
||||
},
|
||||
})
|
||||
|
||||
if (!usersResponse.ok) {
|
||||
let message = `Failed to get Jira users (${usersResponse.status})`
|
||||
try {
|
||||
const err = await usersResponse.json()
|
||||
message = err?.errorMessages?.join(', ') || err?.message || message
|
||||
} catch (_e) {}
|
||||
throw new Error(message)
|
||||
}
|
||||
|
||||
const data = await usersResponse.json()
|
||||
|
||||
const users = params!.accountId ? [data] : data
|
||||
|
||||
return {
|
||||
success: true,
|
||||
output: {
|
||||
ts: new Date().toISOString(),
|
||||
users: users.map((user: any) => ({
|
||||
accountId: user.accountId,
|
||||
accountType: user.accountType,
|
||||
active: user.active,
|
||||
displayName: user.displayName,
|
||||
emailAddress: user.emailAddress,
|
||||
avatarUrls: user.avatarUrls,
|
||||
timeZone: user.timeZone,
|
||||
self: user.self,
|
||||
})),
|
||||
total: params!.accountId ? 1 : users.length,
|
||||
startAt: params!.startAt || 0,
|
||||
maxResults: params!.maxResults || 50,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
if (!response.ok) {
|
||||
let message = `Failed to get Jira users (${response.status})`
|
||||
try {
|
||||
const err = await response.json()
|
||||
message = err?.errorMessages?.join(', ') || err?.message || message
|
||||
} catch (_e) {}
|
||||
throw new Error(message)
|
||||
}
|
||||
|
||||
const data = await response.json()
|
||||
|
||||
const users = params?.accountId ? [data] : data
|
||||
|
||||
return {
|
||||
success: true,
|
||||
output: {
|
||||
ts: new Date().toISOString(),
|
||||
users: users.map((user: any) => ({
|
||||
accountId: user.accountId,
|
||||
accountType: user.accountType,
|
||||
active: user.active,
|
||||
displayName: user.displayName,
|
||||
emailAddress: user.emailAddress,
|
||||
avatarUrls: user.avatarUrls,
|
||||
timeZone: user.timeZone,
|
||||
self: user.self,
|
||||
})),
|
||||
total: params?.accountId ? 1 : users.length,
|
||||
startAt: params?.startAt || 0,
|
||||
maxResults: params?.maxResults || 50,
|
||||
},
|
||||
}
|
||||
},
|
||||
|
||||
outputs: {
|
||||
ts: { type: 'string', description: 'Timestamp of the operation' },
|
||||
users: {
|
||||
type: 'json',
|
||||
description:
|
||||
'Array of users with accountId, displayName, emailAddress, active status, and avatarUrls',
|
||||
},
|
||||
total: { type: 'number', description: 'Total number of users returned' },
|
||||
startAt: { type: 'number', description: 'Pagination start index' },
|
||||
maxResults: { type: 'number', description: 'Maximum results per page' },
|
||||
},
|
||||
}
|
||||
@@ -11,6 +11,7 @@ import { jiraDeleteIssueLinkTool } from '@/tools/jira/delete_issue_link'
|
||||
import { jiraDeleteWorklogTool } from '@/tools/jira/delete_worklog'
|
||||
import { jiraGetAttachmentsTool } from '@/tools/jira/get_attachments'
|
||||
import { jiraGetCommentsTool } from '@/tools/jira/get_comments'
|
||||
import { jiraGetUsersTool } from '@/tools/jira/get_users'
|
||||
import { jiraGetWorklogsTool } from '@/tools/jira/get_worklogs'
|
||||
import { jiraRemoveWatcherTool } from '@/tools/jira/remove_watcher'
|
||||
import { jiraRetrieveTool } from '@/tools/jira/retrieve'
|
||||
@@ -44,4 +45,5 @@ export {
|
||||
jiraDeleteIssueLinkTool,
|
||||
jiraAddWatcherTool,
|
||||
jiraRemoveWatcherTool,
|
||||
jiraGetUsersTool,
|
||||
}
|
||||
|
||||
@@ -69,6 +69,12 @@ export interface JiraWriteParams {
|
||||
cloudId?: string
|
||||
issueType: string
|
||||
parent?: { key: string }
|
||||
labels?: string[]
|
||||
duedate?: string
|
||||
reporter?: string
|
||||
environment?: string
|
||||
customFieldId?: string
|
||||
customFieldValue?: string
|
||||
}
|
||||
|
||||
export interface JiraWriteResponse extends ToolResponse {
|
||||
|
||||
@@ -46,14 +46,14 @@ export const jiraWriteTool: ToolConfig<JiraWriteParams, JiraWriteResponse> = {
|
||||
priority: {
|
||||
type: 'string',
|
||||
required: false,
|
||||
visibility: 'hidden',
|
||||
description: 'Priority for the issue',
|
||||
visibility: 'user-or-llm',
|
||||
description: 'Priority ID or name for the issue (e.g., "10000" or "High")',
|
||||
},
|
||||
assignee: {
|
||||
type: 'string',
|
||||
required: false,
|
||||
visibility: 'hidden',
|
||||
description: 'Assignee for the issue',
|
||||
visibility: 'user-or-llm',
|
||||
description: 'Assignee account ID for the issue',
|
||||
},
|
||||
cloudId: {
|
||||
type: 'string',
|
||||
@@ -68,6 +68,42 @@ export const jiraWriteTool: ToolConfig<JiraWriteParams, JiraWriteResponse> = {
|
||||
visibility: 'hidden',
|
||||
description: 'Type of issue to create (e.g., Task, Story)',
|
||||
},
|
||||
labels: {
|
||||
type: 'array',
|
||||
required: false,
|
||||
visibility: 'user-or-llm',
|
||||
description: 'Labels for the issue (array of label names)',
|
||||
},
|
||||
duedate: {
|
||||
type: 'string',
|
||||
required: false,
|
||||
visibility: 'user-or-llm',
|
||||
description: 'Due date for the issue (format: YYYY-MM-DD)',
|
||||
},
|
||||
reporter: {
|
||||
type: 'string',
|
||||
required: false,
|
||||
visibility: 'user-or-llm',
|
||||
description: 'Reporter account ID for the issue',
|
||||
},
|
||||
environment: {
|
||||
type: 'string',
|
||||
required: false,
|
||||
visibility: 'user-or-llm',
|
||||
description: 'Environment information for the issue',
|
||||
},
|
||||
customFieldId: {
|
||||
type: 'string',
|
||||
required: false,
|
||||
visibility: 'user-or-llm',
|
||||
description: 'Custom field ID (e.g., customfield_10001)',
|
||||
},
|
||||
customFieldValue: {
|
||||
type: 'string',
|
||||
required: false,
|
||||
visibility: 'user-or-llm',
|
||||
description: 'Value for the custom field',
|
||||
},
|
||||
},
|
||||
|
||||
request: {
|
||||
@@ -89,6 +125,12 @@ export const jiraWriteTool: ToolConfig<JiraWriteParams, JiraWriteResponse> = {
|
||||
cloudId: params.cloudId,
|
||||
issueType: params.issueType,
|
||||
parent: params.parent,
|
||||
labels: params.labels,
|
||||
duedate: params.duedate,
|
||||
reporter: params.reporter,
|
||||
environment: params.environment,
|
||||
customFieldId: params.customFieldId,
|
||||
customFieldValue: params.customFieldValue,
|
||||
}
|
||||
},
|
||||
},
|
||||
@@ -134,5 +176,6 @@ export const jiraWriteTool: ToolConfig<JiraWriteParams, JiraWriteResponse> = {
|
||||
issueKey: { type: 'string', description: 'Created issue key (e.g., PROJ-123)' },
|
||||
summary: { type: 'string', description: 'Issue summary' },
|
||||
url: { type: 'string', description: 'URL to the created issue' },
|
||||
assigneeId: { type: 'string', description: 'Account ID of the assigned user (if assigned)' },
|
||||
},
|
||||
}
|
||||
|
||||
@@ -463,6 +463,7 @@ import {
|
||||
jiraDeleteWorklogTool,
|
||||
jiraGetAttachmentsTool,
|
||||
jiraGetCommentsTool,
|
||||
jiraGetUsersTool,
|
||||
jiraGetWorklogsTool,
|
||||
jiraRemoveWatcherTool,
|
||||
jiraRetrieveTool,
|
||||
@@ -1478,6 +1479,7 @@ export const tools: Record<string, ToolConfig> = {
|
||||
jira_delete_issue_link: jiraDeleteIssueLinkTool,
|
||||
jira_add_watcher: jiraAddWatcherTool,
|
||||
jira_remove_watcher: jiraRemoveWatcherTool,
|
||||
jira_get_users: jiraGetUsersTool,
|
||||
kalshi_get_markets: kalshiGetMarketsTool,
|
||||
kalshi_get_market: kalshiGetMarketTool,
|
||||
kalshi_get_events: kalshiGetEventsTool,
|
||||
|
||||
@@ -1,39 +1,12 @@
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import type {
|
||||
SalesforceCreateAccountParams,
|
||||
SalesforceCreateAccountResponse,
|
||||
} from '@/tools/salesforce/types'
|
||||
import type { ToolConfig } from '@/tools/types'
|
||||
|
||||
const logger = createLogger('SalesforceCreateAccount')
|
||||
|
||||
export interface SalesforceCreateAccountParams {
|
||||
accessToken: string
|
||||
idToken?: string
|
||||
instanceUrl?: string
|
||||
name: string
|
||||
type?: string
|
||||
industry?: string
|
||||
phone?: string
|
||||
website?: string
|
||||
billingStreet?: string
|
||||
billingCity?: string
|
||||
billingState?: string
|
||||
billingPostalCode?: string
|
||||
billingCountry?: string
|
||||
description?: string
|
||||
annualRevenue?: string
|
||||
numberOfEmployees?: string
|
||||
}
|
||||
|
||||
export interface SalesforceCreateAccountResponse {
|
||||
success: boolean
|
||||
output: {
|
||||
id: string
|
||||
success: boolean
|
||||
created: boolean
|
||||
metadata: {
|
||||
operation: 'create_account'
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export const salesforceCreateAccountTool: ToolConfig<
|
||||
SalesforceCreateAccountParams,
|
||||
SalesforceCreateAccountResponse
|
||||
|
||||
@@ -1,30 +1,9 @@
|
||||
import type {
|
||||
SalesforceCreateCaseParams,
|
||||
SalesforceCreateCaseResponse,
|
||||
} from '@/tools/salesforce/types'
|
||||
import { getInstanceUrl } from '@/tools/salesforce/utils'
|
||||
import type { ToolConfig } from '@/tools/types'
|
||||
import { getInstanceUrl } from './utils'
|
||||
|
||||
export interface SalesforceCreateCaseParams {
|
||||
accessToken: string
|
||||
idToken?: string
|
||||
instanceUrl?: string
|
||||
subject: string
|
||||
status?: string
|
||||
priority?: string
|
||||
origin?: string
|
||||
contactId?: string
|
||||
accountId?: string
|
||||
description?: string
|
||||
}
|
||||
|
||||
export interface SalesforceCreateCaseResponse {
|
||||
success: boolean
|
||||
output: {
|
||||
id: string
|
||||
success: boolean
|
||||
created: boolean
|
||||
metadata: {
|
||||
operation: 'create_case'
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export const salesforceCreateCaseTool: ToolConfig<
|
||||
SalesforceCreateCaseParams,
|
||||
|
||||
@@ -1,38 +1,13 @@
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import type {
|
||||
SalesforceCreateContactParams,
|
||||
SalesforceCreateContactResponse,
|
||||
} from '@/tools/salesforce/types'
|
||||
import { getInstanceUrl } from '@/tools/salesforce/utils'
|
||||
import type { ToolConfig } from '@/tools/types'
|
||||
import { getInstanceUrl } from './utils'
|
||||
|
||||
const logger = createLogger('SalesforceContacts')
|
||||
|
||||
export interface SalesforceCreateContactParams {
|
||||
accessToken: string
|
||||
idToken?: string
|
||||
instanceUrl?: string
|
||||
lastName: string
|
||||
firstName?: string
|
||||
email?: string
|
||||
phone?: string
|
||||
accountId?: string
|
||||
title?: string
|
||||
department?: string
|
||||
mailingStreet?: string
|
||||
mailingCity?: string
|
||||
mailingState?: string
|
||||
mailingPostalCode?: string
|
||||
mailingCountry?: string
|
||||
description?: string
|
||||
}
|
||||
|
||||
export interface SalesforceCreateContactResponse {
|
||||
success: boolean
|
||||
output: {
|
||||
id: string
|
||||
success: boolean
|
||||
created: boolean
|
||||
metadata: { operation: 'create_contact' }
|
||||
}
|
||||
}
|
||||
|
||||
export const salesforceCreateContactTool: ToolConfig<
|
||||
SalesforceCreateContactParams,
|
||||
SalesforceCreateContactResponse
|
||||
|
||||
@@ -1,32 +1,9 @@
|
||||
import type {
|
||||
SalesforceCreateLeadParams,
|
||||
SalesforceCreateLeadResponse,
|
||||
} from '@/tools/salesforce/types'
|
||||
import { getInstanceUrl } from '@/tools/salesforce/utils'
|
||||
import type { ToolConfig } from '@/tools/types'
|
||||
import { getInstanceUrl } from './utils'
|
||||
|
||||
export interface SalesforceCreateLeadParams {
|
||||
accessToken: string
|
||||
idToken?: string
|
||||
instanceUrl?: string
|
||||
lastName: string
|
||||
company: string
|
||||
firstName?: string
|
||||
email?: string
|
||||
phone?: string
|
||||
status?: string
|
||||
leadSource?: string
|
||||
title?: string
|
||||
description?: string
|
||||
}
|
||||
|
||||
export interface SalesforceCreateLeadResponse {
|
||||
success: boolean
|
||||
output: {
|
||||
id: string
|
||||
success: boolean
|
||||
created: boolean
|
||||
metadata: {
|
||||
operation: 'create_lead'
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export const salesforceCreateLeadTool: ToolConfig<
|
||||
SalesforceCreateLeadParams,
|
||||
|
||||
@@ -1,30 +1,9 @@
|
||||
import type {
|
||||
SalesforceCreateOpportunityParams,
|
||||
SalesforceCreateOpportunityResponse,
|
||||
} from '@/tools/salesforce/types'
|
||||
import { getInstanceUrl } from '@/tools/salesforce/utils'
|
||||
import type { ToolConfig } from '@/tools/types'
|
||||
import { getInstanceUrl } from './utils'
|
||||
|
||||
export interface SalesforceCreateOpportunityParams {
|
||||
accessToken: string
|
||||
idToken?: string
|
||||
instanceUrl?: string
|
||||
name: string
|
||||
stageName: string
|
||||
closeDate: string
|
||||
accountId?: string
|
||||
amount?: string
|
||||
probability?: string
|
||||
description?: string
|
||||
}
|
||||
|
||||
export interface SalesforceCreateOpportunityResponse {
|
||||
success: boolean
|
||||
output: {
|
||||
id: string
|
||||
success: boolean
|
||||
created: boolean
|
||||
metadata: {
|
||||
operation: 'create_opportunity'
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export const salesforceCreateOpportunityTool: ToolConfig<
|
||||
SalesforceCreateOpportunityParams,
|
||||
|
||||
@@ -1,30 +1,9 @@
|
||||
import type {
|
||||
SalesforceCreateTaskParams,
|
||||
SalesforceCreateTaskResponse,
|
||||
} from '@/tools/salesforce/types'
|
||||
import { getInstanceUrl } from '@/tools/salesforce/utils'
|
||||
import type { ToolConfig } from '@/tools/types'
|
||||
import { getInstanceUrl } from './utils'
|
||||
|
||||
export interface SalesforceCreateTaskParams {
|
||||
accessToken: string
|
||||
idToken?: string
|
||||
instanceUrl?: string
|
||||
subject: string
|
||||
status?: string
|
||||
priority?: string
|
||||
activityDate?: string
|
||||
whoId?: string
|
||||
whatId?: string
|
||||
description?: string
|
||||
}
|
||||
|
||||
export interface SalesforceCreateTaskResponse {
|
||||
success: boolean
|
||||
output: {
|
||||
id: string
|
||||
success: boolean
|
||||
created: boolean
|
||||
metadata: {
|
||||
operation: 'create_task'
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export const salesforceCreateTaskTool: ToolConfig<
|
||||
SalesforceCreateTaskParams,
|
||||
|
||||
@@ -1,26 +1,12 @@
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import type {
|
||||
SalesforceDeleteAccountParams,
|
||||
SalesforceDeleteAccountResponse,
|
||||
} from '@/tools/salesforce/types'
|
||||
import type { ToolConfig } from '@/tools/types'
|
||||
|
||||
const logger = createLogger('SalesforceDeleteAccount')
|
||||
|
||||
export interface SalesforceDeleteAccountParams {
|
||||
accessToken: string
|
||||
idToken?: string
|
||||
instanceUrl?: string
|
||||
accountId: string
|
||||
}
|
||||
|
||||
export interface SalesforceDeleteAccountResponse {
|
||||
success: boolean
|
||||
output: {
|
||||
id: string
|
||||
deleted: boolean
|
||||
metadata: {
|
||||
operation: 'delete_account'
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export const salesforceDeleteAccountTool: ToolConfig<
|
||||
SalesforceDeleteAccountParams,
|
||||
SalesforceDeleteAccountResponse
|
||||
|
||||
@@ -1,23 +1,9 @@
|
||||
import type {
|
||||
SalesforceDeleteCaseParams,
|
||||
SalesforceDeleteCaseResponse,
|
||||
} from '@/tools/salesforce/types'
|
||||
import { getInstanceUrl } from '@/tools/salesforce/utils'
|
||||
import type { ToolConfig } from '@/tools/types'
|
||||
import { getInstanceUrl } from './utils'
|
||||
|
||||
export interface SalesforceDeleteCaseParams {
|
||||
accessToken: string
|
||||
idToken?: string
|
||||
instanceUrl?: string
|
||||
caseId: string
|
||||
}
|
||||
|
||||
export interface SalesforceDeleteCaseResponse {
|
||||
success: boolean
|
||||
output: {
|
||||
id: string
|
||||
deleted: boolean
|
||||
metadata: {
|
||||
operation: 'delete_case'
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export const salesforceDeleteCaseTool: ToolConfig<
|
||||
SalesforceDeleteCaseParams,
|
||||
|
||||
@@ -1,25 +1,13 @@
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import type {
|
||||
SalesforceDeleteContactParams,
|
||||
SalesforceDeleteContactResponse,
|
||||
} from '@/tools/salesforce/types'
|
||||
import { getInstanceUrl } from '@/tools/salesforce/utils'
|
||||
import type { ToolConfig } from '@/tools/types'
|
||||
import { getInstanceUrl } from './utils'
|
||||
|
||||
const logger = createLogger('SalesforceContacts')
|
||||
|
||||
export interface SalesforceDeleteContactParams {
|
||||
accessToken: string
|
||||
idToken?: string
|
||||
instanceUrl?: string
|
||||
contactId: string
|
||||
}
|
||||
|
||||
export interface SalesforceDeleteContactResponse {
|
||||
success: boolean
|
||||
output: {
|
||||
id: string
|
||||
deleted: boolean
|
||||
metadata: { operation: 'delete_contact' }
|
||||
}
|
||||
}
|
||||
|
||||
export const salesforceDeleteContactTool: ToolConfig<
|
||||
SalesforceDeleteContactParams,
|
||||
SalesforceDeleteContactResponse
|
||||
|
||||
@@ -1,23 +1,9 @@
|
||||
import type {
|
||||
SalesforceDeleteLeadParams,
|
||||
SalesforceDeleteLeadResponse,
|
||||
} from '@/tools/salesforce/types'
|
||||
import { getInstanceUrl } from '@/tools/salesforce/utils'
|
||||
import type { ToolConfig } from '@/tools/types'
|
||||
import { getInstanceUrl } from './utils'
|
||||
|
||||
export interface SalesforceDeleteLeadParams {
|
||||
accessToken: string
|
||||
idToken?: string
|
||||
instanceUrl?: string
|
||||
leadId: string
|
||||
}
|
||||
|
||||
export interface SalesforceDeleteLeadResponse {
|
||||
success: boolean
|
||||
output: {
|
||||
id: string
|
||||
deleted: boolean
|
||||
metadata: {
|
||||
operation: 'delete_lead'
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export const salesforceDeleteLeadTool: ToolConfig<
|
||||
SalesforceDeleteLeadParams,
|
||||
|
||||
@@ -1,23 +1,9 @@
|
||||
import type {
|
||||
SalesforceDeleteOpportunityParams,
|
||||
SalesforceDeleteOpportunityResponse,
|
||||
} from '@/tools/salesforce/types'
|
||||
import { getInstanceUrl } from '@/tools/salesforce/utils'
|
||||
import type { ToolConfig } from '@/tools/types'
|
||||
import { getInstanceUrl } from './utils'
|
||||
|
||||
export interface SalesforceDeleteOpportunityParams {
|
||||
accessToken: string
|
||||
idToken?: string
|
||||
instanceUrl?: string
|
||||
opportunityId: string
|
||||
}
|
||||
|
||||
export interface SalesforceDeleteOpportunityResponse {
|
||||
success: boolean
|
||||
output: {
|
||||
id: string
|
||||
deleted: boolean
|
||||
metadata: {
|
||||
operation: 'delete_opportunity'
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export const salesforceDeleteOpportunityTool: ToolConfig<
|
||||
SalesforceDeleteOpportunityParams,
|
||||
|
||||
@@ -1,23 +1,9 @@
|
||||
import type {
|
||||
SalesforceDeleteTaskParams,
|
||||
SalesforceDeleteTaskResponse,
|
||||
} from '@/tools/salesforce/types'
|
||||
import { getInstanceUrl } from '@/tools/salesforce/utils'
|
||||
import type { ToolConfig } from '@/tools/types'
|
||||
import { getInstanceUrl } from './utils'
|
||||
|
||||
export interface SalesforceDeleteTaskParams {
|
||||
accessToken: string
|
||||
idToken?: string
|
||||
instanceUrl?: string
|
||||
taskId: string
|
||||
}
|
||||
|
||||
export interface SalesforceDeleteTaskResponse {
|
||||
success: boolean
|
||||
output: {
|
||||
id: string
|
||||
deleted: boolean
|
||||
metadata: {
|
||||
operation: 'delete_task'
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export const salesforceDeleteTaskTool: ToolConfig<
|
||||
SalesforceDeleteTaskParams,
|
||||
|
||||
@@ -1,38 +1,13 @@
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import type {
|
||||
SalesforceDescribeObjectParams,
|
||||
SalesforceDescribeObjectResponse,
|
||||
} from '@/tools/salesforce/types'
|
||||
import { extractErrorMessage, getInstanceUrl } from '@/tools/salesforce/utils'
|
||||
import type { ToolConfig } from '@/tools/types'
|
||||
import { extractErrorMessage, getInstanceUrl } from './utils'
|
||||
|
||||
const logger = createLogger('SalesforceQuery')
|
||||
|
||||
export interface SalesforceDescribeObjectParams {
|
||||
accessToken: string
|
||||
idToken?: string
|
||||
instanceUrl?: string
|
||||
objectName: string
|
||||
}
|
||||
|
||||
export interface SalesforceDescribeObjectResponse {
|
||||
success: boolean
|
||||
output: {
|
||||
objectName: string
|
||||
label?: string
|
||||
labelPlural?: string
|
||||
fields?: any[]
|
||||
keyPrefix?: string
|
||||
queryable?: boolean
|
||||
createable?: boolean
|
||||
updateable?: boolean
|
||||
deletable?: boolean
|
||||
childRelationships?: any[]
|
||||
recordTypeInfos?: any[]
|
||||
metadata: {
|
||||
operation: 'describe_object'
|
||||
fieldCount: number
|
||||
}
|
||||
success: boolean
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Describe a Salesforce object to get its metadata/fields
|
||||
* Useful for discovering available fields for queries
|
||||
|
||||
@@ -1,34 +1,6 @@
|
||||
import type { SalesforceGetCasesParams, SalesforceGetCasesResponse } from '@/tools/salesforce/types'
|
||||
import { getInstanceUrl } from '@/tools/salesforce/utils'
|
||||
import type { ToolConfig } from '@/tools/types'
|
||||
import { getInstanceUrl } from './utils'
|
||||
|
||||
export interface SalesforceGetCasesParams {
|
||||
accessToken: string
|
||||
idToken?: string
|
||||
instanceUrl?: string
|
||||
caseId?: string
|
||||
limit?: string
|
||||
fields?: string
|
||||
orderBy?: string
|
||||
}
|
||||
|
||||
export interface SalesforceGetCasesResponse {
|
||||
success: boolean
|
||||
output: {
|
||||
case?: any
|
||||
cases?: any[]
|
||||
paging?: {
|
||||
nextRecordsUrl?: string
|
||||
totalSize: number
|
||||
done: boolean
|
||||
}
|
||||
metadata: {
|
||||
operation: 'get_cases'
|
||||
totalReturned?: number
|
||||
hasMore?: boolean
|
||||
}
|
||||
success: boolean
|
||||
}
|
||||
}
|
||||
|
||||
export const salesforceGetCasesTool: ToolConfig<
|
||||
SalesforceGetCasesParams,
|
||||
|
||||
@@ -1,39 +1,13 @@
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import type {
|
||||
SalesforceGetContactsParams,
|
||||
SalesforceGetContactsResponse,
|
||||
} from '@/tools/salesforce/types'
|
||||
import { getInstanceUrl } from '@/tools/salesforce/utils'
|
||||
import type { ToolConfig } from '@/tools/types'
|
||||
import { getInstanceUrl } from './utils'
|
||||
|
||||
const logger = createLogger('SalesforceContacts')
|
||||
|
||||
export interface SalesforceGetContactsParams {
|
||||
accessToken: string
|
||||
idToken?: string
|
||||
instanceUrl?: string
|
||||
contactId?: string
|
||||
limit?: string
|
||||
fields?: string
|
||||
orderBy?: string
|
||||
}
|
||||
|
||||
export interface SalesforceGetContactsResponse {
|
||||
success: boolean
|
||||
output: {
|
||||
contacts?: any[]
|
||||
contact?: any
|
||||
paging?: {
|
||||
nextRecordsUrl?: string
|
||||
totalSize: number
|
||||
done: boolean
|
||||
}
|
||||
metadata: {
|
||||
operation: 'get_contacts'
|
||||
totalReturned?: number
|
||||
hasMore?: boolean
|
||||
singleContact?: boolean
|
||||
}
|
||||
success: boolean
|
||||
}
|
||||
}
|
||||
|
||||
export const salesforceGetContactsTool: ToolConfig<
|
||||
SalesforceGetContactsParams,
|
||||
SalesforceGetContactsResponse
|
||||
|
||||
@@ -1,32 +1,13 @@
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import type {
|
||||
SalesforceGetDashboardParams,
|
||||
SalesforceGetDashboardResponse,
|
||||
} from '@/tools/salesforce/types'
|
||||
import { extractErrorMessage, getInstanceUrl } from '@/tools/salesforce/utils'
|
||||
import type { ToolConfig } from '@/tools/types'
|
||||
import { extractErrorMessage, getInstanceUrl } from './utils'
|
||||
|
||||
const logger = createLogger('SalesforceDashboards')
|
||||
|
||||
export interface SalesforceGetDashboardParams {
|
||||
accessToken: string
|
||||
idToken?: string
|
||||
instanceUrl?: string
|
||||
dashboardId: string
|
||||
}
|
||||
|
||||
export interface SalesforceGetDashboardResponse {
|
||||
success: boolean
|
||||
output: {
|
||||
dashboard: any
|
||||
dashboardId: string
|
||||
components: any[]
|
||||
metadata: {
|
||||
operation: 'get_dashboard'
|
||||
dashboardName?: string
|
||||
folderId?: string
|
||||
runningUser?: any
|
||||
}
|
||||
success: boolean
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get details for a specific dashboard
|
||||
* @see https://developer.salesforce.com/docs/atlas.en-us.api_analytics.meta/api_analytics/sforce_analytics_rest_api_dashboard_results.htm
|
||||
|
||||
@@ -1,35 +1,6 @@
|
||||
import type { SalesforceGetLeadsParams, SalesforceGetLeadsResponse } from '@/tools/salesforce/types'
|
||||
import { getInstanceUrl } from '@/tools/salesforce/utils'
|
||||
import type { ToolConfig } from '@/tools/types'
|
||||
import { getInstanceUrl } from './utils'
|
||||
|
||||
export interface SalesforceGetLeadsParams {
|
||||
accessToken: string
|
||||
idToken?: string
|
||||
instanceUrl?: string
|
||||
leadId?: string
|
||||
limit?: string
|
||||
fields?: string
|
||||
orderBy?: string
|
||||
}
|
||||
|
||||
export interface SalesforceGetLeadsResponse {
|
||||
success: boolean
|
||||
output: {
|
||||
lead?: any
|
||||
leads?: any[]
|
||||
paging?: {
|
||||
nextRecordsUrl?: string
|
||||
totalSize: number
|
||||
done: boolean
|
||||
}
|
||||
metadata: {
|
||||
operation: 'get_leads'
|
||||
totalReturned?: number
|
||||
hasMore?: boolean
|
||||
singleLead?: boolean
|
||||
}
|
||||
success: boolean
|
||||
}
|
||||
}
|
||||
|
||||
export const salesforceGetLeadsTool: ToolConfig<
|
||||
SalesforceGetLeadsParams,
|
||||
|
||||
@@ -1,34 +1,9 @@
|
||||
import type {
|
||||
SalesforceGetOpportunitiesParams,
|
||||
SalesforceGetOpportunitiesResponse,
|
||||
} from '@/tools/salesforce/types'
|
||||
import { getInstanceUrl } from '@/tools/salesforce/utils'
|
||||
import type { ToolConfig } from '@/tools/types'
|
||||
import { getInstanceUrl } from './utils'
|
||||
|
||||
export interface SalesforceGetOpportunitiesParams {
|
||||
accessToken: string
|
||||
idToken?: string
|
||||
instanceUrl?: string
|
||||
opportunityId?: string
|
||||
limit?: string
|
||||
fields?: string
|
||||
orderBy?: string
|
||||
}
|
||||
|
||||
export interface SalesforceGetOpportunitiesResponse {
|
||||
success: boolean
|
||||
output: {
|
||||
opportunity?: any
|
||||
opportunities?: any[]
|
||||
paging?: {
|
||||
nextRecordsUrl?: string
|
||||
totalSize: number
|
||||
done: boolean
|
||||
}
|
||||
metadata: {
|
||||
operation: 'get_opportunities'
|
||||
totalReturned?: number
|
||||
hasMore?: boolean
|
||||
}
|
||||
success: boolean
|
||||
}
|
||||
}
|
||||
|
||||
export const salesforceGetOpportunitiesTool: ToolConfig<
|
||||
SalesforceGetOpportunitiesParams,
|
||||
|
||||
@@ -1,28 +1,13 @@
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import type {
|
||||
SalesforceGetReportParams,
|
||||
SalesforceGetReportResponse,
|
||||
} from '@/tools/salesforce/types'
|
||||
import { extractErrorMessage, getInstanceUrl } from '@/tools/salesforce/utils'
|
||||
import type { ToolConfig } from '@/tools/types'
|
||||
import { extractErrorMessage, getInstanceUrl } from './utils'
|
||||
|
||||
const logger = createLogger('SalesforceReports')
|
||||
|
||||
export interface SalesforceGetReportParams {
|
||||
accessToken: string
|
||||
idToken?: string
|
||||
instanceUrl?: string
|
||||
reportId: string
|
||||
}
|
||||
|
||||
export interface SalesforceGetReportResponse {
|
||||
success: boolean
|
||||
output: {
|
||||
report: any
|
||||
reportId: string
|
||||
metadata: {
|
||||
operation: 'get_report'
|
||||
}
|
||||
success: boolean
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get metadata for a specific report
|
||||
* @see https://developer.salesforce.com/docs/atlas.en-us.api_analytics.meta/api_analytics/sforce_analytics_rest_api_get_reportmetadata.htm
|
||||
|
||||
@@ -1,34 +1,6 @@
|
||||
import type { SalesforceGetTasksParams, SalesforceGetTasksResponse } from '@/tools/salesforce/types'
|
||||
import { getInstanceUrl } from '@/tools/salesforce/utils'
|
||||
import type { ToolConfig } from '@/tools/types'
|
||||
import { getInstanceUrl } from './utils'
|
||||
|
||||
export interface SalesforceGetTasksParams {
|
||||
accessToken: string
|
||||
idToken?: string
|
||||
instanceUrl?: string
|
||||
taskId?: string
|
||||
limit?: string
|
||||
fields?: string
|
||||
orderBy?: string
|
||||
}
|
||||
|
||||
export interface SalesforceGetTasksResponse {
|
||||
success: boolean
|
||||
output: {
|
||||
task?: any
|
||||
tasks?: any[]
|
||||
paging?: {
|
||||
nextRecordsUrl?: string
|
||||
totalSize: number
|
||||
done: boolean
|
||||
}
|
||||
metadata: {
|
||||
operation: 'get_tasks'
|
||||
totalReturned?: number
|
||||
hasMore?: boolean
|
||||
}
|
||||
success: boolean
|
||||
}
|
||||
}
|
||||
|
||||
export const salesforceGetTasksTool: ToolConfig<
|
||||
SalesforceGetTasksParams,
|
||||
|
||||