mirror of https://github.com/simstudioai/sim.git
synced 2026-01-10 15:38:00 -05:00

Compare commits: improvemen...v0.5.51
29 Commits
| SHA1 |
|---|
| 4fbec0a43f |
| d248557042 |
| 8215a819e5 |
| 155f544ce8 |
| 22f949a41c |
| f9aef6ae22 |
| 46b04a964d |
| 964b40de45 |
| 75aca00b6e |
| d25084e05d |
| 445932c1c8 |
| cc3f565d5e |
| 585f5e365b |
| 3792bdd252 |
| eb5d1f3e5b |
| 54ab82c8dd |
| f895bf469b |
| dd3209af06 |
| b6ba3b50a7 |
| b304233062 |
| 57e4b49bd6 |
| e12dd204ed |
| 3d9d9cbc54 |
| 0f4ec962ad |
| 4827866f9a |
| 3e697d9ed9 |
| 4431a1a484 |
| 4d1a9a3f22 |
| eb07a080fb |
@@ -123,8 +123,6 @@ Kontostand und Portfoliowert von Kalshi abrufen
| --------- | ---- | ----------- |
| `balance` | number | Kontostand in Cent |
| `portfolioValue` | number | Portfoliowert in Cent |
| `balanceDollars` | number | Kontostand in Dollar |
| `portfolioValueDollars` | number | Portfoliowert in Dollar |

### `kalshi_get_positions`
@@ -47,10 +47,11 @@ Daten aus einer Supabase-Tabelle abfragen
| Parameter | Typ | Erforderlich | Beschreibung |
| --------- | ---- | -------- | ----------- |
| `projectId` | string | Ja | Ihre Supabase-Projekt-ID \(z. B. jdrkgepadsdopsntdlom\) |
| `projectId` | string | Ja | Ihre Supabase-Projekt-ID \(z.B. jdrkgepadsdopsntdlom\) |
| `table` | string | Ja | Der Name der abzufragenden Supabase-Tabelle |
| `schema` | string | Nein | Datenbankschema für die Abfrage \(Standard: public\). Verwenden Sie dies, um auf Tabellen in anderen Schemas zuzugreifen. |
| `filter` | string | Nein | PostgREST-Filter \(z. B. "id=eq.123"\) |
| `select` | string | Nein | Zurückzugebende Spalten \(durch Komma getrennt\). Standard ist * \(alle Spalten\) |
| `filter` | string | Nein | PostgREST-Filter \(z.B. "id=eq.123"\) |
| `orderBy` | string | Nein | Spalte zum Sortieren \(fügen Sie DESC für absteigende Sortierung hinzu\) |
| `limit` | number | Nein | Maximale Anzahl der zurückzugebenden Zeilen |
| `apiKey` | string | Ja | Ihr Supabase Service Role Secret Key |
@@ -91,10 +92,11 @@ Eine einzelne Zeile aus einer Supabase-Tabelle basierend auf Filterkriterien abr
| Parameter | Typ | Erforderlich | Beschreibung |
| --------- | ---- | -------- | ----------- |
| `projectId` | string | Ja | Ihre Supabase-Projekt-ID \(z. B. jdrkgepadsdopsntdlom\) |
| `projectId` | string | Ja | Ihre Supabase-Projekt-ID \(z.B. jdrkgepadsdopsntdlom\) |
| `table` | string | Ja | Der Name der abzufragenden Supabase-Tabelle |
| `schema` | string | Nein | Datenbankschema für die Abfrage \(Standard: public\). Verwenden Sie dies, um auf Tabellen in anderen Schemas zuzugreifen. |
| `filter` | string | Ja | PostgREST-Filter zum Auffinden der spezifischen Zeile \(z. B. "id=eq.123"\) |
| `select` | string | Nein | Zurückzugebende Spalten \(durch Komma getrennt\). Standard ist * \(alle Spalten\) |
| `filter` | string | Ja | PostgREST-Filter zum Finden der spezifischen Zeile \(z.B. "id=eq.123"\) |
| `apiKey` | string | Ja | Ihr Supabase Service Role Secret Key |

#### Ausgabe
@@ -126,8 +126,6 @@ Retrieve your account balance and portfolio value from Kalshi
| --------- | ---- | ----------- |
| `balance` | number | Account balance in cents |
| `portfolioValue` | number | Portfolio value in cents |
| `balanceDollars` | number | Account balance in dollars |
| `portfolioValueDollars` | number | Portfolio value in dollars |

### `kalshi_get_positions`
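The hunks above drop the dollar-denominated fields from the documented output. If a workflow still needs dollars, the conversion from the remaining cent fields is trivial; a sketch, assuming a plain 100:1 relationship (the field names suggest it, but the diff itself does not confirm it):

```ts
interface KalshiBalance {
  balance: number // account balance in cents
  portfolioValue: number // portfolio value in cents
}

// Assumed cents-to-dollars conversion for the removed convenience fields:
function toDollars(b: KalshiBalance) {
  return {
    balanceDollars: b.balance / 100,
    portfolioValueDollars: b.portfolioValue / 100,
  }
}
```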
@@ -53,6 +53,7 @@ Query data from a Supabase table
| `projectId` | string | Yes | Your Supabase project ID \(e.g., jdrkgepadsdopsntdlom\) |
| `table` | string | Yes | The name of the Supabase table to query |
| `schema` | string | No | Database schema to query from \(default: public\). Use this to access tables in other schemas. |
| `select` | string | No | Columns to return \(comma-separated\). Defaults to * \(all columns\) |
| `filter` | string | No | PostgREST filter \(e.g., "id=eq.123"\) |
| `orderBy` | string | No | Column to order by \(add DESC for descending\) |
| `limit` | number | No | Maximum number of rows to return |
@@ -97,6 +98,7 @@ Get a single row from a Supabase table based on filter criteria
| `projectId` | string | Yes | Your Supabase project ID \(e.g., jdrkgepadsdopsntdlom\) |
| `table` | string | Yes | The name of the Supabase table to query |
| `schema` | string | No | Database schema to query from \(default: public\). Use this to access tables in other schemas. |
| `select` | string | No | Columns to return \(comma-separated\). Defaults to * \(all columns\) |
| `filter` | string | Yes | PostgREST filter to find the specific row \(e.g., "id=eq.123"\) |
| `apiKey` | string | Yes | Your Supabase service role secret key |
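These parameters map one-to-one onto Supabase's PostgREST REST interface. A minimal sketch of the request a query with them boils down to; the URL shape and headers follow standard Supabase REST conventions rather than anything shown in this diff, and the table name and filter are hypothetical:

```ts
const projectId = 'jdrkgepadsdopsntdlom' // placeholder ID from the docs above
const serviceRoleKey = process.env.SUPABASE_SERVICE_ROLE_KEY!

const params = new URLSearchParams({
  select: 'id,name,email', // `select`: comma-separated columns, defaults to *
  order: 'created_at.desc', // `orderBy` with DESC for descending
  limit: '10', // `limit`: maximum rows to return
})
// `filter` is a raw PostgREST expression, appended as column=operator.value:
const url = `https://${projectId}.supabase.co/rest/v1/users?${params}&status=eq.active`

const rows = await fetch(url, {
  headers: {
    apikey: serviceRoleKey, // `apiKey`: the service role secret
    Authorization: `Bearer ${serviceRoleKey}`,
  },
}).then((res) => res.json())
```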
@@ -122,9 +122,7 @@ Recuperar el saldo de tu cuenta y el valor de la cartera desde Kalshi
| Parámetro | Tipo | Descripción |
| --------- | ---- | ----------- |
| `balance` | number | Saldo de la cuenta en centavos |
| `portfolioValue` | number | Valor de la cartera en centavos |
| `balanceDollars` | number | Saldo de la cuenta en dólares |
| `portfolioValueDollars` | number | Valor de la cartera en dólares |
| `portfolioValue` | number | Valor del portafolio en centavos |

### `kalshi_get_positions`
@@ -46,12 +46,13 @@ Consultar datos de una tabla de Supabase
#### Entrada

| Parámetro | Tipo | Obligatorio | Descripción |
| --------- | ---- | ----------- | ----------- |
| --------- | ---- | -------- | ----------- |
| `projectId` | string | Sí | ID de tu proyecto Supabase \(p. ej., jdrkgepadsdopsntdlom\) |
| `table` | string | Sí | Nombre de la tabla Supabase a consultar |
| `schema` | string | No | Esquema de base de datos desde donde consultar \(predeterminado: public\). Usa esto para acceder a tablas en otros esquemas. |
| `schema` | string | No | Esquema de base de datos desde el que consultar \(predeterminado: public\). Usa esto para acceder a tablas en otros esquemas. |
| `select` | string | No | Columnas a devolver \(separadas por comas\). Predeterminado: * \(todas las columnas\) |
| `filter` | string | No | Filtro PostgREST \(p. ej., "id=eq.123"\) |
| `orderBy` | string | No | Columna para ordenar \(añade DESC para descendente\) |
| `orderBy` | string | No | Columna por la que ordenar \(añade DESC para orden descendente\) |
| `limit` | number | No | Número máximo de filas a devolver |
| `apiKey` | string | Sí | Tu clave secreta de rol de servicio de Supabase |
@@ -90,10 +91,11 @@ Obtener una sola fila de una tabla de Supabase basada en criterios de filtro
#### Entrada

| Parámetro | Tipo | Obligatorio | Descripción |
| --------- | ---- | ----------- | ----------- |
| --------- | ---- | -------- | ----------- |
| `projectId` | string | Sí | ID de tu proyecto Supabase \(p. ej., jdrkgepadsdopsntdlom\) |
| `table` | string | Sí | Nombre de la tabla Supabase a consultar |
| `schema` | string | No | Esquema de base de datos desde donde consultar \(predeterminado: public\). Usa esto para acceder a tablas en otros esquemas. |
| `schema` | string | No | Esquema de base de datos desde el que consultar \(predeterminado: public\). Usa esto para acceder a tablas en otros esquemas. |
| `select` | string | No | Columnas a devolver \(separadas por comas\). Predeterminado: * \(todas las columnas\) |
| `filter` | string | Sí | Filtro PostgREST para encontrar la fila específica \(p. ej., "id=eq.123"\) |
| `apiKey` | string | Sí | Tu clave secreta de rol de servicio de Supabase |
@@ -123,8 +123,6 @@ Récupérer le solde de votre compte et la valeur de votre portefeuille depuis K
| --------- | ---- | ----------- |
| `balance` | number | Solde du compte en centimes |
| `portfolioValue` | number | Valeur du portefeuille en centimes |
| `balanceDollars` | number | Solde du compte en dollars |
| `portfolioValueDollars` | number | Valeur du portefeuille en dollars |

### `kalshi_get_positions`
@@ -49,7 +49,8 @@ Interroger des données d'une table Supabase
| --------- | ---- | ----------- | ----------- |
| `projectId` | string | Oui | L'ID de votre projet Supabase \(ex. : jdrkgepadsdopsntdlom\) |
| `table` | string | Oui | Le nom de la table Supabase à interroger |
| `schema` | string | Non | Schéma de base de données à interroger \(par défaut : public\). Utilisez ceci pour accéder aux tables dans d'autres schémas. |
| `schema` | string | Non | Schéma de base de données à partir duquel interroger \(par défaut : public\). Utilisez ceci pour accéder aux tables dans d'autres schémas. |
| `select` | string | Non | Colonnes à retourner \(séparées par des virgules\). Par défaut * \(toutes les colonnes\) |
| `filter` | string | Non | Filtre PostgREST \(ex. : "id=eq.123"\) |
| `orderBy` | string | Non | Colonne pour le tri \(ajoutez DESC pour l'ordre décroissant\) |
| `limit` | number | Non | Nombre maximum de lignes à retourner |
@@ -93,7 +94,8 @@ Obtenir une seule ligne d'une table Supabase selon des critères de filtrage
| --------- | ---- | ----------- | ----------- |
| `projectId` | string | Oui | L'ID de votre projet Supabase \(ex. : jdrkgepadsdopsntdlom\) |
| `table` | string | Oui | Le nom de la table Supabase à interroger |
| `schema` | string | Non | Schéma de base de données à interroger \(par défaut : public\). Utilisez ceci pour accéder aux tables dans d'autres schémas. |
| `schema` | string | Non | Schéma de base de données à partir duquel interroger \(par défaut : public\). Utilisez ceci pour accéder aux tables dans d'autres schémas. |
| `select` | string | Non | Colonnes à retourner \(séparées par des virgules\). Par défaut * \(toutes les colonnes\) |
| `filter` | string | Oui | Filtre PostgREST pour trouver la ligne spécifique \(ex. : "id=eq.123"\) |
| `apiKey` | string | Oui | Votre clé secrète de rôle de service Supabase |
@@ -121,10 +121,8 @@ Kalshiからアカウント残高とポートフォリオ価値を取得

| パラメータ | 型 | 説明 |
| --------- | ---- | ----------- |
| `balance` | number | セント単位のアカウント残高 |
| `portfolioValue` | number | セント単位のポートフォリオ価値 |
| `balanceDollars` | number | ドル単位のアカウント残高 |
| `portfolioValueDollars` | number | ドル単位のポートフォリオ価値 |
| `balance` | number | アカウント残高(セント単位) |
| `portfolioValue` | number | ポートフォリオ価値(セント単位) |

### `kalshi_get_positions`
@@ -49,7 +49,8 @@ Supabaseテーブルからデータを照会する
| --------- | ---- | -------- | ----------- |
| `projectId` | string | はい | あなたのSupabaseプロジェクトID(例:jdrkgepadsdopsntdlom) |
| `table` | string | はい | クエリするSupabaseテーブルの名前 |
| `schema` | string | いいえ | クエリするデータベーススキーマ(デフォルト:public)。他のスキーマのテーブルにアクセスする場合に使用します。 |
| `schema` | string | いいえ | クエリ元のデータベーススキーマ(デフォルト:public)。他のスキーマのテーブルにアクセスする場合に使用します。 |
| `select` | string | いいえ | 返す列(カンマ区切り)。デフォルトは*(すべての列) |
| `filter` | string | いいえ | PostgRESTフィルター(例:"id=eq.123") |
| `orderBy` | string | いいえ | 並べ替える列(降順の場合はDESCを追加) |
| `limit` | number | いいえ | 返す最大行数 |
@@ -93,7 +94,8 @@ Supabaseテーブルにデータを挿入する
| --------- | ---- | -------- | ----------- |
| `projectId` | string | はい | あなたのSupabaseプロジェクトID(例:jdrkgepadsdopsntdlom) |
| `table` | string | はい | クエリするSupabaseテーブルの名前 |
| `schema` | string | いいえ | クエリするデータベーススキーマ(デフォルト:public)。他のスキーマのテーブルにアクセスする場合に使用します。 |
| `schema` | string | いいえ | クエリ元のデータベーススキーマ(デフォルト:public)。他のスキーマのテーブルにアクセスする場合に使用します。 |
| `select` | string | いいえ | 返す列(カンマ区切り)。デフォルトは*(すべての列) |
| `filter` | string | はい | 特定の行を見つけるためのPostgRESTフィルター(例:"id=eq.123") |
| `apiKey` | string | はい | あなたのSupabaseサービスロールシークレットキー |
@@ -123,8 +123,6 @@ import { BlockInfoCard } from "@/components/ui/block-info-card"
| --------- | ---- | ----------- |
| `balance` | number | 账户余额(以分为单位) |
| `portfolioValue` | number | 投资组合价值(以分为单位) |
| `balanceDollars` | number | 账户余额(以美元为单位) |
| `portfolioValueDollars` | number | 投资组合价值(以美元为单位) |

### `kalshi_get_positions`
@@ -50,8 +50,9 @@ Sim 的 Supabase 集成使您能够轻松地将代理工作流连接到您的 Su
| `projectId` | string | 是 | 您的 Supabase 项目 ID \(例如:jdrkgepadsdopsntdlom\) |
| `table` | string | 是 | 要查询的 Supabase 表名 |
| `schema` | string | 否 | 要查询的数据库 schema \(默认:public\)。用于访问其他 schema 下的表。|
| `select` | string | 否 | 要返回的列(逗号分隔)。默认为 *(所有列)|
| `filter` | string | 否 | PostgREST 过滤条件 \(例如:"id=eq.123"\) |
| `orderBy` | string | 否 | 排序的列名 \(添加 DESC 表示降序\) |
| `orderBy` | string | 否 | 排序的列(添加 DESC 表示降序)|
| `limit` | number | 否 | 返回的最大行数 |
| `apiKey` | string | 是 | 您的 Supabase 服务角色密钥 |
@@ -94,7 +95,8 @@ Sim 的 Supabase 集成使您能够轻松地将代理工作流连接到您的 Su
| `projectId` | string | 是 | 您的 Supabase 项目 ID \(例如:jdrkgepadsdopsntdlom\) |
| `table` | string | 是 | 要查询的 Supabase 表名 |
| `schema` | string | 否 | 要查询的数据库 schema \(默认:public\)。用于访问其他 schema 下的表。|
| `filter` | string | 是 | 用于查找特定行的 PostgREST 过滤条件 \(例如:"id=eq.123"\) |
| `select` | string | 否 | 要返回的列(逗号分隔)。默认为 *(所有列)|
| `filter` | string | 是 | PostgREST 过滤条件,用于查找特定行 \(例如:"id=eq.123"\) |
| `apiKey` | string | 是 | 您的 Supabase 服务角色密钥 |

#### 输出
@@ -700,7 +700,7 @@ checksums:
content/11: 04bd9805ef6a50af8469463c34486dbf
content/12: a3671dd7ba76a87dc75464d9bf9b7b4b
content/13: 371d0e46b4bd2c23f559b8bc112f6955
content/14: 80578981b8b3a1cf579e52ff05e7468d
content/14: 5102b3705883f9e0c5440aeabafd1d24
content/15: bcadfc362b69078beee0088e5936c98b
content/16: 09ed43219d02501c829594dbf4128959
content/17: 88ae2285d728c80937e1df8194d92c60
@@ -712,7 +712,7 @@ checksums:
content/23: 7d96d99e45880195ccbd34bddaac6319
content/24: 75d05f96dff406db06b338d9ab8d0bd7
content/25: 371d0e46b4bd2c23f559b8bc112f6955
content/26: cfd801fa517b4bcfa5fa034b2c4e908a
content/26: 38373ac018fd7db3a20ba5308beac81e
content/27: bcadfc362b69078beee0088e5936c98b
content/28: a0284632eb0a15e66f69479ec477c5b1
content/29: b1e60734e590a8ad894a96581a253bf4
@@ -48276,7 +48276,7 @@ checksums:
content/35: 371d0e46b4bd2c23f559b8bc112f6955
content/36: bddd30707802c07aac61620721bfaf16
content/37: bcadfc362b69078beee0088e5936c98b
content/38: fa2c581e6fb204f5ddbd0ffcbf0f7123
content/38: 4619dad6a45478396332397f1e53db85
content/39: 65de097e276f762b71d59fa7f9b0a207
content/40: 013f52c249b5919fdb6d96700b25f379
content/41: 371d0e46b4bd2c23f559b8bc112f6955
@@ -136,16 +136,29 @@ vi.mock('@sim/db', () => {
      },
    }),
  }),
  delete: () => ({
    where: () => Promise.resolve(),
  }),
  insert: () => ({
    values: (records: any) => {
      dbOps.order.push('insert')
      dbOps.insertRecords.push(records)
      return Promise.resolve()
    },
  }),
  transaction: vi.fn(async (fn: any) => {
    await fn({
  insert: (table: any) => ({
      delete: () => ({
        where: () => Promise.resolve(),
      }),
      insert: () => ({
        values: (records: any) => {
          dbOps.order.push('insert')
          dbOps.insertRecords.push(records)
          return Promise.resolve()
        },
      }),
  update: (table: any) => ({
      update: () => ({
        set: (payload: any) => ({
          where: () => {
            dbOps.updatePayloads.push(payload)
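The widened mock adds a `transaction` branch so code under test can run delete-then-insert atomically. A hedged sketch of the consumer-side call shape it is built to satisfy; the table object and records are placeholders, and per the hunk the mock only records inserts and update payloads:

```ts
import { expect } from 'vitest'

declare const db: any // the mocked '@sim/db' export
declare const dbOps: { order: string[]; insertRecords: unknown[] }
declare const chunksTable: unknown // placeholder table object

await db.transaction(async (tx: any) => {
  await tx.delete(chunksTable).where() // resolves immediately in the mock
  await tx.insert(chunksTable).values([{ id: 'c1', content: 'hello' }])
})

expect(dbOps.order).toContain('insert')
expect(dbOps.insertRecords[0]).toEqual([{ id: 'c1', content: 'hello' }])
```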
@@ -21,14 +21,15 @@ export async function POST(
) {
  const { workflowId, executionId, contextId } = await params

  // Allow resume from dashboard without requiring deployment
  const access = await validateWorkflowAccess(request, workflowId, false)
  if (access.error) {
    return NextResponse.json({ error: access.error.message }, { status: access.error.status })
  }

  const workflow = access.workflow!
  const workflow = access.workflow

  let payload: any = {}
  let payload: Record<string, unknown> = {}
  try {
    payload = await request.json()
  } catch {
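The `payload` retyping above is a small strictness win; a sketch of what it buys, with field names invented for illustration:

```ts
declare const payload: Record<string, unknown> // as typed in the route above

// With `any`, `payload.input.approved` compiles and can blow up at runtime.
// With `unknown` values, each field has to be narrowed before use:
const input =
  typeof payload.input === 'object' && payload.input !== null
    ? (payload.input as Record<string, unknown>)
    : {}
const approved = typeof input.approved === 'boolean' ? input.approved : false
```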
@@ -148,6 +149,7 @@ export async function GET(
) {
  const { workflowId, executionId, contextId } = await params

  // Allow access without API key for browser-based UI (same as parent execution endpoint)
  const access = await validateWorkflowAccess(request, workflowId, false)
  if (access.error) {
    return NextResponse.json({ error: access.error.message }, { status: access.error.status })
File diff suppressed because it is too large
@@ -1,6 +1,12 @@
'use client'

import { Popover, PopoverAnchor, PopoverContent, PopoverItem } from '@/components/emcn'
import {
  Popover,
  PopoverAnchor,
  PopoverContent,
  PopoverDivider,
  PopoverItem,
} from '@/components/emcn'

interface ChunkContextMenuProps {
  isOpen: boolean
@@ -102,6 +108,7 @@ export function ChunkContextMenu({
      <PopoverContent ref={menuRef} align='start' side='bottom' sideOffset={4}>
        {hasChunk ? (
          <>
            {/* Navigation */}
            {!isMultiSelect && onOpenInNewTab && (
              <PopoverItem
                onClick={() => {
@@ -112,6 +119,9 @@ export function ChunkContextMenu({
                Open in new tab
              </PopoverItem>
            )}
            {!isMultiSelect && onOpenInNewTab && <PopoverDivider />}

            {/* Edit and copy actions */}
            {!isMultiSelect && onEdit && (
              <PopoverItem
                onClick={() => {
@@ -132,6 +142,9 @@ export function ChunkContextMenu({
                Copy content
              </PopoverItem>
            )}
            {!isMultiSelect && (onEdit || onCopyContent) && <PopoverDivider />}

            {/* State toggle */}
            {onToggleEnabled && (
              <PopoverItem
                disabled={disableToggleEnabled}
@@ -143,6 +156,13 @@ export function ChunkContextMenu({
                {getToggleLabel()}
              </PopoverItem>
            )}

            {/* Destructive action */}
            {onDelete &&
              ((!isMultiSelect && onOpenInNewTab) ||
                (!isMultiSelect && onEdit) ||
                (!isMultiSelect && onCopyContent) ||
                onToggleEnabled) && <PopoverDivider />}
            {onDelete && (
              <PopoverItem
                disabled={disableDelete}
@@ -453,6 +453,8 @@ export function KnowledgeBase({
    error: knowledgeBaseError,
    refresh: refreshKnowledgeBase,
  } = useKnowledgeBase(id)
  const [hasProcessingDocuments, setHasProcessingDocuments] = useState(false)

  const {
    documents,
    pagination,
@@ -468,6 +470,7 @@ export function KnowledgeBase({
    offset: (currentPage - 1) * DOCUMENTS_PER_PAGE,
    sortBy,
    sortOrder,
    refetchInterval: hasProcessingDocuments && !isDeleting ? 3000 : false,
  })

  const { tagDefinitions } = useKnowledgeBaseTagDefinitions(id)
@@ -534,25 +537,15 @@ export function KnowledgeBase({
  )

  useEffect(() => {
    const hasProcessingDocuments = documents.some(
    const processing = documents.some(
      (doc) => doc.processingStatus === 'pending' || doc.processingStatus === 'processing'
    )
    setHasProcessingDocuments(processing)

    if (!hasProcessingDocuments) return

    const refreshInterval = setInterval(async () => {
      try {
        if (!isDeleting) {
          await checkForDeadProcesses()
          await refreshDocuments()
        }
      } catch (error) {
        logger.error('Error refreshing documents:', error)
      }
    }, 3000)

    return () => clearInterval(refreshInterval)
  }, [documents, refreshDocuments, isDeleting])
    if (processing) {
      checkForDeadProcesses()
    }
  }, [documents])

  /**
   * Checks for documents with stale processing states and marks them as failed
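Together with the `refetchInterval` line in the earlier hunk, this replaces the hand-rolled `setInterval` with polling owned by the query layer. A minimal sketch of that pattern, assuming the repo's documents hook wraps TanStack Query (the hook internals are not shown in this compare):

```ts
import { useQuery } from '@tanstack/react-query'

declare function fetchDocuments(id: string): Promise<unknown[]> // hypothetical fetcher

function useDocuments(id: string, hasProcessing: boolean, isDeleting: boolean) {
  return useQuery({
    queryKey: ['knowledge-base', id, 'documents'],
    queryFn: () => fetchDocuments(id),
    // Poll every 3s only while documents are still processing;
    // `false` disables polling, mirroring the hunk above.
    refetchInterval: hasProcessing && !isDeleting ? 3000 : false,
  })
}
```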
@@ -672,25 +665,6 @@ export function KnowledgeBase({

      await refreshDocuments()

      let refreshAttempts = 0
      const maxRefreshAttempts = 3
      const refreshInterval = setInterval(async () => {
        try {
          refreshAttempts++
          await refreshDocuments()
          if (refreshAttempts >= maxRefreshAttempts) {
            clearInterval(refreshInterval)
          }
        } catch (error) {
          logger.error('Error refreshing documents after retry:', error)
          clearInterval(refreshInterval)
        }
      }, 1000)

      setTimeout(() => {
        clearInterval(refreshInterval)
      }, 4000)

      logger.info(`Document retry initiated successfully for: ${docId}`)
    } catch (err) {
      logger.error('Error retrying document:', err)
@@ -1,6 +1,12 @@
'use client'

import { Popover, PopoverAnchor, PopoverContent, PopoverItem } from '@/components/emcn'
import {
  Popover,
  PopoverAnchor,
  PopoverContent,
  PopoverDivider,
  PopoverItem,
} from '@/components/emcn'

interface DocumentContextMenuProps {
  isOpen: boolean
@@ -107,6 +113,7 @@ export function DocumentContextMenu({
      <PopoverContent ref={menuRef} align='start' side='bottom' sideOffset={4}>
        {hasDocument ? (
          <>
            {/* Navigation */}
            {!isMultiSelect && onOpenInNewTab && (
              <PopoverItem
                onClick={() => {
@@ -117,6 +124,9 @@ export function DocumentContextMenu({
                Open in new tab
              </PopoverItem>
            )}
            {!isMultiSelect && onOpenInNewTab && <PopoverDivider />}

            {/* Edit and view actions */}
            {!isMultiSelect && onRename && (
              <PopoverItem
                onClick={() => {
@@ -137,6 +147,9 @@ export function DocumentContextMenu({
                View tags
              </PopoverItem>
            )}
            {!isMultiSelect && (onRename || (hasTags && onViewTags)) && <PopoverDivider />}

            {/* State toggle */}
            {onToggleEnabled && (
              <PopoverItem
                disabled={disableToggleEnabled}
@@ -148,6 +161,13 @@ export function DocumentContextMenu({
                {getToggleLabel()}
              </PopoverItem>
            )}

            {/* Destructive action */}
            {onDelete &&
              ((!isMultiSelect && onOpenInNewTab) ||
                (!isMultiSelect && onRename) ||
                (!isMultiSelect && hasTags && onViewTags) ||
                onToggleEnabled) && <PopoverDivider />}
            {onDelete && (
              <PopoverItem
                disabled={disableDelete}
@@ -1,6 +1,12 @@
'use client'

import { Popover, PopoverAnchor, PopoverContent, PopoverItem } from '@/components/emcn'
import {
  Popover,
  PopoverAnchor,
  PopoverContent,
  PopoverDivider,
  PopoverItem,
} from '@/components/emcn'

interface KnowledgeBaseContextMenuProps {
  /**
@@ -104,6 +110,7 @@ export function KnowledgeBaseContextMenu({
        }}
      />
      <PopoverContent ref={menuRef} align='start' side='bottom' sideOffset={4}>
        {/* Navigation */}
        {showOpenInNewTab && onOpenInNewTab && (
          <PopoverItem
            onClick={() => {
@@ -114,6 +121,9 @@ export function KnowledgeBaseContextMenu({
            Open in new tab
          </PopoverItem>
        )}
        {showOpenInNewTab && onOpenInNewTab && <PopoverDivider />}

        {/* View and copy actions */}
        {showViewTags && onViewTags && (
          <PopoverItem
            onClick={() => {
@@ -134,6 +144,9 @@ export function KnowledgeBaseContextMenu({
            Copy ID
          </PopoverItem>
        )}
        {((showViewTags && onViewTags) || onCopyId) && <PopoverDivider />}

        {/* Edit action */}
        {showEdit && onEdit && (
          <PopoverItem
            disabled={disableEdit}
@@ -145,6 +158,14 @@ export function KnowledgeBaseContextMenu({
            Edit
          </PopoverItem>
        )}

        {/* Destructive action */}
        {showDelete &&
          onDelete &&
          ((showOpenInNewTab && onOpenInNewTab) ||
            (showViewTags && onViewTags) ||
            onCopyId ||
            (showEdit && onEdit)) && <PopoverDivider />}
        {showDelete && onDelete && (
          <PopoverItem
            disabled={disableDelete}
@@ -1,7 +1,13 @@
'use client'

import type { RefObject } from 'react'
import { Popover, PopoverAnchor, PopoverContent, PopoverItem } from '@/components/emcn'
import {
  Popover,
  PopoverAnchor,
  PopoverContent,
  PopoverDivider,
  PopoverItem,
} from '@/components/emcn'
import type { WorkflowLog } from '@/stores/logs/filters/types'

interface LogRowContextMenuProps {
@@ -50,7 +56,7 @@ export function LogRowContextMenu({
        }}
      />
      <PopoverContent ref={menuRef} align='start' side='bottom' sideOffset={4}>
        {/* Copy Execution ID */}
        {/* Copy action */}
        <PopoverItem
          disabled={!hasExecutionId}
          onClick={() => {
@@ -61,7 +67,8 @@ export function LogRowContextMenu({
          Copy Execution ID
        </PopoverItem>

        {/* Open Workflow */}
        {/* Navigation */}
        <PopoverDivider />
        <PopoverItem
          disabled={!hasWorkflow}
          onClick={() => {
@@ -72,7 +79,8 @@ export function LogRowContextMenu({
          Open Workflow
        </PopoverItem>

        {/* Filter by Workflow - only show when not already filtered by this workflow */}
        {/* Filter actions */}
        <PopoverDivider />
        {!isFilteredByThisWorkflow && (
          <PopoverItem
            disabled={!hasWorkflow}
@@ -84,8 +92,6 @@ export function LogRowContextMenu({
            Filter by Workflow
          </PopoverItem>
        )}

        {/* Clear All Filters - show when any filters are active */}
        {hasActiveFilters && (
          <PopoverItem
            onClick={() => {
@@ -755,6 +755,24 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
        const allTags = outputPaths.map((path) => `${normalizedBlockName}.${path}`)
        blockTags = isSelfReference ? allTags.filter((tag) => tag.endsWith('.url')) : allTags
      }
    } else if (sourceBlock.type === 'human_in_the_loop') {
      const dynamicOutputs = getBlockOutputPaths(sourceBlock.type, mergedSubBlocks)

      const isSelfReference = activeSourceBlockId === blockId

      if (dynamicOutputs.length > 0) {
        const allTags = dynamicOutputs.map((path) => `${normalizedBlockName}.${path}`)
        // For self-reference, only show url and resumeEndpoint (not response format fields)
        blockTags = isSelfReference
          ? allTags.filter((tag) => tag.endsWith('.url') || tag.endsWith('.resumeEndpoint'))
          : allTags
      } else {
        const outputPaths = getBlockOutputPaths(sourceBlock.type, mergedSubBlocks)
        const allTags = outputPaths.map((path) => `${normalizedBlockName}.${path}`)
        blockTags = isSelfReference
          ? allTags.filter((tag) => tag.endsWith('.url') || tag.endsWith('.resumeEndpoint'))
          : allTags
      }
    } else {
      const operationValue =
        mergedSubBlocks?.operation?.value ?? getSubBlockValue(activeSourceBlockId, 'operation')
@@ -1074,7 +1092,19 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
        blockTags = isSelfReference ? allTags.filter((tag) => tag.endsWith('.url')) : allTags
      }
    } else if (accessibleBlock.type === 'human_in_the_loop') {
      blockTags = [`${normalizedBlockName}.url`]
      const dynamicOutputs = getBlockOutputPaths(accessibleBlock.type, mergedSubBlocks)

      const isSelfReference = accessibleBlockId === blockId

      if (dynamicOutputs.length > 0) {
        const allTags = dynamicOutputs.map((path) => `${normalizedBlockName}.${path}`)
        // For self-reference, only show url and resumeEndpoint (not response format fields)
        blockTags = isSelfReference
          ? allTags.filter((tag) => tag.endsWith('.url') || tag.endsWith('.resumeEndpoint'))
          : allTags
      } else {
        blockTags = [`${normalizedBlockName}.url`, `${normalizedBlockName}.resumeEndpoint`]
      }
    } else {
      const operationValue =
        mergedSubBlocks?.operation?.value ?? getSubBlockValue(accessibleBlockId, 'operation')
@@ -760,6 +760,7 @@ function CodeEditorSyncWrapper({
 * in the tool selection dropdown.
 */
const BUILT_IN_TOOL_TYPES = new Set([
  'api',
  'file',
  'function',
  'knowledge',
@@ -772,6 +773,7 @@ const BUILT_IN_TOOL_TYPES = new Set([
  'tts',
  'stt',
  'memory',
  'webhook_request',
  'workflow',
])

@@ -926,6 +928,8 @@ export function ToolInput({
  const toolBlocks = getAllBlocks().filter(
    (block) =>
      (block.category === 'tools' ||
        block.type === 'api' ||
        block.type === 'webhook_request' ||
        block.type === 'workflow' ||
        block.type === 'knowledge' ||
        block.type === 'function') &&
@@ -1 +1,3 @@
export { LogRowContextMenu } from './log-row-context-menu'
export { OutputContextMenu } from './output-context-menu'
export { PrettierOutput } from './prettier-output'
@@ -0,0 +1,145 @@
'use client'

import type { RefObject } from 'react'
import {
  Popover,
  PopoverAnchor,
  PopoverContent,
  PopoverDivider,
  PopoverItem,
} from '@/components/emcn'
import type { ConsoleEntry } from '@/stores/terminal'

interface ContextMenuPosition {
  x: number
  y: number
}

interface TerminalFilters {
  blockIds: Set<string>
  statuses: Set<'error' | 'info'>
  runIds: Set<string>
}

interface LogRowContextMenuProps {
  isOpen: boolean
  position: ContextMenuPosition
  menuRef: RefObject<HTMLDivElement | null>
  onClose: () => void
  entry: ConsoleEntry | null
  filters: TerminalFilters
  onFilterByBlock: (blockId: string) => void
  onFilterByStatus: (status: 'error' | 'info') => void
  onFilterByRunId: (runId: string) => void
  onClearFilters: () => void
  onClearConsole: () => void
  hasActiveFilters: boolean
}

/**
 * Context menu for terminal log rows (left side).
 * Displays filtering options based on the selected row's properties.
 */
export function LogRowContextMenu({
  isOpen,
  position,
  menuRef,
  onClose,
  entry,
  filters,
  onFilterByBlock,
  onFilterByStatus,
  onFilterByRunId,
  onClearFilters,
  onClearConsole,
  hasActiveFilters,
}: LogRowContextMenuProps) {
  const hasRunId = entry?.executionId != null

  const isBlockFiltered = entry ? filters.blockIds.has(entry.blockId) : false
  const entryStatus = entry?.success ? 'info' : 'error'
  const isStatusFiltered = entry ? filters.statuses.has(entryStatus) : false
  const isRunIdFiltered = entry?.executionId ? filters.runIds.has(entry.executionId) : false

  return (
    <Popover
      open={isOpen}
      onOpenChange={onClose}
      variant='secondary'
      size='sm'
      colorScheme='inverted'
    >
      <PopoverAnchor
        style={{
          position: 'fixed',
          left: `${position.x}px`,
          top: `${position.y}px`,
          width: '1px',
          height: '1px',
        }}
      />
      <PopoverContent ref={menuRef} align='start' side='bottom' sideOffset={4}>
        {/* Clear filters at top when active */}
        {hasActiveFilters && (
          <>
            <PopoverItem
              onClick={() => {
                onClearFilters()
                onClose()
              }}
            >
              Clear All Filters
            </PopoverItem>
            {entry && <PopoverDivider />}
          </>
        )}

        {/* Filter actions */}
        {entry && (
          <>
            <PopoverItem
              showCheck={isBlockFiltered}
              onClick={() => {
                onFilterByBlock(entry.blockId)
                onClose()
              }}
            >
              Filter by Block
            </PopoverItem>
            <PopoverItem
              showCheck={isStatusFiltered}
              onClick={() => {
                onFilterByStatus(entryStatus)
                onClose()
              }}
            >
              Filter by Status
            </PopoverItem>
            {hasRunId && (
              <PopoverItem
                showCheck={isRunIdFiltered}
                onClick={() => {
                  onFilterByRunId(entry.executionId!)
                  onClose()
                }}
              >
                Filter by Run ID
              </PopoverItem>
            )}
          </>
        )}

        {/* Destructive action */}
        {(entry || hasActiveFilters) && <PopoverDivider />}
        <PopoverItem
          onClick={() => {
            onClearConsole()
            onClose()
          }}
        >
          Clear Console
        </PopoverItem>
      </PopoverContent>
    </Popover>
  )
}
@@ -0,0 +1,119 @@
'use client'

import type { RefObject } from 'react'
import {
  Popover,
  PopoverAnchor,
  PopoverContent,
  PopoverDivider,
  PopoverItem,
} from '@/components/emcn'

interface ContextMenuPosition {
  x: number
  y: number
}

interface OutputContextMenuProps {
  isOpen: boolean
  position: ContextMenuPosition
  menuRef: RefObject<HTMLDivElement | null>
  onClose: () => void
  onCopySelection: () => void
  onCopyAll: () => void
  onSearch: () => void
  wrapText: boolean
  onToggleWrap: () => void
  openOnRun: boolean
  onToggleOpenOnRun: () => void
  onClearConsole: () => void
  hasSelection: boolean
}

/**
 * Context menu for terminal output panel (right side).
 * Displays copy, search, and display options for the code viewer.
 */
export function OutputContextMenu({
  isOpen,
  position,
  menuRef,
  onClose,
  onCopySelection,
  onCopyAll,
  onSearch,
  wrapText,
  onToggleWrap,
  openOnRun,
  onToggleOpenOnRun,
  onClearConsole,
  hasSelection,
}: OutputContextMenuProps) {
  return (
    <Popover
      open={isOpen}
      onOpenChange={onClose}
      variant='secondary'
      size='sm'
      colorScheme='inverted'
    >
      <PopoverAnchor
        style={{
          position: 'fixed',
          left: `${position.x}px`,
          top: `${position.y}px`,
          width: '1px',
          height: '1px',
        }}
      />
      <PopoverContent ref={menuRef} align='start' side='bottom' sideOffset={4}>
        {/* Copy and search actions */}
        <PopoverItem
          disabled={!hasSelection}
          onClick={() => {
            onCopySelection()
            onClose()
          }}
        >
          Copy Selection
        </PopoverItem>
        <PopoverItem
          onClick={() => {
            onCopyAll()
            onClose()
          }}
        >
          Copy All
        </PopoverItem>
        <PopoverItem
          onClick={() => {
            onSearch()
            onClose()
          }}
        >
          Search
        </PopoverItem>

        {/* Display settings - toggles don't close menu */}
        <PopoverDivider />
        <PopoverItem showCheck={wrapText} onClick={onToggleWrap}>
          Wrap Text
        </PopoverItem>
        <PopoverItem showCheck={openOnRun} onClick={onToggleOpenOnRun}>
          Open on Run
        </PopoverItem>

        {/* Destructive action */}
        <PopoverDivider />
        <PopoverItem
          onClick={() => {
            onClearConsole()
            onClose()
          }}
        >
          Clear Console
        </PopoverItem>
      </PopoverContent>
    </Popover>
  )
}
@@ -38,11 +38,16 @@ import {
import { getEnv, isTruthy } from '@/lib/core/config/env'
import { useRegisterGlobalCommands } from '@/app/workspace/[workspaceId]/providers/global-commands-provider'
import { createCommands } from '@/app/workspace/[workspaceId]/utils/commands-utils'
import {
  LogRowContextMenu,
  OutputContextMenu,
} from '@/app/workspace/[workspaceId]/w/[workflowId]/components/terminal/components'
import {
  useOutputPanelResize,
  useTerminalFilters,
  useTerminalResize,
} from '@/app/workspace/[workspaceId]/w/[workflowId]/components/terminal/hooks'
import { useContextMenu } from '@/app/workspace/[workspaceId]/w/components/sidebar/hooks'
import { getBlock } from '@/blocks'
import { OUTPUT_PANEL_WIDTH, TERMINAL_HEIGHT } from '@/stores/constants'
import { useCopilotTrainingStore } from '@/stores/copilot-training/store'
@@ -365,6 +370,28 @@ export function Terminal() {
    hasActiveFilters,
  } = useTerminalFilters()

  // Context menu state
  const [hasSelection, setHasSelection] = useState(false)
  const [contextMenuEntry, setContextMenuEntry] = useState<ConsoleEntry | null>(null)
  const [storedSelectionText, setStoredSelectionText] = useState('')

  // Context menu hooks
  const {
    isOpen: isLogRowMenuOpen,
    position: logRowMenuPosition,
    menuRef: logRowMenuRef,
    handleContextMenu: handleLogRowContextMenu,
    closeMenu: closeLogRowMenu,
  } = useContextMenu()

  const {
    isOpen: isOutputMenuOpen,
    position: outputMenuPosition,
    menuRef: outputMenuRef,
    handleContextMenu: handleOutputContextMenu,
    closeMenu: closeOutputMenu,
  } = useContextMenu()

  /**
   * Expands the terminal to its last meaningful height, with safeguards:
   * - Never expands below {@link DEFAULT_EXPANDED_HEIGHT}.
@@ -511,15 +538,11 @@ export function Terminal() {
  const handleRowClick = useCallback((entry: ConsoleEntry) => {
    setSelectedEntry((prev) => {
      const isDeselecting = prev?.id === entry.id
      // Re-enable auto-select when deselecting, disable when selecting
      setAutoSelectEnabled(isDeselecting)
      return isDeselecting ? null : entry
    })
  }, [])

  /**
   * Handle header click - toggle between expanded and collapsed
   */
  const handleHeaderClick = useCallback(() => {
    if (isExpanded) {
      setIsToggling(true)
@@ -529,16 +552,10 @@ export function Terminal() {
    }
  }, [expandToLastHeight, isExpanded, setTerminalHeight])

  /**
   * Handle transition end - reset toggling state
   */
  const handleTransitionEnd = useCallback(() => {
    setIsToggling(false)
  }, [])

  /**
   * Handle copy output to clipboard
   */
  const handleCopy = useCallback(() => {
    if (!selectedEntry) return

@@ -560,9 +577,6 @@ export function Terminal() {
    }
  }, [activeWorkflowId, clearWorkflowConsole])

  /**
   * Activates output search and focuses the search input.
   */
  const activateOutputSearch = useCallback(() => {
    setIsOutputSearchActive(true)
    setTimeout(() => {
@@ -570,9 +584,6 @@ export function Terminal() {
    }, 0)
  }, [])

  /**
   * Closes output search and clears the query.
   */
  const closeOutputSearch = useCallback(() => {
    setIsOutputSearchActive(false)
    setOutputSearchQuery('')
@@ -604,9 +615,6 @@ export function Terminal() {
    setCurrentMatchIndex(0)
  }, [])

  /**
   * Handle clear console for current workflow via mouse interaction.
   */
  const handleClearConsole = useCallback(
    (e: React.MouseEvent) => {
      e.stopPropagation()
@@ -615,10 +623,6 @@ export function Terminal() {
    [clearCurrentWorkflowConsole]
  )

  /**
   * Handle export of console entries for the current workflow via mouse interaction.
   * Mirrors the visibility and interaction behavior of the clear console action.
   */
  const handleExportConsole = useCallback(
    (e: React.MouseEvent) => {
      e.stopPropagation()
@@ -629,9 +633,60 @@ export function Terminal() {
    [activeWorkflowId, exportConsoleCSV]
  )

  /**
   * Handle training button click - toggle training state or open modal
   */
  const handleCopySelection = useCallback(() => {
    if (storedSelectionText) {
      navigator.clipboard.writeText(storedSelectionText)
      setShowCopySuccess(true)
    }
  }, [storedSelectionText])

  const handleOutputPanelContextMenu = useCallback(
    (e: React.MouseEvent) => {
      const selection = window.getSelection()
      const selectionText = selection?.toString() || ''
      setStoredSelectionText(selectionText)
      setHasSelection(selectionText.length > 0)
      handleOutputContextMenu(e)
    },
    [handleOutputContextMenu]
  )

  const handleRowContextMenu = useCallback(
    (e: React.MouseEvent, entry: ConsoleEntry) => {
      setContextMenuEntry(entry)
      handleLogRowContextMenu(e)
    },
    [handleLogRowContextMenu]
  )

  const handleFilterByBlock = useCallback(
    (blockId: string) => {
      toggleBlock(blockId)
      closeLogRowMenu()
    },
    [toggleBlock, closeLogRowMenu]
  )

  const handleFilterByStatus = useCallback(
    (status: 'error' | 'info') => {
      toggleStatus(status)
      closeLogRowMenu()
    },
    [toggleStatus, closeLogRowMenu]
  )

  const handleFilterByRunId = useCallback(
    (runId: string) => {
      toggleRunId(runId)
      closeLogRowMenu()
    },
    [toggleRunId, closeLogRowMenu]
  )

  const handleClearConsoleFromMenu = useCallback(() => {
    clearCurrentWorkflowConsole()
  }, [clearCurrentWorkflowConsole])

  const handleTrainingClick = useCallback(
    (e: React.MouseEvent) => {
      e.stopPropagation()
@@ -644,9 +699,6 @@ export function Terminal() {
    [isTraining, stopTraining, toggleTrainingModal]
  )

  /**
   * Whether training controls should be visible
   */
  const shouldShowTrainingButton = isTrainingEnvEnabled && showTrainingControls

  /**
@@ -721,6 +773,23 @@ export function Terminal() {
    }
  }, [showCopySuccess])

  /**
   * Track text selection state for context menu.
   * Skip updates when the context menu is open to prevent the selection
   * state from changing mid-click (which would disable the copy button).
   */
  useEffect(() => {
    const handleSelectionChange = () => {
      if (isOutputMenuOpen) return

      const selection = window.getSelection()
      setHasSelection(Boolean(selection && selection.toString().length > 0))
    }

    document.addEventListener('selectionchange', handleSelectionChange)
    return () => document.removeEventListener('selectionchange', handleSelectionChange)
  }, [isOutputMenuOpen])

  /**
   * Auto-select the latest entry when new logs arrive
   * Re-enables auto-selection when all entries are cleared
@@ -1311,6 +1380,7 @@ export function Terminal() {
                isSelected && 'bg-[var(--surface-6)] dark:bg-[var(--surface-4)]'
              )}
              onClick={() => handleRowClick(entry)}
              onContextMenu={(e) => handleRowContextMenu(e, entry)}
            >
              {/* Block */}
              <div
@@ -1327,7 +1397,13 @@ export function Terminal() {
              </div>

              {/* Status */}
              <div className={clsx(COLUMN_WIDTHS.STATUS, COLUMN_BASE_CLASS)}>
              <div
                className={clsx(
                  COLUMN_WIDTHS.STATUS,
                  COLUMN_BASE_CLASS,
                  'flex items-center'
                )}
              >
                {statusInfo ? (
                  <Badge variant={statusInfo.isError ? 'red' : 'gray'} dot>
                    {statusInfo.label}
@@ -1719,7 +1795,10 @@ export function Terminal() {
        )}

        {/* Content */}
        <div className={clsx('flex-1 overflow-y-auto', !wrapText && 'overflow-x-auto')}>
        <div
          className={clsx('flex-1 overflow-y-auto', !wrapText && 'overflow-x-auto')}
          onContextMenu={handleOutputPanelContextMenu}
        >
          {shouldShowCodeDisplay ? (
            <OutputCodeContent
              code={selectedEntry.input.code}
@@ -1748,6 +1827,42 @@ export function Terminal() {
          )}
        </div>
      </aside>

      {/* Log Row Context Menu */}
      <LogRowContextMenu
        isOpen={isLogRowMenuOpen}
        position={logRowMenuPosition}
        menuRef={logRowMenuRef}
        onClose={closeLogRowMenu}
        entry={contextMenuEntry}
        filters={filters}
        onFilterByBlock={handleFilterByBlock}
        onFilterByStatus={handleFilterByStatus}
        onFilterByRunId={handleFilterByRunId}
        onClearFilters={() => {
          clearFilters()
          closeLogRowMenu()
        }}
        onClearConsole={handleClearConsoleFromMenu}
        hasActiveFilters={hasActiveFilters}
      />

      {/* Output Panel Context Menu */}
      <OutputContextMenu
        isOpen={isOutputMenuOpen}
        position={outputMenuPosition}
        menuRef={outputMenuRef}
        onClose={closeOutputMenu}
        onCopySelection={handleCopySelection}
        onCopyAll={handleCopy}
        onSearch={activateOutputSearch}
        wrapText={wrapText}
        onToggleWrap={() => setWrapText(!wrapText)}
        openOnRun={openOnRun}
        onToggleOpenOnRun={() => setOpenOnRun(!openOnRun)}
        onClearConsole={handleClearConsoleFromMenu}
        hasSelection={hasSelection}
      />
    </>
  )
}
@@ -795,6 +795,13 @@ const WorkflowContent = React.memo(() => {
        event.preventDefault()
        redo()
      } else if ((event.ctrlKey || event.metaKey) && event.key === 'c') {
        const selection = window.getSelection()
        const hasTextSelection = selection && selection.toString().length > 0

        if (hasTextSelection) {
          return
        }

        const selectedNodes = getNodes().filter((node) => node.selected)
        if (selectedNodes.length > 0) {
          event.preventDefault()
@@ -21,6 +21,7 @@ import { signOut, useSession } from '@/lib/auth/auth-client'
import { ANONYMOUS_USER_ID } from '@/lib/auth/constants'
import { useBrandConfig } from '@/lib/branding/branding'
import { getEnv, isTruthy } from '@/lib/core/config/env'
import { isHosted } from '@/lib/core/config/feature-flags'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { useProfilePictureUpload } from '@/app/workspace/[workspaceId]/w/components/sidebar/components/settings-modal/hooks/use-profile-picture-upload'
import { useGeneralSettings, useUpdateGeneralSetting } from '@/hooks/queries/general-settings'
@@ -565,13 +566,15 @@ export function General({ onOpenChange }: GeneralProps) {
          </Button>
        </>
      )}
      <Button
        onClick={() => window.open('/?from=settings', '_blank', 'noopener,noreferrer')}
        variant='active'
        className='ml-auto'
      >
        Home Page
      </Button>
      {isHosted && (
        <Button
          onClick={() => window.open('/?from=settings', '_blank', 'noopener,noreferrer')}
          variant='active'
          className='ml-auto'
        >
          Home Page
        </Button>
      )}
    </div>

    {/* Password Reset Confirmation Modal */}
@@ -27,6 +27,7 @@ export type DocumentProcessingPayload = {
export const processDocument = task({
  id: 'knowledge-process-document',
  maxDuration: env.KB_CONFIG_MAX_DURATION || 600,
  machine: 'large-1x', // 2 vCPU, 2GB RAM - needed for large PDF processing
  retry: {
    maxAttempts: env.KB_CONFIG_MAX_ATTEMPTS || 3,
    factor: env.KB_CONFIG_RETRY_FACTOR || 2,
@@ -2,6 +2,7 @@ import { GrainIcon } from '@/components/icons'
import type { BlockConfig } from '@/blocks/types'
import { AuthMode } from '@/blocks/types'
import { getTrigger } from '@/triggers'
import { grainTriggerOptions } from '@/triggers/grain/utils'

export const GrainBlock: BlockConfig = {
  type: 'grain',
@@ -207,13 +208,21 @@ export const GrainBlock: BlockConfig = {
        value: ['grain_delete_hook'],
      },
    },
    // Trigger SubBlocks
    ...getTrigger('grain_recording_created').subBlocks,
    ...getTrigger('grain_recording_updated').subBlocks,
    ...getTrigger('grain_highlight_created').subBlocks,
    ...getTrigger('grain_highlight_updated').subBlocks,
    ...getTrigger('grain_story_created').subBlocks,
    ...getTrigger('grain_webhook').subBlocks,
    {
      id: 'selectedTriggerId',
      title: 'Trigger Type',
      type: 'dropdown',
      mode: 'trigger',
      options: grainTriggerOptions,
      value: () => 'grain_webhook',
      required: true,
    },
    ...getTrigger('grain_recording_created').subBlocks.slice(1),
    ...getTrigger('grain_recording_updated').subBlocks.slice(1),
    ...getTrigger('grain_highlight_created').subBlocks.slice(1),
    ...getTrigger('grain_highlight_updated').subBlocks.slice(1),
    ...getTrigger('grain_story_created').subBlocks.slice(1),
    ...getTrigger('grain_webhook').subBlocks.slice(1),
  ],
  tools: {
    access: [
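A hedged reading of the reshuffle above: each trigger's `subBlocks` array appears to start with its own trigger selector, so the block now declares one shared `selectedTriggerId` dropdown and spreads the per-trigger arrays with `.slice(1)` to skip the duplicated first entry. The same dedup written as a loop; the trigger IDs are copied from the hunk, and the reading of what `slice(1)` skips is an assumption:

```ts
import { getTrigger } from '@/triggers'

const grainTriggerIds = [
  'grain_recording_created',
  'grain_recording_updated',
  'grain_highlight_created',
  'grain_highlight_updated',
  'grain_story_created',
  'grain_webhook',
] as const

// Equivalent to the six hand-written spreads in the hunk above:
const triggerSubBlocks = grainTriggerIds.flatMap(
  (id) => getTrigger(id).subBlocks.slice(1) // assumed: index 0 is each trigger's own selector
)
```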
@@ -27,7 +27,7 @@ export const HumanInTheLoopBlock: BlockConfig<ResponseBlockOutput> = {
    // },
    {
      id: 'builderData',
      title: 'Paused Output',
      title: 'Display Data',
      type: 'response-format',
      // condition: { field: 'operation', value: 'human' }, // Always shown since we only support human mode
      description:
@@ -35,7 +35,7 @@ export const HumanInTheLoopBlock: BlockConfig<ResponseBlockOutput> = {
    },
    {
      id: 'notification',
      title: 'Notification',
      title: 'Notification (Send URL)',
      type: 'tool-input',
      // condition: { field: 'operation', value: 'human' }, // Always shown since we only support human mode
      description: 'Configure notification tools to alert approvers (e.g., Slack, Email)',
@@ -57,7 +57,7 @@ export const HumanInTheLoopBlock: BlockConfig<ResponseBlockOutput> = {
    // },
    {
      id: 'inputFormat',
      title: 'Resume Input',
      title: 'Resume Form',
      type: 'input-format',
      // condition: { field: 'operation', value: 'human' }, // Always shown since we only support human mode
      description: 'Define the fields the approver can fill in when resuming',
@@ -157,6 +157,9 @@ export const HumanInTheLoopBlock: BlockConfig<ResponseBlockOutput> = {
  },
  outputs: {
    url: { type: 'string', description: 'Resume UI URL' },
    // apiUrl: { type: 'string', description: 'Resume API URL' }, // Commented out - not accessible as output
    resumeEndpoint: {
      type: 'string',
      description: 'Resume API endpoint URL for direct curl requests',
    },
  },
}
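The new `resumeEndpoint` output is documented as a URL for direct requests. A hedged sketch of calling it; the JSON body is whatever the block's Resume Form defines, so `approved` and `comment` here are invented field names:

```ts
async function resumePausedWorkflow(resumeEndpoint: string) {
  // Body fields come from the workflow's Resume Form definition.
  const res = await fetch(resumeEndpoint, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ approved: true, comment: 'Looks good' }),
  })
  if (!res.ok) throw new Error(`Resume failed: ${res.status}`)
  return res.json()
}
```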
@@ -79,6 +79,16 @@ export const SupabaseBlock: BlockConfig<SupabaseResponse> = {
        value: ['query', 'get_row', 'insert', 'update', 'delete', 'upsert', 'count', 'text_search'],
      },
    },
    {
      id: 'select',
      title: 'Select Columns',
      type: 'short-input',
      placeholder: '* (all columns) or id,name,email',
      condition: {
        field: 'operation',
        value: ['query', 'get_row'],
      },
    },
    {
      id: 'apiKey',
      title: 'Service Role Secret',
@@ -1044,6 +1054,7 @@ Return ONLY the PostgREST filter expression - no explanations, no markdown, no e
    projectId: { type: 'string', description: 'Supabase project identifier' },
    table: { type: 'string', description: 'Database table name' },
    schema: { type: 'string', description: 'Database schema (default: public)' },
    select: { type: 'string', description: 'Columns to return (comma-separated, defaults to *)' },
    apiKey: { type: 'string', description: 'Service role secret key' },
    // Data for insert/update operations
    data: { type: 'json', description: 'Row data' },
apps/sim/blocks/blocks/webhook_request.ts (Normal file, 86 lines)
@@ -0,0 +1,86 @@
import { WebhookIcon } from '@/components/icons'
import type { BlockConfig } from '@/blocks/types'
import type { RequestResponse } from '@/tools/http/types'

export const WebhookRequestBlock: BlockConfig<RequestResponse> = {
  type: 'webhook_request',
  name: 'Webhook',
  description: 'Send a webhook request',
  longDescription:
    'Send an HTTP POST request to a webhook URL with automatic webhook headers. Optionally sign the payload with HMAC-SHA256 for secure webhook delivery.',
  docsLink: 'https://docs.sim.ai/blocks/webhook',
  category: 'blocks',
  bgColor: '#10B981',
  icon: WebhookIcon,
  subBlocks: [
    {
      id: 'url',
      title: 'Webhook URL',
      type: 'short-input',
      placeholder: 'https://example.com/webhook',
      required: true,
    },
    {
      id: 'body',
      title: 'Payload',
      type: 'code',
      placeholder: 'Enter JSON payload...',
      language: 'json',
      wandConfig: {
        enabled: true,
        maintainHistory: true,
        prompt: `You are an expert JSON programmer.
Generate ONLY the raw JSON object based on the user's request.
The output MUST be a single, valid JSON object, starting with { and ending with }.

Current payload: {context}

Do not include any explanations, markdown formatting, or other text outside the JSON object.

You have access to the following variables you can use to generate the JSON payload:
- Use angle brackets for workflow variables, e.g., '<blockName.output>'.
- Use double curly braces for environment variables, e.g., '{{ENV_VAR_NAME}}'.

Example:
{
  "event": "workflow.completed",
  "data": {
    "result": "<agent.content>",
    "timestamp": "<function.result>"
  }
}`,
        placeholder: 'Describe the webhook payload you need...',
        generationType: 'json-object',
      },
    },
    {
      id: 'secret',
      title: 'Signing Secret',
      type: 'short-input',
      placeholder: 'Optional: Secret for HMAC signature',
      password: true,
      connectionDroppable: false,
    },
    {
      id: 'headers',
      title: 'Additional Headers',
      type: 'table',
      columns: ['Key', 'Value'],
      description: 'Optional custom headers to include with the webhook request',
    },
  ],
  tools: {
    access: ['webhook_request'],
  },
  inputs: {
    url: { type: 'string', description: 'Webhook URL to send the request to' },
    body: { type: 'json', description: 'JSON payload to send' },
    secret: { type: 'string', description: 'Optional secret for HMAC-SHA256 signature' },
    headers: { type: 'json', description: 'Optional additional headers' },
  },
  outputs: {
    data: { type: 'json', description: 'Response data from the webhook endpoint' },
    status: { type: 'number', description: 'HTTP status code' },
    headers: { type: 'json', description: 'Response headers' },
  },
}
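The block's `secret` input feeds the HMAC-SHA256 signing mentioned in its description. The tool's actual header name and encoding are not shown in this compare, so the sketch below is a generic Node implementation of the technique, not the block's confirmed wire format:

```ts
import { createHmac, timingSafeEqual } from 'node:crypto'

// Sender: sign the serialized payload with the shared secret.
function signPayload(body: string, secret: string): string {
  return createHmac('sha256', secret).update(body).digest('hex')
}

// Receiver: recompute the signature and compare in constant time.
function verifySignature(body: string, secret: string, received: string): boolean {
  const expected = Buffer.from(signPayload(body, secret), 'hex')
  const actual = Buffer.from(received, 'hex')
  return expected.length === actual.length && timingSafeEqual(expected, actual)
}
```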
@@ -131,6 +131,7 @@ import { WaitBlock } from '@/blocks/blocks/wait'
import { WealthboxBlock } from '@/blocks/blocks/wealthbox'
import { WebflowBlock } from '@/blocks/blocks/webflow'
import { WebhookBlock } from '@/blocks/blocks/webhook'
import { WebhookRequestBlock } from '@/blocks/blocks/webhook_request'
import { WhatsAppBlock } from '@/blocks/blocks/whatsapp'
import { WikipediaBlock } from '@/blocks/blocks/wikipedia'
import { WordPressBlock } from '@/blocks/blocks/wordpress'
@@ -280,6 +281,7 @@ export const registry: Record<string, BlockConfig> = {
  wealthbox: WealthboxBlock,
  webflow: WebflowBlock,
  webhook: WebhookBlock,
  webhook_request: WebhookRequestBlock,
  whatsapp: WhatsAppBlock,
  wikipedia: WikipediaBlock,
  wordpress: WordPressBlock,
@@ -217,6 +217,7 @@ export interface SubBlockConfig {
|
||||
hideFromPreview?: boolean // Hide this subblock from the workflow block preview
|
||||
requiresFeature?: string // Environment variable name that must be truthy for this subblock to be visible
|
||||
description?: string
|
||||
tooltip?: string // Tooltip text displayed via info icon next to the title
|
||||
value?: (params: Record<string, any>) => string
|
||||
grouped?: boolean
|
||||
scrollable?: boolean
|
||||
|
||||
@@ -119,10 +119,8 @@ const STYLES = {
|
||||
'hover:bg-[var(--border-1)] hover:text-[var(--text-primary)] hover:[&_svg]:text-[var(--text-primary)]',
|
||||
},
|
||||
secondary: {
|
||||
active:
|
||||
'bg-[var(--brand-secondary)] text-[var(--text-inverse)] [&_svg]:text-[var(--text-inverse)]',
|
||||
hover:
|
||||
'hover:bg-[var(--brand-secondary)] hover:text-[var(--text-inverse)] dark:hover:text-[var(--text-inverse)] hover:[&_svg]:text-[var(--text-inverse)] dark:hover:[&_svg]:text-[var(--text-inverse)]',
|
||||
active: 'bg-[var(--brand-secondary)] text-white [&_svg]:text-white',
|
||||
hover: 'hover:bg-[var(--brand-secondary)] hover:text-white hover:[&_svg]:text-white',
|
||||
},
|
||||
inverted: {
|
||||
active:
|
||||
@@ -474,14 +472,20 @@ const PopoverScrollArea = React.forwardRef<HTMLDivElement, PopoverScrollAreaProp
|
||||
PopoverScrollArea.displayName = 'PopoverScrollArea'
|
||||
|
||||
export interface PopoverItemProps extends React.HTMLAttributes<HTMLDivElement> {
|
||||
/** Whether this item is currently active/selected */
|
||||
/**
|
||||
* Whether this item has active/highlighted background styling.
|
||||
* Use for keyboard navigation focus or persistent highlight states.
|
||||
*/
|
||||
active?: boolean
|
||||
/** Only show when not inside any folder */
|
||||
rootOnly?: boolean
|
||||
/** Whether this item is disabled */
|
||||
disabled?: boolean
|
||||
/**
|
||||
* Show checkmark when active
|
||||
* Show a checkmark to indicate selection/checked state.
|
||||
* Unlike `active`, this only shows the checkmark without background highlight,
|
||||
* following the pattern where hover provides interaction feedback
|
||||
* and checkmarks indicate current value.
|
||||
* @default false
|
||||
*/
|
||||
showCheck?: boolean
|
||||
@@ -528,7 +532,7 @@ const PopoverItem = React.forwardRef<HTMLDivElement, PopoverItemProps>(
|
||||
{...props}
|
||||
>
|
||||
{children}
|
||||
{showCheck && active && <Check className={cn('ml-auto', STYLES.size[size].icon)} />}
|
||||
{showCheck && <Check className={cn('ml-auto', STYLES.size[size].icon)} />}
|
||||
</div>
|
||||
)
|
||||
}
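
Net effect of the `PopoverItem` change: `showCheck` alone now renders the checkmark, while `active` only controls the highlighted background. A hedged usage sketch (component and prop names from the diff; the option data shape is assumed):

// Hover provides interaction feedback; the check marks the current value.
<PopoverItem showCheck={option.value === selected} onClick={() => setSelected(option.value)}>
  {option.label}
</PopoverItem>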

@@ -331,6 +331,22 @@ export class BlockExecutor {
      }
      return filtered
    }

    const isTrigger =
      block.metadata?.category === 'triggers' ||
      block.config?.params?.triggerMode === true ||
      block.metadata?.id === BlockType.STARTER

    if (isTrigger) {
      const filtered: NormalizedBlockOutput = {}
      const internalKeys = ['webhook', 'workflowId', 'input']
      for (const [key, value] of Object.entries(output)) {
        if (internalKeys.includes(key)) continue
        filtered[key] = value
      }
      return filtered
    }

    return output
  }

@@ -510,7 +526,7 @@ export class BlockExecutor {
    const placeholderState: BlockState = {
      output: {
        url: resumeLinks.uiUrl,
        // apiUrl: resumeLinks.apiUrl, // Hidden from output
        resumeEndpoint: resumeLinks.apiUrl,
      },
      executed: false,
      executionTime: existingState?.executionTime ?? 0,

@@ -227,7 +227,7 @@ export class HumanInTheLoopBlockHandler implements BlockHandler {

    if (resumeLinks) {
      output.url = resumeLinks.uiUrl
      // output.apiUrl = resumeLinks.apiUrl // Hidden from output
      output.resumeEndpoint = resumeLinks.apiUrl
    }

    return output
@@ -576,9 +576,9 @@ export class HumanInTheLoopBlockHandler implements BlockHandler {
      if (context.resumeLinks.uiUrl) {
        pauseOutput.url = context.resumeLinks.uiUrl
      }
      // if (context.resumeLinks.apiUrl) {
      //   pauseOutput.apiUrl = context.resumeLinks.apiUrl
      // } // Hidden from output
      if (context.resumeLinks.apiUrl) {
        pauseOutput.resumeEndpoint = context.resumeLinks.apiUrl
      }
    }

    if (Array.isArray(context.inputFormat)) {

@@ -205,7 +205,6 @@ describe('TriggerBlockHandler', () => {

      const result = await handler.execute(mockContext, scheduleBlock, {})

      // Schedule triggers typically don't have input data, just trigger the workflow
      expect(result).toEqual({})
    })


@@ -31,10 +31,7 @@ export class TriggerBlockHandler implements BlockHandler {

    const existingState = ctx.blockStates.get(block.id)
    if (existingState?.output && Object.keys(existingState.output).length > 0) {
      const existingOutput = existingState.output as any
      const existingProvider = existingOutput?.webhook?.data?.provider

      return existingOutput
      return existingState.output
    }

    const starterBlock = ctx.workflow?.blocks?.find((b) => b.metadata?.id === 'starter')
@@ -44,88 +41,8 @@ export class TriggerBlockHandler implements BlockHandler {
    const starterOutput = starterState.output

    if (starterOutput.webhook?.data) {
      const webhookData = starterOutput.webhook?.data || {}
      const provider = webhookData.provider

      if (provider === 'github') {
        const payloadSource = webhookData.payload || {}
        return {
          ...payloadSource,
          webhook: starterOutput.webhook,
        }
      }

      if (provider === 'microsoft-teams') {
        const providerData = (starterOutput as any)[provider] || webhookData[provider] || {}
        const payloadSource = providerData?.message?.raw || webhookData.payload || {}
        return {
          ...payloadSource,
          [provider]: providerData,
          webhook: starterOutput.webhook,
        }
      }

      if (provider === 'airtable') {
        return starterOutput
      }

      const result: any = {
        input: starterOutput.input,
      }

      for (const [key, value] of Object.entries(starterOutput)) {
        if (key !== 'webhook' && key !== provider) {
          result[key] = value
        }
      }

      if (provider && starterOutput[provider]) {
        const providerData = starterOutput[provider]

        for (const [key, value] of Object.entries(providerData)) {
          if (typeof value === 'object' && value !== null) {
            if (!result[key]) {
              result[key] = value
            }
          }
        }

        result[provider] = providerData
      } else if (provider && webhookData[provider]) {
        const providerData = webhookData[provider]

        for (const [key, value] of Object.entries(providerData)) {
          if (typeof value === 'object' && value !== null) {
            if (!result[key]) {
              result[key] = value
            }
          }
        }

        result[provider] = providerData
      } else if (
        provider &&
        (provider === 'gmail' || provider === 'outlook') &&
        webhookData.payload?.email
      ) {
        const emailData = webhookData.payload.email

        for (const [key, value] of Object.entries(emailData)) {
          if (!result[key]) {
            result[key] = value
          }
        }

        result.email = emailData

        if (webhookData.payload.timestamp) {
          result.timestamp = webhookData.payload.timestamp
        }
      }

      if (starterOutput.webhook) result.webhook = starterOutput.webhook

      return result
      const { webhook, workflowId, ...cleanOutput } = starterOutput
      return cleanOutput
    }

    return starterOutput
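
The replacement logic above collapses all the provider-specific flattening into a single rest destructuring that drops internal keys. A minimal standalone illustration (the values are made up):

// Strip internal bookkeeping keys; pass everything else through untouched.
const starterOutput = { webhook: { data: {} }, workflowId: 'wf_1', email: { subject: 'Hi' } }
const { webhook, workflowId, ...cleanOutput } = starterOutput
// cleanOutput === { email: { subject: 'Hi' } }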

@@ -109,6 +109,9 @@ export class WorkflowBlockHandler implements BlockHandler {
      contextExtensions: {
        isChildExecution: true,
        isDeployedContext: ctx.isDeployedContext === true,
        workspaceId: ctx.workspaceId,
        userId: ctx.userId,
        executionId: ctx.executionId,
      },
    })


@@ -228,6 +228,7 @@ export function useKnowledgeDocumentsQuery(
  params: KnowledgeDocumentsParams,
  options?: {
    enabled?: boolean
    refetchInterval?: number | false
  }
) {
  const paramsKey = serializeDocumentParams(params)
@@ -237,6 +238,7 @@ export function useKnowledgeDocumentsQuery(
    enabled: (options?.enabled ?? true) && Boolean(params.knowledgeBaseId),
    staleTime: 60 * 1000,
    placeholderData: keepPreviousData,
    refetchInterval: options?.refetchInterval ?? false,
  })
}


@@ -67,6 +67,7 @@ export function useKnowledgeBaseDocuments(
    sortBy?: string
    sortOrder?: string
    enabled?: boolean
    refetchInterval?: number | false
  }
) {
  const queryClient = useQueryClient()
@@ -92,6 +93,7 @@ export function useKnowledgeBaseDocuments(
    },
    {
      enabled: (options?.enabled ?? true) && Boolean(knowledgeBaseId),
      refetchInterval: options?.refetchInterval,
    }
  )


@@ -16,7 +16,7 @@ interface HeaderInfo {
interface Frontmatter {
  title?: string
  description?: string
  [key: string]: any
  [key: string]: unknown
}

const logger = createLogger('DocsChunker')

@@ -6,6 +6,11 @@ import { estimateTokenCount } from '@/lib/tokenization/estimators'

const logger = createLogger('JsonYamlChunker')

type JsonPrimitive = string | number | boolean | null
type JsonValue = JsonPrimitive | JsonObject | JsonArray
type JsonObject = { [key: string]: JsonValue }
type JsonArray = JsonValue[]
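// Illustrative only (not part of this diff): a value satisfying the
// recursive union above.
// const sample: JsonValue = { name: 'sim', tags: ['kb', 'ocr'], pages: 3, draft: null }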

function getTokenCount(text: string): number {
  try {
    return getAccurateTokenCount(text, 'text-embedding-3-small')
@@ -59,11 +64,11 @@ export class JsonYamlChunker {
   */
  async chunk(content: string): Promise<Chunk[]> {
    try {
      let data: any
      let data: JsonValue
      try {
        data = JSON.parse(content)
        data = JSON.parse(content) as JsonValue
      } catch {
        data = yaml.load(content)
        data = yaml.load(content) as JsonValue
      }
      const chunks = this.chunkStructuredData(data)

@@ -86,7 +91,7 @@ export class JsonYamlChunker {
  /**
   * Chunk structured data based on its structure
   */
  private chunkStructuredData(data: any, path: string[] = []): Chunk[] {
  private chunkStructuredData(data: JsonValue, path: string[] = []): Chunk[] {
    const chunks: Chunk[] = []

    if (Array.isArray(data)) {
@@ -94,7 +99,7 @@ export class JsonYamlChunker {
    }

    if (typeof data === 'object' && data !== null) {
      return this.chunkObject(data, path)
      return this.chunkObject(data as JsonObject, path)
    }

    const content = JSON.stringify(data, null, 2)
@@ -118,9 +123,9 @@ export class JsonYamlChunker {
  /**
   * Chunk an array intelligently
   */
  private chunkArray(arr: any[], path: string[]): Chunk[] {
  private chunkArray(arr: JsonArray, path: string[]): Chunk[] {
    const chunks: Chunk[] = []
    let currentBatch: any[] = []
    let currentBatch: JsonValue[] = []
    let currentTokens = 0

    const contextHeader = path.length > 0 ? `// ${path.join('.')}\n` : ''
@@ -194,7 +199,7 @@ export class JsonYamlChunker {
  /**
   * Chunk an object intelligently
   */
  private chunkObject(obj: Record<string, any>, path: string[]): Chunk[] {
  private chunkObject(obj: JsonObject, path: string[]): Chunk[] {
    const chunks: Chunk[] = []
    const entries = Object.entries(obj)

@@ -213,7 +218,7 @@ export class JsonYamlChunker {
      return chunks
    }

    let currentObj: Record<string, any> = {}
    let currentObj: JsonObject = {}
    let currentTokens = 0
    let currentKeys: string[] = []


@@ -110,10 +110,12 @@ export class TextChunker {
        chunks.push(currentChunk.trim())
      }

      // Start new chunk with current part
      // If part itself is too large, split it further
      if (this.estimateTokens(part) > this.chunkSize) {
        chunks.push(...(await this.splitRecursively(part, separatorIndex + 1)))
        const subChunks = await this.splitRecursively(part, separatorIndex + 1)
        for (const subChunk of subChunks) {
          chunks.push(subChunk)
        }
        currentChunk = ''
      } else {
        currentChunk = part

@@ -178,6 +178,7 @@ export const env = createEnv({
  KB_CONFIG_BATCH_SIZE: z.number().optional().default(2000), // Chunks to process per embedding batch
  KB_CONFIG_DELAY_BETWEEN_BATCHES: z.number().optional().default(0), // Delay between batches in ms (0 for max speed)
  KB_CONFIG_DELAY_BETWEEN_DOCUMENTS: z.number().optional().default(50), // Delay between documents in ms
  KB_CONFIG_CHUNK_CONCURRENCY: z.number().optional().default(10), // Concurrent PDF chunk OCR processing

  // Real-time Communication
  SOCKET_SERVER_URL: z.string().url().optional(), // WebSocket server URL for real-time features

@@ -17,8 +17,6 @@ export class DocParser implements FileParser {
        throw new Error(`File not found: ${filePath}`)
      }

      logger.info(`Parsing DOC file: ${filePath}`)

      const buffer = await readFile(filePath)
      return this.parseBuffer(buffer)
    } catch (error) {
@@ -29,53 +27,80 @@ export class DocParser implements FileParser {

  async parseBuffer(buffer: Buffer): Promise<FileParseResult> {
    try {
      logger.info('Parsing DOC buffer, size:', buffer.length)

      if (!buffer || buffer.length === 0) {
        throw new Error('Empty buffer provided')
      }

      let parseOfficeAsync
      try {
        const officeParser = await import('officeparser')
        parseOfficeAsync = officeParser.parseOfficeAsync
      } catch (importError) {
        logger.warn('officeparser not available, using fallback extraction')
        return this.fallbackExtraction(buffer)
        const result = await officeParser.parseOfficeAsync(buffer)

        if (result) {
          const resultString = typeof result === 'string' ? result : String(result)
          const content = sanitizeTextForUTF8(resultString.trim())

          if (content.length > 0) {
            return {
              content,
              metadata: {
                characterCount: content.length,
                extractionMethod: 'officeparser',
              },
            }
          }
        }
      } catch (officeError) {
        logger.warn('officeparser failed, trying mammoth:', officeError)
      }

      try {
        const result = await parseOfficeAsync(buffer)
        const mammoth = await import('mammoth')
        const result = await mammoth.extractRawText({ buffer })

        if (!result) {
          throw new Error('officeparser returned no result')
        if (result.value && result.value.trim().length > 0) {
          const content = sanitizeTextForUTF8(result.value.trim())
          return {
            content,
            metadata: {
              characterCount: content.length,
              extractionMethod: 'mammoth',
              messages: result.messages,
            },
          }
        }

        const resultString = typeof result === 'string' ? result : String(result)

        const content = sanitizeTextForUTF8(resultString.trim())

        logger.info('DOC parsing completed successfully with officeparser')

        return {
          content: content,
          metadata: {
            characterCount: content.length,
            extractionMethod: 'officeparser',
          },
        }
      } catch (extractError) {
        logger.warn('officeparser failed, using fallback:', extractError)
        return this.fallbackExtraction(buffer)
      } catch (mammothError) {
        logger.warn('mammoth failed:', mammothError)
      }

      return this.fallbackExtraction(buffer)
    } catch (error) {
      logger.error('DOC buffer parsing error:', error)
      logger.error('DOC parsing error:', error)
      throw new Error(`Failed to parse DOC buffer: ${(error as Error).message}`)
    }
  }

  private fallbackExtraction(buffer: Buffer): FileParseResult {
    logger.info('Using fallback text extraction for DOC file')
    const isBinaryDoc = buffer.length >= 2 && buffer[0] === 0xd0 && buffer[1] === 0xcf

    if (!isBinaryDoc) {
      const textContent = buffer.toString('utf8').trim()

      if (textContent.length > 0) {
        const printableChars = textContent.match(/[\x20-\x7E\n\r\t]/g)?.length || 0
        const isProbablyText = printableChars / textContent.length > 0.9

        if (isProbablyText) {
          return {
            content: sanitizeTextForUTF8(textContent),
            metadata: {
              extractionMethod: 'plaintext-fallback',
              characterCount: textContent.length,
              warning: 'File is not a valid DOC format, extracted as plain text',
            },
          }
        }
      }
    }

    const text = buffer.toString('utf8', 0, Math.min(buffer.length, 100000))


@@ -2,13 +2,18 @@ import { readFile } from 'fs/promises'
import { createLogger } from '@sim/logger'
import mammoth from 'mammoth'
import type { FileParseResult, FileParser } from '@/lib/file-parsers/types'
import { sanitizeTextForUTF8 } from '@/lib/file-parsers/utils'

const logger = createLogger('DocxParser')

// Define interface for mammoth result
interface MammothMessage {
  type: 'warning' | 'error'
  message: string
}

interface MammothResult {
  value: string
  messages: any[]
  messages: MammothMessage[]
}

export class DocxParser implements FileParser {
@@ -19,7 +24,6 @@ export class DocxParser implements FileParser {
      }

      const buffer = await readFile(filePath)

      return this.parseBuffer(buffer)
    } catch (error) {
      logger.error('DOCX file error:', error)
@@ -29,26 +33,74 @@ export class DocxParser implements FileParser {

  async parseBuffer(buffer: Buffer): Promise<FileParseResult> {
    try {
      logger.info('Parsing buffer, size:', buffer.length)
      if (!buffer || buffer.length === 0) {
        throw new Error('Empty buffer provided')
      }

      const result = await mammoth.extractRawText({ buffer })

      let htmlResult: MammothResult = { value: '', messages: [] }
      try {
        htmlResult = await mammoth.convertToHtml({ buffer })
      } catch (htmlError) {
        logger.warn('HTML conversion warning:', htmlError)
        const result = await mammoth.extractRawText({ buffer })

        if (result.value && result.value.trim().length > 0) {
          let htmlResult: MammothResult = { value: '', messages: [] }
          try {
            htmlResult = await mammoth.convertToHtml({ buffer })
          } catch {
            // HTML conversion is optional
          }

          return {
            content: sanitizeTextForUTF8(result.value),
            metadata: {
              extractionMethod: 'mammoth',
              messages: [...result.messages, ...htmlResult.messages],
              html: htmlResult.value,
            },
          }
        }
      } catch (mammothError) {
        logger.warn('mammoth failed, trying officeparser:', mammothError)
      }

      return {
        content: result.value,
        metadata: {
          messages: [...result.messages, ...htmlResult.messages],
          html: htmlResult.value,
        },
      try {
        const officeParser = await import('officeparser')
        const result = await officeParser.parseOfficeAsync(buffer)

        if (result) {
          const resultString = typeof result === 'string' ? result : String(result)
          const content = sanitizeTextForUTF8(resultString.trim())

          if (content.length > 0) {
            return {
              content,
              metadata: {
                extractionMethod: 'officeparser',
                characterCount: content.length,
              },
            }
          }
        }
      } catch (officeError) {
        logger.warn('officeparser failed:', officeError)
      }

      const isZipFile = buffer.length >= 2 && buffer[0] === 0x50 && buffer[1] === 0x4b
      if (!isZipFile) {
        const textContent = buffer.toString('utf8').trim()
        if (textContent.length > 0) {
          return {
            content: sanitizeTextForUTF8(textContent),
            metadata: {
              extractionMethod: 'plaintext-fallback',
              characterCount: textContent.length,
              warning: 'File is not a valid DOCX format, extracted as plain text',
            },
          }
        }
      }

      throw new Error('Failed to extract text from DOCX file')
    } catch (error) {
      logger.error('DOCX buffer parsing error:', error)
      logger.error('DOCX parsing error:', error)
      throw new Error(`Failed to parse DOCX buffer: ${(error as Error).message}`)
    }
  }

@@ -1,6 +1,22 @@
export interface FileParseMetadata {
  characterCount?: number
  pageCount?: number
  extractionMethod?: string
  warning?: string
  messages?: unknown[]
  html?: string
  type?: string
  headers?: string[]
  totalRows?: number
  rowCount?: number
  sheetNames?: string[]
  source?: string
  [key: string]: unknown
}

export interface FileParseResult {
  content: string
  metadata?: Record<string, any>
  metadata?: FileParseMetadata
}

export interface FileParser {

@@ -1,8 +1,10 @@
import { createLogger } from '@sim/logger'
import { PDFDocument } from 'pdf-lib'
import { getBYOKKey } from '@/lib/api-key/byok'
import { type Chunk, JsonYamlChunker, StructuredDataChunker, TextChunker } from '@/lib/chunkers'
import { env } from '@/lib/core/config/env'
import { parseBuffer, parseFile } from '@/lib/file-parsers'
import type { FileParseMetadata } from '@/lib/file-parsers/types'
import { retryWithExponentialBackoff } from '@/lib/knowledge/documents/utils'
import { StorageService } from '@/lib/uploads'
import { downloadFileFromUrl } from '@/lib/uploads/utils/file-utils.server'
@@ -15,6 +17,8 @@ const TIMEOUTS = {
  MISTRAL_OCR_API: 120000,
} as const

const MAX_CONCURRENT_CHUNKS = env.KB_CONFIG_CHUNK_CONCURRENCY

type OCRResult = {
  success: boolean
  error?: string
@@ -36,6 +40,61 @@ type OCRRequestBody = {
  include_image_base64: boolean
}

const MISTRAL_MAX_PAGES = 1000

/**
 * Get page count from a PDF buffer using unpdf
 */
async function getPdfPageCount(buffer: Buffer): Promise<number> {
  try {
    const { getDocumentProxy } = await import('unpdf')
    const uint8Array = new Uint8Array(buffer)
    const pdf = await getDocumentProxy(uint8Array)
    return pdf.numPages
  } catch (error) {
    logger.warn('Failed to get PDF page count:', error)
    return 0
  }
}

/**
 * Split a PDF buffer into multiple smaller PDFs
 * Returns an array of PDF buffers, each with at most maxPages pages
 */
async function splitPdfIntoChunks(
  pdfBuffer: Buffer,
  maxPages: number
): Promise<{ buffer: Buffer; startPage: number; endPage: number }[]> {
  const sourcePdf = await PDFDocument.load(pdfBuffer)
  const totalPages = sourcePdf.getPageCount()

  if (totalPages <= maxPages) {
    return [{ buffer: pdfBuffer, startPage: 0, endPage: totalPages - 1 }]
  }

  const chunks: { buffer: Buffer; startPage: number; endPage: number }[] = []

  for (let startPage = 0; startPage < totalPages; startPage += maxPages) {
    const endPage = Math.min(startPage + maxPages - 1, totalPages - 1)
    const pageCount = endPage - startPage + 1

    const newPdf = await PDFDocument.create()
    const pageIndices = Array.from({ length: pageCount }, (_, i) => startPage + i)
    const copiedPages = await newPdf.copyPages(sourcePdf, pageIndices)

    copiedPages.forEach((page) => newPdf.addPage(page))

    const pdfBytes = await newPdf.save()
    chunks.push({
      buffer: Buffer.from(pdfBytes),
      startPage,
      endPage,
    })
  }

  return chunks
}
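// Worked example (illustrative, not in this diff): with maxPages = 1000, a
// 2345-page PDF yields three chunks covering pages 0-999, 1000-1999, and
// 2000-2344; a PDF at or under 1000 pages is returned unsplit.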

type AzureOCRResponse = {
  pages?: OCRPage[]
  [key: string]: unknown
@@ -81,7 +140,7 @@ export async function processDocument(
  const cloudUrl = 'cloudUrl' in parseResult ? parseResult.cloudUrl : undefined

  let chunks: Chunk[]
  const metadata = 'metadata' in parseResult ? parseResult.metadata : {}
  const metadata: FileParseMetadata = parseResult.metadata ?? {}

  const isJsonYaml =
    metadata.type === 'json' ||
@@ -97,10 +156,11 @@ export async function processDocument(
    })
  } else if (StructuredDataChunker.isStructuredData(content, mimeType)) {
    logger.info('Using structured data chunker for spreadsheet/CSV content')
    const rowCount = metadata.totalRows ?? metadata.rowCount
    chunks = await StructuredDataChunker.chunkStructuredData(content, {
      chunkSize,
      headers: metadata.headers,
      totalRows: metadata.totalRows || metadata.rowCount,
      totalRows: typeof rowCount === 'number' ? rowCount : undefined,
      sheetName: metadata.sheetNames?.[0],
    })
  } else {
@@ -153,7 +213,7 @@ async function parseDocument(
  content: string
  processingMethod: 'file-parser' | 'mistral-ocr'
  cloudUrl?: string
  metadata?: any
  metadata?: FileParseMetadata
}> {
  const isPDF = mimeType === 'application/pdf'
  const hasAzureMistralOCR =
@@ -165,7 +225,7 @@ async function parseDocument(
  if (isPDF && (hasAzureMistralOCR || hasMistralOCR)) {
    if (hasAzureMistralOCR) {
      logger.info(`Using Azure Mistral OCR: ${filename}`)
      return parseWithAzureMistralOCR(fileUrl, filename, mimeType, userId, workspaceId)
      return parseWithAzureMistralOCR(fileUrl, filename, mimeType)
    }

    if (hasMistralOCR) {
@@ -188,13 +248,32 @@ async function handleFileForOCR(
  const isExternalHttps = fileUrl.startsWith('https://') && !fileUrl.includes('/api/files/serve/')

  if (isExternalHttps) {
    return { httpsUrl: fileUrl }
    if (mimeType === 'application/pdf') {
      logger.info(`handleFileForOCR: Downloading external PDF to check page count`)
      try {
        const buffer = await downloadFileWithTimeout(fileUrl)
        logger.info(`handleFileForOCR: Downloaded external PDF: ${buffer.length} bytes`)
        return { httpsUrl: fileUrl, buffer }
      } catch (error) {
        logger.warn(
          `handleFileForOCR: Failed to download external PDF for page count check, proceeding without batching`,
          {
            error: error instanceof Error ? error.message : String(error),
          }
        )
        return { httpsUrl: fileUrl, buffer: undefined }
      }
    }
    logger.info(`handleFileForOCR: Using external URL directly`)
    return { httpsUrl: fileUrl, buffer: undefined }
  }

  logger.info(`Uploading "${filename}" to cloud storage for OCR`)

  const buffer = await downloadFileWithTimeout(fileUrl)

  logger.info(`Downloaded ${filename}: ${buffer.length} bytes`)

  try {
    const metadata: Record<string, string> = {
      originalName: filename,
@@ -224,8 +303,7 @@ async function handleFileForOCR(
      900 // 15 minutes
    )

    logger.info(`Successfully uploaded for OCR: ${cloudResult.key}`)
    return { httpsUrl, cloudUrl: httpsUrl }
    return { httpsUrl, cloudUrl: httpsUrl, buffer }
  } catch (uploadError) {
    const message = uploadError instanceof Error ? uploadError.message : 'Unknown error'
    throw new Error(`Cloud upload failed: ${message}. Cloud upload is required for OCR.`)
@@ -321,13 +399,7 @@ async function makeOCRRequest(
  }
}

async function parseWithAzureMistralOCR(
  fileUrl: string,
  filename: string,
  mimeType: string,
  userId?: string,
  workspaceId?: string | null
) {
async function parseWithAzureMistralOCR(fileUrl: string, filename: string, mimeType: string) {
  validateOCRConfig(
    env.OCR_AZURE_API_KEY,
    env.OCR_AZURE_ENDPOINT,
@@ -336,6 +408,19 @@ async function parseWithAzureMistralOCR(
  )

  const fileBuffer = await downloadFileForBase64(fileUrl)

  if (mimeType === 'application/pdf') {
    const pageCount = await getPdfPageCount(fileBuffer)
    if (pageCount > MISTRAL_MAX_PAGES) {
      logger.info(
        `PDF has ${pageCount} pages, exceeds Azure OCR limit of ${MISTRAL_MAX_PAGES}. ` +
          `Falling back to file parser.`
      )
      return parseWithFileParser(fileUrl, filename, mimeType)
    }
    logger.info(`Azure Mistral OCR: PDF page count for ${filename}: ${pageCount}`)
  }

  const base64Data = fileBuffer.toString('base64')
  const dataUri = `data:${mimeType};base64,${base64Data}`

@@ -374,17 +459,7 @@ async function parseWithAzureMistralOCR(
      message: error instanceof Error ? error.message : String(error),
    })

    const fallbackMistralKey = await getMistralApiKey(workspaceId)
    if (fallbackMistralKey) {
      return parseWithMistralOCR(
        fileUrl,
        filename,
        mimeType,
        userId,
        workspaceId,
        fallbackMistralKey
      )
    }
    logger.info(`Falling back to file parser: ${filename}`)
    return parseWithFileParser(fileUrl, filename, mimeType)
  }
}
@@ -406,50 +481,35 @@ async function parseWithMistralOCR(
    throw new Error('Mistral parser tool not configured')
  }

  const { httpsUrl, cloudUrl } = await handleFileForOCR(
  const { httpsUrl, cloudUrl, buffer } = await handleFileForOCR(
    fileUrl,
    filename,
    mimeType,
    userId,
    workspaceId
  )

  logger.info(`Mistral OCR: Using presigned URL for ${filename}: ${httpsUrl.substring(0, 120)}...`)

  let pageCount = 0
  if (mimeType === 'application/pdf' && buffer) {
    pageCount = await getPdfPageCount(buffer)
    logger.info(`PDF page count for ${filename}: ${pageCount}`)
  }

  const needsBatching = pageCount > MISTRAL_MAX_PAGES

  if (needsBatching && buffer) {
    logger.info(
      `PDF has ${pageCount} pages, exceeds limit of ${MISTRAL_MAX_PAGES}. Splitting and processing in chunks.`
    )
    return processMistralOCRInBatches(filename, apiKey, buffer, userId, cloudUrl)
  }

  const params = { filePath: httpsUrl, apiKey, resultType: 'text' as const }

  try {
    const response = await retryWithExponentialBackoff(
      async () => {
        let url =
          typeof mistralParserTool.request!.url === 'function'
            ? mistralParserTool.request!.url(params)
            : mistralParserTool.request!.url

        const isInternalRoute = url.startsWith('/')

        if (isInternalRoute) {
          const { getBaseUrl } = await import('@/lib/core/utils/urls')
          url = `${getBaseUrl()}${url}`
        }

        let headers =
          typeof mistralParserTool.request!.headers === 'function'
            ? mistralParserTool.request!.headers(params)
            : mistralParserTool.request!.headers

        if (isInternalRoute) {
          const { generateInternalToken } = await import('@/lib/auth/internal')
          const internalToken = await generateInternalToken(userId)
          headers = {
            ...headers,
            Authorization: `Bearer ${internalToken}`,
          }
        }

        const requestBody = mistralParserTool.request!.body!(params) as OCRRequestBody
        return makeOCRRequest(url, headers as Record<string, string>, requestBody)
      },
      { maxRetries: 3, initialDelayMs: 1000, maxDelayMs: 10000 }
    )

    const response = await executeMistralOCRRequest(params, userId)
    const result = (await mistralParserTool.transformResponse!(response, params)) as OCRResult
    const content = processOCRContent(result, filename)

@@ -464,10 +524,204 @@ async function parseWithMistralOCR(
  }
}

async function executeMistralOCRRequest(
  params: { filePath: string; apiKey: string; resultType: 'text' },
  userId?: string
): Promise<Response> {
  return retryWithExponentialBackoff(
    async () => {
      let url =
        typeof mistralParserTool.request!.url === 'function'
          ? mistralParserTool.request!.url(params)
          : mistralParserTool.request!.url

      const isInternalRoute = url.startsWith('/')

      if (isInternalRoute) {
        const { getBaseUrl } = await import('@/lib/core/utils/urls')
        url = `${getBaseUrl()}${url}`
      }

      let headers =
        typeof mistralParserTool.request!.headers === 'function'
          ? mistralParserTool.request!.headers(params)
          : mistralParserTool.request!.headers

      if (isInternalRoute) {
        const { generateInternalToken } = await import('@/lib/auth/internal')
        const internalToken = await generateInternalToken(userId)
        headers = {
          ...headers,
          Authorization: `Bearer ${internalToken}`,
        }
      }

      const requestBody = mistralParserTool.request!.body!(params) as OCRRequestBody
      return makeOCRRequest(url, headers as Record<string, string>, requestBody)
    },
    { maxRetries: 3, initialDelayMs: 1000, maxDelayMs: 10000 }
  )
}

/**
 * Process a single PDF chunk: upload to S3, OCR, cleanup
 */
async function processChunk(
  chunk: { buffer: Buffer; startPage: number; endPage: number },
  chunkIndex: number,
  totalChunks: number,
  filename: string,
  apiKey: string,
  userId?: string
): Promise<{ index: number; content: string | null }> {
  const chunkPageCount = chunk.endPage - chunk.startPage + 1

  logger.info(
    `Processing chunk ${chunkIndex + 1}/${totalChunks} (pages ${chunk.startPage + 1}-${chunk.endPage + 1}, ${chunkPageCount} pages)`
  )

  let uploadedKey: string | null = null

  try {
    // Upload the chunk to S3
    const timestamp = Date.now()
    const uniqueId = Math.random().toString(36).substring(2, 9)
    const safeFileName = filename.replace(/[^a-zA-Z0-9.-]/g, '_')
    const chunkKey = `kb/${timestamp}-${uniqueId}-chunk${chunkIndex + 1}-${safeFileName}`

    const metadata: Record<string, string> = {
      originalName: `${filename}_chunk${chunkIndex + 1}`,
      uploadedAt: new Date().toISOString(),
      purpose: 'knowledge-base',
      ...(userId && { userId }),
    }

    const uploadResult = await StorageService.uploadFile({
      file: chunk.buffer,
      fileName: `${filename}_chunk${chunkIndex + 1}`,
      contentType: 'application/pdf',
      context: 'knowledge-base',
      customKey: chunkKey,
      metadata,
    })

    uploadedKey = uploadResult.key

    const chunkUrl = await StorageService.generatePresignedDownloadUrl(
      uploadResult.key,
      'knowledge-base',
      900 // 15 minutes
    )

    logger.info(`Uploaded chunk ${chunkIndex + 1} to S3: ${chunkKey}`)

    // Process the chunk with Mistral OCR
    const params = {
      filePath: chunkUrl,
      apiKey,
      resultType: 'text' as const,
    }

    const response = await executeMistralOCRRequest(params, userId)
    const result = (await mistralParserTool.transformResponse!(response, params)) as OCRResult

    if (result.success && result.output?.content) {
      logger.info(`Chunk ${chunkIndex + 1}/${totalChunks} completed successfully`)
      return { index: chunkIndex, content: result.output.content }
    }
    logger.warn(`Chunk ${chunkIndex + 1}/${totalChunks} returned no content`)
    return { index: chunkIndex, content: null }
  } catch (error) {
    logger.error(`Chunk ${chunkIndex + 1}/${totalChunks} failed:`, {
      message: error instanceof Error ? error.message : String(error),
    })
    return { index: chunkIndex, content: null }
  } finally {
    // Clean up the chunk file from S3 after processing
    if (uploadedKey) {
      try {
        await StorageService.deleteFile({ key: uploadedKey, context: 'knowledge-base' })
        logger.info(`Cleaned up chunk ${chunkIndex + 1} from S3`)
      } catch (deleteError) {
        logger.warn(`Failed to clean up chunk ${chunkIndex + 1} from S3:`, {
          message: deleteError instanceof Error ? deleteError.message : String(deleteError),
        })
      }
    }
  }
}

async function processMistralOCRInBatches(
  filename: string,
  apiKey: string,
  pdfBuffer: Buffer,
  userId?: string,
  cloudUrl?: string
): Promise<{
  content: string
  processingMethod: 'mistral-ocr'
  cloudUrl?: string
}> {
  const totalPages = await getPdfPageCount(pdfBuffer)
  logger.info(
    `Splitting ${filename} (${totalPages} pages) into chunks of ${MISTRAL_MAX_PAGES} pages`
  )

  const pdfChunks = await splitPdfIntoChunks(pdfBuffer, MISTRAL_MAX_PAGES)
  logger.info(
    `Split into ${pdfChunks.length} chunks, processing with concurrency ${MAX_CONCURRENT_CHUNKS}`
  )

  // Process chunks concurrently with limited concurrency
  const results: { index: number; content: string | null }[] = []

  for (let i = 0; i < pdfChunks.length; i += MAX_CONCURRENT_CHUNKS) {
    const batch = pdfChunks.slice(i, i + MAX_CONCURRENT_CHUNKS)
    const batchPromises = batch.map((chunk, batchIndex) =>
      processChunk(chunk, i + batchIndex, pdfChunks.length, filename, apiKey, userId)
    )

    const batchResults = await Promise.all(batchPromises)
    for (const result of batchResults) {
      results.push(result)
    }

    logger.info(
      `Completed batch ${Math.floor(i / MAX_CONCURRENT_CHUNKS) + 1}/${Math.ceil(pdfChunks.length / MAX_CONCURRENT_CHUNKS)}`
    )
  }

  // Sort by index to maintain page order and filter out nulls
  const sortedResults = results
    .sort((a, b) => a.index - b.index)
    .filter((r) => r.content !== null)
    .map((r) => r.content as string)

  if (sortedResults.length === 0) {
    // Don't fall back to file parser for large PDFs - it produces poor results
    // Better to fail clearly than return low-quality extraction
    throw new Error(
      `OCR failed for all ${pdfChunks.length} chunks of ${filename}. ` +
        `Large PDFs require OCR - file parser fallback would produce poor results.`
    )
  }

  const combinedContent = sortedResults.join('\n\n')
  logger.info(
    `Successfully processed ${sortedResults.length}/${pdfChunks.length} chunks for ${filename}`
  )

  return {
    content: combinedContent,
    processingMethod: 'mistral-ocr',
    cloudUrl,
  }
}

async function parseWithFileParser(fileUrl: string, filename: string, mimeType: string) {
  try {
    let content: string
    let metadata: any = {}
    let metadata: FileParseMetadata = {}

    if (fileUrl.startsWith('data:')) {
      content = await parseDataURI(fileUrl, filename, mimeType)
@@ -513,7 +767,7 @@ async function parseDataURI(fileUrl: string, filename: string, mimeType: string)
async function parseHttpFile(
  fileUrl: string,
  filename: string
): Promise<{ content: string; metadata?: any }> {
): Promise<{ content: string; metadata?: FileParseMetadata }> {
  const buffer = await downloadFileWithTimeout(fileUrl)

  const extension = filename.split('.').pop()?.toLowerCase()
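
Taken together, the OCR path above reduces to a bounded-concurrency map over PDF chunks. A condensed, self-contained sketch of the same wave-based pattern (the helper name is illustrative; the diff inlines this logic in processMistralOCRInBatches):

// Process items in waves of `limit` concurrent promises, preserving input
// order in the combined result.
async function mapWithConcurrency<T, R>(
  items: T[],
  limit: number,
  fn: (item: T, index: number) => Promise<R>
): Promise<R[]> {
  const results: R[] = []
  for (let i = 0; i < items.length; i += limit) {
    const wave = items.slice(i, i + limit)
    const waveResults = await Promise.all(wave.map((item, j) => fn(item, i + j)))
    results.push(...waveResults)
  }
  return results
}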

@@ -212,7 +212,6 @@ export async function processDocumentTags(
    return result
  }

  // Fetch existing tag definitions
  const existingDefinitions = await db
    .select()
    .from(knowledgeBaseTagDefinitions)
@@ -220,18 +219,15 @@ export async function processDocumentTags(

  const existingByName = new Map(existingDefinitions.map((def) => [def.displayName, def]))

  // First pass: collect all validation errors
  const undefinedTags: string[] = []
  const typeErrors: string[] = []

  for (const tag of tagData) {
    // Skip if no tag name
    if (!tag.tagName?.trim()) continue

    const tagName = tag.tagName.trim()
    const fieldType = tag.fieldType || 'text'

    // For boolean, check if value is defined; for others, check if value is non-empty
    const hasValue =
      fieldType === 'boolean'
        ? tag.value !== undefined && tag.value !== null && tag.value !== ''
@@ -239,14 +235,12 @@ export async function processDocumentTags(

    if (!hasValue) continue

    // Check if tag exists
    const existingDef = existingByName.get(tagName)
    if (!existingDef) {
      undefinedTags.push(tagName)
      continue
    }

    // Validate value type using shared validation
    const rawValue = typeof tag.value === 'string' ? tag.value.trim() : tag.value
    const actualFieldType = existingDef.fieldType || fieldType
    const validationError = validateTagValue(tagName, String(rawValue), actualFieldType)
@@ -255,7 +249,6 @@ export async function processDocumentTags(
    }
  }

  // Throw combined error if there are any validation issues
  if (undefinedTags.length > 0 || typeErrors.length > 0) {
    const errorParts: string[] = []

@@ -270,7 +263,6 @@ export async function processDocumentTags(
    throw new Error(errorParts.join('\n'))
  }

  // Second pass: process valid tags
  for (const tag of tagData) {
    if (!tag.tagName?.trim()) continue

@@ -285,14 +277,13 @@ export async function processDocumentTags(
    if (!hasValue) continue

    const existingDef = existingByName.get(tagName)
    if (!existingDef) continue // Already validated above
    if (!existingDef) continue

    const targetSlot = existingDef.tagSlot
    const actualFieldType = existingDef.fieldType || fieldType
    const rawValue = typeof tag.value === 'string' ? tag.value.trim() : tag.value
    const stringValue = String(rawValue).trim()

    // Assign value to the slot with proper type conversion (values already validated)
    if (actualFieldType === 'boolean') {
      setTagValue(result, targetSlot, parseBooleanValue(stringValue) ?? false)
    } else if (actualFieldType === 'number') {
@@ -440,7 +431,6 @@ export async function processDocumentAsync(

    logger.info(`[${documentId}] Status updated to 'processing', starting document processor`)

    // Use KB's chunkingConfig as fallback if processingOptions not provided
    const kbConfig = kb[0].chunkingConfig as { maxSize: number; minSize: number; overlap: number }

    await withTimeout(
@@ -469,7 +459,6 @@ export async function processDocumentAsync(
      `[${documentId}] Document parsed successfully, generating embeddings for ${processed.chunks.length} chunks`
    )

    // Generate embeddings in batches for large documents
    const chunkTexts = processed.chunks.map((chunk) => chunk.text)
    const embeddings: number[][] = []

@@ -485,7 +474,9 @@ export async function processDocumentAsync(

        logger.info(`[${documentId}] Processing embedding batch ${batchNum}/${totalBatches}`)
        const batchEmbeddings = await generateEmbeddings(batch, undefined, kb[0].workspaceId)
        embeddings.push(...batchEmbeddings)
        for (const emb of batchEmbeddings) {
          embeddings.push(emb)
        }
      }
    }

@@ -562,23 +553,18 @@ export async function processDocumentAsync(
    }))

    await db.transaction(async (tx) => {
      // Insert embeddings in batches for large documents
      if (embeddingRecords.length > 0) {
        const batchSize = LARGE_DOC_CONFIG.MAX_CHUNKS_PER_BATCH
        const totalBatches = Math.ceil(embeddingRecords.length / batchSize)
        await tx.delete(embedding).where(eq(embedding.documentId, documentId))

        logger.info(
          `[${documentId}] Inserting ${embeddingRecords.length} embeddings in ${totalBatches} batches`
        )

        for (let i = 0; i < embeddingRecords.length; i += batchSize) {
          const batch = embeddingRecords.slice(i, i + batchSize)
          const batchNum = Math.floor(i / batchSize) + 1
        const insertBatchSize = LARGE_DOC_CONFIG.MAX_CHUNKS_PER_BATCH
        const batches: (typeof embeddingRecords)[] = []
        for (let i = 0; i < embeddingRecords.length; i += insertBatchSize) {
          batches.push(embeddingRecords.slice(i, i + insertBatchSize))
        }

        logger.info(`[${documentId}] Inserting ${embeddingRecords.length} embeddings`)
        for (const batch of batches) {
          await tx.insert(embedding).values(batch)
          logger.info(
            `[${documentId}] Inserted batch ${batchNum}/${totalBatches} (${batch.length} records)`
          )
        }
      }

@@ -689,11 +675,9 @@ export async function createDocumentRecords(
  requestId: string,
  userId?: string
): Promise<DocumentData[]> {
  // Check storage limits before creating documents
  if (userId) {
    const totalSize = documents.reduce((sum, doc) => sum + doc.fileSize, 0)

    // Get knowledge base owner
    const kb = await db
      .select({ userId: knowledgeBase.userId })
      .from(knowledgeBase)
@@ -713,7 +697,7 @@ export async function createDocumentRecords(
  for (const docData of documents) {
    const documentId = randomUUID()

    let processedTags: Record<string, any> = {}
    let processedTags: Partial<ProcessedDocumentTags> = {}

    if (docData.documentTagsData) {
      try {
@@ -722,7 +706,6 @@ export async function createDocumentRecords(
          processedTags = await processDocumentTags(knowledgeBaseId, tagData, requestId)
        }
      } catch (error) {
        // Re-throw validation errors, only catch JSON parse errors
        if (error instanceof SyntaxError) {
          logger.warn(`[${requestId}] Failed to parse documentTagsData for bulk document:`, error)
        } else {
@@ -791,7 +774,6 @@ export async function createDocumentRecords(
  if (userId) {
    const totalSize = documents.reduce((sum, doc) => sum + doc.fileSize, 0)

    // Get knowledge base owner
    const kb = await db
      .select({ userId: knowledgeBase.userId })
      .from(knowledgeBase)
@@ -1079,7 +1061,7 @@ export async function createSingleDocument(
  const now = new Date()

  // Process structured tag data if provided
  let processedTags: Record<string, any> = {
  let processedTags: ProcessedDocumentTags = {
    // Text tags (7 slots)
    tag1: documentData.tag1 ?? null,
    tag2: documentData.tag2 ?? null,
@@ -1555,23 +1537,30 @@ export async function updateDocument(
    return value || null
  }

  // Type-safe access to tag slots in updateData
  type UpdateDataWithTags = typeof updateData & Record<TagSlot, string | undefined>
  const typedUpdateData = updateData as UpdateDataWithTags

  ALL_TAG_SLOTS.forEach((slot: TagSlot) => {
    const updateValue = (updateData as any)[slot]
    const updateValue = typedUpdateData[slot]
    if (updateValue !== undefined) {
      ;(dbUpdateData as any)[slot] = convertTagValue(slot, updateValue)
      ;(dbUpdateData as Record<TagSlot, string | number | Date | boolean | null>)[slot] =
        convertTagValue(slot, updateValue)
    }
  })

  await db.transaction(async (tx) => {
    await tx.update(document).set(dbUpdateData).where(eq(document.id, documentId))

    const hasTagUpdates = ALL_TAG_SLOTS.some((field) => (updateData as any)[field] !== undefined)
    const hasTagUpdates = ALL_TAG_SLOTS.some((field) => typedUpdateData[field] !== undefined)

    if (hasTagUpdates) {
      const embeddingUpdateData: Record<string, any> = {}
      const embeddingUpdateData: Partial<ProcessedDocumentTags> = {}
      ALL_TAG_SLOTS.forEach((field) => {
        if ((updateData as any)[field] !== undefined) {
          embeddingUpdateData[field] = convertTagValue(field, (updateData as any)[field])
        if (typedUpdateData[field] !== undefined) {
          ;(embeddingUpdateData as Record<TagSlot, string | number | Date | boolean | null>)[
            field
          ] = convertTagValue(field, typedUpdateData[field])
        }
      })


@@ -14,7 +14,7 @@ export interface RetryOptions {
  initialDelayMs?: number
  maxDelayMs?: number
  backoffMultiplier?: number
  retryCondition?: (error: RetryableError) => boolean
  retryCondition?: (error: unknown) => boolean
}

export interface RetryResult<T> {
@@ -30,11 +30,18 @@ function hasStatus(
  return typeof error === 'object' && error !== null && 'status' in error
}

function isRetryableErrorType(error: unknown): error is RetryableError {
  if (!error) return false
  if (error instanceof Error) return true
  if (typeof error === 'object' && ('status' in error || 'message' in error)) return true
  return false
}

/**
 * Default retry condition for rate limiting errors
 */
export function isRetryableError(error: RetryableError): boolean {
  if (!error) return false
export function isRetryableError(error: unknown): boolean {
  if (!isRetryableErrorType(error)) return false

  // Check for rate limiting status codes
  if (
@@ -45,7 +52,7 @@ export function isRetryableError(error: RetryableError): boolean {
  }

  // Check for rate limiting in error messages
  const errorMessage = error.message || error.toString()
  const errorMessage = error instanceof Error ? error.message : String(error)
  const rateLimitKeywords = [
    'rate limit',
    'rate_limit',

@@ -2,7 +2,7 @@ import { createLogger } from '@sim/logger'
import { getBYOKKey } from '@/lib/api-key/byok'
import { env } from '@/lib/core/config/env'
import { isRetryableError, retryWithExponentialBackoff } from '@/lib/knowledge/documents/utils'
import { batchByTokenLimit, getTotalTokenCount } from '@/lib/tokenization'
import { batchByTokenLimit } from '@/lib/tokenization'

const logger = createLogger('EmbeddingUtils')

@@ -26,6 +26,20 @@ interface EmbeddingConfig {
  modelName: string
}

interface EmbeddingResponseItem {
  embedding: number[]
  index: number
}

interface EmbeddingAPIResponse {
  data: EmbeddingResponseItem[]
  model: string
  usage: {
    prompt_tokens: number
    total_tokens: number
  }
}

async function getEmbeddingConfig(
  embeddingModel = 'text-embedding-3-small',
  workspaceId?: string | null
@@ -104,14 +118,14 @@ async function callEmbeddingAPI(inputs: string[], config: EmbeddingConfig): Prom
        )
      }

      const data = await response.json()
      return data.data.map((item: any) => item.embedding)
      const data: EmbeddingAPIResponse = await response.json()
      return data.data.map((item) => item.embedding)
    },
    {
      maxRetries: 3,
      initialDelayMs: 1000,
      maxDelayMs: 10000,
      retryCondition: (error: any) => {
      retryCondition: (error: unknown) => {
        if (error instanceof EmbeddingAPIError) {
          return error.status === 429 || error.status >= 500
        }
@@ -153,44 +167,27 @@ export async function generateEmbeddings(
): Promise<number[][]> {
  const config = await getEmbeddingConfig(embeddingModel, workspaceId)

  logger.info(
    `Using ${config.useAzure ? 'Azure OpenAI' : 'OpenAI'} for embeddings generation (${texts.length} texts)`
  )

  const batches = batchByTokenLimit(texts, MAX_TOKENS_PER_REQUEST, embeddingModel)

  logger.info(
    `Split ${texts.length} texts into ${batches.length} batches (max ${MAX_TOKENS_PER_REQUEST} tokens per batch, ${MAX_CONCURRENT_BATCHES} concurrent)`
  )

  const batchResults = await processWithConcurrency(
    batches,
    MAX_CONCURRENT_BATCHES,
    async (batch, i) => {
      const batchTokenCount = getTotalTokenCount(batch, embeddingModel)

      logger.info(
        `Processing batch ${i + 1}/${batches.length}: ${batch.length} texts, ${batchTokenCount} tokens`
      )

      try {
        const batchEmbeddings = await callEmbeddingAPI(batch, config)

        logger.info(
          `Generated ${batchEmbeddings.length} embeddings for batch ${i + 1}/${batches.length}`
        )

        return batchEmbeddings
        return await callEmbeddingAPI(batch, config)
      } catch (error) {
        logger.error(`Failed to generate embeddings for batch ${i + 1}:`, error)
        logger.error(`Failed to generate embeddings for batch ${i + 1}/${batches.length}:`, error)
        throw error
      }
    }
  )

  const allEmbeddings = batchResults.flat()

  logger.info(`Successfully generated ${allEmbeddings.length} embeddings total`)
  const allEmbeddings: number[][] = []
  for (const batch of batchResults) {
    for (const emb of batch) {
      allEmbeddings.push(emb)
    }
  }

  return allEmbeddings
}
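
With `retryCondition` widened to `(error: unknown) => boolean`, callers narrow the error themselves, as the embedding client does above with `instanceof`. A condensed sketch of that pattern:

// Retry only rate limits and server errors; anything that is not an
// EmbeddingAPIError is treated as non-retryable.
const shouldRetry = (error: unknown): boolean =>
  error instanceof EmbeddingAPIError && (error.status === 429 || error.status >= 500)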
|
||||
|
||||
@@ -769,6 +769,80 @@ describe('buildTraceSpans', () => {
    expect(functionSpan?.status).toBe('error')
    expect((functionSpan?.output as { error?: string })?.error).toContain('Syntax Error')
  })

  test('should remove childTraceSpans from output after integrating them as children', () => {
    const mockExecutionResult: ExecutionResult = {
      success: true,
      output: { result: 'parent output' },
      logs: [
        {
          blockId: 'workflow-1',
          blockName: 'Parent Workflow',
          blockType: 'workflow',
          startedAt: '2024-01-01T10:00:00.000Z',
          endedAt: '2024-01-01T10:00:05.000Z',
          durationMs: 5000,
          success: true,
          output: {
            success: true,
            childWorkflowName: 'Child Workflow',
            result: { data: 'some result' },
            childTraceSpans: [
              {
                id: 'child-block-1',
                name: 'Supabase Query',
                type: 'supabase',
                blockId: 'supabase-1',
                duration: 2000,
                startTime: '2024-01-01T10:00:01.000Z',
                endTime: '2024-01-01T10:00:03.000Z',
                status: 'success' as const,
                output: {
                  records: [
                    { id: 1, logo: 'data:image/png;base64,VeryLargeBase64StringHere...' },
                    { id: 2, logo: 'data:image/png;base64,AnotherLargeBase64StringHere...' },
                  ],
                },
              },
              {
                id: 'child-block-2',
                name: 'Transform Data',
                type: 'function',
                blockId: 'function-1',
                duration: 500,
                startTime: '2024-01-01T10:00:03.000Z',
                endTime: '2024-01-01T10:00:03.500Z',
                status: 'success' as const,
                output: { transformed: true },
              },
            ],
          },
        },
      ],
    }

    const { traceSpans } = buildTraceSpans(mockExecutionResult)

    expect(traceSpans).toHaveLength(1)
    const workflowSpan = traceSpans[0]
    expect(workflowSpan.type).toBe('workflow')

    expect(workflowSpan.children).toBeDefined()
    expect(workflowSpan.children).toHaveLength(2)
    expect(workflowSpan.children?.[0].name).toBe('Supabase Query')
    expect(workflowSpan.children?.[1].name).toBe('Transform Data')

    expect(workflowSpan.output).toBeDefined()
    expect((workflowSpan.output as { childTraceSpans?: unknown }).childTraceSpans).toBeUndefined()

    expect((workflowSpan.output as { success?: boolean }).success).toBe(true)
    expect((workflowSpan.output as { childWorkflowName?: string }).childWorkflowName).toBe(
      'Child Workflow'
    )
    expect((workflowSpan.output as { result?: { data: string } }).result).toEqual({
      data: 'some result',
    })
  })
})

describe('stripCustomToolPrefix', () => {
@@ -326,6 +326,11 @@ export function buildTraceSpans(result: ExecutionResult): {
      const childTraceSpans = log.output.childTraceSpans as TraceSpan[]
      const flattenedChildren = flattenWorkflowChildren(childTraceSpans)
      span.children = mergeTraceSpanChildren(span.children || [], flattenedChildren)

      const { childTraceSpans: _, ...cleanOutput } = span.output as {
        childTraceSpans?: TraceSpan[]
      } & Record<string, unknown>
      span.output = cleanOutput
    }

    spanMap.set(spanId, span)
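The added lines use rest destructuring to drop `childTraceSpans` from the span's output without mutating the object or losing the remaining fields. A standalone illustration of the idiom:

```ts
// Rest destructuring removes one key while keeping all other fields intact.
const output = { success: true, childTraceSpans: [{ id: 'c1' }], result: 42 }
const { childTraceSpans: _, ...cleanOutput } = output
console.log(cleanOutput) // { success: true, result: 42 }
```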
@@ -127,24 +127,6 @@ export function truncateToTokenLimit(
  }
}

/**
 * Get token count for multiple texts (for batching decisions)
 * Returns array of token counts in same order as input
 */
export function getTokenCountsForBatch(
  texts: string[],
  modelName = 'text-embedding-3-small'
): number[] {
  return texts.map((text) => getAccurateTokenCount(text, modelName))
}

/**
 * Calculate total tokens across multiple texts
 */
export function getTotalTokenCount(texts: string[], modelName = 'text-embedding-3-small'): number {
  return texts.reduce((total, text) => total + getAccurateTokenCount(text, modelName), 0)
}

/**
 * Batch texts by token count to stay within API limits
 * Returns array of batches where each batch's total tokens <= maxTokensPerBatch
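The excerpt cuts off inside the doc comment for the batching helper, so its body is not shown here. Based purely on that documented contract (each batch's total tokens stays at or below the limit), a greedy packing sketch could look like this; it is an illustration, not the file's actual implementation:

```ts
// Sketch only: greedily pack texts into batches under maxTokensPerBatch.
// Assumes oversized single texts were already cut down via truncateToTokenLimit.
export function batchByTokenLimit(
  texts: string[],
  maxTokensPerBatch: number,
  modelName = 'text-embedding-3-small'
): string[][] {
  const batches: string[][] = []
  let current: string[] = []
  let currentTokens = 0
  for (const text of texts) {
    const tokens = getAccurateTokenCount(text, modelName)
    // Close the current batch when adding this text would exceed the limit.
    if (current.length > 0 && currentTokens + tokens > maxTokensPerBatch) {
      batches.push(current)
      current = []
      currentTokens = 0
    }
    current.push(text)
    currentTokens += tokens
  }
  if (current.length > 0) batches.push(current)
  return batches
}
```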
@@ -12,8 +12,6 @@ export {
  estimateOutputTokens,
  estimateTokenCount,
  getAccurateTokenCount,
  getTokenCountsForBatch,
  getTotalTokenCount,
  truncateToTokenLimit,
} from '@/lib/tokenization/estimators'
export { processStreamingBlockLog, processStreamingBlockLogs } from '@/lib/tokenization/streaming'
@@ -749,7 +749,6 @@ export async function formatWebhookInput(
    }
  }

  // Fallback for unknown Telegram update types
  logger.warn('Unknown Telegram update type', {
    updateId: body.update_id,
    bodyKeys: Object.keys(body || {}),
@@ -778,7 +777,6 @@ export async function formatWebhookInput(

  if (foundWebhook.provider === 'twilio_voice') {
    return {
      // Root-level properties matching trigger outputs for easy access
      callSid: body.CallSid,
      accountSid: body.AccountSid,
      from: body.From,
@@ -792,8 +790,6 @@ export async function formatWebhookInput(
      speechResult: body.SpeechResult,
      recordingUrl: body.RecordingUrl,
      recordingSid: body.RecordingSid,

      // Additional fields from Twilio payload
      called: body.Called,
      caller: body.Caller,
      toCity: body.ToCity,
@@ -830,14 +826,48 @@ export async function formatWebhookInput(

  if (foundWebhook.provider === 'gmail') {
    if (body && typeof body === 'object' && 'email' in body) {
      return body
      const email = body.email as Record<string, any>
      const timestamp = body.timestamp
      return {
        ...email,
        email,
        ...(timestamp !== undefined && { timestamp }),
        webhook: {
          data: {
            provider: 'gmail',
            path: foundWebhook.path,
            providerConfig: foundWebhook.providerConfig,
            payload: body,
            headers: Object.fromEntries(request.headers.entries()),
            method: request.method,
          },
        },
        workflowId: foundWorkflow.id,
      }
    }
    return body
  }

  if (foundWebhook.provider === 'outlook') {
    if (body && typeof body === 'object' && 'email' in body) {
      return body
      const email = body.email as Record<string, any>
      const timestamp = body.timestamp
      return {
        ...email,
        email,
        ...(timestamp !== undefined && { timestamp }),
        webhook: {
          data: {
            provider: 'outlook',
            path: foundWebhook.path,
            providerConfig: foundWebhook.providerConfig,
            payload: body,
            headers: Object.fromEntries(request.headers.entries()),
            method: request.method,
          },
        },
        workflowId: foundWorkflow.id,
      }
    }
    return body
  }
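In other words, instead of returning the raw body untouched, the Gmail and Outlook branches now hoist the email fields to the root while also keeping the nested `email` object and attaching webhook metadata. A rough illustration with a hypothetical payload (the field names inside `email` are invented for the example):

```ts
// Hypothetical incoming body for the Gmail branch:
const body = {
  email: { subject: 'Hello', from: 'a@example.com' },
  timestamp: '2024-01-01T00:00:00Z',
}
// The branch above would now return roughly:
// {
//   subject: 'Hello',            // spread from body.email
//   from: 'a@example.com',
//   email: { subject: 'Hello', from: 'a@example.com' },
//   timestamp: '2024-01-01T00:00:00Z',
//   webhook: { data: { provider: 'gmail', payload: body, /* ... */ } },
//   workflowId: '<workflow id>',
// }
```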
@@ -926,19 +956,16 @@ export async function formatWebhookInput(
  }

  if (foundWebhook.provider === 'microsoft-teams') {
    // Check if this is a Microsoft Graph change notification
    if (body?.value && Array.isArray(body.value) && body.value.length > 0) {
      return await formatTeamsGraphNotification(body, foundWebhook, foundWorkflow, request)
    }

    // Microsoft Teams outgoing webhook - Teams sending data to us
    const messageText = body?.text || ''
    const messageId = body?.id || ''
    const timestamp = body?.timestamp || body?.localTimestamp || ''
    const from = body?.from || {}
    const conversation = body?.conversation || {}

    // Construct the message object
    const messageObj = {
      raw: {
        attachments: body?.attachments || [],
@@ -951,14 +978,12 @@
      },
    }

    // Construct the from object
    const fromObj = {
      id: from.id || '',
      name: from.name || '',
      aadObjectId: from.aadObjectId || '',
    }

    // Construct the conversation object
    const conversationObj = {
      id: conversation.id || '',
      name: conversation.name || '',
@@ -968,13 +993,11 @@
      conversationType: conversation.conversationType || '',
    }

    // Construct the activity object
    const activityObj = body || {}

    return {
      input: messageText, // Primary workflow input - the message text
      input: messageText,

      // Top-level properties for direct access with <microsoftteams.from.name> syntax
      from: fromObj,
      message: messageObj,
      activity: activityObj,
@@ -995,11 +1018,9 @@
  }

  if (foundWebhook.provider === 'slack') {
    // Slack input formatting logic - check for valid event
    const event = body?.event

    if (event && body?.type === 'event_callback') {
      // Extract event text with fallbacks for different event types
      let input = ''

      if (event.text) {
@@ -1010,13 +1031,12 @@
        input = 'Slack event received'
      }

      // Create the event object for easier access
      const eventObj = {
        event_type: event.type || '',
        channel: event.channel || '',
        channel_name: '', // Could be resolved via additional API calls if needed
        channel_name: '',
        user: event.user || '',
        user_name: '', // Could be resolved via additional API calls if needed
        user_name: '',
        text: event.text || '',
        timestamp: event.ts || event.event_ts || '',
        team_id: body.team_id || event.team || '',
@@ -1024,12 +1044,9 @@
      }

      return {
        input, // Primary workflow input - the event content
        input,

        // Top-level properties for backward compatibility with <blockName.event> syntax
        event: eventObj,

        // Keep the nested structure for the new slack.event.text syntax
        slack: {
          event: eventObj,
        },
@@ -1047,7 +1064,6 @@
      }
    }

    // Fallback for unknown Slack event types
    logger.warn('Unknown Slack event type', {
      type: body?.type,
      hasEvent: !!body?.event,
@@ -1283,9 +1299,7 @@ export async function formatWebhookInput(
  }

  return {
    // Expose raw GitHub payload at the root
    ...body,
    // Include webhook metadata alongside
    webhook: {
      data: {
        provider: 'github',
@@ -1364,10 +1378,7 @@ export async function formatWebhookInput(
  }

  if (foundWebhook.provider === 'linear') {
    // Linear webhook payload structure:
    // { action, type, webhookId, webhookTimestamp, organizationId, createdAt, actor, data, updatedFrom? }
    return {
      // Extract top-level fields from Linear payload
      action: body.action || '',
      type: body.type || '',
      webhookId: body.webhookId || '',
@@ -1377,8 +1388,6 @@ export async function formatWebhookInput(
      actor: body.actor || null,
      data: body.data || null,
      updatedFrom: body.updatedFrom || null,

      // Keep webhook metadata
      webhook: {
        data: {
          provider: 'linear',
@@ -1393,7 +1402,6 @@ export async function formatWebhookInput(
    }
  }

  // Jira webhook format
  if (foundWebhook.provider === 'jira') {
    const { extractIssueData, extractCommentData, extractWorklogData } = await import(
      '@/triggers/jira/utils'
@@ -1445,7 +1453,6 @@ export async function formatWebhookInput(
  }

  if (foundWebhook.provider === 'calendly') {
    // Calendly webhook payload format matches the trigger outputs
    return {
      event: body.event,
      created_at: body.created_at,
@@ -1466,9 +1473,7 @@ export async function formatWebhookInput(
  }

  if (foundWebhook.provider === 'circleback') {
    // Circleback webhook payload - meeting notes, action items, transcript
    return {
      // Top-level fields from Circleback payload
      id: body.id,
      name: body.name,
      createdAt: body.createdAt,
@@ -1482,10 +1487,7 @@ export async function formatWebhookInput(
      actionItems: body.actionItems || [],
      transcript: body.transcript || [],
      insights: body.insights || {},

      // Full meeting object for convenience
      meeting: body,

      webhook: {
        data: {
          provider: 'circleback',
@@ -1501,9 +1503,7 @@ export async function formatWebhookInput(
  }

  if (foundWebhook.provider === 'grain') {
    // Grain webhook payload structure: { type, user_id, data: {...} }
    return {
      // Top-level fields from Grain payload
      type: body.type,
      user_id: body.user_id,
      data: body.data || {},
@@ -226,10 +226,27 @@ export function getBlockOutputs(
  }

  if (blockType === 'human_in_the_loop') {
    // For human_in_the_loop, only expose url (inputFormat fields are only available after resume)
    return {
    const hitlOutputs: Record<string, any> = {
      url: { type: 'string', description: 'Resume UI URL' },
      resumeEndpoint: {
        type: 'string',
        description: 'Resume API endpoint URL for direct curl requests',
      },
    }

    const normalizedInputFormat = normalizeInputFormatValue(subBlocks?.inputFormat?.value)

    for (const field of normalizedInputFormat) {
      const fieldName = field?.name?.trim()
      if (!fieldName) continue

      hitlOutputs[fieldName] = {
        type: (field?.type || 'any') as any,
        description: `Field from resume form`,
      }
    }

    return hitlOutputs
  }

  if (blockType === 'approval') {
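So the human-in-the-loop block now surfaces one output per field declared in its `inputFormat`, alongside the fixed `url` and `resumeEndpoint` entries. A hypothetical illustration of the mapping (the field names here are invented):

```ts
// Hypothetical normalized inputFormat value for a human_in_the_loop block:
const inputFormat = [
  { name: 'approved', type: 'boolean' },
  { name: 'comment', type: 'string' },
]
// With the new branch above, getBlockOutputs would expose, in addition to
// url and resumeEndpoint:
// {
//   approved: { type: 'boolean', description: 'Field from resume form' },
//   comment: { type: 'string', description: 'Field from resume form' },
// }
```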
@@ -538,15 +538,15 @@ export class PauseResumeManager {

    mergedOutput.resume = mergedOutput.resume ?? mergedResponse.resume

    // Preserve url from resume links (apiUrl hidden from output)
    // Preserve url and resumeEndpoint from resume links
    const resumeLinks = mergedOutput.resume ?? mergedResponse.resume
    if (resumeLinks && typeof resumeLinks === 'object') {
      if (resumeLinks.uiUrl) {
        mergedOutput.url = resumeLinks.uiUrl
      }
      // if (resumeLinks.apiUrl) {
      //   mergedOutput.apiUrl = resumeLinks.apiUrl
      // } // Hidden from output
      if (resumeLinks.apiUrl) {
        mergedOutput.resumeEndpoint = resumeLinks.apiUrl
      }
    }

    for (const [key, value] of Object.entries(submissionPayload)) {
@@ -127,6 +127,7 @@
    "onedollarstats": "0.0.10",
    "openai": "^4.91.1",
    "papaparse": "5.5.3",
    "pdf-lib": "1.17.1",
    "postgres": "^3.4.5",
    "posthog-js": "1.268.9",
    "posthog-node": "5.9.2",
@@ -90,6 +90,17 @@ export const functionExecuteTool: ToolConfig<CodeExecutionInput, CodeExecutionOu
  transformResponse: async (response: Response): Promise<CodeExecutionOutput> => {
    const result = await response.json()

    if (!result.success) {
      return {
        success: false,
        output: {
          result: null,
          stdout: result.output?.stdout || '',
        },
        error: result.error,
      }
    }

    return {
      success: true,
      output: {
@@ -1,3 +1,5 @@
import { requestTool } from './request'
import { webhookRequestTool } from './webhook_request'

export const httpRequestTool = requestTool
export { webhookRequestTool }
@@ -4,7 +4,7 @@ export interface RequestParams {
  url: string
  method?: HttpMethod
  headers?: TableRow[]
  body?: any
  body?: unknown
  params?: TableRow[]
  pathParams?: Record<string, string>
  formData?: Record<string, string | Blob>
@@ -12,8 +12,15 @@

export interface RequestResponse extends ToolResponse {
  output: {
    data: any
    data: unknown
    status: number
    headers: Record<string, string>
  }
}

export interface WebhookRequestParams {
  url: string
  body?: unknown
  secret?: string
  headers?: Record<string, string>
}
apps/sim/tools/http/webhook_request.ts (new file, +130)
@@ -0,0 +1,130 @@
import { createHmac } from 'crypto'
import { v4 as uuidv4 } from 'uuid'
import type { ToolConfig } from '@/tools/types'
import type { RequestResponse, WebhookRequestParams } from './types'

/**
 * Generates HMAC-SHA256 signature for webhook payload
 */
function generateSignature(secret: string, timestamp: number, body: string): string {
  const signatureBase = `${timestamp}.${body}`
  return createHmac('sha256', secret).update(signatureBase).digest('hex')
}

export const webhookRequestTool: ToolConfig<WebhookRequestParams, RequestResponse> = {
  id: 'webhook_request',
  name: 'Webhook Request',
  description: 'Send a webhook request with automatic headers and optional HMAC signing',
  version: '1.0.0',

  params: {
    url: {
      type: 'string',
      required: true,
      description: 'The webhook URL to send the request to',
    },
    body: {
      type: 'object',
      description: 'JSON payload to send',
    },
    secret: {
      type: 'string',
      description: 'Optional secret for HMAC-SHA256 signature',
    },
    headers: {
      type: 'object',
      description: 'Additional headers to include',
    },
  },

  request: {
    url: (params: WebhookRequestParams) => params.url,

    method: () => 'POST',

    headers: (params: WebhookRequestParams) => {
      const timestamp = Date.now()
      const deliveryId = uuidv4()

      // Start with webhook-specific headers
      const webhookHeaders: Record<string, string> = {
        'Content-Type': 'application/json',
        'X-Webhook-Timestamp': timestamp.toString(),
        'X-Delivery-ID': deliveryId,
        'Idempotency-Key': deliveryId,
      }

      // Add signature if secret is provided
      if (params.secret) {
        const bodyString =
          typeof params.body === 'string' ? params.body : JSON.stringify(params.body || {})
        const signature = generateSignature(params.secret, timestamp, bodyString)
        webhookHeaders['X-Webhook-Signature'] = `t=${timestamp},v1=${signature}`
      }

      // Merge with user-provided headers (user headers take precedence)
      const userHeaders = params.headers || {}

      return { ...webhookHeaders, ...userHeaders }
    },

    body: (params: WebhookRequestParams) => params.body,
  },

  transformResponse: async (response: Response) => {
    const contentType = response.headers.get('content-type') || ''

    const headers: Record<string, string> = {}
    response.headers.forEach((value, key) => {
      headers[key] = value
    })

    const data = await (contentType.includes('application/json')
      ? response.json()
      : response.text())

    // Check if this is a proxy response
    if (
      contentType.includes('application/json') &&
      typeof data === 'object' &&
      data !== null &&
      data.data !== undefined &&
      data.status !== undefined
    ) {
      return {
        success: data.success,
        output: {
          data: data.data,
          status: data.status,
          headers: data.headers || {},
        },
        error: data.success ? undefined : data.error,
      }
    }

    return {
      success: response.ok,
      output: {
        data,
        status: response.status,
        headers,
      },
      error: undefined,
    }
  },

  outputs: {
    data: {
      type: 'json',
      description: 'Response data from the webhook endpoint',
    },
    status: {
      type: 'number',
      description: 'HTTP status code',
    },
    headers: {
      type: 'object',
      description: 'Response headers',
    },
  },
}
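Since the tool signs payloads as `X-Webhook-Signature: t=<timestamp>,v1=<hex hmac>` over `${timestamp}.${body}`, a receiver can verify deliveries by recomputing the HMAC from the raw request body. The following receiver-side sketch is not part of this PR; the header parsing and the five-minute tolerance are assumptions:

```ts
import { createHmac, timingSafeEqual } from 'crypto'

// Hypothetical receiver-side counterpart to the signing scheme above.
function verifyWebhookSignature(
  secret: string,
  signatureHeader: string, // e.g. "t=1700000000000,v1=abc123..."
  rawBody: string,
  toleranceMs = 5 * 60 * 1000
): boolean {
  const parts = Object.fromEntries(
    signatureHeader.split(',').map((kv) => kv.split('=') as [string, string])
  )
  const timestamp = Number(parts.t)
  if (!Number.isFinite(timestamp) || Math.abs(Date.now() - timestamp) > toleranceMs) {
    return false // reject stale or malformed timestamps (basic replay protection)
  }
  // Recompute the HMAC over `${timestamp}.${rawBody}` exactly as the sender does.
  const expected = createHmac('sha256', secret).update(`${timestamp}.${rawBody}`).digest('hex')
  const given = Buffer.from(parts.v1 ?? '', 'hex')
  const want = Buffer.from(expected, 'hex')
  // Constant-time comparison to avoid timing side channels.
  return given.length === want.length && timingSafeEqual(given, want)
}
```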
@@ -376,7 +376,7 @@ import {
  greptileStatusTool,
} from '@/tools/greptile'
import { guardrailsValidateTool } from '@/tools/guardrails'
import { httpRequestTool } from '@/tools/http'
import { httpRequestTool, webhookRequestTool } from '@/tools/http'
import {
  hubspotCreateCompanyTool,
  hubspotCreateContactTool,
@@ -1415,6 +1415,7 @@ export const tools: Record<string, ToolConfig> = {
  browser_use_run_task: browserUseRunTaskTool,
  openai_embeddings: openAIEmbeddingsTool,
  http_request: httpRequestTool,
  webhook_request: webhookRequestTool,
  huggingface_chat: huggingfaceChatTool,
  llm_chat: llmChatTool,
  function_execute: functionExecuteTool,
|
||||
description:
|
||||
'Database schema to query from (default: public). Use this to access tables in other schemas.',
|
||||
},
|
||||
select: {
|
||||
type: 'string',
|
||||
required: false,
|
||||
visibility: 'user-or-llm',
|
||||
description: 'Columns to return (comma-separated). Defaults to * (all columns)',
|
||||
},
|
||||
filter: {
|
||||
type: 'string',
|
||||
required: true,
|
||||
@@ -44,7 +50,8 @@ export const getRowTool: ToolConfig<SupabaseGetRowParams, SupabaseGetRowResponse
|
||||
request: {
|
||||
url: (params) => {
|
||||
// Construct the URL for the Supabase REST API
|
||||
let url = `https://${params.projectId}.supabase.co/rest/v1/${params.table}?select=*`
|
||||
const selectColumns = params.select?.trim() || '*'
|
||||
let url = `https://${params.projectId}.supabase.co/rest/v1/${params.table}?select=${encodeURIComponent(selectColumns)}`
|
||||
|
||||
// Add filters (required for get_row) - using PostgREST syntax
|
||||
if (params.filter?.trim()) {
|
||||
|
||||
@@ -27,6 +27,12 @@ export const queryTool: ToolConfig<SupabaseQueryParams, SupabaseQueryResponse> =
|
||||
description:
|
||||
'Database schema to query from (default: public). Use this to access tables in other schemas.',
|
||||
},
|
||||
select: {
|
||||
type: 'string',
|
||||
required: false,
|
||||
visibility: 'user-or-llm',
|
||||
description: 'Columns to return (comma-separated). Defaults to * (all columns)',
|
||||
},
|
||||
filter: {
|
||||
type: 'string',
|
||||
required: false,
|
||||
@@ -56,7 +62,8 @@ export const queryTool: ToolConfig<SupabaseQueryParams, SupabaseQueryResponse> =
|
||||
request: {
|
||||
url: (params) => {
|
||||
// Construct the URL for the Supabase REST API
|
||||
let url = `https://${params.projectId}.supabase.co/rest/v1/${params.table}?select=*`
|
||||
const selectColumns = params.select?.trim() || '*'
|
||||
let url = `https://${params.projectId}.supabase.co/rest/v1/${params.table}?select=${encodeURIComponent(selectColumns)}`
|
||||
|
||||
// Add filters if provided - using PostgREST syntax
|
||||
if (params.filter?.trim()) {
|
||||
|
||||
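Both tools thread the new `select` parameter straight into PostgREST's `select=` query string, URL-encoding it first. A quick illustration of the resulting URL (the project and table names here are made up):

```ts
// With select = 'id,name,created_at' and table = 'users', the tool now builds:
const selectColumns = 'id,name,created_at'
const url = `https://myproject.supabase.co/rest/v1/users?select=${encodeURIComponent(selectColumns)}`
// -> https://myproject.supabase.co/rest/v1/users?select=id%2Cname%2Ccreated_at
// instead of the previous hard-coded ?select=*
```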
@@ -5,6 +5,7 @@ export interface SupabaseQueryParams {
  projectId: string
  table: string
  schema?: string
  select?: string
  filter?: string
  orderBy?: string
  limit?: number
@@ -23,6 +24,7 @@ export interface SupabaseGetRowParams {
  projectId: string
  table: string
  schema?: string
  select?: string
  filter: string
}
@@ -17,7 +17,7 @@ export default defineConfig({
  build: {
    extensions: [
      additionalPackages({
        packages: ['unpdf'],
        packages: ['unpdf', 'pdf-lib'],
      }),
    ],
  },
@@ -1,6 +1,6 @@
import { GrainIcon } from '@/components/icons'
import type { TriggerConfig } from '@/triggers/types'
import { buildHighlightOutputs, grainSetupInstructions } from './utils'
import { buildHighlightOutputs, grainSetupInstructions, grainTriggerOptions } from './utils'

export const grainHighlightCreatedTrigger: TriggerConfig = {
  id: 'grain_highlight_created',
@@ -11,6 +11,15 @@ export const grainHighlightCreatedTrigger: TriggerConfig = {
  icon: GrainIcon,

  subBlocks: [
    {
      id: 'selectedTriggerId',
      title: 'Trigger Type',
      type: 'dropdown',
      mode: 'trigger',
      options: grainTriggerOptions,
      value: () => 'grain_highlight_created',
      required: true,
    },
    {
      id: 'apiKey',
      title: 'API Key',
@@ -25,42 +34,6 @@ export const grainHighlightCreatedTrigger: TriggerConfig = {
        value: 'grain_highlight_created',
      },
    },
    {
      id: 'includeHighlights',
      title: 'Include Highlights',
      type: 'switch',
      description: 'Include highlights/clips in webhook payload.',
      defaultValue: false,
      mode: 'trigger',
      condition: {
        field: 'selectedTriggerId',
        value: 'grain_highlight_created',
      },
    },
    {
      id: 'includeParticipants',
      title: 'Include Participants',
      type: 'switch',
      description: 'Include participant list in webhook payload.',
      defaultValue: false,
      mode: 'trigger',
      condition: {
        field: 'selectedTriggerId',
        value: 'grain_highlight_created',
      },
    },
    {
      id: 'includeAiSummary',
      title: 'Include AI Summary',
      type: 'switch',
      description: 'Include AI-generated summary in webhook payload.',
      defaultValue: false,
      mode: 'trigger',
      condition: {
        field: 'selectedTriggerId',
        value: 'grain_highlight_created',
      },
    },
    {
      id: 'triggerInstructions',
      title: 'Setup Instructions',

@@ -1,6 +1,6 @@
import { GrainIcon } from '@/components/icons'
import type { TriggerConfig } from '@/triggers/types'
import { buildHighlightOutputs, grainSetupInstructions } from './utils'
import { buildHighlightOutputs, grainSetupInstructions, grainTriggerOptions } from './utils'

export const grainHighlightUpdatedTrigger: TriggerConfig = {
  id: 'grain_highlight_updated',
@@ -11,6 +11,15 @@ export const grainHighlightUpdatedTrigger: TriggerConfig = {
  icon: GrainIcon,

  subBlocks: [
    {
      id: 'selectedTriggerId',
      title: 'Trigger Type',
      type: 'dropdown',
      mode: 'trigger',
      options: grainTriggerOptions,
      value: () => 'grain_highlight_updated',
      required: true,
    },
    {
      id: 'apiKey',
      title: 'API Key',
@@ -25,42 +34,6 @@ export const grainHighlightUpdatedTrigger: TriggerConfig = {
        value: 'grain_highlight_updated',
      },
    },
    {
      id: 'includeHighlights',
      title: 'Include Highlights',
      type: 'switch',
      description: 'Include highlights/clips in webhook payload.',
      defaultValue: false,
      mode: 'trigger',
      condition: {
        field: 'selectedTriggerId',
        value: 'grain_highlight_updated',
      },
    },
    {
      id: 'includeParticipants',
      title: 'Include Participants',
      type: 'switch',
      description: 'Include participant list in webhook payload.',
      defaultValue: false,
      mode: 'trigger',
      condition: {
        field: 'selectedTriggerId',
        value: 'grain_highlight_updated',
      },
    },
    {
      id: 'includeAiSummary',
      title: 'Include AI Summary',
      type: 'switch',
      description: 'Include AI-generated summary in webhook payload.',
      defaultValue: false,
      mode: 'trigger',
      condition: {
        field: 'selectedTriggerId',
        value: 'grain_highlight_updated',
      },
    },
    {
      id: 'triggerInstructions',
      title: 'Setup Instructions',

@@ -1,6 +1,6 @@
import { GrainIcon } from '@/components/icons'
import type { TriggerConfig } from '@/triggers/types'
import { buildRecordingOutputs, grainSetupInstructions } from './utils'
import { buildRecordingOutputs, grainSetupInstructions, grainTriggerOptions } from './utils'

export const grainRecordingCreatedTrigger: TriggerConfig = {
  id: 'grain_recording_created',
@@ -11,6 +11,15 @@ export const grainRecordingCreatedTrigger: TriggerConfig = {
  icon: GrainIcon,

  subBlocks: [
    {
      id: 'selectedTriggerId',
      title: 'Trigger Type',
      type: 'dropdown',
      mode: 'trigger',
      options: grainTriggerOptions,
      value: () => 'grain_recording_created',
      required: true,
    },
    {
      id: 'apiKey',
      title: 'API Key',
@@ -25,42 +34,6 @@ export const grainRecordingCreatedTrigger: TriggerConfig = {
        value: 'grain_recording_created',
      },
    },
    {
      id: 'includeHighlights',
      title: 'Include Highlights',
      type: 'switch',
      description: 'Include highlights/clips in webhook payload.',
      defaultValue: false,
      mode: 'trigger',
      condition: {
        field: 'selectedTriggerId',
        value: 'grain_recording_created',
      },
    },
    {
      id: 'includeParticipants',
      title: 'Include Participants',
      type: 'switch',
      description: 'Include participant list in webhook payload.',
      defaultValue: false,
      mode: 'trigger',
      condition: {
        field: 'selectedTriggerId',
        value: 'grain_recording_created',
      },
    },
    {
      id: 'includeAiSummary',
      title: 'Include AI Summary',
      type: 'switch',
      description: 'Include AI-generated summary in webhook payload.',
      defaultValue: false,
      mode: 'trigger',
      condition: {
        field: 'selectedTriggerId',
        value: 'grain_recording_created',
      },
    },
    {
      id: 'triggerInstructions',
      title: 'Setup Instructions',

@@ -1,6 +1,6 @@
import { GrainIcon } from '@/components/icons'
import type { TriggerConfig } from '@/triggers/types'
import { buildRecordingOutputs, grainSetupInstructions } from './utils'
import { buildRecordingOutputs, grainSetupInstructions, grainTriggerOptions } from './utils'

export const grainRecordingUpdatedTrigger: TriggerConfig = {
  id: 'grain_recording_updated',
@@ -11,6 +11,15 @@ export const grainRecordingUpdatedTrigger: TriggerConfig = {
  icon: GrainIcon,

  subBlocks: [
    {
      id: 'selectedTriggerId',
      title: 'Trigger Type',
      type: 'dropdown',
      mode: 'trigger',
      options: grainTriggerOptions,
      value: () => 'grain_recording_updated',
      required: true,
    },
    {
      id: 'apiKey',
      title: 'API Key',
@@ -25,42 +34,6 @@ export const grainRecordingUpdatedTrigger: TriggerConfig = {
        value: 'grain_recording_updated',
      },
    },
    {
      id: 'includeHighlights',
      title: 'Include Highlights',
      type: 'switch',
      description: 'Include highlights/clips in webhook payload.',
      defaultValue: false,
      mode: 'trigger',
      condition: {
        field: 'selectedTriggerId',
        value: 'grain_recording_updated',
      },
    },
    {
      id: 'includeParticipants',
      title: 'Include Participants',
      type: 'switch',
      description: 'Include participant list in webhook payload.',
      defaultValue: false,
      mode: 'trigger',
      condition: {
        field: 'selectedTriggerId',
        value: 'grain_recording_updated',
      },
    },
    {
      id: 'includeAiSummary',
      title: 'Include AI Summary',
      type: 'switch',
      description: 'Include AI-generated summary in webhook payload.',
      defaultValue: false,
      mode: 'trigger',
      condition: {
        field: 'selectedTriggerId',
        value: 'grain_recording_updated',
      },
    },
    {
      id: 'triggerInstructions',
      title: 'Setup Instructions',

@@ -1,6 +1,6 @@
import { GrainIcon } from '@/components/icons'
import type { TriggerConfig } from '@/triggers/types'
import { buildStoryOutputs, grainSetupInstructions } from './utils'
import { buildStoryOutputs, grainSetupInstructions, grainTriggerOptions } from './utils'

export const grainStoryCreatedTrigger: TriggerConfig = {
  id: 'grain_story_created',
@@ -11,6 +11,15 @@ export const grainStoryCreatedTrigger: TriggerConfig = {
  icon: GrainIcon,

  subBlocks: [
    {
      id: 'selectedTriggerId',
      title: 'Trigger Type',
      type: 'dropdown',
      mode: 'trigger',
      options: grainTriggerOptions,
      value: () => 'grain_story_created',
      required: true,
    },
    {
      id: 'apiKey',
      title: 'API Key',
@@ -25,42 +34,6 @@ export const grainStoryCreatedTrigger: TriggerConfig = {
        value: 'grain_story_created',
      },
    },
    {
      id: 'includeHighlights',
      title: 'Include Highlights',
      type: 'switch',
      description: 'Include highlights/clips in webhook payload.',
      defaultValue: false,
      mode: 'trigger',
      condition: {
        field: 'selectedTriggerId',
        value: 'grain_story_created',
      },
    },
    {
      id: 'includeParticipants',
      title: 'Include Participants',
      type: 'switch',
      description: 'Include participant list in webhook payload.',
      defaultValue: false,
      mode: 'trigger',
      condition: {
        field: 'selectedTriggerId',
        value: 'grain_story_created',
      },
    },
    {
      id: 'includeAiSummary',
      title: 'Include AI Summary',
      type: 'switch',
      description: 'Include AI-generated summary in webhook payload.',
      defaultValue: false,
      mode: 'trigger',
      condition: {
        field: 'selectedTriggerId',
        value: 'grain_story_created',
      },
    },
    {
      id: 'triggerInstructions',
      title: 'Setup Instructions',

@@ -19,7 +19,6 @@ export function grainSetupInstructions(eventType: string): string {
  const instructions = [
    'Enter your Grain API Key (Personal Access Token) above.',
    'You can find or create your API key in Grain at <strong>Settings > Integrations > API</strong>.',
    'Optionally configure filters to narrow which recordings trigger the webhook.',
    `Click <strong>"Save Configuration"</strong> to automatically create the webhook in Grain for <strong>${eventType}</strong> events.`,
    'The webhook will be automatically deleted when you remove this trigger.',
  ]
@@ -40,75 +39,61 @@ export function buildRecordingOutputs(): Record<string, TriggerOutput> {
  return {
    type: {
      type: 'string',
      description: 'Event type (recording_added)',
      description: 'Event type',
    },
    user_id: {
      type: 'string',
      description: 'User UUID who triggered the event',
    },
    data: {
      type: 'object',
      description: 'Recording data object',
    },
    'data.id': {
      type: 'string',
      description: 'Recording UUID',
    },
    'data.title': {
      type: 'string',
      description: 'Recording title',
    },
    'data.start_datetime': {
      type: 'string',
      description: 'ISO8601 start timestamp',
    },
    'data.end_datetime': {
      type: 'string',
      description: 'ISO8601 end timestamp',
    },
    'data.duration_ms': {
      type: 'number',
      description: 'Duration in milliseconds',
    },
    'data.media_type': {
      type: 'string',
      description: 'audio, transcript, or video',
    },
    'data.source': {
      type: 'string',
      description: 'Recording source (zoom, meet, teams, etc.)',
    },
    'data.url': {
      type: 'string',
      description: 'URL to view in Grain',
    },
    'data.thumbnail_url': {
      type: 'string',
      description: 'Thumbnail URL (nullable)',
    },
    'data.tags': {
      type: 'array',
      description: 'Array of tag strings',
    },
    'data.teams': {
      type: 'array',
      description: 'Teams the recording belongs to',
    },
    'data.meeting_type': {
      type: 'object',
      description: 'Meeting type info (nullable)',
    },
    'data.highlights': {
      type: 'array',
      description: 'Highlights (if configured in hook)',
    },
    'data.participants': {
      type: 'array',
      description: 'Participants (if configured in hook)',
    },
    'data.ai_summary': {
      type: 'object',
      description: 'AI summary (if configured in hook)',
    id: {
      type: 'string',
      description: 'Recording UUID',
    },
    title: {
      type: 'string',
      description: 'Recording title',
    },
    start_datetime: {
      type: 'string',
      description: 'ISO8601 start timestamp',
    },
    end_datetime: {
      type: 'string',
      description: 'ISO8601 end timestamp',
    },
    duration_ms: {
      type: 'number',
      description: 'Duration in milliseconds',
    },
    media_type: {
      type: 'string',
      description: 'audio, transcript, or video',
    },
    source: {
      type: 'string',
      description: 'Recording source (zoom, meet, local_capture, etc.)',
    },
    url: {
      type: 'string',
      description: 'URL to view in Grain',
    },
    thumbnail_url: {
      type: 'string',
      description: 'Thumbnail URL (nullable)',
    },
    tags: {
      type: 'array',
      description: 'Array of tag strings',
    },
    teams: {
      type: 'array',
      description: 'Array of team objects',
    },
    meeting_type: {
      type: 'object',
      description: 'Meeting type info with id, name, scope (nullable)',
    },
  },
} as Record<string, TriggerOutput>
}
@@ -128,52 +113,50 @@ export function buildHighlightOutputs(): Record<string, TriggerOutput> {
      description: 'User UUID who triggered the event',
    },
    data: {
      type: 'object',
      description: 'Highlight data object',
    },
    'data.id': {
      type: 'string',
      description: 'Highlight UUID',
    },
    'data.recording_id': {
      type: 'string',
      description: 'Parent recording UUID',
    },
    'data.text': {
      type: 'string',
      description: 'Highlight title/description',
    },
    'data.transcript': {
      type: 'string',
      description: 'Transcript text of the clip',
    },
    'data.speakers': {
      type: 'array',
      description: 'Array of speaker names',
    },
    'data.timestamp': {
      type: 'number',
      description: 'Start timestamp in ms',
    },
    'data.duration': {
      type: 'number',
      description: 'Duration in ms',
    },
    'data.tags': {
      type: 'array',
      description: 'Array of tag strings',
    },
    'data.url': {
      type: 'string',
      description: 'URL to view in Grain',
    },
    'data.thumbnail_url': {
      type: 'string',
      description: 'Thumbnail URL',
    },
    'data.created_datetime': {
      type: 'string',
      description: 'ISO8601 creation timestamp',
    id: {
      type: 'string',
      description: 'Highlight UUID',
    },
    recording_id: {
      type: 'string',
      description: 'Parent recording UUID',
    },
    text: {
      type: 'string',
      description: 'Highlight title/description',
    },
    transcript: {
      type: 'string',
      description: 'Transcript text of the clip',
    },
    speakers: {
      type: 'array',
      description: 'Array of speaker names',
    },
    timestamp: {
      type: 'number',
      description: 'Start timestamp in ms',
    },
    duration: {
      type: 'number',
      description: 'Duration in ms',
    },
    tags: {
      type: 'array',
      description: 'Array of tag strings',
    },
    url: {
      type: 'string',
      description: 'URL to view in Grain',
    },
    thumbnail_url: {
      type: 'string',
      description: 'Thumbnail URL',
    },
    created_datetime: {
      type: 'string',
      description: 'ISO8601 creation timestamp',
    },
  },
} as Record<string, TriggerOutput>
}
@@ -193,24 +176,22 @@ export function buildStoryOutputs(): Record<string, TriggerOutput> {
      description: 'User UUID who triggered the event',
    },
    data: {
      type: 'object',
      description: 'Story data object',
    },
    'data.id': {
      type: 'string',
      description: 'Story UUID',
    },
    'data.title': {
      type: 'string',
      description: 'Story title',
    },
    'data.url': {
      type: 'string',
      description: 'URL to view in Grain',
    },
    'data.created_datetime': {
      type: 'string',
      description: 'ISO8601 creation timestamp',
    id: {
      type: 'string',
      description: 'Story UUID',
    },
    title: {
      type: 'string',
      description: 'Story title',
    },
    url: {
      type: 'string',
      description: 'URL to view in Grain',
    },
    created_datetime: {
      type: 'string',
      description: 'ISO8601 creation timestamp',
    },
  },
} as Record<string, TriggerOutput>
}

@@ -34,42 +34,6 @@ export const grainWebhookTrigger: TriggerConfig = {
        value: 'grain_webhook',
      },
    },
    {
      id: 'includeHighlights',
      title: 'Include Highlights',
      type: 'switch',
      description: 'Include highlights/clips in webhook payload.',
      defaultValue: false,
      mode: 'trigger',
      condition: {
        field: 'selectedTriggerId',
        value: 'grain_webhook',
      },
    },
    {
      id: 'includeParticipants',
      title: 'Include Participants',
      type: 'switch',
      description: 'Include participant list in webhook payload.',
      defaultValue: false,
      mode: 'trigger',
      condition: {
        field: 'selectedTriggerId',
        value: 'grain_webhook',
      },
    },
    {
      id: 'includeAiSummary',
      title: 'Include AI Summary',
      type: 'switch',
      description: 'Include AI-generated summary in webhook payload.',
      defaultValue: false,
      mode: 'trigger',
      condition: {
        field: 'selectedTriggerId',
        value: 'grain_webhook',
      },
    },
    {
      id: 'triggerInstructions',
      title: 'Setup Instructions',
bun.lock (+26, -26)
@@ -1,5 +1,6 @@
{
  "lockfileVersion": 1,
  "configVersion": 0,
  "workspaces": {
    "": {
      "name": "simstudio",
@@ -11,7 +12,7 @@
      "drizzle-kit": "^0.31.4",
      "husky": "9.1.7",
      "lint-staged": "16.0.0",
      "turbo": "2.7.2",
      "turbo": "2.7.3",
    },
  },
  "apps/docs": {
@@ -156,6 +157,7 @@
      "onedollarstats": "0.0.10",
      "openai": "^4.91.1",
      "papaparse": "5.5.3",
      "pdf-lib": "1.17.1",
      "postgres": "^3.4.5",
      "posthog-js": "1.268.9",
      "posthog-node": "5.9.2",
@@ -912,6 +914,10 @@

    "@orama/orama": ["@orama/orama@3.1.18", "", {}, "sha512-a61ljmRVVyG5MC/698C8/FfFDw5a8LOIvyOLW5fztgUXqUpc1jOfQzOitSCbge657OgXXThmY3Tk8fpiDb4UcA=="],

    "@pdf-lib/standard-fonts": ["@pdf-lib/standard-fonts@1.0.0", "", { "dependencies": { "pako": "^1.0.6" } }, "sha512-hU30BK9IUN/su0Mn9VdlVKsWBS6GyhVfqjwl1FjZN4TxP6cCw0jP2w7V3Hf5uX7M0AZJ16vey9yE0ny7Sa59ZA=="],

    "@pdf-lib/upng": ["@pdf-lib/upng@1.0.1", "", { "dependencies": { "pako": "^1.0.10" } }, "sha512-dQK2FUMQtowVP00mtIksrlZhdFXQZPC+taih1q4CvPZ5vqdxR/LKBaFg0oAfzd1GlHZXXSPdQfzQnt+ViGvEIQ=="],

    "@peculiar/asn1-android": ["@peculiar/asn1-android@2.6.0", "", { "dependencies": { "@peculiar/asn1-schema": "^2.6.0", "asn1js": "^3.0.6", "tslib": "^2.8.1" } }, "sha512-cBRCKtYPF7vJGN76/yG8VbxRcHLPF3HnkoHhKOZeHpoVtbMYfY9ROKtH3DtYUY9m8uI1Mh47PRhHf2hSK3xcSQ=="],

    "@peculiar/asn1-cms": ["@peculiar/asn1-cms@2.6.0", "", { "dependencies": { "@peculiar/asn1-schema": "^2.6.0", "@peculiar/asn1-x509": "^2.6.0", "@peculiar/asn1-x509-attr": "^2.6.0", "asn1js": "^3.0.6", "tslib": "^2.8.1" } }, "sha512-2uZqP+ggSncESeUF/9Su8rWqGclEfEiz1SyU02WX5fUONFfkjzS2Z/F1Li0ofSmf4JqYXIOdCAZqIXAIBAT1OA=="],
@@ -2864,6 +2870,8 @@

    "pathval": ["pathval@2.0.1", "", {}, "sha512-//nshmD55c46FuFw26xV/xFAaB5HF9Xdap7HJBBnrKdAd6/GxDBaNA1870O79+9ueg61cZLSVc+OaFlfmObYVQ=="],

    "pdf-lib": ["pdf-lib@1.17.1", "", { "dependencies": { "@pdf-lib/standard-fonts": "^1.0.0", "@pdf-lib/upng": "^1.0.1", "pako": "^1.0.11", "tslib": "^1.11.1" } }, "sha512-V/mpyJAoTsN4cnP31vc0wfNA1+p20evqqnap0KLoRUN0Yk/p3wN52DOEsL4oBFcLdb76hlpKPtzJIgo67j/XLw=="],

    "pdfjs-dist": ["pdfjs-dist@5.4.449", "", { "optionalDependencies": { "@napi-rs/canvas": "^0.1.81" } }, "sha512-CegnUaT0QwAyQMS+7o2POr4wWUNNe8VaKKlcuoRHeYo98cVnqPpwOXNSx6Trl6szH02JrRcsPgletV6GmF3LtQ=="],

    "peberminta": ["peberminta@0.9.0", "", {}, "sha512-XIxfHpEuSJbITd1H3EeQwpcZbTLHc+VVr8ANI9t5sit565tsI4/xK3KWTUFE2e6QiangUkh3B0jihzmGnNrRsQ=="],
@@ -3362,19 +3370,19 @@

    "tunnel-agent": ["tunnel-agent@0.6.0", "", { "dependencies": { "safe-buffer": "^5.0.1" } }, "sha512-McnNiV1l8RYeY8tBgEpuodCC1mLUdbSN+CYBL7kJsJNInOP8UjDDEwdk6Mw60vdLLrr5NHKZhMAOSrR2NZuQ+w=="],

    "turbo": ["turbo@2.7.2", "", { "optionalDependencies": { "turbo-darwin-64": "2.7.2", "turbo-darwin-arm64": "2.7.2", "turbo-linux-64": "2.7.2", "turbo-linux-arm64": "2.7.2", "turbo-windows-64": "2.7.2", "turbo-windows-arm64": "2.7.2" }, "bin": { "turbo": "bin/turbo" } }, "sha512-5JIA5aYBAJSAhrhbyag1ZuMSgUZnHtI+Sq3H8D3an4fL8PeF+L1yYvbEJg47akP1PFfATMf5ehkqFnxfkmuwZQ=="],
    "turbo": ["turbo@2.7.3", "", { "optionalDependencies": { "turbo-darwin-64": "2.7.3", "turbo-darwin-arm64": "2.7.3", "turbo-linux-64": "2.7.3", "turbo-linux-arm64": "2.7.3", "turbo-windows-64": "2.7.3", "turbo-windows-arm64": "2.7.3" }, "bin": { "turbo": "bin/turbo" } }, "sha512-+HjKlP4OfYk+qzvWNETA3cUO5UuK6b5MSc2UJOKyvBceKucQoQGb2g7HlC2H1GHdkfKrk4YF1VPvROkhVZDDLQ=="],

    "turbo-darwin-64": ["turbo-darwin-64@2.7.2", "", { "os": "darwin", "cpu": "x64" }, "sha512-dxY3X6ezcT5vm3coK6VGixbrhplbQMwgNsCsvZamS/+/6JiebqW9DKt4NwpgYXhDY2HdH00I7FWs3wkVuan4rA=="],
    "turbo-darwin-64": ["turbo-darwin-64@2.7.3", "", { "os": "darwin", "cpu": "x64" }, "sha512-aZHhvRiRHXbJw1EcEAq4aws1hsVVUZ9DPuSFaq9VVFAKCup7niIEwc22glxb7240yYEr1vLafdQ2U294Vcwz+w=="],

    "turbo-darwin-arm64": ["turbo-darwin-arm64@2.7.2", "", { "os": "darwin", "cpu": "arm64" }, "sha512-1bXmuwPLqNFt3mzrtYcVx1sdJ8UYb124Bf48nIgcpMCGZy3kDhgxNv1503kmuK/37OGOZbsWSQFU4I08feIuSg=="],
    "turbo-darwin-arm64": ["turbo-darwin-arm64@2.7.3", "", { "os": "darwin", "cpu": "arm64" }, "sha512-CkVrHSq+Bnhl9sX2LQgqQYVfLTWC2gvI74C4758OmU0djfrssDKU9d4YQF0AYXXhIIRZipSXfxClQziIMD+EAg=="],

    "turbo-linux-64": ["turbo-linux-64@2.7.2", "", { "os": "linux", "cpu": "x64" }, "sha512-kP+TiiMaiPugbRlv57VGLfcjFNsFbo8H64wMBCPV2270Or2TpDCBULMzZrvEsvWFjT3pBFvToYbdp8/Kw0jAQg=="],
    "turbo-linux-64": ["turbo-linux-64@2.7.3", "", { "os": "linux", "cpu": "x64" }, "sha512-GqDsCNnzzr89kMaLGpRALyigUklzgxIrSy2pHZVXyifgczvYPnLglex78Aj3T2gu+T3trPPH2iJ+pWucVOCC2Q=="],

    "turbo-linux-arm64": ["turbo-linux-arm64@2.7.2", "", { "os": "linux", "cpu": "arm64" }, "sha512-VDJwQ0+8zjAfbyY6boNaWfP6RIez4ypKHxwkuB6SrWbOSk+vxTyW5/hEjytTwK8w/TsbKVcMDyvpora8tEsRFw=="],
    "turbo-linux-arm64": ["turbo-linux-arm64@2.7.3", "", { "os": "linux", "cpu": "arm64" }, "sha512-NdCDTfIcIo3dWjsiaAHlxu5gW61Ed/8maah1IAF/9E3EtX0aAHNiBMbuYLZaR4vRJ7BeVkYB6xKWRtdFLZ0y3g=="],

    "turbo-windows-64": ["turbo-windows-64@2.7.2", "", { "os": "win32", "cpu": "x64" }, "sha512-rPjqQXVnI6A6oxgzNEE8DNb6Vdj2Wwyhfv3oDc+YM3U9P7CAcBIlKv/868mKl4vsBtz4ouWpTQNXG8vljgJO+w=="],
    "turbo-windows-64": ["turbo-windows-64@2.7.3", "", { "os": "win32", "cpu": "x64" }, "sha512-7bVvO987daXGSJVYBoG8R4Q+csT1pKIgLJYZevXRQ0Hqw0Vv4mKme/TOjYXs9Qb1xMKh51Tb3bXKDbd8/4G08g=="],

    "turbo-windows-arm64": ["turbo-windows-arm64@2.7.2", "", { "os": "win32", "cpu": "arm64" }, "sha512-tcnHvBhO515OheIFWdxA+qUvZzNqqcHbLVFc1+n+TJ1rrp8prYicQtbtmsiKgMvr/54jb9jOabU62URAobnB7g=="],
    "turbo-windows-arm64": ["turbo-windows-arm64@2.7.3", "", { "os": "win32", "cpu": "arm64" }, "sha512-nTodweTbPmkvwMu/a55XvjMsPtuyUSC+sV7f/SR57K36rB2I0YG21qNETN+00LOTUW9B3omd8XkiXJkt4kx/cw=="],

    "tweetnacl": ["tweetnacl@0.14.5", "", {}, "sha512-KXXFFdAbFXY4geFIwoyNK+f5Z1b7swfXABfL7HXCmoIWMKU3dmS26672A4EeQtDzLKy7SXmfBu51JolvEKwtGA=="],

@@ -4046,6 +4054,8 @@

    "path-scurry/lru-cache": ["lru-cache@11.2.4", "", {}, "sha512-B5Y16Jr9LB9dHVkh6ZevG+vAbOsNOYCX+sXvFWFu7B3Iz5mijW3zdbMyhsh8ANd2mSWBYdJgnqi+mL7/LrOPYg=="],

    "pdf-lib/tslib": ["tslib@1.14.1", "", {}, "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg=="],

    "pino/thread-stream": ["thread-stream@3.1.0", "", { "dependencies": { "real-require": "^0.2.0" } }, "sha512-OqyPZ9u96VohAyMfJykzmivOrY2wfMSf3C5TtFJVgN+Hm6aj+voFhlK+kZEIv2FBh1X6Xp3DlnCOfEQ3B2J86A=="],

    "pino-pretty/pino-abstract-transport": ["pino-abstract-transport@3.0.0", "", { "dependencies": { "split2": "^4.0.0" } }, "sha512-wlfUczU+n7Hy/Ha5j9a/gZNy7We5+cXp8YL+X+PG8S0KXxw7n/JXA3c46Y0zQznIJ83URJiwy7Lh56WLokNuxg=="],

@@ -41,7 +41,7 @@
    "drizzle-kit": "^0.31.4",
    "husky": "9.1.7",
    "lint-staged": "16.0.0",
    "turbo": "2.7.2"
    "turbo": "2.7.3"
  },
  "lint-staged": {
    "*.{js,jsx,ts,tsx,json,css,scss}": [