mirror of
https://github.com/simstudioai/sim.git
synced 2026-04-06 03:00:16 -04:00
v0.5.5: slack tool updates, logs search, response block fixes, reactquery migrations
This commit is contained in:
@@ -71,8 +71,10 @@ Senden Sie Nachrichten an Slack-Kanäle oder Benutzer über die Slack-API. Unter
|
||||
|
||||
| Parameter | Typ | Beschreibung |
|
||||
| --------- | ---- | ----------- |
|
||||
| `ts` | string | Zeitstempel der Nachricht |
|
||||
| `channel` | string | Kanal-ID, wohin die Nachricht gesendet wurde |
|
||||
| `message` | object | Vollständiges Nachrichtenobjekt mit allen von Slack zurückgegebenen Eigenschaften |
|
||||
| `ts` | string | Nachrichtenzeitstempel |
|
||||
| `channel` | string | Kanal-ID, in dem die Nachricht gesendet wurde |
|
||||
| `fileCount` | number | Anzahl der hochgeladenen Dateien \(wenn Dateien angehängt sind\) |
|
||||
|
||||
### `slack_canvas`
|
||||
|
||||
@@ -155,6 +157,7 @@ Eine zuvor vom Bot in Slack gesendete Nachricht aktualisieren
|
||||
|
||||
| Parameter | Typ | Beschreibung |
|
||||
| --------- | ---- | ----------- |
|
||||
| `message` | object | Vollständiges aktualisiertes Nachrichtenobjekt mit allen von Slack zurückgegebenen Eigenschaften |
|
||||
| `content` | string | Erfolgsmeldung |
|
||||
| `metadata` | object | Metadaten der aktualisierten Nachricht |
|
||||
|
||||
|
||||
@@ -73,8 +73,10 @@ Send messages to Slack channels or users through the Slack API. Supports Slack m
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `message` | object | Complete message object with all properties returned by Slack |
|
||||
| `ts` | string | Message timestamp |
|
||||
| `channel` | string | Channel ID where message was sent |
|
||||
| `fileCount` | number | Number of files uploaded \(when files are attached\) |
|
||||
|
||||
### `slack_canvas`
|
||||
|
||||
@@ -157,6 +159,7 @@ Update a message previously sent by the bot in Slack
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `message` | object | Complete updated message object with all properties returned by Slack |
|
||||
| `content` | string | Success message |
|
||||
| `metadata` | object | Updated message metadata |
|
||||
|
||||
|
||||
@@ -71,8 +71,10 @@ Envía mensajes a canales o usuarios de Slack a través de la API de Slack. Comp
|
||||
|
||||
| Parámetro | Tipo | Descripción |
|
||||
| --------- | ---- | ----------- |
|
||||
| `message` | object | Objeto de mensaje completo con todas las propiedades devueltas por Slack |
|
||||
| `ts` | string | Marca de tiempo del mensaje |
|
||||
| `channel` | string | ID del canal donde se envió el mensaje |
|
||||
| `fileCount` | number | Número de archivos subidos (cuando se adjuntan archivos) |
|
||||
|
||||
### `slack_canvas`
|
||||
|
||||
@@ -155,6 +157,7 @@ Actualizar un mensaje enviado previamente por el bot en Slack
|
||||
|
||||
| Parámetro | Tipo | Descripción |
|
||||
| --------- | ---- | ----------- |
|
||||
| `message` | object | Objeto de mensaje actualizado completo con todas las propiedades devueltas por Slack |
|
||||
| `content` | string | Mensaje de éxito |
|
||||
| `metadata` | object | Metadatos del mensaje actualizado |
|
||||
|
||||
|
||||
@@ -71,8 +71,10 @@ Envoyez des messages aux canaux ou utilisateurs Slack via l'API Slack. Prend en
|
||||
|
||||
| Paramètre | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `message` | objet | Objet de message complet avec toutes les propriétés renvoyées par Slack |
|
||||
| `ts` | chaîne | Horodatage du message |
|
||||
| `channel` | chaîne | ID du canal où le message a été envoyé |
|
||||
| `fileCount` | nombre | Nombre de fichiers téléchargés \(lorsque des fichiers sont joints\) |
|
||||
|
||||
### `slack_canvas`
|
||||
|
||||
@@ -155,6 +157,7 @@ Mettre à jour un message précédemment envoyé par le bot dans Slack
|
||||
|
||||
| Paramètre | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `message` | objet | Objet de message mis à jour complet avec toutes les propriétés renvoyées par Slack |
|
||||
| `content` | chaîne | Message de succès |
|
||||
| `metadata` | objet | Métadonnées du message mis à jour |
|
||||
|
||||
|
||||
@@ -70,8 +70,10 @@ Slack APIを通じてSlackチャンネルまたはユーザーにメッセージ
|
||||
|
||||
| パラメータ | 型 | 説明 |
|
||||
| --------- | ---- | ----------- |
|
||||
| `message` | object | Slackから返されたすべてのプロパティを含む完全なメッセージオブジェクト |
|
||||
| `ts` | string | メッセージのタイムスタンプ |
|
||||
| `channel` | string | メッセージが送信されたチャンネルID |
|
||||
| `fileCount` | number | アップロードされたファイル数(ファイルが添付されている場合) |
|
||||
|
||||
### `slack_canvas`
|
||||
|
||||
@@ -154,6 +156,7 @@ Slackでボットが以前送信したメッセージを更新する
|
||||
|
||||
| パラメータ | 型 | 説明 |
|
||||
| --------- | ---- | ----------- |
|
||||
| `message` | object | Slackから返されたすべてのプロパティを含む完全に更新されたメッセージオブジェクト |
|
||||
| `content` | string | 成功メッセージ |
|
||||
| `metadata` | object | 更新されたメッセージのメタデータ |
|
||||
|
||||
|
||||
@@ -69,8 +69,10 @@ import { BlockInfoCard } from "@/components/ui/block-info-card"
|
||||
|
||||
| 参数 | 类型 | 描述 |
|
||||
| --------- | ---- | ----------- |
|
||||
| `message` | object | 包含 Slack 返回的所有属性的完整消息对象 |
|
||||
| `ts` | string | 消息时间戳 |
|
||||
| `channel` | string | 消息发送的频道 ID |
|
||||
| `channel` | string | 发送消息的频道 ID |
|
||||
| `fileCount` | number | 上传的文件数量(当附加文件时) |
|
||||
|
||||
### `slack_canvas`
|
||||
|
||||
@@ -153,8 +155,9 @@ import { BlockInfoCard } from "@/components/ui/block-info-card"
|
||||
|
||||
| 参数 | 类型 | 描述 |
|
||||
| --------- | ---- | ----------- |
|
||||
| `message` | object | 包含 Slack 返回的所有属性的完整更新消息对象 |
|
||||
| `content` | string | 成功消息 |
|
||||
| `metadata` | object | 更新后的消息元数据 |
|
||||
| `metadata` | object | 更新的消息元数据 |
|
||||
|
||||
### `slack_delete_message`
|
||||
|
||||
|
||||
@@ -894,7 +894,7 @@ checksums:
|
||||
content/14: 371d0e46b4bd2c23f559b8bc112f6955
|
||||
content/15: 2f696275726cdeefd7d7280b5bb43b21
|
||||
content/16: bcadfc362b69078beee0088e5936c98b
|
||||
content/17: 1f31e78210417a7f251f29e0b93a8528
|
||||
content/17: bb43e4f36fdc1eb6211f46ddeed9e0aa
|
||||
content/18: 05540cb3028d4d781521c14e5f9e3835
|
||||
content/19: 14583a25c48ebea2cef414b4758b883d
|
||||
content/20: 371d0e46b4bd2c23f559b8bc112f6955
|
||||
@@ -918,7 +918,7 @@ checksums:
|
||||
content/38: 371d0e46b4bd2c23f559b8bc112f6955
|
||||
content/39: 5319bf5409aced353e6b9d67f597ffef
|
||||
content/40: bcadfc362b69078beee0088e5936c98b
|
||||
content/41: bc5ec1d17d0dbc502ad0f27b6b8b1242
|
||||
content/41: d67f49be147c5ea63f88554f3b8eaed1
|
||||
content/42: e10ecb501eb65fd1a59501a40b707c7a
|
||||
content/43: d829a82e9bcbcfb6239ca2ed9e10ba77
|
||||
content/44: 371d0e46b4bd2c23f559b8bc112f6955
|
||||
|
||||
@@ -513,7 +513,7 @@ function SignupFormContent({
|
||||
disabled={isLoading}
|
||||
>
|
||||
<span className='flex items-center gap-1'>
|
||||
{isLoading ? 'Creating account...' : 'Create account'}
|
||||
{isLoading ? 'Creating account' : 'Create account'}
|
||||
<span className='inline-flex transition-transform duration-200 group-hover:translate-x-0.5'>
|
||||
{isButtonHovered ? (
|
||||
<ArrowRight className='h-4 w-4' aria-hidden='true' />
|
||||
|
||||
@@ -215,7 +215,7 @@ export default function Hero() {
|
||||
{ key: 'linear', icon: LinearIcon, label: 'Linear', style: { color: '#5E6AD2' } },
|
||||
{ key: 'discord', icon: DiscordIcon, label: 'Discord', style: { color: '#5765F2' } },
|
||||
{ key: 'airtable', icon: AirtableIcon, label: 'Airtable' },
|
||||
{ key: 'stripe', icon: StripeIcon, label: 'Stripe' },
|
||||
{ key: 'stripe', icon: StripeIcon, label: 'Stripe', style: { color: '#635BFF' } },
|
||||
{ key: 'notion', icon: NotionIcon, label: 'Notion' },
|
||||
{ key: 'googleSheets', icon: GoogleSheetsIcon, label: 'Google Sheets' },
|
||||
{ key: 'googleDrive', icon: GoogleDriveIcon, label: 'Google Drive' },
|
||||
|
||||
@@ -60,7 +60,12 @@ export async function GET(request: NextRequest) {
|
||||
let conditions: SQL | undefined = eq(workflow.workspaceId, params.workspaceId)
|
||||
|
||||
if (params.level && params.level !== 'all') {
|
||||
conditions = and(conditions, eq(workflowExecutionLogs.level, params.level))
|
||||
const levels = params.level.split(',').filter(Boolean)
|
||||
if (levels.length === 1) {
|
||||
conditions = and(conditions, eq(workflowExecutionLogs.level, levels[0]))
|
||||
} else if (levels.length > 1) {
|
||||
conditions = and(conditions, inArray(workflowExecutionLogs.level, levels))
|
||||
}
|
||||
}
|
||||
|
||||
if (params.workflowIds) {
|
||||
|
||||
@@ -126,9 +126,14 @@ export async function GET(request: NextRequest) {
|
||||
// Build additional conditions for the query
|
||||
let conditions: SQL | undefined
|
||||
|
||||
// Filter by level
|
||||
// Filter by level (supports comma-separated for OR conditions)
|
||||
if (params.level && params.level !== 'all') {
|
||||
conditions = and(conditions, eq(workflowExecutionLogs.level, params.level))
|
||||
const levels = params.level.split(',').filter(Boolean)
|
||||
if (levels.length === 1) {
|
||||
conditions = and(conditions, eq(workflowExecutionLogs.level, levels[0]))
|
||||
} else if (levels.length > 1) {
|
||||
conditions = and(conditions, inArray(workflowExecutionLogs.level, levels))
|
||||
}
|
||||
}
|
||||
|
||||
// Filter by specific workflow IDs
|
||||
|
||||
@@ -6,6 +6,7 @@ import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { processSingleFileToUserFile } from '@/lib/uploads/utils/file-utils'
|
||||
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
|
||||
import { generateRequestId } from '@/lib/utils'
|
||||
import { normalizeExcelValues } from '@/tools/onedrive/utils'
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
@@ -13,6 +14,14 @@ const logger = createLogger('OneDriveUploadAPI')
|
||||
|
||||
const MICROSOFT_GRAPH_BASE = 'https://graph.microsoft.com/v1.0'
|
||||
|
||||
const ExcelCellSchema = z.union([z.string(), z.number(), z.boolean(), z.null()])
|
||||
const ExcelRowSchema = z.array(ExcelCellSchema)
|
||||
const ExcelValuesSchema = z.union([
|
||||
z.string(),
|
||||
z.array(ExcelRowSchema),
|
||||
z.array(z.record(ExcelCellSchema)),
|
||||
])
|
||||
|
||||
const OneDriveUploadSchema = z.object({
|
||||
accessToken: z.string().min(1, 'Access token is required'),
|
||||
fileName: z.string().min(1, 'File name is required'),
|
||||
@@ -20,7 +29,7 @@ const OneDriveUploadSchema = z.object({
|
||||
folderId: z.string().optional().nullable(),
|
||||
mimeType: z.string().optional(),
|
||||
// Optional Excel write-after-create inputs
|
||||
values: z.array(z.array(z.union([z.string(), z.number(), z.boolean(), z.null()]))).optional(),
|
||||
values: ExcelValuesSchema.optional(),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
@@ -46,6 +55,7 @@ export async function POST(request: NextRequest) {
|
||||
|
||||
const body = await request.json()
|
||||
const validatedData = OneDriveUploadSchema.parse(body)
|
||||
const excelValues = normalizeExcelValues(validatedData.values)
|
||||
|
||||
let fileBuffer: Buffer
|
||||
let mimeType: string
|
||||
@@ -180,7 +190,7 @@ export async function POST(request: NextRequest) {
|
||||
// If this is an Excel creation and values were provided, write them using the Excel API
|
||||
let excelWriteResult: any | undefined
|
||||
const shouldWriteExcelContent =
|
||||
isExcelCreation && Array.isArray(validatedData.values) && validatedData.values.length > 0
|
||||
isExcelCreation && Array.isArray(excelValues) && excelValues.length > 0
|
||||
|
||||
if (shouldWriteExcelContent) {
|
||||
try {
|
||||
@@ -232,7 +242,7 @@ export async function POST(request: NextRequest) {
|
||||
logger.warn(`[${requestId}] Error listing worksheets, using default Sheet1`, listError)
|
||||
}
|
||||
|
||||
let processedValues: any = validatedData.values || []
|
||||
let processedValues: any = excelValues || []
|
||||
|
||||
if (
|
||||
Array.isArray(processedValues) &&
|
||||
|
||||
@@ -78,9 +78,16 @@ export async function POST(request: NextRequest) {
|
||||
}
|
||||
|
||||
logger.info(`[${requestId}] Message sent successfully`)
|
||||
const messageObj = data.message || {
|
||||
type: 'message',
|
||||
ts: data.ts,
|
||||
text: validatedData.text,
|
||||
channel: data.channel,
|
||||
}
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
output: {
|
||||
message: messageObj,
|
||||
ts: data.ts,
|
||||
channel: data.channel,
|
||||
},
|
||||
@@ -107,9 +114,16 @@ export async function POST(request: NextRequest) {
|
||||
})
|
||||
|
||||
const data = await response.json()
|
||||
const messageObj = data.message || {
|
||||
type: 'message',
|
||||
ts: data.ts,
|
||||
text: validatedData.text,
|
||||
channel: data.channel,
|
||||
}
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
output: {
|
||||
message: messageObj,
|
||||
ts: data.ts,
|
||||
channel: data.channel,
|
||||
},
|
||||
@@ -174,9 +188,16 @@ export async function POST(request: NextRequest) {
|
||||
})
|
||||
|
||||
const data = await response.json()
|
||||
const messageObj = data.message || {
|
||||
type: 'message',
|
||||
ts: data.ts,
|
||||
text: validatedData.text,
|
||||
channel: data.channel,
|
||||
}
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
output: {
|
||||
message: messageObj,
|
||||
ts: data.ts,
|
||||
channel: data.channel,
|
||||
},
|
||||
@@ -211,10 +232,28 @@ export async function POST(request: NextRequest) {
|
||||
|
||||
logger.info(`[${requestId}] Files uploaded and shared successfully`)
|
||||
|
||||
// For file uploads, construct a message object
|
||||
const fileTs = completeData.files?.[0]?.created?.toString() || (Date.now() / 1000).toString()
|
||||
const fileMessage = {
|
||||
type: 'message',
|
||||
ts: fileTs,
|
||||
text: validatedData.text,
|
||||
channel: validatedData.channel,
|
||||
files: completeData.files?.map((file: any) => ({
|
||||
id: file?.id,
|
||||
name: file?.name,
|
||||
mimetype: file?.mimetype,
|
||||
size: file?.size,
|
||||
url_private: file?.url_private,
|
||||
permalink: file?.permalink,
|
||||
})),
|
||||
}
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
output: {
|
||||
ts: completeData.files?.[0]?.created || Date.now() / 1000,
|
||||
message: fileMessage,
|
||||
ts: fileTs,
|
||||
channel: validatedData.channel,
|
||||
fileCount: uploadedFileIds.length,
|
||||
},
|
||||
|
||||
@@ -78,14 +78,22 @@ export async function POST(request: NextRequest) {
|
||||
timestamp: data.ts,
|
||||
})
|
||||
|
||||
const messageObj = data.message || {
|
||||
type: 'message',
|
||||
ts: data.ts,
|
||||
text: data.text || validatedData.text,
|
||||
channel: data.channel,
|
||||
}
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
output: {
|
||||
message: messageObj,
|
||||
content: 'Message updated successfully',
|
||||
metadata: {
|
||||
channel: data.channel,
|
||||
timestamp: data.ts,
|
||||
text: data.text,
|
||||
text: data.text || validatedData.text,
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
@@ -74,6 +74,30 @@
|
||||
animation: dash-animation 1.5s linear infinite !important;
|
||||
}
|
||||
|
||||
/**
|
||||
* Active block ring animation - cycles through gray tones using box-shadow
|
||||
*/
|
||||
@keyframes ring-pulse-colors {
|
||||
0%,
|
||||
100% {
|
||||
box-shadow: 0 0 0 4px var(--surface-14);
|
||||
}
|
||||
33% {
|
||||
box-shadow: 0 0 0 4px var(--surface-12);
|
||||
}
|
||||
66% {
|
||||
box-shadow: 0 0 0 4px var(--surface-15);
|
||||
}
|
||||
}
|
||||
|
||||
.dark .animate-ring-pulse {
|
||||
animation: ring-pulse-colors 2s ease-in-out infinite !important;
|
||||
}
|
||||
|
||||
.light .animate-ring-pulse {
|
||||
animation: ring-pulse-colors 2s ease-in-out infinite !important;
|
||||
}
|
||||
|
||||
/**
|
||||
* Dark color tokens - single source of truth for all colors (dark-only)
|
||||
*/
|
||||
|
||||
@@ -44,8 +44,12 @@ import {
|
||||
SearchInput,
|
||||
} from '@/app/workspace/[workspaceId]/knowledge/components'
|
||||
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
|
||||
import { useKnowledgeBase, useKnowledgeBaseDocuments } from '@/hooks/use-knowledge'
|
||||
import { type DocumentData, useKnowledgeStore } from '@/stores/knowledge/store'
|
||||
import {
|
||||
useKnowledgeBase,
|
||||
useKnowledgeBaseDocuments,
|
||||
useKnowledgeBasesList,
|
||||
} from '@/hooks/use-knowledge'
|
||||
import type { DocumentData } from '@/stores/knowledge/store'
|
||||
|
||||
const logger = createLogger('KnowledgeBase')
|
||||
|
||||
@@ -125,10 +129,10 @@ export function KnowledgeBase({
|
||||
id,
|
||||
knowledgeBaseName: passedKnowledgeBaseName,
|
||||
}: KnowledgeBaseProps) {
|
||||
const { removeKnowledgeBase } = useKnowledgeStore()
|
||||
const userPermissions = useUserPermissionsContext()
|
||||
const params = useParams()
|
||||
const workspaceId = params.workspaceId as string
|
||||
const { removeKnowledgeBase } = useKnowledgeBasesList(workspaceId, { enabled: false })
|
||||
const userPermissions = useUserPermissionsContext()
|
||||
|
||||
const [searchQuery, setSearchQuery] = useState('')
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import type { ReactNode } from 'react'
|
||||
import { Loader2, RefreshCw, Search } from 'lucide-react'
|
||||
import { ArrowUp, Loader2, RefreshCw, Search } from 'lucide-react'
|
||||
import { Button, Tooltip } from '@/components/emcn'
|
||||
import { Input } from '@/components/ui/input'
|
||||
import { cn } from '@/lib/utils'
|
||||
@@ -16,7 +16,6 @@ export function Controls({
|
||||
viewMode,
|
||||
setViewMode,
|
||||
searchComponent,
|
||||
showExport = true,
|
||||
onExport,
|
||||
}: {
|
||||
searchQuery?: string
|
||||
@@ -72,6 +71,23 @@ export function Controls({
|
||||
)}
|
||||
|
||||
<div className='ml-auto flex flex-shrink-0 items-center gap-3'>
|
||||
{viewMode !== 'dashboard' && (
|
||||
<Tooltip.Root>
|
||||
<Tooltip.Trigger asChild>
|
||||
<Button
|
||||
variant='ghost'
|
||||
onClick={onExport}
|
||||
className='h-9 w-9 p-0 hover:bg-secondary'
|
||||
aria-label='Export CSV'
|
||||
>
|
||||
<ArrowUp className='h-4 w-4' />
|
||||
<span className='sr-only'>Export CSV</span>
|
||||
</Button>
|
||||
</Tooltip.Trigger>
|
||||
<Tooltip.Content>Export CSV</Tooltip.Content>
|
||||
</Tooltip.Root>
|
||||
)}
|
||||
|
||||
<Tooltip.Root>
|
||||
<Tooltip.Trigger asChild>
|
||||
<Button
|
||||
@@ -81,9 +97,9 @@ export function Controls({
|
||||
disabled={isRefetching}
|
||||
>
|
||||
{isRefetching ? (
|
||||
<Loader2 className='h-5 w-5 animate-spin' />
|
||||
<Loader2 className='h-4 w-4 animate-spin' />
|
||||
) : (
|
||||
<RefreshCw className='h-5 w-5' />
|
||||
<RefreshCw className='h-4 w-4' />
|
||||
)}
|
||||
<span className='sr-only'>Refresh</span>
|
||||
</Button>
|
||||
@@ -91,32 +107,6 @@ export function Controls({
|
||||
<Tooltip.Content>{isRefetching ? 'Refreshing...' : 'Refresh'}</Tooltip.Content>
|
||||
</Tooltip.Root>
|
||||
|
||||
<Tooltip.Root>
|
||||
<Tooltip.Trigger asChild>
|
||||
<Button
|
||||
variant='ghost'
|
||||
onClick={onExport}
|
||||
className='h-9 w-9 p-0 hover:bg-secondary'
|
||||
aria-label='Export CSV'
|
||||
>
|
||||
<svg
|
||||
xmlns='http://www.w3.org/2000/svg'
|
||||
viewBox='0 0 24 24'
|
||||
fill='none'
|
||||
stroke='currentColor'
|
||||
strokeWidth='2'
|
||||
className='h-5 w-5'
|
||||
>
|
||||
<path d='M21 15v4a2 2 0 0 1-2 2H5a2 2 0 0 1-2-2v-4' />
|
||||
<polyline points='7 10 12 15 17 10' />
|
||||
<line x1='12' y1='15' x2='12' y2='3' />
|
||||
</svg>
|
||||
<span className='sr-only'>Export CSV</span>
|
||||
</Button>
|
||||
</Tooltip.Trigger>
|
||||
<Tooltip.Content>Export CSV</Tooltip.Content>
|
||||
</Tooltip.Root>
|
||||
|
||||
<div className='inline-flex h-9 items-center rounded-[11px] border bg-card p-1 shadow-sm'>
|
||||
<Button
|
||||
variant='ghost'
|
||||
|
||||
@@ -9,25 +9,25 @@ export interface AggregateMetrics {
|
||||
export function KPIs({ aggregate }: { aggregate: AggregateMetrics }) {
|
||||
return (
|
||||
<div className='mb-2 grid grid-cols-1 gap-3 sm:grid-cols-2 lg:grid-cols-4'>
|
||||
<div className='border bg-card p-4 shadow-sm'>
|
||||
<div className='rounded-[11px] border bg-card p-4 shadow-sm'>
|
||||
<div className='text-muted-foreground text-xs'>Total executions</div>
|
||||
<div className='mt-1 font-[440] text-[22px] leading-6'>
|
||||
{aggregate.totalExecutions.toLocaleString()}
|
||||
</div>
|
||||
</div>
|
||||
<div className='border bg-card p-4 shadow-sm'>
|
||||
<div className='rounded-[11px] border bg-card p-4 shadow-sm'>
|
||||
<div className='text-muted-foreground text-xs'>Success rate</div>
|
||||
<div className='mt-1 font-[440] text-[22px] leading-6'>
|
||||
{aggregate.successRate.toFixed(1)}%
|
||||
</div>
|
||||
</div>
|
||||
<div className='border bg-card p-4 shadow-sm'>
|
||||
<div className='rounded-[11px] border bg-card p-4 shadow-sm'>
|
||||
<div className='text-muted-foreground text-xs'>Failed executions</div>
|
||||
<div className='mt-1 font-[440] text-[22px] leading-6'>
|
||||
{aggregate.failedExecutions.toLocaleString()}
|
||||
</div>
|
||||
</div>
|
||||
<div className='border bg-card p-4 shadow-sm'>
|
||||
<div className='rounded-[11px] border bg-card p-4 shadow-sm'>
|
||||
<div className='text-muted-foreground text-xs'>Active workflows</div>
|
||||
<div className='mt-1 font-[440] text-[22px] leading-6'>{aggregate.activeWorkflows}</div>
|
||||
</div>
|
||||
|
||||
@@ -174,55 +174,48 @@ export function LineChart({
|
||||
ref={containerRef}
|
||||
className='w-full overflow-hidden rounded-[11px] border bg-card p-4 shadow-sm'
|
||||
>
|
||||
<div className='mb-3 flex items-center justify-between'>
|
||||
<div className='flex items-center gap-3'>
|
||||
<h4 className='font-medium text-foreground text-sm'>{label}</h4>
|
||||
{allSeries.length > 1 && (
|
||||
<div className='flex items-center gap-2'>
|
||||
{scaledSeries.slice(1).map((s) => {
|
||||
const isActive = activeSeriesId ? activeSeriesId === s.id : true
|
||||
const isHovered = hoverSeriesId === s.id
|
||||
const dimmed = activeSeriesId ? !isActive : false
|
||||
return (
|
||||
<button
|
||||
key={`legend-${s.id}`}
|
||||
type='button'
|
||||
aria-pressed={activeSeriesId === s.id}
|
||||
aria-label={`Toggle ${s.label}`}
|
||||
className='inline-flex items-center gap-1 rounded-md px-1.5 py-0.5 text-[10px]'
|
||||
style={{
|
||||
color: s.color,
|
||||
opacity: dimmed ? 0.4 : isHovered ? 1 : 0.9,
|
||||
border: '1px solid hsl(var(--border))',
|
||||
background: 'transparent',
|
||||
}}
|
||||
onMouseEnter={() => setHoverSeriesId(s.id || null)}
|
||||
onMouseLeave={() => setHoverSeriesId((prev) => (prev === s.id ? null : prev))}
|
||||
onKeyDown={(e) => {
|
||||
if (e.key === 'Enter' || e.key === ' ') {
|
||||
e.preventDefault()
|
||||
setActiveSeriesId((prev) => (prev === s.id ? null : s.id || null))
|
||||
}
|
||||
}}
|
||||
onClick={() =>
|
||||
<div className='mb-3 flex items-center gap-3'>
|
||||
<h4 className='font-medium text-foreground text-sm'>{label}</h4>
|
||||
{allSeries.length > 1 && (
|
||||
<div className='flex items-center gap-2'>
|
||||
{scaledSeries.slice(1).map((s) => {
|
||||
const isActive = activeSeriesId ? activeSeriesId === s.id : true
|
||||
const isHovered = hoverSeriesId === s.id
|
||||
const dimmed = activeSeriesId ? !isActive : false
|
||||
return (
|
||||
<button
|
||||
key={`legend-${s.id}`}
|
||||
type='button'
|
||||
aria-pressed={activeSeriesId === s.id}
|
||||
aria-label={`Toggle ${s.label}`}
|
||||
className='inline-flex items-center gap-1 rounded-md px-1.5 py-0.5 text-[10px]'
|
||||
style={{
|
||||
color: s.color,
|
||||
opacity: dimmed ? 0.4 : isHovered ? 1 : 0.9,
|
||||
border: '1px solid hsl(var(--border))',
|
||||
background: 'transparent',
|
||||
}}
|
||||
onMouseEnter={() => setHoverSeriesId(s.id || null)}
|
||||
onMouseLeave={() => setHoverSeriesId((prev) => (prev === s.id ? null : prev))}
|
||||
onKeyDown={(e) => {
|
||||
if (e.key === 'Enter' || e.key === ' ') {
|
||||
e.preventDefault()
|
||||
setActiveSeriesId((prev) => (prev === s.id ? null : s.id || null))
|
||||
}
|
||||
>
|
||||
<span
|
||||
aria-hidden='true'
|
||||
className='inline-block h-[6px] w-[6px] rounded-full'
|
||||
style={{ backgroundColor: s.color }}
|
||||
/>
|
||||
<span style={{ color: 'hsl(var(--muted-foreground))' }}>{s.label}</span>
|
||||
</button>
|
||||
)
|
||||
})}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
{currentHoverDate ? (
|
||||
<div className='text-[10px] text-muted-foreground'>{currentHoverDate}</div>
|
||||
) : null}
|
||||
}}
|
||||
onClick={() => setActiveSeriesId((prev) => (prev === s.id ? null : s.id || null))}
|
||||
>
|
||||
<span
|
||||
aria-hidden='true'
|
||||
className='inline-block h-[6px] w-[6px] rounded-full'
|
||||
style={{ backgroundColor: s.color }}
|
||||
/>
|
||||
<span style={{ color: 'hsl(var(--muted-foreground))' }}>{s.label}</span>
|
||||
</button>
|
||||
)
|
||||
})}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
<div className='relative' style={{ width, height }}>
|
||||
<svg
|
||||
@@ -556,6 +549,9 @@ export function LineChart({
|
||||
className='pointer-events-none absolute rounded-md bg-background/80 px-2 py-1 font-medium text-[11px] shadow-sm ring-1 ring-border backdrop-blur'
|
||||
style={{ left, top }}
|
||||
>
|
||||
{currentHoverDate && (
|
||||
<div className='mb-1 text-[10px] text-muted-foreground'>{currentHoverDate}</div>
|
||||
)}
|
||||
{toDisplay.map((s) => {
|
||||
const seriesIndex = allSeries.findIndex((x) => x.id === s.id)
|
||||
const val = allSeries[seriesIndex]?.data?.[hoverIndex]?.value
|
||||
|
||||
@@ -2,13 +2,20 @@ import { useEffect, useMemo, useRef, useState } from 'react'
|
||||
import { ArrowUpRight, Info, Loader2 } from 'lucide-react'
|
||||
import Link from 'next/link'
|
||||
import { useRouter } from 'next/navigation'
|
||||
import { highlight, languages } from 'prismjs'
|
||||
import 'prismjs/components/prism-javascript'
|
||||
import 'prismjs/components/prism-python'
|
||||
import 'prismjs/components/prism-json'
|
||||
import { CopyButton } from '@/components/ui/copy-button'
|
||||
import { cn } from '@/lib/utils'
|
||||
import LineChart, {
|
||||
type LineChartPoint,
|
||||
} from '@/app/workspace/[workspaceId]/logs/components/dashboard/line-chart'
|
||||
import { getTriggerColor } from '@/app/workspace/[workspaceId]/logs/components/dashboard/utils'
|
||||
import LogMarkdownRenderer from '@/app/workspace/[workspaceId]/logs/components/sidebar/components/markdown-renderer'
|
||||
import { formatDate } from '@/app/workspace/[workspaceId]/logs/utils'
|
||||
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
||||
import '@/components/emcn/components/code/code.css'
|
||||
|
||||
export interface ExecutionLogItem {
|
||||
id: string
|
||||
@@ -31,6 +38,27 @@ export interface ExecutionLogItem {
|
||||
hasPendingPause?: boolean
|
||||
}
|
||||
|
||||
/**
|
||||
* Tries to parse a string as JSON and prettify it
|
||||
*/
|
||||
const tryPrettifyJson = (content: string): { isJson: boolean; formatted: string } => {
|
||||
try {
|
||||
const trimmed = content.trim()
|
||||
if (
|
||||
!(trimmed.startsWith('{') || trimmed.startsWith('[')) ||
|
||||
!(trimmed.endsWith('}') || trimmed.endsWith(']'))
|
||||
) {
|
||||
return { isJson: false, formatted: content }
|
||||
}
|
||||
|
||||
const parsed = JSON.parse(trimmed)
|
||||
const prettified = JSON.stringify(parsed, null, 2)
|
||||
return { isJson: true, formatted: prettified }
|
||||
} catch (_e) {
|
||||
return { isJson: false, formatted: content }
|
||||
}
|
||||
}
|
||||
|
||||
export interface WorkflowDetailsData {
|
||||
errorRates: LineChartPoint[]
|
||||
durations?: LineChartPoint[]
|
||||
@@ -50,6 +78,9 @@ export function WorkflowDetails({
|
||||
details,
|
||||
selectedSegmentIndex,
|
||||
selectedSegment,
|
||||
selectedSegmentTimeRange,
|
||||
selectedWorkflowNames,
|
||||
segmentDurationMs,
|
||||
clearSegmentSelection,
|
||||
formatCost,
|
||||
onLoadMore,
|
||||
@@ -63,6 +94,9 @@ export function WorkflowDetails({
|
||||
details: WorkflowDetailsData | undefined
|
||||
selectedSegmentIndex: number[] | null
|
||||
selectedSegment: { timestamp: string; totalExecutions: number } | null
|
||||
selectedSegmentTimeRange?: { start: Date; end: Date } | null
|
||||
selectedWorkflowNames?: string[]
|
||||
segmentDurationMs?: number
|
||||
clearSegmentSelection: () => void
|
||||
formatCost: (n: number) => string
|
||||
onLoadMore?: () => void
|
||||
@@ -128,29 +162,111 @@ export function WorkflowDetails({
|
||||
<div className='border-b bg-muted/30 px-4 py-2.5'>
|
||||
<div className='flex items-center justify-between'>
|
||||
<div className='flex items-center gap-2'>
|
||||
<button
|
||||
onClick={() => router.push(`/workspace/${workspaceId}/w/${expandedWorkflowId}`)}
|
||||
className='group inline-flex items-center gap-2 text-left'
|
||||
>
|
||||
<span
|
||||
className='h-[14px] w-[14px] flex-shrink-0 rounded'
|
||||
style={{ backgroundColor: workflowColor }}
|
||||
/>
|
||||
<span className='font-[480] text-sm tracking-tight group-hover:text-primary dark:font-[560]'>
|
||||
{workflowName}
|
||||
</span>
|
||||
</button>
|
||||
{expandedWorkflowId !== 'all' && expandedWorkflowId !== '__multi__' ? (
|
||||
<button
|
||||
onClick={() => router.push(`/workspace/${workspaceId}/w/${expandedWorkflowId}`)}
|
||||
className='group inline-flex items-center gap-2 text-left transition-opacity hover:opacity-70'
|
||||
>
|
||||
<span
|
||||
className='h-[14px] w-[14px] flex-shrink-0 rounded'
|
||||
style={{ backgroundColor: workflowColor }}
|
||||
/>
|
||||
<span className='font-[480] text-sm tracking-tight dark:font-[560]'>
|
||||
{workflowName}
|
||||
</span>
|
||||
</button>
|
||||
) : (
|
||||
<div className='inline-flex items-center gap-2'>
|
||||
<span
|
||||
className='h-[14px] w-[14px] flex-shrink-0 rounded'
|
||||
style={{ backgroundColor: workflowColor }}
|
||||
/>
|
||||
<span className='font-[480] text-sm tracking-tight dark:font-[560]'>
|
||||
{workflowName}
|
||||
</span>
|
||||
</div>
|
||||
)}
|
||||
{Array.isArray(selectedSegmentIndex) &&
|
||||
selectedSegmentIndex.length > 0 &&
|
||||
(selectedSegment || selectedSegmentTimeRange || expandedWorkflowId === '__multi__') &&
|
||||
(() => {
|
||||
let tsLabel = 'Selected segment'
|
||||
if (selectedSegmentTimeRange) {
|
||||
const start = selectedSegmentTimeRange.start
|
||||
const end = selectedSegmentTimeRange.end
|
||||
const startFormatted = start.toLocaleString('en-US', {
|
||||
month: 'short',
|
||||
day: 'numeric',
|
||||
hour: 'numeric',
|
||||
minute: '2-digit',
|
||||
hour12: true,
|
||||
})
|
||||
const endFormatted = end.toLocaleString('en-US', {
|
||||
hour: 'numeric',
|
||||
minute: '2-digit',
|
||||
hour12: true,
|
||||
})
|
||||
tsLabel = `${startFormatted} – ${endFormatted}`
|
||||
} else if (selectedSegment?.timestamp) {
|
||||
const tsObj = new Date(selectedSegment.timestamp)
|
||||
if (!Number.isNaN(tsObj.getTime())) {
|
||||
tsLabel = tsObj.toLocaleString('en-US', {
|
||||
month: 'short',
|
||||
day: 'numeric',
|
||||
hour: 'numeric',
|
||||
minute: '2-digit',
|
||||
hour12: true,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
const isMultiWorkflow =
|
||||
expandedWorkflowId === '__multi__' &&
|
||||
selectedWorkflowNames &&
|
||||
selectedWorkflowNames.length > 0
|
||||
const workflowLabel = isMultiWorkflow
|
||||
? selectedWorkflowNames.length <= 2
|
||||
? selectedWorkflowNames.join(', ')
|
||||
: `${selectedWorkflowNames.slice(0, 2).join(', ')} +${selectedWorkflowNames.length - 2}`
|
||||
: null
|
||||
|
||||
return (
|
||||
<div className='inline-flex h-7 items-center gap-1.5 rounded-md border bg-muted/50 px-2.5'>
|
||||
{isMultiWorkflow && workflowLabel && (
|
||||
<span className='font-medium text-[11px] text-muted-foreground'>
|
||||
{workflowLabel}
|
||||
</span>
|
||||
)}
|
||||
<span className='font-medium text-[11px] text-foreground'>
|
||||
{tsLabel}
|
||||
{selectedSegmentIndex.length > 1 && !isMultiWorkflow
|
||||
? ` (+${selectedSegmentIndex.length - 1})`
|
||||
: ''}
|
||||
</span>
|
||||
<button
|
||||
onClick={(e) => {
|
||||
e.stopPropagation()
|
||||
clearSegmentSelection()
|
||||
}}
|
||||
className='ml-0.5 flex h-4 w-4 items-center justify-center rounded text-muted-foreground text-xs transition-colors hover:bg-muted hover:text-foreground focus:outline-none focus:ring-1 focus:ring-primary/40'
|
||||
aria-label='Clear filter'
|
||||
>
|
||||
×
|
||||
</button>
|
||||
</div>
|
||||
)
|
||||
})()}
|
||||
</div>
|
||||
<div className='flex items-center gap-2'>
|
||||
<div className='inline-flex h-7 items-center gap-2 border px-2.5'>
|
||||
<div className='inline-flex h-7 items-center gap-2 rounded border px-2.5'>
|
||||
<span className='text-[11px] text-muted-foreground'>Executions</span>
|
||||
<span className='font-[500] text-sm leading-none'>{overview.total}</span>
|
||||
</div>
|
||||
<div className='inline-flex h-7 items-center gap-2 border px-2.5'>
|
||||
<div className='inline-flex h-7 items-center gap-2 rounded border px-2.5'>
|
||||
<span className='text-[11px] text-muted-foreground'>Success</span>
|
||||
<span className='font-[500] text-sm leading-none'>{overview.rate.toFixed(1)}%</span>
|
||||
</div>
|
||||
<div className='inline-flex h-7 items-center gap-2 border px-2.5'>
|
||||
<div className='inline-flex h-7 items-center gap-2 rounded border px-2.5'>
|
||||
<span className='text-[11px] text-muted-foreground'>Failures</span>
|
||||
<span className='font-[500] text-sm leading-none'>{overview.failures}</span>
|
||||
</div>
|
||||
@@ -160,53 +276,14 @@ export function WorkflowDetails({
|
||||
<div className='p-4'>
|
||||
{details ? (
|
||||
<>
|
||||
{Array.isArray(selectedSegmentIndex) &&
|
||||
selectedSegmentIndex.length > 0 &&
|
||||
selectedSegment &&
|
||||
(() => {
|
||||
const tsObj = selectedSegment?.timestamp
|
||||
? new Date(selectedSegment.timestamp)
|
||||
: null
|
||||
const tsLabel =
|
||||
tsObj && !Number.isNaN(tsObj.getTime())
|
||||
? tsObj.toLocaleString('en-US', {
|
||||
month: 'short',
|
||||
day: 'numeric',
|
||||
hour: 'numeric',
|
||||
minute: '2-digit',
|
||||
hour12: true,
|
||||
})
|
||||
: 'Selected segment'
|
||||
return (
|
||||
<div className='mb-4 flex items-center justify-between border bg-muted/30 px-3 py-2 text-[13px] text-foreground'>
|
||||
<div className='flex items-center gap-2'>
|
||||
<div className='h-1.5 w-1.5 rounded-full bg-primary ring-2 ring-primary/30' />
|
||||
<span className='font-medium'>
|
||||
Filtered to {tsLabel}
|
||||
{selectedSegmentIndex.length > 1
|
||||
? ` (+${selectedSegmentIndex.length - 1} more segment${selectedSegmentIndex.length - 1 > 1 ? 's' : ''})`
|
||||
: ''}
|
||||
— {selectedSegment.totalExecutions} execution
|
||||
{selectedSegment.totalExecutions !== 1 ? 's' : ''}
|
||||
</span>
|
||||
</div>
|
||||
<button
|
||||
onClick={clearSegmentSelection}
|
||||
className='rounded px-2 py-1 text-foreground text-xs hover:bg-muted focus:outline-none focus:ring-2 focus:ring-primary/40'
|
||||
>
|
||||
Clear filter
|
||||
</button>
|
||||
</div>
|
||||
)
|
||||
})()}
|
||||
|
||||
{(() => {
|
||||
const hasDuration = Array.isArray(details.durations) && details.durations.length > 0
|
||||
const gridCols = hasDuration
|
||||
? 'md:grid-cols-2 xl:grid-cols-4'
|
||||
: 'md:grid-cols-2 xl:grid-cols-3'
|
||||
const gridGap = hasDuration ? 'gap-2 xl:gap-2.5' : 'gap-3'
|
||||
return (
|
||||
<div className={`mb-3 grid grid-cols-1 gap-3 ${gridCols}`}>
|
||||
<div className={`mb-3 grid grid-cols-1 ${gridGap} ${gridCols}`}>
|
||||
<LineChart
|
||||
data={details.errorRates}
|
||||
label='Error Rate'
|
||||
@@ -431,7 +508,7 @@ export function WorkflowDetails({
|
||||
{log.workflowName ? (
|
||||
<div className='inline-flex items-center gap-2'>
|
||||
<span
|
||||
className='h-3.5 w-3.5'
|
||||
className='h-3.5 w-3.5 flex-shrink-0 rounded'
|
||||
style={{ backgroundColor: log.workflowColor || '#64748b' }}
|
||||
/>
|
||||
<span
|
||||
@@ -483,10 +560,31 @@ export function WorkflowDetails({
|
||||
</div>
|
||||
{isExpanded && (
|
||||
<div className='px-2 pt-0 pb-4'>
|
||||
<div className='border bg-muted/30 p-2'>
|
||||
<pre className='max-h-60 overflow-auto whitespace-pre-wrap break-words text-xs'>
|
||||
{log.level === 'error' && errorStr ? errorStr : outputsStr}
|
||||
</pre>
|
||||
<div className='group relative w-full rounded-[4px] border border-[var(--border-strong)] bg-[#1F1F1F] p-3'>
|
||||
<CopyButton
|
||||
text={log.level === 'error' && errorStr ? errorStr : outputsStr}
|
||||
className='z-10 h-7 w-7'
|
||||
/>
|
||||
{(() => {
|
||||
const content =
|
||||
log.level === 'error' && errorStr ? errorStr : outputsStr
|
||||
const { isJson, formatted } = tryPrettifyJson(content)
|
||||
|
||||
return isJson ? (
|
||||
<div className='code-editor-theme'>
|
||||
<pre
|
||||
className='max-h-[300px] w-full overflow-y-auto overflow-x-hidden whitespace-pre-wrap break-all font-mono text-[#eeeeee] text-[11px] leading-[16px]'
|
||||
dangerouslySetInnerHTML={{
|
||||
__html: highlight(formatted, languages.json, 'json'),
|
||||
}}
|
||||
/>
|
||||
</div>
|
||||
) : (
|
||||
<div className='max-h-[300px] overflow-y-auto'>
|
||||
<LogMarkdownRenderer content={formatted} />
|
||||
</div>
|
||||
)
|
||||
})()}
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
@@ -59,7 +59,7 @@ export function WorkflowsList({
|
||||
}
|
||||
return (
|
||||
<div
|
||||
className='overflow-hidden border bg-card shadow-sm'
|
||||
className='overflow-hidden rounded-[11px] border bg-card shadow-sm'
|
||||
style={{ height: '380px', display: 'flex', flexDirection: 'column' }}
|
||||
>
|
||||
<div className='flex-shrink-0 border-b bg-muted/30 px-4 py-2'>
|
||||
@@ -97,7 +97,7 @@ export function WorkflowsList({
|
||||
<div className='w-52 min-w-0 flex-shrink-0'>
|
||||
<div className='flex items-center gap-2'>
|
||||
<div
|
||||
className='h-[14px] w-[14px] flex-shrink-0'
|
||||
className='h-[14px] w-[14px] flex-shrink-0 rounded'
|
||||
style={{
|
||||
backgroundColor: workflows[workflow.workflowId]?.color || '#64748b',
|
||||
}}
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { useEffect, useMemo, useState } from 'react'
|
||||
import { useMemo, useState } from 'react'
|
||||
import { Check, ChevronDown } from 'lucide-react'
|
||||
import { useParams } from 'next/navigation'
|
||||
import { Button } from '@/components/emcn'
|
||||
@@ -22,7 +22,8 @@ import {
|
||||
filterButtonClass,
|
||||
folderDropdownListStyle,
|
||||
} from '@/app/workspace/[workspaceId]/logs/components/filters/components/shared'
|
||||
import { useFolderStore } from '@/stores/folders/store'
|
||||
import { useFolders } from '@/hooks/queries/folders'
|
||||
import { type FolderTreeNode, useFolderStore } from '@/stores/folders/store'
|
||||
import { useFilterStore } from '@/stores/logs/filters/store'
|
||||
|
||||
const logger = createLogger('LogsFolderFilter')
|
||||
@@ -36,56 +37,37 @@ interface FolderOption {
|
||||
|
||||
export default function FolderFilter() {
|
||||
const { folderIds, toggleFolderId, setFolderIds } = useFilterStore()
|
||||
const { getFolderTree, fetchFolders } = useFolderStore()
|
||||
const { getFolderTree } = useFolderStore()
|
||||
const params = useParams()
|
||||
const workspaceId = params.workspaceId as string
|
||||
const [folders, setFolders] = useState<FolderOption[]>([])
|
||||
const [loading, setLoading] = useState(true)
|
||||
const [search, setSearch] = useState('')
|
||||
const { isLoading: foldersLoading } = useFolders(workspaceId)
|
||||
|
||||
// Fetch all available folders from the API
|
||||
useEffect(() => {
|
||||
const fetchFoldersData = async () => {
|
||||
try {
|
||||
setLoading(true)
|
||||
if (workspaceId) {
|
||||
await fetchFolders(workspaceId)
|
||||
const folderTree = getFolderTree(workspaceId)
|
||||
const folderTree = workspaceId ? getFolderTree(workspaceId) : []
|
||||
|
||||
// Flatten the folder tree and create options with full paths
|
||||
const flattenFolders = (nodes: any[], parentPath = ''): FolderOption[] => {
|
||||
const result: FolderOption[] = []
|
||||
const folders: FolderOption[] = useMemo(() => {
|
||||
const flattenFolders = (nodes: FolderTreeNode[], parentPath = ''): FolderOption[] => {
|
||||
const result: FolderOption[] = []
|
||||
|
||||
for (const node of nodes) {
|
||||
const currentPath = parentPath ? `${parentPath} / ${node.name}` : node.name
|
||||
result.push({
|
||||
id: node.id,
|
||||
name: node.name,
|
||||
color: node.color || '#6B7280',
|
||||
path: currentPath,
|
||||
})
|
||||
for (const node of nodes) {
|
||||
const currentPath = parentPath ? `${parentPath} / ${node.name}` : node.name
|
||||
result.push({
|
||||
id: node.id,
|
||||
name: node.name,
|
||||
color: node.color || '#6B7280',
|
||||
path: currentPath,
|
||||
})
|
||||
|
||||
// Add children recursively
|
||||
if (node.children && node.children.length > 0) {
|
||||
result.push(...flattenFolders(node.children, currentPath))
|
||||
}
|
||||
}
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
const folderOptions = flattenFolders(folderTree)
|
||||
setFolders(folderOptions)
|
||||
if (node.children && node.children.length > 0) {
|
||||
result.push(...flattenFolders(node.children, currentPath))
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error('Failed to fetch folders', { error })
|
||||
} finally {
|
||||
setLoading(false)
|
||||
}
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
fetchFoldersData()
|
||||
}, [workspaceId, fetchFolders, getFolderTree])
|
||||
return flattenFolders(folderTree)
|
||||
}, [folderTree])
|
||||
|
||||
// Get display text for the dropdown button
|
||||
const getSelectedFoldersText = () => {
|
||||
@@ -111,7 +93,7 @@ export default function FolderFilter() {
|
||||
<DropdownMenu>
|
||||
<DropdownMenuTrigger asChild>
|
||||
<Button variant='outline' className={filterButtonClass}>
|
||||
{loading ? 'Loading folders...' : getSelectedFoldersText()}
|
||||
{foldersLoading ? 'Loading folders...' : getSelectedFoldersText()}
|
||||
<ChevronDown className='ml-2 h-4 w-4 text-muted-foreground' />
|
||||
</Button>
|
||||
</DropdownMenuTrigger>
|
||||
@@ -125,7 +107,9 @@ export default function FolderFilter() {
|
||||
<Command>
|
||||
<CommandInput placeholder='Search folders...' onValueChange={(v) => setSearch(v)} />
|
||||
<CommandList className={commandListClass} style={folderDropdownListStyle}>
|
||||
<CommandEmpty>{loading ? 'Loading folders...' : 'No folders found.'}</CommandEmpty>
|
||||
<CommandEmpty>
|
||||
{foldersLoading ? 'Loading folders...' : 'No folders found.'}
|
||||
</CommandEmpty>
|
||||
<CommandGroup>
|
||||
<CommandItem
|
||||
value='all-folders'
|
||||
|
||||
@@ -1,21 +1,23 @@
|
||||
'use client'
|
||||
|
||||
import { useEffect, useMemo, useState } from 'react'
|
||||
import { Loader2, Search, X } from 'lucide-react'
|
||||
import { Button } from '@/components/emcn'
|
||||
import { Badge } from '@/components/ui/badge'
|
||||
import { Input } from '@/components/ui/input'
|
||||
import { parseQuery } from '@/lib/logs/query-parser'
|
||||
import { SearchSuggestions } from '@/lib/logs/search-suggestions'
|
||||
import { Search, X } from 'lucide-react'
|
||||
import { Button, Popover, PopoverAnchor, PopoverContent } from '@/components/emcn'
|
||||
import { type ParsedFilter, parseQuery } from '@/lib/logs/query-parser'
|
||||
import {
|
||||
type FolderData,
|
||||
SearchSuggestions,
|
||||
type WorkflowData,
|
||||
} from '@/lib/logs/search-suggestions'
|
||||
import { cn } from '@/lib/utils'
|
||||
import { useAutocomplete } from '@/app/workspace/[workspaceId]/logs/hooks/use-autocomplete'
|
||||
import { useSearchState } from '@/app/workspace/[workspaceId]/logs/hooks/use-search-state'
|
||||
import { useFolderStore } from '@/stores/folders/store'
|
||||
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
||||
|
||||
interface AutocompleteSearchProps {
|
||||
value: string
|
||||
onChange: (value: string) => void
|
||||
placeholder?: string
|
||||
availableWorkflows?: string[]
|
||||
availableFolders?: string[]
|
||||
className?: string
|
||||
onOpenChange?: (open: boolean) => void
|
||||
}
|
||||
@@ -24,301 +26,307 @@ export function AutocompleteSearch({
|
||||
value,
|
||||
onChange,
|
||||
placeholder = 'Search logs...',
|
||||
availableWorkflows = [],
|
||||
availableFolders = [],
|
||||
className,
|
||||
onOpenChange,
|
||||
}: AutocompleteSearchProps) {
|
||||
const workflows = useWorkflowRegistry((state) => state.workflows)
|
||||
const folders = useFolderStore((state) => state.folders)
|
||||
|
||||
const workflowsData = useMemo<WorkflowData[]>(() => {
|
||||
return Object.values(workflows).map((w) => ({
|
||||
id: w.id,
|
||||
name: w.name,
|
||||
description: w.description,
|
||||
}))
|
||||
}, [workflows])
|
||||
|
||||
const foldersData = useMemo<FolderData[]>(() => {
|
||||
return Object.values(folders).map((f) => ({
|
||||
id: f.id,
|
||||
name: f.name,
|
||||
}))
|
||||
}, [folders])
|
||||
|
||||
const suggestionEngine = useMemo(() => {
|
||||
return new SearchSuggestions(availableWorkflows, availableFolders)
|
||||
}, [availableWorkflows, availableFolders])
|
||||
return new SearchSuggestions(workflowsData, foldersData)
|
||||
}, [workflowsData, foldersData])
|
||||
|
||||
const handleFiltersChange = (filters: ParsedFilter[], textSearch: string) => {
|
||||
const filterStrings = filters.map(
|
||||
(f) => `${f.field}:${f.operator !== '=' ? f.operator : ''}${f.originalValue}`
|
||||
)
|
||||
const fullQuery = [...filterStrings, textSearch].filter(Boolean).join(' ')
|
||||
onChange(fullQuery)
|
||||
}
|
||||
|
||||
const {
|
||||
state,
|
||||
appliedFilters,
|
||||
currentInput,
|
||||
textSearch,
|
||||
isOpen,
|
||||
suggestions,
|
||||
sections,
|
||||
highlightedIndex,
|
||||
highlightedBadgeIndex,
|
||||
inputRef,
|
||||
dropdownRef,
|
||||
handleInputChange,
|
||||
handleCursorChange,
|
||||
handleSuggestionHover,
|
||||
handleSuggestionSelect,
|
||||
handleKeyDown,
|
||||
handleFocus,
|
||||
handleBlur,
|
||||
reset: resetAutocomplete,
|
||||
closeDropdown,
|
||||
} = useAutocomplete({
|
||||
getSuggestions: (inputValue, cursorPos) =>
|
||||
suggestionEngine.getSuggestions(inputValue, cursorPos),
|
||||
generatePreview: (suggestion, inputValue, cursorPos) =>
|
||||
suggestionEngine.generatePreview(suggestion, inputValue, cursorPos),
|
||||
onQueryChange: onChange,
|
||||
validateQuery: (query) => suggestionEngine.validateQuery(query),
|
||||
debounceMs: 100,
|
||||
removeBadge,
|
||||
clearAll,
|
||||
setHighlightedIndex,
|
||||
initializeFromQuery,
|
||||
} = useSearchState({
|
||||
onFiltersChange: handleFiltersChange,
|
||||
getSuggestions: (input) => suggestionEngine.getSuggestions(input),
|
||||
})
|
||||
|
||||
const clearAll = () => {
|
||||
resetAutocomplete()
|
||||
closeDropdown()
|
||||
onChange('')
|
||||
if (inputRef.current) {
|
||||
inputRef.current.focus()
|
||||
// Initialize from external value (URL params) - only on mount
|
||||
useEffect(() => {
|
||||
if (value) {
|
||||
const parsed = parseQuery(value)
|
||||
initializeFromQuery(parsed.textSearch, parsed.filters)
|
||||
}
|
||||
}
|
||||
|
||||
const parsedQuery = parseQuery(value)
|
||||
const hasFilters = parsedQuery.filters.length > 0
|
||||
const hasTextSearch = parsedQuery.textSearch.length > 0
|
||||
|
||||
const listboxId = 'logs-search-listbox'
|
||||
const inputId = 'logs-search-input'
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
}, [])
|
||||
|
||||
const [dropdownWidth, setDropdownWidth] = useState(500)
|
||||
useEffect(() => {
|
||||
onOpenChange?.(state.isOpen)
|
||||
}, [state.isOpen, onOpenChange])
|
||||
|
||||
useEffect(() => {
|
||||
if (!state.isOpen || state.highlightedIndex < 0) return
|
||||
const container = dropdownRef.current
|
||||
const optionEl = document.getElementById(`${listboxId}-option-${state.highlightedIndex}`)
|
||||
if (container && optionEl) {
|
||||
try {
|
||||
optionEl.scrollIntoView({ block: 'nearest', behavior: 'smooth' })
|
||||
} catch {
|
||||
optionEl.scrollIntoView({ block: 'nearest' })
|
||||
const measure = () => {
|
||||
if (inputRef.current) {
|
||||
setDropdownWidth(inputRef.current.parentElement?.offsetWidth || 500)
|
||||
}
|
||||
}
|
||||
}, [state.isOpen, state.highlightedIndex])
|
||||
measure()
|
||||
window.addEventListener('resize', measure)
|
||||
return () => window.removeEventListener('resize', measure)
|
||||
}, [])
|
||||
|
||||
const [showSpinner, setShowSpinner] = useState(false)
|
||||
useEffect(() => {
|
||||
if (!state.pendingQuery) {
|
||||
setShowSpinner(false)
|
||||
return
|
||||
onOpenChange?.(isOpen)
|
||||
}, [isOpen, onOpenChange])
|
||||
|
||||
useEffect(() => {
|
||||
if (!isOpen || highlightedIndex < 0) return
|
||||
const container = dropdownRef.current
|
||||
const optionEl = container?.querySelector(`[data-index="${highlightedIndex}"]`)
|
||||
if (container && optionEl) {
|
||||
optionEl.scrollIntoView({ block: 'nearest', behavior: 'smooth' })
|
||||
}
|
||||
const t = setTimeout(() => setShowSpinner(true), 200)
|
||||
return () => clearTimeout(t)
|
||||
}, [state.pendingQuery])
|
||||
}, [isOpen, highlightedIndex])
|
||||
|
||||
const onInputChange = (e: React.ChangeEvent<HTMLInputElement>) => {
|
||||
const newValue = e.target.value
|
||||
const cursorPos = e.target.selectionStart || 0
|
||||
handleInputChange(newValue, cursorPos)
|
||||
}
|
||||
|
||||
const updateCursorPosition = (element: HTMLInputElement) => {
|
||||
const cursorPos = element.selectionStart || 0
|
||||
handleCursorChange(cursorPos)
|
||||
}
|
||||
|
||||
const removeFilter = (filterToRemove: (typeof parsedQuery.filters)[0]) => {
|
||||
const remainingFilters = parsedQuery.filters.filter(
|
||||
(f) => !(f.field === filterToRemove.field && f.value === filterToRemove.value)
|
||||
)
|
||||
|
||||
const filterStrings = remainingFilters.map(
|
||||
(f) => `${f.field}:${f.operator !== '=' ? f.operator : ''}${f.originalValue}`
|
||||
)
|
||||
|
||||
const newQuery = [...filterStrings, parsedQuery.textSearch].filter(Boolean).join(' ')
|
||||
handleInputChange(newQuery, newQuery.length)
|
||||
if (inputRef.current) {
|
||||
inputRef.current.focus()
|
||||
}
|
||||
}
|
||||
const hasFilters = appliedFilters.length > 0
|
||||
const hasTextSearch = textSearch.length > 0
|
||||
const suggestionType =
|
||||
sections.length > 0 ? 'multi-section' : suggestions.length > 0 ? suggestions[0]?.category : null
|
||||
|
||||
return (
|
||||
<div className={cn('relative', className)}>
|
||||
{/* Search Input */}
|
||||
<div
|
||||
className={cn(
|
||||
'relative flex items-center gap-2 border bg-background pr-2 pl-3 transition-all duration-200',
|
||||
'h-9 w-full min-w-[600px] max-w-[800px]',
|
||||
state.isOpen && 'ring-1 ring-ring'
|
||||
)}
|
||||
{/* Search Input with Inline Badges */}
|
||||
<Popover
|
||||
open={isOpen}
|
||||
onOpenChange={(open) => {
|
||||
if (!open) {
|
||||
setHighlightedIndex(-1)
|
||||
}
|
||||
}}
|
||||
>
|
||||
{showSpinner ? (
|
||||
<Loader2 className='h-4 w-4 flex-shrink-0 animate-spin text-muted-foreground' />
|
||||
) : (
|
||||
<Search className='h-4 w-4 flex-shrink-0 text-muted-foreground' strokeWidth={2} />
|
||||
)}
|
||||
<PopoverAnchor asChild>
|
||||
<div className='relative flex h-9 w-[500px] items-center rounded-[4px] border border-[var(--surface-11)] bg-[var(--surface-6)] transition-colors focus-within:border-[var(--surface-14)] focus-within:ring-1 focus-within:ring-ring hover:border-[var(--surface-14)] dark:bg-[var(--surface-9)] dark:hover:border-[var(--surface-13)]'>
|
||||
{/* Search Icon */}
|
||||
<Search
|
||||
className='ml-2.5 h-4 w-4 flex-shrink-0 text-muted-foreground'
|
||||
strokeWidth={2}
|
||||
/>
|
||||
|
||||
{/* Text display with ghost text */}
|
||||
<div className='relative flex-1 font-[380] font-sans text-base leading-none'>
|
||||
{/* Invisible input for cursor and interactions */}
|
||||
<Input
|
||||
ref={inputRef}
|
||||
id={inputId}
|
||||
placeholder={state.inputValue ? '' : placeholder}
|
||||
value={state.inputValue}
|
||||
onChange={onInputChange}
|
||||
onFocus={handleFocus}
|
||||
onBlur={handleBlur}
|
||||
onClick={(e) => updateCursorPosition(e.currentTarget)}
|
||||
onKeyDown={handleKeyDown}
|
||||
onSelect={(e) => updateCursorPosition(e.currentTarget)}
|
||||
className='relative z-10 w-full border-0 bg-transparent p-0 font-[380] font-sans text-base text-transparent leading-none placeholder:text-muted-foreground focus-visible:ring-0 focus-visible:ring-offset-0'
|
||||
style={{ background: 'transparent' }}
|
||||
role='combobox'
|
||||
aria-expanded={state.isOpen}
|
||||
aria-controls={state.isOpen ? listboxId : undefined}
|
||||
aria-autocomplete='list'
|
||||
aria-activedescendant={
|
||||
state.isOpen && state.highlightedIndex >= 0
|
||||
? `${listboxId}-option-${state.highlightedIndex}`
|
||||
: undefined
|
||||
}
|
||||
/>
|
||||
|
||||
{/* Always-visible text overlay */}
|
||||
<div className='pointer-events-none absolute inset-0 flex items-center'>
|
||||
<span className='whitespace-pre font-[380] font-sans text-base leading-none'>
|
||||
<span className='text-foreground'>{state.inputValue}</span>
|
||||
{state.showPreview &&
|
||||
state.previewValue &&
|
||||
state.previewValue !== state.inputValue &&
|
||||
state.inputValue && (
|
||||
<span className='text-muted-foreground/50'>
|
||||
{state.previewValue.slice(state.inputValue.length)}
|
||||
{/* Scrollable container for badges */}
|
||||
<div className='flex flex-1 items-center gap-1.5 overflow-x-auto px-2 [scrollbar-width:none] [&::-webkit-scrollbar]:hidden'>
|
||||
{/* Applied Filter Badges */}
|
||||
{appliedFilters.map((filter, index) => (
|
||||
<Button
|
||||
key={`${filter.field}-${filter.value}-${index}`}
|
||||
variant='outline'
|
||||
className={cn(
|
||||
'h-6 flex-shrink-0 gap-1 rounded-[6px] px-2 text-[11px]',
|
||||
highlightedBadgeIndex === index && 'border-white dark:border-white'
|
||||
)}
|
||||
onClick={(e) => {
|
||||
e.preventDefault()
|
||||
removeBadge(index)
|
||||
}}
|
||||
>
|
||||
<span className='text-[var(--text-muted)]'>{filter.field}:</span>
|
||||
<span className='text-[var(--text-primary)]'>
|
||||
{filter.operator !== '=' && filter.operator}
|
||||
{filter.originalValue}
|
||||
</span>
|
||||
)}
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
<X className='h-3 w-3' />
|
||||
</Button>
|
||||
))}
|
||||
|
||||
{/* Clear all button */}
|
||||
{(hasFilters || hasTextSearch) && (
|
||||
<Button
|
||||
type='button'
|
||||
variant='ghost'
|
||||
className='h-6 w-6 p-0 hover:bg-muted/50'
|
||||
onMouseDown={(e) => {
|
||||
e.preventDefault()
|
||||
clearAll()
|
||||
}}
|
||||
>
|
||||
<X className='h-3 w-3' />
|
||||
</Button>
|
||||
)}
|
||||
</div>
|
||||
{/* Text Search Badge (if present) */}
|
||||
{hasTextSearch && (
|
||||
<Button
|
||||
variant='outline'
|
||||
className='h-6 flex-shrink-0 gap-1 rounded-[6px] px-2 text-[11px]'
|
||||
onClick={(e) => {
|
||||
e.preventDefault()
|
||||
handleFiltersChange(appliedFilters, '')
|
||||
}}
|
||||
>
|
||||
<span className='text-[var(--text-primary)]'>"{textSearch}"</span>
|
||||
<X className='h-3 w-3' />
|
||||
</Button>
|
||||
)}
|
||||
|
||||
{/* Suggestions Dropdown */}
|
||||
{state.isOpen && state.suggestions.length > 0 && (
|
||||
<div
|
||||
ref={dropdownRef}
|
||||
className='min-w[500px] absolute z-[9999] mt-1 w-full overflow-hidden border bg-popover shadow-md'
|
||||
id={listboxId}
|
||||
role='listbox'
|
||||
aria-labelledby={inputId}
|
||||
>
|
||||
<div className='max-h-96 overflow-y-auto py-1'>
|
||||
{state.suggestionType === 'filter-keys' && (
|
||||
<div className='border-border/50 border-b px-3 py-1 font-medium text-muted-foreground/70 text-xs uppercase tracking-wide'>
|
||||
SUGGESTED FILTERS
|
||||
</div>
|
||||
)}
|
||||
{state.suggestionType === 'filter-values' && (
|
||||
<div className='border-border/50 border-b px-3 py-1 font-medium text-muted-foreground/70 text-xs uppercase tracking-wide'>
|
||||
{state.suggestions[0]?.category?.toUpperCase() || 'VALUES'}
|
||||
</div>
|
||||
)}
|
||||
{/* Input - only current typing */}
|
||||
<input
|
||||
ref={inputRef}
|
||||
type='text'
|
||||
placeholder={hasFilters || hasTextSearch ? '' : placeholder}
|
||||
value={currentInput}
|
||||
onChange={(e) => handleInputChange(e.target.value)}
|
||||
onKeyDown={handleKeyDown}
|
||||
onFocus={handleFocus}
|
||||
onBlur={handleBlur}
|
||||
className='min-w-[100px] flex-1 border-0 bg-transparent font-sans text-foreground text-sm outline-none placeholder:text-[var(--text-muted)]'
|
||||
/>
|
||||
</div>
|
||||
|
||||
{state.suggestions.map((suggestion, index) => (
|
||||
{/* Clear All Button */}
|
||||
{(hasFilters || hasTextSearch) && (
|
||||
<button
|
||||
key={suggestion.id}
|
||||
className={cn(
|
||||
'w-full px-3 py-2 text-left text-sm',
|
||||
'focus:bg-accent focus:text-accent-foreground focus:outline-none',
|
||||
'transition-colors hover:bg-accent hover:text-accent-foreground',
|
||||
index === state.highlightedIndex && 'bg-accent text-accent-foreground'
|
||||
)}
|
||||
onMouseEnter={() => {
|
||||
if (typeof window !== 'undefined' && (window as any).__logsKeyboardNavActive) {
|
||||
return
|
||||
}
|
||||
handleSuggestionHover(index)
|
||||
}}
|
||||
onMouseDown={(e) => {
|
||||
e.preventDefault()
|
||||
e.stopPropagation()
|
||||
handleSuggestionSelect(suggestion)
|
||||
}}
|
||||
id={`${listboxId}-option-${index}`}
|
||||
role='option'
|
||||
aria-selected={index === state.highlightedIndex}
|
||||
>
|
||||
<div className='flex items-center justify-between'>
|
||||
<div className='flex-1'>
|
||||
<div className='font-medium text-sm'>{suggestion.label}</div>
|
||||
{suggestion.description && (
|
||||
<div className='mt-0.5 text-muted-foreground text-xs'>
|
||||
{suggestion.description}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
<div className='ml-4 font-mono text-muted-foreground text-xs'>
|
||||
{suggestion.value}
|
||||
</div>
|
||||
</div>
|
||||
</button>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Active filters as chips */}
|
||||
{hasFilters && (
|
||||
<div className='mt-3 flex flex-wrap items-center gap-2'>
|
||||
<span className='font-medium text-muted-foreground text-xs'>ACTIVE FILTERS:</span>
|
||||
{parsedQuery.filters.map((filter, index) => (
|
||||
<Badge
|
||||
key={`${filter.field}-${filter.value}-${index}`}
|
||||
variant='secondary'
|
||||
className='h-6 border border-border/50 bg-muted/50 font-mono text-muted-foreground text-xs hover:bg-muted'
|
||||
>
|
||||
<span className='mr-1'>{filter.field}:</span>
|
||||
<span>
|
||||
{filter.operator !== '=' && filter.operator}
|
||||
{filter.originalValue}
|
||||
</span>
|
||||
<Button
|
||||
type='button'
|
||||
variant='ghost'
|
||||
className='ml-1 h-3 w-3 p-0 text-muted-foreground hover:bg-muted/50 hover:text-foreground'
|
||||
onClick={() => removeFilter(filter)}
|
||||
className='mr-2.5 flex h-5 w-5 flex-shrink-0 items-center justify-center text-muted-foreground transition-colors hover:text-foreground'
|
||||
onClick={clearAll}
|
||||
>
|
||||
<X className='h-2.5 w-2.5' />
|
||||
</Button>
|
||||
</Badge>
|
||||
))}
|
||||
{parsedQuery.filters.length > 1 && (
|
||||
<Button
|
||||
type='button'
|
||||
variant='ghost'
|
||||
className='h-6 text-muted-foreground text-xs hover:text-foreground'
|
||||
onMouseDown={(e) => {
|
||||
e.preventDefault()
|
||||
const newQuery = parsedQuery.textSearch
|
||||
handleInputChange(newQuery, newQuery.length)
|
||||
if (inputRef.current) {
|
||||
inputRef.current.focus()
|
||||
}
|
||||
}}
|
||||
>
|
||||
Clear all
|
||||
</Button>
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
<X className='h-4 w-4' />
|
||||
</button>
|
||||
)}
|
||||
</div>
|
||||
</PopoverAnchor>
|
||||
|
||||
{/* Text search indicator */}
|
||||
{hasTextSearch && (
|
||||
<div className='mt-2 flex items-center gap-2'>
|
||||
<span className='font-medium text-muted-foreground text-xs'>TEXT SEARCH:</span>
|
||||
<Badge variant='outline' className='text-xs'>
|
||||
"{parsedQuery.textSearch}"
|
||||
</Badge>
|
||||
</div>
|
||||
)}
|
||||
{/* Dropdown */}
|
||||
<PopoverContent
|
||||
ref={dropdownRef}
|
||||
className='p-0'
|
||||
style={{ width: dropdownWidth }}
|
||||
align='start'
|
||||
sideOffset={4}
|
||||
onOpenAutoFocus={(e) => e.preventDefault()}
|
||||
>
|
||||
<div className='max-h-96 overflow-y-auto'>
|
||||
{sections.length > 0 ? (
|
||||
// Multi-section layout
|
||||
<div className='py-1'>
|
||||
{/* Show all results (no header) */}
|
||||
{suggestions[0]?.category === 'show-all' && (
|
||||
<button
|
||||
key={suggestions[0].id}
|
||||
data-index={0}
|
||||
className={cn(
|
||||
'w-full px-3 py-1.5 text-left transition-colors focus:outline-none',
|
||||
'hover:bg-[var(--surface-9)] dark:hover:bg-[var(--surface-9)]',
|
||||
highlightedIndex === 0 && 'bg-[var(--surface-9)] dark:bg-[var(--surface-9)]'
|
||||
)}
|
||||
onMouseEnter={() => setHighlightedIndex(0)}
|
||||
onMouseDown={(e) => {
|
||||
e.preventDefault()
|
||||
handleSuggestionSelect(suggestions[0])
|
||||
}}
|
||||
>
|
||||
<div className='text-[13px]'>{suggestions[0].label}</div>
|
||||
</button>
|
||||
)}
|
||||
|
||||
{sections.map((section) => (
|
||||
<div key={section.title}>
|
||||
<div className='border-border/50 border-t px-3 py-1.5 font-medium text-[11px] text-[var(--text-muted)] uppercase tracking-wide'>
|
||||
{section.title}
|
||||
</div>
|
||||
{section.suggestions.map((suggestion) => {
|
||||
if (suggestion.category === 'show-all') return null
|
||||
|
||||
const index = suggestions.indexOf(suggestion)
|
||||
const isHighlighted = index === highlightedIndex
|
||||
|
||||
return (
|
||||
<button
|
||||
key={suggestion.id}
|
||||
data-index={index}
|
||||
className={cn(
|
||||
'w-full px-3 py-1.5 text-left transition-colors focus:outline-none',
|
||||
'hover:bg-[var(--surface-9)] dark:hover:bg-[var(--surface-9)]',
|
||||
isHighlighted && 'bg-[var(--surface-9)] dark:bg-[var(--surface-9)]'
|
||||
)}
|
||||
onMouseEnter={() => setHighlightedIndex(index)}
|
||||
onMouseDown={(e) => {
|
||||
e.preventDefault()
|
||||
handleSuggestionSelect(suggestion)
|
||||
}}
|
||||
>
|
||||
<div className='flex items-center justify-between gap-3'>
|
||||
<div className='min-w-0 flex-1 truncate text-[13px]'>
|
||||
{suggestion.label}
|
||||
</div>
|
||||
{suggestion.value !== suggestion.label && (
|
||||
<div className='flex-shrink-0 font-mono text-[11px] text-[var(--text-muted)]'>
|
||||
{suggestion.category === 'workflow' ||
|
||||
suggestion.category === 'folder'
|
||||
? `${suggestion.category}:`
|
||||
: ''}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</button>
|
||||
)
|
||||
})}
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
) : (
|
||||
// Single section layout
|
||||
<div className='py-1'>
|
||||
{suggestionType === 'filters' && (
|
||||
<div className='border-border/50 border-b px-3 py-1.5 font-medium text-[11px] text-[var(--text-muted)] uppercase tracking-wide'>
|
||||
SUGGESTED FILTERS
|
||||
</div>
|
||||
)}
|
||||
|
||||
{suggestions.map((suggestion, index) => (
|
||||
<button
|
||||
key={suggestion.id}
|
||||
data-index={index}
|
||||
className={cn(
|
||||
'w-full px-3 py-1.5 text-left transition-colors focus:outline-none',
|
||||
'hover:bg-[var(--surface-9)] dark:hover:bg-[var(--surface-9)]',
|
||||
index === highlightedIndex &&
|
||||
'bg-[var(--surface-9)] dark:bg-[var(--surface-9)]'
|
||||
)}
|
||||
onMouseEnter={() => setHighlightedIndex(index)}
|
||||
onMouseDown={(e) => {
|
||||
e.preventDefault()
|
||||
handleSuggestionSelect(suggestion)
|
||||
}}
|
||||
>
|
||||
<div className='flex items-center justify-between gap-3'>
|
||||
<div className='min-w-0 flex-1 text-[13px]'>{suggestion.label}</div>
|
||||
{suggestion.description && (
|
||||
<div className='flex-shrink-0 text-[11px] text-[var(--text-muted)]'>
|
||||
{suggestion.value}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</button>
|
||||
))}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</PopoverContent>
|
||||
</Popover>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
@@ -606,10 +606,25 @@ export default function Dashboard() {
|
||||
|
||||
setLastAnchorIndices((prev) => ({ ...prev, [workflowId]: segmentIndex }))
|
||||
} else if (mode === 'single') {
|
||||
// Single mode: Clear all selections and select only this segment
|
||||
setExpandedWorkflowId(workflowId)
|
||||
setSelectedSegments({ [workflowId]: [segmentIndex] })
|
||||
setLastAnchorIndices({ [workflowId]: segmentIndex })
|
||||
// Single mode: Select this segment, or deselect if already selected
|
||||
setSelectedSegments((prev) => {
|
||||
const currentSegments = prev[workflowId] || []
|
||||
const isOnlySelectedSegment =
|
||||
currentSegments.length === 1 && currentSegments[0] === segmentIndex
|
||||
const isOnlyWorkflowSelected = Object.keys(prev).length === 1 && prev[workflowId]
|
||||
|
||||
// If this is the only selected segment in the only selected workflow, deselect it
|
||||
if (isOnlySelectedSegment && isOnlyWorkflowSelected) {
|
||||
setExpandedWorkflowId(null)
|
||||
setLastAnchorIndices({})
|
||||
return {}
|
||||
}
|
||||
|
||||
// Otherwise, select only this segment
|
||||
setExpandedWorkflowId(workflowId)
|
||||
setLastAnchorIndices({ [workflowId]: segmentIndex })
|
||||
return { [workflowId]: [segmentIndex] }
|
||||
})
|
||||
} else if (mode === 'range') {
|
||||
// Range mode: Expand selection within the current workflow
|
||||
if (expandedWorkflowId === workflowId) {
|
||||
@@ -987,6 +1002,51 @@ export default function Dashboard() {
|
||||
const totalRate =
|
||||
totalExecutions > 0 ? (totalSuccess / totalExecutions) * 100 : 100
|
||||
|
||||
// Calculate overall time range across all selected workflows
|
||||
let multiWorkflowTimeRange: { start: Date; end: Date } | null = null
|
||||
if (sortedIndices.length > 0) {
|
||||
const firstIdx = sortedIndices[0]
|
||||
const lastIdx = sortedIndices[sortedIndices.length - 1]
|
||||
|
||||
// Find earliest start time
|
||||
let earliestStart: Date | null = null
|
||||
for (const wfId of selectedWorkflowIds) {
|
||||
const wf = executions.find((w) => w.workflowId === wfId)
|
||||
const segment = wf?.segments[firstIdx]
|
||||
if (segment) {
|
||||
const start = new Date(segment.timestamp)
|
||||
if (!earliestStart || start < earliestStart) {
|
||||
earliestStart = start
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Find latest end time
|
||||
let latestEnd: Date | null = null
|
||||
for (const wfId of selectedWorkflowIds) {
|
||||
const wf = executions.find((w) => w.workflowId === wfId)
|
||||
const segment = wf?.segments[lastIdx]
|
||||
if (segment) {
|
||||
const end = new Date(new Date(segment.timestamp).getTime() + segMs)
|
||||
if (!latestEnd || end > latestEnd) {
|
||||
latestEnd = end
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (earliestStart && latestEnd) {
|
||||
multiWorkflowTimeRange = {
|
||||
start: earliestStart,
|
||||
end: latestEnd,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Get workflow names
|
||||
const workflowNames = selectedWorkflowIds
|
||||
.map((id) => executions.find((w) => w.workflowId === id)?.workflowName)
|
||||
.filter(Boolean) as string[]
|
||||
|
||||
return (
|
||||
<WorkflowDetails
|
||||
workspaceId={workspaceId}
|
||||
@@ -1007,8 +1067,11 @@ export default function Dashboard() {
|
||||
allLogs: allLogs,
|
||||
} as any
|
||||
}
|
||||
selectedSegmentIndex={[]}
|
||||
selectedSegmentIndex={sortedIndices}
|
||||
selectedSegment={null}
|
||||
selectedSegmentTimeRange={multiWorkflowTimeRange}
|
||||
selectedWorkflowNames={workflowNames}
|
||||
segmentDurationMs={segMs}
|
||||
clearSegmentSelection={() => {
|
||||
setSelectedSegments({})
|
||||
setLastAnchorIndices({})
|
||||
@@ -1121,6 +1184,9 @@ export default function Dashboard() {
|
||||
const idxSet = new Set(workflowSelectedIndices)
|
||||
const selectedSegs = wf.segments.filter((_, i) => idxSet.has(i))
|
||||
;(details as any).__filtered = buildSeriesFromSegments(selectedSegs as any)
|
||||
} else if (details) {
|
||||
// Clear filtered data when no segments are selected
|
||||
;(details as any).__filtered = undefined
|
||||
}
|
||||
|
||||
const detailsWithFilteredLogs = details
|
||||
@@ -1148,6 +1214,28 @@ export default function Dashboard() {
|
||||
? wf.segments[workflowSelectedIndices[0]]
|
||||
: null
|
||||
|
||||
// Calculate time range for selected segments
|
||||
const segMs =
|
||||
(endTime.getTime() - getStartTime().getTime()) / Math.max(1, segmentCount)
|
||||
const selectedSegmentsData = workflowSelectedIndices
|
||||
.map((idx) => wf.segments[idx])
|
||||
.filter(Boolean)
|
||||
const timeRange =
|
||||
selectedSegmentsData.length > 0
|
||||
? (() => {
|
||||
const sortedIndices = [...workflowSelectedIndices].sort((a, b) => a - b)
|
||||
const firstSegment = wf.segments[sortedIndices[0]]
|
||||
const lastSegment = wf.segments[sortedIndices[sortedIndices.length - 1]]
|
||||
if (!firstSegment || !lastSegment) return null
|
||||
const rangeStart = new Date(firstSegment.timestamp)
|
||||
const rangeEnd = new Date(lastSegment.timestamp).getTime() + segMs
|
||||
return {
|
||||
start: rangeStart,
|
||||
end: new Date(rangeEnd),
|
||||
}
|
||||
})()
|
||||
: null
|
||||
|
||||
return (
|
||||
<WorkflowDetails
|
||||
workspaceId={workspaceId}
|
||||
@@ -1164,6 +1252,9 @@ export default function Dashboard() {
|
||||
}
|
||||
: null
|
||||
}
|
||||
selectedSegmentTimeRange={timeRange}
|
||||
selectedWorkflowNames={undefined}
|
||||
segmentDurationMs={segMs}
|
||||
clearSegmentSelection={() => {
|
||||
setSelectedSegments({})
|
||||
setLastAnchorIndices({})
|
||||
@@ -1197,6 +1288,9 @@ export default function Dashboard() {
|
||||
details={globalDetails as any}
|
||||
selectedSegmentIndex={[]}
|
||||
selectedSegment={null}
|
||||
selectedSegmentTimeRange={null}
|
||||
selectedWorkflowNames={undefined}
|
||||
segmentDurationMs={undefined}
|
||||
clearSegmentSelection={() => {
|
||||
setSelectedSegments({})
|
||||
setLastAnchorIndices({})
|
||||
|
||||
@@ -1,423 +0,0 @@
|
||||
import { useCallback, useEffect, useMemo, useReducer, useRef } from 'react'
|
||||
|
||||
export interface Suggestion {
|
||||
id: string
|
||||
value: string
|
||||
label: string
|
||||
description?: string
|
||||
category?:
|
||||
| 'filters'
|
||||
| 'level'
|
||||
| 'trigger'
|
||||
| 'cost'
|
||||
| 'date'
|
||||
| 'duration'
|
||||
| 'workflow'
|
||||
| 'folder'
|
||||
| 'workflowId'
|
||||
| 'executionId'
|
||||
}
|
||||
|
||||
export interface SuggestionGroup {
|
||||
type: 'filter-keys' | 'filter-values'
|
||||
filterKey?: string
|
||||
suggestions: Suggestion[]
|
||||
}
|
||||
|
||||
interface AutocompleteState {
|
||||
// Input state
|
||||
inputValue: string
|
||||
cursorPosition: number
|
||||
|
||||
// Dropdown state
|
||||
isOpen: boolean
|
||||
suggestions: Suggestion[]
|
||||
suggestionType: 'filter-keys' | 'filter-values' | null
|
||||
highlightedIndex: number
|
||||
|
||||
// Preview state
|
||||
previewValue: string
|
||||
showPreview: boolean
|
||||
|
||||
// Query state
|
||||
isValidQuery: boolean
|
||||
pendingQuery: string | null
|
||||
}
|
||||
|
||||
type AutocompleteAction =
|
||||
| { type: 'SET_INPUT_VALUE'; payload: { value: string; cursorPosition: number } }
|
||||
| { type: 'SET_CURSOR_POSITION'; payload: number }
|
||||
| { type: 'OPEN_DROPDOWN'; payload: SuggestionGroup }
|
||||
| { type: 'CLOSE_DROPDOWN' }
|
||||
| { type: 'HIGHLIGHT_SUGGESTION'; payload: { index: number; preview?: string } }
|
||||
| { type: 'SET_PREVIEW'; payload: { value: string; show: boolean } }
|
||||
| { type: 'CLEAR_PREVIEW' }
|
||||
| { type: 'SET_QUERY_VALIDITY'; payload: boolean }
|
||||
| { type: 'SET_PENDING'; payload: string | null }
|
||||
| { type: 'RESET' }
|
||||
|
||||
const initialState: AutocompleteState = {
|
||||
inputValue: '',
|
||||
cursorPosition: 0,
|
||||
isOpen: false,
|
||||
suggestions: [],
|
||||
suggestionType: null,
|
||||
highlightedIndex: -1,
|
||||
previewValue: '',
|
||||
showPreview: false,
|
||||
isValidQuery: true,
|
||||
pendingQuery: null,
|
||||
}
|
||||
|
||||
function autocompleteReducer(
|
||||
state: AutocompleteState,
|
||||
action: AutocompleteAction
|
||||
): AutocompleteState {
|
||||
switch (action.type) {
|
||||
case 'SET_INPUT_VALUE':
|
||||
return {
|
||||
...state,
|
||||
inputValue: action.payload.value,
|
||||
cursorPosition: action.payload.cursorPosition,
|
||||
previewValue: '',
|
||||
showPreview: false,
|
||||
}
|
||||
|
||||
case 'SET_CURSOR_POSITION':
|
||||
return {
|
||||
...state,
|
||||
cursorPosition: action.payload,
|
||||
}
|
||||
|
||||
case 'OPEN_DROPDOWN':
|
||||
return {
|
||||
...state,
|
||||
isOpen: true,
|
||||
suggestions: action.payload.suggestions,
|
||||
suggestionType: action.payload.type,
|
||||
highlightedIndex: action.payload.suggestions.length > 0 ? 0 : -1,
|
||||
}
|
||||
|
||||
case 'CLOSE_DROPDOWN':
|
||||
return {
|
||||
...state,
|
||||
isOpen: false,
|
||||
suggestions: [],
|
||||
suggestionType: null,
|
||||
highlightedIndex: -1,
|
||||
previewValue: '',
|
||||
showPreview: false,
|
||||
}
|
||||
|
||||
case 'HIGHLIGHT_SUGGESTION':
|
||||
return {
|
||||
...state,
|
||||
highlightedIndex: action.payload.index,
|
||||
previewValue: action.payload.preview || '',
|
||||
showPreview: !!action.payload.preview,
|
||||
}
|
||||
|
||||
case 'SET_PREVIEW':
|
||||
return {
|
||||
...state,
|
||||
previewValue: action.payload.value,
|
||||
showPreview: action.payload.show,
|
||||
}
|
||||
|
||||
case 'CLEAR_PREVIEW':
|
||||
return {
|
||||
...state,
|
||||
previewValue: '',
|
||||
showPreview: false,
|
||||
}
|
||||
|
||||
case 'SET_QUERY_VALIDITY':
|
||||
return {
|
||||
...state,
|
||||
isValidQuery: action.payload,
|
||||
}
|
||||
|
||||
case 'SET_PENDING':
|
||||
return {
|
||||
...state,
|
||||
pendingQuery: action.payload,
|
||||
}
|
||||
|
||||
case 'RESET':
|
||||
return initialState
|
||||
|
||||
default:
|
||||
return state
|
||||
}
|
||||
}
|
||||
|
||||
/** Configuration accepted by {@link useAutocomplete}. */
export interface AutocompleteOptions {
  /** Returns suggestions for the given input text and cursor position, or null when there are none. */
  getSuggestions: (value: string, cursorPosition: number) => SuggestionGroup | null
  /** Builds the full input text that accepting `suggestion` at the cursor would produce. */
  generatePreview: (suggestion: Suggestion, currentValue: string, cursorPosition: number) => string
  /** Invoked with the query text whenever it passes validation. */
  onQueryChange: (query: string) => void
  /** Optional validator; when omitted, every query is treated as valid. */
  validateQuery?: (query: string) => boolean
  /** Debounce window in milliseconds before suggestions refresh while typing. */
  debounceMs?: number
}
|
||||
|
||||
export function useAutocomplete({
|
||||
getSuggestions,
|
||||
generatePreview,
|
||||
onQueryChange,
|
||||
validateQuery,
|
||||
debounceMs = 150,
|
||||
}: AutocompleteOptions) {
|
||||
const [state, dispatch] = useReducer(autocompleteReducer, initialState)
|
||||
const inputRef = useRef<HTMLInputElement>(null)
|
||||
const dropdownRef = useRef<HTMLDivElement>(null)
|
||||
const debounceRef = useRef<NodeJS.Timeout | null>(null)
|
||||
const pointerDownInDropdownRef = useRef<boolean>(false)
|
||||
const latestRef = useRef<{ inputValue: string; cursorPosition: number }>({
|
||||
inputValue: '',
|
||||
cursorPosition: 0,
|
||||
})
|
||||
|
||||
useEffect(() => {
|
||||
latestRef.current.inputValue = state.inputValue
|
||||
latestRef.current.cursorPosition = state.cursorPosition
|
||||
}, [state.inputValue, state.cursorPosition])
|
||||
|
||||
const currentSuggestion = useMemo(() => {
|
||||
if (state.highlightedIndex >= 0 && state.suggestions[state.highlightedIndex]) {
|
||||
return state.suggestions[state.highlightedIndex]
|
||||
}
|
||||
return null
|
||||
}, [state.highlightedIndex, state.suggestions])
|
||||
|
||||
const updateSuggestions = useCallback(() => {
|
||||
const { inputValue, cursorPosition } = latestRef.current
|
||||
const suggestionGroup = getSuggestions(inputValue, cursorPosition)
|
||||
|
||||
if (suggestionGroup && suggestionGroup.suggestions.length > 0) {
|
||||
dispatch({ type: 'OPEN_DROPDOWN', payload: suggestionGroup })
|
||||
|
||||
const firstSuggestion = suggestionGroup.suggestions[0]
|
||||
const preview = generatePreview(firstSuggestion, inputValue, cursorPosition)
|
||||
dispatch({
|
||||
type: 'HIGHLIGHT_SUGGESTION',
|
||||
payload: { index: 0, preview },
|
||||
})
|
||||
} else {
|
||||
dispatch({ type: 'CLOSE_DROPDOWN' })
|
||||
}
|
||||
}, [getSuggestions, generatePreview])
|
||||
|
||||
const handleInputChange = useCallback(
|
||||
(value: string, cursorPosition: number) => {
|
||||
dispatch({ type: 'SET_INPUT_VALUE', payload: { value, cursorPosition } })
|
||||
|
||||
const isValid = validateQuery ? validateQuery(value) : true
|
||||
dispatch({ type: 'SET_QUERY_VALIDITY', payload: isValid })
|
||||
|
||||
if (isValid) {
|
||||
onQueryChange(value)
|
||||
}
|
||||
|
||||
if (debounceRef.current) {
|
||||
clearTimeout(debounceRef.current)
|
||||
}
|
||||
|
||||
dispatch({ type: 'SET_PENDING', payload: value })
|
||||
debounceRef.current = setTimeout(() => {
|
||||
dispatch({ type: 'SET_PENDING', payload: null })
|
||||
updateSuggestions()
|
||||
}, debounceMs)
|
||||
},
|
||||
[updateSuggestions, onQueryChange, validateQuery, debounceMs]
|
||||
)
|
||||
|
||||
const handleCursorChange = useCallback(
|
||||
(position: number) => {
|
||||
dispatch({ type: 'SET_CURSOR_POSITION', payload: position })
|
||||
updateSuggestions()
|
||||
},
|
||||
[updateSuggestions]
|
||||
)
|
||||
|
||||
const handleSuggestionHover = useCallback(
|
||||
(index: number) => {
|
||||
if (index >= 0 && index < state.suggestions.length) {
|
||||
const suggestion = state.suggestions[index]
|
||||
const preview = generatePreview(suggestion, state.inputValue, state.cursorPosition)
|
||||
dispatch({
|
||||
type: 'HIGHLIGHT_SUGGESTION',
|
||||
payload: { index, preview },
|
||||
})
|
||||
}
|
||||
},
|
||||
[state.suggestions, state.inputValue, state.cursorPosition, generatePreview]
|
||||
)
|
||||
|
||||
const handleSuggestionSelect = useCallback(
|
||||
(suggestion?: Suggestion) => {
|
||||
const selectedSuggestion = suggestion || currentSuggestion
|
||||
if (!selectedSuggestion) return
|
||||
|
||||
let newValue = generatePreview(selectedSuggestion, state.inputValue, state.cursorPosition)
|
||||
|
||||
let newCursorPosition = newValue.length
|
||||
|
||||
if (state.suggestionType === 'filter-keys' && selectedSuggestion.value.endsWith(':')) {
|
||||
newCursorPosition = newValue.lastIndexOf(':') + 1
|
||||
} else if (state.suggestionType === 'filter-values') {
|
||||
newValue = `${newValue} `
|
||||
newCursorPosition = newValue.length
|
||||
}
|
||||
|
||||
dispatch({
|
||||
type: 'SET_INPUT_VALUE',
|
||||
payload: { value: newValue, cursorPosition: newCursorPosition },
|
||||
})
|
||||
|
||||
const isValid = validateQuery ? validateQuery(newValue.trim()) : true
|
||||
dispatch({ type: 'SET_QUERY_VALIDITY', payload: isValid })
|
||||
|
||||
if (isValid) {
|
||||
onQueryChange(newValue.trim())
|
||||
}
|
||||
|
||||
if (inputRef.current) {
|
||||
inputRef.current.focus()
|
||||
requestAnimationFrame(() => {
|
||||
if (inputRef.current) {
|
||||
inputRef.current.setSelectionRange(newCursorPosition, newCursorPosition)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
if (debounceRef.current) {
|
||||
clearTimeout(debounceRef.current)
|
||||
debounceRef.current = null
|
||||
}
|
||||
dispatch({ type: 'SET_PENDING', payload: null })
|
||||
setTimeout(updateSuggestions, 0)
|
||||
},
|
||||
[
|
||||
currentSuggestion,
|
||||
state.inputValue,
|
||||
state.cursorPosition,
|
||||
state.suggestionType,
|
||||
generatePreview,
|
||||
onQueryChange,
|
||||
validateQuery,
|
||||
updateSuggestions,
|
||||
]
|
||||
)
|
||||
|
||||
const handleKeyDown = useCallback(
|
||||
(event: React.KeyboardEvent) => {
|
||||
if (event.key === 'Enter') {
|
||||
event.preventDefault()
|
||||
if (state.isOpen) {
|
||||
handleSuggestionSelect()
|
||||
} else if (state.isValidQuery) {
|
||||
updateSuggestions()
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
if (!state.isOpen) return
|
||||
|
||||
switch (event.key) {
|
||||
case 'ArrowDown': {
|
||||
event.preventDefault()
|
||||
const nextIndex = Math.min(state.highlightedIndex + 1, state.suggestions.length - 1)
|
||||
handleSuggestionHover(nextIndex)
|
||||
break
|
||||
}
|
||||
|
||||
case 'ArrowUp': {
|
||||
event.preventDefault()
|
||||
const prevIndex = Math.max(state.highlightedIndex - 1, 0)
|
||||
handleSuggestionHover(prevIndex)
|
||||
break
|
||||
}
|
||||
|
||||
case 'Escape':
|
||||
event.preventDefault()
|
||||
dispatch({ type: 'CLOSE_DROPDOWN' })
|
||||
break
|
||||
|
||||
case 'Tab':
|
||||
if (currentSuggestion) {
|
||||
event.preventDefault()
|
||||
handleSuggestionSelect()
|
||||
} else {
|
||||
dispatch({ type: 'CLOSE_DROPDOWN' })
|
||||
}
|
||||
break
|
||||
}
|
||||
},
|
||||
[
|
||||
state.isOpen,
|
||||
state.highlightedIndex,
|
||||
state.suggestions.length,
|
||||
handleSuggestionHover,
|
||||
handleSuggestionSelect,
|
||||
currentSuggestion,
|
||||
]
|
||||
)
|
||||
|
||||
const handleFocus = useCallback(() => {
|
||||
updateSuggestions()
|
||||
}, [updateSuggestions])
|
||||
|
||||
const handleBlur = useCallback((e?: React.FocusEvent) => {
|
||||
const related = (e?.relatedTarget as Node) || document.activeElement
|
||||
const isInsideDropdown = related && dropdownRef.current?.contains(related)
|
||||
const isInsideInput = related && inputRef.current === related
|
||||
if (pointerDownInDropdownRef.current || isInsideDropdown || isInsideInput) {
|
||||
return
|
||||
}
|
||||
setTimeout(() => {
|
||||
dispatch({ type: 'CLOSE_DROPDOWN' })
|
||||
}, 150)
|
||||
}, [])
|
||||
|
||||
useEffect(() => {
|
||||
const dropdownEl = dropdownRef.current
|
||||
if (!dropdownEl) return
|
||||
const onPointerDown = () => {
|
||||
pointerDownInDropdownRef.current = true
|
||||
}
|
||||
const onPointerUp = () => {
|
||||
setTimeout(() => {
|
||||
pointerDownInDropdownRef.current = false
|
||||
}, 0)
|
||||
}
|
||||
dropdownEl.addEventListener('pointerdown', onPointerDown)
|
||||
window.addEventListener('pointerup', onPointerUp)
|
||||
return () => {
|
||||
dropdownEl.removeEventListener('pointerdown', onPointerDown)
|
||||
window.removeEventListener('pointerup', onPointerUp)
|
||||
}
|
||||
}, [])
|
||||
|
||||
return {
|
||||
// State
|
||||
state,
|
||||
currentSuggestion,
|
||||
|
||||
// Refs
|
||||
inputRef,
|
||||
dropdownRef,
|
||||
|
||||
// Handlers
|
||||
handleInputChange,
|
||||
handleCursorChange,
|
||||
handleSuggestionHover,
|
||||
handleSuggestionSelect,
|
||||
handleKeyDown,
|
||||
handleFocus,
|
||||
handleBlur,
|
||||
|
||||
// Actions
|
||||
closeDropdown: () => dispatch({ type: 'CLOSE_DROPDOWN' }),
|
||||
clearPreview: () => dispatch({ type: 'CLEAR_PREVIEW' }),
|
||||
reset: () => dispatch({ type: 'RESET' }),
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,291 @@
|
||||
import { useCallback, useRef, useState } from 'react'
|
||||
import type { ParsedFilter } from '@/lib/logs/query-parser'
|
||||
import type {
|
||||
Suggestion,
|
||||
SuggestionGroup,
|
||||
SuggestionSection,
|
||||
} from '@/app/workspace/[workspaceId]/logs/types/search'
|
||||
|
||||
/** Configuration accepted by {@link useSearchState}. */
interface UseSearchStateOptions {
  /** Notified with the current filter set and free-text search whenever either changes. */
  onFiltersChange: (filters: ParsedFilter[], textSearch: string) => void
  /** Returns the suggestion group for the raw input, or null for none. */
  getSuggestions: (input: string) => SuggestionGroup | null
  /** Debounce window (ms) for refreshing suggestions while typing. */
  debounceMs?: number
}

/**
 * Badge-based log-search state: applied filters are rendered as removable
 * badges, the text input drives a suggestions dropdown, and anything submitted
 * without matching a filter becomes a free-text search. Exposes state, DOM
 * refs, and handlers for the input and dropdown.
 */
export function useSearchState({
  onFiltersChange,
  getSuggestions,
  debounceMs = 100,
}: UseSearchStateOptions) {
  const [appliedFilters, setAppliedFilters] = useState<ParsedFilter[]>([])
  const [currentInput, setCurrentInput] = useState('')
  const [textSearch, setTextSearch] = useState('')

  // Dropdown state
  const [isOpen, setIsOpen] = useState(false)
  const [suggestions, setSuggestions] = useState<Suggestion[]>([])
  const [sections, setSections] = useState<SuggestionSection[]>([])
  const [highlightedIndex, setHighlightedIndex] = useState(-1)

  // Badge interaction (index of the badge marked for Backspace deletion)
  const [highlightedBadgeIndex, setHighlightedBadgeIndex] = useState<number | null>(null)

  // Refs
  const inputRef = useRef<HTMLInputElement>(null)
  const dropdownRef = useRef<HTMLDivElement>(null)
  const debounceRef = useRef<NodeJS.Timeout | null>(null)

  // Update suggestions when input changes; empty/absent results close the dropdown.
  const updateSuggestions = useCallback(
    (input: string) => {
      const suggestionGroup = getSuggestions(input)

      if (suggestionGroup && suggestionGroup.suggestions.length > 0) {
        setSuggestions(suggestionGroup.suggestions)
        setSections(suggestionGroup.sections || [])
        setIsOpen(true)
        setHighlightedIndex(0)
      } else {
        setIsOpen(false)
        setSuggestions([])
        setSections([])
        setHighlightedIndex(-1)
      }
    },
    [getSuggestions]
  )

  // Handle input changes
  const handleInputChange = useCallback(
    (value: string) => {
      setCurrentInput(value)
      setHighlightedBadgeIndex(null) // Clear badge highlight on any input

      // Debounce suggestion updates
      if (debounceRef.current) {
        clearTimeout(debounceRef.current)
      }

      debounceRef.current = setTimeout(() => {
        updateSuggestions(value)
      }, debounceMs)
    },
    [updateSuggestions, debounceMs]
  )

  // Handle suggestion selection. Three paths: 'show-all' becomes a free-text
  // search; a filter key (value ending in ':') keeps the dropdown open for its
  // values; anything else is parsed into a ParsedFilter badge.
  const handleSuggestionSelect = useCallback(
    (suggestion: Suggestion) => {
      if (suggestion.category === 'show-all') {
        // Treat as text search
        setTextSearch(suggestion.value)
        setCurrentInput('')
        setIsOpen(false)
        onFiltersChange(appliedFilters, suggestion.value)
        return
      }

      // Check if this is a filter-key suggestion (ends with ':')
      if (suggestion.category === 'filters' && suggestion.value.endsWith(':')) {
        // Set input to the filter key and keep dropdown open for values
        setCurrentInput(suggestion.value)
        updateSuggestions(suggestion.value)
        return
      }

      // For filter values, workflows, folders - add as a filter
      // Splits on the first ':' into field/value; `value` strips surrounding
      // quotes while `originalValue` preserves them for display/round-tripping.
      // NOTE(review): `as any` sidesteps ParsedFilter's field union — assumes
      // suggestion.value always starts with a valid field name; verify upstream.
      const newFilter: ParsedFilter = {
        field: suggestion.value.split(':')[0] as any,
        operator: '=',
        value: suggestion.value.includes(':')
          ? suggestion.value.split(':').slice(1).join(':').replace(/"/g, '')
          : suggestion.value.replace(/"/g, ''),
        originalValue: suggestion.value.includes(':')
          ? suggestion.value.split(':').slice(1).join(':')
          : suggestion.value,
      }

      const updatedFilters = [...appliedFilters, newFilter]
      setAppliedFilters(updatedFilters)
      setCurrentInput('')
      setTextSearch('')

      // Notify parent
      onFiltersChange(updatedFilters, '')

      // Focus back on input and reopen dropdown with empty suggestions
      if (inputRef.current) {
        inputRef.current.focus()
      }

      // Show filter keys dropdown again after selection
      setTimeout(() => {
        updateSuggestions('')
      }, 50)
    },
    [appliedFilters, onFiltersChange, updateSuggestions]
  )

  // Remove a badge by index; keeps the current text search in effect.
  const removeBadge = useCallback(
    (index: number) => {
      const updatedFilters = appliedFilters.filter((_, i) => i !== index)
      setAppliedFilters(updatedFilters)
      setHighlightedBadgeIndex(null)
      onFiltersChange(updatedFilters, textSearch)

      if (inputRef.current) {
        inputRef.current.focus()
      }
    },
    [appliedFilters, textSearch, onFiltersChange]
  )

  // Handle keyboard navigation: Backspace on empty input drives two-step badge
  // deletion (highlight, then delete); Enter accepts the highlighted suggestion
  // or submits the raw text; arrows/Escape/Tab operate on the dropdown.
  const handleKeyDown = useCallback(
    (event: React.KeyboardEvent) => {
      // Backspace on empty input - badge deletion
      if (event.key === 'Backspace' && currentInput === '') {
        event.preventDefault()

        if (highlightedBadgeIndex !== null) {
          // Delete highlighted badge
          removeBadge(highlightedBadgeIndex)
        } else if (appliedFilters.length > 0) {
          // Highlight last badge
          setHighlightedBadgeIndex(appliedFilters.length - 1)
        }
        return
      }

      // Clear badge highlight on any other key when not in dropdown navigation
      if (
        highlightedBadgeIndex !== null &&
        !['ArrowDown', 'ArrowUp', 'Enter'].includes(event.key)
      ) {
        setHighlightedBadgeIndex(null)
      }

      // Enter key
      if (event.key === 'Enter') {
        event.preventDefault()

        if (isOpen && highlightedIndex >= 0 && suggestions[highlightedIndex]) {
          handleSuggestionSelect(suggestions[highlightedIndex])
        } else if (currentInput.trim()) {
          // Submit current input as text search
          setTextSearch(currentInput.trim())
          setCurrentInput('')
          setIsOpen(false)
          onFiltersChange(appliedFilters, currentInput.trim())
        }
        return
      }

      // Dropdown navigation
      if (!isOpen) return

      switch (event.key) {
        case 'ArrowDown': {
          event.preventDefault()
          setHighlightedIndex((prev) => Math.min(prev + 1, suggestions.length - 1))
          break
        }

        case 'ArrowUp': {
          event.preventDefault()
          setHighlightedIndex((prev) => Math.max(prev - 1, 0))
          break
        }

        case 'Escape': {
          event.preventDefault()
          setIsOpen(false)
          setHighlightedIndex(-1)
          break
        }

        case 'Tab': {
          if (highlightedIndex >= 0 && suggestions[highlightedIndex]) {
            event.preventDefault()
            handleSuggestionSelect(suggestions[highlightedIndex])
          }
          break
        }
      }
    },
    [
      currentInput,
      highlightedBadgeIndex,
      appliedFilters,
      isOpen,
      highlightedIndex,
      suggestions,
      handleSuggestionSelect,
      removeBadge,
      onFiltersChange,
    ]
  )

  // Handle focus: reopen suggestions for whatever is currently typed.
  const handleFocus = useCallback(() => {
    updateSuggestions(currentInput)
  }, [currentInput, updateSuggestions])

  // Handle blur: delay closing so a click on a suggestion can land first.
  const handleBlur = useCallback(() => {
    setTimeout(() => {
      setIsOpen(false)
      setHighlightedIndex(-1)
    }, 150)
  }, [])

  // Clear all filters and the text search, notifying the parent once.
  const clearAll = useCallback(() => {
    setAppliedFilters([])
    setCurrentInput('')
    setTextSearch('')
    setIsOpen(false)
    onFiltersChange([], '')

    if (inputRef.current) {
      inputRef.current.focus()
    }
  }, [onFiltersChange])

  // Initialize from external value (URL params, etc.)
  // Intentionally does NOT call onFiltersChange — the caller already has the values.
  const initializeFromQuery = useCallback((query: string, filters: ParsedFilter[]) => {
    setAppliedFilters(filters)
    setTextSearch(query)
    setCurrentInput('')
  }, [])

  return {
    // State
    appliedFilters,
    currentInput,
    textSearch,
    isOpen,
    suggestions,
    sections,
    highlightedIndex,
    highlightedBadgeIndex,

    // Refs
    inputRef,
    dropdownRef,

    // Handlers
    handleInputChange,
    handleSuggestionSelect,
    handleKeyDown,
    handleFocus,
    handleBlur,
    removeBadge,
    clearAll,
    initializeFromQuery,

    // Setters for external control
    setHighlightedIndex,
  }
}
|
||||
@@ -12,6 +12,7 @@ import { AutocompleteSearch } from '@/app/workspace/[workspaceId]/logs/component
|
||||
import { Sidebar } from '@/app/workspace/[workspaceId]/logs/components/sidebar/sidebar'
|
||||
import Dashboard from '@/app/workspace/[workspaceId]/logs/dashboard'
|
||||
import { formatDate } from '@/app/workspace/[workspaceId]/logs/utils'
|
||||
import { useFolders } from '@/hooks/queries/folders'
|
||||
import { useDebounce } from '@/hooks/use-debounce'
|
||||
import { useFolderStore } from '@/stores/folders/store'
|
||||
import { useFilterStore } from '@/stores/logs/filters/store'
|
||||
@@ -120,7 +121,8 @@ export default function Logs() {
|
||||
setSearchQuery(storeSearchQuery)
|
||||
}, [storeSearchQuery])
|
||||
|
||||
const { fetchFolders, getFolderTree } = useFolderStore()
|
||||
const foldersQuery = useFolders(workspaceId)
|
||||
const { getFolderTree } = useFolderStore()
|
||||
|
||||
useEffect(() => {
|
||||
let cancelled = false
|
||||
@@ -138,7 +140,6 @@ export default function Logs() {
|
||||
if (!cancelled) setAvailableWorkflows([])
|
||||
}
|
||||
|
||||
await fetchFolders(workspaceId)
|
||||
const tree = getFolderTree(workspaceId)
|
||||
|
||||
const flatten = (nodes: any[], parentPath = ''): string[] => {
|
||||
@@ -168,7 +169,7 @@ export default function Logs() {
|
||||
return () => {
|
||||
cancelled = true
|
||||
}
|
||||
}, [workspaceId, fetchFolders, getFolderTree])
|
||||
}, [workspaceId, getFolderTree, foldersQuery.data])
|
||||
|
||||
useEffect(() => {
|
||||
if (isInitialized.current && debouncedSearchQuery !== storeSearchQuery) {
|
||||
@@ -711,9 +712,7 @@ export default function Logs() {
|
||||
value={searchQuery}
|
||||
onChange={setSearchQuery}
|
||||
placeholder='Search logs...'
|
||||
availableWorkflows={availableWorkflows}
|
||||
availableFolders={availableFolders}
|
||||
onOpenChange={(open) => {
|
||||
onOpenChange={(open: boolean) => {
|
||||
isSearchOpenRef.current = open
|
||||
}}
|
||||
/>
|
||||
@@ -840,8 +839,16 @@ export default function Logs() {
|
||||
|
||||
{/* Workflow */}
|
||||
<div className='min-w-0'>
|
||||
<div className='truncate font-medium text-[13px] text-[var(--text-primary)] dark:text-[var(--text-primary)]'>
|
||||
{log.workflow?.name || 'Unknown Workflow'}
|
||||
<div className='flex items-center gap-2 truncate'>
|
||||
<div
|
||||
className='h-[12px] w-[12px] flex-shrink-0 rounded'
|
||||
style={{
|
||||
backgroundColor: log.workflow?.color || '#64748b',
|
||||
}}
|
||||
/>
|
||||
<span className='truncate font-medium text-[13px] text-[var(--text-primary)] dark:text-[var(--text-primary)]'>
|
||||
{log.workflow?.name || 'Unknown Workflow'}
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
|
||||
30
apps/sim/app/workspace/[workspaceId]/logs/types/search.ts
Normal file
30
apps/sim/app/workspace/[workspaceId]/logs/types/search.ts
Normal file
@@ -0,0 +1,30 @@
|
||||
/** A single entry in the log-search suggestions dropdown. */
export interface Suggestion {
  /** Stable identifier used as the React key. */
  id: string
  /** Text inserted into the search input when the suggestion is accepted. */
  value: string
  /** Human-readable text rendered in the dropdown. */
  label: string
  /** Optional secondary text shown next to the label. */
  description?: string
  /** Filter family the suggestion belongs to; 'show-all' submits the value as a free-text search. */
  category?:
    | 'filters'
    | 'level'
    | 'trigger'
    | 'cost'
    | 'date'
    | 'duration'
    | 'workflow'
    | 'folder'
    | 'workflowId'
    | 'executionId'
    | 'show-all'
}

/** A titled group of suggestions rendered as one dropdown section. */
export interface SuggestionSection {
  title: string
  suggestions: Suggestion[]
}

/** A batch of suggestions plus how they should be interpreted and rendered. */
export interface SuggestionGroup {
  /** 'filter-keys' completes a key, 'filter-values' completes its value, 'multi-section' renders `sections`. */
  type: 'filter-keys' | 'filter-values' | 'multi-section'
  /** The key being completed when type is 'filter-values'. */
  filterKey?: string
  /** Flat list of all suggestions (also used for keyboard navigation). */
  suggestions: Suggestion[]
  /** Section layout, present for the 'multi-section' type. */
  sections?: SuggestionSection[]
}
|
||||
@@ -0,0 +1,49 @@
|
||||
'use client'
|
||||
|
||||
import { useEffect } from 'react'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { useProviderModels } from '@/hooks/queries/providers'
|
||||
import { updateOllamaProviderModels, updateOpenRouterProviderModels } from '@/providers/utils'
|
||||
import { useProvidersStore } from '@/stores/providers/store'
|
||||
import type { ProviderName } from '@/stores/providers/types'
|
||||
|
||||
const logger = createLogger('ProviderModelsLoader')
|
||||
|
||||
function useSyncProvider(provider: ProviderName) {
|
||||
const setProviderModels = useProvidersStore((state) => state.setProviderModels)
|
||||
const setProviderLoading = useProvidersStore((state) => state.setProviderLoading)
|
||||
const { data, isLoading, isFetching, error } = useProviderModels(provider)
|
||||
|
||||
useEffect(() => {
|
||||
setProviderLoading(provider, isLoading || isFetching)
|
||||
}, [provider, isLoading, isFetching, setProviderLoading])
|
||||
|
||||
useEffect(() => {
|
||||
if (!data) return
|
||||
|
||||
try {
|
||||
if (provider === 'ollama') {
|
||||
updateOllamaProviderModels(data)
|
||||
} else if (provider === 'openrouter') {
|
||||
void updateOpenRouterProviderModels(data)
|
||||
}
|
||||
} catch (syncError) {
|
||||
logger.warn(`Failed to sync provider definitions for ${provider}`, syncError as Error)
|
||||
}
|
||||
|
||||
setProviderModels(provider, data)
|
||||
}, [provider, data, setProviderModels])
|
||||
|
||||
useEffect(() => {
|
||||
if (error) {
|
||||
logger.error(`Failed to load ${provider} models`, error)
|
||||
}
|
||||
}, [provider, error])
|
||||
}
|
||||
|
||||
export function ProviderModelsLoader() {
|
||||
useSyncProvider('base')
|
||||
useSyncProvider('ollama')
|
||||
useSyncProvider('openrouter')
|
||||
return null
|
||||
}
|
||||
@@ -4,6 +4,7 @@ import React from 'react'
|
||||
import { Tooltip } from '@/components/emcn'
|
||||
import { GlobalCommandsProvider } from '@/app/workspace/[workspaceId]/providers/global-commands-provider'
|
||||
import { WorkspacePermissionsProvider } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
|
||||
import { ProviderModelsLoader } from './provider-models-loader'
|
||||
import { SettingsLoader } from './settings-loader'
|
||||
|
||||
interface ProvidersProps {
|
||||
@@ -14,6 +15,7 @@ const Providers = React.memo<ProvidersProps>(({ children }) => {
|
||||
return (
|
||||
<>
|
||||
<SettingsLoader />
|
||||
<ProviderModelsLoader />
|
||||
<GlobalCommandsProvider>
|
||||
<Tooltip.Provider delayDuration={600} skipDelayDuration={0}>
|
||||
<WorkspacePermissionsProvider>{children}</WorkspacePermissionsProvider>
|
||||
|
||||
@@ -288,7 +288,7 @@ export function OutputSelect({
|
||||
<PopoverContent
|
||||
ref={popoverRef}
|
||||
side='bottom'
|
||||
align='end'
|
||||
align='start'
|
||||
sideOffset={4}
|
||||
maxHeight={140}
|
||||
maxWidth={140}
|
||||
|
||||
@@ -1,10 +1,16 @@
|
||||
'use client'
|
||||
|
||||
import { useCallback } from 'react'
|
||||
import { Layout, LibraryBig, Search } from 'lucide-react'
|
||||
import Image from 'next/image'
|
||||
import { useParams, useRouter } from 'next/navigation'
|
||||
import { Button } from '@/components/emcn'
|
||||
import { AgentIcon } from '@/components/icons'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { cn } from '@/lib/utils'
|
||||
import { useSearchModalStore } from '@/stores/search-modal/store'
|
||||
|
||||
const logger = createLogger('WorkflowCommandList')
|
||||
|
||||
/**
|
||||
* Command item data structure
|
||||
@@ -49,13 +55,131 @@ const commands: CommandItem[] = [
|
||||
* Centered on the screen for empty workflows
|
||||
*/
|
||||
export function CommandList() {
|
||||
const params = useParams()
|
||||
const router = useRouter()
|
||||
const { open: openSearchModal } = useSearchModalStore()
|
||||
|
||||
const workspaceId = params.workspaceId as string | undefined
|
||||
|
||||
/**
|
||||
* Handle click on a command row.
|
||||
*
|
||||
* Mirrors the behavior of the corresponding global keyboard shortcuts:
|
||||
* - Templates: navigate to workspace templates
|
||||
* - New Agent: add an agent block to the canvas
|
||||
* - Logs: navigate to workspace logs
|
||||
* - Search Blocks: open the universal search modal
|
||||
*
|
||||
* @param label - Command label that was clicked.
|
||||
*/
|
||||
const handleCommandClick = useCallback(
|
||||
(label: string) => {
|
||||
try {
|
||||
switch (label) {
|
||||
case 'Templates': {
|
||||
if (!workspaceId) {
|
||||
logger.warn('No workspace ID found, cannot navigate to templates from command list')
|
||||
return
|
||||
}
|
||||
router.push(`/workspace/${workspaceId}/templates`)
|
||||
return
|
||||
}
|
||||
case 'New Agent': {
|
||||
const event = new CustomEvent('add-block-from-toolbar', {
|
||||
detail: { type: 'agent', enableTriggerMode: false },
|
||||
})
|
||||
window.dispatchEvent(event)
|
||||
return
|
||||
}
|
||||
case 'Logs': {
|
||||
if (!workspaceId) {
|
||||
logger.warn('No workspace ID found, cannot navigate to logs from command list')
|
||||
return
|
||||
}
|
||||
router.push(`/workspace/${workspaceId}/logs`)
|
||||
return
|
||||
}
|
||||
case 'Search Blocks': {
|
||||
openSearchModal()
|
||||
return
|
||||
}
|
||||
default:
|
||||
logger.warn('Unknown command label clicked in command list', { label })
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error('Failed to handle command click in command list', { error, label })
|
||||
}
|
||||
},
|
||||
[router, workspaceId, openSearchModal]
|
||||
)
|
||||
|
||||
/**
|
||||
* Handle drag-over events from the toolbar.
|
||||
*
|
||||
* When a toolbar item is dragged over the command list, mark the drop as valid
|
||||
* so the browser shows the appropriate drop cursor. Only reacts to toolbar
|
||||
* drags that carry the expected JSON payload.
|
||||
*
|
||||
* @param event - Drag event from the browser.
|
||||
*/
|
||||
const handleDragOver = useCallback((event: React.DragEvent<HTMLDivElement>) => {
|
||||
if (!event.dataTransfer?.types.includes('application/json')) {
|
||||
return
|
||||
}
|
||||
event.preventDefault()
|
||||
event.dataTransfer.dropEffect = 'move'
|
||||
}, [])
|
||||
|
||||
/**
|
||||
* Handle drops of toolbar items onto the command list.
|
||||
*
|
||||
* This forwards the drop information (block type and cursor position)
|
||||
* to the workflow canvas via a custom event. The workflow component
|
||||
* then reuses its existing drop logic to place the block precisely
|
||||
* under the cursor, including container/subflow handling.
|
||||
*
|
||||
* @param event - Drop event from the browser.
|
||||
*/
|
||||
const handleDrop = useCallback((event: React.DragEvent<HTMLDivElement>) => {
|
||||
if (!event.dataTransfer?.types.includes('application/json')) {
|
||||
return
|
||||
}
|
||||
|
||||
event.preventDefault()
|
||||
|
||||
try {
|
||||
const raw = event.dataTransfer.getData('application/json')
|
||||
if (!raw) return
|
||||
|
||||
const data = JSON.parse(raw) as { type?: string; enableTriggerMode?: boolean }
|
||||
if (!data?.type || data.type === 'connectionBlock') return
|
||||
|
||||
const overlayDropEvent = new CustomEvent('toolbar-drop-on-empty-workflow-overlay', {
|
||||
detail: {
|
||||
type: data.type,
|
||||
enableTriggerMode: data.enableTriggerMode ?? false,
|
||||
clientX: event.clientX,
|
||||
clientY: event.clientY,
|
||||
},
|
||||
})
|
||||
|
||||
window.dispatchEvent(overlayDropEvent)
|
||||
} catch (error) {
|
||||
logger.error('Failed to handle drop on command list', { error })
|
||||
}
|
||||
}, [])
|
||||
|
||||
return (
|
||||
<div
|
||||
className={cn(
|
||||
'pointer-events-none absolute inset-0 mb-[50px] flex items-center justify-center'
|
||||
)}
|
||||
>
|
||||
<div className='pointer-events-none flex flex-col gap-[8px]'>
|
||||
<div
|
||||
className='pointer-events-auto flex flex-col gap-[8px]'
|
||||
onDragOver={handleDragOver}
|
||||
onDrop={handleDrop}
|
||||
>
|
||||
{/* Logo */}
|
||||
<div className='mb-[20px] flex justify-center'>
|
||||
<Image
|
||||
@@ -79,6 +203,7 @@ export function CommandList() {
|
||||
<div
|
||||
key={command.label}
|
||||
className='group flex cursor-pointer items-center justify-between gap-[60px]'
|
||||
onClick={() => handleCommandClick(command.label)}
|
||||
>
|
||||
{/* Left side: Icon and Label */}
|
||||
<div className='flex items-center gap-[8px]'>
|
||||
@@ -91,7 +216,7 @@ export function CommandList() {
|
||||
{/* Right side: Keyboard Shortcut */}
|
||||
<div className='flex items-center gap-[4px]'>
|
||||
<Button
|
||||
className='group-hover:-translate-y-0.5 w-[26px] py-[3px] text-[12px] hover:translate-y-0 hover:text-[var(--text-tertiary)] hover:shadow-[0_2px_0_0] group-hover:text-[var(--text-primary)] group-hover:shadow-[0_4px_0_0]'
|
||||
className='group-hover:-translate-y-0.5 w-[26px] py-[3px] text-[12px] hover:translate-y-0 hover:text-[var(--text-tertiary)] hover:shadow-[0_2px_0_0_rgba(48,48,48,1)] group-hover:text-[var(--text-primary)] group-hover:shadow-[0_4px_0_0_rgba(48,48,48,1)]'
|
||||
variant='3d'
|
||||
>
|
||||
<span>⌘</span>
|
||||
@@ -99,7 +224,7 @@ export function CommandList() {
|
||||
{shortcuts.map((key, index) => (
|
||||
<Button
|
||||
key={index}
|
||||
className='group-hover:-translate-y-0.5 w-[26px] py-[3px] text-[12px] hover:translate-y-0 hover:text-[var(--text-tertiary)] hover:shadow-[0_2px_0_0] group-hover:text-[var(--text-primary)] group-hover:shadow-[0_4px_0_0]'
|
||||
className='group-hover:-translate-y-0.5 w-[26px] py-[3px] text-[12px] hover:translate-y-0 hover:text-[var(--text-tertiary)] hover:shadow-[0_2px_0_0_rgba(48,48,48,1)] group-hover:text-[var(--text-primary)] group-hover:shadow-[0_4px_0_0_rgba(48,48,48,1)]'
|
||||
variant='3d'
|
||||
>
|
||||
{key}
|
||||
|
||||
@@ -196,7 +196,7 @@ export function ExampleCommand({
|
||||
<ChevronDown className='ml-1 h-3 w-3 flex-shrink-0' />
|
||||
</Button>
|
||||
</DropdownMenuTrigger>
|
||||
<DropdownMenuContent align='end'>
|
||||
<DropdownMenuContent align='end' className='z-[10000050]'>
|
||||
<DropdownMenuItem
|
||||
className='cursor-pointer'
|
||||
onClick={() => setExampleType('execute')}
|
||||
|
||||
@@ -18,11 +18,6 @@ interface CursorRenderData {
|
||||
color: string
|
||||
}
|
||||
|
||||
const POINTER_OFFSET = {
|
||||
x: 0,
|
||||
y: 0,
|
||||
}
|
||||
|
||||
const CursorsComponent = () => {
|
||||
const { presenceUsers } = useSocket()
|
||||
const viewport = useViewport()
|
||||
@@ -60,23 +55,15 @@ const CursorsComponent = () => {
|
||||
transition: 'transform 0.12s ease-out',
|
||||
}}
|
||||
>
|
||||
<div
|
||||
className='relative'
|
||||
style={{ transform: `translate(${-POINTER_OFFSET.x}px, ${-POINTER_OFFSET.y}px)` }}
|
||||
>
|
||||
{/* Simple cursor pointer */}
|
||||
<svg width={16} height={18} viewBox='0 0 16 18' fill='none'>
|
||||
<path
|
||||
d='M0.5 0.5L0.5 12L4 9L6.5 15L8.5 14L6 8L12 8L0.5 0.5Z'
|
||||
fill={color}
|
||||
stroke='rgba(0,0,0,0.3)'
|
||||
strokeWidth={1}
|
||||
/>
|
||||
<div className='relative flex items-start'>
|
||||
{/* Filled mouse pointer cursor */}
|
||||
<svg className='-mt-[18px]' width={24} height={24} viewBox='0 0 24 24' fill={color}>
|
||||
<path d='M4.037 4.688a.495.495 0 0 1 .651-.651l16 6.5a.5.5 0 0 1-.063.947l-6.124 1.58a2 2 0 0 0-1.438 1.435l-1.579 6.126a.5.5 0 0 1-.947.063z' />
|
||||
</svg>
|
||||
|
||||
{/* Name tag underneath and to the right */}
|
||||
{/* Name tag to the right, background tightly wrapping text */}
|
||||
<div
|
||||
className='absolute top-[18px] left-[4px] h-[21px] w-[140px] truncate whitespace-nowrap rounded-[2px] p-[6px] font-medium text-[11px] text-[var(--surface-1)]'
|
||||
className='ml-[-4px] inline-flex max-w-[160px] truncate whitespace-nowrap rounded-[2px] px-1.5 py-[2px] font-medium text-[11px] text-[var(--surface-1)]'
|
||||
style={{ backgroundColor: color }}
|
||||
>
|
||||
{name}
|
||||
|
||||
@@ -221,17 +221,26 @@ export function Code({
|
||||
// Derived state
|
||||
const effectiveLanguage = (languageValue as 'javascript' | 'python' | 'json') || language
|
||||
|
||||
const trimmedCode = code.trim()
|
||||
const containsReferencePlaceholders =
|
||||
trimmedCode.includes('{{') ||
|
||||
trimmedCode.includes('}}') ||
|
||||
trimmedCode.includes('<') ||
|
||||
trimmedCode.includes('>')
|
||||
|
||||
const shouldValidateJson = effectiveLanguage === 'json' && !containsReferencePlaceholders
|
||||
|
||||
const isValidJson = useMemo(() => {
|
||||
if (subBlockId !== 'responseFormat' || !code.trim()) {
|
||||
if (!shouldValidateJson || !trimmedCode) {
|
||||
return true
|
||||
}
|
||||
try {
|
||||
JSON.parse(code)
|
||||
JSON.parse(trimmedCode)
|
||||
return true
|
||||
} catch {
|
||||
return false
|
||||
}
|
||||
}, [subBlockId, code])
|
||||
}, [shouldValidateJson, trimmedCode])
|
||||
|
||||
const gutterWidthPx = useMemo(() => {
|
||||
const lineCount = code.split('\n').length
|
||||
@@ -309,14 +318,29 @@ export function Code({
|
||||
: storeValue
|
||||
|
||||
// Effects: JSON validation
|
||||
const lastValidationStatus = useRef<boolean>(true)
|
||||
|
||||
useEffect(() => {
|
||||
if (onValidationChange && subBlockId === 'responseFormat') {
|
||||
const timeoutId = setTimeout(() => {
|
||||
onValidationChange(isValidJson)
|
||||
}, 150)
|
||||
return () => clearTimeout(timeoutId)
|
||||
if (!onValidationChange) return
|
||||
|
||||
const nextStatus = shouldValidateJson ? isValidJson : true
|
||||
if (lastValidationStatus.current === nextStatus) {
|
||||
return
|
||||
}
|
||||
}, [isValidJson, onValidationChange, subBlockId])
|
||||
|
||||
lastValidationStatus.current = nextStatus
|
||||
|
||||
if (!shouldValidateJson) {
|
||||
onValidationChange(nextStatus)
|
||||
return
|
||||
}
|
||||
|
||||
const timeoutId = setTimeout(() => {
|
||||
onValidationChange(nextStatus)
|
||||
}, 150)
|
||||
|
||||
return () => clearTimeout(timeoutId)
|
||||
}, [isValidJson, onValidationChange, shouldValidateJson])
|
||||
|
||||
// Effects: AI stream handlers setup
|
||||
useEffect(() => {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
'use client'
|
||||
|
||||
import { useCallback, useEffect, useMemo, useState } from 'react'
|
||||
import { useCallback, useEffect, useState } from 'react'
|
||||
import { Check, ChevronDown, FileText, RefreshCw } from 'lucide-react'
|
||||
import { Button } from '@/components/ui/button'
|
||||
import {
|
||||
@@ -15,8 +15,9 @@ import { Popover, PopoverContent, PopoverTrigger } from '@/components/ui/popover
|
||||
import { useDependsOnGate } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel-new/components/editor/components/sub-block/hooks/use-depends-on-gate'
|
||||
import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel-new/components/editor/components/sub-block/hooks/use-sub-block-value'
|
||||
import type { SubBlockConfig } from '@/blocks/types'
|
||||
import { useKnowledgeBaseDocuments } from '@/hooks/use-knowledge'
|
||||
import { useDisplayNamesStore } from '@/stores/display-names/store'
|
||||
import { type DocumentData, useKnowledgeStore } from '@/stores/knowledge/store'
|
||||
import type { DocumentData } from '@/stores/knowledge/store'
|
||||
|
||||
interface DocumentSelectorProps {
|
||||
blockId: string
|
||||
@@ -45,68 +46,29 @@ export function DocumentSelector({
|
||||
? knowledgeBaseId
|
||||
: null
|
||||
|
||||
const documentsCache = useKnowledgeStore(
|
||||
useCallback(
|
||||
(state) =>
|
||||
normalizedKnowledgeBaseId ? state.documents[normalizedKnowledgeBaseId] : undefined,
|
||||
[normalizedKnowledgeBaseId]
|
||||
)
|
||||
)
|
||||
|
||||
const isDocumentsLoading = useKnowledgeStore(
|
||||
useCallback(
|
||||
(state) =>
|
||||
normalizedKnowledgeBaseId ? state.isDocumentsLoading(normalizedKnowledgeBaseId) : false,
|
||||
[normalizedKnowledgeBaseId]
|
||||
)
|
||||
)
|
||||
|
||||
const getDocuments = useKnowledgeStore((state) => state.getDocuments)
|
||||
|
||||
const value = isPreview ? previewValue : storeValue
|
||||
|
||||
const { finalDisabled } = useDependsOnGate(blockId, subBlock, { disabled, isPreview })
|
||||
const isDisabled = finalDisabled
|
||||
|
||||
const documents = useMemo<DocumentData[]>(() => {
|
||||
if (!documentsCache) return []
|
||||
return documentsCache.documents ?? []
|
||||
}, [documentsCache])
|
||||
|
||||
const loadDocuments = useCallback(async () => {
|
||||
if (!normalizedKnowledgeBaseId) {
|
||||
setError('No knowledge base selected')
|
||||
return
|
||||
}
|
||||
|
||||
setError(null)
|
||||
|
||||
try {
|
||||
const fetchedDocuments = await getDocuments(normalizedKnowledgeBaseId)
|
||||
|
||||
if (fetchedDocuments.length > 0) {
|
||||
const documentMap = fetchedDocuments.reduce<Record<string, string>>((acc, doc) => {
|
||||
acc[doc.id] = doc.filename
|
||||
return acc
|
||||
}, {})
|
||||
|
||||
useDisplayNamesStore
|
||||
.getState()
|
||||
.setDisplayNames('documents', normalizedKnowledgeBaseId, documentMap)
|
||||
}
|
||||
} catch (err) {
|
||||
if (err instanceof Error && err.name === 'AbortError') return
|
||||
setError(err instanceof Error ? err.message : 'Failed to fetch documents')
|
||||
}
|
||||
}, [normalizedKnowledgeBaseId, getDocuments])
|
||||
const {
|
||||
documents,
|
||||
isLoading: documentsLoading,
|
||||
error: documentsError,
|
||||
refreshDocuments,
|
||||
} = useKnowledgeBaseDocuments(normalizedKnowledgeBaseId ?? '', {
|
||||
limit: 500,
|
||||
offset: 0,
|
||||
enabled: open && Boolean(normalizedKnowledgeBaseId),
|
||||
})
|
||||
|
||||
const handleOpenChange = (isOpen: boolean) => {
|
||||
if (isPreview || isDisabled) return
|
||||
|
||||
setOpen(isOpen)
|
||||
|
||||
if (isOpen && (!documentsCache || !documentsCache.documents.length)) {
|
||||
void loadDocuments()
|
||||
if (isOpen && normalizedKnowledgeBaseId) {
|
||||
void refreshDocuments()
|
||||
}
|
||||
}
|
||||
|
||||
@@ -119,9 +81,15 @@ export function DocumentSelector({
|
||||
}
|
||||
|
||||
useEffect(() => {
|
||||
setError(null)
|
||||
if (!normalizedKnowledgeBaseId) {
|
||||
setError(null)
|
||||
}
|
||||
}, [normalizedKnowledgeBaseId])
|
||||
|
||||
useEffect(() => {
|
||||
setError(documentsError)
|
||||
}, [documentsError])
|
||||
|
||||
useEffect(() => {
|
||||
if (!normalizedKnowledgeBaseId || documents.length === 0) return
|
||||
|
||||
@@ -152,7 +120,7 @@ export function DocumentSelector({
|
||||
}
|
||||
|
||||
const label = subBlock.placeholder || 'Select document'
|
||||
const isLoading = isDocumentsLoading && !error
|
||||
const isLoading = documentsLoading && !error
|
||||
|
||||
// Always use cached display name
|
||||
const displayName = useDisplayNamesStore(
|
||||
|
||||
@@ -1,13 +1,16 @@
|
||||
'use client'
|
||||
|
||||
import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
|
||||
import { useCallback, useMemo } from 'react'
|
||||
import { useQueries } from '@tanstack/react-query'
|
||||
import { X } from 'lucide-react'
|
||||
import { useParams } from 'next/navigation'
|
||||
import { Combobox, type ComboboxOption } from '@/components/emcn/components/combobox/combobox'
|
||||
import { PackageSearchIcon } from '@/components/icons'
|
||||
import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel-new/components/editor/components/sub-block/hooks/use-sub-block-value'
|
||||
import type { SubBlockConfig } from '@/blocks/types'
|
||||
import { type KnowledgeBaseData, useKnowledgeStore } from '@/stores/knowledge/store'
|
||||
import { fetchKnowledgeBase, knowledgeKeys } from '@/hooks/queries/knowledge'
|
||||
import { useKnowledgeBasesList } from '@/hooks/use-knowledge'
|
||||
import type { KnowledgeBaseData } from '@/stores/knowledge/store'
|
||||
|
||||
interface KnowledgeBaseSelectorProps {
|
||||
blockId: string
|
||||
@@ -29,14 +32,11 @@ export function KnowledgeBaseSelector({
|
||||
const params = useParams()
|
||||
const workspaceId = params.workspaceId as string
|
||||
|
||||
const knowledgeBasesList = useKnowledgeStore((state) => state.knowledgeBasesList)
|
||||
const knowledgeBasesMap = useKnowledgeStore((state) => state.knowledgeBases)
|
||||
const loadingKnowledgeBasesList = useKnowledgeStore((state) => state.loadingKnowledgeBasesList)
|
||||
const getKnowledgeBasesList = useKnowledgeStore((state) => state.getKnowledgeBasesList)
|
||||
const getKnowledgeBase = useKnowledgeStore((state) => state.getKnowledgeBase)
|
||||
|
||||
const [error, setError] = useState<string | null>(null)
|
||||
const hasRequestedListRef = useRef(false)
|
||||
const {
|
||||
knowledgeBases,
|
||||
isLoading: isKnowledgeBasesLoading,
|
||||
error,
|
||||
} = useKnowledgeBasesList(workspaceId)
|
||||
|
||||
// Use the proper hook to get the current value and setter - this prevents infinite loops
|
||||
const [storeValue, setStoreValue] = useSubBlockValue(blockId, subBlock.id)
|
||||
@@ -46,28 +46,6 @@ export function KnowledgeBaseSelector({
|
||||
|
||||
const isMultiSelect = subBlock.multiSelect === true
|
||||
|
||||
/**
|
||||
* Convert knowledge bases to combobox options format
|
||||
*/
|
||||
const combinedKnowledgeBases = useMemo<KnowledgeBaseData[]>(() => {
|
||||
const merged = new Map<string, KnowledgeBaseData>()
|
||||
knowledgeBasesList.forEach((kb) => {
|
||||
merged.set(kb.id, kb)
|
||||
})
|
||||
Object.values(knowledgeBasesMap).forEach((kb) => {
|
||||
merged.set(kb.id, kb)
|
||||
})
|
||||
return Array.from(merged.values())
|
||||
}, [knowledgeBasesList, knowledgeBasesMap])
|
||||
|
||||
const options = useMemo<ComboboxOption[]>(() => {
|
||||
return combinedKnowledgeBases.map((kb) => ({
|
||||
label: kb.name,
|
||||
value: kb.id,
|
||||
icon: PackageSearchIcon,
|
||||
}))
|
||||
}, [combinedKnowledgeBases])
|
||||
|
||||
/**
|
||||
* Parse value into array of selected IDs
|
||||
*/
|
||||
@@ -84,6 +62,39 @@ export function KnowledgeBaseSelector({
|
||||
return []
|
||||
}, [value])
|
||||
|
||||
/**
|
||||
* Convert knowledge bases to combobox options format
|
||||
*/
|
||||
const selectedKnowledgeBaseQueries = useQueries({
|
||||
queries: selectedIds.map((selectedId) => ({
|
||||
queryKey: knowledgeKeys.detail(selectedId),
|
||||
queryFn: () => fetchKnowledgeBase(selectedId),
|
||||
enabled: Boolean(selectedId),
|
||||
staleTime: 60 * 1000,
|
||||
})),
|
||||
})
|
||||
|
||||
const combinedKnowledgeBases = useMemo<KnowledgeBaseData[]>(() => {
|
||||
const merged = new Map<string, KnowledgeBaseData>()
|
||||
knowledgeBases.forEach((kb) => merged.set(kb.id, kb))
|
||||
|
||||
selectedKnowledgeBaseQueries.forEach((query) => {
|
||||
if (query.data) {
|
||||
merged.set(query.data.id, query.data)
|
||||
}
|
||||
})
|
||||
|
||||
return Array.from(merged.values())
|
||||
}, [knowledgeBases, selectedKnowledgeBaseQueries])
|
||||
|
||||
const options = useMemo<ComboboxOption[]>(() => {
|
||||
return combinedKnowledgeBases.map((kb) => ({
|
||||
label: kb.name,
|
||||
value: kb.id,
|
||||
icon: PackageSearchIcon,
|
||||
}))
|
||||
}, [combinedKnowledgeBases])
|
||||
|
||||
/**
|
||||
* Compute selected knowledge bases for tag display
|
||||
*/
|
||||
@@ -144,44 +155,6 @@ export function KnowledgeBaseSelector({
|
||||
[isPreview, selectedIds, setStoreValue, onKnowledgeBaseSelect]
|
||||
)
|
||||
|
||||
/**
|
||||
* Fetch knowledge bases on initial mount
|
||||
*/
|
||||
useEffect(() => {
|
||||
if (hasRequestedListRef.current) return
|
||||
|
||||
let cancelled = false
|
||||
hasRequestedListRef.current = true
|
||||
setError(null)
|
||||
getKnowledgeBasesList(workspaceId).catch((err) => {
|
||||
if (cancelled) return
|
||||
setError(err instanceof Error ? err.message : 'Failed to load knowledge bases')
|
||||
})
|
||||
|
||||
return () => {
|
||||
cancelled = true
|
||||
}
|
||||
}, [workspaceId, getKnowledgeBasesList])
|
||||
|
||||
/**
|
||||
* Ensure selected knowledge bases are cached
|
||||
*/
|
||||
useEffect(() => {
|
||||
if (selectedIds.length === 0) return
|
||||
|
||||
selectedIds.forEach((id) => {
|
||||
const isKnown =
|
||||
Boolean(knowledgeBasesMap[id]) ||
|
||||
knowledgeBasesList.some((knowledgeBase) => knowledgeBase.id === id)
|
||||
|
||||
if (!isKnown) {
|
||||
void getKnowledgeBase(id).catch(() => {
|
||||
// Ignore fetch errors here; they will surface via display hooks if needed
|
||||
})
|
||||
}
|
||||
})
|
||||
}, [selectedIds, knowledgeBasesList, knowledgeBasesMap, getKnowledgeBase])
|
||||
|
||||
const label =
|
||||
subBlock.placeholder || (isMultiSelect ? 'Select knowledge bases' : 'Select knowledge base')
|
||||
|
||||
@@ -221,7 +194,7 @@ export function KnowledgeBaseSelector({
|
||||
onMultiSelectChange={handleMultiSelectChange}
|
||||
placeholder={label}
|
||||
disabled={disabled || isPreview}
|
||||
isLoading={loadingKnowledgeBasesList}
|
||||
isLoading={isKnowledgeBasesLoading}
|
||||
error={error}
|
||||
/>
|
||||
</div>
|
||||
|
||||
@@ -190,7 +190,7 @@ const renderLabel = (
|
||||
<div className='flex items-center gap-[6px] whitespace-nowrap'>
|
||||
{config.title}
|
||||
{required && <span className='ml-0.5'>*</span>}
|
||||
{config.id === 'responseFormat' && (
|
||||
{config.type === 'code' && config.language === 'json' && (
|
||||
<Tooltip.Root>
|
||||
<Tooltip.Trigger asChild>
|
||||
<AlertTriangle
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import { useCallback, useMemo } from 'react'
|
||||
import { cn } from '@/lib/utils'
|
||||
import { useExecutionStore } from '@/stores/execution/store'
|
||||
import { usePanelEditorStore } from '@/stores/panel-new/editor/store'
|
||||
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
||||
import { useBlockState } from '../components/workflow-block/hooks'
|
||||
@@ -28,6 +29,10 @@ export function useBlockCore({ blockId, data, isPending = false }: UseBlockCoreO
|
||||
data
|
||||
)
|
||||
|
||||
// Run path state (from last execution)
|
||||
const lastRunPath = useExecutionStore((state) => state.lastRunPath)
|
||||
const runPathStatus = lastRunPath.get(blockId)
|
||||
|
||||
// Focus management
|
||||
const setCurrentBlockId = usePanelEditorStore((state) => state.setCurrentBlockId)
|
||||
const currentBlockId = usePanelEditorStore((state) => state.currentBlockId)
|
||||
@@ -38,6 +43,7 @@ export function useBlockCore({ blockId, data, isPending = false }: UseBlockCoreO
|
||||
}, [blockId, setCurrentBlockId])
|
||||
|
||||
// Ring styling based on all states
|
||||
// Priority: active (animated) > pending > focused > deleted > diff > run path
|
||||
const { hasRing, ringStyles } = useMemo(() => {
|
||||
const hasRing =
|
||||
isActive ||
|
||||
@@ -45,20 +51,52 @@ export function useBlockCore({ blockId, data, isPending = false }: UseBlockCoreO
|
||||
isFocused ||
|
||||
diffStatus === 'new' ||
|
||||
diffStatus === 'edited' ||
|
||||
isDeletedBlock
|
||||
isDeletedBlock ||
|
||||
!!runPathStatus
|
||||
|
||||
const ringStyles = cn(
|
||||
hasRing && 'ring-[1.75px]',
|
||||
isActive && 'ring-[#8C10FF] animate-pulse-ring',
|
||||
isPending && 'ring-[var(--warning)]',
|
||||
isFocused && 'ring-[var(--brand-secondary)]',
|
||||
diffStatus === 'new' && 'ring-[#22C55F]',
|
||||
diffStatus === 'edited' && 'ring-[var(--warning)]',
|
||||
isDeletedBlock && 'ring-[var(--text-error)]'
|
||||
// Executing block: animated ring cycling through gray tones (animation handles all styling)
|
||||
isActive && 'animate-ring-pulse',
|
||||
// Non-active states use standard ring utilities
|
||||
!isActive && hasRing && 'ring-[1.75px]',
|
||||
// Pending state: warning ring
|
||||
!isActive && isPending && 'ring-[var(--warning)]',
|
||||
// Focused (selected) state: brand ring
|
||||
!isActive && !isPending && isFocused && 'ring-[var(--brand-secondary)]',
|
||||
// Deleted state (highest priority after active/pending/focused)
|
||||
!isActive && !isPending && !isFocused && isDeletedBlock && 'ring-[var(--text-error)]',
|
||||
// Diff states
|
||||
!isActive &&
|
||||
!isPending &&
|
||||
!isFocused &&
|
||||
!isDeletedBlock &&
|
||||
diffStatus === 'new' &&
|
||||
'ring-[#22C55E]',
|
||||
!isActive &&
|
||||
!isPending &&
|
||||
!isFocused &&
|
||||
!isDeletedBlock &&
|
||||
diffStatus === 'edited' &&
|
||||
'ring-[var(--warning)]',
|
||||
// Run path states (lowest priority - only show if no other states active)
|
||||
!isActive &&
|
||||
!isPending &&
|
||||
!isFocused &&
|
||||
!isDeletedBlock &&
|
||||
!diffStatus &&
|
||||
runPathStatus === 'success' &&
|
||||
'ring-[var(--surface-14)]',
|
||||
!isActive &&
|
||||
!isPending &&
|
||||
!isFocused &&
|
||||
!isDeletedBlock &&
|
||||
!diffStatus &&
|
||||
runPathStatus === 'error' &&
|
||||
'ring-[var(--text-error)]'
|
||||
)
|
||||
|
||||
return { hasRing, ringStyles }
|
||||
}, [isActive, isPending, isFocused, diffStatus, isDeletedBlock])
|
||||
}, [isActive, isPending, isFocused, diffStatus, isDeletedBlock, runPathStatus])
|
||||
|
||||
return {
|
||||
// Workflow context
|
||||
|
||||
@@ -85,7 +85,7 @@ export function useWorkflowExecution() {
|
||||
const currentWorkflow = useCurrentWorkflow()
|
||||
const { activeWorkflowId, workflows } = useWorkflowRegistry()
|
||||
const { toggleConsole, addConsole } = useTerminalConsoleStore()
|
||||
const { getAllVariables, loadWorkspaceEnvironment } = useEnvironmentStore()
|
||||
const { getAllVariables } = useEnvironmentStore()
|
||||
const { getVariablesByWorkflowId, variables } = useVariablesStore()
|
||||
const {
|
||||
isExecuting,
|
||||
@@ -99,6 +99,7 @@ export function useWorkflowExecution() {
|
||||
setExecutor,
|
||||
setDebugContext,
|
||||
setActiveBlocks,
|
||||
setBlockRunStatus,
|
||||
} = useExecutionStore()
|
||||
const [executionResult, setExecutionResult] = useState<ExecutionResult | null>(null)
|
||||
const executionStream = useExecutionStream()
|
||||
@@ -650,7 +651,6 @@ export function useWorkflowExecution() {
|
||||
currentWorkflow,
|
||||
toggleConsole,
|
||||
getAllVariables,
|
||||
loadWorkspaceEnvironment,
|
||||
getVariablesByWorkflowId,
|
||||
setIsExecuting,
|
||||
setIsDebugging,
|
||||
@@ -901,6 +901,9 @@ export function useWorkflowExecution() {
|
||||
// Create a new Set to trigger React re-render
|
||||
setActiveBlocks(new Set(activeBlocksSet))
|
||||
|
||||
// Track successful block execution in run path
|
||||
setBlockRunStatus(data.blockId, 'success')
|
||||
|
||||
// Add to console
|
||||
addConsole({
|
||||
input: data.input || {},
|
||||
@@ -933,6 +936,9 @@ export function useWorkflowExecution() {
|
||||
// Create a new Set to trigger React re-render
|
||||
setActiveBlocks(new Set(activeBlocksSet))
|
||||
|
||||
// Track failed block execution in run path
|
||||
setBlockRunStatus(data.blockId, 'error')
|
||||
|
||||
// Add error to console
|
||||
addConsole({
|
||||
input: data.input || {},
|
||||
|
||||
@@ -553,246 +553,21 @@ const WorkflowContent = React.memo(() => {
|
||||
return sourceHandle
|
||||
}, [])
|
||||
|
||||
// Listen for toolbar block click events
|
||||
useEffect(() => {
|
||||
const handleAddBlockFromToolbar = (event: CustomEvent) => {
|
||||
// Check if user has permission to interact with blocks
|
||||
if (!effectivePermissions.canEdit) {
|
||||
return
|
||||
}
|
||||
|
||||
const { type, enableTriggerMode } = event.detail
|
||||
|
||||
if (!type) return
|
||||
if (type === 'connectionBlock') return
|
||||
|
||||
// Special handling for container nodes (loop or parallel)
|
||||
if (type === 'loop' || type === 'parallel') {
|
||||
const id = crypto.randomUUID()
|
||||
const baseName = type === 'loop' ? 'Loop' : 'Parallel'
|
||||
const name = getUniqueBlockName(baseName, blocks)
|
||||
|
||||
const centerPosition = project({
|
||||
x: window.innerWidth / 2,
|
||||
y: window.innerHeight / 2,
|
||||
})
|
||||
|
||||
// Auto-connect logic for container nodes
|
||||
const isAutoConnectEnabled = useGeneralStore.getState().isAutoConnectEnabled
|
||||
let autoConnectEdge
|
||||
if (isAutoConnectEnabled) {
|
||||
const closestBlock = findClosestOutput(centerPosition)
|
||||
if (closestBlock) {
|
||||
const sourceHandle = determineSourceHandle(closestBlock)
|
||||
autoConnectEdge = {
|
||||
id: crypto.randomUUID(),
|
||||
source: closestBlock.id,
|
||||
target: id,
|
||||
sourceHandle,
|
||||
targetHandle: 'target',
|
||||
type: 'workflowEdge',
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Add the container node with default dimensions and auto-connect edge
|
||||
addBlock(
|
||||
id,
|
||||
type,
|
||||
name,
|
||||
centerPosition,
|
||||
{
|
||||
width: 500,
|
||||
height: 300,
|
||||
type: 'subflowNode',
|
||||
},
|
||||
undefined,
|
||||
undefined,
|
||||
autoConnectEdge
|
||||
)
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
const blockConfig = getBlock(type)
|
||||
if (!blockConfig) {
|
||||
logger.error('Invalid block type:', { type })
|
||||
return
|
||||
}
|
||||
|
||||
// Calculate the center position of the viewport
|
||||
const centerPosition = project({
|
||||
x: window.innerWidth / 2,
|
||||
y: window.innerHeight / 2,
|
||||
})
|
||||
|
||||
// Create a new block with a unique ID
|
||||
const id = crypto.randomUUID()
|
||||
// Prefer semantic default names for triggers; then ensure unique numbering centrally
|
||||
const defaultTriggerName = TriggerUtils.getDefaultTriggerName(type)
|
||||
const baseName = defaultTriggerName || blockConfig.name
|
||||
const name = getUniqueBlockName(baseName, blocks)
|
||||
|
||||
// Auto-connect logic
|
||||
const isAutoConnectEnabled = useGeneralStore.getState().isAutoConnectEnabled
|
||||
let autoConnectEdge
|
||||
if (isAutoConnectEnabled && type !== 'starter') {
|
||||
const closestBlock = findClosestOutput(centerPosition)
|
||||
logger.info('Closest block found:', closestBlock)
|
||||
if (closestBlock) {
|
||||
// Don't create edges into trigger blocks
|
||||
const targetBlockConfig = blockConfig
|
||||
const isTargetTrigger = enableTriggerMode || targetBlockConfig?.category === 'triggers'
|
||||
|
||||
if (!isTargetTrigger) {
|
||||
const sourceHandle = determineSourceHandle(closestBlock)
|
||||
|
||||
autoConnectEdge = {
|
||||
id: crypto.randomUUID(),
|
||||
source: closestBlock.id,
|
||||
target: id,
|
||||
sourceHandle,
|
||||
targetHandle: 'target',
|
||||
type: 'workflowEdge',
|
||||
}
|
||||
logger.info('Auto-connect edge created:', autoConnectEdge)
|
||||
} else {
|
||||
logger.info('Skipping auto-connect into trigger block', {
|
||||
target: type,
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Centralized trigger constraints
|
||||
const additionIssue = TriggerUtils.getTriggerAdditionIssue(blocks, type)
|
||||
if (additionIssue) {
|
||||
if (additionIssue.issue === 'legacy') {
|
||||
setTriggerWarning({
|
||||
open: true,
|
||||
triggerName: additionIssue.triggerName,
|
||||
type: TriggerWarningType.LEGACY_INCOMPATIBILITY,
|
||||
})
|
||||
} else {
|
||||
setTriggerWarning({
|
||||
open: true,
|
||||
triggerName: additionIssue.triggerName,
|
||||
type: TriggerWarningType.DUPLICATE_TRIGGER,
|
||||
})
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// Add the block to the workflow with auto-connect edge
|
||||
// Enable trigger mode if this is a trigger-capable block from the triggers tab
|
||||
addBlock(
|
||||
id,
|
||||
type,
|
||||
name,
|
||||
centerPosition,
|
||||
undefined,
|
||||
undefined,
|
||||
undefined,
|
||||
autoConnectEdge,
|
||||
enableTriggerMode
|
||||
)
|
||||
}
|
||||
|
||||
window.addEventListener('add-block-from-toolbar', handleAddBlockFromToolbar as EventListener)
|
||||
|
||||
return () => {
|
||||
window.removeEventListener(
|
||||
'add-block-from-toolbar',
|
||||
handleAddBlockFromToolbar as EventListener
|
||||
)
|
||||
}
|
||||
}, [
|
||||
project,
|
||||
blocks,
|
||||
addBlock,
|
||||
addEdge,
|
||||
findClosestOutput,
|
||||
determineSourceHandle,
|
||||
effectivePermissions.canEdit,
|
||||
setTriggerWarning,
|
||||
])
|
||||
|
||||
/**
|
||||
* Recenter canvas when diff appears
|
||||
* Tracks when diff becomes ready to automatically fit the view with smooth animation
|
||||
* Shared handler for drops of toolbar items onto the workflow canvas.
|
||||
*
|
||||
* This encapsulates the full drop behavior (container handling, auto-connect,
|
||||
* trigger constraints, etc.) so it can be reused both for direct ReactFlow
|
||||
* drops and for drops forwarded from the empty-workflow command list overlay.
|
||||
*
|
||||
* @param data - Drag data from the toolbar (type + optional trigger mode).
|
||||
* @param position - Drop position in ReactFlow coordinates.
|
||||
*/
|
||||
const prevDiffReadyRef = useRef(false)
|
||||
useEffect(() => {
|
||||
// Only recenter when diff transitions from not ready to ready
|
||||
if (isDiffReady && !prevDiffReadyRef.current && diffAnalysis) {
|
||||
logger.info('Diff ready - recentering canvas to show changes')
|
||||
// Use a small delay to ensure the diff has fully rendered
|
||||
setTimeout(() => {
|
||||
fitView({ padding: 0.3, duration: 600 })
|
||||
}, 100)
|
||||
}
|
||||
prevDiffReadyRef.current = isDiffReady
|
||||
}, [isDiffReady, diffAnalysis, fitView])
|
||||
|
||||
// Listen for trigger warning events
|
||||
useEffect(() => {
|
||||
const handleShowTriggerWarning = (event: CustomEvent) => {
|
||||
const { type, triggerName } = event.detail
|
||||
setTriggerWarning({
|
||||
open: true,
|
||||
triggerName: triggerName || 'trigger',
|
||||
type: type === 'trigger_in_subflow' ? TriggerWarningType.TRIGGER_IN_SUBFLOW : type,
|
||||
})
|
||||
}
|
||||
|
||||
window.addEventListener('show-trigger-warning', handleShowTriggerWarning as EventListener)
|
||||
|
||||
return () => {
|
||||
window.removeEventListener('show-trigger-warning', handleShowTriggerWarning as EventListener)
|
||||
}
|
||||
}, [setTriggerWarning])
|
||||
|
||||
// Handler for trigger selection from list
|
||||
const handleTriggerSelect = useCallback(
|
||||
(triggerId: string, enableTriggerMode?: boolean) => {
|
||||
// Get the trigger name
|
||||
const triggerName = TriggerUtils.getDefaultTriggerName(triggerId) || triggerId
|
||||
|
||||
// Create the trigger block at the center of the viewport
|
||||
const centerPosition = project({ x: window.innerWidth / 2, y: window.innerHeight / 2 })
|
||||
const id = crypto.randomUUID()
|
||||
|
||||
// Add the trigger block with trigger mode if specified
|
||||
addBlock(
|
||||
id,
|
||||
triggerId,
|
||||
triggerName,
|
||||
centerPosition,
|
||||
undefined,
|
||||
undefined,
|
||||
undefined,
|
||||
undefined,
|
||||
enableTriggerMode || false
|
||||
)
|
||||
},
|
||||
[project, addBlock]
|
||||
)
|
||||
|
||||
// Update the onDrop handler
|
||||
const onDrop = useCallback(
|
||||
(event: React.DragEvent) => {
|
||||
event.preventDefault()
|
||||
const handleToolbarDrop = useCallback(
|
||||
(data: { type: string; enableTriggerMode?: boolean }, position: { x: number; y: number }) => {
|
||||
if (!data.type || data.type === 'connectionBlock') return
|
||||
|
||||
try {
|
||||
const data = JSON.parse(event.dataTransfer.getData('application/json'))
|
||||
if (data.type === 'connectionBlock') return
|
||||
|
||||
const reactFlowBounds = event.currentTarget.getBoundingClientRect()
|
||||
const position = project({
|
||||
x: event.clientX - reactFlowBounds.left,
|
||||
y: event.clientY - reactFlowBounds.top,
|
||||
})
|
||||
|
||||
// Check if dropping inside a container node (loop or parallel)
|
||||
const containerInfo = isPointInLoopNode(position)
|
||||
|
||||
@@ -806,7 +581,7 @@ const WorkflowContent = React.memo(() => {
|
||||
// Ensure any toolbar drag flags are cleared on drop
|
||||
document.body.classList.remove('sim-drag-subflow')
|
||||
|
||||
// Special handling for container nodes (loop or parallel)
|
||||
// Special handling for container nodes (loop or parallel) dragged from toolbar
|
||||
if (data.type === 'loop' || data.type === 'parallel') {
|
||||
// Create a unique ID and name for the container
|
||||
const id = crypto.randomUUID()
|
||||
@@ -1033,22 +808,307 @@ const WorkflowContent = React.memo(() => {
|
||||
)
|
||||
}
|
||||
} catch (err) {
|
||||
logger.error('Error dropping block:', { err })
|
||||
logger.error('Error handling toolbar drop on workflow canvas', { err })
|
||||
}
|
||||
},
|
||||
[
|
||||
project,
|
||||
blocks,
|
||||
addBlock,
|
||||
addEdge,
|
||||
getNodes,
|
||||
findClosestOutput,
|
||||
determineSourceHandle,
|
||||
isPointInLoopNode,
|
||||
getNodes,
|
||||
resizeLoopNodesWrapper,
|
||||
addBlock,
|
||||
setTriggerWarning,
|
||||
]
|
||||
)
|
||||
|
||||
// Listen for toolbar block click events
|
||||
useEffect(() => {
|
||||
const handleAddBlockFromToolbar = (event: CustomEvent) => {
|
||||
// Check if user has permission to interact with blocks
|
||||
if (!effectivePermissions.canEdit) {
|
||||
return
|
||||
}
|
||||
|
||||
const { type, enableTriggerMode } = event.detail
|
||||
|
||||
if (!type) return
|
||||
if (type === 'connectionBlock') return
|
||||
|
||||
// Special handling for container nodes (loop or parallel)
|
||||
if (type === 'loop' || type === 'parallel') {
|
||||
const id = crypto.randomUUID()
|
||||
const baseName = type === 'loop' ? 'Loop' : 'Parallel'
|
||||
const name = getUniqueBlockName(baseName, blocks)
|
||||
|
||||
const centerPosition = project({
|
||||
x: window.innerWidth / 2,
|
||||
y: window.innerHeight / 2,
|
||||
})
|
||||
|
||||
// Auto-connect logic for container nodes
|
||||
const isAutoConnectEnabled = useGeneralStore.getState().isAutoConnectEnabled
|
||||
let autoConnectEdge
|
||||
if (isAutoConnectEnabled) {
|
||||
const closestBlock = findClosestOutput(centerPosition)
|
||||
if (closestBlock) {
|
||||
const sourceHandle = determineSourceHandle(closestBlock)
|
||||
autoConnectEdge = {
|
||||
id: crypto.randomUUID(),
|
||||
source: closestBlock.id,
|
||||
target: id,
|
||||
sourceHandle,
|
||||
targetHandle: 'target',
|
||||
type: 'workflowEdge',
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Add the container node with default dimensions and auto-connect edge
|
||||
addBlock(
|
||||
id,
|
||||
type,
|
||||
name,
|
||||
centerPosition,
|
||||
{
|
||||
width: 500,
|
||||
height: 300,
|
||||
type: 'subflowNode',
|
||||
},
|
||||
undefined,
|
||||
undefined,
|
||||
autoConnectEdge
|
||||
)
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
const blockConfig = getBlock(type)
|
||||
if (!blockConfig) {
|
||||
logger.error('Invalid block type:', { type })
|
||||
return
|
||||
}
|
||||
|
||||
// Calculate the center position of the viewport
|
||||
const centerPosition = project({
|
||||
x: window.innerWidth / 2,
|
||||
y: window.innerHeight / 2,
|
||||
})
|
||||
|
||||
// Create a new block with a unique ID
|
||||
const id = crypto.randomUUID()
|
||||
// Prefer semantic default names for triggers; then ensure unique numbering centrally
|
||||
const defaultTriggerName = TriggerUtils.getDefaultTriggerName(type)
|
||||
const baseName = defaultTriggerName || blockConfig.name
|
||||
const name = getUniqueBlockName(baseName, blocks)
|
||||
|
||||
// Auto-connect logic
|
||||
const isAutoConnectEnabled = useGeneralStore.getState().isAutoConnectEnabled
|
||||
let autoConnectEdge
|
||||
if (isAutoConnectEnabled && type !== 'starter') {
|
||||
const closestBlock = findClosestOutput(centerPosition)
|
||||
logger.info('Closest block found:', closestBlock)
|
||||
if (closestBlock) {
|
||||
// Don't create edges into trigger blocks
|
||||
const targetBlockConfig = blockConfig
|
||||
const isTargetTrigger = enableTriggerMode || targetBlockConfig?.category === 'triggers'
|
||||
|
||||
if (!isTargetTrigger) {
|
||||
const sourceHandle = determineSourceHandle(closestBlock)
|
||||
|
||||
autoConnectEdge = {
|
||||
id: crypto.randomUUID(),
|
||||
source: closestBlock.id,
|
||||
target: id,
|
||||
sourceHandle,
|
||||
targetHandle: 'target',
|
||||
type: 'workflowEdge',
|
||||
}
|
||||
logger.info('Auto-connect edge created:', autoConnectEdge)
|
||||
} else {
|
||||
logger.info('Skipping auto-connect into trigger block', {
|
||||
target: type,
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Centralized trigger constraints
|
||||
const additionIssue = TriggerUtils.getTriggerAdditionIssue(blocks, type)
|
||||
if (additionIssue) {
|
||||
if (additionIssue.issue === 'legacy') {
|
||||
setTriggerWarning({
|
||||
open: true,
|
||||
triggerName: additionIssue.triggerName,
|
||||
type: TriggerWarningType.LEGACY_INCOMPATIBILITY,
|
||||
})
|
||||
} else {
|
||||
setTriggerWarning({
|
||||
open: true,
|
||||
triggerName: additionIssue.triggerName,
|
||||
type: TriggerWarningType.DUPLICATE_TRIGGER,
|
||||
})
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// Add the block to the workflow with auto-connect edge
|
||||
// Enable trigger mode if this is a trigger-capable block from the triggers tab
|
||||
addBlock(
|
||||
id,
|
||||
type,
|
||||
name,
|
||||
centerPosition,
|
||||
undefined,
|
||||
undefined,
|
||||
undefined,
|
||||
autoConnectEdge,
|
||||
enableTriggerMode
|
||||
)
|
||||
}
|
||||
|
||||
window.addEventListener('add-block-from-toolbar', handleAddBlockFromToolbar as EventListener)
|
||||
|
||||
return () => {
|
||||
window.removeEventListener(
|
||||
'add-block-from-toolbar',
|
||||
handleAddBlockFromToolbar as EventListener
|
||||
)
|
||||
}
|
||||
}, [
|
||||
project,
|
||||
blocks,
|
||||
addBlock,
|
||||
addEdge,
|
||||
findClosestOutput,
|
||||
determineSourceHandle,
|
||||
effectivePermissions.canEdit,
|
||||
setTriggerWarning,
|
||||
])
|
||||
|
||||
/**
|
||||
* Listen for toolbar drops that occur on the empty-workflow overlay (command list).
|
||||
*
|
||||
* The overlay forwards drop events with the cursor position; this handler
|
||||
* computes the corresponding ReactFlow coordinates and delegates to
|
||||
* `handleToolbarDrop` so the behavior matches native canvas drops.
|
||||
*/
|
||||
useEffect(() => {
|
||||
const handleOverlayToolbarDrop = (event: Event) => {
|
||||
const customEvent = event as CustomEvent<{
|
||||
type: string
|
||||
enableTriggerMode?: boolean
|
||||
clientX: number
|
||||
clientY: number
|
||||
}>
|
||||
|
||||
const detail = customEvent.detail
|
||||
if (!detail?.type) return
|
||||
|
||||
try {
|
||||
const canvasElement = document.querySelector('.workflow-container') as HTMLElement | null
|
||||
if (!canvasElement) {
|
||||
logger.warn('Workflow canvas element not found for overlay toolbar drop')
|
||||
return
|
||||
}
|
||||
|
||||
const bounds = canvasElement.getBoundingClientRect()
|
||||
const position = project({
|
||||
x: detail.clientX - bounds.left,
|
||||
y: detail.clientY - bounds.top,
|
||||
})
|
||||
|
||||
handleToolbarDrop(
|
||||
{
|
||||
type: detail.type,
|
||||
enableTriggerMode: detail.enableTriggerMode ?? false,
|
||||
},
|
||||
position
|
||||
)
|
||||
} catch (err) {
|
||||
logger.error('Error handling toolbar drop from empty-workflow overlay', { err })
|
||||
}
|
||||
}
|
||||
|
||||
window.addEventListener(
|
||||
'toolbar-drop-on-empty-workflow-overlay',
|
||||
handleOverlayToolbarDrop as EventListener
|
||||
)
|
||||
|
||||
return () =>
|
||||
window.removeEventListener(
|
||||
'toolbar-drop-on-empty-workflow-overlay',
|
||||
handleOverlayToolbarDrop as EventListener
|
||||
)
|
||||
}, [project, handleToolbarDrop])
|
||||
|
||||
/**
|
||||
* Recenter canvas when diff appears
|
||||
* Tracks when diff becomes ready to automatically fit the view with smooth animation
|
||||
*/
|
||||
const prevDiffReadyRef = useRef(false)
|
||||
useEffect(() => {
|
||||
// Only recenter when diff transitions from not ready to ready
|
||||
if (isDiffReady && !prevDiffReadyRef.current && diffAnalysis) {
|
||||
logger.info('Diff ready - recentering canvas to show changes')
|
||||
// Use a small delay to ensure the diff has fully rendered
|
||||
setTimeout(() => {
|
||||
fitView({ padding: 0.3, duration: 600 })
|
||||
}, 100)
|
||||
}
|
||||
prevDiffReadyRef.current = isDiffReady
|
||||
}, [isDiffReady, diffAnalysis, fitView])
|
||||
|
||||
// Listen for trigger warning events
|
||||
useEffect(() => {
|
||||
const handleShowTriggerWarning = (event: CustomEvent) => {
|
||||
const { type, triggerName } = event.detail
|
||||
setTriggerWarning({
|
||||
open: true,
|
||||
triggerName: triggerName || 'trigger',
|
||||
type: type === 'trigger_in_subflow' ? TriggerWarningType.TRIGGER_IN_SUBFLOW : type,
|
||||
})
|
||||
}
|
||||
|
||||
window.addEventListener('show-trigger-warning', handleShowTriggerWarning as EventListener)
|
||||
|
||||
return () => {
|
||||
window.removeEventListener('show-trigger-warning', handleShowTriggerWarning as EventListener)
|
||||
}
|
||||
}, [setTriggerWarning])
|
||||
|
||||
// Update the onDrop handler to delegate to the shared toolbar-drop handler
|
||||
const onDrop = useCallback(
|
||||
(event: React.DragEvent) => {
|
||||
event.preventDefault()
|
||||
|
||||
try {
|
||||
const raw = event.dataTransfer.getData('application/json')
|
||||
if (!raw) return
|
||||
const data = JSON.parse(raw)
|
||||
if (!data?.type) return
|
||||
|
||||
const reactFlowBounds = event.currentTarget.getBoundingClientRect()
|
||||
const position = project({
|
||||
x: event.clientX - reactFlowBounds.left,
|
||||
y: event.clientY - reactFlowBounds.top,
|
||||
})
|
||||
|
||||
handleToolbarDrop(
|
||||
{
|
||||
type: data.type,
|
||||
enableTriggerMode: data.enableTriggerMode ?? false,
|
||||
},
|
||||
position
|
||||
)
|
||||
} catch (err) {
|
||||
logger.error('Error dropping block on ReactFlow canvas:', { err })
|
||||
}
|
||||
},
|
||||
[project, handleToolbarDrop]
|
||||
)
|
||||
|
||||
const handleCanvasPointerMove = useCallback(
|
||||
(event: React.PointerEvent<Element>) => {
|
||||
const target = event.currentTarget as HTMLElement
|
||||
|
||||
@@ -5,7 +5,7 @@ import * as DialogPrimitive from '@radix-ui/react-dialog'
|
||||
import * as VisuallyHidden from '@radix-ui/react-visually-hidden'
|
||||
import { BookOpen, Layout, RepeatIcon, ScrollText, Search, SplitIcon } from 'lucide-react'
|
||||
import { useParams, useRouter } from 'next/navigation'
|
||||
import { Dialog, DialogOverlay, DialogPortal, DialogTitle } from '@/components/ui/dialog'
|
||||
import { Dialog, DialogPortal, DialogTitle } from '@/components/ui/dialog'
|
||||
import { useBrandConfig } from '@/lib/branding/branding'
|
||||
import { cn } from '@/lib/utils'
|
||||
import { getTriggersForSidebar, hasTriggerCapability } from '@/lib/workflows/trigger-utils'
|
||||
@@ -332,7 +332,7 @@ export function SearchModal({
|
||||
}, [workspaces, workflows, pages, blocks, triggers, tools, docs])
|
||||
|
||||
const sectionOrder = useMemo<SearchItem['type'][]>(
|
||||
() => ['workspace', 'workflow', 'page', 'tool', 'trigger', 'block', 'doc'],
|
||||
() => ['block', 'tool', 'trigger', 'workflow', 'workspace', 'page', 'doc'],
|
||||
[]
|
||||
)
|
||||
|
||||
@@ -447,7 +447,10 @@ export function SearchModal({
|
||||
if (open && selectedIndex >= 0) {
|
||||
const element = document.querySelector(`[data-search-item-index="${selectedIndex}"]`)
|
||||
if (element) {
|
||||
element.scrollIntoView({ block: 'nearest' })
|
||||
element.scrollIntoView({
|
||||
block: 'nearest',
|
||||
behavior: 'auto',
|
||||
})
|
||||
}
|
||||
}
|
||||
}, [selectedIndex, open])
|
||||
@@ -481,16 +484,13 @@ export function SearchModal({
|
||||
trigger: 'Triggers',
|
||||
block: 'Blocks',
|
||||
tool: 'Tools',
|
||||
doc: 'Documentation',
|
||||
doc: 'Docs',
|
||||
}
|
||||
|
||||
return (
|
||||
<Dialog open={open} onOpenChange={onOpenChange}>
|
||||
<DialogPortal>
|
||||
<DialogOverlay
|
||||
className='z-40 bg-white/80 dark:bg-[#1b1b1b]/90'
|
||||
style={{ backdropFilter: 'blur(4px)' }}
|
||||
/>
|
||||
<DialogPrimitive.Overlay className='fixed inset-0 z-40 backdrop-blur-md' />
|
||||
<DialogPrimitive.Content className='fixed top-[15%] left-[50%] z-50 flex w-[500px] translate-x-[-50%] flex-col gap-[12px] p-0 focus:outline-none focus-visible:outline-none'>
|
||||
<VisuallyHidden.Root>
|
||||
<DialogTitle>Search</DialogTitle>
|
||||
@@ -498,13 +498,13 @@ export function SearchModal({
|
||||
|
||||
{/* Search input container */}
|
||||
<div className='flex items-center gap-[8px] rounded-[10px] border border-[var(--border)] bg-[var(--surface-5)] px-[12px] py-[8px] shadow-sm dark:border-[var(--border)] dark:bg-[var(--surface-5)]'>
|
||||
<Search className='h-[16px] w-[16px] flex-shrink-0 text-[var(--text-subtle)] dark:text-[var(--text-subtle)]' />
|
||||
<Search className='h-[15px] w-[15px] flex-shrink-0 text-[var(--text-subtle)] dark:text-[var(--text-subtle)]' />
|
||||
<input
|
||||
type='text'
|
||||
placeholder='Search anything...'
|
||||
value={searchQuery}
|
||||
onChange={(e) => setSearchQuery(e.target.value)}
|
||||
className='w-full border-0 bg-transparent font-base text-[18px] text-[var(--text-primary)] placeholder:text-[var(--text-secondary)] focus:outline-none dark:text-[var(--text-primary)] dark:placeholder:text-[var(--text-secondary)]'
|
||||
className='w-full border-0 bg-transparent font-base text-[15px] text-[var(--text-primary)] placeholder:text-[var(--text-secondary)] focus:outline-none dark:text-[var(--text-primary)] dark:placeholder:text-[var(--text-secondary)]'
|
||||
autoFocus
|
||||
/>
|
||||
</div>
|
||||
|
||||
@@ -2,14 +2,12 @@
|
||||
|
||||
import type { ReactNode } from 'react'
|
||||
import { Badge } from '@/components/emcn'
|
||||
import { calculateFilledPills, USAGE_PILL_COUNT } from '@/lib/subscription/usage-visualization'
|
||||
import { cn } from '@/lib/utils'
|
||||
|
||||
const GRADIENT_BADGE_STYLES =
|
||||
'gradient-text h-[1.125rem] rounded-[6px] border-gradient-primary/20 bg-gradient-to-b from-gradient-primary via-gradient-secondary to-gradient-primary px-2 py-0 font-medium text-xs cursor-pointer'
|
||||
|
||||
// Constants matching UsageIndicator
|
||||
const PILL_COUNT = 8
|
||||
|
||||
interface UsageHeaderProps {
|
||||
title: string
|
||||
gradientTitle?: boolean
|
||||
@@ -45,9 +43,9 @@ export function UsageHeader({
|
||||
}: UsageHeaderProps) {
|
||||
const progress = progressValue ?? (limit > 0 ? Math.min((current / limit) * 100, 100) : 0)
|
||||
|
||||
// Calculate filled pills based on usage percentage
|
||||
const filledPillsCount = Math.ceil((progress / 100) * PILL_COUNT)
|
||||
const isAlmostOut = filledPillsCount === PILL_COUNT
|
||||
// Calculate filled pills based on usage percentage using shared utility (fixed 8 pills)
|
||||
const filledPillsCount = calculateFilledPills(progress)
|
||||
const isAlmostOut = filledPillsCount === USAGE_PILL_COUNT
|
||||
|
||||
return (
|
||||
<div className='rounded-[8px] border bg-background p-3 shadow-xs'>
|
||||
@@ -93,9 +91,9 @@ export function UsageHeader({
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Pills row - matching UsageIndicator */}
|
||||
{/* Pills row - fixed 8 pills with shared heuristic */}
|
||||
<div className='flex items-center gap-[4px]'>
|
||||
{Array.from({ length: PILL_COUNT }).map((_, i) => {
|
||||
{Array.from({ length: USAGE_PILL_COUNT }).map((_, i) => {
|
||||
const isFilled = i < filledPillsCount
|
||||
return (
|
||||
<div
|
||||
|
||||
@@ -34,8 +34,9 @@ import {
|
||||
getSubscriptionPermissions,
|
||||
getVisiblePlans,
|
||||
} from '@/app/workspace/[workspaceId]/w/components/sidebar/components-new/settings-modal/components/subscription/subscription-permissions'
|
||||
import { useUpdateGeneralSetting } from '@/hooks/queries/general-settings'
|
||||
import { useOrganizationBilling, useOrganizations } from '@/hooks/queries/organization'
|
||||
import { useSubscriptionData, useUsageData, useUsageLimitData } from '@/hooks/queries/subscription'
|
||||
import { useSubscriptionData, useUsageLimitData } from '@/hooks/queries/subscription'
|
||||
import { useUpdateWorkspaceSettings, useWorkspaceSettings } from '@/hooks/queries/workspace'
|
||||
import { useGeneralStore } from '@/stores/settings/general/store'
|
||||
|
||||
@@ -170,7 +171,6 @@ export function Subscription({ onOpenChange }: SubscriptionProps) {
|
||||
|
||||
// React Query hooks for data fetching
|
||||
const { data: subscriptionData, isLoading: isSubscriptionLoading } = useSubscriptionData()
|
||||
const { data: usageResponse, isLoading: isUsageLoading } = useUsageData()
|
||||
const { data: usageLimitResponse, isLoading: isUsageLimitLoading } = useUsageLimitData()
|
||||
const { data: workspaceData, isLoading: isWorkspaceLoading } = useWorkspaceSettings(workspaceId)
|
||||
const updateWorkspaceMutation = useUpdateWorkspaceSettings()
|
||||
@@ -188,38 +188,38 @@ export function Subscription({ onOpenChange }: SubscriptionProps) {
|
||||
const usageLimitRef = useRef<UsageLimitRef | null>(null)
|
||||
|
||||
// Combine all loading states
|
||||
const isLoading =
|
||||
isSubscriptionLoading || isUsageLoading || isUsageLimitLoading || isWorkspaceLoading
|
||||
const isLoading = isSubscriptionLoading || isUsageLimitLoading || isWorkspaceLoading
|
||||
|
||||
// Extract subscription status from data
|
||||
// Extract subscription status from subscriptionData.data
|
||||
const subscription = {
|
||||
isFree: subscriptionData?.plan === 'free' || !subscriptionData?.plan,
|
||||
isPro: subscriptionData?.plan === 'pro',
|
||||
isTeam: subscriptionData?.plan === 'team',
|
||||
isEnterprise: subscriptionData?.plan === 'enterprise',
|
||||
isFree: subscriptionData?.data?.plan === 'free' || !subscriptionData?.data?.plan,
|
||||
isPro: subscriptionData?.data?.plan === 'pro',
|
||||
isTeam: subscriptionData?.data?.plan === 'team',
|
||||
isEnterprise: subscriptionData?.data?.plan === 'enterprise',
|
||||
isPaid:
|
||||
subscriptionData?.plan &&
|
||||
['pro', 'team', 'enterprise'].includes(subscriptionData.plan) &&
|
||||
subscriptionData?.status === 'active',
|
||||
plan: subscriptionData?.plan || 'free',
|
||||
status: subscriptionData?.status || 'inactive',
|
||||
seats: subscriptionData?.seats || 1,
|
||||
subscriptionData?.data?.plan &&
|
||||
['pro', 'team', 'enterprise'].includes(subscriptionData.data.plan) &&
|
||||
subscriptionData?.data?.status === 'active',
|
||||
plan: subscriptionData?.data?.plan || 'free',
|
||||
status: subscriptionData?.data?.status || 'inactive',
|
||||
seats: subscriptionData?.data?.seats || 1,
|
||||
}
|
||||
|
||||
// Extract usage data
|
||||
// Extract usage data from subscriptionData.data.usage (same source as panel usage indicator)
|
||||
const usage = {
|
||||
current: usageResponse?.usage?.current || 0,
|
||||
limit: usageResponse?.usage?.limit || 0,
|
||||
percentUsed: usageResponse?.usage?.percentUsed || 0,
|
||||
current: subscriptionData?.data?.usage?.current || 0,
|
||||
limit: subscriptionData?.data?.usage?.limit || 0,
|
||||
percentUsed: subscriptionData?.data?.usage?.percentUsed || 0,
|
||||
}
|
||||
|
||||
// Extract usage limit metadata from usageLimitResponse.data
|
||||
const usageLimitData = {
|
||||
currentLimit: usageLimitResponse?.usage?.limit || 0,
|
||||
minimumLimit: usageLimitResponse?.usage?.minimumLimit || (subscription.isPro ? 20 : 40),
|
||||
currentLimit: usageLimitResponse?.data?.currentLimit || 0,
|
||||
minimumLimit: usageLimitResponse?.data?.minimumLimit || (subscription.isPro ? 20 : 40),
|
||||
}
|
||||
|
||||
// Extract billing status
|
||||
const billingStatus = subscriptionData?.billingBlocked ? 'blocked' : 'ok'
|
||||
const billingStatus = subscriptionData?.data?.billingBlocked ? 'blocked' : 'ok'
|
||||
|
||||
// Extract workspace settings
|
||||
const billedAccountUserId = workspaceData?.settings?.workspace?.billedAccountUserId ?? null
|
||||
@@ -406,20 +406,18 @@ export function Subscription({ onOpenChange }: SubscriptionProps) {
|
||||
? usage.current // placeholder; rightContent will render UsageLimit
|
||||
: usage.limit
|
||||
}
|
||||
isBlocked={Boolean(subscriptionData?.billingBlocked)}
|
||||
isBlocked={Boolean(subscriptionData?.data?.billingBlocked)}
|
||||
status={billingStatus}
|
||||
percentUsed={
|
||||
subscription.isEnterprise || subscription.isTeam
|
||||
? organizationBillingData?.totalUsageLimit &&
|
||||
organizationBillingData.totalUsageLimit > 0 &&
|
||||
organizationBillingData.totalCurrentUsage !== undefined
|
||||
? Math.round(
|
||||
(organizationBillingData.totalCurrentUsage /
|
||||
organizationBillingData.totalUsageLimit) *
|
||||
100
|
||||
)
|
||||
: Math.round(usage.percentUsed)
|
||||
: Math.round(usage.percentUsed)
|
||||
? (organizationBillingData.totalCurrentUsage /
|
||||
organizationBillingData.totalUsageLimit) *
|
||||
100
|
||||
: usage.percentUsed
|
||||
: usage.percentUsed
|
||||
}
|
||||
onResolvePayment={async () => {
|
||||
try {
|
||||
@@ -467,7 +465,7 @@ export function Subscription({ onOpenChange }: SubscriptionProps) {
|
||||
/>
|
||||
) : undefined
|
||||
}
|
||||
progressValue={Math.min(Math.round(usage.percentUsed), 100)}
|
||||
progressValue={Math.min(usage.percentUsed, 100)}
|
||||
/>
|
||||
</div>
|
||||
|
||||
@@ -544,11 +542,11 @@ export function Subscription({ onOpenChange }: SubscriptionProps) {
|
||||
)}
|
||||
|
||||
{/* Next Billing Date */}
|
||||
{subscription.isPaid && subscriptionData?.periodEnd && (
|
||||
{subscription.isPaid && subscriptionData?.data?.periodEnd && (
|
||||
<div className='mt-4 flex items-center justify-between'>
|
||||
<span className='font-medium text-sm'>Next Billing Date</span>
|
||||
<span className='text-muted-foreground text-sm'>
|
||||
{new Date(subscriptionData.periodEnd).toLocaleDateString()}
|
||||
{new Date(subscriptionData.data.periodEnd).toLocaleDateString()}
|
||||
</span>
|
||||
</div>
|
||||
)}
|
||||
@@ -574,8 +572,8 @@ export function Subscription({ onOpenChange }: SubscriptionProps) {
|
||||
isPaid: subscription.isPaid,
|
||||
}}
|
||||
subscriptionData={{
|
||||
periodEnd: subscriptionData?.periodEnd || null,
|
||||
cancelAtPeriodEnd: subscriptionData?.cancelAtPeriodEnd,
|
||||
periodEnd: subscriptionData?.data?.periodEnd || null,
|
||||
cancelAtPeriodEnd: subscriptionData?.data?.cancelAtPeriodEnd,
|
||||
}}
|
||||
/>
|
||||
</div>
|
||||
@@ -607,7 +605,7 @@ export function Subscription({ onOpenChange }: SubscriptionProps) {
|
||||
<SelectTrigger className='h-8 w-[200px] justify-between text-left text-xs'>
|
||||
<SelectValue placeholder='Select admin' />
|
||||
</SelectTrigger>
|
||||
<SelectContent align='start' className='z-50'>
|
||||
<SelectContent align='start' className='z-[10000050]'>
|
||||
<SelectGroup>
|
||||
<SelectLabel className='px-3 py-1 text-[11px] text-muted-foreground uppercase'>
|
||||
Workspace admins
|
||||
@@ -629,9 +627,9 @@ export function Subscription({ onOpenChange }: SubscriptionProps) {
|
||||
}
|
||||
|
||||
function BillingUsageNotificationsToggle() {
|
||||
const isLoading = useGeneralStore((s) => s.isBillingUsageNotificationsLoading)
|
||||
const enabled = useGeneralStore((s) => s.isBillingUsageNotificationsEnabled)
|
||||
const setEnabled = useGeneralStore((s) => s.setBillingUsageNotificationsEnabled)
|
||||
const updateSetting = useUpdateGeneralSetting()
|
||||
const isLoading = updateSetting.isPending
|
||||
|
||||
// Settings are automatically loaded by SettingsLoader provider
|
||||
// No need to load here - Zustand is synced from React Query
|
||||
@@ -646,7 +644,9 @@ function BillingUsageNotificationsToggle() {
|
||||
checked={!!enabled}
|
||||
disabled={isLoading}
|
||||
onCheckedChange={(v: boolean) => {
|
||||
void setEnabled(v)
|
||||
if (v !== enabled) {
|
||||
updateSetting.mutate({ key: 'billingUsageNotificationsEnabled', value: v })
|
||||
}
|
||||
}}
|
||||
/>
|
||||
</div>
|
||||
|
||||
@@ -128,7 +128,7 @@ export function TeamSeatsOverview({
|
||||
key={i}
|
||||
className={cn(
|
||||
'h-[6px] flex-1 rounded-full transition-colors',
|
||||
isFilled ? 'bg-[#4285F4]' : 'bg-[#2C2C2C]'
|
||||
isFilled ? 'bg-[#34B5FF]' : 'bg-[#2C2C2C]'
|
||||
)}
|
||||
/>
|
||||
)
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
'use client'
|
||||
|
||||
import { useMemo } from 'react'
|
||||
import { useEffect, useMemo, useState } from 'react'
|
||||
import { Button } from '@/components/emcn'
|
||||
import { Skeleton } from '@/components/ui'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
@@ -30,6 +30,14 @@ const MAX_PILL_COUNT = 8
|
||||
*/
|
||||
const WIDTH_PER_PILL = 50
|
||||
|
||||
/**
|
||||
* Animation configuration for usage pills
|
||||
* Controls how smoothly and quickly the highlight progresses across pills
|
||||
*/
|
||||
const PILL_ANIMATION_TICK_MS = 30
|
||||
const PILLS_PER_SECOND = 1.8
|
||||
const PILL_STEP_PER_TICK = (PILLS_PER_SECOND * PILL_ANIMATION_TICK_MS) / 1000
|
||||
|
||||
/**
|
||||
* Plan name mapping
|
||||
*/
|
||||
@@ -49,8 +57,8 @@ export function UsageIndicator({ onClick }: UsageIndicatorProps) {
|
||||
const sidebarWidth = useSidebarStore((state) => state.sidebarWidth)
|
||||
|
||||
/**
|
||||
* Calculate pill count based on sidebar width
|
||||
* Starts at MIN_PILL_COUNT at minimum width, adds 1 pill per WIDTH_PER_PILL increase
|
||||
* Calculate pill count based on sidebar width (6-8 pills dynamically)
|
||||
* This provides responsive feedback as the sidebar width changes
|
||||
*/
|
||||
const pillCount = useMemo(() => {
|
||||
const widthDelta = sidebarWidth - MIN_SIDEBAR_WIDTH
|
||||
@@ -62,6 +70,67 @@ export function UsageIndicator({ onClick }: UsageIndicatorProps) {
|
||||
const usage = getUsage(subscriptionData?.data)
|
||||
const subscription = getSubscriptionStatus(subscriptionData?.data)
|
||||
|
||||
const progressPercentage = Math.min(usage.percentUsed, 100)
|
||||
|
||||
const planType = subscription.isEnterprise
|
||||
? 'enterprise'
|
||||
: subscription.isTeam
|
||||
? 'team'
|
||||
: subscription.isPro
|
||||
? 'pro'
|
||||
: 'free'
|
||||
|
||||
const billingStatus = getBillingStatus(subscriptionData?.data)
|
||||
const isBlocked = billingStatus === 'blocked'
|
||||
const showUpgradeButton = planType === 'free' || isBlocked
|
||||
|
||||
/**
|
||||
* Calculate which pills should be filled based on usage percentage
|
||||
* Uses shared Math.ceil heuristic but with dynamic pill count (6-8)
|
||||
* This ensures consistent calculation logic while maintaining responsive pill count
|
||||
*/
|
||||
const filledPillsCount = Math.ceil((progressPercentage / 100) * pillCount)
|
||||
const isAlmostOut = filledPillsCount === pillCount
|
||||
|
||||
const [isHovered, setIsHovered] = useState(false)
|
||||
const [wavePosition, setWavePosition] = useState<number | null>(null)
|
||||
const [hasWrapped, setHasWrapped] = useState(false)
|
||||
|
||||
const startAnimationIndex = pillCount === 0 ? 0 : Math.min(filledPillsCount, pillCount - 1)
|
||||
|
||||
useEffect(() => {
|
||||
if (!isHovered || pillCount <= 0) {
|
||||
setWavePosition(null)
|
||||
setHasWrapped(false)
|
||||
return
|
||||
}
|
||||
|
||||
const totalSpan = pillCount
|
||||
let wrapped = false
|
||||
setHasWrapped(false)
|
||||
setWavePosition(0)
|
||||
|
||||
const interval = window.setInterval(() => {
|
||||
setWavePosition((prev) => {
|
||||
const current = prev ?? 0
|
||||
const next = current + PILL_STEP_PER_TICK
|
||||
|
||||
// Mark as wrapped after first complete cycle
|
||||
if (next >= totalSpan && !wrapped) {
|
||||
wrapped = true
|
||||
setHasWrapped(true)
|
||||
}
|
||||
|
||||
// Return continuous value, never reset (seamless loop)
|
||||
return next
|
||||
})
|
||||
}, PILL_ANIMATION_TICK_MS)
|
||||
|
||||
return () => {
|
||||
window.clearInterval(interval)
|
||||
}
|
||||
}, [isHovered, pillCount, startAnimationIndex])
|
||||
|
||||
if (isLoading) {
|
||||
return (
|
||||
<div className='flex flex-shrink-0 flex-col gap-[8px] border-t pt-[12px] pr-[13.5px] pb-[10px] pl-[12px] dark:border-[var(--border)]'>
|
||||
@@ -84,26 +153,6 @@ export function UsageIndicator({ onClick }: UsageIndicatorProps) {
|
||||
)
|
||||
}
|
||||
|
||||
const progressPercentage = Math.min(usage.percentUsed, 100)
|
||||
|
||||
const planType = subscription.isEnterprise
|
||||
? 'enterprise'
|
||||
: subscription.isTeam
|
||||
? 'team'
|
||||
: subscription.isPro
|
||||
? 'pro'
|
||||
: 'free'
|
||||
|
||||
const billingStatus = getBillingStatus(subscriptionData?.data)
|
||||
const isBlocked = billingStatus === 'blocked'
|
||||
const showUpgradeButton = planType === 'free' || isBlocked
|
||||
|
||||
/**
|
||||
* Calculate which pills should be filled based on usage percentage
|
||||
*/
|
||||
const filledPillsCount = Math.ceil((progressPercentage / 100) * pillCount)
|
||||
const isAlmostOut = filledPillsCount === pillCount
|
||||
|
||||
const handleClick = () => {
|
||||
try {
|
||||
if (onClick) {
|
||||
@@ -125,7 +174,12 @@ export function UsageIndicator({ onClick }: UsageIndicatorProps) {
|
||||
}
|
||||
|
||||
return (
|
||||
<div className='flex flex-shrink-0 flex-col gap-[8px] border-t px-[13.5px] pt-[8px] pb-[10px] dark:border-[var(--border)]'>
|
||||
<div
|
||||
className='group flex flex-shrink-0 cursor-pointer flex-col gap-[8px] border-t px-[13.5px] pt-[8px] pb-[10px]'
|
||||
onClick={handleClick}
|
||||
onMouseEnter={() => setIsHovered(true)}
|
||||
onMouseLeave={() => setIsHovered(false)}
|
||||
>
|
||||
{/* Top row */}
|
||||
<div className='flex items-center justify-between'>
|
||||
<div className='flex items-center gap-[6px]'>
|
||||
@@ -153,10 +207,10 @@ export function UsageIndicator({ onClick }: UsageIndicatorProps) {
|
||||
{showUpgradeButton && (
|
||||
<Button
|
||||
variant='ghost'
|
||||
className='!h-auto !px-1 !py-0 -mx-1 mt-[-2px] text-[var(--text-secondary)]'
|
||||
className='-mx-1 !h-auto !px-1 !py-0 !text-[#F473B7] group-hover:!text-[#F789C4] mt-[-2px] transition-colors duration-100'
|
||||
onClick={handleClick}
|
||||
>
|
||||
Upgrade
|
||||
<span className='font-medium text-[12px]'>Upgrade</span>
|
||||
</Button>
|
||||
)}
|
||||
</div>
|
||||
@@ -165,12 +219,75 @@ export function UsageIndicator({ onClick }: UsageIndicatorProps) {
|
||||
<div className='flex items-center gap-[4px]'>
|
||||
{Array.from({ length: pillCount }).map((_, i) => {
|
||||
const isFilled = i < filledPillsCount
|
||||
|
||||
const baseColor = isFilled ? (isAlmostOut ? '#ef4444' : '#34B5FF') : '#414141'
|
||||
|
||||
let backgroundColor = baseColor
|
||||
let backgroundImage: string | undefined
|
||||
|
||||
if (isHovered && wavePosition !== null && pillCount > 0) {
|
||||
const totalSpan = pillCount
|
||||
const grayColor = '#414141'
|
||||
const activeColor = isAlmostOut ? '#ef4444' : '#34B5FF'
|
||||
|
||||
if (!hasWrapped) {
|
||||
// First pass: respect original fill state, start from startAnimationIndex
|
||||
const headIndex = Math.floor(wavePosition)
|
||||
const progress = wavePosition - headIndex
|
||||
|
||||
const pillOffsetFromStart =
|
||||
i >= startAnimationIndex
|
||||
? i - startAnimationIndex
|
||||
: totalSpan - startAnimationIndex + i
|
||||
|
||||
if (pillOffsetFromStart < headIndex) {
|
||||
backgroundColor = baseColor
|
||||
backgroundImage = `linear-gradient(to right, ${activeColor} 0%, ${activeColor} 100%)`
|
||||
} else if (pillOffsetFromStart === headIndex) {
|
||||
const fillPercent = Math.max(0, Math.min(1, progress)) * 100
|
||||
backgroundColor = baseColor
|
||||
backgroundImage = `linear-gradient(to right, ${activeColor} 0%, ${activeColor} ${fillPercent}%, ${baseColor} ${fillPercent}%, ${baseColor} 100%)`
|
||||
}
|
||||
} else {
|
||||
// Subsequent passes: render wave at BOTH current and next-cycle positions for seamless wrap
|
||||
const wrappedPosition = wavePosition % totalSpan
|
||||
const currentHead = Math.floor(wrappedPosition)
|
||||
const progress = wrappedPosition - currentHead
|
||||
|
||||
// Primary wave position
|
||||
const primaryFilled = i < currentHead
|
||||
const primaryActive = i === currentHead
|
||||
|
||||
// Secondary wave position (one full cycle ahead, wraps to beginning)
|
||||
const secondaryHead = Math.floor(wavePosition + totalSpan) % totalSpan
|
||||
const secondaryProgress =
|
||||
wavePosition + totalSpan - Math.floor(wavePosition + totalSpan)
|
||||
const secondaryFilled = i < secondaryHead
|
||||
const secondaryActive = i === secondaryHead
|
||||
|
||||
// Render: pill is filled if either wave position has filled it
|
||||
if (primaryFilled || secondaryFilled) {
|
||||
backgroundColor = grayColor
|
||||
backgroundImage = `linear-gradient(to right, ${activeColor} 0%, ${activeColor} 100%)`
|
||||
} else if (primaryActive || secondaryActive) {
|
||||
const activeProgress = primaryActive ? progress : secondaryProgress
|
||||
const fillPercent = Math.max(0, Math.min(1, activeProgress)) * 100
|
||||
backgroundColor = grayColor
|
||||
backgroundImage = `linear-gradient(to right, ${activeColor} 0%, ${activeColor} ${fillPercent}%, ${grayColor} ${fillPercent}%, ${grayColor} 100%)`
|
||||
} else {
|
||||
backgroundColor = grayColor
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return (
|
||||
<div
|
||||
key={i}
|
||||
className='h-[6px] flex-1 rounded-[2px]'
|
||||
style={{
|
||||
backgroundColor: isFilled ? (isAlmostOut ? '#ef4444' : '#34B5FF') : '#414141',
|
||||
backgroundColor,
|
||||
backgroundImage,
|
||||
transition: isHovered ? 'none' : 'background-color 200ms',
|
||||
}}
|
||||
/>
|
||||
)
|
||||
|
||||
@@ -13,7 +13,8 @@ import {
|
||||
useItemRename,
|
||||
} from '@/app/workspace/[workspaceId]/w/components/sidebar/hooks'
|
||||
import { useDeleteFolder, useDuplicateFolder } from '@/app/workspace/[workspaceId]/w/hooks'
|
||||
import { type FolderTreeNode, useFolderStore } from '@/stores/folders/store'
|
||||
import { useUpdateFolder } from '@/hooks/queries/folders'
|
||||
import type { FolderTreeNode } from '@/stores/folders/store'
|
||||
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
||||
|
||||
interface FolderItemProps {
|
||||
@@ -37,7 +38,7 @@ export function FolderItem({ folder, level, hoverHandlers }: FolderItemProps) {
|
||||
const params = useParams()
|
||||
const router = useRouter()
|
||||
const workspaceId = params.workspaceId as string
|
||||
const { updateFolderAPI } = useFolderStore()
|
||||
const updateFolderMutation = useUpdateFolder()
|
||||
const { createWorkflow } = useWorkflowRegistry()
|
||||
|
||||
// Delete modal state
|
||||
@@ -125,7 +126,11 @@ export function FolderItem({ folder, level, hoverHandlers }: FolderItemProps) {
|
||||
} = useItemRename({
|
||||
initialName: folder.name,
|
||||
onSave: async (newName) => {
|
||||
await updateFolderAPI(folder.id, { name: newName })
|
||||
await updateFolderMutation.mutateAsync({
|
||||
workspaceId,
|
||||
id: folder.id,
|
||||
updates: { name: newName },
|
||||
})
|
||||
},
|
||||
itemType: 'folder',
|
||||
itemId: folder.id,
|
||||
|
||||
@@ -10,6 +10,7 @@ import {
|
||||
useWorkflowSelection,
|
||||
} from '@/app/workspace/[workspaceId]/w/components/sidebar/hooks'
|
||||
import { useImportWorkflow } from '@/app/workspace/[workspaceId]/w/hooks/use-import-workflow'
|
||||
import { useFolders } from '@/hooks/queries/folders'
|
||||
import { type FolderTreeNode, useFolderStore } from '@/stores/folders/store'
|
||||
import type { WorkflowMetadata } from '@/stores/workflows/registry/types'
|
||||
|
||||
@@ -56,14 +57,9 @@ export function WorkflowList({
|
||||
const workspaceId = params.workspaceId as string
|
||||
const workflowId = params.workflowId as string
|
||||
|
||||
const {
|
||||
getFolderTree,
|
||||
expandedFolders,
|
||||
fetchFolders,
|
||||
isLoading: foldersLoading,
|
||||
getFolderPath,
|
||||
setExpanded,
|
||||
} = useFolderStore()
|
||||
const { isLoading: foldersLoading } = useFolders(workspaceId)
|
||||
|
||||
const { getFolderTree, expandedFolders, getFolderPath, setExpanded } = useFolderStore()
|
||||
|
||||
const {
|
||||
dropTargetId,
|
||||
@@ -169,15 +165,6 @@ export function WorkflowList({
|
||||
}
|
||||
}, [workflowId, activeWorkflowFolderId, isLoading, foldersLoading, getFolderPath, setExpanded])
|
||||
|
||||
/**
|
||||
* Fetch folders when workspace changes
|
||||
*/
|
||||
useEffect(() => {
|
||||
if (workspaceId) {
|
||||
fetchFolders(workspaceId)
|
||||
}
|
||||
}, [workspaceId, fetchFolders])
|
||||
|
||||
const renderWorkflowItem = useCallback(
|
||||
(workflow: WorkflowMetadata, level: number, parentFolderId: string | null = null) => (
|
||||
<div key={workflow.id} className='relative' {...createItemDragHandlers(parentFolderId)}>
|
||||
|
||||
@@ -14,7 +14,7 @@ import {
|
||||
extractWorkflowsFromZip,
|
||||
} from '@/lib/workflows/import-export'
|
||||
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
|
||||
import { useFolderStore } from '@/stores/folders/store'
|
||||
import { useCreateFolder } from '@/hooks/queries/folders'
|
||||
import { useWorkflowDiffStore } from '@/stores/workflow-diff/store'
|
||||
import { parseWorkflowJson } from '@/stores/workflows/json/importer'
|
||||
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
||||
@@ -43,7 +43,7 @@ export function CreateMenu({ onCreateWorkflow, isCreatingWorkflow = false }: Cre
|
||||
const params = useParams()
|
||||
const router = useRouter()
|
||||
const workspaceId = params.workspaceId as string
|
||||
const { createFolder } = useFolderStore()
|
||||
const createFolderMutation = useCreateFolder()
|
||||
const { createWorkflow } = useWorkflowRegistry()
|
||||
const userPermissions = useUserPermissionsContext()
|
||||
const fileInputRef = useRef<HTMLInputElement>(null)
|
||||
@@ -110,14 +110,14 @@ export function CreateMenu({ onCreateWorkflow, isCreatingWorkflow = false }: Cre
|
||||
try {
|
||||
setIsCreating(true)
|
||||
const folderName = await generateFolderName(workspaceId)
|
||||
await createFolder({ name: folderName, workspaceId })
|
||||
await createFolderMutation.mutateAsync({ name: folderName, workspaceId })
|
||||
logger.info(`Created folder: ${folderName}`)
|
||||
} catch (error) {
|
||||
logger.error('Failed to create folder:', { error })
|
||||
} finally {
|
||||
setIsCreating(false)
|
||||
}
|
||||
}, [createFolder, workspaceId, isCreating])
|
||||
}, [createFolderMutation, workspaceId, isCreating])
|
||||
|
||||
const handleImportWorkflow = useCallback(() => {
|
||||
setIsOpen(false)
|
||||
@@ -143,9 +143,8 @@ export function CreateMenu({ onCreateWorkflow, isCreatingWorkflow = false }: Cre
|
||||
const { workflows: extractedWorkflows, metadata } = await extractWorkflowsFromZip(zipFile)
|
||||
importedWorkflows = extractedWorkflows
|
||||
|
||||
const { createFolder } = useFolderStore.getState()
|
||||
const folderName = metadata?.workspaceName || zipFile.name.replace(/\.zip$/i, '')
|
||||
const importFolder = await createFolder({
|
||||
const importFolder = await createFolderMutation.mutateAsync({
|
||||
name: folderName,
|
||||
workspaceId,
|
||||
})
|
||||
@@ -175,7 +174,7 @@ export function CreateMenu({ onCreateWorkflow, isCreatingWorkflow = false }: Cre
|
||||
const pathSegment = workflow.folderPath.slice(0, i + 1).join('/')
|
||||
|
||||
if (!folderMap.has(pathSegment)) {
|
||||
const subFolder = await createFolder({
|
||||
const subFolder = await createFolderMutation.mutateAsync({
|
||||
name: workflow.folderPath[i],
|
||||
workspaceId,
|
||||
parentId,
|
||||
@@ -302,9 +301,6 @@ export function CreateMenu({ onCreateWorkflow, isCreatingWorkflow = false }: Cre
|
||||
|
||||
const { loadWorkflows } = useWorkflowRegistry.getState()
|
||||
await loadWorkflows(workspaceId)
|
||||
|
||||
const { fetchFolders } = useFolderStore.getState()
|
||||
await fetchFolders(workspaceId)
|
||||
} catch (error) {
|
||||
logger.error('Failed to import workflows:', error)
|
||||
} finally {
|
||||
@@ -314,7 +310,7 @@ export function CreateMenu({ onCreateWorkflow, isCreatingWorkflow = false }: Cre
|
||||
}
|
||||
}
|
||||
},
|
||||
[workspaceId, createWorkflow]
|
||||
[workspaceId, createWorkflow, createFolderMutation]
|
||||
)
|
||||
|
||||
// Button event handlers
|
||||
|
||||
@@ -18,6 +18,7 @@ import {
|
||||
import { Button } from '@/components/ui/button'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
|
||||
import { useDeleteFolderMutation, useUpdateFolder } from '@/hooks/queries/folders'
|
||||
import { type FolderTreeNode, useFolderStore } from '@/stores/folders/store'
|
||||
|
||||
const logger = createLogger('FolderItem')
|
||||
@@ -45,7 +46,9 @@ export function FolderItem({
|
||||
isFirstItem = false,
|
||||
level,
|
||||
}: FolderItemProps) {
|
||||
const { expandedFolders, toggleExpanded, updateFolderAPI, deleteFolder } = useFolderStore()
|
||||
const { expandedFolders, toggleExpanded } = useFolderStore()
|
||||
const updateFolderMutation = useUpdateFolder()
|
||||
const deleteFolderMutation = useDeleteFolderMutation()
|
||||
const [showDeleteDialog, setShowDeleteDialog] = useState(false)
|
||||
const [isDragging, setIsDragging] = useState(false)
|
||||
const [isEditing, setIsEditing] = useState(false)
|
||||
@@ -127,7 +130,11 @@ export function FolderItem({
|
||||
|
||||
setIsRenaming(true)
|
||||
try {
|
||||
await updateFolderAPI(folder.id, { name: editValue.trim() })
|
||||
await updateFolderMutation.mutateAsync({
|
||||
workspaceId,
|
||||
id: folder.id,
|
||||
updates: { name: editValue.trim() },
|
||||
})
|
||||
logger.info(`Successfully renamed folder from "${folder.name}" to "${editValue.trim()}"`)
|
||||
setIsEditing(false)
|
||||
} catch (error) {
|
||||
@@ -171,7 +178,7 @@ export function FolderItem({
|
||||
setShowDeleteDialog(false)
|
||||
|
||||
try {
|
||||
await deleteFolder(folder.id, workspaceId)
|
||||
await deleteFolderMutation.mutateAsync({ id: folder.id, workspaceId })
|
||||
} catch (error) {
|
||||
logger.error('Failed to delete folder:', { error })
|
||||
}
|
||||
|
||||
@@ -7,7 +7,8 @@ import { Skeleton } from '@/components/ui/skeleton'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { FolderItem } from '@/app/workspace/[workspaceId]/w/components/sidebar/components/folder-tree/components/folder-item'
|
||||
import { WorkflowItem } from '@/app/workspace/[workspaceId]/w/components/sidebar/components/folder-tree/components/workflow-item'
|
||||
import { type FolderTreeNode, useFolderStore } from '@/stores/folders/store'
|
||||
import { useFolders, useUpdateFolder } from '@/hooks/queries/folders'
|
||||
import { type FolderTreeNode, useFolderStore, type WorkflowFolder } from '@/stores/folders/store'
|
||||
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
||||
import type { WorkflowMetadata } from '@/stores/workflows/registry/types'
|
||||
|
||||
@@ -390,24 +391,18 @@ export function FolderTree({
|
||||
const params = useParams()
|
||||
const workspaceId = params.workspaceId as string
|
||||
const workflowId = params.workflowId as string
|
||||
const {
|
||||
getFolderTree,
|
||||
expandedFolders,
|
||||
fetchFolders,
|
||||
isLoading: foldersLoading,
|
||||
clearSelection,
|
||||
updateFolderAPI,
|
||||
getFolderPath,
|
||||
setExpanded,
|
||||
} = useFolderStore()
|
||||
const foldersQuery = useFolders(workspaceId)
|
||||
const updateFolder = useUpdateFolder()
|
||||
const { getFolderTree, expandedFolders, clearSelection, getFolderPath, setExpanded } =
|
||||
useFolderStore()
|
||||
const { updateWorkflow } = useWorkflowRegistry()
|
||||
|
||||
// Memoize the active workflow's folder ID to avoid unnecessary re-runs
|
||||
const activeWorkflowFolderId = useMemo(() => {
|
||||
if (!workflowId || isLoading || foldersLoading) return null
|
||||
if (!workflowId || isLoading || foldersQuery.isLoading) return null
|
||||
const activeWorkflow = regularWorkflows.find((workflow) => workflow.id === workflowId)
|
||||
return activeWorkflow?.folderId || null
|
||||
}, [workflowId, regularWorkflows, isLoading, foldersLoading])
|
||||
}, [workflowId, regularWorkflows, isLoading, foldersQuery.isLoading])
|
||||
|
||||
// Auto-expand folders when a workflow is active
|
||||
useEffect(() => {
|
||||
@@ -426,7 +421,7 @@ export function FolderTree({
|
||||
|
||||
// Clean up any existing folders with 3+ levels of nesting
|
||||
const cleanupDeepNesting = useCallback(async () => {
|
||||
const { getFolderTree, updateFolderAPI } = useFolderStore.getState()
|
||||
const { getFolderTree } = useFolderStore.getState()
|
||||
const folderTree = getFolderTree(workspaceId)
|
||||
|
||||
const findDeepFolders = (nodes: FolderTreeNode[], currentLevel = 0): FolderTreeNode[] => {
|
||||
@@ -452,23 +447,24 @@ export function FolderTree({
|
||||
// Move deeply nested folders to root level
|
||||
for (const folder of deepFolders) {
|
||||
try {
|
||||
await updateFolderAPI(folder.id, { parentId: null })
|
||||
await updateFolder.mutateAsync({
|
||||
workspaceId,
|
||||
id: folder.id,
|
||||
updates: { parentId: null },
|
||||
})
|
||||
logger.info(`Moved deeply nested folder "${folder.name}" to root level`)
|
||||
} catch (error) {
|
||||
logger.error(`Failed to move folder "${folder.name}":`, error)
|
||||
}
|
||||
}
|
||||
}, [workspaceId])
|
||||
}, [workspaceId, updateFolder])
|
||||
|
||||
// Fetch folders when workspace changes
|
||||
useEffect(() => {
|
||||
if (workspaceId) {
|
||||
fetchFolders(workspaceId).then(() => {
|
||||
// Clean up any existing deep nesting after folders are loaded
|
||||
cleanupDeepNesting()
|
||||
})
|
||||
if (workspaceId && foldersQuery.data) {
|
||||
cleanupDeepNesting()
|
||||
}
|
||||
}, [workspaceId, fetchFolders, cleanupDeepNesting])
|
||||
}, [workspaceId, foldersQuery.data, cleanupDeepNesting])
|
||||
|
||||
useEffect(() => {
|
||||
clearSelection()
|
||||
@@ -487,13 +483,19 @@ export function FolderTree({
|
||||
{} as Record<string, WorkflowMetadata[]>
|
||||
)
|
||||
|
||||
const updateFolderFn = useCallback(
|
||||
(id: string, updates: Partial<WorkflowFolder>) =>
|
||||
updateFolder.mutateAsync({ workspaceId, id, updates }),
|
||||
[updateFolder, workspaceId]
|
||||
)
|
||||
|
||||
const {
|
||||
isDragOver: rootDragOver,
|
||||
isInvalidDrop: rootInvalidDrop,
|
||||
handleDragOver: handleRootDragOver,
|
||||
handleDragLeave: handleRootDragLeave,
|
||||
handleDrop: handleRootDrop,
|
||||
} = useDragHandlers(updateWorkflow, updateFolderAPI, null, 'Moved workflow(s) to root')
|
||||
} = useDragHandlers(updateWorkflow, updateFolderFn, null, 'Moved workflow(s) to root')
|
||||
|
||||
const renderFolderTree = (
|
||||
nodes: FolderTreeNode[],
|
||||
@@ -510,7 +512,7 @@ export function FolderTree({
|
||||
expandedFolders={expandedFolders}
|
||||
pathname={pathname}
|
||||
updateWorkflow={updateWorkflow}
|
||||
updateFolder={updateFolderAPI}
|
||||
updateFolder={updateFolderFn}
|
||||
renderFolderTree={renderFolderTree}
|
||||
parentDragOver={parentDragOver}
|
||||
isFirstItem={level === 0 && index === 0}
|
||||
@@ -518,7 +520,7 @@ export function FolderTree({
|
||||
))
|
||||
}
|
||||
|
||||
const showLoading = isLoading || foldersLoading
|
||||
const showLoading = isLoading || foldersQuery.isLoading
|
||||
const rootWorkflows = workflowsByFolder.root || []
|
||||
|
||||
// Render skeleton loading state
|
||||
|
||||
@@ -1,5 +1,7 @@
|
||||
import { useCallback, useEffect, useRef, useState } from 'react'
|
||||
import { useParams } from 'next/navigation'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { useUpdateFolder } from '@/hooks/queries/folders'
|
||||
import { useFolderStore } from '@/stores/folders/store'
|
||||
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
||||
|
||||
@@ -31,7 +33,10 @@ export function useDragDrop() {
|
||||
const hoverExpandTimerRef = useRef<number | null>(null)
|
||||
const lastDragYRef = useRef<number>(0)
|
||||
|
||||
const { updateFolderAPI, getFolderPath, setExpanded, expandedFolders } = useFolderStore()
|
||||
const params = useParams()
|
||||
const workspaceId = params.workspaceId as string | undefined
|
||||
const updateFolderMutation = useUpdateFolder()
|
||||
const { setExpanded, expandedFolders } = useFolderStore()
|
||||
const { updateWorkflow } = useWorkflowRegistry()
|
||||
|
||||
/**
|
||||
@@ -192,13 +197,21 @@ export function useDragDrop() {
|
||||
return
|
||||
}
|
||||
|
||||
await updateFolderAPI(draggedFolderId, { parentId: targetFolderId })
|
||||
if (!workspaceId) {
|
||||
logger.warn('No workspaceId available for folder move')
|
||||
return
|
||||
}
|
||||
await updateFolderMutation.mutateAsync({
|
||||
workspaceId,
|
||||
id: draggedFolderId,
|
||||
updates: { parentId: targetFolderId },
|
||||
})
|
||||
logger.info(`Moved folder to ${targetFolderId ? `folder ${targetFolderId}` : 'root'}`)
|
||||
} catch (error) {
|
||||
logger.error('Failed to move folder:', error)
|
||||
}
|
||||
},
|
||||
[updateFolderAPI]
|
||||
[updateFolderMutation, workspaceId]
|
||||
)
|
||||
|
||||
/**
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import { useCallback, useState } from 'react'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { generateFolderName } from '@/lib/naming'
|
||||
import { useFolderStore } from '@/stores/folders/store'
|
||||
import { useCreateFolder } from '@/hooks/queries/folders'
|
||||
|
||||
const logger = createLogger('useFolderOperations')
|
||||
|
||||
@@ -17,7 +17,7 @@ interface UseFolderOperationsProps {
|
||||
* @returns Folder operations state and handlers
|
||||
*/
|
||||
export function useFolderOperations({ workspaceId }: UseFolderOperationsProps) {
|
||||
const { createFolder } = useFolderStore()
|
||||
const createFolderMutation = useCreateFolder()
|
||||
const [isCreatingFolder, setIsCreatingFolder] = useState(false)
|
||||
|
||||
/**
|
||||
@@ -32,7 +32,7 @@ export function useFolderOperations({ workspaceId }: UseFolderOperationsProps) {
|
||||
try {
|
||||
setIsCreatingFolder(true)
|
||||
const folderName = await generateFolderName(workspaceId)
|
||||
const folder = await createFolder({ name: folderName, workspaceId })
|
||||
const folder = await createFolderMutation.mutateAsync({ name: folderName, workspaceId })
|
||||
logger.info(`Created folder: ${folderName}`)
|
||||
return folder.id
|
||||
} catch (error) {
|
||||
@@ -41,7 +41,7 @@ export function useFolderOperations({ workspaceId }: UseFolderOperationsProps) {
|
||||
} finally {
|
||||
setIsCreatingFolder(false)
|
||||
}
|
||||
}, [createFolder, workspaceId, isCreatingFolder])
|
||||
}, [createFolderMutation, workspaceId, isCreatingFolder])
|
||||
|
||||
return {
|
||||
// State
|
||||
|
||||
@@ -5,7 +5,6 @@ import { ArrowDown, Plus, Search } from 'lucide-react'
|
||||
import { useParams, useRouter } from 'next/navigation'
|
||||
import { Button, FolderPlus, Tooltip } from '@/components/emcn'
|
||||
import { useSession } from '@/lib/auth-client'
|
||||
import { getEnv, isTruthy } from '@/lib/env'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { useRegisterGlobalCommands } from '@/app/workspace/[workspaceId]/providers/global-commands-provider'
|
||||
import {
|
||||
@@ -27,12 +26,14 @@ import {
|
||||
useImportWorkspace,
|
||||
} from '@/app/workspace/[workspaceId]/w/hooks'
|
||||
import { useFolderStore } from '@/stores/folders/store'
|
||||
import { useSearchModalStore } from '@/stores/search-modal/store'
|
||||
import { MIN_SIDEBAR_WIDTH, useSidebarStore } from '@/stores/sidebar/store'
|
||||
|
||||
const logger = createLogger('SidebarNew')
|
||||
|
||||
// Feature flag: Billing usage indicator visibility (matches legacy sidebar behavior)
|
||||
const isBillingEnabled = isTruthy(getEnv('NEXT_PUBLIC_BILLING_ENABLED'))
|
||||
// const isBillingEnabled = isTruthy(getEnv('NEXT_PUBLIC_BILLING_ENABLED'))
|
||||
const isBillingEnabled = true
|
||||
|
||||
/**
|
||||
* Sidebar component with resizable width that persists across page refreshes.
|
||||
@@ -84,8 +85,12 @@ export function SidebarNew() {
|
||||
// Workspace popover state
|
||||
const [isWorkspaceMenuOpen, setIsWorkspaceMenuOpen] = useState(false)
|
||||
|
||||
// Search modal state
|
||||
const [isSearchModalOpen, setIsSearchModalOpen] = useState(false)
|
||||
// Global search modal state
|
||||
const {
|
||||
isOpen: isSearchModalOpen,
|
||||
setOpen: setIsSearchModalOpen,
|
||||
open: openSearchModal,
|
||||
} = useSearchModalStore()
|
||||
|
||||
// Workspace management hook
|
||||
const {
|
||||
@@ -452,8 +457,7 @@ export function SidebarNew() {
|
||||
shortcut: 'Mod+K',
|
||||
allowInEditable: true,
|
||||
handler: () => {
|
||||
setIsSearchModalOpen(true)
|
||||
logger.info('Search modal opened')
|
||||
openSearchModal()
|
||||
},
|
||||
},
|
||||
])
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import { useCallback, useState } from 'react'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { useDeleteFolderMutation } from '@/hooks/queries/folders'
|
||||
import { useFolderStore } from '@/stores/folders/store'
|
||||
|
||||
const logger = createLogger('useDeleteFolder')
|
||||
@@ -34,7 +35,7 @@ interface UseDeleteFolderProps {
|
||||
* @returns Delete folder handlers and state
|
||||
*/
|
||||
export function useDeleteFolder({ workspaceId, getFolderIds, onSuccess }: UseDeleteFolderProps) {
|
||||
const { deleteFolder } = useFolderStore()
|
||||
const deleteFolderMutation = useDeleteFolderMutation()
|
||||
const [isDeleting, setIsDeleting] = useState(false)
|
||||
|
||||
/**
|
||||
@@ -58,7 +59,7 @@ export function useDeleteFolder({ workspaceId, getFolderIds, onSuccess }: UseDel
|
||||
|
||||
// Delete each folder sequentially
|
||||
for (const folderId of folderIdsToDelete) {
|
||||
await deleteFolder(folderId, workspaceId)
|
||||
await deleteFolderMutation.mutateAsync({ id: folderId, workspaceId })
|
||||
}
|
||||
|
||||
// Clear selection after successful deletion
|
||||
@@ -73,7 +74,7 @@ export function useDeleteFolder({ workspaceId, getFolderIds, onSuccess }: UseDel
|
||||
} finally {
|
||||
setIsDeleting(false)
|
||||
}
|
||||
}, [getFolderIds, isDeleting, deleteFolder, workspaceId, onSuccess])
|
||||
}, [getFolderIds, isDeleting, deleteFolderMutation, workspaceId, onSuccess])
|
||||
|
||||
return {
|
||||
isDeleting,
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import { useCallback, useState } from 'react'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { useDuplicateFolderMutation } from '@/hooks/queries/folders'
|
||||
import { useFolderStore } from '@/stores/folders/store'
|
||||
|
||||
const logger = createLogger('useDuplicateFolder')
|
||||
@@ -38,9 +39,22 @@ export function useDuplicateFolder({
|
||||
getFolderIds,
|
||||
onSuccess,
|
||||
}: UseDuplicateFolderProps) {
|
||||
const { duplicateFolder } = useFolderStore()
|
||||
const duplicateFolderMutation = useDuplicateFolderMutation()
|
||||
const [isDuplicating, setIsDuplicating] = useState(false)
|
||||
|
||||
const generateDuplicateName = useCallback((baseName: string, siblingNames: Set<string>) => {
|
||||
const trimmedName = (baseName || 'Untitled Folder').trim()
|
||||
let candidate = `${trimmedName} Copy`
|
||||
let counter = 2
|
||||
|
||||
while (siblingNames.has(candidate)) {
|
||||
candidate = `${trimmedName} Copy ${counter}`
|
||||
counter += 1
|
||||
}
|
||||
|
||||
return candidate
|
||||
}, [])
|
||||
|
||||
/**
|
||||
* Duplicate the folder(s)
|
||||
*/
|
||||
@@ -61,10 +75,33 @@ export function useDuplicateFolder({
|
||||
const folderIdsToDuplicate = Array.isArray(folderIdsOrId) ? folderIdsOrId : [folderIdsOrId]
|
||||
|
||||
const duplicatedIds: string[] = []
|
||||
const folderStore = useFolderStore.getState()
|
||||
|
||||
// Duplicate each folder sequentially
|
||||
for (const folderId of folderIdsToDuplicate) {
|
||||
const newFolderId = await duplicateFolder(folderId)
|
||||
const folder = folderStore.getFolderById(folderId)
|
||||
|
||||
if (!folder) {
|
||||
logger.warn('Attempted to duplicate folder that no longer exists', { folderId })
|
||||
continue
|
||||
}
|
||||
|
||||
const siblingNames = new Set(
|
||||
folderStore.getChildFolders(folder.parentId).map((sibling) => sibling.name)
|
||||
)
|
||||
// Avoid colliding with the original folder name
|
||||
siblingNames.add(folder.name)
|
||||
|
||||
const duplicateName = generateDuplicateName(folder.name, siblingNames)
|
||||
|
||||
const result = await duplicateFolderMutation.mutateAsync({
|
||||
id: folderId,
|
||||
workspaceId,
|
||||
name: duplicateName,
|
||||
parentId: folder.parentId,
|
||||
color: folder.color,
|
||||
})
|
||||
const newFolderId = result?.id
|
||||
if (newFolderId) {
|
||||
duplicatedIds.push(newFolderId)
|
||||
}
|
||||
@@ -86,7 +123,14 @@ export function useDuplicateFolder({
|
||||
} finally {
|
||||
setIsDuplicating(false)
|
||||
}
|
||||
}, [getFolderIds, isDuplicating, duplicateFolder, onSuccess])
|
||||
}, [
|
||||
getFolderIds,
|
||||
generateDuplicateName,
|
||||
isDuplicating,
|
||||
duplicateFolderMutation,
|
||||
workspaceId,
|
||||
onSuccess,
|
||||
])
|
||||
|
||||
return {
|
||||
isDuplicating,
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import { useCallback, useState } from 'react'
|
||||
import { useQueryClient } from '@tanstack/react-query'
|
||||
import { useRouter } from 'next/navigation'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import {
|
||||
@@ -6,7 +7,7 @@ import {
|
||||
extractWorkflowsFromFiles,
|
||||
extractWorkflowsFromZip,
|
||||
} from '@/lib/workflows/import-export'
|
||||
import { useFolderStore } from '@/stores/folders/store'
|
||||
import { folderKeys, useCreateFolder } from '@/hooks/queries/folders'
|
||||
import { useWorkflowDiffStore } from '@/stores/workflow-diff/store'
|
||||
import { parseWorkflowJson } from '@/stores/workflows/json/importer'
|
||||
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
||||
@@ -30,6 +31,8 @@ interface UseImportWorkflowProps {
|
||||
export function useImportWorkflow({ workspaceId }: UseImportWorkflowProps) {
|
||||
const router = useRouter()
|
||||
const { createWorkflow, loadWorkflows } = useWorkflowRegistry()
|
||||
const queryClient = useQueryClient()
|
||||
const createFolderMutation = useCreateFolder()
|
||||
const [isImporting, setIsImporting] = useState(false)
|
||||
|
||||
/**
|
||||
@@ -119,9 +122,11 @@ export function useImportWorkflow({ workspaceId }: UseImportWorkflowProps) {
|
||||
const zipFile = fileArray[0]
|
||||
const { workflows: extractedWorkflows, metadata } = await extractWorkflowsFromZip(zipFile)
|
||||
|
||||
const { createFolder } = useFolderStore.getState()
|
||||
const folderName = metadata?.workspaceName || zipFile.name.replace(/\.zip$/i, '')
|
||||
const importFolder = await createFolder({ name: folderName, workspaceId })
|
||||
const importFolder = await createFolderMutation.mutateAsync({
|
||||
name: folderName,
|
||||
workspaceId,
|
||||
})
|
||||
const folderMap = new Map<string, string>()
|
||||
|
||||
for (const workflow of extractedWorkflows) {
|
||||
@@ -139,7 +144,7 @@ export function useImportWorkflow({ workspaceId }: UseImportWorkflowProps) {
|
||||
const pathSegment = workflow.folderPath.slice(0, i + 1).join('/')
|
||||
|
||||
if (!folderMap.has(pathSegment)) {
|
||||
const subFolder = await createFolder({
|
||||
const subFolder = await createFolderMutation.mutateAsync({
|
||||
name: workflow.folderPath[i],
|
||||
workspaceId,
|
||||
parentId,
|
||||
@@ -181,7 +186,7 @@ export function useImportWorkflow({ workspaceId }: UseImportWorkflowProps) {
|
||||
|
||||
// Reload workflows to show newly imported ones
|
||||
await loadWorkflows(workspaceId)
|
||||
await useFolderStore.getState().fetchFolders(workspaceId)
|
||||
await queryClient.invalidateQueries({ queryKey: folderKeys.list(workspaceId) })
|
||||
|
||||
logger.info(`Import complete. Imported ${importedWorkflowIds.length} workflow(s)`)
|
||||
|
||||
@@ -200,7 +205,7 @@ export function useImportWorkflow({ workspaceId }: UseImportWorkflowProps) {
|
||||
}
|
||||
}
|
||||
},
|
||||
[importSingleWorkflow, workspaceId, loadWorkflows, router]
|
||||
[importSingleWorkflow, workspaceId, loadWorkflows, router, createFolderMutation, queryClient]
|
||||
)
|
||||
|
||||
return {
|
||||
|
||||
@@ -2,7 +2,7 @@ import { useCallback, useState } from 'react'
|
||||
import { useRouter } from 'next/navigation'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { extractWorkflowName, extractWorkflowsFromZip } from '@/lib/workflows/import-export'
|
||||
import { useFolderStore } from '@/stores/folders/store'
|
||||
import { useCreateFolder } from '@/hooks/queries/folders'
|
||||
import { useWorkflowDiffStore } from '@/stores/workflow-diff/store'
|
||||
import { parseWorkflowJson } from '@/stores/workflows/json/importer'
|
||||
|
||||
@@ -33,6 +33,7 @@ interface UseImportWorkspaceProps {
|
||||
export function useImportWorkspace({ onSuccess }: UseImportWorkspaceProps = {}) {
|
||||
const router = useRouter()
|
||||
const [isImporting, setIsImporting] = useState(false)
|
||||
const createFolderMutation = useCreateFolder()
|
||||
|
||||
/**
|
||||
* Handle workspace import from ZIP file
|
||||
@@ -75,7 +76,6 @@ export function useImportWorkspace({ onSuccess }: UseImportWorkspaceProps = {})
|
||||
const { workspace: newWorkspace } = await createResponse.json()
|
||||
logger.info('Created new workspace:', newWorkspace)
|
||||
|
||||
const { createFolder } = useFolderStore.getState()
|
||||
const folderMap = new Map<string, string>()
|
||||
|
||||
// Import workflows
|
||||
@@ -100,7 +100,7 @@ export function useImportWorkspace({ onSuccess }: UseImportWorkspaceProps = {})
|
||||
const pathSegment = workflow.folderPath.slice(0, i + 1).join('/')
|
||||
|
||||
if (!folderMap.has(pathSegment)) {
|
||||
const subFolder = await createFolder({
|
||||
const subFolder = await createFolderMutation.mutateAsync({
|
||||
name: workflow.folderPath[i],
|
||||
workspaceId: newWorkspace.id,
|
||||
parentId: parentId || undefined,
|
||||
@@ -192,7 +192,7 @@ export function useImportWorkspace({ onSuccess }: UseImportWorkspaceProps = {})
|
||||
setIsImporting(false)
|
||||
}
|
||||
},
|
||||
[isImporting, router, onSuccess]
|
||||
[isImporting, router, onSuccess, createFolderMutation]
|
||||
)
|
||||
|
||||
return {
|
||||
|
||||
@@ -3,6 +3,7 @@ import { createLogger } from '@/lib/logs/console/logger'
|
||||
import type { BlockConfig } from '@/blocks/types'
|
||||
import { AuthMode } from '@/blocks/types'
|
||||
import type { OneDriveResponse } from '@/tools/onedrive/types'
|
||||
import { normalizeExcelValuesForToolParams } from '@/tools/onedrive/utils'
|
||||
|
||||
const logger = createLogger('OneDriveBlock')
|
||||
|
||||
@@ -78,9 +79,10 @@ export const OneDriveBlock: BlockConfig<OneDriveResponse> = {
|
||||
{
|
||||
id: 'values',
|
||||
title: 'Values',
|
||||
type: 'long-input',
|
||||
placeholder:
|
||||
'Enter values as JSON array of arrays (e.g., [["A1","B1"],["A2","B2"]]) or an array of objects',
|
||||
type: 'code',
|
||||
language: 'json',
|
||||
generationType: 'json-object',
|
||||
placeholder: 'Enter a JSON array of rows (e.g., [["A1","B1"],["A2","B2"]])',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'create_file',
|
||||
@@ -89,6 +91,13 @@ export const OneDriveBlock: BlockConfig<OneDriveResponse> = {
|
||||
value: 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet',
|
||||
},
|
||||
},
|
||||
wandConfig: {
|
||||
enabled: true,
|
||||
prompt:
|
||||
'Generate a JSON array of arrays that can be written directly into an Excel worksheet.',
|
||||
placeholder: 'Describe the table you want to generate...',
|
||||
generationType: 'json-object',
|
||||
},
|
||||
required: false,
|
||||
},
|
||||
// File upload (basic mode)
|
||||
@@ -351,17 +360,15 @@ export const OneDriveBlock: BlockConfig<OneDriveResponse> = {
|
||||
params: (params) => {
|
||||
const { credential, folderId, fileId, mimeType, values, downloadFileName, ...rest } = params
|
||||
|
||||
let parsedValues
|
||||
try {
|
||||
parsedValues = values ? JSON.parse(values as string) : undefined
|
||||
} catch (error) {
|
||||
throw new Error('Invalid JSON format for values')
|
||||
let normalizedValues: ReturnType<typeof normalizeExcelValuesForToolParams>
|
||||
if (values !== undefined) {
|
||||
normalizedValues = normalizeExcelValuesForToolParams(values)
|
||||
}
|
||||
|
||||
return {
|
||||
credential,
|
||||
...rest,
|
||||
values: parsedValues,
|
||||
values: normalizedValues,
|
||||
folderId: folderId || undefined,
|
||||
fileId: fileId || undefined,
|
||||
pageSize: rest.pageSize ? Number.parseInt(rest.pageSize as string, 10) : undefined,
|
||||
@@ -380,7 +387,7 @@ export const OneDriveBlock: BlockConfig<OneDriveResponse> = {
|
||||
fileReference: { type: 'json', description: 'File reference from previous block' },
|
||||
content: { type: 'string', description: 'Text content to upload' },
|
||||
mimeType: { type: 'string', description: 'MIME type of file to create' },
|
||||
values: { type: 'string', description: 'Cell values for new Excel as JSON' },
|
||||
values: { type: 'json', description: 'Cell values for new Excel as JSON' },
|
||||
fileId: { type: 'string', description: 'File ID to download' },
|
||||
downloadFileName: { type: 'string', description: 'File name override for download' },
|
||||
folderId: { type: 'string', description: 'Folder ID' },
|
||||
|
||||
@@ -464,18 +464,29 @@ export const SlackBlock: BlockConfig<SlackResponse> = {
|
||||
thread_ts: { type: 'string', description: 'Thread timestamp for reply' },
|
||||
},
|
||||
outputs: {
|
||||
// slack_message outputs
|
||||
// slack_message outputs (send operation)
|
||||
message: {
|
||||
type: 'json',
|
||||
description:
|
||||
'Complete message object with all properties: ts, text, user, channel, reactions, threads, files, attachments, blocks, stars, pins, and edit history',
|
||||
},
|
||||
// Legacy properties for send operation (backward compatibility)
|
||||
ts: { type: 'string', description: 'Message timestamp returned by Slack API' },
|
||||
channel: { type: 'string', description: 'Channel identifier where message was sent' },
|
||||
fileCount: {
|
||||
type: 'number',
|
||||
description: 'Number of files uploaded (when files are attached)',
|
||||
},
|
||||
|
||||
// slack_canvas outputs
|
||||
canvas_id: { type: 'string', description: 'Canvas identifier for created canvases' },
|
||||
title: { type: 'string', description: 'Canvas title' },
|
||||
|
||||
// slack_message_reader outputs
|
||||
// slack_message_reader outputs (read operation)
|
||||
messages: {
|
||||
type: 'json',
|
||||
description: 'Array of message objects with text, user, timestamp, and file attachments',
|
||||
description:
|
||||
'Array of message objects with comprehensive properties: text, user, timestamp, reactions, threads, files, attachments, blocks, stars, pins, and edit history',
|
||||
},
|
||||
|
||||
// slack_download outputs
|
||||
@@ -484,6 +495,13 @@ export const SlackBlock: BlockConfig<SlackResponse> = {
|
||||
description: 'Downloaded file stored in execution files',
|
||||
},
|
||||
|
||||
// slack_update_message outputs (update operation)
|
||||
content: { type: 'string', description: 'Success message for update operation' },
|
||||
metadata: {
|
||||
type: 'json',
|
||||
description: 'Updated message metadata (legacy, use message object instead)',
|
||||
},
|
||||
|
||||
// Trigger outputs (when used as webhook trigger)
|
||||
event_type: { type: 'string', description: 'Type of Slack event that triggered the workflow' },
|
||||
channel_name: { type: 'string', description: 'Human-readable channel name' },
|
||||
|
||||
@@ -210,7 +210,7 @@ export interface SubBlockConfig {
|
||||
}
|
||||
})
|
||||
// Props specific to 'code' sub-block type
|
||||
language?: 'javascript' | 'json'
|
||||
language?: 'javascript' | 'json' | 'python'
|
||||
generationType?: GenerationType
|
||||
collapsible?: boolean // Whether the code block can be collapsed
|
||||
defaultCollapsed?: boolean // Whether the code block is collapsed by default
|
||||
|
||||
@@ -11,7 +11,7 @@ const buttonVariants = cva(
|
||||
'bg-[var(--surface-5)] dark:bg-[var(--surface-5)] hover:bg-[var(--surface-9)] dark:hover:bg-[var(--surface-9)]',
|
||||
active:
|
||||
'bg-[var(--surface-9)] dark:bg-[var(--surface-9)] hover:bg-[var(--surface-11)] dark:hover:bg-[var(--surface-11)] dark:text-[var(--text-primary)] text-[var(--text-primary)]',
|
||||
'3d': 'dark:text-[var(--text-tertiary)] border-t border-l border-r dark:border-[var(--border-strong)] shadow-[0_2px_0_0] dark:shadow-[var(--border-strong)] hover:shadow-[0_4px_0_0] transition-all hover:-translate-y-0.5 hover:dark:text-[var(--text-primary)]',
|
||||
'3d': 'text-[var(--text-tertiary)] dark:text-[var(--text-tertiary)] border-t border-l border-r border-[#303030] dark:border-[#303030] shadow-[0_2px_0_0_rgba(48,48,48,1)] hover:shadow-[0_4px_0_0_rgba(48,48,48,1)] transition-all hover:-translate-y-0.5 hover:text-[var(--text-primary)] hover:dark:text-[var(--text-primary)]',
|
||||
outline:
|
||||
'border border-[#727272] bg-[var(--border-strong)] hover:bg-[var(--surface-11)] dark:border-[#727272] dark:bg-[var(--border-strong)] dark:hover:bg-[var(--surface-11)]',
|
||||
primary:
|
||||
|
||||
@@ -1802,38 +1802,38 @@ export function StripeIcon(props: SVGProps<SVGSVGElement>) {
|
||||
fillRule='evenodd'
|
||||
clipRule='evenodd'
|
||||
d='M360 78.0002C360 52.4002 347.6 32.2002 323.9 32.2002C300.1 32.2002 285.7 52.4002 285.7 77.8002C285.7 107.9 302.7 123.1 327.1 123.1C339 123.1 348 120.4 354.8 116.6V96.6002C348 100 340.2 102.1 330.3 102.1C320.6 102.1 312 98.7002 310.9 86.9002H359.8C359.8 85.6002 360 80.4002 360 78.0002ZM310.6 68.5002C310.6 57.2002 317.5 52.5002 323.8 52.5002C329.9 52.5002 336.4 57.2002 336.4 68.5002H310.6Z'
|
||||
fill='white'
|
||||
fill='currentColor'
|
||||
/>
|
||||
<path
|
||||
fillRule='evenodd'
|
||||
clipRule='evenodd'
|
||||
d='M247.1 32.2002C237.3 32.2002 231 36.8002 227.5 40.0002L226.2 33.8002H204.2V150.4L229.2 145.1L229.3 116.8C232.9 119.4 238.2 123.1 247 123.1C264.9 123.1 281.2 108.7 281.2 77.0002C281.1 48.0002 264.6 32.2002 247.1 32.2002ZM241.1 101.1C235.2 101.1 231.7 99.0002 229.3 96.4002L229.2 59.3002C231.8 56.4002 235.4 54.4002 241.1 54.4002C250.2 54.4002 256.5 64.6002 256.5 77.7002C256.5 91.1002 250.3 101.1 241.1 101.1Z'
|
||||
fill='white'
|
||||
fill='currentColor'
|
||||
/>
|
||||
<path
|
||||
fillRule='evenodd'
|
||||
clipRule='evenodd'
|
||||
d='M169.8 26.3001L194.9 20.9001V0.600098L169.8 5.9001V26.3001Z'
|
||||
fill='white'
|
||||
fill='currentColor'
|
||||
/>
|
||||
<path d='M194.9 33.9001H169.8V121.4H194.9V33.9001Z' fill='white' />
|
||||
<path d='M194.9 33.9001H169.8V121.4H194.9V33.9001Z' fill='currentColor' />
|
||||
<path
|
||||
fillRule='evenodd'
|
||||
clipRule='evenodd'
|
||||
d='M142.9 41.3001L141.3 33.9001H119.7V121.4H144.7V62.1001C150.6 54.4001 160.6 55.8001 163.7 56.9001V33.9001C160.5 32.7001 148.8 30.5001 142.9 41.3001Z'
|
||||
fill='white'
|
||||
fill='currentColor'
|
||||
/>
|
||||
<path
|
||||
fillRule='evenodd'
|
||||
clipRule='evenodd'
|
||||
d='M92.8999 12.2002L68.4999 17.4002L68.3999 97.5002C68.3999 112.3 79.4999 123.2 94.2999 123.2C102.5 123.2 108.5 121.7 111.8 119.9V99.6002C108.6 100.9 92.7999 105.5 92.7999 90.7002V55.2002H111.8V33.9002H92.7999L92.8999 12.2002Z'
|
||||
fill='white'
|
||||
fill='currentColor'
|
||||
/>
|
||||
<path
|
||||
fillRule='evenodd'
|
||||
clipRule='evenodd'
|
||||
d='M25.3 59.3002C25.3 55.4002 28.5 53.9002 33.8 53.9002C41.4 53.9002 51 56.2002 58.6 60.3002V36.8002C50.3 33.5002 42.1 32.2002 33.8 32.2002C13.5 32.2002 0 42.8002 0 60.5002C0 88.1002 38 83.7002 38 95.6002C38 100.2 34 101.7 28.4 101.7C20.1 101.7 9.5 98.3002 1.1 93.7002V117.5C10.4 121.5 19.8 123.2 28.4 123.2C49.2 123.2 63.5 112.9 63.5 95.0002C63.4 65.2002 25.3 70.5002 25.3 59.3002Z'
|
||||
fill='white'
|
||||
fill='currentColor'
|
||||
/>
|
||||
</svg>
|
||||
)
|
||||
|
||||
@@ -72,6 +72,9 @@ export class BlockExecutor {
|
||||
|
||||
try {
|
||||
resolvedInputs = this.resolver.resolveInputs(ctx, node.id, block.config.params, block)
|
||||
if (blockLog) {
|
||||
blockLog.input = resolvedInputs
|
||||
}
|
||||
} catch (error) {
|
||||
cleanupSelfReference?.()
|
||||
return this.handleBlockError(
|
||||
|
||||
@@ -127,33 +127,22 @@ export class VariablesBlockHandler implements BlockHandler {
|
||||
}
|
||||
|
||||
if (type === 'object' || type === 'array') {
|
||||
// If value is already an object or array, accept it as-is
|
||||
// The type hint is for UI purposes and string parsing, not runtime validation
|
||||
if (typeof value === 'object' && value !== null) {
|
||||
if (type === 'array' && !Array.isArray(value)) {
|
||||
throw new Error(
|
||||
`Invalid array value for variable "${variableName || 'unknown'}": expected an array, got an object`
|
||||
)
|
||||
}
|
||||
if (type === 'object' && Array.isArray(value)) {
|
||||
throw new Error(
|
||||
`Invalid object value for variable "${variableName || 'unknown'}": expected an object, got an array`
|
||||
)
|
||||
}
|
||||
return value
|
||||
}
|
||||
// If it's a string, try to parse it as JSON
|
||||
if (typeof value === 'string' && value.trim()) {
|
||||
try {
|
||||
const parsed = JSON.parse(value)
|
||||
if (type === 'array' && !Array.isArray(parsed)) {
|
||||
throw new Error(
|
||||
`Invalid array value for variable "${variableName || 'unknown'}": parsed value is not an array`
|
||||
)
|
||||
// Accept any valid JSON object or array
|
||||
if (typeof parsed === 'object' && parsed !== null) {
|
||||
return parsed
|
||||
}
|
||||
if (type === 'object' && (Array.isArray(parsed) || typeof parsed !== 'object')) {
|
||||
throw new Error(
|
||||
`Invalid object value for variable "${variableName || 'unknown'}": parsed value is not an object`
|
||||
)
|
||||
}
|
||||
return parsed
|
||||
throw new Error(
|
||||
`Invalid JSON for variable "${variableName || 'unknown'}": parsed value is not an object or array`
|
||||
)
|
||||
} catch (error: any) {
|
||||
throw new Error(
|
||||
`Invalid JSON for variable "${variableName || 'unknown'}": ${error.message}`
|
||||
|
||||
@@ -1,5 +1,9 @@
|
||||
import { isReference, parseReferencePath, SPECIAL_REFERENCE_PREFIXES } from '@/executor/consts'
|
||||
import type { ResolutionContext, Resolver } from '@/executor/variables/resolvers/reference'
|
||||
import {
|
||||
navigatePath,
|
||||
type ResolutionContext,
|
||||
type Resolver,
|
||||
} from '@/executor/variables/resolvers/reference'
|
||||
import type { SerializedWorkflow } from '@/serializer/types'
|
||||
import { normalizeBlockName } from '@/stores/workflows/utils'
|
||||
|
||||
@@ -50,7 +54,7 @@ export class BlockResolver implements Resolver {
|
||||
return output
|
||||
}
|
||||
|
||||
const result = this.navigatePath(output, pathParts)
|
||||
const result = navigatePath(output, pathParts)
|
||||
|
||||
if (result === undefined) {
|
||||
const availableKeys = output && typeof output === 'object' ? Object.keys(output) : []
|
||||
@@ -83,67 +87,6 @@ export class BlockResolver implements Resolver {
|
||||
return this.blockByNormalizedName.get(normalized)
|
||||
}
|
||||
|
||||
private navigatePath(obj: any, path: string[]): any {
|
||||
let current = obj
|
||||
for (const part of path) {
|
||||
if (current === null || current === undefined) {
|
||||
return undefined
|
||||
}
|
||||
|
||||
const arrayMatch = part.match(/^([^[]+)\[(\d+)\](.*)$/)
|
||||
if (arrayMatch) {
|
||||
current = this.resolvePartWithIndices(current, part, '', 'block')
|
||||
} else if (/^\d+$/.test(part)) {
|
||||
const index = Number.parseInt(part, 10)
|
||||
current = Array.isArray(current) ? current[index] : undefined
|
||||
} else {
|
||||
current = current[part]
|
||||
}
|
||||
}
|
||||
return current
|
||||
}
|
||||
|
||||
private resolvePartWithIndices(
|
||||
base: any,
|
||||
part: string,
|
||||
fullPath: string,
|
||||
sourceName: string
|
||||
): any {
|
||||
let value = base
|
||||
|
||||
const propMatch = part.match(/^([^[]+)/)
|
||||
let rest = part
|
||||
if (propMatch) {
|
||||
const prop = propMatch[1]
|
||||
value = value[prop]
|
||||
rest = part.slice(prop.length)
|
||||
if (value === undefined) {
|
||||
throw new Error(`No value found at path "${fullPath}" in block "${sourceName}".`)
|
||||
}
|
||||
}
|
||||
|
||||
const indexRe = /^\[(\d+)\]/
|
||||
while (rest.length > 0) {
|
||||
const m = rest.match(indexRe)
|
||||
if (!m) {
|
||||
throw new Error(`Invalid path "${part}" in "${fullPath}" for block "${sourceName}".`)
|
||||
}
|
||||
const idx = Number.parseInt(m[1], 10)
|
||||
if (!Array.isArray(value)) {
|
||||
throw new Error(`Invalid path "${part}" in "${fullPath}" for block "${sourceName}".`)
|
||||
}
|
||||
if (idx < 0 || idx >= value.length) {
|
||||
throw new Error(
|
||||
`Array index ${idx} out of bounds (length: ${value.length}) in path "${part}"`
|
||||
)
|
||||
}
|
||||
value = value[idx]
|
||||
rest = rest.slice(m[0].length)
|
||||
}
|
||||
|
||||
return value
|
||||
}
|
||||
|
||||
public formatValueForBlock(
|
||||
value: any,
|
||||
blockType: string | undefined,
|
||||
@@ -159,9 +102,12 @@ export class BlockResolver implements Resolver {
|
||||
|
||||
if (blockType === 'response') {
|
||||
if (typeof value === 'string') {
|
||||
return value
|
||||
}
|
||||
if (Array.isArray(value) || (typeof value === 'object' && value !== null)) {
|
||||
return JSON.stringify(value)
|
||||
}
|
||||
return value
|
||||
return String(value)
|
||||
}
|
||||
|
||||
if (typeof value === 'object' && value !== null) {
|
||||
|
||||
@@ -1,7 +1,11 @@
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { isReference, parseReferencePath, REFERENCE } from '@/executor/consts'
|
||||
import { extractBaseBlockId } from '@/executor/utils/subflow-utils'
|
||||
import type { ResolutionContext, Resolver } from '@/executor/variables/resolvers/reference'
|
||||
import {
|
||||
navigatePath,
|
||||
type ResolutionContext,
|
||||
type Resolver,
|
||||
} from '@/executor/variables/resolvers/reference'
|
||||
import type { SerializedWorkflow } from '@/serializer/types'
|
||||
|
||||
const logger = createLogger('LoopResolver')
|
||||
@@ -28,7 +32,7 @@ export class LoopResolver implements Resolver {
|
||||
return undefined
|
||||
}
|
||||
|
||||
const [_, property] = parts
|
||||
const [_, property, ...pathParts] = parts
|
||||
let loopScope = context.loopScope
|
||||
|
||||
if (!loopScope) {
|
||||
@@ -43,19 +47,31 @@ export class LoopResolver implements Resolver {
|
||||
logger.warn('Loop scope not found', { reference })
|
||||
return undefined
|
||||
}
|
||||
|
||||
let value: any
|
||||
switch (property) {
|
||||
case 'iteration':
|
||||
case 'index':
|
||||
return loopScope.iteration
|
||||
value = loopScope.iteration
|
||||
break
|
||||
case 'item':
|
||||
case 'currentItem':
|
||||
return loopScope.item
|
||||
value = loopScope.item
|
||||
break
|
||||
case 'items':
|
||||
return loopScope.items
|
||||
value = loopScope.items
|
||||
break
|
||||
default:
|
||||
logger.warn('Unknown loop property', { property })
|
||||
return undefined
|
||||
}
|
||||
|
||||
// If there are additional path parts, navigate deeper
|
||||
if (pathParts.length > 0) {
|
||||
return navigatePath(value, pathParts)
|
||||
}
|
||||
|
||||
return value
|
||||
}
|
||||
|
||||
private findLoopForBlock(blockId: string): string | undefined {
|
||||
|
||||
@@ -1,7 +1,11 @@
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { isReference, parseReferencePath, REFERENCE } from '@/executor/consts'
|
||||
import { extractBaseBlockId, extractBranchIndex } from '@/executor/utils/subflow-utils'
|
||||
import type { ResolutionContext, Resolver } from '@/executor/variables/resolvers/reference'
|
||||
import {
|
||||
navigatePath,
|
||||
type ResolutionContext,
|
||||
type Resolver,
|
||||
} from '@/executor/variables/resolvers/reference'
|
||||
import type { SerializedWorkflow } from '@/serializer/types'
|
||||
|
||||
const logger = createLogger('ParallelResolver')
|
||||
@@ -28,7 +32,7 @@ export class ParallelResolver implements Resolver {
|
||||
return undefined
|
||||
}
|
||||
|
||||
const [_, property] = parts
|
||||
const [_, property, ...pathParts] = parts
|
||||
const parallelId = this.findParallelForBlock(context.currentNodeId)
|
||||
if (!parallelId) {
|
||||
return undefined
|
||||
@@ -47,25 +51,36 @@ export class ParallelResolver implements Resolver {
|
||||
|
||||
const distributionItems = this.getDistributionItems(parallelConfig)
|
||||
|
||||
let value: any
|
||||
switch (property) {
|
||||
case 'index':
|
||||
return branchIndex
|
||||
value = branchIndex
|
||||
break
|
||||
case 'currentItem':
|
||||
if (Array.isArray(distributionItems)) {
|
||||
return distributionItems[branchIndex]
|
||||
}
|
||||
if (typeof distributionItems === 'object' && distributionItems !== null) {
|
||||
value = distributionItems[branchIndex]
|
||||
} else if (typeof distributionItems === 'object' && distributionItems !== null) {
|
||||
const keys = Object.keys(distributionItems)
|
||||
const key = keys[branchIndex]
|
||||
return key !== undefined ? distributionItems[key] : undefined
|
||||
value = key !== undefined ? distributionItems[key] : undefined
|
||||
} else {
|
||||
return undefined
|
||||
}
|
||||
return undefined
|
||||
break
|
||||
case 'items':
|
||||
return distributionItems
|
||||
value = distributionItems
|
||||
break
|
||||
default:
|
||||
logger.warn('Unknown parallel property', { property })
|
||||
return undefined
|
||||
}
|
||||
|
||||
// If there are additional path parts, navigate deeper
|
||||
if (pathParts.length > 0) {
|
||||
return navigatePath(value, pathParts)
|
||||
}
|
||||
|
||||
return value
|
||||
}
|
||||
|
||||
private findParallelForBlock(blockId: string): string | undefined {
|
||||
|
||||
@@ -11,3 +11,41 @@ export interface Resolver {
|
||||
canResolve(reference: string): boolean
|
||||
resolve(reference: string, context: ResolutionContext): any
|
||||
}
|
||||
|
||||
/**
|
||||
* Navigate through nested object properties using a path array.
|
||||
* Supports dot notation and array indices.
|
||||
*
|
||||
* @example
|
||||
* navigatePath({a: {b: {c: 1}}}, ['a', 'b', 'c']) => 1
|
||||
* navigatePath({items: [{name: 'test'}]}, ['items', '0', 'name']) => 'test'
|
||||
*/
|
||||
export function navigatePath(obj: any, path: string[]): any {
|
||||
let current = obj
|
||||
for (const part of path) {
|
||||
if (current === null || current === undefined) {
|
||||
return undefined
|
||||
}
|
||||
|
||||
// Handle array indexing like "items[0]" or just numeric indices
|
||||
const arrayMatch = part.match(/^([^[]+)\[(\d+)\](.*)$/)
|
||||
if (arrayMatch) {
|
||||
// Handle complex array access like "items[0]"
|
||||
const [, prop, index] = arrayMatch
|
||||
current = current[prop]
|
||||
if (current === undefined || current === null) {
|
||||
return undefined
|
||||
}
|
||||
const idx = Number.parseInt(index, 10)
|
||||
current = Array.isArray(current) ? current[idx] : undefined
|
||||
} else if (/^\d+$/.test(part)) {
|
||||
// Handle plain numeric index
|
||||
const index = Number.parseInt(part, 10)
|
||||
current = Array.isArray(current) ? current[index] : undefined
|
||||
} else {
|
||||
// Handle regular property access
|
||||
current = current[part]
|
||||
}
|
||||
}
|
||||
return current
|
||||
}
|
||||
|
||||
@@ -1,7 +1,11 @@
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { VariableManager } from '@/lib/variables/variable-manager'
|
||||
import { isReference, parseReferencePath, REFERENCE } from '@/executor/consts'
|
||||
import type { ResolutionContext, Resolver } from '@/executor/variables/resolvers/reference'
|
||||
import {
|
||||
navigatePath,
|
||||
type ResolutionContext,
|
||||
type Resolver,
|
||||
} from '@/executor/variables/resolvers/reference'
|
||||
|
||||
const logger = createLogger('WorkflowResolver')
|
||||
|
||||
@@ -27,7 +31,7 @@ export class WorkflowResolver implements Resolver {
|
||||
return undefined
|
||||
}
|
||||
|
||||
const [_, variableName] = parts
|
||||
const [_, variableName, ...pathParts] = parts
|
||||
|
||||
const workflowVars = context.executionContext.workflowVariables || this.workflowVariables
|
||||
|
||||
@@ -35,15 +39,23 @@ export class WorkflowResolver implements Resolver {
|
||||
const v = varObj as any
|
||||
if (v && (v.name === variableName || v.id === variableName)) {
|
||||
const normalizedType = (v.type === 'string' ? 'plain' : v.type) || 'plain'
|
||||
let value: any
|
||||
try {
|
||||
return VariableManager.resolveForExecution(v.value, normalizedType)
|
||||
value = VariableManager.resolveForExecution(v.value, normalizedType)
|
||||
} catch (error) {
|
||||
logger.warn('Failed to resolve workflow variable, returning raw value', {
|
||||
variableName,
|
||||
error: (error as Error).message,
|
||||
})
|
||||
return v.value
|
||||
value = v.value
|
||||
}
|
||||
|
||||
// If there are additional path parts, navigate deeper
|
||||
if (pathParts.length > 0) {
|
||||
return navigatePath(value, pathParts)
|
||||
}
|
||||
|
||||
return value
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -1,5 +1,7 @@
|
||||
import { keepPreviousData, useMutation, useQuery, useQueryClient } from '@tanstack/react-query'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { useCustomToolsStore } from '@/stores/custom-tools/store'
|
||||
import type { CustomToolDefinition, CustomToolSchema } from '@/stores/custom-tools/types'
|
||||
|
||||
const logger = createLogger('CustomToolsQueries')
|
||||
const API_ENDPOINT = '/api/tools/custom'
|
||||
@@ -14,32 +16,62 @@ export const customToolsKeys = {
|
||||
detail: (toolId: string) => [...customToolsKeys.all, 'detail', toolId] as const,
|
||||
}
|
||||
|
||||
/**
|
||||
* Custom Tool Types
|
||||
*/
|
||||
export interface CustomToolSchema {
|
||||
function?: {
|
||||
name?: string
|
||||
description?: string
|
||||
parameters?: any
|
||||
export type CustomTool = CustomToolDefinition
|
||||
|
||||
type ApiCustomTool = Partial<CustomToolDefinition> & {
|
||||
id: string
|
||||
title: string
|
||||
schema: Partial<CustomToolSchema> & {
|
||||
function?: Partial<CustomToolSchema['function']> & {
|
||||
parameters?: Partial<CustomToolSchema['function']['parameters']>
|
||||
}
|
||||
}
|
||||
code?: string
|
||||
}
|
||||
|
||||
function normalizeCustomTool(tool: ApiCustomTool, workspaceId: string): CustomToolDefinition {
|
||||
const fallbackName = tool.schema.function?.name || tool.id
|
||||
const parameters = tool.schema.function?.parameters ?? {
|
||||
type: 'object',
|
||||
properties: {},
|
||||
}
|
||||
|
||||
return {
|
||||
id: tool.id,
|
||||
title: tool.title,
|
||||
code: typeof tool.code === 'string' ? tool.code : '',
|
||||
workspaceId: tool.workspaceId ?? workspaceId ?? null,
|
||||
userId: tool.userId ?? null,
|
||||
createdAt:
|
||||
typeof tool.createdAt === 'string'
|
||||
? tool.createdAt
|
||||
: tool.updatedAt && typeof tool.updatedAt === 'string'
|
||||
? tool.updatedAt
|
||||
: new Date().toISOString(),
|
||||
updatedAt: typeof tool.updatedAt === 'string' ? tool.updatedAt : undefined,
|
||||
schema: {
|
||||
type: tool.schema.type ?? 'function',
|
||||
function: {
|
||||
name: fallbackName,
|
||||
description: tool.schema.function?.description,
|
||||
parameters: {
|
||||
type: parameters.type ?? 'object',
|
||||
properties: parameters.properties ?? {},
|
||||
required: parameters.required,
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
export interface CustomTool {
|
||||
id: string
|
||||
title: string
|
||||
schema?: CustomToolSchema
|
||||
code: string
|
||||
workspaceId?: string
|
||||
userId?: string
|
||||
createdAt?: string
|
||||
updatedAt?: string
|
||||
function syncCustomToolsToStore(tools: CustomToolDefinition[]) {
|
||||
useCustomToolsStore.getState().setTools(tools)
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetch custom tools for a workspace
|
||||
*/
|
||||
async function fetchCustomTools(workspaceId: string): Promise<CustomTool[]> {
|
||||
async function fetchCustomTools(workspaceId: string): Promise<CustomToolDefinition[]> {
|
||||
const response = await fetch(`${API_ENDPOINT}?workspaceId=${workspaceId}`)
|
||||
|
||||
if (!response.ok) {
|
||||
@@ -53,45 +85,68 @@ async function fetchCustomTools(workspaceId: string): Promise<CustomTool[]> {
|
||||
throw new Error('Invalid response format')
|
||||
}
|
||||
|
||||
// Filter and validate tools
|
||||
const validTools = data.filter((tool, index) => {
|
||||
const normalizedTools: CustomToolDefinition[] = []
|
||||
|
||||
data.forEach((tool, index) => {
|
||||
if (!tool || typeof tool !== 'object') {
|
||||
logger.warn(`Skipping invalid tool at index ${index}: not an object`)
|
||||
return false
|
||||
return
|
||||
}
|
||||
if (!tool.id || typeof tool.id !== 'string') {
|
||||
logger.warn(`Skipping invalid tool at index ${index}: missing or invalid id`)
|
||||
return false
|
||||
return
|
||||
}
|
||||
if (!tool.title || typeof tool.title !== 'string') {
|
||||
logger.warn(`Skipping invalid tool at index ${index}: missing or invalid title`)
|
||||
return false
|
||||
return
|
||||
}
|
||||
if (!tool.schema || typeof tool.schema !== 'object') {
|
||||
logger.warn(`Skipping invalid tool at index ${index}: missing or invalid schema`)
|
||||
return false
|
||||
return
|
||||
}
|
||||
if (!tool.code || typeof tool.code !== 'string') {
|
||||
logger.warn(`Tool at index ${index} missing code field, defaulting to empty string`)
|
||||
tool.code = ''
|
||||
if (!tool.schema.function || typeof tool.schema.function !== 'object') {
|
||||
logger.warn(`Skipping invalid tool at index ${index}: missing function schema`)
|
||||
return
|
||||
}
|
||||
|
||||
const apiTool: ApiCustomTool = {
|
||||
id: tool.id,
|
||||
title: tool.title,
|
||||
schema: tool.schema,
|
||||
code: typeof tool.code === 'string' ? tool.code : '',
|
||||
workspaceId: tool.workspaceId ?? null,
|
||||
userId: tool.userId ?? null,
|
||||
createdAt: tool.createdAt ?? undefined,
|
||||
updatedAt: tool.updatedAt ?? undefined,
|
||||
}
|
||||
|
||||
try {
|
||||
normalizedTools.push(normalizeCustomTool(apiTool, workspaceId))
|
||||
} catch (error) {
|
||||
logger.warn(`Failed to normalize custom tool at index ${index}`, { error })
|
||||
}
|
||||
return true
|
||||
})
|
||||
|
||||
return validTools
|
||||
return normalizedTools
|
||||
}
|
||||
|
||||
/**
|
||||
* Hook to fetch custom tools
|
||||
*/
|
||||
export function useCustomTools(workspaceId: string) {
|
||||
return useQuery({
|
||||
const query = useQuery<CustomToolDefinition[]>({
|
||||
queryKey: customToolsKeys.list(workspaceId),
|
||||
queryFn: () => fetchCustomTools(workspaceId),
|
||||
enabled: !!workspaceId,
|
||||
staleTime: 60 * 1000, // 1 minute - tools don't change frequently
|
||||
placeholderData: keepPreviousData,
|
||||
})
|
||||
|
||||
if (query.data) {
|
||||
syncCustomToolsToStore(query.data)
|
||||
}
|
||||
|
||||
return query
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -169,7 +224,9 @@ export function useUpdateCustomTool() {
|
||||
logger.info(`Updating custom tool: ${toolId} in workspace ${workspaceId}`)
|
||||
|
||||
// Get the current tool to merge with updates
|
||||
const currentTools = queryClient.getQueryData<CustomTool[]>(customToolsKeys.list(workspaceId))
|
||||
const currentTools = queryClient.getQueryData<CustomToolDefinition[]>(
|
||||
customToolsKeys.list(workspaceId)
|
||||
)
|
||||
const currentTool = currentTools?.find((t) => t.id === toolId)
|
||||
|
||||
if (!currentTool) {
|
||||
@@ -210,13 +267,13 @@ export function useUpdateCustomTool() {
|
||||
await queryClient.cancelQueries({ queryKey: customToolsKeys.list(workspaceId) })
|
||||
|
||||
// Snapshot the previous value
|
||||
const previousTools = queryClient.getQueryData<CustomTool[]>(
|
||||
const previousTools = queryClient.getQueryData<CustomToolDefinition[]>(
|
||||
customToolsKeys.list(workspaceId)
|
||||
)
|
||||
|
||||
// Optimistically update to the new value
|
||||
if (previousTools) {
|
||||
queryClient.setQueryData<CustomTool[]>(
|
||||
queryClient.setQueryData<CustomToolDefinition[]>(
|
||||
customToolsKeys.list(workspaceId),
|
||||
previousTools.map((tool) =>
|
||||
tool.id === toolId
|
||||
@@ -285,13 +342,13 @@ export function useDeleteCustomTool() {
|
||||
await queryClient.cancelQueries({ queryKey: customToolsKeys.list(workspaceId) })
|
||||
|
||||
// Snapshot the previous value
|
||||
const previousTools = queryClient.getQueryData<CustomTool[]>(
|
||||
const previousTools = queryClient.getQueryData<CustomToolDefinition[]>(
|
||||
customToolsKeys.list(workspaceId)
|
||||
)
|
||||
|
||||
// Optimistically update to the new value
|
||||
if (previousTools) {
|
||||
queryClient.setQueryData<CustomTool[]>(
|
||||
queryClient.setQueryData<CustomToolDefinition[]>(
|
||||
customToolsKeys.list(workspaceId),
|
||||
previousTools.filter((tool) => tool.id !== toolId)
|
||||
)
|
||||
|
||||
@@ -1,6 +1,14 @@
|
||||
import { useEffect } from 'react'
|
||||
import { keepPreviousData, useMutation, useQuery, useQueryClient } from '@tanstack/react-query'
|
||||
import type { WorkspaceEnvironmentData } from '@/lib/environment/api'
|
||||
import { fetchPersonalEnvironment, fetchWorkspaceEnvironment } from '@/lib/environment/api'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { API_ENDPOINTS } from '@/stores/constants'
|
||||
import { useEnvironmentStore } from '@/stores/settings/environment/store'
|
||||
import type { EnvironmentVariable } from '@/stores/settings/environment/types'
|
||||
|
||||
export type { WorkspaceEnvironmentData } from '@/lib/environment/api'
|
||||
export type { EnvironmentVariable } from '@/stores/settings/environment/types'
|
||||
|
||||
const logger = createLogger('EnvironmentQueries')
|
||||
|
||||
@@ -16,65 +24,26 @@ export const environmentKeys = {
|
||||
/**
|
||||
* Environment Variable Types
|
||||
*/
|
||||
export interface EnvironmentVariable {
|
||||
key: string
|
||||
value: string
|
||||
}
|
||||
|
||||
export interface WorkspaceEnvironmentData {
|
||||
workspace: Record<string, string>
|
||||
personal: Record<string, string>
|
||||
conflicts: string[]
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetch personal environment variables
|
||||
*/
|
||||
async function fetchPersonalEnvironment(): Promise<Record<string, EnvironmentVariable>> {
|
||||
const response = await fetch(API_ENDPOINTS.ENVIRONMENT)
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`Failed to load environment variables: ${response.statusText}`)
|
||||
}
|
||||
|
||||
const { data } = await response.json()
|
||||
|
||||
if (data && typeof data === 'object') {
|
||||
return data
|
||||
}
|
||||
|
||||
return {}
|
||||
}
|
||||
|
||||
/**
|
||||
* Hook to fetch personal environment variables
|
||||
*/
|
||||
export function usePersonalEnvironment() {
|
||||
return useQuery({
|
||||
const setVariables = useEnvironmentStore((state) => state.setVariables)
|
||||
|
||||
const query = useQuery({
|
||||
queryKey: environmentKeys.personal(),
|
||||
queryFn: fetchPersonalEnvironment,
|
||||
staleTime: 60 * 1000, // 1 minute
|
||||
placeholderData: keepPreviousData,
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetch workspace environment variables
|
||||
*/
|
||||
async function fetchWorkspaceEnvironment(workspaceId: string): Promise<WorkspaceEnvironmentData> {
|
||||
const response = await fetch(API_ENDPOINTS.WORKSPACE_ENVIRONMENT(workspaceId))
|
||||
useEffect(() => {
|
||||
if (query.data) {
|
||||
setVariables(query.data)
|
||||
}
|
||||
}, [query.data, setVariables])
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`Failed to load workspace environment: ${response.statusText}`)
|
||||
}
|
||||
|
||||
const { data } = await response.json()
|
||||
|
||||
return {
|
||||
workspace: data.workspace || {},
|
||||
personal: data.personal || {},
|
||||
conflicts: data.conflicts || [],
|
||||
}
|
||||
return query
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
194
apps/sim/hooks/queries/folders.ts
Normal file
194
apps/sim/hooks/queries/folders.ts
Normal file
@@ -0,0 +1,194 @@
|
||||
import { useEffect } from 'react'
|
||||
import { keepPreviousData, useMutation, useQuery, useQueryClient } from '@tanstack/react-query'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { useFolderStore, type WorkflowFolder } from '@/stores/folders/store'
|
||||
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
||||
|
||||
const logger = createLogger('FolderQueries')
|
||||
|
||||
/** React Query cache keys for workflow folders, scoped per workspace. */
export const folderKeys = {
  all: ['folders'] as const,
  lists: () => [...folderKeys.all, 'list'] as const,
  // An undefined workspaceId collapses to '' so the key stays serializable.
  list: (workspaceId: string | undefined) => [...folderKeys.lists(), workspaceId ?? ''] as const,
}
|
||||
|
||||
function mapFolder(folder: any): WorkflowFolder {
|
||||
return {
|
||||
id: folder.id,
|
||||
name: folder.name,
|
||||
userId: folder.userId,
|
||||
workspaceId: folder.workspaceId,
|
||||
parentId: folder.parentId,
|
||||
color: folder.color,
|
||||
isExpanded: folder.isExpanded,
|
||||
sortOrder: folder.sortOrder,
|
||||
createdAt: new Date(folder.createdAt),
|
||||
updatedAt: new Date(folder.updatedAt),
|
||||
}
|
||||
}
|
||||
|
||||
async function fetchFolders(workspaceId: string): Promise<WorkflowFolder[]> {
|
||||
const response = await fetch(`/api/folders?workspaceId=${workspaceId}`)
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error('Failed to fetch folders')
|
||||
}
|
||||
|
||||
const { folders }: { folders: any[] } = await response.json()
|
||||
return folders.map(mapFolder)
|
||||
}
|
||||
|
||||
export function useFolders(workspaceId?: string) {
|
||||
const setFolders = useFolderStore((state) => state.setFolders)
|
||||
|
||||
const query = useQuery({
|
||||
queryKey: folderKeys.list(workspaceId),
|
||||
queryFn: () => fetchFolders(workspaceId as string),
|
||||
enabled: Boolean(workspaceId),
|
||||
placeholderData: keepPreviousData,
|
||||
staleTime: 60 * 1000,
|
||||
})
|
||||
|
||||
useEffect(() => {
|
||||
if (query.data) {
|
||||
setFolders(query.data)
|
||||
}
|
||||
}, [query.data, setFolders])
|
||||
|
||||
return query
|
||||
}
|
||||
|
||||
/** Payload for creating a folder inside a workspace. */
interface CreateFolderVariables {
  workspaceId: string
  name: string
  parentId?: string
  color?: string
}

/** Payload for updating mutable folder fields. */
interface UpdateFolderVariables {
  workspaceId: string
  id: string
  updates: Partial<Pick<WorkflowFolder, 'name' | 'parentId' | 'color' | 'sortOrder'>>
}

/** Payload for deleting a folder; workspaceId is used for cache invalidation. */
interface DeleteFolderVariables {
  workspaceId: string
  id: string
}

/** Payload for duplicating a folder, optionally into another parent. */
interface DuplicateFolderVariables {
  workspaceId: string
  id: string
  name: string
  parentId?: string | null
  color?: string
}
|
||||
|
||||
export function useCreateFolder() {
|
||||
const queryClient = useQueryClient()
|
||||
|
||||
return useMutation({
|
||||
mutationFn: async ({ workspaceId, ...payload }: CreateFolderVariables) => {
|
||||
const response = await fetch('/api/folders', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({ ...payload, workspaceId }),
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
const error = await response.json().catch(() => ({}))
|
||||
throw new Error(error.error || 'Failed to create folder')
|
||||
}
|
||||
|
||||
const { folder } = await response.json()
|
||||
return mapFolder(folder)
|
||||
},
|
||||
onSuccess: (_data, variables) => {
|
||||
queryClient.invalidateQueries({ queryKey: folderKeys.list(variables.workspaceId) })
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
export function useUpdateFolder() {
|
||||
const queryClient = useQueryClient()
|
||||
|
||||
return useMutation({
|
||||
mutationFn: async ({ workspaceId, id, updates }: UpdateFolderVariables) => {
|
||||
const response = await fetch(`/api/folders/${id}`, {
|
||||
method: 'PUT',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify(updates),
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
const error = await response.json().catch(() => ({}))
|
||||
throw new Error(error.error || 'Failed to update folder')
|
||||
}
|
||||
|
||||
const { folder } = await response.json()
|
||||
return mapFolder(folder)
|
||||
},
|
||||
onSuccess: (_data, variables) => {
|
||||
queryClient.invalidateQueries({ queryKey: folderKeys.list(variables.workspaceId) })
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
export function useDeleteFolderMutation() {
|
||||
const queryClient = useQueryClient()
|
||||
|
||||
return useMutation({
|
||||
mutationFn: async ({ workspaceId: _workspaceId, id }: DeleteFolderVariables) => {
|
||||
const response = await fetch(`/api/folders/${id}`, { method: 'DELETE' })
|
||||
|
||||
if (!response.ok) {
|
||||
const error = await response.json().catch(() => ({}))
|
||||
throw new Error(error.error || 'Failed to delete folder')
|
||||
}
|
||||
|
||||
return response.json()
|
||||
},
|
||||
onSuccess: async (_data, variables) => {
|
||||
queryClient.invalidateQueries({ queryKey: folderKeys.list(variables.workspaceId) })
|
||||
try {
|
||||
await useWorkflowRegistry.getState().loadWorkflows(variables.workspaceId)
|
||||
} catch (error) {
|
||||
logger.error('Failed to reload workflows after folder delete', { error })
|
||||
}
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
export function useDuplicateFolderMutation() {
|
||||
const queryClient = useQueryClient()
|
||||
|
||||
return useMutation({
|
||||
mutationFn: async ({ id, workspaceId, name, parentId, color }: DuplicateFolderVariables) => {
|
||||
const response = await fetch(`/api/folders/${id}/duplicate`, {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({
|
||||
workspaceId,
|
||||
name,
|
||||
parentId: parentId ?? null,
|
||||
color,
|
||||
}),
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
const error = await response.json().catch(() => ({}))
|
||||
throw new Error(error.error || 'Failed to duplicate folder')
|
||||
}
|
||||
|
||||
return response.json()
|
||||
},
|
||||
onSuccess: async (_data, variables) => {
|
||||
queryClient.invalidateQueries({ queryKey: folderKeys.list(variables.workspaceId) })
|
||||
try {
|
||||
await useWorkflowRegistry.getState().loadWorkflows(variables.workspaceId)
|
||||
} catch (error) {
|
||||
logger.error('Failed to reload workflows after folder duplicate', { error })
|
||||
}
|
||||
},
|
||||
})
|
||||
}
|
||||
@@ -57,11 +57,9 @@ async function fetchGeneralSettings(): Promise<GeneralSettings> {
|
||||
* This ensures the rest of the app (which uses Zustand) stays in sync
|
||||
*/
|
||||
function syncSettingsToZustand(settings: GeneralSettings) {
|
||||
const store = useGeneralStore.getState()
|
||||
const { setSettings } = useGeneralStore.getState()
|
||||
|
||||
// Update Zustand store to match React Query cache
|
||||
// This allows the rest of the app to continue using Zustand for reading values
|
||||
useGeneralStore.setState({
|
||||
setSettings({
|
||||
isAutoConnectEnabled: settings.autoConnect,
|
||||
isAutoPanEnabled: settings.autoPan,
|
||||
isConsoleExpandedByDefault: settings.consoleExpandedByDefault,
|
||||
|
||||
297
apps/sim/hooks/queries/knowledge.ts
Normal file
297
apps/sim/hooks/queries/knowledge.ts
Normal file
@@ -0,0 +1,297 @@
|
||||
import { keepPreviousData, useMutation, useQuery, useQueryClient } from '@tanstack/react-query'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import type {
|
||||
ChunkData,
|
||||
ChunksPagination,
|
||||
DocumentData,
|
||||
DocumentsPagination,
|
||||
KnowledgeBaseData,
|
||||
} from '@/stores/knowledge/store'
|
||||
|
||||
const logger = createLogger('KnowledgeQueries')
|
||||
|
||||
/**
 * React Query cache keys for the knowledge feature. Document and chunk keys
 * nest under their knowledge base's detail key, so invalidating the detail
 * key also covers documents/chunks queries.
 */
export const knowledgeKeys = {
  all: ['knowledge'] as const,
  list: (workspaceId?: string) => [...knowledgeKeys.all, 'list', workspaceId ?? 'all'] as const,
  detail: (knowledgeBaseId?: string) =>
    [...knowledgeKeys.all, 'detail', knowledgeBaseId ?? ''] as const,
  // paramsKey is a serialized form of the fetch params (see serialize* helpers).
  documents: (knowledgeBaseId: string, paramsKey: string) =>
    [...knowledgeKeys.detail(knowledgeBaseId), 'documents', paramsKey] as const,
  chunks: (knowledgeBaseId: string, documentId: string, paramsKey: string) =>
    [
      ...knowledgeKeys.detail(knowledgeBaseId),
      'document',
      documentId,
      'chunks',
      paramsKey,
    ] as const,
}
|
||||
|
||||
export async function fetchKnowledgeBases(workspaceId?: string): Promise<KnowledgeBaseData[]> {
|
||||
const url = workspaceId ? `/api/knowledge?workspaceId=${workspaceId}` : '/api/knowledge'
|
||||
const response = await fetch(url)
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`Failed to fetch knowledge bases: ${response.status} ${response.statusText}`)
|
||||
}
|
||||
|
||||
const result = await response.json()
|
||||
if (result?.success === false) {
|
||||
throw new Error(result.error || 'Failed to fetch knowledge bases')
|
||||
}
|
||||
|
||||
return Array.isArray(result?.data) ? result.data : []
|
||||
}
|
||||
|
||||
export async function fetchKnowledgeBase(knowledgeBaseId: string): Promise<KnowledgeBaseData> {
|
||||
const response = await fetch(`/api/knowledge/${knowledgeBaseId}`)
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`Failed to fetch knowledge base: ${response.status} ${response.statusText}`)
|
||||
}
|
||||
|
||||
const result = await response.json()
|
||||
if (!result?.success || !result?.data) {
|
||||
throw new Error(result?.error || 'Failed to fetch knowledge base')
|
||||
}
|
||||
|
||||
return result.data
|
||||
}
|
||||
|
||||
/** Query parameters for listing a knowledge base's documents. */
export interface KnowledgeDocumentsParams {
  knowledgeBaseId: string
  search?: string
  // Defaults to 50 in fetchKnowledgeDocuments.
  limit?: number
  // Defaults to 0 in fetchKnowledgeDocuments.
  offset?: number
  sortBy?: string
  sortOrder?: string
}

/** Paginated document listing result. */
export interface KnowledgeDocumentsResponse {
  documents: DocumentData[]
  pagination: DocumentsPagination
}
|
||||
|
||||
export async function fetchKnowledgeDocuments({
|
||||
knowledgeBaseId,
|
||||
search,
|
||||
limit = 50,
|
||||
offset = 0,
|
||||
sortBy,
|
||||
sortOrder,
|
||||
}: KnowledgeDocumentsParams): Promise<KnowledgeDocumentsResponse> {
|
||||
const params = new URLSearchParams()
|
||||
if (search) params.set('search', search)
|
||||
if (sortBy) params.set('sortBy', sortBy)
|
||||
if (sortOrder) params.set('sortOrder', sortOrder)
|
||||
params.set('limit', limit.toString())
|
||||
params.set('offset', offset.toString())
|
||||
|
||||
const url = `/api/knowledge/${knowledgeBaseId}/documents${params.toString() ? `?${params.toString()}` : ''}`
|
||||
const response = await fetch(url)
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`Failed to fetch documents: ${response.status} ${response.statusText}`)
|
||||
}
|
||||
|
||||
const result = await response.json()
|
||||
if (!result?.success) {
|
||||
throw new Error(result?.error || 'Failed to fetch documents')
|
||||
}
|
||||
|
||||
const documents: DocumentData[] = result.data?.documents ?? result.data ?? []
|
||||
const pagination: DocumentsPagination = result.data?.pagination ??
|
||||
result.pagination ?? {
|
||||
total: documents.length,
|
||||
limit,
|
||||
offset,
|
||||
hasMore: false,
|
||||
}
|
||||
|
||||
return {
|
||||
documents,
|
||||
pagination: {
|
||||
total: pagination.total ?? documents.length,
|
||||
limit: pagination.limit ?? limit,
|
||||
offset: pagination.offset ?? offset,
|
||||
hasMore: Boolean(pagination.hasMore),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
/** Query parameters for listing a document's chunks. */
export interface KnowledgeChunksParams {
  knowledgeBaseId: string
  documentId: string
  search?: string
  // Defaults to 50 in fetchKnowledgeChunks.
  limit?: number
  // Defaults to 0 in fetchKnowledgeChunks.
  offset?: number
}

/** Paginated chunk listing result. */
export interface KnowledgeChunksResponse {
  chunks: ChunkData[]
  pagination: ChunksPagination
}
|
||||
|
||||
export async function fetchKnowledgeChunks({
|
||||
knowledgeBaseId,
|
||||
documentId,
|
||||
search,
|
||||
limit = 50,
|
||||
offset = 0,
|
||||
}: KnowledgeChunksParams): Promise<KnowledgeChunksResponse> {
|
||||
const params = new URLSearchParams()
|
||||
if (search) params.set('search', search)
|
||||
if (limit) params.set('limit', limit.toString())
|
||||
if (offset) params.set('offset', offset.toString())
|
||||
|
||||
const response = await fetch(
|
||||
`/api/knowledge/${knowledgeBaseId}/documents/${documentId}/chunks${params.toString() ? `?${params.toString()}` : ''}`
|
||||
)
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`Failed to fetch chunks: ${response.status} ${response.statusText}`)
|
||||
}
|
||||
|
||||
const result = await response.json()
|
||||
if (!result?.success) {
|
||||
throw new Error(result?.error || 'Failed to fetch chunks')
|
||||
}
|
||||
|
||||
const chunks: ChunkData[] = result.data ?? []
|
||||
const pagination: ChunksPagination = {
|
||||
total: result.pagination?.total ?? chunks.length,
|
||||
limit: result.pagination?.limit ?? limit,
|
||||
offset: result.pagination?.offset ?? offset,
|
||||
hasMore: Boolean(result.pagination?.hasMore),
|
||||
}
|
||||
|
||||
return { chunks, pagination }
|
||||
}
|
||||
|
||||
export function useKnowledgeBasesQuery(
|
||||
workspaceId?: string,
|
||||
options?: {
|
||||
enabled?: boolean
|
||||
}
|
||||
) {
|
||||
return useQuery({
|
||||
queryKey: knowledgeKeys.list(workspaceId),
|
||||
queryFn: () => fetchKnowledgeBases(workspaceId),
|
||||
enabled: options?.enabled ?? true,
|
||||
staleTime: 60 * 1000,
|
||||
placeholderData: keepPreviousData,
|
||||
})
|
||||
}
|
||||
|
||||
export function useKnowledgeBaseQuery(knowledgeBaseId?: string) {
|
||||
return useQuery({
|
||||
queryKey: knowledgeKeys.detail(knowledgeBaseId),
|
||||
queryFn: () => fetchKnowledgeBase(knowledgeBaseId as string),
|
||||
enabled: Boolean(knowledgeBaseId),
|
||||
staleTime: 60 * 1000,
|
||||
})
|
||||
}
|
||||
|
||||
export const serializeDocumentParams = (params: KnowledgeDocumentsParams) =>
|
||||
JSON.stringify({
|
||||
search: params.search ?? '',
|
||||
limit: params.limit ?? 50,
|
||||
offset: params.offset ?? 0,
|
||||
sortBy: params.sortBy ?? '',
|
||||
sortOrder: params.sortOrder ?? '',
|
||||
})
|
||||
|
||||
export function useKnowledgeDocumentsQuery(
|
||||
params: KnowledgeDocumentsParams,
|
||||
options?: {
|
||||
enabled?: boolean
|
||||
}
|
||||
) {
|
||||
const paramsKey = serializeDocumentParams(params)
|
||||
return useQuery({
|
||||
queryKey: knowledgeKeys.documents(params.knowledgeBaseId, paramsKey),
|
||||
queryFn: () => fetchKnowledgeDocuments(params),
|
||||
enabled: (options?.enabled ?? true) && Boolean(params.knowledgeBaseId),
|
||||
placeholderData: keepPreviousData,
|
||||
})
|
||||
}
|
||||
|
||||
export const serializeChunkParams = (params: KnowledgeChunksParams) =>
|
||||
JSON.stringify({
|
||||
search: params.search ?? '',
|
||||
limit: params.limit ?? 50,
|
||||
offset: params.offset ?? 0,
|
||||
})
|
||||
|
||||
export function useKnowledgeChunksQuery(
|
||||
params: KnowledgeChunksParams,
|
||||
options?: {
|
||||
enabled?: boolean
|
||||
}
|
||||
) {
|
||||
const paramsKey = serializeChunkParams(params)
|
||||
return useQuery({
|
||||
queryKey: knowledgeKeys.chunks(params.knowledgeBaseId, params.documentId, paramsKey),
|
||||
queryFn: () => fetchKnowledgeChunks(params),
|
||||
enabled: (options?.enabled ?? true) && Boolean(params.knowledgeBaseId && params.documentId),
|
||||
placeholderData: keepPreviousData,
|
||||
})
|
||||
}
|
||||
|
||||
/** Payload for updating fields of a single knowledge document. */
interface UpdateDocumentPayload {
  knowledgeBaseId: string
  documentId: string
  updates: Partial<DocumentData>
}
|
||||
|
||||
export function useMutateKnowledgeDocument() {
|
||||
const queryClient = useQueryClient()
|
||||
return useMutation({
|
||||
mutationFn: async ({ knowledgeBaseId, documentId, updates }: UpdateDocumentPayload) => {
|
||||
const response = await fetch(`/api/knowledge/${knowledgeBaseId}/documents/${documentId}`, {
|
||||
method: 'PUT',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
body: JSON.stringify(updates),
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
const errorData = await response.json().catch(() => ({}))
|
||||
throw new Error(errorData.error || 'Failed to update document')
|
||||
}
|
||||
|
||||
const result = await response.json()
|
||||
if (!result?.success) {
|
||||
throw new Error(result?.error || 'Failed to update document')
|
||||
}
|
||||
|
||||
return result
|
||||
},
|
||||
onMutate: async ({ knowledgeBaseId, documentId, updates }) => {
|
||||
await queryClient.cancelQueries({ queryKey: knowledgeKeys.detail(knowledgeBaseId) })
|
||||
|
||||
const documentQueries = queryClient
|
||||
.getQueriesData<KnowledgeDocumentsResponse>({
|
||||
queryKey: knowledgeKeys.detail(knowledgeBaseId),
|
||||
})
|
||||
.filter(([key]) => Array.isArray(key) && key.includes('documents'))
|
||||
|
||||
documentQueries.forEach(([key, data]) => {
|
||||
if (!data) return
|
||||
queryClient.setQueryData(key, {
|
||||
...data,
|
||||
documents: data.documents.map((doc) =>
|
||||
doc.id === documentId ? { ...doc, ...updates } : doc
|
||||
),
|
||||
})
|
||||
})
|
||||
},
|
||||
onError: (error) => {
|
||||
logger.error('Failed to mutate document', error)
|
||||
},
|
||||
onSettled: (_data, _error, variables) => {
|
||||
queryClient.invalidateQueries({ queryKey: knowledgeKeys.detail(variables.knowledgeBaseId) })
|
||||
},
|
||||
})
|
||||
}
|
||||
36
apps/sim/hooks/queries/providers.ts
Normal file
36
apps/sim/hooks/queries/providers.ts
Normal file
@@ -0,0 +1,36 @@
|
||||
import { useQuery } from '@tanstack/react-query'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import type { ProviderName } from '@/stores/providers/types'
|
||||
|
||||
const logger = createLogger('ProviderModelsQuery')
|
||||
|
||||
/** Model-list endpoint for each supported provider. */
const providerEndpoints: Record<ProviderName, string> = {
  base: '/api/providers/base/models',
  ollama: '/api/providers/ollama/models',
  openrouter: '/api/providers/openrouter/models',
}
|
||||
|
||||
async function fetchProviderModels(provider: ProviderName): Promise<string[]> {
|
||||
const response = await fetch(providerEndpoints[provider])
|
||||
|
||||
if (!response.ok) {
|
||||
logger.warn(`Failed to fetch ${provider} models`, {
|
||||
status: response.status,
|
||||
statusText: response.statusText,
|
||||
})
|
||||
throw new Error(`Failed to fetch ${provider} models`)
|
||||
}
|
||||
|
||||
const data = await response.json()
|
||||
const models: string[] = Array.isArray(data.models) ? data.models : []
|
||||
|
||||
return provider === 'openrouter' ? Array.from(new Set(models)) : models
|
||||
}
|
||||
|
||||
export function useProviderModels(provider: ProviderName) {
|
||||
return useQuery({
|
||||
queryKey: ['provider-models', provider],
|
||||
queryFn: () => fetchProviderModels(provider),
|
||||
staleTime: 5 * 60 * 1000,
|
||||
})
|
||||
}
|
||||
@@ -34,42 +34,32 @@ export function useSubscriptionData() {
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetch user usage data
|
||||
* Fetch user usage limit metadata
|
||||
* Note: This endpoint returns limit information (currentLimit, minimumLimit, canEdit, etc.)
|
||||
* For actual usage data (current, limit, percentUsed), use useSubscriptionData() instead
|
||||
*/
|
||||
async function fetchUsageData() {
|
||||
async function fetchUsageLimitData() {
|
||||
const response = await fetch('/api/usage?context=user')
|
||||
if (!response.ok) {
|
||||
throw new Error('Failed to fetch usage data')
|
||||
throw new Error('Failed to fetch usage limit data')
|
||||
}
|
||||
return response.json()
|
||||
}
|
||||
|
||||
/**
|
||||
* Base hook to fetch user usage data (single query)
|
||||
* Hook to fetch usage limit metadata
|
||||
* Returns: currentLimit, minimumLimit, canEdit, plan, updatedAt
|
||||
* Use this for editing usage limits, not for displaying current usage
|
||||
*/
|
||||
function useUsageDataBase() {
|
||||
export function useUsageLimitData() {
|
||||
return useQuery({
|
||||
queryKey: subscriptionKeys.usage(),
|
||||
queryFn: fetchUsageData,
|
||||
queryFn: fetchUsageLimitData,
|
||||
staleTime: 30 * 1000,
|
||||
placeholderData: keepPreviousData,
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Hook to fetch user usage data
|
||||
*/
|
||||
export function useUsageData() {
|
||||
return useUsageDataBase()
|
||||
}
|
||||
|
||||
/**
|
||||
* Hook to fetch usage limit data
|
||||
*/
|
||||
export function useUsageLimitData() {
|
||||
return useUsageDataBase()
|
||||
}
|
||||
|
||||
/**
|
||||
* Update usage limit mutation
|
||||
*/
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,7 +1,7 @@
|
||||
/**
|
||||
* Environment utility functions for consistent environment detection across the application
|
||||
*/
|
||||
import { env, isTruthy } from './env'
|
||||
import { env, getEnv, isTruthy } from './env'
|
||||
|
||||
/**
|
||||
* Is the application running in production mode
|
||||
@@ -21,7 +21,9 @@ export const isTest = env.NODE_ENV === 'test'
|
||||
/**
|
||||
* Is this the hosted version of the application
|
||||
*/
|
||||
export const isHosted = true
|
||||
export const isHosted =
|
||||
getEnv('NEXT_PUBLIC_APP_URL') === 'https://www.sim.ai' ||
|
||||
getEnv('NEXT_PUBLIC_APP_URL') === 'https://www.staging.sim.ai'
|
||||
|
||||
/**
|
||||
* Is billing enforcement enabled
|
||||
|
||||
42
apps/sim/lib/environment/api.ts
Normal file
42
apps/sim/lib/environment/api.ts
Normal file
@@ -0,0 +1,42 @@
|
||||
import { API_ENDPOINTS } from '@/stores/constants'
|
||||
import type { EnvironmentVariable } from '@/stores/settings/environment/types'
|
||||
|
||||
/**
 * Environment variables visible from a workspace: workspace-level values,
 * the caller's personal values, and the keys present in both (conflicts).
 */
export interface WorkspaceEnvironmentData {
  workspace: Record<string, string>
  personal: Record<string, string>
  conflicts: string[]
}
|
||||
|
||||
export async function fetchPersonalEnvironment(): Promise<Record<string, EnvironmentVariable>> {
|
||||
const response = await fetch(API_ENDPOINTS.ENVIRONMENT)
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`Failed to load environment variables: ${response.statusText}`)
|
||||
}
|
||||
|
||||
const { data } = await response.json()
|
||||
|
||||
if (data && typeof data === 'object') {
|
||||
return data
|
||||
}
|
||||
|
||||
return {}
|
||||
}
|
||||
|
||||
export async function fetchWorkspaceEnvironment(
|
||||
workspaceId: string
|
||||
): Promise<WorkspaceEnvironmentData> {
|
||||
const response = await fetch(API_ENDPOINTS.WORKSPACE_ENVIRONMENT(workspaceId))
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`Failed to load workspace environment: ${response.statusText}`)
|
||||
}
|
||||
|
||||
const { data } = await response.json()
|
||||
|
||||
return {
|
||||
workspace: data.workspace || {},
|
||||
personal: data.personal || {},
|
||||
conflicts: data.conflicts || [],
|
||||
}
|
||||
}
|
||||
@@ -151,7 +151,9 @@ export function queryToApiParams(parsedQuery: ParsedQuery): Record<string, strin
|
||||
case 'level':
|
||||
case 'status':
|
||||
if (filter.operator === '=') {
|
||||
params.level = filter.value as string
|
||||
const existing = params.level ? params.level.split(',') : []
|
||||
existing.push(filter.value as string)
|
||||
params.level = existing.join(',')
|
||||
}
|
||||
break
|
||||
|
||||
|
||||
@@ -1,182 +0,0 @@
|
||||
import { describe, expect, it } from 'vitest'
|
||||
import { SearchSuggestions } from './search-suggestions'
|
||||
|
||||
describe('SearchSuggestions', () => {
|
||||
const engine = new SearchSuggestions(['workflow1', 'workflow2'], ['folder1', 'folder2'])
|
||||
|
||||
describe('validateQuery', () => {
|
||||
it.concurrent('should return false for incomplete filter expressions', () => {
|
||||
expect(engine.validateQuery('level:')).toBe(false)
|
||||
expect(engine.validateQuery('trigger:')).toBe(false)
|
||||
expect(engine.validateQuery('cost:')).toBe(false)
|
||||
expect(engine.validateQuery('some text level:')).toBe(false)
|
||||
})
|
||||
|
||||
it.concurrent('should return false for incomplete quoted strings', () => {
|
||||
expect(engine.validateQuery('workflow:"incomplete')).toBe(false)
|
||||
expect(engine.validateQuery('level:error workflow:"incomplete')).toBe(false)
|
||||
expect(engine.validateQuery('"incomplete string')).toBe(false)
|
||||
})
|
||||
|
||||
it.concurrent('should return true for complete queries', () => {
|
||||
expect(engine.validateQuery('level:error')).toBe(true)
|
||||
expect(engine.validateQuery('trigger:api')).toBe(true)
|
||||
expect(engine.validateQuery('cost:>0.01')).toBe(true)
|
||||
expect(engine.validateQuery('workflow:"test workflow"')).toBe(true)
|
||||
expect(engine.validateQuery('level:error trigger:api')).toBe(true)
|
||||
expect(engine.validateQuery('some search text')).toBe(true)
|
||||
expect(engine.validateQuery('')).toBe(true)
|
||||
})
|
||||
|
||||
it.concurrent('should return true for mixed complete queries', () => {
|
||||
expect(engine.validateQuery('search text level:error')).toBe(true)
|
||||
expect(engine.validateQuery('level:error some search')).toBe(true)
|
||||
expect(engine.validateQuery('workflow:"test" level:error search')).toBe(true)
|
||||
})
|
||||
})
|
||||
|
||||
describe('getSuggestions', () => {
|
||||
it.concurrent('should return filter key suggestions at the beginning', () => {
|
||||
const result = engine.getSuggestions('', 0)
|
||||
expect(result?.type).toBe('filter-keys')
|
||||
expect(result?.suggestions.length).toBeGreaterThan(0)
|
||||
expect(result?.suggestions.some((s) => s.value === 'level:')).toBe(true)
|
||||
})
|
||||
|
||||
it.concurrent('should return value suggestions for uniquely identified partial keys', () => {
|
||||
const result = engine.getSuggestions('lev', 3)
|
||||
expect(result?.type).toBe('filter-values')
|
||||
expect(result?.suggestions.some((s) => s.value === 'error' || s.value === 'info')).toBe(true)
|
||||
})
|
||||
|
||||
it.concurrent('should return filter value suggestions after colon', () => {
|
||||
const result = engine.getSuggestions('level:', 6)
|
||||
expect(result?.type).toBe('filter-values')
|
||||
expect(result?.suggestions.length).toBeGreaterThan(0)
|
||||
expect(result?.suggestions.some((s) => s.value === 'error')).toBe(true)
|
||||
})
|
||||
|
||||
it.concurrent('should return filtered value suggestions for partial values', () => {
|
||||
const result = engine.getSuggestions('level:err', 9)
|
||||
expect(result?.type).toBe('filter-values')
|
||||
expect(result?.suggestions.some((s) => s.value === 'error')).toBe(true)
|
||||
})
|
||||
|
||||
it.concurrent('should handle workflow suggestions', () => {
|
||||
const result = engine.getSuggestions('workflow:', 9)
|
||||
expect(result?.type).toBe('filter-values')
|
||||
expect(result?.suggestions.some((s) => s.label === 'workflow1')).toBe(true)
|
||||
})
|
||||
|
||||
it.concurrent('should return null for text search context', () => {
|
||||
const result = engine.getSuggestions('some random text', 10)
|
||||
expect(result).toBe(null)
|
||||
})
|
||||
|
||||
it.concurrent('should show filter key suggestions after completing a filter', () => {
|
||||
const result = engine.getSuggestions('level:error ', 12)
|
||||
expect(result?.type).toBe('filter-keys')
|
||||
expect(result?.suggestions.length).toBeGreaterThan(0)
|
||||
expect(result?.suggestions.some((s) => s.value === 'level:')).toBe(true)
|
||||
expect(result?.suggestions.some((s) => s.value === 'trigger:')).toBe(true)
|
||||
})
|
||||
|
||||
it.concurrent('should show filter key suggestions after multiple completed filters', () => {
|
||||
const result = engine.getSuggestions('level:error trigger:api ', 24)
|
||||
expect(result?.type).toBe('filter-keys')
|
||||
expect(result?.suggestions.length).toBeGreaterThan(0)
|
||||
})
|
||||
|
||||
it.concurrent(
|
||||
'should surface value suggestions for uniquely matched partial keys after existing filters',
|
||||
() => {
|
||||
const result = engine.getSuggestions('level:error lev', 15)
|
||||
expect(result?.type).toBe('filter-values')
|
||||
expect(result?.suggestions.some((s) => s.value === 'error' || s.value === 'info')).toBe(
|
||||
true
|
||||
)
|
||||
}
|
||||
)
|
||||
|
||||
it.concurrent('should handle filter values after existing filters', () => {
|
||||
const result = engine.getSuggestions('level:error level:', 18)
|
||||
expect(result?.type).toBe('filter-values')
|
||||
expect(result?.suggestions.some((s) => s.value === 'info')).toBe(true)
|
||||
})
|
||||
})
|
||||
|
||||
describe('generatePreview', () => {
|
||||
it.concurrent('should generate correct preview for filter keys', () => {
|
||||
const suggestion = {
|
||||
id: 'test',
|
||||
value: 'level:',
|
||||
label: 'Status',
|
||||
category: 'filters' as const,
|
||||
}
|
||||
const preview = engine.generatePreview(suggestion, '', 0)
|
||||
expect(preview).toBe('level:')
|
||||
})
|
||||
|
||||
it.concurrent('should generate correct preview for filter values', () => {
|
||||
const suggestion = { id: 'test', value: 'error', label: 'Error', category: 'level' as const }
|
||||
const preview = engine.generatePreview(suggestion, 'level:', 6)
|
||||
expect(preview).toBe('level:error')
|
||||
})
|
||||
|
||||
it.concurrent('should handle partial replacements correctly', () => {
|
||||
const suggestion = {
|
||||
id: 'test',
|
||||
value: 'level:',
|
||||
label: 'Status',
|
||||
category: 'filters' as const,
|
||||
}
|
||||
const preview = engine.generatePreview(suggestion, 'lev', 3)
|
||||
expect(preview).toBe('level:')
|
||||
})
|
||||
|
||||
it.concurrent('should handle quoted workflow values', () => {
|
||||
const suggestion = {
|
||||
id: 'test',
|
||||
value: '"workflow1"',
|
||||
label: 'workflow1',
|
||||
category: 'workflow' as const,
|
||||
}
|
||||
const preview = engine.generatePreview(suggestion, 'workflow:', 9)
|
||||
expect(preview).toBe('workflow:"workflow1"')
|
||||
})
|
||||
|
||||
it.concurrent('should add space when adding filter after completed filter', () => {
|
||||
const suggestion = {
|
||||
id: 'test',
|
||||
value: 'trigger:',
|
||||
label: 'Trigger',
|
||||
category: 'filters' as const,
|
||||
}
|
||||
const preview = engine.generatePreview(suggestion, 'level:error ', 12)
|
||||
expect(preview).toBe('level:error trigger:')
|
||||
})
|
||||
|
||||
it.concurrent('should handle multiple completed filters', () => {
|
||||
const suggestion = { id: 'test', value: 'cost:', label: 'Cost', category: 'filters' as const }
|
||||
const preview = engine.generatePreview(suggestion, 'level:error trigger:api ', 24)
|
||||
expect(preview).toBe('level:error trigger:api cost:')
|
||||
})
|
||||
|
||||
it.concurrent('should handle adding same filter type multiple times', () => {
|
||||
const suggestion = {
|
||||
id: 'test',
|
||||
value: 'level:',
|
||||
label: 'Status',
|
||||
category: 'filters' as const,
|
||||
}
|
||||
const preview = engine.generatePreview(suggestion, 'level:error ', 12)
|
||||
expect(preview).toBe('level:error level:')
|
||||
})
|
||||
|
||||
it.concurrent('should handle filter value after existing filters', () => {
|
||||
const suggestion = { id: 'test', value: 'info', label: 'Info', category: 'level' as const }
|
||||
const preview = engine.generatePreview(suggestion, 'level:error level:', 19)
|
||||
expect(preview).toBe('level:error level:info')
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -1,7 +1,4 @@
|
||||
import type {
|
||||
Suggestion,
|
||||
SuggestionGroup,
|
||||
} from '@/app/workspace/[workspaceId]/logs/hooks/use-autocomplete'
|
||||
import type { Suggestion, SuggestionGroup } from '@/app/workspace/[workspaceId]/logs/types/search'
|
||||
|
||||
export interface FilterDefinition {
|
||||
key: string
|
||||
@@ -14,6 +11,17 @@ export interface FilterDefinition {
|
||||
}>
|
||||
}
|
||||
|
||||
export interface WorkflowData {
|
||||
id: string
|
||||
name: string
|
||||
description?: string
|
||||
}
|
||||
|
||||
export interface FolderData {
|
||||
id: string
|
||||
name: string
|
||||
}
|
||||
|
||||
export const FILTER_DEFINITIONS: FilterDefinition[] = [
|
||||
{
|
||||
key: 'level',
|
||||
@@ -62,10 +70,6 @@ export const FILTER_DEFINITIONS: FilterDefinition[] = [
|
||||
{ value: 'this-week', label: 'This week', description: "This week's logs" },
|
||||
{ value: 'last-week', label: 'Last week', description: "Last week's logs" },
|
||||
{ value: 'this-month', label: 'This month', description: "This month's logs" },
|
||||
// Friendly relative range shortcuts like Stripe
|
||||
{ value: '"> 2 days ago"', label: '> 2 days ago', description: 'Newer than 2 days' },
|
||||
{ value: '"> last week"', label: '> last week', description: 'Newer than last week' },
|
||||
{ value: '">=2025/08/31"', label: '>= YYYY/MM/DD', description: 'Start date (YYYY/MM/DD)' },
|
||||
],
|
||||
},
|
||||
{
|
||||
@@ -82,395 +86,348 @@ export const FILTER_DEFINITIONS: FilterDefinition[] = [
|
||||
},
|
||||
]
|
||||
|
||||
interface QueryContext {
|
||||
type: 'initial' | 'filter-key-partial' | 'filter-value-context' | 'text-search'
|
||||
filterKey?: string
|
||||
partialInput?: string
|
||||
startPosition?: number
|
||||
endPosition?: number
|
||||
}
|
||||
|
||||
export class SearchSuggestions {
|
||||
private availableWorkflows: string[]
|
||||
private availableFolders: string[]
|
||||
private workflowsData: WorkflowData[]
|
||||
private foldersData: FolderData[]
|
||||
|
||||
constructor(availableWorkflows: string[] = [], availableFolders: string[] = []) {
|
||||
this.availableWorkflows = availableWorkflows
|
||||
this.availableFolders = availableFolders
|
||||
constructor(workflowsData: WorkflowData[] = [], foldersData: FolderData[] = []) {
|
||||
this.workflowsData = workflowsData
|
||||
this.foldersData = foldersData
|
||||
}
|
||||
|
||||
updateAvailableData(workflows: string[] = [], folders: string[] = []) {
|
||||
this.availableWorkflows = workflows
|
||||
this.availableFolders = folders
|
||||
updateData(workflowsData: WorkflowData[] = [], foldersData: FolderData[] = []) {
|
||||
this.workflowsData = workflowsData
|
||||
this.foldersData = foldersData
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a filter value is complete (matches a valid option)
|
||||
* Get suggestions based ONLY on current input (no cursor position!)
|
||||
*/
|
||||
private isCompleteFilterValue(filterKey: string, value: string): boolean {
|
||||
const filterDef = FILTER_DEFINITIONS.find((f) => f.key === filterKey)
|
||||
if (filterDef) {
|
||||
return filterDef.options.some((option) => option.value === value)
|
||||
getSuggestions(input: string): SuggestionGroup | null {
|
||||
const trimmed = input.trim()
|
||||
|
||||
// Empty input → show all filter keys
|
||||
if (!trimmed) {
|
||||
return this.getFilterKeysList()
|
||||
}
|
||||
|
||||
// For workflow and folder filters, any quoted value is considered complete
|
||||
if (filterKey === 'workflow' || filterKey === 'folder') {
|
||||
return value.startsWith('"') && value.endsWith('"') && value.length > 2
|
||||
// Input ends with ':' → show values for that key
|
||||
if (trimmed.endsWith(':')) {
|
||||
const key = trimmed.slice(0, -1)
|
||||
return this.getFilterValues(key)
|
||||
}
|
||||
|
||||
return false
|
||||
// Input contains ':' → filter value context
|
||||
if (trimmed.includes(':')) {
|
||||
const [key, partial] = trimmed.split(':')
|
||||
return this.getFilterValues(key, partial)
|
||||
}
|
||||
|
||||
// Plain text → multi-section results
|
||||
return this.getMultiSectionResults(trimmed)
|
||||
}
|
||||
|
||||
/**
|
||||
* Analyze the current input context to determine what suggestions to show.
|
||||
* Get filter keys list (empty input state)
|
||||
*/
|
||||
private analyzeContext(input: string, cursorPosition: number): QueryContext {
|
||||
const textBeforeCursor = input.slice(0, cursorPosition)
|
||||
|
||||
if (textBeforeCursor === '' || textBeforeCursor.endsWith(' ')) {
|
||||
return { type: 'initial' }
|
||||
}
|
||||
|
||||
// Check for filter value context (must be after a space or at start, and not empty value)
|
||||
const filterValueMatch = textBeforeCursor.match(/(?:^|\s)(\w+):([\w"<>=!]*)$/)
|
||||
if (filterValueMatch && filterValueMatch[2].length > 0 && !filterValueMatch[2].includes(' ')) {
|
||||
const filterKey = filterValueMatch[1]
|
||||
const filterValue = filterValueMatch[2]
|
||||
|
||||
// If the filter value is complete, treat as ready for next filter
|
||||
if (this.isCompleteFilterValue(filterKey, filterValue)) {
|
||||
return { type: 'initial' }
|
||||
}
|
||||
|
||||
// Otherwise, treat as partial value needing completion
|
||||
return {
|
||||
type: 'filter-value-context',
|
||||
filterKey,
|
||||
partialInput: filterValue,
|
||||
startPosition:
|
||||
filterValueMatch.index! +
|
||||
(filterValueMatch[0].startsWith(' ') ? 1 : 0) +
|
||||
filterKey.length +
|
||||
1,
|
||||
endPosition: cursorPosition,
|
||||
}
|
||||
}
|
||||
|
||||
// Check for empty filter key (just "key:" with no value)
|
||||
const emptyFilterMatch = textBeforeCursor.match(/(?:^|\s)(\w+):$/)
|
||||
if (emptyFilterMatch) {
|
||||
return { type: 'initial' } // Treat as initial to show filter value suggestions
|
||||
}
|
||||
|
||||
const filterKeyMatch = textBeforeCursor.match(/(?:^|\s)(\w+):?$/)
|
||||
if (filterKeyMatch && !filterKeyMatch[0].includes(':')) {
|
||||
return {
|
||||
type: 'filter-key-partial',
|
||||
partialInput: filterKeyMatch[1],
|
||||
startPosition: filterKeyMatch.index! + (filterKeyMatch[0].startsWith(' ') ? 1 : 0),
|
||||
endPosition: cursorPosition,
|
||||
}
|
||||
}
|
||||
|
||||
return { type: 'text-search' }
|
||||
}
|
||||
|
||||
/**
|
||||
* Get filter key suggestions
|
||||
*/
|
||||
private getFilterKeySuggestions(partialInput?: string): Suggestion[] {
|
||||
private getFilterKeysList(): SuggestionGroup {
|
||||
const suggestions: Suggestion[] = []
|
||||
|
||||
// Add all filter keys
|
||||
for (const filter of FILTER_DEFINITIONS) {
|
||||
const matchesPartial =
|
||||
!partialInput ||
|
||||
filter.key.toLowerCase().startsWith(partialInput.toLowerCase()) ||
|
||||
filter.label.toLowerCase().startsWith(partialInput.toLowerCase())
|
||||
|
||||
if (matchesPartial) {
|
||||
suggestions.push({
|
||||
id: `filter-key-${filter.key}`,
|
||||
value: `${filter.key}:`,
|
||||
label: filter.label,
|
||||
description: filter.description,
|
||||
category: 'filters',
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
if (this.availableWorkflows.length > 0) {
|
||||
const matchesWorkflow =
|
||||
!partialInput ||
|
||||
'workflow'.startsWith(partialInput.toLowerCase()) ||
|
||||
'workflows'.startsWith(partialInput.toLowerCase())
|
||||
|
||||
if (matchesWorkflow) {
|
||||
suggestions.push({
|
||||
id: 'filter-key-workflow',
|
||||
value: 'workflow:',
|
||||
label: 'Workflow',
|
||||
description: 'Filter by workflow name',
|
||||
category: 'filters',
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
if (this.availableFolders.length > 0) {
|
||||
const matchesFolder =
|
||||
!partialInput ||
|
||||
'folder'.startsWith(partialInput.toLowerCase()) ||
|
||||
'folders'.startsWith(partialInput.toLowerCase())
|
||||
|
||||
if (matchesFolder) {
|
||||
suggestions.push({
|
||||
id: 'filter-key-folder',
|
||||
value: 'folder:',
|
||||
label: 'Folder',
|
||||
description: 'Filter by folder name',
|
||||
category: 'filters',
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// Always include id-based keys (workflowId, executionId)
|
||||
const idKeys: Array<{ key: string; label: string; description: string }> = [
|
||||
{ key: 'workflowId', label: 'Workflow ID', description: 'Filter by workflowId' },
|
||||
{ key: 'executionId', label: 'Execution ID', description: 'Filter by executionId' },
|
||||
]
|
||||
for (const idDef of idKeys) {
|
||||
const matchesIdKey =
|
||||
!partialInput ||
|
||||
idDef.key.toLowerCase().startsWith(partialInput.toLowerCase()) ||
|
||||
idDef.label.toLowerCase().startsWith(partialInput.toLowerCase())
|
||||
if (matchesIdKey) {
|
||||
suggestions.push({
|
||||
id: `filter-key-${idDef.key}`,
|
||||
value: `${idDef.key}:`,
|
||||
label: idDef.label,
|
||||
description: idDef.description,
|
||||
category: 'filters',
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
return suggestions
|
||||
}
|
||||
|
||||
/**
|
||||
* Get filter value suggestions for a specific filter key
|
||||
*/
|
||||
private getFilterValueSuggestions(filterKey: string, partialInput = ''): Suggestion[] {
|
||||
const suggestions: Suggestion[] = []
|
||||
|
||||
const filterDef = FILTER_DEFINITIONS.find((f) => f.key === filterKey)
|
||||
if (filterDef) {
|
||||
for (const option of filterDef.options) {
|
||||
const matchesPartial =
|
||||
!partialInput ||
|
||||
option.value.toLowerCase().includes(partialInput.toLowerCase()) ||
|
||||
option.label.toLowerCase().includes(partialInput.toLowerCase())
|
||||
|
||||
if (matchesPartial) {
|
||||
suggestions.push({
|
||||
id: `filter-value-${filterKey}-${option.value}`,
|
||||
value: option.value,
|
||||
label: option.label,
|
||||
description: option.description,
|
||||
category: filterKey as any,
|
||||
})
|
||||
}
|
||||
}
|
||||
return suggestions
|
||||
}
|
||||
|
||||
if (filterKey === 'workflow') {
|
||||
for (const workflow of this.availableWorkflows) {
|
||||
const matchesPartial =
|
||||
!partialInput || workflow.toLowerCase().includes(partialInput.toLowerCase())
|
||||
|
||||
if (matchesPartial) {
|
||||
suggestions.push({
|
||||
id: `filter-value-workflow-${workflow}`,
|
||||
value: `"${workflow}"`,
|
||||
label: workflow,
|
||||
description: 'Workflow name',
|
||||
category: 'workflow',
|
||||
})
|
||||
}
|
||||
}
|
||||
return suggestions.slice(0, 8)
|
||||
}
|
||||
|
||||
if (filterKey === 'folder') {
|
||||
for (const folder of this.availableFolders) {
|
||||
const matchesPartial =
|
||||
!partialInput || folder.toLowerCase().includes(partialInput.toLowerCase())
|
||||
|
||||
if (matchesPartial) {
|
||||
suggestions.push({
|
||||
id: `filter-value-folder-${folder}`,
|
||||
value: `"${folder}"`,
|
||||
label: folder,
|
||||
description: 'Folder name',
|
||||
category: 'folder',
|
||||
})
|
||||
}
|
||||
}
|
||||
return suggestions.slice(0, 8)
|
||||
}
|
||||
|
||||
if (filterKey === 'workflowId' || filterKey === 'executionId') {
|
||||
const example = partialInput || '"1234..."'
|
||||
suggestions.push({
|
||||
id: `filter-value-${filterKey}-example`,
|
||||
value: example,
|
||||
label: 'Enter exact ID',
|
||||
description: 'Use quotes for the full ID',
|
||||
category: filterKey,
|
||||
id: `filter-key-${filter.key}`,
|
||||
value: `${filter.key}:`,
|
||||
label: filter.label,
|
||||
description: filter.description,
|
||||
category: 'filters',
|
||||
})
|
||||
return suggestions
|
||||
}
|
||||
|
||||
return suggestions
|
||||
// Add workflow and folder keys
|
||||
if (this.workflowsData.length > 0) {
|
||||
suggestions.push({
|
||||
id: 'filter-key-workflow',
|
||||
value: 'workflow:',
|
||||
label: 'Workflow',
|
||||
description: 'Filter by workflow name',
|
||||
category: 'filters',
|
||||
})
|
||||
}
|
||||
|
||||
if (this.foldersData.length > 0) {
|
||||
suggestions.push({
|
||||
id: 'filter-key-folder',
|
||||
value: 'folder:',
|
||||
label: 'Folder',
|
||||
description: 'Filter by folder name',
|
||||
category: 'filters',
|
||||
})
|
||||
}
|
||||
|
||||
suggestions.push({
|
||||
id: 'filter-key-workflowId',
|
||||
value: 'workflowId:',
|
||||
label: 'Workflow ID',
|
||||
description: 'Filter by workflow ID',
|
||||
category: 'filters',
|
||||
})
|
||||
|
||||
suggestions.push({
|
||||
id: 'filter-key-executionId',
|
||||
value: 'executionId:',
|
||||
label: 'Execution ID',
|
||||
description: 'Filter by execution ID',
|
||||
category: 'filters',
|
||||
})
|
||||
|
||||
return {
|
||||
type: 'filter-keys',
|
||||
suggestions,
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get suggestions based on current input and cursor position
|
||||
* Get filter values for a specific key
|
||||
*/
|
||||
getSuggestions(input: string, cursorPosition: number): SuggestionGroup | null {
|
||||
const context = this.analyzeContext(input, cursorPosition)
|
||||
private getFilterValues(key: string, partial = ''): SuggestionGroup | null {
|
||||
const filterDef = FILTER_DEFINITIONS.find((f) => f.key === key)
|
||||
|
||||
// Special case: check if we're at "key:" position for filter values
|
||||
const textBeforeCursor = input.slice(0, cursorPosition)
|
||||
const emptyFilterMatch = textBeforeCursor.match(/(?:^|\s)(\w+):$/)
|
||||
if (emptyFilterMatch) {
|
||||
const filterKey = emptyFilterMatch[1]
|
||||
const filterValueSuggestions = this.getFilterValueSuggestions(filterKey, '')
|
||||
return filterValueSuggestions.length > 0
|
||||
if (filterDef) {
|
||||
const suggestions = filterDef.options
|
||||
.filter(
|
||||
(opt) =>
|
||||
!partial ||
|
||||
opt.value.toLowerCase().includes(partial.toLowerCase()) ||
|
||||
opt.label.toLowerCase().includes(partial.toLowerCase())
|
||||
)
|
||||
.map((opt) => ({
|
||||
id: `filter-value-${key}-${opt.value}`,
|
||||
value: `${key}:${opt.value}`,
|
||||
label: opt.label,
|
||||
description: opt.description,
|
||||
category: key as any,
|
||||
}))
|
||||
|
||||
return suggestions.length > 0
|
||||
? {
|
||||
type: 'filter-values',
|
||||
filterKey,
|
||||
suggestions: filterValueSuggestions,
|
||||
filterKey: key,
|
||||
suggestions,
|
||||
}
|
||||
: null
|
||||
}
|
||||
|
||||
switch (context.type) {
|
||||
case 'initial':
|
||||
case 'filter-key-partial': {
|
||||
if (context.type === 'filter-key-partial' && context.partialInput) {
|
||||
const matches = FILTER_DEFINITIONS.filter(
|
||||
(f) =>
|
||||
f.key.toLowerCase().startsWith(context.partialInput!.toLowerCase()) ||
|
||||
f.label.toLowerCase().startsWith(context.partialInput!.toLowerCase())
|
||||
)
|
||||
// Workflow filter values
|
||||
if (key === 'workflow') {
|
||||
const suggestions = this.workflowsData
|
||||
.filter((w) => !partial || w.name.toLowerCase().includes(partial.toLowerCase()))
|
||||
.slice(0, 8)
|
||||
.map((w) => ({
|
||||
id: `filter-value-workflow-${w.id}`,
|
||||
value: `workflow:"${w.name}"`,
|
||||
label: w.name,
|
||||
description: w.description,
|
||||
category: 'workflow' as const,
|
||||
}))
|
||||
|
||||
if (matches.length === 1) {
|
||||
const key = matches[0].key
|
||||
const filterValueSuggestions = this.getFilterValueSuggestions(key, '')
|
||||
if (filterValueSuggestions.length > 0) {
|
||||
return {
|
||||
type: 'filter-values',
|
||||
filterKey: key,
|
||||
suggestions: filterValueSuggestions,
|
||||
}
|
||||
}
|
||||
return suggestions.length > 0
|
||||
? {
|
||||
type: 'filter-values',
|
||||
filterKey: 'workflow',
|
||||
suggestions,
|
||||
}
|
||||
: null
|
||||
}
|
||||
|
||||
// Folder filter values
|
||||
if (key === 'folder') {
|
||||
const suggestions = this.foldersData
|
||||
.filter((f) => !partial || f.name.toLowerCase().includes(partial.toLowerCase()))
|
||||
.slice(0, 8)
|
||||
.map((f) => ({
|
||||
id: `filter-value-folder-${f.id}`,
|
||||
value: `folder:"${f.name}"`,
|
||||
label: f.name,
|
||||
category: 'folder' as const,
|
||||
}))
|
||||
|
||||
return suggestions.length > 0
|
||||
? {
|
||||
type: 'filter-values',
|
||||
filterKey: 'folder',
|
||||
suggestions,
|
||||
}
|
||||
: null
|
||||
}
|
||||
|
||||
return null
|
||||
}
|
||||
|
||||
/**
|
||||
* Get multi-section results for plain text
|
||||
*/
|
||||
private getMultiSectionResults(query: string): SuggestionGroup | null {
|
||||
const sections: Array<{ title: string; suggestions: Suggestion[] }> = []
|
||||
const allSuggestions: Suggestion[] = []
|
||||
|
||||
// Show all results option
|
||||
const showAllSuggestion: Suggestion = {
|
||||
id: 'show-all',
|
||||
value: query,
|
||||
label: `Show all results for "${query}"`,
|
||||
category: 'show-all',
|
||||
}
|
||||
allSuggestions.push(showAllSuggestion)
|
||||
|
||||
// Match filter values (e.g., "info" → "Status: Info")
|
||||
const matchingFilterValues = this.getMatchingFilterValues(query)
|
||||
if (matchingFilterValues.length > 0) {
|
||||
sections.push({
|
||||
title: 'SUGGESTED FILTERS',
|
||||
suggestions: matchingFilterValues,
|
||||
})
|
||||
allSuggestions.push(...matchingFilterValues)
|
||||
}
|
||||
|
||||
// Match workflows
|
||||
const matchingWorkflows = this.getMatchingWorkflows(query)
|
||||
if (matchingWorkflows.length > 0) {
|
||||
sections.push({
|
||||
title: 'WORKFLOWS',
|
||||
suggestions: matchingWorkflows,
|
||||
})
|
||||
allSuggestions.push(...matchingWorkflows)
|
||||
}
|
||||
|
||||
// Match folders
|
||||
const matchingFolders = this.getMatchingFolders(query)
|
||||
if (matchingFolders.length > 0) {
|
||||
sections.push({
|
||||
title: 'FOLDERS',
|
||||
suggestions: matchingFolders,
|
||||
})
|
||||
allSuggestions.push(...matchingFolders)
|
||||
}
|
||||
|
||||
// Add filter keys if no specific matches
|
||||
if (
|
||||
matchingFilterValues.length === 0 &&
|
||||
matchingWorkflows.length === 0 &&
|
||||
matchingFolders.length === 0
|
||||
) {
|
||||
const filterKeys = this.getFilterKeysList()
|
||||
if (filterKeys.suggestions.length > 0) {
|
||||
sections.push({
|
||||
title: 'SUGGESTED FILTERS',
|
||||
suggestions: filterKeys.suggestions.slice(0, 5),
|
||||
})
|
||||
allSuggestions.push(...filterKeys.suggestions.slice(0, 5))
|
||||
}
|
||||
}
|
||||
|
||||
return allSuggestions.length > 0
|
||||
? {
|
||||
type: 'multi-section',
|
||||
suggestions: allSuggestions,
|
||||
sections,
|
||||
}
|
||||
|
||||
const filterKeySuggestions = this.getFilterKeySuggestions(context.partialInput)
|
||||
return filterKeySuggestions.length > 0
|
||||
? {
|
||||
type: 'filter-keys',
|
||||
suggestions: filterKeySuggestions,
|
||||
}
|
||||
: null
|
||||
}
|
||||
|
||||
case 'filter-value-context': {
|
||||
if (!context.filterKey) return null
|
||||
const filterValueSuggestions = this.getFilterValueSuggestions(
|
||||
context.filterKey,
|
||||
context.partialInput
|
||||
)
|
||||
return filterValueSuggestions.length > 0
|
||||
? {
|
||||
type: 'filter-values',
|
||||
filterKey: context.filterKey,
|
||||
suggestions: filterValueSuggestions,
|
||||
}
|
||||
: null
|
||||
}
|
||||
default:
|
||||
return null
|
||||
}
|
||||
: null
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate preview text for a suggestion
|
||||
* Show suggestion at the end of input, with proper spacing logic
|
||||
* Match filter values across all definitions
|
||||
*/
|
||||
generatePreview(suggestion: Suggestion, currentValue: string, cursorPosition: number): string {
|
||||
// If input is empty, just show the suggestion
|
||||
if (!currentValue.trim()) {
|
||||
return suggestion.value
|
||||
}
|
||||
private getMatchingFilterValues(query: string): Suggestion[] {
|
||||
if (!query.trim()) return []
|
||||
|
||||
// Check if we're doing a partial replacement (like "lev" -> "level:")
|
||||
const context = this.analyzeContext(currentValue, cursorPosition)
|
||||
const matches: Suggestion[] = []
|
||||
const lowerQuery = query.toLowerCase()
|
||||
|
||||
if (
|
||||
context.type === 'filter-key-partial' &&
|
||||
context.startPosition !== undefined &&
|
||||
context.endPosition !== undefined
|
||||
) {
|
||||
const before = currentValue.slice(0, context.startPosition)
|
||||
const after = currentValue.slice(context.endPosition)
|
||||
const isFilterValue =
|
||||
!!suggestion.category && FILTER_DEFINITIONS.some((f) => f.key === suggestion.category)
|
||||
if (isFilterValue) {
|
||||
return `${before}${suggestion.category}:${suggestion.value}${after}`
|
||||
for (const filterDef of FILTER_DEFINITIONS) {
|
||||
for (const option of filterDef.options) {
|
||||
if (
|
||||
option.value.toLowerCase().includes(lowerQuery) ||
|
||||
option.label.toLowerCase().includes(lowerQuery)
|
||||
) {
|
||||
matches.push({
|
||||
id: `filter-match-${filterDef.key}-${option.value}`,
|
||||
value: `${filterDef.key}:${option.value}`,
|
||||
label: `${filterDef.label}: ${option.label}`,
|
||||
description: option.description,
|
||||
category: filterDef.key as any,
|
||||
})
|
||||
}
|
||||
}
|
||||
return `${before}${suggestion.value}${after}`
|
||||
}
|
||||
|
||||
if (
|
||||
context.type === 'filter-value-context' &&
|
||||
context.startPosition !== undefined &&
|
||||
context.endPosition !== undefined
|
||||
) {
|
||||
const before = currentValue.slice(0, context.startPosition)
|
||||
const after = currentValue.slice(context.endPosition)
|
||||
return `${before}${suggestion.value}${after}`
|
||||
}
|
||||
|
||||
let result = currentValue
|
||||
|
||||
if (currentValue.endsWith(':')) {
|
||||
result += suggestion.value
|
||||
} else if (currentValue.endsWith(' ')) {
|
||||
result += suggestion.value
|
||||
} else {
|
||||
result += ` ${suggestion.value}`
|
||||
}
|
||||
|
||||
return result
|
||||
return matches.slice(0, 5)
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate if a query is complete and should trigger backend calls
|
||||
* Match workflows by name/description
|
||||
*/
|
||||
validateQuery(query: string): boolean {
|
||||
const incompleteFilterMatch = query.match(/(\w+):$/)
|
||||
if (incompleteFilterMatch) {
|
||||
return false
|
||||
}
|
||||
private getMatchingWorkflows(query: string): Suggestion[] {
|
||||
if (!query.trim() || this.workflowsData.length === 0) return []
|
||||
|
||||
const openQuotes = (query.match(/"/g) || []).length
|
||||
if (openQuotes % 2 !== 0) {
|
||||
return false
|
||||
}
|
||||
const lowerQuery = query.toLowerCase()
|
||||
|
||||
return true
|
||||
const matches = this.workflowsData
|
||||
.filter(
|
||||
(workflow) =>
|
||||
workflow.name.toLowerCase().includes(lowerQuery) ||
|
||||
workflow.description?.toLowerCase().includes(lowerQuery)
|
||||
)
|
||||
.sort((a, b) => {
|
||||
const aName = a.name.toLowerCase()
|
||||
const bName = b.name.toLowerCase()
|
||||
|
||||
if (aName === lowerQuery) return -1
|
||||
if (bName === lowerQuery) return 1
|
||||
if (aName.startsWith(lowerQuery) && !bName.startsWith(lowerQuery)) return -1
|
||||
if (bName.startsWith(lowerQuery) && !aName.startsWith(lowerQuery)) return 1
|
||||
return aName.localeCompare(bName)
|
||||
})
|
||||
.slice(0, 8)
|
||||
.map((workflow) => ({
|
||||
id: `workflow-match-${workflow.id}`,
|
||||
value: `workflow:"${workflow.name}"`,
|
||||
label: workflow.name,
|
||||
description: workflow.description,
|
||||
category: 'workflow' as const,
|
||||
}))
|
||||
|
||||
return matches
|
||||
}
|
||||
|
||||
/**
|
||||
* Match folders by name
|
||||
*/
|
||||
private getMatchingFolders(query: string): Suggestion[] {
|
||||
if (!query.trim() || this.foldersData.length === 0) return []
|
||||
|
||||
const lowerQuery = query.toLowerCase()
|
||||
|
||||
const matches = this.foldersData
|
||||
.filter((folder) => folder.name.toLowerCase().includes(lowerQuery))
|
||||
.sort((a, b) => {
|
||||
const aName = a.name.toLowerCase()
|
||||
const bName = b.name.toLowerCase()
|
||||
|
||||
if (aName === lowerQuery) return -1
|
||||
if (bName === lowerQuery) return 1
|
||||
if (aName.startsWith(lowerQuery) && !bName.startsWith(lowerQuery)) return -1
|
||||
if (bName.startsWith(lowerQuery) && !aName.startsWith(lowerQuery)) return 1
|
||||
return aName.localeCompare(bName)
|
||||
})
|
||||
.slice(0, 8)
|
||||
.map((folder) => ({
|
||||
id: `folder-match-${folder.id}`,
|
||||
value: `folder:"${folder.name}"`,
|
||||
label: folder.name,
|
||||
category: 'folder' as const,
|
||||
}))
|
||||
|
||||
return matches
|
||||
}
|
||||
}
|
||||
|
||||
104
apps/sim/lib/subscription/usage-visualization.ts
Normal file
104
apps/sim/lib/subscription/usage-visualization.ts
Normal file
@@ -0,0 +1,104 @@
|
||||
/**
|
||||
* Shared utilities for consistent usage visualization across the application.
|
||||
*
|
||||
* This module provides a single source of truth for how usage metrics are
|
||||
* displayed visually through "pills" or progress indicators.
|
||||
*/
|
||||
|
||||
/**
|
||||
* Number of pills to display in usage indicators.
|
||||
*
|
||||
* Using 8 pills provides:
|
||||
* - 12.5% granularity per pill
|
||||
* - Good balance between precision and visual clarity
|
||||
* - Consistent representation across panel and settings
|
||||
*/
|
||||
export const USAGE_PILL_COUNT = 8
|
||||
|
||||
/**
|
||||
* Color values for usage pill states
|
||||
*/
|
||||
export const USAGE_PILL_COLORS = {
|
||||
/** Unfilled pill color (gray) */
|
||||
UNFILLED: '#414141',
|
||||
/** Normal filled pill color (blue) */
|
||||
FILLED: '#34B5FF',
|
||||
/** Warning/limit reached pill color (red) */
|
||||
AT_LIMIT: '#ef4444',
|
||||
} as const
|
||||
|
||||
/**
|
||||
* Calculate the number of filled pills based on usage percentage.
|
||||
*
|
||||
* Uses Math.ceil() to ensure even minimal usage (0.01%) shows visual feedback.
|
||||
* This provides better UX by making it clear that there is some usage, even if small.
|
||||
*
|
||||
* @param percentUsed - The usage percentage (0-100). Can be a decimal (e.g., 0.315 for 0.315%)
|
||||
* @returns Number of pills that should be filled (0 to USAGE_PILL_COUNT)
|
||||
*
|
||||
* @example
|
||||
* calculateFilledPills(0.315) // Returns 1 (shows feedback for 0.315% usage)
|
||||
* calculateFilledPills(50) // Returns 4 (50% of 8 pills)
|
||||
* calculateFilledPills(100) // Returns 8 (completely filled)
|
||||
* calculateFilledPills(150) // Returns 8 (clamped to maximum)
|
||||
*/
|
||||
export function calculateFilledPills(percentUsed: number): number {
|
||||
// Clamp percentage to valid range [0, 100]
|
||||
const safePercent = Math.min(Math.max(percentUsed, 0), 100)
|
||||
|
||||
// Calculate filled pills using ceil to show feedback for any usage
|
||||
return Math.ceil((safePercent / 100) * USAGE_PILL_COUNT)
|
||||
}
|
||||
|
||||
/**
|
||||
* Determine if usage has reached the limit (all pills filled).
|
||||
*
|
||||
* @param percentUsed - The usage percentage (0-100)
|
||||
* @returns true if all pills should be filled (at or over limit)
|
||||
*/
|
||||
export function isUsageAtLimit(percentUsed: number): boolean {
|
||||
return calculateFilledPills(percentUsed) >= USAGE_PILL_COUNT
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the appropriate color for a pill based on its state.
|
||||
*
|
||||
* @param isFilled - Whether this pill should be filled
|
||||
* @param isAtLimit - Whether usage has reached the limit
|
||||
* @returns Hex color string
|
||||
*/
|
||||
export function getPillColor(isFilled: boolean, isAtLimit: boolean): string {
|
||||
if (!isFilled) return USAGE_PILL_COLORS.UNFILLED
|
||||
if (isAtLimit) return USAGE_PILL_COLORS.AT_LIMIT
|
||||
return USAGE_PILL_COLORS.FILLED
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate an array of pill states for rendering.
|
||||
*
|
||||
* @param percentUsed - The usage percentage (0-100)
|
||||
* @returns Array of pill states with colors
|
||||
*
|
||||
* @example
|
||||
* const pills = generatePillStates(50)
|
||||
* pills.forEach((pill, index) => (
|
||||
* <Pill key={index} color={pill.color} filled={pill.filled} />
|
||||
* ))
|
||||
*/
|
||||
export function generatePillStates(percentUsed: number): Array<{
|
||||
filled: boolean
|
||||
color: string
|
||||
index: number
|
||||
}> {
|
||||
const filledCount = calculateFilledPills(percentUsed)
|
||||
const atLimit = isUsageAtLimit(percentUsed)
|
||||
|
||||
return Array.from({ length: USAGE_PILL_COUNT }, (_, index) => {
|
||||
const filled = index < filledCount
|
||||
return {
|
||||
filled,
|
||||
color: getPillColor(filled, atLimit),
|
||||
index,
|
||||
}
|
||||
})
|
||||
}
|
||||
@@ -1,5 +1,4 @@
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { getBaseUrl } from '@/lib/urls/utils'
|
||||
import { isUserFile } from '@/lib/utils'
|
||||
import type { UserFile } from '@/executor/types'
|
||||
import type { ExecutionContext } from './execution-file-helpers'
|
||||
@@ -90,7 +89,9 @@ export async function uploadExecutionFile(
|
||||
}
|
||||
|
||||
try {
|
||||
const { uploadFile } = await import('@/lib/uploads/core/storage-service')
|
||||
const { uploadFile, generatePresignedDownloadUrl } = await import(
|
||||
'@/lib/uploads/core/storage-service'
|
||||
)
|
||||
const fileInfo = await uploadFile({
|
||||
file: fileBuffer,
|
||||
fileName: storageKey,
|
||||
@@ -101,16 +102,15 @@ export async function uploadExecutionFile(
|
||||
metadata, // Pass metadata for cloud storage and database tracking
|
||||
})
|
||||
|
||||
// Generate full URL for file access (useful for passing to external services)
|
||||
const baseUrl = getBaseUrl()
|
||||
const fullUrl = `${baseUrl}/api/files/serve/${fileInfo.key}`
|
||||
// Generate presigned URL for file access (10 minutes expiration)
|
||||
const fullUrl = await generatePresignedDownloadUrl(fileInfo.key, 'execution', 600)
|
||||
|
||||
const userFile: UserFile = {
|
||||
id: fileId,
|
||||
name: fileName,
|
||||
size: fileBuffer.length,
|
||||
type: contentType,
|
||||
url: fullUrl, // Full URL for external access and downstream workflow usage
|
||||
url: fullUrl, // Presigned URL for external access and downstream workflow usage
|
||||
key: fileInfo.key,
|
||||
context: 'execution', // Preserve context in file object
|
||||
}
|
||||
|
||||
@@ -78,13 +78,13 @@ export const ollamaProvider: ProviderConfig = {
|
||||
try {
|
||||
const response = await fetch(`${OLLAMA_HOST}/api/tags`)
|
||||
if (!response.ok) {
|
||||
useProvidersStore.getState().setModels('ollama', [])
|
||||
useProvidersStore.getState().setProviderModels('ollama', [])
|
||||
logger.warn('Ollama service is not available. The provider will be disabled.')
|
||||
return
|
||||
}
|
||||
const data = (await response.json()) as ModelsObject
|
||||
this.models = data.models.map((model) => model.name)
|
||||
useProvidersStore.getState().setModels('ollama', this.models)
|
||||
useProvidersStore.getState().setProviderModels('ollama', this.models)
|
||||
} catch (error) {
|
||||
logger.warn('Ollama model instantiation failed. The provider will be disabled.', {
|
||||
error: error instanceof Error ? error.message : 'Unknown error',
|
||||
|
||||
@@ -1,25 +1,12 @@
|
||||
import { create } from 'zustand'
|
||||
import { devtools } from 'zustand/middleware'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { withOptimisticUpdate } from '@/lib/utils'
|
||||
import type { CustomToolsState, CustomToolsStore } from './types'
|
||||
|
||||
const logger = createLogger('CustomToolsStore')
|
||||
const API_ENDPOINT = '/api/tools/custom'
|
||||
|
||||
class ApiError extends Error {
|
||||
status: number
|
||||
constructor(message: string, status: number) {
|
||||
super(message)
|
||||
this.status = status
|
||||
this.name = 'ApiError'
|
||||
}
|
||||
}
|
||||
|
||||
const initialState: CustomToolsState = {
|
||||
tools: [],
|
||||
isLoading: false,
|
||||
error: null,
|
||||
}
|
||||
|
||||
export const useCustomToolsStore = create<CustomToolsStore>()(
|
||||
@@ -27,218 +14,9 @@ export const useCustomToolsStore = create<CustomToolsStore>()(
|
||||
(set, get) => ({
|
||||
...initialState,
|
||||
|
||||
fetchTools: async (workspaceId: string) => {
|
||||
set({ isLoading: true, error: null })
|
||||
|
||||
try {
|
||||
logger.info(`Fetching custom tools for workspace ${workspaceId}`)
|
||||
|
||||
const response = await fetch(`${API_ENDPOINT}?workspaceId=${workspaceId}`)
|
||||
|
||||
if (!response.ok) {
|
||||
const errorData = await response.json().catch(() => ({}))
|
||||
throw new Error(
|
||||
errorData.error || `Failed to fetch custom tools: ${response.statusText}`
|
||||
)
|
||||
}
|
||||
|
||||
const { data } = await response.json()
|
||||
|
||||
if (!Array.isArray(data)) {
|
||||
throw new Error('Invalid response format')
|
||||
}
|
||||
|
||||
// Filter and validate tools
|
||||
const validTools = data.filter((tool, index) => {
|
||||
if (!tool || typeof tool !== 'object') {
|
||||
logger.warn(`Skipping invalid tool at index ${index}: not an object`)
|
||||
return false
|
||||
}
|
||||
if (!tool.id || typeof tool.id !== 'string') {
|
||||
logger.warn(`Skipping invalid tool at index ${index}: missing or invalid id`)
|
||||
return false
|
||||
}
|
||||
if (!tool.title || typeof tool.title !== 'string') {
|
||||
logger.warn(`Skipping invalid tool at index ${index}: missing or invalid title`)
|
||||
return false
|
||||
}
|
||||
if (!tool.schema || typeof tool.schema !== 'object') {
|
||||
logger.warn(`Skipping invalid tool at index ${index}: missing or invalid schema`)
|
||||
return false
|
||||
}
|
||||
if (!tool.code || typeof tool.code !== 'string') {
|
||||
logger.warn(`Tool at index ${index} missing code field, defaulting to empty string`)
|
||||
tool.code = ''
|
||||
}
|
||||
return true
|
||||
})
|
||||
|
||||
set({
|
||||
tools: validTools,
|
||||
isLoading: false,
|
||||
})
|
||||
|
||||
logger.info(`Fetched ${validTools.length} custom tools for workspace ${workspaceId}`)
|
||||
} catch (error) {
|
||||
const errorMessage = error instanceof Error ? error.message : 'Failed to fetch tools'
|
||||
logger.error('Error fetching custom tools:', error)
|
||||
set({
|
||||
error: errorMessage,
|
||||
isLoading: false,
|
||||
})
|
||||
}
|
||||
},
|
||||
|
||||
createTool: async (workspaceId: string, tool) => {
|
||||
set({ isLoading: true, error: null })
|
||||
|
||||
try {
|
||||
logger.info(`Creating custom tool: ${tool.title} in workspace ${workspaceId}`)
|
||||
|
||||
const response = await fetch(API_ENDPOINT, {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({
|
||||
tools: [
|
||||
{
|
||||
title: tool.title,
|
||||
schema: tool.schema,
|
||||
code: tool.code,
|
||||
},
|
||||
],
|
||||
workspaceId,
|
||||
}),
|
||||
})
|
||||
|
||||
const data = await response.json()
|
||||
|
||||
if (!response.ok) {
|
||||
throw new ApiError(data.error || 'Failed to create tool', response.status)
|
||||
}
|
||||
|
||||
if (!data.data || !Array.isArray(data.data)) {
|
||||
throw new Error('Invalid API response: missing tools data')
|
||||
}
|
||||
|
||||
set({ tools: data.data, isLoading: false })
|
||||
|
||||
const createdTool = get().tools.find((t) => t.title === tool.title)
|
||||
if (!createdTool) {
|
||||
throw new Error('Failed to retrieve created tool')
|
||||
}
|
||||
|
||||
logger.info(`Created custom tool: ${createdTool.id}`)
|
||||
return createdTool
|
||||
} catch (error) {
|
||||
logger.error('Error creating custom tool:', error)
|
||||
set({ isLoading: false })
|
||||
throw error
|
||||
}
|
||||
},
|
||||
|
||||
updateTool: async (workspaceId: string, id: string, updates) => {
|
||||
const tool = get().tools.find((t) => t.id === id)
|
||||
if (!tool) {
|
||||
throw new Error('Tool not found')
|
||||
}
|
||||
|
||||
await withOptimisticUpdate({
|
||||
getCurrentState: () => get().tools,
|
||||
optimisticUpdate: () => {
|
||||
set((state) => ({
|
||||
tools: state.tools.map((t) =>
|
||||
t.id === id
|
||||
? {
|
||||
...t,
|
||||
title: updates.title ?? t.title,
|
||||
schema: updates.schema ?? t.schema,
|
||||
code: updates.code ?? t.code,
|
||||
}
|
||||
: t
|
||||
),
|
||||
isLoading: true,
|
||||
error: null,
|
||||
}))
|
||||
},
|
||||
apiCall: async () => {
|
||||
logger.info(`Updating custom tool: ${id} in workspace ${workspaceId}`)
|
||||
|
||||
const response = await fetch(API_ENDPOINT, {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({
|
||||
tools: [
|
||||
{
|
||||
id,
|
||||
title: updates.title ?? tool.title,
|
||||
schema: updates.schema ?? tool.schema,
|
||||
code: updates.code ?? tool.code,
|
||||
},
|
||||
],
|
||||
workspaceId,
|
||||
}),
|
||||
})
|
||||
|
||||
const data = await response.json()
|
||||
|
||||
if (!response.ok) {
|
||||
throw new ApiError(data.error || 'Failed to update tool', response.status)
|
||||
}
|
||||
|
||||
if (!data.data || !Array.isArray(data.data)) {
|
||||
throw new Error('Invalid API response: missing tools data')
|
||||
}
|
||||
|
||||
set({ tools: data.data })
|
||||
logger.info(`Updated custom tool: ${id}`)
|
||||
},
|
||||
rollback: (originalTools) => {
|
||||
set({ tools: originalTools })
|
||||
},
|
||||
onComplete: () => {
|
||||
set({ isLoading: false })
|
||||
},
|
||||
errorMessage: 'Error updating custom tool',
|
||||
})
|
||||
},
|
||||
|
||||
deleteTool: async (workspaceId: string | null, id: string) => {
|
||||
await withOptimisticUpdate({
|
||||
getCurrentState: () => get().tools,
|
||||
optimisticUpdate: () => {
|
||||
set((state) => ({
|
||||
tools: state.tools.filter((tool) => tool.id !== id),
|
||||
isLoading: true,
|
||||
error: null,
|
||||
}))
|
||||
},
|
||||
apiCall: async () => {
|
||||
logger.info(`Deleting custom tool: ${id}`)
|
||||
|
||||
const url = workspaceId
|
||||
? `${API_ENDPOINT}?id=${id}&workspaceId=${workspaceId}`
|
||||
: `${API_ENDPOINT}?id=${id}`
|
||||
|
||||
const response = await fetch(url, {
|
||||
method: 'DELETE',
|
||||
})
|
||||
|
||||
const data = await response.json()
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(data.error || 'Failed to delete tool')
|
||||
}
|
||||
|
||||
logger.info(`Deleted custom tool: ${id}`)
|
||||
},
|
||||
rollback: (originalTools) => {
|
||||
set({ tools: originalTools })
|
||||
},
|
||||
onComplete: () => {
|
||||
set({ isLoading: false })
|
||||
},
|
||||
errorMessage: 'Error deleting custom tool',
|
||||
})
|
||||
setTools: (tools) => {
|
||||
logger.info(`Synced ${tools.length} custom tools`)
|
||||
set({ tools })
|
||||
},
|
||||
|
||||
getTool: (id: string) => {
|
||||
@@ -249,8 +27,6 @@ export const useCustomToolsStore = create<CustomToolsStore>()(
|
||||
return get().tools
|
||||
},
|
||||
|
||||
clearError: () => set({ error: null }),
|
||||
|
||||
reset: () => set(initialState),
|
||||
}),
|
||||
{
|
||||
|
||||
@@ -24,27 +24,12 @@ export interface CustomToolDefinition {
|
||||
|
||||
export interface CustomToolsState {
|
||||
tools: CustomToolDefinition[]
|
||||
isLoading: boolean
|
||||
error: string | null
|
||||
}
|
||||
|
||||
export interface CustomToolsActions {
|
||||
fetchTools: (workspaceId: string) => Promise<void>
|
||||
createTool: (
|
||||
workspaceId: string,
|
||||
tool: Omit<CustomToolDefinition, 'id' | 'workspaceId' | 'userId' | 'createdAt' | 'updatedAt'>
|
||||
) => Promise<CustomToolDefinition>
|
||||
updateTool: (
|
||||
workspaceId: string,
|
||||
id: string,
|
||||
updates: Partial<
|
||||
Omit<CustomToolDefinition, 'id' | 'workspaceId' | 'userId' | 'createdAt' | 'updatedAt'>
|
||||
>
|
||||
) => Promise<void>
|
||||
deleteTool: (workspaceId: string | null, id: string) => Promise<void>
|
||||
setTools: (tools: CustomToolDefinition[]) => void
|
||||
getTool: (id: string) => CustomToolDefinition | undefined
|
||||
getAllTools: () => CustomToolDefinition[]
|
||||
clearError: () => void
|
||||
reset: () => void
|
||||
}
|
||||
|
||||
|
||||
@@ -55,11 +55,20 @@ export const useExecutionStore = create<ExecutionState & ExecutionActions>()((se
|
||||
// Reset auto-pan disabled state when starting execution
|
||||
if (isExecuting) {
|
||||
set({ autoPanDisabled: false })
|
||||
// Clear run path when starting a new execution
|
||||
set({ lastRunPath: new Map() })
|
||||
}
|
||||
},
|
||||
setIsDebugging: (isDebugging) => set({ isDebugging }),
|
||||
setExecutor: (executor) => set({ executor }),
|
||||
setDebugContext: (debugContext) => set({ debugContext }),
|
||||
setAutoPanDisabled: (disabled) => set({ autoPanDisabled: disabled }),
|
||||
setBlockRunStatus: (blockId, status) => {
|
||||
const { lastRunPath } = get()
|
||||
const newRunPath = new Map(lastRunPath)
|
||||
newRunPath.set(blockId, status)
|
||||
set({ lastRunPath: newRunPath })
|
||||
},
|
||||
clearRunPath: () => set({ lastRunPath: new Map() }),
|
||||
reset: () => set(initialState),
|
||||
}))
|
||||
|
||||
@@ -1,6 +1,11 @@
|
||||
import type { Executor } from '@/executor'
|
||||
import type { ExecutionContext } from '@/executor/types'
|
||||
|
||||
/**
|
||||
* Represents the execution result of a block in the last run
|
||||
*/
|
||||
export type BlockRunStatus = 'success' | 'error'
|
||||
|
||||
export interface ExecutionState {
|
||||
activeBlockIds: Set<string>
|
||||
isExecuting: boolean
|
||||
@@ -9,6 +14,11 @@ export interface ExecutionState {
|
||||
executor: Executor | null
|
||||
debugContext: ExecutionContext | null
|
||||
autoPanDisabled: boolean
|
||||
/**
|
||||
* Tracks blocks from the last execution run and their success/error status.
|
||||
* Cleared when a new run starts. Used to show run path indicators (green/red rings).
|
||||
*/
|
||||
lastRunPath: Map<string, BlockRunStatus>
|
||||
}
|
||||
|
||||
export interface ExecutionActions {
|
||||
@@ -19,6 +29,8 @@ export interface ExecutionActions {
|
||||
setExecutor: (executor: Executor | null) => void
|
||||
setDebugContext: (context: ExecutionContext | null) => void
|
||||
setAutoPanDisabled: (disabled: boolean) => void
|
||||
setBlockRunStatus: (blockId: string, status: BlockRunStatus) => void
|
||||
clearRunPath: () => void
|
||||
reset: () => void
|
||||
}
|
||||
|
||||
@@ -30,6 +42,7 @@ export const initialState: ExecutionState = {
|
||||
executor: null,
|
||||
debugContext: null,
|
||||
autoPanDisabled: false,
|
||||
lastRunPath: new Map(),
|
||||
}
|
||||
|
||||
// Types for panning functionality
|
||||
|
||||
@@ -1,8 +1,6 @@
|
||||
import { create } from 'zustand'
|
||||
import { devtools } from 'zustand/middleware'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { withOptimisticUpdate } from '@/lib/utils'
|
||||
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
||||
|
||||
const logger = createLogger('FoldersStore')
|
||||
|
||||
@@ -36,16 +34,10 @@ export interface FolderTreeNode extends WorkflowFolder {
|
||||
|
||||
interface FolderState {
|
||||
folders: Record<string, WorkflowFolder>
|
||||
isLoading: boolean
|
||||
expandedFolders: Set<string>
|
||||
selectedWorkflows: Set<string>
|
||||
|
||||
// Actions
|
||||
setFolders: (folders: WorkflowFolder[]) => void
|
||||
addFolder: (folder: WorkflowFolder) => void
|
||||
updateFolder: (id: string, updates: Partial<WorkflowFolder>) => void
|
||||
removeFolder: (id: string) => void
|
||||
setLoading: (loading: boolean) => void
|
||||
toggleExpanded: (folderId: string) => void
|
||||
setExpanded: (folderId: string, expanded: boolean) => void
|
||||
|
||||
@@ -63,29 +55,12 @@ interface FolderState {
|
||||
getFolderById: (id: string) => WorkflowFolder | undefined
|
||||
getChildFolders: (parentId: string | null) => WorkflowFolder[]
|
||||
getFolderPath: (folderId: string) => WorkflowFolder[]
|
||||
|
||||
// API actions
|
||||
fetchFolders: (workspaceId: string) => Promise<void>
|
||||
createFolder: (data: {
|
||||
name: string
|
||||
workspaceId: string
|
||||
parentId?: string
|
||||
color?: string
|
||||
}) => Promise<WorkflowFolder>
|
||||
updateFolderAPI: (id: string, updates: Partial<WorkflowFolder>) => Promise<WorkflowFolder>
|
||||
deleteFolder: (id: string, workspaceId: string) => Promise<void>
|
||||
duplicateFolder: (id: string) => Promise<string | null>
|
||||
|
||||
// Helper functions
|
||||
isWorkflowInDeletedSubfolder: (workflow: Workflow, deletedFolderId: string) => boolean
|
||||
removeSubfoldersRecursively: (parentFolderId: string) => void
|
||||
}
|
||||
|
||||
export const useFolderStore = create<FolderState>()(
|
||||
devtools(
|
||||
(set, get) => ({
|
||||
folders: {},
|
||||
isLoading: false,
|
||||
expandedFolders: new Set(),
|
||||
selectedWorkflows: new Set(),
|
||||
|
||||
@@ -100,28 +75,6 @@ export const useFolderStore = create<FolderState>()(
|
||||
),
|
||||
})),
|
||||
|
||||
addFolder: (folder) =>
|
||||
set((state) => ({
|
||||
folders: { ...state.folders, [folder.id]: folder },
|
||||
})),
|
||||
|
||||
updateFolder: (id, updates) =>
|
||||
set((state) => ({
|
||||
folders: {
|
||||
...state.folders,
|
||||
[id]: state.folders[id] ? { ...state.folders[id], ...updates } : state.folders[id],
|
||||
},
|
||||
})),
|
||||
|
||||
removeFolder: (id) =>
|
||||
set((state) => {
|
||||
const newFolders = { ...state.folders }
|
||||
delete newFolders[id]
|
||||
return { folders: newFolders }
|
||||
}),
|
||||
|
||||
setLoading: (loading) => set({ isLoading: loading }),
|
||||
|
||||
toggleExpanded: (folderId) =>
|
||||
set((state) => {
|
||||
const newExpanded = new Set(state.expandedFolders)
|
||||
@@ -225,235 +178,6 @@ export const useFolderStore = create<FolderState>()(
|
||||
|
||||
return path
|
||||
},
|
||||
|
||||
fetchFolders: async (workspaceId) => {
|
||||
set({ isLoading: true })
|
||||
try {
|
||||
const response = await fetch(`/api/folders?workspaceId=${workspaceId}`)
|
||||
if (!response.ok) {
|
||||
throw new Error('Failed to fetch folders')
|
||||
}
|
||||
const { folders }: { folders: any[] } = await response.json()
|
||||
|
||||
// Convert date strings to Date objects
|
||||
const processedFolders: WorkflowFolder[] = folders.map((folder: any) => ({
|
||||
id: folder.id,
|
||||
name: folder.name,
|
||||
userId: folder.userId,
|
||||
workspaceId: folder.workspaceId,
|
||||
parentId: folder.parentId,
|
||||
color: folder.color,
|
||||
isExpanded: folder.isExpanded,
|
||||
sortOrder: folder.sortOrder,
|
||||
createdAt: new Date(folder.createdAt),
|
||||
updatedAt: new Date(folder.updatedAt),
|
||||
}))
|
||||
|
||||
get().setFolders(processedFolders)
|
||||
|
||||
// Start with all folders collapsed - only active workflow path will be expanded by the UI
|
||||
set({ expandedFolders: new Set() })
|
||||
} catch (error) {
|
||||
logger.error('Error fetching folders:', error)
|
||||
} finally {
|
||||
set({ isLoading: false })
|
||||
}
|
||||
},
|
||||
|
||||
createFolder: async (data) => {
|
||||
const response = await fetch('/api/folders', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify(data),
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
const error = await response.json()
|
||||
throw new Error(error.error || 'Failed to create folder')
|
||||
}
|
||||
|
||||
const { folder } = await response.json()
|
||||
const processedFolder = {
|
||||
...folder,
|
||||
createdAt: new Date(folder.createdAt),
|
||||
updatedAt: new Date(folder.updatedAt),
|
||||
}
|
||||
|
||||
get().addFolder(processedFolder)
|
||||
return processedFolder
|
||||
},
|
||||
|
||||
updateFolderAPI: async (id, updates) => {
|
||||
const originalFolder = get().folders[id]
|
||||
if (!originalFolder) {
|
||||
throw new Error('Folder not found')
|
||||
}
|
||||
|
||||
let updatedFolder: WorkflowFolder | null = null
|
||||
|
||||
await withOptimisticUpdate({
|
||||
getCurrentState: () => originalFolder,
|
||||
optimisticUpdate: () => {
|
||||
get().updateFolder(id, { ...updates, updatedAt: new Date() })
|
||||
},
|
||||
apiCall: async () => {
|
||||
const response = await fetch(`/api/folders/${id}`, {
|
||||
method: 'PUT',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify(updates),
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
const error = await response.json()
|
||||
throw new Error(error.error || 'Failed to update folder')
|
||||
}
|
||||
|
||||
const { folder } = await response.json()
|
||||
const processedFolder = {
|
||||
...folder,
|
||||
createdAt: new Date(folder.createdAt),
|
||||
updatedAt: new Date(folder.updatedAt),
|
||||
}
|
||||
|
||||
get().updateFolder(id, processedFolder)
|
||||
updatedFolder = processedFolder
|
||||
},
|
||||
rollback: (original) => {
|
||||
get().updateFolder(id, original)
|
||||
},
|
||||
errorMessage: 'Failed to update folder',
|
||||
})
|
||||
|
||||
return updatedFolder || { ...originalFolder, ...updates }
|
||||
},
|
||||
|
||||
deleteFolder: async (id: string, workspaceId: string) => {
|
||||
const getAllSubfolderIds = (parentId: string): string[] => {
|
||||
const folders = get().folders
|
||||
const childIds = Object.keys(folders).filter(
|
||||
(folderId) => folders[folderId].parentId === parentId
|
||||
)
|
||||
const allIds = [...childIds]
|
||||
|
||||
childIds.forEach((childId) => {
|
||||
allIds.push(...getAllSubfolderIds(childId))
|
||||
})
|
||||
|
||||
return allIds
|
||||
}
|
||||
|
||||
const deletedFolderIds = [id, ...getAllSubfolderIds(id)]
|
||||
|
||||
await withOptimisticUpdate({
|
||||
getCurrentState: () => ({
|
||||
folders: { ...get().folders },
|
||||
expandedFolders: new Set(get().expandedFolders),
|
||||
}),
|
||||
optimisticUpdate: () => {
|
||||
deletedFolderIds.forEach((folderId) => {
|
||||
get().removeFolder(folderId)
|
||||
})
|
||||
|
||||
set((state) => {
|
||||
const newExpanded = new Set(state.expandedFolders)
|
||||
deletedFolderIds.forEach((folderId) => newExpanded.delete(folderId))
|
||||
return { expandedFolders: newExpanded }
|
||||
})
|
||||
},
|
||||
apiCall: async () => {
|
||||
const response = await fetch(`/api/folders/${id}`, { method: 'DELETE' })
|
||||
|
||||
if (!response.ok) {
|
||||
const error = await response.json()
|
||||
throw new Error(error.error || 'Failed to delete folder')
|
||||
}
|
||||
|
||||
const responseData = await response.json()
|
||||
logger.info(
|
||||
`Deleted ${responseData.deletedItems.workflows} workflow(s) and ${responseData.deletedItems.folders} folder(s)`
|
||||
)
|
||||
|
||||
const workflowRegistry = useWorkflowRegistry.getState()
|
||||
await workflowRegistry.loadWorkflows(workspaceId)
|
||||
},
|
||||
rollback: (originalState) => {
|
||||
set({ folders: originalState.folders, expandedFolders: originalState.expandedFolders })
|
||||
},
|
||||
errorMessage: 'Failed to delete folder',
|
||||
})
|
||||
},
|
||||
|
||||
duplicateFolder: async (id: string) => {
|
||||
const sourceFolder = get().folders[id]
|
||||
if (!sourceFolder) {
|
||||
logger.error(`Folder ${id} not found`)
|
||||
return null
|
||||
}
|
||||
|
||||
try {
|
||||
const response = await fetch(`/api/folders/${id}/duplicate`, {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({
|
||||
name: `${sourceFolder.name} (Copy)`,
|
||||
workspaceId: sourceFolder.workspaceId,
|
||||
parentId: sourceFolder.parentId,
|
||||
color: sourceFolder.color,
|
||||
}),
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
const error = await response.json()
|
||||
throw new Error(error.error || 'Failed to duplicate folder')
|
||||
}
|
||||
|
||||
const result = await response.json()
|
||||
|
||||
logger.info(
|
||||
`Successfully duplicated folder ${id} to ${result.id} with ${result.foldersCount} folder(s) and ${result.workflowsCount} workflow(s)`
|
||||
)
|
||||
|
||||
// Reload folders and workflows to reflect the duplication
|
||||
const workflowRegistry = useWorkflowRegistry.getState()
|
||||
await Promise.all([
|
||||
get().fetchFolders(sourceFolder.workspaceId),
|
||||
workflowRegistry.loadWorkflows(sourceFolder.workspaceId),
|
||||
])
|
||||
|
||||
return result.id
|
||||
} catch (error) {
|
||||
logger.error(`Failed to duplicate folder ${id}:`, error)
|
||||
throw error
|
||||
}
|
||||
},
|
||||
|
||||
isWorkflowInDeletedSubfolder: (workflow: Workflow, deletedFolderId: string) => {
|
||||
if (!workflow.folderId) return false
|
||||
|
||||
const folders = get().folders
|
||||
let currentFolderId: string | null = workflow.folderId
|
||||
|
||||
while (currentFolderId && folders[currentFolderId]) {
|
||||
if (currentFolderId === deletedFolderId) {
|
||||
return true
|
||||
}
|
||||
currentFolderId = folders[currentFolderId].parentId
|
||||
}
|
||||
|
||||
return false
|
||||
},
|
||||
|
||||
removeSubfoldersRecursively: (parentFolderId: string) => {
|
||||
const folders = get().folders
|
||||
const childFolderIds = Object.keys(folders).filter(
|
||||
(id) => folders[id].parentId === parentFolderId
|
||||
)
|
||||
|
||||
childFolderIds.forEach((childId) => {
|
||||
get().removeSubfoldersRecursively(childId)
|
||||
get().removeFolder(childId)
|
||||
})
|
||||
},
|
||||
}),
|
||||
{ name: 'folder-store' }
|
||||
)
|
||||
|
||||
@@ -1,50 +1,9 @@
|
||||
import { create } from 'zustand'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { updateOllamaProviderModels, updateOpenRouterProviderModels } from '@/providers/utils'
|
||||
import type { ProviderConfig, ProviderName, ProvidersStore } from './types'
|
||||
import type { ProvidersStore } from './types'
|
||||
|
||||
const logger = createLogger('ProvidersStore')
|
||||
|
||||
const PROVIDER_CONFIGS: Record<ProviderName, ProviderConfig> = {
|
||||
base: {
|
||||
apiEndpoint: '/api/providers/base/models',
|
||||
dedupeModels: true,
|
||||
updateFunction: () => {},
|
||||
},
|
||||
ollama: {
|
||||
apiEndpoint: '/api/providers/ollama/models',
|
||||
updateFunction: updateOllamaProviderModels,
|
||||
},
|
||||
openrouter: {
|
||||
apiEndpoint: '/api/providers/openrouter/models',
|
||||
dedupeModels: true,
|
||||
updateFunction: updateOpenRouterProviderModels,
|
||||
},
|
||||
}
|
||||
|
||||
const fetchProviderModels = async (provider: ProviderName): Promise<string[]> => {
|
||||
try {
|
||||
const config = PROVIDER_CONFIGS[provider]
|
||||
const response = await fetch(config.apiEndpoint)
|
||||
|
||||
if (!response.ok) {
|
||||
logger.warn(`Failed to fetch ${provider} models from API`, {
|
||||
status: response.status,
|
||||
statusText: response.statusText,
|
||||
})
|
||||
return []
|
||||
}
|
||||
|
||||
const data = await response.json()
|
||||
return data.models || []
|
||||
} catch (error) {
|
||||
logger.error(`Error fetching ${provider} models`, {
|
||||
error: error instanceof Error ? error.message : 'Unknown error',
|
||||
})
|
||||
return []
|
||||
}
|
||||
}
|
||||
|
||||
export const useProvidersStore = create<ProvidersStore>((set, get) => ({
|
||||
providers: {
|
||||
base: { models: [], isLoading: false },
|
||||
@@ -52,86 +11,32 @@ export const useProvidersStore = create<ProvidersStore>((set, get) => ({
|
||||
openrouter: { models: [], isLoading: false },
|
||||
},
|
||||
|
||||
setModels: (provider, models) => {
|
||||
const config = PROVIDER_CONFIGS[provider]
|
||||
|
||||
const processedModels = config.dedupeModels ? Array.from(new Set(models)) : models
|
||||
|
||||
setProviderModels: (provider, models) => {
|
||||
logger.info(`Updated ${provider} models`, { count: models.length })
|
||||
set((state) => ({
|
||||
providers: {
|
||||
...state.providers,
|
||||
[provider]: {
|
||||
...state.providers[provider],
|
||||
models: processedModels,
|
||||
models,
|
||||
},
|
||||
},
|
||||
}))
|
||||
|
||||
config.updateFunction(models)
|
||||
},
|
||||
|
||||
fetchModels: async (provider) => {
|
||||
if (typeof window === 'undefined') {
|
||||
logger.info(`Skipping client-side ${provider} model fetch on server`)
|
||||
return
|
||||
}
|
||||
|
||||
const currentState = get().providers[provider]
|
||||
if (currentState.isLoading) {
|
||||
logger.info(`${provider} model fetch already in progress`)
|
||||
return
|
||||
}
|
||||
if (currentState.models.length > 0) {
|
||||
logger.info(`Skipping ${provider} model fetch - models already loaded`)
|
||||
return
|
||||
}
|
||||
|
||||
logger.info(`Fetching ${provider} models from API`)
|
||||
|
||||
setProviderLoading: (provider, isLoading) => {
|
||||
set((state) => ({
|
||||
providers: {
|
||||
...state.providers,
|
||||
[provider]: {
|
||||
...state.providers[provider],
|
||||
isLoading: true,
|
||||
isLoading,
|
||||
},
|
||||
},
|
||||
}))
|
||||
|
||||
try {
|
||||
const models = await fetchProviderModels(provider)
|
||||
logger.info(`Successfully fetched ${provider} models`, {
|
||||
count: models.length,
|
||||
...(provider === 'ollama' ? { models } : {}),
|
||||
})
|
||||
get().setModels(provider, models)
|
||||
} catch (error) {
|
||||
logger.error(`Failed to fetch ${provider} models`, {
|
||||
error: error instanceof Error ? error.message : 'Unknown error',
|
||||
})
|
||||
} finally {
|
||||
set((state) => ({
|
||||
providers: {
|
||||
...state.providers,
|
||||
[provider]: {
|
||||
...state.providers[provider],
|
||||
isLoading: false,
|
||||
},
|
||||
},
|
||||
}))
|
||||
}
|
||||
},
|
||||
|
||||
getProvider: (provider) => {
|
||||
return get().providers[provider]
|
||||
},
|
||||
}))
|
||||
|
||||
if (typeof window !== 'undefined') {
|
||||
setTimeout(() => {
|
||||
const store = useProvidersStore.getState()
|
||||
store.fetchModels('base')
|
||||
store.fetchModels('ollama')
|
||||
store.fetchModels('openrouter')
|
||||
}, 1000)
|
||||
}
|
||||
|
||||
@@ -7,13 +7,7 @@ export interface ProviderState {
|
||||
|
||||
export interface ProvidersStore {
|
||||
providers: Record<ProviderName, ProviderState>
|
||||
setModels: (provider: ProviderName, models: string[]) => void
|
||||
fetchModels: (provider: ProviderName) => Promise<void>
|
||||
setProviderModels: (provider: ProviderName, models: string[]) => void
|
||||
setProviderLoading: (provider: ProviderName, isLoading: boolean) => void
|
||||
getProvider: (provider: ProviderName) => ProviderState
|
||||
}
|
||||
|
||||
export interface ProviderConfig {
|
||||
apiEndpoint: string
|
||||
dedupeModels?: boolean
|
||||
updateFunction: (models: string[]) => void | Promise<void>
|
||||
}
|
||||
|
||||
40
apps/sim/stores/search-modal/store.ts
Normal file
40
apps/sim/stores/search-modal/store.ts
Normal file
@@ -0,0 +1,40 @@
|
||||
import { create } from 'zustand'
|
||||
|
||||
/**
|
||||
* Global state for the universal search modal.
|
||||
*
|
||||
* Centralizing this state in a store allows any component (e.g. sidebar,
|
||||
* workflow command list, keyboard shortcuts) to open or close the modal
|
||||
* without relying on DOM events or prop drilling.
|
||||
*/
|
||||
interface SearchModalState {
|
||||
/** Whether the search modal is currently open. */
|
||||
isOpen: boolean
|
||||
/**
|
||||
* Explicitly set the open state of the modal.
|
||||
*
|
||||
* @param open - New open state.
|
||||
*/
|
||||
setOpen: (open: boolean) => void
|
||||
/**
|
||||
* Convenience method to open the modal.
|
||||
*/
|
||||
open: () => void
|
||||
/**
|
||||
* Convenience method to close the modal.
|
||||
*/
|
||||
close: () => void
|
||||
}
|
||||
|
||||
export const useSearchModalStore = create<SearchModalState>((set) => ({
|
||||
isOpen: false,
|
||||
setOpen: (open: boolean) => {
|
||||
set({ isOpen: open })
|
||||
},
|
||||
open: () => {
|
||||
set({ isOpen: true })
|
||||
},
|
||||
close: () => {
|
||||
set({ isOpen: false })
|
||||
},
|
||||
}))
|
||||
@@ -1,12 +1,7 @@
|
||||
import { create } from 'zustand'
|
||||
import { fetchPersonalEnvironment } from '@/lib/environment/api'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { withOptimisticUpdate } from '@/lib/utils'
|
||||
import { API_ENDPOINTS } from '@/stores/constants'
|
||||
import type {
|
||||
CachedWorkspaceEnvData,
|
||||
EnvironmentStore,
|
||||
EnvironmentVariable,
|
||||
} from '@/stores/settings/environment/types'
|
||||
import type { EnvironmentStore, EnvironmentVariable } from '@/stores/settings/environment/types'
|
||||
|
||||
const logger = createLogger('EnvironmentStore')
|
||||
|
||||
@@ -14,187 +9,35 @@ export const useEnvironmentStore = create<EnvironmentStore>()((set, get) => ({
|
||||
variables: {},
|
||||
isLoading: false,
|
||||
error: null,
|
||||
workspaceEnvCache: new Map<string, CachedWorkspaceEnvData>(),
|
||||
|
||||
loadEnvironmentVariables: async () => {
|
||||
try {
|
||||
set({ isLoading: true, error: null })
|
||||
|
||||
const response = await fetch(API_ENDPOINTS.ENVIRONMENT)
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`Failed to load environment variables: ${response.statusText}`)
|
||||
}
|
||||
|
||||
const { data } = await response.json()
|
||||
|
||||
if (data && typeof data === 'object') {
|
||||
set({
|
||||
variables: data,
|
||||
isLoading: false,
|
||||
})
|
||||
} else {
|
||||
set({
|
||||
variables: {},
|
||||
isLoading: false,
|
||||
})
|
||||
}
|
||||
const data = await fetchPersonalEnvironment()
|
||||
set({ variables: data, isLoading: false })
|
||||
} catch (error) {
|
||||
logger.error('Error loading environment variables:', { error })
|
||||
set({
|
||||
error: error instanceof Error ? error.message : 'Unknown error',
|
||||
isLoading: false,
|
||||
})
|
||||
throw error
|
||||
}
|
||||
},
|
||||
|
||||
saveEnvironmentVariables: async (variables: Record<string, string>) => {
|
||||
const transformedVariables = Object.entries(variables).reduce(
|
||||
(acc, [key, value]) => ({
|
||||
...acc,
|
||||
[key]: { key, value },
|
||||
}),
|
||||
{}
|
||||
)
|
||||
|
||||
await withOptimisticUpdate({
|
||||
getCurrentState: () => get().variables,
|
||||
optimisticUpdate: () => {
|
||||
set({ variables: transformedVariables, isLoading: true, error: null })
|
||||
},
|
||||
apiCall: async () => {
|
||||
const response = await fetch(API_ENDPOINTS.ENVIRONMENT, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
body: JSON.stringify({
|
||||
variables: Object.entries(transformedVariables).reduce(
|
||||
(acc, [key, value]) => ({
|
||||
...acc,
|
||||
[key]: (value as EnvironmentVariable).value,
|
||||
}),
|
||||
{}
|
||||
),
|
||||
}),
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`Failed to save environment variables: ${response.statusText}`)
|
||||
}
|
||||
|
||||
get().clearWorkspaceEnvCache()
|
||||
},
|
||||
rollback: (originalVariables) => {
|
||||
set({ variables: originalVariables })
|
||||
},
|
||||
onComplete: () => {
|
||||
set({ isLoading: false })
|
||||
},
|
||||
errorMessage: 'Error saving environment variables',
|
||||
})
|
||||
setVariables: (variables: Record<string, EnvironmentVariable>) => {
|
||||
set({ variables })
|
||||
},
|
||||
|
||||
loadWorkspaceEnvironment: async (workspaceId: string) => {
|
||||
const cached = get().workspaceEnvCache.get(workspaceId)
|
||||
if (cached) {
|
||||
return {
|
||||
workspace: cached.workspace,
|
||||
personal: cached.personal,
|
||||
conflicts: cached.conflicts,
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
set({ isLoading: true, error: null })
|
||||
|
||||
const response = await fetch(API_ENDPOINTS.WORKSPACE_ENVIRONMENT(workspaceId))
|
||||
if (!response.ok) {
|
||||
throw new Error(`Failed to load workspace environment: ${response.statusText}`)
|
||||
}
|
||||
|
||||
const { data } = await response.json()
|
||||
const envData = data as {
|
||||
workspace: Record<string, string>
|
||||
personal: Record<string, string>
|
||||
conflicts: string[]
|
||||
}
|
||||
|
||||
const cache = new Map(get().workspaceEnvCache)
|
||||
cache.set(workspaceId, {
|
||||
...envData,
|
||||
cachedAt: Date.now(),
|
||||
})
|
||||
set({ workspaceEnvCache: cache, isLoading: false })
|
||||
|
||||
return envData
|
||||
} catch (error) {
|
||||
logger.error('Error loading workspace environment:', { error })
|
||||
set({ error: error instanceof Error ? error.message : 'Unknown error', isLoading: false })
|
||||
return { workspace: {}, personal: {}, conflicts: [] }
|
||||
}
|
||||
},
|
||||
|
||||
upsertWorkspaceEnvironment: async (workspaceId: string, variables: Record<string, string>) => {
|
||||
try {
|
||||
set({ isLoading: true, error: null })
|
||||
const response = await fetch(API_ENDPOINTS.WORKSPACE_ENVIRONMENT(workspaceId), {
|
||||
method: 'PUT',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({ variables }),
|
||||
})
|
||||
if (!response.ok) {
|
||||
throw new Error(`Failed to update workspace environment: ${response.statusText}`)
|
||||
}
|
||||
set({ isLoading: false })
|
||||
|
||||
get().clearWorkspaceEnvCache(workspaceId)
|
||||
} catch (error) {
|
||||
logger.error('Error updating workspace environment:', { error })
|
||||
set({ error: error instanceof Error ? error.message : 'Unknown error', isLoading: false })
|
||||
}
|
||||
},
|
||||
|
||||
removeWorkspaceEnvironmentKeys: async (workspaceId: string, keys: string[]) => {
|
||||
try {
|
||||
set({ isLoading: true, error: null })
|
||||
const response = await fetch(API_ENDPOINTS.WORKSPACE_ENVIRONMENT(workspaceId), {
|
||||
method: 'DELETE',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({ keys }),
|
||||
})
|
||||
if (!response.ok) {
|
||||
throw new Error(`Failed to remove workspace environment keys: ${response.statusText}`)
|
||||
}
|
||||
set({ isLoading: false })
|
||||
|
||||
get().clearWorkspaceEnvCache(workspaceId)
|
||||
} catch (error) {
|
||||
logger.error('Error removing workspace environment keys:', { error })
|
||||
set({ error: error instanceof Error ? error.message : 'Unknown error', isLoading: false })
|
||||
}
|
||||
},
|
||||
|
||||
getAllVariables: (): Record<string, EnvironmentVariable> => {
|
||||
getAllVariables: () => {
|
||||
return get().variables
|
||||
},
|
||||
|
||||
clearWorkspaceEnvCache: (workspaceId?: string) => {
|
||||
const cache = new Map(get().workspaceEnvCache)
|
||||
if (workspaceId) {
|
||||
cache.delete(workspaceId)
|
||||
set({ workspaceEnvCache: cache })
|
||||
} else {
|
||||
set({ workspaceEnvCache: new Map() })
|
||||
}
|
||||
},
|
||||
|
||||
reset: () => {
|
||||
set({
|
||||
variables: {},
|
||||
isLoading: false,
|
||||
error: null,
|
||||
workspaceEnvCache: new Map(),
|
||||
})
|
||||
},
|
||||
}))
|
||||
|
||||
@@ -14,25 +14,11 @@ export interface EnvironmentState {
|
||||
variables: Record<string, EnvironmentVariable>
|
||||
isLoading: boolean
|
||||
error: string | null
|
||||
workspaceEnvCache: Map<string, CachedWorkspaceEnvData>
|
||||
}
|
||||
|
||||
export interface EnvironmentStore extends EnvironmentState {
|
||||
loadEnvironmentVariables: () => Promise<void>
|
||||
saveEnvironmentVariables: (variables: Record<string, string>) => Promise<void>
|
||||
|
||||
loadWorkspaceEnvironment: (workspaceId: string) => Promise<{
|
||||
workspace: Record<string, string>
|
||||
personal: Record<string, string>
|
||||
conflicts: string[]
|
||||
}>
|
||||
upsertWorkspaceEnvironment: (
|
||||
workspaceId: string,
|
||||
variables: Record<string, string>
|
||||
) => Promise<void>
|
||||
removeWorkspaceEnvironmentKeys: (workspaceId: string, keys: string[]) => Promise<void>
|
||||
|
||||
setVariables: (variables: Record<string, EnvironmentVariable>) => void
|
||||
getAllVariables: () => Record<string, EnvironmentVariable>
|
||||
clearWorkspaceEnvCache: (workspaceId?: string) => void
|
||||
reset: () => void
|
||||
}
|
||||
|
||||
@@ -1,294 +1,37 @@
|
||||
import { create } from 'zustand'
|
||||
import { devtools, persist } from 'zustand/middleware'
|
||||
import { devtools } from 'zustand/middleware'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
// COMMENTED OUT: Theme switching disabled - dark mode is forced for workspace
|
||||
// import { syncThemeToNextThemes } from '@/lib/theme-sync'
|
||||
import { withOptimisticUpdate } from '@/lib/utils'
|
||||
import type { General, GeneralStore, UserSettings } from '@/stores/settings/general/types'
|
||||
import type { General, GeneralStore } from '@/stores/settings/general/types'
|
||||
|
||||
const logger = createLogger('GeneralStore')
|
||||
|
||||
const CACHE_TIMEOUT = 3600000 // 1 hour - settings rarely change
|
||||
const MAX_ERROR_RETRIES = 2
|
||||
const initialState: General = {
|
||||
isAutoConnectEnabled: true,
|
||||
isAutoPanEnabled: true,
|
||||
isConsoleExpandedByDefault: true,
|
||||
showFloatingControls: true,
|
||||
showTrainingControls: false,
|
||||
superUserModeEnabled: true,
|
||||
theme: 'system',
|
||||
telemetryEnabled: true,
|
||||
isBillingUsageNotificationsEnabled: true,
|
||||
}
|
||||
|
||||
export const useGeneralStore = create<GeneralStore>()(
|
||||
devtools(
|
||||
persist(
|
||||
(set, get) => {
|
||||
let lastLoadTime = 0
|
||||
let errorRetryCount = 0
|
||||
let hasLoadedFromDb = false // Track if we've loaded from DB in this session
|
||||
|
||||
const store: General = {
|
||||
isAutoConnectEnabled: true,
|
||||
isAutoPanEnabled: true,
|
||||
isConsoleExpandedByDefault: true,
|
||||
showFloatingControls: true,
|
||||
showTrainingControls: false,
|
||||
superUserModeEnabled: true,
|
||||
theme: 'system' as const, // Keep for compatibility but not used
|
||||
telemetryEnabled: true,
|
||||
isLoading: false,
|
||||
error: null,
|
||||
// Individual loading states
|
||||
isAutoConnectLoading: false,
|
||||
isAutoPanLoading: false,
|
||||
isConsoleExpandedByDefaultLoading: false,
|
||||
isThemeLoading: false, // Keep for compatibility but not used
|
||||
isTelemetryLoading: false,
|
||||
isBillingUsageNotificationsLoading: false,
|
||||
isBillingUsageNotificationsEnabled: true,
|
||||
isFloatingControlsLoading: false,
|
||||
isTrainingControlsLoading: false,
|
||||
isSuperUserModeLoading: false,
|
||||
}
|
||||
|
||||
const updateSettingOptimistic = async <K extends keyof UserSettings>(
|
||||
key: K,
|
||||
value: UserSettings[K],
|
||||
loadingKey: keyof General,
|
||||
stateKey: keyof General
|
||||
) => {
|
||||
if ((get() as any)[loadingKey]) return
|
||||
|
||||
await withOptimisticUpdate({
|
||||
getCurrentState: () => (get() as any)[stateKey],
|
||||
optimisticUpdate: () => set({ [stateKey]: value, [loadingKey]: true } as any),
|
||||
apiCall: async () => {
|
||||
await get().updateSetting(key, value)
|
||||
},
|
||||
rollback: (originalValue) => set({ [stateKey]: originalValue } as any),
|
||||
onComplete: () => set({ [loadingKey]: false } as any),
|
||||
errorMessage: `Failed to update ${String(key)}, rolled back`,
|
||||
})
|
||||
}
|
||||
|
||||
return {
|
||||
...store,
|
||||
toggleAutoConnect: async () => {
|
||||
if (get().isAutoConnectLoading) return
|
||||
const newValue = !get().isAutoConnectEnabled
|
||||
await updateSettingOptimistic(
|
||||
'autoConnect',
|
||||
newValue,
|
||||
'isAutoConnectLoading',
|
||||
'isAutoConnectEnabled'
|
||||
)
|
||||
},
|
||||
|
||||
toggleAutoPan: async () => {
|
||||
if (get().isAutoPanLoading) return
|
||||
const newValue = !get().isAutoPanEnabled
|
||||
await updateSettingOptimistic(
|
||||
'autoPan',
|
||||
newValue,
|
||||
'isAutoPanLoading',
|
||||
'isAutoPanEnabled'
|
||||
)
|
||||
},
|
||||
|
||||
toggleConsoleExpandedByDefault: async () => {
|
||||
if (get().isConsoleExpandedByDefaultLoading) return
|
||||
const newValue = !get().isConsoleExpandedByDefault
|
||||
await updateSettingOptimistic(
|
||||
'consoleExpandedByDefault',
|
||||
newValue,
|
||||
'isConsoleExpandedByDefaultLoading',
|
||||
'isConsoleExpandedByDefault'
|
||||
)
|
||||
},
|
||||
|
||||
toggleFloatingControls: async () => {
|
||||
if (get().isFloatingControlsLoading) return
|
||||
const newValue = !get().showFloatingControls
|
||||
await updateSettingOptimistic(
|
||||
'showFloatingControls',
|
||||
newValue,
|
||||
'isFloatingControlsLoading',
|
||||
'showFloatingControls'
|
||||
)
|
||||
},
|
||||
|
||||
toggleTrainingControls: async () => {
|
||||
if (get().isTrainingControlsLoading) return
|
||||
const newValue = !get().showTrainingControls
|
||||
await updateSettingOptimistic(
|
||||
'showTrainingControls',
|
||||
newValue,
|
||||
'isTrainingControlsLoading',
|
||||
'showTrainingControls'
|
||||
)
|
||||
},
|
||||
|
||||
toggleSuperUserMode: async () => {
|
||||
if (get().isSuperUserModeLoading) return
|
||||
const newValue = !get().superUserModeEnabled
|
||||
await updateSettingOptimistic(
|
||||
'superUserModeEnabled',
|
||||
newValue,
|
||||
'isSuperUserModeLoading',
|
||||
'superUserModeEnabled'
|
||||
)
|
||||
},
|
||||
|
||||
// COMMENTED OUT: Theme switching disabled - dark mode is forced for workspace
|
||||
setTheme: async (theme) => {
|
||||
if (get().isThemeLoading) return
|
||||
|
||||
// COMMENTED OUT: Dark mode is forced for workspace pages
|
||||
// await withOptimisticUpdate({
|
||||
// getCurrentState: () => get().theme,
|
||||
// optimisticUpdate: () => {
|
||||
// set({ theme, isThemeLoading: true })
|
||||
// syncThemeToNextThemes(theme)
|
||||
// },
|
||||
// apiCall: async () => {
|
||||
// await get().updateSetting('theme', theme)
|
||||
// },
|
||||
// rollback: (originalTheme) => {
|
||||
// set({ theme: originalTheme })
|
||||
// syncThemeToNextThemes(originalTheme)
|
||||
// },
|
||||
// onComplete: () => set({ isThemeLoading: false }),
|
||||
// errorMessage: 'Failed to sync theme to database',
|
||||
// })
|
||||
},
|
||||
|
||||
setTelemetryEnabled: async (enabled) => {
|
||||
if (get().isTelemetryLoading) return
|
||||
await updateSettingOptimistic(
|
||||
'telemetryEnabled',
|
||||
enabled,
|
||||
'isTelemetryLoading',
|
||||
'telemetryEnabled'
|
||||
)
|
||||
},
|
||||
|
||||
setBillingUsageNotificationsEnabled: async (enabled: boolean) => {
|
||||
if (get().isBillingUsageNotificationsLoading) return
|
||||
await updateSettingOptimistic(
|
||||
'isBillingUsageNotificationsEnabled',
|
||||
enabled,
|
||||
'isBillingUsageNotificationsLoading',
|
||||
'isBillingUsageNotificationsEnabled'
|
||||
)
|
||||
},
|
||||
|
||||
// API Actions
|
||||
loadSettings: async (force = false) => {
|
||||
// Skip if we've already loaded from DB and not forcing
|
||||
if (hasLoadedFromDb && !force) {
|
||||
logger.debug('Already loaded settings from DB, using cached data')
|
||||
return
|
||||
}
|
||||
|
||||
// If we have persisted state and not forcing, check if we need to load
|
||||
const persistedState = localStorage.getItem('general-settings')
|
||||
if (persistedState && !force) {
|
||||
try {
|
||||
const parsed = JSON.parse(persistedState)
|
||||
// If we have valid theme data, skip DB load unless forced
|
||||
if (parsed.state?.theme) {
|
||||
logger.debug('Using cached settings from localStorage')
|
||||
hasLoadedFromDb = true // Mark as loaded to prevent future API calls
|
||||
return
|
||||
}
|
||||
} catch (e) {
|
||||
// If parsing fails, continue to load from DB
|
||||
}
|
||||
}
|
||||
// Skip loading if on a chat path
|
||||
if (typeof window !== 'undefined' && window.location.pathname.startsWith('/chat/')) {
|
||||
logger.debug('Skipping settings load - on chat page')
|
||||
return
|
||||
}
|
||||
|
||||
// Skip loading if settings were recently loaded (within 5 seconds)
|
||||
const now = Date.now()
|
||||
if (!force && now - lastLoadTime < CACHE_TIMEOUT) {
|
||||
logger.debug('Skipping settings load - recently loaded')
|
||||
return
|
||||
}
|
||||
|
||||
try {
|
||||
set({ isLoading: true, error: null })
|
||||
|
||||
const response = await fetch('/api/users/me/settings')
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error('Failed to fetch settings')
|
||||
}
|
||||
|
||||
const { data } = await response.json()
|
||||
|
||||
set({
|
||||
isAutoConnectEnabled: data.autoConnect,
|
||||
isAutoPanEnabled: data.autoPan ?? true,
|
||||
isConsoleExpandedByDefault: data.consoleExpandedByDefault ?? true,
|
||||
showFloatingControls: data.showFloatingControls ?? true,
|
||||
showTrainingControls: data.showTrainingControls ?? false,
|
||||
superUserModeEnabled: data.superUserModeEnabled ?? true,
|
||||
theme: data.theme || 'system',
|
||||
telemetryEnabled: data.telemetryEnabled,
|
||||
isBillingUsageNotificationsEnabled: data.billingUsageNotificationsEnabled ?? true,
|
||||
isLoading: false,
|
||||
})
|
||||
|
||||
// COMMENTED OUT: Theme switching disabled - dark mode is forced for workspace
|
||||
// // Sync theme to next-themes if it's different
|
||||
// if (data.theme && typeof window !== 'undefined') {
|
||||
// const currentTheme = localStorage.getItem('sim-theme')
|
||||
// if (currentTheme !== data.theme) {
|
||||
// syncThemeToNextThemes(data.theme)
|
||||
// }
|
||||
// }
|
||||
|
||||
lastLoadTime = now
|
||||
errorRetryCount = 0
|
||||
hasLoadedFromDb = true
|
||||
} catch (error) {
|
||||
logger.error('Error loading settings:', error)
|
||||
set({
|
||||
error: error instanceof Error ? error.message : 'Unknown error',
|
||||
isLoading: false,
|
||||
})
|
||||
}
|
||||
},
|
||||
|
||||
updateSetting: async (key, value) => {
|
||||
if (typeof window !== 'undefined' && window.location.pathname.startsWith('/chat/')) {
|
||||
logger.debug(`Skipping setting update for ${key} on chat page`)
|
||||
return
|
||||
}
|
||||
|
||||
try {
|
||||
const response = await fetch('/api/users/me/settings', {
|
||||
method: 'PATCH',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({ [key]: value }),
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`Failed to update setting: ${key}`)
|
||||
}
|
||||
|
||||
set({ error: null })
|
||||
lastLoadTime = Date.now()
|
||||
errorRetryCount = 0
|
||||
} catch (error) {
|
||||
logger.error(`Error updating setting ${key}:`, error)
|
||||
set({ error: error instanceof Error ? error.message : 'Unknown error' })
|
||||
|
||||
// Don't auto-retry on individual setting updates to avoid conflicts
|
||||
throw error
|
||||
}
|
||||
},
|
||||
}
|
||||
(set) => ({
|
||||
...initialState,
|
||||
setSettings: (settings) => {
|
||||
logger.debug('Updating general settings store', {
|
||||
keys: Object.keys(settings),
|
||||
})
|
||||
set((state) => ({
|
||||
...state,
|
||||
...settings,
|
||||
}))
|
||||
},
|
||||
{
|
||||
name: 'general-settings',
|
||||
}
|
||||
),
|
||||
reset: () => set(initialState),
|
||||
}),
|
||||
{ name: 'general-store' }
|
||||
)
|
||||
)
|
||||
|
||||
@@ -7,36 +7,14 @@ export interface General {
|
||||
superUserModeEnabled: boolean
|
||||
theme: 'system' | 'light' | 'dark'
|
||||
telemetryEnabled: boolean
|
||||
isLoading: boolean
|
||||
error: string | null
|
||||
isAutoConnectLoading: boolean
|
||||
isAutoPanLoading: boolean
|
||||
isConsoleExpandedByDefaultLoading: boolean
|
||||
isThemeLoading: boolean
|
||||
isTelemetryLoading: boolean
|
||||
isBillingUsageNotificationsLoading: boolean
|
||||
isBillingUsageNotificationsEnabled: boolean
|
||||
isFloatingControlsLoading: boolean
|
||||
isTrainingControlsLoading: boolean
|
||||
isSuperUserModeLoading: boolean
|
||||
}
|
||||
|
||||
export interface GeneralActions {
|
||||
toggleAutoConnect: () => Promise<void>
|
||||
toggleAutoPan: () => Promise<void>
|
||||
toggleConsoleExpandedByDefault: () => Promise<void>
|
||||
toggleFloatingControls: () => Promise<void>
|
||||
toggleTrainingControls: () => Promise<void>
|
||||
toggleSuperUserMode: () => Promise<void>
|
||||
setTheme: (theme: 'system' | 'light' | 'dark') => Promise<void>
|
||||
setTelemetryEnabled: (enabled: boolean) => Promise<void>
|
||||
setBillingUsageNotificationsEnabled: (enabled: boolean) => Promise<void>
|
||||
loadSettings: (force?: boolean) => Promise<void>
|
||||
updateSetting: <K extends keyof UserSettings>(key: K, value: UserSettings[K]) => Promise<void>
|
||||
export interface GeneralStore extends General {
|
||||
setSettings: (settings: Partial<General>) => void
|
||||
reset: () => void
|
||||
}
|
||||
|
||||
export type GeneralStore = General & GeneralActions
|
||||
|
||||
export type UserSettings = {
|
||||
theme: 'system' | 'light' | 'dark'
|
||||
autoConnect: boolean
|
||||
|
||||
@@ -2,6 +2,7 @@ import { create } from 'zustand'
|
||||
import { devtools, persist } from 'zustand/middleware'
|
||||
import { redactApiKeys } from '@/lib/utils'
|
||||
import type { NormalizedBlockOutput } from '@/executor/types'
|
||||
import { useExecutionStore } from '@/stores/execution/store'
|
||||
import type { ConsoleEntry, ConsoleStore, ConsoleUpdate } from './types'
|
||||
|
||||
/**
|
||||
@@ -98,17 +99,19 @@ export const useTerminalConsoleStore = create<ConsoleStore>()(
|
||||
},
|
||||
|
||||
/**
|
||||
* Clears console entries for a specific workflow
|
||||
* Clears console entries for a specific workflow and clears the run path
|
||||
* @param workflowId - The workflow ID to clear entries for
|
||||
*/
|
||||
clearWorkflowConsole: (workflowId: string) => {
|
||||
set((state) => ({
|
||||
entries: state.entries.filter((entry) => entry.workflowId !== workflowId),
|
||||
}))
|
||||
// Clear run path indicators when console is cleared
|
||||
useExecutionStore.getState().clearRunPath()
|
||||
},
|
||||
|
||||
/**
|
||||
* Clears all console entries or entries for a specific workflow
|
||||
* Clears all console entries or entries for a specific workflow and clears the run path
|
||||
* @param workflowId - The workflow ID to clear entries for, or null to clear all
|
||||
* @deprecated Use clearWorkflowConsole for clearing specific workflows
|
||||
*/
|
||||
@@ -118,6 +121,8 @@ export const useTerminalConsoleStore = create<ConsoleStore>()(
|
||||
? state.entries.filter((entry) => entry.workflowId !== workflowId)
|
||||
: [],
|
||||
}))
|
||||
// Clear run path indicators when console is cleared
|
||||
useExecutionStore.getState().clearRunPath()
|
||||
},
|
||||
|
||||
exportConsoleCSV: (workflowId: string) => {
|
||||
|
||||
@@ -126,17 +126,6 @@ export default {
|
||||
strokeDashoffset: '-24',
|
||||
},
|
||||
},
|
||||
'pulse-ring': {
|
||||
'0%': {
|
||||
boxShadow: '0 0 0 0 hsl(var(--border))',
|
||||
},
|
||||
'50%': {
|
||||
boxShadow: '0 0 0 8px hsl(var(--border))',
|
||||
},
|
||||
'100%': {
|
||||
boxShadow: '0 0 0 0 hsl(var(--border))',
|
||||
},
|
||||
},
|
||||
'code-shimmer': {
|
||||
'0%': {
|
||||
transform: 'translateX(-100%)',
|
||||
@@ -153,15 +142,23 @@ export default {
|
||||
opacity: '0.8',
|
||||
},
|
||||
},
|
||||
'ring-pulse': {
|
||||
'0%, 100%': {
|
||||
opacity: '1',
|
||||
},
|
||||
'50%': {
|
||||
opacity: '0.6',
|
||||
},
|
||||
},
|
||||
},
|
||||
animation: {
|
||||
'caret-blink': 'caret-blink 1.25s ease-out infinite',
|
||||
'slide-left': 'slide-left 80s linear infinite',
|
||||
'slide-right': 'slide-right 80s linear infinite',
|
||||
'dash-animation': 'dash-animation 1.5s linear infinite',
|
||||
'pulse-ring': 'pulse-ring 1.5s cubic-bezier(0.4, 0, 0.6, 1) infinite',
|
||||
'code-shimmer': 'code-shimmer 1.5s infinite',
|
||||
'placeholder-pulse': 'placeholder-pulse 1.5s ease-in-out infinite',
|
||||
'ring-pulse': 'ring-pulse 1.5s ease-in-out infinite',
|
||||
},
|
||||
},
|
||||
},
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user