Mirror of https://github.com/simstudioai/sim.git (synced 2026-01-11 16:08:04 -05:00)

Compare commits (20 commits)
| SHA1 |
|---|
| 1c818b2e3e |
| 1a7de84c7a |
| a2dea384a4 |
| 1c3e923f1b |
| e1d5e38528 |
| 3c7b3e1a4b |
| bc455d5bf4 |
| 2a333c7cf7 |
| 41cc0cdadc |
| 70aeb0c298 |
| 83f113984d |
| 56ede1c980 |
| df16382a19 |
| e271ed86b6 |
| 785b86a32e |
| e5e8082de4 |
| 8a08afd733 |
| ebb25469ab |
| a2040322e7 |
| a8be7e9fb3 |
.github/CONTRIBUTING.md (vendored, 9 changed lines)
@@ -416,8 +416,8 @@ In addition, you will need to update the registries:
Your tool should export a constant with a naming convention of `{toolName}Tool`. The tool ID should follow the format `{provider}_{tool_name}`. For example:

```typescript:/apps/sim/tools/pinecone/fetch.ts
import { ToolConfig, ToolResponse } from '../types'
import { PineconeParams, PineconeResponse } from './types'
import { ToolConfig, ToolResponse } from '@/tools/types'
import { PineconeParams, PineconeResponse } from '@/tools/pinecone/types'

export const fetchTool: ToolConfig<PineconeParams, PineconeResponse> = {
  id: 'pinecone_fetch', // Follow the {provider}_{tool_name} format
```

@@ -448,9 +448,6 @@ In addition, you will need to update the registries:

```typescript
  transformResponse: async (response: Response) => {
    // Transform response
  },
  transformError: (error) => {
    // Handle errors
  },
}
```
@@ -458,7 +455,7 @@ In addition, you will need to update the registries:

Update the tools registry in `/apps/sim/tools/index.ts` to include your new tool:

```typescript:/apps/sim/tools/index.ts
import { fetchTool, generateEmbeddingsTool, searchTextTool } from './pinecone'
import { fetchTool, generateEmbeddingsTool, searchTextTool } from '@/tools/pinecone'
// ... other imports

export const tools: Record<string, ToolConfig> = {
```
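The registry hunk above is truncated in this diff. A minimal sketch of how the completed registration might look, assuming the `pinecone_fetch` ID from the earlier example; the other entries are illustrative placeholders, not taken from this diff:

```typescript
// Hypothetical continuation of /apps/sim/tools/index.ts.
// Only `pinecone_fetch: fetchTool` follows directly from the example above;
// the remaining keys are assumed to follow the same {provider}_{tool_name} pattern.
export const tools: Record<string, ToolConfig> = {
  pinecone_fetch: fetchTool,
  pinecone_generate_embeddings: generateEmbeddingsTool,
  pinecone_search_text: searchTextTool,
  // ... other tools
}
```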
@@ -151,8 +151,6 @@ Update multiple existing records in an Airtable table

| `baseId` | string | Yes | ID of the Airtable base |
| `tableId` | string | Yes | ID or name of the table |
| `records` | json | Yes | Array of records to update, each with an `id` and a `fields` object |
| `fields` | string | No | No description |
| `fields` | string | No | No description |

#### Output

@@ -82,9 +82,10 @@ Runs a browser automation task using BrowserUse

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Operation success status |
| `output` | json | Browser automation task results including task ID, success status, output data, and execution steps |
| `error` | string | Error message if the operation failed |
| `id` | string | Task execution identifier |
| `success` | boolean | Task completion status |
| `output` | json | Task output data |
| `steps` | json | Execution steps taken |

@@ -62,7 +62,7 @@ Convert TTS using ElevenLabs voices

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `audioUrl` | string | Generated audio URL |
| `audioUrl` | string | The URL of the generated audio |

@@ -71,8 +71,8 @@ Parse one or more uploaded files or files from URLs (text, PDF, CSV, images, etc

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `files` | json | Array of parsed file objects with content, metadata, and file properties |
| `combinedContent` | string | All file contents merged into a single text string |
| `files` | array | Array of parsed files |
| `combinedContent` | string | Combined content of all parsed files |

@@ -101,8 +101,8 @@ Query data from a Supabase table

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Operation success status |
| `output` | object | Query operation results |
| `message` | string | Operation status message |
| `results` | array | Array of records returned from the query |

### `supabase_insert`

@@ -121,8 +121,8 @@ Insert data into a Supabase table

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Operation success status |
| `output` | object | Insert operation results |
| `message` | string | Operation status message |
| `results` | array | Array of inserted records |

### `supabase_get_row`

@@ -141,8 +141,8 @@ Get a single row from a Supabase table based on filter criteria

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Operation success status |
| `output` | object | Get row operation results |
| `message` | string | Operation status message |
| `results` | object | The row data if found, null if not found |

### `supabase_update`

@@ -162,8 +162,8 @@ Update rows in a Supabase table based on filter criteria

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Operation success status |
| `output` | object | Update operation results |
| `message` | string | Operation status message |
| `results` | array | Array of updated records |

### `supabase_delete`

@@ -182,8 +182,8 @@ Delete rows from a Supabase table based on filter criteria

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Operation success status |
| `output` | object | Delete operation results |
| `message` | string | Operation status message |
| `results` | array | Array of deleted records |

@@ -245,6 +245,8 @@ describe('Chat API Route', () => {
        NODE_ENV: 'development',
        NEXT_PUBLIC_APP_URL: 'http://localhost:3000',
      },
      isTruthy: (value: string | boolean | number | undefined) =>
        typeof value === 'string' ? value === 'true' || value === '1' : Boolean(value),
    }))

    const validData = {

@@ -287,6 +289,8 @@ describe('Chat API Route', () => {

        NODE_ENV: 'development',
        NEXT_PUBLIC_APP_URL: 'http://localhost:3000',
      },
      isTruthy: (value: string | boolean | number | undefined) =>
        typeof value === 'string' ? value === 'true' || value === '1' : Boolean(value),
    }))

    const validData = {

@@ -98,8 +98,8 @@ ${message}
    // Send email using Resend
    const { data, error } = await resend.emails.send({
      from: `Sim <noreply@${env.EMAIL_DOMAIN || getEmailDomain()}>`,
      to: [`help@${env.EMAIL_DOMAIN || getEmailDomain()}`],
      from: `Sim <noreply@${getEmailDomain()}>`,
      to: [`help@${getEmailDomain()}`],
      subject: `[${type.toUpperCase()}] ${subject}`,
      replyTo: email,
      text: emailText,

@@ -121,7 +121,7 @@ ${message}

    // Send confirmation email to the user
    await resend.emails
      .send({
        from: `Sim <noreply@${env.EMAIL_DOMAIN || getEmailDomain()}>`,
        from: `Sim <noreply@${getEmailDomain()}>`,
        to: [email],
        subject: `Your ${type} request has been received: ${subject}`,
        text: `

@@ -137,7 +137,7 @@ ${images.length > 0 ? `You attached ${images.length} image(s).` : ''}

Best regards,
The Sim Team
`,
        replyTo: `help@${env.EMAIL_DOMAIN || getEmailDomain()}`,
        replyTo: `help@${getEmailDomain()}`,
      })
      .catch((err) => {
        logger.warn(`[${requestId}] Failed to send confirmation email`, err)

apps/sim/app/api/knowledge/[id]/tag-definitions/[tagId]/route.ts (new file, 118 lines)
@@ -0,0 +1,118 @@
|
||||
import { randomUUID } from 'crypto'
|
||||
import { and, eq, isNotNull } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { checkKnowledgeBaseAccess } from '@/app/api/knowledge/utils'
|
||||
import { db } from '@/db'
|
||||
import { document, embedding, knowledgeBaseTagDefinitions } from '@/db/schema'
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
const logger = createLogger('TagDefinitionAPI')
|
||||
|
||||
// DELETE /api/knowledge/[id]/tag-definitions/[tagId] - Delete a tag definition
|
||||
export async function DELETE(
|
||||
req: NextRequest,
|
||||
{ params }: { params: Promise<{ id: string; tagId: string }> }
|
||||
) {
|
||||
const requestId = randomUUID().slice(0, 8)
|
||||
const { id: knowledgeBaseId, tagId } = await params
|
||||
|
||||
try {
|
||||
logger.info(
|
||||
`[${requestId}] Deleting tag definition ${tagId} from knowledge base ${knowledgeBaseId}`
|
||||
)
|
||||
|
||||
const session = await getSession()
|
||||
if (!session?.user?.id) {
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
// Check if user has access to the knowledge base
|
||||
const accessCheck = await checkKnowledgeBaseAccess(knowledgeBaseId, session.user.id)
|
||||
if (!accessCheck.hasAccess) {
|
||||
return NextResponse.json({ error: 'Forbidden' }, { status: 403 })
|
||||
}
|
||||
|
||||
// Get the tag definition to find which slot it uses
|
||||
const tagDefinition = await db
|
||||
.select({
|
||||
id: knowledgeBaseTagDefinitions.id,
|
||||
tagSlot: knowledgeBaseTagDefinitions.tagSlot,
|
||||
displayName: knowledgeBaseTagDefinitions.displayName,
|
||||
})
|
||||
.from(knowledgeBaseTagDefinitions)
|
||||
.where(
|
||||
and(
|
||||
eq(knowledgeBaseTagDefinitions.id, tagId),
|
||||
eq(knowledgeBaseTagDefinitions.knowledgeBaseId, knowledgeBaseId)
|
||||
)
|
||||
)
|
||||
.limit(1)
|
||||
|
||||
if (tagDefinition.length === 0) {
|
||||
return NextResponse.json({ error: 'Tag definition not found' }, { status: 404 })
|
||||
}
|
||||
|
||||
const tagDef = tagDefinition[0]
|
||||
|
||||
// Delete the tag definition and clear all document tags in a transaction
|
||||
await db.transaction(async (tx) => {
|
||||
logger.info(`[${requestId}] Starting transaction to delete ${tagDef.tagSlot}`)
|
||||
|
||||
try {
|
||||
// Clear the tag from documents that actually have this tag set
|
||||
logger.info(`[${requestId}] Clearing tag from documents...`)
|
||||
await tx
|
||||
.update(document)
|
||||
.set({ [tagDef.tagSlot]: null })
|
||||
.where(
|
||||
and(
|
||||
eq(document.knowledgeBaseId, knowledgeBaseId),
|
||||
isNotNull(document[tagDef.tagSlot as keyof typeof document.$inferSelect])
|
||||
)
|
||||
)
|
||||
|
||||
logger.info(`[${requestId}] Documents updated successfully`)
|
||||
|
||||
// Clear the tag from embeddings that actually have this tag set
|
||||
logger.info(`[${requestId}] Clearing tag from embeddings...`)
|
||||
await tx
|
||||
.update(embedding)
|
||||
.set({ [tagDef.tagSlot]: null })
|
||||
.where(
|
||||
and(
|
||||
eq(embedding.knowledgeBaseId, knowledgeBaseId),
|
||||
isNotNull(embedding[tagDef.tagSlot as keyof typeof embedding.$inferSelect])
|
||||
)
|
||||
)
|
||||
|
||||
logger.info(`[${requestId}] Embeddings updated successfully`)
|
||||
|
||||
// Delete the tag definition
|
||||
logger.info(`[${requestId}] Deleting tag definition...`)
|
||||
await tx
|
||||
.delete(knowledgeBaseTagDefinitions)
|
||||
.where(eq(knowledgeBaseTagDefinitions.id, tagId))
|
||||
|
||||
logger.info(`[${requestId}] Tag definition deleted successfully`)
|
||||
} catch (error) {
|
||||
logger.error(`[${requestId}] Error in transaction:`, error)
|
||||
throw error
|
||||
}
|
||||
})
|
||||
|
||||
logger.info(
|
||||
`[${requestId}] Successfully deleted tag definition ${tagDef.displayName} (${tagDef.tagSlot})`
|
||||
)
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
message: `Tag definition "${tagDef.displayName}" deleted successfully`,
|
||||
})
|
||||
} catch (error) {
|
||||
logger.error(`[${requestId}] Error deleting tag definition`, error)
|
||||
return NextResponse.json({ error: 'Failed to delete tag definition' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
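For reference, a hedged sketch of how a client might call the DELETE endpoint added above; the fetch wrapper and placeholders are assumptions, only the route shape and response fields come from the handler:

```typescript
// Hypothetical client call for DELETE /api/knowledge/[id]/tag-definitions/[tagId].
// Assumes a same-origin session; kbId and tagId are placeholders.
async function deleteTagDefinition(kbId: string, tagId: string) {
  const res = await fetch(`/api/knowledge/${kbId}/tag-definitions/${tagId}`, {
    method: 'DELETE',
  })
  if (!res.ok) {
    // 401 Unauthorized, 403 Forbidden, 404 Not found, or 500 per the handler above
    throw new Error(`Failed to delete tag definition: ${res.status}`)
  }
  // On success the handler returns { success: true, message: '...' }
  return (await res.json()) as { success: boolean; message: string }
}
```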
@@ -1,5 +1,5 @@
|
||||
import { randomUUID } from 'crypto'
|
||||
import { eq } from 'drizzle-orm'
|
||||
import { and, eq } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
@@ -55,3 +55,89 @@ export async function GET(req: NextRequest, { params }: { params: Promise<{ id:
|
||||
return NextResponse.json({ error: 'Failed to get tag definitions' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
|
||||
// POST /api/knowledge/[id]/tag-definitions - Create a new tag definition
|
||||
export async function POST(req: NextRequest, { params }: { params: Promise<{ id: string }> }) {
|
||||
const requestId = randomUUID().slice(0, 8)
|
||||
const { id: knowledgeBaseId } = await params
|
||||
|
||||
try {
|
||||
logger.info(`[${requestId}] Creating tag definition for knowledge base ${knowledgeBaseId}`)
|
||||
|
||||
const session = await getSession()
|
||||
if (!session?.user?.id) {
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
// Check if user has access to the knowledge base
|
||||
const accessCheck = await checkKnowledgeBaseAccess(knowledgeBaseId, session.user.id)
|
||||
if (!accessCheck.hasAccess) {
|
||||
return NextResponse.json({ error: 'Forbidden' }, { status: 403 })
|
||||
}
|
||||
|
||||
const body = await req.json()
|
||||
const { tagSlot, displayName, fieldType } = body
|
||||
|
||||
if (!tagSlot || !displayName || !fieldType) {
|
||||
return NextResponse.json(
|
||||
{ error: 'tagSlot, displayName, and fieldType are required' },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
// Check if tag slot is already used
|
||||
const existingTag = await db
|
||||
.select()
|
||||
.from(knowledgeBaseTagDefinitions)
|
||||
.where(
|
||||
and(
|
||||
eq(knowledgeBaseTagDefinitions.knowledgeBaseId, knowledgeBaseId),
|
||||
eq(knowledgeBaseTagDefinitions.tagSlot, tagSlot)
|
||||
)
|
||||
)
|
||||
.limit(1)
|
||||
|
||||
if (existingTag.length > 0) {
|
||||
return NextResponse.json({ error: 'Tag slot is already in use' }, { status: 409 })
|
||||
}
|
||||
|
||||
// Check if display name is already used
|
||||
const existingName = await db
|
||||
.select()
|
||||
.from(knowledgeBaseTagDefinitions)
|
||||
.where(
|
||||
and(
|
||||
eq(knowledgeBaseTagDefinitions.knowledgeBaseId, knowledgeBaseId),
|
||||
eq(knowledgeBaseTagDefinitions.displayName, displayName)
|
||||
)
|
||||
)
|
||||
.limit(1)
|
||||
|
||||
if (existingName.length > 0) {
|
||||
return NextResponse.json({ error: 'Tag name is already in use' }, { status: 409 })
|
||||
}
|
||||
|
||||
// Create the new tag definition
|
||||
const newTagDefinition = {
|
||||
id: randomUUID(),
|
||||
knowledgeBaseId,
|
||||
tagSlot,
|
||||
displayName,
|
||||
fieldType,
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
}
|
||||
|
||||
await db.insert(knowledgeBaseTagDefinitions).values(newTagDefinition)
|
||||
|
||||
logger.info(`[${requestId}] Successfully created tag definition ${displayName} (${tagSlot})`)
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
data: newTagDefinition,
|
||||
})
|
||||
} catch (error) {
|
||||
logger.error(`[${requestId}] Error creating tag definition`, error)
|
||||
return NextResponse.json({ error: 'Failed to create tag definition' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
|
||||
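A short sketch of the request the POST handler above expects; the field values are illustrative, only the three required keys and the 400/409 behaviour come from the code:

```typescript
// Hypothetical payload for POST /api/knowledge/[id]/tag-definitions.
// tagSlot, displayName, and fieldType are required; a duplicate tagSlot or
// displayName within the same knowledge base returns 409.
async function createTagDefinition(knowledgeBaseId: string) {
  const res = await fetch(`/api/knowledge/${knowledgeBaseId}/tag-definitions`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
      tagSlot: 'tag1', // assumed slot name, not taken from this diff
      displayName: 'Category',
      fieldType: 'text',
    }),
  })
  // Success response: { success: true, data: { id, knowledgeBaseId, tagSlot, ... } }
  return res.json()
}
```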
apps/sim/app/api/knowledge/[id]/tag-usage/route.ts (new file, 88 lines)
@@ -0,0 +1,88 @@
|
||||
import { randomUUID } from 'crypto'
|
||||
import { and, eq, isNotNull } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { checkKnowledgeBaseAccess } from '@/app/api/knowledge/utils'
|
||||
import { db } from '@/db'
|
||||
import { document, knowledgeBaseTagDefinitions } from '@/db/schema'
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
const logger = createLogger('TagUsageAPI')
|
||||
|
||||
// GET /api/knowledge/[id]/tag-usage - Get usage statistics for all tag definitions
|
||||
export async function GET(req: NextRequest, { params }: { params: Promise<{ id: string }> }) {
|
||||
const requestId = randomUUID().slice(0, 8)
|
||||
const { id: knowledgeBaseId } = await params
|
||||
|
||||
try {
|
||||
logger.info(`[${requestId}] Getting tag usage statistics for knowledge base ${knowledgeBaseId}`)
|
||||
|
||||
const session = await getSession()
|
||||
if (!session?.user?.id) {
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
// Check if user has access to the knowledge base
|
||||
const accessCheck = await checkKnowledgeBaseAccess(knowledgeBaseId, session.user.id)
|
||||
if (!accessCheck.hasAccess) {
|
||||
return NextResponse.json({ error: 'Forbidden' }, { status: 403 })
|
||||
}
|
||||
|
||||
// Get all tag definitions for the knowledge base
|
||||
const tagDefinitions = await db
|
||||
.select({
|
||||
id: knowledgeBaseTagDefinitions.id,
|
||||
tagSlot: knowledgeBaseTagDefinitions.tagSlot,
|
||||
displayName: knowledgeBaseTagDefinitions.displayName,
|
||||
})
|
||||
.from(knowledgeBaseTagDefinitions)
|
||||
.where(eq(knowledgeBaseTagDefinitions.knowledgeBaseId, knowledgeBaseId))
|
||||
|
||||
// Get usage statistics for each tag definition
|
||||
const usageStats = await Promise.all(
|
||||
tagDefinitions.map(async (tagDef) => {
|
||||
// Count documents using this tag slot
|
||||
const tagSlotColumn = tagDef.tagSlot as keyof typeof document.$inferSelect
|
||||
|
||||
const documentsWithTag = await db
|
||||
.select({
|
||||
id: document.id,
|
||||
filename: document.filename,
|
||||
[tagDef.tagSlot]: document[tagSlotColumn as keyof typeof document.$inferSelect] as any,
|
||||
})
|
||||
.from(document)
|
||||
.where(
|
||||
and(
|
||||
eq(document.knowledgeBaseId, knowledgeBaseId),
|
||||
isNotNull(document[tagSlotColumn as keyof typeof document.$inferSelect])
|
||||
)
|
||||
)
|
||||
|
||||
return {
|
||||
tagName: tagDef.displayName,
|
||||
tagSlot: tagDef.tagSlot,
|
||||
documentCount: documentsWithTag.length,
|
||||
documents: documentsWithTag.map((doc) => ({
|
||||
id: doc.id,
|
||||
name: doc.filename,
|
||||
tagValue: doc[tagDef.tagSlot],
|
||||
})),
|
||||
}
|
||||
})
|
||||
)
|
||||
|
||||
logger.info(
|
||||
`[${requestId}] Retrieved usage statistics for ${tagDefinitions.length} tag definitions`
|
||||
)
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
data: usageStats,
|
||||
})
|
||||
} catch (error) {
|
||||
logger.error(`[${requestId}] Error getting tag usage statistics`, error)
|
||||
return NextResponse.json({ error: 'Failed to get tag usage statistics' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
@@ -30,6 +30,8 @@ vi.mock('@/lib/env', () => ({
  env: {
    OPENAI_API_KEY: 'test-api-key',
  },
  isTruthy: (value: string | boolean | number | undefined) =>
    typeof value === 'string' ? value === 'true' || value === '1' : Boolean(value),
}))

vi.mock('@/lib/documents/utils', () => ({

||||
@@ -15,7 +15,11 @@ vi.mock('drizzle-orm', () => ({
  sql: (strings: TemplateStringsArray, ...expr: any[]) => ({ strings, expr }),
}))

vi.mock('@/lib/env', () => ({ env: { OPENAI_API_KEY: 'test-key' } }))
vi.mock('@/lib/env', () => ({
  env: { OPENAI_API_KEY: 'test-key' },
  isTruthy: (value: string | boolean | number | undefined) =>
    typeof value === 'string' ? value === 'true' || value === '1' : Boolean(value),
}))

vi.mock('@/lib/documents/utils', () => ({
  retryWithExponentialBackoff: (fn: any) => fn(),

@@ -235,42 +235,8 @@ export async function POST(request: Request) {
|
||||
error: result.error || 'Unknown error',
|
||||
})
|
||||
|
||||
if (tool.transformError) {
|
||||
try {
|
||||
const errorResult = tool.transformError(result)
|
||||
|
||||
// Handle both string and Promise return types
|
||||
if (typeof errorResult === 'string') {
|
||||
throw new Error(errorResult)
|
||||
}
|
||||
// It's a Promise, await it
|
||||
const transformedError = await errorResult
|
||||
// If it's a string or has an error property, use it
|
||||
if (typeof transformedError === 'string') {
|
||||
throw new Error(transformedError)
|
||||
}
|
||||
if (
|
||||
transformedError &&
|
||||
typeof transformedError === 'object' &&
|
||||
'error' in transformedError
|
||||
) {
|
||||
throw new Error(transformedError.error || 'Tool returned an error')
|
||||
}
|
||||
// Fallback
|
||||
throw new Error('Tool returned an error')
|
||||
} catch (transformError) {
|
||||
logger.error(`[${requestId}] Error transformation failed for ${toolId}`, {
|
||||
error:
|
||||
transformError instanceof Error ? transformError.message : String(transformError),
|
||||
})
|
||||
if (transformError instanceof Error) {
|
||||
throw transformError
|
||||
}
|
||||
throw new Error('Tool returned an error')
|
||||
}
|
||||
} else {
|
||||
throw new Error('Tool returned an error')
|
||||
}
|
||||
// Let the main executeTool handle error transformation to avoid double transformation
|
||||
throw new Error(result.error || 'Tool execution failed')
|
||||
}
|
||||
|
||||
const endTime = new Date()
|
||||
|
||||
apps/sim/app/api/tools/jira/update/route.ts (new file, 147 lines)
@@ -0,0 +1,147 @@
|
||||
import { NextResponse } from 'next/server'
|
||||
import { Logger } from '@/lib/logs/console/logger'
|
||||
import { getJiraCloudId } from '@/tools/jira/utils'
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
const logger = new Logger('JiraUpdateAPI')
|
||||
|
||||
export async function PUT(request: Request) {
|
||||
try {
|
||||
const {
|
||||
domain,
|
||||
accessToken,
|
||||
issueKey,
|
||||
summary,
|
||||
title, // Support both summary and title for backwards compatibility
|
||||
description,
|
||||
status,
|
||||
priority,
|
||||
assignee,
|
||||
cloudId: providedCloudId,
|
||||
} = await request.json()
|
||||
|
||||
// Validate required parameters
|
||||
if (!domain) {
|
||||
logger.error('Missing domain in request')
|
||||
return NextResponse.json({ error: 'Domain is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
if (!accessToken) {
|
||||
logger.error('Missing access token in request')
|
||||
return NextResponse.json({ error: 'Access token is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
if (!issueKey) {
|
||||
logger.error('Missing issue key in request')
|
||||
return NextResponse.json({ error: 'Issue key is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
// Use provided cloudId or fetch it if not provided
|
||||
const cloudId = providedCloudId || (await getJiraCloudId(domain, accessToken))
|
||||
logger.info('Using cloud ID:', cloudId)
|
||||
|
||||
// Build the URL using cloudId for Jira API
|
||||
const url = `https://api.atlassian.com/ex/jira/${cloudId}/rest/api/3/issue/${issueKey}`
|
||||
|
||||
logger.info('Updating Jira issue at:', url)
|
||||
|
||||
// Map the summary from either summary or title field
|
||||
const summaryValue = summary || title
|
||||
const fields: Record<string, any> = {}
|
||||
|
||||
if (summaryValue) {
|
||||
fields.summary = summaryValue
|
||||
}
|
||||
|
||||
if (description) {
|
||||
fields.description = {
|
||||
type: 'doc',
|
||||
version: 1,
|
||||
content: [
|
||||
{
|
||||
type: 'paragraph',
|
||||
content: [
|
||||
{
|
||||
type: 'text',
|
||||
text: description,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
}
|
||||
}
|
||||
|
||||
if (status) {
|
||||
fields.status = {
|
||||
name: status,
|
||||
}
|
||||
}
|
||||
|
||||
if (priority) {
|
||||
fields.priority = {
|
||||
name: priority,
|
||||
}
|
||||
}
|
||||
|
||||
if (assignee) {
|
||||
fields.assignee = {
|
||||
id: assignee,
|
||||
}
|
||||
}
|
||||
|
||||
const body = { fields }
|
||||
|
||||
// Make the request to Jira API
|
||||
const response = await fetch(url, {
|
||||
method: 'PUT',
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
Accept: 'application/json',
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
body: JSON.stringify(body),
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
const errorText = await response.text()
|
||||
logger.error('Jira API error:', {
|
||||
status: response.status,
|
||||
statusText: response.statusText,
|
||||
error: errorText,
|
||||
})
|
||||
|
||||
return NextResponse.json(
|
||||
{ error: `Jira API error: ${response.status} ${response.statusText}`, details: errorText },
|
||||
{ status: response.status }
|
||||
)
|
||||
}
|
||||
|
||||
// Note: Jira update API typically returns 204 No Content on success
|
||||
const responseData = response.status === 204 ? {} : await response.json()
|
||||
logger.info('Successfully updated Jira issue:', issueKey)
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
output: {
|
||||
ts: new Date().toISOString(),
|
||||
issueKey: responseData.key || issueKey,
|
||||
summary: responseData.fields?.summary || 'Issue updated',
|
||||
success: true,
|
||||
},
|
||||
})
|
||||
} catch (error: any) {
|
||||
logger.error('Error updating Jira issue:', {
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
stack: error instanceof Error ? error.stack : undefined,
|
||||
})
|
||||
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: error instanceof Error ? error.message : 'Internal server error',
|
||||
success: false,
|
||||
},
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
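A hedged sketch of the request shape the new update route accepts; the field values are placeholders, and only the parameter names and the PUT method come from the handler above:

```typescript
// Hypothetical call to the internal PUT /api/tools/jira/update route.
// domain, accessToken, and issueKey are required; summary/title, description,
// status, priority, assignee, and cloudId are optional per the handler.
async function updateJiraIssue() {
  const res = await fetch('/api/tools/jira/update', {
    method: 'PUT',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
      domain: 'your-site.atlassian.net',   // placeholder
      accessToken: '<oauth-access-token>', // placeholder
      issueKey: 'PROJ-123',                // placeholder
      summary: 'Updated summary',
      description: 'Updated description',
    }),
  })
  // Success response: { success: true, output: { ts, issueKey, summary, success } }
  return res.json()
}
```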
apps/sim/app/api/tools/jira/write/route.ts (new file, 162 lines)
@@ -0,0 +1,162 @@
|
||||
import { NextResponse } from 'next/server'
|
||||
import { Logger } from '@/lib/logs/console/logger'
|
||||
import { getJiraCloudId } from '@/tools/jira/utils'
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
const logger = new Logger('JiraWriteAPI')
|
||||
|
||||
export async function POST(request: Request) {
|
||||
try {
|
||||
const {
|
||||
domain,
|
||||
accessToken,
|
||||
projectId,
|
||||
summary,
|
||||
description,
|
||||
priority,
|
||||
assignee,
|
||||
cloudId: providedCloudId,
|
||||
issueType,
|
||||
parent,
|
||||
} = await request.json()
|
||||
|
||||
// Validate required parameters
|
||||
if (!domain) {
|
||||
logger.error('Missing domain in request')
|
||||
return NextResponse.json({ error: 'Domain is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
if (!accessToken) {
|
||||
logger.error('Missing access token in request')
|
||||
return NextResponse.json({ error: 'Access token is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
if (!projectId) {
|
||||
logger.error('Missing project ID in request')
|
||||
return NextResponse.json({ error: 'Project ID is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
if (!summary) {
|
||||
logger.error('Missing summary in request')
|
||||
return NextResponse.json({ error: 'Summary is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
if (!issueType) {
|
||||
logger.error('Missing issue type in request')
|
||||
return NextResponse.json({ error: 'Issue type is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
// Use provided cloudId or fetch it if not provided
|
||||
const cloudId = providedCloudId || (await getJiraCloudId(domain, accessToken))
|
||||
logger.info('Using cloud ID:', cloudId)
|
||||
|
||||
// Build the URL using cloudId for Jira API
|
||||
const url = `https://api.atlassian.com/ex/jira/${cloudId}/rest/api/3/issue`
|
||||
|
||||
logger.info('Creating Jira issue at:', url)
|
||||
|
||||
// Construct fields object with only the necessary fields
|
||||
const fields: Record<string, any> = {
|
||||
project: {
|
||||
id: projectId,
|
||||
},
|
||||
issuetype: {
|
||||
name: issueType,
|
||||
},
|
||||
summary: summary,
|
||||
}
|
||||
|
||||
// Only add description if it exists
|
||||
if (description) {
|
||||
fields.description = {
|
||||
type: 'doc',
|
||||
version: 1,
|
||||
content: [
|
||||
{
|
||||
type: 'paragraph',
|
||||
content: [
|
||||
{
|
||||
type: 'text',
|
||||
text: description,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
}
|
||||
}
|
||||
|
||||
// Only add parent if it exists
|
||||
if (parent) {
|
||||
fields.parent = parent
|
||||
}
|
||||
|
||||
// Only add priority if it exists
|
||||
if (priority) {
|
||||
fields.priority = {
|
||||
name: priority,
|
||||
}
|
||||
}
|
||||
|
||||
// Only add assignee if it exists
|
||||
if (assignee) {
|
||||
fields.assignee = {
|
||||
id: assignee,
|
||||
}
|
||||
}
|
||||
|
||||
const body = { fields }
|
||||
|
||||
// Make the request to Jira API
|
||||
const response = await fetch(url, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
Accept: 'application/json',
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
body: JSON.stringify(body),
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
const errorText = await response.text()
|
||||
logger.error('Jira API error:', {
|
||||
status: response.status,
|
||||
statusText: response.statusText,
|
||||
error: errorText,
|
||||
})
|
||||
|
||||
return NextResponse.json(
|
||||
{ error: `Jira API error: ${response.status} ${response.statusText}`, details: errorText },
|
||||
{ status: response.status }
|
||||
)
|
||||
}
|
||||
|
||||
const responseData = await response.json()
|
||||
logger.info('Successfully created Jira issue:', responseData.key)
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
output: {
|
||||
ts: new Date().toISOString(),
|
||||
issueKey: responseData.key || 'unknown',
|
||||
summary: responseData.fields?.summary || 'Issue created',
|
||||
success: true,
|
||||
url: `https://${domain}/browse/${responseData.key}`,
|
||||
},
|
||||
})
|
||||
} catch (error: any) {
|
||||
logger.error('Error creating Jira issue:', {
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
stack: error instanceof Error ? error.stack : undefined,
|
||||
})
|
||||
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: error instanceof Error ? error.message : 'Internal server error',
|
||||
success: false,
|
||||
},
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
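Similarly, a sketch of a minimal create request for the write route; the values are placeholders, and the required fields (domain, accessToken, projectId, summary, issueType) follow the validation in the handler above:

```typescript
// Hypothetical call to the internal POST /api/tools/jira/write route.
async function createJiraIssue() {
  const res = await fetch('/api/tools/jira/write', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
      domain: 'your-site.atlassian.net',   // placeholder
      accessToken: '<oauth-access-token>', // placeholder
      projectId: '10000',                  // placeholder
      issueType: 'Task',
      summary: 'New issue from Sim',
      description: 'Optional description', // optional per the handler
    }),
  })
  // Success response includes output.issueKey and output.url
  // (https://<domain>/browse/<issueKey>) per the handler above.
  return res.json()
}
```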
apps/sim/app/api/tools/thinking/route.ts (new file, 53 lines)
@@ -0,0 +1,53 @@
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import type { ThinkingToolParams, ThinkingToolResponse } from '@/tools/thinking/types'
|
||||
|
||||
const logger = createLogger('ThinkingToolAPI')
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
/**
|
||||
* POST - Process a thinking tool request
|
||||
* Simply acknowledges the thought by returning it in the output
|
||||
*/
|
||||
export async function POST(request: NextRequest) {
|
||||
const requestId = crypto.randomUUID().slice(0, 8)
|
||||
|
||||
try {
|
||||
const body: ThinkingToolParams = await request.json()
|
||||
|
||||
logger.info(`[${requestId}] Processing thinking tool request`)
|
||||
|
||||
// Validate the required parameter
|
||||
if (!body.thought || typeof body.thought !== 'string') {
|
||||
logger.warn(`[${requestId}] Missing or invalid 'thought' parameter`)
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: 'The thought parameter is required and must be a string',
|
||||
},
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
// Simply acknowledge the thought by returning it in the output
|
||||
const response: ThinkingToolResponse = {
|
||||
success: true,
|
||||
output: {
|
||||
acknowledgedThought: body.thought,
|
||||
},
|
||||
}
|
||||
|
||||
logger.info(`[${requestId}] Thinking tool processed successfully`)
|
||||
return NextResponse.json(response)
|
||||
} catch (error) {
|
||||
logger.error(`[${requestId}] Error processing thinking tool:`, error)
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: 'Failed to process thinking tool request',
|
||||
},
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
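The thinking route simply echoes the `thought` parameter back. A sketch of a request/response pair, with the thought text as a placeholder:

```typescript
// Hypothetical call to POST /api/tools/thinking.
async function think(thought: string) {
  const res = await fetch('/api/tools/thinking', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ thought }),
  })
  // 400 if `thought` is missing or not a string; otherwise:
  // { success: true, output: { acknowledgedThought: thought } }
  return res.json()
}
```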
@@ -352,12 +352,15 @@ async function createAirtableWebhookSubscription(
|
||||
return // Cannot proceed without base/table IDs
|
||||
}
|
||||
|
||||
const accessToken = await getOAuthToken(userId, 'airtable') // Use 'airtable' as the providerId key
|
||||
const accessToken = await getOAuthToken(userId, 'airtable')
|
||||
if (!accessToken) {
|
||||
logger.warn(
|
||||
`[${requestId}] Could not retrieve Airtable access token for user ${userId}. Cannot create webhook in Airtable.`
|
||||
)
|
||||
return
|
||||
// Instead of silently returning, throw an error with clear user guidance
|
||||
throw new Error(
|
||||
'Airtable account connection required. Please connect your Airtable account in the trigger configuration and try again.'
|
||||
)
|
||||
}
|
||||
|
||||
const requestOrigin = new URL(request.url).origin
|
||||
|
||||
@@ -100,20 +100,41 @@ export async function POST(
|
||||
return new NextResponse('Failed to read request body', { status: 400 })
|
||||
}
|
||||
|
||||
// Parse the body as JSON
|
||||
// Parse the body - handle both JSON and form-encoded payloads
|
||||
let body: any
|
||||
try {
|
||||
body = JSON.parse(rawBody)
|
||||
// Check content type to handle both JSON and form-encoded payloads
|
||||
const contentType = request.headers.get('content-type') || ''
|
||||
|
||||
if (contentType.includes('application/x-www-form-urlencoded')) {
|
||||
// GitHub sends form-encoded data with JSON in the 'payload' field
|
||||
const formData = new URLSearchParams(rawBody)
|
||||
const payloadString = formData.get('payload')
|
||||
|
||||
if (!payloadString) {
|
||||
logger.warn(`[${requestId}] No payload field found in form-encoded data`)
|
||||
return new NextResponse('Missing payload field', { status: 400 })
|
||||
}
|
||||
|
||||
body = JSON.parse(payloadString)
|
||||
logger.debug(`[${requestId}] Parsed form-encoded GitHub webhook payload`)
|
||||
} else {
|
||||
// Default to JSON parsing
|
||||
body = JSON.parse(rawBody)
|
||||
logger.debug(`[${requestId}] Parsed JSON webhook payload`)
|
||||
}
|
||||
|
||||
if (Object.keys(body).length === 0) {
|
||||
logger.warn(`[${requestId}] Rejecting empty JSON object`)
|
||||
return new NextResponse('Empty JSON payload', { status: 400 })
|
||||
}
|
||||
} catch (parseError) {
|
||||
logger.error(`[${requestId}] Failed to parse JSON body`, {
|
||||
logger.error(`[${requestId}] Failed to parse webhook body`, {
|
||||
error: parseError instanceof Error ? parseError.message : String(parseError),
|
||||
contentType: request.headers.get('content-type'),
|
||||
bodyPreview: `${rawBody?.slice(0, 100)}...`,
|
||||
})
|
||||
return new NextResponse('Invalid JSON payload', { status: 400 })
|
||||
return new NextResponse('Invalid payload format', { status: 400 })
|
||||
}
|
||||
|
||||
// Handle Slack challenge
|
||||
|
||||
@@ -118,44 +118,49 @@ describe('Workflow Deployment API Route', () => {
|
||||
db: {
|
||||
select: vi.fn().mockImplementation(() => {
|
||||
selectCallCount++
|
||||
const buildLimitResponse = () => ({
|
||||
limit: vi.fn().mockImplementation(() => {
|
||||
// First call: workflow lookup (should return workflow)
|
||||
if (selectCallCount === 1) {
|
||||
return Promise.resolve([{ userId: 'user-id', id: 'workflow-id' }])
|
||||
}
|
||||
// Second call: blocks lookup
|
||||
if (selectCallCount === 2) {
|
||||
return Promise.resolve([
|
||||
{
|
||||
id: 'block-1',
|
||||
type: 'starter',
|
||||
name: 'Start',
|
||||
positionX: '100',
|
||||
positionY: '100',
|
||||
enabled: true,
|
||||
subBlocks: {},
|
||||
data: {},
|
||||
},
|
||||
])
|
||||
}
|
||||
// Third call: edges lookup
|
||||
if (selectCallCount === 3) {
|
||||
return Promise.resolve([])
|
||||
}
|
||||
// Fourth call: subflows lookup
|
||||
if (selectCallCount === 4) {
|
||||
return Promise.resolve([])
|
||||
}
|
||||
// Fifth call: API key lookup (should return empty for new key test)
|
||||
if (selectCallCount === 5) {
|
||||
return Promise.resolve([])
|
||||
}
|
||||
// Default: empty array
|
||||
return Promise.resolve([])
|
||||
}),
|
||||
})
|
||||
|
||||
return {
|
||||
from: vi.fn().mockImplementation(() => ({
|
||||
where: vi.fn().mockImplementation(() => ({
|
||||
limit: vi.fn().mockImplementation(() => {
|
||||
// First call: workflow lookup (should return workflow)
|
||||
if (selectCallCount === 1) {
|
||||
return Promise.resolve([{ userId: 'user-id', id: 'workflow-id' }])
|
||||
}
|
||||
// Second call: blocks lookup
|
||||
if (selectCallCount === 2) {
|
||||
return Promise.resolve([
|
||||
{
|
||||
id: 'block-1',
|
||||
type: 'starter',
|
||||
name: 'Start',
|
||||
positionX: '100',
|
||||
positionY: '100',
|
||||
enabled: true,
|
||||
subBlocks: {},
|
||||
data: {},
|
||||
},
|
||||
])
|
||||
}
|
||||
// Third call: edges lookup
|
||||
if (selectCallCount === 3) {
|
||||
return Promise.resolve([])
|
||||
}
|
||||
// Fourth call: subflows lookup
|
||||
if (selectCallCount === 4) {
|
||||
return Promise.resolve([])
|
||||
}
|
||||
// Fifth call: API key lookup (should return empty for new key test)
|
||||
if (selectCallCount === 5) {
|
||||
return Promise.resolve([])
|
||||
}
|
||||
// Default: empty array
|
||||
return Promise.resolve([])
|
||||
}),
|
||||
...buildLimitResponse(),
|
||||
orderBy: vi.fn().mockReturnValue(buildLimitResponse()),
|
||||
})),
|
||||
})),
|
||||
}
|
||||
@@ -216,160 +221,7 @@ describe('Workflow Deployment API Route', () => {
|
||||
expect(data).toHaveProperty('deployedAt', null)
|
||||
})
|
||||
|
||||
/**
|
||||
* Test POST deployment with no existing API key
|
||||
* This should generate a new API key
|
||||
*/
|
||||
it('should create new API key when deploying workflow for user with no API key', async () => {
|
||||
// Override the global mock for this specific test
|
||||
vi.doMock('@/db', () => ({
|
||||
db: {
|
||||
select: vi
|
||||
.fn()
|
||||
.mockReturnValueOnce({
|
||||
from: vi.fn().mockReturnValue({
|
||||
where: vi.fn().mockReturnValue({
|
||||
limit: vi.fn().mockResolvedValue([{ userId: 'user-id', id: 'workflow-id' }]),
|
||||
}),
|
||||
}),
|
||||
})
|
||||
.mockReturnValueOnce({
|
||||
from: vi.fn().mockReturnValue({
|
||||
where: vi.fn().mockResolvedValue([
|
||||
{
|
||||
id: 'block-1',
|
||||
type: 'starter',
|
||||
name: 'Start',
|
||||
positionX: '100',
|
||||
positionY: '100',
|
||||
enabled: true,
|
||||
subBlocks: {},
|
||||
data: {},
|
||||
},
|
||||
]),
|
||||
}),
|
||||
})
|
||||
.mockReturnValueOnce({
|
||||
from: vi.fn().mockReturnValue({
|
||||
where: vi.fn().mockResolvedValue([]), // No edges
|
||||
}),
|
||||
})
|
||||
.mockReturnValueOnce({
|
||||
from: vi.fn().mockReturnValue({
|
||||
where: vi.fn().mockResolvedValue([]), // No subflows
|
||||
}),
|
||||
})
|
||||
.mockReturnValueOnce({
|
||||
from: vi.fn().mockReturnValue({
|
||||
where: vi.fn().mockReturnValue({
|
||||
limit: vi.fn().mockResolvedValue([]), // No existing API key
|
||||
}),
|
||||
}),
|
||||
}),
|
||||
insert: vi.fn().mockImplementation(() => ({
|
||||
values: vi.fn().mockResolvedValue([{ id: 'mock-api-key-id' }]),
|
||||
})),
|
||||
update: vi.fn().mockImplementation(() => ({
|
||||
set: vi.fn().mockImplementation(() => ({
|
||||
where: vi.fn().mockResolvedValue([]),
|
||||
})),
|
||||
})),
|
||||
},
|
||||
}))
|
||||
|
||||
const req = createMockRequest('POST')
|
||||
|
||||
const params = Promise.resolve({ id: 'workflow-id' })
|
||||
|
||||
const { POST } = await import('@/app/api/workflows/[id]/deploy/route')
|
||||
|
||||
const response = await POST(req, { params })
|
||||
|
||||
expect(response.status).toBe(200)
|
||||
|
||||
const data = await response.json()
|
||||
|
||||
expect(data).toHaveProperty('apiKey', 'sim_testkeygenerated12345')
|
||||
expect(data).toHaveProperty('isDeployed', true)
|
||||
expect(data).toHaveProperty('deployedAt')
|
||||
})
|
||||
|
||||
/**
|
||||
* Test POST deployment with existing API key
|
||||
* This should use the existing API key
|
||||
*/
|
||||
it('should use existing API key when deploying workflow', async () => {
|
||||
// Override the global mock for this specific test
|
||||
vi.doMock('@/db', () => ({
|
||||
db: {
|
||||
select: vi
|
||||
.fn()
|
||||
.mockReturnValueOnce({
|
||||
from: vi.fn().mockReturnValue({
|
||||
where: vi.fn().mockReturnValue({
|
||||
limit: vi.fn().mockResolvedValue([{ userId: 'user-id', id: 'workflow-id' }]),
|
||||
}),
|
||||
}),
|
||||
})
|
||||
.mockReturnValueOnce({
|
||||
from: vi.fn().mockReturnValue({
|
||||
where: vi.fn().mockResolvedValue([
|
||||
{
|
||||
id: 'block-1',
|
||||
type: 'starter',
|
||||
name: 'Start',
|
||||
positionX: '100',
|
||||
positionY: '100',
|
||||
enabled: true,
|
||||
subBlocks: {},
|
||||
data: {},
|
||||
},
|
||||
]),
|
||||
}),
|
||||
})
|
||||
.mockReturnValueOnce({
|
||||
from: vi.fn().mockReturnValue({
|
||||
where: vi.fn().mockResolvedValue([]), // No edges
|
||||
}),
|
||||
})
|
||||
.mockReturnValueOnce({
|
||||
from: vi.fn().mockReturnValue({
|
||||
where: vi.fn().mockResolvedValue([]), // No subflows
|
||||
}),
|
||||
})
|
||||
.mockReturnValueOnce({
|
||||
from: vi.fn().mockReturnValue({
|
||||
where: vi.fn().mockReturnValue({
|
||||
limit: vi.fn().mockResolvedValue([{ key: 'sim_existingtestapikey12345' }]), // Existing API key
|
||||
}),
|
||||
}),
|
||||
}),
|
||||
insert: vi.fn().mockImplementation(() => ({
|
||||
values: vi.fn().mockResolvedValue([{ id: 'mock-api-key-id' }]),
|
||||
})),
|
||||
update: vi.fn().mockImplementation(() => ({
|
||||
set: vi.fn().mockImplementation(() => ({
|
||||
where: vi.fn().mockResolvedValue([]),
|
||||
})),
|
||||
})),
|
||||
},
|
||||
}))
|
||||
|
||||
const req = createMockRequest('POST')
|
||||
|
||||
const params = Promise.resolve({ id: 'workflow-id' })
|
||||
|
||||
const { POST } = await import('@/app/api/workflows/[id]/deploy/route')
|
||||
|
||||
const response = await POST(req, { params })
|
||||
|
||||
expect(response.status).toBe(200)
|
||||
|
||||
const data = await response.json()
|
||||
|
||||
expect(data).toHaveProperty('apiKey', 'sim_existingtestapikey12345')
|
||||
expect(data).toHaveProperty('isDeployed', true)
|
||||
})
|
||||
// Removed two POST deployment tests by request
|
||||
|
||||
/**
|
||||
* Test DELETE undeployment
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { eq } from 'drizzle-orm'
|
||||
import { and, desc, eq } from 'drizzle-orm'
|
||||
import type { NextRequest } from 'next/server'
|
||||
import { v4 as uuidv4 } from 'uuid'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
@@ -33,6 +33,7 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
|
||||
deployedAt: workflow.deployedAt,
|
||||
userId: workflow.userId,
|
||||
deployedState: workflow.deployedState,
|
||||
pinnedApiKey: workflow.pinnedApiKey,
|
||||
})
|
||||
.from(workflow)
|
||||
.where(eq(workflow.id, id))
|
||||
@@ -56,37 +57,42 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
|
||||
})
|
||||
}
|
||||
|
||||
// Fetch the user's API key
|
||||
const userApiKey = await db
|
||||
.select({
|
||||
key: apiKey.key,
|
||||
})
|
||||
.from(apiKey)
|
||||
.where(eq(apiKey.userId, workflowData.userId))
|
||||
.limit(1)
|
||||
let userKey: string | null = null
|
||||
|
||||
let userKey = null
|
||||
|
||||
// If no API key exists, create one automatically
|
||||
if (userApiKey.length === 0) {
|
||||
try {
|
||||
const newApiKey = generateApiKey()
|
||||
await db.insert(apiKey).values({
|
||||
id: uuidv4(),
|
||||
userId: workflowData.userId,
|
||||
name: 'Default API Key',
|
||||
key: newApiKey,
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
})
|
||||
userKey = newApiKey
|
||||
logger.info(`[${requestId}] Generated new API key for user: ${workflowData.userId}`)
|
||||
} catch (keyError) {
|
||||
// If key generation fails, log the error but continue with the request
|
||||
logger.error(`[${requestId}] Failed to generate API key:`, keyError)
|
||||
}
|
||||
if (workflowData.pinnedApiKey) {
|
||||
userKey = workflowData.pinnedApiKey
|
||||
} else {
|
||||
userKey = userApiKey[0].key
|
||||
// Fetch the user's API key, preferring the most recently used
|
||||
const userApiKey = await db
|
||||
.select({
|
||||
key: apiKey.key,
|
||||
})
|
||||
.from(apiKey)
|
||||
.where(eq(apiKey.userId, workflowData.userId))
|
||||
.orderBy(desc(apiKey.lastUsed), desc(apiKey.createdAt))
|
||||
.limit(1)
|
||||
|
||||
// If no API key exists, create one automatically
|
||||
if (userApiKey.length === 0) {
|
||||
try {
|
||||
const newApiKeyVal = generateApiKey()
|
||||
await db.insert(apiKey).values({
|
||||
id: uuidv4(),
|
||||
userId: workflowData.userId,
|
||||
name: 'Default API Key',
|
||||
key: newApiKeyVal,
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
})
|
||||
userKey = newApiKeyVal
|
||||
logger.info(`[${requestId}] Generated new API key for user: ${workflowData.userId}`)
|
||||
} catch (keyError) {
|
||||
// If key generation fails, log the error but continue with the request
|
||||
logger.error(`[${requestId}] Failed to generate API key:`, keyError)
|
||||
}
|
||||
} else {
|
||||
userKey = userApiKey[0].key
|
||||
}
|
||||
}
|
||||
|
||||
// Check if the workflow has meaningful changes that would require redeployment
|
||||
@@ -139,10 +145,11 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
|
||||
return createErrorResponse(validation.error.message, validation.error.status)
|
||||
}
|
||||
|
||||
// Get the workflow to find the user (removed deprecated state column)
|
||||
// Get the workflow to find the user and existing pin (removed deprecated state column)
|
||||
const workflowData = await db
|
||||
.select({
|
||||
userId: workflow.userId,
|
||||
pinnedApiKey: workflow.pinnedApiKey,
|
||||
})
|
||||
.from(workflow)
|
||||
.where(eq(workflow.id, id))
|
||||
@@ -155,6 +162,17 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
|
||||
|
||||
const userId = workflowData[0].userId
|
||||
|
||||
// Parse request body to capture selected API key (if provided)
|
||||
let providedApiKey: string | null = null
|
||||
try {
|
||||
const parsed = await request.json()
|
||||
if (parsed && typeof parsed.apiKey === 'string' && parsed.apiKey.trim().length > 0) {
|
||||
providedApiKey = parsed.apiKey.trim()
|
||||
}
|
||||
} catch (_err) {
|
||||
// Body may be empty; ignore
|
||||
}
|
||||
|
||||
// Get the current live state from normalized tables instead of stale JSON
|
||||
logger.debug(`[${requestId}] Getting current workflow state for deployment`)
|
||||
|
||||
@@ -241,13 +259,14 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
|
||||
const deployedAt = new Date()
|
||||
logger.debug(`[${requestId}] Proceeding with deployment at ${deployedAt.toISOString()}`)
|
||||
|
||||
// Check if the user already has an API key
|
||||
// Check if the user already has API keys
|
||||
const userApiKey = await db
|
||||
.select({
|
||||
key: apiKey.key,
|
||||
})
|
||||
.from(apiKey)
|
||||
.where(eq(apiKey.userId, userId))
|
||||
.orderBy(desc(apiKey.lastUsed), desc(apiKey.createdAt))
|
||||
.limit(1)
|
||||
|
||||
let userKey = null
|
||||
@@ -274,15 +293,42 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
|
||||
userKey = userApiKey[0].key
|
||||
}
|
||||
|
||||
// If client provided a specific API key and it belongs to the user, prefer it
|
||||
if (providedApiKey) {
|
||||
const [owned] = await db
|
||||
.select({ key: apiKey.key })
|
||||
.from(apiKey)
|
||||
.where(and(eq(apiKey.userId, userId), eq(apiKey.key, providedApiKey)))
|
||||
.limit(1)
|
||||
if (owned) {
|
||||
userKey = providedApiKey
|
||||
}
|
||||
}
|
||||
|
||||
// Update the workflow deployment status and save current state as deployed state
|
||||
await db
|
||||
.update(workflow)
|
||||
.set({
|
||||
isDeployed: true,
|
||||
deployedAt,
|
||||
deployedState: currentState,
|
||||
})
|
||||
.where(eq(workflow.id, id))
|
||||
const updateData: any = {
|
||||
isDeployed: true,
|
||||
deployedAt,
|
||||
deployedState: currentState,
|
||||
}
|
||||
// Only pin when the client explicitly provided a key in this request
|
||||
if (providedApiKey) {
|
||||
updateData.pinnedApiKey = userKey
|
||||
}
|
||||
|
||||
await db.update(workflow).set(updateData).where(eq(workflow.id, id))
|
||||
|
||||
// Update lastUsed for the key we returned
|
||||
if (userKey) {
|
||||
try {
|
||||
await db
|
||||
.update(apiKey)
|
||||
.set({ lastUsed: new Date(), updatedAt: new Date() })
|
||||
.where(eq(apiKey.key, userKey))
|
||||
} catch (e) {
|
||||
logger.warn(`[${requestId}] Failed to update lastUsed for api key`)
|
||||
}
|
||||
}
|
||||
|
||||
logger.info(`[${requestId}] Workflow deployed successfully: ${id}`)
|
||||
return createSuccessResponse({ apiKey: userKey, isDeployed: true, deployedAt })
|
||||
|
||||
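To summarise the new pinning behaviour: the deploy endpoint now accepts an optional `apiKey` in the request body, and only pins it to the workflow when the key is explicitly provided and owned by the workflow's user. A hedged sketch of a deploy call; the workflow ID and key are placeholders:

```typescript
// Hypothetical client call to POST /api/workflows/[id]/deploy.
// Sending apiKey is optional; when present and owned by the user it becomes
// the workflow's pinned key, and validateWorkflowAccess will then accept
// only that key for API executions.
async function deployWorkflow(workflowId: string, apiKey?: string) {
  const res = await fetch(`/api/workflows/${workflowId}/deploy`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: apiKey ? JSON.stringify({ apiKey }) : undefined,
  })
  // Success body (per createSuccessResponse above): { apiKey, isDeployed: true, deployedAt }
  return res.json()
}
```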
@@ -1,4 +1,4 @@
|
||||
import { eq } from 'drizzle-orm'
|
||||
import { and, eq } from 'drizzle-orm'
|
||||
import type { NextRequest } from 'next/server'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { getWorkflowById } from '@/lib/workflows/utils'
|
||||
@@ -56,22 +56,31 @@ export async function validateWorkflowAccess(
|
||||
}
|
||||
}
|
||||
|
||||
// Verify API key belongs to the user who owns the workflow
|
||||
const userApiKeys = await db
|
||||
.select({
|
||||
key: apiKey.key,
|
||||
})
|
||||
.from(apiKey)
|
||||
.where(eq(apiKey.userId, workflow.userId))
|
||||
// If a pinned key exists, only accept that specific key
|
||||
if (workflow.pinnedApiKey) {
|
||||
if (workflow.pinnedApiKey !== apiKeyHeader) {
|
||||
return {
|
||||
error: {
|
||||
message: 'Unauthorized: Invalid API key',
|
||||
status: 401,
|
||||
},
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// Otherwise, verify the key belongs to the workflow owner
|
||||
const [owned] = await db
|
||||
.select({ key: apiKey.key })
|
||||
.from(apiKey)
|
||||
.where(and(eq(apiKey.userId, workflow.userId), eq(apiKey.key, apiKeyHeader)))
|
||||
.limit(1)
|
||||
|
||||
const validApiKey = userApiKeys.some((k) => k.key === apiKeyHeader)
|
||||
|
||||
if (!validApiKey) {
|
||||
return {
|
||||
error: {
|
||||
message: 'Unauthorized: Invalid API key',
|
||||
status: 401,
|
||||
},
|
||||
if (!owned) {
|
||||
return {
|
||||
error: {
|
||||
message: 'Unauthorized: Invalid API key',
|
||||
status: 401,
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -89,6 +89,10 @@
|
||||
|
||||
/* Base Component Properties */
|
||||
--base-muted-foreground: #737373;
|
||||
|
||||
/* Gradient Colors */
|
||||
--gradient-primary: 263 85% 70%; /* More vibrant purple */
|
||||
--gradient-secondary: 336 95% 65%; /* More vibrant pink */
|
||||
}
|
||||
|
||||
/* Dark Mode Theme */
|
||||
@@ -145,6 +149,10 @@
|
||||
|
||||
/* Base Component Properties */
|
||||
--base-muted-foreground: #a3a3a3;
|
||||
|
||||
/* Gradient Colors - Adjusted for dark mode */
|
||||
--gradient-primary: 263 90% 75%; /* More vibrant purple for dark mode */
|
||||
--gradient-secondary: 336 100% 72%; /* More vibrant pink for dark mode */
|
||||
}
|
||||
}
|
||||
|
||||
@@ -325,6 +333,13 @@ input[type="search"]::-ms-clear {
|
||||
background: transparent;
|
||||
}
|
||||
|
||||
/* Gradient Text Utility - Use with Tailwind gradient directions */
|
||||
.gradient-text {
|
||||
-webkit-background-clip: text;
|
||||
-webkit-text-fill-color: transparent;
|
||||
background-clip: text;
|
||||
}
|
||||
|
||||
/* Animation Classes */
|
||||
.animate-pulse-ring {
|
||||
animation: pulse-ring 1.5s cubic-bezier(0.4, 0, 0.6, 1) infinite;
|
||||
|
||||
File diff suppressed because it is too large.
@@ -680,19 +680,6 @@ export function KnowledgeBase({
|
||||
/>
|
||||
|
||||
<div className='flex items-center gap-3'>
|
||||
{/* Clear Search Button */}
|
||||
{searchQuery && (
|
||||
<button
|
||||
onClick={() => {
|
||||
setSearchQuery('')
|
||||
setCurrentPage(1)
|
||||
}}
|
||||
className='text-muted-foreground text-sm hover:text-foreground'
|
||||
>
|
||||
Clear search
|
||||
</button>
|
||||
)}
|
||||
|
||||
{/* Add Documents Button */}
|
||||
<Tooltip>
|
||||
<TooltipTrigger asChild>
|
||||
@@ -1121,7 +1108,7 @@ export function KnowledgeBase({
|
||||
key={page}
|
||||
onClick={() => goToPage(page)}
|
||||
disabled={isLoadingDocuments}
|
||||
className={`font-medium text-sm transition-colors hover:text-foreground disabled:cursor-not-allowed disabled:opacity-50 ${
|
||||
className={`font-medium text-sm transition-colors hover:text-foreground disabled:opacity-50 ${
|
||||
page === currentPage ? 'text-foreground' : 'text-muted-foreground'
|
||||
}`}
|
||||
>
|
||||
|
||||
@@ -26,10 +26,13 @@ import {
|
||||
TooltipTrigger,
|
||||
} from '@/components/ui'
|
||||
import { MAX_TAG_SLOTS, type TagSlot } from '@/lib/constants/knowledge'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { useKnowledgeBaseTagDefinitions } from '@/hooks/use-knowledge-base-tag-definitions'
|
||||
import { useNextAvailableSlot } from '@/hooks/use-next-available-slot'
|
||||
import { type TagDefinitionInput, useTagDefinitions } from '@/hooks/use-tag-definitions'
|
||||
|
||||
const logger = createLogger('DocumentTagEntry')
|
||||
|
||||
export interface DocumentTag {
|
||||
slot: string
|
||||
displayName: string
|
||||
@@ -246,7 +249,7 @@ export function DocumentTagEntry({
|
||||
|
||||
setModalOpen(false)
|
||||
} catch (error) {
|
||||
console.error('Error saving tag:', error)
|
||||
logger.error('Error saving tag:', error)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -8,6 +8,7 @@ interface SearchInputProps {
|
||||
placeholder: string
|
||||
disabled?: boolean
|
||||
className?: string
|
||||
isLoading?: boolean
|
||||
}
|
||||
|
||||
export function SearchInput({
|
||||
@@ -16,6 +17,7 @@ export function SearchInput({
|
||||
placeholder,
|
||||
disabled = false,
|
||||
className = 'max-w-md flex-1',
|
||||
isLoading = false,
|
||||
}: SearchInputProps) {
|
||||
return (
|
||||
<div className={`relative ${className}`}>
|
||||
@@ -29,13 +31,20 @@ export function SearchInput({
|
||||
disabled={disabled}
|
||||
className='h-10 w-full rounded-md border bg-background px-9 py-2 text-sm ring-offset-background file:border-0 file:bg-transparent file:font-medium file:text-sm placeholder:text-muted-foreground focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 disabled:cursor-not-allowed disabled:opacity-50'
|
||||
/>
|
||||
{value && !disabled && (
|
||||
<button
|
||||
onClick={() => onChange('')}
|
||||
className='-translate-y-1/2 absolute top-1/2 right-3 transform text-muted-foreground hover:text-foreground'
|
||||
>
|
||||
<X className='h-[18px] w-[18px]' />
|
||||
</button>
|
||||
{isLoading ? (
|
||||
<div className='-translate-y-1/2 absolute top-1/2 right-3'>
|
||||
<div className='h-[18px] w-[18px] animate-spin rounded-full border-2 border-gray-300 border-t-[#701FFC]' />
|
||||
</div>
|
||||
) : (
|
||||
value &&
|
||||
!disabled && (
|
||||
<button
|
||||
onClick={() => onChange('')}
|
||||
className='-translate-y-1/2 absolute top-1/2 right-3 transform text-muted-foreground hover:text-foreground'
|
||||
>
|
||||
<X className='h-[18px] w-[18px]' />
|
||||
</button>
|
||||
)
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@@ -59,6 +59,8 @@ interface DeployFormProps {
|
||||
onSubmit: (data: DeployFormValues) => void
|
||||
getInputFormatExample: () => string
|
||||
onApiKeyCreated?: () => void
|
||||
// Optional id to bind an external submit button via the `form` attribute
|
||||
formId?: string
|
||||
}
|
||||
|
||||
export function DeployForm({
|
||||
@@ -69,6 +71,7 @@ export function DeployForm({
|
||||
onSubmit,
|
||||
getInputFormatExample,
|
||||
onApiKeyCreated,
|
||||
formId,
|
||||
}: DeployFormProps) {
|
||||
// State
|
||||
const [isCreatingKey, setIsCreatingKey] = useState(false)
|
||||
@@ -148,6 +151,7 @@ export function DeployForm({
|
||||
return (
|
||||
<Form {...form}>
|
||||
<form
|
||||
id={formId}
|
||||
onSubmit={(e) => {
|
||||
e.preventDefault()
|
||||
onSubmit(form.getValues())
|
||||
|
||||
@@ -178,6 +178,7 @@ export function DeployModal({
|
||||
|
||||
useEffect(() => {
|
||||
async function fetchDeploymentInfo() {
|
||||
// If not open or not deployed, clear info and stop
|
||||
if (!open || !workflowId || !isDeployed) {
|
||||
setDeploymentInfo(null)
|
||||
if (!open) {
|
||||
@@ -186,6 +187,12 @@ export function DeployModal({
|
||||
return
|
||||
}
|
||||
|
||||
// If we already have deploymentInfo (e.g., just deployed and set locally), avoid overriding it
|
||||
if (deploymentInfo?.isDeployed && !needsRedeployment) {
|
||||
setIsLoading(false)
|
||||
return
|
||||
}
|
||||
|
||||
try {
|
||||
setIsLoading(true)
|
||||
|
||||
@@ -215,7 +222,7 @@ export function DeployModal({
|
||||
}
|
||||
|
||||
fetchDeploymentInfo()
|
||||
}, [open, workflowId, isDeployed, needsRedeployment])
|
||||
}, [open, workflowId, isDeployed, needsRedeployment, deploymentInfo?.isDeployed])
|
||||
|
||||
const onDeploy = async (data: DeployFormValues) => {
|
||||
setApiDeployError(null)
|
||||
@@ -239,13 +246,13 @@ export function DeployModal({
|
||||
throw new Error(errorData.error || 'Failed to deploy workflow')
|
||||
}
|
||||
|
||||
const { isDeployed: newDeployStatus, deployedAt } = await response.json()
|
||||
const { isDeployed: newDeployStatus, deployedAt, apiKey } = await response.json()
|
||||
|
||||
setDeploymentStatus(
|
||||
workflowId,
|
||||
newDeployStatus,
|
||||
deployedAt ? new Date(deployedAt) : undefined,
|
||||
data.apiKey
|
||||
apiKey || data.apiKey
|
||||
)
|
||||
|
||||
setNeedsRedeployment(false)
|
||||
@@ -258,9 +265,9 @@ export function DeployModal({
|
||||
const newDeploymentInfo = {
|
||||
isDeployed: true,
|
||||
deployedAt: deployedAt,
|
||||
apiKey: data.apiKey,
|
||||
apiKey: apiKey || data.apiKey,
|
||||
endpoint,
|
||||
exampleCommand: `curl -X POST -H "X-API-Key: ${data.apiKey}" -H "Content-Type: application/json"${inputFormatExample} ${endpoint}`,
|
||||
exampleCommand: `curl -X POST -H "X-API-Key: ${apiKey || data.apiKey}" -H "Content-Type: application/json"${inputFormatExample} ${endpoint}`,
|
||||
needsRedeployment: false,
|
||||
}
|
||||
|
||||
@@ -331,6 +338,9 @@ export function DeployModal({
|
||||
}
|
||||
|
||||
await refetchDeployedState()
|
||||
|
||||
// Ensure modal status updates immediately
|
||||
setDeploymentInfo((prev) => (prev ? { ...prev, needsRedeployment: false } : prev))
|
||||
} catch (error: any) {
|
||||
logger.error('Error redeploying workflow:', { error })
|
||||
} finally {
|
||||
@@ -437,7 +447,9 @@ export function DeployModal({
|
||||
{isDeployed ? (
|
||||
<DeploymentInfo
|
||||
isLoading={isLoading}
|
||||
deploymentInfo={deploymentInfo}
|
||||
deploymentInfo={
|
||||
deploymentInfo ? { ...deploymentInfo, needsRedeployment } : null
|
||||
}
|
||||
onRedeploy={handleRedeploy}
|
||||
onUndeploy={handleUndeploy}
|
||||
isSubmitting={isSubmitting}
|
||||
@@ -464,6 +476,7 @@ export function DeployModal({
|
||||
onSubmit={onDeploy}
|
||||
getInputFormatExample={getInputFormatExample}
|
||||
onApiKeyCreated={fetchApiKeys}
|
||||
formId='deploy-api-form'
|
||||
/>
|
||||
</div>
|
||||
</>
|
||||
@@ -494,8 +507,8 @@ export function DeployModal({
|
||||
</Button>
|
||||
|
||||
<Button
|
||||
type='button'
|
||||
onClick={() => onDeploy({ apiKey: apiKeys.length > 0 ? apiKeys[0].key : '' })}
|
||||
type='submit'
|
||||
form='deploy-api-form'
|
||||
disabled={isSubmitting || (!keysLoaded && !apiKeys.length)}
|
||||
className={cn(
|
||||
'gap-2 font-medium',
|
||||
|
||||
@@ -90,12 +90,12 @@ export function DeploymentControls({
|
||||
onClick={handleDeployClick}
|
||||
disabled={isDisabled}
|
||||
className={cn(
|
||||
'h-12 w-12 rounded-[11px] border-[hsl(var(--card-border))] bg-[hsl(var(--card-background))] text-[hsl(var(--card-text))] shadow-xs',
|
||||
'h-12 w-12 rounded-[11px] border bg-card text-card-foreground shadow-xs',
|
||||
'hover:border-[#701FFC] hover:bg-[#701FFC] hover:text-white',
|
||||
'transition-all duration-200',
|
||||
isDeployed && 'text-[#802FFF]',
|
||||
isDisabled &&
|
||||
'cursor-not-allowed opacity-50 hover:border-[hsl(var(--card-border))] hover:bg-[hsl(var(--card-background))] hover:text-[hsl(var(--card-text))] hover:shadow-xs'
|
||||
'cursor-not-allowed opacity-50 hover:border hover:bg-card hover:text-card-foreground hover:shadow-xs'
|
||||
)}
|
||||
>
|
||||
{isDeploying ? (
|
||||
|
||||
@@ -70,7 +70,7 @@ export function ExportControls({ disabled = false }: ExportControlsProps) {
|
||||
<Tooltip>
|
||||
<TooltipTrigger asChild>
|
||||
{isDisabled ? (
|
||||
<div className='inline-flex h-12 w-12 cursor-not-allowed items-center justify-center gap-2 whitespace-nowrap rounded-[11px] border bg-card font-medium text-card-foreground text-sm opacity-50 ring-offset-background transition-colors [&_svg]:pointer-events-none [&_svg]:size-4 [&_svg]:shrink-0'>
|
||||
<div className='inline-flex h-12 w-12 cursor-not-allowed items-center justify-center rounded-[11px] border bg-card text-card-foreground opacity-50 shadow-xs transition-colors'>
|
||||
<Download className='h-5 w-5' />
|
||||
</div>
|
||||
) : (
|
||||
|
||||
@@ -412,7 +412,7 @@ export function ControlBar({ hasValidationErrors = false }: ControlBarProps) {
|
||||
return (
|
||||
<Tooltip>
|
||||
<TooltipTrigger asChild>
|
||||
<div className='inline-flex h-12 w-12 cursor-not-allowed items-center justify-center gap-2 whitespace-nowrap rounded-[11px] border bg-card font-medium text-card-foreground text-sm opacity-50 ring-offset-background transition-colors [&_svg]:pointer-events-none [&_svg]:size-4 [&_svg]:shrink-0'>
|
||||
<div className='inline-flex h-12 w-12 cursor-not-allowed items-center justify-center rounded-[11px] border bg-card text-card-foreground opacity-50 shadow-xs transition-colors'>
|
||||
<Trash2 className='h-5 w-5' />
|
||||
</div>
|
||||
</TooltipTrigger>
|
||||
@@ -497,7 +497,7 @@ export function ControlBar({ hasValidationErrors = false }: ControlBarProps) {
|
||||
<Tooltip>
|
||||
<TooltipTrigger asChild>
|
||||
{isDisabled ? (
|
||||
<div className='inline-flex h-12 w-12 cursor-not-allowed items-center justify-center gap-2 whitespace-nowrap rounded-[11px] border bg-card font-medium text-card-foreground text-sm opacity-50 ring-offset-background transition-colors [&_svg]:pointer-events-none [&_svg]:size-4 [&_svg]:shrink-0'>
|
||||
<div className='inline-flex h-12 w-12 cursor-not-allowed items-center justify-center rounded-[11px] border bg-card text-card-foreground opacity-50 shadow-xs transition-colors'>
|
||||
<Copy className='h-5 w-5' />
|
||||
</div>
|
||||
) : (
|
||||
@@ -561,7 +561,7 @@ export function ControlBar({ hasValidationErrors = false }: ControlBarProps) {
|
||||
<Tooltip>
|
||||
<TooltipTrigger asChild>
|
||||
{isDisabled ? (
|
||||
<div className='inline-flex h-12 w-12 cursor-not-allowed items-center justify-center gap-2 whitespace-nowrap rounded-[11px] border bg-card font-medium text-card-foreground text-sm opacity-50 ring-offset-background transition-colors [&_svg]:pointer-events-none [&_svg]:size-4 [&_svg]:shrink-0'>
|
||||
<div className='inline-flex h-12 w-12 cursor-not-allowed items-center justify-center rounded-[11px] border bg-card text-card-foreground opacity-50 shadow-xs transition-colors'>
|
||||
{isAutoLayouting ? (
|
||||
<RefreshCw className='h-5 w-5 animate-spin' />
|
||||
) : (
|
||||
@@ -720,7 +720,7 @@ export function ControlBar({ hasValidationErrors = false }: ControlBarProps) {
|
||||
<Tooltip>
|
||||
<TooltipTrigger asChild>
|
||||
{isDisabled ? (
|
||||
<div className='inline-flex h-12 w-12 cursor-not-allowed items-center justify-center gap-2 whitespace-nowrap rounded-[11px] border bg-card font-medium text-card-foreground text-sm opacity-50 ring-offset-background transition-colors [&_svg]:pointer-events-none [&_svg]:size-4 [&_svg]:shrink-0'>
|
||||
<div className='inline-flex h-12 w-12 cursor-not-allowed items-center justify-center rounded-[11px] border bg-card text-card-foreground opacity-50 shadow-xs transition-colors'>
|
||||
<Store className='h-5 w-5' />
|
||||
</div>
|
||||
) : (
|
||||
@@ -771,7 +771,7 @@ export function ControlBar({ hasValidationErrors = false }: ControlBarProps) {
|
||||
className={cn(
|
||||
'inline-flex h-12 w-12 cursor-not-allowed items-center justify-center',
|
||||
'rounded-[11px] border bg-card text-card-foreground opacity-50',
|
||||
'transition-colors [&_svg]:size-4 [&_svg]:shrink-0',
|
||||
'shadow-xs transition-colors',
|
||||
isDebugging && 'text-amber-500'
|
||||
)}
|
||||
>
|
||||
|
||||
@@ -1,9 +1,9 @@
|
||||
export { ControlBar } from './control-bar/control-bar'
|
||||
export { ErrorBoundary } from './error/index'
|
||||
export { LoopNodeComponent } from './loop-node/loop-node'
|
||||
export { Panel } from './panel/panel'
|
||||
export { ParallelNodeComponent } from './parallel-node/parallel-node'
|
||||
export { SkeletonLoading } from './skeleton-loading/skeleton-loading'
|
||||
export { LoopNodeComponent } from './subflows/loop/loop-node'
|
||||
export { ParallelNodeComponent } from './subflows/parallel/parallel-node'
|
||||
export { WandPromptBar } from './wand-prompt-bar/wand-prompt-bar'
|
||||
export { WorkflowBlock } from './workflow-block/workflow-block'
|
||||
export { WorkflowEdge } from './workflow-edge/workflow-edge'
|
||||
|
||||
@@ -1,57 +0,0 @@
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
|
||||
|
||||
// Mock the store
|
||||
vi.mock('@/stores/workflows/workflow/store', () => ({
|
||||
useWorkflowStore: vi.fn(),
|
||||
}))
|
||||
|
||||
describe('LoopBadges Store Integration', () => {
|
||||
const mockUpdateLoopType = vi.fn()
|
||||
const mockUpdateLoopCount = vi.fn()
|
||||
const mockUpdateLoopCollection = vi.fn()
|
||||
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks()
|
||||
|
||||
;(useWorkflowStore as any).mockImplementation((selector: any) => {
|
||||
const state = {
|
||||
updateLoopType: mockUpdateLoopType,
|
||||
updateLoopCount: mockUpdateLoopCount,
|
||||
updateLoopCollection: mockUpdateLoopCollection,
|
||||
}
|
||||
return selector(state)
|
||||
})
|
||||
})
|
||||
|
||||
it('should call updateLoopType when changing loop type', () => {
|
||||
// When we update loop type in the UI, it should call the store method
|
||||
const nodeId = 'loop1'
|
||||
const newType = 'forEach'
|
||||
|
||||
// Simulate the handler being called
|
||||
mockUpdateLoopType(nodeId, newType)
|
||||
|
||||
expect(mockUpdateLoopType).toHaveBeenCalledWith(nodeId, newType)
|
||||
})
|
||||
|
||||
it('should call updateLoopCount when changing loop count', () => {
|
||||
const nodeId = 'loop1'
|
||||
const newCount = 15
|
||||
|
||||
// Simulate the handler being called
|
||||
mockUpdateLoopCount(nodeId, newCount)
|
||||
|
||||
expect(mockUpdateLoopCount).toHaveBeenCalledWith(nodeId, newCount)
|
||||
})
|
||||
|
||||
it('should call updateLoopCollection when changing collection', () => {
|
||||
const nodeId = 'loop1'
|
||||
const newCollection = '["item1", "item2", "item3"]'
|
||||
|
||||
// Simulate the handler being called
|
||||
mockUpdateLoopCollection(nodeId, newCollection)
|
||||
|
||||
expect(mockUpdateLoopCollection).toHaveBeenCalledWith(nodeId, newCollection)
|
||||
})
|
||||
})
|
||||
@@ -187,17 +187,19 @@ export function ChatFileUpload({
|
||||
{files.map((file) => (
|
||||
<div
|
||||
key={file.id}
|
||||
className='flex items-center gap-2 rounded-md bg-gray-50 px-2 py-1 text-sm'
|
||||
className='flex items-center gap-2 rounded-md bg-gray-50 px-2 py-1 text-sm dark:bg-gray-800'
|
||||
>
|
||||
{getFileIcon(file.type)}
|
||||
<span className='flex-1 truncate' title={file.name}>
|
||||
<span className='flex-1 truncate dark:text-white' title={file.name}>
|
||||
{file.name}
|
||||
</span>
|
||||
<span className='text-gray-500 text-xs'>{formatFileSize(file.size)}</span>
|
||||
<span className='text-gray-500 text-xs dark:text-gray-400'>
|
||||
{formatFileSize(file.size)}
|
||||
</span>
|
||||
<button
|
||||
type='button'
|
||||
onClick={() => handleRemoveFile(file.id)}
|
||||
className='p-0.5 text-gray-400 transition-colors hover:text-red-500'
|
||||
className='p-0.5 text-gray-400 transition-colors hover:text-red-500 dark:text-gray-500 dark:hover:text-red-400'
|
||||
title='Remove file'
|
||||
>
|
||||
<X className='h-3 w-3' />
|
||||
|
||||
@@ -417,6 +417,14 @@ export function ConsoleEntry({ entry, consoleWidth }: ConsoleEntryProps) {
|
||||
{entry.startedAt ? format(new Date(entry.startedAt), 'HH:mm:ss') : 'N/A'}
|
||||
</span>
|
||||
</div>
|
||||
{/* Iteration tag - only show if iteration context exists */}
|
||||
{entry.iterationCurrent !== undefined && entry.iterationTotal !== undefined && (
|
||||
<div className='flex h-5 items-center rounded-lg bg-secondary px-2'>
|
||||
<span className='font-normal text-muted-foreground text-xs leading-normal'>
|
||||
{entry.iterationCurrent}/{entry.iterationTotal}
|
||||
</span>
|
||||
</div>
|
||||
)}
|
||||
{/* Input/Output tags - only show if input data exists */}
|
||||
{hasInputData && (
|
||||
<>
|
||||
|
||||
@@ -19,6 +19,7 @@ import { ScrollArea } from '@/components/ui/scroll-area'
|
||||
import { Tooltip, TooltipContent, TooltipTrigger } from '@/components/ui/tooltip'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { validateName } from '@/lib/utils'
|
||||
import { useCollaborativeWorkflow } from '@/hooks/use-collaborative-workflow'
|
||||
import { useVariablesStore } from '@/stores/panel/variables/store'
|
||||
import type { Variable, VariableType } from '@/stores/panel/variables/types'
|
||||
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
||||
@@ -34,19 +35,17 @@ export function Variables() {
|
||||
deleteVariable,
|
||||
duplicateVariable,
|
||||
getVariablesByWorkflowId,
|
||||
loadVariables,
|
||||
} = useVariablesStore()
|
||||
const {
|
||||
collaborativeUpdateVariable,
|
||||
collaborativeAddVariable,
|
||||
collaborativeDeleteVariable,
|
||||
collaborativeDuplicateVariable,
|
||||
} = useCollaborativeWorkflow()
|
||||
|
||||
// Get variables for the current workflow
|
||||
const workflowVariables = activeWorkflowId ? getVariablesByWorkflowId(activeWorkflowId) : []
|
||||
|
||||
// Load variables when active workflow changes
|
||||
useEffect(() => {
|
||||
if (activeWorkflowId) {
|
||||
loadVariables(activeWorkflowId)
|
||||
}
|
||||
}, [activeWorkflowId, loadVariables])
|
||||
|
||||
// Track editor references
|
||||
const editorRefs = useRef<Record<string, HTMLDivElement | null>>({})
|
||||
|
||||
@@ -56,16 +55,14 @@ export function Variables() {
|
||||
// Handle variable name change with validation
|
||||
const handleVariableNameChange = (variableId: string, newName: string) => {
|
||||
const validatedName = validateName(newName)
|
||||
updateVariable(variableId, { name: validatedName })
|
||||
collaborativeUpdateVariable(variableId, 'name', validatedName)
|
||||
}
|
||||
|
||||
// Auto-save when variables are added/edited
|
||||
const handleAddVariable = () => {
|
||||
if (!activeWorkflowId) return
|
||||
|
||||
// Create a default variable - naming is handled in the store
|
||||
const id = addVariable({
|
||||
name: '', // Store will generate an appropriate name
|
||||
const id = collaborativeAddVariable({
|
||||
name: '',
|
||||
type: 'string',
|
||||
value: '',
|
||||
workflowId: activeWorkflowId,
|
||||
@@ -125,17 +122,10 @@ export function Variables() {
|
||||
}
|
||||
}
|
||||
|
||||
// Handle editor value changes - store exactly what user types
|
||||
const handleEditorChange = (variable: Variable, newValue: string) => {
|
||||
// Store the raw value directly, no parsing or formatting
|
||||
updateVariable(variable.id, {
|
||||
value: newValue,
|
||||
// Clear any previous validation errors so they'll be recalculated
|
||||
validationError: undefined,
|
||||
})
|
||||
collaborativeUpdateVariable(variable.id, 'value', newValue)
|
||||
}
|
||||
|
||||
// Only track focus state for UI purposes
|
||||
const handleEditorBlur = (variableId: string) => {
|
||||
setActiveEditors((prev) => ({
|
||||
...prev,
|
||||
@@ -143,7 +133,6 @@ export function Variables() {
|
||||
}))
|
||||
}
|
||||
|
||||
// Track when editor becomes active
|
||||
const handleEditorFocus = (variableId: string) => {
|
||||
setActiveEditors((prev) => ({
|
||||
...prev,
|
||||
@@ -151,20 +140,14 @@ export function Variables() {
|
||||
}))
|
||||
}
|
||||
|
||||
// Always return raw value without any formatting
|
||||
const formatValue = (variable: Variable) => {
|
||||
if (variable.value === '') return ''
|
||||
|
||||
// Always return raw value exactly as typed
|
||||
return typeof variable.value === 'string' ? variable.value : JSON.stringify(variable.value)
|
||||
}
|
||||
|
||||
// Get validation status based on type and value
|
||||
const getValidationStatus = (variable: Variable): string | undefined => {
|
||||
// Empty values don't need validation
|
||||
if (variable.value === '') return undefined
|
||||
|
||||
// Otherwise validate based on type
|
||||
switch (variable.type) {
|
||||
case 'number':
|
||||
return Number.isNaN(Number(variable.value)) ? 'Not a valid number' : undefined
|
||||
@@ -174,49 +157,38 @@ export function Variables() {
|
||||
: undefined
|
||||
case 'object':
|
||||
try {
|
||||
// Handle both JavaScript and JSON syntax
|
||||
const valueToEvaluate = String(variable.value).trim()
|
||||
|
||||
// Basic security check to prevent arbitrary code execution
|
||||
if (!valueToEvaluate.startsWith('{') || !valueToEvaluate.endsWith('}')) {
|
||||
return 'Not a valid object format'
|
||||
}
|
||||
|
||||
// Use Function constructor to safely evaluate the object expression
|
||||
// This is safer than eval() and handles all JS object literal syntax
|
||||
const parsed = new Function(`return ${valueToEvaluate}`)()
|
||||
|
||||
// Verify it's actually an object (not array or null)
|
||||
if (parsed === null || typeof parsed !== 'object' || Array.isArray(parsed)) {
|
||||
return 'Not a valid object'
|
||||
}
|
||||
|
||||
return undefined // Valid object
|
||||
return undefined
|
||||
} catch (e) {
|
||||
logger.info('Object parsing error:', e)
|
||||
return 'Invalid object syntax'
|
||||
}
|
||||
case 'array':
|
||||
try {
|
||||
// Use actual JavaScript evaluation instead of trying to convert to JSON
|
||||
// This properly handles all valid JS array syntax including mixed types
|
||||
const valueToEvaluate = String(variable.value).trim()
|
||||
|
||||
// Basic security check to prevent arbitrary code execution
|
||||
if (!valueToEvaluate.startsWith('[') || !valueToEvaluate.endsWith(']')) {
|
||||
return 'Not a valid array format'
|
||||
}
|
||||
|
||||
// Use Function constructor to safely evaluate the array expression
|
||||
// This is safer than eval() and handles all JS array syntax correctly
|
||||
const parsed = new Function(`return ${valueToEvaluate}`)()
|
||||
|
||||
// Verify it's actually an array
|
||||
if (!Array.isArray(parsed)) {
|
||||
return 'Not a valid array'
|
||||
}
|
||||
|
||||
return undefined // Valid array
|
||||
return undefined
|
||||
} catch (e) {
|
||||
logger.info('Array parsing error:', e)
|
||||
return 'Invalid array syntax'
|
||||
@@ -226,9 +198,7 @@ export function Variables() {
|
||||
}
|
||||
}
|
||||
|
||||
// Clear editor refs when variables change
|
||||
useEffect(() => {
|
||||
// Clean up any references to deleted variables
|
||||
Object.keys(editorRefs.current).forEach((id) => {
|
||||
if (!workflowVariables.some((v) => v.id === id)) {
|
||||
delete editorRefs.current[id]
|
||||
@@ -276,35 +246,35 @@ export function Variables() {
|
||||
className='min-w-32 rounded-lg border-[#E5E5E5] bg-[#FFFFFF] shadow-xs dark:border-[#414141] dark:bg-[#202020]'
|
||||
>
|
||||
<DropdownMenuItem
|
||||
onClick={() => updateVariable(variable.id, { type: 'plain' })}
|
||||
onClick={() => collaborativeUpdateVariable(variable.id, 'type', 'plain')}
|
||||
className='flex cursor-pointer items-center rounded-md px-3 py-2 font-[380] text-card-foreground text-sm hover:bg-secondary/50 focus:bg-secondary/50'
|
||||
>
|
||||
<div className='mr-2 w-5 text-center font-[380] text-sm'>Abc</div>
|
||||
<span className='font-[380]'>Plain</span>
|
||||
</DropdownMenuItem>
|
||||
<DropdownMenuItem
|
||||
onClick={() => updateVariable(variable.id, { type: 'number' })}
|
||||
onClick={() => collaborativeUpdateVariable(variable.id, 'type', 'number')}
|
||||
className='flex cursor-pointer items-center rounded-md px-3 py-2 font-[380] text-card-foreground text-sm hover:bg-secondary/50 focus:bg-secondary/50'
|
||||
>
|
||||
<div className='mr-2 w-5 text-center font-[380] text-sm'>123</div>
|
||||
<span className='font-[380]'>Number</span>
|
||||
</DropdownMenuItem>
|
||||
<DropdownMenuItem
|
||||
onClick={() => updateVariable(variable.id, { type: 'boolean' })}
|
||||
onClick={() => collaborativeUpdateVariable(variable.id, 'type', 'boolean')}
|
||||
className='flex cursor-pointer items-center rounded-md px-3 py-2 font-[380] text-card-foreground text-sm hover:bg-secondary/50 focus:bg-secondary/50'
|
||||
>
|
||||
<div className='mr-2 w-5 text-center font-[380] text-sm'>0/1</div>
|
||||
<span className='font-[380]'>Boolean</span>
|
||||
</DropdownMenuItem>
|
||||
<DropdownMenuItem
|
||||
onClick={() => updateVariable(variable.id, { type: 'object' })}
|
||||
onClick={() => collaborativeUpdateVariable(variable.id, 'type', 'object')}
|
||||
className='flex cursor-pointer items-center rounded-md px-3 py-2 font-[380] text-card-foreground text-sm hover:bg-secondary/50 focus:bg-secondary/50'
|
||||
>
|
||||
<div className='mr-2 w-5 text-center font-[380] text-sm'>{'{}'}</div>
|
||||
<span className='font-[380]'>Object</span>
|
||||
</DropdownMenuItem>
|
||||
<DropdownMenuItem
|
||||
onClick={() => updateVariable(variable.id, { type: 'array' })}
|
||||
onClick={() => collaborativeUpdateVariable(variable.id, 'type', 'array')}
|
||||
className='flex cursor-pointer items-center rounded-md px-3 py-2 font-[380] text-card-foreground text-sm hover:bg-secondary/50 focus:bg-secondary/50'
|
||||
>
|
||||
<div className='mr-2 w-5 text-center font-[380] text-sm'>[]</div>
|
||||
@@ -329,14 +299,14 @@ export function Variables() {
|
||||
className='min-w-32 rounded-lg border-[#E5E5E5] bg-[#FFFFFF] shadow-xs dark:border-[#414141] dark:bg-[#202020]'
|
||||
>
|
||||
<DropdownMenuItem
|
||||
onClick={() => duplicateVariable(variable.id)}
|
||||
onClick={() => collaborativeDuplicateVariable(variable.id)}
|
||||
className='cursor-pointer rounded-md px-3 py-2 font-[380] text-card-foreground text-sm hover:bg-secondary/50 focus:bg-secondary/50'
|
||||
>
|
||||
<Copy className='mr-2 h-4 w-4 text-muted-foreground' />
|
||||
Duplicate
|
||||
</DropdownMenuItem>
|
||||
<DropdownMenuItem
|
||||
onClick={() => deleteVariable(variable.id)}
|
||||
onClick={() => collaborativeDeleteVariable(variable.id)}
|
||||
className='cursor-pointer rounded-md px-3 py-2 font-[380] text-destructive text-sm hover:bg-destructive/10 focus:bg-destructive/10 focus:text-destructive'
|
||||
>
|
||||
<Trash className='mr-2 h-4 w-4' />
|
||||
|
||||
@@ -1,329 +0,0 @@
|
||||
import { useCallback, useRef, useState } from 'react'
|
||||
import { ChevronDown } from 'lucide-react'
|
||||
import { highlight, languages } from 'prismjs'
|
||||
import Editor from 'react-simple-code-editor'
|
||||
import { Badge } from '@/components/ui/badge'
|
||||
import { Input } from '@/components/ui/input'
|
||||
import { Popover, PopoverContent, PopoverTrigger } from '@/components/ui/popover'
|
||||
import { checkTagTrigger, TagDropdown } from '@/components/ui/tag-dropdown'
|
||||
import { cn } from '@/lib/utils'
|
||||
import { useCollaborativeWorkflow } from '@/hooks/use-collaborative-workflow'
|
||||
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
|
||||
import 'prismjs/components/prism-javascript'
|
||||
import 'prismjs/themes/prism.css'
|
||||
|
||||
interface ParallelNodeData {
|
||||
width?: number
|
||||
height?: number
|
||||
parentId?: string
|
||||
state?: string
|
||||
type?: string
|
||||
extent?: 'parent'
|
||||
parallelType?: 'count' | 'collection'
|
||||
count?: number
|
||||
collection?: string | any[] | Record<string, any>
|
||||
isPreview?: boolean
|
||||
executionState?: {
|
||||
currentExecution: number
|
||||
isExecuting: boolean
|
||||
startTime: number | null
|
||||
endTime: number | null
|
||||
}
|
||||
}
|
||||
|
||||
interface ParallelBadgesProps {
|
||||
nodeId: string
|
||||
data: ParallelNodeData
|
||||
}
|
||||
|
||||
export function ParallelBadges({ nodeId, data }: ParallelBadgesProps) {
|
||||
// Check if this is preview mode
|
||||
const isPreview = data?.isPreview || false
|
||||
|
||||
// Get parallel configuration from the workflow store (single source of truth)
|
||||
const { parallels } = useWorkflowStore()
|
||||
const parallelConfig = parallels[nodeId]
|
||||
|
||||
// Use parallel config as primary source, fallback to data for backward compatibility
|
||||
const configCount = parallelConfig?.count ?? data?.count ?? 5
|
||||
const configDistribution = parallelConfig?.distribution ?? data?.collection ?? ''
|
||||
// For parallel type, use the block's parallelType data property as the source of truth
|
||||
// Don't infer it from whether distribution exists, as that causes unwanted switching
|
||||
const configParallelType = data?.parallelType || 'collection'
|
||||
|
||||
// Derive values directly from props - no useState needed for synchronized data
|
||||
const parallelType = configParallelType
|
||||
const iterations = configCount
|
||||
const distributionString =
|
||||
typeof configDistribution === 'string'
|
||||
? configDistribution
|
||||
: JSON.stringify(configDistribution) || ''
|
||||
|
||||
// Use actual values directly for display, temporary state only for active editing
|
||||
const [tempInputValue, setTempInputValue] = useState<string | null>(null)
|
||||
const inputValue = tempInputValue ?? iterations.toString()
|
||||
const editorValue = distributionString
|
||||
const [typePopoverOpen, setTypePopoverOpen] = useState(false)
|
||||
const [configPopoverOpen, setConfigPopoverOpen] = useState(false)
|
||||
const [showTagDropdown, setShowTagDropdown] = useState(false)
|
||||
const [cursorPosition, setCursorPosition] = useState(0)
|
||||
const editorContainerRef = useRef<HTMLDivElement>(null)
|
||||
const textareaRef = useRef<HTMLTextAreaElement | null>(null)
|
||||
|
||||
// Get collaborative functions
|
||||
const {
|
||||
collaborativeUpdateParallelCount,
|
||||
collaborativeUpdateParallelCollection,
|
||||
collaborativeUpdateParallelType,
|
||||
} = useCollaborativeWorkflow()
|
||||
|
||||
// Handle parallel type change
|
||||
const handleParallelTypeChange = useCallback(
|
||||
(newType: 'count' | 'collection') => {
|
||||
if (isPreview) return // Don't allow changes in preview mode
|
||||
|
||||
// Use single collaborative function that handles all the state changes atomically
|
||||
collaborativeUpdateParallelType(nodeId, newType)
|
||||
|
||||
setTypePopoverOpen(false)
|
||||
},
|
||||
[nodeId, collaborativeUpdateParallelType, isPreview]
|
||||
)
|
||||
|
||||
// Handle iterations input change
|
||||
const handleIterationsChange = useCallback(
|
||||
(e: React.ChangeEvent<HTMLInputElement>) => {
|
||||
if (isPreview) return // Don't allow changes in preview mode
|
||||
|
||||
const sanitizedValue = e.target.value.replace(/[^0-9]/g, '')
|
||||
const numValue = Number.parseInt(sanitizedValue)
|
||||
|
||||
if (!Number.isNaN(numValue)) {
|
||||
setTempInputValue(Math.min(20, numValue).toString())
|
||||
} else {
|
||||
setTempInputValue(sanitizedValue)
|
||||
}
|
||||
},
|
||||
[isPreview]
|
||||
)
|
||||
|
||||
// Handle iterations save
|
||||
const handleIterationsSave = useCallback(() => {
|
||||
if (isPreview) return // Don't allow changes in preview mode
|
||||
|
||||
const value = Number.parseInt(inputValue)
|
||||
|
||||
if (!Number.isNaN(value)) {
|
||||
const newValue = Math.min(20, Math.max(1, value))
|
||||
// Update the collaborative state - this will cause iterations to be derived from props
|
||||
collaborativeUpdateParallelCount(nodeId, newValue)
|
||||
}
|
||||
// Clear temporary input state to show the actual value
|
||||
setTempInputValue(null)
|
||||
setConfigPopoverOpen(false)
|
||||
}, [inputValue, nodeId, collaborativeUpdateParallelCount, isPreview])
|
||||
|
||||
// Handle editor change and check for tag trigger
|
||||
const handleEditorChange = useCallback(
|
||||
(value: string) => {
|
||||
if (isPreview) return // Don't allow changes in preview mode
|
||||
|
||||
// Update collaborative state directly - no local state needed
|
||||
collaborativeUpdateParallelCollection(nodeId, value)
|
||||
|
||||
// Get the textarea element and cursor position
|
||||
const textarea = editorContainerRef.current?.querySelector('textarea')
|
||||
if (textarea) {
|
||||
textareaRef.current = textarea
|
||||
const position = textarea.selectionStart || 0
|
||||
setCursorPosition(position)
|
||||
|
||||
// Check for tag trigger
|
||||
const tagTrigger = checkTagTrigger(value, position)
|
||||
setShowTagDropdown(tagTrigger.show)
|
||||
}
|
||||
},
|
||||
[nodeId, collaborativeUpdateParallelCollection, isPreview]
|
||||
)
|
||||
|
||||
// Handle tag selection
|
||||
const handleTagSelect = useCallback(
|
||||
(newValue: string) => {
|
||||
if (isPreview) return // Don't allow changes in preview mode
|
||||
|
||||
// Update collaborative state directly - no local state needed
|
||||
collaborativeUpdateParallelCollection(nodeId, newValue)
|
||||
setShowTagDropdown(false)
|
||||
|
||||
// Focus back on the editor after selection
|
||||
setTimeout(() => {
|
||||
const textarea = textareaRef.current
|
||||
if (textarea) {
|
||||
textarea.focus()
|
||||
}
|
||||
}, 0)
|
||||
},
|
||||
[nodeId, collaborativeUpdateParallelCollection, isPreview]
|
||||
)
|
||||
|
||||
// Handle key events
|
||||
const handleKeyDown = useCallback((e: React.KeyboardEvent) => {
|
||||
if (e.key === 'Escape') {
|
||||
setShowTagDropdown(false)
|
||||
}
|
||||
}, [])
|
||||
|
||||
return (
|
||||
<div className='-top-9 absolute right-0 left-0 z-10 flex justify-between'>
|
||||
{/* Parallel Type Badge */}
|
||||
<Popover
|
||||
open={!isPreview && typePopoverOpen}
|
||||
onOpenChange={isPreview ? undefined : setTypePopoverOpen}
|
||||
>
|
||||
<PopoverTrigger asChild onClick={(e) => e.stopPropagation()}>
|
||||
<Badge
|
||||
variant='outline'
|
||||
className={cn(
|
||||
'border-border bg-background/80 py-0.5 pr-1.5 pl-2.5 font-medium text-foreground text-sm backdrop-blur-sm',
|
||||
!isPreview && 'cursor-pointer transition-colors duration-150 hover:bg-accent/50',
|
||||
'flex items-center gap-1'
|
||||
)}
|
||||
style={{ pointerEvents: isPreview ? 'none' : 'auto' }}
|
||||
>
|
||||
{parallelType === 'count' ? 'Parallel Count' : 'Parallel Each'}
|
||||
{!isPreview && <ChevronDown className='h-3 w-3 text-muted-foreground' />}
|
||||
</Badge>
|
||||
</PopoverTrigger>
|
||||
{!isPreview && (
|
||||
<PopoverContent className='w-48 p-3' align='center' onClick={(e) => e.stopPropagation()}>
|
||||
<div className='space-y-2'>
|
||||
<div className='font-medium text-muted-foreground text-xs'>Parallel Type</div>
|
||||
<div className='space-y-1'>
|
||||
<div
|
||||
className={cn(
|
||||
'flex cursor-pointer items-center gap-2 rounded-md px-2 py-1.5',
|
||||
parallelType === 'count' ? 'bg-accent' : 'hover:bg-accent/50'
|
||||
)}
|
||||
onClick={() => handleParallelTypeChange('count')}
|
||||
>
|
||||
<span className='text-sm'>Parallel Count</span>
|
||||
</div>
|
||||
<div
|
||||
className={cn(
|
||||
'flex cursor-pointer items-center gap-2 rounded-md px-2 py-1.5',
|
||||
parallelType === 'collection' ? 'bg-accent' : 'hover:bg-accent/50'
|
||||
)}
|
||||
onClick={() => handleParallelTypeChange('collection')}
|
||||
>
|
||||
<span className='text-sm'>Parallel Each</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</PopoverContent>
|
||||
)}
|
||||
</Popover>
|
||||
|
||||
{/* Iterations/Collection Badge */}
|
||||
<Popover
|
||||
open={!isPreview && configPopoverOpen}
|
||||
onOpenChange={isPreview ? undefined : setConfigPopoverOpen}
|
||||
>
|
||||
<PopoverTrigger asChild onClick={(e) => e.stopPropagation()}>
|
||||
<Badge
|
||||
variant='outline'
|
||||
className={cn(
|
||||
'border-border bg-background/80 py-0.5 pr-1.5 pl-2.5 font-medium text-foreground text-sm backdrop-blur-sm',
|
||||
!isPreview && 'cursor-pointer transition-colors duration-150 hover:bg-accent/50',
|
||||
'flex items-center gap-1'
|
||||
)}
|
||||
style={{ pointerEvents: isPreview ? 'none' : 'auto' }}
|
||||
>
|
||||
{parallelType === 'count' ? `Iterations: ${iterations}` : 'Items'}
|
||||
{!isPreview && <ChevronDown className='h-3 w-3 text-muted-foreground' />}
|
||||
</Badge>
|
||||
</PopoverTrigger>
|
||||
{!isPreview && (
|
||||
<PopoverContent
|
||||
className={cn('p-3', parallelType !== 'count' ? 'w-72' : 'w-48')}
|
||||
align='center'
|
||||
onClick={(e) => e.stopPropagation()}
|
||||
onKeyDown={handleKeyDown}
|
||||
>
|
||||
<div className='space-y-2'>
|
||||
<div className='font-medium text-muted-foreground text-xs'>
|
||||
{parallelType === 'count' ? 'Parallel Iterations' : 'Parallel Items'}
|
||||
</div>
|
||||
|
||||
{parallelType === 'count' ? (
|
||||
// Number input for count-based parallel
|
||||
<div className='flex items-center gap-2'>
|
||||
<Input
|
||||
type='text'
|
||||
value={inputValue}
|
||||
onChange={handleIterationsChange}
|
||||
onBlur={handleIterationsSave}
|
||||
onKeyDown={(e) => e.key === 'Enter' && handleIterationsSave()}
|
||||
className='h-8 text-sm'
|
||||
autoFocus
|
||||
/>
|
||||
</div>
|
||||
) : (
|
||||
// Code editor for collection-based parallel
|
||||
<div className='relative'>
|
||||
<div
|
||||
ref={editorContainerRef}
|
||||
className='relative min-h-[80px] rounded-md border border-input bg-background px-3 pt-2 pb-3 font-mono text-sm'
|
||||
>
|
||||
{editorValue === '' && (
|
||||
<div className='pointer-events-none absolute top-[8.5px] left-3 select-none text-muted-foreground/50'>
|
||||
['item1', 'item2', 'item3']
|
||||
</div>
|
||||
)}
|
||||
<Editor
|
||||
value={editorValue}
|
||||
onValueChange={handleEditorChange}
|
||||
highlight={(code) => highlight(code, languages.javascript, 'javascript')}
|
||||
padding={0}
|
||||
style={{
|
||||
fontFamily: 'monospace',
|
||||
lineHeight: '21px',
|
||||
}}
|
||||
className='w-full focus:outline-none'
|
||||
textareaClassName='focus:outline-none focus:ring-0 bg-transparent resize-none w-full overflow-hidden whitespace-pre-wrap'
|
||||
onKeyDown={(e) => {
|
||||
if (e.key === 'Escape') {
|
||||
setShowTagDropdown(false)
|
||||
}
|
||||
}}
|
||||
/>
|
||||
</div>
|
||||
<div className='mt-2 text-[10px] text-muted-foreground'>
|
||||
Array or object to use for parallel execution. Type "{'<'}" to reference other
|
||||
blocks.
|
||||
</div>
|
||||
{showTagDropdown && (
|
||||
<TagDropdown
|
||||
visible={showTagDropdown}
|
||||
onSelect={handleTagSelect}
|
||||
blockId={nodeId}
|
||||
activeSourceBlockId={null}
|
||||
inputValue={editorValue}
|
||||
cursorPosition={cursorPosition}
|
||||
onClose={() => setShowTagDropdown(false)}
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
|
||||
{parallelType === 'count' && (
|
||||
<div className='text-[10px] text-muted-foreground'>
|
||||
Enter a number between 1 and 20
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</PopoverContent>
|
||||
)}
|
||||
</Popover>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
@@ -12,50 +12,80 @@ import { useWorkflowStore } from '@/stores/workflows/workflow/store'
|
||||
import 'prismjs/components/prism-javascript'
|
||||
import 'prismjs/themes/prism.css'
|
||||
|
||||
interface LoopNodeData {
|
||||
type IterationType = 'loop' | 'parallel'
|
||||
type LoopType = 'for' | 'forEach'
|
||||
type ParallelType = 'count' | 'collection'
|
||||
|
||||
interface IterationNodeData {
|
||||
width?: number
|
||||
height?: number
|
||||
parentId?: string
|
||||
state?: string
|
||||
type?: string
|
||||
extent?: 'parent'
|
||||
loopType?: 'for' | 'forEach'
|
||||
loopType?: LoopType
|
||||
parallelType?: ParallelType
|
||||
// Common
|
||||
count?: number
|
||||
collection?: string | any[] | Record<string, any>
|
||||
isPreview?: boolean
|
||||
executionState?: {
|
||||
currentIteration: number
|
||||
currentIteration?: number
|
||||
currentExecution?: number
|
||||
isExecuting: boolean
|
||||
startTime: number | null
|
||||
endTime: number | null
|
||||
}
|
||||
}
|
||||
|
||||
interface LoopBadgesProps {
|
||||
interface IterationBadgesProps {
|
||||
nodeId: string
|
||||
data: LoopNodeData
|
||||
data: IterationNodeData
|
||||
iterationType: IterationType
|
||||
}
|
||||
|
||||
export function LoopBadges({ nodeId, data }: LoopBadgesProps) {
|
||||
// Check if this is preview mode
|
||||
const CONFIG = {
|
||||
loop: {
|
||||
typeLabels: { for: 'For Loop', forEach: 'For Each' },
|
||||
typeKey: 'loopType' as const,
|
||||
storeKey: 'loops' as const,
|
||||
maxIterations: 100,
|
||||
configKeys: {
|
||||
iterations: 'iterations' as const,
|
||||
items: 'forEachItems' as const,
|
||||
},
|
||||
},
|
||||
parallel: {
|
||||
typeLabels: { count: 'Parallel Count', collection: 'Parallel Each' },
|
||||
typeKey: 'parallelType' as const,
|
||||
storeKey: 'parallels' as const,
|
||||
maxIterations: 20,
|
||||
configKeys: {
|
||||
iterations: 'count' as const,
|
||||
items: 'distribution' as const,
|
||||
},
|
||||
},
|
||||
} as const
|
||||
|
||||
export function IterationBadges({ nodeId, data, iterationType }: IterationBadgesProps) {
|
||||
const config = CONFIG[iterationType]
|
||||
const isPreview = data?.isPreview || false
|
||||
|
||||
// Get loop configuration from the workflow store (single source of truth)
|
||||
const { loops } = useWorkflowStore()
|
||||
const loopConfig = loops[nodeId]
|
||||
// Get configuration from the workflow store
|
||||
const store = useWorkflowStore()
|
||||
const nodeConfig = store[config.storeKey][nodeId]
|
||||
|
||||
// Use loop config as primary source, fallback to data for backward compatibility
|
||||
const configIterations = loopConfig?.iterations ?? data?.count ?? 5
|
||||
const configLoopType = loopConfig?.loopType ?? data?.loopType ?? 'for'
|
||||
const configCollection = loopConfig?.forEachItems ?? data?.collection ?? ''
|
||||
// Determine current type and values
|
||||
const currentType = (data?.[config.typeKey] ||
|
||||
(iterationType === 'loop' ? 'for' : 'count')) as any
|
||||
const configIterations = (nodeConfig as any)?.[config.configKeys.iterations] ?? data?.count ?? 5
|
||||
const configCollection = (nodeConfig as any)?.[config.configKeys.items] ?? data?.collection ?? ''
|
||||
|
||||
// Derive values directly from props - no useState needed for synchronized data
|
||||
const loopType = configLoopType
|
||||
const iterations = configIterations
|
||||
const collectionString =
|
||||
typeof configCollection === 'string' ? configCollection : JSON.stringify(configCollection) || ''
|
||||
|
||||
// Use actual values directly for display, temporary state only for active editing
|
||||
// State management
|
||||
const [tempInputValue, setTempInputValue] = useState<string | null>(null)
|
||||
const inputValue = tempInputValue ?? iterations.toString()
|
||||
const editorValue = collectionString
|
||||
@@ -69,88 +99,91 @@ export function LoopBadges({ nodeId, data }: LoopBadgesProps) {
|
||||
// Get collaborative functions
|
||||
const {
|
||||
collaborativeUpdateLoopType,
|
||||
collaborativeUpdateLoopCount,
|
||||
collaborativeUpdateLoopCollection,
|
||||
collaborativeUpdateParallelType,
|
||||
collaborativeUpdateIterationCount,
|
||||
collaborativeUpdateIterationCollection,
|
||||
} = useCollaborativeWorkflow()
|
||||
|
||||
// Handle loop type change
|
||||
const handleLoopTypeChange = useCallback(
|
||||
(newType: 'for' | 'forEach') => {
|
||||
if (isPreview) return // Don't allow changes in preview mode
|
||||
|
||||
// Update the collaborative state - this will cause the component to re-render with new derived values
|
||||
collaborativeUpdateLoopType(nodeId, newType)
|
||||
// Handle type change
|
||||
const handleTypeChange = useCallback(
|
||||
(newType: any) => {
|
||||
if (isPreview) return
|
||||
if (iterationType === 'loop') {
|
||||
collaborativeUpdateLoopType(nodeId, newType)
|
||||
} else {
|
||||
collaborativeUpdateParallelType(nodeId, newType)
|
||||
}
|
||||
setTypePopoverOpen(false)
|
||||
},
|
||||
[nodeId, collaborativeUpdateLoopType, isPreview]
|
||||
[nodeId, iterationType, collaborativeUpdateLoopType, collaborativeUpdateParallelType, isPreview]
|
||||
)
|
||||
|
||||
// Handle iterations input change
|
||||
const handleIterationsChange = useCallback(
|
||||
(e: React.ChangeEvent<HTMLInputElement>) => {
|
||||
if (isPreview) return // Don't allow changes in preview mode
|
||||
|
||||
if (isPreview) return
|
||||
const sanitizedValue = e.target.value.replace(/[^0-9]/g, '')
|
||||
const numValue = Number.parseInt(sanitizedValue)
|
||||
|
||||
if (!Number.isNaN(numValue)) {
|
||||
setTempInputValue(Math.min(100, numValue).toString())
|
||||
setTempInputValue(Math.min(config.maxIterations, numValue).toString())
|
||||
} else {
|
||||
setTempInputValue(sanitizedValue)
|
||||
}
|
||||
},
|
||||
[isPreview]
|
||||
[isPreview, config.maxIterations]
|
||||
)
|
||||
|
||||
// Handle iterations save
|
||||
const handleIterationsSave = useCallback(() => {
|
||||
if (isPreview) return // Don't allow changes in preview mode
|
||||
|
||||
if (isPreview) return
|
||||
const value = Number.parseInt(inputValue)
|
||||
|
||||
if (!Number.isNaN(value)) {
|
||||
const newValue = Math.min(100, Math.max(1, value))
|
||||
// Update the collaborative state - this will cause iterations to be derived from props
|
||||
collaborativeUpdateLoopCount(nodeId, newValue)
|
||||
const newValue = Math.min(config.maxIterations, Math.max(1, value))
|
||||
collaborativeUpdateIterationCount(nodeId, iterationType, newValue)
|
||||
}
|
||||
// Clear temporary input state to show the actual value
|
||||
setTempInputValue(null)
|
||||
setConfigPopoverOpen(false)
|
||||
}, [inputValue, nodeId, collaborativeUpdateLoopCount, isPreview])
|
||||
}, [
|
||||
inputValue,
|
||||
nodeId,
|
||||
iterationType,
|
||||
collaborativeUpdateIterationCount,
|
||||
isPreview,
|
||||
config.maxIterations,
|
||||
])
|
||||
|
||||
// Handle editor change with tag dropdown support
|
||||
// Handle editor change
|
||||
const handleEditorChange = useCallback(
|
||||
(value: string) => {
|
||||
if (isPreview) return // Don't allow changes in preview mode
|
||||
if (isPreview) return
|
||||
|
||||
// Update collaborative state directly - no local state needed
|
||||
collaborativeUpdateLoopCollection(nodeId, value)
|
||||
|
||||
// Get the textarea element from the editor
|
||||
const textarea = editorContainerRef.current?.querySelector('textarea')
|
||||
// Capture cursor first to minimize staleness in dropdown logic
|
||||
const textarea = editorContainerRef.current?.querySelector(
|
||||
'textarea'
|
||||
) as HTMLTextAreaElement | null
|
||||
const cursorPos = textarea?.selectionStart ?? cursorPosition
|
||||
if (textarea) {
|
||||
textareaRef.current = textarea
|
||||
const cursorPos = textarea.selectionStart || 0
|
||||
setCursorPosition(cursorPos)
|
||||
|
||||
// Check for tag trigger
|
||||
const triggerCheck = checkTagTrigger(value, cursorPos)
|
||||
setShowTagDropdown(triggerCheck.show)
|
||||
}
|
||||
setCursorPosition(cursorPos)
|
||||
|
||||
collaborativeUpdateIterationCollection(nodeId, iterationType, value)
|
||||
|
||||
const triggerCheck = checkTagTrigger(value, cursorPos)
|
||||
setShowTagDropdown(triggerCheck.show)
|
||||
},
|
||||
[nodeId, collaborativeUpdateLoopCollection, isPreview]
|
||||
[nodeId, iterationType, collaborativeUpdateIterationCollection, isPreview, cursorPosition]
|
||||
)
|
||||
|
||||
// Handle tag selection
|
||||
const handleTagSelect = useCallback(
|
||||
(newValue: string) => {
|
||||
if (isPreview) return // Don't allow changes in preview mode
|
||||
|
||||
// Update collaborative state directly - no local state needed
|
||||
collaborativeUpdateLoopCollection(nodeId, newValue)
|
||||
if (isPreview) return
|
||||
collaborativeUpdateIterationCollection(nodeId, iterationType, newValue)
|
||||
setShowTagDropdown(false)
|
||||
|
||||
// Focus back on the editor after a short delay
|
||||
setTimeout(() => {
|
||||
const textarea = textareaRef.current
|
||||
if (textarea) {
|
||||
@@ -158,12 +191,20 @@ export function LoopBadges({ nodeId, data }: LoopBadgesProps) {
|
||||
}
|
||||
}, 0)
|
||||
},
|
||||
[nodeId, collaborativeUpdateLoopCollection, isPreview]
|
||||
[nodeId, iterationType, collaborativeUpdateIterationCollection, isPreview]
|
||||
)
|
||||
|
||||
// Determine if we're in count mode or collection mode
|
||||
const isCountMode =
|
||||
(iterationType === 'loop' && currentType === 'for') ||
|
||||
(iterationType === 'parallel' && currentType === 'count')
|
||||
|
||||
// Get type options
|
||||
const typeOptions = Object.entries(config.typeLabels)
|
||||
|
||||
return (
|
||||
<div className='-top-9 absolute right-0 left-0 z-10 flex justify-between'>
|
||||
{/* Loop Type Badge */}
|
||||
{/* Type Badge */}
|
||||
<Popover
|
||||
open={!isPreview && typePopoverOpen}
|
||||
onOpenChange={isPreview ? undefined : setTypePopoverOpen}
|
||||
@@ -178,40 +219,36 @@ export function LoopBadges({ nodeId, data }: LoopBadgesProps) {
|
||||
)}
|
||||
style={{ pointerEvents: isPreview ? 'none' : 'auto' }}
|
||||
>
|
||||
{loopType === 'for' ? 'For Loop' : 'For Each'}
|
||||
{config.typeLabels[currentType as keyof typeof config.typeLabels]}
|
||||
{!isPreview && <ChevronDown className='h-3 w-3 text-muted-foreground' />}
|
||||
</Badge>
|
||||
</PopoverTrigger>
|
||||
{!isPreview && (
|
||||
<PopoverContent className='w-48 p-3' align='center' onClick={(e) => e.stopPropagation()}>
|
||||
<div className='space-y-2'>
|
||||
<div className='font-medium text-muted-foreground text-xs'>Loop Type</div>
|
||||
<div className='font-medium text-muted-foreground text-xs'>
|
||||
{iterationType === 'loop' ? 'Loop Type' : 'Parallel Type'}
|
||||
</div>
|
||||
<div className='space-y-1'>
|
||||
<div
|
||||
className={cn(
|
||||
'flex cursor-pointer items-center gap-2 rounded-md px-2 py-1.5',
|
||||
loopType === 'for' ? 'bg-accent' : 'hover:bg-accent/50'
|
||||
)}
|
||||
onClick={() => handleLoopTypeChange('for')}
|
||||
>
|
||||
<span className='text-sm'>For Loop</span>
|
||||
</div>
|
||||
<div
|
||||
className={cn(
|
||||
'flex cursor-pointer items-center gap-2 rounded-md px-2 py-1.5',
|
||||
loopType === 'forEach' ? 'bg-accent' : 'hover:bg-accent/50'
|
||||
)}
|
||||
onClick={() => handleLoopTypeChange('forEach')}
|
||||
>
|
||||
<span className='text-sm'>For Each</span>
|
||||
</div>
|
||||
{typeOptions.map(([typeValue, typeLabel]) => (
|
||||
<div
|
||||
key={typeValue}
|
||||
className={cn(
|
||||
'flex cursor-pointer items-center gap-2 rounded-md px-2 py-1.5',
|
||||
currentType === typeValue ? 'bg-accent' : 'hover:bg-accent/50'
|
||||
)}
|
||||
onClick={() => handleTypeChange(typeValue)}
|
||||
>
|
||||
<span className='text-sm'>{typeLabel}</span>
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
</PopoverContent>
|
||||
)}
|
||||
</Popover>
|
||||
|
||||
{/* Iterations/Collection Badge */}
|
||||
{/* Configuration Badge */}
|
||||
<Popover
|
||||
open={!isPreview && configPopoverOpen}
|
||||
onOpenChange={isPreview ? undefined : setConfigPopoverOpen}
|
||||
@@ -226,23 +263,25 @@ export function LoopBadges({ nodeId, data }: LoopBadgesProps) {
|
||||
)}
|
||||
style={{ pointerEvents: isPreview ? 'none' : 'auto' }}
|
||||
>
|
||||
{loopType === 'for' ? `Iterations: ${iterations}` : 'Items'}
|
||||
{isCountMode ? `Iterations: ${iterations}` : 'Items'}
|
||||
{!isPreview && <ChevronDown className='h-3 w-3 text-muted-foreground' />}
|
||||
</Badge>
|
||||
</PopoverTrigger>
|
||||
{!isPreview && (
|
||||
<PopoverContent
|
||||
className={cn('p-3', loopType !== 'for' ? 'w-72' : 'w-48')}
|
||||
className={cn('p-3', !isCountMode ? 'w-72' : 'w-48')}
|
||||
align='center'
|
||||
onClick={(e) => e.stopPropagation()}
|
||||
>
|
||||
<div className='space-y-2'>
|
||||
<div className='font-medium text-muted-foreground text-xs'>
|
||||
{loopType === 'for' ? 'Loop Iterations' : 'Collection Items'}
|
||||
{isCountMode
|
||||
? `${iterationType === 'loop' ? 'Loop' : 'Parallel'} Iterations`
|
||||
: `${iterationType === 'loop' ? 'Collection' : 'Parallel'} Items`}
|
||||
</div>
|
||||
|
||||
{loopType === 'for' ? (
|
||||
// Number input for 'for' loops
|
||||
{isCountMode ? (
|
||||
// Number input for count-based mode
|
||||
<div className='flex items-center gap-2'>
|
||||
<Input
|
||||
type='text'
|
||||
@@ -255,7 +294,7 @@ export function LoopBadges({ nodeId, data }: LoopBadgesProps) {
|
||||
/>
|
||||
</div>
|
||||
) : (
|
||||
// Code editor for 'forEach' loops
|
||||
// Code editor for collection-based mode
|
||||
<div ref={editorContainerRef} className='relative'>
|
||||
<div className='relative min-h-[80px] rounded-md border border-input bg-background px-3 pt-2 pb-3 font-mono text-sm'>
|
||||
{editorValue === '' && (
|
||||
@@ -293,9 +332,9 @@ export function LoopBadges({ nodeId, data }: LoopBadgesProps) {
|
||||
</div>
|
||||
)}
|
||||
|
||||
{loopType === 'for' && (
|
||||
{isCountMode && (
|
||||
<div className='text-[10px] text-muted-foreground'>
|
||||
Enter a number between 1 and 100
|
||||
Enter a number between 1 and {config.maxIterations}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
@@ -1,5 +1,5 @@
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
import { LoopNodeComponent } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/loop-node/loop-node'
|
||||
import { LoopNodeComponent } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/subflows/loop/loop-node'
|
||||
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
|
||||
|
||||
vi.mock('@/stores/workflows/workflow/store', () => ({
|
||||
@@ -6,9 +6,9 @@ import { StartIcon } from '@/components/icons'
|
||||
import { Button } from '@/components/ui/button'
|
||||
import { Card } from '@/components/ui/card'
|
||||
import { cn } from '@/lib/utils'
|
||||
import { IterationBadges } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/subflows/components/iteration-badges/iteration-badges'
|
||||
import { useCurrentWorkflow } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks'
|
||||
import { useCollaborativeWorkflow } from '@/hooks/use-collaborative-workflow'
|
||||
import { useCurrentWorkflow } from '../../hooks'
|
||||
import { LoopBadges } from './components/loop-badges'
|
||||
|
||||
// Add these styles to your existing global CSS file or create a separate CSS module
|
||||
const LoopNodeStyles: React.FC = () => {
|
||||
@@ -245,7 +245,7 @@ export const LoopNodeComponent = memo(({ data, selected, id }: NodeProps) => {
|
||||
/>
|
||||
|
||||
{/* Loop Configuration Badges */}
|
||||
<LoopBadges nodeId={id} data={data} />
|
||||
<IterationBadges nodeId={id} data={data} iterationType='loop' />
|
||||
</Card>
|
||||
</div>
|
||||
</>
|
||||
@@ -9,7 +9,7 @@ export const ParallelTool = {
|
||||
bgColor: '#FEE12B',
|
||||
data: {
|
||||
label: 'Parallel',
|
||||
parallelType: 'collection' as 'collection' | 'count',
|
||||
parallelType: 'count' as 'collection' | 'count',
|
||||
count: 5,
|
||||
collection: '',
|
||||
extent: 'parent',
|
||||
@@ -1,5 +1,5 @@
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
import { ParallelNodeComponent } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/parallel-node/parallel-node'
|
||||
import { ParallelNodeComponent } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/subflows/parallel/parallel-node'
|
||||
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
|
||||
|
||||
vi.mock('@/stores/workflows/workflow/store', () => ({
|
||||
@@ -6,9 +6,9 @@ import { StartIcon } from '@/components/icons'
|
||||
import { Button } from '@/components/ui/button'
|
||||
import { Card } from '@/components/ui/card'
|
||||
import { cn } from '@/lib/utils'
|
||||
import { IterationBadges } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/subflows/components/iteration-badges/iteration-badges'
|
||||
import { useCurrentWorkflow } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks'
|
||||
import { useCollaborativeWorkflow } from '@/hooks/use-collaborative-workflow'
|
||||
import { useCurrentWorkflow } from '../../hooks'
|
||||
import { ParallelBadges } from './components/parallel-badges'
|
||||
|
||||
const ParallelNodeStyles: React.FC = () => {
|
||||
return (
|
||||
@@ -263,7 +263,7 @@ export const ParallelNodeComponent = memo(({ data, selected, id }: NodeProps) =>
|
||||
/>
|
||||
|
||||
{/* Parallel Configuration Badges */}
|
||||
<ParallelBadges nodeId={id} data={data} />
|
||||
<IterationBadges nodeId={id} data={data} iterationType='parallel' />
|
||||
</Card>
|
||||
</div>
|
||||
</>
|
||||
@@ -446,24 +446,25 @@ export function Code({
|
||||
value={code}
|
||||
onValueChange={(newCode) => {
|
||||
if (!isCollapsed && !isAiStreaming && !isPreview && !disabled) {
|
||||
// Capture cursor first to minimize staleness in dropdown logic
|
||||
const textarea = editorRef.current?.querySelector(
|
||||
'textarea'
|
||||
) as HTMLTextAreaElement | null
|
||||
const pos = textarea?.selectionStart ?? cursorPosition
|
||||
setCursorPosition(pos)
|
||||
|
||||
setCode(newCode)
|
||||
setStoreValue(newCode)
|
||||
|
||||
const textarea = editorRef.current?.querySelector('textarea')
|
||||
if (textarea) {
|
||||
const pos = textarea.selectionStart
|
||||
setCursorPosition(pos)
|
||||
|
||||
const tagTrigger = checkTagTrigger(newCode, pos)
|
||||
setShowTags(tagTrigger.show)
|
||||
if (!tagTrigger.show) {
|
||||
setActiveSourceBlockId(null)
|
||||
}
|
||||
|
||||
const envVarTrigger = checkEnvVarTrigger(newCode, pos)
|
||||
setShowEnvVars(envVarTrigger.show)
|
||||
setSearchTerm(envVarTrigger.show ? envVarTrigger.searchTerm : '')
|
||||
const tagTrigger = checkTagTrigger(newCode, pos)
|
||||
setShowTags(tagTrigger.show)
|
||||
if (!tagTrigger.show) {
|
||||
setActiveSourceBlockId(null)
|
||||
}
|
||||
|
||||
const envVarTrigger = checkEnvVarTrigger(newCode, pos)
|
||||
setShowEnvVars(envVarTrigger.show)
|
||||
setSearchTerm(envVarTrigger.show ? envVarTrigger.searchTerm : '')
|
||||
}
|
||||
}}
|
||||
onKeyDown={(e) => {
|
||||
|
||||
@@ -150,13 +150,14 @@ export function ComboBox({
|
||||
const newValue = e.target.value
|
||||
const newCursorPosition = e.target.selectionStart ?? 0
|
||||
|
||||
// Update cursor first to reduce staleness for dropdown logic
|
||||
setCursorPosition(newCursorPosition)
|
||||
|
||||
// Update store value immediately (allow free text)
|
||||
if (!isPreview) {
|
||||
setStoreValue(newValue)
|
||||
}
|
||||
|
||||
setCursorPosition(newCursorPosition)
|
||||
|
||||
// Check for environment variables trigger
|
||||
const envVarTrigger = checkEnvVarTrigger(newValue, newCursorPosition)
|
||||
setShowEnvVars(envVarTrigger.show)
|
||||
|
||||
@@ -148,6 +148,9 @@ export function LongInput({
|
||||
const newValue = e.target.value
|
||||
const newCursorPosition = e.target.selectionStart ?? 0
|
||||
|
||||
// Update cursor first to minimize state staleness for dropdown selection logic
|
||||
setCursorPosition(newCursorPosition)
|
||||
|
||||
// Update local content immediately
|
||||
setLocalContent(newValue)
|
||||
|
||||
@@ -158,8 +161,6 @@ export function LongInput({
|
||||
setStoreValue(newValue)
|
||||
}
|
||||
|
||||
setCursorPosition(newCursorPosition)
|
||||
|
||||
// Check for environment variables trigger
|
||||
const envVarTrigger = checkEnvVarTrigger(newValue, newCursorPosition)
|
||||
setShowEnvVars(envVarTrigger.show)
|
||||
|
||||
@@ -106,6 +106,9 @@ export function ShortInput({
|
||||
const newValue = e.target.value
|
||||
const newCursorPosition = e.target.selectionStart ?? 0
|
||||
|
||||
// Update cursor first to minimize state staleness for dropdown selection logic
|
||||
setCursorPosition(newCursorPosition)
|
||||
|
||||
if (onChange) {
|
||||
onChange(newValue)
|
||||
} else if (!isPreview) {
|
||||
@@ -113,8 +116,6 @@ export function ShortInput({
|
||||
setStoreValue(newValue)
|
||||
}
|
||||
|
||||
setCursorPosition(newCursorPosition)
|
||||
|
||||
// Check for environment variables trigger
|
||||
const envVarTrigger = checkEnvVarTrigger(newValue, newCursorPosition)
|
||||
|
||||
|
||||
@@ -94,6 +94,8 @@ export function TriggerModal({
setSelectedCredentialId(credentialValue)
if (triggerDef.provider === 'gmail') {
loadGmailLabels(credentialValue)
} else if (triggerDef.provider === 'outlook') {
loadOutlookFolders(credentialValue)
}
}
}
@@ -139,6 +141,30 @@ export function TriggerModal({
}
}

// Load Outlook folders for the selected credential
const loadOutlookFolders = async (credentialId: string) => {
try {
const response = await fetch(`/api/tools/outlook/folders?credentialId=${credentialId}`)
if (response.ok) {
const data = await response.json()
if (data.folders && Array.isArray(data.folders)) {
const folderOptions = data.folders.map((folder: any) => ({
id: folder.id,
name: folder.name,
}))
setDynamicOptions((prev) => ({
...prev,
folderIds: folderOptions,
}))
}
} else {
logger.error('Failed to load Outlook folders:', response.statusText)
}
} catch (error) {
logger.error('Error loading Outlook folders:', error)
}
}

// Generate webhook path and URL
useEffect(() => {
// For triggers that don't use webhooks (like Gmail polling), skip URL generation
@@ -152,15 +178,14 @@ export function TriggerModal({

// If no path exists, generate one automatically
if (!finalPath) {
const timestamp = Date.now()
const randomId = Math.random().toString(36).substring(2, 8)
finalPath = `/${triggerDef.provider}/${timestamp}-${randomId}`
// Use UUID format consistent with other webhooks
finalPath = crypto.randomUUID()
setGeneratedPath(finalPath)
}

if (finalPath) {
const baseUrl = window.location.origin
setWebhookUrl(`${baseUrl}/api/webhooks/trigger${finalPath}`)
setWebhookUrl(`${baseUrl}/api/webhooks/trigger/${finalPath}`)
}
}, [triggerPath, triggerDef.provider, triggerDef.requiresCredentials, triggerDef.webhook])

@@ -1,6 +1,5 @@
export {
AirtableConfig,
DiscordConfig,
GenericConfig,
GithubConfig,
GmailConfig,

@@ -1,125 +0,0 @@
import { Terminal } from 'lucide-react'
import { Alert, AlertDescription, AlertTitle, CodeBlock, Input } from '@/components/ui'
import {
ConfigField,
ConfigSection,
InstructionsSection,
TestResultDisplay,
} from '@/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/components/sub-block/components/webhook/components'

interface DiscordConfigProps {
webhookName: string
setWebhookName: (name: string) => void
avatarUrl: string
setAvatarUrl: (url: string) => void
isLoadingToken: boolean
testResult: {
success: boolean
message?: string
test?: any
} | null
copied: string | null
copyToClipboard: (text: string, type: string) => void
testWebhook: () => Promise<void>
}

const examplePayload = JSON.stringify(
{
content: 'Hello from Sim!',
username: 'Optional Custom Name',
avatar_url: 'https://example.com/avatar.png',
},
null,
2
)

export function DiscordConfig({
webhookName,
setWebhookName,
avatarUrl,
setAvatarUrl,
isLoadingToken,
testResult,
copied,
copyToClipboard,
testWebhook, // Passed to TestResultDisplay
}: DiscordConfigProps) {
return (
<div className='space-y-4'>
<ConfigSection title='Discord Appearance (Optional)'>
<ConfigField
id='discord-webhook-name'
label='Webhook Name'
description='This name will be displayed as the sender of messages in Discord.'
>
<Input
id='discord-webhook-name'
value={webhookName}
onChange={(e) => setWebhookName(e.target.value)}
placeholder='Sim Bot'
disabled={isLoadingToken}
/>
</ConfigField>

<ConfigField
id='discord-avatar-url'
label='Avatar URL'
description="URL to an image that will be used as the webhook's avatar."
>
<Input
id='discord-avatar-url'
value={avatarUrl}
onChange={(e) => setAvatarUrl(e.target.value)}
placeholder='https://example.com/avatar.png'
disabled={isLoadingToken}
type='url'
/>
</ConfigField>
</ConfigSection>

<TestResultDisplay
testResult={testResult}
copied={copied}
copyToClipboard={copyToClipboard}
showCurlCommand={true} // Discord can be tested via curl
/>

<InstructionsSection
title='Receiving Events from Discord (Incoming Webhook)'
tip='Create a webhook in Discord and paste its URL into the Webhook URL field above.'
>
<ol className='list-inside list-decimal space-y-1'>
<li>Go to Discord Server Settings {'>'} Integrations.</li>
<li>Click "Webhooks" then "New Webhook".</li>
<li>Customize the name and channel.</li>
<li>Click "Copy Webhook URL".</li>
<li>
Paste the copied Discord URL into the main <strong>Webhook URL</strong> field above.
</li>
<li>Your workflow triggers when Discord sends an event to that URL.</li>
</ol>
</InstructionsSection>

<InstructionsSection title='Sending Messages to Discord (Outgoing via this URL)'>
<p>
To send messages <i>to</i> Discord using the Sim Webhook URL (above), make a POST request
with a JSON body like this:
</p>
<CodeBlock language='json' code={examplePayload} className='mt-2 text-sm' />
<ul className='mt-3 list-outside list-disc space-y-1 pl-4'>
<li>Customize message appearance with embeds (see Discord docs).</li>
<li>Override the default username/avatar per request if needed.</li>
</ul>
</InstructionsSection>

<Alert>
<Terminal className='h-4 w-4' />
<AlertTitle>Security Note</AlertTitle>
<AlertDescription>
The Sim Webhook URL allows sending messages <i>to</i> Discord. Treat it like a password.
Don't share it publicly.
</AlertDescription>
</Alert>
</div>
)
}

@@ -1,5 +1,4 @@
export { AirtableConfig } from './airtable'
export { DiscordConfig } from './discord'
export { GenericConfig } from './generic'
export { GithubConfig } from './github'
export { GmailConfig } from './gmail'

@@ -137,6 +137,10 @@ export function SlackConfig({
<li>Paste the Webhook URL (from above) into the "Request URL" field</li>
</ol>
</li>
<li>
Go to <strong>Install App</strong> in the left sidebar and install the app into your
desired Slack workspace and channel.
</li>
<li>Save changes in both Slack and here.</li>
</ol>
</InstructionsSection>

@@ -12,7 +12,6 @@ import { createLogger } from '@/lib/logs/console/logger'
import {
AirtableConfig,
DeleteConfirmDialog,
DiscordConfig,
GenericConfig,
GithubConfig,
GmailConfig,
@@ -83,8 +82,7 @@ export function WebhookModal({
// Provider-specific state
const [whatsappVerificationToken, setWhatsappVerificationToken] = useState('')
const [githubContentType, setGithubContentType] = useState('application/json')
const [discordWebhookName, setDiscordWebhookName] = useState('')
const [discordAvatarUrl, setDiscordAvatarUrl] = useState('')

const [slackSigningSecret, setSlackSigningSecret] = useState('')
const [telegramBotToken, setTelegramBotToken] = useState('')
// Microsoft Teams-specific state
@@ -106,8 +104,7 @@ export function WebhookModal({
secretHeaderName: '',
requireAuth: false,
allowedIps: '',
discordWebhookName: '',
discordAvatarUrl: '',

airtableWebhookSecret: '',
airtableBaseId: '',
airtableTableId: '',
@@ -184,18 +181,6 @@ export function WebhookModal({
const contentType = config.contentType || 'application/json'
setGithubContentType(contentType)
setOriginalValues((prev) => ({ ...prev, githubContentType: contentType }))
} else if (webhookProvider === 'discord') {
const webhookName = config.webhookName || ''
const avatarUrl = config.avatarUrl || ''

setDiscordWebhookName(webhookName)
setDiscordAvatarUrl(avatarUrl)

setOriginalValues((prev) => ({
...prev,
discordWebhookName: webhookName,
discordAvatarUrl: avatarUrl,
}))
} else if (webhookProvider === 'generic') {
// Set general webhook configuration
const token = config.token || ''
@@ -328,9 +313,6 @@ export function WebhookModal({
(webhookProvider === 'whatsapp' &&
whatsappVerificationToken !== originalValues.whatsappVerificationToken) ||
(webhookProvider === 'github' && githubContentType !== originalValues.githubContentType) ||
(webhookProvider === 'discord' &&
(discordWebhookName !== originalValues.discordWebhookName ||
discordAvatarUrl !== originalValues.discordAvatarUrl)) ||
(webhookProvider === 'generic' &&
(generalToken !== originalValues.generalToken ||
secretHeaderName !== originalValues.secretHeaderName ||
@@ -357,8 +339,6 @@ export function WebhookModal({
webhookProvider,
whatsappVerificationToken,
githubContentType,
discordWebhookName,
discordAvatarUrl,
generalToken,
secretHeaderName,
requireAuth,
@@ -393,9 +373,7 @@ export function WebhookModal({
case 'github':
isValid = generalToken.trim() !== ''
break
case 'discord':
isValid = discordWebhookName.trim() !== ''
break

case 'telegram':
isValid = telegramBotToken.trim() !== ''
break
@@ -442,11 +420,6 @@ export function WebhookModal({
return { verificationToken: whatsappVerificationToken }
case 'github':
return { contentType: githubContentType }
case 'discord':
return {
webhookName: discordWebhookName || undefined,
avatarUrl: discordAvatarUrl || undefined,
}
case 'stripe':
return {}
case 'gmail':
@@ -539,8 +512,6 @@ export function WebhookModal({
secretHeaderName,
requireAuth,
allowedIps,
discordWebhookName,
discordAvatarUrl,
slackSigningSecret,
airtableWebhookSecret,
airtableBaseId,
@@ -738,20 +709,7 @@ export function WebhookModal({
setIncludeRawEmail={setIncludeRawEmail}
/>
)
case 'discord':
return (
<DiscordConfig
webhookName={discordWebhookName}
setWebhookName={setDiscordWebhookName}
avatarUrl={discordAvatarUrl}
setAvatarUrl={setDiscordAvatarUrl}
isLoadingToken={isLoadingToken}
testResult={testResult}
copied={copied}
copyToClipboard={copyToClipboard}
testWebhook={testWebhook}
/>
)

case 'stripe':
return (
<StripeConfig

@@ -3,7 +3,6 @@ import { ExternalLink } from 'lucide-react'
import { useParams } from 'next/navigation'
import {
AirtableIcon,
DiscordIcon,
GithubIcon,
GmailIcon,
MicrosoftTeamsIcon,
@@ -46,11 +45,6 @@ export interface GitHubConfig {
contentType: string
}

export interface DiscordConfig {
webhookName?: string
avatarUrl?: string
}

export type StripeConfig = Record<string, never>

export interface GeneralWebhookConfig {
@@ -103,7 +97,6 @@ export interface MicrosoftTeamsConfig {
export type ProviderConfig =
| WhatsAppConfig
| GitHubConfig
| DiscordConfig
| StripeConfig
| GeneralWebhookConfig
| SlackConfig
@@ -219,25 +212,7 @@ export const WEBHOOK_PROVIDERS: { [key: string]: WebhookProvider } = {
},
},
},
discord: {
id: 'discord',
name: 'Discord',
icon: (props) => <DiscordIcon {...props} />,
configFields: {
webhookName: {
type: 'string',
label: 'Webhook Name',
placeholder: 'Enter a name for the webhook',
description: 'Custom name that will appear as the message sender in Discord.',
},
avatarUrl: {
type: 'string',
label: 'Avatar URL',
placeholder: 'https://example.com/avatar.png',
description: 'URL to an image that will be used as the webhook avatar.',
},
},
},

stripe: {
id: 'stripe',
name: 'Stripe',

@@ -17,9 +17,20 @@ import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/provide
import { ControlBar } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/control-bar/control-bar'
import { DiffControls } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/diff-controls'
import { ErrorBoundary } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/error/index'
import { LoopNodeComponent } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/loop-node/loop-node'
import { Panel } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/panel'
import { ParallelNodeComponent } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/parallel-node/parallel-node'
import { LoopNodeComponent } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/subflows/loop/loop-node'
import { ParallelNodeComponent } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/subflows/parallel/parallel-node'
import { WorkflowBlock } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/workflow-block'
import { WorkflowEdge } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-edge/workflow-edge'
import { useCurrentWorkflow } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks'
import {
getNodeAbsolutePosition,
getNodeDepth,
getNodeHierarchy,
isPointInLoopNode,
resizeLoopNodes,
updateNodeParent as updateNodeParentUtil,
} from '@/app/workspace/[workspaceId]/w/[workflowId]/utils'
import { getBlock } from '@/blocks'
import { useCollaborativeWorkflow } from '@/hooks/use-collaborative-workflow'
import { useStreamCleanup } from '@/hooks/use-stream-cleanup'
@@ -31,17 +42,6 @@ import { useGeneralStore } from '@/stores/settings/general/store'
import { useWorkflowDiffStore } from '@/stores/workflow-diff/store'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
import { WorkflowBlock } from './components/workflow-block/workflow-block'
import { WorkflowEdge } from './components/workflow-edge/workflow-edge'
import { useCurrentWorkflow } from './hooks'
import {
getNodeAbsolutePosition,
getNodeDepth,
getNodeHierarchy,
isPointInLoopNode,
resizeLoopNodes,
updateNodeParent as updateNodeParentUtil,
} from './utils'

const logger = createLogger('Workflow')

@@ -3,7 +3,17 @@
import { useCallback, useEffect, useMemo, useState } from 'react'
import * as DialogPrimitive from '@radix-ui/react-dialog'
import * as VisuallyHidden from '@radix-ui/react-visually-hidden'
import { BookOpen, Building2, LibraryBig, ScrollText, Search, Shapes, Workflow } from 'lucide-react'
import {
BookOpen,
Building2,
LibraryBig,
RepeatIcon,
ScrollText,
Search,
Shapes,
SplitIcon,
Workflow,
} from 'lucide-react'
import { useParams, useRouter } from 'next/navigation'
import { Dialog, DialogOverlay, DialogPortal, DialogTitle } from '@/components/ui/dialog'
import { Input } from '@/components/ui/input'
@@ -119,7 +129,7 @@ export function SearchModal({
if (!isOnWorkflowPage) return []

const allBlocks = getAllBlocks()
return allBlocks
const regularBlocks = allBlocks
.filter(
(block) =>
block.type !== 'starter' &&
@@ -137,7 +147,28 @@ export function SearchModal({
type: block.type,
})
)
.sort((a, b) => a.name.localeCompare(b.name))

// Add special blocks (loop and parallel)
const specialBlocks: BlockItem[] = [
{
id: 'loop',
name: 'Loop',
description: 'Create a Loop',
icon: RepeatIcon,
bgColor: '#2FB3FF',
type: 'loop',
},
{
id: 'parallel',
name: 'Parallel',
description: 'Parallel Execution',
icon: SplitIcon,
bgColor: '#FEE12B',
type: 'parallel',
},
]

return [...regularBlocks, ...specialBlocks].sort((a, b) => a.name.localeCompare(b.name))
}, [isOnWorkflowPage])

// Get all available tools - only when on workflow page

@@ -1,9 +1,13 @@
export { CreateMenu } from './create-menu/create-menu'
export { FolderTree } from './folder-tree/folder-tree'
export { HelpModal } from './help-modal/help-modal'
export { KnowledgeBaseTags } from './knowledge-base-tags/knowledge-base-tags'
export { KnowledgeTags } from './knowledge-tags/knowledge-tags'
export { LogsFilters } from './logs-filters/logs-filters'
export { SettingsModal } from './settings-modal/settings-modal'
export { SubscriptionModal } from './subscription-modal/subscription-modal'
export { Toolbar } from './toolbar/toolbar'
export { UsageIndicator } from './usage-indicator/usage-indicator'
export { WorkflowContextMenu } from './workflow-context-menu/workflow-context-menu'
export { WorkflowList } from './workflow-list/workflow-list'
export { WorkspaceHeader } from './workspace-header/workspace-header'

@@ -0,0 +1,565 @@
|
||||
'use client'
|
||||
|
||||
import { useEffect, useState } from 'react'
|
||||
import { Eye, MoreHorizontal, Plus, Trash2, X } from 'lucide-react'
|
||||
import {
|
||||
Button,
|
||||
DropdownMenu,
|
||||
DropdownMenuContent,
|
||||
DropdownMenuItem,
|
||||
DropdownMenuTrigger,
|
||||
Input,
|
||||
Label,
|
||||
Select,
|
||||
SelectContent,
|
||||
SelectItem,
|
||||
SelectTrigger,
|
||||
SelectValue,
|
||||
} from '@/components/ui'
|
||||
import {
|
||||
AlertDialog,
|
||||
AlertDialogCancel,
|
||||
AlertDialogContent,
|
||||
AlertDialogDescription,
|
||||
AlertDialogFooter,
|
||||
AlertDialogHeader,
|
||||
AlertDialogTitle,
|
||||
} from '@/components/ui/alert-dialog'
|
||||
import { ScrollArea } from '@/components/ui/scroll-area'
|
||||
import { MAX_TAG_SLOTS } from '@/lib/constants/knowledge'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { getDocumentIcon } from '@/app/workspace/[workspaceId]/knowledge/components/icons/document-icons'
|
||||
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
|
||||
import {
|
||||
type TagDefinition,
|
||||
useKnowledgeBaseTagDefinitions,
|
||||
} from '@/hooks/use-knowledge-base-tag-definitions'
|
||||
|
||||
const logger = createLogger('KnowledgeBaseTags')
|
||||
|
||||
// Predetermined colors for each tag slot (same as document tags)
|
||||
const TAG_SLOT_COLORS = [
|
||||
'#701FFC', // Purple
|
||||
'#FF6B35', // Orange
|
||||
'#4ECDC4', // Teal
|
||||
'#45B7D1', // Blue
|
||||
'#96CEB4', // Green
|
||||
'#FFEAA7', // Yellow
|
||||
'#DDA0DD', // Plum
|
||||
'#FF7675', // Red
|
||||
'#74B9FF', // Light Blue
|
||||
'#A29BFE', // Lavender
|
||||
] as const
|
||||
|
||||
interface KnowledgeBaseTagsProps {
|
||||
knowledgeBaseId: string
|
||||
}
|
||||
|
||||
interface TagUsageData {
|
||||
tagName: string
|
||||
tagSlot: string
|
||||
documentCount: number
|
||||
documents: Array<{ id: string; name: string; tagValue: string }>
|
||||
}
|
||||
|
||||
export function KnowledgeBaseTags({ knowledgeBaseId }: KnowledgeBaseTagsProps) {
|
||||
const { tagDefinitions: kbTagDefinitions, fetchTagDefinitions: refreshTagDefinitions } =
|
||||
useKnowledgeBaseTagDefinitions(knowledgeBaseId)
|
||||
const userPermissions = useUserPermissionsContext()
|
||||
|
||||
const [deleteDialogOpen, setDeleteDialogOpen] = useState(false)
|
||||
const [selectedTag, setSelectedTag] = useState<TagDefinition | null>(null)
|
||||
const [viewDocumentsDialogOpen, setViewDocumentsDialogOpen] = useState(false)
|
||||
const [isDeleting, setIsDeleting] = useState(false)
|
||||
const [tagUsageData, setTagUsageData] = useState<TagUsageData[]>([])
|
||||
const [isLoadingUsage, setIsLoadingUsage] = useState(false)
|
||||
const [isCreating, setIsCreating] = useState(false)
|
||||
const [isSaving, setIsSaving] = useState(false)
|
||||
const [createForm, setCreateForm] = useState({
|
||||
displayName: '',
|
||||
fieldType: 'text',
|
||||
})
|
||||
|
||||
// Get color for a tag based on its slot
|
||||
const getTagColor = (slot: string) => {
|
||||
const slotMatch = slot.match(/tag(\d+)/)
|
||||
const slotNumber = slotMatch ? Number.parseInt(slotMatch[1]) - 1 : 0
|
||||
return TAG_SLOT_COLORS[slotNumber % TAG_SLOT_COLORS.length]
|
||||
}
|
||||
|
||||
// Fetch tag usage data from API
|
||||
const fetchTagUsage = async () => {
|
||||
if (!knowledgeBaseId) return
|
||||
|
||||
setIsLoadingUsage(true)
|
||||
try {
|
||||
const response = await fetch(`/api/knowledge/${knowledgeBaseId}/tag-usage`)
|
||||
if (!response.ok) {
|
||||
throw new Error('Failed to fetch tag usage')
|
||||
}
|
||||
const result = await response.json()
|
||||
if (result.success) {
|
||||
setTagUsageData(result.data)
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error('Error fetching tag usage:', error)
|
||||
} finally {
|
||||
setIsLoadingUsage(false)
|
||||
}
|
||||
}
|
||||
|
||||
// Load tag usage data when component mounts or knowledge base changes
|
||||
useEffect(() => {
|
||||
fetchTagUsage()
|
||||
}, [knowledgeBaseId])
|
||||
|
||||
// Get usage data for a tag
|
||||
const getTagUsage = (tagName: string): TagUsageData => {
|
||||
return (
|
||||
tagUsageData.find((usage) => usage.tagName === tagName) || {
|
||||
tagName,
|
||||
tagSlot: '',
|
||||
documentCount: 0,
|
||||
documents: [],
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
const handleDeleteTag = async (tag: TagDefinition) => {
|
||||
setSelectedTag(tag)
|
||||
// Fetch fresh usage data before showing the delete dialog
|
||||
await fetchTagUsage()
|
||||
setDeleteDialogOpen(true)
|
||||
}
|
||||
|
||||
const handleViewDocuments = async (tag: TagDefinition) => {
|
||||
setSelectedTag(tag)
|
||||
// Fetch fresh usage data before showing the view documents dialog
|
||||
await fetchTagUsage()
|
||||
setViewDocumentsDialogOpen(true)
|
||||
}
|
||||
|
||||
const openTagCreator = () => {
|
||||
setCreateForm({
|
||||
displayName: '',
|
||||
fieldType: 'text',
|
||||
})
|
||||
setIsCreating(true)
|
||||
}
|
||||
|
||||
const cancelCreating = () => {
|
||||
setCreateForm({
|
||||
displayName: '',
|
||||
fieldType: 'text',
|
||||
})
|
||||
setIsCreating(false)
|
||||
}
|
||||
|
||||
const hasNameConflict = (name: string) => {
|
||||
if (!name.trim()) return false
|
||||
return kbTagDefinitions.some(
|
||||
(tag) => tag.displayName.toLowerCase() === name.trim().toLowerCase()
|
||||
)
|
||||
}
|
||||
|
||||
// Check for conflicts in real-time during creation (but not while saving)
|
||||
const nameConflict = isCreating && !isSaving && hasNameConflict(createForm.displayName)
|
||||
|
||||
const canSave = () => {
|
||||
return createForm.displayName.trim() && !hasNameConflict(createForm.displayName)
|
||||
}
|
||||
|
||||
const saveTagDefinition = async () => {
|
||||
if (!canSave()) return
|
||||
|
||||
setIsSaving(true)
|
||||
try {
|
||||
// Find next available slot
|
||||
const usedSlots = new Set(kbTagDefinitions.map((def) => def.tagSlot))
|
||||
const availableSlot = (
|
||||
['tag1', 'tag2', 'tag3', 'tag4', 'tag5', 'tag6', 'tag7'] as const
|
||||
).find((slot) => !usedSlots.has(slot))
|
||||
|
||||
if (!availableSlot) {
|
||||
throw new Error('No available tag slots')
|
||||
}
|
||||
|
||||
// Create the tag definition
|
||||
const newTagDefinition = {
|
||||
tagSlot: availableSlot,
|
||||
displayName: createForm.displayName.trim(),
|
||||
fieldType: createForm.fieldType,
|
||||
}
|
||||
|
||||
const response = await fetch(`/api/knowledge/${knowledgeBaseId}/tag-definitions`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
body: JSON.stringify(newTagDefinition),
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error('Failed to create tag definition')
|
||||
}
|
||||
|
||||
// Refresh tag definitions and usage data
|
||||
await Promise.all([refreshTagDefinitions(), fetchTagUsage()])
|
||||
|
||||
// Reset form and close creator
|
||||
setCreateForm({
|
||||
displayName: '',
|
||||
fieldType: 'text',
|
||||
})
|
||||
setIsCreating(false)
|
||||
} catch (error) {
|
||||
logger.error('Error creating tag definition:', error)
|
||||
} finally {
|
||||
setIsSaving(false)
|
||||
}
|
||||
}
|
||||
|
||||
const confirmDeleteTag = async () => {
|
||||
if (!selectedTag) return
|
||||
|
||||
logger.info('Starting delete operation for:', selectedTag.displayName)
|
||||
setIsDeleting(true)
|
||||
|
||||
try {
|
||||
logger.info('Calling delete API for tag:', selectedTag.displayName)
|
||||
|
||||
const response = await fetch(
|
||||
`/api/knowledge/${knowledgeBaseId}/tag-definitions/${selectedTag.id}`,
|
||||
{
|
||||
method: 'DELETE',
|
||||
}
|
||||
)
|
||||
|
||||
logger.info('Delete API response status:', response.status)
|
||||
|
||||
if (!response.ok) {
|
||||
const errorText = await response.text()
|
||||
logger.error('Delete API failed:', errorText)
|
||||
throw new Error(`Failed to delete tag definition: ${response.status} ${errorText}`)
|
||||
}
|
||||
|
||||
logger.info('Delete API successful, refreshing data...')
|
||||
|
||||
// Refresh both tag definitions and usage data
|
||||
await Promise.all([refreshTagDefinitions(), fetchTagUsage()])
|
||||
|
||||
logger.info('Data refresh complete, closing dialog')
|
||||
|
||||
// Only close dialog and reset state after successful deletion and refresh
|
||||
setDeleteDialogOpen(false)
|
||||
setSelectedTag(null)
|
||||
|
||||
logger.info('Delete operation completed successfully')
|
||||
} catch (error) {
|
||||
logger.error('Error deleting tag definition:', error)
|
||||
// Don't close dialog on error - let user see the error and try again or cancel
|
||||
} finally {
|
||||
logger.info('Setting isDeleting to false')
|
||||
setIsDeleting(false)
|
||||
}
|
||||
}
|
||||
|
||||
// Don't show if user can't edit
|
||||
if (!userPermissions.canEdit) {
|
||||
return null
|
||||
}
|
||||
|
||||
const selectedTagUsage = selectedTag ? getTagUsage(selectedTag.displayName) : null
|
||||
|
||||
return (
|
||||
<>
|
||||
<div className='h-full w-full overflow-hidden'>
|
||||
<ScrollArea className='h-full' hideScrollbar={true}>
|
||||
<div className='px-2 py-2'>
|
||||
{/* KB Tag Definitions Section */}
|
||||
<div className='mb-1 space-y-1'>
|
||||
<div className='font-medium text-muted-foreground text-xs'>Knowledge Base Tags</div>
|
||||
<div>
|
||||
{/* Existing Tag Definitions */}
|
||||
<div>
|
||||
{kbTagDefinitions.length === 0 && !isCreating ? (
|
||||
<div className='mb-1 rounded-[10px] border border-dashed bg-card p-3 text-center'>
|
||||
<p className='text-muted-foreground text-xs'>
|
||||
No tag definitions yet.
|
||||
<br />
|
||||
</p>
|
||||
</div>
|
||||
) : (
|
||||
kbTagDefinitions.length > 0 &&
|
||||
kbTagDefinitions.map((tag, index) => {
|
||||
const usage = getTagUsage(tag.displayName)
|
||||
return (
|
||||
<div key={tag.id} className='mb-1'>
|
||||
<div className='cursor-default rounded-[10px] border bg-card p-2 transition-colors'>
|
||||
<div className='flex items-center justify-between text-sm'>
|
||||
<div className='flex min-w-0 flex-1 items-center gap-2'>
|
||||
<div
|
||||
className='h-2 w-2 rounded-full'
|
||||
style={{ backgroundColor: getTagColor(tag.tagSlot) }}
|
||||
/>
|
||||
<div className='min-w-0 flex-1'>
|
||||
<div className='truncate font-medium'>{tag.displayName}</div>
|
||||
</div>
|
||||
</div>
|
||||
<DropdownMenu>
|
||||
<DropdownMenuTrigger asChild>
|
||||
<Button
|
||||
variant='ghost'
|
||||
size='sm'
|
||||
className='h-6 w-6 p-0 text-muted-foreground hover:text-foreground'
|
||||
>
|
||||
<MoreHorizontal className='h-3 w-3' />
|
||||
</Button>
|
||||
</DropdownMenuTrigger>
|
||||
<DropdownMenuContent
|
||||
align='end'
|
||||
className='w-[180px] rounded-lg border bg-card shadow-xs'
|
||||
>
|
||||
<DropdownMenuItem
|
||||
onClick={() => handleViewDocuments(tag)}
|
||||
className='cursor-pointer rounded-md px-3 py-2 text-sm hover:bg-secondary/50'
|
||||
>
|
||||
<Eye className='mr-2 h-3 w-3 flex-shrink-0' />
|
||||
<span className='whitespace-nowrap'>View Docs</span>
|
||||
</DropdownMenuItem>
|
||||
<DropdownMenuItem
|
||||
onClick={() => handleDeleteTag(tag)}
|
||||
className='cursor-pointer rounded-md px-3 py-2 text-red-600 text-sm hover:bg-red-50 hover:text-red-600 dark:hover:bg-red-950'
|
||||
>
|
||||
<Trash2 className='mr-2 h-3 w-3' />
|
||||
Delete Tag
|
||||
</DropdownMenuItem>
|
||||
</DropdownMenuContent>
|
||||
</DropdownMenu>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
})
|
||||
)}
|
||||
</div>
|
||||
|
||||
{/* Add New Tag Button or Inline Creator */}
|
||||
{!isCreating && userPermissions.canEdit && (
|
||||
<div className='mb-1'>
|
||||
<Button
|
||||
variant='outline'
|
||||
size='sm'
|
||||
onClick={openTagCreator}
|
||||
className='w-full justify-start gap-2 rounded-[10px] border border-dashed bg-card text-muted-foreground hover:text-foreground'
|
||||
disabled={kbTagDefinitions.length >= MAX_TAG_SLOTS}
|
||||
>
|
||||
<Plus className='h-4 w-4' />
|
||||
Add Tag Definition
|
||||
</Button>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Inline Tag Creation Form */}
|
||||
{isCreating && (
|
||||
<div className='mb-1 w-full max-w-full space-y-2 rounded-[10px] border bg-card p-2'>
|
||||
<div className='space-y-1.5'>
|
||||
<div className='flex items-center justify-between'>
|
||||
<Label className='font-medium text-xs'>Tag Name</Label>
|
||||
<Button
|
||||
variant='ghost'
|
||||
size='sm'
|
||||
onClick={cancelCreating}
|
||||
className='h-6 w-6 p-0 text-muted-foreground hover:text-red-600'
|
||||
>
|
||||
<X className='h-3 w-3' />
|
||||
</Button>
|
||||
</div>
|
||||
<Input
|
||||
value={createForm.displayName}
|
||||
onChange={(e) =>
|
||||
setCreateForm({ ...createForm, displayName: e.target.value })
|
||||
}
|
||||
placeholder='Enter tag name'
|
||||
className='h-8 w-full rounded-md text-sm'
|
||||
onKeyDown={(e) => {
|
||||
if (e.key === 'Enter' && canSave()) {
|
||||
e.preventDefault()
|
||||
saveTagDefinition()
|
||||
}
|
||||
if (e.key === 'Escape') {
|
||||
e.preventDefault()
|
||||
cancelCreating()
|
||||
}
|
||||
}}
|
||||
/>
|
||||
{nameConflict && (
|
||||
<div className='text-red-600 text-xs'>
|
||||
A tag with this name already exists
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
|
||||
<div className='space-y-1.5'>
|
||||
<Label className='font-medium text-xs'>Type</Label>
|
||||
<Select
|
||||
value={createForm.fieldType}
|
||||
onValueChange={(value) =>
|
||||
setCreateForm({ ...createForm, fieldType: value })
|
||||
}
|
||||
>
|
||||
<SelectTrigger className='h-8 w-full text-sm'>
|
||||
<SelectValue />
|
||||
</SelectTrigger>
|
||||
<SelectContent>
|
||||
<SelectItem value='text'>Text</SelectItem>
|
||||
</SelectContent>
|
||||
</Select>
|
||||
</div>
|
||||
|
||||
{/* Action buttons */}
|
||||
<div className='flex pt-1.5'>
|
||||
<Button
|
||||
size='sm'
|
||||
onClick={saveTagDefinition}
|
||||
className='h-7 w-full text-xs'
|
||||
disabled={!canSave() || isSaving}
|
||||
>
|
||||
{isSaving ? 'Creating...' : 'Save'}
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
<div className='mt-2 text-muted-foreground text-xs'>
|
||||
{kbTagDefinitions.length} of {MAX_TAG_SLOTS} tag slots used
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</ScrollArea>
|
||||
</div>
|
||||
|
||||
{/* Delete Confirmation Dialog */}
|
||||
<AlertDialog open={deleteDialogOpen} onOpenChange={setDeleteDialogOpen}>
|
||||
<AlertDialogContent>
|
||||
<AlertDialogHeader>
|
||||
<AlertDialogTitle>Delete Tag</AlertDialogTitle>
|
||||
<AlertDialogDescription asChild>
|
||||
<div>
|
||||
<div className='mb-2'>
|
||||
Are you sure you want to delete the "{selectedTag?.displayName}" tag? This will
|
||||
remove this tag from {selectedTagUsage?.documentCount || 0} document
|
||||
{selectedTagUsage?.documentCount !== 1 ? 's' : ''}.{' '}
|
||||
<span className='text-red-500 dark:text-red-500'>
|
||||
This action cannot be undone.
|
||||
</span>
|
||||
</div>
|
||||
|
||||
{selectedTagUsage && selectedTagUsage.documentCount > 0 && (
|
||||
<div className='mt-4'>
|
||||
<div className='mb-2 font-medium text-sm'>Affected documents:</div>
|
||||
<div className='rounded-md border border-border bg-background'>
|
||||
<div className='max-h-32 overflow-y-auto'>
|
||||
{selectedTagUsage.documents.slice(0, 5).map((doc, index) => {
|
||||
const DocumentIcon = getDocumentIcon('', doc.name)
|
||||
return (
|
||||
<div
|
||||
key={doc.id}
|
||||
className='flex items-center gap-3 border-border/50 border-b p-3 last:border-b-0'
|
||||
>
|
||||
<DocumentIcon className='h-4 w-4 flex-shrink-0' />
|
||||
<div className='min-w-0 flex-1'>
|
||||
<div className='truncate font-medium text-sm'>{doc.name}</div>
|
||||
{doc.tagValue && (
|
||||
<div className='mt-1 text-muted-foreground text-xs'>
|
||||
Tag value: <span className='font-medium'>{doc.tagValue}</span>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
})}
|
||||
{selectedTagUsage.documentCount > 5 && (
|
||||
<div className='flex items-center gap-3 p-3 text-muted-foreground text-sm'>
|
||||
<div className='h-4 w-4' />
|
||||
<div className='font-medium'>
|
||||
and {selectedTagUsage.documentCount - 5} more documents...
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</AlertDialogDescription>
|
||||
</AlertDialogHeader>
|
||||
<AlertDialogFooter className='flex'>
|
||||
<AlertDialogCancel className='h-9 w-full rounded-[8px]' disabled={isDeleting}>
|
||||
Cancel
|
||||
</AlertDialogCancel>
|
||||
<Button
|
||||
onClick={confirmDeleteTag}
|
||||
disabled={isDeleting}
|
||||
className='h-9 w-full rounded-[8px] bg-red-500 text-white transition-all duration-200 hover:bg-red-600 dark:bg-red-500 dark:hover:bg-red-600'
|
||||
>
|
||||
{isDeleting ? 'Deleting...' : 'Delete Tag'}
|
||||
</Button>
|
||||
</AlertDialogFooter>
|
||||
</AlertDialogContent>
|
||||
</AlertDialog>
|
||||
|
||||
{/* View Documents Dialog */}
|
||||
<AlertDialog open={viewDocumentsDialogOpen} onOpenChange={setViewDocumentsDialogOpen}>
|
||||
<AlertDialogContent>
|
||||
<AlertDialogHeader>
|
||||
<AlertDialogTitle>Documents using "{selectedTag?.displayName}"</AlertDialogTitle>
|
||||
<AlertDialogDescription asChild>
|
||||
<div>
|
||||
<div className='mb-4 text-muted-foreground'>
|
||||
{selectedTagUsage?.documentCount || 0} document
|
||||
{selectedTagUsage?.documentCount !== 1 ? 's are' : ' is'} currently using this tag
|
||||
definition.
|
||||
</div>
|
||||
|
||||
{selectedTagUsage?.documentCount === 0 ? (
|
||||
<div className='rounded-md bg-muted/30 p-6 text-center'>
|
||||
<div className='text-muted-foreground text-sm'>
|
||||
This tag definition is not being used by any documents. You can safely delete
|
||||
it to free up the tag slot.
|
||||
</div>
|
||||
</div>
|
||||
) : (
|
||||
<div className='rounded-md border border-border bg-background'>
|
||||
<div className='max-h-80 overflow-y-auto'>
|
||||
{selectedTagUsage?.documents.map((doc, index) => {
|
||||
const DocumentIcon = getDocumentIcon('', doc.name)
|
||||
return (
|
||||
<div
|
||||
key={doc.id}
|
||||
className='flex items-center gap-3 border-border/50 border-b p-3 transition-colors last:border-b-0 hover:bg-muted/30'
|
||||
>
|
||||
<DocumentIcon className='h-4 w-4 flex-shrink-0' />
|
||||
<div className='min-w-0 flex-1'>
|
||||
<div className='truncate font-medium text-sm'>{doc.name}</div>
|
||||
{doc.tagValue && (
|
||||
<div className='mt-1 text-muted-foreground text-xs'>
|
||||
Tag value: <span className='font-medium'>{doc.tagValue}</span>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
})}
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</AlertDialogDescription>
|
||||
</AlertDialogHeader>
|
||||
</AlertDialogContent>
|
||||
</AlertDialog>
|
||||
</>
|
||||
)
|
||||
}
|
||||
@@ -0,0 +1,796 @@
|
||||
'use client'
|
||||
|
||||
import { useCallback, useEffect, useState } from 'react'
|
||||
import { ChevronDown, Plus, X } from 'lucide-react'
|
||||
import {
|
||||
Button,
|
||||
DropdownMenu,
|
||||
DropdownMenuContent,
|
||||
DropdownMenuItem,
|
||||
DropdownMenuTrigger,
|
||||
Input,
|
||||
Label,
|
||||
Select,
|
||||
SelectContent,
|
||||
SelectItem,
|
||||
SelectTrigger,
|
||||
SelectValue,
|
||||
} from '@/components/ui'
|
||||
import { ScrollArea } from '@/components/ui/scroll-area'
|
||||
import { MAX_TAG_SLOTS, TAG_SLOTS, type TagSlot } from '@/lib/constants/knowledge'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import type { DocumentTag } from '@/app/workspace/[workspaceId]/knowledge/components/document-tag-entry/document-tag-entry'
|
||||
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
|
||||
import {
|
||||
type TagDefinition,
|
||||
useKnowledgeBaseTagDefinitions,
|
||||
} from '@/hooks/use-knowledge-base-tag-definitions'
|
||||
import { useNextAvailableSlot } from '@/hooks/use-next-available-slot'
|
||||
import { type TagDefinitionInput, useTagDefinitions } from '@/hooks/use-tag-definitions'
|
||||
import { type DocumentData, useKnowledgeStore } from '@/stores/knowledge/store'
|
||||
|
||||
const logger = createLogger('KnowledgeTags')
|
||||
|
||||
interface KnowledgeTagsProps {
|
||||
knowledgeBaseId: string
|
||||
documentId: string
|
||||
}
|
||||
|
||||
// Predetermined colors for each tag slot
|
||||
const TAG_SLOT_COLORS = [
|
||||
'#701FFC', // Purple
|
||||
'#FF6B35', // Orange
|
||||
'#4ECDC4', // Teal
|
||||
'#45B7D1', // Blue
|
||||
'#96CEB4', // Green
|
||||
'#FFEAA7', // Yellow
|
||||
'#DDA0DD', // Plum
|
||||
'#FF7675', // Red
|
||||
'#74B9FF', // Light Blue
|
||||
'#A29BFE', // Lavender
|
||||
] as const
|
||||
|
||||
export function KnowledgeTags({ knowledgeBaseId, documentId }: KnowledgeTagsProps) {
|
||||
const { getCachedDocuments, updateDocument: updateDocumentInStore } = useKnowledgeStore()
|
||||
const userPermissions = useUserPermissionsContext()
|
||||
|
||||
// Use different hooks based on whether we have a documentId
|
||||
const documentTagHook = useTagDefinitions(knowledgeBaseId, documentId)
|
||||
const kbTagHook = useKnowledgeBaseTagDefinitions(knowledgeBaseId)
|
||||
const { getNextAvailableSlot: getServerNextSlot } = useNextAvailableSlot(knowledgeBaseId)
|
||||
|
||||
// Use the document-level hook since we have documentId
|
||||
const { saveTagDefinitions, tagDefinitions, fetchTagDefinitions } = documentTagHook
|
||||
const { tagDefinitions: kbTagDefinitions, fetchTagDefinitions: refreshTagDefinitions } = kbTagHook
|
||||
|
||||
const [documentTags, setDocumentTags] = useState<DocumentTag[]>([])
|
||||
const [documentData, setDocumentData] = useState<DocumentData | null>(null)
|
||||
const [isLoadingDocument, setIsLoadingDocument] = useState(true)
|
||||
const [error, setError] = useState<string | null>(null)
|
||||
|
||||
// Inline editing state
|
||||
const [editingTagIndex, setEditingTagIndex] = useState<number | null>(null)
|
||||
const [isCreating, setIsCreating] = useState(false)
|
||||
const [isSaving, setIsSaving] = useState(false)
|
||||
const [editForm, setEditForm] = useState({
|
||||
displayName: '',
|
||||
fieldType: 'text',
|
||||
value: '',
|
||||
})
|
||||
|
||||
// Function to build document tags from data and definitions
|
||||
const buildDocumentTags = useCallback(
|
||||
(docData: DocumentData, definitions: TagDefinition[], currentTags?: DocumentTag[]) => {
|
||||
const tags: DocumentTag[] = []
|
||||
const tagSlots = TAG_SLOTS
|
||||
|
||||
tagSlots.forEach((slot) => {
|
||||
const value = docData[slot] as string | null | undefined
|
||||
const definition = definitions.find((def) => def.tagSlot === slot)
|
||||
const currentTag = currentTags?.find((tag) => tag.slot === slot)
|
||||
|
||||
// Only include tag if the document has a value AND a corresponding KB tag definition exists
|
||||
if (value?.trim() && definition) {
|
||||
tags.push({
|
||||
slot,
|
||||
displayName: definition.displayName,
|
||||
fieldType: definition.fieldType,
|
||||
value: value.trim(),
|
||||
})
|
||||
}
|
||||
})
|
||||
|
||||
return tags
|
||||
},
|
||||
[]
|
||||
)
|
||||
|
||||
// Handle tag updates (local state only, no API calls)
|
||||
const handleTagsChange = useCallback((newTags: DocumentTag[]) => {
|
||||
// Only update local state, don't save to API
|
||||
setDocumentTags(newTags)
|
||||
}, [])
|
||||
|
||||
// Handle saving document tag values to the API
|
||||
const handleSaveDocumentTags = useCallback(
|
||||
async (tagsToSave: DocumentTag[]) => {
|
||||
if (!documentData) return
|
||||
|
||||
try {
|
||||
// Convert DocumentTag array to tag data for API
|
||||
const tagData: Record<string, string> = {}
|
||||
const tagSlots = TAG_SLOTS
|
||||
|
||||
// Clear all tags first
|
||||
tagSlots.forEach((slot) => {
|
||||
tagData[slot] = ''
|
||||
})
|
||||
|
||||
// Set values from tagsToSave
|
||||
tagsToSave.forEach((tag) => {
|
||||
if (tag.value.trim()) {
|
||||
tagData[tag.slot] = tag.value.trim()
|
||||
}
|
||||
})
|
||||
|
||||
// Update document via API
|
||||
const response = await fetch(`/api/knowledge/${knowledgeBaseId}/documents/${documentId}`, {
|
||||
method: 'PUT',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
body: JSON.stringify(tagData),
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error('Failed to update document tags')
|
||||
}
|
||||
|
||||
// Update the document in the store and local state
|
||||
updateDocumentInStore(knowledgeBaseId, documentId, tagData)
|
||||
setDocumentData((prev) => (prev ? { ...prev, ...tagData } : null))
|
||||
|
||||
// Refresh tag definitions to update the display
|
||||
await fetchTagDefinitions()
|
||||
} catch (error) {
|
||||
logger.error('Error updating document tags:', error)
|
||||
throw error // Re-throw so the component can handle it
|
||||
}
|
||||
},
|
||||
[documentData, knowledgeBaseId, documentId, updateDocumentInStore, fetchTagDefinitions]
|
||||
)
|
||||
|
||||
// Handle removing a tag
|
||||
const handleRemoveTag = async (index: number) => {
|
||||
const updatedTags = documentTags.filter((_, i) => i !== index)
|
||||
handleTagsChange(updatedTags)
|
||||
|
||||
// Persist the changes
|
||||
try {
|
||||
await handleSaveDocumentTags(updatedTags)
|
||||
} catch (error) {
|
||||
// Handle error silently - the UI will show the optimistic update
|
||||
// but the user can retry if needed
|
||||
}
|
||||
}
|
||||
|
||||
// Toggle inline editor for existing tag
|
||||
const toggleTagEditor = (index: number) => {
|
||||
if (editingTagIndex === index) {
|
||||
// Already editing this tag - collapse it
|
||||
cancelEditing()
|
||||
} else {
|
||||
// Start editing this tag
|
||||
const tag = documentTags[index]
|
||||
setEditingTagIndex(index)
|
||||
setEditForm({
|
||||
displayName: tag.displayName,
|
||||
fieldType: tag.fieldType,
|
||||
value: tag.value,
|
||||
})
|
||||
setIsCreating(false)
|
||||
}
|
||||
}
|
||||
|
||||
// Open inline creator for new tag
|
||||
const openTagCreator = () => {
|
||||
setEditingTagIndex(null)
|
||||
setEditForm({
|
||||
displayName: '',
|
||||
fieldType: 'text',
|
||||
value: '',
|
||||
})
|
||||
setIsCreating(true)
|
||||
}
|
||||
|
||||
// Save tag (create or edit)
|
||||
const saveTag = async () => {
|
||||
if (!editForm.displayName.trim() || !editForm.value.trim()) return
|
||||
|
||||
// Close the edit form immediately and set saving flag
|
||||
const formData = { ...editForm }
|
||||
const currentEditingIndex = editingTagIndex
|
||||
// Capture original tag data before updating
|
||||
const originalTag = currentEditingIndex !== null ? documentTags[currentEditingIndex] : null
|
||||
setEditingTagIndex(null)
|
||||
setIsCreating(false)
|
||||
setIsSaving(true)
|
||||
|
||||
try {
|
||||
let targetSlot: string
|
||||
|
||||
if (currentEditingIndex !== null && originalTag) {
|
||||
// EDIT MODE: Editing existing tag - use existing slot
|
||||
targetSlot = originalTag.slot
|
||||
} else {
|
||||
// CREATE MODE: Check if using existing definition or creating new one
|
||||
const existingDefinition = kbTagDefinitions.find(
|
||||
(def) => def.displayName.toLowerCase() === formData.displayName.toLowerCase()
|
||||
)
|
||||
|
||||
if (existingDefinition) {
|
||||
// Using existing definition - use its slot
|
||||
targetSlot = existingDefinition.tagSlot
|
||||
} else {
|
||||
// Creating new definition - get next available slot from server
|
||||
const serverSlot = await getServerNextSlot(formData.fieldType)
|
||||
if (!serverSlot) {
|
||||
throw new Error(`No available slots for new tag of type '${formData.fieldType}'`)
|
||||
}
|
||||
targetSlot = serverSlot
|
||||
}
|
||||
}
|
||||
|
||||
// Update the tags array
|
||||
let updatedTags: DocumentTag[]
|
||||
if (currentEditingIndex !== null) {
|
||||
// Editing existing tag
|
||||
updatedTags = [...documentTags]
|
||||
updatedTags[currentEditingIndex] = {
|
||||
...updatedTags[currentEditingIndex],
|
||||
displayName: formData.displayName,
|
||||
fieldType: formData.fieldType,
|
||||
value: formData.value,
|
||||
}
|
||||
} else {
|
||||
// Creating new tag
|
||||
const newTag: DocumentTag = {
|
||||
slot: targetSlot,
|
||||
displayName: formData.displayName,
|
||||
fieldType: formData.fieldType,
|
||||
value: formData.value,
|
||||
}
|
||||
updatedTags = [...documentTags, newTag]
|
||||
}
|
||||
|
||||
handleTagsChange(updatedTags)
|
||||
|
||||
// Handle tag definition creation/update based on edit mode
|
||||
if (currentEditingIndex !== null && originalTag) {
|
||||
// EDIT MODE: Always update existing definition, never create new slots
|
||||
const currentDefinition = kbTagDefinitions.find(
|
||||
(def) => def.displayName.toLowerCase() === originalTag.displayName.toLowerCase()
|
||||
)
|
||||
|
||||
if (currentDefinition) {
|
||||
const updatedDefinition: TagDefinitionInput = {
|
||||
displayName: formData.displayName,
|
||||
fieldType: currentDefinition.fieldType, // Keep existing field type (can't change in edit mode)
|
||||
tagSlot: currentDefinition.tagSlot, // Keep existing slot
|
||||
_originalDisplayName: originalTag.displayName, // Tell server which definition to update
|
||||
}
|
||||
|
||||
if (saveTagDefinitions) {
|
||||
await saveTagDefinitions([updatedDefinition])
|
||||
}
|
||||
await refreshTagDefinitions()
|
||||
}
|
||||
} else {
|
||||
// CREATE MODE: Adding new tag
|
||||
const existingDefinition = kbTagDefinitions.find(
|
||||
(def) => def.displayName.toLowerCase() === formData.displayName.toLowerCase()
|
||||
)
|
||||
|
||||
if (!existingDefinition) {
|
||||
// Create new definition
|
||||
const newDefinition: TagDefinitionInput = {
|
||||
displayName: formData.displayName,
|
||||
fieldType: formData.fieldType,
|
||||
tagSlot: targetSlot as TagSlot,
|
||||
}
|
||||
|
||||
if (saveTagDefinitions) {
|
||||
await saveTagDefinitions([newDefinition])
|
||||
}
|
||||
await refreshTagDefinitions()
|
||||
}
|
||||
}
|
||||
|
||||
// Save the actual document tags
|
||||
await handleSaveDocumentTags(updatedTags)
|
||||
|
||||
// Reset form
|
||||
setEditForm({
|
||||
displayName: '',
|
||||
fieldType: 'text',
|
||||
value: '',
|
||||
})
|
||||
} catch (error) {
|
||||
logger.error('Error saving tag:', error)
|
||||
} finally {
|
||||
setIsSaving(false)
|
||||
}
|
||||
}
|
||||
|
||||
// Check if tag name already exists on this document
|
||||
const hasNameConflict = (name: string) => {
|
||||
if (!name.trim()) return false
|
||||
|
||||
return documentTags.some((tag, index) => {
|
||||
// When editing, don't consider the current tag being edited as a conflict
|
||||
if (editingTagIndex !== null && index === editingTagIndex) {
|
||||
return false
|
||||
}
|
||||
return tag.displayName.toLowerCase() === name.trim().toLowerCase()
|
||||
})
|
||||
}
|
||||
|
||||
// Get color for a tag based on its slot
|
||||
const getTagColor = (slot: string) => {
|
||||
// Extract slot number from slot string (e.g., "tag1" -> 1, "tag2" -> 2, etc.)
|
||||
const slotMatch = slot.match(/tag(\d+)/)
|
||||
const slotNumber = slotMatch ? Number.parseInt(slotMatch[1]) - 1 : 0
|
||||
return TAG_SLOT_COLORS[slotNumber % TAG_SLOT_COLORS.length]
|
||||
}
|
||||
|
||||
const cancelEditing = () => {
|
||||
setEditForm({
|
||||
displayName: '',
|
||||
fieldType: 'text',
|
||||
value: '',
|
||||
})
|
||||
setEditingTagIndex(null)
|
||||
setIsCreating(false)
|
||||
}
|
||||
|
||||
// Filter available tag definitions - exclude all used tag names on this document
|
||||
const availableDefinitions = kbTagDefinitions.filter((def) => {
|
||||
// Always exclude all already used tag names (including current tag being edited)
|
||||
return !documentTags.some(
|
||||
(tag) => tag.displayName.toLowerCase() === def.displayName.toLowerCase()
|
||||
)
|
||||
})
|
||||
|
||||
useEffect(() => {
|
||||
const fetchDocument = async () => {
|
||||
try {
|
||||
setIsLoadingDocument(true)
|
||||
setError(null)
|
||||
|
||||
const cachedDocuments = getCachedDocuments(knowledgeBaseId)
|
||||
const cachedDoc = cachedDocuments?.documents?.find((d) => d.id === documentId)
|
||||
|
||||
if (cachedDoc) {
|
||||
setDocumentData(cachedDoc)
|
||||
// Initialize tags from cached document
|
||||
const initialTags = buildDocumentTags(cachedDoc, tagDefinitions)
|
||||
setDocumentTags(initialTags)
|
||||
setIsLoadingDocument(false)
|
||||
return
|
||||
}
|
||||
|
||||
const response = await fetch(`/api/knowledge/${knowledgeBaseId}/documents/${documentId}`)
|
||||
|
||||
if (!response.ok) {
|
||||
if (response.status === 404) {
|
||||
throw new Error('Document not found')
|
||||
}
|
||||
throw new Error(`Failed to fetch document: ${response.statusText}`)
|
||||
}
|
||||
|
||||
const result = await response.json()
|
||||
|
||||
if (result.success) {
|
||||
setDocumentData(result.data)
|
||||
// Initialize tags from fetched document
|
||||
const initialTags = buildDocumentTags(result.data, tagDefinitions, [])
|
||||
setDocumentTags(initialTags)
|
||||
} else {
|
||||
throw new Error(result.error || 'Failed to fetch document')
|
||||
}
|
||||
} catch (err) {
|
||||
logger.error('Error fetching document:', err)
|
||||
setError(err instanceof Error ? err.message : 'An error occurred')
|
||||
} finally {
|
||||
setIsLoadingDocument(false)
|
||||
}
|
||||
}
|
||||
|
||||
if (knowledgeBaseId && documentId) {
|
||||
fetchDocument()
|
||||
}
|
||||
}, [knowledgeBaseId, documentId, getCachedDocuments, buildDocumentTags])
|
||||
|
||||
// Separate effect to rebuild tags when tag definitions change (without re-fetching document)
|
||||
useEffect(() => {
|
||||
if (documentData && !isSaving) {
|
||||
const rebuiltTags = buildDocumentTags(documentData, tagDefinitions, documentTags)
|
||||
setDocumentTags(rebuiltTags)
|
||||
}
|
||||
}, [documentData, tagDefinitions, buildDocumentTags, isSaving])
|
||||
|
||||
if (isLoadingDocument) {
|
||||
return (
|
||||
<div className='h-full'>
|
||||
<ScrollArea className='h-full' hideScrollbar={true}>
|
||||
<div className='px-2 py-2'>
|
||||
<div className='h-20 animate-pulse rounded-md bg-muted' />
|
||||
</div>
|
||||
</ScrollArea>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
if (error || !documentData) {
|
||||
return null // Don't show anything if there's an error or no document
|
||||
}
|
||||
|
||||
const isEditing = editingTagIndex !== null || isCreating
|
||||
const nameConflict = hasNameConflict(editForm.displayName)
|
||||
|
||||
// Check if there are actual changes (for editing mode)
|
||||
const hasChanges = () => {
|
||||
if (editingTagIndex === null) return true // Creating new tag always has changes
|
||||
|
||||
const originalTag = documentTags[editingTagIndex]
|
||||
if (!originalTag) return true
|
||||
|
||||
return (
|
||||
originalTag.displayName !== editForm.displayName ||
|
||||
originalTag.value !== editForm.value ||
|
||||
originalTag.fieldType !== editForm.fieldType
|
||||
)
|
||||
}
|
||||
|
||||
// Check if save should be enabled
|
||||
const canSave =
|
||||
editForm.displayName.trim() && editForm.value.trim() && !nameConflict && hasChanges()
|
||||
|
||||
return (
|
||||
<div className='h-full w-full overflow-hidden'>
|
||||
<ScrollArea className='h-full' hideScrollbar={true}>
|
||||
<div className='px-2 py-2'>
|
||||
{/* Document Tags Section */}
|
||||
<div className='mb-1 space-y-1'>
|
||||
<div className='font-medium text-muted-foreground text-xs'>Document Tags</div>
|
||||
<div>
|
||||
{/* Existing Tags */}
|
||||
<div>
|
||||
{documentTags.map((tag, index) => {
|
||||
return (
|
||||
<div key={index} className='mb-1'>
|
||||
<div
|
||||
className={`cursor-pointer rounded-[10px] border bg-card transition-colors ${editingTagIndex === index ? 'space-y-2 p-2' : 'p-2'}`}
|
||||
onClick={() => userPermissions.canEdit && toggleTagEditor(index)}
|
||||
>
|
||||
{/* Always show the tag display */}
|
||||
<div className='flex items-center justify-between text-sm'>
|
||||
<div className='flex min-w-0 flex-1 items-center gap-2'>
|
||||
<div
|
||||
className='h-2 w-2 rounded-full'
|
||||
style={{ backgroundColor: getTagColor(tag.slot) }}
|
||||
/>
|
||||
<div className='truncate font-medium'>{tag.displayName}</div>
|
||||
</div>
|
||||
{userPermissions.canEdit && (
|
||||
<Button
|
||||
variant='ghost'
|
||||
size='sm'
|
||||
onClick={(e) => {
|
||||
e.stopPropagation()
|
||||
handleRemoveTag(index)
|
||||
}}
|
||||
className='h-6 w-6 p-0 text-muted-foreground hover:text-red-600'
|
||||
>
|
||||
<X className='h-3 w-3' />
|
||||
</Button>
|
||||
)}
|
||||
</div>
|
||||
|
||||
{/* Show edit form when this tag is being edited */}
|
||||
{editingTagIndex === index && (
|
||||
<div className='space-y-1.5' onClick={(e) => e.stopPropagation()}>
|
||||
<div className='space-y-1.5'>
|
||||
<Label className='font-medium text-xs'>Tag Name</Label>
|
||||
<div className='flex gap-1.5'>
|
||||
<Input
|
||||
value={editForm.displayName}
|
||||
onChange={(e) =>
|
||||
setEditForm({ ...editForm, displayName: e.target.value })
|
||||
}
|
||||
placeholder='Enter tag name'
|
||||
className='h-8 min-w-0 flex-1 rounded-md text-sm'
|
||||
onKeyDown={(e) => {
|
||||
if (e.key === 'Enter' && canSave) {
|
||||
e.preventDefault()
|
||||
saveTag()
|
||||
}
|
||||
if (e.key === 'Escape') {
|
||||
e.preventDefault()
|
||||
cancelEditing()
|
||||
}
|
||||
}}
|
||||
/>
|
||||
{availableDefinitions.length > 0 && (
|
||||
<DropdownMenu>
|
||||
<DropdownMenuTrigger asChild>
|
||||
<Button
|
||||
variant='outline'
|
||||
size='sm'
|
||||
className='h-8 w-7 flex-shrink-0 p-0'
|
||||
>
|
||||
<ChevronDown className='h-3 w-3' />
|
||||
</Button>
|
||||
</DropdownMenuTrigger>
|
||||
<DropdownMenuContent
|
||||
align='end'
|
||||
className='w-[160px] rounded-lg border bg-card shadow-xs'
|
||||
>
|
||||
{availableDefinitions.map((def) => (
|
||||
<DropdownMenuItem
|
||||
key={def.id}
|
||||
onClick={() =>
|
||||
setEditForm({
|
||||
...editForm,
|
||||
displayName: def.displayName,
|
||||
fieldType: def.fieldType,
|
||||
})
|
||||
}
|
||||
className='cursor-pointer rounded-md px-3 py-2 text-sm hover:bg-secondary/50'
|
||||
>
|
||||
{def.displayName}
|
||||
</DropdownMenuItem>
|
||||
))}
|
||||
</DropdownMenuContent>
|
||||
</DropdownMenu>
|
||||
)}
|
||||
</div>
|
||||
{nameConflict && (
|
||||
<div className='text-red-600 text-xs'>
|
||||
A tag with this name already exists on this document
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
|
||||
<div className='space-y-1.5'>
|
||||
<Label className='font-medium text-xs'>Type</Label>
|
||||
<Select
|
||||
value={editForm.fieldType}
|
||||
onValueChange={(value) =>
|
||||
setEditForm({ ...editForm, fieldType: value })
|
||||
}
|
||||
disabled={editingTagIndex !== null} // Disable in edit mode
|
||||
>
|
||||
<SelectTrigger className='h-8 w-full text-sm'>
|
||||
<SelectValue />
|
||||
</SelectTrigger>
|
||||
<SelectContent>
|
||||
<SelectItem value='text'>Text</SelectItem>
|
||||
</SelectContent>
|
||||
</Select>
|
||||
</div>
|
||||
|
||||
<div className='space-y-1.5'>
|
||||
<Label className='font-medium text-xs'>Value</Label>
|
||||
<Input
|
||||
value={editForm.value}
|
||||
onChange={(e) =>
|
||||
setEditForm({ ...editForm, value: e.target.value })
|
||||
}
|
||||
placeholder='Enter tag value'
|
||||
className='h-8 w-full rounded-md text-sm'
|
||||
onKeyDown={(e) => {
|
||||
if (e.key === 'Enter' && canSave) {
|
||||
e.preventDefault()
|
||||
saveTag()
|
||||
}
|
||||
if (e.key === 'Escape') {
|
||||
e.preventDefault()
|
||||
cancelEditing()
|
||||
}
|
||||
}}
|
||||
/>
|
||||
</div>
|
||||
|
||||
<div className='pt-1'>
|
||||
<Button
|
||||
onClick={saveTag}
|
||||
size='sm'
|
||||
className='h-7 w-full text-xs'
|
||||
disabled={!canSave}
|
||||
>
|
||||
Save Changes
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
})}
|
||||
</div>
|
||||
|
||||
{documentTags.length === 0 && !isCreating && (
|
||||
<div className='mb-1 rounded-[10px] border border-dashed bg-card p-3 text-center'>
|
||||
<p className='text-muted-foreground text-xs'>No tags added yet.</p>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Add New Tag Button or Inline Creator */}
|
||||
{!isEditing && userPermissions.canEdit && (
|
||||
<div className='mb-1'>
|
||||
<Button
|
||||
variant='outline'
|
||||
size='sm'
|
||||
onClick={openTagCreator}
|
||||
className='w-full justify-start gap-2 rounded-[10px] border border-dashed bg-card text-muted-foreground hover:text-foreground'
|
||||
disabled={
|
||||
kbTagDefinitions.length >= MAX_TAG_SLOTS && availableDefinitions.length === 0
|
||||
}
|
||||
>
|
||||
<Plus className='h-4 w-4' />
|
||||
Add Tag
|
||||
</Button>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Inline Tag Creation Form */}
|
||||
{isCreating && (
|
||||
<div className='mb-1 w-full max-w-full space-y-2 rounded-[10px] border bg-card p-2'>
|
||||
<div className='space-y-1.5'>
|
||||
<div className='flex items-center justify-between'>
|
||||
<Label className='font-medium text-xs'>Tag Name</Label>
|
||||
<Button
|
||||
variant='ghost'
|
||||
size='sm'
|
||||
onClick={cancelEditing}
|
||||
className='h-6 w-6 p-0 text-muted-foreground hover:text-red-600'
|
||||
>
|
||||
<X className='h-3 w-3' />
|
||||
</Button>
|
||||
</div>
|
||||
<div className='flex gap-1.5'>
|
||||
<Input
|
||||
value={editForm.displayName}
|
||||
onChange={(e) => setEditForm({ ...editForm, displayName: e.target.value })}
|
||||
placeholder='Enter tag name'
|
||||
className='h-8 min-w-0 flex-1 rounded-md text-sm'
|
||||
onKeyDown={(e) => {
|
||||
if (e.key === 'Enter' && canSave) {
|
||||
e.preventDefault()
|
||||
saveTag()
|
||||
}
|
||||
if (e.key === 'Escape') {
|
||||
e.preventDefault()
|
||||
cancelEditing()
|
||||
}
|
||||
}}
|
||||
/>
|
||||
{availableDefinitions.length > 0 && (
|
||||
<DropdownMenu>
|
||||
<DropdownMenuTrigger asChild>
|
||||
<Button
|
||||
variant='outline'
|
||||
size='sm'
|
||||
className='h-8 w-7 flex-shrink-0 p-0'
|
||||
>
|
||||
<ChevronDown className='h-3 w-3' />
|
||||
</Button>
|
||||
</DropdownMenuTrigger>
|
||||
<DropdownMenuContent
|
||||
align='end'
|
||||
className='w-[160px] rounded-lg border bg-card shadow-xs'
|
||||
>
|
||||
{availableDefinitions.map((def) => (
|
||||
<DropdownMenuItem
|
||||
key={def.id}
|
||||
onClick={() =>
|
||||
setEditForm({
|
||||
...editForm,
|
||||
displayName: def.displayName,
|
||||
fieldType: def.fieldType,
|
||||
})
|
||||
}
|
||||
className='cursor-pointer rounded-md px-3 py-2 text-sm hover:bg-secondary/50'
|
||||
>
|
||||
{def.displayName}
|
||||
</DropdownMenuItem>
|
||||
))}
|
||||
</DropdownMenuContent>
|
||||
</DropdownMenu>
|
||||
)}
|
||||
</div>
|
||||
{nameConflict && (
|
||||
<div className='text-red-600 text-xs'>
|
||||
A tag with this name already exists on this document
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
|
||||
<div className='space-y-1.5'>
|
||||
<Label className='font-medium text-xs'>Type</Label>
|
||||
<Select
|
||||
value={editForm.fieldType}
|
||||
onValueChange={(value) => setEditForm({ ...editForm, fieldType: value })}
|
||||
>
|
||||
<SelectTrigger className='h-8 w-full text-sm'>
|
||||
<SelectValue />
|
||||
</SelectTrigger>
|
||||
<SelectContent>
|
||||
<SelectItem value='text'>Text</SelectItem>
|
||||
</SelectContent>
|
||||
</Select>
|
||||
</div>
|
||||
|
||||
<div className='space-y-1.5'>
|
||||
<Label className='font-medium text-xs'>Value</Label>
|
||||
<Input
|
||||
value={editForm.value}
|
||||
onChange={(e) => setEditForm({ ...editForm, value: e.target.value })}
|
||||
placeholder='Enter tag value'
|
||||
className='h-8 w-full rounded-md text-sm'
|
||||
onKeyDown={(e) => {
|
||||
if (e.key === 'Enter' && canSave) {
|
||||
e.preventDefault()
|
||||
saveTag()
|
||||
}
|
||||
if (e.key === 'Escape') {
|
||||
e.preventDefault()
|
||||
cancelEditing()
|
||||
}
|
||||
}}
|
||||
/>
|
||||
</div>
|
||||
|
||||
{/* Warning when at max slots */}
|
||||
{kbTagDefinitions.length >= MAX_TAG_SLOTS && (
|
||||
<div className='rounded-md border border-amber-200 bg-amber-50 p-2 dark:border-amber-800 dark:bg-amber-950'>
|
||||
<div className='text-amber-800 text-xs dark:text-amber-200'>
|
||||
<span className='font-medium'>Maximum tag definitions reached</span>
|
||||
</div>
|
||||
<p className='text-amber-700 text-xs dark:text-amber-300'>
|
||||
You can still use existing tag definitions, but cannot create new ones.
|
||||
</p>
|
||||
</div>
|
||||
)}
|
||||
|
||||
<div className='pt-2'>
|
||||
<Button
|
||||
onClick={saveTag}
|
||||
size='sm'
|
||||
className='h-7 w-full text-xs'
|
||||
disabled={
|
||||
!canSave ||
|
||||
(kbTagDefinitions.length >= MAX_TAG_SLOTS &&
|
||||
!kbTagDefinitions.find(
|
||||
(def) =>
|
||||
def.displayName.toLowerCase() === editForm.displayName.toLowerCase()
|
||||
))
|
||||
}
|
||||
>
|
||||
Create Tag
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
<div className='mt-2 text-muted-foreground text-xs'>
|
||||
{kbTagDefinitions.length} of {MAX_TAG_SLOTS} tag slots used
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</ScrollArea>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
@@ -8,7 +8,7 @@ import {
|
||||
UserCircle,
|
||||
Users,
|
||||
} from 'lucide-react'
|
||||
import { isDev } from '@/lib/environment'
|
||||
import { isBillingEnabled } from '@/lib/environment'
|
||||
import { cn } from '@/lib/utils'
|
||||
import { useSubscriptionStore } from '@/stores/subscription/store'
|
||||
|
||||
@@ -40,7 +40,7 @@ type NavigationItem = {
|
||||
| 'privacy'
|
||||
label: string
|
||||
icon: React.ComponentType<{ className?: string }>
|
||||
hideInDev?: boolean
|
||||
hideWhenBillingDisabled?: boolean
|
||||
requiresTeam?: boolean
|
||||
}
|
||||
|
||||
@@ -79,13 +79,13 @@ const allNavigationItems: NavigationItem[] = [
|
||||
id: 'subscription',
|
||||
label: 'Subscription',
|
||||
icon: CreditCard,
|
||||
hideInDev: true,
|
||||
hideWhenBillingDisabled: true,
|
||||
},
|
||||
{
|
||||
id: 'team',
|
||||
label: 'Team',
|
||||
icon: Users,
|
||||
hideInDev: true,
|
||||
hideWhenBillingDisabled: true,
|
||||
requiresTeam: true,
|
||||
},
|
||||
]
|
||||
@@ -99,7 +99,7 @@ export function SettingsNavigation({
|
||||
const subscription = getSubscriptionStatus()
|
||||
|
||||
const navigationItems = allNavigationItems.filter((item) => {
|
||||
if (item.hideInDev && isDev) {
|
||||
if (item.hideWhenBillingDisabled && !isBillingEnabled) {
|
||||
return false
|
||||
}
|
||||
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
import { useEffect, useRef, useState } from 'react'
|
||||
import { X } from 'lucide-react'
|
||||
import { Button, Dialog, DialogContent, DialogHeader, DialogTitle } from '@/components/ui'
|
||||
import { client } from '@/lib/auth-client'
|
||||
import { isBillingEnabled } from '@/lib/environment'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { cn } from '@/lib/utils'
|
||||
import {
|
||||
@@ -82,7 +82,14 @@ export function SettingsModal({ open, onOpenChange }: SettingsModalProps) {
|
||||
}
|
||||
}, [onOpenChange])
|
||||
|
||||
const isSubscriptionEnabled = !!client.subscription
|
||||
// Redirect away from billing tabs if billing is disabled
|
||||
useEffect(() => {
|
||||
if (!isBillingEnabled && (activeSection === 'subscription' || activeSection === 'team')) {
|
||||
setActiveSection('general')
|
||||
}
|
||||
}, [activeSection])
|
||||
|
||||
const isSubscriptionEnabled = isBillingEnabled
|
||||
|
||||
return (
|
||||
<Dialog open={open} onOpenChange={onOpenChange}>
|
||||
@@ -134,9 +141,11 @@ export function SettingsModal({ open, onOpenChange }: SettingsModalProps) {
|
||||
<Subscription onOpenChange={onOpenChange} />
|
||||
</div>
|
||||
)}
|
||||
<div className={cn('h-full', activeSection === 'team' ? 'block' : 'hidden')}>
|
||||
<TeamManagement />
|
||||
</div>
|
||||
{isBillingEnabled && (
|
||||
<div className={cn('h-full', activeSection === 'team' ? 'block' : 'hidden')}>
|
||||
<TeamManagement />
|
||||
</div>
|
||||
)}
|
||||
<div className={cn('h-full', activeSection === 'privacy' ? 'block' : 'hidden')}>
|
||||
<Privacy />
|
||||
</div>
|
||||
|
||||
@@ -0,0 +1,281 @@
|
||||
'use client'
|
||||
|
||||
import { useCallback, useEffect } from 'react'
|
||||
import {
|
||||
Building2,
|
||||
Check,
|
||||
Clock,
|
||||
Database,
|
||||
DollarSign,
|
||||
HeadphonesIcon,
|
||||
Infinity as InfinityIcon,
|
||||
MessageSquare,
|
||||
Server,
|
||||
Users,
|
||||
Workflow,
|
||||
Zap,
|
||||
} from 'lucide-react'
|
||||
import {
|
||||
AlertDialog,
|
||||
AlertDialogContent,
|
||||
AlertDialogHeader,
|
||||
AlertDialogTitle,
|
||||
} from '@/components/ui/alert-dialog'
|
||||
import { Button } from '@/components/ui/button'
|
||||
import { useSession, useSubscription } from '@/lib/auth-client'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { cn } from '@/lib/utils'
|
||||
import { useOrganizationStore } from '@/stores/organization'
|
||||
import { useSubscriptionStore } from '@/stores/subscription/store'
|
||||
|
||||
const logger = createLogger('SubscriptionModal')
|
||||
|
||||
interface SubscriptionModalProps {
|
||||
open: boolean
|
||||
onOpenChange: (open: boolean) => void
|
||||
}
|
||||
|
||||
interface PlanFeature {
|
||||
text: string
|
||||
included: boolean
|
||||
icon?: any
|
||||
}
|
||||
|
||||
export function SubscriptionModal({ open, onOpenChange }: SubscriptionModalProps) {
|
||||
const { data: session } = useSession()
|
||||
const betterAuthSubscription = useSubscription()
|
||||
const { activeOrganization } = useOrganizationStore()
|
||||
const { loadData, getSubscriptionStatus, isLoading } = useSubscriptionStore()
|
||||
|
||||
// Load subscription data when modal opens
|
||||
useEffect(() => {
|
||||
if (open) {
|
||||
loadData()
|
||||
}
|
||||
}, [open, loadData])
|
||||
|
||||
const subscription = getSubscriptionStatus()
|
||||
|
||||
const handleUpgrade = useCallback(
|
||||
async (targetPlan: 'pro' | 'team') => {
|
||||
if (!session?.user?.id) return
|
||||
|
||||
const subscriptionData = useSubscriptionStore.getState().subscriptionData
|
||||
const currentSubscriptionId = subscriptionData?.stripeSubscriptionId
|
||||
|
||||
let referenceId = session.user.id
|
||||
if (subscription.isTeam && activeOrganization?.id) {
|
||||
referenceId = activeOrganization.id
|
||||
}
|
||||
|
||||
const currentUrl = window.location.origin + window.location.pathname
|
||||
|
||||
try {
|
||||
const upgradeParams: any = {
|
||||
plan: targetPlan,
|
||||
referenceId,
|
||||
successUrl: currentUrl,
|
||||
cancelUrl: currentUrl,
|
||||
seats: targetPlan === 'team' ? 1 : undefined,
|
||||
}
|
||||
|
||||
if (currentSubscriptionId) {
|
||||
upgradeParams.subscriptionId = currentSubscriptionId
|
||||
}
|
||||
|
||||
await betterAuthSubscription.upgrade(upgradeParams)
|
||||
} catch (error) {
|
||||
logger.error('Failed to initiate subscription upgrade:', error)
|
||||
alert('Failed to initiate upgrade. Please try again or contact support.')
|
||||
}
|
||||
},
|
||||
[session?.user?.id, subscription.isTeam, activeOrganization?.id, betterAuthSubscription]
|
||||
)
|
||||
|
||||
const handleContactUs = () => {
|
||||
window.open('https://form.typeform.com/to/jqCO12pF', '_blank')
|
||||
}
|
||||
|
||||
// Define all 4 plans
|
||||
const plans = [
|
||||
{
|
||||
name: 'Free',
|
||||
price: '$0',
|
||||
description: '',
|
||||
features: [
|
||||
{ text: '$10 free inference credit', included: true, icon: DollarSign },
|
||||
{ text: '10 runs per minute (sync)', included: true, icon: Zap },
|
||||
{ text: '50 runs per minute (async)', included: true, icon: Clock },
|
||||
{ text: '7-day log retention', included: true, icon: Database },
|
||||
],
|
||||
isActive: subscription.isFree,
|
||||
action: null, // No action for free plan
|
||||
},
|
||||
{
|
||||
name: 'Pro',
|
||||
price: '$20',
|
||||
description: '/month',
|
||||
features: [
|
||||
{ text: '25 runs per minute (sync)', included: true, icon: Zap },
|
||||
{ text: '200 runs per minute (async)', included: true, icon: Clock },
|
||||
{ text: 'Unlimited workspaces', included: true, icon: Building2 },
|
||||
{ text: 'Unlimited workflows', included: true, icon: Workflow },
|
||||
{ text: 'Unlimited invites', included: true, icon: Users },
|
||||
{ text: 'Unlimited log retention', included: true, icon: Database },
|
||||
],
|
||||
isActive: subscription.isPro && !subscription.isTeam,
|
||||
action: subscription.isFree ? () => handleUpgrade('pro') : null,
|
||||
},
|
||||
{
|
||||
name: 'Team',
|
||||
price: '$40',
|
||||
description: '/month',
|
||||
features: [
|
||||
{ text: '75 runs per minute (sync)', included: true, icon: Zap },
|
||||
{ text: '500 runs per minute (async)', included: true, icon: Clock },
|
||||
{ text: 'Everything in Pro', included: true, icon: InfinityIcon },
|
||||
{ text: 'Dedicated Slack channel', included: true, icon: MessageSquare },
|
||||
],
|
||||
isActive: subscription.isTeam,
|
||||
action: !subscription.isTeam ? () => handleUpgrade('team') : null,
|
||||
},
|
||||
{
|
||||
name: 'Enterprise',
|
||||
price: '',
|
||||
description: '',
|
||||
features: [
|
||||
{ text: 'Custom rate limits', included: true, icon: Zap },
|
||||
{ text: 'Enterprise hosting license', included: true, icon: Server },
|
||||
{ text: 'Custom enterprise support', included: true, icon: HeadphonesIcon },
|
||||
],
|
||||
isActive: subscription.isEnterprise,
|
||||
action: handleContactUs,
|
||||
},
|
||||
]
|
||||
|
||||
return (
|
||||
<AlertDialog open={open} onOpenChange={onOpenChange}>
|
||||
<AlertDialogContent className='!fixed !inset-0 !m-0 data-[state=open]:!translate-x-0 data-[state=open]:!translate-y-0 flex h-full max-h-full w-full max-w-full flex-col gap-0 rounded-none border-0 p-0'>
|
||||
<AlertDialogHeader className='flex-shrink-0 px-6 py-5'>
|
||||
<AlertDialogTitle className='font-medium text-lg'>Upgrade your plan</AlertDialogTitle>
|
||||
</AlertDialogHeader>
|
||||
|
||||
<div className='flex min-h-0 flex-1 items-center justify-center overflow-hidden px-8 pb-8'>
|
||||
<div className='flex w-full max-w-4xl flex-col gap-6'>
|
||||
{/* Main Plans Grid - Free, Pro, Team */}
|
||||
<div className='grid grid-cols-1 gap-6 md:grid-cols-3'>
|
||||
{plans.slice(0, 3).map((plan) => (
|
||||
<div
|
||||
key={plan.name}
|
||||
className={cn('relative flex flex-col rounded-[10px] border p-6')}
|
||||
>
|
||||
{/* Plan Header */}
|
||||
<div className='mb-6'>
|
||||
<h3 className='mb-3 font-semibold text-lg'>{plan.name}</h3>
|
||||
<div className='flex items-baseline'>
|
||||
<span className='font-semibold text-3xl'>{plan.price}</span>
|
||||
{plan.description && (
|
||||
<span className='ml-1 text-muted-foreground text-sm'>
|
||||
{plan.description}
|
||||
</span>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Features */}
|
||||
<ul className='mb-6 flex-1 space-y-3'>
|
||||
{plan.features.map((feature, index) => (
|
||||
<li key={index} className='flex items-start gap-2 text-sm'>
|
||||
{feature.icon ? (
|
||||
<feature.icon className='mt-0.5 h-4 w-4 flex-shrink-0 text-muted-foreground' />
|
||||
) : (
|
||||
<Check className='mt-0.5 h-4 w-4 flex-shrink-0 text-green-500' />
|
||||
)}
|
||||
<span className='text-muted-foreground'>{feature.text}</span>
|
||||
</li>
|
||||
))}
|
||||
</ul>
|
||||
|
||||
{/* Action Button */}
|
||||
<div className='mt-auto'>
|
||||
{plan.isActive ? (
|
||||
<Button variant='secondary' className='w-full rounded-[8px]' disabled>
|
||||
Current plan
|
||||
</Button>
|
||||
) : plan.action ? (
|
||||
<Button
|
||||
onClick={plan.action}
|
||||
className='w-full rounded-[8px]'
|
||||
variant='default'
|
||||
>
|
||||
Upgrade
|
||||
</Button>
|
||||
) : (
|
||||
<Button variant='outline' className='w-full rounded-[8px]' disabled>
|
||||
{plan.name === 'Free' ? 'Basic plan' : 'Upgrade'}
|
||||
</Button>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
|
||||
{/* Enterprise Plan - Full Width */}
|
||||
<div
|
||||
className={cn(
|
||||
'relative flex flex-col rounded-[10px] border p-6 md:flex-row md:items-center md:justify-between',
|
||||
plans[3].isActive && 'border-gray-400'
|
||||
)}
|
||||
>
|
||||
{/* Left Side - Plan Info */}
|
||||
<div className='mb-4 md:mb-0'>
|
||||
<h3 className='mb-2 font-semibold text-lg'>{plans[3].name}</h3>
|
||||
<p className='mb-3 text-muted-foreground text-sm'>
|
||||
Custom solutions tailored to your enterprise needs
|
||||
</p>
|
||||
<div className='flex items-center gap-4'>
|
||||
{plans[3].features.map((feature, index) => (
|
||||
<div key={index} className='flex items-center gap-4'>
|
||||
<div className='flex items-center gap-2 text-sm'>
|
||||
{feature.icon ? (
|
||||
<feature.icon className='h-4 w-4 flex-shrink-0 text-muted-foreground' />
|
||||
) : (
|
||||
<Check className='h-4 w-4 flex-shrink-0 text-green-500' />
|
||||
)}
|
||||
<span className='text-muted-foreground'>{feature.text}</span>
|
||||
</div>
|
||||
{index < plans[3].features.length - 1 && (
|
||||
<div className='h-4 w-px bg-border' />
|
||||
)}
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Right Side - Button */}
|
||||
<div className='md:ml-auto md:w-[200px]'>
|
||||
{plans[3].isActive ? (
|
||||
<Button variant='secondary' className='w-full rounded-[8px]' disabled>
|
||||
Current plan
|
||||
</Button>
|
||||
) : plans[3].action ? (
|
||||
<Button
|
||||
onClick={plans[3].action}
|
||||
className='w-full rounded-[8px]'
|
||||
variant='default'
|
||||
>
|
||||
Contact us
|
||||
</Button>
|
||||
) : (
|
||||
<Button className='w-full rounded-[8px]' variant='default' disabled>
|
||||
Contact us
|
||||
</Button>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</AlertDialogContent>
|
||||
</AlertDialog>
|
||||
)
|
||||
}
|
||||
@@ -2,7 +2,7 @@ import { useCallback } from 'react'
|
||||
import { Tooltip, TooltipContent, TooltipTrigger } from '@/components/ui/tooltip'
|
||||
import { cn } from '@/lib/utils'
|
||||
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
|
||||
import { LoopTool } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/loop-node/loop-config'
|
||||
import { LoopTool } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/subflows/loop/loop-config'
|
||||
|
||||
type LoopToolbarItemProps = {
|
||||
disabled?: boolean
|
||||
|
||||
@@ -2,7 +2,7 @@ import { useCallback } from 'react'
|
||||
import { Tooltip, TooltipContent, TooltipTrigger } from '@/components/ui/tooltip'
|
||||
import { cn } from '@/lib/utils'
|
||||
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
|
||||
import { ParallelTool } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/parallel-node/parallel-config'
|
||||
import { ParallelTool } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/subflows/parallel/parallel-config'
|
||||
|
||||
type ParallelToolbarItemProps = {
|
||||
disabled?: boolean
|
||||
|
||||
@@ -0,0 +1,102 @@
|
||||
'use client'
|
||||
|
||||
import { useEffect } from 'react'
|
||||
import { Badge, Progress, Skeleton } from '@/components/ui'
|
||||
import { cn } from '@/lib/utils'
|
||||
import { useSubscriptionStore } from '@/stores/subscription/store'
|
||||
|
||||
// Constants for reusable styles
|
||||
const GRADIENT_BADGE_STYLES =
|
||||
'gradient-text h-[1.125rem] rounded-[6px] border-gradient-primary/20 bg-gradient-to-b from-gradient-primary via-gradient-secondary to-gradient-primary px-2 py-0 font-medium text-xs'
|
||||
const GRADIENT_TEXT_STYLES =
|
||||
'gradient-text bg-gradient-to-b from-gradient-primary via-gradient-secondary to-gradient-primary'
|
||||
const CONTAINER_STYLES =
|
||||
'pointer-events-auto flex-shrink-0 rounded-[10px] border bg-background px-3 py-2.5 shadow-xs cursor-pointer transition-colors hover:bg-muted/50'
|
||||
|
||||
// Plan name mapping
|
||||
const PLAN_NAMES = {
|
||||
enterprise: 'Enterprise',
|
||||
team: 'Team',
|
||||
pro: 'Pro',
|
||||
free: 'Free',
|
||||
} as const
|
||||
|
||||
interface UsageIndicatorProps {
|
||||
onClick?: (badgeType: 'add' | 'upgrade') => void
|
||||
}
|
||||
|
||||
export function UsageIndicator({ onClick }: UsageIndicatorProps) {
|
||||
const { loadData, getUsage, getSubscriptionStatus, isLoading } = useSubscriptionStore()
|
||||
|
||||
// Load subscription data on mount
|
||||
useEffect(() => {
|
||||
loadData()
|
||||
}, [loadData])
|
||||
|
||||
const usage = getUsage()
|
||||
const subscription = getSubscriptionStatus()
|
||||
|
||||
// Show skeleton while loading
|
||||
if (isLoading) {
|
||||
return (
|
||||
<div className={CONTAINER_STYLES} onClick={() => onClick?.('upgrade')}>
|
||||
<div className='space-y-2'>
|
||||
{/* Plan and usage info skeleton */}
|
||||
<div className='flex items-center justify-between'>
|
||||
<Skeleton className='h-5 w-12' />
|
||||
<Skeleton className='h-4 w-20' />
|
||||
</div>
|
||||
|
||||
{/* Progress Bar skeleton */}
|
||||
<Skeleton className='h-2 w-full' />
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
// Calculate progress percentage (capped at 100)
|
||||
const progressPercentage = Math.min(usage.percentUsed, 100)
|
||||
|
||||
// Determine plan type
|
||||
const planType = subscription.isEnterprise
|
||||
? 'enterprise'
|
||||
: subscription.isTeam
|
||||
? 'team'
|
||||
: subscription.isPro
|
||||
? 'pro'
|
||||
: 'free'
|
||||
|
||||
// Determine badge to show
|
||||
const showAddBadge = planType !== 'free' && usage.percentUsed >= 85
|
||||
const badgeText = planType === 'free' ? 'Upgrade' : 'Add'
|
||||
const badgeType = planType === 'free' ? 'upgrade' : 'add'
|
||||
|
||||
return (
|
||||
<div className={CONTAINER_STYLES} onClick={() => onClick?.(badgeType)}>
|
||||
<div className='space-y-2'>
|
||||
{/* Plan and usage info */}
|
||||
<div className='flex items-center justify-between'>
|
||||
<div className='flex items-center gap-2'>
|
||||
<span
|
||||
className={cn(
|
||||
'font-medium text-sm',
|
||||
planType === 'free' ? 'text-foreground' : GRADIENT_TEXT_STYLES
|
||||
)}
|
||||
>
|
||||
{PLAN_NAMES[planType]}
|
||||
</span>
|
||||
{(showAddBadge || planType === 'free') && (
|
||||
<Badge className={GRADIENT_BADGE_STYLES}>{badgeText}</Badge>
|
||||
)}
|
||||
</div>
|
||||
<span className='text-muted-foreground text-xs tabular-nums'>
|
||||
${usage.current.toFixed(2)} / ${usage.limit}
|
||||
</span>
|
||||
</div>
|
||||
|
||||
{/* Progress Bar */}
|
||||
<Progress value={progressPercentage} className='h-2' />
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
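A minimal usage sketch for the component above. The wrapper component here is hypothetical, but the import path, the `onClick` badge types, and the `open-settings` event shape all appear in the sidebar wiring further down:

```tsx
'use client'

import { useState } from 'react'
import {
  SubscriptionModal,
  UsageIndicator,
} from '@/app/workspace/[workspaceId]/w/components/sidebar/components'

// Hypothetical parent: decide what the two badge types mean.
export function UsageFooterSketch() {
  const [showSubscriptionModal, setShowSubscriptionModal] = useState(false)

  return (
    <>
      <UsageIndicator
        onClick={(badgeType) => {
          if (badgeType === 'add') {
            // Paid plan near its limit: jump straight to the subscription settings tab
            window.dispatchEvent(new CustomEvent('open-settings', { detail: { tab: 'subscription' } }))
          } else {
            // Free plan: open the upgrade modal
            setShowSubscriptionModal(true)
          }
        }}
      />
      <SubscriptionModal open={showSubscriptionModal} onOpenChange={setShowSubscriptionModal} />
    </>
  )
}
```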
|
||||
@@ -171,11 +171,6 @@ const PermissionsTableSkeleton = React.memo(() => (
|
||||
|
||||
PermissionsTableSkeleton.displayName = 'PermissionsTableSkeleton'
|
||||
|
||||
const getStatusBadgeStyles = (status: 'sent' | 'member' | 'modified'): string => {
|
||||
// Use consistent gray styling for all statuses to align with modal design
|
||||
return 'inline-flex items-center rounded-[8px] bg-gray-100 px-2 py-1 text-xs font-medium text-gray-700 dark:bg-gray-800 dark:text-gray-300'
|
||||
}
|
||||
|
||||
const PermissionsTable = ({
|
||||
userPermissions,
|
||||
onPermissionChange,
|
||||
@@ -314,12 +309,12 @@ const PermissionsTable = ({
|
||||
<div className='flex items-center gap-2'>
|
||||
<span className='font-medium text-card-foreground text-sm'>{user.email}</span>
|
||||
{isPendingInvitation && (
|
||||
<span className='inline-flex items-center rounded-[8px] bg-blue-100 px-2 py-1 font-medium text-blue-700 text-xs dark:bg-blue-900/30 dark:text-blue-400'>
|
||||
<span className='inline-flex items-center rounded-[8px] bg-gray-100 px-2 py-1 font-medium text-gray-700 text-xs dark:bg-gray-800 dark:text-gray-300'>
|
||||
Sent
|
||||
</span>
|
||||
)}
|
||||
{hasChanges && (
|
||||
<span className='inline-flex items-center rounded-[8px] bg-orange-100 px-2 py-1 font-medium text-orange-700 text-xs dark:bg-orange-900/30 dark:text-orange-400'>
|
||||
<span className='inline-flex items-center rounded-[8px] bg-gray-100 px-2 py-1 font-medium text-gray-700 text-xs dark:bg-gray-800 dark:text-gray-300'>
|
||||
Modified
|
||||
</span>
|
||||
)}
|
||||
|
||||
@@ -5,6 +5,7 @@ import { HelpCircle, LibraryBig, ScrollText, Search, Settings, Shapes } from 'lu
|
||||
import { useParams, usePathname, useRouter } from 'next/navigation'
|
||||
import { Button, ScrollArea, Tooltip, TooltipContent, TooltipTrigger } from '@/components/ui'
|
||||
import { useSession } from '@/lib/auth-client'
|
||||
import { isBillingEnabled } from '@/lib/environment'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { generateWorkspaceName } from '@/lib/naming'
|
||||
import { cn } from '@/lib/utils'
|
||||
@@ -14,9 +15,13 @@ import {
|
||||
CreateMenu,
|
||||
FolderTree,
|
||||
HelpModal,
|
||||
KnowledgeBaseTags,
|
||||
KnowledgeTags,
|
||||
LogsFilters,
|
||||
SettingsModal,
|
||||
SubscriptionModal,
|
||||
Toolbar,
|
||||
UsageIndicator,
|
||||
WorkspaceHeader,
|
||||
WorkspaceSelector,
|
||||
} from '@/app/workspace/[workspaceId]/w/components/sidebar/components'
|
||||
@@ -141,8 +146,9 @@ const SIDEBAR_HEIGHTS = {
|
||||
WORKSPACE_HEADER: 48, // estimated height of workspace header
|
||||
SEARCH: 48, // h-12
|
||||
WORKFLOW_SELECTOR: 212, // h-[212px]
|
||||
NAVIGATION: 48, // h-12 buttons
|
||||
NAVIGATION: 42, // h-[42px] buttons
|
||||
WORKSPACE_SELECTOR: 171, // optimized height: p-2(16) + h-[104px](104) + mt-2(8) + border-t(1) + pt-2(8) + h-8(32) = 169px
|
||||
USAGE_INDICATOR: 58, // actual height: border(2) + py-2.5(20) + content(~36) = 58px
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -246,6 +252,41 @@ export function Sidebar() {
|
||||
return logsPageRegex.test(pathname)
|
||||
}, [pathname])
|
||||
|
||||
// Check if we're on any knowledge base page (overview or document)
|
||||
const isOnKnowledgePage = useMemo(() => {
|
||||
// Pattern: /workspace/[workspaceId]/knowledge/[id] or /workspace/[workspaceId]/knowledge/[id]/[documentId]
|
||||
const knowledgePageRegex = /^\/workspace\/[^/]+\/knowledge\/[^/]+/
|
||||
return knowledgePageRegex.test(pathname)
|
||||
}, [pathname])
|
||||
|
||||
// Extract knowledge base ID and document ID from the pathname
|
||||
const { knowledgeBaseId, documentId } = useMemo(() => {
|
||||
if (!isOnKnowledgePage) {
|
||||
return { knowledgeBaseId: null, documentId: null }
|
||||
}
|
||||
|
||||
// Handle both KB overview (/knowledge/[kbId]) and document page (/knowledge/[kbId]/[docId])
|
||||
const kbOverviewMatch = pathname.match(/^\/workspace\/[^/]+\/knowledge\/([^/]+)$/)
|
||||
const docPageMatch = pathname.match(/^\/workspace\/[^/]+\/knowledge\/([^/]+)\/([^/]+)$/)
|
||||
|
||||
if (docPageMatch) {
|
||||
// Document page - has both kbId and docId
|
||||
return {
|
||||
knowledgeBaseId: docPageMatch[1],
|
||||
documentId: docPageMatch[2],
|
||||
}
|
||||
}
|
||||
if (kbOverviewMatch) {
|
||||
// KB overview page - has only kbId
|
||||
return {
|
||||
knowledgeBaseId: kbOverviewMatch[1],
|
||||
documentId: null,
|
||||
}
|
||||
}
|
||||
|
||||
return { knowledgeBaseId: null, documentId: null }
|
||||
}, [pathname, isOnKnowledgePage])
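The two patterns above split knowledge routes into an overview case and a document case; a quick illustration of the expected captures (the IDs are made up):

```typescript
// Illustrative only: sample workspace/KB/document IDs are made up.
const kbOverviewRegex = /^\/workspace\/[^/]+\/knowledge\/([^/]+)$/
const docPageRegex = /^\/workspace\/[^/]+\/knowledge\/([^/]+)\/([^/]+)$/

const overviewPath = '/workspace/ws_123/knowledge/kb_abc'
const documentPath = '/workspace/ws_123/knowledge/kb_abc/doc_xyz'

console.log(overviewPath.match(kbOverviewRegex)?.[1]) // 'kb_abc'
console.log(documentPath.match(docPageRegex)?.slice(1)) // ['kb_abc', 'doc_xyz']
console.log(documentPath.match(kbOverviewRegex)) // null: the extra segment fails the $ anchor
```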
|
||||
|
||||
// Use optimized auto-scroll hook
|
||||
const { handleDragOver, stopScroll } = useAutoScroll(workflowScrollAreaRef)
|
||||
|
||||
@@ -677,8 +718,16 @@ export function Sidebar() {
|
||||
) as HTMLElement
|
||||
if (activeWorkflow) {
|
||||
activeWorkflow.scrollIntoView({
|
||||
block: 'nearest',
|
||||
block: 'start',
|
||||
})
|
||||
|
||||
// Adjust scroll position to eliminate the small gap at the top
|
||||
const scrollViewport = scrollContainer.querySelector(
|
||||
'[data-radix-scroll-area-viewport]'
|
||||
) as HTMLElement
|
||||
if (scrollViewport && scrollViewport.scrollTop > 0) {
|
||||
scrollViewport.scrollTop = Math.max(0, scrollViewport.scrollTop - 8)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -688,6 +737,7 @@ export function Sidebar() {
|
||||
const [showHelp, setShowHelp] = useState(false)
|
||||
const [showInviteMembers, setShowInviteMembers] = useState(false)
|
||||
const [showSearchModal, setShowSearchModal] = useState(false)
|
||||
const [showSubscriptionModal, setShowSubscriptionModal] = useState(false)
|
||||
|
||||
// Separate regular workflows from temporary marketplace workflows
|
||||
const { regularWorkflows, tempWorkflows } = useMemo(() => {
|
||||
@@ -1008,7 +1058,7 @@ export function Sidebar() {
|
||||
}`}
|
||||
style={{
|
||||
top: `${toolbarTop}px`,
|
||||
bottom: `${navigationBottom + 42 + 12}px`, // Navigation height + gap
|
||||
bottom: `${navigationBottom + SIDEBAR_HEIGHTS.NAVIGATION + SIDEBAR_GAP + (isBillingEnabled ? SIDEBAR_HEIGHTS.USAGE_INDICATOR + SIDEBAR_GAP : 0)}px`, // Navigation height + gap + UsageIndicator height + gap (if billing enabled)
|
||||
}}
|
||||
>
|
||||
<Toolbar
|
||||
@@ -1024,12 +1074,52 @@ export function Sidebar() {
|
||||
}`}
|
||||
style={{
|
||||
top: `${toolbarTop}px`,
|
||||
bottom: `${navigationBottom + 42 + 12}px`, // Navigation height + gap
|
||||
bottom: `${navigationBottom + SIDEBAR_HEIGHTS.NAVIGATION + SIDEBAR_GAP + (isBillingEnabled ? SIDEBAR_HEIGHTS.USAGE_INDICATOR + SIDEBAR_GAP : 0)}px`, // Navigation height + gap + UsageIndicator height + gap (if billing enabled)
|
||||
}}
|
||||
>
|
||||
<LogsFilters />
|
||||
</div>
|
||||
|
||||
{/* Floating Knowledge Tags - Only on knowledge pages */}
|
||||
<div
|
||||
className={`pointer-events-auto fixed left-4 z-50 w-56 rounded-[10px] border bg-background shadow-xs ${
|
||||
!isOnKnowledgePage || isSidebarCollapsed || !knowledgeBaseId ? 'hidden' : ''
|
||||
}`}
|
||||
style={{
|
||||
top: `${toolbarTop}px`,
|
||||
bottom: `${navigationBottom + SIDEBAR_HEIGHTS.NAVIGATION + SIDEBAR_GAP + (isBillingEnabled ? SIDEBAR_HEIGHTS.USAGE_INDICATOR + SIDEBAR_GAP : 0)}px`, // Navigation height + gap + UsageIndicator height + gap (if billing enabled)
|
||||
}}
|
||||
>
|
||||
{knowledgeBaseId && documentId && (
|
||||
<KnowledgeTags knowledgeBaseId={knowledgeBaseId} documentId={documentId} />
|
||||
)}
|
||||
{knowledgeBaseId && !documentId && <KnowledgeBaseTags knowledgeBaseId={knowledgeBaseId} />}
|
||||
</div>
|
||||
|
||||
{/* Floating Usage Indicator - Only shown when billing enabled */}
|
||||
{isBillingEnabled && (
|
||||
<div
|
||||
className='pointer-events-auto fixed left-4 z-50 w-56'
|
||||
style={{ bottom: `${navigationBottom + SIDEBAR_HEIGHTS.NAVIGATION + SIDEBAR_GAP}px` }} // Navigation height + gap
|
||||
>
|
||||
<UsageIndicator
|
||||
onClick={(badgeType) => {
|
||||
if (badgeType === 'add') {
|
||||
// Open settings modal on subscription tab
|
||||
if (typeof window !== 'undefined') {
|
||||
window.dispatchEvent(
|
||||
new CustomEvent('open-settings', { detail: { tab: 'subscription' } })
|
||||
)
|
||||
}
|
||||
} else {
|
||||
// Open subscription modal for upgrade
|
||||
setShowSubscriptionModal(true)
|
||||
}
|
||||
}}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Floating Navigation - Always visible */}
|
||||
<div
|
||||
className='pointer-events-auto fixed left-4 z-50 w-56'
|
||||
@@ -1046,6 +1136,7 @@ export function Sidebar() {
|
||||
<SettingsModal open={showSettings} onOpenChange={setShowSettings} />
|
||||
<HelpModal open={showHelp} onOpenChange={setShowHelp} />
|
||||
<InviteModal open={showInviteMembers} onOpenChange={setShowInviteMembers} />
|
||||
<SubscriptionModal open={showSubscriptionModal} onOpenChange={setShowSubscriptionModal} />
|
||||
<SearchModal
|
||||
open={showSearchModal}
|
||||
onOpenChange={setShowSearchModal}
|
||||
|
||||
@@ -15,8 +15,8 @@ import 'reactflow/dist/style.css'
|
||||
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { cn } from '@/lib/utils'
|
||||
import { LoopNodeComponent } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/loop-node/loop-node'
|
||||
import { ParallelNodeComponent } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/parallel-node/parallel-node'
|
||||
import { LoopNodeComponent } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/subflows/loop/loop-node'
|
||||
import { ParallelNodeComponent } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/subflows/parallel/parallel-node'
|
||||
import { WorkflowBlock } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/workflow-block'
|
||||
import { WorkflowEdge } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-edge/workflow-edge'
|
||||
import { getBlock } from '@/blocks'
|
||||
|
||||
@@ -10,7 +10,7 @@ import { fetchAndProcessAirtablePayloads, formatWebhookInput } from '@/lib/webho
|
||||
import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/db-helpers'
|
||||
import { updateWorkflowRunCounts } from '@/lib/workflows/utils'
|
||||
import { db } from '@/db'
|
||||
import { environment as environmentTable, userStats } from '@/db/schema'
|
||||
import { environment as environmentTable, userStats, webhook } from '@/db/schema'
|
||||
import { Executor } from '@/executor'
|
||||
import { Serializer } from '@/serializer'
|
||||
import { mergeSubblockState } from '@/stores/workflows/server-utils'
|
||||
@@ -141,10 +141,21 @@ export const webhookExecution = task({
|
||||
`[${requestId}] Processing Airtable webhook via fetchAndProcessAirtablePayloads`
|
||||
)
|
||||
|
||||
// Load the actual webhook record from database to get providerConfig
|
||||
const [webhookRecord] = await db
|
||||
.select()
|
||||
.from(webhook)
|
||||
.where(eq(webhook.id, payload.webhookId))
|
||||
.limit(1)
|
||||
|
||||
if (!webhookRecord) {
|
||||
throw new Error(`Webhook record not found: ${payload.webhookId}`)
|
||||
}
|
||||
|
||||
const webhookData = {
|
||||
id: payload.webhookId,
|
||||
provider: payload.provider,
|
||||
providerConfig: {}, // Will be loaded within fetchAndProcessAirtablePayloads
|
||||
providerConfig: webhookRecord.providerConfig,
|
||||
}
|
||||
|
||||
// Create a mock workflow object for Airtable processing
|
||||
@@ -153,12 +164,85 @@ export const webhookExecution = task({
|
||||
userId: payload.userId,
|
||||
}
|
||||
|
||||
await fetchAndProcessAirtablePayloads(webhookData, mockWorkflow, requestId)
|
||||
// Get the processed Airtable input
|
||||
const airtableInput = await fetchAndProcessAirtablePayloads(
|
||||
webhookData,
|
||||
mockWorkflow,
|
||||
requestId
|
||||
)
|
||||
|
||||
// If we got input (changes), execute the workflow like other providers
|
||||
if (airtableInput) {
|
||||
logger.info(`[${requestId}] Executing workflow with Airtable changes`)
|
||||
|
||||
// Create executor and execute (same as standard webhook flow)
|
||||
const executor = new Executor({
|
||||
workflow: serializedWorkflow,
|
||||
currentBlockStates: processedBlockStates,
|
||||
envVarValues: decryptedEnvVars,
|
||||
workflowInput: airtableInput,
|
||||
workflowVariables,
|
||||
contextExtensions: {
|
||||
executionId,
|
||||
workspaceId: '',
|
||||
},
|
||||
})
|
||||
|
||||
// Set up logging on the executor
|
||||
loggingSession.setupExecutor(executor)
|
||||
|
||||
// Execute the workflow
|
||||
const result = await executor.execute(payload.workflowId, payload.blockId)
|
||||
|
||||
// Check if we got a StreamingExecution result
|
||||
const executionResult =
|
||||
'stream' in result && 'execution' in result ? result.execution : result
|
||||
|
||||
logger.info(`[${requestId}] Airtable webhook execution completed`, {
|
||||
success: executionResult.success,
|
||||
workflowId: payload.workflowId,
|
||||
})
|
||||
|
||||
// Update workflow run counts on success
|
||||
if (executionResult.success) {
|
||||
await updateWorkflowRunCounts(payload.workflowId)
|
||||
|
||||
// Track execution in user stats
|
||||
await db
|
||||
.update(userStats)
|
||||
.set({
|
||||
totalWebhookTriggers: sql`total_webhook_triggers + 1`,
|
||||
lastActive: sql`now()`,
|
||||
})
|
||||
.where(eq(userStats.userId, payload.userId))
|
||||
}
|
||||
|
||||
// Build trace spans and complete logging session
|
||||
const { traceSpans, totalDuration } = buildTraceSpans(executionResult)
|
||||
|
||||
await loggingSession.safeComplete({
|
||||
endedAt: new Date().toISOString(),
|
||||
totalDurationMs: totalDuration || 0,
|
||||
finalOutput: executionResult.output || {},
|
||||
traceSpans: traceSpans as any,
|
||||
})
|
||||
|
||||
return {
|
||||
success: executionResult.success,
|
||||
workflowId: payload.workflowId,
|
||||
executionId,
|
||||
output: executionResult.output,
|
||||
executedAt: new Date().toISOString(),
|
||||
provider: payload.provider,
|
||||
}
|
||||
}
|
||||
// No changes to process
|
||||
logger.info(`[${requestId}] No Airtable changes to process`)
|
||||
|
||||
await loggingSession.safeComplete({
|
||||
endedAt: new Date().toISOString(),
|
||||
totalDurationMs: 0,
|
||||
finalOutput: { message: 'Airtable webhook processed' },
|
||||
finalOutput: { message: 'No Airtable changes to process' },
|
||||
traceSpans: [],
|
||||
})
|
||||
|
||||
@@ -166,7 +250,7 @@ export const webhookExecution = task({
|
||||
success: true,
|
||||
workflowId: payload.workflowId,
|
||||
executionId,
|
||||
output: { message: 'Airtable webhook processed' },
|
||||
output: { message: 'No Airtable changes to process' },
|
||||
executedAt: new Date().toISOString(),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -213,19 +213,5 @@ export const DiscordBlock: BlockConfig<DiscordResponse> = {
  outputs: {
    message: { type: 'string', description: 'Message content' },
    data: { type: 'json', description: 'Response data' },
    // Trigger outputs
    content: { type: 'string', description: 'Message content from Discord webhook' },
    username: { type: 'string', description: 'Username of the sender (if provided)' },
    avatar_url: { type: 'string', description: 'Avatar URL of the sender (if provided)' },
    timestamp: { type: 'string', description: 'Timestamp when the webhook was triggered' },
    webhook_id: { type: 'string', description: 'Discord webhook identifier' },
    webhook_token: { type: 'string', description: 'Discord webhook token' },
    guild_id: { type: 'string', description: 'Discord server/guild ID' },
    channel_id: { type: 'string', description: 'Discord channel ID where the event occurred' },
    embeds: { type: 'string', description: 'Embedded content data (if any)' },
  },
  triggers: {
    enabled: true,
    available: ['discord_webhook'],
  },
}

@@ -125,6 +125,14 @@ export const MicrosoftTeamsBlock: BlockConfig<MicrosoftTeamsResponse> = {
      condition: { field: 'operation', value: ['write_chat', 'write_channel'] },
      required: true,
    },
    {
      id: 'triggerConfig',
      title: 'Trigger Configuration',
      type: 'trigger-config',
      layout: 'full',
      triggerProvider: 'microsoftteams',
      availableTriggers: ['microsoftteams_webhook'],
    },
  ],
  tools: {
    access: [

@@ -46,6 +46,14 @@ export const WhatsAppBlock: BlockConfig<WhatsAppResponse> = {
      password: true,
      required: true,
    },
    {
      id: 'triggerConfig',
      title: 'Trigger Configuration',
      type: 'trigger-config',
      layout: 'full',
      triggerProvider: 'whatsapp',
      availableTriggers: ['whatsapp_webhook'],
    },
  ],
  tools: {
    access: ['whatsapp_send_message'],

@@ -539,8 +539,6 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
|
||||
const containingLoopBlock = blocks[loopId]
|
||||
if (containingLoopBlock) {
|
||||
const loopBlockName = containingLoopBlock.name || containingLoopBlock.type
|
||||
const normalizedLoopBlockName = normalizeBlockName(loopBlockName)
|
||||
contextualTags.push(`${normalizedLoopBlockName}.results`)
|
||||
|
||||
loopBlockGroup = {
|
||||
blockName: loopBlockName,
|
||||
@@ -565,8 +563,6 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
|
||||
const containingParallelBlock = blocks[parallelId]
|
||||
if (containingParallelBlock) {
|
||||
const parallelBlockName = containingParallelBlock.name || containingParallelBlock.type
|
||||
const normalizedParallelBlockName = normalizeBlockName(parallelBlockName)
|
||||
contextualTags.push(`${normalizedParallelBlockName}.results`)
|
||||
|
||||
parallelBlockGroup = {
|
||||
blockName: parallelBlockName,
|
||||
@@ -803,11 +799,23 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
|
||||
})
|
||||
} else {
|
||||
const path = tagParts.slice(1).join('.')
|
||||
directTags.push({
|
||||
key: path || group.blockName,
|
||||
display: path || group.blockName,
|
||||
fullTag: tag,
|
||||
})
|
||||
// Handle contextual tags for loop/parallel blocks (single words like 'index', 'currentItem')
|
||||
if (
|
||||
(group.blockType === 'loop' || group.blockType === 'parallel') &&
|
||||
tagParts.length === 1
|
||||
) {
|
||||
directTags.push({
|
||||
key: tag,
|
||||
display: tag,
|
||||
fullTag: tag,
|
||||
})
|
||||
} else {
|
||||
directTags.push({
|
||||
key: path || group.blockName,
|
||||
display: path || group.blockName,
|
||||
fullTag: tag,
|
||||
})
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
@@ -861,8 +869,25 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
|
||||
|
||||
const handleTagSelect = useCallback(
|
||||
(tag: string, blockGroup?: BlockTagGroup) => {
|
||||
const textBeforeCursor = inputValue.slice(0, cursorPosition)
|
||||
const textAfterCursor = inputValue.slice(cursorPosition)
|
||||
// Use the live DOM selection/value if available to avoid off-by-one state
|
||||
// when users type and immediately confirm a selection.
|
||||
let liveCursor = cursorPosition
|
||||
let liveValue = inputValue
|
||||
|
||||
if (typeof window !== 'undefined' && document?.activeElement) {
|
||||
const activeEl = document.activeElement as HTMLInputElement | HTMLTextAreaElement | null
|
||||
if (activeEl && typeof activeEl.selectionStart === 'number') {
|
||||
liveCursor = activeEl.selectionStart ?? cursorPosition
|
||||
// Prefer the active element value if present. This ensures we include the most
|
||||
// recently typed character(s) that might not yet be reflected in React state.
|
||||
if (typeof (activeEl as any).value === 'string') {
|
||||
liveValue = (activeEl as any).value
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const textBeforeCursor = liveValue.slice(0, liveCursor)
|
||||
const textAfterCursor = liveValue.slice(liveCursor)
|
||||
|
||||
const lastOpenBracket = textBeforeCursor.lastIndexOf('<')
|
||||
if (lastOpenBracket === -1) return
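The change above prefers the DOM's live selection and value over React state so that a character typed immediately before confirming a tag is not dropped. A generic sketch of the same idea (the helper name is illustrative, not part of the codebase):

```typescript
// Illustrative helper: fall back to React state only when no editable element is focused.
function getLiveSelection(fallbackValue: string, fallbackCursor: number) {
  const el = document.activeElement as HTMLInputElement | HTMLTextAreaElement | null
  if (el && typeof el.selectionStart === 'number' && typeof el.value === 'string') {
    return { value: el.value, cursor: el.selectionStart }
  }
  return { value: fallbackValue, cursor: fallbackCursor }
}
```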
|
||||
|
||||
@@ -50,12 +50,14 @@ interface SocketContextType {
|
||||
value: any,
|
||||
operationId?: string
|
||||
) => void
|
||||
emitVariableUpdate: (variableId: string, field: string, value: any, operationId?: string) => void
|
||||
|
||||
emitCursorUpdate: (cursor: { x: number; y: number }) => void
|
||||
emitSelectionUpdate: (selection: { type: 'block' | 'edge' | 'none'; id?: string }) => void
|
||||
// Event handlers for receiving real-time updates
|
||||
onWorkflowOperation: (handler: (data: any) => void) => void
|
||||
onSubblockUpdate: (handler: (data: any) => void) => void
|
||||
onVariableUpdate: (handler: (data: any) => void) => void
|
||||
|
||||
onCursorUpdate: (handler: (data: any) => void) => void
|
||||
onSelectionUpdate: (handler: (data: any) => void) => void
|
||||
@@ -77,10 +79,12 @@ const SocketContext = createContext<SocketContextType>({
|
||||
leaveWorkflow: () => {},
|
||||
emitWorkflowOperation: () => {},
|
||||
emitSubblockUpdate: () => {},
|
||||
emitVariableUpdate: () => {},
|
||||
emitCursorUpdate: () => {},
|
||||
emitSelectionUpdate: () => {},
|
||||
onWorkflowOperation: () => {},
|
||||
onSubblockUpdate: () => {},
|
||||
onVariableUpdate: () => {},
|
||||
onCursorUpdate: () => {},
|
||||
onSelectionUpdate: () => {},
|
||||
onUserJoined: () => {},
|
||||
@@ -113,6 +117,7 @@ export function SocketProvider({ children, user }: SocketProviderProps) {
|
||||
const eventHandlers = useRef<{
|
||||
workflowOperation?: (data: any) => void
|
||||
subblockUpdate?: (data: any) => void
|
||||
variableUpdate?: (data: any) => void
|
||||
|
||||
cursorUpdate?: (data: any) => void
|
||||
selectionUpdate?: (data: any) => void
|
||||
@@ -292,6 +297,11 @@ export function SocketProvider({ children, user }: SocketProviderProps) {
|
||||
eventHandlers.current.subblockUpdate?.(data)
|
||||
})
|
||||
|
||||
// Variable update events
|
||||
socketInstance.on('variable-update', (data) => {
|
||||
eventHandlers.current.variableUpdate?.(data)
|
||||
})
|
||||
|
||||
// Workflow deletion events
|
||||
socketInstance.on('workflow-deleted', (data) => {
|
||||
logger.warn(`Workflow ${data.workflowId} has been deleted`)
|
||||
@@ -697,6 +707,30 @@ export function SocketProvider({ children, user }: SocketProviderProps) {
|
||||
[socket, currentWorkflowId]
|
||||
)
|
||||
|
||||
// Emit variable value updates
|
||||
const emitVariableUpdate = useCallback(
|
||||
(variableId: string, field: string, value: any, operationId?: string) => {
|
||||
// Only emit if socket is connected and we're in a valid workflow room
|
||||
if (socket && currentWorkflowId) {
|
||||
socket.emit('variable-update', {
|
||||
variableId,
|
||||
field,
|
||||
value,
|
||||
timestamp: Date.now(),
|
||||
operationId, // Include operation ID for queue tracking
|
||||
})
|
||||
} else {
|
||||
logger.warn('Cannot emit variable update: no socket connection or workflow room', {
|
||||
hasSocket: !!socket,
|
||||
currentWorkflowId,
|
||||
variableId,
|
||||
field,
|
||||
})
|
||||
}
|
||||
},
|
||||
[socket, currentWorkflowId]
|
||||
)
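A hedged consumer sketch for the new emitter; the `useSocket` accessor and the hook below are assumptions for illustration, and only the `emitVariableUpdate` signature comes from the context above:

```typescript
import { useCallback } from 'react'

// Assumed accessor for the SocketContext defined above.
declare function useSocket(): {
  emitVariableUpdate: (variableId: string, field: string, value: any, operationId?: string) => void
}

// Hypothetical hook: push a variable's new value to collaborators as the user edits it.
function useVariableValueSync(variableId: string) {
  const { emitVariableUpdate } = useSocket()

  return useCallback(
    (value: string) => {
      // The operation ID lets the queue on the other side correlate acks with this local edit.
      emitVariableUpdate(variableId, 'value', value, crypto.randomUUID())
    },
    [variableId, emitVariableUpdate]
  )
}
```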
|
||||
|
||||
// Cursor throttling optimized for database connection health
|
||||
const lastCursorEmit = useRef(0)
|
||||
const emitCursorUpdate = useCallback(
|
||||
@@ -732,6 +766,10 @@ export function SocketProvider({ children, user }: SocketProviderProps) {
|
||||
eventHandlers.current.subblockUpdate = handler
|
||||
}, [])
|
||||
|
||||
const onVariableUpdate = useCallback((handler: (data: any) => void) => {
|
||||
eventHandlers.current.variableUpdate = handler
|
||||
}, [])
|
||||
|
||||
const onCursorUpdate = useCallback((handler: (data: any) => void) => {
|
||||
eventHandlers.current.cursorUpdate = handler
|
||||
}, [])
|
||||
@@ -776,11 +814,13 @@ export function SocketProvider({ children, user }: SocketProviderProps) {
|
||||
leaveWorkflow,
|
||||
emitWorkflowOperation,
|
||||
emitSubblockUpdate,
|
||||
emitVariableUpdate,
|
||||
|
||||
emitCursorUpdate,
|
||||
emitSelectionUpdate,
|
||||
onWorkflowOperation,
|
||||
onSubblockUpdate,
|
||||
onVariableUpdate,
|
||||
|
||||
onCursorUpdate,
|
||||
onSelectionUpdate,
|
||||
|
||||
apps/sim/db/migrations/0071_free_sharon_carter.sql (new file, 1 line)
@@ -0,0 +1 @@
ALTER TABLE "workflow" ADD COLUMN "pinned_api_key" text;

apps/sim/db/migrations/meta/0071_snapshot.json (new file, 5869 lines; diff suppressed because it is too large)

@@ -491,6 +491,13 @@
      "when": 1754682155062,
      "tag": "0070_charming_wrecking_crew",
      "breakpoints": true
    },
    {
      "idx": 71,
      "version": "7",
      "when": 1754719531015,
      "tag": "0071_free_sharon_carter",
      "breakpoints": true
    }
  ]
}

@@ -130,6 +130,8 @@ export const workflow = pgTable(
    isDeployed: boolean('is_deployed').notNull().default(false),
    deployedState: json('deployed_state'),
    deployedAt: timestamp('deployed_at'),
    // When set, only this API key is authorized for execution
    pinnedApiKey: text('pinned_api_key'),
    collaborators: json('collaborators').notNull().default('[]'),
    runCount: integer('run_count').notNull().default(0),
    lastRunAt: timestamp('last_run_at'),
|
||||
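The new `pinned_api_key` column narrows execution auth to a single key. A sketch of the check an execution endpoint could perform; the lookup helper and error text are assumptions, only the column and its comment come from the schema above:

```typescript
// Assumed accessor returning the relevant workflow row.
declare function getWorkflowById(id: string): Promise<{ pinnedApiKey: string | null }>

// When pinnedApiKey is set, only that key may execute the workflow.
async function assertApiKeyAllowed(workflowId: string, presentedKey: string): Promise<void> {
  const wf = await getWorkflowById(workflowId)
  if (wf.pinnedApiKey && wf.pinnedApiKey !== presentedKey) {
    throw new Error('Workflow execution is pinned to a different API key')
  }
}
```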
@@ -23,14 +23,172 @@ export class TriggerBlockHandler implements BlockHandler {
|
||||
async execute(
|
||||
block: SerializedBlock,
|
||||
inputs: Record<string, any>,
|
||||
_context: ExecutionContext
|
||||
context: ExecutionContext
|
||||
): Promise<any> {
|
||||
logger.info(`Executing trigger block: ${block.id} (Type: ${block.metadata?.id})`)
|
||||
|
||||
// Trigger blocks don't execute anything - they just pass through their input data
|
||||
// The input data comes from the webhook execution context or initial workflow inputs
|
||||
// For trigger blocks, return the starter block's output which contains the workflow input
|
||||
// This ensures webhook data like message, sender, chat, etc. are accessible
|
||||
const starterBlock = context.workflow?.blocks?.find((b) => b.metadata?.id === 'starter')
|
||||
if (starterBlock) {
|
||||
const starterState = context.blockStates.get(starterBlock.id)
|
||||
if (starterState?.output && Object.keys(starterState.output).length > 0) {
|
||||
const starterOutput = starterState.output
|
||||
|
||||
// For trigger blocks, return the inputs directly - these contain the webhook/trigger data
|
||||
// Generic handling for webhook triggers - extract provider-specific data
|
||||
// Check if this is a webhook execution with nested structure
|
||||
if (starterOutput.webhook?.data) {
|
||||
const webhookData = starterOutput.webhook.data
|
||||
const provider = webhookData.provider
|
||||
|
||||
logger.debug(`Processing webhook trigger for block ${block.id}`, {
|
||||
provider,
|
||||
blockType: block.metadata?.id,
|
||||
})
|
||||
|
||||
// Extract the flattened properties that should be at root level
|
||||
const result: any = {
|
||||
// Always keep the input at root level
|
||||
input: starterOutput.input,
|
||||
}
|
||||
|
||||
// FIRST: Copy all existing top-level properties (like 'event', 'message', etc.)
|
||||
// This ensures that properties already flattened in webhook utils are preserved
|
||||
for (const [key, value] of Object.entries(starterOutput)) {
|
||||
if (key !== 'webhook' && key !== provider) {
|
||||
result[key] = value
|
||||
}
|
||||
}
|
||||
|
||||
// SECOND: Generic extraction logic based on common webhook patterns
|
||||
// Pattern 1: Provider-specific nested object (telegram, microsoftteams, etc.)
|
||||
if (provider && starterOutput[provider]) {
|
||||
// Copy all properties from provider object to root level for direct access
|
||||
const providerData = starterOutput[provider]
|
||||
|
||||
for (const [key, value] of Object.entries(providerData)) {
|
||||
// Special handling for GitHub provider - copy all properties
|
||||
if (provider === 'github') {
|
||||
// For GitHub, copy all properties (objects and primitives) to root level
|
||||
if (!result[key]) {
|
||||
// Special handling for complex objects that might have enumeration issues
|
||||
if (typeof value === 'object' && value !== null) {
|
||||
try {
|
||||
// Deep clone complex objects to avoid reference issues
|
||||
result[key] = JSON.parse(JSON.stringify(value))
|
||||
} catch (error) {
|
||||
// If JSON serialization fails, try direct assignment
|
||||
result[key] = value
|
||||
}
|
||||
} else {
|
||||
result[key] = value
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// For other providers, keep existing logic (only copy objects)
|
||||
if (typeof value === 'object' && value !== null) {
|
||||
// Don't overwrite existing top-level properties
|
||||
if (!result[key]) {
|
||||
result[key] = value
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Keep nested structure for backwards compatibility
|
||||
result[provider] = providerData
|
||||
|
||||
// Special handling for GitHub complex objects that might not be copied by the main loop
|
||||
if (provider === 'github') {
|
||||
// Comprehensive GitHub object extraction from multiple possible sources
|
||||
const githubObjects = ['repository', 'sender', 'pusher', 'commits', 'head_commit']
|
||||
|
||||
for (const objName of githubObjects) {
|
||||
// ALWAYS try to get the object, even if something exists (fix for conflicts)
|
||||
let objectValue = null
|
||||
|
||||
// Source 1: Direct from provider data
|
||||
if (providerData[objName]) {
|
||||
objectValue = providerData[objName]
|
||||
}
|
||||
// Source 2: From webhook payload (raw GitHub webhook)
|
||||
else if (starterOutput.webhook?.data?.payload?.[objName]) {
|
||||
objectValue = starterOutput.webhook.data.payload[objName]
|
||||
}
|
||||
// Source 3: For commits, try parsing JSON string version if no object found
|
||||
else if (objName === 'commits' && typeof result.commits === 'string') {
|
||||
try {
|
||||
objectValue = JSON.parse(result.commits)
|
||||
} catch (e) {
|
||||
// Keep as string if parsing fails
|
||||
objectValue = result.commits
|
||||
}
|
||||
}
|
||||
|
||||
// FORCE the object to root level (removed the !result[objName] condition)
|
||||
if (objectValue !== null && objectValue !== undefined) {
|
||||
result[objName] = objectValue
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Pattern 2: Provider data directly in webhook.data (based on actual structure)
|
||||
else if (provider && webhookData[provider]) {
|
||||
const providerData = webhookData[provider]
|
||||
|
||||
// Extract all provider properties to root level
|
||||
for (const [key, value] of Object.entries(providerData)) {
|
||||
if (typeof value === 'object' && value !== null) {
|
||||
// Don't overwrite existing top-level properties
|
||||
if (!result[key]) {
|
||||
result[key] = value
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Keep nested structure for backwards compatibility
|
||||
result[provider] = providerData
|
||||
}
|
||||
|
||||
// Pattern 3: Email providers with data in webhook.data.payload.email (Gmail, Outlook)
|
||||
else if (
|
||||
provider &&
|
||||
(provider === 'gmail' || provider === 'outlook') &&
|
||||
webhookData.payload?.email
|
||||
) {
|
||||
const emailData = webhookData.payload.email
|
||||
|
||||
// Flatten email fields to root level for direct access
|
||||
for (const [key, value] of Object.entries(emailData)) {
|
||||
if (!result[key]) {
|
||||
result[key] = value
|
||||
}
|
||||
}
|
||||
|
||||
// Keep the email object for backwards compatibility
|
||||
result.email = emailData
|
||||
|
||||
// Also keep timestamp if present in payload
|
||||
if (webhookData.payload.timestamp) {
|
||||
result.timestamp = webhookData.payload.timestamp
|
||||
}
|
||||
}
|
||||
|
||||
// Always keep webhook metadata
|
||||
if (starterOutput.webhook) result.webhook = starterOutput.webhook
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
logger.debug(`Returning starter block output for trigger block ${block.id}`, {
|
||||
starterOutputKeys: Object.keys(starterOutput),
|
||||
})
|
||||
return starterOutput
|
||||
}
|
||||
}
|
||||
|
||||
// Fallback to resolved inputs if no starter block output
|
||||
if (inputs && Object.keys(inputs).length > 0) {
|
||||
logger.debug(`Returning trigger inputs for block ${block.id}`, {
|
||||
inputKeys: Object.keys(inputs),
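The handler above hoists provider data to the root of the trigger output while keeping the nested copies. An illustrative before/after for a generic provider (the field values are made up):

```typescript
// Illustrative shapes only; the values are made up.
const starterOutput = {
  input: { text: 'hi' },
  webhook: { data: { provider: 'telegram' } },
  telegram: { message: { text: 'hi', chat: { id: 42 } } },
}

// After the pass-through logic, object-valued provider fields are also reachable at the root:
const triggerOutput = {
  input: { text: 'hi' },
  message: { text: 'hi', chat: { id: 42 } }, // hoisted from starterOutput.telegram
  telegram: { message: { text: 'hi', chat: { id: 42 } } }, // nested copy kept for backwards compatibility
  webhook: { data: { provider: 'telegram' } }, // webhook metadata is always preserved
}
```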
|
||||
|
||||
@@ -107,15 +107,13 @@ describe('WorkflowBlockHandler', () => {
|
||||
|
||||
// Simulate a cycle by adding the execution to the stack
|
||||
|
||||
;(WorkflowBlockHandler as any).executionStack.add('parent-workflow-id_sub_child-workflow-id')
|
||||
;(WorkflowBlockHandler as any).executionStack.add(
|
||||
'parent-workflow-id_sub_child-workflow-id_workflow-block-1'
|
||||
)
|
||||
|
||||
const result = await handler.execute(mockBlock, inputs, mockContext)
|
||||
|
||||
expect(result).toEqual({
|
||||
success: false,
|
||||
error: 'Cyclic workflow dependency detected: parent-workflow-id_sub_child-workflow-id',
|
||||
childWorkflowName: 'child-workflow-id',
|
||||
})
|
||||
await expect(handler.execute(mockBlock, inputs, mockContext)).rejects.toThrow(
|
||||
'Error in child workflow "child-workflow-id": Cyclic workflow dependency detected: parent-workflow-id_sub_child-workflow-id_workflow-block-1'
|
||||
)
|
||||
})
|
||||
|
||||
it('should enforce maximum depth limit', async () => {
|
||||
@@ -128,13 +126,9 @@ describe('WorkflowBlockHandler', () => {
|
||||
'level1_sub_level2_sub_level3_sub_level4_sub_level5_sub_level6_sub_level7_sub_level8_sub_level9_sub_level10_sub_level11',
|
||||
}
|
||||
|
||||
const result = await handler.execute(mockBlock, inputs, deepContext)
|
||||
|
||||
expect(result).toEqual({
|
||||
success: false,
|
||||
error: 'Maximum workflow nesting depth of 10 exceeded',
|
||||
childWorkflowName: 'child-workflow-id',
|
||||
})
|
||||
await expect(handler.execute(mockBlock, inputs, deepContext)).rejects.toThrow(
|
||||
'Error in child workflow "child-workflow-id": Maximum workflow nesting depth of 10 exceeded'
|
||||
)
|
||||
})
|
||||
|
||||
it('should handle child workflow not found', async () => {
|
||||
@@ -146,13 +140,9 @@ describe('WorkflowBlockHandler', () => {
|
||||
statusText: 'Not Found',
|
||||
})
|
||||
|
||||
const result = await handler.execute(mockBlock, inputs, mockContext)
|
||||
|
||||
expect(result).toEqual({
|
||||
success: false,
|
||||
error: 'Child workflow non-existent-workflow not found',
|
||||
childWorkflowName: 'non-existent-workflow',
|
||||
})
|
||||
await expect(handler.execute(mockBlock, inputs, mockContext)).rejects.toThrow(
|
||||
'Error in child workflow "non-existent-workflow": Child workflow non-existent-workflow not found'
|
||||
)
|
||||
})
|
||||
|
||||
it('should handle fetch errors gracefully', async () => {
|
||||
@@ -160,13 +150,9 @@ describe('WorkflowBlockHandler', () => {
|
||||
|
||||
mockFetch.mockRejectedValueOnce(new Error('Network error'))
|
||||
|
||||
const result = await handler.execute(mockBlock, inputs, mockContext)
|
||||
|
||||
expect(result).toEqual({
|
||||
success: false,
|
||||
error: 'Child workflow child-workflow-id not found',
|
||||
childWorkflowName: 'child-workflow-id',
|
||||
})
|
||||
await expect(handler.execute(mockBlock, inputs, mockContext)).rejects.toThrow(
|
||||
'Error in child workflow "child-workflow-id": Child workflow child-workflow-id not found'
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
|
||||
@@ -46,8 +46,8 @@ export class WorkflowBlockHandler implements BlockHandler {
      throw new Error(`Maximum workflow nesting depth of ${MAX_WORKFLOW_DEPTH} exceeded`)
    }

    // Check for cycles
    const executionId = `${context.workflowId}_sub_${workflowId}`
    // Check for cycles - include block ID to differentiate parallel executions
    const executionId = `${context.workflowId}_sub_${workflowId}_${block.id}`
    if (WorkflowBlockHandler.executionStack.has(executionId)) {
      throw new Error(`Cyclic workflow dependency detected: ${executionId}`)
    }
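
A minimal standalone sketch of the cycle-check change above (the helper name and shape are illustrative, not the handler's actual code):

```typescript
// Keying the in-flight set by parent workflow, child workflow, AND the calling block's ID
// lets two parallel workflow blocks invoke the same child without tripping the cycle check,
// while a real cycle (the same block re-entering the same child) is still detected.
const executionStack = new Set<string>()

function enterChildExecution(
  parentWorkflowId: string,
  childWorkflowId: string,
  blockId: string
): () => void {
  const executionId = `${parentWorkflowId}_sub_${childWorkflowId}_${blockId}`
  if (executionStack.has(executionId)) {
    throw new Error(`Cyclic workflow dependency detected: ${executionId}`)
  }
  executionStack.add(executionId)
  // The caller is expected to invoke the returned cleanup in a finally block.
  return () => executionStack.delete(executionId)
}
```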
@@ -90,6 +90,9 @@ export class WorkflowBlockHandler implements BlockHandler {
|
||||
workflowInput: childWorkflowInput,
|
||||
envVarValues: context.environmentVariables,
|
||||
workflowVariables: childWorkflow.variables || {},
|
||||
contextExtensions: {
|
||||
isChildExecution: true, // Prevent child executor from managing global state
|
||||
},
|
||||
})
|
||||
|
||||
const startTime = performance.now()
|
||||
@@ -105,12 +108,25 @@ export class WorkflowBlockHandler implements BlockHandler {
|
||||
logger.info(`Child workflow ${childWorkflowName} completed in ${Math.round(duration)}ms`)
|
||||
|
||||
// Map child workflow output to parent block output
|
||||
return this.mapChildOutputToParent(result, workflowId, childWorkflowName, duration)
|
||||
const mappedResult = this.mapChildOutputToParent(
|
||||
result,
|
||||
workflowId,
|
||||
childWorkflowName,
|
||||
duration
|
||||
)
|
||||
|
||||
// If the child workflow failed, throw an error to trigger proper error handling in the parent
|
||||
if ((mappedResult as any).success === false) {
|
||||
const childError = (mappedResult as any).error || 'Unknown error'
|
||||
throw new Error(`Error in child workflow "${childWorkflowName}": ${childError}`)
|
||||
}
|
||||
|
||||
return mappedResult
|
||||
} catch (error: any) {
|
||||
logger.error(`Error executing child workflow ${workflowId}:`, error)
|
||||
|
||||
// Clean up execution stack in case of error
|
||||
const executionId = `${context.workflowId}_sub_${workflowId}`
|
||||
const executionId = `${context.workflowId}_sub_${workflowId}_${block.id}`
|
||||
WorkflowBlockHandler.executionStack.delete(executionId)
|
||||
|
||||
// Get workflow name for error reporting
|
||||
@@ -118,11 +134,15 @@ export class WorkflowBlockHandler implements BlockHandler {
|
||||
const workflowMetadata = workflows[workflowId]
|
||||
const childWorkflowName = workflowMetadata?.name || workflowId
|
||||
|
||||
return {
|
||||
success: false,
|
||||
error: error.message || 'Child workflow execution failed',
|
||||
childWorkflowName: childWorkflowName,
|
||||
} as Record<string, any>
|
||||
// Enhance error message with child workflow context
|
||||
const originalError = error.message || 'Unknown error'
|
||||
|
||||
// Check if error message already has child workflow context to avoid duplication
|
||||
if (originalError.startsWith('Error in child workflow')) {
|
||||
throw error // Re-throw as-is to avoid duplication
|
||||
}
|
||||
|
||||
throw new Error(`Error in child workflow "${childWorkflowName}": ${originalError}`)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
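
A minimal sketch of the error-wrapping rule applied in the catch block above (the helper name is hypothetical; the prefix check mirrors the code shown, so nested child failures are not double-wrapped):

```typescript
// Child workflow failures are surfaced as thrown errors prefixed with the child's name.
// Errors that already carry that prefix are re-thrown unchanged to avoid stacking prefixes.
function wrapChildWorkflowError(childWorkflowName: string, error: unknown): never {
  const originalError = error instanceof Error ? error.message : 'Unknown error'
  if (originalError.startsWith('Error in child workflow')) {
    throw error instanceof Error ? error : new Error(originalError)
  }
  throw new Error(`Error in child workflow "${childWorkflowName}": ${originalError}`)
}
```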
@@ -1165,4 +1165,341 @@ describe('Executor', () => {
|
||||
).toBe(false)
|
||||
})
|
||||
})
|
||||
|
||||
/**
|
||||
* Parallel workflow blocks tests - testing the fix for UI state interference
|
||||
*/
|
||||
describe('parallel workflow blocks execution', () => {
|
||||
it.concurrent(
|
||||
'should prevent child executors from interfering with parent UI state',
|
||||
async () => {
|
||||
// Create a workflow with parallel workflow blocks
|
||||
const workflow = {
|
||||
version: '1.0',
|
||||
blocks: [
|
||||
{
|
||||
id: 'starter',
|
||||
position: { x: 0, y: 0 },
|
||||
metadata: { id: BlockType.STARTER, name: 'Starter Block' },
|
||||
config: { tool: 'starter', params: {} },
|
||||
inputs: {} as Record<string, ParamType>,
|
||||
outputs: {} as Record<string, BlockOutput>,
|
||||
enabled: true,
|
||||
},
|
||||
{
|
||||
id: 'workflow-block-1',
|
||||
position: { x: 100, y: 0 },
|
||||
metadata: { id: BlockType.WORKFLOW, name: 'Workflow Block 1' },
|
||||
config: {
|
||||
tool: 'workflow',
|
||||
params: {
|
||||
workflowId: 'child-workflow-1',
|
||||
input: {},
|
||||
},
|
||||
},
|
||||
inputs: {} as Record<string, ParamType>,
|
||||
outputs: { output: 'json' as BlockOutput },
|
||||
enabled: true,
|
||||
},
|
||||
{
|
||||
id: 'workflow-block-2',
|
||||
position: { x: 100, y: 100 },
|
||||
metadata: { id: BlockType.WORKFLOW, name: 'Workflow Block 2' },
|
||||
config: {
|
||||
tool: 'workflow',
|
||||
params: {
|
||||
workflowId: 'child-workflow-2',
|
||||
input: {},
|
||||
},
|
||||
},
|
||||
inputs: {} as Record<string, ParamType>,
|
||||
outputs: { output: 'json' as BlockOutput },
|
||||
enabled: true,
|
||||
},
|
||||
],
|
||||
connections: [
|
||||
{ source: 'starter', target: 'workflow-block-1' },
|
||||
{ source: 'starter', target: 'workflow-block-2' },
|
||||
],
|
||||
loops: {},
|
||||
}
|
||||
|
||||
const executor = new Executor({
|
||||
workflow,
|
||||
workflowInput: {},
|
||||
})
|
||||
|
||||
const result = await executor.execute('test-workflow-id')
|
||||
|
||||
// Verify execution completed (may succeed or fail depending on child workflow availability)
|
||||
expect(result).toBeDefined()
|
||||
if ('success' in result) {
|
||||
// Either success or failure is acceptable in test environment
|
||||
expect(typeof result.success).toBe('boolean')
|
||||
}
|
||||
}
|
||||
)
|
||||
|
||||
it.concurrent('should handle workflow blocks with isChildExecution flag', async () => {
|
||||
const workflow = {
|
||||
version: '1.0',
|
||||
blocks: [
|
||||
{
|
||||
id: 'starter',
|
||||
position: { x: 0, y: 0 },
|
||||
metadata: { id: BlockType.STARTER, name: 'Starter Block' },
|
||||
config: { tool: 'starter', params: {} },
|
||||
inputs: {} as Record<string, ParamType>,
|
||||
outputs: {} as Record<string, BlockOutput>,
|
||||
enabled: true,
|
||||
},
|
||||
{
|
||||
id: 'workflow-block',
|
||||
position: { x: 100, y: 0 },
|
||||
metadata: { id: BlockType.WORKFLOW, name: 'Workflow Block' },
|
||||
config: {
|
||||
tool: 'workflow',
|
||||
params: {
|
||||
workflowId: 'child-workflow',
|
||||
input: {},
|
||||
},
|
||||
},
|
||||
inputs: {} as Record<string, ParamType>,
|
||||
outputs: { output: 'json' as BlockOutput },
|
||||
enabled: true,
|
||||
},
|
||||
],
|
||||
connections: [{ source: 'starter', target: 'workflow-block' }],
|
||||
loops: {},
|
||||
}
|
||||
|
||||
const executor = new Executor({
|
||||
workflow,
|
||||
workflowInput: {},
|
||||
})
|
||||
|
||||
// Verify that child executor is created with isChildExecution flag
|
||||
const result = await executor.execute('test-workflow-id')
|
||||
|
||||
expect(result).toBeDefined()
|
||||
if ('success' in result) {
|
||||
// Either success or failure is acceptable in test environment
|
||||
expect(typeof result.success).toBe('boolean')
|
||||
}
|
||||
})
|
||||
|
||||
it.concurrent(
|
||||
'should handle multiple parallel workflow blocks without state conflicts',
|
||||
async () => {
|
||||
const workflow = {
|
||||
version: '1.0',
|
||||
blocks: [
|
||||
{
|
||||
id: 'starter',
|
||||
position: { x: 0, y: 0 },
|
||||
metadata: { id: BlockType.STARTER, name: 'Starter Block' },
|
||||
config: { tool: 'starter', params: {} },
|
||||
inputs: {} as Record<string, ParamType>,
|
||||
outputs: {} as Record<string, BlockOutput>,
|
||||
enabled: true,
|
||||
},
|
||||
{
|
||||
id: 'workflow-block-1',
|
||||
position: { x: 100, y: 0 },
|
||||
metadata: { id: BlockType.WORKFLOW, name: 'Workflow Block 1' },
|
||||
config: {
|
||||
tool: 'workflow',
|
||||
params: {
|
||||
workflowId: 'child-workflow-1',
|
||||
input: {},
|
||||
},
|
||||
},
|
||||
inputs: {} as Record<string, ParamType>,
|
||||
outputs: { output: 'json' as BlockOutput },
|
||||
enabled: true,
|
||||
},
|
||||
{
|
||||
id: 'workflow-block-2',
|
||||
position: { x: 100, y: 100 },
|
||||
metadata: { id: BlockType.WORKFLOW, name: 'Workflow Block 2' },
|
||||
config: {
|
||||
tool: 'workflow',
|
||||
params: {
|
||||
workflowId: 'child-workflow-2',
|
||||
input: {},
|
||||
},
|
||||
},
|
||||
inputs: {} as Record<string, ParamType>,
|
||||
outputs: { output: 'json' as BlockOutput },
|
||||
enabled: true,
|
||||
},
|
||||
{
|
||||
id: 'workflow-block-3',
|
||||
position: { x: 100, y: 200 },
|
||||
metadata: { id: BlockType.WORKFLOW, name: 'Workflow Block 3' },
|
||||
config: {
|
||||
tool: 'workflow',
|
||||
params: {
|
||||
workflowId: 'child-workflow-3',
|
||||
input: {},
|
||||
},
|
||||
},
|
||||
inputs: {} as Record<string, ParamType>,
|
||||
outputs: { output: 'json' as BlockOutput },
|
||||
enabled: true,
|
||||
},
|
||||
],
|
||||
connections: [
|
||||
{ source: 'starter', target: 'workflow-block-1' },
|
||||
{ source: 'starter', target: 'workflow-block-2' },
|
||||
{ source: 'starter', target: 'workflow-block-3' },
|
||||
],
|
||||
loops: {},
|
||||
}
|
||||
|
||||
const executor = new Executor({
|
||||
workflow,
|
||||
workflowInput: {},
|
||||
})
|
||||
|
||||
const result = await executor.execute('test-workflow-id')
|
||||
|
||||
// Verify execution completed (may succeed or fail depending on child workflow availability)
|
||||
expect(result).toBeDefined()
|
||||
if ('success' in result) {
|
||||
// Either success or failure is acceptable in test environment
|
||||
expect(typeof result.success).toBe('boolean')
|
||||
}
|
||||
}
|
||||
)
|
||||
|
||||
it.concurrent(
|
||||
'should maintain proper execution flow for parallel workflow blocks',
|
||||
async () => {
|
||||
const workflow = {
|
||||
version: '1.0',
|
||||
blocks: [
|
||||
{
|
||||
id: 'starter',
|
||||
position: { x: 0, y: 0 },
|
||||
metadata: { id: BlockType.STARTER, name: 'Starter Block' },
|
||||
config: { tool: 'starter', params: {} },
|
||||
inputs: {} as Record<string, ParamType>,
|
||||
outputs: {} as Record<string, BlockOutput>,
|
||||
enabled: true,
|
||||
},
|
||||
{
|
||||
id: 'workflow-block-1',
|
||||
position: { x: 100, y: 0 },
|
||||
metadata: { id: BlockType.WORKFLOW, name: 'Workflow Block 1' },
|
||||
config: {
|
||||
tool: 'workflow',
|
||||
params: {
|
||||
workflowId: 'child-workflow-1',
|
||||
input: {},
|
||||
},
|
||||
},
|
||||
inputs: {} as Record<string, ParamType>,
|
||||
outputs: { output: 'json' as BlockOutput },
|
||||
enabled: true,
|
||||
},
|
||||
{
|
||||
id: 'workflow-block-2',
|
||||
position: { x: 100, y: 100 },
|
||||
metadata: { id: BlockType.WORKFLOW, name: 'Workflow Block 2' },
|
||||
config: {
|
||||
tool: 'workflow',
|
||||
params: {
|
||||
workflowId: 'child-workflow-2',
|
||||
input: {},
|
||||
},
|
||||
},
|
||||
inputs: {} as Record<string, ParamType>,
|
||||
outputs: { output: 'json' as BlockOutput },
|
||||
enabled: true,
|
||||
},
|
||||
],
|
||||
connections: [
|
||||
{ source: 'starter', target: 'workflow-block-1' },
|
||||
{ source: 'starter', target: 'workflow-block-2' },
|
||||
],
|
||||
loops: {},
|
||||
}
|
||||
|
||||
const executor = new Executor({
|
||||
workflow,
|
||||
workflowInput: {},
|
||||
})
|
||||
|
||||
const result = await executor.execute('test-workflow-id')
|
||||
|
||||
// Verify execution completed (may succeed or fail depending on child workflow availability)
|
||||
expect(result).toBeDefined()
|
||||
if ('success' in result) {
|
||||
// Either success or failure is acceptable in test environment
|
||||
expect(typeof result.success).toBe('boolean')
|
||||
}
|
||||
|
||||
// Verify that parallel blocks were handled correctly
|
||||
if ('logs' in result) {
|
||||
expect(result.logs).toBeDefined()
|
||||
expect(Array.isArray(result.logs)).toBe(true)
|
||||
}
|
||||
}
|
||||
)
|
||||
|
||||
it.concurrent('should propagate errors from child workflows to parent workflow', async () => {
|
||||
const workflow = {
|
||||
version: '1.0',
|
||||
blocks: [
|
||||
{
|
||||
id: 'starter',
|
||||
position: { x: 0, y: 0 },
|
||||
metadata: { id: BlockType.STARTER, name: 'Starter Block' },
|
||||
config: { tool: 'starter', params: {} },
|
||||
inputs: {} as Record<string, ParamType>,
|
||||
outputs: {} as Record<string, BlockOutput>,
|
||||
enabled: true,
|
||||
},
|
||||
{
|
||||
id: 'workflow-block',
|
||||
position: { x: 100, y: 0 },
|
||||
metadata: { id: BlockType.WORKFLOW, name: 'Failing Workflow Block' },
|
||||
config: {
|
||||
tool: 'workflow',
|
||||
params: {
|
||||
workflowId: 'failing-child-workflow',
|
||||
input: {},
|
||||
},
|
||||
},
|
||||
inputs: {} as Record<string, ParamType>,
|
||||
outputs: { output: 'json' as BlockOutput },
|
||||
enabled: true,
|
||||
},
|
||||
],
|
||||
connections: [{ source: 'starter', target: 'workflow-block' }],
|
||||
loops: {},
|
||||
}
|
||||
|
||||
const executor = new Executor({
|
||||
workflow,
|
||||
workflowInput: {},
|
||||
})
|
||||
|
||||
const result = await executor.execute('test-workflow-id')
|
||||
|
||||
// Verify that child workflow errors propagate to parent
|
||||
expect(result).toBeDefined()
|
||||
if ('success' in result) {
|
||||
// The workflow should fail due to child workflow failure
|
||||
expect(result.success).toBe(false)
|
||||
expect(result.error).toBeDefined()
|
||||
|
||||
// Error message should indicate it came from a child workflow
|
||||
if (result.error && typeof result.error === 'string') {
|
||||
expect(result.error).toContain('Error in child workflow')
|
||||
}
|
||||
}
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
@@ -72,6 +72,7 @@ export class Executor {
|
||||
private contextExtensions: any = {}
|
||||
private actualWorkflow: SerializedWorkflow
|
||||
private isCancelled = false
|
||||
private isChildExecution = false
|
||||
|
||||
constructor(
|
||||
private workflowParam:
|
||||
@@ -89,6 +90,7 @@ export class Executor {
|
||||
onStream?: (streamingExecution: StreamingExecution) => Promise<void>
|
||||
executionId?: string
|
||||
workspaceId?: string
|
||||
isChildExecution?: boolean
|
||||
}
|
||||
},
|
||||
private initialBlockStates: Record<string, BlockOutput> = {},
|
||||
@@ -108,6 +110,7 @@ export class Executor {
|
||||
// Store context extensions for streaming and output selection
|
||||
if (options.contextExtensions) {
|
||||
this.contextExtensions = options.contextExtensions
|
||||
this.isChildExecution = options.contextExtensions.isChildExecution || false
|
||||
|
||||
if (this.contextExtensions.stream) {
|
||||
logger.info('Executor initialized with streaming enabled', {
|
||||
@@ -204,10 +207,13 @@ export class Executor {
|
||||
const context = this.createExecutionContext(workflowId, startTime, startBlockId)
|
||||
|
||||
try {
|
||||
setIsExecuting(true)
|
||||
// Only manage global execution state for parent executions
|
||||
if (!this.isChildExecution) {
|
||||
setIsExecuting(true)
|
||||
|
||||
if (this.isDebugging) {
|
||||
setIsDebugging(true)
|
||||
if (this.isDebugging) {
|
||||
setIsDebugging(true)
|
||||
}
|
||||
}
|
||||
|
||||
let hasMoreLayers = true
|
||||
@@ -492,7 +498,8 @@ export class Executor {
|
||||
logs: context.blockLogs,
|
||||
}
|
||||
} finally {
|
||||
if (!this.isDebugging) {
|
||||
// Only reset global state for parent executions
|
||||
if (!this.isChildExecution && !this.isDebugging) {
|
||||
reset()
|
||||
}
|
||||
}
|
||||
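
The same `isChildExecution` guard recurs above and in the hunks below wherever the executor touches global UI/execution state. A small illustrative helper (not in the diff) capturing the pattern:

```typescript
// Skip global UI/execution-state updates when this executor was created by a workflow
// block, so parallel child runs cannot reset or repaint the parent's state.
function updateParentStateOnly(isChildExecution: boolean, update: () => void): void {
  if (isChildExecution) return
  update()
}

// Usage mirroring the hunks in this diff:
//   updateParentStateOnly(this.isChildExecution, () => setIsExecuting(true))
//   updateParentStateOnly(this.isChildExecution, () => setActiveBlocks(activeBlockIds))
```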
@@ -1270,7 +1277,10 @@ export class Executor {
|
||||
}
|
||||
})
|
||||
|
||||
setActiveBlocks(activeBlockIds)
|
||||
// Only manage active blocks for parent executions
|
||||
if (!this.isChildExecution) {
|
||||
setActiveBlocks(activeBlockIds)
|
||||
}
|
||||
|
||||
const settledResults = await Promise.allSettled(
|
||||
blockIds.map((blockId) => this.executeBlock(blockId, context))
|
||||
@@ -1316,7 +1326,10 @@ export class Executor {
|
||||
return results
|
||||
} catch (error) {
|
||||
// If there's an uncaught error, clear all active blocks as a safety measure
|
||||
setActiveBlocks(new Set())
|
||||
// Only manage active blocks for parent executions
|
||||
if (!this.isChildExecution) {
|
||||
setActiveBlocks(new Set())
|
||||
}
|
||||
throw error
|
||||
}
|
||||
}
|
||||
@@ -1433,27 +1446,30 @@ export class Executor {
|
||||
|
||||
// Remove this block from active blocks immediately after execution
|
||||
// This ensures the pulse effect stops as soon as the block completes
|
||||
useExecutionStore.setState((state) => {
|
||||
const updatedActiveBlockIds = new Set(state.activeBlockIds)
|
||||
updatedActiveBlockIds.delete(blockId)
|
||||
// Only manage active blocks for parent executions
|
||||
if (!this.isChildExecution) {
|
||||
useExecutionStore.setState((state) => {
|
||||
const updatedActiveBlockIds = new Set(state.activeBlockIds)
|
||||
updatedActiveBlockIds.delete(blockId)
|
||||
|
||||
// For virtual blocks, also check if we should remove the actual block ID
|
||||
if (parallelInfo) {
|
||||
// Check if there are any other virtual blocks for the same actual block still active
|
||||
const hasOtherVirtualBlocks = Array.from(state.activeBlockIds).some((activeId) => {
|
||||
if (activeId === blockId) return false // Skip the current block we're removing
|
||||
const mapping = context.parallelBlockMapping?.get(activeId)
|
||||
return mapping && mapping.originalBlockId === parallelInfo.originalBlockId
|
||||
})
|
||||
// For virtual blocks, also check if we should remove the actual block ID
|
||||
if (parallelInfo) {
|
||||
// Check if there are any other virtual blocks for the same actual block still active
|
||||
const hasOtherVirtualBlocks = Array.from(state.activeBlockIds).some((activeId) => {
|
||||
if (activeId === blockId) return false // Skip the current block we're removing
|
||||
const mapping = context.parallelBlockMapping?.get(activeId)
|
||||
return mapping && mapping.originalBlockId === parallelInfo.originalBlockId
|
||||
})
|
||||
|
||||
// If no other virtual blocks are active for this actual block, remove the actual block ID too
|
||||
if (!hasOtherVirtualBlocks) {
|
||||
updatedActiveBlockIds.delete(parallelInfo.originalBlockId)
|
||||
// If no other virtual blocks are active for this actual block, remove the actual block ID too
|
||||
if (!hasOtherVirtualBlocks) {
|
||||
updatedActiveBlockIds.delete(parallelInfo.originalBlockId)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return { activeBlockIds: updatedActiveBlockIds }
|
||||
})
|
||||
return { activeBlockIds: updatedActiveBlockIds }
|
||||
})
|
||||
}
|
||||
|
||||
if (
|
||||
rawOutput &&
|
||||
@@ -1492,6 +1508,43 @@ export class Executor {
|
||||
// Skip console logging for infrastructure blocks like loops and parallels
|
||||
// For streaming blocks, we'll add the console entry after stream processing
|
||||
if (block.metadata?.id !== BlockType.LOOP && block.metadata?.id !== BlockType.PARALLEL) {
|
||||
// Determine iteration context for this block
|
||||
let iterationCurrent: number | undefined
|
||||
let iterationTotal: number | undefined
|
||||
let iterationType: 'loop' | 'parallel' | undefined
|
||||
const blockName = block.metadata?.name || 'Unnamed Block'
|
||||
|
||||
if (parallelInfo) {
|
||||
// This is a parallel iteration
|
||||
const parallelState = context.parallelExecutions?.get(parallelInfo.parallelId)
|
||||
iterationCurrent = parallelInfo.iterationIndex + 1
|
||||
iterationTotal = parallelState?.parallelCount
|
||||
iterationType = 'parallel'
|
||||
} else {
|
||||
// Check if this block is inside a loop
|
||||
const containingLoopId = this.resolver.getContainingLoopId(block.id)
|
||||
if (containingLoopId) {
|
||||
const currentIteration = context.loopIterations.get(containingLoopId)
|
||||
const loop = context.workflow?.loops?.[containingLoopId]
|
||||
if (currentIteration !== undefined && loop) {
|
||||
iterationCurrent = currentIteration
|
||||
if (loop.loopType === 'forEach') {
|
||||
// For forEach loops, get the total from the items
|
||||
const forEachItems = context.loopItems.get(`${containingLoopId}_items`)
|
||||
if (forEachItems) {
|
||||
iterationTotal = Array.isArray(forEachItems)
|
||||
? forEachItems.length
|
||||
: Object.keys(forEachItems).length
|
||||
}
|
||||
} else {
|
||||
// For regular loops, use the iterations count
|
||||
iterationTotal = loop.iterations || 5
|
||||
}
|
||||
iterationType = 'loop'
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
addConsole({
|
||||
input: blockLog.input,
|
||||
output: blockLog.output,
|
||||
@@ -1502,12 +1555,11 @@ export class Executor {
|
||||
workflowId: context.workflowId,
|
||||
blockId: parallelInfo ? blockId : block.id,
|
||||
executionId: this.contextExtensions.executionId,
|
||||
blockName: parallelInfo
|
||||
? `${block.metadata?.name || 'Unnamed Block'} (iteration ${
|
||||
parallelInfo.iterationIndex + 1
|
||||
})`
|
||||
: block.metadata?.name || 'Unnamed Block',
|
||||
blockName,
|
||||
blockType: block.metadata?.id || 'unknown',
|
||||
iterationCurrent,
|
||||
iterationTotal,
|
||||
iterationType,
|
||||
})
|
||||
}
|
||||
|
||||
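
The iteration-context derivation above is repeated verbatim at the other `addConsole` call sites in this diff. A hypothetical helper that factors it out, with parameter shapes trimmed to just the fields the inline code reads:

```typescript
interface IterationInfo {
  iterationCurrent?: number
  iterationTotal?: number
  iterationType?: 'loop' | 'parallel'
}

function getIterationInfo(
  context: {
    parallelExecutions?: Map<string, { parallelCount?: number }>
    loopIterations: Map<string, number>
    loopItems: Map<string, unknown>
    workflow?: { loops?: Record<string, { loopType?: string; iterations?: number }> }
  },
  getContainingLoopId: (blockId: string) => string | undefined,
  blockId: string,
  parallelInfo?: { parallelId: string; iterationIndex: number }
): IterationInfo {
  if (parallelInfo) {
    // Parallel iteration: index comes from the virtual block mapping, total from parallel state
    const parallelState = context.parallelExecutions?.get(parallelInfo.parallelId)
    return {
      iterationCurrent: parallelInfo.iterationIndex + 1,
      iterationTotal: parallelState?.parallelCount,
      iterationType: 'parallel',
    }
  }

  const containingLoopId = getContainingLoopId(blockId)
  if (!containingLoopId) return {}

  const currentIteration = context.loopIterations.get(containingLoopId)
  const loop = context.workflow?.loops?.[containingLoopId]
  if (currentIteration === undefined || !loop) return {}

  let iterationTotal: number | undefined
  if (loop.loopType === 'forEach') {
    // forEach loops take their total from the resolved items collection
    const items = context.loopItems.get(`${containingLoopId}_items`)
    if (items) {
      iterationTotal = Array.isArray(items) ? items.length : Object.keys(items as object).length
    }
  } else {
    iterationTotal = loop.iterations || 5
  }

  return { iterationCurrent: currentIteration, iterationTotal, iterationType: 'loop' }
}
```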
@@ -1562,6 +1614,43 @@ export class Executor {
|
||||
|
||||
// Skip console logging for infrastructure blocks like loops and parallels
|
||||
if (block.metadata?.id !== BlockType.LOOP && block.metadata?.id !== BlockType.PARALLEL) {
|
||||
// Determine iteration context for this block
|
||||
let iterationCurrent: number | undefined
|
||||
let iterationTotal: number | undefined
|
||||
let iterationType: 'loop' | 'parallel' | undefined
|
||||
const blockName = block.metadata?.name || 'Unnamed Block'
|
||||
|
||||
if (parallelInfo) {
|
||||
// This is a parallel iteration
|
||||
const parallelState = context.parallelExecutions?.get(parallelInfo.parallelId)
|
||||
iterationCurrent = parallelInfo.iterationIndex + 1
|
||||
iterationTotal = parallelState?.parallelCount
|
||||
iterationType = 'parallel'
|
||||
} else {
|
||||
// Check if this block is inside a loop
|
||||
const containingLoopId = this.resolver.getContainingLoopId(block.id)
|
||||
if (containingLoopId) {
|
||||
const currentIteration = context.loopIterations.get(containingLoopId)
|
||||
const loop = context.workflow?.loops?.[containingLoopId]
|
||||
if (currentIteration !== undefined && loop) {
|
||||
iterationCurrent = currentIteration
|
||||
if (loop.loopType === 'forEach') {
|
||||
// For forEach loops, get the total from the items
|
||||
const forEachItems = context.loopItems.get(`${containingLoopId}_items`)
|
||||
if (forEachItems) {
|
||||
iterationTotal = Array.isArray(forEachItems)
|
||||
? forEachItems.length
|
||||
: Object.keys(forEachItems).length
|
||||
}
|
||||
} else {
|
||||
// For regular loops, use the iterations count
|
||||
iterationTotal = loop.iterations || 5
|
||||
}
|
||||
iterationType = 'loop'
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
addConsole({
|
||||
input: blockLog.input,
|
||||
output: blockLog.output,
|
||||
@@ -1572,12 +1661,11 @@ export class Executor {
|
||||
workflowId: context.workflowId,
|
||||
blockId: parallelInfo ? blockId : block.id,
|
||||
executionId: this.contextExtensions.executionId,
|
||||
blockName: parallelInfo
|
||||
? `${block.metadata?.name || 'Unnamed Block'} (iteration ${
|
||||
parallelInfo.iterationIndex + 1
|
||||
})`
|
||||
: block.metadata?.name || 'Unnamed Block',
|
||||
blockName,
|
||||
blockType: block.metadata?.id || 'unknown',
|
||||
iterationCurrent,
|
||||
iterationTotal,
|
||||
iterationType,
|
||||
})
|
||||
}
|
||||
|
||||
@@ -1595,27 +1683,30 @@ export class Executor {
|
||||
return output
|
||||
} catch (error: any) {
|
||||
// Remove this block from active blocks if there's an error
|
||||
useExecutionStore.setState((state) => {
|
||||
const updatedActiveBlockIds = new Set(state.activeBlockIds)
|
||||
updatedActiveBlockIds.delete(blockId)
|
||||
// Only manage active blocks for parent executions
|
||||
if (!this.isChildExecution) {
|
||||
useExecutionStore.setState((state) => {
|
||||
const updatedActiveBlockIds = new Set(state.activeBlockIds)
|
||||
updatedActiveBlockIds.delete(blockId)
|
||||
|
||||
// For virtual blocks, also check if we should remove the actual block ID
|
||||
if (parallelInfo) {
|
||||
// Check if there are any other virtual blocks for the same actual block still active
|
||||
const hasOtherVirtualBlocks = Array.from(state.activeBlockIds).some((activeId) => {
|
||||
if (activeId === blockId) return false // Skip the current block we're removing
|
||||
const mapping = context.parallelBlockMapping?.get(activeId)
|
||||
return mapping && mapping.originalBlockId === parallelInfo.originalBlockId
|
||||
})
|
||||
// For virtual blocks, also check if we should remove the actual block ID
|
||||
if (parallelInfo) {
|
||||
// Check if there are any other virtual blocks for the same actual block still active
|
||||
const hasOtherVirtualBlocks = Array.from(state.activeBlockIds).some((activeId) => {
|
||||
if (activeId === blockId) return false // Skip the current block we're removing
|
||||
const mapping = context.parallelBlockMapping?.get(activeId)
|
||||
return mapping && mapping.originalBlockId === parallelInfo.originalBlockId
|
||||
})
|
||||
|
||||
// If no other virtual blocks are active for this actual block, remove the actual block ID too
|
||||
if (!hasOtherVirtualBlocks) {
|
||||
updatedActiveBlockIds.delete(parallelInfo.originalBlockId)
|
||||
// If no other virtual blocks are active for this actual block, remove the actual block ID too
|
||||
if (!hasOtherVirtualBlocks) {
|
||||
updatedActiveBlockIds.delete(parallelInfo.originalBlockId)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return { activeBlockIds: updatedActiveBlockIds }
|
||||
})
|
||||
return { activeBlockIds: updatedActiveBlockIds }
|
||||
})
|
||||
}
|
||||
|
||||
blockLog.success = false
|
||||
blockLog.error =
|
||||
@@ -1630,6 +1721,43 @@ export class Executor {
|
||||
|
||||
// Skip console logging for infrastructure blocks like loops and parallels
|
||||
if (block.metadata?.id !== BlockType.LOOP && block.metadata?.id !== BlockType.PARALLEL) {
|
||||
// Determine iteration context for this block
|
||||
let iterationCurrent: number | undefined
|
||||
let iterationTotal: number | undefined
|
||||
let iterationType: 'loop' | 'parallel' | undefined
|
||||
const blockName = block.metadata?.name || 'Unnamed Block'
|
||||
|
||||
if (parallelInfo) {
|
||||
// This is a parallel iteration
|
||||
const parallelState = context.parallelExecutions?.get(parallelInfo.parallelId)
|
||||
iterationCurrent = parallelInfo.iterationIndex + 1
|
||||
iterationTotal = parallelState?.parallelCount
|
||||
iterationType = 'parallel'
|
||||
} else {
|
||||
// Check if this block is inside a loop
|
||||
const containingLoopId = this.resolver.getContainingLoopId(block.id)
|
||||
if (containingLoopId) {
|
||||
const currentIteration = context.loopIterations.get(containingLoopId)
|
||||
const loop = context.workflow?.loops?.[containingLoopId]
|
||||
if (currentIteration !== undefined && loop) {
|
||||
iterationCurrent = currentIteration
|
||||
if (loop.loopType === 'forEach') {
|
||||
// For forEach loops, get the total from the items
|
||||
const forEachItems = context.loopItems.get(`${containingLoopId}_items`)
|
||||
if (forEachItems) {
|
||||
iterationTotal = Array.isArray(forEachItems)
|
||||
? forEachItems.length
|
||||
: Object.keys(forEachItems).length
|
||||
}
|
||||
} else {
|
||||
// For regular loops, use the iterations count
|
||||
iterationTotal = loop.iterations || 5
|
||||
}
|
||||
iterationType = 'loop'
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
addConsole({
|
||||
input: blockLog.input,
|
||||
output: {},
|
||||
@@ -1643,10 +1771,11 @@ export class Executor {
|
||||
workflowId: context.workflowId,
|
||||
blockId: parallelInfo ? blockId : block.id,
|
||||
executionId: this.contextExtensions.executionId,
|
||||
blockName: parallelInfo
|
||||
? `${block.metadata?.name || 'Unnamed Block'} (iteration ${parallelInfo.iterationIndex + 1})`
|
||||
: block.metadata?.name || 'Unnamed Block',
|
||||
blockName,
|
||||
blockType: block.metadata?.id || 'unknown',
|
||||
iterationCurrent,
|
||||
iterationTotal,
|
||||
iterationType,
|
||||
})
|
||||
}
|
||||
|
||||
|
||||
@@ -22,6 +22,7 @@ export class InputResolver {
|
||||
private blockById: Map<string, SerializedBlock>
|
||||
private blockByNormalizedName: Map<string, SerializedBlock>
|
||||
private loopsByBlockId: Map<string, string> // Maps block ID to containing loop ID
|
||||
private parallelsByBlockId: Map<string, string> // Maps block ID to containing parallel ID
|
||||
|
||||
constructor(
|
||||
private workflow: SerializedWorkflow,
|
||||
@@ -61,6 +62,14 @@ export class InputResolver {
|
||||
this.loopsByBlockId.set(blockId, loopId)
|
||||
}
|
||||
}
|
||||
|
||||
// Create efficient parallel lookup map
|
||||
this.parallelsByBlockId = new Map()
|
||||
for (const [parallelId, parallel] of Object.entries(workflow.parallels || {})) {
|
||||
for (const blockId of parallel.nodes) {
|
||||
this.parallelsByBlockId.set(blockId, parallelId)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
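
A standalone sketch of the reverse-lookup map the resolver now builds (types simplified to plain objects; the real shapes come from the serialized workflow):

```typescript
type ParallelGroups = Record<string, { nodes: string[] }>

// Invert parallel groups once at construction time so "which parallel contains this
// block?" becomes a single Map lookup instead of a scan over every group.
function buildParallelsByBlockId(parallels: ParallelGroups): Map<string, string> {
  const byBlockId = new Map<string, string>()
  for (const [parallelId, parallel] of Object.entries(parallels)) {
    for (const blockId of parallel.nodes) {
      byBlockId.set(blockId, parallelId)
    }
  }
  return byBlockId
}

// O(1) per lookup afterwards:
const parallelsByBlockId = buildParallelsByBlockId({ p1: { nodes: ['a', 'b'] } })
console.log(parallelsByBlockId.get('a')) // 'p1'
```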
/**
|
||||
@@ -651,15 +660,8 @@ export class InputResolver {
|
||||
|
||||
// Special case for "parallel" references - allows accessing parallel properties
|
||||
if (blockRef.toLowerCase() === 'parallel') {
|
||||
// Find which parallel this block belongs to
|
||||
let containingParallelId: string | undefined
|
||||
|
||||
for (const [parallelId, parallel] of Object.entries(context.workflow?.parallels || {})) {
|
||||
if (parallel.nodes.includes(currentBlock.id)) {
|
||||
containingParallelId = parallelId
|
||||
break
|
||||
}
|
||||
}
|
||||
// Find which parallel this block belongs to using efficient lookup
|
||||
const containingParallelId = this.parallelsByBlockId.get(currentBlock.id)
|
||||
|
||||
if (containingParallelId) {
|
||||
const formattedValue = this.resolveParallelReference(
|
||||
@@ -1091,8 +1093,10 @@ export class InputResolver {
|
||||
}
|
||||
|
||||
// Special case: blocks in the same parallel can reference each other
|
||||
for (const [parallelId, parallel] of Object.entries(this.workflow.parallels || {})) {
|
||||
if (parallel.nodes.includes(currentBlockId)) {
|
||||
const currentBlockParallel = this.parallelsByBlockId.get(currentBlockId)
|
||||
if (currentBlockParallel) {
|
||||
const parallel = this.workflow.parallels?.[currentBlockParallel]
|
||||
if (parallel) {
|
||||
for (const nodeId of parallel.nodes) {
|
||||
accessibleBlocks.add(nodeId)
|
||||
}
|
||||
@@ -1845,4 +1849,22 @@ export class InputResolver {
|
||||
|
||||
return value
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the containing loop ID for a block
|
||||
* @param blockId - The ID of the block
|
||||
* @returns The containing loop ID or undefined if not in a loop
|
||||
*/
|
||||
getContainingLoopId(blockId: string): string | undefined {
|
||||
return this.loopsByBlockId.get(blockId)
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the containing parallel ID for a block
|
||||
* @param blockId - The ID of the block
|
||||
* @returns The containing parallel ID or undefined if not in a parallel
|
||||
*/
|
||||
getContainingParallelId(blockId: string): string | undefined {
|
||||
return this.parallelsByBlockId.get(blockId)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -271,14 +271,6 @@ export interface Tool<P = any, O = Record<string, any>> {
    output: O
    error?: string
  }>

  transformError?: (error: any) =>
    | string
    | Promise<{
        success: boolean
        output: O
        error?: string
      }> // Function to format error messages
}

/**

@@ -6,6 +6,7 @@ import { getBlock } from '@/blocks'
|
||||
import { resolveOutputType } from '@/blocks/utils'
|
||||
import { useSocket } from '@/contexts/socket-context'
|
||||
import { registerEmitFunctions, useOperationQueue } from '@/stores/operation-queue/store'
|
||||
import { useVariablesStore } from '@/stores/panel/variables/store'
|
||||
import { useWorkflowDiffStore } from '@/stores/workflow-diff/store'
|
||||
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
||||
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
|
||||
@@ -23,8 +24,10 @@ export function useCollaborativeWorkflow() {
|
||||
leaveWorkflow,
|
||||
emitWorkflowOperation,
|
||||
emitSubblockUpdate,
|
||||
emitVariableUpdate,
|
||||
onWorkflowOperation,
|
||||
onSubblockUpdate,
|
||||
onVariableUpdate,
|
||||
onUserJoined,
|
||||
onUserLeft,
|
||||
onWorkflowDeleted,
|
||||
@@ -36,6 +39,7 @@ export function useCollaborativeWorkflow() {
|
||||
const { activeWorkflowId } = useWorkflowRegistry()
|
||||
const workflowStore = useWorkflowStore()
|
||||
const subBlockStore = useSubBlockStore()
|
||||
const variablesStore = useVariablesStore()
|
||||
const { data: session } = useSession()
|
||||
const { isShowingDiff } = useWorkflowDiffStore()
|
||||
|
||||
@@ -53,6 +57,7 @@ export function useCollaborativeWorkflow() {
|
||||
confirmOperation,
|
||||
failOperation,
|
||||
cancelOperationsForBlock,
|
||||
cancelOperationsForVariable,
|
||||
} = useOperationQueue()
|
||||
|
||||
// Clear position timestamps when switching workflows
|
||||
@@ -73,8 +78,13 @@ export function useCollaborativeWorkflow() {
|
||||
|
||||
// Register emit functions with operation queue store
|
||||
useEffect(() => {
|
||||
registerEmitFunctions(emitWorkflowOperation, emitSubblockUpdate, currentWorkflowId)
|
||||
}, [emitWorkflowOperation, emitSubblockUpdate, currentWorkflowId])
|
||||
registerEmitFunctions(
|
||||
emitWorkflowOperation,
|
||||
emitSubblockUpdate,
|
||||
emitVariableUpdate,
|
||||
currentWorkflowId
|
||||
)
|
||||
}, [emitWorkflowOperation, emitSubblockUpdate, emitVariableUpdate, currentWorkflowId])
|
||||
|
||||
useEffect(() => {
|
||||
const handleWorkflowOperation = (data: any) => {
|
||||
@@ -232,6 +242,26 @@ export function useCollaborativeWorkflow() {
|
||||
}
|
||||
break
|
||||
}
|
||||
} else if (target === 'variable') {
|
||||
switch (operation) {
|
||||
case 'add':
|
||||
variablesStore.addVariable(
|
||||
{
|
||||
workflowId: payload.workflowId,
|
||||
name: payload.name,
|
||||
type: payload.type,
|
||||
value: payload.value,
|
||||
},
|
||||
payload.id
|
||||
)
|
||||
break
|
||||
case 'remove':
|
||||
variablesStore.deleteVariable(payload.variableId)
|
||||
break
|
||||
case 'duplicate':
|
||||
variablesStore.duplicateVariable(payload.sourceVariableId, payload.id)
|
||||
break
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error('Error applying remote operation:', error)
|
||||
@@ -259,6 +289,30 @@ export function useCollaborativeWorkflow() {
|
||||
}
|
||||
}
|
||||
|
||||
const handleVariableUpdate = (data: any) => {
|
||||
const { variableId, field, value, userId } = data
|
||||
|
||||
if (isApplyingRemoteChange.current) return
|
||||
|
||||
logger.info(`Received variable update from user ${userId}: ${variableId}.${field}`)
|
||||
|
||||
isApplyingRemoteChange.current = true
|
||||
|
||||
try {
|
||||
if (field === 'name') {
|
||||
variablesStore.updateVariable(variableId, { name: value })
|
||||
} else if (field === 'value') {
|
||||
variablesStore.updateVariable(variableId, { value })
|
||||
} else if (field === 'type') {
|
||||
variablesStore.updateVariable(variableId, { type: value })
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error('Error applying remote variable update:', error)
|
||||
} finally {
|
||||
isApplyingRemoteChange.current = false
|
||||
}
|
||||
}
|
||||
|
||||
const handleUserJoined = (data: any) => {
|
||||
logger.info(`User joined: ${data.userName}`)
|
||||
}
|
||||
@@ -364,6 +418,7 @@ export function useCollaborativeWorkflow() {
|
||||
// Register event handlers
|
||||
onWorkflowOperation(handleWorkflowOperation)
|
||||
onSubblockUpdate(handleSubblockUpdate)
|
||||
onVariableUpdate(handleVariableUpdate)
|
||||
onUserJoined(handleUserJoined)
|
||||
onUserLeft(handleUserLeft)
|
||||
onWorkflowDeleted(handleWorkflowDeleted)
|
||||
@@ -377,6 +432,7 @@ export function useCollaborativeWorkflow() {
|
||||
}, [
|
||||
onWorkflowOperation,
|
||||
onSubblockUpdate,
|
||||
onVariableUpdate,
|
||||
onUserJoined,
|
||||
onUserLeft,
|
||||
onWorkflowDeleted,
|
||||
@@ -385,6 +441,7 @@ export function useCollaborativeWorkflow() {
|
||||
onOperationFailed,
|
||||
workflowStore,
|
||||
subBlockStore,
|
||||
variablesStore,
|
||||
activeWorkflowId,
|
||||
confirmOperation,
|
||||
failOperation,
|
||||
@@ -937,36 +994,6 @@ export function useCollaborativeWorkflow() {
|
||||
[executeQueuedOperation, workflowStore, subBlockStore, activeWorkflowId]
|
||||
)
|
||||
|
||||
const collaborativeUpdateLoopCount = useCallback(
|
||||
(loopId: string, count: number) => {
|
||||
// Get current state BEFORE making changes
|
||||
const currentBlock = workflowStore.blocks[loopId]
|
||||
if (!currentBlock || currentBlock.type !== 'loop') return
|
||||
|
||||
// Find child nodes before state changes
|
||||
const childNodes = Object.values(workflowStore.blocks)
|
||||
.filter((b) => b.data?.parentId === loopId)
|
||||
.map((b) => b.id)
|
||||
|
||||
// Get current values to preserve them
|
||||
const currentLoopType = currentBlock.data?.loopType || 'for'
|
||||
const currentCollection = currentBlock.data?.collection || ''
|
||||
|
||||
const config = {
|
||||
id: loopId,
|
||||
nodes: childNodes,
|
||||
iterations: count,
|
||||
loopType: currentLoopType,
|
||||
forEachItems: currentCollection,
|
||||
}
|
||||
|
||||
executeQueuedOperation('update', 'subflow', { id: loopId, type: 'loop', config }, () =>
|
||||
workflowStore.updateLoopCount(loopId, count)
|
||||
)
|
||||
},
|
||||
[executeQueuedOperation, workflowStore]
|
||||
)
|
||||
|
||||
const collaborativeUpdateLoopType = useCallback(
|
||||
(loopId: string, loopType: 'for' | 'forEach') => {
|
||||
const currentBlock = workflowStore.blocks[loopId]
|
||||
@@ -994,93 +1021,6 @@ export function useCollaborativeWorkflow() {
|
||||
[executeQueuedOperation, workflowStore]
|
||||
)
|
||||
|
||||
const collaborativeUpdateLoopCollection = useCallback(
|
||||
(loopId: string, collection: string) => {
|
||||
const currentBlock = workflowStore.blocks[loopId]
|
||||
if (!currentBlock || currentBlock.type !== 'loop') return
|
||||
|
||||
const childNodes = Object.values(workflowStore.blocks)
|
||||
.filter((b) => b.data?.parentId === loopId)
|
||||
.map((b) => b.id)
|
||||
|
||||
const currentIterations = currentBlock.data?.count || 5
|
||||
const currentLoopType = currentBlock.data?.loopType || 'for'
|
||||
|
||||
const config = {
|
||||
id: loopId,
|
||||
nodes: childNodes,
|
||||
iterations: currentIterations,
|
||||
loopType: currentLoopType,
|
||||
forEachItems: collection,
|
||||
}
|
||||
|
||||
executeQueuedOperation('update', 'subflow', { id: loopId, type: 'loop', config }, () =>
|
||||
workflowStore.updateLoopCollection(loopId, collection)
|
||||
)
|
||||
},
|
||||
[executeQueuedOperation, workflowStore]
|
||||
)
|
||||
|
||||
const collaborativeUpdateParallelCount = useCallback(
|
||||
(parallelId: string, count: number) => {
|
||||
const currentBlock = workflowStore.blocks[parallelId]
|
||||
if (!currentBlock || currentBlock.type !== 'parallel') return
|
||||
|
||||
const childNodes = Object.values(workflowStore.blocks)
|
||||
.filter((b) => b.data?.parentId === parallelId)
|
||||
.map((b) => b.id)
|
||||
|
||||
const currentDistribution = currentBlock.data?.collection || ''
|
||||
const currentParallelType = currentBlock.data?.parallelType || 'collection'
|
||||
|
||||
const config = {
|
||||
id: parallelId,
|
||||
nodes: childNodes,
|
||||
count: Math.max(1, Math.min(20, count)), // Clamp between 1-20
|
||||
distribution: currentDistribution,
|
||||
parallelType: currentParallelType,
|
||||
}
|
||||
|
||||
executeQueuedOperation(
|
||||
'update',
|
||||
'subflow',
|
||||
{ id: parallelId, type: 'parallel', config },
|
||||
() => workflowStore.updateParallelCount(parallelId, count)
|
||||
)
|
||||
},
|
||||
[executeQueuedOperation, workflowStore]
|
||||
)
|
||||
|
||||
const collaborativeUpdateParallelCollection = useCallback(
|
||||
(parallelId: string, collection: string) => {
|
||||
const currentBlock = workflowStore.blocks[parallelId]
|
||||
if (!currentBlock || currentBlock.type !== 'parallel') return
|
||||
|
||||
const childNodes = Object.values(workflowStore.blocks)
|
||||
.filter((b) => b.data?.parentId === parallelId)
|
||||
.map((b) => b.id)
|
||||
|
||||
const currentCount = currentBlock.data?.count || 5
|
||||
const currentParallelType = currentBlock.data?.parallelType || 'collection'
|
||||
|
||||
const config = {
|
||||
id: parallelId,
|
||||
nodes: childNodes,
|
||||
count: currentCount,
|
||||
distribution: collection,
|
||||
parallelType: currentParallelType,
|
||||
}
|
||||
|
||||
executeQueuedOperation(
|
||||
'update',
|
||||
'subflow',
|
||||
{ id: parallelId, type: 'parallel', config },
|
||||
() => workflowStore.updateParallelCollection(parallelId, collection)
|
||||
)
|
||||
},
|
||||
[executeQueuedOperation, workflowStore]
|
||||
)
|
||||
|
||||
const collaborativeUpdateParallelType = useCallback(
|
||||
(parallelId: string, parallelType: 'count' | 'collection') => {
|
||||
const currentBlock = workflowStore.blocks[parallelId]
|
||||
@@ -1122,6 +1062,161 @@ export function useCollaborativeWorkflow() {
|
||||
[executeQueuedOperation, workflowStore]
|
||||
)
|
||||
|
||||
// Unified iteration management functions - count and collection only
|
||||
const collaborativeUpdateIterationCount = useCallback(
|
||||
(nodeId: string, iterationType: 'loop' | 'parallel', count: number) => {
|
||||
const currentBlock = workflowStore.blocks[nodeId]
|
||||
if (!currentBlock || currentBlock.type !== iterationType) return
|
||||
|
||||
const childNodes = Object.values(workflowStore.blocks)
|
||||
.filter((b) => b.data?.parentId === nodeId)
|
||||
.map((b) => b.id)
|
||||
|
||||
if (iterationType === 'loop') {
|
||||
const currentLoopType = currentBlock.data?.loopType || 'for'
|
||||
const currentCollection = currentBlock.data?.collection || ''
|
||||
|
||||
const config = {
|
||||
id: nodeId,
|
||||
nodes: childNodes,
|
||||
iterations: Math.max(1, Math.min(100, count)), // Clamp between 1-100 for loops
|
||||
loopType: currentLoopType,
|
||||
forEachItems: currentCollection,
|
||||
}
|
||||
|
||||
executeQueuedOperation('update', 'subflow', { id: nodeId, type: 'loop', config }, () =>
|
||||
workflowStore.updateLoopCount(nodeId, count)
|
||||
)
|
||||
} else {
|
||||
const currentDistribution = currentBlock.data?.collection || ''
|
||||
const currentParallelType = currentBlock.data?.parallelType || 'count'
|
||||
|
||||
const config = {
|
||||
id: nodeId,
|
||||
nodes: childNodes,
|
||||
count: Math.max(1, Math.min(20, count)), // Clamp between 1-20 for parallels
|
||||
distribution: currentDistribution,
|
||||
parallelType: currentParallelType,
|
||||
}
|
||||
|
||||
executeQueuedOperation('update', 'subflow', { id: nodeId, type: 'parallel', config }, () =>
|
||||
workflowStore.updateParallelCount(nodeId, count)
|
||||
)
|
||||
}
|
||||
},
|
||||
[executeQueuedOperation, workflowStore]
|
||||
)
|
||||
|
||||
const collaborativeUpdateIterationCollection = useCallback(
|
||||
(nodeId: string, iterationType: 'loop' | 'parallel', collection: string) => {
|
||||
const currentBlock = workflowStore.blocks[nodeId]
|
||||
if (!currentBlock || currentBlock.type !== iterationType) return
|
||||
|
||||
const childNodes = Object.values(workflowStore.blocks)
|
||||
.filter((b) => b.data?.parentId === nodeId)
|
||||
.map((b) => b.id)
|
||||
|
||||
if (iterationType === 'loop') {
|
||||
const currentIterations = currentBlock.data?.count || 5
|
||||
const currentLoopType = currentBlock.data?.loopType || 'for'
|
||||
|
||||
const config = {
|
||||
id: nodeId,
|
||||
nodes: childNodes,
|
||||
iterations: currentIterations,
|
||||
loopType: currentLoopType,
|
||||
forEachItems: collection,
|
||||
}
|
||||
|
||||
executeQueuedOperation('update', 'subflow', { id: nodeId, type: 'loop', config }, () =>
|
||||
workflowStore.updateLoopCollection(nodeId, collection)
|
||||
)
|
||||
} else {
|
||||
const currentCount = currentBlock.data?.count || 5
|
||||
const currentParallelType = currentBlock.data?.parallelType || 'count'
|
||||
|
||||
const config = {
|
||||
id: nodeId,
|
||||
nodes: childNodes,
|
||||
count: currentCount,
|
||||
distribution: collection,
|
||||
parallelType: currentParallelType,
|
||||
}
|
||||
|
||||
executeQueuedOperation('update', 'subflow', { id: nodeId, type: 'parallel', config }, () =>
|
||||
workflowStore.updateParallelCollection(nodeId, collection)
|
||||
)
|
||||
}
|
||||
},
|
||||
[executeQueuedOperation, workflowStore]
|
||||
)
|
||||
|
||||
const collaborativeUpdateVariable = useCallback(
|
||||
(variableId: string, field: 'name' | 'value' | 'type', value: any) => {
|
||||
executeQueuedOperation('variable-update', 'variable', { variableId, field, value }, () => {
|
||||
if (field === 'name') {
|
||||
variablesStore.updateVariable(variableId, { name: value })
|
||||
} else if (field === 'value') {
|
||||
variablesStore.updateVariable(variableId, { value })
|
||||
} else if (field === 'type') {
|
||||
variablesStore.updateVariable(variableId, { type: value })
|
||||
}
|
||||
})
|
||||
},
|
||||
[executeQueuedOperation, variablesStore]
|
||||
)
|
||||
|
||||
const collaborativeAddVariable = useCallback(
|
||||
(variableData: { name: string; type: any; value: any; workflowId: string }) => {
|
||||
const id = crypto.randomUUID()
|
||||
variablesStore.addVariable(variableData, id)
|
||||
const processedVariable = useVariablesStore.getState().variables[id]
|
||||
|
||||
if (processedVariable) {
|
||||
const payloadWithProcessedName = {
|
||||
...variableData,
|
||||
id,
|
||||
name: processedVariable.name,
|
||||
}
|
||||
|
||||
executeQueuedOperation('add', 'variable', payloadWithProcessedName, () => {})
|
||||
}
|
||||
|
||||
return id
|
||||
},
|
||||
[executeQueuedOperation, variablesStore]
|
||||
)
|
||||
|
||||
const collaborativeDeleteVariable = useCallback(
|
||||
(variableId: string) => {
|
||||
cancelOperationsForVariable(variableId)
|
||||
|
||||
executeQueuedOperation('remove', 'variable', { variableId }, () => {
|
||||
variablesStore.deleteVariable(variableId)
|
||||
})
|
||||
},
|
||||
[executeQueuedOperation, variablesStore, cancelOperationsForVariable]
|
||||
)
|
||||
|
||||
const collaborativeDuplicateVariable = useCallback(
|
||||
(variableId: string) => {
|
||||
const newId = crypto.randomUUID()
|
||||
const sourceVariable = useVariablesStore.getState().variables[variableId]
|
||||
if (!sourceVariable) return null
|
||||
|
||||
executeQueuedOperation(
|
||||
'duplicate',
|
||||
'variable',
|
||||
{ sourceVariableId: variableId, id: newId },
|
||||
() => {
|
||||
variablesStore.duplicateVariable(variableId, newId)
|
||||
}
|
||||
)
|
||||
return newId
|
||||
},
|
||||
[executeQueuedOperation, variablesStore]
|
||||
)
|
||||
|
||||
return {
|
||||
// Connection status
|
||||
isConnected,
|
||||
@@ -1150,14 +1245,20 @@ export function useCollaborativeWorkflow() {
|
||||
collaborativeSetSubblockValue,
|
||||
collaborativeSetTagSelection,
|
||||
|
||||
// Collaborative variable operations
|
||||
collaborativeUpdateVariable,
|
||||
collaborativeAddVariable,
|
||||
collaborativeDeleteVariable,
|
||||
collaborativeDuplicateVariable,
|
||||
|
||||
// Collaborative loop/parallel operations
|
||||
collaborativeUpdateLoopCount,
|
||||
collaborativeUpdateLoopType,
|
||||
collaborativeUpdateLoopCollection,
|
||||
collaborativeUpdateParallelCount,
|
||||
collaborativeUpdateParallelCollection,
|
||||
collaborativeUpdateParallelType,
|
||||
|
||||
// Unified iteration operations
|
||||
collaborativeUpdateIterationCount,
|
||||
collaborativeUpdateIterationCollection,
|
||||
|
||||
// Direct access to stores for non-collaborative operations
|
||||
workflowStore,
|
||||
subBlockStore,
|
||||
|
||||
@@ -41,7 +41,6 @@ export function useKnowledgeBase(id: string) {
|
||||
}
|
||||
|
||||
// Constants
|
||||
const MAX_DOCUMENTS_LIMIT = 10000
|
||||
const DEFAULT_PAGE_SIZE = 50
|
||||
|
||||
export function useKnowledgeBaseDocuments(
|
||||
@@ -54,20 +53,26 @@ export function useKnowledgeBaseDocuments(
|
||||
const [error, setError] = useState<string | null>(null)
|
||||
|
||||
const documentsCache = getCachedDocuments(knowledgeBaseId)
|
||||
const allDocuments = documentsCache?.documents || []
|
||||
const isLoading = loadingDocuments.has(knowledgeBaseId)
|
||||
const hasBeenLoaded = documentsCache !== null // Check if we have any cache entry, even if empty
|
||||
|
||||
// Load all documents on initial mount
|
||||
// Load documents with server-side pagination and search
|
||||
const requestLimit = options?.limit || DEFAULT_PAGE_SIZE
|
||||
const requestOffset = options?.offset || 0
|
||||
const requestSearch = options?.search
|
||||
|
||||
useEffect(() => {
|
||||
if (!knowledgeBaseId || hasBeenLoaded || isLoading) return
|
||||
if (!knowledgeBaseId || isLoading) return
|
||||
|
||||
let isMounted = true
|
||||
|
||||
const loadAllDocuments = async () => {
|
||||
const loadDocuments = async () => {
|
||||
try {
|
||||
setError(null)
|
||||
await getDocuments(knowledgeBaseId, { limit: MAX_DOCUMENTS_LIMIT })
|
||||
await getDocuments(knowledgeBaseId, {
|
||||
search: requestSearch,
|
||||
limit: requestLimit,
|
||||
offset: requestOffset,
|
||||
})
|
||||
} catch (err) {
|
||||
if (isMounted) {
|
||||
setError(err instanceof Error ? err.message : 'Failed to load documents')
|
||||
@@ -75,48 +80,34 @@ export function useKnowledgeBaseDocuments(
|
||||
}
|
||||
}
|
||||
|
||||
loadAllDocuments()
|
||||
loadDocuments()
|
||||
|
||||
return () => {
|
||||
isMounted = false
|
||||
}
|
||||
}, [knowledgeBaseId, hasBeenLoaded, isLoading, getDocuments])
|
||||
}, [knowledgeBaseId, isLoading, getDocuments, requestSearch, requestLimit, requestOffset])
|
||||
|
||||
// Client-side filtering and pagination
|
||||
const { documents, pagination } = useMemo(() => {
|
||||
let filteredDocs = allDocuments
|
||||
|
||||
// Apply search filter
|
||||
if (options?.search) {
|
||||
const searchLower = options.search.toLowerCase()
|
||||
filteredDocs = filteredDocs.filter((doc) => doc.filename.toLowerCase().includes(searchLower))
|
||||
}
|
||||
|
||||
// Apply pagination
|
||||
const offset = options?.offset || 0
|
||||
const limit = options?.limit || DEFAULT_PAGE_SIZE
|
||||
const total = filteredDocs.length
|
||||
const paginatedDocs = filteredDocs.slice(offset, offset + limit)
|
||||
|
||||
return {
|
||||
documents: paginatedDocs,
|
||||
pagination: {
|
||||
total,
|
||||
limit,
|
||||
offset,
|
||||
hasMore: offset + limit < total,
|
||||
},
|
||||
}
|
||||
}, [allDocuments, options?.search, options?.limit, options?.offset])
|
||||
// Use server-side filtered and paginated results directly
|
||||
const documents = documentsCache?.documents || []
|
||||
const pagination = documentsCache?.pagination || {
|
||||
total: 0,
|
||||
limit: requestLimit,
|
||||
offset: requestOffset,
|
||||
hasMore: false,
|
||||
}
|
||||
|
||||
const refreshDocumentsData = useCallback(async () => {
|
||||
try {
|
||||
setError(null)
|
||||
await refreshDocuments(knowledgeBaseId, { limit: MAX_DOCUMENTS_LIMIT })
|
||||
await refreshDocuments(knowledgeBaseId, {
|
||||
search: requestSearch,
|
||||
limit: requestLimit,
|
||||
offset: requestOffset,
|
||||
})
|
||||
} catch (err) {
|
||||
setError(err instanceof Error ? err.message : 'Failed to refresh documents')
|
||||
}
|
||||
}, [knowledgeBaseId, refreshDocuments])
|
||||
}, [knowledgeBaseId, refreshDocuments, requestSearch, requestLimit, requestOffset])
|
||||
|
||||
const updateDocumentLocal = useCallback(
|
||||
(documentId: string, updates: Partial<DocumentData>) => {
|
||||
|
||||
@@ -23,8 +23,9 @@ import { getBaseURL } from '@/lib/auth-client'
|
||||
import { DEFAULT_FREE_CREDITS } from '@/lib/billing/constants'
|
||||
import { quickValidateEmail } from '@/lib/email/validation'
|
||||
import { env, isTruthy } from '@/lib/env'
|
||||
import { isProd } from '@/lib/environment'
|
||||
import { isBillingEnabled, isProd } from '@/lib/environment'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { getRedisClient } from '@/lib/redis'
|
||||
import { getEmailDomain } from '@/lib/urls/utils'
|
||||
import { db } from '@/db'
|
||||
import * as schema from '@/db/schema'
|
||||
@@ -72,14 +73,55 @@ export const auth = betterAuth({
|
||||
provider: 'pg',
|
||||
schema,
|
||||
}),
|
||||
// Conditionally use Redis for session storage only if Redis is available
|
||||
...(env.REDIS_URL
|
||||
? {
|
||||
secondaryStorage: {
|
||||
get: async (key: string) => {
|
||||
try {
|
||||
const redis = getRedisClient()
|
||||
if (!redis) return null
|
||||
const value = await redis.get(`auth:${key}`)
|
||||
return value || null
|
||||
} catch (error) {
|
||||
logger.error('Redis get error:', error)
|
||||
return null
|
||||
}
|
||||
},
|
||||
set: async (key: string, value: string, ttl?: number) => {
|
||||
try {
|
||||
const redis = getRedisClient()
|
||||
if (!redis) return
|
||||
if (ttl) {
|
||||
await redis.setex(`auth:${key}`, ttl, value)
|
||||
} else {
|
||||
await redis.set(`auth:${key}`, value)
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error('Redis set error:', error)
|
||||
}
|
||||
},
|
||||
delete: async (key: string) => {
|
||||
try {
|
||||
const redis = getRedisClient()
|
||||
if (!redis) return
|
||||
await redis.del(`auth:${key}`)
|
||||
} catch (error) {
|
||||
logger.error('Redis delete error:', error)
|
||||
}
|
||||
},
|
||||
},
|
||||
}
|
||||
: {}),
|
||||
session: {
|
||||
cookieCache: {
|
||||
enabled: true,
|
||||
maxAge: 24 * 60 * 60, // 24 hours in seconds
|
||||
// Use shorter cache with Redis (5 min), longer without (1 hour)
|
||||
maxAge: env.REDIS_URL ? 5 * 60 : 60 * 60,
|
||||
},
|
||||
expiresIn: 30 * 24 * 60 * 60, // 30 days (how long a session can last overall)
|
||||
updateAge: 24 * 60 * 60, // 24 hours (how often to refresh the expiry)
|
||||
freshAge: 60 * 60, // 1 hour (or set to 0 to disable completely)
|
||||
freshAge: env.REDIS_URL ? 0 : 6 * 60 * 60, // 0 with Redis, 6 hours without Redis
|
||||
},
|
||||
databaseHooks: {
|
||||
session: {
|
||||
@@ -1160,8 +1202,8 @@ export const auth = betterAuth({
|
||||
},
|
||||
],
|
||||
}),
|
||||
// Only include the Stripe plugin in production
|
||||
...(isProd && stripeClient
|
||||
// Only include the Stripe plugin when billing is enabled
|
||||
...(isBillingEnabled && stripeClient
|
||||
? [
|
||||
stripe({
|
||||
stripeClient,
|
||||
|
||||
@@ -1,6 +1,6 @@
import { eq } from 'drizzle-orm'
import { getUserUsageLimit } from '@/lib/billing/core/usage'
import { isProd } from '@/lib/environment'
import { isBillingEnabled } from '@/lib/environment'
import { createLogger } from '@/lib/logs/console/logger'
import { db } from '@/db'
import { userStats } from '@/db/schema'
@@ -24,8 +24,8 @@ interface UsageData {
 */
export async function checkUsageStatus(userId: string): Promise<UsageData> {
  try {
    // In development, always return permissive limits
    if (!isProd) {
    // If billing is disabled, always return permissive limits
    if (!isBillingEnabled) {
      // Get actual usage from the database for display purposes
      const statsRecords = await db.select().from(userStats).where(eq(userStats.userId, userId))
      const currentUsage =
@@ -115,8 +115,8 @@ export async function checkUsageStatus(userId: string): Promise<UsageData> {
 */
export async function checkAndNotifyUsage(userId: string): Promise<void> {
  try {
    // Skip usage notifications in development
    if (!isProd) {
    // Skip usage notifications if billing is disabled
    if (!isBillingEnabled) {
      return
    }

@@ -182,8 +182,8 @@ export async function checkServerSideUsageLimits(userId: string): Promise<{
  message?: string
}> {
  try {
    // In development, always allow execution
    if (!isProd) {
    // If billing is disabled, always allow execution
    if (!isBillingEnabled) {
      return {
        isExceeded: false,
        currentUsage: 0,

@@ -259,8 +259,13 @@ async function parseWithMistralOCR(
  const timeoutId = setTimeout(() => controller.abort(), TIMEOUTS.MISTRAL_OCR_API)

  try {
    const method =
      typeof mistralParserTool.request!.method === 'function'
        ? mistralParserTool.request!.method(requestBody as any)
        : mistralParserTool.request!.method

    const res = await fetch(url, {
      method: mistralParserTool.request!.method,
      method,
      headers,
      body: JSON.stringify(requestBody),
      signal: controller.signal,

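The hunk above exists because a tool config's `request.method` may be either a fixed string or a function of the request body. A generic version of that resolution, with an illustrative `resolveMethod` helper that is not part of the diff, looks roughly like this:

```typescript
// Accept either a fixed HTTP method or a function that derives the method
// from the request params, mirroring the guard in the hunk above.
type MethodOrFactory<P> = string | ((params: P) => string)

function resolveMethod<P>(method: MethodOrFactory<P>, params: P): string {
  return typeof method === 'function' ? method(params) : method
}

// Usage (illustrative params only):
// resolveMethod('POST', { dryRun: false })                                   -> 'POST'
// resolveMethod((p: { dryRun: boolean }) => (p.dryRun ? 'GET' : 'POST'),
//               { dryRun: true })                                            -> 'GET'
```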
@@ -10,6 +10,8 @@ vi.mock('@/lib/env', () => ({
  env: {
    BETTER_AUTH_SECRET: 'test-secret-key',
  },
  isTruthy: (value: string | boolean | number | undefined) =>
    typeof value === 'string' ? value === 'true' || value === '1' : Boolean(value),
}))

describe('unsubscribe utilities', () => {

@@ -16,192 +16,193 @@ export const env = createEnv({
|
||||
|
||||
server: {
|
||||
// Core Database & Authentication
|
||||
DATABASE_URL: z.string().url(), // Primary database connection string
|
||||
BETTER_AUTH_URL: z.string().url(), // Base URL for Better Auth service
|
||||
BETTER_AUTH_SECRET: z.string().min(32), // Secret key for Better Auth JWT signing
|
||||
DISABLE_REGISTRATION: z.boolean().optional(), // Flag to disable new user registration
|
||||
ALLOWED_LOGIN_EMAILS: z.string().optional(), // Comma-separated list of allowed email addresses for login
|
||||
ALLOWED_LOGIN_DOMAINS: z.string().optional(), // Comma-separated list of allowed email domains for login
|
||||
ENCRYPTION_KEY: z.string().min(32), // Key for encrypting sensitive data
|
||||
INTERNAL_API_SECRET: z.string().min(32), // Secret for internal API authentication
|
||||
SIM_AGENT_API_KEY: z.string().min(1).optional(), // Secret for internal sim agent API authentication
|
||||
SIM_AGENT_API_URL: z.string().url().optional(), // URL for internal sim agent API
|
||||
DATABASE_URL: z.string().url(), // Primary database connection string
|
||||
BETTER_AUTH_URL: z.string().url(), // Base URL for Better Auth service
|
||||
BETTER_AUTH_SECRET: z.string().min(32), // Secret key for Better Auth JWT signing
|
||||
DISABLE_REGISTRATION: z.boolean().optional(), // Flag to disable new user registration
|
||||
ALLOWED_LOGIN_EMAILS: z.string().optional(), // Comma-separated list of allowed email addresses for login
|
||||
ALLOWED_LOGIN_DOMAINS: z.string().optional(), // Comma-separated list of allowed email domains for login
|
||||
ENCRYPTION_KEY: z.string().min(32), // Key for encrypting sensitive data
|
||||
INTERNAL_API_SECRET: z.string().min(32), // Secret for internal API authentication
|
||||
SIM_AGENT_API_KEY: z.string().min(1).optional(), // Secret for internal sim agent API authentication
|
||||
SIM_AGENT_API_URL: z.string().url().optional(), // URL for internal sim agent API
|
||||
|
||||
// Database & Storage
|
||||
POSTGRES_URL: z.string().url().optional(), // Alternative PostgreSQL connection string
|
||||
REDIS_URL: z.string().url().optional(), // Redis connection string for caching/sessions
|
||||
POSTGRES_URL: z.string().url().optional(), // Alternative PostgreSQL connection string
|
||||
REDIS_URL: z.string().url().optional(), // Redis connection string for caching/sessions (optional - improves performance)
|
||||
|
||||
// Payment & Billing (Stripe)
|
||||
STRIPE_SECRET_KEY: z.string().min(1).optional(), // Stripe secret key for payment processing
|
||||
STRIPE_BILLING_WEBHOOK_SECRET: z.string().min(1).optional(), // Webhook secret for billing events
|
||||
STRIPE_WEBHOOK_SECRET: z.string().min(1).optional(), // General Stripe webhook secret
|
||||
STRIPE_FREE_PRICE_ID: z.string().min(1).optional(), // Stripe price ID for free tier
|
||||
FREE_TIER_COST_LIMIT: z.number().optional(), // Cost limit for free tier users
|
||||
STRIPE_PRO_PRICE_ID: z.string().min(1).optional(), // Stripe price ID for pro tier
|
||||
PRO_TIER_COST_LIMIT: z.number().optional(), // Cost limit for pro tier users
|
||||
STRIPE_TEAM_PRICE_ID: z.string().min(1).optional(), // Stripe price ID for team tier
|
||||
TEAM_TIER_COST_LIMIT: z.number().optional(), // Cost limit for team tier users
|
||||
STRIPE_ENTERPRISE_PRICE_ID: z.string().min(1).optional(), // Stripe price ID for enterprise tier
|
||||
ENTERPRISE_TIER_COST_LIMIT: z.number().optional(), // Cost limit for enterprise tier users
|
||||
// Payment & Billing
|
||||
BILLING_ENABLED: z.boolean().optional(), // Enable billing enforcement and usage tracking
|
||||
STRIPE_SECRET_KEY: z.string().min(1).optional(), // Stripe secret key for payment processing
|
||||
STRIPE_BILLING_WEBHOOK_SECRET: z.string().min(1).optional(), // Webhook secret for billing events
|
||||
STRIPE_WEBHOOK_SECRET: z.string().min(1).optional(), // General Stripe webhook secret
|
||||
STRIPE_FREE_PRICE_ID: z.string().min(1).optional(), // Stripe price ID for free tier
|
||||
FREE_TIER_COST_LIMIT: z.number().optional(), // Cost limit for free tier users
|
||||
STRIPE_PRO_PRICE_ID: z.string().min(1).optional(), // Stripe price ID for pro tier
|
||||
PRO_TIER_COST_LIMIT: z.number().optional(), // Cost limit for pro tier users
|
||||
STRIPE_TEAM_PRICE_ID: z.string().min(1).optional(), // Stripe price ID for team tier
|
||||
TEAM_TIER_COST_LIMIT: z.number().optional(), // Cost limit for team tier users
|
||||
STRIPE_ENTERPRISE_PRICE_ID: z.string().min(1).optional(), // Stripe price ID for enterprise tier
|
||||
ENTERPRISE_TIER_COST_LIMIT: z.number().optional(), // Cost limit for enterprise tier users
|
||||
|
||||
// Email & Communication
|
||||
RESEND_API_KEY: z.string().min(1).optional(), // Resend API key for transactional emails
|
||||
EMAIL_DOMAIN: z.string().min(1).optional(), // Domain for sending emails
|
||||
RESEND_API_KEY: z.string().min(1).optional(), // Resend API key for transactional emails
|
||||
EMAIL_DOMAIN: z.string().min(1).optional(), // Domain for sending emails
|
||||
|
||||
// AI/LLM Provider API Keys
|
||||
OPENAI_API_KEY: z.string().min(1).optional(), // Primary OpenAI API key
|
||||
OPENAI_API_KEY_1: z.string().min(1).optional(), // Additional OpenAI API key for load balancing
|
||||
OPENAI_API_KEY_2: z.string().min(1).optional(), // Additional OpenAI API key for load balancing
|
||||
OPENAI_API_KEY_3: z.string().min(1).optional(), // Additional OpenAI API key for load balancing
|
||||
MISTRAL_API_KEY: z.string().min(1).optional(), // Mistral AI API key
|
||||
ANTHROPIC_API_KEY_1: z.string().min(1).optional(), // Primary Anthropic Claude API key
|
||||
ANTHROPIC_API_KEY_2: z.string().min(1).optional(), // Additional Anthropic API key for load balancing
|
||||
ANTHROPIC_API_KEY_3: z.string().min(1).optional(), // Additional Anthropic API key for load balancing
|
||||
FREESTYLE_API_KEY: z.string().min(1).optional(), // Freestyle AI API key
|
||||
OLLAMA_URL: z.string().url().optional(), // Ollama local LLM server URL
|
||||
ELEVENLABS_API_KEY: z.string().min(1).optional(), // ElevenLabs API key for text-to-speech in deployed chat
|
||||
SERPER_API_KEY: z.string().min(1).optional(), // Serper API key for online search
|
||||
OPENAI_API_KEY: z.string().min(1).optional(), // Primary OpenAI API key
|
||||
OPENAI_API_KEY_1: z.string().min(1).optional(), // Additional OpenAI API key for load balancing
|
||||
OPENAI_API_KEY_2: z.string().min(1).optional(), // Additional OpenAI API key for load balancing
|
||||
OPENAI_API_KEY_3: z.string().min(1).optional(), // Additional OpenAI API key for load balancing
|
||||
MISTRAL_API_KEY: z.string().min(1).optional(), // Mistral AI API key
|
||||
ANTHROPIC_API_KEY_1: z.string().min(1).optional(), // Primary Anthropic Claude API key
|
||||
ANTHROPIC_API_KEY_2: z.string().min(1).optional(), // Additional Anthropic API key for load balancing
|
||||
ANTHROPIC_API_KEY_3: z.string().min(1).optional(), // Additional Anthropic API key for load balancing
|
||||
FREESTYLE_API_KEY: z.string().min(1).optional(), // Freestyle AI API key
|
||||
OLLAMA_URL: z.string().url().optional(), // Ollama local LLM server URL
|
||||
ELEVENLABS_API_KEY: z.string().min(1).optional(), // ElevenLabs API key for text-to-speech in deployed chat
|
||||
SERPER_API_KEY: z.string().min(1).optional(), // Serper API key for online search
|
||||
|
||||
// Azure OpenAI Configuration
|
||||
AZURE_OPENAI_ENDPOINT: z.string().url().optional(), // Azure OpenAI service endpoint
|
||||
AZURE_OPENAI_API_VERSION: z.string().optional(), // Azure OpenAI API version
|
||||
AZURE_OPENAI_ENDPOINT: z.string().url().optional(), // Azure OpenAI service endpoint
|
||||
AZURE_OPENAI_API_VERSION: z.string().optional(), // Azure OpenAI API version
|
||||
|
||||
// Monitoring & Analytics
|
||||
TELEMETRY_ENDPOINT: z.string().url().optional(), // Custom telemetry/analytics endpoint
|
||||
COST_MULTIPLIER: z.number().optional(), // Multiplier for cost calculations
|
||||
COPILOT_COST_MULTIPLIER: z.number().optional(), // Multiplier for copilot cost calculations
|
||||
SENTRY_ORG: z.string().optional(), // Sentry organization for error tracking
|
||||
SENTRY_PROJECT: z.string().optional(), // Sentry project for error tracking
|
||||
SENTRY_AUTH_TOKEN: z.string().optional(), // Sentry authentication token
|
||||
TELEMETRY_ENDPOINT: z.string().url().optional(), // Custom telemetry/analytics endpoint
|
||||
COST_MULTIPLIER: z.number().optional(), // Multiplier for cost calculations
|
||||
COPILOT_COST_MULTIPLIER: z.number().optional(), // Multiplier for copilot cost calculations
|
||||
SENTRY_ORG: z.string().optional(), // Sentry organization for error tracking
|
||||
SENTRY_PROJECT: z.string().optional(), // Sentry project for error tracking
|
||||
SENTRY_AUTH_TOKEN: z.string().optional(), // Sentry authentication token
|
||||
|
||||
// External Services
|
||||
JWT_SECRET: z.string().min(1).optional(), // JWT signing secret for custom tokens
|
||||
BROWSERBASE_API_KEY: z.string().min(1).optional(), // Browserbase API key for browser automation
|
||||
BROWSERBASE_PROJECT_ID: z.string().min(1).optional(), // Browserbase project ID
|
||||
GITHUB_TOKEN: z.string().optional(), // GitHub personal access token for API access
|
||||
JWT_SECRET: z.string().min(1).optional(), // JWT signing secret for custom tokens
|
||||
BROWSERBASE_API_KEY: z.string().min(1).optional(), // Browserbase API key for browser automation
|
||||
BROWSERBASE_PROJECT_ID: z.string().min(1).optional(), // Browserbase project ID
|
||||
GITHUB_TOKEN: z.string().optional(), // GitHub personal access token for API access
|
||||
|
||||
// Infrastructure & Deployment
|
||||
NEXT_RUNTIME: z.string().optional(), // Next.js runtime environment
|
||||
VERCEL_ENV: z.string().optional(), // Vercel deployment environment
|
||||
DOCKER_BUILD: z.boolean().optional(), // Flag indicating Docker build environment
|
||||
NEXT_RUNTIME: z.string().optional(), // Next.js runtime environment
|
||||
VERCEL_ENV: z.string().optional(), // Vercel deployment environment
|
||||
DOCKER_BUILD: z.boolean().optional(), // Flag indicating Docker build environment
|
||||
|
||||
// Background Jobs & Scheduling
|
||||
TRIGGER_SECRET_KEY: z.string().min(1).optional(), // Trigger.dev secret key for background jobs
|
||||
CRON_SECRET: z.string().optional(), // Secret for authenticating cron job requests
|
||||
JOB_RETENTION_DAYS: z.string().optional().default('1'), // Days to retain job logs/data
|
||||
TRIGGER_SECRET_KEY: z.string().min(1).optional(), // Trigger.dev secret key for background jobs
|
||||
CRON_SECRET: z.string().optional(), // Secret for authenticating cron job requests
|
||||
JOB_RETENTION_DAYS: z.string().optional().default('1'), // Days to retain job logs/data
|
||||
|
||||
// Cloud Storage - AWS S3
|
||||
AWS_REGION: z.string().optional(), // AWS region for S3 buckets
|
||||
AWS_ACCESS_KEY_ID: z.string().optional(), // AWS access key ID
|
||||
AWS_SECRET_ACCESS_KEY: z.string().optional(), // AWS secret access key
|
||||
S3_BUCKET_NAME: z.string().optional(), // S3 bucket for general file storage
|
||||
S3_LOGS_BUCKET_NAME: z.string().optional(), // S3 bucket for storing logs
|
||||
S3_KB_BUCKET_NAME: z.string().optional(), // S3 bucket for knowledge base files
|
||||
S3_EXECUTION_FILES_BUCKET_NAME: z.string().optional(), // S3 bucket for workflow execution files
|
||||
S3_CHAT_BUCKET_NAME: z.string().optional(), // S3 bucket for chat logos
|
||||
S3_COPILOT_BUCKET_NAME: z.string().optional(), // S3 bucket for copilot files
|
||||
AWS_REGION: z.string().optional(), // AWS region for S3 buckets
|
||||
AWS_ACCESS_KEY_ID: z.string().optional(), // AWS access key ID
|
||||
AWS_SECRET_ACCESS_KEY: z.string().optional(), // AWS secret access key
|
||||
S3_BUCKET_NAME: z.string().optional(), // S3 bucket for general file storage
|
||||
S3_LOGS_BUCKET_NAME: z.string().optional(), // S3 bucket for storing logs
|
||||
S3_KB_BUCKET_NAME: z.string().optional(), // S3 bucket for knowledge base files
|
||||
S3_EXECUTION_FILES_BUCKET_NAME: z.string().optional(), // S3 bucket for workflow execution files
|
||||
S3_CHAT_BUCKET_NAME: z.string().optional(), // S3 bucket for chat logos
|
||||
S3_COPILOT_BUCKET_NAME: z.string().optional(), // S3 bucket for copilot files
|
||||
|
||||
// Cloud Storage - Azure Blob
|
||||
AZURE_ACCOUNT_NAME: z.string().optional(), // Azure storage account name
|
||||
AZURE_ACCOUNT_KEY: z.string().optional(), // Azure storage account key
|
||||
AZURE_CONNECTION_STRING: z.string().optional(), // Azure storage connection string
|
||||
AZURE_STORAGE_CONTAINER_NAME: z.string().optional(), // Azure container for general files
|
||||
AZURE_STORAGE_KB_CONTAINER_NAME: z.string().optional(), // Azure container for knowledge base files
|
||||
AZURE_STORAGE_EXECUTION_FILES_CONTAINER_NAME: z.string().optional(), // Azure container for workflow execution files
|
||||
AZURE_STORAGE_CHAT_CONTAINER_NAME: z.string().optional(), // Azure container for chat logos
|
||||
AZURE_STORAGE_COPILOT_CONTAINER_NAME: z.string().optional(), // Azure container for copilot files
|
||||
AZURE_ACCOUNT_NAME: z.string().optional(), // Azure storage account name
|
||||
AZURE_ACCOUNT_KEY: z.string().optional(), // Azure storage account key
|
||||
AZURE_CONNECTION_STRING: z.string().optional(), // Azure storage connection string
|
||||
AZURE_STORAGE_CONTAINER_NAME: z.string().optional(), // Azure container for general files
|
||||
AZURE_STORAGE_KB_CONTAINER_NAME: z.string().optional(), // Azure container for knowledge base files
|
||||
AZURE_STORAGE_EXECUTION_FILES_CONTAINER_NAME: z.string().optional(), // Azure container for workflow execution files
|
||||
AZURE_STORAGE_CHAT_CONTAINER_NAME: z.string().optional(), // Azure container for chat logos
|
||||
AZURE_STORAGE_COPILOT_CONTAINER_NAME: z.string().optional(), // Azure container for copilot files
|
||||
|
||||
// Data Retention
|
||||
FREE_PLAN_LOG_RETENTION_DAYS: z.string().optional(), // Log retention days for free plan users
|
||||
FREE_PLAN_LOG_RETENTION_DAYS: z.string().optional(), // Log retention days for free plan users
|
||||
|
||||
// Rate Limiting Configuration
|
||||
RATE_LIMIT_WINDOW_MS: z.string().optional().default('60000'), // Rate limit window duration in milliseconds (default: 1 minute)
|
||||
MANUAL_EXECUTION_LIMIT: z.string().optional().default('999999'), // Manual execution bypass value (effectively unlimited)
|
||||
RATE_LIMIT_FREE_SYNC: z.string().optional().default('10'), // Free tier sync API executions per minute
|
||||
RATE_LIMIT_FREE_ASYNC: z.string().optional().default('50'), // Free tier async API executions per minute
|
||||
RATE_LIMIT_PRO_SYNC: z.string().optional().default('25'), // Pro tier sync API executions per minute
|
||||
RATE_LIMIT_PRO_ASYNC: z.string().optional().default('200'), // Pro tier async API executions per minute
|
||||
RATE_LIMIT_TEAM_SYNC: z.string().optional().default('75'), // Team tier sync API executions per minute
|
||||
RATE_LIMIT_TEAM_ASYNC: z.string().optional().default('500'), // Team tier async API executions per minute
|
||||
RATE_LIMIT_ENTERPRISE_SYNC: z.string().optional().default('150'), // Enterprise tier sync API executions per minute
|
||||
RATE_LIMIT_ENTERPRISE_ASYNC: z.string().optional().default('1000'), // Enterprise tier async API executions per minute
|
||||
MANUAL_EXECUTION_LIMIT: z.string().optional().default('999999'), // Manual execution bypass value (effectively unlimited)
|
||||
RATE_LIMIT_FREE_SYNC: z.string().optional().default('10'), // Free tier sync API executions per minute
|
||||
RATE_LIMIT_FREE_ASYNC: z.string().optional().default('50'), // Free tier async API executions per minute
|
||||
RATE_LIMIT_PRO_SYNC: z.string().optional().default('25'), // Pro tier sync API executions per minute
|
||||
RATE_LIMIT_PRO_ASYNC: z.string().optional().default('200'), // Pro tier async API executions per minute
|
||||
RATE_LIMIT_TEAM_SYNC: z.string().optional().default('75'), // Team tier sync API executions per minute
|
||||
RATE_LIMIT_TEAM_ASYNC: z.string().optional().default('500'), // Team tier async API executions per minute
|
||||
RATE_LIMIT_ENTERPRISE_SYNC: z.string().optional().default('150'), // Enterprise tier sync API executions per minute
|
||||
RATE_LIMIT_ENTERPRISE_ASYNC: z.string().optional().default('1000'), // Enterprise tier async API executions per minute
|
||||
|
||||
// Real-time Communication
|
||||
SOCKET_SERVER_URL: z.string().url().optional(), // WebSocket server URL for real-time features
|
||||
SOCKET_PORT: z.number().optional(), // Port for WebSocket server
|
||||
PORT: z.number().optional(), // Main application port
|
||||
ALLOWED_ORIGINS: z.string().optional(), // CORS allowed origins
|
||||
SOCKET_SERVER_URL: z.string().url().optional(), // WebSocket server URL for real-time features
|
||||
SOCKET_PORT: z.number().optional(), // Port for WebSocket server
|
||||
PORT: z.number().optional(), // Main application port
|
||||
ALLOWED_ORIGINS: z.string().optional(), // CORS allowed origins
|
||||
|
||||
// OAuth Integration Credentials - All optional, enables third-party integrations
|
||||
GOOGLE_CLIENT_ID: z.string().optional(), // Google OAuth client ID for Google services
|
||||
GOOGLE_CLIENT_SECRET: z.string().optional(), // Google OAuth client secret
|
||||
GITHUB_CLIENT_ID: z.string().optional(), // GitHub OAuth client ID for GitHub integration
|
||||
GITHUB_CLIENT_SECRET: z.string().optional(), // GitHub OAuth client secret
|
||||
GITHUB_REPO_CLIENT_ID: z.string().optional(), // GitHub OAuth client ID for repo access
|
||||
GITHUB_REPO_CLIENT_SECRET: z.string().optional(), // GitHub OAuth client secret for repo access
|
||||
X_CLIENT_ID: z.string().optional(), // X (Twitter) OAuth client ID
|
||||
X_CLIENT_SECRET: z.string().optional(), // X (Twitter) OAuth client secret
|
||||
CONFLUENCE_CLIENT_ID: z.string().optional(), // Atlassian Confluence OAuth client ID
|
||||
CONFLUENCE_CLIENT_SECRET: z.string().optional(), // Atlassian Confluence OAuth client secret
|
||||
JIRA_CLIENT_ID: z.string().optional(), // Atlassian Jira OAuth client ID
|
||||
JIRA_CLIENT_SECRET: z.string().optional(), // Atlassian Jira OAuth client secret
|
||||
AIRTABLE_CLIENT_ID: z.string().optional(), // Airtable OAuth client ID
|
||||
AIRTABLE_CLIENT_SECRET: z.string().optional(), // Airtable OAuth client secret
|
||||
SUPABASE_CLIENT_ID: z.string().optional(), // Supabase OAuth client ID
|
||||
SUPABASE_CLIENT_SECRET: z.string().optional(), // Supabase OAuth client secret
|
||||
NOTION_CLIENT_ID: z.string().optional(), // Notion OAuth client ID
|
||||
NOTION_CLIENT_SECRET: z.string().optional(), // Notion OAuth client secret
|
||||
DISCORD_CLIENT_ID: z.string().optional(), // Discord OAuth client ID
|
||||
DISCORD_CLIENT_SECRET: z.string().optional(), // Discord OAuth client secret
|
||||
MICROSOFT_CLIENT_ID: z.string().optional(), // Microsoft OAuth client ID for Office 365/Teams
|
||||
MICROSOFT_CLIENT_SECRET: z.string().optional(), // Microsoft OAuth client secret
|
||||
HUBSPOT_CLIENT_ID: z.string().optional(), // HubSpot OAuth client ID
|
||||
HUBSPOT_CLIENT_SECRET: z.string().optional(), // HubSpot OAuth client secret
|
||||
WEALTHBOX_CLIENT_ID: z.string().optional(), // WealthBox OAuth client ID
|
||||
WEALTHBOX_CLIENT_SECRET: z.string().optional(), // WealthBox OAuth client secret
|
||||
LINEAR_CLIENT_ID: z.string().optional(), // Linear OAuth client ID
|
||||
LINEAR_CLIENT_SECRET: z.string().optional(), // Linear OAuth client secret
|
||||
SLACK_CLIENT_ID: z.string().optional(), // Slack OAuth client ID
|
||||
SLACK_CLIENT_SECRET: z.string().optional(), // Slack OAuth client secret
|
||||
REDDIT_CLIENT_ID: z.string().optional(), // Reddit OAuth client ID
|
||||
REDDIT_CLIENT_SECRET: z.string().optional(), // Reddit OAuth client secret
|
||||
GOOGLE_CLIENT_ID: z.string().optional(), // Google OAuth client ID for Google services
|
||||
GOOGLE_CLIENT_SECRET: z.string().optional(), // Google OAuth client secret
|
||||
GITHUB_CLIENT_ID: z.string().optional(), // GitHub OAuth client ID for GitHub integration
|
||||
GITHUB_CLIENT_SECRET: z.string().optional(), // GitHub OAuth client secret
|
||||
GITHUB_REPO_CLIENT_ID: z.string().optional(), // GitHub OAuth client ID for repo access
|
||||
GITHUB_REPO_CLIENT_SECRET: z.string().optional(), // GitHub OAuth client secret for repo access
|
||||
X_CLIENT_ID: z.string().optional(), // X (Twitter) OAuth client ID
|
||||
X_CLIENT_SECRET: z.string().optional(), // X (Twitter) OAuth client secret
|
||||
CONFLUENCE_CLIENT_ID: z.string().optional(), // Atlassian Confluence OAuth client ID
|
||||
CONFLUENCE_CLIENT_SECRET: z.string().optional(), // Atlassian Confluence OAuth client secret
|
||||
JIRA_CLIENT_ID: z.string().optional(), // Atlassian Jira OAuth client ID
|
||||
JIRA_CLIENT_SECRET: z.string().optional(), // Atlassian Jira OAuth client secret
|
||||
AIRTABLE_CLIENT_ID: z.string().optional(), // Airtable OAuth client ID
|
||||
AIRTABLE_CLIENT_SECRET: z.string().optional(), // Airtable OAuth client secret
|
||||
SUPABASE_CLIENT_ID: z.string().optional(), // Supabase OAuth client ID
|
||||
SUPABASE_CLIENT_SECRET: z.string().optional(), // Supabase OAuth client secret
|
||||
NOTION_CLIENT_ID: z.string().optional(), // Notion OAuth client ID
|
||||
NOTION_CLIENT_SECRET: z.string().optional(), // Notion OAuth client secret
|
||||
DISCORD_CLIENT_ID: z.string().optional(), // Discord OAuth client ID
|
||||
DISCORD_CLIENT_SECRET: z.string().optional(), // Discord OAuth client secret
|
||||
MICROSOFT_CLIENT_ID: z.string().optional(), // Microsoft OAuth client ID for Office 365/Teams
|
||||
MICROSOFT_CLIENT_SECRET: z.string().optional(), // Microsoft OAuth client secret
|
||||
HUBSPOT_CLIENT_ID: z.string().optional(), // HubSpot OAuth client ID
|
||||
HUBSPOT_CLIENT_SECRET: z.string().optional(), // HubSpot OAuth client secret
|
||||
WEALTHBOX_CLIENT_ID: z.string().optional(), // WealthBox OAuth client ID
|
||||
WEALTHBOX_CLIENT_SECRET: z.string().optional(), // WealthBox OAuth client secret
|
||||
LINEAR_CLIENT_ID: z.string().optional(), // Linear OAuth client ID
|
||||
LINEAR_CLIENT_SECRET: z.string().optional(), // Linear OAuth client secret
|
||||
SLACK_CLIENT_ID: z.string().optional(), // Slack OAuth client ID
|
||||
SLACK_CLIENT_SECRET: z.string().optional(), // Slack OAuth client secret
|
||||
REDDIT_CLIENT_ID: z.string().optional(), // Reddit OAuth client ID
|
||||
REDDIT_CLIENT_SECRET: z.string().optional(), // Reddit OAuth client secret
|
||||
},
|
||||
|
||||
client: {
|
||||
// Core Application URLs - Required for frontend functionality
|
||||
NEXT_PUBLIC_APP_URL: z.string().url(), // Base URL of the application (e.g., https://app.sim.ai)
|
||||
NEXT_PUBLIC_VERCEL_URL: z.string().optional(), // Vercel deployment URL for preview/production
|
||||
NEXT_PUBLIC_APP_URL: z.string().url(), // Base URL of the application (e.g., https://app.sim.ai)
|
||||
NEXT_PUBLIC_VERCEL_URL: z.string().optional(), // Vercel deployment URL for preview/production
|
||||
|
||||
// Client-side Services
|
||||
NEXT_PUBLIC_SENTRY_DSN: z.string().url().optional(), // Sentry DSN for client-side error tracking
|
||||
NEXT_PUBLIC_SOCKET_URL: z.string().url().optional(), // WebSocket server URL for real-time features
|
||||
NEXT_PUBLIC_SENTRY_DSN: z.string().url().optional(), // Sentry DSN for client-side error tracking
|
||||
NEXT_PUBLIC_SOCKET_URL: z.string().url().optional(), // WebSocket server URL for real-time features
|
||||
|
||||
// Asset Storage
|
||||
NEXT_PUBLIC_BLOB_BASE_URL: z.string().url().optional(), // Base URL for Vercel Blob storage (CDN assets)
|
||||
NEXT_PUBLIC_BLOB_BASE_URL: z.string().url().optional(), // Base URL for Vercel Blob storage (CDN assets)
|
||||
|
||||
// Google Services - For client-side Google integrations
|
||||
NEXT_PUBLIC_GOOGLE_CLIENT_ID: z.string().optional(), // Google OAuth client ID for browser auth
|
||||
NEXT_PUBLIC_GOOGLE_CLIENT_ID: z.string().optional(), // Google OAuth client ID for browser auth
|
||||
|
||||
// Analytics & Tracking
|
||||
NEXT_PUBLIC_RB2B_KEY: z.string().optional(), // RB2B tracking key for B2B analytics
|
||||
NEXT_PUBLIC_GOOGLE_API_KEY: z.string().optional(), // Google API key for client-side API calls
|
||||
NEXT_PUBLIC_GOOGLE_PROJECT_NUMBER: z.string().optional(), // Google project number for Drive picker
|
||||
NEXT_PUBLIC_RB2B_KEY: z.string().optional(), // RB2B tracking key for B2B analytics
|
||||
NEXT_PUBLIC_GOOGLE_API_KEY: z.string().optional(), // Google API key for client-side API calls
|
||||
NEXT_PUBLIC_GOOGLE_PROJECT_NUMBER: z.string().optional(), // Google project number for Drive picker
|
||||
|
||||
// UI Branding & Whitelabeling
|
||||
NEXT_PUBLIC_BRAND_NAME: z.string().optional(), // Custom brand name (defaults to "Sim")
|
||||
NEXT_PUBLIC_BRAND_LOGO_URL: z.string().url().optional(), // Custom logo URL
|
||||
NEXT_PUBLIC_BRAND_FAVICON_URL: z.string().url().optional(), // Custom favicon URL
|
||||
NEXT_PUBLIC_BRAND_PRIMARY_COLOR: z.string().optional(), // Primary brand color (hex)
|
||||
NEXT_PUBLIC_BRAND_SECONDARY_COLOR: z.string().optional(), // Secondary brand color (hex)
|
||||
NEXT_PUBLIC_BRAND_ACCENT_COLOR: z.string().optional(), // Accent brand color (hex)
|
||||
NEXT_PUBLIC_CUSTOM_CSS_URL: z.string().url().optional(), // Custom CSS stylesheet URL
|
||||
NEXT_PUBLIC_HIDE_BRANDING: z.string().optional(), // Hide "Powered by" branding
|
||||
NEXT_PUBLIC_CUSTOM_FOOTER_TEXT: z.string().optional(), // Custom footer text
|
||||
NEXT_PUBLIC_SUPPORT_EMAIL: z.string().email().optional(), // Custom support email
|
||||
NEXT_PUBLIC_SUPPORT_URL: z.string().url().optional(), // Custom support URL
|
||||
NEXT_PUBLIC_DOCUMENTATION_URL: z.string().url().optional(), // Custom documentation URL
|
||||
NEXT_PUBLIC_TERMS_URL: z.string().url().optional(), // Custom terms of service URL
|
||||
NEXT_PUBLIC_PRIVACY_URL: z.string().url().optional(), // Custom privacy policy URL
|
||||
NEXT_PUBLIC_BRAND_NAME: z.string().optional(), // Custom brand name (defaults to "Sim")
|
||||
NEXT_PUBLIC_BRAND_LOGO_URL: z.string().url().optional(), // Custom logo URL
|
||||
NEXT_PUBLIC_BRAND_FAVICON_URL: z.string().url().optional(), // Custom favicon URL
|
||||
NEXT_PUBLIC_BRAND_PRIMARY_COLOR: z.string().optional(), // Primary brand color (hex)
|
||||
NEXT_PUBLIC_BRAND_SECONDARY_COLOR: z.string().optional(), // Secondary brand color (hex)
|
||||
NEXT_PUBLIC_BRAND_ACCENT_COLOR: z.string().optional(), // Accent brand color (hex)
|
||||
NEXT_PUBLIC_CUSTOM_CSS_URL: z.string().url().optional(), // Custom CSS stylesheet URL
|
||||
NEXT_PUBLIC_HIDE_BRANDING: z.string().optional(), // Hide "Powered by" branding
|
||||
NEXT_PUBLIC_CUSTOM_FOOTER_TEXT: z.string().optional(), // Custom footer text
|
||||
NEXT_PUBLIC_SUPPORT_EMAIL: z.string().email().optional(), // Custom support email
|
||||
NEXT_PUBLIC_SUPPORT_URL: z.string().url().optional(), // Custom support URL
|
||||
NEXT_PUBLIC_DOCUMENTATION_URL: z.string().url().optional(), // Custom documentation URL
|
||||
NEXT_PUBLIC_TERMS_URL: z.string().url().optional(), // Custom terms of service URL
|
||||
NEXT_PUBLIC_PRIVACY_URL: z.string().url().optional(), // Custom privacy policy URL
|
||||
},
|
||||
|
||||
// Variables available on both server and client
|
||||
|
||||
@@ -1,7 +1,7 @@
/**
 * Environment utility functions for consistent environment detection across the application
 */
import { env } from './env'
import { env, isTruthy } from './env'

/**
 * Is the application running in production mode
@@ -23,6 +23,11 @@ export const isTest = env.NODE_ENV === 'test'
 */
export const isHosted = env.NEXT_PUBLIC_APP_URL === 'https://www.sim.ai'

/**
 * Is billing enforcement enabled
 */
export const isBillingEnabled = isTruthy(env.BILLING_ENABLED)

/**
 * Get cost multiplier based on environment
 */

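Given the `isTruthy` implementation mocked in the test hunk earlier, the new flag accepts the usual truthy spellings. A quick sanity check (illustrative only):

```typescript
import { isTruthy } from '@/lib/env'

// With the implementation shown in the mock above:
console.assert(isTruthy('true') === true)
console.assert(isTruthy('1') === true)
console.assert(isTruthy('0') === false) // any string other than 'true' or '1' is false
console.assert(isTruthy(undefined) === false)
// So BILLING_ENABLED=true (or 1) turns enforcement on; leaving it unset keeps
// isBillingEnabled === false and the permissive code paths above apply.
```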
@@ -223,55 +223,81 @@ export function formatWebhookInput(
|
||||
input = 'Message received'
|
||||
}
|
||||
|
||||
// Create the message object for easier access
|
||||
const messageObj = {
|
||||
id: message.message_id,
|
||||
text: message.text,
|
||||
caption: message.caption,
|
||||
date: message.date,
|
||||
messageType: message.photo
|
||||
? 'photo'
|
||||
: message.document
|
||||
? 'document'
|
||||
: message.audio
|
||||
? 'audio'
|
||||
: message.video
|
||||
? 'video'
|
||||
: message.voice
|
||||
? 'voice'
|
||||
: message.sticker
|
||||
? 'sticker'
|
||||
: message.location
|
||||
? 'location'
|
||||
: message.contact
|
||||
? 'contact'
|
||||
: message.poll
|
||||
? 'poll'
|
||||
: 'text',
|
||||
raw: message,
|
||||
}
|
||||
|
||||
// Create sender object
|
||||
const senderObj = message.from
|
||||
? {
|
||||
id: message.from.id,
|
||||
firstName: message.from.first_name,
|
||||
lastName: message.from.last_name,
|
||||
username: message.from.username,
|
||||
languageCode: message.from.language_code,
|
||||
isBot: message.from.is_bot,
|
||||
}
|
||||
: null
|
||||
|
||||
// Create chat object
|
||||
const chatObj = message.chat
|
||||
? {
|
||||
id: message.chat.id,
|
||||
type: message.chat.type,
|
||||
title: message.chat.title,
|
||||
username: message.chat.username,
|
||||
firstName: message.chat.first_name,
|
||||
lastName: message.chat.last_name,
|
||||
}
|
||||
: null
|
||||
|
||||
return {
|
||||
input, // Primary workflow input - the message content
|
||||
|
||||
// NEW: Top-level properties for backward compatibility with <blockName.message> syntax
|
||||
message: messageObj,
|
||||
sender: senderObj,
|
||||
chat: chatObj,
|
||||
updateId: body.update_id,
|
||||
updateType: body.message
|
||||
? 'message'
|
||||
: body.edited_message
|
||||
? 'edited_message'
|
||||
: body.channel_post
|
||||
? 'channel_post'
|
||||
: body.edited_channel_post
|
||||
? 'edited_channel_post'
|
||||
: 'unknown',
|
||||
|
||||
// Keep the nested structure for the new telegram.message.text syntax
|
||||
telegram: {
|
||||
message: {
|
||||
id: message.message_id,
|
||||
text: message.text,
|
||||
caption: message.caption,
|
||||
date: message.date,
|
||||
messageType: message.photo
|
||||
? 'photo'
|
||||
: message.document
|
||||
? 'document'
|
||||
: message.audio
|
||||
? 'audio'
|
||||
: message.video
|
||||
? 'video'
|
||||
: message.voice
|
||||
? 'voice'
|
||||
: message.sticker
|
||||
? 'sticker'
|
||||
: message.location
|
||||
? 'location'
|
||||
: message.contact
|
||||
? 'contact'
|
||||
: message.poll
|
||||
? 'poll'
|
||||
: 'text',
|
||||
raw: message,
|
||||
},
|
||||
sender: message.from
|
||||
? {
|
||||
id: message.from.id,
|
||||
firstName: message.from.first_name,
|
||||
lastName: message.from.last_name,
|
||||
username: message.from.username,
|
||||
languageCode: message.from.language_code,
|
||||
isBot: message.from.is_bot,
|
||||
}
|
||||
: null,
|
||||
chat: message.chat
|
||||
? {
|
||||
id: message.chat.id,
|
||||
type: message.chat.type,
|
||||
title: message.chat.title,
|
||||
username: message.chat.username,
|
||||
firstName: message.chat.first_name,
|
||||
lastName: message.chat.last_name,
|
||||
}
|
||||
: null,
|
||||
message: messageObj,
|
||||
sender: senderObj,
|
||||
chat: chatObj,
|
||||
updateId: body.update_id,
|
||||
updateType: body.message
|
||||
? 'message'
|
||||
@@ -331,6 +357,13 @@ export function formatWebhookInput(
|
||||
return body
|
||||
}
|
||||
|
||||
if (foundWebhook.provider === 'outlook') {
|
||||
if (body && typeof body === 'object' && 'email' in body) {
|
||||
return body // { email: {...}, timestamp: ... }
|
||||
}
|
||||
return body
|
||||
}
|
||||
|
||||
if (foundWebhook.provider === 'microsoftteams') {
|
||||
// Microsoft Teams outgoing webhook - Teams sending data to us
|
||||
const messageText = body?.text || ''
|
||||
@@ -341,6 +374,19 @@ export function formatWebhookInput(
|
||||
|
||||
return {
|
||||
input: messageText, // Primary workflow input - the message text
|
||||
|
||||
// Top-level properties for backward compatibility with <blockName.text> syntax
|
||||
type: body?.type || 'message',
|
||||
id: messageId,
|
||||
timestamp,
|
||||
localTimestamp: body?.localTimestamp || '',
|
||||
serviceUrl: body?.serviceUrl || '',
|
||||
channelId: body?.channelId || '',
|
||||
from_id: from.id || '',
|
||||
from_name: from.name || '',
|
||||
conversation_id: conversation.id || '',
|
||||
text: messageText,
|
||||
|
||||
microsoftteams: {
|
||||
message: {
|
||||
id: messageId,
|
||||
@@ -385,7 +431,210 @@ export function formatWebhookInput(
|
||||
}
|
||||
}
|
||||
|
||||
// Generic format for Slack and other providers
|
||||
if (foundWebhook.provider === 'slack') {
|
||||
// Slack input formatting logic - check for valid event
|
||||
const event = body?.event
|
||||
|
||||
if (event && body?.type === 'event_callback') {
|
||||
// Extract event text with fallbacks for different event types
|
||||
let input = ''
|
||||
|
||||
if (event.text) {
|
||||
input = event.text
|
||||
} else if (event.type === 'app_mention') {
|
||||
input = 'App mention received'
|
||||
} else {
|
||||
input = 'Slack event received'
|
||||
}
|
||||
|
||||
// Create the event object for easier access
|
||||
const eventObj = {
|
||||
event_type: event.type || '',
|
||||
channel: event.channel || '',
|
||||
channel_name: '', // Could be resolved via additional API calls if needed
|
||||
user: event.user || '',
|
||||
user_name: '', // Could be resolved via additional API calls if needed
|
||||
text: event.text || '',
|
||||
timestamp: event.ts || event.event_ts || '',
|
||||
team_id: body.team_id || event.team || '',
|
||||
event_id: body.event_id || '',
|
||||
}
|
||||
|
||||
return {
|
||||
input, // Primary workflow input - the event content
|
||||
|
||||
// Top-level properties for backward compatibility with <blockName.event> syntax
|
||||
event: eventObj,
|
||||
|
||||
// Keep the nested structure for the new slack.event.text syntax
|
||||
slack: {
|
||||
event: eventObj,
|
||||
},
|
||||
webhook: {
|
||||
data: {
|
||||
provider: 'slack',
|
||||
path: foundWebhook.path,
|
||||
providerConfig: foundWebhook.providerConfig,
|
||||
payload: body,
|
||||
headers: Object.fromEntries(request.headers.entries()),
|
||||
method: request.method,
|
||||
},
|
||||
},
|
||||
workflowId: foundWorkflow.id,
|
||||
}
|
||||
}
|
||||
|
||||
// Fallback for unknown Slack event types
|
||||
logger.warn('Unknown Slack event type', {
|
||||
type: body?.type,
|
||||
hasEvent: !!body?.event,
|
||||
bodyKeys: Object.keys(body || {}),
|
||||
})
|
||||
|
||||
return {
|
||||
input: 'Slack webhook received',
|
||||
slack: {
|
||||
event: {
|
||||
event_type: body?.event?.type || body?.type || 'unknown',
|
||||
channel: body?.event?.channel || '',
|
||||
user: body?.event?.user || '',
|
||||
text: body?.event?.text || '',
|
||||
timestamp: body?.event?.ts || '',
|
||||
team_id: body?.team_id || '',
|
||||
event_id: body?.event_id || '',
|
||||
},
|
||||
},
|
||||
webhook: {
|
||||
data: {
|
||||
provider: 'slack',
|
||||
path: foundWebhook.path,
|
||||
providerConfig: foundWebhook.providerConfig,
|
||||
payload: body,
|
||||
headers: Object.fromEntries(request.headers.entries()),
|
||||
method: request.method,
|
||||
},
|
||||
},
|
||||
workflowId: foundWorkflow.id,
|
||||
}
|
||||
}
|
||||
|
||||
if (foundWebhook.provider === 'github') {
|
||||
// GitHub webhook input formatting logic
|
||||
const eventType = request.headers.get('x-github-event') || 'unknown'
|
||||
const delivery = request.headers.get('x-github-delivery') || ''
|
||||
|
||||
// Extract common GitHub properties
|
||||
const repository = body?.repository || {}
|
||||
const sender = body?.sender || {}
|
||||
const action = body?.action || ''
|
||||
|
||||
// Build GitHub-specific variables based on the trigger config outputs
|
||||
const githubData = {
|
||||
// Event metadata
|
||||
event_type: eventType,
|
||||
action: action,
|
||||
delivery_id: delivery,
|
||||
|
||||
// Repository information (avoid 'repository' to prevent conflict with the object)
|
||||
repository_full_name: repository.full_name || '',
|
||||
repository_name: repository.name || '',
|
||||
repository_owner: repository.owner?.login || '',
|
||||
repository_id: repository.id || '',
|
||||
repository_url: repository.html_url || '',
|
||||
|
||||
// Sender information (avoid 'sender' to prevent conflict with the object)
|
||||
sender_login: sender.login || '',
|
||||
sender_id: sender.id || '',
|
||||
sender_type: sender.type || '',
|
||||
sender_url: sender.html_url || '',
|
||||
|
||||
// Event-specific data
|
||||
...(body?.ref && {
|
||||
ref: body.ref,
|
||||
branch: body.ref?.replace('refs/heads/', '') || '',
|
||||
}),
|
||||
...(body?.before && { before: body.before }),
|
||||
...(body?.after && { after: body.after }),
|
||||
...(body?.commits && {
|
||||
commits: JSON.stringify(body.commits),
|
||||
commit_count: body.commits.length || 0,
|
||||
}),
|
||||
...(body?.head_commit && {
|
||||
commit_message: body.head_commit.message || '',
|
||||
commit_author: body.head_commit.author?.name || '',
|
||||
commit_sha: body.head_commit.id || '',
|
||||
commit_url: body.head_commit.url || '',
|
||||
}),
|
||||
...(body?.pull_request && {
|
||||
pull_request: JSON.stringify(body.pull_request),
|
||||
pr_number: body.pull_request.number || '',
|
||||
pr_title: body.pull_request.title || '',
|
||||
pr_state: body.pull_request.state || '',
|
||||
pr_url: body.pull_request.html_url || '',
|
||||
}),
|
||||
...(body?.issue && {
|
||||
issue: JSON.stringify(body.issue),
|
||||
issue_number: body.issue.number || '',
|
||||
issue_title: body.issue.title || '',
|
||||
issue_state: body.issue.state || '',
|
||||
issue_url: body.issue.html_url || '',
|
||||
}),
|
||||
...(body?.comment && {
|
||||
comment: JSON.stringify(body.comment),
|
||||
comment_body: body.comment.body || '',
|
||||
comment_url: body.comment.html_url || '',
|
||||
}),
|
||||
}
|
||||
|
||||
// Set input based on event type for workflow processing
|
||||
let input = ''
|
||||
switch (eventType) {
|
||||
case 'push':
|
||||
input = `Push to ${githubData.branch || githubData.ref}: ${githubData.commit_message || 'No commit message'}`
|
||||
break
|
||||
case 'pull_request':
|
||||
input = `${action} pull request: ${githubData.pr_title || 'No title'}`
|
||||
break
|
||||
case 'issues':
|
||||
input = `${action} issue: ${githubData.issue_title || 'No title'}`
|
||||
break
|
||||
case 'issue_comment':
|
||||
case 'pull_request_review_comment':
|
||||
input = `Comment ${action}: ${githubData.comment_body?.slice(0, 100) || 'No comment body'}${(githubData.comment_body?.length || 0) > 100 ? '...' : ''}`
|
||||
break
|
||||
default:
|
||||
input = `GitHub ${eventType} event${action ? ` (${action})` : ''}`
|
||||
}
|
||||
|
||||
return {
|
||||
input, // Primary workflow input
|
||||
|
||||
// Top-level properties for backward compatibility
|
||||
...githubData,
|
||||
|
||||
// GitHub data structured for trigger handler to extract
|
||||
github: {
|
||||
// Processed convenience variables
|
||||
...githubData,
|
||||
// Raw GitHub webhook payload for direct field access
|
||||
...body,
|
||||
},
|
||||
|
||||
webhook: {
|
||||
data: {
|
||||
provider: 'github',
|
||||
path: foundWebhook.path,
|
||||
providerConfig: foundWebhook.providerConfig,
|
||||
payload: body,
|
||||
headers: Object.fromEntries(request.headers.entries()),
|
||||
method: request.method,
|
||||
},
|
||||
},
|
||||
workflowId: foundWorkflow.id,
|
||||
}
|
||||
}
|
||||
|
||||
// Generic format for other providers
|
||||
return {
|
||||
webhook: {
|
||||
data: {
|
||||
|
||||
@@ -225,6 +225,15 @@ export class Serializer {
    // This catches missing API keys, credentials, and other user-provided values early
    // Fields that are user-or-llm will be validated later after parameter merging

    // Skip validation if the block is in trigger mode
    if (block.triggerMode || blockConfig.category === 'triggers') {
      logger.info('Skipping validation for block in trigger mode', {
        blockId: block.id,
        blockType: block.type,
      })
      return
    }

    // Get the tool configuration to check parameter visibility
    const toolAccess = blockConfig.tools?.access
    if (!toolAccess || toolAccess.length === 0) {

@@ -194,6 +194,9 @@ export async function persistWorkflowOperation(workflowId: string, operation: an
      case 'subflow':
        await handleSubflowOperationTx(tx, workflowId, op, payload, userId)
        break
      case 'variable':
        await handleVariableOperationTx(tx, workflowId, op, payload, userId)
        break
      default:
        throw new Error(`Unknown operation target: ${target}`)
    }
@@ -855,3 +858,127 @@ async function handleSubflowOperationTx(
      throw new Error(`Unsupported subflow operation: ${operation}`)
  }
}

// Variable operations - updates workflow.variables JSON field
async function handleVariableOperationTx(
  tx: any,
  workflowId: string,
  operation: string,
  payload: any,
  userId: string
) {
  // Get current workflow variables
  const workflowData = await tx
    .select({ variables: workflow.variables })
    .from(workflow)
    .where(eq(workflow.id, workflowId))
    .limit(1)

  if (workflowData.length === 0) {
    throw new Error(`Workflow ${workflowId} not found`)
  }

  const currentVariables = (workflowData[0].variables as Record<string, any>) || {}

  switch (operation) {
    case 'add': {
      if (!payload.id || !payload.name || payload.type === undefined) {
        throw new Error('Missing required fields for add variable operation')
      }

      // Add the new variable
      const updatedVariables = {
        ...currentVariables,
        [payload.id]: {
          id: payload.id,
          workflowId: payload.workflowId,
          name: payload.name,
          type: payload.type,
          value: payload.value || '',
        },
      }

      await tx
        .update(workflow)
        .set({
          variables: updatedVariables,
          updatedAt: new Date(),
        })
        .where(eq(workflow.id, workflowId))

      logger.debug(`Added variable ${payload.id} (${payload.name}) to workflow ${workflowId}`)
      break
    }

    case 'remove': {
      if (!payload.variableId) {
        throw new Error('Missing variable ID for remove operation')
      }

      // Remove the variable
      const { [payload.variableId]: _, ...updatedVariables } = currentVariables

      await tx
        .update(workflow)
        .set({
          variables: updatedVariables,
          updatedAt: new Date(),
        })
        .where(eq(workflow.id, workflowId))

      logger.debug(`Removed variable ${payload.variableId} from workflow ${workflowId}`)
      break
    }

    case 'duplicate': {
      if (!payload.sourceVariableId || !payload.id) {
        throw new Error('Missing required fields for duplicate variable operation')
      }

      const sourceVariable = currentVariables[payload.sourceVariableId]
      if (!sourceVariable) {
        throw new Error(`Source variable ${payload.sourceVariableId} not found`)
      }

      // Create duplicated variable with unique name
      const baseName = `${sourceVariable.name} (copy)`
      let uniqueName = baseName
      let nameIndex = 1

      // Ensure name uniqueness
      const existingNames = Object.values(currentVariables).map((v: any) => v.name)
      while (existingNames.includes(uniqueName)) {
        uniqueName = `${baseName} (${nameIndex})`
        nameIndex++
      }

      const duplicatedVariable = {
        ...sourceVariable,
        id: payload.id,
        name: uniqueName,
      }

      const updatedVariables = {
        ...currentVariables,
        [payload.id]: duplicatedVariable,
      }

      await tx
        .update(workflow)
        .set({
          variables: updatedVariables,
          updatedAt: new Date(),
        })
        .where(eq(workflow.id, workflowId))

      logger.debug(
        `Duplicated variable ${payload.sourceVariableId} -> ${payload.id} (${uniqueName}) in workflow ${workflowId}`
      )
      break
    }

    default:
      logger.warn(`Unknown variable operation: ${operation}`)
      throw new Error(`Unsupported variable operation: ${operation}`)
  }
}

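For reference, the payload shapes this handler validates can be read off the checks above. The snippet below is a hedged illustration; the IDs and names are placeholders, not values from the diff.

```typescript
// Illustrative operation payloads matching the validation in handleVariableOperationTx.
const addOp = {
  operation: 'add',
  target: 'variable',
  payload: { id: 'var_1', workflowId: 'wf_1', name: 'apiBase', type: 'string', value: '' },
}

const removeOp = {
  operation: 'remove',
  target: 'variable',
  payload: { variableId: 'var_1' },
}

const duplicateOp = {
  operation: 'duplicate',
  target: 'variable',
  // The copy gets the source's fields with a "<name> (copy)" or "<name> (copy) (n)" name.
  payload: { sourceVariableId: 'var_1', id: 'var_2' },
}
```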
@@ -2,6 +2,7 @@ import { setupConnectionHandlers } from '@/socket-server/handlers/connection'
import { setupOperationsHandlers } from '@/socket-server/handlers/operations'
import { setupPresenceHandlers } from '@/socket-server/handlers/presence'
import { setupSubblocksHandlers } from '@/socket-server/handlers/subblocks'
import { setupVariablesHandlers } from '@/socket-server/handlers/variables'
import { setupWorkflowHandlers } from '@/socket-server/handlers/workflow'
import type { AuthenticatedSocket } from '@/socket-server/middleware/auth'
import type { RoomManager, UserPresence, WorkflowRoom } from '@/socket-server/rooms/manager'
@@ -17,6 +18,7 @@ export function setupAllHandlers(socket: AuthenticatedSocket, roomManager: RoomM
  setupWorkflowHandlers(socket, roomManager)
  setupOperationsHandlers(socket, roomManager)
  setupSubblocksHandlers(socket, roomManager)
  setupVariablesHandlers(socket, roomManager)
  setupPresenceHandlers(socket, roomManager)
  setupConnectionHandlers(socket, roomManager)
}
@@ -25,6 +27,7 @@ export {
  setupWorkflowHandlers,
  setupOperationsHandlers,
  setupSubblocksHandlers,
  setupVariablesHandlers,
  setupPresenceHandlers,
  setupConnectionHandlers,
}

@@ -126,6 +126,44 @@ export function setupOperationsHandlers(
        return // Early return for position updates
      }

      if (target === 'variable' && ['add', 'remove', 'duplicate'].includes(operation)) {
        // Persist first, then broadcast
        await persistWorkflowOperation(workflowId, {
          operation,
          target,
          payload,
          timestamp: operationTimestamp,
          userId: session.userId,
        })

        room.lastModified = Date.now()

        const broadcastData = {
          operation,
          target,
          payload,
          timestamp: operationTimestamp,
          senderId: socket.id,
          userId: session.userId,
          userName: session.userName,
          metadata: {
            workflowId,
            operationId: crypto.randomUUID(),
          },
        }

        socket.to(workflowId).emit('workflow-operation', broadcastData)

        if (operationId) {
          socket.emit('operation-confirmed', {
            operationId,
            serverTimestamp: Date.now(),
          })
        }

        return
      }

      // For non-position operations, persist first then broadcast
      await persistWorkflowOperation(workflowId, {
        operation,

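On the receiving side, collaborators in the same workflow room would observe these events roughly as follows. This is a sketch only; the socket URL and client setup are assumptions, not part of this diff.

```typescript
import { io } from 'socket.io-client'

// Placeholder URL; in practice the client connects to the configured socket server.
const socket = io('http://localhost:3002')

// Other clients in the room receive the operation only after it has been persisted.
socket.on('workflow-operation', (data) => {
  if (data.target === 'variable') {
    console.log(`${data.userName} ran ${data.operation}`, data.payload)
  }
})

// The sender gets an acknowledgement carrying the server timestamp.
socket.on('operation-confirmed', ({ operationId, serverTimestamp }) => {
  console.log('confirmed', operationId, serverTimestamp)
})
```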