Mirror of https://github.com/simstudioai/sim.git (synced 2026-01-20 20:38:16 -05:00)

Compare commits: 30 commits, feat/file-... → v0.5.63
| Author | SHA1 | Date |
|---|---|---|
|  | b09f683072 |  |
|  | a8bb0db660 |  |
|  | af82820a28 |  |
|  | 4372841797 |  |
|  | 5e8c843241 |  |
|  | 7bf3d73ee6 |  |
|  | 7ffc11a738 |  |
|  | be578e2ed7 |  |
|  | f415e5edc4 |  |
|  | 13a6e6c3fa |  |
|  | f5ab7f21ae |  |
|  | bfb6fffe38 |  |
|  | 4fbec0a43f |  |
|  | 585f5e365b |  |
|  | 3792bdd252 |  |
|  | eb5d1f3e5b |  |
|  | 54ab82c8dd |  |
|  | f895bf469b |  |
|  | dd3209af06 |  |
|  | b6ba3b50a7 |  |
|  | b304233062 |  |
|  | 57e4b49bd6 |  |
|  | e12dd204ed |  |
|  | 3d9d9cbc54 |  |
|  | 0f4ec962ad |  |
|  | 4827866f9a |  |
|  | 3e697d9ed9 |  |
|  | 4431a1a484 |  |
|  | 4d1a9a3f22 |  |
|  | eb07a080fb |  |
@@ -86,112 +86,27 @@ export async function GET(request: NextRequest) {
      )
      .limit(candidateLimit)

    const knownLocales = ['en', 'es', 'fr', 'de', 'ja', 'zh']
+   const seenIds = new Set<string>()
+   const mergedResults = []

-   const vectorRankMap = new Map<string, number>()
-   vectorResults.forEach((r, idx) => vectorRankMap.set(r.chunkId, idx + 1))
-
-   const keywordRankMap = new Map<string, number>()
-   keywordResults.forEach((r, idx) => keywordRankMap.set(r.chunkId, idx + 1))
-
-   const allChunkIds = new Set([
-     ...vectorResults.map((r) => r.chunkId),
-     ...keywordResults.map((r) => r.chunkId),
-   ])
-
-   const k = 60
-   type ResultWithRRF = (typeof vectorResults)[0] & { rrfScore: number }
-   const scoredResults: ResultWithRRF[] = []
-
-   for (const chunkId of allChunkIds) {
-     const vectorRank = vectorRankMap.get(chunkId) ?? Number.POSITIVE_INFINITY
-     const keywordRank = keywordRankMap.get(chunkId) ?? Number.POSITIVE_INFINITY
-
-     const rrfScore = 1 / (k + vectorRank) + 1 / (k + keywordRank)
-
-     const result =
-       vectorResults.find((r) => r.chunkId === chunkId) ||
-       keywordResults.find((r) => r.chunkId === chunkId)
-
-     if (result) {
-       scoredResults.push({ ...result, rrfScore })
-     }
-   }
+   for (let i = 0; i < Math.max(vectorResults.length, keywordResults.length); i++) {
+     if (i < vectorResults.length && !seenIds.has(vectorResults[i].chunkId)) {
+       mergedResults.push(vectorResults[i])
+       seenIds.add(vectorResults[i].chunkId)
+     }
+     if (i < keywordResults.length && !seenIds.has(keywordResults[i].chunkId)) {
+       mergedResults.push(keywordResults[i])
+       seenIds.add(keywordResults[i].chunkId)
+     }
+   }

-   scoredResults.sort((a, b) => b.rrfScore - a.rrfScore)
-
-   const localeFilteredResults = scoredResults.filter((result) => {
-     const firstPart = result.sourceDocument.split('/')[0]
-     if (knownLocales.includes(firstPart)) {
-       return firstPart === locale
-     }
-     return locale === 'en'
-   })
-
-   const queryLower = query.toLowerCase()
-   const getTitleBoost = (result: ResultWithRRF): number => {
-     const fileName = result.sourceDocument
-       .replace('.mdx', '')
-       .split('/')
-       .pop()
-       ?.toLowerCase()
-       ?.replace(/_/g, ' ')
-
-     if (fileName === queryLower) return 0.01
-     if (fileName?.includes(queryLower)) return 0.005
-     return 0
-   }
-
-   localeFilteredResults.sort((a, b) => {
-     return b.rrfScore + getTitleBoost(b) - (a.rrfScore + getTitleBoost(a))
-   })
-
-   const pageMap = new Map<string, ResultWithRRF>()
-
-   for (const result of localeFilteredResults) {
-     const pageKey = result.sourceDocument
-     const existing = pageMap.get(pageKey)
-
-     if (!existing || result.rrfScore > existing.rrfScore) {
-       pageMap.set(pageKey, result)
-     }
-   }
-
-   const deduplicatedResults = Array.from(pageMap.values())
-     .sort((a, b) => b.rrfScore + getTitleBoost(b) - (a.rrfScore + getTitleBoost(a)))
-     .slice(0, limit)
-
-   const searchResults = deduplicatedResults.map((result) => {
+   const filteredResults = mergedResults.slice(0, limit)
+   const searchResults = filteredResults.map((result) => {
      const title = result.headerText || result.sourceDocument.replace('.mdx', '')

      const pathParts = result.sourceDocument
        .replace('.mdx', '')
        .split('/')
        .filter((part) => part !== 'index' && !knownLocales.includes(part))
-       .map((part) => {
-         return part
-           .replace(/_/g, ' ')
-           .split(' ')
-           .map((word) => {
-             const acronyms = ['api', 'mcp', 'sdk', 'url', 'http', 'json', 'xml', 'html', 'css', 'ai']
-             if (acronyms.includes(word.toLowerCase())) {
-               return word.toUpperCase()
-             }
-             return word.charAt(0).toUpperCase() + word.slice(1)
-           })
-           .join(' ')
-       })
+       .map((part) => part.charAt(0).toUpperCase() + part.slice(1))

      return {
        id: result.chunkId,
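The hunk above swaps Reciprocal Rank Fusion (RRF) ranking for a simple rank-interleaved merge of the vector and keyword result lists. For readers tracing the removed logic: RRF scores each chunk as the sum of 1/(k + rank) over every retriever that returned it, with k = 60 damping how much a single top rank can dominate. A minimal, self-contained sketch (the `rrfFuse` helper is illustrative, not part of the codebase):

```ts
// Reciprocal Rank Fusion over two ranked lists of chunk IDs, assuming
// ranks are 1-based; k = 60 is the conventional damping constant.
function rrfFuse(vectorIds: string[], keywordIds: string[], k = 60): Map<string, number> {
  const scores = new Map<string, number>()
  const addRanks = (ids: string[]) => {
    ids.forEach((id, idx) => {
      const rank = idx + 1
      scores.set(id, (scores.get(id) ?? 0) + 1 / (k + rank))
    })
  }
  addRanks(vectorIds)
  addRanks(keywordIds)
  return scores
}

// Example: a chunk ranked 1st by vectors and 3rd by keywords scores
// 1/61 + 1/63 ≈ 0.0323, outscoring a chunk found by only one retriever.
```

The interleaved merge that replaces it trades this scoring for predictability: it alternates one result from each list while deduplicating by chunk ID, so neither retriever can starve the other.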
@@ -1739,12 +1739,12 @@ export function BrowserUseIcon(props: SVGProps<SVGSVGElement>) {
      {...props}
      version='1.0'
      xmlns='http://www.w3.org/2000/svg'
-     width='28'
-     height='28'
+     width='150pt'
+     height='150pt'
      viewBox='0 0 150 150'
      preserveAspectRatio='xMidYMid meet'
    >
-     <g transform='translate(0,150) scale(0.05,-0.05)' fill='currentColor' stroke='none'>
+     <g transform='translate(0,150) scale(0.05,-0.05)' fill='#000000' stroke='none'>
        <path
          d='M786 2713 c-184 -61 -353 -217 -439 -405 -76 -165 -65 -539 19 -666
          l57 -85 -48 -124 c-203 -517 -79 -930 346 -1155 159 -85 441 -71 585 28 l111
@@ -7,7 +7,7 @@ import { BlockInfoCard } from "@/components/ui/block-info-card"

  <BlockInfoCard
    type="browser_use"
-   color="#181C1E"
+   color="#E0E0E0"
  />

  {/* MANUAL-CONTENT-START:intro */}
@@ -52,15 +52,6 @@ Read content from a Google Slides presentation
  | --------- | ---- | ----------- |
  | `slides` | json | Array of slides with their content |
  | `metadata` | json | Presentation metadata including ID, title, and URL |
- | ↳ `presentationId` | string | The presentation ID |
- | ↳ `title` | string | The presentation title |
- | ↳ `pageSize` | object | Presentation page size |
- | ↳ `width` | json | Page width as a Dimension object |
- | ↳ `height` | json | Page height as a Dimension object |
- | ↳ `width` | json | Page width as a Dimension object |
- | ↳ `height` | json | Page height as a Dimension object |
- | ↳ `mimeType` | string | The mime type of the presentation |
- | ↳ `url` | string | URL to open the presentation |

### `google_slides_write`

@@ -80,10 +71,6 @@ Write or update content in a Google Slides presentation
  | --------- | ---- | ----------- |
  | `updatedContent` | boolean | Indicates if presentation content was updated successfully |
  | `metadata` | json | Updated presentation metadata including ID, title, and URL |
- | ↳ `presentationId` | string | The presentation ID |
- | ↳ `title` | string | The presentation title |
- | ↳ `mimeType` | string | The mime type of the presentation |
- | ↳ `url` | string | URL to open the presentation |

### `google_slides_create`

@@ -103,10 +90,6 @@ Create a new Google Slides presentation
  | Parameter | Type | Description |
  | --------- | ---- | ----------- |
  | `metadata` | json | Created presentation metadata including ID, title, and URL |
- | ↳ `presentationId` | string | The presentation ID |
- | ↳ `title` | string | The presentation title |
- | ↳ `mimeType` | string | The mime type of the presentation |
- | ↳ `url` | string | URL to open the presentation |

### `google_slides_replace_all_text`

@@ -128,10 +111,6 @@ Find and replace all occurrences of text throughout a Google Slides presentation
  | --------- | ---- | ----------- |
  | `occurrencesChanged` | number | Number of text occurrences that were replaced |
  | `metadata` | json | Operation metadata including presentation ID and URL |
- | ↳ `presentationId` | string | The presentation ID |
- | ↳ `findText` | string | The text that was searched for |
- | ↳ `replaceText` | string | The text that replaced the matches |
- | ↳ `url` | string | URL to open the presentation |

### `google_slides_add_slide`

@@ -152,10 +131,6 @@ Add a new slide to a Google Slides presentation with a specified layout
  | --------- | ---- | ----------- |
  | `slideId` | string | The object ID of the newly created slide |
  | `metadata` | json | Operation metadata including presentation ID, layout, and URL |
- | ↳ `presentationId` | string | The presentation ID |
- | ↳ `layout` | string | The layout used for the new slide |
- | ↳ `insertionIndex` | number | The zero-based index where the slide was inserted |
- | ↳ `url` | string | URL to open the presentation |

### `google_slides_add_image`

@@ -179,10 +154,6 @@ Insert an image into a specific slide in a Google Slides presentation
  | --------- | ---- | ----------- |
  | `imageId` | string | The object ID of the newly created image |
  | `metadata` | json | Operation metadata including presentation ID and image URL |
- | ↳ `presentationId` | string | The presentation ID |
- | ↳ `pageObjectId` | string | The page object ID where the image was inserted |
- | ↳ `imageUrl` | string | The source image URL |
- | ↳ `url` | string | URL to open the presentation |

### `google_slides_get_thumbnail`

@@ -205,10 +176,6 @@ Generate a thumbnail image of a specific slide in a Google Slides presentation
  | `width` | number | Width of the thumbnail in pixels |
  | `height` | number | Height of the thumbnail in pixels |
  | `metadata` | json | Operation metadata including presentation ID and page object ID |
- | ↳ `presentationId` | string | The presentation ID |
- | ↳ `pageObjectId` | string | The page object ID for the thumbnail |
- | ↳ `thumbnailSize` | string | The requested thumbnail size |
- | ↳ `mimeType` | string | The thumbnail MIME type |

### `google_slides_get_page`
@@ -224,7 +224,7 @@ export async function POST(req: NextRequest) {
      hasApiKey: !!executionParams.apiKey,
    })

-   const result = await executeTool(resolvedToolName, executionParams)
+   const result = await executeTool(resolvedToolName, executionParams, true)

    logger.info(`[${tracker.requestId}] Tool execution complete`, {
      toolName,
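The boolean added as the third argument here lines up with the flag documented in the new proxy route later in this compare (`true, // skipProxy (we're already in the proxy)`), so this caller now appears to execute the tool directly rather than re-entering the proxy.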
@@ -1,11 +1,10 @@
  import { db } from '@sim/db'
- import { templateCreators } from '@sim/db/schema'
+ import { templateCreators, user } from '@sim/db/schema'
  import { createLogger } from '@sim/logger'
  import { eq } from 'drizzle-orm'
  import { type NextRequest, NextResponse } from 'next/server'
  import { getSession } from '@/lib/auth'
  import { generateRequestId } from '@/lib/core/utils/request'
- import { verifyEffectiveSuperUser } from '@/lib/templates/permissions'

  const logger = createLogger('CreatorVerificationAPI')

@@ -24,8 +23,9 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
    }

    // Check if user is a super user
-   const { effectiveSuperUser } = await verifyEffectiveSuperUser(session.user.id)
-   if (!effectiveSuperUser) {
+   const currentUser = await db.select().from(user).where(eq(user.id, session.user.id)).limit(1)
+
+   if (!currentUser[0]?.isSuperUser) {
      logger.warn(`[${requestId}] Non-super user attempted to verify creator: ${id}`)
      return NextResponse.json({ error: 'Only super users can verify creators' }, { status: 403 })
    }

@@ -76,8 +76,9 @@ export async function DELETE(
    }

    // Check if user is a super user
-   const { effectiveSuperUser } = await verifyEffectiveSuperUser(session.user.id)
-   if (!effectiveSuperUser) {
+   const currentUser = await db.select().from(user).where(eq(user.id, session.user.id)).limit(1)
+
+   if (!currentUser[0]?.isSuperUser) {
      logger.warn(`[${requestId}] Non-super user attempted to unverify creator: ${id}`)
      return NextResponse.json({ error: 'Only super users can unverify creators' }, { status: 403 })
    }
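The pattern in this file, dropping the `verifyEffectiveSuperUser` helper in favor of an inline Drizzle query against the `user` table's `isSuperUser` flag, recurs in the template approval, rejection, and listing routes later in this compare.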
@@ -6,10 +6,9 @@ import { createLogger } from '@sim/logger'
  import binaryExtensionsList from 'binary-extensions'
  import { type NextRequest, NextResponse } from 'next/server'
  import { checkHybridAuth } from '@/lib/auth/hybrid'
- import { secureFetchWithPinnedIP, validateUrlWithDNS } from '@/lib/core/security/input-validation'
+ import { createPinnedUrl, validateUrlWithDNS } from '@/lib/core/security/input-validation'
  import { isSupportedFileType, parseFile } from '@/lib/file-parsers'
  import { isUsingCloudStorage, type StorageContext, StorageService } from '@/lib/uploads'
- import { uploadExecutionFile } from '@/lib/uploads/contexts/execution'
  import { UPLOAD_DIR_SERVER } from '@/lib/uploads/core/setup.server'
  import { getFileMetadataByKey } from '@/lib/uploads/server/metadata'
  import {

@@ -22,7 +21,6 @@
  } from '@/lib/uploads/utils/file-utils'
  import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
  import { verifyFileAccess } from '@/app/api/files/authorization'
- import type { UserFile } from '@/executor/types'
  import '@/lib/uploads/core/setup.server'

  export const dynamic = 'force-dynamic'

@@ -32,12 +30,6 @@ const logger = createLogger('FilesParseAPI')
  const MAX_DOWNLOAD_SIZE_BYTES = 100 * 1024 * 1024 // 100 MB
  const DOWNLOAD_TIMEOUT_MS = 30000 // 30 seconds

- interface ExecutionContext {
-   workspaceId: string
-   workflowId: string
-   executionId: string
- }

  interface ParseResult {
    success: boolean
    content?: string

@@ -45,7 +37,6 @@ interface ParseResult {
    filePath: string
    originalName?: string // Original filename from database (for workspace files)
    viewerUrl?: string | null // Viewer URL for the file if available
-   userFile?: UserFile // UserFile object for the raw file
    metadata?: {
      fileType: string
      size: number
@@ -79,45 +70,27 @@ export async function POST(request: NextRequest) {

    const userId = authResult.userId
    const requestData = await request.json()
-   const { filePath, fileType, workspaceId, workflowId, executionId } = requestData
+   const { filePath, fileType, workspaceId } = requestData

    if (!filePath || (typeof filePath === 'string' && filePath.trim() === '')) {
      return NextResponse.json({ success: false, error: 'No file path provided' }, { status: 400 })
    }

-   // Build execution context if all required fields are present
-   const executionContext: ExecutionContext | undefined =
-     workspaceId && workflowId && executionId
-       ? { workspaceId, workflowId, executionId }
-       : undefined
-
-   logger.info('File parse request received:', {
-     filePath,
-     fileType,
-     workspaceId,
-     userId,
-     hasExecutionContext: !!executionContext,
-   })
+   logger.info('File parse request received:', { filePath, fileType, workspaceId, userId })

    if (Array.isArray(filePath)) {
      const results = []
-     for (const singlePath of filePath) {
-       if (!singlePath || (typeof singlePath === 'string' && singlePath.trim() === '')) {
+     for (const path of filePath) {
+       if (!path || (typeof path === 'string' && path.trim() === '')) {
          results.push({
            success: false,
            error: 'Empty file path in array',
-           filePath: singlePath || '',
+           filePath: path || '',
          })
          continue
        }

-       const result = await parseFileSingle(
-         singlePath,
-         fileType,
-         workspaceId,
-         userId,
-         executionContext
-       )
+       const result = await parseFileSingle(path, fileType, workspaceId, userId)
        if (result.metadata) {
          result.metadata.processingTime = Date.now() - startTime
        }
@@ -133,7 +106,6 @@ export async function POST(request: NextRequest) {
            fileType: result.metadata?.fileType || 'application/octet-stream',
            size: result.metadata?.size || 0,
            binary: false,
-           file: result.userFile,
          },
          filePath: result.filePath,
          viewerUrl: result.viewerUrl,

@@ -149,7 +121,7 @@ export async function POST(request: NextRequest) {
      })
    }

-   const result = await parseFileSingle(filePath, fileType, workspaceId, userId, executionContext)
+   const result = await parseFileSingle(filePath, fileType, workspaceId, userId)

    if (result.metadata) {
      result.metadata.processingTime = Date.now() - startTime

@@ -165,7 +137,6 @@ export async function POST(request: NextRequest) {
        fileType: result.metadata?.fileType || 'application/octet-stream',
        size: result.metadata?.size || 0,
        binary: false,
-       file: result.userFile,
      },
      filePath: result.filePath,
      viewerUrl: result.viewerUrl,
@@ -193,8 +164,7 @@ async function parseFileSingle(
    filePath: string,
    fileType: string,
    workspaceId: string,
-   userId: string,
-   executionContext?: ExecutionContext
+   userId: string
  ): Promise<ParseResult> {
    logger.info('Parsing file:', filePath)

@@ -216,18 +186,18 @@ async function parseFileSingle(
    }

    if (filePath.includes('/api/files/serve/')) {
-     return handleCloudFile(filePath, fileType, undefined, userId, executionContext)
+     return handleCloudFile(filePath, fileType, undefined, userId)
    }

    if (filePath.startsWith('http://') || filePath.startsWith('https://')) {
-     return handleExternalUrl(filePath, fileType, workspaceId, userId, executionContext)
+     return handleExternalUrl(filePath, fileType, workspaceId, userId)
    }

    if (isUsingCloudStorage()) {
-     return handleCloudFile(filePath, fileType, undefined, userId, executionContext)
+     return handleCloudFile(filePath, fileType, undefined, userId)
    }

-   return handleLocalFile(filePath, fileType, userId, executionContext)
+   return handleLocalFile(filePath, fileType, userId)
  }

  /**
@@ -260,14 +230,12 @@ function validateFilePath(filePath: string): { isValid: boolean; error?: string
  /**
   * Handle external URL
   * If workspaceId is provided, checks if file already exists and saves to workspace if not
-  * If executionContext is provided, also stores the file in execution storage and returns UserFile
   */
  async function handleExternalUrl(
    url: string,
    fileType: string,
    workspaceId: string,
-   userId: string,
-   executionContext?: ExecutionContext
+   userId: string
  ): Promise<ParseResult> {
    try {
      logger.info('Fetching external URL:', url)

@@ -344,13 +312,17 @@ async function handleExternalUrl(

        if (existingFile) {
          const storageFilePath = `/api/files/serve/${existingFile.key}`
-         return handleCloudFile(storageFilePath, fileType, 'workspace', userId, executionContext)
+         return handleCloudFile(storageFilePath, fileType, 'workspace', userId)
        }
      }
    }

-   const response = await secureFetchWithPinnedIP(url, urlValidation.resolvedIP!, {
-     timeout: DOWNLOAD_TIMEOUT_MS,
+   const pinnedUrl = createPinnedUrl(url, urlValidation.resolvedIP!)
+   const response = await fetch(pinnedUrl, {
+     signal: AbortSignal.timeout(DOWNLOAD_TIMEOUT_MS),
      headers: {
        Host: urlValidation.originalHostname!,
      },
    })
    if (!response.ok) {
      throw new Error(`Failed to fetch URL: ${response.status} ${response.statusText}`)
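Both this route and the new proxy route replace the bespoke `secureFetchWithPinnedIP` helper with a plain `fetch` against a pinned URL plus an explicit `Host` header. A sketch of the idea, assuming `validateUrlWithDNS` has already resolved and vetted the IP (the `pinUrl` helper below is illustrative, not the imported `createPinnedUrl`):

```ts
// DNS-rebinding defense: connect to the IP that was validated, not to a
// fresh DNS answer, while keeping the original hostname for vhost routing.
function pinUrl(originalUrl: string, resolvedIP: string): { url: string; host: string } {
  const u = new URL(originalUrl)
  const host = u.hostname
  u.hostname = resolvedIP // the socket now targets the vetted address
  return { url: u.toString(), host }
}

// const { url: pinned, host } = pinUrl(targetUrl, resolvedIP)
// await fetch(pinned, { headers: { Host: host }, signal: AbortSignal.timeout(30_000) })
```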
@@ -369,19 +341,6 @@ async function handleExternalUrl(

    logger.info(`Downloaded file from URL: ${url}, size: ${buffer.length} bytes`)

-   let userFile: UserFile | undefined
-   const mimeType = response.headers.get('content-type') || getMimeTypeFromExtension(extension)
-
-   if (executionContext) {
-     try {
-       userFile = await uploadExecutionFile(executionContext, buffer, filename, mimeType, userId)
-       logger.info(`Stored file in execution storage: ${filename}`, { key: userFile.key })
-     } catch (uploadError) {
-       logger.warn(`Failed to store file in execution storage:`, uploadError)
-       // Continue without userFile - parsing can still work
-     }
-   }
-
    if (shouldCheckWorkspace) {
      try {
        const permission = await getUserEntityPermissions(userId, 'workspace', workspaceId)

@@ -394,6 +353,8 @@ async function handleExternalUrl(
        })
      } else {
        const { uploadWorkspaceFile } = await import('@/lib/uploads/contexts/workspace')
+       const mimeType =
+         response.headers.get('content-type') || getMimeTypeFromExtension(extension)
        await uploadWorkspaceFile(workspaceId, userId, buffer, filename, mimeType)
        logger.info(`Saved URL file to workspace storage: ${filename}`)
      }

@@ -402,23 +363,17 @@ async function handleExternalUrl(
      }
    }

-   let parseResult: ParseResult
    if (extension === 'pdf') {
-     parseResult = await handlePdfBuffer(buffer, filename, fileType, url)
-   } else if (extension === 'csv') {
-     parseResult = await handleCsvBuffer(buffer, filename, fileType, url)
-   } else if (isSupportedFileType(extension)) {
-     parseResult = await handleGenericTextBuffer(buffer, filename, extension, fileType, url)
-   } else {
-     parseResult = handleGenericBuffer(buffer, filename, extension, fileType)
+     return await handlePdfBuffer(buffer, filename, fileType, url)
    }
+   if (extension === 'csv') {
+     return await handleCsvBuffer(buffer, filename, fileType, url)
+   }
+   if (isSupportedFileType(extension)) {
+     return await handleGenericTextBuffer(buffer, filename, extension, fileType, url)
+   }

-   // Attach userFile to the result
-   if (userFile) {
-     parseResult.userFile = userFile
-   }
-
-   return parseResult
+   return handleGenericBuffer(buffer, filename, extension, fileType)
  } catch (error) {
    logger.error(`Error handling external URL ${url}:`, error)
    return {
@@ -431,15 +386,12 @@ async function handleExternalUrl(

  /**
   * Handle file stored in cloud storage
-  * If executionContext is provided and file is not already from execution storage,
-  * copies the file to execution storage and returns UserFile
   */
  async function handleCloudFile(
    filePath: string,
    fileType: string,
    explicitContext: string | undefined,
-   userId: string,
-   executionContext?: ExecutionContext
+   userId: string
  ): Promise<ParseResult> {
    try {
      const cloudKey = extractStorageKey(filePath)

@@ -486,7 +438,6 @@ async function handleCloudFile(

    const filename = originalFilename || cloudKey.split('/').pop() || cloudKey
    const extension = path.extname(filename).toLowerCase().substring(1)
-   const mimeType = getMimeTypeFromExtension(extension)

    const normalizedFilePath = `/api/files/serve/${encodeURIComponent(cloudKey)}?context=${context}`
    let workspaceIdFromKey: string | undefined

@@ -502,39 +453,6 @@ async function handleCloudFile(

    const viewerUrl = getViewerUrl(cloudKey, workspaceIdFromKey)

-   // Store file in execution storage if executionContext is provided
-   let userFile: UserFile | undefined
-
-   if (executionContext) {
-     // If file is already from execution context, create UserFile reference without re-uploading
-     if (context === 'execution') {
-       userFile = {
-         id: `file_${Date.now()}_${Math.random().toString(36).substring(2, 9)}`,
-         name: filename,
-         url: normalizedFilePath,
-         size: fileBuffer.length,
-         type: mimeType,
-         key: cloudKey,
-         context: 'execution',
-       }
-       logger.info(`Created UserFile reference for existing execution file: ${filename}`)
-     } else {
-       // Copy from workspace/other storage to execution storage
-       try {
-         userFile = await uploadExecutionFile(
-           executionContext,
-           fileBuffer,
-           filename,
-           mimeType,
-           userId
-         )
-         logger.info(`Copied file to execution storage: ${filename}`, { key: userFile.key })
-       } catch (uploadError) {
-         logger.warn(`Failed to copy file to execution storage:`, uploadError)
-       }
-     }
-   }
-
    let parseResult: ParseResult
    if (extension === 'pdf') {
      parseResult = await handlePdfBuffer(fileBuffer, filename, fileType, normalizedFilePath)

@@ -559,11 +477,6 @@ async function handleCloudFile(

    parseResult.viewerUrl = viewerUrl

-   // Attach userFile to the result
-   if (userFile) {
-     parseResult.userFile = userFile
-   }
-
    return parseResult
  } catch (error) {
    logger.error(`Error handling cloud file ${filePath}:`, error)
@@ -587,8 +500,7 @@ async function handleCloudFile(
  async function handleLocalFile(
    filePath: string,
    fileType: string,
-   userId: string,
-   executionContext?: ExecutionContext
+   userId: string
  ): Promise<ParseResult> {
    try {
      const filename = filePath.split('/').pop() || filePath

@@ -628,32 +540,13 @@ async function handleLocalFile(
    const hash = createHash('md5').update(fileBuffer).digest('hex')

    const extension = path.extname(filename).toLowerCase().substring(1)
-   const mimeType = fileType || getMimeTypeFromExtension(extension)
-
-   // Store file in execution storage if executionContext is provided
-   let userFile: UserFile | undefined
-   if (executionContext) {
-     try {
-       userFile = await uploadExecutionFile(
-         executionContext,
-         fileBuffer,
-         filename,
-         mimeType,
-         userId
-       )
-       logger.info(`Stored local file in execution storage: ${filename}`, { key: userFile.key })
-     } catch (uploadError) {
-       logger.warn(`Failed to store local file in execution storage:`, uploadError)
-     }
-   }
-
    return {
      success: true,
      content: result.content,
      filePath,
-     userFile,
      metadata: {
-       fileType: mimeType,
+       fileType: fileType || getMimeTypeFromExtension(extension),
        size: stats.size,
        hash,
        processingTime: 0,
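Taken together, the hunks above strip the optional `ExecutionContext` plumbing from the whole parse flow (`POST`, `parseFileSingle`, `handleExternalUrl`, `handleCloudFile`, `handleLocalFile`): parsed files are no longer copied into execution storage, and `ParseResult` loses its `userFile` field.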
apps/sim/app/api/proxy/route.ts (new file, 395 lines)

@@ -0,0 +1,395 @@
import { createLogger } from '@sim/logger'
import type { NextRequest } from 'next/server'
import { NextResponse } from 'next/server'
import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateInternalToken } from '@/lib/auth/internal'
import { isDev } from '@/lib/core/config/feature-flags'
import { createPinnedUrl, validateUrlWithDNS } from '@/lib/core/security/input-validation'
import { generateRequestId } from '@/lib/core/utils/request'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { executeTool } from '@/tools'
import { getTool, validateRequiredParametersAfterMerge } from '@/tools/utils'

const logger = createLogger('ProxyAPI')

const proxyPostSchema = z.object({
  toolId: z.string().min(1, 'toolId is required'),
  params: z.record(z.any()).optional().default({}),
  executionContext: z
    .object({
      workflowId: z.string().optional(),
      workspaceId: z.string().optional(),
      executionId: z.string().optional(),
      userId: z.string().optional(),
    })
    .optional(),
})

/**
 * Creates a minimal set of default headers for proxy requests
 * @returns Record of HTTP headers
 */
const getProxyHeaders = (): Record<string, string> => {
  return {
    'User-Agent':
      'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/135.0.0.0 Safari/537.36',
    Accept: '*/*',
    'Accept-Encoding': 'gzip, deflate, br',
    'Cache-Control': 'no-cache',
    Connection: 'keep-alive',
  }
}

/**
 * Formats a response with CORS headers
 * @param responseData Response data object
 * @param status HTTP status code
 * @returns NextResponse with CORS headers
 */
const formatResponse = (responseData: any, status = 200) => {
  return NextResponse.json(responseData, {
    status,
    headers: {
      'Access-Control-Allow-Origin': '*',
      'Access-Control-Allow-Methods': 'GET, POST, PUT, DELETE, OPTIONS',
      'Access-Control-Allow-Headers': 'Content-Type, Authorization',
    },
  })
}

/**
 * Creates an error response with consistent formatting
 * @param error Error object or message
 * @param status HTTP status code
 * @param additionalData Additional data to include in the response
 * @returns Formatted error response
 */
const createErrorResponse = (error: any, status = 500, additionalData = {}) => {
  const errorMessage = error instanceof Error ? error.message : String(error)
  const errorStack = error instanceof Error ? error.stack : undefined

  logger.error('Creating error response', {
    errorMessage,
    status,
    stack: isDev ? errorStack : undefined,
  })

  return formatResponse(
    {
      success: false,
      error: errorMessage,
      stack: isDev ? errorStack : undefined,
      ...additionalData,
    },
    status
  )
}

/**
 * GET handler for direct external URL proxying
 * This allows for GET requests to external APIs
 */
export async function GET(request: Request) {
  const url = new URL(request.url)
  const targetUrl = url.searchParams.get('url')
  const requestId = generateRequestId()

  // Vault download proxy: /api/proxy?vaultDownload=1&bucket=...&object=...&credentialId=...
  const vaultDownload = url.searchParams.get('vaultDownload')
  if (vaultDownload === '1') {
    try {
      const bucket = url.searchParams.get('bucket')
      const objectParam = url.searchParams.get('object')
      const credentialId = url.searchParams.get('credentialId')

      if (!bucket || !objectParam || !credentialId) {
        return createErrorResponse('Missing bucket, object, or credentialId', 400)
      }

      // Fetch access token using existing token API
      const baseUrl = new URL(getBaseUrl())
      const tokenUrl = new URL('/api/auth/oauth/token', baseUrl)

      // Build headers: forward session cookies if present; include internal auth for server-side
      const tokenHeaders: Record<string, string> = { 'Content-Type': 'application/json' }
      const incomingCookie = request.headers.get('cookie')
      if (incomingCookie) tokenHeaders.Cookie = incomingCookie
      try {
        const internalToken = await generateInternalToken()
        tokenHeaders.Authorization = `Bearer ${internalToken}`
      } catch (_e) {
        // best-effort internal auth
      }

      // Optional workflow context for collaboration auth
      const workflowId = url.searchParams.get('workflowId') || undefined

      const tokenRes = await fetch(tokenUrl.toString(), {
        method: 'POST',
        headers: tokenHeaders,
        body: JSON.stringify({ credentialId, workflowId }),
      })

      if (!tokenRes.ok) {
        const err = await tokenRes.text()
        return createErrorResponse(`Failed to fetch access token: ${err}`, 401)
      }

      const tokenJson = await tokenRes.json()
      const accessToken = tokenJson.accessToken
      if (!accessToken) {
        return createErrorResponse('No access token available', 401)
      }

      // Avoid double-encoding: incoming object may already be percent-encoded
      const objectDecoded = decodeURIComponent(objectParam)
      const gcsUrl = `https://storage.googleapis.com/storage/v1/b/${encodeURIComponent(
        bucket
      )}/o/${encodeURIComponent(objectDecoded)}?alt=media`

      const fileRes = await fetch(gcsUrl, {
        headers: { Authorization: `Bearer ${accessToken}` },
      })

      if (!fileRes.ok) {
        const errText = await fileRes.text()
        return createErrorResponse(errText || 'Failed to download file', fileRes.status)
      }

      const headers = new Headers()
      fileRes.headers.forEach((v, k) => headers.set(k, v))
      return new NextResponse(fileRes.body, { status: 200, headers })
    } catch (error: any) {
      logger.error(`[${requestId}] Vault download proxy failed`, {
        error: error instanceof Error ? error.message : String(error),
      })
      return createErrorResponse('Vault download failed', 500)
    }
  }

  if (!targetUrl) {
    logger.error(`[${requestId}] Missing 'url' parameter`)
    return createErrorResponse("Missing 'url' parameter", 400)
  }

  const urlValidation = await validateUrlWithDNS(targetUrl)
  if (!urlValidation.isValid) {
    logger.warn(`[${requestId}] Blocked proxy request`, {
      url: targetUrl.substring(0, 100),
      error: urlValidation.error,
    })
    return createErrorResponse(urlValidation.error || 'Invalid URL', 403)
  }

  const method = url.searchParams.get('method') || 'GET'

  const bodyParam = url.searchParams.get('body')
  let body: string | undefined

  if (bodyParam && ['POST', 'PUT', 'PATCH'].includes(method.toUpperCase())) {
    try {
      body = decodeURIComponent(bodyParam)
    } catch (error) {
      logger.warn(`[${requestId}] Failed to decode body parameter`, error)
    }
  }

  const customHeaders: Record<string, string> = {}

  for (const [key, value] of url.searchParams.entries()) {
    if (key.startsWith('header.')) {
      const headerName = key.substring(7)
      customHeaders[headerName] = value
    }
  }

  if (body && !customHeaders['Content-Type']) {
    customHeaders['Content-Type'] = 'application/json'
  }

  logger.info(`[${requestId}] Proxying ${method} request to: ${targetUrl}`)

  try {
    const pinnedUrl = createPinnedUrl(targetUrl, urlValidation.resolvedIP!)
    const response = await fetch(pinnedUrl, {
      method: method,
      headers: {
        ...getProxyHeaders(),
        ...customHeaders,
        Host: urlValidation.originalHostname!,
      },
      body: body || undefined,
    })

    const contentType = response.headers.get('content-type') || ''
    let data

    if (contentType.includes('application/json')) {
      data = await response.json()
    } else {
      data = await response.text()
    }

    const errorMessage = !response.ok
      ? data && typeof data === 'object' && data.error
        ? `${data.error.message || JSON.stringify(data.error)}`
        : response.statusText || `HTTP error ${response.status}`
      : undefined

    if (!response.ok) {
      logger.error(`[${requestId}] External API error: ${response.status} ${response.statusText}`)
    }

    return formatResponse({
      success: response.ok,
      status: response.status,
      statusText: response.statusText,
      headers: Object.fromEntries(response.headers.entries()),
      data,
      error: errorMessage,
    })
  } catch (error: any) {
    logger.error(`[${requestId}] Proxy GET request failed`, {
      url: targetUrl,
      error: error instanceof Error ? error.message : String(error),
      stack: error instanceof Error ? error.stack : undefined,
    })

    return createErrorResponse(error)
  }
}

export async function POST(request: NextRequest) {
  const requestId = generateRequestId()
  const startTime = new Date()
  const startTimeISO = startTime.toISOString()

  try {
    const authResult = await checkHybridAuth(request, { requireWorkflowId: false })
    if (!authResult.success) {
      logger.error(`[${requestId}] Authentication failed for proxy:`, authResult.error)
      return createErrorResponse('Unauthorized', 401)
    }

    let requestBody
    try {
      requestBody = await request.json()
    } catch (parseError) {
      logger.error(`[${requestId}] Failed to parse request body`, {
        error: parseError instanceof Error ? parseError.message : String(parseError),
      })
      throw new Error('Invalid JSON in request body')
    }

    const validationResult = proxyPostSchema.safeParse(requestBody)
    if (!validationResult.success) {
      logger.error(`[${requestId}] Request validation failed`, {
        errors: validationResult.error.errors,
      })
      const errorMessages = validationResult.error.errors
        .map((err) => `${err.path.join('.')}: ${err.message}`)
        .join(', ')
      throw new Error(`Validation failed: ${errorMessages}`)
    }

    const { toolId, params } = validationResult.data

    logger.info(`[${requestId}] Processing tool: ${toolId}`)

    const tool = getTool(toolId)

    if (!tool) {
      logger.error(`[${requestId}] Tool not found: ${toolId}`)
      throw new Error(`Tool not found: ${toolId}`)
    }

    try {
      validateRequiredParametersAfterMerge(toolId, tool, params)
    } catch (validationError) {
      logger.warn(`[${requestId}] Tool validation failed for ${toolId}`, {
        error: validationError instanceof Error ? validationError.message : String(validationError),
      })

      const endTime = new Date()
      const endTimeISO = endTime.toISOString()
      const duration = endTime.getTime() - startTime.getTime()

      return createErrorResponse(validationError, 400, {
        startTime: startTimeISO,
        endTime: endTimeISO,
        duration,
      })
    }

    const hasFileOutputs =
      tool.outputs &&
      Object.values(tool.outputs).some(
        (output) => output.type === 'file' || output.type === 'file[]'
      )

    const result = await executeTool(
      toolId,
      params,
      true, // skipProxy (we're already in the proxy)
      !hasFileOutputs, // skipPostProcess (don't skip if tool has file outputs)
      undefined // execution context is not available in proxy context
    )

    if (!result.success) {
      logger.warn(`[${requestId}] Tool execution failed for ${toolId}`, {
        error: result.error || 'Unknown error',
      })

      throw new Error(result.error || 'Tool execution failed')
    }

    const endTime = new Date()
    const endTimeISO = endTime.toISOString()
    const duration = endTime.getTime() - startTime.getTime()

    const responseWithTimingData = {
      ...result,
      startTime: startTimeISO,
      endTime: endTimeISO,
      duration,
      timing: {
        startTime: startTimeISO,
        endTime: endTimeISO,
        duration,
      },
    }

    logger.info(`[${requestId}] Tool executed successfully: ${toolId} (${duration}ms)`)

    return formatResponse(responseWithTimingData)
  } catch (error: any) {
    logger.error(`[${requestId}] Proxy request failed`, {
      error: error instanceof Error ? error.message : String(error),
      stack: error instanceof Error ? error.stack : undefined,
      name: error instanceof Error ? error.name : undefined,
    })

    const endTime = new Date()
    const endTimeISO = endTime.toISOString()
    const duration = endTime.getTime() - startTime.getTime()

    return createErrorResponse(error, 500, {
      startTime: startTimeISO,
      endTime: endTimeISO,
      duration,
    })
  }
}

export async function OPTIONS() {
  return new NextResponse(null, {
    status: 204,
    headers: {
      'Access-Control-Allow-Origin': '*',
      'Access-Control-Allow-Methods': 'GET, POST, PUT, DELETE, OPTIONS',
      'Access-Control-Allow-Headers': 'Content-Type, Authorization',
      'Access-Control-Max-Age': '86400',
    },
  })
}
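A hedged example of calling the GET handler above from client code, using the `header.`-prefixed query convention it parses (the target URL and token are placeholders):

```ts
// Any "header.X" query parameter is forwarded to the target as header X.
const qs = new URLSearchParams({
  url: 'https://api.example.com/v1/items', // placeholder target
  method: 'GET',
  'header.Authorization': 'Bearer <token>', // placeholder credential
})
const res = await fetch(`/api/proxy?${qs.toString()}`)
const payload = await res.json() // { success, status, statusText, headers, data, error }
```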
@@ -1,193 +0,0 @@
import { db } from '@sim/db'
import { copilotChats, workflow, workspace } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { verifyEffectiveSuperUser } from '@/lib/templates/permissions'
import { parseWorkflowJson } from '@/lib/workflows/operations/import-export'
import {
  loadWorkflowFromNormalizedTables,
  saveWorkflowToNormalizedTables,
} from '@/lib/workflows/persistence/utils'
import { sanitizeForExport } from '@/lib/workflows/sanitization/json-sanitizer'

const logger = createLogger('SuperUserImportWorkflow')

interface ImportWorkflowRequest {
  workflowId: string
  targetWorkspaceId: string
}

/**
 * POST /api/superuser/import-workflow
 *
 * Superuser endpoint to import a workflow by ID along with its copilot chats.
 * This creates a copy of the workflow in the target workspace with new IDs.
 * Only the workflow structure and copilot chats are copied - no deployments,
 * webhooks, triggers, or other sensitive data.
 *
 * Requires both isSuperUser flag AND superUserModeEnabled setting.
 */
export async function POST(request: NextRequest) {
  try {
    const session = await getSession()
    if (!session?.user?.id) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    const { effectiveSuperUser, isSuperUser, superUserModeEnabled } =
      await verifyEffectiveSuperUser(session.user.id)

    if (!effectiveSuperUser) {
      logger.warn('Non-effective-superuser attempted to access import-workflow endpoint', {
        userId: session.user.id,
        isSuperUser,
        superUserModeEnabled,
      })
      return NextResponse.json({ error: 'Forbidden: Superuser access required' }, { status: 403 })
    }

    const body: ImportWorkflowRequest = await request.json()
    const { workflowId, targetWorkspaceId } = body

    if (!workflowId) {
      return NextResponse.json({ error: 'workflowId is required' }, { status: 400 })
    }

    if (!targetWorkspaceId) {
      return NextResponse.json({ error: 'targetWorkspaceId is required' }, { status: 400 })
    }

    // Verify target workspace exists
    const [targetWorkspace] = await db
      .select({ id: workspace.id, ownerId: workspace.ownerId })
      .from(workspace)
      .where(eq(workspace.id, targetWorkspaceId))
      .limit(1)

    if (!targetWorkspace) {
      return NextResponse.json({ error: 'Target workspace not found' }, { status: 404 })
    }

    // Get the source workflow
    const [sourceWorkflow] = await db
      .select()
      .from(workflow)
      .where(eq(workflow.id, workflowId))
      .limit(1)

    if (!sourceWorkflow) {
      return NextResponse.json({ error: 'Source workflow not found' }, { status: 404 })
    }

    // Load the workflow state from normalized tables
    const normalizedData = await loadWorkflowFromNormalizedTables(workflowId)

    if (!normalizedData) {
      return NextResponse.json(
        { error: 'Workflow has no normalized data - cannot import' },
        { status: 400 }
      )
    }

    // Use existing export logic to create export format
    const workflowState = {
      blocks: normalizedData.blocks,
      edges: normalizedData.edges,
      loops: normalizedData.loops,
      parallels: normalizedData.parallels,
      metadata: {
        name: sourceWorkflow.name,
        description: sourceWorkflow.description ?? undefined,
        color: sourceWorkflow.color,
      },
    }

    const exportData = sanitizeForExport(workflowState)

    // Use existing import logic (parseWorkflowJson regenerates IDs automatically)
    const { data: importedData, errors } = parseWorkflowJson(JSON.stringify(exportData))

    if (!importedData || errors.length > 0) {
      return NextResponse.json(
        { error: `Failed to parse workflow: ${errors.join(', ')}` },
        { status: 400 }
      )
    }

    // Create new workflow record
    const newWorkflowId = crypto.randomUUID()
    const now = new Date()

    await db.insert(workflow).values({
      id: newWorkflowId,
      userId: session.user.id,
      workspaceId: targetWorkspaceId,
      folderId: null, // Don't copy folder association
      name: `[Debug Import] ${sourceWorkflow.name}`,
      description: sourceWorkflow.description,
      color: sourceWorkflow.color,
      lastSynced: now,
      createdAt: now,
      updatedAt: now,
      isDeployed: false, // Never copy deployment status
      runCount: 0,
      variables: sourceWorkflow.variables || {},
    })

    // Save using existing persistence logic
    const saveResult = await saveWorkflowToNormalizedTables(newWorkflowId, importedData)

    if (!saveResult.success) {
      // Clean up the workflow record if save failed
      await db.delete(workflow).where(eq(workflow.id, newWorkflowId))
      return NextResponse.json(
        { error: `Failed to save workflow state: ${saveResult.error}` },
        { status: 500 }
      )
    }

    // Copy copilot chats associated with the source workflow
    const sourceCopilotChats = await db
      .select()
      .from(copilotChats)
      .where(eq(copilotChats.workflowId, workflowId))

    let copilotChatsImported = 0

    for (const chat of sourceCopilotChats) {
      await db.insert(copilotChats).values({
        userId: session.user.id,
        workflowId: newWorkflowId,
        title: chat.title ? `[Import] ${chat.title}` : null,
        messages: chat.messages,
        model: chat.model,
        conversationId: null, // Don't copy conversation ID
        previewYaml: chat.previewYaml,
        planArtifact: chat.planArtifact,
        config: chat.config,
        createdAt: new Date(),
        updatedAt: new Date(),
      })
      copilotChatsImported++
    }

    logger.info('Superuser imported workflow', {
      userId: session.user.id,
      sourceWorkflowId: workflowId,
      newWorkflowId,
      targetWorkspaceId,
      copilotChatsImported,
    })

    return NextResponse.json({
      success: true,
      newWorkflowId,
      copilotChatsImported,
    })
  } catch (error) {
    logger.error('Error importing workflow', error)
    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}
@@ -5,7 +5,7 @@ import { eq } from 'drizzle-orm'
  import { type NextRequest, NextResponse } from 'next/server'
  import { getSession } from '@/lib/auth'
  import { generateRequestId } from '@/lib/core/utils/request'
- import { verifyEffectiveSuperUser } from '@/lib/templates/permissions'
+ import { verifySuperUser } from '@/lib/templates/permissions'

  const logger = createLogger('TemplateApprovalAPI')

@@ -25,8 +25,8 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
    return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
  }

- const { effectiveSuperUser } = await verifyEffectiveSuperUser(session.user.id)
- if (!effectiveSuperUser) {
+ const { isSuperUser } = await verifySuperUser(session.user.id)
+ if (!isSuperUser) {
    logger.warn(`[${requestId}] Non-super user attempted to approve template: ${id}`)
    return NextResponse.json({ error: 'Only super users can approve templates' }, { status: 403 })
  }

@@ -71,8 +71,8 @@ export async function DELETE(
    return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
  }

- const { effectiveSuperUser } = await verifyEffectiveSuperUser(session.user.id)
- if (!effectiveSuperUser) {
+ const { isSuperUser } = await verifySuperUser(session.user.id)
+ if (!isSuperUser) {
    logger.warn(`[${requestId}] Non-super user attempted to reject template: ${id}`)
    return NextResponse.json({ error: 'Only super users can reject templates' }, { status: 403 })
  }
@@ -5,7 +5,7 @@ import { eq } from 'drizzle-orm'
  import { type NextRequest, NextResponse } from 'next/server'
  import { getSession } from '@/lib/auth'
  import { generateRequestId } from '@/lib/core/utils/request'
- import { verifyEffectiveSuperUser } from '@/lib/templates/permissions'
+ import { verifySuperUser } from '@/lib/templates/permissions'

  const logger = createLogger('TemplateRejectionAPI')

@@ -25,8 +25,8 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
    return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
  }

- const { effectiveSuperUser } = await verifyEffectiveSuperUser(session.user.id)
- if (!effectiveSuperUser) {
+ const { isSuperUser } = await verifySuperUser(session.user.id)
+ if (!isSuperUser) {
    logger.warn(`[${requestId}] Non-super user attempted to reject template: ${id}`)
    return NextResponse.json({ error: 'Only super users can reject templates' }, { status: 403 })
  }
@@ -3,6 +3,7 @@ import {
    templateCreators,
    templateStars,
    templates,
+   user,
    workflow,
    workflowDeploymentVersion,
  } from '@sim/db/schema'

@@ -13,7 +14,6 @@ import { v4 as uuidv4 } from 'uuid'
  import { z } from 'zod'
  import { getSession } from '@/lib/auth'
  import { generateRequestId } from '@/lib/core/utils/request'
- import { verifyEffectiveSuperUser } from '@/lib/templates/permissions'
  import {
    extractRequiredCredentials,
    sanitizeCredentials,

@@ -70,8 +70,8 @@ export async function GET(request: NextRequest) {
  logger.debug(`[${requestId}] Fetching templates with params:`, params)

  // Check if user is a super user
- const { effectiveSuperUser } = await verifyEffectiveSuperUser(session.user.id)
- const isSuperUser = effectiveSuperUser
+ const currentUser = await db.select().from(user).where(eq(user.id, session.user.id)).limit(1)
+ const isSuperUser = currentUser[0]?.isSuperUser || false

  // Build query conditions
  const conditions = []
@@ -550,8 +550,6 @@ export interface AdminUserBilling {
  totalWebhookTriggers: number
  totalScheduledExecutions: number
  totalChatExecutions: number
- totalMcpExecutions: number
- totalA2aExecutions: number
  totalTokensUsed: number
  totalCost: string
  currentUsageLimit: string | null

@@ -97,8 +97,6 @@ export const GET = withAdminAuthParams<RouteParams>(async (_, context) => {
  totalWebhookTriggers: stats?.totalWebhookTriggers ?? 0,
  totalScheduledExecutions: stats?.totalScheduledExecutions ?? 0,
  totalChatExecutions: stats?.totalChatExecutions ?? 0,
- totalMcpExecutions: stats?.totalMcpExecutions ?? 0,
- totalA2aExecutions: stats?.totalA2aExecutions ?? 0,
  totalTokensUsed: stats?.totalTokensUsed ?? 0,
  totalCost: stats?.totalCost ?? '0',
  currentUsageLimit: stats?.currentUsageLimit ?? null,
@@ -19,7 +19,7 @@ export interface RateLimitResult {

export async function checkRateLimit(
  request: NextRequest,
- endpoint: 'logs' | 'logs-detail' | 'workflows' | 'workflow-detail' = 'logs'
+ endpoint: 'logs' | 'logs-detail' = 'logs'
): Promise<RateLimitResult> {
  try {
    const auth = await authenticateV1Request(request)
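Narrowing this union tracks the rest of the compare: the two v1 route files deleted below are the callers that passed 'workflow-detail' and 'workflows' to `checkRateLimit`.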
@@ -1,102 +0,0 @@
import { db } from '@sim/db'
import { permissions, workflow, workflowBlocks } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { extractInputFieldsFromBlocks } from '@/lib/workflows/input-format'
import { createApiResponse, getUserLimits } from '@/app/api/v1/logs/meta'
import { checkRateLimit, createRateLimitResponse } from '@/app/api/v1/middleware'

const logger = createLogger('V1WorkflowDetailsAPI')

export const revalidate = 0

export async function GET(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
  const requestId = crypto.randomUUID().slice(0, 8)

  try {
    const rateLimit = await checkRateLimit(request, 'workflow-detail')
    if (!rateLimit.allowed) {
      return createRateLimitResponse(rateLimit)
    }

    const userId = rateLimit.userId!
    const { id } = await params

    logger.info(`[${requestId}] Fetching workflow details for ${id}`, { userId })

    const rows = await db
      .select({
        id: workflow.id,
        name: workflow.name,
        description: workflow.description,
        color: workflow.color,
        folderId: workflow.folderId,
        workspaceId: workflow.workspaceId,
        isDeployed: workflow.isDeployed,
        deployedAt: workflow.deployedAt,
        runCount: workflow.runCount,
        lastRunAt: workflow.lastRunAt,
        variables: workflow.variables,
        createdAt: workflow.createdAt,
        updatedAt: workflow.updatedAt,
      })
      .from(workflow)
      .innerJoin(
        permissions,
        and(
          eq(permissions.entityType, 'workspace'),
          eq(permissions.entityId, workflow.workspaceId),
          eq(permissions.userId, userId)
        )
      )
      .where(eq(workflow.id, id))
      .limit(1)

    const workflowData = rows[0]
    if (!workflowData) {
      return NextResponse.json({ error: 'Workflow not found' }, { status: 404 })
    }

    const blockRows = await db
      .select({
        id: workflowBlocks.id,
        type: workflowBlocks.type,
        subBlocks: workflowBlocks.subBlocks,
      })
      .from(workflowBlocks)
      .where(eq(workflowBlocks.workflowId, id))

    const blocksRecord = Object.fromEntries(
      blockRows.map((block) => [block.id, { type: block.type, subBlocks: block.subBlocks }])
    )
    const inputs = extractInputFieldsFromBlocks(blocksRecord)

    const response = {
      id: workflowData.id,
      name: workflowData.name,
      description: workflowData.description,
      color: workflowData.color,
      folderId: workflowData.folderId,
      workspaceId: workflowData.workspaceId,
      isDeployed: workflowData.isDeployed,
      deployedAt: workflowData.deployedAt?.toISOString() || null,
      runCount: workflowData.runCount,
      lastRunAt: workflowData.lastRunAt?.toISOString() || null,
      variables: workflowData.variables || {},
      inputs,
      createdAt: workflowData.createdAt.toISOString(),
      updatedAt: workflowData.updatedAt.toISOString(),
    }

    const limits = await getUserLimits(userId)

    const apiResponse = createApiResponse({ data: response }, limits, rateLimit)

    return NextResponse.json(apiResponse.body, { headers: apiResponse.headers })
  } catch (error: unknown) {
    const message = error instanceof Error ? error.message : 'Unknown error'
    logger.error(`[${requestId}] Workflow details fetch error`, { error: message })
    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}
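// Added note (inferred from the removed route above, not stated elsewhere):
// because the permission check is an INNER JOIN, the same 404 is returned both
// when the workflow id does not exist and when the caller lacks workspace
// access, so the endpoint never reveals whether a foreign workflow id is valid.
// Illustrative call shape (path assumed from the route's location):
//   GET /api/v1/workflows/{id} -> 200 { data: { id, name, ..., inputs }, ... }
//                              -> 404 { error: 'Workflow not found' }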
@@ -1,184 +0,0 @@
import { db } from '@sim/db'
import { permissions, workflow } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, asc, eq, gt, or } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { createApiResponse, getUserLimits } from '@/app/api/v1/logs/meta'
import { checkRateLimit, createRateLimitResponse } from '@/app/api/v1/middleware'

const logger = createLogger('V1WorkflowsAPI')

export const dynamic = 'force-dynamic'
export const revalidate = 0

const QueryParamsSchema = z.object({
  workspaceId: z.string(),
  folderId: z.string().optional(),
  deployedOnly: z.coerce.boolean().optional().default(false),
  limit: z.coerce.number().min(1).max(100).optional().default(50),
  cursor: z.string().optional(),
})

interface CursorData {
  sortOrder: number
  createdAt: string
  id: string
}

function encodeCursor(data: CursorData): string {
  return Buffer.from(JSON.stringify(data)).toString('base64')
}

function decodeCursor(cursor: string): CursorData | null {
  try {
    return JSON.parse(Buffer.from(cursor, 'base64').toString())
  } catch {
    return null
  }
}
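// Minimal round-trip sketch (added for illustration; the values are made up):
// the cursor is plain base64-encoded JSON, so it survives URL transport but is
// neither signed nor encrypted, and decodeCursor returns null instead of
// throwing on malformed input.
const exampleCursor = encodeCursor({
  sortOrder: 3,
  createdAt: new Date('2025-01-01').toISOString(),
  id: 'wf_123',
})
decodeCursor(exampleCursor) // -> { sortOrder: 3, createdAt: '2025-01-01T00:00:00.000Z', id: 'wf_123' }
decodeCursor('not-base64-json') // -> null (JSON.parse throws, caught above)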

export async function GET(request: NextRequest) {
  const requestId = crypto.randomUUID().slice(0, 8)

  try {
    const rateLimit = await checkRateLimit(request, 'workflows')
    if (!rateLimit.allowed) {
      return createRateLimitResponse(rateLimit)
    }

    const userId = rateLimit.userId!
    const { searchParams } = new URL(request.url)
    const rawParams = Object.fromEntries(searchParams.entries())

    const validationResult = QueryParamsSchema.safeParse(rawParams)
    if (!validationResult.success) {
      return NextResponse.json(
        { error: 'Invalid parameters', details: validationResult.error.errors },
        { status: 400 }
      )
    }

    const params = validationResult.data

    logger.info(`[${requestId}] Fetching workflows for workspace ${params.workspaceId}`, {
      userId,
      filters: {
        folderId: params.folderId,
        deployedOnly: params.deployedOnly,
      },
    })

    const conditions = [
      eq(workflow.workspaceId, params.workspaceId),
      eq(permissions.entityType, 'workspace'),
      eq(permissions.entityId, params.workspaceId),
      eq(permissions.userId, userId),
    ]

    if (params.folderId) {
      conditions.push(eq(workflow.folderId, params.folderId))
    }

    if (params.deployedOnly) {
      conditions.push(eq(workflow.isDeployed, true))
    }

    if (params.cursor) {
      const cursorData = decodeCursor(params.cursor)
      if (cursorData) {
        const cursorCondition = or(
          gt(workflow.sortOrder, cursorData.sortOrder),
          and(
            eq(workflow.sortOrder, cursorData.sortOrder),
            gt(workflow.createdAt, new Date(cursorData.createdAt))
          ),
          and(
            eq(workflow.sortOrder, cursorData.sortOrder),
            eq(workflow.createdAt, new Date(cursorData.createdAt)),
            gt(workflow.id, cursorData.id)
          )
        )
        if (cursorCondition) {
          conditions.push(cursorCondition)
        }
      }
    }

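    // Added commentary: the OR-chain above is a tuple comparison,
    // (sortOrder, createdAt, id) > (cursor.sortOrder, cursor.createdAt, cursor.id),
    // which matches the ORDER BY below and is what keeps the keyset pagination
    // stable — roughly `WHERE (sort_order, created_at, id) > ($1, $2, $3)` in raw SQL.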
    const orderByClause = [asc(workflow.sortOrder), asc(workflow.createdAt), asc(workflow.id)]

    const rows = await db
      .select({
        id: workflow.id,
        name: workflow.name,
        description: workflow.description,
        color: workflow.color,
        folderId: workflow.folderId,
        workspaceId: workflow.workspaceId,
        isDeployed: workflow.isDeployed,
        deployedAt: workflow.deployedAt,
        runCount: workflow.runCount,
        lastRunAt: workflow.lastRunAt,
        sortOrder: workflow.sortOrder,
        createdAt: workflow.createdAt,
        updatedAt: workflow.updatedAt,
      })
      .from(workflow)
      .innerJoin(
        permissions,
        and(
          eq(permissions.entityType, 'workspace'),
          eq(permissions.entityId, params.workspaceId),
          eq(permissions.userId, userId)
        )
      )
      .where(and(...conditions))
      .orderBy(...orderByClause)
      .limit(params.limit + 1)

    const hasMore = rows.length > params.limit
    const data = rows.slice(0, params.limit)

    let nextCursor: string | undefined
    if (hasMore && data.length > 0) {
      const lastWorkflow = data[data.length - 1]
      nextCursor = encodeCursor({
        sortOrder: lastWorkflow.sortOrder,
        createdAt: lastWorkflow.createdAt.toISOString(),
        id: lastWorkflow.id,
      })
    }
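    // Illustrative consumer (added; the endpoint path and response shape are
    // assumed from this file alone): fetch limit + 1 rows server-side, return
    // limit, and let the client follow nextCursor until it stops appearing.
    // async function fetchAllWorkflows(workspaceId: string): Promise<unknown[]> {
    //   const all: unknown[] = []
    //   let cursor: string | undefined
    //   do {
    //     const qs = new URLSearchParams({ workspaceId, limit: '100' })
    //     if (cursor) qs.set('cursor', cursor)
    //     const res = await fetch(`/api/v1/workflows?${qs.toString()}`)
    //     const page = await res.json()
    //     all.push(...page.data)
    //     cursor = page.nextCursor
    //   } while (cursor)
    //   return all
    // }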

    const formattedWorkflows = data.map((w) => ({
      id: w.id,
      name: w.name,
      description: w.description,
      color: w.color,
      folderId: w.folderId,
      workspaceId: w.workspaceId,
      isDeployed: w.isDeployed,
      deployedAt: w.deployedAt?.toISOString() || null,
      runCount: w.runCount,
      lastRunAt: w.lastRunAt?.toISOString() || null,
      createdAt: w.createdAt.toISOString(),
      updatedAt: w.updatedAt.toISOString(),
    }))

    const limits = await getUserLimits(userId)

    const response = createApiResponse(
      {
        data: formattedWorkflows,
        nextCursor,
      },
      limits,
      rateLimit
    )

    return NextResponse.json(response.body, { headers: response.headers })
  } catch (error: unknown) {
    const message = error instanceof Error ? error.message : 'Unknown error'
    logger.error(`[${requestId}] Workflows fetch error`, { error: message })
    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}
@@ -12,10 +12,6 @@ import { markExecutionCancelled } from '@/lib/execution/cancellation'
import { processInputFileFields } from '@/lib/execution/files'
import { preprocessExecution } from '@/lib/execution/preprocessing'
import { LoggingSession } from '@/lib/logs/execution/logging-session'
import {
  cleanupExecutionBase64Cache,
  hydrateUserFilesWithBase64,
} from '@/lib/uploads/utils/user-file-base64.server'
import { executeWorkflowCore } from '@/lib/workflows/executor/execution-core'
import { type ExecutionEvent, encodeSSEEvent } from '@/lib/workflows/executor/execution-events'
import { PauseResumeManager } from '@/lib/workflows/executor/human-in-the-loop-manager'
@@ -29,7 +25,7 @@ import type { WorkflowExecutionPayload } from '@/background/workflow-execution'
import { normalizeName } from '@/executor/constants'
import { ExecutionSnapshot } from '@/executor/execution/snapshot'
import type { ExecutionMetadata, IterationContext } from '@/executor/execution/types'
import type { NormalizedBlockOutput, StreamingExecution } from '@/executor/types'
import type { StreamingExecution } from '@/executor/types'
import { Serializer } from '@/serializer'
import { CORE_TRIGGER_TYPES, type CoreTriggerType } from '@/stores/logs/filters/types'

@@ -42,8 +38,6 @@ const ExecuteWorkflowSchema = z.object({
  useDraftState: z.boolean().optional(),
  input: z.any().optional(),
  isClientSession: z.boolean().optional(),
  includeFileBase64: z.boolean().optional().default(true),
  base64MaxBytes: z.number().int().positive().optional(),
  workflowStateOverride: z
    .object({
      blocks: z.record(z.any()),
@@ -220,8 +214,6 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
    useDraftState,
    input: validatedInput,
    isClientSession = false,
    includeFileBase64,
    base64MaxBytes,
    workflowStateOverride,
  } = validation.data

@@ -235,8 +227,6 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
    triggerType,
    stream,
    useDraftState,
    includeFileBase64,
    base64MaxBytes,
    workflowStateOverride,
    workflowId: _workflowId, // Also exclude workflowId used for internal JWT auth
    ...rest
@@ -437,31 +427,16 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
      snapshot,
      callbacks: {},
      loggingSession,
      includeFileBase64,
      base64MaxBytes,
    })

    const outputWithBase64 = includeFileBase64
      ? ((await hydrateUserFilesWithBase64(result.output, {
          requestId,
          executionId,
          maxBytes: base64MaxBytes,
        })) as NormalizedBlockOutput)
      : result.output

    const resultWithBase64 = { ...result, output: outputWithBase64 }

    // Cleanup base64 cache for this execution
    await cleanupExecutionBase64Cache(executionId)

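    // Added note (interpretation of the hunk above, not stated in the source):
    // the hydrate/cleanup pair is a request-scoped enrichment pattern — file
    // outputs are inflated with base64 payloads (optionally capped by
    // base64MaxBytes) for exactly one response, and the per-execution cache is
    // dropped immediately afterwards so large blobs never outlive the request.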
    const hasResponseBlock = workflowHasResponseBlock(resultWithBase64)
    const hasResponseBlock = workflowHasResponseBlock(result)
    if (hasResponseBlock) {
      return createHttpResponseFromBlock(resultWithBase64)
      return createHttpResponseFromBlock(result)
    }

    const filteredResult = {
      success: result.success,
      output: outputWithBase64,
      output: result.output,
      error: result.error,
      metadata: result.metadata
        ? {
@@ -523,8 +498,6 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
        selectedOutputs: resolvedSelectedOutputs,
        isSecureMode: false,
        workflowTriggerType: triggerType === 'chat' ? 'chat' : 'api',
        includeFileBase64,
        base64MaxBytes,
      },
      executionId,
    })
@@ -725,8 +698,6 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
      },
      loggingSession,
      abortSignal: abortController.signal,
      includeFileBase64,
      base64MaxBytes,
    })

    if (result.status === 'paused') {
@@ -779,21 +750,12 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
      workflowId,
      data: {
        success: result.success,
        output: includeFileBase64
          ? await hydrateUserFilesWithBase64(result.output, {
              requestId,
              executionId,
              maxBytes: base64MaxBytes,
            })
          : result.output,
        output: result.output,
        duration: result.metadata?.duration || 0,
        startTime: result.metadata?.startTime || startTime.toISOString(),
        endTime: result.metadata?.endTime || new Date().toISOString(),
      },
    })

    // Cleanup base64 cache for this execution
    await cleanupExecutionBase64Cache(executionId)
  } catch (error: any) {
    const errorMessage = error.message || 'Unknown error'
    logger.error(`[${requestId}] SSE execution failed: ${errorMessage}`)

@@ -2,7 +2,7 @@

import { useRef, useState } from 'react'
import { createLogger } from '@sim/logger'
import { isUserFileWithMetadata } from '@/lib/core/utils/user-file'
import { isUserFile } from '@/lib/core/utils/display-filters'
import type { ChatFile, ChatMessage } from '@/app/chat/components/message/message'
import { CHAT_ERROR_MESSAGES } from '@/app/chat/constants'

@@ -17,7 +17,7 @@ function extractFilesFromData(
    return files
  }

  if (isUserFileWithMetadata(data)) {
  if (isUserFile(data)) {
    if (!seenIds.has(data.id)) {
      seenIds.add(data.id)
      files.push({
@@ -232,7 +232,7 @@ export function useChatStreaming() {
      return null
    }

    if (isUserFileWithMetadata(value)) {
    if (isUserFile(value)) {
      return null
    }

@@ -285,7 +285,7 @@ export function useChatStreaming() {

    const value = getOutputValue(blockOutputs, config.path)

    if (isUserFileWithMetadata(value)) {
    if (isUserFile(value)) {
      extractedFiles.push({
        id: value.id,
        name: value.name,

@@ -1,13 +1,10 @@
'use client'

import { Suspense, useEffect, useState } from 'react'
import { Loader2 } from 'lucide-react'
import { CheckCircle, Heart, Info, Loader2, XCircle } from 'lucide-react'
import { useSearchParams } from 'next/navigation'
import { inter } from '@/app/_styles/fonts/inter/inter'
import { soehne } from '@/app/_styles/fonts/soehne/soehne'
import { BrandedButton } from '@/app/(auth)/components/branded-button'
import { SupportFooter } from '@/app/(auth)/components/support-footer'
import { InviteLayout } from '@/app/invite/components'
import { Button, Card, CardContent, CardDescription, CardHeader, CardTitle } from '@/components/ui'
import { useBrandConfig } from '@/lib/branding/branding'

interface UnsubscribeData {
  success: boolean
@@ -30,6 +27,7 @@ function UnsubscribeContent() {
  const [error, setError] = useState<string | null>(null)
  const [processing, setProcessing] = useState(false)
  const [unsubscribed, setUnsubscribed] = useState(false)
  const brand = useBrandConfig()

  const email = searchParams.get('email')
  const token = searchParams.get('token')
@@ -111,7 +109,7 @@ function UnsubscribeContent() {
      } else {
        setError(result.error || 'Failed to unsubscribe')
      }
    } catch {
    } catch (error) {
      setError('Failed to process unsubscribe request')
    } finally {
      setProcessing(false)
@@ -120,171 +118,272 @@ function UnsubscribeContent() {

  if (loading) {
    return (
      <InviteLayout>
        <div className='space-y-1 text-center'>
          <h1 className={`${soehne.className} font-medium text-[32px] text-black tracking-tight`}>
            Loading
          </h1>
          <p className={`${inter.className} font-[380] text-[16px] text-muted-foreground`}>
            Validating your unsubscribe link...
          </p>
        </div>
        <div className={`${inter.className} mt-8 flex w-full items-center justify-center py-8`}>
          <Loader2 className='h-8 w-8 animate-spin text-muted-foreground' />
        </div>
        <SupportFooter position='absolute' />
      </InviteLayout>
      <div className='before:-z-50 relative flex min-h-screen items-center justify-center before:pointer-events-none before:fixed before:inset-0 before:bg-white'>
        <Card className='w-full max-w-md border shadow-sm'>
          <CardContent className='flex items-center justify-center p-8'>
            <Loader2 className='h-8 w-8 animate-spin text-muted-foreground' />
          </CardContent>
        </Card>
      </div>
    )
  }

  if (error) {
    return (
      <InviteLayout>
        <div className='space-y-1 text-center'>
          <h1 className={`${soehne.className} font-medium text-[32px] text-black tracking-tight`}>
            Invalid Unsubscribe Link
          </h1>
          <p className={`${inter.className} font-[380] text-[16px] text-muted-foreground`}>
            {error}
          </p>
        </div>
      <div className='before:-z-50 relative flex min-h-screen items-center justify-center p-4 before:pointer-events-none before:fixed before:inset-0 before:bg-white'>
        <Card className='w-full max-w-md border shadow-sm'>
          <CardHeader className='text-center'>
            <XCircle className='mx-auto mb-2 h-12 w-12 text-red-500' />
            <CardTitle className='text-foreground'>Invalid Unsubscribe Link</CardTitle>
            <CardDescription className='text-muted-foreground'>
              This unsubscribe link is invalid or has expired
            </CardDescription>
          </CardHeader>
          <CardContent className='space-y-4'>
            <div className='rounded-lg border bg-red-50 p-4'>
              <p className='text-red-800 text-sm'>
                <strong>Error:</strong> {error}
              </p>
            </div>

        <div className={`${inter.className} mt-8 w-full max-w-[410px] space-y-3`}>
          <BrandedButton onClick={() => window.history.back()}>Go Back</BrandedButton>
        </div>
            <div className='space-y-3'>
              <p className='text-muted-foreground text-sm'>This could happen if:</p>
              <ul className='ml-4 list-inside list-disc space-y-1 text-muted-foreground text-sm'>
                <li>The link is missing required parameters</li>
                <li>The link has expired or been used already</li>
                <li>The link was copied incorrectly</li>
              </ul>
            </div>

        <SupportFooter position='absolute' />
      </InviteLayout>
            <div className='mt-6 flex flex-col gap-3'>
              <Button
                onClick={() =>
                  window.open(
                    `mailto:${brand.supportEmail}?subject=Unsubscribe%20Help&body=Hi%2C%20I%20need%20help%20unsubscribing%20from%20emails.%20My%20unsubscribe%20link%20is%20not%20working.`,
                    '_blank'
                  )
                }
                className='w-full bg-[var(--brand-primary-hex)] font-medium text-white shadow-sm transition-colors duration-200 hover:bg-[var(--brand-primary-hover-hex)]'
              >
                Contact Support
              </Button>
              <Button onClick={() => window.history.back()} variant='outline' className='w-full'>
                Go Back
              </Button>
            </div>

            <div className='mt-4 text-center'>
              <p className='text-muted-foreground text-xs'>
                Need immediate help? Email us at{' '}
                <a
                  href={`mailto:${brand.supportEmail}`}
                  className='text-muted-foreground hover:underline'
                >
                  {brand.supportEmail}
                </a>
              </p>
            </div>
          </CardContent>
        </Card>
      </div>
    )
  }

  if (data?.isTransactional) {
    return (
      <InviteLayout>
        <div className='space-y-1 text-center'>
          <h1 className={`${soehne.className} font-medium text-[32px] text-black tracking-tight`}>
            Important Account Emails
          </h1>
          <p className={`${inter.className} font-[380] text-[16px] text-muted-foreground`}>
            Transactional emails like password resets, account confirmations, and security alerts
            cannot be unsubscribed from as they contain essential information for your account.
          </p>
        </div>
      <div className='before:-z-50 relative flex min-h-screen items-center justify-center p-4 before:pointer-events-none before:fixed before:inset-0 before:bg-white'>
        <Card className='w-full max-w-md border shadow-sm'>
          <CardHeader className='text-center'>
            <Info className='mx-auto mb-2 h-12 w-12 text-blue-500' />
            <CardTitle className='text-foreground'>Important Account Emails</CardTitle>
            <CardDescription className='text-muted-foreground'>
              This email contains important information about your account
            </CardDescription>
          </CardHeader>
          <CardContent className='space-y-4'>
            <div className='rounded-lg border bg-blue-50 p-4'>
              <p className='text-blue-800 text-sm'>
                <strong>Transactional emails</strong> like password resets, account confirmations,
                and security alerts cannot be unsubscribed from as they contain essential
                information for your account security and functionality.
              </p>
            </div>

        <div className={`${inter.className} mt-8 w-full max-w-[410px] space-y-3`}>
          <BrandedButton onClick={() => window.close()}>Close</BrandedButton>
        </div>
            <div className='space-y-3'>
              <p className='text-foreground text-sm'>
                If you no longer wish to receive these emails, you can:
              </p>
              <ul className='ml-4 list-inside list-disc space-y-1 text-muted-foreground text-sm'>
                <li>Close your account entirely</li>
                <li>Contact our support team for assistance</li>
              </ul>
            </div>

        <SupportFooter position='absolute' />
      </InviteLayout>
            <div className='mt-6 flex flex-col gap-3'>
              <Button
                onClick={() =>
                  window.open(
                    `mailto:${brand.supportEmail}?subject=Account%20Help&body=Hi%2C%20I%20need%20help%20with%20my%20account%20emails.`,
                    '_blank'
                  )
                }
                className='w-full bg-blue-600 text-white hover:bg-blue-700'
              >
                Contact Support
              </Button>
              <Button onClick={() => window.close()} variant='outline' className='w-full'>
                Close
              </Button>
            </div>
          </CardContent>
        </Card>
      </div>
    )
  }

  if (unsubscribed) {
    return (
      <InviteLayout>
        <div className='space-y-1 text-center'>
          <h1 className={`${soehne.className} font-medium text-[32px] text-black tracking-tight`}>
            Successfully Unsubscribed
          </h1>
          <p className={`${inter.className} font-[380] text-[16px] text-muted-foreground`}>
            You have been unsubscribed from our emails. You will stop receiving emails within 48
            hours.
          </p>
        </div>

        <div className={`${inter.className} mt-8 w-full max-w-[410px] space-y-3`}>
          <BrandedButton onClick={() => window.close()}>Close</BrandedButton>
        </div>

        <SupportFooter position='absolute' />
      </InviteLayout>
      <div className='before:-z-50 relative flex min-h-screen items-center justify-center before:pointer-events-none before:fixed before:inset-0 before:bg-white'>
        <Card className='w-full max-w-md border shadow-sm'>
          <CardHeader className='text-center'>
            <CheckCircle className='mx-auto mb-2 h-12 w-12 text-green-500' />
            <CardTitle className='text-foreground'>Successfully Unsubscribed</CardTitle>
            <CardDescription className='text-muted-foreground'>
              You have been unsubscribed from our emails. You will stop receiving emails within 48
              hours.
            </CardDescription>
          </CardHeader>
          <CardContent className='text-center'>
            <p className='text-muted-foreground text-sm'>
              If you change your mind, you can always update your email preferences in your account
              settings or contact us at{' '}
              <a
                href={`mailto:${brand.supportEmail}`}
                className='text-muted-foreground hover:underline'
              >
                {brand.supportEmail}
              </a>
            </p>
          </CardContent>
        </Card>
      </div>
    )
  }

  const isAlreadyUnsubscribedFromAll = data?.currentPreferences.unsubscribeAll

  return (
    <InviteLayout>
      <div className='space-y-1 text-center'>
        <h1 className={`${soehne.className} font-medium text-[32px] text-black tracking-tight`}>
          Email Preferences
        </h1>
        <p className={`${inter.className} font-[380] text-[16px] text-muted-foreground`}>
          Choose which emails you'd like to stop receiving.
        </p>
        <p className={`${inter.className} mt-2 font-[380] text-[14px] text-muted-foreground`}>
          {data?.email}
        </p>
      </div>
    <div className='before:-z-50 relative flex min-h-screen items-center justify-center p-4 before:pointer-events-none before:fixed before:inset-0 before:bg-white'>
      <Card className='w-full max-w-md border shadow-sm'>
        <CardHeader className='text-center'>
          <Heart className='mx-auto mb-2 h-12 w-12 text-red-500' />
          <CardTitle className='text-foreground'>We're sorry to see you go!</CardTitle>
          <CardDescription className='text-muted-foreground'>
            We understand email preferences are personal. Choose which emails you'd like to
            stop receiving from Sim.
          </CardDescription>
          <div className='mt-2 rounded-lg border bg-muted/50 p-3'>
            <p className='text-muted-foreground text-xs'>
              Email: <span className='font-medium text-foreground'>{data?.email}</span>
            </p>
          </div>
        </CardHeader>
        <CardContent className='space-y-4'>
          <div className='space-y-3'>
            <Button
              onClick={() => handleUnsubscribe('all')}
              disabled={processing || data?.currentPreferences.unsubscribeAll}
              variant='destructive'
              className='w-full'
            >
              {data?.currentPreferences.unsubscribeAll ? (
                <CheckCircle className='mr-2 h-4 w-4' />
              ) : null}
              {processing
                ? 'Unsubscribing...'
                : data?.currentPreferences.unsubscribeAll
                  ? 'Unsubscribed from All Emails'
                  : 'Unsubscribe from All Marketing Emails'}
            </Button>

      <div className={`${inter.className} mt-8 w-full max-w-[410px] space-y-3`}>
        <BrandedButton
          onClick={() => handleUnsubscribe('all')}
          disabled={processing || isAlreadyUnsubscribedFromAll}
          loading={processing}
          loadingText='Unsubscribing'
        >
          {isAlreadyUnsubscribedFromAll
            ? 'Unsubscribed from All Emails'
            : 'Unsubscribe from All Marketing Emails'}
        </BrandedButton>
            <div className='text-center text-muted-foreground text-sm'>
              or choose specific types:
            </div>

        <div className='py-2 text-center'>
          <span className={`${inter.className} font-[380] text-[14px] text-muted-foreground`}>
            or choose specific types
          </span>
        </div>
            <Button
              onClick={() => handleUnsubscribe('marketing')}
              disabled={
                processing ||
                data?.currentPreferences.unsubscribeAll ||
                data?.currentPreferences.unsubscribeMarketing
              }
              variant='outline'
              className='w-full'
            >
              {data?.currentPreferences.unsubscribeMarketing ? (
                <CheckCircle className='mr-2 h-4 w-4' />
              ) : null}
              {data?.currentPreferences.unsubscribeMarketing
                ? 'Unsubscribed from Marketing'
                : 'Unsubscribe from Marketing Emails'}
            </Button>

        <BrandedButton
          onClick={() => handleUnsubscribe('marketing')}
          disabled={
            processing ||
            isAlreadyUnsubscribedFromAll ||
            data?.currentPreferences.unsubscribeMarketing
          }
        >
          {data?.currentPreferences.unsubscribeMarketing
            ? 'Unsubscribed from Marketing'
            : 'Unsubscribe from Marketing Emails'}
        </BrandedButton>
            <Button
              onClick={() => handleUnsubscribe('updates')}
              disabled={
                processing ||
                data?.currentPreferences.unsubscribeAll ||
                data?.currentPreferences.unsubscribeUpdates
              }
              variant='outline'
              className='w-full'
            >
              {data?.currentPreferences.unsubscribeUpdates ? (
                <CheckCircle className='mr-2 h-4 w-4' />
              ) : null}
              {data?.currentPreferences.unsubscribeUpdates
                ? 'Unsubscribed from Updates'
                : 'Unsubscribe from Product Updates'}
            </Button>

        <BrandedButton
          onClick={() => handleUnsubscribe('updates')}
          disabled={
            processing ||
            isAlreadyUnsubscribedFromAll ||
            data?.currentPreferences.unsubscribeUpdates
          }
        >
          {data?.currentPreferences.unsubscribeUpdates
            ? 'Unsubscribed from Updates'
            : 'Unsubscribe from Product Updates'}
        </BrandedButton>
            <Button
              onClick={() => handleUnsubscribe('notifications')}
              disabled={
                processing ||
                data?.currentPreferences.unsubscribeAll ||
                data?.currentPreferences.unsubscribeNotifications
              }
              variant='outline'
              className='w-full'
            >
              {data?.currentPreferences.unsubscribeNotifications ? (
                <CheckCircle className='mr-2 h-4 w-4' />
              ) : null}
              {data?.currentPreferences.unsubscribeNotifications
                ? 'Unsubscribed from Notifications'
                : 'Unsubscribe from Notifications'}
            </Button>
          </div>

        <BrandedButton
          onClick={() => handleUnsubscribe('notifications')}
          disabled={
            processing ||
            isAlreadyUnsubscribedFromAll ||
            data?.currentPreferences.unsubscribeNotifications
          }
        >
          {data?.currentPreferences.unsubscribeNotifications
            ? 'Unsubscribed from Notifications'
            : 'Unsubscribe from Notifications'}
        </BrandedButton>
      </div>
          <div className='mt-6 space-y-3'>
            <div className='rounded-lg border bg-muted/50 p-3'>
              <p className='text-center text-muted-foreground text-xs'>
                <strong>Note:</strong> You'll continue receiving important account emails like
                password resets and security alerts.
              </p>
            </div>

      <div className={`${inter.className} mt-6 max-w-[410px] text-center`}>
        <p className='font-[380] text-[13px] text-muted-foreground'>
          You'll continue receiving important account emails like password resets and security
          alerts.
        </p>
      </div>

      <SupportFooter position='absolute' />
    </InviteLayout>
            <p className='text-center text-muted-foreground text-xs'>
              Questions? Contact us at{' '}
              <a
                href={`mailto:${brand.supportEmail}`}
                className='text-muted-foreground hover:underline'
              >
                {brand.supportEmail}
              </a>
            </p>
          </div>
        </CardContent>
      </Card>
    </div>
  )
}

@@ -292,20 +391,13 @@ export default function Unsubscribe() {
  return (
    <Suspense
      fallback={
        <InviteLayout>
          <div className='space-y-1 text-center'>
            <h1 className={`${soehne.className} font-medium text-[32px] text-black tracking-tight`}>
              Loading
            </h1>
            <p className={`${inter.className} font-[380] text-[16px] text-muted-foreground`}>
              Validating your unsubscribe link...
            </p>
          </div>
          <div className={`${inter.className} mt-8 flex w-full items-center justify-center py-8`}>
            <Loader2 className='h-8 w-8 animate-spin text-muted-foreground' />
          </div>
          <SupportFooter position='absolute' />
        </InviteLayout>
        <div className='before:-z-50 relative flex min-h-screen items-center justify-center before:pointer-events-none before:fixed before:inset-0 before:bg-white'>
          <Card className='w-full max-w-md border shadow-sm'>
            <CardContent className='flex items-center justify-center p-8'>
              <Loader2 className='h-8 w-8 animate-spin text-muted-foreground' />
            </CardContent>
          </Card>
        </div>
      }
    >
      <UnsubscribeContent />

@@ -2,6 +2,7 @@

import { useRef, useState } from 'react'
import { createLogger } from '@sim/logger'
import { useQueryClient } from '@tanstack/react-query'
import {
  Button,
  Label,
@@ -13,7 +14,7 @@ import {
  Textarea,
} from '@/components/emcn'
import type { DocumentData } from '@/lib/knowledge/types'
import { useCreateChunk } from '@/hooks/queries/knowledge'
import { knowledgeKeys } from '@/hooks/queries/knowledge'

const logger = createLogger('CreateChunkModal')

@@ -30,20 +31,16 @@ export function CreateChunkModal({
  document,
  knowledgeBaseId,
}: CreateChunkModalProps) {
  const {
    mutate: createChunk,
    isPending: isCreating,
    error: mutationError,
    reset: resetMutation,
  } = useCreateChunk()
  const queryClient = useQueryClient()
  const [content, setContent] = useState('')
  const [isCreating, setIsCreating] = useState(false)
  const [error, setError] = useState<string | null>(null)
  const [showUnsavedChangesAlert, setShowUnsavedChangesAlert] = useState(false)
  const isProcessingRef = useRef(false)

  const error = mutationError?.message ?? null
  const hasUnsavedChanges = content.trim().length > 0

  const handleCreateChunk = () => {
  const handleCreateChunk = async () => {
    if (!document || content.trim().length === 0 || isProcessingRef.current) {
      if (isProcessingRef.current) {
        logger.warn('Chunk creation already in progress, ignoring duplicate request')
@@ -51,32 +48,57 @@ export function CreateChunkModal({
      return
    }

    isProcessingRef.current = true
    try {
      isProcessingRef.current = true
      setIsCreating(true)
      setError(null)

    createChunk(
      {
        knowledgeBaseId,
        documentId: document.id,
        content: content.trim(),
        enabled: true,
      },
      {
        onSuccess: () => {
          isProcessingRef.current = false
          onClose()
        },
        onError: () => {
          isProcessingRef.current = false
        },
      const response = await fetch(
        `/api/knowledge/${knowledgeBaseId}/documents/${document.id}/chunks`,
        {
          method: 'POST',
          headers: {
            'Content-Type': 'application/json',
          },
          body: JSON.stringify({
            content: content.trim(),
            enabled: true,
          }),
        }
      )

      if (!response.ok) {
        const result = await response.json()
        throw new Error(result.error || 'Failed to create chunk')
      }
    )

      const result = await response.json()

      if (result.success && result.data) {
        logger.info('Chunk created successfully:', result.data.id)

        await queryClient.invalidateQueries({
          queryKey: knowledgeKeys.detail(knowledgeBaseId),
        })

        onClose()
      } else {
        throw new Error(result.error || 'Failed to create chunk')
      }
    } catch (err) {
      logger.error('Error creating chunk:', err)
      setError(err instanceof Error ? err.message : 'An error occurred')
    } finally {
      isProcessingRef.current = false
      setIsCreating(false)
    }
  }

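  // Added note: isProcessingRef is a ref rather than state on purpose — a
  // second click that lands before React re-renders still sees
  // isProcessingRef.current === true, so the duplicate-submit guard is
  // synchronous in a way the isCreating state flag alone cannot guarantee.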
  const onClose = () => {
    onOpenChange(false)
    setContent('')
    setError(null)
    setShowUnsavedChangesAlert(false)
    resetMutation()
  }

  const handleCloseAttempt = () => {

@@ -1,8 +1,13 @@
'use client'

import { useState } from 'react'
import { createLogger } from '@sim/logger'
import { useQueryClient } from '@tanstack/react-query'
import { Button, Modal, ModalBody, ModalContent, ModalFooter, ModalHeader } from '@/components/emcn'
import type { ChunkData } from '@/lib/knowledge/types'
import { useDeleteChunk } from '@/hooks/queries/knowledge'
import { knowledgeKeys } from '@/hooks/queries/knowledge'

const logger = createLogger('DeleteChunkModal')

interface DeleteChunkModalProps {
  chunk: ChunkData | null
@@ -19,12 +24,44 @@ export function DeleteChunkModal({
  isOpen,
  onClose,
}: DeleteChunkModalProps) {
  const { mutate: deleteChunk, isPending: isDeleting } = useDeleteChunk()
  const queryClient = useQueryClient()
  const [isDeleting, setIsDeleting] = useState(false)

  const handleDeleteChunk = () => {
  const handleDeleteChunk = async () => {
    if (!chunk || isDeleting) return

    deleteChunk({ knowledgeBaseId, documentId, chunkId: chunk.id }, { onSuccess: onClose })
    try {
      setIsDeleting(true)

      const response = await fetch(
        `/api/knowledge/${knowledgeBaseId}/documents/${documentId}/chunks/${chunk.id}`,
        {
          method: 'DELETE',
        }
      )

      if (!response.ok) {
        throw new Error('Failed to delete chunk')
      }

      const result = await response.json()

      if (result.success) {
        logger.info('Chunk deleted successfully:', chunk.id)

        await queryClient.invalidateQueries({
          queryKey: knowledgeKeys.detail(knowledgeBaseId),
        })

        onClose()
      } else {
        throw new Error(result.error || 'Failed to delete chunk')
      }
    } catch (err) {
      logger.error('Error deleting chunk:', err)
    } finally {
      setIsDeleting(false)
    }
  }

  if (!chunk) return null

@@ -25,7 +25,6 @@ import {
} from '@/hooks/kb/use-knowledge-base-tag-definitions'
import { useNextAvailableSlot } from '@/hooks/kb/use-next-available-slot'
import { type TagDefinitionInput, useTagDefinitions } from '@/hooks/kb/use-tag-definitions'
import { useUpdateDocumentTags } from '@/hooks/queries/knowledge'

const logger = createLogger('DocumentTagsModal')

@@ -59,6 +58,8 @@ function formatValueForDisplay(value: string, fieldType: string): string {
  try {
    const date = new Date(value)
    if (Number.isNaN(date.getTime())) return value
    // For UTC dates, display the UTC date to prevent timezone shifts
    // e.g., 2002-05-16T00:00:00.000Z should show as "May 16, 2002" not "May 15, 2002"
    if (typeof value === 'string' && (value.endsWith('Z') || /[+-]\d{2}:\d{2}$/.test(value))) {
      return new Date(
        date.getUTCFullYear(),
@@ -95,7 +96,6 @@ export function DocumentTagsModal({
  const documentTagHook = useTagDefinitions(knowledgeBaseId, documentId)
  const kbTagHook = useKnowledgeBaseTagDefinitions(knowledgeBaseId)
  const { getNextAvailableSlot: getServerNextSlot } = useNextAvailableSlot(knowledgeBaseId)
  const { mutateAsync: updateDocumentTags } = useUpdateDocumentTags()

  const { saveTagDefinitions, tagDefinitions, fetchTagDefinitions } = documentTagHook
  const { tagDefinitions: kbTagDefinitions, fetchTagDefinitions: refreshTagDefinitions } = kbTagHook
@@ -118,6 +118,7 @@ export function DocumentTagsModal({
      const definition = definitions.find((def) => def.tagSlot === slot)

      if (rawValue !== null && rawValue !== undefined && definition) {
        // Convert value to string for storage
        const stringValue = String(rawValue).trim()
        if (stringValue) {
          tags.push({
@@ -141,34 +142,41 @@ export function DocumentTagsModal({
    async (tagsToSave: DocumentTag[]) => {
      if (!documentData) return

      const tagData: Record<string, string> = {}
      try {
        const tagData: Record<string, string> = {}

      ALL_TAG_SLOTS.forEach((slot) => {
        const tag = tagsToSave.find((t) => t.slot === slot)
        if (tag?.value.trim()) {
          tagData[slot] = tag.value.trim()
        } else {
          tagData[slot] = ''
        // Only include tags that have values (omit empty ones)
        // Use empty string for slots that should be cleared
        ALL_TAG_SLOTS.forEach((slot) => {
          const tag = tagsToSave.find((t) => t.slot === slot)
          if (tag?.value.trim()) {
            tagData[slot] = tag.value.trim()
          } else {
            // Use empty string to clear a tag (API schema expects string, not null)
            tagData[slot] = ''
          }
        })

      const response = await fetch(`/api/knowledge/${knowledgeBaseId}/documents/${documentId}`, {
        method: 'PUT',
        headers: {
          'Content-Type': 'application/json',
        },
        body: JSON.stringify(tagData),
      })

      if (!response.ok) {
        throw new Error('Failed to update document tags')
      }
      })

        await updateDocumentTags({
          knowledgeBaseId,
          documentId,
          tags: tagData,
        })

      onDocumentUpdate?.(tagData)
      await fetchTagDefinitions()
        onDocumentUpdate?.(tagData as Record<string, string>)
        await fetchTagDefinitions()
      } catch (error) {
        logger.error('Error updating document tags:', error)
        throw error
      }
    },
    [
      documentData,
      knowledgeBaseId,
      documentId,
      updateDocumentTags,
      fetchTagDefinitions,
      onDocumentUpdate,
    ]
    [documentData, knowledgeBaseId, documentId, fetchTagDefinitions, onDocumentUpdate]
  )

  const handleRemoveTag = async (index: number) => {

@@ -2,6 +2,7 @@

import { useEffect, useMemo, useRef, useState } from 'react'
import { createLogger } from '@sim/logger'
import { useQueryClient } from '@tanstack/react-query'
import { ChevronDown, ChevronUp } from 'lucide-react'
import {
  Button,
@@ -18,7 +19,7 @@ import {
import type { ChunkData, DocumentData } from '@/lib/knowledge/types'
import { getAccurateTokenCount, getTokenStrings } from '@/lib/tokenization/estimators'
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
import { useUpdateChunk } from '@/hooks/queries/knowledge'
import { knowledgeKeys } from '@/hooks/queries/knowledge'

const logger = createLogger('EditChunkModal')

@@ -49,22 +50,17 @@ export function EditChunkModal({
  onNavigateToPage,
  maxChunkSize,
}: EditChunkModalProps) {
  const queryClient = useQueryClient()
  const userPermissions = useUserPermissionsContext()
  const {
    mutate: updateChunk,
    isPending: isSaving,
    error: mutationError,
    reset: resetMutation,
  } = useUpdateChunk()
  const [editedContent, setEditedContent] = useState(chunk?.content || '')
  const [isSaving, setIsSaving] = useState(false)
  const [isNavigating, setIsNavigating] = useState(false)
  const [error, setError] = useState<string | null>(null)
  const [showUnsavedChangesAlert, setShowUnsavedChangesAlert] = useState(false)
  const [pendingNavigation, setPendingNavigation] = useState<(() => void) | null>(null)
  const [tokenizerOn, setTokenizerOn] = useState(false)
  const textareaRef = useRef<HTMLTextAreaElement>(null)

  const error = mutationError?.message ?? null

  const hasUnsavedChanges = editedContent !== (chunk?.content || '')

  const tokenStrings = useMemo(() => {
@@ -106,15 +102,44 @@ export function EditChunkModal({
  const canNavigatePrev = currentChunkIndex > 0 || currentPage > 1
  const canNavigateNext = currentChunkIndex < allChunks.length - 1 || currentPage < totalPages

  const handleSaveContent = () => {
  const handleSaveContent = async () => {
    if (!chunk || !document) return

    updateChunk({
      knowledgeBaseId,
      documentId: document.id,
      chunkId: chunk.id,
      content: editedContent,
    })
    try {
      setIsSaving(true)
      setError(null)

      const response = await fetch(
        `/api/knowledge/${knowledgeBaseId}/documents/${document.id}/chunks/${chunk.id}`,
        {
          method: 'PUT',
          headers: {
            'Content-Type': 'application/json',
          },
          body: JSON.stringify({
            content: editedContent,
          }),
        }
      )

      if (!response.ok) {
        const result = await response.json()
        throw new Error(result.error || 'Failed to update chunk')
      }

      const result = await response.json()

      if (result.success) {
        await queryClient.invalidateQueries({
          queryKey: knowledgeKeys.detail(knowledgeBaseId),
        })
      }
    } catch (err) {
      logger.error('Error updating chunk:', err)
      setError(err instanceof Error ? err.message : 'An error occurred')
    } finally {
      setIsSaving(false)
    }
  }

  const navigateToChunk = async (direction: 'prev' | 'next') => {
@@ -140,6 +165,7 @@ export function EditChunkModal({
      }
    } catch (err) {
      logger.error(`Error navigating ${direction}:`, err)
      setError(`Failed to navigate to ${direction === 'prev' ? 'previous' : 'next'} chunk`)
    } finally {
      setIsNavigating(false)
    }
@@ -159,7 +185,6 @@ export function EditChunkModal({
      setPendingNavigation(null)
      setShowUnsavedChangesAlert(true)
    } else {
      resetMutation()
      onClose()
    }
  }
@@ -170,7 +195,6 @@ export function EditChunkModal({
      void pendingNavigation()
      setPendingNavigation(null)
    } else {
      resetMutation()
      onClose()
    }
  }

@@ -48,13 +48,7 @@ import { ActionBar } from '@/app/workspace/[workspaceId]/knowledge/[id]/componen
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
import { useContextMenu } from '@/app/workspace/[workspaceId]/w/components/sidebar/hooks'
import { useDocument, useDocumentChunks, useKnowledgeBase } from '@/hooks/kb/use-knowledge'
import {
  knowledgeKeys,
  useBulkChunkOperation,
  useDeleteDocument,
  useDocumentChunkSearchQuery,
  useUpdateChunk,
} from '@/hooks/queries/knowledge'
import { knowledgeKeys, useDocumentChunkSearchQuery } from '@/hooks/queries/knowledge'

const logger = createLogger('Document')

@@ -409,13 +403,11 @@ export function Document({
  const [isCreateChunkModalOpen, setIsCreateChunkModalOpen] = useState(false)
  const [chunkToDelete, setChunkToDelete] = useState<ChunkData | null>(null)
  const [isDeleteModalOpen, setIsDeleteModalOpen] = useState(false)
  const [isBulkOperating, setIsBulkOperating] = useState(false)
  const [showDeleteDocumentDialog, setShowDeleteDocumentDialog] = useState(false)
  const [isDeletingDocument, setIsDeletingDocument] = useState(false)
  const [contextMenuChunk, setContextMenuChunk] = useState<ChunkData | null>(null)

  const { mutate: updateChunkMutation } = useUpdateChunk()
  const { mutate: deleteDocumentMutation, isPending: isDeletingDocument } = useDeleteDocument()
  const { mutate: bulkChunkMutation, isPending: isBulkOperating } = useBulkChunkOperation()

  const {
    isOpen: isContextMenuOpen,
    position: contextMenuPosition,
@@ -448,23 +440,36 @@ export function Document({
    setSelectedChunk(null)
  }

  const handleToggleEnabled = (chunkId: string) => {
  const handleToggleEnabled = async (chunkId: string) => {
    const chunk = displayChunks.find((c) => c.id === chunkId)
    if (!chunk) return

    updateChunkMutation(
      {
        knowledgeBaseId,
        documentId,
        chunkId,
        enabled: !chunk.enabled,
      },
      {
        onSuccess: () => {
          updateChunk(chunkId, { enabled: !chunk.enabled })
        },
    try {
      const response = await fetch(
        `/api/knowledge/${knowledgeBaseId}/documents/${documentId}/chunks/${chunkId}`,
        {
          method: 'PUT',
          headers: {
            'Content-Type': 'application/json',
          },
          body: JSON.stringify({
            enabled: !chunk.enabled,
          }),
        }
      )

      if (!response.ok) {
        throw new Error('Failed to update chunk')
      }
    )

      const result = await response.json()

      if (result.success) {
        updateChunk(chunkId, { enabled: !chunk.enabled })
      }
    } catch (err) {
      logger.error('Error updating chunk:', err)
    }
  }

  const handleDeleteChunk = (chunkId: string) => {
@@ -510,69 +515,107 @@ export function Document({
  /**
   * Handles deleting the document
   */
  const handleDeleteDocument = () => {
  const handleDeleteDocument = async () => {
    if (!documentData) return

    deleteDocumentMutation(
      { knowledgeBaseId, documentId },
      {
        onSuccess: () => {
          router.push(`/workspace/${workspaceId}/knowledge/${knowledgeBaseId}`)
        },
    try {
      setIsDeletingDocument(true)

      const response = await fetch(`/api/knowledge/${knowledgeBaseId}/documents/${documentId}`, {
        method: 'DELETE',
      })

      if (!response.ok) {
        throw new Error('Failed to delete document')
      }
    )

      const result = await response.json()

      if (result.success) {
        await queryClient.invalidateQueries({
          queryKey: knowledgeKeys.detail(knowledgeBaseId),
        })

        router.push(`/workspace/${workspaceId}/knowledge/${knowledgeBaseId}`)
      } else {
        throw new Error(result.error || 'Failed to delete document')
      }
    } catch (err) {
      logger.error('Error deleting document:', err)
      setIsDeletingDocument(false)
    }
  }

  const performBulkChunkOperation = (
  const performBulkChunkOperation = async (
    operation: 'enable' | 'disable' | 'delete',
    chunks: ChunkData[]
  ) => {
    if (chunks.length === 0) return

    bulkChunkMutation(
      {
        knowledgeBaseId,
        documentId,
        operation,
        chunkIds: chunks.map((chunk) => chunk.id),
      },
      {
        onSuccess: (result) => {
          if (operation === 'delete') {
            refreshChunks()
          } else {
            result.results.forEach((opResult) => {
              if (opResult.operation === operation) {
                opResult.chunkIds.forEach((chunkId: string) => {
                  updateChunk(chunkId, { enabled: operation === 'enable' })
                })
              }
            })
          }
          logger.info(`Successfully ${operation}d ${result.successCount} chunks`)
          setSelectedChunks(new Set())
        },
    try {
      setIsBulkOperating(true)

      const response = await fetch(
        `/api/knowledge/${knowledgeBaseId}/documents/${documentId}/chunks`,
        {
          method: 'PATCH',
          headers: {
            'Content-Type': 'application/json',
          },
          body: JSON.stringify({
            operation,
            chunkIds: chunks.map((chunk) => chunk.id),
          }),
        }
      )

      if (!response.ok) {
        throw new Error(`Failed to ${operation} chunks`)
      }
    )

      const result = await response.json()

      if (result.success) {
        if (operation === 'delete') {
          await refreshChunks()
        } else {
          result.data.results.forEach((opResult: any) => {
            if (opResult.operation === operation) {
              opResult.chunkIds.forEach((chunkId: string) => {
                updateChunk(chunkId, { enabled: operation === 'enable' })
              })
            }
          })
        }

        logger.info(`Successfully ${operation}d ${result.data.successCount} chunks`)
      }

      setSelectedChunks(new Set())
    } catch (err) {
      logger.error(`Error ${operation}ing chunks:`, err)
    } finally {
      setIsBulkOperating(false)
    }
  }

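  // Illustrative request/response shape for the bulk endpoint used above
  // (field names taken from this call site; anything beyond them is assumed):
  //   PATCH /api/knowledge/{knowledgeBaseId}/documents/{documentId}/chunks
  //   body: { operation: 'enable' | 'disable' | 'delete', chunkIds: string[] }
  //   200 -> { success: true, data: { results: [{ operation, chunkIds }], successCount } }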
  const handleBulkEnable = () => {
  const handleBulkEnable = async () => {
    const chunksToEnable = displayChunks.filter(
      (chunk) => selectedChunks.has(chunk.id) && !chunk.enabled
    )
    performBulkChunkOperation('enable', chunksToEnable)
    await performBulkChunkOperation('enable', chunksToEnable)
  }

  const handleBulkDisable = () => {
  const handleBulkDisable = async () => {
    const chunksToDisable = displayChunks.filter(
      (chunk) => selectedChunks.has(chunk.id) && chunk.enabled
    )
    performBulkChunkOperation('disable', chunksToDisable)
    await performBulkChunkOperation('disable', chunksToDisable)
  }

  const handleBulkDelete = () => {
  const handleBulkDelete = async () => {
    const chunksToDelete = displayChunks.filter((chunk) => selectedChunks.has(chunk.id))
    performBulkChunkOperation('delete', chunksToDelete)
    await performBulkChunkOperation('delete', chunksToDelete)
  }

  const selectedChunksList = displayChunks.filter((chunk) => selectedChunks.has(chunk.id))

@@ -2,6 +2,7 @@

import { useCallback, useEffect, useRef, useState } from 'react'
import { createLogger } from '@sim/logger'
import { useQueryClient } from '@tanstack/react-query'
import { format } from 'date-fns'
import {
  AlertCircle,
@@ -61,12 +62,7 @@ import {
  type TagDefinition,
  useKnowledgeBaseTagDefinitions,
} from '@/hooks/kb/use-knowledge-base-tag-definitions'
import {
  useBulkDocumentOperation,
  useDeleteDocument,
  useDeleteKnowledgeBase,
  useUpdateDocument,
} from '@/hooks/queries/knowledge'
import { knowledgeKeys } from '@/hooks/queries/knowledge'

const logger = createLogger('KnowledgeBase')

@@ -411,17 +407,12 @@ export function KnowledgeBase({
  id,
  knowledgeBaseName: passedKnowledgeBaseName,
}: KnowledgeBaseProps) {
  const queryClient = useQueryClient()
  const params = useParams()
  const workspaceId = params.workspaceId as string
  const { removeKnowledgeBase } = useKnowledgeBasesList(workspaceId, { enabled: false })
  const userPermissions = useUserPermissionsContext()

  const { mutate: updateDocumentMutation } = useUpdateDocument()
  const { mutate: deleteDocumentMutation } = useDeleteDocument()
  const { mutate: deleteKnowledgeBaseMutation, isPending: isDeleting } =
    useDeleteKnowledgeBase(workspaceId)
  const { mutate: bulkDocumentMutation, isPending: isBulkOperating } = useBulkDocumentOperation()

  const [searchQuery, setSearchQuery] = useState('')
  const [showTagsModal, setShowTagsModal] = useState(false)

@@ -436,6 +427,8 @@ export function KnowledgeBase({
  const [selectedDocuments, setSelectedDocuments] = useState<Set<string>>(new Set())
  const [showDeleteDialog, setShowDeleteDialog] = useState(false)
  const [showAddDocumentsModal, setShowAddDocumentsModal] = useState(false)
  const [isDeleting, setIsDeleting] = useState(false)
  const [isBulkOperating, setIsBulkOperating] = useState(false)
  const [showDeleteDocumentModal, setShowDeleteDocumentModal] = useState(false)
  const [documentToDelete, setDocumentToDelete] = useState<string | null>(null)
  const [showBulkDeleteModal, setShowBulkDeleteModal] = useState(false)
@@ -557,7 +550,7 @@ export function KnowledgeBase({
  /**
   * Checks for documents with stale processing states and marks them as failed
   */
  const checkForDeadProcesses = () => {
  const checkForDeadProcesses = async () => {
    const now = new Date()
    const DEAD_PROCESS_THRESHOLD_MS = 600 * 1000 // 10 minutes

@@ -574,79 +567,116 @@ export function KnowledgeBase({

    logger.warn(`Found ${staleDocuments.length} documents with dead processes`)

    staleDocuments.forEach((doc) => {
      updateDocumentMutation(
        {
          knowledgeBaseId: id,
          documentId: doc.id,
          updates: { markFailedDueToTimeout: true },
        },
        {
          onSuccess: () => {
            logger.info(`Successfully marked dead process as failed for document: ${doc.filename}`)
    const markFailedPromises = staleDocuments.map(async (doc) => {
      try {
        const response = await fetch(`/api/knowledge/${id}/documents/${doc.id}`, {
          method: 'PUT',
          headers: {
            'Content-Type': 'application/json',
          },
          body: JSON.stringify({
            markFailedDueToTimeout: true,
          }),
        })

        if (!response.ok) {
          const errorData = await response.json().catch(() => ({ error: 'Unknown error' }))
          logger.error(`Failed to mark document ${doc.id} as failed: ${errorData.error}`)
          return
        }
      )

        const result = await response.json()
        if (result.success) {
          logger.info(`Successfully marked dead process as failed for document: ${doc.filename}`)
        }
      } catch (error) {
        logger.error(`Error marking document ${doc.id} as failed:`, error)
      }
    })

    await Promise.allSettled(markFailedPromises)
  }

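  // Added note: Promise.allSettled (rather than Promise.all) lets one failed
  // mark-as-failed request log its error and continue, so a single bad
  // document cannot abort the sweep over the remaining stale documents.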
const handleToggleEnabled = (docId: string) => {
const handleToggleEnabled = async (docId: string) => {
const document = documents.find((doc) => doc.id === docId)
if (!document) return

const newEnabled = !document.enabled

// Optimistic update
updateDocument(docId, { enabled: newEnabled })

updateDocumentMutation(
{
knowledgeBaseId: id,
documentId: docId,
updates: { enabled: newEnabled },
},
{
onError: () => {
// Rollback on error
updateDocument(docId, { enabled: !newEnabled })
try {
const response = await fetch(`/api/knowledge/${id}/documents/${docId}`, {
method: 'PUT',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify({
enabled: newEnabled,
}),
})

if (!response.ok) {
throw new Error('Failed to update document')
}
)

const result = await response.json()

if (!result.success) {
updateDocument(docId, { enabled: !newEnabled })
}
} catch (err) {
updateDocument(docId, { enabled: !newEnabled })
logger.error('Error updating document:', err)
}
}

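// handleToggleEnabled above is an optimistic update: the local store flips first,
// and flips back when the request fails or reports success: false. The rollback
// shape in isolation (toggleEnabled and its parameters are illustrative names,
// not part of this diff):
//
//   async function toggleEnabled(current: boolean, url: string, set: (v: boolean) => void) {
//     const next = !current
//     set(next) // optimistic: update the UI before the server confirms
//     try {
//       const res = await fetch(url, { method: 'PUT', body: JSON.stringify({ enabled: next }) })
//       const body = await res.json()
//       if (!res.ok || !body.success) set(current) // rollback on server-side failure
//     } catch {
//       set(current) // rollback on network error
//     }
//   }
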
/**
* Handles retrying a failed document processing
*/
const handleRetryDocument = (docId: string) => {
// Optimistic update
updateDocument(docId, {
processingStatus: 'pending',
processingError: null,
processingStartedAt: null,
processingCompletedAt: null,
})
const handleRetryDocument = async (docId: string) => {
try {
updateDocument(docId, {
processingStatus: 'pending',
processingError: null,
processingStartedAt: null,
processingCompletedAt: null,
})

updateDocumentMutation(
{
knowledgeBaseId: id,
documentId: docId,
updates: { retryProcessing: true },
},
{
onSuccess: () => {
refreshDocuments()
logger.info(`Document retry initiated successfully for: ${docId}`)
},
onError: (err) => {
logger.error('Error retrying document:', err)
updateDocument(docId, {
processingStatus: 'failed',
processingError:
err instanceof Error ? err.message : 'Failed to retry document processing',
})
const response = await fetch(`/api/knowledge/${id}/documents/${docId}`, {
method: 'PUT',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify({
retryProcessing: true,
}),
})

if (!response.ok) {
throw new Error('Failed to retry document processing')
}
)

const result = await response.json()

if (!result.success) {
throw new Error(result.error || 'Failed to retry document processing')
}

await refreshDocuments()

logger.info(`Document retry initiated successfully for: ${docId}`)
} catch (err) {
logger.error('Error retrying document:', err)
const currentDoc = documents.find((doc) => doc.id === docId)
if (currentDoc) {
updateDocument(docId, {
processingStatus: 'failed',
processingError:
err instanceof Error ? err.message : 'Failed to retry document processing',
})
}
}
}

/**
@@ -664,32 +694,43 @@ export function KnowledgeBase({
const currentDoc = documents.find((doc) => doc.id === documentId)
const previousName = currentDoc?.filename

// Optimistic update
updateDocument(documentId, { filename: newName })
queryClient.setQueryData<DocumentData>(knowledgeKeys.document(id, documentId), (previous) =>
previous ? { ...previous, filename: newName } : previous
)

return new Promise<void>((resolve, reject) => {
updateDocumentMutation(
{
knowledgeBaseId: id,
documentId,
updates: { filename: newName },
try {
const response = await fetch(`/api/knowledge/${id}/documents/${documentId}`, {
method: 'PUT',
headers: {
'Content-Type': 'application/json',
},
{
onSuccess: () => {
logger.info(`Document renamed: ${documentId}`)
resolve()
},
onError: (err) => {
// Rollback on error
if (previousName !== undefined) {
updateDocument(documentId, { filename: previousName })
}
logger.error('Error renaming document:', err)
reject(err)
},
}
)
})
body: JSON.stringify({ filename: newName }),
})

if (!response.ok) {
const result = await response.json()
throw new Error(result.error || 'Failed to rename document')
}

const result = await response.json()

if (!result.success) {
throw new Error(result.error || 'Failed to rename document')
}

logger.info(`Document renamed: ${documentId}`)
} catch (err) {
if (previousName !== undefined) {
updateDocument(documentId, { filename: previousName })
queryClient.setQueryData<DocumentData>(
knowledgeKeys.document(id, documentId),
(previous) => (previous ? { ...previous, filename: previousName } : previous)
)
}
logger.error('Error renaming document:', err)
throw err
}
}

/**
@@ -703,26 +744,35 @@ export function KnowledgeBase({
/**
* Confirms and executes the deletion of a single document
*/
const confirmDeleteDocument = () => {
const confirmDeleteDocument = async () => {
if (!documentToDelete) return

deleteDocumentMutation(
{ knowledgeBaseId: id, documentId: documentToDelete },
{
onSuccess: () => {
refreshDocuments()
setSelectedDocuments((prev) => {
const newSet = new Set(prev)
newSet.delete(documentToDelete)
return newSet
})
},
onSettled: () => {
setShowDeleteDocumentModal(false)
setDocumentToDelete(null)
},
try {
const response = await fetch(`/api/knowledge/${id}/documents/${documentToDelete}`, {
method: 'DELETE',
})

if (!response.ok) {
throw new Error('Failed to delete document')
}
)

const result = await response.json()

if (result.success) {
refreshDocuments()

setSelectedDocuments((prev) => {
const newSet = new Set(prev)
newSet.delete(documentToDelete)
return newSet
})
}
} catch (err) {
logger.error('Error deleting document:', err)
} finally {
setShowDeleteDocumentModal(false)
setDocumentToDelete(null)
}
}

/**
@@ -768,18 +818,32 @@ export function KnowledgeBase({
/**
* Handles deleting the entire knowledge base
*/
const handleDeleteKnowledgeBase = () => {
const handleDeleteKnowledgeBase = async () => {
if (!knowledgeBase) return

deleteKnowledgeBaseMutation(
{ knowledgeBaseId: id },
{
onSuccess: () => {
removeKnowledgeBase(id)
router.push(`/workspace/${workspaceId}/knowledge`)
},
try {
setIsDeleting(true)

const response = await fetch(`/api/knowledge/${id}`, {
method: 'DELETE',
})

if (!response.ok) {
throw new Error('Failed to delete knowledge base')
}
)

const result = await response.json()

if (result.success) {
removeKnowledgeBase(id)
router.push(`/workspace/${workspaceId}/knowledge`)
} else {
throw new Error(result.error || 'Failed to delete knowledge base')
}
} catch (err) {
logger.error('Error deleting knowledge base:', err)
setIsDeleting(false)
}
}

/**
@@ -792,57 +856,93 @@ export function KnowledgeBase({
/**
* Handles bulk enabling of selected documents
*/
const handleBulkEnable = () => {
const handleBulkEnable = async () => {
const documentsToEnable = documents.filter(
(doc) => selectedDocuments.has(doc.id) && !doc.enabled
)

if (documentsToEnable.length === 0) return

bulkDocumentMutation(
{
knowledgeBaseId: id,
operation: 'enable',
documentIds: documentsToEnable.map((doc) => doc.id),
},
{
onSuccess: (result) => {
result.updatedDocuments?.forEach((updatedDoc) => {
updateDocument(updatedDoc.id, { enabled: updatedDoc.enabled })
})
logger.info(`Successfully enabled ${result.successCount} documents`)
setSelectedDocuments(new Set())
try {
setIsBulkOperating(true)

const response = await fetch(`/api/knowledge/${id}/documents`, {
method: 'PATCH',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify({
operation: 'enable',
documentIds: documentsToEnable.map((doc) => doc.id),
}),
})

if (!response.ok) {
throw new Error('Failed to enable documents')
}
)

const result = await response.json()

if (result.success) {
result.data.updatedDocuments.forEach((updatedDoc: { id: string; enabled: boolean }) => {
updateDocument(updatedDoc.id, { enabled: updatedDoc.enabled })
})

logger.info(`Successfully enabled ${result.data.successCount} documents`)
}

setSelectedDocuments(new Set())
} catch (err) {
logger.error('Error enabling documents:', err)
} finally {
setIsBulkOperating(false)
}
}

/**
* Handles bulk disabling of selected documents
*/
const handleBulkDisable = () => {
const handleBulkDisable = async () => {
const documentsToDisable = documents.filter(
(doc) => selectedDocuments.has(doc.id) && doc.enabled
)

if (documentsToDisable.length === 0) return

bulkDocumentMutation(
{
knowledgeBaseId: id,
operation: 'disable',
documentIds: documentsToDisable.map((doc) => doc.id),
},
{
onSuccess: (result) => {
result.updatedDocuments?.forEach((updatedDoc) => {
updateDocument(updatedDoc.id, { enabled: updatedDoc.enabled })
})
logger.info(`Successfully disabled ${result.successCount} documents`)
setSelectedDocuments(new Set())
try {
setIsBulkOperating(true)

const response = await fetch(`/api/knowledge/${id}/documents`, {
method: 'PATCH',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify({
operation: 'disable',
documentIds: documentsToDisable.map((doc) => doc.id),
}),
})

if (!response.ok) {
throw new Error('Failed to disable documents')
}
)

const result = await response.json()

if (result.success) {
result.data.updatedDocuments.forEach((updatedDoc: { id: string; enabled: boolean }) => {
updateDocument(updatedDoc.id, { enabled: updatedDoc.enabled })
})

logger.info(`Successfully disabled ${result.data.successCount} documents`)
}

setSelectedDocuments(new Set())
} catch (err) {
logger.error('Error disabling documents:', err)
} finally {
setIsBulkOperating(false)
}
}

/**
@@ -856,28 +956,44 @@ export function KnowledgeBase({
/**
* Confirms and executes the bulk deletion of selected documents
*/
const confirmBulkDelete = () => {
const confirmBulkDelete = async () => {
const documentsToDelete = documents.filter((doc) => selectedDocuments.has(doc.id))

if (documentsToDelete.length === 0) return

bulkDocumentMutation(
{
knowledgeBaseId: id,
operation: 'delete',
documentIds: documentsToDelete.map((doc) => doc.id),
},
{
onSuccess: (result) => {
logger.info(`Successfully deleted ${result.successCount} documents`)
refreshDocuments()
setSelectedDocuments(new Set())
},
onSettled: () => {
setShowBulkDeleteModal(false)
try {
setIsBulkOperating(true)

const response = await fetch(`/api/knowledge/${id}/documents`, {
method: 'PATCH',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify({
operation: 'delete',
documentIds: documentsToDelete.map((doc) => doc.id),
}),
})

if (!response.ok) {
throw new Error('Failed to delete documents')
}
)

const result = await response.json()

if (result.success) {
logger.info(`Successfully deleted ${result.data.successCount} documents`)
}

await refreshDocuments()

setSelectedDocuments(new Set())
} catch (err) {
logger.error('Error deleting documents:', err)
} finally {
setIsBulkOperating(false)
setShowBulkDeleteModal(false)
}
}

const selectedDocumentsList = documents.filter((doc) => selectedDocuments.has(doc.id))

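// The three bulk handlers above (enable, disable, delete) share one PATCH endpoint
// and differ only in the operation field. The request/response shape they rely on,
// sketched as types inferred from the handlers (the type names are assumptions,
// not definitions from this diff):
//
//   type BulkOperation = 'enable' | 'disable' | 'delete'
//   interface BulkRequest { operation: BulkOperation; documentIds: string[] }
//   interface BulkResponse {
//     success: boolean
//     data: { successCount: number; updatedDocuments: Array<{ id: string; enabled: boolean }> }
//   }
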
@@ -22,10 +22,10 @@ import {
type TagDefinition,
useKnowledgeBaseTagDefinitions,
} from '@/hooks/kb/use-knowledge-base-tag-definitions'
import { useCreateTagDefinition, useDeleteTagDefinition } from '@/hooks/queries/knowledge'

const logger = createLogger('BaseTagsModal')

/** Field type display labels */
const FIELD_TYPE_LABELS: Record<string, string> = {
text: 'Text',
number: 'Number',
@@ -45,6 +45,7 @@ interface DocumentListProps {
totalCount: number
}

/** Displays a list of documents affected by tag operations */
function DocumentList({ documents, totalCount }: DocumentListProps) {
const displayLimit = 5
const hasMore = totalCount > displayLimit
@@ -94,14 +95,13 @@ export function BaseTagsModal({ open, onOpenChange, knowledgeBaseId }: BaseTagsM
const { tagDefinitions: kbTagDefinitions, fetchTagDefinitions: refreshTagDefinitions } =
useKnowledgeBaseTagDefinitions(knowledgeBaseId)

const createTagMutation = useCreateTagDefinition()
const deleteTagMutation = useDeleteTagDefinition()

const [deleteTagDialogOpen, setDeleteTagDialogOpen] = useState(false)
const [selectedTag, setSelectedTag] = useState<TagDefinition | null>(null)
const [viewDocumentsDialogOpen, setViewDocumentsDialogOpen] = useState(false)
const [isDeletingTag, setIsDeletingTag] = useState(false)
const [tagUsageData, setTagUsageData] = useState<TagUsageData[]>([])
const [isCreatingTag, setIsCreatingTag] = useState(false)
const [isSavingTag, setIsSavingTag] = useState(false)
const [createTagForm, setCreateTagForm] = useState({
displayName: '',
fieldType: 'text',
@@ -177,12 +177,13 @@ export function BaseTagsModal({ open, onOpenChange, knowledgeBaseId }: BaseTagsM
}

const tagNameConflict =
isCreatingTag && !createTagMutation.isPending && hasTagNameConflict(createTagForm.displayName)
isCreatingTag && !isSavingTag && hasTagNameConflict(createTagForm.displayName)

const canSaveTag = () => {
return createTagForm.displayName.trim() && !hasTagNameConflict(createTagForm.displayName)
}

/** Get slot usage counts per field type */
const getSlotUsageByFieldType = (fieldType: string): { used: number; max: number } => {
const config = TAG_SLOT_CONFIG[fieldType as keyof typeof TAG_SLOT_CONFIG]
if (!config) return { used: 0, max: 0 }
@@ -190,11 +191,13 @@ export function BaseTagsModal({ open, onOpenChange, knowledgeBaseId }: BaseTagsM
return { used, max: config.maxSlots }
}

/** Check if a field type has available slots */
const hasAvailableSlots = (fieldType: string): boolean => {
const { used, max } = getSlotUsageByFieldType(fieldType)
return used < max
}

/** Field type options for Combobox */
const fieldTypeOptions: ComboboxOption[] = useMemo(() => {
return SUPPORTED_FIELD_TYPES.filter((type) => hasAvailableSlots(type)).map((type) => {
const { used, max } = getSlotUsageByFieldType(type)
@@ -208,17 +211,43 @@ export function BaseTagsModal({ open, onOpenChange, knowledgeBaseId }: BaseTagsM
const saveTagDefinition = async () => {
if (!canSaveTag()) return

setIsSavingTag(true)
try {
// Check if selected field type has available slots
if (!hasAvailableSlots(createTagForm.fieldType)) {
throw new Error(`No available slots for ${createTagForm.fieldType} type`)
}

await createTagMutation.mutateAsync({
knowledgeBaseId,
// Get the next available slot from the API
const slotResponse = await fetch(
`/api/knowledge/${knowledgeBaseId}/next-available-slot?fieldType=${createTagForm.fieldType}`
)
if (!slotResponse.ok) {
throw new Error('Failed to get available slot')
}
const slotResult = await slotResponse.json()
if (!slotResult.success || !slotResult.data?.nextAvailableSlot) {
throw new Error('No available tag slots for this field type')
}

const newTagDefinition = {
tagSlot: slotResult.data.nextAvailableSlot,
displayName: createTagForm.displayName.trim(),
fieldType: createTagForm.fieldType,
}

const response = await fetch(`/api/knowledge/${knowledgeBaseId}/tag-definitions`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify(newTagDefinition),
})

if (!response.ok) {
throw new Error('Failed to create tag definition')
}

await Promise.all([refreshTagDefinitions(), fetchTagUsage()])

setCreateTagForm({
@@ -228,17 +257,27 @@ export function BaseTagsModal({ open, onOpenChange, knowledgeBaseId }: BaseTagsM
setIsCreatingTag(false)
} catch (error) {
logger.error('Error creating tag definition:', error)
} finally {
setIsSavingTag(false)
}
}

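// saveTagDefinition above is a two-step flow: GET the next free slot for the chosen
// field type, then POST the definition into that slot. The first step in isolation,
// with an assumed SlotResponse shape matching the checks above (nextSlot is a
// hypothetical helper, not part of this diff):
//
//   interface SlotResponse { success: boolean; data?: { nextAvailableSlot: string } }
//
//   async function nextSlot(kbId: string, fieldType: string): Promise<string> {
//     const res = await fetch(`/api/knowledge/${kbId}/next-available-slot?fieldType=${fieldType}`)
//     if (!res.ok) throw new Error('Failed to get available slot')
//     const body: SlotResponse = await res.json()
//     if (!body.success || !body.data?.nextAvailableSlot) {
//       throw new Error('No available tag slots for this field type')
//     }
//     return body.data.nextAvailableSlot
//   }
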
const confirmDeleteTag = async () => {
if (!selectedTag) return

setIsDeletingTag(true)
try {
await deleteTagMutation.mutateAsync({
knowledgeBaseId,
tagDefinitionId: selectedTag.id,
})
const response = await fetch(
`/api/knowledge/${knowledgeBaseId}/tag-definitions/${selectedTag.id}`,
{
method: 'DELETE',
}
)

if (!response.ok) {
const errorText = await response.text()
throw new Error(`Failed to delete tag definition: ${response.status} ${errorText}`)
}

await Promise.all([refreshTagDefinitions(), fetchTagUsage()])

@@ -246,6 +285,8 @@ export function BaseTagsModal({ open, onOpenChange, knowledgeBaseId }: BaseTagsM
setSelectedTag(null)
} catch (error) {
logger.error('Error deleting tag definition:', error)
} finally {
setIsDeletingTag(false)
}
}

@@ -392,11 +433,11 @@ export function BaseTagsModal({ open, onOpenChange, knowledgeBaseId }: BaseTagsM
className='flex-1'
disabled={
!canSaveTag() ||
createTagMutation.isPending ||
isSavingTag ||
!hasAvailableSlots(createTagForm.fieldType)
}
>
{createTagMutation.isPending ? 'Creating...' : 'Create Tag'}
{isSavingTag ? 'Creating...' : 'Create Tag'}
</Button>
</div>
</div>
@@ -440,17 +481,13 @@ export function BaseTagsModal({ open, onOpenChange, knowledgeBaseId }: BaseTagsM
<ModalFooter>
<Button
variant='default'
disabled={deleteTagMutation.isPending}
disabled={isDeletingTag}
onClick={() => setDeleteTagDialogOpen(false)}
>
Cancel
</Button>
<Button
variant='destructive'
onClick={confirmDeleteTag}
disabled={deleteTagMutation.isPending}
>
{deleteTagMutation.isPending ? 'Deleting...' : 'Delete Tag'}
<Button variant='destructive' onClick={confirmDeleteTag} disabled={isDeletingTag}>
{isDeletingTag ? <>Deleting...</> : 'Delete Tag'}
</Button>
</ModalFooter>
</ModalContent>

@@ -3,6 +3,7 @@
import { useEffect, useRef, useState } from 'react'
import { zodResolver } from '@hookform/resolvers/zod'
import { createLogger } from '@sim/logger'
import { useQueryClient } from '@tanstack/react-query'
import { Loader2, RotateCcw, X } from 'lucide-react'
import { useParams } from 'next/navigation'
import { useForm } from 'react-hook-form'
@@ -22,7 +23,7 @@ import { cn } from '@/lib/core/utils/cn'
import { formatFileSize, validateKnowledgeBaseFile } from '@/lib/uploads/utils/file-utils'
import { ACCEPT_ATTRIBUTE } from '@/lib/uploads/utils/validation'
import { useKnowledgeUpload } from '@/app/workspace/[workspaceId]/knowledge/hooks/use-knowledge-upload'
import { useCreateKnowledgeBase, useDeleteKnowledgeBase } from '@/hooks/queries/knowledge'
import { knowledgeKeys } from '@/hooks/queries/knowledge'

const logger = createLogger('CreateBaseModal')

@@ -81,11 +82,10 @@ interface SubmitStatus {
export function CreateBaseModal({ open, onOpenChange }: CreateBaseModalProps) {
const params = useParams()
const workspaceId = params.workspaceId as string

const createKnowledgeBaseMutation = useCreateKnowledgeBase(workspaceId)
const deleteKnowledgeBaseMutation = useDeleteKnowledgeBase(workspaceId)
const queryClient = useQueryClient()

const fileInputRef = useRef<HTMLInputElement>(null)
const [isSubmitting, setIsSubmitting] = useState(false)
const [submitStatus, setSubmitStatus] = useState<SubmitStatus | null>(null)
const [files, setFiles] = useState<FileWithPreview[]>([])
const [fileError, setFileError] = useState<string | null>(null)
@@ -245,14 +245,12 @@ export function CreateBaseModal({ open, onOpenChange }: CreateBaseModalProps) {
})
}

const isSubmitting =
createKnowledgeBaseMutation.isPending || deleteKnowledgeBaseMutation.isPending || isUploading

const onSubmit = async (data: FormValues) => {
setIsSubmitting(true)
setSubmitStatus(null)

try {
const newKnowledgeBase = await createKnowledgeBaseMutation.mutateAsync({
const knowledgeBasePayload = {
name: data.name,
description: data.description || undefined,
workspaceId: workspaceId,
@@ -261,8 +259,29 @@ export function CreateBaseModal({ open, onOpenChange }: CreateBaseModalProps) {
minSize: data.minChunkSize,
overlap: data.overlapSize,
},
}

const response = await fetch('/api/knowledge', {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify(knowledgeBasePayload),
})

if (!response.ok) {
const errorData = await response.json()
throw new Error(errorData.error || 'Failed to create knowledge base')
}

const result = await response.json()

if (!result.success) {
throw new Error(result.error || 'Failed to create knowledge base')
}

const newKnowledgeBase = result.data

if (files.length > 0) {
try {
const uploadedFiles = await uploadFiles(files, newKnowledgeBase.id, {
@@ -274,11 +293,15 @@ export function CreateBaseModal({ open, onOpenChange }: CreateBaseModalProps) {

logger.info(`Successfully uploaded ${uploadedFiles.length} files`)
logger.info(`Started processing ${uploadedFiles.length} documents in the background`)

await queryClient.invalidateQueries({
queryKey: knowledgeKeys.list(workspaceId),
})
} catch (uploadError) {
logger.error('File upload failed, deleting knowledge base:', uploadError)
try {
await deleteKnowledgeBaseMutation.mutateAsync({
knowledgeBaseId: newKnowledgeBase.id,
await fetch(`/api/knowledge/${newKnowledgeBase.id}`, {
method: 'DELETE',
})
logger.info(`Deleted orphaned knowledge base: ${newKnowledgeBase.id}`)
} catch (deleteError) {
@@ -286,6 +309,10 @@ export function CreateBaseModal({ open, onOpenChange }: CreateBaseModalProps) {
}
throw uploadError
}
} else {
await queryClient.invalidateQueries({
queryKey: knowledgeKeys.list(workspaceId),
})
}

files.forEach((file) => URL.revokeObjectURL(file.preview))
@@ -298,6 +325,8 @@ export function CreateBaseModal({ open, onOpenChange }: CreateBaseModalProps) {
type: 'error',
message: error instanceof Error ? error.message : 'An unknown error occurred',
})
} finally {
setIsSubmitting(false)
}
}


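// onSubmit above creates the knowledge base first and uploads files second; if the
// upload fails, it deletes the just-created base so no orphan is left behind. The
// compensating-action pattern in miniature (all three function parameters are
// placeholders standing in for the concrete calls above):
//
//   async function createWithFiles(
//     createKb: () => Promise<string>,
//     uploadAll: (kbId: string) => Promise<void>,
//     deleteKb: (kbId: string) => Promise<void>
//   ): Promise<string> {
//     const kbId = await createKb()
//     try {
//       await uploadAll(kbId)
//     } catch (err) {
//       await deleteKb(kbId).catch(() => {}) // best-effort cleanup of the orphan
//       throw err // surface the original upload failure
//     }
//     return kbId
//   }
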
@@ -2,6 +2,7 @@

import { useEffect, useState } from 'react'
import { createLogger } from '@sim/logger'
import { useQueryClient } from '@tanstack/react-query'
import { AlertTriangle, ChevronDown, LibraryBig, MoreHorizontal } from 'lucide-react'
import Link from 'next/link'
import {
@@ -14,7 +15,7 @@ import {
} from '@/components/emcn'
import { Trash } from '@/components/emcn/icons/trash'
import { filterButtonClass } from '@/app/workspace/[workspaceId]/knowledge/components/constants'
import { useUpdateKnowledgeBase } from '@/hooks/queries/knowledge'
import { knowledgeKeys } from '@/hooks/queries/knowledge'

const logger = createLogger('KnowledgeHeader')

@@ -53,13 +54,14 @@ interface Workspace {
}

export function KnowledgeHeader({ breadcrumbs, options }: KnowledgeHeaderProps) {
const queryClient = useQueryClient()
const [isActionsPopoverOpen, setIsActionsPopoverOpen] = useState(false)
const [isWorkspacePopoverOpen, setIsWorkspacePopoverOpen] = useState(false)
const [workspaces, setWorkspaces] = useState<Workspace[]>([])
const [isLoadingWorkspaces, setIsLoadingWorkspaces] = useState(false)
const [isUpdatingWorkspace, setIsUpdatingWorkspace] = useState(false)

const updateKnowledgeBase = useUpdateKnowledgeBase()

// Fetch available workspaces
useEffect(() => {
if (!options?.knowledgeBaseId) return

@@ -74,6 +76,7 @@ export function KnowledgeHeader({ breadcrumbs, options }: KnowledgeHeaderProps)

const data = await response.json()

// Filter workspaces where user has write/admin permissions
const availableWorkspaces = data.workspaces
.filter((ws: any) => ws.permissions === 'write' || ws.permissions === 'admin')
.map((ws: any) => ({
@@ -94,27 +97,47 @@ export function KnowledgeHeader({ breadcrumbs, options }: KnowledgeHeaderProps)
}, [options?.knowledgeBaseId])

const handleWorkspaceChange = async (workspaceId: string | null) => {
if (updateKnowledgeBase.isPending || !options?.knowledgeBaseId) return
if (isUpdatingWorkspace || !options?.knowledgeBaseId) return

setIsWorkspacePopoverOpen(false)
try {
setIsUpdatingWorkspace(true)
setIsWorkspacePopoverOpen(false)

updateKnowledgeBase.mutate(
{
knowledgeBaseId: options.knowledgeBaseId,
updates: { workspaceId },
},
{
onSuccess: () => {
logger.info(
`Knowledge base workspace updated: ${options.knowledgeBaseId} -> ${workspaceId}`
)
options.onWorkspaceChange?.(workspaceId)
},
onError: (err) => {
logger.error('Error updating workspace:', err)
const response = await fetch(`/api/knowledge/${options.knowledgeBaseId}`, {
method: 'PUT',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify({
workspaceId,
}),
})

if (!response.ok) {
const result = await response.json()
throw new Error(result.error || 'Failed to update workspace')
}
)

const result = await response.json()

if (result.success) {
logger.info(
`Knowledge base workspace updated: ${options.knowledgeBaseId} -> ${workspaceId}`
)

await queryClient.invalidateQueries({
queryKey: knowledgeKeys.detail(options.knowledgeBaseId),
})

await options.onWorkspaceChange?.(workspaceId)
} else {
throw new Error(result.error || 'Failed to update workspace')
}
} catch (err) {
logger.error('Error updating workspace:', err)
} finally {
setIsUpdatingWorkspace(false)
}
}

const currentWorkspace = workspaces.find((ws) => ws.id === options?.currentWorkspaceId)
@@ -124,6 +147,7 @@ export function KnowledgeHeader({ breadcrumbs, options }: KnowledgeHeaderProps)
<div className={HEADER_STYLES.container}>
<div className={HEADER_STYLES.breadcrumbs}>
{breadcrumbs.map((breadcrumb, index) => {
// Use unique identifier when available, fallback to content-based key
const key = breadcrumb.id || `${breadcrumb.label}-${breadcrumb.href || index}`

return (
@@ -165,13 +189,13 @@ export function KnowledgeHeader({ breadcrumbs, options }: KnowledgeHeaderProps)
<PopoverTrigger asChild>
<Button
variant='outline'
disabled={isLoadingWorkspaces || updateKnowledgeBase.isPending}
disabled={isLoadingWorkspaces || isUpdatingWorkspace}
className={filterButtonClass}
>
<span className='truncate'>
{isLoadingWorkspaces
? 'Loading...'
: updateKnowledgeBase.isPending
: isUpdatingWorkspace
? 'Updating...'
: currentWorkspace?.name || 'No workspace'}
</span>

@@ -32,7 +32,6 @@ import {
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
import { useContextMenu } from '@/app/workspace/[workspaceId]/w/components/sidebar/hooks'
import { useKnowledgeBasesList } from '@/hooks/kb/use-knowledge'
import { useDeleteKnowledgeBase, useUpdateKnowledgeBase } from '@/hooks/queries/knowledge'
import { useDebounce } from '@/hooks/use-debounce'

const logger = createLogger('Knowledge')
@@ -52,12 +51,10 @@ export function Knowledge() {
const params = useParams()
const workspaceId = params.workspaceId as string

const { knowledgeBases, isLoading, error } = useKnowledgeBasesList(workspaceId)
const { knowledgeBases, isLoading, error, removeKnowledgeBase, updateKnowledgeBase } =
useKnowledgeBasesList(workspaceId)
const userPermissions = useUserPermissionsContext()

const { mutateAsync: updateKnowledgeBaseMutation } = useUpdateKnowledgeBase(workspaceId)
const { mutateAsync: deleteKnowledgeBaseMutation } = useDeleteKnowledgeBase(workspaceId)

const [searchQuery, setSearchQuery] = useState('')
const debouncedSearchQuery = useDebounce(searchQuery, 300)
const [isCreateModalOpen, setIsCreateModalOpen] = useState(false)
@@ -115,13 +112,29 @@ export function Knowledge() {
*/
const handleUpdateKnowledgeBase = useCallback(
async (id: string, name: string, description: string) => {
await updateKnowledgeBaseMutation({
knowledgeBaseId: id,
updates: { name, description },
const response = await fetch(`/api/knowledge/${id}`, {
method: 'PUT',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify({ name, description }),
})
logger.info(`Knowledge base updated: ${id}`)

if (!response.ok) {
const result = await response.json()
throw new Error(result.error || 'Failed to update knowledge base')
}

const result = await response.json()

if (result.success) {
logger.info(`Knowledge base updated: ${id}`)
updateKnowledgeBase(id, { name, description })
} else {
throw new Error(result.error || 'Failed to update knowledge base')
}
},
[updateKnowledgeBaseMutation]
[updateKnowledgeBase]
)

/**
@@ -129,10 +142,25 @@ export function Knowledge() {
*/
const handleDeleteKnowledgeBase = useCallback(
async (id: string) => {
await deleteKnowledgeBaseMutation({ knowledgeBaseId: id })
logger.info(`Knowledge base deleted: ${id}`)
const response = await fetch(`/api/knowledge/${id}`, {
method: 'DELETE',
})

if (!response.ok) {
const result = await response.json()
throw new Error(result.error || 'Failed to delete knowledge base')
}

const result = await response.json()

if (result.success) {
logger.info(`Knowledge base deleted: ${id}`)
removeKnowledgeBase(id)
} else {
throw new Error(result.error || 'Failed to delete knowledge base')
}
},
[deleteKnowledgeBaseMutation]
[removeKnowledgeBase]
)

/**

@@ -26,6 +26,9 @@ import { CLASS_TOOL_METADATA } from '@/stores/panel/copilot/store'
import type { SubAgentContentBlock } from '@/stores/panel/copilot/types'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'

/**
* Parse special tags from content
*/
/**
* Plan step can be either a string or an object with title and plan
*/
@@ -44,56 +47,6 @@ interface ParsedTags {
cleanContent: string
}

/**
* Extract plan steps from plan_respond tool calls in subagent blocks.
* Returns { steps, isComplete } where steps is in the format expected by PlanSteps component.
*/
function extractPlanFromBlocks(blocks: SubAgentContentBlock[] | undefined): {
steps: Record<string, PlanStep> | undefined
isComplete: boolean
} {
if (!blocks) return { steps: undefined, isComplete: false }

// Find the plan_respond tool call
const planRespondBlock = blocks.find(
(b) => b.type === 'subagent_tool_call' && b.toolCall?.name === 'plan_respond'
)

if (!planRespondBlock?.toolCall) {
return { steps: undefined, isComplete: false }
}

// Tool call arguments can be in different places depending on the source
// Also handle nested data.arguments structure from the schema
const tc = planRespondBlock.toolCall as any
const args = tc.params || tc.parameters || tc.input || tc.arguments || tc.data?.arguments || {}
const stepsArray = args.steps

if (!Array.isArray(stepsArray) || stepsArray.length === 0) {
return { steps: undefined, isComplete: false }
}

// Convert array format to Record<string, PlanStep> format
// From: [{ number: 1, title: "..." }, { number: 2, title: "..." }]
// To: { "1": "...", "2": "..." }
const steps: Record<string, PlanStep> = {}
for (const step of stepsArray) {
if (step.number !== undefined && step.title) {
steps[String(step.number)] = step.title
}
}

// Check if the tool call is complete (not pending/executing)
const isComplete =
planRespondBlock.toolCall.state === ClientToolCallState.success ||
planRespondBlock.toolCall.state === ClientToolCallState.error

return {
steps: Object.keys(steps).length > 0 ? steps : undefined,
isComplete,
}
}

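// extractPlanFromBlocks above converts the tool call's steps array into the
// Record<string, PlanStep> shape that PlanSteps expects. The conversion step on
// its own, with sample data:
//
//   const stepsArray = [{ number: 1, title: 'Fetch data' }, { number: 2, title: 'Render' }]
//   const steps: Record<string, string> = {}
//   for (const step of stepsArray) {
//     if (step.number !== undefined && step.title) steps[String(step.number)] = step.title
//   }
//   // steps => { '1': 'Fetch data', '2': 'Render' }
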
/**
* Try to parse partial JSON for streaming options.
* Attempts to extract complete key-value pairs from incomplete JSON.
@@ -701,20 +654,11 @@ function SubAgentThinkingContent({
}
}

// Extract plan from plan_respond tool call (preferred) or fall back to <plan> tags
const { steps: planSteps, isComplete: planComplete } = extractPlanFromBlocks(blocks)
const allParsed = parseSpecialTags(allRawText)

// Prefer plan_respond tool data over <plan> tags
const hasPlan =
!!(planSteps && Object.keys(planSteps).length > 0) ||
!!(allParsed.plan && Object.keys(allParsed.plan).length > 0)
const planToRender = planSteps || allParsed.plan
const isPlanStreaming = planSteps ? !planComplete : isStreaming
if (!cleanText.trim() && !allParsed.plan) return null

if (!cleanText.trim() && !hasPlan) return null

const hasSpecialTags = hasPlan
const hasSpecialTags = !!(allParsed.plan && Object.keys(allParsed.plan).length > 0)

return (
<div className='space-y-1.5'>
@@ -726,7 +670,9 @@ function SubAgentThinkingContent({
hasSpecialTags={hasSpecialTags}
/>
)}
{hasPlan && planToRender && <PlanSteps steps={planToRender} streaming={isPlanStreaming} />}
{allParsed.plan && Object.keys(allParsed.plan).length > 0 && (
<PlanSteps steps={allParsed.plan} streaming={isStreaming} />
)}
</div>
)
}
@@ -798,19 +744,8 @@ const SubagentContentRenderer = memo(function SubagentContentRenderer({
}

const allParsed = parseSpecialTags(allRawText)

// Extract plan from plan_respond tool call (preferred) or fall back to <plan> tags
const { steps: planSteps, isComplete: planComplete } = extractPlanFromBlocks(
toolCall.subAgentBlocks
)
const hasPlan =
!!(planSteps && Object.keys(planSteps).length > 0) ||
!!(allParsed.plan && Object.keys(allParsed.plan).length > 0)
const planToRender = planSteps || allParsed.plan
const isPlanStreaming = planSteps ? !planComplete : isStreaming

const hasSpecialTags = !!(
hasPlan ||
(allParsed.plan && Object.keys(allParsed.plan).length > 0) ||
(allParsed.options && Object.keys(allParsed.options).length > 0)
)

@@ -822,6 +757,8 @@ const SubagentContentRenderer = memo(function SubagentContentRenderer({
const outerLabel = getSubagentCompletionLabel(toolCall.name)
const durationText = `${outerLabel} for ${formatDuration(duration)}`

const hasPlan = allParsed.plan && Object.keys(allParsed.plan).length > 0

const renderCollapsibleContent = () => (
<>
{segments.map((segment, index) => {
@@ -863,7 +800,7 @@ const SubagentContentRenderer = memo(function SubagentContentRenderer({
return (
<div className='w-full space-y-1.5'>
{renderCollapsibleContent()}
{hasPlan && planToRender && <PlanSteps steps={planToRender} streaming={isPlanStreaming} />}
{hasPlan && <PlanSteps steps={allParsed.plan!} streaming={isStreaming} />}
</div>
)
}
@@ -895,7 +832,7 @@ const SubagentContentRenderer = memo(function SubagentContentRenderer({
</div>

{/* Plan stays outside the collapsible */}
{hasPlan && planToRender && <PlanSteps steps={planToRender} />}
{hasPlan && <PlanSteps steps={allParsed.plan!} />}
</div>
)
})
@@ -1475,11 +1412,7 @@ export function ToolCall({ toolCall: toolCallProp, toolCallId, onStateChange }:
if (
toolCall.name === 'checkoff_todo' ||
toolCall.name === 'mark_todo_in_progress' ||
toolCall.name === 'tool_search_tool_regex' ||
toolCall.name === 'user_memory' ||
toolCall.name === 'edit_respond' ||
toolCall.name === 'debug_respond' ||
toolCall.name === 'plan_respond'
toolCall.name === 'tool_search_tool_regex'
)
return null


@@ -452,6 +452,39 @@ console.log(limits);`
</div>
)}

{/* <div>
<div className='mb-[6.5px] flex items-center justify-between'>
<Label className='block pl-[2px] font-medium text-[13px] text-[var(--text-primary)]'>
URL
</Label>
<Tooltip.Root>
<Tooltip.Trigger asChild>
<Button
variant='ghost'
onClick={() => handleCopy('endpoint', info.endpoint)}
aria-label='Copy endpoint'
className='!p-1.5 -my-1.5'
>
{copied.endpoint ? (
<Check className='h-3 w-3' />
) : (
<Clipboard className='h-3 w-3' />
)}
</Button>
</Tooltip.Trigger>
<Tooltip.Content>
<span>{copied.endpoint ? 'Copied' : 'Copy'}</span>
</Tooltip.Content>
</Tooltip.Root>
</div>
<Code.Viewer
code={info.endpoint}
language='javascript'
wrapText
className='!min-h-0 rounded-[4px] border border-[var(--border-1)]'
/>
</div> */}

<div>
<div className='mb-[6.5px] flex items-center justify-between'>
<Label className='block pl-[2px] font-medium text-[13px] text-[var(--text-primary)]'>

@@ -1,260 +0,0 @@
'use client'

import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
import {
Badge,
Button,
Input,
Label,
Modal,
ModalBody,
ModalContent,
ModalFooter,
ModalHeader,
Textarea,
} from '@/components/emcn'
import { normalizeInputFormatValue } from '@/lib/workflows/input-format'
import { isValidStartBlockType } from '@/lib/workflows/triggers/start-block-types'
import type { InputFormatField } from '@/lib/workflows/types'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'

type NormalizedField = InputFormatField & { name: string }

interface ApiInfoModalProps {
open: boolean
onOpenChange: (open: boolean) => void
workflowId: string
}

export function ApiInfoModal({ open, onOpenChange, workflowId }: ApiInfoModalProps) {
const blocks = useWorkflowStore((state) => state.blocks)
const setValue = useSubBlockStore((state) => state.setValue)
const subBlockValues = useSubBlockStore((state) =>
workflowId ? (state.workflowValues[workflowId] ?? {}) : {}
)

const workflowMetadata = useWorkflowRegistry((state) =>
workflowId ? state.workflows[workflowId] : undefined
)
const updateWorkflow = useWorkflowRegistry((state) => state.updateWorkflow)

const [description, setDescription] = useState('')
const [paramDescriptions, setParamDescriptions] = useState<Record<string, string>>({})
const [isSaving, setIsSaving] = useState(false)
const [showUnsavedChangesAlert, setShowUnsavedChangesAlert] = useState(false)

const initialDescriptionRef = useRef('')
const initialParamDescriptionsRef = useRef<Record<string, string>>({})

const starterBlockId = useMemo(() => {
for (const [blockId, block] of Object.entries(blocks)) {
if (!block || typeof block !== 'object') continue
const blockType = (block as { type?: string }).type
if (blockType && isValidStartBlockType(blockType)) {
return blockId
}
}
return null
}, [blocks])

const inputFormat = useMemo((): NormalizedField[] => {
if (!starterBlockId) return []

const storeValue = subBlockValues[starterBlockId]?.inputFormat
const normalized = normalizeInputFormatValue(storeValue) as NormalizedField[]
if (normalized.length > 0) return normalized

const startBlock = blocks[starterBlockId]
const blockValue = startBlock?.subBlocks?.inputFormat?.value
return normalizeInputFormatValue(blockValue) as NormalizedField[]
}, [starterBlockId, subBlockValues, blocks])

useEffect(() => {
if (open) {
const normalizedDesc = workflowMetadata?.description?.toLowerCase().trim()
const isDefaultDescription =
!workflowMetadata?.description ||
workflowMetadata.description === workflowMetadata.name ||
normalizedDesc === 'new workflow' ||
normalizedDesc === 'your first workflow - start building here!'

const initialDescription = isDefaultDescription ? '' : workflowMetadata?.description || ''
setDescription(initialDescription)
initialDescriptionRef.current = initialDescription

const descriptions: Record<string, string> = {}
for (const field of inputFormat) {
if (field.description) {
descriptions[field.name] = field.description
}
}
setParamDescriptions(descriptions)
initialParamDescriptionsRef.current = { ...descriptions }
}
}, [open, workflowMetadata, inputFormat])

const hasChanges = useMemo(() => {
if (description.trim() !== initialDescriptionRef.current.trim()) return true

for (const field of inputFormat) {
const currentValue = (paramDescriptions[field.name] || '').trim()
const initialValue = (initialParamDescriptionsRef.current[field.name] || '').trim()
if (currentValue !== initialValue) return true
}

return false
}, [description, paramDescriptions, inputFormat])

const handleParamDescriptionChange = (fieldName: string, value: string) => {
setParamDescriptions((prev) => ({
...prev,
[fieldName]: value,
}))
}

const handleCloseAttempt = useCallback(() => {
if (hasChanges && !isSaving) {
setShowUnsavedChangesAlert(true)
} else {
onOpenChange(false)
}
}, [hasChanges, isSaving, onOpenChange])

const handleDiscardChanges = useCallback(() => {
setShowUnsavedChangesAlert(false)
setDescription(initialDescriptionRef.current)
setParamDescriptions({ ...initialParamDescriptionsRef.current })
onOpenChange(false)
}, [onOpenChange])

const handleSave = useCallback(async () => {
if (!workflowId) return

const activeWorkflowId = useWorkflowRegistry.getState().activeWorkflowId
if (activeWorkflowId !== workflowId) {
return
}

setIsSaving(true)
try {
if (description.trim() !== (workflowMetadata?.description || '')) {
updateWorkflow(workflowId, { description: description.trim() || 'New workflow' })
}

if (starterBlockId) {
const updatedValue = inputFormat.map((field) => ({
...field,
description: paramDescriptions[field.name]?.trim() || undefined,
}))
setValue(starterBlockId, 'inputFormat', updatedValue)
}

onOpenChange(false)
} finally {
setIsSaving(false)
}
}, [
workflowId,
description,
workflowMetadata,
updateWorkflow,
starterBlockId,
inputFormat,
paramDescriptions,
setValue,
onOpenChange,
])

return (
<>
<Modal open={open} onOpenChange={(openState) => !openState && handleCloseAttempt()}>
<ModalContent className='max-w-[480px]'>
<ModalHeader>
<span>Edit API Info</span>
</ModalHeader>
<ModalBody className='space-y-[12px]'>
<div>
<Label className='mb-[6.5px] block pl-[2px] font-medium text-[13px] text-[var(--text-primary)]'>
Description
</Label>
<Textarea
placeholder='Describe what this workflow API does...'
className='min-h-[80px] resize-none'
value={description}
onChange={(e) => setDescription(e.target.value)}
/>
</div>

{inputFormat.length > 0 && (
<div>
<Label className='mb-[6.5px] block pl-[2px] font-medium text-[13px] text-[var(--text-primary)]'>
Parameters ({inputFormat.length})
</Label>
<div className='flex flex-col gap-[8px]'>
{inputFormat.map((field) => (
<div
key={field.name}
className='overflow-hidden rounded-[4px] border border-[var(--border-1)]'
>
<div className='flex items-center justify-between bg-[var(--surface-4)] px-[10px] py-[5px]'>
<div className='flex min-w-0 flex-1 items-center gap-[8px]'>
<span className='block truncate font-medium text-[14px] text-[var(--text-tertiary)]'>
{field.name}
</span>
<Badge size='sm'>{field.type || 'string'}</Badge>
</div>
</div>
<div className='border-[var(--border-1)] border-t px-[10px] pt-[6px] pb-[10px]'>
<div className='flex flex-col gap-[6px]'>
<Label className='text-[13px]'>Description</Label>
<Input
value={paramDescriptions[field.name] || ''}
onChange={(e) =>
handleParamDescriptionChange(field.name, e.target.value)
}
placeholder={`Enter description for ${field.name}`}
/>
</div>
</div>
</div>
))}
</div>
</div>
)}
</ModalBody>
<ModalFooter>
<Button variant='default' onClick={handleCloseAttempt} disabled={isSaving}>
Cancel
</Button>
<Button variant='tertiary' onClick={handleSave} disabled={isSaving || !hasChanges}>
{isSaving ? 'Saving...' : 'Save'}
</Button>
</ModalFooter>
</ModalContent>
</Modal>

<Modal open={showUnsavedChangesAlert} onOpenChange={setShowUnsavedChangesAlert}>
<ModalContent className='max-w-[400px]'>
<ModalHeader>
<span>Unsaved Changes</span>
</ModalHeader>
<ModalBody>
<p className='text-[14px] text-[var(--text-secondary)]'>
You have unsaved changes. Are you sure you want to discard them?
</p>
</ModalBody>
<ModalFooter>
<Button variant='default' onClick={() => setShowUnsavedChangesAlert(false)}>
Keep Editing
</Button>
<Button variant='destructive' onClick={handleDiscardChanges}>
Discard Changes
</Button>
</ModalFooter>
</ModalContent>
</Modal>
</>
)
}
@@ -43,7 +43,6 @@ import type { WorkflowState } from '@/stores/workflows/workflow/types'
|
||||
import { A2aDeploy } from './components/a2a/a2a'
|
||||
import { ApiDeploy } from './components/api/api'
|
||||
import { ChatDeploy, type ExistingChat } from './components/chat/chat'
|
||||
import { ApiInfoModal } from './components/general/components/api-info-modal'
|
||||
import { GeneralDeploy } from './components/general/general'
|
||||
import { McpDeploy } from './components/mcp/mcp'
|
||||
import { TemplateDeploy } from './components/template/template'
|
||||
@@ -111,7 +110,6 @@ export function DeployModal({
|
||||
const [chatSuccess, setChatSuccess] = useState(false)
|
||||
|
||||
const [isCreateKeyModalOpen, setIsCreateKeyModalOpen] = useState(false)
|
||||
const [isApiInfoModalOpen, setIsApiInfoModalOpen] = useState(false)
|
||||
const userPermissions = useUserPermissionsContext()
|
||||
const canManageWorkspaceKeys = userPermissions.canAdmin
|
||||
const { config: permissionConfig } = usePermissionConfig()
|
||||
@@ -391,6 +389,11 @@ export function DeployModal({
|
||||
form?.requestSubmit()
|
||||
}, [])
|
||||
|
||||
const handleA2aFormSubmit = useCallback(() => {
|
||||
const form = document.getElementById('a2a-deploy-form') as HTMLFormElement
|
||||
form?.requestSubmit()
|
||||
}, [])
|
||||
|
||||
const handleA2aPublish = useCallback(() => {
|
||||
const form = document.getElementById('a2a-deploy-form')
|
||||
const publishTrigger = form?.querySelector('[data-a2a-publish-trigger]') as HTMLButtonElement
|
||||
@@ -591,11 +594,7 @@ export function DeployModal({
|
||||
)}
|
||||
{activeTab === 'api' && (
|
||||
<ModalFooter className='items-center justify-between'>
|
||||
<div>
|
||||
<Button variant='default' onClick={() => setIsApiInfoModalOpen(true)}>
|
||||
Edit API Info
|
||||
</Button>
|
||||
</div>
|
||||
<div />
|
||||
<div className='flex items-center gap-2'>
|
||||
<Button
|
||||
variant='tertiary'
|
||||
@@ -881,14 +880,6 @@ export function DeployModal({
|
||||
canManageWorkspaceKeys={canManageWorkspaceKeys}
|
||||
defaultKeyType={defaultKeyType}
|
||||
/>
|
||||
|
||||
{workflowId && (
|
||||
<ApiInfoModal
|
||||
open={isApiInfoModalOpen}
|
||||
onOpenChange={setIsApiInfoModalOpen}
|
||||
workflowId={workflowId}
|
||||
/>
|
||||
)}
|
||||
</>
|
||||
)
|
||||
}
|
||||
|
||||
@@ -1,7 +1,7 @@
import type { ReactElement } from 'react'
import { useEffect, useRef, useState } from 'react'
import { createLogger } from '@sim/logger'
import { ChevronDown, ChevronsUpDown, ChevronUp, Plus } from 'lucide-react'
import { ChevronDown, ChevronUp, Plus } from 'lucide-react'
import { useParams } from 'next/navigation'
import Editor from 'react-simple-code-editor'
import { useUpdateNodeInternals } from 'reactflow'

@@ -39,16 +39,6 @@ import { useWorkflowStore } from '@/stores/workflows/workflow/store'

const logger = createLogger('ConditionInput')

/**
 * Default height for router textareas in pixels
 */
const ROUTER_DEFAULT_HEIGHT_PX = 100

/**
 * Minimum height for router textareas in pixels
 */
const ROUTER_MIN_HEIGHT_PX = 80

/**
 * Represents a single conditional block (if/else if/else).
 */

@@ -753,61 +743,6 @@ export function ConditionInput({
}
}, [conditionalBlocks, isRouterMode])

// State for tracking individual router textarea heights
const [routerHeights, setRouterHeights] = useState<{ [key: string]: number }>({})
const isResizing = useRef(false)

/**
 * Gets the height for a specific router block, returning default if not set.
 *
 * @param blockId - ID of the router block
 * @returns Height in pixels
 */
const getRouterHeight = (blockId: string): number => {
return routerHeights[blockId] ?? ROUTER_DEFAULT_HEIGHT_PX
}

/**
 * Handles mouse-based resize for router textareas.
 *
 * @param e - Mouse event from the resize handle
 * @param blockId - ID of the block being resized
 */
const startRouterResize = (e: React.MouseEvent, blockId: string) => {
if (isPreview || disabled) return
e.preventDefault()
e.stopPropagation()
isResizing.current = true

const startY = e.clientY
const startHeight = getRouterHeight(blockId)

const handleMouseMove = (moveEvent: MouseEvent) => {
if (!isResizing.current) return

const deltaY = moveEvent.clientY - startY
const newHeight = Math.max(ROUTER_MIN_HEIGHT_PX, startHeight + deltaY)

// Update the textarea height directly for smooth resizing
const textarea = inputRefs.current.get(blockId)
if (textarea) {
textarea.style.height = `${newHeight}px`
}

// Update state to keep track
setRouterHeights((prev) => ({ ...prev, [blockId]: newHeight }))
}

const handleMouseUp = () => {
isResizing.current = false
document.removeEventListener('mousemove', handleMouseMove)
document.removeEventListener('mouseup', handleMouseUp)
}

document.addEventListener('mousemove', handleMouseMove)
document.addEventListener('mouseup', handleMouseUp)
}

// Show loading or empty state if not ready or no blocks
if (!isReady || conditionalBlocks.length === 0) {
return (

@@ -972,24 +907,10 @@ export function ConditionInput({
}}
placeholder='Describe when this route should be taken...'
disabled={disabled || isPreview}
className='min-h-[100px] resize-none rounded-none border-0 px-3 py-2 text-sm placeholder:text-muted-foreground/50 focus-visible:ring-0 focus-visible:ring-offset-0'
rows={4}
style={{ height: `${getRouterHeight(block.id)}px` }}
className='min-h-[60px] resize-none rounded-none border-0 px-3 py-2 text-sm placeholder:text-muted-foreground/50 focus-visible:ring-0 focus-visible:ring-offset-0'
rows={2}
/>

{/* Custom resize handle */}
{!isPreview && !disabled && (
<div
className='absolute right-1 bottom-1 flex h-4 w-4 cursor-ns-resize items-center justify-center rounded-[4px] border border-[var(--border-1)] bg-[var(--surface-5)] dark:bg-[var(--surface-5)]'
onMouseDown={(e) => startRouterResize(e, block.id)}
onDragStart={(e) => {
e.preventDefault()
}}
>
<ChevronsUpDown className='h-3 w-3 text-[var(--text-muted)]' />
</div>
)}

{block.showEnvVars && (
<EnvVarDropdown
visible={block.showEnvVars}
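
The removed handler above is an instance of the standard document-level drag pattern: capture the starting position on mousedown, update on mousemove, detach on mouseup. A minimal framework-free sketch of that pattern, for context (attachResize, el, handle, and minPx are illustrative names, not part of this diff):

// Sketch of the drag-to-resize pattern used by startRouterResize above.
function attachResize(el: HTMLElement, handle: HTMLElement, minPx = 80): void {
  handle.addEventListener('mousedown', (e: MouseEvent) => {
    e.preventDefault()
    const startY = e.clientY
    const startHeight = el.offsetHeight

    const onMove = (move: MouseEvent) => {
      // Grow or shrink relative to where the drag began, clamped to the minimum
      el.style.height = `${Math.max(minPx, startHeight + move.clientY - startY)}px`
    }
    const onUp = () => {
      // Listeners live on document so the drag survives leaving the handle
      document.removeEventListener('mousemove', onMove)
      document.removeEventListener('mouseup', onUp)
    }
    document.addEventListener('mousemove', onMove)
    document.addEventListener('mouseup', onUp)
  })
}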
@@ -234,45 +234,48 @@ export function LongInput({
}, [value])

// Handle resize functionality
const startResize = (e: React.MouseEvent) => {
e.preventDefault()
e.stopPropagation()
isResizing.current = true
const startResize = useCallback(
(e: React.MouseEvent) => {
e.preventDefault()
e.stopPropagation()
isResizing.current = true

const startY = e.clientY
const startHeight = height
const startY = e.clientY
const startHeight = height

const handleMouseMove = (moveEvent: MouseEvent) => {
if (!isResizing.current) return
const handleMouseMove = (moveEvent: MouseEvent) => {
if (!isResizing.current) return

const deltaY = moveEvent.clientY - startY
const newHeight = Math.max(MIN_HEIGHT_PX, startHeight + deltaY)
const deltaY = moveEvent.clientY - startY
const newHeight = Math.max(MIN_HEIGHT_PX, startHeight + deltaY)

if (textareaRef.current && overlayRef.current) {
textareaRef.current.style.height = `${newHeight}px`
overlayRef.current.style.height = `${newHeight}px`
}
if (containerRef.current) {
containerRef.current.style.height = `${newHeight}px`
}
// Keep React state in sync so parent layouts (e.g., Editor) update during drag
setHeight(newHeight)
}

const handleMouseUp = () => {
if (textareaRef.current) {
const finalHeight = Number.parseInt(textareaRef.current.style.height, 10) || height
setHeight(finalHeight)
if (textareaRef.current && overlayRef.current) {
textareaRef.current.style.height = `${newHeight}px`
overlayRef.current.style.height = `${newHeight}px`
}
if (containerRef.current) {
containerRef.current.style.height = `${newHeight}px`
}
// Keep React state in sync so parent layouts (e.g., Editor) update during drag
setHeight(newHeight)
}

isResizing.current = false
document.removeEventListener('mousemove', handleMouseMove)
document.removeEventListener('mouseup', handleMouseUp)
}
const handleMouseUp = () => {
if (textareaRef.current) {
const finalHeight = Number.parseInt(textareaRef.current.style.height, 10) || height
setHeight(finalHeight)
}

document.addEventListener('mousemove', handleMouseMove)
document.addEventListener('mouseup', handleMouseUp)
}
isResizing.current = false
document.removeEventListener('mousemove', handleMouseMove)
document.removeEventListener('mouseup', handleMouseUp)
}

document.addEventListener('mousemove', handleMouseMove)
document.addEventListener('mouseup', handleMouseUp)
},
[height]
)

// Expose wand control handlers to parent via ref
useImperativeHandle(
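
The hunk above converts startResize from a plain function into a useCallback keyed on [height]. A minimal sketch of that shape, under the assumption (from the hunk) that height is the only external value the handler reads; useStartResize is an illustrative name, not code from this diff:

import { useCallback, useState } from 'react'

// Illustrative hook, not the component itself. Recreating the callback only
// when `height` changes keeps its identity stable across unrelated renders,
// while each new drag still starts from the latest committed height.
function useStartResize(minPx = 80) {
  const [height, setHeight] = useState(120)

  const startResize = useCallback(
    (e: { clientY: number }) => {
      const startY = e.clientY
      const startHeight = height // fresh because [height] is a dependency

      const onMove = (move: MouseEvent) => {
        setHeight(Math.max(minPx, startHeight + move.clientY - startY))
      }
      const onUp = () => {
        document.removeEventListener('mousemove', onMove)
        document.removeEventListener('mouseup', onUp)
      }
      document.addEventListener('mousemove', onMove)
      document.addEventListener('mouseup', onUp)
    },
    [height, minPx]
  )

  return { height, startResize }
}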
@@ -1,17 +1,281 @@
import { useCallback, useMemo } from 'react'
import type { RefObject } from 'react'
import { useCallback, useMemo, useRef, useState } from 'react'
import { createLogger } from '@sim/logger'
import { useParams } from 'next/navigation'
import { Combobox, Label, Slider, Switch } from '@/components/emcn/components'
import { Combobox, Input, Label, Slider, Switch, Textarea } from '@/components/emcn/components'
import { cn } from '@/lib/core/utils/cn'
import { LongInput } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/long-input/long-input'
import { ShortInput } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/short-input/short-input'
import { formatDisplayText } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/formatted-text'
import {
checkTagTrigger,
TagDropdown,
} from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/tag-dropdown/tag-dropdown'
import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-sub-block-value'
import type { SubBlockConfig } from '@/blocks/types'
import { useAccessibleReferencePrefixes } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-accessible-reference-prefixes'
import { useMcpTools } from '@/hooks/mcp/use-mcp-tools'
import { formatParameterLabel } from '@/tools/params'

const logger = createLogger('McpDynamicArgs')

interface McpInputWithTagsProps {
value: string
onChange: (value: string) => void
placeholder?: string
disabled?: boolean
isPassword?: boolean
blockId: string
accessiblePrefixes?: Set<string>
}

function McpInputWithTags({
value,
onChange,
placeholder,
disabled,
isPassword,
blockId,
accessiblePrefixes,
}: McpInputWithTagsProps) {
const [showTags, setShowTags] = useState(false)
const [cursorPosition, setCursorPosition] = useState(0)
const [activeSourceBlockId, setActiveSourceBlockId] = useState<string | null>(null)
const inputRef = useRef<HTMLInputElement>(null)
const inputNameRef = useRef(`mcp_input_${Math.random()}`)

const handleChange = (e: React.ChangeEvent<HTMLInputElement>) => {
const newValue = e.target.value
const newCursorPosition = e.target.selectionStart ?? 0

onChange(newValue)
setCursorPosition(newCursorPosition)

const tagTrigger = checkTagTrigger(newValue, newCursorPosition)
setShowTags(tagTrigger.show)
}

const handleDrop = (e: React.DragEvent<HTMLInputElement>) => {
e.preventDefault()

try {
const data = JSON.parse(e.dataTransfer.getData('application/json'))
if (data.type !== 'connectionBlock') return

const dropPosition = inputRef.current?.selectionStart ?? value.length ?? 0
const currentValue = value ?? ''
const newValue = `${currentValue.slice(0, dropPosition)}<${currentValue.slice(dropPosition)}`

onChange(newValue)
setCursorPosition(dropPosition + 1)
setShowTags(true)

if (data.connectionData?.sourceBlockId) {
setActiveSourceBlockId(data.connectionData.sourceBlockId)
}

setTimeout(() => {
if (inputRef.current) {
inputRef.current.selectionStart = dropPosition + 1
inputRef.current.selectionEnd = dropPosition + 1
}
}, 0)
} catch (error) {
logger.error('Failed to parse drop data:', { error })
}
}

const handleDragOver = (e: React.DragEvent<HTMLInputElement>) => {
e.preventDefault()
}

const handleTagSelect = (newValue: string) => {
onChange(newValue)
setShowTags(false)
setActiveSourceBlockId(null)
}

return (
<div className='relative'>
<div className='relative'>
<Input
ref={inputRef}
type={isPassword ? 'password' : 'text'}
value={value || ''}
onChange={handleChange}
onDrop={handleDrop}
onDragOver={handleDragOver}
placeholder={placeholder}
disabled={disabled}
name={inputNameRef.current}
autoComplete='off'
autoCapitalize='off'
spellCheck='false'
data-form-type='other'
data-lpignore='true'
data-1p-ignore
readOnly
onFocus={(e) => {
e.currentTarget.removeAttribute('readOnly')
// Show tag dropdown on focus when input is empty
if (!disabled && (value?.trim() === '' || !value)) {
setShowTags(true)
setCursorPosition(0)
}
}}
className={cn(!isPassword && 'text-transparent caret-foreground')}
/>
{!isPassword && (
<div className='pointer-events-none absolute inset-0 flex items-center overflow-hidden bg-transparent px-[8px] py-[6px] font-medium font-sans text-sm'>
<div className='whitespace-pre'>
{formatDisplayText(value?.toString() || '', {
accessiblePrefixes,
highlightAll: !accessiblePrefixes,
})}
</div>
</div>
)}
</div>
<TagDropdown
visible={showTags}
onSelect={handleTagSelect}
blockId={blockId}
activeSourceBlockId={activeSourceBlockId}
inputValue={value?.toString() ?? ''}
cursorPosition={cursorPosition}
onClose={() => {
setShowTags(false)
setActiveSourceBlockId(null)
}}
inputRef={inputRef as RefObject<HTMLInputElement>}
/>
</div>
)
}
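
Both wrapper components here rely on checkTagTrigger(value, cursor) returning an object with a show flag. The real helper is imported from tag-dropdown and is not shown in this diff; the following is only a hypothetical stand-in inferred from the call sites above (an unclosed '<' before the cursor opens the dropdown):

// Hypothetical sketch of checkTagTrigger's behavior, inferred from usage.
// The actual implementation in tag-dropdown/tag-dropdown may differ.
function checkTagTriggerSketch(value: string, cursor: number): { show: boolean } {
  const beforeCursor = value.slice(0, cursor)
  const lastOpen = beforeCursor.lastIndexOf('<')
  if (lastOpen === -1) return { show: false }
  // Show only while the reference opened by '<' has not been closed yet
  const closedAfterOpen = beforeCursor.indexOf('>', lastOpen) !== -1
  return { show: !closedAfterOpen }
}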
interface McpTextareaWithTagsProps {
value: string
onChange: (value: string) => void
placeholder?: string
disabled?: boolean
blockId: string
accessiblePrefixes?: Set<string>
rows?: number
}

function McpTextareaWithTags({
value,
onChange,
placeholder,
disabled,
blockId,
accessiblePrefixes,
rows = 4,
}: McpTextareaWithTagsProps) {
const [showTags, setShowTags] = useState(false)
const [cursorPosition, setCursorPosition] = useState(0)
const [activeSourceBlockId, setActiveSourceBlockId] = useState<string | null>(null)
const textareaRef = useRef<HTMLTextAreaElement>(null)
const textareaNameRef = useRef(`mcp_textarea_${Math.random()}`)

const handleChange = (e: React.ChangeEvent<HTMLTextAreaElement>) => {
const newValue = e.target.value
const newCursorPosition = e.target.selectionStart ?? 0

onChange(newValue)
setCursorPosition(newCursorPosition)

const tagTrigger = checkTagTrigger(newValue, newCursorPosition)
setShowTags(tagTrigger.show)
}

const handleDrop = (e: React.DragEvent<HTMLTextAreaElement>) => {
e.preventDefault()

try {
const data = JSON.parse(e.dataTransfer.getData('application/json'))
if (data.type !== 'connectionBlock') return

const dropPosition = textareaRef.current?.selectionStart ?? value.length ?? 0
const currentValue = value ?? ''
const newValue = `${currentValue.slice(0, dropPosition)}<${currentValue.slice(dropPosition)}`

onChange(newValue)
setCursorPosition(dropPosition + 1)
setShowTags(true)

if (data.connectionData?.sourceBlockId) {
setActiveSourceBlockId(data.connectionData.sourceBlockId)
}

setTimeout(() => {
if (textareaRef.current) {
textareaRef.current.selectionStart = dropPosition + 1
textareaRef.current.selectionEnd = dropPosition + 1
}
}, 0)
} catch (error) {
logger.error('Failed to parse drop data:', { error })
}
}

const handleDragOver = (e: React.DragEvent<HTMLTextAreaElement>) => {
e.preventDefault()
}

const handleTagSelect = (newValue: string) => {
onChange(newValue)
setShowTags(false)
setActiveSourceBlockId(null)
}

return (
<div className='relative'>
<Textarea
ref={textareaRef}
value={value || ''}
onChange={handleChange}
onDrop={handleDrop}
onDragOver={handleDragOver}
onFocus={() => {
// Show tag dropdown on focus when input is empty
if (!disabled && (value?.trim() === '' || !value)) {
setShowTags(true)
setCursorPosition(0)
}
}}
placeholder={placeholder}
disabled={disabled}
rows={rows}
name={textareaNameRef.current}
autoComplete='off'
autoCapitalize='off'
spellCheck='false'
data-form-type='other'
data-lpignore='true'
data-1p-ignore
className={cn('min-h-[80px] resize-none text-transparent caret-foreground')}
/>
<div className='pointer-events-none absolute inset-0 overflow-auto whitespace-pre-wrap break-words px-[8px] py-[8px] font-medium font-sans text-sm'>
{formatDisplayText(value || '', {
accessiblePrefixes,
highlightAll: !accessiblePrefixes,
})}
</div>
<TagDropdown
visible={showTags}
onSelect={handleTagSelect}
blockId={blockId}
activeSourceBlockId={activeSourceBlockId}
inputValue={value?.toString() ?? ''}
cursorPosition={cursorPosition}
onClose={() => {
setShowTags(false)
setActiveSourceBlockId(null)
}}
inputRef={textareaRef as RefObject<HTMLTextAreaElement>}
/>
</div>
)
}

interface McpDynamicArgsProps {
blockId: string
subBlockId: string

@@ -20,27 +284,6 @@ interface McpDynamicArgsProps {
previewValue?: any
}

/**
 * Creates a minimal SubBlockConfig for MCP tool parameters
 */
function createParamConfig(
paramName: string,
paramSchema: any,
inputType: 'long-input' | 'short-input'
): SubBlockConfig {
const placeholder =
paramSchema.type === 'array'
? `Enter JSON array, e.g. ["item1", "item2"] or comma-separated values`
: paramSchema.description || `Enter ${formatParameterLabel(paramName).toLowerCase()}`

return {
id: paramName,
type: inputType,
title: formatParameterLabel(paramName),
placeholder,
}
}

export function McpDynamicArgs({
blockId,
subBlockId,

@@ -54,6 +297,7 @@ export function McpDynamicArgs({
const [selectedTool] = useSubBlockValue(blockId, 'tool')
const [cachedSchema] = useSubBlockValue(blockId, '_toolSchema')
const [toolArgs, setToolArgs] = useSubBlockValue(blockId, subBlockId)
const accessiblePrefixes = useAccessibleReferencePrefixes(blockId)

const selectedToolConfig = mcpTools.find((tool) => tool.id === selectedTool)
const toolSchema = cachedSchema || selectedToolConfig?.inputSchema

@@ -64,7 +308,7 @@ export function McpDynamicArgs({
try {
return JSON.parse(previewValue)
} catch (error) {
logger.warn('Failed to parse preview value as JSON:', { error })
console.warn('Failed to parse preview value as JSON:', error)
return previewValue
}
}

@@ -74,7 +318,7 @@ export function McpDynamicArgs({
try {
return JSON.parse(toolArgs)
} catch (error) {
logger.warn('Failed to parse toolArgs as JSON:', { error })
console.warn('Failed to parse toolArgs as JSON:', error)
return {}
}
}

@@ -216,23 +460,24 @@ export function McpDynamicArgs({
)
}

case 'long-input': {
const config = createParamConfig(paramName, paramSchema, 'long-input')
case 'long-input':
return (
<LongInput
<McpTextareaWithTags
key={`${paramName}-long`}
blockId={blockId}
subBlockId={`_mcp_${paramName}`}
config={config}
placeholder={config.placeholder}
rows={4}
value={value || ''}
onChange={(newValue) => updateParameter(paramName, newValue)}
isPreview={isPreview}
placeholder={
paramSchema.type === 'array'
? `Enter JSON array, e.g. ["item1", "item2"] or comma-separated values`
: paramSchema.description ||
`Enter ${formatParameterLabel(paramName).toLowerCase()}`
}
disabled={disabled}
blockId={blockId}
accessiblePrefixes={accessiblePrefixes}
rows={4}
/>
)
}

default: {
const isPassword =

@@ -240,16 +485,10 @@ export function McpDynamicArgs({
paramName.toLowerCase().includes('password') ||
paramName.toLowerCase().includes('token')
const isNumeric = paramSchema.type === 'number' || paramSchema.type === 'integer'
const config = createParamConfig(paramName, paramSchema, 'short-input')

return (
<ShortInput
<McpInputWithTags
key={`${paramName}-short`}
blockId={blockId}
subBlockId={`_mcp_${paramName}`}
config={config}
placeholder={config.placeholder}
password={isPassword}
value={value?.toString() || ''}
onChange={(newValue) => {
let processedValue: any = newValue

@@ -267,8 +506,16 @@ export function McpDynamicArgs({
}
updateParameter(paramName, processedValue)
}}
isPreview={isPreview}
placeholder={
paramSchema.type === 'array'
? `Enter JSON array, e.g. ["item1", "item2"] or comma-separated values`
: paramSchema.description ||
`Enter ${formatParameterLabel(paramName).toLowerCase()}`
}
disabled={disabled}
isPassword={isPassword}
blockId={blockId}
accessiblePrefixes={accessiblePrefixes}
/>
)
}

@@ -331,40 +578,26 @@ export function McpDynamicArgs({
tabIndex={-1}
readOnly
/>
<div>
<div className='space-y-4'>
{toolSchema.properties &&
Object.entries(toolSchema.properties).map(([paramName, paramSchema], index, entries) => {
Object.entries(toolSchema.properties).map(([paramName, paramSchema]) => {
const inputType = getInputType(paramSchema as any)
const showLabel = inputType !== 'switch'
const showDivider = index < entries.length - 1

return (
<div key={paramName} className='subblock-row'>
<div className='subblock-content flex flex-col gap-[10px]'>
{showLabel && (
<Label
className={cn(
'font-medium text-sm',
toolSchema.required?.includes(paramName) &&
'after:ml-1 after:text-red-500 after:content-["*"]'
)}
>
{formatParameterLabel(paramName)}
</Label>
)}
{renderParameterInput(paramName, paramSchema as any)}
</div>
{showDivider && (
<div className='subblock-divider px-[2px] pt-[16px] pb-[13px]'>
<div
className='h-[1.25px]'
style={{
backgroundImage:
'repeating-linear-gradient(to right, var(--border) 0px, var(--border) 6px, transparent 6px, transparent 12px)',
}}
/>
</div>
<div key={paramName} className='space-y-2'>
{showLabel && (
<Label
className={cn(
'font-medium text-sm',
toolSchema.required?.includes(paramName) &&
'after:ml-1 after:text-red-500 after:content-["*"]'
)}
>
{formatParameterLabel(paramName)}
</Label>
)}
{renderParameterInput(paramName, paramSchema as any)}
</div>
)
})}
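
Both MCP inputs above use the same rendering trick: the real input keeps focus, selection, and caret, while its text is transparent and a pointer-events-none overlay paints a highlighted copy of the same string on top. A reduced sketch of just that technique (HighlightedInput and its props are illustrative, not from this diff):

import type { ReactNode } from 'react'

// Sketch of the transparent-input + overlay pattern used by McpInputWithTags.
function HighlightedInput({ value, onChange, render }: {
  value: string
  onChange: (v: string) => void
  render: (v: string) => ReactNode // e.g. formatDisplayText in the diff above
}) {
  return (
    <div style={{ position: 'relative' }}>
      <input
        value={value}
        onChange={(e) => onChange(e.target.value)}
        // Text is invisible but the caret still shows; the overlay draws the text
        style={{ color: 'transparent', caretColor: 'black' }}
      />
      <div style={{ position: 'absolute', inset: 0, pointerEvents: 'none' }}>
        {render(value)}
      </div>
    </div>
  )
}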
@@ -214,18 +214,40 @@ const getOutputTypeForPath = (
outputPath: string,
mergedSubBlocksOverride?: Record<string, any>
): string => {
const subBlocks =
mergedSubBlocksOverride ?? useWorkflowStore.getState().blocks[blockId]?.subBlocks
const triggerMode = block?.triggerMode && blockConfig?.triggers?.enabled
if (block?.triggerMode && blockConfig?.triggers?.enabled) {
return getBlockOutputType(block.type, outputPath, mergedSubBlocksOverride, true)
}
if (block?.type === 'starter') {
const startWorkflowValue =
mergedSubBlocksOverride?.startWorkflow?.value ?? getSubBlockValue(blockId, 'startWorkflow')

if (blockConfig?.tools?.config?.tool) {
if (startWorkflowValue === 'chat') {
const chatModeTypes: Record<string, string> = {
input: 'string',
conversationId: 'string',
files: 'files',
}
return chatModeTypes[outputPath] || 'any'
}
const inputFormatValue =
mergedSubBlocksOverride?.inputFormat?.value ?? getSubBlockValue(blockId, 'inputFormat')
if (inputFormatValue && Array.isArray(inputFormatValue)) {
const field = inputFormatValue.find(
(f: { name?: string; type?: string }) => f.name === outputPath
)
if (field?.type) return field.type
}
} else if (blockConfig?.category === 'triggers') {
const blockState = useWorkflowStore.getState().blocks[blockId]
const subBlocks = mergedSubBlocksOverride ?? (blockState?.subBlocks || {})
return getBlockOutputType(block.type, outputPath, subBlocks)
} else {
const operationValue = getSubBlockValue(blockId, 'operation')
if (operationValue) {
if (blockConfig && operationValue) {
return getToolOutputType(blockConfig, operationValue, outputPath)
}
}

return getBlockOutputType(block?.type ?? '', outputPath, subBlocks, triggerMode)
return 'any'
}

/**

@@ -1767,7 +1789,7 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
mergedSubBlocks
)

if (fieldType === 'files' || fieldType === 'file[]' || fieldType === 'array') {
if (fieldType === 'files' || fieldType === 'array') {
const blockName = parts[0]
const remainingPath = parts.slice(2).join('.')
processedTag = `${blockName}.${arrayFieldName}[0].${remainingPath}`

@@ -2069,7 +2069,6 @@ export const ToolInput = memo(function ToolInput({
placeholder: uiComponent.placeholder,
requiredScopes: uiComponent.requiredScopes,
dependsOn: uiComponent.dependsOn,
canonicalParamId: uiComponent.canonicalParamId ?? param.id,
}}
onProjectSelect={onChange}
disabled={disabled}

@@ -34,7 +34,6 @@ interface LogRowContextMenuProps {
onCopyRunId: (runId: string) => void
onClearFilters: () => void
onClearConsole: () => void
onFixInCopilot: (entry: ConsoleEntry) => void
hasActiveFilters: boolean
}

@@ -55,7 +54,6 @@ export function LogRowContextMenu({
onCopyRunId,
onClearFilters,
onClearConsole,
onFixInCopilot,
hasActiveFilters,
}: LogRowContextMenuProps) {
const hasRunId = entry?.executionId != null

@@ -98,21 +96,6 @@ export function LogRowContextMenu({
</>
)}

{/* Fix in Copilot - only for error rows */}
{entry && !entry.success && (
<>
<PopoverItem
onClick={() => {
onFixInCopilot(entry)
onClose()
}}
>
Fix in Copilot
</PopoverItem>
<PopoverDivider />
</>
)}

{/* Filter actions */}
{entry && (
<>
@@ -54,7 +54,6 @@ import { useShowTrainingControls } from '@/hooks/queries/general-settings'
import { useCodeViewerFeatures } from '@/hooks/use-code-viewer'
import { OUTPUT_PANEL_WIDTH, TERMINAL_HEIGHT } from '@/stores/constants'
import { useCopilotTrainingStore } from '@/stores/copilot-training/store'
import { openCopilotWithMessage } from '@/stores/notifications/utils'
import type { ConsoleEntry } from '@/stores/terminal'
import { useTerminalConsoleStore, useTerminalStore } from '@/stores/terminal'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'

@@ -227,6 +226,7 @@ const isEventFromEditableElement = (e: KeyboardEvent): boolean => {
return false
}

// Check target and walk up ancestors in case editors render nested elements
let el: HTMLElement | null = target
while (el) {
if (isEditable(el)) return true

@@ -1159,17 +1159,6 @@ export const Terminal = memo(function Terminal() {
clearCurrentWorkflowConsole()
}, [clearCurrentWorkflowConsole])

const handleFixInCopilot = useCallback(
(entry: ConsoleEntry) => {
const errorMessage = entry.error ? String(entry.error) : 'Unknown error'
const blockName = entry.blockName || 'Unknown Block'
const message = `${errorMessage}\n\nError in ${blockName}.\n\nPlease fix this.`
openCopilotWithMessage(message)
closeLogRowMenu()
},
[closeLogRowMenu]
)

const handleTrainingClick = useCallback(
(e: React.MouseEvent) => {
e.stopPropagation()

@@ -1960,7 +1949,6 @@ export const Terminal = memo(function Terminal() {
closeLogRowMenu()
}}
onClearConsole={handleClearConsoleFromMenu}
onFixInCopilot={handleFixInCopilot}
hasActiveFilters={hasActiveFilters}
/>
</>
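
The removed handler above turns a failed console entry into a prefilled Copilot prompt. A reduced sketch of just the message composition, taken directly from the hunk (field names follow ConsoleEntry as used there):

// Compose the "fix it" prompt exactly as handleFixInCopilot did above.
function buildFixItMessage(entry: { error?: unknown; blockName?: string }): string {
  const errorMessage = entry.error ? String(entry.error) : 'Unknown error'
  const blockName = entry.blockName || 'Unknown Block'
  return `${errorMessage}\n\nError in ${blockName}.\n\nPlease fix this.`
}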
@@ -692,8 +692,7 @@ const WorkflowContent = React.memo(() => {
parentId?: string,
extent?: 'parent',
autoConnectEdge?: Edge,
triggerMode?: boolean,
presetSubBlockValues?: Record<string, unknown>
triggerMode?: boolean
) => {
setPendingSelection([id])
setSelectedEdges(new Map())

@@ -723,14 +722,6 @@ const WorkflowContent = React.memo(() => {
}
}

// Apply preset subblock values (e.g., from tool-operation search)
if (presetSubBlockValues) {
if (!subBlockValues[id]) {
subBlockValues[id] = {}
}
Object.assign(subBlockValues[id], presetSubBlockValues)
}

collaborativeBatchAddBlocks(
[block],
autoConnectEdge ? [autoConnectEdge] : [],

@@ -1498,7 +1489,7 @@ const WorkflowContent = React.memo(() => {
return
}

const { type, enableTriggerMode, presetOperation } = event.detail
const { type, enableTriggerMode } = event.detail

if (!type) return
if (type === 'connectionBlock') return

@@ -1561,8 +1552,7 @@ const WorkflowContent = React.memo(() => {
undefined,
undefined,
autoConnectEdge,
enableTriggerMode,
presetOperation ? { operation: presetOperation } : undefined
enableTriggerMode
)
}

@@ -8,7 +8,6 @@ import { useParams, useRouter } from 'next/navigation'
import { Dialog, DialogPortal, DialogTitle } from '@/components/ui/dialog'
import { useBrandConfig } from '@/lib/branding/branding'
import { cn } from '@/lib/core/utils/cn'
import { getToolOperationsIndex } from '@/lib/search/tool-operations'
import { getTriggersForSidebar, hasTriggerCapability } from '@/lib/workflows/triggers/trigger-utils'
import { searchItems } from '@/app/workspace/[workspaceId]/w/components/sidebar/components/search-modal/search-utils'
import { SIDEBAR_SCROLL_EVENT } from '@/app/workspace/[workspaceId]/w/components/sidebar/sidebar'

@@ -82,12 +81,10 @@ type SearchItem = {
color?: string
href?: string
shortcut?: string
type: 'block' | 'trigger' | 'tool' | 'tool-operation' | 'workflow' | 'workspace' | 'page' | 'doc'
type: 'block' | 'trigger' | 'tool' | 'workflow' | 'workspace' | 'page' | 'doc'
isCurrent?: boolean
blockType?: string
config?: any
operationId?: string
aliases?: string[]
}

interface SearchResultItemProps {

@@ -104,11 +101,7 @@ const SearchResultItem = memo(function SearchResultItem({
onItemClick,
}: SearchResultItemProps) {
const Icon = item.icon
const showColoredIcon =
item.type === 'block' ||
item.type === 'trigger' ||
item.type === 'tool' ||
item.type === 'tool-operation'
const showColoredIcon = item.type === 'block' || item.type === 'trigger' || item.type === 'tool'
const isWorkflow = item.type === 'workflow'
const isWorkspace = item.type === 'workspace'

@@ -285,24 +278,6 @@ export const SearchModal = memo(function SearchModal({
)
}, [open, isOnWorkflowPage, filterBlocks])

const toolOperations = useMemo(() => {
if (!open || !isOnWorkflowPage) return []

const allowedBlockTypes = new Set(tools.map((t) => t.type))

return getToolOperationsIndex()
.filter((op) => allowedBlockTypes.has(op.blockType))
.map((op) => ({
id: op.id,
name: `${op.serviceName}: ${op.operationName}`,
icon: op.icon,
bgColor: op.bgColor,
blockType: op.blockType,
operationId: op.operationId,
aliases: op.aliases,
}))
}, [open, isOnWorkflowPage, tools])

const pages = useMemo(
(): PageItem[] => [
{

@@ -421,19 +396,6 @@ export const SearchModal = memo(function SearchModal({
})
})

toolOperations.forEach((op) => {
items.push({
id: op.id,
name: op.name,
icon: op.icon,
bgColor: op.bgColor,
type: 'tool-operation',
blockType: op.blockType,
operationId: op.operationId,
aliases: op.aliases,
})
})

docs.forEach((doc) => {
items.push({
id: doc.id,

@@ -445,10 +407,10 @@ export const SearchModal = memo(function SearchModal({
})

return items
}, [workspaces, workflows, pages, blocks, triggers, tools, toolOperations, docs])
}, [workspaces, workflows, pages, blocks, triggers, tools, docs])

const sectionOrder = useMemo<SearchItem['type'][]>(
() => ['block', 'tool', 'tool-operation', 'trigger', 'workflow', 'workspace', 'page', 'doc'],
() => ['block', 'tool', 'trigger', 'workflow', 'workspace', 'page', 'doc'],
[]
)

@@ -495,7 +457,6 @@ export const SearchModal = memo(function SearchModal({
page: [],
trigger: [],
block: [],
'tool-operation': [],
tool: [],
doc: [],
}

@@ -551,17 +512,6 @@ export const SearchModal = memo(function SearchModal({
window.dispatchEvent(event)
}
break
case 'tool-operation':
if (item.blockType && item.operationId) {
const event = new CustomEvent('add-block-from-toolbar', {
detail: {
type: item.blockType,
presetOperation: item.operationId,
},
})
window.dispatchEvent(event)
}
break
case 'workspace':
if (item.isCurrent) {
break

@@ -642,7 +592,6 @@ export const SearchModal = memo(function SearchModal({
page: 'Pages',
trigger: 'Triggers',
block: 'Blocks',
'tool-operation': 'Tool Operations',
tool: 'Tools',
doc: 'Docs',
}

@@ -8,19 +8,17 @@ export interface SearchableItem {
name: string
description?: string
type: string
aliases?: string[]
[key: string]: any
}

export interface SearchResult<T extends SearchableItem> {
item: T
score: number
matchType: 'exact' | 'prefix' | 'alias' | 'word-boundary' | 'substring' | 'description'
matchType: 'exact' | 'prefix' | 'word-boundary' | 'substring' | 'description'
}

const SCORE_EXACT_MATCH = 10000
const SCORE_PREFIX_MATCH = 5000
const SCORE_ALIAS_MATCH = 3000
const SCORE_WORD_BOUNDARY = 1000
const SCORE_SUBSTRING_MATCH = 100
const DESCRIPTION_WEIGHT = 0.3

@@ -69,39 +67,6 @@ function calculateFieldScore(
return { score: 0, matchType: null }
}

/**
 * Check if query matches any alias in the item's aliases array
 * Returns the alias score if a match is found, 0 otherwise
 */
function calculateAliasScore(
query: string,
aliases?: string[]
): { score: number; matchType: 'alias' | null } {
if (!aliases || aliases.length === 0) {
return { score: 0, matchType: null }
}

const normalizedQuery = query.toLowerCase().trim()

for (const alias of aliases) {
const normalizedAlias = alias.toLowerCase().trim()

if (normalizedAlias === normalizedQuery) {
return { score: SCORE_ALIAS_MATCH, matchType: 'alias' }
}

if (normalizedAlias.startsWith(normalizedQuery)) {
return { score: SCORE_ALIAS_MATCH * 0.8, matchType: 'alias' }
}

if (normalizedQuery.includes(normalizedAlias) || normalizedAlias.includes(normalizedQuery)) {
return { score: SCORE_ALIAS_MATCH * 0.6, matchType: 'alias' }
}
}

return { score: 0, matchType: null }
}

/**
 * Search items using tiered matching algorithm
 * Returns items sorted by relevance (highest score first)

@@ -125,20 +90,15 @@ export function searchItems<T extends SearchableItem>(
? calculateFieldScore(normalizedQuery, item.description)
: { score: 0, matchType: null }

const aliasMatch = calculateAliasScore(normalizedQuery, item.aliases)

const nameScore = nameMatch.score
const descScore = descMatch.score * DESCRIPTION_WEIGHT
const aliasScore = aliasMatch.score

const bestScore = Math.max(nameScore, descScore, aliasScore)
const bestScore = Math.max(nameScore, descScore)

if (bestScore > 0) {
let matchType: SearchResult<T>['matchType'] = 'substring'
if (nameScore >= descScore && nameScore >= aliasScore) {
if (nameScore >= descScore) {
matchType = nameMatch.matchType || 'substring'
} else if (aliasScore >= descScore) {
matchType = 'alias'
} else {
matchType = 'description'
}

@@ -165,8 +125,6 @@ export function getMatchTypeLabel(matchType: SearchResult<any>['matchType']): st
return 'Exact match'
case 'prefix':
return 'Starts with'
case 'alias':
return 'Similar to'
case 'word-boundary':
return 'Word match'
case 'substring':
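
For context on the tiered matcher above: each tier's constant dominates the ones below it, so an exact name hit always outranks any prefix hit, which outranks any substring hit, and description matches are discounted by DESCRIPTION_WEIGHT. A small illustrative data set (the exact call signature of searchItems is not shown in this hunk, so the call itself is left as a comment rather than asserted):

// Illustrative items shaped like SearchableItem from this file.
const sampleItems = [
  { id: '1', name: 'Slack', type: 'tool' },
  { id: '2', name: 'Slack Canvas', type: 'tool' },
  { id: '3', name: 'Gmail', description: 'Slack-style notifications', type: 'tool' },
]
// Searching 'slack' against these would rank:
//   'Slack'        -> exact name match      (score ~10000)
//   'Slack Canvas' -> prefix name match     (score ~5000)
//   'Gmail'        -> description substring (score ~100 * 0.3 = 30)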
@@ -1,80 +0,0 @@
'use client'

import { useState } from 'react'
import { createLogger } from '@sim/logger'
import { useQueryClient } from '@tanstack/react-query'
import { useParams } from 'next/navigation'
import { Button, Input as EmcnInput } from '@/components/emcn'
import { workflowKeys } from '@/hooks/queries/workflows'

const logger = createLogger('DebugSettings')

/**
 * Debug settings component for superusers.
 * Allows importing workflows by ID for debugging purposes.
 */
export function Debug() {
const params = useParams()
const queryClient = useQueryClient()
const workspaceId = params?.workspaceId as string

const [workflowId, setWorkflowId] = useState('')
const [isImporting, setIsImporting] = useState(false)

const handleImport = async () => {
if (!workflowId.trim()) return

setIsImporting(true)

try {
const response = await fetch('/api/superuser/import-workflow', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
workflowId: workflowId.trim(),
targetWorkspaceId: workspaceId,
}),
})

const data = await response.json()

if (response.ok) {
await queryClient.invalidateQueries({ queryKey: workflowKeys.list(workspaceId) })
setWorkflowId('')
logger.info('Workflow imported successfully', {
originalWorkflowId: workflowId.trim(),
newWorkflowId: data.newWorkflowId,
copilotChatsImported: data.copilotChatsImported,
})
}
} catch (error) {
logger.error('Failed to import workflow', error)
} finally {
setIsImporting(false)
}
}

return (
<div className='flex h-full flex-col gap-[16px]'>
<p className='text-[13px] text-[var(--text-secondary)]'>
Import a workflow by ID along with its associated copilot chats.
</p>

<div className='flex gap-[8px]'>
<EmcnInput
value={workflowId}
onChange={(e) => setWorkflowId(e.target.value)}
placeholder='Enter workflow ID'
disabled={isImporting}
/>
<Button
variant='tertiary'
onClick={handleImport}
disabled={isImporting || !workflowId.trim()}
>
{isImporting ? 'Importing...' : 'Import'}
</Button>
</div>
</div>
)
}
@@ -4,7 +4,6 @@ export { BYOK } from './byok/byok'
export { Copilot } from './copilot/copilot'
export { CredentialSets } from './credential-sets/credential-sets'
export { CustomTools } from './custom-tools/custom-tools'
export { Debug } from './debug/debug'
export { EnvironmentVariables } from './environment/environment'
export { Files as FileUploads } from './files/files'
export { General } from './general/general'

@@ -5,7 +5,6 @@ import * as DialogPrimitive from '@radix-ui/react-dialog'
import * as VisuallyHidden from '@radix-ui/react-visually-hidden'
import { useQueryClient } from '@tanstack/react-query'
import {
Bug,
Files,
KeySquare,
LogIn,

@@ -47,7 +46,6 @@ import {
Copilot,
CredentialSets,
CustomTools,
Debug,
EnvironmentVariables,
FileUploads,
General,

@@ -93,15 +91,8 @@ type SettingsSection =
| 'mcp'
| 'custom-tools'
| 'workflow-mcp-servers'
| 'debug'

type NavigationSection =
| 'account'
| 'subscription'
| 'tools'
| 'system'
| 'enterprise'
| 'superuser'
type NavigationSection = 'account' | 'subscription' | 'tools' | 'system' | 'enterprise'

type NavigationItem = {
id: SettingsSection

@@ -113,7 +104,6 @@ type NavigationItem = {
requiresEnterprise?: boolean
requiresHosted?: boolean
selfHostedOverride?: boolean
requiresSuperUser?: boolean
}

const sectionConfig: { key: NavigationSection; title: string }[] = [

@@ -122,7 +112,6 @@ const sectionConfig: { key: NavigationSection; title: string }[] = [
{ key: 'subscription', title: 'Subscription' },
{ key: 'system', title: 'System' },
{ key: 'enterprise', title: 'Enterprise' },
{ key: 'superuser', title: 'Superuser' },
]

const allNavigationItems: NavigationItem[] = [

@@ -191,24 +180,15 @@ const allNavigationItems: NavigationItem[] = [
requiresEnterprise: true,
selfHostedOverride: isSSOEnabled,
},
{
id: 'debug',
label: 'Debug',
icon: Bug,
section: 'superuser',
requiresSuperUser: true,
},
]

export function SettingsModal({ open, onOpenChange }: SettingsModalProps) {
const [activeSection, setActiveSection] = useState<SettingsSection>('general')
const { initialSection, mcpServerId, clearInitialState } = useSettingsModalStore()
const [pendingMcpServerId, setPendingMcpServerId] = useState<string | null>(null)
const [isSuperUser, setIsSuperUser] = useState(false)
const { data: session } = useSession()
const queryClient = useQueryClient()
const { data: organizationsData } = useOrganizations()
const { data: generalSettings } = useGeneralSettings()
const { data: subscriptionData } = useSubscriptionData({ enabled: isBillingEnabled })
const { data: ssoProvidersData, isLoading: isLoadingSSO } = useSSOProviders()

@@ -229,23 +209,6 @@ export function SettingsModal({ open, onOpenChange }: SettingsModalProps) {
const hasEnterprisePlan = subscriptionStatus.isEnterprise
const hasOrganization = !!activeOrganization?.id

// Fetch superuser status
useEffect(() => {
const fetchSuperUserStatus = async () => {
if (!userId) return
try {
const response = await fetch('/api/user/super-user')
if (response.ok) {
const data = await response.json()
setIsSuperUser(data.isSuperUser)
}
} catch {
setIsSuperUser(false)
}
}
fetchSuperUserStatus()
}, [userId])

// Memoize SSO provider ownership check
const isSSOProviderOwner = useMemo(() => {
if (isHosted) return null

@@ -305,13 +268,6 @@ export function SettingsModal({ open, onOpenChange }: SettingsModalProps) {
return false
}

// requiresSuperUser: only show if user is a superuser AND has superuser mode enabled
const superUserModeEnabled = generalSettings?.superUserModeEnabled ?? false
const effectiveSuperUser = isSuperUser && superUserModeEnabled
if (item.requiresSuperUser && !effectiveSuperUser) {
return false
}

return true
})
}, [

@@ -324,8 +280,6 @@ export function SettingsModal({ open, onOpenChange }: SettingsModalProps) {
isOwner,
isAdmin,
permissionConfig,
isSuperUser,
generalSettings?.superUserModeEnabled,
])

// Memoized callbacks to prevent infinite loops in child components

@@ -354,6 +308,9 @@ export function SettingsModal({ open, onOpenChange }: SettingsModalProps) {
[activeSection]
)

// React Query hook automatically loads and syncs settings
useGeneralSettings()

// Apply initial section from store when modal opens
useEffect(() => {
if (open && initialSection) {

@@ -566,7 +523,6 @@ export function SettingsModal({ open, onOpenChange }: SettingsModalProps) {
{activeSection === 'mcp' && <MCP initialServerId={pendingMcpServerId} />}
{activeSection === 'custom-tools' && <CustomTools />}
{activeSection === 'workflow-mcp-servers' && <WorkflowMcpServers />}
{activeSection === 'debug' && <Debug />}
</SModalMainBody>
</SModalMain>
</SModalContent>

@@ -208,8 +208,6 @@ async function runWorkflowExecution({
snapshot,
callbacks: {},
loggingSession,
includeFileBase64: true,
base64MaxBytes: undefined,
})

if (executionResult.status === 'paused') {

@@ -240,8 +240,6 @@ async function executeWebhookJobInternal(
snapshot,
callbacks: {},
loggingSession,
includeFileBase64: true, // Enable base64 hydration
base64MaxBytes: undefined, // Use default limit
})

if (executionResult.status === 'paused') {

@@ -495,7 +493,6 @@ async function executeWebhookJobInternal(
snapshot,
callbacks: {},
loggingSession,
includeFileBase64: true,
})

if (executionResult.status === 'paused') {

@@ -109,8 +109,6 @@ export async function executeWorkflowJob(payload: WorkflowExecutionPayload) {
snapshot,
callbacks: {},
loggingSession,
includeFileBase64: true,
base64MaxBytes: undefined,
})

if (result.status === 'paused') {

@@ -11,7 +11,7 @@ export const BrowserUseBlock: BlockConfig<BrowserUseResponse> = {
'Integrate Browser Use into the workflow. Can navigate the web and perform actions as if a real user was interacting with the browser.',
docsLink: 'https://docs.sim.ai/tools/browser_use',
category: 'tools',
bgColor: '#181C1E',
bgColor: '#E0E0E0',
icon: BrowserUseIcon,
subBlocks: [
{

@@ -121,9 +121,5 @@ export const FileBlock: BlockConfig<FileParserOutput> = {
type: 'string',
description: 'All file contents merged into a single text string',
},
processedFiles: {
type: 'files',
description: 'Array of UserFile objects for downstream use (attachments, uploads, etc.)',
},
},
}

@@ -34,7 +34,7 @@ export function OTPVerificationEmail({
const brand = getBrandConfig()

return (
<EmailLayout preview={getSubjectByType(type, brand.name, chatTitle)} showUnsubscribe={false}>
<EmailLayout preview={getSubjectByType(type, brand.name, chatTitle)}>
<Text style={baseStyles.paragraph}>Your verification code:</Text>

<Section style={baseStyles.codeContainer}>

@@ -12,7 +12,7 @@ export function ResetPasswordEmail({ username = '', resetLink = '' }: ResetPassw
const brand = getBrandConfig()

return (
<EmailLayout preview={`Reset your ${brand.name} password`} showUnsubscribe={false}>
<EmailLayout preview={`Reset your ${brand.name} password`}>
<Text style={baseStyles.paragraph}>Hello {username},</Text>
<Text style={baseStyles.paragraph}>
A password reset was requested for your {brand.name} account. Click below to set a new

@@ -13,7 +13,7 @@ export function WelcomeEmail({ userName }: WelcomeEmailProps) {
const baseUrl = getBaseUrl()

return (
<EmailLayout preview={`Welcome to ${brand.name}`} showUnsubscribe={false}>
<EmailLayout preview={`Welcome to ${brand.name}`}>
<Text style={{ ...baseStyles.paragraph, marginTop: 0 }}>
{userName ? `Hey ${userName},` : 'Hey,'}
</Text>

@@ -23,7 +23,7 @@ export function CreditPurchaseEmail({
const previewText = `${brand.name}: $${amount.toFixed(2)} in credits added to your account`

return (
<EmailLayout preview={previewText} showUnsubscribe={false}>
<EmailLayout preview={previewText}>
<Text style={{ ...baseStyles.paragraph, marginTop: 0 }}>
{userName ? `Hi ${userName},` : 'Hi,'}
</Text>

@@ -18,10 +18,7 @@ export function EnterpriseSubscriptionEmail({
const effectiveLoginLink = loginLink || `${baseUrl}/login`

return (
<EmailLayout
preview={`Your Enterprise Plan is now active on ${brand.name}`}
showUnsubscribe={false}
>
<EmailLayout preview={`Your Enterprise Plan is now active on ${brand.name}`}>
<Text style={baseStyles.paragraph}>Hello {userName},</Text>
<Text style={baseStyles.paragraph}>
Your <strong>Enterprise Plan</strong> is now active. You have full access to advanced

@@ -31,7 +31,7 @@ export function FreeTierUpgradeEmail({
const previewText = `${brand.name}: You've used ${percentUsed}% of your free credits`

return (
<EmailLayout preview={previewText} showUnsubscribe={true}>
<EmailLayout preview={previewText}>
<Text style={{ ...baseStyles.paragraph, marginTop: 0 }}>
{userName ? `Hi ${userName},` : 'Hi,'}
</Text>

@@ -25,7 +25,7 @@ export function PaymentFailedEmail({
const previewText = `${brand.name}: Payment Failed - Action Required`

return (
<EmailLayout preview={previewText} showUnsubscribe={false}>
<EmailLayout preview={previewText}>
<Text style={{ ...baseStyles.paragraph, marginTop: 0 }}>
{userName ? `Hi ${userName},` : 'Hi,'}
</Text>

@@ -18,7 +18,7 @@ export function PlanWelcomeEmail({ planName, userName, loginLink }: PlanWelcomeE
const previewText = `${brand.name}: Your ${planName} plan is active`

return (
<EmailLayout preview={previewText} showUnsubscribe={true}>
<EmailLayout preview={previewText}>
<Text style={{ ...baseStyles.paragraph, marginTop: 0 }}>
{userName ? `Hi ${userName},` : 'Hi,'}
</Text>

@@ -25,7 +25,7 @@ export function UsageThresholdEmail({
const previewText = `${brand.name}: You're at ${percentUsed}% of your ${planName} monthly budget`

return (
<EmailLayout preview={previewText} showUnsubscribe={true}>
<EmailLayout preview={previewText}>
<Text style={{ ...baseStyles.paragraph, marginTop: 0 }}>
{userName ? `Hi ${userName},` : 'Hi,'}
</Text>

@@ -20,10 +20,7 @@ export function CareersConfirmationEmail({
const baseUrl = getBaseUrl()

return (
<EmailLayout
preview={`Your application to ${brand.name} has been received`}
showUnsubscribe={false}
>
<EmailLayout preview={`Your application to ${brand.name} has been received`}>
<Text style={baseStyles.paragraph}>Hello {name},</Text>
<Text style={baseStyles.paragraph}>
We've received your application for <strong>{position}</strong>. Our team reviews every

@@ -40,7 +40,7 @@ export function CareersSubmissionEmail({
submittedDate = new Date(),
}: CareersSubmissionEmailProps) {
return (
<EmailLayout preview={`New Career Application from ${name}`} hideFooter showUnsubscribe={false}>
<EmailLayout preview={`New Career Application from ${name}`} hideFooter>
<Text
style={{
...baseStyles.paragraph,

@@ -4,29 +4,22 @@ import { getBrandConfig } from '@/lib/branding/branding'
import { isHosted } from '@/lib/core/config/feature-flags'
import { getBaseUrl } from '@/lib/core/utils/urls'

interface UnsubscribeOptions {
unsubscribeToken?: string
email?: string
}

interface EmailFooterProps {
baseUrl?: string
unsubscribe?: UnsubscribeOptions
messageId?: string
/**
 * Whether to show unsubscribe link. Defaults to true.
 * Set to false for transactional emails where unsubscribe doesn't apply.
 */
showUnsubscribe?: boolean
}

/**
 * Email footer component styled to match Stripe's email design.
 * Sits in the gray area below the main white card.
 *
 * For non-transactional emails, the unsubscribe link uses placeholders
 * {{UNSUBSCRIBE_TOKEN}} and {{UNSUBSCRIBE_EMAIL}} which are replaced
 * by the mailer when sending.
 */
export function EmailFooter({
baseUrl = getBaseUrl(),
messageId,
showUnsubscribe = true,
}: EmailFooterProps) {
export function EmailFooter({ baseUrl = getBaseUrl(), unsubscribe, messageId }: EmailFooterProps) {
const brand = getBrandConfig()

const footerLinkStyle = {

@@ -188,20 +181,19 @@ export function EmailFooter({
•{' '}
<a href={`${baseUrl}/terms`} style={footerLinkStyle} rel='noopener noreferrer'>
Terms of Service
</a>{' '}
•{' '}
<a
href={
unsubscribe?.unsubscribeToken && unsubscribe?.email
? `${baseUrl}/unsubscribe?token=${unsubscribe.unsubscribeToken}&email=${encodeURIComponent(unsubscribe.email)}`
: `mailto:${brand.supportEmail}?subject=Unsubscribe%20Request&body=Please%20unsubscribe%20me%20from%20all%20emails.`
}
style={footerLinkStyle}
rel='noopener noreferrer'
>
Unsubscribe
</a>
{showUnsubscribe && (
<>
{' '}
•{' '}
<a
href={`${baseUrl}/unsubscribe?token={{UNSUBSCRIBE_TOKEN}}&email={{UNSUBSCRIBE_EMAIL}}`}
style={footerLinkStyle}
rel='noopener noreferrer'
>
Unsubscribe
</a>
</>
)}
</td>
<td style={baseStyles.gutter} width={spacing.gutter}>
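
The footer above embeds literal {{UNSUBSCRIBE_TOKEN}} and {{UNSUBSCRIBE_EMAIL}} placeholders; per its doc comment, the mailer substitutes them at send time. The mailer itself is not part of this diff, so the following is only a hedged sketch of what that substitution step might look like:

// Hypothetical placeholder substitution; the real mailer code is not shown
// in this diff and may differ.
function fillUnsubscribePlaceholders(html: string, token: string, email: string): string {
  return html
    .replaceAll('{{UNSUBSCRIBE_TOKEN}}', encodeURIComponent(token))
    .replaceAll('{{UNSUBSCRIBE_EMAIL}}', encodeURIComponent(email))
}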
@@ -11,23 +11,13 @@ interface EmailLayoutProps {
|
||||
children: React.ReactNode
|
||||
/** Optional: hide footer for internal emails */
|
||||
hideFooter?: boolean
|
||||
/**
|
||||
* Whether to show unsubscribe link in footer.
|
||||
 * Set to false for transactional emails where unsubscribe doesn't apply.
 */
-  showUnsubscribe: boolean
}

/**
 * Shared email layout wrapper providing consistent structure.
 * Includes Html, Head, Body, Container with logo header, and Footer.
 */
-export function EmailLayout({
-  preview,
-  children,
-  hideFooter = false,
-  showUnsubscribe,
-}: EmailLayoutProps) {
+export function EmailLayout({ preview, children, hideFooter = false }: EmailLayoutProps) {
  const brand = getBrandConfig()
  const baseUrl = getBaseUrl()

@@ -53,7 +43,7 @@ export function EmailLayout({
        </Container>

        {/* Footer in gray section */}
-        {!hideFooter && <EmailFooter baseUrl={baseUrl} showUnsubscribe={showUnsubscribe} />}
+        {!hideFooter && <EmailFooter baseUrl={baseUrl} />}
      </Body>
    </Html>
  )

@@ -54,7 +54,6 @@ export function BatchInvitationEmail({
  return (
    <EmailLayout
      preview={`You've been invited to join ${organizationName}${hasWorkspaces ? ` and ${workspaceInvitations.length} workspace(s)` : ''}`}
-      showUnsubscribe={false}
    >
      <Text style={baseStyles.paragraph}>Hello,</Text>
      <Text style={baseStyles.paragraph}>

@@ -36,10 +36,7 @@ export function InvitationEmail({
  }

  return (
-    <EmailLayout
-      preview={`You've been invited to join ${organizationName} on ${brand.name}`}
-      showUnsubscribe={false}
-    >
+    <EmailLayout preview={`You've been invited to join ${organizationName} on ${brand.name}`}>
      <Text style={baseStyles.paragraph}>Hello,</Text>
      <Text style={baseStyles.paragraph}>
        <strong>{inviterName}</strong> invited you to join <strong>{organizationName}</strong> on{' '}

@@ -22,10 +22,7 @@ export function PollingGroupInvitationEmail({
  const providerName = provider === 'google-email' ? 'Gmail' : 'Outlook'

  return (
-    <EmailLayout
-      preview={`You've been invited to join ${pollingGroupName} on ${brand.name}`}
-      showUnsubscribe={false}
-    >
+    <EmailLayout preview={`You've been invited to join ${pollingGroupName} on ${brand.name}`}>
      <Text style={baseStyles.paragraph}>Hello,</Text>
      <Text style={baseStyles.paragraph}>
        <strong>{inviterName}</strong> from <strong>{organizationName}</strong> has invited you to

@@ -41,7 +41,6 @@ export function WorkspaceInvitationEmail({
  return (
    <EmailLayout
      preview={`You've been invited to join the "${workspaceName}" workspace on ${brand.name}!`}
-      showUnsubscribe={false}
    >
      <Text style={baseStyles.paragraph}>Hello,</Text>
      <Text style={baseStyles.paragraph}>

@@ -73,7 +73,7 @@ export function WorkflowNotificationEmail({
    : 'Your workflow completed successfully.'

  return (
-    <EmailLayout preview={previewText} showUnsubscribe={true}>
+    <EmailLayout preview={previewText}>
      <Text style={{ ...baseStyles.paragraph, marginTop: 0 }}>Hello,</Text>
      <Text style={baseStyles.paragraph}>{message}</Text>

@@ -32,10 +32,7 @@ export function HelpConfirmationEmail({
  const typeLabel = getTypeLabel(type)

  return (
-    <EmailLayout
-      preview={`Your ${typeLabel.toLowerCase()} has been received`}
-      showUnsubscribe={false}
-    >
+    <EmailLayout preview={`Your ${typeLabel.toLowerCase()} has been received`}>
      <Text style={baseStyles.paragraph}>Hello,</Text>
      <Text style={baseStyles.paragraph}>
        We've received your <strong>{typeLabel.toLowerCase()}</strong> and will get back to you

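For reference, a minimal sketch of the prop-driven footer pattern these hunks remove, using plain JSX stand-ins rather than the project's real @react-email components:

// A minimal sketch of the showUnsubscribe pattern; component shapes here are
// simplified stand-ins, not the project's actual EmailFooter/EmailLayout.
import * as React from 'react'

interface EmailLayoutProps {
  preview: string
  children: React.ReactNode
  hideFooter?: boolean
  showUnsubscribe?: boolean // false for transactional mail, where opting out doesn't apply
}

function EmailFooter({ baseUrl, showUnsubscribe }: { baseUrl: string; showUnsubscribe?: boolean }) {
  return (
    <footer>
      <a href={baseUrl}>Home</a>
      {/* The link only renders for emails a user can opt out of */}
      {showUnsubscribe && <a href={`${baseUrl}/unsubscribe`}>Unsubscribe</a>}
    </footer>
  )
}

export function EmailLayout({ preview, children, hideFooter = false, showUnsubscribe }: EmailLayoutProps) {
  const baseUrl = 'https://sim.ai' // stand-in for getBaseUrl()
  return (
    <div>
      <span style={{ display: 'none' }}>{preview}</span>
      {children}
      {!hideFooter && <EmailFooter baseUrl={baseUrl} showUnsubscribe={showUnsubscribe} />}
    </div>
  )
}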
@@ -1739,12 +1739,12 @@ export function BrowserUseIcon(props: SVGProps<SVGSVGElement>) {
      {...props}
      version='1.0'
      xmlns='http://www.w3.org/2000/svg'
-      width='28'
-      height='28'
+      width='150pt'
+      height='150pt'
      viewBox='0 0 150 150'
      preserveAspectRatio='xMidYMid meet'
    >
-      <g transform='translate(0,150) scale(0.05,-0.05)' fill='currentColor' stroke='none'>
+      <g transform='translate(0,150) scale(0.05,-0.05)' fill='#000000' stroke='none'>
        <path
          d='M786 2713 c-184 -61 -353 -217 -439 -405 -76 -165 -65 -539 19 -666
l57 -85 -48 -124 c-203 -517 -79 -930 346 -1155 159 -85 441 -71 585 28 l111

@@ -3,10 +3,6 @@ import { mcpServers } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq, inArray, isNull } from 'drizzle-orm'
import { getBaseUrl } from '@/lib/core/utils/urls'
-import {
-  containsUserFileWithMetadata,
-  hydrateUserFilesWithBase64,
-} from '@/lib/uploads/utils/user-file-base64.server'
import {
  BlockType,
  buildResumeApiUrl,

@@ -139,14 +135,6 @@ export class BlockExecutor {
      normalizedOutput = this.normalizeOutput(output)
    }

-    if (ctx.includeFileBase64 && containsUserFileWithMetadata(normalizedOutput)) {
-      normalizedOutput = (await hydrateUserFilesWithBase64(normalizedOutput, {
-        requestId: ctx.metadata.requestId,
-        executionId: ctx.executionId,
-        maxBytes: ctx.base64MaxBytes,
-      })) as NormalizedBlockOutput
-    }

    const duration = Date.now() - startTime

    if (blockLog) {

@@ -169,8 +169,6 @@ export class DAGExecutor {
      onBlockStart: this.contextExtensions.onBlockStart,
      onBlockComplete: this.contextExtensions.onBlockComplete,
      abortSignal: this.contextExtensions.abortSignal,
-      includeFileBase64: this.contextExtensions.includeFileBase64,
-      base64MaxBytes: this.contextExtensions.base64MaxBytes,
    }

    if (this.contextExtensions.resumeFromSnapshot) {

@@ -89,8 +89,6 @@ export interface ContextExtensions {
   * When aborted, the execution should stop gracefully.
   */
  abortSignal?: AbortSignal
-  includeFileBase64?: boolean
-  base64MaxBytes?: number
  onStream?: (streamingExecution: unknown) => Promise<void>
  onBlockStart?: (
    blockId: string,

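For context, the removed executor branch gates base64 hydration on a context flag plus a size cap. A standalone sketch of that gate, with simplified stand-in types in place of the real helpers:

// Sketch of the removed hydration gate: hydrate file outputs with base64 only
// when the flag is on and the file is under the size cap. UserFile and
// fetchBase64 are simplified stand-ins, not the project's real helpers.
interface UserFile { name: string; key: string; size: number; base64?: string }

async function fetchBase64(key: string): Promise<string> {
  return `base64-of-${key}` // placeholder for the real storage fetch
}

async function hydrateUserFiles(
  files: UserFile[],
  opts: { includeFileBase64?: boolean; base64MaxBytes?: number }
): Promise<UserFile[]> {
  if (!opts.includeFileBase64) return files
  const maxBytes = opts.base64MaxBytes ?? 5 * 1024 * 1024
  return Promise.all(
    files.map(async (f) =>
      // Oversized files pass through untouched so one large upload
      // cannot bloat the stored execution state.
      f.size > maxBytes ? f : { ...f, base64: await fetchBase64(f.key) }
    )
  )
}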
@@ -387,6 +387,7 @@ describe('AgentBlockHandler', () => {
          code: 'return { result: "auto tool executed", input }',
          input: 'test input',
        }),
+        false, // skipProxy
        false, // skipPostProcess
        expect.any(Object) // execution context
      )
@@ -399,6 +400,7 @@ describe('AgentBlockHandler', () => {
          code: 'return { result: "force tool executed", input }',
          input: 'another test',
        }),
+        false, // skipProxy
        false, // skipPostProcess
        expect.any(Object) // execution context
      )
@@ -1405,7 +1407,7 @@ describe('AgentBlockHandler', () => {
    })

    it('should handle MCP tools in agent execution', async () => {
-      mockExecuteTool.mockImplementation((toolId, params, skipPostProcess, context) => {
+      mockExecuteTool.mockImplementation((toolId, params, skipProxy, skipPostProcess, context) => {
        if (isMcpTool(toolId)) {
          return Promise.resolve({
            success: true,
@@ -1680,7 +1682,7 @@ describe('AgentBlockHandler', () => {

    it('should provide workspaceId context for MCP tool execution', async () => {
      let capturedContext: any
-      mockExecuteTool.mockImplementation((toolId, params, skipPostProcess, context) => {
+      mockExecuteTool.mockImplementation((toolId, params, skipProxy, skipPostProcess, context) => {
        capturedContext = context
        if (isMcpTool(toolId)) {
          return Promise.resolve({

@@ -325,6 +325,7 @@ export class AgentBlockHandler implements BlockHandler {
        },
      },
+      false,
      false,
      ctx
    )

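Every call site in these hunks gains a leading boolean. A sketch of the widened signature as implied by the test comments; the parameter names come from the `// skipProxy` and `// skipPostProcess` annotations above, and this stub is an assumption, not the real implementation:

// Assumed shape of the widened executeTool signature.
type ToolResult = { success: boolean; output?: unknown; error?: string }

async function executeTool(
  toolId: string,
  params: Record<string, unknown>,
  skipProxy: boolean, // true = call the provider directly instead of going through the proxy route
  skipPostProcess: boolean, // true = return raw tool output without post-processing
  ctx?: unknown // execution context threaded through to tools (e.g., MCP)
): Promise<ToolResult> {
  void skipProxy
  void skipPostProcess
  void ctx
  return { success: true, output: { toolId, params } }
}

// Call sites now pass both flags positionally:
void executeTool('function_execute', { code: 'return 1' }, false, false, undefined)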
@@ -106,6 +106,7 @@ describe('ApiBlockHandler', () => {
          body: { key: 'value' }, // Expect parsed body
          _context: { workflowId: 'test-workflow-id' },
        },
+        false, // skipProxy
        false, // skipPostProcess
        mockContext // execution context
      )
@@ -157,6 +158,7 @@ describe('ApiBlockHandler', () => {
      expect(mockExecuteTool).toHaveBeenCalledWith(
        'http_request',
        expect.objectContaining({ body: expectedParsedBody }),
+        false, // skipProxy
        false, // skipPostProcess
        mockContext // execution context
      )
@@ -173,6 +175,7 @@ describe('ApiBlockHandler', () => {
      expect(mockExecuteTool).toHaveBeenCalledWith(
        'http_request',
        expect.objectContaining({ body: 'This is plain text' }),
+        false, // skipProxy
        false, // skipPostProcess
        mockContext // execution context
      )
@@ -189,6 +192,7 @@ describe('ApiBlockHandler', () => {
      expect(mockExecuteTool).toHaveBeenCalledWith(
        'http_request',
        expect.objectContaining({ body: undefined }),
+        false, // skipProxy
        false, // skipPostProcess
        mockContext // execution context
      )

@@ -82,6 +82,7 @@ export class ApiBlockHandler implements BlockHandler {
        },
      },
+      false,
      false,
      ctx
    )

@@ -201,6 +201,7 @@ describe('ConditionBlockHandler', () => {
          },
        }),
+        false,
        false,
        mockContext
      )
    })

@@ -44,6 +44,7 @@ export async function evaluateConditionExpression(
      },
    },
+    false,
    false,
    ctx
  )

@@ -84,6 +84,7 @@ describe('FunctionBlockHandler', () => {
      expect(mockExecuteTool).toHaveBeenCalledWith(
        'function_execute',
        expectedToolParams,
+        false, // skipProxy
        false, // skipPostProcess
        mockContext // execution context
      )
@@ -116,6 +117,7 @@ describe('FunctionBlockHandler', () => {
      expect(mockExecuteTool).toHaveBeenCalledWith(
        'function_execute',
        expectedToolParams,
+        false, // skipProxy
        false, // skipPostProcess
        mockContext // execution context
      )
@@ -140,6 +142,7 @@ describe('FunctionBlockHandler', () => {
      expect(mockExecuteTool).toHaveBeenCalledWith(
        'function_execute',
        expectedToolParams,
+        false, // skipProxy
        false, // skipPostProcess
        mockContext // execution context
      )

@@ -42,6 +42,7 @@ export class FunctionBlockHandler implements BlockHandler {
        },
      },
+      false,
      false,
      ctx
    )

@@ -95,6 +95,7 @@ describe('GenericBlockHandler', () => {
      expect(mockExecuteTool).toHaveBeenCalledWith(
        'some_custom_tool',
        expectedToolParams,
+        false, // skipProxy
        false, // skipPostProcess
        mockContext // execution context
      )

@@ -70,6 +70,7 @@ export class GenericBlockHandler implements BlockHandler {
        },
      },
+      false,
      false,
      ctx
    )

@@ -633,7 +633,7 @@ export class HumanInTheLoopBlockHandler implements BlockHandler {
      blockNameMapping: blockNameMappingWithPause,
    }

-    const result = await executeTool(toolId, toolParams, false, ctx)
+    const result = await executeTool(toolId, toolParams, false, false, ctx)
    const durationMs = Date.now() - startTime

    if (!result.success) {

@@ -11,7 +11,6 @@ export interface UserFile {
  type: string
  key: string
  context?: string
-  base64?: string
}

export interface ParallelPauseScope {

@@ -237,19 +236,6 @@ export interface ExecutionContext {

  // Dynamically added nodes that need to be scheduled (e.g., from parallel expansion)
  pendingDynamicNodes?: string[]

-  /**
-   * When true, UserFile objects in block outputs will be hydrated with base64 content
-   * before being stored in execution state. This ensures base64 is available for
-   * variable resolution in downstream blocks.
-   */
-  includeFileBase64?: boolean
-
-  /**
-   * Maximum file size in bytes for base64 hydration. Files larger than this limit
-   * will not have their base64 content fetched.
-   */
-  base64MaxBytes?: number
}

export interface ExecutionResult {

@@ -1,4 +1,4 @@
-import { isUserFileWithMetadata } from '@/lib/core/utils/user-file'
+import { isUserFile } from '@/lib/core/utils/display-filters'
import {
  classifyStartBlockType,
  getLegacyStarterMode,
@@ -234,7 +234,7 @@ function getFilesFromWorkflowInput(workflowInput: unknown): UserFile[] | undefined {
    return undefined
  }
  const files = workflowInput.files
-  if (Array.isArray(files) && files.every(isUserFileWithMetadata)) {
+  if (Array.isArray(files) && files.every(isUserFile)) {
    return files
  }
  return undefined

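The hunk swaps a strict metadata guard for a looser one. A hedged sketch of what the two strictness levels typically check; the exact property lists here are assumptions, not the project's definitions:

// Hedged sketch of two guard strictness levels for user-file values.
interface UserFile { name: string; key: string; url?: string; size?: number }

function isUserFile(value: unknown): value is UserFile {
  if (typeof value !== 'object' || value === null) return false
  const v = value as Record<string, unknown>
  return typeof v.name === 'string' && typeof v.key === 'string'
}

function isUserFileWithMetadata(value: unknown): value is Required<UserFile> {
  return (
    isUserFile(value) &&
    typeof (value as Record<string, unknown>).url === 'string' &&
    typeof (value as Record<string, unknown>).size === 'number'
  )
}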
@@ -1,4 +1,3 @@
-import { USER_FILE_ACCESSIBLE_PROPERTIES } from '@/lib/workflows/types'
import {
  isReference,
  normalizeName,
@@ -21,58 +20,11 @@ function isPathInOutputSchema(
    return true
  }

-  const isFileArrayType = (value: any): boolean =>
-    value?.type === 'file[]' || value?.type === 'files'

  let current: any = outputs
  for (let i = 0; i < pathParts.length; i++) {
    const part = pathParts[i]

-    const arrayMatch = part.match(/^([^[]+)\[(\d+)\]$/)
-    if (arrayMatch) {
-      const [, prop] = arrayMatch
-      let fieldDef: any
-
-      if (prop in current) {
-        fieldDef = current[prop]
-      } else if (current.properties && prop in current.properties) {
-        fieldDef = current.properties[prop]
-      } else if (current.type === 'array' && current.items) {
-        if (current.items.properties && prop in current.items.properties) {
-          fieldDef = current.items.properties[prop]
-        } else if (prop in current.items) {
-          fieldDef = current.items[prop]
-        }
-      }
-
-      if (!fieldDef) {
-        return false
-      }
-
-      if (isFileArrayType(fieldDef)) {
-        if (i + 1 < pathParts.length) {
-          return USER_FILE_ACCESSIBLE_PROPERTIES.includes(pathParts[i + 1] as any)
-        }
-        return true
-      }
-
-      if (fieldDef.type === 'array' && fieldDef.items) {
-        current = fieldDef.items
-        continue
-      }
-
-      current = fieldDef
-      continue
-    }
-
-    if (/^\d+$/.test(part)) {
-      if (isFileArrayType(current)) {
-        if (i + 1 < pathParts.length) {
-          const nextPart = pathParts[i + 1]
-          return USER_FILE_ACCESSIBLE_PROPERTIES.includes(nextPart as any)
-        }
-        return true
-      }
-      continue
-    }

@@ -81,15 +33,7 @@ function isPathInOutputSchema(
    }

    if (part in current) {
-      const nextCurrent = current[part]
-      if (nextCurrent?.type === 'file[]' && i + 1 < pathParts.length) {
-        const nextPart = pathParts[i + 1]
-        if (/^\d+$/.test(nextPart) && i + 2 < pathParts.length) {
-          const propertyPart = pathParts[i + 2]
-          return USER_FILE_ACCESSIBLE_PROPERTIES.includes(propertyPart as any)
-        }
-      }
-      current = nextCurrent
+      current = current[part]
      continue
    }

@@ -109,10 +53,6 @@ function isPathInOutputSchema(
      }
    }

-    if (isFileArrayType(current) && USER_FILE_ACCESSIBLE_PROPERTIES.includes(part as any)) {
-      return true
-    }

    if ('type' in current && typeof current.type === 'string') {
      if (!current.properties && !current.items) {
        return false

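The removed branch walks an output schema along a dotted path and, once it hits a `file[]` field, only admits known file properties. A toy illustration of that idea under simplified, assumed field shapes:

// Toy sketch of the schema walk: descend the path, and once a file[] field is
// reached, allow only the file properties. Field shapes here are assumptions.
const USER_FILE_PROPS = ['name', 'url', 'size', 'type', 'key'] as const

type Field = { type?: string; properties?: Record<string, Field>; items?: Field }

function pathReachesFileProp(schema: Field, path: string[]): boolean {
  let current: Field | undefined = schema
  for (let i = 0; i < path.length; i++) {
    const part = path[i]
    if (/^\d+$/.test(part)) continue // array index: stay on the items definition
    const next: Field | undefined = current?.properties?.[part] ?? current?.items?.properties?.[part]
    if (!next) return false
    if (next.type === 'file[]') {
      const after = path.slice(i + 1).find((p) => !/^\d+$/.test(p))
      return after === undefined || (USER_FILE_PROPS as readonly string[]).includes(after)
    }
    current = next.type === 'array' ? next.items : next
  }
  return true
}

// pathReachesFileProp({ properties: { files: { type: 'file[]' } } }, ['files', '0', 'url']) === true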
@@ -1,4 +1,4 @@
-import { keepPreviousData, useMutation, useQuery, useQueryClient } from '@tanstack/react-query'
+import { keepPreviousData, useQuery } from '@tanstack/react-query'
import type {
  ChunkData,
  ChunksPagination,
@@ -332,629 +332,3 @@ export function useDocumentChunkSearchQuery(
    placeholderData: keepPreviousData,
  })
}

-export interface UpdateChunkParams {
-  knowledgeBaseId: string
-  documentId: string
-  chunkId: string
-  content?: string
-  enabled?: boolean
-}

-export async function updateChunk({
-  knowledgeBaseId,
-  documentId,
-  chunkId,
-  content,
-  enabled,
-}: UpdateChunkParams): Promise<ChunkData> {
-  const body: Record<string, unknown> = {}
-  if (content !== undefined) body.content = content
-  if (enabled !== undefined) body.enabled = enabled

-  const response = await fetch(
-    `/api/knowledge/${knowledgeBaseId}/documents/${documentId}/chunks/${chunkId}`,
-    {
-      method: 'PUT',
-      headers: { 'Content-Type': 'application/json' },
-      body: JSON.stringify(body),
-    }
-  )

-  if (!response.ok) {
-    const result = await response.json()
-    throw new Error(result.error || 'Failed to update chunk')
-  }

-  const result = await response.json()
-  if (!result?.success) {
-    throw new Error(result?.error || 'Failed to update chunk')
-  }

-  return result.data
-}

-export function useUpdateChunk() {
-  const queryClient = useQueryClient()

-  return useMutation({
-    mutationFn: updateChunk,
-    onSuccess: (_, { knowledgeBaseId, documentId }) => {
-      queryClient.invalidateQueries({
-        queryKey: knowledgeKeys.detail(knowledgeBaseId),
-      })
-      queryClient.invalidateQueries({
-        queryKey: knowledgeKeys.document(knowledgeBaseId, documentId),
-      })
-    },
-  })
-}

-export interface DeleteChunkParams {
-  knowledgeBaseId: string
-  documentId: string
-  chunkId: string
-}

-export async function deleteChunk({
-  knowledgeBaseId,
-  documentId,
-  chunkId,
-}: DeleteChunkParams): Promise<void> {
-  const response = await fetch(
-    `/api/knowledge/${knowledgeBaseId}/documents/${documentId}/chunks/${chunkId}`,
-    { method: 'DELETE' }
-  )

-  if (!response.ok) {
-    const result = await response.json()
-    throw new Error(result.error || 'Failed to delete chunk')
-  }

-  const result = await response.json()
-  if (!result?.success) {
-    throw new Error(result?.error || 'Failed to delete chunk')
-  }
-}

-export function useDeleteChunk() {
-  const queryClient = useQueryClient()

-  return useMutation({
-    mutationFn: deleteChunk,
-    onSuccess: (_, { knowledgeBaseId, documentId }) => {
-      queryClient.invalidateQueries({
-        queryKey: knowledgeKeys.detail(knowledgeBaseId),
-      })
-      queryClient.invalidateQueries({
-        queryKey: knowledgeKeys.document(knowledgeBaseId, documentId),
-      })
-    },
-  })
-}

-export interface CreateChunkParams {
-  knowledgeBaseId: string
-  documentId: string
-  content: string
-  enabled?: boolean
-}

-export async function createChunk({
-  knowledgeBaseId,
-  documentId,
-  content,
-  enabled = true,
-}: CreateChunkParams): Promise<ChunkData> {
-  const response = await fetch(`/api/knowledge/${knowledgeBaseId}/documents/${documentId}/chunks`, {
-    method: 'POST',
-    headers: { 'Content-Type': 'application/json' },
-    body: JSON.stringify({ content, enabled }),
-  })

-  if (!response.ok) {
-    const result = await response.json()
-    throw new Error(result.error || 'Failed to create chunk')
-  }

-  const result = await response.json()
-  if (!result?.success || !result?.data) {
-    throw new Error(result?.error || 'Failed to create chunk')
-  }

-  return result.data
-}

-export function useCreateChunk() {
-  const queryClient = useQueryClient()

-  return useMutation({
-    mutationFn: createChunk,
-    onSuccess: (_, { knowledgeBaseId, documentId }) => {
-      queryClient.invalidateQueries({
-        queryKey: knowledgeKeys.detail(knowledgeBaseId),
-      })
-      queryClient.invalidateQueries({
-        queryKey: knowledgeKeys.document(knowledgeBaseId, documentId),
-      })
-    },
-  })
-}

-export interface UpdateDocumentParams {
-  knowledgeBaseId: string
-  documentId: string
-  updates: {
-    enabled?: boolean
-    filename?: string
-    retryProcessing?: boolean
-    markFailedDueToTimeout?: boolean
-  }
-}

-export async function updateDocument({
-  knowledgeBaseId,
-  documentId,
-  updates,
-}: UpdateDocumentParams): Promise<DocumentData> {
-  const response = await fetch(`/api/knowledge/${knowledgeBaseId}/documents/${documentId}`, {
-    method: 'PUT',
-    headers: { 'Content-Type': 'application/json' },
-    body: JSON.stringify(updates),
-  })

-  if (!response.ok) {
-    const result = await response.json()
-    throw new Error(result.error || 'Failed to update document')
-  }

-  const result = await response.json()
-  if (!result?.success) {
-    throw new Error(result?.error || 'Failed to update document')
-  }

-  return result.data
-}

-export function useUpdateDocument() {
-  const queryClient = useQueryClient()

-  return useMutation({
-    mutationFn: updateDocument,
-    onSuccess: (_, { knowledgeBaseId, documentId }) => {
-      queryClient.invalidateQueries({
-        queryKey: knowledgeKeys.detail(knowledgeBaseId),
-      })
-      queryClient.invalidateQueries({
-        queryKey: knowledgeKeys.document(knowledgeBaseId, documentId),
-      })
-    },
-  })
-}

-export interface DeleteDocumentParams {
-  knowledgeBaseId: string
-  documentId: string
-}

-export async function deleteDocument({
-  knowledgeBaseId,
-  documentId,
-}: DeleteDocumentParams): Promise<void> {
-  const response = await fetch(`/api/knowledge/${knowledgeBaseId}/documents/${documentId}`, {
-    method: 'DELETE',
-  })

-  if (!response.ok) {
-    const result = await response.json()
-    throw new Error(result.error || 'Failed to delete document')
-  }

-  const result = await response.json()
-  if (!result?.success) {
-    throw new Error(result?.error || 'Failed to delete document')
-  }
-}

-export function useDeleteDocument() {
-  const queryClient = useQueryClient()

-  return useMutation({
-    mutationFn: deleteDocument,
-    onSuccess: (_, { knowledgeBaseId }) => {
-      queryClient.invalidateQueries({
-        queryKey: knowledgeKeys.detail(knowledgeBaseId),
-      })
-    },
-  })
-}

-export interface BulkDocumentOperationParams {
-  knowledgeBaseId: string
-  operation: 'enable' | 'disable' | 'delete'
-  documentIds: string[]
-}

-export interface BulkDocumentOperationResult {
-  successCount: number
-  failedCount: number
-  updatedDocuments?: Array<{ id: string; enabled: boolean }>
-}

-export async function bulkDocumentOperation({
-  knowledgeBaseId,
-  operation,
-  documentIds,
-}: BulkDocumentOperationParams): Promise<BulkDocumentOperationResult> {
-  const response = await fetch(`/api/knowledge/${knowledgeBaseId}/documents`, {
-    method: 'PATCH',
-    headers: { 'Content-Type': 'application/json' },
-    body: JSON.stringify({ operation, documentIds }),
-  })

-  if (!response.ok) {
-    const result = await response.json()
-    throw new Error(result.error || `Failed to ${operation} documents`)
-  }

-  const result = await response.json()
-  if (!result?.success) {
-    throw new Error(result?.error || `Failed to ${operation} documents`)
-  }

-  return result.data
-}

-export function useBulkDocumentOperation() {
-  const queryClient = useQueryClient()

-  return useMutation({
-    mutationFn: bulkDocumentOperation,
-    onSuccess: (_, { knowledgeBaseId }) => {
-      queryClient.invalidateQueries({
-        queryKey: knowledgeKeys.detail(knowledgeBaseId),
-      })
-    },
-  })
-}

-export interface CreateKnowledgeBaseParams {
-  name: string
-  description?: string
-  workspaceId: string
-  chunkingConfig: {
-    maxSize: number
-    minSize: number
-    overlap: number
-  }
-}

-export async function createKnowledgeBase(
-  params: CreateKnowledgeBaseParams
-): Promise<KnowledgeBaseData> {
-  const response = await fetch('/api/knowledge', {
-    method: 'POST',
-    headers: { 'Content-Type': 'application/json' },
-    body: JSON.stringify(params),
-  })

-  if (!response.ok) {
-    const result = await response.json()
-    throw new Error(result.error || 'Failed to create knowledge base')
-  }

-  const result = await response.json()
-  if (!result?.success || !result?.data) {
-    throw new Error(result?.error || 'Failed to create knowledge base')
-  }

-  return result.data
-}

-export function useCreateKnowledgeBase(workspaceId?: string) {
-  const queryClient = useQueryClient()

-  return useMutation({
-    mutationFn: createKnowledgeBase,
-    onSuccess: () => {
-      queryClient.invalidateQueries({
-        queryKey: knowledgeKeys.list(workspaceId),
-      })
-    },
-  })
-}

-export interface UpdateKnowledgeBaseParams {
-  knowledgeBaseId: string
-  updates: {
-    name?: string
-    description?: string
-    workspaceId?: string | null
-  }
-}

-export async function updateKnowledgeBase({
-  knowledgeBaseId,
-  updates,
-}: UpdateKnowledgeBaseParams): Promise<KnowledgeBaseData> {
-  const response = await fetch(`/api/knowledge/${knowledgeBaseId}`, {
-    method: 'PUT',
-    headers: { 'Content-Type': 'application/json' },
-    body: JSON.stringify(updates),
-  })

-  if (!response.ok) {
-    const result = await response.json()
-    throw new Error(result.error || 'Failed to update knowledge base')
-  }

-  const result = await response.json()
-  if (!result?.success) {
-    throw new Error(result?.error || 'Failed to update knowledge base')
-  }

-  return result.data
-}

-export function useUpdateKnowledgeBase(workspaceId?: string) {
-  const queryClient = useQueryClient()

-  return useMutation({
-    mutationFn: updateKnowledgeBase,
-    onSuccess: (_, { knowledgeBaseId }) => {
-      queryClient.invalidateQueries({
-        queryKey: knowledgeKeys.detail(knowledgeBaseId),
-      })
-      queryClient.invalidateQueries({
-        queryKey: knowledgeKeys.list(workspaceId),
-      })
-    },
-  })
-}

-export interface DeleteKnowledgeBaseParams {
-  knowledgeBaseId: string
-}

-export async function deleteKnowledgeBase({
-  knowledgeBaseId,
-}: DeleteKnowledgeBaseParams): Promise<void> {
-  const response = await fetch(`/api/knowledge/${knowledgeBaseId}`, {
-    method: 'DELETE',
-  })

-  if (!response.ok) {
-    const result = await response.json()
-    throw new Error(result.error || 'Failed to delete knowledge base')
-  }

-  const result = await response.json()
-  if (!result?.success) {
-    throw new Error(result?.error || 'Failed to delete knowledge base')
-  }
-}

-export function useDeleteKnowledgeBase(workspaceId?: string) {
-  const queryClient = useQueryClient()

-  return useMutation({
-    mutationFn: deleteKnowledgeBase,
-    onSuccess: () => {
-      queryClient.invalidateQueries({
-        queryKey: knowledgeKeys.list(workspaceId),
-      })
-    },
-  })
-}

-export interface BulkChunkOperationParams {
-  knowledgeBaseId: string
-  documentId: string
-  operation: 'enable' | 'disable' | 'delete'
-  chunkIds: string[]
-}

-export interface BulkChunkOperationResult {
-  successCount: number
-  failedCount: number
-  results: Array<{
-    operation: string
-    chunkIds: string[]
-  }>
-}

-export async function bulkChunkOperation({
-  knowledgeBaseId,
-  documentId,
-  operation,
-  chunkIds,
-}: BulkChunkOperationParams): Promise<BulkChunkOperationResult> {
-  const response = await fetch(`/api/knowledge/${knowledgeBaseId}/documents/${documentId}/chunks`, {
-    method: 'PATCH',
-    headers: { 'Content-Type': 'application/json' },
-    body: JSON.stringify({ operation, chunkIds }),
-  })

-  if (!response.ok) {
-    const result = await response.json()
-    throw new Error(result.error || `Failed to ${operation} chunks`)
-  }

-  const result = await response.json()
-  if (!result?.success) {
-    throw new Error(result?.error || `Failed to ${operation} chunks`)
-  }

-  return result.data
-}

-export function useBulkChunkOperation() {
-  const queryClient = useQueryClient()

-  return useMutation({
-    mutationFn: bulkChunkOperation,
-    onSuccess: (_, { knowledgeBaseId, documentId }) => {
-      queryClient.invalidateQueries({
-        queryKey: knowledgeKeys.detail(knowledgeBaseId),
-      })
-      queryClient.invalidateQueries({
-        queryKey: knowledgeKeys.document(knowledgeBaseId, documentId),
-      })
-    },
-  })
-}

-export interface UpdateDocumentTagsParams {
-  knowledgeBaseId: string
-  documentId: string
-  tags: Record<string, string>
-}

-export async function updateDocumentTags({
-  knowledgeBaseId,
-  documentId,
-  tags,
-}: UpdateDocumentTagsParams): Promise<DocumentData> {
-  const response = await fetch(`/api/knowledge/${knowledgeBaseId}/documents/${documentId}`, {
-    method: 'PUT',
-    headers: { 'Content-Type': 'application/json' },
-    body: JSON.stringify(tags),
-  })

-  if (!response.ok) {
-    const result = await response.json()
-    throw new Error(result.error || 'Failed to update document tags')
-  }

-  const result = await response.json()
-  if (!result?.success) {
-    throw new Error(result?.error || 'Failed to update document tags')
-  }

-  return result.data
-}

-export function useUpdateDocumentTags() {
-  const queryClient = useQueryClient()

-  return useMutation({
-    mutationFn: updateDocumentTags,
-    onSuccess: (_, { knowledgeBaseId, documentId }) => {
-      queryClient.invalidateQueries({
-        queryKey: knowledgeKeys.detail(knowledgeBaseId),
-      })
-      queryClient.invalidateQueries({
-        queryKey: knowledgeKeys.document(knowledgeBaseId, documentId),
-      })
-    },
-  })
-}

-export interface TagDefinitionData {
-  id: string
-  tagSlot: string
-  displayName: string
-  fieldType: string
-  createdAt: string
-  updatedAt: string
-}

-export interface CreateTagDefinitionParams {
-  knowledgeBaseId: string
-  displayName: string
-  fieldType: string
-}

-async function fetchNextAvailableSlot(knowledgeBaseId: string, fieldType: string): Promise<string> {
-  const response = await fetch(
-    `/api/knowledge/${knowledgeBaseId}/next-available-slot?fieldType=${fieldType}`
-  )

-  if (!response.ok) {
-    throw new Error('Failed to get available slot')
-  }

-  const result = await response.json()
-  if (!result.success || !result.data?.nextAvailableSlot) {
-    throw new Error('No available tag slots for this field type')
-  }

-  return result.data.nextAvailableSlot
-}

-export async function createTagDefinition({
-  knowledgeBaseId,
-  displayName,
-  fieldType,
-}: CreateTagDefinitionParams): Promise<TagDefinitionData> {
-  const tagSlot = await fetchNextAvailableSlot(knowledgeBaseId, fieldType)

-  const response = await fetch(`/api/knowledge/${knowledgeBaseId}/tag-definitions`, {
-    method: 'POST',
-    headers: { 'Content-Type': 'application/json' },
-    body: JSON.stringify({ tagSlot, displayName, fieldType }),
-  })

-  if (!response.ok) {
-    const result = await response.json()
-    throw new Error(result.error || 'Failed to create tag definition')
-  }

-  const result = await response.json()
-  if (!result?.success || !result?.data) {
-    throw new Error(result?.error || 'Failed to create tag definition')
-  }

-  return result.data
-}

-export function useCreateTagDefinition() {
-  const queryClient = useQueryClient()

-  return useMutation({
-    mutationFn: createTagDefinition,
-    onSuccess: (_, { knowledgeBaseId }) => {
-      queryClient.invalidateQueries({
-        queryKey: knowledgeKeys.detail(knowledgeBaseId),
-      })
-    },
-  })
-}

-export interface DeleteTagDefinitionParams {
-  knowledgeBaseId: string
-  tagDefinitionId: string
-}

-export async function deleteTagDefinition({
-  knowledgeBaseId,
-  tagDefinitionId,
-}: DeleteTagDefinitionParams): Promise<void> {
-  const response = await fetch(
-    `/api/knowledge/${knowledgeBaseId}/tag-definitions/${tagDefinitionId}`,
-    { method: 'DELETE' }
-  )

-  if (!response.ok) {
-    const result = await response.json()
-    throw new Error(result.error || 'Failed to delete tag definition')
-  }

-  const result = await response.json()
-  if (!result?.success) {
-    throw new Error(result?.error || 'Failed to delete tag definition')
-  }
-}

-export function useDeleteTagDefinition() {
-  const queryClient = useQueryClient()

-  return useMutation({
-    mutationFn: deleteTagDefinition,
-    onSuccess: (_, { knowledgeBaseId }) => {
-      queryClient.invalidateQueries({
-        queryKey: knowledgeKeys.detail(knowledgeBaseId),
-      })
-    },
-  })
-}

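Every hook removed in the hunk above follows the same shape: a fetch-based mutation plus targeted cache invalidation keyed off the knowledge base. A condensed sketch of that pattern; the key-factory shape is an assumption based on the calls shown:

// Condensed sketch of the shared mutation-plus-invalidation pattern.
import { useMutation, useQueryClient } from '@tanstack/react-query'

const knowledgeKeys = {
  detail: (kbId: string) => ['knowledge', kbId] as const,
  document: (kbId: string, docId: string) => ['knowledge', kbId, 'documents', docId] as const,
}

async function requestJson<T>(url: string, method: string, body?: unknown): Promise<T> {
  const response = await fetch(url, {
    method,
    headers: { 'Content-Type': 'application/json' },
    body: body === undefined ? undefined : JSON.stringify(body),
  })
  const result = await response.json()
  // The API signals failure both via HTTP status and a success flag in the envelope.
  if (!response.ok || !result?.success) throw new Error(result?.error || 'Request failed')
  return result.data as T
}

export function useChunkMutation() {
  const queryClient = useQueryClient()
  return useMutation({
    mutationFn: (p: { knowledgeBaseId: string; documentId: string; content: string }) =>
      requestJson(`/api/knowledge/${p.knowledgeBaseId}/documents/${p.documentId}/chunks`, 'POST', {
        content: p.content,
      }),
    onSuccess: (_, { knowledgeBaseId, documentId }) => {
      // Refetch both the knowledge-base detail and the affected document.
      queryClient.invalidateQueries({ queryKey: knowledgeKeys.detail(knowledgeBaseId) })
      queryClient.invalidateQueries({ queryKey: knowledgeKeys.document(knowledgeBaseId, documentId) })
    },
  })
}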
@@ -203,11 +203,10 @@ function resolveProjectSelector(
): SelectorResolution {
  const serviceId = subBlock.serviceId
  const context = buildBaseContext(args)
-  const selectorId = subBlock.canonicalParamId ?? subBlock.id

  switch (serviceId) {
    case 'linear': {
-      const key: SelectorKey = selectorId === 'teamId' ? 'linear.teams' : 'linear.projects'
+      const key: SelectorKey = subBlock.id === 'teamId' ? 'linear.teams' : 'linear.projects'
      return { key, context, allowSearch: true }
    }
    case 'jira':

@@ -21,8 +21,6 @@ import {
  type BatchToggleEnabledOperation,
  type BatchToggleHandlesOperation,
  type BatchUpdateParentOperation,
-  captureLatestEdges,
-  captureLatestSubBlockValues,
  createOperationEntry,
  runWithUndoRedoRecordingSuspended,
  type UpdateParentOperation,
@@ -30,6 +28,7 @@ import {
} from '@/stores/undo-redo'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
+import { mergeSubblockState } from '@/stores/workflows/utils'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
import type { BlockState } from '@/stores/workflows/workflow/types'

@@ -446,19 +445,34 @@ export function useUndoRedo() {
          break
        }

-        const latestEdges = captureLatestEdges(
-          useWorkflowStore.getState().edges,
-          existingBlockIds
-        )
+        const latestEdges = useWorkflowStore
+          .getState()
+          .edges.filter(
+            (e) => existingBlockIds.includes(e.source) || existingBlockIds.includes(e.target)
+          )
        batchRemoveOp.data.edgeSnapshots = latestEdges

-        const latestSubBlockValues = captureLatestSubBlockValues(
-          useWorkflowStore.getState().blocks,
-          activeWorkflowId,
-          existingBlockIds
-        )
+        const latestSubBlockValues: Record<string, Record<string, unknown>> = {}
+        existingBlockIds.forEach((blockId) => {
+          const merged = mergeSubblockState(
+            useWorkflowStore.getState().blocks,
+            activeWorkflowId,
+            blockId
+          )
+          const block = merged[blockId]
+          if (block?.subBlocks) {
+            const values: Record<string, unknown> = {}
+            Object.entries(block.subBlocks).forEach(([subBlockId, subBlock]) => {
+              if (subBlock.value !== null && subBlock.value !== undefined) {
+                values[subBlockId] = subBlock.value
+              }
+            })
+            if (Object.keys(values).length > 0) {
+              latestSubBlockValues[blockId] = values
+            }
+          }
+        })
        batchRemoveOp.data.subBlockValues = latestSubBlockValues
        ;(entry.operation as BatchAddBlocksOperation).data.subBlockValues = latestSubBlockValues

        addToQueue({
          id: opId,
@@ -1139,20 +1153,6 @@ export function useUndoRedo() {
          break
        }

-        const latestEdges = captureLatestEdges(
-          useWorkflowStore.getState().edges,
-          existingBlockIds
-        )
-        batchOp.data.edgeSnapshots = latestEdges
-
-        const latestSubBlockValues = captureLatestSubBlockValues(
-          useWorkflowStore.getState().blocks,
-          activeWorkflowId,
-          existingBlockIds
-        )
-        batchOp.data.subBlockValues = latestSubBlockValues
-        ;(entry.inverse as BatchAddBlocksOperation).data.subBlockValues = latestSubBlockValues

        addToQueue({
          id: opId,
          operation: {

|
||||
private readonly baseUrl: string
|
||||
|
||||
constructor(options: DocsChunkerOptions = {}) {
|
||||
// Use the existing TextChunker for chunking logic
|
||||
this.textChunker = new TextChunker({
|
||||
chunkSize: options.chunkSize ?? 300, // Max 300 tokens per chunk
|
||||
minCharactersPerChunk: options.minCharactersPerChunk ?? 1,
|
||||
chunkOverlap: options.chunkOverlap ?? 50,
|
||||
})
|
||||
// Use localhost docs in development, production docs otherwise
|
||||
this.baseUrl = options.baseUrl ?? 'https://docs.sim.ai'
|
||||
}
|
||||
|
||||
@@ -72,18 +74,24 @@ export class DocsChunker {
|
||||
const content = await fs.readFile(filePath, 'utf-8')
|
||||
const relativePath = path.relative(basePath, filePath)
|
||||
|
||||
// Parse frontmatter and content
|
||||
const { data: frontmatter, content: markdownContent } = this.parseFrontmatter(content)
|
||||
|
||||
// Extract headers from the content
|
||||
const headers = this.extractHeaders(markdownContent)
|
||||
|
||||
// Generate document URL
|
||||
const documentUrl = this.generateDocumentUrl(relativePath)
|
||||
|
||||
// Split content into chunks
|
||||
const textChunks = await this.splitContent(markdownContent)
|
||||
|
||||
// Generate embeddings for all chunks at once (batch processing)
|
||||
logger.info(`Generating embeddings for ${textChunks.length} chunks in ${relativePath}`)
|
||||
const embeddings = textChunks.length > 0 ? await generateEmbeddings(textChunks) : []
|
||||
const embeddingModel = 'text-embedding-3-small'
|
||||
|
||||
// Convert to DocChunk objects with header context and embeddings
|
||||
const chunks: DocChunk[] = []
|
||||
let currentPosition = 0
|
||||
|
||||
@@ -92,6 +100,7 @@ export class DocsChunker {
|
||||
const chunkStart = currentPosition
|
||||
const chunkEnd = currentPosition + chunkText.length
|
||||
|
||||
// Find the most relevant header for this chunk
|
||||
const relevantHeader = this.findRelevantHeader(headers, chunkStart)
|
||||
|
||||
const chunk: DocChunk = {
|
||||
@@ -177,21 +186,11 @@ export class DocsChunker {
|
||||
|
||||
/**
|
||||
* Generate document URL from relative path
|
||||
* Handles index.mdx files specially - they are served at the parent directory path
|
||||
*/
|
||||
private generateDocumentUrl(relativePath: string): string {
|
||||
// Convert file path to URL path
|
||||
// e.g., "tools/knowledge.mdx" -> "/tools/knowledge"
|
||||
// e.g., "triggers/index.mdx" -> "/triggers" (NOT "/triggers/index")
|
||||
let urlPath = relativePath.replace(/\.mdx$/, '').replace(/\\/g, '/') // Handle Windows paths
|
||||
|
||||
// In fumadocs, index.mdx files are served at the parent directory path
|
||||
// e.g., "triggers/index" -> "triggers"
|
||||
if (urlPath.endsWith('/index')) {
|
||||
urlPath = urlPath.slice(0, -6) // Remove "/index"
|
||||
} else if (urlPath === 'index') {
|
||||
urlPath = '' // Root index.mdx
|
||||
}
|
||||
const urlPath = relativePath.replace(/\.mdx$/, '').replace(/\\/g, '/') // Handle Windows paths
|
||||
|
||||
return `${this.baseUrl}/${urlPath}`
|
||||
}
|
||||
@@ -202,6 +201,7 @@ export class DocsChunker {
|
||||
private findRelevantHeader(headers: HeaderInfo[], position: number): HeaderInfo | null {
|
||||
if (headers.length === 0) return null
|
||||
|
||||
// Find the last header that comes before this position
|
||||
let relevantHeader: HeaderInfo | null = null
|
||||
|
||||
for (const header of headers) {
|
||||
@@ -219,18 +219,23 @@ export class DocsChunker {
|
||||
* Split content into chunks using the existing TextChunker with table awareness
|
||||
*/
|
||||
private async splitContent(content: string): Promise<string[]> {
|
||||
// Clean the content first
|
||||
const cleanedContent = this.cleanContent(content)
|
||||
|
||||
// Detect table boundaries to avoid splitting them
|
||||
const tableBoundaries = this.detectTableBoundaries(cleanedContent)
|
||||
|
||||
// Use the existing TextChunker
|
||||
const chunks = await this.textChunker.chunk(cleanedContent)
|
||||
|
||||
// Post-process chunks to ensure tables aren't split
|
||||
const processedChunks = this.mergeTableChunks(
|
||||
chunks.map((chunk) => chunk.text),
|
||||
tableBoundaries,
|
||||
cleanedContent
|
||||
)
|
||||
|
||||
// Ensure no chunk exceeds 300 tokens
|
||||
const finalChunks = this.enforceSizeLimit(processedChunks)
|
||||
|
||||
return finalChunks
|
||||
@@ -268,6 +273,7 @@ export class DocsChunker {
|
||||
const [, frontmatterText, markdownContent] = match
|
||||
const data: Frontmatter = {}
|
||||
|
||||
// Simple YAML parsing for title and description
|
||||
const lines = frontmatterText.split('\n')
|
||||
for (const line of lines) {
|
||||
const colonIndex = line.indexOf(':')
|
||||
@@ -288,6 +294,7 @@ export class DocsChunker {
|
||||
* Estimate token count (rough approximation)
|
||||
*/
|
||||
private estimateTokens(text: string): number {
|
||||
// Rough approximation: 1 token ≈ 4 characters
|
||||
return Math.ceil(text.length / 4)
|
||||
}
|
||||
|
||||
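A quick worked example of that heuristic: at roughly 4 characters per token, the 300-token chunk budget corresponds to about 1,200 characters.

const estimateTokens = (text: string): number => Math.ceil(text.length / 4)

console.log(estimateTokens('a'.repeat(1200))) // 300 -> exactly at the limit
console.log(estimateTokens('a'.repeat(1201))) // 301 -> would trigger a split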
@@ -304,13 +311,17 @@ export class DocsChunker {
    for (let i = 0; i < lines.length; i++) {
      const line = lines[i].trim()

+      // Detect table start (markdown table row with pipes)
      if (line.includes('|') && line.split('|').length >= 3 && !inTable) {
+        // Check if next line is table separator (contains dashes and pipes)
        const nextLine = lines[i + 1]?.trim()
        if (nextLine?.includes('|') && nextLine.includes('-')) {
          inTable = true
          tableStart = i
        }
-      } else if (inTable && (!line.includes('|') || line === '' || line.startsWith('#'))) {
+      }
+      // Detect table end (empty line or non-table content)
+      else if (inTable && (!line.includes('|') || line === '' || line.startsWith('#'))) {
        tables.push({
          start: this.getCharacterPosition(lines, tableStart),
          end: this.getCharacterPosition(lines, i - 1) + lines[i - 1]?.length || 0,
@@ -319,6 +330,7 @@ export class DocsChunker {
      }
    }

+    // Handle table at end of content
    if (inTable && tableStart >= 0) {
      tables.push({
        start: this.getCharacterPosition(lines, tableStart),
@@ -355,6 +367,7 @@ export class DocsChunker {
      const chunkStart = originalContent.indexOf(chunk, currentPosition)
      const chunkEnd = chunkStart + chunk.length

+      // Check if this chunk intersects with any table
      const intersectsTable = tableBoundaries.some(
        (table) =>
          (chunkStart >= table.start && chunkStart <= table.end) ||
@@ -363,6 +376,7 @@ export class DocsChunker {
      )

      if (intersectsTable) {
+        // Find which table(s) this chunk intersects with
        const affectedTables = tableBoundaries.filter(
          (table) =>
            (chunkStart >= table.start && chunkStart <= table.end) ||
@@ -370,10 +384,12 @@ export class DocsChunker {
            (chunkStart <= table.start && chunkEnd >= table.end)
        )

+        // Create a chunk that includes the complete table(s)
        const minStart = Math.min(chunkStart, ...affectedTables.map((t) => t.start))
        const maxEnd = Math.max(chunkEnd, ...affectedTables.map((t) => t.end))
        const completeChunk = originalContent.slice(minStart, maxEnd)

+        // Only add if we haven't already included this content
        if (!mergedChunks.some((existing) => existing.includes(completeChunk.trim()))) {
          mergedChunks.push(completeChunk.trim())
        }
@@ -384,7 +400,7 @@ export class DocsChunker {
      currentPosition = chunkEnd
    }

-    return mergedChunks.filter((chunk) => chunk.length > 50)
+    return mergedChunks.filter((chunk) => chunk.length > 50) // Filter out tiny chunks
  }

  /**
@@ -397,8 +413,10 @@ export class DocsChunker {
      const tokens = this.estimateTokens(chunk)

      if (tokens <= 300) {
+        // Chunk is within limit
        finalChunks.push(chunk)
      } else {
+        // Chunk is too large - split it
        const lines = chunk.split('\n')
        let currentChunk = ''

@@ -408,6 +426,7 @@ export class DocsChunker {
        if (this.estimateTokens(testChunk) <= 300) {
          currentChunk = testChunk
        } else {
+          // Adding this line would exceed limit
          if (currentChunk.trim()) {
            finalChunks.push(currentChunk.trim())
          }
@@ -415,6 +434,7 @@ export class DocsChunker {
        }
      }

+      // Add final chunk if it has content
      if (currentChunk.trim()) {
        finalChunks.push(currentChunk.trim())
      }

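The table-start rule used above is worth spelling out: a row with pipes only counts as a table header when the following line is a dash separator. A self-contained check:

// Quick check of the table-start heuristic from detectTableBoundaries.
function looksLikeTableStart(line: string, nextLine?: string): boolean {
  const isPipeRow = line.includes('|') && line.split('|').length >= 3
  return isPipeRow && !!nextLine && nextLine.includes('|') && nextLine.includes('-')
}

console.log(looksLikeTableStart('| Name | Age |', '| --- | --- |')) // true
console.log(looksLikeTableStart('a | b', 'plain text'))             // false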
@@ -209,17 +209,13 @@ export class SetGlobalWorkflowVariablesClientTool extends BaseClientTool {
        }
      }

-      // Convert byName (keyed by name) to record keyed by ID for the API
-      const variablesRecord: Record<string, any> = {}
-      for (const v of Object.values(byName)) {
-        variablesRecord[v.id] = v
-      }
+      const variablesArray = Object.values(byName)

-      // POST full variables record to persist
+      // POST full variables array to persist
      const res = await fetch(`/api/workflows/${payload.workflowId}/variables`, {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
-        body: JSON.stringify({ variables: variablesRecord }),
+        body: JSON.stringify({ variables: variablesArray }),
      })
      if (!res.ok) {
        const txt = await res.text().catch(() => '')

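The payload change in miniature: the same variables, encoded two ways. The WorkflowVariable shape below is a simplified stand-in:

interface WorkflowVariable { id: string; name: string; value: unknown }

const byName: Record<string, WorkflowVariable> = {
  apiKey: { id: 'v1', name: 'apiKey', value: 'secret' },
}

// Old payload: a record keyed by variable ID.
const variablesRecord = Object.fromEntries(Object.values(byName).map((v) => [v.id, v]))
// New payload: a flat array; the server derives any keys it needs.
const variablesArray = Object.values(byName)

console.log(JSON.stringify({ variables: variablesRecord })) // {"variables":{"v1":{...}}}
console.log(JSON.stringify({ variables: variablesArray }))  // {"variables":[{...}]}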
@@ -817,8 +817,6 @@ function normalizeResponseFormat(value: any): string {
interface EdgeHandleValidationResult {
  valid: boolean
  error?: string
-  /** The normalized handle to use (e.g., simple 'if' normalized to 'condition-{uuid}') */
-  normalizedHandle?: string
}

/**
@@ -853,6 +851,13 @@ function validateSourceHandleForBlock(
    }

    case 'condition': {
+      if (!sourceHandle.startsWith(EDGE.CONDITION_PREFIX)) {
+        return {
+          valid: false,
+          error: `Invalid source handle "${sourceHandle}" for condition block. Must start with "${EDGE.CONDITION_PREFIX}"`,
+        }
+      }
+
      const conditionsValue = sourceBlock?.subBlocks?.conditions?.value
      if (!conditionsValue) {
        return {
@@ -861,8 +866,6 @@ function validateSourceHandleForBlock(
        }
      }

-      // validateConditionHandle accepts simple format (if, else-if-0, else),
-      // legacy format (condition-{blockId}-if), and internal ID format (condition-{uuid})
      return validateConditionHandle(sourceHandle, sourceBlock.id, conditionsValue)
    }

@@ -876,6 +879,13 @@ function validateSourceHandleForBlock(
    }

    case 'router_v2': {
+      if (!sourceHandle.startsWith(EDGE.ROUTER_PREFIX)) {
+        return {
+          valid: false,
+          error: `Invalid source handle "${sourceHandle}" for router_v2 block. Must start with "${EDGE.ROUTER_PREFIX}"`,
+        }
+      }
+
      const routesValue = sourceBlock?.subBlocks?.routes?.value
      if (!routesValue) {
        return {
@@ -884,8 +894,6 @@ function validateSourceHandleForBlock(
        }
      }

-      // validateRouterHandle accepts simple format (route-0, route-1),
-      // legacy format (router-{blockId}-route-1), and internal ID format (router-{uuid})
      return validateRouterHandle(sourceHandle, sourceBlock.id, routesValue)
    }

@@ -902,12 +910,7 @@ function validateSourceHandleForBlock(

/**
 * Validates condition handle references a valid condition in the block.
- * Accepts multiple formats:
- * - Simple format: "if", "else-if-0", "else-if-1", "else"
- * - Legacy semantic format: "condition-{blockId}-if", "condition-{blockId}-else-if"
- * - Internal ID format: "condition-{conditionId}"
- *
- * Returns the normalized handle (condition-{conditionId}) for storage.
+ * Accepts both internal IDs (condition-blockId-if) and semantic keys (condition-blockId-else-if)
 */
function validateConditionHandle(
  sourceHandle: string,
@@ -940,80 +943,48 @@ function validateConditionHandle(
    }
  }

-  // Build a map of all valid handle formats -> normalized handle (condition-{conditionId})
-  const handleToNormalized = new Map<string, string>()
-  const legacySemanticPrefix = `condition-${blockId}-`
-  let elseIfIndex = 0
+  const validHandles = new Set<string>()
+  const semanticPrefix = `condition-${blockId}-`
+  let elseIfCount = 0

  for (const condition of conditions) {
-    if (!condition.id) continue
+    if (condition.id) {
+      validHandles.add(`condition-${condition.id}`)
+    }

-    const normalizedHandle = `condition-${condition.id}`
    const title = condition.title?.toLowerCase()

-    // Always accept internal ID format
-    handleToNormalized.set(normalizedHandle, normalizedHandle)
-
    if (title === 'if') {
-      // Simple format: "if"
-      handleToNormalized.set('if', normalizedHandle)
-      // Legacy format: "condition-{blockId}-if"
-      handleToNormalized.set(`${legacySemanticPrefix}if`, normalizedHandle)
+      validHandles.add(`${semanticPrefix}if`)
    } else if (title === 'else if') {
-      // Simple format: "else-if-0", "else-if-1", etc. (0-indexed)
-      handleToNormalized.set(`else-if-${elseIfIndex}`, normalizedHandle)
-      // Legacy format: "condition-{blockId}-else-if" for first, "condition-{blockId}-else-if-2" for second
-      if (elseIfIndex === 0) {
-        handleToNormalized.set(`${legacySemanticPrefix}else-if`, normalizedHandle)
-      } else {
-        handleToNormalized.set(
-          `${legacySemanticPrefix}else-if-${elseIfIndex + 1}`,
-          normalizedHandle
-        )
-      }
-      elseIfIndex++
+      elseIfCount++
+      validHandles.add(
+        elseIfCount === 1 ? `${semanticPrefix}else-if` : `${semanticPrefix}else-if-${elseIfCount}`
+      )
    } else if (title === 'else') {
-      // Simple format: "else"
-      handleToNormalized.set('else', normalizedHandle)
-      // Legacy format: "condition-{blockId}-else"
-      handleToNormalized.set(`${legacySemanticPrefix}else`, normalizedHandle)
+      validHandles.add(`${semanticPrefix}else`)
    }
  }

-  const normalizedHandle = handleToNormalized.get(sourceHandle)
-  if (normalizedHandle) {
-    return { valid: true, normalizedHandle }
+  if (validHandles.has(sourceHandle)) {
+    return { valid: true }
  }

-  // Build list of valid simple format options for error message
-  const simpleOptions: string[] = []
-  elseIfIndex = 0
-  for (const condition of conditions) {
-    const title = condition.title?.toLowerCase()
-    if (title === 'if') {
-      simpleOptions.push('if')
-    } else if (title === 'else if') {
-      simpleOptions.push(`else-if-${elseIfIndex}`)
-      elseIfIndex++
-    } else if (title === 'else') {
-      simpleOptions.push('else')
-    }
-  }
+  const validOptions = Array.from(validHandles).slice(0, 5)
+  const moreCount = validHandles.size - validOptions.length
+  let validOptionsStr = validOptions.join(', ')
+  if (moreCount > 0) {
+    validOptionsStr += `, ... and ${moreCount} more`
+  }

  return {
    valid: false,
-    error: `Invalid condition handle "${sourceHandle}". Valid handles: ${simpleOptions.join(', ')}`,
+    error: `Invalid condition handle "${sourceHandle}". Valid handles: ${validOptionsStr}`,
  }
}

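The variant removed above maps every accepted spelling of a handle to one canonical form, so only the normalized handle is ever stored. A minimal sketch of that idea, with a hypothetical buildHandleMap helper:

// Minimal sketch of handle normalization: every accepted alias resolves to one
// canonical handle. buildHandleMap is illustrative, not the project's function.
function buildHandleMap(blockId: string, conditions: Array<{ id: string; title: string }>) {
  const aliases = new Map<string, string>()
  let elseIfIndex = 0
  for (const c of conditions) {
    const canonical = `condition-${c.id}`
    aliases.set(canonical, canonical) // internal ID format always accepted
    const title = c.title.toLowerCase()
    if (title === 'if') {
      aliases.set('if', canonical)
      aliases.set(`condition-${blockId}-if`, canonical)
    } else if (title === 'else if') {
      aliases.set(`else-if-${elseIfIndex++}`, canonical)
    } else if (title === 'else') {
      aliases.set('else', canonical)
    }
  }
  return aliases
}

const map = buildHandleMap('b1', [{ id: 'c1', title: 'if' }, { id: 'c2', title: 'else' }])
console.log(map.get('if')) // 'condition-c1' -> stored in normalized form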
/**
 * Validates router handle references a valid route in the block.
- * Accepts multiple formats:
- * - Simple format: "route-0", "route-1", "route-2" (0-indexed)
- * - Legacy semantic format: "router-{blockId}-route-1" (1-indexed)
- * - Internal ID format: "router-{routeId}"
- *
- * Returns the normalized handle (router-{routeId}) for storage.
+ * Accepts both internal IDs (router-{routeId}) and semantic keys (router-{blockId}-route-1)
 */
function validateRouterHandle(
  sourceHandle: string,
@@ -1046,48 +1017,47 @@ function validateRouterHandle(
    }
  }

-  // Build a map of all valid handle formats -> normalized handle (router-{routeId})
-  const handleToNormalized = new Map<string, string>()
-  const legacySemanticPrefix = `router-${blockId}-`
+  const validHandles = new Set<string>()
+  const semanticPrefix = `router-${blockId}-`

  for (let i = 0; i < routes.length; i++) {
    const route = routes[i]
-    if (!route.id) continue
-
-    const normalizedHandle = `router-${route.id}`
+    // Accept internal ID format: router-{uuid}
+    if (route.id) {
+      validHandles.add(`router-${route.id}`)
+    }

-    // Always accept internal ID format: router-{uuid}
-    handleToNormalized.set(normalizedHandle, normalizedHandle)
-
-    // Simple format: route-0, route-1, etc. (0-indexed)
-    handleToNormalized.set(`route-${i}`, normalizedHandle)
-
-    // Legacy 1-indexed route number format: router-{blockId}-route-1
-    handleToNormalized.set(`${legacySemanticPrefix}route-${i + 1}`, normalizedHandle)
+    // Accept 1-indexed route number format: router-{blockId}-route-1, router-{blockId}-route-2, etc.
+    validHandles.add(`${semanticPrefix}route-${i + 1}`)

    // Accept normalized title format: router-{blockId}-{normalized-title}
    // Normalize: lowercase, replace spaces with dashes, remove special chars
    if (route.title && typeof route.title === 'string') {
      const normalizedTitle = route.title
        .toLowerCase()
        .replace(/\s+/g, '-')
        .replace(/[^a-z0-9-]/g, '')
      if (normalizedTitle) {
-        handleToNormalized.set(`${legacySemanticPrefix}${normalizedTitle}`, normalizedHandle)
+        validHandles.add(`${semanticPrefix}${normalizedTitle}`)
      }
    }
  }

-  const normalizedHandle = handleToNormalized.get(sourceHandle)
-  if (normalizedHandle) {
-    return { valid: true, normalizedHandle }
+  if (validHandles.has(sourceHandle)) {
+    return { valid: true }
  }

-  // Build list of valid simple format options for error message
-  const simpleOptions = routes.map((_, i) => `route-${i}`)
+  const validOptions = Array.from(validHandles).slice(0, 5)
+  const moreCount = validHandles.size - validOptions.length
+  let validOptionsStr = validOptions.join(', ')
+  if (moreCount > 0) {
+    validOptionsStr += `, ... and ${moreCount} more`
+  }

  return {
    valid: false,
-    error: `Invalid router handle "${sourceHandle}". Valid handles: ${simpleOptions.join(', ')}`,
+    error: `Invalid router handle "${sourceHandle}". Valid handles: ${validOptionsStr}`,
  }
}

@@ -1202,13 +1172,10 @@ function createValidatedEdge(
    return false
  }

-  // Use normalized handle if available (e.g., 'if' -> 'condition-{uuid}')
-  const finalSourceHandle = sourceValidation.normalizedHandle || sourceHandle
-
  modifiedState.edges.push({
    id: crypto.randomUUID(),
    source: sourceBlockId,
-    sourceHandle: finalSourceHandle,
+    sourceHandle,
    target: targetBlockId,
    targetHandle,
    type: 'default',
@@ -1217,11 +1184,7 @@ function createValidatedEdge(
  }

/**
- * Adds connections as edges for a block.
- * Supports multiple target formats:
- * - String: "target-block-id"
- * - Object: { block: "target-block-id", handle?: "custom-target-handle" }
- * - Array of strings or objects
+ * Adds connections as edges for a block
 */
function addConnectionsAsEdges(
  modifiedState: any,
@@ -1231,34 +1194,19 @@ function addConnectionsAsEdges(
  skippedItems?: SkippedItem[]
): void {
  Object.entries(connections).forEach(([sourceHandle, targets]) => {
-    if (targets === null) return
-
-    const addEdgeForTarget = (targetBlock: string, targetHandle?: string) => {
+    const targetArray = Array.isArray(targets) ? targets : [targets]
+    targetArray.forEach((targetId: string) => {
      createValidatedEdge(
        modifiedState,
        blockId,
-        targetBlock,
+        targetId,
        sourceHandle,
-        targetHandle || 'target',
+        'target',
        'add_edge',
        logger,
        skippedItems
      )
-    }
-
-    if (typeof targets === 'string') {
-      addEdgeForTarget(targets)
-    } else if (Array.isArray(targets)) {
-      targets.forEach((target: any) => {
-        if (typeof target === 'string') {
-          addEdgeForTarget(target)
-        } else if (target?.block) {
-          addEdgeForTarget(target.block, target.handle)
-        }
-      })
-    } else if (typeof targets === 'object' && targets?.block) {
-      addEdgeForTarget(targets.block, targets.handle)
-    }
+    })
  })
}

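The removed dispatcher accepted strings, objects, and arrays of either. A sketch of that multi-format handling, reduced to a pure normalization step:

// Normalize every accepted connection shape to { block, handle } pairs
// before creating edges; mirrors the removed addEdgeForTarget dispatch.
type Target = string | { block: string; handle?: string }

function normalizeTargets(targets: Target | Target[] | null): Array<{ block: string; handle: string }> {
  if (targets === null) return []
  const list = Array.isArray(targets) ? targets : [targets]
  return list.map((t) =>
    typeof t === 'string' ? { block: t, handle: 'target' } : { block: t.block, handle: t.handle ?? 'target' }
  )
}

console.log(normalizeTargets('b2')) // [{ block: 'b2', handle: 'target' }]
console.log(normalizeTargets([{ block: 'b3', handle: 'loop' }, 'b4']))
// [{ block: 'b3', handle: 'loop' }, { block: 'b4', handle: 'target' }]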
Some files were not shown because too many files have changed in this diff.