v0.5.27: sidebar updates, SSRF patches, GPT-5.2, Stagehand fixes

Waleed
2025-12-11 14:45:25 -08:00
committed by GitHub
27 changed files with 801 additions and 198 deletions

View File

@@ -5,7 +5,7 @@ import path from 'path'
import binaryExtensionsList from 'binary-extensions'
import { type NextRequest, NextResponse } from 'next/server'
import { checkHybridAuth } from '@/lib/auth/hybrid'
-import { validateExternalUrl } from '@/lib/core/security/input-validation'
+import { createPinnedUrl, validateUrlWithDNS } from '@/lib/core/security/input-validation'
import { isSupportedFileType, parseFile } from '@/lib/file-parsers'
import { createLogger } from '@/lib/logs/console/logger'
import { isUsingCloudStorage, type StorageContext, StorageService } from '@/lib/uploads'
@@ -270,7 +270,7 @@ async function handleExternalUrl(
logger.info('Fetching external URL:', url)
logger.info('WorkspaceId for URL save:', workspaceId)
-const urlValidation = validateExternalUrl(url, 'fileUrl')
+const urlValidation = await validateUrlWithDNS(url, 'fileUrl')
if (!urlValidation.isValid) {
logger.warn(`Blocked external URL request: ${urlValidation.error}`)
return {
@@ -346,8 +346,12 @@ async function handleExternalUrl(
}
}
-const response = await fetch(url, {
+const pinnedUrl = createPinnedUrl(url, urlValidation.resolvedIP!)
+const response = await fetch(pinnedUrl, {
signal: AbortSignal.timeout(DOWNLOAD_TIMEOUT_MS),
+headers: {
+Host: urlValidation.originalHostname!,
+},
})
if (!response.ok) {
throw new Error(`Failed to fetch URL: ${response.status} ${response.statusText}`)
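
The hunks above implement validate-then-pin: validate the URL, resolve DNS once, then fetch the resolved IP directly while sending the original hostname in the Host header, so a rebinding DNS server cannot swap in a private address between validation and request time. A minimal sketch using only the two helpers this commit introduces (safeFetch itself is illustrative, not part of the diff):

import { createPinnedUrl, validateUrlWithDNS } from '@/lib/core/security/input-validation'

// Fetch a user-supplied URL without a second DNS lookup that could be rebound.
async function safeFetch(url: string): Promise<Response> {
  const validation = await validateUrlWithDNS(url, 'url')
  if (!validation.isValid) {
    throw new Error(validation.error)
  }
  // Connect to the already-validated IP; keep the original hostname for routing.
  const pinned = createPinnedUrl(url, validation.resolvedIP!)
  return fetch(pinned, { headers: { Host: validation.originalHostname! } })
}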

View File

@@ -4,7 +4,7 @@ import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateInternalToken } from '@/lib/auth/internal'
import { isDev } from '@/lib/core/config/environment'
-import { validateProxyUrl } from '@/lib/core/security/input-validation'
+import { createPinnedUrl, validateUrlWithDNS } from '@/lib/core/security/input-validation'
import { generateRequestId } from '@/lib/core/utils/request'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { createLogger } from '@/lib/logs/console/logger'
@@ -173,7 +173,7 @@ export async function GET(request: Request) {
return createErrorResponse("Missing 'url' parameter", 400)
}
-const urlValidation = validateProxyUrl(targetUrl)
+const urlValidation = await validateUrlWithDNS(targetUrl)
if (!urlValidation.isValid) {
logger.warn(`[${requestId}] Blocked proxy request`, {
url: targetUrl.substring(0, 100),
@@ -211,11 +211,13 @@ export async function GET(request: Request) {
logger.info(`[${requestId}] Proxying ${method} request to: ${targetUrl}`)
try {
-const response = await fetch(targetUrl, {
+const pinnedUrl = createPinnedUrl(targetUrl, urlValidation.resolvedIP!)
+const response = await fetch(pinnedUrl, {
method: method,
headers: {
...getProxyHeaders(),
...customHeaders,
+Host: urlValidation.originalHostname!,
},
body: body || undefined,
})

View File

@@ -33,9 +33,10 @@ export async function executeQuery(
params: unknown[] = []
): Promise<{ rows: unknown[]; rowCount: number }> {
const result = await sql.unsafe(query, params)
+const rowCount = result.count ?? result.length ?? 0
return {
rows: Array.isArray(result) ? result : [result],
-rowCount: Array.isArray(result) ? result.length : result ? 1 : 0,
+rowCount,
}
}
@@ -107,9 +108,10 @@ export async function executeInsert(
const query = `INSERT INTO ${sanitizedTable} (${sanitizedColumns.join(', ')}) VALUES (${placeholders.join(', ')}) RETURNING *`
const result = await sql.unsafe(query, values)
+const rowCount = result.count ?? result.length ?? 0
return {
rows: Array.isArray(result) ? result : [result],
-rowCount: Array.isArray(result) ? result.length : result ? 1 : 0,
+rowCount,
}
}
@@ -130,9 +132,10 @@ export async function executeUpdate(
const query = `UPDATE ${sanitizedTable} SET ${setClause} WHERE ${where} RETURNING *`
const result = await sql.unsafe(query, values)
+const rowCount = result.count ?? result.length ?? 0
return {
rows: Array.isArray(result) ? result : [result],
-rowCount: Array.isArray(result) ? result.length : result ? 1 : 0,
+rowCount,
}
}
@@ -147,8 +150,9 @@ export async function executeDelete(
const query = `DELETE FROM ${sanitizedTable} WHERE ${where} RETURNING *`
const result = await sql.unsafe(query, [])
+const rowCount = result.count ?? result.length ?? 0
return {
rows: Array.isArray(result) ? result : [result],
-rowCount: Array.isArray(result) ? result.length : result ? 1 : 0,
+rowCount,
}
}
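
The repeated rowCount change above reads result.count first, which matches the postgres.js RowList shape: the resolved value is an array of returned rows that also carries the count of rows the statement affected. A sketch of why that matters (assuming the postgres.js client; the table, query, and env var are placeholders):

import postgres from 'postgres'

const sql = postgres(process.env.DATABASE_URL!)

async function deactivateStale(): Promise<number> {
  // Without RETURNING, an UPDATE resolves to zero rows, so result.length is 0
  // even when rows changed; result.count still reports the affected-row count.
  const result = await sql.unsafe('UPDATE users SET active = false WHERE active = true')
  return result.count ?? result.length ?? 0
}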

View File

@@ -496,7 +496,7 @@ export async function POST(request: NextRequest) {
verbose: 1,
logger: (msg) => logger.info(typeof msg === 'string' ? msg : JSON.stringify(msg)),
model: {
-modelName: 'claude-sonnet-4-20250514',
+modelName: 'anthropic/claude-3-7-sonnet-latest',
apiKey: apiKey,
},
})
@@ -704,7 +704,14 @@ The system will substitute actual values when these placeholders are used, keepi
`.trim()
const agent = stagehand.agent({
-model: 'anthropic/claude-sonnet-4-20250514',
+model: {
+modelName: 'anthropic/claude-3-7-sonnet-latest',
+apiKey: apiKey,
+},
+executionModel: {
+modelName: 'anthropic/claude-3-7-sonnet-latest',
+apiKey: apiKey,
+},
systemPrompt: `${agentInstructions}\n\n${additionalContext}`,
})
@@ -795,6 +802,9 @@ The system will substitute actual values when these placeholders are used, keepi
})
let structuredOutput = null
+const hasOutputSchema =
+outputSchema && typeof outputSchema === 'object' && outputSchema !== null
if (agentResult.message) {
try {
let jsonContent = agentResult.message
@@ -807,33 +817,31 @@ The system will substitute actual values when these placeholders are used, keepi
structuredOutput = JSON.parse(jsonContent)
logger.info('Successfully parsed structured output from agent response')
} catch (parseError) {
-logger.error('Failed to parse JSON from agent message', {
-error: parseError,
-message: agentResult.message,
-})
+if (hasOutputSchema) {
+logger.warn('Failed to parse JSON from agent message, attempting fallback extraction', {
+error: parseError,
+})
-if (
-outputSchema &&
-typeof outputSchema === 'object' &&
-outputSchema !== null &&
-stagehand
-) {
-try {
-logger.info('Attempting to extract structured data using Stagehand extract')
-const schemaObj = getSchemaObject(outputSchema)
-const zodSchema = ensureZodObject(logger, schemaObj)
+if (stagehand) {
+try {
+logger.info('Attempting to extract structured data using Stagehand extract')
+const schemaObj = getSchemaObject(outputSchema)
+const zodSchema = ensureZodObject(logger, schemaObj)
-structuredOutput = await stagehand.extract(
-'Extract the requested information from this page according to the schema',
-zodSchema
-)
+structuredOutput = await stagehand.extract(
+'Extract the requested information from this page according to the schema',
+zodSchema
+)
-logger.info('Successfully extracted structured data as fallback', {
-keys: structuredOutput ? Object.keys(structuredOutput) : [],
-})
-} catch (extractError) {
-logger.error('Fallback extraction also failed', { error: extractError })
+logger.info('Successfully extracted structured data as fallback', {
+keys: structuredOutput ? Object.keys(structuredOutput) : [],
+})
+} catch (extractError) {
+logger.error('Fallback extraction also failed', { error: extractError })
+}
}
}
+} else {
+logger.info('Agent returned plain text response (no schema provided)')
+}
}
}
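
Net effect of the restructuring above: JSON parse failures are only treated as failures when an output schema was supplied, and only then does the route fall back to a live page extraction; plain-text answers now pass through with an info log. A reduced sketch of that contract (the function and its parameters are illustrative, not the route's actual helpers):

async function parseAgentMessage(
  message: string,
  schema: object | undefined,
  extractFromPage: (schema: object) => Promise<unknown>
): Promise<unknown> {
  try {
    return JSON.parse(message)
  } catch {
    if (!schema) return null // plain text was expected; not an error
    return extractFromPage(schema) // schema given: retry via page extraction
  }
}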

View File

@@ -86,7 +86,7 @@ export async function POST(request: NextRequest) {
verbose: 1,
logger: (msg) => logger.info(typeof msg === 'string' ? msg : JSON.stringify(msg)),
model: {
-modelName: 'gpt-4o',
+modelName: 'openai/gpt-4o',
apiKey: apiKey,
},
})
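
Both Stagehand routes now use provider-prefixed model ids ('openai/...', 'anthropic/...') with an explicit apiKey. A minimal constructor sketch following the shape shown in these diffs (the env var name is an assumption):

import { Stagehand } from '@browserbasehq/stagehand'

const stagehand = new Stagehand({
  verbose: 1,
  model: {
    modelName: 'openai/gpt-4o', // provider/model format, as above
    apiKey: process.env.OPENAI_API_KEY!, // assumed env var
  },
})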

View File

@@ -10,6 +10,7 @@ import { useBrandConfig } from '@/lib/branding/branding'
import { cn } from '@/lib/core/utils/cn'
import { getTriggersForSidebar, hasTriggerCapability } from '@/lib/workflows/triggers/trigger-utils'
import { searchItems } from '@/app/workspace/[workspaceId]/w/components/sidebar/components/search-modal/search-utils'
+import { SIDEBAR_SCROLL_EVENT } from '@/app/workspace/[workspaceId]/w/components/sidebar/sidebar'
import { getAllBlocks } from '@/blocks'
interface SearchModalProps {
@@ -430,6 +431,12 @@ export function SearchModal({
window.open(item.href, '_blank', 'noopener,noreferrer')
} else {
router.push(item.href)
+// Scroll to the workflow in the sidebar after navigation
+if (item.type === 'workflow') {
+window.dispatchEvent(
+new CustomEvent(SIDEBAR_SCROLL_EVENT, { detail: { itemId: item.id } })
+)
+}
}
}
break

View File

@@ -14,6 +14,7 @@ import {
useItemDrag,
useItemRename,
} from '@/app/workspace/[workspaceId]/w/components/sidebar/hooks'
+import { SIDEBAR_SCROLL_EVENT } from '@/app/workspace/[workspaceId]/w/components/sidebar/sidebar'
import { useDeleteFolder, useDuplicateFolder } from '@/app/workspace/[workspaceId]/w/hooks'
import { useCreateFolder, useUpdateFolder } from '@/hooks/queries/folders'
import { useCreateWorkflow } from '@/hooks/queries/workflows'
@@ -87,6 +88,10 @@ export function FolderItem({ folder, level, hoverHandlers }: FolderItemProps) {
if (result.id) {
router.push(`/workspace/${workspaceId}/w/${result.id}`)
+// Scroll to the newly created workflow
+window.dispatchEvent(
+new CustomEvent(SIDEBAR_SCROLL_EVENT, { detail: { itemId: result.id } })
+)
}
} catch (error) {
// Error already handled by mutation's onError callback
@@ -100,11 +105,17 @@ export function FolderItem({ folder, level, hoverHandlers }: FolderItemProps) {
*/
const handleCreateFolderInFolder = useCallback(async () => {
try {
-await createFolderMutation.mutateAsync({
+const result = await createFolderMutation.mutateAsync({
workspaceId,
name: 'New Folder',
parentId: folder.id,
})
+if (result.id) {
+// Scroll to the newly created folder
+window.dispatchEvent(
+new CustomEvent(SIDEBAR_SCROLL_EVENT, { detail: { itemId: result.id } })
+)
+}
} catch (error) {
logger.error('Failed to create folder:', error)
}

View File

@@ -1,6 +1,6 @@
'use client'
-import { useCallback, useEffect, useMemo, useRef } from 'react'
+import { useCallback, useEffect, useMemo } from 'react'
import clsx from 'clsx'
import { useParams, usePathname } from 'next/navigation'
import { FolderItem } from '@/app/workspace/[workspaceId]/w/components/sidebar/components/workflow-list/components/folder-item/folder-item'
@@ -144,11 +144,8 @@ export function WorkflowList({
[pathname, workspaceId]
)
-// Track last scrolled workflow to avoid redundant scroll checks
-const lastScrolledWorkflowRef = useRef<string | null>(null)
/**
- * Auto-expand folders, select active workflow, and scroll into view if needed.
+ * Auto-expand folders and select active workflow.
*/
useEffect(() => {
if (!workflowId || isLoading || foldersLoading) return
@@ -164,25 +161,6 @@ export function WorkflowList({
if (!selectedWorkflows.has(workflowId)) {
selectOnly(workflowId)
}
-// Skip scroll check if already handled for this workflow
-if (lastScrolledWorkflowRef.current === workflowId) return
-lastScrolledWorkflowRef.current = workflowId
-// Scroll after render only if element is completely off-screen
-requestAnimationFrame(() => {
-const element = document.querySelector(`[data-item-id="${workflowId}"]`)
-const container = scrollContainerRef.current
-if (!element || !container) return
-const { top: elTop, bottom: elBottom } = element.getBoundingClientRect()
-const { top: ctTop, bottom: ctBottom } = container.getBoundingClientRect()
-// Only scroll if completely above or below the visible area
-if (elBottom <= ctTop || elTop >= ctBottom) {
-element.scrollIntoView({ behavior: 'smooth', block: 'center' })
-}
-})
}, [workflowId, activeWorkflowFolderId, isLoading, foldersLoading, getFolderPath, setExpanded])
const renderWorkflowItem = useCallback(

View File

@@ -34,9 +34,13 @@ import { useSearchModalStore } from '@/stores/search-modal/store'
import { MIN_SIDEBAR_WIDTH, useSidebarStore } from '@/stores/sidebar/store'
const logger = createLogger('Sidebar')
-// Feature flag: Billing usage indicator visibility (matches legacy sidebar behavior)
+/** Feature flag for billing usage indicator visibility */
const isBillingEnabled = isTruthy(getEnv('NEXT_PUBLIC_BILLING_ENABLED'))
+/** Event name for sidebar scroll operations - centralized for consistency */
+export const SIDEBAR_SCROLL_EVENT = 'sidebar-scroll-to-item'
/**
* Sidebar component with resizable width that persists across page refreshes.
*
@@ -60,63 +64,79 @@ export function Sidebar() {
const fileInputRef = useRef<HTMLInputElement>(null)
const scrollContainerRef = useRef<HTMLDivElement>(null)
// Session data
const { data: sessionData, isPending: sessionLoading } = useSession()
-// Sidebar state - use store's hydration tracking to prevent SSR mismatch
+/**
+ * Sidebar state from store with hydration tracking to prevent SSR mismatch.
+ * Uses default (expanded) state until hydrated.
+ */
const hasHydrated = useSidebarStore((state) => state._hasHydrated)
const isCollapsedStore = useSidebarStore((state) => state.isCollapsed)
const setIsCollapsed = useSidebarStore((state) => state.setIsCollapsed)
const setSidebarWidth = useSidebarStore((state) => state.setSidebarWidth)
// Use default (expanded) state until hydrated to prevent hydration mismatch
const isCollapsed = hasHydrated ? isCollapsedStore : false
// Determine if we're on a workflow page (only workflow pages allow collapse and resize)
const isOnWorkflowPage = !!workflowId
// Import state
const [isImporting, setIsImporting] = useState(false)
// Workspace import input ref
const workspaceFileInputRef = useRef<HTMLInputElement>(null)
// Workspace import hook
const { isImporting: isImportingWorkspace, handleImportWorkspace: importWorkspace } =
useImportWorkspace()
-const { handleExportWorkspace: exportWorkspace } = useExportWorkspace()
// Workspace export hook
+const { isExporting: isExportingWorkspace, handleExportWorkspace: exportWorkspace } =
+useExportWorkspace()
// Workspace popover state
const [isWorkspaceMenuOpen, setIsWorkspaceMenuOpen] = useState(false)
// Footer navigation modal state
const [isHelpModalOpen, setIsHelpModalOpen] = useState(false)
const [isSettingsModalOpen, setIsSettingsModalOpen] = useState(false)
-// Listen for external events to open help modal
+/** Listens for external events to open help modal */
useEffect(() => {
const handleOpenHelpModal = () => setIsHelpModalOpen(true)
window.addEventListener('open-help-modal', handleOpenHelpModal)
return () => window.removeEventListener('open-help-modal', handleOpenHelpModal)
}, [])
-// Global search modal state
+/** Listens for scroll events and scrolls items into view if off-screen */
+useEffect(() => {
+const handleScrollToItem = (e: CustomEvent<{ itemId: string }>) => {
+const { itemId } = e.detail
+if (!itemId) return
+const tryScroll = (retriesLeft: number) => {
+requestAnimationFrame(() => {
+const element = document.querySelector(`[data-item-id="${itemId}"]`)
+const container = scrollContainerRef.current
+if (!element || !container) {
+if (retriesLeft > 0) tryScroll(retriesLeft - 1)
+return
+}
+const { top: elTop, bottom: elBottom } = element.getBoundingClientRect()
+const { top: ctTop, bottom: ctBottom } = container.getBoundingClientRect()
+if (elBottom <= ctTop || elTop >= ctBottom) {
+element.scrollIntoView({ behavior: 'smooth', block: 'center' })
+}
+})
+}
+tryScroll(10)
+}
+window.addEventListener(SIDEBAR_SCROLL_EVENT, handleScrollToItem as EventListener)
+return () =>
+window.removeEventListener(SIDEBAR_SCROLL_EVENT, handleScrollToItem as EventListener)
+}, [])
const {
isOpen: isSearchModalOpen,
setOpen: setIsSearchModalOpen,
open: openSearchModal,
} = useSearchModalStore()
// Workspace management hook
const {
workspaces,
activeWorkspace,
isWorkspacesLoading,
fetchWorkspaces,
isWorkspaceValid,
switchWorkspace,
handleCreateWorkspace,
isCreatingWorkspace,
@@ -127,10 +147,8 @@ export function Sidebar() {
sessionUserId: sessionData?.user?.id,
})
-// Sidebar resize hook
const { handleMouseDown } = useSidebarResize()
-// Workflow operations hook
const {
regularWorkflows,
workflowsLoading,
@@ -138,17 +156,14 @@ export function Sidebar() {
handleCreateWorkflow: createWorkflow,
} = useWorkflowOperations({ workspaceId })
-// Folder operations hook
const { isCreatingFolder, handleCreateFolder: createFolder } = useFolderOperations({
workspaceId,
})
-// Duplicate workspace hook
const { handleDuplicateWorkspace: duplicateWorkspace } = useDuplicateWorkspace({
getWorkspaceId: () => workspaceId,
})
-// Prepare data for search modal
const searchModalWorkflows = useMemo(
() =>
regularWorkflows.map((workflow) => ({
@@ -172,7 +187,6 @@ export function Sidebar() {
[workspaces, workspaceId]
)
-// Footer navigation items
const footerNavigationItems = useMemo(
() => [
{
@@ -209,116 +223,85 @@ export function Sidebar() {
[workspaceId]
)
-// Combined loading state
const isLoading = workflowsLoading || sessionLoading
+const initialScrollDoneRef = useRef<string | null>(null)
-/**
- * Scrolls a newly created element into view if completely off-screen.
- * Uses requestAnimationFrame to sync with render, then scrolls.
- */
-const scrollToElement = useCallback((elementId: string) => {
+/** Scrolls to active workflow on initial load or workspace switch */
+useEffect(() => {
+if (!workflowId || workflowsLoading || initialScrollDoneRef.current === workflowId) return
+initialScrollDoneRef.current = workflowId
requestAnimationFrame(() => {
-const element = document.querySelector(`[data-item-id="${elementId}"]`)
-const container = scrollContainerRef.current
-if (!element || !container) return
-const { top: elTop, bottom: elBottom } = element.getBoundingClientRect()
-const { top: ctTop, bottom: ctBottom } = container.getBoundingClientRect()
-// Only scroll if element is completely off-screen
-if (elBottom <= ctTop || elTop >= ctBottom) {
-element.scrollIntoView({ behavior: 'smooth', block: 'center' })
-}
+window.dispatchEvent(
+new CustomEvent(SIDEBAR_SCROLL_EVENT, { detail: { itemId: workflowId } })
+)
})
-}, [])
+}, [workflowId, workflowsLoading])
-/**
- * Force sidebar to minimum width and ensure it's expanded when not on a workflow page
- */
+/** Forces sidebar to minimum width and ensures it's expanded when not on a workflow page */
useEffect(() => {
if (!isOnWorkflowPage) {
-// Ensure sidebar is always expanded on non-workflow pages
if (isCollapsed) {
setIsCollapsed(false)
}
-// Force sidebar to minimum width
setSidebarWidth(MIN_SIDEBAR_WIDTH)
}
}, [isOnWorkflowPage, isCollapsed, setIsCollapsed, setSidebarWidth])
-/**
- * Handle create workflow - creates workflow and scrolls to it
- */
+/** Creates a workflow and scrolls to it */
const handleCreateWorkflow = useCallback(async () => {
const workflowId = await createWorkflow()
if (workflowId) {
-scrollToElement(workflowId)
+window.dispatchEvent(
+new CustomEvent(SIDEBAR_SCROLL_EVENT, { detail: { itemId: workflowId } })
+)
}
-}, [createWorkflow, scrollToElement])
+}, [createWorkflow])
-/**
- * Handle create folder - creates folder and scrolls to it
- */
+/** Creates a folder and scrolls to it */
const handleCreateFolder = useCallback(async () => {
const folderId = await createFolder()
if (folderId) {
-scrollToElement(folderId)
+window.dispatchEvent(new CustomEvent(SIDEBAR_SCROLL_EVENT, { detail: { itemId: folderId } }))
}
-}, [createFolder, scrollToElement])
+}, [createFolder])
-/**
- * Handle import workflow button click - triggers file input
- */
+/** Triggers file input for workflow import */
const handleImportWorkflow = useCallback(() => {
-if (fileInputRef.current) {
-fileInputRef.current.click()
-}
+fileInputRef.current?.click()
}, [])
-/**
- * Handle workspace switch from popover menu
- */
+/** Handles workspace switch from popover menu */
const handleWorkspaceSwitch = useCallback(
async (workspace: { id: string; name: string; ownerId: string; role?: string }) => {
if (workspace.id === workspaceId) {
setIsWorkspaceMenuOpen(false)
return
}
await switchWorkspace(workspace)
setIsWorkspaceMenuOpen(false)
},
[workspaceId, switchWorkspace]
)
-/**
- * Handle sidebar collapse toggle
- */
+/** Toggles sidebar collapse state */
const handleToggleCollapse = useCallback(() => {
setIsCollapsed(!isCollapsed)
}, [isCollapsed, setIsCollapsed])
-/**
- * Handle click on sidebar elements to revert to active workflow selection
- */
+/** Reverts to active workflow selection when clicking sidebar background */
const handleSidebarClick = useCallback(
(e: React.MouseEvent<HTMLElement>) => {
const target = e.target as HTMLElement
// Revert to active workflow selection if clicking on sidebar background, header, or search area
// But not on interactive elements like buttons or links
if (target.tagName === 'BUTTON' || target.closest('button, [role="button"], a')) {
return
}
const { selectOnly, clearSelection } = useFolderStore.getState()
workflowId ? selectOnly(workflowId) : clearSelection()
},
[workflowId]
)
-/**
- * Handle workspace rename
- */
+/** Renames a workspace */
const handleRenameWorkspace = useCallback(
async (workspaceIdToRename: string, newName: string) => {
await updateWorkspaceName(workspaceIdToRename, newName)
@@ -326,9 +309,7 @@ export function Sidebar() {
[updateWorkspaceName]
)
-/**
- * Handle workspace delete
- */
+/** Deletes a workspace */
const handleDeleteWorkspace = useCallback(
async (workspaceIdToDelete: string) => {
const workspaceToDelete = workspaces.find((w) => w.id === workspaceIdToDelete)
@@ -339,9 +320,7 @@ export function Sidebar() {
[workspaces, confirmDeleteWorkspace]
)
-/**
- * Handle workspace duplicate
- */
+/** Duplicates a workspace */
const handleDuplicateWorkspace = useCallback(
async (_workspaceIdToDuplicate: string, workspaceName: string) => {
await duplicateWorkspace(workspaceName)
@@ -349,9 +328,7 @@ export function Sidebar() {
[duplicateWorkspace]
)
-/**
- * Handle workspace export
- */
+/** Exports a workspace */
const handleExportWorkspace = useCallback(
async (workspaceIdToExport: string, workspaceName: string) => {
await exportWorkspace(workspaceIdToExport, workspaceName)
@@ -359,18 +336,12 @@ export function Sidebar() {
[exportWorkspace]
)
-/**
- * Handle workspace import button click
- */
+/** Triggers file input for workspace import */
const handleImportWorkspace = useCallback(() => {
-if (workspaceFileInputRef.current) {
-workspaceFileInputRef.current.click()
-}
+workspaceFileInputRef.current?.click()
}, [])
-/**
- * Handle workspace import file change
- */
+/** Handles workspace import file selection */
const handleWorkspaceFileChange = useCallback(
async (event: React.ChangeEvent<HTMLInputElement>) => {
const files = event.target.files
@@ -379,7 +350,6 @@ export function Sidebar() {
const zipFile = files[0]
await importWorkspace(zipFile)
-// Reset file input
if (event.target) {
event.target.value = ''
}
@@ -387,12 +357,7 @@ export function Sidebar() {
[importWorkspace]
)
-/**
- * Resolve a workspace id from either params or the current URL path.
- *
- * This mirrors existing behavior but is wrapped in a helper to keep command
- * handlers small and focused.
- */
+/** Resolves workspace ID from params or URL path */
const resolveWorkspaceIdFromPath = useCallback((): string | undefined => {
if (workspaceId) return workspaceId
if (typeof window === 'undefined') return undefined
@@ -404,12 +369,7 @@ export function Sidebar() {
return parts[idx + 1]
}, [workspaceId])
-/**
- * Register global sidebar commands using the central commands registry.
- *
- * Only commands declared in the registry can be registered here. The
- * registry owns ids and shortcut strings; this component supplies handlers.
- */
+/** Registers global sidebar commands with the central commands registry */
useRegisterGlobalCommands(() =>
createCommands([
{
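
The scroll changes across these sidebar files share one contract: producers (search modal, folder item, sidebar buttons) dispatch SIDEBAR_SCROLL_EVENT with an itemId, and the sidebar owns the single listener, retrying across frames because the target may not be rendered yet right after router.push. A condensed sketch of both sides (the detail payload typing follows this diff; requestSidebarScroll is illustrative):

export const SIDEBAR_SCROLL_EVENT = 'sidebar-scroll-to-item'

// Producer: any component can request a scroll by item id.
export function requestSidebarScroll(itemId: string): void {
  window.dispatchEvent(new CustomEvent(SIDEBAR_SCROLL_EVENT, { detail: { itemId } }))
}

// Consumer: the sidebar's listener resolves the element and scrolls it into view.
window.addEventListener(SIDEBAR_SCROLL_EVENT, ((e: CustomEvent<{ itemId: string }>) => {
  const el = document.querySelector(`[data-item-id="${e.detail.itemId}"]`)
  el?.scrollIntoView({ behavior: 'smooth', block: 'center' })
}) as EventListener)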

View File

@@ -32,11 +32,23 @@ export const BrowserUseBlock: BlockConfig<BrowserUseResponse> = {
title: 'Model',
type: 'dropdown',
options: [
-{ label: 'gpt-4o', id: 'gpt-4o' },
-{ label: 'gemini-2.0-flash', id: 'gemini-2.0-flash' },
-{ label: 'gemini-2.0-flash-lite', id: 'gemini-2.0-flash-lite' },
-{ label: 'claude-3-7-sonnet-20250219', id: 'claude-3-7-sonnet-20250219' },
-{ label: 'llama-4-maverick-17b-128e-instruct', id: 'llama-4-maverick-17b-128e-instruct' },
+{ label: 'Browser Use LLM', id: 'browser-use-llm' },
+{ label: 'GPT-4o', id: 'gpt-4o' },
+{ label: 'GPT-4o Mini', id: 'gpt-4o-mini' },
+{ label: 'GPT-4.1', id: 'gpt-4.1' },
+{ label: 'GPT-4.1 Mini', id: 'gpt-4.1-mini' },
+{ label: 'O3', id: 'o3' },
+{ label: 'O4 Mini', id: 'o4-mini' },
+{ label: 'Gemini 2.5 Flash', id: 'gemini-2.5-flash' },
+{ label: 'Gemini 2.5 Pro', id: 'gemini-2.5-pro' },
+{ label: 'Gemini 3 Pro Preview', id: 'gemini-3-pro-preview' },
+{ label: 'Gemini Flash Latest', id: 'gemini-flash-latest' },
+{ label: 'Gemini Flash Lite Latest', id: 'gemini-flash-lite-latest' },
+{ label: 'Claude 3.7 Sonnet', id: 'claude-3-7-sonnet-20250219' },
+{ label: 'Claude Sonnet 4', id: 'claude-sonnet-4-20250514' },
+{ label: 'Claude Sonnet 4.5', id: 'claude-sonnet-4-5-20250929' },
+{ label: 'Claude Opus 4.5', id: 'claude-opus-4-5-20251101' },
+{ label: 'Llama 4 Maverick', id: 'llama-4-maverick-17b-128e-instruct' },
],
},
{

View File

@@ -86,6 +86,27 @@ export class EdgeManager {
this.deactivatedEdges.clear()
}
+/**
+ * Clear deactivated edges for a set of nodes (used when restoring loop state for next iteration).
+ * This ensures error/success edges can be re-evaluated on each iteration.
+ */
+clearDeactivatedEdgesForNodes(nodeIds: Set<string>): void {
+const edgesToRemove: string[] = []
+for (const edgeKey of this.deactivatedEdges) {
+// Edge key format is "sourceId-targetId-handle"
+// Check if either source or target is in the nodeIds set
+for (const nodeId of nodeIds) {
+if (edgeKey.startsWith(`${nodeId}-`) || edgeKey.includes(`-${nodeId}-`)) {
+edgesToRemove.push(edgeKey)
+break
+}
+}
+}
+for (const edgeKey of edgesToRemove) {
+this.deactivatedEdges.delete(edgeKey)
+}
+}
private shouldActivateEdge(edge: DAGEdge, output: NormalizedBlockOutput): boolean {
const handle = edge.sourceHandle
@@ -180,7 +201,7 @@ export class EdgeManager {
const sourceNode = this.dag.nodes.get(sourceId)
if (!sourceNode) continue
-for (const [_, edge] of sourceNode.outgoingEdges) {
+for (const [, edge] of sourceNode.outgoingEdges) {
if (edge.target === node.id) {
const edgeKey = this.createEdgeKey(sourceId, edge.target, edge.sourceHandle)
if (!this.deactivatedEdges.has(edgeKey)) {
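
The key matching in clearDeactivatedEdgesForNodes works positionally, since an edge key embeds its endpoints. A worked example with placeholder ids, assuming the "sourceId-targetId-handle" format noted in the comment above:

const deactivated = new Set(['blockA-blockB-error', 'blockB-blockC-success'])
const loopNodeIds = new Set(['blockB'])

for (const key of [...deactivated]) {
  for (const nodeId of loopNodeIds) {
    // 'blockB-blockC-success' matches as source (prefix); 'blockA-blockB-error' as target (infix)
    if (key.startsWith(`${nodeId}-`) || key.includes(`-${nodeId}-`)) {
      deactivated.delete(key)
      break
    }
  }
}
// Both keys touch blockB, so both edges become eligible for re-evaluation.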

View File

@@ -279,6 +279,14 @@ export class ExecutionEngine {
})
this.addMultipleToQueue(readyNodes)
+// Check for dynamically added nodes (e.g., from parallel expansion)
+if (this.context.pendingDynamicNodes && this.context.pendingDynamicNodes.length > 0) {
+const dynamicNodes = this.context.pendingDynamicNodes
+this.context.pendingDynamicNodes = []
+logger.info('Adding dynamically expanded parallel nodes', { dynamicNodes })
+this.addMultipleToQueue(dynamicNodes)
+}
}
private buildPausedResult(startTime: number): ExecutionResult {
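
The pending-node check above is a swap-then-drain: snapshot the shared list and reset it before scheduling, so nodes queued while scheduling runs land in a fresh list for the next pass. As a standalone sketch (names are illustrative):

function drainPendingDynamicNodes(
  context: { pendingDynamicNodes?: string[] },
  schedule: (nodeIds: string[]) => void
): void {
  const dynamicNodes = context.pendingDynamicNodes ?? []
  context.pendingDynamicNodes = [] // reset first so later pushes are not lost
  if (dynamicNodes.length > 0) schedule(dynamicNodes)
}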

View File

@@ -64,9 +64,11 @@ export class DAGExecutor {
const resolver = new VariableResolver(this.workflow, this.workflowVariables, state)
const loopOrchestrator = new LoopOrchestrator(dag, state, resolver)
const parallelOrchestrator = new ParallelOrchestrator(dag, state)
+parallelOrchestrator.setResolver(resolver)
const allHandlers = createBlockHandlers()
const blockExecutor = new BlockExecutor(allHandlers, resolver, this.contextExtensions, state)
const edgeManager = new EdgeManager(dag)
+loopOrchestrator.setEdgeManager(edgeManager)
const nodeOrchestrator = new NodeExecutionOrchestrator(
dag,
state,

View File

@@ -22,6 +22,7 @@ export interface ParallelScope {
branchOutputs: Map<number, NormalizedBlockOutput[]>
completedCount: number
totalExpectedNodes: number
+items?: any[]
}
export class ExecutionState implements BlockStateController {

View File

@@ -822,7 +822,7 @@ export class AgentBlockHandler implements BlockHandler {
provider: providerId,
model,
systemPrompt: validMessages ? undefined : inputs.systemPrompt,
-context: stringifyJSON(messages),
+context: validMessages ? undefined : stringifyJSON(messages),
tools: formattedTools,
temperature: inputs.temperature,
maxTokens: inputs.maxTokens,

View File

@@ -1,6 +1,7 @@
import { createLogger } from '@/lib/logs/console/logger'
import { buildLoopIndexCondition, DEFAULTS, EDGE } from '@/executor/constants'
import type { DAG } from '@/executor/dag/builder'
+import type { EdgeManager } from '@/executor/execution/edge-manager'
import type { LoopScope } from '@/executor/execution/state'
import type { BlockStateController } from '@/executor/execution/types'
import type { ExecutionContext, NormalizedBlockOutput } from '@/executor/types'
@@ -26,12 +27,18 @@ export interface LoopContinuationResult {
}
export class LoopOrchestrator {
+private edgeManager: EdgeManager | null = null
constructor(
private dag: DAG,
private state: BlockStateController,
private resolver: VariableResolver
) {}
+setEdgeManager(edgeManager: EdgeManager): void {
+this.edgeManager = edgeManager
+}
initializeLoopScope(ctx: ExecutionContext, loopId: string): LoopScope {
const loopConfig = this.dag.loopConfigs.get(loopId) as SerializedLoop | undefined
if (!loopConfig) {
@@ -216,7 +223,11 @@ export class LoopOrchestrator {
const loopNodes = loopConfig.nodes
const allLoopNodeIds = new Set([sentinelStartId, sentinelEndId, ...loopNodes])
let restoredCount = 0
+// Clear deactivated edges for loop nodes so error/success edges can be re-evaluated
+if (this.edgeManager) {
+this.edgeManager.clearDeactivatedEdgesForNodes(allLoopNodeIds)
+}
for (const nodeId of allLoopNodeIds) {
const nodeToRestore = this.dag.nodes.get(nodeId)
if (!nodeToRestore) continue
@@ -224,7 +235,7 @@ export class LoopOrchestrator {
for (const [potentialSourceId, potentialSourceNode] of this.dag.nodes) {
if (!allLoopNodeIds.has(potentialSourceId)) continue
-for (const [_, edge] of potentialSourceNode.outgoingEdges) {
+for (const [, edge] of potentialSourceNode.outgoingEdges) {
if (edge.target === nodeId) {
const isBackwardEdge =
edge.sourceHandle === EDGE.LOOP_CONTINUE ||
@@ -232,7 +243,6 @@ export class LoopOrchestrator {
if (!isBackwardEdge) {
nodeToRestore.incomingEdges.add(potentialSourceId)
restoredCount++
}
}
}

View File

@@ -53,6 +53,20 @@ export class NodeExecutionOrchestrator {
}
}
+// Initialize parallel scope BEFORE execution so <parallel.currentItem> can be resolved
+const parallelId = node.metadata.parallelId
+if (parallelId && !this.parallelOrchestrator.getParallelScope(ctx, parallelId)) {
+const totalBranches = node.metadata.branchTotal || 1
+const parallelConfig = this.dag.parallelConfigs.get(parallelId)
+const nodesInParallel = (parallelConfig as any)?.nodes?.length || 1
+this.parallelOrchestrator.initializeParallelScope(
+ctx,
+parallelId,
+totalBranches,
+nodesInParallel
+)
+}
if (node.metadata.isSentinel) {
const output = this.handleSentinel(ctx, node)
const isFinalOutput = node.outgoingEdges.size === 0
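
The new block above is a lazy get-or-init: the first branch node of a parallel creates the scope, so <parallel.currentItem> references resolve during execution rather than only at aggregation. The underlying shape, as a generic sketch (names are illustrative):

function getOrInitScope<T>(scopes: Map<string, T>, id: string, init: () => T): T {
  let scope = scopes.get(id)
  if (scope === undefined) {
    scope = init() // only the first node for this parallel pays the init cost
    scopes.set(id, scope)
  }
  return scope
}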

View File

@@ -1,15 +1,17 @@
import { createLogger } from '@/lib/logs/console/logger'
-import type { DAG } from '@/executor/dag/builder'
+import type { DAG, DAGNode } from '@/executor/dag/builder'
import type { ParallelScope } from '@/executor/execution/state'
import type { BlockStateWriter } from '@/executor/execution/types'
import type { ExecutionContext, NormalizedBlockOutput } from '@/executor/types'
import type { ParallelConfigWithNodes } from '@/executor/types/parallel'
import {
buildBranchNodeId,
calculateBranchCount,
extractBaseBlockId,
extractBranchIndex,
parseDistributionItems,
} from '@/executor/utils/subflow-utils'
+import type { VariableResolver } from '@/executor/variables/resolver'
import type { SerializedParallel } from '@/serializer/types'
const logger = createLogger('ParallelOrchestrator')
@@ -29,31 +31,325 @@ export interface ParallelAggregationResult {
}
export class ParallelOrchestrator {
+private resolver: VariableResolver | null = null
constructor(
private dag: DAG,
private state: BlockStateWriter
) {}
+setResolver(resolver: VariableResolver): void {
+this.resolver = resolver
+}
initializeParallelScope(
ctx: ExecutionContext,
parallelId: string,
totalBranches: number,
terminalNodesCount = 1
): ParallelScope {
+const parallelConfig = this.dag.parallelConfigs.get(parallelId)
+const items = parallelConfig ? this.resolveDistributionItems(ctx, parallelConfig) : undefined
+// If we have more items than pre-built branches, expand the DAG
+const actualBranchCount = items && items.length > totalBranches ? items.length : totalBranches
const scope: ParallelScope = {
parallelId,
-totalBranches,
+totalBranches: actualBranchCount,
branchOutputs: new Map(),
completedCount: 0,
-totalExpectedNodes: totalBranches * terminalNodesCount,
+totalExpectedNodes: actualBranchCount * terminalNodesCount,
+items,
}
if (!ctx.parallelExecutions) {
ctx.parallelExecutions = new Map()
}
ctx.parallelExecutions.set(parallelId, scope)
+// Dynamically expand DAG if needed
+if (items && items.length > totalBranches && parallelConfig) {
+logger.info('Dynamically expanding parallel branches', {
+parallelId,
+existingBranches: totalBranches,
+targetBranches: items.length,
+itemsCount: items.length,
+})
+const newEntryNodes = this.expandParallelBranches(
+parallelId,
+parallelConfig,
+totalBranches,
+items.length
+)
+logger.info('Parallel expansion complete', {
+parallelId,
+newEntryNodes,
+totalNodesInDag: this.dag.nodes.size,
+})
+// Add new entry nodes to pending dynamic nodes so the engine can schedule them
+if (newEntryNodes.length > 0) {
+if (!ctx.pendingDynamicNodes) {
+ctx.pendingDynamicNodes = []
+}
+ctx.pendingDynamicNodes.push(...newEntryNodes)
+}
+} else {
+logger.info('No parallel expansion needed', {
+parallelId,
+itemsLength: items?.length,
+totalBranches,
+hasParallelConfig: !!parallelConfig,
+})
+}
return scope
}
/**
* Dynamically expand the DAG to include additional branch nodes when
* the resolved item count exceeds the pre-built branch count.
*/
private expandParallelBranches(
parallelId: string,
config: SerializedParallel,
existingBranchCount: number,
targetBranchCount: number
): string[] {
// Get all blocks that are part of this parallel
const blocksInParallel = config.nodes
const blocksInParallelSet = new Set(blocksInParallel)
// Step 1: Create all new nodes first
for (const blockId of blocksInParallel) {
const branch0NodeId = buildBranchNodeId(blockId, 0)
const templateNode = this.dag.nodes.get(branch0NodeId)
if (!templateNode) {
logger.warn('Template node not found for parallel expansion', { blockId, branch0NodeId })
continue
}
for (let branchIndex = existingBranchCount; branchIndex < targetBranchCount; branchIndex++) {
const newNodeId = buildBranchNodeId(blockId, branchIndex)
const newNode: DAGNode = {
id: newNodeId,
block: {
...templateNode.block,
id: newNodeId,
},
incomingEdges: new Set(),
outgoingEdges: new Map(),
metadata: {
...templateNode.metadata,
branchIndex,
branchTotal: targetBranchCount,
originalBlockId: blockId,
},
}
this.dag.nodes.set(newNodeId, newNode)
}
}
// Step 2: Wire edges between the new branch nodes
this.wireExpandedBranchEdges(
parallelId,
blocksInParallel,
existingBranchCount,
targetBranchCount
)
// Step 3: Update metadata on existing nodes to reflect new total
this.updateExistingBranchMetadata(blocksInParallel, existingBranchCount, targetBranchCount)
// Step 4: Identify entry nodes AFTER edges are wired
// Entry nodes are those with no INTERNAL incoming edges (edges from outside parallel don't count)
const newEntryNodes: string[] = []
for (const blockId of blocksInParallel) {
const branch0NodeId = buildBranchNodeId(blockId, 0)
const templateNode = this.dag.nodes.get(branch0NodeId)
if (!templateNode) continue
// Check if template has any INTERNAL incoming edges
let hasInternalIncoming = false
for (const incomingId of templateNode.incomingEdges) {
const baseIncomingId = extractBaseBlockId(incomingId)
if (blocksInParallelSet.has(baseIncomingId)) {
hasInternalIncoming = true
break
}
}
// If no internal incoming edges, the new branches of this block are entry nodes
if (!hasInternalIncoming) {
for (
let branchIndex = existingBranchCount;
branchIndex < targetBranchCount;
branchIndex++
) {
newEntryNodes.push(buildBranchNodeId(blockId, branchIndex))
}
}
}
return newEntryNodes
}
/**
* Wire edges between expanded branch nodes by replicating the edge pattern from branch 0.
* Handles both internal edges (within the parallel) and exit edges (to blocks after the parallel).
*/
private wireExpandedBranchEdges(
parallelId: string,
blocksInParallel: string[],
existingBranchCount: number,
targetBranchCount: number
): void {
const blocksInParallelSet = new Set(blocksInParallel)
// For each block, look at branch 0's outgoing edges and replicate for new branches
for (const blockId of blocksInParallel) {
const branch0NodeId = buildBranchNodeId(blockId, 0)
const branch0Node = this.dag.nodes.get(branch0NodeId)
if (!branch0Node) continue
// Replicate outgoing edges for each new branch
for (const [, edge] of branch0Node.outgoingEdges) {
// Use edge.target (the actual target node ID), not the Map key which may be a formatted edge ID
const actualTargetNodeId = edge.target
// Extract the base target block ID
const baseTargetId = extractBaseBlockId(actualTargetNodeId)
// Check if target is inside or outside the parallel
const isInternalEdge = blocksInParallelSet.has(baseTargetId)
for (
let branchIndex = existingBranchCount;
branchIndex < targetBranchCount;
branchIndex++
) {
const sourceNodeId = buildBranchNodeId(blockId, branchIndex)
const sourceNode = this.dag.nodes.get(sourceNodeId)
if (!sourceNode) continue
if (isInternalEdge) {
// Internal edge: wire to the corresponding branch of the target
const newTargetNodeId = buildBranchNodeId(baseTargetId, branchIndex)
const targetNode = this.dag.nodes.get(newTargetNodeId)
if (targetNode) {
sourceNode.outgoingEdges.set(newTargetNodeId, {
target: newTargetNodeId,
sourceHandle: edge.sourceHandle,
targetHandle: edge.targetHandle,
})
targetNode.incomingEdges.add(sourceNodeId)
}
} else {
// Exit edge: wire to the same external target (blocks after the parallel)
// All branches point to the same external node
const externalTargetNode = this.dag.nodes.get(actualTargetNodeId)
if (externalTargetNode) {
sourceNode.outgoingEdges.set(actualTargetNodeId, {
target: actualTargetNodeId,
sourceHandle: edge.sourceHandle,
targetHandle: edge.targetHandle,
})
// Add incoming edge from this new branch to the external node
externalTargetNode.incomingEdges.add(sourceNodeId)
}
}
}
}
}
}
/**
* Update existing branch nodes' metadata to reflect the new total branch count.
*/
private updateExistingBranchMetadata(
blocksInParallel: string[],
existingBranchCount: number,
targetBranchCount: number
): void {
for (const blockId of blocksInParallel) {
for (let branchIndex = 0; branchIndex < existingBranchCount; branchIndex++) {
const nodeId = buildBranchNodeId(blockId, branchIndex)
const node = this.dag.nodes.get(nodeId)
if (node) {
node.metadata.branchTotal = targetBranchCount
}
}
}
}
/**
* Resolve distribution items at runtime, handling references like <previousBlock.items>
* This mirrors how LoopOrchestrator.resolveForEachItems works.
*/
private resolveDistributionItems(ctx: ExecutionContext, config: SerializedParallel): any[] {
const rawItems = config.distribution
if (rawItems === undefined || rawItems === null) {
return []
}
// Already an array - return as-is
if (Array.isArray(rawItems)) {
return rawItems
}
// Object - convert to entries array (consistent with loop forEach behavior)
if (typeof rawItems === 'object') {
return Object.entries(rawItems)
}
// String handling
if (typeof rawItems === 'string') {
// Resolve references at runtime using the variable resolver
if (rawItems.startsWith('<') && rawItems.endsWith('>') && this.resolver) {
const resolved = this.resolver.resolveSingleReference(ctx, '', rawItems)
if (Array.isArray(resolved)) {
return resolved
}
if (typeof resolved === 'object' && resolved !== null) {
return Object.entries(resolved)
}
logger.warn('Distribution reference did not resolve to array or object', {
rawItems,
resolved,
})
return []
}
// Try to parse as JSON
try {
const normalized = rawItems.replace(/'/g, '"')
const parsed = JSON.parse(normalized)
if (Array.isArray(parsed)) {
return parsed
}
if (typeof parsed === 'object' && parsed !== null) {
return Object.entries(parsed)
}
return []
} catch (error) {
logger.error('Failed to parse distribution items', { rawItems, error })
return []
}
}
return []
}
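
A standalone mirror of the normalization rules in resolveDistributionItems above (same branches, minus the reference resolution and logging):

function normalizeItems(raw: unknown): unknown[] {
  if (raw === undefined || raw === null) return []
  if (Array.isArray(raw)) return raw // arrays pass through
  if (typeof raw === 'object') return Object.entries(raw as object) // objects become [key, value] pairs
  if (typeof raw === 'string') {
    try {
      const parsed = JSON.parse(raw.replace(/'/g, '"')) // normalize single quotes first
      if (Array.isArray(parsed)) return parsed
      if (typeof parsed === 'object' && parsed !== null) return Object.entries(parsed)
    } catch {
      return []
    }
  }
  return []
}
// normalizeItems({ a: 1 }) -> [['a', 1]]; normalizeItems("['x', 'y']") -> ['x', 'y']
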
handleParallelBranchCompletion(
ctx: ExecutionContext,
parallelId: string,

View File

@@ -190,6 +190,7 @@ export interface ExecutionContext {
completedCount: number
totalExpectedNodes: number
parallelType?: 'count' | 'collection'
+items?: any[]
}
>
@@ -223,6 +224,9 @@ export interface ExecutionContext {
// Cancellation support
isCancelled?: boolean
+// Dynamically added nodes that need to be scheduled (e.g., from parallel expansion)
+pendingDynamicNodes?: string[]
}
export interface ExecutionResult {

View File

@@ -49,7 +49,10 @@ export class ParallelResolver implements Resolver {
return undefined
}
-const distributionItems = this.getDistributionItems(parallelConfig)
+// First try to get items from the parallel scope (resolved at runtime)
+// This is the same pattern as LoopResolver reading from loopScope.items
+const parallelScope = context.executionContext.parallelExecutions?.get(parallelId)
+const distributionItems = parallelScope?.items ?? this.getDistributionItems(parallelConfig)
let value: any
switch (property) {

View File

@@ -1,5 +1,6 @@
import { describe, expect, it } from 'vitest'
import {
+createPinnedUrl,
sanitizeForLogging,
validateAlphanumericId,
validateEnum,
@@ -7,6 +8,7 @@ import {
validateHostname,
validateNumericId,
validatePathSegment,
+validateUrlWithDNS,
validateUUID,
} from '@/lib/core/security/input-validation'
@@ -588,3 +590,83 @@ describe('sanitizeForLogging', () => {
expect(result).toBe(input)
})
})
describe('validateUrlWithDNS', () => {
describe('basic validation', () => {
it('should reject invalid URLs', async () => {
const result = await validateUrlWithDNS('not-a-url')
expect(result.isValid).toBe(false)
expect(result.error).toContain('valid URL')
})
it('should reject http:// URLs', async () => {
const result = await validateUrlWithDNS('http://example.com')
expect(result.isValid).toBe(false)
expect(result.error).toContain('https://')
})
it('should reject localhost URLs', async () => {
const result = await validateUrlWithDNS('https://localhost/api')
expect(result.isValid).toBe(false)
expect(result.error).toContain('localhost')
})
it('should reject private IP URLs', async () => {
const result = await validateUrlWithDNS('https://192.168.1.1/api')
expect(result.isValid).toBe(false)
expect(result.error).toContain('private IP')
})
it('should reject null', async () => {
const result = await validateUrlWithDNS(null)
expect(result.isValid).toBe(false)
})
it('should reject empty string', async () => {
const result = await validateUrlWithDNS('')
expect(result.isValid).toBe(false)
})
})
describe('DNS resolution', () => {
it('should accept valid public URLs and return resolved IP', async () => {
const result = await validateUrlWithDNS('https://example.com')
expect(result.isValid).toBe(true)
expect(result.resolvedIP).toBeDefined()
expect(result.originalHostname).toBe('example.com')
})
it('should reject URLs that resolve to private IPs', async () => {
const result = await validateUrlWithDNS('https://localhost.localdomain')
expect(result.isValid).toBe(false)
})
it('should reject unresolvable hostnames', async () => {
const result = await validateUrlWithDNS('https://this-domain-does-not-exist-xyz123.invalid')
expect(result.isValid).toBe(false)
expect(result.error).toContain('could not be resolved')
})
})
})
describe('createPinnedUrl', () => {
it('should replace hostname with IP', () => {
const result = createPinnedUrl('https://example.com/api/data', '93.184.216.34')
expect(result).toBe('https://93.184.216.34/api/data')
})
it('should preserve port if specified', () => {
const result = createPinnedUrl('https://example.com:8443/api', '93.184.216.34')
expect(result).toBe('https://93.184.216.34:8443/api')
})
it('should preserve query string', () => {
const result = createPinnedUrl('https://example.com/api?foo=bar&baz=qux', '93.184.216.34')
expect(result).toBe('https://93.184.216.34/api?foo=bar&baz=qux')
})
it('should preserve path', () => {
const result = createPinnedUrl('https://example.com/a/b/c/d', '93.184.216.34')
expect(result).toBe('https://93.184.216.34/a/b/c/d')
})
})

View File

@@ -1,3 +1,4 @@
+import dns from 'dns/promises'
import { createLogger } from '@/lib/logs/console/logger'
const logger = createLogger('InputValidation')
@@ -850,3 +851,110 @@ export function validateProxyUrl(
): ValidationResult {
return validateExternalUrl(url, paramName)
}
/**
* Checks if an IP address is private or reserved (not routable on the public internet)
*/
function isPrivateOrReservedIP(ip: string): boolean {
const patterns = [
/^127\./, // Loopback
/^10\./, // Private Class A
/^172\.(1[6-9]|2[0-9]|3[0-1])\./, // Private Class B
/^192\.168\./, // Private Class C
/^169\.254\./, // Link-local
/^0\./, // Current network
/^100\.(6[4-9]|[7-9][0-9]|1[0-1][0-9]|12[0-7])\./, // Carrier-grade NAT
/^192\.0\.0\./, // IETF Protocol Assignments
/^192\.0\.2\./, // TEST-NET-1
/^198\.51\.100\./, // TEST-NET-2
/^203\.0\.113\./, // TEST-NET-3
/^224\./, // Multicast
/^240\./, // Reserved
/^255\./, // Broadcast
/^::1$/, // IPv6 loopback
/^fe80:/i, // IPv6 link-local
/^fc00:/i, // IPv6 unique local
/^fd00:/i, // IPv6 unique local
/^::ffff:(127\.|10\.|172\.(1[6-9]|2[0-9]|3[0-1])\.|192\.168\.|169\.254\.)/i, // IPv4-mapped IPv6
]
return patterns.some((pattern) => pattern.test(ip))
}
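
Spot-checking the patterns above against representative addresses (expected results derived from the regexes; 93.184.216.34 is the example.com address the tests earlier rely on):

// isPrivateOrReservedIP('10.0.0.8')           -> true   (RFC 1918)
// isPrivateOrReservedIP('100.64.0.1')         -> true   (carrier-grade NAT, 100.64.0.0/10)
// isPrivateOrReservedIP('::ffff:192.168.0.5') -> true   (IPv4-mapped IPv6)
// isPrivateOrReservedIP('93.184.216.34')      -> false  (public)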
/**
* Result type for async URL validation with resolved IP
*/
export interface AsyncValidationResult extends ValidationResult {
resolvedIP?: string
originalHostname?: string
}
/**
* Validates a URL and resolves its DNS to prevent SSRF via DNS rebinding
*
* This function:
* 1. Performs basic URL validation (protocol, format)
* 2. Resolves the hostname to an IP address
* 3. Validates the resolved IP is not private/reserved
* 4. Returns the resolved IP for use in the actual request
*
* @param url - The URL to validate
* @param paramName - Name of the parameter for error messages
* @returns AsyncValidationResult with resolved IP for DNS pinning
*/
export async function validateUrlWithDNS(
url: string | null | undefined,
paramName = 'url'
): Promise<AsyncValidationResult> {
const basicValidation = validateExternalUrl(url, paramName)
if (!basicValidation.isValid) {
return basicValidation
}
const parsedUrl = new URL(url!)
const hostname = parsedUrl.hostname
try {
const { address } = await dns.lookup(hostname)
if (isPrivateOrReservedIP(address)) {
logger.warn('URL resolves to blocked IP address', {
paramName,
hostname,
resolvedIP: address,
})
return {
isValid: false,
error: `${paramName} resolves to a blocked IP address`,
}
}
return {
isValid: true,
resolvedIP: address,
originalHostname: hostname,
}
} catch (error) {
logger.warn('DNS lookup failed for URL', {
paramName,
hostname,
error: error instanceof Error ? error.message : String(error),
})
return {
isValid: false,
error: `${paramName} hostname could not be resolved`,
}
}
}
/**
* Creates a fetch URL that uses a resolved IP address to prevent DNS rebinding
*
* @param originalUrl - The original URL
* @param resolvedIP - The resolved IP address to use
* @returns The URL with IP substituted for hostname
*/
export function createPinnedUrl(originalUrl: string, resolvedIP: string): string {
const parsed = new URL(originalUrl)
const port = parsed.port ? `:${parsed.port}` : ''
return `${parsed.protocol}//${resolvedIP}${port}${parsed.pathname}${parsed.search}`
}

View File

@@ -146,11 +146,17 @@ export class SnapshotService implements ISnapshotService {
const normalizedBlocks: Record<string, any> = {}
for (const [blockId, block] of Object.entries(state.blocks || {})) {
// Skip position as it doesn't affect functionality
-const { position, ...blockWithoutPosition } = block
+const { position, layout, height, ...blockWithoutLayoutFields } = block
+// Also exclude width/height from data object (container dimensions from autolayout)
+const {
+width: _dataWidth,
+height: _dataHeight,
+...dataRest
+} = blockWithoutLayoutFields.data || {}
// Handle subBlocks with detailed comparison (same as hasWorkflowChanged)
-const subBlocks = blockWithoutPosition.subBlocks || {}
+const subBlocks = blockWithoutLayoutFields.subBlocks || {}
const normalizedSubBlocks: Record<string, any> = {}
for (const [subBlockId, subBlock] of Object.entries(subBlocks)) {
@@ -168,7 +174,8 @@ export class SnapshotService implements ISnapshotService {
}
normalizedBlocks[blockId] = {
-...blockWithoutPosition,
+...blockWithoutLayoutFields,
+data: dataRest,
subBlocks: normalizedSubBlocks,
}
}
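
This normalization and the deploy-change check below now strip the same layout-only fields; condensed, the rule is (a sketch of the rule, not the service code itself):

// Drop fields only autolayout touches, so visual churn never reads as a functional change.
function stripLayoutFields(block: Record<string, any>): Record<string, any> {
  const { position, layout, height, ...rest } = block
  const { width: _w, height: _h, ...data } = rest.data ?? {}
  return { ...rest, data }
}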

View File

@@ -255,22 +255,48 @@ export function hasWorkflowChanged(
const currentBlock = currentState.blocks[blockId]
const deployedBlock = deployedState.blocks[blockId]
-// Destructure and exclude non-functional fields
-const { position: _currentPos, subBlocks: currentSubBlocks = {}, ...currentRest } = currentBlock
+// Destructure and exclude non-functional fields:
+// - position: visual positioning only
+// - subBlocks: handled separately below
+// - layout: contains measuredWidth/measuredHeight from autolayout
+// - height: block height measurement from autolayout
+const {
+position: _currentPos,
+subBlocks: currentSubBlocks = {},
+layout: _currentLayout,
+height: _currentHeight,
+...currentRest
+} = currentBlock
const {
position: _deployedPos,
subBlocks: deployedSubBlocks = {},
+layout: _deployedLayout,
+height: _deployedHeight,
...deployedRest
} = deployedBlock
+// Also exclude width/height from data object (container dimensions from autolayout)
+const {
+width: _currentDataWidth,
+height: _currentDataHeight,
+...currentDataRest
+} = currentRest.data || {}
+const {
+width: _deployedDataWidth,
+height: _deployedDataHeight,
+...deployedDataRest
+} = deployedRest.data || {}
normalizedCurrentBlocks[blockId] = {
...currentRest,
+data: currentDataRest,
subBlocks: undefined,
}
normalizedDeployedBlocks[blockId] = {
...deployedRest,
+data: deployedDataRest,
subBlocks: undefined,
}

View File

@@ -79,6 +79,7 @@ const nextConfig: NextConfig = {
'pino',
'pino-pretty',
'thread-stream',
+'@browserbasehq/stagehand',
],
experimental: {
optimizeCss: true,

View File

@@ -120,6 +120,24 @@ export const PROVIDER_DEFINITIONS: Record<string, ProviderDefinition> = {
},
contextWindow: 128000,
},
{
id: 'gpt-5.2',
pricing: {
input: 1.75,
cachedInput: 0.175,
output: 14.0,
updatedAt: '2025-12-11',
},
capabilities: {
reasoningEffort: {
values: ['none', 'low', 'medium', 'high'],
},
verbosity: {
values: ['low', 'medium', 'high'],
},
},
contextWindow: 400000,
},
{
id: 'gpt-5.1',
pricing: {
@@ -355,6 +373,24 @@ export const PROVIDER_DEFINITIONS: Record<string, ProviderDefinition> = {
},
contextWindow: 128000,
},
{
id: 'azure/gpt-5.2',
pricing: {
input: 1.75,
cachedInput: 0.175,
output: 14.0,
updatedAt: '2025-12-11',
},
capabilities: {
reasoningEffort: {
values: ['none', 'low', 'medium', 'high'],
},
verbosity: {
values: ['low', 'medium', 'high'],
},
},
contextWindow: 400000,
},
{
id: 'azure/gpt-5.1',
pricing: {
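
The GPT-5.2 entries mirror the GPT-5.1 shape with updated pricing and a 400k context window. Assuming the figures are USD per million tokens (the usual convention for tables like this; worth verifying against the pricing consumers), a request cost works out as:

const gpt52 = { input: 1.75, cachedInput: 0.175, output: 14.0 } // from the entry above

function costUSD(inputTokens: number, outputTokens: number): number {
  return (inputTokens / 1_000_000) * gpt52.input + (outputTokens / 1_000_000) * gpt52.output
}
// costUSD(400_000, 10_000) ≈ 0.84 USD: a full context read plus a 10k-token completion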

View File

@@ -195,8 +195,6 @@ async function flushSubblockUpdate(
sock.emit('operation-confirmed', { operationId: opId, serverTimestamp: Date.now() })
}
})
-logger.debug(`Flushed subblock update ${workflowId}: ${blockId}.${subblockId}`)
} else {
pending.opToSocket.forEach((socketId, opId) => {
const sock = (roomManager as any).io?.sockets?.sockets?.get(socketId)