Mirror of https://github.com/simstudioai/sim.git (synced 2026-01-21 04:48:00 -05:00)

Compare commits: feat/copil ... fix/deploy (15 commits)
| Author | SHA1 | Date |
|---|---|---|
|  | 098b9eff8c |  |
|  | 1d450578c8 |  |
|  | c6d408c65b |  |
|  | 16716ea26a |  |
|  | 563098ca0a |  |
|  | 1f1f015031 |  |
|  | 4afb245fa2 |  |
|  | 8344d68ca8 |  |
|  | a26a1a9737 |  |
|  | 689037a300 |  |
|  | 07f0c01dc4 |  |
|  | e4ad31bb6b |  |
|  | 84691fc873 |  |
|  | 2daf34386e |  |
|  | ac991d4b54 |  |
35  .claude/rules/emcn-components.md  Normal file
@@ -0,0 +1,35 @@
---
paths:
  - "apps/sim/components/emcn/**"
---

# EMCN Components

Import from `@/components/emcn`, never from subpaths (except CSS files).

## CVA vs Direct Styles

**Use CVA when:** 2+ variants (primary/secondary, sm/md/lg)

```tsx
const buttonVariants = cva('base-classes', {
  variants: { variant: { default: '...', primary: '...' } }
})
export { Button, buttonVariants }
```

**Use direct className when:** Single consistent style, no variations

```tsx
function Label({ className, ...props }) {
  return <Primitive className={cn('style-classes', className)} {...props} />
}
```

## Rules

- Use Radix UI primitives for accessibility
- Export component and variants (if using CVA)
- TSDoc with usage examples
- Consistent tokens: `font-medium`, `text-[12px]`, `rounded-[4px]`
- `transition-colors` for hover states
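For illustration, a hypothetical `Badge` component applying the rules above; the component name, class names, and the Radix `Slot` usage are assumptions, not part of this changeset:

```tsx
import type * as React from 'react'
import { Slot } from '@radix-ui/react-slot'
import { cva, type VariantProps } from 'class-variance-authority'
import { cn } from '@/lib/utils'

/**
 * Badge renders a small status label.
 *
 * @example
 * <Badge variant="primary">Active</Badge>
 */
const badgeVariants = cva(
  'inline-flex items-center rounded-[4px] font-medium text-[12px] transition-colors',
  {
    variants: {
      variant: {
        default: 'bg-muted text-muted-foreground',
        primary: 'bg-primary text-primary-foreground',
      },
    },
    defaultVariants: { variant: 'default' },
  }
)

interface BadgeProps
  extends React.HTMLAttributes<HTMLSpanElement>,
    VariantProps<typeof badgeVariants> {
  asChild?: boolean
}

function Badge({ className, variant, asChild = false, ...props }: BadgeProps) {
  // Radix Slot lets callers swap the rendered element while keeping the styles
  const Comp = asChild ? Slot : 'span'
  return <Comp className={cn(badgeVariants({ variant }), className)} {...props} />
}

export { Badge, badgeVariants }
```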
13  .claude/rules/global.md  Normal file
@@ -0,0 +1,13 @@
# Global Standards

## Logging
Import `createLogger` from `sim/logger`. Use `logger.info`, `logger.warn`, `logger.error` instead of `console.log`.

## Comments
Use TSDoc for documentation. No `====` separators. No non-TSDoc comments.

## Styling
Never update global styles. Keep all styling local to components.

## Package Manager
Use `bun` and `bunx`, not `npm` and `npx`.
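The logging rule above as a minimal sketch; the `WorkflowService` scope name is hypothetical, and elsewhere in this changeset the import path appears as `@sim/logger` rather than `sim/logger`:

```typescript
import { createLogger } from 'sim/logger'

// One named logger per module; the name appears alongside each log entry
const logger = createLogger('WorkflowService')

export async function runWorkflow(workflowId: string) {
  logger.info('Workflow execution started', { workflowId })
  try {
    // ... execute the workflow ...
    logger.info('Workflow execution finished', { workflowId })
  } catch (error) {
    logger.error('Workflow execution failed', {
      workflowId,
      error: error instanceof Error ? error.message : 'Unknown error',
    })
    throw error
  }
}
```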
56  .claude/rules/sim-architecture.md  Normal file
@@ -0,0 +1,56 @@
---
paths:
  - "apps/sim/**"
---

# Sim App Architecture

## Core Principles
1. **Single Responsibility**: Each component, hook, store has one clear purpose
2. **Composition Over Complexity**: Break down complex logic into smaller pieces
3. **Type Safety First**: TypeScript interfaces for all props, state, return types
4. **Predictable State**: Zustand for global state, useState for UI-only concerns

## Root-Level Structure

```
apps/sim/
├── app/          # Next.js app router (pages, API routes)
├── blocks/       # Block definitions and registry
├── components/   # Shared UI (emcn/, ui/)
├── executor/     # Workflow execution engine
├── hooks/        # Shared hooks (queries/, selectors/)
├── lib/          # App-wide utilities
├── providers/    # LLM provider integrations
├── stores/       # Zustand stores
├── tools/        # Tool definitions
└── triggers/     # Trigger definitions
```

## Feature Organization

Features live under `app/workspace/[workspaceId]/`:

```
feature/
├── components/   # Feature components
├── hooks/        # Feature-scoped hooks
├── utils/        # Feature-scoped utilities (2+ consumers)
├── feature.tsx   # Main component
└── page.tsx      # Next.js page entry
```

## Naming Conventions
- **Components**: PascalCase (`WorkflowList`)
- **Hooks**: `use` prefix (`useWorkflowOperations`)
- **Files**: kebab-case (`workflow-list.tsx`)
- **Stores**: `stores/feature/store.ts`
- **Constants**: SCREAMING_SNAKE_CASE
- **Interfaces**: PascalCase with suffix (`WorkflowListProps`)

## Utils Rules

- **Never create `utils.ts` for single consumer** - inline it
- **Create `utils.ts` when** 2+ files need the same helper
- **Check existing sources** before duplicating (`lib/` has many utilities)
- **Location**: `lib/` (app-wide) → `feature/utils/` (feature-scoped) → inline (single-use)
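A sketch of how the naming conventions above combine in one file; the `workflow-list` feature, the hook's return shape, and the constant are illustrative assumptions:

```tsx
// apps/sim/app/workspace/[workspaceId]/workflows/components/workflow-list.tsx
// File: kebab-case. Component: PascalCase. Props interface: PascalCase + suffix.
import { useWorkflowOperations } from '@/hooks/use-workflow-operations'

// Constants: SCREAMING_SNAKE_CASE
const MAX_VISIBLE_WORKFLOWS = 25

interface WorkflowListProps {
  workspaceId: string
}

export function WorkflowList({ workspaceId }: WorkflowListProps) {
  // Hooks: `use` prefix
  const { workflows } = useWorkflowOperations(workspaceId)

  return (
    <ul>
      {workflows.slice(0, MAX_VISIBLE_WORKFLOWS).map((workflow) => (
        <li key={workflow.id}>{workflow.name}</li>
      ))}
    </ul>
  )
}
```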
48  .claude/rules/sim-components.md  Normal file
@@ -0,0 +1,48 @@
---
paths:
  - "apps/sim/**/*.tsx"
---

# Component Patterns

## Structure Order

```typescript
'use client' // Only if using hooks

// Imports (external → internal)
// Constants at module level
const CONFIG = { SPACING: 8 } as const

// Props interface
interface ComponentProps {
  requiredProp: string
  optionalProp?: boolean
}

export function Component({ requiredProp, optionalProp = false }: ComponentProps) {
  // a. Refs
  // b. External hooks (useParams, useRouter)
  // c. Store hooks
  // d. Custom hooks
  // e. Local state
  // f. useMemo
  // g. useCallback
  // h. useEffect
  // i. Return JSX
}
```

## Rules

1. `'use client'` only when using React hooks
2. Always define props interface
3. Extract constants with `as const`
4. Semantic HTML (`aside`, `nav`, `article`)
5. Optional chain callbacks: `onAction?.(id)`

## Component Extraction

**Extract when:** 50+ lines, used in 2+ files, or has own state/logic

**Keep inline when:** < 10 lines, single use, purely presentational
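A small hypothetical component following the structure order and rules above (all names and classes are illustrative):

```tsx
'use client' // Needed only because this component uses useState

import { useState } from 'react'

const SECTION_CONFIG = { MAX_ITEMS: 5 } as const

interface SidebarSectionProps {
  title: string
  items: { id: string; label: string }[]
  onSelect?: (id: string) => void
}

export function SidebarSection({ title, items, onSelect }: SidebarSectionProps) {
  const [isOpen, setIsOpen] = useState(true)

  return (
    // Semantic HTML: aside/nav rather than generic divs
    <aside>
      <button onClick={() => setIsOpen((open) => !open)}>{title}</button>
      {isOpen && (
        <nav>
          {items.slice(0, SECTION_CONFIG.MAX_ITEMS).map((item) => (
            // Optional chaining keeps the callback prop truly optional
            <button key={item.id} onClick={() => onSelect?.(item.id)}>
              {item.label}
            </button>
          ))}
        </nav>
      )}
    </aside>
  )
}
```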
55  .claude/rules/sim-hooks.md  Normal file
@@ -0,0 +1,55 @@
---
paths:
  - "apps/sim/**/use-*.ts"
  - "apps/sim/**/hooks/**/*.ts"
---

# Hook Patterns

## Structure

```typescript
interface UseFeatureProps {
  id: string
  onSuccess?: (result: Result) => void
}

export function useFeature({ id, onSuccess }: UseFeatureProps) {
  // 1. Refs for stable dependencies
  const idRef = useRef(id)
  const onSuccessRef = useRef(onSuccess)

  // 2. State
  const [data, setData] = useState<Data | null>(null)
  const [isLoading, setIsLoading] = useState(false)

  // 3. Sync refs
  useEffect(() => {
    idRef.current = id
    onSuccessRef.current = onSuccess
  }, [id, onSuccess])

  // 4. Operations (useCallback with empty deps when using refs)
  const fetchData = useCallback(async () => {
    setIsLoading(true)
    try {
      const result = await fetch(`/api/${idRef.current}`).then(r => r.json())
      setData(result)
      onSuccessRef.current?.(result)
    } finally {
      setIsLoading(false)
    }
  }, [])

  return { data, isLoading, fetchData }
}
```

## Rules

1. Single responsibility per hook
2. Props interface required
3. Refs for stable callback dependencies
4. Wrap returned functions in useCallback
5. Always try/catch async operations
6. Track loading/error states
62  .claude/rules/sim-imports.md  Normal file
@@ -0,0 +1,62 @@
---
paths:
  - "apps/sim/**/*.ts"
  - "apps/sim/**/*.tsx"
---

# Import Patterns

## Absolute Imports

**Always use absolute imports.** Never use relative imports.

```typescript
// ✓ Good
import { useWorkflowStore } from '@/stores/workflows/store'
import { Button } from '@/components/ui/button'

// ✗ Bad
import { useWorkflowStore } from '../../../stores/workflows/store'
```

## Barrel Exports

Use barrel exports (`index.ts`) when a folder has 3+ exports. Import from barrel, not individual files.

```typescript
// ✓ Good
import { Dashboard, Sidebar } from '@/app/workspace/[workspaceId]/logs/components'

// ✗ Bad
import { Dashboard } from '@/app/workspace/[workspaceId]/logs/components/dashboard/dashboard'
```

## No Re-exports

Do not re-export from non-barrel files. Import directly from the source.

```typescript
// ✓ Good - import from where it's declared
import { CORE_TRIGGER_TYPES } from '@/stores/logs/filters/types'

// ✗ Bad - re-exporting in utils.ts then importing from there
import { CORE_TRIGGER_TYPES } from '@/app/workspace/.../utils'
```

## Import Order

1. React/core libraries
2. External libraries
3. UI components (`@/components/emcn`, `@/components/ui`)
4. Utilities (`@/lib/...`)
5. Stores (`@/stores/...`)
6. Feature imports
7. CSS imports

## Type Imports

Use `type` keyword for type-only imports:

```typescript
import type { WorkflowLog } from '@/stores/logs/types'
```
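An import block following the order above; the specific modules are examples and the CSS path is a hypothetical placeholder:

```typescript
// 1. React/core libraries
import { useMemo } from 'react'
// 2. External libraries
import { useQuery } from '@tanstack/react-query'
// 3. UI components
import { Button } from '@/components/emcn'
// 4. Utilities
import { cn } from '@/lib/utils'
// 5. Stores
import { useWorkflowStore } from '@/stores/workflows/store'
// 6. Feature imports
import { Dashboard } from '@/app/workspace/[workspaceId]/logs/components'
// 7. CSS imports
import '@/app/workspace/[workspaceId]/logs/logs.css'
```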
209  .claude/rules/sim-integrations.md  Normal file
@@ -0,0 +1,209 @@
---
paths:
  - "apps/sim/tools/**"
  - "apps/sim/blocks/**"
  - "apps/sim/triggers/**"
---

# Adding Integrations

## Overview

Adding a new integration typically requires:
1. **Tools** - API operations (`tools/{service}/`)
2. **Block** - UI component (`blocks/blocks/{service}.ts`)
3. **Icon** - SVG icon (`components/icons.tsx`)
4. **Trigger** (optional) - Webhooks/polling (`triggers/{service}/`)

Always look up the service's API docs first.

## 1. Tools (`tools/{service}/`)

```
tools/{service}/
├── index.ts      # Export all tools
├── types.ts      # Params/response types
├── {action}.ts   # Individual tool (e.g., send_message.ts)
└── ...
```

**Tool file structure:**

```typescript
// tools/{service}/{action}.ts
import type { {Service}Params, {Service}Response } from '@/tools/{service}/types'
import type { ToolConfig } from '@/tools/types'

export const {service}{Action}Tool: ToolConfig<{Service}Params, {Service}Response> = {
  id: '{service}_{action}',
  name: '{Service} {Action}',
  description: 'What this tool does',
  version: '1.0.0',
  oauth: { required: true, provider: '{service}' }, // if OAuth
  params: { /* param definitions */ },
  request: {
    url: '/api/tools/{service}/{action}',
    method: 'POST',
    headers: () => ({ 'Content-Type': 'application/json' }),
    body: (params) => ({ ...params }),
  },
  transformResponse: async (response) => {
    const data = await response.json()
    if (!data.success) throw new Error(data.error)
    return { success: true, output: data.output }
  },
  outputs: { /* output definitions */ },
}
```
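For example, substituting a hypothetical `acme` service with a `send_message` action into the template above; all `acme`-specific names are assumptions, and param/output definitions are left as placeholders exactly as in the template:

```typescript
// tools/acme/send_message.ts
import type { AcmeParams, AcmeResponse } from '@/tools/acme/types'
import type { ToolConfig } from '@/tools/types'

export const acmeSendMessageTool: ToolConfig<AcmeParams, AcmeResponse> = {
  id: 'acme_send_message',
  name: 'Acme Send Message',
  description: 'Send a message through the Acme API',
  version: '1.0.0',
  params: { /* param definitions */ },
  request: {
    url: '/api/tools/acme/send_message',
    method: 'POST',
    headers: () => ({ 'Content-Type': 'application/json' }),
    body: (params) => ({ ...params }),
  },
  transformResponse: async (response) => {
    const data = await response.json()
    if (!data.success) throw new Error(data.error)
    return { success: true, output: data.output }
  },
  outputs: { /* output definitions */ },
}
```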
**Register in `tools/registry.ts`:**

```typescript
import { {service}{Action}Tool } from '@/tools/{service}'
// Add to registry object
{service}_{action}: {service}{Action}Tool,
```

## 2. Block (`blocks/blocks/{service}.ts`)

```typescript
import { {Service}Icon } from '@/components/icons'
import type { BlockConfig } from '@/blocks/types'
import type { {Service}Response } from '@/tools/{service}/types'

export const {Service}Block: BlockConfig<{Service}Response> = {
  type: '{service}',
  name: '{Service}',
  description: 'Short description',
  longDescription: 'Detailed description',
  category: 'tools',
  bgColor: '#hexcolor',
  icon: {Service}Icon,
  subBlocks: [ /* see SubBlock Properties below */ ],
  tools: {
    access: ['{service}_{action}', ...],
    config: {
      tool: (params) => `{service}_${params.operation}`,
      params: (params) => ({ ...params }),
    },
  },
  inputs: { /* input definitions */ },
  outputs: { /* output definitions */ },
}
```

### SubBlock Properties

```typescript
{
  id: 'fieldName',            // Unique identifier
  title: 'Field Label',       // UI label
  type: 'short-input',        // See SubBlock Types below
  placeholder: 'Hint text',
  required: true,             // See Required below
  condition: { ... },         // See Condition below
  dependsOn: ['otherField'],  // See DependsOn below
  mode: 'basic',              // 'basic' | 'advanced' | 'both' | 'trigger'
}
```

**SubBlock Types:** `short-input`, `long-input`, `dropdown`, `code`, `switch`, `slider`, `oauth-input`, `channel-selector`, `user-selector`, `file-upload`, etc.

### `condition` - Show/hide based on another field

```typescript
// Show when operation === 'send'
condition: { field: 'operation', value: 'send' }

// Show when operation is 'send' OR 'read'
condition: { field: 'operation', value: ['send', 'read'] }

// Show when operation !== 'send'
condition: { field: 'operation', value: 'send', not: true }

// Complex: NOT in list AND another condition
condition: {
  field: 'operation',
  value: ['list_channels', 'list_users'],
  not: true,
  and: { field: 'destinationType', value: 'dm', not: true }
}
```

### `required` - Field validation

```typescript
// Always required
required: true

// Conditionally required (same syntax as condition)
required: { field: 'operation', value: 'send' }
```

### `dependsOn` - Clear field when dependencies change

```typescript
// Clear when credential changes
dependsOn: ['credential']

// Clear when authMethod changes AND (credential OR botToken) changes
dependsOn: { all: ['authMethod'], any: ['credential', 'botToken'] }
```

### `mode` - When to show field

- `'basic'` - Only in basic mode (default UI)
- `'advanced'` - Only in advanced mode (manual input)
- `'both'` - Show in both modes (default)
- `'trigger'` - Only when block is used as trigger

**Register in `blocks/registry.ts`:**

```typescript
import { {Service}Block } from '@/blocks/blocks/{service}'
// Add to registry object (alphabetically)
{service}: {Service}Block,
```

## 3. Icon (`components/icons.tsx`)

```typescript
export function {Service}Icon(props: SVGProps<SVGSVGElement>) {
  return (
    <svg {...props} viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
      {/* SVG path from service's brand assets */}
    </svg>
  )
}
```

## 4. Trigger (`triggers/{service}/`) - Optional

```
triggers/{service}/
├── index.ts      # Export all triggers
├── webhook.ts    # Webhook handler
├── utils.ts      # Shared utilities
└── {event}.ts    # Specific event handlers
```

**Register in `triggers/registry.ts`:**

```typescript
import { {service}WebhookTrigger } from '@/triggers/{service}'
// Add to TRIGGER_REGISTRY
{service}_webhook: {service}WebhookTrigger,
```

## Checklist

- [ ] Look up API docs for the service
- [ ] Create `tools/{service}/types.ts` with proper types
- [ ] Create tool files for each operation
- [ ] Create `tools/{service}/index.ts` barrel export
- [ ] Register tools in `tools/registry.ts`
- [ ] Add icon to `components/icons.tsx`
- [ ] Create block in `blocks/blocks/{service}.ts`
- [ ] Register block in `blocks/registry.ts`
- [ ] (Optional) Create triggers in `triggers/{service}/`
- [ ] (Optional) Register triggers in `triggers/registry.ts`
66  .claude/rules/sim-queries.md  Normal file
@@ -0,0 +1,66 @@
---
paths:
  - "apps/sim/hooks/queries/**/*.ts"
---

# React Query Patterns

All React Query hooks live in `hooks/queries/`.

## Query Key Factory

Every query file defines a keys factory:

```typescript
export const entityKeys = {
  all: ['entity'] as const,
  list: (workspaceId?: string) => [...entityKeys.all, 'list', workspaceId ?? ''] as const,
  detail: (id?: string) => [...entityKeys.all, 'detail', id ?? ''] as const,
}
```

## File Structure

```typescript
// 1. Query keys factory
// 2. Types (if needed)
// 3. Private fetch functions
// 4. Exported hooks
```

## Query Hook

```typescript
export function useEntityList(workspaceId?: string, options?: { enabled?: boolean }) {
  return useQuery({
    queryKey: entityKeys.list(workspaceId),
    queryFn: () => fetchEntities(workspaceId as string),
    enabled: Boolean(workspaceId) && (options?.enabled ?? true),
    staleTime: 60 * 1000,
    placeholderData: keepPreviousData,
  })
}
```

## Mutation Hook

```typescript
export function useCreateEntity() {
  const queryClient = useQueryClient()
  return useMutation({
    mutationFn: async (variables) => { /* fetch POST */ },
    onSuccess: () => queryClient.invalidateQueries({ queryKey: entityKeys.all }),
  })
}
```

## Optimistic Updates

For optimistic mutations syncing with Zustand, use `createOptimisticMutationHandlers` from `@/hooks/queries/utils/optimistic-mutation`.

## Naming

- **Keys**: `entityKeys`
- **Query hooks**: `useEntity`, `useEntityList`
- **Mutation hooks**: `useCreateEntity`, `useUpdateEntity`
- **Fetch functions**: `fetchEntity` (private)
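The mutation hook above with the request body sketched in; the `/api/entities` endpoint and payload shape are hypothetical:

```typescript
import { useMutation, useQueryClient } from '@tanstack/react-query'

// entityKeys is the keys factory defined earlier in the same query file
export function useCreateEntity() {
  const queryClient = useQueryClient()
  return useMutation({
    mutationFn: async (variables: { workspaceId: string; name: string }) => {
      const response = await fetch('/api/entities', {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify(variables),
      })
      if (!response.ok) throw new Error('Failed to create entity')
      return response.json()
    },
    // Invalidate everything under the entity key so lists and details refetch
    onSuccess: () => queryClient.invalidateQueries({ queryKey: entityKeys.all }),
  })
}
```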
71  .claude/rules/sim-stores.md  Normal file
@@ -0,0 +1,71 @@
---
paths:
  - "apps/sim/**/store.ts"
  - "apps/sim/**/stores/**/*.ts"
---

# Zustand Store Patterns

Stores live in `stores/`. Complex stores split into `store.ts` + `types.ts`.

## Basic Store

```typescript
import { create } from 'zustand'
import { devtools } from 'zustand/middleware'
import type { FeatureState } from '@/stores/feature/types'

const initialState = { items: [] as Item[], activeId: null as string | null }

export const useFeatureStore = create<FeatureState>()(
  devtools(
    (set, get) => ({
      ...initialState,
      setItems: (items) => set({ items }),
      addItem: (item) => set((state) => ({ items: [...state.items, item] })),
      reset: () => set(initialState),
    }),
    { name: 'feature-store' }
  )
)
```

## Persisted Store

```typescript
import { create } from 'zustand'
import { persist } from 'zustand/middleware'

export const useFeatureStore = create<FeatureState>()(
  persist(
    (set) => ({
      width: 300,
      setWidth: (width) => set({ width }),
      _hasHydrated: false,
      setHasHydrated: (v) => set({ _hasHydrated: v }),
    }),
    {
      name: 'feature-state',
      partialize: (state) => ({ width: state.width }),
      onRehydrateStorage: () => (state) => state?.setHasHydrated(true),
    }
  )
)
```

## Rules

1. Use `devtools` middleware (named stores)
2. Use `persist` only when data should survive reload
3. `partialize` to persist only necessary state
4. `_hasHydrated` pattern for persisted stores needing hydration tracking
5. Immutable updates only
6. `set((state) => ...)` when depending on previous state
7. Provide `reset()` action

## Outside React

```typescript
const items = useFeatureStore.getState().items
useFeatureStore.setState({ items: newItems })
```
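A matching `types.ts` sketch for the basic store above; the `Item` shape is an assumption, while the state and action names mirror the example:

```typescript
// stores/feature/types.ts
export interface Item {
  id: string
  name: string
}

export interface FeatureState {
  // State
  items: Item[]
  activeId: string | null
  // Actions
  setItems: (items: Item[]) => void
  addItem: (item: Item) => void
  reset: () => void
}
```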
41  .claude/rules/sim-styling.md  Normal file
@@ -0,0 +1,41 @@
---
paths:
  - "apps/sim/**/*.tsx"
  - "apps/sim/**/*.css"
---

# Styling Rules

## Tailwind

1. **No inline styles** - Use Tailwind classes
2. **No duplicate dark classes** - Skip `dark:` when value matches light mode
3. **Exact values** - `text-[14px]`, `h-[26px]`
4. **Transitions** - `transition-colors` for interactive states
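A small sketch applying the Tailwind rules above; the `StatusDot` component and its color classes are illustrative:

```tsx
import { cn } from '@/lib/utils'

export function StatusDot({ active }: { active: boolean }) {
  return (
    <span
      className={cn(
        // Shared between themes: written once, no duplicate dark: variant
        'h-[8px] w-[8px] rounded-[4px] transition-colors',
        // dark: appears only where the value actually differs per theme
        active ? 'bg-emerald-500' : 'bg-neutral-300 dark:bg-neutral-600'
      )}
    />
  )
}
```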
## Conditional Classes

```typescript
import { cn } from '@/lib/utils'

<div className={cn(
  'base-classes',
  isActive && 'active-classes',
  disabled ? 'opacity-60' : 'hover:bg-accent'
)} />
```

## CSS Variables

For dynamic values (widths, heights) synced with stores:

```typescript
// In store
setWidth: (width) => {
  set({ width })
  document.documentElement.style.setProperty('--sidebar-width', `${width}px`)
}

// In component
<aside style={{ width: 'var(--sidebar-width)' }} />
```
58  .claude/rules/sim-testing.md  Normal file
@@ -0,0 +1,58 @@
---
paths:
  - "apps/sim/**/*.test.ts"
  - "apps/sim/**/*.test.tsx"
---

# Testing Patterns

Use Vitest. Test files: `feature.ts` → `feature.test.ts`

## Structure

```typescript
/**
 * @vitest-environment node
 */
import { databaseMock, loggerMock } from '@sim/testing'
import { describe, expect, it, vi } from 'vitest'

vi.mock('@sim/db', () => databaseMock)
vi.mock('@sim/logger', () => loggerMock)

import { myFunction } from '@/lib/feature'

describe('myFunction', () => {
  beforeEach(() => vi.clearAllMocks())
  it.concurrent('isolated tests run in parallel', () => { ... })
})
```

## @sim/testing Package

Always prefer over local mocks.

| Category | Utilities |
|----------|-----------|
| **Mocks** | `loggerMock`, `databaseMock`, `setupGlobalFetchMock()` |
| **Factories** | `createSession()`, `createWorkflowRecord()`, `createBlock()`, `createExecutorContext()` |
| **Builders** | `WorkflowBuilder`, `ExecutionContextBuilder` |
| **Assertions** | `expectWorkflowAccessGranted()`, `expectBlockExecuted()` |

## Rules

1. `@vitest-environment node` directive at file top
2. `vi.mock()` calls before importing mocked modules
3. `@sim/testing` utilities over local mocks
4. `it.concurrent` for isolated tests (no shared mutable state)
5. `beforeEach(() => vi.clearAllMocks())` to reset state

## Hoisted Mocks

For mutable mock references:

```typescript
const mockFn = vi.hoisted(() => vi.fn())
vi.mock('@/lib/module', () => ({ myFunction: mockFn }))
mockFn.mockResolvedValue({ data: 'test' })
```
21  .claude/rules/sim-typescript.md  Normal file
@@ -0,0 +1,21 @@
---
paths:
  - "apps/sim/**/*.ts"
  - "apps/sim/**/*.tsx"
---

# TypeScript Rules

1. **No `any`** - Use proper types or `unknown` with type guards
2. **Props interface** - Always define for components
3. **Const assertions** - `as const` for constant objects/arrays
4. **Ref types** - Explicit: `useRef<HTMLDivElement>(null)`
5. **Type imports** - `import type { X }` for type-only imports

```typescript
// ✗ Bad
const handleClick = (e: any) => {}

// ✓ Good
const handleClick = (e: React.MouseEvent<HTMLButtonElement>) => {}
```
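A short sketch covering rules 3 to 5 above; the names are illustrative, and `WorkflowLog` is reused from the import-patterns example:

```typescript
import { useRef } from 'react'
import type { WorkflowLog } from '@/stores/logs/types' // Rule 5: type-only import

// Rule 3: const assertion keeps literal types and marks the array readonly
const LOG_LEVELS = ['info', 'warn', 'error'] as const
type LogLevel = (typeof LOG_LEVELS)[number]

export function useLogPanel() {
  // Rule 4: explicit element type on the ref
  const panelRef = useRef<HTMLDivElement>(null)

  // Rule 1: typed parameters instead of `any`
  const highlight = (log: WorkflowLog, level: LogLevel) => {
    if (level === 'error') {
      panelRef.current?.scrollIntoView()
    }
    return log
  }

  return { panelRef, highlight }
}
```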
@@ -8,7 +8,7 @@ alwaysApply: true
You are a professional software engineer. All code must follow best practices: accurate, readable, clean, and efficient.

## Logging
Import `createLogger` from `sim/logger`. Use `logger.info`, `logger.warn`, `logger.error` instead of `console.log`.
Import `createLogger` from `@sim/logger`. Use `logger.info`, `logger.warn`, `logger.error` instead of `console.log`.

## Comments
Use TSDoc for documentation. No `====` separators. No non-TSDoc comments.
@@ -14,7 +14,7 @@
</p>

<p align="center">
<a href="https://deepwiki.com/simstudioai/sim" target="_blank" rel="noopener noreferrer"><img src="https://deepwiki.com/badge.svg" alt="Ask DeepWiki"></a> <a href="https://cursor.com/link/prompt?text=Help%20me%20set%20up%20Sim%20Studio%20locally.%20Follow%20these%20steps%3A%0A%0A1.%20First%2C%20verify%20Docker%20is%20installed%20and%20running%3A%0A%20%20%20docker%20--version%0A%20%20%20docker%20info%0A%0A2.%20Clone%20the%20repository%3A%0A%20%20%20git%20clone%20https%3A%2F%2Fgithub.com%2Fsimstudioai%2Fsim.git%0A%20%20%20cd%20sim%0A%0A3.%20Start%20the%20services%20with%20Docker%20Compose%3A%0A%20%20%20docker%20compose%20-f%20docker-compose.prod.yml%20up%20-d%0A%0A4.%20Wait%20for%20all%20containers%20to%20be%20healthy%20(this%20may%20take%201-2%20minutes)%3A%0A%20%20%20docker%20compose%20-f%20docker-compose.prod.yml%20ps%0A%0A5.%20Verify%20the%20app%20is%20accessible%20at%20http%3A%2F%2Flocalhost%3A3000%0A%0AIf%20there%20are%20any%20errors%2C%20help%20me%20troubleshoot%20them.%20Common%20issues%3A%0A-%20Port%203000%2C%203002%2C%20or%205432%20already%20in%20use%0A-%20Docker%20not%20running%0A-%20Insufficient%20memory%20(needs%2012GB%2B%20RAM)%0A%0AFor%20local%20AI%20models%20with%20Ollama%2C%20use%20this%20instead%20of%20step%203%3A%0A%20%20%20docker%20compose%20-f%20docker-compose.ollama.yml%20--profile%20setup%20up%20-d"><img src="https://img.shields.io/badge/Set%20Up%20with-Cursor-000000?logo=cursor&logoColor=white" alt="Set Up with Cursor"></a>
<a href="https://deepwiki.com/simstudioai/sim" target="_blank" rel="noopener noreferrer"><img src="https://deepwiki.com/badge.svg" alt="Ask DeepWiki"></a> <a href="https://cursor.com/link/prompt?text=Help%20me%20set%20up%20Sim%20locally.%20Follow%20these%20steps%3A%0A%0A1.%20First%2C%20verify%20Docker%20is%20installed%20and%20running%3A%0A%20%20%20docker%20--version%0A%20%20%20docker%20info%0A%0A2.%20Clone%20the%20repository%3A%0A%20%20%20git%20clone%20https%3A%2F%2Fgithub.com%2Fsimstudioai%2Fsim.git%0A%20%20%20cd%20sim%0A%0A3.%20Start%20the%20services%20with%20Docker%20Compose%3A%0A%20%20%20docker%20compose%20-f%20docker-compose.prod.yml%20up%20-d%0A%0A4.%20Wait%20for%20all%20containers%20to%20be%20healthy%20(this%20may%20take%201-2%20minutes)%3A%0A%20%20%20docker%20compose%20-f%20docker-compose.prod.yml%20ps%0A%0A5.%20Verify%20the%20app%20is%20accessible%20at%20http%3A%2F%2Flocalhost%3A3000%0A%0AIf%20there%20are%20any%20errors%2C%20help%20me%20troubleshoot%20them.%20Common%20issues%3A%0A-%20Port%203000%2C%203002%2C%20or%205432%20already%20in%20use%0A-%20Docker%20not%20running%0A-%20Insufficient%20memory%20(needs%2012GB%2B%20RAM)%0A%0AFor%20local%20AI%20models%20with%20Ollama%2C%20use%20this%20instead%20of%20step%203%3A%0A%20%20%20docker%20compose%20-f%20docker-compose.ollama.yml%20--profile%20setup%20up%20-d"><img src="https://img.shields.io/badge/Set%20Up%20with-Cursor-000000?logo=cursor&logoColor=white" alt="Set Up with Cursor"></a>
</p>

### Build Workflows with Ease
@@ -4093,6 +4093,23 @@ export function SQSIcon(props: SVGProps<SVGSVGElement>) {
  )
}

export function TextractIcon(props: SVGProps<SVGSVGElement>) {
  return (
    <svg
      {...props}
      viewBox='10 14 60 52'
      version='1.1'
      xmlns='http://www.w3.org/2000/svg'
      xmlnsXlink='http://www.w3.org/1999/xlink'
    >
      <path
d='M22.0624102,50 C24.3763895,53.603 28.4103535,56 33.0003125,56 C40.1672485,56 45.9991964,50.168 45.9991964,43 C45.9991964,35.832 40.1672485,30 33.0003125,30 C27.6033607,30 22.9664021,33.307 21.0024196,38 L23.2143999,38 C25.0393836,34.444 28.7363506,32 33.0003125,32 C39.0652583,32 43.9992143,36.935 43.9992143,43 C43.9992143,49.065 39.0652583,54 33.0003125,54 C29.5913429,54 26.5413702,52.441 24.5213882,50 L22.0624102,50 Z M37.0002768,45 L37.0002768,43 L41.9992321,43 C41.9992321,38.038 37.9622682,34 33.0003125,34 C28.0373568,34 23.9993929,38.038 23.9993929,43 L28.9993482,43 L28.9993482,45 L24.2313908,45 C25.1443826,49.002 28.7253507,52 33.0003125,52 C35.1362934,52 37.0992759,51.249 38.6442621,50 L34.0003036,50 L34.0003036,48 L40.4782457,48 C41.0812403,47.102 41.5202364,46.087 41.7682342,45 L37.0002768,45 Z M21.0024196,48 L23.2143999,48 C22.4434068,46.498 22.0004107,44.801 22.0004107,43 C22.0004107,41.959 22.1554093,40.955 22.4264069,40 L20.3634253,40 C20.1344274,40.965 19.9994286,41.966 19.9994286,43 C19.9994286,44.771 20.3584254,46.46 21.0024196,48 L21.0024196,48 Z M19.7434309,50 L17.0004554,50 L17.0004554,48 L18.8744386,48 C18.5344417,47.04 18.2894438,46.038 18.1494451,45 L15.4144695,45 L16.707458,46.293 L15.2924706,47.707 L12.2924974,44.707 C11.9025009,44.316 11.9025009,43.684 12.2924974,43.293 L15.2924706,40.293 L16.707458,41.707 L15.4144695,43 L18.0004464,43 C18.0004464,41.973 18.1044455,40.97 18.3024437,40 L17.0004554,40 L17.0004554,38 L18.8744386,38 C20.9404202,32.184 26.4833707,28 33.0003125,28 C37.427273,28 41.4002375,29.939 44.148213,33 L59.0000804,33 L59.0000804,35 L45.6661994,35 C47.1351863,37.318 47.9991786,40.058 47.9991786,43 L59.0000804,43 L59.0000804,45 L47.8501799,45 C46.8681887,52.327 40.5912447,58 33.0003125,58 C27.2563638,58 22.2624084,54.752 19.7434309,50 L19.7434309,50 Z M37.0002768,39 C37.0002768,38.448 36.5522808,38 36.0002857,38 L29.9993482,38 C29.4473442,38 28.9993482,38.448 28.9993482,39 L28.9993482,41 L31.0003304,41 L31.0003304,40 L32.0003214,40 L32.0003214,43 L31.0003304,43 L31.0003304,45 L35.0002946,45 L35.0002946,43 L34.0003036,43 L34.0003036,40 L35.0002946,40 L35.0002946,41 L37.0002768,41 L37.0002768,39 Z M49.0001696,40 L59.0000804,40 L59.0000804,38 L49.0001696,38 L49.0001696,40 Z M49.0001696,50 L59.0000804,50 L59.0000804,48 L49.0001696,48 L49.0001696,50 Z M57.0000982,27 L60.5850662,27 L57.0000982,23.414 L57.0000982,27 Z M63.7070383,27.293 C63.8940367,27.48 64.0000357,27.735 64.0000357,28 L64.0000357,63 C64.0000357,63.552 63.5520397,64 63.0000446,64 L32.0003304,64 C31.4473264,64 31.0003304,63.552 31.0003304,63 L31.0003304,59 L33.0003125,59 L33.0003125,62 L62.0000536,62 L62.0000536,29 L56.0001071,29 C55.4471121,29 55.0001161,28.552 55.0001161,28 L55.0001161,22 L33.0003125,22 L33.0003125,27 L31.0003304,27 L31.0003304,21 C31.0003304,20.448 31.4473264,20 32.0003304,20 L56.0001071,20 C56.2651048,20 56.5191025,20.105 56.7071008,20.293 L63.7070383,27.293 Z M68,24.166 L68,61 C68,61.552 67.552004,62 67.0000089,62 L65.0000268,62 L65.0000268,60 L66.0000179,60 L66.0000179,24.612 L58.6170838,18 L36.0002857,18 L36.0002857,19 L34.0003036,19 L34.0003036,17 C34.0003036,16.448 34.4472996,16 35.0003036,16 L59.0000804,16 C59.2460782,16 59.483076,16.091 59.6660744,16.255 L67.666003,23.42 C67.8780011,23.61 68,23.881 68,24.166 L68,24.166 Z'
        fill='currentColor'
      />
    </svg>
  )
}

export function McpIcon(props: SVGProps<SVGSVGElement>) {
  return (
    <svg
@@ -110,6 +110,7 @@ import {
  SupabaseIcon,
  TavilyIcon,
  TelegramIcon,
  TextractIcon,
  TinybirdIcon,
  TranslateIcon,
  TrelloIcon,
@@ -143,7 +144,7 @@ export const blockTypeToIconMap: Record<string, IconComponent> = {
  calendly: CalendlyIcon,
  circleback: CirclebackIcon,
  clay: ClayIcon,
  confluence: ConfluenceIcon,
  confluence_v2: ConfluenceIcon,
  cursor_v2: CursorIcon,
  datadog: DatadogIcon,
  discord: DiscordIcon,
@@ -153,7 +154,7 @@ export const blockTypeToIconMap: Record<string, IconComponent> = {
  elasticsearch: ElasticsearchIcon,
  elevenlabs: ElevenLabsIcon,
  exa: ExaAIIcon,
  file: DocumentIcon,
  file_v2: DocumentIcon,
  firecrawl: FirecrawlIcon,
  fireflies: FirefliesIcon,
  github_v2: GithubIcon,
@@ -195,7 +196,7 @@ export const blockTypeToIconMap: Record<string, IconComponent> = {
  microsoft_excel_v2: MicrosoftExcelIcon,
  microsoft_planner: MicrosoftPlannerIcon,
  microsoft_teams: MicrosoftTeamsIcon,
  mistral_parse: MistralIcon,
  mistral_parse_v2: MistralIcon,
  mongodb: MongoDBIcon,
  mysql: MySQLIcon,
  neo4j: Neo4jIcon,
@@ -237,6 +238,7 @@ export const blockTypeToIconMap: Record<string, IconComponent> = {
  supabase: SupabaseIcon,
  tavily: TavilyIcon,
  telegram: TelegramIcon,
  textract: TextractIcon,
  tinybird: TinybirdIcon,
  translate: TranslateIcon,
  trello: TrelloIcon,
@@ -244,7 +246,7 @@ export const blockTypeToIconMap: Record<string, IconComponent> = {
  twilio_sms: TwilioIcon,
  twilio_voice: TwilioIcon,
  typeform: TypeformIcon,
  video_generator: VideoIcon,
  video_generator_v2: VideoIcon,
  vision: EyeIcon,
  wealthbox: WealthboxIcon,
  webflow: WebflowIcon,
@@ -6,7 +6,7 @@ description: Interact with Confluence
import { BlockInfoCard } from "@/components/ui/block-info-card"

<BlockInfoCard
  type="confluence"
  type="confluence_v2"
  color="#E0E0E0"
/>
@@ -6,7 +6,7 @@ description: Read and parse multiple files
import { BlockInfoCard } from "@/components/ui/block-info-card"

<BlockInfoCard
  type="file"
  type="file_v2"
  color="#40916C"
/>
@@ -48,7 +48,7 @@ Parse one or more uploaded files or files from URLs (text, PDF, CSV, images, etc

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `files` | array | Array of parsed files |
| `combinedContent` | string | Combined content of all parsed files |
| `files` | array | Array of parsed files with content, metadata, and file properties |
| `combinedContent` | string | All file contents merged into a single text string |
@@ -106,6 +106,7 @@
  "supabase",
  "tavily",
  "telegram",
  "textract",
  "tinybird",
  "translate",
  "trello",
@@ -6,7 +6,7 @@ description: Extract text from PDF documents
import { BlockInfoCard } from "@/components/ui/block-info-card"

<BlockInfoCard
  type="mistral_parse"
  type="mistral_parse_v2"
  color="#000000"
/>
@@ -54,18 +54,37 @@ Parse PDF documents using Mistral OCR API

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether the PDF was parsed successfully |
| `content` | string | Extracted content in the requested format \(markdown, text, or JSON\) |
| `metadata` | object | Processing metadata including jobId, fileType, pageCount, and usage info |
| ↳ `jobId` | string | Unique job identifier |
| ↳ `fileType` | string | File type \(e.g., pdf\) |
| ↳ `fileName` | string | Original file name |
| ↳ `source` | string | Source type \(url\) |
| ↳ `pageCount` | number | Number of pages processed |
| ↳ `model` | string | Mistral model used |
| ↳ `resultType` | string | Output format \(markdown, text, json\) |
| ↳ `processedAt` | string | Processing timestamp |
| ↳ `sourceUrl` | string | Source URL if applicable |
| ↳ `usageInfo` | object | Usage statistics from OCR processing |
| `pages` | array | Array of page objects from Mistral OCR |
| ↳ `index` | number | Page index \(zero-based\) |
| ↳ `markdown` | string | Extracted markdown content |
| ↳ `images` | array | Images extracted from this page with bounding boxes |
| ↳ `id` | string | Image identifier \(e.g., img-0.jpeg\) |
| ↳ `top_left_x` | number | Top-left X coordinate in pixels |
| ↳ `top_left_y` | number | Top-left Y coordinate in pixels |
| ↳ `bottom_right_x` | number | Bottom-right X coordinate in pixels |
| ↳ `bottom_right_y` | number | Bottom-right Y coordinate in pixels |
| ↳ `image_base64` | string | Base64-encoded image data \(when include_image_base64=true\) |
| ↳ `id` | string | Image identifier \(e.g., img-0.jpeg\) |
| ↳ `top_left_x` | number | Top-left X coordinate in pixels |
| ↳ `top_left_y` | number | Top-left Y coordinate in pixels |
| ↳ `bottom_right_x` | number | Bottom-right X coordinate in pixels |
| ↳ `bottom_right_y` | number | Bottom-right Y coordinate in pixels |
| ↳ `image_base64` | string | Base64-encoded image data \(when include_image_base64=true\) |
| ↳ `dimensions` | object | Page dimensions |
| ↳ `dpi` | number | Dots per inch |
| ↳ `height` | number | Page height in pixels |
| ↳ `width` | number | Page width in pixels |
| ↳ `dpi` | number | Dots per inch |
| ↳ `height` | number | Page height in pixels |
| ↳ `width` | number | Page width in pixels |
| ↳ `tables` | array | Extracted tables as HTML/markdown \(when table_format is set\). Referenced via placeholders like \[tbl-0.html\] |
| ↳ `hyperlinks` | array | Array of URL strings detected in the page \(e.g., \[ |
| ↳ `header` | string | Page header content \(when extract_header=true\) |
| ↳ `footer` | string | Page footer content \(when extract_footer=true\) |
| `model` | string | Mistral OCR model identifier \(e.g., mistral-ocr-latest\) |
| `usage_info` | object | Usage and processing statistics |
| ↳ `pages_processed` | number | Total number of pages processed |
| ↳ `doc_size_bytes` | number | Document file size in bytes |
| `document_annotation` | string | Structured annotation data as JSON string \(when applicable\) |
@@ -58,6 +58,7 @@ Upload a file to an AWS S3 bucket
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `url` | string | URL of the uploaded S3 object |
| `uri` | string | S3 URI of the uploaded object \(s3://bucket/key\) |
| `metadata` | object | Upload metadata including ETag and location |

### `s3_get_object`
@@ -149,6 +150,7 @@ Copy an object within or between AWS S3 buckets
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `url` | string | URL of the copied S3 object |
| `uri` | string | S3 URI of the copied object \(s3://bucket/key\) |
| `metadata` | object | Copy operation metadata |
120  apps/docs/content/docs/en/tools/textract.mdx  Normal file
@@ -0,0 +1,120 @@
---
title: AWS Textract
description: Extract text, tables, and forms from documents
---

import { BlockInfoCard } from "@/components/ui/block-info-card"

<BlockInfoCard
  type="textract"
  color="linear-gradient(135deg, #055F4E 0%, #56C0A7 100%)"
/>

{/* MANUAL-CONTENT-START:intro */}
[AWS Textract](https://aws.amazon.com/textract/) is a powerful AI service from Amazon Web Services designed to automatically extract printed text, handwriting, tables, forms, key-value pairs, and other structured data from scanned documents and images. Textract leverages advanced optical character recognition (OCR) and document analysis to transform documents into actionable data, enabling automation, analytics, compliance, and more.

With AWS Textract, you can:

- **Extract text from images and documents**: Recognize printed text and handwriting in formats such as PDF, JPEG, PNG, or TIFF
- **Detect and extract tables**: Automatically find tables and output their structured content
- **Parse forms and key-value pairs**: Pull structured data from forms, including fields and their corresponding values
- **Identify signatures and layout features**: Detect signatures, geometric layout, and relationships between document elements
- **Customize extraction with queries**: Extract specific fields and answers using query-based extraction (e.g., "What is the invoice number?")

In Sim, the AWS Textract integration empowers your agents to intelligently process documents as part of their workflows. This unlocks automation scenarios such as data entry from invoices, onboarding documents, contracts, receipts, and more. Your agents can extract relevant data, analyze structured forms, and generate summaries or reports directly from document uploads or URLs. By connecting Sim with AWS Textract, you can reduce manual effort, improve data accuracy, and streamline your business processes with robust document understanding.
{/* MANUAL-CONTENT-END */}

## Usage Instructions

Integrate AWS Textract into your workflow to extract text, tables, forms, and key-value pairs from documents. Single-page mode supports JPEG, PNG, and single-page PDF. Multi-page mode supports multi-page PDF and TIFF.

## Tools

### `textract_parser`

Parse documents using AWS Textract OCR and document analysis

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `accessKeyId` | string | Yes | AWS Access Key ID |
| `secretAccessKey` | string | Yes | AWS Secret Access Key |
| `region` | string | Yes | AWS region for Textract service \(e.g., us-east-1\) |
| `processingMode` | string | No | Document type: single-page or multi-page. Defaults to single-page. |
| `filePath` | string | No | URL to a document to be processed \(JPEG, PNG, or single-page PDF\). |
| `s3Uri` | string | No | S3 URI for multi-page processing \(s3://bucket/key\). |
| `fileUpload` | object | No | File upload data from file-upload component |
| `featureTypes` | array | No | Feature types to detect: TABLES, FORMS, QUERIES, SIGNATURES, LAYOUT. If not specified, only text detection is performed. |
| `items` | string | No | Feature type |
| `queries` | array | No | Custom queries to extract specific information. Only used when featureTypes includes QUERIES. |
| `items` | object | No | Query configuration |
| `properties` | string | No | The query text |
| `Text` | string | No | No description |
| `Alias` | string | No | No description |
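Putting the input table together, a hypothetical parameter object for `textract_parser`; credential values are placeholders, and the exact way a workflow supplies these values depends on the block configuration:

```typescript
// Hypothetical parameters for a single-page run of textract_parser.
// Swap filePath for s3Uri (and processingMode: 'multi-page') when
// processing multi-page PDF or TIFF documents.
const params = {
  accessKeyId: 'AKIA...',
  secretAccessKey: '...',
  region: 'us-east-1',
  processingMode: 'single-page',
  filePath: 'https://example.com/invoice.pdf',
  featureTypes: ['FORMS', 'QUERIES'],
  queries: [{ Text: 'What is the invoice number?', Alias: 'invoice_number' }],
}
```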
#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `blocks` | array | Array of Block objects containing detected text, tables, forms, and other elements |
| ↳ `BlockType` | string | Type of block \(PAGE, LINE, WORD, TABLE, CELL, KEY_VALUE_SET, etc.\) |
| ↳ `Id` | string | Unique identifier for the block |
| ↳ `Text` | string | Query text |
| ↳ `TextType` | string | Type of text \(PRINTED or HANDWRITING\) |
| ↳ `Confidence` | number | Confidence score \(0-100\) |
| ↳ `Page` | number | Page number |
| ↳ `Geometry` | object | Location and bounding box information |
| ↳ `BoundingBox` | object | Height as ratio of document height |
| ↳ `Height` | number | Height as ratio of document height |
| ↳ `Left` | number | Left position as ratio of document width |
| ↳ `Top` | number | Top position as ratio of document height |
| ↳ `Width` | number | Width as ratio of document width |
| ↳ `Height` | number | Height as ratio of document height |
| ↳ `Left` | number | Left position as ratio of document width |
| ↳ `Top` | number | Top position as ratio of document height |
| ↳ `Width` | number | Width as ratio of document width |
| ↳ `Polygon` | array | Polygon coordinates |
| ↳ `X` | number | X coordinate |
| ↳ `Y` | number | Y coordinate |
| ↳ `X` | number | X coordinate |
| ↳ `Y` | number | Y coordinate |
| ↳ `BoundingBox` | object | Height as ratio of document height |
| ↳ `Height` | number | Height as ratio of document height |
| ↳ `Left` | number | Left position as ratio of document width |
| ↳ `Top` | number | Top position as ratio of document height |
| ↳ `Width` | number | Width as ratio of document width |
| ↳ `Height` | number | Height as ratio of document height |
| ↳ `Left` | number | Left position as ratio of document width |
| ↳ `Top` | number | Top position as ratio of document height |
| ↳ `Width` | number | Width as ratio of document width |
| ↳ `Polygon` | array | Polygon coordinates |
| ↳ `X` | number | X coordinate |
| ↳ `Y` | number | Y coordinate |
| ↳ `X` | number | X coordinate |
| ↳ `Y` | number | Y coordinate |
| ↳ `Relationships` | array | Relationships to other blocks |
| ↳ `Type` | string | Relationship type \(CHILD, VALUE, ANSWER, etc.\) |
| ↳ `Ids` | array | IDs of related blocks |
| ↳ `Type` | string | Relationship type \(CHILD, VALUE, ANSWER, etc.\) |
| ↳ `Ids` | array | IDs of related blocks |
| ↳ `EntityTypes` | array | Entity types for KEY_VALUE_SET \(KEY or VALUE\) |
| ↳ `SelectionStatus` | string | For checkboxes: SELECTED or NOT_SELECTED |
| ↳ `RowIndex` | number | Row index for table cells |
| ↳ `ColumnIndex` | number | Column index for table cells |
| ↳ `RowSpan` | number | Row span for merged cells |
| ↳ `ColumnSpan` | number | Column span for merged cells |
| ↳ `Query` | object | Query information for QUERY blocks |
| ↳ `Text` | string | Query text |
| ↳ `Alias` | string | Query alias |
| ↳ `Pages` | array | Pages to search |
| ↳ `Alias` | string | Query alias |
| ↳ `Pages` | array | Pages to search |
| `documentMetadata` | object | Metadata about the analyzed document |
| ↳ `pages` | number | Number of pages in the document |
| `modelVersion` | string | Version of the Textract model used for processing |
@@ -6,7 +6,7 @@ description: Generate videos from text using AI
import { BlockInfoCard } from "@/components/ui/block-info-card"

<BlockInfoCard
  type="video_generator"
  type="video_generator_v2"
  color="#181C1E"
/>
@@ -4,7 +4,7 @@ import { eq } from 'drizzle-orm'
import { NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'

const logger = createLogger('SSO-Providers')
const logger = createLogger('SSOProvidersRoute')

export async function GET() {
  try {
@@ -6,7 +6,7 @@ import { hasSSOAccess } from '@/lib/billing'
|
||||
import { env } from '@/lib/core/config/env'
|
||||
import { REDACTED_MARKER } from '@/lib/core/security/redaction'
|
||||
|
||||
const logger = createLogger('SSO-Register')
|
||||
const logger = createLogger('SSORegisterRoute')
|
||||
|
||||
const mappingSchema = z
|
||||
.object({
|
||||
@@ -43,6 +43,10 @@ const ssoRegistrationSchema = z.discriminatedUnion('providerType', [
      ])
      .default(['openid', 'profile', 'email']),
    pkce: z.boolean().default(true),
    authorizationEndpoint: z.string().url().optional(),
    tokenEndpoint: z.string().url().optional(),
    userInfoEndpoint: z.string().url().optional(),
    jwksEndpoint: z.string().url().optional(),
  }),
  z.object({
    providerType: z.literal('saml'),
@@ -64,12 +68,10 @@ const ssoRegistrationSchema = z.discriminatedUnion('providerType', [

export async function POST(request: NextRequest) {
  try {
    // SSO plugin must be enabled in Better Auth
    if (!env.SSO_ENABLED) {
      return NextResponse.json({ error: 'SSO is not enabled' }, { status: 400 })
    }

    // Check plan access (enterprise) or env var override
    const session = await getSession()
    if (!session?.user?.id) {
      return NextResponse.json({ error: 'Authentication required' }, { status: 401 })
@@ -116,7 +118,16 @@ export async function POST(request: NextRequest) {
    }

    if (providerType === 'oidc') {
      const { clientId, clientSecret, scopes, pkce } = body
      const {
        clientId,
        clientSecret,
        scopes,
        pkce,
        authorizationEndpoint,
        tokenEndpoint,
        userInfoEndpoint,
        jwksEndpoint,
      } = body

      const oidcConfig: any = {
        clientId,
@@ -127,50 +138,104 @@ export async function POST(request: NextRequest) {
        pkce: pkce ?? true,
      }

      // Add manual endpoints for providers that might need them
      // Common patterns for OIDC providers that don't support discovery properly
      if (
        issuer.includes('okta.com') ||
        issuer.includes('auth0.com') ||
        issuer.includes('identityserver')
      ) {
        const baseUrl = issuer.includes('/oauth2/default')
          ? issuer.replace('/oauth2/default', '')
          : issuer.replace('/oauth', '').replace('/v2.0', '').replace('/oauth2', '')
      oidcConfig.authorizationEndpoint = authorizationEndpoint
      oidcConfig.tokenEndpoint = tokenEndpoint
      oidcConfig.userInfoEndpoint = userInfoEndpoint
      oidcConfig.jwksEndpoint = jwksEndpoint

        // Okta-style endpoints
        if (issuer.includes('okta.com')) {
          oidcConfig.authorizationEndpoint = `${baseUrl}/oauth2/default/v1/authorize`
          oidcConfig.tokenEndpoint = `${baseUrl}/oauth2/default/v1/token`
          oidcConfig.userInfoEndpoint = `${baseUrl}/oauth2/default/v1/userinfo`
          oidcConfig.jwksEndpoint = `${baseUrl}/oauth2/default/v1/keys`
        }
        // Auth0-style endpoints
        else if (issuer.includes('auth0.com')) {
          oidcConfig.authorizationEndpoint = `${baseUrl}/authorize`
          oidcConfig.tokenEndpoint = `${baseUrl}/oauth/token`
          oidcConfig.userInfoEndpoint = `${baseUrl}/userinfo`
          oidcConfig.jwksEndpoint = `${baseUrl}/.well-known/jwks.json`
        }
        // Generic OIDC endpoints (IdentityServer, etc.)
        else {
          oidcConfig.authorizationEndpoint = `${baseUrl}/connect/authorize`
          oidcConfig.tokenEndpoint = `${baseUrl}/connect/token`
          oidcConfig.userInfoEndpoint = `${baseUrl}/connect/userinfo`
          oidcConfig.jwksEndpoint = `${baseUrl}/.well-known/jwks`
        }
      const needsDiscovery =
        !oidcConfig.authorizationEndpoint || !oidcConfig.tokenEndpoint || !oidcConfig.jwksEndpoint

        logger.info('Using manual OIDC endpoints for provider', {
      if (needsDiscovery) {
        const discoveryUrl = `${issuer.replace(/\/$/, '')}/.well-known/openid-configuration`
        try {
          logger.info('Fetching OIDC discovery document for missing endpoints', {
            discoveryUrl,
            hasAuthEndpoint: !!oidcConfig.authorizationEndpoint,
            hasTokenEndpoint: !!oidcConfig.tokenEndpoint,
            hasJwksEndpoint: !!oidcConfig.jwksEndpoint,
          })

          const discoveryResponse = await fetch(discoveryUrl, {
            headers: { Accept: 'application/json' },
          })

          if (!discoveryResponse.ok) {
            logger.error('Failed to fetch OIDC discovery document', {
              status: discoveryResponse.status,
              statusText: discoveryResponse.statusText,
            })
            return NextResponse.json(
              {
                error: `Failed to fetch OIDC discovery document from ${discoveryUrl}. Status: ${discoveryResponse.status}. Provide all endpoints explicitly or verify the issuer URL.`,
              },
              { status: 400 }
            )
          }

          const discovery = await discoveryResponse.json()

          oidcConfig.authorizationEndpoint =
            oidcConfig.authorizationEndpoint || discovery.authorization_endpoint
          oidcConfig.tokenEndpoint = oidcConfig.tokenEndpoint || discovery.token_endpoint
          oidcConfig.userInfoEndpoint = oidcConfig.userInfoEndpoint || discovery.userinfo_endpoint
          oidcConfig.jwksEndpoint = oidcConfig.jwksEndpoint || discovery.jwks_uri

          logger.info('Merged OIDC endpoints (user-provided + discovery)', {
            providerId,
            issuer,
            authorizationEndpoint: oidcConfig.authorizationEndpoint,
            tokenEndpoint: oidcConfig.tokenEndpoint,
            userInfoEndpoint: oidcConfig.userInfoEndpoint,
            jwksEndpoint: oidcConfig.jwksEndpoint,
          })
        } catch (error) {
          logger.error('Error fetching OIDC discovery document', {
            error: error instanceof Error ? error.message : 'Unknown error',
            discoveryUrl,
          })
          return NextResponse.json(
            {
              error: `Failed to fetch OIDC discovery document from ${discoveryUrl}. Please verify the issuer URL is correct or provide all endpoints explicitly.`,
            },
            { status: 400 }
          )
        }
      } else {
        logger.info('Using explicitly provided OIDC endpoints (all present)', {
          providerId,
          provider: issuer.includes('okta.com')
            ? 'Okta'
            : issuer.includes('auth0.com')
              ? 'Auth0'
              : 'Generic',
          authEndpoint: oidcConfig.authorizationEndpoint,
          issuer,
          authorizationEndpoint: oidcConfig.authorizationEndpoint,
          tokenEndpoint: oidcConfig.tokenEndpoint,
          userInfoEndpoint: oidcConfig.userInfoEndpoint,
          jwksEndpoint: oidcConfig.jwksEndpoint,
        })
      }

      if (
        !oidcConfig.authorizationEndpoint ||
        !oidcConfig.tokenEndpoint ||
        !oidcConfig.jwksEndpoint
      ) {
        const missing: string[] = []
        if (!oidcConfig.authorizationEndpoint) missing.push('authorizationEndpoint')
        if (!oidcConfig.tokenEndpoint) missing.push('tokenEndpoint')
        if (!oidcConfig.jwksEndpoint) missing.push('jwksEndpoint')

        logger.error('Missing required OIDC endpoints after discovery merge', {
          missing,
          authorizationEndpoint: oidcConfig.authorizationEndpoint,
          tokenEndpoint: oidcConfig.tokenEndpoint,
          jwksEndpoint: oidcConfig.jwksEndpoint,
        })
        return NextResponse.json(
          {
            error: `Missing required OIDC endpoints: ${missing.join(', ')}. Please provide these explicitly or verify the issuer supports OIDC discovery.`,
          },
          { status: 400 }
        )
      }

      providerConfig.oidcConfig = oidcConfig
    } else if (providerType === 'saml') {
      const {
@@ -224,7 +224,7 @@ export async function POST(req: NextRequest) {
      hasApiKey: !!executionParams.apiKey,
    })

    const result = await executeTool(resolvedToolName, executionParams, true)
    const result = await executeTool(resolvedToolName, executionParams)

    logger.info(`[${tracker.requestId}] Tool execution complete`, {
      toolName,
@@ -6,9 +6,10 @@ import { createLogger } from '@sim/logger'
import binaryExtensionsList from 'binary-extensions'
import { type NextRequest, NextResponse } from 'next/server'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { createPinnedUrl, validateUrlWithDNS } from '@/lib/core/security/input-validation'
import { secureFetchWithPinnedIP, validateUrlWithDNS } from '@/lib/core/security/input-validation'
import { isSupportedFileType, parseFile } from '@/lib/file-parsers'
import { isUsingCloudStorage, type StorageContext, StorageService } from '@/lib/uploads'
import { uploadExecutionFile } from '@/lib/uploads/contexts/execution'
import { UPLOAD_DIR_SERVER } from '@/lib/uploads/core/setup.server'
import { getFileMetadataByKey } from '@/lib/uploads/server/metadata'
import {
@@ -21,6 +22,7 @@ import {
} from '@/lib/uploads/utils/file-utils'
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
import { verifyFileAccess } from '@/app/api/files/authorization'
import type { UserFile } from '@/executor/types'
import '@/lib/uploads/core/setup.server'

export const dynamic = 'force-dynamic'
@@ -30,6 +32,12 @@ const logger = createLogger('FilesParseAPI')
const MAX_DOWNLOAD_SIZE_BYTES = 100 * 1024 * 1024 // 100 MB
const DOWNLOAD_TIMEOUT_MS = 30000 // 30 seconds

interface ExecutionContext {
  workspaceId: string
  workflowId: string
  executionId: string
}

interface ParseResult {
  success: boolean
  content?: string
@@ -37,6 +45,7 @@ interface ParseResult {
  filePath: string
  originalName?: string // Original filename from database (for workspace files)
  viewerUrl?: string | null // Viewer URL for the file if available
  userFile?: UserFile // UserFile object for the raw file
  metadata?: {
    fileType: string
    size: number
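A minimal sketch of how a caller might supply the optional execution context to this parse route, assuming it is served at `/api/files/parse`; the IDs and request shape are illustrative, but the field names match the handler above.

```ts
// Illustrative request only: workspaceId, workflowId, and executionId must all be
// present for the handler to build an ExecutionContext; otherwise it stays undefined.
async function requestParse(filePath: string) {
  const res = await fetch('/api/files/parse', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
      filePath,
      fileType: 'application/pdf',
      workspaceId: 'ws_123', // hypothetical IDs for illustration
      workflowId: 'wf_456',
      executionId: 'exec_789',
    }),
  })
  return res.json()
}
```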
@@ -70,27 +79,45 @@ export async function POST(request: NextRequest) {
|
||||
|
||||
const userId = authResult.userId
|
||||
const requestData = await request.json()
|
||||
const { filePath, fileType, workspaceId } = requestData
|
||||
const { filePath, fileType, workspaceId, workflowId, executionId } = requestData
|
||||
|
||||
if (!filePath || (typeof filePath === 'string' && filePath.trim() === '')) {
|
||||
return NextResponse.json({ success: false, error: 'No file path provided' }, { status: 400 })
|
||||
}
|
||||
|
||||
logger.info('File parse request received:', { filePath, fileType, workspaceId, userId })
|
||||
// Build execution context if all required fields are present
|
||||
const executionContext: ExecutionContext | undefined =
|
||||
workspaceId && workflowId && executionId
|
||||
? { workspaceId, workflowId, executionId }
|
||||
: undefined
|
||||
|
||||
logger.info('File parse request received:', {
|
||||
filePath,
|
||||
fileType,
|
||||
workspaceId,
|
||||
userId,
|
||||
hasExecutionContext: !!executionContext,
|
||||
})
|
||||
|
||||
if (Array.isArray(filePath)) {
|
||||
const results = []
|
||||
for (const path of filePath) {
|
||||
if (!path || (typeof path === 'string' && path.trim() === '')) {
|
||||
for (const singlePath of filePath) {
|
||||
if (!singlePath || (typeof singlePath === 'string' && singlePath.trim() === '')) {
|
||||
results.push({
|
||||
success: false,
|
||||
error: 'Empty file path in array',
|
||||
filePath: path || '',
|
||||
filePath: singlePath || '',
|
||||
})
|
||||
continue
|
||||
}
|
||||
|
||||
const result = await parseFileSingle(path, fileType, workspaceId, userId)
|
||||
const result = await parseFileSingle(
|
||||
singlePath,
|
||||
fileType,
|
||||
workspaceId,
|
||||
userId,
|
||||
executionContext
|
||||
)
|
||||
if (result.metadata) {
|
||||
result.metadata.processingTime = Date.now() - startTime
|
||||
}
|
||||
@@ -106,6 +133,7 @@ export async function POST(request: NextRequest) {
|
||||
fileType: result.metadata?.fileType || 'application/octet-stream',
|
||||
size: result.metadata?.size || 0,
|
||||
binary: false,
|
||||
file: result.userFile,
|
||||
},
|
||||
filePath: result.filePath,
|
||||
viewerUrl: result.viewerUrl,
|
||||
@@ -121,7 +149,7 @@ export async function POST(request: NextRequest) {
|
||||
})
|
||||
}
|
||||
|
||||
const result = await parseFileSingle(filePath, fileType, workspaceId, userId)
|
||||
const result = await parseFileSingle(filePath, fileType, workspaceId, userId, executionContext)
|
||||
|
||||
if (result.metadata) {
|
||||
result.metadata.processingTime = Date.now() - startTime
|
||||
@@ -137,6 +165,7 @@ export async function POST(request: NextRequest) {
|
||||
fileType: result.metadata?.fileType || 'application/octet-stream',
|
||||
size: result.metadata?.size || 0,
|
||||
binary: false,
|
||||
file: result.userFile,
|
||||
},
|
||||
filePath: result.filePath,
|
||||
viewerUrl: result.viewerUrl,
|
||||
@@ -164,7 +193,8 @@ async function parseFileSingle(
|
||||
filePath: string,
|
||||
fileType: string,
|
||||
workspaceId: string,
|
||||
userId: string
|
||||
userId: string,
|
||||
executionContext?: ExecutionContext
|
||||
): Promise<ParseResult> {
|
||||
logger.info('Parsing file:', filePath)
|
||||
|
||||
@@ -186,18 +216,18 @@ async function parseFileSingle(
|
||||
}
|
||||
|
||||
if (filePath.includes('/api/files/serve/')) {
|
||||
return handleCloudFile(filePath, fileType, undefined, userId)
|
||||
return handleCloudFile(filePath, fileType, undefined, userId, executionContext)
|
||||
}
|
||||
|
||||
if (filePath.startsWith('http://') || filePath.startsWith('https://')) {
|
||||
return handleExternalUrl(filePath, fileType, workspaceId, userId)
|
||||
return handleExternalUrl(filePath, fileType, workspaceId, userId, executionContext)
|
||||
}
|
||||
|
||||
if (isUsingCloudStorage()) {
|
||||
return handleCloudFile(filePath, fileType, undefined, userId)
|
||||
return handleCloudFile(filePath, fileType, undefined, userId, executionContext)
|
||||
}
|
||||
|
||||
return handleLocalFile(filePath, fileType, userId)
|
||||
return handleLocalFile(filePath, fileType, userId, executionContext)
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -230,12 +260,14 @@ function validateFilePath(filePath: string): { isValid: boolean; error?: string
|
||||
/**
|
||||
* Handle external URL
|
||||
* If workspaceId is provided, checks if file already exists and saves to workspace if not
|
||||
* If executionContext is provided, also stores the file in execution storage and returns UserFile
|
||||
*/
|
||||
async function handleExternalUrl(
|
||||
url: string,
|
||||
fileType: string,
|
||||
workspaceId: string,
|
||||
userId: string
|
||||
userId: string,
|
||||
executionContext?: ExecutionContext
|
||||
): Promise<ParseResult> {
|
||||
try {
|
||||
logger.info('Fetching external URL:', url)
|
||||
@@ -312,17 +344,13 @@ async function handleExternalUrl(
|
||||
|
||||
if (existingFile) {
|
||||
const storageFilePath = `/api/files/serve/${existingFile.key}`
|
||||
return handleCloudFile(storageFilePath, fileType, 'workspace', userId)
|
||||
return handleCloudFile(storageFilePath, fileType, 'workspace', userId, executionContext)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const pinnedUrl = createPinnedUrl(url, urlValidation.resolvedIP!)
|
||||
const response = await fetch(pinnedUrl, {
|
||||
signal: AbortSignal.timeout(DOWNLOAD_TIMEOUT_MS),
|
||||
headers: {
|
||||
Host: urlValidation.originalHostname!,
|
||||
},
|
||||
const response = await secureFetchWithPinnedIP(url, urlValidation.resolvedIP!, {
|
||||
timeout: DOWNLOAD_TIMEOUT_MS,
|
||||
})
|
||||
if (!response.ok) {
|
||||
throw new Error(`Failed to fetch URL: ${response.status} ${response.statusText}`)
|
||||
@@ -341,6 +369,19 @@ async function handleExternalUrl(
|
||||
|
||||
logger.info(`Downloaded file from URL: ${url}, size: ${buffer.length} bytes`)
|
||||
|
||||
let userFile: UserFile | undefined
|
||||
const mimeType = response.headers.get('content-type') || getMimeTypeFromExtension(extension)
|
||||
|
||||
if (executionContext) {
|
||||
try {
|
||||
userFile = await uploadExecutionFile(executionContext, buffer, filename, mimeType, userId)
|
||||
logger.info(`Stored file in execution storage: ${filename}`, { key: userFile.key })
|
||||
} catch (uploadError) {
|
||||
logger.warn(`Failed to store file in execution storage:`, uploadError)
|
||||
// Continue without userFile - parsing can still work
|
||||
}
|
||||
}
|
||||
|
||||
if (shouldCheckWorkspace) {
|
||||
try {
|
||||
const permission = await getUserEntityPermissions(userId, 'workspace', workspaceId)
|
||||
@@ -353,8 +394,6 @@ async function handleExternalUrl(
|
||||
})
|
||||
} else {
|
||||
const { uploadWorkspaceFile } = await import('@/lib/uploads/contexts/workspace')
|
||||
const mimeType =
|
||||
response.headers.get('content-type') || getMimeTypeFromExtension(extension)
|
||||
await uploadWorkspaceFile(workspaceId, userId, buffer, filename, mimeType)
|
||||
logger.info(`Saved URL file to workspace storage: ${filename}`)
|
||||
}
|
||||
@@ -363,17 +402,23 @@ async function handleExternalUrl(
|
||||
}
|
||||
}
|
||||
|
||||
let parseResult: ParseResult
|
||||
if (extension === 'pdf') {
|
||||
return await handlePdfBuffer(buffer, filename, fileType, url)
|
||||
}
|
||||
if (extension === 'csv') {
|
||||
return await handleCsvBuffer(buffer, filename, fileType, url)
|
||||
}
|
||||
if (isSupportedFileType(extension)) {
|
||||
return await handleGenericTextBuffer(buffer, filename, extension, fileType, url)
|
||||
parseResult = await handlePdfBuffer(buffer, filename, fileType, url)
|
||||
} else if (extension === 'csv') {
|
||||
parseResult = await handleCsvBuffer(buffer, filename, fileType, url)
|
||||
} else if (isSupportedFileType(extension)) {
|
||||
parseResult = await handleGenericTextBuffer(buffer, filename, extension, fileType, url)
|
||||
} else {
|
||||
parseResult = handleGenericBuffer(buffer, filename, extension, fileType)
|
||||
}
|
||||
|
||||
return handleGenericBuffer(buffer, filename, extension, fileType)
|
||||
// Attach userFile to the result
|
||||
if (userFile) {
|
||||
parseResult.userFile = userFile
|
||||
}
|
||||
|
||||
return parseResult
|
||||
} catch (error) {
|
||||
logger.error(`Error handling external URL ${url}:`, error)
|
||||
return {
|
||||
@@ -386,12 +431,15 @@ async function handleExternalUrl(
|
||||
|
||||
/**
|
||||
* Handle file stored in cloud storage
|
||||
* If executionContext is provided and file is not already from execution storage,
|
||||
* copies the file to execution storage and returns UserFile
|
||||
*/
|
||||
async function handleCloudFile(
|
||||
filePath: string,
|
||||
fileType: string,
|
||||
explicitContext: string | undefined,
|
||||
userId: string
|
||||
userId: string,
|
||||
executionContext?: ExecutionContext
|
||||
): Promise<ParseResult> {
|
||||
try {
|
||||
const cloudKey = extractStorageKey(filePath)
|
||||
@@ -438,6 +486,7 @@ async function handleCloudFile(
|
||||
|
||||
const filename = originalFilename || cloudKey.split('/').pop() || cloudKey
|
||||
const extension = path.extname(filename).toLowerCase().substring(1)
|
||||
const mimeType = getMimeTypeFromExtension(extension)
|
||||
|
||||
const normalizedFilePath = `/api/files/serve/${encodeURIComponent(cloudKey)}?context=${context}`
|
||||
let workspaceIdFromKey: string | undefined
|
||||
@@ -453,6 +502,39 @@ async function handleCloudFile(
|
||||
|
||||
const viewerUrl = getViewerUrl(cloudKey, workspaceIdFromKey)
|
||||
|
||||
// Store file in execution storage if executionContext is provided
|
||||
let userFile: UserFile | undefined
|
||||
|
||||
if (executionContext) {
|
||||
// If file is already from execution context, create UserFile reference without re-uploading
|
||||
if (context === 'execution') {
|
||||
userFile = {
|
||||
id: `file_${Date.now()}_${Math.random().toString(36).substring(2, 9)}`,
|
||||
name: filename,
|
||||
url: normalizedFilePath,
|
||||
size: fileBuffer.length,
|
||||
type: mimeType,
|
||||
key: cloudKey,
|
||||
context: 'execution',
|
||||
}
|
||||
logger.info(`Created UserFile reference for existing execution file: ${filename}`)
|
||||
} else {
|
||||
// Copy from workspace/other storage to execution storage
|
||||
try {
|
||||
userFile = await uploadExecutionFile(
|
||||
executionContext,
|
||||
fileBuffer,
|
||||
filename,
|
||||
mimeType,
|
||||
userId
|
||||
)
|
||||
logger.info(`Copied file to execution storage: ${filename}`, { key: userFile.key })
|
||||
} catch (uploadError) {
|
||||
logger.warn(`Failed to copy file to execution storage:`, uploadError)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let parseResult: ParseResult
|
||||
if (extension === 'pdf') {
|
||||
parseResult = await handlePdfBuffer(fileBuffer, filename, fileType, normalizedFilePath)
|
||||
@@ -477,6 +559,11 @@ async function handleCloudFile(
|
||||
|
||||
parseResult.viewerUrl = viewerUrl
|
||||
|
||||
// Attach userFile to the result
|
||||
if (userFile) {
|
||||
parseResult.userFile = userFile
|
||||
}
|
||||
|
||||
return parseResult
|
||||
} catch (error) {
|
||||
logger.error(`Error handling cloud file ${filePath}:`, error)
|
||||
@@ -500,7 +587,8 @@ async function handleCloudFile(
|
||||
async function handleLocalFile(
|
||||
filePath: string,
|
||||
fileType: string,
|
||||
userId: string
|
||||
userId: string,
|
||||
executionContext?: ExecutionContext
|
||||
): Promise<ParseResult> {
|
||||
try {
|
||||
const filename = filePath.split('/').pop() || filePath
|
||||
@@ -540,13 +628,32 @@ async function handleLocalFile(
|
||||
const hash = createHash('md5').update(fileBuffer).digest('hex')
|
||||
|
||||
const extension = path.extname(filename).toLowerCase().substring(1)
|
||||
const mimeType = fileType || getMimeTypeFromExtension(extension)
|
||||
|
||||
// Store file in execution storage if executionContext is provided
|
||||
let userFile: UserFile | undefined
|
||||
if (executionContext) {
|
||||
try {
|
||||
userFile = await uploadExecutionFile(
|
||||
executionContext,
|
||||
fileBuffer,
|
||||
filename,
|
||||
mimeType,
|
||||
userId
|
||||
)
|
||||
logger.info(`Stored local file in execution storage: ${filename}`, { key: userFile.key })
|
||||
} catch (uploadError) {
|
||||
logger.warn(`Failed to store local file in execution storage:`, uploadError)
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
success: true,
|
||||
content: result.content,
|
||||
filePath,
|
||||
userFile,
|
||||
metadata: {
|
||||
fileType: fileType || getMimeTypeFromExtension(extension),
|
||||
fileType: mimeType,
|
||||
size: stats.size,
|
||||
hash,
|
||||
processingTime: 0,
|
||||
|
||||
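The parse-route diff above replaces the manual `createPinnedUrl` + `Host` header pattern with `secureFetchWithPinnedIP`. A rough sketch of the underlying idea — validate DNS once, then connect to that specific IP while preserving the original hostname — assuming the helper is roughly a `fetch` wrapper; the real implementation in `@/lib/core/security/input-validation` may differ, and HTTPS certificate validation is one reason to centralize it in a helper rather than repeat this pattern inline.

```ts
// Hedged sketch: connect to a pre-validated IP to avoid DNS rebinding between
// validation and download, keeping the original Host header for virtual hosting.
async function fetchWithPinnedIPSketch(originalUrl: string, resolvedIP: string, timeoutMs: number) {
  const parsed = new URL(originalUrl)
  const hostname = parsed.hostname
  parsed.hostname = resolvedIP // connect to the validated address, not a fresh DNS lookup
  return fetch(parsed.toString(), {
    signal: AbortSignal.timeout(timeoutMs),
    headers: { Host: hostname },
  })
}
```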
@@ -1,395 +0,0 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import type { NextRequest } from 'next/server'
|
||||
import { NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { checkHybridAuth } from '@/lib/auth/hybrid'
|
||||
import { generateInternalToken } from '@/lib/auth/internal'
|
||||
import { isDev } from '@/lib/core/config/feature-flags'
|
||||
import { createPinnedUrl, validateUrlWithDNS } from '@/lib/core/security/input-validation'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import { getBaseUrl } from '@/lib/core/utils/urls'
|
||||
import { executeTool } from '@/tools'
|
||||
import { getTool, validateRequiredParametersAfterMerge } from '@/tools/utils'
|
||||
|
||||
const logger = createLogger('ProxyAPI')
|
||||
|
||||
const proxyPostSchema = z.object({
|
||||
toolId: z.string().min(1, 'toolId is required'),
|
||||
params: z.record(z.any()).optional().default({}),
|
||||
executionContext: z
|
||||
.object({
|
||||
workflowId: z.string().optional(),
|
||||
workspaceId: z.string().optional(),
|
||||
executionId: z.string().optional(),
|
||||
userId: z.string().optional(),
|
||||
})
|
||||
.optional(),
|
||||
})
|
||||
|
||||
/**
|
||||
* Creates a minimal set of default headers for proxy requests
|
||||
* @returns Record of HTTP headers
|
||||
*/
|
||||
const getProxyHeaders = (): Record<string, string> => {
|
||||
return {
|
||||
'User-Agent':
|
||||
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/135.0.0.0 Safari/537.36',
|
||||
Accept: '*/*',
|
||||
'Accept-Encoding': 'gzip, deflate, br',
|
||||
'Cache-Control': 'no-cache',
|
||||
Connection: 'keep-alive',
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Formats a response with CORS headers
|
||||
* @param responseData Response data object
|
||||
* @param status HTTP status code
|
||||
* @returns NextResponse with CORS headers
|
||||
*/
|
||||
const formatResponse = (responseData: any, status = 200) => {
|
||||
return NextResponse.json(responseData, {
|
||||
status,
|
||||
headers: {
|
||||
'Access-Control-Allow-Origin': '*',
|
||||
'Access-Control-Allow-Methods': 'GET, POST, PUT, DELETE, OPTIONS',
|
||||
'Access-Control-Allow-Headers': 'Content-Type, Authorization',
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates an error response with consistent formatting
|
||||
* @param error Error object or message
|
||||
* @param status HTTP status code
|
||||
* @param additionalData Additional data to include in the response
|
||||
* @returns Formatted error response
|
||||
*/
|
||||
const createErrorResponse = (error: any, status = 500, additionalData = {}) => {
|
||||
const errorMessage = error instanceof Error ? error.message : String(error)
|
||||
const errorStack = error instanceof Error ? error.stack : undefined
|
||||
|
||||
logger.error('Creating error response', {
|
||||
errorMessage,
|
||||
status,
|
||||
stack: isDev ? errorStack : undefined,
|
||||
})
|
||||
|
||||
return formatResponse(
|
||||
{
|
||||
success: false,
|
||||
error: errorMessage,
|
||||
stack: isDev ? errorStack : undefined,
|
||||
...additionalData,
|
||||
},
|
||||
status
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* GET handler for direct external URL proxying
|
||||
* This allows for GET requests to external APIs
|
||||
*/
|
||||
export async function GET(request: Request) {
|
||||
const url = new URL(request.url)
|
||||
const targetUrl = url.searchParams.get('url')
|
||||
const requestId = generateRequestId()
|
||||
|
||||
// Vault download proxy: /api/proxy?vaultDownload=1&bucket=...&object=...&credentialId=...
|
||||
const vaultDownload = url.searchParams.get('vaultDownload')
|
||||
if (vaultDownload === '1') {
|
||||
try {
|
||||
const bucket = url.searchParams.get('bucket')
|
||||
const objectParam = url.searchParams.get('object')
|
||||
const credentialId = url.searchParams.get('credentialId')
|
||||
|
||||
if (!bucket || !objectParam || !credentialId) {
|
||||
return createErrorResponse('Missing bucket, object, or credentialId', 400)
|
||||
}
|
||||
|
||||
// Fetch access token using existing token API
|
||||
const baseUrl = new URL(getBaseUrl())
|
||||
const tokenUrl = new URL('/api/auth/oauth/token', baseUrl)
|
||||
|
||||
// Build headers: forward session cookies if present; include internal auth for server-side
|
||||
const tokenHeaders: Record<string, string> = { 'Content-Type': 'application/json' }
|
||||
const incomingCookie = request.headers.get('cookie')
|
||||
if (incomingCookie) tokenHeaders.Cookie = incomingCookie
|
||||
try {
|
||||
const internalToken = await generateInternalToken()
|
||||
tokenHeaders.Authorization = `Bearer ${internalToken}`
|
||||
} catch (_e) {
|
||||
// best-effort internal auth
|
||||
}
|
||||
|
||||
// Optional workflow context for collaboration auth
|
||||
const workflowId = url.searchParams.get('workflowId') || undefined
|
||||
|
||||
const tokenRes = await fetch(tokenUrl.toString(), {
|
||||
method: 'POST',
|
||||
headers: tokenHeaders,
|
||||
body: JSON.stringify({ credentialId, workflowId }),
|
||||
})
|
||||
|
||||
if (!tokenRes.ok) {
|
||||
const err = await tokenRes.text()
|
||||
return createErrorResponse(`Failed to fetch access token: ${err}`, 401)
|
||||
}
|
||||
|
||||
const tokenJson = await tokenRes.json()
|
||||
const accessToken = tokenJson.accessToken
|
||||
if (!accessToken) {
|
||||
return createErrorResponse('No access token available', 401)
|
||||
}
|
||||
|
||||
// Avoid double-encoding: incoming object may already be percent-encoded
|
||||
const objectDecoded = decodeURIComponent(objectParam)
|
||||
const gcsUrl = `https://storage.googleapis.com/storage/v1/b/${encodeURIComponent(
|
||||
bucket
|
||||
)}/o/${encodeURIComponent(objectDecoded)}?alt=media`
|
||||
|
||||
const fileRes = await fetch(gcsUrl, {
|
||||
headers: { Authorization: `Bearer ${accessToken}` },
|
||||
})
|
||||
|
||||
if (!fileRes.ok) {
|
||||
const errText = await fileRes.text()
|
||||
return createErrorResponse(errText || 'Failed to download file', fileRes.status)
|
||||
}
|
||||
|
||||
const headers = new Headers()
|
||||
fileRes.headers.forEach((v, k) => headers.set(k, v))
|
||||
return new NextResponse(fileRes.body, { status: 200, headers })
|
||||
} catch (error: any) {
|
||||
logger.error(`[${requestId}] Vault download proxy failed`, {
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
})
|
||||
return createErrorResponse('Vault download failed', 500)
|
||||
}
|
||||
}
|
||||
|
||||
if (!targetUrl) {
|
||||
logger.error(`[${requestId}] Missing 'url' parameter`)
|
||||
return createErrorResponse("Missing 'url' parameter", 400)
|
||||
}
|
||||
|
||||
const urlValidation = await validateUrlWithDNS(targetUrl)
|
||||
if (!urlValidation.isValid) {
|
||||
logger.warn(`[${requestId}] Blocked proxy request`, {
|
||||
url: targetUrl.substring(0, 100),
|
||||
error: urlValidation.error,
|
||||
})
|
||||
return createErrorResponse(urlValidation.error || 'Invalid URL', 403)
|
||||
}
|
||||
|
||||
const method = url.searchParams.get('method') || 'GET'
|
||||
|
||||
const bodyParam = url.searchParams.get('body')
|
||||
let body: string | undefined
|
||||
|
||||
if (bodyParam && ['POST', 'PUT', 'PATCH'].includes(method.toUpperCase())) {
|
||||
try {
|
||||
body = decodeURIComponent(bodyParam)
|
||||
} catch (error) {
|
||||
logger.warn(`[${requestId}] Failed to decode body parameter`, error)
|
||||
}
|
||||
}
|
||||
|
||||
const customHeaders: Record<string, string> = {}
|
||||
|
||||
for (const [key, value] of url.searchParams.entries()) {
|
||||
if (key.startsWith('header.')) {
|
||||
const headerName = key.substring(7)
|
||||
customHeaders[headerName] = value
|
||||
}
|
||||
}
|
||||
|
||||
if (body && !customHeaders['Content-Type']) {
|
||||
customHeaders['Content-Type'] = 'application/json'
|
||||
}
|
||||
|
||||
logger.info(`[${requestId}] Proxying ${method} request to: ${targetUrl}`)
|
||||
|
||||
try {
|
||||
const pinnedUrl = createPinnedUrl(targetUrl, urlValidation.resolvedIP!)
|
||||
const response = await fetch(pinnedUrl, {
|
||||
method: method,
|
||||
headers: {
|
||||
...getProxyHeaders(),
|
||||
...customHeaders,
|
||||
Host: urlValidation.originalHostname!,
|
||||
},
|
||||
body: body || undefined,
|
||||
})
|
||||
|
||||
const contentType = response.headers.get('content-type') || ''
|
||||
let data
|
||||
|
||||
if (contentType.includes('application/json')) {
|
||||
data = await response.json()
|
||||
} else {
|
||||
data = await response.text()
|
||||
}
|
||||
|
||||
const errorMessage = !response.ok
|
||||
? data && typeof data === 'object' && data.error
|
||||
? `${data.error.message || JSON.stringify(data.error)}`
|
||||
: response.statusText || `HTTP error ${response.status}`
|
||||
: undefined
|
||||
|
||||
if (!response.ok) {
|
||||
logger.error(`[${requestId}] External API error: ${response.status} ${response.statusText}`)
|
||||
}
|
||||
|
||||
return formatResponse({
|
||||
success: response.ok,
|
||||
status: response.status,
|
||||
statusText: response.statusText,
|
||||
headers: Object.fromEntries(response.headers.entries()),
|
||||
data,
|
||||
error: errorMessage,
|
||||
})
|
||||
} catch (error: any) {
|
||||
logger.error(`[${requestId}] Proxy GET request failed`, {
|
||||
url: targetUrl,
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
stack: error instanceof Error ? error.stack : undefined,
|
||||
})
|
||||
|
||||
return createErrorResponse(error)
|
||||
}
|
||||
}
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const requestId = generateRequestId()
|
||||
const startTime = new Date()
|
||||
const startTimeISO = startTime.toISOString()
|
||||
|
||||
try {
|
||||
const authResult = await checkHybridAuth(request, { requireWorkflowId: false })
|
||||
if (!authResult.success) {
|
||||
logger.error(`[${requestId}] Authentication failed for proxy:`, authResult.error)
|
||||
return createErrorResponse('Unauthorized', 401)
|
||||
}
|
||||
|
||||
let requestBody
|
||||
try {
|
||||
requestBody = await request.json()
|
||||
} catch (parseError) {
|
||||
logger.error(`[${requestId}] Failed to parse request body`, {
|
||||
error: parseError instanceof Error ? parseError.message : String(parseError),
|
||||
})
|
||||
throw new Error('Invalid JSON in request body')
|
||||
}
|
||||
|
||||
const validationResult = proxyPostSchema.safeParse(requestBody)
|
||||
if (!validationResult.success) {
|
||||
logger.error(`[${requestId}] Request validation failed`, {
|
||||
errors: validationResult.error.errors,
|
||||
})
|
||||
const errorMessages = validationResult.error.errors
|
||||
.map((err) => `${err.path.join('.')}: ${err.message}`)
|
||||
.join(', ')
|
||||
throw new Error(`Validation failed: ${errorMessages}`)
|
||||
}
|
||||
|
||||
const { toolId, params } = validationResult.data
|
||||
|
||||
logger.info(`[${requestId}] Processing tool: ${toolId}`)
|
||||
|
||||
const tool = getTool(toolId)
|
||||
|
||||
if (!tool) {
|
||||
logger.error(`[${requestId}] Tool not found: ${toolId}`)
|
||||
throw new Error(`Tool not found: ${toolId}`)
|
||||
}
|
||||
|
||||
try {
|
||||
validateRequiredParametersAfterMerge(toolId, tool, params)
|
||||
} catch (validationError) {
|
||||
logger.warn(`[${requestId}] Tool validation failed for ${toolId}`, {
|
||||
error: validationError instanceof Error ? validationError.message : String(validationError),
|
||||
})
|
||||
|
||||
const endTime = new Date()
|
||||
const endTimeISO = endTime.toISOString()
|
||||
const duration = endTime.getTime() - startTime.getTime()
|
||||
|
||||
return createErrorResponse(validationError, 400, {
|
||||
startTime: startTimeISO,
|
||||
endTime: endTimeISO,
|
||||
duration,
|
||||
})
|
||||
}
|
||||
|
||||
const hasFileOutputs =
|
||||
tool.outputs &&
|
||||
Object.values(tool.outputs).some(
|
||||
(output) => output.type === 'file' || output.type === 'file[]'
|
||||
)
|
||||
|
||||
const result = await executeTool(
|
||||
toolId,
|
||||
params,
|
||||
true, // skipProxy (we're already in the proxy)
|
||||
!hasFileOutputs, // skipPostProcess (don't skip if tool has file outputs)
|
||||
undefined // execution context is not available in proxy context
|
||||
)
|
||||
|
||||
if (!result.success) {
|
||||
logger.warn(`[${requestId}] Tool execution failed for ${toolId}`, {
|
||||
error: result.error || 'Unknown error',
|
||||
})
|
||||
|
||||
throw new Error(result.error || 'Tool execution failed')
|
||||
}
|
||||
|
||||
const endTime = new Date()
|
||||
const endTimeISO = endTime.toISOString()
|
||||
const duration = endTime.getTime() - startTime.getTime()
|
||||
|
||||
const responseWithTimingData = {
|
||||
...result,
|
||||
startTime: startTimeISO,
|
||||
endTime: endTimeISO,
|
||||
duration,
|
||||
timing: {
|
||||
startTime: startTimeISO,
|
||||
endTime: endTimeISO,
|
||||
duration,
|
||||
},
|
||||
}
|
||||
|
||||
logger.info(`[${requestId}] Tool executed successfully: ${toolId} (${duration}ms)`)
|
||||
|
||||
return formatResponse(responseWithTimingData)
|
||||
} catch (error: any) {
|
||||
logger.error(`[${requestId}] Proxy request failed`, {
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
stack: error instanceof Error ? error.stack : undefined,
|
||||
name: error instanceof Error ? error.name : undefined,
|
||||
})
|
||||
|
||||
const endTime = new Date()
|
||||
const endTimeISO = endTime.toISOString()
|
||||
const duration = endTime.getTime() - startTime.getTime()
|
||||
|
||||
return createErrorResponse(error, 500, {
|
||||
startTime: startTimeISO,
|
||||
endTime: endTimeISO,
|
||||
duration,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
export async function OPTIONS() {
|
||||
return new NextResponse(null, {
|
||||
status: 204,
|
||||
headers: {
|
||||
'Access-Control-Allow-Origin': '*',
|
||||
'Access-Control-Allow-Methods': 'GET, POST, PUT, DELETE, OPTIONS',
|
||||
'Access-Control-Allow-Headers': 'Content-Type, Authorization',
|
||||
'Access-Control-Max-Age': '86400',
|
||||
},
|
||||
})
|
||||
}
|
||||
@@ -5,7 +5,11 @@ import { checkHybridAuth } from '@/lib/auth/hybrid'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import { getBaseUrl } from '@/lib/core/utils/urls'
|
||||
import { StorageService } from '@/lib/uploads'
|
||||
import { extractStorageKey, inferContextFromKey } from '@/lib/uploads/utils/file-utils'
|
||||
import {
|
||||
extractStorageKey,
|
||||
inferContextFromKey,
|
||||
isInternalFileUrl,
|
||||
} from '@/lib/uploads/utils/file-utils'
|
||||
import { verifyFileAccess } from '@/app/api/files/authorization'
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
@@ -47,13 +51,13 @@ export async function POST(request: NextRequest) {
|
||||
|
||||
logger.info(`[${requestId}] Mistral parse request`, {
|
||||
filePath: validatedData.filePath,
|
||||
isWorkspaceFile: validatedData.filePath.includes('/api/files/serve/'),
|
||||
isWorkspaceFile: isInternalFileUrl(validatedData.filePath),
|
||||
userId,
|
||||
})
|
||||
|
||||
let fileUrl = validatedData.filePath
|
||||
|
||||
if (validatedData.filePath?.includes('/api/files/serve/')) {
|
||||
if (isInternalFileUrl(validatedData.filePath)) {
|
||||
try {
|
||||
const storageKey = extractStorageKey(validatedData.filePath)
|
||||
|
||||
|
||||
@@ -5,7 +5,11 @@ import { checkHybridAuth } from '@/lib/auth/hybrid'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import { getBaseUrl } from '@/lib/core/utils/urls'
|
||||
import { StorageService } from '@/lib/uploads'
|
||||
import { extractStorageKey, inferContextFromKey } from '@/lib/uploads/utils/file-utils'
|
||||
import {
|
||||
extractStorageKey,
|
||||
inferContextFromKey,
|
||||
isInternalFileUrl,
|
||||
} from '@/lib/uploads/utils/file-utils'
|
||||
import { verifyFileAccess } from '@/app/api/files/authorization'
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
@@ -48,13 +52,13 @@ export async function POST(request: NextRequest) {
|
||||
|
||||
logger.info(`[${requestId}] Pulse parse request`, {
|
||||
filePath: validatedData.filePath,
|
||||
isWorkspaceFile: validatedData.filePath.includes('/api/files/serve/'),
|
||||
isWorkspaceFile: isInternalFileUrl(validatedData.filePath),
|
||||
userId,
|
||||
})
|
||||
|
||||
let fileUrl = validatedData.filePath
|
||||
|
||||
if (validatedData.filePath?.includes('/api/files/serve/')) {
|
||||
if (isInternalFileUrl(validatedData.filePath)) {
|
||||
try {
|
||||
const storageKey = extractStorageKey(validatedData.filePath)
|
||||
const context = inferContextFromKey(storageKey)
|
||||
|
||||
@@ -5,7 +5,11 @@ import { checkHybridAuth } from '@/lib/auth/hybrid'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import { getBaseUrl } from '@/lib/core/utils/urls'
|
||||
import { StorageService } from '@/lib/uploads'
|
||||
import { extractStorageKey, inferContextFromKey } from '@/lib/uploads/utils/file-utils'
|
||||
import {
|
||||
extractStorageKey,
|
||||
inferContextFromKey,
|
||||
isInternalFileUrl,
|
||||
} from '@/lib/uploads/utils/file-utils'
|
||||
import { verifyFileAccess } from '@/app/api/files/authorization'
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
@@ -44,13 +48,13 @@ export async function POST(request: NextRequest) {
|
||||
|
||||
logger.info(`[${requestId}] Reducto parse request`, {
|
||||
filePath: validatedData.filePath,
|
||||
isWorkspaceFile: validatedData.filePath.includes('/api/files/serve/'),
|
||||
isWorkspaceFile: isInternalFileUrl(validatedData.filePath),
|
||||
userId,
|
||||
})
|
||||
|
||||
let fileUrl = validatedData.filePath
|
||||
|
||||
if (validatedData.filePath?.includes('/api/files/serve/')) {
|
||||
if (isInternalFileUrl(validatedData.filePath)) {
|
||||
try {
|
||||
const storageKey = extractStorageKey(validatedData.filePath)
|
||||
const context = inferContextFromKey(storageKey)
|
||||
|
||||
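The three hunks above swap ad-hoc `includes('/api/files/serve/')` checks for a shared `isInternalFileUrl` helper. A plausible sketch of such a helper, assuming it only needs to recognize the app's own serve route; the actual implementation in `@/lib/uploads/utils/file-utils` may cover more cases.

```ts
// Assumed behavior: treat only the app's own /api/files/serve/ URLs (relative or
// absolute) as internal uploads; everything else is an external URL.
function isInternalFileUrlSketch(filePath: string): boolean {
  if (!filePath) return false
  try {
    const pathname = filePath.startsWith('http') ? new URL(filePath).pathname : filePath
    return pathname.includes('/api/files/serve/')
  } catch {
    return false
  }
}
```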
@@ -79,11 +79,13 @@ export async function POST(request: NextRequest) {
    // Generate public URL for destination (properly encode the destination key)
    const encodedDestKey = validatedData.destinationKey.split('/').map(encodeURIComponent).join('/')
    const url = `https://${validatedData.destinationBucket}.s3.${validatedData.region}.amazonaws.com/${encodedDestKey}`
    const uri = `s3://${validatedData.destinationBucket}/${validatedData.destinationKey}`

    return NextResponse.json({
      success: true,
      output: {
        url,
        uri,
        copySourceVersionId: result.CopySourceVersionId,
        versionId: result.VersionId,
        etag: result.CopyObjectResult?.ETag,

@@ -117,11 +117,13 @@ export async function POST(request: NextRequest) {

    const encodedKey = validatedData.objectKey.split('/').map(encodeURIComponent).join('/')
    const url = `https://${validatedData.bucketName}.s3.${validatedData.region}.amazonaws.com/${encodedKey}`
    const uri = `s3://${validatedData.bucketName}/${validatedData.objectKey}`

    return NextResponse.json({
      success: true,
      output: {
        url,
        uri,
        etag: result.ETag,
        location: url,
        key: validatedData.objectKey,

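Both hunks above now return an `s3://` URI alongside the virtual-hosted-style HTTPS URL. A small worked example of the two formats for the same object (bucket, region, and key are made up):

```ts
// For bucket "my-bucket", region "us-east-1", key "reports/2024 summary.pdf":
const objectKey = 'reports/2024 summary.pdf'
const encodedKey = objectKey.split('/').map(encodeURIComponent).join('/')
// The HTTPS URL encodes each path segment; the s3:// URI keeps the raw key.
const url = `https://my-bucket.s3.us-east-1.amazonaws.com/${encodedKey}`
// -> https://my-bucket.s3.us-east-1.amazonaws.com/reports/2024%20summary.pdf
const uri = `s3://my-bucket/${objectKey}`
// -> s3://my-bucket/reports/2024 summary.pdf
```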
637
apps/sim/app/api/tools/textract/parse/route.ts
Normal file
@@ -0,0 +1,637 @@
import crypto from 'crypto'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import {
  validateAwsRegion,
  validateExternalUrl,
  validateS3BucketName,
} from '@/lib/core/security/input-validation'
import { generateRequestId } from '@/lib/core/utils/request'
import { StorageService } from '@/lib/uploads'
import {
  extractStorageKey,
  inferContextFromKey,
  isInternalFileUrl,
} from '@/lib/uploads/utils/file-utils'
import { verifyFileAccess } from '@/app/api/files/authorization'

export const dynamic = 'force-dynamic'
export const maxDuration = 300 // 5 minutes for large multi-page PDF processing

const logger = createLogger('TextractParseAPI')

const QuerySchema = z.object({
  Text: z.string().min(1),
  Alias: z.string().optional(),
  Pages: z.array(z.string()).optional(),
})

const TextractParseSchema = z
|
||||
.object({
|
||||
accessKeyId: z.string().min(1, 'AWS Access Key ID is required'),
|
||||
secretAccessKey: z.string().min(1, 'AWS Secret Access Key is required'),
|
||||
region: z.string().min(1, 'AWS region is required'),
|
||||
processingMode: z.enum(['sync', 'async']).optional().default('sync'),
|
||||
filePath: z.string().optional(),
|
||||
s3Uri: z.string().optional(),
|
||||
featureTypes: z
|
||||
.array(z.enum(['TABLES', 'FORMS', 'QUERIES', 'SIGNATURES', 'LAYOUT']))
|
||||
.optional(),
|
||||
queries: z.array(QuerySchema).optional(),
|
||||
})
|
||||
.superRefine((data, ctx) => {
|
||||
const regionValidation = validateAwsRegion(data.region, 'AWS region')
|
||||
if (!regionValidation.isValid) {
|
||||
ctx.addIssue({
|
||||
code: z.ZodIssueCode.custom,
|
||||
message: regionValidation.error,
|
||||
path: ['region'],
|
||||
})
|
||||
}
|
||||
})
|
||||
|
||||
function getSignatureKey(
|
||||
key: string,
|
||||
dateStamp: string,
|
||||
regionName: string,
|
||||
serviceName: string
|
||||
): Buffer {
|
||||
const kDate = crypto.createHmac('sha256', `AWS4${key}`).update(dateStamp).digest()
|
||||
const kRegion = crypto.createHmac('sha256', kDate).update(regionName).digest()
|
||||
const kService = crypto.createHmac('sha256', kRegion).update(serviceName).digest()
|
||||
const kSigning = crypto.createHmac('sha256', kService).update('aws4_request').digest()
|
||||
return kSigning
|
||||
}
|
||||
|
||||
function signAwsRequest(
|
||||
method: string,
|
||||
host: string,
|
||||
uri: string,
|
||||
body: string,
|
||||
accessKeyId: string,
|
||||
secretAccessKey: string,
|
||||
region: string,
|
||||
service: string,
|
||||
amzTarget: string
|
||||
): Record<string, string> {
|
||||
const date = new Date()
|
||||
const amzDate = date.toISOString().replace(/[:-]|\.\d{3}/g, '')
|
||||
const dateStamp = amzDate.slice(0, 8)
|
||||
|
||||
const payloadHash = crypto.createHash('sha256').update(body).digest('hex')
|
||||
|
||||
const canonicalHeaders =
|
||||
`content-type:application/x-amz-json-1.1\n` +
|
||||
`host:${host}\n` +
|
||||
`x-amz-date:${amzDate}\n` +
|
||||
`x-amz-target:${amzTarget}\n`
|
||||
|
||||
const signedHeaders = 'content-type;host;x-amz-date;x-amz-target'
|
||||
|
||||
const canonicalRequest = `${method}\n${uri}\n\n${canonicalHeaders}\n${signedHeaders}\n${payloadHash}`
|
||||
|
||||
const algorithm = 'AWS4-HMAC-SHA256'
|
||||
const credentialScope = `${dateStamp}/${region}/${service}/aws4_request`
|
||||
const stringToSign = `${algorithm}\n${amzDate}\n${credentialScope}\n${crypto.createHash('sha256').update(canonicalRequest).digest('hex')}`
|
||||
|
||||
const signingKey = getSignatureKey(secretAccessKey, dateStamp, region, service)
|
||||
const signature = crypto.createHmac('sha256', signingKey).update(stringToSign).digest('hex')
|
||||
|
||||
const authorizationHeader = `${algorithm} Credential=${accessKeyId}/${credentialScope}, SignedHeaders=${signedHeaders}, Signature=${signature}`
|
||||
|
||||
return {
|
||||
'Content-Type': 'application/x-amz-json-1.1',
|
||||
Host: host,
|
||||
'X-Amz-Date': amzDate,
|
||||
'X-Amz-Target': amzTarget,
|
||||
Authorization: authorizationHeader,
|
||||
}
|
||||
}
|
||||
|
||||
async function fetchDocumentBytes(url: string): Promise<{ bytes: string; contentType: string }> {
|
||||
const response = await fetch(url)
|
||||
if (!response.ok) {
|
||||
throw new Error(`Failed to fetch document: ${response.statusText}`)
|
||||
}
|
||||
|
||||
const arrayBuffer = await response.arrayBuffer()
|
||||
const bytes = Buffer.from(arrayBuffer).toString('base64')
|
||||
const contentType = response.headers.get('content-type') || 'application/octet-stream'
|
||||
|
||||
return { bytes, contentType }
|
||||
}
|
||||
|
||||
function parseS3Uri(s3Uri: string): { bucket: string; key: string } {
|
||||
const match = s3Uri.match(/^s3:\/\/([^/]+)\/(.+)$/)
|
||||
if (!match) {
|
||||
throw new Error(
|
||||
`Invalid S3 URI format: ${s3Uri}. Expected format: s3://bucket-name/path/to/object`
|
||||
)
|
||||
}
|
||||
|
||||
const bucket = match[1]
|
||||
const key = match[2]
|
||||
|
||||
const bucketValidation = validateS3BucketName(bucket, 'S3 bucket name')
|
||||
if (!bucketValidation.isValid) {
|
||||
throw new Error(bucketValidation.error)
|
||||
}
|
||||
|
||||
if (key.includes('..') || key.startsWith('/')) {
|
||||
throw new Error('S3 key contains invalid path traversal sequences')
|
||||
}
|
||||
|
||||
return { bucket, key }
|
||||
}
|
||||
|
||||
function sleep(ms: number): Promise<void> {
|
||||
return new Promise((resolve) => setTimeout(resolve, ms))
|
||||
}
|
||||
|
||||
async function callTextractAsync(
|
||||
host: string,
|
||||
amzTarget: string,
|
||||
body: Record<string, unknown>,
|
||||
accessKeyId: string,
|
||||
secretAccessKey: string,
|
||||
region: string
|
||||
): Promise<Record<string, unknown>> {
|
||||
const bodyString = JSON.stringify(body)
|
||||
const headers = signAwsRequest(
|
||||
'POST',
|
||||
host,
|
||||
'/',
|
||||
bodyString,
|
||||
accessKeyId,
|
||||
secretAccessKey,
|
||||
region,
|
||||
'textract',
|
||||
amzTarget
|
||||
)
|
||||
|
||||
const response = await fetch(`https://${host}/`, {
|
||||
method: 'POST',
|
||||
headers,
|
||||
body: bodyString,
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
const errorText = await response.text()
|
||||
let errorMessage = `Textract API error: ${response.statusText}`
|
||||
try {
|
||||
const errorJson = JSON.parse(errorText)
|
||||
if (errorJson.Message) {
|
||||
errorMessage = errorJson.Message
|
||||
} else if (errorJson.__type) {
|
||||
errorMessage = `${errorJson.__type}: ${errorJson.message || errorText}`
|
||||
}
|
||||
} catch {
|
||||
// Use default error message
|
||||
}
|
||||
throw new Error(errorMessage)
|
||||
}
|
||||
|
||||
return response.json()
|
||||
}
|
||||
|
||||
async function pollForJobCompletion(
|
||||
host: string,
|
||||
jobId: string,
|
||||
accessKeyId: string,
|
||||
secretAccessKey: string,
|
||||
region: string,
|
||||
useAnalyzeDocument: boolean,
|
||||
requestId: string
|
||||
): Promise<Record<string, unknown>> {
|
||||
const pollIntervalMs = 5000 // 5 seconds between polls
|
||||
const maxPollTimeMs = 180000 // 3 minutes maximum polling time
|
||||
const maxAttempts = Math.ceil(maxPollTimeMs / pollIntervalMs)
|
||||
|
||||
const getTarget = useAnalyzeDocument
|
||||
? 'Textract.GetDocumentAnalysis'
|
||||
: 'Textract.GetDocumentTextDetection'
|
||||
|
||||
for (let attempt = 0; attempt < maxAttempts; attempt++) {
|
||||
const result = await callTextractAsync(
|
||||
host,
|
||||
getTarget,
|
||||
{ JobId: jobId },
|
||||
accessKeyId,
|
||||
secretAccessKey,
|
||||
region
|
||||
)
|
||||
|
||||
const jobStatus = result.JobStatus as string
|
||||
|
||||
if (jobStatus === 'SUCCEEDED') {
|
||||
logger.info(`[${requestId}] Async job completed successfully after ${attempt + 1} polls`)
|
||||
|
||||
let allBlocks = (result.Blocks as unknown[]) || []
|
||||
let nextToken = result.NextToken as string | undefined
|
||||
|
||||
while (nextToken) {
|
||||
const nextResult = await callTextractAsync(
|
||||
host,
|
||||
getTarget,
|
||||
{ JobId: jobId, NextToken: nextToken },
|
||||
accessKeyId,
|
||||
secretAccessKey,
|
||||
region
|
||||
)
|
||||
allBlocks = allBlocks.concat((nextResult.Blocks as unknown[]) || [])
|
||||
nextToken = nextResult.NextToken as string | undefined
|
||||
}
|
||||
|
||||
return {
|
||||
...result,
|
||||
Blocks: allBlocks,
|
||||
}
|
||||
}
|
||||
|
||||
if (jobStatus === 'FAILED') {
|
||||
throw new Error(`Textract job failed: ${result.StatusMessage || 'Unknown error'}`)
|
||||
}
|
||||
|
||||
if (jobStatus === 'PARTIAL_SUCCESS') {
|
||||
logger.warn(`[${requestId}] Job completed with partial success: ${result.StatusMessage}`)
|
||||
|
||||
let allBlocks = (result.Blocks as unknown[]) || []
|
||||
let nextToken = result.NextToken as string | undefined
|
||||
|
||||
while (nextToken) {
|
||||
const nextResult = await callTextractAsync(
|
||||
host,
|
||||
getTarget,
|
||||
{ JobId: jobId, NextToken: nextToken },
|
||||
accessKeyId,
|
||||
secretAccessKey,
|
||||
region
|
||||
)
|
||||
allBlocks = allBlocks.concat((nextResult.Blocks as unknown[]) || [])
|
||||
nextToken = nextResult.NextToken as string | undefined
|
||||
}
|
||||
|
||||
return {
|
||||
...result,
|
||||
Blocks: allBlocks,
|
||||
}
|
||||
}
|
||||
|
||||
logger.info(`[${requestId}] Job status: ${jobStatus}, attempt ${attempt + 1}/${maxAttempts}`)
|
||||
await sleep(pollIntervalMs)
|
||||
}
|
||||
|
||||
throw new Error(
|
||||
`Timeout waiting for Textract job to complete (max ${maxPollTimeMs / 1000} seconds)`
|
||||
)
|
||||
}
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const requestId = generateRequestId()
|
||||
|
||||
try {
|
||||
const authResult = await checkHybridAuth(request, { requireWorkflowId: false })
|
||||
|
||||
if (!authResult.success || !authResult.userId) {
|
||||
logger.warn(`[${requestId}] Unauthorized Textract parse attempt`, {
|
||||
error: authResult.error || 'Missing userId',
|
||||
})
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: authResult.error || 'Unauthorized',
|
||||
},
|
||||
{ status: 401 }
|
||||
)
|
||||
}
|
||||
|
||||
const userId = authResult.userId
|
||||
const body = await request.json()
|
||||
const validatedData = TextractParseSchema.parse(body)
|
||||
|
||||
const processingMode = validatedData.processingMode || 'sync'
|
||||
const featureTypes = validatedData.featureTypes ?? []
|
||||
const useAnalyzeDocument = featureTypes.length > 0
|
||||
const host = `textract.${validatedData.region}.amazonaws.com`
|
||||
|
||||
logger.info(`[${requestId}] Textract parse request`, {
|
||||
processingMode,
|
||||
filePath: validatedData.filePath?.substring(0, 50),
|
||||
s3Uri: validatedData.s3Uri?.substring(0, 50),
|
||||
featureTypes,
|
||||
userId,
|
||||
})
|
||||
|
||||
if (processingMode === 'async') {
|
||||
if (!validatedData.s3Uri) {
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: 'S3 URI is required for multi-page processing (s3://bucket/key)',
|
||||
},
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const { bucket: s3Bucket, key: s3Key } = parseS3Uri(validatedData.s3Uri)
|
||||
|
||||
logger.info(`[${requestId}] Starting async Textract job`, { s3Bucket, s3Key })
|
||||
|
||||
const startTarget = useAnalyzeDocument
|
||||
? 'Textract.StartDocumentAnalysis'
|
||||
: 'Textract.StartDocumentTextDetection'
|
||||
|
||||
const startBody: Record<string, unknown> = {
|
||||
DocumentLocation: {
|
||||
S3Object: {
|
||||
Bucket: s3Bucket,
|
||||
Name: s3Key,
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
if (useAnalyzeDocument) {
|
||||
startBody.FeatureTypes = featureTypes
|
||||
|
||||
if (
|
||||
validatedData.queries &&
|
||||
validatedData.queries.length > 0 &&
|
||||
featureTypes.includes('QUERIES')
|
||||
) {
|
||||
startBody.QueriesConfig = {
|
||||
Queries: validatedData.queries.map((q) => ({
|
||||
Text: q.Text,
|
||||
Alias: q.Alias,
|
||||
Pages: q.Pages,
|
||||
})),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const startResult = await callTextractAsync(
|
||||
host,
|
||||
startTarget,
|
||||
startBody,
|
||||
validatedData.accessKeyId,
|
||||
validatedData.secretAccessKey,
|
||||
validatedData.region
|
||||
)
|
||||
|
||||
const jobId = startResult.JobId as string
|
||||
if (!jobId) {
|
||||
throw new Error('Failed to start Textract job: No JobId returned')
|
||||
}
|
||||
|
||||
logger.info(`[${requestId}] Async job started`, { jobId })
|
||||
|
||||
const textractData = await pollForJobCompletion(
|
||||
host,
|
||||
jobId,
|
||||
validatedData.accessKeyId,
|
||||
validatedData.secretAccessKey,
|
||||
validatedData.region,
|
||||
useAnalyzeDocument,
|
||||
requestId
|
||||
)
|
||||
|
||||
logger.info(`[${requestId}] Textract async parse successful`, {
|
||||
pageCount: (textractData.DocumentMetadata as { Pages?: number })?.Pages ?? 0,
|
||||
blockCount: (textractData.Blocks as unknown[])?.length ?? 0,
|
||||
})
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
output: {
|
||||
blocks: textractData.Blocks ?? [],
|
||||
documentMetadata: {
|
||||
pages: (textractData.DocumentMetadata as { Pages?: number })?.Pages ?? 0,
|
||||
},
|
||||
modelVersion: (textractData.AnalyzeDocumentModelVersion ??
|
||||
textractData.DetectDocumentTextModelVersion) as string | undefined,
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
if (!validatedData.filePath) {
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: 'File path is required for single-page processing',
|
||||
},
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
let fileUrl = validatedData.filePath
|
||||
|
||||
const isInternalFilePath = validatedData.filePath && isInternalFileUrl(validatedData.filePath)
|
||||
|
||||
if (isInternalFilePath) {
|
||||
try {
|
||||
const storageKey = extractStorageKey(validatedData.filePath)
|
||||
const context = inferContextFromKey(storageKey)
|
||||
|
||||
const hasAccess = await verifyFileAccess(storageKey, userId, undefined, context, false)
|
||||
|
||||
if (!hasAccess) {
|
||||
logger.warn(`[${requestId}] Unauthorized presigned URL generation attempt`, {
|
||||
userId,
|
||||
key: storageKey,
|
||||
context,
|
||||
})
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: 'File not found',
|
||||
},
|
||||
{ status: 404 }
|
||||
)
|
||||
}
|
||||
|
||||
fileUrl = await StorageService.generatePresignedDownloadUrl(storageKey, context, 5 * 60)
|
||||
logger.info(`[${requestId}] Generated presigned URL for ${context} file`)
|
||||
} catch (error) {
|
||||
logger.error(`[${requestId}] Failed to generate presigned URL:`, error)
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: 'Failed to generate file access URL',
|
||||
},
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
} else if (validatedData.filePath?.startsWith('/')) {
|
||||
// Reject arbitrary absolute paths that don't contain /api/files/serve/
|
||||
logger.warn(`[${requestId}] Invalid internal path`, {
|
||||
userId,
|
||||
path: validatedData.filePath.substring(0, 50),
|
||||
})
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: 'Invalid file path. Only uploaded files are supported for internal paths.',
|
||||
},
|
||||
{ status: 400 }
|
||||
)
|
||||
} else {
|
||||
const urlValidation = validateExternalUrl(fileUrl, 'Document URL')
|
||||
if (!urlValidation.isValid) {
|
||||
logger.warn(`[${requestId}] SSRF attempt blocked`, {
|
||||
userId,
|
||||
url: fileUrl.substring(0, 100),
|
||||
error: urlValidation.error,
|
||||
})
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: urlValidation.error,
|
||||
},
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
const { bytes, contentType } = await fetchDocumentBytes(fileUrl)
|
||||
|
||||
// Track if this is a PDF for better error messaging
|
||||
const isPdf = contentType.includes('pdf') || fileUrl.toLowerCase().endsWith('.pdf')
|
||||
|
||||
const uri = '/'
|
||||
|
||||
let textractBody: Record<string, unknown>
|
||||
let amzTarget: string
|
||||
|
||||
if (useAnalyzeDocument) {
|
||||
amzTarget = 'Textract.AnalyzeDocument'
|
||||
textractBody = {
|
||||
Document: {
|
||||
Bytes: bytes,
|
||||
},
|
||||
FeatureTypes: featureTypes,
|
||||
}
|
||||
|
||||
if (
|
||||
validatedData.queries &&
|
||||
validatedData.queries.length > 0 &&
|
||||
featureTypes.includes('QUERIES')
|
||||
) {
|
||||
textractBody.QueriesConfig = {
|
||||
Queries: validatedData.queries.map((q) => ({
|
||||
Text: q.Text,
|
||||
Alias: q.Alias,
|
||||
Pages: q.Pages,
|
||||
})),
|
||||
}
|
||||
}
|
||||
} else {
|
||||
amzTarget = 'Textract.DetectDocumentText'
|
||||
textractBody = {
|
||||
Document: {
|
||||
Bytes: bytes,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
const bodyString = JSON.stringify(textractBody)
|
||||
|
||||
const headers = signAwsRequest(
|
||||
'POST',
|
||||
host,
|
||||
uri,
|
||||
bodyString,
|
||||
validatedData.accessKeyId,
|
||||
validatedData.secretAccessKey,
|
||||
validatedData.region,
|
||||
'textract',
|
||||
amzTarget
|
||||
)
|
||||
|
||||
const textractResponse = await fetch(`https://${host}${uri}`, {
|
||||
method: 'POST',
|
||||
headers,
|
||||
body: bodyString,
|
||||
})
|
||||
|
||||
if (!textractResponse.ok) {
|
||||
const errorText = await textractResponse.text()
|
||||
logger.error(`[${requestId}] Textract API error:`, errorText)
|
||||
|
||||
let errorMessage = `Textract API error: ${textractResponse.statusText}`
|
||||
let isUnsupportedFormat = false
|
||||
try {
|
||||
const errorJson = JSON.parse(errorText)
|
||||
if (errorJson.Message) {
|
||||
errorMessage = errorJson.Message
|
||||
} else if (errorJson.__type) {
|
||||
errorMessage = `${errorJson.__type}: ${errorJson.message || errorText}`
|
||||
}
|
||||
// Check for unsupported document format error
|
||||
isUnsupportedFormat =
|
||||
errorJson.__type === 'UnsupportedDocumentException' ||
|
||||
errorJson.Message?.toLowerCase().includes('unsupported document') ||
|
||||
errorText.toLowerCase().includes('unsupported document')
|
||||
} catch {
|
||||
isUnsupportedFormat = errorText.toLowerCase().includes('unsupported document')
|
||||
}
|
||||
|
||||
// Provide helpful message for unsupported format (likely multi-page PDF)
|
||||
if (isUnsupportedFormat && isPdf) {
|
||||
errorMessage =
|
||||
'This document format is not supported in Single Page mode. If this is a multi-page PDF, please use "Multi-Page (PDF, TIFF via S3)" mode instead, which requires uploading your document to S3 first. Single Page mode only supports JPEG, PNG, and single-page PDF files.'
|
||||
}
|
||||
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: errorMessage,
|
||||
},
|
||||
{ status: textractResponse.status }
|
||||
)
|
||||
}
|
||||
|
||||
const textractData = await textractResponse.json()
|
||||
|
||||
logger.info(`[${requestId}] Textract parse successful`, {
|
||||
pageCount: textractData.DocumentMetadata?.Pages ?? 0,
|
||||
blockCount: textractData.Blocks?.length ?? 0,
|
||||
})
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
output: {
|
||||
blocks: textractData.Blocks ?? [],
|
||||
documentMetadata: {
|
||||
pages: textractData.DocumentMetadata?.Pages ?? 0,
|
||||
},
|
||||
modelVersion:
|
||||
textractData.AnalyzeDocumentModelVersion ??
|
||||
textractData.DetectDocumentTextModelVersion ??
|
||||
undefined,
|
||||
},
|
||||
})
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: 'Invalid request data',
|
||||
details: error.errors,
|
||||
},
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
logger.error(`[${requestId}] Error in Textract parse:`, error)
|
||||
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: error instanceof Error ? error.message : 'Internal server error',
|
||||
},
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
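A minimal sketch of calling the new Textract route above, which this diff adds at `apps/sim/app/api/tools/textract/parse/route.ts` (so presumably served at `/api/tools/textract/parse`); the credentials are placeholders, and authentication follows `checkHybridAuth` as in the handler.

```ts
// Illustrative only: sync (single-page) mode sends a reachable file URL; async mode
// would instead pass processingMode: 'async' plus an s3Uri like s3://bucket/key.
async function runTextractParse() {
  const res = await fetch('/api/tools/textract/parse', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
      accessKeyId: 'AKIA...', // placeholder credentials
      secretAccessKey: '...',
      region: 'us-east-1',
      processingMode: 'sync',
      filePath: 'https://example.com/invoice.png',
      featureTypes: ['FORMS', 'TABLES'],
    }),
  })
  const data = await res.json()
  if (!data.success) throw new Error(data.error)
  return data.output.blocks
}
```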
@@ -12,6 +12,10 @@ import { markExecutionCancelled } from '@/lib/execution/cancellation'
import { processInputFileFields } from '@/lib/execution/files'
import { preprocessExecution } from '@/lib/execution/preprocessing'
import { LoggingSession } from '@/lib/logs/execution/logging-session'
import {
  cleanupExecutionBase64Cache,
  hydrateUserFilesWithBase64,
} from '@/lib/uploads/utils/user-file-base64.server'
import { executeWorkflowCore } from '@/lib/workflows/executor/execution-core'
import { type ExecutionEvent, encodeSSEEvent } from '@/lib/workflows/executor/execution-events'
import { PauseResumeManager } from '@/lib/workflows/executor/human-in-the-loop-manager'
@@ -25,7 +29,7 @@ import type { WorkflowExecutionPayload } from '@/background/workflow-execution'
import { normalizeName } from '@/executor/constants'
import { ExecutionSnapshot } from '@/executor/execution/snapshot'
import type { ExecutionMetadata, IterationContext } from '@/executor/execution/types'
import type { StreamingExecution } from '@/executor/types'
import type { NormalizedBlockOutput, StreamingExecution } from '@/executor/types'
import { Serializer } from '@/serializer'
import { CORE_TRIGGER_TYPES, type CoreTriggerType } from '@/stores/logs/filters/types'

@@ -38,6 +42,8 @@ const ExecuteWorkflowSchema = z.object({
  useDraftState: z.boolean().optional(),
  input: z.any().optional(),
  isClientSession: z.boolean().optional(),
  includeFileBase64: z.boolean().optional().default(true),
  base64MaxBytes: z.number().int().positive().optional(),
  workflowStateOverride: z
    .object({
      blocks: z.record(z.any()),
@@ -214,6 +220,8 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
||||
useDraftState,
|
||||
input: validatedInput,
|
||||
isClientSession = false,
|
||||
includeFileBase64,
|
||||
base64MaxBytes,
|
||||
workflowStateOverride,
|
||||
} = validation.data
|
||||
|
||||
@@ -227,6 +235,8 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
||||
triggerType,
|
||||
stream,
|
||||
useDraftState,
|
||||
includeFileBase64,
|
||||
base64MaxBytes,
|
||||
workflowStateOverride,
|
||||
workflowId: _workflowId, // Also exclude workflowId used for internal JWT auth
|
||||
...rest
|
||||
@@ -427,16 +437,31 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
||||
snapshot,
|
||||
callbacks: {},
|
||||
loggingSession,
|
||||
includeFileBase64,
|
||||
base64MaxBytes,
|
||||
})
|
||||
|
||||
const hasResponseBlock = workflowHasResponseBlock(result)
|
||||
const outputWithBase64 = includeFileBase64
|
||||
? ((await hydrateUserFilesWithBase64(result.output, {
|
||||
requestId,
|
||||
executionId,
|
||||
maxBytes: base64MaxBytes,
|
||||
})) as NormalizedBlockOutput)
|
||||
: result.output
|
||||
|
||||
const resultWithBase64 = { ...result, output: outputWithBase64 }
|
||||
|
||||
// Cleanup base64 cache for this execution
|
||||
await cleanupExecutionBase64Cache(executionId)
|
||||
|
||||
const hasResponseBlock = workflowHasResponseBlock(resultWithBase64)
|
||||
if (hasResponseBlock) {
|
||||
return createHttpResponseFromBlock(result)
|
||||
return createHttpResponseFromBlock(resultWithBase64)
|
||||
}
|
||||
|
||||
const filteredResult = {
|
||||
success: result.success,
|
||||
output: result.output,
|
||||
output: outputWithBase64,
|
||||
error: result.error,
|
||||
metadata: result.metadata
|
||||
? {
|
||||
@@ -498,6 +523,8 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
||||
selectedOutputs: resolvedSelectedOutputs,
|
||||
isSecureMode: false,
|
||||
workflowTriggerType: triggerType === 'chat' ? 'chat' : 'api',
|
||||
includeFileBase64,
|
||||
base64MaxBytes,
|
||||
},
|
||||
executionId,
|
||||
})
|
||||
@@ -698,6 +725,8 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
||||
},
|
||||
loggingSession,
|
||||
abortSignal: abortController.signal,
|
||||
includeFileBase64,
|
||||
base64MaxBytes,
|
||||
})
|
||||
|
||||
if (result.status === 'paused') {
|
||||
@@ -750,12 +779,21 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
||||
workflowId,
|
||||
data: {
|
||||
success: result.success,
|
||||
output: result.output,
|
||||
output: includeFileBase64
|
||||
? await hydrateUserFilesWithBase64(result.output, {
|
||||
requestId,
|
||||
executionId,
|
||||
maxBytes: base64MaxBytes,
|
||||
})
|
||||
: result.output,
|
||||
duration: result.metadata?.duration || 0,
|
||||
startTime: result.metadata?.startTime || startTime.toISOString(),
|
||||
endTime: result.metadata?.endTime || new Date().toISOString(),
|
||||
},
|
||||
})
|
||||
|
||||
// Cleanup base64 cache for this execution
|
||||
await cleanupExecutionBase64Cache(executionId)
|
||||
} catch (error: any) {
|
||||
const errorMessage = error.message || 'Unknown error'
|
||||
logger.error(`[${requestId}] SSE execution failed: ${errorMessage}`)
|
||||
|
||||
@@ -33,6 +33,7 @@ const BlockDataSchema = z.object({
|
||||
doWhileCondition: z.string().optional(),
|
||||
parallelType: z.enum(['collection', 'count']).optional(),
|
||||
type: z.string().optional(),
|
||||
canonicalModes: z.record(z.enum(['basic', 'advanced'])).optional(),
|
||||
})
|
||||
|
||||
const SubBlockStateSchema = z.object({
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
|
||||
import { useRef, useState } from 'react'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { isUserFile } from '@/lib/core/utils/display-filters'
|
||||
import { isUserFileWithMetadata } from '@/lib/core/utils/user-file'
|
||||
import type { ChatFile, ChatMessage } from '@/app/chat/components/message/message'
|
||||
import { CHAT_ERROR_MESSAGES } from '@/app/chat/constants'
|
||||
|
||||
@@ -17,7 +17,7 @@ function extractFilesFromData(
|
||||
return files
|
||||
}
|
||||
|
||||
if (isUserFile(data)) {
|
||||
if (isUserFileWithMetadata(data)) {
|
||||
if (!seenIds.has(data.id)) {
|
||||
seenIds.add(data.id)
|
||||
files.push({
|
||||
@@ -232,7 +232,7 @@ export function useChatStreaming() {
|
||||
return null
|
||||
}
|
||||
|
||||
if (isUserFile(value)) {
|
||||
if (isUserFileWithMetadata(value)) {
|
||||
return null
|
||||
}
|
||||
|
||||
@@ -285,7 +285,7 @@ export function useChatStreaming() {
|
||||
|
||||
const value = getOutputValue(blockOutputs, config.path)
|
||||
|
||||
if (isUserFile(value)) {
|
||||
if (isUserFileWithMetadata(value)) {
|
||||
extractedFiles.push({
|
||||
id: value.id,
|
||||
name: value.name,
|
||||
|
||||
@@ -538,15 +538,11 @@ export function Document({
|
||||
},
|
||||
{
|
||||
onSuccess: (result) => {
|
||||
if (operation === 'delete') {
|
||||
if (operation === 'delete' || result.errorCount > 0) {
|
||||
refreshChunks()
|
||||
} else {
|
||||
result.results.forEach((opResult) => {
|
||||
if (opResult.operation === operation) {
|
||||
opResult.chunkIds.forEach((chunkId: string) => {
|
||||
updateChunk(chunkId, { enabled: operation === 'enable' })
|
||||
})
|
||||
}
|
||||
chunks.forEach((chunk) => {
|
||||
updateChunk(chunk.id, { enabled: operation === 'enable' })
|
||||
})
|
||||
}
|
||||
logger.info(`Successfully ${operation}d ${result.successCount} chunks`)
|
||||
|
||||
@@ -462,7 +462,7 @@ export function BaseTagsModal({ open, onOpenChange, knowledgeBaseId }: BaseTagsM
|
||||
<ModalHeader>Documents using "{selectedTag?.displayName}"</ModalHeader>
|
||||
<ModalBody>
|
||||
<div className='space-y-[8px]'>
|
||||
<p className='text-[12px] text-[var(--text-tertiary)]'>
|
||||
<p className='text-[12px] text-[var(--text-secondary)]'>
|
||||
{selectedTagUsage?.documentCount || 0} document
|
||||
{selectedTagUsage?.documentCount !== 1 ? 's are' : ' is'} currently using this tag
|
||||
definition.
|
||||
@@ -470,7 +470,7 @@ export function BaseTagsModal({ open, onOpenChange, knowledgeBaseId }: BaseTagsM
|
||||
|
||||
{selectedTagUsage?.documentCount === 0 ? (
|
||||
<div className='rounded-[6px] border p-[16px] text-center'>
|
||||
<p className='text-[12px] text-[var(--text-tertiary)]'>
|
||||
<p className='text-[12px] text-[var(--text-secondary)]'>
|
||||
This tag definition is not being used by any documents. You can safely delete it
|
||||
to free up the tag slot.
|
||||
</p>
|
||||
|
||||
@@ -26,7 +26,6 @@ export interface CanvasMenuProps {
|
||||
onOpenLogs: () => void
|
||||
onToggleVariables: () => void
|
||||
onToggleChat: () => void
|
||||
onInvite: () => void
|
||||
isVariablesOpen?: boolean
|
||||
isChatOpen?: boolean
|
||||
hasClipboard?: boolean
|
||||
@@ -55,15 +54,12 @@ export function CanvasMenu({
|
||||
onOpenLogs,
|
||||
onToggleVariables,
|
||||
onToggleChat,
|
||||
onInvite,
|
||||
isVariablesOpen = false,
|
||||
isChatOpen = false,
|
||||
hasClipboard = false,
|
||||
disableEdit = false,
|
||||
disableAdmin = false,
|
||||
canUndo = false,
|
||||
canRedo = false,
|
||||
isInvitationsDisabled = false,
|
||||
}: CanvasMenuProps) {
|
||||
return (
|
||||
<Popover
|
||||
@@ -179,22 +175,6 @@ export function CanvasMenu({
|
||||
>
|
||||
{isChatOpen ? 'Close Chat' : 'Open Chat'}
|
||||
</PopoverItem>
|
||||
|
||||
{/* Admin action - hidden when invitations are disabled */}
|
||||
{!isInvitationsDisabled && (
|
||||
<>
|
||||
<PopoverDivider />
|
||||
<PopoverItem
|
||||
disabled={disableAdmin}
|
||||
onClick={() => {
|
||||
onInvite()
|
||||
onClose()
|
||||
}}
|
||||
>
|
||||
Invite to Workspace
|
||||
</PopoverItem>
|
||||
</>
|
||||
)}
|
||||
</PopoverContent>
|
||||
</Popover>
|
||||
)
|
||||
|
||||
@@ -886,17 +886,16 @@ export function Chat() {
|
||||
onMouseDown={(e) => e.stopPropagation()}
|
||||
>
|
||||
{shouldShowConfigureStartInputsButton && (
|
||||
<Badge
|
||||
variant='outline'
|
||||
className='flex-none cursor-pointer whitespace-nowrap rounded-[6px]'
|
||||
<div
|
||||
className='flex flex-none cursor-pointer items-center whitespace-nowrap rounded-[6px] border border-[var(--border-1)] bg-[var(--surface-5)] px-[9px] py-[2px] font-medium font-sans text-[12px] text-[var(--text-primary)] hover:bg-[var(--surface-7)] dark:hover:border-[var(--surface-7)] dark:hover:bg-[var(--border-1)]'
|
||||
title='Add chat inputs to Start block'
|
||||
onMouseDown={(e) => {
|
||||
e.stopPropagation()
|
||||
handleConfigureStartInputs()
|
||||
}}
|
||||
>
|
||||
<span className='whitespace-nowrap text-[12px]'>Add inputs</span>
|
||||
</Badge>
|
||||
<span className='whitespace-nowrap'>Add inputs</span>
|
||||
</div>
|
||||
)}
|
||||
|
||||
<OutputSelect
|
||||
|
||||
@@ -129,10 +129,6 @@ export function OutputSelect({
|
||||
? baselineWorkflow.blocks?.[block.id]?.subBlocks?.responseFormat?.value
|
||||
: subBlockValues?.[block.id]?.responseFormat
|
||||
const responseFormat = parseResponseFormatSafely(responseFormatValue, block.id)
|
||||
const operationValue =
|
||||
shouldUseBaseline && baselineWorkflow
|
||||
? baselineWorkflow.blocks?.[block.id]?.subBlocks?.operation?.value
|
||||
: subBlockValues?.[block.id]?.operation
|
||||
|
||||
let outputsToProcess: Record<string, unknown> = {}
|
||||
|
||||
@@ -146,10 +142,20 @@ export function OutputSelect({
|
||||
outputsToProcess = blockConfig?.outputs || {}
|
||||
}
|
||||
} else {
|
||||
const toolOutputs =
|
||||
blockConfig && typeof operationValue === 'string'
|
||||
? getToolOutputs(blockConfig, operationValue)
|
||||
: {}
|
||||
// Build subBlocks object for tool selector
|
||||
const rawSubBlockValues =
|
||||
shouldUseBaseline && baselineWorkflow
|
||||
? baselineWorkflow.blocks?.[block.id]?.subBlocks
|
||||
: subBlockValues?.[block.id]
|
||||
const subBlocks: Record<string, { value: unknown }> = {}
|
||||
if (rawSubBlockValues && typeof rawSubBlockValues === 'object') {
|
||||
for (const [key, val] of Object.entries(rawSubBlockValues)) {
|
||||
// Handle both { value: ... } and raw value formats
|
||||
subBlocks[key] = val && typeof val === 'object' && 'value' in val ? val : { value: val }
|
||||
}
|
||||
}
|
||||
|
||||
const toolOutputs = blockConfig ? getToolOutputs(blockConfig, subBlocks) : {}
|
||||
outputsToProcess =
|
||||
Object.keys(toolOutputs).length > 0 ? toolOutputs : blockConfig?.outputs || {}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,22 @@
|
||||
import { PopoverSection } from '@/components/emcn'
|
||||
|
||||
/**
|
||||
* Skeleton loading component for chat history dropdown
|
||||
* Displays placeholder content while chats are being loaded
|
||||
*/
|
||||
export function ChatHistorySkeleton() {
|
||||
return (
|
||||
<>
|
||||
<PopoverSection>
|
||||
<div className='h-3 w-12 animate-pulse rounded bg-muted/40' />
|
||||
</PopoverSection>
|
||||
<div className='flex flex-col gap-0.5'>
|
||||
{[1, 2, 3].map((i) => (
|
||||
<div key={i} className='flex h-[25px] items-center px-[6px]'>
|
||||
<div className='h-3 w-full animate-pulse rounded bg-muted/40' />
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
</>
|
||||
)
|
||||
}
|
||||
@@ -0,0 +1 @@
|
||||
export * from './chat-history-skeleton'
|
||||
@@ -0,0 +1,79 @@
|
||||
import { Button } from '@/components/emcn'
|
||||
|
||||
type CheckpointConfirmationVariant = 'restore' | 'discard'
|
||||
|
||||
interface CheckpointConfirmationProps {
|
||||
/** Confirmation variant - 'restore' for reverting, 'discard' for edit with checkpoint options */
|
||||
variant: CheckpointConfirmationVariant
|
||||
/** Whether an action is currently processing */
|
||||
isProcessing: boolean
|
||||
/** Callback when cancel is clicked */
|
||||
onCancel: () => void
|
||||
/** Callback when revert is clicked */
|
||||
onRevert: () => void
|
||||
/** Callback when continue is clicked (only for 'discard' variant) */
|
||||
onContinue?: () => void
|
||||
}
|
||||
|
||||
/**
|
||||
* Inline confirmation for checkpoint operations
|
||||
* Supports two variants:
|
||||
* - 'restore': Simple revert confirmation with warning
|
||||
* - 'discard': Edit with checkpoint options (revert or continue without revert)
|
||||
*/
|
||||
export function CheckpointConfirmation({
|
||||
variant,
|
||||
isProcessing,
|
||||
onCancel,
|
||||
onRevert,
|
||||
onContinue,
|
||||
}: CheckpointConfirmationProps) {
|
||||
const isRestoreVariant = variant === 'restore'
|
||||
|
||||
return (
|
||||
<div className='mt-[8px] rounded-[4px] border border-[var(--border)] bg-[var(--surface-4)] p-[10px]'>
|
||||
<p className='mb-[8px] text-[12px] text-[var(--text-primary)]'>
|
||||
{isRestoreVariant ? (
|
||||
<>
|
||||
Revert to checkpoint? This will restore your workflow to the state saved at this
|
||||
checkpoint.{' '}
|
||||
<span className='text-[var(--text-error)]'>This action cannot be undone.</span>
|
||||
</>
|
||||
) : (
|
||||
'Continue from a previous message?'
|
||||
)}
|
||||
</p>
|
||||
<div className='flex gap-[8px]'>
|
||||
<Button
|
||||
onClick={onCancel}
|
||||
variant='active'
|
||||
size='sm'
|
||||
className='flex-1'
|
||||
disabled={isProcessing}
|
||||
>
|
||||
Cancel
|
||||
</Button>
|
||||
<Button
|
||||
onClick={onRevert}
|
||||
variant='destructive'
|
||||
size='sm'
|
||||
className='flex-1'
|
||||
disabled={isProcessing}
|
||||
>
|
||||
{isProcessing ? 'Reverting...' : 'Revert'}
|
||||
</Button>
|
||||
{!isRestoreVariant && onContinue && (
|
||||
<Button
|
||||
onClick={onContinue}
|
||||
variant='tertiary'
|
||||
size='sm'
|
||||
className='flex-1'
|
||||
disabled={isProcessing}
|
||||
>
|
||||
Continue
|
||||
</Button>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
@@ -0,0 +1 @@
|
||||
export * from './checkpoint-confirmation'
|
||||
@@ -0,0 +1 @@
|
||||
export * from './file-display'
|
||||
@@ -1,5 +1,6 @@
|
||||
export * from './checkpoint-confirmation'
|
||||
export * from './file-display'
|
||||
export { default as CopilotMarkdownRenderer } from './markdown-renderer'
|
||||
export { CopilotMarkdownRenderer } from './markdown-renderer'
|
||||
export * from './smooth-streaming'
|
||||
export * from './thinking-block'
|
||||
export * from './usage-limit-actions'
|
||||
|
||||
@@ -0,0 +1 @@
|
||||
export { default as CopilotMarkdownRenderer } from './markdown-renderer'
|
||||
@@ -0,0 +1 @@
|
||||
export * from './smooth-streaming'
|
||||
@@ -1,27 +1,17 @@
|
||||
import { memo, useEffect, useRef, useState } from 'react'
|
||||
import { cn } from '@/lib/core/utils/cn'
|
||||
import CopilotMarkdownRenderer from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/copilot-message/components/markdown-renderer'
|
||||
import { CopilotMarkdownRenderer } from '../markdown-renderer'
|
||||
|
||||
/**
|
||||
* Character animation delay in milliseconds
|
||||
*/
|
||||
/** Character animation delay in milliseconds */
|
||||
const CHARACTER_DELAY = 3
|
||||
|
||||
/**
|
||||
* Props for the StreamingIndicator component
|
||||
*/
|
||||
/** Props for the StreamingIndicator component */
|
||||
interface StreamingIndicatorProps {
|
||||
/** Optional class name for layout adjustments */
|
||||
className?: string
|
||||
}
|
||||
|
||||
/**
|
||||
* StreamingIndicator shows animated dots during message streaming
|
||||
* Used as a standalone indicator when no content has arrived yet
|
||||
*
|
||||
* @param props - Component props
|
||||
* @returns Animated loading indicator
|
||||
*/
|
||||
/** Shows animated dots during message streaming when no content has arrived */
|
||||
export const StreamingIndicator = memo(({ className }: StreamingIndicatorProps) => (
|
||||
<div className={cn('flex h-[1.25rem] items-center text-muted-foreground', className)}>
|
||||
<div className='flex space-x-0.5'>
|
||||
@@ -34,9 +24,7 @@ export const StreamingIndicator = memo(({ className }: StreamingIndicatorProps)
|
||||
|
||||
StreamingIndicator.displayName = 'StreamingIndicator'
|
||||
|
||||
/**
|
||||
* Props for the SmoothStreamingText component
|
||||
*/
|
||||
/** Props for the SmoothStreamingText component */
|
||||
interface SmoothStreamingTextProps {
|
||||
/** Content to display with streaming animation */
|
||||
content: string
|
||||
@@ -44,20 +32,12 @@ interface SmoothStreamingTextProps {
|
||||
isStreaming: boolean
|
||||
}
|
||||
|
||||
/**
|
||||
* SmoothStreamingText component displays text with character-by-character animation
|
||||
* Creates a smooth streaming effect for AI responses
|
||||
*
|
||||
* @param props - Component props
|
||||
* @returns Streaming text with smooth animation
|
||||
*/
|
||||
/** Displays text with character-by-character animation for smooth streaming */
|
||||
export const SmoothStreamingText = memo(
|
||||
({ content, isStreaming }: SmoothStreamingTextProps) => {
|
||||
// Initialize with full content when not streaming to avoid flash on page load
|
||||
const [displayedContent, setDisplayedContent] = useState(() => (isStreaming ? '' : content))
|
||||
const contentRef = useRef(content)
|
||||
const timeoutRef = useRef<NodeJS.Timeout | null>(null)
|
||||
// Initialize index based on streaming state
|
||||
const indexRef = useRef(isStreaming ? 0 : content.length)
|
||||
const isAnimatingRef = useRef(false)
|
||||
|
||||
@@ -95,7 +75,6 @@ export const SmoothStreamingText = memo(
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// Streaming ended - show full content immediately
|
||||
if (timeoutRef.current) {
|
||||
clearTimeout(timeoutRef.current)
|
||||
}
|
||||
@@ -119,7 +98,6 @@ export const SmoothStreamingText = memo(
|
||||
)
|
||||
},
|
||||
(prevProps, nextProps) => {
|
||||
// Prevent re-renders during streaming unless content actually changed
|
||||
return (
|
||||
prevProps.content === nextProps.content && prevProps.isStreaming === nextProps.isStreaming
|
||||
)
|
||||
@@ -0,0 +1 @@
|
||||
export * from './thinking-block'
|
||||
@@ -3,66 +3,45 @@
|
||||
import { memo, useEffect, useMemo, useRef, useState } from 'react'
|
||||
import clsx from 'clsx'
|
||||
import { ChevronUp } from 'lucide-react'
|
||||
import CopilotMarkdownRenderer from './markdown-renderer'
|
||||
import { CopilotMarkdownRenderer } from '../markdown-renderer'
|
||||
|
||||
/**
|
||||
* Removes thinking tags (raw or escaped) from streamed content.
|
||||
*/
|
||||
/** Removes thinking tags (raw or escaped) and special tags from streamed content */
|
||||
function stripThinkingTags(text: string): string {
|
||||
return text
|
||||
.replace(/<\/?thinking[^>]*>/gi, '')
|
||||
.replace(/<\/?thinking[^&]*>/gi, '')
|
||||
.replace(/<options>[\s\S]*?<\/options>/gi, '')
|
||||
.replace(/<options>[\s\S]*$/gi, '')
|
||||
.replace(/<plan>[\s\S]*?<\/plan>/gi, '')
|
||||
.replace(/<plan>[\s\S]*$/gi, '')
|
||||
.trim()
|
||||
}
|
||||
|
||||
/**
|
||||
* Max height for thinking content before internal scrolling kicks in
|
||||
*/
|
||||
const THINKING_MAX_HEIGHT = 150
|
||||
|
||||
/**
|
||||
* Height threshold before gradient fade kicks in
|
||||
*/
|
||||
const GRADIENT_THRESHOLD = 100
|
||||
|
||||
/**
|
||||
* Interval for auto-scroll during streaming (ms)
|
||||
*/
|
||||
/** Interval for auto-scroll during streaming (ms) */
|
||||
const SCROLL_INTERVAL = 50
|
||||
|
||||
/**
|
||||
* Timer update interval in milliseconds
|
||||
*/
|
||||
/** Timer update interval in milliseconds */
|
||||
const TIMER_UPDATE_INTERVAL = 100
|
||||
|
||||
/**
|
||||
* Thinking text streaming - much faster than main text
|
||||
* Essentially instant with minimal delay
|
||||
*/
|
||||
/** Thinking text streaming delay - faster than main text */
|
||||
const THINKING_DELAY = 0.5
|
||||
const THINKING_CHARS_PER_FRAME = 3
|
||||
|
||||
/**
|
||||
* Props for the SmoothThinkingText component
|
||||
*/
|
||||
/** Props for the SmoothThinkingText component */
|
||||
interface SmoothThinkingTextProps {
|
||||
content: string
|
||||
isStreaming: boolean
|
||||
}
|
||||
|
||||
/**
|
||||
* SmoothThinkingText renders thinking content with fast streaming animation
|
||||
* Uses gradient fade at top when content is tall enough
|
||||
* Renders thinking content with fast streaming animation.
|
||||
*/
|
||||
const SmoothThinkingText = memo(
|
||||
({ content, isStreaming }: SmoothThinkingTextProps) => {
|
||||
// Initialize with full content when not streaming to avoid flash on page load
|
||||
const [displayedContent, setDisplayedContent] = useState(() => (isStreaming ? '' : content))
|
||||
const [showGradient, setShowGradient] = useState(false)
|
||||
const contentRef = useRef(content)
|
||||
const textRef = useRef<HTMLDivElement>(null)
|
||||
const rafRef = useRef<number | null>(null)
|
||||
// Initialize index based on streaming state
|
||||
const indexRef = useRef(isStreaming ? 0 : content.length)
|
||||
const lastFrameTimeRef = useRef<number>(0)
|
||||
const isAnimatingRef = useRef(false)
|
||||
@@ -88,7 +67,6 @@ const SmoothThinkingText = memo(
|
||||
|
||||
if (elapsed >= THINKING_DELAY) {
|
||||
if (currentIndex < currentContent.length) {
|
||||
// Reveal multiple characters per frame for faster streaming
|
||||
const newIndex = Math.min(
|
||||
currentIndex + THINKING_CHARS_PER_FRAME,
|
||||
currentContent.length
|
||||
@@ -110,7 +88,6 @@ const SmoothThinkingText = memo(
|
||||
rafRef.current = requestAnimationFrame(animateText)
|
||||
}
|
||||
} else {
|
||||
// Streaming ended - show full content immediately
|
||||
if (rafRef.current) {
|
||||
cancelAnimationFrame(rafRef.current)
|
||||
}
|
||||
@@ -127,30 +104,10 @@ const SmoothThinkingText = memo(
|
||||
}
|
||||
}, [content, isStreaming])
|
||||
|
||||
// Check if content height exceeds threshold for gradient
|
||||
useEffect(() => {
|
||||
if (textRef.current && isStreaming) {
|
||||
const height = textRef.current.scrollHeight
|
||||
setShowGradient(height > GRADIENT_THRESHOLD)
|
||||
} else {
|
||||
setShowGradient(false)
|
||||
}
|
||||
}, [displayedContent, isStreaming])
|
||||
|
||||
// Apply vertical gradient fade at the top only when content is tall enough
|
||||
const gradientStyle =
|
||||
isStreaming && showGradient
|
||||
? {
|
||||
maskImage: 'linear-gradient(to bottom, transparent 0%, black 30%, black 100%)',
|
||||
WebkitMaskImage: 'linear-gradient(to bottom, transparent 0%, black 30%, black 100%)',
|
||||
}
|
||||
: undefined
|
||||
|
||||
return (
|
||||
<div
|
||||
ref={textRef}
|
||||
className='[&_*]:!text-[var(--text-muted)] [&_*]:!text-[12px] [&_*]:!leading-[1.4] [&_p]:!m-0 [&_p]:!mb-1 [&_h1]:!text-[12px] [&_h1]:!font-semibold [&_h1]:!m-0 [&_h1]:!mb-1 [&_h2]:!text-[12px] [&_h2]:!font-semibold [&_h2]:!m-0 [&_h2]:!mb-1 [&_h3]:!text-[12px] [&_h3]:!font-semibold [&_h3]:!m-0 [&_h3]:!mb-1 [&_code]:!text-[11px] [&_ul]:!pl-5 [&_ul]:!my-1 [&_ol]:!pl-6 [&_ol]:!my-1 [&_li]:!my-0.5 [&_li]:!py-0 font-season text-[12px] text-[var(--text-muted)]'
|
||||
style={gradientStyle}
|
||||
>
|
||||
<CopilotMarkdownRenderer content={displayedContent} />
|
||||
</div>
|
||||
@@ -165,9 +122,7 @@ const SmoothThinkingText = memo(
|
||||
|
||||
SmoothThinkingText.displayName = 'SmoothThinkingText'
|
||||
|
||||
/**
|
||||
* Props for the ThinkingBlock component
|
||||
*/
|
||||
/** Props for the ThinkingBlock component */
|
||||
interface ThinkingBlockProps {
|
||||
/** Content of the thinking block */
|
||||
content: string
|
||||
@@ -182,13 +137,8 @@ interface ThinkingBlockProps {
|
||||
}
|
||||
|
||||
/**
|
||||
* ThinkingBlock component displays AI reasoning/thinking process
|
||||
* Shows collapsible content with duration timer
|
||||
* Auto-expands during streaming and collapses when complete
|
||||
* Auto-collapses when a tool call or other content comes in after it
|
||||
*
|
||||
* @param props - Component props
|
||||
* @returns Thinking block with expandable content and timer
|
||||
* Displays AI reasoning/thinking process with collapsible content and duration timer.
|
||||
* Auto-expands during streaming and collapses when complete.
|
||||
*/
|
||||
export function ThinkingBlock({
|
||||
content,
|
||||
@@ -197,7 +147,6 @@ export function ThinkingBlock({
|
||||
label = 'Thought',
|
||||
hasSpecialTags = false,
|
||||
}: ThinkingBlockProps) {
|
||||
// Strip thinking tags from content on render to handle persisted messages
|
||||
const cleanContent = useMemo(() => stripThinkingTags(content || ''), [content])
|
||||
|
||||
const [isExpanded, setIsExpanded] = useState(false)
|
||||
@@ -209,12 +158,8 @@ export function ThinkingBlock({
|
||||
const lastScrollTopRef = useRef(0)
|
||||
const programmaticScrollRef = useRef(false)
|
||||
|
||||
/**
|
||||
* Auto-expands block when streaming with content
|
||||
* Auto-collapses when streaming ends OR when following content arrives
|
||||
*/
|
||||
/** Auto-expands during streaming, auto-collapses when streaming ends or following content arrives */
|
||||
useEffect(() => {
|
||||
// Collapse if streaming ended, there's following content, or special tags arrived
|
||||
if (!isStreaming || hasFollowingContent || hasSpecialTags) {
|
||||
setIsExpanded(false)
|
||||
userCollapsedRef.current = false
|
||||
@@ -227,7 +172,6 @@ export function ThinkingBlock({
|
||||
}
|
||||
}, [isStreaming, cleanContent, hasFollowingContent, hasSpecialTags])
|
||||
|
||||
// Reset start time when streaming begins
|
||||
useEffect(() => {
|
||||
if (isStreaming && !hasFollowingContent) {
|
||||
startTimeRef.current = Date.now()
|
||||
@@ -236,9 +180,7 @@ export function ThinkingBlock({
|
||||
}
|
||||
}, [isStreaming, hasFollowingContent])
|
||||
|
||||
// Update duration timer during streaming (stop when following content arrives)
|
||||
useEffect(() => {
|
||||
// Stop timer if not streaming or if there's following content (thinking is done)
|
||||
if (!isStreaming || hasFollowingContent) return
|
||||
|
||||
const interval = setInterval(() => {
|
||||
@@ -248,7 +190,6 @@ export function ThinkingBlock({
|
||||
return () => clearInterval(interval)
|
||||
}, [isStreaming, hasFollowingContent])
|
||||
|
||||
// Handle scroll events to detect user scrolling away
|
||||
useEffect(() => {
|
||||
const container = scrollContainerRef.current
|
||||
if (!container || !isExpanded) return
|
||||
@@ -267,7 +208,6 @@ export function ThinkingBlock({
|
||||
setUserHasScrolledAway(true)
|
||||
}
|
||||
|
||||
// Re-stick if user scrolls back to bottom with intent
|
||||
if (userHasScrolledAway && isNearBottom && delta > 10) {
|
||||
setUserHasScrolledAway(false)
|
||||
}
|
||||
@@ -281,7 +221,6 @@ export function ThinkingBlock({
|
||||
return () => container.removeEventListener('scroll', handleScroll)
|
||||
}, [isExpanded, userHasScrolledAway])
|
||||
|
||||
// Smart auto-scroll: always scroll to bottom while streaming unless user scrolled away
|
||||
useEffect(() => {
|
||||
if (!isStreaming || !isExpanded || userHasScrolledAway) return
|
||||
|
||||
@@ -302,20 +241,16 @@ export function ThinkingBlock({
|
||||
return () => window.clearInterval(intervalId)
|
||||
}, [isStreaming, isExpanded, userHasScrolledAway])
|
||||
|
||||
/**
|
||||
* Formats duration in milliseconds to seconds
|
||||
* Always shows seconds, rounded to nearest whole second, minimum 1s
|
||||
*/
|
||||
/** Formats duration in milliseconds to seconds (minimum 1s) */
|
||||
const formatDuration = (ms: number) => {
|
||||
const seconds = Math.max(1, Math.round(ms / 1000))
|
||||
return `${seconds}s`
|
||||
}
|
||||
|
||||
const hasContent = cleanContent.length > 0
|
||||
// Thinking is "done" when streaming ends OR when there's following content (like a tool call) OR when special tags appear
|
||||
const isThinkingDone = !isStreaming || hasFollowingContent || hasSpecialTags
|
||||
const durationText = `${label} for ${formatDuration(duration)}`
|
||||
// Convert past tense label to present tense for streaming (e.g., "Thought" → "Thinking")
|
||||
|
||||
const getStreamingLabel = (lbl: string) => {
|
||||
if (lbl === 'Thought') return 'Thinking'
|
||||
if (lbl.endsWith('ed')) return `${lbl.slice(0, -2)}ing`
|
||||
@@ -323,11 +258,9 @@ export function ThinkingBlock({
|
||||
}
|
||||
const streamingLabel = getStreamingLabel(label)
|
||||
|
||||
// During streaming: show header with shimmer effect + expanded content
|
||||
if (!isThinkingDone) {
|
||||
return (
|
||||
<div>
|
||||
{/* Define shimmer keyframes */}
|
||||
<style>{`
|
||||
@keyframes thinking-shimmer {
|
||||
0% { background-position: 150% 0; }
|
||||
@@ -396,7 +329,6 @@ export function ThinkingBlock({
|
||||
)
|
||||
}
|
||||
|
||||
// After done: show collapsible header with duration
|
||||
return (
|
||||
<div>
|
||||
<button
|
||||
@@ -426,7 +358,6 @@ export function ThinkingBlock({
|
||||
isExpanded ? 'mt-1.5 max-h-[150px] opacity-100' : 'max-h-0 opacity-0'
|
||||
)}
|
||||
>
|
||||
{/* Completed thinking text - dimmed with markdown */}
|
||||
<div className='[&_*]:!text-[var(--text-muted)] [&_*]:!text-[12px] [&_*]:!leading-[1.4] [&_p]:!m-0 [&_p]:!mb-1 [&_h1]:!text-[12px] [&_h1]:!font-semibold [&_h1]:!m-0 [&_h1]:!mb-1 [&_h2]:!text-[12px] [&_h2]:!font-semibold [&_h2]:!m-0 [&_h2]:!mb-1 [&_h3]:!text-[12px] [&_h3]:!font-semibold [&_h3]:!m-0 [&_h3]:!mb-1 [&_code]:!text-[11px] [&_ul]:!pl-5 [&_ul]:!my-1 [&_ol]:!pl-6 [&_ol]:!my-1 [&_li]:!my-0.5 [&_li]:!py-0 font-season text-[12px] text-[var(--text-muted)]'>
|
||||
<CopilotMarkdownRenderer content={cleanContent} />
|
||||
</div>
|
||||
@@ -0,0 +1 @@
|
||||
export * from './usage-limit-actions'
|
||||
@@ -9,18 +9,20 @@ import {
|
||||
ToolCall,
|
||||
} from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components'
|
||||
import {
|
||||
CheckpointConfirmation,
|
||||
FileAttachmentDisplay,
|
||||
SmoothStreamingText,
|
||||
StreamingIndicator,
|
||||
ThinkingBlock,
|
||||
UsageLimitActions,
|
||||
} from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/copilot-message/components'
|
||||
import CopilotMarkdownRenderer from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/copilot-message/components/markdown-renderer'
|
||||
import { CopilotMarkdownRenderer } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/copilot-message/components/markdown-renderer'
|
||||
import {
|
||||
useCheckpointManagement,
|
||||
useMessageEditing,
|
||||
} from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/copilot-message/hooks'
|
||||
import { UserInput } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/user-input/user-input'
|
||||
import { buildMentionHighlightNodes } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/user-input/utils'
|
||||
import type { CopilotMessage as CopilotMessageType } from '@/stores/panel'
|
||||
import { useCopilotStore } from '@/stores/panel'
|
||||
|
||||
@@ -68,7 +70,6 @@ const CopilotMessage: FC<CopilotMessageProps> = memo(
|
||||
const isUser = message.role === 'user'
|
||||
const isAssistant = message.role === 'assistant'
|
||||
|
||||
// Store state
|
||||
const {
|
||||
messageCheckpoints: allMessageCheckpoints,
|
||||
messages,
|
||||
@@ -79,23 +80,18 @@ const CopilotMessage: FC<CopilotMessageProps> = memo(
|
||||
isAborting,
|
||||
} = useCopilotStore()
|
||||
|
||||
// Get checkpoints for this message if it's a user message
|
||||
const messageCheckpoints = isUser ? allMessageCheckpoints[message.id] || [] : []
|
||||
const hasCheckpoints = messageCheckpoints.length > 0 && messageCheckpoints.some((cp) => cp?.id)
|
||||
|
||||
// Check if this is the last user message (for showing abort button)
|
||||
const isLastUserMessage = useMemo(() => {
|
||||
if (!isUser) return false
|
||||
const userMessages = messages.filter((m) => m.role === 'user')
|
||||
return userMessages.length > 0 && userMessages[userMessages.length - 1]?.id === message.id
|
||||
}, [isUser, messages, message.id])
|
||||
|
||||
// UI state
|
||||
const [isHoveringMessage, setIsHoveringMessage] = useState(false)
|
||||
|
||||
const cancelEditRef = useRef<(() => void) | null>(null)
|
||||
|
||||
// Checkpoint management hook
|
||||
const {
|
||||
showRestoreConfirmation,
|
||||
showCheckpointDiscardModal,
|
||||
@@ -118,7 +114,6 @@ const CopilotMessage: FC<CopilotMessageProps> = memo(
|
||||
() => cancelEditRef.current?.()
|
||||
)
|
||||
|
||||
// Message editing hook
|
||||
const {
|
||||
isEditMode,
|
||||
isExpanded,
|
||||
@@ -147,27 +142,20 @@ const CopilotMessage: FC<CopilotMessageProps> = memo(
|
||||
|
||||
cancelEditRef.current = handleCancelEdit
|
||||
|
||||
// Get clean text content with double newline parsing
|
||||
const cleanTextContent = useMemo(() => {
|
||||
if (!message.content) return ''
|
||||
|
||||
// Parse out excessive newlines (more than 2 consecutive newlines)
|
||||
return message.content.replace(/\n{3,}/g, '\n\n')
|
||||
}, [message.content])
|
||||
|
||||
// Parse special tags from message content (options, plan)
|
||||
// Parse during streaming to show options/plan as they stream in
|
||||
const parsedTags = useMemo(() => {
|
||||
if (isUser) return null
|
||||
|
||||
// Try message.content first
|
||||
if (message.content) {
|
||||
const parsed = parseSpecialTags(message.content)
|
||||
if (parsed.options || parsed.plan) return parsed
|
||||
}
|
||||
|
||||
// During streaming, check content blocks for options/plan
|
||||
if (isStreaming && message.contentBlocks && message.contentBlocks.length > 0) {
|
||||
if (message.contentBlocks && message.contentBlocks.length > 0) {
|
||||
for (const block of message.contentBlocks) {
|
||||
if (block.type === 'text' && block.content) {
|
||||
const parsed = parseSpecialTags(block.content)
|
||||
@@ -176,23 +164,42 @@ const CopilotMessage: FC<CopilotMessageProps> = memo(
|
||||
}
|
||||
}
|
||||
|
||||
return message.content ? parseSpecialTags(message.content) : null
|
||||
}, [message.content, message.contentBlocks, isUser, isStreaming])
|
||||
return null
|
||||
}, [message.content, message.contentBlocks, isUser])
|
||||
|
||||
const selectedOptionKey = useMemo(() => {
|
||||
if (!parsedTags?.options || isStreaming) return null
|
||||
|
||||
const currentIndex = messages.findIndex((m) => m.id === message.id)
|
||||
if (currentIndex === -1 || currentIndex >= messages.length - 1) return null
|
||||
|
||||
const nextMessage = messages[currentIndex + 1]
|
||||
if (!nextMessage || nextMessage.role !== 'user') return null
|
||||
|
||||
const nextContent = nextMessage.content?.trim()
|
||||
if (!nextContent) return null
|
||||
|
||||
for (const [key, option] of Object.entries(parsedTags.options)) {
|
||||
const optionTitle = typeof option === 'string' ? option : option.title
|
||||
if (nextContent === optionTitle) {
|
||||
return key
|
||||
}
|
||||
}
|
||||
|
||||
return null
|
||||
}, [parsedTags?.options, messages, message.id, isStreaming])
|
||||
|
||||
// Get sendMessage from store for continuation actions
|
||||
const sendMessage = useCopilotStore((s) => s.sendMessage)
|
||||
|
||||
// Handler for option selection
|
||||
const handleOptionSelect = useCallback(
|
||||
(_optionKey: string, optionText: string) => {
|
||||
// Send the option text as a message
|
||||
sendMessage(optionText)
|
||||
},
|
||||
[sendMessage]
|
||||
)
|
||||
|
||||
// Memoize content blocks to avoid re-rendering unchanged blocks
|
||||
// No entrance animations to prevent layout shift
|
||||
const isActivelyStreaming = isLastMessage && isStreaming
|
||||
|
||||
const memoizedContentBlocks = useMemo(() => {
|
||||
if (!message.contentBlocks || message.contentBlocks.length === 0) {
|
||||
return null
|
||||
@@ -202,21 +209,21 @@ const CopilotMessage: FC<CopilotMessageProps> = memo(
|
||||
if (block.type === 'text') {
|
||||
const isLastTextBlock =
|
||||
index === message.contentBlocks!.length - 1 && block.type === 'text'
|
||||
// Always strip special tags from display (they're rendered separately as options/plan)
|
||||
const parsed = parseSpecialTags(block.content)
|
||||
const cleanBlockContent = parsed.cleanContent.replace(/\n{3,}/g, '\n\n')
|
||||
|
||||
// Skip if no content after stripping tags
|
||||
if (!cleanBlockContent.trim()) return null
|
||||
|
||||
// Use smooth streaming for the last text block if we're streaming
|
||||
const shouldUseSmoothing = isStreaming && isLastTextBlock
|
||||
const shouldUseSmoothing = isActivelyStreaming && isLastTextBlock
|
||||
const blockKey = `text-${index}-${block.timestamp || index}`
|
||||
|
||||
return (
|
||||
<div key={blockKey} className='w-full max-w-full'>
|
||||
{shouldUseSmoothing ? (
|
||||
<SmoothStreamingText content={cleanBlockContent} isStreaming={isStreaming} />
|
||||
<SmoothStreamingText
|
||||
content={cleanBlockContent}
|
||||
isStreaming={isActivelyStreaming}
|
||||
/>
|
||||
) : (
|
||||
<CopilotMarkdownRenderer content={cleanBlockContent} />
|
||||
)}
|
||||
@@ -224,9 +231,7 @@ const CopilotMessage: FC<CopilotMessageProps> = memo(
|
||||
)
|
||||
}
|
||||
if (block.type === 'thinking') {
|
||||
// Check if there are any blocks after this one (tool calls, text, etc.)
|
||||
const hasFollowingContent = index < message.contentBlocks!.length - 1
|
||||
// Check if special tags (options, plan) are present - should also close thinking
|
||||
const hasSpecialTags = !!(parsedTags?.options || parsedTags?.plan)
|
||||
const blockKey = `thinking-${index}-${block.timestamp || index}`
|
||||
|
||||
@@ -234,7 +239,7 @@ const CopilotMessage: FC<CopilotMessageProps> = memo(
|
||||
<div key={blockKey} className='w-full'>
|
||||
<ThinkingBlock
|
||||
content={block.content}
|
||||
isStreaming={isStreaming}
|
||||
isStreaming={isActivelyStreaming}
|
||||
hasFollowingContent={hasFollowingContent}
|
||||
hasSpecialTags={hasSpecialTags}
|
||||
/>
|
||||
@@ -246,18 +251,22 @@ const CopilotMessage: FC<CopilotMessageProps> = memo(
|
||||
|
||||
return (
|
||||
<div key={blockKey}>
|
||||
<ToolCall toolCallId={block.toolCall.id} toolCall={block.toolCall} />
|
||||
<ToolCall
|
||||
toolCallId={block.toolCall.id}
|
||||
toolCall={block.toolCall}
|
||||
isCurrentMessage={isLastMessage}
|
||||
/>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
return null
|
||||
})
|
||||
}, [message.contentBlocks, isStreaming, parsedTags])
|
||||
}, [message.contentBlocks, isActivelyStreaming, parsedTags, isLastMessage])
|
||||
|
||||
if (isUser) {
|
||||
return (
|
||||
<div
|
||||
className={`w-full max-w-full overflow-hidden transition-opacity duration-200 [max-width:var(--panel-max-width)] ${isDimmed ? 'opacity-40' : 'opacity-100'}`}
|
||||
className={`w-full max-w-full flex-none overflow-hidden transition-opacity duration-200 [max-width:var(--panel-max-width)] ${isDimmed ? 'opacity-40' : 'opacity-100'}`}
|
||||
style={{ '--panel-max-width': `${panelWidth - 16}px` } as React.CSSProperties}
|
||||
>
|
||||
{isEditMode ? (
|
||||
@@ -288,42 +297,15 @@ const CopilotMessage: FC<CopilotMessageProps> = memo(
|
||||
initialContexts={message.contexts}
|
||||
/>
|
||||
|
||||
{/* Inline Checkpoint Discard Confirmation - shown below input in edit mode */}
|
||||
{/* Inline checkpoint confirmation - shown below input in edit mode */}
|
||||
{showCheckpointDiscardModal && (
|
||||
<div className='mt-[8px] rounded-[4px] border border-[var(--border)] bg-[var(--surface-4)] p-[10px]'>
|
||||
<p className='mb-[8px] text-[12px] text-[var(--text-primary)]'>
|
||||
Continue from a previous message?
|
||||
</p>
|
||||
<div className='flex gap-[8px]'>
|
||||
<Button
|
||||
onClick={handleCancelCheckpointDiscard}
|
||||
variant='active'
|
||||
size='sm'
|
||||
className='flex-1'
|
||||
disabled={isProcessingDiscard}
|
||||
>
|
||||
Cancel
|
||||
</Button>
|
||||
<Button
|
||||
onClick={handleContinueAndRevert}
|
||||
variant='destructive'
|
||||
size='sm'
|
||||
className='flex-1'
|
||||
disabled={isProcessingDiscard}
|
||||
>
|
||||
{isProcessingDiscard ? 'Reverting...' : 'Revert'}
|
||||
</Button>
|
||||
<Button
|
||||
onClick={handleContinueWithoutRevert}
|
||||
variant='tertiary'
|
||||
size='sm'
|
||||
className='flex-1'
|
||||
disabled={isProcessingDiscard}
|
||||
>
|
||||
Continue
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
<CheckpointConfirmation
|
||||
variant='discard'
|
||||
isProcessing={isProcessingDiscard}
|
||||
onCancel={handleCancelCheckpointDiscard}
|
||||
onRevert={handleContinueAndRevert}
|
||||
onContinue={handleContinueWithoutRevert}
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
) : (
|
||||
@@ -348,46 +330,15 @@ const CopilotMessage: FC<CopilotMessageProps> = memo(
|
||||
ref={messageContentRef}
|
||||
className={`relative whitespace-pre-wrap break-words px-[2px] py-1 font-medium font-sans text-[var(--text-primary)] text-sm leading-[1.25rem] ${isSendingMessage && isLastUserMessage && isHoveringMessage ? 'pr-7' : ''} ${!isExpanded && needsExpansion ? 'max-h-[60px] overflow-hidden' : 'overflow-visible'}`}
|
||||
>
|
||||
{(() => {
|
||||
const text = message.content || ''
|
||||
const contexts: any[] = Array.isArray((message as any).contexts)
|
||||
? ((message as any).contexts as any[])
|
||||
: []
|
||||
|
||||
// Build tokens with their prefixes (@ for mentions, / for commands)
|
||||
const tokens = contexts
|
||||
.filter((c) => c?.kind !== 'current_workflow' && c?.label)
|
||||
.map((c) => {
|
||||
const prefix = c?.kind === 'slash_command' ? '/' : '@'
|
||||
return `${prefix}${c.label}`
|
||||
})
|
||||
if (!tokens.length) return text
|
||||
|
||||
const escapeRegex = (s: string) => s.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')
|
||||
const pattern = new RegExp(`(${tokens.map(escapeRegex).join('|')})`, 'g')
|
||||
|
||||
const nodes: React.ReactNode[] = []
|
||||
let lastIndex = 0
|
||||
let match: RegExpExecArray | null
|
||||
while ((match = pattern.exec(text)) !== null) {
|
||||
const i = match.index
|
||||
const before = text.slice(lastIndex, i)
|
||||
if (before) nodes.push(before)
|
||||
const mention = match[0]
|
||||
nodes.push(
|
||||
<span
|
||||
key={`mention-${i}-${lastIndex}`}
|
||||
className='rounded-[4px] bg-[rgba(50,189,126,0.65)] py-[1px]'
|
||||
>
|
||||
{mention}
|
||||
</span>
|
||||
)
|
||||
lastIndex = i + mention.length
|
||||
}
|
||||
const tail = text.slice(lastIndex)
|
||||
if (tail) nodes.push(tail)
|
||||
return nodes
|
||||
})()}
|
||||
{buildMentionHighlightNodes(
|
||||
message.content || '',
|
||||
message.contexts || [],
|
||||
(token, key) => (
|
||||
<span key={key} className='rounded-[4px] bg-[rgba(50,189,126,0.65)] py-[1px]'>
|
||||
{token}
|
||||
</span>
|
||||
)
|
||||
)}
|
||||
</div>
|
||||
|
||||
{/* Gradient fade when truncated - applies to entire message box */}
|
||||
@@ -437,65 +388,30 @@ const CopilotMessage: FC<CopilotMessageProps> = memo(
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Inline Restore Checkpoint Confirmation */}
|
||||
{/* Inline restore checkpoint confirmation */}
|
||||
{showRestoreConfirmation && (
|
||||
<div className='mt-[8px] rounded-[4px] border border-[var(--border)] bg-[var(--surface-4)] p-[10px]'>
|
||||
<p className='mb-[8px] text-[12px] text-[var(--text-primary)]'>
|
||||
Revert to checkpoint? This will restore your workflow to the state saved at this
|
||||
checkpoint.{' '}
|
||||
<span className='text-[var(--text-error)]'>This action cannot be undone.</span>
|
||||
</p>
|
||||
<div className='flex gap-[8px]'>
|
||||
<Button
|
||||
onClick={handleCancelRevert}
|
||||
variant='active'
|
||||
size='sm'
|
||||
className='flex-1'
|
||||
disabled={isReverting}
|
||||
>
|
||||
Cancel
|
||||
</Button>
|
||||
<Button
|
||||
onClick={handleConfirmRevert}
|
||||
variant='destructive'
|
||||
size='sm'
|
||||
className='flex-1'
|
||||
disabled={isReverting}
|
||||
>
|
||||
{isReverting ? 'Reverting...' : 'Revert'}
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
<CheckpointConfirmation
|
||||
variant='restore'
|
||||
isProcessing={isReverting}
|
||||
onCancel={handleCancelRevert}
|
||||
onRevert={handleConfirmRevert}
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
// Check if there's any visible content in the blocks
|
||||
const hasVisibleContent = useMemo(() => {
|
||||
if (!message.contentBlocks || message.contentBlocks.length === 0) return false
|
||||
return message.contentBlocks.some((block) => {
|
||||
if (block.type === 'text') {
|
||||
const parsed = parseSpecialTags(block.content)
|
||||
return parsed.cleanContent.trim().length > 0
|
||||
}
|
||||
return block.type === 'thinking' || block.type === 'tool_call'
|
||||
})
|
||||
}, [message.contentBlocks])
|
||||
|
||||
if (isAssistant) {
|
||||
return (
|
||||
<div
|
||||
className={`w-full max-w-full overflow-hidden [max-width:var(--panel-max-width)] ${isDimmed ? 'opacity-40' : 'opacity-100'}`}
|
||||
className={`w-full max-w-full flex-none overflow-hidden [max-width:var(--panel-max-width)] ${isDimmed ? 'opacity-40' : 'opacity-100'}`}
|
||||
style={{ '--panel-max-width': `${panelWidth - 16}px` } as React.CSSProperties}
|
||||
>
|
||||
<div className='max-w-full space-y-1 px-[2px]'>
|
||||
<div className='max-w-full space-y-[4px] px-[2px] pb-[4px]'>
|
||||
{/* Content blocks in chronological order */}
|
||||
{memoizedContentBlocks}
|
||||
{memoizedContentBlocks || (isStreaming && <div className='min-h-0' />)}
|
||||
|
||||
{isStreaming && (
|
||||
<StreamingIndicator className={!hasVisibleContent ? 'mt-1' : undefined} />
|
||||
)}
|
||||
{isStreaming && <StreamingIndicator />}
|
||||
|
||||
{message.errorType === 'usage_limit' && (
|
||||
<div className='flex gap-1.5'>
|
||||
@@ -534,6 +450,7 @@ const CopilotMessage: FC<CopilotMessageProps> = memo(
|
||||
isLastMessage && !isStreaming && parsedTags.optionsComplete === true
|
||||
}
|
||||
streaming={isStreaming || !parsedTags.optionsComplete}
|
||||
selectedOptionKey={selectedOptionKey}
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
@@ -544,50 +461,22 @@ const CopilotMessage: FC<CopilotMessageProps> = memo(
|
||||
return null
|
||||
},
|
||||
(prevProps, nextProps) => {
|
||||
// Custom comparison function for better streaming performance
|
||||
const prevMessage = prevProps.message
|
||||
const nextMessage = nextProps.message
|
||||
|
||||
// If message IDs are different, always re-render
|
||||
if (prevMessage.id !== nextMessage.id) {
|
||||
return false
|
||||
}
|
||||
if (prevMessage.id !== nextMessage.id) return false
|
||||
if (prevProps.isStreaming !== nextProps.isStreaming) return false
|
||||
if (prevProps.isDimmed !== nextProps.isDimmed) return false
|
||||
if (prevProps.panelWidth !== nextProps.panelWidth) return false
|
||||
if (prevProps.checkpointCount !== nextProps.checkpointCount) return false
|
||||
if (prevProps.isLastMessage !== nextProps.isLastMessage) return false
|
||||
|
||||
// If streaming state changed, re-render
|
||||
if (prevProps.isStreaming !== nextProps.isStreaming) {
|
||||
return false
|
||||
}
|
||||
|
||||
// If dimmed state changed, re-render
|
||||
if (prevProps.isDimmed !== nextProps.isDimmed) {
|
||||
return false
|
||||
}
|
||||
|
||||
// If panel width changed, re-render
|
||||
if (prevProps.panelWidth !== nextProps.panelWidth) {
|
||||
return false
|
||||
}
|
||||
|
||||
// If checkpoint count changed, re-render
|
||||
if (prevProps.checkpointCount !== nextProps.checkpointCount) {
|
||||
return false
|
||||
}
|
||||
|
||||
// If isLastMessage changed, re-render (for options visibility)
|
||||
if (prevProps.isLastMessage !== nextProps.isLastMessage) {
|
||||
return false
|
||||
}
|
||||
|
||||
// For streaming messages, check if content actually changed
|
||||
if (nextProps.isStreaming) {
|
||||
const prevBlocks = prevMessage.contentBlocks || []
|
||||
const nextBlocks = nextMessage.contentBlocks || []
|
||||
|
||||
if (prevBlocks.length !== nextBlocks.length) {
|
||||
return false // Content blocks changed
|
||||
}
|
||||
if (prevBlocks.length !== nextBlocks.length) return false
|
||||
|
||||
// Helper: get last block content by type
|
||||
const getLastBlockContent = (blocks: any[], type: 'text' | 'thinking'): string | null => {
|
||||
for (let i = blocks.length - 1; i >= 0; i--) {
|
||||
const block = blocks[i]
|
||||
@@ -598,7 +487,6 @@ const CopilotMessage: FC<CopilotMessageProps> = memo(
|
||||
return null
|
||||
}
|
||||
|
||||
// Re-render if the last text block content changed
|
||||
const prevLastTextContent = getLastBlockContent(prevBlocks as any[], 'text')
|
||||
const nextLastTextContent = getLastBlockContent(nextBlocks as any[], 'text')
|
||||
if (
|
||||
@@ -609,7 +497,6 @@ const CopilotMessage: FC<CopilotMessageProps> = memo(
|
||||
return false
|
||||
}
|
||||
|
||||
// Re-render if the last thinking block content changed
|
||||
const prevLastThinkingContent = getLastBlockContent(prevBlocks as any[], 'thinking')
|
||||
const nextLastThinkingContent = getLastBlockContent(nextBlocks as any[], 'thinking')
|
||||
if (
|
||||
@@ -620,24 +507,18 @@ const CopilotMessage: FC<CopilotMessageProps> = memo(
|
||||
return false
|
||||
}
|
||||
|
||||
// Check if tool calls changed
|
||||
const prevToolCalls = prevMessage.toolCalls || []
|
||||
const nextToolCalls = nextMessage.toolCalls || []
|
||||
|
||||
if (prevToolCalls.length !== nextToolCalls.length) {
|
||||
return false // Tool calls count changed
|
||||
}
|
||||
if (prevToolCalls.length !== nextToolCalls.length) return false
|
||||
|
||||
for (let i = 0; i < nextToolCalls.length; i++) {
|
||||
if (prevToolCalls[i]?.state !== nextToolCalls[i]?.state) {
|
||||
return false // Tool call state changed
|
||||
}
|
||||
if (prevToolCalls[i]?.state !== nextToolCalls[i]?.state) return false
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
// For non-streaming messages, do a deeper comparison including tool call states
|
||||
if (
|
||||
prevMessage.content !== nextMessage.content ||
|
||||
prevMessage.role !== nextMessage.role ||
|
||||
@@ -647,16 +528,12 @@ const CopilotMessage: FC<CopilotMessageProps> = memo(
|
||||
return false
|
||||
}
|
||||
|
||||
// Check tool call states for non-streaming messages too
|
||||
const prevToolCalls = prevMessage.toolCalls || []
|
||||
const nextToolCalls = nextMessage.toolCalls || []
|
||||
for (let i = 0; i < nextToolCalls.length; i++) {
|
||||
if (prevToolCalls[i]?.state !== nextToolCalls[i]?.state) {
|
||||
return false // Tool call state changed
|
||||
}
|
||||
if (prevToolCalls[i]?.state !== nextToolCalls[i]?.state) return false
|
||||
}
|
||||
|
||||
// Check contentBlocks tool call states
|
||||
const prevContentBlocks = prevMessage.contentBlocks || []
|
||||
const nextContentBlocks = nextMessage.contentBlocks || []
|
||||
for (let i = 0; i < nextContentBlocks.length; i++) {
|
||||
@@ -667,7 +544,7 @@ const CopilotMessage: FC<CopilotMessageProps> = memo(
|
||||
nextBlock?.type === 'tool_call' &&
|
||||
prevBlock.toolCall?.state !== nextBlock.toolCall?.state
|
||||
) {
|
||||
return false // ContentBlock tool call state changed
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -15,6 +15,7 @@ const logger = createLogger('useCheckpointManagement')
|
||||
* @param messageCheckpoints - Checkpoints for this message
|
||||
* @param onRevertModeChange - Callback for revert mode changes
|
||||
* @param onEditModeChange - Callback for edit mode changes
|
||||
* @param onCancelEdit - Callback when edit is cancelled
|
||||
* @returns Checkpoint management utilities
|
||||
*/
|
||||
export function useCheckpointManagement(
|
||||
@@ -37,17 +38,13 @@ export function useCheckpointManagement(
|
||||
|
||||
const { revertToCheckpoint, currentChat } = useCopilotStore()
|
||||
|
||||
/**
|
||||
* Handles initiating checkpoint revert
|
||||
*/
|
||||
/** Initiates checkpoint revert confirmation */
|
||||
const handleRevertToCheckpoint = useCallback(() => {
|
||||
setShowRestoreConfirmation(true)
|
||||
onRevertModeChange?.(true)
|
||||
}, [onRevertModeChange])
|
||||
|
||||
/**
|
||||
* Confirms checkpoint revert and updates state
|
||||
*/
|
||||
/** Confirms and executes checkpoint revert */
|
||||
const handleConfirmRevert = useCallback(async () => {
|
||||
if (messageCheckpoints.length > 0) {
|
||||
const latestCheckpoint = messageCheckpoints[0]
|
||||
@@ -116,18 +113,13 @@ export function useCheckpointManagement(
|
||||
onRevertModeChange,
|
||||
])
|
||||
|
||||
/**
|
||||
* Cancels checkpoint revert
|
||||
*/
|
||||
/** Cancels checkpoint revert */
|
||||
const handleCancelRevert = useCallback(() => {
|
||||
setShowRestoreConfirmation(false)
|
||||
onRevertModeChange?.(false)
|
||||
}, [onRevertModeChange])
|
||||
|
||||
/**
|
||||
* Handles "Continue and revert" action for checkpoint discard modal
|
||||
* Reverts to checkpoint then proceeds with pending edit
|
||||
*/
|
||||
/** Reverts to checkpoint then proceeds with pending edit */
|
||||
const handleContinueAndRevert = useCallback(async () => {
|
||||
setIsProcessingDiscard(true)
|
||||
try {
|
||||
@@ -184,9 +176,7 @@ export function useCheckpointManagement(
|
||||
}
|
||||
}, [messageCheckpoints, revertToCheckpoint, message, messages, onEditModeChange, onCancelEdit])
|
||||
|
||||
/**
|
||||
* Cancels checkpoint discard and clears pending edit
|
||||
*/
|
||||
/** Cancels checkpoint discard and clears pending edit */
|
||||
const handleCancelCheckpointDiscard = useCallback(() => {
|
||||
setShowCheckpointDiscardModal(false)
|
||||
onEditModeChange?.(false)
|
||||
@@ -194,11 +184,11 @@ export function useCheckpointManagement(
|
||||
pendingEditRef.current = null
|
||||
}, [onEditModeChange, onCancelEdit])
|
||||
|
||||
/**
|
||||
* Continues with edit WITHOUT reverting checkpoint
|
||||
*/
|
||||
/** Continues with edit without reverting checkpoint */
|
||||
const handleContinueWithoutRevert = useCallback(async () => {
|
||||
setShowCheckpointDiscardModal(false)
|
||||
onEditModeChange?.(false)
|
||||
onCancelEdit?.()
|
||||
|
||||
if (pendingEditRef.current) {
|
||||
const { message: msg, fileAttachments, contexts } = pendingEditRef.current
|
||||
@@ -225,43 +215,34 @@ export function useCheckpointManagement(
|
||||
}
|
||||
}, [message, messages, onEditModeChange, onCancelEdit])
|
||||
|
||||
/**
|
||||
* Handles keyboard events for restore confirmation (Escape/Enter)
|
||||
*/
|
||||
/** Handles keyboard events for confirmation dialogs */
|
||||
useEffect(() => {
|
||||
if (!showRestoreConfirmation) return
|
||||
const isActive = showRestoreConfirmation || showCheckpointDiscardModal
|
||||
if (!isActive) return
|
||||
|
||||
const handleKeyDown = (event: KeyboardEvent) => {
|
||||
if (event.defaultPrevented) return
|
||||
|
||||
if (event.key === 'Escape') {
|
||||
handleCancelRevert()
|
||||
if (showRestoreConfirmation) handleCancelRevert()
|
||||
else handleCancelCheckpointDiscard()
|
||||
} else if (event.key === 'Enter') {
|
||||
event.preventDefault()
|
||||
handleConfirmRevert()
|
||||
if (showRestoreConfirmation) handleConfirmRevert()
|
||||
else handleContinueAndRevert()
|
||||
}
|
||||
}
|
||||
|
||||
document.addEventListener('keydown', handleKeyDown)
|
||||
return () => document.removeEventListener('keydown', handleKeyDown)
|
||||
}, [showRestoreConfirmation, handleCancelRevert, handleConfirmRevert])
|
||||
|
||||
/**
|
||||
* Handles keyboard events for checkpoint discard modal (Escape/Enter)
|
||||
*/
|
||||
useEffect(() => {
|
||||
if (!showCheckpointDiscardModal) return
|
||||
|
||||
const handleCheckpointDiscardKeyDown = async (event: KeyboardEvent) => {
|
||||
if (event.key === 'Escape') {
|
||||
handleCancelCheckpointDiscard()
|
||||
} else if (event.key === 'Enter') {
|
||||
event.preventDefault()
|
||||
await handleContinueAndRevert()
|
||||
}
|
||||
}
|
||||
|
||||
document.addEventListener('keydown', handleCheckpointDiscardKeyDown)
|
||||
return () => document.removeEventListener('keydown', handleCheckpointDiscardKeyDown)
|
||||
}, [showCheckpointDiscardModal, handleCancelCheckpointDiscard, handleContinueAndRevert])
|
||||
}, [
|
||||
showRestoreConfirmation,
|
||||
showCheckpointDiscardModal,
|
||||
handleCancelRevert,
|
||||
handleConfirmRevert,
|
||||
handleCancelCheckpointDiscard,
|
||||
handleContinueAndRevert,
|
||||
])
|
||||
|
||||
return {
|
||||
// State
|
||||
|
||||
@@ -2,24 +2,23 @@
|
||||
|
||||
import { useCallback, useEffect, useRef, useState } from 'react'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import type { CopilotMessage } from '@/stores/panel'
|
||||
import type { ChatContext, CopilotMessage, MessageFileAttachment } from '@/stores/panel'
|
||||
import { useCopilotStore } from '@/stores/panel'
|
||||
|
||||
const logger = createLogger('useMessageEditing')
|
||||
|
||||
/**
|
||||
* Message truncation height in pixels
|
||||
*/
|
||||
/** Ref interface for UserInput component */
|
||||
interface UserInputRef {
|
||||
focus: () => void
|
||||
}
|
||||
|
||||
/** Message truncation height in pixels */
|
||||
const MESSAGE_TRUNCATION_HEIGHT = 60
|
||||
|
||||
/**
|
||||
* Delay before attaching click-outside listener to avoid immediate trigger
|
||||
*/
|
||||
/** Delay before attaching click-outside listener to avoid immediate trigger */
|
||||
const CLICK_OUTSIDE_DELAY = 100
|
||||
|
||||
/**
|
||||
* Delay before aborting when editing during stream
|
||||
*/
|
||||
/** Delay before aborting when editing during stream */
|
||||
const ABORT_DELAY = 100
|
||||
|
||||
interface UseMessageEditingProps {
|
||||
@@ -32,8 +31,8 @@ interface UseMessageEditingProps {
|
||||
setShowCheckpointDiscardModal: (show: boolean) => void
|
||||
pendingEditRef: React.MutableRefObject<{
|
||||
message: string
|
||||
fileAttachments?: any[]
|
||||
contexts?: any[]
|
||||
fileAttachments?: MessageFileAttachment[]
|
||||
contexts?: ChatContext[]
|
||||
} | null>
|
||||
/**
* When true, disables the internal document click-outside handler.
@@ -69,13 +68,11 @@ export function useMessageEditing(props: UseMessageEditingProps) {

const editContainerRef = useRef<HTMLDivElement>(null)
const messageContentRef = useRef<HTMLDivElement>(null)
const userInputRef = useRef<any>(null)
const userInputRef = useRef<UserInputRef>(null)

const { sendMessage, isSendingMessage, abortMessage, currentChat } = useCopilotStore()

/**
* Checks if message content needs expansion based on height
*/
/** Checks if message content needs expansion based on height */
useEffect(() => {
if (messageContentRef.current && message.role === 'user') {
const scrollHeight = messageContentRef.current.scrollHeight
@@ -83,9 +80,7 @@ export function useMessageEditing(props: UseMessageEditingProps) {
}
}, [message.content, message.role])

/**
* Handles entering edit mode
*/
/** Enters edit mode */
const handleEditMessage = useCallback(() => {
setIsEditMode(true)
setIsExpanded(false)
@@ -97,18 +92,14 @@ export function useMessageEditing(props: UseMessageEditingProps) {
}, 0)
}, [message.content, onEditModeChange])

/**
* Handles canceling edit mode
*/
/** Cancels edit mode */
const handleCancelEdit = useCallback(() => {
setIsEditMode(false)
setEditedContent(message.content)
onEditModeChange?.(false)
}, [message.content, onEditModeChange])

/**
* Handles clicking on message to enter edit mode
*/
/** Handles message click to enter edit mode */
const handleMessageClick = useCallback(() => {
if (needsExpansion && !isExpanded) {
setIsExpanded(true)
@@ -116,12 +107,13 @@ export function useMessageEditing(props: UseMessageEditingProps) {
handleEditMessage()
}, [needsExpansion, isExpanded, handleEditMessage])

/**
* Performs the actual edit operation
* Truncates messages after edited message and resends with same ID
*/
/** Performs the edit operation - truncates messages after edited message and resends */
const performEdit = useCallback(
async (editedMessage: string, fileAttachments?: any[], contexts?: any[]) => {
async (
editedMessage: string,
fileAttachments?: MessageFileAttachment[],
contexts?: ChatContext[]
) => {
const currentMessages = messages
const editIndex = currentMessages.findIndex((m) => m.id === message.id)

@@ -134,7 +126,7 @@ export function useMessageEditing(props: UseMessageEditingProps) {
...message,
content: editedMessage,
fileAttachments: fileAttachments || message.fileAttachments,
contexts: contexts || (message as any).contexts,
contexts: contexts || message.contexts,
}

useCopilotStore.setState({ messages: [...truncatedMessages, updatedMessage] })
@@ -153,7 +145,7 @@ export function useMessageEditing(props: UseMessageEditingProps) {
timestamp: m.timestamp,
...(m.contentBlocks && { contentBlocks: m.contentBlocks }),
...(m.fileAttachments && { fileAttachments: m.fileAttachments }),
...((m as any).contexts && { contexts: (m as any).contexts }),
...(m.contexts && { contexts: m.contexts }),
})),
}),
})
@@ -164,7 +156,7 @@ export function useMessageEditing(props: UseMessageEditingProps) {

await sendMessage(editedMessage, {
fileAttachments: fileAttachments || message.fileAttachments,
contexts: contexts || (message as any).contexts,
contexts: contexts || message.contexts,
messageId: message.id,
queueIfBusy: false,
})
@@ -173,12 +165,13 @@ export function useMessageEditing(props: UseMessageEditingProps) {
[messages, message, currentChat, sendMessage, onEditModeChange]
)

/**
* Handles submitting edited message
* Checks for checkpoints and shows confirmation if needed
*/
/** Submits edited message, checking for checkpoints first */
const handleSubmitEdit = useCallback(
async (editedMessage: string, fileAttachments?: any[], contexts?: any[]) => {
async (
editedMessage: string,
fileAttachments?: MessageFileAttachment[],
contexts?: ChatContext[]
) => {
if (!editedMessage.trim()) return

if (isSendingMessage) {
@@ -204,9 +197,7 @@ export function useMessageEditing(props: UseMessageEditingProps) {
]
)

/**
* Keyboard-only exit (Esc). Click-outside is optionally handled by parent.
*/
/** Keyboard-only exit (Esc) */
useEffect(() => {
if (!isEditMode) return

@@ -222,9 +213,7 @@ export function useMessageEditing(props: UseMessageEditingProps) {
}
}, [isEditMode, handleCancelEdit])

/**
* Optional document-level click-outside handler (disabled when parent manages it).
*/
/** Optional document-level click-outside handler */
useEffect(() => {
if (!isEditMode || disableDocumentClickOutside) return


@@ -0,0 +1 @@
export * from './copilot-message'
@@ -1,7 +1,8 @@
export * from './copilot-message/copilot-message'
export * from './plan-mode-section/plan-mode-section'
export * from './queued-messages/queued-messages'
export * from './todo-list/todo-list'
export * from './tool-call/tool-call'
export * from './user-input/user-input'
export * from './welcome/welcome'
export * from './chat-history-skeleton'
export * from './copilot-message'
export * from './plan-mode-section'
export * from './queued-messages'
export * from './todo-list'
export * from './tool-call'
export * from './user-input'
export * from './welcome'

@@ -0,0 +1 @@
export * from './plan-mode-section'
@@ -29,7 +29,7 @@ import { Check, GripHorizontal, Pencil, X } from 'lucide-react'
import { Button, Textarea } from '@/components/emcn'
import { Trash } from '@/components/emcn/icons/trash'
import { cn } from '@/lib/core/utils/cn'
import CopilotMarkdownRenderer from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/copilot-message/components/markdown-renderer'
import { CopilotMarkdownRenderer } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/copilot-message/components/markdown-renderer'

/**
* Shared border and background styles

@@ -0,0 +1 @@
export * from './queued-messages'
@@ -31,21 +31,22 @@ export function QueuedMessages() {
if (messageQueue.length === 0) return null

return (
<div className='mx-2 overflow-hidden rounded-t-lg border border-black/[0.08] border-b-0 bg-[var(--bg-secondary)] dark:border-white/[0.08]'>
<div className='mx-[14px] overflow-hidden rounded-t-[4px] border border-[var(--border)] border-b-0 bg-[var(--bg-secondary)]'>
{/* Header */}
<button
type='button'
onClick={() => setIsExpanded(!isExpanded)}
className='flex w-full items-center justify-between px-2.5 py-1.5 transition-colors hover:bg-[var(--bg-tertiary)]'
className='flex w-full items-center justify-between px-[10px] py-[6px] transition-colors hover:bg-[var(--surface-3)]'
>
<div className='flex items-center gap-1.5'>
<div className='flex items-center gap-[6px]'>
{isExpanded ? (
<ChevronDown className='h-3 w-3 text-[var(--text-tertiary)]' />
<ChevronDown className='h-[14px] w-[14px] text-[var(--text-tertiary)]' />
) : (
<ChevronRight className='h-3 w-3 text-[var(--text-tertiary)]' />
<ChevronRight className='h-[14px] w-[14px] text-[var(--text-tertiary)]' />
)}
<span className='font-medium text-[var(--text-secondary)] text-xs'>
{messageQueue.length} Queued
<span className='font-medium text-[12px] text-[var(--text-primary)]'>Queued</span>
<span className='flex-shrink-0 font-medium text-[12px] text-[var(--text-tertiary)]'>
{messageQueue.length}
</span>
</div>
</button>
@@ -56,30 +57,30 @@ export function QueuedMessages() {
{messageQueue.map((msg) => (
<div
key={msg.id}
className='group flex items-center gap-2 border-black/[0.04] border-t px-2.5 py-1.5 hover:bg-[var(--bg-tertiary)] dark:border-white/[0.04]'
className='group flex items-center gap-[8px] border-[var(--border)] border-t px-[10px] py-[6px] hover:bg-[var(--surface-3)]'
>
{/* Radio indicator */}
<div className='flex h-3 w-3 shrink-0 items-center justify-center'>
<div className='h-2.5 w-2.5 rounded-full border border-[var(--text-tertiary)]/50' />
<div className='flex h-[14px] w-[14px] shrink-0 items-center justify-center'>
<div className='h-[10px] w-[10px] rounded-full border border-[var(--text-tertiary)]/50' />
</div>

{/* Message content */}
<div className='min-w-0 flex-1'>
<p className='truncate text-[var(--text-primary)] text-xs'>{msg.content}</p>
<p className='truncate text-[13px] text-[var(--text-primary)]'>{msg.content}</p>
</div>

{/* Actions - always visible */}
<div className='flex shrink-0 items-center gap-0.5'>
<div className='flex shrink-0 items-center gap-[4px]'>
<button
type='button'
onClick={(e) => {
e.stopPropagation()
handleSendNow(msg.id)
}}
className='rounded p-0.5 text-[var(--text-tertiary)] transition-colors hover:bg-[var(--bg-quaternary)] hover:text-[var(--text-primary)]'
className='rounded p-[3px] text-[var(--text-tertiary)] transition-colors hover:bg-[var(--bg-quaternary)] hover:text-[var(--text-primary)]'
title='Send now (aborts current stream)'
>
<ArrowUp className='h-3 w-3' />
<ArrowUp className='h-[14px] w-[14px]' />
</button>
<button
type='button'
@@ -87,10 +88,10 @@ export function QueuedMessages() {
e.stopPropagation()
handleRemove(msg.id)
}}
className='rounded p-0.5 text-[var(--text-tertiary)] transition-colors hover:bg-red-500/10 hover:text-red-400'
className='rounded p-[3px] text-[var(--text-tertiary)] transition-colors hover:bg-red-500/10 hover:text-red-400'
title='Remove from queue'
>
<Trash2 className='h-3 w-3' />
<Trash2 className='h-[14px] w-[14px]' />
</button>
</div>
</div>

@@ -0,0 +1 @@
export * from './todo-list'
@@ -0,0 +1 @@
export * from './tool-call'
@@ -15,7 +15,7 @@ import {
hasInterrupt as hasInterruptFromConfig,
isSpecialTool as isSpecialToolFromConfig,
} from '@/lib/copilot/tools/client/ui-config'
import CopilotMarkdownRenderer from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/copilot-message/components/markdown-renderer'
import { CopilotMarkdownRenderer } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/copilot-message/components/markdown-renderer'
import { SmoothStreamingText } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/copilot-message/components/smooth-streaming'
import { ThinkingBlock } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/copilot-message/components/thinking-block'
import { getDisplayValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/workflow-block'
@@ -26,27 +26,30 @@ import { CLASS_TOOL_METADATA } from '@/stores/panel/copilot/store'
import type { SubAgentContentBlock } from '@/stores/panel/copilot/types'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'

/**
* Plan step can be either a string or an object with title and plan
*/
/** Plan step can be a string or an object with title and optional plan content */
type PlanStep = string | { title: string; plan?: string }

/**
* Option can be either a string or an object with title and description
*/
/** Option can be a string or an object with title and optional description */
type OptionItem = string | { title: string; description?: string }

/** Result of parsing special XML tags from message content */
interface ParsedTags {
/** Parsed plan steps, keyed by step number */
plan?: Record<string, PlanStep>
/** Whether the plan tag is complete (has closing tag) */
planComplete?: boolean
/** Parsed options, keyed by option number */
options?: Record<string, OptionItem>
/** Whether the options tag is complete (has closing tag) */
optionsComplete?: boolean
/** Content with special tags removed */
cleanContent: string
}

/**
|
||||
* Extract plan steps from plan_respond tool calls in subagent blocks.
|
||||
* Returns { steps, isComplete } where steps is in the format expected by PlanSteps component.
|
||||
* Extracts plan steps from plan_respond tool calls in subagent blocks.
|
||||
* @param blocks - The subagent content blocks to search
|
||||
* @returns Object containing steps in the format expected by PlanSteps component, and completion status
|
||||
*/
|
||||
function extractPlanFromBlocks(blocks: SubAgentContentBlock[] | undefined): {
|
||||
steps: Record<string, PlanStep> | undefined
|
||||
@@ -54,7 +57,6 @@ function extractPlanFromBlocks(blocks: SubAgentContentBlock[] | undefined): {
|
||||
} {
|
||||
if (!blocks) return { steps: undefined, isComplete: false }
|
||||
|
||||
// Find the plan_respond tool call
|
||||
const planRespondBlock = blocks.find(
|
||||
(b) => b.type === 'subagent_tool_call' && b.toolCall?.name === 'plan_respond'
|
||||
)
|
||||
@@ -63,8 +65,6 @@ function extractPlanFromBlocks(blocks: SubAgentContentBlock[] | undefined): {
|
||||
return { steps: undefined, isComplete: false }
|
||||
}
|
||||
|
||||
// Tool call arguments can be in different places depending on the source
|
||||
// Also handle nested data.arguments structure from the schema
|
||||
const tc = planRespondBlock.toolCall as any
|
||||
const args = tc.params || tc.parameters || tc.input || tc.arguments || tc.data?.arguments || {}
|
||||
const stepsArray = args.steps
|
||||
@@ -73,9 +73,6 @@ function extractPlanFromBlocks(blocks: SubAgentContentBlock[] | undefined): {
|
||||
return { steps: undefined, isComplete: false }
|
||||
}
|
||||
|
||||
// Convert array format to Record<string, PlanStep> format
|
||||
// From: [{ number: 1, title: "..." }, { number: 2, title: "..." }]
|
||||
// To: { "1": "...", "2": "..." }
|
||||
const steps: Record<string, PlanStep> = {}
|
||||
for (const step of stepsArray) {
|
||||
if (step.number !== undefined && step.title) {
|
||||
@@ -83,7 +80,6 @@ function extractPlanFromBlocks(blocks: SubAgentContentBlock[] | undefined): {
|
||||
}
|
||||
}
|
||||
|
||||
// Check if the tool call is complete (not pending/executing)
|
||||
const isComplete =
|
||||
planRespondBlock.toolCall.state === ClientToolCallState.success ||
|
||||
planRespondBlock.toolCall.state === ClientToolCallState.error
|
||||
@@ -95,8 +91,9 @@ function extractPlanFromBlocks(blocks: SubAgentContentBlock[] | undefined): {
|
||||
}
|
||||
|
||||
/**
|
||||
* Try to parse partial JSON for streaming options.
|
||||
* Attempts to extract complete key-value pairs from incomplete JSON.
|
||||
* Parses partial JSON for streaming options, extracting complete key-value pairs from incomplete JSON.
|
||||
* @param jsonStr - The potentially incomplete JSON string
|
||||
* @returns Parsed options record or null if no valid options found
|
||||
*/
|
||||
function parsePartialOptionsJson(jsonStr: string): Record<string, OptionItem> | null {
|
||||
// Try parsing as-is first (might be complete)
|
||||
@@ -107,8 +104,9 @@ function parsePartialOptionsJson(jsonStr: string): Record<string, OptionItem> |
|
||||
}
|
||||
|
||||
// Try to extract complete key-value pairs from partial JSON
|
||||
// Match patterns like "1": "some text" or "1": {"title": "text"}
|
||||
// Match patterns like "1": "some text" or "1": {"title": "text", "description": "..."}
|
||||
const result: Record<string, OptionItem> = {}
|
||||
|
||||
// Match complete string values: "key": "value"
|
||||
const stringPattern = /"(\d+)":\s*"([^"]*?)"/g
|
||||
let match
|
||||
@@ -116,18 +114,24 @@ function parsePartialOptionsJson(jsonStr: string): Record<string, OptionItem> |
|
||||
result[match[1]] = match[2]
|
||||
}
|
||||
|
||||
// Match complete object values: "key": {"title": "value"}
|
||||
const objectPattern = /"(\d+)":\s*\{[^}]*"title":\s*"([^"]*)"[^}]*\}/g
|
||||
// Match complete object values with title and optional description
|
||||
// Pattern matches: "1": {"title": "...", "description": "..."} or "1": {"title": "..."}
|
||||
const objectPattern =
|
||||
/"(\d+)":\s*\{\s*"title":\s*"((?:[^"\\]|\\.)*)"\s*(?:,\s*"description":\s*"((?:[^"\\]|\\.)*)")?\s*\}/g
|
||||
while ((match = objectPattern.exec(jsonStr)) !== null) {
|
||||
result[match[1]] = { title: match[2] }
|
||||
const key = match[1]
|
||||
const title = match[2].replace(/\\"/g, '"').replace(/\\n/g, '\n')
|
||||
const description = match[3]?.replace(/\\"/g, '"').replace(/\\n/g, '\n')
|
||||
result[key] = description ? { title, description } : { title }
|
||||
}
|
||||
|
||||
return Object.keys(result).length > 0 ? result : null
|
||||
}
|
||||
|
||||
/**
|
||||
* Try to parse partial JSON for streaming plan steps.
|
||||
* Attempts to extract complete key-value pairs from incomplete JSON.
|
||||
* Parses partial JSON for streaming plan steps, extracting complete key-value pairs from incomplete JSON.
|
||||
* @param jsonStr - The potentially incomplete JSON string
|
||||
* @returns Parsed plan steps record or null if no valid steps found
|
||||
*/
|
||||
function parsePartialPlanJson(jsonStr: string): Record<string, PlanStep> | null {
|
||||
// Try parsing as-is first (might be complete)
|
||||
@@ -159,7 +163,10 @@ function parsePartialPlanJson(jsonStr: string): Record<string, PlanStep> | null
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse <plan> and <options> tags from content
|
||||
* Parses special XML tags (`<plan>` and `<options>`) from message content.
|
||||
* Handles both complete and streaming/incomplete tags.
|
||||
* @param content - The message content to parse
|
||||
* @returns Parsed tags with plan, options, and clean content
|
||||
*/
|
||||
export function parseSpecialTags(content: string): ParsedTags {
|
||||
const result: ParsedTags = { cleanContent: content }
|
||||
@@ -167,12 +174,18 @@ export function parseSpecialTags(content: string): ParsedTags {
|
||||
// Parse <plan> tag - check for complete tag first
|
||||
const planMatch = content.match(/<plan>([\s\S]*?)<\/plan>/i)
|
||||
if (planMatch) {
|
||||
// Always strip the tag from display, even if JSON is invalid
|
||||
result.cleanContent = result.cleanContent.replace(planMatch[0], '').trim()
|
||||
try {
|
||||
result.plan = JSON.parse(planMatch[1])
|
||||
result.planComplete = true
|
||||
result.cleanContent = result.cleanContent.replace(planMatch[0], '').trim()
|
||||
} catch {
|
||||
// Invalid JSON, ignore
|
||||
// JSON.parse failed - use regex fallback to extract plan from malformed JSON
|
||||
const fallbackPlan = parsePartialPlanJson(planMatch[1])
|
||||
if (fallbackPlan) {
|
||||
result.plan = fallbackPlan
|
||||
result.planComplete = true
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// Check for streaming/incomplete plan tag
|
||||
@@ -191,12 +204,18 @@ export function parseSpecialTags(content: string): ParsedTags {
|
||||
// Parse <options> tag - check for complete tag first
|
||||
const optionsMatch = content.match(/<options>([\s\S]*?)<\/options>/i)
|
||||
if (optionsMatch) {
|
||||
// Always strip the tag from display, even if JSON is invalid
|
||||
result.cleanContent = result.cleanContent.replace(optionsMatch[0], '').trim()
|
||||
try {
|
||||
result.options = JSON.parse(optionsMatch[1])
|
||||
result.optionsComplete = true
|
||||
result.cleanContent = result.cleanContent.replace(optionsMatch[0], '').trim()
|
||||
} catch {
|
||||
// Invalid JSON, ignore
|
||||
// JSON.parse failed - use regex fallback to extract options from malformed JSON
|
||||
const fallbackOptions = parsePartialOptionsJson(optionsMatch[1])
|
||||
if (fallbackOptions) {
|
||||
result.options = fallbackOptions
|
||||
result.optionsComplete = true
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// Check for streaming/incomplete options tag
|
||||
@@ -220,15 +239,15 @@ export function parseSpecialTags(content: string): ParsedTags {
|
||||
}
|
||||
|
||||
/**
|
||||
* PlanSteps component renders the workflow plan steps from the plan subagent
|
||||
* Displays as a to-do list with checkmarks and strikethrough text
|
||||
* Renders workflow plan steps as a numbered to-do list.
|
||||
* @param steps - Plan steps keyed by step number
|
||||
* @param streaming - When true, uses smooth streaming animation for step titles
|
||||
*/
|
||||
function PlanSteps({
|
||||
steps,
|
||||
streaming = false,
|
||||
}: {
|
||||
steps: Record<string, PlanStep>
|
||||
/** When true, uses smooth streaming animation for step titles */
|
||||
streaming?: boolean
|
||||
}) {
|
||||
const sortedSteps = useMemo(() => {
|
||||
@@ -249,7 +268,7 @@ function PlanSteps({
|
||||
if (sortedSteps.length === 0) return null
|
||||
|
||||
return (
|
||||
<div className='mt-1.5 overflow-hidden rounded-[6px] border border-[var(--border-1)] bg-[var(--surface-1)]'>
|
||||
<div className='mt-0 overflow-hidden rounded-[6px] border border-[var(--border-1)] bg-[var(--surface-1)]'>
|
||||
<div className='flex items-center gap-[8px] border-[var(--border-1)] border-b bg-[var(--surface-2)] p-[8px]'>
|
||||
<LayoutList className='ml-[2px] h-3 w-3 flex-shrink-0 text-[var(--text-tertiary)]' />
|
||||
<span className='font-medium text-[12px] text-[var(--text-primary)]'>To-dos</span>
|
||||
@@ -257,7 +276,7 @@ function PlanSteps({
|
||||
{sortedSteps.length}
|
||||
</span>
|
||||
</div>
|
||||
<div className='flex flex-col gap-[6px] px-[10px] py-[8px]'>
|
||||
<div className='flex flex-col gap-[6px] px-[10px] py-[6px]'>
|
||||
{sortedSteps.map(([num, title], index) => {
|
||||
const isLastStep = index === sortedSteps.length - 1
|
||||
return (
|
||||
@@ -281,9 +300,8 @@ function PlanSteps({
|
||||
}
|
||||
|
||||
/**
|
||||
* OptionsSelector component renders selectable options from the agent
|
||||
* Supports keyboard navigation (arrow up/down, enter) and click selection
|
||||
* After selection, shows the chosen option highlighted and others struck through
|
||||
* Renders selectable options from the agent with keyboard navigation and click selection.
|
||||
* After selection, shows the chosen option highlighted and others struck through.
|
||||
*/
|
||||
export function OptionsSelector({
|
||||
options,
|
||||
@@ -291,6 +309,7 @@ export function OptionsSelector({
|
||||
disabled = false,
|
||||
enableKeyboardNav = false,
|
||||
streaming = false,
|
||||
selectedOptionKey = null,
|
||||
}: {
|
||||
options: Record<string, OptionItem>
|
||||
onSelect: (optionKey: string, optionText: string) => void
|
||||
@@ -299,6 +318,8 @@ export function OptionsSelector({
|
||||
enableKeyboardNav?: boolean
|
||||
/** When true, looks enabled but interaction is disabled (for streaming state) */
|
||||
streaming?: boolean
|
||||
/** Pre-selected option key (for restoring selection from history) */
|
||||
selectedOptionKey?: string | null
|
||||
}) {
|
||||
const isInteractionDisabled = disabled || streaming
|
||||
const sortedOptions = useMemo(() => {
|
||||
@@ -316,8 +337,8 @@ export function OptionsSelector({
|
||||
})
|
||||
}, [options])
|
||||
|
||||
const [hoveredIndex, setHoveredIndex] = useState(0)
|
||||
const [chosenKey, setChosenKey] = useState<string | null>(null)
|
||||
const [hoveredIndex, setHoveredIndex] = useState(-1)
|
||||
const [chosenKey, setChosenKey] = useState<string | null>(selectedOptionKey)
|
||||
const containerRef = useRef<HTMLDivElement>(null)
|
||||
|
||||
const isLocked = chosenKey !== null
|
||||
@@ -327,7 +348,8 @@ export function OptionsSelector({
|
||||
if (isInteractionDisabled || !enableKeyboardNav || isLocked) return
|
||||
|
||||
const handleKeyDown = (e: KeyboardEvent) => {
|
||||
// Only handle if the container or document body is focused (not when typing in input)
|
||||
if (e.defaultPrevented) return
|
||||
|
||||
const activeElement = document.activeElement
|
||||
const isInputFocused =
|
||||
activeElement?.tagName === 'INPUT' ||
|
||||
@@ -338,13 +360,14 @@ export function OptionsSelector({
|
||||
|
||||
if (e.key === 'ArrowDown') {
|
||||
e.preventDefault()
|
||||
setHoveredIndex((prev) => Math.min(prev + 1, sortedOptions.length - 1))
|
||||
setHoveredIndex((prev) => (prev < 0 ? 0 : Math.min(prev + 1, sortedOptions.length - 1)))
|
||||
} else if (e.key === 'ArrowUp') {
|
||||
e.preventDefault()
|
||||
setHoveredIndex((prev) => Math.max(prev - 1, 0))
|
||||
setHoveredIndex((prev) => (prev < 0 ? sortedOptions.length - 1 : Math.max(prev - 1, 0)))
|
||||
} else if (e.key === 'Enter') {
|
||||
e.preventDefault()
|
||||
const selected = sortedOptions[hoveredIndex]
|
||||
const indexToSelect = hoveredIndex < 0 ? 0 : hoveredIndex
|
||||
const selected = sortedOptions[indexToSelect]
|
||||
if (selected) {
|
||||
setChosenKey(selected.key)
|
||||
onSelect(selected.key, selected.title)
|
||||
@@ -368,7 +391,7 @@ export function OptionsSelector({
|
||||
if (sortedOptions.length === 0) return null
|
||||
|
||||
return (
|
||||
<div ref={containerRef} className='flex flex-col gap-0.5 pb-0.5'>
|
||||
<div ref={containerRef} className='flex flex-col gap-[4px] pt-[4px]'>
|
||||
{sortedOptions.map((option, index) => {
|
||||
const isHovered = index === hoveredIndex && !isLocked
|
||||
const isChosen = option.key === chosenKey
|
||||
@@ -386,6 +409,9 @@ export function OptionsSelector({
|
||||
onMouseEnter={() => {
|
||||
if (!isLocked && !streaming) setHoveredIndex(index)
|
||||
}}
|
||||
onMouseLeave={() => {
|
||||
if (!isLocked && !streaming && sortedOptions.length === 1) setHoveredIndex(-1)
|
||||
}}
|
||||
className={clsx(
|
||||
'group flex cursor-pointer items-start gap-2 rounded-[6px] p-1',
|
||||
'hover:bg-[var(--surface-4)]',
|
||||
@@ -421,30 +447,31 @@ export function OptionsSelector({
|
||||
)
|
||||
}
|
||||
|
||||
/** Props for the ToolCall component */
|
||||
interface ToolCallProps {
|
||||
/** Tool call data object */
|
||||
toolCall?: CopilotToolCall
|
||||
/** Tool call ID for store lookup */
|
||||
toolCallId?: string
|
||||
/** Callback when tool call state changes */
|
||||
onStateChange?: (state: any) => void
|
||||
/** Whether this tool call is from the current/latest message. Controls shimmer and action buttons. */
|
||||
isCurrentMessage?: boolean
|
||||
}
|
||||
|
||||
/**
|
||||
* Props for shimmer overlay text component.
|
||||
*/
|
||||
/** Props for the ShimmerOverlayText component */
|
||||
interface ShimmerOverlayTextProps {
|
||||
/** The text content to display */
|
||||
/** Text content to display */
|
||||
text: string
|
||||
/** Whether the shimmer animation is active */
|
||||
/** Whether shimmer animation is active */
|
||||
active?: boolean
|
||||
/** Additional class names for the wrapper */
|
||||
className?: string
|
||||
/** Whether to use special gradient styling (for important actions) */
|
||||
/** Whether to use special gradient styling for important actions */
|
||||
isSpecial?: boolean
|
||||
}
|
||||
|
||||
/**
|
||||
* Action verbs that appear at the start of tool display names.
|
||||
* These will be highlighted in a lighter color for better visual hierarchy.
|
||||
*/
|
||||
/** Action verbs at the start of tool display names, highlighted for visual hierarchy */
|
||||
const ACTION_VERBS = [
|
||||
'Analyzing',
|
||||
'Analyzed',
|
||||
@@ -552,7 +579,8 @@ const ACTION_VERBS = [
|
||||
|
||||
/**
|
||||
* Splits text into action verb and remainder for two-tone rendering.
|
||||
* Returns [actionVerb, remainder] or [null, text] if no match.
|
||||
* @param text - The text to split
|
||||
* @returns Tuple of [actionVerb, remainder] or [null, text] if no match
|
||||
*/
|
||||
function splitActionVerb(text: string): [string | null, string] {
|
||||
for (const verb of ACTION_VERBS) {
|
||||
@@ -572,10 +600,9 @@ function splitActionVerb(text: string): [string | null, string] {
|
||||
}
|
||||
|
||||
/**
|
||||
* Renders text with a subtle white shimmer overlay when active, creating a skeleton-like
|
||||
* loading effect that passes over the existing words without replacing them.
|
||||
* For special tool calls, uses a gradient color. For normal tools, highlights action verbs
|
||||
* in a lighter color with the rest in default gray.
|
||||
* Renders text with a shimmer overlay animation when active.
|
||||
* Special tools use a gradient color; normal tools highlight action verbs.
|
||||
* Uses CSS truncation to clamp to one line with ellipsis.
|
||||
*/
|
||||
const ShimmerOverlayText = memo(function ShimmerOverlayText({
|
||||
text,
|
||||
@@ -585,10 +612,13 @@ const ShimmerOverlayText = memo(function ShimmerOverlayText({
|
||||
}: ShimmerOverlayTextProps) {
|
||||
const [actionVerb, remainder] = splitActionVerb(text)
|
||||
|
||||
// Base classes for single-line truncation with ellipsis
|
||||
const truncateClasses = 'block w-full overflow-hidden text-ellipsis whitespace-nowrap'
|
||||
|
||||
// Special tools: use tertiary-2 color for entire text with shimmer
|
||||
if (isSpecial) {
|
||||
return (
|
||||
<span className={`relative inline-block ${className || ''}`}>
|
||||
<span className={`relative ${truncateClasses} ${className || ''}`}>
|
||||
<span className='text-[var(--brand-tertiary-2)]'>{text}</span>
|
||||
{active ? (
|
||||
<span
|
||||
@@ -596,7 +626,7 @@ const ShimmerOverlayText = memo(function ShimmerOverlayText({
|
||||
className='pointer-events-none absolute inset-0 select-none overflow-hidden'
|
||||
>
|
||||
<span
|
||||
className='block text-transparent'
|
||||
className='block overflow-hidden text-ellipsis whitespace-nowrap text-transparent'
|
||||
style={{
|
||||
backgroundImage:
|
||||
'linear-gradient(90deg, rgba(51,196,129,0) 0%, rgba(255,255,255,0.6) 50%, rgba(51,196,129,0) 100%)',
|
||||
@@ -627,7 +657,7 @@ const ShimmerOverlayText = memo(function ShimmerOverlayText({
|
||||
// Light mode: primary (#2d2d2d) vs muted (#737373) for good contrast
|
||||
// Dark mode: tertiary (#b3b3b3) vs muted (#787878) for good contrast
|
||||
return (
|
||||
<span className={`relative inline-block ${className || ''}`}>
|
||||
<span className={`relative ${truncateClasses} ${className || ''}`}>
|
||||
{actionVerb ? (
|
||||
<>
|
||||
<span className='text-[var(--text-primary)] dark:text-[var(--text-tertiary)]'>
|
||||
@@ -644,7 +674,7 @@ const ShimmerOverlayText = memo(function ShimmerOverlayText({
|
||||
className='pointer-events-none absolute inset-0 select-none overflow-hidden'
|
||||
>
|
||||
<span
|
||||
className='block text-transparent'
|
||||
className='block overflow-hidden text-ellipsis whitespace-nowrap text-transparent'
|
||||
style={{
|
||||
backgroundImage:
|
||||
'linear-gradient(90deg, rgba(255,255,255,0) 0%, rgba(255,255,255,0.85) 50%, rgba(255,255,255,0) 100%)',
|
||||
@@ -672,8 +702,9 @@ const ShimmerOverlayText = memo(function ShimmerOverlayText({
|
||||
})
|
||||
|
||||
/**
|
||||
* Get the outer collapse header label for completed subagent tools.
|
||||
* Uses the tool's UI config.
|
||||
* Gets the collapse header label for completed subagent tools.
|
||||
* @param toolName - The tool name to get the label for
|
||||
* @returns The completion label from UI config, defaults to 'Thought'
|
||||
*/
|
||||
function getSubagentCompletionLabel(toolName: string): string {
|
||||
const labels = getSubagentLabelsFromConfig(toolName, false)
|
||||
@@ -681,8 +712,9 @@ function getSubagentCompletionLabel(toolName: string): string {
|
||||
}
|
||||
|
||||
/**
|
||||
* SubAgentThinkingContent renders subagent blocks as simple thinking text (ThinkingBlock).
|
||||
* Used for inline rendering within regular tool calls that have subagent content.
|
||||
* Renders subagent blocks as thinking text within regular tool calls.
|
||||
* @param blocks - The subagent content blocks to render
|
||||
* @param isStreaming - Whether streaming animations should be shown (caller should pre-compute currentMessage check)
|
||||
*/
|
||||
function SubAgentThinkingContent({
|
||||
blocks,
|
||||
@@ -717,7 +749,7 @@ function SubAgentThinkingContent({
|
||||
const hasSpecialTags = hasPlan
|
||||
|
||||
return (
|
||||
<div className='space-y-1.5'>
|
||||
<div className='space-y-[4px]'>
|
||||
{cleanText.trim() && (
|
||||
<ThinkingBlock
|
||||
content={cleanText}
|
||||
@@ -731,32 +763,29 @@ function SubAgentThinkingContent({
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Subagents that should collapse when done streaming.
|
||||
* Default behavior is to NOT collapse (stay expanded like edit, superagent, info, etc.).
|
||||
* Only plan, debug, and research collapse into summary headers.
|
||||
*/
|
||||
/** Subagents that collapse into summary headers when done streaming */
|
||||
const COLLAPSIBLE_SUBAGENTS = new Set(['plan', 'debug', 'research'])
|
||||
|
||||
/**
|
||||
* SubagentContentRenderer handles the rendering of subagent content.
|
||||
* - During streaming: Shows content at top level
|
||||
* - When done (not streaming): Most subagents stay expanded, only specific ones collapse
|
||||
* - Exception: plan, debug, research, info subagents collapse into a header
|
||||
* Handles rendering of subagent content with streaming and collapse behavior.
|
||||
*/
|
||||
const SubagentContentRenderer = memo(function SubagentContentRenderer({
|
||||
toolCall,
|
||||
shouldCollapse,
|
||||
isCurrentMessage = true,
|
||||
}: {
|
||||
toolCall: CopilotToolCall
|
||||
shouldCollapse: boolean
|
||||
/** Whether this is from the current/latest message. Controls shimmer animations. */
|
||||
isCurrentMessage?: boolean
|
||||
}) {
|
||||
const [isExpanded, setIsExpanded] = useState(true)
|
||||
const [duration, setDuration] = useState(0)
|
||||
const startTimeRef = useRef<number>(Date.now())
|
||||
const wasStreamingRef = useRef(false)
|
||||
|
||||
const isStreaming = !!toolCall.subAgentStreaming
|
||||
// Only show streaming animations for current message
|
||||
const isStreaming = isCurrentMessage && !!toolCall.subAgentStreaming
|
||||
|
||||
useEffect(() => {
|
||||
if (isStreaming && !wasStreamingRef.current) {
|
||||
@@ -850,7 +879,11 @@ const SubagentContentRenderer = memo(function SubagentContentRenderer({
|
||||
}
|
||||
return (
|
||||
<div key={`tool-${segment.block.toolCall.id || index}`}>
|
||||
<ToolCall toolCallId={segment.block.toolCall.id} toolCall={segment.block.toolCall} />
|
||||
<ToolCall
|
||||
toolCallId={segment.block.toolCall.id}
|
||||
toolCall={segment.block.toolCall}
|
||||
isCurrentMessage={isCurrentMessage}
|
||||
/>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
@@ -861,7 +894,7 @@ const SubagentContentRenderer = memo(function SubagentContentRenderer({
|
||||
|
||||
if (isStreaming || !shouldCollapse) {
|
||||
return (
|
||||
<div className='w-full space-y-1.5'>
|
||||
<div className='w-full space-y-[4px]'>
|
||||
{renderCollapsibleContent()}
|
||||
{hasPlan && planToRender && <PlanSteps steps={planToRender} streaming={isPlanStreaming} />}
|
||||
</div>
|
||||
@@ -888,30 +921,30 @@ const SubagentContentRenderer = memo(function SubagentContentRenderer({
|
||||
<div
|
||||
className={clsx(
|
||||
'overflow-hidden transition-all duration-150 ease-out',
|
||||
isExpanded ? 'mt-1.5 max-h-[5000px] opacity-100' : 'max-h-0 opacity-0'
|
||||
isExpanded ? 'mt-1.5 max-h-[5000px] space-y-[4px] opacity-100' : 'max-h-0 opacity-0'
|
||||
)}
|
||||
>
|
||||
{renderCollapsibleContent()}
|
||||
</div>
|
||||
|
||||
{/* Plan stays outside the collapsible */}
|
||||
{hasPlan && planToRender && <PlanSteps steps={planToRender} />}
|
||||
{hasPlan && planToRender && (
|
||||
<div className='mt-[6px]'>
|
||||
<PlanSteps steps={planToRender} />
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
)
|
||||
})
|
||||
|
||||
/**
|
||||
* Determines if a tool call is "special" and should display with gradient styling.
|
||||
* Uses the tool's UI config.
|
||||
* Determines if a tool call should display with special gradient styling.
|
||||
*/
|
||||
function isSpecialToolCall(toolCall: CopilotToolCall): boolean {
|
||||
return isSpecialToolFromConfig(toolCall.name)
|
||||
}
|
||||
|
||||
/**
|
||||
* WorkflowEditSummary shows a full-width summary of workflow edits (like Cursor's diff).
|
||||
* Displays: workflow name with stats (+N green, N orange, -N red)
|
||||
* Expands inline on click to show individual blocks with their icons.
|
||||
* Displays a summary of workflow edits with added, edited, and deleted blocks.
|
||||
*/
|
||||
const WorkflowEditSummary = memo(function WorkflowEditSummary({
|
||||
toolCall,
|
||||
@@ -1169,9 +1202,7 @@ const WorkflowEditSummary = memo(function WorkflowEditSummary({
|
||||
)
|
||||
})
|
||||
|
||||
/**
|
||||
* Checks if a tool is an integration tool (server-side executed, not a client tool)
|
||||
*/
|
||||
/** Checks if a tool is server-side executed (not a client tool) */
|
||||
function isIntegrationTool(toolName: string): boolean {
|
||||
return !CLASS_TOOL_METADATA[toolName]
|
||||
}
|
||||
@@ -1317,9 +1348,7 @@ function getDisplayName(toolCall: CopilotToolCall): string {
|
||||
return `${stateVerb} ${formattedName}`
|
||||
}
|
||||
|
||||
/**
|
||||
* Get verb prefix based on tool state
|
||||
*/
|
||||
/** Gets verb prefix based on tool call state */
|
||||
function getStateVerb(state: string): string {
|
||||
switch (state) {
|
||||
case 'pending':
|
||||
@@ -1338,8 +1367,7 @@ function getStateVerb(state: string): string {
|
||||
}
|
||||
|
||||
/**
|
||||
* Format tool name for display
|
||||
* e.g., "google_calendar_list_events" -> "Google Calendar List Events"
|
||||
* Formats tool name for display (e.g., "google_calendar_list_events" -> "Google Calendar List Events")
|
||||
*/
|
||||
function formatToolName(name: string): string {
|
||||
const baseName = name.replace(/_v\d+$/, '')
|
||||
@@ -1415,7 +1443,7 @@ function RunSkipButtons({
|
||||
|
||||
// Standardized buttons for all interrupt tools: Allow, (Always Allow for client tools only), Skip
|
||||
return (
|
||||
<div className='mt-1.5 flex gap-[6px]'>
|
||||
<div className='mt-[10px] flex gap-[6px]'>
|
||||
<Button onClick={onRun} disabled={isProcessing} variant='tertiary'>
|
||||
{isProcessing ? 'Allowing...' : 'Allow'}
|
||||
</Button>
|
||||
@@ -1431,7 +1459,12 @@ function RunSkipButtons({
|
||||
)
|
||||
}
|
||||
|
||||
export function ToolCall({ toolCall: toolCallProp, toolCallId, onStateChange }: ToolCallProps) {
|
||||
export function ToolCall({
|
||||
toolCall: toolCallProp,
|
||||
toolCallId,
|
||||
onStateChange,
|
||||
isCurrentMessage = true,
|
||||
}: ToolCallProps) {
|
||||
const [, forceUpdate] = useState({})
|
||||
// Get live toolCall from store to ensure we have the latest state
|
||||
const effectiveId = toolCallId || toolCallProp?.id
|
||||
@@ -1445,9 +1478,7 @@ export function ToolCall({ toolCall: toolCallProp, toolCallId, onStateChange }:
|
||||
|
||||
const isExpandablePending =
|
||||
toolCall?.state === 'pending' &&
|
||||
(toolCall.name === 'make_api_request' ||
|
||||
toolCall.name === 'set_global_workflow_variables' ||
|
||||
toolCall.name === 'run_workflow')
|
||||
(toolCall.name === 'make_api_request' || toolCall.name === 'set_global_workflow_variables')
|
||||
|
||||
const [expanded, setExpanded] = useState(isExpandablePending)
|
||||
const [showRemoveAutoAllow, setShowRemoveAutoAllow] = useState(false)
|
||||
@@ -1522,6 +1553,7 @@ export function ToolCall({ toolCall: toolCallProp, toolCallId, onStateChange }:
|
||||
<SubagentContentRenderer
|
||||
toolCall={toolCall}
|
||||
shouldCollapse={COLLAPSIBLE_SUBAGENTS.has(toolCall.name)}
|
||||
isCurrentMessage={isCurrentMessage}
|
||||
/>
|
||||
)
|
||||
}
|
||||
@@ -1550,37 +1582,34 @@ export function ToolCall({ toolCall: toolCallProp, toolCallId, onStateChange }:
|
||||
}
|
||||
// Check if tool has params table config (meaning it's expandable)
|
||||
const hasParamsTable = !!getToolUIConfig(toolCall.name)?.paramsTable
|
||||
const isRunWorkflow = toolCall.name === 'run_workflow'
|
||||
const isExpandableTool =
|
||||
hasParamsTable ||
|
||||
toolCall.name === 'make_api_request' ||
|
||||
toolCall.name === 'set_global_workflow_variables' ||
|
||||
toolCall.name === 'run_workflow'
|
||||
toolCall.name === 'set_global_workflow_variables'
|
||||
|
||||
const showButtons = shouldShowRunSkipButtons(toolCall)
|
||||
const showButtons = isCurrentMessage && shouldShowRunSkipButtons(toolCall)
|
||||
|
||||
// Check UI config for secondary action
|
||||
// Check UI config for secondary action - only show for current message tool calls
|
||||
const toolUIConfig = getToolUIConfig(toolCall.name)
|
||||
const secondaryAction = toolUIConfig?.secondaryAction
|
||||
const showSecondaryAction = secondaryAction?.showInStates.includes(
|
||||
toolCall.state as ClientToolCallState
|
||||
)
|
||||
const isExecuting =
|
||||
toolCall.state === (ClientToolCallState.executing as any) ||
|
||||
toolCall.state === ('executing' as any)
|
||||
|
||||
// Legacy fallbacks for tools that haven't migrated to UI config
|
||||
const showMoveToBackground =
|
||||
showSecondaryAction && secondaryAction?.text === 'Move to Background'
|
||||
? true
|
||||
: !secondaryAction &&
|
||||
toolCall.name === 'run_workflow' &&
|
||||
(toolCall.state === (ClientToolCallState.executing as any) ||
|
||||
toolCall.state === ('executing' as any))
|
||||
isCurrentMessage &&
|
||||
((showSecondaryAction && secondaryAction?.text === 'Move to Background') ||
|
||||
(!secondaryAction && toolCall.name === 'run_workflow' && isExecuting))
|
||||
|
||||
const showWake =
|
||||
showSecondaryAction && secondaryAction?.text === 'Wake'
|
||||
? true
|
||||
: !secondaryAction &&
|
||||
toolCall.name === 'sleep' &&
|
||||
(toolCall.state === (ClientToolCallState.executing as any) ||
|
||||
toolCall.state === ('executing' as any))
|
||||
isCurrentMessage &&
|
||||
((showSecondaryAction && secondaryAction?.text === 'Wake') ||
|
||||
(!secondaryAction && toolCall.name === 'sleep' && isExecuting))
|
||||
|
||||
const handleStateChange = (state: any) => {
|
||||
forceUpdate({})
|
||||
@@ -1594,6 +1623,8 @@ export function ToolCall({ toolCall: toolCallProp, toolCallId, onStateChange }:
|
||||
toolCall.state === ClientToolCallState.pending ||
|
||||
toolCall.state === ClientToolCallState.executing
|
||||
|
||||
const shouldShowShimmer = isCurrentMessage && isLoadingState
|
||||
|
||||
const isSpecial = isSpecialToolCall(toolCall)
|
||||
|
||||
const renderPendingDetails = () => {
|
||||
@@ -1903,7 +1934,7 @@ export function ToolCall({ toolCall: toolCallProp, toolCallId, onStateChange }:
|
||||
</span>
|
||||
</div>
|
||||
{/* Input entries */}
|
||||
<div className='flex flex-col'>
|
||||
<div className='flex flex-col pt-[6px]'>
|
||||
{inputEntries.map(([key, value], index) => {
|
||||
const isComplex = isComplexValue(value)
|
||||
const displayValue = formatValueForDisplay(value)
|
||||
@@ -1912,8 +1943,8 @@ export function ToolCall({ toolCall: toolCallProp, toolCallId, onStateChange }:
|
||||
<div
|
||||
key={key}
|
||||
className={clsx(
|
||||
'flex flex-col gap-1.5 px-[10px] py-[8px]',
|
||||
index > 0 && 'border-[var(--border-1)] border-t'
|
||||
'flex flex-col gap-[6px] px-[10px] pb-[6px]',
|
||||
index > 0 && 'mt-[6px] border-[var(--border-1)] border-t pt-[6px]'
|
||||
)}
|
||||
>
|
||||
{/* Input key */}
|
||||
@@ -2005,14 +2036,14 @@ export function ToolCall({ toolCall: toolCallProp, toolCallId, onStateChange }:
|
||||
<div className={isEnvVarsClickable ? 'cursor-pointer' : ''} onClick={handleEnvVarsClick}>
|
||||
<ShimmerOverlayText
|
||||
text={displayName}
|
||||
active={isLoadingState}
|
||||
active={shouldShowShimmer}
|
||||
isSpecial={isSpecial}
|
||||
className='font-[470] font-season text-[var(--text-secondary)] text-sm dark:text-[var(--text-muted)]'
|
||||
/>
|
||||
</div>
|
||||
<div className='mt-1.5'>{renderPendingDetails()}</div>
|
||||
<div className='mt-[10px]'>{renderPendingDetails()}</div>
|
||||
{showRemoveAutoAllow && isAutoAllowed && (
|
||||
<div className='mt-1.5'>
|
||||
<div className='mt-[10px]'>
|
||||
<Button
|
||||
onClick={async () => {
|
||||
await removeAutoAllowedTool(toolCall.name)
|
||||
@@ -2037,7 +2068,7 @@ export function ToolCall({ toolCall: toolCallProp, toolCallId, onStateChange }:
|
||||
{toolCall.subAgentBlocks && toolCall.subAgentBlocks.length > 0 && (
|
||||
<SubAgentThinkingContent
|
||||
blocks={toolCall.subAgentBlocks}
|
||||
isStreaming={toolCall.subAgentStreaming}
|
||||
isStreaming={isCurrentMessage && toolCall.subAgentStreaming}
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
@@ -2062,18 +2093,18 @@ export function ToolCall({ toolCall: toolCallProp, toolCallId, onStateChange }:
|
||||
>
|
||||
<ShimmerOverlayText
|
||||
text={displayName}
|
||||
active={isLoadingState}
|
||||
active={shouldShowShimmer}
|
||||
isSpecial={isSpecial}
|
||||
className='font-[470] font-season text-[var(--text-secondary)] text-sm dark:text-[var(--text-muted)]'
|
||||
/>
|
||||
</div>
|
||||
{code && (
|
||||
<div className='mt-1.5'>
|
||||
<div className='mt-[10px]'>
|
||||
<Code.Viewer code={code} language='javascript' showGutter className='min-h-0' />
|
||||
</div>
|
||||
)}
|
||||
{showRemoveAutoAllow && isAutoAllowed && (
|
||||
<div className='mt-1.5'>
|
||||
<div className='mt-[10px]'>
|
||||
<Button
|
||||
onClick={async () => {
|
||||
await removeAutoAllowedTool(toolCall.name)
|
||||
@@ -2098,14 +2129,14 @@ export function ToolCall({ toolCall: toolCallProp, toolCallId, onStateChange }:
|
||||
{toolCall.subAgentBlocks && toolCall.subAgentBlocks.length > 0 && (
|
||||
<SubAgentThinkingContent
|
||||
blocks={toolCall.subAgentBlocks}
|
||||
isStreaming={toolCall.subAgentStreaming}
|
||||
isStreaming={isCurrentMessage && toolCall.subAgentStreaming}
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
const isToolNameClickable = isExpandableTool || isAutoAllowed
|
||||
const isToolNameClickable = (!isRunWorkflow && isExpandableTool) || isAutoAllowed
|
||||
|
||||
const handleToolNameClick = () => {
|
||||
if (isExpandableTool) {
|
||||
@@ -2116,6 +2147,7 @@ export function ToolCall({ toolCall: toolCallProp, toolCallId, onStateChange }:
|
||||
}
|
||||
|
||||
const isEditWorkflow = toolCall.name === 'edit_workflow'
|
||||
const shouldShowDetails = isRunWorkflow || (isExpandableTool && expanded)
|
||||
const hasOperations = Array.isArray(params.operations) && params.operations.length > 0
|
||||
const hideTextForEditWorkflow = isEditWorkflow && hasOperations
|
||||
|
||||
@@ -2125,15 +2157,15 @@ export function ToolCall({ toolCall: toolCallProp, toolCallId, onStateChange }:
|
||||
<div className={isToolNameClickable ? 'cursor-pointer' : ''} onClick={handleToolNameClick}>
|
||||
<ShimmerOverlayText
|
||||
text={displayName}
|
||||
active={isLoadingState}
|
||||
active={shouldShowShimmer}
|
||||
isSpecial={isSpecial}
|
||||
className='font-[470] font-season text-[var(--text-secondary)] text-sm dark:text-[var(--text-muted)]'
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
{isExpandableTool && expanded && <div className='mt-1.5'>{renderPendingDetails()}</div>}
|
||||
{shouldShowDetails && <div className='mt-[10px]'>{renderPendingDetails()}</div>}
|
||||
{showRemoveAutoAllow && isAutoAllowed && (
|
||||
<div className='mt-1.5'>
|
||||
<div className='mt-[10px]'>
|
||||
<Button
|
||||
onClick={async () => {
|
||||
await removeAutoAllowedTool(toolCall.name)
|
||||
@@ -2154,7 +2186,7 @@ export function ToolCall({ toolCall: toolCallProp, toolCallId, onStateChange }:
|
||||
editedParams={editedParams}
|
||||
/>
|
||||
) : showMoveToBackground ? (
|
||||
<div className='mt-1.5'>
|
||||
<div className='mt-[10px]'>
|
||||
<Button
|
||||
onClick={async () => {
|
||||
try {
|
||||
@@ -2175,7 +2207,7 @@ export function ToolCall({ toolCall: toolCallProp, toolCallId, onStateChange }:
|
||||
</Button>
|
||||
</div>
|
||||
) : showWake ? (
|
||||
<div className='mt-1.5'>
|
||||
<div className='mt-[10px]'>
|
||||
<Button
|
||||
onClick={async () => {
|
||||
try {
|
||||
@@ -2208,7 +2240,7 @@ export function ToolCall({ toolCall: toolCallProp, toolCallId, onStateChange }:
|
||||
{toolCall.subAgentBlocks && toolCall.subAgentBlocks.length > 0 && (
|
||||
<SubAgentThinkingContent
|
||||
blocks={toolCall.subAgentBlocks}
|
||||
isStreaming={toolCall.subAgentStreaming}
|
||||
isStreaming={isCurrentMessage && toolCall.subAgentStreaming}
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
|
||||
@@ -0,0 +1 @@
|
||||
export * from './attached-files-display'
|
||||
@@ -0,0 +1,127 @@
|
||||
'use client'
|
||||
|
||||
import { ArrowUp, Image, Loader2 } from 'lucide-react'
|
||||
import { Badge, Button } from '@/components/emcn'
|
||||
import { cn } from '@/lib/core/utils/cn'
|
||||
import { ModeSelector } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/user-input/components/mode-selector/mode-selector'
|
||||
import { ModelSelector } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/user-input/components/model-selector/model-selector'
|
||||
|
||||
interface BottomControlsProps {
|
||||
mode: 'ask' | 'build' | 'plan'
|
||||
onModeChange?: (mode: 'ask' | 'build' | 'plan') => void
|
||||
selectedModel: string
|
||||
onModelSelect: (model: string) => void
|
||||
isNearTop: boolean
|
||||
disabled: boolean
|
||||
hideModeSelector: boolean
|
||||
canSubmit: boolean
|
||||
isLoading: boolean
|
||||
isAborting: boolean
|
||||
showAbortButton: boolean
|
||||
onSubmit: () => void
|
||||
onAbort: () => void
|
||||
onFileSelect: () => void
|
||||
}
|
||||
|
||||
/**
|
||||
* Bottom controls section of the user input
|
||||
* Contains mode selector, model selector, file attachment button, and submit/abort buttons
|
||||
*/
|
||||
export function BottomControls({
|
||||
mode,
|
||||
onModeChange,
|
||||
selectedModel,
|
||||
onModelSelect,
|
||||
isNearTop,
|
||||
disabled,
|
||||
hideModeSelector,
|
||||
canSubmit,
|
||||
isLoading,
|
||||
isAborting,
|
||||
showAbortButton,
|
||||
onSubmit,
|
||||
onAbort,
|
||||
onFileSelect,
|
||||
}: BottomControlsProps) {
|
||||
return (
|
||||
<div className='flex items-center justify-between gap-2'>
|
||||
{/* Left side: Mode Selector + Model Selector */}
|
||||
<div className='flex min-w-0 flex-1 items-center gap-[8px]'>
|
||||
{!hideModeSelector && (
|
||||
<ModeSelector
|
||||
mode={mode}
|
||||
onModeChange={onModeChange}
|
||||
isNearTop={isNearTop}
|
||||
disabled={disabled}
|
||||
/>
|
||||
)}
|
||||
|
||||
<ModelSelector
|
||||
selectedModel={selectedModel}
|
||||
isNearTop={isNearTop}
|
||||
onModelSelect={onModelSelect}
|
||||
/>
|
||||
</div>
|
||||
|
||||
{/* Right side: Attach Button + Send Button */}
|
||||
<div className='flex flex-shrink-0 items-center gap-[10px]'>
|
||||
<Badge
|
||||
onClick={onFileSelect}
|
||||
title='Attach file'
|
||||
className={cn(
|
||||
'cursor-pointer rounded-[6px] border-0 bg-transparent p-[0px] dark:bg-transparent',
|
||||
disabled && 'cursor-not-allowed opacity-50'
|
||||
)}
|
||||
>
|
||||
<Image className='!h-3.5 !w-3.5 scale-x-110' />
|
||||
</Badge>
|
||||
|
||||
{showAbortButton ? (
|
||||
<Button
|
||||
onClick={onAbort}
|
||||
disabled={isAborting}
|
||||
className={cn(
|
||||
'h-[20px] w-[20px] rounded-full border-0 p-0 transition-colors',
|
||||
!isAborting
|
||||
? 'bg-[var(--c-383838)] hover:bg-[var(--c-575757)] dark:bg-[var(--c-E0E0E0)] dark:hover:bg-[var(--c-CFCFCF)]'
|
||||
: 'bg-[var(--c-383838)] dark:bg-[var(--c-E0E0E0)]'
|
||||
)}
|
||||
title='Stop generation'
|
||||
>
|
||||
{isAborting ? (
|
||||
<Loader2 className='block h-[13px] w-[13px] animate-spin text-white dark:text-black' />
|
||||
) : (
|
||||
<svg
|
||||
className='block h-[13px] w-[13px] fill-white dark:fill-black'
|
||||
viewBox='0 0 24 24'
|
||||
xmlns='http://www.w3.org/2000/svg'
|
||||
>
|
||||
<rect x='4' y='4' width='16' height='16' rx='3' ry='3' />
|
||||
</svg>
|
||||
)}
|
||||
</Button>
|
||||
) : (
|
||||
<Button
|
||||
onClick={onSubmit}
|
||||
disabled={!canSubmit}
|
||||
className={cn(
|
||||
'h-[22px] w-[22px] rounded-full border-0 p-0 transition-colors',
|
||||
canSubmit
|
||||
? 'bg-[var(--c-383838)] hover:bg-[var(--c-575757)] dark:bg-[var(--c-E0E0E0)] dark:hover:bg-[var(--c-CFCFCF)]'
|
||||
: 'bg-[var(--c-808080)] dark:bg-[var(--c-808080)]'
|
||||
)}
|
||||
>
|
||||
{isLoading ? (
|
||||
<Loader2 className='block h-3.5 w-3.5 animate-spin text-white dark:text-black' />
|
||||
) : (
|
||||
<ArrowUp
|
||||
className='block h-3.5 w-3.5 text-white dark:text-black'
|
||||
strokeWidth={2.25}
|
||||
/>
|
||||
)}
|
||||
</Button>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
@@ -0,0 +1 @@
|
||||
export * from './bottom-controls'
|
||||
@@ -0,0 +1 @@
|
||||
export * from './context-pills'
|
||||
@@ -1,6 +1,7 @@
|
||||
export { AttachedFilesDisplay } from './attached-files-display/attached-files-display'
|
||||
export { ContextPills } from './context-pills/context-pills'
|
||||
export { type MentionFolderNav, MentionMenu } from './mention-menu/mention-menu'
|
||||
export { ModeSelector } from './mode-selector/mode-selector'
|
||||
export { ModelSelector } from './model-selector/model-selector'
|
||||
export { type SlashFolderNav, SlashMenu } from './slash-menu/slash-menu'
|
||||
export { AttachedFilesDisplay } from './attached-files-display'
|
||||
export { BottomControls } from './bottom-controls'
|
||||
export { ContextPills } from './context-pills'
|
||||
export { type MentionFolderNav, MentionMenu } from './mention-menu'
|
||||
export { ModeSelector } from './mode-selector'
|
||||
export { ModelSelector } from './model-selector'
|
||||
export { type SlashFolderNav, SlashMenu } from './slash-menu'
|
||||
|
||||
@@ -0,0 +1 @@
|
||||
export * from './mention-menu'
|
||||
@@ -0,0 +1 @@
|
||||
export * from './mode-selector'
|
||||
@@ -0,0 +1 @@
|
||||
export * from './model-selector'
|
||||
@@ -0,0 +1 @@
|
||||
export * from './slash-menu'
|
||||
@@ -1,5 +1,6 @@
|
||||
import { useCallback, useEffect, useRef, useState } from 'react'
|
||||
import {
|
||||
escapeRegex,
|
||||
filterOutContext,
|
||||
isContextAlreadySelected,
|
||||
} from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/user-input/utils'
|
||||
@@ -22,9 +23,6 @@ interface UseContextManagementProps {
|
||||
export function useContextManagement({ message, initialContexts }: UseContextManagementProps) {
|
||||
const [selectedContexts, setSelectedContexts] = useState<ChatContext[]>(initialContexts ?? [])
|
||||
const initializedRef = useRef(false)
|
||||
const escapeRegex = useCallback((value: string) => {
|
||||
return value.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')
|
||||
}, [])
|
||||
|
||||
// Initialize with initial contexts when they're first provided (for edit mode)
|
||||
useEffect(() => {
|
||||
|
||||
@@ -0,0 +1 @@
|
||||
export * from './user-input'
|
||||
@@ -9,19 +9,19 @@ import {
|
||||
useState,
|
||||
} from 'react'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { ArrowUp, AtSign, Image, Loader2 } from 'lucide-react'
|
||||
import { AtSign } from 'lucide-react'
|
||||
import { useParams } from 'next/navigation'
|
||||
import { createPortal } from 'react-dom'
|
||||
import { Badge, Button, Textarea } from '@/components/emcn'
|
||||
import { useSession } from '@/lib/auth/auth-client'
|
||||
import type { CopilotModelId } from '@/lib/copilot/models'
|
||||
import { cn } from '@/lib/core/utils/cn'
|
||||
import {
|
||||
AttachedFilesDisplay,
|
||||
BottomControls,
|
||||
ContextPills,
|
||||
type MentionFolderNav,
|
||||
MentionMenu,
|
||||
ModelSelector,
|
||||
ModeSelector,
|
||||
type SlashFolderNav,
|
||||
SlashMenu,
|
||||
} from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/user-input/components'
|
||||
@@ -44,6 +44,10 @@ import {
|
||||
useTextareaAutoResize,
|
||||
} from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/user-input/hooks'
|
||||
import type { MessageFileAttachment } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/user-input/hooks/use-file-attachments'
|
||||
import {
|
||||
computeMentionHighlightRanges,
|
||||
extractContextTokens,
|
||||
} from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/user-input/utils'
|
||||
import type { ChatContext } from '@/stores/panel'
|
||||
import { useCopilotStore } from '@/stores/panel'
|
||||
|
||||
@@ -263,7 +267,6 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
|
||||
|
||||
if (q && q.length > 0) {
|
||||
void mentionData.ensurePastChatsLoaded()
|
||||
// workflows and workflow-blocks auto-load from stores
|
||||
void mentionData.ensureKnowledgeLoaded()
|
||||
void mentionData.ensureBlocksLoaded()
|
||||
void mentionData.ensureTemplatesLoaded()
|
||||
@@ -306,7 +309,7 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
|
||||
size: f.size,
|
||||
}))
|
||||
|
||||
onSubmit(trimmedMessage, fileAttachmentsForApi, contextManagement.selectedContexts as any)
|
||||
onSubmit(trimmedMessage, fileAttachmentsForApi, contextManagement.selectedContexts)
|
||||
|
||||
const shouldClearInput = clearOnSubmit && !options.preserveInput && !overrideMessage
|
||||
if (shouldClearInput) {
|
||||
@@ -657,7 +660,7 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
|
||||
|
||||
const handleModelSelect = useCallback(
|
||||
(model: string) => {
|
||||
setSelectedModel(model as any)
|
||||
setSelectedModel(model as CopilotModelId)
|
||||
},
|
||||
[setSelectedModel]
|
||||
)
|
||||
@@ -677,15 +680,17 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
|
||||
return <span>{displayText}</span>
|
||||
}
|
||||
|
||||
const elements: React.ReactNode[] = []
|
||||
const ranges = mentionTokensWithContext.computeMentionRanges()
|
||||
const tokens = extractContextTokens(contexts)
|
||||
const ranges = computeMentionHighlightRanges(message, tokens)
|
||||
|
||||
if (ranges.length === 0) {
|
||||
const displayText = message.endsWith('\n') ? `${message}\u200B` : message
|
||||
return <span>{displayText}</span>
|
||||
}
|
||||
|
||||
const elements: React.ReactNode[] = []
|
||||
let lastIndex = 0
|
||||
|
||||
for (let i = 0; i < ranges.length; i++) {
|
||||
const range = ranges[i]
|
||||
|
||||
@@ -694,13 +699,12 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
|
||||
elements.push(<span key={`text-${i}-${lastIndex}-${range.start}`}>{before}</span>)
|
||||
}
|
||||
|
||||
const mentionText = message.slice(range.start, range.end)
|
||||
elements.push(
|
||||
<span
|
||||
key={`mention-${i}-${range.start}-${range.end}`}
|
||||
className='rounded-[4px] bg-[rgba(50,189,126,0.65)] py-[1px]'
|
||||
>
|
||||
{mentionText}
|
||||
{range.token}
|
||||
</span>
|
||||
)
|
||||
lastIndex = range.end
|
||||
@@ -713,7 +717,7 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
|
||||
}
|
||||
|
||||
return elements.length > 0 ? elements : <span>{'\u00A0'}</span>
|
||||
}, [message, contextManagement.selectedContexts, mentionTokensWithContext])
|
||||
}, [message, contextManagement.selectedContexts])
|
||||
|
||||
return (
|
||||
<div
|
||||
@@ -855,87 +859,22 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
|
||||
</div>
|
||||
|
||||
{/* Bottom Row: Mode Selector + Model Selector + Attach Button + Send Button */}
|
||||
<div className='flex items-center justify-between gap-2'>
|
||||
{/* Left side: Mode Selector + Model Selector */}
|
||||
<div className='flex min-w-0 flex-1 items-center gap-[8px]'>
|
||||
{!hideModeSelector && (
|
||||
<ModeSelector
|
||||
mode={mode}
|
||||
onModeChange={onModeChange}
|
||||
isNearTop={isNearTop}
|
||||
disabled={disabled}
|
||||
/>
|
||||
)}
|
||||
|
||||
<ModelSelector
|
||||
selectedModel={selectedModel}
|
||||
isNearTop={isNearTop}
|
||||
onModelSelect={handleModelSelect}
|
||||
/>
|
||||
</div>
|
||||
|
||||
{/* Right side: Attach Button + Send Button */}
|
||||
<div className='flex flex-shrink-0 items-center gap-[10px]'>
|
||||
<Badge
|
||||
onClick={fileAttachments.handleFileSelect}
|
||||
title='Attach file'
|
||||
className={cn(
|
||||
'cursor-pointer rounded-[6px] border-0 bg-transparent p-[0px] dark:bg-transparent',
|
||||
disabled && 'cursor-not-allowed opacity-50'
|
||||
)}
|
||||
>
|
||||
<Image className='!h-3.5 !w-3.5 scale-x-110' />
|
||||
</Badge>
|
||||
|
||||
{showAbortButton ? (
|
||||
<Button
|
||||
onClick={handleAbort}
|
||||
disabled={isAborting}
|
||||
className={cn(
|
||||
'h-[20px] w-[20px] rounded-full border-0 p-0 transition-colors',
|
||||
!isAborting
|
||||
? 'bg-[var(--c-383838)] hover:bg-[var(--c-575757)] dark:bg-[var(--c-E0E0E0)] dark:hover:bg-[var(--c-CFCFCF)]'
|
||||
: 'bg-[var(--c-383838)] dark:bg-[var(--c-E0E0E0)]'
|
||||
)}
|
||||
title='Stop generation'
|
||||
>
|
||||
{isAborting ? (
|
||||
<Loader2 className='block h-[13px] w-[13px] animate-spin text-white dark:text-black' />
|
||||
) : (
|
||||
<svg
|
||||
className='block h-[13px] w-[13px] fill-white dark:fill-black'
|
||||
viewBox='0 0 24 24'
|
||||
xmlns='http://www.w3.org/2000/svg'
|
||||
>
|
||||
<rect x='4' y='4' width='16' height='16' rx='3' ry='3' />
|
||||
</svg>
|
||||
)}
|
||||
</Button>
|
||||
) : (
|
||||
<Button
|
||||
onClick={() => {
|
||||
void handleSubmit()
|
||||
}}
|
||||
disabled={!canSubmit}
|
||||
className={cn(
|
||||
'h-[22px] w-[22px] rounded-full border-0 p-0 transition-colors',
|
||||
canSubmit
|
||||
? 'bg-[var(--c-383838)] hover:bg-[var(--c-575757)] dark:bg-[var(--c-E0E0E0)] dark:hover:bg-[var(--c-CFCFCF)]'
|
||||
: 'bg-[var(--c-808080)] dark:bg-[var(--c-808080)]'
|
||||
)}
|
||||
>
|
||||
{isLoading ? (
|
||||
<Loader2 className='block h-3.5 w-3.5 animate-spin text-white dark:text-black' />
|
||||
) : (
|
||||
<ArrowUp
|
||||
className='block h-3.5 w-3.5 text-white dark:text-black'
|
||||
strokeWidth={2.25}
|
||||
/>
|
||||
)}
|
||||
</Button>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
<BottomControls
|
||||
mode={mode}
|
||||
onModeChange={onModeChange}
|
||||
selectedModel={selectedModel}
|
||||
onModelSelect={handleModelSelect}
|
||||
isNearTop={isNearTop}
|
||||
disabled={disabled}
|
||||
hideModeSelector={hideModeSelector}
|
||||
canSubmit={canSubmit}
|
||||
isLoading={isLoading}
|
||||
isAborting={isAborting}
|
||||
showAbortButton={Boolean(showAbortButton)}
|
||||
onSubmit={() => void handleSubmit()}
|
||||
onAbort={handleAbort}
|
||||
onFileSelect={fileAttachments.handleFileSelect}
|
||||
/>
|
||||
|
||||
{/* Hidden File Input - enabled during streaming so users can prepare images for the next message */}
|
||||
<input
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
import type { ReactNode } from 'react'
|
||||
import {
|
||||
FOLDER_CONFIGS,
|
||||
type MentionFolderId,
|
||||
@@ -5,6 +6,102 @@ import {
|
||||
import type { MentionDataReturn } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/user-input/hooks/use-mention-data'
|
||||
import type { ChatContext } from '@/stores/panel'
|
||||
|
||||
/**
|
||||
* Escapes special regex characters in a string
|
||||
* @param value - String to escape
|
||||
* @returns Escaped string safe for use in RegExp
|
||||
*/
|
||||
export function escapeRegex(value: string): string {
|
||||
return value.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')
|
||||
}
|
||||
|
||||
/**
|
||||
* Extracts mention tokens from contexts for display/matching
|
||||
* Filters out current_workflow contexts and builds prefixed labels
|
||||
* @param contexts - Array of chat contexts
|
||||
* @returns Array of prefixed token strings (e.g., "@workflow", "/web")
|
||||
*/
|
||||
export function extractContextTokens(contexts: ChatContext[]): string[] {
|
||||
return contexts
|
||||
.filter((c) => c.kind !== 'current_workflow' && c.label)
|
||||
.map((c) => {
|
||||
const prefix = c.kind === 'slash_command' ? '/' : '@'
|
||||
return `${prefix}${c.label}`
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Mention range for text highlighting
|
||||
*/
|
||||
export interface MentionHighlightRange {
|
||||
start: number
|
||||
end: number
|
||||
token: string
|
||||
}
|
||||
|
||||
/**
|
||||
* Computes mention ranges in text for highlighting
|
||||
* @param text - Text to search
|
||||
* @param tokens - Prefixed tokens to find (e.g., "@workflow", "/web")
|
||||
* @returns Array of ranges with start, end, and matched token
|
||||
*/
|
||||
export function computeMentionHighlightRanges(
  text: string,
  tokens: string[]
): MentionHighlightRange[] {
  if (!tokens.length || !text) return []

  const pattern = new RegExp(`(${tokens.map(escapeRegex).join('|')})`, 'g')
  const ranges: MentionHighlightRange[] = []
  let match: RegExpExecArray | null

  while ((match = pattern.exec(text)) !== null) {
    ranges.push({
      start: match.index,
      end: match.index + match[0].length,
      token: match[0],
    })
  }

  return ranges
}
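To make the new helpers concrete, here is a small illustrative usage; the message and token labels below are invented for the example and are not taken from the app:

```ts
// Hypothetical mention tokens, already prefixed the way extractContextTokens builds them.
const tokens = ['@Invoice Workflow', '/web']

computeMentionHighlightRanges('Use /web and check @Invoice Workflow please', tokens)
// => [
//      { start: 4, end: 8, token: '/web' },
//      { start: 19, end: 36, token: '@Invoice Workflow' },
//    ]

// escapeRegex keeps user-provided labels from being interpreted as regex syntax:
escapeRegex('@Invoice (v2)') // => '@Invoice \\(v2\\)' (parentheses escaped)
```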
|
||||
|
||||
/**
|
||||
* Builds React nodes with highlighted mention tokens
|
||||
* @param text - Text to render
|
||||
* @param contexts - Chat contexts to highlight
|
||||
* @param createHighlightSpan - Function to create highlighted span element
|
||||
* @returns Array of React nodes with highlighted mentions
|
||||
*/
|
||||
export function buildMentionHighlightNodes(
  text: string,
  contexts: ChatContext[],
  createHighlightSpan: (token: string, key: string) => ReactNode
): ReactNode[] {
  const tokens = extractContextTokens(contexts)
  if (!tokens.length) return [text]

  const ranges = computeMentionHighlightRanges(text, tokens)
  if (!ranges.length) return [text]

  const nodes: ReactNode[] = []
  let lastIndex = 0

  for (const range of ranges) {
    if (range.start > lastIndex) {
      nodes.push(text.slice(lastIndex, range.start))
    }
    nodes.push(createHighlightSpan(range.token, `mention-${range.start}-${range.end}`))
    lastIndex = range.end
  }

  if (lastIndex < text.length) {
    nodes.push(text.slice(lastIndex))
  }

  return nodes
}
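A caller can wire the node builder into JSX roughly as follows; the wrapper component is illustrative only (it does not exist in the codebase), though the highlight classes are the ones used by `user-input.tsx` above:

```tsx
import type { ChatContext } from '@/stores/panel'

/** Illustrative renderer: highlights mention tokens inside a message. */
function HighlightedMessage({ message, contexts }: { message: string; contexts: ChatContext[] }) {
  const nodes = buildMentionHighlightNodes(message, contexts, (token, key) => (
    <span key={key} className='rounded-[4px] bg-[rgba(50,189,126,0.65)] py-[1px]'>
      {token}
    </span>
  ))
  return <span>{nodes}</span>
}
```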
|
||||
|
||||
/**
|
||||
* Gets the data array for a folder ID from mentionData.
|
||||
* Uses FOLDER_CONFIGS as the source of truth for key mapping.
|
||||
|
||||
@@ -0,0 +1 @@
|
||||
export * from './welcome'
|
||||
@@ -2,9 +2,7 @@
|
||||
|
||||
import { Button } from '@/components/emcn'
|
||||
|
||||
/**
|
||||
* Props for the CopilotWelcome component
|
||||
*/
|
||||
/** Props for the Welcome component */
|
||||
interface WelcomeProps {
|
||||
/** Callback when a suggested question is clicked */
|
||||
onQuestionClick?: (question: string) => void
|
||||
@@ -12,13 +10,7 @@ interface WelcomeProps {
|
||||
mode?: 'ask' | 'build' | 'plan'
|
||||
}
|
||||
|
||||
/**
|
||||
* Welcome screen component for the copilot
|
||||
* Displays suggested questions and capabilities based on current mode
|
||||
*
|
||||
* @param props - Component props
|
||||
* @returns Welcome screen UI
|
||||
*/
|
||||
/** Welcome screen displaying suggested questions based on current mode */
|
||||
export function Welcome({ onQuestionClick, mode = 'ask' }: WelcomeProps) {
|
||||
const capabilities =
|
||||
mode === 'build'
|
||||
|
||||
@@ -24,6 +24,7 @@ import {
|
||||
import { Trash } from '@/components/emcn/icons/trash'
|
||||
import { cn } from '@/lib/core/utils/cn'
|
||||
import {
|
||||
ChatHistorySkeleton,
|
||||
CopilotMessage,
|
||||
PlanModeSection,
|
||||
QueuedMessages,
|
||||
@@ -40,6 +41,7 @@ import {
|
||||
useTodoManagement,
|
||||
} from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/hooks'
|
||||
import { useScrollManagement } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks'
|
||||
import type { ChatContext } from '@/stores/panel'
|
||||
import { useCopilotStore } from '@/stores/panel'
|
||||
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
||||
|
||||
@@ -74,10 +76,12 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(({ panelWidth }, ref
|
||||
const copilotContainerRef = useRef<HTMLDivElement>(null)
|
||||
const cancelEditCallbackRef = useRef<(() => void) | null>(null)
|
||||
const [editingMessageId, setEditingMessageId] = useState<string | null>(null)
|
||||
const [isEditingMessage, setIsEditingMessage] = useState(false)
|
||||
const [revertingMessageId, setRevertingMessageId] = useState<string | null>(null)
|
||||
const [isHistoryDropdownOpen, setIsHistoryDropdownOpen] = useState(false)
|
||||
|
||||
// Derived state - editing when there's an editingMessageId
|
||||
const isEditingMessage = editingMessageId !== null
|
||||
|
||||
const { activeWorkflowId } = useWorkflowRegistry()
|
||||
|
||||
const {
|
||||
@@ -106,9 +110,9 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(({ panelWidth }, ref
|
||||
areChatsFresh,
|
||||
workflowId: copilotWorkflowId,
|
||||
setPlanTodos,
|
||||
closePlanTodos,
|
||||
clearPlanArtifact,
|
||||
savePlanArtifact,
|
||||
setSelectedModel,
|
||||
loadAutoAllowedTools,
|
||||
} = useCopilotStore()
|
||||
|
||||
@@ -126,7 +130,7 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(({ panelWidth }, ref
|
||||
|
||||
// Handle scroll management (80px stickiness for copilot)
|
||||
const { scrollAreaRef, scrollToBottom } = useScrollManagement(messages, isSendingMessage, {
|
||||
stickinessThreshold: 80,
|
||||
stickinessThreshold: 40,
|
||||
})
|
||||
|
||||
// Handle chat history grouping
|
||||
@@ -146,15 +150,10 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(({ panelWidth }, ref
|
||||
isSendingMessage,
|
||||
showPlanTodos,
|
||||
planTodos,
|
||||
setPlanTodos,
|
||||
})
|
||||
|
||||
/**
|
||||
* Get markdown content for design document section
|
||||
* Available in all modes once created
|
||||
*/
|
||||
/** Gets markdown content for design document section (available in all modes once created) */
|
||||
const designDocumentContent = useMemo(() => {
|
||||
// Use streaming content if available
|
||||
if (streamingPlanContent) {
|
||||
logger.info('[DesignDocument] Using streaming plan content', {
|
||||
contentLength: streamingPlanContent.length,
|
||||
@@ -165,9 +164,7 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(({ panelWidth }, ref
|
||||
return ''
|
||||
}, [streamingPlanContent])
|
||||
|
||||
/**
|
||||
* Helper function to focus the copilot input
|
||||
*/
|
||||
/** Focuses the copilot input */
|
||||
const focusInput = useCallback(() => {
|
||||
userInputRef.current?.focus()
|
||||
}, [])
|
||||
@@ -181,20 +178,14 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(({ panelWidth }, ref
|
||||
currentInputValue: inputValue,
|
||||
})
|
||||
|
||||
/**
|
||||
* Auto-scroll to bottom when chat loads in
|
||||
*/
|
||||
/** Auto-scrolls to bottom when chat loads */
|
||||
useEffect(() => {
|
||||
if (isInitialized && messages.length > 0) {
|
||||
scrollToBottom()
|
||||
}
|
||||
}, [isInitialized, messages.length, scrollToBottom])
|
||||
|
||||
/**
|
||||
* Cleanup on component unmount (page refresh, navigation, etc.)
|
||||
* Uses a ref to track sending state to avoid stale closure issues
|
||||
* Note: Parent workflow.tsx also has useStreamCleanup for page-level cleanup
|
||||
*/
|
||||
/** Cleanup on unmount - aborts active streaming. Uses refs to avoid stale closures */
|
||||
const isSendingRef = useRef(isSendingMessage)
|
||||
isSendingRef.current = isSendingMessage
|
||||
const abortMessageRef = useRef(abortMessage)
|
||||
@@ -202,19 +193,15 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(({ panelWidth }, ref
|
||||
|
||||
useEffect(() => {
|
||||
return () => {
|
||||
// Use refs to check current values, not stale closure values
|
||||
if (isSendingRef.current) {
|
||||
abortMessageRef.current()
|
||||
logger.info('Aborted active message streaming due to component unmount')
|
||||
}
|
||||
}
|
||||
// Empty deps - only run cleanup on actual unmount, not on re-renders
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
}, [])
|
||||
|
||||
/**
|
||||
* Container-level click capture to cancel edit mode when clicking outside the current edit area
|
||||
*/
|
||||
/** Cancels edit mode when clicking outside the current edit area */
|
||||
const handleCopilotClickCapture = useCallback(
|
||||
(event: ReactMouseEvent<HTMLDivElement>) => {
|
||||
if (!isEditingMessage) return
|
||||
@@ -243,10 +230,7 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(({ panelWidth }, ref
|
||||
[isEditingMessage, editingMessageId]
|
||||
)
|
||||
|
||||
/**
|
||||
* Handles creating a new chat session
|
||||
* Focuses the input after creation
|
||||
*/
|
||||
/** Creates a new chat session and focuses the input */
|
||||
const handleStartNewChat = useCallback(() => {
|
||||
createNewChat()
|
||||
logger.info('Started new chat')
|
||||
@@ -256,10 +240,7 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(({ panelWidth }, ref
|
||||
}, 100)
|
||||
}, [createNewChat])
|
||||
|
||||
/**
|
||||
* Sets the input value and focuses the textarea
|
||||
* @param value - The value to set in the input
|
||||
*/
|
||||
/** Sets the input value and focuses the textarea */
|
||||
const handleSetInputValueAndFocus = useCallback(
|
||||
(value: string) => {
|
||||
setInputValue(value)
|
||||
@@ -270,7 +251,7 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(({ panelWidth }, ref
|
||||
[setInputValue]
|
||||
)
|
||||
|
||||
// Expose functions to parent
|
||||
/** Exposes imperative functions to parent */
|
||||
useImperativeHandle(
|
||||
ref,
|
||||
() => ({
|
||||
@@ -281,10 +262,7 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(({ panelWidth }, ref
|
||||
[handleStartNewChat, handleSetInputValueAndFocus, focusInput]
|
||||
)
|
||||
|
||||
/**
|
||||
* Handles aborting the current message streaming
|
||||
* Collapses todos if they are currently shown
|
||||
*/
|
||||
/** Aborts current message streaming and collapses todos if shown */
|
||||
const handleAbort = useCallback(() => {
|
||||
abortMessage()
|
||||
if (showPlanTodos) {
|
||||
@@ -292,20 +270,20 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(({ panelWidth }, ref
|
||||
}
|
||||
}, [abortMessage, showPlanTodos])
|
||||
|
||||
/**
|
||||
* Handles message submission to the copilot
|
||||
* @param query - The message text to send
|
||||
* @param fileAttachments - Optional file attachments
|
||||
* @param contexts - Optional context references
|
||||
*/
|
||||
/** Closes the plan todos section and clears the todos */
|
||||
const handleClosePlanTodos = useCallback(() => {
|
||||
closePlanTodos()
|
||||
setPlanTodos([])
|
||||
}, [closePlanTodos, setPlanTodos])
|
||||
|
||||
/** Handles message submission to the copilot */
|
||||
const handleSubmit = useCallback(
|
||||
async (query: string, fileAttachments?: MessageFileAttachment[], contexts?: any[]) => {
|
||||
async (query: string, fileAttachments?: MessageFileAttachment[], contexts?: ChatContext[]) => {
|
||||
// Allow submission even when isSendingMessage - store will queue the message
|
||||
if (!query || !activeWorkflowId) return
|
||||
|
||||
if (showPlanTodos) {
|
||||
const store = useCopilotStore.getState()
|
||||
store.setPlanTodos([])
|
||||
setPlanTodos([])
|
||||
}
|
||||
|
||||
try {
|
||||
@@ -319,37 +297,25 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(({ panelWidth }, ref
|
||||
logger.error('Failed to send message:', error)
|
||||
}
|
||||
},
|
||||
[activeWorkflowId, sendMessage, showPlanTodos]
|
||||
[activeWorkflowId, sendMessage, showPlanTodos, setPlanTodos]
|
||||
)
|
||||
|
||||
/**
|
||||
* Handles message edit mode changes
|
||||
* @param messageId - ID of the message being edited
|
||||
* @param isEditing - Whether edit mode is active
|
||||
*/
|
||||
/** Handles message edit mode changes */
|
||||
const handleEditModeChange = useCallback(
|
||||
(messageId: string, isEditing: boolean, cancelCallback?: () => void) => {
|
||||
setEditingMessageId(isEditing ? messageId : null)
|
||||
setIsEditingMessage(isEditing)
|
||||
cancelEditCallbackRef.current = isEditing ? cancelCallback || null : null
|
||||
logger.info('Edit mode changed', { messageId, isEditing, willDimMessages: isEditing })
|
||||
},
|
||||
[]
|
||||
)
|
||||
|
||||
/**
|
||||
* Handles checkpoint revert mode changes
|
||||
* @param messageId - ID of the message being reverted
|
||||
* @param isReverting - Whether revert mode is active
|
||||
*/
|
||||
/** Handles checkpoint revert mode changes */
|
||||
const handleRevertModeChange = useCallback((messageId: string, isReverting: boolean) => {
|
||||
setRevertingMessageId(isReverting ? messageId : null)
|
||||
}, [])
|
||||
|
||||
/**
|
||||
* Handles chat deletion
|
||||
* @param chatId - ID of the chat to delete
|
||||
*/
|
||||
/** Handles chat deletion */
|
||||
const handleDeleteChat = useCallback(
|
||||
async (chatId: string) => {
|
||||
try {
|
||||
@@ -361,38 +327,15 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(({ panelWidth }, ref
|
||||
[deleteChat]
|
||||
)
|
||||
|
||||
/**
|
||||
* Handles history dropdown opening state
|
||||
* Loads chats if needed when dropdown opens (non-blocking)
|
||||
* @param open - Whether the dropdown is open
|
||||
*/
|
||||
/** Handles history dropdown opening state, loads chats if needed (non-blocking) */
|
||||
const handleHistoryDropdownOpen = useCallback(
|
||||
(open: boolean) => {
|
||||
setIsHistoryDropdownOpen(open)
|
||||
// Fire hook without awaiting - prevents blocking and state issues
|
||||
handleHistoryDropdownOpenHook(open)
|
||||
},
|
||||
[handleHistoryDropdownOpenHook]
|
||||
)
|
||||
|
||||
/**
|
||||
* Skeleton loading component for chat history
|
||||
*/
|
||||
const ChatHistorySkeleton = () => (
|
||||
<>
|
||||
<PopoverSection>
|
||||
<div className='h-3 w-12 animate-pulse rounded bg-muted/40' />
|
||||
</PopoverSection>
|
||||
<div className='flex flex-col gap-0.5'>
|
||||
{[1, 2, 3].map((i) => (
|
||||
<div key={i} className='flex h-[25px] items-center px-[6px]'>
|
||||
<div className='h-3 w-full animate-pulse rounded bg-muted/40' />
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
</>
|
||||
)
|
||||
|
||||
return (
|
||||
<>
|
||||
<div
|
||||
@@ -531,21 +474,18 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(({ panelWidth }, ref
|
||||
className='h-full overflow-y-auto overflow-x-hidden px-[8px]'
|
||||
>
|
||||
<div
|
||||
className={`w-full max-w-full space-y-4 overflow-hidden py-[8px] ${
|
||||
className={`w-full max-w-full space-y-[8px] overflow-hidden py-[8px] ${
|
||||
showPlanTodos && planTodos.length > 0 ? 'pb-14' : 'pb-10'
|
||||
}`}
|
||||
>
|
||||
{messages.map((message, index) => {
|
||||
// Determine if this message should be dimmed
|
||||
let isDimmed = false
|
||||
|
||||
// Dim messages after the one being edited
|
||||
if (editingMessageId) {
|
||||
const editingIndex = messages.findIndex((m) => m.id === editingMessageId)
|
||||
isDimmed = editingIndex !== -1 && index > editingIndex
|
||||
}
|
||||
|
||||
// Also dim messages after the one showing restore confirmation
|
||||
if (!isDimmed && revertingMessageId) {
|
||||
const revertingIndex = messages.findIndex(
|
||||
(m) => m.id === revertingMessageId
|
||||
@@ -553,7 +493,6 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(({ panelWidth }, ref
|
||||
isDimmed = revertingIndex !== -1 && index > revertingIndex
|
||||
}
|
||||
|
||||
// Get checkpoint count for this message to force re-render when it changes
|
||||
const checkpointCount = messageCheckpoints[message.id]?.length || 0
|
||||
|
||||
return (
|
||||
@@ -588,11 +527,7 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(({ panelWidth }, ref
|
||||
<TodoList
|
||||
todos={planTodos}
|
||||
collapsed={todosCollapsed}
|
||||
onClose={() => {
|
||||
const store = useCopilotStore.getState()
|
||||
store.closePlanTodos?.()
|
||||
useCopilotStore.setState({ planTodos: [] })
|
||||
}}
|
||||
onClose={handleClosePlanTodos}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
|
||||
@@ -24,9 +24,7 @@ export function useChatHistory(props: UseChatHistoryProps) {
|
||||
const { chats, activeWorkflowId, copilotWorkflowId, loadChats, areChatsFresh, isSendingMessage } =
|
||||
props
|
||||
|
||||
/**
|
||||
* Groups chats by time period (Today, Yesterday, This Week, etc.)
|
||||
*/
|
||||
/** Groups chats by time period (Today, Yesterday, This Week, etc.) */
|
||||
const groupedChats = useMemo(() => {
|
||||
if (!activeWorkflowId || copilotWorkflowId !== activeWorkflowId || chats.length === 0) {
|
||||
return []
|
||||
@@ -68,18 +66,21 @@ export function useChatHistory(props: UseChatHistoryProps) {
      }
    })

    for (const groupName of Object.keys(groups)) {
      groups[groupName].sort((a, b) => {
        const dateA = new Date(a.updatedAt).getTime()
        const dateB = new Date(b.updatedAt).getTime()
        return dateB - dateA
      })
    }

    return Object.entries(groups).filter(([, chats]) => chats.length > 0)
  }, [chats, activeWorkflowId, copilotWorkflowId])
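The grouping memo sorts each bucket newest-first by `updatedAt` and then drops empty buckets. A standalone sketch of the same ordering logic, using a hypothetical `ChatSummary` shape (only `updatedAt` comes from the code above; the other fields are assumed):

```ts
interface ChatSummary {
  id: string
  title: string
  updatedAt: string // ISO timestamp, as implied by new Date(a.updatedAt) above
}

/** Sorts each group newest-first and removes empty groups (illustration only). */
function orderGroups(groups: Record<string, ChatSummary[]>): Array<[string, ChatSummary[]]> {
  for (const name of Object.keys(groups)) {
    groups[name].sort((a, b) => new Date(b.updatedAt).getTime() - new Date(a.updatedAt).getTime())
  }
  return Object.entries(groups).filter(([, chats]) => chats.length > 0)
}

// "Today" keeps both chats, newest first; the empty "Yesterday" group is dropped.
orderGroups({
  Today: [
    { id: 'a', title: 'Fix deploy', updatedAt: '2025-01-20T09:00:00Z' },
    { id: 'b', title: 'Add inputs', updatedAt: '2025-01-20T11:30:00Z' },
  ],
  Yesterday: [],
})
```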
|
||||
|
||||
/**
|
||||
* Handles history dropdown opening and loads chats if needed
|
||||
* Does not await loading - fires in background to avoid blocking UI
|
||||
*/
|
||||
/** Handles history dropdown opening and loads chats if needed (non-blocking) */
|
||||
const handleHistoryDropdownOpen = useCallback(
|
||||
(open: boolean) => {
|
||||
// Only load if opening dropdown AND we don't have fresh chats AND not streaming
|
||||
if (open && activeWorkflowId && !isSendingMessage && !areChatsFresh(activeWorkflowId)) {
|
||||
// Fire in background, don't await - same pattern as old panel
|
||||
loadChats(false).catch((error) => {
|
||||
logger.error('Failed to load chat history:', error)
|
||||
})
|
||||
|
||||
@@ -38,11 +38,7 @@ export function useCopilotInitialization(props: UseCopilotInitializationProps) {
|
||||
const lastWorkflowIdRef = useRef<string | null>(null)
|
||||
const hasMountedRef = useRef(false)
|
||||
|
||||
/**
|
||||
* Initialize on mount - only load chats if needed, don't force refresh
|
||||
* This prevents unnecessary reloads when the component remounts (e.g., hot reload)
|
||||
* Never loads during message streaming to prevent interrupting active conversations
|
||||
*/
|
||||
/** Initialize on mount - loads chats if needed. Never loads during streaming */
|
||||
useEffect(() => {
|
||||
if (activeWorkflowId && !hasMountedRef.current && !isSendingMessage) {
|
||||
hasMountedRef.current = true
|
||||
@@ -50,19 +46,12 @@ export function useCopilotInitialization(props: UseCopilotInitializationProps) {
|
||||
lastWorkflowIdRef.current = null
|
||||
|
||||
setCopilotWorkflowId(activeWorkflowId)
|
||||
// Use false to let the store decide if a reload is needed based on cache
|
||||
loadChats(false)
|
||||
}
|
||||
}, [activeWorkflowId, setCopilotWorkflowId, loadChats, isSendingMessage])
|
||||
|
||||
/**
|
||||
* Initialize the component - only on mount and genuine workflow changes
|
||||
* Prevents re-initialization on every render or tab switch
|
||||
* Never reloads during message streaming to preserve active conversations
|
||||
*/
|
||||
/** Handles genuine workflow changes, preventing re-init on every render */
|
||||
useEffect(() => {
|
||||
// Handle genuine workflow changes (not initial mount, not same workflow)
|
||||
// Only reload if not currently streaming to avoid interrupting conversations
|
||||
if (
|
||||
activeWorkflowId &&
|
||||
activeWorkflowId !== lastWorkflowIdRef.current &&
|
||||
@@ -80,7 +69,23 @@ export function useCopilotInitialization(props: UseCopilotInitializationProps) {
|
||||
loadChats(false)
|
||||
}
|
||||
|
||||
// Mark as initialized when chats are loaded for the active workflow
|
||||
if (
|
||||
activeWorkflowId &&
|
||||
!isLoadingChats &&
|
||||
chatsLoadedForWorkflow !== null &&
|
||||
chatsLoadedForWorkflow !== activeWorkflowId &&
|
||||
!isSendingMessage
|
||||
) {
|
||||
logger.info('Chats loaded for wrong workflow, reloading', {
|
||||
loaded: chatsLoadedForWorkflow,
|
||||
active: activeWorkflowId,
|
||||
})
|
||||
setIsInitialized(false)
|
||||
lastWorkflowIdRef.current = activeWorkflowId
|
||||
setCopilotWorkflowId(activeWorkflowId)
|
||||
loadChats(false)
|
||||
}
|
||||
|
||||
if (
|
||||
activeWorkflowId &&
|
||||
!isLoadingChats &&
|
||||
@@ -100,9 +105,7 @@ export function useCopilotInitialization(props: UseCopilotInitializationProps) {
|
||||
isSendingMessage,
|
||||
])
|
||||
|
||||
/**
|
||||
* Load auto-allowed tools once on mount
|
||||
*/
|
||||
/** Load auto-allowed tools once on mount */
|
||||
const hasLoadedAutoAllowedToolsRef = useRef(false)
|
||||
useEffect(() => {
|
||||
if (hasMountedRef.current && !hasLoadedAutoAllowedToolsRef.current) {
|
||||
|
||||
@@ -6,7 +6,6 @@ interface UseTodoManagementProps {
|
||||
isSendingMessage: boolean
|
||||
showPlanTodos: boolean
|
||||
planTodos: Array<{ id: string; content: string; completed?: boolean }>
|
||||
setPlanTodos: (todos: any[]) => void
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -16,14 +15,12 @@ interface UseTodoManagementProps {
|
||||
* @returns Todo management utilities
|
||||
*/
|
||||
export function useTodoManagement(props: UseTodoManagementProps) {
|
||||
const { isSendingMessage, showPlanTodos, planTodos, setPlanTodos } = props
|
||||
const { isSendingMessage, showPlanTodos, planTodos } = props
|
||||
|
||||
const [todosCollapsed, setTodosCollapsed] = useState(false)
|
||||
const wasSendingRef = useRef(false)
|
||||
|
||||
/**
 * Auto-collapse todos when stream completes. Do not prune items.
 */
/** Auto-collapse todos when stream completes */
useEffect(() => {
  if (wasSendingRef.current && !isSendingMessage && showPlanTodos) {
    setTodosCollapsed(true)
@@ -31,9 +28,7 @@ export function useTodoManagement(props: UseTodoManagementProps) {
  wasSendingRef.current = isSendingMessage
}, [isSendingMessage, showPlanTodos])
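The effect above detects the moment `isSendingMessage` flips from true to false using a ref instead of extra state. The same falling-edge pattern in isolation (a generic sketch, not the hook itself):

```ts
import { useEffect, useRef } from 'react'

/** Calls onComplete exactly when `isActive` transitions from true to false. */
function useFallingEdge(isActive: boolean, onComplete: () => void) {
  const wasActiveRef = useRef(false)
  useEffect(() => {
    if (wasActiveRef.current && !isActive) {
      onComplete()
    }
    wasActiveRef.current = isActive
  }, [isActive, onComplete])
}
```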
|
||||
|
||||
/**
|
||||
* Reset collapsed state when todos first appear
|
||||
*/
|
||||
/** Reset collapsed state when todos first appear */
|
||||
useEffect(() => {
|
||||
if (showPlanTodos && planTodos.length > 0) {
|
||||
if (isSendingMessage) {
|
||||
|
||||
@@ -5,7 +5,6 @@ import { createLogger } from '@sim/logger'
|
||||
import { Check, Clipboard } from 'lucide-react'
|
||||
import { useParams } from 'next/navigation'
|
||||
import {
|
||||
Badge,
|
||||
Button,
|
||||
ButtonGroup,
|
||||
ButtonGroupItem,
|
||||
@@ -883,14 +882,13 @@ console.log(data);`
|
||||
<code className='text-[10px]'><start.files></code>.
|
||||
</p>
|
||||
{missingFields.any && (
|
||||
<Badge
|
||||
variant='outline'
|
||||
className='flex-none cursor-pointer whitespace-nowrap rounded-[6px]'
|
||||
<div
|
||||
className='flex flex-none cursor-pointer items-center whitespace-nowrap rounded-[6px] border border-[var(--border-1)] bg-[var(--surface-5)] px-[9px] py-[2px] font-medium font-sans text-[12px] text-[var(--text-primary)] hover:bg-[var(--surface-7)] dark:hover:border-[var(--surface-7)] dark:hover:bg-[var(--border-1)]'
|
||||
title='Add required A2A input fields to Start block'
|
||||
onClick={handleAddA2AInputs}
|
||||
>
|
||||
<span className='whitespace-nowrap text-[12px]'>Add inputs</span>
|
||||
</Badge>
|
||||
<span className='whitespace-nowrap'>Add inputs</span>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@@ -283,7 +283,7 @@ export function GeneralDeploy({
|
||||
<ModalContent size='sm'>
|
||||
<ModalHeader>Promote to live</ModalHeader>
|
||||
<ModalBody>
|
||||
<p className='text-[12px] text-[var(--text-tertiary)]'>
|
||||
<p className='text-[12px] text-[var(--text-secondary)]'>
|
||||
Are you sure you want to promote{' '}
|
||||
<span className='font-medium text-[var(--text-primary)]'>
|
||||
{versionToPromoteInfo?.name || `v${versionToPromote}`}
|
||||
|
||||
@@ -591,12 +591,11 @@ export function DeployModal({
|
||||
)}
|
||||
{activeTab === 'api' && (
|
||||
<ModalFooter className='items-center justify-between'>
|
||||
<div>
|
||||
<div />
|
||||
<div className='flex items-center gap-2'>
|
||||
<Button variant='default' onClick={() => setIsApiInfoModalOpen(true)}>
|
||||
Edit API Info
|
||||
</Button>
|
||||
</div>
|
||||
<div className='flex items-center gap-2'>
|
||||
<Button
|
||||
variant='tertiary'
|
||||
onClick={() => setIsCreateKeyModalOpen(true)}
|
||||
|
||||
@@ -8,9 +8,10 @@ import { Button, Combobox } from '@/components/emcn/components'
|
||||
import { Progress } from '@/components/ui/progress'
|
||||
import { cn } from '@/lib/core/utils/cn'
|
||||
import type { WorkspaceFileRecord } from '@/lib/uploads/contexts/workspace'
|
||||
import { getExtensionFromMimeType } from '@/lib/uploads/utils/file-utils'
|
||||
import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-sub-block-value'
|
||||
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
||||
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
|
||||
import { useSubBlockValue } from '../../hooks/use-sub-block-value'
|
||||
|
||||
const logger = createLogger('FileUpload')
|
||||
|
||||
@@ -85,14 +86,47 @@ export function FileUpload({
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Checks if a file's MIME type matches the accepted types
 * Supports exact matches, wildcard patterns (e.g., 'image/*'), and '*' for all types
 */
const isFileTypeAccepted = (fileType: string | undefined, accepted: string): boolean => {
  if (accepted === '*') return true
  if (!fileType) return false

  const acceptedList = accepted.split(',').map((t) => t.trim().toLowerCase())
  const normalizedFileType = fileType.toLowerCase()

  return acceptedList.some((acceptedType) => {
    if (acceptedType === normalizedFileType) return true

    if (acceptedType.endsWith('/*')) {
      const typePrefix = acceptedType.slice(0, -1) // 'image/' from 'image/*'
      return normalizedFileType.startsWith(typePrefix)
    }

    if (acceptedType.startsWith('.')) {
      const extension = acceptedType.slice(1).toLowerCase()
      const fileExtension = getExtensionFromMimeType(normalizedFileType)
      if (fileExtension === extension) return true
      return normalizedFileType.endsWith(`/${extension}`)
    }

    return false
  })
}
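A few concrete cases for the matcher above (values are illustrative; results follow the rules as written, with `.ext` entries falling back to the MIME subtype when `getExtensionFromMimeType` has no match):

```ts
isFileTypeAccepted('image/png', 'image/*') // true  - wildcard prefix match
isFileTypeAccepted('application/pdf', '.pdf') // true  - '.pdf' matches the '/pdf' MIME suffix
isFileTypeAccepted('text/csv', 'image/*,.pdf') // false - no entry in the accept list matches
isFileTypeAccepted(undefined, 'image/*') // false - unknown MIME type is rejected
isFileTypeAccepted('video/mp4', '*') // true  - '*' accepts everything
```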
|
||||
|
||||
const availableWorkspaceFiles = workspaceFiles.filter((workspaceFile) => {
|
||||
const existingFiles = Array.isArray(value) ? value : value ? [value] : []
|
||||
return !existingFiles.some(
|
||||
|
||||
const isAlreadySelected = existingFiles.some(
|
||||
(existing) =>
|
||||
existing.name === workspaceFile.name ||
|
||||
existing.path?.includes(workspaceFile.key) ||
|
||||
existing.key === workspaceFile.key
|
||||
)
|
||||
|
||||
return !isAlreadySelected
|
||||
})
|
||||
|
||||
useEffect(() => {
|
||||
@@ -421,23 +455,23 @@ export function FileUpload({
|
||||
return (
|
||||
<div
|
||||
key={fileKey}
|
||||
className='flex items-center justify-between rounded-[4px] border border-[var(--border-1)] bg-[var(--surface-5)] px-[8px] py-[6px] hover:border-[var(--surface-7)] hover:bg-[var(--surface-5)] dark:bg-[var(--surface-5)] dark:hover:bg-[var(--border-1)]'
|
||||
className='relative rounded-[4px] border border-[var(--border-1)] bg-[var(--surface-5)] px-[8px] py-[6px] hover:border-[var(--surface-7)] hover:bg-[var(--surface-5)] dark:bg-[var(--surface-5)] dark:hover:bg-[var(--border-1)]'
|
||||
>
|
||||
<div className='flex-1 truncate pr-2 text-sm' title={file.name}>
|
||||
<div className='truncate pr-[24px] text-sm' title={file.name}>
|
||||
<span className='text-[var(--text-primary)]'>{truncateMiddle(file.name)}</span>
|
||||
<span className='ml-2 text-[var(--text-muted)]'>({formatFileSize(file.size)})</span>
|
||||
</div>
|
||||
<Button
|
||||
type='button'
|
||||
variant='ghost'
|
||||
className='h-5 w-5 shrink-0 p-0'
|
||||
className='-translate-y-1/2 absolute top-1/2 right-[4px] h-6 w-6 p-0'
|
||||
onClick={(e) => handleRemoveFile(file, e)}
|
||||
disabled={isDeleting}
|
||||
>
|
||||
{isDeleting ? (
|
||||
<div className='h-3.5 w-3.5 animate-spin rounded-full border-[1.5px] border-current border-t-transparent' />
|
||||
<div className='h-4 w-4 animate-spin rounded-full border-[1.5px] border-current border-t-transparent' />
|
||||
) : (
|
||||
<X className='h-3.5 w-3.5' />
|
||||
<X className='h-4 w-4 opacity-50' />
|
||||
)}
|
||||
</Button>
|
||||
</div>
|
||||
@@ -468,19 +502,30 @@ export function FileUpload({
|
||||
const comboboxOptions = useMemo(
|
||||
() => [
|
||||
{ label: 'Upload New File', value: '__upload_new__' },
|
||||
...availableWorkspaceFiles.map((file) => ({
|
||||
label: file.name,
|
||||
value: file.id,
|
||||
})),
|
||||
...availableWorkspaceFiles.map((file) => {
|
||||
const isAccepted =
|
||||
!acceptedTypes || acceptedTypes === '*' || isFileTypeAccepted(file.type, acceptedTypes)
|
||||
return {
|
||||
label: file.name,
|
||||
value: file.id,
|
||||
disabled: !isAccepted,
|
||||
}
|
||||
}),
|
||||
],
|
||||
[availableWorkspaceFiles]
|
||||
[availableWorkspaceFiles, acceptedTypes]
|
||||
)
|
||||
|
||||
const handleComboboxChange = (value: string) => {
|
||||
setInputValue(value)
|
||||
|
||||
const isValidOption =
|
||||
value === '__upload_new__' || availableWorkspaceFiles.some((file) => file.id === value)
|
||||
const selectedFile = availableWorkspaceFiles.find((file) => file.id === value)
|
||||
const isAcceptedType =
|
||||
selectedFile &&
|
||||
(!acceptedTypes ||
|
||||
acceptedTypes === '*' ||
|
||||
isFileTypeAccepted(selectedFile.type, acceptedTypes))
|
||||
|
||||
const isValidOption = value === '__upload_new__' || isAcceptedType
|
||||
|
||||
if (!isValidOption) {
|
||||
return
|
||||
|
||||
@@ -28,6 +28,7 @@ interface Field {
  name: string
  type?: 'string' | 'number' | 'boolean' | 'object' | 'array' | 'files'
  value?: string
  description?: string
  collapsed?: boolean
}
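With the optional `description` added, a field entry might look like the following; the values are illustrative, and any members of `Field` outside the hunk above are deliberately left out:

```ts
// Illustrative field entry; typed as Partial<Field> because only the properties
// visible in the hunk above are shown here.
const emailField: Partial<Field> = {
  name: 'email',
  type: 'string',
  value: '',
  description: 'Customer email used for the confirmation message',
  collapsed: false,
}
```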
|
||||
|
||||
@@ -41,7 +42,9 @@ interface FieldFormatProps {
|
||||
placeholder?: string
|
||||
showType?: boolean
|
||||
showValue?: boolean
|
||||
showDescription?: boolean
|
||||
valuePlaceholder?: string
|
||||
descriptionPlaceholder?: string
|
||||
config?: any
|
||||
}
|
||||
|
||||
@@ -73,6 +76,7 @@ const createDefaultField = (): Field => ({
|
||||
name: '',
|
||||
type: 'string',
|
||||
value: '',
|
||||
description: '',
|
||||
collapsed: false,
|
||||
})
|
||||
|
||||
@@ -93,7 +97,9 @@ export function FieldFormat({
|
||||
placeholder = 'fieldName',
|
||||
showType = true,
|
||||
showValue = false,
|
||||
showDescription = false,
|
||||
valuePlaceholder = 'Enter default value',
|
||||
descriptionPlaceholder = 'Describe this field',
|
||||
}: FieldFormatProps) {
|
||||
const [storeValue, setStoreValue] = useSubBlockValue<Field[]>(blockId, subBlockId)
|
||||
const valueInputRefs = useRef<Record<string, HTMLInputElement | HTMLTextAreaElement>>({})
|
||||
@@ -554,6 +560,18 @@ export function FieldFormat({
|
||||
</div>
|
||||
)}
|
||||
|
||||
{showDescription && (
|
||||
<div className='flex flex-col gap-[6px]'>
|
||||
<Label className='text-[13px]'>Description</Label>
|
||||
<Input
|
||||
value={field.description ?? ''}
|
||||
onChange={(e) => updateField(field.id, 'description', e.target.value)}
|
||||
placeholder={descriptionPlaceholder}
|
||||
disabled={isReadOnly}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{showValue && (
|
||||
<div className='flex flex-col gap-[6px]'>
|
||||
<Label className='text-[13px]'>Value</Label>
|
||||
@@ -568,8 +586,10 @@ export function FieldFormat({
|
||||
)
|
||||
}
|
||||
|
||||
export function InputFormat(props: Omit<FieldFormatProps, 'title' | 'placeholder'>) {
|
||||
return <FieldFormat {...props} title='Input' placeholder='firstName' />
|
||||
export function InputFormat(
|
||||
props: Omit<FieldFormatProps, 'title' | 'placeholder' | 'showDescription'>
|
||||
) {
|
||||
return <FieldFormat {...props} title='Input' placeholder='firstName' showDescription={true} />
|
||||
}
|
||||
|
||||
export function ResponseFormat(
|
||||
|
||||
@@ -241,13 +241,16 @@ const getOutputTypeForPath = (
|
||||
const blockState = useWorkflowStore.getState().blocks[blockId]
|
||||
const subBlocks = mergedSubBlocksOverride ?? (blockState?.subBlocks || {})
|
||||
return getBlockOutputType(block.type, outputPath, subBlocks)
|
||||
} else {
|
||||
const operationValue = getSubBlockValue(blockId, 'operation')
|
||||
if (blockConfig && operationValue) {
|
||||
return getToolOutputType(blockConfig, operationValue, outputPath)
|
||||
}
|
||||
} else if (blockConfig?.tools?.config?.tool) {
|
||||
const blockState = useWorkflowStore.getState().blocks[blockId]
|
||||
const subBlocks = mergedSubBlocksOverride ?? (blockState?.subBlocks || {})
|
||||
return getToolOutputType(blockConfig, subBlocks, outputPath)
|
||||
}
|
||||
return 'any'
|
||||
|
||||
const subBlocks =
|
||||
mergedSubBlocksOverride ?? useWorkflowStore.getState().blocks[blockId]?.subBlocks
|
||||
const triggerMode = block?.triggerMode && blockConfig?.triggers?.enabled
|
||||
return getBlockOutputType(block?.type ?? '', outputPath, subBlocks, triggerMode)
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -1211,11 +1214,7 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
|
||||
: allTags
|
||||
}
|
||||
} else {
|
||||
const operationValue =
|
||||
mergedSubBlocks?.operation?.value ?? getSubBlockValue(activeSourceBlockId, 'operation')
|
||||
const toolOutputPaths = operationValue
|
||||
? getToolOutputPaths(blockConfig, operationValue, mergedSubBlocks)
|
||||
: []
|
||||
const toolOutputPaths = getToolOutputPaths(blockConfig, mergedSubBlocks)
|
||||
|
||||
if (toolOutputPaths.length > 0) {
|
||||
blockTags = toolOutputPaths.map((path) => `${normalizedBlockName}.${path}`)
|
||||
@@ -1535,7 +1534,6 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
|
||||
|
||||
if (dynamicOutputs.length > 0) {
|
||||
const allTags = dynamicOutputs.map((path) => `${normalizedBlockName}.${path}`)
|
||||
// For self-reference, only show url and resumeEndpoint (not response format fields)
|
||||
blockTags = isSelfReference
|
||||
? allTags.filter((tag) => tag.endsWith('.url') || tag.endsWith('.resumeEndpoint'))
|
||||
: allTags
|
||||
@@ -1543,11 +1541,7 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
|
||||
blockTags = [`${normalizedBlockName}.url`, `${normalizedBlockName}.resumeEndpoint`]
|
||||
}
|
||||
} else {
|
||||
const operationValue =
|
||||
mergedSubBlocks?.operation?.value ?? getSubBlockValue(accessibleBlockId, 'operation')
|
||||
const toolOutputPaths = operationValue
|
||||
? getToolOutputPaths(blockConfig, operationValue, mergedSubBlocks)
|
||||
: []
|
||||
const toolOutputPaths = getToolOutputPaths(blockConfig, mergedSubBlocks)
|
||||
|
||||
if (toolOutputPaths.length > 0) {
|
||||
blockTags = toolOutputPaths.map((path) => `${normalizedBlockName}.${path}`)
|
||||
@@ -1789,7 +1783,7 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
  mergedSubBlocks
)

if (fieldType === 'files' || fieldType === 'array') {
if (fieldType === 'files' || fieldType === 'file[]' || fieldType === 'array') {
  const blockName = parts[0]
  const remainingPath = parts.slice(2).join('.')
  processedTag = `${blockName}.${arrayFieldName}[0].${remainingPath}`
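For file-array fields, the tag is rewritten to index the first element. Assuming `parts` is the dot-split tag, a tag like `start.files.name` with an `arrayFieldName` of `files` becomes `start.files[0].name`; a minimal sketch of that rewrite with invented names:

```ts
// Illustration of the rewrite performed above (not the component's actual helper).
function rewriteFileTag(tag: string, arrayFieldName: string): string {
  const parts = tag.split('.')
  const blockName = parts[0]
  const remainingPath = parts.slice(2).join('.')
  return `${blockName}.${arrayFieldName}[0].${remainingPath}`
}

rewriteFileTag('start.files.name', 'files') // => 'start.files[0].name'
```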
|
||||
|
||||
@@ -42,7 +42,7 @@ export function CodeEditor({
|
||||
placeholder = '',
|
||||
className = '',
|
||||
gutterClassName = '',
|
||||
minHeight = '360px',
|
||||
minHeight,
|
||||
highlightVariables = true,
|
||||
onKeyDown,
|
||||
disabled = false,
|
||||
@@ -186,7 +186,7 @@ export function CodeEditor({
|
||||
}
|
||||
|
||||
return (
|
||||
<Code.Container className={className} style={{ minHeight }}>
|
||||
<Code.Container className={className} style={minHeight ? { minHeight } : undefined}>
|
||||
{showWandButton && onWandClick && (
|
||||
<Button
|
||||
variant='ghost'
|
||||
@@ -220,7 +220,7 @@ export function CodeEditor({
|
||||
disabled={disabled}
|
||||
{...getCodeEditorProps({ disabled })}
|
||||
className={cn(getCodeEditorProps({ disabled }).className, 'h-full')}
|
||||
style={{ minHeight }}
|
||||
style={minHeight ? { minHeight } : undefined}
|
||||
textareaClassName={cn(
|
||||
getCodeEditorProps({ disabled }).textareaClassName,
|
||||
'!block !h-full !min-h-full'
|
||||
|
||||
Some files were not shown because too many files have changed in this diff.