Mirror of https://github.com/simstudioai/sim.git (synced 2026-01-22 05:18:08 -05:00)

Compare commits (56 commits)
Commits:
cc2be33d6b, 376f7cb571, 42159c23b9, 2f0f246002, 900d3ef9ea, f3fcc28f89, 7cfdf46724,
d681451297, 5987a6d060, e2ccefb2f4, 103b31a569, 004e058353, 5157f0bbb2, 8bbcf31b83,
9e814315dd, 0ea0256623, fb8868c854, ea4964052d, 268e2f114f, 45371e521e, 5988d0e46f,
145db9d8c3, 0ce0f98aa5, 294b168ed9, 0dc2c1fe0d, fb90c4e9b1, 0af96d06c6, 1d450578c8,
c6d408c65b, 16716ea26a, 563098ca0a, 1f1f015031, 4afb245fa2, 8344d68ca8, a26a1a9737,
689037a300, 07f0c01dc4, dff1c9d083, e4ad31bb6b, 84691fc873, 2daf34386e, ac991d4b54,
69614d2d93, 6cbadd7110, 9efd3d5b4c, e575ba2965, 5f45db4343, 81cbfe7af4, 739341b08e,
3c43779ba3, 1861f77283, 72c2ba7443, 037dad6975, 408597e12b, 932f8fd654, b4c2294e67

.claude/rules/emcn-components.md  (new file, 35 lines)
@@ -0,0 +1,35 @@
---
paths:
  - "apps/sim/components/emcn/**"
---

# EMCN Components

Import from `@/components/emcn`, never from subpaths (except CSS files).
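
A quick sketch of the rule (the component names and subpath are illustrative, not actual files):

```tsx
// ✓ Good - import from the barrel
import { Button, Label } from '@/components/emcn'

// ✗ Bad - subpath import (only CSS files may be imported this way)
import { Button } from '@/components/emcn/button'
```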

## CVA vs Direct Styles

**Use CVA when:** 2+ variants (primary/secondary, sm/md/lg)

```tsx
const buttonVariants = cva('base-classes', {
  variants: { variant: { default: '...', primary: '...' } }
})
export { Button, buttonVariants }
```

**Use direct className when:** Single consistent style, no variations

```tsx
function Label({ className, ...props }) {
  return <Primitive className={cn('style-classes', className)} {...props} />
}
```

## Rules

- Use Radix UI primitives for accessibility
- Export component and variants (if using CVA)
- TSDoc with usage examples
- Consistent tokens: `font-medium`, `text-[12px]`, `rounded-[4px]`
- `transition-colors` for hover states

.claude/rules/global.md  (new file, 13 lines)
@@ -0,0 +1,13 @@
# Global Standards

## Logging
Import `createLogger` from `sim/logger`. Use `logger.info`, `logger.warn`, `logger.error` instead of `console.log`.
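
A minimal sketch of the pattern; the scope-name argument matches the `createLogger('LoginForm')` call that appears later in this compare (which also corrects the import specifier to `@sim/logger`):

```typescript
import { createLogger } from '@sim/logger'

const logger = createLogger('MyFeature')

logger.info('Feature initialized')
logger.error('Request failed') // instead of console.log / console.error
```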

## Comments
Use TSDoc for documentation. No `====` separators. No non-TSDoc comments.

## Styling
Never update global styles. Keep all styling local to components.

## Package Manager
Use `bun` and `bunx`, not `npm` and `npx`.

.claude/rules/sim-architecture.md  (new file, 56 lines)
@@ -0,0 +1,56 @@
---
paths:
  - "apps/sim/**"
---

# Sim App Architecture

## Core Principles
1. **Single Responsibility**: Each component, hook, store has one clear purpose
2. **Composition Over Complexity**: Break down complex logic into smaller pieces
3. **Type Safety First**: TypeScript interfaces for all props, state, return types
4. **Predictable State**: Zustand for global state, useState for UI-only concerns

## Root-Level Structure

```
apps/sim/
├── app/          # Next.js app router (pages, API routes)
├── blocks/       # Block definitions and registry
├── components/   # Shared UI (emcn/, ui/)
├── executor/     # Workflow execution engine
├── hooks/        # Shared hooks (queries/, selectors/)
├── lib/          # App-wide utilities
├── providers/    # LLM provider integrations
├── stores/       # Zustand stores
├── tools/        # Tool definitions
└── triggers/     # Trigger definitions
```

## Feature Organization

Features live under `app/workspace/[workspaceId]/`:

```
feature/
├── components/   # Feature components
├── hooks/        # Feature-scoped hooks
├── utils/        # Feature-scoped utilities (2+ consumers)
├── feature.tsx   # Main component
└── page.tsx      # Next.js page entry
```

## Naming Conventions
- **Components**: PascalCase (`WorkflowList`)
- **Hooks**: `use` prefix (`useWorkflowOperations`)
- **Files**: kebab-case (`workflow-list.tsx`)
- **Stores**: `stores/feature/store.ts`
- **Constants**: SCREAMING_SNAKE_CASE
- **Interfaces**: PascalCase with suffix (`WorkflowListProps`)

## Utils Rules

- **Never create `utils.ts` for single consumer** - inline it
- **Create `utils.ts` when** 2+ files need the same helper
- **Check existing sources** before duplicating (`lib/` has many utilities)
- **Location**: `lib/` (app-wide) → `feature/utils/` (feature-scoped) → inline (single-use)
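
As a sketch of the location ladder (the `formatDuration` helper is hypothetical):

```typescript
// feature/utils/format-duration.ts - promoted out of a component only
// once a second file in the feature needed it; an app-wide helper would
// live in lib/ instead, and single-use logic would stay inline.
export function formatDuration(ms: number): string {
  const seconds = Math.floor(ms / 1000)
  return seconds < 60 ? `${seconds}s` : `${Math.floor(seconds / 60)}m ${seconds % 60}s`
}
```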

.claude/rules/sim-components.md  (new file, 48 lines)
@@ -0,0 +1,48 @@
---
paths:
  - "apps/sim/**/*.tsx"
---

# Component Patterns

## Structure Order

```typescript
'use client' // Only if using hooks

// Imports (external → internal)
// Constants at module level
const CONFIG = { SPACING: 8 } as const

// Props interface
interface ComponentProps {
  requiredProp: string
  optionalProp?: boolean
}

export function Component({ requiredProp, optionalProp = false }: ComponentProps) {
  // a. Refs
  // b. External hooks (useParams, useRouter)
  // c. Store hooks
  // d. Custom hooks
  // e. Local state
  // f. useMemo
  // g. useCallback
  // h. useEffect
  // i. Return JSX
}
```

## Rules

1. `'use client'` only when using React hooks
2. Always define props interface
3. Extract constants with `as const`
4. Semantic HTML (`aside`, `nav`, `article`)
5. Optional chain callbacks: `onAction?.(id)`
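
A minimal sketch of rules 2, 4, and 5 together (component and prop names are illustrative):

```tsx
interface LogListProps {
  logs: string[]
  onSelect?: (id: string) => void
}

export function LogList({ logs, onSelect }: LogListProps) {
  // Semantic element instead of a bare div; optional callback chained
  return (
    <article>
      {logs.map((id) => (
        <button key={id} onClick={() => onSelect?.(id)}>
          {id}
        </button>
      ))}
    </article>
  )
}
```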

## Component Extraction

**Extract when:** 50+ lines, used in 2+ files, or has own state/logic

**Keep inline when:** < 10 lines, single use, purely presentational

.claude/rules/sim-hooks.md  (new file, 55 lines)
@@ -0,0 +1,55 @@
---
paths:
  - "apps/sim/**/use-*.ts"
  - "apps/sim/**/hooks/**/*.ts"
---

# Hook Patterns

## Structure

```typescript
interface UseFeatureProps {
  id: string
  onSuccess?: (result: Result) => void
}

export function useFeature({ id, onSuccess }: UseFeatureProps) {
  // 1. Refs for stable dependencies
  const idRef = useRef(id)
  const onSuccessRef = useRef(onSuccess)

  // 2. State
  const [data, setData] = useState<Data | null>(null)
  const [isLoading, setIsLoading] = useState(false)

  // 3. Sync refs
  useEffect(() => {
    idRef.current = id
    onSuccessRef.current = onSuccess
  }, [id, onSuccess])

  // 4. Operations (useCallback with empty deps when using refs)
  const fetchData = useCallback(async () => {
    setIsLoading(true)
    try {
      const result = await fetch(`/api/${idRef.current}`).then(r => r.json())
      setData(result)
      onSuccessRef.current?.(result)
    } finally {
      setIsLoading(false)
    }
  }, [])

  return { data, isLoading, fetchData }
}
```
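
Consumed from a component, the hook above might be used like this (a sketch; `workflowId` and `handleResult` are assumed to exist in the consumer):

```typescript
const { data, isLoading, fetchData } = useFeature({
  id: workflowId,
  // An inline callback is fine here: the hook reads it through a ref,
  // so fetchData's identity stays stable across renders
  onSuccess: (result) => handleResult(result),
})
```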

## Rules

1. Single responsibility per hook
2. Props interface required
3. Refs for stable callback dependencies
4. Wrap returned functions in useCallback
5. Always try/catch async operations
6. Track loading/error states

.claude/rules/sim-imports.md  (new file, 62 lines)
@@ -0,0 +1,62 @@
---
paths:
  - "apps/sim/**/*.ts"
  - "apps/sim/**/*.tsx"
---

# Import Patterns

## Absolute Imports

**Always use absolute imports.** Never use relative imports.

```typescript
// ✓ Good
import { useWorkflowStore } from '@/stores/workflows/store'
import { Button } from '@/components/ui/button'

// ✗ Bad
import { useWorkflowStore } from '../../../stores/workflows/store'
```

## Barrel Exports

Use barrel exports (`index.ts`) when a folder has 3+ exports. Import from the barrel, not individual files.

```typescript
// ✓ Good
import { Dashboard, Sidebar } from '@/app/workspace/[workspaceId]/logs/components'

// ✗ Bad
import { Dashboard } from '@/app/workspace/[workspaceId]/logs/components/dashboard/dashboard'
```

## No Re-exports

Do not re-export from non-barrel files. Import directly from the source.

```typescript
// ✓ Good - import from where it's declared
import { CORE_TRIGGER_TYPES } from '@/stores/logs/filters/types'

// ✗ Bad - re-exporting in utils.ts then importing from there
import { CORE_TRIGGER_TYPES } from '@/app/workspace/.../utils'
```

## Import Order

1. React/core libraries
2. External libraries
3. UI components (`@/components/emcn`, `@/components/ui`)
4. Utilities (`@/lib/...`)
5. Stores (`@/stores/...`)
6. Feature imports
7. CSS imports
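
An import block in that order might look like this (the module paths are illustrative):

```typescript
import { useState } from 'react' // 1. React/core
import { useQuery } from '@tanstack/react-query' // 2. External
import { Button } from '@/components/ui/button' // 3. UI components
import { cn } from '@/lib/utils' // 4. Utilities
import { useWorkflowStore } from '@/stores/workflows/store' // 5. Stores
```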

## Type Imports

Use `type` keyword for type-only imports:

```typescript
import type { WorkflowLog } from '@/stores/logs/types'
```

.claude/rules/sim-integrations.md  (new file, 209 lines)
@@ -0,0 +1,209 @@
---
paths:
  - "apps/sim/tools/**"
  - "apps/sim/blocks/**"
  - "apps/sim/triggers/**"
---

# Adding Integrations

## Overview

Adding a new integration typically requires:
1. **Tools** - API operations (`tools/{service}/`)
2. **Block** - UI component (`blocks/blocks/{service}.ts`)
3. **Icon** - SVG icon (`components/icons.tsx`)
4. **Trigger** (optional) - Webhooks/polling (`triggers/{service}/`)

Always look up the service's API docs first.

## 1. Tools (`tools/{service}/`)

```
tools/{service}/
├── index.ts      # Export all tools
├── types.ts      # Params/response types
├── {action}.ts   # Individual tool (e.g., send_message.ts)
└── ...
```

**Tool file structure:**

```typescript
// tools/{service}/{action}.ts
import type { {Service}Params, {Service}Response } from '@/tools/{service}/types'
import type { ToolConfig } from '@/tools/types'

export const {service}{Action}Tool: ToolConfig<{Service}Params, {Service}Response> = {
  id: '{service}_{action}',
  name: '{Service} {Action}',
  description: 'What this tool does',
  version: '1.0.0',
  oauth: { required: true, provider: '{service}' }, // if OAuth
  params: { /* param definitions */ },
  request: {
    url: '/api/tools/{service}/{action}',
    method: 'POST',
    headers: () => ({ 'Content-Type': 'application/json' }),
    body: (params) => ({ ...params }),
  },
  transformResponse: async (response) => {
    const data = await response.json()
    if (!data.success) throw new Error(data.error)
    return { success: true, output: data.output }
  },
  outputs: { /* output definitions */ },
}
```

**Register in `tools/registry.ts`:**

```typescript
import { {service}{Action}Tool } from '@/tools/{service}'
// Add to registry object
{service}_{action}: {service}{Action}Tool,
```

## 2. Block (`blocks/blocks/{service}.ts`)

```typescript
import { {Service}Icon } from '@/components/icons'
import type { BlockConfig } from '@/blocks/types'
import type { {Service}Response } from '@/tools/{service}/types'

export const {Service}Block: BlockConfig<{Service}Response> = {
  type: '{service}',
  name: '{Service}',
  description: 'Short description',
  longDescription: 'Detailed description',
  category: 'tools',
  bgColor: '#hexcolor',
  icon: {Service}Icon,
  subBlocks: [ /* see SubBlock Properties below */ ],
  tools: {
    access: ['{service}_{action}', ...],
    config: {
      tool: (params) => `{service}_${params.operation}`,
      params: (params) => ({ ...params }),
    },
  },
  inputs: { /* input definitions */ },
  outputs: { /* output definitions */ },
}
```

### SubBlock Properties

```typescript
{
  id: 'fieldName',           // Unique identifier
  title: 'Field Label',      // UI label
  type: 'short-input',       // See SubBlock Types below
  placeholder: 'Hint text',
  required: true,            // See Required below
  condition: { ... },        // See Condition below
  dependsOn: ['otherField'], // See DependsOn below
  mode: 'basic',             // 'basic' | 'advanced' | 'both' | 'trigger'
}
```

**SubBlock Types:** `short-input`, `long-input`, `dropdown`, `code`, `switch`, `slider`, `oauth-input`, `channel-selector`, `user-selector`, `file-upload`, etc.

### `condition` - Show/hide based on another field

```typescript
// Show when operation === 'send'
condition: { field: 'operation', value: 'send' }

// Show when operation is 'send' OR 'read'
condition: { field: 'operation', value: ['send', 'read'] }

// Show when operation !== 'send'
condition: { field: 'operation', value: 'send', not: true }

// Complex: NOT in list AND another condition
condition: {
  field: 'operation',
  value: ['list_channels', 'list_users'],
  not: true,
  and: { field: 'destinationType', value: 'dm', not: true }
}
```

### `required` - Field validation

```typescript
// Always required
required: true

// Conditionally required (same syntax as condition)
required: { field: 'operation', value: 'send' }
```

### `dependsOn` - Clear field when dependencies change

```typescript
// Clear when credential changes
dependsOn: ['credential']

// Clear when authMethod changes AND (credential OR botToken) changes
dependsOn: { all: ['authMethod'], any: ['credential', 'botToken'] }
```

### `mode` - When to show field

- `'basic'` - Only in basic mode (default UI)
- `'advanced'` - Only in advanced mode (manual input)
- `'both'` - Show in both modes (default)
- `'trigger'` - Only when block is used as trigger
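
Putting several of these properties together, a hypothetical subBlock entry might read (field names are illustrative):

```typescript
{
  id: 'channelId',
  title: 'Channel',
  type: 'channel-selector',
  placeholder: 'Select a channel',
  // Required and visible only when sending; cleared when the credential changes
  required: { field: 'operation', value: 'send' },
  condition: { field: 'operation', value: 'send' },
  dependsOn: ['credential'],
  mode: 'basic',
}
```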

**Register in `blocks/registry.ts`:**

```typescript
import { {Service}Block } from '@/blocks/blocks/{service}'
// Add to registry object (alphabetically)
{service}: {Service}Block,
```

## 3. Icon (`components/icons.tsx`)

```typescript
export function {Service}Icon(props: SVGProps<SVGSVGElement>) {
  return (
    <svg {...props} viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
      {/* SVG path from service's brand assets */}
    </svg>
  )
}
```

## 4. Trigger (`triggers/{service}/`) - Optional

```
triggers/{service}/
├── index.ts      # Export all triggers
├── webhook.ts    # Webhook handler
├── utils.ts      # Shared utilities
└── {event}.ts    # Specific event handlers
```

**Register in `triggers/registry.ts`:**

```typescript
import { {service}WebhookTrigger } from '@/triggers/{service}'
// Add to TRIGGER_REGISTRY
{service}_webhook: {service}WebhookTrigger,
```

## Checklist

- [ ] Look up API docs for the service
- [ ] Create `tools/{service}/types.ts` with proper types
- [ ] Create tool files for each operation
- [ ] Create `tools/{service}/index.ts` barrel export
- [ ] Register tools in `tools/registry.ts`
- [ ] Add icon to `components/icons.tsx`
- [ ] Create block in `blocks/blocks/{service}.ts`
- [ ] Register block in `blocks/registry.ts`
- [ ] (Optional) Create triggers in `triggers/{service}/`
- [ ] (Optional) Register triggers in `triggers/registry.ts`

.claude/rules/sim-queries.md  (new file, 66 lines)
@@ -0,0 +1,66 @@
---
paths:
  - "apps/sim/hooks/queries/**/*.ts"
---

# React Query Patterns

All React Query hooks live in `hooks/queries/`.

## Query Key Factory

Every query file defines a keys factory:

```typescript
export const entityKeys = {
  all: ['entity'] as const,
  list: (workspaceId?: string) => [...entityKeys.all, 'list', workspaceId ?? ''] as const,
  detail: (id?: string) => [...entityKeys.all, 'detail', id ?? ''] as const,
}
```

## File Structure

```typescript
// 1. Query keys factory
// 2. Types (if needed)
// 3. Private fetch functions
// 4. Exported hooks
```

## Query Hook

```typescript
export function useEntityList(workspaceId?: string, options?: { enabled?: boolean }) {
  return useQuery({
    queryKey: entityKeys.list(workspaceId),
    queryFn: () => fetchEntities(workspaceId as string),
    enabled: Boolean(workspaceId) && (options?.enabled ?? true),
    staleTime: 60 * 1000,
    placeholderData: keepPreviousData,
  })
}
```

## Mutation Hook

```typescript
export function useCreateEntity() {
  const queryClient = useQueryClient()
  return useMutation({
    mutationFn: async (variables) => { /* fetch POST */ },
    onSuccess: () => queryClient.invalidateQueries({ queryKey: entityKeys.all }),
  })
}
```
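
Consumed together, the two hooks might look like this (a sketch using the example hooks above):

```typescript
const { data: entities, isLoading } = useEntityList(workspaceId)
const createEntity = useCreateEntity()

// onSuccess invalidates entityKeys.all, so the list refetches automatically
createEntity.mutate({ name: 'New entity' })
```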

## Optimistic Updates

For optimistic mutations syncing with Zustand, use `createOptimisticMutationHandlers` from `@/hooks/queries/utils/optimistic-mutation`.

## Naming

- **Keys**: `entityKeys`
- **Query hooks**: `useEntity`, `useEntityList`
- **Mutation hooks**: `useCreateEntity`, `useUpdateEntity`
- **Fetch functions**: `fetchEntity` (private)

.claude/rules/sim-stores.md  (new file, 71 lines)
@@ -0,0 +1,71 @@
---
paths:
  - "apps/sim/**/store.ts"
  - "apps/sim/**/stores/**/*.ts"
---

# Zustand Store Patterns

Stores live in `stores/`. Complex stores split into `store.ts` + `types.ts`.

## Basic Store

```typescript
import { create } from 'zustand'
import { devtools } from 'zustand/middleware'
import type { FeatureState } from '@/stores/feature/types'

const initialState = { items: [] as Item[], activeId: null as string | null }

export const useFeatureStore = create<FeatureState>()(
  devtools(
    (set, get) => ({
      ...initialState,
      setItems: (items) => set({ items }),
      addItem: (item) => set((state) => ({ items: [...state.items, item] })),
      reset: () => set(initialState),
    }),
    { name: 'feature-store' }
  )
)
```
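
In components, subscribe with a selector so re-renders track only the slice you read (standard Zustand usage, shown against the example store):

```typescript
const items = useFeatureStore((state) => state.items)
const addItem = useFeatureStore((state) => state.addItem)
```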

## Persisted Store

```typescript
import { create } from 'zustand'
import { persist } from 'zustand/middleware'

export const useFeatureStore = create<FeatureState>()(
  persist(
    (set) => ({
      width: 300,
      setWidth: (width) => set({ width }),
      _hasHydrated: false,
      setHasHydrated: (v) => set({ _hasHydrated: v }),
    }),
    {
      name: 'feature-state',
      partialize: (state) => ({ width: state.width }),
      onRehydrateStorage: () => (state) => state?.setHasHydrated(true),
    }
  )
)
```

## Rules

1. Use `devtools` middleware (named stores)
2. Use `persist` only when data should survive reload
3. `partialize` to persist only necessary state
4. `_hasHydrated` pattern for persisted stores needing hydration tracking
5. Immutable updates only
6. `set((state) => ...)` when depending on previous state
7. Provide `reset()` action

## Outside React

```typescript
const items = useFeatureStore.getState().items
useFeatureStore.setState({ items: newItems })
```

.claude/rules/sim-styling.md  (new file, 41 lines)
@@ -0,0 +1,41 @@
---
paths:
  - "apps/sim/**/*.tsx"
  - "apps/sim/**/*.css"
---

# Styling Rules

## Tailwind

1. **No inline styles** - Use Tailwind classes
2. **No duplicate dark classes** - Skip `dark:` when value matches light mode
3. **Exact values** - `text-[14px]`, `h-[26px]`
4. **Transitions** - `transition-colors` for interactive states

## Conditional Classes

```typescript
import { cn } from '@/lib/utils'

<div className={cn(
  'base-classes',
  isActive && 'active-classes',
  disabled ? 'opacity-60' : 'hover:bg-accent'
)} />
```

## CSS Variables

For dynamic values (widths, heights) synced with stores:

```typescript
// In store
setWidth: (width) => {
  set({ width })
  document.documentElement.style.setProperty('--sidebar-width', `${width}px`)
}

// In component
<aside style={{ width: 'var(--sidebar-width)' }} />
```

.claude/rules/sim-testing.md  (new file, 58 lines)
@@ -0,0 +1,58 @@
---
paths:
  - "apps/sim/**/*.test.ts"
  - "apps/sim/**/*.test.tsx"
---

# Testing Patterns

Use Vitest. Test files: `feature.ts` → `feature.test.ts`

## Structure

```typescript
/**
 * @vitest-environment node
 */
import { databaseMock, loggerMock } from '@sim/testing'
import { beforeEach, describe, expect, it, vi } from 'vitest'

vi.mock('@sim/db', () => databaseMock)
vi.mock('@sim/logger', () => loggerMock)

import { myFunction } from '@/lib/feature'

describe('myFunction', () => {
  beforeEach(() => vi.clearAllMocks())
  it.concurrent('isolated tests run in parallel', () => { ... })
})
```

## @sim/testing Package

Always prefer over local mocks.

| Category | Utilities |
|----------|-----------|
| **Mocks** | `loggerMock`, `databaseMock`, `setupGlobalFetchMock()` |
| **Factories** | `createSession()`, `createWorkflowRecord()`, `createBlock()`, `createExecutorContext()` |
| **Builders** | `WorkflowBuilder`, `ExecutionContextBuilder` |
| **Assertions** | `expectWorkflowAccessGranted()`, `expectBlockExecuted()` |

## Rules

1. `@vitest-environment node` directive at file top
2. `vi.mock()` calls before importing mocked modules
3. `@sim/testing` utilities over local mocks
4. `it.concurrent` for isolated tests (no shared mutable state)
5. `beforeEach(() => vi.clearAllMocks())` to reset state

## Hoisted Mocks

For mutable mock references:

```typescript
const mockFn = vi.hoisted(() => vi.fn())
vi.mock('@/lib/module', () => ({ myFunction: mockFn }))
mockFn.mockResolvedValue({ data: 'test' })
```

.claude/rules/sim-typescript.md  (new file, 21 lines)
@@ -0,0 +1,21 @@
---
paths:
  - "apps/sim/**/*.ts"
  - "apps/sim/**/*.tsx"
---

# TypeScript Rules

1. **No `any`** - Use proper types or `unknown` with type guards
2. **Props interface** - Always define for components
3. **Const assertions** - `as const` for constant objects/arrays
4. **Ref types** - Explicit: `useRef<HTMLDivElement>(null)`
5. **Type imports** - `import type { X }` for type-only imports

```typescript
// ✗ Bad
const handleClick = (e: any) => {}

// ✓ Good
const handleClick = (e: React.MouseEvent<HTMLButtonElement>) => {}
```
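
Rule 1's `unknown`-plus-type-guard alternative might look like this (a generic TypeScript sketch; `raw` and `handleWorkflow` are hypothetical):

```typescript
function hasId(value: unknown): value is { id: string } {
  return typeof value === 'object' && value !== null && 'id' in value
}

const payload: unknown = JSON.parse(raw)
if (hasId(payload)) {
  // payload is narrowed to { id: string } here - no `any` needed
  handleWorkflow(payload.id)
}
```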

@@ -8,7 +8,7 @@ alwaysApply: true
 You are a professional software engineer. All code must follow best practices: accurate, readable, clean, and efficient.

 ## Logging
-Import `createLogger` from `sim/logger`. Use `logger.info`, `logger.warn`, `logger.error` instead of `console.log`.
+Import `createLogger` from `@sim/logger`. Use `logger.info`, `logger.warn`, `logger.error` instead of `console.log`.

 ## Comments
 Use TSDoc for documentation. No `====` separators. No non-TSDoc comments.

@@ -14,7 +14,7 @@
 </p>

 <p align="center">
-<a href="https://deepwiki.com/simstudioai/sim" target="_blank" rel="noopener noreferrer"><img src="https://deepwiki.com/badge.svg" alt="Ask DeepWiki"></a> <a href="https://cursor.com/link/prompt?text=Help%20me%20set%20up%20Sim%20Studio%20locally.%20Follow%20these%20steps%3A%0A%0A1.%20First%2C%20verify%20Docker%20is%20installed%20and%20running%3A%0A%20%20%20docker%20--version%0A%20%20%20docker%20info%0A%0A2.%20Clone%20the%20repository%3A%0A%20%20%20git%20clone%20https%3A%2F%2Fgithub.com%2Fsimstudioai%2Fsim.git%0A%20%20%20cd%20sim%0A%0A3.%20Start%20the%20services%20with%20Docker%20Compose%3A%0A%20%20%20docker%20compose%20-f%20docker-compose.prod.yml%20up%20-d%0A%0A4.%20Wait%20for%20all%20containers%20to%20be%20healthy%20(this%20may%20take%201-2%20minutes)%3A%0A%20%20%20docker%20compose%20-f%20docker-compose.prod.yml%20ps%0A%0A5.%20Verify%20the%20app%20is%20accessible%20at%20http%3A%2F%2Flocalhost%3A3000%0A%0AIf%20there%20are%20any%20errors%2C%20help%20me%20troubleshoot%20them.%20Common%20issues%3A%0A-%20Port%203000%2C%203002%2C%20or%205432%20already%20in%20use%0A-%20Docker%20not%20running%0A-%20Insufficient%20memory%20(needs%2012GB%2B%20RAM)%0A%0AFor%20local%20AI%20models%20with%20Ollama%2C%20use%20this%20instead%20of%20step%203%3A%0A%20%20%20docker%20compose%20-f%20docker-compose.ollama.yml%20--profile%20setup%20up%20-d"><img src="https://img.shields.io/badge/Set%20Up%20with-Cursor-000000?logo=cursor&logoColor=white" alt="Set Up with Cursor"></a>
+<a href="https://deepwiki.com/simstudioai/sim" target="_blank" rel="noopener noreferrer"><img src="https://deepwiki.com/badge.svg" alt="Ask DeepWiki"></a> <a href="https://cursor.com/link/prompt?text=Help%20me%20set%20up%20Sim%20locally.%20Follow%20these%20steps%3A%0A%0A1.%20First%2C%20verify%20Docker%20is%20installed%20and%20running%3A%0A%20%20%20docker%20--version%0A%20%20%20docker%20info%0A%0A2.%20Clone%20the%20repository%3A%0A%20%20%20git%20clone%20https%3A%2F%2Fgithub.com%2Fsimstudioai%2Fsim.git%0A%20%20%20cd%20sim%0A%0A3.%20Start%20the%20services%20with%20Docker%20Compose%3A%0A%20%20%20docker%20compose%20-f%20docker-compose.prod.yml%20up%20-d%0A%0A4.%20Wait%20for%20all%20containers%20to%20be%20healthy%20(this%20may%20take%201-2%20minutes)%3A%0A%20%20%20docker%20compose%20-f%20docker-compose.prod.yml%20ps%0A%0A5.%20Verify%20the%20app%20is%20accessible%20at%20http%3A%2F%2Flocalhost%3A3000%0A%0AIf%20there%20are%20any%20errors%2C%20help%20me%20troubleshoot%20them.%20Common%20issues%3A%0A-%20Port%203000%2C%203002%2C%20or%205432%20already%20in%20use%0A-%20Docker%20not%20running%0A-%20Insufficient%20memory%20(needs%2012GB%2B%20RAM)%0A%0AFor%20local%20AI%20models%20with%20Ollama%2C%20use%20this%20instead%20of%20step%203%3A%0A%20%20%20docker%20compose%20-f%20docker-compose.ollama.yml%20--profile%20setup%20up%20-d"><img src="https://img.shields.io/badge/Set%20Up%20with-Cursor-000000?logo=cursor&logoColor=white" alt="Set Up with Cursor"></a>
 </p>

 ### Build Workflows with Ease

@@ -86,27 +86,112 @@ export async function GET(request: NextRequest) {
     )
     .limit(candidateLimit)

-  const seenIds = new Set<string>()
-  const mergedResults = []
   const knownLocales = ['en', 'es', 'fr', 'de', 'ja', 'zh']

-  for (let i = 0; i < Math.max(vectorResults.length, keywordResults.length); i++) {
-    if (i < vectorResults.length && !seenIds.has(vectorResults[i].chunkId)) {
-      mergedResults.push(vectorResults[i])
-      seenIds.add(vectorResults[i].chunkId)
-    }
-    if (i < keywordResults.length && !seenIds.has(keywordResults[i].chunkId)) {
-      mergedResults.push(keywordResults[i])
-      seenIds.add(keywordResults[i].chunkId)
+  const vectorRankMap = new Map<string, number>()
+  vectorResults.forEach((r, idx) => vectorRankMap.set(r.chunkId, idx + 1))
+
+  const keywordRankMap = new Map<string, number>()
+  keywordResults.forEach((r, idx) => keywordRankMap.set(r.chunkId, idx + 1))
+
+  const allChunkIds = new Set([
+    ...vectorResults.map((r) => r.chunkId),
+    ...keywordResults.map((r) => r.chunkId),
+  ])
+
+  const k = 60
+  type ResultWithRRF = (typeof vectorResults)[0] & { rrfScore: number }
+  const scoredResults: ResultWithRRF[] = []
+
+  for (const chunkId of allChunkIds) {
+    const vectorRank = vectorRankMap.get(chunkId) ?? Number.POSITIVE_INFINITY
+    const keywordRank = keywordRankMap.get(chunkId) ?? Number.POSITIVE_INFINITY
+
+    const rrfScore = 1 / (k + vectorRank) + 1 / (k + keywordRank)
+
+    const result =
+      vectorResults.find((r) => r.chunkId === chunkId) ||
+      keywordResults.find((r) => r.chunkId === chunkId)
+
+    if (result) {
+      scoredResults.push({ ...result, rrfScore })
     }
   }

-  const filteredResults = mergedResults.slice(0, limit)
-  const searchResults = filteredResults.map((result) => {
+  scoredResults.sort((a, b) => b.rrfScore - a.rrfScore)
+
+  const localeFilteredResults = scoredResults.filter((result) => {
+    const firstPart = result.sourceDocument.split('/')[0]
+    if (knownLocales.includes(firstPart)) {
+      return firstPart === locale
+    }
+    return locale === 'en'
+  })
+
+  const queryLower = query.toLowerCase()
+  const getTitleBoost = (result: ResultWithRRF): number => {
+    const fileName = result.sourceDocument
+      .replace('.mdx', '')
+      .split('/')
+      .pop()
+      ?.toLowerCase()
+      ?.replace(/_/g, ' ')
+
+    if (fileName === queryLower) return 0.01
+    if (fileName?.includes(queryLower)) return 0.005
+    return 0
+  }
+
+  localeFilteredResults.sort((a, b) => {
+    return b.rrfScore + getTitleBoost(b) - (a.rrfScore + getTitleBoost(a))
+  })
+
+  const pageMap = new Map<string, ResultWithRRF>()
+
+  for (const result of localeFilteredResults) {
+    const pageKey = result.sourceDocument
+    const existing = pageMap.get(pageKey)
+
+    if (!existing || result.rrfScore > existing.rrfScore) {
+      pageMap.set(pageKey, result)
+    }
+  }
+
+  const deduplicatedResults = Array.from(pageMap.values())
+    .sort((a, b) => b.rrfScore + getTitleBoost(b) - (a.rrfScore + getTitleBoost(a)))
+    .slice(0, limit)
+
+  const searchResults = deduplicatedResults.map((result) => {
     const title = result.headerText || result.sourceDocument.replace('.mdx', '')

+    const pathParts = result.sourceDocument
+      .replace('.mdx', '')
+      .split('/')
+      .map((part) => part.charAt(0).toUpperCase() + part.slice(1))
+      .filter((part) => part !== 'index' && !knownLocales.includes(part))
+      .map((part) => {
+        return part
+          .replace(/_/g, ' ')
+          .split(' ')
+          .map((word) => {
+            const acronyms = [
+              'api',
+              'mcp',
+              'sdk',
+              'url',
+              'http',
+              'json',
+              'xml',
+              'html',
+              'css',
+              'ai',
+            ]
+            if (acronyms.includes(word.toLowerCase())) {
+              return word.toUpperCase()
+            }
+            return word.charAt(0).toUpperCase() + word.slice(1)
+          })
+          .join(' ')
+      })
+
     return {
       id: result.chunkId,

@@ -1739,12 +1739,12 @@ export function BrowserUseIcon(props: SVGProps<SVGSVGElement>) {
       {...props}
       version='1.0'
       xmlns='http://www.w3.org/2000/svg'
-      width='150pt'
-      height='150pt'
+      width='28'
+      height='28'
       viewBox='0 0 150 150'
       preserveAspectRatio='xMidYMid meet'
     >
-      <g transform='translate(0,150) scale(0.05,-0.05)' fill='#000000' stroke='none'>
+      <g transform='translate(0,150) scale(0.05,-0.05)' fill='currentColor' stroke='none'>
         <path
           d='M786 2713 c-184 -61 -353 -217 -439 -405 -76 -165 -65 -539 19 -666
 l57 -85 -48 -124 c-203 -517 -79 -930 346 -1155 159 -85 441 -71 585 28 l111

@@ -4093,6 +4093,23 @@ export function SQSIcon(props: SVGProps<SVGSVGElement>) {
   )
 }

+export function TextractIcon(props: SVGProps<SVGSVGElement>) {
+  return (
+    <svg
+      {...props}
+      viewBox='10 14 60 52'
+      version='1.1'
+      xmlns='http://www.w3.org/2000/svg'
+      xmlnsXlink='http://www.w3.org/1999/xlink'
+    >
+      <path
+        d='M22.0624102,50 C24.3763895,53.603 28.4103535,56 33.0003125,56 C40.1672485,56 45.9991964,50.168 45.9991964,43 C45.9991964,35.832 40.1672485,30 33.0003125,30 C27.6033607,30 22.9664021,33.307 21.0024196,38 L23.2143999,38 C25.0393836,34.444 28.7363506,32 33.0003125,32 C39.0652583,32 43.9992143,36.935 43.9992143,43 C43.9992143,49.065 39.0652583,54 33.0003125,54 C29.5913429,54 26.5413702,52.441 24.5213882,50 L22.0624102,50 Z M37.0002768,45 L37.0002768,43 L41.9992321,43 C41.9992321,38.038 37.9622682,34 33.0003125,34 C28.0373568,34 23.9993929,38.038 23.9993929,43 L28.9993482,43 L28.9993482,45 L24.2313908,45 C25.1443826,49.002 28.7253507,52 33.0003125,52 C35.1362934,52 37.0992759,51.249 38.6442621,50 L34.0003036,50 L34.0003036,48 L40.4782457,48 C41.0812403,47.102 41.5202364,46.087 41.7682342,45 L37.0002768,45 Z M21.0024196,48 L23.2143999,48 C22.4434068,46.498 22.0004107,44.801 22.0004107,43 C22.0004107,41.959 22.1554093,40.955 22.4264069,40 L20.3634253,40 C20.1344274,40.965 19.9994286,41.966 19.9994286,43 C19.9994286,44.771 20.3584254,46.46 21.0024196,48 L21.0024196,48 Z M19.7434309,50 L17.0004554,50 L17.0004554,48 L18.8744386,48 C18.5344417,47.04 18.2894438,46.038 18.1494451,45 L15.4144695,45 L16.707458,46.293 L15.2924706,47.707 L12.2924974,44.707 C11.9025009,44.316 11.9025009,43.684 12.2924974,43.293 L15.2924706,40.293 L16.707458,41.707 L15.4144695,43 L18.0004464,43 C18.0004464,41.973 18.1044455,40.97 18.3024437,40 L17.0004554,40 L17.0004554,38 L18.8744386,38 C20.9404202,32.184 26.4833707,28 33.0003125,28 C37.427273,28 41.4002375,29.939 44.148213,33 L59.0000804,33 L59.0000804,35 L45.6661994,35 C47.1351863,37.318 47.9991786,40.058 47.9991786,43 L59.0000804,43 L59.0000804,45 L47.8501799,45 C46.8681887,52.327 40.5912447,58 33.0003125,58 C27.2563638,58 22.2624084,54.752 19.7434309,50 L19.7434309,50 Z M37.0002768,39 C37.0002768,38.448 36.5522808,38 36.0002857,38 L29.9993482,38 C29.4473442,38 28.9993482,38.448 28.9993482,39 L28.9993482,41 L31.0003304,41 L31.0003304,40 L32.0003214,40 L32.0003214,43 L31.0003304,43 L31.0003304,45 L35.0002946,45 L35.0002946,43 L34.0003036,43 L34.0003036,40 L35.0002946,40 L35.0002946,41 L37.0002768,41 L37.0002768,39 Z M49.0001696,40 L59.0000804,40 L59.0000804,38 L49.0001696,38 L49.0001696,40 Z M49.0001696,50 L59.0000804,50 L59.0000804,48 L49.0001696,48 L49.0001696,50 Z M57.0000982,27 L60.5850662,27 L57.0000982,23.414 L57.0000982,27 Z M63.7070383,27.293 C63.8940367,27.48 64.0000357,27.735 64.0000357,28 L64.0000357,63 C64.0000357,63.552 63.5520397,64 63.0000446,64 L32.0003304,64 C31.4473264,64 31.0003304,63.552 31.0003304,63 L31.0003304,59 L33.0003125,59 L33.0003125,62 L62.0000536,62 L62.0000536,29 L56.0001071,29 C55.4471121,29 55.0001161,28.552 55.0001161,28 L55.0001161,22 L33.0003125,22 L33.0003125,27 L31.0003304,27 L31.0003304,21 C31.0003304,20.448 31.4473264,20 32.0003304,20 L56.0001071,20 C56.2651048,20 56.5191025,20.105 56.7071008,20.293 L63.7070383,27.293 Z M68,24.166 L68,61 C68,61.552 67.552004,62 67.0000089,62 L65.0000268,62 L65.0000268,60 L66.0000179,60 L66.0000179,24.612 L58.6170838,18 L36.0002857,18 L36.0002857,19 L34.0003036,19 L34.0003036,17 C34.0003036,16.448 34.4472996,16 35.0003036,16 L59.0000804,16 C59.2460782,16 59.483076,16.091 59.6660744,16.255 L67.666003,23.42 C67.8780011,23.61 68,23.881 68,24.166 L68,24.166 Z'
+        fill='currentColor'
+      />
+    </svg>
+  )
+}
+
 export function McpIcon(props: SVGProps<SVGSVGElement>) {
   return (
     <svg

@@ -110,6 +110,7 @@ import {
   SupabaseIcon,
   TavilyIcon,
   TelegramIcon,
+  TextractIcon,
   TinybirdIcon,
   TranslateIcon,
   TrelloIcon,

@@ -143,7 +144,7 @@ export const blockTypeToIconMap: Record<string, IconComponent> = {
   calendly: CalendlyIcon,
   circleback: CirclebackIcon,
   clay: ClayIcon,
-  confluence: ConfluenceIcon,
+  confluence_v2: ConfluenceIcon,
   cursor_v2: CursorIcon,
   datadog: DatadogIcon,
   discord: DiscordIcon,

@@ -153,7 +154,7 @@ export const blockTypeToIconMap: Record<string, IconComponent> = {
   elasticsearch: ElasticsearchIcon,
   elevenlabs: ElevenLabsIcon,
   exa: ExaAIIcon,
-  file: DocumentIcon,
+  file_v2: DocumentIcon,
   firecrawl: FirecrawlIcon,
   fireflies: FirefliesIcon,
   github_v2: GithubIcon,

@@ -195,7 +196,7 @@ export const blockTypeToIconMap: Record<string, IconComponent> = {
   microsoft_excel_v2: MicrosoftExcelIcon,
   microsoft_planner: MicrosoftPlannerIcon,
   microsoft_teams: MicrosoftTeamsIcon,
-  mistral_parse: MistralIcon,
+  mistral_parse_v2: MistralIcon,
   mongodb: MongoDBIcon,
   mysql: MySQLIcon,
   neo4j: Neo4jIcon,

@@ -237,6 +238,7 @@ export const blockTypeToIconMap: Record<string, IconComponent> = {
   supabase: SupabaseIcon,
   tavily: TavilyIcon,
   telegram: TelegramIcon,
+  textract: TextractIcon,
   tinybird: TinybirdIcon,
   translate: TranslateIcon,
   trello: TrelloIcon,

@@ -244,7 +246,7 @@ export const blockTypeToIconMap: Record<string, IconComponent> = {
   twilio_sms: TwilioIcon,
   twilio_voice: TwilioIcon,
   typeform: TypeformIcon,
-  video_generator: VideoIcon,
+  video_generator_v2: VideoIcon,
   vision: EyeIcon,
   wealthbox: WealthboxIcon,
   webflow: WebflowIcon,

@@ -7,7 +7,7 @@ import { BlockInfoCard } from "@/components/ui/block-info-card"

 <BlockInfoCard
   type="browser_use"
-  color="#E0E0E0"
+  color="#181C1E"
 />

 {/* MANUAL-CONTENT-START:intro */}

@@ -6,7 +6,7 @@ description: Interact with Confluence
 import { BlockInfoCard } from "@/components/ui/block-info-card"

 <BlockInfoCard
-  type="confluence"
+  type="confluence_v2"
   color="#E0E0E0"
 />

@@ -6,7 +6,7 @@ description: Read and parse multiple files
 import { BlockInfoCard } from "@/components/ui/block-info-card"

 <BlockInfoCard
-  type="file"
+  type="file_v2"
   color="#40916C"
 />

@@ -48,7 +48,7 @@ Parse one or more uploaded files or files from URLs (text, PDF, CSV, images, etc

 | Parameter | Type | Description |
 | --------- | ---- | ----------- |
-| `files` | array | Array of parsed files |
-| `combinedContent` | string | Combined content of all parsed files |
+| `files` | array | Array of parsed files with content, metadata, and file properties |
+| `combinedContent` | string | All file contents merged into a single text string |

@@ -52,6 +52,15 @@ Read content from a Google Slides presentation
 | --------- | ---- | ----------- |
 | `slides` | json | Array of slides with their content |
 | `metadata` | json | Presentation metadata including ID, title, and URL |
+| ↳ `presentationId` | string | The presentation ID |
+| ↳ `title` | string | The presentation title |
+| ↳ `pageSize` | object | Presentation page size |
+| ↳ `width` | json | Page width as a Dimension object |
+| ↳ `height` | json | Page height as a Dimension object |
+| ↳ `width` | json | Page width as a Dimension object |
+| ↳ `height` | json | Page height as a Dimension object |
+| ↳ `mimeType` | string | The mime type of the presentation |
+| ↳ `url` | string | URL to open the presentation |

 ### `google_slides_write`

@@ -71,6 +80,10 @@ Write or update content in a Google Slides presentation
 | --------- | ---- | ----------- |
 | `updatedContent` | boolean | Indicates if presentation content was updated successfully |
 | `metadata` | json | Updated presentation metadata including ID, title, and URL |
+| ↳ `presentationId` | string | The presentation ID |
+| ↳ `title` | string | The presentation title |
+| ↳ `mimeType` | string | The mime type of the presentation |
+| ↳ `url` | string | URL to open the presentation |

 ### `google_slides_create`

@@ -90,6 +103,10 @@ Create a new Google Slides presentation
 | Parameter | Type | Description |
 | --------- | ---- | ----------- |
 | `metadata` | json | Created presentation metadata including ID, title, and URL |
+| ↳ `presentationId` | string | The presentation ID |
+| ↳ `title` | string | The presentation title |
+| ↳ `mimeType` | string | The mime type of the presentation |
+| ↳ `url` | string | URL to open the presentation |

 ### `google_slides_replace_all_text`

@@ -111,6 +128,10 @@ Find and replace all occurrences of text throughout a Google Slides presentation
 | --------- | ---- | ----------- |
 | `occurrencesChanged` | number | Number of text occurrences that were replaced |
 | `metadata` | json | Operation metadata including presentation ID and URL |
+| ↳ `presentationId` | string | The presentation ID |
+| ↳ `findText` | string | The text that was searched for |
+| ↳ `replaceText` | string | The text that replaced the matches |
+| ↳ `url` | string | URL to open the presentation |

 ### `google_slides_add_slide`

@@ -131,6 +152,10 @@ Add a new slide to a Google Slides presentation with a specified layout
 | --------- | ---- | ----------- |
 | `slideId` | string | The object ID of the newly created slide |
 | `metadata` | json | Operation metadata including presentation ID, layout, and URL |
+| ↳ `presentationId` | string | The presentation ID |
+| ↳ `layout` | string | The layout used for the new slide |
+| ↳ `insertionIndex` | number | The zero-based index where the slide was inserted |
+| ↳ `url` | string | URL to open the presentation |

 ### `google_slides_add_image`

@@ -154,6 +179,10 @@ Insert an image into a specific slide in a Google Slides presentation
 | --------- | ---- | ----------- |
 | `imageId` | string | The object ID of the newly created image |
 | `metadata` | json | Operation metadata including presentation ID and image URL |
+| ↳ `presentationId` | string | The presentation ID |
+| ↳ `pageObjectId` | string | The page object ID where the image was inserted |
+| ↳ `imageUrl` | string | The source image URL |
+| ↳ `url` | string | URL to open the presentation |

 ### `google_slides_get_thumbnail`

@@ -176,6 +205,10 @@ Generate a thumbnail image of a specific slide in a Google Slides presentation
 | `width` | number | Width of the thumbnail in pixels |
 | `height` | number | Height of the thumbnail in pixels |
 | `metadata` | json | Operation metadata including presentation ID and page object ID |
+| ↳ `presentationId` | string | The presentation ID |
+| ↳ `pageObjectId` | string | The page object ID for the thumbnail |
+| ↳ `thumbnailSize` | string | The requested thumbnail size |
+| ↳ `mimeType` | string | The thumbnail MIME type |

 ### `google_slides_get_page`

@@ -106,6 +106,7 @@
   "supabase",
   "tavily",
   "telegram",
+  "textract",
   "tinybird",
   "translate",
   "trello",

@@ -6,7 +6,7 @@ description: Extract text from PDF documents
 import { BlockInfoCard } from "@/components/ui/block-info-card"

 <BlockInfoCard
-  type="mistral_parse"
+  type="mistral_parse_v2"
   color="#000000"
 />

@@ -54,18 +54,37 @@ Parse PDF documents using Mistral OCR API

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether the PDF was parsed successfully |
| `content` | string | Extracted content in the requested format \(markdown, text, or JSON\) |
| `metadata` | object | Processing metadata including jobId, fileType, pageCount, and usage info |
| ↳ `jobId` | string | Unique job identifier |
| ↳ `fileType` | string | File type \(e.g., pdf\) |
| ↳ `fileName` | string | Original file name |
| ↳ `source` | string | Source type \(url\) |
| ↳ `pageCount` | number | Number of pages processed |
| ↳ `model` | string | Mistral model used |
| ↳ `resultType` | string | Output format \(markdown, text, json\) |
| ↳ `processedAt` | string | Processing timestamp |
| ↳ `sourceUrl` | string | Source URL if applicable |
| ↳ `usageInfo` | object | Usage statistics from OCR processing |
| `pages` | array | Array of page objects from Mistral OCR |
| ↳ `index` | number | Page index \(zero-based\) |
| ↳ `markdown` | string | Extracted markdown content |
| ↳ `images` | array | Images extracted from this page with bounding boxes |
| ↳ `id` | string | Image identifier \(e.g., img-0.jpeg\) |
| ↳ `top_left_x` | number | Top-left X coordinate in pixels |
| ↳ `top_left_y` | number | Top-left Y coordinate in pixels |
| ↳ `bottom_right_x` | number | Bottom-right X coordinate in pixels |
| ↳ `bottom_right_y` | number | Bottom-right Y coordinate in pixels |
| ↳ `image_base64` | string | Base64-encoded image data \(when include_image_base64=true\) |
| ↳ `id` | string | Image identifier \(e.g., img-0.jpeg\) |
| ↳ `top_left_x` | number | Top-left X coordinate in pixels |
| ↳ `top_left_y` | number | Top-left Y coordinate in pixels |
| ↳ `bottom_right_x` | number | Bottom-right X coordinate in pixels |
| ↳ `bottom_right_y` | number | Bottom-right Y coordinate in pixels |
| ↳ `image_base64` | string | Base64-encoded image data \(when include_image_base64=true\) |
| ↳ `dimensions` | object | Page dimensions |
| ↳ `dpi` | number | Dots per inch |
| ↳ `height` | number | Page height in pixels |
| ↳ `width` | number | Page width in pixels |
| ↳ `dpi` | number | Dots per inch |
| ↳ `height` | number | Page height in pixels |
| ↳ `width` | number | Page width in pixels |
| ↳ `tables` | array | Extracted tables as HTML/markdown \(when table_format is set\). Referenced via placeholders like \[tbl-0.html\] |
| ↳ `hyperlinks` | array | Array of URL strings detected in the page \(e.g., \[ |
| ↳ `header` | string | Page header content \(when extract_header=true\) |
| ↳ `footer` | string | Page footer content \(when extract_footer=true\) |
| `model` | string | Mistral OCR model identifier \(e.g., mistral-ocr-latest\) |
| `usage_info` | object | Usage and processing statistics |
| ↳ `pages_processed` | number | Total number of pages processed |
| ↳ `doc_size_bytes` | number | Document file size in bytes |
| `document_annotation` | string | Structured annotation data as JSON string \(when applicable\) |

@@ -58,6 +58,7 @@ Upload a file to an AWS S3 bucket
 | Parameter | Type | Description |
 | --------- | ---- | ----------- |
 | `url` | string | URL of the uploaded S3 object |
+| `uri` | string | S3 URI of the uploaded object \(s3://bucket/key\) |
 | `metadata` | object | Upload metadata including ETag and location |

 ### `s3_get_object`

@@ -149,6 +150,7 @@ Copy an object within or between AWS S3 buckets
 | Parameter | Type | Description |
 | --------- | ---- | ----------- |
 | `url` | string | URL of the copied S3 object |
+| `uri` | string | S3 URI of the copied object \(s3://bucket/key\) |
 | `metadata` | object | Copy operation metadata |

apps/docs/content/docs/en/tools/textract.mdx  (new file, 120 lines)
@@ -0,0 +1,120 @@
|
||||
---
|
||||
title: AWS Textract
|
||||
description: Extract text, tables, and forms from documents
|
||||
---
|
||||
|
||||
import { BlockInfoCard } from "@/components/ui/block-info-card"
|
||||
|
||||
<BlockInfoCard
|
||||
type="textract"
|
||||
color="linear-gradient(135deg, #055F4E 0%, #56C0A7 100%)"
|
||||
/>
|
||||
|
||||
{/* MANUAL-CONTENT-START:intro */}
|
||||
[AWS Textract](https://aws.amazon.com/textract/) is a powerful AI service from Amazon Web Services designed to automatically extract printed text, handwriting, tables, forms, key-value pairs, and other structured data from scanned documents and images. Textract leverages advanced optical character recognition (OCR) and document analysis to transform documents into actionable data, enabling automation, analytics, compliance, and more.
|
||||
|
||||
With AWS Textract, you can:
|
||||
|
||||
- **Extract text from images and documents**: Recognize printed text and handwriting in formats such as PDF, JPEG, PNG, or TIFF
|
||||
- **Detect and extract tables**: Automatically find tables and output their structured content
|
||||
- **Parse forms and key-value pairs**: Pull structured data from forms, including fields and their corresponding values
|
||||
- **Identify signatures and layout features**: Detect signatures, geometric layout, and relationships between document elements
|
||||
- **Customize extraction with queries**: Extract specific fields and answers using query-based extraction (e.g., "What is the invoice number?")
|
||||
|
||||
In Sim, the AWS Textract integration empowers your agents to intelligently process documents as part of their workflows. This unlocks automation scenarios such as data entry from invoices, onboarding documents, contracts, receipts, and more. Your agents can extract relevant data, analyze structured forms, and generate summaries or reports directly from document uploads or URLs. By connecting Sim with AWS Textract, you can reduce manual effort, improve data accuracy, and streamline your business processes with robust document understanding.
|
||||
{/* MANUAL-CONTENT-END */}
|
||||
|
||||
|
||||
## Usage Instructions
|
||||
|
||||
Integrate AWS Textract into your workflow to extract text, tables, forms, and key-value pairs from documents. Single-page mode supports JPEG, PNG, and single-page PDF. Multi-page mode supports multi-page PDF and TIFF.
|
||||
|
||||
|
||||
|
||||
## Tools
|
||||
|
||||
### `textract_parser`
|
||||
|
||||
Parse documents using AWS Textract OCR and document analysis
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `accessKeyId` | string | Yes | AWS Access Key ID |
|
||||
| `secretAccessKey` | string | Yes | AWS Secret Access Key |
|
||||
| `region` | string | Yes | AWS region for Textract service \(e.g., us-east-1\) |
|
||||
| `processingMode` | string | No | Document type: single-page or multi-page. Defaults to single-page. |
|
||||
| `filePath` | string | No | URL to a document to be processed \(JPEG, PNG, or single-page PDF\). |
|
||||
| `s3Uri` | string | No | S3 URI for multi-page processing \(s3://bucket/key\). |
|
||||
| `fileUpload` | object | No | File upload data from file-upload component |
|
||||
| `featureTypes` | array | No | Feature types to detect: TABLES, FORMS, QUERIES, SIGNATURES, LAYOUT. If not specified, only text detection is performed. |
|
||||
| `items` | string | No | Feature type |
|
||||
| `queries` | array | No | Custom queries to extract specific information. Only used when featureTypes includes QUERIES. |
|
||||
| `items` | object | No | Query configuration |
|
||||
| `properties` | string | No | The query text |
|
||||
| `Text` | string | No | No description |
|
||||
| `Alias` | string | No | No description |
|
||||
|
||||
#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `blocks` | array | Array of Block objects containing detected text, tables, forms, and other elements |
| ↳ `BlockType` | string | Type of block \(PAGE, LINE, WORD, TABLE, CELL, KEY_VALUE_SET, etc.\) |
| ↳ `Id` | string | Unique identifier for the block |
| ↳ `Text` | string | Detected text content |
| ↳ `TextType` | string | Type of text \(PRINTED or HANDWRITING\) |
| ↳ `Confidence` | number | Confidence score \(0-100\) |
| ↳ `Page` | number | Page number |
| ↳ `Geometry` | object | Location and bounding box information |
| ↳ `BoundingBox` | object | Bounding box expressed as ratios of the document dimensions |
| ↳ `Height` | number | Height as ratio of document height |
| ↳ `Left` | number | Left position as ratio of document width |
| ↳ `Top` | number | Top position as ratio of document height |
| ↳ `Width` | number | Width as ratio of document width |
| ↳ `Polygon` | array | Polygon coordinates |
| ↳ `X` | number | X coordinate |
| ↳ `Y` | number | Y coordinate |
| ↳ `Relationships` | array | Relationships to other blocks |
| ↳ `Type` | string | Relationship type \(CHILD, VALUE, ANSWER, etc.\) |
| ↳ `Ids` | array | IDs of related blocks |
| ↳ `EntityTypes` | array | Entity types for KEY_VALUE_SET \(KEY or VALUE\) |
| ↳ `SelectionStatus` | string | For checkboxes: SELECTED or NOT_SELECTED |
| ↳ `RowIndex` | number | Row index for table cells |
| ↳ `ColumnIndex` | number | Column index for table cells |
| ↳ `RowSpan` | number | Row span for merged cells |
| ↳ `ColumnSpan` | number | Column span for merged cells |
| ↳ `Query` | object | Query information for QUERY blocks |
| ↳ `Text` | string | Query text |
| ↳ `Alias` | string | Query alias |
| ↳ `Pages` | array | Pages to search |
| `documentMetadata` | object | Metadata about the analyzed document |
| ↳ `pages` | number | Number of pages in the document |
| `modelVersion` | string | Version of the Textract model used for processing |
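
The `blocks` array is flat; structure \(tables, key-value pairs, query answers\) is recovered by following `Relationships`. A minimal sketch of consuming this shape, assuming only the fields listed above and Textract's QUERY_RESULT block type for query answers:

```ts
// Hypothetical helper over the output above; Block is trimmed to the fields used here.
interface Block {
  BlockType: string
  Id: string
  Text?: string
  Confidence?: number
}

function summarizeBlocks(blocks: Block[]) {
  // Plain text lines, in the order Textract returns them
  const lines = blocks
    .filter((b) => b.BlockType === 'LINE' && b.Text)
    .map((b) => b.Text as string)
  // Answers produced for any configured queries
  const queryAnswers = blocks
    .filter((b) => b.BlockType === 'QUERY_RESULT' && b.Text)
    .map((b) => b.Text as string)
  return { lines, queryAnswers }
}
```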
@@ -6,7 +6,7 @@ description: Generate videos from text using AI

import { BlockInfoCard } from "@/components/ui/block-info-card"

<BlockInfoCard
-  type="video_generator"
+  type="video_generator_v2"
  color="#181C1E"
/>
@@ -2,10 +2,9 @@

import { useEffect, useState } from 'react'
import { createLogger } from '@sim/logger'
-import { ArrowRight, ChevronRight, Eye, EyeOff } from 'lucide-react'
+import { Eye, EyeOff } from 'lucide-react'
import Link from 'next/link'
import { useRouter, useSearchParams } from 'next/navigation'
-import { Button } from '@/components/ui/button'
import {
  Dialog,
  DialogContent,

@@ -22,8 +21,10 @@ import { getBaseUrl } from '@/lib/core/utils/urls'

import { quickValidateEmail } from '@/lib/messaging/email/validation'
import { inter } from '@/app/_styles/fonts/inter/inter'
import { soehne } from '@/app/_styles/fonts/soehne/soehne'
+import { BrandedButton } from '@/app/(auth)/components/branded-button'
import { SocialLoginButtons } from '@/app/(auth)/components/social-login-buttons'
import { SSOLoginButton } from '@/app/(auth)/components/sso-login-button'
+import { useBrandedButtonClass } from '@/hooks/use-branded-button-class'

const logger = createLogger('LoginForm')

@@ -105,8 +106,7 @@ export default function LoginPage({

  const [password, setPassword] = useState('')
  const [passwordErrors, setPasswordErrors] = useState<string[]>([])
  const [showValidationError, setShowValidationError] = useState(false)
-  const [buttonClass, setButtonClass] = useState('branded-button-gradient')
-  const [isButtonHovered, setIsButtonHovered] = useState(false)
+  const buttonClass = useBrandedButtonClass()

  const [callbackUrl, setCallbackUrl] = useState('/workspace')
  const [isInviteFlow, setIsInviteFlow] = useState(false)

@@ -114,7 +114,6 @@ export default function LoginPage({

  const [forgotPasswordOpen, setForgotPasswordOpen] = useState(false)
  const [forgotPasswordEmail, setForgotPasswordEmail] = useState('')
  const [isSubmittingReset, setIsSubmittingReset] = useState(false)
-  const [isResetButtonHovered, setIsResetButtonHovered] = useState(false)
  const [resetStatus, setResetStatus] = useState<{
    type: 'success' | 'error' | null
    message: string

@@ -123,6 +122,7 @@ export default function LoginPage({

  const [email, setEmail] = useState('')
  const [emailErrors, setEmailErrors] = useState<string[]>([])
  const [showEmailValidationError, setShowEmailValidationError] = useState(false)
+  const [resetSuccessMessage, setResetSuccessMessage] = useState<string | null>(null)

  useEffect(() => {
    setMounted(true)

@@ -139,32 +139,12 @@ export default function LoginPage({

      const inviteFlow = searchParams.get('invite_flow') === 'true'
      setIsInviteFlow(inviteFlow)
    }

-    const checkCustomBrand = () => {
-      const computedStyle = getComputedStyle(document.documentElement)
-      const brandAccent = computedStyle.getPropertyValue('--brand-accent-hex').trim()
-
-      if (brandAccent && brandAccent !== '#6f3dfa') {
-        setButtonClass('branded-button-custom')
-      } else {
-        setButtonClass('branded-button-gradient')
-      }
-    }
-
-    checkCustomBrand()
-
-    window.addEventListener('resize', checkCustomBrand)
-    const observer = new MutationObserver(checkCustomBrand)
-    observer.observe(document.documentElement, {
-      attributes: true,
-      attributeFilter: ['style', 'class'],
-    })
-
-    return () => {
-      window.removeEventListener('resize', checkCustomBrand)
-      observer.disconnect()
-    }
+    const resetSuccess = searchParams.get('resetSuccess') === 'true'
+    if (resetSuccess) {
+      setResetSuccessMessage('Password reset successful. Please sign in with your new password.')
+    }
  }, [searchParams])

@@ -202,6 +182,13 @@ export default function LoginPage({

    e.preventDefault()
    setIsLoading(true)

+    const redirectToVerify = (emailToVerify: string) => {
+      if (typeof window !== 'undefined') {
+        sessionStorage.setItem('verificationEmail', emailToVerify)
+      }
+      router.push('/verify')
+    }

    const formData = new FormData(e.currentTarget)
    const emailRaw = formData.get('email') as string
    const email = emailRaw.trim().toLowerCase()

@@ -221,6 +208,7 @@ export default function LoginPage({

    try {
      const safeCallbackUrl = validateCallbackUrl(callbackUrl) ? callbackUrl : '/workspace'
+      let errorHandled = false

      const result = await client.signIn.email(
        {

@@ -231,11 +219,16 @@ export default function LoginPage({

        {
          onError: (ctx) => {
            logger.error('Login error:', ctx.error)
-            const errorMessage: string[] = ['Invalid email or password']

            if (ctx.error.code?.includes('EMAIL_NOT_VERIFIED')) {
+              errorHandled = true
              redirectToVerify(email)
              return
            }

+            errorHandled = true
+            const errorMessage: string[] = ['Invalid email or password']

            if (
              ctx.error.code?.includes('BAD_REQUEST') ||
              ctx.error.message?.includes('Email and password sign in is not enabled')

@@ -271,6 +264,7 @@ export default function LoginPage({

              errorMessage.push('Too many requests. Please wait a moment before trying again.')
            }

+            setResetSuccessMessage(null)
            setPasswordErrors(errorMessage)
            setShowValidationError(true)
          },

@@ -278,15 +272,25 @@ export default function LoginPage({

      )

      if (!result || result.error) {
+        // Show error if not already handled by onError callback
+        if (!errorHandled) {
+          setResetSuccessMessage(null)
+          const errorMessage = result?.error?.message || 'Login failed. Please try again.'
+          setPasswordErrors([errorMessage])
+          setShowValidationError(true)
+        }
        setIsLoading(false)
        return
      }

+      // Clear reset success message on successful login
+      setResetSuccessMessage(null)

      // Explicit redirect fallback if better-auth doesn't redirect
      router.push(safeCallbackUrl)
    } catch (err: any) {
      if (err.message?.includes('not verified') || err.code?.includes('EMAIL_NOT_VERIFIED')) {
-        if (typeof window !== 'undefined') {
-          sessionStorage.setItem('verificationEmail', email)
-        }
-        router.push('/verify')
+        redirectToVerify(email)
        return
      }

@@ -400,6 +404,13 @@ export default function LoginPage({

        </div>
      )}

+      {/* Password reset success message */}
+      {resetSuccessMessage && (
+        <div className={`${inter.className} mt-1 space-y-1 text-[#4CAF50] text-xs`}>
+          <p>{resetSuccessMessage}</p>
+        </div>
+      )}

      {/* Email/Password Form - show unless explicitly disabled */}
      {!isFalsy(getEnv('NEXT_PUBLIC_EMAIL_PASSWORD_SIGNUP_ENABLED')) && (
        <form onSubmit={onSubmit} className={`${inter.className} mt-8 space-y-8`}>

@@ -482,24 +493,14 @@ export default function LoginPage({

            </div>
          </div>

-          <Button
+          <BrandedButton
            type='submit'
-            onMouseEnter={() => setIsButtonHovered(true)}
-            onMouseLeave={() => setIsButtonHovered(false)}
-            className='group inline-flex w-full items-center justify-center gap-2 rounded-[10px] border border-[#6F3DFA] bg-gradient-to-b from-[#8357FF] to-[#6F3DFA] py-[6px] pr-[10px] pl-[12px] text-[15px] text-white shadow-[inset_0_2px_4px_0_#9B77FF] transition-all'
-            disabled={isLoading}
+            loading={isLoading}
+            loadingText='Signing in'
          >
-            <span className='flex items-center gap-1'>
-              {isLoading ? 'Signing in...' : 'Sign in'}
-              <span className='inline-flex transition-transform duration-200 group-hover:translate-x-0.5'>
-                {isButtonHovered ? (
-                  <ArrowRight className='h-4 w-4' aria-hidden='true' />
-                ) : (
-                  <ChevronRight className='h-4 w-4' aria-hidden='true' />
-                )}
-              </span>
-            </span>
-          </Button>
+            Sign in
+          </BrandedButton>
        </form>
      )}

@@ -610,25 +611,15 @@ export default function LoginPage({

            <p>{resetStatus.message}</p>
          </div>
        )}
-        <Button
+        <BrandedButton
          type='button'
          onClick={handleForgotPassword}
-          onMouseEnter={() => setIsResetButtonHovered(true)}
-          onMouseLeave={() => setIsResetButtonHovered(false)}
-          className='group inline-flex w-full items-center justify-center gap-2 rounded-[10px] border border-[#6F3DFA] bg-gradient-to-b from-[#8357FF] to-[#6F3DFA] py-[6px] pr-[10px] pl-[12px] text-[15px] text-white shadow-[inset_0_2px_4px_0_#9B77FF] transition-all'
-          disabled={isSubmittingReset}
+          loading={isSubmittingReset}
+          loadingText='Sending'
        >
-          <span className='flex items-center gap-1'>
-            {isSubmittingReset ? 'Sending...' : 'Send Reset Link'}
-            <span className='inline-flex transition-transform duration-200 group-hover:translate-x-0.5'>
-              {isResetButtonHovered ? (
-                <ArrowRight className='h-4 w-4' aria-hidden='true' />
-              ) : (
-                <ChevronRight className='h-4 w-4' aria-hidden='true' />
-              )}
-            </span>
-          </span>
-        </Button>
+          Send Reset Link
+        </BrandedButton>
      </div>
    </DialogContent>
  </Dialog>
@@ -1,12 +1,12 @@

'use client'

-import { useEffect, useState } from 'react'
-import { ArrowRight, ChevronRight, Eye, EyeOff } from 'lucide-react'
-import { Button } from '@/components/ui/button'
+import { useState } from 'react'
+import { Eye, EyeOff } from 'lucide-react'
import { Input } from '@/components/ui/input'
import { Label } from '@/components/ui/label'
import { cn } from '@/lib/core/utils/cn'
import { inter } from '@/app/_styles/fonts/inter/inter'
+import { BrandedButton } from '@/app/(auth)/components/branded-button'

interface RequestResetFormProps {
  email: string

@@ -27,36 +27,6 @@ export function RequestResetForm({

  statusMessage,
  className,
}: RequestResetFormProps) {
-  const [buttonClass, setButtonClass] = useState('branded-button-gradient')
-  const [isButtonHovered, setIsButtonHovered] = useState(false)
-
-  useEffect(() => {
-    const checkCustomBrand = () => {
-      const computedStyle = getComputedStyle(document.documentElement)
-      const brandAccent = computedStyle.getPropertyValue('--brand-accent-hex').trim()
-
-      if (brandAccent && brandAccent !== '#6f3dfa') {
-        setButtonClass('branded-button-custom')
-      } else {
-        setButtonClass('branded-button-gradient')
-      }
-    }
-
-    checkCustomBrand()
-
-    window.addEventListener('resize', checkCustomBrand)
-    const observer = new MutationObserver(checkCustomBrand)
-    observer.observe(document.documentElement, {
-      attributes: true,
-      attributeFilter: ['style', 'class'],
-    })
-
-    return () => {
-      window.removeEventListener('resize', checkCustomBrand)
-      observer.disconnect()
-    }
-  }, [])

  const handleSubmit = async (e: React.FormEvent) => {
    e.preventDefault()
    onSubmit(email)

@@ -94,24 +64,14 @@

      )}
    </div>

-    <Button
+    <BrandedButton
      type='submit'
      disabled={isSubmitting}
-      onMouseEnter={() => setIsButtonHovered(true)}
-      onMouseLeave={() => setIsButtonHovered(false)}
-      className='group inline-flex w-full items-center justify-center gap-2 rounded-[10px] border border-[#6F3DFA] bg-gradient-to-b from-[#8357FF] to-[#6F3DFA] py-[6px] pr-[10px] pl-[12px] text-[15px] text-white shadow-[inset_0_2px_4px_0_#9B77FF] transition-all'
+      loading={isSubmitting}
+      loadingText='Sending'
    >
-      <span className='flex items-center gap-1'>
-        {isSubmitting ? 'Sending...' : 'Send Reset Link'}
-        <span className='inline-flex transition-transform duration-200 group-hover:translate-x-0.5'>
-          {isButtonHovered ? (
-            <ArrowRight className='h-4 w-4' aria-hidden='true' />
-          ) : (
-            <ChevronRight className='h-4 w-4' aria-hidden='true' />
-          )}
-        </span>
-      </span>
-    </Button>
+      Send Reset Link
+    </BrandedButton>
  </form>
)
}

@@ -138,35 +98,6 @@ export function SetNewPasswordForm({

  const [validationMessage, setValidationMessage] = useState('')
  const [showPassword, setShowPassword] = useState(false)
  const [showConfirmPassword, setShowConfirmPassword] = useState(false)
-  const [buttonClass, setButtonClass] = useState('branded-button-gradient')
-  const [isButtonHovered, setIsButtonHovered] = useState(false)
-
-  useEffect(() => {
-    const checkCustomBrand = () => {
-      const computedStyle = getComputedStyle(document.documentElement)
-      const brandAccent = computedStyle.getPropertyValue('--brand-accent-hex').trim()
-
-      if (brandAccent && brandAccent !== '#6f3dfa') {
-        setButtonClass('branded-button-custom')
-      } else {
-        setButtonClass('branded-button-gradient')
-      }
-    }
-
-    checkCustomBrand()
-
-    window.addEventListener('resize', checkCustomBrand)
-    const observer = new MutationObserver(checkCustomBrand)
-    observer.observe(document.documentElement, {
-      attributes: true,
-      attributeFilter: ['style', 'class'],
-    })
-
-    return () => {
-      window.removeEventListener('resize', checkCustomBrand)
-      observer.disconnect()
-    }
-  }, [])

  const handleSubmit = async (e: React.FormEvent) => {
    e.preventDefault()

@@ -296,24 +227,14 @@ export function SetNewPasswordForm({

      )}
    </div>

-    <Button
-      disabled={isSubmitting || !token}
+    <BrandedButton
      type='submit'
-      onMouseEnter={() => setIsButtonHovered(true)}
-      onMouseLeave={() => setIsButtonHovered(false)}
-      className='group inline-flex w-full items-center justify-center gap-2 rounded-[10px] border border-[#6F3DFA] bg-gradient-to-b from-[#8357FF] to-[#6F3DFA] py-[6px] pr-[10px] pl-[12px] text-[15px] text-white shadow-[inset_0_2px_4px_0_#9B77FF] transition-all'
+      disabled={isSubmitting || !token}
+      loading={isSubmitting}
+      loadingText='Resetting'
    >
-      <span className='flex items-center gap-1'>
-        {isSubmitting ? 'Resetting...' : 'Reset Password'}
-        <span className='inline-flex transition-transform duration-200 group-hover:translate-x-0.5'>
-          {isButtonHovered ? (
-            <ArrowRight className='h-4 w-4' aria-hidden='true' />
-          ) : (
-            <ChevronRight className='h-4 w-4' aria-hidden='true' />
-          )}
-        </span>
-      </span>
-    </Button>
+      Reset Password
+    </BrandedButton>
  </form>
)
}
@@ -2,10 +2,9 @@

import { Suspense, useEffect, useState } from 'react'
import { createLogger } from '@sim/logger'
-import { ArrowRight, ChevronRight, Eye, EyeOff } from 'lucide-react'
+import { Eye, EyeOff } from 'lucide-react'
import Link from 'next/link'
import { useRouter, useSearchParams } from 'next/navigation'
-import { Button } from '@/components/ui/button'
import { Input } from '@/components/ui/input'
import { Label } from '@/components/ui/label'
import { client, useSession } from '@/lib/auth/auth-client'

@@ -14,8 +13,10 @@ import { cn } from '@/lib/core/utils/cn'

import { quickValidateEmail } from '@/lib/messaging/email/validation'
import { inter } from '@/app/_styles/fonts/inter/inter'
import { soehne } from '@/app/_styles/fonts/soehne/soehne'
+import { BrandedButton } from '@/app/(auth)/components/branded-button'
import { SocialLoginButtons } from '@/app/(auth)/components/social-login-buttons'
import { SSOLoginButton } from '@/app/(auth)/components/sso-login-button'
+import { useBrandedButtonClass } from '@/hooks/use-branded-button-class'

const logger = createLogger('SignupForm')

@@ -95,8 +96,7 @@ function SignupFormContent({

  const [showEmailValidationError, setShowEmailValidationError] = useState(false)
  const [redirectUrl, setRedirectUrl] = useState('')
  const [isInviteFlow, setIsInviteFlow] = useState(false)
-  const [buttonClass, setButtonClass] = useState('branded-button-gradient')
-  const [isButtonHovered, setIsButtonHovered] = useState(false)
+  const buttonClass = useBrandedButtonClass()

  const [name, setName] = useState('')
  const [nameErrors, setNameErrors] = useState<string[]>([])

@@ -126,31 +126,6 @@ function SignupFormContent({

    if (inviteFlowParam === 'true') {
      setIsInviteFlow(true)
    }
-
-    const checkCustomBrand = () => {
-      const computedStyle = getComputedStyle(document.documentElement)
-      const brandAccent = computedStyle.getPropertyValue('--brand-accent-hex').trim()
-
-      if (brandAccent && brandAccent !== '#6f3dfa') {
-        setButtonClass('branded-button-custom')
-      } else {
-        setButtonClass('branded-button-gradient')
-      }
-    }
-
-    checkCustomBrand()
-
-    window.addEventListener('resize', checkCustomBrand)
-    const observer = new MutationObserver(checkCustomBrand)
-    observer.observe(document.documentElement, {
-      attributes: true,
-      attributeFilter: ['style', 'class'],
-    })
-
-    return () => {
-      window.removeEventListener('resize', checkCustomBrand)
-      observer.disconnect()
-    }
  }, [searchParams])

  const validatePassword = (passwordValue: string): string[] => {

@@ -500,24 +475,14 @@ function SignupFormContent({

          </div>
        </div>

-        <Button
+        <BrandedButton
          type='submit'
-          onMouseEnter={() => setIsButtonHovered(true)}
-          onMouseLeave={() => setIsButtonHovered(false)}
-          className='group inline-flex w-full items-center justify-center gap-2 rounded-[10px] border border-[#6F3DFA] bg-gradient-to-b from-[#8357FF] to-[#6F3DFA] py-[6px] pr-[10px] pl-[12px] text-[15px] text-white shadow-[inset_0_2px_4px_0_#9B77FF] transition-all'
-          disabled={isLoading}
+          loading={isLoading}
+          loadingText='Creating account'
        >
-          <span className='flex items-center gap-1'>
-            {isLoading ? 'Creating account' : 'Create account'}
-            <span className='inline-flex transition-transform duration-200 group-hover:translate-x-0.5'>
-              {isButtonHovered ? (
-                <ArrowRight className='h-4 w-4' aria-hidden='true' />
-              ) : (
-                <ChevronRight className='h-4 w-4' aria-hidden='true' />
-              )}
-            </span>
-          </span>
-        </Button>
+          Create account
+        </BrandedButton>
      </form>
    )}
@@ -13,6 +13,7 @@ import { cn } from '@/lib/core/utils/cn'

import { quickValidateEmail } from '@/lib/messaging/email/validation'
import { inter } from '@/app/_styles/fonts/inter/inter'
import { soehne } from '@/app/_styles/fonts/soehne/soehne'
+import { useBrandedButtonClass } from '@/hooks/use-branded-button-class'

const logger = createLogger('SSOForm')

@@ -57,7 +58,7 @@ export default function SSOForm() {

  const [email, setEmail] = useState('')
  const [emailErrors, setEmailErrors] = useState<string[]>([])
  const [showEmailValidationError, setShowEmailValidationError] = useState(false)
-  const [buttonClass, setButtonClass] = useState('branded-button-gradient')
+  const buttonClass = useBrandedButtonClass()
  const [callbackUrl, setCallbackUrl] = useState('/workspace')

  useEffect(() => {

@@ -90,31 +91,6 @@ export default function SSOForm() {

        setShowEmailValidationError(true)
      }
    }
-
-    const checkCustomBrand = () => {
-      const computedStyle = getComputedStyle(document.documentElement)
-      const brandAccent = computedStyle.getPropertyValue('--brand-accent-hex').trim()
-
-      if (brandAccent && brandAccent !== '#6f3dfa') {
-        setButtonClass('branded-button-custom')
-      } else {
-        setButtonClass('branded-button-gradient')
-      }
-    }
-
-    checkCustomBrand()
-
-    window.addEventListener('resize', checkCustomBrand)
-    const observer = new MutationObserver(checkCustomBrand)
-    observer.observe(document.documentElement, {
-      attributes: true,
-      attributeFilter: ['style', 'class'],
-    })
-
-    return () => {
-      window.removeEventListener('resize', checkCustomBrand)
-      observer.disconnect()
-    }
  }, [searchParams])

  const handleEmailChange = (e: React.ChangeEvent<HTMLInputElement>) => {
@@ -8,6 +8,7 @@ import { cn } from '@/lib/core/utils/cn'

import { inter } from '@/app/_styles/fonts/inter/inter'
import { soehne } from '@/app/_styles/fonts/soehne/soehne'
import { useVerification } from '@/app/(auth)/verify/use-verification'
+import { useBrandedButtonClass } from '@/hooks/use-branded-button-class'

interface VerifyContentProps {
  hasEmailService: boolean

@@ -58,34 +59,7 @@ function VerificationForm({

    setCountdown(30)
  }

-  const [buttonClass, setButtonClass] = useState('branded-button-gradient')
-
-  useEffect(() => {
-    const checkCustomBrand = () => {
-      const computedStyle = getComputedStyle(document.documentElement)
-      const brandAccent = computedStyle.getPropertyValue('--brand-accent-hex').trim()
-
-      if (brandAccent && brandAccent !== '#6f3dfa') {
-        setButtonClass('branded-button-custom')
-      } else {
-        setButtonClass('branded-button-gradient')
-      }
-    }
-
-    checkCustomBrand()
-
-    window.addEventListener('resize', checkCustomBrand)
-    const observer = new MutationObserver(checkCustomBrand)
-    observer.observe(document.documentElement, {
-      attributes: true,
-      attributeFilter: ['style', 'class'],
-    })
-
-    return () => {
-      window.removeEventListener('resize', checkCustomBrand)
-      observer.disconnect()
-    }
-  }, [])
+  const buttonClass = useBrandedButtonClass()

  return (
    <>
@@ -4,7 +4,6 @@ import { useRef, useState } from 'react'

import { createLogger } from '@sim/logger'
import { X } from 'lucide-react'
import { Textarea } from '@/components/emcn'
-import { Button } from '@/components/ui/button'
import { Input } from '@/components/ui/input'
import { Label } from '@/components/ui/label'
import {

@@ -18,6 +17,7 @@ import { isHosted } from '@/lib/core/config/feature-flags'

import { cn } from '@/lib/core/utils/cn'
import { quickValidateEmail } from '@/lib/messaging/email/validation'
import { soehne } from '@/app/_styles/fonts/soehne/soehne'
+import { BrandedButton } from '@/app/(auth)/components/branded-button'
import Footer from '@/app/(landing)/components/footer/footer'
import Nav from '@/app/(landing)/components/nav/nav'

@@ -493,18 +493,17 @@ export default function CareersPage() {

            {/* Submit Button */}
            <div className='flex justify-end pt-2'>
-              <Button
+              <BrandedButton
                type='submit'
                disabled={isSubmitting || submitStatus === 'success'}
-                className='min-w-[200px] rounded-[10px] border border-[#6F3DFA] bg-gradient-to-b from-[#8357FF] to-[#6F3DFA] text-white shadow-[inset_0_2px_4px_0_#9B77FF] transition-all duration-300 hover:opacity-90 disabled:opacity-50'
-                size='lg'
+                loading={isSubmitting}
+                loadingText='Submitting'
+                showArrow={false}
+                fullWidth={false}
+                className='min-w-[200px]'
              >
-                {isSubmitting
-                  ? 'Submitting...'
-                  : submitStatus === 'success'
-                    ? 'Submitted'
-                    : 'Submit Application'}
-              </Button>
+                {submitStatus === 'success' ? 'Submitted' : 'Submit Application'}
+              </BrandedButton>
            </div>
          </form>
        </section>
@@ -11,6 +11,7 @@ import { useBrandConfig } from '@/lib/branding/branding'

import { isHosted } from '@/lib/core/config/feature-flags'
import { soehne } from '@/app/_styles/fonts/soehne/soehne'
import { getFormattedGitHubStars } from '@/app/(landing)/actions/github'
+import { useBrandedButtonClass } from '@/hooks/use-branded-button-class'

const logger = createLogger('nav')

@@ -20,11 +21,12 @@ interface NavProps {

}

export default function Nav({ hideAuthButtons = false, variant = 'landing' }: NavProps = {}) {
-  const [githubStars, setGithubStars] = useState('25.1k')
+  const [githubStars, setGithubStars] = useState('25.8k')
  const [isHovered, setIsHovered] = useState(false)
  const [isLoginHovered, setIsLoginHovered] = useState(false)
  const router = useRouter()
  const brand = useBrandConfig()
+  const buttonClass = useBrandedButtonClass()

  useEffect(() => {
    if (variant !== 'landing') return

@@ -183,7 +185,7 @@ export default function Nav({ hideAuthButtons = false, variant = 'landing' }: Na

  href='/signup'
  onMouseEnter={() => setIsHovered(true)}
  onMouseLeave={() => setIsHovered(false)}
-  className='group inline-flex items-center justify-center gap-2 rounded-[10px] border border-[#6F3DFA] bg-gradient-to-b from-[#8357FF] to-[#6F3DFA] py-[6px] pr-[10px] pl-[12px] text-[14px] text-white shadow-[inset_0_2px_4px_0_#9B77FF] transition-all sm:text-[16px]'
+  className={`${buttonClass} group inline-flex items-center justify-center gap-2 rounded-[10px] py-[6px] pr-[10px] pl-[12px] text-[15px] text-white transition-all`}
  aria-label='Get started with Sim - Sign up for free'
  prefetch={true}
>
@@ -4,6 +4,11 @@ import { createLogger } from '@sim/logger'

import { and, desc, eq, inArray } from 'drizzle-orm'
import { getSession } from '@/lib/auth'
import { refreshOAuthToken } from '@/lib/oauth'
+import {
+  getMicrosoftRefreshTokenExpiry,
+  isMicrosoftProvider,
+  PROACTIVE_REFRESH_THRESHOLD_DAYS,
+} from '@/lib/oauth/microsoft'

const logger = createLogger('OAuthUtilsAPI')

@@ -205,15 +210,32 @@ export async function refreshAccessTokenIfNeeded(

  }

  // Decide if we should refresh: token missing OR expired
-  const expiresAt = credential.accessTokenExpiresAt
+  const accessTokenExpiresAt = credential.accessTokenExpiresAt
+  const refreshTokenExpiresAt = credential.refreshTokenExpiresAt
  const now = new Date()
-  const shouldRefresh =
-    !!credential.refreshToken && (!credential.accessToken || (expiresAt && expiresAt <= now))
+
+  // Check if access token needs refresh (missing or expired)
+  const accessTokenNeedsRefresh =
+    !!credential.refreshToken &&
+    (!credential.accessToken || (accessTokenExpiresAt && accessTokenExpiresAt <= now))
+
+  // Check if we should proactively refresh to prevent refresh token expiry
+  // This applies to Microsoft providers whose refresh tokens expire after 90 days of inactivity
+  const proactiveRefreshThreshold = new Date(
+    now.getTime() + PROACTIVE_REFRESH_THRESHOLD_DAYS * 24 * 60 * 60 * 1000
+  )
+  const refreshTokenNeedsProactiveRefresh =
+    !!credential.refreshToken &&
+    isMicrosoftProvider(credential.providerId) &&
+    refreshTokenExpiresAt &&
+    refreshTokenExpiresAt <= proactiveRefreshThreshold
+
+  const shouldRefresh = accessTokenNeedsRefresh || refreshTokenNeedsProactiveRefresh

  const accessToken = credential.accessToken

  if (shouldRefresh) {
-    logger.info(`[${requestId}] Token expired, attempting to refresh for credential`)
+    logger.info(`[${requestId}] Refreshing token for credential`)
    try {
      const refreshedToken = await refreshOAuthToken(
        credential.providerId,

@@ -227,11 +249,15 @@ export async function refreshAccessTokenIfNeeded(

        userId: credential.userId,
        hasRefreshToken: !!credential.refreshToken,
      })
+      if (!accessTokenNeedsRefresh && accessToken) {
+        logger.info(`[${requestId}] Proactive refresh failed but access token still valid`)
+        return accessToken
+      }
      return null
    }

    // Prepare update data
-    const updateData: any = {
+    const updateData: Record<string, unknown> = {
      accessToken: refreshedToken.accessToken,
      accessTokenExpiresAt: new Date(Date.now() + refreshedToken.expiresIn * 1000),
      updatedAt: new Date(),

@@ -243,6 +269,10 @@ export async function refreshAccessTokenIfNeeded(

      updateData.refreshToken = refreshedToken.refreshToken
    }

+    if (isMicrosoftProvider(credential.providerId)) {
+      updateData.refreshTokenExpiresAt = getMicrosoftRefreshTokenExpiry()
+    }

    // Update the token in the database
    await db.update(account).set(updateData).where(eq(account.id, credentialId))

@@ -256,6 +286,10 @@ export async function refreshAccessTokenIfNeeded(

      credentialId,
      userId: credential.userId,
    })
+    if (!accessTokenNeedsRefresh && accessToken) {
+      logger.info(`[${requestId}] Proactive refresh failed but access token still valid`)
+      return accessToken
+    }
    return null
  }
} else if (!accessToken) {

@@ -277,10 +311,27 @@ export async function refreshTokenIfNeeded(

  credentialId: string
): Promise<{ accessToken: string; refreshed: boolean }> {
  // Decide if we should refresh: token missing OR expired
-  const expiresAt = credential.accessTokenExpiresAt
+  const accessTokenExpiresAt = credential.accessTokenExpiresAt
+  const refreshTokenExpiresAt = credential.refreshTokenExpiresAt
  const now = new Date()
-  const shouldRefresh =
-    !!credential.refreshToken && (!credential.accessToken || (expiresAt && expiresAt <= now))
+
+  // Check if access token needs refresh (missing or expired)
+  const accessTokenNeedsRefresh =
+    !!credential.refreshToken &&
+    (!credential.accessToken || (accessTokenExpiresAt && accessTokenExpiresAt <= now))
+
+  // Check if we should proactively refresh to prevent refresh token expiry
+  // This applies to Microsoft providers whose refresh tokens expire after 90 days of inactivity
+  const proactiveRefreshThreshold = new Date(
+    now.getTime() + PROACTIVE_REFRESH_THRESHOLD_DAYS * 24 * 60 * 60 * 1000
+  )
+  const refreshTokenNeedsProactiveRefresh =
+    !!credential.refreshToken &&
+    isMicrosoftProvider(credential.providerId) &&
+    refreshTokenExpiresAt &&
+    refreshTokenExpiresAt <= proactiveRefreshThreshold
+
+  const shouldRefresh = accessTokenNeedsRefresh || refreshTokenNeedsProactiveRefresh

  // If token appears valid and present, return it directly
  if (!shouldRefresh) {

@@ -293,13 +344,17 @@ export async function refreshTokenIfNeeded(

  if (!refreshResult) {
    logger.error(`[${requestId}] Failed to refresh token for credential`)
+    if (!accessTokenNeedsRefresh && credential.accessToken) {
+      logger.info(`[${requestId}] Proactive refresh failed but access token still valid`)
+      return { accessToken: credential.accessToken, refreshed: false }
+    }
    throw new Error('Failed to refresh token')
  }

  const { accessToken: refreshedToken, expiresIn, refreshToken: newRefreshToken } = refreshResult

  // Prepare update data
-  const updateData: any = {
+  const updateData: Record<string, unknown> = {
    accessToken: refreshedToken,
    accessTokenExpiresAt: new Date(Date.now() + expiresIn * 1000), // Use provider's expiry
    updatedAt: new Date(),

@@ -311,6 +366,10 @@ export async function refreshTokenIfNeeded(

    updateData.refreshToken = newRefreshToken
  }

+  if (isMicrosoftProvider(credential.providerId)) {
+    updateData.refreshTokenExpiresAt = getMicrosoftRefreshTokenExpiry()
+  }

  await db.update(account).set(updateData).where(eq(account.id, credentialId))

  logger.info(`[${requestId}] Successfully refreshed access token`)

@@ -331,6 +390,11 @@ export async function refreshTokenIfNeeded(

    }
  }

+  if (!accessTokenNeedsRefresh && credential.accessToken) {
+    logger.info(`[${requestId}] Proactive refresh failed but access token still valid`)
+    return { accessToken: credential.accessToken, refreshed: false }
+  }

  logger.error(`[${requestId}] Refresh failed and no valid token found in DB`, error)
  throw error
}
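
The refresh decision above reduces to a small predicate. A standalone restatement for reference; a sketch under the diff's assumptions (same credential field names, with `isMicrosoftProvider` approximated by a hypothetical prefix check), not the actual implementation:

```ts
const DAY_MS = 24 * 60 * 60 * 1000

interface CredentialLike {
  providerId: string
  accessToken?: string | null
  refreshToken?: string | null
  accessTokenExpiresAt?: Date | null
  refreshTokenExpiresAt?: Date | null
}

// Mirrors the shouldRefresh logic in the diff: refresh when the access token is
// missing or expired, or proactively when a Microsoft refresh token nears expiry.
function shouldRefresh(cred: CredentialLike, thresholdDays: number, now = new Date()): boolean {
  if (!cred.refreshToken) return false
  const accessExpired =
    !cred.accessToken || (!!cred.accessTokenExpiresAt && cred.accessTokenExpiresAt <= now)
  // Hypothetical stand-in for isMicrosoftProvider from @/lib/oauth/microsoft
  const isMicrosoft = cred.providerId.startsWith('microsoft')
  const proactive =
    isMicrosoft &&
    !!cred.refreshTokenExpiresAt &&
    cred.refreshTokenExpiresAt.getTime() <= now.getTime() + thresholdDays * DAY_MS
  return accessExpired || proactive
}
```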
@@ -15,7 +15,8 @@ const resetPasswordSchema = z.object({

    .max(100, 'Password must not exceed 100 characters')
    .regex(/[A-Z]/, 'Password must contain at least one uppercase letter')
    .regex(/[a-z]/, 'Password must contain at least one lowercase letter')
-    .regex(/[0-9]/, 'Password must contain at least one number'),
+    .regex(/[0-9]/, 'Password must contain at least one number')
+    .regex(/[^A-Za-z0-9]/, 'Password must contain at least one special character'),
})

export async function POST(request: NextRequest) {
@@ -4,7 +4,7 @@ import { eq } from 'drizzle-orm'

import { NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'

-const logger = createLogger('SSO-Providers')
+const logger = createLogger('SSOProvidersRoute')

export async function GET() {
  try {
@@ -6,7 +6,7 @@ import { hasSSOAccess } from '@/lib/billing'

import { env } from '@/lib/core/config/env'
import { REDACTED_MARKER } from '@/lib/core/security/redaction'

-const logger = createLogger('SSO-Register')
+const logger = createLogger('SSORegisterRoute')

const mappingSchema = z
  .object({

@@ -43,6 +43,10 @@ const ssoRegistrationSchema = z.discriminatedUnion('providerType', [

      ])
      .default(['openid', 'profile', 'email']),
    pkce: z.boolean().default(true),
+    authorizationEndpoint: z.string().url().optional(),
+    tokenEndpoint: z.string().url().optional(),
+    userInfoEndpoint: z.string().url().optional(),
+    jwksEndpoint: z.string().url().optional(),
  }),
  z.object({
    providerType: z.literal('saml'),

@@ -64,12 +68,10 @@

export async function POST(request: NextRequest) {
  try {
-    // SSO plugin must be enabled in Better Auth
    if (!env.SSO_ENABLED) {
      return NextResponse.json({ error: 'SSO is not enabled' }, { status: 400 })
    }

-    // Check plan access (enterprise) or env var override
    const session = await getSession()
    if (!session?.user?.id) {
      return NextResponse.json({ error: 'Authentication required' }, { status: 401 })

@@ -116,7 +118,16 @@ export async function POST(request: NextRequest) {

    }

    if (providerType === 'oidc') {
-      const { clientId, clientSecret, scopes, pkce } = body
+      const {
+        clientId,
+        clientSecret,
+        scopes,
+        pkce,
+        authorizationEndpoint,
+        tokenEndpoint,
+        userInfoEndpoint,
+        jwksEndpoint,
+      } = body

      const oidcConfig: any = {
        clientId,

@@ -127,50 +138,104 @@ export async function POST(request: NextRequest) {

        pkce: pkce ?? true,
      }

-      // Add manual endpoints for providers that might need them
-      // Common patterns for OIDC providers that don't support discovery properly
-      if (
-        issuer.includes('okta.com') ||
-        issuer.includes('auth0.com') ||
-        issuer.includes('identityserver')
-      ) {
-        const baseUrl = issuer.includes('/oauth2/default')
-          ? issuer.replace('/oauth2/default', '')
-          : issuer.replace('/oauth', '').replace('/v2.0', '').replace('/oauth2', '')
-
-        // Okta-style endpoints
-        if (issuer.includes('okta.com')) {
-          oidcConfig.authorizationEndpoint = `${baseUrl}/oauth2/default/v1/authorize`
-          oidcConfig.tokenEndpoint = `${baseUrl}/oauth2/default/v1/token`
-          oidcConfig.userInfoEndpoint = `${baseUrl}/oauth2/default/v1/userinfo`
-          oidcConfig.jwksEndpoint = `${baseUrl}/oauth2/default/v1/keys`
-        }
-        // Auth0-style endpoints
-        else if (issuer.includes('auth0.com')) {
-          oidcConfig.authorizationEndpoint = `${baseUrl}/authorize`
-          oidcConfig.tokenEndpoint = `${baseUrl}/oauth/token`
-          oidcConfig.userInfoEndpoint = `${baseUrl}/userinfo`
-          oidcConfig.jwksEndpoint = `${baseUrl}/.well-known/jwks.json`
-        }
-        // Generic OIDC endpoints (IdentityServer, etc.)
-        else {
-          oidcConfig.authorizationEndpoint = `${baseUrl}/connect/authorize`
-          oidcConfig.tokenEndpoint = `${baseUrl}/connect/token`
-          oidcConfig.userInfoEndpoint = `${baseUrl}/connect/userinfo`
-          oidcConfig.jwksEndpoint = `${baseUrl}/.well-known/jwks`
-        }
-
-        logger.info('Using manual OIDC endpoints for provider', {
-          providerId,
-          provider: issuer.includes('okta.com')
-            ? 'Okta'
-            : issuer.includes('auth0.com')
-              ? 'Auth0'
-              : 'Generic',
-          authEndpoint: oidcConfig.authorizationEndpoint,
-        })
-      }
+      oidcConfig.authorizationEndpoint = authorizationEndpoint
+      oidcConfig.tokenEndpoint = tokenEndpoint
+      oidcConfig.userInfoEndpoint = userInfoEndpoint
+      oidcConfig.jwksEndpoint = jwksEndpoint
+
+      const needsDiscovery =
+        !oidcConfig.authorizationEndpoint || !oidcConfig.tokenEndpoint || !oidcConfig.jwksEndpoint
+
+      if (needsDiscovery) {
+        const discoveryUrl = `${issuer.replace(/\/$/, '')}/.well-known/openid-configuration`
+        try {
+          logger.info('Fetching OIDC discovery document for missing endpoints', {
+            discoveryUrl,
+            hasAuthEndpoint: !!oidcConfig.authorizationEndpoint,
+            hasTokenEndpoint: !!oidcConfig.tokenEndpoint,
+            hasJwksEndpoint: !!oidcConfig.jwksEndpoint,
+          })
+
+          const discoveryResponse = await fetch(discoveryUrl, {
+            headers: { Accept: 'application/json' },
+          })
+
+          if (!discoveryResponse.ok) {
+            logger.error('Failed to fetch OIDC discovery document', {
+              status: discoveryResponse.status,
+              statusText: discoveryResponse.statusText,
+            })
+            return NextResponse.json(
+              {
+                error: `Failed to fetch OIDC discovery document from ${discoveryUrl}. Status: ${discoveryResponse.status}. Provide all endpoints explicitly or verify the issuer URL.`,
+              },
+              { status: 400 }
+            )
+          }
+
+          const discovery = await discoveryResponse.json()
+
+          oidcConfig.authorizationEndpoint =
+            oidcConfig.authorizationEndpoint || discovery.authorization_endpoint
+          oidcConfig.tokenEndpoint = oidcConfig.tokenEndpoint || discovery.token_endpoint
+          oidcConfig.userInfoEndpoint = oidcConfig.userInfoEndpoint || discovery.userinfo_endpoint
+          oidcConfig.jwksEndpoint = oidcConfig.jwksEndpoint || discovery.jwks_uri
+
+          logger.info('Merged OIDC endpoints (user-provided + discovery)', {
+            providerId,
+            issuer,
+            authorizationEndpoint: oidcConfig.authorizationEndpoint,
+            tokenEndpoint: oidcConfig.tokenEndpoint,
+            userInfoEndpoint: oidcConfig.userInfoEndpoint,
+            jwksEndpoint: oidcConfig.jwksEndpoint,
+          })
+        } catch (error) {
+          logger.error('Error fetching OIDC discovery document', {
+            error: error instanceof Error ? error.message : 'Unknown error',
+            discoveryUrl,
+          })
+          return NextResponse.json(
+            {
+              error: `Failed to fetch OIDC discovery document from ${discoveryUrl}. Please verify the issuer URL is correct or provide all endpoints explicitly.`,
+            },
+            { status: 400 }
+          )
+        }
+      } else {
+        logger.info('Using explicitly provided OIDC endpoints (all present)', {
+          providerId,
+          issuer,
+          authorizationEndpoint: oidcConfig.authorizationEndpoint,
+          tokenEndpoint: oidcConfig.tokenEndpoint,
+          userInfoEndpoint: oidcConfig.userInfoEndpoint,
+          jwksEndpoint: oidcConfig.jwksEndpoint,
+        })
+      }
+
+      if (
+        !oidcConfig.authorizationEndpoint ||
+        !oidcConfig.tokenEndpoint ||
+        !oidcConfig.jwksEndpoint
+      ) {
+        const missing: string[] = []
+        if (!oidcConfig.authorizationEndpoint) missing.push('authorizationEndpoint')
+        if (!oidcConfig.tokenEndpoint) missing.push('tokenEndpoint')
+        if (!oidcConfig.jwksEndpoint) missing.push('jwksEndpoint')
+
+        logger.error('Missing required OIDC endpoints after discovery merge', {
+          missing,
+          authorizationEndpoint: oidcConfig.authorizationEndpoint,
+          tokenEndpoint: oidcConfig.tokenEndpoint,
+          jwksEndpoint: oidcConfig.jwksEndpoint,
+        })
+        return NextResponse.json(
+          {
+            error: `Missing required OIDC endpoints: ${missing.join(', ')}. Please provide these explicitly or verify the issuer supports OIDC discovery.`,
+          },
+          { status: 400 }
+        )
+      }

      providerConfig.oidcConfig = oidcConfig
    } else if (providerType === 'saml') {
      const {
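
For context, the discovery document this route fetches from `/.well-known/openid-configuration` carries the snake_case fields merged above; a trimmed example of its shape (idp.example.com is a placeholder issuer, and real documents include many more fields):

```ts
// Trimmed OIDC discovery document shape referenced by the merge logic above.
const discoveryExample = {
  issuer: 'https://idp.example.com',
  authorization_endpoint: 'https://idp.example.com/connect/authorize',
  token_endpoint: 'https://idp.example.com/connect/token',
  userinfo_endpoint: 'https://idp.example.com/connect/userinfo',
  jwks_uri: 'https://idp.example.com/.well-known/jwks',
}
```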
@@ -224,7 +224,7 @@ export async function POST(req: NextRequest) {

    hasApiKey: !!executionParams.apiKey,
  })

-  const result = await executeTool(resolvedToolName, executionParams, true)
+  const result = await executeTool(resolvedToolName, executionParams)

  logger.info(`[${tracker.requestId}] Tool execution complete`, {
    toolName,
@@ -1,10 +1,11 @@

import { db } from '@sim/db'
-import { templateCreators, user } from '@sim/db/schema'
+import { templateCreators } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { generateRequestId } from '@/lib/core/utils/request'
+import { verifyEffectiveSuperUser } from '@/lib/templates/permissions'

const logger = createLogger('CreatorVerificationAPI')

@@ -23,9 +24,8 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{

  }

  // Check if user is a super user
-  const currentUser = await db.select().from(user).where(eq(user.id, session.user.id)).limit(1)
-
-  if (!currentUser[0]?.isSuperUser) {
+  const { effectiveSuperUser } = await verifyEffectiveSuperUser(session.user.id)
+  if (!effectiveSuperUser) {
    logger.warn(`[${requestId}] Non-super user attempted to verify creator: ${id}`)
    return NextResponse.json({ error: 'Only super users can verify creators' }, { status: 403 })
  }

@@ -76,9 +76,8 @@ export async function DELETE(

  }

  // Check if user is a super user
-  const currentUser = await db.select().from(user).where(eq(user.id, session.user.id)).limit(1)
-
-  if (!currentUser[0]?.isSuperUser) {
+  const { effectiveSuperUser } = await verifyEffectiveSuperUser(session.user.id)
+  if (!effectiveSuperUser) {
    logger.warn(`[${requestId}] Non-super user attempted to unverify creator: ${id}`)
    return NextResponse.json({ error: 'Only super users can unverify creators' }, { status: 403 })
  }
@@ -6,9 +6,10 @@ import { createLogger } from '@sim/logger'

import binaryExtensionsList from 'binary-extensions'
import { type NextRequest, NextResponse } from 'next/server'
import { checkHybridAuth } from '@/lib/auth/hybrid'
-import { createPinnedUrl, validateUrlWithDNS } from '@/lib/core/security/input-validation'
+import { secureFetchWithPinnedIP, validateUrlWithDNS } from '@/lib/core/security/input-validation'
import { isSupportedFileType, parseFile } from '@/lib/file-parsers'
import { isUsingCloudStorage, type StorageContext, StorageService } from '@/lib/uploads'
+import { uploadExecutionFile } from '@/lib/uploads/contexts/execution'
import { UPLOAD_DIR_SERVER } from '@/lib/uploads/core/setup.server'
import { getFileMetadataByKey } from '@/lib/uploads/server/metadata'
import {

@@ -21,6 +22,7 @@ import {

} from '@/lib/uploads/utils/file-utils'
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
import { verifyFileAccess } from '@/app/api/files/authorization'
+import type { UserFile } from '@/executor/types'
import '@/lib/uploads/core/setup.server'

export const dynamic = 'force-dynamic'

@@ -30,6 +32,12 @@ const logger = createLogger('FilesParseAPI')

const MAX_DOWNLOAD_SIZE_BYTES = 100 * 1024 * 1024 // 100 MB
const DOWNLOAD_TIMEOUT_MS = 30000 // 30 seconds

+interface ExecutionContext {
+  workspaceId: string
+  workflowId: string
+  executionId: string
+}

interface ParseResult {
  success: boolean
  content?: string

@@ -37,6 +45,7 @@ interface ParseResult {

  filePath: string
  originalName?: string // Original filename from database (for workspace files)
  viewerUrl?: string | null // Viewer URL for the file if available
+  userFile?: UserFile // UserFile object for the raw file
  metadata?: {
    fileType: string
    size: number

@@ -70,27 +79,45 @@ export async function POST(request: NextRequest) {

  const userId = authResult.userId
  const requestData = await request.json()
-  const { filePath, fileType, workspaceId } = requestData
+  const { filePath, fileType, workspaceId, workflowId, executionId } = requestData

  if (!filePath || (typeof filePath === 'string' && filePath.trim() === '')) {
    return NextResponse.json({ success: false, error: 'No file path provided' }, { status: 400 })
  }

-  logger.info('File parse request received:', { filePath, fileType, workspaceId, userId })
+  // Build execution context if all required fields are present
+  const executionContext: ExecutionContext | undefined =
+    workspaceId && workflowId && executionId
+      ? { workspaceId, workflowId, executionId }
+      : undefined
+
+  logger.info('File parse request received:', {
+    filePath,
+    fileType,
+    workspaceId,
+    userId,
+    hasExecutionContext: !!executionContext,
+  })

  if (Array.isArray(filePath)) {
    const results = []
-    for (const path of filePath) {
-      if (!path || (typeof path === 'string' && path.trim() === '')) {
+    for (const singlePath of filePath) {
+      if (!singlePath || (typeof singlePath === 'string' && singlePath.trim() === '')) {
        results.push({
          success: false,
          error: 'Empty file path in array',
-          filePath: path || '',
+          filePath: singlePath || '',
        })
        continue
      }

-      const result = await parseFileSingle(path, fileType, workspaceId, userId)
+      const result = await parseFileSingle(
+        singlePath,
+        fileType,
+        workspaceId,
+        userId,
+        executionContext
+      )
      if (result.metadata) {
        result.metadata.processingTime = Date.now() - startTime
      }

@@ -106,6 +133,7 @@ export async function POST(request: NextRequest) {

          fileType: result.metadata?.fileType || 'application/octet-stream',
          size: result.metadata?.size || 0,
          binary: false,
+          file: result.userFile,
        },
        filePath: result.filePath,
        viewerUrl: result.viewerUrl,

@@ -121,7 +149,7 @@ export async function POST(request: NextRequest) {

    })
  }

-  const result = await parseFileSingle(filePath, fileType, workspaceId, userId)
+  const result = await parseFileSingle(filePath, fileType, workspaceId, userId, executionContext)

  if (result.metadata) {
    result.metadata.processingTime = Date.now() - startTime

@@ -137,6 +165,7 @@ export async function POST(request: NextRequest) {

      fileType: result.metadata?.fileType || 'application/octet-stream',
      size: result.metadata?.size || 0,
      binary: false,
+      file: result.userFile,
    },
    filePath: result.filePath,
    viewerUrl: result.viewerUrl,

@@ -164,7 +193,8 @@ async function parseFileSingle(

  filePath: string,
  fileType: string,
  workspaceId: string,
-  userId: string
+  userId: string,
+  executionContext?: ExecutionContext
): Promise<ParseResult> {
  logger.info('Parsing file:', filePath)

@@ -186,18 +216,18 @@ async function parseFileSingle(

  }

  if (filePath.includes('/api/files/serve/')) {
-    return handleCloudFile(filePath, fileType, undefined, userId)
+    return handleCloudFile(filePath, fileType, undefined, userId, executionContext)
  }

  if (filePath.startsWith('http://') || filePath.startsWith('https://')) {
-    return handleExternalUrl(filePath, fileType, workspaceId, userId)
+    return handleExternalUrl(filePath, fileType, workspaceId, userId, executionContext)
  }

  if (isUsingCloudStorage()) {
-    return handleCloudFile(filePath, fileType, undefined, userId)
+    return handleCloudFile(filePath, fileType, undefined, userId, executionContext)
  }

-  return handleLocalFile(filePath, fileType, userId)
+  return handleLocalFile(filePath, fileType, userId, executionContext)
}

/**

@@ -230,12 +260,14 @@ function validateFilePath(filePath: string): { isValid: boolean; error?: string

/**
 * Handle external URL
 * If workspaceId is provided, checks if file already exists and saves to workspace if not
+ * If executionContext is provided, also stores the file in execution storage and returns UserFile
 */
async function handleExternalUrl(
  url: string,
  fileType: string,
  workspaceId: string,
-  userId: string
+  userId: string,
+  executionContext?: ExecutionContext
): Promise<ParseResult> {
  try {
    logger.info('Fetching external URL:', url)

@@ -312,17 +344,13 @@ async function handleExternalUrl(

        if (existingFile) {
          const storageFilePath = `/api/files/serve/${existingFile.key}`
-          return handleCloudFile(storageFilePath, fileType, 'workspace', userId)
+          return handleCloudFile(storageFilePath, fileType, 'workspace', userId, executionContext)
        }
      }
    }

-    const pinnedUrl = createPinnedUrl(url, urlValidation.resolvedIP!)
-    const response = await fetch(pinnedUrl, {
-      signal: AbortSignal.timeout(DOWNLOAD_TIMEOUT_MS),
-      headers: {
-        Host: urlValidation.originalHostname!,
-      },
+    const response = await secureFetchWithPinnedIP(url, urlValidation.resolvedIP!, {
+      timeout: DOWNLOAD_TIMEOUT_MS,
    })
    if (!response.ok) {
      throw new Error(`Failed to fetch URL: ${response.status} ${response.statusText}`)

@@ -341,6 +369,19 @@ async function handleExternalUrl(

    logger.info(`Downloaded file from URL: ${url}, size: ${buffer.length} bytes`)

+    let userFile: UserFile | undefined
+    const mimeType = response.headers.get('content-type') || getMimeTypeFromExtension(extension)
+
+    if (executionContext) {
+      try {
+        userFile = await uploadExecutionFile(executionContext, buffer, filename, mimeType, userId)
+        logger.info(`Stored file in execution storage: ${filename}`, { key: userFile.key })
+      } catch (uploadError) {
+        logger.warn(`Failed to store file in execution storage:`, uploadError)
+        // Continue without userFile - parsing can still work
+      }
+    }

    if (shouldCheckWorkspace) {
      try {
        const permission = await getUserEntityPermissions(userId, 'workspace', workspaceId)

@@ -353,8 +394,6 @@ async function handleExternalUrl(

        })
      } else {
        const { uploadWorkspaceFile } = await import('@/lib/uploads/contexts/workspace')
-        const mimeType =
-          response.headers.get('content-type') || getMimeTypeFromExtension(extension)
        await uploadWorkspaceFile(workspaceId, userId, buffer, filename, mimeType)
        logger.info(`Saved URL file to workspace storage: ${filename}`)
      }

@@ -363,17 +402,23 @@ async function handleExternalUrl(

      }
    }

+    let parseResult: ParseResult
    if (extension === 'pdf') {
-      return await handlePdfBuffer(buffer, filename, fileType, url)
-    }
-    if (extension === 'csv') {
-      return await handleCsvBuffer(buffer, filename, fileType, url)
-    }
-    if (isSupportedFileType(extension)) {
-      return await handleGenericTextBuffer(buffer, filename, extension, fileType, url)
+      parseResult = await handlePdfBuffer(buffer, filename, fileType, url)
+    } else if (extension === 'csv') {
+      parseResult = await handleCsvBuffer(buffer, filename, fileType, url)
+    } else if (isSupportedFileType(extension)) {
+      parseResult = await handleGenericTextBuffer(buffer, filename, extension, fileType, url)
+    } else {
+      parseResult = handleGenericBuffer(buffer, filename, extension, fileType)
    }

-    return handleGenericBuffer(buffer, filename, extension, fileType)
+    // Attach userFile to the result
+    if (userFile) {
+      parseResult.userFile = userFile
+    }
+
+    return parseResult
  } catch (error) {
    logger.error(`Error handling external URL ${url}:`, error)
    return {

@@ -386,12 +431,15 @@ async function handleExternalUrl(

/**
 * Handle file stored in cloud storage
+ * If executionContext is provided and file is not already from execution storage,
+ * copies the file to execution storage and returns UserFile
 */
async function handleCloudFile(
  filePath: string,
  fileType: string,
  explicitContext: string | undefined,
-  userId: string
+  userId: string,
+  executionContext?: ExecutionContext
): Promise<ParseResult> {
  try {
    const cloudKey = extractStorageKey(filePath)

@@ -438,6 +486,7 @@ async function handleCloudFile(

    const filename = originalFilename || cloudKey.split('/').pop() || cloudKey
    const extension = path.extname(filename).toLowerCase().substring(1)
+    const mimeType = getMimeTypeFromExtension(extension)

    const normalizedFilePath = `/api/files/serve/${encodeURIComponent(cloudKey)}?context=${context}`
    let workspaceIdFromKey: string | undefined

@@ -453,6 +502,39 @@ async function handleCloudFile(

    const viewerUrl = getViewerUrl(cloudKey, workspaceIdFromKey)

+    // Store file in execution storage if executionContext is provided
+    let userFile: UserFile | undefined
+
+    if (executionContext) {
+      // If file is already from execution context, create UserFile reference without re-uploading
+      if (context === 'execution') {
+        userFile = {
+          id: `file_${Date.now()}_${Math.random().toString(36).substring(2, 9)}`,
+          name: filename,
+          url: normalizedFilePath,
+          size: fileBuffer.length,
+          type: mimeType,
+          key: cloudKey,
+          context: 'execution',
+        }
+        logger.info(`Created UserFile reference for existing execution file: ${filename}`)
+      } else {
+        // Copy from workspace/other storage to execution storage
+        try {
+          userFile = await uploadExecutionFile(
+            executionContext,
+            fileBuffer,
+            filename,
+            mimeType,
+            userId
+          )
+          logger.info(`Copied file to execution storage: ${filename}`, { key: userFile.key })
+        } catch (uploadError) {
+          logger.warn(`Failed to copy file to execution storage:`, uploadError)
+        }
+      }
+    }

    let parseResult: ParseResult
    if (extension === 'pdf') {
      parseResult = await handlePdfBuffer(fileBuffer, filename, fileType, normalizedFilePath)

@@ -477,6 +559,11 @@ async function handleCloudFile(

    parseResult.viewerUrl = viewerUrl

+    // Attach userFile to the result
+    if (userFile) {
+      parseResult.userFile = userFile
+    }

    return parseResult
  } catch (error) {
    logger.error(`Error handling cloud file ${filePath}:`, error)

@@ -500,7 +587,8 @@

async function handleLocalFile(
  filePath: string,
  fileType: string,
-  userId: string
+  userId: string,
+  executionContext?: ExecutionContext
): Promise<ParseResult> {
  try {
    const filename = filePath.split('/').pop() || filePath

@@ -540,13 +628,32 @@

    const hash = createHash('md5').update(fileBuffer).digest('hex')

    const extension = path.extname(filename).toLowerCase().substring(1)
+    const mimeType = fileType || getMimeTypeFromExtension(extension)
+
+    // Store file in execution storage if executionContext is provided
+    let userFile: UserFile | undefined
+    if (executionContext) {
+      try {
+        userFile = await uploadExecutionFile(
+          executionContext,
+          fileBuffer,
+          filename,
+          mimeType,
+          userId
+        )
+        logger.info(`Stored local file in execution storage: ${filename}`, { key: userFile.key })
+      } catch (uploadError) {
+        logger.warn(`Failed to store local file in execution storage:`, uploadError)
+      }
+    }

    return {
      success: true,
      content: result.content,
      filePath,
+      userFile,
      metadata: {
-        fileType: fileType || getMimeTypeFromExtension(extension),
+        fileType: mimeType,
|
||||
size: stats.size,
|
||||
hash,
|
||||
processingTime: 0,
|
||||
|
||||
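The hunks above all converge on one pattern: each handler optionally uploads the buffer to execution storage, then attaches the resulting reference to its parse result. A minimal sketch of that pattern, assuming only the `UserFile` and `ParseResult` fields visible in the diff (the `parseWithUserFile` helper itself is hypothetical, not code from the repo):

```ts
// Shapes taken from the diff above; anything beyond these fields is assumed.
interface UserFile {
  id: string
  name: string
  url: string
  size: number
  type: string
  key: string
  context: 'execution' | 'workspace'
}

interface ParseResult {
  success: boolean
  content?: string
  userFile?: UserFile
  viewerUrl?: string
}

/** Run a parse handler, then attach the execution-storage reference if one was created. */
async function parseWithUserFile(
  parse: () => Promise<ParseResult>,
  userFile?: UserFile
): Promise<ParseResult> {
  const parseResult = await parse()
  if (userFile) {
    // Upload failures upstream leave userFile undefined, so parsing still succeeds.
    parseResult.userFile = userFile
  }
  return parseResult
}
```
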
@@ -11,7 +11,7 @@ import { preprocessExecution } from '@/lib/execution/preprocessing'
import { LoggingSession } from '@/lib/logs/execution/logging-session'
import { normalizeInputFormatValue } from '@/lib/workflows/input-format'
import { createStreamingResponse } from '@/lib/workflows/streaming/streaming'
import { isValidStartBlockType } from '@/lib/workflows/triggers/start-block-types'
import { isInputDefinitionTrigger } from '@/lib/workflows/triggers/input-definition-triggers'
import { setFormAuthCookie, validateFormAuth } from '@/app/api/form/utils'
import { createErrorResponse, createSuccessResponse } from '@/app/api/workflows/utils'

@@ -36,7 +36,7 @@ async function getWorkflowInputSchema(workflowId: string): Promise<any[]> {
.from(workflowBlocks)
.where(eq(workflowBlocks.workflowId, workflowId))

const startBlock = blocks.find((block) => isValidStartBlockType(block.type))
const startBlock = blocks.find((block) => isInputDefinitionTrigger(block.type))

if (!startBlock) {
return []
@@ -276,8 +276,11 @@ describe('Function Execute API Route', () => {
it.concurrent('should resolve tag variables with <tag_name> syntax', async () => {
const req = createMockRequest('POST', {
code: 'return <email>',
params: {
email: { id: '123', subject: 'Test Email' },
blockData: {
'block-123': { id: '123', subject: 'Test Email' },
},
blockNameMapping: {
email: 'block-123',
},
})

@@ -305,9 +308,13 @@ describe('Function Execute API Route', () => {
it.concurrent('should only match valid variable names in angle brackets', async () => {
const req = createMockRequest('POST', {
code: 'return <validVar> + "<invalid@email.com>" + <another_valid>',
params: {
validVar: 'hello',
another_valid: 'world',
blockData: {
'block-1': 'hello',
'block-2': 'world',
},
blockNameMapping: {
validVar: 'block-1',
another_valid: 'block-2',
},
})

@@ -321,28 +328,22 @@ describe('Function Execute API Route', () => {
it.concurrent(
'should handle Gmail webhook data with email addresses containing angle brackets',
async () => {
const gmailData = {
email: {
id: '123',
from: 'Waleed Latif <waleed@sim.ai>',
to: 'User <user@example.com>',
subject: 'Test Email',
bodyText: 'Hello world',
},
rawEmail: {
id: '123',
payload: {
headers: [
{ name: 'From', value: 'Waleed Latif <waleed@sim.ai>' },
{ name: 'To', value: 'User <user@example.com>' },
],
},
},
const emailData = {
id: '123',
from: 'Waleed Latif <waleed@sim.ai>',
to: 'User <user@example.com>',
subject: 'Test Email',
bodyText: 'Hello world',
}

const req = createMockRequest('POST', {
code: 'return <email>',
params: gmailData,
blockData: {
'block-email': emailData,
},
blockNameMapping: {
email: 'block-email',
},
})

const response = await POST(req)
@@ -356,17 +357,20 @@ describe('Function Execute API Route', () => {
it.concurrent(
'should properly serialize complex email objects with special characters',
async () => {
const complexEmailData = {
email: {
from: 'Test User <test@example.com>',
bodyHtml: '<div>HTML content with "quotes" and \'apostrophes\'</div>',
bodyText: 'Text with\nnewlines\tand\ttabs',
},
const emailData = {
from: 'Test User <test@example.com>',
bodyHtml: '<div>HTML content with "quotes" and \'apostrophes\'</div>',
bodyText: 'Text with\nnewlines\tand\ttabs',
}

const req = createMockRequest('POST', {
code: 'return <email>',
params: complexEmailData,
blockData: {
'block-email': emailData,
},
blockNameMapping: {
email: 'block-email',
},
})

const response = await POST(req)
@@ -519,18 +523,23 @@ describe('Function Execute API Route', () => {
})

it.concurrent('should handle JSON serialization edge cases', async () => {
const complexData = {
special: 'chars"with\'quotes',
unicode: '🎉 Unicode content',
nested: {
deep: {
value: 'test',
},
},
}

const req = createMockRequest('POST', {
code: 'return <complexData>',
params: {
complexData: {
special: 'chars"with\'quotes',
unicode: '🎉 Unicode content',
nested: {
deep: {
value: 'test',
},
},
},
blockData: {
'block-complex': complexData,
},
blockNameMapping: {
complexData: 'block-complex',
},
})

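The reworked tests above replace flat `params` lookups with a `blockData` + `blockNameMapping` contract: a tag like `<email>` resolves through the mapping to a block ID, then into that block's output. A minimal sketch of that resolution, assuming only those two shapes (`resolveTag` is illustrative; the real route builds a safe variable reference rather than substituting values inline):

```ts
// Hypothetical helper showing how <tag.path> resolves via blockNameMapping.
function resolveTag(
  tagName: string,
  blockData: Record<string, unknown>,
  blockNameMapping: Record<string, string>
): unknown {
  const [blockName, ...path] = tagName.split('.')
  const blockId = blockNameMapping[blockName] // e.g. 'email' -> 'block-email'
  if (!blockId) return undefined
  let value: unknown = blockData[blockId]
  for (const key of path) {
    if (value && typeof value === 'object') {
      value = (value as Record<string, unknown>)[key]
    } else {
      return undefined
    }
  }
  return value
}

// resolveTag('email', { 'block-email': { subject: 'Test Email' } }, { email: 'block-email' })
// -> { subject: 'Test Email' }
```
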
@@ -9,8 +9,8 @@ import { escapeRegExp, normalizeName, REFERENCE } from '@/executor/constants'
import {
createEnvVarPattern,
createWorkflowVariablePattern,
resolveEnvVarReferences,
} from '@/executor/utils/reference-validation'
import { navigatePath } from '@/executor/variables/resolvers/reference'
export const dynamic = 'force-dynamic'
export const runtime = 'nodejs'

@@ -18,8 +18,8 @@ export const MAX_DURATION = 210

const logger = createLogger('FunctionExecuteAPI')

const E2B_JS_WRAPPER_LINES = 3 // Lines before user code: ';(async () => {', ' try {', ' const __sim_result = await (async () => {'
const E2B_PYTHON_WRAPPER_LINES = 1 // Lines before user code: 'def __sim_main__():'
const E2B_JS_WRAPPER_LINES = 3
const E2B_PYTHON_WRAPPER_LINES = 1

type TypeScriptModule = typeof import('typescript')

@@ -134,33 +134,21 @@ function extractEnhancedError(
if (error.stack) {
enhanced.stack = error.stack

// Parse stack trace to extract line and column information
// Handle both compilation errors and runtime errors
const stackLines: string[] = error.stack.split('\n')

for (const line of stackLines) {
// Pattern 1: Compilation errors - "user-function.js:6"
let match = line.match(/user-function\.js:(\d+)(?::(\d+))?/)

// Pattern 2: Runtime errors - "at user-function.js:5:12"
if (!match) {
match = line.match(/at\s+user-function\.js:(\d+):(\d+)/)
}

// Pattern 3: Generic patterns for any line containing our filename
if (!match) {
match = line.match(/user-function\.js:(\d+)(?::(\d+))?/)
}

if (match) {
const stackLine = Number.parseInt(match[1], 10)
const stackColumn = match[2] ? Number.parseInt(match[2], 10) : undefined

// Adjust line number to account for wrapper code
// The user code starts at a specific line in our wrapper
const adjustedLine = stackLine - userCodeStartLine + 1

// Check if this is a syntax error in wrapper code caused by incomplete user code
const isWrapperSyntaxError =
stackLine > userCodeStartLine &&
error.name === 'SyntaxError' &&
@@ -168,7 +156,6 @@ function extractEnhancedError(
error.message.includes('Unexpected end of input'))

if (isWrapperSyntaxError && userCode) {
// Map wrapper syntax errors to the last line of user code
const codeLines = userCode.split('\n')
const lastUserLine = codeLines.length
enhanced.line = lastUserLine
@@ -181,7 +168,6 @@ function extractEnhancedError(
enhanced.line = adjustedLine
enhanced.column = stackColumn

// Extract the actual line content from user code
if (userCode) {
const codeLines = userCode.split('\n')
if (adjustedLine <= codeLines.length) {
@@ -192,7 +178,6 @@ function extractEnhancedError(
}

if (stackLine <= userCodeStartLine) {
// Error is in wrapper code itself
enhanced.line = stackLine
enhanced.column = stackColumn
break
@@ -200,7 +185,6 @@ function extractEnhancedError(
}
}

// Clean up stack trace to show user-relevant information
const cleanedStackLines: string[] = stackLines
.filter(
(line: string) =>
@@ -214,9 +198,6 @@ function extractEnhancedError(
}
}

// Keep original message without adding error type prefix
// The error type will be added later in createUserFriendlyErrorMessage

return enhanced
}

@@ -231,7 +212,6 @@ function formatE2BError(
userCode: string,
prologueLineCount: number
): { formattedError: string; cleanedOutput: string } {
// Calculate line offset based on language and prologue
const wrapperLines =
language === CodeLanguage.Python ? E2B_PYTHON_WRAPPER_LINES : E2B_JS_WRAPPER_LINES
const totalOffset = prologueLineCount + wrapperLines
@@ -241,27 +221,20 @@ function formatE2BError(
let cleanErrorMsg = ''

if (language === CodeLanguage.Python) {
// Python error format: "Cell In[X], line Y" followed by error details
// Extract line number from the Cell reference
const cellMatch = errorOutput.match(/Cell In\[\d+\], line (\d+)/)
if (cellMatch) {
const originalLine = Number.parseInt(cellMatch[1], 10)
userLine = originalLine - totalOffset
}

// Extract clean error message from the error string
// Remove file references like "(detected at line X) (file.py, line Y)"
cleanErrorMsg = errorMessage
.replace(/\s*\(detected at line \d+\)/g, '')
.replace(/\s*\([^)]+\.py, line \d+\)/g, '')
.trim()
} else if (language === CodeLanguage.JavaScript) {
// JavaScript error format from E2B: "SyntaxError: /path/file.ts: Message. (line:col)\n\n 9 | ..."
// First, extract the error type and message from the first line
const firstLineEnd = errorMessage.indexOf('\n')
const firstLine = firstLineEnd > 0 ? errorMessage.substring(0, firstLineEnd) : errorMessage

// Parse: "SyntaxError: /home/user/index.ts: Missing semicolon. (11:9)"
const jsErrorMatch = firstLine.match(/^(\w+Error):\s*[^:]+:\s*([^(]+)\.\s*\((\d+):(\d+)\)/)
if (jsErrorMatch) {
cleanErrorType = jsErrorMatch[1]
@@ -269,13 +242,11 @@ function formatE2BError(
const originalLine = Number.parseInt(jsErrorMatch[3], 10)
userLine = originalLine - totalOffset
} else {
// Fallback: look for line number in the arrow pointer line (> 11 |)
const arrowMatch = errorMessage.match(/^>\s*(\d+)\s*\|/m)
if (arrowMatch) {
const originalLine = Number.parseInt(arrowMatch[1], 10)
userLine = originalLine - totalOffset
}
// Try to extract error type and message
const errorMatch = firstLine.match(/^(\w+Error):\s*(.+)/)
if (errorMatch) {
cleanErrorType = errorMatch[1]
@@ -289,13 +260,11 @@ function formatE2BError(
}
}

// Build the final clean error message
const finalErrorMsg =
cleanErrorType && cleanErrorMsg
? `${cleanErrorType}: ${cleanErrorMsg}`
: cleanErrorMsg || errorMessage

// Format with line number if available
let formattedError = finalErrorMsg
if (userLine && userLine > 0) {
const codeLines = userCode.split('\n')
@@ -311,7 +280,6 @@ function formatE2BError(
}
}

// For stdout, just return the clean error message without the full traceback
const cleanedOutput = finalErrorMsg

return { formattedError, cleanedOutput }
@@ -327,7 +295,6 @@ function createUserFriendlyErrorMessage(
): string {
let errorMessage = enhanced.message

// Add line information if available
if (enhanced.line !== undefined) {
let lineInfo = `Line ${enhanced.line}`

@@ -338,18 +305,14 @@ function createUserFriendlyErrorMessage(

errorMessage = `${lineInfo} - ${errorMessage}`
} else {
// If no line number, try to extract it from stack trace for display
if (enhanced.stack) {
const stackMatch = enhanced.stack.match(/user-function\.js:(\d+)(?::(\d+))?/)
if (stackMatch) {
const line = Number.parseInt(stackMatch[1], 10)
let lineInfo = `Line ${line}`

// Try to get line content if we have userCode
if (userCode) {
const codeLines = userCode.split('\n')
// Note: stackMatch gives us VM line number, need to adjust
// This is a fallback case, so we might not have perfect line mapping
if (line <= codeLines.length) {
const lineContent = codeLines[line - 1]?.trim()
if (lineContent) {
@@ -363,7 +326,6 @@ function createUserFriendlyErrorMessage(
}
}

// Add error type prefix with consistent naming
if (enhanced.name !== 'Error') {
const errorTypePrefix =
enhanced.name === 'SyntaxError'
@@ -374,7 +336,6 @@ function createUserFriendlyErrorMessage(
? 'Reference Error'
: enhanced.name

// Only add prefix if not already present
if (!errorMessage.toLowerCase().includes(errorTypePrefix.toLowerCase())) {
errorMessage = `${errorTypePrefix}: ${errorMessage}`
}
@@ -383,9 +344,6 @@ function createUserFriendlyErrorMessage(
return errorMessage
}

/**
* Resolves workflow variables with <variable.name> syntax
*/
function resolveWorkflowVariables(
code: string,
workflowVariables: Record<string, any>,
@@ -405,39 +363,35 @@ function resolveWorkflowVariables(
while ((match = regex.exec(code)) !== null) {
const variableName = match[1].trim()

// Find the variable by name (workflowVariables is indexed by ID, values are variable objects)
const foundVariable = Object.entries(workflowVariables).find(
([_, variable]) => normalizeName(variable.name || '') === variableName
)

let variableValue: unknown = ''
if (foundVariable) {
const variable = foundVariable[1]
variableValue = variable.value
if (!foundVariable) {
const availableVars = Object.values(workflowVariables)
.map((v) => v.name)
.filter(Boolean)
throw new Error(
`Variable "${variableName}" doesn't exist.` +
(availableVars.length > 0 ? ` Available: ${availableVars.join(', ')}` : '')
)
}

if (variable.value !== undefined && variable.value !== null) {
const variable = foundVariable[1]
let variableValue: unknown = variable.value

if (variable.value !== undefined && variable.value !== null) {
const type = variable.type === 'string' ? 'plain' : variable.type

if (type === 'number') {
variableValue = Number(variableValue)
} else if (type === 'boolean') {
variableValue = variableValue === 'true' || variableValue === true
} else if (type === 'json' && typeof variableValue === 'string') {
try {
// Handle 'string' type the same as 'plain' for backward compatibility
const type = variable.type === 'string' ? 'plain' : variable.type

// For plain text, use exactly what's entered without modifications
if (type === 'plain' && typeof variableValue === 'string') {
// Use as-is for plain text
} else if (type === 'number') {
variableValue = Number(variableValue)
} else if (type === 'boolean') {
variableValue = variableValue === 'true' || variableValue === true
} else if (type === 'json') {
try {
variableValue =
typeof variableValue === 'string' ? JSON.parse(variableValue) : variableValue
} catch {
// Keep original value if JSON parsing fails
}
}
variableValue = JSON.parse(variableValue)
} catch {
// Fallback to original value on error
variableValue = variable.value
// Keep as-is
}
}
}
@@ -450,11 +404,9 @@ function resolveWorkflowVariables(
})
}

// Process replacements in reverse order to maintain correct indices
for (let i = replacements.length - 1; i >= 0; i--) {
const { match: matchStr, index, variableName, variableValue } = replacements[i]

// Use variable reference approach
const safeVarName = `__variable_${variableName.replace(/[^a-zA-Z0-9_]/g, '_')}`
contextVariables[safeVarName] = variableValue
resolvedCode =
@@ -464,9 +416,6 @@ function resolveWorkflowVariables(
return resolvedCode
}

/**
* Resolves environment variables with {{var_name}} syntax
*/
function resolveEnvironmentVariables(
code: string,
params: Record<string, any>,
@@ -482,32 +431,28 @@ function resolveEnvironmentVariables(

const resolverVars: Record<string, string> = {}
Object.entries(params).forEach(([key, value]) => {
if (value) {
if (value !== undefined && value !== null) {
resolverVars[key] = String(value)
}
})
Object.entries(envVars).forEach(([key, value]) => {
if (value) {
if (value !== undefined && value !== null) {
resolverVars[key] = value
}
})

while ((match = regex.exec(code)) !== null) {
const varName = match[1].trim()
const resolved = resolveEnvVarReferences(match[0], resolverVars, {
allowEmbedded: true,
resolveExactMatch: true,
trimKeys: true,
onMissing: 'empty',
deep: false,
})
const varValue =
typeof resolved === 'string' ? resolved : resolved == null ? '' : String(resolved)

if (!(varName in resolverVars)) {
continue
}

replacements.push({
match: match[0],
index: match.index,
varName,
varValue: String(varValue),
varValue: resolverVars[varName],
})
}

@@ -523,12 +468,8 @@ function resolveEnvironmentVariables(
return resolvedCode
}

/**
* Resolves tags with <tag_name> syntax (including nested paths like <block.response.data>)
*/
function resolveTagVariables(
code: string,
params: Record<string, any>,
blockData: Record<string, any>,
blockNameMapping: Record<string, string>,
contextVariables: Record<string, any>
@@ -543,27 +484,30 @@ function resolveTagVariables(

for (const match of tagMatches) {
const tagName = match.slice(REFERENCE.START.length, -REFERENCE.END.length).trim()
const pathParts = tagName.split(REFERENCE.PATH_DELIMITER)
const blockName = pathParts[0]

// Handle nested paths like "getrecord.response.data" or "function1.response.result"
// First try params, then blockData directly, then try with block name mapping
let tagValue = getNestedValue(params, tagName) || getNestedValue(blockData, tagName) || ''

// If not found and the path starts with a block name, try mapping the block name to ID
if (!tagValue && tagName.includes(REFERENCE.PATH_DELIMITER)) {
const pathParts = tagName.split(REFERENCE.PATH_DELIMITER)
const normalizedBlockName = pathParts[0] // This should already be normalized like "function1"

// Direct lookup using normalized block name
const blockId = blockNameMapping[normalizedBlockName] ?? null

if (blockId) {
const remainingPath = pathParts.slice(1).join('.')
const fullPath = `${blockId}.${remainingPath}`
tagValue = getNestedValue(blockData, fullPath) || ''
}
const blockId = blockNameMapping[blockName]
if (!blockId) {
continue
}

const blockOutput = blockData[blockId]
if (blockOutput === undefined) {
continue
}

let tagValue: any
if (pathParts.length === 1) {
tagValue = blockOutput
} else {
tagValue = navigatePath(blockOutput, pathParts.slice(1))
}

if (tagValue === undefined) {
continue
}

// If the value is a stringified JSON, parse it back to object
if (
typeof tagValue === 'string' &&
tagValue.length > 100 &&
@@ -571,16 +515,13 @@ function resolveTagVariables(
) {
try {
tagValue = JSON.parse(tagValue)
} catch (e) {
// Keep as string if parsing fails
} catch {
// Keep as-is
}
}

// Instead of injecting large JSON directly, create a variable reference
const safeVarName = `__tag_${tagName.replace(/[^a-zA-Z0-9_]/g, '_')}`
contextVariables[safeVarName] = tagValue

// Replace the template with a variable reference
resolvedCode = resolvedCode.replace(new RegExp(escapeRegExp(match), 'g'), safeVarName)
}

@@ -605,35 +546,13 @@ function resolveCodeVariables(
let resolvedCode = code
const contextVariables: Record<string, any> = {}

// Resolve workflow variables with <variable.name> syntax first
resolvedCode = resolveWorkflowVariables(resolvedCode, workflowVariables, contextVariables)

// Resolve environment variables with {{var_name}} syntax
resolvedCode = resolveEnvironmentVariables(resolvedCode, params, envVars, contextVariables)

// Resolve tags with <tag_name> syntax (including nested paths like <block.response.data>)
resolvedCode = resolveTagVariables(
resolvedCode,
params,
blockData,
blockNameMapping,
contextVariables
)
resolvedCode = resolveTagVariables(resolvedCode, blockData, blockNameMapping, contextVariables)

return { resolvedCode, contextVariables }
}

/**
* Get nested value from object using dot notation path
*/
function getNestedValue(obj: any, path: string): any {
if (!obj || !path) return undefined

return path.split('.').reduce((current, key) => {
return current && typeof current === 'object' ? current[key] : undefined
}, obj)
}

/**
* Remove one trailing newline from stdout
* This handles the common case where print() or console.log() adds a trailing \n
@@ -671,7 +590,6 @@ export async function POST(req: NextRequest) {
isCustomTool = false,
} = body

// Extract internal parameters that shouldn't be passed to the execution context
const executionParams = { ...params }
executionParams._context = undefined

@@ -697,7 +615,6 @@ export async function POST(req: NextRequest) {

const lang = isValidCodeLanguage(language) ? language : DEFAULT_CODE_LANGUAGE

// Extract imports once for JavaScript code (reuse later to avoid double extraction)
let jsImports = ''
let jsRemainingCode = resolvedCode
let hasImports = false
@@ -707,31 +624,22 @@ export async function POST(req: NextRequest) {
jsImports = extractionResult.imports
jsRemainingCode = extractionResult.remainingCode

// Check for ES6 imports or CommonJS require statements
// ES6 imports are extracted by the TypeScript parser
// Also check for require() calls which indicate external dependencies
const hasRequireStatements = /require\s*\(\s*['"`]/.test(resolvedCode)
hasImports = jsImports.trim().length > 0 || hasRequireStatements
}

// Python always requires E2B
if (lang === CodeLanguage.Python && !isE2bEnabled) {
throw new Error(
'Python execution requires E2B to be enabled. Please contact your administrator to enable E2B, or use JavaScript instead.'
)
}

// JavaScript with imports requires E2B
if (lang === CodeLanguage.JavaScript && hasImports && !isE2bEnabled) {
throw new Error(
'JavaScript code with import statements requires E2B to be enabled. Please remove the import statements, or contact your administrator to enable E2B.'
)
}

// Use E2B if:
// - E2B is enabled AND
// - Not a custom tool AND
// - (Python OR JavaScript with imports)
const useE2B =
isE2bEnabled &&
!isCustomTool &&
@@ -744,13 +652,10 @@ export async function POST(req: NextRequest) {
language: lang,
})
let prologue = ''
const epilogue = ''

if (lang === CodeLanguage.JavaScript) {
// Track prologue lines for error adjustment
let prologueLineCount = 0

// Reuse the imports we already extracted earlier
const imports = jsImports
const remainingCode = jsRemainingCode

@@ -782,7 +687,7 @@ export async function POST(req: NextRequest) {
' }',
'})();',
].join('\n')
const codeForE2B = importSection + prologue + wrapped + epilogue
const codeForE2B = importSection + prologue + wrapped

const execStart = Date.now()
const {
@@ -804,7 +709,6 @@ export async function POST(req: NextRequest) {
error: e2bError,
})

// If there was an execution error, format it properly
if (e2bError) {
const { formattedError, cleanedOutput } = formatE2BError(
e2bError,
@@ -828,7 +732,7 @@ export async function POST(req: NextRequest) {
output: { result: e2bResult ?? null, stdout: cleanStdout(stdout), executionTime },
})
}
// Track prologue lines for error adjustment

let prologueLineCount = 0
prologue += 'import json\n'
prologueLineCount++
@@ -846,7 +750,7 @@ export async function POST(req: NextRequest) {
'__sim_result__ = __sim_main__()',
"print('__SIM_RESULT__=' + json.dumps(__sim_result__))",
].join('\n')
const codeForE2B = prologue + wrapped + epilogue
const codeForE2B = prologue + wrapped

const execStart = Date.now()
const {
@@ -868,7 +772,6 @@ export async function POST(req: NextRequest) {
error: e2bError,
})

// If there was an execution error, format it properly
if (e2bError) {
const { formattedError, cleanedOutput } = formatE2BError(
e2bError,
@@ -897,7 +800,6 @@ export async function POST(req: NextRequest) {

const wrapperLines = ['(async () => {', ' try {']
if (isCustomTool) {
wrapperLines.push(' // For custom tools, make parameters directly accessible')
Object.keys(executionParams).forEach((key) => {
wrapperLines.push(` const ${key} = params.${key};`)
})
@@ -931,12 +833,10 @@ export async function POST(req: NextRequest) {
})

const ivmError = isolatedResult.error
// Adjust line number for prepended param destructuring in custom tools
let adjustedLine = ivmError.line
let adjustedLineContent = ivmError.lineContent
if (prependedLineCount > 0 && ivmError.line !== undefined) {
adjustedLine = Math.max(1, ivmError.line - prependedLineCount)
// Get line content from original user code, not the prepended code
const codeLines = resolvedCode.split('\n')
if (adjustedLine <= codeLines.length) {
adjustedLineContent = codeLines[adjustedLine - 1]?.trim()
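The wrapper-line constants and `prologueLineCount` above feed one piece of arithmetic: the sandbox reports line numbers for the full generated file (prologue + wrapper + user code), so subtracting the combined offset recovers the line in the code the user actually wrote. A sketch of that adjustment, assuming the constants from the diff (`toUserLine` itself is a hypothetical helper):

```ts
// Constants match the diff; everything else here is illustrative.
const E2B_JS_WRAPPER_LINES = 3
const E2B_PYTHON_WRAPPER_LINES = 1

function toUserLine(
  reportedLine: number,
  language: 'javascript' | 'python',
  prologueLineCount: number
): number | undefined {
  const wrapperLines =
    language === 'python' ? E2B_PYTHON_WRAPPER_LINES : E2B_JS_WRAPPER_LINES
  const userLine = reportedLine - (prologueLineCount + wrapperLines)
  // Non-positive result means the error was in prologue/wrapper code, not user code.
  return userLine > 0 ? userLine : undefined
}

// A Python error reported at sandbox line 5 with a 1-line prologue
// maps to user line 5 - (1 + 1) = 3.
```
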
@@ -1,395 +0,0 @@
import { createLogger } from '@sim/logger'
import type { NextRequest } from 'next/server'
import { NextResponse } from 'next/server'
import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateInternalToken } from '@/lib/auth/internal'
import { isDev } from '@/lib/core/config/feature-flags'
import { createPinnedUrl, validateUrlWithDNS } from '@/lib/core/security/input-validation'
import { generateRequestId } from '@/lib/core/utils/request'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { executeTool } from '@/tools'
import { getTool, validateRequiredParametersAfterMerge } from '@/tools/utils'

const logger = createLogger('ProxyAPI')

const proxyPostSchema = z.object({
toolId: z.string().min(1, 'toolId is required'),
params: z.record(z.any()).optional().default({}),
executionContext: z
.object({
workflowId: z.string().optional(),
workspaceId: z.string().optional(),
executionId: z.string().optional(),
userId: z.string().optional(),
})
.optional(),
})

/**
* Creates a minimal set of default headers for proxy requests
* @returns Record of HTTP headers
*/
const getProxyHeaders = (): Record<string, string> => {
return {
'User-Agent':
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/135.0.0.0 Safari/537.36',
Accept: '*/*',
'Accept-Encoding': 'gzip, deflate, br',
'Cache-Control': 'no-cache',
Connection: 'keep-alive',
}
}

/**
* Formats a response with CORS headers
* @param responseData Response data object
* @param status HTTP status code
* @returns NextResponse with CORS headers
*/
const formatResponse = (responseData: any, status = 200) => {
return NextResponse.json(responseData, {
status,
headers: {
'Access-Control-Allow-Origin': '*',
'Access-Control-Allow-Methods': 'GET, POST, PUT, DELETE, OPTIONS',
'Access-Control-Allow-Headers': 'Content-Type, Authorization',
},
})
}

/**
* Creates an error response with consistent formatting
* @param error Error object or message
* @param status HTTP status code
* @param additionalData Additional data to include in the response
* @returns Formatted error response
*/
const createErrorResponse = (error: any, status = 500, additionalData = {}) => {
const errorMessage = error instanceof Error ? error.message : String(error)
const errorStack = error instanceof Error ? error.stack : undefined

logger.error('Creating error response', {
errorMessage,
status,
stack: isDev ? errorStack : undefined,
})

return formatResponse(
{
success: false,
error: errorMessage,
stack: isDev ? errorStack : undefined,
...additionalData,
},
status
)
}

/**
* GET handler for direct external URL proxying
* This allows for GET requests to external APIs
*/
export async function GET(request: Request) {
const url = new URL(request.url)
const targetUrl = url.searchParams.get('url')
const requestId = generateRequestId()

// Vault download proxy: /api/proxy?vaultDownload=1&bucket=...&object=...&credentialId=...
const vaultDownload = url.searchParams.get('vaultDownload')
if (vaultDownload === '1') {
try {
const bucket = url.searchParams.get('bucket')
const objectParam = url.searchParams.get('object')
const credentialId = url.searchParams.get('credentialId')

if (!bucket || !objectParam || !credentialId) {
return createErrorResponse('Missing bucket, object, or credentialId', 400)
}

// Fetch access token using existing token API
const baseUrl = new URL(getBaseUrl())
const tokenUrl = new URL('/api/auth/oauth/token', baseUrl)

// Build headers: forward session cookies if present; include internal auth for server-side
const tokenHeaders: Record<string, string> = { 'Content-Type': 'application/json' }
const incomingCookie = request.headers.get('cookie')
if (incomingCookie) tokenHeaders.Cookie = incomingCookie
try {
const internalToken = await generateInternalToken()
tokenHeaders.Authorization = `Bearer ${internalToken}`
} catch (_e) {
// best-effort internal auth
}

// Optional workflow context for collaboration auth
const workflowId = url.searchParams.get('workflowId') || undefined

const tokenRes = await fetch(tokenUrl.toString(), {
method: 'POST',
headers: tokenHeaders,
body: JSON.stringify({ credentialId, workflowId }),
})

if (!tokenRes.ok) {
const err = await tokenRes.text()
return createErrorResponse(`Failed to fetch access token: ${err}`, 401)
}

const tokenJson = await tokenRes.json()
const accessToken = tokenJson.accessToken
if (!accessToken) {
return createErrorResponse('No access token available', 401)
}

// Avoid double-encoding: incoming object may already be percent-encoded
const objectDecoded = decodeURIComponent(objectParam)
const gcsUrl = `https://storage.googleapis.com/storage/v1/b/${encodeURIComponent(
bucket
)}/o/${encodeURIComponent(objectDecoded)}?alt=media`

const fileRes = await fetch(gcsUrl, {
headers: { Authorization: `Bearer ${accessToken}` },
})

if (!fileRes.ok) {
const errText = await fileRes.text()
return createErrorResponse(errText || 'Failed to download file', fileRes.status)
}

const headers = new Headers()
fileRes.headers.forEach((v, k) => headers.set(k, v))
return new NextResponse(fileRes.body, { status: 200, headers })
} catch (error: any) {
logger.error(`[${requestId}] Vault download proxy failed`, {
error: error instanceof Error ? error.message : String(error),
})
return createErrorResponse('Vault download failed', 500)
}
}

if (!targetUrl) {
logger.error(`[${requestId}] Missing 'url' parameter`)
return createErrorResponse("Missing 'url' parameter", 400)
}

const urlValidation = await validateUrlWithDNS(targetUrl)
if (!urlValidation.isValid) {
logger.warn(`[${requestId}] Blocked proxy request`, {
url: targetUrl.substring(0, 100),
error: urlValidation.error,
})
return createErrorResponse(urlValidation.error || 'Invalid URL', 403)
}

const method = url.searchParams.get('method') || 'GET'

const bodyParam = url.searchParams.get('body')
let body: string | undefined

if (bodyParam && ['POST', 'PUT', 'PATCH'].includes(method.toUpperCase())) {
try {
body = decodeURIComponent(bodyParam)
} catch (error) {
logger.warn(`[${requestId}] Failed to decode body parameter`, error)
}
}

const customHeaders: Record<string, string> = {}

for (const [key, value] of url.searchParams.entries()) {
if (key.startsWith('header.')) {
const headerName = key.substring(7)
customHeaders[headerName] = value
}
}

if (body && !customHeaders['Content-Type']) {
customHeaders['Content-Type'] = 'application/json'
}

logger.info(`[${requestId}] Proxying ${method} request to: ${targetUrl}`)

try {
const pinnedUrl = createPinnedUrl(targetUrl, urlValidation.resolvedIP!)
const response = await fetch(pinnedUrl, {
method: method,
headers: {
...getProxyHeaders(),
...customHeaders,
Host: urlValidation.originalHostname!,
},
body: body || undefined,
})

const contentType = response.headers.get('content-type') || ''
let data

if (contentType.includes('application/json')) {
data = await response.json()
} else {
data = await response.text()
}

const errorMessage = !response.ok
? data && typeof data === 'object' && data.error
? `${data.error.message || JSON.stringify(data.error)}`
: response.statusText || `HTTP error ${response.status}`
: undefined

if (!response.ok) {
logger.error(`[${requestId}] External API error: ${response.status} ${response.statusText}`)
}

return formatResponse({
success: response.ok,
status: response.status,
statusText: response.statusText,
headers: Object.fromEntries(response.headers.entries()),
data,
error: errorMessage,
})
} catch (error: any) {
logger.error(`[${requestId}] Proxy GET request failed`, {
url: targetUrl,
error: error instanceof Error ? error.message : String(error),
stack: error instanceof Error ? error.stack : undefined,
})

return createErrorResponse(error)
}
}

export async function POST(request: NextRequest) {
const requestId = generateRequestId()
const startTime = new Date()
const startTimeISO = startTime.toISOString()

try {
const authResult = await checkHybridAuth(request, { requireWorkflowId: false })
if (!authResult.success) {
logger.error(`[${requestId}] Authentication failed for proxy:`, authResult.error)
return createErrorResponse('Unauthorized', 401)
}

let requestBody
try {
requestBody = await request.json()
} catch (parseError) {
logger.error(`[${requestId}] Failed to parse request body`, {
error: parseError instanceof Error ? parseError.message : String(parseError),
})
throw new Error('Invalid JSON in request body')
}

const validationResult = proxyPostSchema.safeParse(requestBody)
if (!validationResult.success) {
logger.error(`[${requestId}] Request validation failed`, {
errors: validationResult.error.errors,
})
const errorMessages = validationResult.error.errors
.map((err) => `${err.path.join('.')}: ${err.message}`)
.join(', ')
throw new Error(`Validation failed: ${errorMessages}`)
}

const { toolId, params } = validationResult.data

logger.info(`[${requestId}] Processing tool: ${toolId}`)

const tool = getTool(toolId)

if (!tool) {
logger.error(`[${requestId}] Tool not found: ${toolId}`)
throw new Error(`Tool not found: ${toolId}`)
}

try {
validateRequiredParametersAfterMerge(toolId, tool, params)
} catch (validationError) {
logger.warn(`[${requestId}] Tool validation failed for ${toolId}`, {
error: validationError instanceof Error ? validationError.message : String(validationError),
})

const endTime = new Date()
const endTimeISO = endTime.toISOString()
const duration = endTime.getTime() - startTime.getTime()

return createErrorResponse(validationError, 400, {
startTime: startTimeISO,
endTime: endTimeISO,
duration,
})
}

const hasFileOutputs =
tool.outputs &&
Object.values(tool.outputs).some(
(output) => output.type === 'file' || output.type === 'file[]'
)

const result = await executeTool(
toolId,
params,
true, // skipProxy (we're already in the proxy)
!hasFileOutputs, // skipPostProcess (don't skip if tool has file outputs)
undefined // execution context is not available in proxy context
)

if (!result.success) {
logger.warn(`[${requestId}] Tool execution failed for ${toolId}`, {
error: result.error || 'Unknown error',
})

throw new Error(result.error || 'Tool execution failed')
}

const endTime = new Date()
const endTimeISO = endTime.toISOString()
const duration = endTime.getTime() - startTime.getTime()

const responseWithTimingData = {
...result,
startTime: startTimeISO,
endTime: endTimeISO,
duration,
timing: {
startTime: startTimeISO,
endTime: endTimeISO,
duration,
},
}

logger.info(`[${requestId}] Tool executed successfully: ${toolId} (${duration}ms)`)

return formatResponse(responseWithTimingData)
} catch (error: any) {
logger.error(`[${requestId}] Proxy request failed`, {
error: error instanceof Error ? error.message : String(error),
stack: error instanceof Error ? error.stack : undefined,
name: error instanceof Error ? error.name : undefined,
})

const endTime = new Date()
const endTimeISO = endTime.toISOString()
const duration = endTime.getTime() - startTime.getTime()

return createErrorResponse(error, 500, {
startTime: startTimeISO,
endTime: endTimeISO,
duration,
})
}
}

export async function OPTIONS() {
return new NextResponse(null, {
status: 204,
headers: {
'Access-Control-Allow-Origin': '*',
'Access-Control-Allow-Methods': 'GET, POST, PUT, DELETE, OPTIONS',
'Access-Control-Allow-Headers': 'Content-Type, Authorization',
'Access-Control-Max-Age': '86400',
},
})
}
apps/sim/app/api/superuser/import-workflow/route.ts (new file, 193 lines)
@@ -0,0 +1,193 @@
import { db } from '@sim/db'
import { copilotChats, workflow, workspace } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { verifyEffectiveSuperUser } from '@/lib/templates/permissions'
import { parseWorkflowJson } from '@/lib/workflows/operations/import-export'
import {
loadWorkflowFromNormalizedTables,
saveWorkflowToNormalizedTables,
} from '@/lib/workflows/persistence/utils'
import { sanitizeForExport } from '@/lib/workflows/sanitization/json-sanitizer'

const logger = createLogger('SuperUserImportWorkflow')

interface ImportWorkflowRequest {
workflowId: string
targetWorkspaceId: string
}

/**
* POST /api/superuser/import-workflow
*
* Superuser endpoint to import a workflow by ID along with its copilot chats.
* This creates a copy of the workflow in the target workspace with new IDs.
* Only the workflow structure and copilot chats are copied - no deployments,
* webhooks, triggers, or other sensitive data.
*
* Requires both isSuperUser flag AND superUserModeEnabled setting.
*/
export async function POST(request: NextRequest) {
try {
const session = await getSession()
if (!session?.user?.id) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}

const { effectiveSuperUser, isSuperUser, superUserModeEnabled } =
await verifyEffectiveSuperUser(session.user.id)

if (!effectiveSuperUser) {
logger.warn('Non-effective-superuser attempted to access import-workflow endpoint', {
userId: session.user.id,
isSuperUser,
superUserModeEnabled,
})
return NextResponse.json({ error: 'Forbidden: Superuser access required' }, { status: 403 })
}

const body: ImportWorkflowRequest = await request.json()
const { workflowId, targetWorkspaceId } = body

if (!workflowId) {
return NextResponse.json({ error: 'workflowId is required' }, { status: 400 })
}

if (!targetWorkspaceId) {
return NextResponse.json({ error: 'targetWorkspaceId is required' }, { status: 400 })
}

// Verify target workspace exists
const [targetWorkspace] = await db
.select({ id: workspace.id, ownerId: workspace.ownerId })
.from(workspace)
.where(eq(workspace.id, targetWorkspaceId))
.limit(1)

if (!targetWorkspace) {
return NextResponse.json({ error: 'Target workspace not found' }, { status: 404 })
}

// Get the source workflow
const [sourceWorkflow] = await db
.select()
.from(workflow)
.where(eq(workflow.id, workflowId))
.limit(1)

if (!sourceWorkflow) {
return NextResponse.json({ error: 'Source workflow not found' }, { status: 404 })
}

// Load the workflow state from normalized tables
const normalizedData = await loadWorkflowFromNormalizedTables(workflowId)

if (!normalizedData) {
return NextResponse.json(
{ error: 'Workflow has no normalized data - cannot import' },
{ status: 400 }
)
}

// Use existing export logic to create export format
const workflowState = {
blocks: normalizedData.blocks,
edges: normalizedData.edges,
loops: normalizedData.loops,
parallels: normalizedData.parallels,
metadata: {
name: sourceWorkflow.name,
description: sourceWorkflow.description ?? undefined,
color: sourceWorkflow.color,
},
}

const exportData = sanitizeForExport(workflowState)

// Use existing import logic (parseWorkflowJson regenerates IDs automatically)
const { data: importedData, errors } = parseWorkflowJson(JSON.stringify(exportData))

if (!importedData || errors.length > 0) {
return NextResponse.json(
{ error: `Failed to parse workflow: ${errors.join(', ')}` },
{ status: 400 }
)
}

// Create new workflow record
const newWorkflowId = crypto.randomUUID()
const now = new Date()

await db.insert(workflow).values({
id: newWorkflowId,
userId: session.user.id,
workspaceId: targetWorkspaceId,
folderId: null, // Don't copy folder association
name: `[Debug Import] ${sourceWorkflow.name}`,
description: sourceWorkflow.description,
color: sourceWorkflow.color,
lastSynced: now,
createdAt: now,
updatedAt: now,
isDeployed: false, // Never copy deployment status
runCount: 0,
variables: sourceWorkflow.variables || {},
})

// Save using existing persistence logic
const saveResult = await saveWorkflowToNormalizedTables(newWorkflowId, importedData)

if (!saveResult.success) {
// Clean up the workflow record if save failed
await db.delete(workflow).where(eq(workflow.id, newWorkflowId))
return NextResponse.json(
{ error: `Failed to save workflow state: ${saveResult.error}` },
{ status: 500 }
)
}

// Copy copilot chats associated with the source workflow
const sourceCopilotChats = await db
.select()
.from(copilotChats)
.where(eq(copilotChats.workflowId, workflowId))

let copilotChatsImported = 0

for (const chat of sourceCopilotChats) {
await db.insert(copilotChats).values({
userId: session.user.id,
workflowId: newWorkflowId,
title: chat.title ? `[Import] ${chat.title}` : null,
messages: chat.messages,
model: chat.model,
conversationId: null, // Don't copy conversation ID
previewYaml: chat.previewYaml,
planArtifact: chat.planArtifact,
config: chat.config,
createdAt: new Date(),
updatedAt: new Date(),
})
copilotChatsImported++
}

logger.info('Superuser imported workflow', {
userId: session.user.id,
sourceWorkflowId: workflowId,
newWorkflowId,
targetWorkspaceId,
copilotChatsImported,
})

return NextResponse.json({
success: true,
newWorkflowId,
copilotChatsImported,
})
} catch (error) {
logger.error('Error importing workflow', error)
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
}
}
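For reference, a hedged usage sketch for the new endpoint; the request body and response fields come from the route above, while the `importWorkflowAsSuperuser` wrapper and its error handling are assumptions:

```ts
// Hypothetical client-side caller for the superuser import endpoint.
async function importWorkflowAsSuperuser(workflowId: string, targetWorkspaceId: string) {
  const res = await fetch('/api/superuser/import-workflow', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ workflowId, targetWorkspaceId }),
  })
  if (!res.ok) {
    // The route returns 401/403/404/400/500 with an { error } body.
    throw new Error(`Import failed: ${res.status}`)
  }
  // Shape per the route: { success: true, newWorkflowId, copilotChatsImported }
  return res.json()
}
```
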
@@ -5,7 +5,7 @@ import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { generateRequestId } from '@/lib/core/utils/request'
import { verifySuperUser } from '@/lib/templates/permissions'
import { verifyEffectiveSuperUser } from '@/lib/templates/permissions'

const logger = createLogger('TemplateApprovalAPI')

@@ -25,8 +25,8 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}

const { isSuperUser } = await verifySuperUser(session.user.id)
if (!isSuperUser) {
const { effectiveSuperUser } = await verifyEffectiveSuperUser(session.user.id)
if (!effectiveSuperUser) {
logger.warn(`[${requestId}] Non-super user attempted to approve template: ${id}`)
return NextResponse.json({ error: 'Only super users can approve templates' }, { status: 403 })
}
@@ -71,8 +71,8 @@ export async function DELETE(
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}

const { isSuperUser } = await verifySuperUser(session.user.id)
if (!isSuperUser) {
const { effectiveSuperUser } = await verifyEffectiveSuperUser(session.user.id)
if (!effectiveSuperUser) {
logger.warn(`[${requestId}] Non-super user attempted to reject template: ${id}`)
return NextResponse.json({ error: 'Only super users can reject templates' }, { status: 403 })
}

@@ -5,7 +5,7 @@ import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { generateRequestId } from '@/lib/core/utils/request'
import { verifySuperUser } from '@/lib/templates/permissions'
import { verifyEffectiveSuperUser } from '@/lib/templates/permissions'

const logger = createLogger('TemplateRejectionAPI')

@@ -25,8 +25,8 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}

const { isSuperUser } = await verifySuperUser(session.user.id)
if (!isSuperUser) {
const { effectiveSuperUser } = await verifyEffectiveSuperUser(session.user.id)
if (!effectiveSuperUser) {
logger.warn(`[${requestId}] Non-super user attempted to reject template: ${id}`)
return NextResponse.json({ error: 'Only super users can reject templates' }, { status: 403 })
}

@@ -3,7 +3,6 @@ import {
templateCreators,
templateStars,
templates,
user,
workflow,
workflowDeploymentVersion,
} from '@sim/db/schema'
@@ -14,6 +13,7 @@ import { v4 as uuidv4 } from 'uuid'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { generateRequestId } from '@/lib/core/utils/request'
import { verifyEffectiveSuperUser } from '@/lib/templates/permissions'
import {
extractRequiredCredentials,
sanitizeCredentials,
@@ -70,8 +70,8 @@ export async function GET(request: NextRequest) {
logger.debug(`[${requestId}] Fetching templates with params:`, params)

// Check if user is a super user
const currentUser = await db.select().from(user).where(eq(user.id, session.user.id)).limit(1)
const isSuperUser = currentUser[0]?.isSuperUser || false
const { effectiveSuperUser } = await verifyEffectiveSuperUser(session.user.id)
const isSuperUser = effectiveSuperUser

// Build query conditions
const conditions = []

@@ -5,7 +5,11 @@ import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { StorageService } from '@/lib/uploads'
import { extractStorageKey, inferContextFromKey } from '@/lib/uploads/utils/file-utils'
import {
extractStorageKey,
inferContextFromKey,
isInternalFileUrl,
} from '@/lib/uploads/utils/file-utils'
import { verifyFileAccess } from '@/app/api/files/authorization'

export const dynamic = 'force-dynamic'
@@ -47,13 +51,13 @@ export async function POST(request: NextRequest) {

logger.info(`[${requestId}] Mistral parse request`, {
filePath: validatedData.filePath,
isWorkspaceFile: validatedData.filePath.includes('/api/files/serve/'),
isWorkspaceFile: isInternalFileUrl(validatedData.filePath),
userId,
})

let fileUrl = validatedData.filePath

if (validatedData.filePath?.includes('/api/files/serve/')) {
if (isInternalFileUrl(validatedData.filePath)) {
try {
const storageKey = extractStorageKey(validatedData.filePath)

@@ -5,7 +5,11 @@ import { checkHybridAuth } from '@/lib/auth/hybrid'
 import { generateRequestId } from '@/lib/core/utils/request'
 import { getBaseUrl } from '@/lib/core/utils/urls'
 import { StorageService } from '@/lib/uploads'
-import { extractStorageKey, inferContextFromKey } from '@/lib/uploads/utils/file-utils'
+import {
+  extractStorageKey,
+  inferContextFromKey,
+  isInternalFileUrl,
+} from '@/lib/uploads/utils/file-utils'
 import { verifyFileAccess } from '@/app/api/files/authorization'

 export const dynamic = 'force-dynamic'
@@ -48,13 +52,13 @@ export async function POST(request: NextRequest) {

     logger.info(`[${requestId}] Pulse parse request`, {
       filePath: validatedData.filePath,
-      isWorkspaceFile: validatedData.filePath.includes('/api/files/serve/'),
+      isWorkspaceFile: isInternalFileUrl(validatedData.filePath),
       userId,
     })

     let fileUrl = validatedData.filePath

-    if (validatedData.filePath?.includes('/api/files/serve/')) {
+    if (isInternalFileUrl(validatedData.filePath)) {
       try {
         const storageKey = extractStorageKey(validatedData.filePath)
         const context = inferContextFromKey(storageKey)
@@ -5,7 +5,11 @@ import { checkHybridAuth } from '@/lib/auth/hybrid'
 import { generateRequestId } from '@/lib/core/utils/request'
 import { getBaseUrl } from '@/lib/core/utils/urls'
 import { StorageService } from '@/lib/uploads'
-import { extractStorageKey, inferContextFromKey } from '@/lib/uploads/utils/file-utils'
+import {
+  extractStorageKey,
+  inferContextFromKey,
+  isInternalFileUrl,
+} from '@/lib/uploads/utils/file-utils'
 import { verifyFileAccess } from '@/app/api/files/authorization'

 export const dynamic = 'force-dynamic'
@@ -44,13 +48,13 @@ export async function POST(request: NextRequest) {

     logger.info(`[${requestId}] Reducto parse request`, {
       filePath: validatedData.filePath,
-      isWorkspaceFile: validatedData.filePath.includes('/api/files/serve/'),
+      isWorkspaceFile: isInternalFileUrl(validatedData.filePath),
       userId,
     })

     let fileUrl = validatedData.filePath

-    if (validatedData.filePath?.includes('/api/files/serve/')) {
+    if (isInternalFileUrl(validatedData.filePath)) {
       try {
         const storageKey = extractStorageKey(validatedData.filePath)
         const context = inferContextFromKey(storageKey)
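The same substitution repeats across the Mistral, Pulse, and Reducto parse routes: the inline '/api/files/serve/' substring test becomes a shared isInternalFileUrl predicate. A minimal sketch of what that helper plausibly does, reconstructed from the checks it replaces (the real implementation is in @/lib/uploads/utils/file-utils and may be stricter):

// Assumed behavior, inferred from the replaced `.includes('/api/files/serve/')` checks.
export function isInternalFileUrl(filePath: string | undefined): boolean {
  if (!filePath) return false
  // One shared predicate keeps every parser's definition of "internal file" in sync
  return filePath.includes('/api/files/serve/')
}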
@@ -79,11 +79,13 @@ export async function POST(request: NextRequest) {
     // Generate public URL for destination (properly encode the destination key)
     const encodedDestKey = validatedData.destinationKey.split('/').map(encodeURIComponent).join('/')
     const url = `https://${validatedData.destinationBucket}.s3.${validatedData.region}.amazonaws.com/${encodedDestKey}`
+    const uri = `s3://${validatedData.destinationBucket}/${validatedData.destinationKey}`

     return NextResponse.json({
       success: true,
       output: {
         url,
+        uri,
         copySourceVersionId: result.CopySourceVersionId,
         versionId: result.VersionId,
         etag: result.CopyObjectResult?.ETag,
@@ -117,11 +117,13 @@ export async function POST(request: NextRequest) {

     const encodedKey = validatedData.objectKey.split('/').map(encodeURIComponent).join('/')
     const url = `https://${validatedData.bucketName}.s3.${validatedData.region}.amazonaws.com/${encodedKey}`
+    const uri = `s3://${validatedData.bucketName}/${validatedData.objectKey}`

     return NextResponse.json({
       success: true,
       output: {
         url,
+        uri,
         etag: result.ETag,
         location: url,
         key: validatedData.objectKey,
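Note the deliberate asymmetry in the two S3 hunks above: the https URL percent-encodes each path segment so browsers can fetch it, while the new s3:// URI keeps the raw object key, which is what AWS SDK calls and Textract-style S3Object inputs expect. With illustrative values:

// Illustrative values only.
const key = 'reports/2024 Q1/summary.pdf'
const encoded = key.split('/').map(encodeURIComponent).join('/')
// url: https://my-bucket.s3.us-east-1.amazonaws.com/reports/2024%20Q1/summary.pdf
// uri: s3://my-bucket/reports/2024 Q1/summary.pdf  (raw key, no encoding)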
apps/sim/app/api/tools/textract/parse/route.ts (new file, 637 lines)
@@ -0,0 +1,637 @@
import crypto from 'crypto'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import {
  validateAwsRegion,
  validateExternalUrl,
  validateS3BucketName,
} from '@/lib/core/security/input-validation'
import { generateRequestId } from '@/lib/core/utils/request'
import { StorageService } from '@/lib/uploads'
import {
  extractStorageKey,
  inferContextFromKey,
  isInternalFileUrl,
} from '@/lib/uploads/utils/file-utils'
import { verifyFileAccess } from '@/app/api/files/authorization'

export const dynamic = 'force-dynamic'
export const maxDuration = 300 // 5 minutes for large multi-page PDF processing

const logger = createLogger('TextractParseAPI')

const QuerySchema = z.object({
  Text: z.string().min(1),
  Alias: z.string().optional(),
  Pages: z.array(z.string()).optional(),
})

const TextractParseSchema = z
  .object({
    accessKeyId: z.string().min(1, 'AWS Access Key ID is required'),
    secretAccessKey: z.string().min(1, 'AWS Secret Access Key is required'),
    region: z.string().min(1, 'AWS region is required'),
    processingMode: z.enum(['sync', 'async']).optional().default('sync'),
    filePath: z.string().optional(),
    s3Uri: z.string().optional(),
    featureTypes: z
      .array(z.enum(['TABLES', 'FORMS', 'QUERIES', 'SIGNATURES', 'LAYOUT']))
      .optional(),
    queries: z.array(QuerySchema).optional(),
  })
  .superRefine((data, ctx) => {
    const regionValidation = validateAwsRegion(data.region, 'AWS region')
    if (!regionValidation.isValid) {
      ctx.addIssue({
        code: z.ZodIssueCode.custom,
        message: regionValidation.error,
        path: ['region'],
      })
    }
  })

function getSignatureKey(
  key: string,
  dateStamp: string,
  regionName: string,
  serviceName: string
): Buffer {
  const kDate = crypto.createHmac('sha256', `AWS4${key}`).update(dateStamp).digest()
  const kRegion = crypto.createHmac('sha256', kDate).update(regionName).digest()
  const kService = crypto.createHmac('sha256', kRegion).update(serviceName).digest()
  const kSigning = crypto.createHmac('sha256', kService).update('aws4_request').digest()
  return kSigning
}

function signAwsRequest(
  method: string,
  host: string,
  uri: string,
  body: string,
  accessKeyId: string,
  secretAccessKey: string,
  region: string,
  service: string,
  amzTarget: string
): Record<string, string> {
  const date = new Date()
  const amzDate = date.toISOString().replace(/[:-]|\.\d{3}/g, '')
  const dateStamp = amzDate.slice(0, 8)

  const payloadHash = crypto.createHash('sha256').update(body).digest('hex')

  const canonicalHeaders =
    `content-type:application/x-amz-json-1.1\n` +
    `host:${host}\n` +
    `x-amz-date:${amzDate}\n` +
    `x-amz-target:${amzTarget}\n`

  const signedHeaders = 'content-type;host;x-amz-date;x-amz-target'

  const canonicalRequest = `${method}\n${uri}\n\n${canonicalHeaders}\n${signedHeaders}\n${payloadHash}`

  const algorithm = 'AWS4-HMAC-SHA256'
  const credentialScope = `${dateStamp}/${region}/${service}/aws4_request`
  const stringToSign = `${algorithm}\n${amzDate}\n${credentialScope}\n${crypto.createHash('sha256').update(canonicalRequest).digest('hex')}`

  const signingKey = getSignatureKey(secretAccessKey, dateStamp, region, service)
  const signature = crypto.createHmac('sha256', signingKey).update(stringToSign).digest('hex')

  const authorizationHeader = `${algorithm} Credential=${accessKeyId}/${credentialScope}, SignedHeaders=${signedHeaders}, Signature=${signature}`

  return {
    'Content-Type': 'application/x-amz-json-1.1',
    Host: host,
    'X-Amz-Date': amzDate,
    'X-Amz-Target': amzTarget,
    Authorization: authorizationHeader,
  }
}
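// How the two helpers above fit together: getSignatureKey derives the SigV4
// signing key by chaining HMAC-SHA256 over the secret, date stamp, region,
// service, and the literal 'aws4_request'; signAwsRequest then hashes the
// canonical request, signs the resulting string-to-sign with that key, and
// emits the Authorization header. For region 'us-east-1' on 2024-01-15:
//   kDate    = HMAC('AWS4' + secret, '20240115')
//   kRegion  = HMAC(kDate, 'us-east-1')
//   kService = HMAC(kRegion, 'textract')
//   kSigning = HMAC(kService, 'aws4_request')
// The canonical header block and the `signedHeaders` list must stay in sync,
// or AWS rejects the request with a signature mismatch.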
async function fetchDocumentBytes(url: string): Promise<{ bytes: string; contentType: string }> {
  const response = await fetch(url)
  if (!response.ok) {
    throw new Error(`Failed to fetch document: ${response.statusText}`)
  }

  const arrayBuffer = await response.arrayBuffer()
  const bytes = Buffer.from(arrayBuffer).toString('base64')
  const contentType = response.headers.get('content-type') || 'application/octet-stream'

  return { bytes, contentType }
}

function parseS3Uri(s3Uri: string): { bucket: string; key: string } {
  const match = s3Uri.match(/^s3:\/\/([^/]+)\/(.+)$/)
  if (!match) {
    throw new Error(
      `Invalid S3 URI format: ${s3Uri}. Expected format: s3://bucket-name/path/to/object`
    )
  }

  const bucket = match[1]
  const key = match[2]

  const bucketValidation = validateS3BucketName(bucket, 'S3 bucket name')
  if (!bucketValidation.isValid) {
    throw new Error(bucketValidation.error)
  }

  if (key.includes('..') || key.startsWith('/')) {
    throw new Error('S3 key contains invalid path traversal sequences')
  }

  return { bucket, key }
}

function sleep(ms: number): Promise<void> {
  return new Promise((resolve) => setTimeout(resolve, ms))
}

async function callTextractAsync(
  host: string,
  amzTarget: string,
  body: Record<string, unknown>,
  accessKeyId: string,
  secretAccessKey: string,
  region: string
): Promise<Record<string, unknown>> {
  const bodyString = JSON.stringify(body)
  const headers = signAwsRequest(
    'POST',
    host,
    '/',
    bodyString,
    accessKeyId,
    secretAccessKey,
    region,
    'textract',
    amzTarget
  )

  const response = await fetch(`https://${host}/`, {
    method: 'POST',
    headers,
    body: bodyString,
  })

  if (!response.ok) {
    const errorText = await response.text()
    let errorMessage = `Textract API error: ${response.statusText}`
    try {
      const errorJson = JSON.parse(errorText)
      if (errorJson.Message) {
        errorMessage = errorJson.Message
      } else if (errorJson.__type) {
        errorMessage = `${errorJson.__type}: ${errorJson.message || errorText}`
      }
    } catch {
      // Use default error message
    }
    throw new Error(errorMessage)
  }

  return response.json()
}

async function pollForJobCompletion(
  host: string,
  jobId: string,
  accessKeyId: string,
  secretAccessKey: string,
  region: string,
  useAnalyzeDocument: boolean,
  requestId: string
): Promise<Record<string, unknown>> {
  const pollIntervalMs = 5000 // 5 seconds between polls
  const maxPollTimeMs = 180000 // 3 minutes maximum polling time
  const maxAttempts = Math.ceil(maxPollTimeMs / pollIntervalMs)

  const getTarget = useAnalyzeDocument
    ? 'Textract.GetDocumentAnalysis'
    : 'Textract.GetDocumentTextDetection'

  for (let attempt = 0; attempt < maxAttempts; attempt++) {
    const result = await callTextractAsync(
      host,
      getTarget,
      { JobId: jobId },
      accessKeyId,
      secretAccessKey,
      region
    )

    const jobStatus = result.JobStatus as string

    if (jobStatus === 'SUCCEEDED') {
      logger.info(`[${requestId}] Async job completed successfully after ${attempt + 1} polls`)

      let allBlocks = (result.Blocks as unknown[]) || []
      let nextToken = result.NextToken as string | undefined

      while (nextToken) {
        const nextResult = await callTextractAsync(
          host,
          getTarget,
          { JobId: jobId, NextToken: nextToken },
          accessKeyId,
          secretAccessKey,
          region
        )
        allBlocks = allBlocks.concat((nextResult.Blocks as unknown[]) || [])
        nextToken = nextResult.NextToken as string | undefined
      }

      return {
        ...result,
        Blocks: allBlocks,
      }
    }

    if (jobStatus === 'FAILED') {
      throw new Error(`Textract job failed: ${result.StatusMessage || 'Unknown error'}`)
    }

    if (jobStatus === 'PARTIAL_SUCCESS') {
      logger.warn(`[${requestId}] Job completed with partial success: ${result.StatusMessage}`)

      let allBlocks = (result.Blocks as unknown[]) || []
      let nextToken = result.NextToken as string | undefined

      while (nextToken) {
        const nextResult = await callTextractAsync(
          host,
          getTarget,
          { JobId: jobId, NextToken: nextToken },
          accessKeyId,
          secretAccessKey,
          region
        )
        allBlocks = allBlocks.concat((nextResult.Blocks as unknown[]) || [])
        nextToken = nextResult.NextToken as string | undefined
      }

      return {
        ...result,
        Blocks: allBlocks,
      }
    }

    logger.info(`[${requestId}] Job status: ${jobStatus}, attempt ${attempt + 1}/${maxAttempts}`)
    await sleep(pollIntervalMs)
  }

  throw new Error(
    `Timeout waiting for Textract job to complete (max ${maxPollTimeMs / 1000} seconds)`
  )
}

export async function POST(request: NextRequest) {
  const requestId = generateRequestId()

  try {
    const authResult = await checkHybridAuth(request, { requireWorkflowId: false })

    if (!authResult.success || !authResult.userId) {
      logger.warn(`[${requestId}] Unauthorized Textract parse attempt`, {
        error: authResult.error || 'Missing userId',
      })
      return NextResponse.json(
        {
          success: false,
          error: authResult.error || 'Unauthorized',
        },
        { status: 401 }
      )
    }

    const userId = authResult.userId
    const body = await request.json()
    const validatedData = TextractParseSchema.parse(body)

    const processingMode = validatedData.processingMode || 'sync'
    const featureTypes = validatedData.featureTypes ?? []
    const useAnalyzeDocument = featureTypes.length > 0
    const host = `textract.${validatedData.region}.amazonaws.com`

    logger.info(`[${requestId}] Textract parse request`, {
      processingMode,
      filePath: validatedData.filePath?.substring(0, 50),
      s3Uri: validatedData.s3Uri?.substring(0, 50),
      featureTypes,
      userId,
    })

    if (processingMode === 'async') {
      if (!validatedData.s3Uri) {
        return NextResponse.json(
          {
            success: false,
            error: 'S3 URI is required for multi-page processing (s3://bucket/key)',
          },
          { status: 400 }
        )
      }

      const { bucket: s3Bucket, key: s3Key } = parseS3Uri(validatedData.s3Uri)

      logger.info(`[${requestId}] Starting async Textract job`, { s3Bucket, s3Key })

      const startTarget = useAnalyzeDocument
        ? 'Textract.StartDocumentAnalysis'
        : 'Textract.StartDocumentTextDetection'

      const startBody: Record<string, unknown> = {
        DocumentLocation: {
          S3Object: {
            Bucket: s3Bucket,
            Name: s3Key,
          },
        },
      }

      if (useAnalyzeDocument) {
        startBody.FeatureTypes = featureTypes

        if (
          validatedData.queries &&
          validatedData.queries.length > 0 &&
          featureTypes.includes('QUERIES')
        ) {
          startBody.QueriesConfig = {
            Queries: validatedData.queries.map((q) => ({
              Text: q.Text,
              Alias: q.Alias,
              Pages: q.Pages,
            })),
          }
        }
      }

      const startResult = await callTextractAsync(
        host,
        startTarget,
        startBody,
        validatedData.accessKeyId,
        validatedData.secretAccessKey,
        validatedData.region
      )

      const jobId = startResult.JobId as string
      if (!jobId) {
        throw new Error('Failed to start Textract job: No JobId returned')
      }

      logger.info(`[${requestId}] Async job started`, { jobId })

      const textractData = await pollForJobCompletion(
        host,
        jobId,
        validatedData.accessKeyId,
        validatedData.secretAccessKey,
        validatedData.region,
        useAnalyzeDocument,
        requestId
      )

      logger.info(`[${requestId}] Textract async parse successful`, {
        pageCount: (textractData.DocumentMetadata as { Pages?: number })?.Pages ?? 0,
        blockCount: (textractData.Blocks as unknown[])?.length ?? 0,
      })

      return NextResponse.json({
        success: true,
        output: {
          blocks: textractData.Blocks ?? [],
          documentMetadata: {
            pages: (textractData.DocumentMetadata as { Pages?: number })?.Pages ?? 0,
          },
          modelVersion: (textractData.AnalyzeDocumentModelVersion ??
            textractData.DetectDocumentTextModelVersion) as string | undefined,
        },
      })
    }

    if (!validatedData.filePath) {
      return NextResponse.json(
        {
          success: false,
          error: 'File path is required for single-page processing',
        },
        { status: 400 }
      )
    }

    let fileUrl = validatedData.filePath

    const isInternalFilePath = validatedData.filePath && isInternalFileUrl(validatedData.filePath)

    if (isInternalFilePath) {
      try {
        const storageKey = extractStorageKey(validatedData.filePath)
        const context = inferContextFromKey(storageKey)

        const hasAccess = await verifyFileAccess(storageKey, userId, undefined, context, false)

        if (!hasAccess) {
          logger.warn(`[${requestId}] Unauthorized presigned URL generation attempt`, {
            userId,
            key: storageKey,
            context,
          })
          return NextResponse.json(
            {
              success: false,
              error: 'File not found',
            },
            { status: 404 }
          )
        }

        fileUrl = await StorageService.generatePresignedDownloadUrl(storageKey, context, 5 * 60)
        logger.info(`[${requestId}] Generated presigned URL for ${context} file`)
      } catch (error) {
        logger.error(`[${requestId}] Failed to generate presigned URL:`, error)
        return NextResponse.json(
          {
            success: false,
            error: 'Failed to generate file access URL',
          },
          { status: 500 }
        )
      }
    } else if (validatedData.filePath?.startsWith('/')) {
      // Reject arbitrary absolute paths that don't contain /api/files/serve/
      logger.warn(`[${requestId}] Invalid internal path`, {
        userId,
        path: validatedData.filePath.substring(0, 50),
      })
      return NextResponse.json(
        {
          success: false,
          error: 'Invalid file path. Only uploaded files are supported for internal paths.',
        },
        { status: 400 }
      )
    } else {
      const urlValidation = validateExternalUrl(fileUrl, 'Document URL')
      if (!urlValidation.isValid) {
        logger.warn(`[${requestId}] SSRF attempt blocked`, {
          userId,
          url: fileUrl.substring(0, 100),
          error: urlValidation.error,
        })
        return NextResponse.json(
          {
            success: false,
            error: urlValidation.error,
          },
          { status: 400 }
        )
      }
    }

    const { bytes, contentType } = await fetchDocumentBytes(fileUrl)

    // Track if this is a PDF for better error messaging
    const isPdf = contentType.includes('pdf') || fileUrl.toLowerCase().endsWith('.pdf')

    const uri = '/'

    let textractBody: Record<string, unknown>
    let amzTarget: string

    if (useAnalyzeDocument) {
      amzTarget = 'Textract.AnalyzeDocument'
      textractBody = {
        Document: {
          Bytes: bytes,
        },
        FeatureTypes: featureTypes,
      }

      if (
        validatedData.queries &&
        validatedData.queries.length > 0 &&
        featureTypes.includes('QUERIES')
      ) {
        textractBody.QueriesConfig = {
          Queries: validatedData.queries.map((q) => ({
            Text: q.Text,
            Alias: q.Alias,
            Pages: q.Pages,
          })),
        }
      }
    } else {
      amzTarget = 'Textract.DetectDocumentText'
      textractBody = {
        Document: {
          Bytes: bytes,
        },
      }
    }

    const bodyString = JSON.stringify(textractBody)

    const headers = signAwsRequest(
      'POST',
      host,
      uri,
      bodyString,
      validatedData.accessKeyId,
      validatedData.secretAccessKey,
      validatedData.region,
      'textract',
      amzTarget
    )

    const textractResponse = await fetch(`https://${host}${uri}`, {
      method: 'POST',
      headers,
      body: bodyString,
    })

    if (!textractResponse.ok) {
      const errorText = await textractResponse.text()
      logger.error(`[${requestId}] Textract API error:`, errorText)

      let errorMessage = `Textract API error: ${textractResponse.statusText}`
      let isUnsupportedFormat = false
      try {
        const errorJson = JSON.parse(errorText)
        if (errorJson.Message) {
          errorMessage = errorJson.Message
        } else if (errorJson.__type) {
          errorMessage = `${errorJson.__type}: ${errorJson.message || errorText}`
        }
        // Check for unsupported document format error
        isUnsupportedFormat =
          errorJson.__type === 'UnsupportedDocumentException' ||
          errorJson.Message?.toLowerCase().includes('unsupported document') ||
          errorText.toLowerCase().includes('unsupported document')
      } catch {
        isUnsupportedFormat = errorText.toLowerCase().includes('unsupported document')
      }

      // Provide helpful message for unsupported format (likely multi-page PDF)
      if (isUnsupportedFormat && isPdf) {
        errorMessage =
          'This document format is not supported in Single Page mode. If this is a multi-page PDF, please use "Multi-Page (PDF, TIFF via S3)" mode instead, which requires uploading your document to S3 first. Single Page mode only supports JPEG, PNG, and single-page PDF files.'
      }

      return NextResponse.json(
        {
          success: false,
          error: errorMessage,
        },
        { status: textractResponse.status }
      )
    }

    const textractData = await textractResponse.json()

    logger.info(`[${requestId}] Textract parse successful`, {
      pageCount: textractData.DocumentMetadata?.Pages ?? 0,
      blockCount: textractData.Blocks?.length ?? 0,
    })

    return NextResponse.json({
      success: true,
      output: {
        blocks: textractData.Blocks ?? [],
        documentMetadata: {
          pages: textractData.DocumentMetadata?.Pages ?? 0,
        },
        modelVersion:
          textractData.AnalyzeDocumentModelVersion ??
          textractData.DetectDocumentTextModelVersion ??
          undefined,
      },
    })
  } catch (error) {
    if (error instanceof z.ZodError) {
      logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
      return NextResponse.json(
        {
          success: false,
          error: 'Invalid request data',
          details: error.errors,
        },
        { status: 400 }
      )
    }

    logger.error(`[${requestId}] Error in Textract parse:`, error)

    return NextResponse.json(
      {
        success: false,
        error: error instanceof Error ? error.message : 'Internal server error',
      },
      { status: 500 }
    )
  }
}
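For orientation, a hypothetical call to this new route; the field names follow TextractParseSchema above, while the credential, bucket, and query values are placeholders invented for the example:

// Hypothetical caller sketch; credential and bucket values are placeholders.
const res = await fetch('/api/tools/textract/parse', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    accessKeyId: 'AKIA...',
    secretAccessKey: '...',
    region: 'us-east-1',
    processingMode: 'async', // multi-page PDFs must be uploaded to S3 first
    s3Uri: 's3://my-bucket/docs/report.pdf',
    featureTypes: ['TABLES', 'QUERIES'],
    queries: [{ Text: 'What is the invoice total?', Alias: 'total' }],
  }),
})
const { success, output } = await res.json()
// output.blocks carries Textract Block objects; output.documentMetadata.pages the page count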
@@ -550,6 +550,8 @@ export interface AdminUserBilling {
   totalWebhookTriggers: number
   totalScheduledExecutions: number
   totalChatExecutions: number
+  totalMcpExecutions: number
+  totalA2aExecutions: number
   totalTokensUsed: number
   totalCost: string
   currentUsageLimit: string | null
@@ -97,6 +97,8 @@ export const GET = withAdminAuthParams<RouteParams>(async (_, context) => {
   totalWebhookTriggers: stats?.totalWebhookTriggers ?? 0,
   totalScheduledExecutions: stats?.totalScheduledExecutions ?? 0,
   totalChatExecutions: stats?.totalChatExecutions ?? 0,
+  totalMcpExecutions: stats?.totalMcpExecutions ?? 0,
+  totalA2aExecutions: stats?.totalA2aExecutions ?? 0,
   totalTokensUsed: stats?.totalTokensUsed ?? 0,
   totalCost: stats?.totalCost ?? '0',
   currentUsageLimit: stats?.currentUsageLimit ?? null,
@@ -19,7 +19,7 @@ export interface RateLimitResult {

 export async function checkRateLimit(
   request: NextRequest,
-  endpoint: 'logs' | 'logs-detail' = 'logs'
+  endpoint: 'logs' | 'logs-detail' | 'workflows' | 'workflow-detail' = 'logs'
 ): Promise<RateLimitResult> {
   try {
     const auth = await authenticateV1Request(request)
apps/sim/app/api/v1/workflows/[id]/route.ts (new file, 102 lines)
@@ -0,0 +1,102 @@
import { db } from '@sim/db'
import { permissions, workflow, workflowBlocks } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { extractInputFieldsFromBlocks } from '@/lib/workflows/input-format'
import { createApiResponse, getUserLimits } from '@/app/api/v1/logs/meta'
import { checkRateLimit, createRateLimitResponse } from '@/app/api/v1/middleware'

const logger = createLogger('V1WorkflowDetailsAPI')

export const revalidate = 0

export async function GET(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
  const requestId = crypto.randomUUID().slice(0, 8)

  try {
    const rateLimit = await checkRateLimit(request, 'workflow-detail')
    if (!rateLimit.allowed) {
      return createRateLimitResponse(rateLimit)
    }

    const userId = rateLimit.userId!
    const { id } = await params

    logger.info(`[${requestId}] Fetching workflow details for ${id}`, { userId })

    const rows = await db
      .select({
        id: workflow.id,
        name: workflow.name,
        description: workflow.description,
        color: workflow.color,
        folderId: workflow.folderId,
        workspaceId: workflow.workspaceId,
        isDeployed: workflow.isDeployed,
        deployedAt: workflow.deployedAt,
        runCount: workflow.runCount,
        lastRunAt: workflow.lastRunAt,
        variables: workflow.variables,
        createdAt: workflow.createdAt,
        updatedAt: workflow.updatedAt,
      })
      .from(workflow)
      .innerJoin(
        permissions,
        and(
          eq(permissions.entityType, 'workspace'),
          eq(permissions.entityId, workflow.workspaceId),
          eq(permissions.userId, userId)
        )
      )
      .where(eq(workflow.id, id))
      .limit(1)

    const workflowData = rows[0]
    if (!workflowData) {
      return NextResponse.json({ error: 'Workflow not found' }, { status: 404 })
    }

    const blockRows = await db
      .select({
        id: workflowBlocks.id,
        type: workflowBlocks.type,
        subBlocks: workflowBlocks.subBlocks,
      })
      .from(workflowBlocks)
      .where(eq(workflowBlocks.workflowId, id))

    const blocksRecord = Object.fromEntries(
      blockRows.map((block) => [block.id, { type: block.type, subBlocks: block.subBlocks }])
    )
    const inputs = extractInputFieldsFromBlocks(blocksRecord)

    const response = {
      id: workflowData.id,
      name: workflowData.name,
      description: workflowData.description,
      color: workflowData.color,
      folderId: workflowData.folderId,
      workspaceId: workflowData.workspaceId,
      isDeployed: workflowData.isDeployed,
      deployedAt: workflowData.deployedAt?.toISOString() || null,
      runCount: workflowData.runCount,
      lastRunAt: workflowData.lastRunAt?.toISOString() || null,
      variables: workflowData.variables || {},
      inputs,
      createdAt: workflowData.createdAt.toISOString(),
      updatedAt: workflowData.updatedAt.toISOString(),
    }

    const limits = await getUserLimits(userId)

    const apiResponse = createApiResponse({ data: response }, limits, rateLimit)

    return NextResponse.json(apiResponse.body, { headers: apiResponse.headers })
  } catch (error: unknown) {
    const message = error instanceof Error ? error.message : 'Unknown error'
    logger.error(`[${requestId}] Workflow details fetch error`, { error: message })
    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}
apps/sim/app/api/v1/workflows/route.ts (new file, 184 lines)
@@ -0,0 +1,184 @@
import { db } from '@sim/db'
import { permissions, workflow } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, asc, eq, gt, or } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { createApiResponse, getUserLimits } from '@/app/api/v1/logs/meta'
import { checkRateLimit, createRateLimitResponse } from '@/app/api/v1/middleware'

const logger = createLogger('V1WorkflowsAPI')

export const dynamic = 'force-dynamic'
export const revalidate = 0

const QueryParamsSchema = z.object({
  workspaceId: z.string(),
  folderId: z.string().optional(),
  deployedOnly: z.coerce.boolean().optional().default(false),
  limit: z.coerce.number().min(1).max(100).optional().default(50),
  cursor: z.string().optional(),
})

interface CursorData {
  sortOrder: number
  createdAt: string
  id: string
}

function encodeCursor(data: CursorData): string {
  return Buffer.from(JSON.stringify(data)).toString('base64')
}

function decodeCursor(cursor: string): CursorData | null {
  try {
    return JSON.parse(Buffer.from(cursor, 'base64').toString())
  } catch {
    return null
  }
}

export async function GET(request: NextRequest) {
  const requestId = crypto.randomUUID().slice(0, 8)

  try {
    const rateLimit = await checkRateLimit(request, 'workflows')
    if (!rateLimit.allowed) {
      return createRateLimitResponse(rateLimit)
    }

    const userId = rateLimit.userId!
    const { searchParams } = new URL(request.url)
    const rawParams = Object.fromEntries(searchParams.entries())

    const validationResult = QueryParamsSchema.safeParse(rawParams)
    if (!validationResult.success) {
      return NextResponse.json(
        { error: 'Invalid parameters', details: validationResult.error.errors },
        { status: 400 }
      )
    }

    const params = validationResult.data

    logger.info(`[${requestId}] Fetching workflows for workspace ${params.workspaceId}`, {
      userId,
      filters: {
        folderId: params.folderId,
        deployedOnly: params.deployedOnly,
      },
    })

    const conditions = [
      eq(workflow.workspaceId, params.workspaceId),
      eq(permissions.entityType, 'workspace'),
      eq(permissions.entityId, params.workspaceId),
      eq(permissions.userId, userId),
    ]

    if (params.folderId) {
      conditions.push(eq(workflow.folderId, params.folderId))
    }

    if (params.deployedOnly) {
      conditions.push(eq(workflow.isDeployed, true))
    }

    if (params.cursor) {
      const cursorData = decodeCursor(params.cursor)
      if (cursorData) {
        const cursorCondition = or(
          gt(workflow.sortOrder, cursorData.sortOrder),
          and(
            eq(workflow.sortOrder, cursorData.sortOrder),
            gt(workflow.createdAt, new Date(cursorData.createdAt))
          ),
          and(
            eq(workflow.sortOrder, cursorData.sortOrder),
            eq(workflow.createdAt, new Date(cursorData.createdAt)),
            gt(workflow.id, cursorData.id)
          )
        )
        if (cursorCondition) {
          conditions.push(cursorCondition)
        }
      }
    }

    const orderByClause = [asc(workflow.sortOrder), asc(workflow.createdAt), asc(workflow.id)]

    const rows = await db
      .select({
        id: workflow.id,
        name: workflow.name,
        description: workflow.description,
        color: workflow.color,
        folderId: workflow.folderId,
        workspaceId: workflow.workspaceId,
        isDeployed: workflow.isDeployed,
        deployedAt: workflow.deployedAt,
        runCount: workflow.runCount,
        lastRunAt: workflow.lastRunAt,
        sortOrder: workflow.sortOrder,
        createdAt: workflow.createdAt,
        updatedAt: workflow.updatedAt,
      })
      .from(workflow)
      .innerJoin(
        permissions,
        and(
          eq(permissions.entityType, 'workspace'),
          eq(permissions.entityId, params.workspaceId),
          eq(permissions.userId, userId)
        )
      )
      .where(and(...conditions))
      .orderBy(...orderByClause)
      .limit(params.limit + 1)

    const hasMore = rows.length > params.limit
    const data = rows.slice(0, params.limit)

    let nextCursor: string | undefined
    if (hasMore && data.length > 0) {
      const lastWorkflow = data[data.length - 1]
      nextCursor = encodeCursor({
        sortOrder: lastWorkflow.sortOrder,
        createdAt: lastWorkflow.createdAt.toISOString(),
        id: lastWorkflow.id,
      })
    }

    const formattedWorkflows = data.map((w) => ({
      id: w.id,
      name: w.name,
      description: w.description,
      color: w.color,
      folderId: w.folderId,
      workspaceId: w.workspaceId,
      isDeployed: w.isDeployed,
      deployedAt: w.deployedAt?.toISOString() || null,
      runCount: w.runCount,
      lastRunAt: w.lastRunAt?.toISOString() || null,
      createdAt: w.createdAt.toISOString(),
      updatedAt: w.updatedAt.toISOString(),
    }))

    const limits = await getUserLimits(userId)

    const response = createApiResponse(
      {
        data: formattedWorkflows,
        nextCursor,
      },
      limits,
      rateLimit
    )

    return NextResponse.json(response.body, { headers: response.headers })
  } catch (error: unknown) {
    const message = error instanceof Error ? error.message : 'Unknown error'
    logger.error(`[${requestId}] Workflows fetch error`, { error: message })
    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}
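The list route pages with an opaque keyset cursor over the total order (sortOrder, createdAt, id) rather than OFFSET, so pages stay stable when rows are inserted or deleted earlier in the order. A round-trip of the helpers above, with illustrative values:

// Round-trip of encodeCursor/decodeCursor; values are illustrative.
const cursor = encodeCursor({
  sortOrder: 3,
  createdAt: '2024-06-01T12:00:00.000Z',
  id: 'wf_123',
})
// cursor is base64-encoded JSON
const decoded = decodeCursor(cursor)
// decoded => { sortOrder: 3, createdAt: '2024-06-01T12:00:00.000Z', id: 'wf_123' }
// The or(...) cursor condition then selects only rows strictly after this triple.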
@@ -12,6 +12,10 @@ import { markExecutionCancelled } from '@/lib/execution/cancellation'
 import { processInputFileFields } from '@/lib/execution/files'
 import { preprocessExecution } from '@/lib/execution/preprocessing'
 import { LoggingSession } from '@/lib/logs/execution/logging-session'
+import {
+  cleanupExecutionBase64Cache,
+  hydrateUserFilesWithBase64,
+} from '@/lib/uploads/utils/user-file-base64.server'
 import { executeWorkflowCore } from '@/lib/workflows/executor/execution-core'
 import { type ExecutionEvent, encodeSSEEvent } from '@/lib/workflows/executor/execution-events'
 import { PauseResumeManager } from '@/lib/workflows/executor/human-in-the-loop-manager'
@@ -25,7 +29,7 @@ import type { WorkflowExecutionPayload } from '@/background/workflow-execution'
 import { normalizeName } from '@/executor/constants'
 import { ExecutionSnapshot } from '@/executor/execution/snapshot'
 import type { ExecutionMetadata, IterationContext } from '@/executor/execution/types'
-import type { StreamingExecution } from '@/executor/types'
+import type { NormalizedBlockOutput, StreamingExecution } from '@/executor/types'
 import { Serializer } from '@/serializer'
 import { CORE_TRIGGER_TYPES, type CoreTriggerType } from '@/stores/logs/filters/types'
@@ -38,6 +42,8 @@ const ExecuteWorkflowSchema = z.object({
   useDraftState: z.boolean().optional(),
   input: z.any().optional(),
   isClientSession: z.boolean().optional(),
+  includeFileBase64: z.boolean().optional().default(true),
+  base64MaxBytes: z.number().int().positive().optional(),
   workflowStateOverride: z
     .object({
       blocks: z.record(z.any()),
@@ -214,6 +220,8 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
       useDraftState,
       input: validatedInput,
       isClientSession = false,
+      includeFileBase64,
+      base64MaxBytes,
       workflowStateOverride,
     } = validation.data
@@ -227,6 +235,8 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
       triggerType,
       stream,
       useDraftState,
+      includeFileBase64,
+      base64MaxBytes,
       workflowStateOverride,
       workflowId: _workflowId, // Also exclude workflowId used for internal JWT auth
       ...rest
@@ -427,16 +437,31 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
         snapshot,
         callbacks: {},
         loggingSession,
+        includeFileBase64,
+        base64MaxBytes,
       })

-      const hasResponseBlock = workflowHasResponseBlock(result)
+      const outputWithBase64 = includeFileBase64
+        ? ((await hydrateUserFilesWithBase64(result.output, {
+            requestId,
+            executionId,
+            maxBytes: base64MaxBytes,
+          })) as NormalizedBlockOutput)
+        : result.output
+
+      const resultWithBase64 = { ...result, output: outputWithBase64 }
+
+      // Cleanup base64 cache for this execution
+      await cleanupExecutionBase64Cache(executionId)
+
+      const hasResponseBlock = workflowHasResponseBlock(resultWithBase64)
       if (hasResponseBlock) {
-        return createHttpResponseFromBlock(result)
+        return createHttpResponseFromBlock(resultWithBase64)
       }

       const filteredResult = {
         success: result.success,
-        output: result.output,
+        output: outputWithBase64,
         error: result.error,
         metadata: result.metadata
           ? {
@@ -498,6 +523,8 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
           selectedOutputs: resolvedSelectedOutputs,
           isSecureMode: false,
           workflowTriggerType: triggerType === 'chat' ? 'chat' : 'api',
+          includeFileBase64,
+          base64MaxBytes,
         },
         executionId,
       })
@@ -698,6 +725,8 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
         },
         loggingSession,
         abortSignal: abortController.signal,
+        includeFileBase64,
+        base64MaxBytes,
       })

       if (result.status === 'paused') {
@@ -750,12 +779,21 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
           workflowId,
           data: {
             success: result.success,
-            output: result.output,
+            output: includeFileBase64
+              ? await hydrateUserFilesWithBase64(result.output, {
+                  requestId,
+                  executionId,
+                  maxBytes: base64MaxBytes,
+                })
+              : result.output,
             duration: result.metadata?.duration || 0,
             startTime: result.metadata?.startTime || startTime.toISOString(),
             endTime: result.metadata?.endTime || new Date().toISOString(),
           },
         })

+        // Cleanup base64 cache for this execution
+        await cleanupExecutionBase64Cache(executionId)
       } catch (error: any) {
         const errorMessage = error.message || 'Unknown error'
         logger.error(`[${requestId}] SSE execution failed: ${errorMessage}`)
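Condensed, the pattern these execute-route hunks introduce is: hydrate file outputs with base64 only when the caller asked for it, respond, then always drop the per-execution cache. A restatement of that flow (the helper names are the real imports above; the surrounding handler is elided):

// Sketch of the hydrate-respond-cleanup flow added above.
const output = includeFileBase64
  ? await hydrateUserFilesWithBase64(result.output, { requestId, executionId, maxBytes: base64MaxBytes })
  : result.output
// ...serialize { ...result, output } into the HTTP or SSE response...
await cleanupExecutionBase64Cache(executionId) // presumably frees the cached base64 for this execution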
@@ -33,6 +33,7 @@ const BlockDataSchema = z.object({
   doWhileCondition: z.string().optional(),
   parallelType: z.enum(['collection', 'count']).optional(),
   type: z.string().optional(),
+  canonicalModes: z.record(z.enum(['basic', 'advanced'])).optional(),
 })

 const SubBlockStateSchema = z.object({
apps/sim/app/changelog/components/branded-link.tsx (new file, 27 lines)
@@ -0,0 +1,27 @@
'use client'

import Link from 'next/link'
import { useBrandedButtonClass } from '@/hooks/use-branded-button-class'

interface BrandedLinkProps {
  href: string
  children: React.ReactNode
  className?: string
  target?: string
  rel?: string
}

export function BrandedLink({ href, children, className = '', target, rel }: BrandedLinkProps) {
  const buttonClass = useBrandedButtonClass()

  return (
    <Link
      href={href}
      target={target}
      rel={rel}
      className={`${buttonClass} group inline-flex items-center justify-center gap-2 rounded-[10px] py-[6px] pr-[10px] pl-[12px] text-[15px] text-white transition-all ${className}`}
    >
      {children}
    </Link>
  )
}
@@ -2,6 +2,7 @@ import { BookOpen, Github, Rss } from 'lucide-react'
 import Link from 'next/link'
 import { inter } from '@/app/_styles/fonts/inter/inter'
 import { soehne } from '@/app/_styles/fonts/soehne/soehne'
+import { BrandedLink } from '@/app/changelog/components/branded-link'
 import ChangelogList from '@/app/changelog/components/timeline-list'

 export interface ChangelogEntry {
@@ -66,25 +67,24 @@ export default async function ChangelogContent() {
       <hr className='mt-6 border-border' />

       <div className='mt-6 flex flex-wrap items-center gap-3 text-sm'>
-        <Link
+        <BrandedLink
           href='https://github.com/simstudioai/sim/releases'
           target='_blank'
           rel='noopener noreferrer'
-          className='group inline-flex items-center justify-center gap-2 rounded-[10px] border border-[#6F3DFA] bg-gradient-to-b from-[#8357FF] to-[#6F3DFA] py-[6px] pr-[10px] pl-[12px] text-[14px] text-white shadow-[inset_0_2px_4px_0_#9B77FF] transition-all sm:text-[16px]'
         >
           <Github className='h-4 w-4' />
           View on GitHub
-        </Link>
+        </BrandedLink>
         <Link
           href='https://docs.sim.ai'
-          className='inline-flex items-center gap-2 rounded-md border border-border px-3 py-1.5 hover:bg-muted'
+          className='inline-flex items-center gap-2 rounded-[10px] border border-border py-[6px] pr-[10px] pl-[12px] text-[15px] transition-all hover:bg-muted'
         >
           <BookOpen className='h-4 w-4' />
           Documentation
         </Link>
         <Link
           href='/changelog.xml'
-          className='inline-flex items-center gap-2 rounded-md border border-border px-3 py-1.5 hover:bg-muted'
+          className='inline-flex items-center gap-2 rounded-[10px] border border-border py-[6px] pr-[10px] pl-[12px] text-[15px] transition-all hover:bg-muted'
         >
           <Rss className='h-4 w-4' />
           RSS Feed
@@ -117,7 +117,7 @@ export default function ChatClient({ identifier }: { identifier: string }) {
   const [error, setError] = useState<string | null>(null)
   const messagesEndRef = useRef<HTMLDivElement>(null)
   const messagesContainerRef = useRef<HTMLDivElement>(null)
-  const [starCount, setStarCount] = useState('25.1k')
+  const [starCount, setStarCount] = useState('25.8k')
   const [conversationId, setConversationId] = useState('')

   const [showScrollButton, setShowScrollButton] = useState(false)
@@ -2,7 +2,7 @@

 import { useRef, useState } from 'react'
 import { createLogger } from '@sim/logger'
-import { isUserFile } from '@/lib/core/utils/display-filters'
+import { isUserFileWithMetadata } from '@/lib/core/utils/user-file'
 import type { ChatFile, ChatMessage } from '@/app/chat/components/message/message'
 import { CHAT_ERROR_MESSAGES } from '@/app/chat/constants'

@@ -17,7 +17,7 @@ function extractFilesFromData(
     return files
   }

-  if (isUserFile(data)) {
+  if (isUserFileWithMetadata(data)) {
     if (!seenIds.has(data.id)) {
       seenIds.add(data.id)
       files.push({
@@ -232,7 +232,7 @@ export function useChatStreaming() {
     return null
   }

-  if (isUserFile(value)) {
+  if (isUserFileWithMetadata(value)) {
     return null
   }

@@ -285,7 +285,7 @@ export function useChatStreaming() {

   const value = getOutputValue(blockOutputs, config.path)

-  if (isUserFile(value)) {
+  if (isUserFileWithMetadata(value)) {
     extractedFiles.push({
       id: value.id,
       name: value.name,
@@ -207,7 +207,6 @@ function TemplateCardInner({
     isPannable={false}
     defaultZoom={0.8}
-    fitPadding={0.2}
     lightweight
   />
 ) : (
   <div className='h-full w-full bg-[var(--surface-4)]' />
@@ -1,10 +1,13 @@
|
||||
'use client'
|
||||
|
||||
import { Suspense, useEffect, useState } from 'react'
|
||||
import { CheckCircle, Heart, Info, Loader2, XCircle } from 'lucide-react'
|
||||
import { Loader2 } from 'lucide-react'
|
||||
import { useSearchParams } from 'next/navigation'
|
||||
import { Button, Card, CardContent, CardDescription, CardHeader, CardTitle } from '@/components/ui'
|
||||
import { useBrandConfig } from '@/lib/branding/branding'
|
||||
import { inter } from '@/app/_styles/fonts/inter/inter'
|
||||
import { soehne } from '@/app/_styles/fonts/soehne/soehne'
|
||||
import { BrandedButton } from '@/app/(auth)/components/branded-button'
|
||||
import { SupportFooter } from '@/app/(auth)/components/support-footer'
|
||||
import { InviteLayout } from '@/app/invite/components'
|
||||
|
||||
interface UnsubscribeData {
|
||||
success: boolean
|
||||
@@ -27,7 +30,6 @@ function UnsubscribeContent() {
|
||||
const [error, setError] = useState<string | null>(null)
|
||||
const [processing, setProcessing] = useState(false)
|
||||
const [unsubscribed, setUnsubscribed] = useState(false)
|
||||
const brand = useBrandConfig()
|
||||
|
||||
const email = searchParams.get('email')
|
||||
const token = searchParams.get('token')
|
||||
@@ -109,7 +111,7 @@ function UnsubscribeContent() {
|
||||
} else {
|
||||
setError(result.error || 'Failed to unsubscribe')
|
||||
}
|
||||
} catch (error) {
|
||||
} catch {
|
||||
setError('Failed to process unsubscribe request')
|
||||
} finally {
|
||||
setProcessing(false)
|
||||
@@ -118,272 +120,171 @@ function UnsubscribeContent() {
|
||||
|
||||
if (loading) {
|
||||
return (
|
||||
<div className='before:-z-50 relative flex min-h-screen items-center justify-center before:pointer-events-none before:fixed before:inset-0 before:bg-white'>
|
||||
<Card className='w-full max-w-md border shadow-sm'>
|
||||
<CardContent className='flex items-center justify-center p-8'>
|
||||
<Loader2 className='h-8 w-8 animate-spin text-muted-foreground' />
|
||||
</CardContent>
|
||||
</Card>
|
||||
</div>
|
||||
<InviteLayout>
|
||||
<div className='space-y-1 text-center'>
|
||||
<h1 className={`${soehne.className} font-medium text-[32px] text-black tracking-tight`}>
|
||||
Loading
|
||||
</h1>
|
||||
<p className={`${inter.className} font-[380] text-[16px] text-muted-foreground`}>
|
||||
Validating your unsubscribe link...
|
||||
</p>
|
||||
</div>
|
||||
<div className={`${inter.className} mt-8 flex w-full items-center justify-center py-8`}>
|
||||
<Loader2 className='h-8 w-8 animate-spin text-muted-foreground' />
|
||||
</div>
|
||||
<SupportFooter position='absolute' />
|
||||
</InviteLayout>
|
||||
)
|
||||
}
|
||||
|
||||
if (error) {
|
||||
return (
|
||||
<div className='before:-z-50 relative flex min-h-screen items-center justify-center p-4 before:pointer-events-none before:fixed before:inset-0 before:bg-white'>
|
||||
<Card className='w-full max-w-md border shadow-sm'>
|
||||
<CardHeader className='text-center'>
|
||||
<XCircle className='mx-auto mb-2 h-12 w-12 text-red-500' />
|
||||
<CardTitle className='text-foreground'>Invalid Unsubscribe Link</CardTitle>
|
||||
<CardDescription className='text-muted-foreground'>
|
||||
This unsubscribe link is invalid or has expired
|
||||
</CardDescription>
|
||||
</CardHeader>
|
||||
<CardContent className='space-y-4'>
|
||||
<div className='rounded-lg border bg-red-50 p-4'>
|
||||
<p className='text-red-800 text-sm'>
|
||||
<strong>Error:</strong> {error}
|
||||
</p>
|
||||
</div>
|
||||
<InviteLayout>
|
||||
<div className='space-y-1 text-center'>
|
||||
<h1 className={`${soehne.className} font-medium text-[32px] text-black tracking-tight`}>
|
||||
Invalid Unsubscribe Link
|
||||
</h1>
|
||||
<p className={`${inter.className} font-[380] text-[16px] text-muted-foreground`}>
|
||||
{error}
|
||||
</p>
|
||||
</div>
|
||||
|
||||
<div className='space-y-3'>
|
||||
<p className='text-muted-foreground text-sm'>This could happen if:</p>
|
||||
<ul className='ml-4 list-inside list-disc space-y-1 text-muted-foreground text-sm'>
|
||||
<li>The link is missing required parameters</li>
|
||||
<li>The link has expired or been used already</li>
|
||||
<li>The link was copied incorrectly</li>
|
||||
</ul>
|
||||
</div>
|
||||
<div className={`${inter.className} mt-8 w-full max-w-[410px] space-y-3`}>
|
||||
<BrandedButton onClick={() => window.history.back()}>Go Back</BrandedButton>
|
||||
</div>
|
||||
|
||||
<div className='mt-6 flex flex-col gap-3'>
|
||||
<Button
|
||||
onClick={() =>
|
||||
window.open(
|
||||
`mailto:${brand.supportEmail}?subject=Unsubscribe%20Help&body=Hi%2C%20I%20need%20help%20unsubscribing%20from%20emails.%20My%20unsubscribe%20link%20is%20not%20working.`,
|
||||
'_blank'
|
||||
)
|
||||
}
|
||||
className='w-full bg-[var(--brand-primary-hex)] font-medium text-white shadow-sm transition-colors duration-200 hover:bg-[var(--brand-primary-hover-hex)]'
|
||||
>
|
||||
Contact Support
|
||||
</Button>
|
||||
<Button onClick={() => window.history.back()} variant='outline' className='w-full'>
|
||||
Go Back
|
||||
</Button>
|
||||
</div>
|
||||
|
||||
<div className='mt-4 text-center'>
|
||||
<p className='text-muted-foreground text-xs'>
|
||||
Need immediate help? Email us at{' '}
|
||||
<a
|
||||
href={`mailto:${brand.supportEmail}`}
|
||||
className='text-muted-foreground hover:underline'
|
||||
>
|
||||
{brand.supportEmail}
|
||||
</a>
|
||||
</p>
|
||||
</div>
|
||||
</CardContent>
|
||||
</Card>
|
||||
</div>
|
||||
<SupportFooter position='absolute' />
|
||||
</InviteLayout>
|
||||
)
|
||||
}
|
||||
|
||||
if (data?.isTransactional) {
|
||||
return (
|
||||
<div className='before:-z-50 relative flex min-h-screen items-center justify-center p-4 before:pointer-events-none before:fixed before:inset-0 before:bg-white'>
|
||||
<Card className='w-full max-w-md border shadow-sm'>
|
||||
<CardHeader className='text-center'>
|
||||
<Info className='mx-auto mb-2 h-12 w-12 text-blue-500' />
|
||||
<CardTitle className='text-foreground'>Important Account Emails</CardTitle>
|
||||
<CardDescription className='text-muted-foreground'>
|
||||
This email contains important information about your account
|
||||
</CardDescription>
|
||||
</CardHeader>
|
||||
<CardContent className='space-y-4'>
|
||||
<div className='rounded-lg border bg-blue-50 p-4'>
|
||||
<p className='text-blue-800 text-sm'>
|
||||
<strong>Transactional emails</strong> like password resets, account confirmations,
|
||||
and security alerts cannot be unsubscribed from as they contain essential
|
||||
information for your account security and functionality.
|
||||
</p>
|
||||
</div>
|
||||
<InviteLayout>
|
||||
<div className='space-y-1 text-center'>
|
||||
<h1 className={`${soehne.className} font-medium text-[32px] text-black tracking-tight`}>
|
||||
Important Account Emails
|
||||
</h1>
|
||||
<p className={`${inter.className} font-[380] text-[16px] text-muted-foreground`}>
|
||||
Transactional emails like password resets, account confirmations, and security alerts
|
||||
cannot be unsubscribed from as they contain essential information for your account.
|
||||
</p>
|
||||
</div>
|
||||
|
||||
<div className='space-y-3'>
|
||||
<p className='text-foreground text-sm'>
|
||||
If you no longer wish to receive these emails, you can:
|
||||
</p>
|
||||
<ul className='ml-4 list-inside list-disc space-y-1 text-muted-foreground text-sm'>
|
||||
<li>Close your account entirely</li>
|
||||
<li>Contact our support team for assistance</li>
|
||||
</ul>
|
||||
</div>
|
||||
<div className={`${inter.className} mt-8 w-full max-w-[410px] space-y-3`}>
|
||||
<BrandedButton onClick={() => window.close()}>Close</BrandedButton>
|
||||
</div>
|
||||
|
||||
<div className='mt-6 flex flex-col gap-3'>
|
||||
<Button
|
||||
onClick={() =>
|
||||
window.open(
|
||||
`mailto:${brand.supportEmail}?subject=Account%20Help&body=Hi%2C%20I%20need%20help%20with%20my%20account%20emails.`,
|
||||
'_blank'
|
||||
)
|
||||
}
|
||||
className='w-full bg-blue-600 text-white hover:bg-blue-700'
|
||||
>
|
||||
Contact Support
|
||||
</Button>
|
||||
<Button onClick={() => window.close()} variant='outline' className='w-full'>
|
||||
Close
|
||||
</Button>
|
||||
</div>
|
||||
</CardContent>
|
||||
</Card>
|
||||
</div>
|
||||
<SupportFooter position='absolute' />
|
||||
</InviteLayout>
|
||||
)
|
||||
}
|
||||
|
||||
if (unsubscribed) {
return (
<div className='before:-z-50 relative flex min-h-screen items-center justify-center before:pointer-events-none before:fixed before:inset-0 before:bg-white'>
<Card className='w-full max-w-md border shadow-sm'>
<CardHeader className='text-center'>
<CheckCircle className='mx-auto mb-2 h-12 w-12 text-green-500' />
<CardTitle className='text-foreground'>Successfully Unsubscribed</CardTitle>
<CardDescription className='text-muted-foreground'>
You have been unsubscribed from our emails. You will stop receiving emails within 48
hours.
</CardDescription>
</CardHeader>
<CardContent className='text-center'>
<p className='text-muted-foreground text-sm'>
If you change your mind, you can always update your email preferences in your account
settings or contact us at{' '}
<a
href={`mailto:${brand.supportEmail}`}
className='text-muted-foreground hover:underline'
>
{brand.supportEmail}
</a>
</p>
</CardContent>
</Card>
</div>
<InviteLayout>
<div className='space-y-1 text-center'>
<h1 className={`${soehne.className} font-medium text-[32px] text-black tracking-tight`}>
Successfully Unsubscribed
</h1>
<p className={`${inter.className} font-[380] text-[16px] text-muted-foreground`}>
You have been unsubscribed from our emails. You will stop receiving emails within 48
hours.
</p>
</div>

<div className={`${inter.className} mt-8 w-full max-w-[410px] space-y-3`}>
<BrandedButton onClick={() => window.close()}>Close</BrandedButton>
</div>

<SupportFooter position='absolute' />
</InviteLayout>
)
}

const isAlreadyUnsubscribedFromAll = data?.currentPreferences.unsubscribeAll

return (
<div className='before:-z-50 relative flex min-h-screen items-center justify-center p-4 before:pointer-events-none before:fixed before:inset-0 before:bg-white'>
<Card className='w-full max-w-md border shadow-sm'>
<CardHeader className='text-center'>
<Heart className='mx-auto mb-2 h-12 w-12 text-red-500' />
<CardTitle className='text-foreground'>We're sorry to see you go!</CardTitle>
<CardDescription className='text-muted-foreground'>
We understand email preferences are personal. Choose which emails you'd like to
stop receiving from Sim.
</CardDescription>
<div className='mt-2 rounded-lg border bg-muted/50 p-3'>
<p className='text-muted-foreground text-xs'>
Email: <span className='font-medium text-foreground'>{data?.email}</span>
</p>
</div>
</CardHeader>
<CardContent className='space-y-4'>
<div className='space-y-3'>
<Button
onClick={() => handleUnsubscribe('all')}
disabled={processing || data?.currentPreferences.unsubscribeAll}
variant='destructive'
className='w-full'
>
{data?.currentPreferences.unsubscribeAll ? (
<CheckCircle className='mr-2 h-4 w-4' />
) : null}
{processing
? 'Unsubscribing...'
: data?.currentPreferences.unsubscribeAll
? 'Unsubscribed from All Emails'
: 'Unsubscribe from All Marketing Emails'}
</Button>
<InviteLayout>
<div className='space-y-1 text-center'>
<h1 className={`${soehne.className} font-medium text-[32px] text-black tracking-tight`}>
Email Preferences
</h1>
<p className={`${inter.className} font-[380] text-[16px] text-muted-foreground`}>
Choose which emails you'd like to stop receiving.
</p>
<p className={`${inter.className} mt-2 font-[380] text-[14px] text-muted-foreground`}>
{data?.email}
</p>
</div>

<div className='text-center text-muted-foreground text-sm'>
or choose specific types:
</div>
<div className={`${inter.className} mt-8 w-full max-w-[410px] space-y-3`}>
<BrandedButton
onClick={() => handleUnsubscribe('all')}
disabled={processing || isAlreadyUnsubscribedFromAll}
loading={processing}
loadingText='Unsubscribing'
>
{isAlreadyUnsubscribedFromAll
? 'Unsubscribed from All Emails'
: 'Unsubscribe from All Marketing Emails'}
</BrandedButton>

<Button
onClick={() => handleUnsubscribe('marketing')}
disabled={
processing ||
data?.currentPreferences.unsubscribeAll ||
data?.currentPreferences.unsubscribeMarketing
}
variant='outline'
className='w-full'
>
{data?.currentPreferences.unsubscribeMarketing ? (
<CheckCircle className='mr-2 h-4 w-4' />
) : null}
{data?.currentPreferences.unsubscribeMarketing
? 'Unsubscribed from Marketing'
: 'Unsubscribe from Marketing Emails'}
</Button>
<div className='py-2 text-center'>
<span className={`${inter.className} font-[380] text-[14px] text-muted-foreground`}>
or choose specific types
</span>
</div>

<Button
onClick={() => handleUnsubscribe('updates')}
disabled={
processing ||
data?.currentPreferences.unsubscribeAll ||
data?.currentPreferences.unsubscribeUpdates
}
variant='outline'
className='w-full'
>
{data?.currentPreferences.unsubscribeUpdates ? (
<CheckCircle className='mr-2 h-4 w-4' />
) : null}
{data?.currentPreferences.unsubscribeUpdates
? 'Unsubscribed from Updates'
: 'Unsubscribe from Product Updates'}
</Button>
<BrandedButton
onClick={() => handleUnsubscribe('marketing')}
disabled={
processing ||
isAlreadyUnsubscribedFromAll ||
data?.currentPreferences.unsubscribeMarketing
}
>
{data?.currentPreferences.unsubscribeMarketing
? 'Unsubscribed from Marketing'
: 'Unsubscribe from Marketing Emails'}
</BrandedButton>

<Button
onClick={() => handleUnsubscribe('notifications')}
disabled={
processing ||
data?.currentPreferences.unsubscribeAll ||
data?.currentPreferences.unsubscribeNotifications
}
variant='outline'
className='w-full'
>
{data?.currentPreferences.unsubscribeNotifications ? (
<CheckCircle className='mr-2 h-4 w-4' />
) : null}
{data?.currentPreferences.unsubscribeNotifications
? 'Unsubscribed from Notifications'
: 'Unsubscribe from Notifications'}
</Button>
</div>
<BrandedButton
onClick={() => handleUnsubscribe('updates')}
disabled={
processing ||
isAlreadyUnsubscribedFromAll ||
data?.currentPreferences.unsubscribeUpdates
}
>
{data?.currentPreferences.unsubscribeUpdates
? 'Unsubscribed from Updates'
: 'Unsubscribe from Product Updates'}
</BrandedButton>

<div className='mt-6 space-y-3'>
<div className='rounded-lg border bg-muted/50 p-3'>
<p className='text-center text-muted-foreground text-xs'>
<strong>Note:</strong> You'll continue receiving important account emails like
password resets and security alerts.
</p>
</div>
<BrandedButton
onClick={() => handleUnsubscribe('notifications')}
disabled={
processing ||
isAlreadyUnsubscribedFromAll ||
data?.currentPreferences.unsubscribeNotifications
}
>
{data?.currentPreferences.unsubscribeNotifications
? 'Unsubscribed from Notifications'
: 'Unsubscribe from Notifications'}
</BrandedButton>
</div>

<p className='text-center text-muted-foreground text-xs'>
Questions? Contact us at{' '}
<a
href={`mailto:${brand.supportEmail}`}
className='text-muted-foreground hover:underline'
>
{brand.supportEmail}
</a>
</p>
</div>
</CardContent>
</Card>
</div>
<div className={`${inter.className} mt-6 max-w-[410px] text-center`}>
<p className='font-[380] text-[13px] text-muted-foreground'>
You'll continue receiving important account emails like password resets and security
alerts.
</p>
</div>

<SupportFooter position='absolute' />
</InviteLayout>
)
}

@@ -391,13 +292,20 @@ export default function Unsubscribe() {
return (
<Suspense
fallback={
<div className='before:-z-50 relative flex min-h-screen items-center justify-center before:pointer-events-none before:fixed before:inset-0 before:bg-white'>
<Card className='w-full max-w-md border shadow-sm'>
<CardContent className='flex items-center justify-center p-8'>
<Loader2 className='h-8 w-8 animate-spin text-muted-foreground' />
</CardContent>
</Card>
</div>
<InviteLayout>
<div className='space-y-1 text-center'>
<h1 className={`${soehne.className} font-medium text-[32px] text-black tracking-tight`}>
Loading
</h1>
<p className={`${inter.className} font-[380] text-[16px] text-muted-foreground`}>
Validating your unsubscribe link...
</p>
</div>
<div className={`${inter.className} mt-8 flex w-full items-center justify-center py-8`}>
<Loader2 className='h-8 w-8 animate-spin text-muted-foreground' />
</div>
<SupportFooter position='absolute' />
</InviteLayout>
}
>
<UnsubscribeContent />

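The hunks above swap the old Card/Button unsubscribe screens for the shared `InviteLayout` treatment with `BrandedButton` and `SupportFooter`. For orientation only, a minimal sketch of a `BrandedButton` compatible with the props these hunks pass (`onClick`, `disabled`, `loading`, `loadingText`, children) might look like the following; the real component ships elsewhere in the repo, and the styling here is an assumption:

```tsx
// Hypothetical sketch only: props are inferred from the call sites in the diff above;
// the actual BrandedButton implementation and styling may differ.
import type { ReactNode } from 'react'

interface BrandedButtonProps {
  onClick: () => void
  disabled?: boolean
  loading?: boolean
  loadingText?: string
  children: ReactNode
}

export function BrandedButton({
  onClick,
  disabled,
  loading,
  loadingText,
  children,
}: BrandedButtonProps) {
  return (
    <button
      type='button'
      onClick={onClick}
      disabled={disabled || loading}
      className='w-full rounded-[4px] bg-black py-2 font-medium text-[14px] text-white transition-colors hover:bg-black/90 disabled:opacity-50'
    >
      {/* Show the in-flight label while the mutation is pending */}
      {loading ? (loadingText ?? 'Loading') : children}
    </button>
  )
}
```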
@@ -2,7 +2,6 @@

import { useRef, useState } from 'react'
import { createLogger } from '@sim/logger'
import { useQueryClient } from '@tanstack/react-query'
import {
Button,
Label,
@@ -14,7 +13,7 @@ import {
Textarea,
} from '@/components/emcn'
import type { DocumentData } from '@/lib/knowledge/types'
import { knowledgeKeys } from '@/hooks/queries/knowledge'
import { useCreateChunk } from '@/hooks/queries/knowledge'

const logger = createLogger('CreateChunkModal')

@@ -31,16 +30,20 @@ export function CreateChunkModal({
document,
knowledgeBaseId,
}: CreateChunkModalProps) {
const queryClient = useQueryClient()
const {
mutate: createChunk,
isPending: isCreating,
error: mutationError,
reset: resetMutation,
} = useCreateChunk()
const [content, setContent] = useState('')
const [isCreating, setIsCreating] = useState(false)
const [error, setError] = useState<string | null>(null)
const [showUnsavedChangesAlert, setShowUnsavedChangesAlert] = useState(false)
const isProcessingRef = useRef(false)

const error = mutationError?.message ?? null
const hasUnsavedChanges = content.trim().length > 0

const handleCreateChunk = async () => {
const handleCreateChunk = () => {
if (!document || content.trim().length === 0 || isProcessingRef.current) {
if (isProcessingRef.current) {
logger.warn('Chunk creation already in progress, ignoring duplicate request')
@@ -48,57 +51,32 @@ export function CreateChunkModal({
return
}

try {
isProcessingRef.current = true
setIsCreating(true)
setError(null)
isProcessingRef.current = true

const response = await fetch(
`/api/knowledge/${knowledgeBaseId}/documents/${document.id}/chunks`,
{
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify({
content: content.trim(),
enabled: true,
}),
}
)

if (!response.ok) {
const result = await response.json()
throw new Error(result.error || 'Failed to create chunk')
createChunk(
{
knowledgeBaseId,
documentId: document.id,
content: content.trim(),
enabled: true,
},
{
onSuccess: () => {
isProcessingRef.current = false
onClose()
},
onError: () => {
isProcessingRef.current = false
},
}

const result = await response.json()

if (result.success && result.data) {
logger.info('Chunk created successfully:', result.data.id)

await queryClient.invalidateQueries({
queryKey: knowledgeKeys.detail(knowledgeBaseId),
})

onClose()
} else {
throw new Error(result.error || 'Failed to create chunk')
}
} catch (err) {
logger.error('Error creating chunk:', err)
setError(err instanceof Error ? err.message : 'An error occurred')
} finally {
isProcessingRef.current = false
setIsCreating(false)
}
)
}

const onClose = () => {
onOpenChange(false)
setContent('')
setError(null)
setShowUnsavedChangesAlert(false)
resetMutation()
}

const handleCloseAttempt = () => {

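CreateChunkModal now hands the POST and the cache invalidation to a `useCreateChunk` mutation, deriving `error` and `isCreating` from the mutation itself. A minimal sketch of what such a hook could look like, assuming it wraps the same endpoint and `knowledgeKeys.detail` invalidation that the removed inline code performed (both visible above); the shipped hook in `@/hooks/queries/knowledge` may differ:

```ts
// Sketch under stated assumptions; not the repo's actual implementation.
import { useMutation, useQueryClient } from '@tanstack/react-query'
import { knowledgeKeys } from '@/hooks/queries/knowledge'

interface CreateChunkInput {
  knowledgeBaseId: string
  documentId: string
  content: string
  enabled: boolean
}

export function useCreateChunk() {
  const queryClient = useQueryClient()
  return useMutation({
    mutationFn: async ({ knowledgeBaseId, documentId, content, enabled }: CreateChunkInput) => {
      const response = await fetch(
        `/api/knowledge/${knowledgeBaseId}/documents/${documentId}/chunks`,
        {
          method: 'POST',
          headers: { 'Content-Type': 'application/json' },
          body: JSON.stringify({ content, enabled }),
        }
      )
      const result = await response.json()
      if (!response.ok || !result.success) throw new Error(result.error || 'Failed to create chunk')
      return result.data
    },
    onSuccess: (_data, { knowledgeBaseId }) => {
      // The same invalidation the component used to perform by hand.
      void queryClient.invalidateQueries({ queryKey: knowledgeKeys.detail(knowledgeBaseId) })
    },
  })
}
```

Note how the component keeps `isProcessingRef` as a synchronous double-submit guard: `isPending` only flips after a re-render, so the ref still closes the gap between click and state update.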
@@ -1,13 +1,8 @@
'use client'

import { useState } from 'react'
import { createLogger } from '@sim/logger'
import { useQueryClient } from '@tanstack/react-query'
import { Button, Modal, ModalBody, ModalContent, ModalFooter, ModalHeader } from '@/components/emcn'
import type { ChunkData } from '@/lib/knowledge/types'
import { knowledgeKeys } from '@/hooks/queries/knowledge'

const logger = createLogger('DeleteChunkModal')
import { useDeleteChunk } from '@/hooks/queries/knowledge'

interface DeleteChunkModalProps {
chunk: ChunkData | null
@@ -24,44 +19,12 @@ export function DeleteChunkModal({
isOpen,
onClose,
}: DeleteChunkModalProps) {
const queryClient = useQueryClient()
const [isDeleting, setIsDeleting] = useState(false)
const { mutate: deleteChunk, isPending: isDeleting } = useDeleteChunk()

const handleDeleteChunk = async () => {
const handleDeleteChunk = () => {
if (!chunk || isDeleting) return

try {
setIsDeleting(true)

const response = await fetch(
`/api/knowledge/${knowledgeBaseId}/documents/${documentId}/chunks/${chunk.id}`,
{
method: 'DELETE',
}
)

if (!response.ok) {
throw new Error('Failed to delete chunk')
}

const result = await response.json()

if (result.success) {
logger.info('Chunk deleted successfully:', chunk.id)

await queryClient.invalidateQueries({
queryKey: knowledgeKeys.detail(knowledgeBaseId),
})

onClose()
} else {
throw new Error(result.error || 'Failed to delete chunk')
}
} catch (err) {
logger.error('Error deleting chunk:', err)
} finally {
setIsDeleting(false)
}
deleteChunk({ knowledgeBaseId, documentId, chunkId: chunk.id }, { onSuccess: onClose })
}

if (!chunk) return null

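DeleteChunkModal shows the payoff of the migration: the whole delete flow collapses to one `deleteChunk(...)` call, with the modal-specific behavior (closing) supplied as a call-site callback. TanStack Query runs the hook-level `onSuccess` (shared cache invalidation) before the call-site `onSuccess` (UI), which is what makes the one-liner safe. A compact sketch of the assumed hook shape, with the endpoint taken from the removed inline code:

```ts
// Sketch; everything beyond the endpoint and invalidation key is an assumption.
import { useMutation, useQueryClient } from '@tanstack/react-query'
import { knowledgeKeys } from '@/hooks/queries/knowledge'

export function useDeleteChunk() {
  const queryClient = useQueryClient()
  return useMutation({
    mutationFn: async (input: { knowledgeBaseId: string; documentId: string; chunkId: string }) => {
      const res = await fetch(
        `/api/knowledge/${input.knowledgeBaseId}/documents/${input.documentId}/chunks/${input.chunkId}`,
        { method: 'DELETE' }
      )
      const result = await res.json()
      if (!res.ok || !result.success) throw new Error(result.error || 'Failed to delete chunk')
      return result
    },
    // Hook-level onSuccess runs before any call-site onSuccess (e.g. the modal's onClose).
    onSuccess: (_result, { knowledgeBaseId }) =>
      queryClient.invalidateQueries({ queryKey: knowledgeKeys.detail(knowledgeBaseId) }),
  })
}
```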
@@ -25,6 +25,7 @@ import {
} from '@/hooks/kb/use-knowledge-base-tag-definitions'
import { useNextAvailableSlot } from '@/hooks/kb/use-next-available-slot'
import { type TagDefinitionInput, useTagDefinitions } from '@/hooks/kb/use-tag-definitions'
import { useUpdateDocumentTags } from '@/hooks/queries/knowledge'

const logger = createLogger('DocumentTagsModal')

@@ -58,8 +59,6 @@ function formatValueForDisplay(value: string, fieldType: string): string {
try {
const date = new Date(value)
if (Number.isNaN(date.getTime())) return value
// For UTC dates, display the UTC date to prevent timezone shifts
// e.g., 2002-05-16T00:00:00.000Z should show as "May 16, 2002" not "May 15, 2002"
if (typeof value === 'string' && (value.endsWith('Z') || /[+-]\d{2}:\d{2}$/.test(value))) {
return new Date(
date.getUTCFullYear(),
@@ -96,6 +95,7 @@ export function DocumentTagsModal({
const documentTagHook = useTagDefinitions(knowledgeBaseId, documentId)
const kbTagHook = useKnowledgeBaseTagDefinitions(knowledgeBaseId)
const { getNextAvailableSlot: getServerNextSlot } = useNextAvailableSlot(knowledgeBaseId)
const { mutateAsync: updateDocumentTags } = useUpdateDocumentTags()

const { saveTagDefinitions, tagDefinitions, fetchTagDefinitions } = documentTagHook
const { tagDefinitions: kbTagDefinitions, fetchTagDefinitions: refreshTagDefinitions } = kbTagHook
@@ -118,7 +118,6 @@ export function DocumentTagsModal({
const definition = definitions.find((def) => def.tagSlot === slot)

if (rawValue !== null && rawValue !== undefined && definition) {
// Convert value to string for storage
const stringValue = String(rawValue).trim()
if (stringValue) {
tags.push({
@@ -142,41 +141,34 @@ export function DocumentTagsModal({
async (tagsToSave: DocumentTag[]) => {
if (!documentData) return

try {
const tagData: Record<string, string> = {}
const tagData: Record<string, string> = {}

// Only include tags that have values (omit empty ones)
// Use empty string for slots that should be cleared
ALL_TAG_SLOTS.forEach((slot) => {
const tag = tagsToSave.find((t) => t.slot === slot)
if (tag?.value.trim()) {
tagData[slot] = tag.value.trim()
} else {
// Use empty string to clear a tag (API schema expects string, not null)
tagData[slot] = ''
}
})

const response = await fetch(`/api/knowledge/${knowledgeBaseId}/documents/${documentId}`, {
method: 'PUT',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify(tagData),
})

if (!response.ok) {
throw new Error('Failed to update document tags')
ALL_TAG_SLOTS.forEach((slot) => {
const tag = tagsToSave.find((t) => t.slot === slot)
if (tag?.value.trim()) {
tagData[slot] = tag.value.trim()
} else {
tagData[slot] = ''
}
})

onDocumentUpdate?.(tagData as Record<string, string>)
await fetchTagDefinitions()
} catch (error) {
logger.error('Error updating document tags:', error)
throw error
}
await updateDocumentTags({
knowledgeBaseId,
documentId,
tags: tagData,
})

onDocumentUpdate?.(tagData)
await fetchTagDefinitions()
},
[documentData, knowledgeBaseId, documentId, fetchTagDefinitions, onDocumentUpdate]
[
documentData,
knowledgeBaseId,
documentId,
updateDocumentTags,
fetchTagDefinitions,
onDocumentUpdate,
]
)

const handleRemoveTag = async (index: number) => {

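DocumentTagsModal uses `mutateAsync` rather than `mutate` because the save callback is awaited by its callers and must reject on failure; the old try/catch/rethrow disappears since `mutateAsync` already returns a promise that rejects. A sketch of the assumed hook, with the PUT endpoint and string-valued tag payload taken from the removed inline code:

```ts
// Sketch; the real useUpdateDocumentTags may differ in shape.
import { useMutation, useQueryClient } from '@tanstack/react-query'
import { knowledgeKeys } from '@/hooks/queries/knowledge'

interface UpdateDocumentTagsInput {
  knowledgeBaseId: string
  documentId: string
  tags: Record<string, string> // empty string clears a slot, per the comment removed above
}

export function useUpdateDocumentTags() {
  const queryClient = useQueryClient()
  return useMutation({
    mutationFn: async ({ knowledgeBaseId, documentId, tags }: UpdateDocumentTagsInput) => {
      const res = await fetch(`/api/knowledge/${knowledgeBaseId}/documents/${documentId}`, {
        method: 'PUT',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify(tags),
      })
      if (!res.ok) throw new Error('Failed to update document tags')
      return res.json()
    },
    onSuccess: (_data, { knowledgeBaseId }) =>
      queryClient.invalidateQueries({ queryKey: knowledgeKeys.detail(knowledgeBaseId) }),
  })
}
```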
@@ -2,7 +2,6 @@

import { useEffect, useMemo, useRef, useState } from 'react'
import { createLogger } from '@sim/logger'
import { useQueryClient } from '@tanstack/react-query'
import { ChevronDown, ChevronUp } from 'lucide-react'
import {
Button,
@@ -19,7 +18,7 @@ import {
import type { ChunkData, DocumentData } from '@/lib/knowledge/types'
import { getAccurateTokenCount, getTokenStrings } from '@/lib/tokenization/estimators'
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
import { knowledgeKeys } from '@/hooks/queries/knowledge'
import { useUpdateChunk } from '@/hooks/queries/knowledge'

const logger = createLogger('EditChunkModal')

@@ -50,17 +49,22 @@ export function EditChunkModal({
onNavigateToPage,
maxChunkSize,
}: EditChunkModalProps) {
const queryClient = useQueryClient()
const userPermissions = useUserPermissionsContext()
const {
mutate: updateChunk,
isPending: isSaving,
error: mutationError,
reset: resetMutation,
} = useUpdateChunk()
const [editedContent, setEditedContent] = useState(chunk?.content || '')
const [isSaving, setIsSaving] = useState(false)
const [isNavigating, setIsNavigating] = useState(false)
const [error, setError] = useState<string | null>(null)
const [showUnsavedChangesAlert, setShowUnsavedChangesAlert] = useState(false)
const [pendingNavigation, setPendingNavigation] = useState<(() => void) | null>(null)
const [tokenizerOn, setTokenizerOn] = useState(false)
const textareaRef = useRef<HTMLTextAreaElement>(null)

const error = mutationError?.message ?? null

const hasUnsavedChanges = editedContent !== (chunk?.content || '')

const tokenStrings = useMemo(() => {
@@ -102,44 +106,15 @@ export function EditChunkModal({
const canNavigatePrev = currentChunkIndex > 0 || currentPage > 1
const canNavigateNext = currentChunkIndex < allChunks.length - 1 || currentPage < totalPages

const handleSaveContent = async () => {
const handleSaveContent = () => {
if (!chunk || !document) return

try {
setIsSaving(true)
setError(null)

const response = await fetch(
`/api/knowledge/${knowledgeBaseId}/documents/${document.id}/chunks/${chunk.id}`,
{
method: 'PUT',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify({
content: editedContent,
}),
}
)

if (!response.ok) {
const result = await response.json()
throw new Error(result.error || 'Failed to update chunk')
}

const result = await response.json()

if (result.success) {
await queryClient.invalidateQueries({
queryKey: knowledgeKeys.detail(knowledgeBaseId),
})
}
} catch (err) {
logger.error('Error updating chunk:', err)
setError(err instanceof Error ? err.message : 'An error occurred')
} finally {
setIsSaving(false)
}
updateChunk({
knowledgeBaseId,
documentId: document.id,
chunkId: chunk.id,
content: editedContent,
})
}

const navigateToChunk = async (direction: 'prev' | 'next') => {
@@ -165,7 +140,6 @@ export function EditChunkModal({
}
} catch (err) {
logger.error(`Error navigating ${direction}:`, err)
setError(`Failed to navigate to ${direction === 'prev' ? 'previous' : 'next'} chunk`)
} finally {
setIsNavigating(false)
}
@@ -185,6 +159,7 @@ export function EditChunkModal({
setPendingNavigation(null)
setShowUnsavedChangesAlert(true)
} else {
resetMutation()
onClose()
}
}
@@ -195,6 +170,7 @@ export function EditChunkModal({
void pendingNavigation()
setPendingNavigation(null)
} else {
resetMutation()
onClose()
}
}

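EditChunkModal follows the same derivation pattern: `isSaving` and `error` are read off the mutation (`isPending`, `error.message`) instead of being mirrored into `useState`, and `resetMutation()` clears a stale error whenever the modal closes or a pending navigation proceeds. Reduced to a skeleton, under the assumption that `useUpdateChunk` exposes the standard TanStack Query mutation surface:

```tsx
// Illustrative skeleton only; the modal above has more state (navigation, tokenizer, alerts).
import { useUpdateChunk } from '@/hooks/queries/knowledge'

function EditChunkSaveControls(props: {
  knowledgeBaseId: string
  documentId: string
  chunkId: string
  content: string
  onClose: () => void
}) {
  const { mutate: updateChunk, isPending: isSaving, error: mutationError, reset } = useUpdateChunk()
  // Derived, not mirrored: no setIsSaving/setError bookkeeping to keep in sync.
  const error = mutationError?.message ?? null

  return (
    <div>
      {error ? <p>{error}</p> : null}
      <button
        type='button'
        disabled={isSaving}
        onClick={() =>
          updateChunk({
            knowledgeBaseId: props.knowledgeBaseId,
            documentId: props.documentId,
            chunkId: props.chunkId,
            content: props.content,
          })
        }
      >
        {isSaving ? 'Saving...' : 'Save'}
      </button>
      <button
        type='button'
        onClick={() => {
          reset() // drop any stale mutation error before closing, as the diff does
          props.onClose()
        }}
      >
        Close
      </button>
    </div>
  )
}
```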
@@ -48,7 +48,13 @@ import { ActionBar } from '@/app/workspace/[workspaceId]/knowledge/[id]/componen
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
import { useContextMenu } from '@/app/workspace/[workspaceId]/w/components/sidebar/hooks'
import { useDocument, useDocumentChunks, useKnowledgeBase } from '@/hooks/kb/use-knowledge'
import { knowledgeKeys, useDocumentChunkSearchQuery } from '@/hooks/queries/knowledge'
import {
knowledgeKeys,
useBulkChunkOperation,
useDeleteDocument,
useDocumentChunkSearchQuery,
useUpdateChunk,
} from '@/hooks/queries/knowledge'

const logger = createLogger('Document')

@@ -403,11 +409,13 @@ export function Document({
const [isCreateChunkModalOpen, setIsCreateChunkModalOpen] = useState(false)
const [chunkToDelete, setChunkToDelete] = useState<ChunkData | null>(null)
const [isDeleteModalOpen, setIsDeleteModalOpen] = useState(false)
const [isBulkOperating, setIsBulkOperating] = useState(false)
const [showDeleteDocumentDialog, setShowDeleteDocumentDialog] = useState(false)
const [isDeletingDocument, setIsDeletingDocument] = useState(false)
const [contextMenuChunk, setContextMenuChunk] = useState<ChunkData | null>(null)

const { mutate: updateChunkMutation } = useUpdateChunk()
const { mutate: deleteDocumentMutation, isPending: isDeletingDocument } = useDeleteDocument()
const { mutate: bulkChunkMutation, isPending: isBulkOperating } = useBulkChunkOperation()

const {
isOpen: isContextMenuOpen,
position: contextMenuPosition,
@@ -440,36 +448,23 @@ export function Document({
setSelectedChunk(null)
}

const handleToggleEnabled = async (chunkId: string) => {
const handleToggleEnabled = (chunkId: string) => {
const chunk = displayChunks.find((c) => c.id === chunkId)
if (!chunk) return

try {
const response = await fetch(
`/api/knowledge/${knowledgeBaseId}/documents/${documentId}/chunks/${chunkId}`,
{
method: 'PUT',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify({
enabled: !chunk.enabled,
}),
}
)

if (!response.ok) {
throw new Error('Failed to update chunk')
updateChunkMutation(
{
knowledgeBaseId,
documentId,
chunkId,
enabled: !chunk.enabled,
},
{
onSuccess: () => {
updateChunk(chunkId, { enabled: !chunk.enabled })
},
}

const result = await response.json()

if (result.success) {
updateChunk(chunkId, { enabled: !chunk.enabled })
}
} catch (err) {
logger.error('Error updating chunk:', err)
}
)
}

const handleDeleteChunk = (chunkId: string) => {
@@ -515,107 +510,65 @@ export function Document({
/**
* Handles deleting the document
*/
const handleDeleteDocument = async () => {
const handleDeleteDocument = () => {
if (!documentData) return

try {
setIsDeletingDocument(true)

const response = await fetch(`/api/knowledge/${knowledgeBaseId}/documents/${documentId}`, {
method: 'DELETE',
})

if (!response.ok) {
throw new Error('Failed to delete document')
deleteDocumentMutation(
{ knowledgeBaseId, documentId },
{
onSuccess: () => {
router.push(`/workspace/${workspaceId}/knowledge/${knowledgeBaseId}`)
},
}

const result = await response.json()

if (result.success) {
await queryClient.invalidateQueries({
queryKey: knowledgeKeys.detail(knowledgeBaseId),
})

router.push(`/workspace/${workspaceId}/knowledge/${knowledgeBaseId}`)
} else {
throw new Error(result.error || 'Failed to delete document')
}
} catch (err) {
logger.error('Error deleting document:', err)
setIsDeletingDocument(false)
}
)
}

const performBulkChunkOperation = async (
const performBulkChunkOperation = (
operation: 'enable' | 'disable' | 'delete',
chunks: ChunkData[]
) => {
if (chunks.length === 0) return

try {
setIsBulkOperating(true)

const response = await fetch(
`/api/knowledge/${knowledgeBaseId}/documents/${documentId}/chunks`,
{
method: 'PATCH',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify({
operation,
chunkIds: chunks.map((chunk) => chunk.id),
}),
}
)

if (!response.ok) {
throw new Error(`Failed to ${operation} chunks`)
bulkChunkMutation(
{
knowledgeBaseId,
documentId,
operation,
chunkIds: chunks.map((chunk) => chunk.id),
},
{
onSuccess: (result) => {
if (operation === 'delete' || result.errorCount > 0) {
refreshChunks()
} else {
chunks.forEach((chunk) => {
updateChunk(chunk.id, { enabled: operation === 'enable' })
})
}
logger.info(`Successfully ${operation}d ${result.successCount} chunks`)
setSelectedChunks(new Set())
},
}

const result = await response.json()

if (result.success) {
if (operation === 'delete') {
await refreshChunks()
} else {
result.data.results.forEach((opResult: any) => {
if (opResult.operation === operation) {
opResult.chunkIds.forEach((chunkId: string) => {
updateChunk(chunkId, { enabled: operation === 'enable' })
})
}
})
}

logger.info(`Successfully ${operation}d ${result.data.successCount} chunks`)
}

setSelectedChunks(new Set())
} catch (err) {
logger.error(`Error ${operation}ing chunks:`, err)
} finally {
setIsBulkOperating(false)
}
)
}

const handleBulkEnable = async () => {
const handleBulkEnable = () => {
const chunksToEnable = displayChunks.filter(
(chunk) => selectedChunks.has(chunk.id) && !chunk.enabled
)
await performBulkChunkOperation('enable', chunksToEnable)
performBulkChunkOperation('enable', chunksToEnable)
}

const handleBulkDisable = async () => {
const handleBulkDisable = () => {
const chunksToDisable = displayChunks.filter(
(chunk) => selectedChunks.has(chunk.id) && chunk.enabled
)
await performBulkChunkOperation('disable', chunksToDisable)
performBulkChunkOperation('disable', chunksToDisable)
}

const handleBulkDelete = async () => {
const handleBulkDelete = () => {
const chunksToDelete = displayChunks.filter((chunk) => selectedChunks.has(chunk.id))
await performBulkChunkOperation('delete', chunksToDelete)
performBulkChunkOperation('delete', chunksToDelete)
}

const selectedChunksList = displayChunks.filter((chunk) => selectedChunks.has(chunk.id))

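The Document component's bulk path implies the hook normalizes the API envelope: the old code unwrapped `result.data.successCount` and replayed `result.data.results`, while the new `onSuccess` reads `result.successCount`/`result.errorCount` directly and falls back to `refreshChunks()` for deletes or partial failures. A sketch of a `useBulkChunkOperation` consistent with that; the return shape is inferred, not confirmed:

```ts
// Sketch; the PATCH endpoint and payload come from the removed code, the unwrapped
// { successCount, errorCount } return shape is inferred from the new onSuccess above.
import { useMutation, useQueryClient } from '@tanstack/react-query'
import { knowledgeKeys } from '@/hooks/queries/knowledge'

interface BulkChunkInput {
  knowledgeBaseId: string
  documentId: string
  operation: 'enable' | 'disable' | 'delete'
  chunkIds: string[]
}

export function useBulkChunkOperation() {
  const queryClient = useQueryClient()
  return useMutation({
    mutationFn: async ({ knowledgeBaseId, documentId, operation, chunkIds }: BulkChunkInput) => {
      const res = await fetch(`/api/knowledge/${knowledgeBaseId}/documents/${documentId}/chunks`, {
        method: 'PATCH',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({ operation, chunkIds }),
      })
      const result = await res.json()
      if (!res.ok || !result.success) throw new Error(`Failed to ${operation} chunks`)
      // Normalize the envelope so callers read successCount/errorCount directly.
      return {
        successCount: result.data.successCount as number,
        errorCount: (result.data.errorCount ?? 0) as number,
      }
    },
    onSuccess: (_result, { knowledgeBaseId }) =>
      queryClient.invalidateQueries({ queryKey: knowledgeKeys.detail(knowledgeBaseId) }),
  })
}
```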
@@ -2,7 +2,6 @@

import { useCallback, useEffect, useRef, useState } from 'react'
import { createLogger } from '@sim/logger'
import { useQueryClient } from '@tanstack/react-query'
import { format } from 'date-fns'
import {
AlertCircle,
@@ -62,7 +61,12 @@ import {
type TagDefinition,
useKnowledgeBaseTagDefinitions,
} from '@/hooks/kb/use-knowledge-base-tag-definitions'
import { knowledgeKeys } from '@/hooks/queries/knowledge'
import {
useBulkDocumentOperation,
useDeleteDocument,
useDeleteKnowledgeBase,
useUpdateDocument,
} from '@/hooks/queries/knowledge'

const logger = createLogger('KnowledgeBase')

@@ -407,12 +411,17 @@ export function KnowledgeBase({
id,
knowledgeBaseName: passedKnowledgeBaseName,
}: KnowledgeBaseProps) {
const queryClient = useQueryClient()
const params = useParams()
const workspaceId = params.workspaceId as string
const { removeKnowledgeBase } = useKnowledgeBasesList(workspaceId, { enabled: false })
const userPermissions = useUserPermissionsContext()

const { mutate: updateDocumentMutation } = useUpdateDocument()
const { mutate: deleteDocumentMutation } = useDeleteDocument()
const { mutate: deleteKnowledgeBaseMutation, isPending: isDeleting } =
useDeleteKnowledgeBase(workspaceId)
const { mutate: bulkDocumentMutation, isPending: isBulkOperating } = useBulkDocumentOperation()

const [searchQuery, setSearchQuery] = useState('')
const [showTagsModal, setShowTagsModal] = useState(false)

@@ -427,8 +436,6 @@ export function KnowledgeBase({
const [selectedDocuments, setSelectedDocuments] = useState<Set<string>>(new Set())
const [showDeleteDialog, setShowDeleteDialog] = useState(false)
const [showAddDocumentsModal, setShowAddDocumentsModal] = useState(false)
const [isDeleting, setIsDeleting] = useState(false)
const [isBulkOperating, setIsBulkOperating] = useState(false)
const [showDeleteDocumentModal, setShowDeleteDocumentModal] = useState(false)
const [documentToDelete, setDocumentToDelete] = useState<string | null>(null)
const [showBulkDeleteModal, setShowBulkDeleteModal] = useState(false)
@@ -550,7 +557,7 @@ export function KnowledgeBase({
/**
* Checks for documents with stale processing states and marks them as failed
*/
const checkForDeadProcesses = async () => {
const checkForDeadProcesses = () => {
const now = new Date()
const DEAD_PROCESS_THRESHOLD_MS = 600 * 1000 // 10 minutes

@@ -567,116 +574,79 @@ export function KnowledgeBase({

logger.warn(`Found ${staleDocuments.length} documents with dead processes`)

const markFailedPromises = staleDocuments.map(async (doc) => {
try {
const response = await fetch(`/api/knowledge/${id}/documents/${doc.id}`, {
method: 'PUT',
headers: {
'Content-Type': 'application/json',
staleDocuments.forEach((doc) => {
updateDocumentMutation(
{
knowledgeBaseId: id,
documentId: doc.id,
updates: { markFailedDueToTimeout: true },
},
{
onSuccess: () => {
logger.info(`Successfully marked dead process as failed for document: ${doc.filename}`)
},
body: JSON.stringify({
markFailedDueToTimeout: true,
}),
})

if (!response.ok) {
const errorData = await response.json().catch(() => ({ error: 'Unknown error' }))
logger.error(`Failed to mark document ${doc.id} as failed: ${errorData.error}`)
return
}

const result = await response.json()
if (result.success) {
logger.info(`Successfully marked dead process as failed for document: ${doc.filename}`)
}
} catch (error) {
logger.error(`Error marking document ${doc.id} as failed:`, error)
}
)
})

await Promise.allSettled(markFailedPromises)
}

const handleToggleEnabled = async (docId: string) => {
const handleToggleEnabled = (docId: string) => {
const document = documents.find((doc) => doc.id === docId)
if (!document) return

const newEnabled = !document.enabled

// Optimistic update
updateDocument(docId, { enabled: newEnabled })

try {
const response = await fetch(`/api/knowledge/${id}/documents/${docId}`, {
method: 'PUT',
headers: {
'Content-Type': 'application/json',
updateDocumentMutation(
{
knowledgeBaseId: id,
documentId: docId,
updates: { enabled: newEnabled },
},
{
onError: () => {
// Rollback on error
updateDocument(docId, { enabled: !newEnabled })
},
body: JSON.stringify({
enabled: newEnabled,
}),
})

if (!response.ok) {
throw new Error('Failed to update document')
}

const result = await response.json()

if (!result.success) {
updateDocument(docId, { enabled: !newEnabled })
}
} catch (err) {
updateDocument(docId, { enabled: !newEnabled })
logger.error('Error updating document:', err)
}
)
}

/**
* Handles retrying a failed document processing
*/
const handleRetryDocument = async (docId: string) => {
try {
updateDocument(docId, {
processingStatus: 'pending',
processingError: null,
processingStartedAt: null,
processingCompletedAt: null,
})
const handleRetryDocument = (docId: string) => {
// Optimistic update
updateDocument(docId, {
processingStatus: 'pending',
processingError: null,
processingStartedAt: null,
processingCompletedAt: null,
})

const response = await fetch(`/api/knowledge/${id}/documents/${docId}`, {
method: 'PUT',
headers: {
'Content-Type': 'application/json',
updateDocumentMutation(
{
knowledgeBaseId: id,
documentId: docId,
updates: { retryProcessing: true },
},
{
onSuccess: () => {
refreshDocuments()
logger.info(`Document retry initiated successfully for: ${docId}`)
},
onError: (err) => {
logger.error('Error retrying document:', err)
updateDocument(docId, {
processingStatus: 'failed',
processingError:
err instanceof Error ? err.message : 'Failed to retry document processing',
})
},
body: JSON.stringify({
retryProcessing: true,
}),
})

if (!response.ok) {
throw new Error('Failed to retry document processing')
}

const result = await response.json()

if (!result.success) {
throw new Error(result.error || 'Failed to retry document processing')
}

await refreshDocuments()

logger.info(`Document retry initiated successfully for: ${docId}`)
} catch (err) {
logger.error('Error retrying document:', err)
const currentDoc = documents.find((doc) => doc.id === docId)
if (currentDoc) {
updateDocument(docId, {
processingStatus: 'failed',
processingError:
err instanceof Error ? err.message : 'Failed to retry document processing',
})
}
}
)
}

/**
@@ -694,43 +664,32 @@ export function KnowledgeBase({
const currentDoc = documents.find((doc) => doc.id === documentId)
const previousName = currentDoc?.filename

// Optimistic update
updateDocument(documentId, { filename: newName })
queryClient.setQueryData<DocumentData>(knowledgeKeys.document(id, documentId), (previous) =>
previous ? { ...previous, filename: newName } : previous
)

try {
const response = await fetch(`/api/knowledge/${id}/documents/${documentId}`, {
method: 'PUT',
headers: {
'Content-Type': 'application/json',
return new Promise<void>((resolve, reject) => {
updateDocumentMutation(
{
knowledgeBaseId: id,
documentId,
updates: { filename: newName },
},
body: JSON.stringify({ filename: newName }),
})

if (!response.ok) {
const result = await response.json()
throw new Error(result.error || 'Failed to rename document')
}

const result = await response.json()

if (!result.success) {
throw new Error(result.error || 'Failed to rename document')
}

logger.info(`Document renamed: ${documentId}`)
} catch (err) {
if (previousName !== undefined) {
updateDocument(documentId, { filename: previousName })
queryClient.setQueryData<DocumentData>(
knowledgeKeys.document(id, documentId),
(previous) => (previous ? { ...previous, filename: previousName } : previous)
)
}
logger.error('Error renaming document:', err)
throw err
}
{
onSuccess: () => {
logger.info(`Document renamed: ${documentId}`)
resolve()
},
onError: (err) => {
// Rollback on error
if (previousName !== undefined) {
updateDocument(documentId, { filename: previousName })
}
logger.error('Error renaming document:', err)
reject(err)
},
}
)
})
}

/**
@@ -744,35 +703,26 @@ export function KnowledgeBase({
/**
* Confirms and executes the deletion of a single document
*/
const confirmDeleteDocument = async () => {
const confirmDeleteDocument = () => {
if (!documentToDelete) return

try {
const response = await fetch(`/api/knowledge/${id}/documents/${documentToDelete}`, {
method: 'DELETE',
})

if (!response.ok) {
throw new Error('Failed to delete document')
deleteDocumentMutation(
{ knowledgeBaseId: id, documentId: documentToDelete },
{
onSuccess: () => {
refreshDocuments()
setSelectedDocuments((prev) => {
const newSet = new Set(prev)
newSet.delete(documentToDelete)
return newSet
})
},
onSettled: () => {
setShowDeleteDocumentModal(false)
setDocumentToDelete(null)
},
}

const result = await response.json()

if (result.success) {
refreshDocuments()

setSelectedDocuments((prev) => {
const newSet = new Set(prev)
newSet.delete(documentToDelete)
return newSet
})
}
} catch (err) {
logger.error('Error deleting document:', err)
} finally {
setShowDeleteDocumentModal(false)
setDocumentToDelete(null)
}
)
}

/**
@@ -818,32 +768,18 @@ export function KnowledgeBase({
/**
* Handles deleting the entire knowledge base
*/
const handleDeleteKnowledgeBase = async () => {
const handleDeleteKnowledgeBase = () => {
if (!knowledgeBase) return

try {
setIsDeleting(true)

const response = await fetch(`/api/knowledge/${id}`, {
method: 'DELETE',
})

if (!response.ok) {
throw new Error('Failed to delete knowledge base')
deleteKnowledgeBaseMutation(
{ knowledgeBaseId: id },
{
onSuccess: () => {
removeKnowledgeBase(id)
router.push(`/workspace/${workspaceId}/knowledge`)
},
}

const result = await response.json()

if (result.success) {
removeKnowledgeBase(id)
router.push(`/workspace/${workspaceId}/knowledge`)
} else {
throw new Error(result.error || 'Failed to delete knowledge base')
}
} catch (err) {
logger.error('Error deleting knowledge base:', err)
setIsDeleting(false)
}
)
}

/**
@@ -856,93 +792,57 @@ export function KnowledgeBase({
/**
* Handles bulk enabling of selected documents
*/
const handleBulkEnable = async () => {
const handleBulkEnable = () => {
const documentsToEnable = documents.filter(
(doc) => selectedDocuments.has(doc.id) && !doc.enabled
)

if (documentsToEnable.length === 0) return

try {
setIsBulkOperating(true)

const response = await fetch(`/api/knowledge/${id}/documents`, {
method: 'PATCH',
headers: {
'Content-Type': 'application/json',
bulkDocumentMutation(
{
knowledgeBaseId: id,
operation: 'enable',
documentIds: documentsToEnable.map((doc) => doc.id),
},
{
onSuccess: (result) => {
result.updatedDocuments?.forEach((updatedDoc) => {
updateDocument(updatedDoc.id, { enabled: updatedDoc.enabled })
})
logger.info(`Successfully enabled ${result.successCount} documents`)
setSelectedDocuments(new Set())
},
body: JSON.stringify({
operation: 'enable',
documentIds: documentsToEnable.map((doc) => doc.id),
}),
})

if (!response.ok) {
throw new Error('Failed to enable documents')
}

const result = await response.json()

if (result.success) {
result.data.updatedDocuments.forEach((updatedDoc: { id: string; enabled: boolean }) => {
updateDocument(updatedDoc.id, { enabled: updatedDoc.enabled })
})

logger.info(`Successfully enabled ${result.data.successCount} documents`)
}

setSelectedDocuments(new Set())
} catch (err) {
logger.error('Error enabling documents:', err)
} finally {
setIsBulkOperating(false)
}
)
}

/**
* Handles bulk disabling of selected documents
*/
const handleBulkDisable = async () => {
const handleBulkDisable = () => {
const documentsToDisable = documents.filter(
(doc) => selectedDocuments.has(doc.id) && doc.enabled
)

if (documentsToDisable.length === 0) return

try {
setIsBulkOperating(true)

const response = await fetch(`/api/knowledge/${id}/documents`, {
method: 'PATCH',
headers: {
'Content-Type': 'application/json',
bulkDocumentMutation(
{
knowledgeBaseId: id,
operation: 'disable',
documentIds: documentsToDisable.map((doc) => doc.id),
},
{
onSuccess: (result) => {
result.updatedDocuments?.forEach((updatedDoc) => {
updateDocument(updatedDoc.id, { enabled: updatedDoc.enabled })
})
logger.info(`Successfully disabled ${result.successCount} documents`)
setSelectedDocuments(new Set())
},
body: JSON.stringify({
operation: 'disable',
documentIds: documentsToDisable.map((doc) => doc.id),
}),
})

if (!response.ok) {
throw new Error('Failed to disable documents')
}

const result = await response.json()

if (result.success) {
result.data.updatedDocuments.forEach((updatedDoc: { id: string; enabled: boolean }) => {
updateDocument(updatedDoc.id, { enabled: updatedDoc.enabled })
})

logger.info(`Successfully disabled ${result.data.successCount} documents`)
}

setSelectedDocuments(new Set())
} catch (err) {
logger.error('Error disabling documents:', err)
} finally {
setIsBulkOperating(false)
}
)
}

/**
@@ -956,44 +856,28 @@ export function KnowledgeBase({
/**
* Confirms and executes the bulk deletion of selected documents
*/
const confirmBulkDelete = async () => {
const confirmBulkDelete = () => {
const documentsToDelete = documents.filter((doc) => selectedDocuments.has(doc.id))

if (documentsToDelete.length === 0) return

try {
setIsBulkOperating(true)

const response = await fetch(`/api/knowledge/${id}/documents`, {
method: 'PATCH',
headers: {
'Content-Type': 'application/json',
bulkDocumentMutation(
{
knowledgeBaseId: id,
operation: 'delete',
documentIds: documentsToDelete.map((doc) => doc.id),
},
{
onSuccess: (result) => {
logger.info(`Successfully deleted ${result.successCount} documents`)
refreshDocuments()
setSelectedDocuments(new Set())
},
onSettled: () => {
setShowBulkDeleteModal(false)
},
body: JSON.stringify({
operation: 'delete',
documentIds: documentsToDelete.map((doc) => doc.id),
}),
})

if (!response.ok) {
throw new Error('Failed to delete documents')
}

const result = await response.json()

if (result.success) {
logger.info(`Successfully deleted ${result.data.successCount} documents`)
}

await refreshDocuments()

setSelectedDocuments(new Set())
} catch (err) {
logger.error('Error deleting documents:', err)
} finally {
setIsBulkOperating(false)
setShowBulkDeleteModal(false)
}
)
}

const selectedDocumentsList = documents.filter((doc) => selectedDocuments.has(doc.id))

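KnowledgeBase keeps its optimistic updates in the component (`updateDocument` before the mutation, rollback in `onError`). An alternative worth noting is pushing that into the hook with `onMutate` context rollback, so every caller gets the optimistic behavior for free. A hypothetical sketch, reusing the `knowledgeKeys.document` key factory that appears in the rename handler above; the shipped `useUpdateDocument` may not do this:

```ts
// Hypothetical alternative, not the repo's implementation: hook-level optimistic
// update with context-based rollback instead of component-level rollback.
import { useMutation, useQueryClient } from '@tanstack/react-query'
import { knowledgeKeys } from '@/hooks/queries/knowledge'

interface UpdateDocumentInput {
  knowledgeBaseId: string
  documentId: string
  updates: Record<string, unknown>
}

export function useUpdateDocumentOptimistic() {
  const queryClient = useQueryClient()
  return useMutation({
    mutationFn: async ({ knowledgeBaseId, documentId, updates }: UpdateDocumentInput) => {
      const res = await fetch(`/api/knowledge/${knowledgeBaseId}/documents/${documentId}`, {
        method: 'PUT',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify(updates),
      })
      if (!res.ok) throw new Error('Failed to update document')
      return res.json()
    },
    onMutate: async ({ knowledgeBaseId, documentId, updates }) => {
      const queryKey = knowledgeKeys.document(knowledgeBaseId, documentId)
      await queryClient.cancelQueries({ queryKey })
      const previous = queryClient.getQueryData(queryKey)
      // Apply the update eagerly; the server response will reconcile it later.
      queryClient.setQueryData(queryKey, (old: unknown) =>
        old ? { ...(old as Record<string, unknown>), ...updates } : old
      )
      return { previous, queryKey }
    },
    onError: (_err, _input, context) => {
      // Restore the snapshot taken in onMutate.
      if (context) queryClient.setQueryData(context.queryKey, context.previous)
    },
  })
}
```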
@@ -22,10 +22,10 @@ import {
type TagDefinition,
useKnowledgeBaseTagDefinitions,
} from '@/hooks/kb/use-knowledge-base-tag-definitions'
import { useCreateTagDefinition, useDeleteTagDefinition } from '@/hooks/queries/knowledge'

const logger = createLogger('BaseTagsModal')

/** Field type display labels */
const FIELD_TYPE_LABELS: Record<string, string> = {
text: 'Text',
number: 'Number',
@@ -45,7 +45,6 @@ interface DocumentListProps {
totalCount: number
}

/** Displays a list of documents affected by tag operations */
function DocumentList({ documents, totalCount }: DocumentListProps) {
const displayLimit = 5
const hasMore = totalCount > displayLimit
@@ -95,13 +94,14 @@ export function BaseTagsModal({ open, onOpenChange, knowledgeBaseId }: BaseTagsM
const { tagDefinitions: kbTagDefinitions, fetchTagDefinitions: refreshTagDefinitions } =
useKnowledgeBaseTagDefinitions(knowledgeBaseId)

const createTagMutation = useCreateTagDefinition()
const deleteTagMutation = useDeleteTagDefinition()

const [deleteTagDialogOpen, setDeleteTagDialogOpen] = useState(false)
const [selectedTag, setSelectedTag] = useState<TagDefinition | null>(null)
const [viewDocumentsDialogOpen, setViewDocumentsDialogOpen] = useState(false)
const [isDeletingTag, setIsDeletingTag] = useState(false)
const [tagUsageData, setTagUsageData] = useState<TagUsageData[]>([])
const [isCreatingTag, setIsCreatingTag] = useState(false)
const [isSavingTag, setIsSavingTag] = useState(false)
const [createTagForm, setCreateTagForm] = useState({
displayName: '',
fieldType: 'text',
@@ -177,13 +177,12 @@ export function BaseTagsModal({ open, onOpenChange, knowledgeBaseId }: BaseTagsM
}

const tagNameConflict =
isCreatingTag && !isSavingTag && hasTagNameConflict(createTagForm.displayName)
isCreatingTag && !createTagMutation.isPending && hasTagNameConflict(createTagForm.displayName)

const canSaveTag = () => {
return createTagForm.displayName.trim() && !hasTagNameConflict(createTagForm.displayName)
}

/** Get slot usage counts per field type */
const getSlotUsageByFieldType = (fieldType: string): { used: number; max: number } => {
const config = TAG_SLOT_CONFIG[fieldType as keyof typeof TAG_SLOT_CONFIG]
if (!config) return { used: 0, max: 0 }
@@ -191,13 +190,11 @@ export function BaseTagsModal({ open, onOpenChange, knowledgeBaseId }: BaseTagsM
return { used, max: config.maxSlots }
}

/** Check if a field type has available slots */
const hasAvailableSlots = (fieldType: string): boolean => {
const { used, max } = getSlotUsageByFieldType(fieldType)
return used < max
}

/** Field type options for Combobox */
const fieldTypeOptions: ComboboxOption[] = useMemo(() => {
return SUPPORTED_FIELD_TYPES.filter((type) => hasAvailableSlots(type)).map((type) => {
const { used, max } = getSlotUsageByFieldType(type)
@@ -211,43 +208,17 @@ export function BaseTagsModal({ open, onOpenChange, knowledgeBaseId }: BaseTagsM
const saveTagDefinition = async () => {
if (!canSaveTag()) return

setIsSavingTag(true)
try {
// Check if selected field type has available slots
if (!hasAvailableSlots(createTagForm.fieldType)) {
throw new Error(`No available slots for ${createTagForm.fieldType} type`)
}

// Get the next available slot from the API
const slotResponse = await fetch(
`/api/knowledge/${knowledgeBaseId}/next-available-slot?fieldType=${createTagForm.fieldType}`
)
if (!slotResponse.ok) {
throw new Error('Failed to get available slot')
}
const slotResult = await slotResponse.json()
if (!slotResult.success || !slotResult.data?.nextAvailableSlot) {
throw new Error('No available tag slots for this field type')
}

const newTagDefinition = {
tagSlot: slotResult.data.nextAvailableSlot,
await createTagMutation.mutateAsync({
knowledgeBaseId,
displayName: createTagForm.displayName.trim(),
fieldType: createTagForm.fieldType,
}

const response = await fetch(`/api/knowledge/${knowledgeBaseId}/tag-definitions`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify(newTagDefinition),
})

if (!response.ok) {
throw new Error('Failed to create tag definition')
}

await Promise.all([refreshTagDefinitions(), fetchTagUsage()])

setCreateTagForm({
@@ -257,27 +228,17 @@ export function BaseTagsModal({ open, onOpenChange, knowledgeBaseId }: BaseTagsM
setIsCreatingTag(false)
} catch (error) {
logger.error('Error creating tag definition:', error)
} finally {
setIsSavingTag(false)
}
}

const confirmDeleteTag = async () => {
if (!selectedTag) return

setIsDeletingTag(true)
try {
const response = await fetch(
`/api/knowledge/${knowledgeBaseId}/tag-definitions/${selectedTag.id}`,
{
method: 'DELETE',
}
)

if (!response.ok) {
const errorText = await response.text()
throw new Error(`Failed to delete tag definition: ${response.status} ${errorText}`)
}
await deleteTagMutation.mutateAsync({
knowledgeBaseId,
tagDefinitionId: selectedTag.id,
})

await Promise.all([refreshTagDefinitions(), fetchTagUsage()])

@@ -285,8 +246,6 @@ export function BaseTagsModal({ open, onOpenChange, knowledgeBaseId }: BaseTagsM
setSelectedTag(null)
} catch (error) {
logger.error('Error deleting tag definition:', error)
} finally {
setIsDeletingTag(false)
}
}

@@ -433,11 +392,11 @@ export function BaseTagsModal({ open, onOpenChange, knowledgeBaseId }: BaseTagsM
className='flex-1'
disabled={
!canSaveTag() ||
isSavingTag ||
createTagMutation.isPending ||
!hasAvailableSlots(createTagForm.fieldType)
}
>
{isSavingTag ? 'Creating...' : 'Create Tag'}
{createTagMutation.isPending ? 'Creating...' : 'Create Tag'}
</Button>
</div>
</div>
@@ -481,13 +440,17 @@ export function BaseTagsModal({ open, onOpenChange, knowledgeBaseId }: BaseTagsM
<ModalFooter>
<Button
variant='default'
disabled={isDeletingTag}
disabled={deleteTagMutation.isPending}
onClick={() => setDeleteTagDialogOpen(false)}
>
Cancel
</Button>
<Button variant='destructive' onClick={confirmDeleteTag} disabled={isDeletingTag}>
{isDeletingTag ? <>Deleting...</> : 'Delete Tag'}
<Button
variant='destructive'
onClick={confirmDeleteTag}
disabled={deleteTagMutation.isPending}
>
{deleteTagMutation.isPending ? 'Deleting...' : 'Delete Tag'}
</Button>
</ModalFooter>
</ModalContent>
@@ -499,7 +462,7 @@ export function BaseTagsModal({ open, onOpenChange, knowledgeBaseId }: BaseTagsM
<ModalHeader>Documents using "{selectedTag?.displayName}"</ModalHeader>
<ModalBody>
<div className='space-y-[8px]'>
<p className='text-[12px] text-[var(--text-tertiary)]'>
<p className='text-[12px] text-[var(--text-secondary)]'>
{selectedTagUsage?.documentCount || 0} document
{selectedTagUsage?.documentCount !== 1 ? 's are' : ' is'} currently using this tag
definition.
@@ -507,7 +470,7 @@ export function BaseTagsModal({ open, onOpenChange, knowledgeBaseId }: BaseTagsM

{selectedTagUsage?.documentCount === 0 ? (
<div className='rounded-[6px] border p-[16px] text-center'>
<p className='text-[12px] text-[var(--text-tertiary)]'>
<p className='text-[12px] text-[var(--text-secondary)]'>
This tag definition is not being used by any documents. You can safely delete it
to free up the tag slot.
</p>

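BaseTagsModal's create path previously issued two requests by hand: fetch the next available slot, then POST the tag definition. The new call passes only `{ knowledgeBaseId, displayName, fieldType }`, which suggests `useCreateTagDefinition` folds the slot lookup in. A sketch under that assumption, using the two endpoints visible in the removed code:

```ts
// Sketch; both endpoints are taken from the inline code removed above, the
// combined two-step mutationFn is an assumption about the real hook.
import { useMutation } from '@tanstack/react-query'

interface CreateTagInput {
  knowledgeBaseId: string
  displayName: string
  fieldType: string
}

export function useCreateTagDefinition() {
  return useMutation({
    mutationFn: async ({ knowledgeBaseId, displayName, fieldType }: CreateTagInput) => {
      // Step 1: resolve the next free slot for this field type.
      const slotRes = await fetch(
        `/api/knowledge/${knowledgeBaseId}/next-available-slot?fieldType=${fieldType}`
      )
      const slotResult = await slotRes.json()
      if (!slotRes.ok || !slotResult.success || !slotResult.data?.nextAvailableSlot) {
        throw new Error('No available tag slots for this field type')
      }

      // Step 2: create the definition in the resolved slot.
      const res = await fetch(`/api/knowledge/${knowledgeBaseId}/tag-definitions`, {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({
          tagSlot: slotResult.data.nextAvailableSlot,
          displayName,
          fieldType,
        }),
      })
      if (!res.ok) throw new Error('Failed to create tag definition')
      return res.json()
    },
  })
}
```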
@@ -3,7 +3,6 @@
import { useEffect, useRef, useState } from 'react'
import { zodResolver } from '@hookform/resolvers/zod'
import { createLogger } from '@sim/logger'
import { useQueryClient } from '@tanstack/react-query'
import { Loader2, RotateCcw, X } from 'lucide-react'
import { useParams } from 'next/navigation'
import { useForm } from 'react-hook-form'
@@ -23,7 +22,7 @@ import { cn } from '@/lib/core/utils/cn'
import { formatFileSize, validateKnowledgeBaseFile } from '@/lib/uploads/utils/file-utils'
import { ACCEPT_ATTRIBUTE } from '@/lib/uploads/utils/validation'
import { useKnowledgeUpload } from '@/app/workspace/[workspaceId]/knowledge/hooks/use-knowledge-upload'
import { knowledgeKeys } from '@/hooks/queries/knowledge'
import { useCreateKnowledgeBase, useDeleteKnowledgeBase } from '@/hooks/queries/knowledge'

const logger = createLogger('CreateBaseModal')

@@ -82,10 +81,11 @@ interface SubmitStatus {
export function CreateBaseModal({ open, onOpenChange }: CreateBaseModalProps) {
const params = useParams()
const workspaceId = params.workspaceId as string
const queryClient = useQueryClient()

const createKnowledgeBaseMutation = useCreateKnowledgeBase(workspaceId)
const deleteKnowledgeBaseMutation = useDeleteKnowledgeBase(workspaceId)

const fileInputRef = useRef<HTMLInputElement>(null)
const [isSubmitting, setIsSubmitting] = useState(false)
const [submitStatus, setSubmitStatus] = useState<SubmitStatus | null>(null)
const [files, setFiles] = useState<FileWithPreview[]>([])
const [fileError, setFileError] = useState<string | null>(null)
@@ -245,12 +245,14 @@ export function CreateBaseModal({ open, onOpenChange }: CreateBaseModalProps) {
})
}

const isSubmitting =
createKnowledgeBaseMutation.isPending || deleteKnowledgeBaseMutation.isPending || isUploading

const onSubmit = async (data: FormValues) => {
setIsSubmitting(true)
setSubmitStatus(null)

try {
const knowledgeBasePayload = {
const newKnowledgeBase = await createKnowledgeBaseMutation.mutateAsync({
name: data.name,
description: data.description || undefined,
workspaceId: workspaceId,
@@ -259,29 +261,8 @@ export function CreateBaseModal({ open, onOpenChange }: CreateBaseModalProps) {
minSize: data.minChunkSize,
overlap: data.overlapSize,
},
}

const response = await fetch('/api/knowledge', {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify(knowledgeBasePayload),
})

if (!response.ok) {
const errorData = await response.json()
throw new Error(errorData.error || 'Failed to create knowledge base')
}

const result = await response.json()

if (!result.success) {
throw new Error(result.error || 'Failed to create knowledge base')
}

const newKnowledgeBase = result.data

if (files.length > 0) {
try {
const uploadedFiles = await uploadFiles(files, newKnowledgeBase.id, {
@@ -293,15 +274,11 @@ export function CreateBaseModal({ open, onOpenChange }: CreateBaseModalProps) {

logger.info(`Successfully uploaded ${uploadedFiles.length} files`)
logger.info(`Started processing ${uploadedFiles.length} documents in the background`)

await queryClient.invalidateQueries({
queryKey: knowledgeKeys.list(workspaceId),
})
} catch (uploadError) {
logger.error('File upload failed, deleting knowledge base:', uploadError)
try {
await fetch(`/api/knowledge/${newKnowledgeBase.id}`, {
method: 'DELETE',
await deleteKnowledgeBaseMutation.mutateAsync({
knowledgeBaseId: newKnowledgeBase.id,
})
logger.info(`Deleted orphaned knowledge base: ${newKnowledgeBase.id}`)
} catch (deleteError) {
@@ -309,10 +286,6 @@ export function CreateBaseModal({ open, onOpenChange }: CreateBaseModalProps) {
}
throw uploadError
}
} else {
await queryClient.invalidateQueries({
queryKey: knowledgeKeys.list(workspaceId),
})
}

files.forEach((file) => URL.revokeObjectURL(file.preview))
@@ -325,8 +298,6 @@ export function CreateBaseModal({ open, onOpenChange }: CreateBaseModalProps) {
type: 'error',
message: error instanceof Error ? error.message : 'An unknown error occurred',
})
} finally {
setIsSubmitting(false)
}
}

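Note on the pattern above: the diff consumes `useCreateKnowledgeBase` / `useDeleteKnowledgeBase` from `@/hooks/queries/knowledge` without showing them. As a rough sketch only — assuming TanStack Query v5 and the `{ success, data, error }` response envelope visible in the removed fetch code; the real hooks may differ — a create hook of this shape would look something like:

```tsx
import { useMutation, useQueryClient } from '@tanstack/react-query'
import { knowledgeKeys } from '@/hooks/queries/knowledge'

/** Payload shape inferred from the call site above; chunkingConfig fields are approximate. */
interface CreateKnowledgeBasePayload {
  name: string
  description?: string
  workspaceId: string
  chunkingConfig?: Record<string, number>
}

/**
 * Minimal sketch of a create mutation: it wraps the POST request, unwraps
 * the { success, data, error } envelope once, and invalidates the workspace's
 * knowledge-base list on success, so callers keep no fetch or invalidation
 * logic of their own.
 */
export function useCreateKnowledgeBase(workspaceId: string) {
  const queryClient = useQueryClient()
  return useMutation({
    mutationFn: async (payload: CreateKnowledgeBasePayload) => {
      const response = await fetch('/api/knowledge', {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify(payload),
      })
      const result = await response.json()
      if (!response.ok || !result.success) {
        throw new Error(result.error || 'Failed to create knowledge base')
      }
      return result.data
    },
    onSuccess: () => {
      queryClient.invalidateQueries({ queryKey: knowledgeKeys.list(workspaceId) })
    },
  })
}
```

With `mutateAsync` throwing on failure and `isPending` replacing hand-rolled flags, the component-side deltas above (dropping `setIsSubmitting`, deriving `isSubmitting` from `isPending`) follow directly.
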
@@ -2,7 +2,6 @@

import { useEffect, useState } from 'react'
import { createLogger } from '@sim/logger'
import { useQueryClient } from '@tanstack/react-query'
import { AlertTriangle, ChevronDown, LibraryBig, MoreHorizontal } from 'lucide-react'
import Link from 'next/link'
import {
@@ -15,7 +14,7 @@ import {
} from '@/components/emcn'
import { Trash } from '@/components/emcn/icons/trash'
import { filterButtonClass } from '@/app/workspace/[workspaceId]/knowledge/components/constants'
import { knowledgeKeys } from '@/hooks/queries/knowledge'
import { useUpdateKnowledgeBase } from '@/hooks/queries/knowledge'

const logger = createLogger('KnowledgeHeader')

@@ -54,14 +53,13 @@ interface Workspace {
}

export function KnowledgeHeader({ breadcrumbs, options }: KnowledgeHeaderProps) {
const queryClient = useQueryClient()
const [isActionsPopoverOpen, setIsActionsPopoverOpen] = useState(false)
const [isWorkspacePopoverOpen, setIsWorkspacePopoverOpen] = useState(false)
const [workspaces, setWorkspaces] = useState<Workspace[]>([])
const [isLoadingWorkspaces, setIsLoadingWorkspaces] = useState(false)
const [isUpdatingWorkspace, setIsUpdatingWorkspace] = useState(false)

// Fetch available workspaces
const updateKnowledgeBase = useUpdateKnowledgeBase()

useEffect(() => {
if (!options?.knowledgeBaseId) return

@@ -76,7 +74,6 @@ export function KnowledgeHeader({ breadcrumbs, options }: KnowledgeHeaderProps)

const data = await response.json()

// Filter workspaces where user has write/admin permissions
const availableWorkspaces = data.workspaces
.filter((ws: any) => ws.permissions === 'write' || ws.permissions === 'admin')
.map((ws: any) => ({
@@ -97,47 +94,27 @@ export function KnowledgeHeader({ breadcrumbs, options }: KnowledgeHeaderProps)
}, [options?.knowledgeBaseId])

const handleWorkspaceChange = async (workspaceId: string | null) => {
if (isUpdatingWorkspace || !options?.knowledgeBaseId) return
if (updateKnowledgeBase.isPending || !options?.knowledgeBaseId) return

try {
setIsUpdatingWorkspace(true)
setIsWorkspacePopoverOpen(false)
setIsWorkspacePopoverOpen(false)

const response = await fetch(`/api/knowledge/${options.knowledgeBaseId}`, {
method: 'PUT',
headers: {
'Content-Type': 'application/json',
updateKnowledgeBase.mutate(
{
knowledgeBaseId: options.knowledgeBaseId,
updates: { workspaceId },
},
{
onSuccess: () => {
logger.info(
`Knowledge base workspace updated: ${options.knowledgeBaseId} -> ${workspaceId}`
)
options.onWorkspaceChange?.(workspaceId)
},
onError: (err) => {
logger.error('Error updating workspace:', err)
},
body: JSON.stringify({
workspaceId,
}),
})

if (!response.ok) {
const result = await response.json()
throw new Error(result.error || 'Failed to update workspace')
}

const result = await response.json()

if (result.success) {
logger.info(
`Knowledge base workspace updated: ${options.knowledgeBaseId} -> ${workspaceId}`
)

await queryClient.invalidateQueries({
queryKey: knowledgeKeys.detail(options.knowledgeBaseId),
})

await options.onWorkspaceChange?.(workspaceId)
} else {
throw new Error(result.error || 'Failed to update workspace')
}
} catch (err) {
logger.error('Error updating workspace:', err)
} finally {
setIsUpdatingWorkspace(false)
}
)
}

const currentWorkspace = workspaces.find((ws) => ws.id === options?.currentWorkspaceId)
@@ -147,7 +124,6 @@ export function KnowledgeHeader({ breadcrumbs, options }: KnowledgeHeaderProps)
<div className={HEADER_STYLES.container}>
<div className={HEADER_STYLES.breadcrumbs}>
{breadcrumbs.map((breadcrumb, index) => {
// Use unique identifier when available, fallback to content-based key
const key = breadcrumb.id || `${breadcrumb.label}-${breadcrumb.href || index}`

return (
@@ -189,13 +165,13 @@ export function KnowledgeHeader({ breadcrumbs, options }: KnowledgeHeaderProps)
<PopoverTrigger asChild>
<Button
variant='outline'
disabled={isLoadingWorkspaces || isUpdatingWorkspace}
disabled={isLoadingWorkspaces || updateKnowledgeBase.isPending}
className={filterButtonClass}
>
<span className='truncate'>
{isLoadingWorkspaces
? 'Loading...'
: isUpdatingWorkspace
: updateKnowledgeBase.isPending
? 'Updating...'
: currentWorkspace?.name || 'No workspace'}
</span>

@@ -32,6 +32,7 @@ import {
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
import { useContextMenu } from '@/app/workspace/[workspaceId]/w/components/sidebar/hooks'
import { useKnowledgeBasesList } from '@/hooks/kb/use-knowledge'
import { useDeleteKnowledgeBase, useUpdateKnowledgeBase } from '@/hooks/queries/knowledge'
import { useDebounce } from '@/hooks/use-debounce'

const logger = createLogger('Knowledge')
@@ -51,10 +52,12 @@ export function Knowledge() {
const params = useParams()
const workspaceId = params.workspaceId as string

const { knowledgeBases, isLoading, error, removeKnowledgeBase, updateKnowledgeBase } =
useKnowledgeBasesList(workspaceId)
const { knowledgeBases, isLoading, error } = useKnowledgeBasesList(workspaceId)
const userPermissions = useUserPermissionsContext()

const { mutateAsync: updateKnowledgeBaseMutation } = useUpdateKnowledgeBase(workspaceId)
const { mutateAsync: deleteKnowledgeBaseMutation } = useDeleteKnowledgeBase(workspaceId)

const [searchQuery, setSearchQuery] = useState('')
const debouncedSearchQuery = useDebounce(searchQuery, 300)
const [isCreateModalOpen, setIsCreateModalOpen] = useState(false)
@@ -112,29 +115,13 @@ export function Knowledge() {
*/
const handleUpdateKnowledgeBase = useCallback(
async (id: string, name: string, description: string) => {
const response = await fetch(`/api/knowledge/${id}`, {
method: 'PUT',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify({ name, description }),
await updateKnowledgeBaseMutation({
knowledgeBaseId: id,
updates: { name, description },
})

if (!response.ok) {
const result = await response.json()
throw new Error(result.error || 'Failed to update knowledge base')
}

const result = await response.json()

if (result.success) {
logger.info(`Knowledge base updated: ${id}`)
updateKnowledgeBase(id, { name, description })
} else {
throw new Error(result.error || 'Failed to update knowledge base')
}
logger.info(`Knowledge base updated: ${id}`)
},
[updateKnowledgeBase]
[updateKnowledgeBaseMutation]
)

/**
@@ -142,25 +129,10 @@ export function Knowledge() {
*/
const handleDeleteKnowledgeBase = useCallback(
async (id: string) => {
const response = await fetch(`/api/knowledge/${id}`, {
method: 'DELETE',
})

if (!response.ok) {
const result = await response.json()
throw new Error(result.error || 'Failed to delete knowledge base')
}

const result = await response.json()

if (result.success) {
logger.info(`Knowledge base deleted: ${id}`)
removeKnowledgeBase(id)
} else {
throw new Error(result.error || 'Failed to delete knowledge base')
}
await deleteKnowledgeBaseMutation({ knowledgeBaseId: id })
logger.info(`Knowledge base deleted: ${id}`)
},
[removeKnowledgeBase]
[deleteKnowledgeBaseMutation]
)

/**

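Caller-side, the handlers reduce to single awaited calls. A condensed sketch of the resulting shape (hook names as in the diff; the assumption, consistent with the removed `removeKnowledgeBase` bookkeeping, is that the hooks invalidate the list query themselves):

```tsx
import { useCallback } from 'react'
import { useDeleteKnowledgeBase } from '@/hooks/queries/knowledge'

/** Sketch of the post-refactor handler shape used by Knowledge above. */
function useDeleteHandler(workspaceId: string) {
  const { mutateAsync: deleteKnowledgeBase } = useDeleteKnowledgeBase(workspaceId)

  return useCallback(
    async (id: string) => {
      // Errors thrown by mutateAsync propagate to the caller (e.g. a
      // confirmation dialog); cache invalidation inside the hook refreshes
      // useKnowledgeBasesList, so no local state mirroring is needed.
      await deleteKnowledgeBase({ knowledgeBaseId: id })
    },
    [deleteKnowledgeBase]
  )
}
```
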
@@ -16,8 +16,8 @@ import {
import { redactApiKeys } from '@/lib/core/security/redaction'
import { cn } from '@/lib/core/utils/cn'
import {
BlockDetailsSidebar,
getLeftmostBlockId,
PreviewEditor,
WorkflowPreview,
} from '@/app/workspace/[workspaceId]/w/components/preview'
import { useExecutionSnapshot } from '@/hooks/queries/logs'
@@ -248,11 +248,10 @@ export function ExecutionSnapshot({
cursorStyle='pointer'
executedBlocks={blockExecutions}
selectedBlockId={pinnedBlockId}
lightweight
/>
</div>
{pinnedBlockId && workflowState.blocks[pinnedBlockId] && (
<BlockDetailsSidebar
<PreviewEditor
block={workflowState.blocks[pinnedBlockId]}
executionData={blockExecutions[pinnedBlockId]}
allBlockExecutions={blockExecutions}

@@ -234,7 +234,7 @@ function ProgressBar({
{segments.map((segment, index) => (
<div
key={index}
className='absolute h-full'
className='absolute h-full opacity-70'
style={{
left: `${segment.startPercent}%`,
width: `${segment.widthPercent}%`,

@@ -257,7 +257,7 @@ export const LogDetails = memo(function LogDetails({
Version
</span>
<div className='flex w-0 flex-1 justify-end'>
<span className='max-w-full truncate rounded-[6px] bg-[#14291B] px-[9px] py-[2px] font-medium text-[#86EFAC] text-[12px]'>
<span className='max-w-full truncate rounded-[6px] bg-[#bbf7d0] px-[9px] py-[2px] font-medium text-[#15803d] text-[12px] dark:bg-[#14291B] dark:text-[#86EFAC]'>
{log.deploymentVersionName || `v${log.deploymentVersion}`}
</span>
</div>

@@ -19,6 +19,7 @@ import { DatePicker } from '@/components/emcn/components/date-picker/date-picker
import { cn } from '@/lib/core/utils/cn'
import { hasActiveFilters } from '@/lib/logs/filters'
import { getTriggerOptions } from '@/lib/logs/get-trigger-options'
import { type LogStatus, STATUS_CONFIG } from '@/app/workspace/[workspaceId]/logs/utils'
import { getBlock } from '@/blocks/registry'
import { useFolderStore } from '@/stores/folders/store'
import { useFilterStore } from '@/stores/logs/filters/store'
@@ -211,12 +212,12 @@ export function LogsToolbar({
}, [level])

const statusOptions: ComboboxOption[] = useMemo(
() => [
{ value: 'error', label: 'Error', icon: getColorIcon('var(--text-error)') },
{ value: 'info', label: 'Info', icon: getColorIcon('var(--terminal-status-info-color)') },
{ value: 'running', label: 'Running', icon: getColorIcon('#22c55e') },
{ value: 'pending', label: 'Pending', icon: getColorIcon('#f59e0b') },
],
() =>
(Object.keys(STATUS_CONFIG) as LogStatus[]).map((status) => ({
value: status,
label: STATUS_CONFIG[status].label,
icon: getColorIcon(STATUS_CONFIG[status].color),
})),
[]
)

@@ -242,12 +243,8 @@ export function LogsToolbar({

const selectedStatusColor = useMemo(() => {
if (selectedStatuses.length !== 1) return null
const status = selectedStatuses[0]
if (status === 'error') return 'var(--text-error)'
if (status === 'info') return 'var(--terminal-status-info-color)'
if (status === 'running') return '#22c55e'
if (status === 'pending') return '#f59e0b'
return null
const status = selectedStatuses[0] as LogStatus
return STATUS_CONFIG[status]?.color ?? null
}, [selectedStatuses])

const workflowOptions: ComboboxOption[] = useMemo(

@@ -5,7 +5,6 @@ import { getIntegrationMetadata } from '@/lib/logs/get-trigger-options'
import { getBlock } from '@/blocks/registry'
import { CORE_TRIGGER_TYPES } from '@/stores/logs/filters/types'

/** Column configuration for logs table - shared between header and rows */
export const LOG_COLUMNS = {
date: { width: 'w-[8%]', minWidth: 'min-w-[70px]', label: 'Date' },
time: { width: 'w-[12%]', minWidth: 'min-w-[90px]', label: 'Time' },
@@ -16,10 +15,8 @@ export const LOG_COLUMNS = {
duration: { width: 'w-[20%]', minWidth: 'min-w-[100px]', label: 'Duration' },
} as const

/** Type-safe column key derived from LOG_COLUMNS */
export type LogColumnKey = keyof typeof LOG_COLUMNS

/** Ordered list of column keys for rendering table headers */
export const LOG_COLUMN_ORDER: readonly LogColumnKey[] = [
'date',
'time',
@@ -30,7 +27,6 @@ export const LOG_COLUMN_ORDER: readonly LogColumnKey[] = [
'duration',
] as const

/** Possible execution status values for workflow logs */
export type LogStatus = 'error' | 'pending' | 'running' | 'info' | 'cancelled'

/**
@@ -53,30 +49,28 @@ export function getDisplayStatus(status: string | null | undefined): LogStatus {
}
}

/** Configuration mapping log status to Badge variant and display label */
const STATUS_VARIANT_MAP: Record<
export const STATUS_CONFIG: Record<
LogStatus,
{ variant: React.ComponentProps<typeof Badge>['variant']; label: string }
{ variant: React.ComponentProps<typeof Badge>['variant']; label: string; color: string }
> = {
error: { variant: 'red', label: 'Error' },
pending: { variant: 'amber', label: 'Pending' },
running: { variant: 'green', label: 'Running' },
cancelled: { variant: 'gray', label: 'Cancelled' },
info: { variant: 'gray', label: 'Info' },
error: { variant: 'red', label: 'Error', color: 'var(--text-error)' },
pending: { variant: 'amber', label: 'Pending', color: '#f59e0b' },
running: { variant: 'green', label: 'Running', color: '#22c55e' },
cancelled: { variant: 'orange', label: 'Cancelled', color: '#f97316' },
info: { variant: 'gray', label: 'Info', color: 'var(--terminal-status-info-color)' },
}

/** Configuration mapping core trigger types to Badge color variants */
const TRIGGER_VARIANT_MAP: Record<string, React.ComponentProps<typeof Badge>['variant']> = {
manual: 'gray-secondary',
api: 'blue',
schedule: 'green',
chat: 'purple',
webhook: 'orange',
mcp: 'cyan',
a2a: 'teal',
}

interface StatusBadgeProps {
/** The execution status to display */
status: LogStatus
}

@@ -86,14 +80,13 @@ interface StatusBadgeProps {
* @returns A Badge with dot indicator and status label
*/
export const StatusBadge = React.memo(({ status }: StatusBadgeProps) => {
const config = STATUS_VARIANT_MAP[status]
const config = STATUS_CONFIG[status]
return React.createElement(Badge, { variant: config.variant, dot: true }, config.label)
})

StatusBadge.displayName = 'StatusBadge'

interface TriggerBadgeProps {
/** The trigger type identifier (e.g., 'manual', 'api', or integration block type) */
trigger: string
}

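The `STATUS_CONFIG` record above becomes the single source of truth for status presentation. A small hypothetical consumer showing what the exhaustive `Record<LogStatus, ...>` typing buys:

```ts
import { STATUS_CONFIG, type LogStatus } from '@/app/workspace/[workspaceId]/logs/utils'

/**
 * Because STATUS_CONFIG is exhaustively keyed by LogStatus, extending the
 * union (say with a hypothetical 'queued' status) without a matching entry
 * is a compile error, and every consumer that iterates the record -- the
 * toolbar combobox, StatusBadge, the selected-status color -- picks the new
 * status up with no further changes.
 */
export function listStatusOptions(): Array<{ status: LogStatus; label: string; color: string }> {
  return (Object.keys(STATUS_CONFIG) as LogStatus[]).map((status) => ({
    status,
    label: STATUS_CONFIG[status].label,
    color: STATUS_CONFIG[status].color,
  }))
}
```
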
@@ -213,7 +213,6 @@ function TemplateCardInner({
isPannable={false}
defaultZoom={0.8}
fitPadding={0.2}
lightweight
cursorStyle='pointer'
/>
) : (

@@ -2,12 +2,12 @@ import { memo, useCallback } from 'react'
import { ArrowLeftRight, ArrowUpDown, Circle, CircleOff, LogOut } from 'lucide-react'
import { Button, Copy, Tooltip, Trash2 } from '@/components/emcn'
import { cn } from '@/lib/core/utils/cn'
import { isValidStartBlockType } from '@/lib/workflows/triggers/start-block-types'
import { isInputDefinitionTrigger } from '@/lib/workflows/triggers/input-definition-triggers'
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
import { validateTriggerPaste } from '@/app/workspace/[workspaceId]/w/[workflowId]/utils'
import { useCollaborativeWorkflow } from '@/hooks/use-collaborative-workflow'
import { useNotificationStore } from '@/stores/notifications'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
import { getUniqueBlockName, prepareDuplicateBlockState } from '@/stores/workflows/utils'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'

const DEFAULT_DUPLICATE_OFFSET = { x: 50, y: 50 }
@@ -48,29 +48,38 @@ export const ActionBar = memo(
collaborativeBatchToggleBlockEnabled,
collaborativeBatchToggleBlockHandles,
} = useCollaborativeWorkflow()
const { activeWorkflowId, setPendingSelection } = useWorkflowRegistry()
const { setPendingSelection } = useWorkflowRegistry()

const addNotification = useNotificationStore((s) => s.addNotification)

const handleDuplicateBlock = useCallback(() => {
const blocks = useWorkflowStore.getState().blocks
const sourceBlock = blocks[blockId]
if (!sourceBlock) return
const { copyBlocks, preparePasteData, activeWorkflowId } = useWorkflowRegistry.getState()
const existingBlocks = useWorkflowStore.getState().blocks
copyBlocks([blockId])

const newId = crypto.randomUUID()
const newName = getUniqueBlockName(sourceBlock.name, blocks)
const subBlockValues =
useSubBlockStore.getState().workflowValues[activeWorkflowId || '']?.[blockId] || {}
const pasteData = preparePasteData(DEFAULT_DUPLICATE_OFFSET)
if (!pasteData) return

const { block, subBlockValues: filteredValues } = prepareDuplicateBlockState({
sourceBlock,
newId,
newName,
positionOffset: DEFAULT_DUPLICATE_OFFSET,
subBlockValues,
})
const blocks = Object.values(pasteData.blocks)
const validation = validateTriggerPaste(blocks, existingBlocks, 'duplicate')
if (!validation.isValid) {
addNotification({
level: 'error',
message: validation.message!,
workflowId: activeWorkflowId || undefined,
})
return
}

setPendingSelection([newId])
collaborativeBatchAddBlocks([block], [], {}, {}, { [newId]: filteredValues })
}, [blockId, activeWorkflowId, collaborativeBatchAddBlocks, setPendingSelection])
setPendingSelection(blocks.map((b) => b.id))
collaborativeBatchAddBlocks(
blocks,
pasteData.edges,
pasteData.loops,
pasteData.parallels,
pasteData.subBlockValues
)
}, [blockId, addNotification, collaborativeBatchAddBlocks, setPendingSelection])

const { isEnabled, horizontalHandles, parentId, parentType } = useWorkflowStore(
useCallback(
@@ -90,7 +99,7 @@ export const ActionBar = memo(

const userPermissions = useUserPermissionsContext()

const isStartBlock = isValidStartBlockType(blockType)
const isStartBlock = isInputDefinitionTrigger(blockType)
const isResponseBlock = blockType === 'response'
const isNoteBlock = blockType === 'note'
const isSubflowBlock = blockType === 'loop' || blockType === 'parallel'
@@ -142,7 +151,7 @@ export const ActionBar = memo(
</Tooltip.Root>
)}

{!isStartBlock && !isResponseBlock && !isSubflowBlock && (
{!isStartBlock && !isResponseBlock && (
<Tooltip.Root>
<Tooltip.Trigger asChild>
<Button
@@ -213,6 +222,29 @@ export const ActionBar = memo(
</Tooltip.Root>
)}

{isSubflowBlock && (
<Tooltip.Root>
<Tooltip.Trigger asChild>
<Button
variant='ghost'
onClick={(e) => {
e.stopPropagation()
if (!disabled) {
collaborativeBatchToggleBlockEnabled([blockId])
}
}}
className={ACTION_BUTTON_STYLES}
disabled={disabled}
>
{isEnabled ? <Circle className={ICON_SIZE} /> : <CircleOff className={ICON_SIZE} />}
</Button>
</Tooltip.Trigger>
<Tooltip.Content side='top'>
{getTooltipMessage(isEnabled ? 'Disable Block' : 'Enable Block')}
</Tooltip.Content>
</Tooltip.Root>
)}

<Tooltip.Root>
<Tooltip.Trigger asChild>
<Button

@@ -8,7 +8,7 @@ import {
PopoverDivider,
PopoverItem,
} from '@/components/emcn'
import { isValidStartBlockType } from '@/lib/workflows/triggers/start-block-types'
import { TriggerUtils } from '@/lib/workflows/triggers/triggers'

/**
* Block information for context menu actions
@@ -74,12 +74,16 @@ export function BlockMenu({
const allEnabled = selectedBlocks.every((b) => b.enabled)
const allDisabled = selectedBlocks.every((b) => !b.enabled)

const hasStarterBlock = selectedBlocks.some((b) => isValidStartBlockType(b.type))
const hasSingletonBlock = selectedBlocks.some(
(b) =>
TriggerUtils.requiresSingleInstance(b.type) || TriggerUtils.isSingleInstanceBlockType(b.type)
)
const hasTriggerBlock = selectedBlocks.some((b) => TriggerUtils.isTriggerBlock(b))
const allNoteBlocks = selectedBlocks.every((b) => b.type === 'note')
const isSubflow =
isSingleBlock && (selectedBlocks[0]?.type === 'loop' || selectedBlocks[0]?.type === 'parallel')

const canRemoveFromSubflow = showRemoveFromSubflow && !hasStarterBlock
const canRemoveFromSubflow = showRemoveFromSubflow && !hasTriggerBlock

const getToggleEnabledLabel = () => {
if (allEnabled) return 'Disable'
@@ -127,7 +131,7 @@ export function BlockMenu({
<span>Paste</span>
<span className='ml-auto opacity-70 group-hover:opacity-100'>⌘V</span>
</PopoverItem>
{!hasStarterBlock && (
{!hasSingletonBlock && (
<PopoverItem
disabled={disableEdit}
onClick={() => {

@@ -26,7 +26,6 @@ export interface CanvasMenuProps {
onOpenLogs: () => void
onToggleVariables: () => void
onToggleChat: () => void
onInvite: () => void
isVariablesOpen?: boolean
isChatOpen?: boolean
hasClipboard?: boolean
@@ -55,15 +54,12 @@ export function CanvasMenu({
onOpenLogs,
onToggleVariables,
onToggleChat,
onInvite,
isVariablesOpen = false,
isChatOpen = false,
hasClipboard = false,
disableEdit = false,
disableAdmin = false,
canUndo = false,
canRedo = false,
isInvitationsDisabled = false,
}: CanvasMenuProps) {
return (
<Popover
@@ -179,22 +175,6 @@ export function CanvasMenu({
>
{isChatOpen ? 'Close Chat' : 'Open Chat'}
</PopoverItem>

{/* Admin action - hidden when invitations are disabled */}
{!isInvitationsDisabled && (
<>
<PopoverDivider />
<PopoverItem
disabled={disableAdmin}
onClick={() => {
onInvite()
onClose()
}}
>
Invite to Workspace
</PopoverItem>
</>
)}
</PopoverContent>
</Popover>
)

@@ -886,17 +886,16 @@ export function Chat() {
onMouseDown={(e) => e.stopPropagation()}
>
{shouldShowConfigureStartInputsButton && (
<Badge
variant='outline'
className='flex-none cursor-pointer whitespace-nowrap rounded-[6px]'
<div
className='flex flex-none cursor-pointer items-center whitespace-nowrap rounded-[6px] border border-[var(--border-1)] bg-[var(--surface-5)] px-[9px] py-[2px] font-medium font-sans text-[12px] text-[var(--text-primary)] hover:bg-[var(--surface-7)] dark:hover:border-[var(--surface-7)] dark:hover:bg-[var(--border-1)]'
title='Add chat inputs to Start block'
onMouseDown={(e) => {
e.stopPropagation()
handleConfigureStartInputs()
}}
>
<span className='whitespace-nowrap text-[12px]'>Add inputs</span>
</Badge>
<span className='whitespace-nowrap'>Add inputs</span>
</div>
)}

<OutputSelect

@@ -129,10 +129,6 @@ export function OutputSelect({
? baselineWorkflow.blocks?.[block.id]?.subBlocks?.responseFormat?.value
: subBlockValues?.[block.id]?.responseFormat
const responseFormat = parseResponseFormatSafely(responseFormatValue, block.id)
const operationValue =
shouldUseBaseline && baselineWorkflow
? baselineWorkflow.blocks?.[block.id]?.subBlocks?.operation?.value
: subBlockValues?.[block.id]?.operation

let outputsToProcess: Record<string, unknown> = {}

@@ -146,10 +142,20 @@ export function OutputSelect({
outputsToProcess = blockConfig?.outputs || {}
}
} else {
const toolOutputs =
blockConfig && typeof operationValue === 'string'
? getToolOutputs(blockConfig, operationValue)
: {}
// Build subBlocks object for tool selector
const rawSubBlockValues =
shouldUseBaseline && baselineWorkflow
? baselineWorkflow.blocks?.[block.id]?.subBlocks
: subBlockValues?.[block.id]
const subBlocks: Record<string, { value: unknown }> = {}
if (rawSubBlockValues && typeof rawSubBlockValues === 'object') {
for (const [key, val] of Object.entries(rawSubBlockValues)) {
// Handle both { value: ... } and raw value formats
subBlocks[key] = val && typeof val === 'object' && 'value' in val ? val : { value: val }
}
}

const toolOutputs = blockConfig ? getToolOutputs(blockConfig, subBlocks) : {}
outputsToProcess =
Object.keys(toolOutputs).length > 0 ? toolOutputs : blockConfig?.outputs || {}
}

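The normalization inlined above folds two storage shapes into one. Pulled out as a standalone helper for illustration (the component keeps it inline; this is a sketch, not an existing utility):

```ts
/**
 * Baseline workflow snapshots store sub-blocks as { value } wrappers, while
 * live sub-block state may hold raw values; both are coerced to the shape
 * getToolOutputs expects.
 */
function normalizeSubBlocks(
  raw: Record<string, unknown> | undefined
): Record<string, { value: unknown }> {
  const subBlocks: Record<string, { value: unknown }> = {}
  if (raw && typeof raw === 'object') {
    for (const [key, val] of Object.entries(raw)) {
      subBlocks[key] =
        val && typeof val === 'object' && 'value' in val
          ? (val as { value: unknown })
          : { value: val }
    }
  }
  return subBlocks
}

// Both shapes normalize identically:
normalizeSubBlocks({ operation: { value: 'list' } }) // => { operation: { value: 'list' } }
normalizeSubBlocks({ operation: 'list' })            // => { operation: { value: 'list' } }
```
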
@@ -138,18 +138,24 @@ export const Notifications = memo(function Notifications() {
}`}
>
<div className='flex h-full flex-col justify-between px-[8px] pt-[6px] pb-[8px]'>
<div
className={`font-medium text-[12px] leading-[16px] ${
hasAction ? 'line-clamp-2' : 'line-clamp-4'
}`}
>
<div className='flex items-start gap-[8px]'>
<div
className={`min-w-0 flex-1 font-medium text-[12px] leading-[16px] ${
hasAction ? 'line-clamp-2' : 'line-clamp-4'
}`}
>
{notification.level === 'error' && (
<span className='mr-[6px] mb-[2.75px] inline-block h-[6px] w-[6px] rounded-[2px] bg-[var(--text-error)] align-middle' />
)}
{notification.message}
</div>
<Tooltip.Root>
<Tooltip.Trigger asChild>
<Button
variant='ghost'
onClick={() => removeNotification(notification.id)}
aria-label='Dismiss notification'
className='!p-1.5 -m-1.5 float-right ml-[16px]'
className='!p-1.5 -m-1.5 shrink-0'
>
<X className='h-3 w-3' />
</Button>
@@ -158,10 +164,6 @@ export const Notifications = memo(function Notifications() {
<Tooltip.Shortcut keys='⌘E'>Clear all</Tooltip.Shortcut>
</Tooltip.Content>
</Tooltip.Root>
{notification.level === 'error' && (
<span className='mr-[6px] mb-[2.75px] inline-block h-[6px] w-[6px] rounded-[2px] bg-[var(--text-error)] align-middle' />
)}
{notification.message}
</div>
{hasAction && (
<Button

@@ -0,0 +1,22 @@
import { PopoverSection } from '@/components/emcn'

/**
* Skeleton loading component for chat history dropdown
* Displays placeholder content while chats are being loaded
*/
export function ChatHistorySkeleton() {
return (
<>
<PopoverSection>
<div className='h-3 w-12 animate-pulse rounded bg-muted/40' />
</PopoverSection>
<div className='flex flex-col gap-0.5'>
{[1, 2, 3].map((i) => (
<div key={i} className='flex h-[25px] items-center px-[6px]'>
<div className='h-3 w-full animate-pulse rounded bg-muted/40' />
</div>
))}
</div>
</>
)
}
@@ -0,0 +1 @@
export * from './chat-history-skeleton'
@@ -0,0 +1,79 @@
import { Button } from '@/components/emcn'

type CheckpointConfirmationVariant = 'restore' | 'discard'

interface CheckpointConfirmationProps {
/** Confirmation variant - 'restore' for reverting, 'discard' for edit with checkpoint options */
variant: CheckpointConfirmationVariant
/** Whether an action is currently processing */
isProcessing: boolean
/** Callback when cancel is clicked */
onCancel: () => void
/** Callback when revert is clicked */
onRevert: () => void
/** Callback when continue is clicked (only for 'discard' variant) */
onContinue?: () => void
}

/**
* Inline confirmation for checkpoint operations
* Supports two variants:
* - 'restore': Simple revert confirmation with warning
* - 'discard': Edit with checkpoint options (revert or continue without revert)
*/
export function CheckpointConfirmation({
variant,
isProcessing,
onCancel,
onRevert,
onContinue,
}: CheckpointConfirmationProps) {
const isRestoreVariant = variant === 'restore'

return (
<div className='mt-[8px] rounded-[4px] border border-[var(--border)] bg-[var(--surface-4)] p-[10px]'>
<p className='mb-[8px] text-[12px] text-[var(--text-primary)]'>
{isRestoreVariant ? (
<>
Revert to checkpoint? This will restore your workflow to the state saved at this
checkpoint.{' '}
<span className='text-[var(--text-error)]'>This action cannot be undone.</span>
</>
) : (
'Continue from a previous message?'
)}
</p>
<div className='flex gap-[8px]'>
<Button
onClick={onCancel}
variant='active'
size='sm'
className='flex-1'
disabled={isProcessing}
>
Cancel
</Button>
<Button
onClick={onRevert}
variant='destructive'
size='sm'
className='flex-1'
disabled={isProcessing}
>
{isProcessing ? 'Reverting...' : 'Revert'}
</Button>
{!isRestoreVariant && onContinue && (
<Button
onClick={onContinue}
variant='tertiary'
size='sm'
className='flex-1'
disabled={isProcessing}
>
Continue
</Button>
)}
</div>
</div>
)
}
@@ -0,0 +1 @@
export * from './checkpoint-confirmation'
@@ -0,0 +1 @@
export * from './file-display'
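For context on how the new `CheckpointConfirmation` component is meant to be driven, a hypothetical caller (`RevertPrompt` and its wiring are illustrative, not part of this diff; the 'discard' variant would additionally pass `onContinue` to get the third button):

```tsx
import { useState } from 'react'
import { CheckpointConfirmation } from './checkpoint-confirmation'

/** Illustrative wrapper showing the 'restore' variant's props wiring. */
function RevertPrompt({ onRevert }: { onRevert: () => Promise<void> }) {
  const [isProcessing, setIsProcessing] = useState(false)
  const [open, setOpen] = useState(true)

  if (!open) return null
  return (
    <CheckpointConfirmation
      variant='restore'
      isProcessing={isProcessing}
      onCancel={() => setOpen(false)}
      onRevert={async () => {
        // Disable both buttons while the revert runs, then dismiss.
        setIsProcessing(true)
        try {
          await onRevert()
          setOpen(false)
        } finally {
          setIsProcessing(false)
        }
      }}
    />
  )
}
```
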
Some files were not shown because too many files have changed in this diff.