diff --git a/.claude/rules/emcn-components.md b/.claude/rules/emcn-components.md new file mode 100644 index 000000000..011a3280f --- /dev/null +++ b/.claude/rules/emcn-components.md @@ -0,0 +1,35 @@ +--- +paths: + - "apps/sim/components/emcn/**" +--- + +# EMCN Components + +Import from `@/components/emcn`, never from subpaths (except CSS files). + +## CVA vs Direct Styles + +**Use CVA when:** 2+ variants (primary/secondary, sm/md/lg) + +```tsx +const buttonVariants = cva('base-classes', { + variants: { variant: { default: '...', primary: '...' } } +}) +export { Button, buttonVariants } +``` + +**Use direct className when:** Single consistent style, no variations + +```tsx +function Label({ className, ...props }) { + return +} +``` + +## Rules + +- Use Radix UI primitives for accessibility +- Export component and variants (if using CVA) +- TSDoc with usage examples +- Consistent tokens: `font-medium`, `text-[12px]`, `rounded-[4px]` +- `transition-colors` for hover states diff --git a/.claude/rules/global.md b/.claude/rules/global.md new file mode 100644 index 000000000..e749b67b2 --- /dev/null +++ b/.claude/rules/global.md @@ -0,0 +1,13 @@ +# Global Standards + +## Logging +Import `createLogger` from `sim/logger`. Use `logger.info`, `logger.warn`, `logger.error` instead of `console.log`. + +## Comments +Use TSDoc for documentation. No `====` separators. No non-TSDoc comments. + +## Styling +Never update global styles. Keep all styling local to components. + +## Package Manager +Use `bun` and `bunx`, not `npm` and `npx`. diff --git a/.claude/rules/sim-architecture.md b/.claude/rules/sim-architecture.md new file mode 100644 index 000000000..d6d719797 --- /dev/null +++ b/.claude/rules/sim-architecture.md @@ -0,0 +1,56 @@ +--- +paths: + - "apps/sim/**" +--- + +# Sim App Architecture + +## Core Principles +1. **Single Responsibility**: Each component, hook, store has one clear purpose +2. **Composition Over Complexity**: Break down complex logic into smaller pieces +3. **Type Safety First**: TypeScript interfaces for all props, state, return types +4. 
**Predictable State**: Zustand for global state, useState for UI-only concerns + +## Root-Level Structure + +``` +apps/sim/ +├── app/ # Next.js app router (pages, API routes) +├── blocks/ # Block definitions and registry +├── components/ # Shared UI (emcn/, ui/) +├── executor/ # Workflow execution engine +├── hooks/ # Shared hooks (queries/, selectors/) +├── lib/ # App-wide utilities +├── providers/ # LLM provider integrations +├── stores/ # Zustand stores +├── tools/ # Tool definitions +└── triggers/ # Trigger definitions +``` + +## Feature Organization + +Features live under `app/workspace/[workspaceId]/`: + +``` +feature/ +├── components/ # Feature components +├── hooks/ # Feature-scoped hooks +├── utils/ # Feature-scoped utilities (2+ consumers) +├── feature.tsx # Main component +└── page.tsx # Next.js page entry +``` + +## Naming Conventions +- **Components**: PascalCase (`WorkflowList`) +- **Hooks**: `use` prefix (`useWorkflowOperations`) +- **Files**: kebab-case (`workflow-list.tsx`) +- **Stores**: `stores/feature/store.ts` +- **Constants**: SCREAMING_SNAKE_CASE +- **Interfaces**: PascalCase with suffix (`WorkflowListProps`) + +## Utils Rules + +- **Never create `utils.ts` for single consumer** - inline it +- **Create `utils.ts` when** 2+ files need the same helper +- **Check existing sources** before duplicating (`lib/` has many utilities) +- **Location**: `lib/` (app-wide) → `feature/utils/` (feature-scoped) → inline (single-use) diff --git a/.claude/rules/sim-components.md b/.claude/rules/sim-components.md new file mode 100644 index 000000000..23799bcda --- /dev/null +++ b/.claude/rules/sim-components.md @@ -0,0 +1,48 @@ +--- +paths: + - "apps/sim/**/*.tsx" +--- + +# Component Patterns + +## Structure Order + +```typescript +'use client' // Only if using hooks + +// Imports (external → internal) +// Constants at module level +const CONFIG = { SPACING: 8 } as const + +// Props interface +interface ComponentProps { + requiredProp: string + optionalProp?: boolean +} + +export function Component({ requiredProp, optionalProp = false }: ComponentProps) { + // a. Refs + // b. External hooks (useParams, useRouter) + // c. Store hooks + // d. Custom hooks + // e. Local state + // f. useMemo + // g. useCallback + // h. useEffect + // i. Return JSX +} +``` + +## Rules + +1. `'use client'` only when using React hooks +2. Always define props interface +3. Extract constants with `as const` +4. Semantic HTML (`aside`, `nav`, `article`) +5. Optional chain callbacks: `onAction?.(id)` + +## Component Extraction + +**Extract when:** 50+ lines, used in 2+ files, or has own state/logic + +**Keep inline when:** < 10 lines, single use, purely presentational diff --git a/.claude/rules/sim-hooks.md b/.claude/rules/sim-hooks.md new file mode 100644 index 000000000..3c06a4a31 --- /dev/null +++ b/.claude/rules/sim-hooks.md @@ -0,0 +1,55 @@ +--- +paths: + - "apps/sim/**/use-*.ts" + - "apps/sim/**/hooks/**/*.ts" +--- + +# Hook Patterns + +## Structure + +```typescript +interface UseFeatureProps { + id: string + onSuccess?: (result: Result) => void +} + +export function useFeature({ id, onSuccess }: UseFeatureProps) { + // 1. Refs for stable dependencies + const idRef = useRef(id) + const onSuccessRef = useRef(onSuccess) + + // 2. State + const [data, setData] = useState(null) + const [isLoading, setIsLoading] = useState(false) + + // 3. Sync refs + useEffect(() => { + idRef.current = id + onSuccessRef.current = onSuccess + }, [id, onSuccess]) + + // 4. 
Operations (useCallback with empty deps when using refs) + const fetchData = useCallback(async () => { + setIsLoading(true) + try { + const result = await fetch(`/api/${idRef.current}`).then(r => r.json()) + setData(result) + onSuccessRef.current?.(result) + } finally { + setIsLoading(false) + } + }, []) + + return { data, isLoading, fetchData } +} +``` + +## Rules + +1. Single responsibility per hook +2. Props interface required +3. Refs for stable callback dependencies +4. Wrap returned functions in useCallback +5. Always try/catch async operations +6. Track loading/error states diff --git a/.claude/rules/sim-imports.md b/.claude/rules/sim-imports.md new file mode 100644 index 000000000..b1f1926cd --- /dev/null +++ b/.claude/rules/sim-imports.md @@ -0,0 +1,62 @@ +--- +paths: + - "apps/sim/**/*.ts" + - "apps/sim/**/*.tsx" +--- + +# Import Patterns + +## Absolute Imports + +**Always use absolute imports.** Never use relative imports. + +```typescript +// ✓ Good +import { useWorkflowStore } from '@/stores/workflows/store' +import { Button } from '@/components/ui/button' + +// ✗ Bad +import { useWorkflowStore } from '../../../stores/workflows/store' +``` + +## Barrel Exports + +Use barrel exports (`index.ts`) when a folder has 3+ exports. Import from barrel, not individual files. + +```typescript +// ✓ Good +import { Dashboard, Sidebar } from '@/app/workspace/[workspaceId]/logs/components' + +// ✗ Bad +import { Dashboard } from '@/app/workspace/[workspaceId]/logs/components/dashboard/dashboard' +``` + +## No Re-exports + +Do not re-export from non-barrel files. Import directly from the source. + +```typescript +// ✓ Good - import from where it's declared +import { CORE_TRIGGER_TYPES } from '@/stores/logs/filters/types' + +// ✗ Bad - re-exporting in utils.ts then importing from there +import { CORE_TRIGGER_TYPES } from '@/app/workspace/.../utils' +``` + +## Import Order + +1. React/core libraries +2. External libraries +3. UI components (`@/components/emcn`, `@/components/ui`) +4. Utilities (`@/lib/...`) +5. Stores (`@/stores/...`) +6. Feature imports +7. CSS imports + +## Type Imports + +Use `type` keyword for type-only imports: + +```typescript +import type { WorkflowLog } from '@/stores/logs/types' +``` diff --git a/.claude/rules/sim-integrations.md b/.claude/rules/sim-integrations.md new file mode 100644 index 000000000..cef0c895b --- /dev/null +++ b/.claude/rules/sim-integrations.md @@ -0,0 +1,209 @@ +--- +paths: + - "apps/sim/tools/**" + - "apps/sim/blocks/**" + - "apps/sim/triggers/**" +--- + +# Adding Integrations + +## Overview + +Adding a new integration typically requires: +1. **Tools** - API operations (`tools/{service}/`) +2. **Block** - UI component (`blocks/blocks/{service}.ts`) +3. **Icon** - SVG icon (`components/icons.tsx`) +4. **Trigger** (optional) - Webhooks/polling (`triggers/{service}/`) + +Always look up the service's API docs first. + +## 1. Tools (`tools/{service}/`) + +``` +tools/{service}/ +├── index.ts # Export all tools +├── types.ts # Params/response types +├── {action}.ts # Individual tool (e.g., send_message.ts) +└── ... 
+``` + +**Tool file structure:** + +```typescript +// tools/{service}/{action}.ts +import type { {Service}Params, {Service}Response } from '@/tools/{service}/types' +import type { ToolConfig } from '@/tools/types' + +export const {service}{Action}Tool: ToolConfig<{Service}Params, {Service}Response> = { + id: '{service}_{action}', + name: '{Service} {Action}', + description: 'What this tool does', + version: '1.0.0', + oauth: { required: true, provider: '{service}' }, // if OAuth + params: { /* param definitions */ }, + request: { + url: '/api/tools/{service}/{action}', + method: 'POST', + headers: () => ({ 'Content-Type': 'application/json' }), + body: (params) => ({ ...params }), + }, + transformResponse: async (response) => { + const data = await response.json() + if (!data.success) throw new Error(data.error) + return { success: true, output: data.output } + }, + outputs: { /* output definitions */ }, +} +``` + +**Register in `tools/registry.ts`:** + +```typescript +import { {service}{Action}Tool } from '@/tools/{service}' +// Add to registry object +{service}_{action}: {service}{Action}Tool, +``` + +## 2. Block (`blocks/blocks/{service}.ts`) + +```typescript +import { {Service}Icon } from '@/components/icons' +import type { BlockConfig } from '@/blocks/types' +import type { {Service}Response } from '@/tools/{service}/types' + +export const {Service}Block: BlockConfig<{Service}Response> = { + type: '{service}', + name: '{Service}', + description: 'Short description', + longDescription: 'Detailed description', + category: 'tools', + bgColor: '#hexcolor', + icon: {Service}Icon, + subBlocks: [ /* see SubBlock Properties below */ ], + tools: { + access: ['{service}_{action}', ...], + config: { + tool: (params) => `{service}_${params.operation}`, + params: (params) => ({ ...params }), + }, + }, + inputs: { /* input definitions */ }, + outputs: { /* output definitions */ }, +} +``` + +### SubBlock Properties + +```typescript +{ + id: 'fieldName', // Unique identifier + title: 'Field Label', // UI label + type: 'short-input', // See SubBlock Types below + placeholder: 'Hint text', + required: true, // See Required below + condition: { ... }, // See Condition below + dependsOn: ['otherField'], // See DependsOn below + mode: 'basic', // 'basic' | 'advanced' | 'both' | 'trigger' +} +``` + +**SubBlock Types:** `short-input`, `long-input`, `dropdown`, `code`, `switch`, `slider`, `oauth-input`, `channel-selector`, `user-selector`, `file-upload`, etc. 
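+
+For example, a `dropdown` subBlock combines the shared properties above with an option list (the `options` shape below is illustrative; mirror an existing block's dropdown for the exact format):
+
+```typescript
+{
+  id: 'operation',
+  title: 'Operation',
+  type: 'dropdown',
+  // `options` shape assumed for illustration only
+  options: [
+    { label: 'Send Message', id: 'send' },
+    { label: 'Read Messages', id: 'read' },
+  ],
+  required: true,
+}
+```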
+ +### `condition` - Show/hide based on another field + +```typescript +// Show when operation === 'send' +condition: { field: 'operation', value: 'send' } + +// Show when operation is 'send' OR 'read' +condition: { field: 'operation', value: ['send', 'read'] } + +// Show when operation !== 'send' +condition: { field: 'operation', value: 'send', not: true } + +// Complex: NOT in list AND another condition +condition: { + field: 'operation', + value: ['list_channels', 'list_users'], + not: true, + and: { field: 'destinationType', value: 'dm', not: true } +} +``` + +### `required` - Field validation + +```typescript +// Always required +required: true + +// Conditionally required (same syntax as condition) +required: { field: 'operation', value: 'send' } +``` + +### `dependsOn` - Clear field when dependencies change + +```typescript +// Clear when credential changes +dependsOn: ['credential'] + +// Clear when authMethod changes AND (credential OR botToken) changes +dependsOn: { all: ['authMethod'], any: ['credential', 'botToken'] } +``` + +### `mode` - When to show field + +- `'basic'` - Only in basic mode (default UI) +- `'advanced'` - Only in advanced mode (manual input) +- `'both'` - Show in both modes (default) +- `'trigger'` - Only when block is used as trigger + +**Register in `blocks/registry.ts`:** + +```typescript +import { {Service}Block } from '@/blocks/blocks/{service}' +// Add to registry object (alphabetically) +{service}: {Service}Block, +``` + +## 3. Icon (`components/icons.tsx`) + +```typescript +export function {Service}Icon(props: SVGProps) { + return ( + + {/* SVG path from service's brand assets */} + + ) +} +``` + +## 4. Trigger (`triggers/{service}/`) - Optional + +``` +triggers/{service}/ +├── index.ts # Export all triggers +├── webhook.ts # Webhook handler +├── utils.ts # Shared utilities +└── {event}.ts # Specific event handlers +``` + +**Register in `triggers/registry.ts`:** + +```typescript +import { {service}WebhookTrigger } from '@/triggers/{service}' +// Add to TRIGGER_REGISTRY +{service}_webhook: {service}WebhookTrigger, +``` + +## Checklist + +- [ ] Look up API docs for the service +- [ ] Create `tools/{service}/types.ts` with proper types +- [ ] Create tool files for each operation +- [ ] Create `tools/{service}/index.ts` barrel export +- [ ] Register tools in `tools/registry.ts` +- [ ] Add icon to `components/icons.tsx` +- [ ] Create block in `blocks/blocks/{service}.ts` +- [ ] Register block in `blocks/registry.ts` +- [ ] (Optional) Create triggers in `triggers/{service}/` +- [ ] (Optional) Register triggers in `triggers/registry.ts` diff --git a/.claude/rules/sim-queries.md b/.claude/rules/sim-queries.md new file mode 100644 index 000000000..0ca91ac26 --- /dev/null +++ b/.claude/rules/sim-queries.md @@ -0,0 +1,66 @@ +--- +paths: + - "apps/sim/hooks/queries/**/*.ts" +--- + +# React Query Patterns + +All React Query hooks live in `hooks/queries/`. + +## Query Key Factory + +Every query file defines a keys factory: + +```typescript +export const entityKeys = { + all: ['entity'] as const, + list: (workspaceId?: string) => [...entityKeys.all, 'list', workspaceId ?? ''] as const, + detail: (id?: string) => [...entityKeys.all, 'detail', id ?? ''] as const, +} +``` + +## File Structure + +```typescript +// 1. Query keys factory +// 2. Types (if needed) +// 3. Private fetch functions +// 4. 
Exported hooks +``` + +## Query Hook + +```typescript +export function useEntityList(workspaceId?: string, options?: { enabled?: boolean }) { + return useQuery({ + queryKey: entityKeys.list(workspaceId), + queryFn: () => fetchEntities(workspaceId as string), + enabled: Boolean(workspaceId) && (options?.enabled ?? true), + staleTime: 60 * 1000, + placeholderData: keepPreviousData, + }) +} +``` + +## Mutation Hook + +```typescript +export function useCreateEntity() { + const queryClient = useQueryClient() + return useMutation({ + mutationFn: async (variables) => { /* fetch POST */ }, + onSuccess: () => queryClient.invalidateQueries({ queryKey: entityKeys.all }), + }) +} +``` + +## Optimistic Updates + +For optimistic mutations syncing with Zustand, use `createOptimisticMutationHandlers` from `@/hooks/queries/utils/optimistic-mutation`. + +## Naming + +- **Keys**: `entityKeys` +- **Query hooks**: `useEntity`, `useEntityList` +- **Mutation hooks**: `useCreateEntity`, `useUpdateEntity` +- **Fetch functions**: `fetchEntity` (private) diff --git a/.claude/rules/sim-stores.md b/.claude/rules/sim-stores.md new file mode 100644 index 000000000..333ff9fd9 --- /dev/null +++ b/.claude/rules/sim-stores.md @@ -0,0 +1,71 @@ +--- +paths: + - "apps/sim/**/store.ts" + - "apps/sim/**/stores/**/*.ts" +--- + +# Zustand Store Patterns + +Stores live in `stores/`. Complex stores split into `store.ts` + `types.ts`. + +## Basic Store + +```typescript +import { create } from 'zustand' +import { devtools } from 'zustand/middleware' +import type { FeatureState } from '@/stores/feature/types' + +const initialState = { items: [] as Item[], activeId: null as string | null } + +export const useFeatureStore = create()( + devtools( + (set, get) => ({ + ...initialState, + setItems: (items) => set({ items }), + addItem: (item) => set((state) => ({ items: [...state.items, item] })), + reset: () => set(initialState), + }), + { name: 'feature-store' } + ) +) +``` + +## Persisted Store + +```typescript +import { create } from 'zustand' +import { persist } from 'zustand/middleware' + +export const useFeatureStore = create()( + persist( + (set) => ({ + width: 300, + setWidth: (width) => set({ width }), + _hasHydrated: false, + setHasHydrated: (v) => set({ _hasHydrated: v }), + }), + { + name: 'feature-state', + partialize: (state) => ({ width: state.width }), + onRehydrateStorage: () => (state) => state?.setHasHydrated(true), + } + ) +) +``` + +## Rules + +1. Use `devtools` middleware (named stores) +2. Use `persist` only when data should survive reload +3. `partialize` to persist only necessary state +4. `_hasHydrated` pattern for persisted stores needing hydration tracking +5. Immutable updates only +6. `set((state) => ...)` when depending on previous state +7. Provide `reset()` action + +## Outside React + +```typescript +const items = useFeatureStore.getState().items +useFeatureStore.setState({ items: newItems }) +``` diff --git a/.claude/rules/sim-styling.md b/.claude/rules/sim-styling.md new file mode 100644 index 000000000..1b8c384a7 --- /dev/null +++ b/.claude/rules/sim-styling.md @@ -0,0 +1,41 @@ +--- +paths: + - "apps/sim/**/*.tsx" + - "apps/sim/**/*.css" +--- + +# Styling Rules + +## Tailwind + +1. **No inline styles** - Use Tailwind classes +2. **No duplicate dark classes** - Skip `dark:` when value matches light mode +3. **Exact values** - `text-[14px]`, `h-[26px]` +4. **Transitions** - `transition-colors` for interactive states + +## Conditional Classes + +```typescript +import { cn } from '@/lib/utils' + +
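+// Example usage (names illustrative): merge base and conditional classes
+<div className={cn('base-classes', isActive && 'active-classes', className)} />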
+``` + +## CSS Variables + +For dynamic values (widths, heights) synced with stores: + +```typescript +// In store +setWidth: (width) => { + set({ width }) + document.documentElement.style.setProperty('--sidebar-width', `${width}px`) +} + +// In component +
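+// Illustrative: consume the variable via an arbitrary-value class
+<aside className='w-[var(--sidebar-width)]' />
+```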
          )}
+          {/* Password reset success message */}
+          {resetSuccessMessage && (
+            <div>{resetSuccessMessage}</div>
+          )}
+          {/* Email/Password Form - show unless explicitly disabled */}
           {!isFalsy(getEnv('NEXT_PUBLIC_EMAIL_PASSWORD_SIGNUP_ENABLED')) && (
diff --git a/apps/sim/app/(auth)/reset-password/reset-password-form.tsx b/apps/sim/app/(auth)/reset-password/reset-password-form.tsx index 7212b52d5..d50fbf986 100644 --- a/apps/sim/app/(auth)/reset-password/reset-password-form.tsx +++ b/apps/sim/app/(auth)/reset-password/reset-password-form.tsx @@ -1,12 +1,13 @@ 'use client' -import { useEffect, useState } from 'react' +import { useState } from 'react' import { ArrowRight, ChevronRight, Eye, EyeOff } from 'lucide-react' import { Button } from '@/components/ui/button' import { Input } from '@/components/ui/input' import { Label } from '@/components/ui/label' import { cn } from '@/lib/core/utils/cn' import { inter } from '@/app/_styles/fonts/inter/inter' +import { useBrandedButtonClass } from '@/hooks/use-branded-button-class' interface RequestResetFormProps { email: string @@ -27,36 +28,9 @@ export function RequestResetForm({ statusMessage, className, }: RequestResetFormProps) { - const [buttonClass, setButtonClass] = useState('branded-button-gradient') + const buttonClass = useBrandedButtonClass() const [isButtonHovered, setIsButtonHovered] = useState(false) - useEffect(() => { - const checkCustomBrand = () => { - const computedStyle = getComputedStyle(document.documentElement) - const brandAccent = computedStyle.getPropertyValue('--brand-accent-hex').trim() - - if (brandAccent && brandAccent !== '#6f3dfa') { - setButtonClass('branded-button-custom') - } else { - setButtonClass('branded-button-gradient') - } - } - - checkCustomBrand() - - window.addEventListener('resize', checkCustomBrand) - const observer = new MutationObserver(checkCustomBrand) - observer.observe(document.documentElement, { - attributes: true, - attributeFilter: ['style', 'class'], - }) - - return () => { - window.removeEventListener('resize', checkCustomBrand) - observer.disconnect() - } - }, []) - const handleSubmit = async (e: React.FormEvent) => { e.preventDefault() onSubmit(email) @@ -138,36 +112,9 @@ export function SetNewPasswordForm({ const [validationMessage, setValidationMessage] = useState('') const [showPassword, setShowPassword] = useState(false) const [showConfirmPassword, setShowConfirmPassword] = useState(false) - const [buttonClass, setButtonClass] = useState('branded-button-gradient') + const buttonClass = useBrandedButtonClass() const [isButtonHovered, setIsButtonHovered] = useState(false) - useEffect(() => { - const checkCustomBrand = () => { - const computedStyle = getComputedStyle(document.documentElement) - const brandAccent = computedStyle.getPropertyValue('--brand-accent-hex').trim() - - if (brandAccent && brandAccent !== '#6f3dfa') { - setButtonClass('branded-button-custom') - } else { - setButtonClass('branded-button-gradient') - } - } - - checkCustomBrand() - - window.addEventListener('resize', checkCustomBrand) - const observer = new MutationObserver(checkCustomBrand) - observer.observe(document.documentElement, { - attributes: true, - attributeFilter: ['style', 'class'], - }) - - return () => { - window.removeEventListener('resize', checkCustomBrand) - observer.disconnect() - } - }, []) - const handleSubmit = async (e: React.FormEvent) => { e.preventDefault() diff --git a/apps/sim/app/(auth)/signup/signup-form.tsx b/apps/sim/app/(auth)/signup/signup-form.tsx index 5aeb59fa6..840765de8 100644 --- a/apps/sim/app/(auth)/signup/signup-form.tsx +++ b/apps/sim/app/(auth)/signup/signup-form.tsx @@ -16,6 +16,7 @@ import { inter } from '@/app/_styles/fonts/inter/inter' import { soehne } from '@/app/_styles/fonts/soehne/soehne' import { 
SocialLoginButtons } from '@/app/(auth)/components/social-login-buttons' import { SSOLoginButton } from '@/app/(auth)/components/sso-login-button' +import { useBrandedButtonClass } from '@/hooks/use-branded-button-class' const logger = createLogger('SignupForm') @@ -95,7 +96,7 @@ function SignupFormContent({ const [showEmailValidationError, setShowEmailValidationError] = useState(false) const [redirectUrl, setRedirectUrl] = useState('') const [isInviteFlow, setIsInviteFlow] = useState(false) - const [buttonClass, setButtonClass] = useState('branded-button-gradient') + const buttonClass = useBrandedButtonClass() const [isButtonHovered, setIsButtonHovered] = useState(false) const [name, setName] = useState('') @@ -126,31 +127,6 @@ function SignupFormContent({ if (inviteFlowParam === 'true') { setIsInviteFlow(true) } - - const checkCustomBrand = () => { - const computedStyle = getComputedStyle(document.documentElement) - const brandAccent = computedStyle.getPropertyValue('--brand-accent-hex').trim() - - if (brandAccent && brandAccent !== '#6f3dfa') { - setButtonClass('branded-button-custom') - } else { - setButtonClass('branded-button-gradient') - } - } - - checkCustomBrand() - - window.addEventListener('resize', checkCustomBrand) - const observer = new MutationObserver(checkCustomBrand) - observer.observe(document.documentElement, { - attributes: true, - attributeFilter: ['style', 'class'], - }) - - return () => { - window.removeEventListener('resize', checkCustomBrand) - observer.disconnect() - } }, [searchParams]) const validatePassword = (passwordValue: string): string[] => { diff --git a/apps/sim/app/(auth)/sso/sso-form.tsx b/apps/sim/app/(auth)/sso/sso-form.tsx index 0d371bbaf..12901c51c 100644 --- a/apps/sim/app/(auth)/sso/sso-form.tsx +++ b/apps/sim/app/(auth)/sso/sso-form.tsx @@ -13,6 +13,7 @@ import { cn } from '@/lib/core/utils/cn' import { quickValidateEmail } from '@/lib/messaging/email/validation' import { inter } from '@/app/_styles/fonts/inter/inter' import { soehne } from '@/app/_styles/fonts/soehne/soehne' +import { useBrandedButtonClass } from '@/hooks/use-branded-button-class' const logger = createLogger('SSOForm') @@ -57,7 +58,7 @@ export default function SSOForm() { const [email, setEmail] = useState('') const [emailErrors, setEmailErrors] = useState([]) const [showEmailValidationError, setShowEmailValidationError] = useState(false) - const [buttonClass, setButtonClass] = useState('branded-button-gradient') + const buttonClass = useBrandedButtonClass() const [callbackUrl, setCallbackUrl] = useState('/workspace') useEffect(() => { @@ -90,31 +91,6 @@ export default function SSOForm() { setShowEmailValidationError(true) } } - - const checkCustomBrand = () => { - const computedStyle = getComputedStyle(document.documentElement) - const brandAccent = computedStyle.getPropertyValue('--brand-accent-hex').trim() - - if (brandAccent && brandAccent !== '#6f3dfa') { - setButtonClass('branded-button-custom') - } else { - setButtonClass('branded-button-gradient') - } - } - - checkCustomBrand() - - window.addEventListener('resize', checkCustomBrand) - const observer = new MutationObserver(checkCustomBrand) - observer.observe(document.documentElement, { - attributes: true, - attributeFilter: ['style', 'class'], - }) - - return () => { - window.removeEventListener('resize', checkCustomBrand) - observer.disconnect() - } }, [searchParams]) const handleEmailChange = (e: React.ChangeEvent) => { diff --git a/apps/sim/app/(auth)/verify/verify-content.tsx 
b/apps/sim/app/(auth)/verify/verify-content.tsx index ed05354b9..0eb41b8ba 100644 --- a/apps/sim/app/(auth)/verify/verify-content.tsx +++ b/apps/sim/app/(auth)/verify/verify-content.tsx @@ -8,6 +8,7 @@ import { cn } from '@/lib/core/utils/cn' import { inter } from '@/app/_styles/fonts/inter/inter' import { soehne } from '@/app/_styles/fonts/soehne/soehne' import { useVerification } from '@/app/(auth)/verify/use-verification' +import { useBrandedButtonClass } from '@/hooks/use-branded-button-class' interface VerifyContentProps { hasEmailService: boolean @@ -58,34 +59,7 @@ function VerificationForm({ setCountdown(30) } - const [buttonClass, setButtonClass] = useState('branded-button-gradient') - - useEffect(() => { - const checkCustomBrand = () => { - const computedStyle = getComputedStyle(document.documentElement) - const brandAccent = computedStyle.getPropertyValue('--brand-accent-hex').trim() - - if (brandAccent && brandAccent !== '#6f3dfa') { - setButtonClass('branded-button-custom') - } else { - setButtonClass('branded-button-gradient') - } - } - - checkCustomBrand() - - window.addEventListener('resize', checkCustomBrand) - const observer = new MutationObserver(checkCustomBrand) - observer.observe(document.documentElement, { - attributes: true, - attributeFilter: ['style', 'class'], - }) - - return () => { - window.removeEventListener('resize', checkCustomBrand) - observer.disconnect() - } - }, []) + const buttonClass = useBrandedButtonClass() return ( <> diff --git a/apps/sim/app/api/auth/reset-password/route.ts b/apps/sim/app/api/auth/reset-password/route.ts index 0caa1494f..1d47be103 100644 --- a/apps/sim/app/api/auth/reset-password/route.ts +++ b/apps/sim/app/api/auth/reset-password/route.ts @@ -15,7 +15,8 @@ const resetPasswordSchema = z.object({ .max(100, 'Password must not exceed 100 characters') .regex(/[A-Z]/, 'Password must contain at least one uppercase letter') .regex(/[a-z]/, 'Password must contain at least one lowercase letter') - .regex(/[0-9]/, 'Password must contain at least one number'), + .regex(/[0-9]/, 'Password must contain at least one number') + .regex(/[^A-Za-z0-9]/, 'Password must contain at least one special character'), }) export async function POST(request: NextRequest) { diff --git a/apps/sim/app/api/auth/sso/providers/route.ts b/apps/sim/app/api/auth/sso/providers/route.ts index f35f25ee2..d4bcfa35d 100644 --- a/apps/sim/app/api/auth/sso/providers/route.ts +++ b/apps/sim/app/api/auth/sso/providers/route.ts @@ -4,7 +4,7 @@ import { eq } from 'drizzle-orm' import { NextResponse } from 'next/server' import { getSession } from '@/lib/auth' -const logger = createLogger('SSO-Providers') +const logger = createLogger('SSOProvidersRoute') export async function GET() { try { diff --git a/apps/sim/app/api/auth/sso/register/route.ts b/apps/sim/app/api/auth/sso/register/route.ts index b53d83eae..00e499d6f 100644 --- a/apps/sim/app/api/auth/sso/register/route.ts +++ b/apps/sim/app/api/auth/sso/register/route.ts @@ -6,7 +6,7 @@ import { hasSSOAccess } from '@/lib/billing' import { env } from '@/lib/core/config/env' import { REDACTED_MARKER } from '@/lib/core/security/redaction' -const logger = createLogger('SSO-Register') +const logger = createLogger('SSORegisterRoute') const mappingSchema = z .object({ @@ -43,6 +43,10 @@ const ssoRegistrationSchema = z.discriminatedUnion('providerType', [ ]) .default(['openid', 'profile', 'email']), pkce: z.boolean().default(true), + authorizationEndpoint: z.string().url().optional(), + tokenEndpoint: z.string().url().optional(), + 
userInfoEndpoint: z.string().url().optional(), + jwksEndpoint: z.string().url().optional(), }), z.object({ providerType: z.literal('saml'), @@ -64,12 +68,10 @@ const ssoRegistrationSchema = z.discriminatedUnion('providerType', [ export async function POST(request: NextRequest) { try { - // SSO plugin must be enabled in Better Auth if (!env.SSO_ENABLED) { return NextResponse.json({ error: 'SSO is not enabled' }, { status: 400 }) } - // Check plan access (enterprise) or env var override const session = await getSession() if (!session?.user?.id) { return NextResponse.json({ error: 'Authentication required' }, { status: 401 }) @@ -116,7 +118,16 @@ export async function POST(request: NextRequest) { } if (providerType === 'oidc') { - const { clientId, clientSecret, scopes, pkce } = body + const { + clientId, + clientSecret, + scopes, + pkce, + authorizationEndpoint, + tokenEndpoint, + userInfoEndpoint, + jwksEndpoint, + } = body const oidcConfig: any = { clientId, @@ -127,50 +138,104 @@ export async function POST(request: NextRequest) { pkce: pkce ?? true, } - // Add manual endpoints for providers that might need them - // Common patterns for OIDC providers that don't support discovery properly - if ( - issuer.includes('okta.com') || - issuer.includes('auth0.com') || - issuer.includes('identityserver') - ) { - const baseUrl = issuer.includes('/oauth2/default') - ? issuer.replace('/oauth2/default', '') - : issuer.replace('/oauth', '').replace('/v2.0', '').replace('/oauth2', '') + oidcConfig.authorizationEndpoint = authorizationEndpoint + oidcConfig.tokenEndpoint = tokenEndpoint + oidcConfig.userInfoEndpoint = userInfoEndpoint + oidcConfig.jwksEndpoint = jwksEndpoint - // Okta-style endpoints - if (issuer.includes('okta.com')) { - oidcConfig.authorizationEndpoint = `${baseUrl}/oauth2/default/v1/authorize` - oidcConfig.tokenEndpoint = `${baseUrl}/oauth2/default/v1/token` - oidcConfig.userInfoEndpoint = `${baseUrl}/oauth2/default/v1/userinfo` - oidcConfig.jwksEndpoint = `${baseUrl}/oauth2/default/v1/keys` - } - // Auth0-style endpoints - else if (issuer.includes('auth0.com')) { - oidcConfig.authorizationEndpoint = `${baseUrl}/authorize` - oidcConfig.tokenEndpoint = `${baseUrl}/oauth/token` - oidcConfig.userInfoEndpoint = `${baseUrl}/userinfo` - oidcConfig.jwksEndpoint = `${baseUrl}/.well-known/jwks.json` - } - // Generic OIDC endpoints (IdentityServer, etc.) 
- else { - oidcConfig.authorizationEndpoint = `${baseUrl}/connect/authorize` - oidcConfig.tokenEndpoint = `${baseUrl}/connect/token` - oidcConfig.userInfoEndpoint = `${baseUrl}/connect/userinfo` - oidcConfig.jwksEndpoint = `${baseUrl}/.well-known/jwks` - } + const needsDiscovery = + !oidcConfig.authorizationEndpoint || !oidcConfig.tokenEndpoint || !oidcConfig.jwksEndpoint - logger.info('Using manual OIDC endpoints for provider', { + if (needsDiscovery) { + const discoveryUrl = `${issuer.replace(/\/$/, '')}/.well-known/openid-configuration` + try { + logger.info('Fetching OIDC discovery document for missing endpoints', { + discoveryUrl, + hasAuthEndpoint: !!oidcConfig.authorizationEndpoint, + hasTokenEndpoint: !!oidcConfig.tokenEndpoint, + hasJwksEndpoint: !!oidcConfig.jwksEndpoint, + }) + + const discoveryResponse = await fetch(discoveryUrl, { + headers: { Accept: 'application/json' }, + }) + + if (!discoveryResponse.ok) { + logger.error('Failed to fetch OIDC discovery document', { + status: discoveryResponse.status, + statusText: discoveryResponse.statusText, + }) + return NextResponse.json( + { + error: `Failed to fetch OIDC discovery document from ${discoveryUrl}. Status: ${discoveryResponse.status}. Provide all endpoints explicitly or verify the issuer URL.`, + }, + { status: 400 } + ) + } + + const discovery = await discoveryResponse.json() + + oidcConfig.authorizationEndpoint = + oidcConfig.authorizationEndpoint || discovery.authorization_endpoint + oidcConfig.tokenEndpoint = oidcConfig.tokenEndpoint || discovery.token_endpoint + oidcConfig.userInfoEndpoint = oidcConfig.userInfoEndpoint || discovery.userinfo_endpoint + oidcConfig.jwksEndpoint = oidcConfig.jwksEndpoint || discovery.jwks_uri + + logger.info('Merged OIDC endpoints (user-provided + discovery)', { + providerId, + issuer, + authorizationEndpoint: oidcConfig.authorizationEndpoint, + tokenEndpoint: oidcConfig.tokenEndpoint, + userInfoEndpoint: oidcConfig.userInfoEndpoint, + jwksEndpoint: oidcConfig.jwksEndpoint, + }) + } catch (error) { + logger.error('Error fetching OIDC discovery document', { + error: error instanceof Error ? error.message : 'Unknown error', + discoveryUrl, + }) + return NextResponse.json( + { + error: `Failed to fetch OIDC discovery document from ${discoveryUrl}. Please verify the issuer URL is correct or provide all endpoints explicitly.`, + }, + { status: 400 } + ) + } + } else { + logger.info('Using explicitly provided OIDC endpoints (all present)', { providerId, - provider: issuer.includes('okta.com') - ? 'Okta' - : issuer.includes('auth0.com') - ? 'Auth0' - : 'Generic', - authEndpoint: oidcConfig.authorizationEndpoint, + issuer, + authorizationEndpoint: oidcConfig.authorizationEndpoint, + tokenEndpoint: oidcConfig.tokenEndpoint, + userInfoEndpoint: oidcConfig.userInfoEndpoint, + jwksEndpoint: oidcConfig.jwksEndpoint, }) } + if ( + !oidcConfig.authorizationEndpoint || + !oidcConfig.tokenEndpoint || + !oidcConfig.jwksEndpoint + ) { + const missing: string[] = [] + if (!oidcConfig.authorizationEndpoint) missing.push('authorizationEndpoint') + if (!oidcConfig.tokenEndpoint) missing.push('tokenEndpoint') + if (!oidcConfig.jwksEndpoint) missing.push('jwksEndpoint') + + logger.error('Missing required OIDC endpoints after discovery merge', { + missing, + authorizationEndpoint: oidcConfig.authorizationEndpoint, + tokenEndpoint: oidcConfig.tokenEndpoint, + jwksEndpoint: oidcConfig.jwksEndpoint, + }) + return NextResponse.json( + { + error: `Missing required OIDC endpoints: ${missing.join(', ')}. 
Please provide these explicitly or verify the issuer supports OIDC discovery.`, + }, + { status: 400 } + ) + } + providerConfig.oidcConfig = oidcConfig } else if (providerType === 'saml') { const { diff --git a/apps/sim/app/api/copilot/execute-tool/route.ts b/apps/sim/app/api/copilot/execute-tool/route.ts index c8205821f..e38309968 100644 --- a/apps/sim/app/api/copilot/execute-tool/route.ts +++ b/apps/sim/app/api/copilot/execute-tool/route.ts @@ -224,7 +224,7 @@ export async function POST(req: NextRequest) { hasApiKey: !!executionParams.apiKey, }) - const result = await executeTool(resolvedToolName, executionParams, true) + const result = await executeTool(resolvedToolName, executionParams) logger.info(`[${tracker.requestId}] Tool execution complete`, { toolName, diff --git a/apps/sim/app/api/creators/[id]/verify/route.ts b/apps/sim/app/api/creators/[id]/verify/route.ts index 45cd2dc0b..6ce9e8b3c 100644 --- a/apps/sim/app/api/creators/[id]/verify/route.ts +++ b/apps/sim/app/api/creators/[id]/verify/route.ts @@ -1,10 +1,11 @@ import { db } from '@sim/db' -import { templateCreators, user } from '@sim/db/schema' +import { templateCreators } from '@sim/db/schema' import { createLogger } from '@sim/logger' import { eq } from 'drizzle-orm' import { type NextRequest, NextResponse } from 'next/server' import { getSession } from '@/lib/auth' import { generateRequestId } from '@/lib/core/utils/request' +import { verifyEffectiveSuperUser } from '@/lib/templates/permissions' const logger = createLogger('CreatorVerificationAPI') @@ -23,9 +24,8 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{ } // Check if user is a super user - const currentUser = await db.select().from(user).where(eq(user.id, session.user.id)).limit(1) - - if (!currentUser[0]?.isSuperUser) { + const { effectiveSuperUser } = await verifyEffectiveSuperUser(session.user.id) + if (!effectiveSuperUser) { logger.warn(`[${requestId}] Non-super user attempted to verify creator: ${id}`) return NextResponse.json({ error: 'Only super users can verify creators' }, { status: 403 }) } @@ -76,9 +76,8 @@ export async function DELETE( } // Check if user is a super user - const currentUser = await db.select().from(user).where(eq(user.id, session.user.id)).limit(1) - - if (!currentUser[0]?.isSuperUser) { + const { effectiveSuperUser } = await verifyEffectiveSuperUser(session.user.id) + if (!effectiveSuperUser) { logger.warn(`[${requestId}] Non-super user attempted to unverify creator: ${id}`) return NextResponse.json({ error: 'Only super users can unverify creators' }, { status: 403 }) } diff --git a/apps/sim/app/api/files/parse/route.ts b/apps/sim/app/api/files/parse/route.ts index 4e4d54f18..50dc55572 100644 --- a/apps/sim/app/api/files/parse/route.ts +++ b/apps/sim/app/api/files/parse/route.ts @@ -6,9 +6,10 @@ import { createLogger } from '@sim/logger' import binaryExtensionsList from 'binary-extensions' import { type NextRequest, NextResponse } from 'next/server' import { checkHybridAuth } from '@/lib/auth/hybrid' -import { createPinnedUrl, validateUrlWithDNS } from '@/lib/core/security/input-validation' +import { secureFetchWithPinnedIP, validateUrlWithDNS } from '@/lib/core/security/input-validation' import { isSupportedFileType, parseFile } from '@/lib/file-parsers' import { isUsingCloudStorage, type StorageContext, StorageService } from '@/lib/uploads' +import { uploadExecutionFile } from '@/lib/uploads/contexts/execution' import { UPLOAD_DIR_SERVER } from '@/lib/uploads/core/setup.server' import { 
getFileMetadataByKey } from '@/lib/uploads/server/metadata' import { @@ -21,6 +22,7 @@ import { } from '@/lib/uploads/utils/file-utils' import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils' import { verifyFileAccess } from '@/app/api/files/authorization' +import type { UserFile } from '@/executor/types' import '@/lib/uploads/core/setup.server' export const dynamic = 'force-dynamic' @@ -30,6 +32,12 @@ const logger = createLogger('FilesParseAPI') const MAX_DOWNLOAD_SIZE_BYTES = 100 * 1024 * 1024 // 100 MB const DOWNLOAD_TIMEOUT_MS = 30000 // 30 seconds +interface ExecutionContext { + workspaceId: string + workflowId: string + executionId: string +} + interface ParseResult { success: boolean content?: string @@ -37,6 +45,7 @@ interface ParseResult { filePath: string originalName?: string // Original filename from database (for workspace files) viewerUrl?: string | null // Viewer URL for the file if available + userFile?: UserFile // UserFile object for the raw file metadata?: { fileType: string size: number @@ -70,27 +79,45 @@ export async function POST(request: NextRequest) { const userId = authResult.userId const requestData = await request.json() - const { filePath, fileType, workspaceId } = requestData + const { filePath, fileType, workspaceId, workflowId, executionId } = requestData if (!filePath || (typeof filePath === 'string' && filePath.trim() === '')) { return NextResponse.json({ success: false, error: 'No file path provided' }, { status: 400 }) } - logger.info('File parse request received:', { filePath, fileType, workspaceId, userId }) + // Build execution context if all required fields are present + const executionContext: ExecutionContext | undefined = + workspaceId && workflowId && executionId + ? { workspaceId, workflowId, executionId } + : undefined + + logger.info('File parse request received:', { + filePath, + fileType, + workspaceId, + userId, + hasExecutionContext: !!executionContext, + }) if (Array.isArray(filePath)) { const results = [] - for (const path of filePath) { - if (!path || (typeof path === 'string' && path.trim() === '')) { + for (const singlePath of filePath) { + if (!singlePath || (typeof singlePath === 'string' && singlePath.trim() === '')) { results.push({ success: false, error: 'Empty file path in array', - filePath: path || '', + filePath: singlePath || '', }) continue } - const result = await parseFileSingle(path, fileType, workspaceId, userId) + const result = await parseFileSingle( + singlePath, + fileType, + workspaceId, + userId, + executionContext + ) if (result.metadata) { result.metadata.processingTime = Date.now() - startTime } @@ -106,6 +133,7 @@ export async function POST(request: NextRequest) { fileType: result.metadata?.fileType || 'application/octet-stream', size: result.metadata?.size || 0, binary: false, + file: result.userFile, }, filePath: result.filePath, viewerUrl: result.viewerUrl, @@ -121,7 +149,7 @@ export async function POST(request: NextRequest) { }) } - const result = await parseFileSingle(filePath, fileType, workspaceId, userId) + const result = await parseFileSingle(filePath, fileType, workspaceId, userId, executionContext) if (result.metadata) { result.metadata.processingTime = Date.now() - startTime @@ -137,6 +165,7 @@ export async function POST(request: NextRequest) { fileType: result.metadata?.fileType || 'application/octet-stream', size: result.metadata?.size || 0, binary: false, + file: result.userFile, }, filePath: result.filePath, viewerUrl: result.viewerUrl, @@ -164,7 +193,8 @@ async function 
parseFileSingle( filePath: string, fileType: string, workspaceId: string, - userId: string + userId: string, + executionContext?: ExecutionContext ): Promise { logger.info('Parsing file:', filePath) @@ -186,18 +216,18 @@ async function parseFileSingle( } if (filePath.includes('/api/files/serve/')) { - return handleCloudFile(filePath, fileType, undefined, userId) + return handleCloudFile(filePath, fileType, undefined, userId, executionContext) } if (filePath.startsWith('http://') || filePath.startsWith('https://')) { - return handleExternalUrl(filePath, fileType, workspaceId, userId) + return handleExternalUrl(filePath, fileType, workspaceId, userId, executionContext) } if (isUsingCloudStorage()) { - return handleCloudFile(filePath, fileType, undefined, userId) + return handleCloudFile(filePath, fileType, undefined, userId, executionContext) } - return handleLocalFile(filePath, fileType, userId) + return handleLocalFile(filePath, fileType, userId, executionContext) } /** @@ -230,12 +260,14 @@ function validateFilePath(filePath: string): { isValid: boolean; error?: string /** * Handle external URL * If workspaceId is provided, checks if file already exists and saves to workspace if not + * If executionContext is provided, also stores the file in execution storage and returns UserFile */ async function handleExternalUrl( url: string, fileType: string, workspaceId: string, - userId: string + userId: string, + executionContext?: ExecutionContext ): Promise { try { logger.info('Fetching external URL:', url) @@ -312,17 +344,13 @@ async function handleExternalUrl( if (existingFile) { const storageFilePath = `/api/files/serve/${existingFile.key}` - return handleCloudFile(storageFilePath, fileType, 'workspace', userId) + return handleCloudFile(storageFilePath, fileType, 'workspace', userId, executionContext) } } } - const pinnedUrl = createPinnedUrl(url, urlValidation.resolvedIP!) 
- const response = await fetch(pinnedUrl, { - signal: AbortSignal.timeout(DOWNLOAD_TIMEOUT_MS), - headers: { - Host: urlValidation.originalHostname!, - }, + const response = await secureFetchWithPinnedIP(url, urlValidation.resolvedIP!, { + timeout: DOWNLOAD_TIMEOUT_MS, }) if (!response.ok) { throw new Error(`Failed to fetch URL: ${response.status} ${response.statusText}`) @@ -341,6 +369,19 @@ async function handleExternalUrl( logger.info(`Downloaded file from URL: ${url}, size: ${buffer.length} bytes`) + let userFile: UserFile | undefined + const mimeType = response.headers.get('content-type') || getMimeTypeFromExtension(extension) + + if (executionContext) { + try { + userFile = await uploadExecutionFile(executionContext, buffer, filename, mimeType, userId) + logger.info(`Stored file in execution storage: ${filename}`, { key: userFile.key }) + } catch (uploadError) { + logger.warn(`Failed to store file in execution storage:`, uploadError) + // Continue without userFile - parsing can still work + } + } + if (shouldCheckWorkspace) { try { const permission = await getUserEntityPermissions(userId, 'workspace', workspaceId) @@ -353,8 +394,6 @@ async function handleExternalUrl( }) } else { const { uploadWorkspaceFile } = await import('@/lib/uploads/contexts/workspace') - const mimeType = - response.headers.get('content-type') || getMimeTypeFromExtension(extension) await uploadWorkspaceFile(workspaceId, userId, buffer, filename, mimeType) logger.info(`Saved URL file to workspace storage: ${filename}`) } @@ -363,17 +402,23 @@ async function handleExternalUrl( } } + let parseResult: ParseResult if (extension === 'pdf') { - return await handlePdfBuffer(buffer, filename, fileType, url) - } - if (extension === 'csv') { - return await handleCsvBuffer(buffer, filename, fileType, url) - } - if (isSupportedFileType(extension)) { - return await handleGenericTextBuffer(buffer, filename, extension, fileType, url) + parseResult = await handlePdfBuffer(buffer, filename, fileType, url) + } else if (extension === 'csv') { + parseResult = await handleCsvBuffer(buffer, filename, fileType, url) + } else if (isSupportedFileType(extension)) { + parseResult = await handleGenericTextBuffer(buffer, filename, extension, fileType, url) + } else { + parseResult = handleGenericBuffer(buffer, filename, extension, fileType) } - return handleGenericBuffer(buffer, filename, extension, fileType) + // Attach userFile to the result + if (userFile) { + parseResult.userFile = userFile + } + + return parseResult } catch (error) { logger.error(`Error handling external URL ${url}:`, error) return { @@ -386,12 +431,15 @@ async function handleExternalUrl( /** * Handle file stored in cloud storage + * If executionContext is provided and file is not already from execution storage, + * copies the file to execution storage and returns UserFile */ async function handleCloudFile( filePath: string, fileType: string, explicitContext: string | undefined, - userId: string + userId: string, + executionContext?: ExecutionContext ): Promise { try { const cloudKey = extractStorageKey(filePath) @@ -438,6 +486,7 @@ async function handleCloudFile( const filename = originalFilename || cloudKey.split('/').pop() || cloudKey const extension = path.extname(filename).toLowerCase().substring(1) + const mimeType = getMimeTypeFromExtension(extension) const normalizedFilePath = `/api/files/serve/${encodeURIComponent(cloudKey)}?context=${context}` let workspaceIdFromKey: string | undefined @@ -453,6 +502,39 @@ async function handleCloudFile( const viewerUrl = 
getViewerUrl(cloudKey, workspaceIdFromKey) + // Store file in execution storage if executionContext is provided + let userFile: UserFile | undefined + + if (executionContext) { + // If file is already from execution context, create UserFile reference without re-uploading + if (context === 'execution') { + userFile = { + id: `file_${Date.now()}_${Math.random().toString(36).substring(2, 9)}`, + name: filename, + url: normalizedFilePath, + size: fileBuffer.length, + type: mimeType, + key: cloudKey, + context: 'execution', + } + logger.info(`Created UserFile reference for existing execution file: ${filename}`) + } else { + // Copy from workspace/other storage to execution storage + try { + userFile = await uploadExecutionFile( + executionContext, + fileBuffer, + filename, + mimeType, + userId + ) + logger.info(`Copied file to execution storage: ${filename}`, { key: userFile.key }) + } catch (uploadError) { + logger.warn(`Failed to copy file to execution storage:`, uploadError) + } + } + } + let parseResult: ParseResult if (extension === 'pdf') { parseResult = await handlePdfBuffer(fileBuffer, filename, fileType, normalizedFilePath) @@ -477,6 +559,11 @@ async function handleCloudFile( parseResult.viewerUrl = viewerUrl + // Attach userFile to the result + if (userFile) { + parseResult.userFile = userFile + } + return parseResult } catch (error) { logger.error(`Error handling cloud file ${filePath}:`, error) @@ -500,7 +587,8 @@ async function handleCloudFile( async function handleLocalFile( filePath: string, fileType: string, - userId: string + userId: string, + executionContext?: ExecutionContext ): Promise { try { const filename = filePath.split('/').pop() || filePath @@ -540,13 +628,32 @@ async function handleLocalFile( const hash = createHash('md5').update(fileBuffer).digest('hex') const extension = path.extname(filename).toLowerCase().substring(1) + const mimeType = fileType || getMimeTypeFromExtension(extension) + + // Store file in execution storage if executionContext is provided + let userFile: UserFile | undefined + if (executionContext) { + try { + userFile = await uploadExecutionFile( + executionContext, + fileBuffer, + filename, + mimeType, + userId + ) + logger.info(`Stored local file in execution storage: ${filename}`, { key: userFile.key }) + } catch (uploadError) { + logger.warn(`Failed to store local file in execution storage:`, uploadError) + } + } return { success: true, content: result.content, filePath, + userFile, metadata: { - fileType: fileType || getMimeTypeFromExtension(extension), + fileType: mimeType, size: stats.size, hash, processingTime: 0, diff --git a/apps/sim/app/api/form/[identifier]/route.ts b/apps/sim/app/api/form/[identifier]/route.ts index e75dd236c..a4ad31eef 100644 --- a/apps/sim/app/api/form/[identifier]/route.ts +++ b/apps/sim/app/api/form/[identifier]/route.ts @@ -11,7 +11,7 @@ import { preprocessExecution } from '@/lib/execution/preprocessing' import { LoggingSession } from '@/lib/logs/execution/logging-session' import { normalizeInputFormatValue } from '@/lib/workflows/input-format' import { createStreamingResponse } from '@/lib/workflows/streaming/streaming' -import { isValidStartBlockType } from '@/lib/workflows/triggers/start-block-types' +import { isInputDefinitionTrigger } from '@/lib/workflows/triggers/input-definition-triggers' import { setFormAuthCookie, validateFormAuth } from '@/app/api/form/utils' import { createErrorResponse, createSuccessResponse } from '@/app/api/workflows/utils' @@ -36,7 +36,7 @@ async function 
getWorkflowInputSchema(workflowId: string): Promise { .from(workflowBlocks) .where(eq(workflowBlocks.workflowId, workflowId)) - const startBlock = blocks.find((block) => isValidStartBlockType(block.type)) + const startBlock = blocks.find((block) => isInputDefinitionTrigger(block.type)) if (!startBlock) { return [] diff --git a/apps/sim/app/api/function/execute/route.test.ts b/apps/sim/app/api/function/execute/route.test.ts index 45abbb321..ea020abaf 100644 --- a/apps/sim/app/api/function/execute/route.test.ts +++ b/apps/sim/app/api/function/execute/route.test.ts @@ -276,8 +276,11 @@ describe('Function Execute API Route', () => { it.concurrent('should resolve tag variables with syntax', async () => { const req = createMockRequest('POST', { code: 'return ', - params: { - email: { id: '123', subject: 'Test Email' }, + blockData: { + 'block-123': { id: '123', subject: 'Test Email' }, + }, + blockNameMapping: { + email: 'block-123', }, }) @@ -305,9 +308,13 @@ describe('Function Execute API Route', () => { it.concurrent('should only match valid variable names in angle brackets', async () => { const req = createMockRequest('POST', { code: 'return + "" + ', - params: { - validVar: 'hello', - another_valid: 'world', + blockData: { + 'block-1': 'hello', + 'block-2': 'world', + }, + blockNameMapping: { + validVar: 'block-1', + another_valid: 'block-2', }, }) @@ -321,28 +328,22 @@ describe('Function Execute API Route', () => { it.concurrent( 'should handle Gmail webhook data with email addresses containing angle brackets', async () => { - const gmailData = { - email: { - id: '123', - from: 'Waleed Latif ', - to: 'User ', - subject: 'Test Email', - bodyText: 'Hello world', - }, - rawEmail: { - id: '123', - payload: { - headers: [ - { name: 'From', value: 'Waleed Latif ' }, - { name: 'To', value: 'User ' }, - ], - }, - }, + const emailData = { + id: '123', + from: 'Waleed Latif ', + to: 'User ', + subject: 'Test Email', + bodyText: 'Hello world', } const req = createMockRequest('POST', { code: 'return ', - params: gmailData, + blockData: { + 'block-email': emailData, + }, + blockNameMapping: { + email: 'block-email', + }, }) const response = await POST(req) @@ -356,17 +357,20 @@ describe('Function Execute API Route', () => { it.concurrent( 'should properly serialize complex email objects with special characters', async () => { - const complexEmailData = { - email: { - from: 'Test User ', - bodyHtml: '
<div>HTML content with "quotes" and \'apostrophes\'</div>
', - bodyText: 'Text with\nnewlines\tand\ttabs', - }, + const emailData = { + from: 'Test User ', + bodyHtml: '
<div>HTML content with "quotes" and \'apostrophes\'</div>
', + bodyText: 'Text with\nnewlines\tand\ttabs', } const req = createMockRequest('POST', { code: 'return ', - params: complexEmailData, + blockData: { + 'block-email': emailData, + }, + blockNameMapping: { + email: 'block-email', + }, }) const response = await POST(req) @@ -519,18 +523,23 @@ describe('Function Execute API Route', () => { }) it.concurrent('should handle JSON serialization edge cases', async () => { + const complexData = { + special: 'chars"with\'quotes', + unicode: '🎉 Unicode content', + nested: { + deep: { + value: 'test', + }, + }, + } + const req = createMockRequest('POST', { code: 'return ', - params: { - complexData: { - special: 'chars"with\'quotes', - unicode: '🎉 Unicode content', - nested: { - deep: { - value: 'test', - }, - }, - }, + blockData: { + 'block-complex': complexData, + }, + blockNameMapping: { + complexData: 'block-complex', }, }) diff --git a/apps/sim/app/api/function/execute/route.ts b/apps/sim/app/api/function/execute/route.ts index 4412cf966..8868c2d40 100644 --- a/apps/sim/app/api/function/execute/route.ts +++ b/apps/sim/app/api/function/execute/route.ts @@ -9,8 +9,8 @@ import { escapeRegExp, normalizeName, REFERENCE } from '@/executor/constants' import { createEnvVarPattern, createWorkflowVariablePattern, - resolveEnvVarReferences, } from '@/executor/utils/reference-validation' +import { navigatePath } from '@/executor/variables/resolvers/reference' export const dynamic = 'force-dynamic' export const runtime = 'nodejs' @@ -18,8 +18,8 @@ export const MAX_DURATION = 210 const logger = createLogger('FunctionExecuteAPI') -const E2B_JS_WRAPPER_LINES = 3 // Lines before user code: ';(async () => {', ' try {', ' const __sim_result = await (async () => {' -const E2B_PYTHON_WRAPPER_LINES = 1 // Lines before user code: 'def __sim_main__():' +const E2B_JS_WRAPPER_LINES = 3 +const E2B_PYTHON_WRAPPER_LINES = 1 type TypeScriptModule = typeof import('typescript') @@ -134,33 +134,21 @@ function extractEnhancedError( if (error.stack) { enhanced.stack = error.stack - // Parse stack trace to extract line and column information - // Handle both compilation errors and runtime errors const stackLines: string[] = error.stack.split('\n') for (const line of stackLines) { - // Pattern 1: Compilation errors - "user-function.js:6" let match = line.match(/user-function\.js:(\d+)(?::(\d+))?/) - // Pattern 2: Runtime errors - "at user-function.js:5:12" if (!match) { match = line.match(/at\s+user-function\.js:(\d+):(\d+)/) } - // Pattern 3: Generic patterns for any line containing our filename - if (!match) { - match = line.match(/user-function\.js:(\d+)(?::(\d+))?/) - } - if (match) { const stackLine = Number.parseInt(match[1], 10) const stackColumn = match[2] ? 
Number.parseInt(match[2], 10) : undefined - // Adjust line number to account for wrapper code - // The user code starts at a specific line in our wrapper const adjustedLine = stackLine - userCodeStartLine + 1 - // Check if this is a syntax error in wrapper code caused by incomplete user code const isWrapperSyntaxError = stackLine > userCodeStartLine && error.name === 'SyntaxError' && @@ -168,7 +156,6 @@ function extractEnhancedError( error.message.includes('Unexpected end of input')) if (isWrapperSyntaxError && userCode) { - // Map wrapper syntax errors to the last line of user code const codeLines = userCode.split('\n') const lastUserLine = codeLines.length enhanced.line = lastUserLine @@ -181,7 +168,6 @@ function extractEnhancedError( enhanced.line = adjustedLine enhanced.column = stackColumn - // Extract the actual line content from user code if (userCode) { const codeLines = userCode.split('\n') if (adjustedLine <= codeLines.length) { @@ -192,7 +178,6 @@ function extractEnhancedError( } if (stackLine <= userCodeStartLine) { - // Error is in wrapper code itself enhanced.line = stackLine enhanced.column = stackColumn break @@ -200,7 +185,6 @@ function extractEnhancedError( } } - // Clean up stack trace to show user-relevant information const cleanedStackLines: string[] = stackLines .filter( (line: string) => @@ -214,9 +198,6 @@ function extractEnhancedError( } } - // Keep original message without adding error type prefix - // The error type will be added later in createUserFriendlyErrorMessage - return enhanced } @@ -231,7 +212,6 @@ function formatE2BError( userCode: string, prologueLineCount: number ): { formattedError: string; cleanedOutput: string } { - // Calculate line offset based on language and prologue const wrapperLines = language === CodeLanguage.Python ? E2B_PYTHON_WRAPPER_LINES : E2B_JS_WRAPPER_LINES const totalOffset = prologueLineCount + wrapperLines @@ -241,27 +221,20 @@ function formatE2BError( let cleanErrorMsg = '' if (language === CodeLanguage.Python) { - // Python error format: "Cell In[X], line Y" followed by error details - // Extract line number from the Cell reference const cellMatch = errorOutput.match(/Cell In\[\d+\], line (\d+)/) if (cellMatch) { const originalLine = Number.parseInt(cellMatch[1], 10) userLine = originalLine - totalOffset } - // Extract clean error message from the error string - // Remove file references like "(detected at line X) (file.py, line Y)" cleanErrorMsg = errorMessage .replace(/\s*\(detected at line \d+\)/g, '') .replace(/\s*\([^)]+\.py, line \d+\)/g, '') .trim() } else if (language === CodeLanguage.JavaScript) { - // JavaScript error format from E2B: "SyntaxError: /path/file.ts: Message. (line:col)\n\n 9 | ..." - // First, extract the error type and message from the first line const firstLineEnd = errorMessage.indexOf('\n') const firstLine = firstLineEnd > 0 ? errorMessage.substring(0, firstLineEnd) : errorMessage - // Parse: "SyntaxError: /home/user/index.ts: Missing semicolon. 
(11:9)" const jsErrorMatch = firstLine.match(/^(\w+Error):\s*[^:]+:\s*([^(]+)\.\s*\((\d+):(\d+)\)/) if (jsErrorMatch) { cleanErrorType = jsErrorMatch[1] @@ -269,13 +242,11 @@ function formatE2BError( const originalLine = Number.parseInt(jsErrorMatch[3], 10) userLine = originalLine - totalOffset } else { - // Fallback: look for line number in the arrow pointer line (> 11 |) const arrowMatch = errorMessage.match(/^>\s*(\d+)\s*\|/m) if (arrowMatch) { const originalLine = Number.parseInt(arrowMatch[1], 10) userLine = originalLine - totalOffset } - // Try to extract error type and message const errorMatch = firstLine.match(/^(\w+Error):\s*(.+)/) if (errorMatch) { cleanErrorType = errorMatch[1] @@ -289,13 +260,11 @@ function formatE2BError( } } - // Build the final clean error message const finalErrorMsg = cleanErrorType && cleanErrorMsg ? `${cleanErrorType}: ${cleanErrorMsg}` : cleanErrorMsg || errorMessage - // Format with line number if available let formattedError = finalErrorMsg if (userLine && userLine > 0) { const codeLines = userCode.split('\n') @@ -311,7 +280,6 @@ function formatE2BError( } } - // For stdout, just return the clean error message without the full traceback const cleanedOutput = finalErrorMsg return { formattedError, cleanedOutput } @@ -327,7 +295,6 @@ function createUserFriendlyErrorMessage( ): string { let errorMessage = enhanced.message - // Add line information if available if (enhanced.line !== undefined) { let lineInfo = `Line ${enhanced.line}` @@ -338,18 +305,14 @@ function createUserFriendlyErrorMessage( errorMessage = `${lineInfo} - ${errorMessage}` } else { - // If no line number, try to extract it from stack trace for display if (enhanced.stack) { const stackMatch = enhanced.stack.match(/user-function\.js:(\d+)(?::(\d+))?/) if (stackMatch) { const line = Number.parseInt(stackMatch[1], 10) let lineInfo = `Line ${line}` - // Try to get line content if we have userCode if (userCode) { const codeLines = userCode.split('\n') - // Note: stackMatch gives us VM line number, need to adjust - // This is a fallback case, so we might not have perfect line mapping if (line <= codeLines.length) { const lineContent = codeLines[line - 1]?.trim() if (lineContent) { @@ -363,7 +326,6 @@ function createUserFriendlyErrorMessage( } } - // Add error type prefix with consistent naming if (enhanced.name !== 'Error') { const errorTypePrefix = enhanced.name === 'SyntaxError' @@ -374,7 +336,6 @@ function createUserFriendlyErrorMessage( ? 
'Reference Error'
          : enhanced.name
 
-    // Only add prefix if not already present
     if (!errorMessage.toLowerCase().includes(errorTypePrefix.toLowerCase())) {
       errorMessage = `${errorTypePrefix}: ${errorMessage}`
     }
@@ -383,9 +344,6 @@
   return errorMessage
 }
 
-/**
- * Resolves workflow variables with <variable.name> syntax
- */
 function resolveWorkflowVariables(
   code: string,
   workflowVariables: Record<string, any>,
@@ -405,39 +363,35 @@
   while ((match = regex.exec(code)) !== null) {
     const variableName = match[1].trim()
 
-    // Find the variable by name (workflowVariables is indexed by ID, values are variable objects)
     const foundVariable = Object.entries(workflowVariables).find(
       ([_, variable]) => normalizeName(variable.name || '') === variableName
     )
 
-    let variableValue: unknown = ''
-    if (foundVariable) {
-      const variable = foundVariable[1]
-      variableValue = variable.value
+    if (!foundVariable) {
+      const availableVars = Object.values(workflowVariables)
+        .map((v) => v.name)
+        .filter(Boolean)
+      throw new Error(
+        `Variable "${variableName}" doesn't exist.` +
+          (availableVars.length > 0 ? ` Available: ${availableVars.join(', ')}` : '')
+      )
+    }
 
-      if (variable.value !== undefined && variable.value !== null) {
+    const variable = foundVariable[1]
+    let variableValue: unknown = variable.value
+
+    if (variable.value !== undefined && variable.value !== null) {
+      const type = variable.type === 'string' ? 'plain' : variable.type
+
+      if (type === 'number') {
+        variableValue = Number(variableValue)
+      } else if (type === 'boolean') {
+        variableValue = variableValue === 'true' || variableValue === true
+      } else if (type === 'json' && typeof variableValue === 'string') {
         try {
-          // Handle 'string' type the same as 'plain' for backward compatibility
-          const type = variable.type === 'string' ? 'plain' : variable.type
-
-          // For plain text, use exactly what's entered without modifications
-          if (type === 'plain' && typeof variableValue === 'string') {
-            // Use as-is for plain text
-          } else if (type === 'number') {
-            variableValue = Number(variableValue)
-          } else if (type === 'boolean') {
-            variableValue = variableValue === 'true' || variableValue === true
-          } else if (type === 'json') {
-            try {
-              variableValue =
-                typeof variableValue === 'string' ? JSON.parse(variableValue) : variableValue
-            } catch {
-              // Keep original value if JSON parsing fails
-            }
-          }
+          variableValue = JSON.parse(variableValue)
         } catch {
-          // Fallback to original value on error
-          variableValue = variable.value
+          // Keep as-is
         }
       }
     }
@@ -450,11 +404,9 @@
     })
   }
 
-  // Process replacements in reverse order to maintain correct indices
   for (let i = replacements.length - 1; i >= 0; i--) {
     const { match: matchStr, index, variableName, variableValue } = replacements[i]
 
-    // Use variable reference approach
    const safeVarName = `__variable_${variableName.replace(/[^a-zA-Z0-9_]/g, '_')}`
    contextVariables[safeVarName] = variableValue
    resolvedCode =
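The net effect of the rewritten hunk above: unknown variable references now fail loudly instead of silently resolving to an empty string, and typed values are coerced before injection. Roughly, as a standalone sketch (the `WorkflowVariable` shape and `coerce` helper are illustrative names, not exports of this route):

```typescript
// Illustrative only: mirrors the coercion rules in the hunk above.
// 'string' is treated as 'plain'; numbers, booleans, and JSON are parsed.
type WorkflowVariable = { name: string; type: string; value: unknown }

function coerce(variable: WorkflowVariable): unknown {
  const value = variable.value
  if (value === undefined || value === null) return value
  const type = variable.type === 'string' ? 'plain' : variable.type
  if (type === 'number') return Number(value)
  if (type === 'boolean') return value === 'true' || value === true
  if (type === 'json' && typeof value === 'string') {
    try {
      return JSON.parse(value)
    } catch {
      return value // keep as-is, matching the route's fallback
    }
  }
  return value
}

console.log(coerce({ name: 'count', type: 'number', value: '42' })) // 42
console.log(coerce({ name: 'flag', type: 'boolean', value: 'true' })) // true
```

Each resolved reference is then swapped for a generated `__variable_<name>` identifier whose coerced value travels via `contextVariables`, so large values are never spliced directly into the code string.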
@@ -464,9 +416,6 @@
   return resolvedCode
 }
 
-/**
- * Resolves environment variables with {{var_name}} syntax
- */
 function resolveEnvironmentVariables(
   code: string,
   params: Record<string, any>,
@@ -482,32 +431,28 @@
   const resolverVars: Record<string, string> = {}
 
   Object.entries(params).forEach(([key, value]) => {
-    if (value) {
+    if (value !== undefined && value !== null) {
       resolverVars[key] = String(value)
     }
   })
 
   Object.entries(envVars).forEach(([key, value]) => {
-    if (value) {
+    if (value !== undefined && value !== null) {
       resolverVars[key] = value
     }
   })
 
   while ((match = regex.exec(code)) !== null) {
     const varName = match[1].trim()
-    const resolved = resolveEnvVarReferences(match[0], resolverVars, {
-      allowEmbedded: true,
-      resolveExactMatch: true,
-      trimKeys: true,
-      onMissing: 'empty',
-      deep: false,
-    })
-    const varValue =
-      typeof resolved === 'string' ? resolved : resolved == null ? '' : String(resolved)
+
+    if (!(varName in resolverVars)) {
+      continue
+    }
+
     replacements.push({
       match: match[0],
       index: match.index,
       varName,
-      varValue: String(varValue),
+      varValue: resolverVars[varName],
    })
  }
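Behavior change worth noting: the old path resolved missing `{{var_name}}` references to an empty string (`onMissing: 'empty'`), while the new path skips them, leaving the literal `{{...}}` text in place. A minimal sketch of the new semantics (approximate; the route splices by index rather than using a single regex replace):

```typescript
// Known names are substituted; unknown references are left untouched.
function resolveEnv(code: string, vars: Record<string, string>): string {
  return code.replace(/\{\{([^}]+)\}\}/g, (whole, name: string) => {
    const key = name.trim()
    return key in vars ? vars[key] : whole
  })
}

console.log(resolveEnv('fetch("{{API_URL}}/v1?k={{MISSING}}")', { API_URL: 'https://example.com' }))
// -> fetch("https://example.com/v1?k={{MISSING}}")
```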
@@ -523,12 +468,8 @@
   return resolvedCode
 }
 
-/**
- * Resolves tags with <block.name> syntax (including nested paths like <getrecord.response.data>)
- */
 function resolveTagVariables(
   code: string,
-  params: Record<string, any>,
   blockData: Record<string, any>,
   blockNameMapping: Record<string, string>,
   contextVariables: Record<string, any>
@@ -543,27 +484,30 @@
   for (const match of tagMatches) {
     const tagName = match.slice(REFERENCE.START.length, -REFERENCE.END.length).trim()
+    const pathParts = tagName.split(REFERENCE.PATH_DELIMITER)
+    const blockName = pathParts[0]
 
-    // Handle nested paths like "getrecord.response.data" or "function1.response.result"
-    // First try params, then blockData directly, then try with block name mapping
-    let tagValue = getNestedValue(params, tagName) || getNestedValue(blockData, tagName) || ''
-
-    // If not found and the path starts with a block name, try mapping the block name to ID
-    if (!tagValue && tagName.includes(REFERENCE.PATH_DELIMITER)) {
-      const pathParts = tagName.split(REFERENCE.PATH_DELIMITER)
-      const normalizedBlockName = pathParts[0] // This should already be normalized like "function1"
-
-      // Direct lookup using normalized block name
-      const blockId = blockNameMapping[normalizedBlockName] ?? null
-
-      if (blockId) {
-        const remainingPath = pathParts.slice(1).join('.')
-        const fullPath = `${blockId}.${remainingPath}`
-        tagValue = getNestedValue(blockData, fullPath) || ''
-      }
+    const blockId = blockNameMapping[blockName]
+    if (!blockId) {
+      continue
+    }
+
+    const blockOutput = blockData[blockId]
+    if (blockOutput === undefined) {
+      continue
+    }
+
+    let tagValue: any
+    if (pathParts.length === 1) {
+      tagValue = blockOutput
+    } else {
+      tagValue = navigatePath(blockOutput, pathParts.slice(1))
+    }
+
+    if (tagValue === undefined) {
+      continue
     }
 
-    // If the value is a stringified JSON, parse it back to object
     if (
       typeof tagValue === 'string' &&
       tagValue.length > 100 &&
       (tagValue.startsWith('{') || tagValue.startsWith('['))
     ) {
       try {
         tagValue = JSON.parse(tagValue)
-      } catch (e) {
-        // Keep as string if parsing fails
+      } catch {
+        // Keep as-is
       }
     }
 
-    // Instead of injecting large JSON directly, create a variable reference
     const safeVarName = `__tag_${tagName.replace(/[^a-zA-Z0-9_]/g, '_')}`
     contextVariables[safeVarName] = tagValue
-
-    // Replace the template with a variable reference
     resolvedCode = resolvedCode.replace(new RegExp(escapeRegExp(match), 'g'), safeVarName)
   }
@@ -605,35 +546,13 @@ function resolveCodeVariables(
   let resolvedCode = code
   const contextVariables: Record<string, any> = {}
 
-  // Resolve workflow variables with <variable.name> syntax first
   resolvedCode = resolveWorkflowVariables(resolvedCode, workflowVariables, contextVariables)
-
-  // Resolve environment variables with {{var_name}} syntax
   resolvedCode = resolveEnvironmentVariables(resolvedCode, params, envVars, contextVariables)
-
-  // Resolve tags with <block.name> syntax (including nested paths like <getrecord.response.data>)
-  resolvedCode = resolveTagVariables(
-    resolvedCode,
-    params,
-    blockData,
-    blockNameMapping,
-    contextVariables
-  )
+  resolvedCode = resolveTagVariables(resolvedCode, blockData, blockNameMapping, contextVariables)
 
   return { resolvedCode, contextVariables }
 }
 
-/**
- * Get nested value from object using dot notation path
- */
-function getNestedValue(obj: any, path: string): any {
-  if (!obj || !path) return undefined
-
-  return path.split('.').reduce((current, key) => {
-    return current && typeof current === 'object' ? current[key] : undefined
-  }, obj)
-}
-
 /**
  * Remove one trailing newline from stdout
  * This handles the common case where print() or console.log() adds a trailing \n
@@ -671,7 +590,6 @@ export async function POST(req: NextRequest) {
     isCustomTool = false,
   } = body
 
-  // Extract internal parameters that shouldn't be passed to the execution context
   const executionParams = { ...params }
   executionParams._context = undefined
@@ -697,7 +615,6 @@
   const lang = isValidCodeLanguage(language) ? language : DEFAULT_CODE_LANGUAGE
 
-  // Extract imports once for JavaScript code (reuse later to avoid double extraction)
   let jsImports = ''
   let jsRemainingCode = resolvedCode
   let hasImports = false
@@ -707,31 +624,22 @@
     jsImports = extractionResult.imports
     jsRemainingCode = extractionResult.remainingCode
 
-    // Check for ES6 imports or CommonJS require statements
-    // ES6 imports are extracted by the TypeScript parser
-    // Also check for require() calls which indicate external dependencies
     const hasRequireStatements = /require\s*\(\s*['"`]/.test(resolvedCode)
     hasImports = jsImports.trim().length > 0 || hasRequireStatements
   }
 
-  // Python always requires E2B
  if (lang === CodeLanguage.Python && !isE2bEnabled) {
    throw new Error(
      'Python execution requires E2B to be enabled. 
Please contact your administrator to enable E2B, or use JavaScript instead.' ) } - // JavaScript with imports requires E2B if (lang === CodeLanguage.JavaScript && hasImports && !isE2bEnabled) { throw new Error( 'JavaScript code with import statements requires E2B to be enabled. Please remove the import statements, or contact your administrator to enable E2B.' ) } - // Use E2B if: - // - E2B is enabled AND - // - Not a custom tool AND - // - (Python OR JavaScript with imports) const useE2B = isE2bEnabled && !isCustomTool && @@ -744,13 +652,10 @@ export async function POST(req: NextRequest) { language: lang, }) let prologue = '' - const epilogue = '' if (lang === CodeLanguage.JavaScript) { - // Track prologue lines for error adjustment let prologueLineCount = 0 - // Reuse the imports we already extracted earlier const imports = jsImports const remainingCode = jsRemainingCode @@ -782,7 +687,7 @@ export async function POST(req: NextRequest) { ' }', '})();', ].join('\n') - const codeForE2B = importSection + prologue + wrapped + epilogue + const codeForE2B = importSection + prologue + wrapped const execStart = Date.now() const { @@ -804,7 +709,6 @@ export async function POST(req: NextRequest) { error: e2bError, }) - // If there was an execution error, format it properly if (e2bError) { const { formattedError, cleanedOutput } = formatE2BError( e2bError, @@ -828,7 +732,7 @@ export async function POST(req: NextRequest) { output: { result: e2bResult ?? null, stdout: cleanStdout(stdout), executionTime }, }) } - // Track prologue lines for error adjustment + let prologueLineCount = 0 prologue += 'import json\n' prologueLineCount++ @@ -846,7 +750,7 @@ export async function POST(req: NextRequest) { '__sim_result__ = __sim_main__()', "print('__SIM_RESULT__=' + json.dumps(__sim_result__))", ].join('\n') - const codeForE2B = prologue + wrapped + epilogue + const codeForE2B = prologue + wrapped const execStart = Date.now() const { @@ -868,7 +772,6 @@ export async function POST(req: NextRequest) { error: e2bError, }) - // If there was an execution error, format it properly if (e2bError) { const { formattedError, cleanedOutput } = formatE2BError( e2bError, @@ -897,7 +800,6 @@ export async function POST(req: NextRequest) { const wrapperLines = ['(async () => {', ' try {'] if (isCustomTool) { - wrapperLines.push(' // For custom tools, make parameters directly accessible') Object.keys(executionParams).forEach((key) => { wrapperLines.push(` const ${key} = params.${key};`) }) @@ -931,12 +833,10 @@ export async function POST(req: NextRequest) { }) const ivmError = isolatedResult.error - // Adjust line number for prepended param destructuring in custom tools let adjustedLine = ivmError.line let adjustedLineContent = ivmError.lineContent if (prependedLineCount > 0 && ivmError.line !== undefined) { adjustedLine = Math.max(1, ivmError.line - prependedLineCount) - // Get line content from original user code, not the prepended code const codeLines = resolvedCode.split('\n') if (adjustedLine <= codeLines.length) { adjustedLineContent = codeLines[adjustedLine - 1]?.trim() diff --git a/apps/sim/app/api/knowledge/[id]/tag-definitions/route.ts b/apps/sim/app/api/knowledge/[id]/tag-definitions/route.ts index 09f1fc787..ba52994c8 100644 --- a/apps/sim/app/api/knowledge/[id]/tag-definitions/route.ts +++ b/apps/sim/app/api/knowledge/[id]/tag-definitions/route.ts @@ -2,7 +2,7 @@ import { randomUUID } from 'crypto' import { createLogger } from '@sim/logger' import { type NextRequest, NextResponse } from 'next/server' import { z } 
from 'zod'
-import { getSession } from '@/lib/auth'
+import { checkHybridAuth } from '@/lib/auth/hybrid'
 import { SUPPORTED_FIELD_TYPES } from '@/lib/knowledge/constants'
 import { createTagDefinition, getTagDefinitions } from '@/lib/knowledge/tags/service'
 import { checkKnowledgeBaseAccess } from '@/app/api/knowledge/utils'
@@ -19,19 +19,32 @@ export async function GET(req: NextRequest, { params }: { params: Promise<{ id: string }> }) {
   try {
     logger.info(`[${requestId}] Getting tag definitions for knowledge base ${knowledgeBaseId}`)
 
-    const session = await getSession()
-    if (!session?.user?.id) {
-      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
+    const auth = await checkHybridAuth(req, { requireWorkflowId: false })
+    if (!auth.success) {
+      return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
     }
 
-    const accessCheck = await checkKnowledgeBaseAccess(knowledgeBaseId, session.user.id)
-    if (!accessCheck.hasAccess) {
-      return NextResponse.json({ error: 'Forbidden' }, { status: 403 })
+    // Only allow session and internal JWT auth (not API key)
+    if (auth.authType === 'api_key') {
+      return NextResponse.json(
+        { error: 'API key auth not supported for this endpoint' },
+        { status: 401 }
+      )
+    }
+
+    // For session auth, verify KB access. Internal JWT is trusted.
+    if (auth.authType === 'session' && auth.userId) {
+      const accessCheck = await checkKnowledgeBaseAccess(knowledgeBaseId, auth.userId)
+      if (!accessCheck.hasAccess) {
+        return NextResponse.json({ error: 'Forbidden' }, { status: 403 })
+      }
     }
 
     const tagDefinitions = await getTagDefinitions(knowledgeBaseId)
-    logger.info(`[${requestId}] Retrieved ${tagDefinitions.length} tag definitions`)
+    logger.info(
+      `[${requestId}] Retrieved ${tagDefinitions.length} tag definitions (${auth.authType})`
+    )
 
     return NextResponse.json({
       success: true,
@@ -51,14 +64,25 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id: string }> }) {
   try {
     logger.info(`[${requestId}] Creating tag definition for knowledge base ${knowledgeBaseId}`)
 
-    const session = await getSession()
-    if (!session?.user?.id) {
-      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
+    const auth = await checkHybridAuth(req, { requireWorkflowId: false })
+    if (!auth.success) {
+      return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
     }
 
-    const accessCheck = await checkKnowledgeBaseAccess(knowledgeBaseId, session.user.id)
-    if (!accessCheck.hasAccess) {
-      return NextResponse.json({ error: 'Forbidden' }, { status: 403 })
+    // Only allow session and internal JWT auth (not API key)
+    if (auth.authType === 'api_key') {
+      return NextResponse.json(
+        { error: 'API key auth not supported for this endpoint' },
+        { status: 401 }
+      )
+    }
+
+    // For session auth, verify KB access. Internal JWT is trusted.
+    if (auth.authType === 'session' && auth.userId) {
+      const accessCheck = await checkKnowledgeBaseAccess(knowledgeBaseId, auth.userId)
+      if (!accessCheck.hasAccess) {
+        return NextResponse.json({ error: 'Forbidden' }, { status: 403 })
+      }
     }
 
     const body = await req.json()
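Both handlers now share the same guard shape; schematically (a hedged sketch, `requireSessionOrInternal` is a hypothetical name, the routes above inline this logic):

```typescript
import { type NextRequest, NextResponse } from 'next/server'
import { checkHybridAuth } from '@/lib/auth/hybrid'

// Accept session or internal JWT, reject API keys, and only run the
// knowledge-base ACL check for session callers (internal JWT is trusted).
async function requireSessionOrInternal(req: NextRequest) {
  const auth = await checkHybridAuth(req, { requireWorkflowId: false })
  if (!auth.success) {
    return { response: NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 }) }
  }
  if (auth.authType === 'api_key') {
    return {
      response: NextResponse.json(
        { error: 'API key auth not supported for this endpoint' },
        { status: 401 }
      ),
    }
  }
  return { auth }
}
```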
diff --git a/apps/sim/app/api/proxy/route.ts b/apps/sim/app/api/proxy/route.ts
deleted file mode 100644
index 24702aa48..000000000
--- a/apps/sim/app/api/proxy/route.ts
+++ /dev/null
@@ -1,395 +0,0 @@
-import { createLogger } from '@sim/logger'
-import type { NextRequest } from 'next/server'
-import { NextResponse } from 'next/server'
-import { z } from 'zod'
-import { checkHybridAuth } from '@/lib/auth/hybrid'
-import { generateInternalToken } from '@/lib/auth/internal'
-import { isDev } from '@/lib/core/config/feature-flags'
-import { createPinnedUrl, validateUrlWithDNS } from '@/lib/core/security/input-validation'
-import { generateRequestId } from '@/lib/core/utils/request'
-import { getBaseUrl } from '@/lib/core/utils/urls'
-import { executeTool } from '@/tools'
-import { getTool, validateRequiredParametersAfterMerge } from '@/tools/utils'
-
-const logger = createLogger('ProxyAPI')
-
-const proxyPostSchema = z.object({
-  toolId: z.string().min(1, 'toolId is required'),
-  params: z.record(z.any()).optional().default({}),
-  executionContext: z
-    .object({
-      workflowId: z.string().optional(),
-      workspaceId: z.string().optional(),
-      executionId: z.string().optional(),
-      userId: z.string().optional(),
-    })
-    .optional(),
-})
-
-/**
- * Creates a minimal set of default headers for proxy requests
- * @returns Record of HTTP headers
- */
-const getProxyHeaders = (): Record<string, string> => {
-  return {
-    'User-Agent':
-      'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/135.0.0.0 Safari/537.36',
-    Accept: '*/*',
-    'Accept-Encoding': 'gzip, deflate, br',
-    'Cache-Control': 'no-cache',
-    Connection: 'keep-alive',
-  }
-}
-
-/**
- * Formats a response with CORS headers
- * @param responseData Response data object
- * @param status HTTP status code
- * @returns NextResponse with CORS headers
- */
-const formatResponse = (responseData: any, status = 200) => {
-  return NextResponse.json(responseData, {
-    status,
-    headers: {
-      'Access-Control-Allow-Origin': '*',
-      'Access-Control-Allow-Methods': 'GET, POST, PUT, DELETE, OPTIONS',
-      'Access-Control-Allow-Headers': 'Content-Type, Authorization',
-    },
-  })
-}
-
-/**
- * Creates an error response with consistent formatting
- * @param error Error object or message
- * @param status HTTP status code
- * @param additionalData Additional data to include in the response
- * @returns Formatted error response
- */
-const createErrorResponse = (error: any, status = 500, additionalData = {}) => {
-  const errorMessage = error instanceof Error ? error.message : String(error)
-  const errorStack = error instanceof Error ? error.stack : undefined
-
-  logger.error('Creating error response', {
-    errorMessage,
-    status,
-    stack: isDev ? 
errorStack : undefined, - ...additionalData, - }, - status - ) -} - -/** - * GET handler for direct external URL proxying - * This allows for GET requests to external APIs - */ -export async function GET(request: Request) { - const url = new URL(request.url) - const targetUrl = url.searchParams.get('url') - const requestId = generateRequestId() - - // Vault download proxy: /api/proxy?vaultDownload=1&bucket=...&object=...&credentialId=... - const vaultDownload = url.searchParams.get('vaultDownload') - if (vaultDownload === '1') { - try { - const bucket = url.searchParams.get('bucket') - const objectParam = url.searchParams.get('object') - const credentialId = url.searchParams.get('credentialId') - - if (!bucket || !objectParam || !credentialId) { - return createErrorResponse('Missing bucket, object, or credentialId', 400) - } - - // Fetch access token using existing token API - const baseUrl = new URL(getBaseUrl()) - const tokenUrl = new URL('/api/auth/oauth/token', baseUrl) - - // Build headers: forward session cookies if present; include internal auth for server-side - const tokenHeaders: Record = { 'Content-Type': 'application/json' } - const incomingCookie = request.headers.get('cookie') - if (incomingCookie) tokenHeaders.Cookie = incomingCookie - try { - const internalToken = await generateInternalToken() - tokenHeaders.Authorization = `Bearer ${internalToken}` - } catch (_e) { - // best-effort internal auth - } - - // Optional workflow context for collaboration auth - const workflowId = url.searchParams.get('workflowId') || undefined - - const tokenRes = await fetch(tokenUrl.toString(), { - method: 'POST', - headers: tokenHeaders, - body: JSON.stringify({ credentialId, workflowId }), - }) - - if (!tokenRes.ok) { - const err = await tokenRes.text() - return createErrorResponse(`Failed to fetch access token: ${err}`, 401) - } - - const tokenJson = await tokenRes.json() - const accessToken = tokenJson.accessToken - if (!accessToken) { - return createErrorResponse('No access token available', 401) - } - - // Avoid double-encoding: incoming object may already be percent-encoded - const objectDecoded = decodeURIComponent(objectParam) - const gcsUrl = `https://storage.googleapis.com/storage/v1/b/${encodeURIComponent( - bucket - )}/o/${encodeURIComponent(objectDecoded)}?alt=media` - - const fileRes = await fetch(gcsUrl, { - headers: { Authorization: `Bearer ${accessToken}` }, - }) - - if (!fileRes.ok) { - const errText = await fileRes.text() - return createErrorResponse(errText || 'Failed to download file', fileRes.status) - } - - const headers = new Headers() - fileRes.headers.forEach((v, k) => headers.set(k, v)) - return new NextResponse(fileRes.body, { status: 200, headers }) - } catch (error: any) { - logger.error(`[${requestId}] Vault download proxy failed`, { - error: error instanceof Error ? 
error.message : String(error), - }) - return createErrorResponse('Vault download failed', 500) - } - } - - if (!targetUrl) { - logger.error(`[${requestId}] Missing 'url' parameter`) - return createErrorResponse("Missing 'url' parameter", 400) - } - - const urlValidation = await validateUrlWithDNS(targetUrl) - if (!urlValidation.isValid) { - logger.warn(`[${requestId}] Blocked proxy request`, { - url: targetUrl.substring(0, 100), - error: urlValidation.error, - }) - return createErrorResponse(urlValidation.error || 'Invalid URL', 403) - } - - const method = url.searchParams.get('method') || 'GET' - - const bodyParam = url.searchParams.get('body') - let body: string | undefined - - if (bodyParam && ['POST', 'PUT', 'PATCH'].includes(method.toUpperCase())) { - try { - body = decodeURIComponent(bodyParam) - } catch (error) { - logger.warn(`[${requestId}] Failed to decode body parameter`, error) - } - } - - const customHeaders: Record = {} - - for (const [key, value] of url.searchParams.entries()) { - if (key.startsWith('header.')) { - const headerName = key.substring(7) - customHeaders[headerName] = value - } - } - - if (body && !customHeaders['Content-Type']) { - customHeaders['Content-Type'] = 'application/json' - } - - logger.info(`[${requestId}] Proxying ${method} request to: ${targetUrl}`) - - try { - const pinnedUrl = createPinnedUrl(targetUrl, urlValidation.resolvedIP!) - const response = await fetch(pinnedUrl, { - method: method, - headers: { - ...getProxyHeaders(), - ...customHeaders, - Host: urlValidation.originalHostname!, - }, - body: body || undefined, - }) - - const contentType = response.headers.get('content-type') || '' - let data - - if (contentType.includes('application/json')) { - data = await response.json() - } else { - data = await response.text() - } - - const errorMessage = !response.ok - ? data && typeof data === 'object' && data.error - ? `${data.error.message || JSON.stringify(data.error)}` - : response.statusText || `HTTP error ${response.status}` - : undefined - - if (!response.ok) { - logger.error(`[${requestId}] External API error: ${response.status} ${response.statusText}`) - } - - return formatResponse({ - success: response.ok, - status: response.status, - statusText: response.statusText, - headers: Object.fromEntries(response.headers.entries()), - data, - error: errorMessage, - }) - } catch (error: any) { - logger.error(`[${requestId}] Proxy GET request failed`, { - url: targetUrl, - error: error instanceof Error ? error.message : String(error), - stack: error instanceof Error ? error.stack : undefined, - }) - - return createErrorResponse(error) - } -} - -export async function POST(request: NextRequest) { - const requestId = generateRequestId() - const startTime = new Date() - const startTimeISO = startTime.toISOString() - - try { - const authResult = await checkHybridAuth(request, { requireWorkflowId: false }) - if (!authResult.success) { - logger.error(`[${requestId}] Authentication failed for proxy:`, authResult.error) - return createErrorResponse('Unauthorized', 401) - } - - let requestBody - try { - requestBody = await request.json() - } catch (parseError) { - logger.error(`[${requestId}] Failed to parse request body`, { - error: parseError instanceof Error ? 
parseError.message : String(parseError), - }) - throw new Error('Invalid JSON in request body') - } - - const validationResult = proxyPostSchema.safeParse(requestBody) - if (!validationResult.success) { - logger.error(`[${requestId}] Request validation failed`, { - errors: validationResult.error.errors, - }) - const errorMessages = validationResult.error.errors - .map((err) => `${err.path.join('.')}: ${err.message}`) - .join(', ') - throw new Error(`Validation failed: ${errorMessages}`) - } - - const { toolId, params } = validationResult.data - - logger.info(`[${requestId}] Processing tool: ${toolId}`) - - const tool = getTool(toolId) - - if (!tool) { - logger.error(`[${requestId}] Tool not found: ${toolId}`) - throw new Error(`Tool not found: ${toolId}`) - } - - try { - validateRequiredParametersAfterMerge(toolId, tool, params) - } catch (validationError) { - logger.warn(`[${requestId}] Tool validation failed for ${toolId}`, { - error: validationError instanceof Error ? validationError.message : String(validationError), - }) - - const endTime = new Date() - const endTimeISO = endTime.toISOString() - const duration = endTime.getTime() - startTime.getTime() - - return createErrorResponse(validationError, 400, { - startTime: startTimeISO, - endTime: endTimeISO, - duration, - }) - } - - const hasFileOutputs = - tool.outputs && - Object.values(tool.outputs).some( - (output) => output.type === 'file' || output.type === 'file[]' - ) - - const result = await executeTool( - toolId, - params, - true, // skipProxy (we're already in the proxy) - !hasFileOutputs, // skipPostProcess (don't skip if tool has file outputs) - undefined // execution context is not available in proxy context - ) - - if (!result.success) { - logger.warn(`[${requestId}] Tool execution failed for ${toolId}`, { - error: result.error || 'Unknown error', - }) - - throw new Error(result.error || 'Tool execution failed') - } - - const endTime = new Date() - const endTimeISO = endTime.toISOString() - const duration = endTime.getTime() - startTime.getTime() - - const responseWithTimingData = { - ...result, - startTime: startTimeISO, - endTime: endTimeISO, - duration, - timing: { - startTime: startTimeISO, - endTime: endTimeISO, - duration, - }, - } - - logger.info(`[${requestId}] Tool executed successfully: ${toolId} (${duration}ms)`) - - return formatResponse(responseWithTimingData) - } catch (error: any) { - logger.error(`[${requestId}] Proxy request failed`, { - error: error instanceof Error ? error.message : String(error), - stack: error instanceof Error ? error.stack : undefined, - name: error instanceof Error ? 
error.name : undefined, - }) - - const endTime = new Date() - const endTimeISO = endTime.toISOString() - const duration = endTime.getTime() - startTime.getTime() - - return createErrorResponse(error, 500, { - startTime: startTimeISO, - endTime: endTimeISO, - duration, - }) - } -} - -export async function OPTIONS() { - return new NextResponse(null, { - status: 204, - headers: { - 'Access-Control-Allow-Origin': '*', - 'Access-Control-Allow-Methods': 'GET, POST, PUT, DELETE, OPTIONS', - 'Access-Control-Allow-Headers': 'Content-Type, Authorization', - 'Access-Control-Max-Age': '86400', - }, - }) -} diff --git a/apps/sim/app/api/superuser/import-workflow/route.ts b/apps/sim/app/api/superuser/import-workflow/route.ts new file mode 100644 index 000000000..399879299 --- /dev/null +++ b/apps/sim/app/api/superuser/import-workflow/route.ts @@ -0,0 +1,193 @@ +import { db } from '@sim/db' +import { copilotChats, workflow, workspace } from '@sim/db/schema' +import { createLogger } from '@sim/logger' +import { eq } from 'drizzle-orm' +import { type NextRequest, NextResponse } from 'next/server' +import { getSession } from '@/lib/auth' +import { verifyEffectiveSuperUser } from '@/lib/templates/permissions' +import { parseWorkflowJson } from '@/lib/workflows/operations/import-export' +import { + loadWorkflowFromNormalizedTables, + saveWorkflowToNormalizedTables, +} from '@/lib/workflows/persistence/utils' +import { sanitizeForExport } from '@/lib/workflows/sanitization/json-sanitizer' + +const logger = createLogger('SuperUserImportWorkflow') + +interface ImportWorkflowRequest { + workflowId: string + targetWorkspaceId: string +} + +/** + * POST /api/superuser/import-workflow + * + * Superuser endpoint to import a workflow by ID along with its copilot chats. + * This creates a copy of the workflow in the target workspace with new IDs. + * Only the workflow structure and copilot chats are copied - no deployments, + * webhooks, triggers, or other sensitive data. + * + * Requires both isSuperUser flag AND superUserModeEnabled setting. 
+ */ +export async function POST(request: NextRequest) { + try { + const session = await getSession() + if (!session?.user?.id) { + return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) + } + + const { effectiveSuperUser, isSuperUser, superUserModeEnabled } = + await verifyEffectiveSuperUser(session.user.id) + + if (!effectiveSuperUser) { + logger.warn('Non-effective-superuser attempted to access import-workflow endpoint', { + userId: session.user.id, + isSuperUser, + superUserModeEnabled, + }) + return NextResponse.json({ error: 'Forbidden: Superuser access required' }, { status: 403 }) + } + + const body: ImportWorkflowRequest = await request.json() + const { workflowId, targetWorkspaceId } = body + + if (!workflowId) { + return NextResponse.json({ error: 'workflowId is required' }, { status: 400 }) + } + + if (!targetWorkspaceId) { + return NextResponse.json({ error: 'targetWorkspaceId is required' }, { status: 400 }) + } + + // Verify target workspace exists + const [targetWorkspace] = await db + .select({ id: workspace.id, ownerId: workspace.ownerId }) + .from(workspace) + .where(eq(workspace.id, targetWorkspaceId)) + .limit(1) + + if (!targetWorkspace) { + return NextResponse.json({ error: 'Target workspace not found' }, { status: 404 }) + } + + // Get the source workflow + const [sourceWorkflow] = await db + .select() + .from(workflow) + .where(eq(workflow.id, workflowId)) + .limit(1) + + if (!sourceWorkflow) { + return NextResponse.json({ error: 'Source workflow not found' }, { status: 404 }) + } + + // Load the workflow state from normalized tables + const normalizedData = await loadWorkflowFromNormalizedTables(workflowId) + + if (!normalizedData) { + return NextResponse.json( + { error: 'Workflow has no normalized data - cannot import' }, + { status: 400 } + ) + } + + // Use existing export logic to create export format + const workflowState = { + blocks: normalizedData.blocks, + edges: normalizedData.edges, + loops: normalizedData.loops, + parallels: normalizedData.parallels, + metadata: { + name: sourceWorkflow.name, + description: sourceWorkflow.description ?? 
undefined, + color: sourceWorkflow.color, + }, + } + + const exportData = sanitizeForExport(workflowState) + + // Use existing import logic (parseWorkflowJson regenerates IDs automatically) + const { data: importedData, errors } = parseWorkflowJson(JSON.stringify(exportData)) + + if (!importedData || errors.length > 0) { + return NextResponse.json( + { error: `Failed to parse workflow: ${errors.join(', ')}` }, + { status: 400 } + ) + } + + // Create new workflow record + const newWorkflowId = crypto.randomUUID() + const now = new Date() + + await db.insert(workflow).values({ + id: newWorkflowId, + userId: session.user.id, + workspaceId: targetWorkspaceId, + folderId: null, // Don't copy folder association + name: `[Debug Import] ${sourceWorkflow.name}`, + description: sourceWorkflow.description, + color: sourceWorkflow.color, + lastSynced: now, + createdAt: now, + updatedAt: now, + isDeployed: false, // Never copy deployment status + runCount: 0, + variables: sourceWorkflow.variables || {}, + }) + + // Save using existing persistence logic + const saveResult = await saveWorkflowToNormalizedTables(newWorkflowId, importedData) + + if (!saveResult.success) { + // Clean up the workflow record if save failed + await db.delete(workflow).where(eq(workflow.id, newWorkflowId)) + return NextResponse.json( + { error: `Failed to save workflow state: ${saveResult.error}` }, + { status: 500 } + ) + } + + // Copy copilot chats associated with the source workflow + const sourceCopilotChats = await db + .select() + .from(copilotChats) + .where(eq(copilotChats.workflowId, workflowId)) + + let copilotChatsImported = 0 + + for (const chat of sourceCopilotChats) { + await db.insert(copilotChats).values({ + userId: session.user.id, + workflowId: newWorkflowId, + title: chat.title ? 
`[Import] ${chat.title}` : null, + messages: chat.messages, + model: chat.model, + conversationId: null, // Don't copy conversation ID + previewYaml: chat.previewYaml, + planArtifact: chat.planArtifact, + config: chat.config, + createdAt: new Date(), + updatedAt: new Date(), + }) + copilotChatsImported++ + } + + logger.info('Superuser imported workflow', { + userId: session.user.id, + sourceWorkflowId: workflowId, + newWorkflowId, + targetWorkspaceId, + copilotChatsImported, + }) + + return NextResponse.json({ + success: true, + newWorkflowId, + copilotChatsImported, + }) + } catch (error) { + logger.error('Error importing workflow', error) + return NextResponse.json({ error: 'Internal server error' }, { status: 500 }) + } +} diff --git a/apps/sim/app/api/templates/[id]/approve/route.ts b/apps/sim/app/api/templates/[id]/approve/route.ts index c15c1916e..0492ae584 100644 --- a/apps/sim/app/api/templates/[id]/approve/route.ts +++ b/apps/sim/app/api/templates/[id]/approve/route.ts @@ -5,7 +5,7 @@ import { eq } from 'drizzle-orm' import { type NextRequest, NextResponse } from 'next/server' import { getSession } from '@/lib/auth' import { generateRequestId } from '@/lib/core/utils/request' -import { verifySuperUser } from '@/lib/templates/permissions' +import { verifyEffectiveSuperUser } from '@/lib/templates/permissions' const logger = createLogger('TemplateApprovalAPI') @@ -25,8 +25,8 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{ return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) } - const { isSuperUser } = await verifySuperUser(session.user.id) - if (!isSuperUser) { + const { effectiveSuperUser } = await verifyEffectiveSuperUser(session.user.id) + if (!effectiveSuperUser) { logger.warn(`[${requestId}] Non-super user attempted to approve template: ${id}`) return NextResponse.json({ error: 'Only super users can approve templates' }, { status: 403 }) } @@ -71,8 +71,8 @@ export async function DELETE( return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) } - const { isSuperUser } = await verifySuperUser(session.user.id) - if (!isSuperUser) { + const { effectiveSuperUser } = await verifyEffectiveSuperUser(session.user.id) + if (!effectiveSuperUser) { logger.warn(`[${requestId}] Non-super user attempted to reject template: ${id}`) return NextResponse.json({ error: 'Only super users can reject templates' }, { status: 403 }) } diff --git a/apps/sim/app/api/templates/[id]/reject/route.ts b/apps/sim/app/api/templates/[id]/reject/route.ts index af5ed2e12..99e50e52a 100644 --- a/apps/sim/app/api/templates/[id]/reject/route.ts +++ b/apps/sim/app/api/templates/[id]/reject/route.ts @@ -5,7 +5,7 @@ import { eq } from 'drizzle-orm' import { type NextRequest, NextResponse } from 'next/server' import { getSession } from '@/lib/auth' import { generateRequestId } from '@/lib/core/utils/request' -import { verifySuperUser } from '@/lib/templates/permissions' +import { verifyEffectiveSuperUser } from '@/lib/templates/permissions' const logger = createLogger('TemplateRejectionAPI') @@ -25,8 +25,8 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{ return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) } - const { isSuperUser } = await verifySuperUser(session.user.id) - if (!isSuperUser) { + const { effectiveSuperUser } = await verifyEffectiveSuperUser(session.user.id) + if (!effectiveSuperUser) { logger.warn(`[${requestId}] Non-super user attempted to reject template: ${id}`) return NextResponse.json({ 
error: 'Only super users can reject templates' }, { status: 403 }) } diff --git a/apps/sim/app/api/templates/route.ts b/apps/sim/app/api/templates/route.ts index 7177aa005..2985684e4 100644 --- a/apps/sim/app/api/templates/route.ts +++ b/apps/sim/app/api/templates/route.ts @@ -3,7 +3,6 @@ import { templateCreators, templateStars, templates, - user, workflow, workflowDeploymentVersion, } from '@sim/db/schema' @@ -14,6 +13,7 @@ import { v4 as uuidv4 } from 'uuid' import { z } from 'zod' import { getSession } from '@/lib/auth' import { generateRequestId } from '@/lib/core/utils/request' +import { verifyEffectiveSuperUser } from '@/lib/templates/permissions' import { extractRequiredCredentials, sanitizeCredentials, @@ -70,8 +70,8 @@ export async function GET(request: NextRequest) { logger.debug(`[${requestId}] Fetching templates with params:`, params) // Check if user is a super user - const currentUser = await db.select().from(user).where(eq(user.id, session.user.id)).limit(1) - const isSuperUser = currentUser[0]?.isSuperUser || false + const { effectiveSuperUser } = await verifyEffectiveSuperUser(session.user.id) + const isSuperUser = effectiveSuperUser // Build query conditions const conditions = [] diff --git a/apps/sim/app/api/proxy/image/route.ts b/apps/sim/app/api/tools/image/route.ts similarity index 100% rename from apps/sim/app/api/proxy/image/route.ts rename to apps/sim/app/api/tools/image/route.ts diff --git a/apps/sim/app/api/tools/mistral/parse/route.ts b/apps/sim/app/api/tools/mistral/parse/route.ts index b31029d1b..5474855af 100644 --- a/apps/sim/app/api/tools/mistral/parse/route.ts +++ b/apps/sim/app/api/tools/mistral/parse/route.ts @@ -5,7 +5,11 @@ import { checkHybridAuth } from '@/lib/auth/hybrid' import { generateRequestId } from '@/lib/core/utils/request' import { getBaseUrl } from '@/lib/core/utils/urls' import { StorageService } from '@/lib/uploads' -import { extractStorageKey, inferContextFromKey } from '@/lib/uploads/utils/file-utils' +import { + extractStorageKey, + inferContextFromKey, + isInternalFileUrl, +} from '@/lib/uploads/utils/file-utils' import { verifyFileAccess } from '@/app/api/files/authorization' export const dynamic = 'force-dynamic' @@ -47,13 +51,13 @@ export async function POST(request: NextRequest) { logger.info(`[${requestId}] Mistral parse request`, { filePath: validatedData.filePath, - isWorkspaceFile: validatedData.filePath.includes('/api/files/serve/'), + isWorkspaceFile: isInternalFileUrl(validatedData.filePath), userId, }) let fileUrl = validatedData.filePath - if (validatedData.filePath?.includes('/api/files/serve/')) { + if (isInternalFileUrl(validatedData.filePath)) { try { const storageKey = extractStorageKey(validatedData.filePath) diff --git a/apps/sim/app/api/tools/pulse/parse/route.ts b/apps/sim/app/api/tools/pulse/parse/route.ts index 7c2f340b1..74ef2fe08 100644 --- a/apps/sim/app/api/tools/pulse/parse/route.ts +++ b/apps/sim/app/api/tools/pulse/parse/route.ts @@ -5,7 +5,11 @@ import { checkHybridAuth } from '@/lib/auth/hybrid' import { generateRequestId } from '@/lib/core/utils/request' import { getBaseUrl } from '@/lib/core/utils/urls' import { StorageService } from '@/lib/uploads' -import { extractStorageKey, inferContextFromKey } from '@/lib/uploads/utils/file-utils' +import { + extractStorageKey, + inferContextFromKey, + isInternalFileUrl, +} from '@/lib/uploads/utils/file-utils' import { verifyFileAccess } from '@/app/api/files/authorization' export const dynamic = 'force-dynamic' @@ -48,13 +52,13 @@ export async function 
POST(request: NextRequest) { logger.info(`[${requestId}] Pulse parse request`, { filePath: validatedData.filePath, - isWorkspaceFile: validatedData.filePath.includes('/api/files/serve/'), + isWorkspaceFile: isInternalFileUrl(validatedData.filePath), userId, }) let fileUrl = validatedData.filePath - if (validatedData.filePath?.includes('/api/files/serve/')) { + if (isInternalFileUrl(validatedData.filePath)) { try { const storageKey = extractStorageKey(validatedData.filePath) const context = inferContextFromKey(storageKey) diff --git a/apps/sim/app/api/tools/reducto/parse/route.ts b/apps/sim/app/api/tools/reducto/parse/route.ts index fa96ac46b..2ce14e9d3 100644 --- a/apps/sim/app/api/tools/reducto/parse/route.ts +++ b/apps/sim/app/api/tools/reducto/parse/route.ts @@ -5,7 +5,11 @@ import { checkHybridAuth } from '@/lib/auth/hybrid' import { generateRequestId } from '@/lib/core/utils/request' import { getBaseUrl } from '@/lib/core/utils/urls' import { StorageService } from '@/lib/uploads' -import { extractStorageKey, inferContextFromKey } from '@/lib/uploads/utils/file-utils' +import { + extractStorageKey, + inferContextFromKey, + isInternalFileUrl, +} from '@/lib/uploads/utils/file-utils' import { verifyFileAccess } from '@/app/api/files/authorization' export const dynamic = 'force-dynamic' @@ -44,13 +48,13 @@ export async function POST(request: NextRequest) { logger.info(`[${requestId}] Reducto parse request`, { filePath: validatedData.filePath, - isWorkspaceFile: validatedData.filePath.includes('/api/files/serve/'), + isWorkspaceFile: isInternalFileUrl(validatedData.filePath), userId, }) let fileUrl = validatedData.filePath - if (validatedData.filePath?.includes('/api/files/serve/')) { + if (isInternalFileUrl(validatedData.filePath)) { try { const storageKey = extractStorageKey(validatedData.filePath) const context = inferContextFromKey(storageKey) diff --git a/apps/sim/app/api/tools/s3/copy-object/route.ts b/apps/sim/app/api/tools/s3/copy-object/route.ts index 888aaf630..74b0d9ee5 100644 --- a/apps/sim/app/api/tools/s3/copy-object/route.ts +++ b/apps/sim/app/api/tools/s3/copy-object/route.ts @@ -79,11 +79,13 @@ export async function POST(request: NextRequest) { // Generate public URL for destination (properly encode the destination key) const encodedDestKey = validatedData.destinationKey.split('/').map(encodeURIComponent).join('/') const url = `https://${validatedData.destinationBucket}.s3.${validatedData.region}.amazonaws.com/${encodedDestKey}` + const uri = `s3://${validatedData.destinationBucket}/${validatedData.destinationKey}` return NextResponse.json({ success: true, output: { url, + uri, copySourceVersionId: result.CopySourceVersionId, versionId: result.VersionId, etag: result.CopyObjectResult?.ETag, diff --git a/apps/sim/app/api/tools/s3/put-object/route.ts b/apps/sim/app/api/tools/s3/put-object/route.ts index 2f7aced28..bd2bab3a6 100644 --- a/apps/sim/app/api/tools/s3/put-object/route.ts +++ b/apps/sim/app/api/tools/s3/put-object/route.ts @@ -117,11 +117,13 @@ export async function POST(request: NextRequest) { const encodedKey = validatedData.objectKey.split('/').map(encodeURIComponent).join('/') const url = `https://${validatedData.bucketName}.s3.${validatedData.region}.amazonaws.com/${encodedKey}` + const uri = `s3://${validatedData.bucketName}/${validatedData.objectKey}` return NextResponse.json({ success: true, output: { url, + uri, etag: result.ETag, location: url, key: validatedData.objectKey, diff --git a/apps/sim/app/api/proxy/stt/route.ts 
b/apps/sim/app/api/tools/stt/route.ts
similarity index 100%
rename from apps/sim/app/api/proxy/stt/route.ts
rename to apps/sim/app/api/tools/stt/route.ts
diff --git a/apps/sim/app/api/tools/textract/parse/route.ts b/apps/sim/app/api/tools/textract/parse/route.ts
new file mode 100644
index 000000000..3fb73976d
--- /dev/null
+++ b/apps/sim/app/api/tools/textract/parse/route.ts
@@ -0,0 +1,637 @@
+import crypto from 'crypto'
+import { createLogger } from '@sim/logger'
+import { type NextRequest, NextResponse } from 'next/server'
+import { z } from 'zod'
+import { checkHybridAuth } from '@/lib/auth/hybrid'
+import {
+  validateAwsRegion,
+  validateExternalUrl,
+  validateS3BucketName,
+} from '@/lib/core/security/input-validation'
+import { generateRequestId } from '@/lib/core/utils/request'
+import { StorageService } from '@/lib/uploads'
+import {
+  extractStorageKey,
+  inferContextFromKey,
+  isInternalFileUrl,
+} from '@/lib/uploads/utils/file-utils'
+import { verifyFileAccess } from '@/app/api/files/authorization'
+
+export const dynamic = 'force-dynamic'
+export const maxDuration = 300 // 5 minutes for large multi-page PDF processing
+
+const logger = createLogger('TextractParseAPI')
+
+const QuerySchema = z.object({
+  Text: z.string().min(1),
+  Alias: z.string().optional(),
+  Pages: z.array(z.string()).optional(),
+})
+
+const TextractParseSchema = z
+  .object({
+    accessKeyId: z.string().min(1, 'AWS Access Key ID is required'),
+    secretAccessKey: z.string().min(1, 'AWS Secret Access Key is required'),
+    region: z.string().min(1, 'AWS region is required'),
+    processingMode: z.enum(['sync', 'async']).optional().default('sync'),
+    filePath: z.string().optional(),
+    s3Uri: z.string().optional(),
+    featureTypes: z
+      .array(z.enum(['TABLES', 'FORMS', 'QUERIES', 'SIGNATURES', 'LAYOUT']))
+      .optional(),
+    queries: z.array(QuerySchema).optional(),
+  })
+  .superRefine((data, ctx) => {
+    const regionValidation = validateAwsRegion(data.region, 'AWS region')
+    if (!regionValidation.isValid) {
+      ctx.addIssue({
+        code: z.ZodIssueCode.custom,
+        message: regionValidation.error,
+        path: ['region'],
+      })
+    }
+  })
+
+function getSignatureKey(
+  key: string,
+  dateStamp: string,
+  regionName: string,
+  serviceName: string
+): Buffer {
+  const kDate = crypto.createHmac('sha256', `AWS4${key}`).update(dateStamp).digest()
+  const kRegion = crypto.createHmac('sha256', kDate).update(regionName).digest()
+  const kService = crypto.createHmac('sha256', kRegion).update(serviceName).digest()
+  const kSigning = crypto.createHmac('sha256', kService).update('aws4_request').digest()
+  return kSigning
+}
+
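`getSignatureKey` is the standard SigV4 key-derivation chain (secret → date → region → service → `aws4_request`). A quick sanity check against the worked example in AWS's SigV4 documentation looks like this (the secret and scope values are assumed from that documentation, not part of this patch):

```typescript
import crypto from 'crypto'

// Example inputs from AWS's published SigV4 derivation example (assumed).
const secret = 'wJalrXUtnFEMI/K7MDENG+bPxRfiCYEXAMPLEKEY'
const kDate = crypto.createHmac('sha256', `AWS4${secret}`).update('20150830').digest()
const kRegion = crypto.createHmac('sha256', kDate).update('us-east-1').digest()
const kService = crypto.createHmac('sha256', kRegion).update('iam').digest()
const kSigning = crypto.createHmac('sha256', kService).update('aws4_request').digest()

// The derived key is scoped to date, region, and service, so a leaked
// signature cannot be replayed against another day or another API.
console.log(kSigning.toString('hex'))
```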
+function signAwsRequest(
+  method: string,
+  host: string,
+  uri: string,
+  body: string,
+  accessKeyId: string,
+  secretAccessKey: string,
+  region: string,
+  service: string,
+  amzTarget: string
+): Record<string, string> {
+  const date = new Date()
+  const amzDate = date.toISOString().replace(/[:-]|\.\d{3}/g, '')
+  const dateStamp = amzDate.slice(0, 8)
+
+  const payloadHash = crypto.createHash('sha256').update(body).digest('hex')
+
+  const canonicalHeaders =
+    `content-type:application/x-amz-json-1.1\n` +
+    `host:${host}\n` +
+    `x-amz-date:${amzDate}\n` +
+    `x-amz-target:${amzTarget}\n`
+
+  const signedHeaders = 'content-type;host;x-amz-date;x-amz-target'
+
+  const canonicalRequest = `${method}\n${uri}\n\n${canonicalHeaders}\n${signedHeaders}\n${payloadHash}`
+
+  const algorithm = 'AWS4-HMAC-SHA256'
+  const credentialScope = `${dateStamp}/${region}/${service}/aws4_request`
+  const stringToSign = `${algorithm}\n${amzDate}\n${credentialScope}\n${crypto.createHash('sha256').update(canonicalRequest).digest('hex')}`
+
+  const signingKey = getSignatureKey(secretAccessKey, dateStamp, region, service)
+  const signature = crypto.createHmac('sha256', signingKey).update(stringToSign).digest('hex')
+
+  const authorizationHeader = `${algorithm} Credential=${accessKeyId}/${credentialScope}, SignedHeaders=${signedHeaders}, Signature=${signature}`
+
+  return {
+    'Content-Type': 'application/x-amz-json-1.1',
+    Host: host,
+    'X-Amz-Date': amzDate,
+    'X-Amz-Target': amzTarget,
+    Authorization: authorizationHeader,
+  }
+}
+
+async function fetchDocumentBytes(url: string): Promise<{ bytes: string; contentType: string }> {
+  const response = await fetch(url)
+  if (!response.ok) {
+    throw new Error(`Failed to fetch document: ${response.statusText}`)
+  }
+
+  const arrayBuffer = await response.arrayBuffer()
+  const bytes = Buffer.from(arrayBuffer).toString('base64')
+  const contentType = response.headers.get('content-type') || 'application/octet-stream'
+
+  return { bytes, contentType }
+}
+
+function parseS3Uri(s3Uri: string): { bucket: string; key: string } {
+  const match = s3Uri.match(/^s3:\/\/([^/]+)\/(.+)$/)
+  if (!match) {
+    throw new Error(
+      `Invalid S3 URI format: ${s3Uri}. Expected format: s3://bucket-name/path/to/object`
+    )
+  }
+
+  const bucket = match[1]
+  const key = match[2]
+
+  const bucketValidation = validateS3BucketName(bucket, 'S3 bucket name')
+  if (!bucketValidation.isValid) {
+    throw new Error(bucketValidation.error)
+  }
+
+  if (key.includes('..') || key.startsWith('/')) {
+    throw new Error('S3 key contains invalid path traversal sequences')
+  }
+
+  return { bucket, key }
+}
+
+function sleep(ms: number): Promise<void> {
+  return new Promise((resolve) => setTimeout(resolve, ms))
+}
+
+async function callTextractAsync(
+  host: string,
+  amzTarget: string,
+  body: Record<string, unknown>,
+  accessKeyId: string,
+  secretAccessKey: string,
+  region: string
+): Promise<Record<string, unknown>> {
+  const bodyString = JSON.stringify(body)
+  const headers = signAwsRequest(
+    'POST',
+    host,
+    '/',
+    bodyString,
+    accessKeyId,
+    secretAccessKey,
+    region,
+    'textract',
+    amzTarget
+  )
+
+  const response = await fetch(`https://${host}/`, {
+    method: 'POST',
+    headers,
+    body: bodyString,
+  })
+
+  if (!response.ok) {
+    const errorText = await response.text()
+    let errorMessage = `Textract API error: ${response.statusText}`
+    try {
+      const errorJson = JSON.parse(errorText)
+      if (errorJson.Message) {
+        errorMessage = errorJson.Message
+      } else if (errorJson.__type) {
+        errorMessage = `${errorJson.__type}: ${errorJson.message || errorText}`
+      }
+    } catch {
+      // Use default error message
+    }
+    throw new Error(errorMessage)
+  }
+
+  return response.json()
+}
+
+async function pollForJobCompletion(
+  host: string,
+  jobId: string,
+  accessKeyId: string,
+  secretAccessKey: string,
+  region: string,
+  useAnalyzeDocument: boolean,
+  requestId: string
+): Promise<Record<string, unknown>> {
+  const pollIntervalMs = 5000 // 5 seconds between polls
+  const maxPollTimeMs = 180000 // 3 minutes maximum polling time
+  const maxAttempts = Math.ceil(maxPollTimeMs / pollIntervalMs)
+
+  const getTarget = useAnalyzeDocument ? 
'Textract.GetDocumentAnalysis' + : 'Textract.GetDocumentTextDetection' + + for (let attempt = 0; attempt < maxAttempts; attempt++) { + const result = await callTextractAsync( + host, + getTarget, + { JobId: jobId }, + accessKeyId, + secretAccessKey, + region + ) + + const jobStatus = result.JobStatus as string + + if (jobStatus === 'SUCCEEDED') { + logger.info(`[${requestId}] Async job completed successfully after ${attempt + 1} polls`) + + let allBlocks = (result.Blocks as unknown[]) || [] + let nextToken = result.NextToken as string | undefined + + while (nextToken) { + const nextResult = await callTextractAsync( + host, + getTarget, + { JobId: jobId, NextToken: nextToken }, + accessKeyId, + secretAccessKey, + region + ) + allBlocks = allBlocks.concat((nextResult.Blocks as unknown[]) || []) + nextToken = nextResult.NextToken as string | undefined + } + + return { + ...result, + Blocks: allBlocks, + } + } + + if (jobStatus === 'FAILED') { + throw new Error(`Textract job failed: ${result.StatusMessage || 'Unknown error'}`) + } + + if (jobStatus === 'PARTIAL_SUCCESS') { + logger.warn(`[${requestId}] Job completed with partial success: ${result.StatusMessage}`) + + let allBlocks = (result.Blocks as unknown[]) || [] + let nextToken = result.NextToken as string | undefined + + while (nextToken) { + const nextResult = await callTextractAsync( + host, + getTarget, + { JobId: jobId, NextToken: nextToken }, + accessKeyId, + secretAccessKey, + region + ) + allBlocks = allBlocks.concat((nextResult.Blocks as unknown[]) || []) + nextToken = nextResult.NextToken as string | undefined + } + + return { + ...result, + Blocks: allBlocks, + } + } + + logger.info(`[${requestId}] Job status: ${jobStatus}, attempt ${attempt + 1}/${maxAttempts}`) + await sleep(pollIntervalMs) + } + + throw new Error( + `Timeout waiting for Textract job to complete (max ${maxPollTimeMs / 1000} seconds)` + ) +} + +export async function POST(request: NextRequest) { + const requestId = generateRequestId() + + try { + const authResult = await checkHybridAuth(request, { requireWorkflowId: false }) + + if (!authResult.success || !authResult.userId) { + logger.warn(`[${requestId}] Unauthorized Textract parse attempt`, { + error: authResult.error || 'Missing userId', + }) + return NextResponse.json( + { + success: false, + error: authResult.error || 'Unauthorized', + }, + { status: 401 } + ) + } + + const userId = authResult.userId + const body = await request.json() + const validatedData = TextractParseSchema.parse(body) + + const processingMode = validatedData.processingMode || 'sync' + const featureTypes = validatedData.featureTypes ?? [] + const useAnalyzeDocument = featureTypes.length > 0 + const host = `textract.${validatedData.region}.amazonaws.com` + + logger.info(`[${requestId}] Textract parse request`, { + processingMode, + filePath: validatedData.filePath?.substring(0, 50), + s3Uri: validatedData.s3Uri?.substring(0, 50), + featureTypes, + userId, + }) + + if (processingMode === 'async') { + if (!validatedData.s3Uri) { + return NextResponse.json( + { + success: false, + error: 'S3 URI is required for multi-page processing (s3://bucket/key)', + }, + { status: 400 } + ) + } + + const { bucket: s3Bucket, key: s3Key } = parseS3Uri(validatedData.s3Uri) + + logger.info(`[${requestId}] Starting async Textract job`, { s3Bucket, s3Key }) + + const startTarget = useAnalyzeDocument + ? 
'Textract.StartDocumentAnalysis'
+        : 'Textract.StartDocumentTextDetection'
+
+      const startBody: Record<string, unknown> = {
+        DocumentLocation: {
+          S3Object: {
+            Bucket: s3Bucket,
+            Name: s3Key,
+          },
+        },
+      }
+
+      if (useAnalyzeDocument) {
+        startBody.FeatureTypes = featureTypes
+
+        if (
+          validatedData.queries &&
+          validatedData.queries.length > 0 &&
+          featureTypes.includes('QUERIES')
+        ) {
+          startBody.QueriesConfig = {
+            Queries: validatedData.queries.map((q) => ({
+              Text: q.Text,
+              Alias: q.Alias,
+              Pages: q.Pages,
+            })),
+          }
+        }
+      }
+
+      const startResult = await callTextractAsync(
+        host,
+        startTarget,
+        startBody,
+        validatedData.accessKeyId,
+        validatedData.secretAccessKey,
+        validatedData.region
+      )
+
+      const jobId = startResult.JobId as string
+      if (!jobId) {
+        throw new Error('Failed to start Textract job: No JobId returned')
+      }
+
+      logger.info(`[${requestId}] Async job started`, { jobId })
+
+      const textractData = await pollForJobCompletion(
+        host,
+        jobId,
+        validatedData.accessKeyId,
+        validatedData.secretAccessKey,
+        validatedData.region,
+        useAnalyzeDocument,
+        requestId
+      )
+
+      logger.info(`[${requestId}] Textract async parse successful`, {
+        pageCount: (textractData.DocumentMetadata as { Pages?: number })?.Pages ?? 0,
+        blockCount: (textractData.Blocks as unknown[])?.length ?? 0,
+      })
+
+      return NextResponse.json({
+        success: true,
+        output: {
+          blocks: textractData.Blocks ?? [],
+          documentMetadata: {
+            pages: (textractData.DocumentMetadata as { Pages?: number })?.Pages ?? 0,
+          },
+          modelVersion: (textractData.AnalyzeDocumentModelVersion ??
+            textractData.DetectDocumentTextModelVersion) as string | undefined,
+        },
+      })
+    }
+
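Both branches return raw Textract `Block` objects rather than flattened text. A consumer that only wants plain text might post-process the response like this (a sketch against the route's response envelope; `TextractBlock` is a deliberately narrow hypothetical type, not the full Textract model):

```typescript
interface TextractBlock {
  BlockType?: string
  Text?: string
}

// Concatenate LINE blocks in the order Textract returned them.
function blocksToText(blocks: TextractBlock[]): string {
  return blocks
    .filter((b) => b.BlockType === 'LINE' && typeof b.Text === 'string')
    .map((b) => b.Text)
    .join('\n')
}

// const res = await fetch('/api/tools/textract/parse', { method: 'POST', body: ... })
// const { output } = await res.json()
// console.log(blocksToText(output.blocks))
```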
Only uploaded files are supported for internal paths.', + }, + { status: 400 } + ) + } else { + const urlValidation = validateExternalUrl(fileUrl, 'Document URL') + if (!urlValidation.isValid) { + logger.warn(`[${requestId}] SSRF attempt blocked`, { + userId, + url: fileUrl.substring(0, 100), + error: urlValidation.error, + }) + return NextResponse.json( + { + success: false, + error: urlValidation.error, + }, + { status: 400 } + ) + } + } + + const { bytes, contentType } = await fetchDocumentBytes(fileUrl) + + // Track if this is a PDF for better error messaging + const isPdf = contentType.includes('pdf') || fileUrl.toLowerCase().endsWith('.pdf') + + const uri = '/' + + let textractBody: Record + let amzTarget: string + + if (useAnalyzeDocument) { + amzTarget = 'Textract.AnalyzeDocument' + textractBody = { + Document: { + Bytes: bytes, + }, + FeatureTypes: featureTypes, + } + + if ( + validatedData.queries && + validatedData.queries.length > 0 && + featureTypes.includes('QUERIES') + ) { + textractBody.QueriesConfig = { + Queries: validatedData.queries.map((q) => ({ + Text: q.Text, + Alias: q.Alias, + Pages: q.Pages, + })), + } + } + } else { + amzTarget = 'Textract.DetectDocumentText' + textractBody = { + Document: { + Bytes: bytes, + }, + } + } + + const bodyString = JSON.stringify(textractBody) + + const headers = signAwsRequest( + 'POST', + host, + uri, + bodyString, + validatedData.accessKeyId, + validatedData.secretAccessKey, + validatedData.region, + 'textract', + amzTarget + ) + + const textractResponse = await fetch(`https://${host}${uri}`, { + method: 'POST', + headers, + body: bodyString, + }) + + if (!textractResponse.ok) { + const errorText = await textractResponse.text() + logger.error(`[${requestId}] Textract API error:`, errorText) + + let errorMessage = `Textract API error: ${textractResponse.statusText}` + let isUnsupportedFormat = false + try { + const errorJson = JSON.parse(errorText) + if (errorJson.Message) { + errorMessage = errorJson.Message + } else if (errorJson.__type) { + errorMessage = `${errorJson.__type}: ${errorJson.message || errorText}` + } + // Check for unsupported document format error + isUnsupportedFormat = + errorJson.__type === 'UnsupportedDocumentException' || + errorJson.Message?.toLowerCase().includes('unsupported document') || + errorText.toLowerCase().includes('unsupported document') + } catch { + isUnsupportedFormat = errorText.toLowerCase().includes('unsupported document') + } + + // Provide helpful message for unsupported format (likely multi-page PDF) + if (isUnsupportedFormat && isPdf) { + errorMessage = + 'This document format is not supported in Single Page mode. If this is a multi-page PDF, please use "Multi-Page (PDF, TIFF via S3)" mode instead, which requires uploading your document to S3 first. Single Page mode only supports JPEG, PNG, and single-page PDF files.' + } + + return NextResponse.json( + { + success: false, + error: errorMessage, + }, + { status: textractResponse.status } + ) + } + + const textractData = await textractResponse.json() + + logger.info(`[${requestId}] Textract parse successful`, { + pageCount: textractData.DocumentMetadata?.Pages ?? 0, + blockCount: textractData.Blocks?.length ?? 0, + }) + + return NextResponse.json({ + success: true, + output: { + blocks: textractData.Blocks ?? [], + documentMetadata: { + pages: textractData.DocumentMetadata?.Pages ?? 0, + }, + modelVersion: + textractData.AnalyzeDocumentModelVersion ?? + textractData.DetectDocumentTextModelVersion ?? 
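+          // The analysis and text-detection responses expose the model version under different keys, so try both before giving up.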
+ undefined, + }, + }) + } catch (error) { + if (error instanceof z.ZodError) { + logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors }) + return NextResponse.json( + { + success: false, + error: 'Invalid request data', + details: error.errors, + }, + { status: 400 } + ) + } + + logger.error(`[${requestId}] Error in Textract parse:`, error) + + return NextResponse.json( + { + success: false, + error: error instanceof Error ? error.message : 'Internal server error', + }, + { status: 500 } + ) + } +} diff --git a/apps/sim/app/api/proxy/tts/route.ts b/apps/sim/app/api/tools/tts/route.ts similarity index 100% rename from apps/sim/app/api/proxy/tts/route.ts rename to apps/sim/app/api/tools/tts/route.ts diff --git a/apps/sim/app/api/proxy/tts/unified/route.ts b/apps/sim/app/api/tools/tts/unified/route.ts similarity index 100% rename from apps/sim/app/api/proxy/tts/unified/route.ts rename to apps/sim/app/api/tools/tts/unified/route.ts diff --git a/apps/sim/app/api/proxy/video/route.ts b/apps/sim/app/api/tools/video/route.ts similarity index 100% rename from apps/sim/app/api/proxy/video/route.ts rename to apps/sim/app/api/tools/video/route.ts diff --git a/apps/sim/app/api/v1/admin/types.ts b/apps/sim/app/api/v1/admin/types.ts index 114563a37..0f2dfd814 100644 --- a/apps/sim/app/api/v1/admin/types.ts +++ b/apps/sim/app/api/v1/admin/types.ts @@ -550,6 +550,8 @@ export interface AdminUserBilling { totalWebhookTriggers: number totalScheduledExecutions: number totalChatExecutions: number + totalMcpExecutions: number + totalA2aExecutions: number totalTokensUsed: number totalCost: string currentUsageLimit: string | null diff --git a/apps/sim/app/api/v1/admin/users/[id]/billing/route.ts b/apps/sim/app/api/v1/admin/users/[id]/billing/route.ts index e5681df62..9f0374f5f 100644 --- a/apps/sim/app/api/v1/admin/users/[id]/billing/route.ts +++ b/apps/sim/app/api/v1/admin/users/[id]/billing/route.ts @@ -97,6 +97,8 @@ export const GET = withAdminAuthParams(async (_, context) => { totalWebhookTriggers: stats?.totalWebhookTriggers ?? 0, totalScheduledExecutions: stats?.totalScheduledExecutions ?? 0, totalChatExecutions: stats?.totalChatExecutions ?? 0, + totalMcpExecutions: stats?.totalMcpExecutions ?? 0, + totalA2aExecutions: stats?.totalA2aExecutions ?? 0, totalTokensUsed: stats?.totalTokensUsed ?? 0, totalCost: stats?.totalCost ?? '0', currentUsageLimit: stats?.currentUsageLimit ?? 
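       // A user without a usage-stats row has simply never executed anything; every counter defaults to zero and the limit stays unset.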
null,
diff --git a/apps/sim/app/api/v1/middleware.ts b/apps/sim/app/api/v1/middleware.ts
index 4f0eac4ad..06b410943 100644
--- a/apps/sim/app/api/v1/middleware.ts
+++ b/apps/sim/app/api/v1/middleware.ts
@@ -19,7 +19,7 @@ export interface RateLimitResult {
 export async function checkRateLimit(
   request: NextRequest,
-  endpoint: 'logs' | 'logs-detail' = 'logs'
+  endpoint: 'logs' | 'logs-detail' | 'workflows' | 'workflow-detail' = 'logs'
 ): Promise<RateLimitResult> {
   try {
     const auth = await authenticateV1Request(request)
diff --git a/apps/sim/app/api/v1/workflows/[id]/route.ts b/apps/sim/app/api/v1/workflows/[id]/route.ts
new file mode 100644
index 000000000..658a0f8ea
--- /dev/null
+++ b/apps/sim/app/api/v1/workflows/[id]/route.ts
@@ -0,0 +1,102 @@
+import { db } from '@sim/db'
+import { permissions, workflow, workflowBlocks } from '@sim/db/schema'
+import { createLogger } from '@sim/logger'
+import { and, eq } from 'drizzle-orm'
+import { type NextRequest, NextResponse } from 'next/server'
+import { extractInputFieldsFromBlocks } from '@/lib/workflows/input-format'
+import { createApiResponse, getUserLimits } from '@/app/api/v1/logs/meta'
+import { checkRateLimit, createRateLimitResponse } from '@/app/api/v1/middleware'
+
+const logger = createLogger('V1WorkflowDetailsAPI')
+
+export const revalidate = 0
+
+export async function GET(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
+  const requestId = crypto.randomUUID().slice(0, 8)
+
+  try {
+    const rateLimit = await checkRateLimit(request, 'workflow-detail')
+    if (!rateLimit.allowed) {
+      return createRateLimitResponse(rateLimit)
+    }
+
+    const userId = rateLimit.userId!
+    const { id } = await params
+
+    logger.info(`[${requestId}] Fetching workflow details for ${id}`, { userId })
+
+    const rows = await db
+      .select({
+        id: workflow.id,
+        name: workflow.name,
+        description: workflow.description,
+        color: workflow.color,
+        folderId: workflow.folderId,
+        workspaceId: workflow.workspaceId,
+        isDeployed: workflow.isDeployed,
+        deployedAt: workflow.deployedAt,
+        runCount: workflow.runCount,
+        lastRunAt: workflow.lastRunAt,
+        variables: workflow.variables,
+        createdAt: workflow.createdAt,
+        updatedAt: workflow.updatedAt,
+      })
+      .from(workflow)
+      .innerJoin(
+        permissions,
+        and(
+          eq(permissions.entityType, 'workspace'),
+          eq(permissions.entityId, workflow.workspaceId),
+          eq(permissions.userId, userId)
+        )
+      )
+      .where(eq(workflow.id, id))
+      .limit(1)
+
+    const workflowData = rows[0]
+    if (!workflowData) {
+      return NextResponse.json({ error: 'Workflow not found' }, { status: 404 })
+    }
+
+    const blockRows = await db
+      .select({
+        id: workflowBlocks.id,
+        type: workflowBlocks.type,
+        subBlocks: workflowBlocks.subBlocks,
+      })
+      .from(workflowBlocks)
+      .where(eq(workflowBlocks.workflowId, id))
+
+    const blocksRecord = Object.fromEntries(
+      blockRows.map((block) => [block.id, { type: block.type, subBlocks: block.subBlocks }])
+    )
+    const inputs = extractInputFieldsFromBlocks(blocksRecord)
+
+    const response = {
+      id: workflowData.id,
+      name: workflowData.name,
+      description: workflowData.description,
+      color: workflowData.color,
+      folderId: workflowData.folderId,
+      workspaceId: workflowData.workspaceId,
+      isDeployed: workflowData.isDeployed,
+      deployedAt: workflowData.deployedAt?.toISOString() || null,
+      runCount: workflowData.runCount,
+      lastRunAt: workflowData.lastRunAt?.toISOString() || null,
+      variables: workflowData.variables || {},
+      inputs,
+      createdAt: workflowData.createdAt.toISOString(),
+      updatedAt:
workflowData.updatedAt.toISOString(), + } + + const limits = await getUserLimits(userId) + + const apiResponse = createApiResponse({ data: response }, limits, rateLimit) + + return NextResponse.json(apiResponse.body, { headers: apiResponse.headers }) + } catch (error: unknown) { + const message = error instanceof Error ? error.message : 'Unknown error' + logger.error(`[${requestId}] Workflow details fetch error`, { error: message }) + return NextResponse.json({ error: 'Internal server error' }, { status: 500 }) + } +} diff --git a/apps/sim/app/api/v1/workflows/route.ts b/apps/sim/app/api/v1/workflows/route.ts new file mode 100644 index 000000000..23bb707f1 --- /dev/null +++ b/apps/sim/app/api/v1/workflows/route.ts @@ -0,0 +1,184 @@ +import { db } from '@sim/db' +import { permissions, workflow } from '@sim/db/schema' +import { createLogger } from '@sim/logger' +import { and, asc, eq, gt, or } from 'drizzle-orm' +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { createApiResponse, getUserLimits } from '@/app/api/v1/logs/meta' +import { checkRateLimit, createRateLimitResponse } from '@/app/api/v1/middleware' + +const logger = createLogger('V1WorkflowsAPI') + +export const dynamic = 'force-dynamic' +export const revalidate = 0 + +const QueryParamsSchema = z.object({ + workspaceId: z.string(), + folderId: z.string().optional(), + deployedOnly: z.coerce.boolean().optional().default(false), + limit: z.coerce.number().min(1).max(100).optional().default(50), + cursor: z.string().optional(), +}) + +interface CursorData { + sortOrder: number + createdAt: string + id: string +} + +function encodeCursor(data: CursorData): string { + return Buffer.from(JSON.stringify(data)).toString('base64') +} + +function decodeCursor(cursor: string): CursorData | null { + try { + return JSON.parse(Buffer.from(cursor, 'base64').toString()) + } catch { + return null + } +} + +export async function GET(request: NextRequest) { + const requestId = crypto.randomUUID().slice(0, 8) + + try { + const rateLimit = await checkRateLimit(request, 'workflows') + if (!rateLimit.allowed) { + return createRateLimitResponse(rateLimit) + } + + const userId = rateLimit.userId! 
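+    // checkRateLimit authenticates the caller before reporting allowed=true, so the non-null assertion on userId is safe here.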
+ const { searchParams } = new URL(request.url) + const rawParams = Object.fromEntries(searchParams.entries()) + + const validationResult = QueryParamsSchema.safeParse(rawParams) + if (!validationResult.success) { + return NextResponse.json( + { error: 'Invalid parameters', details: validationResult.error.errors }, + { status: 400 } + ) + } + + const params = validationResult.data + + logger.info(`[${requestId}] Fetching workflows for workspace ${params.workspaceId}`, { + userId, + filters: { + folderId: params.folderId, + deployedOnly: params.deployedOnly, + }, + }) + + const conditions = [ + eq(workflow.workspaceId, params.workspaceId), + eq(permissions.entityType, 'workspace'), + eq(permissions.entityId, params.workspaceId), + eq(permissions.userId, userId), + ] + + if (params.folderId) { + conditions.push(eq(workflow.folderId, params.folderId)) + } + + if (params.deployedOnly) { + conditions.push(eq(workflow.isDeployed, true)) + } + + if (params.cursor) { + const cursorData = decodeCursor(params.cursor) + if (cursorData) { + const cursorCondition = or( + gt(workflow.sortOrder, cursorData.sortOrder), + and( + eq(workflow.sortOrder, cursorData.sortOrder), + gt(workflow.createdAt, new Date(cursorData.createdAt)) + ), + and( + eq(workflow.sortOrder, cursorData.sortOrder), + eq(workflow.createdAt, new Date(cursorData.createdAt)), + gt(workflow.id, cursorData.id) + ) + ) + if (cursorCondition) { + conditions.push(cursorCondition) + } + } + } + + const orderByClause = [asc(workflow.sortOrder), asc(workflow.createdAt), asc(workflow.id)] + + const rows = await db + .select({ + id: workflow.id, + name: workflow.name, + description: workflow.description, + color: workflow.color, + folderId: workflow.folderId, + workspaceId: workflow.workspaceId, + isDeployed: workflow.isDeployed, + deployedAt: workflow.deployedAt, + runCount: workflow.runCount, + lastRunAt: workflow.lastRunAt, + sortOrder: workflow.sortOrder, + createdAt: workflow.createdAt, + updatedAt: workflow.updatedAt, + }) + .from(workflow) + .innerJoin( + permissions, + and( + eq(permissions.entityType, 'workspace'), + eq(permissions.entityId, params.workspaceId), + eq(permissions.userId, userId) + ) + ) + .where(and(...conditions)) + .orderBy(...orderByClause) + .limit(params.limit + 1) + + const hasMore = rows.length > params.limit + const data = rows.slice(0, params.limit) + + let nextCursor: string | undefined + if (hasMore && data.length > 0) { + const lastWorkflow = data[data.length - 1] + nextCursor = encodeCursor({ + sortOrder: lastWorkflow.sortOrder, + createdAt: lastWorkflow.createdAt.toISOString(), + id: lastWorkflow.id, + }) + } + + const formattedWorkflows = data.map((w) => ({ + id: w.id, + name: w.name, + description: w.description, + color: w.color, + folderId: w.folderId, + workspaceId: w.workspaceId, + isDeployed: w.isDeployed, + deployedAt: w.deployedAt?.toISOString() || null, + runCount: w.runCount, + lastRunAt: w.lastRunAt?.toISOString() || null, + createdAt: w.createdAt.toISOString(), + updatedAt: w.updatedAt.toISOString(), + })) + + const limits = await getUserLimits(userId) + + const response = createApiResponse( + { + data: formattedWorkflows, + nextCursor, + }, + limits, + rateLimit + ) + + return NextResponse.json(response.body, { headers: response.headers }) + } catch (error: unknown) { + const message = error instanceof Error ? 
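+    // Keep the raw error message in server logs only; the caller just gets a generic 500.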
error.message : 'Unknown error' + logger.error(`[${requestId}] Workflows fetch error`, { error: message }) + return NextResponse.json({ error: 'Internal server error' }, { status: 500 }) + } +} diff --git a/apps/sim/app/api/workflows/[id]/execute/route.ts b/apps/sim/app/api/workflows/[id]/execute/route.ts index df988f26a..a850c7ac9 100644 --- a/apps/sim/app/api/workflows/[id]/execute/route.ts +++ b/apps/sim/app/api/workflows/[id]/execute/route.ts @@ -12,6 +12,10 @@ import { markExecutionCancelled } from '@/lib/execution/cancellation' import { processInputFileFields } from '@/lib/execution/files' import { preprocessExecution } from '@/lib/execution/preprocessing' import { LoggingSession } from '@/lib/logs/execution/logging-session' +import { + cleanupExecutionBase64Cache, + hydrateUserFilesWithBase64, +} from '@/lib/uploads/utils/user-file-base64.server' import { executeWorkflowCore } from '@/lib/workflows/executor/execution-core' import { type ExecutionEvent, encodeSSEEvent } from '@/lib/workflows/executor/execution-events' import { PauseResumeManager } from '@/lib/workflows/executor/human-in-the-loop-manager' @@ -25,7 +29,7 @@ import type { WorkflowExecutionPayload } from '@/background/workflow-execution' import { normalizeName } from '@/executor/constants' import { ExecutionSnapshot } from '@/executor/execution/snapshot' import type { ExecutionMetadata, IterationContext } from '@/executor/execution/types' -import type { StreamingExecution } from '@/executor/types' +import type { NormalizedBlockOutput, StreamingExecution } from '@/executor/types' import { Serializer } from '@/serializer' import { CORE_TRIGGER_TYPES, type CoreTriggerType } from '@/stores/logs/filters/types' @@ -38,6 +42,8 @@ const ExecuteWorkflowSchema = z.object({ useDraftState: z.boolean().optional(), input: z.any().optional(), isClientSession: z.boolean().optional(), + includeFileBase64: z.boolean().optional().default(true), + base64MaxBytes: z.number().int().positive().optional(), workflowStateOverride: z .object({ blocks: z.record(z.any()), @@ -214,6 +220,8 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id: useDraftState, input: validatedInput, isClientSession = false, + includeFileBase64, + base64MaxBytes, workflowStateOverride, } = validation.data @@ -227,6 +235,8 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id: triggerType, stream, useDraftState, + includeFileBase64, + base64MaxBytes, workflowStateOverride, workflowId: _workflowId, // Also exclude workflowId used for internal JWT auth ...rest @@ -427,16 +437,31 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id: snapshot, callbacks: {}, loggingSession, + includeFileBase64, + base64MaxBytes, }) - const hasResponseBlock = workflowHasResponseBlock(result) + const outputWithBase64 = includeFileBase64 + ? ((await hydrateUserFilesWithBase64(result.output, { + requestId, + executionId, + maxBytes: base64MaxBytes, + })) as NormalizedBlockOutput) + : result.output + + const resultWithBase64 = { ...result, output: outputWithBase64 } + + // Cleanup base64 cache for this execution + await cleanupExecutionBase64Cache(executionId) + + const hasResponseBlock = workflowHasResponseBlock(resultWithBase64) if (hasResponseBlock) { - return createHttpResponseFromBlock(result) + return createHttpResponseFromBlock(resultWithBase64) } const filteredResult = { success: result.success, - output: result.output, + output: outputWithBase64, error: result.error, metadata: result.metadata ? 
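       // Pass execution metadata through only when the executor produced it.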
{
@@ -498,6 +523,8 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
         selectedOutputs: resolvedSelectedOutputs,
         isSecureMode: false,
         workflowTriggerType: triggerType === 'chat' ? 'chat' : 'api',
+        includeFileBase64,
+        base64MaxBytes,
       },
       executionId,
     })
@@ -698,6 +725,8 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
       },
       loggingSession,
       abortSignal: abortController.signal,
+      includeFileBase64,
+      base64MaxBytes,
     })
 
     if (result.status === 'paused') {
@@ -750,12 +779,21 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
       workflowId,
       data: {
         success: result.success,
-        output: result.output,
+        output: includeFileBase64
+          ? await hydrateUserFilesWithBase64(result.output, {
+              requestId,
+              executionId,
+              maxBytes: base64MaxBytes,
+            })
+          : result.output,
         duration: result.metadata?.duration || 0,
         startTime: result.metadata?.startTime || startTime.toISOString(),
         endTime: result.metadata?.endTime || new Date().toISOString(),
       },
     })
+
+    // Cleanup base64 cache for this execution
+    await cleanupExecutionBase64Cache(executionId)
   } catch (error: any) {
     const errorMessage = error.message || 'Unknown error'
     logger.error(`[${requestId}] SSE execution failed: ${errorMessage}`)
diff --git a/apps/sim/app/api/workflows/[id]/state/route.ts b/apps/sim/app/api/workflows/[id]/state/route.ts
index 7c8879430..0c977a56c 100644
--- a/apps/sim/app/api/workflows/[id]/state/route.ts
+++ b/apps/sim/app/api/workflows/[id]/state/route.ts
@@ -33,6 +33,7 @@ const BlockDataSchema = z.object({
   doWhileCondition: z.string().optional(),
   parallelType: z.enum(['collection', 'count']).optional(),
   type: z.string().optional(),
+  canonicalModes: z.record(z.enum(['basic', 'advanced'])).optional(),
 })
 
 const SubBlockStateSchema = z.object({
diff --git a/apps/sim/app/chat/components/message/components/markdown-renderer.tsx b/apps/sim/app/chat/components/message/components/markdown-renderer.tsx
index ba69814cf..8aa66579d 100644
--- a/apps/sim/app/chat/components/message/components/markdown-renderer.tsx
+++ b/apps/sim/app/chat/components/message/components/markdown-renderer.tsx
@@ -1,4 +1,4 @@
-import React, { type HTMLAttributes, type ReactNode } from 'react'
+import React, { type HTMLAttributes, memo, type ReactNode, useMemo } from 'react'
 import ReactMarkdown from 'react-markdown'
 import remarkGfm from 'remark-gfm'
 import { Tooltip } from '@/components/emcn'
@@ -23,24 +23,16 @@ export function LinkWithPreview({ href, children }: { href: string; children: Re
   )
 }
 
-export default function MarkdownRenderer({
-  content,
-  customLinkComponent,
-}: {
-  content: string
-  customLinkComponent?: typeof LinkWithPreview
-}) {
-  const LinkComponent = customLinkComponent || LinkWithPreview
+const REMARK_PLUGINS = [remarkGfm]
 
-  const customComponents = {
-    // Paragraph
+function createCustomComponents(LinkComponent: typeof LinkWithPreview) {
+  return {
     p: ({ children }: React.HTMLAttributes<HTMLParagraphElement>) => (
       <p className='...'>
         {children}
       </p>
     ),
-    // Headings
    h1: ({ children }: React.HTMLAttributes<HTMLHeadingElement>) => (
       <h1 className='...'>
         {children}
@@ -62,7 +54,6 @@ export default function MarkdownRenderer({
       </h3>
     ),
-    // Lists
     ul: ({ children }: React.HTMLAttributes<HTMLUListElement>) => (
       <ul className='...'>{children}</ul>
     ),
-    // Code blocks
     pre: ({ children }: HTMLAttributes<HTMLPreElement>) => {
       let codeProps: HTMLAttributes<HTMLElement> = {}
       let codeContent: ReactNode = children
@@ -120,7 +110,6 @@ export default function MarkdownRenderer({
       )
     },
-    // Inline code
     code: ({
       inline,
       className,
@@ -144,24 +133,20 @@ export default function MarkdownRenderer({
       )
     },
-    // Blockquotes
     blockquote: ({ children }: React.HTMLAttributes<HTMLQuoteElement>) => (
       <blockquote className='...'>
         {children}
       </blockquote>
     ),
-    // Horizontal rule
     hr: () => <hr className='...' />,
-    // Links
     a: ({ href, children, ...props }: React.AnchorHTMLAttributes<HTMLAnchorElement>) => (
       <LinkComponent href={href ?? ''}>{children}</LinkComponent>
     ),
-    // Tables
     table: ({ children }: React.TableHTMLAttributes<HTMLTableElement>) => (
@@ -193,7 +178,6 @@
     ),
-    // Images
     img: ({ src, alt, ...props }: React.ImgHTMLAttributes<HTMLImageElement>) => (
       <img src={src} alt={alt} className='...' {...props} />
     ),
   }
+}
+
+const DEFAULT_COMPONENTS = createCustomComponents(LinkWithPreview)
+
+const MarkdownRenderer = memo(function MarkdownRenderer({
+  content,
+  customLinkComponent,
+}: {
+  content: string
+  customLinkComponent?: typeof LinkWithPreview
+}) {
+  const components = useMemo(() => {
+    if (!customLinkComponent) {
+      return DEFAULT_COMPONENTS
+    }
+    return createCustomComponents(customLinkComponent)
+  }, [customLinkComponent])
 
-  // Pre-process content to fix common issues
   const processedContent = content.trim()
 
   return (
-    <ReactMarkdown remarkPlugins={[remarkGfm]} components={customComponents}>
+    <ReactMarkdown remarkPlugins={REMARK_PLUGINS} components={components}>
       {processedContent}
     </ReactMarkdown>
    ) -} +}) + +export default MarkdownRenderer diff --git a/apps/sim/app/chat/hooks/use-chat-streaming.ts b/apps/sim/app/chat/hooks/use-chat-streaming.ts index ac474fa37..e02087093 100644 --- a/apps/sim/app/chat/hooks/use-chat-streaming.ts +++ b/apps/sim/app/chat/hooks/use-chat-streaming.ts @@ -2,7 +2,7 @@ import { useRef, useState } from 'react' import { createLogger } from '@sim/logger' -import { isUserFile } from '@/lib/core/utils/display-filters' +import { isUserFileWithMetadata } from '@/lib/core/utils/user-file' import type { ChatFile, ChatMessage } from '@/app/chat/components/message/message' import { CHAT_ERROR_MESSAGES } from '@/app/chat/constants' @@ -17,7 +17,7 @@ function extractFilesFromData( return files } - if (isUserFile(data)) { + if (isUserFileWithMetadata(data)) { if (!seenIds.has(data.id)) { seenIds.add(data.id) files.push({ @@ -232,7 +232,7 @@ export function useChatStreaming() { return null } - if (isUserFile(value)) { + if (isUserFileWithMetadata(value)) { return null } @@ -285,7 +285,7 @@ export function useChatStreaming() { const value = getOutputValue(blockOutputs, config.path) - if (isUserFile(value)) { + if (isUserFileWithMetadata(value)) { extractedFiles.push({ id: value.id, name: value.name, diff --git a/apps/sim/app/layout.tsx b/apps/sim/app/layout.tsx index 166b260af..6ab3aae35 100644 --- a/apps/sim/app/layout.tsx +++ b/apps/sim/app/layout.tsx @@ -7,7 +7,7 @@ import { generateBrandedMetadata, generateStructuredData } from '@/lib/branding/ import { PostHogProvider } from '@/app/_shell/providers/posthog-provider' import '@/app/_styles/globals.css' import { OneDollarStats } from '@/components/analytics/onedollarstats' -import { isReactGrabEnabled } from '@/lib/core/config/feature-flags' +import { isReactGrabEnabled, isReactScanEnabled } from '@/lib/core/config/feature-flags' import { HydrationErrorHandler } from '@/app/_shell/hydration-error-handler' import { QueryProvider } from '@/app/_shell/providers/query-provider' import { SessionProvider } from '@/app/_shell/providers/session-provider' @@ -35,6 +35,13 @@ export default function RootLayout({ children }: { children: React.ReactNode }) return ( + {isReactScanEnabled && ( +