Mirror of https://github.com/simstudioai/sim.git (synced 2026-01-16 02:18:06 -05:00)

Compare commits: fix/copilo... → feat/imper... (10 commits)

82fba6dc07
29ab351d0d
432a40efc2
73873bb4c6
197ada5df2
6f469a7f37
a35f6eca03
1cc489e544
e499cc4f82
5e44357b9f
@@ -20,6 +20,7 @@ export type AppSession = {
    id?: string
    userId?: string
    activeOrganizationId?: string
    impersonatedBy?: string | null
  }
} | null
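The hunk above adds a single field, impersonatedBy, to the object nested inside AppSession (the name of the enclosing key is not visible in this hunk; `session` is assumed below). A minimal sketch of how a caller might branch on the new field; the type alias and helper name are illustrative, not part of this diff:

// Sketch only: AppSessionLike mirrors the fields visible in the hunk above.
type AppSessionLike = {
  session: {
    id?: string
    userId?: string
    activeOrganizationId?: string
    impersonatedBy?: string | null
  }
} | null

// An admin is acting as this user whenever impersonatedBy holds a non-empty id.
function isImpersonatedSession(appSession: AppSessionLike): boolean {
  return Boolean(appSession?.session.impersonatedBy)
}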
apps/sim/app/_shell/providers/tooltip-provider.tsx (new file, 11 lines)
@@ -0,0 +1,11 @@
'use client'

import { Tooltip } from '@/components/emcn'

interface TooltipProviderProps {
  children: React.ReactNode
}

export function TooltipProvider({ children }: TooltipProviderProps) {
  return <Tooltip.Provider>{children}</Tooltip.Provider>
}
@@ -58,6 +58,25 @@
  pointer-events: none !important;
}

/**
 * Workflow canvas cursor styles
 * Override React Flow's default selection cursor based on canvas mode
 */
.workflow-container.canvas-mode-cursor .react-flow__pane,
.workflow-container.canvas-mode-cursor .react-flow__selectionpane {
  cursor: default !important;
}

.workflow-container.canvas-mode-hand .react-flow__pane,
.workflow-container.canvas-mode-hand .react-flow__selectionpane {
  cursor: grab !important;
}

.workflow-container.canvas-mode-hand .react-flow__pane:active,
.workflow-container.canvas-mode-hand .react-flow__selectionpane:active {
  cursor: grabbing !important;
}

/**
 * Selected node ring indicator
 * Uses a pseudo-element overlay to match the original behavior (absolute inset-0 z-40)

@@ -657,6 +676,20 @@ input[type="search"]::-ms-clear {
  }
}

/**
 * Notification toast enter animation
 */
@keyframes notification-enter {
  from {
    opacity: 0;
    transform: translateX(-16px);
  }
  to {
    opacity: 1;
    transform: translateX(var(--stack-offset, 0px));
  }
}

/**
 * @depricated
 * Legacy globals (light/dark) kept for backward-compat with old classes.
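The new canvas-mode rules above key off classes on the .workflow-container element. A minimal sketch of how those classes might be toggled from canvas code, assuming direct DOM access; the helper name and CanvasMode type are illustrative, not part of this diff:

// Sketch only: swap the canvas-mode-* class that the stylesheet above reacts to.
type CanvasMode = 'cursor' | 'hand'

function applyCanvasMode(container: HTMLElement, mode: CanvasMode): void {
  container.classList.remove('canvas-mode-cursor', 'canvas-mode-hand')
  container.classList.add(mode === 'hand' ? 'canvas-mode-hand' : 'canvas-mode-cursor')
}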
apps/sim/app/admin/impersonate/impersonate-client.tsx (new file, 363 lines)
@@ -0,0 +1,363 @@
'use client'

import { useCallback, useState } from 'react'
import { AlertCircle, ArrowLeft, ChevronLeft, ChevronRight, Loader2, Search } from 'lucide-react'
import Link from 'next/link'
import { useRouter } from 'next/navigation'
import {
  Avatar,
  AvatarFallback,
  AvatarImage,
  Badge,
  Button,
  Input,
  Table,
  TableBody,
  TableCell,
  TableHead,
  TableHeader,
  TableRow,
} from '@/components/emcn'
import { client } from '@/lib/auth/auth-client'

const USERS_PER_PAGE = 10

interface User {
  id: string
  name: string
  email: string
  image: string | null
  role: string | null
  createdAt: string
}

interface Pagination {
  total: number
  limit: number
  offset: number
}

interface ImpersonateClientProps {
  currentUserId: string
}

/**
 * Extracts initials from a user's name.
 */
function getInitials(name: string | undefined | null): string {
  if (!name?.trim()) return ''
  const parts = name.trim().split(' ')
  if (parts.length >= 2) {
    return `${parts[0][0]}${parts[parts.length - 1][0]}`.toUpperCase()
  }
  return parts[0][0].toUpperCase()
}

/**
 * Formats a date string to a readable format.
 */
function formatDate(dateString: string): string {
  const date = new Date(dateString)
  return date.toLocaleDateString('en-US', {
    year: 'numeric',
    month: 'short',
    day: 'numeric',
  })
}

export default function ImpersonateClient({ currentUserId }: ImpersonateClientProps) {
  const router = useRouter()
  const [searchTerm, setSearchTerm] = useState('')
  const [users, setUsers] = useState<User[]>([])
  const [pagination, setPagination] = useState<Pagination>({
    total: 0,
    limit: USERS_PER_PAGE,
    offset: 0,
  })
  const [currentPage, setCurrentPage] = useState(1)
  const [searching, setSearching] = useState(false)
  const [error, setError] = useState<string | null>(null)
  const [impersonatingId, setImpersonatingId] = useState<string | null>(null)

  const totalPages = Math.ceil(pagination.total / pagination.limit)
  const hasNextPage = currentPage < totalPages
  const hasPrevPage = currentPage > 1

  const searchUsers = useCallback(
    async (page = 1) => {
      if (!searchTerm.trim()) {
        setUsers([])
        setPagination({ total: 0, limit: USERS_PER_PAGE, offset: 0 })
        return
      }

      setSearching(true)
      setError(null)

      const offset = (page - 1) * USERS_PER_PAGE

      try {
        const response = await fetch(
          `/api/admin/impersonate/search?q=${encodeURIComponent(searchTerm.trim())}&limit=${USERS_PER_PAGE}&offset=${offset}`
        )

        if (!response.ok) {
          throw new Error('Failed to search users')
        }

        const data = await response.json()
        setUsers(data.users)
        setPagination(data.pagination)
        setCurrentPage(page)
      } catch (err) {
        setError(err instanceof Error ? err.message : 'Failed to search users')
        setUsers([])
      } finally {
        setSearching(false)
      }
    },
    [searchTerm]
  )

  const handleKeyDown = (e: React.KeyboardEvent) => {
    if (e.key === 'Enter') {
      searchUsers(1)
    }
  }

  const goToPage = useCallback(
    (page: number) => {
      if (page >= 1 && page <= totalPages) {
        searchUsers(page)
      }
    },
    [totalPages, searchUsers]
  )

  const nextPage = useCallback(() => {
    if (hasNextPage) {
      searchUsers(currentPage + 1)
    }
  }, [hasNextPage, currentPage, searchUsers])

  const prevPage = useCallback(() => {
    if (hasPrevPage) {
      searchUsers(currentPage - 1)
    }
  }, [hasPrevPage, currentPage, searchUsers])

  const handleImpersonate = async (userId: string) => {
    if (userId === currentUserId) {
      setError('You cannot impersonate yourself')
      return
    }

    setImpersonatingId(userId)
    setError(null)

    try {
      const result = await client.admin.impersonateUser({
        userId,
      })

      if (result.error) {
        throw new Error(result.error.message || 'Failed to impersonate user')
      }

      // Redirect to workspace after successful impersonation
      router.push('/workspace')
      router.refresh()
    } catch (err) {
      setError(err instanceof Error ? err.message : 'Failed to impersonate user')
      setImpersonatingId(null)
    }
  }

  return (
    <div className='flex min-h-screen flex-col bg-[var(--bg)]'>
      {/* Header */}
      <div className='border-[var(--border)] border-b bg-[var(--bg-secondary)] px-6 py-4'>
        <div className='mx-auto flex max-w-5xl items-center gap-4'>
          <Link href='/workspace'>
            <Button variant='ghost' size='sm' className='gap-2'>
              <ArrowLeft className='h-4 w-4' />
              Back to Workspace
            </Button>
          </Link>
          <div className='h-6 w-px bg-[var(--border)]' />
          <h1 className='font-semibold text-[var(--text)] text-lg'>User Impersonation</h1>
        </div>
      </div>

      {/* Content */}
      <div className='mx-auto w-full max-w-5xl p-6'>
        {/* Search */}
        <div className='mb-6'>
          <label
            htmlFor='user-search'
            className='mb-2 block font-medium text-[var(--text-secondary)] text-sm'
          >
            Search for a user by name or email
          </label>
          <div className='flex gap-2'>
            <div className='relative flex-1'>
              <Search className='-translate-y-1/2 absolute top-1/2 left-3 h-4 w-4 text-[var(--text-muted)]' />
              <Input
                id='user-search'
                type='text'
                placeholder='Enter name or email...'
                value={searchTerm}
                onChange={(e) => setSearchTerm(e.target.value)}
                onKeyDown={handleKeyDown}
                className='pl-10'
              />
            </div>
            <Button onClick={() => searchUsers(1)} disabled={searching || !searchTerm.trim()}>
              {searching ? <Loader2 className='h-4 w-4 animate-spin' /> : 'Search'}
            </Button>
          </div>
        </div>

        {/* Error */}
        {error && (
          <div className='mb-6 rounded-lg border border-red-500/30 bg-red-500/10 p-4'>
            <div className='flex gap-3'>
              <AlertCircle className='h-5 w-5 flex-shrink-0 text-red-500' />
              <p className='text-red-200 text-sm'>{error}</p>
            </div>
          </div>
        )}

        {/* Results */}
        {users.length > 0 && (
          <div className='rounded-lg border border-[var(--border)] bg-[var(--bg-secondary)]'>
            <div className='border-[var(--border)] border-b px-4 py-3'>
              <p className='text-[var(--text-secondary)] text-sm'>
                Found {pagination.total} user{pagination.total !== 1 ? 's' : ''}
              </p>
            </div>
            <Table>
              <TableHeader>
                <TableRow>
                  <TableHead>User</TableHead>
                  <TableHead>Email</TableHead>
                  <TableHead>Role</TableHead>
                  <TableHead>Created</TableHead>
                  <TableHead className='text-right'>Action</TableHead>
                </TableRow>
              </TableHeader>
              <TableBody>
                {users.map((user) => (
                  <TableRow key={user.id}>
                    <TableCell>
                      <div className='flex items-center gap-3'>
                        <Avatar size='sm'>
                          <AvatarImage src={user.image || undefined} alt={user.name} />
                          <AvatarFallback>{getInitials(user.name)}</AvatarFallback>
                        </Avatar>
                        <div className='flex items-center gap-2'>
                          <span className='font-medium text-[var(--text)]'>{user.name}</span>
                          {user.id === currentUserId && <Badge variant='blue'>You</Badge>}
                        </div>
                      </div>
                    </TableCell>
                    <TableCell className='text-[var(--text-secondary)]'>{user.email}</TableCell>
                    <TableCell>
                      {user.role ? (
                        <Badge variant='gray'>{user.role}</Badge>
                      ) : (
                        <span className='text-[var(--text-muted)]'>-</span>
                      )}
                    </TableCell>
                    <TableCell className='text-[var(--text-secondary)]'>
                      {formatDate(user.createdAt)}
                    </TableCell>
                    <TableCell className='text-right'>
                      <Button
                        variant='outline'
                        size='sm'
                        onClick={() => handleImpersonate(user.id)}
                        disabled={impersonatingId === user.id || user.id === currentUserId}
                      >
                        {impersonatingId === user.id ? (
                          <>
                            <Loader2 className='mr-2 h-3 w-3 animate-spin' />
                            Impersonating...
                          </>
                        ) : (
                          'Impersonate'
                        )}
                      </Button>
                    </TableCell>
                  </TableRow>
                ))}
              </TableBody>
            </Table>

            {/* Pagination */}
            {totalPages > 1 && (
              <div className='flex items-center justify-center border-[var(--border)] border-t px-4 py-3'>
                <div className='flex items-center gap-1'>
                  <Button
                    variant='ghost'
                    size='sm'
                    onClick={prevPage}
                    disabled={!hasPrevPage || searching}
                  >
                    <ChevronLeft className='h-3.5 w-3.5' />
                  </Button>

                  <div className='mx-3 flex items-center gap-4'>
                    {Array.from({ length: Math.min(totalPages, 5) }, (_, i) => {
                      let page: number
                      if (totalPages <= 5) {
                        page = i + 1
                      } else if (currentPage <= 3) {
                        page = i + 1
                      } else if (currentPage >= totalPages - 2) {
                        page = totalPages - 4 + i
                      } else {
                        page = currentPage - 2 + i
                      }

                      if (page < 1 || page > totalPages) return null

                      return (
                        <button
                          key={page}
                          onClick={() => goToPage(page)}
                          disabled={searching}
                          className={`font-medium text-sm transition-colors hover:text-[var(--text)] disabled:opacity-50 ${
                            page === currentPage ? 'text-[var(--text)]' : 'text-[var(--text-muted)]'
                          }`}
                        >
                          {page}
                        </button>
                      )
                    })}
                  </div>

                  <Button
                    variant='ghost'
                    size='sm'
                    onClick={nextPage}
                    disabled={!hasNextPage || searching}
                  >
                    <ChevronRight className='h-3.5 w-3.5' />
                  </Button>
                </div>
              </div>
            )}
          </div>
        )}

        {/* Empty state */}
        {searchTerm && !searching && users.length === 0 && !error && (
          <div className='rounded-lg border border-[var(--border)] bg-[var(--bg-secondary)] p-8 text-center'>
            <p className='text-[var(--text-secondary)]'>No users found matching your search</p>
          </div>
        )}
      </div>
    </div>
  )
}
apps/sim/app/admin/impersonate/page.tsx (new file, 31 lines)
@@ -0,0 +1,31 @@
import { db } from '@sim/db'
import { user } from '@sim/db/schema'
import { eq } from 'drizzle-orm'
import { notFound } from 'next/navigation'
import { getSession } from '@/lib/auth'
import ImpersonateClient from './impersonate-client'

export const dynamic = 'force-dynamic'

/**
 * Admin impersonation page - allows superadmins to impersonate other users.
 */
export default async function ImpersonatePage() {
  const session = await getSession()

  if (!session?.user?.id) {
    notFound()
  }

  const [currentUser] = await db
    .select({ role: user.role })
    .from(user)
    .where(eq(user.id, session.user.id))
    .limit(1)

  if (currentUser?.role !== 'superadmin') {
    notFound()
  }

  return <ImpersonateClient currentUserId={session.user.id} />
}
(File diff suppressed because it is too large.)
apps/sim/app/api/admin/impersonate/search/route.ts (new file, 94 lines)
@@ -0,0 +1,94 @@
import { db } from '@sim/db'
import { user } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { count, eq, ilike, or } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'

const logger = createLogger('ImpersonateSearchAPI')

const DEFAULT_LIMIT = 10
const MAX_LIMIT = 50

/**
 * GET /api/admin/impersonate/search
 *
 * Search for users to impersonate. Only accessible by superadmins.
 *
 * Query params:
 * - q: Search term (searches name and email)
 * - limit: Number of results per page (default: 10, max: 50)
 * - offset: Number of results to skip (default: 0)
 *
 * Response: { users: Array<{ id, name, email, image, role, createdAt }>, pagination: { total, limit, offset } }
 */
export async function GET(request: NextRequest) {
  try {
    const session = await getSession()

    if (!session?.user?.id) {
      return new NextResponse(null, { status: 404 })
    }

    const [currentUser] = await db
      .select({ role: user.role })
      .from(user)
      .where(eq(user.id, session.user.id))
      .limit(1)

    if (currentUser?.role !== 'superadmin') {
      return new NextResponse(null, { status: 404 })
    }

    const { searchParams } = new URL(request.url)
    const query = searchParams.get('q')?.trim()
    const limit = Math.min(
      Math.max(1, Number.parseInt(searchParams.get('limit') || String(DEFAULT_LIMIT), 10)),
      MAX_LIMIT
    )
    const offset = Math.max(0, Number.parseInt(searchParams.get('offset') || '0', 10))

    if (!query || query.length < 2) {
      return NextResponse.json({
        users: [],
        pagination: { total: 0, limit, offset },
      })
    }

    const searchPattern = `%${query}%`
    const whereCondition = or(ilike(user.name, searchPattern), ilike(user.email, searchPattern))

    const [totalResult] = await db.select({ count: count() }).from(user).where(whereCondition)

    const users = await db
      .select({
        id: user.id,
        name: user.name,
        email: user.email,
        image: user.image,
        role: user.role,
        createdAt: user.createdAt,
      })
      .from(user)
      .where(whereCondition)
      .limit(limit)
      .offset(offset)

    logger.info(`Superadmin ${session.user.id} searched for users with query: ${query}`)

    return NextResponse.json({
      users: users.map((u) => ({
        ...u,
        createdAt: u.createdAt.toISOString(),
      })),
      pagination: {
        total: totalResult?.count ?? 0,
        limit,
        offset,
      },
    })
  } catch (error) {
    logger.error('Failed to search users for impersonation', { error })
    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}
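For reference, a minimal sketch of calling the search route above from browser code running under a superadmin session. The response shape follows the JSDoc contract in the route; the helper name is illustrative, and the impersonate-client.tsx component earlier in this diff performs the same fetch inline:

// Sketch only: query /api/admin/impersonate/search and return the typed payload.
interface ImpersonateSearchResponse {
  users: Array<{
    id: string
    name: string
    email: string
    image: string | null
    role: string | null
    createdAt: string
  }>
  pagination: { total: number; limit: number; offset: number }
}

async function searchImpersonationTargets(
  q: string,
  limit = 10,
  offset = 0
): Promise<ImpersonateSearchResponse> {
  const params = new URLSearchParams({ q, limit: String(limit), offset: String(offset) })
  const res = await fetch(`/api/admin/impersonate/search?${params.toString()}`)
  // The route returns an empty 404 for unauthenticated or non-superadmin callers.
  if (!res.ok) throw new Error(`Search failed with status ${res.status}`)
  return res.json()
}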
@@ -3,13 +3,60 @@
 *
 * @vitest-environment node
 */
import {
  createMockRequest,
  mockConsoleLogger,
  mockCryptoUuid,
  mockDrizzleOrm,
  mockUuid,
  setupCommonApiMocks,
} from '@sim/testing'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import { createMockRequest, setupAuthApiMocks } from '@/app/api/__test-utils__/utils'

vi.mock('@/lib/core/utils/urls', () => ({
  getBaseUrl: vi.fn(() => 'https://app.example.com'),
}))

/** Setup auth API mocks for testing authentication routes */
function setupAuthApiMocks(
  options: {
    operations?: {
      forgetPassword?: { success?: boolean; error?: string }
      resetPassword?: { success?: boolean; error?: string }
    }
  } = {}
) {
  setupCommonApiMocks()
  mockUuid()
  mockCryptoUuid()
  mockConsoleLogger()
  mockDrizzleOrm()

  const { operations = {} } = options
  const defaultOperations = {
    forgetPassword: { success: true, error: 'Forget password error', ...operations.forgetPassword },
    resetPassword: { success: true, error: 'Reset password error', ...operations.resetPassword },
  }

  const createAuthMethod = (config: { success?: boolean; error?: string }) => {
    return vi.fn().mockImplementation(() => {
      if (config.success) {
        return Promise.resolve()
      }
      return Promise.reject(new Error(config.error))
    })
  }

  vi.doMock('@/lib/auth', () => ({
    auth: {
      api: {
        forgetPassword: createAuthMethod(defaultOperations.forgetPassword),
        resetPassword: createAuthMethod(defaultOperations.resetPassword),
      },
    },
  }))
}

describe('Forget Password API Route', () => {
  beforeEach(() => {
    vi.resetModules()
@@ -3,8 +3,8 @@
 *
 * @vitest-environment node
 */
import { createMockLogger, createMockRequest } from '@sim/testing'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import { createMockLogger, createMockRequest } from '@/app/api/__test-utils__/utils'

describe('OAuth Connections API Route', () => {
  const mockGetSession = vi.fn()

@@ -4,9 +4,9 @@
 * @vitest-environment node
 */

import { createMockLogger } from '@sim/testing'
import { NextRequest } from 'next/server'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import { createMockLogger } from '@/app/api/__test-utils__/utils'

describe('OAuth Credentials API Route', () => {
  const mockGetSession = vi.fn()
@@ -3,8 +3,8 @@
 *
 * @vitest-environment node
 */
import { createMockLogger, createMockRequest } from '@sim/testing'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import { createMockLogger, createMockRequest } from '@/app/api/__test-utils__/utils'

describe('OAuth Disconnect API Route', () => {
  const mockGetSession = vi.fn()

@@ -3,8 +3,8 @@
 *
 * @vitest-environment node
 */
import { createMockLogger, createMockRequest } from '@sim/testing'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import { createMockLogger, createMockRequest } from '@/app/api/__test-utils__/utils'

describe('OAuth Token API Routes', () => {
  const mockGetUserId = vi.fn()
@@ -3,8 +3,55 @@
 *
 * @vitest-environment node
 */
import {
  createMockRequest,
  mockConsoleLogger,
  mockCryptoUuid,
  mockDrizzleOrm,
  mockUuid,
  setupCommonApiMocks,
} from '@sim/testing'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import { createMockRequest, setupAuthApiMocks } from '@/app/api/__test-utils__/utils'

/** Setup auth API mocks for testing authentication routes */
function setupAuthApiMocks(
  options: {
    operations?: {
      forgetPassword?: { success?: boolean; error?: string }
      resetPassword?: { success?: boolean; error?: string }
    }
  } = {}
) {
  setupCommonApiMocks()
  mockUuid()
  mockCryptoUuid()
  mockConsoleLogger()
  mockDrizzleOrm()

  const { operations = {} } = options
  const defaultOperations = {
    forgetPassword: { success: true, error: 'Forget password error', ...operations.forgetPassword },
    resetPassword: { success: true, error: 'Reset password error', ...operations.resetPassword },
  }

  const createAuthMethod = (config: { success?: boolean; error?: string }) => {
    return vi.fn().mockImplementation(() => {
      if (config.success) {
        return Promise.resolve()
      }
      return Promise.reject(new Error(config.error))
    })
  }

  vi.doMock('@/lib/auth', () => ({
    auth: {
      api: {
        forgetPassword: createAuthMethod(defaultOperations.forgetPassword),
        resetPassword: createAuthMethod(defaultOperations.resetPassword),
      },
    },
  }))
}

describe('Reset Password API Route', () => {
  beforeEach(() => {
@@ -5,7 +5,34 @@
 */
import { loggerMock } from '@sim/testing'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import { createMockRequest } from '@/app/api/__test-utils__/utils'

/**
 * Creates a mock NextRequest with cookies support for testing.
 */
function createMockNextRequest(
  method = 'GET',
  body?: unknown,
  headers: Record<string, string> = {},
  url = 'http://localhost:3000/api/test'
): any {
  const headersObj = new Headers({
    'Content-Type': 'application/json',
    ...headers,
  })

  return {
    method,
    headers: headersObj,
    cookies: {
      get: vi.fn().mockReturnValue(undefined),
    },
    json:
      body !== undefined
        ? vi.fn().mockResolvedValue(body)
        : vi.fn().mockRejectedValue(new Error('No body')),
    url,
  }
}

const createMockStream = () => {
  return new ReadableStream({

@@ -71,10 +98,15 @@ vi.mock('@/lib/core/utils/request', () => ({
  generateRequestId: vi.fn().mockReturnValue('test-request-id'),
}))

vi.mock('@/lib/core/security/encryption', () => ({
  decryptSecret: vi.fn().mockResolvedValue({ decrypted: 'test-password' }),
}))

describe('Chat Identifier API Route', () => {
  const mockAddCorsHeaders = vi.fn().mockImplementation((response) => response)
  const mockValidateChatAuth = vi.fn().mockResolvedValue({ authorized: true })
  const mockSetChatAuthCookie = vi.fn()
  const mockValidateAuthToken = vi.fn().mockReturnValue(false)

  const mockChatResult = [
    {

@@ -114,11 +146,16 @@ describe('Chat Identifier API Route', () => {
  beforeEach(() => {
    vi.resetModules()

    vi.doMock('@/app/api/chat/utils', () => ({
    vi.doMock('@/lib/core/security/deployment', () => ({
      addCorsHeaders: mockAddCorsHeaders,
      validateAuthToken: mockValidateAuthToken,
      setDeploymentAuthCookie: vi.fn(),
      isEmailAllowed: vi.fn().mockReturnValue(false),
    }))

    vi.doMock('@/app/api/chat/utils', () => ({
      validateChatAuth: mockValidateChatAuth,
      setChatAuthCookie: mockSetChatAuthCookie,
      validateAuthToken: vi.fn().mockReturnValue(true),
    }))

    // Mock logger - use loggerMock from @sim/testing

@@ -175,7 +212,7 @@ describe('Chat Identifier API Route', () => {

  describe('GET endpoint', () => {
    it('should return chat info for a valid identifier', async () => {
      const req = createMockRequest('GET')
      const req = createMockNextRequest('GET')
      const params = Promise.resolve({ identifier: 'test-chat' })

      const { GET } = await import('@/app/api/chat/[identifier]/route')

@@ -206,7 +243,7 @@ describe('Chat Identifier API Route', () => {
      }
    })

      const req = createMockRequest('GET')
      const req = createMockNextRequest('GET')
      const params = Promise.resolve({ identifier: 'nonexistent' })

      const { GET } = await import('@/app/api/chat/[identifier]/route')

@@ -240,7 +277,7 @@ describe('Chat Identifier API Route', () => {
      }
    })

      const req = createMockRequest('GET')
      const req = createMockNextRequest('GET')
      const params = Promise.resolve({ identifier: 'inactive-chat' })

      const { GET } = await import('@/app/api/chat/[identifier]/route')

@@ -261,7 +298,7 @@ describe('Chat Identifier API Route', () => {
        error: 'auth_required_password',
      }))

      const req = createMockRequest('GET')
      const req = createMockNextRequest('GET')
      const params = Promise.resolve({ identifier: 'password-protected-chat' })

      const { GET } = await import('@/app/api/chat/[identifier]/route')

@@ -282,7 +319,7 @@ describe('Chat Identifier API Route', () => {

  describe('POST endpoint', () => {
    it('should handle authentication requests without input', async () => {
      const req = createMockRequest('POST', { password: 'test-password' })
      const req = createMockNextRequest('POST', { password: 'test-password' })
      const params = Promise.resolve({ identifier: 'password-protected-chat' })

      const { POST } = await import('@/app/api/chat/[identifier]/route')

@@ -298,7 +335,7 @@ describe('Chat Identifier API Route', () => {
    })

    it('should return 400 for requests without input', async () => {
      const req = createMockRequest('POST', {})
      const req = createMockNextRequest('POST', {})
      const params = Promise.resolve({ identifier: 'test-chat' })

      const { POST } = await import('@/app/api/chat/[identifier]/route')

@@ -319,7 +356,7 @@ describe('Chat Identifier API Route', () => {
        error: 'Authentication required',
      }))

      const req = createMockRequest('POST', { input: 'Hello' })
      const req = createMockNextRequest('POST', { input: 'Hello' })
      const params = Promise.resolve({ identifier: 'protected-chat' })

      const { POST } = await import('@/app/api/chat/[identifier]/route')

@@ -350,7 +387,7 @@ describe('Chat Identifier API Route', () => {
      },
    })

      const req = createMockRequest('POST', { input: 'Hello' })
      const req = createMockNextRequest('POST', { input: 'Hello' })
      const params = Promise.resolve({ identifier: 'test-chat' })

      const { POST } = await import('@/app/api/chat/[identifier]/route')

@@ -369,7 +406,10 @@ describe('Chat Identifier API Route', () => {
    })

    it('should return streaming response for valid chat messages', async () => {
      const req = createMockRequest('POST', { input: 'Hello world', conversationId: 'conv-123' })
      const req = createMockNextRequest('POST', {
        input: 'Hello world',
        conversationId: 'conv-123',
      })
      const params = Promise.resolve({ identifier: 'test-chat' })

      const { POST } = await import('@/app/api/chat/[identifier]/route')

@@ -401,7 +441,7 @@ describe('Chat Identifier API Route', () => {
    }, 10000)

    it('should handle streaming response body correctly', async () => {
      const req = createMockRequest('POST', { input: 'Hello world' })
      const req = createMockNextRequest('POST', { input: 'Hello world' })
      const params = Promise.resolve({ identifier: 'test-chat' })

      const { POST } = await import('@/app/api/chat/[identifier]/route')

@@ -431,7 +471,7 @@ describe('Chat Identifier API Route', () => {
      throw new Error('Execution failed')
    })

      const req = createMockRequest('POST', { input: 'Trigger error' })
      const req = createMockNextRequest('POST', { input: 'Trigger error' })
      const params = Promise.resolve({ identifier: 'test-chat' })

      const { POST } = await import('@/app/api/chat/[identifier]/route')

@@ -470,7 +510,7 @@ describe('Chat Identifier API Route', () => {
    })

    it('should pass conversationId to streaming execution when provided', async () => {
      const req = createMockRequest('POST', {
      const req = createMockNextRequest('POST', {
        input: 'Hello world',
        conversationId: 'test-conversation-123',
      })

@@ -492,7 +532,7 @@ describe('Chat Identifier API Route', () => {
    })

    it('should handle missing conversationId gracefully', async () => {
      const req = createMockRequest('POST', { input: 'Hello world' })
      const req = createMockNextRequest('POST', { input: 'Hello world' })
      const params = Promise.resolve({ identifier: 'test-chat' })

      const { POST } = await import('@/app/api/chat/[identifier]/route')
@@ -3,9 +3,9 @@
 *
 * @vitest-environment node
 */
import { mockAuth, mockCryptoUuid, setupCommonApiMocks } from '@sim/testing'
import { NextRequest } from 'next/server'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import { mockAuth, mockCryptoUuid, setupCommonApiMocks } from '@/app/api/__test-utils__/utils'

describe('Copilot API Keys API Route', () => {
  const mockFetch = vi.fn()

@@ -3,14 +3,9 @@
 *
 * @vitest-environment node
 */
import { createMockRequest, mockAuth, mockCryptoUuid, setupCommonApiMocks } from '@sim/testing'
import { NextRequest } from 'next/server'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import {
  createMockRequest,
  mockAuth,
  mockCryptoUuid,
  setupCommonApiMocks,
} from '@/app/api/__test-utils__/utils'

describe('Copilot Chat Delete API Route', () => {
  const mockDelete = vi.fn()
@@ -7,7 +7,6 @@ import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { generateChatTitle } from '@/lib/copilot/chat-title'
import { getCopilotModel } from '@/lib/copilot/config'
import { COPILOT_MODEL_IDS, COPILOT_REQUEST_MODES } from '@/lib/copilot/models'
import { SIM_AGENT_API_URL_DEFAULT, SIM_AGENT_VERSION } from '@/lib/copilot/constants'
import {
  authenticateCopilotRequestSessionOnly,

@@ -41,8 +40,34 @@ const ChatMessageSchema = z.object({
  userMessageId: z.string().optional(), // ID from frontend for the user message
  chatId: z.string().optional(),
  workflowId: z.string().min(1, 'Workflow ID is required'),
  model: z.enum(COPILOT_MODEL_IDS).optional().default('claude-4.5-opus'),
  mode: z.enum(COPILOT_REQUEST_MODES).optional().default('agent'),
  model: z
    .enum([
      'gpt-5-fast',
      'gpt-5',
      'gpt-5-medium',
      'gpt-5-high',
      'gpt-5.1-fast',
      'gpt-5.1',
      'gpt-5.1-medium',
      'gpt-5.1-high',
      'gpt-5-codex',
      'gpt-5.1-codex',
      'gpt-5.2',
      'gpt-5.2-codex',
      'gpt-5.2-pro',
      'gpt-4o',
      'gpt-4.1',
      'o3',
      'claude-4-sonnet',
      'claude-4.5-haiku',
      'claude-4.5-sonnet',
      'claude-4.5-opus',
      'claude-4.1-opus',
      'gemini-3-pro',
    ])
    .optional()
    .default('claude-4.5-opus'),
  mode: z.enum(['ask', 'agent', 'plan']).optional().default('agent'),
  prefetch: z.boolean().optional(),
  createNewChat: z.boolean().optional().default(false),
  stream: z.boolean().optional().default(true),

@@ -270,8 +295,7 @@ export async function POST(req: NextRequest) {
    }

    const defaults = getCopilotModel('chat')
    const selectedModel = model || defaults.model
    const envModel = env.COPILOT_MODEL || defaults.model
    const modelToUse = env.COPILOT_MODEL || defaults.model

    let providerConfig: CopilotProviderConfig | undefined
    const providerEnv = env.COPILOT_PROVIDER as any

@@ -280,7 +304,7 @@ export async function POST(req: NextRequest) {
      if (providerEnv === 'azure-openai') {
        providerConfig = {
          provider: 'azure-openai',
          model: envModel,
          model: modelToUse,
          apiKey: env.AZURE_OPENAI_API_KEY,
          apiVersion: 'preview',
          endpoint: env.AZURE_OPENAI_ENDPOINT,

@@ -288,7 +312,7 @@ export async function POST(req: NextRequest) {
      } else if (providerEnv === 'vertex') {
        providerConfig = {
          provider: 'vertex',
          model: envModel,
          model: modelToUse,
          apiKey: env.COPILOT_API_KEY,
          vertexProject: env.VERTEX_PROJECT,
          vertexLocation: env.VERTEX_LOCATION,

@@ -296,15 +320,12 @@ export async function POST(req: NextRequest) {
      } else {
        providerConfig = {
          provider: providerEnv,
          model: selectedModel,
          model: modelToUse,
          apiKey: env.COPILOT_API_KEY,
        }
      }
    }

    const effectiveMode = mode === 'agent' ? 'build' : mode
    const transportMode = effectiveMode === 'build' ? 'agent' : effectiveMode

    // Determine conversationId to use for this request
    const effectiveConversationId =
      (currentChat?.conversationId as string | undefined) || conversationId

@@ -324,7 +345,7 @@ export async function POST(req: NextRequest) {
      }
    } | null = null

    if (effectiveMode === 'build') {
    if (mode === 'agent') {
      // Build base tools (executed locally, not deferred)
      // Include function_execute for code execution capability
      baseTools = [

@@ -431,8 +452,8 @@ export async function POST(req: NextRequest) {
      userId: authenticatedUserId,
      stream: stream,
      streamToolCalls: true,
      model: selectedModel,
      mode: transportMode,
      model: model,
      mode: mode,
      messageId: userMessageIdToUse,
      version: SIM_AGENT_VERSION,
      ...(providerConfig ? { provider: providerConfig } : {}),

@@ -456,7 +477,7 @@ export async function POST(req: NextRequest) {
      hasConversationId: !!effectiveConversationId,
      hasFileAttachments: processedFileContents.length > 0,
      messageLength: message.length,
      mode: effectiveMode,
      mode,
      hasTools: integrationTools.length > 0,
      toolCount: integrationTools.length,
      hasBaseTools: baseTools.length > 0,
@@ -3,14 +3,9 @@
 *
 * @vitest-environment node
 */
import { createMockRequest, mockAuth, mockCryptoUuid, setupCommonApiMocks } from '@sim/testing'
import { NextRequest } from 'next/server'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import {
  createMockRequest,
  mockAuth,
  mockCryptoUuid,
  setupCommonApiMocks,
} from '@/app/api/__test-utils__/utils'

describe('Copilot Chat Update Messages API Route', () => {
  const mockSelect = vi.fn()

@@ -11,7 +11,6 @@ import {
  createRequestTracker,
  createUnauthorizedResponse,
} from '@/lib/copilot/request-helpers'
import { COPILOT_MODES } from '@/lib/copilot/models'

const logger = createLogger('CopilotChatUpdateAPI')

@@ -46,7 +45,7 @@ const UpdateMessagesSchema = z.object({
  planArtifact: z.string().nullable().optional(),
  config: z
    .object({
      mode: z.enum(COPILOT_MODES).optional(),
      mode: z.enum(['ask', 'build', 'plan']).optional(),
      model: z.string().optional(),
    })
    .nullable()
@@ -3,8 +3,8 @@
 *
 * @vitest-environment node
 */
import { mockCryptoUuid, setupCommonApiMocks } from '@sim/testing'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import { mockCryptoUuid, setupCommonApiMocks } from '@/app/api/__test-utils__/utils'

describe('Copilot Chats List API Route', () => {
  const mockSelect = vi.fn()

@@ -3,14 +3,9 @@
 *
 * @vitest-environment node
 */
import { createMockRequest, mockAuth, mockCryptoUuid, setupCommonApiMocks } from '@sim/testing'
import { NextRequest } from 'next/server'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import {
  createMockRequest,
  mockAuth,
  mockCryptoUuid,
  setupCommonApiMocks,
} from '@/app/api/__test-utils__/utils'

describe('Copilot Checkpoints Revert API Route', () => {
  const mockSelect = vi.fn()

@@ -3,14 +3,9 @@
 *
 * @vitest-environment node
 */
import { createMockRequest, mockAuth, mockCryptoUuid, setupCommonApiMocks } from '@sim/testing'
import { NextRequest } from 'next/server'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import {
  createMockRequest,
  mockAuth,
  mockCryptoUuid,
  setupCommonApiMocks,
} from '@/app/api/__test-utils__/utils'

describe('Copilot Checkpoints API Route', () => {
  const mockSelect = vi.fn()
@@ -3,14 +3,9 @@
 *
 * @vitest-environment node
 */
import { createMockRequest, mockAuth, mockCryptoUuid, setupCommonApiMocks } from '@sim/testing'
import { NextRequest } from 'next/server'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import {
  createMockRequest,
  mockAuth,
  mockCryptoUuid,
  setupCommonApiMocks,
} from '@/app/api/__test-utils__/utils'

describe('Copilot Confirm API Route', () => {
  const mockRedisExists = vi.fn()

@@ -3,13 +3,9 @@
 *
 * @vitest-environment node
 */
import { createMockRequest, mockCryptoUuid, setupCommonApiMocks } from '@sim/testing'
import { NextRequest } from 'next/server'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import {
  createMockRequest,
  mockCryptoUuid,
  setupCommonApiMocks,
} from '@/app/api/__test-utils__/utils'

describe('Copilot Feedback API Route', () => {
  const mockInsert = vi.fn()

@@ -3,13 +3,9 @@
 *
 * @vitest-environment node
 */
import { createMockRequest, mockCryptoUuid, setupCommonApiMocks } from '@sim/testing'
import { NextRequest } from 'next/server'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import {
  createMockRequest,
  mockCryptoUuid,
  setupCommonApiMocks,
} from '@/app/api/__test-utils__/utils'

describe('Copilot Stats API Route', () => {
  const mockFetch = vi.fn()
@@ -2,13 +2,12 @@ import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import type { CopilotModelId } from '@/lib/copilot/models'
import { db } from '@/../../packages/db'
import { settings } from '@/../../packages/db/schema'

const logger = createLogger('CopilotUserModelsAPI')

const DEFAULT_ENABLED_MODELS: Record<CopilotModelId, boolean> = {
const DEFAULT_ENABLED_MODELS: Record<string, boolean> = {
  'gpt-4o': false,
  'gpt-4.1': false,
  'gpt-5-fast': false,

@@ -29,7 +28,7 @@ const DEFAULT_ENABLED_MODELS: Record<CopilotModelId, boolean> = {
  'claude-4.5-haiku': true,
  'claude-4.5-sonnet': true,
  'claude-4.5-opus': true,
  'claude-4.1-opus': false,
  // 'claude-4.1-opus': true,
  'gemini-3-pro': true,
}

@@ -55,9 +54,7 @@ export async function GET(request: NextRequest) {

  const mergedModels = { ...DEFAULT_ENABLED_MODELS }
  for (const [modelId, enabled] of Object.entries(userModelsMap)) {
    if (modelId in mergedModels) {
      mergedModels[modelId as CopilotModelId] = enabled
    }
    mergedModels[modelId] = enabled
  }

  const hasNewModels = Object.keys(DEFAULT_ENABLED_MODELS).some(
@@ -22,15 +22,15 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
    return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
  }

  // Check if user is a super user
  const currentUser = await db.select().from(user).where(eq(user.id, session.user.id)).limit(1)
  const hasAdminPrivileges =
    currentUser[0]?.role === 'admin' || currentUser[0]?.role === 'superadmin'

  if (!currentUser[0]?.isSuperUser) {
    logger.warn(`[${requestId}] Non-super user attempted to verify creator: ${id}`)
    return NextResponse.json({ error: 'Only super users can verify creators' }, { status: 403 })
  if (!hasAdminPrivileges) {
    logger.warn(`[${requestId}] Non-admin user attempted to verify creator: ${id}`)
    return NextResponse.json({ error: 'Only admin users can verify creators' }, { status: 403 })
  }

  // Check if creator exists
  const existingCreator = await db
    .select()
    .from(templateCreators)

@@ -42,7 +42,6 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
    return NextResponse.json({ error: 'Creator not found' }, { status: 404 })
  }

  // Update creator verified status to true
  await db
    .update(templateCreators)
    .set({ verified: true, updatedAt: new Date() })

@@ -75,15 +74,15 @@ export async function DELETE(
    return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
  }

  // Check if user is a super user
  const currentUser = await db.select().from(user).where(eq(user.id, session.user.id)).limit(1)
  const hasAdminPrivileges =
    currentUser[0]?.role === 'admin' || currentUser[0]?.role === 'superadmin'

  if (!currentUser[0]?.isSuperUser) {
    logger.warn(`[${requestId}] Non-super user attempted to unverify creator: ${id}`)
    return NextResponse.json({ error: 'Only super users can unverify creators' }, { status: 403 })
  if (!hasAdminPrivileges) {
    logger.warn(`[${requestId}] Non-admin user attempted to unverify creator: ${id}`)
    return NextResponse.json({ error: 'Only admin users can unverify creators' }, { status: 403 })
  }

  // Check if creator exists
  const existingCreator = await db
    .select()
    .from(templateCreators)

@@ -95,7 +94,6 @@ export async function DELETE(
    return NextResponse.json({ error: 'Creator not found' }, { status: 404 })
  }

  // Update creator verified status to false
  await db
    .update(templateCreators)
    .set({ verified: false, updatedAt: new Date() })
@@ -1,5 +1,87 @@
import {
  createMockRequest,
  mockAuth,
  mockCryptoUuid,
  mockUuid,
  setupCommonApiMocks,
} from '@sim/testing'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import { createMockRequest, setupFileApiMocks } from '@/app/api/__test-utils__/utils'

/** Setup file API mocks for file delete tests */
function setupFileApiMocks(
  options: {
    authenticated?: boolean
    storageProvider?: 's3' | 'blob' | 'local'
    cloudEnabled?: boolean
  } = {}
) {
  const { authenticated = true, storageProvider = 's3', cloudEnabled = true } = options

  setupCommonApiMocks()
  mockUuid()
  mockCryptoUuid()

  const authMocks = mockAuth()
  if (authenticated) {
    authMocks.setAuthenticated()
  } else {
    authMocks.setUnauthenticated()
  }

  vi.doMock('@/lib/auth/hybrid', () => ({
    checkHybridAuth: vi.fn().mockResolvedValue({
      success: authenticated,
      userId: authenticated ? 'test-user-id' : undefined,
      error: authenticated ? undefined : 'Unauthorized',
    }),
  }))

  vi.doMock('@/app/api/files/authorization', () => ({
    verifyFileAccess: vi.fn().mockResolvedValue(true),
    verifyWorkspaceFileAccess: vi.fn().mockResolvedValue(true),
  }))

  const uploadFileMock = vi.fn().mockResolvedValue({
    path: '/api/files/serve/test-key.txt',
    key: 'test-key.txt',
    name: 'test.txt',
    size: 100,
    type: 'text/plain',
  })
  const downloadFileMock = vi.fn().mockResolvedValue(Buffer.from('test content'))
  const deleteFileMock = vi.fn().mockResolvedValue(undefined)
  const hasCloudStorageMock = vi.fn().mockReturnValue(cloudEnabled)

  vi.doMock('@/lib/uploads', () => ({
    getStorageProvider: vi.fn().mockReturnValue(storageProvider),
    isUsingCloudStorage: vi.fn().mockReturnValue(cloudEnabled),
    StorageService: {
      uploadFile: uploadFileMock,
      downloadFile: downloadFileMock,
      deleteFile: deleteFileMock,
      hasCloudStorage: hasCloudStorageMock,
    },
    uploadFile: uploadFileMock,
    downloadFile: downloadFileMock,
    deleteFile: deleteFileMock,
    hasCloudStorage: hasCloudStorageMock,
  }))

  vi.doMock('@/lib/uploads/core/storage-service', () => ({
    uploadFile: uploadFileMock,
    downloadFile: downloadFileMock,
    deleteFile: deleteFileMock,
    hasCloudStorage: hasCloudStorageMock,
  }))

  vi.doMock('fs/promises', () => ({
    unlink: vi.fn().mockResolvedValue(undefined),
    access: vi.fn().mockResolvedValue(undefined),
    stat: vi.fn().mockResolvedValue({ isFile: () => true }),
  }))

  return { auth: authMocks }
}

describe('File Delete API Route', () => {
  beforeEach(() => {
@@ -1,12 +1,59 @@
import path from 'path'
import { NextRequest } from 'next/server'
/**
 * Tests for file parse API route
 *
 * @vitest-environment node
 */
import {
  createMockRequest,
  mockAuth,
  mockCryptoUuid,
  mockUuid,
  setupCommonApiMocks,
} from '@sim/testing'
import { NextRequest } from 'next/server'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import { createMockRequest, setupFileApiMocks } from '@/app/api/__test-utils__/utils'

function setupFileApiMocks(
  options: {
    authenticated?: boolean
    storageProvider?: 's3' | 'blob' | 'local'
    cloudEnabled?: boolean
  } = {}
) {
  const { authenticated = true, storageProvider = 's3', cloudEnabled = true } = options

  setupCommonApiMocks()
  mockUuid()
  mockCryptoUuid()

  const authMocks = mockAuth()
  if (authenticated) {
    authMocks.setAuthenticated()
  } else {
    authMocks.setUnauthenticated()
  }

  vi.doMock('@/lib/auth/hybrid', () => ({
    checkHybridAuth: vi.fn().mockResolvedValue({
      success: authenticated,
      userId: authenticated ? 'test-user-id' : undefined,
      error: authenticated ? undefined : 'Unauthorized',
    }),
  }))

  vi.doMock('@/app/api/files/authorization', () => ({
    verifyFileAccess: vi.fn().mockResolvedValue(true),
    verifyWorkspaceFileAccess: vi.fn().mockResolvedValue(true),
  }))

  vi.doMock('@/lib/uploads', () => ({
    getStorageProvider: vi.fn().mockReturnValue(storageProvider),
    isUsingCloudStorage: vi.fn().mockReturnValue(cloudEnabled),
  }))

  return { auth: authMocks }
}

const mockJoin = vi.fn((...args: string[]): string => {
  if (args[0] === '/test/uploads') {
@@ -1,6 +1,6 @@
import { mockAuth, mockCryptoUuid, mockUuid, setupCommonApiMocks } from '@sim/testing'
import { NextRequest } from 'next/server'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import { setupFileApiMocks } from '@/app/api/__test-utils__/utils'

/**
 * Tests for file presigned API route

@@ -8,6 +8,106 @@ import { setupFileApiMocks } from '@/app/api/__test-utils__/utils'
 * @vitest-environment node
 */

function setupFileApiMocks(
  options: {
    authenticated?: boolean
    storageProvider?: 's3' | 'blob' | 'local'
    cloudEnabled?: boolean
  } = {}
) {
  const { authenticated = true, storageProvider = 's3', cloudEnabled = true } = options

  setupCommonApiMocks()
  mockUuid()
  mockCryptoUuid()

  const authMocks = mockAuth()
  if (authenticated) {
    authMocks.setAuthenticated()
  } else {
    authMocks.setUnauthenticated()
  }

  vi.doMock('@/lib/auth/hybrid', () => ({
    checkHybridAuth: vi.fn().mockResolvedValue({
      success: authenticated,
      userId: authenticated ? 'test-user-id' : undefined,
      error: authenticated ? undefined : 'Unauthorized',
    }),
  }))

  vi.doMock('@/app/api/files/authorization', () => ({
    verifyFileAccess: vi.fn().mockResolvedValue(true),
    verifyWorkspaceFileAccess: vi.fn().mockResolvedValue(true),
  }))

  const useBlobStorage = storageProvider === 'blob' && cloudEnabled
  const useS3Storage = storageProvider === 's3' && cloudEnabled

  vi.doMock('@/lib/uploads/config', () => ({
    USE_BLOB_STORAGE: useBlobStorage,
    USE_S3_STORAGE: useS3Storage,
    UPLOAD_DIR: '/uploads',
    getStorageConfig: vi.fn().mockReturnValue(
      useBlobStorage
        ? {
            accountName: 'testaccount',
            accountKey: 'testkey',
            connectionString: 'testconnection',
            containerName: 'testcontainer',
          }
        : {
            bucket: 'test-bucket',
            region: 'us-east-1',
          }
    ),
    isUsingCloudStorage: vi.fn().mockReturnValue(cloudEnabled),
    getStorageProvider: vi
      .fn()
      .mockReturnValue(
        storageProvider === 'blob' ? 'Azure Blob' : storageProvider === 's3' ? 'S3' : 'Local'
      ),
  }))

  const mockGeneratePresignedUploadUrl = vi.fn().mockImplementation(async (opts) => {
    const timestamp = Date.now()
    const safeFileName = opts.fileName.replace(/[^a-zA-Z0-9.-]/g, '_')
    const key = `${opts.context}/${timestamp}-ik3a6w4-${safeFileName}`
    return {
      url: 'https://example.com/presigned-url',
      key,
    }
  })

  vi.doMock('@/lib/uploads/core/storage-service', () => ({
    hasCloudStorage: vi.fn().mockReturnValue(cloudEnabled),
    generatePresignedUploadUrl: mockGeneratePresignedUploadUrl,
    generatePresignedDownloadUrl: vi.fn().mockResolvedValue('https://example.com/presigned-url'),
  }))

  vi.doMock('@/lib/uploads/utils/validation', () => ({
    validateFileType: vi.fn().mockReturnValue(null),
  }))

  vi.doMock('@/lib/uploads', () => ({
    CopilotFiles: {
      generateCopilotUploadUrl: vi.fn().mockResolvedValue({
        url: 'https://example.com/presigned-url',
        key: 'copilot/test-key.txt',
      }),
      isImageFileType: vi.fn().mockReturnValue(true),
    },
    getStorageProvider: vi
      .fn()
      .mockReturnValue(
        storageProvider === 'blob' ? 'Azure Blob' : storageProvider === 's3' ? 'S3' : 'Local'
      ),
    isUsingCloudStorage: vi.fn().mockReturnValue(cloudEnabled),
  }))

  return { auth: authMocks }
}

describe('/api/files/presigned', () => {
  beforeEach(() => {
    vi.clearAllMocks()

@@ -210,7 +310,7 @@ describe('/api/files/presigned', () => {
      const data = await response.json()

      expect(response.status).toBe(200)
      expect(data.fileInfo.key).toMatch(/^kb\/.*knowledge-doc\.pdf$/)
      expect(data.fileInfo.key).toMatch(/^knowledge-base\/.*knowledge-doc\.pdf$/)
      expect(data.directUploadSupported).toBe(true)
    })
@@ -1,11 +1,49 @@
import { NextRequest } from 'next/server'
/**
 * Tests for file serve API route
 *
 * @vitest-environment node
 */
import {
  defaultMockUser,
  mockAuth,
  mockCryptoUuid,
  mockUuid,
  setupCommonApiMocks,
} from '@sim/testing'
import { NextRequest } from 'next/server'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import { setupApiTestMocks } from '@/app/api/__test-utils__/utils'

function setupApiTestMocks(
  options: {
    authenticated?: boolean
    user?: { id: string; email: string }
    withFileSystem?: boolean
    withUploadUtils?: boolean
  } = {}
) {
  const { authenticated = true, user = defaultMockUser, withFileSystem = false } = options

  setupCommonApiMocks()
  mockUuid()
  mockCryptoUuid()

  const authMocks = mockAuth(user)
  if (authenticated) {
    authMocks.setAuthenticated(user)
  } else {
    authMocks.setUnauthenticated()
  }

  if (withFileSystem) {
    vi.doMock('fs/promises', () => ({
      readFile: vi.fn().mockResolvedValue(Buffer.from('test content')),
      access: vi.fn().mockResolvedValue(undefined),
      stat: vi.fn().mockResolvedValue({ isFile: () => true, size: 100 }),
    }))
  }

  return { auth: authMocks }
}

describe('File Serve API Route', () => {
  beforeEach(() => {

@@ -31,6 +69,17 @@ describe('File Serve API Route', () => {
      existsSync: vi.fn().mockReturnValue(true),
    }))

    vi.doMock('@/lib/uploads', () => ({
      CopilotFiles: {
        downloadCopilotFile: vi.fn(),
      },
      isUsingCloudStorage: vi.fn().mockReturnValue(false),
    }))

    vi.doMock('@/lib/uploads/utils/file-utils', () => ({
      inferContextFromKey: vi.fn().mockReturnValue('workspace'),
    }))

    vi.doMock('@/app/api/files/utils', () => ({
      FileNotFoundError: class FileNotFoundError extends Error {
        constructor(message: string) {

@@ -126,6 +175,17 @@ describe('File Serve API Route', () => {
      verifyFileAccess: vi.fn().mockResolvedValue(true),
    }))

    vi.doMock('@/lib/uploads', () => ({
      CopilotFiles: {
        downloadCopilotFile: vi.fn(),
      },
      isUsingCloudStorage: vi.fn().mockReturnValue(false),
    }))

    vi.doMock('@/lib/uploads/utils/file-utils', () => ({
      inferContextFromKey: vi.fn().mockReturnValue('workspace'),
    }))

    const req = new NextRequest(
      'http://localhost:3000/api/files/serve/workspace/test-workspace-id/nested-path-file.txt'
    )
@@ -1,11 +1,76 @@
|
||||
import { NextRequest } from 'next/server'
|
||||
/**
|
||||
* Tests for file upload API route
|
||||
*
|
||||
* @vitest-environment node
|
||||
*/
|
||||
import { mockAuth, mockCryptoUuid, mockUuid, setupCommonApiMocks } from '@sim/testing'
|
||||
import { NextRequest } from 'next/server'
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
import { setupFileApiMocks } from '@/app/api/__test-utils__/utils'
|
||||
|
||||
function setupFileApiMocks(
|
||||
options: {
|
||||
authenticated?: boolean
|
||||
storageProvider?: 's3' | 'blob' | 'local'
|
||||
cloudEnabled?: boolean
|
||||
} = {}
|
||||
) {
|
||||
const { authenticated = true, storageProvider = 's3', cloudEnabled = true } = options
|
||||
|
||||
setupCommonApiMocks()
|
||||
mockUuid()
|
||||
mockCryptoUuid()
|
||||
|
||||
const authMocks = mockAuth()
|
||||
if (authenticated) {
|
||||
authMocks.setAuthenticated()
|
||||
} else {
|
||||
authMocks.setUnauthenticated()
|
||||
}
|
||||
|
||||
vi.doMock('@/lib/auth/hybrid', () => ({
|
||||
checkHybridAuth: vi.fn().mockResolvedValue({
|
||||
success: authenticated,
|
||||
userId: authenticated ? 'test-user-id' : undefined,
|
||||
error: authenticated ? undefined : 'Unauthorized',
|
||||
}),
|
||||
}))
|
||||
|
||||
vi.doMock('@/app/api/files/authorization', () => ({
|
||||
verifyFileAccess: vi.fn().mockResolvedValue(true),
|
||||
verifyWorkspaceFileAccess: vi.fn().mockResolvedValue(true),
|
||||
verifyKBFileAccess: vi.fn().mockResolvedValue(true),
|
||||
verifyCopilotFileAccess: vi.fn().mockResolvedValue(true),
|
||||
}))
|
||||
|
||||
vi.doMock('@/lib/uploads/contexts/workspace', () => ({
|
||||
uploadWorkspaceFile: vi.fn().mockResolvedValue({
|
||||
id: 'test-file-id',
|
||||
name: 'test.txt',
|
||||
url: '/api/files/serve/workspace/test-workspace-id/test-file.txt',
|
||||
size: 100,
|
||||
type: 'text/plain',
|
||||
key: 'workspace/test-workspace-id/1234567890-test.txt',
|
||||
uploadedAt: new Date().toISOString(),
|
||||
expiresAt: new Date(Date.now() + 24 * 60 * 60 * 1000).toISOString(),
|
||||
}),
|
||||
}))
|
||||
|
||||
const uploadFileMock = vi.fn().mockResolvedValue({
|
||||
path: '/api/files/serve/test-key.txt',
|
||||
key: 'test-key.txt',
|
||||
name: 'test.txt',
|
||||
size: 100,
|
||||
type: 'text/plain',
|
||||
})
|
||||
|
||||
vi.doMock('@/lib/uploads', () => ({
|
||||
getStorageProvider: vi.fn().mockReturnValue(storageProvider),
|
||||
isUsingCloudStorage: vi.fn().mockReturnValue(cloudEnabled),
|
||||
uploadFile: uploadFileMock,
|
||||
}))
|
||||
|
||||
return { auth: authMocks }
|
||||
}
|
||||
|
||||
describe('File Upload API Route', () => {
|
||||
const createMockFormData = (files: File[], context = 'workspace'): FormData => {
|
||||
|
||||
@@ -3,15 +3,24 @@
|
||||
*
|
||||
* @vitest-environment node
|
||||
*/
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
import {
|
||||
type CapturedFolderValues,
|
||||
createMockRequest,
|
||||
type MockUser,
|
||||
mockAuth,
|
||||
mockLogger,
|
||||
mockConsoleLogger,
|
||||
setupCommonApiMocks,
|
||||
} from '@/app/api/__test-utils__/utils'
|
||||
} from '@sim/testing'
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
|
||||
/** Type for captured folder values in tests */
|
||||
interface CapturedFolderValues {
|
||||
name?: string
|
||||
color?: string
|
||||
parentId?: string | null
|
||||
isExpanded?: boolean
|
||||
sortOrder?: number
|
||||
updatedAt?: Date
|
||||
}
|
||||
|
||||
interface FolderDbMockOptions {
|
||||
folderLookupResult?: any
|
||||
@@ -21,6 +30,8 @@ interface FolderDbMockOptions {
|
||||
}
|
||||
|
||||
describe('Individual Folder API Route', () => {
|
||||
let mockLogger: ReturnType<typeof mockConsoleLogger>
|
||||
|
||||
const TEST_USER: MockUser = {
|
||||
id: 'user-123',
|
||||
email: 'test@example.com',
|
||||
@@ -39,7 +50,8 @@ describe('Individual Folder API Route', () => {
|
||||
updatedAt: new Date('2024-01-01T00:00:00Z'),
|
||||
}
|
||||
|
||||
const { mockAuthenticatedUser, mockUnauthenticated } = mockAuth(TEST_USER)
|
||||
let mockAuthenticatedUser: (user?: MockUser) => void
|
||||
let mockUnauthenticated: () => void
|
||||
const mockGetUserEntityPermissions = vi.fn()
|
||||
|
||||
function createFolderDbMock(options: FolderDbMockOptions = {}) {
|
||||
@@ -110,6 +122,10 @@ describe('Individual Folder API Route', () => {
|
||||
vi.resetModules()
|
||||
vi.clearAllMocks()
|
||||
setupCommonApiMocks()
|
||||
mockLogger = mockConsoleLogger()
|
||||
const auth = mockAuth(TEST_USER)
|
||||
mockAuthenticatedUser = auth.mockAuthenticatedUser
|
||||
mockUnauthenticated = auth.mockUnauthenticated
|
||||
|
||||
mockGetUserEntityPermissions.mockResolvedValue('admin')
|
||||
|
||||
|
||||
@@ -3,17 +3,46 @@
|
||||
*
|
||||
* @vitest-environment node
|
||||
*/
|
||||
import { createMockRequest, mockAuth, mockConsoleLogger, setupCommonApiMocks } from '@sim/testing'
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
import {
|
||||
type CapturedFolderValues,
|
||||
createMockRequest,
|
||||
createMockTransaction,
|
||||
mockAuth,
|
||||
mockLogger,
|
||||
setupCommonApiMocks,
|
||||
} from '@/app/api/__test-utils__/utils'
|
||||
|
||||
interface CapturedFolderValues {
|
||||
name?: string
|
||||
color?: string
|
||||
parentId?: string | null
|
||||
isExpanded?: boolean
|
||||
sortOrder?: number
|
||||
updatedAt?: Date
|
||||
}
|
||||
|
||||
function createMockTransaction(mockData: {
|
||||
selectData?: Array<{ id: string; [key: string]: unknown }>
|
||||
insertResult?: Array<{ id: string; [key: string]: unknown }>
|
||||
}) {
|
||||
const { selectData = [], insertResult = [] } = mockData
|
||||
return vi.fn().mockImplementation(async (callback: (tx: unknown) => Promise<unknown>) => {
|
||||
const tx = {
|
||||
select: vi.fn().mockReturnValue({
|
||||
from: vi.fn().mockReturnValue({
|
||||
where: vi.fn().mockReturnValue({
|
||||
orderBy: vi.fn().mockReturnValue({
|
||||
limit: vi.fn().mockReturnValue(selectData),
|
||||
}),
|
||||
}),
|
||||
}),
|
||||
}),
|
||||
insert: vi.fn().mockReturnValue({
|
||||
values: vi.fn().mockReturnValue({
|
||||
returning: vi.fn().mockReturnValue(insertResult),
|
||||
}),
|
||||
}),
|
||||
}
|
||||
return await callback(tx)
|
||||
})
|
||||
}
|
||||
|
||||
describe('Folders API Route', () => {
|
||||
let mockLogger: ReturnType<typeof mockConsoleLogger>
|
||||
const mockFolders = [
|
||||
{
|
||||
id: 'folder-1',
|
||||
@@ -41,7 +70,8 @@ describe('Folders API Route', () => {
|
||||
},
|
||||
]
|
||||
|
||||
const { mockAuthenticatedUser, mockUnauthenticated } = mockAuth()
|
||||
let mockAuthenticatedUser: () => void
|
||||
let mockUnauthenticated: () => void
|
||||
const mockUUID = 'mock-uuid-12345678-90ab-cdef-1234-567890abcdef'
|
||||
|
||||
const mockSelect = vi.fn()
|
||||
@@ -63,6 +93,10 @@ describe('Folders API Route', () => {
|
||||
})
|
||||
|
||||
setupCommonApiMocks()
|
||||
mockLogger = mockConsoleLogger()
|
||||
const auth = mockAuth()
|
||||
mockAuthenticatedUser = auth.mockAuthenticatedUser
|
||||
mockUnauthenticated = auth.mockUnauthenticated
|
||||
|
||||
mockSelect.mockReturnValue({ from: mockFrom })
|
||||
mockFrom.mockReturnValue({ where: mockWhere })
|
||||
|
||||
@@ -9,6 +9,7 @@ import { addCorsHeaders, validateAuthToken } from '@/lib/core/security/deploymen
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import { preprocessExecution } from '@/lib/execution/preprocessing'
|
||||
import { LoggingSession } from '@/lib/logs/execution/logging-session'
|
||||
import { normalizeInputFormatValue } from '@/lib/workflows/input-format'
|
||||
import { createStreamingResponse } from '@/lib/workflows/streaming/streaming'
|
||||
import { setFormAuthCookie, validateFormAuth } from '@/app/api/form/utils'
|
||||
import { createErrorResponse, createSuccessResponse } from '@/app/api/workflows/utils'
|
||||
@@ -34,22 +35,17 @@ async function getWorkflowInputSchema(workflowId: string): Promise<any[]> {
|
||||
.from(workflowBlocks)
|
||||
.where(eq(workflowBlocks.workflowId, workflowId))
|
||||
|
||||
// Find the start block (starter or start_trigger type)
|
||||
const startBlock = blocks.find(
|
||||
(block) => block.type === 'starter' || block.type === 'start_trigger'
|
||||
(block) =>
|
||||
block.type === 'starter' || block.type === 'start_trigger' || block.type === 'input_trigger'
|
||||
)
|
||||
|
||||
if (!startBlock) {
|
||||
return []
|
||||
}
|
||||
|
||||
// Extract inputFormat from subBlocks
|
||||
const subBlocks = startBlock.subBlocks as Record<string, any> | null
|
||||
if (!subBlocks?.inputFormat?.value) {
|
||||
return []
|
||||
}
|
||||
|
||||
return Array.isArray(subBlocks.inputFormat.value) ? subBlocks.inputFormat.value : []
|
||||
return normalizeInputFormatValue(subBlocks?.inputFormat?.value)
|
||||
} catch (error) {
|
||||
logger.error('Error fetching workflow input schema:', error)
|
||||
return []
|
||||
|
||||
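The normalizeInputFormatValue helper referenced above is imported from '@/lib/workflows/input-format' but its body is not part of this diff; a plausible minimal shape of such a normalizer is sketched below as an assumption only, not the shipped implementation:

// Sketch: coerce a stored inputFormat value into an array of field definitions.
function normalizeInputFormatValueSketch(value: unknown): any[] {
  if (Array.isArray(value)) return value
  if (value && typeof value === 'object') return Object.values(value as Record<string, any>)
  return []
}
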
@@ -3,10 +3,9 @@
|
||||
*
|
||||
* @vitest-environment node
|
||||
*/
|
||||
import { loggerMock } from '@sim/testing'
|
||||
import { createMockRequest, loggerMock } from '@sim/testing'
|
||||
import { NextRequest } from 'next/server'
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
import { createMockRequest } from '@/app/api/__test-utils__/utils'
|
||||
|
||||
vi.mock('@/lib/execution/isolated-vm', () => ({
|
||||
executeInIsolatedVM: vi.fn().mockImplementation(async (req) => {
|
||||
|
||||
@@ -3,14 +3,14 @@
|
||||
*
|
||||
* @vitest-environment node
|
||||
*/
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
import {
|
||||
createMockRequest,
|
||||
mockAuth,
|
||||
mockConsoleLogger,
|
||||
mockDrizzleOrm,
|
||||
mockKnowledgeSchemas,
|
||||
} from '@/app/api/__test-utils__/utils'
|
||||
} from '@sim/testing'
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
|
||||
mockKnowledgeSchemas()
|
||||
|
||||
|
||||
@@ -3,14 +3,14 @@
|
||||
*
|
||||
* @vitest-environment node
|
||||
*/
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
import {
|
||||
createMockRequest,
|
||||
mockAuth,
|
||||
mockConsoleLogger,
|
||||
mockDrizzleOrm,
|
||||
mockKnowledgeSchemas,
|
||||
} from '@/app/api/__test-utils__/utils'
|
||||
} from '@sim/testing'
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
|
||||
mockKnowledgeSchemas()
|
||||
|
||||
|
||||
@@ -3,14 +3,14 @@
|
||||
*
|
||||
* @vitest-environment node
|
||||
*/
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
import {
|
||||
createMockRequest,
|
||||
mockAuth,
|
||||
mockConsoleLogger,
|
||||
mockDrizzleOrm,
|
||||
mockKnowledgeSchemas,
|
||||
} from '@/app/api/__test-utils__/utils'
|
||||
} from '@sim/testing'
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
|
||||
mockKnowledgeSchemas()
|
||||
mockDrizzleOrm()
|
||||
|
||||
@@ -3,14 +3,14 @@
|
||||
*
|
||||
* @vitest-environment node
|
||||
*/
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
import {
|
||||
createMockRequest,
|
||||
mockAuth,
|
||||
mockConsoleLogger,
|
||||
mockDrizzleOrm,
|
||||
mockKnowledgeSchemas,
|
||||
} from '@/app/api/__test-utils__/utils'
|
||||
} from '@sim/testing'
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
|
||||
mockKnowledgeSchemas()
|
||||
mockDrizzleOrm()
|
||||
|
||||
@@ -5,13 +5,13 @@
|
||||
*
|
||||
* @vitest-environment node
|
||||
*/
|
||||
import { createEnvMock } from '@sim/testing'
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
import {
|
||||
createEnvMock,
|
||||
createMockRequest,
|
||||
mockConsoleLogger,
|
||||
mockKnowledgeSchemas,
|
||||
} from '@/app/api/__test-utils__/utils'
|
||||
} from '@sim/testing'
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
|
||||
vi.mock('drizzle-orm', () => ({
|
||||
and: vi.fn().mockImplementation((...args) => ({ and: args })),
|
||||
|
@@ -4,6 +4,8 @@ import {
  invitation,
  member,
  organization,
  permissionGroup,
  permissionGroupMember,
  permissions,
  subscription as subscriptionTable,
  user,

@@ -17,6 +19,7 @@ import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getEmailSubject, renderInvitationEmail } from '@/components/emails'
import { getSession } from '@/lib/auth'
import { hasAccessControlAccess } from '@/lib/billing'
import { requireStripeClient } from '@/lib/billing/stripe-client'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { sendEmail } from '@/lib/messaging/email/mailer'

@@ -382,6 +385,47 @@ export async function PUT(
        // Don't fail the whole invitation acceptance due to this
      }

      // Auto-assign to permission group if one has autoAddNewMembers enabled
      try {
        const hasAccessControl = await hasAccessControlAccess(session.user.id)
        if (hasAccessControl) {
          const [autoAddGroup] = await tx
            .select({ id: permissionGroup.id, name: permissionGroup.name })
            .from(permissionGroup)
            .where(
              and(
                eq(permissionGroup.organizationId, organizationId),
                eq(permissionGroup.autoAddNewMembers, true)
              )
            )
            .limit(1)

          if (autoAddGroup) {
            await tx.insert(permissionGroupMember).values({
              id: randomUUID(),
              permissionGroupId: autoAddGroup.id,
              userId: session.user.id,
              assignedBy: null,
              assignedAt: new Date(),
            })

            logger.info('Auto-assigned new member to permission group', {
              userId: session.user.id,
              organizationId,
              permissionGroupId: autoAddGroup.id,
              permissionGroupName: autoAddGroup.name,
            })
          }
        }
      } catch (error) {
        logger.error('Failed to auto-assign user to permission group', {
          userId: session.user.id,
          organizationId,
          error,
        })
        // Don't fail the whole invitation acceptance due to this
      }

      const linkedWorkspaceInvitations = await tx
        .select()
        .from(workspaceInvitation)

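The same auto-add lookup reappears in the permission group routes further down; if it were ever factored out, a helper along these lines would capture the invariant that at most one group per organization carries the flag. This is an illustrative sketch only, not part of this change:

// Sketch: find the single permission group flagged to auto-add new members, if any.
async function findAutoAddGroupSketch(tx: any, organizationId: string) {
  const [group] = await tx
    .select({ id: permissionGroup.id, name: permissionGroup.name })
    .from(permissionGroup)
    .where(
      and(
        eq(permissionGroup.organizationId, organizationId),
        eq(permissionGroup.autoAddNewMembers, true)
      )
    )
    .limit(1)
  return group ?? null
}
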
@@ -25,12 +25,19 @@ const configSchema = z.object({
  disableMcpTools: z.boolean().optional(),
  disableCustomTools: z.boolean().optional(),
  hideTemplates: z.boolean().optional(),
  disableInvitations: z.boolean().optional(),
  hideDeployApi: z.boolean().optional(),
  hideDeployMcp: z.boolean().optional(),
  hideDeployA2a: z.boolean().optional(),
  hideDeployChatbot: z.boolean().optional(),
  hideDeployTemplate: z.boolean().optional(),
})

const updateSchema = z.object({
  name: z.string().trim().min(1).max(100).optional(),
  description: z.string().max(500).nullable().optional(),
  config: configSchema.optional(),
  autoAddNewMembers: z.boolean().optional(),
})

async function getPermissionGroupWithAccess(groupId: string, userId: string) {

@@ -44,6 +51,7 @@ async function getPermissionGroupWithAccess(groupId: string, userId: string) {
      createdBy: permissionGroup.createdBy,
      createdAt: permissionGroup.createdAt,
      updatedAt: permissionGroup.updatedAt,
      autoAddNewMembers: permissionGroup.autoAddNewMembers,
    })
    .from(permissionGroup)
    .where(eq(permissionGroup.id, groupId))

@@ -140,11 +148,27 @@ export async function PUT(req: NextRequest, { params }: { params: Promise<{ id:
        ? { ...currentConfig, ...updates.config }
        : currentConfig

    // If setting autoAddNewMembers to true, unset it on other groups in the org first
    if (updates.autoAddNewMembers === true) {
      await db
        .update(permissionGroup)
        .set({ autoAddNewMembers: false, updatedAt: new Date() })
        .where(
          and(
            eq(permissionGroup.organizationId, result.group.organizationId),
            eq(permissionGroup.autoAddNewMembers, true)
          )
        )
    }

    await db
      .update(permissionGroup)
      .set({
        ...(updates.name !== undefined && { name: updates.name }),
        ...(updates.description !== undefined && { description: updates.description }),
        ...(updates.autoAddNewMembers !== undefined && {
          autoAddNewMembers: updates.autoAddNewMembers,
        }),
        config: newConfig,
        updatedAt: new Date(),
      })

@@ -26,6 +26,12 @@ const configSchema = z.object({
|
||||
disableMcpTools: z.boolean().optional(),
|
||||
disableCustomTools: z.boolean().optional(),
|
||||
hideTemplates: z.boolean().optional(),
|
||||
disableInvitations: z.boolean().optional(),
|
||||
hideDeployApi: z.boolean().optional(),
|
||||
hideDeployMcp: z.boolean().optional(),
|
||||
hideDeployA2a: z.boolean().optional(),
|
||||
hideDeployChatbot: z.boolean().optional(),
|
||||
hideDeployTemplate: z.boolean().optional(),
|
||||
})
|
||||
|
||||
const createSchema = z.object({
|
||||
@@ -33,6 +39,7 @@ const createSchema = z.object({
|
||||
name: z.string().trim().min(1).max(100),
|
||||
description: z.string().max(500).optional(),
|
||||
config: configSchema.optional(),
|
||||
autoAddNewMembers: z.boolean().optional(),
|
||||
})
|
||||
|
||||
export async function GET(req: Request) {
|
||||
@@ -68,6 +75,7 @@ export async function GET(req: Request) {
|
||||
createdBy: permissionGroup.createdBy,
|
||||
createdAt: permissionGroup.createdAt,
|
||||
updatedAt: permissionGroup.updatedAt,
|
||||
autoAddNewMembers: permissionGroup.autoAddNewMembers,
|
||||
creatorName: user.name,
|
||||
creatorEmail: user.email,
|
||||
})
|
||||
@@ -111,7 +119,8 @@ export async function POST(req: Request) {
|
||||
}
|
||||
|
||||
const body = await req.json()
|
||||
const { organizationId, name, description, config } = createSchema.parse(body)
|
||||
const { organizationId, name, description, config, autoAddNewMembers } =
|
||||
createSchema.parse(body)
|
||||
|
||||
const membership = await db
|
||||
.select({ id: member.id, role: member.role })
|
||||
@@ -154,6 +163,19 @@ export async function POST(req: Request) {
|
||||
...config,
|
||||
}
|
||||
|
||||
// If autoAddNewMembers is true, unset it on any existing groups first
|
||||
if (autoAddNewMembers) {
|
||||
await db
|
||||
.update(permissionGroup)
|
||||
.set({ autoAddNewMembers: false, updatedAt: new Date() })
|
||||
.where(
|
||||
and(
|
||||
eq(permissionGroup.organizationId, organizationId),
|
||||
eq(permissionGroup.autoAddNewMembers, true)
|
||||
)
|
||||
)
|
||||
}
|
||||
|
||||
const now = new Date()
|
||||
const newGroup = {
|
||||
id: crypto.randomUUID(),
|
||||
@@ -164,6 +186,7 @@ export async function POST(req: Request) {
|
||||
createdBy: session.user.id,
|
||||
createdAt: now,
|
||||
updatedAt: now,
|
||||
autoAddNewMembers: autoAddNewMembers || false,
|
||||
}
|
||||
|
||||
await db.insert(permissionGroup).values(newGroup)
|
||||
|
||||
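For reference, creating a group with the new flag from a client could look roughly like the sketch below; the request body fields come from createSchema above, while the URL and example values are assumptions:

const res = await fetch('/api/permission-groups', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    organizationId: 'org_123', // hypothetical id
    name: 'Default members',
    autoAddNewMembers: true,
  }),
})
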
@@ -5,14 +5,14 @@ import { eq } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import { verifySuperUser } from '@/lib/templates/permissions'
|
||||
import { verifyAdminPrivileges } from '@/lib/templates/permissions'
|
||||
|
||||
const logger = createLogger('TemplateApprovalAPI')
|
||||
|
||||
export const revalidate = 0
|
||||
|
||||
/**
|
||||
* POST /api/templates/[id]/approve - Approve a template (super users only)
|
||||
* POST /api/templates/[id]/approve - Approve a template (admin users only)
|
||||
*/
|
||||
export async function POST(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
|
||||
const requestId = generateRequestId()
|
||||
@@ -25,10 +25,10 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const { isSuperUser } = await verifySuperUser(session.user.id)
|
||||
if (!isSuperUser) {
|
||||
logger.warn(`[${requestId}] Non-super user attempted to approve template: ${id}`)
|
||||
return NextResponse.json({ error: 'Only super users can approve templates' }, { status: 403 })
|
||||
const { hasAdminPrivileges } = await verifyAdminPrivileges(session.user.id)
|
||||
if (!hasAdminPrivileges) {
|
||||
logger.warn(`[${requestId}] Non-admin user attempted to approve template: ${id}`)
|
||||
return NextResponse.json({ error: 'Only admin users can approve templates' }, { status: 403 })
|
||||
}
|
||||
|
||||
const existingTemplate = await db.select().from(templates).where(eq(templates.id, id)).limit(1)
|
||||
@@ -42,7 +42,7 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
|
||||
.set({ status: 'approved', updatedAt: new Date() })
|
||||
.where(eq(templates.id, id))
|
||||
|
||||
logger.info(`[${requestId}] Template approved: ${id} by super user: ${session.user.id}`)
|
||||
logger.info(`[${requestId}] Template approved: ${id} by admin: ${session.user.id}`)
|
||||
|
||||
return NextResponse.json({
|
||||
message: 'Template approved successfully',
|
||||
@@ -55,7 +55,7 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
|
||||
}
|
||||
|
||||
/**
|
||||
* DELETE /api/templates/[id]/approve - Unapprove a template (super users only)
|
||||
* DELETE /api/templates/[id]/approve - Unapprove a template (admin users only)
|
||||
*/
|
||||
export async function DELETE(
|
||||
_request: NextRequest,
|
||||
@@ -71,10 +71,10 @@ export async function DELETE(
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const { isSuperUser } = await verifySuperUser(session.user.id)
|
||||
if (!isSuperUser) {
|
||||
logger.warn(`[${requestId}] Non-super user attempted to reject template: ${id}`)
|
||||
return NextResponse.json({ error: 'Only super users can reject templates' }, { status: 403 })
|
||||
const { hasAdminPrivileges } = await verifyAdminPrivileges(session.user.id)
|
||||
if (!hasAdminPrivileges) {
|
||||
logger.warn(`[${requestId}] Non-admin user attempted to reject template: ${id}`)
|
||||
return NextResponse.json({ error: 'Only admin users can reject templates' }, { status: 403 })
|
||||
}
|
||||
|
||||
const existingTemplate = await db.select().from(templates).where(eq(templates.id, id)).limit(1)
|
||||
@@ -88,7 +88,7 @@ export async function DELETE(
|
||||
.set({ status: 'rejected', updatedAt: new Date() })
|
||||
.where(eq(templates.id, id))
|
||||
|
||||
logger.info(`[${requestId}] Template rejected: ${id} by super user: ${session.user.id}`)
|
||||
logger.info(`[${requestId}] Template rejected: ${id} by admin: ${session.user.id}`)
|
||||
|
||||
return NextResponse.json({
|
||||
message: 'Template rejected successfully',
|
||||
|
||||
@@ -5,14 +5,14 @@ import { eq } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import { verifySuperUser } from '@/lib/templates/permissions'
|
||||
import { verifyAdminPrivileges } from '@/lib/templates/permissions'
|
||||
|
||||
const logger = createLogger('TemplateRejectionAPI')
|
||||
|
||||
export const revalidate = 0
|
||||
|
||||
/**
|
||||
* POST /api/templates/[id]/reject - Reject a template (super users only)
|
||||
* POST /api/templates/[id]/reject - Reject a template (admin users only)
|
||||
*/
|
||||
export async function POST(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
|
||||
const requestId = generateRequestId()
|
||||
@@ -25,10 +25,10 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const { isSuperUser } = await verifySuperUser(session.user.id)
|
||||
if (!isSuperUser) {
|
||||
logger.warn(`[${requestId}] Non-super user attempted to reject template: ${id}`)
|
||||
return NextResponse.json({ error: 'Only super users can reject templates' }, { status: 403 })
|
||||
const { hasAdminPrivileges } = await verifyAdminPrivileges(session.user.id)
|
||||
if (!hasAdminPrivileges) {
|
||||
logger.warn(`[${requestId}] Non-admin user attempted to reject template: ${id}`)
|
||||
return NextResponse.json({ error: 'Only admin users can reject templates' }, { status: 403 })
|
||||
}
|
||||
|
||||
const existingTemplate = await db.select().from(templates).where(eq(templates.id, id)).limit(1)
|
||||
@@ -42,7 +42,7 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
|
||||
.set({ status: 'rejected', updatedAt: new Date() })
|
||||
.where(eq(templates.id, id))
|
||||
|
||||
logger.info(`[${requestId}] Template rejected: ${id} by super user: ${session.user.id}`)
|
||||
logger.info(`[${requestId}] Template rejected: ${id} by admin: ${session.user.id}`)
|
||||
|
||||
return NextResponse.json({
|
||||
message: 'Template rejected successfully',
|
||||
|
||||
@@ -23,13 +23,10 @@ const logger = createLogger('TemplatesAPI')
|
||||
|
||||
export const revalidate = 0
|
||||
|
||||
// Function to sanitize sensitive data from workflow state
|
||||
// Now uses the more comprehensive sanitizeCredentials from credential-extractor
|
||||
function sanitizeWorkflowState(state: any): any {
|
||||
return sanitizeCredentials(state)
|
||||
}
|
||||
|
||||
// Schema for creating a template
|
||||
const CreateTemplateSchema = z.object({
|
||||
workflowId: z.string().min(1, 'Workflow ID is required'),
|
||||
name: z.string().min(1, 'Name is required').max(100, 'Name must be less than 100 characters'),
|
||||
@@ -43,7 +40,6 @@ const CreateTemplateSchema = z.object({
|
||||
tags: z.array(z.string()).max(10, 'Maximum 10 tags allowed').optional().default([]),
|
||||
})
|
||||
|
||||
// Schema for query parameters
|
||||
const QueryParamsSchema = z.object({
|
||||
limit: z.coerce.number().optional().default(50),
|
||||
offset: z.coerce.number().optional().default(0),
|
||||
@@ -69,31 +65,21 @@ export async function GET(request: NextRequest) {
|
||||
|
||||
logger.debug(`[${requestId}] Fetching templates with params:`, params)
|
||||
|
||||
// Check if user is a super user
|
||||
const currentUser = await db.select().from(user).where(eq(user.id, session.user.id)).limit(1)
|
||||
const isSuperUser = currentUser[0]?.isSuperUser || false
|
||||
const isSuperUser = currentUser[0]?.role === 'admin' || currentUser[0]?.role === 'superadmin'
|
||||
|
||||
// Build query conditions
|
||||
const conditions = []
|
||||
|
||||
// Apply workflow filter if provided (for getting template by workflow)
|
||||
// When fetching by workflowId, we want to get the template regardless of status
|
||||
// This is used by the deploy modal to check if a template exists
|
||||
if (params.workflowId) {
|
||||
conditions.push(eq(templates.workflowId, params.workflowId))
|
||||
// Don't apply status filter when fetching by workflowId - we want to show
|
||||
// the template to its owner even if it's pending
|
||||
} else {
|
||||
// Apply status filter - only approved templates for non-super users
|
||||
if (params.status) {
|
||||
conditions.push(eq(templates.status, params.status))
|
||||
} else if (!isSuperUser || !params.includeAllStatuses) {
|
||||
// Non-super users and super users without includeAllStatuses flag see only approved templates
|
||||
conditions.push(eq(templates.status, 'approved'))
|
||||
}
|
||||
}
|
||||
|
||||
// Apply search filter if provided
|
||||
if (params.search) {
|
||||
const searchTerm = `%${params.search}%`
|
||||
conditions.push(
|
||||
@@ -104,10 +90,8 @@ export async function GET(request: NextRequest) {
|
||||
)
|
||||
}
|
||||
|
||||
// Combine conditions
|
||||
const whereCondition = conditions.length > 0 ? and(...conditions) : undefined
|
||||
|
||||
// Apply ordering, limit, and offset with star information
|
||||
const results = await db
|
||||
.select({
|
||||
id: templates.id,
|
||||
@@ -138,7 +122,6 @@ export async function GET(request: NextRequest) {
|
||||
.limit(params.limit)
|
||||
.offset(params.offset)
|
||||
|
||||
// Get total count for pagination
|
||||
const totalCount = await db
|
||||
.select({ count: sql<number>`count(*)` })
|
||||
.from(templates)
|
||||
@@ -191,7 +174,6 @@ export async function POST(request: NextRequest) {
|
||||
workflowId: data.workflowId,
|
||||
})
|
||||
|
||||
// Verify the workflow exists and belongs to the user
|
||||
const workflowExists = await db
|
||||
.select({ id: workflow.id })
|
||||
.from(workflow)
|
||||
@@ -218,7 +200,6 @@ export async function POST(request: NextRequest) {
|
||||
const templateId = uuidv4()
|
||||
const now = new Date()
|
||||
|
||||
// Get the active deployment version for the workflow to copy its state
|
||||
const activeVersion = await db
|
||||
.select({
|
||||
id: workflowDeploymentVersion.id,
|
||||
@@ -243,10 +224,8 @@ export async function POST(request: NextRequest) {
|
||||
)
|
||||
}
|
||||
|
||||
// Ensure the state includes workflow variables (if not already included)
|
||||
let stateWithVariables = activeVersion[0].state as any
|
||||
if (stateWithVariables && !stateWithVariables.variables) {
|
||||
// Fetch workflow variables if not in deployment version
|
||||
const [workflowRecord] = await db
|
||||
.select({ variables: workflow.variables })
|
||||
.from(workflow)
|
||||
@@ -259,10 +238,8 @@ export async function POST(request: NextRequest) {
|
||||
}
|
||||
}
|
||||
|
||||
// Extract credential requirements before sanitizing
|
||||
const requiredCredentials = extractRequiredCredentials(stateWithVariables)
|
||||
|
||||
// Sanitize the workflow state to remove all credential values
|
||||
const sanitizedState = sanitizeWorkflowState(stateWithVariables)
|
||||
|
||||
const newTemplate = {
|
||||
|
||||
@@ -3,10 +3,9 @@
|
||||
*
|
||||
* @vitest-environment node
|
||||
*/
|
||||
import { loggerMock } from '@sim/testing'
|
||||
import { createMockRequest, loggerMock } from '@sim/testing'
|
||||
import { NextRequest } from 'next/server'
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
import { createMockRequest } from '@/app/api/__test-utils__/utils'
|
||||
|
||||
describe('Custom Tools API Routes', () => {
|
||||
const sampleTools = [
|
||||
@@ -364,7 +363,7 @@ describe('Custom Tools API Routes', () => {
|
||||
})
|
||||
|
||||
it('should reject requests missing tool ID', async () => {
|
||||
const req = createMockRequest('DELETE')
|
||||
const req = new NextRequest('http://localhost:3000/api/tools/custom')
|
||||
|
||||
const { DELETE } = await import('@/app/api/tools/custom/route')
|
||||
|
||||
|
@@ -6,23 +6,26 @@ import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { generateRequestId } from '@/lib/core/utils/request'

const logger = createLogger('SuperUserAPI')
const logger = createLogger('AdminStatusAPI')

export const revalidate = 0

// GET /api/user/super-user - Check if current user is a super user (database status)
/**
 * GET /api/user/admin-status - Check if current user has admin privileges
 * Returns hasAdminPrivileges: true if user role is 'admin' or 'superadmin'
 */
export async function GET(request: NextRequest) {
  const requestId = generateRequestId()

  try {
    const session = await getSession()
    if (!session?.user?.id) {
      logger.warn(`[${requestId}] Unauthorized super user status check attempt`)
      logger.warn(`[${requestId}] Unauthorized admin status check attempt`)
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    const currentUser = await db
      .select({ isSuperUser: user.isSuperUser })
      .select({ role: user.role })
      .from(user)
      .where(eq(user.id, session.user.id))
      .limit(1)

@@ -32,11 +35,13 @@ export async function GET(request: NextRequest) {
      return NextResponse.json({ error: 'User not found' }, { status: 404 })
    }

    const role = currentUser[0].role
    return NextResponse.json({
      isSuperUser: currentUser[0].isSuperUser,
      hasAdminPrivileges: role === 'admin' || role === 'superadmin',
      role,
    })
  } catch (error) {
    logger.error(`[${requestId}] Error checking super user status`, error)
    logger.error(`[${requestId}] Error checking admin status`, error)
    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}
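A minimal client-side check against the renamed endpoint might look like the sketch below; the path and response fields come from the docblock above, the surrounding handling is illustrative:

const res = await fetch('/api/user/admin-status')
if (res.ok) {
  const { hasAdminPrivileges, role } = await res.json()
  if (hasAdminPrivileges) {
    // role is 'admin' or 'superadmin'
  }
}
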
@@ -27,10 +27,11 @@ const SettingsSchema = z.object({
  superUserModeEnabled: z.boolean().optional(),
  errorNotificationsEnabled: z.boolean().optional(),
  snapToGridSize: z.number().min(0).max(50).optional(),
  showActionBar: z.boolean().optional(),
})

const defaultSettings = {
  theme: 'system',
  theme: 'dark',
  autoConnect: true,
  telemetryEnabled: true,
  emailPreferences: {},

@@ -39,6 +40,7 @@ const defaultSettings = {
  superUserModeEnabled: false,
  errorNotificationsEnabled: true,
  snapToGridSize: 0,
  showActionBar: true,
}

export async function GET() {

@@ -73,6 +75,7 @@ export async function GET() {
          superUserModeEnabled: userSettings.superUserModeEnabled ?? true,
          errorNotificationsEnabled: userSettings.errorNotificationsEnabled ?? true,
          snapToGridSize: userSettings.snapToGridSize ?? 0,
          showActionBar: userSettings.showActionBar ?? true,
        },
      },
      { status: 200 }

@@ -13,6 +13,8 @@
 * GET /api/v1/admin/users/:id - Get user details
 * GET /api/v1/admin/users/:id/billing - Get user billing info
 * PATCH /api/v1/admin/users/:id/billing - Update user billing (limit, blocked)
 * GET /api/v1/admin/users/:id/role - Get user role
 * PATCH /api/v1/admin/users/:id/role - Update user role (user, admin, superadmin)
 *
 * Workspaces:
 * GET /api/v1/admin/workspaces - List all workspaces

@@ -105,6 +105,7 @@ export interface AdminUser {
  email: string
  emailVerified: boolean
  image: string | null
  role: string | null
  createdAt: string
  updatedAt: string
}

@@ -116,6 +117,7 @@ export function toAdminUser(dbUser: DbUser): AdminUser {
    email: dbUser.email,
    emailVerified: dbUser.emailVerified,
    image: dbUser.image,
    role: dbUser.role,
    createdAt: dbUser.createdAt.toISOString(),
    updatedAt: dbUser.updatedAt.toISOString(),
  }

apps/sim/app/api/v1/admin/users/[id]/role/route.ts (new file, 98 lines)
@@ -0,0 +1,98 @@
/**
 * GET /api/v1/admin/users/[id]/role
 *
 * Get a user's current role.
 *
 * Response: AdminSingleResponse<{ role: string | null }>
 *
 * PATCH /api/v1/admin/users/[id]/role
 *
 * Update a user's role.
 *
 * Body:
 * - role: 'user' | 'admin' | 'superadmin' - The role to assign
 *
 * Response: AdminSingleResponse<AdminUser>
 */

import { db } from '@sim/db'
import { user } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { withAdminAuthParams } from '@/app/api/v1/admin/middleware'
import {
  badRequestResponse,
  internalErrorResponse,
  notFoundResponse,
  singleResponse,
} from '@/app/api/v1/admin/responses'
import { toAdminUser } from '@/app/api/v1/admin/types'

const logger = createLogger('AdminUserRoleAPI')

const VALID_ROLES = ['user', 'admin', 'superadmin'] as const
type ValidRole = (typeof VALID_ROLES)[number]

interface RouteParams {
  id: string
}

export const GET = withAdminAuthParams<RouteParams>(async (request, context) => {
  const { id: userId } = await context.params

  try {
    const [userData] = await db
      .select({ role: user.role })
      .from(user)
      .where(eq(user.id, userId))
      .limit(1)

    if (!userData) {
      return notFoundResponse('User')
    }

    logger.info(`Admin API: Retrieved role for user ${userId}`)

    return singleResponse({ role: userData.role })
  } catch (error) {
    logger.error('Admin API: Failed to get user role', { error, userId })
    return internalErrorResponse('Failed to get user role')
  }
})

export const PATCH = withAdminAuthParams<RouteParams>(async (request, context) => {
  const { id: userId } = await context.params

  try {
    const body = await request.json()

    const [existing] = await db.select().from(user).where(eq(user.id, userId)).limit(1)

    if (!existing) {
      return notFoundResponse('User')
    }

    if (body.role === undefined) {
      return badRequestResponse('role is required')
    }

    if (!VALID_ROLES.includes(body.role)) {
      return badRequestResponse(`Invalid role. Must be one of: ${VALID_ROLES.join(', ')}`, {
        validRoles: VALID_ROLES,
      })
    }

    const [updated] = await db
      .update(user)
      .set({ role: body.role as ValidRole, updatedAt: new Date() })
      .where(eq(user.id, userId))
      .returning()

    logger.info(`Admin API: Updated user ${userId} role to ${body.role}`)

    return singleResponse(toAdminUser(updated))
  } catch (error) {
    logger.error('Admin API: Failed to update user role', { error, userId })
    return internalErrorResponse('Failed to update user role')
  }
})
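An illustrative call to the new role endpoint; the base URL, auth header, and response envelope handling are assumptions (admin auth is enforced by withAdminAuthParams, whose expected credentials are not shown in this diff):

const res = await fetch('/api/v1/admin/users/user_123/role', {
  method: 'PATCH',
  headers: {
    'Content-Type': 'application/json',
    // plus whatever admin credential withAdminAuthParams expects (assumption)
  },
  body: JSON.stringify({ role: 'admin' }),
})
const payload = await res.json() // AdminSingleResponse<AdminUser>; exact envelope shape assumed
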
@@ -1,6 +1,8 @@
import { db, workflow } from '@sim/db'
import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { generateRequestId } from '@/lib/core/utils/request'
import { cleanupWebhooksForWorkflow } from '@/lib/webhooks/deploy'
import {
  deployWorkflow,
  loadWorkflowFromNormalizedTables,

@@ -80,10 +82,11 @@ export const POST = withAdminAuthParams<RouteParams>(async (request, context) =>

export const DELETE = withAdminAuthParams<RouteParams>(async (request, context) => {
  const { id: workflowId } = await context.params
  const requestId = generateRequestId()

  try {
    const [workflowRecord] = await db
      .select({ id: workflow.id })
      .select()
      .from(workflow)
      .where(eq(workflow.id, workflowId))
      .limit(1)

@@ -92,6 +95,13 @@ export const DELETE = withAdminAuthParams<RouteParams>(async (request, context)
      return notFoundResponse('Workflow')
    }

    // Clean up external webhook subscriptions before undeploying
    await cleanupWebhooksForWorkflow(
      workflowId,
      workflowRecord as Record<string, unknown>,
      requestId
    )

    const result = await undeployWorkflow({ workflowId })
    if (!result.success) {
      return internalErrorResponse(result.error || 'Failed to undeploy workflow')

||||
@@ -7,6 +7,11 @@ import { getSession } from '@/lib/auth'
|
||||
import { validateInteger } from '@/lib/core/security/input-validation'
|
||||
import { PlatformEvents } from '@/lib/core/telemetry'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import {
|
||||
cleanupExternalWebhook,
|
||||
createExternalWebhookSubscription,
|
||||
shouldRecreateExternalWebhookSubscription,
|
||||
} from '@/lib/webhooks/provider-subscriptions'
|
||||
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
|
||||
|
||||
const logger = createLogger('WebhookAPI')
|
||||
@@ -177,6 +182,46 @@ export async function PATCH(request: NextRequest, { params }: { params: Promise<
|
||||
return NextResponse.json({ error: 'Access denied' }, { status: 403 })
|
||||
}
|
||||
|
||||
const existingProviderConfig =
|
||||
(webhookData.webhook.providerConfig as Record<string, unknown>) || {}
|
||||
let nextProviderConfig =
|
||||
providerConfig !== undefined &&
|
||||
resolvedProviderConfig &&
|
||||
typeof resolvedProviderConfig === 'object'
|
||||
? (resolvedProviderConfig as Record<string, unknown>)
|
||||
: existingProviderConfig
|
||||
const nextProvider = (provider ?? webhookData.webhook.provider) as string
|
||||
|
||||
if (
|
||||
providerConfig !== undefined &&
|
||||
shouldRecreateExternalWebhookSubscription({
|
||||
previousProvider: webhookData.webhook.provider as string,
|
||||
nextProvider,
|
||||
previousConfig: existingProviderConfig,
|
||||
nextConfig: nextProviderConfig,
|
||||
})
|
||||
) {
|
||||
await cleanupExternalWebhook(
|
||||
{ ...webhookData.webhook, providerConfig: existingProviderConfig },
|
||||
webhookData.workflow,
|
||||
requestId
|
||||
)
|
||||
|
||||
const result = await createExternalWebhookSubscription(
|
||||
request,
|
||||
{
|
||||
...webhookData.webhook,
|
||||
provider: nextProvider,
|
||||
providerConfig: nextProviderConfig,
|
||||
},
|
||||
webhookData.workflow,
|
||||
session.user.id,
|
||||
requestId
|
||||
)
|
||||
|
||||
nextProviderConfig = result.updatedProviderConfig as Record<string, unknown>
|
||||
}
|
||||
|
||||
logger.debug(`[${requestId}] Updating webhook properties`, {
|
||||
hasPathUpdate: path !== undefined,
|
||||
hasProviderUpdate: provider !== undefined,
|
||||
@@ -188,16 +233,16 @@ export async function PATCH(request: NextRequest, { params }: { params: Promise<
|
||||
// Merge providerConfig to preserve credential-related fields
|
||||
let finalProviderConfig = webhooks[0].webhook.providerConfig
|
||||
if (providerConfig !== undefined) {
|
||||
const existingConfig = (webhooks[0].webhook.providerConfig as Record<string, unknown>) || {}
|
||||
const existingConfig = existingProviderConfig
|
||||
finalProviderConfig = {
|
||||
...resolvedProviderConfig,
|
||||
...nextProviderConfig,
|
||||
credentialId: existingConfig.credentialId,
|
||||
credentialSetId: existingConfig.credentialSetId,
|
||||
userId: existingConfig.userId,
|
||||
historyId: existingConfig.historyId,
|
||||
lastCheckedTimestamp: existingConfig.lastCheckedTimestamp,
|
||||
setupCompleted: existingConfig.setupCompleted,
|
||||
externalId: existingConfig.externalId,
|
||||
externalId: nextProviderConfig.externalId ?? existingConfig.externalId,
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
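The recreate decision used above lives in '@/lib/webhooks/provider-subscriptions' and its body is not part of this diff; as a rough mental model only, it presumably compares the previous and next provider and config, along these lines (sketch, not the shipped logic):

// Assumed behavior sketch — not the actual implementation in provider-subscriptions.
function shouldRecreateSketch(args: {
  previousProvider: string
  nextProvider: string
  previousConfig: Record<string, unknown>
  nextConfig: Record<string, unknown>
}): boolean {
  if (args.previousProvider !== args.nextProvider) return true
  return JSON.stringify(args.previousConfig) !== JSON.stringify(args.nextConfig)
}
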
@@ -7,9 +7,8 @@ import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { PlatformEvents } from '@/lib/core/telemetry'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import { getBaseUrl } from '@/lib/core/utils/urls'
|
||||
import { createExternalWebhookSubscription } from '@/lib/webhooks/provider-subscriptions'
|
||||
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
|
||||
import { getOAuthToken } from '@/app/api/auth/oauth/utils'
|
||||
|
||||
const logger = createLogger('WebhooksAPI')
|
||||
|
||||
@@ -257,7 +256,7 @@ export async function POST(request: NextRequest) {
|
||||
const finalProviderConfig = providerConfig || {}
|
||||
|
||||
const { resolveEnvVarsInObject } = await import('@/lib/webhooks/env-resolver')
|
||||
const resolvedProviderConfig = await resolveEnvVarsInObject(
|
||||
let resolvedProviderConfig = await resolveEnvVarsInObject(
|
||||
finalProviderConfig,
|
||||
userId,
|
||||
workflowRecord.workspaceId || undefined
|
||||
@@ -414,149 +413,33 @@ export async function POST(request: NextRequest) {
|
||||
}
|
||||
// --- End Credential Set Handling ---
|
||||
|
||||
// Create external subscriptions before saving to DB to prevent orphaned records
|
||||
let externalSubscriptionId: string | undefined
|
||||
let externalSubscriptionCreated = false
|
||||
|
||||
const createTempWebhookData = () => ({
|
||||
const createTempWebhookData = (providerConfigOverride = resolvedProviderConfig) => ({
|
||||
id: targetWebhookId || nanoid(),
|
||||
path: finalPath,
|
||||
providerConfig: resolvedProviderConfig,
|
||||
provider,
|
||||
providerConfig: providerConfigOverride,
|
||||
})
|
||||
|
||||
if (provider === 'airtable') {
|
||||
logger.info(`[${requestId}] Creating Airtable subscription before saving to database`)
|
||||
try {
|
||||
externalSubscriptionId = await createAirtableWebhookSubscription(
|
||||
request,
|
||||
userId,
|
||||
createTempWebhookData(),
|
||||
requestId
|
||||
)
|
||||
if (externalSubscriptionId) {
|
||||
resolvedProviderConfig.externalId = externalSubscriptionId
|
||||
externalSubscriptionCreated = true
|
||||
}
|
||||
} catch (err) {
|
||||
logger.error(`[${requestId}] Error creating Airtable webhook subscription`, err)
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: 'Failed to create webhook in Airtable',
|
||||
details: err instanceof Error ? err.message : 'Unknown error',
|
||||
},
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
if (provider === 'calendly') {
|
||||
logger.info(`[${requestId}] Creating Calendly subscription before saving to database`)
|
||||
try {
|
||||
externalSubscriptionId = await createCalendlyWebhookSubscription(
|
||||
request,
|
||||
userId,
|
||||
createTempWebhookData(),
|
||||
requestId
|
||||
)
|
||||
if (externalSubscriptionId) {
|
||||
resolvedProviderConfig.externalId = externalSubscriptionId
|
||||
externalSubscriptionCreated = true
|
||||
}
|
||||
} catch (err) {
|
||||
logger.error(`[${requestId}] Error creating Calendly webhook subscription`, err)
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: 'Failed to create webhook in Calendly',
|
||||
details: err instanceof Error ? err.message : 'Unknown error',
|
||||
},
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
if (provider === 'microsoft-teams') {
|
||||
const { createTeamsSubscription } = await import('@/lib/webhooks/provider-subscriptions')
|
||||
logger.info(`[${requestId}] Creating Teams subscription before saving to database`)
|
||||
try {
|
||||
await createTeamsSubscription(request, createTempWebhookData(), workflowRecord, requestId)
|
||||
externalSubscriptionCreated = true
|
||||
} catch (err) {
|
||||
logger.error(`[${requestId}] Error creating Teams subscription`, err)
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: 'Failed to create Teams subscription',
|
||||
details: err instanceof Error ? err.message : 'Unknown error',
|
||||
},
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
if (provider === 'telegram') {
|
||||
const { createTelegramWebhook } = await import('@/lib/webhooks/provider-subscriptions')
|
||||
logger.info(`[${requestId}] Creating Telegram webhook before saving to database`)
|
||||
try {
|
||||
await createTelegramWebhook(request, createTempWebhookData(), requestId)
|
||||
externalSubscriptionCreated = true
|
||||
} catch (err) {
|
||||
logger.error(`[${requestId}] Error creating Telegram webhook`, err)
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: 'Failed to create Telegram webhook',
|
||||
details: err instanceof Error ? err.message : 'Unknown error',
|
||||
},
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
if (provider === 'webflow') {
|
||||
logger.info(`[${requestId}] Creating Webflow subscription before saving to database`)
|
||||
try {
|
||||
externalSubscriptionId = await createWebflowWebhookSubscription(
|
||||
request,
|
||||
userId,
|
||||
createTempWebhookData(),
|
||||
requestId
|
||||
)
|
||||
if (externalSubscriptionId) {
|
||||
resolvedProviderConfig.externalId = externalSubscriptionId
|
||||
externalSubscriptionCreated = true
|
||||
}
|
||||
} catch (err) {
|
||||
logger.error(`[${requestId}] Error creating Webflow webhook subscription`, err)
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: 'Failed to create webhook in Webflow',
|
||||
details: err instanceof Error ? err.message : 'Unknown error',
|
||||
},
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
if (provider === 'typeform') {
|
||||
const { createTypeformWebhook } = await import('@/lib/webhooks/provider-subscriptions')
|
||||
logger.info(`[${requestId}] Creating Typeform webhook before saving to database`)
|
||||
try {
|
||||
const usedTag = await createTypeformWebhook(request, createTempWebhookData(), requestId)
|
||||
|
||||
if (!resolvedProviderConfig.webhookTag) {
|
||||
resolvedProviderConfig.webhookTag = usedTag
|
||||
logger.info(`[${requestId}] Stored auto-generated webhook tag: ${usedTag}`)
|
||||
}
|
||||
|
||||
externalSubscriptionCreated = true
|
||||
} catch (err) {
|
||||
logger.error(`[${requestId}] Error creating Typeform webhook`, err)
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: 'Failed to create webhook in Typeform',
|
||||
details: err instanceof Error ? err.message : 'Unknown error',
|
||||
},
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
try {
|
||||
const result = await createExternalWebhookSubscription(
|
||||
request,
|
||||
createTempWebhookData(),
|
||||
workflowRecord,
|
||||
userId,
|
||||
requestId
|
||||
)
|
||||
resolvedProviderConfig = result.updatedProviderConfig as Record<string, unknown>
|
||||
externalSubscriptionCreated = result.externalSubscriptionCreated
|
||||
} catch (err) {
|
||||
logger.error(`[${requestId}] Error creating external webhook subscription`, err)
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: 'Failed to create external webhook subscription',
|
||||
details: err instanceof Error ? err.message : 'Unknown error',
|
||||
},
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
|
||||
// Now save to database (only if subscription succeeded or provider doesn't need external subscription)
|
||||
@@ -617,7 +500,11 @@ export async function POST(request: NextRequest) {
|
||||
logger.error(`[${requestId}] DB save failed, cleaning up external subscription`, dbError)
|
||||
try {
|
||||
const { cleanupExternalWebhook } = await import('@/lib/webhooks/provider-subscriptions')
|
||||
await cleanupExternalWebhook(createTempWebhookData(), workflowRecord, requestId)
|
||||
await cleanupExternalWebhook(
|
||||
createTempWebhookData(resolvedProviderConfig),
|
||||
workflowRecord,
|
||||
requestId
|
||||
)
|
||||
} catch (cleanupError) {
|
||||
logger.error(
|
||||
`[${requestId}] Failed to cleanup external subscription after DB save failure`,
|
||||
@@ -741,110 +628,6 @@ export async function POST(request: NextRequest) {
|
||||
}
|
||||
// --- End RSS specific logic ---
|
||||
|
||||
if (savedWebhook && provider === 'grain') {
|
||||
logger.info(`[${requestId}] Grain provider detected. Creating Grain webhook subscription.`)
|
||||
try {
|
||||
const grainResult = await createGrainWebhookSubscription(
|
||||
request,
|
||||
{
|
||||
id: savedWebhook.id,
|
||||
path: savedWebhook.path,
|
||||
providerConfig: savedWebhook.providerConfig,
|
||||
},
|
||||
requestId
|
||||
)
|
||||
|
||||
if (grainResult) {
|
||||
// Update the webhook record with the external Grain hook ID and event types for filtering
|
||||
const updatedConfig = {
|
||||
...(savedWebhook.providerConfig as Record<string, any>),
|
||||
externalId: grainResult.id,
|
||||
eventTypes: grainResult.eventTypes,
|
||||
}
|
||||
await db
|
||||
.update(webhook)
|
||||
.set({
|
||||
providerConfig: updatedConfig,
|
||||
updatedAt: new Date(),
|
||||
})
|
||||
.where(eq(webhook.id, savedWebhook.id))
|
||||
|
||||
savedWebhook.providerConfig = updatedConfig
|
||||
logger.info(`[${requestId}] Successfully created Grain webhook`, {
|
||||
grainHookId: grainResult.id,
|
||||
eventTypes: grainResult.eventTypes,
|
||||
webhookId: savedWebhook.id,
|
||||
})
|
||||
}
|
||||
} catch (err) {
|
||||
logger.error(
|
||||
`[${requestId}] Error creating Grain webhook subscription, rolling back webhook`,
|
||||
err
|
||||
)
|
||||
await db.delete(webhook).where(eq(webhook.id, savedWebhook.id))
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: 'Failed to create webhook in Grain',
|
||||
details: err instanceof Error ? err.message : 'Unknown error',
|
||||
},
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
// --- End Grain specific logic ---
|
||||
|
||||
// --- Lemlist specific logic ---
|
||||
if (savedWebhook && provider === 'lemlist') {
|
||||
logger.info(
|
||||
`[${requestId}] Lemlist provider detected. Creating Lemlist webhook subscription.`
|
||||
)
|
||||
try {
|
||||
const lemlistResult = await createLemlistWebhookSubscription(
|
||||
{
|
||||
id: savedWebhook.id,
|
||||
path: savedWebhook.path,
|
||||
providerConfig: savedWebhook.providerConfig,
|
||||
},
|
||||
requestId
|
||||
)
|
||||
|
||||
if (lemlistResult) {
|
||||
// Update the webhook record with the external Lemlist hook ID
|
||||
const updatedConfig = {
|
||||
...(savedWebhook.providerConfig as Record<string, any>),
|
||||
externalId: lemlistResult.id,
|
||||
}
|
||||
await db
|
||||
.update(webhook)
|
||||
.set({
|
||||
providerConfig: updatedConfig,
|
||||
updatedAt: new Date(),
|
||||
})
|
||||
.where(eq(webhook.id, savedWebhook.id))
|
||||
|
||||
savedWebhook.providerConfig = updatedConfig
|
||||
logger.info(`[${requestId}] Successfully created Lemlist webhook`, {
|
||||
lemlistHookId: lemlistResult.id,
|
||||
webhookId: savedWebhook.id,
|
||||
})
|
||||
}
|
||||
} catch (err) {
|
||||
logger.error(
|
||||
`[${requestId}] Error creating Lemlist webhook subscription, rolling back webhook`,
|
||||
err
|
||||
)
|
||||
await db.delete(webhook).where(eq(webhook.id, savedWebhook.id))
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: 'Failed to create webhook in Lemlist',
|
||||
details: err instanceof Error ? err.message : 'Unknown error',
|
||||
},
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
// --- End Lemlist specific logic ---
|
||||
|
||||
if (!targetWebhookId && savedWebhook) {
|
||||
try {
|
||||
PlatformEvents.webhookCreated({
|
||||
@@ -868,616 +651,3 @@ export async function POST(request: NextRequest) {
|
||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
|
||||
// Helper function to create the webhook subscription in Airtable
|
||||
async function createAirtableWebhookSubscription(
|
||||
request: NextRequest,
|
||||
userId: string,
|
||||
webhookData: any,
|
||||
requestId: string
|
||||
): Promise<string | undefined> {
|
||||
try {
|
||||
const { path, providerConfig } = webhookData
|
||||
const { baseId, tableId, includeCellValuesInFieldIds } = providerConfig || {}
|
||||
|
||||
if (!baseId || !tableId) {
|
||||
logger.warn(`[${requestId}] Missing baseId or tableId for Airtable webhook creation.`, {
|
||||
webhookId: webhookData.id,
|
||||
})
|
||||
throw new Error(
|
||||
'Base ID and Table ID are required to create Airtable webhook. Please provide valid Airtable base and table IDs.'
|
||||
)
|
||||
}
|
||||
|
||||
const accessToken = await getOAuthToken(userId, 'airtable')
|
||||
if (!accessToken) {
|
||||
logger.warn(
|
||||
`[${requestId}] Could not retrieve Airtable access token for user ${userId}. Cannot create webhook in Airtable.`
|
||||
)
|
||||
throw new Error(
|
||||
'Airtable account connection required. Please connect your Airtable account in the trigger configuration and try again.'
|
||||
)
|
||||
}
|
||||
|
||||
const notificationUrl = `${getBaseUrl()}/api/webhooks/trigger/${path}`
|
||||
|
||||
const airtableApiUrl = `https://api.airtable.com/v0/bases/${baseId}/webhooks`
|
||||
|
||||
const specification: any = {
|
||||
options: {
|
||||
filters: {
|
||||
dataTypes: ['tableData'], // Watch table data changes
|
||||
recordChangeScope: tableId, // Watch only the specified table
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
// Conditionally add the 'includes' field based on the config
|
||||
if (includeCellValuesInFieldIds === 'all') {
|
||||
specification.options.includes = {
|
||||
includeCellValuesInFieldIds: 'all',
|
||||
}
|
||||
}
|
||||
|
||||
const requestBody: any = {
|
||||
notificationUrl: notificationUrl,
|
||||
specification: specification,
|
||||
}
|
||||
|
||||
const airtableResponse = await fetch(airtableApiUrl, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
body: JSON.stringify(requestBody),
|
||||
})
|
||||
|
||||
// Airtable often returns 200 OK even for errors in the body, check payload
|
||||
const responseBody = await airtableResponse.json()
|
||||
|
||||
if (!airtableResponse.ok || responseBody.error) {
|
||||
const errorMessage =
|
||||
responseBody.error?.message || responseBody.error || 'Unknown Airtable API error'
|
||||
const errorType = responseBody.error?.type
|
||||
logger.error(
|
||||
`[${requestId}] Failed to create webhook in Airtable for webhook ${webhookData.id}. Status: ${airtableResponse.status}`,
|
||||
{ type: errorType, message: errorMessage, response: responseBody }
|
||||
)
|
||||
|
||||
let userFriendlyMessage = 'Failed to create webhook subscription in Airtable'
|
||||
if (airtableResponse.status === 404) {
|
||||
userFriendlyMessage =
|
||||
'Airtable base or table not found. Please verify that the Base ID and Table ID are correct and that you have access to them.'
|
||||
} else if (errorMessage && errorMessage !== 'Unknown Airtable API error') {
|
||||
userFriendlyMessage = `Airtable error: ${errorMessage}`
|
||||
}
|
||||
|
||||
throw new Error(userFriendlyMessage)
|
||||
}
|
||||
logger.info(
|
||||
`[${requestId}] Successfully created webhook in Airtable for webhook ${webhookData.id}.`,
|
||||
{
|
||||
airtableWebhookId: responseBody.id,
|
||||
}
|
||||
)
|
||||
return responseBody.id
|
||||
} catch (error: any) {
|
||||
logger.error(
|
||||
`[${requestId}] Exception during Airtable webhook creation for webhook ${webhookData.id}.`,
|
||||
{
|
||||
message: error.message,
|
||||
stack: error.stack,
|
||||
}
|
||||
)
|
||||
// Re-throw the error so it can be caught by the outer try-catch
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
// Helper function to create the webhook subscription in Calendly
|
||||
async function createCalendlyWebhookSubscription(
|
||||
request: NextRequest,
|
||||
userId: string,
|
||||
webhookData: any,
|
||||
requestId: string
|
||||
): Promise<string | undefined> {
|
||||
try {
|
||||
const { path, providerConfig } = webhookData
|
||||
const { apiKey, organization, triggerId } = providerConfig || {}
|
||||
|
||||
if (!apiKey) {
|
||||
logger.warn(`[${requestId}] Missing apiKey for Calendly webhook creation.`, {
|
||||
webhookId: webhookData.id,
|
||||
})
|
||||
throw new Error(
|
||||
'Personal Access Token is required to create Calendly webhook. Please provide your Calendly Personal Access Token.'
|
||||
)
|
||||
}
|
||||
|
||||
if (!organization) {
|
||||
logger.warn(`[${requestId}] Missing organization URI for Calendly webhook creation.`, {
|
||||
webhookId: webhookData.id,
|
||||
})
|
||||
throw new Error(
|
||||
'Organization URI is required to create Calendly webhook. Please provide your Organization URI from the "Get Current User" operation.'
|
||||
)
|
||||
}
|
||||
|
||||
if (!triggerId) {
|
||||
logger.warn(`[${requestId}] Missing triggerId for Calendly webhook creation.`, {
|
||||
webhookId: webhookData.id,
|
||||
})
|
||||
throw new Error('Trigger ID is required to create Calendly webhook')
|
||||
}
|
||||
|
||||
const notificationUrl = `${getBaseUrl()}/api/webhooks/trigger/${path}`
|
||||
|
||||
// Map trigger IDs to Calendly event types
|
||||
const eventTypeMap: Record<string, string[]> = {
|
||||
calendly_invitee_created: ['invitee.created'],
|
||||
calendly_invitee_canceled: ['invitee.canceled'],
|
||||
calendly_routing_form_submitted: ['routing_form_submission.created'],
|
||||
calendly_webhook: ['invitee.created', 'invitee.canceled', 'routing_form_submission.created'],
|
||||
}
|
||||
|
||||
const events = eventTypeMap[triggerId] || ['invitee.created']
|
||||
|
||||
const calendlyApiUrl = 'https://api.calendly.com/webhook_subscriptions'
|
||||
|
||||
const requestBody = {
|
||||
url: notificationUrl,
|
||||
events,
|
||||
organization,
|
||||
scope: 'organization',
|
||||
}
|
||||
|
||||
const calendlyResponse = await fetch(calendlyApiUrl, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
Authorization: `Bearer ${apiKey}`,
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
body: JSON.stringify(requestBody),
|
||||
})
|
||||
|
||||
if (!calendlyResponse.ok) {
|
||||
const errorBody = await calendlyResponse.json().catch(() => ({}))
|
||||
const errorMessage = errorBody.message || errorBody.title || 'Unknown Calendly API error'
|
||||
logger.error(
|
||||
`[${requestId}] Failed to create webhook in Calendly for webhook ${webhookData.id}. Status: ${calendlyResponse.status}`,
|
||||
{ response: errorBody }
|
||||
)
|
||||
|
||||
let userFriendlyMessage = 'Failed to create webhook subscription in Calendly'
|
||||
if (calendlyResponse.status === 401) {
|
||||
userFriendlyMessage =
|
||||
'Calendly authentication failed. Please verify your Personal Access Token is correct.'
|
||||
} else if (calendlyResponse.status === 403) {
|
||||
userFriendlyMessage =
|
||||
'Calendly access denied. Please ensure you have appropriate permissions and a paid Calendly subscription.'
|
||||
} else if (calendlyResponse.status === 404) {
|
||||
userFriendlyMessage =
|
||||
'Calendly organization not found. Please verify the Organization URI is correct.'
|
||||
} else if (errorMessage && errorMessage !== 'Unknown Calendly API error') {
|
||||
userFriendlyMessage = `Calendly error: ${errorMessage}`
|
||||
}
|
||||
|
||||
throw new Error(userFriendlyMessage)
|
||||
}
|
||||
|
||||
const responseBody = await calendlyResponse.json()
|
||||
const webhookUri = responseBody.resource?.uri
|
||||
|
||||
if (!webhookUri) {
|
||||
logger.error(
|
||||
`[${requestId}] Calendly webhook created but no webhook URI returned for webhook ${webhookData.id}`,
|
||||
{ response: responseBody }
|
||||
)
|
||||
throw new Error('Calendly webhook creation succeeded but no webhook URI was returned')
|
||||
}
|
||||
|
||||
// Extract the webhook ID from the URI (e.g., https://api.calendly.com/webhook_subscriptions/WEBHOOK_ID)
|
||||
const webhookId = webhookUri.split('/').pop()
|
||||
|
||||
if (!webhookId) {
|
||||
logger.error(`[${requestId}] Could not extract webhook ID from Calendly URI: ${webhookUri}`, {
|
||||
response: responseBody,
|
||||
})
|
||||
throw new Error('Failed to extract webhook ID from Calendly response')
|
||||
}
|
||||
|
||||
logger.info(
|
||||
`[${requestId}] Successfully created webhook in Calendly for webhook ${webhookData.id}.`,
|
||||
{
|
||||
calendlyWebhookUri: webhookUri,
|
||||
calendlyWebhookId: webhookId,
|
||||
}
|
||||
)
|
||||
return webhookId
|
||||
} catch (error: any) {
|
||||
logger.error(
|
||||
`[${requestId}] Exception during Calendly webhook creation for webhook ${webhookData.id}.`,
|
||||
{
|
||||
message: error.message,
|
||||
stack: error.stack,
|
||||
}
|
||||
)
|
||||
// Re-throw the error so it can be caught by the outer try-catch
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
// Helper function to create the webhook subscription in Webflow
|
||||
async function createWebflowWebhookSubscription(
|
||||
request: NextRequest,
|
||||
userId: string,
|
||||
webhookData: any,
|
||||
requestId: string
|
||||
): Promise<string | undefined> {
|
||||
try {
|
||||
const { path, providerConfig } = webhookData
|
||||
const { siteId, triggerId, collectionId, formId } = providerConfig || {}
|
||||
|
||||
if (!siteId) {
|
||||
logger.warn(`[${requestId}] Missing siteId for Webflow webhook creation.`, {
|
||||
webhookId: webhookData.id,
|
||||
})
|
||||
throw new Error('Site ID is required to create Webflow webhook')
|
||||
}
|
||||
|
||||
if (!triggerId) {
|
||||
logger.warn(`[${requestId}] Missing triggerId for Webflow webhook creation.`, {
|
||||
webhookId: webhookData.id,
|
||||
})
|
||||
throw new Error('Trigger type is required to create Webflow webhook')
|
||||
}
|
||||
|
||||
const accessToken = await getOAuthToken(userId, 'webflow')
|
||||
if (!accessToken) {
|
||||
logger.warn(
|
||||
`[${requestId}] Could not retrieve Webflow access token for user ${userId}. Cannot create webhook in Webflow.`
|
||||
)
|
||||
throw new Error(
|
||||
'Webflow account connection required. Please connect your Webflow account in the trigger configuration and try again.'
|
||||
)
|
||||
}
|
||||
|
||||
const notificationUrl = `${getBaseUrl()}/api/webhooks/trigger/${path}`
|
||||
|
||||
// Map trigger IDs to Webflow trigger types
|
||||
const triggerTypeMap: Record<string, string> = {
|
||||
webflow_collection_item_created: 'collection_item_created',
|
||||
webflow_collection_item_changed: 'collection_item_changed',
|
||||
webflow_collection_item_deleted: 'collection_item_deleted',
|
||||
webflow_form_submission: 'form_submission',
|
||||
}
|
||||
|
||||
const webflowTriggerType = triggerTypeMap[triggerId]
|
||||
if (!webflowTriggerType) {
|
||||
logger.warn(`[${requestId}] Invalid triggerId for Webflow: ${triggerId}`, {
|
||||
webhookId: webhookData.id,
|
||||
})
|
||||
throw new Error(`Invalid Webflow trigger type: ${triggerId}`)
|
||||
}
|
||||
|
||||
const webflowApiUrl = `https://api.webflow.com/v2/sites/${siteId}/webhooks`
|
||||
|
||||
const requestBody: any = {
|
||||
triggerType: webflowTriggerType,
|
||||
url: notificationUrl,
|
||||
}
|
||||
|
||||
// Add filter for collection-based triggers
|
||||
if (collectionId && webflowTriggerType.startsWith('collection_item_')) {
|
||||
requestBody.filter = {
|
||||
resource_type: 'collection',
|
||||
resource_id: collectionId,
|
||||
}
|
||||
}
|
||||
|
||||
// Add filter for form submissions
|
||||
if (formId && webflowTriggerType === 'form_submission') {
|
||||
requestBody.filter = {
|
||||
resource_type: 'form',
|
||||
resource_id: formId,
|
||||
}
|
||||
}
|
||||
|
||||
const webflowResponse = await fetch(webflowApiUrl, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
'Content-Type': 'application/json',
|
||||
accept: 'application/json',
|
||||
},
|
||||
body: JSON.stringify(requestBody),
|
||||
})
|
||||
|
||||
const responseBody = await webflowResponse.json()
|
||||
|
||||
if (!webflowResponse.ok || responseBody.error) {
|
||||
const errorMessage = responseBody.message || responseBody.error || 'Unknown Webflow API error'
|
||||
logger.error(
|
||||
`[${requestId}] Failed to create webhook in Webflow for webhook ${webhookData.id}. Status: ${webflowResponse.status}`,
|
||||
{ message: errorMessage, response: responseBody }
|
||||
)
|
||||
throw new Error(errorMessage)
|
||||
}
|
||||
|
||||
logger.info(
|
||||
`[${requestId}] Successfully created webhook in Webflow for webhook ${webhookData.id}.`,
|
||||
{
|
||||
webflowWebhookId: responseBody.id || responseBody._id,
|
||||
}
|
||||
)
|
||||
|
||||
return responseBody.id || responseBody._id
|
||||
} catch (error: any) {
|
||||
logger.error(
|
||||
`[${requestId}] Exception during Webflow webhook creation for webhook ${webhookData.id}.`,
|
||||
{
|
||||
message: error.message,
|
||||
stack: error.stack,
|
||||
}
|
||||
)
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
// Helper function to create the webhook subscription in Grain
async function createGrainWebhookSubscription(
request: NextRequest,
webhookData: any,
requestId: string
): Promise<{ id: string; eventTypes: string[] } | undefined> {
try {
const { path, providerConfig } = webhookData
const { apiKey, triggerId, includeHighlights, includeParticipants, includeAiSummary } =
providerConfig || {}

if (!apiKey) {
logger.warn(`[${requestId}] Missing apiKey for Grain webhook creation.`, {
webhookId: webhookData.id,
})
throw new Error(
'Grain API Key is required. Please provide your Grain Personal Access Token in the trigger configuration.'
)
}

// Map trigger IDs to Grain API hook_type (only 2 options: recording_added, upload_status)
const hookTypeMap: Record<string, string> = {
grain_webhook: 'recording_added',
grain_recording_created: 'recording_added',
grain_recording_updated: 'recording_added',
grain_highlight_created: 'recording_added',
grain_highlight_updated: 'recording_added',
grain_story_created: 'recording_added',
grain_upload_status: 'upload_status',
}

const eventTypeMap: Record<string, string[]> = {
grain_webhook: [],
grain_recording_created: ['recording_added'],
grain_recording_updated: ['recording_updated'],
grain_highlight_created: ['highlight_created'],
grain_highlight_updated: ['highlight_updated'],
grain_story_created: ['story_created'],
grain_upload_status: ['upload_status'],
}

const hookType = hookTypeMap[triggerId] ?? 'recording_added'
const eventTypes = eventTypeMap[triggerId] ?? []

if (!hookTypeMap[triggerId]) {
logger.warn(
`[${requestId}] Unknown triggerId for Grain: ${triggerId}, defaulting to recording_added`,
{
webhookId: webhookData.id,
}
)
}

logger.info(`[${requestId}] Creating Grain webhook`, {
triggerId,
hookType,
eventTypes,
webhookId: webhookData.id,
})

const notificationUrl = `${getBaseUrl()}/api/webhooks/trigger/${path}`

const grainApiUrl = 'https://api.grain.com/_/public-api/v2/hooks/create'

const requestBody: Record<string, any> = {
hook_url: notificationUrl,
hook_type: hookType,
}

// Build include object based on configuration
const include: Record<string, boolean> = {}
if (includeHighlights) {
include.highlights = true
}
if (includeParticipants) {
include.participants = true
}
if (includeAiSummary) {
include.ai_summary = true
}
if (Object.keys(include).length > 0) {
requestBody.include = include
}

const grainResponse = await fetch(grainApiUrl, {
method: 'POST',
headers: {
Authorization: `Bearer ${apiKey}`,
'Content-Type': 'application/json',
'Public-Api-Version': '2025-10-31',
},
body: JSON.stringify(requestBody),
})

const responseBody = await grainResponse.json()

if (!grainResponse.ok || responseBody.error || responseBody.errors) {
logger.warn('[App] Grain response body:', responseBody)
const errorMessage =
responseBody.errors?.detail ||
responseBody.error?.message ||
responseBody.error ||
responseBody.message ||
'Unknown Grain API error'
logger.error(
`[${requestId}] Failed to create webhook in Grain for webhook ${webhookData.id}. Status: ${grainResponse.status}`,
{ message: errorMessage, response: responseBody }
)

let userFriendlyMessage = 'Failed to create webhook subscription in Grain'
if (grainResponse.status === 401) {
userFriendlyMessage =
'Invalid Grain API Key. Please verify your Personal Access Token is correct.'
} else if (grainResponse.status === 403) {
userFriendlyMessage =
'Access denied. Please ensure your Grain API Key has appropriate permissions.'
} else if (errorMessage && errorMessage !== 'Unknown Grain API error') {
userFriendlyMessage = `Grain error: ${errorMessage}`
}

throw new Error(userFriendlyMessage)
}

logger.info(
`[${requestId}] Successfully created webhook in Grain for webhook ${webhookData.id}.`,
{
grainWebhookId: responseBody.id,
eventTypes,
}
)

return { id: responseBody.id, eventTypes }
} catch (error: any) {
logger.error(
`[${requestId}] Exception during Grain webhook creation for webhook ${webhookData.id}.`,
{
message: error.message,
stack: error.stack,
}
)
throw error
}
}

// Helper function to create the webhook subscription in Lemlist
async function createLemlistWebhookSubscription(
webhookData: any,
requestId: string
): Promise<{ id: string } | undefined> {
try {
const { path, providerConfig } = webhookData
const { apiKey, triggerId, campaignId } = providerConfig || {}

if (!apiKey) {
logger.warn(`[${requestId}] Missing apiKey for Lemlist webhook creation.`, {
webhookId: webhookData.id,
})
throw new Error(
'Lemlist API Key is required. Please provide your Lemlist API Key in the trigger configuration.'
)
}

// Map trigger IDs to Lemlist event types
const eventTypeMap: Record<string, string | undefined> = {
lemlist_email_replied: 'emailsReplied',
lemlist_linkedin_replied: 'linkedinReplied',
lemlist_interested: 'interested',
lemlist_not_interested: 'notInterested',
lemlist_email_opened: 'emailsOpened',
lemlist_email_clicked: 'emailsClicked',
lemlist_email_bounced: 'emailsBounced',
lemlist_email_sent: 'emailsSent',
lemlist_webhook: undefined, // Generic webhook - no type filter
}

const eventType = eventTypeMap[triggerId]

logger.info(`[${requestId}] Creating Lemlist webhook`, {
triggerId,
eventType,
hasCampaignId: !!campaignId,
webhookId: webhookData.id,
})

const notificationUrl = `${getBaseUrl()}/api/webhooks/trigger/${path}`

const lemlistApiUrl = 'https://api.lemlist.com/api/hooks'

// Build request body
const requestBody: Record<string, any> = {
targetUrl: notificationUrl,
}

// Add event type if specified (omit for generic webhook to receive all events)
if (eventType) {
requestBody.type = eventType
}

// Add campaign filter if specified
if (campaignId) {
requestBody.campaignId = campaignId
}

// Lemlist uses Basic Auth with empty username and API key as password
const authString = Buffer.from(`:${apiKey}`).toString('base64')

const lemlistResponse = await fetch(lemlistApiUrl, {
method: 'POST',
headers: {
Authorization: `Basic ${authString}`,
'Content-Type': 'application/json',
},
body: JSON.stringify(requestBody),
})

const responseBody = await lemlistResponse.json()

if (!lemlistResponse.ok || responseBody.error) {
const errorMessage = responseBody.message || responseBody.error || 'Unknown Lemlist API error'
logger.error(
`[${requestId}] Failed to create webhook in Lemlist for webhook ${webhookData.id}. Status: ${lemlistResponse.status}`,
{ message: errorMessage, response: responseBody }
)

let userFriendlyMessage = 'Failed to create webhook subscription in Lemlist'
if (lemlistResponse.status === 401) {
userFriendlyMessage = 'Invalid Lemlist API Key. Please verify your API Key is correct.'
} else if (lemlistResponse.status === 403) {
userFriendlyMessage =
'Access denied. Please ensure your Lemlist API Key has appropriate permissions.'
} else if (errorMessage && errorMessage !== 'Unknown Lemlist API error') {
userFriendlyMessage = `Lemlist error: ${errorMessage}`
}

throw new Error(userFriendlyMessage)
}

logger.info(
`[${requestId}] Successfully created webhook in Lemlist for webhook ${webhookData.id}.`,
{
lemlistWebhookId: responseBody._id,
}
)

return { id: responseBody._id }
} catch (error: any) {
logger.error(
`[${requestId}] Exception during Lemlist webhook creation for webhook ${webhookData.id}.`,
{
message: error.message,
stack: error.stack,
}
)
throw error
}
}
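One detail worth noting in `createLemlistWebhookSubscription`: Lemlist is authenticated with HTTP Basic auth using an empty username and the API key as the password, which is why the helper base64-encodes `:${apiKey}`. A small illustrative sketch of the header and payload the code above ends up sending (values are placeholders):

```ts
// Illustrative only - mirrors the request assembled by createLemlistWebhookSubscription above.
const apiKey = 'lemlist-api-key' // placeholder value
const authString = Buffer.from(`:${apiKey}`).toString('base64')

const headers = {
  Authorization: `Basic ${authString}`, // "Basic " + base64(":" + apiKey)
  'Content-Type': 'application/json',
}

const requestBody = {
  targetUrl: 'https://example.com/api/webhooks/trigger/my-path', // getBaseUrl() + trigger path
  type: 'emailsReplied', // omitted entirely for the generic lemlist_webhook trigger
  campaignId: 'cam_123', // optional campaign filter
}

// fetch('https://api.lemlist.com/api/hooks', { method: 'POST', headers, body: JSON.stringify(requestBody) })
```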
@@ -3,15 +3,92 @@
|
||||
*
|
||||
* @vitest-environment node
|
||||
*/
|
||||
|
||||
import { loggerMock } from '@sim/testing'
|
||||
import { createMockRequest, loggerMock } from '@sim/testing'
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
import {
|
||||
createMockRequest,
|
||||
globalMockData,
|
||||
mockExecutionDependencies,
|
||||
mockTriggerDevSdk,
|
||||
} from '@/app/api/__test-utils__/utils'
|
||||
|
||||
/** Mock execution dependencies for webhook tests */
|
||||
function mockExecutionDependencies() {
|
||||
vi.mock('@/lib/core/security/encryption', () => ({
|
||||
decryptSecret: vi.fn().mockResolvedValue({ decrypted: 'decrypted-value' }),
|
||||
}))
|
||||
|
||||
vi.mock('@/lib/logs/execution/trace-spans/trace-spans', () => ({
|
||||
buildTraceSpans: vi.fn().mockReturnValue({ traceSpans: [], totalDuration: 100 }),
|
||||
}))
|
||||
|
||||
vi.mock('@/lib/workflows/utils', () => ({
|
||||
updateWorkflowRunCounts: vi.fn().mockResolvedValue(undefined),
|
||||
}))
|
||||
|
||||
vi.mock('@/serializer', () => ({
|
||||
Serializer: vi.fn().mockImplementation(() => ({
|
||||
serializeWorkflow: vi.fn().mockReturnValue({
|
||||
version: '1.0',
|
||||
blocks: [
|
||||
{
|
||||
id: 'starter-id',
|
||||
metadata: { id: 'starter', name: 'Start' },
|
||||
config: {},
|
||||
inputs: {},
|
||||
outputs: {},
|
||||
position: { x: 100, y: 100 },
|
||||
enabled: true,
|
||||
},
|
||||
{
|
||||
id: 'agent-id',
|
||||
metadata: { id: 'agent', name: 'Agent 1' },
|
||||
config: {},
|
||||
inputs: {},
|
||||
outputs: {},
|
||||
position: { x: 634, y: -167 },
|
||||
enabled: true,
|
||||
},
|
||||
],
|
||||
edges: [
|
||||
{
|
||||
id: 'edge-1',
|
||||
source: 'starter-id',
|
||||
target: 'agent-id',
|
||||
sourceHandle: 'source',
|
||||
targetHandle: 'target',
|
||||
},
|
||||
],
|
||||
loops: {},
|
||||
parallels: {},
|
||||
}),
|
||||
})),
|
||||
}))
|
||||
}
|
||||
|
||||
/** Mock Trigger.dev SDK */
|
||||
function mockTriggerDevSdk() {
|
||||
vi.mock('@trigger.dev/sdk', () => ({
|
||||
tasks: { trigger: vi.fn().mockResolvedValue({ id: 'mock-task-id' }) },
|
||||
task: vi.fn().mockReturnValue({}),
|
||||
}))
|
||||
}
|
||||
|
||||
/**
|
||||
* Test data store - isolated per test via beforeEach reset
|
||||
* This replaces the global mutable state pattern with local test data
|
||||
*/
|
||||
const testData = {
|
||||
webhooks: [] as Array<{
|
||||
id: string
|
||||
provider: string
|
||||
path: string
|
||||
isActive: boolean
|
||||
providerConfig?: Record<string, unknown>
|
||||
workflowId: string
|
||||
rateLimitCount?: number
|
||||
rateLimitPeriod?: number
|
||||
}>,
|
||||
workflows: [] as Array<{
|
||||
id: string
|
||||
userId: string
|
||||
workspaceId?: string
|
||||
}>,
|
||||
}
|
||||
|
||||
const {
|
||||
generateRequestHashMock,
|
||||
@@ -159,8 +236,8 @@ vi.mock('@/lib/workflows/persistence/utils', () => ({
|
||||
|
||||
vi.mock('@/lib/webhooks/processor', () => ({
|
||||
findAllWebhooksForPath: vi.fn().mockImplementation(async (options: { path: string }) => {
|
||||
// Filter webhooks by path from globalMockData
|
||||
const matchingWebhooks = globalMockData.webhooks.filter(
|
||||
// Filter webhooks by path from testData
|
||||
const matchingWebhooks = testData.webhooks.filter(
|
||||
(wh) => wh.path === options.path && wh.isActive
|
||||
)
|
||||
|
||||
@@ -170,7 +247,7 @@ vi.mock('@/lib/webhooks/processor', () => ({
|
||||
|
||||
// Return array of {webhook, workflow} objects
|
||||
return matchingWebhooks.map((wh) => {
|
||||
const matchingWorkflow = globalMockData.workflows.find((w) => w.id === wh.workflowId) || {
|
||||
const matchingWorkflow = testData.workflows.find((w) => w.id === wh.workflowId) || {
|
||||
id: wh.workflowId || 'test-workflow-id',
|
||||
userId: 'test-user-id',
|
||||
workspaceId: 'test-workspace-id',
|
||||
@@ -283,14 +360,15 @@ describe('Webhook Trigger API Route', () => {
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks()
|
||||
|
||||
globalMockData.webhooks.length = 0
|
||||
globalMockData.workflows.length = 0
|
||||
globalMockData.schedules.length = 0
|
||||
// Reset test data arrays
|
||||
testData.webhooks.length = 0
|
||||
testData.workflows.length = 0
|
||||
|
||||
mockExecutionDependencies()
|
||||
mockTriggerDevSdk()
|
||||
|
||||
globalMockData.workflows.push({
|
||||
// Set up default workflow for tests
|
||||
testData.workflows.push({
|
||||
id: 'test-workflow-id',
|
||||
userId: 'test-user-id',
|
||||
workspaceId: 'test-workspace-id',
|
||||
@@ -326,7 +404,7 @@ describe('Webhook Trigger API Route', () => {
|
||||
|
||||
describe('Generic Webhook Authentication', () => {
|
||||
it('should process generic webhook without authentication', async () => {
|
||||
globalMockData.webhooks.push({
|
||||
testData.webhooks.push({
|
||||
id: 'generic-webhook-id',
|
||||
provider: 'generic',
|
||||
path: 'test-path',
|
||||
@@ -336,7 +414,7 @@ describe('Webhook Trigger API Route', () => {
|
||||
rateLimitCount: 100,
|
||||
rateLimitPeriod: 60,
|
||||
})
|
||||
globalMockData.workflows.push({
|
||||
testData.workflows.push({
|
||||
id: 'test-workflow-id',
|
||||
userId: 'test-user-id',
|
||||
workspaceId: 'test-workspace-id',
|
||||
@@ -354,7 +432,7 @@ describe('Webhook Trigger API Route', () => {
|
||||
})
|
||||
|
||||
it('should authenticate with Bearer token when no custom header is configured', async () => {
|
||||
globalMockData.webhooks.push({
|
||||
testData.webhooks.push({
|
||||
id: 'generic-webhook-id',
|
||||
provider: 'generic',
|
||||
path: 'test-path',
|
||||
@@ -362,7 +440,7 @@ describe('Webhook Trigger API Route', () => {
|
||||
providerConfig: { requireAuth: true, token: 'test-token-123' },
|
||||
workflowId: 'test-workflow-id',
|
||||
})
|
||||
globalMockData.workflows.push({
|
||||
testData.workflows.push({
|
||||
id: 'test-workflow-id',
|
||||
userId: 'test-user-id',
|
||||
workspaceId: 'test-workspace-id',
|
||||
@@ -381,7 +459,7 @@ describe('Webhook Trigger API Route', () => {
|
||||
})
|
||||
|
||||
it('should authenticate with custom header when configured', async () => {
|
||||
globalMockData.webhooks.push({
|
||||
testData.webhooks.push({
|
||||
id: 'generic-webhook-id',
|
||||
provider: 'generic',
|
||||
path: 'test-path',
|
||||
@@ -393,7 +471,7 @@ describe('Webhook Trigger API Route', () => {
|
||||
},
|
||||
workflowId: 'test-workflow-id',
|
||||
})
|
||||
globalMockData.workflows.push({
|
||||
testData.workflows.push({
|
||||
id: 'test-workflow-id',
|
||||
userId: 'test-user-id',
|
||||
workspaceId: 'test-workspace-id',
|
||||
@@ -412,7 +490,7 @@ describe('Webhook Trigger API Route', () => {
|
||||
})
|
||||
|
||||
it('should handle case insensitive Bearer token authentication', async () => {
|
||||
globalMockData.webhooks.push({
|
||||
testData.webhooks.push({
|
||||
id: 'generic-webhook-id',
|
||||
provider: 'generic',
|
||||
path: 'test-path',
|
||||
@@ -420,7 +498,7 @@ describe('Webhook Trigger API Route', () => {
|
||||
providerConfig: { requireAuth: true, token: 'case-test-token' },
|
||||
workflowId: 'test-workflow-id',
|
||||
})
|
||||
globalMockData.workflows.push({
|
||||
testData.workflows.push({
|
||||
id: 'test-workflow-id',
|
||||
userId: 'test-user-id',
|
||||
workspaceId: 'test-workspace-id',
|
||||
@@ -454,7 +532,7 @@ describe('Webhook Trigger API Route', () => {
|
||||
})
|
||||
|
||||
it('should handle case insensitive custom header authentication', async () => {
|
||||
globalMockData.webhooks.push({
|
||||
testData.webhooks.push({
|
||||
id: 'generic-webhook-id',
|
||||
provider: 'generic',
|
||||
path: 'test-path',
|
||||
@@ -466,7 +544,7 @@ describe('Webhook Trigger API Route', () => {
|
||||
},
|
||||
workflowId: 'test-workflow-id',
|
||||
})
|
||||
globalMockData.workflows.push({
|
||||
testData.workflows.push({
|
||||
id: 'test-workflow-id',
|
||||
userId: 'test-user-id',
|
||||
workspaceId: 'test-workspace-id',
|
||||
@@ -495,7 +573,7 @@ describe('Webhook Trigger API Route', () => {
|
||||
})
|
||||
|
||||
it('should reject wrong Bearer token', async () => {
|
||||
globalMockData.webhooks.push({
|
||||
testData.webhooks.push({
|
||||
id: 'generic-webhook-id',
|
||||
provider: 'generic',
|
||||
path: 'test-path',
|
||||
@@ -519,7 +597,7 @@ describe('Webhook Trigger API Route', () => {
|
||||
})
|
||||
|
||||
it('should reject wrong custom header token', async () => {
|
||||
globalMockData.webhooks.push({
|
||||
testData.webhooks.push({
|
||||
id: 'generic-webhook-id',
|
||||
provider: 'generic',
|
||||
path: 'test-path',
|
||||
@@ -547,7 +625,7 @@ describe('Webhook Trigger API Route', () => {
|
||||
})
|
||||
|
||||
it('should reject missing authentication when required', async () => {
|
||||
globalMockData.webhooks.push({
|
||||
testData.webhooks.push({
|
||||
id: 'generic-webhook-id',
|
||||
provider: 'generic',
|
||||
path: 'test-path',
|
||||
@@ -567,7 +645,7 @@ describe('Webhook Trigger API Route', () => {
|
||||
})
|
||||
|
||||
it('should reject Bearer token when custom header is configured', async () => {
|
||||
globalMockData.webhooks.push({
|
||||
testData.webhooks.push({
|
||||
id: 'generic-webhook-id',
|
||||
provider: 'generic',
|
||||
path: 'test-path',
|
||||
@@ -595,7 +673,7 @@ describe('Webhook Trigger API Route', () => {
|
||||
})
|
||||
|
||||
it('should reject wrong custom header name', async () => {
|
||||
globalMockData.webhooks.push({
|
||||
testData.webhooks.push({
|
||||
id: 'generic-webhook-id',
|
||||
provider: 'generic',
|
||||
path: 'test-path',
|
||||
@@ -623,7 +701,7 @@ describe('Webhook Trigger API Route', () => {
|
||||
})
|
||||
|
||||
it('should reject when auth is required but no token is configured', async () => {
|
||||
globalMockData.webhooks.push({
|
||||
testData.webhooks.push({
|
||||
id: 'generic-webhook-id',
|
||||
provider: 'generic',
|
||||
path: 'test-path',
|
||||
@@ -631,7 +709,7 @@ describe('Webhook Trigger API Route', () => {
|
||||
providerConfig: { requireAuth: true },
|
||||
workflowId: 'test-workflow-id',
|
||||
})
|
||||
globalMockData.workflows.push({ id: 'test-workflow-id', userId: 'test-user-id' })
|
||||
testData.workflows.push({ id: 'test-workflow-id', userId: 'test-user-id' })
|
||||
|
||||
const headers = {
|
||||
'Content-Type': 'application/json',
|
||||
|
||||
@@ -22,13 +22,6 @@ export async function GET(_request: Request, { params }: { params: Promise<{ id:
.select({
id: chat.id,
identifier: chat.identifier,
title: chat.title,
description: chat.description,
customizations: chat.customizations,
authType: chat.authType,
allowedEmails: chat.allowedEmails,
outputConfigs: chat.outputConfigs,
password: chat.password,
isActive: chat.isActive,
})
.from(chat)
@@ -41,13 +34,6 @@ export async function GET(_request: Request, { params }: { params: Promise<{ id:
? {
id: deploymentResults[0].id,
identifier: deploymentResults[0].identifier,
title: deploymentResults[0].title,
description: deploymentResults[0].description,
customizations: deploymentResults[0].customizations,
authType: deploymentResults[0].authType,
allowedEmails: deploymentResults[0].allowedEmails,
outputConfigs: deploymentResults[0].outputConfigs,
hasPassword: Boolean(deploymentResults[0].password),
}
: null

@@ -4,6 +4,7 @@ import { and, desc, eq } from 'drizzle-orm'
import type { NextRequest } from 'next/server'
import { generateRequestId } from '@/lib/core/utils/request'
import { removeMcpToolsForWorkflow, syncMcpToolsForWorkflow } from '@/lib/mcp/workflow-mcp-sync'
import { cleanupWebhooksForWorkflow, saveTriggerWebhooksForDeploy } from '@/lib/webhooks/deploy'
import {
deployWorkflow,
loadWorkflowFromNormalizedTables,
@@ -130,6 +131,22 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
return createErrorResponse(`Invalid schedule configuration: ${scheduleValidation.error}`, 400)
}

const triggerSaveResult = await saveTriggerWebhooksForDeploy({
request,
workflowId: id,
workflow: workflowData,
userId: actorUserId,
blocks: normalizedData.blocks,
requestId,
})

if (!triggerSaveResult.success) {
return createErrorResponse(
triggerSaveResult.error?.message || 'Failed to save trigger configuration',
triggerSaveResult.error?.status || 500
)
}

const deployResult = await deployWorkflow({
workflowId: id,
deployedBy: actorUserId,
@@ -202,11 +219,18 @@ export async function DELETE(
try {
logger.debug(`[${requestId}] Undeploying workflow: ${id}`)

const { error } = await validateWorkflowPermissions(id, requestId, 'admin')
const { error, workflow: workflowData } = await validateWorkflowPermissions(
id,
requestId,
'admin'
)
if (error) {
return createErrorResponse(error.message, error.status)
}

// Clean up external webhook subscriptions before undeploying
await cleanupWebhooksForWorkflow(id, workflowData as Record<string, unknown>, requestId)

const result = await undeployWorkflow({ workflowId: id })
if (!result.success) {
return createErrorResponse(result.error || 'Failed to undeploy workflow', 500)
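From the call site in the POST handler above, `saveTriggerWebhooksForDeploy` reports failure through a result object rather than by throwing. An inferred sketch of that contract and the branch the route performs (the actual exported type from '@/lib/webhooks/deploy' may differ):

```ts
// Inferred from the call site above, not the package's published type.
interface TriggerSaveResult {
  success: boolean
  error?: {
    message?: string
    status?: number
  }
}

function toErrorResponseArgs(result: TriggerSaveResult): [string, number] | null {
  if (result.success) return null
  return [
    result.error?.message || 'Failed to save trigger configuration',
    result.error?.status || 500,
  ]
}
```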
@@ -1,5 +1,5 @@
|
||||
import { db } from '@sim/db'
|
||||
import { webhook, workflow } from '@sim/db/schema'
|
||||
import { workflow } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { eq } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
@@ -13,7 +13,6 @@ import { sanitizeAgentToolsInBlocks } from '@/lib/workflows/sanitization/validat
|
||||
import { getWorkflowAccessContext } from '@/lib/workflows/utils'
|
||||
import type { BlockState } from '@/stores/workflows/workflow/types'
|
||||
import { generateLoopBlocks, generateParallelBlocks } from '@/stores/workflows/workflow/utils'
|
||||
import { getTrigger } from '@/triggers'
|
||||
|
||||
const logger = createLogger('WorkflowStateAPI')
|
||||
|
||||
@@ -203,8 +202,6 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
|
||||
)
|
||||
}
|
||||
|
||||
await syncWorkflowWebhooks(workflowId, workflowState.blocks)
|
||||
|
||||
// Extract and persist custom tools to database
|
||||
try {
|
||||
const workspaceId = workflowData.workspaceId
|
||||
@@ -290,213 +287,3 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
|
||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
|
||||
function getSubBlockValue<T = unknown>(block: BlockState, subBlockId: string): T | undefined {
|
||||
const value = block.subBlocks?.[subBlockId]?.value
|
||||
if (value === undefined || value === null) {
|
||||
return undefined
|
||||
}
|
||||
return value as T
|
||||
}
|
||||
|
||||
async function syncWorkflowWebhooks(
|
||||
workflowId: string,
|
||||
blocks: Record<string, any>
|
||||
): Promise<void> {
|
||||
await syncBlockResources(workflowId, blocks, {
|
||||
resourceName: 'webhook',
|
||||
subBlockId: 'webhookId',
|
||||
buildMetadata: buildWebhookMetadata,
|
||||
applyMetadata: upsertWebhookRecord,
|
||||
})
|
||||
}
|
||||
|
||||
interface WebhookMetadata {
|
||||
triggerPath: string
|
||||
provider: string | null
|
||||
providerConfig: Record<string, any>
|
||||
}
|
||||
|
||||
const CREDENTIAL_SET_PREFIX = 'credentialSet:'
|
||||
|
||||
function buildWebhookMetadata(block: BlockState): WebhookMetadata | null {
|
||||
const triggerId =
|
||||
getSubBlockValue<string>(block, 'triggerId') ||
|
||||
getSubBlockValue<string>(block, 'selectedTriggerId')
|
||||
const triggerConfig = getSubBlockValue<Record<string, any>>(block, 'triggerConfig') || {}
|
||||
const triggerCredentials = getSubBlockValue<string>(block, 'triggerCredentials')
|
||||
const triggerPath = getSubBlockValue<string>(block, 'triggerPath') || block.id
|
||||
|
||||
const triggerDef = triggerId ? getTrigger(triggerId) : undefined
|
||||
const provider = triggerDef?.provider || null
|
||||
|
||||
// Handle credential sets vs individual credentials
|
||||
const isCredentialSet = triggerCredentials?.startsWith(CREDENTIAL_SET_PREFIX)
|
||||
const credentialSetId = isCredentialSet
|
||||
? triggerCredentials!.slice(CREDENTIAL_SET_PREFIX.length)
|
||||
: undefined
|
||||
const credentialId = isCredentialSet ? undefined : triggerCredentials
|
||||
|
||||
const providerConfig = {
|
||||
...(typeof triggerConfig === 'object' ? triggerConfig : {}),
|
||||
...(credentialId ? { credentialId } : {}),
|
||||
...(credentialSetId ? { credentialSetId } : {}),
|
||||
...(triggerId ? { triggerId } : {}),
|
||||
}
|
||||
|
||||
return {
|
||||
triggerPath,
|
||||
provider,
|
||||
providerConfig,
|
||||
}
|
||||
}
|
||||
|
||||
async function upsertWebhookRecord(
|
||||
workflowId: string,
|
||||
block: BlockState,
|
||||
webhookId: string,
|
||||
metadata: WebhookMetadata
|
||||
): Promise<void> {
|
||||
const providerConfig = metadata.providerConfig as Record<string, unknown>
|
||||
const credentialSetId = providerConfig?.credentialSetId as string | undefined
|
||||
|
||||
// For credential sets, delegate to the sync function which handles fan-out
|
||||
if (credentialSetId && metadata.provider) {
|
||||
const { syncWebhooksForCredentialSet } = await import('@/lib/webhooks/utils.server')
|
||||
const { getProviderIdFromServiceId } = await import('@/lib/oauth')
|
||||
|
||||
const oauthProviderId = getProviderIdFromServiceId(metadata.provider)
|
||||
const requestId = crypto.randomUUID().slice(0, 8)
|
||||
|
||||
// Extract base config (without credential-specific fields)
|
||||
const {
|
||||
credentialId: _cId,
|
||||
credentialSetId: _csId,
|
||||
userId: _uId,
|
||||
...baseConfig
|
||||
} = providerConfig
|
||||
|
||||
try {
|
||||
await syncWebhooksForCredentialSet({
|
||||
workflowId,
|
||||
blockId: block.id,
|
||||
provider: metadata.provider,
|
||||
basePath: metadata.triggerPath,
|
||||
credentialSetId,
|
||||
oauthProviderId,
|
||||
providerConfig: baseConfig as Record<string, any>,
|
||||
requestId,
|
||||
})
|
||||
|
||||
logger.info('Synced credential set webhooks during workflow save', {
|
||||
workflowId,
|
||||
blockId: block.id,
|
||||
credentialSetId,
|
||||
})
|
||||
} catch (error) {
|
||||
logger.error('Failed to sync credential set webhooks during workflow save', {
|
||||
workflowId,
|
||||
blockId: block.id,
|
||||
credentialSetId,
|
||||
error,
|
||||
})
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// For individual credentials, use the existing single webhook logic
|
||||
const [existing] = await db.select().from(webhook).where(eq(webhook.id, webhookId)).limit(1)
|
||||
|
||||
if (existing) {
|
||||
const needsUpdate =
|
||||
existing.blockId !== block.id ||
|
||||
existing.workflowId !== workflowId ||
|
||||
existing.path !== metadata.triggerPath
|
||||
|
||||
if (needsUpdate) {
|
||||
await db
|
||||
.update(webhook)
|
||||
.set({
|
||||
workflowId,
|
||||
blockId: block.id,
|
||||
path: metadata.triggerPath,
|
||||
provider: metadata.provider || existing.provider,
|
||||
providerConfig: Object.keys(metadata.providerConfig).length
|
||||
? metadata.providerConfig
|
||||
: existing.providerConfig,
|
||||
isActive: true,
|
||||
updatedAt: new Date(),
|
||||
})
|
||||
.where(eq(webhook.id, webhookId))
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
await db.insert(webhook).values({
|
||||
id: webhookId,
|
||||
workflowId,
|
||||
blockId: block.id,
|
||||
path: metadata.triggerPath,
|
||||
provider: metadata.provider,
|
||||
providerConfig: metadata.providerConfig,
|
||||
credentialSetId: null,
|
||||
isActive: true,
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
})
|
||||
|
||||
logger.info('Recreated missing webhook after workflow save', {
|
||||
workflowId,
|
||||
blockId: block.id,
|
||||
webhookId,
|
||||
})
|
||||
}
|
||||
|
||||
interface BlockResourceSyncConfig<T> {
|
||||
resourceName: string
|
||||
subBlockId: string
|
||||
buildMetadata: (block: BlockState, resourceId: string) => T | null
|
||||
applyMetadata: (
|
||||
workflowId: string,
|
||||
block: BlockState,
|
||||
resourceId: string,
|
||||
metadata: T
|
||||
) => Promise<void>
|
||||
}
|
||||
|
||||
async function syncBlockResources<T>(
|
||||
workflowId: string,
|
||||
blocks: Record<string, any>,
|
||||
config: BlockResourceSyncConfig<T>
|
||||
): Promise<void> {
|
||||
const blockEntries = Object.values(blocks || {}).filter(Boolean) as BlockState[]
|
||||
if (blockEntries.length === 0) return
|
||||
|
||||
for (const block of blockEntries) {
|
||||
const resourceId = getSubBlockValue<string>(block, config.subBlockId)
|
||||
if (!resourceId) continue
|
||||
|
||||
const metadata = config.buildMetadata(block, resourceId)
|
||||
if (!metadata) {
|
||||
logger.warn(`Skipping ${config.resourceName} sync due to invalid configuration`, {
|
||||
workflowId,
|
||||
blockId: block.id,
|
||||
resourceId,
|
||||
resourceName: config.resourceName,
|
||||
})
|
||||
continue
|
||||
}
|
||||
|
||||
try {
|
||||
await config.applyMetadata(workflowId, block, resourceId, metadata)
|
||||
} catch (error) {
|
||||
logger.error(`Failed to sync ${config.resourceName}`, {
|
||||
workflowId,
|
||||
blockId: block.id,
|
||||
resourceId,
|
||||
resourceName: config.resourceName,
|
||||
error,
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -4,29 +4,29 @@
*
* @vitest-environment node
*/

import { NextRequest } from 'next/server'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import {
createMockDatabase,
databaseMock,
defaultMockUser,
mockAuth,
mockCryptoUuid,
mockUser,
setupCommonApiMocks,
} from '@/app/api/__test-utils__/utils'
} from '@sim/testing'
import { NextRequest } from 'next/server'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'

describe('Workflow Variables API Route', () => {
let authMocks: ReturnType<typeof mockAuth>
let databaseMocks: ReturnType<typeof createMockDatabase>
const mockGetWorkflowAccessContext = vi.fn()

beforeEach(() => {
vi.resetModules()
setupCommonApiMocks()
mockCryptoUuid('mock-request-id-12345678')
authMocks = mockAuth(mockUser)
authMocks = mockAuth(defaultMockUser)
mockGetWorkflowAccessContext.mockReset()

vi.doMock('@sim/db', () => databaseMock)

vi.doMock('@/lib/workflows/utils', () => ({
getWorkflowAccessContext: mockGetWorkflowAccessContext,
}))
@@ -203,10 +203,6 @@ describe('Workflow Variables API Route', () => {
isWorkspaceOwner: false,
})

databaseMocks = createMockDatabase({
update: { results: [{}] },
})

const variables = {
'var-1': {
id: 'var-1',

@@ -1,5 +1,5 @@
import { createMockRequest, mockAuth, mockConsoleLogger } from '@sim/testing'
import { beforeEach, describe, expect, it, vi } from 'vitest'
import { createMockRequest, mockAuth, mockConsoleLogger } from '@/app/api/__test-utils__/utils'

describe('Workspace Invitations API Route', () => {
const mockWorkspace = { id: 'workspace-1', name: 'Test Workspace' }
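Both test files above switch their helpers from the local `__test-utils__` module to the shared `@sim/testing` package, with `defaultMockUser` replacing `mockUser`. A condensed sketch of the setup they converge on, recombining only the helpers visible in the imports above (exact signatures may differ):

```ts
import {
  createMockDatabase,
  databaseMock,
  defaultMockUser,
  mockAuth,
  mockCryptoUuid,
  setupCommonApiMocks,
} from '@sim/testing'
import { beforeEach, vi } from 'vitest'

let authMocks: ReturnType<typeof mockAuth>
let databaseMocks: ReturnType<typeof createMockDatabase>

beforeEach(() => {
  vi.resetModules()
  setupCommonApiMocks()
  mockCryptoUuid('mock-request-id-12345678')
  authMocks = mockAuth(defaultMockUser) // previously mockAuth(mockUser) from the local utils
  vi.doMock('@sim/db', () => databaseMock)
  databaseMocks = createMockDatabase({ update: { results: [{}] } })
})
```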
@@ -12,6 +12,7 @@ import { HydrationErrorHandler } from '@/app/_shell/hydration-error-handler'
import { QueryProvider } from '@/app/_shell/providers/query-provider'
import { SessionProvider } from '@/app/_shell/providers/session-provider'
import { ThemeProvider } from '@/app/_shell/providers/theme-provider'
import { TooltipProvider } from '@/app/_shell/providers/tooltip-provider'
import { season } from '@/app/_styles/fonts/season/season'

export const viewport: Viewport = {
@@ -208,7 +209,9 @@ export default function RootLayout({ children }: { children: React.ReactNode })
<ThemeProvider>
<QueryProvider>
<SessionProvider>
<BrandedLayout>{children}</BrandedLayout>
<TooltipProvider>
<BrandedLayout>{children}</BrandedLayout>
</TooltipProvider>
</SessionProvider>
</QueryProvider>
</ThemeProvider>
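With `Tooltip.Provider` now mounted once at the root through `TooltipProvider`, individual layouts and screens no longer need their own provider; components simply render `Tooltip.Root`/`Trigger`/`Content`, the same pattern the chat toolbar adopts later in this diff. A minimal consumer sketch (the component name is illustrative, not from the PR):

```tsx
'use client'

import { Tooltip } from '@/components/emcn'

// Relies on the app-level TooltipProvider added in the root layout above.
export function AttachFileHint({ children }: { children: React.ReactNode }) {
  return (
    <Tooltip.Root>
      <Tooltip.Trigger asChild>{children}</Tooltip.Trigger>
      <Tooltip.Content>Attach file</Tooltip.Content>
    </Tooltip.Root>
  )
}
```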
File diff suppressed because it is too large
@@ -38,6 +38,7 @@ import { getBaseUrl } from '@/lib/core/utils/urls'
|
||||
import type { CredentialRequirement } from '@/lib/workflows/credentials/credential-extractor'
|
||||
import { WorkflowPreview } from '@/app/workspace/[workspaceId]/w/components/preview'
|
||||
import { getBlock } from '@/blocks/registry'
|
||||
import { useAdminStatus } from '@/hooks/queries/admin-status'
|
||||
import { useStarTemplate, useTemplate } from '@/hooks/queries/templates'
|
||||
|
||||
const logger = createLogger('TemplateDetails')
|
||||
@@ -150,7 +151,8 @@ export default function TemplateDetails({ isWorkspaceContext = false }: Template
|
||||
const [currentUserOrgRoles, setCurrentUserOrgRoles] = useState<
|
||||
Array<{ organizationId: string; role: string }>
|
||||
>([])
|
||||
const [isSuperUser, setIsSuperUser] = useState(false)
|
||||
const { data: adminStatus } = useAdminStatus(!!session?.user?.id)
|
||||
const hasAdminPrivileges = adminStatus?.hasAdminPrivileges ?? false
|
||||
const [isUsing, setIsUsing] = useState(false)
|
||||
const [isEditing, setIsEditing] = useState(false)
|
||||
const [isApproving, setIsApproving] = useState(false)
|
||||
@@ -188,21 +190,6 @@ export default function TemplateDetails({ isWorkspaceContext = false }: Template
|
||||
}
|
||||
}
|
||||
|
||||
const fetchSuperUserStatus = async () => {
|
||||
if (!currentUserId) return
|
||||
|
||||
try {
|
||||
const response = await fetch('/api/user/super-user')
|
||||
if (response.ok) {
|
||||
const data = await response.json()
|
||||
setIsSuperUser(data.isSuperUser || false)
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error('Error fetching super user status:', error)
|
||||
}
|
||||
}
|
||||
|
||||
fetchSuperUserStatus()
|
||||
fetchUserOrganizations()
|
||||
}, [currentUserId])
|
||||
|
||||
@@ -650,7 +637,7 @@ export default function TemplateDetails({ isWorkspaceContext = false }: Template
|
||||
{/* Action buttons */}
|
||||
<div className='flex items-center gap-[8px]'>
|
||||
{/* Approve/Reject buttons for super users */}
|
||||
{isSuperUser && template.status === 'pending' && (
|
||||
{hasAdminPrivileges && template.status === 'pending' && (
|
||||
<>
|
||||
<Button
|
||||
variant='active'
|
||||
@@ -974,7 +961,7 @@ export default function TemplateDetails({ isWorkspaceContext = false }: Template
|
||||
<h3 className='font-sans font-semibold text-base text-foreground'>
|
||||
About the Creator
|
||||
</h3>
|
||||
{isSuperUser && template.creator && (
|
||||
{hasAdminPrivileges && template.creator && (
|
||||
<Button
|
||||
variant={template.creator.verified ? 'active' : 'default'}
|
||||
onClick={handleToggleVerification}
|
||||
|
||||
@@ -1,15 +1,12 @@
'use client'

import { Tooltip } from '@/components/emcn'
import { season } from '@/app/_styles/fonts/season/season'

export default function TemplatesLayoutClient({ children }: { children: React.ReactNode }) {
return (
<Tooltip.Provider delayDuration={600} skipDelayDuration={0}>
<div className={`${season.variable} relative flex min-h-screen flex-col font-season`}>
<div className='-z-50 pointer-events-none fixed inset-0 bg-white' />
{children}
</div>
</Tooltip.Provider>
<div className={`${season.variable} relative flex min-h-screen flex-col font-season`}>
<div className='-z-50 pointer-events-none fixed inset-0 bg-white' />
{children}
</div>
)
}

@@ -1,6 +1,5 @@
'use client'

import { Tooltip } from '@/components/emcn'
import { GlobalCommandsProvider } from '@/app/workspace/[workspaceId]/providers/global-commands-provider'
import { ProviderModelsLoader } from '@/app/workspace/[workspaceId]/providers/provider-models-loader'
import { SettingsLoader } from '@/app/workspace/[workspaceId]/providers/settings-loader'
@@ -13,16 +12,14 @@ export default function WorkspaceLayout({ children }: { children: React.ReactNod
<SettingsLoader />
<ProviderModelsLoader />
<GlobalCommandsProvider>
<Tooltip.Provider delayDuration={600} skipDelayDuration={0}>
<div className='flex h-screen w-full bg-[var(--bg)]'>
<WorkspacePermissionsProvider>
<div className='shrink-0' suppressHydrationWarning>
<Sidebar />
</div>
{children}
</WorkspacePermissionsProvider>
</div>
</Tooltip.Provider>
<div className='flex h-screen w-full bg-[var(--bg)]'>
<WorkspacePermissionsProvider>
<div className='shrink-0' suppressHydrationWarning>
<Sidebar />
</div>
{children}
</WorkspacePermissionsProvider>
</div>
</GlobalCommandsProvider>
</>
)
@@ -39,9 +39,9 @@ export default async function TemplatesPage({ params }: TemplatesPageProps) {
redirect(`/workspace/${workspaceId}`)
}

// Determine effective super user (DB flag AND UI mode enabled)
// Determine effective super user (admin/superadmin role AND UI mode enabled)
const currentUser = await db
.select({ isSuperUser: user.isSuperUser })
.select({ role: user.role })
.from(user)
.where(eq(user.id, session.user.id))
.limit(1)
@@ -51,7 +51,7 @@ export default async function TemplatesPage({ params }: TemplatesPageProps) {
.where(eq(settings.userId, session.user.id))
.limit(1)

const isSuperUser = currentUser[0]?.isSuperUser || false
const isSuperUser = currentUser[0]?.role === 'admin' || currentUser[0]?.role === 'superadmin'
const superUserModeEnabled = userSettings[0]?.superUserModeEnabled ?? true
const effectiveSuperUser = isSuperUser && superUserModeEnabled

@@ -19,6 +19,7 @@ export type CommandId =
| 'clear-terminal-console'
| 'focus-toolbar-search'
| 'clear-notifications'
| 'fit-to-view'

/**
* Static metadata for a global command.
@@ -104,6 +105,11 @@ export const COMMAND_DEFINITIONS: Record<CommandId, CommandDefinition> = {
shortcut: 'Mod+E',
allowInEditable: false,
},
'fit-to-view': {
id: 'fit-to-view',
shortcut: 'Mod+Shift+F',
allowInEditable: false,
},
}
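The new 'fit-to-view' command binds to 'Mod+Shift+F', where 'Mod' conventionally means Cmd on macOS and Ctrl elsewhere. A rough sketch of how a shortcut string in that format could be matched against a KeyboardEvent (a hypothetical helper, not the repository's actual dispatcher):

```ts
// Hypothetical matcher for shortcut strings like 'Mod+E' or 'Mod+Shift+F'.
function matchesShortcut(event: KeyboardEvent, shortcut: string): boolean {
  const parts = shortcut.split('+')
  const key = parts[parts.length - 1].toLowerCase()
  const wantsMod = parts.includes('Mod')
  const wantsShift = parts.includes('Shift')
  const modPressed = event.metaKey || event.ctrlKey // Cmd on macOS, Ctrl elsewhere
  return (
    event.key.toLowerCase() === key &&
    modPressed === wantsMod &&
    event.shiftKey === wantsShift
  )
}

// e.g. matchesShortcut(event, COMMAND_DEFINITIONS['fit-to-view'].shortcut)
```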
/**
@@ -1,5 +1,6 @@
|
||||
'use client'
|
||||
|
||||
import type { RefObject } from 'react'
|
||||
import {
|
||||
Popover,
|
||||
PopoverAnchor,
|
||||
@@ -7,14 +8,48 @@ import {
|
||||
PopoverDivider,
|
||||
PopoverItem,
|
||||
} from '@/components/emcn'
|
||||
import type { BlockContextMenuProps } from './types'
|
||||
|
||||
/**
|
||||
* Block information for context menu actions
|
||||
*/
|
||||
export interface BlockInfo {
|
||||
id: string
|
||||
type: string
|
||||
enabled: boolean
|
||||
horizontalHandles: boolean
|
||||
parentId?: string
|
||||
parentType?: string
|
||||
}
|
||||
|
||||
/**
|
||||
* Props for BlockMenu component
|
||||
*/
|
||||
export interface BlockMenuProps {
|
||||
isOpen: boolean
|
||||
position: { x: number; y: number }
|
||||
menuRef: RefObject<HTMLDivElement | null>
|
||||
onClose: () => void
|
||||
selectedBlocks: BlockInfo[]
|
||||
onCopy: () => void
|
||||
onPaste: () => void
|
||||
onDuplicate: () => void
|
||||
onDelete: () => void
|
||||
onToggleEnabled: () => void
|
||||
onToggleHandles: () => void
|
||||
onRemoveFromSubflow: () => void
|
||||
onOpenEditor: () => void
|
||||
onRename: () => void
|
||||
hasClipboard?: boolean
|
||||
showRemoveFromSubflow?: boolean
|
||||
disableEdit?: boolean
|
||||
}
|
||||
|
||||
/**
|
||||
* Context menu for workflow block(s).
|
||||
* Displays block-specific actions in a popover at right-click position.
|
||||
* Supports multi-selection - actions apply to all selected blocks.
|
||||
*/
|
||||
export function BlockContextMenu({
|
||||
export function BlockMenu({
|
||||
isOpen,
|
||||
position,
|
||||
menuRef,
|
||||
@@ -32,7 +67,7 @@ export function BlockContextMenu({
|
||||
hasClipboard = false,
|
||||
showRemoveFromSubflow = false,
|
||||
disableEdit = false,
|
||||
}: BlockContextMenuProps) {
|
||||
}: BlockMenuProps) {
|
||||
const isSingleBlock = selectedBlocks.length === 1
|
||||
|
||||
const allEnabled = selectedBlocks.every((b) => b.enabled)
|
||||
@@ -0,0 +1,2 @@
|
||||
export type { BlockInfo, BlockMenuProps } from './block-menu'
|
||||
export { BlockMenu } from './block-menu'
|
||||
@@ -1,5 +1,6 @@
|
||||
'use client'
|
||||
|
||||
import type { RefObject } from 'react'
|
||||
import {
|
||||
Popover,
|
||||
PopoverAnchor,
|
||||
@@ -7,13 +8,40 @@ import {
|
||||
PopoverDivider,
|
||||
PopoverItem,
|
||||
} from '@/components/emcn'
|
||||
import type { PaneContextMenuProps } from './types'
|
||||
|
||||
/**
|
||||
* Context menu for workflow canvas pane.
|
||||
* Props for CanvasMenu component
|
||||
*/
|
||||
export interface CanvasMenuProps {
|
||||
isOpen: boolean
|
||||
position: { x: number; y: number }
|
||||
menuRef: RefObject<HTMLDivElement | null>
|
||||
onClose: () => void
|
||||
onUndo: () => void
|
||||
onRedo: () => void
|
||||
onPaste: () => void
|
||||
onAddBlock: () => void
|
||||
onAutoLayout: () => void
|
||||
onFitToView: () => void
|
||||
onOpenLogs: () => void
|
||||
onToggleVariables: () => void
|
||||
onToggleChat: () => void
|
||||
onInvite: () => void
|
||||
isVariablesOpen?: boolean
|
||||
isChatOpen?: boolean
|
||||
hasClipboard?: boolean
|
||||
disableEdit?: boolean
|
||||
disableAdmin?: boolean
|
||||
canUndo?: boolean
|
||||
canRedo?: boolean
|
||||
isInvitationsDisabled?: boolean
|
||||
}
|
||||
|
||||
/**
|
||||
* Context menu for workflow canvas.
|
||||
* Displays canvas-level actions when right-clicking empty space.
|
||||
*/
|
||||
export function PaneContextMenu({
|
||||
export function CanvasMenu({
|
||||
isOpen,
|
||||
position,
|
||||
menuRef,
|
||||
@@ -23,6 +51,7 @@ export function PaneContextMenu({
|
||||
onPaste,
|
||||
onAddBlock,
|
||||
onAutoLayout,
|
||||
onFitToView,
|
||||
onOpenLogs,
|
||||
onToggleVariables,
|
||||
onToggleChat,
|
||||
@@ -35,7 +64,7 @@ export function PaneContextMenu({
|
||||
canUndo = false,
|
||||
canRedo = false,
|
||||
isInvitationsDisabled = false,
|
||||
}: PaneContextMenuProps) {
|
||||
}: CanvasMenuProps) {
|
||||
return (
|
||||
<Popover
|
||||
open={isOpen}
|
||||
@@ -113,6 +142,14 @@ export function PaneContextMenu({
|
||||
<span>Auto-layout</span>
|
||||
<span className='ml-auto opacity-70 group-hover:opacity-100'>⇧L</span>
|
||||
</PopoverItem>
|
||||
<PopoverItem
|
||||
onClick={() => {
|
||||
onFitToView()
|
||||
onClose()
|
||||
}}
|
||||
>
|
||||
Fit to View
|
||||
</PopoverItem>
|
||||
|
||||
{/* Navigation actions */}
|
||||
<PopoverDivider />
|
||||
@@ -0,0 +1,2 @@
|
||||
export type { CanvasMenuProps } from './canvas-menu'
|
||||
export { CanvasMenu } from './canvas-menu'
|
||||
@@ -20,6 +20,7 @@ import {
PopoverItem,
PopoverScrollArea,
PopoverTrigger,
Tooltip,
Trash,
} from '@/components/emcn'
import { useSession } from '@/lib/auth/auth-client'
@@ -29,7 +30,7 @@ import {
extractPathFromOutputId,
parseOutputContentSafely,
} from '@/lib/core/utils/response-format'
import { normalizeInputFormatValue } from '@/lib/workflows/input-format-utils'
import { normalizeInputFormatValue } from '@/lib/workflows/input-format'
import { StartBlockPath, TriggerUtils } from '@/lib/workflows/triggers/triggers'
import { START_BLOCK_RESERVED_FIELDS } from '@/lib/workflows/types'
import {
@@ -869,7 +870,7 @@ export function Chat() {

<div className='flex flex-shrink-0 items-center gap-[8px]'>
{/* More menu with actions */}
<Popover variant='default' open={moreMenuOpen} onOpenChange={setMoreMenuOpen}>
<Popover variant='default' size='sm' open={moreMenuOpen} onOpenChange={setMoreMenuOpen}>
<PopoverTrigger asChild>
<Button
variant='ghost'
@@ -1042,17 +1043,21 @@ export function Chat() {

{/* Buttons positioned absolutely on the right */}
<div className='-translate-y-1/2 absolute top-1/2 right-[2px] flex items-center gap-[10px]'>
<Badge
onClick={() => document.getElementById('floating-chat-file-input')?.click()}
title='Attach file'
className={cn(
'!bg-transparent !border-0 cursor-pointer rounded-[6px] p-[0px]',
(!activeWorkflowId || isExecuting || chatFiles.length >= 15) &&
'cursor-not-allowed opacity-50'
)}
>
<Paperclip className='!h-3.5 !w-3.5' />
</Badge>
<Tooltip.Root>
<Tooltip.Trigger asChild>
<Badge
onClick={() => document.getElementById('floating-chat-file-input')?.click()}
className={cn(
'!bg-transparent !border-0 cursor-pointer rounded-[6px] p-[0px]',
(!activeWorkflowId || isExecuting || chatFiles.length >= 15) &&
'cursor-not-allowed opacity-50'
)}
>
<Paperclip className='!h-3.5 !w-3.5' />
</Badge>
</Tooltip.Trigger>
<Tooltip.Content>Attach file</Tooltip.Content>
</Tooltip.Root>

{isStreaming ? (
<Button

@@ -1,8 +0,0 @@
export { BlockContextMenu } from './block-context-menu'
export { PaneContextMenu } from './pane-context-menu'
export type {
BlockContextMenuProps,
ContextMenuBlockInfo,
ContextMenuPosition,
PaneContextMenuProps,
} from './types'
@@ -1,99 +0,0 @@
import type { RefObject } from 'react'

/**
* Position for context menu placement
*/
export interface ContextMenuPosition {
x: number
y: number
}

/**
* Block information passed to context menu for action handling
*/
export interface ContextMenuBlockInfo {
/** Block ID */
id: string
/** Block type (e.g., 'agent', 'function', 'loop') */
type: string
/** Whether block is enabled */
enabled: boolean
/** Whether block uses horizontal handles */
horizontalHandles: boolean
/** Parent subflow ID if nested in loop/parallel */
parentId?: string
/** Parent type ('loop' | 'parallel') if nested */
parentType?: string
}

/**
* Props for BlockContextMenu component
*/
export interface BlockContextMenuProps {
/** Whether the context menu is open */
isOpen: boolean
/** Position of the context menu */
position: ContextMenuPosition
/** Ref for the menu element (for click-outside detection) */
menuRef: RefObject<HTMLDivElement | null>
/** Callback when menu should close */
onClose: () => void
/** Selected block(s) info */
selectedBlocks: ContextMenuBlockInfo[]
/** Callbacks for menu actions */
onCopy: () => void
onPaste: () => void
onDuplicate: () => void
onDelete: () => void
onToggleEnabled: () => void
onToggleHandles: () => void
onRemoveFromSubflow: () => void
onOpenEditor: () => void
onRename: () => void
/** Whether clipboard has content for pasting */
hasClipboard?: boolean
/** Whether remove from subflow option should be shown */
showRemoveFromSubflow?: boolean
/** Whether edit actions are disabled (no permission) */
disableEdit?: boolean
}

/**
* Props for PaneContextMenu component
*/
export interface PaneContextMenuProps {
/** Whether the context menu is open */
isOpen: boolean
/** Position of the context menu */
position: ContextMenuPosition
/** Ref for the menu element */
menuRef: RefObject<HTMLDivElement | null>
/** Callback when menu should close */
onClose: () => void
/** Callbacks for menu actions */
onUndo: () => void
onRedo: () => void
onPaste: () => void
onAddBlock: () => void
onAutoLayout: () => void
onOpenLogs: () => void
onToggleVariables: () => void
onToggleChat: () => void
onInvite: () => void
/** Whether the variables panel is currently open */
isVariablesOpen?: boolean
/** Whether the chat panel is currently open */
isChatOpen?: boolean
/** Whether clipboard has content for pasting */
hasClipboard?: boolean
/** Whether edit actions are disabled (no permission) */
disableEdit?: boolean
/** Whether admin actions are disabled (no admin permission) */
disableAdmin?: boolean
/** Whether undo is available */
canUndo?: boolean
/** Whether redo is available */
canRedo?: boolean
/** Whether invitations are disabled (feature flag or permission group) */
isInvitationsDisabled?: boolean
}
@@ -4,36 +4,204 @@ import clsx from 'clsx'
import { useRegisterGlobalCommands } from '@/app/workspace/[workspaceId]/providers/global-commands-provider'
import { createCommand } from '@/app/workspace/[workspaceId]/utils/commands-utils'
import { usePreventZoom } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks'
import { useNotificationStore } from '@/stores/notifications'
import { useCopilotStore, usePanelStore } from '@/stores/panel'
import { useTerminalStore } from '@/stores/terminal'
import { useWorkflowDiffStore } from '@/stores/workflow-diff'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { mergeSubblockState } from '@/stores/workflows/utils'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'

const logger = createLogger('DiffControls')
const NOTIFICATION_WIDTH = 240
const NOTIFICATION_GAP = 16

export const DiffControls = memo(function DiffControls() {
const isTerminalResizing = useTerminalStore((state) => state.isResizing)
const isPanelResizing = usePanelStore((state) => state.isResizing)
const { isDiffReady, hasActiveDiff, acceptChanges, rejectChanges } = useWorkflowDiffStore(
const { isDiffReady, hasActiveDiff, acceptChanges, rejectChanges, baselineWorkflow } =
useWorkflowDiffStore(
useCallback(
(state) => ({
isDiffReady: state.isDiffReady,
hasActiveDiff: state.hasActiveDiff,
acceptChanges: state.acceptChanges,
rejectChanges: state.rejectChanges,
baselineWorkflow: state.baselineWorkflow,
}),
[]
)
)

const { updatePreviewToolCallState, currentChat, messages } = useCopilotStore(
useCallback(
(state) => ({
isDiffReady: state.isDiffReady,
hasActiveDiff: state.hasActiveDiff,
acceptChanges: state.acceptChanges,
rejectChanges: state.rejectChanges,
updatePreviewToolCallState: state.updatePreviewToolCallState,
currentChat: state.currentChat,
messages: state.messages,
}),
[]
)
)

const { updatePreviewToolCallState } = useCopilotStore(
useCallback(
(state) => ({
updatePreviewToolCallState: state.updatePreviewToolCallState,
}),
[]
)
const { activeWorkflowId } = useWorkflowRegistry(
useCallback((state) => ({ activeWorkflowId: state.activeWorkflowId }), [])
)

const allNotifications = useNotificationStore((state) => state.notifications)
const hasVisibleNotifications = useMemo(() => {
if (!activeWorkflowId) return false
return allNotifications.some((n) => !n.workflowId || n.workflowId === activeWorkflowId)
}, [allNotifications, activeWorkflowId])

const createCheckpoint = useCallback(async () => {
|
||||
if (!activeWorkflowId || !currentChat?.id) {
|
||||
logger.warn('Cannot create checkpoint: missing workflowId or chatId', {
|
||||
workflowId: activeWorkflowId,
|
||||
chatId: currentChat?.id,
|
||||
})
|
||||
return false
|
||||
}
|
||||
|
||||
try {
|
||||
logger.info('Creating checkpoint before accepting changes')
|
||||
|
||||
// Use the baseline workflow (state before diff) instead of current state
|
||||
// This ensures reverting to the checkpoint restores the pre-diff state
|
||||
const rawState = baselineWorkflow || useWorkflowStore.getState().getWorkflowState()
|
||||
|
||||
// The baseline already has merged subblock values, but we'll merge again to be safe
|
||||
// This ensures all user inputs and subblock data are captured
|
||||
const blocksWithSubblockValues = mergeSubblockState(rawState.blocks, activeWorkflowId)
|
||||
|
||||
// Filter and complete blocks to ensure all required fields are present
|
||||
// This matches the validation logic from /api/workflows/[id]/state
|
||||
const filteredBlocks = Object.entries(blocksWithSubblockValues).reduce(
|
||||
(acc, [blockId, block]) => {
|
||||
if (block.type && block.name) {
|
||||
// Ensure all required fields are present
|
||||
acc[blockId] = {
|
||||
...block,
|
||||
id: block.id || blockId, // Ensure id field is set
|
||||
enabled: block.enabled !== undefined ? block.enabled : true,
|
||||
horizontalHandles:
|
||||
block.horizontalHandles !== undefined ? block.horizontalHandles : true,
|
||||
height: block.height !== undefined ? block.height : 90,
|
||||
subBlocks: block.subBlocks || {},
|
||||
outputs: block.outputs || {},
|
||||
data: block.data || {},
|
||||
position: block.position || { x: 0, y: 0 }, // Ensure position exists
|
||||
}
|
||||
}
|
||||
return acc
|
||||
},
|
||||
{} as typeof rawState.blocks
|
||||
)
|
||||
|
||||
// Clean the workflow state - only include valid fields, exclude null/undefined values
|
||||
const workflowState = {
|
||||
blocks: filteredBlocks,
|
||||
edges: rawState.edges || [],
|
||||
loops: rawState.loops || {},
|
||||
parallels: rawState.parallels || {},
|
||||
lastSaved: rawState.lastSaved || Date.now(),
|
||||
deploymentStatuses: rawState.deploymentStatuses || {},
|
||||
}
|
||||
|
||||
logger.info('Prepared complete workflow state for checkpoint', {
|
||||
blocksCount: Object.keys(workflowState.blocks).length,
|
||||
edgesCount: workflowState.edges.length,
|
||||
loopsCount: Object.keys(workflowState.loops).length,
|
||||
parallelsCount: Object.keys(workflowState.parallels).length,
|
||||
hasRequiredFields: Object.values(workflowState.blocks).every(
|
||||
(block) => block.id && block.type && block.name && block.position
|
||||
),
|
||||
hasSubblockValues: Object.values(workflowState.blocks).some((block) =>
|
||||
Object.values(block.subBlocks || {}).some(
|
||||
(subblock) => subblock.value !== null && subblock.value !== undefined
|
||||
)
|
||||
),
|
||||
sampleBlock: Object.values(workflowState.blocks)[0],
|
||||
})
|
||||
|
||||
// Find the most recent user message ID from the current chat
|
||||
const userMessages = messages.filter((msg) => msg.role === 'user')
|
||||
const lastUserMessage = userMessages[userMessages.length - 1]
|
||||
const messageId = lastUserMessage?.id
|
||||
|
||||
logger.info('Creating checkpoint with message association', {
|
||||
totalMessages: messages.length,
|
||||
userMessageCount: userMessages.length,
|
||||
lastUserMessageId: messageId,
|
||||
chatId: currentChat.id,
|
||||
entireMessageArray: messages,
|
||||
allMessageIds: messages.map((m) => ({
|
||||
id: m.id,
|
||||
role: m.role,
|
||||
content: m.content.substring(0, 50),
|
||||
})),
|
||||
selectedUserMessages: userMessages.map((m) => ({
|
||||
id: m.id,
|
||||
content: m.content.substring(0, 100),
|
||||
})),
|
||||
allRawMessageIds: messages.map((m) => m.id),
|
||||
userMessageIds: userMessages.map((m) => m.id),
|
||||
checkpointData: {
|
||||
workflowId: activeWorkflowId,
|
||||
chatId: currentChat.id,
|
||||
messageId: messageId,
|
||||
messageFound: !!lastUserMessage,
|
||||
},
|
||||
})
|
||||
|
||||
const response = await fetch('/api/copilot/checkpoints', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({
|
||||
workflowId: activeWorkflowId,
|
||||
chatId: currentChat.id,
|
||||
messageId,
|
||||
workflowState: JSON.stringify(workflowState),
|
||||
}),
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`Failed to create checkpoint: ${response.statusText}`)
|
||||
}
|
||||
|
||||
const result = await response.json()
|
||||
const newCheckpoint = result.checkpoint
|
||||
|
||||
logger.info('Checkpoint created successfully', {
|
||||
messageId,
|
||||
chatId: currentChat.id,
|
||||
checkpointId: newCheckpoint?.id,
|
||||
})
|
||||
|
||||
// Update the copilot store immediately to show the checkpoint icon
|
||||
if (newCheckpoint && messageId) {
|
||||
const { messageCheckpoints: currentCheckpoints } = useCopilotStore.getState()
|
||||
const existingCheckpoints = currentCheckpoints[messageId] || []
|
||||
|
||||
const updatedCheckpoints = {
|
||||
...currentCheckpoints,
|
||||
[messageId]: [newCheckpoint, ...existingCheckpoints],
|
||||
}
|
||||
|
||||
useCopilotStore.setState({ messageCheckpoints: updatedCheckpoints })
|
||||
logger.info('Updated copilot store with new checkpoint', {
|
||||
messageId,
|
||||
checkpointId: newCheckpoint.id,
|
||||
})
|
||||
}
|
||||
|
||||
return true
|
||||
} catch (error) {
|
||||
logger.error('Failed to create checkpoint:', error)
|
||||
return false
|
||||
}
|
||||
}, [activeWorkflowId, currentChat, messages, baselineWorkflow])
|
||||
|
||||
const handleAccept = useCallback(() => {
|
||||
logger.info('Accepting proposed changes with backup protection')
|
||||
|
||||
@@ -70,8 +238,12 @@ export const DiffControls = memo(function DiffControls() {
})

// Create checkpoint in the background (fire-and-forget) so it doesn't block UI
createCheckpoint().catch((error) => {
logger.warn('Failed to create checkpoint after accept:', error)
})

logger.info('Accept triggered; UI will update optimistically')
}, [updatePreviewToolCallState, acceptChanges])
}, [createCheckpoint, updatePreviewToolCallState, acceptChanges])

const handleReject = useCallback(() => {
logger.info('Rejecting proposed changes (optimistic)')
@@ -132,16 +304,15 @@ export const DiffControls = memo(function DiffControls() {

const isResizing = isTerminalResizing || isPanelResizing

const notificationOffset = hasVisibleNotifications ? NOTIFICATION_WIDTH + NOTIFICATION_GAP : 0

return (
<div
ref={preventZoomRef}
className={clsx(
'fixed z-30',
!isResizing && 'transition-[bottom,right] duration-100 ease-out'
)}
className={clsx('fixed z-30', !isResizing && 'transition-[bottom] duration-100 ease-out')}
style={{
bottom: 'calc(var(--terminal-height) + 16px)',
right: 'calc(var(--panel-width) + 16px)',
right: `calc(var(--panel-width) + 16px + ${notificationOffset}px)`,
}}
>
<div

@@ -1,3 +1,5 @@
export { BlockMenu } from './block-menu'
export { CanvasMenu } from './canvas-menu'
export { CommandList } from './command-list/command-list'
export { Cursors } from './cursors/cursors'
export { DiffControls } from './diff-controls/diff-controls'
@@ -8,4 +10,5 @@ export { SubflowNodeComponent } from './subflows/subflow-node'
export { Terminal } from './terminal/terminal'
export { WandPromptBar } from './wand-prompt-bar/wand-prompt-bar'
export { WorkflowBlock } from './workflow-block/workflow-block'
export { WorkflowControls } from './workflow-controls'
export { WorkflowEdge } from './workflow-edge/workflow-edge'

@@ -11,7 +11,7 @@ import {
openCopilotWithMessage,
useNotificationStore,
} from '@/stores/notifications'
import { useSidebarStore } from '@/stores/sidebar/store'
import { usePanelStore } from '@/stores/panel'
import { useTerminalStore } from '@/stores/terminal'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'

@@ -19,9 +19,9 @@ const logger = createLogger('Notifications')
const MAX_VISIBLE_NOTIFICATIONS = 4

/**
* Notifications display component
* Positioned in the bottom-left workspace area, reactive to sidebar width and terminal height
* Shows both global notifications and workflow-specific notifications
* Notifications display component.
* Positioned in the bottom-right workspace area, reactive to panel width and terminal height.
* Shows both global notifications and workflow-specific notifications.
*/
export const Notifications = memo(function Notifications() {
const activeWorkflowId = useWorkflowRegistry((state) => state.activeWorkflowId)
@@ -37,7 +37,7 @@ export const Notifications = memo(function Notifications() {
.slice(0, MAX_VISIBLE_NOTIFICATIONS)
}, [allNotifications, activeWorkflowId])
const isTerminalResizing = useTerminalStore((state) => state.isResizing)
const isSidebarResizing = useSidebarStore((state) => state.isResizing)
const isPanelResizing = usePanelStore((state) => state.isResizing)

/**
* Executes a notification action and handles side effects.
@@ -105,15 +105,19 @@ export const Notifications = memo(function Notifications() {
return null
}

const isResizing = isTerminalResizing || isSidebarResizing
const isResizing = isTerminalResizing || isPanelResizing

return (
<div
ref={preventZoomRef}
className={clsx(
'fixed bottom-[calc(var(--terminal-height)+16px)] left-[calc(var(--sidebar-width)+16px)] z-30 flex flex-col items-start',
!isResizing && 'transition-[bottom,left] duration-100 ease-out'
'fixed z-30 flex flex-col items-start',
!isResizing && 'transition-[bottom,right] duration-100 ease-out'
)}
style={{
bottom: 'calc(var(--terminal-height) + 16px)',
right: 'calc(var(--panel-width) + 16px)',
}}
>
{[...visibleNotifications].reverse().map((notification, index, stacked) => {
const depth = stacked.length - index - 1
@@ -123,8 +127,13 @@ export const Notifications = memo(function Notifications() {
return (
<div
key={notification.id}
style={{ transform: `translateX(${xOffset}px)` }}
className={`relative h-[80px] w-[240px] overflow-hidden rounded-[4px] border bg-[var(--surface-2)] transition-transform duration-200 ${
style={
{
'--stack-offset': `${xOffset}px`,
animation: 'notification-enter 200ms ease-out forwards',
} as React.CSSProperties
}
className={`relative h-[80px] w-[240px] overflow-hidden rounded-[4px] border bg-[var(--surface-2)] ${
index > 0 ? '-mt-[80px]' : ''
}`}
>

@@ -1,20 +1,10 @@
'use client'

import { memo, useEffect, useMemo, useRef, useState } from 'react'
import { memo, useEffect, useRef, useState } from 'react'
import clsx from 'clsx'
import { ChevronUp } from 'lucide-react'
import CopilotMarkdownRenderer from './markdown-renderer'

/**
* Removes thinking tags (raw or escaped) from streamed content.
*/
function stripThinkingTags(text: string): string {
return text
.replace(/<\/?thinking[^>]*>/gi, '')
.replace(/<\/?thinking[^&]*>/gi, '')
.trim()
}

/**
* Max height for thinking content before internal scrolling kicks in
*/
@@ -197,9 +187,6 @@ export function ThinkingBlock({
label = 'Thought',
hasSpecialTags = false,
}: ThinkingBlockProps) {
// Strip thinking tags from content on render to handle persisted messages
const cleanContent = useMemo(() => stripThinkingTags(content || ''), [content])

const [isExpanded, setIsExpanded] = useState(false)
const [duration, setDuration] = useState(0)
const [userHasScrolledAway, setUserHasScrolledAway] = useState(false)
@@ -222,10 +209,10 @@ export function ThinkingBlock({
return
}

if (!userCollapsedRef.current && cleanContent && cleanContent.length > 0) {
if (!userCollapsedRef.current && content && content.trim().length > 0) {
setIsExpanded(true)
}
}, [isStreaming, cleanContent, hasFollowingContent, hasSpecialTags])
}, [isStreaming, content, hasFollowingContent, hasSpecialTags])

// Reset start time when streaming begins
useEffect(() => {
@@ -311,7 +298,7 @@ export function ThinkingBlock({
return `${seconds}s`
}

const hasContent = cleanContent.length > 0
const hasContent = content && content.trim().length > 0
// Thinking is "done" when streaming ends OR when there's following content (like a tool call) OR when special tags appear
const isThinkingDone = !isStreaming || hasFollowingContent || hasSpecialTags
const durationText = `${label} for ${formatDuration(duration)}`
@@ -387,7 +374,7 @@ export function ThinkingBlock({
isExpanded ? 'mt-1.5 max-h-[150px] opacity-100' : 'max-h-0 opacity-0'
)}
>
<SmoothThinkingText content={cleanContent} isStreaming={isStreaming && !hasFollowingContent} />
<SmoothThinkingText content={content} isStreaming={isStreaming && !hasFollowingContent} />
</div>
</div>
)
@@ -425,7 +412,7 @@ export function ThinkingBlock({
>
{/* Completed thinking text - dimmed with markdown */}
<div className='[&_*]:!text-[var(--text-muted)] [&_*]:!text-[12px] [&_*]:!leading-[1.4] [&_p]:!m-0 [&_p]:!mb-1 [&_h1]:!text-[12px] [&_h1]:!font-semibold [&_h1]:!m-0 [&_h1]:!mb-1 [&_h2]:!text-[12px] [&_h2]:!font-semibold [&_h2]:!m-0 [&_h2]:!mb-1 [&_h3]:!text-[12px] [&_h3]:!font-semibold [&_h3]:!m-0 [&_h3]:!mb-1 [&_code]:!text-[11px] [&_ul]:!pl-5 [&_ul]:!my-1 [&_ol]:!pl-6 [&_ol]:!my-1 [&_li]:!my-0.5 [&_li]:!py-0 font-season text-[12px] text-[var(--text-muted)]'>
<CopilotMarkdownRenderer content={cleanContent} />
<CopilotMarkdownRenderer content={content} />
</div>
</div>
</div>

@@ -1,6 +1,6 @@
|
||||
'use client'
|
||||
|
||||
import { type FC, memo, useCallback, useMemo, useRef, useState } from 'react'
|
||||
import { type FC, memo, useCallback, useMemo, useState } from 'react'
|
||||
import { RotateCcw } from 'lucide-react'
|
||||
import { Button } from '@/components/emcn'
|
||||
import {
|
||||
@@ -93,8 +93,6 @@ const CopilotMessage: FC<CopilotMessageProps> = memo(
|
||||
// UI state
|
||||
const [isHoveringMessage, setIsHoveringMessage] = useState(false)
|
||||
|
||||
const cancelEditRef = useRef<(() => void) | null>(null)
|
||||
|
||||
// Checkpoint management hook
|
||||
const {
|
||||
showRestoreConfirmation,
|
||||
@@ -114,8 +112,7 @@ const CopilotMessage: FC<CopilotMessageProps> = memo(
|
||||
messages,
|
||||
messageCheckpoints,
|
||||
onRevertModeChange,
|
||||
onEditModeChange,
|
||||
() => cancelEditRef.current?.()
|
||||
onEditModeChange
|
||||
)
|
||||
|
||||
// Message editing hook
|
||||
@@ -145,8 +142,6 @@ const CopilotMessage: FC<CopilotMessageProps> = memo(
|
||||
pendingEditRef,
|
||||
})
|
||||
|
||||
cancelEditRef.current = handleCancelEdit
|
||||
|
||||
// Get clean text content with double newline parsing
|
||||
const cleanTextContent = useMemo(() => {
|
||||
if (!message.content) return ''
|
||||
|
||||
@@ -22,8 +22,7 @@ export function useCheckpointManagement(
|
||||
messages: CopilotMessage[],
|
||||
messageCheckpoints: any[],
|
||||
onRevertModeChange?: (isReverting: boolean) => void,
|
||||
onEditModeChange?: (isEditing: boolean) => void,
|
||||
onCancelEdit?: () => void
|
||||
onEditModeChange?: (isEditing: boolean) => void
|
||||
) {
|
||||
const [showRestoreConfirmation, setShowRestoreConfirmation] = useState(false)
|
||||
const [showCheckpointDiscardModal, setShowCheckpointDiscardModal] = useState(false)
|
||||
@@ -58,7 +57,7 @@ export function useCheckpointManagement(
|
||||
const { messageCheckpoints: currentCheckpoints } = useCopilotStore.getState()
|
||||
const updatedCheckpoints = {
|
||||
...currentCheckpoints,
|
||||
[message.id]: [],
|
||||
[message.id]: messageCheckpoints.slice(1),
|
||||
}
|
||||
useCopilotStore.setState({ messageCheckpoints: updatedCheckpoints })
|
||||
|
||||
@@ -141,7 +140,7 @@ export function useCheckpointManagement(
|
||||
const { messageCheckpoints: currentCheckpoints } = useCopilotStore.getState()
|
||||
const updatedCheckpoints = {
|
||||
...currentCheckpoints,
|
||||
[message.id]: [],
|
||||
[message.id]: messageCheckpoints.slice(1),
|
||||
}
|
||||
useCopilotStore.setState({ messageCheckpoints: updatedCheckpoints })
|
||||
|
||||
@@ -155,8 +154,6 @@ export function useCheckpointManagement(
|
||||
}
|
||||
|
||||
setShowCheckpointDiscardModal(false)
|
||||
onEditModeChange?.(false)
|
||||
onCancelEdit?.()
|
||||
|
||||
const { sendMessage } = useCopilotStore.getState()
|
||||
if (pendingEditRef.current) {
|
||||
@@ -183,22 +180,13 @@ export function useCheckpointManagement(
|
||||
} finally {
|
||||
setIsProcessingDiscard(false)
|
||||
}
|
||||
}, [
|
||||
messageCheckpoints,
|
||||
revertToCheckpoint,
|
||||
message,
|
||||
messages,
|
||||
onEditModeChange,
|
||||
onCancelEdit,
|
||||
])
|
||||
}, [messageCheckpoints, revertToCheckpoint, message, messages])
|
||||
|
||||
/**
|
||||
* Cancels checkpoint discard and clears pending edit
|
||||
*/
|
||||
const handleCancelCheckpointDiscard = useCallback(() => {
|
||||
setShowCheckpointDiscardModal(false)
|
||||
onEditModeChange?.(false)
|
||||
onCancelEdit?.()
|
||||
pendingEditRef.current = null
|
||||
}, [])
|
||||
|
||||
@@ -230,7 +218,7 @@ export function useCheckpointManagement(
|
||||
}
|
||||
pendingEditRef.current = null
|
||||
}
|
||||
}, [message, messages, onEditModeChange, onCancelEdit])
|
||||
}, [message, messages])
|
||||
|
||||
/**
|
||||
* Handles keyboard events for restore confirmation (Escape/Enter)
|
||||
|
||||
@@ -1446,10 +1446,8 @@ function WorkflowEditSummary({ toolCall }: { toolCall: CopilotToolCall }) {
|
||||
blockType = blockType || op.block_type || ''
|
||||
}
|
||||
|
||||
if (!blockName) blockName = blockType || ''
|
||||
if (!blockName && !blockType) {
|
||||
continue
|
||||
}
|
||||
// Fallback name to type or ID
|
||||
if (!blockName) blockName = blockType || blockId
|
||||
|
||||
const change: BlockChange = { blockId, blockName, blockType }
|
||||
|
||||
|
||||
@@ -22,9 +22,6 @@ interface UseContextManagementProps {
|
||||
export function useContextManagement({ message, initialContexts }: UseContextManagementProps) {
|
||||
const [selectedContexts, setSelectedContexts] = useState<ChatContext[]>(initialContexts ?? [])
|
||||
const initializedRef = useRef(false)
|
||||
const escapeRegex = useCallback((value: string) => {
|
||||
return value.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')
|
||||
}, [])
|
||||
|
||||
// Initialize with initial contexts when they're first provided (for edit mode)
|
||||
useEffect(() => {
|
||||
@@ -81,8 +78,10 @@ export function useContextManagement({ message, initialContexts }: UseContextMan
|
||||
// Check for slash command tokens or mention tokens based on kind
|
||||
const isSlashCommand = c.kind === 'slash_command'
|
||||
const prefix = isSlashCommand ? '/' : '@'
|
||||
const tokenPattern = new RegExp(`(^|\\s)${escapeRegex(prefix)}${escapeRegex(c.label)}(\\s|$)`)
|
||||
return tokenPattern.test(message)
|
||||
const tokenWithSpaces = ` ${prefix}${c.label} `
|
||||
const tokenAtStart = `${prefix}${c.label} `
|
||||
// Token can appear with leading space OR at the start of the message
|
||||
return message.includes(tokenWithSpaces) || message.startsWith(tokenAtStart)
|
||||
})
|
||||
return filtered.length === prev.length ? prev : filtered
|
||||
})
|
||||
|
||||
@@ -76,15 +76,6 @@ export function useMentionTokens({
|
||||
ranges.push({ start: idx, end: idx + token.length, label })
|
||||
fromIndex = idx + token.length
|
||||
}
|
||||
|
||||
// Token at end of message without trailing space: "@label" or " /label"
|
||||
const tokenAtEnd = `${prefix}${label}`
|
||||
if (message.endsWith(tokenAtEnd)) {
|
||||
const idx = message.lastIndexOf(tokenAtEnd)
|
||||
const hasLeadingSpace = idx > 0 && message[idx - 1] === ' '
|
||||
const start = hasLeadingSpace ? idx - 1 : idx
|
||||
ranges.push({ start, end: message.length, label })
|
||||
}
|
||||
}
|
||||
|
||||
ranges.sort((a, b) => a.start - b.start)
|
||||
|
||||
@@ -613,7 +613,7 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
|
||||
|
||||
const insertTriggerAndOpenMenu = useCallback(
|
||||
(trigger: '@' | '/') => {
|
||||
if (disabled) return
|
||||
if (disabled || isLoading) return
|
||||
const textarea = mentionMenu.textareaRef.current
|
||||
if (!textarea) return
|
||||
|
||||
@@ -642,7 +642,7 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
|
||||
}
|
||||
mentionMenu.setSubmenuActiveIndex(0)
|
||||
},
|
||||
[disabled, mentionMenu, message, setMessage]
|
||||
[disabled, isLoading, mentionMenu, message, setMessage]
|
||||
)
|
||||
|
||||
const handleOpenMentionMenuWithAt = useCallback(
|
||||
@@ -735,7 +735,10 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
|
||||
variant='outline'
|
||||
onClick={handleOpenMentionMenuWithAt}
|
||||
title='Insert @'
|
||||
className={cn('cursor-pointer rounded-[6px] p-[4.5px]', disabled && 'cursor-not-allowed')}
|
||||
className={cn(
|
||||
'cursor-pointer rounded-[6px] p-[4.5px]',
|
||||
(disabled || isLoading) && 'cursor-not-allowed'
|
||||
)}
|
||||
>
|
||||
<AtSign className='h-3 w-3' strokeWidth={1.75} />
|
||||
</Badge>
|
||||
@@ -744,7 +747,10 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
|
||||
variant='outline'
|
||||
onClick={handleOpenSlashMenu}
|
||||
title='Insert /'
|
||||
className={cn('cursor-pointer rounded-[6px] p-[4.5px]', disabled && 'cursor-not-allowed')}
|
||||
className={cn(
|
||||
'cursor-pointer rounded-[6px] p-[4.5px]',
|
||||
(disabled || isLoading) && 'cursor-not-allowed'
|
||||
)}
|
||||
>
|
||||
<span className='flex h-3 w-3 items-center justify-center font-medium text-[11px] leading-none'>
|
||||
/
|
||||
|
||||
@@ -22,7 +22,7 @@ import {
|
||||
import { Skeleton } from '@/components/ui'
|
||||
import type { AgentAuthentication, AgentCapabilities } from '@/lib/a2a/types'
|
||||
import { getBaseUrl } from '@/lib/core/utils/urls'
|
||||
import { normalizeInputFormatValue } from '@/lib/workflows/input-format-utils'
|
||||
import { normalizeInputFormatValue } from '@/lib/workflows/input-format'
|
||||
import { StartBlockPath, TriggerUtils } from '@/lib/workflows/triggers/triggers'
|
||||
import {
|
||||
useA2AAgentByWorkflow,
|
||||
|
||||
@@ -2,11 +2,11 @@
|
||||
|
||||
import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { Maximize2 } from 'lucide-react'
|
||||
import {
|
||||
Button,
|
||||
ButtonGroup,
|
||||
ButtonGroupItem,
|
||||
Expand,
|
||||
Label,
|
||||
Modal,
|
||||
ModalBody,
|
||||
@@ -222,7 +222,7 @@ export function GeneralDeploy({
|
||||
onClick={() => setShowExpandedPreview(true)}
|
||||
className='absolute right-[8px] bottom-[8px] z-10 h-[28px] w-[28px] cursor-pointer border border-[var(--border)] bg-transparent p-0 backdrop-blur-sm hover:bg-[var(--surface-3)]'
|
||||
>
|
||||
<Maximize2 className='h-[14px] w-[14px]' />
|
||||
<Expand className='h-[14px] w-[14px]' />
|
||||
</Button>
|
||||
</Tooltip.Trigger>
|
||||
<Tooltip.Content side='top'>See preview</Tooltip.Content>
|
||||
|
||||
@@ -14,7 +14,7 @@ import {
|
||||
} from '@/components/emcn'
|
||||
import { Skeleton } from '@/components/ui'
|
||||
import { generateToolInputSchema, sanitizeToolName } from '@/lib/mcp/workflow-tool-schema'
|
||||
import { normalizeInputFormatValue } from '@/lib/workflows/input-format-utils'
|
||||
import { normalizeInputFormatValue } from '@/lib/workflows/input-format'
|
||||
import { isValidStartBlockType } from '@/lib/workflows/triggers/trigger-utils'
|
||||
import type { InputFormatField } from '@/lib/workflows/types'
|
||||
import {
|
||||
|
||||
@@ -23,6 +23,7 @@ import { CreateApiKeyModal } from '@/app/workspace/[workspaceId]/w/components/si
|
||||
import { startsWithUuid } from '@/executor/constants'
|
||||
import { useApiKeys } from '@/hooks/queries/api-keys'
|
||||
import { useWorkspaceSettings } from '@/hooks/queries/workspace'
|
||||
import { usePermissionConfig } from '@/hooks/use-permission-config'
|
||||
import { useSettingsModalStore } from '@/stores/modals/settings/store'
|
||||
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
||||
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
|
||||
@@ -113,16 +114,12 @@ export function DeployModal({
|
||||
const [existingChat, setExistingChat] = useState<ExistingChat | null>(null)
|
||||
const [isLoadingChat, setIsLoadingChat] = useState(false)
|
||||
|
||||
const [formSubmitting, setFormSubmitting] = useState(false)
|
||||
const [formExists, setFormExists] = useState(false)
|
||||
const [isFormValid, setIsFormValid] = useState(false)
|
||||
|
||||
const [chatSuccess, setChatSuccess] = useState(false)
|
||||
const [formSuccess, setFormSuccess] = useState(false)
|
||||
|
||||
const [isCreateKeyModalOpen, setIsCreateKeyModalOpen] = useState(false)
|
||||
const userPermissions = useUserPermissionsContext()
|
||||
const canManageWorkspaceKeys = userPermissions.canAdmin
|
||||
const { config: permissionConfig } = usePermissionConfig()
|
||||
const { data: apiKeysData, isLoading: isLoadingKeys } = useApiKeys(workflowWorkspaceId || '')
|
||||
const { data: workspaceSettingsData, isLoading: isLoadingSettings } = useWorkspaceSettings(
|
||||
workflowWorkspaceId || ''
|
||||
@@ -189,6 +186,7 @@ export function DeployModal({
|
||||
useEffect(() => {
|
||||
if (open && workflowId) {
|
||||
setActiveTab('general')
|
||||
setApiDeployError(null)
|
||||
fetchChatDeploymentInfo()
|
||||
}
|
||||
}, [open, workflowId, fetchChatDeploymentInfo])
|
||||
@@ -507,6 +505,7 @@ export function DeployModal({
|
||||
const handleCloseModal = () => {
|
||||
setIsSubmitting(false)
|
||||
setChatSubmitting(false)
|
||||
setApiDeployError(null)
|
||||
onOpenChange(false)
|
||||
}
|
||||
|
||||
@@ -516,12 +515,6 @@ export function DeployModal({
|
||||
setTimeout(() => setChatSuccess(false), 2000)
|
||||
}
|
||||
|
||||
const handleFormDeployed = async () => {
|
||||
await handlePostDeploymentUpdate()
|
||||
setFormSuccess(true)
|
||||
setTimeout(() => setFormSuccess(false), 2000)
|
||||
}
|
||||
|
||||
const handlePostDeploymentUpdate = async () => {
|
||||
if (!workflowId) return
|
||||
|
||||
@@ -630,17 +623,6 @@ export function DeployModal({
|
||||
deleteTrigger?.click()
|
||||
}, [])
|
||||
|
||||
const handleFormFormSubmit = useCallback(() => {
|
||||
const form = document.getElementById('form-deploy-form') as HTMLFormElement
|
||||
form?.requestSubmit()
|
||||
}, [])
|
||||
|
||||
const handleFormDelete = useCallback(() => {
|
||||
const form = document.getElementById('form-deploy-form')
|
||||
const deleteTrigger = form?.querySelector('[data-delete-trigger]') as HTMLButtonElement
|
||||
deleteTrigger?.click()
|
||||
}, [])
|
||||
|
||||
return (
|
||||
<>
|
||||
<Modal open={open} onOpenChange={handleCloseModal}>
|
||||
@@ -654,15 +636,31 @@ export function DeployModal({
|
||||
>
|
||||
<ModalTabsList activeValue={activeTab}>
|
||||
<ModalTabsTrigger value='general'>General</ModalTabsTrigger>
|
||||
<ModalTabsTrigger value='api'>API</ModalTabsTrigger>
|
||||
<ModalTabsTrigger value='mcp'>MCP</ModalTabsTrigger>
|
||||
<ModalTabsTrigger value='a2a'>A2A</ModalTabsTrigger>
|
||||
<ModalTabsTrigger value='chat'>Chat</ModalTabsTrigger>
|
||||
{!permissionConfig.hideDeployApi && (
|
||||
<ModalTabsTrigger value='api'>API</ModalTabsTrigger>
|
||||
)}
|
||||
{!permissionConfig.hideDeployMcp && (
|
||||
<ModalTabsTrigger value='mcp'>MCP</ModalTabsTrigger>
|
||||
)}
|
||||
{!permissionConfig.hideDeployA2a && (
|
||||
<ModalTabsTrigger value='a2a'>A2A</ModalTabsTrigger>
|
||||
)}
|
||||
{!permissionConfig.hideDeployChatbot && (
|
||||
<ModalTabsTrigger value='chat'>Chat</ModalTabsTrigger>
|
||||
)}
|
||||
{/* <ModalTabsTrigger value='form'>Form</ModalTabsTrigger> */}
|
||||
<ModalTabsTrigger value='template'>Template</ModalTabsTrigger>
|
||||
{!permissionConfig.hideDeployTemplate && (
|
||||
<ModalTabsTrigger value='template'>Template</ModalTabsTrigger>
|
||||
)}
|
||||
</ModalTabsList>
|
||||
|
||||
<ModalBody className='min-h-0 flex-1'>
|
||||
{apiDeployError && (
|
||||
<div className='mb-3 rounded-[4px] border border-destructive/30 bg-destructive/10 p-3 text-destructive text-sm'>
|
||||
<div className='font-semibold'>Deployment Error</div>
|
||||
<div>{apiDeployError}</div>
|
||||
</div>
|
||||
)}
|
||||
<ModalTabsContent value='general'>
|
||||
<GeneralDeploy
|
||||
workflowId={workflowId}
|
||||
|
||||
@@ -33,5 +33,4 @@ export { Table } from './table/table'
|
||||
export { Text } from './text/text'
|
||||
export { TimeInput } from './time-input/time-input'
|
||||
export { ToolInput } from './tool-input/tool-input'
|
||||
export { TriggerSave } from './trigger-save/trigger-save'
|
||||
export { VariablesInput } from './variables-input/variables-input'
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { useEffect, useMemo, useRef, useState } from 'react'
|
||||
import { useMemo, useRef, useState } from 'react'
|
||||
import { Badge, Input } from '@/components/emcn'
|
||||
import { Label } from '@/components/ui/label'
|
||||
import { cn } from '@/lib/core/utils/cn'
|
||||
@@ -7,39 +7,7 @@ import { TagDropdown } from '@/app/workspace/[workspaceId]/w/[workflowId]/compon
|
||||
import { useSubBlockInput } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-sub-block-input'
|
||||
import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-sub-block-value'
|
||||
import { useAccessibleReferencePrefixes } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-accessible-reference-prefixes'
|
||||
|
||||
/**
|
||||
* Represents a field in the input format configuration
|
||||
*/
|
||||
interface InputFormatField {
|
||||
name: string
|
||||
type?: string
|
||||
}
|
||||
|
||||
/**
|
||||
* Represents an input trigger block structure
|
||||
*/
|
||||
interface InputTriggerBlock {
|
||||
type: 'input_trigger' | 'start_trigger'
|
||||
subBlocks?: {
|
||||
inputFormat?: { value?: InputFormatField[] }
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Represents a legacy starter block structure
|
||||
*/
|
||||
interface StarterBlockLegacy {
|
||||
type: 'starter'
|
||||
subBlocks?: {
|
||||
inputFormat?: { value?: InputFormatField[] }
|
||||
}
|
||||
config?: {
|
||||
params?: {
|
||||
inputFormat?: InputFormatField[]
|
||||
}
|
||||
}
|
||||
}
|
||||
import { useWorkflowInputFields } from '@/hooks/queries/workflows'
|
||||
|
||||
/**
|
||||
* Props for the InputMappingField component
|
||||
@@ -70,73 +38,6 @@ interface InputMappingProps {
|
||||
disabled?: boolean
|
||||
}
|
||||
|
||||
/**
|
||||
* Type guard to check if a value is an InputTriggerBlock
|
||||
* @param value - The value to check
|
||||
* @returns True if the value is an InputTriggerBlock
|
||||
*/
|
||||
function isInputTriggerBlock(value: unknown): value is InputTriggerBlock {
|
||||
const type = (value as { type?: unknown }).type
|
||||
return (
|
||||
!!value && typeof value === 'object' && (type === 'input_trigger' || type === 'start_trigger')
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Type guard to check if a value is a StarterBlockLegacy
|
||||
* @param value - The value to check
|
||||
* @returns True if the value is a StarterBlockLegacy
|
||||
*/
|
||||
function isStarterBlock(value: unknown): value is StarterBlockLegacy {
|
||||
return !!value && typeof value === 'object' && (value as { type?: unknown }).type === 'starter'
|
||||
}
|
||||
|
||||
/**
|
||||
* Type guard to check if a value is an InputFormatField
|
||||
* @param value - The value to check
|
||||
* @returns True if the value is an InputFormatField
|
||||
*/
|
||||
function isInputFormatField(value: unknown): value is InputFormatField {
|
||||
if (typeof value !== 'object' || value === null) return false
|
||||
if (!('name' in value)) return false
|
||||
const { name, type } = value as { name: unknown; type?: unknown }
|
||||
if (typeof name !== 'string' || name.trim() === '') return false
|
||||
if (type !== undefined && typeof type !== 'string') return false
|
||||
return true
|
||||
}
|
||||
|
||||
/**
|
||||
* Extracts input format fields from workflow blocks
|
||||
* @param blocks - The workflow blocks to extract from
|
||||
* @returns Array of input format fields or null if not found
|
||||
*/
|
||||
function extractInputFormatFields(blocks: Record<string, unknown>): InputFormatField[] | null {
|
||||
const triggerEntry = Object.entries(blocks).find(([, b]) => isInputTriggerBlock(b))
|
||||
if (triggerEntry && isInputTriggerBlock(triggerEntry[1])) {
|
||||
const inputFormat = triggerEntry[1].subBlocks?.inputFormat?.value
|
||||
if (Array.isArray(inputFormat)) {
|
||||
return (inputFormat as unknown[])
|
||||
.filter(isInputFormatField)
|
||||
.map((f) => ({ name: f.name, type: f.type }))
|
||||
}
|
||||
}
|
||||
|
||||
const starterEntry = Object.entries(blocks).find(([, b]) => isStarterBlock(b))
|
||||
if (starterEntry && isStarterBlock(starterEntry[1])) {
|
||||
const starter = starterEntry[1]
|
||||
const subBlockFormat = starter.subBlocks?.inputFormat?.value
|
||||
const legacyParamsFormat = starter.config?.params?.inputFormat
|
||||
const chosen = Array.isArray(subBlockFormat) ? subBlockFormat : legacyParamsFormat
|
||||
if (Array.isArray(chosen)) {
|
||||
return (chosen as unknown[])
|
||||
.filter(isInputFormatField)
|
||||
.map((f) => ({ name: f.name, type: f.type }))
|
||||
}
|
||||
}
|
||||
|
||||
return null
|
||||
}
|
||||
|
||||
/**
|
||||
* InputMapping component displays and manages input field mappings for workflow execution
|
||||
* @param props - The component props
|
||||
@@ -168,62 +69,10 @@ export function InputMapping({
|
||||
const inputRefs = useRef<Map<string, HTMLInputElement>>(new Map())
|
||||
const overlayRefs = useRef<Map<string, HTMLDivElement>>(new Map())
|
||||
|
||||
const [childInputFields, setChildInputFields] = useState<InputFormatField[]>([])
|
||||
const [isLoading, setIsLoading] = useState(false)
|
||||
const workflowId = typeof selectedWorkflowId === 'string' ? selectedWorkflowId : undefined
|
||||
const { data: childInputFields = [], isLoading } = useWorkflowInputFields(workflowId)
|
||||
const [collapsedFields, setCollapsedFields] = useState<Record<string, boolean>>({})
|
||||
|
||||
useEffect(() => {
|
||||
let isMounted = true
|
||||
const controller = new AbortController()
|
||||
|
||||
async function fetchChildSchema() {
|
||||
if (!selectedWorkflowId) {
|
||||
if (isMounted) {
|
||||
setChildInputFields([])
|
||||
setIsLoading(false)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
try {
|
||||
if (isMounted) setIsLoading(true)
|
||||
|
||||
const res = await fetch(`/api/workflows/${selectedWorkflowId}`, {
|
||||
signal: controller.signal,
|
||||
})
|
||||
|
||||
if (!res.ok) {
|
||||
if (isMounted) {
|
||||
setChildInputFields([])
|
||||
setIsLoading(false)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
const { data } = await res.json()
|
||||
const blocks = (data?.state?.blocks as Record<string, unknown>) || {}
|
||||
const fields = extractInputFormatFields(blocks)
|
||||
|
||||
if (isMounted) {
|
||||
setChildInputFields(fields || [])
|
||||
setIsLoading(false)
|
||||
}
|
||||
} catch (error) {
|
||||
if (isMounted) {
|
||||
setChildInputFields([])
|
||||
setIsLoading(false)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fetchChildSchema()
|
||||
|
||||
return () => {
|
||||
isMounted = false
|
||||
controller.abort()
|
||||
}
|
||||
}, [selectedWorkflowId])
|
||||
|
||||
const valueObj: Record<string, string> = useMemo(() => {
|
||||
if (isPreview && previewValue && typeof previewValue === 'object') {
|
||||
return previewValue as Record<string, string>
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
import type React from 'react'
|
||||
import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { useQuery } from '@tanstack/react-query'
|
||||
import { Loader2, WrenchIcon, XIcon } from 'lucide-react'
|
||||
import { useParams } from 'next/navigation'
|
||||
import {
|
||||
@@ -61,7 +60,7 @@ import {
|
||||
useCustomTools,
|
||||
} from '@/hooks/queries/custom-tools'
|
||||
import { useForceRefreshMcpTools, useMcpServers, useStoredMcpTools } from '@/hooks/queries/mcp'
|
||||
import { useWorkflows } from '@/hooks/queries/workflows'
|
||||
import { useWorkflowInputFields, useWorkflows } from '@/hooks/queries/workflows'
|
||||
import { usePermissionConfig } from '@/hooks/use-permission-config'
|
||||
import { getProviderFromModel, supportsToolUsageControl } from '@/providers/utils'
|
||||
import { useSettingsModalStore } from '@/stores/modals/settings/store'
|
||||
@@ -645,56 +644,7 @@ function WorkflowInputMapperSyncWrapper({
|
||||
disabled: boolean
|
||||
workflowId: string
|
||||
}) {
|
||||
const { data: workflowData, isLoading } = useQuery({
|
||||
queryKey: ['workflow-input-fields', workflowId],
|
||||
queryFn: async () => {
|
||||
const response = await fetch(`/api/workflows/${workflowId}`)
|
||||
if (!response.ok) throw new Error('Failed to fetch workflow')
|
||||
const { data } = await response.json()
|
||||
return data
|
||||
},
|
||||
enabled: Boolean(workflowId),
|
||||
staleTime: 60 * 1000,
|
||||
})
|
||||
|
||||
const inputFields = useMemo(() => {
|
||||
if (!workflowData?.state?.blocks) return []
|
||||
|
||||
const blocks = workflowData.state.blocks as Record<string, any>
|
||||
|
||||
const triggerEntry = Object.entries(blocks).find(
|
||||
([, block]) =>
|
||||
block.type === 'start_trigger' || block.type === 'input_trigger' || block.type === 'starter'
|
||||
)
|
||||
|
||||
if (!triggerEntry) return []
|
||||
|
||||
const triggerBlock = triggerEntry[1]
|
||||
|
||||
const inputFormat = triggerBlock.subBlocks?.inputFormat?.value
|
||||
|
||||
if (Array.isArray(inputFormat)) {
|
||||
return inputFormat
|
||||
.filter((field: any) => field.name && typeof field.name === 'string')
|
||||
.map((field: any) => ({
|
||||
name: field.name,
|
||||
type: field.type || 'string',
|
||||
}))
|
||||
}
|
||||
|
||||
const legacyFormat = triggerBlock.config?.params?.inputFormat
|
||||
|
||||
if (Array.isArray(legacyFormat)) {
|
||||
return legacyFormat
|
||||
.filter((field: any) => field.name && typeof field.name === 'string')
|
||||
.map((field: any) => ({
|
||||
name: field.name,
|
||||
type: field.type || 'string',
|
||||
}))
|
||||
}
|
||||
|
||||
return []
|
||||
}, [workflowData])
|
||||
const { data: inputFields = [], isLoading } = useWorkflowInputFields(workflowId)
|
||||
|
||||
const parsedValue = useMemo(() => {
|
||||
try {
|
||||
|
||||
@@ -1,348 +0,0 @@
|
||||
import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import {
|
||||
Button,
|
||||
Modal,
|
||||
ModalBody,
|
||||
ModalContent,
|
||||
ModalFooter,
|
||||
ModalHeader,
|
||||
} from '@/components/emcn/components'
|
||||
import { Trash } from '@/components/emcn/icons/trash'
|
||||
import { cn } from '@/lib/core/utils/cn'
|
||||
import { useCollaborativeWorkflow } from '@/hooks/use-collaborative-workflow'
|
||||
import { useTriggerConfigAggregation } from '@/hooks/use-trigger-config-aggregation'
|
||||
import { useWebhookManagement } from '@/hooks/use-webhook-management'
|
||||
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
|
||||
import { getTrigger, isTriggerValid } from '@/triggers'
|
||||
import { SYSTEM_SUBBLOCK_IDS } from '@/triggers/constants'
|
||||
|
||||
const logger = createLogger('TriggerSave')
|
||||
|
||||
interface TriggerSaveProps {
|
||||
blockId: string
|
||||
subBlockId: string
|
||||
triggerId?: string
|
||||
isPreview?: boolean
|
||||
disabled?: boolean
|
||||
}
|
||||
|
||||
type SaveStatus = 'idle' | 'saving' | 'saved' | 'error'
|
||||
|
||||
export function TriggerSave({
|
||||
blockId,
|
||||
subBlockId,
|
||||
triggerId,
|
||||
isPreview = false,
|
||||
disabled = false,
|
||||
}: TriggerSaveProps) {
|
||||
const [saveStatus, setSaveStatus] = useState<SaveStatus>('idle')
|
||||
const [errorMessage, setErrorMessage] = useState<string | null>(null)
|
||||
const [deleteStatus, setDeleteStatus] = useState<'idle' | 'deleting'>('idle')
|
||||
const [showDeleteDialog, setShowDeleteDialog] = useState(false)
|
||||
|
||||
const effectiveTriggerId = useMemo(() => {
|
||||
if (triggerId && isTriggerValid(triggerId)) {
|
||||
return triggerId
|
||||
}
|
||||
const selectedTriggerId = useSubBlockStore.getState().getValue(blockId, 'selectedTriggerId')
|
||||
if (typeof selectedTriggerId === 'string' && isTriggerValid(selectedTriggerId)) {
|
||||
return selectedTriggerId
|
||||
}
|
||||
return triggerId
|
||||
}, [blockId, triggerId])
|
||||
|
||||
const { collaborativeSetSubblockValue } = useCollaborativeWorkflow()
|
||||
|
||||
const { webhookId, saveConfig, deleteConfig, isLoading } = useWebhookManagement({
|
||||
blockId,
|
||||
triggerId: effectiveTriggerId,
|
||||
isPreview,
|
||||
useWebhookUrl: true, // to store the webhook url in the store
|
||||
})
|
||||
|
||||
const triggerConfig = useSubBlockStore((state) => state.getValue(blockId, 'triggerConfig'))
|
||||
const triggerCredentials = useSubBlockStore((state) =>
|
||||
state.getValue(blockId, 'triggerCredentials')
|
||||
)
|
||||
|
||||
const triggerDef =
|
||||
effectiveTriggerId && isTriggerValid(effectiveTriggerId) ? getTrigger(effectiveTriggerId) : null
|
||||
|
||||
const validateRequiredFields = useCallback(
|
||||
(
|
||||
configToCheck: Record<string, any> | null | undefined
|
||||
): { valid: boolean; missingFields: string[] } => {
|
||||
if (!triggerDef) {
|
||||
return { valid: true, missingFields: [] }
|
||||
}
|
||||
|
||||
const missingFields: string[] = []
|
||||
|
||||
triggerDef.subBlocks
|
||||
.filter(
|
||||
(sb) => sb.required && sb.mode === 'trigger' && !SYSTEM_SUBBLOCK_IDS.includes(sb.id)
|
||||
)
|
||||
.forEach((subBlock) => {
|
||||
if (subBlock.id === 'triggerCredentials') {
|
||||
if (!triggerCredentials) {
|
||||
missingFields.push(subBlock.title || 'Credentials')
|
||||
}
|
||||
} else {
|
||||
const value = configToCheck?.[subBlock.id]
|
||||
if (value === undefined || value === null || value === '') {
|
||||
missingFields.push(subBlock.title || subBlock.id)
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
return {
|
||||
valid: missingFields.length === 0,
|
||||
missingFields,
|
||||
}
|
||||
},
|
||||
[triggerDef, triggerCredentials]
|
||||
)
|
||||
|
||||
const requiredSubBlockIds = useMemo(() => {
|
||||
if (!triggerDef) return []
|
||||
return triggerDef.subBlocks
|
||||
.filter((sb) => sb.required && sb.mode === 'trigger' && !SYSTEM_SUBBLOCK_IDS.includes(sb.id))
|
||||
.map((sb) => sb.id)
|
||||
}, [triggerDef])
|
||||
|
||||
const subscribedSubBlockValues = useSubBlockStore(
|
||||
useCallback(
|
||||
(state) => {
|
||||
if (!triggerDef) return {}
|
||||
const values: Record<string, any> = {}
|
||||
requiredSubBlockIds.forEach((subBlockId) => {
|
||||
const value = state.getValue(blockId, subBlockId)
|
||||
if (value !== null && value !== undefined && value !== '') {
|
||||
values[subBlockId] = value
|
||||
}
|
||||
})
|
||||
return values
|
||||
},
|
||||
[blockId, triggerDef, requiredSubBlockIds]
|
||||
)
|
||||
)
|
||||
|
||||
const previousValuesRef = useRef<Record<string, any>>({})
|
||||
const validationTimeoutRef = useRef<NodeJS.Timeout | null>(null)
|
||||
|
||||
useEffect(() => {
|
||||
if (saveStatus !== 'error' || !triggerDef) {
|
||||
previousValuesRef.current = subscribedSubBlockValues
|
||||
return
|
||||
}
|
||||
|
||||
const hasChanges = Object.keys(subscribedSubBlockValues).some(
|
||||
(key) =>
|
||||
previousValuesRef.current[key] !== (subscribedSubBlockValues as Record<string, any>)[key]
|
||||
)
|
||||
|
||||
if (!hasChanges) {
|
||||
return
|
||||
}
|
||||
|
||||
if (validationTimeoutRef.current) {
|
||||
clearTimeout(validationTimeoutRef.current)
|
||||
}
|
    validationTimeoutRef.current = setTimeout(() => {
      const aggregatedConfig = useTriggerConfigAggregation(blockId, effectiveTriggerId)

      if (aggregatedConfig) {
        useSubBlockStore.getState().setValue(blockId, 'triggerConfig', aggregatedConfig)
      }

      const validation = validateRequiredFields(aggregatedConfig)

      if (validation.valid) {
        setErrorMessage(null)
        setSaveStatus('idle')
        logger.debug('Error cleared after validation passed', {
          blockId,
          triggerId: effectiveTriggerId,
        })
      } else {
        setErrorMessage(`Missing required fields: ${validation.missingFields.join(', ')}`)
        logger.debug('Error message updated', {
          blockId,
          triggerId: effectiveTriggerId,
          missingFields: validation.missingFields,
        })
      }

      previousValuesRef.current = subscribedSubBlockValues
    }, 300)

    return () => {
      if (validationTimeoutRef.current) {
        clearTimeout(validationTimeoutRef.current)
      }
    }
  }, [
    blockId,
    effectiveTriggerId,
    triggerDef,
    subscribedSubBlockValues,
    saveStatus,
    validateRequiredFields,
  ])

  const handleSave = async () => {
    if (isPreview || disabled) return

    setSaveStatus('saving')
    setErrorMessage(null)

    try {
      const aggregatedConfig = useTriggerConfigAggregation(blockId, effectiveTriggerId)

      if (aggregatedConfig) {
        useSubBlockStore.getState().setValue(blockId, 'triggerConfig', aggregatedConfig)
        logger.debug('Stored aggregated trigger config', {
          blockId,
          triggerId: effectiveTriggerId,
          aggregatedConfig,
        })
      }

      const validation = validateRequiredFields(aggregatedConfig)
      if (!validation.valid) {
        setErrorMessage(`Missing required fields: ${validation.missingFields.join(', ')}`)
        setSaveStatus('error')
        return
      }

      const success = await saveConfig()
      if (!success) {
        throw new Error('Save config returned false')
      }

      setSaveStatus('saved')
      setErrorMessage(null)

      const savedWebhookId = useSubBlockStore.getState().getValue(blockId, 'webhookId')
      const savedTriggerPath = useSubBlockStore.getState().getValue(blockId, 'triggerPath')
      const savedTriggerId = useSubBlockStore.getState().getValue(blockId, 'triggerId')
      const savedTriggerConfig = useSubBlockStore.getState().getValue(blockId, 'triggerConfig')

      collaborativeSetSubblockValue(blockId, 'webhookId', savedWebhookId)
      collaborativeSetSubblockValue(blockId, 'triggerPath', savedTriggerPath)
      collaborativeSetSubblockValue(blockId, 'triggerId', savedTriggerId)
      collaborativeSetSubblockValue(blockId, 'triggerConfig', savedTriggerConfig)

      setTimeout(() => {
        setSaveStatus('idle')
      }, 2000)

      logger.info('Trigger configuration saved successfully', {
        blockId,
        triggerId: effectiveTriggerId,
        hasWebhookId: !!webhookId,
      })
    } catch (error: any) {
      setSaveStatus('error')
      setErrorMessage(error.message || 'An error occurred while saving.')
      logger.error('Error saving trigger configuration', { error })
    }
  }

  const handleDeleteClick = () => {
    if (isPreview || disabled || !webhookId) return
    setShowDeleteDialog(true)
  }

  const handleDeleteConfirm = async () => {
    setShowDeleteDialog(false)
    setDeleteStatus('deleting')
    setErrorMessage(null)

    try {
      const success = await deleteConfig()

      if (success) {
        setDeleteStatus('idle')
        setSaveStatus('idle')
        setErrorMessage(null)

        collaborativeSetSubblockValue(blockId, 'triggerPath', '')
        collaborativeSetSubblockValue(blockId, 'webhookId', null)
        collaborativeSetSubblockValue(blockId, 'triggerConfig', null)

        logger.info('Trigger configuration deleted successfully', {
          blockId,
          triggerId: effectiveTriggerId,
        })
      } else {
        setDeleteStatus('idle')
        setErrorMessage('Failed to delete trigger configuration.')
        logger.error('Failed to delete trigger configuration')
      }
    } catch (error: any) {
      setDeleteStatus('idle')
      setErrorMessage(error.message || 'An error occurred while deleting.')
      logger.error('Error deleting trigger configuration', { error })
    }
  }

  if (isPreview) {
    return null
  }

  const isProcessing = saveStatus === 'saving' || deleteStatus === 'deleting' || isLoading

  return (
    <div id={`${blockId}-${subBlockId}`}>
      <div className='flex gap-2'>
        <Button
          variant='default'
          onClick={handleSave}
          disabled={disabled || isProcessing}
          className={cn(
            'flex-1',
            saveStatus === 'saved' && '!bg-green-600 !text-white hover:!bg-green-700',
            saveStatus === 'error' && '!bg-red-600 !text-white hover:!bg-red-700'
          )}
        >
          {saveStatus === 'saving' && 'Saving...'}
          {saveStatus === 'saved' && 'Saved'}
          {saveStatus === 'error' && 'Error'}
          {saveStatus === 'idle' && (webhookId ? 'Update Configuration' : 'Save Configuration')}
        </Button>

        {webhookId && (
          <Button variant='default' onClick={handleDeleteClick} disabled={disabled || isProcessing}>
            <Trash className='h-[14px] w-[14px]' />
          </Button>
        )}
      </div>

      {errorMessage && <p className='mt-2 text-[12px] text-[var(--text-error)]'>{errorMessage}</p>}

      <Modal open={showDeleteDialog} onOpenChange={setShowDeleteDialog}>
        <ModalContent size='sm'>
          <ModalHeader>Delete Trigger</ModalHeader>
          <ModalBody>
            <p className='text-[12px] text-[var(--text-secondary)]'>
              Are you sure you want to delete this trigger configuration? This will remove the
              webhook and stop all incoming triggers.{' '}
              <span className='text-[var(--text-error)]'>This action cannot be undone.</span>
            </p>
          </ModalBody>
          <ModalFooter>
            <Button variant='active' onClick={() => setShowDeleteDialog(false)}>
              Cancel
            </Button>
            <Button variant='destructive' onClick={handleDeleteConfirm}>
              Delete
            </Button>
          </ModalFooter>
        </ModalContent>
      </Modal>
    </div>
  )
}
@@ -39,7 +39,6 @@ import {
  Text,
  TimeInput,
  ToolInput,
  TriggerSave,
  VariablesInput,
} from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components'
import { useDependsOnGate } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-depends-on-gate'
@@ -867,17 +866,6 @@ function SubBlockComponent({
          }
        />
      )
    case 'trigger-save':
      return (
        <TriggerSave
          blockId={blockId}
          subBlockId={config.id}
          triggerId={config.triggerId}
          isPreview={isPreview}
          disabled={disabled}
        />
      )

    case 'messages-input':
      return (
        <MessagesInput

@@ -2,4 +2,3 @@ export { Copilot } from './copilot/copilot'
export { Deploy } from './deploy/deploy'
export { Editor } from './editor/editor'
export { Toolbar } from './toolbar/toolbar'
export { WorkflowControls } from './workflow-controls/workflow-controls'

@@ -327,12 +327,14 @@ export const Toolbar = forwardRef<ToolbarRef, ToolbarProps>(function Toolbar(
  /**
   * Handle search input blur.
   *
   * We intentionally keep search mode active after blur so that ArrowUp/Down
   * navigation continues to work after the first move from the search input
   * into the triggers/blocks list (e.g. when initiated via Mod+F).
   * If the search query is empty, deactivate search mode to show the search icon again.
   * If there's a query, keep search mode active so ArrowUp/Down navigation continues
   * to work after focus moves into the triggers/blocks list (e.g. when initiated via Mod+F).
   */
  const handleSearchBlur = () => {
    // No-op by design
    if (!searchQuery.trim()) {
      setIsSearchActive(false)
    }
  }

  /**

@@ -1,51 +0,0 @@
'use client'

import { Button, Redo, Undo } from '@/components/emcn'
import { useSession } from '@/lib/auth/auth-client'
import { useCollaborativeWorkflow } from '@/hooks/use-collaborative-workflow'
import { useUndoRedoStore } from '@/stores/undo-redo'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'

/**
 * Workflow controls component that provides undo/redo functionality.
 * Styled to align with the panel tab buttons.
 */
export function WorkflowControls() {
  const { undo, redo } = useCollaborativeWorkflow()
  const { activeWorkflowId } = useWorkflowRegistry()
  const { data: session } = useSession()
  const userId = session?.user?.id || 'unknown'
  const stacks = useUndoRedoStore((s) => s.stacks)

  const undoRedoSizes = (() => {
    const key = activeWorkflowId && userId ? `${activeWorkflowId}:${userId}` : ''
    const stack = (key && stacks[key]) || { undo: [], redo: [] }
    return { undoSize: stack.undo.length, redoSize: stack.redo.length }
  })()

  const canUndo = undoRedoSizes.undoSize > 0
  const canRedo = undoRedoSizes.redoSize > 0

  return (
    <div className='flex gap-[2px]'>
      <Button
        className='h-[28px] rounded-[6px] rounded-r-none border border-transparent px-[6px] py-[5px] hover:border-[var(--border-1)] hover:bg-[var(--surface-5)]'
        onClick={undo}
        variant={canUndo ? 'active' : 'ghost'}
        disabled={!canUndo}
        title='Undo (Cmd+Z)'
      >
        <Undo className='h-[12px] w-[12px]' />
      </Button>
      <Button
        className='h-[28px] rounded-[6px] rounded-l-none border border-transparent px-[6px] py-[5px] hover:border-[var(--border-1)] hover:bg-[var(--surface-5)]'
        onClick={redo}
        variant={canRedo ? 'active' : 'ghost'}
        disabled={!canRedo}
        title='Redo (Cmd+Shift+Z)'
      >
        <Redo className='h-[12px] w-[12px]' />
      </Button>
    </div>
  )
}
@@ -495,9 +495,6 @@ export function Panel() {
            Editor
          </Button>
        </div>

        {/* Workflow Controls (Undo/Redo) */}
        {/* <WorkflowControls /> */}
      </div>

      {/* Tab Content - Keep all tabs mounted but hidden to preserve state */}

@@ -148,7 +148,7 @@ export const SubflowNodeComponent = memo(({ data, id }: NodeProps<SubflowNodeDat
      ref={blockRef}
      onClick={() => setCurrentBlockId(id)}
      className={cn(
        'relative cursor-pointer select-none rounded-[8px] border border-[var(--border-1)]',
        'workflow-drag-handle relative cursor-grab select-none rounded-[8px] border border-[var(--border-1)] [&:active]:cursor-grabbing',
        'transition-block-bg transition-ring',
        'z-[20]'
      )}
@@ -166,11 +166,8 @@ export const SubflowNodeComponent = memo(({ data, id }: NodeProps<SubflowNodeDat
      {/* Header Section */}
      <div
        className={cn(
          'workflow-drag-handle flex cursor-grab items-center justify-between rounded-t-[8px] border-[var(--border)] border-b bg-[var(--surface-2)] py-[8px] pr-[12px] pl-[8px] [&:active]:cursor-grabbing'
          'flex items-center justify-between rounded-t-[8px] border-[var(--border)] border-b bg-[var(--surface-2)] py-[8px] pr-[12px] pl-[8px]'
        )}
        onMouseDown={(e) => {
          e.stopPropagation()
        }}
      >
        <div className='flex min-w-0 flex-1 items-center gap-[10px]'>
          <div

Some files were not shown because too many files have changed in this diff