Mirror of https://github.com/simstudioai/sim.git (synced 2026-01-09 23:17:59 -05:00)

fix(permissions): remove permissions granted by org membership (#1206)
* fix(permissions): remove cross-functional permissions granted by org membership
* code hygiene

@@ -2,7 +2,7 @@ import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { getSimplifiedBillingSummary } from '@/lib/billing/core/billing'
import { getOrganizationBillingData } from '@/lib/billing/core/organization-billing'
import { getOrganizationBillingData } from '@/lib/billing/core/organization'
import { createLogger } from '@/lib/logs/console/logger'
import { db } from '@/db'
import { member, userStats } from '@/db/schema'

@@ -1,9 +1,11 @@
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { getUserUsageLimitInfo, updateUserUsageLimit } from '@/lib/billing'
import { getOrganizationBillingData } from '@/lib/billing/core/organization-billing'
import {
getOrganizationBillingData,
isOrganizationOwnerOrAdmin,
} from '@/lib/billing/core/organization'
import { createLogger } from '@/lib/logs/console/logger'
import { isOrganizationOwnerOrAdmin } from '@/lib/permissions/utils'

const logger = createLogger('UnifiedUsageLimitsAPI')

@@ -25,7 +27,6 @@ export async function GET(request: NextRequest) {
const userId = searchParams.get('userId') || session.user.id
const organizationId = searchParams.get('organizationId')

// Validate context
if (!['user', 'organization'].includes(context)) {
return NextResponse.json(
{ error: 'Invalid context. Must be "user" or "organization"' },

@@ -33,7 +34,6 @@ export async function GET(request: NextRequest) {
)
}

// For user context, ensure they can only view their own info
if (context === 'user' && userId !== session.user.id) {
return NextResponse.json(
{ error: "Cannot view other users' usage information" },

@@ -41,7 +41,6 @@ export async function GET(request: NextRequest) {
)
}

// Get usage limit info
if (context === 'organization') {
if (!organizationId) {
return NextResponse.json(

@@ -107,10 +106,8 @@ export async function PUT(request: NextRequest) {
}

if (context === 'user') {
// Update user's own usage limit
await updateUserUsageLimit(userId, limit)
} else if (context === 'organization') {
// context === 'organization'
if (!organizationId) {
return NextResponse.json(
{ error: 'Organization ID is required when context=organization' },

@@ -123,10 +120,7 @@ export async function PUT(request: NextRequest) {
return NextResponse.json({ error: 'Permission denied' }, { status: 403 })
}

// Use the dedicated function to update org usage limit
const { updateOrganizationUsageLimit } = await import(
'@/lib/billing/core/organization-billing'
)
const { updateOrganizationUsageLimit } = await import('@/lib/billing/core/organization')
const result = await updateOrganizationUsageLimit(organizationId, limit)

if (!result.success) {

@@ -137,7 +131,6 @@ export async function PUT(request: NextRequest) {
return NextResponse.json({ success: true, context, userId, organizationId, data: updated })
}

// Return updated limit info
const updatedInfo = await getUserUsageLimitInfo(userId)

return NextResponse.json({

@@ -2,16 +2,19 @@ import crypto from 'crypto'
import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { createLogger } from '@/lib/logs/console/logger'
import { getUsersWithPermissions, hasWorkspaceAdminAccess } from '@/lib/permissions/utils'
import { db } from '@/db'
import { permissions, type permissionTypeEnum } from '@/db/schema'

const logger = createLogger('WorkspacesPermissionsAPI')

type PermissionType = (typeof permissionTypeEnum.enumValues)[number]

interface UpdatePermissionsRequest {
updates: Array<{
userId: string
permissions: PermissionType // Single permission type instead of object with booleans
permissions: PermissionType
}>
}
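
For context, a minimal sketch of how a client might call this PATCH handler with the new single-permission-per-user shape. Only the body shape comes from UpdatePermissionsRequest above; the route path and helper name are illustrative assumptions.

// Hypothetical client helper; the '/api/workspaces/.../permissions' path is an assumption.
async function updateWorkspacePermissions(workspaceId: string) {
  const body = {
    updates: [
      { userId: 'user_abc', permissions: 'write' }, // one PermissionType per user: 'admin' | 'write' | 'read'
      { userId: 'user_def', permissions: 'read' },
    ],
  }
  const res = await fetch(`/api/workspaces/${workspaceId}/permissions`, {
    method: 'PATCH',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(body),
  })
  if (!res.ok) throw new Error(`Permission update failed: ${res.status}`)
  return res.json()
}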

@@ -33,7 +36,6 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
return NextResponse.json({ error: 'Authentication required' }, { status: 401 })
}

// Verify the current user has access to this workspace
const userPermission = await db
.select()
.from(permissions)

@@ -57,7 +59,7 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
total: result.length,
})
} catch (error) {
console.error('Error fetching workspace permissions:', error)
logger.error('Error fetching workspace permissions:', error)
return NextResponse.json({ error: 'Failed to fetch workspace permissions' }, { status: 500 })
}
}

@@ -81,7 +83,6 @@ export async function PATCH(request: NextRequest, { params }: { params: Promise<
return NextResponse.json({ error: 'Authentication required' }, { status: 401 })
}

// Verify the current user has admin access to this workspace (either direct or through organization)
const hasAdminAccess = await hasWorkspaceAdminAccess(session.user.id, workspaceId)

if (!hasAdminAccess) {

@@ -91,10 +92,8 @@ export async function PATCH(request: NextRequest, { params }: { params: Promise<
)
}

// Parse and validate request body
const body: UpdatePermissionsRequest = await request.json()

// Prevent users from modifying their own admin permissions
const selfUpdate = body.updates.find((update) => update.userId === session.user.id)
if (selfUpdate && selfUpdate.permissions !== 'admin') {
return NextResponse.json(

@@ -103,10 +102,8 @@ export async function PATCH(request: NextRequest, { params }: { params: Promise<
)
}

// Process updates in a transaction
await db.transaction(async (tx) => {
for (const update of body.updates) {
// Delete existing permissions for this user and workspace
await tx
.delete(permissions)
.where(

@@ -117,7 +114,6 @@ export async function PATCH(request: NextRequest, { params }: { params: Promise<
)
)

// Insert the single new permission
await tx.insert(permissions).values({
id: crypto.randomUUID(),
userId: update.userId,

@@ -138,7 +134,7 @@ export async function PATCH(request: NextRequest, { params }: { params: Promise<
total: updatedUsers.length,
})
} catch (error) {
console.error('Error updating workspace permissions:', error)
logger.error('Error updating workspace permissions:', error)
return NextResponse.json({ error: 'Failed to update workspace permissions' }, { status: 500 })
}
}

@@ -12,15 +12,17 @@ import {
extractPathFromOutputId,
parseOutputContentSafely,
} from '@/lib/response-format'
import { ChatMessage } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/chat/components/chat-message/chat-message'
import { OutputSelect } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/chat/components/output-select/output-select'
import {
ChatFileUpload,
ChatMessage,
OutputSelect,
} from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/chat/components'
import { useWorkflowExecution } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-workflow-execution'
import type { BlockLog, ExecutionResult } from '@/executor/types'
import { useExecutionStore } from '@/stores/execution/store'
import { useChatStore } from '@/stores/panel/chat/store'
import { useConsoleStore } from '@/stores/panel/console/store'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { ChatFileUpload } from './components/chat-file-upload'

const logger = createLogger('ChatPanel')

@@ -0,0 +1,3 @@
export { ChatFileUpload } from './chat-file-upload/chat-file-upload'
export { ChatMessage } from './chat-message/chat-message'
export { OutputSelect } from './output-select/output-select'

@@ -155,7 +155,7 @@ const ImagePreview = ({
className='h-auto w-full rounded-lg border'
unoptimized
onError={(e) => {
console.error('Image failed to load:', imageSrc)
logger.error('Image failed to load:', imageSrc)
setLoadError(true)
onLoadError?.(true)
}}

@@ -333,7 +333,7 @@ export function ConsoleEntry({ entry, consoleWidth }: ConsoleEntryProps) {
// Clean up the URL
setTimeout(() => URL.revokeObjectURL(url), 100)
} catch (error) {
console.error('Error downloading image:', error)
logger.error('Error downloading image:', error)
alert('Failed to download image. Please try again later.')
}
}

@@ -9,6 +9,7 @@ import {
} from '@/components/ui/dropdown-menu'
import { ScrollArea } from '@/components/ui/scroll-area'
import { Tooltip, TooltipContent, TooltipTrigger } from '@/components/ui/tooltip'
import { createLogger } from '@/lib/logs/console/logger'
import { useCopilotStore } from '@/stores/copilot/store'
import { useChatStore } from '@/stores/panel/chat/store'
import { useConsoleStore } from '@/stores/panel/console/store'

@@ -19,6 +20,8 @@ import { Console } from './components/console/console'
import { Copilot } from './components/copilot/copilot'
import { Variables } from './components/variables/variables'

const logger = createLogger('Panel')

export function Panel() {
const [chatMessage, setChatMessage] = useState<string>('')
const [isHistoryDropdownOpen, setIsHistoryDropdownOpen] = useState(false)

@@ -67,7 +70,7 @@ export function Panel() {
try {
await deleteChat(chatId)
} catch (error) {
console.error('Error deleting chat:', error)
logger.error('Error deleting chat:', error)
}
},
[deleteChat]

@@ -101,7 +104,7 @@ export function Panel() {
lastLoadedWorkflowRef.current = activeWorkflowId
}
} catch (error) {
console.error('Failed to load copilot data:', error)
logger.error('Failed to load copilot data:', error)
}
},
[

@@ -134,14 +137,14 @@ export function Panel() {
if (!areChatsFresh(activeWorkflowId)) {
// Don't await - let it load in background while dropdown is already open
ensureCopilotDataLoaded(false).catch((error) => {
console.error('Failed to load chat history:', error)
logger.error('Failed to load chat history:', error)
})
}
}

// If streaming, just log that we're showing cached data
if (open && isSendingMessage) {
console.log('Chat history opened during stream - showing cached data only')
logger.info('Chat history opened during stream - showing cached data only')
}
},
[ensureCopilotDataLoaded, activeWorkflowId, areChatsFresh, isSendingMessage]

@@ -278,7 +281,7 @@ export function Panel() {
// This is a real workflow change, not just a tab switch
if (copilotWorkflowId !== activeWorkflowId || !copilotWorkflowId) {
ensureCopilotDataLoaded().catch((error) => {
console.error('Failed to auto-load copilot data on workflow change:', error)
logger.error('Failed to auto-load copilot data on workflow change:', error)
})
}
}

@@ -235,7 +235,7 @@ export function FileUpload({
})
}
} catch (error) {
console.error(`Error uploading ${file.name}:`, error)
logger.error(`Error uploading ${file.name}:`, error)
const errorMessage = error instanceof Error ? error.message : 'Unknown error'
uploadErrors.push(`${file.name}: ${errorMessage}`)
}

@@ -428,7 +428,7 @@ export function FileUpload({
deletionResults.failures.push(`${file.name}: ${errorMessage}`)
}
} catch (error) {
console.error(`Failed to delete file ${file.name}:`, error)
logger.error(`Failed to delete file ${file.name}:`, error)
deletionResults.failures.push(
`${file.name}: ${error instanceof Error ? error.message : 'Unknown error'}`
)

@@ -483,7 +483,7 @@ export function ToolInput({
try {
return block.tools.config.tool({ operation })
} catch (error) {
console.error('Error selecting tool for operation:', error)
logger.error('Error selecting tool for operation:', error)
}
}

@@ -6,6 +6,7 @@ import { Badge } from '@/components/ui/badge'
import { Button } from '@/components/ui/button'
import { Card } from '@/components/ui/card'
import { Tooltip, TooltipContent, TooltipTrigger } from '@/components/ui/tooltip'
import { createLogger } from '@/lib/logs/console/logger'
import { parseCronToHumanReadable } from '@/lib/schedules/utils'
import { cn, validateName } from '@/lib/utils'
import { type DiffStatus, hasDiffStatus } from '@/lib/workflows/diff/types'

@@ -23,6 +24,8 @@ import { ActionBar } from './components/action-bar/action-bar'
import { ConnectionBlocks } from './components/connection-blocks/connection-blocks'
import { SubBlock } from './components/sub-block/sub-block'

const logger = createLogger('WorkflowBlock')

interface WorkflowBlockProps {
type: string
config: BlockConfig

@@ -232,10 +235,10 @@ export function WorkflowBlock({ id, data }: NodeProps<WorkflowBlockProps>) {
fetchScheduleInfo(currentWorkflowId)
}
} else {
console.error('Failed to reactivate schedule')
logger.error('Failed to reactivate schedule')
}
} catch (error) {
console.error('Error reactivating schedule:', error)
logger.error('Error reactivating schedule:', error)
}
}

@@ -255,10 +258,10 @@ export function WorkflowBlock({ id, data }: NodeProps<WorkflowBlockProps>) {
fetchScheduleInfo(currentWorkflowId)
}
} else {
console.error('Failed to disable schedule')
logger.error('Failed to disable schedule')
}
} catch (error) {
console.error('Error disabling schedule:', error)
logger.error('Error disabling schedule:', error)
}
}

@@ -328,12 +331,12 @@ export function WorkflowBlock({ id, data }: NodeProps<WorkflowBlockProps>) {
return
}
} catch (err) {
console.error('Error fetching schedule status:', err)
logger.error('Error fetching schedule status:', err)
}

setScheduleInfo(baseInfo)
} catch (error) {
console.error('Error fetching schedule info:', error)
logger.error('Error fetching schedule info:', error)
setScheduleInfo(null)
} finally {
setIsLoadingScheduleInfo(false)

@@ -15,9 +15,12 @@ import {
import { Button } from '@/components/ui/button'
import { Input } from '@/components/ui/input'
import { Skeleton } from '@/components/ui/skeleton'
import { createLogger } from '@/lib/logs/console/logger'
import { useEnvironmentStore } from '@/stores/settings/environment/store'
import type { EnvironmentVariable as StoreEnvironmentVariable } from '@/stores/settings/environment/types'

const logger = createLogger('EnvironmentVariables')

// Constants
const GRID_COLS = 'grid grid-cols-[minmax(0,1fr),minmax(0,1fr),40px] gap-4'
const INITIAL_ENV_VAR: UIEnvironmentVariable = { key: '', value: '' }

@@ -263,7 +266,7 @@ export function EnvironmentVariables({
// Single store update that triggers sync
useEnvironmentStore.getState().setVariables(validVariables)
} catch (error) {
console.error('Failed to save environment variables:', error)
logger.error('Failed to save environment variables:', error)
}
}

@@ -14,6 +14,7 @@ import {
import { format } from 'date-fns'
import { getBrandConfig } from '@/lib/branding/branding'
import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console/logger'
import { getAssetUrl } from '@/lib/utils'
import { baseStyles } from './base-styles'
import EmailFooter from './footer'

@@ -28,6 +29,8 @@ interface InvitationEmailProps {

const baseUrl = env.NEXT_PUBLIC_APP_URL || 'https://sim.ai'

const logger = createLogger('InvitationEmail')

export const InvitationEmail = ({
inviterName = 'A team member',
organizationName = 'an organization',

@@ -49,7 +52,7 @@ export const InvitationEmail = ({
enhancedLink = `${baseUrl}/invite/${invitationId}?token=${invitationId}`
}
} catch (e) {
console.error('Error parsing invite link:', e)
logger.error('Error parsing invite link:', e)
}
}

@@ -13,10 +13,13 @@ import {
} from '@react-email/components'
import { getBrandConfig } from '@/lib/branding/branding'
import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console/logger'
import { getAssetUrl } from '@/lib/utils'
import { baseStyles } from './base-styles'
import EmailFooter from './footer'

const logger = createLogger('WorkspaceInvitationEmail')

interface WorkspaceInvitationEmailProps {
workspaceName?: string
inviterName?: string

@@ -45,7 +48,7 @@ export const WorkspaceInvitationEmail = ({
}
}
} catch (e) {
console.error('Error enhancing invitation link:', e)
logger.error('Error enhancing invitation link:', e)
}

return (

@@ -1254,7 +1254,7 @@ export class InputResolver {

return JSON.parse(normalizedExpression)
} catch (jsonError) {
console.error('Error parsing JSON for loop:', jsonError)
logger.error('Error parsing JSON for loop:', jsonError)
// If JSON parsing fails, continue with expression evaluation
}
}

@@ -1267,7 +1267,7 @@ export class InputResolver {
}
}
} catch (e) {
console.error('Error evaluating forEach items:', e)
logger.error('Error evaluating forEach items:', e)
}
}
}

@@ -1712,7 +1712,7 @@ export class InputResolver {
}
}
} catch (e) {
console.error('Error evaluating parallel distribution items:', e)
logger.error('Error evaluating parallel distribution items:', e)
}
}

@@ -175,10 +175,7 @@ describe('Full Executor Test', () => {
} else {
expect(result).toBeDefined()
}
} catch (error) {
console.error('Execution error:', error)
// Log the error but don't fail the test - we want to see what happens
}
} catch (error) {}
})

it('should test the executor getNextExecutionLayer method directly', async () => {

@@ -621,7 +621,7 @@ export function useCollaborativeWorkflow() {
}

if (!blockConfig) {
console.error(`Block type ${type} not found`)
logger.error(`Block type ${type} not found`)
return
}

@@ -1,7 +1,10 @@
import { useCallback, useEffect, useMemo, useState } from 'react'
import Fuse from 'fuse.js'
import { createLogger } from '@/lib/logs/console/logger'
import { type ChunkData, type DocumentData, useKnowledgeStore } from '@/stores/knowledge/store'

const logger = createLogger('UseKnowledgeBase')

export function useKnowledgeBase(id: string) {
const { getKnowledgeBase, getCachedKnowledgeBase, loadingKnowledgeBases } = useKnowledgeStore()

@@ -22,6 +25,7 @@ export function useKnowledgeBase(id: string) {
} catch (err) {
if (isMounted) {
setError(err instanceof Error ? err.message : 'Failed to load knowledge base')
logger.error(`Failed to load knowledge base ${id}:`, err)
}
}
}

@@ -86,6 +90,7 @@ export function useKnowledgeBaseDocuments(
} catch (err) {
if (isMounted) {
setError(err instanceof Error ? err.message : 'Failed to load documents')
logger.error(`Failed to load documents for knowledge base ${knowledgeBaseId}:`, err)
}
}
}

@@ -127,6 +132,7 @@ export function useKnowledgeBaseDocuments(
})
} catch (err) {
setError(err instanceof Error ? err.message : 'Failed to refresh documents')
logger.error(`Failed to refresh documents for knowledge base ${knowledgeBaseId}:`, err)
}
}, [
knowledgeBaseId,

@@ -141,6 +147,7 @@ export function useKnowledgeBaseDocuments(
const updateDocumentLocal = useCallback(
(documentId: string, updates: Partial<DocumentData>) => {
updateDocument(knowledgeBaseId, documentId, updates)
logger.info(`Updated document ${documentId} for knowledge base ${knowledgeBaseId}`)
},
[knowledgeBaseId, updateDocument]
)

@@ -204,10 +211,11 @@ export function useKnowledgeBasesList(workspaceId?: string) {
retryTimeoutId = setTimeout(() => {
if (isMounted) {
loadData(attempt + 1)
logger.warn(`Failed to load knowledge bases list, retrying... ${attempt + 1}`)
}
}, delay)
} else {
console.error('All retry attempts failed for knowledge bases list:', err)
logger.error('All retry attempts failed for knowledge bases list:', err)
setError(errorMessage)
setRetryCount(maxRetries)
}

@@ -235,7 +243,7 @@ export function useKnowledgeBasesList(workspaceId?: string) {
} catch (err) {
const errorMessage = err instanceof Error ? err.message : 'Failed to refresh knowledge bases'
setError(errorMessage)
console.error('Error refreshing knowledge bases list:', err)
logger.error('Error refreshing knowledge bases list:', err)
}
}

@@ -257,7 +265,7 @@ export function useKnowledgeBasesList(workspaceId?: string) {
} catch (err) {
const errorMessage = err instanceof Error ? err.message : 'Failed to refresh knowledge bases'
setError(errorMessage)
console.error('Error force refreshing knowledge bases list:', err)
logger.error('Error force refreshing knowledge bases list:', err)
}
}

@@ -361,6 +369,7 @@ export function useDocumentChunks(
} catch (err) {
if (isMounted) {
setError(err instanceof Error ? err.message : 'Failed to load chunks')
logger.error(`Failed to load chunks for document ${documentId}:`, err)
}
} finally {
if (isMounted) {

@@ -559,6 +568,7 @@ export function useDocumentChunks(
} catch (err) {
if (isMounted) {
setError(err instanceof Error ? err.message : 'Failed to load chunks')
logger.error(`Failed to load chunks for document ${documentId}:`, err)
}
} finally {
if (isMounted) {

@@ -599,6 +609,7 @@ export function useDocumentChunks(

// Update loading state based on store
if (!isStoreLoading && isLoading) {
logger.info(`Chunks loaded for document ${documentId}`)
setIsLoading(false)
}
}, [documentId, isStoreLoading, isLoading, initialLoadDone, serverSearchQuery, serverCurrentPage])

@@ -629,6 +640,7 @@ export function useDocumentChunks(
return fetchedChunks
} catch (err) {
setError(err instanceof Error ? err.message : 'Failed to load page')
logger.error(`Failed to load page for document ${documentId}:`, err)
throw err
} finally {
setIsLoading(false)

@@ -676,6 +688,7 @@ export function useDocumentChunks(
return fetchedChunks
} catch (err) {
setError(err instanceof Error ? err.message : 'Failed to refresh chunks')
logger.error(`Failed to refresh chunks for document ${documentId}:`, err)
throw err
} finally {
setIsLoading(false)

@@ -704,6 +717,7 @@ export function useDocumentChunks(
return searchResults
} catch (err) {
setError(err instanceof Error ? err.message : 'Failed to search chunks')
logger.error(`Failed to search chunks for document ${documentId}:`, err)
throw err
} finally {
setIsLoading(false)

@@ -1046,7 +1046,7 @@ export const auth = betterAuth({

if (!response.ok) {
const errorText = await response.text()
console.error('Linear API error:', {
logger.error('Linear API error:', {
status: response.status,
statusText: response.statusText,
body: errorText,

@@ -1057,12 +1057,12 @@ export const auth = betterAuth({
const { data, errors } = await response.json()

if (errors) {
console.error('GraphQL errors:', errors)
logger.error('GraphQL errors:', errors)
throw new Error(`GraphQL errors: ${JSON.stringify(errors)}`)
}

if (!data?.viewer) {
console.error('No viewer data in response:', data)
logger.error('No viewer data in response:', data)
throw new Error('No viewer data in response')
}

@@ -1078,7 +1078,7 @@ export const auth = betterAuth({
image: viewer.avatarUrl || null,
}
} catch (error) {
console.error('Error in getUserInfo:', error)
logger.error('Error in getUserInfo:', error)
throw error
}
},

@@ -334,3 +334,33 @@ export async function getOrganizationBillingSummary(organizationId: string) {
throw error
}
}

/**
* Check if a user is an owner or admin of a specific organization
*
* @param userId - The ID of the user to check
* @param organizationId - The ID of the organization
* @returns Promise<boolean> - True if the user is an owner or admin of the organization
*/
export async function isOrganizationOwnerOrAdmin(
userId: string,
organizationId: string
): Promise<boolean> {
try {
const memberRecord = await db
.select({ role: member.role })
.from(member)
.where(and(eq(member.userId, userId), eq(member.organizationId, organizationId)))
.limit(1)

if (memberRecord.length === 0) {
return false
}

const userRole = memberRecord[0].role
return ['owner', 'admin'].includes(userRole)
} catch (error) {
logger.error('Error checking organization ownership/admin status:', error)
return false
}
}
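
A minimal usage sketch for the helper added above, mirroring how the usage-limits PUT handler gates organization updates earlier in this diff; the surrounding handler shape is illustrative, not the repository's exact code.

import { isOrganizationOwnerOrAdmin } from '@/lib/billing/core/organization'
import { NextResponse } from 'next/server'

// Illustrative guard: only organization owners/admins may change org-level settings.
async function guardOrgUpdate(userId: string, organizationId: string) {
  const allowed = await isOrganizationOwnerOrAdmin(userId, organizationId)
  if (!allowed) {
    return NextResponse.json({ error: 'Permission denied' }, { status: 403 })
  }
  return null // caller proceeds with the update
}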

@@ -5,7 +5,7 @@

export * from '@/lib/billing/calculations/usage-monitor'
export * from '@/lib/billing/core/billing'
export * from '@/lib/billing/core/organization-billing'
export * from '@/lib/billing/core/organization'
export * from '@/lib/billing/core/subscription'
export {
getHighestPrioritySubscription as getActiveSubscription,

@@ -7,6 +7,7 @@ vi.mock('@/db', () => ({
where: vi.fn(),
limit: vi.fn(),
innerJoin: vi.fn(),
leftJoin: vi.fn(),
orderBy: vi.fn(),
},
}))

@@ -17,6 +18,7 @@ vi.mock('@/db/schema', () => ({
userId: 'user_id',
entityType: 'entity_type',
entityId: 'entity_id',
id: 'permission_id',
},
permissionTypeEnum: {
enumValues: ['admin', 'write', 'read'] as const,

@@ -25,23 +27,18 @@ vi.mock('@/db/schema', () => ({
id: 'user_id',
email: 'user_email',
name: 'user_name',
image: 'user_image',
},
workspace: {
id: 'workspace_id',
name: 'workspace_name',
ownerId: 'workspace_owner_id',
},
member: {
userId: 'member_user_id',
organizationId: 'member_organization_id',
role: 'member_role',
},
}))

vi.mock('drizzle-orm', () => ({
and: vi.fn().mockReturnValue('and-condition'),
eq: vi.fn().mockReturnValue('eq-condition'),
or: vi.fn().mockReturnValue('or-condition'),
}))
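
The tests below lean on a createMockChain helper that is not shown in this diff. A thenable stub with roughly this shape would satisfy the calls that follow; this is an assumption for readability, not the repository's actual implementation (it also presumes vi is imported from vitest in this file).

// Assumed shape: every query-builder method returns the chain itself, and
// awaiting the chain resolves to the rows supplied by the test.
function createMockChain(results: unknown[]) {
  const chain: any = {}
  for (const method of ['select', 'from', 'where', 'innerJoin', 'leftJoin', 'orderBy', 'limit']) {
    chain[method] = vi.fn(() => chain)
  }
  chain.then = (resolve: (rows: unknown[]) => unknown) => resolve(results)
  return chain
}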

import {

@@ -50,8 +47,6 @@ import {
getUsersWithPermissions,
hasAdminPermission,
hasWorkspaceAdminAccess,
isOrganizationAdminForWorkspace,
isOrganizationOwnerOrAdmin,
} from '@/lib/permissions/utils'
import { db } from '@/db'

@@ -124,11 +119,64 @@ describe('Permission Utils', () => {

expect(result).toBe('admin')
})

it('should return write permission when user only has write access', async () => {
const mockResults = [{ permissionType: 'write' as PermissionType }]
const chain = createMockChain(mockResults)
mockDb.select.mockReturnValue(chain)

const result = await getUserEntityPermissions('user123', 'workspace', 'workspace456')

expect(result).toBe('write')
})

it('should prioritize write over read permissions', async () => {
const mockResults = [
{ permissionType: 'read' as PermissionType },
{ permissionType: 'write' as PermissionType },
]
const chain = createMockChain(mockResults)
mockDb.select.mockReturnValue(chain)

const result = await getUserEntityPermissions('user123', 'workspace', 'workspace456')

expect(result).toBe('write')
})

it('should work with workflow entity type', async () => {
const mockResults = [{ permissionType: 'admin' as PermissionType }]
const chain = createMockChain(mockResults)
mockDb.select.mockReturnValue(chain)

const result = await getUserEntityPermissions('user123', 'workflow', 'workflow789')

expect(result).toBe('admin')
})

it('should work with organization entity type', async () => {
const mockResults = [{ permissionType: 'read' as PermissionType }]
const chain = createMockChain(mockResults)
mockDb.select.mockReturnValue(chain)

const result = await getUserEntityPermissions('user123', 'organization', 'org456')

expect(result).toBe('read')
})

it('should handle generic entity types', async () => {
const mockResults = [{ permissionType: 'write' as PermissionType }]
const chain = createMockChain(mockResults)
mockDb.select.mockReturnValue(chain)

const result = await getUserEntityPermissions('user123', 'custom_entity', 'entity123')

expect(result).toBe('write')
})
})

describe('hasAdminPermission', () => {
it('should return true when user has admin permission for workspace', async () => {
const chain = createMockChain([{ permissionType: 'admin' }])
const chain = createMockChain([{ id: 'perm1' }])
mockDb.select.mockReturnValue(chain)

const result = await hasAdminPermission('admin-user', 'workspace123')

@@ -144,6 +192,42 @@ describe('Permission Utils', () => {

expect(result).toBe(false)
})

it('should return false when user has write permission but not admin', async () => {
const chain = createMockChain([])
mockDb.select.mockReturnValue(chain)

const result = await hasAdminPermission('write-user', 'workspace123')

expect(result).toBe(false)
})

it('should return false when user has read permission but not admin', async () => {
const chain = createMockChain([])
mockDb.select.mockReturnValue(chain)

const result = await hasAdminPermission('read-user', 'workspace123')

expect(result).toBe(false)
})

it('should handle non-existent workspace', async () => {
const chain = createMockChain([])
mockDb.select.mockReturnValue(chain)

const result = await hasAdminPermission('user123', 'non-existent-workspace')

expect(result).toBe(false)
})

it('should handle empty user ID', async () => {
const chain = createMockChain([])
mockDb.select.mockReturnValue(chain)

const result = await hasAdminPermission('', 'workspace123')

expect(result).toBe(false)
})
})

describe('getUsersWithPermissions', () => {

@@ -162,7 +246,6 @@ describe('Permission Utils', () => {
userId: 'user1',
email: 'alice@example.com',
name: 'Alice Smith',
image: 'https://example.com/alice.jpg',
permissionType: 'admin' as PermissionType,
},
]

@@ -177,43 +260,66 @@ describe('Permission Utils', () => {
userId: 'user1',
email: 'alice@example.com',
name: 'Alice Smith',
image: 'https://example.com/alice.jpg',
permissionType: 'admin',
},
])
})
})

describe('isOrganizationAdminForWorkspace', () => {
it('should return false when workspace does not exist', async () => {
const chain = createMockChain([])
mockDb.select.mockReturnValue(chain)
it('should return multiple users with different permission levels', async () => {
const mockUsersResults = [
{
userId: 'user1',
email: 'admin@example.com',
name: 'Admin User',
permissionType: 'admin' as PermissionType,
},
{
userId: 'user2',
email: 'writer@example.com',
name: 'Writer User',
permissionType: 'write' as PermissionType,
},
{
userId: 'user3',
email: 'reader@example.com',
name: 'Reader User',
permissionType: 'read' as PermissionType,
},
]

const result = await isOrganizationAdminForWorkspace('user123', 'workspace456')
const usersChain = createMockChain(mockUsersResults)
mockDb.select.mockReturnValue(usersChain)

expect(result).toBe(false)
const result = await getUsersWithPermissions('workspace456')

expect(result).toHaveLength(3)
expect(result[0].permissionType).toBe('admin')
expect(result[1].permissionType).toBe('write')
expect(result[2].permissionType).toBe('read')
})

it('should return false when user has no organization memberships', async () => {
// Mock workspace exists, but user has no org memberships
let callCount = 0
mockDb.select.mockImplementation(() => {
callCount++
if (callCount === 1) {
return createMockChain([{ ownerId: 'workspace-owner-123' }])
}
return createMockChain([]) // No memberships
})
it('should handle users with empty names', async () => {
const mockUsersResults = [
{
userId: 'user1',
email: 'test@example.com',
name: '',
permissionType: 'read' as PermissionType,
},
]

const result = await isOrganizationAdminForWorkspace('user123', 'workspace456')
const usersChain = createMockChain(mockUsersResults)
mockDb.select.mockReturnValue(usersChain)

expect(result).toBe(false)
const result = await getUsersWithPermissions('workspace123')

expect(result[0].name).toBe('')
})
})

describe('hasWorkspaceAdminAccess', () => {
it('should return true when user has direct admin permission', async () => {
const chain = createMockChain([{ permissionType: 'admin' }])
it('should return true when user owns the workspace', async () => {
const chain = createMockChain([{ ownerId: 'user123' }])
mockDb.select.mockReturnValue(chain)

const result = await hasWorkspaceAdminAccess('user123', 'workspace456')

@@ -221,7 +327,22 @@ describe('Permission Utils', () => {
expect(result).toBe(true)
})

it('should return false when user has neither direct nor organization admin access', async () => {
it('should return true when user has direct admin permission', async () => {
let callCount = 0
mockDb.select.mockImplementation(() => {
callCount++
if (callCount === 1) {
return createMockChain([{ ownerId: 'other-user' }])
}
return createMockChain([{ id: 'perm1' }])
})

const result = await hasWorkspaceAdminAccess('user123', 'workspace456')

expect(result).toBe(true)
})

it('should return false when workspace does not exist', async () => {
const chain = createMockChain([])
mockDb.select.mockReturnValue(chain)

@@ -229,51 +350,137 @@ describe('Permission Utils', () => {

expect(result).toBe(false)
})

it('should return false when user has no admin access', async () => {
let callCount = 0
mockDb.select.mockImplementation(() => {
callCount++
if (callCount === 1) {
return createMockChain([{ ownerId: 'other-user' }])
}
return createMockChain([])
})

const result = await hasWorkspaceAdminAccess('user123', 'workspace456')

expect(result).toBe(false)
})

it('should return false when user has write permission but not admin', async () => {
let callCount = 0
mockDb.select.mockImplementation(() => {
callCount++
if (callCount === 1) {
return createMockChain([{ ownerId: 'other-user' }])
}
return createMockChain([])
})

const result = await hasWorkspaceAdminAccess('user123', 'workspace456')

expect(result).toBe(false)
})

it('should return false when user has read permission but not admin', async () => {
let callCount = 0
mockDb.select.mockImplementation(() => {
callCount++
if (callCount === 1) {
return createMockChain([{ ownerId: 'other-user' }])
}
return createMockChain([])
})

const result = await hasWorkspaceAdminAccess('user123', 'workspace456')

expect(result).toBe(false)
})

it('should handle empty workspace ID', async () => {
const chain = createMockChain([])
mockDb.select.mockReturnValue(chain)

const result = await hasWorkspaceAdminAccess('user123', '')

expect(result).toBe(false)
})

it('should handle empty user ID', async () => {
const chain = createMockChain([])
mockDb.select.mockReturnValue(chain)

const result = await hasWorkspaceAdminAccess('', 'workspace456')

expect(result).toBe(false)
})
})

describe('isOrganizationOwnerOrAdmin', () => {
it('should return true when user is owner of organization', async () => {
const chain = createMockChain([{ role: 'owner' }])
mockDb.select.mockReturnValue(chain)

const result = await isOrganizationOwnerOrAdmin('user123', 'org456')

expect(result).toBe(true)
})

it('should return true when user is admin of organization', async () => {
const chain = createMockChain([{ role: 'admin' }])
mockDb.select.mockReturnValue(chain)

const result = await isOrganizationOwnerOrAdmin('user123', 'org456')

expect(result).toBe(true)
})

it('should return false when user is regular member of organization', async () => {
const chain = createMockChain([{ role: 'member' }])
mockDb.select.mockReturnValue(chain)

const result = await isOrganizationOwnerOrAdmin('user123', 'org456')

expect(result).toBe(false)
})

it('should return false when user is not member of organization', async () => {
describe('Edge Cases and Security Tests', () => {
it('should handle SQL injection attempts in user IDs', async () => {
const chain = createMockChain([])
mockDb.select.mockReturnValue(chain)

const result = await isOrganizationOwnerOrAdmin('user123', 'org456')
const result = await getUserEntityPermissions(
"'; DROP TABLE users; --",
'workspace',
'workspace123'
)

expect(result).toBeNull()
})

it('should handle very long entity IDs', async () => {
const longEntityId = 'a'.repeat(1000)
const chain = createMockChain([])
mockDb.select.mockReturnValue(chain)

const result = await getUserEntityPermissions('user123', 'workspace', longEntityId)

expect(result).toBeNull()
})

it('should handle unicode characters in entity names', async () => {
const chain = createMockChain([{ permissionType: 'read' as PermissionType }])
mockDb.select.mockReturnValue(chain)

const result = await getUserEntityPermissions('user123', '📝workspace', '🏢org-id')

expect(result).toBe('read')
})

it('should verify permission hierarchy ordering is consistent', () => {
const permissionOrder: Record<PermissionType, number> = { admin: 3, write: 2, read: 1 }

expect(permissionOrder.admin).toBeGreaterThan(permissionOrder.write)
expect(permissionOrder.write).toBeGreaterThan(permissionOrder.read)
})

it('should handle workspace ownership checks with null owner IDs', async () => {
let callCount = 0
mockDb.select.mockImplementation(() => {
callCount++
if (callCount === 1) {
return createMockChain([{ ownerId: null }])
}
return createMockChain([])
})

const result = await hasWorkspaceAdminAccess('user123', 'workspace456')

expect(result).toBe(false)
})

it('should handle errors gracefully', async () => {
it('should handle null user ID correctly when owner ID is different', async () => {
let callCount = 0
mockDb.select.mockImplementation(() => {
throw new Error('Database error')
callCount++
if (callCount === 1) {
return createMockChain([{ ownerId: 'other-user' }])
}
return createMockChain([])
})

const result = await isOrganizationOwnerOrAdmin('user123', 'org456')
const result = await hasWorkspaceAdminAccess(null as any, 'workspace456')

expect(result).toBe(false)
})

@@ -289,27 +496,121 @@ describe('Permission Utils', () => {
expect(result).toEqual([])
})

it('should return direct admin workspaces', async () => {
const mockDirectWorkspaces = [
{ id: 'ws1', name: 'Workspace 1', ownerId: 'owner1' },
{ id: 'ws2', name: 'Workspace 2', ownerId: 'owner2' },
it('should return owned workspaces', async () => {
const mockWorkspaces = [
{ id: 'ws1', name: 'My Workspace 1', ownerId: 'user123' },
{ id: 'ws2', name: 'My Workspace 2', ownerId: 'user123' },
]

let callCount = 0
mockDb.select.mockImplementation(() => {
callCount++
if (callCount === 1) {
return createMockChain(mockDirectWorkspaces) // direct admin workspaces
return createMockChain(mockWorkspaces) // Owned workspaces
}
return createMockChain([]) // no organization memberships
return createMockChain([]) // No admin workspaces
})

const result = await getManageableWorkspaces('user123')

expect(result).toEqual([
{ id: 'ws1', name: 'Workspace 1', ownerId: 'owner1', accessType: 'direct' },
{ id: 'ws2', name: 'Workspace 2', ownerId: 'owner2', accessType: 'direct' },
{ id: 'ws1', name: 'My Workspace 1', ownerId: 'user123', accessType: 'owner' },
{ id: 'ws2', name: 'My Workspace 2', ownerId: 'user123', accessType: 'owner' },
])
})

it('should return workspaces with direct admin permissions', async () => {
const mockAdminWorkspaces = [{ id: 'ws1', name: 'Shared Workspace', ownerId: 'other-user' }]

let callCount = 0
mockDb.select.mockImplementation(() => {
callCount++
if (callCount === 1) {
return createMockChain([]) // No owned workspaces
}
return createMockChain(mockAdminWorkspaces) // Admin workspaces
})

const result = await getManageableWorkspaces('user123')

expect(result).toEqual([
{ id: 'ws1', name: 'Shared Workspace', ownerId: 'other-user', accessType: 'direct' },
])
})

it('should combine owned and admin workspaces without duplicates', async () => {
const mockOwnedWorkspaces = [
{ id: 'ws1', name: 'My Workspace', ownerId: 'user123' },
{ id: 'ws2', name: 'Another Workspace', ownerId: 'user123' },
]
const mockAdminWorkspaces = [
{ id: 'ws1', name: 'My Workspace', ownerId: 'user123' }, // Duplicate (should be filtered)
{ id: 'ws3', name: 'Shared Workspace', ownerId: 'other-user' },
]

let callCount = 0
mockDb.select.mockImplementation(() => {
callCount++
if (callCount === 1) {
return createMockChain(mockOwnedWorkspaces) // Owned workspaces
}
return createMockChain(mockAdminWorkspaces) // Admin workspaces
})

const result = await getManageableWorkspaces('user123')

expect(result).toHaveLength(3)
expect(result).toEqual([
{ id: 'ws1', name: 'My Workspace', ownerId: 'user123', accessType: 'owner' },
{ id: 'ws2', name: 'Another Workspace', ownerId: 'user123', accessType: 'owner' },
{ id: 'ws3', name: 'Shared Workspace', ownerId: 'other-user', accessType: 'direct' },
])
})

it('should handle empty workspace names', async () => {
const mockWorkspaces = [{ id: 'ws1', name: '', ownerId: 'user123' }]

let callCount = 0
mockDb.select.mockImplementation(() => {
callCount++
if (callCount === 1) {
return createMockChain(mockWorkspaces)
}
return createMockChain([])
})

const result = await getManageableWorkspaces('user123')

expect(result[0].name).toBe('')
})

it('should handle multiple admin permissions for same workspace', async () => {
const mockAdminWorkspaces = [
{ id: 'ws1', name: 'Shared Workspace', ownerId: 'other-user' },
{ id: 'ws1', name: 'Shared Workspace', ownerId: 'other-user' }, // Duplicate
]

let callCount = 0
mockDb.select.mockImplementation(() => {
callCount++
if (callCount === 1) {
return createMockChain([]) // No owned workspaces
}
return createMockChain(mockAdminWorkspaces) // Admin workspaces with duplicates
})

const result = await getManageableWorkspaces('user123')

expect(result).toHaveLength(2) // Should include duplicates from admin permissions
})

it('should handle empty user ID gracefully', async () => {
const chain = createMockChain([])
mockDb.select.mockReturnValue(chain)

const result = await getManageableWorkspaces('')

expect(result).toEqual([])
})
})
})

@@ -1,6 +1,6 @@
import { and, eq } from 'drizzle-orm'
import { db } from '@/db'
import { member, permissions, type permissionTypeEnum, user, workspace } from '@/db/schema'
import { permissions, type permissionTypeEnum, user, workspace } from '@/db/schema'

export type PermissionType = (typeof permissionTypeEnum.enumValues)[number]

@@ -32,7 +32,6 @@ export async function getUserEntityPermissions(
return null
}

// If multiple permissions exist (legacy data), return the highest one
const permissionOrder: Record<PermissionType, number> = { admin: 3, write: 2, read: 1 }
const highestPermission = result.reduce((highest, current) => {
return permissionOrder[current.permissionType] > permissionOrder[highest.permissionType]
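
Illustrative only: with legacy rows containing both 'read' and 'write' for the same user and entity, the reduce above keeps the higher-ranked row, so the lookup resolves to 'write'.

const rows = [{ permissionType: 'read' }, { permissionType: 'write' }] as const
const order = { admin: 3, write: 2, read: 1 }
const highest = rows.reduce((h, c) => (order[c.permissionType] > order[h.permissionType] ? c : h))
// highest.permissionType === 'write'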

@@ -46,13 +45,13 @@ export async function getUserEntityPermissions(
/**
* Check if a user has admin permission for a specific workspace
*
* @param userId - The ID of the user to check permissions for
* @param workspaceId - The ID of the workspace to check admin permission for
* @param userId - The ID of the user to check
* @param workspaceId - The ID of the workspace to check
* @returns Promise<boolean> - True if the user has admin permission for the workspace, false otherwise
*/
export async function hasAdminPermission(userId: string, workspaceId: string): Promise<boolean> {
const result = await db
.select()
.select({ id: permissions.id })
.from(permissions)
.where(
and(

@@ -73,13 +72,19 @@ export async function hasAdminPermission(userId: string, workspaceId: string): P
* @param workspaceId - The ID of the workspace to retrieve user permissions for.
* @returns A promise that resolves to an array of user objects, each containing user details and their permission type.
*/
export async function getUsersWithPermissions(workspaceId: string) {
export async function getUsersWithPermissions(workspaceId: string): Promise<
Array<{
userId: string
email: string
name: string
permissionType: PermissionType
}>
> {
const usersWithPermissions = await db
.select({
userId: user.id,
email: user.email,
name: user.name,
image: user.image,
permissionType: permissions.permissionType,
})
.from(permissions)

@@ -87,141 +92,71 @@ export async function getUsersWithPermissions(workspaceId: string)
.where(and(eq(permissions.entityType, 'workspace'), eq(permissions.entityId, workspaceId)))
.orderBy(user.email)

// Since each user has only one permission, we can use the results directly
return usersWithPermissions.map((row) => ({
userId: row.userId,
email: row.email,
name: row.name,
image: row.image,
permissionType: row.permissionType,
}))
}

/**
* Check if a user is an admin or owner of any organization that has access to a workspace
* Check if a user has admin access to a specific workspace
*
* @param userId - The ID of the user to check
* @param workspaceId - The ID of the workspace
* @returns Promise<boolean> - True if the user is an organization admin with access to the workspace
*/
export async function isOrganizationAdminForWorkspace(
userId: string,
workspaceId: string
): Promise<boolean> {
try {
// Get the workspace owner
const workspaceRecord = await db
.select({ ownerId: workspace.ownerId })
.from(workspace)
.where(eq(workspace.id, workspaceId))
.limit(1)

if (workspaceRecord.length === 0) {
return false
}

const workspaceOwnerId = workspaceRecord[0].ownerId

// Check if the user is an admin/owner of any organization that the workspace owner belongs to
const orgMemberships = await db
.select({
organizationId: member.organizationId,
role: member.role,
})
.from(member)
.where(
and(
eq(member.userId, userId),
// Only admin and owner roles can manage workspace permissions
eq(member.role, 'admin') // We'll also check for 'owner' separately
)
)

// Also check for owner role
const ownerMemberships = await db
.select({
organizationId: member.organizationId,
role: member.role,
})
.from(member)
.where(and(eq(member.userId, userId), eq(member.role, 'owner')))

const allOrgMemberships = [...orgMemberships, ...ownerMemberships]

if (allOrgMemberships.length === 0) {
return false
}

// Check if the workspace owner is a member of any of these organizations
for (const membership of allOrgMemberships) {
const workspaceOwnerInOrg = await db
.select()
.from(member)
.where(
and(
eq(member.userId, workspaceOwnerId),
eq(member.organizationId, membership.organizationId)
)
)
.limit(1)

if (workspaceOwnerInOrg.length > 0) {
return true
}
}

return false
} catch (error) {
console.error('Error checking organization admin status for workspace:', error)
return false
}
}

/**
* Check if a user has admin permissions (either direct workspace admin or organization admin)
*
* @param userId - The ID of the user to check permissions for
* @param workspaceId - The ID of the workspace to check admin permission for
* @returns Promise<boolean> - True if the user has admin permission for the workspace, false otherwise
* @param workspaceId - The ID of the workspace to check
* @returns Promise<boolean> - True if the user has admin access to the workspace, false otherwise
*/
export async function hasWorkspaceAdminAccess(
userId: string,
workspaceId: string
): Promise<boolean> {
// Check direct workspace admin permission
const directAdmin = await hasAdminPermission(userId, workspaceId)
if (directAdmin) {
const workspaceResult = await db
.select({ ownerId: workspace.ownerId })
.from(workspace)
.where(eq(workspace.id, workspaceId))
.limit(1)

if (workspaceResult.length === 0) {
return false
}

if (workspaceResult[0].ownerId === userId) {
return true
}

// Check organization admin permission
const orgAdmin = await isOrganizationAdminForWorkspace(userId, workspaceId)
return orgAdmin
return await hasAdminPermission(userId, workspaceId)
}
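
Because removed and added lines are interleaved above, here is the simplified flow this change appears to leave in place (a sketch reconstructed from the added lines, not an authoritative copy): workspace owners always pass, otherwise only a direct admin permission row counts; organization membership no longer grants workspace admin access.

// Sketch of the simplified check (assumes the same db/workspace/eq imports used in this file).
export async function hasWorkspaceAdminAccessSketch(userId: string, workspaceId: string): Promise<boolean> {
  const ws = await db
    .select({ ownerId: workspace.ownerId })
    .from(workspace)
    .where(eq(workspace.id, workspaceId))
    .limit(1)
  if (ws.length === 0) return false
  if (ws[0].ownerId === userId) return true // owners always have admin access
  return hasAdminPermission(userId, workspaceId) // otherwise require a direct admin row
}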

/**
* Get all workspaces that a user can manage (either as direct admin or organization admin)
* Get a list of workspaces that the user has access to
*
* @param userId - The ID of the user
* @returns Promise<Array<{id: string, name: string, ownerId: string}>> - Array of workspaces the user can manage
* @param userId - The ID of the user to check
* @returns Promise<Array<{
* id: string
* name: string
* ownerId: string
* accessType: 'direct' | 'owner'
* }>> - A list of workspaces that the user has access to
*/
export async function getManageableWorkspaces(userId: string): Promise<
Array<{
id: string
name: string
ownerId: string
accessType: 'direct' | 'organization'
accessType: 'direct' | 'owner'
}>
> {
const manageableWorkspaces: Array<{
id: string
name: string
ownerId: string
accessType: 'direct' | 'organization'
}> = []
const ownedWorkspaces = await db
.select({
id: workspace.id,
name: workspace.name,
ownerId: workspace.ownerId,
})
.from(workspace)
.where(eq(workspace.ownerId, userId))

// Get workspaces where user has direct admin permissions
const directWorkspaces = await db
const adminWorkspaces = await db
.select({
id: workspace.id,
name: workspace.name,
@@ -237,86 +172,13 @@ export async function getManageableWorkspaces(userId: string): Promise<
)
)

directWorkspaces.forEach((ws) => {
manageableWorkspaces.push({
...ws,
accessType: 'direct',
})
})
const ownedSet = new Set(ownedWorkspaces.map((w) => w.id))
const combined = [
...ownedWorkspaces.map((ws) => ({ ...ws, accessType: 'owner' as const })),
...adminWorkspaces
.filter((ws) => !ownedSet.has(ws.id))
.map((ws) => ({ ...ws, accessType: 'direct' as const })),
]

// Get workspaces where user has organization admin access
// First, get organizations where the user is admin/owner
const adminOrgs = await db
.select({ organizationId: member.organizationId })
.from(member)
.where(
and(
eq(member.userId, userId)
// Check for both admin and owner roles
)
)

// Get all organization workspaces for these orgs
for (const org of adminOrgs) {
// Get all members of this organization
const orgMembers = await db
.select({ userId: member.userId })
.from(member)
.where(eq(member.organizationId, org.organizationId))

// Get workspaces owned by org members
const orgWorkspaces = await db
.select({
id: workspace.id,
name: workspace.name,
ownerId: workspace.ownerId,
})
.from(workspace)
.where(
// Find workspaces owned by any org member
eq(workspace.ownerId, orgMembers.length > 0 ? orgMembers[0].userId : 'none')
)

// Add these workspaces if not already included
orgWorkspaces.forEach((ws) => {
if (!manageableWorkspaces.find((existing) => existing.id === ws.id)) {
manageableWorkspaces.push({
...ws,
accessType: 'organization',
})
}
})
}

return manageableWorkspaces
}
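For context, a sketch of consuming the returned list (illustrative, not part of this diff; the filtering is an example, and the accessType values follow the updated union declared above):

const userId = 'user_123' // resolved from the authenticated session in real code
const manageable = await getManageableWorkspaces(userId)
// 'owner' marks workspaces the user owns; 'direct' marks direct admin permission grants
const ownedWorkspaceIds = manageable
  .filter((ws) => ws.accessType === 'owner')
  .map((ws) => ws.id)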

/**
* Check if a user is an owner or admin of a specific organization
*
* @param userId - The ID of the user to check
* @param organizationId - The ID of the organization
* @returns Promise<boolean> - True if the user is an owner or admin of the organization
*/
export async function isOrganizationOwnerOrAdmin(
userId: string,
organizationId: string
): Promise<boolean> {
try {
const memberRecord = await db
.select({ role: member.role })
.from(member)
.where(and(eq(member.userId, userId), eq(member.organizationId, organizationId)))
.limit(1)

if (memberRecord.length === 0) {
return false // User is not a member of the organization
}

const userRole = memberRecord[0].role
return ['owner', 'admin'].includes(userRole)
} catch (error) {
console.error('Error checking organization ownership/admin status:', error)
return false
}
return combined
}
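A sketch of how a caller might guard an organization-level mutation with the helper above (illustrative only; the wrapper name and error message are assumptions):

async function assertOrganizationAdmin(userId: string, organizationId: string): Promise<void> {
  const allowed = await isOrganizationOwnerOrAdmin(userId, organizationId)
  if (!allowed) {
    throw new Error('Permission denied: organization owner or admin role required')
  }
}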

@@ -1,5 +1,8 @@
import OpenAI, { AzureOpenAI } from 'openai'
import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console/logger'

const logger = createLogger('SimAgentUtils')

const azureApiKey = env.AZURE_OPENAI_API_KEY
const azureEndpoint = env.AZURE_OPENAI_ENDPOINT
@@ -52,7 +55,7 @@ export async function generateChatTitle(message: string): Promise<string | null>
const title = response.choices[0]?.message?.content?.trim() || null
return title
} catch (error) {
console.error('Error generating chat title:', error)
logger.error('Error generating chat title:', error)
return null
}
}
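Since generateChatTitle resolves to null when title generation fails, callers can fall back without branching on exceptions; a sketch (illustrative; the fallback string is an assumption):

async function titleForNewChat(firstMessage: string): Promise<string> {
  const title = await generateChatTitle(firstMessage)
  // null signals that generation failed or returned no content; use a safe default
  return title ?? 'New chat'
}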

@@ -154,9 +154,8 @@ export function useSubscriptionUpgrade() {
} catch (error) {
logger.error('Failed to initiate subscription upgrade:', error)

// Log detailed error information for debugging
if (error instanceof Error) {
console.error('Detailed error:', {
logger.error('Detailed error:', {
message: error.message,
stack: error.stack,
cause: error.cause,

@@ -134,7 +134,7 @@ export async function validateSlackSignature(

return result === 0
} catch (error) {
console.error('Error validating Slack signature:', error)
logger.error('Error validating Slack signature:', error)
return false
}
}
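The change repeated across these files follows one pattern: create a module-scoped logger once and route error reporting through it instead of console. A minimal sketch of that pattern (the module name and doWork callback are placeholders, not code from this diff):

import { createLogger } from '@/lib/logs/console/logger'

const logger = createLogger('ExampleModule') // module name is illustrative

export async function runWithLogging(doWork: () => Promise<void>): Promise<void> {
  try {
    await doWork()
  } catch (error) {
    // Before this change: console.error('Error doing work:', error)
    logger.error('Error doing work:', error)
  }
}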
@@ -149,7 +149,6 @@ export function formatWebhookInput(
request: NextRequest
): any {
if (foundWebhook.provider === 'whatsapp') {
// WhatsApp input formatting logic
const data = body?.entry?.[0]?.changes?.[0]?.value
const messages = data?.messages || []

@@ -189,12 +188,10 @@ export function formatWebhookInput(
}

if (foundWebhook.provider === 'telegram') {
// Telegram input formatting logic
const message =
body?.message || body?.edited_message || body?.channel_post || body?.edited_channel_post

if (message) {
// Extract message text with fallbacks for different content types
let input = ''

if (message.text) {
@@ -223,7 +220,6 @@ export function formatWebhookInput(
input = 'Message received'
}

// Create the message object for easier access
const messageObj = {
id: message.message_id,
text: message.text,
@@ -251,7 +247,6 @@ export function formatWebhookInput(
raw: message,
}

// Create sender object
const senderObj = message.from
? {
id: message.from.id,
@@ -263,7 +258,6 @@ export function formatWebhookInput(
}
: null

// Create chat object
const chatObj = message.chat
? {
id: message.chat.id,
@@ -276,9 +270,9 @@ export function formatWebhookInput(
: null

return {
input, // Primary workflow input - the message content
input,

// NEW: Top-level properties for backward compatibility with <blockName.message> syntax
// Top-level properties for backward compatibility with <blockName.message> syntax
message: messageObj,
sender: senderObj,
chat: chatObj,
@@ -683,7 +677,7 @@ export function validateMicrosoftTeamsSignature(

return result === 0
} catch (error) {
console.error('Error validating Microsoft Teams signature:', error)
logger.error('Error validating Microsoft Teams signature:', error)
return false
}
}
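For reference, the Telegram branch of formatWebhookInput above returns an object along these lines; only the fields visible in the diff are listed, and the interface name and exact field types are assumptions:

interface TelegramWebhookInput {
  input: string // primary workflow input - the extracted message content
  message: { id: number; text?: string; raw: unknown } // built from message.message_id, message.text, and the raw payload
  sender: { id: number } | null // null when message.from is absent
  chat: { id: number } | null // null when message.chat is absent
}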
@@ -698,12 +692,11 @@ export function verifyProviderWebhook(
): NextResponse | null {
const authHeader = request.headers.get('authorization')
const providerConfig = (foundWebhook.providerConfig as Record<string, any>) || {}
// Keep existing switch statement for github, stripe, generic, default
switch (foundWebhook.provider) {
case 'github':
break // No specific auth here
break
case 'stripe':
break // Stripe verification would go here
break
case 'gmail':
if (providerConfig.secret) {
const secretHeader = request.headers.get('X-Webhook-Secret')
@@ -723,22 +716,16 @@ export function verifyProviderWebhook(
break
case 'telegram': {
// Check User-Agent to ensure it's not blocked by middleware
// Log the user agent for debugging purposes
const userAgent = request.headers.get('user-agent') || ''
logger.debug(`[${requestId}] Telegram webhook request received with User-Agent: ${userAgent}`)

// Check if the user agent is empty and warn about it
if (!userAgent) {
logger.warn(
`[${requestId}] Telegram webhook request has empty User-Agent header. This may be blocked by middleware.`
)
}

// We'll accept the request anyway since we're in the provider-specific logic,
// but we'll log the information for debugging

// Telegram uses IP addresses in specific ranges
// This is optional verification that could be added if IP verification is needed
const clientIp =
request.headers.get('x-forwarded-for')?.split(',')[0].trim() ||
request.headers.get('x-real-ip') ||
@@ -749,34 +736,27 @@ export function verifyProviderWebhook(
break
}
case 'microsoftteams':
// Microsoft Teams webhook authentication is handled separately in the main flow
// due to the need for raw body access for HMAC verification
break
case 'generic':
// Generic auth logic: requireAuth, token, secretHeaderName, allowedIps
if (providerConfig.requireAuth) {
let isAuthenticated = false
// Check for token in Authorization header (Bearer token)
if (providerConfig.token) {
const providedToken = authHeader?.startsWith('Bearer ') ? authHeader.substring(7) : null
if (providedToken === providerConfig.token) {
isAuthenticated = true
}
// Check for token in custom header if specified
if (!isAuthenticated && providerConfig.secretHeaderName) {
const customHeaderValue = request.headers.get(providerConfig.secretHeaderName)
if (customHeaderValue === providerConfig.token) {
isAuthenticated = true
}
}
// Return 401 if authentication failed
if (!isAuthenticated) {
logger.warn(`[${requestId}] Unauthorized webhook access attempt - invalid token`)
return new NextResponse('Unauthorized', { status: 401 })
}
}
}
// IP restriction check
if (
providerConfig.allowedIps &&
Array.isArray(providerConfig.allowedIps) &&
@@ -821,7 +801,7 @@ export async function fetchAndProcessAirtablePayloads(
// Logging handles all error logging
let currentCursor: number | null = null
let mightHaveMore = true
let payloadsFetched = 0 // Track total payloads fetched
let payloadsFetched = 0
let apiCallCount = 0
// Use a Map to consolidate changes per record ID
const consolidatedChangesMap = new Map<string, AirtableChange>()
@@ -829,15 +809,7 @@ export async function fetchAndProcessAirtablePayloads(
const allPayloads = []
const localProviderConfig = {
...((webhookData.providerConfig as Record<string, any>) || {}),
} // Local copy

// DEBUG: Log start of function execution with critical info
logger.debug(`[${requestId}] TRACE: fetchAndProcessAirtablePayloads started`, {
webhookId: webhookData.id,
workflowId: workflowData.id,
hasBaseId: !!localProviderConfig.baseId,
hasExternalId: !!localProviderConfig.externalId,
})
}

try {
// --- Essential IDs & Config from localProviderConfig ---
@@ -848,11 +820,9 @@ export async function fetchAndProcessAirtablePayloads(
logger.error(
`[${requestId}] Missing baseId or externalId in providerConfig for webhook ${webhookData.id}. Cannot fetch payloads.`
)
// Error logging handled by logging session
return // Exit early
return
}

// Require credentialId
const credentialId: string | undefined = localProviderConfig.credentialId
if (!credentialId) {
logger.error(
@@ -861,7 +831,6 @@ export async function fetchAndProcessAirtablePayloads(
return
}

// Resolve owner and access token strictly via credentialId (no fallback)
let ownerUserId: string | null = null
try {
const rows = await db.select().from(account).where(eq(account.id, credentialId)).limit(1)
@@ -877,18 +846,14 @@ export async function fetchAndProcessAirtablePayloads(
return
}

// --- Retrieve Stored Cursor from localProviderConfig ---
const storedCursor = localProviderConfig.externalWebhookCursor

// Initialize cursor in provider config if missing
if (storedCursor === undefined || storedCursor === null) {
logger.info(
`[${requestId}] No cursor found in providerConfig for webhook ${webhookData.id}, initializing...`
)
// Update the local copy
localProviderConfig.externalWebhookCursor = null

// Add cursor to the database immediately to fix the configuration
try {
await db
.update(webhook)
@@ -901,7 +866,7 @@ export async function fetchAndProcessAirtablePayloads(
})
.where(eq(webhook.id, webhookData.id))

localProviderConfig.externalWebhookCursor = null // Update local copy too
localProviderConfig.externalWebhookCursor = null
logger.info(`[${requestId}] Successfully initialized cursor for webhook ${webhookData.id}`)
} catch (initError: any) {
logger.error(`[${requestId}] Failed to initialize cursor in DB`, {
@@ -909,7 +874,6 @@ export async function fetchAndProcessAirtablePayloads(
error: initError.message,
stack: initError.stack,
})
// Error logging handled by logging session
}
}

@@ -919,13 +883,12 @@ export async function fetchAndProcessAirtablePayloads(
`[${requestId}] Using stored cursor: ${currentCursor} for webhook ${webhookData.id}`
)
} else {
currentCursor = null // Airtable API defaults to 1 if omitted
currentCursor = null
logger.debug(
`[${requestId}] No valid stored cursor for webhook ${webhookData.id}, starting from beginning`
)
}

// --- Get OAuth Token (strict via credentialId) ---
let accessToken: string | null = null
try {
accessToken = await refreshAccessTokenIfNeeded(credentialId, ownerUserId, requestId)
@@ -946,8 +909,7 @@ export async function fetchAndProcessAirtablePayloads(
credentialId,
}
)
// Error logging handled by logging session
return // Exit early
return
}

const airtableApiBase = 'https://api.airtable.com/v0'
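The cursor bookkeeping above feeds a polling loop against Airtable's webhook payloads endpoint; a simplified sketch of that loop is below (not code from this diff; the endpoint path and response fields follow Airtable's public Web API as I understand it, and error handling is trimmed):

async function pollAirtablePayloads(baseId: string, webhookId: string, accessToken: string) {
  const airtableApiBase = 'https://api.airtable.com/v0'
  let cursor: number | null = null
  let mightHaveMore = true
  const payloads: unknown[] = []

  while (mightHaveMore) {
    const url = new URL(`${airtableApiBase}/bases/${baseId}/webhooks/${webhookId}/payloads`)
    if (cursor !== null) url.searchParams.set('cursor', String(cursor))

    const res = await fetch(url, { headers: { Authorization: `Bearer ${accessToken}` } })
    if (!res.ok) break

    const data = await res.json()
    payloads.push(...(data.payloads ?? []))
    cursor = data.cursor ?? cursor // advance the cursor so the next call resumes where this one stopped
    mightHaveMore = Boolean(data.mightHaveMore)
  }

  return { payloads, cursor }
}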

@@ -38,7 +38,6 @@ async function processDocsEmbeddings(options: ProcessingOptions = {}) {
const config = {
clearExisting: options.clearExisting ?? false,
docsPath: options.docsPath ?? path.join(process.cwd(), '../../apps/docs/content/docs'),
// Use localhost docs in development, production docs otherwise
baseUrl: options.baseUrl ?? (isDev ? 'http://localhost:3001' : 'https://docs.sim.ai'),
chunkSize: options.chunkSize ?? 300, // Max 300 tokens per chunk
minChunkSize: options.minChunkSize ?? 100,
@@ -53,7 +52,6 @@ async function processDocsEmbeddings(options: ProcessingOptions = {}) {
clearExisting: config.clearExisting,
})

// Initialize the docs chunker
const chunker = new DocsChunker({
chunkSize: config.chunkSize,
minChunkSize: config.minChunkSize,
@@ -61,7 +59,6 @@ async function processDocsEmbeddings(options: ProcessingOptions = {}) {
baseUrl: config.baseUrl,
})

// Process all .mdx files first (compute embeddings before clearing)
logger.info(`📚 Processing docs from: ${config.docsPath}`)
const chunks = await chunker.chunkAllDocs(config.docsPath)

@@ -72,7 +69,6 @@ async function processDocsEmbeddings(options: ProcessingOptions = {}) {

logger.info(`📊 Generated ${chunks.length} chunks with embeddings`)

// Clear existing embeddings if requested (after computing new ones to minimize downtime)
if (config.clearExisting) {
logger.info('🗑️ Clearing existing docs embeddings...')
try {
@@ -84,7 +80,6 @@ async function processDocsEmbeddings(options: ProcessingOptions = {}) {
}
}

// Save chunks to database in batches for better performance
const batchSize = 10
logger.info(`💾 Saving chunks to database (batch size: ${batchSize})...`)

@@ -92,7 +87,6 @@ async function processDocsEmbeddings(options: ProcessingOptions = {}) {
const batch = chunks.slice(i, i + batchSize)

try {
// Prepare batch data
const batchData = batch.map((chunk) => ({
chunkText: chunk.text,
sourceDocument: chunk.sourceDocument,
@@ -105,7 +99,6 @@ async function processDocsEmbeddings(options: ProcessingOptions = {}) {
metadata: chunk.metadata,
}))

// Insert batch
await db.insert(docsEmbeddings).values(batchData)

processedChunks += batch.length
@@ -121,7 +114,6 @@ async function processDocsEmbeddings(options: ProcessingOptions = {}) {
}
}

// Verify results
const savedCount = await db
.select({ count: sql<number>`count(*)` })
.from(docsEmbeddings)
@@ -137,7 +129,6 @@ async function processDocsEmbeddings(options: ProcessingOptions = {}) {
logger.info(` • Database total: ${savedCount}`)
logger.info(` • Duration: ${Math.round(duration / 1000)}s`)

// Summary by document
const documentStats = chunks.reduce(
(acc, chunk) => {
if (!acc[chunk.sourceDocument]) {
@@ -153,7 +144,7 @@ async function processDocsEmbeddings(options: ProcessingOptions = {}) {
logger.info(`📋 Document breakdown:`)
Object.entries(documentStats)
.sort(([, a], [, b]) => b.chunks - a.chunks)
.slice(0, 10) // Top 10 documents
.slice(0, 10)
.forEach(([doc, stats]) => {
logger.info(` • ${doc}: ${stats.chunks} chunks, ${stats.tokens} tokens`)
})
@@ -188,7 +179,6 @@ async function main() {
const args = process.argv.slice(2)
const options: ProcessingOptions = {}

// Parse command line arguments
if (args.includes('--clear')) {
options.clearExisting = true
}
@@ -215,10 +205,9 @@ Examples:
}
}

// Run the script if executed directly
if (import.meta.url.includes('process-docs-embeddings.ts')) {
main().catch((error) => {
console.error('Script failed:', error)
logger.error('Script failed:', error)
process.exit(1)
})
}
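The save loop above is a plain fixed-size batching pattern; a generic sketch of it (illustrative only; insertBatch stands in for the db.insert(docsEmbeddings).values(...) call):

async function saveInBatches<T>(
  items: T[],
  batchSize: number,
  insertBatch: (batch: T[]) => Promise<void>
): Promise<number> {
  let processed = 0
  for (let i = 0; i < items.length; i += batchSize) {
    const batch = items.slice(i, i + batchSize) // same slicing as the loop above
    await insertBatch(batch)
    processed += batch.length
  }
  return processed
}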

@@ -1070,12 +1070,12 @@ export const useWorkflowStore = create<WorkflowStoreWithHistory>()(
})

if (!updateResponse.ok) {
console.error('Failed to update webhook status')
logger.error('Failed to update webhook status')
}
}
}
} catch (error) {
console.error('Error toggling webhook status:', error)
logger.error('Error toggling webhook status:', error)
}
}


@@ -1,6 +1,9 @@
import { createLogger } from '@/lib/logs/console/logger'
import type { LatestCommitParams, LatestCommitResponse } from '@/tools/github/types'
import type { ToolConfig } from '@/tools/types'

const logger = createLogger('GitHubLatestCommitTool')

export const latestCommitTool: ToolConfig<LatestCommitParams, LatestCommitResponse> = {
id: 'github_latest_commit',
name: 'GitHub Latest Commit',
@@ -50,14 +53,11 @@ export const latestCommitTool: ToolConfig<LatestCommitParams, LatestCommitRespon
transformResponse: async (response, params) => {
const data = await response.json()

// Create a human-readable content string
const content = `Latest commit: "${data.commit.message}" by ${data.commit.author.name} on ${data.commit.author.date}. SHA: ${data.sha}`

// Initialize files array and add file information
const files = data.files || []
const fileDetailsWithContent = []

// Fetch raw content for each file if includeFileContent is true
if (files.length > 0) {
for (const file of files) {
const fileDetail = {
@@ -72,10 +72,8 @@ export const latestCommitTool: ToolConfig<LatestCommitParams, LatestCommitRespon
content: undefined as string | undefined,
}

// Only try to fetch content for files that are not too large and not deleted
if (file.status !== 'removed' && file.raw_url) {
try {
// Fetch the raw file content
const contentResponse = await fetch(file.raw_url, {
headers: {
Authorization: `Bearer ${params?.apiKey}`,
@@ -87,7 +85,7 @@ export const latestCommitTool: ToolConfig<LatestCommitParams, LatestCommitRespon
fileDetail.content = await contentResponse.text()
}
} catch (error) {
console.error(`Failed to fetch content for ${file.filename}:`, error)
logger.error(`Failed to fetch content for ${file.filename}:`, error)
}
}


@@ -1,3 +1,4 @@
import { createLogger } from '@/lib/logs/console/logger'
import type { GmailSearchParams, GmailToolResponse } from '@/tools/gmail/types'
import {
createMessagesSummary,
@@ -6,6 +7,8 @@ import {
} from '@/tools/gmail/utils'
import type { ToolConfig } from '@/tools/types'

const logger = createLogger('GmailSearchTool')

export const gmailSearchTool: ToolConfig<GmailSearchParams, GmailToolResponse> = {
id: 'gmail_search',
name: 'Gmail Search',
@@ -109,7 +112,7 @@ export const gmailSearchTool: ToolConfig<GmailSearchParams, GmailToolResponse> =
},
}
} catch (error: any) {
console.error('Error fetching message details:', error)
logger.error('Error fetching message details:', error)
return {
success: true,
output: {

@@ -1,6 +1,9 @@
import { createLogger } from '@/lib/logs/console/logger'
import type { ToolConfig } from '@/tools/types'
import type { XSearchParams, XSearchResponse, XTweet, XUser } from '@/tools/x/types'

const logger = createLogger('XSearchTool')

export const xSearchTool: ToolConfig<XSearchParams, XSearchResponse> = {
id: 'x_search',
name: 'X Search',
@@ -92,7 +95,7 @@ export const xSearchTool: ToolConfig<XSearchParams, XSearchResponse> = {

// Check if data.data is undefined/null or not an array
if (!data.data || !Array.isArray(data.data)) {
console.error('X Search API Error:', JSON.stringify(data, null, 2))
logger.error('X Search API Error:', JSON.stringify(data, null, 2))
return {
success: false,
error: