feat(kb): harden sync engine and add connector audit logging (#3697)

* feat(kb): harden sync engine and add connector audit logging

- Fix stuck syncing status: added finally block in executeSync + stale lock recovery in cron scheduler (2hr TTL)
- Fix token expiry mid-sync: refresh OAuth token between pagination pages and before deferred content hydration
- GitHub deferred content loading: use Git blob SHA for change detection, only fetch content for new/changed docs
- Add network error keywords to isRetryableError (fetch failed, econnreset, etc.)
- Extract sanitizeStorageTitle helper to fix S3 key length limit issues
- Add audit logging for connector CRUD, sync triggers, document exclude/restore, and resource restoration paths

* lint

* fix(tests): update audit mock and route tests for new audit actions

* fix(kb): address PR review - finally block race, contentHash propagation, resourceName

- Replace DB-read finally block with local syncExitedCleanly flag to avoid race condition
- Propagate fullDoc.contentHash during deferred content hydration
- Add resourceName to file restore audit record

* fix(audit): include fileId in file restore audit description
This commit is contained in:
Waleed
2026-03-21 09:36:43 -07:00
committed by GitHub
parent e270756886
commit a64afac075
27 changed files with 417 additions and 114 deletions

View File

@@ -53,6 +53,11 @@ vi.mock('@/lib/auth/hybrid', () => ({
vi.mock('@/lib/core/utils/request', () => ({
generateRequestId: vi.fn().mockReturnValue('test-req-id'),
}))
vi.mock('@/lib/audit/log', () => ({
recordAudit: vi.fn(),
AuditAction: {},
AuditResourceType: {},
}))
import { GET, PATCH } from '@/app/api/knowledge/[id]/connectors/[connectorId]/documents/route'
@@ -168,8 +173,16 @@ describe('Connector Documents API Route', () => {
})
it('returns success for restore operation', async () => {
mockCheckSession.mockResolvedValue({ success: true, userId: 'user-1' })
mockCheckWriteAccess.mockResolvedValue({ hasAccess: true })
mockCheckSession.mockResolvedValue({
success: true,
userId: 'user-1',
userName: 'Test',
userEmail: 'test@test.com',
})
mockCheckWriteAccess.mockResolvedValue({
hasAccess: true,
knowledgeBase: { workspaceId: 'ws-1', name: 'Test KB' },
})
mockDbChain.limit.mockResolvedValueOnce([{ id: 'conn-456' }])
mockDbChain.returning.mockResolvedValueOnce([{ id: 'doc-1' }])
@@ -182,8 +195,16 @@ describe('Connector Documents API Route', () => {
})
it('returns success for exclude operation', async () => {
mockCheckSession.mockResolvedValue({ success: true, userId: 'user-1' })
mockCheckWriteAccess.mockResolvedValue({ hasAccess: true })
mockCheckSession.mockResolvedValue({
success: true,
userId: 'user-1',
userName: 'Test',
userEmail: 'test@test.com',
})
mockCheckWriteAccess.mockResolvedValue({
hasAccess: true,
knowledgeBase: { workspaceId: 'ws-1', name: 'Test KB' },
})
mockDbChain.limit.mockResolvedValueOnce([{ id: 'conn-456' }])
mockDbChain.returning.mockResolvedValueOnce([{ id: 'doc-2' }, { id: 'doc-3' }])

View File

@@ -4,6 +4,7 @@ import { createLogger } from '@sim/logger'
import { and, eq, inArray, isNull } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { checkKnowledgeBaseAccess, checkKnowledgeBaseWriteAccess } from '@/app/api/knowledge/utils'
@@ -184,6 +185,19 @@ export async function PATCH(request: NextRequest, { params }: RouteParams) {
logger.info(`[${requestId}] Restored ${updated.length} excluded documents`, { connectorId })
recordAudit({
workspaceId: writeCheck.knowledgeBase.workspaceId,
actorId: auth.userId,
actorName: auth.userName,
actorEmail: auth.userEmail,
action: AuditAction.CONNECTOR_DOCUMENT_RESTORED,
resourceType: AuditResourceType.CONNECTOR,
resourceId: connectorId,
description: `Restored ${updated.length} excluded document(s) for knowledge base "${writeCheck.knowledgeBase.name}"`,
metadata: { knowledgeBaseId, documentCount: updated.length },
request,
})
return NextResponse.json({
success: true,
data: { restoredCount: updated.length, documentIds: updated.map((d) => d.id) },
@@ -206,6 +220,19 @@ export async function PATCH(request: NextRequest, { params }: RouteParams) {
logger.info(`[${requestId}] Excluded ${updated.length} documents`, { connectorId })
recordAudit({
workspaceId: writeCheck.knowledgeBase.workspaceId,
actorId: auth.userId,
actorName: auth.userName,
actorEmail: auth.userEmail,
action: AuditAction.CONNECTOR_DOCUMENT_EXCLUDED,
resourceType: AuditResourceType.CONNECTOR,
resourceId: connectorId,
description: `Excluded ${updated.length} document(s) from knowledge base "${writeCheck.knowledgeBase.name}"`,
metadata: { knowledgeBaseId, documentCount: updated.length },
request,
})
return NextResponse.json({
success: true,
data: { excludedCount: updated.length, documentIds: updated.map((d) => d.id) },

View File

@@ -75,6 +75,11 @@ vi.mock('@/lib/knowledge/tags/service', () => ({
vi.mock('@/lib/knowledge/documents/service', () => ({
deleteDocumentStorageFiles: vi.fn().mockResolvedValue(undefined),
}))
vi.mock('@/lib/audit/log', () => ({
recordAudit: vi.fn(),
AuditAction: {},
AuditResourceType: {},
}))
import { DELETE, GET, PATCH } from '@/app/api/knowledge/[id]/connectors/[connectorId]/route'
@@ -183,8 +188,16 @@ describe('Knowledge Connector By ID API Route', () => {
})
it('returns 200 and updates status', async () => {
mockCheckSession.mockResolvedValue({ success: true, userId: 'user-1' })
mockCheckWriteAccess.mockResolvedValue({ hasAccess: true })
mockCheckSession.mockResolvedValue({
success: true,
userId: 'user-1',
userName: 'Test',
userEmail: 'test@test.com',
})
mockCheckWriteAccess.mockResolvedValue({
hasAccess: true,
knowledgeBase: { workspaceId: 'ws-1', name: 'Test KB' },
})
const updatedConnector = { id: 'conn-456', status: 'paused', syncIntervalMinutes: 120 }
mockDbChain.limit.mockResolvedValueOnce([updatedConnector])
@@ -210,8 +223,16 @@ describe('Knowledge Connector By ID API Route', () => {
})
it('returns 200 on successful hard-delete', async () => {
mockCheckSession.mockResolvedValue({ success: true, userId: 'user-1' })
mockCheckWriteAccess.mockResolvedValue({ hasAccess: true })
mockCheckSession.mockResolvedValue({
success: true,
userId: 'user-1',
userName: 'Test',
userEmail: 'test@test.com',
})
mockCheckWriteAccess.mockResolvedValue({
hasAccess: true,
knowledgeBase: { workspaceId: 'ws-1', name: 'Test KB' },
})
mockDbChain.where
.mockReturnValueOnce(mockDbChain)
.mockResolvedValueOnce([{ id: 'doc-1', fileUrl: '/api/uploads/test.txt' }])

View File

@@ -11,6 +11,7 @@ import { and, desc, eq, inArray, isNull, sql } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { decryptApiKey } from '@/lib/api-key/crypto'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { deleteDocumentStorageFiles } from '@/lib/knowledge/documents/service'
@@ -233,6 +234,21 @@ export async function PATCH(request: NextRequest, { params }: RouteParams) {
.limit(1)
const { encryptedApiKey: __, ...updatedData } = updated[0]
recordAudit({
workspaceId: writeCheck.knowledgeBase.workspaceId,
actorId: auth.userId,
actorName: auth.userName,
actorEmail: auth.userEmail,
action: AuditAction.CONNECTOR_UPDATED,
resourceType: AuditResourceType.CONNECTOR,
resourceId: connectorId,
resourceName: updatedData.connectorType,
description: `Updated connector for knowledge base "${writeCheck.knowledgeBase.name}"`,
metadata: { knowledgeBaseId, updatedFields: Object.keys(parsed.data) },
request,
})
return NextResponse.json({ success: true, data: updatedData })
} catch (error) {
logger.error(`[${requestId}] Error updating connector`, error)
@@ -260,7 +276,7 @@ export async function DELETE(request: NextRequest, { params }: RouteParams) {
}
const existingConnector = await db
.select({ id: knowledgeConnector.id })
.select({ id: knowledgeConnector.id, connectorType: knowledgeConnector.connectorType })
.from(knowledgeConnector)
.where(
and(
@@ -323,6 +339,20 @@ export async function DELETE(request: NextRequest, { params }: RouteParams) {
logger.info(`[${requestId}] Hard-deleted connector ${connectorId} and its documents`)
recordAudit({
workspaceId: writeCheck.knowledgeBase.workspaceId,
actorId: auth.userId,
actorName: auth.userName,
actorEmail: auth.userEmail,
action: AuditAction.CONNECTOR_DELETED,
resourceType: AuditResourceType.CONNECTOR,
resourceId: connectorId,
resourceName: existingConnector[0].connectorType,
description: `Deleted connector from knowledge base "${writeCheck.knowledgeBase.name}"`,
metadata: { knowledgeBaseId, documentsDeleted: connectorDocuments.length },
request,
})
return NextResponse.json({ success: true })
} catch (error) {
logger.error(`[${requestId}] Error deleting connector`, error)

View File

@@ -43,6 +43,11 @@ vi.mock('@/lib/core/utils/request', () => ({
vi.mock('@/lib/knowledge/connectors/sync-engine', () => ({
dispatchSync: mockDispatchSync,
}))
vi.mock('@/lib/audit/log', () => ({
recordAudit: vi.fn(),
AuditAction: {},
AuditResourceType: {},
}))
import { POST } from '@/app/api/knowledge/[id]/connectors/[connectorId]/sync/route'
@@ -92,8 +97,16 @@ describe('Connector Manual Sync API Route', () => {
})
it('dispatches sync on valid request', async () => {
mockCheckSession.mockResolvedValue({ success: true, userId: 'user-1' })
mockCheckWriteAccess.mockResolvedValue({ hasAccess: true })
mockCheckSession.mockResolvedValue({
success: true,
userId: 'user-1',
userName: 'Test',
userEmail: 'test@test.com',
})
mockCheckWriteAccess.mockResolvedValue({
hasAccess: true,
knowledgeBase: { workspaceId: 'ws-1', name: 'Test KB' },
})
mockDbChain.limit.mockResolvedValueOnce([{ id: 'conn-456', status: 'active' }])
const req = createMockRequest('POST')

View File

@@ -3,6 +3,7 @@ import { knowledgeConnector } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq, isNull } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { dispatchSync } from '@/lib/knowledge/connectors/sync-engine'
@@ -54,6 +55,20 @@ export async function POST(request: NextRequest, { params }: RouteParams) {
logger.info(`[${requestId}] Manual sync triggered for connector ${connectorId}`)
recordAudit({
workspaceId: writeCheck.knowledgeBase.workspaceId,
actorId: auth.userId,
actorName: auth.userName,
actorEmail: auth.userEmail,
action: AuditAction.CONNECTOR_SYNCED,
resourceType: AuditResourceType.CONNECTOR,
resourceId: connectorId,
resourceName: connectorRows[0].connectorType,
description: `Triggered manual sync for connector on knowledge base "${writeCheck.knowledgeBase.name}"`,
metadata: { knowledgeBaseId },
request,
})
dispatchSync(connectorId, { requestId }).catch((error) => {
logger.error(
`[${requestId}] Failed to dispatch manual sync for connector ${connectorId}`,

View File

@@ -5,6 +5,7 @@ import { and, desc, eq, isNull, sql } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { encryptApiKey } from '@/lib/api-key/crypto'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { dispatchSync } from '@/lib/knowledge/connectors/sync-engine'
@@ -226,6 +227,20 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
logger.info(`[${requestId}] Created connector ${connectorId} for KB ${knowledgeBaseId}`)
recordAudit({
workspaceId: writeCheck.knowledgeBase.workspaceId,
actorId: auth.userId,
actorName: auth.userName,
actorEmail: auth.userEmail,
action: AuditAction.CONNECTOR_CREATED,
resourceType: AuditResourceType.CONNECTOR,
resourceId: connectorId,
resourceName: connectorType,
description: `Created ${connectorType} connector for knowledge base "${writeCheck.knowledgeBase.name}"`,
metadata: { knowledgeBaseId, connectorType, syncIntervalMinutes },
request,
})
dispatchSync(connectorId, { requestId }).catch((error) => {
logger.error(
`[${requestId}] Failed to dispatch initial sync for connector ${connectorId}`,

View File

@@ -3,6 +3,7 @@ import { knowledgeBase } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { restoreKnowledgeBase } from '@/lib/knowledge/service'
@@ -23,6 +24,7 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
const [kb] = await db
.select({
id: knowledgeBase.id,
name: knowledgeBase.name,
workspaceId: knowledgeBase.workspaceId,
userId: knowledgeBase.userId,
})
@@ -47,6 +49,19 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
logger.info(`[${requestId}] Restored knowledge base ${id}`)
recordAudit({
workspaceId: kb.workspaceId,
actorId: auth.userId,
actorName: auth.userName,
actorEmail: auth.userEmail,
action: AuditAction.KNOWLEDGE_BASE_RESTORED,
resourceType: AuditResourceType.KNOWLEDGE_BASE,
resourceId: id,
resourceName: kb.name,
description: `Restored knowledge base "${kb.name}"`,
request,
})
return NextResponse.json({ success: true })
} catch (error) {
logger.error(`[${requestId}] Error restoring knowledge base ${id}`, error)

View File

@@ -27,6 +27,34 @@ export async function GET(request: NextRequest) {
try {
const now = new Date()
const STALE_SYNC_TTL_MS = 120 * 60 * 1000
const staleCutoff = new Date(now.getTime() - STALE_SYNC_TTL_MS)
const recoveredConnectors = await db
.update(knowledgeConnector)
.set({
status: 'error',
lastSyncError: 'Sync timed out (stale lock recovered)',
nextSyncAt: new Date(now.getTime() + 10 * 60 * 1000),
updatedAt: now,
})
.where(
and(
eq(knowledgeConnector.status, 'syncing'),
lte(knowledgeConnector.updatedAt, staleCutoff),
isNull(knowledgeConnector.archivedAt),
isNull(knowledgeConnector.deletedAt)
)
)
.returning({ id: knowledgeConnector.id })
if (recoveredConnectors.length > 0) {
logger.warn(
`[${requestId}] Recovered ${recoveredConnectors.length} stale syncing connectors`,
{ ids: recoveredConnectors.map((c) => c.id) }
)
}
const dueConnectors = await db
.select({
id: knowledgeConnector.id,

View File

@@ -1,5 +1,6 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { getTableById, restoreTable } from '@/lib/table'
@@ -34,6 +35,19 @@ export async function POST(
logger.info(`[${requestId}] Restored table ${tableId}`)
recordAudit({
workspaceId: table.workspaceId,
actorId: auth.userId,
actorName: auth.userName,
actorEmail: auth.userEmail,
action: AuditAction.TABLE_RESTORED,
resourceType: AuditResourceType.TABLE,
resourceId: tableId,
resourceName: table.name,
description: `Restored table "${table.name}"`,
request,
})
return NextResponse.json({ success: true })
} catch (error) {
logger.error(`[${requestId}] Error restoring table ${tableId}`, error)

View File

@@ -1,5 +1,6 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { restoreWorkflow } from '@/lib/workflows/lifecycle'
@@ -44,6 +45,19 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
logger.info(`[${requestId}] Restored workflow ${workflowId}`)
recordAudit({
workspaceId: workflowData.workspaceId,
actorId: auth.userId,
actorName: auth.userName,
actorEmail: auth.userEmail,
action: AuditAction.WORKFLOW_RESTORED,
resourceType: AuditResourceType.WORKFLOW,
resourceId: workflowId,
resourceName: workflowData.name,
description: `Restored workflow "${workflowData.name}"`,
request,
})
return NextResponse.json({ success: true })
} catch (error) {
logger.error(`[${requestId}] Error restoring workflow ${workflowId}`, error)

View File

@@ -1,5 +1,6 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { getSession } from '@/lib/auth'
import { generateRequestId } from '@/lib/core/utils/request'
import { restoreWorkspaceFile } from '@/lib/uploads/contexts/workspace'
@@ -29,6 +30,19 @@ export async function POST(
logger.info(`[${requestId}] Restored workspace file ${fileId}`)
recordAudit({
workspaceId,
actorId: session.user.id,
actorName: session.user.name,
actorEmail: session.user.email,
action: AuditAction.FILE_RESTORED,
resourceType: AuditResourceType.FILE,
resourceId: fileId,
resourceName: fileId,
description: `Restored workspace file ${fileId}`,
request,
})
return NextResponse.json({ success: true })
} catch (error) {
logger.error(`[${requestId}] Error restoring workspace file ${fileId}`, error)

View File

@@ -6,7 +6,7 @@ const COLUMN_COUNT = 6
export default function FilesLoading() {
return (
<div className='flex h-full flex-1 flex-col overflow-hidden bg-[var(--bg)]'>
<div className='border-b border-[var(--border)] px-[24px] py-[10px]'>
<div className='border-[var(--border)] border-b px-[24px] py-[10px]'>
<div className='flex items-center justify-between'>
<div className='flex items-center gap-[12px]'>
<Skeleton className='h-[14px] w-[14px] rounded-[2px]' />
@@ -18,7 +18,7 @@ export default function FilesLoading() {
</div>
</div>
</div>
<div className='border-b border-[var(--border)] px-[24px] py-[10px]'>
<div className='border-[var(--border)] border-b px-[24px] py-[10px]'>
<div className='flex items-center'>
<Skeleton className='h-[14px] w-[14px] rounded-[2px]' />
<Skeleton className='ml-[10px] h-[14px] w-[120px] rounded-[4px]' />
@@ -27,7 +27,7 @@ export default function FilesLoading() {
<div className='min-h-0 flex-1 overflow-auto'>
<table className='w-full'>
<thead>
<tr className='border-b border-[var(--border)]'>
<tr className='border-[var(--border)] border-b'>
<th className='w-[40px] px-[12px] py-[8px]'>
<Skeleton className='h-[14px] w-[14px] rounded-[2px]' />
</th>
@@ -40,7 +40,7 @@ export default function FilesLoading() {
</thead>
<tbody>
{Array.from({ length: SKELETON_ROW_COUNT }).map((_, rowIndex) => (
<tr key={rowIndex} className='border-b border-[var(--border)]'>
<tr key={rowIndex} className='border-[var(--border)] border-b'>
<td className='w-[40px] px-[12px] py-[10px]'>
<Skeleton className='h-[14px] w-[14px] rounded-[2px]' />
</td>

View File

@@ -6,7 +6,7 @@ const COLUMN_COUNT = 4
export default function DocumentLoading() {
return (
<div className='flex h-full flex-1 flex-col overflow-hidden bg-[var(--bg)]'>
<div className='border-b border-[var(--border)] px-[16px] py-[8.5px]'>
<div className='border-[var(--border)] border-b px-[16px] py-[8.5px]'>
<div className='flex items-center justify-between'>
<div className='flex items-center gap-[8px]'>
<Skeleton className='h-[14px] w-[14px] rounded-[2px]' />
@@ -18,7 +18,7 @@ export default function DocumentLoading() {
</div>
</div>
</div>
<div className='border-b border-[var(--border)] px-[24px] py-[10px]'>
<div className='border-[var(--border)] border-b px-[24px] py-[10px]'>
<div className='flex items-center justify-between'>
<div className='flex items-center'>
<Skeleton className='h-[14px] w-[14px] rounded-[2px]' />
@@ -32,7 +32,7 @@ export default function DocumentLoading() {
<div className='min-h-0 flex-1 overflow-auto'>
<table className='w-full'>
<thead>
<tr className='border-b border-[var(--border)]'>
<tr className='border-[var(--border)] border-b'>
<th className='w-[40px] px-[12px] py-[8px]'>
<Skeleton className='h-[14px] w-[14px] rounded-[2px]' />
</th>
@@ -45,7 +45,7 @@ export default function DocumentLoading() {
</thead>
<tbody>
{Array.from({ length: SKELETON_ROW_COUNT }).map((_, rowIndex) => (
<tr key={rowIndex} className='border-b border-[var(--border)]'>
<tr key={rowIndex} className='border-[var(--border)] border-b'>
<td className='w-[40px] px-[12px] py-[10px]'>
<Skeleton className='h-[14px] w-[14px] rounded-[2px]' />
</td>

View File

@@ -6,7 +6,7 @@ const COLUMN_COUNT = 7
export default function KnowledgeBaseLoading() {
return (
<div className='flex h-full flex-1 flex-col overflow-hidden bg-[var(--bg)]'>
<div className='border-b border-[var(--border)] px-[16px] py-[8.5px]'>
<div className='border-[var(--border)] border-b px-[16px] py-[8.5px]'>
<div className='flex items-center justify-between'>
<div className='flex items-center gap-[8px]'>
<Skeleton className='h-[14px] w-[14px] rounded-[2px]' />
@@ -19,7 +19,7 @@ export default function KnowledgeBaseLoading() {
</div>
</div>
</div>
<div className='border-b border-[var(--border)] px-[24px] py-[10px]'>
<div className='border-[var(--border)] border-b px-[24px] py-[10px]'>
<div className='flex items-center justify-between'>
<div className='flex items-center'>
<Skeleton className='h-[14px] w-[14px] rounded-[2px]' />
@@ -34,7 +34,7 @@ export default function KnowledgeBaseLoading() {
<div className='min-h-0 flex-1 overflow-auto'>
<table className='w-full'>
<thead>
<tr className='border-b border-[var(--border)]'>
<tr className='border-[var(--border)] border-b'>
<th className='w-[40px] px-[12px] py-[8px]'>
<Skeleton className='h-[14px] w-[14px] rounded-[2px]' />
</th>
@@ -47,7 +47,7 @@ export default function KnowledgeBaseLoading() {
</thead>
<tbody>
{Array.from({ length: SKELETON_ROW_COUNT }).map((_, rowIndex) => (
<tr key={rowIndex} className='border-b border-[var(--border)]'>
<tr key={rowIndex} className='border-[var(--border)] border-b'>
<td className='w-[40px] px-[12px] py-[10px]'>
<Skeleton className='h-[14px] w-[14px] rounded-[2px]' />
</td>

View File

@@ -6,7 +6,7 @@ const COLUMN_COUNT = 6
export default function KnowledgeLoading() {
return (
<div className='flex h-full flex-1 flex-col overflow-hidden bg-[var(--bg)]'>
<div className='border-b border-[var(--border)] px-[24px] py-[10px]'>
<div className='border-[var(--border)] border-b px-[24px] py-[10px]'>
<div className='flex items-center justify-between'>
<div className='flex items-center gap-[12px]'>
<Skeleton className='h-[14px] w-[14px] rounded-[2px]' />
@@ -18,7 +18,7 @@ export default function KnowledgeLoading() {
</div>
</div>
<div className='border-b border-[var(--border)] px-[24px] py-[10px]'>
<div className='border-[var(--border)] border-b px-[24px] py-[10px]'>
<div className='flex items-center'>
<Skeleton className='h-[14px] w-[14px] rounded-[2px]' />
<Skeleton className='ml-[10px] h-[14px] w-[160px] rounded-[4px]' />
@@ -28,7 +28,7 @@ export default function KnowledgeLoading() {
<div className='min-h-0 flex-1 overflow-auto'>
<table className='w-full'>
<thead>
<tr className='border-b border-[var(--border)]'>
<tr className='border-[var(--border)] border-b'>
<th className='w-[40px] px-[12px] py-[8px]'>
<Skeleton className='h-[14px] w-[14px] rounded-[2px]' />
</th>
@@ -41,7 +41,7 @@ export default function KnowledgeLoading() {
</thead>
<tbody>
{Array.from({ length: SKELETON_ROW_COUNT }).map((_, rowIndex) => (
<tr key={rowIndex} className='border-b border-[var(--border)]'>
<tr key={rowIndex} className='border-[var(--border)] border-b'>
<td className='w-[40px] px-[12px] py-[10px]'>
<Skeleton className='h-[14px] w-[14px] rounded-[2px]' />
</td>

View File

@@ -6,7 +6,7 @@ const COLUMN_COUNT = 6
export default function LogsLoading() {
return (
<div className='flex h-full flex-1 flex-col overflow-hidden bg-[var(--bg)]'>
<div className='border-b border-[var(--border)] px-[24px] py-[10px]'>
<div className='border-[var(--border)] border-b px-[24px] py-[10px]'>
<div className='flex items-center justify-between'>
<div className='flex items-center gap-[12px]'>
<Skeleton className='h-[14px] w-[14px] rounded-[2px]' />
@@ -21,7 +21,7 @@ export default function LogsLoading() {
</div>
</div>
</div>
<div className='border-b border-[var(--border)] px-[24px] py-[10px]'>
<div className='border-[var(--border)] border-b px-[24px] py-[10px]'>
<div className='flex items-center justify-between'>
<div className='flex items-center'>
<Skeleton className='h-[14px] w-[14px] rounded-[2px]' />
@@ -36,7 +36,7 @@ export default function LogsLoading() {
<div className='min-h-0 flex-1 overflow-auto'>
<table className='w-full'>
<thead>
<tr className='border-b border-[var(--border)]'>
<tr className='border-[var(--border)] border-b'>
<th className='w-[40px] px-[12px] py-[8px]'>
<Skeleton className='h-[14px] w-[14px] rounded-[2px]' />
</th>
@@ -49,7 +49,7 @@ export default function LogsLoading() {
</thead>
<tbody>
{Array.from({ length: SKELETON_ROW_COUNT }).map((_, rowIndex) => (
<tr key={rowIndex} className='border-b border-[var(--border)]'>
<tr key={rowIndex} className='border-[var(--border)] border-b'>
<td className='w-[40px] px-[12px] py-[10px]'>
<Skeleton className='h-[14px] w-[14px] rounded-[2px]' />
</td>

View File

@@ -6,7 +6,7 @@ const COLUMN_COUNT = 4
export default function ScheduledTasksLoading() {
return (
<div className='flex h-full flex-1 flex-col overflow-hidden bg-[var(--bg)]'>
<div className='border-b border-[var(--border)] px-[24px] py-[10px]'>
<div className='border-[var(--border)] border-b px-[24px] py-[10px]'>
<div className='flex items-center justify-between'>
<div className='flex items-center gap-[12px]'>
<Skeleton className='h-[14px] w-[14px] rounded-[2px]' />
@@ -17,7 +17,7 @@ export default function ScheduledTasksLoading() {
</div>
</div>
</div>
<div className='border-b border-[var(--border)] px-[24px] py-[10px]'>
<div className='border-[var(--border)] border-b px-[24px] py-[10px]'>
<div className='flex items-center'>
<Skeleton className='h-[14px] w-[14px] rounded-[2px]' />
<Skeleton className='ml-[10px] h-[14px] w-[160px] rounded-[4px]' />
@@ -26,7 +26,7 @@ export default function ScheduledTasksLoading() {
<div className='min-h-0 flex-1 overflow-auto'>
<table className='w-full'>
<thead>
<tr className='border-b border-[var(--border)]'>
<tr className='border-[var(--border)] border-b'>
<th className='w-[40px] px-[12px] py-[8px]'>
<Skeleton className='h-[14px] w-[14px] rounded-[2px]' />
</th>
@@ -39,7 +39,7 @@ export default function ScheduledTasksLoading() {
</thead>
<tbody>
{Array.from({ length: SKELETON_ROW_COUNT }).map((_, rowIndex) => (
<tr key={rowIndex} className='border-b border-[var(--border)]'>
<tr key={rowIndex} className='border-[var(--border)] border-b'>
<td className='w-[40px] px-[12px] py-[10px]'>
<Skeleton className='h-[14px] w-[14px] rounded-[2px]' />
</td>

View File

@@ -6,7 +6,7 @@ const COLUMN_COUNT = 5
export default function TableDetailLoading() {
return (
<div className='flex h-full flex-1 flex-col overflow-hidden bg-[var(--bg)]'>
<div className='border-b border-[var(--border)] px-[16px] py-[8.5px]'>
<div className='border-[var(--border)] border-b px-[16px] py-[8.5px]'>
<div className='flex items-center justify-between'>
<div className='flex items-center gap-[8px]'>
<Skeleton className='h-[14px] w-[14px] rounded-[2px]' />
@@ -22,7 +22,7 @@ export default function TableDetailLoading() {
<div className='min-h-0 flex-1 overflow-auto'>
<table className='w-full'>
<thead>
<tr className='border-b border-[var(--border)]'>
<tr className='border-[var(--border)] border-b'>
{Array.from({ length: COLUMN_COUNT }).map((_, i) => (
<th key={i} className='px-[12px] py-[8px] text-left'>
<Skeleton className='h-[12px] w-[72px] rounded-[4px]' />
@@ -32,7 +32,7 @@ export default function TableDetailLoading() {
</thead>
<tbody>
{Array.from({ length: SKELETON_ROW_COUNT }).map((_, rowIndex) => (
<tr key={rowIndex} className='border-b border-[var(--border)]'>
<tr key={rowIndex} className='border-[var(--border)] border-b'>
{Array.from({ length: COLUMN_COUNT }).map((_, colIndex) => (
<td key={colIndex} className='px-[12px] py-[10px]'>
<Skeleton

View File

@@ -6,7 +6,7 @@ const COLUMN_COUNT = 6
export default function TablesLoading() {
return (
<div className='flex h-full flex-1 flex-col overflow-hidden bg-[var(--bg)]'>
<div className='border-b border-[var(--border)] px-[24px] py-[10px]'>
<div className='border-[var(--border)] border-b px-[24px] py-[10px]'>
<div className='flex items-center justify-between'>
<div className='flex items-center gap-[12px]'>
<Skeleton className='h-[14px] w-[14px] rounded-[2px]' />
@@ -17,7 +17,7 @@ export default function TablesLoading() {
</div>
</div>
</div>
<div className='border-b border-[var(--border)] px-[24px] py-[10px]'>
<div className='border-[var(--border)] border-b px-[24px] py-[10px]'>
<div className='flex items-center'>
<Skeleton className='h-[14px] w-[14px] rounded-[2px]' />
<Skeleton className='ml-[10px] h-[14px] w-[120px] rounded-[4px]' />
@@ -26,7 +26,7 @@ export default function TablesLoading() {
<div className='min-h-0 flex-1 overflow-auto'>
<table className='w-full'>
<thead>
<tr className='border-b border-[var(--border)]'>
<tr className='border-[var(--border)] border-b'>
<th className='w-[40px] px-[12px] py-[8px]'>
<Skeleton className='h-[14px] w-[14px] rounded-[2px]' />
</th>
@@ -39,7 +39,7 @@ export default function TablesLoading() {
</thead>
<tbody>
{Array.from({ length: SKELETON_ROW_COUNT }).map((_, rowIndex) => (
<tr key={rowIndex} className='border-b border-[var(--border)]'>
<tr key={rowIndex} className='border-[var(--border)] border-b'>
<td className='w-[40px] px-[12px] py-[10px]'>
<Skeleton className='h-[14px] w-[14px] rounded-[2px]' />
</td>

View File

@@ -2,12 +2,13 @@ import { createLogger } from '@sim/logger'
import { GithubIcon } from '@/components/icons'
import { fetchWithRetry, VALIDATE_RETRY_OPTIONS } from '@/lib/knowledge/documents/utils'
import type { ConnectorConfig, ExternalDocument, ExternalDocumentList } from '@/connectors/types'
import { computeContentHash, parseTagDate } from '@/connectors/utils'
import { parseTagDate } from '@/connectors/utils'
const logger = createLogger('GitHubConnector')
const GITHUB_API_URL = 'https://api.github.com'
const BATCH_SIZE = 30
const GIT_SHA_PREFIX = 'git-sha:'
/**
* Parses the repository string into owner and repo.
@@ -89,58 +90,25 @@ async function fetchTree(
}
/**
* Fetches file content via the Blobs API and decodes base64.
* Creates a lightweight stub ExternalDocument from a tree item.
* Uses the Git blob SHA as contentHash for change detection, avoiding
* the need to fetch blob content for every file during listing.
* Content is deferred and only fetched for new/changed documents.
*/
async function fetchBlobContent(
accessToken: string,
owner: string,
repo: string,
sha: string
): Promise<string> {
const url = `${GITHUB_API_URL}/repos/${owner}/${repo}/git/blobs/${sha}`
const response = await fetchWithRetry(url, {
method: 'GET',
headers: {
Accept: 'application/vnd.github+json',
Authorization: `Bearer ${accessToken}`,
'X-GitHub-Api-Version': '2022-11-28',
},
})
if (!response.ok) {
throw new Error(`Failed to fetch blob ${sha}: ${response.status}`)
}
const data = await response.json()
if (data.encoding === 'base64') {
return Buffer.from(data.content, 'base64').toString('utf-8')
}
return data.content || ''
}
/**
* Converts a tree item to an ExternalDocument by fetching its content.
*/
async function treeItemToDocument(
accessToken: string,
function treeItemToStub(
owner: string,
repo: string,
branch: string,
item: TreeItem
): Promise<ExternalDocument> {
const content = await fetchBlobContent(accessToken, owner, repo, item.sha)
const contentHash = await computeContentHash(content)
): ExternalDocument {
return {
externalId: item.path,
title: item.path.split('/').pop() || item.path,
content,
content: '',
contentDeferred: true,
mimeType: 'text/plain',
sourceUrl: `https://github.com/${owner}/${repo}/blob/${encodeURIComponent(branch)}/${item.path.split('/').map(encodeURIComponent).join('/')}`,
contentHash,
contentHash: `${GIT_SHA_PREFIX}${item.sha}`,
metadata: {
path: item.path,
sha: item.sha,
@@ -245,24 +213,7 @@ export const githubConnector: ConnectorConfig = {
batchSize: batch.length,
})
const BLOB_CONCURRENCY = 5
const documents: ExternalDocument[] = []
for (let i = 0; i < batch.length; i += BLOB_CONCURRENCY) {
const chunk = batch.slice(i, i + BLOB_CONCURRENCY)
const results = await Promise.all(
chunk.map(async (item) => {
try {
return await treeItemToDocument(accessToken, owner, repo, branch, item)
} catch (error) {
logger.warn(`Failed to fetch content for ${item.path}`, {
error: error instanceof Error ? error.message : String(error),
})
return null
}
})
)
documents.push(...(results.filter(Boolean) as ExternalDocument[]))
}
const documents = batch.map((item) => treeItemToStub(owner, repo, branch, item))
const nextOffset = offset + BATCH_SIZE
const hasMore = nextOffset < capped.length
@@ -308,7 +259,6 @@ export const githubConnector: ConnectorConfig = {
data.encoding === 'base64'
? Buffer.from(data.content as string, 'base64').toString('utf-8')
: (data.content as string) || ''
const contentHash = await computeContentHash(content)
return {
externalId,
@@ -316,7 +266,7 @@ export const githubConnector: ConnectorConfig = {
content,
mimeType: 'text/plain',
sourceUrl: `https://github.com/${owner}/${repo}/blob/${encodeURIComponent(branch)}/${path.split('/').map(encodeURIComponent).join('/')}`,
contentHash,
contentHash: `${GIT_SHA_PREFIX}${data.sha as string}`,
metadata: {
path,
sha: data.sha as string,

View File

@@ -24,8 +24,10 @@ export interface ExternalDocument {
mimeType: string
/** Link back to the original document */
sourceUrl?: string
/** SHA-256 of content for change detection */
/** Hash of content for change detection (format varies by connector) */
contentHash: string
/** When true, content is empty and will be fetched via getDocument for new/changed docs only */
contentDeferred?: boolean
/** Additional source-specific metadata */
metadata?: Record<string, unknown>
}

View File

@@ -40,6 +40,10 @@ export const AuditAction = {
CREDENTIAL_SET_INVITATION_RESENT: 'credential_set_invitation.resent',
CREDENTIAL_SET_INVITATION_REVOKED: 'credential_set_invitation.revoked',
// Connector Documents
CONNECTOR_DOCUMENT_RESTORED: 'connector_document.restored',
CONNECTOR_DOCUMENT_EXCLUDED: 'connector_document.excluded',
// Documents
DOCUMENT_UPLOADED: 'document.uploaded',
DOCUMENT_UPDATED: 'document.updated',
@@ -52,6 +56,7 @@ export const AuditAction = {
FILE_UPLOADED: 'file.uploaded',
FILE_UPDATED: 'file.updated',
FILE_DELETED: 'file.deleted',
FILE_RESTORED: 'file.restored',
// Folders
FOLDER_CREATED: 'folder.created',
@@ -67,10 +72,17 @@ export const AuditAction = {
INVITATION_ACCEPTED: 'invitation.accepted',
INVITATION_REVOKED: 'invitation.revoked',
// Knowledge Base Connectors
CONNECTOR_CREATED: 'connector.created',
CONNECTOR_UPDATED: 'connector.updated',
CONNECTOR_DELETED: 'connector.deleted',
CONNECTOR_SYNCED: 'connector.synced',
// Knowledge Bases
KNOWLEDGE_BASE_CREATED: 'knowledge_base.created',
KNOWLEDGE_BASE_UPDATED: 'knowledge_base.updated',
KNOWLEDGE_BASE_DELETED: 'knowledge_base.deleted',
KNOWLEDGE_BASE_RESTORED: 'knowledge_base.restored',
// MCP Servers
MCP_SERVER_ADDED: 'mcp_server.added',
@@ -119,6 +131,7 @@ export const AuditAction = {
TABLE_CREATED: 'table.created',
TABLE_UPDATED: 'table.updated',
TABLE_DELETED: 'table.deleted',
TABLE_RESTORED: 'table.restored',
// Templates
TEMPLATE_CREATED: 'template.created',
@@ -132,6 +145,7 @@ export const AuditAction = {
// Workflows
WORKFLOW_CREATED: 'workflow.created',
WORKFLOW_DELETED: 'workflow.deleted',
WORKFLOW_RESTORED: 'workflow.restored',
WORKFLOW_DEPLOYED: 'workflow.deployed',
WORKFLOW_UNDEPLOYED: 'workflow.undeployed',
WORKFLOW_DUPLICATED: 'workflow.duplicated',
@@ -157,6 +171,7 @@ export const AuditResourceType = {
BILLING: 'billing',
BYOK_KEY: 'byok_key',
CHAT: 'chat',
CONNECTOR: 'connector',
CREDENTIAL_SET: 'credential_set',
DOCUMENT: 'document',
ENVIRONMENT: 'environment',

View File

@@ -38,6 +38,12 @@ class ConnectorDeletedException extends Error {
const SYNC_BATCH_SIZE = 5
const MAX_PAGES = 500
const MAX_SAFE_TITLE_LENGTH = 200

/**
 * Sanitizes a document title so it can be embedded in an S3 storage key:
 * every character outside [a-zA-Z0-9.-] becomes '_', and the result is
 * truncated to MAX_SAFE_TITLE_LENGTH characters to keep the full key
 * within S3's key-length limit.
 */
function sanitizeStorageTitle(title: string): string {
  const cleaned = title.replace(/[^a-zA-Z0-9.-]/g, '_')
  return cleaned.length > MAX_SAFE_TITLE_LENGTH
    ? cleaned.slice(0, MAX_SAFE_TITLE_LENGTH)
    : cleaned
}
type KnowledgeBaseLockingTx = Pick<typeof db, 'execute' | 'select'>
type DocOp =
@@ -241,7 +247,7 @@ export async function executeSync(
const userId = kbRows[0].userId
const sourceConfig = connector.sourceConfig as Record<string, unknown>
const accessToken = await resolveAccessToken(connector, connectorConfig, userId)
let accessToken = await resolveAccessToken(connector, connectorConfig, userId)
if (!accessToken) {
throw new Error('Failed to obtain access token')
@@ -273,6 +279,8 @@ export async function executeSync(
startedAt: new Date(),
})
let syncExitedCleanly = false
try {
const externalDocs: ExternalDocument[] = []
let cursor: string | undefined
@@ -289,6 +297,11 @@ export async function executeSync(
isIncremental && connector.lastSyncAt ? new Date(connector.lastSyncAt) : undefined
for (let pageNum = 0; hasMore && pageNum < MAX_PAGES; pageNum++) {
if (pageNum > 0 && connectorConfig.auth.mode === 'oauth') {
const refreshed = await resolveAccessToken(connector, connectorConfig, userId)
if (refreshed) accessToken = refreshed
}
const page = await connectorConfig.listDocuments(
accessToken,
sourceConfig,
@@ -390,7 +403,7 @@ export async function executeSync(
continue
}
if (!extDoc.content.trim()) {
if (!extDoc.content.trim() && !extDoc.contentDeferred) {
logger.info(`Skipping empty document: ${extDoc.title}`, {
externalId: extDoc.externalId,
})
@@ -416,7 +429,54 @@ export async function executeSync(
throw new Error(`Knowledge base ${connector.knowledgeBaseId} was deleted during sync`)
}
const batch = pendingOps.slice(i, i + SYNC_BATCH_SIZE)
const rawBatch = pendingOps.slice(i, i + SYNC_BATCH_SIZE)
const deferredOps = rawBatch.filter((op) => op.extDoc.contentDeferred)
const readyOps = rawBatch.filter((op) => !op.extDoc.contentDeferred)
if (deferredOps.length > 0) {
if (connectorConfig.auth.mode === 'oauth') {
const refreshed = await resolveAccessToken(connector, connectorConfig, userId)
if (refreshed) accessToken = refreshed
}
const hydrated = await Promise.allSettled(
deferredOps.map(async (op) => {
const fullDoc = await connectorConfig.getDocument(
accessToken!,
sourceConfig,
op.extDoc.externalId,
syncContext
)
if (!fullDoc?.content.trim()) return null
return {
...op,
extDoc: {
...op.extDoc,
content: fullDoc.content,
contentHash: fullDoc.contentHash ?? op.extDoc.contentHash,
contentDeferred: false,
},
}
})
)
for (const outcome of hydrated) {
if (outcome.status === 'fulfilled' && outcome.value) {
readyOps.push(outcome.value)
} else if (outcome.status === 'rejected') {
result.docsFailed++
logger.error('Failed to hydrate deferred document', {
connectorId,
error:
outcome.reason instanceof Error ? outcome.reason.message : String(outcome.reason),
})
}
}
}
const batch = readyOps
const settled = await Promise.allSettled(
batch.map((op) => {
if (op.type === 'add') {
@@ -540,6 +600,7 @@ export async function executeSync(
)
logger.info('Sync completed', { connectorId, ...result })
syncExitedCleanly = true
return result
} catch (error) {
if (error instanceof ConnectorDeletedException) {
@@ -571,6 +632,7 @@ export async function executeSync(
}
result.error = 'Connector deleted during sync'
syncExitedCleanly = true
return result
}
@@ -610,7 +672,27 @@ export async function executeSync(
}
result.error = errorMessage
syncExitedCleanly = true
return result
} finally {
if (!syncExitedCleanly) {
try {
await db
.update(knowledgeConnector)
.set({
status: 'error',
lastSyncError: 'Sync terminated unexpectedly',
updatedAt: new Date(),
})
.where(eq(knowledgeConnector.id, connectorId))
logger.warn('Reset stale syncing status in finally block', { connectorId })
} catch (finallyError) {
logger.warn('Failed to reset syncing status in finally block', {
connectorId,
error: finallyError instanceof Error ? finallyError.message : String(finallyError),
})
}
}
}
}
@@ -630,7 +712,7 @@ async function addDocument(
}
const documentId = crypto.randomUUID()
const contentBuffer = Buffer.from(extDoc.content, 'utf-8')
const safeTitle = extDoc.title.replace(/[^a-zA-Z0-9.-]/g, '_')
const safeTitle = sanitizeStorageTitle(extDoc.title)
const customKey = `kb/${Date.now()}-${documentId}-${safeTitle}.txt`
const fileInfo = await StorageService.uploadFile({
@@ -729,7 +811,7 @@ async function updateDocument(
const oldFileUrl = existingRows[0]?.fileUrl
const contentBuffer = Buffer.from(extDoc.content, 'utf-8')
const safeTitle = extDoc.title.replace(/[^a-zA-Z0-9.-]/g, '_')
const safeTitle = sanitizeStorageTitle(extDoc.title)
const customKey = `kb/${Date.now()}-${existingDocId}-${safeTitle}.txt`
const fileInfo = await StorageService.uploadFile({

View File

@@ -47,7 +47,6 @@ const logger = createLogger('DocumentService')
const TIMEOUTS = {
OVERALL_PROCESSING: (env.KB_CONFIG_MAX_DURATION || 600) * 1000, // Default 10 minutes for KB document processing
EMBEDDINGS_API: (env.KB_CONFIG_MAX_TIMEOUT || 10000) * 18,
} as const
// Configuration for handling large documents

View File

@@ -52,8 +52,25 @@ export function isRetryableError(error: unknown): boolean {
return true
}
// Check for rate limiting in error messages
// Check for network-level errors (DNS, connection, timeout)
const errorMessage = error instanceof Error ? error.message : String(error)
const lowerMessage = errorMessage.toLowerCase()
const networkKeywords = [
'fetch failed',
'econnreset',
'econnrefused',
'etimedout',
'enetunreach',
'socket hang up',
'network error',
]
if (networkKeywords.some((keyword) => lowerMessage.includes(keyword))) {
return true
}
// Check for rate limiting in error messages
const rateLimitKeywords = [
'rate limit',
'rate_limit',
@@ -65,7 +82,7 @@ export function isRetryableError(error: unknown): boolean {
'service unavailable',
]
return rateLimitKeywords.some((keyword) => errorMessage.toLowerCase().includes(keyword))
return rateLimitKeywords.some((keyword) => lowerMessage.includes(keyword))
}
/**

View File

@@ -32,6 +32,8 @@ export const auditMock = {
CREDENTIAL_SET_INVITATION_ACCEPTED: 'credential_set_invitation.accepted',
CREDENTIAL_SET_INVITATION_RESENT: 'credential_set_invitation.resent',
CREDENTIAL_SET_INVITATION_REVOKED: 'credential_set_invitation.revoked',
CONNECTOR_DOCUMENT_RESTORED: 'connector_document.restored',
CONNECTOR_DOCUMENT_EXCLUDED: 'connector_document.excluded',
DOCUMENT_UPLOADED: 'document.uploaded',
DOCUMENT_UPDATED: 'document.updated',
DOCUMENT_DELETED: 'document.deleted',
@@ -39,6 +41,7 @@ export const auditMock = {
FILE_UPLOADED: 'file.uploaded',
FILE_UPDATED: 'file.updated',
FILE_DELETED: 'file.deleted',
FILE_RESTORED: 'file.restored',
FOLDER_CREATED: 'folder.created',
FOLDER_DELETED: 'folder.deleted',
FOLDER_DUPLICATED: 'folder.duplicated',
@@ -47,9 +50,14 @@ export const auditMock = {
FORM_DELETED: 'form.deleted',
INVITATION_ACCEPTED: 'invitation.accepted',
INVITATION_REVOKED: 'invitation.revoked',
CONNECTOR_CREATED: 'connector.created',
CONNECTOR_UPDATED: 'connector.updated',
CONNECTOR_DELETED: 'connector.deleted',
CONNECTOR_SYNCED: 'connector.synced',
KNOWLEDGE_BASE_CREATED: 'knowledge_base.created',
KNOWLEDGE_BASE_UPDATED: 'knowledge_base.updated',
KNOWLEDGE_BASE_DELETED: 'knowledge_base.deleted',
KNOWLEDGE_BASE_RESTORED: 'knowledge_base.restored',
MCP_SERVER_ADDED: 'mcp_server.added',
MCP_SERVER_UPDATED: 'mcp_server.updated',
MCP_SERVER_REMOVED: 'mcp_server.removed',
@@ -80,6 +88,7 @@ export const auditMock = {
TABLE_CREATED: 'table.created',
TABLE_UPDATED: 'table.updated',
TABLE_DELETED: 'table.deleted',
TABLE_RESTORED: 'table.restored',
TEMPLATE_CREATED: 'template.created',
TEMPLATE_UPDATED: 'template.updated',
TEMPLATE_DELETED: 'template.deleted',
@@ -87,6 +96,7 @@ export const auditMock = {
WEBHOOK_DELETED: 'webhook.deleted',
WORKFLOW_CREATED: 'workflow.created',
WORKFLOW_DELETED: 'workflow.deleted',
WORKFLOW_RESTORED: 'workflow.restored',
WORKFLOW_DEPLOYED: 'workflow.deployed',
WORKFLOW_UNDEPLOYED: 'workflow.undeployed',
WORKFLOW_DUPLICATED: 'workflow.duplicated',
@@ -104,6 +114,7 @@ export const auditMock = {
BILLING: 'billing',
BYOK_KEY: 'byok_key',
CHAT: 'chat',
CONNECTOR: 'connector',
CREDENTIAL_SET: 'credential_set',
DOCUMENT: 'document',
ENVIRONMENT: 'environment',