feat(knowledge): connectors, user exclusions, expanded tools & airtable integration

.claude/commands/add-connector.md (240 lines, new file)
@@ -0,0 +1,240 @@
---
description: Add a knowledge base connector for syncing documents from an external source
argument-hint: <service-name> [api-docs-url]
---

# Add Connector Skill

You are an expert at adding knowledge base connectors to Sim. A connector syncs documents from an external source (Confluence, Google Drive, Notion, etc.) into a knowledge base.

## Your Task

When the user asks you to create a connector:

1. Use Context7 or WebFetch to read the service's API documentation
2. Create the connector directory and config
3. Register it in the connector registry

## Directory Structure

Create files in `apps/sim/connectors/{service}/`:

```
connectors/{service}/
├── index.ts       # Barrel export
└── {service}.ts   # ConnectorConfig definition
```

## ConnectorConfig Structure

```typescript
import { createLogger } from '@sim/logger'
import { {Service}Icon } from '@/components/icons'
import type { ConnectorConfig, ExternalDocument, ExternalDocumentList } from '@/connectors/types'

const logger = createLogger('{Service}Connector')

export const {service}Connector: ConnectorConfig = {
  id: '{service}',
  name: '{Service}',
  description: 'Sync documents from {Service} into your knowledge base',
  version: '1.0.0',
  icon: {Service}Icon,

  oauth: {
    required: true,
    provider: '{service}', // Must match OAuthService in lib/oauth/types.ts
    requiredScopes: ['read:...'],
  },

  configFields: [
    // Rendered dynamically by the add-connector modal UI
    // Supports 'short-input' and 'dropdown' types
  ],

  listDocuments: async (accessToken, sourceConfig, cursor) => {
    // Paginate via cursor, extract text, compute SHA-256 hash
    // Return { documents: ExternalDocument[], nextCursor?, hasMore }
  },

  getDocument: async (accessToken, sourceConfig, externalId) => {
    // Return ExternalDocument or null
  },

  validateConfig: async (accessToken, sourceConfig) => {
    // Return { valid: true } or { valid: false, error: 'message' }
  },

  // Optional: map source metadata to semantic tag keys (translated to slots by sync engine)
  mapTags: (metadata) => {
    // Return Record<string, unknown> with keys matching tagDefinitions[].id
  },
}
```
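
To make the skeleton concrete, here is a minimal sketch of a cursor-paginated `listDocuments`. The `/api/v2/search` endpoint, the response fields, and the `extractText` helper are hypothetical stand-ins for whatever the target service actually exposes; `computeContentHash` is the helper defined in the Content Hashing section below.

```typescript
// Illustrative only: endpoint shape, field names, and extractText are assumptions.
listDocuments: async (accessToken, sourceConfig, cursor) => {
  const base = sourceConfig.domain as string
  const url = new URL(`https://${base}/api/v2/search`) // hypothetical endpoint
  if (cursor) url.searchParams.set('cursor', cursor)

  const res = await fetch(url, { headers: { Authorization: `Bearer ${accessToken}` } })
  if (!res.ok) throw new Error(`{Service} API error: ${res.status}`)
  const page = await res.json() // assumed shape: { results: [...], nextCursor?: string }

  const documents: ExternalDocument[] = []
  for (const item of page.results) {
    const content = extractText(item.body) // hypothetical HTML-to-text helper
    documents.push({
      externalId: String(item.id),
      title: item.title,
      content,
      mimeType: 'text/plain',
      contentHash: await computeContentHash(content), // see "Content Hashing" below
      sourceUrl: `https://${base}/pages/${item.id}`,  // hypothetical path shape
      metadata: { labels: item.labels, version: item.version },
    })
  }

  return { documents, nextCursor: page.nextCursor, hasMore: Boolean(page.nextCursor) }
},
```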

## ConfigField Types

The add-connector modal renders these automatically — no custom UI needed.

```typescript
// Text input
{
  id: 'domain',
  title: 'Domain',
  type: 'short-input',
  placeholder: 'yoursite.example.com',
  required: true,
}

// Dropdown (static options)
{
  id: 'contentType',
  title: 'Content Type',
  type: 'dropdown',
  required: false,
  options: [
    { label: 'Pages only', id: 'page' },
    { label: 'Blog posts only', id: 'blogpost' },
    { label: 'All content', id: 'all' },
  ],
}
```

## ExternalDocument Shape

Every document returned from `listDocuments`/`getDocument` must include:

```typescript
{
  externalId: string                  // Source-specific unique ID
  title: string                       // Document title
  content: string                     // Extracted plain text
  mimeType: 'text/plain'              // Always text/plain (content is extracted)
  contentHash: string                 // SHA-256 of content (change detection)
  sourceUrl?: string                  // Link back to original (stored on document record)
  metadata?: Record<string, unknown>  // Source-specific data (fed to mapTags)
}
```

## Content Hashing (Required)

The sync engine uses content hashes for change detection:

```typescript
async function computeContentHash(content: string): Promise<string> {
  const data = new TextEncoder().encode(content)
  const hashBuffer = await crypto.subtle.digest('SHA-256', data)
  return Array.from(new Uint8Array(hashBuffer))
    .map((b) => b.toString(16).padStart(2, '0'))
    .join('')
}
```
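
The snippet above uses the Web Crypto API, which is available as a global in Node 18+ and in edge runtimes. In a Node-only context an equivalent hex SHA-256 can be computed with `node:crypto`; this is an optional alternative, not something the sync engine requires:

```typescript
// Optional Node-only equivalent of computeContentHash (same hex SHA-256 output)
import { createHash } from 'node:crypto'

function computeContentHashNode(content: string): string {
  return createHash('sha256').update(content, 'utf8').digest('hex')
}
```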

## tagDefinitions — Declared Tag Definitions

Declare which tags the connector populates, using semantic IDs. These are shown in the add-connector modal as opt-out checkboxes.
On connector creation, slots are **dynamically assigned** via `getNextAvailableSlot` — connectors never hardcode slot names.

```typescript
tagDefinitions: [
  { id: 'labels', displayName: 'Labels', fieldType: 'text' },
  { id: 'version', displayName: 'Version', fieldType: 'number' },
  { id: 'lastModified', displayName: 'Last Modified', fieldType: 'date' },
],
```

Each entry has:

- `id`: Semantic key matching a key returned by `mapTags` (e.g. `'labels'`, `'version'`)
- `displayName`: Human-readable name shown in the UI (e.g. "Labels", "Last Modified")
- `fieldType`: `'text'` | `'number'` | `'date'` | `'boolean'` — determines which slot pool to draw from

Users can opt out of specific tags in the modal. Disabled IDs are stored in `sourceConfig.disabledTagIds`.
The assigned mapping (`semantic id → slot`) is stored in `sourceConfig.tagSlotMapping`.
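
For example, with the definitions above and no opt-outs, the stored `sourceConfig` might end up looking like this (the slot names are illustrative; they depend on which slots `getNextAvailableSlot` finds free in the knowledge base):

```json
{
  "domain": "yoursite.example.com",
  "disabledTagIds": [],
  "tagSlotMapping": {
    "labels": "tag1",
    "version": "number1",
    "lastModified": "date1"
  }
}
```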

## mapTags — Metadata to Semantic Keys

Maps source metadata to semantic tag keys. Required if `tagDefinitions` is set.
The sync engine calls this automatically and translates semantic keys to actual DB slots using the `tagSlotMapping` stored on the connector.

Return keys must match the `id` values declared in `tagDefinitions`.

```typescript
mapTags: (metadata: Record<string, unknown>): Record<string, unknown> => {
  const result: Record<string, unknown> = {}

  // Validate arrays before casting — metadata may be malformed
  const labels = Array.isArray(metadata.labels) ? (metadata.labels as string[]) : []
  if (labels.length > 0) result.labels = labels.join(', ')

  // Validate numbers — guard against NaN
  if (metadata.version != null) {
    const num = Number(metadata.version)
    if (!Number.isNaN(num)) result.version = num
  }

  // Validate dates — guard against Invalid Date
  if (typeof metadata.lastModified === 'string') {
    const date = new Date(metadata.lastModified)
    if (!Number.isNaN(date.getTime())) result.lastModified = date
  }

  return result
}
```

## sourceUrl

If `ExternalDocument.sourceUrl` is set, the sync engine stores it on the document record. Always construct the full URL (not a relative path).
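
A quick sketch of the difference (the `pages` path segment and `item.id` are hypothetical):

```typescript
// Wrong: a relative path only resolves inside the source app
sourceUrl: `/pages/${item.id}`

// Right: an absolute URL built from the configured domain
sourceUrl: `https://${sourceConfig.domain}/pages/${item.id}`
```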

## Sync Engine Behavior (Do Not Modify)

The sync engine (`lib/knowledge/connectors/sync-engine.ts`) is connector-agnostic. It:

1. Calls `listDocuments` with pagination until `hasMore` is false
2. Compares `contentHash` to detect new/changed/unchanged documents
3. Stores `sourceUrl` and calls `mapTags` on insert/update automatically
4. Handles soft-delete of removed documents

You never need to modify the sync engine when adding a connector.
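
For orientation only, the loop the engine runs is roughly the following shape. This is a simplified sketch, not the actual source; `findByExternalId`, `insertDocument`, and `updateDocument` are hypothetical stand-ins for its internal DB helpers.

```typescript
// Simplified sketch of the sync engine's loop; do not copy into a connector.
let cursor: string | undefined
let hasMore = true
while (hasMore) {
  const page = await connector.listDocuments(accessToken, sourceConfig, cursor)
  for (const doc of page.documents) {
    const existing = await findByExternalId(doc.externalId) // hypothetical lookup
    if (!existing) {
      await insertDocument(doc) // new document
    } else if (existing.contentHash !== doc.contentHash) {
      await updateDocument(doc) // changed document
    }
    // unchanged documents are skipped
  }
  cursor = page.nextCursor
  hasMore = page.hasMore
}
// Documents the source no longer returns are soft-deleted afterwards.
```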

## OAuth Credential Reuse

Connectors reuse the existing OAuth infrastructure. The `oauth.provider` must match an `OAuthService` from `apps/sim/lib/oauth/types.ts`. Check existing providers before adding a new one.

## Icon

The `icon` field on `ConnectorConfig` is used throughout the UI — in the connector list, the add-connector modal, and as the document icon in the knowledge base table (replacing the generic file type icon for connector-sourced documents). The icon is read from `CONNECTOR_REGISTRY[connectorType].icon` at runtime — no separate icon map to maintain.

If the service already has an icon in `apps/sim/components/icons.tsx` (from a tool integration), reuse it. Otherwise, ask the user to provide the SVG.

## Registering

Add one line to `apps/sim/connectors/registry.ts`:

```typescript
import { {service}Connector } from '@/connectors/{service}'

export const CONNECTOR_REGISTRY: ConnectorRegistry = {
  // ... existing connectors ...
  {service}: {service}Connector,
}
```

## Reference Implementation

See `apps/sim/connectors/confluence/confluence.ts` for a complete example with:

- Multiple config field types (text + dropdown)
- Label fetching and CQL search filtering
- Blogpost + page content types
- `mapTags` mapping labels, version, and dates to semantic keys

## Checklist

- [ ] Created `connectors/{service}/{service}.ts` with full ConnectorConfig
- [ ] Created `connectors/{service}/index.ts` barrel export
- [ ] `oauth.provider` matches an existing OAuthService in `lib/oauth/types.ts`
- [ ] `listDocuments` handles pagination and computes content hashes
- [ ] `sourceUrl` set on each ExternalDocument (full URL, not relative)
- [ ] `metadata` includes source-specific data for tag mapping
- [ ] `tagDefinitions` declared for each semantic key returned by `mapTags`
- [ ] `mapTags` implemented if the source has useful metadata (labels, dates, versions)
- [ ] `validateConfig` verifies the source is accessible
- [ ] All optional config fields validated in `validateConfig`
- [ ] Icon exists in `components/icons.tsx` (or asked the user to provide the SVG)
- [ ] Registered in `connectors/registry.ts`

@@ -130,4 +130,37 @@ Update multiple existing records in an Airtable table

| `records` | json | Array of updated Airtable records |
| `metadata` | json | Operation metadata including record count and updated record IDs |

### `airtable_list_bases`

List all bases the authenticated user has access to

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `bases` | json | Array of Airtable bases with id, name, and permissionLevel |
| `metadata` | json | Operation metadata including total bases count |

### `airtable_get_base_schema`

Get the schema of all tables, fields, and views in an Airtable base

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `baseId` | string | Yes | Airtable base ID \(starts with "app", e.g., "appXXXXXXXXXXXXXX"\) |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `tables` | json | Array of table schemas with fields and views |
| `metadata` | json | Operation metadata including total tables count |

@@ -29,7 +29,7 @@ In Sim, the Knowledge Base block enables your agents to perform intelligent sema

## Usage Instructions

-Integrate Knowledge into the workflow. Can search, upload chunks, and create documents.
+Integrate Knowledge into the workflow. Perform full CRUD operations on documents, chunks, and tags.

@@ -126,4 +126,161 @@ Create a new document in a knowledge base

| `message` | string | Success or error message describing the operation result |
| `documentId` | string | ID of the created document |

### `knowledge_list_tags`

List all tag definitions for a knowledge base

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `knowledgeBaseId` | string | Yes | ID of the knowledge base to list tags for |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `knowledgeBaseId` | string | ID of the knowledge base |
| `tags` | array | Array of tag definitions for the knowledge base |
| ↳ `id` | string | Tag definition ID |
| ↳ `tagSlot` | string | Internal tag slot \(e.g. tag1, number1\) |
| ↳ `displayName` | string | Human-readable tag name |
| ↳ `fieldType` | string | Tag field type \(text, number, date, boolean\) |
| ↳ `createdAt` | string | Creation timestamp |
| ↳ `updatedAt` | string | Last update timestamp |
| `totalTags` | number | Total number of tag definitions |
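
For illustration, a response might look like this (the IDs, slot name, and timestamps are invented):

```json
{
  "knowledgeBaseId": "kb_123",
  "tags": [
    {
      "id": "tagdef_1",
      "tagSlot": "tag1",
      "displayName": "Labels",
      "fieldType": "text",
      "createdAt": "2025-01-01T00:00:00Z",
      "updatedAt": "2025-01-01T00:00:00Z"
    }
  ],
  "totalTags": 1
}
```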

### `knowledge_list_documents`

List documents in a knowledge base with optional filtering, search, and pagination

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `knowledgeBaseId` | string | Yes | ID of the knowledge base to list documents from |
| `search` | string | No | Search query to filter documents by filename |
| `enabledFilter` | string | No | Filter by enabled status: "all", "enabled", or "disabled" |
| `limit` | number | No | Maximum number of documents to return \(default: 50\) |
| `offset` | number | No | Number of documents to skip for pagination \(default: 0\) |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `knowledgeBaseId` | string | ID of the knowledge base |
| `documents` | array | Array of documents in the knowledge base |
| ↳ `id` | string | Document ID |
| ↳ `filename` | string | Document filename |
| ↳ `fileSize` | number | File size in bytes |
| ↳ `mimeType` | string | MIME type of the document |
| ↳ `enabled` | boolean | Whether the document is enabled |
| ↳ `processingStatus` | string | Processing status \(pending, processing, completed, failed\) |
| ↳ `chunkCount` | number | Number of chunks in the document |
| ↳ `tokenCount` | number | Total token count across chunks |
| ↳ `uploadedAt` | string | Upload timestamp |
| ↳ `updatedAt` | string | Last update timestamp |
| `totalDocuments` | number | Total number of documents matching the filter |
| `limit` | number | Page size used |
| `offset` | number | Offset used for pagination |

### `knowledge_delete_document`

Delete a document from a knowledge base

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `knowledgeBaseId` | string | Yes | ID of the knowledge base containing the document |
| `documentId` | string | Yes | ID of the document to delete |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `documentId` | string | ID of the deleted document |
| `message` | string | Confirmation message |

### `knowledge_list_chunks`

List chunks for a document in a knowledge base with optional filtering and pagination

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `knowledgeBaseId` | string | Yes | ID of the knowledge base |
| `documentId` | string | Yes | ID of the document to list chunks from |
| `search` | string | No | Search query to filter chunks by content |
| `enabled` | string | No | Filter by enabled status: "true", "false", or "all" \(default: "all"\) |
| `limit` | number | No | Maximum number of chunks to return \(1-100, default: 50\) |
| `offset` | number | No | Number of chunks to skip for pagination \(default: 0\) |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `knowledgeBaseId` | string | ID of the knowledge base |
| `documentId` | string | ID of the document |
| `chunks` | array | Array of chunks in the document |
| ↳ `id` | string | Chunk ID |
| ↳ `chunkIndex` | number | Index of the chunk within the document |
| ↳ `content` | string | Chunk text content |
| ↳ `contentLength` | number | Content length in characters |
| ↳ `tokenCount` | number | Token count for the chunk |
| ↳ `enabled` | boolean | Whether the chunk is enabled |
| ↳ `createdAt` | string | Creation timestamp |
| ↳ `updatedAt` | string | Last update timestamp |
| `totalChunks` | number | Total number of chunks matching the filter |
| `limit` | number | Page size used |
| `offset` | number | Offset used for pagination |

### `knowledge_update_chunk`

Update the content or enabled status of a chunk in a knowledge base

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `knowledgeBaseId` | string | Yes | ID of the knowledge base |
| `documentId` | string | Yes | ID of the document containing the chunk |
| `chunkId` | string | Yes | ID of the chunk to update |
| `content` | string | No | New content for the chunk |
| `enabled` | boolean | No | Whether the chunk should be enabled or disabled |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `documentId` | string | ID of the parent document |
| `id` | string | Chunk ID |
| `chunkIndex` | number | Index of the chunk within the document |
| `content` | string | Updated chunk content |
| `contentLength` | number | Content length in characters |
| `tokenCount` | number | Token count for the chunk |
| `enabled` | boolean | Whether the chunk is enabled |
| `updatedAt` | string | Last update timestamp |

### `knowledge_delete_chunk`

Delete a chunk from a document in a knowledge base

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `knowledgeBaseId` | string | Yes | ID of the knowledge base |
| `documentId` | string | Yes | ID of the document containing the chunk |
| `chunkId` | string | Yes | ID of the chunk to delete |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `chunkId` | string | ID of the deleted chunk |
| `documentId` | string | ID of the parent document |
| `message` | string | Confirmation message |
@@ -0,0 +1,188 @@

import { db } from '@sim/db'
import { document, knowledgeConnector } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq, inArray, isNull } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { checkKnowledgeBaseAccess, checkKnowledgeBaseWriteAccess } from '@/app/api/knowledge/utils'

const logger = createLogger('ConnectorDocumentsAPI')

type RouteParams = { params: Promise<{ id: string; connectorId: string }> }

/**
 * GET /api/knowledge/[id]/connectors/[connectorId]/documents
 * Returns documents for a connector, optionally including user-excluded ones.
 */
export async function GET(request: NextRequest, { params }: RouteParams) {
  const requestId = generateRequestId()
  const { id: knowledgeBaseId, connectorId } = await params

  try {
    const auth = await checkSessionOrInternalAuth(request, { requireWorkflowId: false })
    if (!auth.success || !auth.userId) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    const accessCheck = await checkKnowledgeBaseAccess(knowledgeBaseId, auth.userId)
    if (!accessCheck.hasAccess) {
      const status = 'notFound' in accessCheck && accessCheck.notFound ? 404 : 401
      return NextResponse.json({ error: status === 404 ? 'Not found' : 'Unauthorized' }, { status })
    }

    const connectorRows = await db
      .select({ id: knowledgeConnector.id })
      .from(knowledgeConnector)
      .where(
        and(
          eq(knowledgeConnector.id, connectorId),
          eq(knowledgeConnector.knowledgeBaseId, knowledgeBaseId),
          isNull(knowledgeConnector.deletedAt)
        )
      )
      .limit(1)

    if (connectorRows.length === 0) {
      return NextResponse.json({ error: 'Connector not found' }, { status: 404 })
    }

    const includeExcluded = request.nextUrl.searchParams.get('includeExcluded') === 'true'

    const activeDocs = await db
      .select({
        id: document.id,
        filename: document.filename,
        externalId: document.externalId,
        sourceUrl: document.sourceUrl,
        enabled: document.enabled,
        userExcluded: document.userExcluded,
        uploadedAt: document.uploadedAt,
        processingStatus: document.processingStatus,
      })
      .from(document)
      .where(
        and(
          eq(document.connectorId, connectorId),
          isNull(document.deletedAt),
          eq(document.userExcluded, false)
        )
      )
      .orderBy(document.filename)

    const excludedDocs = includeExcluded
      ? await db
          .select({
            id: document.id,
            filename: document.filename,
            externalId: document.externalId,
            sourceUrl: document.sourceUrl,
            enabled: document.enabled,
            userExcluded: document.userExcluded,
            uploadedAt: document.uploadedAt,
            processingStatus: document.processingStatus,
          })
          .from(document)
          .where(and(eq(document.connectorId, connectorId), eq(document.userExcluded, true)))
          .orderBy(document.filename)
      : []

    const docs = [...activeDocs, ...excludedDocs]
    const activeCount = activeDocs.length
    const excludedCount = excludedDocs.length

    return NextResponse.json({
      success: true,
      data: {
        documents: docs,
        counts: { active: activeCount, excluded: excludedCount },
      },
    })
  } catch (error) {
    logger.error(`[${requestId}] Error fetching connector documents`, error)
    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}

const PatchSchema = z.object({
  operation: z.enum(['restore', 'exclude']),
  documentIds: z.array(z.string()).min(1),
})

/**
 * PATCH /api/knowledge/[id]/connectors/[connectorId]/documents
 * Restore or exclude connector documents.
 */
export async function PATCH(request: NextRequest, { params }: RouteParams) {
  const requestId = generateRequestId()
  const { id: knowledgeBaseId, connectorId } = await params

  try {
    const auth = await checkSessionOrInternalAuth(request, { requireWorkflowId: false })
    if (!auth.success || !auth.userId) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    const writeCheck = await checkKnowledgeBaseWriteAccess(knowledgeBaseId, auth.userId)
    if (!writeCheck.hasAccess) {
      const status = 'notFound' in writeCheck && writeCheck.notFound ? 404 : 401
      return NextResponse.json({ error: status === 404 ? 'Not found' : 'Unauthorized' }, { status })
    }

    const body = await request.json()
    const parsed = PatchSchema.safeParse(body)
    if (!parsed.success) {
      return NextResponse.json(
        { error: 'Invalid request', details: parsed.error.flatten() },
        { status: 400 }
      )
    }

    const { operation, documentIds } = parsed.data

    if (operation === 'restore') {
      const updated = await db
        .update(document)
        .set({ userExcluded: false, deletedAt: null, enabled: true })
        .where(
          and(
            eq(document.connectorId, connectorId),
            inArray(document.id, documentIds),
            eq(document.userExcluded, true)
          )
        )
        .returning({ id: document.id })

      logger.info(`[${requestId}] Restored ${updated.length} excluded documents`, { connectorId })

      return NextResponse.json({
        success: true,
        data: { restoredCount: updated.length, documentIds: updated.map((d) => d.id) },
      })
    }

    const updated = await db
      .update(document)
      .set({ userExcluded: true, deletedAt: new Date() })
      .where(
        and(
          eq(document.connectorId, connectorId),
          inArray(document.id, documentIds),
          eq(document.userExcluded, false),
          isNull(document.deletedAt)
        )
      )
      .returning({ id: document.id })

    logger.info(`[${requestId}] Excluded ${updated.length} documents`, { connectorId })

    return NextResponse.json({
      success: true,
      data: { excludedCount: updated.length, documentIds: updated.map((d) => d.id) },
    })
  } catch (error) {
    logger.error(`[${requestId}] Error updating connector documents`, error)
    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}
@@ -0,0 +1,248 @@

import { db } from '@sim/db'
import { knowledgeBase, knowledgeConnector, knowledgeConnectorSyncLog } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, desc, eq, isNull } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { refreshAccessTokenIfNeeded } from '@/app/api/auth/oauth/utils'
import { checkKnowledgeBaseAccess, checkKnowledgeBaseWriteAccess } from '@/app/api/knowledge/utils'
import { CONNECTOR_REGISTRY } from '@/connectors/registry'

const logger = createLogger('KnowledgeConnectorByIdAPI')

type RouteParams = { params: Promise<{ id: string; connectorId: string }> }

const UpdateConnectorSchema = z.object({
  sourceConfig: z.record(z.unknown()).optional(),
  syncIntervalMinutes: z.number().int().min(0).optional(),
  status: z.enum(['active', 'paused']).optional(),
})

/**
 * GET /api/knowledge/[id]/connectors/[connectorId] - Get connector details with recent sync logs
 */
export async function GET(request: NextRequest, { params }: RouteParams) {
  const requestId = generateRequestId()
  const { id: knowledgeBaseId, connectorId } = await params

  try {
    const auth = await checkSessionOrInternalAuth(request, { requireWorkflowId: false })
    if (!auth.success || !auth.userId) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    const accessCheck = await checkKnowledgeBaseAccess(knowledgeBaseId, auth.userId)
    if (!accessCheck.hasAccess) {
      const status = 'notFound' in accessCheck && accessCheck.notFound ? 404 : 401
      return NextResponse.json({ error: status === 404 ? 'Not found' : 'Unauthorized' }, { status })
    }

    const connectorRows = await db
      .select()
      .from(knowledgeConnector)
      .where(
        and(
          eq(knowledgeConnector.id, connectorId),
          eq(knowledgeConnector.knowledgeBaseId, knowledgeBaseId),
          isNull(knowledgeConnector.deletedAt)
        )
      )
      .limit(1)

    if (connectorRows.length === 0) {
      return NextResponse.json({ error: 'Connector not found' }, { status: 404 })
    }

    const syncLogs = await db
      .select()
      .from(knowledgeConnectorSyncLog)
      .where(eq(knowledgeConnectorSyncLog.connectorId, connectorId))
      .orderBy(desc(knowledgeConnectorSyncLog.startedAt))
      .limit(10)

    return NextResponse.json({
      success: true,
      data: {
        ...connectorRows[0],
        syncLogs,
      },
    })
  } catch (error) {
    logger.error(`[${requestId}] Error fetching connector`, error)
    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}

/**
 * PATCH /api/knowledge/[id]/connectors/[connectorId] - Update a connector
 */
export async function PATCH(request: NextRequest, { params }: RouteParams) {
  const requestId = generateRequestId()
  const { id: knowledgeBaseId, connectorId } = await params

  try {
    const auth = await checkSessionOrInternalAuth(request, { requireWorkflowId: false })
    if (!auth.success || !auth.userId) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    const writeCheck = await checkKnowledgeBaseWriteAccess(knowledgeBaseId, auth.userId)
    if (!writeCheck.hasAccess) {
      const status = 'notFound' in writeCheck && writeCheck.notFound ? 404 : 401
      return NextResponse.json({ error: status === 404 ? 'Not found' : 'Unauthorized' }, { status })
    }

    const body = await request.json()
    const parsed = UpdateConnectorSchema.safeParse(body)
    if (!parsed.success) {
      return NextResponse.json(
        { error: 'Invalid request', details: parsed.error.flatten() },
        { status: 400 }
      )
    }

    if (parsed.data.sourceConfig !== undefined) {
      const existingRows = await db
        .select()
        .from(knowledgeConnector)
        .where(
          and(
            eq(knowledgeConnector.id, connectorId),
            eq(knowledgeConnector.knowledgeBaseId, knowledgeBaseId),
            isNull(knowledgeConnector.deletedAt)
          )
        )
        .limit(1)

      if (existingRows.length === 0) {
        return NextResponse.json({ error: 'Connector not found' }, { status: 404 })
      }

      const existing = existingRows[0]
      const connectorConfig = CONNECTOR_REGISTRY[existing.connectorType]

      if (!connectorConfig) {
        return NextResponse.json(
          { error: `Unknown connector type: ${existing.connectorType}` },
          { status: 400 }
        )
      }

      const kbRows = await db
        .select({ userId: knowledgeBase.userId })
        .from(knowledgeBase)
        .where(eq(knowledgeBase.id, knowledgeBaseId))
        .limit(1)

      if (kbRows.length === 0) {
        return NextResponse.json({ error: 'Knowledge base not found' }, { status: 404 })
      }

      const accessToken = await refreshAccessTokenIfNeeded(
        existing.credentialId,
        kbRows[0].userId,
        `patch-${connectorId}`
      )

      if (!accessToken) {
        return NextResponse.json(
          { error: 'Failed to refresh access token. Please reconnect your account.' },
          { status: 401 }
        )
      }

      const validation = await connectorConfig.validateConfig(accessToken, parsed.data.sourceConfig)
      if (!validation.valid) {
        return NextResponse.json(
          { error: validation.error || 'Invalid source configuration' },
          { status: 400 }
        )
      }
    }

    const updates: Record<string, unknown> = { updatedAt: new Date() }
    if (parsed.data.sourceConfig !== undefined) {
      updates.sourceConfig = parsed.data.sourceConfig
    }
    if (parsed.data.syncIntervalMinutes !== undefined) {
      updates.syncIntervalMinutes = parsed.data.syncIntervalMinutes
      if (parsed.data.syncIntervalMinutes > 0) {
        updates.nextSyncAt = new Date(Date.now() + parsed.data.syncIntervalMinutes * 60 * 1000)
      } else {
        updates.nextSyncAt = null
      }
    }
    if (parsed.data.status !== undefined) {
      updates.status = parsed.data.status
    }

    await db
      .update(knowledgeConnector)
      .set(updates)
      .where(
        and(
          eq(knowledgeConnector.id, connectorId),
          eq(knowledgeConnector.knowledgeBaseId, knowledgeBaseId),
          isNull(knowledgeConnector.deletedAt)
        )
      )

    const updated = await db
      .select()
      .from(knowledgeConnector)
      .where(
        and(
          eq(knowledgeConnector.id, connectorId),
          eq(knowledgeConnector.knowledgeBaseId, knowledgeBaseId),
          isNull(knowledgeConnector.deletedAt)
        )
      )
      .limit(1)

    return NextResponse.json({ success: true, data: updated[0] })
  } catch (error) {
    logger.error(`[${requestId}] Error updating connector`, error)
    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}

/**
 * DELETE /api/knowledge/[id]/connectors/[connectorId] - Soft-delete a connector
 */
export async function DELETE(request: NextRequest, { params }: RouteParams) {
  const requestId = generateRequestId()
  const { id: knowledgeBaseId, connectorId } = await params

  try {
    const auth = await checkSessionOrInternalAuth(request, { requireWorkflowId: false })
    if (!auth.success || !auth.userId) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    const writeCheck = await checkKnowledgeBaseWriteAccess(knowledgeBaseId, auth.userId)
    if (!writeCheck.hasAccess) {
      const status = 'notFound' in writeCheck && writeCheck.notFound ? 404 : 401
      return NextResponse.json({ error: status === 404 ? 'Not found' : 'Unauthorized' }, { status })
    }

    await db
      .update(knowledgeConnector)
      .set({ deletedAt: new Date(), status: 'paused', updatedAt: new Date() })
      .where(
        and(
          eq(knowledgeConnector.id, connectorId),
          eq(knowledgeConnector.knowledgeBaseId, knowledgeBaseId),
          isNull(knowledgeConnector.deletedAt)
        )
      )

    logger.info(`[${requestId}] Soft-deleted connector ${connectorId}`)

    return NextResponse.json({ success: true })
  } catch (error) {
    logger.error(`[${requestId}] Error deleting connector`, error)
    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}
@@ -0,0 +1,71 @@

import { db } from '@sim/db'
import { knowledgeConnector } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq, isNull } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { dispatchSync } from '@/lib/knowledge/connectors/sync-engine'
import { checkKnowledgeBaseWriteAccess } from '@/app/api/knowledge/utils'

const logger = createLogger('ConnectorManualSyncAPI')

type RouteParams = { params: Promise<{ id: string; connectorId: string }> }

/**
 * POST /api/knowledge/[id]/connectors/[connectorId]/sync - Trigger a manual sync
 */
export async function POST(request: NextRequest, { params }: RouteParams) {
  const requestId = generateRequestId()
  const { id: knowledgeBaseId, connectorId } = await params

  try {
    const auth = await checkSessionOrInternalAuth(request, { requireWorkflowId: false })
    if (!auth.success || !auth.userId) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    const writeCheck = await checkKnowledgeBaseWriteAccess(knowledgeBaseId, auth.userId)
    if (!writeCheck.hasAccess) {
      const status = 'notFound' in writeCheck && writeCheck.notFound ? 404 : 401
      return NextResponse.json({ error: status === 404 ? 'Not found' : 'Unauthorized' }, { status })
    }

    const connectorRows = await db
      .select()
      .from(knowledgeConnector)
      .where(
        and(
          eq(knowledgeConnector.id, connectorId),
          eq(knowledgeConnector.knowledgeBaseId, knowledgeBaseId),
          isNull(knowledgeConnector.deletedAt)
        )
      )
      .limit(1)

    if (connectorRows.length === 0) {
      return NextResponse.json({ error: 'Connector not found' }, { status: 404 })
    }

    if (connectorRows[0].status === 'syncing') {
      return NextResponse.json({ error: 'Sync already in progress' }, { status: 409 })
    }

    logger.info(`[${requestId}] Manual sync triggered for connector ${connectorId}`)

    dispatchSync(connectorId, { requestId }).catch((error) => {
      logger.error(
        `[${requestId}] Failed to dispatch manual sync for connector ${connectorId}`,
        error
      )
    })

    return NextResponse.json({
      success: true,
      message: 'Sync triggered',
    })
  } catch (error) {
    logger.error(`[${requestId}] Error triggering manual sync`, error)
    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}
apps/sim/app/api/knowledge/[id]/connectors/route.ts (193 lines, new file)
@@ -0,0 +1,193 @@

import { db } from '@sim/db'
import { knowledgeConnector } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, desc, eq, isNull } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { dispatchSync } from '@/lib/knowledge/connectors/sync-engine'
import { createTagDefinition, getNextAvailableSlot } from '@/lib/knowledge/tags/service'
import { getCredential } from '@/app/api/auth/oauth/utils'
import { checkKnowledgeBaseAccess, checkKnowledgeBaseWriteAccess } from '@/app/api/knowledge/utils'
import { CONNECTOR_REGISTRY } from '@/connectors/registry'

const logger = createLogger('KnowledgeConnectorsAPI')

const CreateConnectorSchema = z.object({
  connectorType: z.string().min(1),
  credentialId: z.string().min(1),
  sourceConfig: z.record(z.unknown()),
  syncIntervalMinutes: z.number().int().min(0).default(1440),
})

/**
 * GET /api/knowledge/[id]/connectors - List connectors for a knowledge base
 */
export async function GET(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
  const requestId = generateRequestId()
  const { id: knowledgeBaseId } = await params

  try {
    const auth = await checkSessionOrInternalAuth(request, { requireWorkflowId: false })
    if (!auth.success || !auth.userId) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    const accessCheck = await checkKnowledgeBaseAccess(knowledgeBaseId, auth.userId)
    if (!accessCheck.hasAccess) {
      const status = 'notFound' in accessCheck && accessCheck.notFound ? 404 : 401
      return NextResponse.json({ error: status === 404 ? 'Not found' : 'Unauthorized' }, { status })
    }

    const connectors = await db
      .select()
      .from(knowledgeConnector)
      .where(
        and(
          eq(knowledgeConnector.knowledgeBaseId, knowledgeBaseId),
          isNull(knowledgeConnector.deletedAt)
        )
      )
      .orderBy(desc(knowledgeConnector.createdAt))

    return NextResponse.json({ success: true, data: connectors })
  } catch (error) {
    logger.error(`[${requestId}] Error listing connectors`, error)
    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}

/**
 * POST /api/knowledge/[id]/connectors - Create a new connector
 */
export async function POST(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
  const requestId = generateRequestId()
  const { id: knowledgeBaseId } = await params

  try {
    const auth = await checkSessionOrInternalAuth(request, { requireWorkflowId: false })
    if (!auth.success || !auth.userId) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    const writeCheck = await checkKnowledgeBaseWriteAccess(knowledgeBaseId, auth.userId)
    if (!writeCheck.hasAccess) {
      const status = 'notFound' in writeCheck && writeCheck.notFound ? 404 : 401
      return NextResponse.json({ error: status === 404 ? 'Not found' : 'Unauthorized' }, { status })
    }

    const body = await request.json()
    const parsed = CreateConnectorSchema.safeParse(body)
    if (!parsed.success) {
      return NextResponse.json(
        { error: 'Invalid request', details: parsed.error.flatten() },
        { status: 400 }
      )
    }

    const { connectorType, credentialId, sourceConfig, syncIntervalMinutes } = parsed.data

    const connectorConfig = CONNECTOR_REGISTRY[connectorType]
    if (!connectorConfig) {
      return NextResponse.json(
        { error: `Unknown connector type: ${connectorType}` },
        { status: 400 }
      )
    }

    const credential = await getCredential(requestId, credentialId, auth.userId)
    if (!credential) {
      return NextResponse.json({ error: 'Credential not found' }, { status: 400 })
    }

    if (!credential.accessToken) {
      return NextResponse.json(
        { error: 'Credential has no access token. Please reconnect your account.' },
        { status: 400 }
      )
    }

    const validation = await connectorConfig.validateConfig(credential.accessToken, sourceConfig)
    if (!validation.valid) {
      return NextResponse.json(
        { error: validation.error || 'Invalid source configuration' },
        { status: 400 }
      )
    }

    let finalSourceConfig: Record<string, unknown> = sourceConfig
    if (connectorConfig.tagDefinitions?.length) {
      const disabledIds = new Set((sourceConfig.disabledTagIds as string[] | undefined) ?? [])
      const enabledDefs = connectorConfig.tagDefinitions.filter((td) => !disabledIds.has(td.id))

      const tagSlotMapping: Record<string, string> = {}
      const skippedTags: string[] = []
      for (const td of enabledDefs) {
        const slot = await getNextAvailableSlot(knowledgeBaseId, td.fieldType)
        if (!slot) {
          skippedTags.push(td.displayName)
          logger.warn(`[${requestId}] No available ${td.fieldType} slots for "${td.displayName}"`)
          continue
        }
        await createTagDefinition(
          {
            knowledgeBaseId,
            tagSlot: slot,
            displayName: td.displayName,
            fieldType: td.fieldType,
          },
          requestId
        )
        tagSlotMapping[td.id] = slot
      }

      if (skippedTags.length > 0 && Object.keys(tagSlotMapping).length === 0) {
        return NextResponse.json(
          { error: `No available tag slots. Could not assign: ${skippedTags.join(', ')}` },
          { status: 422 }
        )
      }

      finalSourceConfig = { ...sourceConfig, tagSlotMapping }
    }

    const now = new Date()
    const connectorId = crypto.randomUUID()
    const nextSyncAt =
      syncIntervalMinutes > 0 ? new Date(now.getTime() + syncIntervalMinutes * 60 * 1000) : null

    await db.insert(knowledgeConnector).values({
      id: connectorId,
      knowledgeBaseId,
      connectorType,
      credentialId,
      sourceConfig: finalSourceConfig,
      syncIntervalMinutes,
      status: 'active',
      nextSyncAt,
      createdAt: now,
      updatedAt: now,
    })

    logger.info(`[${requestId}] Created connector ${connectorId} for KB ${knowledgeBaseId}`)

    dispatchSync(connectorId, { requestId }).catch((error) => {
      logger.error(
        `[${requestId}] Failed to dispatch initial sync for connector ${connectorId}`,
        error
      )
    })

    const created = await db
      .select()
      .from(knowledgeConnector)
      .where(eq(knowledgeConnector.id, connectorId))
      .limit(1)

    return NextResponse.json({ success: true, data: created[0] }, { status: 201 })
  } catch (error) {
    logger.error(`[${requestId}] Error creating connector`, error)
    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}
@@ -12,6 +12,7 @@ import {
  getDocuments,
  getProcessingConfig,
  processDocumentsWithQueue,
  type TagFilterCondition,
} from '@/lib/knowledge/documents/service'
import type { DocumentSortField, SortOrder } from '@/lib/knowledge/documents/types'
import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils'
@@ -130,6 +131,21 @@ export async function GET(req: NextRequest, { params }: { params: Promise<{ id:
        ? (sortOrderParam as SortOrder)
        : undefined

  let tagFilters: TagFilterCondition[] | undefined
  const tagFiltersParam = url.searchParams.get('tagFilters')
  if (tagFiltersParam) {
    try {
      const parsed = JSON.parse(tagFiltersParam)
      if (Array.isArray(parsed)) {
        tagFilters = parsed.filter(
          (f: TagFilterCondition) => f.tagSlot && f.operator && f.value !== undefined
        )
      }
    } catch {
      logger.warn(`[${requestId}] Invalid tagFilters param`)
    }
  }

  const result = await getDocuments(
    knowledgeBaseId,
    {
@@ -139,6 +155,7 @@ export async function GET(req: NextRequest, { params }: { params: Promise<{ id:
      offset,
      ...(sortBy && { sortBy }),
      ...(sortOrder && { sortOrder }),
      tagFilters,
    },
    requestId
  )

apps/sim/app/api/knowledge/connectors/sync/route.ts (68 lines, new file)
@@ -0,0 +1,68 @@

import { db } from '@sim/db'
import { knowledgeConnector } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq, isNull, lte } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { verifyCronAuth } from '@/lib/auth/internal'
import { generateRequestId } from '@/lib/core/utils/request'
import { dispatchSync } from '@/lib/knowledge/connectors/sync-engine'

export const dynamic = 'force-dynamic'

const logger = createLogger('ConnectorSyncSchedulerAPI')

/**
 * Cron endpoint that checks for connectors due for sync and dispatches sync jobs.
 * Should be called every 5 minutes by an external cron service.
 */
export async function GET(request: NextRequest) {
  const requestId = generateRequestId()
  logger.info(`[${requestId}] Connector sync scheduler triggered`)

  const authError = verifyCronAuth(request, 'Connector sync scheduler')
  if (authError) {
    return authError
  }

  try {
    const now = new Date()

    const dueConnectors = await db
      .select({
        id: knowledgeConnector.id,
      })
      .from(knowledgeConnector)
      .where(
        and(
          eq(knowledgeConnector.status, 'active'),
          lte(knowledgeConnector.nextSyncAt, now),
          isNull(knowledgeConnector.deletedAt)
        )
      )

    logger.info(`[${requestId}] Found ${dueConnectors.length} connectors due for sync`)

    if (dueConnectors.length === 0) {
      return NextResponse.json({
        success: true,
        message: 'No connectors due for sync',
        count: 0,
      })
    }

    for (const connector of dueConnectors) {
      dispatchSync(connector.id, { requestId }).catch((error) => {
        logger.error(`[${requestId}] Failed to dispatch sync for connector ${connector.id}`, error)
      })
    }

    return NextResponse.json({
      success: true,
      message: `Dispatched ${dueConnectors.length} connector sync(s)`,
      count: dueConnectors.length,
    })
  } catch (error) {
    logger.error(`[${requestId}] Connector sync scheduler error`, error)
    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}
@@ -13,7 +13,7 @@ import {
  Textarea,
} from '@/components/emcn'
import type { DocumentData } from '@/lib/knowledge/types'
-import { useCreateChunk } from '@/hooks/queries/knowledge'
+import { useCreateChunk } from '@/hooks/queries/kb/knowledge'

const logger = createLogger('CreateChunkModal')
@@ -2,7 +2,7 @@

import { Button, Modal, ModalBody, ModalContent, ModalFooter, ModalHeader } from '@/components/emcn'
import type { ChunkData } from '@/lib/knowledge/types'
-import { useDeleteChunk } from '@/hooks/queries/knowledge'
+import { useDeleteChunk } from '@/hooks/queries/kb/knowledge'

interface DeleteChunkModalProps {
  chunk: ChunkData | null
@@ -25,7 +25,7 @@ import {
} from '@/hooks/kb/use-knowledge-base-tag-definitions'
import { useNextAvailableSlot } from '@/hooks/kb/use-next-available-slot'
import { type TagDefinitionInput, useTagDefinitions } from '@/hooks/kb/use-tag-definitions'
-import { useUpdateDocumentTags } from '@/hooks/queries/knowledge'
+import { useUpdateDocumentTags } from '@/hooks/queries/kb/knowledge'

const logger = createLogger('DocumentTagsModal')
@@ -18,7 +18,7 @@ import {
import type { ChunkData, DocumentData } from '@/lib/knowledge/types'
import { getAccurateTokenCount, getTokenStrings } from '@/lib/tokenization/estimators'
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
-import { useUpdateChunk } from '@/hooks/queries/knowledge'
+import { useUpdateChunk } from '@/hooks/queries/kb/knowledge'

const logger = createLogger('EditChunkModal')
@@ -4,6 +4,7 @@ import { startTransition, useCallback, useEffect, useRef, useState } from 'react
import { createLogger } from '@sim/logger'
import { useQueryClient } from '@tanstack/react-query'
import {
  ChevronDown,
  ChevronLeft,
  ChevronRight,
  Circle,
@@ -24,6 +25,10 @@ import {
  ModalContent,
  ModalFooter,
  ModalHeader,
  Popover,
  PopoverContent,
  PopoverItem,
  PopoverTrigger,
  Table,
  TableBody,
  TableCell,
@@ -55,7 +60,7 @@ import {
  useDeleteDocument,
  useDocumentChunkSearchQuery,
  useUpdateChunk,
-} from '@/hooks/queries/knowledge'
+} from '@/hooks/queries/kb/knowledge'

const logger = createLogger('Document')
@@ -256,6 +261,8 @@ export function Document({

  const [searchQuery, setSearchQuery] = useState('')
  const [debouncedSearchQuery, setDebouncedSearchQuery] = useState('')
  const [enabledFilter, setEnabledFilter] = useState<'all' | 'enabled' | 'disabled'>('all')
  const [isFilterPopoverOpen, setIsFilterPopoverOpen] = useState(false)

  const {
    chunks: initialChunks,
@@ -268,7 +275,7 @@ export function Document({
    refreshChunks: initialRefreshChunks,
    updateChunk: initialUpdateChunk,
    isFetching: isFetchingChunks,
-  } = useDocumentChunks(knowledgeBaseId, documentId, currentPageFromURL)
+  } = useDocumentChunks(knowledgeBaseId, documentId, currentPageFromURL, '', enabledFilter)

  const {
    data: searchResults = [],
@@ -690,47 +697,100 @@ export function Document({
          </div>

          <div className='mt-[14px] flex items-center justify-between'>
-           <div className='flex h-[32px] w-[400px] items-center gap-[6px] rounded-[8px] bg-[var(--surface-4)] px-[8px]'>
-             <Search className='h-[14px] w-[14px] text-[var(--text-subtle)]' />
-             <Input
-               placeholder={
-                 documentData?.processingStatus === 'completed'
-                   ? 'Search chunks...'
-                   : 'Document processing...'
-               }
-               value={searchQuery}
-               onChange={(e) => setSearchQuery(e.target.value)}
-               disabled={documentData?.processingStatus !== 'completed'}
-               className='flex-1 border-0 bg-transparent px-0 font-medium text-[var(--text-secondary)] text-small leading-none placeholder:text-[var(--text-subtle)] focus-visible:ring-0 focus-visible:ring-offset-0'
-             />
-             {searchQuery &&
-               (isLoadingSearch ? (
-                 <Loader2 className='h-[14px] w-[14px] animate-spin text-[var(--text-subtle)]' />
-               ) : (
-                 <button
-                   onClick={() => setSearchQuery('')}
-                   className='text-[var(--text-subtle)] transition-colors hover:text-[var(--text-secondary)]'
-                 >
-                   <X className='h-[14px] w-[14px]' />
-                 </button>
-               ))}
-           </div>
+           <div className='flex items-center gap-[8px]'>
+             <div className='flex h-[32px] w-[400px] items-center gap-[6px] rounded-[8px] bg-[var(--surface-4)] px-[8px]'>
+               <Search className='h-[14px] w-[14px] text-[var(--text-subtle)]' />
+               <Input
+                 placeholder={
+                   documentData?.processingStatus === 'completed'
+                     ? 'Search chunks...'
+                     : 'Document processing...'
+                 }
+                 value={searchQuery}
+                 onChange={(e) => setSearchQuery(e.target.value)}
+                 disabled={documentData?.processingStatus !== 'completed'}
+                 className='flex-1 border-0 bg-transparent px-0 font-medium text-[var(--text-secondary)] text-small leading-none placeholder:text-[var(--text-subtle)] focus-visible:ring-0 focus-visible:ring-offset-0'
+               />
+               {searchQuery &&
+                 (isLoadingSearch ? (
+                   <Loader2 className='h-[14px] w-[14px] animate-spin text-[var(--text-subtle)]' />
+                 ) : (
+                   <button
+                     onClick={() => setSearchQuery('')}
+                     className='text-[var(--text-subtle)] transition-colors hover:text-[var(--text-secondary)]'
+                   >
+                     <X className='h-[14px] w-[14px]' />
+                   </button>
+                 ))}
+             </div>
+           </div>

-           <Tooltip.Root>
-             <Tooltip.Trigger asChild>
-               <Button
-                 onClick={() => setIsCreateChunkModalOpen(true)}
-                 disabled={documentData?.processingStatus === 'failed' || !userPermissions.canEdit}
-                 variant='tertiary'
-                 className='h-[32px] rounded-[6px]'
-               >
-                 Create Chunk
-               </Button>
-             </Tooltip.Trigger>
-             {!userPermissions.canEdit && (
-               <Tooltip.Content>Write permission required to create chunks</Tooltip.Content>
-             )}
-           </Tooltip.Root>
+           <div className='flex items-center gap-[8px]'>
+             <Popover open={isFilterPopoverOpen} onOpenChange={setIsFilterPopoverOpen}>
+               <PopoverTrigger asChild>
+                 <Button variant='default' className='h-[32px] rounded-[6px]'>
+                   {enabledFilter === 'all'
+                     ? 'Status'
+                     : enabledFilter === 'enabled'
+                       ? 'Enabled'
+                       : 'Disabled'}
+                   <ChevronDown className='ml-2 h-4 w-4 text-muted-foreground' />
+                 </Button>
+               </PopoverTrigger>
+               <PopoverContent align='end' side='bottom' sideOffset={4}>
+                 <div className='flex flex-col gap-[2px]'>
+                   <PopoverItem
+                     active={enabledFilter === 'all'}
+                     onClick={() => {
+                       setEnabledFilter('all')
+                       setIsFilterPopoverOpen(false)
+                       setSelectedChunks(new Set())
+                     }}
+                   >
+                     All
+                   </PopoverItem>
+                   <PopoverItem
+                     active={enabledFilter === 'enabled'}
+                     onClick={() => {
+                       setEnabledFilter('enabled')
+                       setIsFilterPopoverOpen(false)
+                       setSelectedChunks(new Set())
+                     }}
+                   >
+                     Enabled
+                   </PopoverItem>
+                   <PopoverItem
+                     active={enabledFilter === 'disabled'}
+                     onClick={() => {
+                       setEnabledFilter('disabled')
+                       setIsFilterPopoverOpen(false)
+                       setSelectedChunks(new Set())
+                     }}
+                   >
+                     Disabled
+                   </PopoverItem>
+                 </div>
+               </PopoverContent>
+             </Popover>
+
+             <Tooltip.Root>
+               <Tooltip.Trigger asChild>
+                 <Button
+                   onClick={() => setIsCreateChunkModalOpen(true)}
+                   disabled={
+                     documentData?.processingStatus === 'failed' || !userPermissions.canEdit
+                   }
+                   variant='tertiary'
+                   className='h-[32px] rounded-[6px]'
+                 >
+                   Create Chunk
+                 </Button>
+               </Tooltip.Trigger>
+               {!userPermissions.canEdit && (
+                 <Tooltip.Content>Write permission required to create chunks</Tooltip.Content>
+               )}
+             </Tooltip.Root>
+           </div>
          </div>

          <div
@@ -1059,7 +1119,6 @@ export function Document({
        isLoading={isBulkOperating}
      />

      {/* Delete Document Modal */}
      <Modal open={showDeleteDocumentDialog} onOpenChange={setShowDeleteDocumentDialog}>
        <ModalContent size='sm'>
          <ModalHeader>Delete Document</ModalHeader>
@@ -1072,7 +1131,14 @@ export function Document({
              ? This will permanently delete the document and all {documentData?.chunkCount ?? 0}{' '}
              chunk
              {documentData?.chunkCount === 1 ? '' : 's'} within it.{' '}
              {documentData?.connectorId ? (
                <span className='text-[var(--text-error)]'>
                  This document is synced from a connector. Deleting it will permanently exclude it
                  from future syncs. To temporarily hide it from search, disable it instead.
                </span>
              ) : (
                <span className='text-[var(--text-error)]'>This action cannot be undone.</span>
              )}
            </p>
          </ModalBody>
          <ModalFooter>

@@ -1,6 +1,6 @@
'use client'

import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
import { createLogger } from '@sim/logger'
import { format } from 'date-fns'
import {
@@ -11,7 +11,9 @@ import {
  ChevronUp,
  Circle,
  CircleOff,
  Filter,
  Loader2,
  Plus,
  RotateCcw,
  Search,
  X,
@@ -22,6 +24,11 @@ import {
  Breadcrumb,
  Button,
  Checkbox,
  Combobox,
  type ComboboxOption,
  DatePicker,
  Input,
  Label,
  Modal,
  ModalBody,
  ModalContent,
@@ -40,25 +47,28 @@ import {
  Tooltip,
  Trash,
} from '@/components/emcn'
import { SearchHighlight } from '@/components/ui/search-highlight'
import { Skeleton } from '@/components/ui/skeleton'
import { cn } from '@/lib/core/utils/cn'
import { formatAbsoluteDate, formatRelativeTime } from '@/lib/core/utils/formatting'
import { ALL_TAG_SLOTS, type AllTagSlot, getFieldTypeForSlot } from '@/lib/knowledge/constants'
import type { DocumentSortField, SortOrder } from '@/lib/knowledge/documents/types'
import { type FilterFieldType, getOperatorsForFieldType } from '@/lib/knowledge/filters/types'
import type { DocumentData } from '@/lib/knowledge/types'
import { formatFileSize } from '@/lib/uploads/utils/file-utils'
import {
  ActionBar,
  AddConnectorModal,
  AddDocumentsModal,
  BaseTagsModal,
  ConnectorsSection,
  DocumentContextMenu,
  RenameDocumentModal,
} from '@/app/workspace/[workspaceId]/knowledge/[id]/components'
import { getDocumentIcon } from '@/app/workspace/[workspaceId]/knowledge/components'
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
import { useContextMenu } from '@/app/workspace/[workspaceId]/w/components/sidebar/hooks'
import { CONNECTOR_REGISTRY } from '@/connectors/registry'
import {
  useKnowledgeBase,
  useKnowledgeBaseDocuments,
@@ -68,12 +78,14 @@ import {
  type TagDefinition,
  useKnowledgeBaseTagDefinitions,
} from '@/hooks/kb/use-knowledge-base-tag-definitions'
import { useConnectorList } from '@/hooks/queries/kb/connectors'
import type { DocumentTagFilter } from '@/hooks/queries/kb/knowledge'
import {
  useBulkDocumentOperation,
  useDeleteDocument,
  useDeleteKnowledgeBase,
  useUpdateDocument,
} from '@/hooks/queries/kb/knowledge'

const logger = createLogger('KnowledgeBase')

@@ -345,6 +357,32 @@ export function KnowledgeBase({
  const [showTagsModal, setShowTagsModal] = useState(false)
  const [enabledFilter, setEnabledFilter] = useState<'all' | 'enabled' | 'disabled'>('all')
  const [isFilterPopoverOpen, setIsFilterPopoverOpen] = useState(false)
  const [isTagFilterPopoverOpen, setIsTagFilterPopoverOpen] = useState(false)
  const [tagFilterEntries, setTagFilterEntries] = useState<
    {
      id: string
      tagName: string
      tagSlot: string
      fieldType: FilterFieldType
      operator: string
      value: string
      valueTo: string
    }[]
  >([])

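  /**
   * Only complete rows (a tag selected and a non-empty value) are sent to the
   * server; 'between' entries additionally carry the upper bound as valueTo.
   */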
  const activeTagFilters: DocumentTagFilter[] = useMemo(
    () =>
      tagFilterEntries
        .filter((f) => f.tagSlot && f.value.trim())
        .map((f) => ({
          tagSlot: f.tagSlot,
          fieldType: f.fieldType,
          operator: f.operator,
          value: f.value,
          ...(f.operator === 'between' && f.valueTo ? { valueTo: f.valueTo } : {}),
        })),
    [tagFilterEntries]
  )

  /**
   * Memoize the search query setter to prevent unnecessary re-renders
@@ -367,6 +405,7 @@ export function KnowledgeBase({
  const [contextMenuDocument, setContextMenuDocument] = useState<DocumentData | null>(null)
  const [showRenameModal, setShowRenameModal] = useState(false)
  const [documentToRename, setDocumentToRename] = useState<DocumentData | null>(null)
  const [showAddConnectorModal, setShowAddConnectorModal] = useState(false)

  const {
    isOpen: isContextMenuOpen,
@@ -407,10 +446,23 @@ export function KnowledgeBase({
      return hasPending ? 3000 : false
    },
    enabledFilter,
    tagFilters: activeTagFilters.length > 0 ? activeTagFilters : undefined,
  })

  const { tagDefinitions } = useKnowledgeBaseTagDefinitions(id)

  const { data: connectors = [], isLoading: isLoadingConnectors } = useConnectorList(id)
  const hasSyncingConnectors = connectors.some((c) => c.status === 'syncing')

  /** Refresh KB detail when connectors transition from syncing to done */
  const prevHadSyncingRef = useRef(false)
  useEffect(() => {
    if (prevHadSyncingRef.current && !hasSyncingConnectors) {
      refreshKnowledgeBase()
    }
    prevHadSyncingRef.current = hasSyncingConnectors
  }, [hasSyncingConnectors, refreshKnowledgeBase])

  const router = useRouter()

  const knowledgeBaseName = knowledgeBase?.name || passedKnowledgeBaseName || 'Knowledge Base'
@@ -1003,6 +1055,14 @@ export function KnowledgeBase({
        )}
      </div>

      <ConnectorsSection
        knowledgeBaseId={id}
        connectors={connectors}
        isLoading={isLoadingConnectors}
        canEdit={userPermissions.canEdit}
        onAddConnector={() => setShowAddConnectorModal(true)}
      />

      <div className='mt-[16px] flex items-center gap-[8px]'>
        <span className='text-[14px] text-[var(--text-muted)]'>
          {pagination.total} {pagination.total === 1 ? 'document' : 'documents'}
@@ -1049,7 +1109,7 @@ export function KnowledgeBase({
        <PopoverTrigger asChild>
          <Button variant='default' className='h-[32px] rounded-[6px]'>
            {enabledFilter === 'all'
              ? 'Status'
              : enabledFilter === 'enabled'
                ? 'Enabled'
                : 'Disabled'}
@@ -1098,6 +1158,19 @@ export function KnowledgeBase({
        </PopoverContent>
      </Popover>

      <TagFilterPopover
        tagDefinitions={tagDefinitions}
        entries={tagFilterEntries}
        isOpen={isTagFilterPopoverOpen}
        onOpenChange={setIsTagFilterPopoverOpen}
        onChange={(entries) => {
          setTagFilterEntries(entries)
          setCurrentPage(1)
          setSelectedDocuments(new Set())
          setIsSelectAllMode(false)
        }}
      />

      <Tooltip.Root>
        <Tooltip.Trigger asChild>
          <Button
@@ -1138,14 +1211,14 @@ export function KnowledgeBase({
          <p className='font-medium text-[var(--text-secondary)] text-sm'>
            {searchQuery
              ? 'No documents found'
              : enabledFilter !== 'all' || activeTagFilters.length > 0
                ? 'Nothing matches your filter'
                : 'No documents yet'}
          </p>
          <p className='mt-1 text-[var(--text-muted)] text-xs'>
            {searchQuery
              ? 'Try a different search term'
              : enabledFilter !== 'all' || activeTagFilters.length > 0
                ? 'Try changing the filter'
                : userPermissions.canEdit === true
                  ? 'Add documents to get started'
@@ -1218,6 +1291,12 @@ export function KnowledgeBase({
          <TableCell className='w-[180px] max-w-[180px] px-[12px] py-[8px]'>
            <div className='flex min-w-0 items-center gap-[8px]'>
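              {/* Synced documents show their connector's icon; everything else falls back to a mime-type icon. */}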
              {(() => {
                const ConnectorIcon = doc.connectorType
                  ? CONNECTOR_REGISTRY[doc.connectorType]?.icon
                  : null
                if (ConnectorIcon) {
                  return <ConnectorIcon className='h-5 w-5 flex-shrink-0' />
                }
                const IconComponent = getDocumentIcon(doc.mimeType, doc.filename)
                return <IconComponent className='h-6 w-5 flex-shrink-0' />
              })()}
@@ -1489,13 +1568,26 @@ export function KnowledgeBase({
        <ModalContent size='sm'>
          <ModalHeader>Delete Document</ModalHeader>
          <ModalBody>
            {(() => {
              const docToDelete = documents.find((doc) => doc.id === documentToDelete)
              return (
                <p className='text-[12px] text-[var(--text-secondary)]'>
                  Are you sure you want to delete{' '}
                  <span className='font-medium text-[var(--text-primary)]'>
                    {docToDelete?.filename ?? 'this document'}
                  </span>
                  ?{' '}
                  {docToDelete?.connectorId ? (
                    <span className='text-[var(--text-error)]'>
                      This document is synced from a connector. Deleting it will permanently exclude
                      it from future syncs. To temporarily hide it from search, disable it instead.
                    </span>
                  ) : (
                    <span className='text-[var(--text-error)]'>This action cannot be undone.</span>
                  )}
                </p>
              )
            })()}
          </ModalBody>
          <ModalFooter>
            <Button
@@ -1545,6 +1637,11 @@ export function KnowledgeBase({
          chunkingConfig={knowledgeBase?.chunkingConfig}
        />

        {/* Add Connector Modal — conditionally rendered so it remounts fresh each open */}
        {showAddConnectorModal && (
          <AddConnectorModal open onOpenChange={setShowAddConnectorModal} knowledgeBaseId={id} />
        )}

        {/* Rename Document Modal */}
        {documentToRename && (
          <RenameDocumentModal
@@ -1603,6 +1700,11 @@ export function KnowledgeBase({
            }
          : undefined
      }
      onOpenSource={
        contextMenuDocument?.sourceUrl && selectedDocuments.size === 1
          ? () => window.open(contextMenuDocument.sourceUrl!, '_blank', 'noopener,noreferrer')
          : undefined
      }
      onRename={
        contextMenuDocument && selectedDocuments.size === 1 && userPermissions.canEdit
          ? () => handleRenameDocument(contextMenuDocument)
@@ -1655,3 +1757,224 @@ export function KnowledgeBase({
    </div>
  )
}

interface TagFilterEntry {
  id: string
  tagName: string
  tagSlot: string
  fieldType: FilterFieldType
  operator: string
  value: string
  valueTo: string
}

const createEmptyEntry = (): TagFilterEntry => ({
  id: crypto.randomUUID(),
  tagName: '',
  tagSlot: '',
  fieldType: 'text',
  operator: 'eq',
  value: '',
  valueTo: '',
})

interface TagFilterPopoverProps {
  tagDefinitions: TagDefinition[]
  entries: TagFilterEntry[]
  isOpen: boolean
  onOpenChange: (open: boolean) => void
  onChange: (entries: TagFilterEntry[]) => void
}

function TagFilterPopover({
  tagDefinitions,
  entries,
  isOpen,
  onOpenChange,
  onChange,
}: TagFilterPopoverProps) {
  const activeCount = entries.filter((f) => f.tagSlot && f.value.trim()).length

  const tagOptions: ComboboxOption[] = tagDefinitions.map((t) => ({
    value: t.displayName,
    label: t.displayName,
  }))

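  /** Always render at least one blank row so the popover never opens empty. */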
  const filtersToShow = useMemo(
    () => (entries.length > 0 ? entries : [createEmptyEntry()]),
    [entries]
  )

  const updateEntry = (id: string, patch: Partial<TagFilterEntry>) => {
    const existing = filtersToShow.find((e) => e.id === id)
    if (!existing) return
    const updated = filtersToShow.map((e) => (e.id === id ? { ...e, ...patch } : e))
    onChange(updated)
  }

  const handleTagChange = (id: string, tagName: string) => {
    const def = tagDefinitions.find((t) => t.displayName === tagName)
    const fieldType = (def?.fieldType || 'text') as FilterFieldType
    const operators = getOperatorsForFieldType(fieldType)
    updateEntry(id, {
      tagName,
      tagSlot: def?.tagSlot || '',
      fieldType,
      operator: operators[0]?.value || 'eq',
      value: '',
      valueTo: '',
    })
  }

  const addFilter = () => {
    onChange([...filtersToShow, createEmptyEntry()])
  }

  const removeFilter = (id: string) => {
    const remaining = filtersToShow.filter((e) => e.id !== id)
    onChange(remaining.length > 0 ? remaining : [])
  }

  return (
    <Popover open={isOpen} onOpenChange={onOpenChange}>
      <PopoverTrigger asChild>
        <Button variant='default' className='h-[32px] rounded-[6px]'>
          <Filter className='mr-1.5 h-3.5 w-3.5' />
          Tags
          <ChevronDown className='ml-2 h-4 w-4 text-muted-foreground' />
        </Button>
      </PopoverTrigger>
      <PopoverContent align='end' side='bottom' sideOffset={4} className='w-[320px] p-0'>
        <div className='flex flex-col'>
          <div className='flex items-center justify-between border-[var(--border-1)] border-b px-[12px] py-[8px]'>
            <span className='font-medium text-[12px] text-[var(--text-secondary)]'>
              Filter by tags
            </span>
            <div className='flex items-center gap-[4px]'>
              {activeCount > 0 && (
                <Button
                  variant='ghost'
                  className='h-auto px-[6px] py-[2px] text-[11px] text-[var(--text-muted)]'
                  onClick={() => onChange([])}
                >
                  Clear all
                </Button>
              )}
              <Button variant='ghost' className='h-auto p-0' onClick={addFilter}>
                <Plus className='h-3.5 w-3.5' />
              </Button>
            </div>
          </div>

          <div className='flex max-h-[320px] flex-col gap-[8px] overflow-y-auto p-[12px]'>
            {filtersToShow.map((entry) => {
              const operators = getOperatorsForFieldType(entry.fieldType)
              const operatorOptions: ComboboxOption[] = operators.map((op) => ({
                value: op.value,
                label: op.label,
              }))
              const isBetween = entry.operator === 'between'

              return (
                <div
                  key={entry.id}
                  className='flex flex-col gap-[6px] rounded-[6px] border border-[var(--border-1)] p-[8px]'
                >
                  <div className='flex items-center justify-between'>
                    <Label className='text-[11px] text-[var(--text-muted)]'>Tag</Label>
                    <button
                      type='button'
                      onClick={() => removeFilter(entry.id)}
                      className='text-[var(--text-muted)] transition-colors hover:text-[var(--text-error)]'
                    >
                      <X className='h-3 w-3' />
                    </button>
                  </div>
                  <Combobox
                    options={tagOptions}
                    value={entry.tagName}
                    onChange={(v) => handleTagChange(entry.id, v)}
                    placeholder='Select tag'
                  />

                  {entry.tagSlot && (
                    <>
                      <Label className='text-[11px] text-[var(--text-muted)]'>Operator</Label>
                      <Combobox
                        options={operatorOptions}
                        value={entry.operator}
                        onChange={(v) => updateEntry(entry.id, { operator: v, valueTo: '' })}
                        placeholder='Select operator'
                      />

                      <Label className='text-[11px] text-[var(--text-muted)]'>Value</Label>
                      {entry.fieldType === 'date' ? (
                        isBetween ? (
                          <div className='flex items-center gap-[6px]'>
                            <DatePicker
                              size='sm'
                              value={entry.value || undefined}
                              onChange={(v) => updateEntry(entry.id, { value: v })}
                              placeholder='From'
                            />
                            <span className='flex-shrink-0 text-[11px] text-[var(--text-muted)]'>
                              to
                            </span>
                            <DatePicker
                              size='sm'
                              value={entry.valueTo || undefined}
                              onChange={(v) => updateEntry(entry.id, { valueTo: v })}
                              placeholder='To'
                            />
                          </div>
                        ) : (
                          <DatePicker
                            size='sm'
                            value={entry.value || undefined}
                            onChange={(v) => updateEntry(entry.id, { value: v })}
                            placeholder='Select date'
                          />
                        )
                      ) : isBetween ? (
                        <div className='flex items-center gap-[6px]'>
                          <Input
                            value={entry.value}
                            onChange={(e) => updateEntry(entry.id, { value: e.target.value })}
                            placeholder='From'
                            className='h-[28px] text-[12px]'
                          />
                          <span className='flex-shrink-0 text-[11px] text-[var(--text-muted)]'>
                            to
                          </span>
                          <Input
                            value={entry.valueTo}
                            onChange={(e) => updateEntry(entry.id, { valueTo: e.target.value })}
                            placeholder='To'
                            className='h-[28px] text-[12px]'
                          />
                        </div>
                      ) : (
                        <Input
                          value={entry.value}
                          onChange={(e) => updateEntry(entry.id, { value: e.target.value })}
                          placeholder={
                            entry.fieldType === 'boolean'
                              ? 'true or false'
                              : entry.fieldType === 'number'
                                ? 'Enter number'
                                : 'Enter value'
                          }
                          className='h-[28px] text-[12px]'
                        />
                      )}
                    </>
                  )}
                </div>
              )
            })}
          </div>
        </div>
      </PopoverContent>
    </Popover>
  )
}

@@ -0,0 +1,348 @@
'use client'

import { useMemo, useState } from 'react'
import { ArrowLeft, Loader2, Plus } from 'lucide-react'
import {
  Button,
  ButtonGroup,
  ButtonGroupItem,
  Checkbox,
  Combobox,
  type ComboboxOption,
  Input,
  Label,
  Modal,
  ModalBody,
  ModalContent,
  ModalFooter,
  ModalHeader,
} from '@/components/emcn'
import {
  getCanonicalScopesForProvider,
  getProviderIdFromServiceId,
  type OAuthProvider,
} from '@/lib/oauth'
import { OAuthRequiredModal } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/credential-selector/components/oauth-required-modal'
import { CONNECTOR_REGISTRY } from '@/connectors/registry'
import type { ConnectorConfig } from '@/connectors/types'
import { useCreateConnector } from '@/hooks/queries/kb/connectors'
import { useOAuthCredentials } from '@/hooks/queries/oauth/oauth-credentials'

const SYNC_INTERVALS = [
  { label: 'Every hour', value: 60 },
  { label: 'Every 6 hours', value: 360 },
  { label: 'Daily', value: 1440 },
  { label: 'Weekly', value: 10080 },
  { label: 'Manual only', value: 0 },
] as const

interface AddConnectorModalProps {
  open: boolean
  onOpenChange: (open: boolean) => void
  knowledgeBaseId: string
}

type Step = 'select-type' | 'configure'

export function AddConnectorModal({ open, onOpenChange, knowledgeBaseId }: AddConnectorModalProps) {
  const [step, setStep] = useState<Step>('select-type')
  const [selectedType, setSelectedType] = useState<string | null>(null)
  const [sourceConfig, setSourceConfig] = useState<Record<string, string>>({})
  const [syncInterval, setSyncInterval] = useState(1440)
  const [selectedCredentialId, setSelectedCredentialId] = useState<string | null>(null)
  const [disabledTagIds, setDisabledTagIds] = useState<Set<string>>(new Set())
  const [error, setError] = useState<string | null>(null)
  const [showOAuthModal, setShowOAuthModal] = useState(false)

  const { mutate: createConnector, isPending: isCreating } = useCreateConnector()

  const connectorConfig = selectedType ? CONNECTOR_REGISTRY[selectedType] : null
  const connectorProviderId = useMemo(
    () =>
      connectorConfig
        ? (getProviderIdFromServiceId(connectorConfig.oauth.provider) as OAuthProvider)
        : null,
    [connectorConfig]
  )

  const { data: credentials = [], isLoading: credentialsLoading } = useOAuthCredentials(
    connectorConfig?.oauth.provider,
    Boolean(connectorConfig)
  )

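  /** If the user has exactly one linked account, use it without requiring an explicit pick. */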
  const effectiveCredentialId =
    selectedCredentialId ?? (credentials.length === 1 ? credentials[0].id : null)

  const handleSelectType = (type: string) => {
    setSelectedType(type)
    setStep('configure')
  }

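  /** Submission needs a credential plus a non-empty value for every required config field. */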
  const canSubmit = useMemo(() => {
    if (!connectorConfig || !effectiveCredentialId) return false
    return connectorConfig.configFields
      .filter((f) => f.required)
      .every((f) => sourceConfig[f.id]?.trim())
  }, [connectorConfig, effectiveCredentialId, sourceConfig])

  const handleSubmit = () => {
    if (!selectedType || !effectiveCredentialId || !canSubmit) return

    setError(null)

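    // Tag opt-outs travel inside sourceConfig; the sync engine appears to treat
    // disabledTagIds as an internal key (see INTERNAL_CONFIG_KEYS in the edit modal).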
    const finalSourceConfig =
      disabledTagIds.size > 0
        ? { ...sourceConfig, disabledTagIds: Array.from(disabledTagIds) }
        : sourceConfig

    createConnector(
      {
        knowledgeBaseId,
        connectorType: selectedType,
        credentialId: effectiveCredentialId,
        sourceConfig: finalSourceConfig,
        syncIntervalMinutes: syncInterval,
      },
      {
        onSuccess: () => {
          onOpenChange(false)
        },
        onError: (err) => {
          setError(err.message)
        },
      }
    )
  }

  const connectorEntries = Object.entries(CONNECTOR_REGISTRY)

  return (
    <>
      <Modal open={open} onOpenChange={(val) => !isCreating && onOpenChange(val)}>
        <ModalContent size='md'>
          <ModalHeader>
            {step === 'configure' && (
              <Button
                variant='ghost'
                className='mr-2 h-6 w-6 p-0'
                onClick={() => setStep('select-type')}
              >
                <ArrowLeft className='h-4 w-4' />
              </Button>
            )}
            {step === 'select-type' ? 'Connect Source' : `Configure ${connectorConfig?.name}`}
          </ModalHeader>

          <ModalBody>
            {step === 'select-type' ? (
              <div className='flex flex-col gap-[8px]'>
                {connectorEntries.map(([type, config]) => (
                  <ConnectorTypeCard
                    key={type}
                    config={config}
                    onClick={() => handleSelectType(type)}
                  />
                ))}
                {connectorEntries.length === 0 && (
                  <p className='text-[13px] text-[var(--text-muted)]'>No connectors available.</p>
                )}
              </div>
            ) : connectorConfig ? (
              <div className='flex flex-col gap-[12px]'>
                {/* Credential selection */}
                <div className='flex flex-col gap-[4px]'>
                  <Label>Account</Label>
                  {credentialsLoading ? (
                    <div className='flex items-center gap-2 text-[13px] text-[var(--text-muted)]'>
                      <Loader2 className='h-4 w-4 animate-spin' />
                      Loading credentials...
                    </div>
                  ) : (
                    <Combobox
                      size='sm'
                      options={[
                        ...credentials.map(
                          (cred): ComboboxOption => ({
                            label: cred.name || cred.provider,
                            value: cred.id,
                            icon: connectorConfig.icon,
                          })
                        ),
                        {
                          label: 'Connect new account',
                          value: '__connect_new__',
                          icon: Plus,
                          onSelect: () => {
                            setShowOAuthModal(true)
                          },
                        },
                      ]}
                      value={effectiveCredentialId ?? undefined}
                      onChange={(value) => setSelectedCredentialId(value)}
                      placeholder={
                        credentials.length === 0
                          ? `No ${connectorConfig.name} accounts`
                          : 'Select account'
                      }
                    />
                  )}
                </div>

                {/* Config fields */}
                {connectorConfig.configFields.map((field) => (
                  <div key={field.id} className='flex flex-col gap-[4px]'>
                    <Label>
                      {field.title}
                      {field.required && (
                        <span className='ml-[2px] text-[var(--text-error)]'>*</span>
                      )}
                    </Label>
                    {field.description && (
                      <p className='text-[11px] text-[var(--text-muted)]'>{field.description}</p>
                    )}
                    {field.type === 'dropdown' && field.options ? (
                      <Combobox
                        size='sm'
                        options={field.options.map((opt) => ({
                          label: opt.label,
                          value: opt.id,
                        }))}
                        value={sourceConfig[field.id] || undefined}
                        onChange={(value) =>
                          setSourceConfig((prev) => ({ ...prev, [field.id]: value }))
                        }
                        placeholder={field.placeholder || `Select ${field.title.toLowerCase()}`}
                      />
                    ) : (
                      <Input
                        value={sourceConfig[field.id] || ''}
                        onChange={(e) =>
                          setSourceConfig((prev) => ({ ...prev, [field.id]: e.target.value }))
                        }
                        placeholder={field.placeholder}
                      />
                    )}
                  </div>
                ))}

                {/* Tag definitions (opt-out) */}
                {connectorConfig.tagDefinitions && connectorConfig.tagDefinitions.length > 0 && (
                  <div className='flex flex-col gap-[4px]'>
                    <Label>Metadata Tags</Label>
                    {connectorConfig.tagDefinitions.map((tagDef) => (
                      <div
                        key={tagDef.id}
                        className='flex cursor-pointer items-center gap-[8px] rounded-[4px] px-[2px] py-[2px] text-[13px]'
                        onClick={() => {
                          setDisabledTagIds((prev) => {
                            const next = new Set(prev)
                            if (prev.has(tagDef.id)) {
                              next.delete(tagDef.id)
                            } else {
                              next.add(tagDef.id)
                            }
                            return next
                          })
                        }}
                      >
                        <Checkbox
                          checked={!disabledTagIds.has(tagDef.id)}
                          onCheckedChange={(checked) => {
                            setDisabledTagIds((prev) => {
                              const next = new Set(prev)
                              if (checked) {
                                next.delete(tagDef.id)
                              } else {
                                next.add(tagDef.id)
                              }
                              return next
                            })
                          }}
                        />
                        <span className='text-[var(--text-primary)]'>{tagDef.displayName}</span>
                        <span className='text-[11px] text-[var(--text-muted)]'>
                          ({tagDef.fieldType})
                        </span>
                      </div>
                    ))}
                  </div>
                )}

                {/* Sync interval */}
                <div className='flex flex-col gap-[4px]'>
                  <Label>Sync Frequency</Label>
                  <ButtonGroup
                    value={String(syncInterval)}
                    onValueChange={(val) => setSyncInterval(Number(val))}
                  >
                    {SYNC_INTERVALS.map((interval) => (
                      <ButtonGroupItem key={interval.value} value={String(interval.value)}>
                        {interval.label}
                      </ButtonGroupItem>
                    ))}
                  </ButtonGroup>
                </div>

                {error && (
                  <p className='text-[12px] text-[var(--text-error)] leading-tight'>{error}</p>
                )}
              </div>
            ) : null}
          </ModalBody>

          {step === 'configure' && (
            <ModalFooter>
              <Button variant='default' onClick={() => onOpenChange(false)} disabled={isCreating}>
                Cancel
              </Button>
              <Button variant='tertiary' onClick={handleSubmit} disabled={!canSubmit || isCreating}>
                {isCreating ? (
                  <>
                    <Loader2 className='mr-1.5 h-3.5 w-3.5 animate-spin' />
                    Connecting...
                  </>
                ) : (
                  'Connect & Sync'
                )}
              </Button>
            </ModalFooter>
          )}
        </ModalContent>
      </Modal>
      {connectorConfig && connectorProviderId && (
        <OAuthRequiredModal
          isOpen={showOAuthModal}
          onClose={() => setShowOAuthModal(false)}
          provider={connectorProviderId}
          toolName={connectorConfig.name}
          requiredScopes={getCanonicalScopesForProvider(connectorProviderId)}
          newScopes={connectorConfig.oauth.requiredScopes || []}
          serviceId={connectorConfig.oauth.provider}
        />
      )}
    </>
  )
}

interface ConnectorTypeCardProps {
  config: ConnectorConfig
  onClick: () => void
}

function ConnectorTypeCard({ config, onClick }: ConnectorTypeCardProps) {
  const Icon = config.icon

  return (
    <button
      type='button'
      className='flex items-center gap-[12px] rounded-[8px] border border-[var(--border-1)] px-[14px] py-[12px] text-left transition-colors hover:bg-[var(--surface-2)]'
      onClick={onClick}
    >
      <Icon className='h-6 w-6 flex-shrink-0' />
      <div className='flex flex-col gap-[2px]'>
        <span className='font-medium text-[14px] text-[var(--text-primary)]'>{config.name}</span>
        <span className='text-[12px] text-[var(--text-muted)]'>{config.description}</span>
      </div>
    </button>
  )
}
@@ -22,7 +22,7 @@ import {
  type TagDefinition,
  useKnowledgeBaseTagDefinitions,
} from '@/hooks/kb/use-knowledge-base-tag-definitions'
import { useCreateTagDefinition, useDeleteTagDefinition } from '@/hooks/queries/kb/knowledge'

const logger = createLogger('BaseTagsModal')

@@ -0,0 +1,539 @@
'use client'

import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
import { createLogger } from '@sim/logger'
import { format, formatDistanceToNow } from 'date-fns'
import {
  AlertCircle,
  CheckCircle2,
  ChevronDown,
  Loader2,
  Pause,
  Play,
  RefreshCw,
  Settings,
  Trash,
  Unplug,
  XCircle,
} from 'lucide-react'
import {
  Badge,
  Button,
  Modal,
  ModalBody,
  ModalContent,
  ModalFooter,
  ModalHeader,
  Tooltip,
} from '@/components/emcn'
import { cn } from '@/lib/core/utils/cn'
import {
  getCanonicalScopesForProvider,
  getProviderIdFromServiceId,
  type OAuthProvider,
} from '@/lib/oauth'
import { EditConnectorModal } from '@/app/workspace/[workspaceId]/knowledge/[id]/components/edit-connector-modal/edit-connector-modal'
import { OAuthRequiredModal } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/credential-selector/components/oauth-required-modal'
import { CONNECTOR_META } from '@/connectors/icons'
import { CONNECTOR_REGISTRY } from '@/connectors/registry'
import type { ConnectorData, SyncLogData } from '@/hooks/queries/kb/connectors'
import {
  useConnectorDetail,
  useDeleteConnector,
  useTriggerSync,
  useUpdateConnector,
} from '@/hooks/queries/kb/connectors'
import { useOAuthCredentials } from '@/hooks/queries/oauth/oauth-credentials'
import { getMissingRequiredScopes } from '@/hooks/use-oauth-scope-status'

const logger = createLogger('ConnectorsSection')

interface ConnectorsSectionProps {
  knowledgeBaseId: string
  connectors: ConnectorData[]
  isLoading: boolean
  canEdit: boolean
  onAddConnector: () => void
}

/** 5-minute cooldown after a manual sync trigger */
const SYNC_COOLDOWN_MS = 5 * 60 * 1000

const STATUS_CONFIG = {
  active: { label: 'Active', variant: 'green' as const },
  syncing: { label: 'Syncing', variant: 'amber' as const },
  error: { label: 'Error', variant: 'red' as const },
  paused: { label: 'Paused', variant: 'gray' as const },
} as const

export function ConnectorsSection({
  knowledgeBaseId,
  connectors,
  isLoading,
  canEdit,
  onAddConnector,
}: ConnectorsSectionProps) {
  const { mutate: triggerSync, isPending: isSyncing } = useTriggerSync()
  const { mutate: updateConnector } = useUpdateConnector()
  const { mutate: deleteConnector } = useDeleteConnector()
  const [deleteTarget, setDeleteTarget] = useState<string | null>(null)
  const [editingConnector, setEditingConnector] = useState<ConnectorData | null>(null)
  const [error, setError] = useState<string | null>(null)

  const syncTriggeredAt = useRef<Record<string, number>>({})
  const cooldownTimers = useRef<Set<ReturnType<typeof setTimeout>>>(new Set())
  const [, forceUpdate] = useState(0)
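  // Counter state exists only to force a re-render when a cooldown expires or is
  // cleared; the cooldown timestamps themselves live in refs so updating them
  // doesn't re-render on its own.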

  useEffect(() => {
    return () => {
      for (const timer of cooldownTimers.current) {
        clearTimeout(timer)
      }
    }
  }, [])

  const isSyncOnCooldown = useCallback((connectorId: string) => {
    const triggeredAt = syncTriggeredAt.current[connectorId]
    if (!triggeredAt) return false
    return Date.now() - triggeredAt < SYNC_COOLDOWN_MS
  }, [])

  const handleSync = useCallback(
    (connectorId: string) => {
      if (isSyncOnCooldown(connectorId)) return

      syncTriggeredAt.current[connectorId] = Date.now()

      triggerSync(
        { knowledgeBaseId, connectorId },
        {
          onSuccess: () => {
            setError(null)
            const timer = setTimeout(() => {
              cooldownTimers.current.delete(timer)
              forceUpdate((n) => n + 1)
            }, SYNC_COOLDOWN_MS)
            cooldownTimers.current.add(timer)
          },
          onError: (err) => {
            logger.error('Sync trigger failed', { error: err.message })
            setError(err.message)
            delete syncTriggeredAt.current[connectorId]
            forceUpdate((n) => n + 1)
          },
        }
      )
    },
    [knowledgeBaseId, triggerSync, isSyncOnCooldown]
  )

  if (isLoading) return null
  if (connectors.length === 0 && !canEdit) return null

  return (
    <div className='mt-[16px]'>
      <div className='flex items-center justify-between'>
        <h2 className='font-medium text-[14px] text-[var(--text-secondary)]'>Connected Sources</h2>
        {canEdit && (
          <Button
            variant='default'
            className='h-[28px] rounded-[6px] text-[12px]'
            onClick={onAddConnector}
          >
            <Unplug className='mr-1 h-3.5 w-3.5' />
            Connect Source
          </Button>
        )}
      </div>

      {error && (
        <p className='mt-[8px] text-[12px] text-[var(--text-error)] leading-tight'>{error}</p>
      )}

      {connectors.length === 0 ? (
        <p className='mt-[8px] text-[13px] text-[var(--text-muted)]'>
          No connected sources yet. Connect an external source to automatically sync documents.
        </p>
      ) : (
        <div className='mt-[8px] flex flex-col gap-[8px]'>
          {connectors.map((connector) => (
            <ConnectorCard
              key={connector.id}
              connector={connector}
              knowledgeBaseId={knowledgeBaseId}
              canEdit={canEdit}
              isSyncing={isSyncing}
              syncCooldown={isSyncOnCooldown(connector.id)}
              onSync={() => handleSync(connector.id)}
              onTogglePause={() =>
                updateConnector(
                  {
                    knowledgeBaseId,
                    connectorId: connector.id,
                    updates: {
                      status: connector.status === 'paused' ? 'active' : 'paused',
                    },
                  },
                  {
                    onSuccess: () => setError(null),
                    onError: (err) => {
                      logger.error('Toggle pause failed', { error: err.message })
                      setError(err.message)
                    },
                  }
                )
              }
              onEdit={() => setEditingConnector(connector)}
              onDelete={() => setDeleteTarget(connector.id)}
            />
          ))}
        </div>
      )}

      {editingConnector && (
        <EditConnectorModal
          open={editingConnector !== null}
          onOpenChange={(val) => !val && setEditingConnector(null)}
          knowledgeBaseId={knowledgeBaseId}
          connector={editingConnector}
        />
      )}

      <Modal open={deleteTarget !== null} onOpenChange={() => setDeleteTarget(null)}>
        <ModalContent size='sm'>
          <ModalHeader>Delete Connector</ModalHeader>
          <ModalBody>
            <p className='text-[14px] text-[var(--text-secondary)]'>
              Are you sure you want to remove this connected source? Documents already synced will
              remain in the knowledge base.
            </p>
          </ModalBody>
          <ModalFooter>
            <Button variant='default' onClick={() => setDeleteTarget(null)}>
              Cancel
            </Button>
            <Button
              variant='destructive'
              onClick={() => {
                if (deleteTarget) {
                  deleteConnector(
                    { knowledgeBaseId, connectorId: deleteTarget },
                    {
                      onSuccess: () => setError(null),
                      onError: (err) => {
                        logger.error('Delete connector failed', { error: err.message })
                        setError(err.message)
                      },
                    }
                  )
                  setDeleteTarget(null)
                }
              }}
            >
              Delete
            </Button>
          </ModalFooter>
        </ModalContent>
      </Modal>
    </div>
  )
}

interface ConnectorCardProps {
  connector: ConnectorData
  knowledgeBaseId: string
  canEdit: boolean
  isSyncing: boolean
  syncCooldown: boolean
  onSync: () => void
  onEdit: () => void
  onTogglePause: () => void
  onDelete: () => void
}

function ConnectorCard({
  connector,
  knowledgeBaseId,
  canEdit,
  isSyncing,
  syncCooldown,
  onSync,
  onEdit,
  onTogglePause,
  onDelete,
}: ConnectorCardProps) {
  const [expanded, setExpanded] = useState(false)
  const [showOAuthModal, setShowOAuthModal] = useState(false)

  const meta = CONNECTOR_META[connector.connectorType]
  const Icon = meta?.icon
  const statusConfig =
    STATUS_CONFIG[connector.status as keyof typeof STATUS_CONFIG] || STATUS_CONFIG.active

  const connectorConfig = CONNECTOR_REGISTRY[connector.connectorType]
  const serviceId = connectorConfig?.oauth.provider
  const providerId = serviceId ? getProviderIdFromServiceId(serviceId) : undefined
  const requiredScopes = connectorConfig?.oauth.requiredScopes ?? []

  const { data: credentials } = useOAuthCredentials(providerId)

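  /** A non-empty result means the stored credential lacks scopes the connector needs; it drives the re-auth banner below. */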
  const missingScopes = useMemo(() => {
    if (!credentials || !connector.credentialId) return []
    const credential = credentials.find((c) => c.id === connector.credentialId)
    return getMissingRequiredScopes(credential, requiredScopes)
  }, [credentials, connector.credentialId, requiredScopes])

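  /** Sync history is fetched lazily: the detail query only runs once the card is expanded. */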
  const { data: detail, isLoading: detailLoading } = useConnectorDetail(
    expanded ? knowledgeBaseId : undefined,
    expanded ? connector.id : undefined
  )
  const syncLogs = detail?.syncLogs ?? []

  return (
    <div className='rounded-[8px] border border-[var(--border-1)]'>
      <div className='flex items-center justify-between px-[12px] py-[10px]'>
        <div className='flex items-center gap-[10px]'>
          {Icon && <Icon className='h-5 w-5 flex-shrink-0' />}
          <div className='flex flex-col gap-[2px]'>
            <div className='flex items-center gap-[8px]'>
              <span className='font-medium text-[13px] text-[var(--text-primary)]'>
                {meta?.name || connector.connectorType}
              </span>
              <Badge variant={statusConfig.variant} className='text-[10px]'>
                {connector.status === 'syncing' && (
                  <Loader2 className='mr-1 h-3 w-3 animate-spin' />
                )}
                {statusConfig.label}
              </Badge>
            </div>
            <div className='flex items-center gap-[6px] text-[11px] text-[var(--text-muted)]'>
              {connector.lastSyncAt && (
                <span>Last sync: {format(new Date(connector.lastSyncAt), 'MMM d, h:mm a')}</span>
              )}
              {connector.lastSyncDocCount !== null && (
                <>
                  <span>·</span>
                  <span>{connector.lastSyncDocCount} docs</span>
                </>
              )}
              {connector.nextSyncAt && connector.status === 'active' && (
                <>
                  <span>·</span>
                  <span>
                    Next sync:{' '}
                    {formatDistanceToNow(new Date(connector.nextSyncAt), { addSuffix: true })}
                  </span>
                </>
              )}
              {connector.lastSyncError && (
                <Tooltip.Root>
                  <Tooltip.Trigger asChild>
                    <AlertCircle className='h-3 w-3 text-[var(--text-error)]' />
                  </Tooltip.Trigger>
                  <Tooltip.Content>{connector.lastSyncError}</Tooltip.Content>
                </Tooltip.Root>
              )}
            </div>
          </div>
        </div>

        <div className='flex items-center gap-[4px]'>
          {canEdit && (
            <>
              <Tooltip.Root>
                <Tooltip.Trigger asChild>
                  <Button
                    variant='ghost'
                    className='h-7 w-7 p-0'
                    onClick={onSync}
                    disabled={connector.status === 'syncing' || isSyncing || syncCooldown}
                  >
                    <RefreshCw
                      className={cn(
                        'h-3.5 w-3.5',
                        connector.status === 'syncing' && 'animate-spin'
                      )}
                    />
                  </Button>
                </Tooltip.Trigger>
                <Tooltip.Content>
                  {syncCooldown ? 'Sync recently triggered' : 'Sync now'}
                </Tooltip.Content>
              </Tooltip.Root>

              <Tooltip.Root>
                <Tooltip.Trigger asChild>
                  <Button variant='ghost' className='h-7 w-7 p-0' onClick={onEdit}>
                    <Settings className='h-3.5 w-3.5' />
                  </Button>
                </Tooltip.Trigger>
                <Tooltip.Content>Settings</Tooltip.Content>
              </Tooltip.Root>

              <Tooltip.Root>
                <Tooltip.Trigger asChild>
                  <Button variant='ghost' className='h-7 w-7 p-0' onClick={onTogglePause}>
                    {connector.status === 'paused' ? (
                      <Play className='h-3.5 w-3.5' />
                    ) : (
                      <Pause className='h-3.5 w-3.5' />
                    )}
                  </Button>
                </Tooltip.Trigger>
                <Tooltip.Content>
                  {connector.status === 'paused' ? 'Resume' : 'Pause'}
                </Tooltip.Content>
              </Tooltip.Root>

              <Tooltip.Root>
                <Tooltip.Trigger asChild>
                  <Button variant='ghost' className='h-7 w-7 p-0' onClick={onDelete}>
                    <Trash className='h-3.5 w-3.5' />
                  </Button>
                </Tooltip.Trigger>
                <Tooltip.Content>Delete</Tooltip.Content>
              </Tooltip.Root>
            </>
          )}

          <Tooltip.Root>
            <Tooltip.Trigger asChild>
              <Button
                variant='ghost'
                className='h-7 w-7 p-0'
                onClick={() => setExpanded((prev) => !prev)}
              >
                <ChevronDown
                  className={cn('h-3.5 w-3.5 transition-transform', expanded && 'rotate-180')}
                />
              </Button>
            </Tooltip.Trigger>
            <Tooltip.Content>{expanded ? 'Hide history' : 'Sync history'}</Tooltip.Content>
          </Tooltip.Root>
        </div>
      </div>

      {missingScopes.length > 0 && (
        <div className='border-[var(--border-1)] border-t px-[12px] py-[8px]'>
          <div className='flex flex-col gap-[4px] rounded-[4px] border bg-[var(--surface-2)] px-[8px] py-[6px]'>
            <div className='flex items-center font-medium text-[12px]'>
              <span className='mr-[6px] inline-block h-[6px] w-[6px] rounded-[2px] bg-amber-500' />
              Additional permissions required
            </div>
            {canEdit && (
              <Button
                variant='active'
                onClick={() => setShowOAuthModal(true)}
                className='w-full px-[8px] py-[4px] font-medium text-[12px]'
              >
                Update access
              </Button>
            )}
          </div>
        </div>
      )}

      {expanded && (
        <div className='border-[var(--border-1)] border-t px-[12px] py-[8px]'>
          <SyncHistory logs={syncLogs} isLoading={detailLoading} />
        </div>
      )}

      {showOAuthModal && serviceId && providerId && (
        <OAuthRequiredModal
          isOpen={showOAuthModal}
          onClose={() => setShowOAuthModal(false)}
          provider={providerId as OAuthProvider}
          toolName={connectorConfig?.name ?? connector.connectorType}
          requiredScopes={getCanonicalScopesForProvider(providerId)}
          newScopes={missingScopes}
          serviceId={serviceId}
        />
      )}
    </div>
  )
}

interface SyncHistoryProps {
  logs: SyncLogData[]
  isLoading: boolean
}

function SyncHistory({ logs, isLoading }: SyncHistoryProps) {
  if (isLoading) {
    return (
      <div className='flex items-center gap-2 py-[4px] text-[11px] text-[var(--text-muted)]'>
        <Loader2 className='h-3 w-3 animate-spin' />
        Loading sync history...
      </div>
    )
  }

  if (logs.length === 0) {
    return <p className='py-[4px] text-[11px] text-[var(--text-muted)]'>No sync history yet.</p>
  }

  return (
    <div className='flex flex-col gap-[6px]'>
      {logs.map((log) => {
        const isError = log.status === 'error' || log.status === 'failed'
        const isRunning = log.status === 'running' || log.status === 'syncing'
        const totalChanges = log.docsAdded + log.docsUpdated + log.docsDeleted

        return (
          <div key={log.id} className='flex items-start gap-[8px] text-[11px]'>
            <div className='mt-[1px] flex-shrink-0'>
              {isRunning ? (
                <Loader2 className='h-3 w-3 animate-spin text-[var(--text-muted)]' />
              ) : isError ? (
                <XCircle className='h-3 w-3 text-[var(--text-error)]' />
              ) : (
                <CheckCircle2 className='h-3 w-3 text-[var(--color-green-600)]' />
              )}
            </div>

            <div className='flex min-w-0 flex-1 flex-col gap-[1px]'>
              <div className='flex items-center gap-[6px]'>
                <span className='text-[var(--text-muted)]'>
                  {format(new Date(log.startedAt), 'MMM d, h:mm a')}
                </span>
                {!isRunning && !isError && (
                  <span className='text-[var(--text-muted)]'>
                    {totalChanges > 0 ? (
                      <>
                        {log.docsAdded > 0 && (
                          <span className='text-[var(--color-green-600)]'>+{log.docsAdded}</span>
                        )}
                        {log.docsUpdated > 0 && (
                          <>
                            {log.docsAdded > 0 && ' '}
                            <span className='text-[var(--color-amber-600)]'>
                              ~{log.docsUpdated}
                            </span>
                          </>
                        )}
                        {log.docsDeleted > 0 && (
                          <>
                            {(log.docsAdded > 0 || log.docsUpdated > 0) && ' '}
                            <span className='text-[var(--text-error)]'>-{log.docsDeleted}</span>
                          </>
                        )}
                      </>
                    ) : (
                      'No changes'
                    )}
                  </span>
                )}
                {isRunning && <span className='text-[var(--text-muted)]'>In progress...</span>}
              </div>

              {isError && log.errorMessage && (
                <span className='truncate text-[var(--text-error)]'>{log.errorMessage}</span>
              )}
            </div>
          </div>
        )
      })}
    </div>
  )
}
@@ -17,6 +17,7 @@ interface DocumentContextMenuProps {
   * Document-specific actions (shown when right-clicking on a document)
   */
  onOpenInNewTab?: () => void
  onOpenSource?: () => void
  onRename?: () => void
  onToggleEnabled?: () => void
  onViewTags?: () => void
@@ -74,6 +75,7 @@ export function DocumentContextMenu({
  menuRef,
  onClose,
  onOpenInNewTab,
  onOpenSource,
  onRename,
  onToggleEnabled,
  onViewTags,
@@ -129,7 +131,17 @@ export function DocumentContextMenu({
          Open in new tab
        </PopoverItem>
      )}
      {!isMultiSelect && onOpenSource && (
        <PopoverItem
          onClick={() => {
            onOpenSource()
            onClose()
          }}
        >
          Open source
        </PopoverItem>
      )}
      {!isMultiSelect && (onOpenInNewTab || onOpenSource) && <PopoverDivider />}

      {/* Edit and view actions */}
      {!isMultiSelect && onRename && (
@@ -170,6 +182,7 @@ export function DocumentContextMenu({
      {/* Destructive action */}
      {onDelete &&
        ((!isMultiSelect && onOpenInNewTab) ||
          (!isMultiSelect && onOpenSource) ||
          (!isMultiSelect && onRename) ||
          (!isMultiSelect && hasTags && onViewTags) ||
          onToggleEnabled) && <PopoverDivider />}

@@ -0,0 +1,339 @@
'use client'

import { useMemo, useState } from 'react'
import { createLogger } from '@sim/logger'
import { ExternalLink, Loader2, RotateCcw } from 'lucide-react'
import {
  Button,
  ButtonGroup,
  ButtonGroupItem,
  Combobox,
  Input,
  Label,
  Modal,
  ModalBody,
  ModalContent,
  ModalFooter,
  ModalHeader,
  ModalTabs,
  ModalTabsContent,
  ModalTabsList,
  ModalTabsTrigger,
} from '@/components/emcn'
import { Skeleton } from '@/components/ui/skeleton'
import { CONNECTOR_REGISTRY } from '@/connectors/registry'
import type { ConnectorConfig } from '@/connectors/types'
import type { ConnectorData } from '@/hooks/queries/kb/connectors'
import {
  useConnectorDocuments,
  useExcludeConnectorDocument,
  useRestoreConnectorDocument,
  useUpdateConnector,
} from '@/hooks/queries/kb/connectors'

const logger = createLogger('EditConnectorModal')

const SYNC_INTERVALS = [
  { label: 'Every hour', value: 60 },
  { label: 'Every 6 hours', value: 360 },
  { label: 'Daily', value: 1440 },
  { label: 'Weekly', value: 10080 },
  { label: 'Manual only', value: 0 },
] as const

/** Keys injected by the sync engine — not user-editable */
const INTERNAL_CONFIG_KEYS = new Set(['tagSlotMapping', 'disabledTagIds'])

interface EditConnectorModalProps {
  open: boolean
  onOpenChange: (open: boolean) => void
  knowledgeBaseId: string
  connector: ConnectorData
}

export function EditConnectorModal({
  open,
  onOpenChange,
  knowledgeBaseId,
  connector,
}: EditConnectorModalProps) {
  const connectorConfig = CONNECTOR_REGISTRY[connector.connectorType] ?? null

  const initialSourceConfig = useMemo(() => {
    const config: Record<string, string> = {}
    for (const [key, value] of Object.entries(connector.sourceConfig)) {
      if (!INTERNAL_CONFIG_KEYS.has(key)) {
        config[key] = String(value ?? '')
      }
    }
    return config
  }, [connector.sourceConfig])

  const [activeTab, setActiveTab] = useState('settings')
  const [sourceConfig, setSourceConfig] = useState<Record<string, string>>(initialSourceConfig)
  const [syncInterval, setSyncInterval] = useState(connector.syncIntervalMinutes)
  const [error, setError] = useState<string | null>(null)

  const { mutate: updateConnector, isPending: isSaving } = useUpdateConnector()

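  /** Dirty-check against the stored connector so Save stays disabled until something actually changes. */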
const hasChanges = useMemo(() => {
|
||||
if (syncInterval !== connector.syncIntervalMinutes) return true
|
||||
for (const [key, value] of Object.entries(sourceConfig)) {
|
||||
if (String(connector.sourceConfig[key] ?? '') !== value) return true
|
||||
}
|
||||
return false
|
||||
}, [sourceConfig, syncInterval, connector.syncIntervalMinutes, connector.sourceConfig])
|
||||
|
||||
const handleSave = () => {
|
||||
setError(null)
|
||||
|
||||
const updates: { sourceConfig?: Record<string, unknown>; syncIntervalMinutes?: number } = {}
|
||||
|
||||
if (syncInterval !== connector.syncIntervalMinutes) {
|
||||
updates.syncIntervalMinutes = syncInterval
|
||||
}
|
||||
|
||||
const configChanged = Object.entries(sourceConfig).some(
|
||||
([key, value]) => String(connector.sourceConfig[key] ?? '') !== value
|
||||
)
|
||||
if (configChanged) {
|
||||
updates.sourceConfig = { ...connector.sourceConfig, ...sourceConfig }
|
||||
}
|
||||
|
||||
if (Object.keys(updates).length === 0) {
|
||||
onOpenChange(false)
|
||||
return
|
||||
}
|
||||
|
||||
updateConnector(
|
||||
{ knowledgeBaseId, connectorId: connector.id, updates },
|
||||
{
|
||||
onSuccess: () => {
|
||||
onOpenChange(false)
|
||||
},
|
||||
onError: (err) => {
|
||||
logger.error('Failed to update connector', { error: err.message })
|
||||
setError(err.message)
|
||||
},
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
const displayName = connectorConfig?.name ?? connector.connectorType
|
||||
const Icon = connectorConfig?.icon
|
||||
|
||||
return (
|
||||
<Modal open={open} onOpenChange={(val) => !isSaving && onOpenChange(val)}>
|
||||
<ModalContent size='md'>
|
||||
<ModalHeader>
|
||||
<div className='flex items-center gap-2'>
|
||||
{Icon && <Icon className='h-5 w-5' />}
|
||||
Edit {displayName}
|
||||
</div>
|
||||
</ModalHeader>
|
||||
|
||||
<ModalTabs value={activeTab} onValueChange={setActiveTab}>
|
||||
<ModalTabsList>
|
||||
<ModalTabsTrigger value='settings'>Settings</ModalTabsTrigger>
|
||||
<ModalTabsTrigger value='documents'>Documents</ModalTabsTrigger>
|
||||
</ModalTabsList>
|
||||
|
||||
<ModalBody>
|
||||
<ModalTabsContent value='settings'>
|
||||
<SettingsTab
|
||||
connectorConfig={connectorConfig}
|
||||
sourceConfig={sourceConfig}
|
||||
setSourceConfig={setSourceConfig}
|
||||
syncInterval={syncInterval}
|
||||
setSyncInterval={setSyncInterval}
|
||||
error={error}
|
||||
/>
|
||||
</ModalTabsContent>
|
||||
|
||||
<ModalTabsContent value='documents'>
|
||||
<DocumentsTab knowledgeBaseId={knowledgeBaseId} connectorId={connector.id} />
|
||||
</ModalTabsContent>
|
||||
</ModalBody>
|
||||
</ModalTabs>
|
||||
|
||||
{activeTab === 'settings' && (
|
||||
<ModalFooter>
|
||||
<Button variant='default' onClick={() => onOpenChange(false)} disabled={isSaving}>
|
||||
Cancel
|
||||
</Button>
|
||||
<Button variant='tertiary' onClick={handleSave} disabled={!hasChanges || isSaving}>
|
||||
{isSaving ? (
|
||||
<>
|
||||
<Loader2 className='mr-1.5 h-3.5 w-3.5 animate-spin' />
|
||||
Saving...
|
||||
</>
|
||||
) : (
|
||||
'Save'
|
||||
)}
|
||||
</Button>
|
||||
</ModalFooter>
|
||||
)}
|
||||
</ModalContent>
|
||||
</Modal>
|
||||
)
|
||||
}

interface SettingsTabProps {
  connectorConfig: ConnectorConfig | null
  sourceConfig: Record<string, string>
  setSourceConfig: React.Dispatch<React.SetStateAction<Record<string, string>>>
  syncInterval: number
  setSyncInterval: (v: number) => void
  error: string | null
}

function SettingsTab({
  connectorConfig,
  sourceConfig,
  setSourceConfig,
  syncInterval,
  setSyncInterval,
  error,
}: SettingsTabProps) {
  return (
    <div className='flex flex-col gap-[12px]'>
      {connectorConfig?.configFields.map((field) => (
        <div key={field.id} className='flex flex-col gap-[4px]'>
          <Label>
            {field.title}
            {field.required && <span className='ml-[2px] text-[var(--text-error)]'>*</span>}
          </Label>
          {field.description && (
            <p className='text-[11px] text-[var(--text-muted)]'>{field.description}</p>
          )}
          {field.type === 'dropdown' && field.options ? (
            <Combobox
              size='sm'
              options={field.options.map((opt) => ({
                label: opt.label,
                value: opt.id,
              }))}
              value={sourceConfig[field.id] || undefined}
              onChange={(value) => setSourceConfig((prev) => ({ ...prev, [field.id]: value }))}
              placeholder={field.placeholder || `Select ${field.title.toLowerCase()}`}
            />
          ) : (
            <Input
              value={sourceConfig[field.id] || ''}
              onChange={(e) => setSourceConfig((prev) => ({ ...prev, [field.id]: e.target.value }))}
              placeholder={field.placeholder}
            />
          )}
        </div>
      ))}

      <div className='flex flex-col gap-[4px]'>
        <Label>Sync Frequency</Label>
        <ButtonGroup
          value={String(syncInterval)}
          onValueChange={(val) => setSyncInterval(Number(val))}
        >
          {SYNC_INTERVALS.map((interval) => (
            <ButtonGroupItem key={interval.value} value={String(interval.value)}>
              {interval.label}
            </ButtonGroupItem>
          ))}
        </ButtonGroup>
      </div>

      {error && <p className='text-[12px] text-[var(--text-error)] leading-tight'>{error}</p>}
    </div>
  )
}

interface DocumentsTabProps {
  knowledgeBaseId: string
  connectorId: string
}

function DocumentsTab({ knowledgeBaseId, connectorId }: DocumentsTabProps) {
  const [filter, setFilter] = useState<'active' | 'excluded'>('active')

  const { data, isLoading } = useConnectorDocuments(knowledgeBaseId, connectorId, {
    includeExcluded: true,
  })

  const { mutate: excludeDoc, isPending: isExcluding } = useExcludeConnectorDocument()
  const { mutate: restoreDoc, isPending: isRestoring } = useRestoreConnectorDocument()
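
  // Active and excluded documents arrive in one query (includeExcluded), so
  // switching the filter below is purely client-side and does not refetch.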
  const documents = useMemo(() => {
    if (!data?.documents) return []
    return data.documents.filter((d) => (filter === 'excluded' ? d.userExcluded : !d.userExcluded))
  }, [data?.documents, filter])

  const counts = data?.counts ?? { active: 0, excluded: 0 }

  if (isLoading) {
    return (
      <div className='flex flex-col gap-[8px]'>
        <Skeleton className='h-6 w-full' />
        <Skeleton className='h-6 w-full' />
        <Skeleton className='h-6 w-full' />
      </div>
    )
  }

  return (
    <div className='flex flex-col gap-[16px]'>
      <ButtonGroup value={filter} onValueChange={(val) => setFilter(val as 'active' | 'excluded')}>
        <ButtonGroupItem value='active'>Active ({counts.active})</ButtonGroupItem>
        <ButtonGroupItem value='excluded'>Excluded ({counts.excluded})</ButtonGroupItem>
      </ButtonGroup>

      <div className='max-h-[320px] min-h-0 overflow-y-auto'>
        {documents.length === 0 ? (
          <p className='py-[16px] text-center text-[13px] text-[var(--text-muted)]'>
            {filter === 'excluded' ? 'No excluded documents' : 'No documents yet'}
          </p>
        ) : (
          <div className='flex flex-col gap-[8px]'>
            {documents.map((doc) => (
              <div key={doc.id} className='flex items-center justify-between'>
                <div className='flex min-w-0 items-center gap-[6px]'>
                  <span className='truncate text-[13px] text-[var(--text-primary)]'>
                    {doc.filename}
                  </span>
                  {doc.sourceUrl && (
                    <a
                      href={doc.sourceUrl}
                      target='_blank'
                      rel='noopener noreferrer'
                      className='flex-shrink-0 text-[var(--text-muted)] hover:text-[var(--text-secondary)]'
                    >
                      <ExternalLink className='h-3 w-3' />
                    </a>
                  )}
                </div>
                <Button
                  variant='ghost'
                  size='sm'
                  className='flex-shrink-0'
                  disabled={doc.userExcluded ? isRestoring : isExcluding}
                  onClick={() =>
                    doc.userExcluded
                      ? restoreDoc({ knowledgeBaseId, connectorId, documentIds: [doc.id] })
                      : excludeDoc({ knowledgeBaseId, connectorId, documentIds: [doc.id] })
                  }
                >
                  {doc.userExcluded ? (
                    <>
                      <RotateCcw className='mr-1 h-3 w-3' />
                      Restore
                    </>
                  ) : (
                    'Exclude'
                  )}
                </Button>
              </div>
            ))}
          </div>
        )}
      </div>
    </div>
  )
}

@@ -0,0 +1 @@
export { EditConnectorModal } from './edit-connector-modal'

@@ -1,5 +1,8 @@
export { ActionBar } from './action-bar/action-bar'
export { AddConnectorModal } from './add-connector-modal/add-connector-modal'
export { AddDocumentsModal } from './add-documents-modal/add-documents-modal'
export { BaseTagsModal } from './base-tags-modal/base-tags-modal'
export { ConnectorsSection } from './connectors-section/connectors-section'
export { DocumentContextMenu } from './document-context-menu'
export { EditConnectorModal } from './edit-connector-modal/edit-connector-modal'
export { RenameDocumentModal } from './rename-document-modal'

@@ -7,6 +7,7 @@ import { formatAbsoluteDate, formatRelativeTime } from '@/lib/core/utils/formatt
import { BaseTagsModal } from '@/app/workspace/[workspaceId]/knowledge/[id]/components'
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
import { useContextMenu } from '@/app/workspace/[workspaceId]/w/components/sidebar/hooks'
import { CONNECTOR_REGISTRY } from '@/connectors/registry'
import { DeleteKnowledgeBaseModal } from '../delete-knowledge-base-modal/delete-knowledge-base-modal'
import { EditKnowledgeBaseModal } from '../edit-knowledge-base-modal/edit-knowledge-base-modal'
import { KnowledgeBaseContextMenu } from '../knowledge-base-context-menu/knowledge-base-context-menu'
@@ -18,6 +19,7 @@ interface BaseCardProps {
  description: string
  createdAt?: string
  updatedAt?: string
  connectorTypes?: string[]
  onUpdate?: (id: string, name: string, description: string) => Promise<void>
  onDelete?: (id: string) => Promise<void>
}
@@ -75,6 +77,7 @@ export function BaseCard({
  docCount,
  description,
  updatedAt,
  connectorTypes = [],
  onUpdate,
  onDelete,
}: BaseCardProps) {
@@ -200,9 +203,33 @@ export function BaseCard({

          <div className='h-0 w-full border-[var(--divider)] border-t' />

          <p className='line-clamp-2 h-[36px] text-[12px] text-[var(--text-tertiary)] leading-[18px]'>
            {description}
          </p>
          <div className='flex items-start justify-between gap-[8px]'>
            <p className='line-clamp-2 h-[36px] flex-1 text-[12px] text-[var(--text-tertiary)] leading-[18px]'>
              {description}
            </p>
            {connectorTypes.length > 0 && (
              <div className='flex flex-shrink-0 items-center'>
                {connectorTypes.map((type, index) => {
                  const config = CONNECTOR_REGISTRY[type]
                  if (!config?.icon) return null
                  const Icon = config.icon
                  return (
                    <Tooltip.Root key={type}>
                      <Tooltip.Trigger asChild>
                        <div
                          className='flex h-[20px] w-[20px] flex-shrink-0 items-center justify-center rounded-[4px] bg-[var(--surface-5)]'
                          style={{ marginLeft: index > 0 ? '-4px' : '0' }}
                        >
                          <Icon className='h-[12px] w-[12px] text-[var(--text-secondary)]' />
                        </div>
                      </Tooltip.Trigger>
                      <Tooltip.Content>{config.name}</Tooltip.Content>
                    </Tooltip.Root>
                  )
                })}
              </div>
            )}
          </div>
        </div>
      </div>
    </div>
  </div>

@@ -22,7 +22,7 @@ import { cn } from '@/lib/core/utils/cn'
import { formatFileSize, validateKnowledgeBaseFile } from '@/lib/uploads/utils/file-utils'
import { ACCEPT_ATTRIBUTE } from '@/lib/uploads/utils/validation'
import { useKnowledgeUpload } from '@/app/workspace/[workspaceId]/knowledge/hooks/use-knowledge-upload'
import { useCreateKnowledgeBase, useDeleteKnowledgeBase } from '@/hooks/queries/knowledge'
import { useCreateKnowledgeBase, useDeleteKnowledgeBase } from '@/hooks/queries/kb/knowledge'

const logger = createLogger('CreateBaseModal')

@@ -14,7 +14,7 @@ import {
} from '@/components/emcn'
import { Trash } from '@/components/emcn/icons/trash'
import { filterButtonClass } from '@/app/workspace/[workspaceId]/knowledge/components/constants'
import { useUpdateKnowledgeBase } from '@/hooks/queries/knowledge'
import { useUpdateKnowledgeBase } from '@/hooks/queries/kb/knowledge'

const logger = createLogger('KnowledgeHeader')

@@ -2,7 +2,7 @@ import { useCallback, useState } from 'react'
import { createLogger } from '@sim/logger'
import { useQueryClient } from '@tanstack/react-query'
import { getFileExtension, getMimeTypeFromExtension } from '@/lib/uploads/utils/file-utils'
import { knowledgeKeys } from '@/hooks/queries/knowledge'
import { knowledgeKeys } from '@/hooks/queries/kb/knowledge'

const logger = createLogger('KnowledgeUpload')

@@ -32,7 +32,7 @@ import {
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
import { useContextMenu } from '@/app/workspace/[workspaceId]/w/components/sidebar/hooks'
import { useKnowledgeBasesList } from '@/hooks/kb/use-knowledge'
import { useDeleteKnowledgeBase, useUpdateKnowledgeBase } from '@/hooks/queries/knowledge'
import { useDeleteKnowledgeBase, useUpdateKnowledgeBase } from '@/hooks/queries/kb/knowledge'
import { useDebounce } from '@/hooks/use-debounce'

const logger = createLogger('Knowledge')
@@ -153,6 +153,7 @@ export function Knowledge() {
        description: kb.description || 'No description provided',
        createdAt: kb.createdAt,
        updatedAt: kb.updatedAt,
        connectorTypes: kb.connectorTypes ?? [],
      })

  /**
@@ -283,6 +284,7 @@ export function Knowledge() {
              title={displayData.title}
              docCount={displayData.docCount}
              description={displayData.description}
              connectorTypes={displayData.connectorTypes}
              createdAt={displayData.createdAt}
              updatedAt={displayData.updatedAt}
              onUpdate={handleUpdateKnowledgeBase}

@@ -32,7 +32,10 @@ import {
  useTestNotification,
  useUpdateNotification,
} from '@/hooks/queries/notifications'
import { useConnectedAccounts, useConnectOAuthService } from '@/hooks/queries/oauth-connections'
import {
  useConnectedAccounts,
  useConnectOAuthService,
} from '@/hooks/queries/oauth/oauth-connections'
import { CORE_TRIGGER_TYPES, type CoreTriggerType } from '@/stores/logs/filters/types'
import { SlackChannelSelector } from './components/slack-channel-selector'
import { WorkflowSelector } from './components/workflow-selector'

@@ -95,6 +95,7 @@ const SCOPE_DESCRIPTIONS: Record<string, string> = {
  'offline.access': 'Access account when not using the application',
  'data.records:read': 'Read records',
  'data.records:write': 'Write to records',
  'schema.bases:read': 'Read base schemas and table metadata',
  'webhook:manage': 'Manage webhooks',
  'page.read': 'Read Notion pages',
  'page.write': 'Write to Notion pages',

@@ -20,7 +20,10 @@ import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/c
import type { SubBlockConfig } from '@/blocks/types'
import { CREDENTIAL, CREDENTIAL_SET } from '@/executor/constants'
import { useCredentialSets } from '@/hooks/queries/credential-sets'
import { useOAuthCredentialDetail, useOAuthCredentials } from '@/hooks/queries/oauth-credentials'
import {
  useOAuthCredentialDetail,
  useOAuthCredentials,
} from '@/hooks/queries/oauth/oauth-credentials'
import { useOrganizations } from '@/hooks/queries/organization'
import { useSubscriptionData } from '@/hooks/queries/subscription'
import { getMissingRequiredScopes } from '@/hooks/use-oauth-scope-status'

@@ -10,7 +10,7 @@ import type { KnowledgeBaseData } from '@/lib/knowledge/types'
import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-sub-block-value'
import type { SubBlockConfig } from '@/blocks/types'
import { useKnowledgeBasesList } from '@/hooks/kb/use-knowledge'
import { fetchKnowledgeBase, knowledgeKeys } from '@/hooks/queries/knowledge'
import { fetchKnowledgeBase, knowledgeKeys } from '@/hooks/queries/kb/knowledge'

interface KnowledgeBaseSelectorProps {
  blockId: string

@@ -12,7 +12,10 @@ import {
} from '@/lib/oauth'
import { OAuthRequiredModal } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/credential-selector/components/oauth-required-modal'
import { CREDENTIAL } from '@/executor/constants'
import { useOAuthCredentialDetail, useOAuthCredentials } from '@/hooks/queries/oauth-credentials'
import {
  useOAuthCredentialDetail,
  useOAuthCredentials,
} from '@/hooks/queries/oauth/oauth-credentials'
import { getMissingRequiredScopes } from '@/hooks/use-oauth-scope-status'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'

@@ -38,7 +38,7 @@ import { getDependsOnFields } from '@/blocks/utils'
import { useKnowledgeBase } from '@/hooks/kb/use-knowledge'
import { useCustomTools } from '@/hooks/queries/custom-tools'
import { useMcpServers, useMcpToolsQuery } from '@/hooks/queries/mcp'
import { useCredentialName } from '@/hooks/queries/oauth-credentials'
import { useCredentialName } from '@/hooks/queries/oauth/oauth-credentials'
import { useReactivateSchedule, useScheduleInfo } from '@/hooks/queries/schedules'
import { useSkills } from '@/hooks/queries/skills'
import { useDeployChildWorkflow } from '@/hooks/queries/workflows'

@@ -21,7 +21,7 @@ import {
  useConnectOAuthService,
  useDisconnectOAuthService,
  useOAuthConnections,
} from '@/hooks/queries/oauth-connections'
} from '@/hooks/queries/oauth/oauth-connections'
import { usePermissionConfig } from '@/hooks/use-permission-config'

const logger = createLogger('Integrations')

53
apps/sim/background/knowledge-connector-sync.ts
Normal file
@@ -0,0 +1,53 @@
import { createLogger } from '@sim/logger'
import { task } from '@trigger.dev/sdk'
import { executeSync } from '@/lib/knowledge/connectors/sync-engine'

const logger = createLogger('TriggerKnowledgeConnectorSync')

export type ConnectorSyncPayload = {
  connectorId: string
  fullSync?: boolean
  requestId: string
}

export const knowledgeConnectorSync = task({
  id: 'knowledge-connector-sync',
  maxDuration: 1800,
  machine: 'large-1x',
  retry: {
    maxAttempts: 3,
    factor: 2,
    minTimeoutInMs: 5000,
    maxTimeoutInMs: 30000,
  },
  queue: {
    concurrencyLimit: 5,
    name: 'connector-sync-queue',
  },
  run: async (payload: ConnectorSyncPayload) => {
    const { connectorId, fullSync, requestId } = payload

    logger.info(`[${requestId}] Starting connector sync: ${connectorId}`)

    try {
      const result = await executeSync(connectorId, { fullSync })

      logger.info(`[${requestId}] Connector sync completed`, {
        connectorId,
        added: result.docsAdded,
        updated: result.docsUpdated,
        deleted: result.docsDeleted,
        unchanged: result.docsUnchanged,
      })

      return {
        success: !result.error,
        connectorId,
        ...result,
      }
    } catch (error) {
      logger.error(`[${requestId}] Connector sync failed: ${connectorId}`, error)
      throw error
    }
  },
})
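
// Illustrative usage from server code (assumes the trigger.dev v3 SDK's
// `.trigger()` method; adapt to however tasks are enqueued in this codebase):
//
//   await knowledgeConnectorSync.trigger({
//     connectorId,
//     fullSync: false,
//     requestId: crypto.randomUUID(),
//   })
//
// Runs queue on 'connector-sync-queue', so at most 5 syncs execute
// concurrently, and failures retry up to 3 times with exponential backoff.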

@@ -25,6 +25,8 @@ export const AirtableBlock: BlockConfig<AirtableResponse> = {
        { label: 'Get Record', id: 'get' },
        { label: 'Create Records', id: 'create' },
        { label: 'Update Record', id: 'update' },
        { label: 'List Bases', id: 'listBases' },
        { label: 'Get Base Schema', id: 'getSchema' },
      ],
      value: () => 'list',
    },
@@ -36,6 +38,7 @@ export const AirtableBlock: BlockConfig<AirtableResponse> = {
      requiredScopes: [
        'data.records:read',
        'data.records:write',
        'schema.bases:read',
        'user.email:read',
        'webhook:manage',
      ],
@@ -48,6 +51,7 @@ export const AirtableBlock: BlockConfig<AirtableResponse> = {
      type: 'short-input',
      placeholder: 'Enter your base ID (e.g., appXXXXXXXXXXXXXX)',
      dependsOn: ['credential'],
      condition: { field: 'operation', value: 'listBases', not: true },
      required: true,
    },
    {
@@ -56,6 +60,7 @@ export const AirtableBlock: BlockConfig<AirtableResponse> = {
      type: 'short-input',
      placeholder: 'Enter table ID (e.g., tblXXXXXXXXXXXXXX)',
      dependsOn: ['credential', 'baseId'],
      condition: { field: 'operation', value: ['listBases', 'getSchema'], not: true },
      required: true,
    },
    {
@@ -200,6 +205,8 @@ Return ONLY the valid JSON object - no explanations, no markdown.`,
      'airtable_create_records',
      'airtable_update_record',
      'airtable_update_multiple_records',
      'airtable_list_bases',
      'airtable_get_base_schema',
    ],
    config: {
      tool: (params) => {
@@ -214,6 +221,10 @@ Return ONLY the valid JSON object - no explanations, no markdown.`,
            return 'airtable_update_record'
          case 'updateMultiple':
            return 'airtable_update_multiple_records'
          case 'listBases':
            return 'airtable_list_bases'
          case 'getSchema':
            return 'airtable_get_base_schema'
          default:
            throw new Error(`Invalid Airtable operation: ${params.operation}`)
        }
@@ -267,9 +278,11 @@ Return ONLY the valid JSON object - no explanations, no markdown.`,
  },
  // Output structure depends on the operation, covered by AirtableResponse union type
  outputs: {
    records: { type: 'json', description: 'Retrieved record data' }, // Optional: for list, create, updateMultiple
    record: { type: 'json', description: 'Single record data' }, // Optional: for get, update single
    metadata: { type: 'json', description: 'Operation metadata' }, // Required: present in all responses
    records: { type: 'json', description: 'Retrieved record data' },
    record: { type: 'json', description: 'Single record data' },
    bases: { type: 'json', description: 'List of accessible Airtable bases' },
    tables: { type: 'json', description: 'Table schemas with fields and views' },
    metadata: { type: 'json', description: 'Operation metadata' },
    // Trigger outputs
    event_type: { type: 'string', description: 'Type of Airtable event' },
    base_id: { type: 'string', description: 'Airtable base identifier' },

@@ -6,9 +6,11 @@ export const KnowledgeBlock: BlockConfig = {
  name: 'Knowledge',
  description: 'Use vector search',
  longDescription:
    'Integrate Knowledge into the workflow. Can search, upload chunks, and create documents.',
    'Integrate Knowledge into the workflow. Perform full CRUD operations on documents, chunks, and tags.',
  bestPractices: `
  - Clarify which tags are available for the knowledge base to understand whether to use tag filters on a search.
  - Use List Documents to enumerate documents before operating on them.
  - Use List Chunks to inspect a document's contents before updating or deleting chunks.
  `,
  bgColor: '#00B0B0',
  icon: PackageSearchIcon,
@@ -21,20 +23,41 @@ export const KnowledgeBlock: BlockConfig = {
      type: 'dropdown',
      options: [
        { label: 'Search', id: 'search' },
        { label: 'Upload Chunk', id: 'upload_chunk' },
        { label: 'List Documents', id: 'list_documents' },
        { label: 'Create Document', id: 'create_document' },
        { label: 'Delete Document', id: 'delete_document' },
        { label: 'List Chunks', id: 'list_chunks' },
        { label: 'Upload Chunk', id: 'upload_chunk' },
        { label: 'Update Chunk', id: 'update_chunk' },
        { label: 'Delete Chunk', id: 'delete_chunk' },
        { label: 'List Tags', id: 'list_tags' },
      ],
      value: () => 'search',
    },

    // Knowledge Base selector — basic mode (visual selector)
    {
      id: 'knowledgeBaseId',
      id: 'knowledgeBaseSelector',
      title: 'Knowledge Base',
      type: 'knowledge-base-selector',
      canonicalParamId: 'knowledgeBaseId',
      placeholder: 'Select knowledge base',
      multiSelect: false,
      required: true,
      condition: { field: 'operation', value: ['search', 'upload_chunk', 'create_document'] },
      mode: 'basic',
    },
    // Knowledge Base selector — advanced mode (manual ID input)
    {
      id: 'manualKnowledgeBaseId',
      title: 'Knowledge Base ID',
      type: 'short-input',
      canonicalParamId: 'knowledgeBaseId',
      placeholder: 'Enter knowledge base ID',
      required: true,
      mode: 'advanced',
    },

    // --- Search ---
    {
      id: 'query',
      title: 'Search Query',
@@ -57,15 +80,72 @@ export const KnowledgeBlock: BlockConfig = {
      placeholder: 'Add tag filters',
      condition: { field: 'operation', value: 'search' },
    },

    // --- List Documents ---
    {
      id: 'documentId',
      id: 'search',
      title: 'Search',
      type: 'short-input',
      placeholder: 'Filter documents by filename',
      condition: { field: 'operation', value: 'list_documents' },
    },
    {
      id: 'enabledFilter',
      title: 'Status Filter',
      type: 'dropdown',
      options: [
        { label: 'All', id: 'all' },
        { label: 'Enabled', id: 'enabled' },
        { label: 'Disabled', id: 'disabled' },
      ],
      condition: { field: 'operation', value: 'list_documents' },
    },
    {
      id: 'limit',
      title: 'Limit',
      type: 'short-input',
      placeholder: 'Max items to return (default: 50)',
      condition: { field: 'operation', value: ['list_documents', 'list_chunks'] },
    },
    {
      id: 'offset',
      title: 'Offset',
      type: 'short-input',
      placeholder: 'Number of items to skip (default: 0)',
      condition: { field: 'operation', value: ['list_documents', 'list_chunks'] },
    },

    // Document selector — basic mode (visual selector)
    {
      id: 'documentSelector',
      title: 'Document',
      type: 'document-selector',
      canonicalParamId: 'documentId',
      placeholder: 'Select document',
      dependsOn: ['knowledgeBaseId'],
      required: true,
      condition: { field: 'operation', value: 'upload_chunk' },
      mode: 'basic',
      condition: {
        field: 'operation',
        value: ['upload_chunk', 'delete_document', 'list_chunks', 'update_chunk', 'delete_chunk'],
      },
    },
    // Document selector — advanced mode (manual ID input)
    {
      id: 'manualDocumentId',
      title: 'Document ID',
      type: 'short-input',
      canonicalParamId: 'documentId',
      placeholder: 'Enter document ID',
      required: true,
      mode: 'advanced',
      condition: {
        field: 'operation',
        value: ['upload_chunk', 'delete_document', 'list_chunks', 'update_chunk', 'delete_chunk'],
      },
    },

    // --- Upload Chunk ---
    {
      id: 'content',
      title: 'Chunk Content',
@@ -75,13 +155,15 @@ export const KnowledgeBlock: BlockConfig = {
      required: true,
      condition: { field: 'operation', value: 'upload_chunk' },
    },

    // --- Create Document ---
    {
      id: 'name',
      title: 'Document Name',
      type: 'short-input',
      placeholder: 'Enter document name',
      required: true,
      condition: { field: 'operation', value: ['create_document'] },
      condition: { field: 'operation', value: 'create_document' },
    },
    {
      id: 'content',
@@ -90,18 +172,75 @@ export const KnowledgeBlock: BlockConfig = {
      placeholder: 'Enter the document content',
      rows: 6,
      required: true,
      condition: { field: 'operation', value: ['create_document'] },
      condition: { field: 'operation', value: 'create_document' },
    },
    // Dynamic tag entry for Create Document
    {
      id: 'documentTags',
      title: 'Document Tags',
      type: 'document-tag-entry',
      condition: { field: 'operation', value: 'create_document' },
    },

    // --- Update Chunk / Delete Chunk ---
    {
      id: 'chunkId',
      title: 'Chunk ID',
      type: 'short-input',
      placeholder: 'Enter chunk ID',
      required: true,
      condition: { field: 'operation', value: ['update_chunk', 'delete_chunk'] },
    },
    {
      id: 'content',
      title: 'New Content',
      type: 'long-input',
      placeholder: 'Enter updated chunk content',
      rows: 6,
      condition: { field: 'operation', value: 'update_chunk' },
    },
    {
      id: 'enabled',
      title: 'Enabled',
      type: 'dropdown',
      options: [
        { label: 'Yes', id: 'true' },
        { label: 'No', id: 'false' },
      ],
      condition: { field: 'operation', value: 'update_chunk' },
    },

    // --- List Chunks ---
    {
      id: 'chunkSearch',
      title: 'Search',
      type: 'short-input',
      placeholder: 'Filter chunks by content',
      condition: { field: 'operation', value: 'list_chunks' },
    },
    {
      id: 'chunkEnabledFilter',
      title: 'Status Filter',
      type: 'dropdown',
      options: [
        { label: 'All', id: 'all' },
        { label: 'Enabled', id: 'true' },
        { label: 'Disabled', id: 'false' },
      ],
      condition: { field: 'operation', value: 'list_chunks' },
    },
  ],
  tools: {
    access: ['knowledge_search', 'knowledge_upload_chunk', 'knowledge_create_document'],
    access: [
      'knowledge_search',
      'knowledge_upload_chunk',
      'knowledge_create_document',
      'knowledge_list_tags',
      'knowledge_list_documents',
      'knowledge_delete_document',
      'knowledge_list_chunks',
      'knowledge_update_chunk',
      'knowledge_delete_chunk',
    ],
    config: {
      tool: (params) => {
        switch (params.operation) {
@@ -111,25 +250,62 @@ export const KnowledgeBlock: BlockConfig = {
            return 'knowledge_upload_chunk'
          case 'create_document':
            return 'knowledge_create_document'
          case 'list_tags':
            return 'knowledge_list_tags'
          case 'list_documents':
            return 'knowledge_list_documents'
          case 'delete_document':
            return 'knowledge_delete_document'
          case 'list_chunks':
            return 'knowledge_list_chunks'
          case 'update_chunk':
            return 'knowledge_update_chunk'
          case 'delete_chunk':
            return 'knowledge_delete_chunk'
          default:
            return 'knowledge_search'
        }
      },
      params: (params) => {
        // Validate required fields for each operation
        if (params.operation === 'search' && !params.knowledgeBaseId) {
          throw new Error('Knowledge base ID is required for search operation')
        const knowledgeBaseId = params.knowledgeBaseId ? String(params.knowledgeBaseId).trim() : ''
        if (!knowledgeBaseId) {
          throw new Error('Knowledge base ID is required')
        }
        if (
          (params.operation === 'upload_chunk' || params.operation === 'create_document') &&
          !params.knowledgeBaseId
        ) {
          throw new Error(
            'Knowledge base ID is required for upload_chunk and create_document operations'
          )
        params.knowledgeBaseId = knowledgeBaseId

        const docOps = [
          'upload_chunk',
          'delete_document',
          'list_chunks',
          'update_chunk',
          'delete_chunk',
        ]
        if (docOps.includes(params.operation)) {
          const documentId = params.documentId ? String(params.documentId).trim() : ''
          if (!documentId) {
            throw new Error(`Document ID is required for ${params.operation} operation`)
          }
          params.documentId = documentId
        }
        if (params.operation === 'upload_chunk' && !params.documentId) {
          throw new Error('Document ID is required for upload_chunk operation')

        const chunkOps = ['update_chunk', 'delete_chunk']
        if (chunkOps.includes(params.operation)) {
          const chunkId = params.chunkId ? String(params.chunkId).trim() : ''
          if (!chunkId) {
            throw new Error(`Chunk ID is required for ${params.operation} operation`)
          }
          params.chunkId = chunkId
        }

        // Map list_chunks sub-block fields to tool params
        if (params.operation === 'list_chunks') {
          if (params.chunkSearch) params.search = params.chunkSearch
          if (params.chunkEnabledFilter) params.enabled = params.chunkEnabledFilter
        }

        // Convert enabled dropdown string to boolean for update_chunk
        if (params.operation === 'update_chunk' && typeof params.enabled === 'string') {
          params.enabled = params.enabled === 'true'
        }
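
        // Everything else (query, topK, content, documentTags, tag filters, ...)
        // passes through unchanged to the selected knowledge tool.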
        return params
@@ -142,12 +318,18 @@ export const KnowledgeBlock: BlockConfig = {
      query: { type: 'string', description: 'Search query terms' },
      topK: { type: 'number', description: 'Number of results' },
      documentId: { type: 'string', description: 'Document identifier' },
      chunkId: { type: 'string', description: 'Chunk identifier' },
      content: { type: 'string', description: 'Content data' },
      name: { type: 'string', description: 'Document name' },
      // Dynamic tag filters for search
      search: { type: 'string', description: 'Search filter for documents' },
      enabledFilter: { type: 'string', description: 'Filter by enabled status' },
      enabled: { type: 'string', description: 'Enable or disable a chunk' },
      limit: { type: 'number', description: 'Max items to return' },
      offset: { type: 'number', description: 'Pagination offset' },
      tagFilters: { type: 'string', description: 'Tag filter criteria' },
      // Document tags for create document (JSON string of tag objects)
      documentTags: { type: 'string', description: 'Document tags' },
      chunkSearch: { type: 'string', description: 'Search filter for chunks' },
      chunkEnabledFilter: { type: 'string', description: 'Filter chunks by enabled status' },
    },
    outputs: {
      results: { type: 'json', description: 'Search results' },

@@ -28,7 +28,7 @@ const checkboxVariants = cva(
    'border-[var(--border-1)] bg-transparent',
    'focus-visible:outline-none',
    'data-[disabled]:cursor-not-allowed data-[disabled]:opacity-50',
    'data-[state=checked]:border-[var(--text-primary)] data-[state=checked]:bg-[var(--text-primary)]',
    'data-[state=checked]:border-[var(--brand-tertiary-2)] data-[state=checked]:bg-[var(--brand-tertiary-2)]',
  ].join(' '),
  {
    variants: {

@@ -797,7 +797,7 @@ const DatePicker = React.forwardRef<HTMLDivElement, DatePickerProps>((props, ref
          side='bottom'
          align='start'
          sideOffset={4}
          avoidCollisions={false}
          collisionPadding={16}
          className={cn(
            'rounded-[6px] border border-[var(--border-1)] p-0',
            isRangeMode ? 'w-auto' : 'w-[280px]'

390
apps/sim/connectors/airtable/airtable.ts
Normal file
@@ -0,0 +1,390 @@
import { createLogger } from '@sim/logger'
import { AirtableIcon } from '@/components/icons'
import type { ConnectorConfig, ExternalDocument, ExternalDocumentList } from '@/connectors/types'

const logger = createLogger('AirtableConnector')

const AIRTABLE_API = 'https://api.airtable.com/v0'
const PAGE_SIZE = 100

/**
 * Computes a SHA-256 hash of the given content.
 */
async function computeContentHash(content: string): Promise<string> {
  const data = new TextEncoder().encode(content)
  const hashBuffer = await crypto.subtle.digest('SHA-256', data)
  return Array.from(new Uint8Array(hashBuffer))
    .map((b) => b.toString(16).padStart(2, '0'))
    .join('')
}
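
// Note: crypto.subtle is the Web Crypto API, available as a global in modern
// browsers and in Node.js 18+; older Node runtimes would need the webcrypto
// export from 'node:crypto' instead.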

/**
 * Flattens a record's fields into a plain-text representation.
 * Each field is rendered as "Field Name: value" on its own line.
 */
function recordToPlainText(
  fields: Record<string, unknown>,
  fieldNames?: Map<string, string>
): string {
  const lines: string[] = []
  for (const [key, value] of Object.entries(fields)) {
    if (value == null) continue
    const displayName = fieldNames?.get(key) ?? key
    if (Array.isArray(value)) {
      // Attachments or linked records
      const items = value.map((v) => {
        if (typeof v === 'object' && v !== null) {
          const obj = v as Record<string, unknown>
          return (obj.url as string) || (obj.name as string) || JSON.stringify(v)
        }
        return String(v)
      })
      lines.push(`${displayName}: ${items.join(', ')}`)
    } else if (typeof value === 'object') {
      lines.push(`${displayName}: ${JSON.stringify(value)}`)
    } else {
      lines.push(`${displayName}: ${String(value)}`)
    }
  }
  return lines.join('\n')
}
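
// Illustrative output for { Name: 'Acme', Tags: ['a', 'b'] }:
//   "Name: Acme\nTags: a, b"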

/**
 * Extracts a human-readable title from a record's fields.
 * Prefers the configured title field, then falls back to common field names.
 */
function extractTitle(fields: Record<string, unknown>, titleField?: string): string {
  if (titleField && fields[titleField] != null) {
    return String(fields[titleField])
  }
  const candidates = ['Name', 'Title', 'name', 'title', 'Summary', 'summary']
  for (const candidate of candidates) {
    if (fields[candidate] != null) {
      return String(fields[candidate])
    }
  }
  // Fall back to first non-null string field
  for (const value of Object.values(fields)) {
    if (typeof value === 'string' && value.trim()) {
      return value.length > 80 ? `${value.slice(0, 80)}…` : value
    }
  }
  return 'Untitled'
}
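
// e.g. extractTitle({ Summary: 'Q3 roadmap' }) === 'Q3 roadmap'; a record with
// no candidate or string fields falls back to 'Untitled'.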

/**
 * Parses the cursor format: "offset:<airtable_offset>"
 */
function parseCursor(cursor?: string): string | undefined {
  if (!cursor) return undefined
  if (cursor.startsWith('offset:')) return cursor.slice(7)
  return cursor
}
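
// e.g. parseCursor('offset:itrXYZ123') === 'itrXYZ123', and
// parseCursor(undefined) === undefined.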

export const airtableConnector: ConnectorConfig = {
  id: 'airtable',
  name: 'Airtable',
  description: 'Sync records from an Airtable table into your knowledge base',
  version: '1.0.0',
  icon: AirtableIcon,

  oauth: {
    required: true,
    provider: 'airtable',
    requiredScopes: ['data.records:read', 'schema.bases:read'],
  },

  configFields: [
    {
      id: 'baseId',
      title: 'Base ID',
      type: 'short-input',
      placeholder: 'e.g. appXXXXXXXXXXXXXX',
      required: true,
    },
    {
      id: 'tableIdOrName',
      title: 'Table Name or ID',
      type: 'short-input',
      placeholder: 'e.g. Tasks or tblXXXXXXXXXXXXXX',
      required: true,
    },
    {
      id: 'viewId',
      title: 'View',
      type: 'short-input',
      placeholder: 'e.g. Grid view or viwXXXXXXXXXXXXXX',
      required: false,
    },
    {
      id: 'titleField',
      title: 'Title Field',
      type: 'short-input',
      placeholder: 'e.g. Name',
      required: false,
    },
    {
      id: 'maxRecords',
      title: 'Max Records',
      type: 'short-input',
      placeholder: 'e.g. 1000 (default: unlimited)',
      required: false,
    },
  ],

  listDocuments: async (
    accessToken: string,
    sourceConfig: Record<string, unknown>,
    cursor?: string
  ): Promise<ExternalDocumentList> => {
    const baseId = sourceConfig.baseId as string
    const tableIdOrName = sourceConfig.tableIdOrName as string
    const viewId = sourceConfig.viewId as string | undefined
    const titleField = sourceConfig.titleField as string | undefined
    const maxRecords = sourceConfig.maxRecords ? Number(sourceConfig.maxRecords) : 0

    // Fetch table schema for field name mapping
    const fieldNames = await fetchFieldNames(accessToken, baseId, tableIdOrName)

    const params = new URLSearchParams()
    params.append('pageSize', String(PAGE_SIZE))
    if (viewId) params.append('view', viewId)
    if (maxRecords > 0) params.append('maxRecords', String(maxRecords))

    const offset = parseCursor(cursor)
    if (offset) params.append('offset', offset)

    const encodedTable = encodeURIComponent(tableIdOrName)
    const url = `${AIRTABLE_API}/${baseId}/${encodedTable}?${params.toString()}`

    logger.info(`Listing records from ${baseId}/${tableIdOrName}`, {
      offset: offset ?? 'none',
      view: viewId ?? 'default',
    })

    const response = await fetch(url, {
      method: 'GET',
      headers: {
        Authorization: `Bearer ${accessToken}`,
      },
    })

    if (!response.ok) {
      const errorText = await response.text()
      logger.error('Failed to list Airtable records', {
        status: response.status,
        error: errorText,
      })
      throw new Error(`Failed to list Airtable records: ${response.status}`)
    }

    const data = (await response.json()) as {
      records: AirtableRecord[]
      offset?: string
    }

    const records = data.records || []
    const documents: ExternalDocument[] = await Promise.all(
      records.map((record) =>
        recordToDocument(record, baseId, tableIdOrName, titleField, fieldNames)
      )
    )

    const nextOffset = data.offset
    return {
      documents,
      nextCursor: nextOffset ? `offset:${nextOffset}` : undefined,
      hasMore: Boolean(nextOffset),
    }
  },
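
  // The sync engine is expected to call listDocuments repeatedly, feeding each
  // returned nextCursor back in until hasMore is false (the paging driver
  // itself lives in the sync engine, not in this connector).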

  getDocument: async (
    accessToken: string,
    sourceConfig: Record<string, unknown>,
    externalId: string
  ): Promise<ExternalDocument | null> => {
    const baseId = sourceConfig.baseId as string
    const tableIdOrName = sourceConfig.tableIdOrName as string
    const titleField = sourceConfig.titleField as string | undefined

    const fieldNames = await fetchFieldNames(accessToken, baseId, tableIdOrName)
    const encodedTable = encodeURIComponent(tableIdOrName)
    const url = `${AIRTABLE_API}/${baseId}/${encodedTable}/${externalId}`

    const response = await fetch(url, {
      method: 'GET',
      headers: {
        Authorization: `Bearer ${accessToken}`,
      },
    })

    if (!response.ok) {
      if (response.status === 404 || response.status === 422) return null
      throw new Error(`Failed to get Airtable record: ${response.status}`)
    }

    const record = (await response.json()) as AirtableRecord
    return recordToDocument(record, baseId, tableIdOrName, titleField, fieldNames)
  },

  validateConfig: async (
    accessToken: string,
    sourceConfig: Record<string, unknown>
  ): Promise<{ valid: boolean; error?: string }> => {
    const baseId = sourceConfig.baseId as string
    const tableIdOrName = sourceConfig.tableIdOrName as string

    if (!baseId || !tableIdOrName) {
      return { valid: false, error: 'Base ID and table name are required' }
    }

    if (baseId && !baseId.startsWith('app')) {
      return { valid: false, error: 'Base ID should start with "app"' }
    }

    const maxRecords = sourceConfig.maxRecords as string | undefined
    if (maxRecords && (Number.isNaN(Number(maxRecords)) || Number(maxRecords) <= 0)) {
      return { valid: false, error: 'Max records must be a positive number' }
    }

    try {
      // Verify base and table are accessible by fetching 1 record
      const encodedTable = encodeURIComponent(tableIdOrName)
      const url = `${AIRTABLE_API}/${baseId}/${encodedTable}?pageSize=1`
      const response = await fetch(url, {
        method: 'GET',
        headers: {
          Authorization: `Bearer ${accessToken}`,
        },
      })

      if (!response.ok) {
        const errorText = await response.text()
        if (response.status === 404 || response.status === 422) {
          return { valid: false, error: `Table "${tableIdOrName}" not found in base "${baseId}"` }
        }
        if (response.status === 403) {
          return { valid: false, error: 'Access denied. Check your Airtable permissions.' }
        }
        return { valid: false, error: `Airtable API error: ${response.status} - ${errorText}` }
      }

      // If a view is specified, verify it exists
      const viewId = sourceConfig.viewId as string | undefined
      if (viewId) {
        const viewUrl = `${AIRTABLE_API}/${baseId}/${encodedTable}?pageSize=1&view=${encodeURIComponent(viewId)}`
        const viewResponse = await fetch(viewUrl, {
          method: 'GET',
          headers: {
            Authorization: `Bearer ${accessToken}`,
          },
        })
        if (!viewResponse.ok) {
          return { valid: false, error: `View "${viewId}" not found in table "${tableIdOrName}"` }
        }
      }

      return { valid: true }
    } catch (error) {
      const message = error instanceof Error ? error.message : 'Failed to validate configuration'
      return { valid: false, error: message }
    }
  },

  tagDefinitions: [{ id: 'createdTime', displayName: 'Created Time', fieldType: 'date' }],

  mapTags: (metadata: Record<string, unknown>): Record<string, unknown> => {
    const result: Record<string, unknown> = {}

    if (typeof metadata.createdTime === 'string') {
      const date = new Date(metadata.createdTime)
      if (!Number.isNaN(date.getTime())) result.createdTime = date
    }

    return result
  },
}

interface AirtableRecord {
  id: string
  fields: Record<string, unknown>
  createdTime: string
}

/**
 * Converts an Airtable record to an ExternalDocument.
 */
async function recordToDocument(
  record: AirtableRecord,
  baseId: string,
  tableIdOrName: string,
  titleField: string | undefined,
  fieldNames: Map<string, string>
): Promise<ExternalDocument> {
  const plainText = recordToPlainText(record.fields, fieldNames)
  const contentHash = await computeContentHash(plainText)
  const title = extractTitle(record.fields, titleField)

  const encodedTable = encodeURIComponent(tableIdOrName)
  const sourceUrl = `https://airtable.com/${baseId}/${encodedTable}/${record.id}`

  return {
    externalId: record.id,
    title,
    content: plainText,
    mimeType: 'text/plain',
    sourceUrl,
    contentHash,
    metadata: {
      createdTime: record.createdTime,
    },
  }
}

/**
 * Fetches the table schema to build a field ID → field name mapping.
 * Falls back to an empty map if the schema endpoint is unavailable.
 */
async function fetchFieldNames(
  accessToken: string,
  baseId: string,
  tableIdOrName: string
): Promise<Map<string, string>> {
  const fieldNames = new Map<string, string>()

  try {
    const url = `${AIRTABLE_API}/meta/bases/${baseId}/tables`
    const response = await fetch(url, {
      method: 'GET',
      headers: {
        Authorization: `Bearer ${accessToken}`,
      },
    })

    if (!response.ok) {
      logger.warn('Failed to fetch Airtable schema, using raw field keys', {
        status: response.status,
      })
      return fieldNames
    }

    const data = (await response.json()) as {
      tables: { id: string; name: string; fields: { id: string; name: string; type: string }[] }[]
    }

    const table = data.tables.find((t) => t.id === tableIdOrName || t.name === tableIdOrName)

    if (table) {
      for (const field of table.fields) {
        fieldNames.set(field.id, field.name)
        fieldNames.set(field.name, field.name)
      }
    }
  } catch (error) {
    logger.warn('Error fetching Airtable schema', {
      error: error instanceof Error ? error.message : String(error),
    })
  }

  return fieldNames
}

1
apps/sim/connectors/airtable/index.ts
Normal file
@@ -0,0 +1 @@
export { airtableConnector } from '@/connectors/airtable/airtable'

611
apps/sim/connectors/confluence/confluence.ts
Normal file
@@ -0,0 +1,611 @@
import { createLogger } from '@sim/logger'
import { ConfluenceIcon } from '@/components/icons'
import type { ConnectorConfig, ExternalDocument, ExternalDocumentList } from '@/connectors/types'
import { getConfluenceCloudId } from '@/tools/confluence/utils'

const logger = createLogger('ConfluenceConnector')

/**
 * Strips HTML tags from content and decodes HTML entities.
 */
function htmlToPlainText(html: string): string {
  let text = html.replace(/<[^>]*>/g, ' ')
  text = text
    .replace(/&nbsp;/g, ' ')
    .replace(/&lt;/g, '<')
    .replace(/&gt;/g, '>')
    .replace(/&quot;/g, '"')
    .replace(/&#39;/g, "'")
    .replace(/&amp;/g, '&')
  return text.replace(/\s+/g, ' ').trim()
}
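
// e.g. htmlToPlainText('<p>Fish &amp; chips</p>') === 'Fish & chips'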

/**
 * Computes a SHA-256 hash of the given content.
 */
async function computeContentHash(content: string): Promise<string> {
  const encoder = new TextEncoder()
  const data = encoder.encode(content)
  const hashBuffer = await crypto.subtle.digest('SHA-256', data)
  const hashArray = Array.from(new Uint8Array(hashBuffer))
  return hashArray.map((b) => b.toString(16).padStart(2, '0')).join('')
}

/**
 * Fetches labels for a batch of page IDs using the v2 labels endpoint.
 */
async function fetchLabelsForPages(
  cloudId: string,
  accessToken: string,
  pageIds: string[]
): Promise<Map<string, string[]>> {
  const labelsByPageId = new Map<string, string[]>()

  const results = await Promise.all(
    pageIds.map(async (pageId) => {
      try {
        const url = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/api/v2/pages/${pageId}/labels`
        const response = await fetch(url, {
          method: 'GET',
          headers: {
            Accept: 'application/json',
            Authorization: `Bearer ${accessToken}`,
          },
        })

        if (!response.ok) {
          logger.warn(`Failed to fetch labels for page ${pageId}`, { status: response.status })
          return { pageId, labels: [] as string[] }
        }

        const data = await response.json()
        const labels = (data.results || []).map(
          (label: Record<string, unknown>) => label.name as string
        )
        return { pageId, labels }
      } catch (error) {
        logger.warn(`Error fetching labels for page ${pageId}`, {
          error: error instanceof Error ? error.message : String(error),
        })
        return { pageId, labels: [] as string[] }
      }
    })
  )

  for (const { pageId, labels } of results) {
    labelsByPageId.set(pageId, labels)
  }

  return labelsByPageId
}
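
// Labels are fetched with one request per page, in parallel; any failed or
// non-OK response degrades to an empty label list instead of failing the
// whole batch.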
|
||||
|
||||
/**
|
||||
* Converts a v1 CQL search result item to an ExternalDocument.
|
||||
*/
|
||||
async function cqlResultToDocument(
|
||||
item: Record<string, unknown>,
|
||||
domain: string
|
||||
): Promise<ExternalDocument> {
|
||||
const body = item.body as Record<string, Record<string, string>> | undefined
|
||||
const rawContent = body?.storage?.value || ''
|
||||
const plainText = htmlToPlainText(rawContent)
|
||||
const contentHash = await computeContentHash(plainText)
|
||||
|
||||
const version = item.version as Record<string, unknown> | undefined
|
||||
const links = item._links as Record<string, string> | undefined
|
||||
const metadata = item.metadata as Record<string, unknown> | undefined
|
||||
const labelsWrapper = metadata?.labels as Record<string, unknown> | undefined
|
||||
const labelResults = (labelsWrapper?.results || []) as Record<string, unknown>[]
|
||||
const labels = labelResults.map((l) => l.name as string)
|
||||
|
||||
return {
|
||||
externalId: String(item.id),
|
||||
title: (item.title as string) || 'Untitled',
|
||||
content: plainText,
|
||||
mimeType: 'text/plain',
|
||||
sourceUrl: links?.webui ? `https://${domain}/wiki${links.webui}` : undefined,
|
||||
contentHash,
|
||||
metadata: {
|
||||
spaceId: (item.space as Record<string, unknown>)?.key,
|
||||
status: item.status,
|
||||
version: version?.number,
|
||||
labels,
|
||||
lastModified: version?.when,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
export const confluenceConnector: ConnectorConfig = {
|
||||
id: 'confluence',
|
||||
name: 'Confluence',
|
||||
description: 'Sync pages from a Confluence space into your knowledge base',
|
||||
version: '1.1.0',
|
||||
icon: ConfluenceIcon,
|
||||
|
||||
oauth: {
|
||||
required: true,
|
||||
provider: 'confluence',
|
||||
requiredScopes: ['read:confluence-content.all', 'read:page:confluence', 'offline_access'],
|
||||
},
|
||||
|
||||
configFields: [
|
||||
{
|
||||
id: 'domain',
|
||||
title: 'Confluence Domain',
|
||||
type: 'short-input',
|
||||
placeholder: 'yoursite.atlassian.net',
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
id: 'spaceKey',
|
||||
title: 'Space Key',
|
||||
type: 'short-input',
|
||||
placeholder: 'e.g. ENG, PRODUCT',
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
id: 'contentType',
|
||||
title: 'Content Type',
|
||||
type: 'dropdown',
|
||||
required: false,
|
||||
options: [
|
||||
{ label: 'Pages only', id: 'page' },
|
||||
{ label: 'Blog posts only', id: 'blogpost' },
|
||||
{ label: 'All content', id: 'all' },
|
||||
],
|
||||
},
|
||||
{
|
||||
id: 'labelFilter',
|
||||
title: 'Filter by Label',
|
||||
type: 'short-input',
|
||||
required: false,
|
||||
placeholder: 'e.g. published, engineering',
|
||||
},
|
||||
{
|
||||
id: 'maxPages',
|
||||
title: 'Max Pages',
|
||||
type: 'short-input',
|
||||
required: false,
|
||||
placeholder: 'e.g. 500 (default: unlimited)',
|
||||
},
|
||||
],
|
||||
|
||||
listDocuments: async (
|
||||
accessToken: string,
|
||||
sourceConfig: Record<string, unknown>,
|
||||
cursor?: string
|
||||
): Promise<ExternalDocumentList> => {
|
||||
const domain = sourceConfig.domain as string
|
||||
const spaceKey = sourceConfig.spaceKey as string
|
||||
const contentType = (sourceConfig.contentType as string) || 'page'
|
||||
const labelFilter = (sourceConfig.labelFilter as string) || ''
|
||||
const maxPages = sourceConfig.maxPages ? Number(sourceConfig.maxPages) : 0
|
||||
|
||||
const cloudId = await getConfluenceCloudId(domain, accessToken)
|
||||
|
||||
// If label filtering is enabled, use CQL search via v1 API
|
||||
if (labelFilter.trim()) {
|
||||
return listDocumentsViaCql(
|
||||
cloudId,
|
||||
accessToken,
|
||||
domain,
|
||||
spaceKey,
|
||||
contentType,
|
||||
labelFilter,
|
||||
maxPages,
|
||||
cursor
|
||||
)
|
||||
}
|
||||
|
||||
// Otherwise use v2 API (default path)
|
||||
const spaceId = await resolveSpaceId(cloudId, accessToken, spaceKey)
|
||||
|
||||
if (contentType === 'all') {
|
||||
return listAllContentTypes(cloudId, accessToken, domain, spaceId, spaceKey, maxPages, cursor)
|
||||
    }

    return listDocumentsV2(
      cloudId,
      accessToken,
      domain,
      spaceId,
      spaceKey,
      contentType,
      maxPages,
      cursor
    )
  },

  getDocument: async (
    accessToken: string,
    sourceConfig: Record<string, unknown>,
    externalId: string
  ): Promise<ExternalDocument | null> => {
    const domain = sourceConfig.domain as string
    const cloudId = await getConfluenceCloudId(domain, accessToken)

    const url = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/api/v2/pages/${externalId}?body-format=storage`

    const response = await fetch(url, {
      method: 'GET',
      headers: {
        Accept: 'application/json',
        Authorization: `Bearer ${accessToken}`,
      },
    })

    if (!response.ok) {
      if (response.status === 404) return null
      throw new Error(`Failed to get Confluence page: ${response.status}`)
    }

    const page = await response.json()
    const rawContent = page.body?.storage?.value || ''
    const plainText = htmlToPlainText(rawContent)
    const contentHash = await computeContentHash(plainText)

    // Fetch labels for this page
    const labelMap = await fetchLabelsForPages(cloudId, accessToken, [String(page.id)])
    const labels = labelMap.get(String(page.id)) ?? []

    return {
      externalId: String(page.id),
      title: page.title || 'Untitled',
      content: plainText,
      mimeType: 'text/plain',
      sourceUrl: page._links?.webui ? `https://${domain}/wiki${page._links.webui}` : undefined,
      contentHash,
      metadata: {
        spaceId: page.spaceId,
        status: page.status,
        version: page.version?.number,
        labels,
        lastModified: page.version?.createdAt,
      },
    }
  },

  validateConfig: async (
    accessToken: string,
    sourceConfig: Record<string, unknown>
  ): Promise<{ valid: boolean; error?: string }> => {
    const domain = sourceConfig.domain as string
    const spaceKey = sourceConfig.spaceKey as string

    if (!domain || !spaceKey) {
      return { valid: false, error: 'Domain and space key are required' }
    }

    const maxPages = sourceConfig.maxPages as string | undefined
    if (maxPages && (Number.isNaN(Number(maxPages)) || Number(maxPages) <= 0)) {
      return { valid: false, error: 'Max pages must be a positive number' }
    }

    try {
      const cloudId = await getConfluenceCloudId(domain, accessToken)
      await resolveSpaceId(cloudId, accessToken, spaceKey)
      return { valid: true }
    } catch (error) {
      const message = error instanceof Error ? error.message : 'Failed to validate configuration'
      return { valid: false, error: message }
    }
  },

  tagDefinitions: [
    { id: 'labels', displayName: 'Labels', fieldType: 'text' },
    { id: 'version', displayName: 'Version', fieldType: 'number' },
    { id: 'lastModified', displayName: 'Last Modified', fieldType: 'date' },
  ],

  mapTags: (metadata: Record<string, unknown>): Record<string, unknown> => {
    const result: Record<string, unknown> = {}

    const labels = Array.isArray(metadata.labels) ? (metadata.labels as string[]) : []
    if (labels.length > 0) result.labels = labels.join(', ')

    if (metadata.version != null) {
      const num = Number(metadata.version)
      if (!Number.isNaN(num)) result.version = num
    }

    if (typeof metadata.lastModified === 'string') {
      const date = new Date(metadata.lastModified)
      if (!Number.isNaN(date.getTime())) result.lastModified = date
    }

    return result
  },
}
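How the pieces above fit together, as a minimal sketch (the call site is hypothetical; the real sync engine wiring lives elsewhere in this commit): `mapTags` receives the `metadata` captured on each `ExternalDocument` and returns keys that match `tagDefinitions[].id`, which the sync engine then translates to tag slots.

```typescript
// Hypothetical call site, illustrating the metadata -> tags flow only.
import { confluenceConnector } from '@/connectors/confluence/confluence'

const accessToken = process.env.CONFLUENCE_TOKEN ?? ''
const sourceConfig = { domain: 'yoursite.atlassian.net', spaceKey: 'ENG' }

const doc = await confluenceConnector.getDocument(accessToken, sourceConfig, '12345')
if (doc?.metadata && confluenceConnector.mapTags) {
  const tags = confluenceConnector.mapTags(doc.metadata)
  // e.g. { labels: 'runbook, oncall', version: 7, lastModified: Date }
  // Keys line up with tagDefinitions ids: labels (text), version (number), lastModified (date)
}
```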
/**
 * Lists documents using the v2 API for a single content type (pages or blogposts).
 */
async function listDocumentsV2(
  cloudId: string,
  accessToken: string,
  domain: string,
  spaceId: string,
  spaceKey: string,
  contentType: string,
  maxPages: number,
  cursor?: string
): Promise<ExternalDocumentList> {
  const queryParams = new URLSearchParams()
  queryParams.append('limit', '50')
  queryParams.append('body-format', 'storage')
  if (cursor) {
    queryParams.append('cursor', cursor)
  }

  const endpoint = contentType === 'blogpost' ? 'blogposts' : 'pages'
  const url = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/api/v2/spaces/${spaceId}/${endpoint}?${queryParams.toString()}`

  logger.info(`Listing ${endpoint} in space ${spaceKey} (ID: ${spaceId})`)

  const response = await fetch(url, {
    method: 'GET',
    headers: {
      Accept: 'application/json',
      Authorization: `Bearer ${accessToken}`,
    },
  })

  if (!response.ok) {
    const errorText = await response.text()
    logger.error(`Failed to list Confluence ${endpoint}`, {
      status: response.status,
      error: errorText,
    })
    throw new Error(`Failed to list Confluence ${endpoint}: ${response.status}`)
  }

  const data = await response.json()
  const results = data.results || []

  // Fetch labels for all pages in this batch
  const pageIds = results.map((page: Record<string, unknown>) => String(page.id))
  const labelsByPageId = await fetchLabelsForPages(cloudId, accessToken, pageIds)

  const documents: ExternalDocument[] = await Promise.all(
    results.map(async (page: Record<string, unknown>) => {
      const rawContent = (page.body as Record<string, Record<string, string>>)?.storage?.value || ''
      const plainText = htmlToPlainText(rawContent)
      const contentHash = await computeContentHash(plainText)
      const pageId = String(page.id)

      return {
        externalId: pageId,
        title: (page.title as string) || 'Untitled',
        content: plainText,
        mimeType: 'text/plain',
        sourceUrl: (page._links as Record<string, string>)?.webui
          ? `https://${domain}/wiki${(page._links as Record<string, string>).webui}`
          : undefined,
        contentHash,
        metadata: {
          spaceId: page.spaceId,
          status: page.status,
          version: (page.version as Record<string, unknown>)?.number,
          labels: labelsByPageId.get(pageId) ?? [],
          lastModified: (page.version as Record<string, unknown>)?.createdAt,
        },
      }
    })
  )

  // Extract next cursor from _links.next
  let nextCursor: string | undefined
  const nextLink = (data._links as Record<string, string>)?.next
  if (nextLink) {
    try {
      nextCursor = new URL(nextLink, 'https://placeholder').searchParams.get('cursor') || undefined
    } catch {
      // Ignore malformed URLs
    }
  }

  // Enforce maxPages limit
  if (maxPages > 0 && !cursor) {
    // On subsequent pages, the sync engine tracks total count
    // We signal stop by clearing hasMore when we'd exceed maxPages
  }

  return {
    documents,
    nextCursor,
    hasMore: Boolean(nextCursor),
  }
}
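The `maxPages` block above is intentionally a no-op at this level: a single page call cannot see the running total, so the cap has to be enforced by whoever drives the cursor loop. A hedged sketch of such a driver, under the assumption that the sync engine tracks the count itself:

```typescript
// Sketch only; the real sync engine loop may differ.
import type { ExternalDocument, ExternalDocumentList } from '@/connectors/types'

type ListPage = (cursor?: string) => Promise<ExternalDocumentList>

async function listWithCap(listPage: ListPage, maxPages: number): Promise<ExternalDocument[]> {
  const all: ExternalDocument[] = []
  let cursor: string | undefined
  do {
    const batch = await listPage(cursor)
    all.push(...batch.documents)
    cursor = batch.hasMore ? batch.nextCursor : undefined
  } while (cursor && (maxPages <= 0 || all.length < maxPages))
  return maxPages > 0 ? all.slice(0, maxPages) : all
}
```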
/**
 * Lists both pages and blogposts using a compound cursor that tracks
 * pagination state for each content type independently.
 */
async function listAllContentTypes(
  cloudId: string,
  accessToken: string,
  domain: string,
  spaceId: string,
  spaceKey: string,
  maxPages: number,
  cursor?: string
): Promise<ExternalDocumentList> {
  let pageCursor: string | undefined
  let blogCursor: string | undefined
  let pagesDone = false
  let blogsDone = false

  if (cursor) {
    try {
      const parsed = JSON.parse(cursor)
      pageCursor = parsed.page
      blogCursor = parsed.blog
      pagesDone = parsed.pagesDone === true
      blogsDone = parsed.blogsDone === true
    } catch {
      pageCursor = cursor
    }
  }

  const results: ExternalDocumentList = { documents: [], hasMore: false }

  if (!pagesDone) {
    const pagesResult = await listDocumentsV2(
      cloudId,
      accessToken,
      domain,
      spaceId,
      spaceKey,
      'page',
      maxPages,
      pageCursor
    )
    results.documents.push(...pagesResult.documents)
    pageCursor = pagesResult.nextCursor
    pagesDone = !pagesResult.hasMore
  }

  if (!blogsDone) {
    const blogResult = await listDocumentsV2(
      cloudId,
      accessToken,
      domain,
      spaceId,
      spaceKey,
      'blogpost',
      maxPages,
      blogCursor
    )
    results.documents.push(...blogResult.documents)
    blogCursor = blogResult.nextCursor
    blogsDone = !blogResult.hasMore
  }

  results.hasMore = !pagesDone || !blogsDone

  if (results.hasMore) {
    results.nextCursor = JSON.stringify({
      page: pageCursor,
      blog: blogCursor,
      pagesDone,
      blogsDone,
    })
  }

  return results
}
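The compound cursor is plain JSON, so a round-trip is easy to reason about:

```typescript
// Page listing exhausted, blogposts still pending:
const cursor = JSON.stringify({ page: undefined, blog: 'xyz', pagesDone: true, blogsDone: false })
// -> '{"blog":"xyz","pagesDone":true,"blogsDone":false}' (undefined fields are dropped)

const parsed = JSON.parse(cursor)
// parsed.pagesDone === true, so the next call skips straight to blogposts at cursor 'xyz'.
// A bare, non-JSON cursor (from before compound cursors) falls back to being a page cursor.
```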
/**
 * Lists documents using CQL search via the v1 API (used when label filtering is enabled).
 */
async function listDocumentsViaCql(
  cloudId: string,
  accessToken: string,
  domain: string,
  spaceKey: string,
  contentType: string,
  labelFilter: string,
  maxPages: number,
  cursor?: string
): Promise<ExternalDocumentList> {
  const labels = labelFilter
    .split(',')
    .map((l) => l.trim())
    .filter(Boolean)

  // Build CQL query
  let cql = `space="${spaceKey}"`

  if (contentType === 'blogpost') {
    cql += ' AND type="blogpost"'
  } else if (contentType === 'page' || !contentType) {
    cql += ' AND type="page"'
  }
  // contentType === 'all' — no type filter

  if (labels.length === 1) {
    cql += ` AND label="${labels[0]}"`
  } else if (labels.length > 1) {
    const labelList = labels.map((l) => `"${l}"`).join(',')
    cql += ` AND label in (${labelList})`
  }

  const limit = maxPages > 0 ? Math.min(maxPages, 50) : 50
  const start = cursor ? Number(cursor) : 0

  const queryParams = new URLSearchParams()
  queryParams.append('cql', cql)
  queryParams.append('limit', String(limit))
  queryParams.append('start', String(start))
  queryParams.append('expand', 'body.storage,version,metadata.labels')

  const url = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/rest/api/content/search?${queryParams.toString()}`

  logger.info(`Searching Confluence via CQL: ${cql}`, { start, limit })

  const response = await fetch(url, {
    method: 'GET',
    headers: {
      Accept: 'application/json',
      Authorization: `Bearer ${accessToken}`,
    },
  })

  if (!response.ok) {
    const errorText = await response.text()
    logger.error('Failed to search Confluence via CQL', {
      status: response.status,
      error: errorText,
    })
    throw new Error(`Failed to search Confluence via CQL: ${response.status}`)
  }

  const data = await response.json()
  const results = data.results || []

  const documents: ExternalDocument[] = await Promise.all(
    results.map((item: Record<string, unknown>) => cqlResultToDocument(item, domain))
  )

  const totalSize = (data.totalSize as number) ?? 0
  const nextStart = start + results.length
  const hasMore = nextStart < totalSize && (maxPages <= 0 || nextStart < maxPages)

  return {
    documents,
    nextCursor: hasMore ? String(nextStart) : undefined,
    hasMore,
  }
}
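One caveat worth noting: the CQL string interpolates `spaceKey` and label values inside double quotes as-is. If a value could ever contain a quote, escaping keeps the query well-formed. A minimal sketch (the helper is hypothetical, not part of this commit):

```typescript
// Hypothetical helper: escape embedded double quotes before interpolating into CQL.
function cqlQuote(value: string): string {
  return `"${value.replace(/"/g, '\\"')}"`
}

// e.g. cql += ` AND label in (${labels.map(cqlQuote).join(',')})`
```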
/**
 * Resolves a Confluence space key to its numeric space ID.
 */
async function resolveSpaceId(
  cloudId: string,
  accessToken: string,
  spaceKey: string
): Promise<string> {
  const url = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/api/v2/spaces?keys=${encodeURIComponent(spaceKey)}&limit=1`

  const response = await fetch(url, {
    method: 'GET',
    headers: {
      Accept: 'application/json',
      Authorization: `Bearer ${accessToken}`,
    },
  })

  if (!response.ok) {
    throw new Error(`Failed to resolve space key "${spaceKey}": ${response.status}`)
  }

  const data = await response.json()
  const results = data.results || []

  if (results.length === 0) {
    throw new Error(`Space "${spaceKey}" not found`)
  }

  return String(results[0].id)
}
1
apps/sim/connectors/confluence/index.ts
Normal file
@@ -0,0 +1 @@
export { confluenceConnector } from '@/connectors/confluence/confluence'
409
apps/sim/connectors/github/github.ts
Normal file
@@ -0,0 +1,409 @@
import { createLogger } from '@sim/logger'
import { GithubIcon } from '@/components/icons'
import type { ConnectorConfig, ExternalDocument, ExternalDocumentList } from '@/connectors/types'

const logger = createLogger('GitHubConnector')

const GITHUB_API_URL = 'https://api.github.com'
const BATCH_SIZE = 30

/**
 * Computes a SHA-256 hash of the given content.
 */
async function computeContentHash(content: string): Promise<string> {
  const data = new TextEncoder().encode(content)
  const hashBuffer = await crypto.subtle.digest('SHA-256', data)
  return Array.from(new Uint8Array(hashBuffer))
    .map((b) => b.toString(16).padStart(2, '0'))
    .join('')
}

/**
 * Parses the repository string into owner and repo.
 */
function parseRepo(repository: string): { owner: string; repo: string } {
  const cleaned = repository.replace(/^https?:\/\/github\.com\//, '').replace(/\.git$/, '')
  const parts = cleaned.split('/')
  if (parts.length < 2 || !parts[0] || !parts[1]) {
    throw new Error(`Invalid repository format: "${repository}". Use "owner/repo".`)
  }
  return { owner: parts[0], repo: parts[1] }
}

/**
 * File extension filter set from user config. Returns null if no filter (accept all).
 */
function parseExtensions(extensions: string): Set<string> | null {
  const trimmed = extensions.trim()
  if (!trimmed) return null
  const exts = trimmed
    .split(',')
    .map((e) => e.trim().toLowerCase())
    .filter(Boolean)
    .map((e) => (e.startsWith('.') ? e : `.${e}`))
  return exts.length > 0 ? new Set(exts) : null
}

/**
 * Checks whether a file path matches the extension filter.
 */
function matchesExtension(filePath: string, extSet: Set<string> | null): boolean {
  if (!extSet) return true
  const lastDot = filePath.lastIndexOf('.')
  if (lastDot === -1) return false
  return extSet.has(filePath.slice(lastDot).toLowerCase())
}
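The two filters compose like this:

```typescript
const extSet = parseExtensions('md, .mdx, TXT') // -> Set { '.md', '.mdx', '.txt' }

matchesExtension('docs/guide.MD', extSet) // true  (comparison is lowercased)
matchesExtension('src/index.ts', extSet)  // false (extension not in the set)
matchesExtension('LICENSE', extSet)       // false (no extension at all)
matchesExtension('anything.ts', null)     // true  (no filter accepts everything)
```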
interface TreeItem {
  path: string
  mode: string
  type: string
  sha: string
  size?: number
}

/**
 * Fetches the full recursive tree for a branch.
 */
async function fetchTree(
  accessToken: string,
  owner: string,
  repo: string,
  branch: string
): Promise<TreeItem[]> {
  const url = `${GITHUB_API_URL}/repos/${owner}/${repo}/git/trees/${encodeURIComponent(branch)}?recursive=1`

  const response = await fetch(url, {
    method: 'GET',
    headers: {
      Accept: 'application/vnd.github+json',
      Authorization: `Bearer ${accessToken}`,
      'X-GitHub-Api-Version': '2022-11-28',
    },
  })

  if (!response.ok) {
    const errorText = await response.text()
    logger.error('Failed to fetch GitHub tree', { status: response.status, error: errorText })
    throw new Error(`Failed to fetch repository tree: ${response.status}`)
  }

  const data = await response.json()

  if (data.truncated) {
    logger.warn('GitHub tree was truncated — some files may be missing', { owner, repo, branch })
  }

  return (data.tree || []).filter((item: TreeItem) => item.type === 'blob')
}

/**
 * Fetches file content via the Blobs API and decodes base64.
 */
async function fetchBlobContent(
  accessToken: string,
  owner: string,
  repo: string,
  sha: string
): Promise<string> {
  const url = `${GITHUB_API_URL}/repos/${owner}/${repo}/git/blobs/${sha}`

  const response = await fetch(url, {
    method: 'GET',
    headers: {
      Accept: 'application/vnd.github+json',
      Authorization: `Bearer ${accessToken}`,
      'X-GitHub-Api-Version': '2022-11-28',
    },
  })

  if (!response.ok) {
    throw new Error(`Failed to fetch blob ${sha}: ${response.status}`)
  }

  const data = await response.json()

  if (data.encoding === 'base64') {
    return atob(data.content.replace(/\n/g, ''))
  }

  return data.content || ''
}
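`atob` strips the embedded newlines first because the Blobs API returns base64 in line-wrapped chunks. Note that `atob` yields a binary string, so multi-byte UTF-8 content can come out mangled; if that matters, decoding through `Uint8Array` is more faithful. A sketch, assuming a runtime with `TextDecoder`:

```typescript
// UTF-8-safe alternative to a bare atob() for blob content.
function decodeBase64Utf8(b64: string): string {
  const binary = atob(b64.replace(/\n/g, ''))
  const bytes = Uint8Array.from(binary, (c) => c.charCodeAt(0))
  return new TextDecoder('utf-8').decode(bytes)
}
```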
/**
 * Converts a tree item to an ExternalDocument by fetching its content.
 */
async function treeItemToDocument(
  accessToken: string,
  owner: string,
  repo: string,
  branch: string,
  item: TreeItem
): Promise<ExternalDocument> {
  const content = await fetchBlobContent(accessToken, owner, repo, item.sha)
  const contentHash = await computeContentHash(content)

  return {
    externalId: item.path,
    title: item.path.split('/').pop() || item.path,
    content,
    mimeType: 'text/plain',
    sourceUrl: `https://github.com/${owner}/${repo}/blob/${branch}/${item.path}`,
    contentHash,
    metadata: {
      path: item.path,
      sha: item.sha,
      size: item.size,
      branch,
      repository: `${owner}/${repo}`,
    },
  }
}
export const githubConnector: ConnectorConfig = {
  id: 'github',
  name: 'GitHub',
  description: 'Sync files from a GitHub repository into your knowledge base',
  version: '1.0.0',
  icon: GithubIcon,

  oauth: {
    required: true,
    provider: 'github',
    requiredScopes: ['repo'],
  },

  configFields: [
    {
      id: 'repository',
      title: 'Repository',
      type: 'short-input',
      placeholder: 'owner/repo',
      required: true,
    },
    {
      id: 'branch',
      title: 'Branch',
      type: 'short-input',
      placeholder: 'main (default)',
      required: false,
    },
    {
      id: 'pathPrefix',
      title: 'Path Filter',
      type: 'short-input',
      placeholder: 'e.g. docs/, src/components/',
      required: false,
    },
    {
      id: 'extensions',
      title: 'File Extensions',
      type: 'short-input',
      placeholder: 'e.g. .md, .txt, .mdx',
      required: false,
    },
    {
      id: 'maxFiles',
      title: 'Max Files',
      type: 'short-input',
      required: false,
      placeholder: 'e.g. 500 (default: unlimited)',
    },
  ],

  listDocuments: async (
    accessToken: string,
    sourceConfig: Record<string, unknown>,
    cursor?: string
  ): Promise<ExternalDocumentList> => {
    const { owner, repo } = parseRepo(sourceConfig.repository as string)
    const branch = ((sourceConfig.branch as string) || 'main').trim()
    const pathPrefix = ((sourceConfig.pathPrefix as string) || '').trim()
    const extSet = parseExtensions((sourceConfig.extensions as string) || '')
    const maxFiles = sourceConfig.maxFiles ? Number(sourceConfig.maxFiles) : 0

    const tree = await fetchTree(accessToken, owner, repo, branch)

    // Filter by path prefix and extensions
    const filtered = tree.filter((item) => {
      if (pathPrefix && !item.path.startsWith(pathPrefix)) return false
      if (!matchesExtension(item.path, extSet)) return false
      return true
    })

    // Apply max files limit
    const capped = maxFiles > 0 ? filtered.slice(0, maxFiles) : filtered

    // Paginate using offset cursor
    const offset = cursor ? Number(cursor) : 0
    const batch = capped.slice(offset, offset + BATCH_SIZE)

    logger.info('Listing GitHub files', {
      owner,
      repo,
      branch,
      totalFiltered: capped.length,
      offset,
      batchSize: batch.length,
    })

    const documents: ExternalDocument[] = []
    for (const item of batch) {
      try {
        const doc = await treeItemToDocument(accessToken, owner, repo, branch, item)
        documents.push(doc)
      } catch (error) {
        logger.warn(`Failed to fetch content for ${item.path}`, {
          error: error instanceof Error ? error.message : String(error),
        })
      }
    }

    const nextOffset = offset + BATCH_SIZE
    const hasMore = nextOffset < capped.length

    return {
      documents,
      nextCursor: hasMore ? String(nextOffset) : undefined,
      hasMore,
    }
  },

  getDocument: async (
    accessToken: string,
    sourceConfig: Record<string, unknown>,
    externalId: string
  ): Promise<ExternalDocument | null> => {
    const { owner, repo } = parseRepo(sourceConfig.repository as string)
    const branch = ((sourceConfig.branch as string) || 'main').trim()

    // externalId is the file path
    const path = externalId

    try {
      const url = `${GITHUB_API_URL}/repos/${owner}/${repo}/contents/${encodeURIComponent(path)}?ref=${encodeURIComponent(branch)}`
      const response = await fetch(url, {
        method: 'GET',
        headers: {
          Accept: 'application/vnd.github+json',
          Authorization: `Bearer ${accessToken}`,
          'X-GitHub-Api-Version': '2022-11-28',
        },
      })

      if (!response.ok) {
        if (response.status === 404) return null
        throw new Error(`Failed to fetch file ${path}: ${response.status}`)
      }

      const data = await response.json()
      const content =
        data.encoding === 'base64'
          ? atob((data.content as string).replace(/\n/g, ''))
          : (data.content as string) || ''
      const contentHash = await computeContentHash(content)

      return {
        externalId,
        title: path.split('/').pop() || path,
        content,
        mimeType: 'text/plain',
        sourceUrl: `https://github.com/${owner}/${repo}/blob/${branch}/${path}`,
        contentHash,
        metadata: {
          path,
          sha: data.sha as string,
          size: data.size as number,
          branch,
          repository: `${owner}/${repo}`,
        },
      }
    } catch (error) {
      logger.warn(`Failed to fetch GitHub document ${externalId}`, {
        error: error instanceof Error ? error.message : String(error),
      })
      return null
    }
  },

  validateConfig: async (
    accessToken: string,
    sourceConfig: Record<string, unknown>
  ): Promise<{ valid: boolean; error?: string }> => {
    const repository = (sourceConfig.repository as string)?.trim()
    if (!repository) {
      return { valid: false, error: 'Repository is required' }
    }

    let owner: string
    let repo: string
    try {
      const parsed = parseRepo(repository)
      owner = parsed.owner
      repo = parsed.repo
    } catch (error) {
      return {
        valid: false,
        error: error instanceof Error ? error.message : 'Invalid repository format',
      }
    }

    const maxFiles = sourceConfig.maxFiles as string | undefined
    if (maxFiles && (Number.isNaN(Number(maxFiles)) || Number(maxFiles) <= 0)) {
      return { valid: false, error: 'Max files must be a positive number' }
    }

    const branch = ((sourceConfig.branch as string) || 'main').trim()

    try {
      // Verify repo and branch are accessible
      const url = `${GITHUB_API_URL}/repos/${owner}/${repo}/branches/${encodeURIComponent(branch)}`
      const response = await fetch(url, {
        method: 'GET',
        headers: {
          Accept: 'application/vnd.github+json',
          Authorization: `Bearer ${accessToken}`,
          'X-GitHub-Api-Version': '2022-11-28',
        },
      })

      if (response.status === 404) {
        return {
          valid: false,
          error: `Repository "${owner}/${repo}" or branch "${branch}" not found`,
        }
      }

      if (!response.ok) {
        return { valid: false, error: `Cannot access repository: ${response.status}` }
      }

      return { valid: true }
    } catch (error) {
      const message = error instanceof Error ? error.message : 'Failed to validate configuration'
      return { valid: false, error: message }
    }
  },

  tagDefinitions: [
    { id: 'path', displayName: 'File Path', fieldType: 'text' },
    { id: 'repository', displayName: 'Repository', fieldType: 'text' },
    { id: 'branch', displayName: 'Branch', fieldType: 'text' },
    { id: 'size', displayName: 'File Size', fieldType: 'number' },
  ],

  mapTags: (metadata: Record<string, unknown>): Record<string, unknown> => {
    const result: Record<string, unknown> = {}

    if (typeof metadata.path === 'string') result.path = metadata.path
    if (typeof metadata.repository === 'string') result.repository = metadata.repository
    if (typeof metadata.branch === 'string') result.branch = metadata.branch

    if (metadata.size != null) {
      const num = Number(metadata.size)
      if (!Number.isNaN(num)) result.size = num
    }

    return result
  },
}
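The GitHub cursor is just a stringified offset into the filtered tree, so a full sync is a plain loop. One design consequence: each page re-fetches the whole tree, so offsets stay consistent only while the branch head doesn't move mid-sync. A sketch of driving the connector directly (token handling is assumed):

```typescript
import { githubConnector } from '@/connectors/github/github'
import type { ExternalDocument } from '@/connectors/types'

const accessToken = process.env.GITHUB_TOKEN ?? ''
const config = { repository: 'owner/repo', branch: 'main', extensions: '.md' }

const docs: ExternalDocument[] = []
let cursor: string | undefined
do {
  const page = await githubConnector.listDocuments(accessToken, config, cursor)
  docs.push(...page.documents)
  cursor = page.hasMore ? page.nextCursor : undefined // '30', '60', ... in BATCH_SIZE steps
} while (cursor)
```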
1
apps/sim/connectors/github/index.ts
Normal file
@@ -0,0 +1 @@
export { githubConnector } from '@/connectors/github/github'
424
apps/sim/connectors/google-drive/google-drive.ts
Normal file
@@ -0,0 +1,424 @@
import { createLogger } from '@sim/logger'
import { GoogleDriveIcon } from '@/components/icons'
import type { ConnectorConfig, ExternalDocument, ExternalDocumentList } from '@/connectors/types'

const logger = createLogger('GoogleDriveConnector')

const GOOGLE_WORKSPACE_MIME_TYPES: Record<string, string> = {
  'application/vnd.google-apps.document': 'text/plain',
  'application/vnd.google-apps.spreadsheet': 'text/csv',
  'application/vnd.google-apps.presentation': 'text/plain',
}

const SUPPORTED_TEXT_MIME_TYPES = [
  'text/plain',
  'text/csv',
  'text/html',
  'text/markdown',
  'application/json',
  'application/xml',
]

const MAX_EXPORT_SIZE = 10 * 1024 * 1024 // 10 MB (Google export limit)

async function computeContentHash(content: string): Promise<string> {
  const data = new TextEncoder().encode(content)
  const hashBuffer = await crypto.subtle.digest('SHA-256', data)
  return Array.from(new Uint8Array(hashBuffer))
    .map((b) => b.toString(16).padStart(2, '0'))
    .join('')
}

function isGoogleWorkspaceFile(mimeType: string): boolean {
  return mimeType in GOOGLE_WORKSPACE_MIME_TYPES
}

function isSupportedTextFile(mimeType: string): boolean {
  return SUPPORTED_TEXT_MIME_TYPES.some((t) => mimeType.startsWith(t))
}
function htmlToPlainText(html: string): string {
  let text = html.replace(/<[^>]*>/g, ' ')
  // Decode common HTML entities (the entity patterns were rendered away in the
  // extracted diff; reconstructed here so the replaces are not no-ops)
  text = text
    .replace(/&nbsp;/g, ' ')
    .replace(/&lt;/g, '<')
    .replace(/&gt;/g, '>')
    .replace(/&quot;/g, '"')
    .replace(/&#39;/g, "'")
    .replace(/&amp;/g, '&')
  return text.replace(/\s+/g, ' ').trim()
}
async function exportGoogleWorkspaceFile(
  accessToken: string,
  fileId: string,
  sourceMimeType: string
): Promise<string> {
  const exportMimeType = GOOGLE_WORKSPACE_MIME_TYPES[sourceMimeType]
  if (!exportMimeType) {
    throw new Error(`Unsupported Google Workspace MIME type: ${sourceMimeType}`)
  }

  const url = `https://www.googleapis.com/drive/v3/files/${fileId}/export?mimeType=${encodeURIComponent(exportMimeType)}`

  const response = await fetch(url, {
    method: 'GET',
    headers: { Authorization: `Bearer ${accessToken}` },
  })

  if (!response.ok) {
    throw new Error(`Failed to export file ${fileId}: ${response.status}`)
  }

  return response.text()
}

async function downloadTextFile(accessToken: string, fileId: string): Promise<string> {
  const url = `https://www.googleapis.com/drive/v3/files/${fileId}?alt=media`

  const response = await fetch(url, {
    method: 'GET',
    headers: { Authorization: `Bearer ${accessToken}` },
  })

  if (!response.ok) {
    throw new Error(`Failed to download file ${fileId}: ${response.status}`)
  }

  const text = await response.text()
  if (text.length > MAX_EXPORT_SIZE) {
    return text.slice(0, MAX_EXPORT_SIZE)
  }
  return text
}

async function fetchFileContent(
  accessToken: string,
  fileId: string,
  mimeType: string
): Promise<string> {
  if (isGoogleWorkspaceFile(mimeType)) {
    return exportGoogleWorkspaceFile(accessToken, fileId, mimeType)
  }
  if (mimeType === 'text/html') {
    const html = await downloadTextFile(accessToken, fileId)
    return htmlToPlainText(html)
  }
  if (isSupportedTextFile(mimeType)) {
    return downloadTextFile(accessToken, fileId)
  }

  throw new Error(`Unsupported MIME type for content extraction: ${mimeType}`)
}
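Content extraction dispatches purely on MIME type: Google Workspace files go through the export endpoint, HTML gets flattened, other supported text is downloaded verbatim, and everything else throws:

```typescript
const token = process.env.GOOGLE_TOKEN ?? '' // placeholder token; file IDs below are placeholders too
// Google Doc -> exported via /export as text/plain
await fetchFileContent(token, 'docFileId', 'application/vnd.google-apps.document')
// HTML -> downloaded, then stripped with htmlToPlainText
await fetchFileContent(token, 'htmlFileId', 'text/html')
// Markdown/CSV/JSON/... -> downloaded verbatim
await fetchFileContent(token, 'mdFileId', 'text/markdown')
// Anything else -> throws 'Unsupported MIME type for content extraction'
```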
interface DriveFile {
  id: string
  name: string
  mimeType: string
  modifiedTime?: string
  createdTime?: string
  webViewLink?: string
  parents?: string[]
  owners?: { displayName?: string; emailAddress?: string }[]
  size?: string
  starred?: boolean
  trashed?: boolean
}

function buildQuery(sourceConfig: Record<string, unknown>): string {
  const parts: string[] = ['trashed = false']

  const folderId = sourceConfig.folderId as string | undefined
  if (folderId?.trim()) {
    parts.push(`'${folderId.trim()}' in parents`)
  }

  const fileType = (sourceConfig.fileType as string) || 'all'
  switch (fileType) {
    case 'documents':
      parts.push("mimeType = 'application/vnd.google-apps.document'")
      break
    case 'spreadsheets':
      parts.push("mimeType = 'application/vnd.google-apps.spreadsheet'")
      break
    case 'presentations':
      parts.push("mimeType = 'application/vnd.google-apps.presentation'")
      break
    case 'text':
      parts.push(`(${SUPPORTED_TEXT_MIME_TYPES.map((t) => `mimeType = '${t}'`).join(' or ')})`)
      break
    default: {
      // Include Google Workspace files + plain text files, exclude folders
      const allMimeTypes = [
        ...Object.keys(GOOGLE_WORKSPACE_MIME_TYPES),
        ...SUPPORTED_TEXT_MIME_TYPES,
      ]
      parts.push(`(${allMimeTypes.map((t) => `mimeType = '${t}'`).join(' or ')})`)
      break
    }
  }

  return parts.join(' and ')
}
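`buildQuery` composes the Drive `q` expression from the config; for example:

```typescript
buildQuery({ folderId: 'abc123', fileType: 'documents' })
// -> "trashed = false and 'abc123' in parents and mimeType = 'application/vnd.google-apps.document'"

buildQuery({})
// -> "trashed = false and (mimeType = 'application/vnd.google-apps.document' or ... )"
//    i.e. all Workspace types plus every SUPPORTED_TEXT_MIME_TYPES entry, folders excluded
```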
async function fileToDocument(
  accessToken: string,
  file: DriveFile
): Promise<ExternalDocument | null> {
  try {
    const content = await fetchFileContent(accessToken, file.id, file.mimeType)
    if (!content.trim()) {
      logger.info(`Skipping empty file: ${file.name} (${file.id})`)
      return null
    }

    const contentHash = await computeContentHash(content)

    return {
      externalId: file.id,
      title: file.name || 'Untitled',
      content,
      mimeType: 'text/plain',
      sourceUrl: file.webViewLink || `https://drive.google.com/file/d/${file.id}/view`,
      contentHash,
      metadata: {
        originalMimeType: file.mimeType,
        modifiedTime: file.modifiedTime,
        createdTime: file.createdTime,
        owners: file.owners?.map((o) => o.displayName || o.emailAddress).filter(Boolean),
        starred: file.starred,
        fileSize: file.size ? Number(file.size) : undefined,
      },
    }
  } catch (error) {
    logger.warn(`Failed to extract content from file: ${file.name} (${file.id})`, {
      error: error instanceof Error ? error.message : String(error),
    })
    return null
  }
}
export const googleDriveConnector: ConnectorConfig = {
  id: 'google_drive',
  name: 'Google Drive',
  description: 'Sync documents from Google Drive into your knowledge base',
  version: '1.0.0',
  icon: GoogleDriveIcon,

  oauth: {
    required: true,
    provider: 'google-drive',
    requiredScopes: ['https://www.googleapis.com/auth/drive.readonly'],
  },

  configFields: [
    {
      id: 'folderId',
      title: 'Folder ID',
      type: 'short-input',
      placeholder: 'e.g. 1aBcDeFgHiJkLmNoPqRsTuVwXyZ (optional)',
      required: false,
    },
    {
      id: 'fileType',
      title: 'File Type',
      type: 'dropdown',
      required: false,
      options: [
        { label: 'All supported files', id: 'all' },
        { label: 'Google Docs only', id: 'documents' },
        { label: 'Google Sheets only', id: 'spreadsheets' },
        { label: 'Google Slides only', id: 'presentations' },
        { label: 'Plain text files only', id: 'text' },
      ],
    },
    {
      id: 'maxFiles',
      title: 'Max Files',
      type: 'short-input',
      required: false,
      placeholder: 'e.g. 500 (default: unlimited)',
    },
  ],

  listDocuments: async (
    accessToken: string,
    sourceConfig: Record<string, unknown>,
    cursor?: string
  ): Promise<ExternalDocumentList> => {
    const query = buildQuery(sourceConfig)
    const pageSize = 100

    const queryParams = new URLSearchParams({
      q: query,
      pageSize: String(pageSize),
      fields:
        'nextPageToken,files(id,name,mimeType,modifiedTime,createdTime,webViewLink,parents,owners,size,starred)',
      supportsAllDrives: 'true',
      includeItemsFromAllDrives: 'true',
    })

    if (cursor) {
      queryParams.set('pageToken', cursor)
    }

    const url = `https://www.googleapis.com/drive/v3/files?${queryParams.toString()}`

    logger.info('Listing Google Drive files', { query, cursor: cursor ?? 'initial' })

    const response = await fetch(url, {
      method: 'GET',
      headers: {
        Authorization: `Bearer ${accessToken}`,
        Accept: 'application/json',
      },
    })

    if (!response.ok) {
      const errorText = await response.text()
      logger.error('Failed to list Google Drive files', {
        status: response.status,
        error: errorText,
      })
      throw new Error(`Failed to list Google Drive files: ${response.status}`)
    }

    const data = await response.json()
    const files = (data.files || []) as DriveFile[]

    const documentResults = await Promise.all(
      files.map((file) => fileToDocument(accessToken, file))
    )
    const documents = documentResults.filter(Boolean) as ExternalDocument[]

    const nextPageToken = data.nextPageToken as string | undefined
    const hasMore = Boolean(nextPageToken)

    return {
      documents,
      nextCursor: nextPageToken,
      hasMore,
    }
  },

  getDocument: async (
    accessToken: string,
    sourceConfig: Record<string, unknown>,
    externalId: string
  ): Promise<ExternalDocument | null> => {
    const fields =
      'id,name,mimeType,modifiedTime,createdTime,webViewLink,parents,owners,size,starred,trashed'
    const url = `https://www.googleapis.com/drive/v3/files/${externalId}?fields=${encodeURIComponent(fields)}&supportsAllDrives=true`

    const response = await fetch(url, {
      method: 'GET',
      headers: {
        Authorization: `Bearer ${accessToken}`,
        Accept: 'application/json',
      },
    })

    if (!response.ok) {
      if (response.status === 404) return null
      throw new Error(`Failed to get Google Drive file: ${response.status}`)
    }

    const file = (await response.json()) as DriveFile

    if (file.trashed) return null

    return fileToDocument(accessToken, file)
  },

  validateConfig: async (
    accessToken: string,
    sourceConfig: Record<string, unknown>
  ): Promise<{ valid: boolean; error?: string }> => {
    const folderId = sourceConfig.folderId as string | undefined
    const maxFiles = sourceConfig.maxFiles as string | undefined

    if (maxFiles && (Number.isNaN(Number(maxFiles)) || Number(maxFiles) <= 0)) {
      return { valid: false, error: 'Max files must be a positive number' }
    }

    // Verify access to Drive API
    try {
      if (folderId?.trim()) {
        // Verify the folder exists and is accessible
        const url = `https://www.googleapis.com/drive/v3/files/${folderId.trim()}?fields=id,name,mimeType&supportsAllDrives=true`
        const response = await fetch(url, {
          method: 'GET',
          headers: {
            Authorization: `Bearer ${accessToken}`,
            Accept: 'application/json',
          },
        })

        if (!response.ok) {
          if (response.status === 404) {
            return { valid: false, error: 'Folder not found. Check the folder ID and permissions.' }
          }
          return { valid: false, error: `Failed to access folder: ${response.status}` }
        }

        const folder = await response.json()
        if (folder.mimeType !== 'application/vnd.google-apps.folder') {
          return { valid: false, error: 'The provided ID is not a folder' }
        }
      } else {
        // Verify basic Drive access by listing one file
        const url = 'https://www.googleapis.com/drive/v3/files?pageSize=1&fields=files(id)'
        const response = await fetch(url, {
          method: 'GET',
          headers: {
            Authorization: `Bearer ${accessToken}`,
            Accept: 'application/json',
          },
        })

        if (!response.ok) {
          return { valid: false, error: `Failed to access Google Drive: ${response.status}` }
        }
      }

      return { valid: true }
    } catch (error) {
      const message = error instanceof Error ? error.message : 'Failed to validate configuration'
      return { valid: false, error: message }
    }
  },

  tagDefinitions: [
    { id: 'owners', displayName: 'Owner', fieldType: 'text' },
    { id: 'fileType', displayName: 'File Type', fieldType: 'text' },
    { id: 'lastModified', displayName: 'Last Modified', fieldType: 'date' },
    { id: 'starred', displayName: 'Starred', fieldType: 'boolean' },
  ],

  mapTags: (metadata: Record<string, unknown>): Record<string, unknown> => {
    const result: Record<string, unknown> = {}

    const owners = Array.isArray(metadata.owners) ? (metadata.owners as string[]) : []
    if (owners.length > 0) result.owners = owners.join(', ')

    if (typeof metadata.originalMimeType === 'string') {
      const mimeType = metadata.originalMimeType
      if (mimeType.includes('document')) result.fileType = 'Google Doc'
      else if (mimeType.includes('spreadsheet')) result.fileType = 'Google Sheet'
      else if (mimeType.includes('presentation')) result.fileType = 'Google Slides'
      else if (mimeType.startsWith('text/')) result.fileType = 'Text File'
      else result.fileType = mimeType
    }

    if (typeof metadata.modifiedTime === 'string') {
      const date = new Date(metadata.modifiedTime)
      if (!Number.isNaN(date.getTime())) result.lastModified = date
    }

    if (typeof metadata.starred === 'boolean') {
      result.starred = metadata.starred
    }

    return result
  },
}
1
apps/sim/connectors/google-drive/index.ts
Normal file
@@ -0,0 +1 @@
export { googleDriveConnector } from '@/connectors/google-drive/google-drive'
20
apps/sim/connectors/icons.ts
Normal file
@@ -0,0 +1,20 @@
import { ConfluenceIcon, GithubIcon, LinearIcon, NotionIcon } from '@/components/icons'

interface ConnectorMeta {
  icon: React.ComponentType<{ className?: string }>
  name: string
}

/** Connector type → client-safe metadata (icon + display name) */
export const CONNECTOR_META: Record<string, ConnectorMeta> = {
  confluence: { icon: ConfluenceIcon, name: 'Confluence' },
  github: { icon: GithubIcon, name: 'GitHub' },
  linear: { icon: LinearIcon, name: 'Linear' },
  notion: { icon: NotionIcon, name: 'Notion' },
}

/** Connector type → icon component mapping for client-side use */
export const CONNECTOR_ICONS: Record<
  string,
  React.ComponentType<{ className?: string }>
> = Object.fromEntries(Object.entries(CONNECTOR_META).map(([k, v]) => [k, v.icon]))
9
apps/sim/connectors/index.ts
Normal file
@@ -0,0 +1,9 @@
export { CONNECTOR_REGISTRY } from '@/connectors/registry'
export type {
  ConnectorConfig,
  ConnectorConfigField,
  ConnectorRegistry,
  ExternalDocument,
  ExternalDocumentList,
  SyncResult,
} from '@/connectors/types'
1
apps/sim/connectors/jira/index.ts
Normal file
@@ -0,0 +1 @@
export { jiraConnector } from '@/connectors/jira/jira'
313
apps/sim/connectors/jira/jira.ts
Normal file
@@ -0,0 +1,313 @@
import { createLogger } from '@sim/logger'
import { JiraIcon } from '@/components/icons'
import type { ConnectorConfig, ExternalDocument, ExternalDocumentList } from '@/connectors/types'
import { extractAdfText, getJiraCloudId } from '@/tools/jira/utils'

const logger = createLogger('JiraConnector')

const PAGE_SIZE = 50

/**
 * Computes a SHA-256 hash of the given content.
 */
async function computeContentHash(content: string): Promise<string> {
  const data = new TextEncoder().encode(content)
  const hashBuffer = await crypto.subtle.digest('SHA-256', data)
  return Array.from(new Uint8Array(hashBuffer))
    .map((b) => b.toString(16).padStart(2, '0'))
    .join('')
}

/**
 * Builds a plain-text representation of a Jira issue for knowledge base indexing.
 */
function buildIssueContent(fields: Record<string, unknown>): string {
  const parts: string[] = []

  const summary = fields.summary as string | undefined
  if (summary) parts.push(summary)

  const description = extractAdfText(fields.description)
  if (description) parts.push(description)

  const comments = fields.comment as { comments?: Array<{ body?: unknown }> } | undefined
  if (comments?.comments) {
    for (const comment of comments.comments) {
      const text = extractAdfText(comment.body)
      if (text) parts.push(text)
    }
  }

  return parts.join('\n\n').trim()
}
/**
 * Converts a Jira issue API response to an ExternalDocument.
 */
async function issueToDocument(
  issue: Record<string, unknown>,
  domain: string
): Promise<ExternalDocument> {
  const fields = (issue.fields || {}) as Record<string, unknown>
  const content = buildIssueContent(fields)
  const contentHash = await computeContentHash(content)

  const key = issue.key as string
  const issueType = fields.issuetype as Record<string, unknown> | undefined
  const status = fields.status as Record<string, unknown> | undefined
  const priority = fields.priority as Record<string, unknown> | undefined
  const assignee = fields.assignee as Record<string, unknown> | undefined
  const reporter = fields.reporter as Record<string, unknown> | undefined
  const project = fields.project as Record<string, unknown> | undefined
  const labels = Array.isArray(fields.labels) ? (fields.labels as string[]) : []

  return {
    externalId: String(issue.id),
    title: `${key}: ${(fields.summary as string) || 'Untitled'}`,
    content,
    mimeType: 'text/plain',
    sourceUrl: `https://${domain}/browse/${key}`,
    contentHash,
    metadata: {
      key,
      issueType: issueType?.name,
      status: status?.name,
      priority: priority?.name,
      assignee: assignee?.displayName,
      reporter: reporter?.displayName,
      project: project?.key,
      labels,
      created: fields.created,
      updated: fields.updated,
    },
  }
}
export const jiraConnector: ConnectorConfig = {
  id: 'jira',
  name: 'Jira',
  description: 'Sync issues from a Jira project into your knowledge base',
  version: '1.0.0',
  icon: JiraIcon,

  oauth: {
    required: true,
    provider: 'jira',
    requiredScopes: ['read:jira-work', 'offline_access'],
  },

  configFields: [
    {
      id: 'domain',
      title: 'Jira Domain',
      type: 'short-input',
      placeholder: 'yoursite.atlassian.net',
      required: true,
    },
    {
      id: 'projectKey',
      title: 'Project Key',
      type: 'short-input',
      placeholder: 'e.g. ENG, PROJ',
      required: true,
    },
    {
      id: 'jql',
      title: 'JQL Filter',
      type: 'short-input',
      required: false,
      placeholder: 'e.g. status = "Done" AND type = Bug',
    },
    {
      id: 'maxIssues',
      title: 'Max Issues',
      type: 'short-input',
      required: false,
      placeholder: 'e.g. 500 (default: unlimited)',
    },
  ],

  listDocuments: async (
    accessToken: string,
    sourceConfig: Record<string, unknown>,
    cursor?: string
  ): Promise<ExternalDocumentList> => {
    const domain = sourceConfig.domain as string
    const projectKey = sourceConfig.projectKey as string
    const jqlFilter = (sourceConfig.jql as string) || ''
    const maxIssues = sourceConfig.maxIssues ? Number(sourceConfig.maxIssues) : 0

    const cloudId = await getJiraCloudId(domain, accessToken)

    let jql = `project = "${projectKey}" ORDER BY updated DESC`
    if (jqlFilter.trim()) {
      jql = `project = "${projectKey}" AND (${jqlFilter.trim()}) ORDER BY updated DESC`
    }

    const startAt = cursor ? Number(cursor) : 0

    const params = new URLSearchParams()
    params.append('jql', jql)
    params.append('startAt', String(startAt))
    params.append('maxResults', String(PAGE_SIZE))
    params.append(
      'fields',
      'summary,description,comment,issuetype,status,priority,assignee,reporter,project,labels,created,updated'
    )

    const url = `https://api.atlassian.com/ex/jira/${cloudId}/rest/api/3/search?${params.toString()}`

    logger.info(`Listing Jira issues for project ${projectKey}`, { startAt })

    const response = await fetch(url, {
      method: 'GET',
      headers: {
        Accept: 'application/json',
        Authorization: `Bearer ${accessToken}`,
      },
    })

    if (!response.ok) {
      const errorText = await response.text()
      logger.error('Failed to search Jira issues', {
        status: response.status,
        error: errorText,
      })
      throw new Error(`Failed to search Jira issues: ${response.status}`)
    }

    const data = await response.json()
    const issues = (data.issues || []) as Record<string, unknown>[]
    const total = (data.total as number) ?? 0

    const documents: ExternalDocument[] = await Promise.all(
      issues.map((issue) => issueToDocument(issue, domain))
    )

    const nextStart = startAt + issues.length
    const hasMore = nextStart < total && (maxIssues <= 0 || nextStart < maxIssues)

    return {
      documents,
      nextCursor: hasMore ? String(nextStart) : undefined,
      hasMore,
    }
  },

  getDocument: async (
    accessToken: string,
    sourceConfig: Record<string, unknown>,
    externalId: string
  ): Promise<ExternalDocument | null> => {
    const domain = sourceConfig.domain as string
    const cloudId = await getJiraCloudId(domain, accessToken)

    const params = new URLSearchParams()
    params.append(
      'fields',
      'summary,description,comment,issuetype,status,priority,assignee,reporter,project,labels,created,updated'
    )

    const url = `https://api.atlassian.com/ex/jira/${cloudId}/rest/api/3/issue/${externalId}?${params.toString()}`

    const response = await fetch(url, {
      method: 'GET',
      headers: {
        Accept: 'application/json',
        Authorization: `Bearer ${accessToken}`,
      },
    })

    if (!response.ok) {
      if (response.status === 404) return null
      throw new Error(`Failed to get Jira issue: ${response.status}`)
    }

    const issue = await response.json()
    return issueToDocument(issue, domain)
  },

  validateConfig: async (
    accessToken: string,
    sourceConfig: Record<string, unknown>
  ): Promise<{ valid: boolean; error?: string }> => {
    const domain = sourceConfig.domain as string
    const projectKey = sourceConfig.projectKey as string

    if (!domain || !projectKey) {
      return { valid: false, error: 'Domain and project key are required' }
    }

    const maxIssues = sourceConfig.maxIssues as string | undefined
    if (maxIssues && (Number.isNaN(Number(maxIssues)) || Number(maxIssues) <= 0)) {
      return { valid: false, error: 'Max issues must be a positive number' }
    }

    const jql = sourceConfig.jql as string | undefined
    if (jql?.trim()) {
      if (/\b(delete|drop|truncate|insert|update|alter|create|grant|revoke)\b/i.test(jql)) {
        return { valid: false, error: 'Invalid JQL filter' }
      }
    }

    try {
      const cloudId = await getJiraCloudId(domain, accessToken)

      // Verify the project exists by running a minimal search
      const params = new URLSearchParams()
      params.append('jql', `project = "${projectKey}"`)
      params.append('maxResults', '0')

      const url = `https://api.atlassian.com/ex/jira/${cloudId}/rest/api/3/search?${params.toString()}`
      const response = await fetch(url, {
        method: 'GET',
        headers: {
          Accept: 'application/json',
          Authorization: `Bearer ${accessToken}`,
        },
      })

      if (!response.ok) {
        const errorText = await response.text()
        if (response.status === 400) {
          return { valid: false, error: `Project "${projectKey}" not found or JQL is invalid` }
        }
        return { valid: false, error: `Failed to validate: ${response.status} - ${errorText}` }
      }

      return { valid: true }
    } catch (error) {
      const message = error instanceof Error ? error.message : 'Failed to validate configuration'
      return { valid: false, error: message }
    }
  },

  tagDefinitions: [
    { id: 'issueType', displayName: 'Issue Type', fieldType: 'text' },
    { id: 'status', displayName: 'Status', fieldType: 'text' },
    { id: 'priority', displayName: 'Priority', fieldType: 'text' },
    { id: 'labels', displayName: 'Labels', fieldType: 'text' },
    { id: 'assignee', displayName: 'Assignee', fieldType: 'text' },
    { id: 'updated', displayName: 'Last Updated', fieldType: 'date' },
  ],

  mapTags: (metadata: Record<string, unknown>): Record<string, unknown> => {
    const result: Record<string, unknown> = {}

    if (typeof metadata.issueType === 'string') result.issueType = metadata.issueType
    if (typeof metadata.status === 'string') result.status = metadata.status
    if (typeof metadata.priority === 'string') result.priority = metadata.priority

    const labels = Array.isArray(metadata.labels) ? (metadata.labels as string[]) : []
    if (labels.length > 0) result.labels = labels.join(', ')

    if (typeof metadata.assignee === 'string') result.assignee = metadata.assignee

    if (typeof metadata.updated === 'string') {
      const date = new Date(metadata.updated)
      if (!Number.isNaN(date.getTime())) result.updated = date
    }

    return result
  },
}
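The listing JQL is composed from the project key plus the optional user filter, with the filter parenthesized so its operators can't bleed into the outer clause:

```typescript
const projectKey = 'ENG'
const jqlFilter = 'status = "Done" AND type = Bug'

const jql = `project = "${projectKey}" AND (${jqlFilter}) ORDER BY updated DESC`
// -> project = "ENG" AND (status = "Done" AND type = Bug) ORDER BY updated DESC
// With no filter configured: project = "ENG" ORDER BY updated DESC
```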
1
apps/sim/connectors/linear/index.ts
Normal file
@@ -0,0 +1 @@
export { linearConnector } from '@/connectors/linear/linear'
415
apps/sim/connectors/linear/linear.ts
Normal file
@@ -0,0 +1,415 @@
import { createLogger } from '@sim/logger'
import { LinearIcon } from '@/components/icons'
import type { ConnectorConfig, ExternalDocument, ExternalDocumentList } from '@/connectors/types'

const logger = createLogger('LinearConnector')

const LINEAR_API = 'https://api.linear.app/graphql'

/**
 * Computes a SHA-256 hash of the given content.
 */
async function computeContentHash(content: string): Promise<string> {
  const data = new TextEncoder().encode(content)
  const hashBuffer = await crypto.subtle.digest('SHA-256', data)
  return Array.from(new Uint8Array(hashBuffer))
    .map((b) => b.toString(16).padStart(2, '0'))
    .join('')
}

/**
 * Strips Markdown formatting to produce plain text.
 */
function markdownToPlainText(md: string): string {
  let text = md
    .replace(/!\[.*?\]\(.*?\)/g, '') // images
    .replace(/\[([^\]]*)\]\(.*?\)/g, '$1') // links
    .replace(/#{1,6}\s+/g, '') // headings
    .replace(/(\*\*|__)(.*?)\1/g, '$2') // bold
    .replace(/(\*|_)(.*?)\1/g, '$2') // italic
    .replace(/~~(.*?)~~/g, '$1') // strikethrough
    .replace(/`{3}[\s\S]*?`{3}/g, '') // code blocks
    .replace(/`([^`]*)`/g, '$1') // inline code
    .replace(/^\s*[-*+]\s+/gm, '') // list items
    .replace(/^\s*\d+\.\s+/gm, '') // ordered list items
    .replace(/^\s*>\s+/gm, '') // blockquotes
    .replace(/---+/g, '') // horizontal rules
  text = text.replace(/\s+/g, ' ').trim()
  return text
}
/**
 * Executes a GraphQL query against the Linear API.
 */
async function linearGraphQL(
  accessToken: string,
  query: string,
  variables?: Record<string, unknown>
): Promise<Record<string, unknown>> {
  const response = await fetch(LINEAR_API, {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      Authorization: `Bearer ${accessToken}`,
    },
    body: JSON.stringify({ query, variables }),
  })

  if (!response.ok) {
    const errorText = await response.text()
    logger.error('Linear GraphQL request failed', { status: response.status, error: errorText })
    throw new Error(`Linear API error: ${response.status}`)
  }

  const json = (await response.json()) as { data?: Record<string, unknown>; errors?: unknown[] }
  if (json.errors) {
    logger.error('Linear GraphQL errors', { errors: json.errors })
    throw new Error(`Linear GraphQL error: ${JSON.stringify(json.errors)}`)
  }

  return json.data as Record<string, unknown>
}
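A minimal call through the helper (the `viewer` query is standard Linear GraphQL; token handling is assumed):

```typescript
const data = await linearGraphQL(process.env.LINEAR_TOKEN ?? '', 'query { viewer { id name } }')
// data.viewer -> { id: '...', name: '...' }
// GraphQL-level errors throw here rather than returning partial data.
```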
/**
 * Builds a formatted text document from a Linear issue.
 */
function buildIssueContent(issue: Record<string, unknown>): string {
  const parts: string[] = []

  const identifier = issue.identifier as string | undefined
  const title = (issue.title as string) || 'Untitled'
  parts.push(`${identifier ? `${identifier}: ` : ''}${title}`)

  const state = issue.state as Record<string, unknown> | undefined
  if (state?.name) parts.push(`Status: ${state.name}`)

  const priority = issue.priorityLabel as string | undefined
  if (priority) parts.push(`Priority: ${priority}`)

  const assignee = issue.assignee as Record<string, unknown> | undefined
  if (assignee?.name) parts.push(`Assignee: ${assignee.name}`)

  const labelsConn = issue.labels as Record<string, unknown> | undefined
  const labelNodes = (labelsConn?.nodes || []) as Record<string, unknown>[]
  if (labelNodes.length > 0) {
    parts.push(`Labels: ${labelNodes.map((l) => l.name as string).join(', ')}`)
  }

  const description = issue.description as string | undefined
  if (description) {
    parts.push('')
    parts.push(markdownToPlainText(description))
  }

  return parts.join('\n')
}
const ISSUE_FIELDS = `
  id
  identifier
  title
  description
  priority
  priorityLabel
  url
  createdAt
  updatedAt
  state { name }
  assignee { name }
  labels { nodes { name } }
  team { name key }
  project { name }
`

const ISSUE_BY_ID_QUERY = `
  query GetIssue($id: String!) {
    issue(id: $id) {
      ${ISSUE_FIELDS}
    }
  }
`

const TEAMS_QUERY = `
  query { teams { nodes { id name key } } }
`

/**
 * Dynamically builds a GraphQL issues query with only the filter clauses
 * that have values, preventing null comparators from being sent to Linear.
 */
function buildIssuesQuery(sourceConfig: Record<string, unknown>): {
  query: string
  variables: Record<string, unknown>
} {
  const teamId = (sourceConfig.teamId as string) || ''
  const projectId = (sourceConfig.projectId as string) || ''
  const stateFilter = (sourceConfig.stateFilter as string) || ''

  const varDefs: string[] = ['$first: Int!', '$after: String']
  const filterClauses: string[] = []
  const variables: Record<string, unknown> = {}

  if (teamId) {
    varDefs.push('$teamId: String!')
    filterClauses.push('team: { id: { eq: $teamId } }')
    variables.teamId = teamId
  }

  if (projectId) {
    varDefs.push('$projectId: String!')
    filterClauses.push('project: { id: { eq: $projectId } }')
    variables.projectId = projectId
  }

  if (stateFilter) {
    const states = stateFilter
      .split(',')
      .map((s) => s.trim())
      .filter(Boolean)
    if (states.length > 0) {
      varDefs.push('$stateFilter: [String!]!')
      filterClauses.push('state: { name: { in: $stateFilter } }')
      variables.stateFilter = states
    }
  }

  const filterArg = filterClauses.length > 0 ? `, filter: { ${filterClauses.join(', ')} }` : ''

  const query = `
    query ListIssues(${varDefs.join(', ')}) {
      issues(first: $first, after: $after${filterArg}) {
        nodes {
          ${ISSUE_FIELDS}
        }
        pageInfo {
          hasNextPage
          endCursor
        }
      }
    }
  `

  return { query, variables }
}
export const linearConnector: ConnectorConfig = {
|
||||
id: 'linear',
|
||||
name: 'Linear',
|
||||
description: 'Sync issues from Linear into your knowledge base',
|
||||
version: '1.0.0',
|
||||
icon: LinearIcon,
|
||||
|
||||
oauth: {
|
||||
required: true,
|
||||
provider: 'linear',
|
||||
requiredScopes: ['read'],
|
||||
},
|
||||
|
||||
configFields: [
|
||||
{
|
||||
id: 'teamId',
|
||||
title: 'Team ID',
|
||||
type: 'short-input',
|
||||
placeholder: 'e.g. abc123 (leave empty for all teams)',
|
||||
required: false,
|
||||
},
|
||||
{
|
||||
id: 'projectId',
|
||||
title: 'Project ID',
|
||||
type: 'short-input',
|
||||
placeholder: 'e.g. def456 (leave empty for all projects)',
|
||||
required: false,
|
||||
},
|
||||
{
|
||||
id: 'stateFilter',
|
||||
title: 'State Filter',
|
||||
type: 'short-input',
|
||||
placeholder: 'e.g. In Progress, Todo',
|
||||
required: false,
|
||||
},
|
||||
{
|
||||
id: 'maxIssues',
|
||||
title: 'Max Issues',
|
||||
type: 'short-input',
|
||||
required: false,
|
||||
placeholder: 'e.g. 500 (default: unlimited)',
|
||||
},
|
||||
],
|
||||
|
||||
listDocuments: async (
|
||||
accessToken: string,
|
||||
sourceConfig: Record<string, unknown>,
|
||||
cursor?: string
|
||||
): Promise<ExternalDocumentList> => {
|
||||
const maxIssues = sourceConfig.maxIssues ? Number(sourceConfig.maxIssues) : 0
|
||||
const pageSize = maxIssues > 0 ? Math.min(maxIssues, 50) : 50
|
||||
|
||||
const { query, variables } = buildIssuesQuery(sourceConfig)
|
||||
const allVars = { ...variables, first: pageSize, after: cursor || undefined }
|
||||
|
||||
logger.info('Listing Linear issues', {
|
||||
cursor,
|
||||
pageSize,
|
||||
hasTeamFilter: Boolean(sourceConfig.teamId),
|
||||
hasProjectFilter: Boolean(sourceConfig.projectId),
|
||||
})
|
||||
|
||||
const data = await linearGraphQL(accessToken, query, allVars)
|
||||
const issuesConn = data.issues as Record<string, unknown>
|
||||
const nodes = (issuesConn.nodes || []) as Record<string, unknown>[]
|
||||
const pageInfo = issuesConn.pageInfo as Record<string, unknown>
|
||||
|
||||
const documents: ExternalDocument[] = await Promise.all(
|
||||
nodes.map(async (issue) => {
|
||||
const content = buildIssueContent(issue)
|
||||
const contentHash = await computeContentHash(content)
|
||||
|
||||
const labelNodes = ((issue.labels as Record<string, unknown>)?.nodes || []) as Record<
|
||||
string,
|
||||
unknown
|
||||
>[]
|
||||
|
||||
return {
|
||||
externalId: issue.id as string,
|
||||
          title: `${issue.identifier ? `${issue.identifier as string}: ` : ''}${(issue.title as string) || 'Untitled'}`,
          content,
          mimeType: 'text/plain' as const,
          sourceUrl: (issue.url as string) || undefined,
          contentHash,
          metadata: {
            identifier: issue.identifier,
            state: (issue.state as Record<string, unknown>)?.name,
            priority: issue.priorityLabel,
            assignee: (issue.assignee as Record<string, unknown>)?.name,
            labels: labelNodes.map((l) => l.name as string),
            team: (issue.team as Record<string, unknown>)?.name,
            project: (issue.project as Record<string, unknown>)?.name,
            lastModified: issue.updatedAt,
          },
        }
      })
    )

    const hasNextPage = Boolean(pageInfo.hasNextPage)
    const endCursor = (pageInfo.endCursor as string) || undefined

    return {
      documents,
      nextCursor: hasNextPage ? endCursor : undefined,
      hasMore: hasNextPage,
    }
  },

  getDocument: async (
    accessToken: string,
    sourceConfig: Record<string, unknown>,
    externalId: string
  ): Promise<ExternalDocument | null> => {
    try {
      const data = await linearGraphQL(accessToken, ISSUE_BY_ID_QUERY, { id: externalId })
      const issue = data.issue as Record<string, unknown> | null

      if (!issue) return null

      const content = buildIssueContent(issue)
      const contentHash = await computeContentHash(content)

      const labelNodes = ((issue.labels as Record<string, unknown>)?.nodes || []) as Record<
        string,
        unknown
      >[]

      return {
        externalId: issue.id as string,
        title: `${issue.identifier ? `${issue.identifier as string}: ` : ''}${(issue.title as string) || 'Untitled'}`,
        content,
        mimeType: 'text/plain',
        sourceUrl: (issue.url as string) || undefined,
        contentHash,
        metadata: {
          identifier: issue.identifier,
          state: (issue.state as Record<string, unknown>)?.name,
          priority: issue.priorityLabel,
          assignee: (issue.assignee as Record<string, unknown>)?.name,
          labels: labelNodes.map((l) => l.name as string),
          team: (issue.team as Record<string, unknown>)?.name,
          project: (issue.project as Record<string, unknown>)?.name,
          lastModified: issue.updatedAt,
        },
      }
    } catch (error) {
      logger.error('Failed to get Linear issue', {
        externalId,
        error: error instanceof Error ? error.message : String(error),
      })
      return null
    }
  },

  validateConfig: async (
    accessToken: string,
    sourceConfig: Record<string, unknown>
  ): Promise<{ valid: boolean; error?: string }> => {
    const maxIssues = sourceConfig.maxIssues as string | undefined
    if (maxIssues && (Number.isNaN(Number(maxIssues)) || Number(maxIssues) <= 0)) {
      return { valid: false, error: 'Max issues must be a positive number' }
    }

    try {
      // Verify the token works by fetching teams
      const data = await linearGraphQL(accessToken, TEAMS_QUERY)
      const teamsConn = data.teams as Record<string, unknown>
      const teams = (teamsConn.nodes || []) as Record<string, unknown>[]

      if (teams.length === 0) {
        return {
          valid: false,
          error: 'No teams found — check that the OAuth token has read access',
        }
      }

      // If teamId specified, verify it exists
      const teamId = sourceConfig.teamId as string | undefined
      if (teamId) {
        const found = teams.some((t) => t.id === teamId)
        if (!found) {
          return {
            valid: false,
            error: `Team ID "${teamId}" not found. Available teams: ${teams.map((t) => `${t.name} (${t.id})`).join(', ')}`,
          }
        }
      }

      return { valid: true }
    } catch (error) {
      const message = error instanceof Error ? error.message : 'Failed to validate configuration'
      return { valid: false, error: message }
    }
  },

  tagDefinitions: [
    { id: 'labels', displayName: 'Labels', fieldType: 'text' },
    { id: 'state', displayName: 'State', fieldType: 'text' },
    { id: 'priority', displayName: 'Priority', fieldType: 'text' },
    { id: 'assignee', displayName: 'Assignee', fieldType: 'text' },
    { id: 'lastModified', displayName: 'Last Modified', fieldType: 'date' },
  ],

  mapTags: (metadata: Record<string, unknown>): Record<string, unknown> => {
    const result: Record<string, unknown> = {}

    const labels = Array.isArray(metadata.labels) ? (metadata.labels as string[]) : []
    if (labels.length > 0) result.labels = labels.join(', ')

    if (typeof metadata.state === 'string') result.state = metadata.state
    if (typeof metadata.priority === 'string') result.priority = metadata.priority
    if (typeof metadata.assignee === 'string') result.assignee = metadata.assignee

    if (typeof metadata.lastModified === 'string') {
      const date = new Date(metadata.lastModified)
      if (!Number.isNaN(date.getTime())) result.lastModified = date
    }

    return result
  },
}
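
// A minimal sketch of how a caller can drain the cursor contract above:
// loop on nextCursor/hasMore until the source is exhausted. This helper is
// illustrative only and is not part of the connector itself.
async function collectAllLinearIssues(
  accessToken: string,
  sourceConfig: Record<string, unknown>
): Promise<ExternalDocument[]> {
  const all: ExternalDocument[] = []
  let cursor: string | undefined
  do {
    const page = await linearConnector.listDocuments(accessToken, sourceConfig, cursor)
    all.push(...page.documents)
    cursor = page.hasMore ? page.nextCursor : undefined
  } while (cursor)
  return all
}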
1
apps/sim/connectors/notion/index.ts
Normal file
@@ -0,0 +1 @@
export { notionConnector } from '@/connectors/notion/notion'
587
apps/sim/connectors/notion/notion.ts
Normal file
@@ -0,0 +1,587 @@
import { createLogger } from '@sim/logger'
import { NotionIcon } from '@/components/icons'
import type { ConnectorConfig, ExternalDocument, ExternalDocumentList } from '@/connectors/types'

const logger = createLogger('NotionConnector')

const NOTION_API_VERSION = '2022-06-28'
const NOTION_BASE_URL = 'https://api.notion.com/v1'

/**
 * Computes a SHA-256 hash of the given content.
 */
async function computeContentHash(content: string): Promise<string> {
  const data = new TextEncoder().encode(content)
  const hashBuffer = await crypto.subtle.digest('SHA-256', data)
  return Array.from(new Uint8Array(hashBuffer))
    .map((b) => b.toString(16).padStart(2, '0'))
    .join('')
}

/**
 * Extracts the title from a Notion page's properties.
 */
function extractTitle(properties: Record<string, unknown>): string {
  for (const value of Object.values(properties)) {
    const prop = value as Record<string, unknown>
    if (prop.type === 'title' && Array.isArray(prop.title) && prop.title.length > 0) {
      return prop.title.map((t: Record<string, unknown>) => (t.plain_text as string) || '').join('')
    }
  }
  return 'Untitled'
}

/**
 * Extracts plain text from a rich_text array.
 */
function richTextToPlain(richText: Record<string, unknown>[]): string {
  return richText.map((t) => (t.plain_text as string) || '').join('')
}

/**
 * Extracts plain text content from Notion blocks.
 */
function blocksToPlainText(blocks: Record<string, unknown>[]): string {
  return blocks
    .map((block) => {
      const type = block.type as string
      const blockData = block[type] as Record<string, unknown> | undefined
      if (!blockData) return ''

      const richText = blockData.rich_text as Record<string, unknown>[] | undefined
      if (!richText) return ''

      const text = richTextToPlain(richText)

      switch (type) {
        case 'heading_1':
          return `# ${text}`
        case 'heading_2':
          return `## ${text}`
        case 'heading_3':
          return `### ${text}`
        case 'bulleted_list_item':
          return `- ${text}`
        case 'numbered_list_item':
          return `1. ${text}`
        case 'to_do': {
          const checked = (blockData.checked as boolean) ? '[x]' : '[ ]'
          return `${checked} ${text}`
        }
        case 'quote':
          return `> ${text}`
        case 'callout':
          return text
        case 'toggle':
          return text
        default:
          return text
      }
    })
    .filter(Boolean)
    .join('\n\n')
}

/**
 * Fetches all block children for a page, handling pagination.
 */
async function fetchAllBlocks(
  accessToken: string,
  pageId: string
): Promise<Record<string, unknown>[]> {
  const allBlocks: Record<string, unknown>[] = []
  let cursor: string | undefined
  let hasMore = true

  while (hasMore) {
    const params = new URLSearchParams({ page_size: '100' })
    if (cursor) params.append('start_cursor', cursor)

    const response = await fetch(
      `${NOTION_BASE_URL}/blocks/${pageId}/children?${params.toString()}`,
      {
        method: 'GET',
        headers: {
          Authorization: `Bearer ${accessToken}`,
          'Notion-Version': NOTION_API_VERSION,
        },
      }
    )

    if (!response.ok) {
      logger.warn(`Failed to fetch blocks for page ${pageId}`, { status: response.status })
      break
    }

    const data = await response.json()
    allBlocks.push(...(data.results || []))
    cursor = data.next_cursor ?? undefined
    hasMore = data.has_more === true
  }

  return allBlocks
}

/**
 * Extracts multi_select tags from page properties.
 */
function extractTags(properties: Record<string, unknown>): string[] {
  const tags: string[] = []
  for (const value of Object.values(properties)) {
    const prop = value as Record<string, unknown>
    if (prop.type === 'multi_select' && Array.isArray(prop.multi_select)) {
      for (const item of prop.multi_select) {
        const name = (item as Record<string, unknown>).name as string
        if (name) tags.push(name)
      }
    }
    if (prop.type === 'select' && prop.select) {
      const name = (prop.select as Record<string, unknown>).name as string
      if (name) tags.push(name)
    }
  }
  return tags
}

/**
 * Converts a Notion page to an ExternalDocument by fetching its block content.
 */
async function pageToExternalDocument(
  accessToken: string,
  page: Record<string, unknown>
): Promise<ExternalDocument> {
  const pageId = page.id as string
  const properties = (page.properties || {}) as Record<string, unknown>
  const title = extractTitle(properties)
  const url = page.url as string

  // Fetch page content
  const blocks = await fetchAllBlocks(accessToken, pageId)
  const plainText = blocksToPlainText(blocks)
  const contentHash = await computeContentHash(plainText)

  // Extract tags from multi_select/select properties
  const tags = extractTags(properties)

  return {
    externalId: pageId,
    title: title || 'Untitled',
    content: plainText,
    mimeType: 'text/plain',
    sourceUrl: url,
    contentHash,
    metadata: {
      tags,
      lastModified: page.last_edited_time as string,
      createdTime: page.created_time as string,
      parentType: (page.parent as Record<string, unknown>)?.type,
    },
  }
}

export const notionConnector: ConnectorConfig = {
  id: 'notion',
  name: 'Notion',
  description: 'Sync pages from a Notion workspace into your knowledge base',
  version: '1.0.0',
  icon: NotionIcon,

  oauth: {
    required: true,
    provider: 'notion',
    requiredScopes: [],
  },

  configFields: [
    {
      id: 'scope',
      title: 'Sync Scope',
      type: 'dropdown',
      required: false,
      options: [
        { label: 'Entire workspace', id: 'workspace' },
        { label: 'Specific database', id: 'database' },
        { label: 'Specific page (and children)', id: 'page' },
      ],
    },
    {
      id: 'databaseId',
      title: 'Database ID',
      type: 'short-input',
      required: false,
      placeholder: 'e.g. 8a3b5f6e-1234-5678-abcd-ef0123456789',
    },
    {
      id: 'rootPageId',
      title: 'Page ID',
      type: 'short-input',
      required: false,
      placeholder: 'e.g. 8a3b5f6e-1234-5678-abcd-ef0123456789',
    },
    {
      id: 'searchQuery',
      title: 'Search Filter',
      type: 'short-input',
      required: false,
      placeholder: 'e.g. meeting notes, project plan',
    },
    {
      id: 'maxPages',
      title: 'Max Pages',
      type: 'short-input',
      required: false,
      placeholder: 'e.g. 500 (default: unlimited)',
    },
  ],

  listDocuments: async (
    accessToken: string,
    sourceConfig: Record<string, unknown>,
    cursor?: string
  ): Promise<ExternalDocumentList> => {
    const scope = (sourceConfig.scope as string) || 'workspace'
    const databaseId = (sourceConfig.databaseId as string)?.trim()
    const rootPageId = (sourceConfig.rootPageId as string)?.trim()
    const maxPages = sourceConfig.maxPages ? Number(sourceConfig.maxPages) : 0

    if (scope === 'database' && databaseId) {
      return listFromDatabase(accessToken, databaseId, maxPages, cursor)
    }

    if (scope === 'page' && rootPageId) {
      return listFromParentPage(accessToken, rootPageId, maxPages, cursor)
    }

    // Default: workspace-wide search
    const searchQuery = (sourceConfig.searchQuery as string) || ''
    return listFromWorkspace(accessToken, searchQuery, maxPages, cursor)
  },

  getDocument: async (
    accessToken: string,
    _sourceConfig: Record<string, unknown>,
    externalId: string
  ): Promise<ExternalDocument | null> => {
    const response = await fetch(`${NOTION_BASE_URL}/pages/${externalId}`, {
      method: 'GET',
      headers: {
        Authorization: `Bearer ${accessToken}`,
        'Notion-Version': NOTION_API_VERSION,
      },
    })

    if (!response.ok) {
      if (response.status === 404) return null
      throw new Error(`Failed to get Notion page: ${response.status}`)
    }

    const page = await response.json()
    return pageToExternalDocument(accessToken, page)
  },

  validateConfig: async (
    accessToken: string,
    sourceConfig: Record<string, unknown>
  ): Promise<{ valid: boolean; error?: string }> => {
    const scope = (sourceConfig.scope as string) || 'workspace'
    const databaseId = (sourceConfig.databaseId as string)?.trim()
    const rootPageId = (sourceConfig.rootPageId as string)?.trim()
    const maxPages = sourceConfig.maxPages as string | undefined

    if (maxPages && (Number.isNaN(Number(maxPages)) || Number(maxPages) <= 0)) {
      return { valid: false, error: 'Max pages must be a positive number' }
    }

    if (scope === 'database' && !databaseId) {
      return { valid: false, error: 'Database ID is required when scope is "Specific database"' }
    }

    if (scope === 'page' && !rootPageId) {
      return { valid: false, error: 'Page ID is required when scope is "Specific page"' }
    }

    try {
      // Verify the token works
      if (scope === 'database' && databaseId) {
        // Verify database is accessible
        const response = await fetch(`${NOTION_BASE_URL}/databases/${databaseId}`, {
          method: 'GET',
          headers: {
            Authorization: `Bearer ${accessToken}`,
            'Notion-Version': NOTION_API_VERSION,
          },
        })
        if (!response.ok) {
          return { valid: false, error: `Cannot access database: ${response.status}` }
        }
      } else if (scope === 'page' && rootPageId) {
        // Verify page is accessible
        const response = await fetch(`${NOTION_BASE_URL}/pages/${rootPageId}`, {
          method: 'GET',
          headers: {
            Authorization: `Bearer ${accessToken}`,
            'Notion-Version': NOTION_API_VERSION,
          },
        })
        if (!response.ok) {
          return { valid: false, error: `Cannot access page: ${response.status}` }
        }
      } else {
        // Workspace scope — just verify token works
        const response = await fetch(`${NOTION_BASE_URL}/search`, {
          method: 'POST',
          headers: {
            Authorization: `Bearer ${accessToken}`,
            'Notion-Version': NOTION_API_VERSION,
            'Content-Type': 'application/json',
          },
          body: JSON.stringify({ page_size: 1 }),
        })
        if (!response.ok) {
          const errorText = await response.text()
          return { valid: false, error: `Cannot access Notion workspace: ${errorText}` }
        }
      }

      return { valid: true }
    } catch (error) {
      const message = error instanceof Error ? error.message : 'Failed to validate configuration'
      return { valid: false, error: message }
    }
  },

  tagDefinitions: [
    { id: 'tags', displayName: 'Tags', fieldType: 'text' },
    { id: 'lastModified', displayName: 'Last Modified', fieldType: 'date' },
    { id: 'created', displayName: 'Created', fieldType: 'date' },
  ],

  mapTags: (metadata: Record<string, unknown>): Record<string, unknown> => {
    const result: Record<string, unknown> = {}

    const tags = Array.isArray(metadata.tags) ? (metadata.tags as string[]) : []
    if (tags.length > 0) result.tags = tags.join(', ')

    if (typeof metadata.lastModified === 'string') {
      const date = new Date(metadata.lastModified)
      if (!Number.isNaN(date.getTime())) result.lastModified = date
    }

    if (typeof metadata.createdTime === 'string') {
      const date = new Date(metadata.createdTime)
      if (!Number.isNaN(date.getTime())) result.created = date
    }

    return result
  },
}

/**
 * Lists pages from the entire workspace using the search API.
 */
async function listFromWorkspace(
  accessToken: string,
  searchQuery: string,
  maxPages: number,
  cursor?: string
): Promise<ExternalDocumentList> {
  const body: Record<string, unknown> = {
    page_size: 20,
    filter: { value: 'page', property: 'object' },
    sort: { direction: 'descending', timestamp: 'last_edited_time' },
  }

  if (searchQuery.trim()) {
    body.query = searchQuery.trim()
  }

  if (cursor) {
    body.start_cursor = cursor
  }

  logger.info('Listing Notion pages from workspace', { searchQuery, cursor })

  const response = await fetch(`${NOTION_BASE_URL}/search`, {
    method: 'POST',
    headers: {
      Authorization: `Bearer ${accessToken}`,
      'Notion-Version': NOTION_API_VERSION,
      'Content-Type': 'application/json',
    },
    body: JSON.stringify(body),
  })

  if (!response.ok) {
    const errorText = await response.text()
    logger.error('Failed to search Notion', { status: response.status, error: errorText })
    throw new Error(`Failed to search Notion: ${response.status}`)
  }

  const data = await response.json()
  const results = (data.results || []) as Record<string, unknown>[]
  const pages = results.filter((r) => r.object === 'page' && !(r.archived as boolean))

  const documents = await processPages(accessToken, pages)
  const nextCursor = (data.next_cursor as string) ?? undefined

  return {
    documents,
    nextCursor,
    hasMore: data.has_more === true && (maxPages <= 0 || documents.length < maxPages),
  }
}

/**
 * Lists pages from a specific Notion database.
 */
async function listFromDatabase(
  accessToken: string,
  databaseId: string,
  maxPages: number,
  cursor?: string
): Promise<ExternalDocumentList> {
  const body: Record<string, unknown> = {
    page_size: 20,
  }

  if (cursor) {
    body.start_cursor = cursor
  }

  logger.info('Querying Notion database', { databaseId, cursor })

  const response = await fetch(`${NOTION_BASE_URL}/databases/${databaseId}/query`, {
    method: 'POST',
    headers: {
      Authorization: `Bearer ${accessToken}`,
      'Notion-Version': NOTION_API_VERSION,
      'Content-Type': 'application/json',
    },
    body: JSON.stringify(body),
  })

  if (!response.ok) {
    const errorText = await response.text()
    logger.error('Failed to query Notion database', { status: response.status, error: errorText })
    throw new Error(`Failed to query Notion database: ${response.status}`)
  }

  const data = await response.json()
  const results = (data.results || []) as Record<string, unknown>[]
  const pages = results.filter((r) => r.object === 'page' && !(r.archived as boolean))

  const documents = await processPages(accessToken, pages)
  const nextCursor = (data.next_cursor as string) ?? undefined

  return {
    documents,
    nextCursor,
    hasMore: data.has_more === true && (maxPages <= 0 || documents.length < maxPages),
  }
}

/**
 * Lists child pages under a specific parent page.
 *
 * Uses the blocks children endpoint to find child_page blocks,
 * then fetches each page's content.
 */
async function listFromParentPage(
  accessToken: string,
  rootPageId: string,
  maxPages: number,
  cursor?: string
): Promise<ExternalDocumentList> {
  const params = new URLSearchParams({ page_size: '100' })
  if (cursor) params.append('start_cursor', cursor)

  logger.info('Listing child pages under root page', { rootPageId, cursor })

  const response = await fetch(
    `${NOTION_BASE_URL}/blocks/${rootPageId}/children?${params.toString()}`,
    {
      method: 'GET',
      headers: {
        Authorization: `Bearer ${accessToken}`,
        'Notion-Version': NOTION_API_VERSION,
      },
    }
  )

  if (!response.ok) {
    const errorText = await response.text()
    logger.error('Failed to list child blocks', { status: response.status, error: errorText })
    throw new Error(`Failed to list child blocks: ${response.status}`)
  }

  const data = await response.json()
  const blocks = (data.results || []) as Record<string, unknown>[]

  // Filter to child_page and child_database blocks
  const childPageIds = blocks
    .filter((b) => b.type === 'child_page' || b.type === 'child_database')
    .map((b) => b.id as string)

  // Also include the root page itself on the first call (no cursor)
  const pageIdsToFetch = !cursor ? [rootPageId, ...childPageIds] : childPageIds

  // Fetch each child page
  const documents: ExternalDocument[] = []
  for (const pageId of pageIdsToFetch) {
    if (maxPages > 0 && documents.length >= maxPages) break

    try {
      const pageResponse = await fetch(`${NOTION_BASE_URL}/pages/${pageId}`, {
        method: 'GET',
        headers: {
          Authorization: `Bearer ${accessToken}`,
          'Notion-Version': NOTION_API_VERSION,
        },
      })

      if (!pageResponse.ok) {
        logger.warn(`Failed to fetch child page ${pageId}`, { status: pageResponse.status })
        continue
      }

      const page = await pageResponse.json()
      if (page.archived) continue

      const doc = await pageToExternalDocument(accessToken, page)
      documents.push(doc)
    } catch (error) {
      logger.warn(`Failed to process child page ${pageId}`, {
        error: error instanceof Error ? error.message : String(error),
      })
    }
  }

  const nextCursor = (data.next_cursor as string) ?? undefined

  return {
    documents,
    nextCursor,
    hasMore: data.has_more === true && (maxPages <= 0 || documents.length < maxPages),
  }
}

/**
 * Converts an array of Notion page objects to ExternalDocuments.
 */
async function processPages(
  accessToken: string,
  pages: Record<string, unknown>[]
): Promise<ExternalDocument[]> {
  const documents: ExternalDocument[] = []
  for (const page of pages) {
    try {
      const doc = await pageToExternalDocument(accessToken, page)
      documents.push(doc)
    } catch (error) {
      logger.warn(`Failed to process Notion page ${page.id}`, {
        error: error instanceof Error ? error.message : String(error),
      })
    }
  }
  return documents
}
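
// A small illustration of the text shape blocksToPlainText produces, using
// hand-written sample blocks in the shape the Notion blocks API returns
// (sample data only, not a live API response):
const sampleBlocks: Record<string, unknown>[] = [
  { type: 'heading_1', heading_1: { rich_text: [{ plain_text: 'Roadmap' }] } },
  { type: 'to_do', to_do: { rich_text: [{ plain_text: 'Ship v1' }], checked: true } },
]
// blocksToPlainText(sampleBlocks) === '# Roadmap\n\n[x] Ship v1'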
18
apps/sim/connectors/registry.ts
Normal file
@@ -0,0 +1,18 @@
import { airtableConnector } from '@/connectors/airtable'
import { confluenceConnector } from '@/connectors/confluence'
import { githubConnector } from '@/connectors/github'
import { googleDriveConnector } from '@/connectors/google-drive'
import { jiraConnector } from '@/connectors/jira'
import { linearConnector } from '@/connectors/linear'
import { notionConnector } from '@/connectors/notion'
import type { ConnectorRegistry } from '@/connectors/types'

export const CONNECTOR_REGISTRY: ConnectorRegistry = {
  airtable: airtableConnector,
  confluence: confluenceConnector,
  github: githubConnector,
  google_drive: googleDriveConnector,
  jira: jiraConnector,
  linear: linearConnector,
  notion: notionConnector,
}
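
// A minimal sketch of how callers are expected to resolve a connector at
// runtime; this getConnector helper is illustrative, not part of the registry.
function getConnector(connectorType: string) {
  const config = CONNECTOR_REGISTRY[connectorType]
  if (!config) {
    throw new Error(`Unknown connector type: ${connectorType}`)
  }
  return config
}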
159
apps/sim/connectors/types.ts
Normal file
@@ -0,0 +1,159 @@
import type { OAuthService } from '@/lib/oauth/types'

/**
 * A single document fetched from an external source.
 */
export interface ExternalDocument {
  /** Source-specific unique ID (page ID, file ID) */
  externalId: string
  /** Document title / filename */
  title: string
  /** Extracted text content */
  content: string
  /** MIME type of the content */
  mimeType: string
  /** Link back to the original document */
  sourceUrl?: string
  /** SHA-256 of content for change detection */
  contentHash: string
  /** Additional source-specific metadata */
  metadata?: Record<string, unknown>
}

/**
 * Paginated result from listing documents in an external source.
 */
export interface ExternalDocumentList {
  documents: ExternalDocument[]
  nextCursor?: string
  hasMore: boolean
}

/**
 * Result of a sync operation.
 */
export interface SyncResult {
  docsAdded: number
  docsUpdated: number
  docsDeleted: number
  docsUnchanged: number
  error?: string
}

/**
 * Config field for source-specific settings (rendered in the add-connector UI).
 */
export interface ConnectorConfigField {
  id: string
  title: string
  type: 'short-input' | 'dropdown'
  placeholder?: string
  required?: boolean
  description?: string
  options?: { label: string; id: string }[]
}

/**
 * Declarative config for a knowledge source connector.
 *
 * Mirrors ToolConfig/TriggerConfig pattern:
 * - Purely declarative metadata (id, name, icon, oauth, configFields)
 * - Runtime functions for data fetching (listDocuments, getDocument, validateConfig)
 *
 * Adding a new connector = creating one of these + registering it.
 */
export interface ConnectorConfig {
  /** Unique connector identifier, e.g. 'confluence', 'google_drive', 'notion' */
  id: string
  /** Human-readable name, e.g. 'Confluence', 'Google Drive' */
  name: string
  /** Short description of the connector */
  description: string
  /** Semver version */
  version: string
  /** Icon component for the connector */
  icon: React.ComponentType<{ className?: string }>

  /** OAuth configuration (same pattern as ToolConfig.oauth) */
  oauth: {
    required: true
    provider: OAuthService
    requiredScopes?: string[]
  }

  /** Source configuration fields rendered in the add-connector UI */
  configFields: ConnectorConfigField[]

  /** List all documents from the configured source (handles pagination via cursor) */
  listDocuments: (
    accessToken: string,
    sourceConfig: Record<string, unknown>,
    cursor?: string
  ) => Promise<ExternalDocumentList>

  /** Fetch a single document by its external ID */
  getDocument: (
    accessToken: string,
    sourceConfig: Record<string, unknown>,
    externalId: string
  ) => Promise<ExternalDocument | null>

  /** Validate that sourceConfig is correct and accessible (called on save) */
  validateConfig: (
    accessToken: string,
    sourceConfig: Record<string, unknown>
  ) => Promise<{ valid: boolean; error?: string }>

  /** Map source metadata to semantic tag keys (translated to slots by the sync engine) */
  mapTags?: (metadata: Record<string, unknown>) => Record<string, unknown>

  /**
   * Tag definitions this connector populates. Shown in the add-connector modal
   * as opt-out checkboxes. On connector creation, tag definitions are auto-created
   * on the KB for enabled slots, and mapTags output is filtered to only include them.
   */
  tagDefinitions?: ConnectorTagDefinition[]
}

/**
 * A tag that a connector populates, with a semantic ID and human-readable name.
 * Slots are dynamically assigned on connector creation via getNextAvailableSlot.
 */
export interface ConnectorTagDefinition {
  /** Semantic ID matching a key returned by mapTags (e.g. 'labels', 'version') */
  id: string
  /** Human-readable name shown in UI (e.g. 'Labels', 'Last Modified') */
  displayName: string
  /** Field type determines which slot pool to draw from */
  fieldType: 'text' | 'number' | 'date' | 'boolean'
}

/**
 * Tag slots available on the document table for connector metadata mapping.
 */
export interface DocumentTags {
  tag1?: string
  tag2?: string
  tag3?: string
  tag4?: string
  tag5?: string
  tag6?: string
  tag7?: string
  number1?: number
  number2?: number
  number3?: number
  number4?: number
  number5?: number
  date1?: Date
  date2?: Date
  boolean1?: boolean
  boolean2?: boolean
  boolean3?: boolean
}

/**
 * Registry mapping connector IDs to their configs.
 */
export interface ConnectorRegistry {
  [connectorId: string]: ConnectorConfig
}
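
// The contract between tagDefinitions and mapTags in one small sketch
// (illustrative values only): each key mapTags returns should match a
// tagDefinitions[].id, and the sync engine later assigns each id a concrete
// DocumentTags slot such as tag1 or date1.
const exampleTagDefinitions: ConnectorTagDefinition[] = [
  { id: 'labels', displayName: 'Labels', fieldType: 'text' },
  { id: 'lastModified', displayName: 'Last Modified', fieldType: 'date' },
]
const exampleMapTags = (metadata: Record<string, unknown>): Record<string, unknown> => ({
  labels: Array.isArray(metadata.labels) ? (metadata.labels as string[]).join(', ') : undefined,
  lastModified:
    typeof metadata.lastModified === 'string' ? new Date(metadata.lastModified) : undefined,
})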
@@ -3,7 +3,7 @@
 import { useCallback } from 'react'
 import { useQueryClient } from '@tanstack/react-query'
 import type { AllTagSlot } from '@/lib/knowledge/constants'
-import { knowledgeKeys, useTagDefinitionsQuery } from '@/hooks/queries/knowledge'
+import { knowledgeKeys, useTagDefinitionsQuery } from '@/hooks/queries/kb/knowledge'
 
 export interface TagDefinition {
   id: string

@@ -2,6 +2,7 @@ import { useCallback, useMemo } from 'react'
 import { useQueryClient } from '@tanstack/react-query'
 import type { ChunkData, DocumentData, KnowledgeBaseData } from '@/lib/knowledge/types'
 import {
+  type DocumentTagFilter,
   type KnowledgeChunksResponse,
   type KnowledgeDocumentsResponse,
   knowledgeKeys,
@@ -12,7 +13,7 @@ import {
   useKnowledgeBasesQuery,
   useKnowledgeChunksQuery,
   useKnowledgeDocumentsQuery,
-} from '@/hooks/queries/knowledge'
+} from '@/hooks/queries/kb/knowledge'
 
 const DEFAULT_PAGE_SIZE = 50
 
@@ -72,12 +73,14 @@ export function useKnowledgeBaseDocuments(
       | false
       | ((data: KnowledgeDocumentsResponse | undefined) => number | false)
     enabledFilter?: 'all' | 'enabled' | 'disabled'
+    tagFilters?: DocumentTagFilter[]
   }
 ) {
   const queryClient = useQueryClient()
   const requestLimit = options?.limit ?? DEFAULT_PAGE_SIZE
   const requestOffset = options?.offset ?? 0
   const enabledFilter = options?.enabledFilter ?? 'all'
+  const tagFilters = options?.tagFilters
   const paramsKey = serializeDocumentParams({
     knowledgeBaseId,
     limit: requestLimit,
@@ -86,6 +89,7 @@ export function useKnowledgeBaseDocuments(
     sortBy: options?.sortBy,
     sortOrder: options?.sortOrder,
     enabledFilter,
+    tagFilters,
   })
 
   const refetchIntervalFn = useMemo(() => {
@@ -107,6 +111,7 @@ export function useKnowledgeBaseDocuments(
       sortBy: options?.sortBy,
       sortOrder: options?.sortOrder,
       enabledFilter,
+      tagFilters,
     },
     {
       enabled: (options?.enabled ?? true) && Boolean(knowledgeBaseId),
@@ -227,7 +232,8 @@ export function useDocumentChunks(
   knowledgeBaseId: string,
   documentId: string,
   page = 1,
-  search = ''
+  search = '',
+  enabledFilter: 'all' | 'enabled' | 'disabled' = 'all'
 ) {
   const queryClient = useQueryClient()
 
@@ -241,6 +247,7 @@ export function useDocumentChunks(
       limit: DEFAULT_PAGE_SIZE,
       offset,
       search: search || undefined,
+      enabledFilter,
     },
     {
       enabled: Boolean(knowledgeBaseId && documentId),
@@ -272,11 +279,12 @@ export function useDocumentChunks(
       limit: DEFAULT_PAGE_SIZE,
       offset,
       search: search || undefined,
+      enabledFilter,
     })
     await queryClient.invalidateQueries({
       queryKey: knowledgeKeys.chunks(knowledgeBaseId, documentId, paramsKey),
     })
-  }, [knowledgeBaseId, documentId, offset, search, queryClient])
+  }, [knowledgeBaseId, documentId, offset, search, enabledFilter, queryClient])
 
   const updateChunk = useCallback(
     (chunkId: string, updates: Partial<ChunkData>) => {
@@ -286,6 +294,7 @@ export function useDocumentChunks(
         limit: DEFAULT_PAGE_SIZE,
         offset,
         search: search || undefined,
+        enabledFilter,
       })
       queryClient.setQueryData<KnowledgeChunksResponse>(
         knowledgeKeys.chunks(knowledgeBaseId, documentId, paramsKey),
@@ -300,7 +309,7 @@ export function useDocumentChunks(
         }
       )
     },
-    [knowledgeBaseId, documentId, offset, search, queryClient]
+    [knowledgeBaseId, documentId, offset, search, enabledFilter, queryClient]
   )
 
   return {

@@ -9,7 +9,7 @@ import {
   useDeleteDocumentTagDefinitions,
   useDocumentTagDefinitionsQuery,
   useSaveDocumentTagDefinitions,
-} from '@/hooks/queries/knowledge'
+} from '@/hooks/queries/kb/knowledge'
 
 export interface TagDefinition {
   id: string
426
apps/sim/hooks/queries/kb/connectors.ts
Normal file
@@ -0,0 +1,426 @@
import { createLogger } from '@sim/logger'
import { keepPreviousData, useMutation, useQuery, useQueryClient } from '@tanstack/react-query'
import { knowledgeKeys } from '@/hooks/queries/kb/knowledge'

const logger = createLogger('KnowledgeConnectorQueries')

export interface ConnectorData {
  id: string
  knowledgeBaseId: string
  connectorType: string
  credentialId: string
  sourceConfig: Record<string, unknown>
  syncMode: string
  syncIntervalMinutes: number
  status: 'active' | 'paused' | 'syncing' | 'error'
  lastSyncAt: string | null
  lastSyncError: string | null
  lastSyncDocCount: number | null
  nextSyncAt: string | null
  consecutiveFailures: number
  createdAt: string
  updatedAt: string
}

export interface SyncLogData {
  id: string
  connectorId: string
  status: string
  startedAt: string
  completedAt: string | null
  docsAdded: number
  docsUpdated: number
  docsDeleted: number
  docsUnchanged: number
  errorMessage: string | null
}

export interface ConnectorDetailData extends ConnectorData {
  syncLogs: SyncLogData[]
}

export const connectorKeys = {
  all: (knowledgeBaseId: string) =>
    [...knowledgeKeys.detail(knowledgeBaseId), 'connectors'] as const,
  list: (knowledgeBaseId?: string) =>
    [...knowledgeKeys.detail(knowledgeBaseId), 'connectors', 'list'] as const,
  detail: (knowledgeBaseId?: string, connectorId?: string) =>
    [...knowledgeKeys.detail(knowledgeBaseId), 'connectors', 'detail', connectorId ?? ''] as const,
}

async function fetchConnectors(knowledgeBaseId: string): Promise<ConnectorData[]> {
  const response = await fetch(`/api/knowledge/${knowledgeBaseId}/connectors`)

  if (!response.ok) {
    throw new Error(`Failed to fetch connectors: ${response.status}`)
  }

  const result = await response.json()
  if (!result?.success) {
    throw new Error(result?.error || 'Failed to fetch connectors')
  }

  return Array.isArray(result.data) ? result.data : []
}

async function fetchConnectorDetail(
  knowledgeBaseId: string,
  connectorId: string
): Promise<ConnectorDetailData> {
  const response = await fetch(`/api/knowledge/${knowledgeBaseId}/connectors/${connectorId}`)

  if (!response.ok) {
    throw new Error(`Failed to fetch connector: ${response.status}`)
  }

  const result = await response.json()
  if (!result?.success || !result?.data) {
    throw new Error(result?.error || 'Failed to fetch connector')
  }

  return result.data
}

export function useConnectorList(knowledgeBaseId?: string) {
  return useQuery({
    queryKey: connectorKeys.list(knowledgeBaseId),
    queryFn: () => fetchConnectors(knowledgeBaseId as string),
    enabled: Boolean(knowledgeBaseId),
    staleTime: 30 * 1000,
    placeholderData: keepPreviousData,
    refetchInterval: (query) => {
      const connectors = query.state.data
      const hasSyncing = connectors?.some((c) => c.status === 'syncing')
      return hasSyncing ? 3000 : false
    },
  })
}

export function useConnectorDetail(knowledgeBaseId?: string, connectorId?: string) {
  return useQuery({
    queryKey: connectorKeys.detail(knowledgeBaseId, connectorId),
    queryFn: () => fetchConnectorDetail(knowledgeBaseId as string, connectorId as string),
    enabled: Boolean(knowledgeBaseId && connectorId),
    staleTime: 30 * 1000,
    placeholderData: keepPreviousData,
  })
}

export interface CreateConnectorParams {
  knowledgeBaseId: string
  connectorType: string
  credentialId: string
  sourceConfig: Record<string, unknown>
  syncIntervalMinutes?: number
}

async function createConnector({
  knowledgeBaseId,
  ...body
}: CreateConnectorParams): Promise<ConnectorData> {
  const response = await fetch(`/api/knowledge/${knowledgeBaseId}/connectors`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(body),
  })

  if (!response.ok) {
    const result = await response.json()
    throw new Error(result.error || 'Failed to create connector')
  }

  const result = await response.json()
  if (!result?.success || !result?.data) {
    throw new Error(result?.error || 'Failed to create connector')
  }

  return result.data
}

export function useCreateConnector() {
  const queryClient = useQueryClient()

  return useMutation({
    mutationFn: createConnector,
    onSuccess: (_, { knowledgeBaseId }) => {
      queryClient.invalidateQueries({
        queryKey: connectorKeys.list(knowledgeBaseId),
      })
    },
  })
}

export interface UpdateConnectorParams {
  knowledgeBaseId: string
  connectorId: string
  updates: {
    sourceConfig?: Record<string, unknown>
    syncIntervalMinutes?: number
    status?: 'active' | 'paused'
  }
}

async function updateConnector({
  knowledgeBaseId,
  connectorId,
  updates,
}: UpdateConnectorParams): Promise<ConnectorData> {
  const response = await fetch(`/api/knowledge/${knowledgeBaseId}/connectors/${connectorId}`, {
    method: 'PATCH',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(updates),
  })

  if (!response.ok) {
    const result = await response.json()
    throw new Error(result.error || 'Failed to update connector')
  }

  const result = await response.json()
  if (!result?.success) {
    throw new Error(result?.error || 'Failed to update connector')
  }

  return result.data
}

export function useUpdateConnector() {
  const queryClient = useQueryClient()

  return useMutation({
    mutationFn: updateConnector,
    onSuccess: (_, { knowledgeBaseId, connectorId }) => {
      queryClient.invalidateQueries({
        queryKey: connectorKeys.list(knowledgeBaseId),
      })
      queryClient.invalidateQueries({
        queryKey: connectorKeys.detail(knowledgeBaseId, connectorId),
      })
    },
  })
}

export interface DeleteConnectorParams {
  knowledgeBaseId: string
  connectorId: string
}

async function deleteConnector({
  knowledgeBaseId,
  connectorId,
}: DeleteConnectorParams): Promise<void> {
  const response = await fetch(`/api/knowledge/${knowledgeBaseId}/connectors/${connectorId}`, {
    method: 'DELETE',
  })

  if (!response.ok) {
    const result = await response.json()
    throw new Error(result.error || 'Failed to delete connector')
  }

  const result = await response.json()
  if (!result?.success) {
    throw new Error(result?.error || 'Failed to delete connector')
  }
}

export function useDeleteConnector() {
  const queryClient = useQueryClient()

  return useMutation({
    mutationFn: deleteConnector,
    onSuccess: (_, { knowledgeBaseId }) => {
      queryClient.invalidateQueries({
        queryKey: connectorKeys.list(knowledgeBaseId),
      })
      queryClient.invalidateQueries({
        queryKey: knowledgeKeys.detail(knowledgeBaseId),
      })
    },
  })
}

export interface TriggerSyncParams {
  knowledgeBaseId: string
  connectorId: string
}

async function triggerSync({ knowledgeBaseId, connectorId }: TriggerSyncParams): Promise<void> {
  const response = await fetch(`/api/knowledge/${knowledgeBaseId}/connectors/${connectorId}/sync`, {
    method: 'POST',
  })

  if (!response.ok) {
    const result = await response.json()
    throw new Error(result.error || 'Failed to trigger sync')
  }
}

export function useTriggerSync() {
  const queryClient = useQueryClient()

  return useMutation({
    mutationFn: triggerSync,
    onSuccess: (_, { knowledgeBaseId, connectorId }) => {
      queryClient.invalidateQueries({
        queryKey: connectorKeys.list(knowledgeBaseId),
      })
      queryClient.invalidateQueries({
        queryKey: connectorKeys.detail(knowledgeBaseId, connectorId),
      })
      queryClient.invalidateQueries({
        queryKey: knowledgeKeys.detail(knowledgeBaseId),
      })
    },
  })
}

export interface ConnectorDocumentData {
  id: string
  filename: string
  externalId: string | null
  sourceUrl: string | null
  enabled: boolean
  deletedAt: string | null
  userExcluded: boolean
  uploadedAt: string
  processingStatus: string
}

export interface ConnectorDocumentsResponse {
  documents: ConnectorDocumentData[]
  counts: { active: number; excluded: number }
}

export const connectorDocumentKeys = {
  list: (knowledgeBaseId?: string, connectorId?: string) =>
    [...connectorKeys.detail(knowledgeBaseId, connectorId), 'documents'] as const,
}

async function fetchConnectorDocuments(
  knowledgeBaseId: string,
  connectorId: string,
  includeExcluded: boolean
): Promise<ConnectorDocumentsResponse> {
  const params = includeExcluded ? '?includeExcluded=true' : ''
  const response = await fetch(
    `/api/knowledge/${knowledgeBaseId}/connectors/${connectorId}/documents${params}`
  )

  if (!response.ok) {
    throw new Error(`Failed to fetch connector documents: ${response.status}`)
  }

  const result = await response.json()
  if (!result?.success) {
    throw new Error(result?.error || 'Failed to fetch connector documents')
  }

  return result.data
}

export function useConnectorDocuments(
  knowledgeBaseId?: string,
  connectorId?: string,
  options?: { includeExcluded?: boolean }
) {
  return useQuery({
    queryKey: [
      ...connectorDocumentKeys.list(knowledgeBaseId, connectorId),
      options?.includeExcluded ?? false,
    ],
    queryFn: () =>
      fetchConnectorDocuments(
        knowledgeBaseId as string,
        connectorId as string,
        options?.includeExcluded ?? false
      ),
    enabled: Boolean(knowledgeBaseId && connectorId),
    staleTime: 30 * 1000,
    placeholderData: keepPreviousData,
  })
}

interface ConnectorDocumentMutationParams {
  knowledgeBaseId: string
  connectorId: string
  documentIds: string[]
}

async function excludeConnectorDocuments({
  knowledgeBaseId,
  connectorId,
  documentIds,
}: ConnectorDocumentMutationParams): Promise<{ excludedCount: number }> {
  const response = await fetch(
    `/api/knowledge/${knowledgeBaseId}/connectors/${connectorId}/documents`,
    {
      method: 'PATCH',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({ operation: 'exclude', documentIds }),
    }
  )

  if (!response.ok) {
    const result = await response.json()
    throw new Error(result.error || 'Failed to exclude documents')
  }

  const result = await response.json()
  return result.data
}

export function useExcludeConnectorDocument() {
  const queryClient = useQueryClient()

  return useMutation({
    mutationFn: excludeConnectorDocuments,
    onSuccess: (_, { knowledgeBaseId, connectorId }) => {
      queryClient.invalidateQueries({
        queryKey: connectorDocumentKeys.list(knowledgeBaseId, connectorId),
      })
      queryClient.invalidateQueries({
        queryKey: knowledgeKeys.detail(knowledgeBaseId),
      })
    },
  })
}

async function restoreConnectorDocuments({
  knowledgeBaseId,
  connectorId,
  documentIds,
}: ConnectorDocumentMutationParams): Promise<{ restoredCount: number }> {
  const response = await fetch(
    `/api/knowledge/${knowledgeBaseId}/connectors/${connectorId}/documents`,
    {
      method: 'PATCH',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({ operation: 'restore', documentIds }),
    }
  )

  if (!response.ok) {
    const result = await response.json()
    throw new Error(result.error || 'Failed to restore documents')
  }

  const result = await response.json()
  return result.data
}

export function useRestoreConnectorDocument() {
  const queryClient = useQueryClient()

  return useMutation({
    mutationFn: restoreConnectorDocuments,
    onSuccess: (_, { knowledgeBaseId, connectorId }) => {
      queryClient.invalidateQueries({
        queryKey: connectorDocumentKeys.list(knowledgeBaseId, connectorId),
      })
      queryClient.invalidateQueries({
        queryKey: knowledgeKeys.detail(knowledgeBaseId),
      })
    },
  })
}
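
// A minimal sketch of composing the hooks above, assuming a knowledgeBaseId
// from the route; the useSyncAllConnectors hook is illustrative, not shipped.
function useSyncAllConnectors(knowledgeBaseId: string) {
  const { data: connectors } = useConnectorList(knowledgeBaseId)
  const triggerSync = useTriggerSync()

  return () => {
    for (const connector of connectors ?? []) {
      if (connector.status !== 'syncing') {
        triggerSync.mutate({ knowledgeBaseId, connectorId: connector.id })
      }
    }
  }
}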
@@ -79,6 +79,14 @@ export async function fetchDocument(
   return result.data
 }
 
+export interface DocumentTagFilter {
+  tagSlot: string
+  fieldType: 'text' | 'number' | 'date' | 'boolean'
+  operator: string
+  value: string
+  valueTo?: string
+}
+
 export interface KnowledgeDocumentsParams {
   knowledgeBaseId: string
   search?: string
@@ -87,6 +95,7 @@ export interface KnowledgeDocumentsParams {
   sortBy?: string
   sortOrder?: string
   enabledFilter?: 'all' | 'enabled' | 'disabled'
+  tagFilters?: DocumentTagFilter[]
 }
 
 export interface KnowledgeDocumentsResponse {
@@ -102,6 +111,7 @@ export async function fetchKnowledgeDocuments({
   sortBy,
   sortOrder,
   enabledFilter,
+  tagFilters,
 }: KnowledgeDocumentsParams): Promise<KnowledgeDocumentsResponse> {
   const params = new URLSearchParams()
   if (search) params.set('search', search)
@@ -110,6 +120,7 @@ export async function fetchKnowledgeDocuments({
   params.set('limit', limit.toString())
   params.set('offset', offset.toString())
   if (enabledFilter) params.set('enabledFilter', enabledFilter)
+  if (tagFilters && tagFilters.length > 0) params.set('tagFilters', JSON.stringify(tagFilters))
 
   const url = `/api/knowledge/${knowledgeBaseId}/documents${params.toString() ? `?${params.toString()}` : ''}`
   const response = await fetch(url)
@@ -147,6 +158,7 @@ export interface KnowledgeChunksParams {
   knowledgeBaseId: string
   documentId: string
   search?: string
+  enabledFilter?: 'all' | 'enabled' | 'disabled'
   limit?: number
   offset?: number
 }
@@ -160,11 +172,15 @@ export async function fetchKnowledgeChunks({
   knowledgeBaseId,
   documentId,
   search,
+  enabledFilter,
   limit = 50,
   offset = 0,
 }: KnowledgeChunksParams): Promise<KnowledgeChunksResponse> {
   const params = new URLSearchParams()
   if (search) params.set('search', search)
+  if (enabledFilter && enabledFilter !== 'all') {
+    params.set('enabled', enabledFilter === 'enabled' ? 'true' : 'false')
+  }
   if (limit) params.set('limit', limit.toString())
   if (offset) params.set('offset', offset.toString())
 
@@ -234,6 +250,7 @@ export const serializeDocumentParams = (params: KnowledgeDocumentsParams) =>
     sortBy: params.sortBy ?? '',
     sortOrder: params.sortOrder ?? '',
     enabledFilter: params.enabledFilter ?? 'all',
+    tagFilters: params.tagFilters ?? [],
   })
 
 export function useKnowledgeDocumentsQuery(
@@ -260,6 +277,7 @@ export function useKnowledgeDocumentsQuery(
 export const serializeChunkParams = (params: KnowledgeChunksParams) =>
   JSON.stringify({
     search: params.search ?? '',
+    enabledFilter: params.enabledFilter ?? 'all',
     limit: params.limit ?? 50,
     offset: params.offset ?? 0,
   })
@@ -1814,7 +1814,13 @@ export const auth = betterAuth({
       authorizationUrl: 'https://airtable.com/oauth2/v1/authorize',
       tokenUrl: 'https://airtable.com/oauth2/v1/token',
       userInfoUrl: 'https://api.airtable.com/v0/meta/whoami',
-      scopes: ['data.records:read', 'data.records:write', 'user.email:read', 'webhook:manage'],
+      scopes: [
+        'data.records:read',
+        'data.records:write',
+        'schema.bases:read',
+        'user.email:read',
+        'webhook:manage',
+      ],
       responseType: 'code',
       pkce: true,
       accessType: 'offline',
493
apps/sim/lib/knowledge/connectors/sync-engine.ts
Normal file
@@ -0,0 +1,493 @@
import { db } from '@sim/db'
import {
  document,
  knowledgeBase,
  knowledgeConnector,
  knowledgeConnectorSyncLog,
} from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq, isNull, ne } from 'drizzle-orm'
import { getInternalApiBaseUrl } from '@/lib/core/utils/urls'
import { isTriggerAvailable, processDocumentAsync } from '@/lib/knowledge/documents/service'
import { StorageService } from '@/lib/uploads'
import { refreshAccessTokenIfNeeded } from '@/app/api/auth/oauth/utils'
import { knowledgeConnectorSync } from '@/background/knowledge-connector-sync'
import { CONNECTOR_REGISTRY } from '@/connectors/registry'
import type { DocumentTags, ExternalDocument, SyncResult } from '@/connectors/types'

const logger = createLogger('ConnectorSyncEngine')

/**
 * Resolves tag values from connector metadata using the connector's mapTags function.
 * Translates semantic keys returned by mapTags to actual DB slots using the
 * tagSlotMapping stored in sourceConfig during connector creation.
 */
function resolveTagMapping(
  connectorType: string,
  metadata: Record<string, unknown>,
  sourceConfig?: Record<string, unknown>
): Partial<DocumentTags> | undefined {
  const config = CONNECTOR_REGISTRY[connectorType]
  if (!config?.mapTags || !metadata) return undefined

  const semanticTags = config.mapTags(metadata)
  const mapping = sourceConfig?.tagSlotMapping as Record<string, string> | undefined
  if (!mapping || !semanticTags) return undefined

  const result: Partial<DocumentTags> = {}
  for (const [semanticKey, slot] of Object.entries(mapping)) {
    const value = semanticTags[semanticKey]
    ;(result as Record<string, unknown>)[slot] = value != null ? value : null
  }
  return result
}
/**
|
||||
* Dispatch a connector sync — uses Trigger.dev when available,
|
||||
* otherwise falls back to direct executeSync.
|
||||
*/
|
||||
export async function dispatchSync(
|
||||
connectorId: string,
|
||||
options?: { fullSync?: boolean; requestId?: string }
|
||||
): Promise<void> {
|
||||
const requestId = options?.requestId ?? crypto.randomUUID()
|
||||
|
||||
if (isTriggerAvailable()) {
|
||||
await knowledgeConnectorSync.trigger({
|
||||
connectorId,
|
||||
fullSync: options?.fullSync,
|
||||
requestId,
|
||||
})
|
||||
logger.info(`Dispatched connector sync to Trigger.dev`, { connectorId, requestId })
|
||||
} else {
|
||||
executeSync(connectorId, { fullSync: options?.fullSync }).catch((error) => {
|
||||
logger.error(`Sync failed for connector ${connectorId}`, {
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
requestId,
|
||||
})
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Execute a sync for a given knowledge connector.
|
||||
*
|
||||
* This is the core sync algorithm — connector-agnostic.
|
||||
* It looks up the ConnectorConfig from the registry and calls its
|
||||
* listDocuments/getDocument methods.
|
||||
*/
|
||||
export async function executeSync(
|
||||
connectorId: string,
|
||||
options?: { fullSync?: boolean }
|
||||
): Promise<SyncResult> {
|
||||
const result: SyncResult = {
|
||||
docsAdded: 0,
|
||||
docsUpdated: 0,
|
||||
docsDeleted: 0,
|
||||
docsUnchanged: 0,
|
||||
}
|
||||
|
||||
const connectorRows = await db
|
||||
.select()
|
||||
.from(knowledgeConnector)
|
||||
.where(and(eq(knowledgeConnector.id, connectorId), isNull(knowledgeConnector.deletedAt)))
|
||||
.limit(1)
|
||||
|
||||
if (connectorRows.length === 0) {
|
||||
throw new Error(`Connector not found: ${connectorId}`)
|
||||
}
|
||||
|
||||
const connector = connectorRows[0]
|
||||
|
||||
const connectorConfig = CONNECTOR_REGISTRY[connector.connectorType]
|
||||
if (!connectorConfig) {
|
||||
throw new Error(`Unknown connector type: ${connector.connectorType}`)
|
||||
}
|
||||
|
||||
const kbRows = await db
|
||||
.select({ userId: knowledgeBase.userId })
|
||||
.from(knowledgeBase)
|
||||
.where(eq(knowledgeBase.id, connector.knowledgeBaseId))
|
||||
.limit(1)
|
||||
|
||||
if (kbRows.length === 0) {
|
||||
throw new Error(`Knowledge base not found: ${connector.knowledgeBaseId}`)
|
||||
}
|
||||
|
||||
const userId = kbRows[0].userId
|
||||
|
||||
const accessToken = await refreshAccessTokenIfNeeded(
|
||||
connector.credentialId,
|
||||
userId,
|
||||
`sync-${connectorId}`
|
||||
)
|
||||
|
||||
if (!accessToken) {
|
||||
throw new Error('Failed to obtain access token')
|
||||
}
|
||||
|
||||
const lockResult = await db
|
||||
.update(knowledgeConnector)
|
||||
.set({ status: 'syncing', updatedAt: new Date() })
|
||||
.where(and(eq(knowledgeConnector.id, connectorId), ne(knowledgeConnector.status, 'syncing')))
|
||||
.returning({ id: knowledgeConnector.id })
|
||||
|
||||
if (lockResult.length === 0) {
|
||||
logger.info('Sync already in progress, skipping', { connectorId })
|
||||
return result
|
||||
}
|
||||
|
||||
const syncLogId = crypto.randomUUID()
|
||||
await db.insert(knowledgeConnectorSyncLog).values({
|
||||
id: syncLogId,
|
||||
connectorId,
|
||||
status: 'started',
|
||||
startedAt: new Date(),
|
||||
})
|
||||
|
||||
const sourceConfig = connector.sourceConfig as Record<string, unknown>
|
||||
|
||||
try {
|
||||
const externalDocs: ExternalDocument[] = []
|
||||
let cursor: string | undefined
|
||||
let hasMore = true
|
||||
const MAX_PAGES = 500
|
||||
|
||||
for (let pageNum = 0; hasMore && pageNum < MAX_PAGES; pageNum++) {
|
||||
const page = await connectorConfig.listDocuments(accessToken, sourceConfig, cursor)
|
||||
externalDocs.push(...page.documents)
|
||||
|
||||
if (page.hasMore && !page.nextCursor) {
|
||||
logger.warn('Source returned hasMore=true with no cursor, stopping pagination', {
|
||||
connectorId,
|
||||
pageNum,
|
||||
docsSoFar: externalDocs.length,
|
||||
})
|
||||
break
|
||||
}
|
||||
|
||||
cursor = page.nextCursor
|
||||
hasMore = page.hasMore
|
||||
}
|
||||
|
||||
logger.info(`Fetched ${externalDocs.length} documents from ${connectorConfig.name}`, {
|
||||
connectorId,
|
||||
})
|
||||
|
||||
const existingDocs = await db
|
||||
.select({
|
||||
id: document.id,
|
||||
externalId: document.externalId,
|
||||
contentHash: document.contentHash,
|
||||
})
|
||||
.from(document)
|
||||
.where(and(eq(document.connectorId, connectorId), isNull(document.deletedAt)))
|
||||
|
||||
const excludedDocs = await db
|
||||
.select({ externalId: document.externalId })
|
||||
.from(document)
|
||||
.where(and(eq(document.connectorId, connectorId), eq(document.userExcluded, true)))
|
||||
|
||||
const excludedExternalIds = new Set(excludedDocs.map((d) => d.externalId).filter(Boolean))
|
||||
|
||||
if (externalDocs.length === 0 && existingDocs.length > 0 && !options?.fullSync) {
|
||||
logger.warn(
|
||||
`Source returned 0 documents but ${existingDocs.length} exist — skipping reconciliation`,
|
||||
{ connectorId }
|
||||
)
|
||||
|
||||
await db
|
||||
.update(knowledgeConnectorSyncLog)
|
||||
.set({ status: 'completed', completedAt: new Date() })
|
||||
.where(eq(knowledgeConnectorSyncLog.id, syncLogId))
|
||||
|
||||
const now = new Date()
|
||||
const nextSync =
|
||||
connector.syncIntervalMinutes > 0
|
||||
? new Date(now.getTime() + connector.syncIntervalMinutes * 60 * 1000)
|
||||
: null
|
||||
|
||||
await db
|
||||
.update(knowledgeConnector)
|
||||
.set({
|
||||
status: 'active',
|
||||
lastSyncAt: now,
|
||||
lastSyncError: null,
|
||||
nextSyncAt: nextSync,
|
||||
consecutiveFailures: 0,
|
||||
updatedAt: now,
|
||||
})
|
||||
.where(eq(knowledgeConnector.id, connectorId))
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
const existingByExternalId = new Map(
|
||||
existingDocs.filter((d) => d.externalId !== null).map((d) => [d.externalId!, d])
|
||||
)
|
||||
|
||||
const seenExternalIds = new Set<string>()
|
||||
|
||||
for (const extDoc of externalDocs) {
|
||||
seenExternalIds.add(extDoc.externalId)
|
||||
|
||||
if (excludedExternalIds.has(extDoc.externalId)) {
|
||||
result.docsUnchanged++
|
||||
continue
|
||||
}
|
||||
|
||||
if (!extDoc.content.trim()) {
|
||||
logger.info(`Skipping empty document: ${extDoc.title}`, {
|
||||
externalId: extDoc.externalId,
|
||||
})
|
||||
continue
|
||||
}
|
||||
|
||||
const existing = existingByExternalId.get(extDoc.externalId)
|
||||
|
||||
if (!existing) {
|
||||
await addDocument(
|
||||
connector.knowledgeBaseId,
|
||||
connectorId,
|
||||
connector.connectorType,
|
||||
extDoc,
|
||||
sourceConfig
|
||||
)
|
||||
result.docsAdded++
|
||||
} else if (existing.contentHash !== extDoc.contentHash) {
|
||||
await updateDocument(
|
||||
existing.id,
|
||||
connector.knowledgeBaseId,
|
||||
connectorId,
|
||||
connector.connectorType,
|
||||
extDoc,
|
||||
sourceConfig
|
||||
)
|
||||
result.docsUpdated++
|
||||
} else {
|
||||
result.docsUnchanged++
|
||||
}
|
||||
}
|
||||
|
||||
if (options?.fullSync || connector.syncMode === 'incremental') {
|
||||
for (const existing of existingDocs) {
|
||||
if (existing.externalId && !seenExternalIds.has(existing.externalId)) {
|
||||
await db
|
||||
.update(document)
|
||||
.set({ deletedAt: new Date() })
|
||||
.where(eq(document.id, existing.id))
|
||||
result.docsDeleted++
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
await db
|
||||
.update(knowledgeConnectorSyncLog)
|
||||
.set({
|
||||
status: 'completed',
|
||||
completedAt: new Date(),
|
||||
docsAdded: result.docsAdded,
|
||||
docsUpdated: result.docsUpdated,
|
||||
docsDeleted: result.docsDeleted,
|
||||
docsUnchanged: result.docsUnchanged,
|
||||
})
|
||||
.where(eq(knowledgeConnectorSyncLog.id, syncLogId))
|
||||
|
||||
const now = new Date()
|
||||
const nextSync =
|
||||
connector.syncIntervalMinutes > 0
|
||||
? new Date(now.getTime() + connector.syncIntervalMinutes * 60 * 1000)
|
||||
: null
|
||||
|
||||
await db
|
||||
.update(knowledgeConnector)
|
||||
.set({
|
||||
status: 'active',
|
||||
lastSyncAt: now,
|
||||
lastSyncError: null,
|
||||
lastSyncDocCount: externalDocs.length,
|
||||
nextSyncAt: nextSync,
|
||||
consecutiveFailures: 0,
|
||||
updatedAt: now,
|
||||
})
|
||||
.where(eq(knowledgeConnector.id, connectorId))
|
||||
|
||||
logger.info('Sync completed', { connectorId, ...result })
|
||||
return result
|
||||
} catch (error) {
|
||||
const errorMessage = error instanceof Error ? error.message : String(error)
|
||||
|
||||
await db
|
||||
.update(knowledgeConnectorSyncLog)
|
||||
.set({
|
||||
status: 'failed',
|
||||
completedAt: new Date(),
|
||||
errorMessage,
|
||||
docsAdded: result.docsAdded,
|
||||
docsUpdated: result.docsUpdated,
|
||||
docsDeleted: result.docsDeleted,
|
||||
docsUnchanged: result.docsUnchanged,
|
||||
})
|
||||
.where(eq(knowledgeConnectorSyncLog.id, syncLogId))
|
||||
|
||||
const now = new Date()
|
||||
const failures = (connector.consecutiveFailures ?? 0) + 1
|
||||
const backoffMinutes = Math.min(failures * 30, 1440)
|
||||
const nextSync = new Date(now.getTime() + backoffMinutes * 60 * 1000)
|
||||
|
||||
await db
|
||||
.update(knowledgeConnector)
|
||||
.set({
|
||||
status: 'error',
|
||||
lastSyncAt: now,
|
||||
lastSyncError: errorMessage,
|
||||
nextSyncAt: nextSync,
|
||||
consecutiveFailures: failures,
|
||||
updatedAt: now,
|
||||
})
|
||||
.where(eq(knowledgeConnector.id, connectorId))
|
||||
|
||||
logger.error('Sync failed', { connectorId, error: errorMessage })
|
||||
result.error = errorMessage
|
||||
return result
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Upload content to storage as a .txt file, create a document record,
|
||||
* and trigger processing via the existing pipeline.
|
||||
*/
|
||||
async function addDocument(
|
||||
knowledgeBaseId: string,
|
||||
connectorId: string,
|
||||
connectorType: string,
|
||||
extDoc: ExternalDocument,
|
||||
sourceConfig?: Record<string, unknown>
|
||||
): Promise<void> {
|
||||
const documentId = crypto.randomUUID()
|
||||
const contentBuffer = Buffer.from(extDoc.content, 'utf-8')
|
||||
const safeTitle = extDoc.title.replace(/[^a-zA-Z0-9.-]/g, '_')
|
||||
const customKey = `kb/${Date.now()}-${documentId}-${safeTitle}.txt`
|
||||
|
||||
const fileInfo = await StorageService.uploadFile({
|
||||
file: contentBuffer,
|
||||
fileName: `${safeTitle}.txt`,
|
||||
contentType: 'text/plain',
|
||||
context: 'knowledge-base',
|
||||
customKey,
|
||||
preserveKey: true,
|
||||
})
|
||||
|
||||
const fileUrl = `${getInternalApiBaseUrl()}${fileInfo.path}?context=knowledge-base`
|
||||
|
||||
const tagValues = extDoc.metadata
|
||||
? resolveTagMapping(connectorType, extDoc.metadata, sourceConfig)
|
||||
: undefined
|
||||
|
||||
const displayName = extDoc.title
|
||||
const processingFilename = `${safeTitle}.txt`
|
||||
|
||||
await db.insert(document).values({
|
||||
id: documentId,
|
||||
knowledgeBaseId,
|
||||
filename: displayName,
|
||||
fileUrl,
|
||||
fileSize: contentBuffer.length,
|
||||
mimeType: 'text/plain',
|
||||
chunkCount: 0,
|
||||
tokenCount: 0,
|
||||
characterCount: 0,
|
||||
processingStatus: 'pending',
|
||||
enabled: true,
|
||||
connectorId,
|
||||
externalId: extDoc.externalId,
|
||||
contentHash: extDoc.contentHash,
|
||||
sourceUrl: extDoc.sourceUrl ?? null,
|
||||
...tagValues,
|
||||
uploadedAt: new Date(),
|
||||
})
|
||||
|
||||
processDocumentAsync(
|
||||
knowledgeBaseId,
|
||||
documentId,
|
||||
{
|
||||
filename: processingFilename,
|
||||
fileUrl,
|
||||
fileSize: contentBuffer.length,
|
||||
mimeType: 'text/plain',
|
||||
},
|
||||
{}
|
||||
).catch((error) => {
|
||||
logger.error('Failed to process connector document', {
|
||||
documentId,
|
||||
connectorId,
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Update an existing connector-sourced document with new content.
|
||||
* Updates in-place to avoid unique constraint violations on (connectorId, externalId).
|
||||
*/
|
||||
async function updateDocument(
|
||||
existingDocId: string,
|
||||
knowledgeBaseId: string,
|
||||
connectorId: string,
|
||||
connectorType: string,
|
||||
extDoc: ExternalDocument,
|
||||
sourceConfig?: Record<string, unknown>
|
||||
): Promise<void> {
|
||||
const contentBuffer = Buffer.from(extDoc.content, 'utf-8')
|
||||
const safeTitle = extDoc.title.replace(/[^a-zA-Z0-9.-]/g, '_')
|
||||
const customKey = `kb/${Date.now()}-${existingDocId}-${safeTitle}.txt`
|
||||
|
||||
const fileInfo = await StorageService.uploadFile({
|
||||
file: contentBuffer,
|
||||
fileName: `${safeTitle}.txt`,
|
||||
contentType: 'text/plain',
|
||||
context: 'knowledge-base',
|
||||
customKey,
|
||||
preserveKey: true,
|
||||
})
|
||||
|
||||
const fileUrl = `${getInternalApiBaseUrl()}${fileInfo.path}?context=knowledge-base`
|
||||
|
||||
const tagValues = extDoc.metadata
|
||||
? resolveTagMapping(connectorType, extDoc.metadata, sourceConfig)
|
||||
: undefined
|
||||
|
||||
const processingFilename = `${safeTitle}.txt`
|
||||
|
||||
await db
|
||||
.update(document)
|
||||
.set({
|
||||
filename: extDoc.title,
|
||||
fileUrl,
|
||||
fileSize: contentBuffer.length,
|
||||
contentHash: extDoc.contentHash,
|
||||
sourceUrl: extDoc.sourceUrl ?? null,
|
||||
...tagValues,
|
||||
processingStatus: 'pending',
|
||||
uploadedAt: new Date(),
|
||||
})
|
||||
.where(eq(document.id, existingDocId))
|
||||
|
||||
processDocumentAsync(
|
||||
knowledgeBaseId,
|
||||
existingDocId,
|
||||
{
|
||||
filename: processingFilename,
|
||||
fileUrl,
|
||||
fileSize: contentBuffer.length,
|
||||
mimeType: 'text/plain',
|
||||
},
|
||||
{}
|
||||
).catch((error) => {
|
||||
logger.error('Failed to re-process updated connector document', {
|
||||
documentId: existingDocId,
|
||||
connectorId,
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
})
|
||||
})
|
||||
}
|
||||
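For orientation, here is a minimal sketch of how a route handler might drive this engine. The endpoint path and request shape are assumptions for illustration; only `dispatchSync` is real.

```typescript
// Hypothetical route handler; the path and body shape are assumptions.
// dispatchSync queues on Trigger.dev when available, else runs executeSync inline.
import { dispatchSync } from '@/lib/knowledge/connectors/sync-engine'

export async function POST(req: Request, { params }: { params: { connectorId: string } }) {
  // fullSync also reconciles deletions (documents gone from the source are soft-deleted)
  const { fullSync = false } = await req.json().catch(() => ({}))
  await dispatchSync(params.connectorId, { fullSync })
  return Response.json({ queued: true })
}
```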
@@ -1,9 +1,29 @@
import crypto, { randomUUID } from 'crypto'
import { db } from '@sim/db'
-import { document, embedding, knowledgeBase, knowledgeBaseTagDefinitions } from '@sim/db/schema'
+import {
+  document,
+  embedding,
+  knowledgeBase,
+  knowledgeBaseTagDefinitions,
+  knowledgeConnector,
+} from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { tasks } from '@trigger.dev/sdk'
-import { and, asc, desc, eq, inArray, isNull, sql } from 'drizzle-orm'
+import {
+  and,
+  asc,
+  desc,
+  eq,
+  gt,
+  gte,
+  inArray,
+  isNull,
+  lt,
+  lte,
+  ne,
+  type SQL,
+  sql,
+} from 'drizzle-orm'
import { env } from '@/lib/core/config/env'
import { getStorageMethod, isRedisStorage } from '@/lib/core/storage'
import { processDocument } from '@/lib/knowledge/documents/document-processor'
@@ -770,6 +790,117 @@ export async function createDocumentRecords(
  })
}

/**
 * A single tag filter condition passed from the API layer.
 */
export interface TagFilterCondition {
  tagSlot: string
  fieldType: 'text' | 'number' | 'date' | 'boolean'
  operator: string
  value: string
  valueTo?: string
}

/**
 * Builds a Drizzle SQL condition from a tag filter.
 */
function buildTagFilterCondition(filter: TagFilterCondition): SQL | undefined {
  const column = document[filter.tagSlot as keyof typeof document.$inferSelect]
  if (!column) return undefined

  const col = document[filter.tagSlot as keyof typeof document]

  if (filter.fieldType === 'text') {
    const v = filter.value
    switch (filter.operator) {
      case 'eq':
        return eq(col as typeof document.tag1, v)
      case 'neq':
        return ne(col as typeof document.tag1, v)
      case 'contains':
        return sql`LOWER(${col}) LIKE LOWER(${`%${v}%`})`
      case 'not_contains':
        return sql`LOWER(${col}) NOT LIKE LOWER(${`%${v}%`})`
      case 'starts_with':
        return sql`LOWER(${col}) LIKE LOWER(${`${v}%`})`
      case 'ends_with':
        return sql`LOWER(${col}) LIKE LOWER(${`%${v}`})`
      default:
        return undefined
    }
  }

  if (filter.fieldType === 'number') {
    const num = Number(filter.value)
    if (Number.isNaN(num)) return undefined
    switch (filter.operator) {
      case 'eq':
        return eq(col as typeof document.number1, num)
      case 'neq':
        return ne(col as typeof document.number1, num)
      case 'gt':
        return gt(col as typeof document.number1, num)
      case 'gte':
        return gte(col as typeof document.number1, num)
      case 'lt':
        return lt(col as typeof document.number1, num)
      case 'lte':
        return lte(col as typeof document.number1, num)
      case 'between': {
        const numTo = Number(filter.valueTo)
        if (Number.isNaN(numTo)) return undefined
        return and(
          gte(col as typeof document.number1, num),
          lte(col as typeof document.number1, numTo)
        )
      }
      default:
        return undefined
    }
  }

  if (filter.fieldType === 'date') {
    const v = filter.value
    switch (filter.operator) {
      case 'eq':
        return eq(col as typeof document.date1, new Date(v))
      case 'neq':
        return ne(col as typeof document.date1, new Date(v))
      case 'gt':
        return gt(col as typeof document.date1, new Date(v))
      case 'gte':
        return gte(col as typeof document.date1, new Date(v))
      case 'lt':
        return lt(col as typeof document.date1, new Date(v))
      case 'lte':
        return lte(col as typeof document.date1, new Date(v))
      case 'between': {
        if (!filter.valueTo) return undefined
        return and(
          gte(col as typeof document.date1, new Date(v)),
          lte(col as typeof document.date1, new Date(filter.valueTo))
        )
      }
      default:
        return undefined
    }
  }

  if (filter.fieldType === 'boolean') {
    const boolVal = filter.value === 'true'
    switch (filter.operator) {
      case 'eq':
        return eq(col as typeof document.boolean1, boolVal)
      case 'neq':
        return ne(col as typeof document.boolean1, boolVal)
      default:
        return undefined
    }
  }

  return undefined
}

/**
 * Get documents for a knowledge base with filtering and pagination
 */
@@ -782,6 +913,7 @@ export async function getDocuments(
    offset?: number
    sortBy?: DocumentSortField
    sortOrder?: SortOrder
    tagFilters?: TagFilterCondition[]
  },
  requestId: string
): Promise<{
@@ -821,6 +953,10 @@ export async function getDocuments(
    boolean1: boolean | null
    boolean2: boolean | null
    boolean3: boolean | null
    // Connector fields
    connectorId: string | null
    connectorType: string | null
    sourceUrl: string | null
  }>
  pagination: {
    total: number
@@ -836,9 +972,10 @@ export async function getDocuments(
    offset = 0,
    sortBy = 'filename',
    sortOrder = 'asc',
+   tagFilters,
  } = options

- const whereConditions = [
+ const whereConditions: (SQL | undefined)[] = [
    eq(document.knowledgeBaseId, knowledgeBaseId),
    isNull(document.deletedAt),
  ]
@@ -853,6 +990,15 @@ export async function getDocuments(
    whereConditions.push(sql`LOWER(${document.filename}) LIKE LOWER(${`%${search}%`})`)
  }

  if (tagFilters && tagFilters.length > 0) {
    for (const filter of tagFilters) {
      const condition = buildTagFilterCondition(filter)
      if (condition) {
        whereConditions.push(condition)
      }
    }
  }

  const totalResult = await db
    .select({ count: sql<number>`COUNT(*)` })
    .from(document)
@@ -923,8 +1069,13 @@ export async function getDocuments(
      boolean1: document.boolean1,
      boolean2: document.boolean2,
      boolean3: document.boolean3,
      // Connector fields
      connectorId: document.connectorId,
      connectorType: knowledgeConnector.connectorType,
      sourceUrl: document.sourceUrl,
    })
    .from(document)
    .leftJoin(knowledgeConnector, eq(document.connectorId, knowledgeConnector.id))
    .where(and(...whereConditions))
    .orderBy(primaryOrderBy, secondaryOrderBy)
    .limit(limit)
@@ -971,6 +1122,10 @@ export async function getDocuments(
      boolean1: doc.boolean1,
      boolean2: doc.boolean2,
      boolean3: doc.boolean3,
      // Connector fields
      connectorId: doc.connectorId,
      connectorType: doc.connectorType ?? null,
      sourceUrl: doc.sourceUrl,
    })),
    pagination: {
      total,
@@ -1177,6 +1332,7 @@ export async function bulkDocumentOperation(
    .update(document)
    .set({
      deletedAt: new Date(),
      userExcluded: sql`CASE WHEN ${document.connectorId} IS NOT NULL THEN true ELSE ${document.userExcluded} END`,
    })
    .where(
      and(
@@ -1260,6 +1416,7 @@ export async function bulkDocumentOperationByFilter(
    .update(document)
    .set({
      deletedAt: new Date(),
      userExcluded: sql`CASE WHEN ${document.connectorId} IS NOT NULL THEN true ELSE ${document.userExcluded} END`,
    })
    .where(and(...whereConditions))
    .returning({ id: document.id, deletedAt: document.deletedAt })
@@ -1630,20 +1787,31 @@ export async function updateDocument(
}

/**
- * Soft delete a document
+ * Soft delete a document.
+ * For connector-sourced documents, also sets userExcluded so the sync engine
+ * will not re-import the document on future syncs.
 */
export async function deleteDocument(
  documentId: string,
  requestId: string
): Promise<{ success: boolean; message: string }> {
+ const doc = await db
+   .select({ connectorId: document.connectorId })
+   .from(document)
+   .where(eq(document.id, documentId))
+   .limit(1)
+
  await db
    .update(document)
    .set({
      deletedAt: new Date(),
+     ...(doc[0]?.connectorId ? { userExcluded: true } : {}),
    })
    .where(eq(document.id, documentId))

- logger.info(`[${requestId}] Document deleted: ${documentId}`)
+ logger.info(`[${requestId}] Document deleted: ${documentId}`, {
+   userExcluded: Boolean(doc[0]?.connectorId),
+ })

  return {
    success: true,
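To make the new filter path concrete, a sketch of the conditions a client might send. The slot assignments are invented; in practice they come from the knowledge base's tag definitions.

```typescript
// Invented example: assumes this KB mapped a "department" text tag to tag1
// and a "published" date tag to date1.
const tagFilters: TagFilterCondition[] = [
  { tagSlot: 'tag1', fieldType: 'text', operator: 'contains', value: 'engineering' },
  {
    tagSlot: 'date1',
    fieldType: 'date',
    operator: 'between',
    value: '2024-01-01',
    valueTo: '2024-06-30',
  },
]

// The client serializes these into the query string, matching the
// fetchKnowledgeDocuments change earlier in this commit:
const qs = new URLSearchParams({ tagFilters: JSON.stringify(tagFilters) })
```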
@@ -1,8 +1,8 @@
import { randomUUID } from 'crypto'
import { db } from '@sim/db'
-import { document, knowledgeBase, permissions } from '@sim/db/schema'
+import { document, knowledgeBase, knowledgeConnector, permissions } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
-import { and, count, eq, isNotNull, isNull, or } from 'drizzle-orm'
+import { and, count, eq, inArray, isNotNull, isNull, or } from 'drizzle-orm'
import type {
  ChunkingConfig,
  CreateKnowledgeBaseData,
@@ -69,10 +69,38 @@ export async function getKnowledgeBases(
    .groupBy(knowledgeBase.id)
    .orderBy(knowledgeBase.createdAt)

  const kbIds = knowledgeBasesWithCounts.map((kb) => kb.id)

  const connectorRows =
    kbIds.length > 0
      ? await db
          .select({
            knowledgeBaseId: knowledgeConnector.knowledgeBaseId,
            connectorType: knowledgeConnector.connectorType,
          })
          .from(knowledgeConnector)
          .where(
            and(
              inArray(knowledgeConnector.knowledgeBaseId, kbIds),
              isNull(knowledgeConnector.deletedAt)
            )
          )
      : []

  const connectorTypesByKb = new Map<string, string[]>()
  for (const row of connectorRows) {
    const types = connectorTypesByKb.get(row.knowledgeBaseId) ?? []
    if (!types.includes(row.connectorType)) {
      types.push(row.connectorType)
    }
    connectorTypesByKb.set(row.knowledgeBaseId, types)
  }

  return knowledgeBasesWithCounts.map((kb) => ({
    ...kb,
    chunkingConfig: kb.chunkingConfig as ChunkingConfig,
    docCount: Number(kb.docCount),
    connectorTypes: connectorTypesByKb.get(kb.id) ?? [],
  }))
}

@@ -122,6 +150,7 @@ export async function createKnowledgeBase(
    updatedAt: now,
    workspaceId: data.workspaceId,
    docCount: 0,
    connectorTypes: [],
  }
}

@@ -203,6 +232,7 @@ export async function updateKnowledgeBase(
    ...updatedKb[0],
    chunkingConfig: updatedKb[0].chunkingConfig as ChunkingConfig,
    docCount: Number(updatedKb[0].docCount),
    connectorTypes: [],
  }
}

@@ -243,6 +273,7 @@ export async function getKnowledgeBaseById(
    ...result[0],
    chunkingConfig: result[0].chunkingConfig as ChunkingConfig,
    docCount: Number(result[0].docCount),
    connectorTypes: [],
  }
}

@@ -27,6 +27,7 @@ export interface KnowledgeBaseWithCounts {
  updatedAt: Date
  workspaceId: string | null
  docCount: number
  connectorTypes: string[]
}

export interface CreateKnowledgeBaseData {
@@ -124,6 +125,7 @@ export interface KnowledgeBaseData {
  createdAt: string
  updatedAt: string
  workspaceId?: string
  connectorTypes?: string[]
}

/** Document data for API responses */
@@ -160,6 +162,9 @@ export interface DocumentData {
  boolean1?: boolean | null
  boolean2?: boolean | null
  boolean3?: boolean | null
  connectorId?: string | null
  connectorType?: string | null
  sourceUrl?: string | null
}

/** Chunk data for API responses */

@@ -412,7 +412,13 @@ export const OAUTH_PROVIDERS: Record<string, OAuthProviderConfig> = {
      providerId: 'airtable',
      icon: AirtableIcon,
      baseProviderIcon: AirtableIcon,
-     scopes: ['data.records:read', 'data.records:write', 'user.email:read', 'webhook:manage'],
+     scopes: [
+       'data.records:read',
+       'data.records:write',
+       'schema.bases:read',
+       'user.email:read',
+       'webhook:manage',
+     ],
    },
  },
  defaultService: 'airtable',

@@ -3,7 +3,7 @@ import { getBlock } from '@/blocks/registry'
import { SELECTOR_TYPES_HYDRATION_REQUIRED, type SubBlockConfig } from '@/blocks/types'
import { CREDENTIAL_SET, isUuid } from '@/executor/constants'
import { fetchCredentialSetById } from '@/hooks/queries/credential-sets'
-import { fetchOAuthCredentialDetail } from '@/hooks/queries/oauth-credentials'
+import { fetchOAuthCredentialDetail } from '@/hooks/queries/oauth/oauth-credentials'
import { getSelectorDefinition } from '@/hooks/selectors/registry'
import { resolveSelectorForSubBlock } from '@/hooks/selectors/resolution'
import type { SelectorKey } from '@/hooks/selectors/types'
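The scope addition is what makes the new metadata tools below work: Airtable gates its `/v0/meta` endpoints behind `schema.bases:read`, so tokens issued under the old scope list would need re-authorization before those tools succeed. A minimal sketch of the kind of call the scope unlocks:

```typescript
// Sketch only, not the production code path: a raw call to the Airtable
// metadata API that schema.bases:read authorizes.
async function listBasesRaw(accessToken: string) {
  const res = await fetch('https://api.airtable.com/v0/meta/bases', {
    headers: { Authorization: `Bearer ${accessToken}` },
  })
  if (!res.ok) throw new Error(`Airtable meta API error: ${res.status}`)
  return (await res.json()) as {
    bases: Array<{ id: string; name: string; permissionLevel: string }>
  }
}
```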
apps/sim/tools/airtable/get_base_schema.ts (Normal file, 126 lines)
@@ -0,0 +1,126 @@
import type { ToolConfig, ToolResponse } from '@/tools/types'

export interface AirtableGetBaseSchemaParams {
  accessToken: string
  baseId: string
}

interface AirtableFieldSchema {
  id: string
  name: string
  type: string
  description?: string
  options?: Record<string, unknown>
}

interface AirtableViewSchema {
  id: string
  name: string
  type: string
}

interface AirtableTableSchema {
  id: string
  name: string
  description?: string
  fields: AirtableFieldSchema[]
  views: AirtableViewSchema[]
}

export interface AirtableGetBaseSchemaResponse extends ToolResponse {
  output: {
    tables: AirtableTableSchema[]
    metadata: {
      totalTables: number
    }
  }
}

export const airtableGetBaseSchemaTool: ToolConfig<
  AirtableGetBaseSchemaParams,
  AirtableGetBaseSchemaResponse
> = {
  id: 'airtable_get_base_schema',
  name: 'Airtable Get Base Schema',
  description: 'Get the schema of all tables, fields, and views in an Airtable base',
  version: '1.0.0',

  oauth: {
    required: true,
    provider: 'airtable',
  },

  params: {
    accessToken: {
      type: 'string',
      required: true,
      visibility: 'hidden',
      description: 'OAuth access token',
    },
    baseId: {
      type: 'string',
      required: true,
      visibility: 'user-or-llm',
      description: 'Airtable base ID (starts with "app", e.g., "appXXXXXXXXXXXXXX")',
    },
  },

  request: {
    url: (params) => `https://api.airtable.com/v0/meta/bases/${params.baseId}/tables`,
    method: 'GET',
    headers: (params) => ({
      Authorization: `Bearer ${params.accessToken}`,
    }),
  },

  transformResponse: async (response) => {
    const data = await response.json()
    const tables = (data.tables || []).map((table: Record<string, unknown>) => ({
      id: table.id,
      name: table.name,
      description: table.description,
      fields: ((table.fields as Record<string, unknown>[]) || []).map((field) => ({
        id: field.id,
        name: field.name,
        type: field.type,
        description: field.description,
        options: field.options,
      })),
      views: ((table.views as Record<string, unknown>[]) || []).map((view) => ({
        id: view.id,
        name: view.name,
        type: view.type,
      })),
    }))
    return {
      success: true,
      output: {
        tables,
        metadata: {
          totalTables: tables.length,
        },
      },
    }
  },

  outputs: {
    tables: {
      type: 'json',
      description: 'Array of table schemas with fields and views',
      items: {
        type: 'object',
        properties: {
          id: { type: 'string' },
          name: { type: 'string' },
          description: { type: 'string' },
          fields: { type: 'json' },
          views: { type: 'json' },
        },
      },
    },
    metadata: {
      type: 'json',
      description: 'Operation metadata including total tables count',
    },
  },
}
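For reference, a trimmed sketch of what `transformResponse` yields; all values are invented, only the shape follows `AirtableGetBaseSchemaResponse`.

```typescript
// Invented sample; shape mirrors AirtableGetBaseSchemaResponse above.
const sample = {
  success: true,
  output: {
    tables: [
      {
        id: 'tblXXXXXXXXXXXXXX',
        name: 'Tasks',
        fields: [{ id: 'fldXXXXXXXXXXXXXX', name: 'Status', type: 'singleSelect', options: {} }],
        views: [{ id: 'viwXXXXXXXXXXXXXX', name: 'Grid view', type: 'grid' }],
      },
    ],
    metadata: { totalTables: 1 },
  },
}
```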
@@ -1,12 +1,16 @@
import { airtableCreateRecordsTool } from '@/tools/airtable/create_records'
import { airtableGetBaseSchemaTool } from '@/tools/airtable/get_base_schema'
import { airtableGetRecordTool } from '@/tools/airtable/get_record'
import { airtableListBasesTool } from '@/tools/airtable/list_bases'
import { airtableListRecordsTool } from '@/tools/airtable/list_records'
import { airtableUpdateMultipleRecordsTool } from '@/tools/airtable/update_multiple_records'
import { airtableUpdateRecordTool } from '@/tools/airtable/update_record'

export {
  airtableCreateRecordsTool,
  airtableGetBaseSchemaTool,
  airtableGetRecordTool,
  airtableListBasesTool,
  airtableListRecordsTool,
  airtableUpdateMultipleRecordsTool,
  airtableUpdateRecordTool,
apps/sim/tools/airtable/list_bases.ts (Normal file, 85 lines)
@@ -0,0 +1,85 @@
import type { ToolConfig, ToolResponse } from '@/tools/types'

export interface AirtableListBasesParams {
  accessToken: string
}

export interface AirtableListBasesResponse extends ToolResponse {
  output: {
    bases: Array<{
      id: string
      name: string
      permissionLevel: string
    }>
    metadata: {
      totalBases: number
    }
  }
}

export const airtableListBasesTool: ToolConfig<AirtableListBasesParams, AirtableListBasesResponse> =
  {
    id: 'airtable_list_bases',
    name: 'Airtable List Bases',
    description: 'List all bases the authenticated user has access to',
    version: '1.0.0',

    oauth: {
      required: true,
      provider: 'airtable',
    },

    params: {
      accessToken: {
        type: 'string',
        required: true,
        visibility: 'hidden',
        description: 'OAuth access token',
      },
    },

    request: {
      url: 'https://api.airtable.com/v0/meta/bases',
      method: 'GET',
      headers: (params) => ({
        Authorization: `Bearer ${params.accessToken}`,
      }),
    },

    transformResponse: async (response) => {
      const data = await response.json()
      const bases = (data.bases || []).map((base: Record<string, unknown>) => ({
        id: base.id,
        name: base.name,
        permissionLevel: base.permissionLevel,
      }))
      return {
        success: true,
        output: {
          bases,
          metadata: {
            totalBases: bases.length,
          },
        },
      }
    },

    outputs: {
      bases: {
        type: 'json',
        description: 'Array of Airtable bases with id, name, and permissionLevel',
        items: {
          type: 'object',
          properties: {
            id: { type: 'string' },
            name: { type: 'string' },
            permissionLevel: { type: 'string' },
          },
        },
      },
      metadata: {
        type: 'json',
        description: 'Operation metadata including total bases count',
      },
    },
  }
@@ -94,3 +94,25 @@ export type AirtableResponse =
  | AirtableCreateResponse
  | AirtableUpdateResponse
  | AirtableUpdateMultipleResponse
  | AirtableListBasesResponse
  | AirtableGetBaseSchemaResponse

export interface AirtableListBasesResponse extends ToolResponse {
  output: {
    bases: Array<{ id: string; name: string; permissionLevel: string }>
    metadata: { totalBases: number }
  }
}

export interface AirtableGetBaseSchemaResponse extends ToolResponse {
  output: {
    tables: Array<{
      id: string
      name: string
      description?: string
      fields: Array<{ id: string; name: string; type: string; description?: string }>
      views: Array<{ id: string; name: string; type: string }>
    }>
    metadata: { totalTables: number }
  }
}
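Because `AirtableResponse` is a plain union, downstream code narrows on the output shape. A hedged sketch of one way to do that; nothing below is exported by this commit, and it assumes every union member carries an `output` object:

```typescript
// Hypothetical narrowing helper, for illustration only.
function describeAirtableResult(res: AirtableResponse): string {
  if ('bases' in res.output) {
    return `${res.output.metadata.totalBases} bases visible to this token`
  }
  if ('tables' in res.output) {
    return `${res.output.metadata.totalTables} tables in the base`
  }
  return 'record operation result'
}
```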
apps/sim/tools/knowledge/delete_chunk.ts (Normal file, 67 lines)
@@ -0,0 +1,67 @@
import type { KnowledgeDeleteChunkResponse } from '@/tools/knowledge/types'
import type { ToolConfig } from '@/tools/types'

export const knowledgeDeleteChunkTool: ToolConfig<any, KnowledgeDeleteChunkResponse> = {
  id: 'knowledge_delete_chunk',
  name: 'Knowledge Delete Chunk',
  description: 'Delete a chunk from a document in a knowledge base',
  version: '1.0.0',

  params: {
    knowledgeBaseId: {
      type: 'string',
      required: true,
      visibility: 'user-or-llm',
      description: 'ID of the knowledge base',
    },
    documentId: {
      type: 'string',
      required: true,
      visibility: 'user-or-llm',
      description: 'ID of the document containing the chunk',
    },
    chunkId: {
      type: 'string',
      required: true,
      visibility: 'user-or-llm',
      description: 'ID of the chunk to delete',
    },
  },

  request: {
    url: (params) =>
      `/api/knowledge/${params.knowledgeBaseId}/documents/${params.documentId}/chunks/${params.chunkId}`,
    method: 'DELETE',
    headers: () => ({
      'Content-Type': 'application/json',
    }),
  },

  transformResponse: async (response, params): Promise<KnowledgeDeleteChunkResponse> => {
    const result = await response.json()

    return {
      success: true,
      output: {
        chunkId: params?.chunkId ?? '',
        documentId: params?.documentId ?? '',
        message: result.data?.message ?? 'Chunk deleted successfully',
      },
    }
  },

  outputs: {
    chunkId: {
      type: 'string',
      description: 'ID of the deleted chunk',
    },
    documentId: {
      type: 'string',
      description: 'ID of the parent document',
    },
    message: {
      type: 'string',
      description: 'Confirmation message',
    },
  },
}
apps/sim/tools/knowledge/delete_document.ts (Normal file, 55 lines)
@@ -0,0 +1,55 @@
import type { KnowledgeDeleteDocumentResponse } from '@/tools/knowledge/types'
import type { ToolConfig } from '@/tools/types'

export const knowledgeDeleteDocumentTool: ToolConfig<any, KnowledgeDeleteDocumentResponse> = {
  id: 'knowledge_delete_document',
  name: 'Knowledge Delete Document',
  description: 'Delete a document from a knowledge base',
  version: '1.0.0',

  params: {
    knowledgeBaseId: {
      type: 'string',
      required: true,
      visibility: 'user-or-llm',
      description: 'ID of the knowledge base containing the document',
    },
    documentId: {
      type: 'string',
      required: true,
      visibility: 'user-or-llm',
      description: 'ID of the document to delete',
    },
  },

  request: {
    url: (params) => `/api/knowledge/${params.knowledgeBaseId}/documents/${params.documentId}`,
    method: 'DELETE',
    headers: () => ({
      'Content-Type': 'application/json',
    }),
  },

  transformResponse: async (response, params): Promise<KnowledgeDeleteDocumentResponse> => {
    const result = await response.json()

    return {
      success: true,
      output: {
        documentId: params?.documentId ?? '',
        message: result.data?.message ?? 'Document deleted successfully',
      },
    }
  },

  outputs: {
    documentId: {
      type: 'string',
      description: 'ID of the deleted document',
    },
    message: {
      type: 'string',
      description: 'Confirmation message',
    },
  },
}
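Note the interaction with the sync engine: this tool hits the same DELETE route as the UI, which runs the `deleteDocument` service shown earlier, so deleting a connector-synced document also sets `userExcluded` and the next sync will not re-import it. A hedged sketch of the equivalent raw call:

```typescript
// Sketch of the underlying HTTP call the tool makes.
async function deleteKnowledgeDocument(kbId: string, docId: string) {
  const res = await fetch(`/api/knowledge/${kbId}/documents/${docId}`, {
    method: 'DELETE',
    headers: { 'Content-Type': 'application/json' },
  })
  if (!res.ok) throw new Error(`Delete failed: ${res.status}`)
  return res.json()
}
```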
@@ -1,5 +1,21 @@
import { knowledgeCreateDocumentTool } from '@/tools/knowledge/create_document'
import { knowledgeDeleteChunkTool } from '@/tools/knowledge/delete_chunk'
import { knowledgeDeleteDocumentTool } from '@/tools/knowledge/delete_document'
import { knowledgeListChunksTool } from '@/tools/knowledge/list_chunks'
import { knowledgeListDocumentsTool } from '@/tools/knowledge/list_documents'
import { knowledgeListTagsTool } from '@/tools/knowledge/list_tags'
import { knowledgeSearchTool } from '@/tools/knowledge/search'
import { knowledgeUpdateChunkTool } from '@/tools/knowledge/update_chunk'
import { knowledgeUploadChunkTool } from '@/tools/knowledge/upload_chunk'

-export { knowledgeSearchTool, knowledgeUploadChunkTool, knowledgeCreateDocumentTool }
+export {
+  knowledgeSearchTool,
+  knowledgeUploadChunkTool,
+  knowledgeCreateDocumentTool,
+  knowledgeListTagsTool,
+  knowledgeListDocumentsTool,
+  knowledgeDeleteDocumentTool,
+  knowledgeListChunksTool,
+  knowledgeUpdateChunkTool,
+  knowledgeDeleteChunkTool,
+}
apps/sim/tools/knowledge/list_chunks.ts (Normal file, 144 lines)
@@ -0,0 +1,144 @@
import type { KnowledgeListChunksResponse } from '@/tools/knowledge/types'
import type { ToolConfig } from '@/tools/types'

export const knowledgeListChunksTool: ToolConfig<any, KnowledgeListChunksResponse> = {
  id: 'knowledge_list_chunks',
  name: 'Knowledge List Chunks',
  description:
    'List chunks for a document in a knowledge base with optional filtering and pagination',
  version: '1.0.0',

  params: {
    knowledgeBaseId: {
      type: 'string',
      required: true,
      visibility: 'user-or-llm',
      description: 'ID of the knowledge base',
    },
    documentId: {
      type: 'string',
      required: true,
      visibility: 'user-or-llm',
      description: 'ID of the document to list chunks from',
    },
    search: {
      type: 'string',
      required: false,
      visibility: 'user-or-llm',
      description: 'Search query to filter chunks by content',
    },
    enabled: {
      type: 'string',
      required: false,
      visibility: 'user-or-llm',
      description: 'Filter by enabled status: "true", "false", or "all" (default: "all")',
    },
    limit: {
      type: 'number',
      required: false,
      visibility: 'user-or-llm',
      description: 'Maximum number of chunks to return (1-100, default: 50)',
    },
    offset: {
      type: 'number',
      required: false,
      visibility: 'user-or-llm',
      description: 'Number of chunks to skip for pagination (default: 0)',
    },
  },

  request: {
    url: (params) => {
      const queryParams = new URLSearchParams()
      if (params.search) queryParams.set('search', params.search)
      if (params.enabled) queryParams.set('enabled', params.enabled)
      if (params.limit)
        queryParams.set('limit', String(Math.max(1, Math.min(100, Number(params.limit)))))
      if (params.offset) queryParams.set('offset', String(params.offset))
      const qs = queryParams.toString()
      return `/api/knowledge/${params.knowledgeBaseId}/documents/${params.documentId}/chunks${qs ? `?${qs}` : ''}`
    },
    method: 'GET',
    headers: () => ({
      'Content-Type': 'application/json',
    }),
  },

  transformResponse: async (response, params): Promise<KnowledgeListChunksResponse> => {
    const result = await response.json()
    const chunks = result.data || []
    const pagination = result.pagination || {}

    return {
      success: true,
      output: {
        knowledgeBaseId: params?.knowledgeBaseId ?? '',
        documentId: params?.documentId ?? '',
        chunks: chunks.map(
          (chunk: {
            id: string
            chunkIndex: number
            content: string
            contentLength: number
            tokenCount: number
            enabled: boolean
            createdAt: string
            updatedAt: string
          }) => ({
            id: chunk.id,
            chunkIndex: chunk.chunkIndex ?? 0,
            content: chunk.content,
            contentLength: chunk.contentLength ?? 0,
            tokenCount: chunk.tokenCount ?? 0,
            enabled: chunk.enabled ?? true,
            createdAt: chunk.createdAt ?? null,
            updatedAt: chunk.updatedAt ?? null,
          })
        ),
        totalChunks: pagination.total ?? chunks.length,
        limit: pagination.limit ?? 50,
        offset: pagination.offset ?? 0,
      },
    }
  },

  outputs: {
    knowledgeBaseId: {
      type: 'string',
      description: 'ID of the knowledge base',
    },
    documentId: {
      type: 'string',
      description: 'ID of the document',
    },
    chunks: {
      type: 'array',
      description: 'Array of chunks in the document',
      items: {
        type: 'object',
        properties: {
          id: { type: 'string', description: 'Chunk ID' },
          chunkIndex: { type: 'number', description: 'Index of the chunk within the document' },
          content: { type: 'string', description: 'Chunk text content' },
          contentLength: { type: 'number', description: 'Content length in characters' },
          tokenCount: { type: 'number', description: 'Token count for the chunk' },
          enabled: { type: 'boolean', description: 'Whether the chunk is enabled' },
          createdAt: { type: 'string', description: 'Creation timestamp' },
          updatedAt: { type: 'string', description: 'Last update timestamp' },
        },
      },
    },
    totalChunks: {
      type: 'number',
      description: 'Total number of chunks matching the filter',
    },
    limit: {
      type: 'number',
      description: 'Page size used',
    },
    offset: {
      type: 'number',
      description: 'Offset used for pagination',
    },
  },
}
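The `limit`/`offset` pair supports plain offset pagination (the tool clamps `limit` to 1-100). A sketch of draining every chunk of a document; `fetchChunksPage` stands in for whatever executes the tool:

```typescript
// fetchChunksPage is a hypothetical stand-in for invoking knowledge_list_chunks.
declare function fetchChunksPage(
  kbId: string,
  docId: string,
  opts: { limit: number; offset: number }
): Promise<KnowledgeListChunksResponse>

async function fetchAllChunks(kbId: string, docId: string) {
  const all: KnowledgeChunkSummary[] = []
  const limit = 100
  for (let offset = 0; ; offset += limit) {
    const page = await fetchChunksPage(kbId, docId, { limit, offset })
    all.push(...page.output.chunks)
    if (offset + limit >= page.output.totalChunks) break
  }
  return all
}
```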
apps/sim/tools/knowledge/list_documents.ts (Normal file, 141 lines)
@@ -0,0 +1,141 @@
import type { KnowledgeListDocumentsResponse } from '@/tools/knowledge/types'
import type { ToolConfig } from '@/tools/types'

export const knowledgeListDocumentsTool: ToolConfig<any, KnowledgeListDocumentsResponse> = {
  id: 'knowledge_list_documents',
  name: 'Knowledge List Documents',
  description: 'List documents in a knowledge base with optional filtering, search, and pagination',
  version: '1.0.0',

  params: {
    knowledgeBaseId: {
      type: 'string',
      required: true,
      visibility: 'user-or-llm',
      description: 'ID of the knowledge base to list documents from',
    },
    search: {
      type: 'string',
      required: false,
      visibility: 'user-or-llm',
      description: 'Search query to filter documents by filename',
    },
    enabledFilter: {
      type: 'string',
      required: false,
      visibility: 'user-or-llm',
      description: 'Filter by enabled status: "all", "enabled", or "disabled"',
    },
    limit: {
      type: 'number',
      required: false,
      visibility: 'user-or-llm',
      description: 'Maximum number of documents to return (default: 50)',
    },
    offset: {
      type: 'number',
      required: false,
      visibility: 'user-or-llm',
      description: 'Number of documents to skip for pagination (default: 0)',
    },
  },

  request: {
    url: (params) => {
      const queryParams = new URLSearchParams()
      if (params.search) queryParams.set('search', params.search)
      if (params.enabledFilter) queryParams.set('enabledFilter', params.enabledFilter)
      if (params.limit) queryParams.set('limit', String(params.limit))
      if (params.offset) queryParams.set('offset', String(params.offset))
      const qs = queryParams.toString()
      return `/api/knowledge/${params.knowledgeBaseId}/documents${qs ? `?${qs}` : ''}`
    },
    method: 'GET',
    headers: () => ({
      'Content-Type': 'application/json',
    }),
  },

  transformResponse: async (response, params): Promise<KnowledgeListDocumentsResponse> => {
    const result = await response.json()
    const data = result.data || {}
    const documents = data.documents || []
    const pagination = data.pagination || {}

    return {
      success: true,
      output: {
        knowledgeBaseId: params?.knowledgeBaseId ?? '',
        documents: documents.map(
          (doc: {
            id: string
            filename: string
            fileSize: number
            mimeType: string
            enabled: boolean
            processingStatus: string
            chunkCount: number
            tokenCount: number
            uploadedAt: string
            updatedAt: string
          }) => ({
            id: doc.id,
            filename: doc.filename,
            fileSize: doc.fileSize ?? 0,
            mimeType: doc.mimeType ?? null,
            enabled: doc.enabled ?? true,
            processingStatus: doc.processingStatus ?? null,
            chunkCount: doc.chunkCount ?? 0,
            tokenCount: doc.tokenCount ?? 0,
            uploadedAt: doc.uploadedAt ?? null,
            updatedAt: doc.updatedAt ?? null,
          })
        ),
        totalDocuments: pagination.total ?? documents.length,
        limit: pagination.limit ?? 50,
        offset: pagination.offset ?? 0,
      },
    }
  },

  outputs: {
    knowledgeBaseId: {
      type: 'string',
      description: 'ID of the knowledge base',
    },
    documents: {
      type: 'array',
      description: 'Array of documents in the knowledge base',
      items: {
        type: 'object',
        properties: {
          id: { type: 'string', description: 'Document ID' },
          filename: { type: 'string', description: 'Document filename' },
          fileSize: { type: 'number', description: 'File size in bytes' },
          mimeType: { type: 'string', description: 'MIME type of the document' },
          enabled: { type: 'boolean', description: 'Whether the document is enabled' },
          processingStatus: {
            type: 'string',
            description: 'Processing status (pending, processing, completed, failed)',
          },
          chunkCount: { type: 'number', description: 'Number of chunks in the document' },
          tokenCount: { type: 'number', description: 'Total token count across chunks' },
          uploadedAt: { type: 'string', description: 'Upload timestamp' },
          updatedAt: { type: 'string', description: 'Last update timestamp' },
        },
      },
    },
    totalDocuments: {
      type: 'number',
      description: 'Total number of documents matching the filter',
    },
    limit: {
      type: 'number',
      description: 'Page size used',
    },
    offset: {
      type: 'number',
      description: 'Offset used for pagination',
    },
  },
}
apps/sim/tools/knowledge/list_tags.ts (Normal file, 85 lines)
@@ -0,0 +1,85 @@
import type { KnowledgeListTagsResponse } from '@/tools/knowledge/types'
import type { ToolConfig } from '@/tools/types'

export const knowledgeListTagsTool: ToolConfig<any, KnowledgeListTagsResponse> = {
  id: 'knowledge_list_tags',
  name: 'Knowledge List Tags',
  description: 'List all tag definitions for a knowledge base',
  version: '1.0.0',

  params: {
    knowledgeBaseId: {
      type: 'string',
      required: true,
      visibility: 'user-or-llm',
      description: 'ID of the knowledge base to list tags for',
    },
  },

  request: {
    url: (params) => `/api/knowledge/${params.knowledgeBaseId}/tag-definitions`,
    method: 'GET',
    headers: () => ({
      'Content-Type': 'application/json',
    }),
  },

  transformResponse: async (response, params): Promise<KnowledgeListTagsResponse> => {
    const result = await response.json()
    const tags = result.data || []

    return {
      success: true,
      output: {
        knowledgeBaseId: params?.knowledgeBaseId ?? '',
        tags: tags.map(
          (tag: {
            id: string
            tagSlot: string
            displayName: string
            fieldType: string
            createdAt: string
            updatedAt: string
          }) => ({
            id: tag.id,
            tagSlot: tag.tagSlot,
            displayName: tag.displayName,
            fieldType: tag.fieldType,
            createdAt: tag.createdAt ?? null,
            updatedAt: tag.updatedAt ?? null,
          })
        ),
        totalTags: tags.length,
      },
    }
  },

  outputs: {
    knowledgeBaseId: {
      type: 'string',
      description: 'ID of the knowledge base',
    },
    tags: {
      type: 'array',
      description: 'Array of tag definitions for the knowledge base',
      items: {
        type: 'object',
        properties: {
          id: { type: 'string', description: 'Tag definition ID' },
          tagSlot: { type: 'string', description: 'Internal tag slot (e.g. tag1, number1)' },
          displayName: { type: 'string', description: 'Human-readable tag name' },
          fieldType: {
            type: 'string',
            description: 'Tag field type (text, number, date, boolean)',
          },
          createdAt: { type: 'string', description: 'Creation timestamp' },
          updatedAt: { type: 'string', description: 'Last update timestamp' },
        },
      },
    },
    totalTags: {
      type: 'number',
      description: 'Total number of tag definitions',
    },
  },
}
@@ -100,3 +100,109 @@ export interface KnowledgeCreateDocumentResponse {
  }
  error?: string
}

export interface KnowledgeTagDefinition {
  id: string
  tagSlot: string
  displayName: string
  fieldType: string
  createdAt: string | null
  updatedAt: string | null
}

export interface KnowledgeListTagsParams {
  knowledgeBaseId: string
}

export interface KnowledgeListTagsResponse {
  success: boolean
  output: {
    knowledgeBaseId: string
    tags: KnowledgeTagDefinition[]
    totalTags: number
  }
  error?: string
}

export interface KnowledgeDocumentSummary {
  id: string
  filename: string
  fileSize: number
  mimeType: string | null
  enabled: boolean
  processingStatus: string | null
  chunkCount: number
  tokenCount: number
  uploadedAt: string | null
  updatedAt: string | null
}

export interface KnowledgeListDocumentsResponse {
  success: boolean
  output: {
    knowledgeBaseId: string
    documents: KnowledgeDocumentSummary[]
    totalDocuments: number
    limit: number
    offset: number
  }
  error?: string
}

export interface KnowledgeDeleteDocumentResponse {
  success: boolean
  output: {
    documentId: string
    message: string
  }
  error?: string
}

export interface KnowledgeChunkSummary {
  id: string
  chunkIndex: number
  content: string
  contentLength: number
  tokenCount: number
  enabled: boolean
  createdAt: string | null
  updatedAt: string | null
}

export interface KnowledgeListChunksResponse {
  success: boolean
  output: {
    knowledgeBaseId: string
    documentId: string
    chunks: KnowledgeChunkSummary[]
    totalChunks: number
    limit: number
    offset: number
  }
  error?: string
}

export interface KnowledgeUpdateChunkResponse {
  success: boolean
  output: {
    documentId: string
    id: string
    chunkIndex: number
    content: string
    contentLength: number
    tokenCount: number
    enabled: boolean
    updatedAt: string | null
  }
  error?: string
}

export interface KnowledgeDeleteChunkResponse {
  success: boolean
  output: {
    chunkId: string
    documentId: string
    message: string
  }
  error?: string
}
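All of these response interfaces share the same `success` / `output` / `error?` envelope, so a caller can narrow on `success` before touching `output`. A minimal consumer sketch (the `unwrap` helper is hypothetical, not part of this diff):

```typescript
// Hypothetical helper, not part of this diff: narrow on `success` before
// reading `output`, surfacing `error` when the call failed.
function unwrap<T extends { success: boolean; output: unknown; error?: string }>(
  res: T
): T['output'] {
  if (!res.success) {
    throw new Error(res.error ?? 'Knowledge tool call failed')
  }
  return res.output
}
```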
112
apps/sim/tools/knowledge/update_chunk.ts
Normal file
@@ -0,0 +1,112 @@
import type { KnowledgeUpdateChunkResponse } from '@/tools/knowledge/types'
import type { ToolConfig } from '@/tools/types'

export const knowledgeUpdateChunkTool: ToolConfig<any, KnowledgeUpdateChunkResponse> = {
  id: 'knowledge_update_chunk',
  name: 'Knowledge Update Chunk',
  description: 'Update the content or enabled status of a chunk in a knowledge base',
  version: '1.0.0',

  params: {
    knowledgeBaseId: {
      type: 'string',
      required: true,
      visibility: 'user-or-llm',
      description: 'ID of the knowledge base',
    },
    documentId: {
      type: 'string',
      required: true,
      visibility: 'user-or-llm',
      description: 'ID of the document containing the chunk',
    },
    chunkId: {
      type: 'string',
      required: true,
      visibility: 'user-or-llm',
      description: 'ID of the chunk to update',
    },
    content: {
      type: 'string',
      required: false,
      visibility: 'user-or-llm',
      description: 'New content for the chunk',
    },
    enabled: {
      type: 'boolean',
      required: false,
      visibility: 'user-or-llm',
      description: 'Whether the chunk should be enabled or disabled',
    },
  },

  request: {
    url: (params) =>
      `/api/knowledge/${params.knowledgeBaseId}/documents/${params.documentId}/chunks/${params.chunkId}`,
    method: 'PUT',
    headers: () => ({
      'Content-Type': 'application/json',
    }),
    body: (params) => {
      // Only include the fields the caller actually provided (partial update)
      const body: Record<string, unknown> = {}
      if (params.content !== undefined) body.content = params.content
      if (params.enabled !== undefined) body.enabled = params.enabled
      return body
    },
  },

  transformResponse: async (response, params): Promise<KnowledgeUpdateChunkResponse> => {
    const result = await response.json()
    const chunk = result.data || {}

    return {
      success: true,
      output: {
        documentId: params?.documentId ?? '',
        id: chunk.id ?? '',
        chunkIndex: chunk.chunkIndex ?? 0,
        content: chunk.content ?? '',
        contentLength: chunk.contentLength ?? 0,
        tokenCount: chunk.tokenCount ?? 0,
        enabled: chunk.enabled ?? true,
        updatedAt: chunk.updatedAt ?? null,
      },
    }
  },

  outputs: {
    documentId: {
      type: 'string',
      description: 'ID of the parent document',
    },
    id: {
      type: 'string',
      description: 'Chunk ID',
    },
    chunkIndex: {
      type: 'number',
      description: 'Index of the chunk within the document',
    },
    content: {
      type: 'string',
      description: 'Updated chunk content',
    },
    contentLength: {
      type: 'number',
      description: 'Content length in characters',
    },
    tokenCount: {
      type: 'number',
      description: 'Token count for the chunk',
    },
    enabled: {
      type: 'boolean',
      description: 'Whether the chunk is enabled',
    },
    updatedAt: {
      type: 'string',
      description: 'Last update timestamp',
      optional: true,
    },
  },
}
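For a concrete picture of the partial-update behavior: given only `enabled`, the config above produces a PUT with a one-field body and leaves the chunk text untouched. A hand-rolled sketch of the equivalent request (hypothetical ids; Sim's actual executor wiring is not shown in this diff):

```typescript
// Hand-rolled equivalent of the request the tool config builds when only
// `enabled` is provided; `content` is omitted from the body, so the chunk
// text is left unchanged by the PUT.
const params = {
  knowledgeBaseId: 'kb_123', // hypothetical ids
  documentId: 'doc_456',
  chunkId: 'chunk_789',
  enabled: false,
}

const res = await fetch(
  `/api/knowledge/${params.knowledgeBaseId}/documents/${params.documentId}/chunks/${params.chunkId}`,
  {
    method: 'PUT',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ enabled: params.enabled }),
  }
)
```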
@@ -20,8 +20,11 @@ import {
} from '@/tools/ahrefs'
import {
  airtableCreateRecordsTool,
  airtableGetBaseSchemaTool,
  airtableGetRecordTool,
  airtableListBasesTool,
  airtableListRecordsTool,
  airtableUpdateMultipleRecordsTool,
  airtableUpdateRecordTool,
} from '@/tools/airtable'
import { airweaveSearchTool } from '@/tools/airweave'
@@ -899,7 +902,13 @@ import {
} from '@/tools/kalshi'
import {
  knowledgeCreateDocumentTool,
  knowledgeDeleteChunkTool,
  knowledgeDeleteDocumentTool,
  knowledgeListChunksTool,
  knowledgeListDocumentsTool,
  knowledgeListTagsTool,
  knowledgeSearchTool,
  knowledgeUpdateChunkTool,
  knowledgeUploadChunkTool,
} from '@/tools/knowledge'
import { langsmithCreateRunsBatchTool, langsmithCreateRunTool } from '@/tools/langsmith'
@@ -2705,8 +2714,11 @@ export const tools: Record<string, ToolConfig> = {
  twilio_voice_list_calls: listCallsTool,
  twilio_voice_get_recording: getRecordingTool,
  airtable_create_records: airtableCreateRecordsTool,
  airtable_get_base_schema: airtableGetBaseSchemaTool,
  airtable_get_record: airtableGetRecordTool,
  airtable_list_bases: airtableListBasesTool,
  airtable_list_records: airtableListRecordsTool,
  airtable_update_multiple_records: airtableUpdateMultipleRecordsTool,
  airtable_update_record: airtableUpdateRecordTool,
  ahrefs_domain_rating: ahrefsDomainRatingTool,
  ahrefs_backlinks: ahrefsBacklinksTool,
@@ -2774,6 +2786,12 @@ export const tools: Record<string, ToolConfig> = {
  knowledge_search: knowledgeSearchTool,
  knowledge_upload_chunk: knowledgeUploadChunkTool,
  knowledge_create_document: knowledgeCreateDocumentTool,
  knowledge_list_tags: knowledgeListTagsTool,
  knowledge_list_documents: knowledgeListDocumentsTool,
  knowledge_delete_document: knowledgeDeleteDocumentTool,
  knowledge_list_chunks: knowledgeListChunksTool,
  knowledge_update_chunk: knowledgeUpdateChunkTool,
  knowledge_delete_chunk: knowledgeDeleteChunkTool,
  search_tool: searchTool,
  elevenlabs_tts: elevenLabsTtsTool,
  stt_whisper: whisperSttTool,
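Tools register under `{service}_{action}` ids, so execution starts as a plain map lookup. A minimal sketch, assuming the registry's import path (not shown in this hunk):

```typescript
import type { ToolConfig } from '@/tools/types'
// The registry's import path is an assumption; the diff only shows the
// `tools` map itself.
import { tools } from '@/tools/registry'

function getTool(id: string): ToolConfig {
  const tool = tools[id]
  if (!tool) throw new Error(`Unknown tool: ${id}`)
  return tool
}

// e.g. the new chunk-update tool registered above
const updateChunk = getTool('knowledge_update_chunk')
```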
45
packages/db/migrations/0155_talented_ben_parker.sql
Normal file
@@ -0,0 +1,45 @@
CREATE TABLE "knowledge_connector" (
	"id" text PRIMARY KEY NOT NULL,
	"knowledge_base_id" text NOT NULL,
	"connector_type" text NOT NULL,
	"credential_id" text NOT NULL,
	"source_config" json NOT NULL,
	"sync_mode" text DEFAULT 'incremental' NOT NULL,
	"sync_interval_minutes" integer DEFAULT 1440 NOT NULL,
	"status" text DEFAULT 'active' NOT NULL,
	"last_sync_at" timestamp,
	"last_sync_error" text,
	"last_sync_doc_count" integer,
	"next_sync_at" timestamp,
	"consecutive_failures" integer DEFAULT 0 NOT NULL,
	"created_at" timestamp DEFAULT now() NOT NULL,
	"updated_at" timestamp DEFAULT now() NOT NULL,
	"deleted_at" timestamp
);
--> statement-breakpoint
CREATE TABLE "knowledge_connector_sync_log" (
	"id" text PRIMARY KEY NOT NULL,
	"connector_id" text NOT NULL,
	"status" text NOT NULL,
	"started_at" timestamp DEFAULT now() NOT NULL,
	"completed_at" timestamp,
	"docs_added" integer DEFAULT 0 NOT NULL,
	"docs_updated" integer DEFAULT 0 NOT NULL,
	"docs_deleted" integer DEFAULT 0 NOT NULL,
	"docs_unchanged" integer DEFAULT 0 NOT NULL,
	"error_message" text
);
--> statement-breakpoint
ALTER TABLE "document" ADD COLUMN "user_excluded" boolean DEFAULT false NOT NULL;--> statement-breakpoint
ALTER TABLE "document" ADD COLUMN "connector_id" text;--> statement-breakpoint
ALTER TABLE "document" ADD COLUMN "external_id" text;--> statement-breakpoint
ALTER TABLE "document" ADD COLUMN "content_hash" text;--> statement-breakpoint
ALTER TABLE "document" ADD COLUMN "source_url" text;--> statement-breakpoint
ALTER TABLE "knowledge_connector" ADD CONSTRAINT "knowledge_connector_knowledge_base_id_knowledge_base_id_fk" FOREIGN KEY ("knowledge_base_id") REFERENCES "public"."knowledge_base"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "knowledge_connector_sync_log" ADD CONSTRAINT "knowledge_connector_sync_log_connector_id_knowledge_connector_id_fk" FOREIGN KEY ("connector_id") REFERENCES "public"."knowledge_connector"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
CREATE INDEX "kc_knowledge_base_id_idx" ON "knowledge_connector" USING btree ("knowledge_base_id");--> statement-breakpoint
CREATE INDEX "kc_status_next_sync_idx" ON "knowledge_connector" USING btree ("status","next_sync_at");--> statement-breakpoint
CREATE INDEX "kcsl_connector_id_idx" ON "knowledge_connector_sync_log" USING btree ("connector_id");--> statement-breakpoint
ALTER TABLE "document" ADD CONSTRAINT "document_connector_id_knowledge_connector_id_fk" FOREIGN KEY ("connector_id") REFERENCES "public"."knowledge_connector"("id") ON DELETE set null ON UPDATE no action;--> statement-breakpoint
CREATE UNIQUE INDEX "doc_connector_external_id_idx" ON "document" USING btree ("connector_id","external_id") WHERE "document"."deleted_at" IS NULL;--> statement-breakpoint
CREATE INDEX "doc_connector_id_idx" ON "document" USING btree ("connector_id");
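The `content_hash` column, together with the partial unique index on (`connector_id`, `external_id`), is what lets a sync pass decide whether a fetched document counts toward `docs_added`, `docs_updated`, or `docs_unchanged`. A sketch of that classification, assuming Node's built-in `crypto`; the `classifyDoc` and `StoredDoc` names are hypothetical:

```typescript
import { createHash } from 'node:crypto'

// `classifyDoc` and `StoredDoc` are hypothetical names; the hash comparison
// itself mirrors what the content_hash column exists for.
function sha256(text: string): string {
  return createHash('sha256').update(text).digest('hex')
}

type StoredDoc = { externalId: string; contentHash: string | null }

function classifyDoc(
  stored: StoredDoc | undefined,
  fetchedText: string
): 'added' | 'updated' | 'unchanged' {
  const hash = sha256(fetchedText)
  if (!stored) return 'added' // no row for (connector_id, external_id) yet
  if (stored.contentHash !== hash) return 'updated' // content changed since last sync
  return 'unchanged' // identical content: skip re-chunking and re-embedding
}
```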
11300
packages/db/migrations/meta/0155_snapshot.json
Normal file
File diff suppressed because it is too large
@@ -1079,6 +1079,13 @@
      "when": 1770869658697,
      "tag": "0154_bumpy_living_mummy",
      "breakpoints": true
    },
    {
      "idx": 155,
      "version": "7",
      "when": 1771305981173,
      "tag": "0155_talented_ben_parker",
      "breakpoints": true
    }
  ]
}
@@ -1215,6 +1215,7 @@ export const document = pgTable(
    // Document state
    enabled: boolean('enabled').notNull().default(true), // Enable/disable from knowledge base
    deletedAt: timestamp('deleted_at'), // Soft delete
    userExcluded: boolean('user_excluded').notNull().default(false), // User explicitly excluded — skip on sync

    // Document tags for filtering (inherited by all chunks)
    // Text tags (7 slots)
@@ -1239,6 +1240,14 @@ export const document = pgTable(
    boolean2: boolean('boolean2'),
    boolean3: boolean('boolean3'),

    // Connector-sourced document fields
    connectorId: text('connector_id').references(() => knowledgeConnector.id, {
      onDelete: 'set null',
    }),
    externalId: text('external_id'),
    contentHash: text('content_hash'),
    sourceUrl: text('source_url'),

    // Timestamps
    uploadedAt: timestamp('uploaded_at').notNull().defaultNow(),
  },
@@ -1252,6 +1261,12 @@ export const document = pgTable(
      table.knowledgeBaseId,
      table.processingStatus
    ),
    // Connector document uniqueness (partial — only non-deleted rows)
    connectorExternalIdIdx: uniqueIndex('doc_connector_external_id_idx')
      .on(table.connectorId, table.externalId)
      .where(sql`${table.deletedAt} IS NULL`),
    // Sync engine: load all active docs for a connector
    connectorIdIdx: index('doc_connector_id_idx').on(table.connectorId),
    // Text tag indexes
    tag1Idx: index('doc_tag1_idx').on(table.tag1),
    tag2Idx: index('doc_tag2_idx').on(table.tag2),
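A sketch of the doc-loading query the `doc_connector_id_idx` comment above describes: live rows for one connector, honoring soft deletes. The `db` handle and schema import paths are assumptions, and keeping `userExcluded` in the projection reflects the `user_excluded` semantics above (skip on sync rather than re-import).

```typescript
import { and, eq, isNull } from 'drizzle-orm'
// `db` and these schema import paths are assumptions, not shown in the diff.
import { db } from '@sim/db'
import { document } from '@sim/db/schema'

const connectorId = 'kc_123' // hypothetical connector id

// Load every live (non-soft-deleted) document for this connector.
// `userExcluded` stays in the projection so the sync engine can skip
// those rows instead of re-importing them as new documents.
const existingDocs = await db
  .select({
    id: document.id,
    externalId: document.externalId,
    contentHash: document.contentHash,
    userExcluded: document.userExcluded,
  })
  .from(document)
  .where(and(eq(document.connectorId, connectorId), isNull(document.deletedAt)))
```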
@@ -2240,3 +2255,60 @@ export const asyncJobs = pgTable(
    ),
  })
)

/**
 * Knowledge Connector - persistent link to an external source (Confluence, Google Drive, etc.)
 * that syncs documents into a knowledge base.
 */
export const knowledgeConnector = pgTable(
  'knowledge_connector',
  {
    id: text('id').primaryKey(),
    knowledgeBaseId: text('knowledge_base_id')
      .notNull()
      .references(() => knowledgeBase.id, { onDelete: 'cascade' }),
    connectorType: text('connector_type').notNull(),
    credentialId: text('credential_id').notNull(),
    sourceConfig: json('source_config').notNull(),
    syncMode: text('sync_mode').notNull().default('incremental'),
    syncIntervalMinutes: integer('sync_interval_minutes').notNull().default(1440),
    status: text('status').notNull().default('active'),
    lastSyncAt: timestamp('last_sync_at'),
    lastSyncError: text('last_sync_error'),
    lastSyncDocCount: integer('last_sync_doc_count'),
    nextSyncAt: timestamp('next_sync_at'),
    consecutiveFailures: integer('consecutive_failures').notNull().default(0),
    createdAt: timestamp('created_at').notNull().defaultNow(),
    updatedAt: timestamp('updated_at').notNull().defaultNow(),
    deletedAt: timestamp('deleted_at'),
  },
  (table) => ({
    knowledgeBaseIdIdx: index('kc_knowledge_base_id_idx').on(table.knowledgeBaseId),
    // Cron scheduler: WHERE status='active' AND nextSyncAt <= now AND deletedAt IS NULL
    statusNextSyncIdx: index('kc_status_next_sync_idx').on(table.status, table.nextSyncAt),
  })
)
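And a sketch of the cron scheduler query spelled out in the `kc_status_next_sync_idx` comment. Again, `db` and the import paths are assumed:

```typescript
import { and, eq, isNull, lte } from 'drizzle-orm'
// As above, `db` and the schema import paths are assumptions.
import { db } from '@sim/db'
import { knowledgeConnector } from '@sim/db/schema'

// Due connectors: active, past their next_sync_at, and not soft-deleted.
// This predicate is exactly what kc_status_next_sync_idx is built to serve.
const dueConnectors = await db
  .select()
  .from(knowledgeConnector)
  .where(
    and(
      eq(knowledgeConnector.status, 'active'),
      lte(knowledgeConnector.nextSyncAt, new Date()),
      isNull(knowledgeConnector.deletedAt)
    )
  )
```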

/**
 * Knowledge Connector Sync Log - audit trail for connector sync operations.
 */
export const knowledgeConnectorSyncLog = pgTable(
  'knowledge_connector_sync_log',
  {
    id: text('id').primaryKey(),
    connectorId: text('connector_id')
      .notNull()
      .references(() => knowledgeConnector.id, { onDelete: 'cascade' }),
    status: text('status').notNull(),
    startedAt: timestamp('started_at').notNull().defaultNow(),
    completedAt: timestamp('completed_at'),
    docsAdded: integer('docs_added').notNull().default(0),
    docsUpdated: integer('docs_updated').notNull().default(0),
    docsDeleted: integer('docs_deleted').notNull().default(0),
    docsUnchanged: integer('docs_unchanged').notNull().default(0),
    errorMessage: text('error_message'),
  },
  (table) => ({
    connectorIdIdx: index('kcsl_connector_id_idx').on(table.connectorId),
  })
)