mirror of https://github.com/simstudioai/sim.git (synced 2026-02-18 18:25:14 -05:00)

Compare commits (9 commits):

| SHA1 |
| --- |
| da46a387c9 |
| a0afb5d03e |
| cdacb796a8 |
| 3ce54147e6 |
| 08690b2906 |
| 299cc26694 |
| 48715ff013 |
| ad0d0ed1f1 |
| b7e377ec4b |
@@ -1,261 +0,0 @@
---
description: Add a knowledge base connector for syncing documents from an external source
argument-hint: <service-name> [api-docs-url]
---

# Add Connector Skill

You are an expert at adding knowledge base connectors to Sim. A connector syncs documents from an external source (Confluence, Google Drive, Notion, etc.) into a knowledge base.

## Your Task

When the user asks you to create a connector:

1. Use Context7 or WebFetch to read the service's API documentation
2. Create the connector directory and config
3. Register it in the connector registry

## Directory Structure

Create files in `apps/sim/connectors/{service}/`:

```
connectors/{service}/
├── index.ts       # Barrel export
└── {service}.ts   # ConnectorConfig definition
```
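
The barrel export in `index.ts` is typically a one-line re-export (shown here with the same `{service}` placeholder convention used in the snippets below):

```typescript
// connectors/{service}/index.ts
export { {service}Connector } from './{service}'
```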

## ConnectorConfig Structure

```typescript
import { createLogger } from '@sim/logger'
import { {Service}Icon } from '@/components/icons'
import { fetchWithRetry } from '@/lib/knowledge/documents/utils'
import type { ConnectorConfig, ExternalDocument, ExternalDocumentList } from '@/connectors/types'

const logger = createLogger('{Service}Connector')

export const {service}Connector: ConnectorConfig = {
  id: '{service}',
  name: '{Service}',
  description: 'Sync documents from {Service} into your knowledge base',
  version: '1.0.0',
  icon: {Service}Icon,

  oauth: {
    required: true,
    provider: '{service}', // Must match an OAuthService in lib/oauth/types.ts
    requiredScopes: ['read:...'],
  },

  configFields: [
    // Rendered dynamically by the add-connector modal UI
    // Supports 'short-input' and 'dropdown' types
  ],

  listDocuments: async (accessToken, sourceConfig, cursor) => {
    // Paginate via cursor, extract text, compute SHA-256 hash
    // Return { documents: ExternalDocument[], nextCursor?, hasMore }
  },

  getDocument: async (accessToken, sourceConfig, externalId) => {
    // Return ExternalDocument or null
  },

  validateConfig: async (accessToken, sourceConfig) => {
    // Return { valid: true } or { valid: false, error: 'message' }
  },

  // Optional: map source metadata to semantic tag keys (translated to slots by the sync engine)
  mapTags: (metadata) => {
    // Return Record<string, unknown> with keys matching tagDefinitions[].id
  },
}
```
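
To make the contract concrete, here is a hedged sketch of a `listDocuments` body. The `/api/pages` endpoint, the response shape, and the `extractText` helper are hypothetical placeholders for whatever the real service exposes; `fetchWithRetry` and `computeContentHash` are covered in later sections.

```typescript
listDocuments: async (accessToken, sourceConfig, cursor) => {
  // Hypothetical endpoint and response shape: read the real service's API docs.
  const url = new URL(`https://${sourceConfig.domain}/api/pages`)
  if (cursor) url.searchParams.set('cursor', cursor)

  const response = await fetchWithRetry(url.toString(), {
    headers: { Authorization: `Bearer ${accessToken}` },
  })
  if (!response.ok) {
    throw new Error(`{Service} list failed: ${response.status}`)
  }

  const body = await response.json()
  const documents: ExternalDocument[] = await Promise.all(
    body.results.map(
      async (page: { id: string; title: string; html: string; url: string; labels?: string[] }) => {
        const content = extractText(page.html) // hypothetical HTML-to-text helper
        return {
          externalId: page.id,
          title: page.title,
          content,
          mimeType: 'text/plain' as const,
          contentHash: await computeContentHash(content), // see "Content Hashing" below
          sourceUrl: page.url, // full URL back to the original
          metadata: { labels: page.labels ?? [] }, // fed to mapTags
        }
      }
    )
  )

  return { documents, nextCursor: body.nextCursor, hasMore: Boolean(body.nextCursor) }
},
```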

## ConfigField Types

The add-connector modal renders these automatically — no custom UI needed.

```typescript
// Text input
{
  id: 'domain',
  title: 'Domain',
  type: 'short-input',
  placeholder: 'yoursite.example.com',
  required: true,
}

// Dropdown (static options)
{
  id: 'contentType',
  title: 'Content Type',
  type: 'dropdown',
  required: false,
  options: [
    { label: 'Pages only', id: 'page' },
    { label: 'Blog posts only', id: 'blogpost' },
    { label: 'All content', id: 'all' },
  ],
}
```

## ExternalDocument Shape

Every document returned from `listDocuments`/`getDocument` must include:

```typescript
{
  externalId: string                 // Source-specific unique ID
  title: string                      // Document title
  content: string                    // Extracted plain text
  mimeType: 'text/plain'             // Always text/plain (content is extracted)
  contentHash: string                // SHA-256 of content (change detection)
  sourceUrl?: string                 // Link back to original (stored on document record)
  metadata?: Record<string, unknown> // Source-specific data (fed to mapTags)
}
```

## Content Hashing (Required)

The sync engine uses content hashes for change detection:

```typescript
async function computeContentHash(content: string): Promise<string> {
  const data = new TextEncoder().encode(content)
  const hashBuffer = await crypto.subtle.digest('SHA-256', data)
  return Array.from(new Uint8Array(hashBuffer))
    .map((b) => b.toString(16).padStart(2, '0'))
    .join('')
}
```

## tagDefinitions — Declared Tag Definitions

Declare which tags the connector populates, using semantic IDs. These are shown in the add-connector modal as opt-out checkboxes. On connector creation, slots are **dynamically assigned** via `getNextAvailableSlot` — connectors never hardcode slot names.

```typescript
tagDefinitions: [
  { id: 'labels', displayName: 'Labels', fieldType: 'text' },
  { id: 'version', displayName: 'Version', fieldType: 'number' },
  { id: 'lastModified', displayName: 'Last Modified', fieldType: 'date' },
],
```

Each entry has:

- `id`: Semantic key matching a key returned by `mapTags` (e.g. `'labels'`, `'version'`)
- `displayName`: Human-readable name shown in the UI (e.g. "Labels", "Last Modified")
- `fieldType`: `'text'` | `'number'` | `'date'` | `'boolean'` — determines which slot pool to draw from

Users can opt out of specific tags in the modal. Disabled IDs are stored in `sourceConfig.disabledTagIds`, and the assigned mapping (`semantic id → slot`) is stored in `sourceConfig.tagSlotMapping`, as the sketch below illustrates.
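
A simplified sketch of how the sync engine uses this mapping (illustrative only; your connector never does this itself):

```typescript
// Assume a stored mapping like { labels: 'tag1', version: 'number1' }.
const semanticTags = connector.mapTags?.(doc.metadata ?? {}) ?? {}
const slotValues: Record<string, unknown> = {}

for (const [semanticId, value] of Object.entries(semanticTags)) {
  if (sourceConfig.disabledTagIds?.includes(semanticId)) continue // user opted out
  const slot = sourceConfig.tagSlotMapping?.[semanticId] // e.g. 'labels' -> 'tag1'
  if (slot) slotValues[slot] = value // e.g. slotValues.tag1 = 'hr, onboarding'
}
```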

## mapTags — Metadata to Semantic Keys

Maps source metadata to semantic tag keys. Required if `tagDefinitions` is set. The sync engine calls this automatically and translates semantic keys to actual DB slots using the `tagSlotMapping` stored on the connector.

Return keys must match the `id` values declared in `tagDefinitions`.

```typescript
mapTags: (metadata: Record<string, unknown>): Record<string, unknown> => {
  const result: Record<string, unknown> = {}

  // Validate arrays before casting — metadata may be malformed
  const labels = Array.isArray(metadata.labels) ? (metadata.labels as string[]) : []
  if (labels.length > 0) result.labels = labels.join(', ')

  // Validate numbers — guard against NaN
  if (metadata.version != null) {
    const num = Number(metadata.version)
    if (!Number.isNaN(num)) result.version = num
  }

  // Validate dates — guard against Invalid Date
  if (typeof metadata.lastModified === 'string') {
    const date = new Date(metadata.lastModified)
    if (!Number.isNaN(date.getTime())) result.lastModified = date
  }

  return result
}
```

## External API Calls — Use `fetchWithRetry`

All external API calls must use `fetchWithRetry` from `@/lib/knowledge/documents/utils` instead of raw `fetch()`. It provides exponential backoff with retries on 429/502/503/504 errors and returns a standard `Response`, so `.ok`, `.json()`, and `.text()` all work unchanged.

For `validateConfig` (user-facing, called on save), pass `VALIDATE_RETRY_OPTIONS` to cap total wait time at ~7s. Background operations (`listDocuments`, `getDocument`) use the built-in defaults (5 retries, ~31s max).

```typescript
import { VALIDATE_RETRY_OPTIONS, fetchWithRetry } from '@/lib/knowledge/documents/utils'

// Background sync — use defaults
const response = await fetchWithRetry(url, {
  method: 'GET',
  headers: { Authorization: `Bearer ${accessToken}` },
})

// validateConfig — tighter retry budget
const response = await fetchWithRetry(url, { ... }, VALIDATE_RETRY_OPTIONS)
```
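
A hedged sketch of a full `validateConfig` under these rules (the `/api/me` endpoint is a hypothetical stand-in for whatever cheap authenticated call the service offers):

```typescript
validateConfig: async (accessToken, sourceConfig) => {
  try {
    const response = await fetchWithRetry(
      `https://${sourceConfig.domain}/api/me`, // hypothetical cheap auth check
      { headers: { Authorization: `Bearer ${accessToken}` } },
      VALIDATE_RETRY_OPTIONS // user-facing: cap total retry wait at ~7s
    )
    if (!response.ok) {
      return { valid: false, error: `Could not reach {Service} (HTTP ${response.status})` }
    }
    return { valid: true }
  } catch (error) {
    return { valid: false, error: error instanceof Error ? error.message : 'Validation failed' }
  }
},
```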

## sourceUrl

If `ExternalDocument.sourceUrl` is set, the sync engine stores it on the document record. Always construct the full URL, not a relative path.

## Sync Engine Behavior (Do Not Modify)

The sync engine (`lib/knowledge/connectors/sync-engine.ts`) is connector-agnostic. It:

1. Calls `listDocuments` with pagination until `hasMore` is false
2. Compares `contentHash` to detect new/changed/unchanged documents
3. Stores `sourceUrl` and calls `mapTags` on insert/update automatically
4. Handles soft-delete of removed documents

You never need to modify the sync engine when adding a connector; the sketch below only shows where your callbacks fit.
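
Roughly (simplified sketch; the real logic lives in `sync-engine.ts`):

```typescript
let cursor: string | undefined
let hasMore = true
while (hasMore) {
  const page = await connector.listDocuments(accessToken, sourceConfig, cursor)
  // For each document: unchanged contentHash -> skip; new/changed -> upsert,
  // store sourceUrl, and apply mapTags via the stored tagSlotMapping.
  cursor = page.nextCursor
  hasMore = page.hasMore
}
// Documents no longer returned by the source are soft-deleted.
```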

## OAuth Credential Reuse

Connectors reuse the existing OAuth infrastructure. The `oauth.provider` must match an `OAuthService` from `apps/sim/lib/oauth/types.ts`. Check the existing providers before adding a new one.

## Icon

The `icon` field on `ConnectorConfig` is used throughout the UI — in the connector list, the add-connector modal, and as the document icon in the knowledge base table (replacing the generic file-type icon for connector-sourced documents). The icon is read from `CONNECTOR_REGISTRY[connectorType].icon` at runtime — there is no separate icon map to maintain.

If the service already has an icon in `apps/sim/components/icons.tsx` (from a tool integration), reuse it. Otherwise, ask the user to provide the SVG.

## Registering

Add one line to `apps/sim/connectors/registry.ts`:

```typescript
import { {service}Connector } from '@/connectors/{service}'

export const CONNECTOR_REGISTRY: ConnectorRegistry = {
  // ... existing connectors ...
  {service}: {service}Connector,
}
```

## Reference Implementation

See `apps/sim/connectors/confluence/confluence.ts` for a complete example with:

- Multiple config field types (text + dropdown)
- Label fetching and CQL search filtering
- Blogpost + page content types
- `mapTags` mapping labels, version, and dates to semantic keys

## Checklist

- [ ] Created `connectors/{service}/{service}.ts` with a full ConnectorConfig
- [ ] Created `connectors/{service}/index.ts` barrel export
- [ ] `oauth.provider` matches an existing OAuthService in `lib/oauth/types.ts`
- [ ] `listDocuments` handles pagination and computes content hashes
- [ ] `sourceUrl` set on each ExternalDocument (full URL, not relative)
- [ ] `metadata` includes source-specific data for tag mapping
- [ ] `tagDefinitions` declared for each semantic key returned by `mapTags`
- [ ] `mapTags` implemented if the source has useful metadata (labels, dates, versions)
- [ ] `validateConfig` verifies the source is accessible
- [ ] All external API calls use `fetchWithRetry` (not raw `fetch`)
- [ ] All optional config fields validated in `validateConfig`
- [ ] Icon exists in `components/icons.tsx` (or asked the user to provide the SVG)
- [ ] Registered in `connectors/registry.ts`

@@ -59,12 +59,6 @@ body {
  --content-gap: 1.75rem;
}

/* Remove custom layout variable overrides to fall back to fumadocs defaults */

/* ============================================
   Navbar Light Mode Styling
   ============================================ */

/* Light mode navbar and search styling */
:root:not(.dark) nav {
  background-color: hsla(0, 0%, 96%, 0.85) !important;

@@ -88,10 +82,6 @@ body {
  -webkit-backdrop-filter: blur(25px) saturate(180%) brightness(0.6) !important;
}

/* ============================================
   Custom Sidebar Styling (Turborepo-inspired)
   ============================================ */

/* Floating sidebar appearance - remove background */
[data-sidebar-container],
#nd-sidebar {

@@ -468,10 +458,6 @@ aside[data-sidebar],
  writing-mode: horizontal-tb !important;
}

/* ============================================
   Code Block Styling (Improved)
   ============================================ */

/* Apply Geist Mono to code elements */
code,
pre,

@@ -532,10 +518,6 @@ pre code .line {
  color: var(--color-fd-primary);
}

/* ============================================
   TOC (Table of Contents) Styling
   ============================================ */

/* Remove the thin border-left on nested TOC items (keeps main indicator only) */
#nd-toc a[style*="padding-inline-start"] {
  border-left: none !important;

@@ -554,10 +536,6 @@ main article,
  padding-bottom: 4rem;
}

/* ============================================
   Center and Constrain Main Content Width
   ============================================ */

/* Main content area - center and constrain like turborepo/raindrop */
/* Note: --sidebar-offset and --toc-offset are now applied at #nd-docs-layout level */
main[data-main] {

@@ -130,37 +130,4 @@ Update multiple existing records in an Airtable table

| `records` | json | Array of updated Airtable records |
| `metadata` | json | Operation metadata including record count and updated record IDs |

### `airtable_list_bases`

List all bases the authenticated user has access to

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `bases` | json | Array of Airtable bases with id, name, and permissionLevel |
| `metadata` | json | Operation metadata including total bases count |

### `airtable_get_base_schema`

Get the schema of all tables, fields, and views in an Airtable base

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `baseId` | string | Yes | Airtable base ID \(starts with "app", e.g., "appXXXXXXXXXXXXXX"\) |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `tables` | json | Array of table schemas with fields and views |
| `metadata` | json | Operation metadata including total tables count |

@@ -234,7 +234,6 @@ List actions from incident.io. Optionally filter by incident ID.

| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | incident.io API Key |
| `incident_id` | string | No | Filter actions by incident ID \(e.g., "01FCNDV6P870EA6S7TK1DSYDG0"\) |
| `page_size` | number | No | Number of actions to return per page \(e.g., 10, 25, 50\) |

#### Output

@@ -309,7 +308,6 @@ List follow-ups from incident.io. Optionally filter by incident ID.

| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | incident.io API Key |
| `incident_id` | string | No | Filter follow-ups by incident ID \(e.g., "01FCNDV6P870EA6S7TK1DSYDG0"\) |
| `page_size` | number | No | Number of follow-ups to return per page \(e.g., 10, 25, 50\) |

#### Output

@@ -396,6 +394,7 @@ List all users in your Incident.io workspace. Returns user details including id,

| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Incident.io API Key |
| `page_size` | number | No | Number of results to return per page \(e.g., 10, 25, 50\). Default: 25 |
| `after` | string | No | Pagination cursor to fetch the next page of results |

#### Output

@@ -406,6 +405,10 @@ List all users in your Incident.io workspace. Returns user details including id,

| ↳ `name` | string | Full name of the user |
| ↳ `email` | string | Email address of the user |
| ↳ `role` | string | Role of the user in the workspace |
| `pagination_meta` | object | Pagination metadata |
| ↳ `after` | string | Cursor for next page |
| ↳ `page_size` | number | Number of items per page |
| ↳ `total_record_count` | number | Total number of records |

### `incidentio_users_show`

@@ -644,7 +647,6 @@ List all escalation policies in incident.io

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | incident.io API Key |
| `page_size` | number | No | Number of results per page \(e.g., 10, 25, 50\). Default: 25 |

#### Output

@@ -29,7 +29,7 @@ In Sim, the Knowledge Base block enables your agents to perform intelligent sema

## Usage Instructions

Integrate Knowledge into the workflow. Perform full CRUD operations on documents, chunks, and tags.
Integrate Knowledge into the workflow. Can search, upload chunks, and create documents.

@@ -126,161 +126,4 @@ Create a new document in a knowledge base

| `message` | string | Success or error message describing the operation result |
| `documentId` | string | ID of the created document |

### `knowledge_list_tags`

List all tag definitions for a knowledge base

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `knowledgeBaseId` | string | Yes | ID of the knowledge base to list tags for |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `knowledgeBaseId` | string | ID of the knowledge base |
| `tags` | array | Array of tag definitions for the knowledge base |
| ↳ `id` | string | Tag definition ID |
| ↳ `tagSlot` | string | Internal tag slot \(e.g. tag1, number1\) |
| ↳ `displayName` | string | Human-readable tag name |
| ↳ `fieldType` | string | Tag field type \(text, number, date, boolean\) |
| ↳ `createdAt` | string | Creation timestamp |
| ↳ `updatedAt` | string | Last update timestamp |
| `totalTags` | number | Total number of tag definitions |

### `knowledge_list_documents`

List documents in a knowledge base with optional filtering, search, and pagination

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `knowledgeBaseId` | string | Yes | ID of the knowledge base to list documents from |
| `search` | string | No | Search query to filter documents by filename |
| `enabledFilter` | string | No | Filter by enabled status: "all", "enabled", or "disabled" |
| `limit` | number | No | Maximum number of documents to return \(default: 50\) |
| `offset` | number | No | Number of documents to skip for pagination \(default: 0\) |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `knowledgeBaseId` | string | ID of the knowledge base |
| `documents` | array | Array of documents in the knowledge base |
| ↳ `id` | string | Document ID |
| ↳ `filename` | string | Document filename |
| ↳ `fileSize` | number | File size in bytes |
| ↳ `mimeType` | string | MIME type of the document |
| ↳ `enabled` | boolean | Whether the document is enabled |
| ↳ `processingStatus` | string | Processing status \(pending, processing, completed, failed\) |
| ↳ `chunkCount` | number | Number of chunks in the document |
| ↳ `tokenCount` | number | Total token count across chunks |
| ↳ `uploadedAt` | string | Upload timestamp |
| ↳ `updatedAt` | string | Last update timestamp |
| `totalDocuments` | number | Total number of documents matching the filter |
| `limit` | number | Page size used |
| `offset` | number | Offset used for pagination |

### `knowledge_delete_document`

Delete a document from a knowledge base

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `knowledgeBaseId` | string | Yes | ID of the knowledge base containing the document |
| `documentId` | string | Yes | ID of the document to delete |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `documentId` | string | ID of the deleted document |
| `message` | string | Confirmation message |

### `knowledge_list_chunks`

List chunks for a document in a knowledge base with optional filtering and pagination

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `knowledgeBaseId` | string | Yes | ID of the knowledge base |
| `documentId` | string | Yes | ID of the document to list chunks from |
| `search` | string | No | Search query to filter chunks by content |
| `enabled` | string | No | Filter by enabled status: "true", "false", or "all" \(default: "all"\) |
| `limit` | number | No | Maximum number of chunks to return \(1-100, default: 50\) |
| `offset` | number | No | Number of chunks to skip for pagination \(default: 0\) |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `knowledgeBaseId` | string | ID of the knowledge base |
| `documentId` | string | ID of the document |
| `chunks` | array | Array of chunks in the document |
| ↳ `id` | string | Chunk ID |
| ↳ `chunkIndex` | number | Index of the chunk within the document |
| ↳ `content` | string | Chunk text content |
| ↳ `contentLength` | number | Content length in characters |
| ↳ `tokenCount` | number | Token count for the chunk |
| ↳ `enabled` | boolean | Whether the chunk is enabled |
| ↳ `createdAt` | string | Creation timestamp |
| ↳ `updatedAt` | string | Last update timestamp |
| `totalChunks` | number | Total number of chunks matching the filter |
| `limit` | number | Page size used |
| `offset` | number | Offset used for pagination |

### `knowledge_update_chunk`

Update the content or enabled status of a chunk in a knowledge base

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `knowledgeBaseId` | string | Yes | ID of the knowledge base |
| `documentId` | string | Yes | ID of the document containing the chunk |
| `chunkId` | string | Yes | ID of the chunk to update |
| `content` | string | No | New content for the chunk |
| `enabled` | boolean | No | Whether the chunk should be enabled or disabled |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `documentId` | string | ID of the parent document |
| `id` | string | Chunk ID |
| `chunkIndex` | number | Index of the chunk within the document |
| `content` | string | Updated chunk content |
| `contentLength` | number | Content length in characters |
| `tokenCount` | number | Token count for the chunk |
| `enabled` | boolean | Whether the chunk is enabled |
| `updatedAt` | string | Last update timestamp |

### `knowledge_delete_chunk`

Delete a chunk from a document in a knowledge base

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `knowledgeBaseId` | string | Yes | ID of the knowledge base |
| `documentId` | string | Yes | ID of the document containing the chunk |
| `chunkId` | string | Yes | ID of the chunk to delete |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `chunkId` | string | ID of the deleted chunk |
| `documentId` | string | ID of the parent document |
| `message` | string | Confirmation message |

@@ -49,6 +49,7 @@ Retrieve all deals from Pipedrive with optional filters

| `pipeline_id` | string | No | If supplied, only deals in the specified pipeline are returned \(e.g., "1"\) |
| `updated_since` | string | No | If set, only deals updated after this time are returned. Format: 2025-01-01T10:20:00Z |
| `limit` | string | No | Number of results to return \(e.g., "50", default: 100, max: 500\) |
| `cursor` | string | No | For pagination, the marker representing the first item on the next page |

#### Output

@@ -74,6 +75,8 @@ Retrieve all deals from Pipedrive with optional filters

| `metadata` | object | Pagination metadata for the response |
| ↳ `total_items` | number | Total number of items |
| ↳ `has_more` | boolean | Whether more items are available |
| ↳ `next_cursor` | string | Cursor for fetching the next page \(v2 endpoints\) |
| ↳ `next_start` | number | Offset for fetching the next page \(v1 endpoints\) |
| `success` | boolean | Operation success status |

### `pipedrive_get_deal`

@@ -148,10 +151,9 @@ Retrieve files from Pipedrive with optional filters

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `deal_id` | string | No | Filter files by deal ID \(e.g., "123"\) |
| `person_id` | string | No | Filter files by person ID \(e.g., "456"\) |
| `org_id` | string | No | Filter files by organization ID \(e.g., "789"\) |
| `limit` | string | No | Number of results to return \(e.g., "50", default: 100, max: 500\) |
| `sort` | string | No | Sort files by field \(supported: "id", "update_time"\) |
| `limit` | string | No | Number of results to return \(e.g., "50", default: 100, max: 100\) |
| `start` | string | No | Pagination start offset \(0-based index of the first item to return\) |
| `downloadFiles` | boolean | No | Download file contents into file outputs |

#### Output

@@ -171,6 +173,8 @@ Retrieve files from Pipedrive with optional filters

| ↳ `url` | string | File download URL |
| `downloadedFiles` | file[] | Downloaded files from Pipedrive |
| `total_items` | number | Total number of files returned |
| `has_more` | boolean | Whether more files are available |
| `next_start` | number | Offset for fetching the next page |
| `success` | boolean | Operation success status |

### `pipedrive_get_mail_messages`

@@ -183,6 +187,7 @@ Retrieve mail threads from Pipedrive mailbox

| --------- | ---- | -------- | ----------- |
| `folder` | string | No | Filter by folder: inbox, drafts, sent, archive \(default: inbox\) |
| `limit` | string | No | Number of results to return \(e.g., "25", default: 50\) |
| `start` | string | No | Pagination start offset \(0-based index of the first item to return\) |

#### Output

@@ -190,6 +195,8 @@ Retrieve mail threads from Pipedrive mailbox

| --------- | ---- | ----------- |
| `messages` | array | Array of mail thread objects from Pipedrive mailbox |
| `total_items` | number | Total number of mail threads returned |
| `has_more` | boolean | Whether more messages are available |
| `next_start` | number | Offset for fetching the next page |
| `success` | boolean | Operation success status |

### `pipedrive_get_mail_thread`

@@ -221,7 +228,7 @@ Retrieve all pipelines from Pipedrive

| `sort_by` | string | No | Field to sort by: id, update_time, add_time \(default: id\) |
| `sort_direction` | string | No | Sorting direction: asc, desc \(default: asc\) |
| `limit` | string | No | Number of results to return \(e.g., "50", default: 100, max: 500\) |
| `cursor` | string | No | For pagination, the marker representing the first item on the next page |
| `start` | string | No | Pagination start offset \(0-based index of the first item to return\) |

#### Output

@@ -237,6 +244,8 @@ Retrieve all pipelines from Pipedrive

| ↳ `add_time` | string | When the pipeline was created |
| ↳ `update_time` | string | When the pipeline was last updated |
| `total_items` | number | Total number of pipelines returned |
| `has_more` | boolean | Whether more pipelines are available |
| `next_start` | number | Offset for fetching the next page |
| `success` | boolean | Operation success status |

### `pipedrive_get_pipeline_deals`

@@ -249,8 +258,8 @@ Retrieve all deals in a specific pipeline

| --------- | ---- | -------- | ----------- |
| `pipeline_id` | string | Yes | The ID of the pipeline \(e.g., "1"\) |
| `stage_id` | string | No | Filter by specific stage within the pipeline \(e.g., "2"\) |
| `status` | string | No | Filter by deal status: open, won, lost |
| `limit` | string | No | Number of results to return \(e.g., "50", default: 100, max: 500\) |
| `start` | string | No | Pagination start offset \(0-based index of the first item to return\) |

#### Output

@@ -271,6 +280,7 @@ Retrieve all projects or a specific project from Pipedrive

| `project_id` | string | No | Optional: ID of a specific project to retrieve \(e.g., "123"\) |
| `status` | string | No | Filter by project status: open, completed, deleted \(only for listing all\) |
| `limit` | string | No | Number of results to return \(e.g., "50", default: 100, max: 500, only for listing all\) |
| `cursor` | string | No | For pagination, the marker representing the first item on the next page |

#### Output

@@ -279,6 +289,8 @@ Retrieve all projects or a specific project from Pipedrive

| `projects` | array | Array of project objects \(when listing all\) |
| `project` | object | Single project object \(when project_id is provided\) |
| `total_items` | number | Total number of projects returned |
| `has_more` | boolean | Whether more projects are available |
| `next_cursor` | string | Cursor for fetching the next page |
| `success` | boolean | Operation success status |

### `pipedrive_create_project`

@@ -309,12 +321,11 @@ Retrieve activities (tasks) from Pipedrive with optional filters

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `deal_id` | string | No | Filter activities by deal ID \(e.g., "123"\) |
| `person_id` | string | No | Filter activities by person ID \(e.g., "456"\) |
| `org_id` | string | No | Filter activities by organization ID \(e.g., "789"\) |
| `user_id` | string | No | Filter activities by user ID \(e.g., "123"\) |
| `type` | string | No | Filter by activity type \(call, meeting, task, deadline, email, lunch\) |
| `done` | string | No | Filter by completion status: 0 for not done, 1 for done |
| `limit` | string | No | Number of results to return \(e.g., "50", default: 100, max: 500\) |
| `start` | string | No | Pagination start offset \(0-based index of the first item to return\) |

#### Output

@@ -335,6 +346,8 @@ Retrieve activities (tasks) from Pipedrive with optional filters

| ↳ `add_time` | string | When the activity was created |
| ↳ `update_time` | string | When the activity was last updated |
| `total_items` | number | Total number of activities returned |
| `has_more` | boolean | Whether more activities are available |
| `next_start` | number | Offset for fetching the next page |
| `success` | boolean | Operation success status |

### `pipedrive_create_activity`

@@ -399,6 +412,7 @@ Retrieve all leads or a specific lead from Pipedrive

| `person_id` | string | No | Filter by person ID \(e.g., "456"\) |
| `organization_id` | string | No | Filter by organization ID \(e.g., "789"\) |
| `limit` | string | No | Number of results to return \(e.g., "50", default: 100, max: 500\) |
| `start` | string | No | Pagination start offset \(0-based index of the first item to return\) |

#### Output

@@ -433,6 +447,8 @@ Retrieve all leads or a specific lead from Pipedrive

| ↳ `add_time` | string | When the lead was created \(ISO 8601\) |
| ↳ `update_time` | string | When the lead was last updated \(ISO 8601\) |
| `total_items` | number | Total number of leads returned |
| `has_more` | boolean | Whether more leads are available |
| `next_start` | number | Offset for fetching the next page |
| `success` | boolean | Operation success status |

### `pipedrive_create_lead`

@@ -57,6 +57,7 @@ Query data from a Supabase table

| `filter` | string | No | PostgREST filter \(e.g., "id=eq.123"\) |
| `orderBy` | string | No | Column to order by \(add DESC for descending\) |
| `limit` | number | No | Maximum number of rows to return |
| `offset` | number | No | Number of rows to skip \(for pagination\) |
| `apiKey` | string | Yes | Your Supabase service role secret key |

#### Output

@@ -211,6 +212,7 @@ Perform full-text search on a Supabase table

| `searchType` | string | No | Search type: plain, phrase, or websearch \(default: websearch\) |
| `language` | string | No | Language for text search configuration \(default: english\) |
| `limit` | number | No | Maximum number of rows to return |
| `offset` | number | No | Number of rows to skip \(for pagination\) |
| `apiKey` | string | Yes | Your Supabase service role secret key |

#### Output

@@ -43,6 +43,8 @@ Retrieve form responses from Typeform

| `formId` | string | Yes | Typeform form ID \(e.g., "abc123XYZ"\) |
| `apiKey` | string | Yes | Typeform Personal Access Token |
| `pageSize` | number | No | Number of responses to retrieve \(e.g., 10, 25, 50\) |
| `before` | string | No | Cursor token for fetching the next page of older responses |
| `after` | string | No | Cursor token for fetching the next page of newer responses |
| `since` | string | No | Retrieve responses submitted after this date \(e.g., "2024-01-01T00:00:00Z"\) |
| `until` | string | No | Retrieve responses submitted before this date \(e.g., "2024-12-31T23:59:59Z"\) |
| `completed` | string | No | Filter by completion status \(e.g., "true", "false", "all"\) |

@@ -67,10 +67,9 @@ Retrieve a list of tickets from Zendesk with optional filtering

| `type` | string | No | Filter by type: "problem", "incident", "question", or "task" |
| `assigneeId` | string | No | Filter by assignee user ID as a numeric string \(e.g., "12345"\) |
| `organizationId` | string | No | Filter by organization ID as a numeric string \(e.g., "67890"\) |
| `sortBy` | string | No | Sort field: "created_at", "updated_at", "priority", or "status" |
| `sortOrder` | string | No | Sort order: "asc" or "desc" |
| `sort` | string | No | Sort field for ticket listing \(only applies without filters\): "updated_at", "id", or "status". Prefix with "-" for descending \(e.g., "-updated_at"\) |
| `perPage` | string | No | Results per page as a number string \(default: "100", max: "100"\) |
| `page` | string | No | Page number as a string \(e.g., "1", "2"\) |
| `pageAfter` | string | No | Cursor from a previous response to fetch the next page of results |

#### Output

@@ -129,10 +128,10 @@ Retrieve a list of tickets from Zendesk with optional filtering

| ↳ `from_messaging_channel` | boolean | Whether the ticket originated from a messaging channel |
| ↳ `ticket_form_id` | number | Ticket form ID |
| ↳ `generated_timestamp` | number | Unix timestamp of the ticket generation |
| `paging` | object | Pagination information |
| `paging` | object | Cursor-based pagination information |
| ↳ `after_cursor` | string | Cursor for fetching the next page of results |
| ↳ `has_more` | boolean | Whether more results are available |
| ↳ `next_page` | string | URL for next page of results |
| ↳ `previous_page` | string | URL for previous page of results |
| ↳ `count` | number | Total count of items |
| `metadata` | object | Response metadata |
| ↳ `total_returned` | number | Number of items returned in this response |
| ↳ `has_more` | boolean | Whether more items are available |

@@ -515,7 +514,7 @@ Retrieve a list of users from Zendesk with optional filtering

| `role` | string | No | Filter by role: "end-user", "agent", or "admin" |
| `permissionSet` | string | No | Filter by permission set ID as a numeric string \(e.g., "12345"\) |
| `perPage` | string | No | Results per page as a number string \(default: "100", max: "100"\) |
| `page` | string | No | Page number as a string \(e.g., "1", "2"\) |
| `pageAfter` | string | No | Cursor from a previous response to fetch the next page of results |

#### Output

@@ -563,10 +562,10 @@ Retrieve a list of users from Zendesk with optional filtering

| ↳ `shared` | boolean | Whether the user is shared from a different Zendesk |
| ↳ `shared_agent` | boolean | Whether the agent is shared from a different Zendesk |
| ↳ `remote_photo_url` | string | URL to a remote photo |
| `paging` | object | Pagination information |
| `paging` | object | Cursor-based pagination information |
| ↳ `after_cursor` | string | Cursor for fetching the next page of results |
| ↳ `has_more` | boolean | Whether more results are available |
| ↳ `next_page` | string | URL for next page of results |
| ↳ `previous_page` | string | URL for previous page of results |
| ↳ `count` | number | Total count of items |
| `metadata` | object | Response metadata |
| ↳ `total_returned` | number | Number of items returned in this response |
| ↳ `has_more` | boolean | Whether more items are available |

@@ -706,7 +705,7 @@ Search for users in Zendesk using a query string

| `query` | string | No | Search query string \(e.g., user name or email\) |
| `externalId` | string | No | External ID to search by \(your system identifier\) |
| `perPage` | string | No | Results per page as a number string \(default: "100", max: "100"\) |
| `page` | string | No | Page number as a string \(e.g., "1", "2"\) |
| `page` | string | No | Page number for pagination \(1-based\) |

#### Output

@@ -754,10 +753,10 @@ Search for users in Zendesk using a query string

| ↳ `shared` | boolean | Whether the user is shared from a different Zendesk |
| ↳ `shared_agent` | boolean | Whether the agent is shared from a different Zendesk |
| ↳ `remote_photo_url` | string | URL to a remote photo |
| `paging` | object | Pagination information |
| `paging` | object | Cursor-based pagination information |
| ↳ `after_cursor` | string | Cursor for fetching the next page of results |
| ↳ `has_more` | boolean | Whether more results are available |
| ↳ `next_page` | string | URL for next page of results |
| ↳ `previous_page` | string | URL for previous page of results |
| ↳ `count` | number | Total count of items |
| `metadata` | object | Response metadata |
| ↳ `total_returned` | number | Number of items returned in this response |
| ↳ `has_more` | boolean | Whether more items are available |

@@ -999,7 +998,7 @@ Retrieve a list of organizations from Zendesk

| `apiToken` | string | Yes | Zendesk API token |
| `subdomain` | string | Yes | Your Zendesk subdomain \(e.g., "mycompany" for mycompany.zendesk.com\) |
| `perPage` | string | No | Results per page as a number string \(default: "100", max: "100"\) |
| `page` | string | No | Page number as a string \(e.g., "1", "2"\) |
| `pageAfter` | string | No | Cursor from a previous response to fetch the next page of results |

#### Output

@@ -1020,10 +1019,10 @@ Retrieve a list of organizations from Zendesk

| ↳ `created_at` | string | When the organization was created \(ISO 8601 format\) |
| ↳ `updated_at` | string | When the organization was last updated \(ISO 8601 format\) |
| ↳ `external_id` | string | External ID for linking to external records |
| `paging` | object | Pagination information |
| `paging` | object | Cursor-based pagination information |
| ↳ `after_cursor` | string | Cursor for fetching the next page of results |
| ↳ `has_more` | boolean | Whether more results are available |
| ↳ `next_page` | string | URL for next page of results |
| ↳ `previous_page` | string | URL for previous page of results |
| ↳ `count` | number | Total count of items |
| `metadata` | object | Response metadata |
| ↳ `total_returned` | number | Number of items returned in this response |
| ↳ `has_more` | boolean | Whether more items are available |

@@ -1075,7 +1074,7 @@ Autocomplete organizations in Zendesk by name prefix (for name matching/autocomp

| `subdomain` | string | Yes | Your Zendesk subdomain |
| `name` | string | Yes | Organization name prefix to search for \(e.g., "Acme"\) |
| `perPage` | string | No | Results per page as a number string \(default: "100", max: "100"\) |
| `page` | string | No | Page number as a string \(e.g., "1", "2"\) |
| `page` | string | No | Page number for pagination \(1-based\) |

#### Output

@@ -1096,10 +1095,10 @@ Autocomplete organizations in Zendesk by name prefix (for name matching/autocomp

| ↳ `created_at` | string | When the organization was created \(ISO 8601 format\) |
| ↳ `updated_at` | string | When the organization was last updated \(ISO 8601 format\) |
| ↳ `external_id` | string | External ID for linking to external records |
| `paging` | object | Pagination information |
| `paging` | object | Cursor-based pagination information |
| ↳ `after_cursor` | string | Cursor for fetching the next page of results |
| ↳ `has_more` | boolean | Whether more results are available |
| ↳ `next_page` | string | URL for next page of results |
| ↳ `previous_page` | string | URL for previous page of results |
| ↳ `count` | number | Total count of items |
| `metadata` | object | Response metadata |
| ↳ `total_returned` | number | Number of items returned in this response |
| ↳ `has_more` | boolean | Whether more items are available |

@@ -1249,19 +1248,18 @@ Unified search across tickets, users, and organizations in Zendesk

| `apiToken` | string | Yes | Zendesk API token |
| `subdomain` | string | Yes | Your Zendesk subdomain |
| `query` | string | Yes | Search query string using Zendesk search syntax \(e.g., "type:ticket status:open"\) |
| `sortBy` | string | No | Sort field: "relevance", "created_at", "updated_at", "priority", "status", or "ticket_type" |
| `sortOrder` | string | No | Sort order: "asc" or "desc" |
| `filterType` | string | Yes | Resource type to search for: "ticket", "user", "organization", or "group" |
| `perPage` | string | No | Results per page as a number string \(default: "100", max: "100"\) |
| `page` | string | No | Page number as a string \(e.g., "1", "2"\) |
| `pageAfter` | string | No | Cursor from a previous response to fetch the next page of results |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `paging` | object | Pagination information |
| `paging` | object | Cursor-based pagination information |
| ↳ `after_cursor` | string | Cursor for fetching the next page of results |
| ↳ `has_more` | boolean | Whether more results are available |
| ↳ `next_page` | string | URL for next page of results |
| ↳ `previous_page` | string | URL for previous page of results |
| ↳ `count` | number | Total count of items |
| `metadata` | object | Response metadata |
| ↳ `total_returned` | number | Number of items returned in this response |
| ↳ `has_more` | boolean | Whether more items are available |

@@ -1,5 +1,3 @@
'use server'

import { env } from '@/lib/core/config/env'
import { isProd } from '@/lib/core/config/feature-flags'

@@ -85,7 +85,7 @@ export const LandingNode = React.memo(function LandingNode({ data }: { data: Lan
    transform: isAnimated ? 'translateY(0) scale(1)' : 'translateY(8px) scale(0.98)',
    transition:
      'opacity 0.6s cubic-bezier(0.22, 1, 0.36, 1), transform 0.6s cubic-bezier(0.22, 1, 0.36, 1)',
    willChange: 'transform, opacity',
    willChange: isAnimated ? 'auto' : 'transform, opacity',
  }}
>
  <LandingBlock icon={data.icon} color={data.color} name={data.name} tags={data.tags} />

@@ -67,7 +67,6 @@ export const LandingEdge = React.memo(function LandingEdge(props: EdgeProps) {
    strokeLinejoin: 'round',
    pointerEvents: 'none',
    animation: `landing-edge-dash-${id} 1s linear infinite`,
    willChange: 'stroke-dashoffset',
    ...style,
  }}
/>

@@ -754,3 +754,100 @@ input[type="search"]::-ms-clear {
  text-decoration: none !important;
  color: inherit !important;
}

/**
 * Respect the user's prefers-reduced-motion setting (WCAG 2.3.3).
 * Disables animations and transitions for users who prefer reduced motion.
 */
@media (prefers-reduced-motion: reduce) {
  *,
  *::before,
  *::after {
    animation-duration: 0.01ms !important;
    animation-iteration-count: 1 !important;
    transition-duration: 0.01ms !important;
    scroll-behavior: auto !important;
  }
}

/* WandPromptBar status indicator */
@keyframes smoke-pulse {
  0%,
  100% {
    transform: scale(0.8);
    opacity: 0.4;
  }
  50% {
    transform: scale(1.1);
    opacity: 0.8;
  }
}

.status-indicator {
  position: relative;
  width: 12px;
  height: 12px;
  border-radius: 50%;
  overflow: hidden;
  background-color: hsl(var(--muted-foreground) / 0.5);
  transition: background-color 0.3s ease;
}

.status-indicator.streaming {
  background-color: transparent;
}

.status-indicator.streaming::before {
  content: "";
  position: absolute;
  inset: 0;
  border-radius: 50%;
  background: radial-gradient(
    circle,
    hsl(var(--primary) / 0.9) 0%,
    hsl(var(--primary) / 0.4) 60%,
    transparent 80%
  );
  animation: smoke-pulse 1.8s ease-in-out infinite;
  opacity: 0.9;
}

.dark .status-indicator.streaming::before {
  background: #6b7280;
  opacity: 0.9;
  animation: smoke-pulse 1.8s ease-in-out infinite;
}

/* MessageContainer loading dot */
@keyframes growShrink {
  0%,
  100% {
    transform: scale(0.9);
  }
  50% {
    transform: scale(1.1);
  }
}

.loading-dot {
  animation: growShrink 1.5s infinite ease-in-out;
}

/* Subflow node z-index and drag-over styles */
.workflow-container .react-flow__node-subflowNode {
  z-index: -1 !important;
}

.workflow-container .react-flow__node-subflowNode:has([data-subflow-selected="true"]) {
  z-index: 10 !important;
}

.loop-node-drag-over,
.parallel-node-drag-over {
  box-shadow: 0 0 0 1.75px var(--brand-secondary) !important;
  border-radius: 8px !important;
}

.react-flow__node[data-parent-node-id] .react-flow__handle {
  z-index: 30;
}

@@ -1,300 +0,0 @@
/**
 * @vitest-environment node
 */
import { createMockRequest, mockConsoleLogger, mockDrizzleOrm } from '@sim/testing'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'

vi.mock('@sim/db/schema', () => ({
  document: {
    id: 'id',
    connectorId: 'connectorId',
    deletedAt: 'deletedAt',
    filename: 'filename',
    externalId: 'externalId',
    sourceUrl: 'sourceUrl',
    enabled: 'enabled',
    userExcluded: 'userExcluded',
    uploadedAt: 'uploadedAt',
    processingStatus: 'processingStatus',
  },
  knowledgeConnector: {
    id: 'id',
    knowledgeBaseId: 'knowledgeBaseId',
    deletedAt: 'deletedAt',
  },
}))

vi.mock('@/app/api/knowledge/utils', () => ({
  checkKnowledgeBaseAccess: vi.fn(),
  checkKnowledgeBaseWriteAccess: vi.fn(),
}))
vi.mock('@/lib/auth/hybrid', () => ({
  checkSessionOrInternalAuth: vi.fn(),
}))
vi.mock('@/lib/core/utils/request', () => ({
  generateRequestId: vi.fn().mockReturnValue('test-req-id'),
}))

mockDrizzleOrm()
mockConsoleLogger()

describe('Connector Documents API Route', () => {
  /**
   * The route chains db calls in sequence. We track call order
   * to return different values for the connector lookup vs document queries.
   */
  let limitCallCount: number
  let orderByCallCount: number

  const mockDbChain = {
    select: vi.fn().mockReturnThis(),
    from: vi.fn().mockReturnThis(),
    where: vi.fn().mockReturnThis(),
    orderBy: vi.fn(() => {
      orderByCallCount++
      return Promise.resolve([])
    }),
    limit: vi.fn(() => {
      limitCallCount++
      return Promise.resolve([])
    }),
    update: vi.fn().mockReturnThis(),
    set: vi.fn().mockReturnThis(),
    returning: vi.fn().mockResolvedValue([]),
  }

  const mockParams = Promise.resolve({ id: 'kb-123', connectorId: 'conn-456' })

  beforeEach(() => {
    vi.clearAllMocks()
    limitCallCount = 0
    orderByCallCount = 0
    mockDbChain.select.mockReturnThis()
    mockDbChain.from.mockReturnThis()
    mockDbChain.where.mockReturnThis()
    mockDbChain.orderBy.mockImplementation(() => {
      orderByCallCount++
      return Promise.resolve([])
    })
    mockDbChain.limit.mockImplementation(() => {
      limitCallCount++
      return Promise.resolve([])
    })
    mockDbChain.update.mockReturnThis()
    mockDbChain.set.mockReturnThis()
    mockDbChain.returning.mockResolvedValue([])

    vi.doMock('@sim/db', () => ({ db: mockDbChain }))
  })

  afterEach(() => {
    vi.clearAllMocks()
  })

  describe('GET', () => {
    it('returns 401 when unauthenticated', async () => {
      const { checkSessionOrInternalAuth } = await import('@/lib/auth/hybrid')
      vi.mocked(checkSessionOrInternalAuth).mockResolvedValue({
        success: false,
        userId: null,
      } as never)

      const req = createMockRequest('GET')
      const { GET } = await import(
        '@/app/api/knowledge/[id]/connectors/[connectorId]/documents/route'
      )
      const response = await GET(req as never, { params: mockParams })

      expect(response.status).toBe(401)
    })

    it('returns 404 when connector not found', async () => {
      const { checkSessionOrInternalAuth } = await import('@/lib/auth/hybrid')
      const { checkKnowledgeBaseAccess } = await import('@/app/api/knowledge/utils')

      vi.mocked(checkSessionOrInternalAuth).mockResolvedValue({
        success: true,
        userId: 'user-1',
      } as never)
      vi.mocked(checkKnowledgeBaseAccess).mockResolvedValue({ hasAccess: true } as never)

      mockDbChain.limit.mockResolvedValueOnce([])

      const req = createMockRequest('GET')
      const { GET } = await import(
        '@/app/api/knowledge/[id]/connectors/[connectorId]/documents/route'
      )
      const response = await GET(req as never, { params: mockParams })

      expect(response.status).toBe(404)
    })

    it('returns documents list on success', async () => {
      const { checkSessionOrInternalAuth } = await import('@/lib/auth/hybrid')
      const { checkKnowledgeBaseAccess } = await import('@/app/api/knowledge/utils')

      vi.mocked(checkSessionOrInternalAuth).mockResolvedValue({
        success: true,
        userId: 'user-1',
      } as never)
      vi.mocked(checkKnowledgeBaseAccess).mockResolvedValue({ hasAccess: true } as never)

      const doc = { id: 'doc-1', filename: 'test.txt', userExcluded: false }
      mockDbChain.limit.mockResolvedValueOnce([{ id: 'conn-456' }])
      mockDbChain.orderBy.mockResolvedValueOnce([doc])

      const url = 'http://localhost/api/knowledge/kb-123/connectors/conn-456/documents'
      const req = createMockRequest('GET', undefined, undefined, url)
      Object.assign(req, { nextUrl: new URL(url) })
      const { GET } = await import(
        '@/app/api/knowledge/[id]/connectors/[connectorId]/documents/route'
      )
      const response = await GET(req as never, { params: mockParams })
      const data = await response.json()

      expect(response.status).toBe(200)
      expect(data.data.documents).toHaveLength(1)
      expect(data.data.counts.active).toBe(1)
      expect(data.data.counts.excluded).toBe(0)
    })

    it('includes excluded documents when includeExcluded=true', async () => {
      const { checkSessionOrInternalAuth } = await import('@/lib/auth/hybrid')
      const { checkKnowledgeBaseAccess } = await import('@/app/api/knowledge/utils')

      vi.mocked(checkSessionOrInternalAuth).mockResolvedValue({
        success: true,
        userId: 'user-1',
      } as never)
      vi.mocked(checkKnowledgeBaseAccess).mockResolvedValue({ hasAccess: true } as never)

      mockDbChain.limit.mockResolvedValueOnce([{ id: 'conn-456' }])
      mockDbChain.orderBy
        .mockResolvedValueOnce([{ id: 'doc-1', userExcluded: false }])
        .mockResolvedValueOnce([{ id: 'doc-2', userExcluded: true }])

      const url =
        'http://localhost/api/knowledge/kb-123/connectors/conn-456/documents?includeExcluded=true'
      const req = createMockRequest('GET', undefined, undefined, url)
      Object.assign(req, { nextUrl: new URL(url) })
      const { GET } = await import(
        '@/app/api/knowledge/[id]/connectors/[connectorId]/documents/route'
      )
      const response = await GET(req as never, { params: mockParams })
      const data = await response.json()

      expect(response.status).toBe(200)
      expect(data.data.documents).toHaveLength(2)
      expect(data.data.counts.active).toBe(1)
      expect(data.data.counts.excluded).toBe(1)
    })
  })

  describe('PATCH', () => {
    it('returns 401 when unauthenticated', async () => {
      const { checkSessionOrInternalAuth } = await import('@/lib/auth/hybrid')
      vi.mocked(checkSessionOrInternalAuth).mockResolvedValue({
        success: false,
        userId: null,
      } as never)

      const req = createMockRequest('PATCH', { operation: 'restore', documentIds: ['doc-1'] })
      const { PATCH } = await import(
        '@/app/api/knowledge/[id]/connectors/[connectorId]/documents/route'
      )
      const response = await PATCH(req as never, { params: mockParams })

      expect(response.status).toBe(401)
    })

    it('returns 400 for invalid body', async () => {
      const { checkSessionOrInternalAuth } = await import('@/lib/auth/hybrid')
      const { checkKnowledgeBaseWriteAccess } = await import('@/app/api/knowledge/utils')

      vi.mocked(checkSessionOrInternalAuth).mockResolvedValue({
        success: true,
        userId: 'user-1',
      } as never)
      vi.mocked(checkKnowledgeBaseWriteAccess).mockResolvedValue({ hasAccess: true } as never)
      mockDbChain.limit.mockResolvedValueOnce([{ id: 'conn-456' }])

      const req = createMockRequest('PATCH', { documentIds: [] })
      const { PATCH } = await import(
        '@/app/api/knowledge/[id]/connectors/[connectorId]/documents/route'
      )
      const response = await PATCH(req as never, { params: mockParams })

      expect(response.status).toBe(400)
    })

    it('returns 404 when connector not found', async () => {
      const { checkSessionOrInternalAuth } = await import('@/lib/auth/hybrid')
      const { checkKnowledgeBaseWriteAccess } = await import('@/app/api/knowledge/utils')

      vi.mocked(checkSessionOrInternalAuth).mockResolvedValue({
        success: true,
        userId: 'user-1',
      } as never)
      vi.mocked(checkKnowledgeBaseWriteAccess).mockResolvedValue({ hasAccess: true } as never)
      mockDbChain.limit.mockResolvedValueOnce([])

      const req = createMockRequest('PATCH', { operation: 'restore', documentIds: ['doc-1'] })
      const { PATCH } = await import(
        '@/app/api/knowledge/[id]/connectors/[connectorId]/documents/route'
      )
      const response = await PATCH(req as never, { params: mockParams })

      expect(response.status).toBe(404)
    })

    it('returns success for restore operation', async () => {
      const { checkSessionOrInternalAuth } = await import('@/lib/auth/hybrid')
      const { checkKnowledgeBaseWriteAccess } = await import('@/app/api/knowledge/utils')

      vi.mocked(checkSessionOrInternalAuth).mockResolvedValue({
        success: true,
        userId: 'user-1',
      } as never)
      vi.mocked(checkKnowledgeBaseWriteAccess).mockResolvedValue({ hasAccess: true } as never)
      mockDbChain.limit.mockResolvedValueOnce([{ id: 'conn-456' }])
      mockDbChain.returning.mockResolvedValueOnce([{ id: 'doc-1' }])

      const req = createMockRequest('PATCH', { operation: 'restore', documentIds: ['doc-1'] })
      const { PATCH } = await import(
        '@/app/api/knowledge/[id]/connectors/[connectorId]/documents/route'
      )
      const response = await PATCH(req as never, { params: mockParams })
      const data = await response.json()

      expect(response.status).toBe(200)
      expect(data.data.restoredCount).toBe(1)
    })

    it('returns success for exclude operation', async () => {
      const { checkSessionOrInternalAuth } = await import('@/lib/auth/hybrid')
      const { checkKnowledgeBaseWriteAccess } = await import('@/app/api/knowledge/utils')
|
||||
|
||||
vi.mocked(checkSessionOrInternalAuth).mockResolvedValue({
|
||||
success: true,
|
||||
userId: 'user-1',
|
||||
} as never)
|
||||
vi.mocked(checkKnowledgeBaseWriteAccess).mockResolvedValue({ hasAccess: true } as never)
|
||||
mockDbChain.limit.mockResolvedValueOnce([{ id: 'conn-456' }])
|
||||
mockDbChain.returning.mockResolvedValueOnce([{ id: 'doc-2' }, { id: 'doc-3' }])
|
||||
|
||||
const req = createMockRequest('PATCH', {
|
||||
operation: 'exclude',
|
||||
documentIds: ['doc-2', 'doc-3'],
|
||||
})
|
||||
const { PATCH } = await import(
|
||||
'@/app/api/knowledge/[id]/connectors/[connectorId]/documents/route'
|
||||
)
|
||||
const response = await PATCH(req as never, { params: mockParams })
|
||||
const data = await response.json()
|
||||
|
||||
expect(response.status).toBe(200)
|
||||
expect(data.data.excludedCount).toBe(2)
|
||||
expect(data.data.documentIds).toEqual(['doc-2', 'doc-3'])
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -1,210 +0,0 @@
import { db } from '@sim/db'
import { document, knowledgeConnector } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq, inArray, isNull } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { checkKnowledgeBaseAccess, checkKnowledgeBaseWriteAccess } from '@/app/api/knowledge/utils'

const logger = createLogger('ConnectorDocumentsAPI')

type RouteParams = { params: Promise<{ id: string; connectorId: string }> }

/**
 * GET /api/knowledge/[id]/connectors/[connectorId]/documents
 * Returns documents for a connector, optionally including user-excluded ones.
 */
export async function GET(request: NextRequest, { params }: RouteParams) {
  const requestId = generateRequestId()
  const { id: knowledgeBaseId, connectorId } = await params

  try {
    const auth = await checkSessionOrInternalAuth(request, { requireWorkflowId: false })
    if (!auth.success || !auth.userId) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    const accessCheck = await checkKnowledgeBaseAccess(knowledgeBaseId, auth.userId)
    if (!accessCheck.hasAccess) {
      const status = 'notFound' in accessCheck && accessCheck.notFound ? 404 : 401
      return NextResponse.json({ error: status === 404 ? 'Not found' : 'Unauthorized' }, { status })
    }

    const connectorRows = await db
      .select({ id: knowledgeConnector.id })
      .from(knowledgeConnector)
      .where(
        and(
          eq(knowledgeConnector.id, connectorId),
          eq(knowledgeConnector.knowledgeBaseId, knowledgeBaseId),
          isNull(knowledgeConnector.deletedAt)
        )
      )
      .limit(1)

    if (connectorRows.length === 0) {
      return NextResponse.json({ error: 'Connector not found' }, { status: 404 })
    }

    const includeExcluded = request.nextUrl.searchParams.get('includeExcluded') === 'true'

    const activeDocs = await db
      .select({
        id: document.id,
        filename: document.filename,
        externalId: document.externalId,
        sourceUrl: document.sourceUrl,
        enabled: document.enabled,
        userExcluded: document.userExcluded,
        uploadedAt: document.uploadedAt,
        processingStatus: document.processingStatus,
      })
      .from(document)
      .where(
        and(
          eq(document.connectorId, connectorId),
          isNull(document.deletedAt),
          eq(document.userExcluded, false)
        )
      )
      .orderBy(document.filename)

    const excludedDocs = includeExcluded
      ? await db
          .select({
            id: document.id,
            filename: document.filename,
            externalId: document.externalId,
            sourceUrl: document.sourceUrl,
            enabled: document.enabled,
            userExcluded: document.userExcluded,
            uploadedAt: document.uploadedAt,
            processingStatus: document.processingStatus,
          })
          .from(document)
          .where(
            and(
              eq(document.connectorId, connectorId),
              eq(document.userExcluded, true),
              isNull(document.deletedAt)
            )
          )
          .orderBy(document.filename)
      : []

    const docs = [...activeDocs, ...excludedDocs]
    const activeCount = activeDocs.length
    const excludedCount = excludedDocs.length

    return NextResponse.json({
      success: true,
      data: {
        documents: docs,
        counts: { active: activeCount, excluded: excludedCount },
      },
    })
  } catch (error) {
    logger.error(`[${requestId}] Error fetching connector documents`, error)
    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}

const PatchSchema = z.object({
  operation: z.enum(['restore', 'exclude']),
  documentIds: z.array(z.string()).min(1),
})

/**
 * PATCH /api/knowledge/[id]/connectors/[connectorId]/documents
 * Restore or exclude connector documents.
 */
export async function PATCH(request: NextRequest, { params }: RouteParams) {
  const requestId = generateRequestId()
  const { id: knowledgeBaseId, connectorId } = await params

  try {
    const auth = await checkSessionOrInternalAuth(request, { requireWorkflowId: false })
    if (!auth.success || !auth.userId) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    const writeCheck = await checkKnowledgeBaseWriteAccess(knowledgeBaseId, auth.userId)
    if (!writeCheck.hasAccess) {
      const status = 'notFound' in writeCheck && writeCheck.notFound ? 404 : 401
      return NextResponse.json({ error: status === 404 ? 'Not found' : 'Unauthorized' }, { status })
    }

    const connectorRows = await db
      .select({ id: knowledgeConnector.id })
      .from(knowledgeConnector)
      .where(
        and(
          eq(knowledgeConnector.id, connectorId),
          eq(knowledgeConnector.knowledgeBaseId, knowledgeBaseId),
          isNull(knowledgeConnector.deletedAt)
        )
      )
      .limit(1)

    if (connectorRows.length === 0) {
      return NextResponse.json({ error: 'Connector not found' }, { status: 404 })
    }

    const body = await request.json()
    const parsed = PatchSchema.safeParse(body)
    if (!parsed.success) {
      return NextResponse.json(
        { error: 'Invalid request', details: parsed.error.flatten() },
        { status: 400 }
      )
    }

    const { operation, documentIds } = parsed.data

    if (operation === 'restore') {
      const updated = await db
        .update(document)
        .set({ userExcluded: false, deletedAt: null, enabled: true })
        .where(
          and(
            eq(document.connectorId, connectorId),
            inArray(document.id, documentIds),
            eq(document.userExcluded, true)
          )
        )
        .returning({ id: document.id })

      logger.info(`[${requestId}] Restored ${updated.length} excluded documents`, { connectorId })

      return NextResponse.json({
        success: true,
        data: { restoredCount: updated.length, documentIds: updated.map((d) => d.id) },
      })
    }

    const updated = await db
      .update(document)
      .set({ userExcluded: true })
      .where(
        and(
          eq(document.connectorId, connectorId),
          inArray(document.id, documentIds),
          eq(document.userExcluded, false),
          isNull(document.deletedAt)
        )
      )
      .returning({ id: document.id })

    logger.info(`[${requestId}] Excluded ${updated.length} documents`, { connectorId })

    return NextResponse.json({
      success: true,
      data: { excludedCount: updated.length, documentIds: updated.map((d) => d.id) },
    })
  } catch (error) {
    logger.error(`[${requestId}] Error updating connector documents`, error)
    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}

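For reference, a minimal client-side sketch of how this sub-route might be exercised. The IDs are the same placeholders the tests use, the base URL is assumed, and the response envelope follows the `{ success, data }` shape returned by the handlers above:

```typescript
// Hypothetical client sketch; IDs and base path are placeholders.
const base = '/api/knowledge/kb-123/connectors/conn-456/documents'

// List active documents plus user-excluded ones.
const listRes = await fetch(`${base}?includeExcluded=true`)
const { data } = await listRes.json()
console.log(data.counts) // { active: <n>, excluded: <m> }

// Exclude two documents so future syncs skip them.
await fetch(base, {
  method: 'PATCH',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({ operation: 'exclude', documentIds: ['doc-2', 'doc-3'] }),
})
```
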
@@ -1,231 +0,0 @@
/**
 * @vitest-environment node
 */
import { createMockRequest, mockConsoleLogger, mockDrizzleOrm } from '@sim/testing'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'

vi.mock('@/app/api/knowledge/utils', () => ({
  checkKnowledgeBaseAccess: vi.fn(),
  checkKnowledgeBaseWriteAccess: vi.fn(),
}))
vi.mock('@/lib/auth/hybrid', () => ({
  checkSessionOrInternalAuth: vi.fn(),
}))
vi.mock('@/lib/core/utils/request', () => ({
  generateRequestId: vi.fn().mockReturnValue('test-req-id'),
}))
vi.mock('@/app/api/auth/oauth/utils', () => ({
  refreshAccessTokenIfNeeded: vi.fn(),
}))
vi.mock('@/connectors/registry', () => ({
  CONNECTOR_REGISTRY: {
    jira: { validateConfig: vi.fn() },
  },
}))
vi.mock('@sim/db/schema', () => ({
  knowledgeBase: { id: 'id', userId: 'userId' },
  knowledgeConnector: {
    id: 'id',
    knowledgeBaseId: 'knowledgeBaseId',
    deletedAt: 'deletedAt',
    connectorType: 'connectorType',
    credentialId: 'credentialId',
  },
  knowledgeConnectorSyncLog: { connectorId: 'connectorId', startedAt: 'startedAt' },
}))

mockDrizzleOrm()
mockConsoleLogger()

describe('Knowledge Connector By ID API Route', () => {
  const mockDbChain = {
    select: vi.fn().mockReturnThis(),
    from: vi.fn().mockReturnThis(),
    where: vi.fn().mockReturnThis(),
    orderBy: vi.fn().mockReturnThis(),
    limit: vi.fn().mockResolvedValue([]),
    insert: vi.fn().mockReturnThis(),
    values: vi.fn().mockResolvedValue(undefined),
    update: vi.fn().mockReturnThis(),
    set: vi.fn().mockReturnThis(),
    returning: vi.fn().mockResolvedValue([]),
  }

  const mockParams = Promise.resolve({ id: 'kb-123', connectorId: 'conn-456' })

  beforeEach(() => {
    vi.clearAllMocks()
    vi.resetModules()
    mockDbChain.select.mockReturnThis()
    mockDbChain.from.mockReturnThis()
    mockDbChain.where.mockReturnThis()
    mockDbChain.orderBy.mockReturnThis()
    mockDbChain.limit.mockResolvedValue([])
    mockDbChain.update.mockReturnThis()
    mockDbChain.set.mockReturnThis()

    vi.doMock('@sim/db', () => ({ db: mockDbChain }))
  })

  afterEach(() => {
    vi.clearAllMocks()
  })

  describe('GET', () => {
    it('returns 401 when unauthenticated', async () => {
      const { checkSessionOrInternalAuth } = await import('@/lib/auth/hybrid')
      vi.mocked(checkSessionOrInternalAuth).mockResolvedValue({ success: false, userId: null })

      const req = createMockRequest('GET')
      const { GET } = await import('@/app/api/knowledge/[id]/connectors/[connectorId]/route')
      const response = await GET(req, { params: mockParams })

      expect(response.status).toBe(401)
    })

    it('returns 404 when KB not found', async () => {
      const { checkSessionOrInternalAuth } = await import('@/lib/auth/hybrid')
      const { checkKnowledgeBaseAccess } = await import('@/app/api/knowledge/utils')

      vi.mocked(checkSessionOrInternalAuth).mockResolvedValue({ success: true, userId: 'user-1' })
      vi.mocked(checkKnowledgeBaseAccess).mockResolvedValue({ hasAccess: false, notFound: true })

      const req = createMockRequest('GET')
      const { GET } = await import('@/app/api/knowledge/[id]/connectors/[connectorId]/route')
      const response = await GET(req, { params: mockParams })

      expect(response.status).toBe(404)
    })

    it('returns 404 when connector not found', async () => {
      const { checkSessionOrInternalAuth } = await import('@/lib/auth/hybrid')
      const { checkKnowledgeBaseAccess } = await import('@/app/api/knowledge/utils')

      vi.mocked(checkSessionOrInternalAuth).mockResolvedValue({ success: true, userId: 'user-1' })
      vi.mocked(checkKnowledgeBaseAccess).mockResolvedValue({ hasAccess: true })
      mockDbChain.limit.mockResolvedValueOnce([])

      const req = createMockRequest('GET')
      const { GET } = await import('@/app/api/knowledge/[id]/connectors/[connectorId]/route')
      const response = await GET(req, { params: mockParams })

      expect(response.status).toBe(404)
    })

    it('returns connector with sync logs on success', async () => {
      const { checkSessionOrInternalAuth } = await import('@/lib/auth/hybrid')
      const { checkKnowledgeBaseAccess } = await import('@/app/api/knowledge/utils')

      vi.mocked(checkSessionOrInternalAuth).mockResolvedValue({ success: true, userId: 'user-1' })
      vi.mocked(checkKnowledgeBaseAccess).mockResolvedValue({ hasAccess: true })

      const mockConnector = { id: 'conn-456', connectorType: 'jira', status: 'active' }
      const mockLogs = [{ id: 'log-1', status: 'completed' }]

      mockDbChain.limit.mockResolvedValueOnce([mockConnector]).mockResolvedValueOnce(mockLogs)

      const req = createMockRequest('GET')
      const { GET } = await import('@/app/api/knowledge/[id]/connectors/[connectorId]/route')
      const response = await GET(req, { params: mockParams })
      const data = await response.json()

      expect(response.status).toBe(200)
      expect(data.success).toBe(true)
      expect(data.data.id).toBe('conn-456')
      expect(data.data.syncLogs).toHaveLength(1)
    })
  })

  describe('PATCH', () => {
    it('returns 401 when unauthenticated', async () => {
      const { checkSessionOrInternalAuth } = await import('@/lib/auth/hybrid')
      vi.mocked(checkSessionOrInternalAuth).mockResolvedValue({ success: false, userId: null })

      const req = createMockRequest('PATCH', { status: 'paused' })
      const { PATCH } = await import('@/app/api/knowledge/[id]/connectors/[connectorId]/route')
      const response = await PATCH(req, { params: mockParams })

      expect(response.status).toBe(401)
    })

    it('returns 400 for invalid body', async () => {
      const { checkSessionOrInternalAuth } = await import('@/lib/auth/hybrid')
      const { checkKnowledgeBaseWriteAccess } = await import('@/app/api/knowledge/utils')

      vi.mocked(checkSessionOrInternalAuth).mockResolvedValue({ success: true, userId: 'user-1' })
      vi.mocked(checkKnowledgeBaseWriteAccess).mockResolvedValue({ hasAccess: true })

      const req = createMockRequest('PATCH', { syncIntervalMinutes: 'not a number' })
      const { PATCH } = await import('@/app/api/knowledge/[id]/connectors/[connectorId]/route')
      const response = await PATCH(req, { params: mockParams })
      const data = await response.json()

      expect(response.status).toBe(400)
      expect(data.error).toBe('Invalid request')
    })

    it('returns 404 when connector not found during sourceConfig validation', async () => {
      const { checkSessionOrInternalAuth } = await import('@/lib/auth/hybrid')
      const { checkKnowledgeBaseWriteAccess } = await import('@/app/api/knowledge/utils')

      vi.mocked(checkSessionOrInternalAuth).mockResolvedValue({ success: true, userId: 'user-1' })
      vi.mocked(checkKnowledgeBaseWriteAccess).mockResolvedValue({ hasAccess: true })
      mockDbChain.limit.mockResolvedValueOnce([])

      const req = createMockRequest('PATCH', { sourceConfig: { project: 'NEW' } })
      const { PATCH } = await import('@/app/api/knowledge/[id]/connectors/[connectorId]/route')
      const response = await PATCH(req, { params: mockParams })

      expect(response.status).toBe(404)
    })

    it('returns 200 and updates status', async () => {
      const { checkSessionOrInternalAuth } = await import('@/lib/auth/hybrid')
      const { checkKnowledgeBaseWriteAccess } = await import('@/app/api/knowledge/utils')

      vi.mocked(checkSessionOrInternalAuth).mockResolvedValue({ success: true, userId: 'user-1' })
      vi.mocked(checkKnowledgeBaseWriteAccess).mockResolvedValue({ hasAccess: true })

      const updatedConnector = { id: 'conn-456', status: 'paused', syncIntervalMinutes: 120 }
      mockDbChain.limit.mockResolvedValueOnce([updatedConnector])

      const req = createMockRequest('PATCH', { status: 'paused', syncIntervalMinutes: 120 })
      const { PATCH } = await import('@/app/api/knowledge/[id]/connectors/[connectorId]/route')
      const response = await PATCH(req, { params: mockParams })
      const data = await response.json()

      expect(response.status).toBe(200)
      expect(data.success).toBe(true)
      expect(data.data.status).toBe('paused')
    })
  })

  describe('DELETE', () => {
    it('returns 401 when unauthenticated', async () => {
      const { checkSessionOrInternalAuth } = await import('@/lib/auth/hybrid')
      vi.mocked(checkSessionOrInternalAuth).mockResolvedValue({ success: false, userId: null })

      const req = createMockRequest('DELETE')
      const { DELETE } = await import('@/app/api/knowledge/[id]/connectors/[connectorId]/route')
      const response = await DELETE(req, { params: mockParams })

      expect(response.status).toBe(401)
    })

    it('returns 200 on successful soft-delete', async () => {
      const { checkSessionOrInternalAuth } = await import('@/lib/auth/hybrid')
      const { checkKnowledgeBaseWriteAccess } = await import('@/app/api/knowledge/utils')

      vi.mocked(checkSessionOrInternalAuth).mockResolvedValue({ success: true, userId: 'user-1' })
      vi.mocked(checkKnowledgeBaseWriteAccess).mockResolvedValue({ hasAccess: true })

      const req = createMockRequest('DELETE')
      const { DELETE } = await import('@/app/api/knowledge/[id]/connectors/[connectorId]/route')
      const response = await DELETE(req, { params: mockParams })
      const data = await response.json()

      expect(response.status).toBe(200)
      expect(data.success).toBe(true)
    })
  })
})

@@ -1,248 +0,0 @@
import { db } from '@sim/db'
import { knowledgeBase, knowledgeConnector, knowledgeConnectorSyncLog } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, desc, eq, isNull } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { refreshAccessTokenIfNeeded } from '@/app/api/auth/oauth/utils'
import { checkKnowledgeBaseAccess, checkKnowledgeBaseWriteAccess } from '@/app/api/knowledge/utils'
import { CONNECTOR_REGISTRY } from '@/connectors/registry'

const logger = createLogger('KnowledgeConnectorByIdAPI')

type RouteParams = { params: Promise<{ id: string; connectorId: string }> }

const UpdateConnectorSchema = z.object({
  sourceConfig: z.record(z.unknown()).optional(),
  syncIntervalMinutes: z.number().int().min(0).optional(),
  status: z.enum(['active', 'paused']).optional(),
})

/**
 * GET /api/knowledge/[id]/connectors/[connectorId] - Get connector details with recent sync logs
 */
export async function GET(request: NextRequest, { params }: RouteParams) {
  const requestId = generateRequestId()
  const { id: knowledgeBaseId, connectorId } = await params

  try {
    const auth = await checkSessionOrInternalAuth(request, { requireWorkflowId: false })
    if (!auth.success || !auth.userId) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    const accessCheck = await checkKnowledgeBaseAccess(knowledgeBaseId, auth.userId)
    if (!accessCheck.hasAccess) {
      const status = 'notFound' in accessCheck && accessCheck.notFound ? 404 : 401
      return NextResponse.json({ error: status === 404 ? 'Not found' : 'Unauthorized' }, { status })
    }

    const connectorRows = await db
      .select()
      .from(knowledgeConnector)
      .where(
        and(
          eq(knowledgeConnector.id, connectorId),
          eq(knowledgeConnector.knowledgeBaseId, knowledgeBaseId),
          isNull(knowledgeConnector.deletedAt)
        )
      )
      .limit(1)

    if (connectorRows.length === 0) {
      return NextResponse.json({ error: 'Connector not found' }, { status: 404 })
    }

    const syncLogs = await db
      .select()
      .from(knowledgeConnectorSyncLog)
      .where(eq(knowledgeConnectorSyncLog.connectorId, connectorId))
      .orderBy(desc(knowledgeConnectorSyncLog.startedAt))
      .limit(10)

    return NextResponse.json({
      success: true,
      data: {
        ...connectorRows[0],
        syncLogs,
      },
    })
  } catch (error) {
    logger.error(`[${requestId}] Error fetching connector`, error)
    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}

/**
 * PATCH /api/knowledge/[id]/connectors/[connectorId] - Update a connector
 */
export async function PATCH(request: NextRequest, { params }: RouteParams) {
  const requestId = generateRequestId()
  const { id: knowledgeBaseId, connectorId } = await params

  try {
    const auth = await checkSessionOrInternalAuth(request, { requireWorkflowId: false })
    if (!auth.success || !auth.userId) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    const writeCheck = await checkKnowledgeBaseWriteAccess(knowledgeBaseId, auth.userId)
    if (!writeCheck.hasAccess) {
      const status = 'notFound' in writeCheck && writeCheck.notFound ? 404 : 401
      return NextResponse.json({ error: status === 404 ? 'Not found' : 'Unauthorized' }, { status })
    }

    const body = await request.json()
    const parsed = UpdateConnectorSchema.safeParse(body)
    if (!parsed.success) {
      return NextResponse.json(
        { error: 'Invalid request', details: parsed.error.flatten() },
        { status: 400 }
      )
    }

    if (parsed.data.sourceConfig !== undefined) {
      const existingRows = await db
        .select()
        .from(knowledgeConnector)
        .where(
          and(
            eq(knowledgeConnector.id, connectorId),
            eq(knowledgeConnector.knowledgeBaseId, knowledgeBaseId),
            isNull(knowledgeConnector.deletedAt)
          )
        )
        .limit(1)

      if (existingRows.length === 0) {
        return NextResponse.json({ error: 'Connector not found' }, { status: 404 })
      }

      const existing = existingRows[0]
      const connectorConfig = CONNECTOR_REGISTRY[existing.connectorType]

      if (!connectorConfig) {
        return NextResponse.json(
          { error: `Unknown connector type: ${existing.connectorType}` },
          { status: 400 }
        )
      }

      const kbRows = await db
        .select({ userId: knowledgeBase.userId })
        .from(knowledgeBase)
        .where(eq(knowledgeBase.id, knowledgeBaseId))
        .limit(1)

      if (kbRows.length === 0) {
        return NextResponse.json({ error: 'Knowledge base not found' }, { status: 404 })
      }

      const accessToken = await refreshAccessTokenIfNeeded(
        existing.credentialId,
        kbRows[0].userId,
        `patch-${connectorId}`
      )

      if (!accessToken) {
        return NextResponse.json(
          { error: 'Failed to refresh access token. Please reconnect your account.' },
          { status: 401 }
        )
      }

      const validation = await connectorConfig.validateConfig(accessToken, parsed.data.sourceConfig)
      if (!validation.valid) {
        return NextResponse.json(
          { error: validation.error || 'Invalid source configuration' },
          { status: 400 }
        )
      }
    }

    const updates: Record<string, unknown> = { updatedAt: new Date() }
    if (parsed.data.sourceConfig !== undefined) {
      updates.sourceConfig = parsed.data.sourceConfig
    }
    if (parsed.data.syncIntervalMinutes !== undefined) {
      updates.syncIntervalMinutes = parsed.data.syncIntervalMinutes
      if (parsed.data.syncIntervalMinutes > 0) {
        updates.nextSyncAt = new Date(Date.now() + parsed.data.syncIntervalMinutes * 60 * 1000)
      } else {
        updates.nextSyncAt = null
      }
    }
    if (parsed.data.status !== undefined) {
      updates.status = parsed.data.status
    }

    await db
      .update(knowledgeConnector)
      .set(updates)
      .where(
        and(
          eq(knowledgeConnector.id, connectorId),
          eq(knowledgeConnector.knowledgeBaseId, knowledgeBaseId),
          isNull(knowledgeConnector.deletedAt)
        )
      )

    const updated = await db
      .select()
      .from(knowledgeConnector)
      .where(
        and(
          eq(knowledgeConnector.id, connectorId),
          eq(knowledgeConnector.knowledgeBaseId, knowledgeBaseId),
          isNull(knowledgeConnector.deletedAt)
        )
      )
      .limit(1)

    return NextResponse.json({ success: true, data: updated[0] })
  } catch (error) {
    logger.error(`[${requestId}] Error updating connector`, error)
    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}

/**
 * DELETE /api/knowledge/[id]/connectors/[connectorId] - Soft-delete a connector
 */
export async function DELETE(request: NextRequest, { params }: RouteParams) {
  const requestId = generateRequestId()
  const { id: knowledgeBaseId, connectorId } = await params

  try {
    const auth = await checkSessionOrInternalAuth(request, { requireWorkflowId: false })
    if (!auth.success || !auth.userId) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    const writeCheck = await checkKnowledgeBaseWriteAccess(knowledgeBaseId, auth.userId)
    if (!writeCheck.hasAccess) {
      const status = 'notFound' in writeCheck && writeCheck.notFound ? 404 : 401
      return NextResponse.json({ error: status === 404 ? 'Not found' : 'Unauthorized' }, { status })
    }

    await db
      .update(knowledgeConnector)
      .set({ deletedAt: new Date(), status: 'paused', updatedAt: new Date() })
      .where(
        and(
          eq(knowledgeConnector.id, connectorId),
          eq(knowledgeConnector.knowledgeBaseId, knowledgeBaseId),
          isNull(knowledgeConnector.deletedAt)
        )
      )

    logger.info(`[${requestId}] Soft-deleted connector ${connectorId}`)

    return NextResponse.json({ success: true })
  } catch (error) {
    logger.error(`[${requestId}] Error deleting connector`, error)
    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}

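A hedged sketch of how a client might drive this route. Note that, per the PATCH handler above, setting `syncIntervalMinutes` to 0 clears `nextSyncAt`, and DELETE is a soft-delete that also pauses the connector (URL and IDs below are placeholders):

```typescript
// Hypothetical client sketch for the connector-by-id route.
const url = '/api/knowledge/kb-123/connectors/conn-456'

// Pause the connector and disable its schedule.
await fetch(url, {
  method: 'PATCH',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({ status: 'paused', syncIntervalMinutes: 0 }),
})

// Soft-delete: the row keeps its data but gains deletedAt and status 'paused'.
await fetch(url, { method: 'DELETE' })
```
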
@@ -1,133 +0,0 @@
/**
 * @vitest-environment node
 */
import { createMockRequest, mockConsoleLogger, mockDrizzleOrm } from '@sim/testing'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'

vi.mock('@sim/db/schema', () => ({
  knowledgeConnector: {
    id: 'id',
    knowledgeBaseId: 'knowledgeBaseId',
    deletedAt: 'deletedAt',
    status: 'status',
  },
}))

vi.mock('@/app/api/knowledge/utils', () => ({
  checkKnowledgeBaseWriteAccess: vi.fn(),
}))
vi.mock('@/lib/auth/hybrid', () => ({
  checkSessionOrInternalAuth: vi.fn(),
}))
vi.mock('@/lib/core/utils/request', () => ({
  generateRequestId: vi.fn().mockReturnValue('test-req-id'),
}))
vi.mock('@/lib/knowledge/connectors/sync-engine', () => ({
  dispatchSync: vi.fn().mockResolvedValue(undefined),
}))

mockDrizzleOrm()
mockConsoleLogger()

describe('Connector Manual Sync API Route', () => {
  const mockDbChain = {
    select: vi.fn().mockReturnThis(),
    from: vi.fn().mockReturnThis(),
    where: vi.fn().mockReturnThis(),
    orderBy: vi.fn().mockResolvedValue([]),
    limit: vi.fn().mockResolvedValue([]),
    update: vi.fn().mockReturnThis(),
    set: vi.fn().mockReturnThis(),
  }

  const mockParams = Promise.resolve({ id: 'kb-123', connectorId: 'conn-456' })

  beforeEach(() => {
    vi.clearAllMocks()
    mockDbChain.select.mockReturnThis()
    mockDbChain.from.mockReturnThis()
    mockDbChain.where.mockReturnThis()
    mockDbChain.orderBy.mockResolvedValue([])
    mockDbChain.limit.mockResolvedValue([])
    mockDbChain.update.mockReturnThis()
    mockDbChain.set.mockReturnThis()

    vi.doMock('@sim/db', () => ({ db: mockDbChain }))
  })

  afterEach(() => {
    vi.clearAllMocks()
  })

  it('returns 401 when unauthenticated', async () => {
    const { checkSessionOrInternalAuth } = await import('@/lib/auth/hybrid')
    vi.mocked(checkSessionOrInternalAuth).mockResolvedValue({
      success: false,
      userId: null,
    } as never)

    const req = createMockRequest('POST')
    const { POST } = await import('@/app/api/knowledge/[id]/connectors/[connectorId]/sync/route')
    const response = await POST(req as never, { params: mockParams })

    expect(response.status).toBe(401)
  })

  it('returns 404 when connector not found', async () => {
    const { checkSessionOrInternalAuth } = await import('@/lib/auth/hybrid')
    const { checkKnowledgeBaseWriteAccess } = await import('@/app/api/knowledge/utils')

    vi.mocked(checkSessionOrInternalAuth).mockResolvedValue({
      success: true,
      userId: 'user-1',
    } as never)
    vi.mocked(checkKnowledgeBaseWriteAccess).mockResolvedValue({ hasAccess: true } as never)
    mockDbChain.limit.mockResolvedValueOnce([])

    const req = createMockRequest('POST')
    const { POST } = await import('@/app/api/knowledge/[id]/connectors/[connectorId]/sync/route')
    const response = await POST(req as never, { params: mockParams })

    expect(response.status).toBe(404)
  })

  it('returns 409 when connector is syncing', async () => {
    const { checkSessionOrInternalAuth } = await import('@/lib/auth/hybrid')
    const { checkKnowledgeBaseWriteAccess } = await import('@/app/api/knowledge/utils')

    vi.mocked(checkSessionOrInternalAuth).mockResolvedValue({
      success: true,
      userId: 'user-1',
    } as never)
    vi.mocked(checkKnowledgeBaseWriteAccess).mockResolvedValue({ hasAccess: true } as never)
    mockDbChain.limit.mockResolvedValueOnce([{ id: 'conn-456', status: 'syncing' }])

    const req = createMockRequest('POST')
    const { POST } = await import('@/app/api/knowledge/[id]/connectors/[connectorId]/sync/route')
    const response = await POST(req as never, { params: mockParams })

    expect(response.status).toBe(409)
  })

  it('dispatches sync on valid request', async () => {
    const { checkSessionOrInternalAuth } = await import('@/lib/auth/hybrid')
    const { checkKnowledgeBaseWriteAccess } = await import('@/app/api/knowledge/utils')
    const { dispatchSync } = await import('@/lib/knowledge/connectors/sync-engine')

    vi.mocked(checkSessionOrInternalAuth).mockResolvedValue({
      success: true,
      userId: 'user-1',
    } as never)
    vi.mocked(checkKnowledgeBaseWriteAccess).mockResolvedValue({ hasAccess: true } as never)
    mockDbChain.limit.mockResolvedValueOnce([{ id: 'conn-456', status: 'active' }])

    const req = createMockRequest('POST')
    const { POST } = await import('@/app/api/knowledge/[id]/connectors/[connectorId]/sync/route')
    const response = await POST(req as never, { params: mockParams })
    const data = await response.json()

    expect(response.status).toBe(200)
    expect(data.success).toBe(true)
    expect(vi.mocked(dispatchSync)).toHaveBeenCalledWith('conn-456', { requestId: 'test-req-id' })
  })
})

@@ -1,71 +0,0 @@
import { db } from '@sim/db'
import { knowledgeConnector } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq, isNull } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { dispatchSync } from '@/lib/knowledge/connectors/sync-engine'
import { checkKnowledgeBaseWriteAccess } from '@/app/api/knowledge/utils'

const logger = createLogger('ConnectorManualSyncAPI')

type RouteParams = { params: Promise<{ id: string; connectorId: string }> }

/**
 * POST /api/knowledge/[id]/connectors/[connectorId]/sync - Trigger a manual sync
 */
export async function POST(request: NextRequest, { params }: RouteParams) {
  const requestId = generateRequestId()
  const { id: knowledgeBaseId, connectorId } = await params

  try {
    const auth = await checkSessionOrInternalAuth(request, { requireWorkflowId: false })
    if (!auth.success || !auth.userId) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    const writeCheck = await checkKnowledgeBaseWriteAccess(knowledgeBaseId, auth.userId)
    if (!writeCheck.hasAccess) {
      const status = 'notFound' in writeCheck && writeCheck.notFound ? 404 : 401
      return NextResponse.json({ error: status === 404 ? 'Not found' : 'Unauthorized' }, { status })
    }

    const connectorRows = await db
      .select()
      .from(knowledgeConnector)
      .where(
        and(
          eq(knowledgeConnector.id, connectorId),
          eq(knowledgeConnector.knowledgeBaseId, knowledgeBaseId),
          isNull(knowledgeConnector.deletedAt)
        )
      )
      .limit(1)

    if (connectorRows.length === 0) {
      return NextResponse.json({ error: 'Connector not found' }, { status: 404 })
    }

    if (connectorRows[0].status === 'syncing') {
      return NextResponse.json({ error: 'Sync already in progress' }, { status: 409 })
    }

    logger.info(`[${requestId}] Manual sync triggered for connector ${connectorId}`)

    dispatchSync(connectorId, { requestId }).catch((error) => {
      logger.error(
        `[${requestId}] Failed to dispatch manual sync for connector ${connectorId}`,
        error
      )
    })

    return NextResponse.json({
      success: true,
      message: 'Sync triggered',
    })
  } catch (error) {
    logger.error(`[${requestId}] Error triggering manual sync`, error)
    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}

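Because the handler dispatches the sync fire-and-forget and guards against concurrent runs with a 409, a caller needs to treat a 200 as "queued", not "done". A minimal sketch (endpoint path with placeholder IDs):

```typescript
// Hypothetical caller sketch for the manual-sync trigger above.
const res = await fetch('/api/knowledge/kb-123/connectors/conn-456/sync', {
  method: 'POST',
})

if (res.status === 409) {
  // The route refuses to dispatch while a sync is already running.
  console.warn('Sync already in progress')
} else if (res.ok) {
  // 200 only means the sync was dispatched; it completes in the background.
  console.log('Sync triggered')
}
```
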
@@ -1,204 +0,0 @@
import { db } from '@sim/db'
import { knowledgeBaseTagDefinitions, knowledgeConnector } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, desc, eq, isNull } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { dispatchSync } from '@/lib/knowledge/connectors/sync-engine'
import { allocateTagSlots } from '@/lib/knowledge/constants'
import { createTagDefinition } from '@/lib/knowledge/tags/service'
import { getCredential } from '@/app/api/auth/oauth/utils'
import { checkKnowledgeBaseAccess, checkKnowledgeBaseWriteAccess } from '@/app/api/knowledge/utils'
import { CONNECTOR_REGISTRY } from '@/connectors/registry'

const logger = createLogger('KnowledgeConnectorsAPI')

const CreateConnectorSchema = z.object({
  connectorType: z.string().min(1),
  credentialId: z.string().min(1),
  sourceConfig: z.record(z.unknown()),
  syncIntervalMinutes: z.number().int().min(0).default(1440),
})

/**
 * GET /api/knowledge/[id]/connectors - List connectors for a knowledge base
 */
export async function GET(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
  const requestId = generateRequestId()
  const { id: knowledgeBaseId } = await params

  try {
    const auth = await checkSessionOrInternalAuth(request, { requireWorkflowId: false })
    if (!auth.success || !auth.userId) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    const accessCheck = await checkKnowledgeBaseAccess(knowledgeBaseId, auth.userId)
    if (!accessCheck.hasAccess) {
      const status = 'notFound' in accessCheck && accessCheck.notFound ? 404 : 401
      return NextResponse.json({ error: status === 404 ? 'Not found' : 'Unauthorized' }, { status })
    }

    const connectors = await db
      .select()
      .from(knowledgeConnector)
      .where(
        and(
          eq(knowledgeConnector.knowledgeBaseId, knowledgeBaseId),
          isNull(knowledgeConnector.deletedAt)
        )
      )
      .orderBy(desc(knowledgeConnector.createdAt))

    return NextResponse.json({ success: true, data: connectors })
  } catch (error) {
    logger.error(`[${requestId}] Error listing connectors`, error)
    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}

/**
 * POST /api/knowledge/[id]/connectors - Create a new connector
 */
export async function POST(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
  const requestId = generateRequestId()
  const { id: knowledgeBaseId } = await params

  try {
    const auth = await checkSessionOrInternalAuth(request, { requireWorkflowId: false })
    if (!auth.success || !auth.userId) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    const writeCheck = await checkKnowledgeBaseWriteAccess(knowledgeBaseId, auth.userId)
    if (!writeCheck.hasAccess) {
      const status = 'notFound' in writeCheck && writeCheck.notFound ? 404 : 401
      return NextResponse.json({ error: status === 404 ? 'Not found' : 'Unauthorized' }, { status })
    }

    const body = await request.json()
    const parsed = CreateConnectorSchema.safeParse(body)
    if (!parsed.success) {
      return NextResponse.json(
        { error: 'Invalid request', details: parsed.error.flatten() },
        { status: 400 }
      )
    }

    const { connectorType, credentialId, sourceConfig, syncIntervalMinutes } = parsed.data

    const connectorConfig = CONNECTOR_REGISTRY[connectorType]
    if (!connectorConfig) {
      return NextResponse.json(
        { error: `Unknown connector type: ${connectorType}` },
        { status: 400 }
      )
    }

    const credential = await getCredential(requestId, credentialId, auth.userId)
    if (!credential) {
      return NextResponse.json({ error: 'Credential not found' }, { status: 400 })
    }

    if (!credential.accessToken) {
      return NextResponse.json(
        { error: 'Credential has no access token. Please reconnect your account.' },
        { status: 400 }
      )
    }

    const validation = await connectorConfig.validateConfig(credential.accessToken, sourceConfig)
    if (!validation.valid) {
      return NextResponse.json(
        { error: validation.error || 'Invalid source configuration' },
        { status: 400 }
      )
    }

    let finalSourceConfig: Record<string, unknown> = sourceConfig
    const tagSlotMapping: Record<string, string> = {}

    if (connectorConfig.tagDefinitions?.length) {
      const disabledIds = new Set((sourceConfig.disabledTagIds as string[] | undefined) ?? [])
      const enabledDefs = connectorConfig.tagDefinitions.filter((td) => !disabledIds.has(td.id))

      const existingDefs = await db
        .select({ tagSlot: knowledgeBaseTagDefinitions.tagSlot })
        .from(knowledgeBaseTagDefinitions)
        .where(eq(knowledgeBaseTagDefinitions.knowledgeBaseId, knowledgeBaseId))

      const usedSlots = new Set<string>(existingDefs.map((d) => d.tagSlot))
      const { mapping, skipped: skippedTags } = allocateTagSlots(enabledDefs, usedSlots)
      Object.assign(tagSlotMapping, mapping)

      for (const name of skippedTags) {
        logger.warn(`[${requestId}] No available slots for "${name}"`)
      }

      if (skippedTags.length > 0 && Object.keys(tagSlotMapping).length === 0) {
        return NextResponse.json(
          { error: `No available tag slots. Could not assign: ${skippedTags.join(', ')}` },
          { status: 422 }
        )
      }

      finalSourceConfig = { ...sourceConfig, tagSlotMapping }
    }

    const now = new Date()
    const connectorId = crypto.randomUUID()
    const nextSyncAt =
      syncIntervalMinutes > 0 ? new Date(now.getTime() + syncIntervalMinutes * 60 * 1000) : null

    await db.transaction(async (tx) => {
      for (const [semanticId, slot] of Object.entries(tagSlotMapping)) {
        const td = connectorConfig.tagDefinitions!.find((d) => d.id === semanticId)!
        await createTagDefinition(
          {
            knowledgeBaseId,
            tagSlot: slot,
            displayName: td.displayName,
            fieldType: td.fieldType,
          },
          requestId,
          tx
        )
      }

      await tx.insert(knowledgeConnector).values({
        id: connectorId,
        knowledgeBaseId,
        connectorType,
        credentialId,
        sourceConfig: finalSourceConfig,
        syncIntervalMinutes,
        status: 'active',
        nextSyncAt,
        createdAt: now,
        updatedAt: now,
      })
    })

    logger.info(`[${requestId}] Created connector ${connectorId} for KB ${knowledgeBaseId}`)

    dispatchSync(connectorId, { requestId }).catch((error) => {
      logger.error(
        `[${requestId}] Failed to dispatch initial sync for connector ${connectorId}`,
        error
      )
    })

    const created = await db
      .select()
      .from(knowledgeConnector)
      .where(eq(knowledgeConnector.id, connectorId))
      .limit(1)

    return NextResponse.json({ success: true, data: created[0] }, { status: 201 })
  } catch (error) {
    logger.error(`[${requestId}] Error creating connector`, error)
    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}

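A hedged sketch of the create call. The field names mirror `CreateConnectorSchema` above; the `jira` sourceConfig shape and IDs are illustrative placeholders, not the connector's documented schema:

```typescript
// Hypothetical request body for POST /api/knowledge/[id]/connectors.
const res = await fetch('/api/knowledge/kb-123/connectors', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    connectorType: 'jira',
    credentialId: 'cred-789',            // an existing OAuth credential
    sourceConfig: { project: 'ENG' },    // validated by validateConfig()
    syncIntervalMinutes: 1440,           // daily; 0 disables scheduled syncs
  }),
})

// On success the route returns 201 with the inserted connector row,
// and an initial sync has already been dispatched in the background.
const { data } = await res.json()
```
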
@@ -12,7 +12,6 @@ import {
   getDocuments,
   getProcessingConfig,
   processDocumentsWithQueue,
-  type TagFilterCondition,
 } from '@/lib/knowledge/documents/service'
 import type { DocumentSortField, SortOrder } from '@/lib/knowledge/documents/types'
 import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils'
@@ -131,21 +130,6 @@ export async function GET(req: NextRequest, { params }: { params: Promise<{ id:
         ? (sortOrderParam as SortOrder)
         : undefined
 
-    let tagFilters: TagFilterCondition[] | undefined
-    const tagFiltersParam = url.searchParams.get('tagFilters')
-    if (tagFiltersParam) {
-      try {
-        const parsed = JSON.parse(tagFiltersParam)
-        if (Array.isArray(parsed)) {
-          tagFilters = parsed.filter(
-            (f: TagFilterCondition) => f.tagSlot && f.operator && f.value !== undefined
-          )
-        }
-      } catch {
-        logger.warn(`[${requestId}] Invalid tagFilters param`)
-      }
-    }
-
     const result = await getDocuments(
       knowledgeBaseId,
       {
@@ -155,7 +139,6 @@ export async function GET(req: NextRequest, { params }: { params: Promise<{ id:
         offset,
         ...(sortBy && { sortBy }),
         ...(sortOrder && { sortOrder }),
-        tagFilters,
       },
       requestId
     )

@@ -1,68 +0,0 @@
import { db } from '@sim/db'
import { knowledgeConnector } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq, isNull, lte } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { verifyCronAuth } from '@/lib/auth/internal'
import { generateRequestId } from '@/lib/core/utils/request'
import { dispatchSync } from '@/lib/knowledge/connectors/sync-engine'

export const dynamic = 'force-dynamic'

const logger = createLogger('ConnectorSyncSchedulerAPI')

/**
 * Cron endpoint that checks for connectors due for sync and dispatches sync jobs.
 * Should be called every 5 minutes by an external cron service.
 */
export async function GET(request: NextRequest) {
  const requestId = generateRequestId()
  logger.info(`[${requestId}] Connector sync scheduler triggered`)

  const authError = verifyCronAuth(request, 'Connector sync scheduler')
  if (authError) {
    return authError
  }

  try {
    const now = new Date()

    const dueConnectors = await db
      .select({
        id: knowledgeConnector.id,
      })
      .from(knowledgeConnector)
      .where(
        and(
          eq(knowledgeConnector.status, 'active'),
          lte(knowledgeConnector.nextSyncAt, now),
          isNull(knowledgeConnector.deletedAt)
        )
      )

    logger.info(`[${requestId}] Found ${dueConnectors.length} connectors due for sync`)

    if (dueConnectors.length === 0) {
      return NextResponse.json({
        success: true,
        message: 'No connectors due for sync',
        count: 0,
      })
    }

    for (const connector of dueConnectors) {
      dispatchSync(connector.id, { requestId }).catch((error) => {
        logger.error(`[${requestId}] Failed to dispatch sync for connector ${connector.id}`, error)
      })
    }

    return NextResponse.json({
      success: true,
      message: `Dispatched ${dueConnectors.length} connector sync(s)`,
      count: dueConnectors.length,
    })
  } catch (error) {
    logger.error(`[${requestId}] Connector sync scheduler error`, error)
    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}

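A sketch of the external caller's side. The route path and credential scheme are assumptions here — `verifyCronAuth` is defined elsewhere in the codebase, and a bearer secret is only one plausible shape for it:

```typescript
// Hypothetical cron-side invocation; URL and auth header are assumed.
const res = await fetch('https://sim.example.com/api/cron/connector-sync', {
  headers: { Authorization: `Bearer ${process.env.CRON_SECRET}` },
})

const body = await res.json()
console.log(body.message) // e.g. 'Dispatched 3 connector sync(s)'
```
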
@@ -22,15 +22,20 @@ interface PipedriveFile {
 interface PipedriveApiResponse {
   success: boolean
   data?: PipedriveFile[]
+  additional_data?: {
+    pagination?: {
+      more_items_in_collection: boolean
+      next_start: number
+    }
+  }
   error?: string
 }
 
 const PipedriveGetFilesSchema = z.object({
   accessToken: z.string().min(1, 'Access token is required'),
-  deal_id: z.string().optional().nullable(),
-  person_id: z.string().optional().nullable(),
-  org_id: z.string().optional().nullable(),
+  sort: z.enum(['id', 'update_time']).optional().nullable(),
   limit: z.string().optional().nullable(),
+  start: z.string().optional().nullable(),
   downloadFiles: z.boolean().optional().default(false),
 })

@@ -54,20 +59,19 @@ export async function POST(request: NextRequest) {
     const body = await request.json()
     const validatedData = PipedriveGetFilesSchema.parse(body)
 
-    const { accessToken, deal_id, person_id, org_id, limit, downloadFiles } = validatedData
+    const { accessToken, sort, limit, start, downloadFiles } = validatedData
 
     const baseUrl = 'https://api.pipedrive.com/v1/files'
     const queryParams = new URLSearchParams()
 
-    if (deal_id) queryParams.append('deal_id', deal_id)
-    if (person_id) queryParams.append('person_id', person_id)
-    if (org_id) queryParams.append('org_id', org_id)
+    if (sort) queryParams.append('sort', sort)
     if (limit) queryParams.append('limit', limit)
+    if (start) queryParams.append('start', start)
 
     const queryString = queryParams.toString()
     const apiUrl = queryString ? `${baseUrl}?${queryString}` : baseUrl
 
-    logger.info(`[${requestId}] Fetching files from Pipedrive`, { deal_id, person_id, org_id })
+    logger.info(`[${requestId}] Fetching files from Pipedrive`)
 
     const urlValidation = await validateUrlWithDNS(apiUrl, 'apiUrl')
     if (!urlValidation.isValid) {
@@ -93,6 +97,8 @@ export async function POST(request: NextRequest) {
     }
 
     const files = data.data || []
+    const hasMore = data.additional_data?.pagination?.more_items_in_collection || false
+    const nextStart = data.additional_data?.pagination?.next_start ?? null
     const downloadedFiles: Array<{
       name: string
       mimeType: string
@@ -149,6 +155,8 @@ export async function POST(request: NextRequest) {
         files,
         downloadedFiles: downloadedFiles.length > 0 ? downloadedFiles : undefined,
         total_items: files.length,
+        has_more: hasMore,
+        next_start: nextStart,
         success: true,
       },
     })

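Together these hunks switch the Pipedrive tool from entity filters to cursor pagination: the caller passes `start`, and the response echoes back `has_more` / `next_start` from Pipedrive's `additional_data.pagination`. A self-contained sketch of the consuming loop — `fetchPage` stands in for an actual call to this route, whose exact response envelope isn't shown in the diff:

```typescript
// Hypothetical pagination loop over the cursor fields added above.
type FilesPage = { files: unknown[]; has_more: boolean; next_start: number | null }

async function fetchAllFiles(fetchPage: (start?: string) => Promise<FilesPage>) {
  const all: unknown[] = []
  let start: string | undefined

  do {
    const page = await fetchPage(start)
    all.push(...page.files)
    // next_start is only meaningful while more_items_in_collection was true.
    start = page.has_more && page.next_start !== null ? String(page.next_start) : undefined
  } while (start !== undefined)

  return all
}
```
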
@@ -30,21 +30,6 @@ export const ChatMessageContainer = memo(function ChatMessageContainer({
 }: ChatMessageContainerProps) {
   return (
     <div className='relative flex flex-1 flex-col overflow-hidden bg-white'>
-      <style jsx>{`
-        @keyframes growShrink {
-          0%,
-          100% {
-            transform: scale(0.9);
-          }
-          50% {
-            transform: scale(1.1);
-          }
-        }
-        .loading-dot {
-          animation: growShrink 1.5s infinite ease-in-out;
-        }
-      `}</style>
-
       {/* Scrollable Messages Area */}
       <div
         ref={messagesContainerRef}

@@ -71,7 +71,7 @@ export function VoiceInterface({
   const [state, setState] = useState<'idle' | 'listening' | 'agent_speaking'>('idle')
   const [isInitialized, setIsInitialized] = useState(false)
   const [isMuted, setIsMuted] = useState(false)
-  const [audioLevels, setAudioLevels] = useState<number[]>(new Array(200).fill(0))
+  const [audioLevels, setAudioLevels] = useState<number[]>(() => new Array(200).fill(0))
   const [permissionStatus, setPermissionStatus] = useState<'prompt' | 'granted' | 'denied'>(
     'prompt'
   )

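The one-line `VoiceInterface` change is a standard React micro-optimization: passing an initializer function to `useState` means the 200-element array is allocated once on mount rather than on every render. A minimal illustration (component and names are placeholders):

```typescript
import { useState } from 'react'

function Levels() {
  // Eager form: the array expression is evaluated on every render,
  // even though React only uses the result on the first one.
  // const [levels] = useState<number[]>(new Array(200).fill(0))

  // Lazy form: the initializer runs once, on the initial render only.
  const [levels] = useState<number[]>(() => new Array(200).fill(0))
  return <div>{levels.length}</div>
}
```
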
@@ -1,4 +1,4 @@
-import { redirect } from 'next/navigation'
+import { redirect, unstable_rethrow } from 'next/navigation'
 import { getSession } from '@/lib/auth'
 import { getWorkspaceFile } from '@/lib/uploads/contexts/workspace'
 import { verifyWorkspaceMembership } from '@/app/api/workflows/utils'
@@ -14,24 +14,27 @@ interface FileViewerPageProps
 export default async function FileViewerPage({ params }: FileViewerPageProps) {
   const { workspaceId, fileId } = await params
 
-  try {
-    const session = await getSession()
-    if (!session?.user?.id) {
-      redirect('/')
-    }
+  const session = await getSession()
+  if (!session?.user?.id) {
+    redirect('/')
+  }
 
-    const hasPermission = await verifyWorkspaceMembership(session.user.id, workspaceId)
-    if (!hasPermission) {
-      redirect(`/workspace/${workspaceId}`)
-    }
-
-    const fileRecord = await getWorkspaceFile(workspaceId, fileId)
-    if (!fileRecord) {
-      redirect(`/workspace/${workspaceId}`)
-    }
-
-    return <FileViewer file={fileRecord} />
-  } catch (error) {
+  const hasPermission = await verifyWorkspaceMembership(session.user.id, workspaceId)
+  if (!hasPermission) {
+    redirect(`/workspace/${workspaceId}`)
+  }
+
+  let fileRecord: Awaited<ReturnType<typeof getWorkspaceFile>>
+  try {
+    fileRecord = await getWorkspaceFile(workspaceId, fileId)
+  } catch (error) {
+    unstable_rethrow(error)
+    redirect(`/workspace/${workspaceId}`)
+  }
+
+  if (!fileRecord) {
+    redirect(`/workspace/${workspaceId}`)
+  }
+
+  return <FileViewer file={fileRecord} />
 }

@@ -13,7 +13,7 @@ import {
  Textarea,
} from '@/components/emcn'
import type { DocumentData } from '@/lib/knowledge/types'
import { useCreateChunk } from '@/hooks/queries/kb/knowledge'
import { useCreateChunk } from '@/hooks/queries/knowledge'

const logger = createLogger('CreateChunkModal')

@@ -2,7 +2,7 @@

import { Button, Modal, ModalBody, ModalContent, ModalFooter, ModalHeader } from '@/components/emcn'
import type { ChunkData } from '@/lib/knowledge/types'
import { useDeleteChunk } from '@/hooks/queries/kb/knowledge'
import { useDeleteChunk } from '@/hooks/queries/knowledge'

interface DeleteChunkModalProps {
  chunk: ChunkData | null

@@ -25,7 +25,7 @@ import {
} from '@/hooks/kb/use-knowledge-base-tag-definitions'
import { useNextAvailableSlot } from '@/hooks/kb/use-next-available-slot'
import { type TagDefinitionInput, useTagDefinitions } from '@/hooks/kb/use-tag-definitions'
import { useUpdateDocumentTags } from '@/hooks/queries/kb/knowledge'
import { useUpdateDocumentTags } from '@/hooks/queries/knowledge'

const logger = createLogger('DocumentTagsModal')

@@ -18,7 +18,7 @@ import {
import type { ChunkData, DocumentData } from '@/lib/knowledge/types'
import { getAccurateTokenCount, getTokenStrings } from '@/lib/tokenization/estimators'
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
import { useUpdateChunk } from '@/hooks/queries/kb/knowledge'
import { useUpdateChunk } from '@/hooks/queries/knowledge'

const logger = createLogger('EditChunkModal')

@@ -4,7 +4,6 @@ import { startTransition, useCallback, useEffect, useRef, useState } from 'react
import { createLogger } from '@sim/logger'
import { useQueryClient } from '@tanstack/react-query'
import {
  ChevronDown,
  ChevronLeft,
  ChevronRight,
  Circle,
@@ -25,10 +24,6 @@ import {
  ModalContent,
  ModalFooter,
  ModalHeader,
  Popover,
  PopoverContent,
  PopoverItem,
  PopoverTrigger,
  Table,
  TableBody,
  TableCell,
@@ -60,7 +55,7 @@ import {
  useDeleteDocument,
  useDocumentChunkSearchQuery,
  useUpdateChunk,
} from '@/hooks/queries/kb/knowledge'
} from '@/hooks/queries/knowledge'

const logger = createLogger('Document')

@@ -261,8 +256,6 @@ export function Document({

  const [searchQuery, setSearchQuery] = useState('')
  const [debouncedSearchQuery, setDebouncedSearchQuery] = useState('')
  const [enabledFilter, setEnabledFilter] = useState<'all' | 'enabled' | 'disabled'>('all')
  const [isFilterPopoverOpen, setIsFilterPopoverOpen] = useState(false)

  const {
    chunks: initialChunks,
@@ -275,7 +268,7 @@ export function Document({
    refreshChunks: initialRefreshChunks,
    updateChunk: initialUpdateChunk,
    isFetching: isFetchingChunks,
  } = useDocumentChunks(knowledgeBaseId, documentId, currentPageFromURL, '', enabledFilter)
  } = useDocumentChunks(knowledgeBaseId, documentId, currentPageFromURL)

  const {
    data: searchResults = [],
@@ -697,103 +690,47 @@ export function Document({
      </div>

      <div className='mt-[14px] flex items-center justify-between'>
        <div className='flex items-center gap-[8px]'>
          <div className='flex h-[32px] w-[400px] items-center gap-[6px] rounded-[8px] bg-[var(--surface-4)] px-[8px]'>
            <Search className='h-[14px] w-[14px] text-[var(--text-subtle)]' />
            <Input
              placeholder={
                documentData?.processingStatus === 'completed'
                  ? 'Search chunks...'
                  : 'Document processing...'
              }
              value={searchQuery}
              onChange={(e) => setSearchQuery(e.target.value)}
              disabled={documentData?.processingStatus !== 'completed'}
              className='flex-1 border-0 bg-transparent px-0 font-medium text-[var(--text-secondary)] text-small leading-none placeholder:text-[var(--text-subtle)] focus-visible:ring-0 focus-visible:ring-offset-0'
            />
            {searchQuery &&
              (isLoadingSearch ? (
                <Loader2 className='h-[14px] w-[14px] animate-spin text-[var(--text-subtle)]' />
              ) : (
                <button
                  onClick={() => setSearchQuery('')}
                  className='text-[var(--text-subtle)] transition-colors hover:text-[var(--text-secondary)]'
                >
                  <X className='h-[14px] w-[14px]' />
                </button>
              ))}
          </div>
        </div>

        <div className='flex items-center gap-[8px]'>
          <Popover open={isFilterPopoverOpen} onOpenChange={setIsFilterPopoverOpen}>
            <PopoverTrigger asChild>
              <Button variant='default' className='h-[32px] rounded-[6px]'>
                {enabledFilter === 'all'
                  ? 'Status'
                  : enabledFilter === 'enabled'
                    ? 'Enabled'
                    : 'Disabled'}
                <ChevronDown className='ml-2 h-4 w-4 text-muted-foreground' />
              </Button>
            </PopoverTrigger>
            <PopoverContent align='end' side='bottom' sideOffset={4}>
              <div className='flex flex-col gap-[2px]'>
                <PopoverItem
                  active={enabledFilter === 'all'}
                  onClick={() => {
                    setEnabledFilter('all')
                    setIsFilterPopoverOpen(false)
                    setSelectedChunks(new Set())
                    goToPage(1)
                  }}
                >
                  All
                </PopoverItem>
                <PopoverItem
                  active={enabledFilter === 'enabled'}
                  onClick={() => {
                    setEnabledFilter('enabled')
                    setIsFilterPopoverOpen(false)
                    setSelectedChunks(new Set())
                    goToPage(1)
                  }}
                >
                  Enabled
                </PopoverItem>
                <PopoverItem
                  active={enabledFilter === 'disabled'}
                  onClick={() => {
                    setEnabledFilter('disabled')
                    setIsFilterPopoverOpen(false)
                    setSelectedChunks(new Set())
                    goToPage(1)
                  }}
                >
                  Disabled
                </PopoverItem>
              </div>
            </PopoverContent>
          </Popover>

          <Tooltip.Root>
            <Tooltip.Trigger asChild>
              <Button
                onClick={() => setIsCreateChunkModalOpen(true)}
                disabled={
                  documentData?.processingStatus === 'failed' || !userPermissions.canEdit
                }
                variant='tertiary'
                className='h-[32px] rounded-[6px]'
        <div className='flex h-[32px] w-[400px] items-center gap-[6px] rounded-[8px] bg-[var(--surface-4)] px-[8px]'>
          <Search className='h-[14px] w-[14px] text-[var(--text-subtle)]' />
          <Input
            placeholder={
              documentData?.processingStatus === 'completed'
                ? 'Search chunks...'
                : 'Document processing...'
            }
            value={searchQuery}
            onChange={(e) => setSearchQuery(e.target.value)}
            disabled={documentData?.processingStatus !== 'completed'}
            className='flex-1 border-0 bg-transparent px-0 font-medium text-[var(--text-secondary)] text-small leading-none placeholder:text-[var(--text-subtle)] focus-visible:ring-0 focus-visible:ring-offset-0'
          />
          {searchQuery &&
            (isLoadingSearch ? (
              <Loader2 className='h-[14px] w-[14px] animate-spin text-[var(--text-subtle)]' />
            ) : (
              <button
                onClick={() => setSearchQuery('')}
                className='text-[var(--text-subtle)] transition-colors hover:text-[var(--text-secondary)]'
              >
                Create Chunk
              </Button>
            </Tooltip.Trigger>
            {!userPermissions.canEdit && (
              <Tooltip.Content>Write permission required to create chunks</Tooltip.Content>
            )}
          </Tooltip.Root>
                <X className='h-[14px] w-[14px]' />
              </button>
            ))}
        </div>

        <Tooltip.Root>
          <Tooltip.Trigger asChild>
            <Button
              onClick={() => setIsCreateChunkModalOpen(true)}
              disabled={documentData?.processingStatus === 'failed' || !userPermissions.canEdit}
              variant='tertiary'
              className='h-[32px] rounded-[6px]'
            >
              Create Chunk
            </Button>
          </Tooltip.Trigger>
          {!userPermissions.canEdit && (
            <Tooltip.Content>Write permission required to create chunks</Tooltip.Content>
          )}
        </Tooltip.Root>
      </div>

      <div
@@ -1122,6 +1059,7 @@ export function Document({
        isLoading={isBulkOperating}
      />

      {/* Delete Document Modal */}
      <Modal open={showDeleteDocumentDialog} onOpenChange={setShowDeleteDocumentDialog}>
        <ModalContent size='sm'>
          <ModalHeader>Delete Document</ModalHeader>
@@ -1134,14 +1072,7 @@ export function Document({
            ? This will permanently delete the document and all {documentData?.chunkCount ?? 0}{' '}
            chunk
            {documentData?.chunkCount === 1 ? '' : 's'} within it.{' '}
            {documentData?.connectorId ? (
              <span className='text-[var(--text-error)]'>
                This document is synced from a connector. Deleting it will permanently exclude it
                from future syncs. To temporarily hide it from search, disable it instead.
              </span>
            ) : (
              <span className='text-[var(--text-error)]'>This action cannot be undone.</span>
            )}
            <span className='text-[var(--text-error)]'>This action cannot be undone.</span>
          </p>
        </ModalBody>
        <ModalFooter>

@@ -1,6 +1,6 @@
'use client'

import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
import { useCallback, useEffect, useRef, useState } from 'react'
import { createLogger } from '@sim/logger'
import { format } from 'date-fns'
import {
@@ -11,9 +11,7 @@ import {
  ChevronUp,
  Circle,
  CircleOff,
  Filter,
  Loader2,
  Plus,
  RotateCcw,
  Search,
  X,
@@ -24,11 +22,6 @@ import {
  Breadcrumb,
  Button,
  Checkbox,
  Combobox,
  type ComboboxOption,
  DatePicker,
  Input,
  Label,
  Modal,
  ModalBody,
  ModalContent,
@@ -47,28 +40,25 @@ import {
  Tooltip,
  Trash,
} from '@/components/emcn'
import { Input } from '@/components/ui/input'
import { SearchHighlight } from '@/components/ui/search-highlight'
import { Skeleton } from '@/components/ui/skeleton'
import { cn } from '@/lib/core/utils/cn'
import { formatAbsoluteDate, formatRelativeTime } from '@/lib/core/utils/formatting'
import { ALL_TAG_SLOTS, type AllTagSlot, getFieldTypeForSlot } from '@/lib/knowledge/constants'
import type { DocumentSortField, SortOrder } from '@/lib/knowledge/documents/types'
import { type FilterFieldType, getOperatorsForFieldType } from '@/lib/knowledge/filters/types'
import type { DocumentData } from '@/lib/knowledge/types'
import { formatFileSize } from '@/lib/uploads/utils/file-utils'
import {
  ActionBar,
  AddConnectorModal,
  AddDocumentsModal,
  BaseTagsModal,
  ConnectorsSection,
  DocumentContextMenu,
  RenameDocumentModal,
} from '@/app/workspace/[workspaceId]/knowledge/[id]/components'
import { getDocumentIcon } from '@/app/workspace/[workspaceId]/knowledge/components'
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
import { useContextMenu } from '@/app/workspace/[workspaceId]/w/components/sidebar/hooks'
import { CONNECTOR_REGISTRY } from '@/connectors/registry'
import {
  useKnowledgeBase,
  useKnowledgeBaseDocuments,
@@ -78,14 +68,12 @@ import {
  type TagDefinition,
  useKnowledgeBaseTagDefinitions,
} from '@/hooks/kb/use-knowledge-base-tag-definitions'
import { useConnectorList } from '@/hooks/queries/kb/connectors'
import type { DocumentTagFilter } from '@/hooks/queries/kb/knowledge'
import {
  useBulkDocumentOperation,
  useDeleteDocument,
  useDeleteKnowledgeBase,
  useUpdateDocument,
} from '@/hooks/queries/kb/knowledge'
} from '@/hooks/queries/knowledge'

const logger = createLogger('KnowledgeBase')

@@ -357,32 +345,6 @@ export function KnowledgeBase({
  const [showTagsModal, setShowTagsModal] = useState(false)
  const [enabledFilter, setEnabledFilter] = useState<'all' | 'enabled' | 'disabled'>('all')
  const [isFilterPopoverOpen, setIsFilterPopoverOpen] = useState(false)
  const [isTagFilterPopoverOpen, setIsTagFilterPopoverOpen] = useState(false)
  const [tagFilterEntries, setTagFilterEntries] = useState<
    {
      id: string
      tagName: string
      tagSlot: string
      fieldType: FilterFieldType
      operator: string
      value: string
      valueTo: string
    }[]
  >([])

  const activeTagFilters: DocumentTagFilter[] = useMemo(
    () =>
      tagFilterEntries
        .filter((f) => f.tagSlot && f.value.trim())
        .map((f) => ({
          tagSlot: f.tagSlot,
          fieldType: f.fieldType,
          operator: f.operator,
          value: f.value,
          ...(f.operator === 'between' && f.valueTo ? { valueTo: f.valueTo } : {}),
        })),
    [tagFilterEntries]
  )

  /**
   * Memoize the search query setter to prevent unnecessary re-renders
@@ -405,7 +367,6 @@ export function KnowledgeBase({
  const [contextMenuDocument, setContextMenuDocument] = useState<DocumentData | null>(null)
  const [showRenameModal, setShowRenameModal] = useState(false)
  const [documentToRename, setDocumentToRename] = useState<DocumentData | null>(null)
  const [showAddConnectorModal, setShowAddConnectorModal] = useState(false)

  const {
    isOpen: isContextMenuOpen,
@@ -446,23 +407,10 @@ export function KnowledgeBase({
      return hasPending ? 3000 : false
    },
    enabledFilter,
    tagFilters: activeTagFilters.length > 0 ? activeTagFilters : undefined,
  })

  const { tagDefinitions } = useKnowledgeBaseTagDefinitions(id)

  const { data: connectors = [], isLoading: isLoadingConnectors } = useConnectorList(id)
  const hasSyncingConnectors = connectors.some((c) => c.status === 'syncing')

  /** Refresh KB detail when connectors transition from syncing to done */
  const prevHadSyncingRef = useRef(false)
  useEffect(() => {
    if (prevHadSyncingRef.current && !hasSyncingConnectors) {
      refreshKnowledgeBase()
    }
    prevHadSyncingRef.current = hasSyncingConnectors
  }, [hasSyncingConnectors, refreshKnowledgeBase])

  const router = useRouter()

  const knowledgeBaseName = knowledgeBase?.name || passedKnowledgeBaseName || 'Knowledge Base'
@@ -1055,14 +1003,6 @@ export function KnowledgeBase({
        )}
      </div>

      <ConnectorsSection
        knowledgeBaseId={id}
        connectors={connectors}
        isLoading={isLoadingConnectors}
        canEdit={userPermissions.canEdit}
        onAddConnector={() => setShowAddConnectorModal(true)}
      />

      <div className='mt-[16px] flex items-center gap-[8px]'>
        <span className='text-[14px] text-[var(--text-muted)]'>
          {pagination.total} {pagination.total === 1 ? 'document' : 'documents'}
@@ -1109,7 +1049,7 @@ export function KnowledgeBase({
          <PopoverTrigger asChild>
            <Button variant='default' className='h-[32px] rounded-[6px]'>
              {enabledFilter === 'all'
                ? 'Status'
                ? 'All'
                : enabledFilter === 'enabled'
                  ? 'Enabled'
                  : 'Disabled'}
@@ -1158,19 +1098,6 @@ export function KnowledgeBase({
          </PopoverContent>
        </Popover>

        <TagFilterPopover
          tagDefinitions={tagDefinitions}
          entries={tagFilterEntries}
          isOpen={isTagFilterPopoverOpen}
          onOpenChange={setIsTagFilterPopoverOpen}
          onChange={(entries) => {
            setTagFilterEntries(entries)
            setCurrentPage(1)
            setSelectedDocuments(new Set())
            setIsSelectAllMode(false)
          }}
        />

        <Tooltip.Root>
          <Tooltip.Trigger asChild>
            <Button
@@ -1211,14 +1138,14 @@ export function KnowledgeBase({
          <p className='font-medium text-[var(--text-secondary)] text-sm'>
            {searchQuery
              ? 'No documents found'
              : enabledFilter !== 'all' || activeTagFilters.length > 0
              : enabledFilter !== 'all'
                ? 'Nothing matches your filter'
                : 'No documents yet'}
          </p>
          <p className='mt-1 text-[var(--text-muted)] text-xs'>
            {searchQuery
              ? 'Try a different search term'
              : enabledFilter !== 'all' || activeTagFilters.length > 0
              : enabledFilter !== 'all'
                ? 'Try changing the filter'
                : userPermissions.canEdit === true
                  ? 'Add documents to get started'
@@ -1291,12 +1218,6 @@ export function KnowledgeBase({
            <TableCell className='w-[180px] max-w-[180px] px-[12px] py-[8px]'>
              <div className='flex min-w-0 items-center gap-[8px]'>
                {(() => {
                  const ConnectorIcon = doc.connectorType
                    ? CONNECTOR_REGISTRY[doc.connectorType]?.icon
                    : null
                  if (ConnectorIcon) {
                    return <ConnectorIcon className='h-5 w-5 flex-shrink-0' />
                  }
                  const IconComponent = getDocumentIcon(doc.mimeType, doc.filename)
                  return <IconComponent className='h-6 w-5 flex-shrink-0' />
                })()}
@@ -1568,26 +1489,13 @@ export function KnowledgeBase({
        <ModalContent size='sm'>
          <ModalHeader>Delete Document</ModalHeader>
          <ModalBody>
            {(() => {
              const docToDelete = documents.find((doc) => doc.id === documentToDelete)
              return (
                <p className='text-[12px] text-[var(--text-secondary)]'>
                  Are you sure you want to delete{' '}
                  <span className='font-medium text-[var(--text-primary)]'>
                    {docToDelete?.filename ?? 'this document'}
                  </span>
                  ?{' '}
                  {docToDelete?.connectorId ? (
                    <span className='text-[var(--text-error)]'>
                      This document is synced from a connector. Deleting it will permanently exclude
                      it from future syncs. To temporarily hide it from search, disable it instead.
                    </span>
                  ) : (
                    <span className='text-[var(--text-error)]'>This action cannot be undone.</span>
                  )}
                </p>
              )
            })()}
            <p className='text-[12px] text-[var(--text-secondary)]'>
              Are you sure you want to delete{' '}
              <span className='font-medium text-[var(--text-primary)]'>
                {documents.find((doc) => doc.id === documentToDelete)?.filename ?? 'this document'}
              </span>
              ? <span className='text-[var(--text-error)]'>This action cannot be undone.</span>
            </p>
          </ModalBody>
          <ModalFooter>
            <Button
@@ -1637,11 +1545,6 @@ export function KnowledgeBase({
        chunkingConfig={knowledgeBase?.chunkingConfig}
      />

      {/* Add Connector Modal — conditionally rendered so it remounts fresh each open */}
      {showAddConnectorModal && (
        <AddConnectorModal open onOpenChange={setShowAddConnectorModal} knowledgeBaseId={id} />
      )}

      {/* Rename Document Modal */}
      {documentToRename && (
        <RenameDocumentModal
@@ -1700,11 +1603,6 @@ export function KnowledgeBase({
          }
            : undefined
        }
        onOpenSource={
          contextMenuDocument?.sourceUrl && selectedDocuments.size === 1
            ? () => window.open(contextMenuDocument.sourceUrl!, '_blank', 'noopener,noreferrer')
            : undefined
        }
        onRename={
          contextMenuDocument && selectedDocuments.size === 1 && userPermissions.canEdit
            ? () => handleRenameDocument(contextMenuDocument)
@@ -1757,224 +1655,3 @@ export function KnowledgeBase({
    </div>
  )
}

interface TagFilterEntry {
  id: string
  tagName: string
  tagSlot: string
  fieldType: FilterFieldType
  operator: string
  value: string
  valueTo: string
}

const createEmptyEntry = (): TagFilterEntry => ({
  id: crypto.randomUUID(),
  tagName: '',
  tagSlot: '',
  fieldType: 'text',
  operator: 'eq',
  value: '',
  valueTo: '',
})

interface TagFilterPopoverProps {
  tagDefinitions: TagDefinition[]
  entries: TagFilterEntry[]
  isOpen: boolean
  onOpenChange: (open: boolean) => void
  onChange: (entries: TagFilterEntry[]) => void
}

function TagFilterPopover({
  tagDefinitions,
  entries,
  isOpen,
  onOpenChange,
  onChange,
}: TagFilterPopoverProps) {
  const activeCount = entries.filter((f) => f.tagSlot && f.value.trim()).length

  const tagOptions: ComboboxOption[] = tagDefinitions.map((t) => ({
    value: t.displayName,
    label: t.displayName,
  }))

  const filtersToShow = useMemo(
    () => (entries.length > 0 ? entries : [createEmptyEntry()]),
    [entries]
  )

  const updateEntry = (id: string, patch: Partial<TagFilterEntry>) => {
    const existing = filtersToShow.find((e) => e.id === id)
    if (!existing) return
    const updated = filtersToShow.map((e) => (e.id === id ? { ...e, ...patch } : e))
    onChange(updated)
  }

  const handleTagChange = (id: string, tagName: string) => {
    const def = tagDefinitions.find((t) => t.displayName === tagName)
    const fieldType = (def?.fieldType || 'text') as FilterFieldType
    const operators = getOperatorsForFieldType(fieldType)
    updateEntry(id, {
      tagName,
      tagSlot: def?.tagSlot || '',
      fieldType,
      operator: operators[0]?.value || 'eq',
      value: '',
      valueTo: '',
    })
  }

  const addFilter = () => {
    onChange([...filtersToShow, createEmptyEntry()])
  }

  const removeFilter = (id: string) => {
    const remaining = filtersToShow.filter((e) => e.id !== id)
    onChange(remaining.length > 0 ? remaining : [])
  }

  return (
    <Popover open={isOpen} onOpenChange={onOpenChange}>
      <PopoverTrigger asChild>
        <Button variant='default' className='h-[32px] rounded-[6px]'>
          <Filter className='mr-1.5 h-3.5 w-3.5' />
          Tags
          <ChevronDown className='ml-2 h-4 w-4 text-muted-foreground' />
        </Button>
      </PopoverTrigger>
      <PopoverContent align='end' side='bottom' sideOffset={4} className='w-[320px] p-0'>
        <div className='flex flex-col'>
          <div className='flex items-center justify-between border-[var(--border-1)] border-b px-[12px] py-[8px]'>
            <span className='font-medium text-[12px] text-[var(--text-secondary)]'>
              Filter by tags
            </span>
            <div className='flex items-center gap-[4px]'>
              {activeCount > 0 && (
                <Button
                  variant='ghost'
                  className='h-auto px-[6px] py-[2px] text-[11px] text-[var(--text-muted)]'
                  onClick={() => onChange([])}
                >
                  Clear all
                </Button>
              )}
              <Button variant='ghost' className='h-auto p-0' onClick={addFilter}>
                <Plus className='h-3.5 w-3.5' />
              </Button>
            </div>
          </div>

          <div className='flex max-h-[320px] flex-col gap-[8px] overflow-y-auto p-[12px]'>
            {filtersToShow.map((entry) => {
              const operators = getOperatorsForFieldType(entry.fieldType)
              const operatorOptions: ComboboxOption[] = operators.map((op) => ({
                value: op.value,
                label: op.label,
              }))
              const isBetween = entry.operator === 'between'

              return (
                <div
                  key={entry.id}
                  className='flex flex-col gap-[6px] rounded-[6px] border border-[var(--border-1)] p-[8px]'
                >
                  <div className='flex items-center justify-between'>
                    <Label className='text-[11px] text-[var(--text-muted)]'>Tag</Label>
                    <button
                      type='button'
                      onClick={() => removeFilter(entry.id)}
                      className='text-[var(--text-muted)] transition-colors hover:text-[var(--text-error)]'
                    >
                      <X className='h-3 w-3' />
                    </button>
                  </div>
                  <Combobox
                    options={tagOptions}
                    value={entry.tagName}
                    onChange={(v) => handleTagChange(entry.id, v)}
                    placeholder='Select tag'
                  />

                  {entry.tagSlot && (
                    <>
                      <Label className='text-[11px] text-[var(--text-muted)]'>Operator</Label>
                      <Combobox
                        options={operatorOptions}
                        value={entry.operator}
                        onChange={(v) => updateEntry(entry.id, { operator: v, valueTo: '' })}
                        placeholder='Select operator'
                      />

                      <Label className='text-[11px] text-[var(--text-muted)]'>Value</Label>
                      {entry.fieldType === 'date' ? (
                        isBetween ? (
                          <div className='flex items-center gap-[6px]'>
                            <DatePicker
                              size='sm'
                              value={entry.value || undefined}
                              onChange={(v) => updateEntry(entry.id, { value: v })}
                              placeholder='From'
                            />
                            <span className='flex-shrink-0 text-[11px] text-[var(--text-muted)]'>
                              to
                            </span>
                            <DatePicker
                              size='sm'
                              value={entry.valueTo || undefined}
                              onChange={(v) => updateEntry(entry.id, { valueTo: v })}
                              placeholder='To'
                            />
                          </div>
                        ) : (
                          <DatePicker
                            size='sm'
                            value={entry.value || undefined}
                            onChange={(v) => updateEntry(entry.id, { value: v })}
                            placeholder='Select date'
                          />
                        )
                      ) : isBetween ? (
                        <div className='flex items-center gap-[6px]'>
                          <Input
                            value={entry.value}
                            onChange={(e) => updateEntry(entry.id, { value: e.target.value })}
                            placeholder='From'
                            className='h-[28px] text-[12px]'
                          />
                          <span className='flex-shrink-0 text-[11px] text-[var(--text-muted)]'>
                            to
                          </span>
                          <Input
                            value={entry.valueTo}
                            onChange={(e) => updateEntry(entry.id, { valueTo: e.target.value })}
                            placeholder='To'
                            className='h-[28px] text-[12px]'
                          />
                        </div>
                      ) : (
                        <Input
                          value={entry.value}
                          onChange={(e) => updateEntry(entry.id, { value: e.target.value })}
                          placeholder={
                            entry.fieldType === 'boolean'
                              ? 'true or false'
                              : entry.fieldType === 'number'
                                ? 'Enter number'
                                : 'Enter value'
                          }
                          className='h-[28px] text-[12px]'
                        />
                      )}
                    </>
                  )}
                </div>
              )
            })}
          </div>
        </div>
      </PopoverContent>
    </Popover>
  )
}

@@ -1,348 +0,0 @@
'use client'

import { useMemo, useState } from 'react'
import { ArrowLeft, Loader2, Plus } from 'lucide-react'
import {
  Button,
  ButtonGroup,
  ButtonGroupItem,
  Checkbox,
  Combobox,
  type ComboboxOption,
  Input,
  Label,
  Modal,
  ModalBody,
  ModalContent,
  ModalFooter,
  ModalHeader,
} from '@/components/emcn'
import {
  getCanonicalScopesForProvider,
  getProviderIdFromServiceId,
  type OAuthProvider,
} from '@/lib/oauth'
import { OAuthRequiredModal } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/credential-selector/components/oauth-required-modal'
import { CONNECTOR_REGISTRY } from '@/connectors/registry'
import type { ConnectorConfig } from '@/connectors/types'
import { useCreateConnector } from '@/hooks/queries/kb/connectors'
import { useOAuthCredentials } from '@/hooks/queries/oauth/oauth-credentials'

const SYNC_INTERVALS = [
  { label: 'Every hour', value: 60 },
  { label: 'Every 6 hours', value: 360 },
  { label: 'Daily', value: 1440 },
  { label: 'Weekly', value: 10080 },
  { label: 'Manual only', value: 0 },
] as const

interface AddConnectorModalProps {
  open: boolean
  onOpenChange: (open: boolean) => void
  knowledgeBaseId: string
}

type Step = 'select-type' | 'configure'

export function AddConnectorModal({ open, onOpenChange, knowledgeBaseId }: AddConnectorModalProps) {
  const [step, setStep] = useState<Step>('select-type')
  const [selectedType, setSelectedType] = useState<string | null>(null)
  const [sourceConfig, setSourceConfig] = useState<Record<string, string>>({})
  const [syncInterval, setSyncInterval] = useState(1440)
  const [selectedCredentialId, setSelectedCredentialId] = useState<string | null>(null)
  const [disabledTagIds, setDisabledTagIds] = useState<Set<string>>(new Set())
  const [error, setError] = useState<string | null>(null)
  const [showOAuthModal, setShowOAuthModal] = useState(false)

  const { mutate: createConnector, isPending: isCreating } = useCreateConnector()

  const connectorConfig = selectedType ? CONNECTOR_REGISTRY[selectedType] : null
  const connectorProviderId = useMemo(
    () =>
      connectorConfig
        ? (getProviderIdFromServiceId(connectorConfig.oauth.provider) as OAuthProvider)
        : null,
    [connectorConfig]
  )

  const { data: credentials = [], isLoading: credentialsLoading } = useOAuthCredentials(
    connectorProviderId ?? undefined,
    Boolean(connectorConfig)
  )

  const effectiveCredentialId =
    selectedCredentialId ?? (credentials.length === 1 ? credentials[0].id : null)

  const handleSelectType = (type: string) => {
    setSelectedType(type)
    setStep('configure')
  }

  const canSubmit = useMemo(() => {
    if (!connectorConfig || !effectiveCredentialId) return false
    return connectorConfig.configFields
      .filter((f) => f.required)
      .every((f) => sourceConfig[f.id]?.trim())
  }, [connectorConfig, effectiveCredentialId, sourceConfig])

  const handleSubmit = () => {
    if (!selectedType || !effectiveCredentialId || !canSubmit) return

    setError(null)

    const finalSourceConfig =
      disabledTagIds.size > 0
        ? { ...sourceConfig, disabledTagIds: Array.from(disabledTagIds) }
        : sourceConfig

    createConnector(
      {
        knowledgeBaseId,
        connectorType: selectedType,
        credentialId: effectiveCredentialId,
        sourceConfig: finalSourceConfig,
        syncIntervalMinutes: syncInterval,
      },
      {
        onSuccess: () => {
          onOpenChange(false)
        },
        onError: (err) => {
          setError(err.message)
        },
      }
    )
  }

  const connectorEntries = Object.entries(CONNECTOR_REGISTRY)

  return (
    <>
      <Modal open={open} onOpenChange={(val) => !isCreating && onOpenChange(val)}>
        <ModalContent size='md'>
          <ModalHeader>
            {step === 'configure' && (
              <Button
                variant='ghost'
                className='mr-2 h-6 w-6 p-0'
                onClick={() => setStep('select-type')}
              >
                <ArrowLeft className='h-4 w-4' />
              </Button>
            )}
            {step === 'select-type' ? 'Connect Source' : `Configure ${connectorConfig?.name}`}
          </ModalHeader>

          <ModalBody>
            {step === 'select-type' ? (
              <div className='flex flex-col gap-[8px]'>
                {connectorEntries.map(([type, config]) => (
                  <ConnectorTypeCard
                    key={type}
                    config={config}
                    onClick={() => handleSelectType(type)}
                  />
                ))}
                {connectorEntries.length === 0 && (
                  <p className='text-[13px] text-[var(--text-muted)]'>No connectors available.</p>
                )}
              </div>
            ) : connectorConfig ? (
              <div className='flex flex-col gap-[12px]'>
                {/* Credential selection */}
                <div className='flex flex-col gap-[4px]'>
                  <Label>Account</Label>
                  {credentialsLoading ? (
                    <div className='flex items-center gap-2 text-[13px] text-[var(--text-muted)]'>
                      <Loader2 className='h-4 w-4 animate-spin' />
                      Loading credentials...
                    </div>
                  ) : (
                    <Combobox
                      size='sm'
                      options={[
                        ...credentials.map(
                          (cred): ComboboxOption => ({
                            label: cred.name || cred.provider,
                            value: cred.id,
                            icon: connectorConfig.icon,
                          })
                        ),
                        {
                          label: 'Connect new account',
                          value: '__connect_new__',
                          icon: Plus,
                          onSelect: () => {
                            setShowOAuthModal(true)
                          },
                        },
                      ]}
                      value={effectiveCredentialId ?? undefined}
                      onChange={(value) => setSelectedCredentialId(value)}
                      placeholder={
                        credentials.length === 0
                          ? `No ${connectorConfig.name} accounts`
                          : 'Select account'
                      }
                    />
                  )}
                </div>

                {/* Config fields */}
                {connectorConfig.configFields.map((field) => (
                  <div key={field.id} className='flex flex-col gap-[4px]'>
                    <Label>
                      {field.title}
                      {field.required && (
                        <span className='ml-[2px] text-[var(--text-error)]'>*</span>
                      )}
                    </Label>
                    {field.description && (
                      <p className='text-[11px] text-[var(--text-muted)]'>{field.description}</p>
                    )}
                    {field.type === 'dropdown' && field.options ? (
                      <Combobox
                        size='sm'
                        options={field.options.map((opt) => ({
                          label: opt.label,
                          value: opt.id,
                        }))}
                        value={sourceConfig[field.id] || undefined}
                        onChange={(value) =>
                          setSourceConfig((prev) => ({ ...prev, [field.id]: value }))
                        }
                        placeholder={field.placeholder || `Select ${field.title.toLowerCase()}`}
                      />
                    ) : (
                      <Input
                        value={sourceConfig[field.id] || ''}
                        onChange={(e) =>
                          setSourceConfig((prev) => ({ ...prev, [field.id]: e.target.value }))
                        }
                        placeholder={field.placeholder}
                      />
                    )}
                  </div>
                ))}

                {/* Tag definitions (opt-out) */}
                {connectorConfig.tagDefinitions && connectorConfig.tagDefinitions.length > 0 && (
                  <div className='flex flex-col gap-[4px]'>
                    <Label>Metadata Tags</Label>
                    {connectorConfig.tagDefinitions.map((tagDef) => (
                      <div
                        key={tagDef.id}
                        className='flex cursor-pointer items-center gap-[8px] rounded-[4px] px-[2px] py-[2px] text-[13px]'
                        onClick={() => {
                          setDisabledTagIds((prev) => {
                            const next = new Set(prev)
                            if (prev.has(tagDef.id)) {
                              next.delete(tagDef.id)
                            } else {
                              next.add(tagDef.id)
                            }
                            return next
                          })
                        }}
                      >
                        <Checkbox
                          checked={!disabledTagIds.has(tagDef.id)}
                          onCheckedChange={(checked) => {
                            setDisabledTagIds((prev) => {
                              const next = new Set(prev)
                              if (checked) {
                                next.delete(tagDef.id)
                              } else {
                                next.add(tagDef.id)
                              }
                              return next
                            })
                          }}
                        />
                        <span className='text-[var(--text-primary)]'>{tagDef.displayName}</span>
                        <span className='text-[11px] text-[var(--text-muted)]'>
                          ({tagDef.fieldType})
                        </span>
                      </div>
                    ))}
                  </div>
                )}

                {/* Sync interval */}
                <div className='flex flex-col gap-[4px]'>
                  <Label>Sync Frequency</Label>
                  <ButtonGroup
                    value={String(syncInterval)}
                    onValueChange={(val) => setSyncInterval(Number(val))}
                  >
                    {SYNC_INTERVALS.map((interval) => (
                      <ButtonGroupItem key={interval.value} value={String(interval.value)}>
                        {interval.label}
                      </ButtonGroupItem>
                    ))}
                  </ButtonGroup>
                </div>

                {error && (
                  <p className='text-[12px] text-[var(--text-error)] leading-tight'>{error}</p>
                )}
              </div>
            ) : null}
          </ModalBody>

          {step === 'configure' && (
            <ModalFooter>
              <Button variant='default' onClick={() => onOpenChange(false)} disabled={isCreating}>
                Cancel
              </Button>
              <Button variant='tertiary' onClick={handleSubmit} disabled={!canSubmit || isCreating}>
                {isCreating ? (
                  <>
                    <Loader2 className='mr-1.5 h-3.5 w-3.5 animate-spin' />
                    Connecting...
                  </>
                ) : (
                  'Connect & Sync'
                )}
              </Button>
            </ModalFooter>
          )}
        </ModalContent>
      </Modal>
      {connectorConfig && connectorProviderId && (
        <OAuthRequiredModal
          isOpen={showOAuthModal}
          onClose={() => setShowOAuthModal(false)}
          provider={connectorProviderId}
          toolName={connectorConfig.name}
          requiredScopes={getCanonicalScopesForProvider(connectorProviderId)}
          newScopes={connectorConfig.oauth.requiredScopes || []}
          serviceId={connectorConfig.oauth.provider}
        />
      )}
    </>
  )
}

interface ConnectorTypeCardProps {
  config: ConnectorConfig
  onClick: () => void
}

function ConnectorTypeCard({ config, onClick }: ConnectorTypeCardProps) {
  const Icon = config.icon

  return (
    <button
      type='button'
      className='flex items-center gap-[12px] rounded-[8px] border border-[var(--border-1)] px-[14px] py-[12px] text-left transition-colors hover:bg-[var(--surface-2)]'
      onClick={onClick}
    >
      <Icon className='h-6 w-6 flex-shrink-0' />
      <div className='flex flex-col gap-[2px]'>
        <span className='font-medium text-[14px] text-[var(--text-primary)]'>{config.name}</span>
        <span className='text-[12px] text-[var(--text-muted)]'>{config.description}</span>
      </div>
    </button>
  )
}

@@ -22,7 +22,7 @@ import {
  type TagDefinition,
  useKnowledgeBaseTagDefinitions,
} from '@/hooks/kb/use-knowledge-base-tag-definitions'
import { useCreateTagDefinition, useDeleteTagDefinition } from '@/hooks/queries/kb/knowledge'
import { useCreateTagDefinition, useDeleteTagDefinition } from '@/hooks/queries/knowledge'

const logger = createLogger('BaseTagsModal')

@@ -1,537 +0,0 @@
'use client'

import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
import { createLogger } from '@sim/logger'
import { format, formatDistanceToNow } from 'date-fns'
import {
  AlertCircle,
  CheckCircle2,
  ChevronDown,
  Loader2,
  Pause,
  Play,
  RefreshCw,
  Settings,
  Trash,
  Unplug,
  XCircle,
} from 'lucide-react'
import {
  Badge,
  Button,
  Modal,
  ModalBody,
  ModalContent,
  ModalFooter,
  ModalHeader,
  Tooltip,
} from '@/components/emcn'
import { cn } from '@/lib/core/utils/cn'
import {
  getCanonicalScopesForProvider,
  getProviderIdFromServiceId,
  type OAuthProvider,
} from '@/lib/oauth'
import { EditConnectorModal } from '@/app/workspace/[workspaceId]/knowledge/[id]/components/edit-connector-modal/edit-connector-modal'
import { OAuthRequiredModal } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/credential-selector/components/oauth-required-modal'
import { CONNECTOR_REGISTRY } from '@/connectors/registry'
import type { ConnectorData, SyncLogData } from '@/hooks/queries/kb/connectors'
import {
  useConnectorDetail,
  useDeleteConnector,
  useTriggerSync,
  useUpdateConnector,
} from '@/hooks/queries/kb/connectors'
import { useOAuthCredentials } from '@/hooks/queries/oauth/oauth-credentials'
import { getMissingRequiredScopes } from '@/hooks/use-oauth-scope-status'

const logger = createLogger('ConnectorsSection')

interface ConnectorsSectionProps {
  knowledgeBaseId: string
  connectors: ConnectorData[]
  isLoading: boolean
  canEdit: boolean
  onAddConnector: () => void
}

/** 5-minute cooldown after a manual sync trigger */
const SYNC_COOLDOWN_MS = 5 * 60 * 1000

const STATUS_CONFIG = {
  active: { label: 'Active', variant: 'green' as const },
  syncing: { label: 'Syncing', variant: 'amber' as const },
  error: { label: 'Error', variant: 'red' as const },
  paused: { label: 'Paused', variant: 'gray' as const },
} as const

export function ConnectorsSection({
  knowledgeBaseId,
  connectors,
  isLoading,
  canEdit,
  onAddConnector,
}: ConnectorsSectionProps) {
  const { mutate: triggerSync, isPending: isSyncing } = useTriggerSync()
  const { mutate: updateConnector } = useUpdateConnector()
  const { mutate: deleteConnector } = useDeleteConnector()
  const [deleteTarget, setDeleteTarget] = useState<string | null>(null)
  const [editingConnector, setEditingConnector] = useState<ConnectorData | null>(null)
  const [error, setError] = useState<string | null>(null)

  const syncTriggeredAt = useRef<Record<string, number>>({})
  const cooldownTimers = useRef<Set<ReturnType<typeof setTimeout>>>(new Set())
  const [, forceUpdate] = useState(0)

  useEffect(() => {
    return () => {
      for (const timer of cooldownTimers.current) {
        clearTimeout(timer)
      }
    }
  }, [])

  const isSyncOnCooldown = useCallback((connectorId: string) => {
    const triggeredAt = syncTriggeredAt.current[connectorId]
    if (!triggeredAt) return false
    return Date.now() - triggeredAt < SYNC_COOLDOWN_MS
  }, [])

  const handleSync = useCallback(
    (connectorId: string) => {
      if (isSyncOnCooldown(connectorId)) return

      syncTriggeredAt.current[connectorId] = Date.now()

      triggerSync(
        { knowledgeBaseId, connectorId },
        {
          onSuccess: () => {
            setError(null)
            const timer = setTimeout(() => {
              cooldownTimers.current.delete(timer)
              forceUpdate((n) => n + 1)
            }, SYNC_COOLDOWN_MS)
            cooldownTimers.current.add(timer)
          },
          onError: (err) => {
            logger.error('Sync trigger failed', { error: err.message })
            setError(err.message)
            delete syncTriggeredAt.current[connectorId]
            forceUpdate((n) => n + 1)
          },
        }
      )
    },
    [knowledgeBaseId, triggerSync, isSyncOnCooldown]
  )

  if (isLoading) return null
  if (connectors.length === 0 && !canEdit) return null

  return (
    <div className='mt-[16px]'>
      <div className='flex items-center justify-between'>
        <h2 className='font-medium text-[14px] text-[var(--text-secondary)]'>Connected Sources</h2>
        {canEdit && (
          <Button
            variant='default'
            className='h-[28px] rounded-[6px] text-[12px]'
            onClick={onAddConnector}
          >
            <Unplug className='mr-1 h-3.5 w-3.5' />
            Connect Source
          </Button>
        )}
      </div>

      {error && (
        <p className='mt-[8px] text-[12px] text-[var(--text-error)] leading-tight'>{error}</p>
      )}

      {connectors.length === 0 ? (
        <p className='mt-[8px] text-[13px] text-[var(--text-muted)]'>
          No connected sources yet. Connect an external source to automatically sync documents.
        </p>
      ) : (
        <div className='mt-[8px] flex flex-col gap-[8px]'>
          {connectors.map((connector) => (
            <ConnectorCard
              key={connector.id}
              connector={connector}
              knowledgeBaseId={knowledgeBaseId}
              canEdit={canEdit}
              isSyncing={isSyncing}
              syncCooldown={isSyncOnCooldown(connector.id)}
              onSync={() => handleSync(connector.id)}
              onTogglePause={() =>
                updateConnector(
                  {
                    knowledgeBaseId,
                    connectorId: connector.id,
                    updates: {
                      status: connector.status === 'paused' ? 'active' : 'paused',
                    },
                  },
                  {
                    onSuccess: () => setError(null),
                    onError: (err) => {
                      logger.error('Toggle pause failed', { error: err.message })
                      setError(err.message)
                    },
                  }
                )
              }
              onEdit={() => setEditingConnector(connector)}
              onDelete={() => setDeleteTarget(connector.id)}
            />
          ))}
        </div>
      )}

      {editingConnector && (
        <EditConnectorModal
          open={editingConnector !== null}
          onOpenChange={(val) => !val && setEditingConnector(null)}
          knowledgeBaseId={knowledgeBaseId}
          connector={editingConnector}
        />
      )}

      <Modal open={deleteTarget !== null} onOpenChange={() => setDeleteTarget(null)}>
        <ModalContent size='sm'>
          <ModalHeader>Delete Connector</ModalHeader>
          <ModalBody>
            <p className='text-[14px] text-[var(--text-secondary)]'>
              Are you sure you want to remove this connected source? Documents already synced will
              remain in the knowledge base.
            </p>
          </ModalBody>
          <ModalFooter>
            <Button variant='default' onClick={() => setDeleteTarget(null)}>
              Cancel
            </Button>
            <Button
              variant='destructive'
              onClick={() => {
                if (deleteTarget) {
                  deleteConnector(
                    { knowledgeBaseId, connectorId: deleteTarget },
                    {
                      onSuccess: () => setError(null),
                      onError: (err) => {
                        logger.error('Delete connector failed', { error: err.message })
                        setError(err.message)
                      },
                    }
                  )
                  setDeleteTarget(null)
                }
              }}
            >
              Delete
            </Button>
          </ModalFooter>
        </ModalContent>
      </Modal>
    </div>
  )
}

interface ConnectorCardProps {
  connector: ConnectorData
  knowledgeBaseId: string
  canEdit: boolean
  isSyncing: boolean
  syncCooldown: boolean
  onSync: () => void
  onEdit: () => void
  onTogglePause: () => void
  onDelete: () => void
}

function ConnectorCard({
  connector,
  knowledgeBaseId,
  canEdit,
  isSyncing,
  syncCooldown,
  onSync,
  onEdit,
  onTogglePause,
  onDelete,
}: ConnectorCardProps) {
  const [expanded, setExpanded] = useState(false)
  const [showOAuthModal, setShowOAuthModal] = useState(false)

  const connectorDef = CONNECTOR_REGISTRY[connector.connectorType]
  const Icon = connectorDef?.icon
  const statusConfig =
    STATUS_CONFIG[connector.status as keyof typeof STATUS_CONFIG] || STATUS_CONFIG.active

  const serviceId = connectorDef?.oauth.provider
  const providerId = serviceId ? getProviderIdFromServiceId(serviceId) : undefined
  const requiredScopes = connectorDef?.oauth.requiredScopes ?? []

  const { data: credentials } = useOAuthCredentials(providerId)

  const missingScopes = useMemo(() => {
    if (!credentials || !connector.credentialId) return []
    const credential = credentials.find((c) => c.id === connector.credentialId)
    return getMissingRequiredScopes(credential, requiredScopes)
  }, [credentials, connector.credentialId, requiredScopes])

  const { data: detail, isLoading: detailLoading } = useConnectorDetail(
    expanded ? knowledgeBaseId : undefined,
    expanded ? connector.id : undefined
  )
  const syncLogs = detail?.syncLogs ?? []

  return (
    <div className='rounded-[8px] border border-[var(--border-1)]'>
      <div className='flex items-center justify-between px-[12px] py-[10px]'>
        <div className='flex items-center gap-[10px]'>
          {Icon && <Icon className='h-5 w-5 flex-shrink-0' />}
          <div className='flex flex-col gap-[2px]'>
            <div className='flex items-center gap-[8px]'>
              <span className='font-medium text-[13px] text-[var(--text-primary)]'>
                {connectorDef?.name || connector.connectorType}
              </span>
              <Badge variant={statusConfig.variant} className='text-[10px]'>
                {connector.status === 'syncing' && (
                  <Loader2 className='mr-1 h-3 w-3 animate-spin' />
                )}
                {statusConfig.label}
              </Badge>
            </div>
            <div className='flex items-center gap-[6px] text-[11px] text-[var(--text-muted)]'>
              {connector.lastSyncAt && (
                <span>Last sync: {format(new Date(connector.lastSyncAt), 'MMM d, h:mm a')}</span>
              )}
              {connector.lastSyncDocCount !== null && (
                <>
                  <span>·</span>
                  <span>{connector.lastSyncDocCount} docs</span>
                </>
              )}
              {connector.nextSyncAt && connector.status === 'active' && (
                <>
                  <span>·</span>
                  <span>
                    Next sync:{' '}
                    {formatDistanceToNow(new Date(connector.nextSyncAt), { addSuffix: true })}
                  </span>
                </>
              )}
              {connector.lastSyncError && (
                <Tooltip.Root>
                  <Tooltip.Trigger asChild>
                    <AlertCircle className='h-3 w-3 text-[var(--text-error)]' />
                  </Tooltip.Trigger>
                  <Tooltip.Content>{connector.lastSyncError}</Tooltip.Content>
                </Tooltip.Root>
              )}
            </div>
          </div>
        </div>

        <div className='flex items-center gap-[4px]'>
          {canEdit && (
            <>
              <Tooltip.Root>
                <Tooltip.Trigger asChild>
                  <Button
                    variant='ghost'
                    className='h-7 w-7 p-0'
                    onClick={onSync}
                    disabled={connector.status === 'syncing' || isSyncing || syncCooldown}
                  >
                    <RefreshCw
                      className={cn(
                        'h-3.5 w-3.5',
                        connector.status === 'syncing' && 'animate-spin'
                      )}
                    />
                  </Button>
                </Tooltip.Trigger>
                <Tooltip.Content>
                  {syncCooldown ? 'Sync recently triggered' : 'Sync now'}
                </Tooltip.Content>
              </Tooltip.Root>

              <Tooltip.Root>
                <Tooltip.Trigger asChild>
                  <Button variant='ghost' className='h-7 w-7 p-0' onClick={onEdit}>
                    <Settings className='h-3.5 w-3.5' />
                  </Button>
                </Tooltip.Trigger>
                <Tooltip.Content>Settings</Tooltip.Content>
              </Tooltip.Root>

              <Tooltip.Root>
                <Tooltip.Trigger asChild>
                  <Button variant='ghost' className='h-7 w-7 p-0' onClick={onTogglePause}>
                    {connector.status === 'paused' ? (
                      <Play className='h-3.5 w-3.5' />
                    ) : (
                      <Pause className='h-3.5 w-3.5' />
                    )}
                  </Button>
                </Tooltip.Trigger>
                <Tooltip.Content>
                  {connector.status === 'paused' ? 'Resume' : 'Pause'}
                </Tooltip.Content>
              </Tooltip.Root>

              <Tooltip.Root>
                <Tooltip.Trigger asChild>
                  <Button variant='ghost' className='h-7 w-7 p-0' onClick={onDelete}>
                    <Trash className='h-3.5 w-3.5' />
                  </Button>
                </Tooltip.Trigger>
                <Tooltip.Content>Delete</Tooltip.Content>
              </Tooltip.Root>
            </>
          )}

          <Tooltip.Root>
            <Tooltip.Trigger asChild>
              <Button
                variant='ghost'
                className='h-7 w-7 p-0'
                onClick={() => setExpanded((prev) => !prev)}
              >
                <ChevronDown
                  className={cn('h-3.5 w-3.5 transition-transform', expanded && 'rotate-180')}
                />
              </Button>
            </Tooltip.Trigger>
            <Tooltip.Content>{expanded ? 'Hide history' : 'Sync history'}</Tooltip.Content>
          </Tooltip.Root>
        </div>
      </div>

      {missingScopes.length > 0 && (
        <div className='border-[var(--border-1)] border-t px-[12px] py-[8px]'>
          <div className='flex flex-col gap-[4px] rounded-[4px] border bg-[var(--surface-2)] px-[8px] py-[6px]'>
            <div className='flex items-center font-medium text-[12px]'>
              <span className='mr-[6px] inline-block h-[6px] w-[6px] rounded-[2px] bg-amber-500' />
              Additional permissions required
            </div>
            {canEdit && (
              <Button
                variant='active'
                onClick={() => setShowOAuthModal(true)}
                className='w-full px-[8px] py-[4px] font-medium text-[12px]'
              >
                Update access
              </Button>
            )}
          </div>
        </div>
      )}

      {expanded && (
        <div className='border-[var(--border-1)] border-t px-[12px] py-[8px]'>
          <SyncHistory logs={syncLogs} isLoading={detailLoading} />
        </div>
      )}

      {showOAuthModal && serviceId && providerId && (
        <OAuthRequiredModal
          isOpen={showOAuthModal}
          onClose={() => setShowOAuthModal(false)}
          provider={providerId as OAuthProvider}
          toolName={connectorDef?.name ?? connector.connectorType}
          requiredScopes={getCanonicalScopesForProvider(providerId)}
          newScopes={missingScopes}
          serviceId={serviceId}
        />
      )}
    </div>
  )
}

interface SyncHistoryProps {
  logs: SyncLogData[]
  isLoading: boolean
}

function SyncHistory({ logs, isLoading }: SyncHistoryProps) {
  if (isLoading) {
    return (
      <div className='flex items-center gap-2 py-[4px] text-[11px] text-[var(--text-muted)]'>
        <Loader2 className='h-3 w-3 animate-spin' />
        Loading sync history...
      </div>
    )
  }

  if (logs.length === 0) {
    return <p className='py-[4px] text-[11px] text-[var(--text-muted)]'>No sync history yet.</p>
  }

  return (
    <div className='flex flex-col gap-[6px]'>
      {logs.map((log) => {
        const isError = log.status === 'error' || log.status === 'failed'
        const isRunning = log.status === 'running' || log.status === 'syncing'
        const totalChanges = log.docsAdded + log.docsUpdated + log.docsDeleted

        return (
          <div key={log.id} className='flex items-start gap-[8px] text-[11px]'>
            <div className='mt-[1px] flex-shrink-0'>
              {isRunning ? (
                <Loader2 className='h-3 w-3 animate-spin text-[var(--text-muted)]' />
              ) : isError ? (
                <XCircle className='h-3 w-3 text-[var(--text-error)]' />
              ) : (
                <CheckCircle2 className='h-3 w-3 text-[var(--color-green-600)]' />
              )}
            </div>

            <div className='flex min-w-0 flex-1 flex-col gap-[1px]'>
              <div className='flex items-center gap-[6px]'>
                <span className='text-[var(--text-muted)]'>
                  {format(new Date(log.startedAt), 'MMM d, h:mm a')}
                </span>
                {!isRunning && !isError && (
                  <span className='text-[var(--text-muted)]'>
                    {totalChanges > 0 ? (
                      <>
                        {log.docsAdded > 0 && (
                          <span className='text-[var(--color-green-600)]'>+{log.docsAdded}</span>
                        )}
                        {log.docsUpdated > 0 && (
                          <>
                            {log.docsAdded > 0 && ' '}
                            <span className='text-[var(--color-amber-600)]'>
                              ~{log.docsUpdated}
                            </span>
                          </>
                        )}
                        {log.docsDeleted > 0 && (
                          <>
                            {(log.docsAdded > 0 || log.docsUpdated > 0) && ' '}
                            <span className='text-[var(--text-error)]'>-{log.docsDeleted}</span>
                          </>
                        )}
                      </>
                    ) : (
                      'No changes'
                    )}
                  </span>
                )}
                {isRunning && <span className='text-[var(--text-muted)]'>In progress...</span>}
              </div>

              {isError && log.errorMessage && (
                <span className='truncate text-[var(--text-error)]'>{log.errorMessage}</span>
              )}
            </div>
          </div>
        )
      })}
    </div>
  )
}

@@ -17,7 +17,6 @@ interface DocumentContextMenuProps {
   * Document-specific actions (shown when right-clicking on a document)
   */
  onOpenInNewTab?: () => void
  onOpenSource?: () => void
  onRename?: () => void
  onToggleEnabled?: () => void
  onViewTags?: () => void
@@ -75,7 +74,6 @@ export function DocumentContextMenu({
  menuRef,
  onClose,
  onOpenInNewTab,
  onOpenSource,
  onRename,
  onToggleEnabled,
  onViewTags,
@@ -131,17 +129,7 @@ export function DocumentContextMenu({
          Open in new tab
        </PopoverItem>
      )}
      {!isMultiSelect && onOpenSource && (
        <PopoverItem
          onClick={() => {
            onOpenSource()
            onClose()
          }}
        >
          Open source
        </PopoverItem>
      )}
      {!isMultiSelect && (onOpenInNewTab || onOpenSource) && <PopoverDivider />}
      {!isMultiSelect && onOpenInNewTab && <PopoverDivider />}

      {/* Edit and view actions */}
      {!isMultiSelect && onRename && (
@@ -182,7 +170,6 @@ export function DocumentContextMenu({
      {/* Destructive action */}
      {onDelete &&
        ((!isMultiSelect && onOpenInNewTab) ||
          (!isMultiSelect && onOpenSource) ||
          (!isMultiSelect && onRename) ||
          (!isMultiSelect && hasTags && onViewTags) ||
          onToggleEnabled) && <PopoverDivider />}

@@ -1,339 +0,0 @@
'use client'

import { useMemo, useState } from 'react'
import { createLogger } from '@sim/logger'
import { ExternalLink, Loader2, RotateCcw } from 'lucide-react'
import {
  Button,
  ButtonGroup,
  ButtonGroupItem,
  Combobox,
  Input,
  Label,
  Modal,
  ModalBody,
  ModalContent,
  ModalFooter,
  ModalHeader,
  ModalTabs,
  ModalTabsContent,
  ModalTabsList,
  ModalTabsTrigger,
} from '@/components/emcn'
import { Skeleton } from '@/components/ui/skeleton'
import { CONNECTOR_REGISTRY } from '@/connectors/registry'
import type { ConnectorConfig } from '@/connectors/types'
import type { ConnectorData } from '@/hooks/queries/kb/connectors'
import {
  useConnectorDocuments,
  useExcludeConnectorDocument,
  useRestoreConnectorDocument,
  useUpdateConnector,
} from '@/hooks/queries/kb/connectors'

const logger = createLogger('EditConnectorModal')

const SYNC_INTERVALS = [
  { label: 'Every hour', value: 60 },
  { label: 'Every 6 hours', value: 360 },
  { label: 'Daily', value: 1440 },
  { label: 'Weekly', value: 10080 },
  { label: 'Manual only', value: 0 },
] as const

/** Keys injected by the sync engine — not user-editable */
const INTERNAL_CONFIG_KEYS = new Set(['tagSlotMapping', 'disabledTagIds'])

interface EditConnectorModalProps {
  open: boolean
  onOpenChange: (open: boolean) => void
  knowledgeBaseId: string
  connector: ConnectorData
}

export function EditConnectorModal({
  open,
  onOpenChange,
  knowledgeBaseId,
  connector,
}: EditConnectorModalProps) {
  const connectorConfig = CONNECTOR_REGISTRY[connector.connectorType] ?? null

  const initialSourceConfig = useMemo(() => {
    const config: Record<string, string> = {}
    for (const [key, value] of Object.entries(connector.sourceConfig)) {
      if (!INTERNAL_CONFIG_KEYS.has(key)) {
        config[key] = String(value ?? '')
      }
    }
    return config
  }, [connector.sourceConfig])

  const [activeTab, setActiveTab] = useState('settings')
  const [sourceConfig, setSourceConfig] = useState<Record<string, string>>(initialSourceConfig)
  const [syncInterval, setSyncInterval] = useState(connector.syncIntervalMinutes)
  const [error, setError] = useState<string | null>(null)

  const { mutate: updateConnector, isPending: isSaving } = useUpdateConnector()

  const hasChanges = useMemo(() => {
    if (syncInterval !== connector.syncIntervalMinutes) return true
    for (const [key, value] of Object.entries(sourceConfig)) {
      if (String(connector.sourceConfig[key] ?? '') !== value) return true
    }
    return false
  }, [sourceConfig, syncInterval, connector.syncIntervalMinutes, connector.sourceConfig])

  const handleSave = () => {
    setError(null)

    const updates: { sourceConfig?: Record<string, unknown>; syncIntervalMinutes?: number } = {}

    if (syncInterval !== connector.syncIntervalMinutes) {
      updates.syncIntervalMinutes = syncInterval
    }

    const configChanged = Object.entries(sourceConfig).some(
      ([key, value]) => String(connector.sourceConfig[key] ?? '') !== value
    )
    if (configChanged) {
      updates.sourceConfig = { ...connector.sourceConfig, ...sourceConfig }
    }

    if (Object.keys(updates).length === 0) {
      onOpenChange(false)
      return
    }

    updateConnector(
      { knowledgeBaseId, connectorId: connector.id, updates },
      {
        onSuccess: () => {
          onOpenChange(false)
        },
        onError: (err) => {
          logger.error('Failed to update connector', { error: err.message })
          setError(err.message)
        },
      }
    )
  }

  const displayName = connectorConfig?.name ?? connector.connectorType
  const Icon = connectorConfig?.icon

  return (
    <Modal open={open} onOpenChange={(val) => !isSaving && onOpenChange(val)}>
      <ModalContent size='md'>
        <ModalHeader>
          <div className='flex items-center gap-2'>
            {Icon && <Icon className='h-5 w-5' />}
            Edit {displayName}
          </div>
        </ModalHeader>

        <ModalTabs value={activeTab} onValueChange={setActiveTab}>
          <ModalTabsList>
            <ModalTabsTrigger value='settings'>Settings</ModalTabsTrigger>
            <ModalTabsTrigger value='documents'>Documents</ModalTabsTrigger>
          </ModalTabsList>

          <ModalBody>
            <ModalTabsContent value='settings'>
              <SettingsTab
                connectorConfig={connectorConfig}
                sourceConfig={sourceConfig}
                setSourceConfig={setSourceConfig}
                syncInterval={syncInterval}
                setSyncInterval={setSyncInterval}
                error={error}
              />
            </ModalTabsContent>

            <ModalTabsContent value='documents'>
              <DocumentsTab knowledgeBaseId={knowledgeBaseId} connectorId={connector.id} />
            </ModalTabsContent>
          </ModalBody>
        </ModalTabs>

        {activeTab === 'settings' && (
          <ModalFooter>
            <Button variant='default' onClick={() => onOpenChange(false)} disabled={isSaving}>
              Cancel
            </Button>
            <Button variant='tertiary' onClick={handleSave} disabled={!hasChanges || isSaving}>
              {isSaving ? (
                <>
                  <Loader2 className='mr-1.5 h-3.5 w-3.5 animate-spin' />
                  Saving...
                </>
              ) : (
                'Save'
              )}
            </Button>
          </ModalFooter>
        )}
      </ModalContent>
    </Modal>
  )
}

interface SettingsTabProps {
  connectorConfig: ConnectorConfig | null
  sourceConfig: Record<string, string>
  setSourceConfig: React.Dispatch<React.SetStateAction<Record<string, string>>>
  syncInterval: number
  setSyncInterval: (v: number) => void
  error: string | null
}

function SettingsTab({
  connectorConfig,
  sourceConfig,
  setSourceConfig,
  syncInterval,
  setSyncInterval,
  error,
}: SettingsTabProps) {
  return (
    <div className='flex flex-col gap-[12px]'>
      {connectorConfig?.configFields.map((field) => (
        <div key={field.id} className='flex flex-col gap-[4px]'>
          <Label>
            {field.title}
            {field.required && <span className='ml-[2px] text-[var(--text-error)]'>*</span>}
          </Label>
          {field.description && (
            <p className='text-[11px] text-[var(--text-muted)]'>{field.description}</p>
          )}
          {field.type === 'dropdown' && field.options ? (
            <Combobox
              size='sm'
              options={field.options.map((opt) => ({
                label: opt.label,
                value: opt.id,
              }))}
              value={sourceConfig[field.id] || undefined}
              onChange={(value) => setSourceConfig((prev) => ({ ...prev, [field.id]: value }))}
              placeholder={field.placeholder || `Select ${field.title.toLowerCase()}`}
            />
          ) : (
            <Input
              value={sourceConfig[field.id] || ''}
              onChange={(e) => setSourceConfig((prev) => ({ ...prev, [field.id]: e.target.value }))}
              placeholder={field.placeholder}
            />
          )}
        </div>
      ))}

      <div className='flex flex-col gap-[4px]'>
        <Label>Sync Frequency</Label>
        <ButtonGroup
          value={String(syncInterval)}
          onValueChange={(val) => setSyncInterval(Number(val))}
        >
          {SYNC_INTERVALS.map((interval) => (
            <ButtonGroupItem key={interval.value} value={String(interval.value)}>
              {interval.label}
            </ButtonGroupItem>
          ))}
        </ButtonGroup>
      </div>

      {error && <p className='text-[12px] text-[var(--text-error)] leading-tight'>{error}</p>}
    </div>
  )
}

interface DocumentsTabProps {
  knowledgeBaseId: string
  connectorId: string
}

function DocumentsTab({ knowledgeBaseId, connectorId }: DocumentsTabProps) {
  const [filter, setFilter] = useState<'active' | 'excluded'>('active')

  const { data, isLoading } = useConnectorDocuments(knowledgeBaseId, connectorId, {
    includeExcluded: true,
  })

  const { mutate: excludeDoc, isPending: isExcluding } = useExcludeConnectorDocument()
  const { mutate: restoreDoc, isPending: isRestoring } = useRestoreConnectorDocument()

  const documents = useMemo(() => {
    if (!data?.documents) return []
    return data.documents.filter((d) => (filter === 'excluded' ? d.userExcluded : !d.userExcluded))
  }, [data?.documents, filter])

  const counts = data?.counts ?? { active: 0, excluded: 0 }

  if (isLoading) {
    return (
      <div className='flex flex-col gap-[8px]'>
        <Skeleton className='h-6 w-full' />
        <Skeleton className='h-6 w-full' />
        <Skeleton className='h-6 w-full' />
      </div>
    )
  }

  return (
    <div className='flex flex-col gap-[16px]'>
      <ButtonGroup value={filter} onValueChange={(val) => setFilter(val as 'active' | 'excluded')}>
        <ButtonGroupItem value='active'>Active ({counts.active})</ButtonGroupItem>
        <ButtonGroupItem value='excluded'>Excluded ({counts.excluded})</ButtonGroupItem>
      </ButtonGroup>

      <div className='max-h-[320px] min-h-0 overflow-y-auto'>
        {documents.length === 0 ? (
          <p className='py-[16px] text-center text-[13px] text-[var(--text-muted)]'>
            {filter === 'excluded' ? 'No excluded documents' : 'No documents yet'}
          </p>
        ) : (
          <div className='flex flex-col gap-[8px]'>
            {documents.map((doc) => (
              <div key={doc.id} className='flex items-center justify-between'>
                <div className='flex min-w-0 items-center gap-[6px]'>
                  <span className='truncate text-[13px] text-[var(--text-primary)]'>
                    {doc.filename}
                  </span>
                  {doc.sourceUrl && (
                    <a
                      href={doc.sourceUrl}
                      target='_blank'
                      rel='noopener noreferrer'
                      className='flex-shrink-0 text-[var(--text-muted)] hover:text-[var(--text-secondary)]'
                    >
                      <ExternalLink className='h-3 w-3' />
                    </a>
                  )}
                </div>
                <Button
                  variant='ghost'
                  size='sm'
                  className='flex-shrink-0'
                  disabled={doc.userExcluded ? isRestoring : isExcluding}
                  onClick={() =>
                    doc.userExcluded
                      ? restoreDoc({ knowledgeBaseId, connectorId, documentIds: [doc.id] })
                      : excludeDoc({ knowledgeBaseId, connectorId, documentIds: [doc.id] })
                  }
                >
                  {doc.userExcluded ? (
                    <>
                      <RotateCcw className='mr-1 h-3 w-3' />
                      Restore
                    </>
                  ) : (
                    'Exclude'
                  )}
                </Button>
              </div>
            ))}
          </div>
        )}
      </div>
    </div>
  )
}
@@ -1 +0,0 @@
export { EditConnectorModal } from './edit-connector-modal'

@@ -1,8 +1,5 @@
export { ActionBar } from './action-bar/action-bar'
export { AddConnectorModal } from './add-connector-modal/add-connector-modal'
export { AddDocumentsModal } from './add-documents-modal/add-documents-modal'
export { BaseTagsModal } from './base-tags-modal/base-tags-modal'
export { ConnectorsSection } from './connectors-section/connectors-section'
export { DocumentContextMenu } from './document-context-menu'
export { EditConnectorModal } from './edit-connector-modal/edit-connector-modal'
export { RenameDocumentModal } from './rename-document-modal'

@@ -7,7 +7,6 @@ import { formatAbsoluteDate, formatRelativeTime } from '@/lib/core/utils/formatt
import { BaseTagsModal } from '@/app/workspace/[workspaceId]/knowledge/[id]/components'
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
import { useContextMenu } from '@/app/workspace/[workspaceId]/w/components/sidebar/hooks'
import { CONNECTOR_REGISTRY } from '@/connectors/registry'
import { DeleteKnowledgeBaseModal } from '../delete-knowledge-base-modal/delete-knowledge-base-modal'
import { EditKnowledgeBaseModal } from '../edit-knowledge-base-modal/edit-knowledge-base-modal'
import { KnowledgeBaseContextMenu } from '../knowledge-base-context-menu/knowledge-base-context-menu'

@@ -19,7 +18,6 @@ interface BaseCardProps {
  description: string
  createdAt?: string
  updatedAt?: string
  connectorTypes?: string[]
  onUpdate?: (id: string, name: string, description: string) => Promise<void>
  onDelete?: (id: string) => Promise<void>
}

@@ -77,7 +75,6 @@ export function BaseCard({
  docCount,
  description,
  updatedAt,
  connectorTypes = [],
  onUpdate,
  onDelete,
}: BaseCardProps) {

@@ -203,33 +200,9 @@ export function BaseCard({

          <div className='h-0 w-full border-[var(--divider)] border-t' />

          <div className='flex items-start justify-between gap-[8px]'>
            <p className='line-clamp-2 h-[36px] flex-1 text-[12px] text-[var(--text-tertiary)] leading-[18px]'>
              {description}
            </p>
            {connectorTypes.length > 0 && (
              <div className='flex flex-shrink-0 items-center'>
                {connectorTypes.map((type, index) => {
                  const config = CONNECTOR_REGISTRY[type]
                  if (!config?.icon) return null
                  const Icon = config.icon
                  return (
                    <Tooltip.Root key={type}>
                      <Tooltip.Trigger asChild>
                        <div
                          className='flex h-[20px] w-[20px] flex-shrink-0 items-center justify-center rounded-[4px] bg-[var(--surface-5)]'
                          style={{ marginLeft: index > 0 ? '-4px' : '0' }}
                        >
                          <Icon className='h-[12px] w-[12px] text-[var(--text-secondary)]' />
                        </div>
                      </Tooltip.Trigger>
                      <Tooltip.Content>{config.name}</Tooltip.Content>
                    </Tooltip.Root>
                  )
                })}
              </div>
            )}
          </div>
          <p className='line-clamp-2 h-[36px] text-[12px] text-[var(--text-tertiary)] leading-[18px]'>
            {description}
          </p>
        </div>
      </div>
    </div>

@@ -22,7 +22,7 @@ import { cn } from '@/lib/core/utils/cn'
import { formatFileSize, validateKnowledgeBaseFile } from '@/lib/uploads/utils/file-utils'
import { ACCEPT_ATTRIBUTE } from '@/lib/uploads/utils/validation'
import { useKnowledgeUpload } from '@/app/workspace/[workspaceId]/knowledge/hooks/use-knowledge-upload'
import { useCreateKnowledgeBase, useDeleteKnowledgeBase } from '@/hooks/queries/kb/knowledge'
import { useCreateKnowledgeBase, useDeleteKnowledgeBase } from '@/hooks/queries/knowledge'

const logger = createLogger('CreateBaseModal')

@@ -14,7 +14,7 @@ import {
} from '@/components/emcn'
import { Trash } from '@/components/emcn/icons/trash'
import { filterButtonClass } from '@/app/workspace/[workspaceId]/knowledge/components/constants'
import { useUpdateKnowledgeBase } from '@/hooks/queries/kb/knowledge'
import { useUpdateKnowledgeBase } from '@/hooks/queries/knowledge'

const logger = createLogger('KnowledgeHeader')

@@ -2,7 +2,7 @@ import { useCallback, useState } from 'react'
import { createLogger } from '@sim/logger'
import { useQueryClient } from '@tanstack/react-query'
import { getFileExtension, getMimeTypeFromExtension } from '@/lib/uploads/utils/file-utils'
import { knowledgeKeys } from '@/hooks/queries/kb/knowledge'
import { knowledgeKeys } from '@/hooks/queries/knowledge'

const logger = createLogger('KnowledgeUpload')

@@ -32,7 +32,7 @@ import {
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
import { useContextMenu } from '@/app/workspace/[workspaceId]/w/components/sidebar/hooks'
import { useKnowledgeBasesList } from '@/hooks/kb/use-knowledge'
import { useDeleteKnowledgeBase, useUpdateKnowledgeBase } from '@/hooks/queries/kb/knowledge'
import { useDeleteKnowledgeBase, useUpdateKnowledgeBase } from '@/hooks/queries/knowledge'
import { useDebounce } from '@/hooks/use-debounce'

const logger = createLogger('Knowledge')

@@ -153,7 +153,6 @@ export function Knowledge() {
    description: kb.description || 'No description provided',
    createdAt: kb.createdAt,
    updatedAt: kb.updatedAt,
    connectorTypes: kb.connectorTypes ?? [],
  })

  /**

@@ -284,7 +283,6 @@ export function Knowledge() {
            title={displayData.title}
            docCount={displayData.docCount}
            description={displayData.description}
            connectorTypes={displayData.connectorTypes}
            createdAt={displayData.createdAt}
            updatedAt={displayData.updatedAt}
            onUpdate={handleUpdateKnowledgeBase}

@@ -32,10 +32,7 @@ import {
  useTestNotification,
  useUpdateNotification,
} from '@/hooks/queries/notifications'
import {
  useConnectedAccounts,
  useConnectOAuthService,
} from '@/hooks/queries/oauth/oauth-connections'
import { useConnectedAccounts, useConnectOAuthService } from '@/hooks/queries/oauth-connections'
import { CORE_TRIGGER_TYPES, type CoreTriggerType } from '@/stores/logs/filters/types'
import { SlackChannelSelector } from './components/slack-channel-selector'
import { WorkflowSelector } from './components/workflow-selector'

@@ -131,10 +131,8 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(({ panelWidth }, ref
    resumeActiveStream,
  })

  // Handle scroll management (80px stickiness for copilot)
  const { scrollAreaRef, scrollToBottom } = useScrollManagement(messages, isSendingMessage, {
    stickinessThreshold: 40,
  })
  // Handle scroll management
  const { scrollAreaRef, scrollToBottom } = useScrollManagement(messages, isSendingMessage)

  // Handle chat history grouping
  const { groupedChats, handleHistoryDropdownOpen: handleHistoryDropdownOpenHook } = useChatHistory(

@@ -1,5 +1,5 @@
import { memo, useCallback, useEffect, useMemo, useRef, useState } from 'react'
import { isEqual } from 'lodash'
import isEqual from 'lodash/isEqual'
import { useReactFlow } from 'reactflow'
import { useStoreWithEqualityFn } from 'zustand/traditional'
import { Combobox, type ComboboxOption } from '@/components/emcn/components'

@@ -95,7 +95,6 @@ const SCOPE_DESCRIPTIONS: Record<string, string> = {
  'offline.access': 'Access account when not using the application',
  'data.records:read': 'Read records',
  'data.records:write': 'Write to records',
  'schema.bases:read': 'Read base schemas and table metadata',
  'webhook:manage': 'Manage webhooks',
  'page.read': 'Read Notion pages',
  'page.write': 'Write to Notion pages',

@@ -20,10 +20,7 @@ import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/c
import type { SubBlockConfig } from '@/blocks/types'
import { CREDENTIAL, CREDENTIAL_SET } from '@/executor/constants'
import { useCredentialSets } from '@/hooks/queries/credential-sets'
import {
  useOAuthCredentialDetail,
  useOAuthCredentials,
} from '@/hooks/queries/oauth/oauth-credentials'
import { useOAuthCredentialDetail, useOAuthCredentials } from '@/hooks/queries/oauth-credentials'
import { useOrganizations } from '@/hooks/queries/organization'
import { useSubscriptionData } from '@/hooks/queries/subscription'
import { getMissingRequiredScopes } from '@/hooks/use-oauth-scope-status'

@@ -1,5 +1,5 @@
import { memo, useCallback, useEffect, useMemo, useRef, useState } from 'react'
import { isEqual } from 'lodash'
import isEqual from 'lodash/isEqual'
import { useStoreWithEqualityFn } from 'zustand/traditional'
import { Badge } from '@/components/emcn'
import { Combobox, type ComboboxOption } from '@/components/emcn/components'

@@ -10,7 +10,7 @@ import type { KnowledgeBaseData } from '@/lib/knowledge/types'
import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-sub-block-value'
import type { SubBlockConfig } from '@/blocks/types'
import { useKnowledgeBasesList } from '@/hooks/kb/use-knowledge'
import { fetchKnowledgeBase, knowledgeKeys } from '@/hooks/queries/kb/knowledge'
import { fetchKnowledgeBase, knowledgeKeys } from '@/hooks/queries/knowledge'

interface KnowledgeBaseSelectorProps {
  blockId: string

@@ -7,7 +7,7 @@ import {
  useRef,
  useState,
} from 'react'
import { isEqual } from 'lodash'
import isEqual from 'lodash/isEqual'
import { ChevronDown, ChevronsUpDown, ChevronUp, Plus } from 'lucide-react'
import { Button, Popover, PopoverContent, PopoverItem, PopoverTrigger } from '@/components/emcn'
import { Trash } from '@/components/emcn/icons/trash'

@@ -12,10 +12,7 @@ import {
} from '@/lib/oauth'
import { OAuthRequiredModal } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/credential-selector/components/oauth-required-modal'
import { CREDENTIAL } from '@/executor/constants'
import {
  useOAuthCredentialDetail,
  useOAuthCredentials,
} from '@/hooks/queries/oauth/oauth-credentials'
import { useOAuthCredentialDetail, useOAuthCredentials } from '@/hooks/queries/oauth-credentials'
import { getMissingRequiredScopes } from '@/hooks/use-oauth-scope-status'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'

@@ -1,5 +1,5 @@
import { type JSX, type MouseEvent, memo, useCallback, useRef, useState } from 'react'
import { isEqual } from 'lodash'
import isEqual from 'lodash/isEqual'
import { AlertTriangle, ArrowLeftRight, ArrowUp, Check, Clipboard } from 'lucide-react'
import { Button, Input, Label, Tooltip } from '@/components/emcn/components'
import { cn } from '@/lib/core/utils/cn'

@@ -1,7 +1,7 @@
'use client'

import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
import { isEqual } from 'lodash'
import isEqual from 'lodash/isEqual'
import {
  BookOpen,
  Check,

@@ -10,40 +10,6 @@ import { ActionBar } from '@/app/workspace/[workspaceId]/w/[workflowId]/componen
import { useCurrentWorkflow } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks'
import { usePanelEditorStore } from '@/stores/panel'

/**
 * Global styles for subflow nodes (loop and parallel containers).
 * Includes animations for drag-over states and hover effects.
 *
 * @returns Style component with global CSS
 */
const SubflowNodeStyles: React.FC = () => {
  return (
    <style jsx global>{`
      /* Z-index management for subflow nodes - default behind blocks */
      .workflow-container .react-flow__node-subflowNode {
        z-index: -1 !important;
      }

      /* Selected subflows appear above other subflows but below blocks (z-21) */
      .workflow-container .react-flow__node-subflowNode:has([data-subflow-selected='true']) {
        z-index: 10 !important;
      }

      /* Drag-over states */
      .loop-node-drag-over,
      .parallel-node-drag-over {
        box-shadow: 0 0 0 1.75px var(--brand-secondary) !important;
        border-radius: 8px !important;
      }

      /* Handle z-index for nested nodes */
      .react-flow__node[data-parent-node-id] .react-flow__handle {
        z-index: 30;
      }
    `}</style>
  )
}

/**
 * Data structure for subflow nodes (loop and parallel containers)
 */
@@ -151,133 +117,130 @@ export const SubflowNodeComponent = memo(({ data, id, selected }: NodeProps<Subf
  )

  return (
    <>
      <SubflowNodeStyles />
      <div className='group relative'>
    <div className='group relative'>
      <div
        ref={blockRef}
        onClick={() => setCurrentBlockId(id)}
        className={cn(
          'workflow-drag-handle relative cursor-grab select-none rounded-[8px] border border-[var(--border-1)] [&:active]:cursor-grabbing',
          'transition-block-bg transition-ring',
          'z-[20]'
        )}
        style={{
          width: data.width || 500,
          height: data.height || 300,
          position: 'relative',
          overflow: 'visible',
          pointerEvents: isPreview ? 'none' : 'all',
        }}
        data-node-id={id}
        data-type='subflowNode'
        data-nesting-level={nestingLevel}
        data-subflow-selected={isFocused || isSelected || isPreviewSelected}
      >
        {!isPreview && (
          <ActionBar blockId={id} blockType={data.kind} disabled={!userPermissions.canEdit} />
        )}

        {/* Header Section */}
        <div
          ref={blockRef}
          onClick={() => setCurrentBlockId(id)}
          className={cn(
            'workflow-drag-handle relative cursor-grab select-none rounded-[8px] border border-[var(--border-1)] [&:active]:cursor-grabbing',
            'transition-block-bg transition-ring',
            'z-[20]'
            'flex items-center justify-between rounded-t-[8px] border-[var(--border)] border-b bg-[var(--surface-2)] py-[8px] pr-[12px] pl-[8px]'
          )}
          style={{
            width: data.width || 500,
            height: data.height || 300,
            position: 'relative',
            overflow: 'visible',
            pointerEvents: isPreview ? 'none' : 'all',
          }}
          data-node-id={id}
          data-type='subflowNode'
          data-nesting-level={nestingLevel}
          data-subflow-selected={isFocused || isSelected || isPreviewSelected}
        >
          {!isPreview && (
            <ActionBar blockId={id} blockType={data.kind} disabled={!userPermissions.canEdit} />
          )}

          {/* Header Section */}
          <div
            className={cn(
              'flex items-center justify-between rounded-t-[8px] border-[var(--border)] border-b bg-[var(--surface-2)] py-[8px] pr-[12px] pl-[8px]'
            )}
          >
            <div className='flex min-w-0 flex-1 items-center gap-[10px]'>
              <div
                className='flex h-[24px] w-[24px] flex-shrink-0 items-center justify-center rounded-[6px]'
                style={{ backgroundColor: isEnabled ? blockIconBg : 'gray' }}
              >
                <BlockIcon className='h-[16px] w-[16px] text-white' />
              </div>
              <span
                className={cn(
                  'truncate font-medium text-[16px]',
                  !isEnabled && 'text-[var(--text-muted)]'
                )}
                title={blockName}
              >
                {blockName}
              </span>
            </div>
            <div className='flex items-center gap-1'>
              {!isEnabled && <Badge variant='gray-secondary'>disabled</Badge>}
              {isLocked && <Badge variant='gray-secondary'>locked</Badge>}
            </div>
          </div>

          {!isPreview && (
            <div className='flex min-w-0 flex-1 items-center gap-[10px]'>
            <div
              className='absolute right-[8px] bottom-[8px] z-20 flex h-[32px] w-[32px] cursor-se-resize items-center justify-center text-muted-foreground'
              style={{ pointerEvents: 'auto' }}
            />
          )}

          <div
            className='h-[calc(100%-50px)] pt-[16px] pr-[80px] pb-[16px] pl-[16px]'
            data-dragarea='true'
            style={{
              position: 'relative',
              pointerEvents: isPreview ? 'none' : 'auto',
            }}
          >
            {/* Subflow Start */}
            <div
              className='absolute top-[16px] left-[16px] flex items-center justify-center rounded-[8px] border border-[var(--border-1)] bg-[var(--surface-2)] px-[12px] py-[6px]'
              style={{ pointerEvents: isPreview ? 'none' : 'auto' }}
              data-parent-id={id}
              data-node-role={`${data.kind}-start`}
              data-extent='parent'
              className='flex h-[24px] w-[24px] flex-shrink-0 items-center justify-center rounded-[6px]'
              style={{ backgroundColor: isEnabled ? blockIconBg : 'gray' }}
            >
              <span className='font-medium text-[14px] text-[var(--text-primary)]'>Start</span>

              <Handle
                type='source'
                position={Position.Right}
                id={startHandleId}
                className={getHandleClasses('right')}
                style={{
                  top: '50%',
                  transform: 'translateY(-50%)',
                  pointerEvents: 'auto',
                }}
                data-parent-id={id}
              />
              <BlockIcon className='h-[16px] w-[16px] text-white' />
            </div>
            <span
              className={cn(
                'truncate font-medium text-[16px]',
                !isEnabled && 'text-[var(--text-muted)]'
              )}
              title={blockName}
            >
              {blockName}
            </span>
          </div>
          <div className='flex items-center gap-1'>
            {!isEnabled && <Badge variant='gray-secondary'>disabled</Badge>}
            {isLocked && <Badge variant='gray-secondary'>locked</Badge>}
          </div>

          {/* Input handle on left middle */}
          <Handle
            type='target'
            position={Position.Left}
            className={getHandleClasses('left')}
            style={{
              ...getHandleStyle(),
              pointerEvents: 'auto',
            }}
          />

          {/* Output handle on right middle */}
          <Handle
            type='source'
            position={Position.Right}
            className={getHandleClasses('right')}
            style={{
              ...getHandleStyle(),
              pointerEvents: 'auto',
            }}
            id={endHandleId}
          />

          {hasRing && (
            <div
              className={cn('pointer-events-none absolute inset-0 z-40 rounded-[8px]', ringStyles)}
            />
          )}
        </div>

        {!isPreview && (
          <div
            className='absolute right-[8px] bottom-[8px] z-20 flex h-[32px] w-[32px] cursor-se-resize items-center justify-center text-muted-foreground'
            style={{ pointerEvents: 'auto' }}
          />
        )}

        <div
          className='h-[calc(100%-50px)] pt-[16px] pr-[80px] pb-[16px] pl-[16px]'
          data-dragarea='true'
          style={{
            position: 'relative',
            pointerEvents: isPreview ? 'none' : 'auto',
          }}
        >
          {/* Subflow Start */}
          <div
            className='absolute top-[16px] left-[16px] flex items-center justify-center rounded-[8px] border border-[var(--border-1)] bg-[var(--surface-2)] px-[12px] py-[6px]'
            style={{ pointerEvents: isPreview ? 'none' : 'auto' }}
            data-parent-id={id}
            data-node-role={`${data.kind}-start`}
            data-extent='parent'
          >
            <span className='font-medium text-[14px] text-[var(--text-primary)]'>Start</span>

            <Handle
              type='source'
              position={Position.Right}
              id={startHandleId}
              className={getHandleClasses('right')}
              style={{
                top: '50%',
                transform: 'translateY(-50%)',
                pointerEvents: 'auto',
              }}
              data-parent-id={id}
            />
          </div>
        </div>

        {/* Input handle on left middle */}
        <Handle
          type='target'
          position={Position.Left}
          className={getHandleClasses('left')}
          style={{
            ...getHandleStyle(),
            pointerEvents: 'auto',
          }}
        />

        {/* Output handle on right middle */}
        <Handle
          type='source'
          position={Position.Right}
          className={getHandleClasses('right')}
          style={{
            ...getHandleStyle(),
            pointerEvents: 'auto',
          }}
          id={endHandleId}
        />

        {hasRing && (
          <div
            className={cn('pointer-events-none absolute inset-0 z-40 rounded-[8px]', ringStyles)}
          />
        )}
      </div>
    </>
    </div>
  )
})

@@ -134,57 +134,6 @@ export function WandPromptBar({
          </Button>
        )}
      </div>

      <style jsx global>{`
        @keyframes smoke-pulse {
          0%,
          100% {
            transform: scale(0.8);
            opacity: 0.4;
          }
          50% {
            transform: scale(1.1);
            opacity: 0.8;
          }
        }

        .status-indicator {
          position: relative;
          width: 12px;
          height: 12px;
          border-radius: 50%;
          overflow: hidden;
          background-color: hsl(var(--muted-foreground) / 0.5);
          transition: background-color 0.3s ease;
        }

        .status-indicator.streaming {
          background-color: transparent;
        }

        .status-indicator.streaming::before {
          content: '';
          position: absolute;
          inset: 0;
          border-radius: 50%;
          background: radial-gradient(
            circle,
            hsl(var(--primary) / 0.9) 0%,
            hsl(var(--primary) / 0.4) 60%,
            transparent 80%
          );
          animation: smoke-pulse 1.8s ease-in-out infinite;
          opacity: 0.9;
        }

        .dark .status-indicator.streaming::before {
          background: #6b7280;
          opacity: 0.9;
          animation: smoke-pulse 1.8s ease-in-out infinite;
        }
      `}</style>
    </div>
  )
}

@@ -1,6 +1,6 @@
import { memo, useCallback, useEffect, useMemo, useRef } from 'react'
import { createLogger } from '@sim/logger'
import { isEqual } from 'lodash'
import isEqual from 'lodash/isEqual'
import { useParams } from 'next/navigation'
import { Handle, type NodeProps, Position, useUpdateNodeInternals } from 'reactflow'
import { useStoreWithEqualityFn } from 'zustand/traditional'

@@ -38,7 +38,7 @@ import { getDependsOnFields } from '@/blocks/utils'
import { useKnowledgeBase } from '@/hooks/kb/use-knowledge'
import { useCustomTools } from '@/hooks/queries/custom-tools'
import { useMcpServers, useMcpToolsQuery } from '@/hooks/queries/mcp'
import { useCredentialName } from '@/hooks/queries/oauth/oauth-credentials'
import { useCredentialName } from '@/hooks/queries/oauth-credentials'
import { useReactivateSchedule, useScheduleInfo } from '@/hooks/queries/schedules'
import { useSkills } from '@/hooks/queries/skills'
import { useDeployChildWorkflow } from '@/hooks/queries/workflows'

@@ -16,7 +16,7 @@ interface UseScrollManagementOptions {
  /**
   * Distance from bottom (in pixels) within which auto-scroll stays active
   * @remarks Lower values = less sticky (user can scroll away easier)
   * @defaultValue 100
   * @defaultValue 30
   */
  stickinessThreshold?: number
}

@@ -41,7 +41,7 @@ export function useScrollManagement(
  const lastScrollTopRef = useRef(0)

  const scrollBehavior = options?.behavior ?? 'smooth'
  const stickinessThreshold = options?.stickinessThreshold ?? 100
  const stickinessThreshold = options?.stickinessThreshold ?? 30

  /** Scrolls the container to the bottom */
  const scrollToBottom = useCallback(() => {

@@ -514,6 +514,7 @@ export function HelpModal({ open, onOpenChange, workflowId, workspaceId }: HelpM
                alt={`Preview ${index + 1}`}
                fill
                unoptimized
                sizes='(max-width: 768px) 100vw, 50vw'
                className='object-contain'
              />
              <button

@@ -21,7 +21,7 @@ import {
  useConnectOAuthService,
  useDisconnectOAuthService,
  useOAuthConnections,
} from '@/hooks/queries/oauth/oauth-connections'
} from '@/hooks/queries/oauth-connections'
import { usePermissionConfig } from '@/hooks/use-permission-config'

const logger = createLogger('Integrations')

@@ -165,12 +165,16 @@ export function CancelSubscription({ subscription, subscriptionData }: CancelSub
        logger.info('Subscription restored successfully', result)
      }

      await queryClient.invalidateQueries({ queryKey: subscriptionKeys.all })
      if (activeOrgId) {
        await queryClient.invalidateQueries({ queryKey: organizationKeys.detail(activeOrgId) })
        await queryClient.invalidateQueries({ queryKey: organizationKeys.billing(activeOrgId) })
        await queryClient.invalidateQueries({ queryKey: organizationKeys.lists() })
      }
      await Promise.all([
        queryClient.invalidateQueries({ queryKey: subscriptionKeys.all }),
        ...(activeOrgId
          ? [
              queryClient.invalidateQueries({ queryKey: organizationKeys.detail(activeOrgId) }),
              queryClient.invalidateQueries({ queryKey: organizationKeys.billing(activeOrgId) }),
              queryClient.invalidateQueries({ queryKey: organizationKeys.lists() }),
            ]
          : []),
      ])

      setIsDialogOpen(false)
    } catch (err) {

@@ -37,7 +37,7 @@ export const UsageLimit = forwardRef<UsageLimitRef, UsageLimitProps>(
    },
    ref
  ) => {
    const [inputValue, setInputValue] = useState(currentLimit.toString())
    const [inputValue, setInputValue] = useState(() => currentLimit.toString())
    const [hasError, setHasError] = useState(false)
    const [errorType, setErrorType] = useState<'general' | 'belowUsage' | null>(null)
    const [isEditing, setIsEditing] = useState(false)

@@ -1,53 +0,0 @@
import { createLogger } from '@sim/logger'
import { task } from '@trigger.dev/sdk'
import { executeSync } from '@/lib/knowledge/connectors/sync-engine'

const logger = createLogger('TriggerKnowledgeConnectorSync')

export type ConnectorSyncPayload = {
  connectorId: string
  fullSync?: boolean
  requestId: string
}

export const knowledgeConnectorSync = task({
  id: 'knowledge-connector-sync',
  maxDuration: 1800,
  machine: 'large-1x',
  retry: {
    maxAttempts: 3,
    factor: 2,
    minTimeoutInMs: 5000,
    maxTimeoutInMs: 30000,
  },
  queue: {
    concurrencyLimit: 5,
    name: 'connector-sync-queue',
  },
  run: async (payload: ConnectorSyncPayload) => {
    const { connectorId, fullSync, requestId } = payload

    logger.info(`[${requestId}] Starting connector sync: ${connectorId}`)

    try {
      const result = await executeSync(connectorId, { fullSync })

      logger.info(`[${requestId}] Connector sync completed`, {
        connectorId,
        added: result.docsAdded,
        updated: result.docsUpdated,
        deleted: result.docsDeleted,
        unchanged: result.docsUnchanged,
      })

      return {
        success: !result.error,
        connectorId,
        ...result,
      }
    } catch (error) {
      logger.error(`[${requestId}] Connector sync failed: ${connectorId}`, error)
      throw error
    }
  },
})
@@ -25,8 +25,6 @@ export const AirtableBlock: BlockConfig<AirtableResponse> = {
        { label: 'Get Record', id: 'get' },
        { label: 'Create Records', id: 'create' },
        { label: 'Update Record', id: 'update' },
        { label: 'List Bases', id: 'listBases' },
        { label: 'Get Base Schema', id: 'getSchema' },
      ],
      value: () => 'list',
    },

@@ -38,7 +36,6 @@ export const AirtableBlock: BlockConfig<AirtableResponse> = {
      requiredScopes: [
        'data.records:read',
        'data.records:write',
        'schema.bases:read',
        'user.email:read',
        'webhook:manage',
      ],

@@ -51,7 +48,6 @@ export const AirtableBlock: BlockConfig<AirtableResponse> = {
      type: 'short-input',
      placeholder: 'Enter your base ID (e.g., appXXXXXXXXXXXXXX)',
      dependsOn: ['credential'],
      condition: { field: 'operation', value: 'listBases', not: true },
      required: true,
    },
    {

@@ -60,7 +56,6 @@ export const AirtableBlock: BlockConfig<AirtableResponse> = {
      type: 'short-input',
      placeholder: 'Enter table ID (e.g., tblXXXXXXXXXXXXXX)',
      dependsOn: ['credential', 'baseId'],
      condition: { field: 'operation', value: ['listBases', 'getSchema'], not: true },
      required: true,
    },
    {

@@ -205,8 +200,6 @@ Return ONLY the valid JSON object - no explanations, no markdown.`,
      'airtable_create_records',
      'airtable_update_record',
      'airtable_update_multiple_records',
      'airtable_list_bases',
      'airtable_get_base_schema',
    ],
    config: {
      tool: (params) => {

@@ -221,10 +214,6 @@ Return ONLY the valid JSON object - no explanations, no markdown.`,
            return 'airtable_update_record'
          case 'updateMultiple':
            return 'airtable_update_multiple_records'
          case 'listBases':
            return 'airtable_list_bases'
          case 'getSchema':
            return 'airtable_get_base_schema'
          default:
            throw new Error(`Invalid Airtable operation: ${params.operation}`)
        }

@@ -278,11 +267,9 @@ Return ONLY the valid JSON object - no explanations, no markdown.`,
  },
  // Output structure depends on the operation, covered by AirtableResponse union type
  outputs: {
    records: { type: 'json', description: 'Retrieved record data' },
    record: { type: 'json', description: 'Single record data' },
    bases: { type: 'json', description: 'List of accessible Airtable bases' },
    tables: { type: 'json', description: 'Table schemas with fields and views' },
    metadata: { type: 'json', description: 'Operation metadata' },
    records: { type: 'json', description: 'Retrieved record data' }, // Optional: for list, create, updateMultiple
    record: { type: 'json', description: 'Single record data' }, // Optional: for get, update single
    metadata: { type: 'json', description: 'Operation metadata' }, // Required: present in all responses
    // Trigger outputs
    event_type: { type: 'string', description: 'Type of Airtable event' },
    base_id: { type: 'string', description: 'Airtable base identifier' },

@@ -92,12 +92,9 @@ export const IncidentioBlock: BlockConfig<IncidentioResponse> = {
        field: 'operation',
        value: [
          'incidentio_incidents_list',
          'incidentio_actions_list',
          'incidentio_follow_ups_list',
          'incidentio_users_list',
          'incidentio_workflows_list',
          'incidentio_schedules_list',
          'incidentio_escalations_list',
          'incidentio_incident_updates_list',
          'incidentio_schedule_entries_list',
        ],

@@ -113,6 +110,7 @@ export const IncidentioBlock: BlockConfig<IncidentioResponse> = {
        field: 'operation',
        value: [
          'incidentio_incidents_list',
          'incidentio_users_list',
          'incidentio_workflows_list',
          'incidentio_schedules_list',
          'incidentio_incident_updates_list',

@@ -6,11 +6,9 @@ export const KnowledgeBlock: BlockConfig = {
  name: 'Knowledge',
  description: 'Use vector search',
  longDescription:
    'Integrate Knowledge into the workflow. Perform full CRUD operations on documents, chunks, and tags.',
    'Integrate Knowledge into the workflow. Can search, upload chunks, and create documents.',
  bestPractices: `
  - Clarify which tags are available for the knowledge base to understand whether to use tag filters on a search.
  - Use List Documents to enumerate documents before operating on them.
  - Use List Chunks to inspect a document's contents before updating or deleting chunks.
  `,
  bgColor: '#00B0B0',
  icon: PackageSearchIcon,

@@ -23,41 +21,20 @@ export const KnowledgeBlock: BlockConfig = {
      type: 'dropdown',
      options: [
        { label: 'Search', id: 'search' },
        { label: 'List Documents', id: 'list_documents' },
        { label: 'Create Document', id: 'create_document' },
        { label: 'Delete Document', id: 'delete_document' },
        { label: 'List Chunks', id: 'list_chunks' },
        { label: 'Upload Chunk', id: 'upload_chunk' },
        { label: 'Update Chunk', id: 'update_chunk' },
        { label: 'Delete Chunk', id: 'delete_chunk' },
        { label: 'List Tags', id: 'list_tags' },
        { label: 'Create Document', id: 'create_document' },
      ],
      value: () => 'search',
    },

    // Knowledge Base selector — basic mode (visual selector)
    {
      id: 'knowledgeBaseSelector',
      id: 'knowledgeBaseId',
      title: 'Knowledge Base',
      type: 'knowledge-base-selector',
      canonicalParamId: 'knowledgeBaseId',
      placeholder: 'Select knowledge base',
      multiSelect: false,
      required: true,
      mode: 'basic',
      condition: { field: 'operation', value: ['search', 'upload_chunk', 'create_document'] },
    },
    // Knowledge Base selector — advanced mode (manual ID input)
    {
      id: 'manualKnowledgeBaseId',
      title: 'Knowledge Base ID',
      type: 'short-input',
      canonicalParamId: 'knowledgeBaseId',
      placeholder: 'Enter knowledge base ID',
      required: true,
      mode: 'advanced',
    },

    // --- Search ---
    {
      id: 'query',
      title: 'Search Query',

@@ -80,72 +57,15 @@ export const KnowledgeBlock: BlockConfig = {
      placeholder: 'Add tag filters',
      condition: { field: 'operation', value: 'search' },
    },

    // --- List Documents ---
    {
      id: 'search',
      title: 'Search',
      type: 'short-input',
      placeholder: 'Filter documents by filename',
      condition: { field: 'operation', value: 'list_documents' },
    },
    {
      id: 'enabledFilter',
      title: 'Status Filter',
      type: 'dropdown',
      options: [
        { label: 'All', id: 'all' },
        { label: 'Enabled', id: 'enabled' },
        { label: 'Disabled', id: 'disabled' },
      ],
      condition: { field: 'operation', value: 'list_documents' },
    },
    {
      id: 'limit',
      title: 'Limit',
      type: 'short-input',
      placeholder: 'Max items to return (default: 50)',
      condition: { field: 'operation', value: ['list_documents', 'list_chunks'] },
    },
    {
      id: 'offset',
      title: 'Offset',
      type: 'short-input',
      placeholder: 'Number of items to skip (default: 0)',
      condition: { field: 'operation', value: ['list_documents', 'list_chunks'] },
    },

    // Document selector — basic mode (visual selector)
    {
      id: 'documentSelector',
      id: 'documentId',
      title: 'Document',
      type: 'document-selector',
      canonicalParamId: 'documentId',
      placeholder: 'Select document',
      dependsOn: ['knowledgeBaseId'],
      required: true,
      mode: 'basic',
      condition: {
        field: 'operation',
        value: ['upload_chunk', 'delete_document', 'list_chunks', 'update_chunk', 'delete_chunk'],
      },
      condition: { field: 'operation', value: 'upload_chunk' },
    },
    // Document selector — advanced mode (manual ID input)
    {
      id: 'manualDocumentId',
      title: 'Document ID',
      type: 'short-input',
      canonicalParamId: 'documentId',
      placeholder: 'Enter document ID',
      required: true,
      mode: 'advanced',
      condition: {
        field: 'operation',
        value: ['upload_chunk', 'delete_document', 'list_chunks', 'update_chunk', 'delete_chunk'],
      },
    },

    // --- Upload Chunk ---
    {
      id: 'content',
      title: 'Chunk Content',

@@ -155,15 +75,13 @@ export const KnowledgeBlock: BlockConfig = {
      required: true,
      condition: { field: 'operation', value: 'upload_chunk' },
    },

    // --- Create Document ---
    {
      id: 'name',
      title: 'Document Name',
      type: 'short-input',
      placeholder: 'Enter document name',
      required: true,
      condition: { field: 'operation', value: 'create_document' },
      condition: { field: 'operation', value: ['create_document'] },
    },
    {
      id: 'content',

@@ -172,75 +90,18 @@ export const KnowledgeBlock: BlockConfig = {
      placeholder: 'Enter the document content',
      rows: 6,
      required: true,
      condition: { field: 'operation', value: 'create_document' },
      condition: { field: 'operation', value: ['create_document'] },
    },
    // Dynamic tag entry for Create Document
    {
      id: 'documentTags',
      title: 'Document Tags',
      type: 'document-tag-entry',
      condition: { field: 'operation', value: 'create_document' },
    },

    // --- Update Chunk / Delete Chunk ---
    {
      id: 'chunkId',
      title: 'Chunk ID',
      type: 'short-input',
      placeholder: 'Enter chunk ID',
      required: true,
      condition: { field: 'operation', value: ['update_chunk', 'delete_chunk'] },
    },
    {
      id: 'content',
      title: 'New Content',
      type: 'long-input',
      placeholder: 'Enter updated chunk content',
      rows: 6,
      condition: { field: 'operation', value: 'update_chunk' },
    },
    {
      id: 'enabled',
      title: 'Enabled',
      type: 'dropdown',
      options: [
        { label: 'Yes', id: 'true' },
        { label: 'No', id: 'false' },
      ],
      condition: { field: 'operation', value: 'update_chunk' },
    },

    // --- List Chunks ---
    {
      id: 'chunkSearch',
      title: 'Search',
      type: 'short-input',
      placeholder: 'Filter chunks by content',
      condition: { field: 'operation', value: 'list_chunks' },
    },
    {
      id: 'chunkEnabledFilter',
      title: 'Status Filter',
      type: 'dropdown',
      options: [
        { label: 'All', id: 'all' },
        { label: 'Enabled', id: 'true' },
        { label: 'Disabled', id: 'false' },
      ],
      condition: { field: 'operation', value: 'list_chunks' },
    },
  ],
  tools: {
    access: [
      'knowledge_search',
      'knowledge_upload_chunk',
      'knowledge_create_document',
      'knowledge_list_tags',
      'knowledge_list_documents',
      'knowledge_delete_document',
      'knowledge_list_chunks',
      'knowledge_update_chunk',
      'knowledge_delete_chunk',
    ],
    access: ['knowledge_search', 'knowledge_upload_chunk', 'knowledge_create_document'],
    config: {
      tool: (params) => {
        switch (params.operation) {

@@ -250,62 +111,25 @@ export const KnowledgeBlock: BlockConfig = {
            return 'knowledge_upload_chunk'
          case 'create_document':
            return 'knowledge_create_document'
          case 'list_tags':
            return 'knowledge_list_tags'
          case 'list_documents':
            return 'knowledge_list_documents'
          case 'delete_document':
            return 'knowledge_delete_document'
          case 'list_chunks':
            return 'knowledge_list_chunks'
          case 'update_chunk':
            return 'knowledge_update_chunk'
          case 'delete_chunk':
            return 'knowledge_delete_chunk'
          default:
            return 'knowledge_search'
        }
      },
      params: (params) => {
        const knowledgeBaseId = params.knowledgeBaseId ? String(params.knowledgeBaseId).trim() : ''
        if (!knowledgeBaseId) {
          throw new Error('Knowledge base ID is required')
        // Validate required fields for each operation
        if (params.operation === 'search' && !params.knowledgeBaseId) {
          throw new Error('Knowledge base ID is required for search operation')
        }
        params.knowledgeBaseId = knowledgeBaseId

        const docOps = [
          'upload_chunk',
          'delete_document',
          'list_chunks',
          'update_chunk',
          'delete_chunk',
        ]
        if (docOps.includes(params.operation)) {
          const documentId = params.documentId ? String(params.documentId).trim() : ''
          if (!documentId) {
            throw new Error(`Document ID is required for ${params.operation} operation`)
          }
          params.documentId = documentId
        if (
          (params.operation === 'upload_chunk' || params.operation === 'create_document') &&
          !params.knowledgeBaseId
        ) {
          throw new Error(
            'Knowledge base ID is required for upload_chunk and create_document operations'
          )
        }

        const chunkOps = ['update_chunk', 'delete_chunk']
        if (chunkOps.includes(params.operation)) {
          const chunkId = params.chunkId ? String(params.chunkId).trim() : ''
          if (!chunkId) {
            throw new Error(`Chunk ID is required for ${params.operation} operation`)
          }
          params.chunkId = chunkId
        }

        // Map list_chunks sub-block fields to tool params
        if (params.operation === 'list_chunks') {
          if (params.chunkSearch) params.search = params.chunkSearch
          if (params.chunkEnabledFilter) params.enabled = params.chunkEnabledFilter
        }

        // Convert enabled dropdown string to boolean for update_chunk
        if (params.operation === 'update_chunk' && typeof params.enabled === 'string') {
          params.enabled = params.enabled === 'true'
        if (params.operation === 'upload_chunk' && !params.documentId) {
          throw new Error('Document ID is required for upload_chunk operation')
        }

        return params

@@ -318,18 +142,12 @@ export const KnowledgeBlock: BlockConfig = {
    query: { type: 'string', description: 'Search query terms' },
    topK: { type: 'number', description: 'Number of results' },
    documentId: { type: 'string', description: 'Document identifier' },
    chunkId: { type: 'string', description: 'Chunk identifier' },
    content: { type: 'string', description: 'Content data' },
    name: { type: 'string', description: 'Document name' },
    search: { type: 'string', description: 'Search filter for documents' },
    enabledFilter: { type: 'string', description: 'Filter by enabled status' },
    enabled: { type: 'string', description: 'Enable or disable a chunk' },
    limit: { type: 'number', description: 'Max items to return' },
    offset: { type: 'number', description: 'Pagination offset' },
    // Dynamic tag filters for search
    tagFilters: { type: 'string', description: 'Tag filter criteria' },
    // Document tags for create document (JSON string of tag objects)
    documentTags: { type: 'string', description: 'Document tags' },
    chunkSearch: { type: 'string', description: 'Search filter for chunks' },
    chunkEnabledFilter: { type: 'string', description: 'Filter chunks by enabled status' },
  },
  outputs: {
    results: { type: 'json', description: 'Search results' },

@@ -216,31 +216,21 @@ Return ONLY the date string in YYYY-MM-DD format - no explanations, no quotes, n
      condition: { field: 'operation', value: ['update_deal'] },
    },
    {
      id: 'deal_id',
      title: 'Deal ID',
      type: 'short-input',
      placeholder: 'Filter by deal ID',
      condition: { field: 'operation', value: ['get_files'] },
    },
    {
      id: 'person_id',
      title: 'Person ID',
      type: 'short-input',
      placeholder: 'Filter by person ID',
      condition: { field: 'operation', value: ['get_files'] },
    },
    {
      id: 'org_id',
      title: 'Organization ID',
      type: 'short-input',
      placeholder: 'Filter by organization ID',
      id: 'sort',
      title: 'Sort By',
      type: 'dropdown',
      options: [
        { label: 'ID', id: 'id' },
        { label: 'Update Time', id: 'update_time' },
      ],
      value: () => 'id',
      condition: { field: 'operation', value: ['get_files'] },
    },
    {
      id: 'limit',
      title: 'Limit',
      type: 'short-input',
      placeholder: 'Number of results (default 100, max 500)',
      placeholder: 'Number of results (default 100, max 100)',
      condition: { field: 'operation', value: ['get_files'] },
    },
    {
@@ -305,8 +295,28 @@ Return ONLY the date string in YYYY-MM-DD format - no explanations, no quotes, n
      id: 'cursor',
      title: 'Cursor',
      type: 'short-input',
      placeholder: 'Pagination cursor (optional)',
      condition: { field: 'operation', value: ['get_pipelines'] },
      placeholder: 'Pagination cursor from previous response',
      condition: {
        field: 'operation',
        value: ['get_all_deals', 'get_projects'],
      },
    },
    {
      id: 'start',
      title: 'Start (Offset)',
      type: 'short-input',
      placeholder: 'Pagination offset (e.g., 0, 100, 200)',
      condition: {
        field: 'operation',
        value: [
          'get_activities',
          'get_leads',
          'get_files',
          'get_pipeline_deals',
          'get_mail_messages',
          'get_pipelines',
        ],
      },
    },
    {
      id: 'pipeline_id',
@@ -323,19 +333,6 @@ Return ONLY the date string in YYYY-MM-DD format - no explanations, no quotes, n
      placeholder: 'Filter by stage ID',
      condition: { field: 'operation', value: ['get_pipeline_deals'] },
    },
    {
      id: 'status',
      title: 'Status',
      type: 'dropdown',
      options: [
        { label: 'All', id: '' },
        { label: 'Open', id: 'open' },
        { label: 'Won', id: 'won' },
        { label: 'Lost', id: 'lost' },
      ],
      value: () => '',
      condition: { field: 'operation', value: ['get_pipeline_deals'] },
    },
    {
      id: 'limit',
      title: 'Limit',
@@ -426,22 +423,29 @@ Return ONLY the date string in YYYY-MM-DD format - no explanations, no quotes, n
      id: 'deal_id',
      title: 'Deal ID',
      type: 'short-input',
      placeholder: 'Filter by deal ID',
      condition: { field: 'operation', value: ['get_activities', 'create_activity'] },
      placeholder: 'Associated deal ID',
      condition: { field: 'operation', value: ['create_activity'] },
    },
    {
      id: 'person_id',
      title: 'Person ID',
      type: 'short-input',
      placeholder: 'Filter by person ID',
      condition: { field: 'operation', value: ['get_activities', 'create_activity'] },
      placeholder: 'Associated person ID',
      condition: { field: 'operation', value: ['create_activity'] },
    },
    {
      id: 'org_id',
      title: 'Organization ID',
      type: 'short-input',
      placeholder: 'Filter by organization ID',
      condition: { field: 'operation', value: ['get_activities', 'create_activity'] },
      placeholder: 'Associated organization ID',
      condition: { field: 'operation', value: ['create_activity'] },
    },
    {
      id: 'user_id',
      title: 'User ID',
      type: 'short-input',
      placeholder: 'Filter by user ID',
      condition: { field: 'operation', value: ['get_activities'] },
    },
    {
      id: 'type',
@@ -781,7 +785,8 @@ Return ONLY the date string in YYYY-MM-DD format - no explanations, no quotes, n
    thread_id: { type: 'string', description: 'Mail thread ID' },
    sort_by: { type: 'string', description: 'Field to sort by' },
    sort_direction: { type: 'string', description: 'Sorting direction' },
    cursor: { type: 'string', description: 'Pagination cursor' },
    cursor: { type: 'string', description: 'Pagination cursor (v2 endpoints)' },
    start: { type: 'string', description: 'Pagination start offset (v1 endpoints)' },
    project_id: { type: 'string', description: 'Project ID' },
    description: { type: 'string', description: 'Description' },
    start_date: { type: 'string', description: 'Start date' },
@@ -793,12 +798,15 @@ Return ONLY the date string in YYYY-MM-DD format - no explanations, no quotes, n
    due_time: { type: 'string', description: 'Due time' },
    duration: { type: 'string', description: 'Duration' },
    done: { type: 'string', description: 'Completion status' },
    user_id: { type: 'string', description: 'User ID' },
    note: { type: 'string', description: 'Notes' },
    lead_id: { type: 'string', description: 'Lead ID' },
    archived: { type: 'string', description: 'Archived status' },
    value_amount: { type: 'string', description: 'Value amount' },
    value_currency: { type: 'string', description: 'Value currency' },
    is_archived: { type: 'string', description: 'Archive status' },
    organization_id: { type: 'string', description: 'Organization ID' },
    owner_id: { type: 'string', description: 'Owner user ID' },
  },
  outputs: {
    deals: { type: 'json', description: 'Array of deal objects' },

@@ -445,6 +445,13 @@ Return ONLY the order by expression - no explanations, no extra text.`,
      placeholder: '100',
      condition: { field: 'operation', value: 'query' },
    },
    {
      id: 'offset',
      title: 'Offset',
      type: 'short-input',
      placeholder: '0',
      condition: { field: 'operation', value: 'query' },
    },
    // Vector search operation fields
    {
      id: 'functionName',
@@ -543,6 +550,13 @@ Return ONLY the order by expression - no explanations, no extra text.`,
      placeholder: '100',
      condition: { field: 'operation', value: 'text_search' },
    },
    {
      id: 'offset',
      title: 'Offset',
      type: 'short-input',
      placeholder: '0',
      condition: { field: 'operation', value: 'text_search' },
    },
    // Count operation fields
    {
      id: 'filter',

@@ -66,6 +66,20 @@ export const TypeformBlock: BlockConfig<TypeformResponse> = {
      placeholder: 'Number of responses per page (default: 25)',
      condition: { field: 'operation', value: 'typeform_responses' },
    },
    {
      id: 'before',
      title: 'Before (Cursor)',
      type: 'short-input',
      placeholder: 'Cursor token from previous response for pagination',
      condition: { field: 'operation', value: 'typeform_responses' },
    },
    {
      id: 'after',
      title: 'After (Cursor)',
      type: 'short-input',
      placeholder: 'Cursor token from previous response for newer results',
      condition: { field: 'operation', value: 'typeform_responses' },
    },
    {
      id: 'since',
      title: 'Since',
@@ -380,6 +394,8 @@ Do not include any explanations, markdown formatting, or other text outside the
    apiKey: { type: 'string', description: 'Personal access token' },
    // Response operation params
    pageSize: { type: 'number', description: 'Responses per page' },
    before: { type: 'string', description: 'Cursor token for fetching the next page' },
    after: { type: 'string', description: 'Cursor token for fetching newer results' },
    since: { type: 'string', description: 'Start date filter' },
    until: { type: 'string', description: 'End date filter' },
    completed: { type: 'string', description: 'Completion status filter' },

@@ -444,33 +444,36 @@ Return ONLY the search query - no explanations.`,
      },
    },
    {
      id: 'sortBy',
      title: 'Sort By',
      id: 'filterType',
      title: 'Resource Type',
      type: 'dropdown',
      options: [
        { label: 'Relevance', id: 'relevance' },
        { label: 'Created At', id: 'created_at' },
        { label: 'Updated At', id: 'updated_at' },
        { label: 'Priority', id: 'priority' },
        { label: 'Status', id: 'status' },
        { label: 'Ticket Type', id: 'ticket_type' },
        { label: 'Ticket', id: 'ticket' },
        { label: 'User', id: 'user' },
        { label: 'Organization', id: 'organization' },
        { label: 'Group', id: 'group' },
      ],
      required: true,
      condition: {
        field: 'operation',
        value: ['search'],
      },
    },
    {
      id: 'sortOrder',
      title: 'Sort Order',
      id: 'sort',
      title: 'Sort',
      type: 'dropdown',
      options: [
        { label: 'Ascending', id: 'asc' },
        { label: 'Descending', id: 'desc' },
        { label: 'Updated At (Asc)', id: 'updated_at' },
        { label: 'Updated At (Desc)', id: '-updated_at' },
        { label: 'ID (Asc)', id: 'id' },
        { label: 'ID (Desc)', id: '-id' },
        { label: 'Status (Asc)', id: 'status' },
        { label: 'Status (Desc)', id: '-status' },
      ],
      condition: {
        field: 'operation',
        value: ['search'],
        value: ['get_tickets'],
      },
    },
    // Pagination fields
@@ -492,20 +495,25 @@ Return ONLY the search query - no explanations.`,
      },
    },
    {
      id: 'page',
      title: 'Page',
      id: 'pageAfter',
      title: 'Page After (Cursor)',
      type: 'short-input',
      placeholder: 'Page number',
      placeholder: 'Cursor from previous response (after_cursor)',
      description: 'Cursor value from a previous response to fetch the next page of results',
      condition: {
        field: 'operation',
        value: [
          'get_tickets',
          'get_users',
          'get_organizations',
          'search_users',
          'autocomplete_organizations',
          'search',
        ],
        value: ['get_tickets', 'get_users', 'get_organizations', 'search'],
      },
    },
    {
      id: 'page',
      title: 'Page Number',
      type: 'short-input',
      placeholder: 'Page number (default: 1)',
      description: 'Page number for offset-based pagination',
      condition: {
        field: 'operation',
        value: ['search_users', 'autocomplete_organizations'],
      },
    },
  ],
@@ -624,6 +632,7 @@ Return ONLY the search query - no explanations.`,
    email: { type: 'string', description: 'Zendesk email address' },
    apiToken: { type: 'string', description: 'Zendesk API token' },
    subdomain: { type: 'string', description: 'Zendesk subdomain' },
    sort: { type: 'string', description: 'Sort field for ticket listing' },
  },
  outputs: {
    // Ticket operations - list
@@ -665,8 +674,11 @@ Return ONLY the search query - no explanations.`,
      type: 'boolean',
      description: 'Deletion confirmation (delete_ticket, delete_user, delete_organization)',
    },
    // Pagination (shared across list operations)
    paging: { type: 'json', description: 'Pagination information for list operations' },
    // Cursor-based pagination (shared across list operations)
    paging: {
      type: 'json',
      description: 'Cursor-based pagination information (after_cursor, has_more)',
    },
    // Metadata (shared across all operations)
    metadata: { type: 'json', description: 'Operation metadata including operation type' },
  },

@@ -28,7 +28,7 @@ const checkboxVariants = cva(
    'border-[var(--border-1)] bg-transparent',
    'focus-visible:outline-none',
    'data-[disabled]:cursor-not-allowed data-[disabled]:opacity-50',
    'data-[state=checked]:border-[var(--brand-tertiary-2)] data-[state=checked]:bg-[var(--brand-tertiary-2)]',
    'data-[state=checked]:border-[var(--text-primary)] data-[state=checked]:bg-[var(--text-primary)]',
  ].join(' '),
  {
    variants: {

@@ -9,6 +9,7 @@ import {
  type ReactNode,
  useCallback,
  useEffect,
  useId,
  useMemo,
  useRef,
  useState,
@@ -170,6 +171,7 @@ const Combobox = memo(
    },
    ref
  ) => {
    const listboxId = useId()
    const [open, setOpen] = useState(false)
    const [highlightedIndex, setHighlightedIndex] = useState(-1)
    const [searchQuery, setSearchQuery] = useState('')
@@ -513,6 +515,7 @@ const Combobox = memo(
      role='combobox'
      aria-expanded={open}
      aria-haspopup='listbox'
      aria-controls={listboxId}
      aria-disabled={disabled}
      tabIndex={disabled ? -1 : 0}
      className={cn(
@@ -616,7 +619,7 @@ const Combobox = memo(
          }
        }}
      >
        <div ref={dropdownRef} role='listbox'>
        <div ref={dropdownRef} role='listbox' id={listboxId}>
          {isLoading ? (
            <div className='flex items-center justify-center py-[14px]'>
              <Loader2 className='h-[16px] w-[16px] animate-spin text-[var(--text-muted)]' />

@@ -797,7 +797,7 @@ const DatePicker = React.forwardRef<HTMLDivElement, DatePickerProps>((props, ref
        side='bottom'
        align='start'
        sideOffset={4}
        collisionPadding={16}
        avoidCollisions={false}
        className={cn(
          'rounded-[6px] border border-[var(--border-1)] p-0',
          isRangeMode ? 'w-auto' : 'w-[280px]'

@@ -27,12 +27,14 @@ const Alert = React.forwardRef<
Alert.displayName = 'Alert'

const AlertTitle = React.forwardRef<HTMLParagraphElement, React.HTMLAttributes<HTMLHeadingElement>>(
  ({ className, ...props }, ref) => (
  ({ className, children, ...props }, ref) => (
    <h5
      ref={ref}
      className={cn('mb-1 font-medium leading-none tracking-tight', className)}
      {...props}
    />
    >
      {children}
    </h5>
  )
)
AlertTitle.displayName = 'AlertTitle'

@@ -1,400 +0,0 @@
import { createLogger } from '@sim/logger'
import { AirtableIcon } from '@/components/icons'
import { fetchWithRetry, VALIDATE_RETRY_OPTIONS } from '@/lib/knowledge/documents/utils'
import type { ConnectorConfig, ExternalDocument, ExternalDocumentList } from '@/connectors/types'

const logger = createLogger('AirtableConnector')

const AIRTABLE_API = 'https://api.airtable.com/v0'
const PAGE_SIZE = 100

/**
 * Computes a SHA-256 hash of the given content.
 */
async function computeContentHash(content: string): Promise<string> {
  const data = new TextEncoder().encode(content)
  const hashBuffer = await crypto.subtle.digest('SHA-256', data)
  return Array.from(new Uint8Array(hashBuffer))
    .map((b) => b.toString(16).padStart(2, '0'))
    .join('')
}

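// For example, the hex digest for a known input:
//   await computeContentHash('hello')
//   → '2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824'
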
/**
 * Flattens a record's fields into a plain-text representation.
 * Each field is rendered as "Field Name: value" on its own line.
 */
function recordToPlainText(
  fields: Record<string, unknown>,
  fieldNames?: Map<string, string>
): string {
  const lines: string[] = []
  for (const [key, value] of Object.entries(fields)) {
    if (value == null) continue
    const displayName = fieldNames?.get(key) ?? key
    if (Array.isArray(value)) {
      // Attachments or linked records
      const items = value.map((v) => {
        if (typeof v === 'object' && v !== null) {
          const obj = v as Record<string, unknown>
          return (obj.url as string) || (obj.name as string) || JSON.stringify(v)
        }
        return String(v)
      })
      lines.push(`${displayName}: ${items.join(', ')}`)
    } else if (typeof value === 'object') {
      lines.push(`${displayName}: ${JSON.stringify(value)}`)
    } else {
      lines.push(`${displayName}: ${String(value)}`)
    }
  }
  return lines.join('\n')
}

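// Example with a hypothetical record (field data invented for illustration):
//   recordToPlainText({ Status: 'Done', Tags: ['a', 'b'] })
//   → 'Status: Done\nTags: a, b'
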
/**
 * Extracts a human-readable title from a record's fields.
 * Prefers the configured title field, then falls back to common field names.
 */
function extractTitle(fields: Record<string, unknown>, titleField?: string): string {
  if (titleField && fields[titleField] != null) {
    return String(fields[titleField])
  }
  const candidates = ['Name', 'Title', 'name', 'title', 'Summary', 'summary']
  for (const candidate of candidates) {
    if (fields[candidate] != null) {
      return String(fields[candidate])
    }
  }
  for (const value of Object.values(fields)) {
    if (typeof value === 'string' && value.trim()) {
      return value.length > 80 ? `${value.slice(0, 80)}…` : value
    }
  }
  return 'Untitled'
}

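// Illustrative calls (field data invented):
//   extractTitle({ Name: 'Roadmap' })             → 'Roadmap'
//   extractTitle({ Notes: 'Q3 plan' }, 'Missing') → 'Q3 plan'
//   extractTitle({})                              → 'Untitled'
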
/**
 * Parses the cursor format: "offset:<airtable_offset>"
 */
function parseCursor(cursor?: string): string | undefined {
  if (!cursor) return undefined
  if (cursor.startsWith('offset:')) return cursor.slice(7)
  return cursor
}

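// For example: parseCursor('offset:itrABC123') → 'itrABC123' (offset value is
// made up for illustration); bare cursors pass through unchanged.
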
export const airtableConnector: ConnectorConfig = {
  id: 'airtable',
  name: 'Airtable',
  description: 'Sync records from an Airtable table into your knowledge base',
  version: '1.0.0',
  icon: AirtableIcon,

  oauth: {
    required: true,
    provider: 'airtable',
    requiredScopes: ['data.records:read', 'schema.bases:read'],
  },

  configFields: [
    {
      id: 'baseId',
      title: 'Base ID',
      type: 'short-input',
      placeholder: 'e.g. appXXXXXXXXXXXXXX',
      required: true,
    },
    {
      id: 'tableIdOrName',
      title: 'Table Name or ID',
      type: 'short-input',
      placeholder: 'e.g. Tasks or tblXXXXXXXXXXXXXX',
      required: true,
    },
    {
      id: 'viewId',
      title: 'View',
      type: 'short-input',
      placeholder: 'e.g. Grid view or viwXXXXXXXXXXXXXX',
      required: false,
    },
    {
      id: 'titleField',
      title: 'Title Field',
      type: 'short-input',
      placeholder: 'e.g. Name',
      required: false,
    },
    {
      id: 'maxRecords',
      title: 'Max Records',
      type: 'short-input',
      placeholder: 'e.g. 1000 (default: unlimited)',
      required: false,
    },
  ],

  listDocuments: async (
    accessToken: string,
    sourceConfig: Record<string, unknown>,
    cursor?: string,
    syncContext?: Record<string, unknown>
  ): Promise<ExternalDocumentList> => {
    const baseId = sourceConfig.baseId as string
    const tableIdOrName = sourceConfig.tableIdOrName as string
    const viewId = sourceConfig.viewId as string | undefined
    const titleField = sourceConfig.titleField as string | undefined
    const maxRecords = sourceConfig.maxRecords ? Number(sourceConfig.maxRecords) : 0

    const fieldNames = await fetchFieldNames(accessToken, baseId, tableIdOrName, syncContext)

    const params = new URLSearchParams()
    params.append('pageSize', String(PAGE_SIZE))
    if (viewId) params.append('view', viewId)
    if (maxRecords > 0) params.append('maxRecords', String(maxRecords))

    const offset = parseCursor(cursor)
    if (offset) params.append('offset', offset)

    const encodedTable = encodeURIComponent(tableIdOrName)
    const url = `${AIRTABLE_API}/${baseId}/${encodedTable}?${params.toString()}`

    logger.info(`Listing records from ${baseId}/${tableIdOrName}`, {
      offset: offset ?? 'none',
      view: viewId ?? 'default',
    })

    const response = await fetchWithRetry(url, {
      method: 'GET',
      headers: {
        Authorization: `Bearer ${accessToken}`,
      },
    })

    if (!response.ok) {
      const errorText = await response.text()
      logger.error('Failed to list Airtable records', {
        status: response.status,
        error: errorText,
      })
      throw new Error(`Failed to list Airtable records: ${response.status}`)
    }

    const data = (await response.json()) as {
      records: AirtableRecord[]
      offset?: string
    }

    const records = data.records || []
    const documents: ExternalDocument[] = await Promise.all(
      records.map((record) =>
        recordToDocument(record, baseId, tableIdOrName, titleField, fieldNames)
      )
    )

    const nextOffset = data.offset
    return {
      documents,
      nextCursor: nextOffset ? `offset:${nextOffset}` : undefined,
      hasMore: Boolean(nextOffset),
    }
  },

  getDocument: async (
    accessToken: string,
    sourceConfig: Record<string, unknown>,
    externalId: string
  ): Promise<ExternalDocument | null> => {
    const baseId = sourceConfig.baseId as string
    const tableIdOrName = sourceConfig.tableIdOrName as string
    const titleField = sourceConfig.titleField as string | undefined

    const fieldNames = await fetchFieldNames(accessToken, baseId, tableIdOrName)
    const encodedTable = encodeURIComponent(tableIdOrName)
    const url = `${AIRTABLE_API}/${baseId}/${encodedTable}/${externalId}`

    const response = await fetchWithRetry(url, {
      method: 'GET',
      headers: {
        Authorization: `Bearer ${accessToken}`,
      },
    })

    if (!response.ok) {
      if (response.status === 404 || response.status === 422) return null
      throw new Error(`Failed to get Airtable record: ${response.status}`)
    }

    const record = (await response.json()) as AirtableRecord
    return recordToDocument(record, baseId, tableIdOrName, titleField, fieldNames)
  },

  validateConfig: async (
    accessToken: string,
    sourceConfig: Record<string, unknown>
  ): Promise<{ valid: boolean; error?: string }> => {
    const baseId = sourceConfig.baseId as string
    const tableIdOrName = sourceConfig.tableIdOrName as string

    if (!baseId || !tableIdOrName) {
      return { valid: false, error: 'Base ID and table name are required' }
    }

    if (baseId && !baseId.startsWith('app')) {
      return { valid: false, error: 'Base ID should start with "app"' }
    }

    const maxRecords = sourceConfig.maxRecords as string | undefined
    if (maxRecords && (Number.isNaN(Number(maxRecords)) || Number(maxRecords) <= 0)) {
      return { valid: false, error: 'Max records must be a positive number' }
    }

    try {
      const encodedTable = encodeURIComponent(tableIdOrName)
      const url = `${AIRTABLE_API}/${baseId}/${encodedTable}?pageSize=1`
      const response = await fetchWithRetry(
        url,
        {
          method: 'GET',
          headers: {
            Authorization: `Bearer ${accessToken}`,
          },
        },
        VALIDATE_RETRY_OPTIONS
      )

      if (!response.ok) {
        const errorText = await response.text()
        if (response.status === 404 || response.status === 422) {
          return { valid: false, error: `Table "${tableIdOrName}" not found in base "${baseId}"` }
        }
        if (response.status === 403) {
          return { valid: false, error: 'Access denied. Check your Airtable permissions.' }
        }
        return { valid: false, error: `Airtable API error: ${response.status} - ${errorText}` }
      }

      const viewId = sourceConfig.viewId as string | undefined
      if (viewId) {
        const viewUrl = `${AIRTABLE_API}/${baseId}/${encodedTable}?pageSize=1&view=${encodeURIComponent(viewId)}`
        const viewResponse = await fetchWithRetry(
          viewUrl,
          {
            method: 'GET',
            headers: {
              Authorization: `Bearer ${accessToken}`,
            },
          },
          VALIDATE_RETRY_OPTIONS
        )
        if (!viewResponse.ok) {
          return { valid: false, error: `View "${viewId}" not found in table "${tableIdOrName}"` }
        }
      }

      return { valid: true }
    } catch (error) {
      const message = error instanceof Error ? error.message : 'Failed to validate configuration'
      return { valid: false, error: message }
    }
  },

  tagDefinitions: [{ id: 'createdTime', displayName: 'Created Time', fieldType: 'date' }],

  mapTags: (metadata: Record<string, unknown>): Record<string, unknown> => {
    const result: Record<string, unknown> = {}

    if (typeof metadata.createdTime === 'string') {
      const date = new Date(metadata.createdTime)
      if (!Number.isNaN(date.getTime())) result.createdTime = date
    }

    return result
  },
}

interface AirtableRecord {
  id: string
  fields: Record<string, unknown>
  createdTime: string
}

/**
 * Converts an Airtable record to an ExternalDocument.
 */
async function recordToDocument(
  record: AirtableRecord,
  baseId: string,
  tableIdOrName: string,
  titleField: string | undefined,
  fieldNames: Map<string, string>
): Promise<ExternalDocument> {
  const plainText = recordToPlainText(record.fields, fieldNames)
  const contentHash = await computeContentHash(plainText)
  const title = extractTitle(record.fields, titleField)

  const encodedTable = encodeURIComponent(tableIdOrName)
  const sourceUrl = `https://airtable.com/${baseId}/${encodedTable}/${record.id}`

  return {
    externalId: record.id,
    title,
    content: plainText,
    mimeType: 'text/plain',
    sourceUrl,
    contentHash,
    metadata: {
      createdTime: record.createdTime,
    },
  }
}

/**
 * Fetches the table schema to build a field ID → field name mapping.
 */
async function fetchFieldNames(
  accessToken: string,
  baseId: string,
  tableIdOrName: string,
  syncContext?: Record<string, unknown>
): Promise<Map<string, string>> {
  const cacheKey = `fieldNames:${baseId}/${tableIdOrName}`
  if (syncContext?.[cacheKey]) return syncContext[cacheKey] as Map<string, string>

  const fieldNames = new Map<string, string>()

  try {
    const url = `${AIRTABLE_API}/meta/bases/${baseId}/tables`
    const response = await fetchWithRetry(url, {
      method: 'GET',
      headers: {
        Authorization: `Bearer ${accessToken}`,
      },
    })

    if (!response.ok) {
      logger.warn('Failed to fetch Airtable schema, using raw field keys', {
        status: response.status,
      })
      return fieldNames
    }

    const data = (await response.json()) as {
      tables: { id: string; name: string; fields: { id: string; name: string; type: string }[] }[]
    }

    const table = data.tables.find((t) => t.id === tableIdOrName || t.name === tableIdOrName)

    if (table) {
      for (const field of table.fields) {
        fieldNames.set(field.id, field.name)
        fieldNames.set(field.name, field.name)
      }
    }
  } catch (error) {
    logger.warn('Error fetching Airtable schema', {
      error: error instanceof Error ? error.message : String(error),
    })
  }

  if (syncContext) syncContext[cacheKey] = fieldNames
  return fieldNames
}
@@ -1 +0,0 @@
export { airtableConnector } from '@/connectors/airtable/airtable'
@@ -1,31 +0,0 @@
/**
 * @vitest-environment node
 */
import { describe, expect, it } from 'vitest'
import { escapeCql } from '@/connectors/confluence/confluence'

describe('escapeCql', () => {
  it.concurrent('returns plain strings unchanged', () => {
    expect(escapeCql('Engineering')).toBe('Engineering')
  })

  it.concurrent('escapes double quotes', () => {
    expect(escapeCql('say "hello"')).toBe('say \\"hello\\"')
  })

  it.concurrent('escapes backslashes', () => {
    expect(escapeCql('path\\to\\file')).toBe('path\\\\to\\\\file')
  })

  it.concurrent('escapes backslashes before quotes', () => {
    expect(escapeCql('a\\"b')).toBe('a\\\\\\"b')
  })

  it.concurrent('handles empty string', () => {
    expect(escapeCql('')).toBe('')
  })

  it.concurrent('leaves other special chars unchanged', () => {
    expect(escapeCql("it's a test & <tag>")).toBe("it's a test & <tag>")
  })
})
@@ -1,619 +0,0 @@
import { createLogger } from '@sim/logger'
import { ConfluenceIcon } from '@/components/icons'
import { fetchWithRetry } from '@/lib/knowledge/documents/utils'
import type { ConnectorConfig, ExternalDocument, ExternalDocumentList } from '@/connectors/types'
import { getConfluenceCloudId } from '@/tools/confluence/utils'

const logger = createLogger('ConfluenceConnector')

/**
 * Escapes a value for use inside CQL double-quoted strings.
 */
export function escapeCql(value: string): string {
  return value.replace(/\\/g, '\\\\').replace(/"/g, '\\"')
}

/**
 * Strips HTML tags from content and decodes HTML entities.
 */
function htmlToPlainText(html: string): string {
  let text = html.replace(/<[^>]*>/g, ' ')
  text = text
    .replace(/&nbsp;/g, ' ')
    .replace(/&lt;/g, '<')
    .replace(/&gt;/g, '>')
    .replace(/&quot;/g, '"')
    .replace(/&#39;/g, "'")
    .replace(/&amp;/g, '&')
  return text.replace(/\s+/g, ' ').trim()
}

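// For instance (illustrative input):
//   htmlToPlainText('<p>Fish &amp; chips</p>') → 'Fish & chips'
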
/**
 * Computes a SHA-256 hash of the given content.
 */
async function computeContentHash(content: string): Promise<string> {
  const encoder = new TextEncoder()
  const data = encoder.encode(content)
  const hashBuffer = await crypto.subtle.digest('SHA-256', data)
  const hashArray = Array.from(new Uint8Array(hashBuffer))
  return hashArray.map((b) => b.toString(16).padStart(2, '0')).join('')
}

/**
 * Fetches labels for a batch of page IDs using the v2 labels endpoint.
 */
async function fetchLabelsForPages(
  cloudId: string,
  accessToken: string,
  pageIds: string[]
): Promise<Map<string, string[]>> {
  const labelsByPageId = new Map<string, string[]>()

  const results = await Promise.all(
    pageIds.map(async (pageId) => {
      try {
        const url = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/api/v2/pages/${pageId}/labels`
        const response = await fetchWithRetry(url, {
          method: 'GET',
          headers: {
            Accept: 'application/json',
            Authorization: `Bearer ${accessToken}`,
          },
        })

        if (!response.ok) {
          logger.warn(`Failed to fetch labels for page ${pageId}`, { status: response.status })
          return { pageId, labels: [] as string[] }
        }

        const data = await response.json()
        const labels = (data.results || []).map(
          (label: Record<string, unknown>) => label.name as string
        )
        return { pageId, labels }
      } catch (error) {
        logger.warn(`Error fetching labels for page ${pageId}`, {
          error: error instanceof Error ? error.message : String(error),
        })
        return { pageId, labels: [] as string[] }
      }
    })
  )

  for (const { pageId, labels } of results) {
    labelsByPageId.set(pageId, labels)
  }

  return labelsByPageId
}

/**
 * Converts a v1 CQL search result item to an ExternalDocument.
 */
async function cqlResultToDocument(
  item: Record<string, unknown>,
  domain: string
): Promise<ExternalDocument> {
  const body = item.body as Record<string, Record<string, string>> | undefined
  const rawContent = body?.storage?.value || ''
  const plainText = htmlToPlainText(rawContent)
  const contentHash = await computeContentHash(plainText)

  const version = item.version as Record<string, unknown> | undefined
  const links = item._links as Record<string, string> | undefined
  const metadata = item.metadata as Record<string, unknown> | undefined
  const labelsWrapper = metadata?.labels as Record<string, unknown> | undefined
  const labelResults = (labelsWrapper?.results || []) as Record<string, unknown>[]
  const labels = labelResults.map((l) => l.name as string)

  return {
    externalId: String(item.id),
    title: (item.title as string) || 'Untitled',
    content: plainText,
    mimeType: 'text/plain',
    sourceUrl: links?.webui ? `https://${domain}/wiki${links.webui}` : undefined,
    contentHash,
    metadata: {
      spaceId: (item.space as Record<string, unknown>)?.key,
      status: item.status,
      version: version?.number,
      labels,
      lastModified: version?.when,
    },
  }
}

export const confluenceConnector: ConnectorConfig = {
  id: 'confluence',
  name: 'Confluence',
  description: 'Sync pages from a Confluence space into your knowledge base',
  version: '1.1.0',
  icon: ConfluenceIcon,

  oauth: {
    required: true,
    provider: 'confluence',
    requiredScopes: ['read:confluence-content.all', 'read:page:confluence', 'offline_access'],
  },

  configFields: [
    {
      id: 'domain',
      title: 'Confluence Domain',
      type: 'short-input',
      placeholder: 'yoursite.atlassian.net',
      required: true,
    },
    {
      id: 'spaceKey',
      title: 'Space Key',
      type: 'short-input',
      placeholder: 'e.g. ENG, PRODUCT',
      required: true,
    },
    {
      id: 'contentType',
      title: 'Content Type',
      type: 'dropdown',
      required: false,
      options: [
        { label: 'Pages only', id: 'page' },
        { label: 'Blog posts only', id: 'blogpost' },
        { label: 'All content', id: 'all' },
      ],
    },
    {
      id: 'labelFilter',
      title: 'Filter by Label',
      type: 'short-input',
      required: false,
      placeholder: 'e.g. published, engineering',
    },
    {
      id: 'maxPages',
      title: 'Max Pages',
      type: 'short-input',
      required: false,
      placeholder: 'e.g. 500 (default: unlimited)',
    },
  ],

  listDocuments: async (
    accessToken: string,
    sourceConfig: Record<string, unknown>,
    cursor?: string
  ): Promise<ExternalDocumentList> => {
    const domain = sourceConfig.domain as string
    const spaceKey = sourceConfig.spaceKey as string
    const contentType = (sourceConfig.contentType as string) || 'page'
    const labelFilter = (sourceConfig.labelFilter as string) || ''
    const maxPages = sourceConfig.maxPages ? Number(sourceConfig.maxPages) : 0

    const cloudId = await getConfluenceCloudId(domain, accessToken)

    // If label filtering is enabled, use CQL search via v1 API
    if (labelFilter.trim()) {
      return listDocumentsViaCql(
        cloudId,
        accessToken,
        domain,
        spaceKey,
        contentType,
        labelFilter,
        maxPages,
        cursor
      )
    }

    // Otherwise use v2 API (default path)
    const spaceId = await resolveSpaceId(cloudId, accessToken, spaceKey)

    if (contentType === 'all') {
      return listAllContentTypes(cloudId, accessToken, domain, spaceId, spaceKey, maxPages, cursor)
    }

    return listDocumentsV2(
      cloudId,
      accessToken,
      domain,
      spaceId,
      spaceKey,
      contentType,
      maxPages,
      cursor
    )
  },

  getDocument: async (
    accessToken: string,
    sourceConfig: Record<string, unknown>,
    externalId: string
  ): Promise<ExternalDocument | null> => {
    const domain = sourceConfig.domain as string
    const cloudId = await getConfluenceCloudId(domain, accessToken)

    const url = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/api/v2/pages/${externalId}?body-format=storage`

    const response = await fetchWithRetry(url, {
      method: 'GET',
      headers: {
        Accept: 'application/json',
        Authorization: `Bearer ${accessToken}`,
      },
    })

    if (!response.ok) {
      if (response.status === 404) return null
      throw new Error(`Failed to get Confluence page: ${response.status}`)
    }

    const page = await response.json()
    const rawContent = page.body?.storage?.value || ''
    const plainText = htmlToPlainText(rawContent)
    const contentHash = await computeContentHash(plainText)

    // Fetch labels for this page
    const labelMap = await fetchLabelsForPages(cloudId, accessToken, [String(page.id)])
    const labels = labelMap.get(String(page.id)) ?? []

    return {
      externalId: String(page.id),
      title: page.title || 'Untitled',
      content: plainText,
      mimeType: 'text/plain',
      sourceUrl: page._links?.webui ? `https://${domain}/wiki${page._links.webui}` : undefined,
      contentHash,
      metadata: {
        spaceId: page.spaceId,
        status: page.status,
        version: page.version?.number,
        labels,
        lastModified: page.version?.createdAt,
      },
    }
  },

  validateConfig: async (
    accessToken: string,
    sourceConfig: Record<string, unknown>
  ): Promise<{ valid: boolean; error?: string }> => {
    const domain = sourceConfig.domain as string
    const spaceKey = sourceConfig.spaceKey as string

    if (!domain || !spaceKey) {
      return { valid: false, error: 'Domain and space key are required' }
    }

    const maxPages = sourceConfig.maxPages as string | undefined
    if (maxPages && (Number.isNaN(Number(maxPages)) || Number(maxPages) <= 0)) {
      return { valid: false, error: 'Max pages must be a positive number' }
    }

    try {
      const cloudId = await getConfluenceCloudId(domain, accessToken)
      await resolveSpaceId(cloudId, accessToken, spaceKey)
      return { valid: true }
    } catch (error) {
      const message = error instanceof Error ? error.message : 'Failed to validate configuration'
      return { valid: false, error: message }
    }
  },

  tagDefinitions: [
    { id: 'labels', displayName: 'Labels', fieldType: 'text' },
    { id: 'version', displayName: 'Version', fieldType: 'number' },
    { id: 'lastModified', displayName: 'Last Modified', fieldType: 'date' },
  ],

  mapTags: (metadata: Record<string, unknown>): Record<string, unknown> => {
    const result: Record<string, unknown> = {}

    const labels = Array.isArray(metadata.labels) ? (metadata.labels as string[]) : []
    if (labels.length > 0) result.labels = labels.join(', ')

    if (metadata.version != null) {
      const num = Number(metadata.version)
      if (!Number.isNaN(num)) result.version = num
    }

    if (typeof metadata.lastModified === 'string') {
      const date = new Date(metadata.lastModified)
      if (!Number.isNaN(date.getTime())) result.lastModified = date
    }

    return result
  },
}

/**
 * Lists documents using the v2 API for a single content type (pages or blogposts).
 */
async function listDocumentsV2(
  cloudId: string,
  accessToken: string,
  domain: string,
  spaceId: string,
  spaceKey: string,
  contentType: string,
  maxPages: number,
  cursor?: string
): Promise<ExternalDocumentList> {
  const queryParams = new URLSearchParams()
  queryParams.append('limit', '50')
  queryParams.append('body-format', 'storage')
  if (cursor) {
    queryParams.append('cursor', cursor)
  }

  const endpoint = contentType === 'blogpost' ? 'blogposts' : 'pages'
  const url = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/api/v2/spaces/${spaceId}/${endpoint}?${queryParams.toString()}`

  logger.info(`Listing ${endpoint} in space ${spaceKey} (ID: ${spaceId})`)

  const response = await fetchWithRetry(url, {
    method: 'GET',
    headers: {
      Accept: 'application/json',
      Authorization: `Bearer ${accessToken}`,
    },
  })

  if (!response.ok) {
    const errorText = await response.text()
    logger.error(`Failed to list Confluence ${endpoint}`, {
      status: response.status,
      error: errorText,
    })
    throw new Error(`Failed to list Confluence ${endpoint}: ${response.status}`)
  }

  const data = await response.json()
  const results = data.results || []

  // Fetch labels for all pages in this batch
  const pageIds = results.map((page: Record<string, unknown>) => String(page.id))
  const labelsByPageId = await fetchLabelsForPages(cloudId, accessToken, pageIds)

  const documents: ExternalDocument[] = await Promise.all(
    results.map(async (page: Record<string, unknown>) => {
      const rawContent = (page.body as Record<string, Record<string, string>>)?.storage?.value || ''
      const plainText = htmlToPlainText(rawContent)
      const contentHash = await computeContentHash(plainText)
      const pageId = String(page.id)

      return {
        externalId: pageId,
        title: (page.title as string) || 'Untitled',
        content: plainText,
        mimeType: 'text/plain',
        sourceUrl: (page._links as Record<string, string>)?.webui
          ? `https://${domain}/wiki${(page._links as Record<string, string>).webui}`
          : undefined,
        contentHash,
        metadata: {
          spaceId: page.spaceId,
          status: page.status,
          version: (page.version as Record<string, unknown>)?.number,
          labels: labelsByPageId.get(pageId) ?? [],
          lastModified: (page.version as Record<string, unknown>)?.createdAt,
        },
      }
    })
  )

  // Extract next cursor from _links.next
  let nextCursor: string | undefined
  const nextLink = (data._links as Record<string, string>)?.next
  if (nextLink) {
    try {
      nextCursor = new URL(nextLink, 'https://placeholder').searchParams.get('cursor') || undefined
    } catch {
      // Ignore malformed URLs
    }
  }

  // Enforce maxPages on the first batch; on subsequent batches the sync
  // engine tracks the running total. We signal stop by clearing the cursor
  // (and therefore hasMore) once the batch would reach maxPages.
  if (maxPages > 0 && !cursor && documents.length >= maxPages) {
    documents.length = maxPages
    nextCursor = undefined
  }

  return {
    documents,
    nextCursor,
    hasMore: Boolean(nextCursor),
  }
}

/**
 * Lists both pages and blogposts using a compound cursor that tracks
 * pagination state for each content type independently.
 */
async function listAllContentTypes(
  cloudId: string,
  accessToken: string,
  domain: string,
  spaceId: string,
  spaceKey: string,
  maxPages: number,
  cursor?: string
): Promise<ExternalDocumentList> {
  let pageCursor: string | undefined
  let blogCursor: string | undefined
  let pagesDone = false
  let blogsDone = false

  if (cursor) {
    try {
      const parsed = JSON.parse(cursor)
      pageCursor = parsed.page
      blogCursor = parsed.blog
      pagesDone = parsed.pagesDone === true
      blogsDone = parsed.blogsDone === true
    } catch {
      pageCursor = cursor
    }
  }

  const results: ExternalDocumentList = { documents: [], hasMore: false }

  if (!pagesDone) {
    const pagesResult = await listDocumentsV2(
      cloudId,
      accessToken,
      domain,
      spaceId,
      spaceKey,
      'page',
      maxPages,
      pageCursor
    )
    results.documents.push(...pagesResult.documents)
    pageCursor = pagesResult.nextCursor
    pagesDone = !pagesResult.hasMore
  }

  if (!blogsDone) {
    const blogResult = await listDocumentsV2(
      cloudId,
      accessToken,
      domain,
      spaceId,
      spaceKey,
      'blogpost',
      maxPages,
      blogCursor
    )
    results.documents.push(...blogResult.documents)
    blogCursor = blogResult.nextCursor
    blogsDone = !blogResult.hasMore
  }

  results.hasMore = !pagesDone || !blogsDone

  if (results.hasMore) {
    results.nextCursor = JSON.stringify({
      page: pageCursor,
      blog: blogCursor,
      pagesDone,
      blogsDone,
    })
  }

  return results
}

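// A compound cursor midway through a sync might look like (cursor value
// invented for illustration):
//   '{"page":"eyJpZCI6MTIzfQ","pagesDone":false,"blogsDone":false}'
// JSON.stringify omits the blog key while blogCursor is still undefined.
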
/**
 * Lists documents using CQL search via the v1 API (used when label filtering is enabled).
 */
async function listDocumentsViaCql(
  cloudId: string,
  accessToken: string,
  domain: string,
  spaceKey: string,
  contentType: string,
  labelFilter: string,
  maxPages: number,
  cursor?: string
): Promise<ExternalDocumentList> {
  const labels = labelFilter
    .split(',')
    .map((l) => l.trim())
    .filter(Boolean)

  // Build CQL query
  let cql = `space="${escapeCql(spaceKey)}"`

  if (contentType === 'blogpost') {
    cql += ' AND type="blogpost"'
  } else if (contentType === 'page' || !contentType) {
    cql += ' AND type="page"'
  }
  // contentType === 'all' — no type filter

  if (labels.length === 1) {
    cql += ` AND label="${escapeCql(labels[0])}"`
  } else if (labels.length > 1) {
    const labelList = labels.map((l) => `"${escapeCql(l)}"`).join(',')
    cql += ` AND label in (${labelList})`
  }

  const limit = maxPages > 0 ? Math.min(maxPages, 50) : 50
  const start = cursor ? Number(cursor) : 0

  const queryParams = new URLSearchParams()
  queryParams.append('cql', cql)
  queryParams.append('limit', String(limit))
  queryParams.append('start', String(start))
  queryParams.append('expand', 'body.storage,version,metadata.labels')

  const url = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/rest/api/content/search?${queryParams.toString()}`

  logger.info(`Searching Confluence via CQL: ${cql}`, { start, limit })

  const response = await fetchWithRetry(url, {
    method: 'GET',
    headers: {
      Accept: 'application/json',
      Authorization: `Bearer ${accessToken}`,
    },
  })

  if (!response.ok) {
    const errorText = await response.text()
    logger.error('Failed to search Confluence via CQL', {
      status: response.status,
      error: errorText,
    })
    throw new Error(`Failed to search Confluence via CQL: ${response.status}`)
  }

  const data = await response.json()
  const results = data.results || []

  const documents: ExternalDocument[] = await Promise.all(
    results.map((item: Record<string, unknown>) => cqlResultToDocument(item, domain))
  )

  const totalSize = (data.totalSize as number) ?? 0
  const nextStart = start + results.length
  const hasMore = nextStart < totalSize && (maxPages <= 0 || nextStart < maxPages)

  return {
    documents,
    nextCursor: hasMore ? String(nextStart) : undefined,
    hasMore,
  }
}

/**
 * Resolves a Confluence space key to its numeric space ID.
 */
async function resolveSpaceId(
  cloudId: string,
  accessToken: string,
  spaceKey: string
): Promise<string> {
  const url = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/api/v2/spaces?keys=${encodeURIComponent(spaceKey)}&limit=1`

  const response = await fetchWithRetry(url, {
    method: 'GET',
    headers: {
      Accept: 'application/json',
      Authorization: `Bearer ${accessToken}`,
    },
  })

  if (!response.ok) {
    throw new Error(`Failed to resolve space key "${spaceKey}": ${response.status}`)
  }

  const data = await response.json()
  const results = data.results || []

  if (results.length === 0) {
    throw new Error(`Space "${spaceKey}" not found`)
  }

  return String(results[0].id)
}
@@ -1 +0,0 @@
export { confluenceConnector } from '@/connectors/confluence/confluence'
@@ -1,415 +0,0 @@
import { createLogger } from '@sim/logger'
import { GithubIcon } from '@/components/icons'
import { fetchWithRetry, VALIDATE_RETRY_OPTIONS } from '@/lib/knowledge/documents/utils'
import type { ConnectorConfig, ExternalDocument, ExternalDocumentList } from '@/connectors/types'

const logger = createLogger('GitHubConnector')

const GITHUB_API_URL = 'https://api.github.com'
const BATCH_SIZE = 30

/**
 * Computes a SHA-256 hash of the given content.
 */
async function computeContentHash(content: string): Promise<string> {
  const data = new TextEncoder().encode(content)
  const hashBuffer = await crypto.subtle.digest('SHA-256', data)
  return Array.from(new Uint8Array(hashBuffer))
    .map((b) => b.toString(16).padStart(2, '0'))
    .join('')
}

/**
 * Parses the repository string into owner and repo.
 */
function parseRepo(repository: string): { owner: string; repo: string } {
  const cleaned = repository.replace(/^https?:\/\/github\.com\//, '').replace(/\.git$/, '')
  const parts = cleaned.split('/')
  if (parts.length < 2 || !parts[0] || !parts[1]) {
    throw new Error(`Invalid repository format: "${repository}". Use "owner/repo".`)
  }
  return { owner: parts[0], repo: parts[1] }
}

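// For example: parseRepo('https://github.com/simstudioai/sim.git') and
// parseRepo('simstudioai/sim') both yield { owner: 'simstudioai', repo: 'sim' }.
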
/**
 * Builds the file-extension filter set from user config. Returns null if no
 * filter is configured (accept all files).
 */
function parseExtensions(extensions: string): Set<string> | null {
  const trimmed = extensions.trim()
  if (!trimmed) return null
  const exts = trimmed
    .split(',')
    .map((e) => e.trim().toLowerCase())
    .filter(Boolean)
    .map((e) => (e.startsWith('.') ? e : `.${e}`))
  return exts.length > 0 ? new Set(exts) : null
}

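// For example: parseExtensions('md, .TXT') → Set { '.md', '.txt' };
// parseExtensions('') → null (no filtering).
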
/**
 * Checks whether a file path matches the extension filter.
 */
function matchesExtension(filePath: string, extSet: Set<string> | null): boolean {
  if (!extSet) return true
  const lastDot = filePath.lastIndexOf('.')
  if (lastDot === -1) return false
  return extSet.has(filePath.slice(lastDot).toLowerCase())
}

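// With extSet = Set { '.md' }: 'docs/intro.MD' matches, 'Makefile' does not
// (paths here are illustrative).
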
interface TreeItem {
  path: string
  mode: string
  type: string
  sha: string
  size?: number
}

/**
 * Fetches the full recursive tree for a branch.
 */
async function fetchTree(
  accessToken: string,
  owner: string,
  repo: string,
  branch: string
): Promise<TreeItem[]> {
  const url = `${GITHUB_API_URL}/repos/${owner}/${repo}/git/trees/${encodeURIComponent(branch)}?recursive=1`

  const response = await fetchWithRetry(url, {
    method: 'GET',
    headers: {
      Accept: 'application/vnd.github+json',
      Authorization: `Bearer ${accessToken}`,
      'X-GitHub-Api-Version': '2022-11-28',
    },
  })

  if (!response.ok) {
    const errorText = await response.text()
    logger.error('Failed to fetch GitHub tree', { status: response.status, error: errorText })
    throw new Error(`Failed to fetch repository tree: ${response.status}`)
  }

  const data = await response.json()

  if (data.truncated) {
    logger.warn('GitHub tree was truncated — some files may be missing', { owner, repo, branch })
  }

  return (data.tree || []).filter((item: TreeItem) => item.type === 'blob')
}

/**
 * Fetches file content via the Blobs API and decodes base64.
 */
async function fetchBlobContent(
  accessToken: string,
  owner: string,
  repo: string,
  sha: string
): Promise<string> {
  const url = `${GITHUB_API_URL}/repos/${owner}/${repo}/git/blobs/${sha}`

  const response = await fetchWithRetry(url, {
    method: 'GET',
    headers: {
      Accept: 'application/vnd.github+json',
      Authorization: `Bearer ${accessToken}`,
      'X-GitHub-Api-Version': '2022-11-28',
    },
  })

  if (!response.ok) {
    throw new Error(`Failed to fetch blob ${sha}: ${response.status}`)
  }

  const data = await response.json()

  if (data.encoding === 'base64') {
    return atob(data.content.replace(/\n/g, ''))
  }

  return data.content || ''
}

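// The blob response carries { content, encoding }; when encoding is 'base64'
// the newline-chunked payload is decoded, e.g. atob('aGVsbG8=') → 'hello'.
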
/**
 * Converts a tree item to an ExternalDocument by fetching its content.
 */
async function treeItemToDocument(
  accessToken: string,
  owner: string,
  repo: string,
  branch: string,
  item: TreeItem
): Promise<ExternalDocument> {
  const content = await fetchBlobContent(accessToken, owner, repo, item.sha)
  const contentHash = await computeContentHash(content)

  return {
    externalId: item.path,
    title: item.path.split('/').pop() || item.path,
    content,
    mimeType: 'text/plain',
    sourceUrl: `https://github.com/${owner}/${repo}/blob/${branch}/${item.path}`,
    contentHash,
    metadata: {
      path: item.path,
      sha: item.sha,
      size: item.size,
      branch,
      repository: `${owner}/${repo}`,
    },
  }
}

export const githubConnector: ConnectorConfig = {
  id: 'github',
  name: 'GitHub',
  description: 'Sync files from a GitHub repository into your knowledge base',
  version: '1.0.0',
  icon: GithubIcon,

  oauth: {
    required: true,
    provider: 'github',
    requiredScopes: ['repo'],
  },

  configFields: [
    {
      id: 'repository',
      title: 'Repository',
      type: 'short-input',
      placeholder: 'owner/repo',
      required: true,
    },
    {
      id: 'branch',
      title: 'Branch',
      type: 'short-input',
      placeholder: 'main (default)',
      required: false,
    },
    {
      id: 'pathPrefix',
      title: 'Path Filter',
      type: 'short-input',
      placeholder: 'e.g. docs/, src/components/',
      required: false,
    },
    {
      id: 'extensions',
      title: 'File Extensions',
      type: 'short-input',
      placeholder: 'e.g. .md, .txt, .mdx',
      required: false,
    },
    {
      id: 'maxFiles',
      title: 'Max Files',
      type: 'short-input',
      required: false,
      placeholder: 'e.g. 500 (default: unlimited)',
    },
  ],

  listDocuments: async (
    accessToken: string,
    sourceConfig: Record<string, unknown>,
    cursor?: string
  ): Promise<ExternalDocumentList> => {
    const { owner, repo } = parseRepo(sourceConfig.repository as string)
    const branch = ((sourceConfig.branch as string) || 'main').trim()
    const pathPrefix = ((sourceConfig.pathPrefix as string) || '').trim()
    const extSet = parseExtensions((sourceConfig.extensions as string) || '')
    const maxFiles = sourceConfig.maxFiles ? Number(sourceConfig.maxFiles) : 0

    const tree = await fetchTree(accessToken, owner, repo, branch)

    // Filter by path prefix and extensions
    const filtered = tree.filter((item) => {
      if (pathPrefix && !item.path.startsWith(pathPrefix)) return false
      if (!matchesExtension(item.path, extSet)) return false
      return true
    })

    // Apply max files limit
    const capped = maxFiles > 0 ? filtered.slice(0, maxFiles) : filtered

    // Paginate using offset cursor
    const offset = cursor ? Number(cursor) : 0
    const batch = capped.slice(offset, offset + BATCH_SIZE)

    logger.info('Listing GitHub files', {
      owner,
      repo,
      branch,
      totalFiltered: capped.length,
      offset,
      batchSize: batch.length,
    })

    const documents: ExternalDocument[] = []
    for (const item of batch) {
      try {
        const doc = await treeItemToDocument(accessToken, owner, repo, branch, item)
        documents.push(doc)
      } catch (error) {
        logger.warn(`Failed to fetch content for ${item.path}`, {
          error: error instanceof Error ? error.message : String(error),
        })
      }
    }

    const nextOffset = offset + BATCH_SIZE
    const hasMore = nextOffset < capped.length

    return {
      documents,
      nextCursor: hasMore ? String(nextOffset) : undefined,
      hasMore,
    }
  },

  getDocument: async (
    accessToken: string,
    sourceConfig: Record<string, unknown>,
    externalId: string
  ): Promise<ExternalDocument | null> => {
    const { owner, repo } = parseRepo(sourceConfig.repository as string)
    const branch = ((sourceConfig.branch as string) || 'main').trim()

    // externalId is the file path
    const path = externalId

    try {
      const encodedPath = path.split('/').map(encodeURIComponent).join('/')
      const url = `${GITHUB_API_URL}/repos/${owner}/${repo}/contents/${encodedPath}?ref=${encodeURIComponent(branch)}`
      const response = await fetchWithRetry(url, {
        method: 'GET',
        headers: {
          Accept: 'application/vnd.github+json',
          Authorization: `Bearer ${accessToken}`,
          'X-GitHub-Api-Version': '2022-11-28',
        },
      })

      if (!response.ok) {
        if (response.status === 404) return null
        throw new Error(`Failed to fetch file ${path}: ${response.status}`)
      }

      const data = await response.json()
      const content =
        data.encoding === 'base64'
          ? atob((data.content as string).replace(/\n/g, ''))
          : (data.content as string) || ''
      const contentHash = await computeContentHash(content)

      return {
        externalId,
        title: path.split('/').pop() || path,
        content,
        mimeType: 'text/plain',
        sourceUrl: `https://github.com/${owner}/${repo}/blob/${branch}/${path}`,
        contentHash,
        metadata: {
          path,
          sha: data.sha as string,
          size: data.size as number,
          branch,
          repository: `${owner}/${repo}`,
        },
      }
    } catch (error) {
      logger.warn(`Failed to fetch GitHub document ${externalId}`, {
        error: error instanceof Error ? error.message : String(error),
      })
      return null
    }
  },

  validateConfig: async (
    accessToken: string,
    sourceConfig: Record<string, unknown>
  ): Promise<{ valid: boolean; error?: string }> => {
    const repository = (sourceConfig.repository as string)?.trim()
    if (!repository) {
      return { valid: false, error: 'Repository is required' }
    }

    let owner: string
    let repo: string
    try {
      const parsed = parseRepo(repository)
      owner = parsed.owner
      repo = parsed.repo
    } catch (error) {
      return {
        valid: false,
        error: error instanceof Error ? error.message : 'Invalid repository format',
      }
    }

    const maxFiles = sourceConfig.maxFiles as string | undefined
    if (maxFiles && (Number.isNaN(Number(maxFiles)) || Number(maxFiles) <= 0)) {
      return { valid: false, error: 'Max files must be a positive number' }
    }

    const branch = ((sourceConfig.branch as string) || 'main').trim()
|
||||
|
||||
try {
|
||||
// Verify repo and branch are accessible
|
||||
const url = `${GITHUB_API_URL}/repos/${owner}/${repo}/branches/${encodeURIComponent(branch)}`
|
||||
const response = await fetchWithRetry(
|
||||
url,
|
||||
{
|
||||
method: 'GET',
|
||||
headers: {
|
||||
Accept: 'application/vnd.github+json',
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
'X-GitHub-Api-Version': '2022-11-28',
|
||||
},
|
||||
},
|
||||
VALIDATE_RETRY_OPTIONS
|
||||
)
|
||||
|
||||
if (response.status === 404) {
|
||||
return {
|
||||
valid: false,
|
||||
error: `Repository "${owner}/${repo}" or branch "${branch}" not found`,
|
||||
}
|
||||
}
|
||||
|
||||
if (!response.ok) {
|
||||
return { valid: false, error: `Cannot access repository: ${response.status}` }
|
||||
}
|
||||
|
||||
return { valid: true }
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : 'Failed to validate configuration'
|
||||
return { valid: false, error: message }
|
||||
}
|
||||
},
|
||||
|
||||
tagDefinitions: [
|
||||
{ id: 'path', displayName: 'File Path', fieldType: 'text' },
|
||||
{ id: 'repository', displayName: 'Repository', fieldType: 'text' },
|
||||
{ id: 'branch', displayName: 'Branch', fieldType: 'text' },
|
||||
{ id: 'size', displayName: 'File Size', fieldType: 'number' },
|
||||
],
|
||||
|
||||
mapTags: (metadata: Record<string, unknown>): Record<string, unknown> => {
|
||||
const result: Record<string, unknown> = {}
|
||||
|
||||
if (typeof metadata.path === 'string') result.path = metadata.path
|
||||
if (typeof metadata.repository === 'string') result.repository = metadata.repository
|
||||
if (typeof metadata.branch === 'string') result.branch = metadata.branch
|
||||
|
||||
if (metadata.size != null) {
|
||||
const num = Number(metadata.size)
|
||||
if (!Number.isNaN(num)) result.size = num
|
||||
}
|
||||
|
||||
return result
|
||||
},
|
||||
}
|
||||
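
// Illustrative sketch (not part of the original file): one way a caller could
// drain the offset cursor that listDocuments returns. `fetchAllGithubDocuments`
// is a hypothetical name; the real sync engine's entry point may differ.
async function fetchAllGithubDocuments(
  accessToken: string,
  sourceConfig: Record<string, unknown>
): Promise<ExternalDocument[]> {
  const all: ExternalDocument[] = []
  let cursor: string | undefined
  do {
    // Each page holds up to BATCH_SIZE documents plus a stringified next offset
    const page = await githubConnector.listDocuments(accessToken, sourceConfig, cursor)
    all.push(...page.documents)
    cursor = page.nextCursor
  } while (cursor)
  return all
}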
@@ -1 +0,0 @@
|
||||
export { githubConnector } from '@/connectors/github/github'
|
||||
@@ -1,433 +0,0 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { GoogleDriveIcon } from '@/components/icons'
|
||||
import { fetchWithRetry, VALIDATE_RETRY_OPTIONS } from '@/lib/knowledge/documents/utils'
|
||||
import type { ConnectorConfig, ExternalDocument, ExternalDocumentList } from '@/connectors/types'
|
||||
|
||||
const logger = createLogger('GoogleDriveConnector')
|
||||
|
||||
const GOOGLE_WORKSPACE_MIME_TYPES: Record<string, string> = {
|
||||
'application/vnd.google-apps.document': 'text/plain',
|
||||
'application/vnd.google-apps.spreadsheet': 'text/csv',
|
||||
'application/vnd.google-apps.presentation': 'text/plain',
|
||||
}
|
||||
|
||||
const SUPPORTED_TEXT_MIME_TYPES = [
|
||||
'text/plain',
|
||||
'text/csv',
|
||||
'text/html',
|
||||
'text/markdown',
|
||||
'application/json',
|
||||
'application/xml',
|
||||
]
|
||||
|
||||
const MAX_EXPORT_SIZE = 10 * 1024 * 1024 // 10 MB (Google export limit)
|
||||
|
||||
async function computeContentHash(content: string): Promise<string> {
|
||||
const data = new TextEncoder().encode(content)
|
||||
const hashBuffer = await crypto.subtle.digest('SHA-256', data)
|
||||
return Array.from(new Uint8Array(hashBuffer))
|
||||
.map((b) => b.toString(16).padStart(2, '0'))
|
||||
.join('')
|
||||
}
|
||||
|
||||
function isGoogleWorkspaceFile(mimeType: string): boolean {
|
||||
return mimeType in GOOGLE_WORKSPACE_MIME_TYPES
|
||||
}
|
||||
|
||||
function isSupportedTextFile(mimeType: string): boolean {
|
||||
return SUPPORTED_TEXT_MIME_TYPES.some((t) => mimeType.startsWith(t))
|
||||
}
|
||||
|
||||
function htmlToPlainText(html: string): string {
  let text = html.replace(/<[^>]*>/g, ' ')
  text = text
    .replace(/&nbsp;/g, ' ')
    .replace(/&lt;/g, '<')
    .replace(/&gt;/g, '>')
    .replace(/&quot;/g, '"')
    .replace(/&#39;/g, "'")
    .replace(/&amp;/g, '&')
  return text.replace(/\s+/g, ' ').trim()
}
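
// Example: htmlToPlainText('<p>Fees &amp; limits</p><ul><li>10&nbsp;MB</li></ul>')
// returns 'Fees & limits 10 MB': tags become spaces, common entities are
// decoded (&amp; last, so entity text is not double-decoded), and runs of
// whitespace collapse to single spaces.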

async function exportGoogleWorkspaceFile(
  accessToken: string,
  fileId: string,
  sourceMimeType: string
): Promise<string> {
  const exportMimeType = GOOGLE_WORKSPACE_MIME_TYPES[sourceMimeType]
  if (!exportMimeType) {
    throw new Error(`Unsupported Google Workspace MIME type: ${sourceMimeType}`)
  }

  const url = `https://www.googleapis.com/drive/v3/files/${fileId}/export?mimeType=${encodeURIComponent(exportMimeType)}`

  const response = await fetchWithRetry(url, {
    method: 'GET',
    headers: { Authorization: `Bearer ${accessToken}` },
  })

  if (!response.ok) {
    throw new Error(`Failed to export file ${fileId}: ${response.status}`)
  }

  return response.text()
}

async function downloadTextFile(accessToken: string, fileId: string): Promise<string> {
  const url = `https://www.googleapis.com/drive/v3/files/${fileId}?alt=media`

  const response = await fetchWithRetry(url, {
    method: 'GET',
    headers: { Authorization: `Bearer ${accessToken}` },
  })

  if (!response.ok) {
    throw new Error(`Failed to download file ${fileId}: ${response.status}`)
  }

  const text = await response.text()
  if (text.length > MAX_EXPORT_SIZE) {
    return text.slice(0, MAX_EXPORT_SIZE)
  }
  return text
}

async function fetchFileContent(
  accessToken: string,
  fileId: string,
  mimeType: string
): Promise<string> {
  if (isGoogleWorkspaceFile(mimeType)) {
    return exportGoogleWorkspaceFile(accessToken, fileId, mimeType)
  }
  if (mimeType === 'text/html') {
    const html = await downloadTextFile(accessToken, fileId)
    return htmlToPlainText(html)
  }
  if (isSupportedTextFile(mimeType)) {
    return downloadTextFile(accessToken, fileId)
  }

  throw new Error(`Unsupported MIME type for content extraction: ${mimeType}`)
}

interface DriveFile {
  id: string
  name: string
  mimeType: string
  modifiedTime?: string
  createdTime?: string
  webViewLink?: string
  parents?: string[]
  owners?: { displayName?: string; emailAddress?: string }[]
  size?: string
  starred?: boolean
  trashed?: boolean
}

function buildQuery(sourceConfig: Record<string, unknown>): string {
  const parts: string[] = ['trashed = false']

  const folderId = sourceConfig.folderId as string | undefined
  if (folderId?.trim()) {
    parts.push(`'${folderId.trim()}' in parents`)
  }

  const fileType = (sourceConfig.fileType as string) || 'all'
  switch (fileType) {
    case 'documents':
      parts.push("mimeType = 'application/vnd.google-apps.document'")
      break
    case 'spreadsheets':
      parts.push("mimeType = 'application/vnd.google-apps.spreadsheet'")
      break
    case 'presentations':
      parts.push("mimeType = 'application/vnd.google-apps.presentation'")
      break
    case 'text':
      parts.push(`(${SUPPORTED_TEXT_MIME_TYPES.map((t) => `mimeType = '${t}'`).join(' or ')})`)
      break
    default: {
      // Include Google Workspace files + plain text files, exclude folders
      const allMimeTypes = [
        ...Object.keys(GOOGLE_WORKSPACE_MIME_TYPES),
        ...SUPPORTED_TEXT_MIME_TYPES,
      ]
      parts.push(`(${allMimeTypes.map((t) => `mimeType = '${t}'`).join(' or ')})`)
      break
    }
  }

  return parts.join(' and ')
}
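
// Example: with sourceConfig = { folderId: 'abc123', fileType: 'documents' }
// the returned Drive query is:
//   trashed = false and 'abc123' in parents and mimeType = 'application/vnd.google-apps.document'
// ('abc123' is a placeholder folder ID.)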

async function fileToDocument(
  accessToken: string,
  file: DriveFile
): Promise<ExternalDocument | null> {
  try {
    const content = await fetchFileContent(accessToken, file.id, file.mimeType)
    if (!content.trim()) {
      logger.info(`Skipping empty file: ${file.name} (${file.id})`)
      return null
    }

    const contentHash = await computeContentHash(content)

    return {
      externalId: file.id,
      title: file.name || 'Untitled',
      content,
      mimeType: 'text/plain',
      sourceUrl: file.webViewLink || `https://drive.google.com/file/d/${file.id}/view`,
      contentHash,
      metadata: {
        originalMimeType: file.mimeType,
        modifiedTime: file.modifiedTime,
        createdTime: file.createdTime,
        owners: file.owners?.map((o) => o.displayName || o.emailAddress).filter(Boolean),
        starred: file.starred,
        fileSize: file.size ? Number(file.size) : undefined,
      },
    }
  } catch (error) {
    logger.warn(`Failed to extract content from file: ${file.name} (${file.id})`, {
      error: error instanceof Error ? error.message : String(error),
    })
    return null
  }
}

export const googleDriveConnector: ConnectorConfig = {
  id: 'google_drive',
  name: 'Google Drive',
  description: 'Sync documents from Google Drive into your knowledge base',
  version: '1.0.0',
  icon: GoogleDriveIcon,

  oauth: {
    required: true,
    provider: 'google-drive',
    requiredScopes: ['https://www.googleapis.com/auth/drive.readonly'],
  },

  configFields: [
    {
      id: 'folderId',
      title: 'Folder ID',
      type: 'short-input',
      placeholder: 'e.g. 1aBcDeFgHiJkLmNoPqRsTuVwXyZ (optional)',
      required: false,
    },
    {
      id: 'fileType',
      title: 'File Type',
      type: 'dropdown',
      required: false,
      options: [
        { label: 'All supported files', id: 'all' },
        { label: 'Google Docs only', id: 'documents' },
        { label: 'Google Sheets only', id: 'spreadsheets' },
        { label: 'Google Slides only', id: 'presentations' },
        { label: 'Plain text files only', id: 'text' },
      ],
    },
    {
      id: 'maxFiles',
      title: 'Max Files',
      type: 'short-input',
      required: false,
      placeholder: 'e.g. 500 (default: unlimited)',
    },
  ],

  listDocuments: async (
    accessToken: string,
    sourceConfig: Record<string, unknown>,
    cursor?: string
  ): Promise<ExternalDocumentList> => {
    const query = buildQuery(sourceConfig)
    const pageSize = 100

    const queryParams = new URLSearchParams({
      q: query,
      pageSize: String(pageSize),
      fields:
        'nextPageToken,files(id,name,mimeType,modifiedTime,createdTime,webViewLink,parents,owners,size,starred)',
      supportsAllDrives: 'true',
      includeItemsFromAllDrives: 'true',
    })

    if (cursor) {
      queryParams.set('pageToken', cursor)
    }

    const url = `https://www.googleapis.com/drive/v3/files?${queryParams.toString()}`

    logger.info('Listing Google Drive files', { query, cursor: cursor ?? 'initial' })

    const response = await fetchWithRetry(url, {
      method: 'GET',
      headers: {
        Authorization: `Bearer ${accessToken}`,
        Accept: 'application/json',
      },
    })

    if (!response.ok) {
      const errorText = await response.text()
      logger.error('Failed to list Google Drive files', {
        status: response.status,
        error: errorText,
      })
      throw new Error(`Failed to list Google Drive files: ${response.status}`)
    }

    const data = await response.json()
    const files = (data.files || []) as DriveFile[]

    const documentResults = await Promise.all(
      files.map((file) => fileToDocument(accessToken, file))
    )
    const documents = documentResults.filter(Boolean) as ExternalDocument[]

    const nextPageToken = data.nextPageToken as string | undefined
    const hasMore = Boolean(nextPageToken)

    return {
      documents,
      nextCursor: nextPageToken,
      hasMore,
    }
  },

  getDocument: async (
    accessToken: string,
    sourceConfig: Record<string, unknown>,
    externalId: string
  ): Promise<ExternalDocument | null> => {
    const fields =
      'id,name,mimeType,modifiedTime,createdTime,webViewLink,parents,owners,size,starred,trashed'
    const url = `https://www.googleapis.com/drive/v3/files/${externalId}?fields=${encodeURIComponent(fields)}&supportsAllDrives=true`

    const response = await fetchWithRetry(url, {
      method: 'GET',
      headers: {
        Authorization: `Bearer ${accessToken}`,
        Accept: 'application/json',
      },
    })

    if (!response.ok) {
      if (response.status === 404) return null
      throw new Error(`Failed to get Google Drive file: ${response.status}`)
    }

    const file = (await response.json()) as DriveFile

    if (file.trashed) return null

    return fileToDocument(accessToken, file)
  },

  validateConfig: async (
    accessToken: string,
    sourceConfig: Record<string, unknown>
  ): Promise<{ valid: boolean; error?: string }> => {
    const folderId = sourceConfig.folderId as string | undefined
    const maxFiles = sourceConfig.maxFiles as string | undefined

    if (maxFiles && (Number.isNaN(Number(maxFiles)) || Number(maxFiles) <= 0)) {
      return { valid: false, error: 'Max files must be a positive number' }
    }

    // Verify access to Drive API
    try {
      if (folderId?.trim()) {
        // Verify the folder exists and is accessible
        const url = `https://www.googleapis.com/drive/v3/files/${folderId.trim()}?fields=id,name,mimeType&supportsAllDrives=true`
        const response = await fetchWithRetry(
          url,
          {
            method: 'GET',
            headers: {
              Authorization: `Bearer ${accessToken}`,
              Accept: 'application/json',
            },
          },
          VALIDATE_RETRY_OPTIONS
        )

        if (!response.ok) {
          if (response.status === 404) {
            return { valid: false, error: 'Folder not found. Check the folder ID and permissions.' }
          }
          return { valid: false, error: `Failed to access folder: ${response.status}` }
        }

        const folder = await response.json()
        if (folder.mimeType !== 'application/vnd.google-apps.folder') {
          return { valid: false, error: 'The provided ID is not a folder' }
        }
      } else {
        // Verify basic Drive access by listing one file
        const url = 'https://www.googleapis.com/drive/v3/files?pageSize=1&fields=files(id)'
        const response = await fetchWithRetry(
          url,
          {
            method: 'GET',
            headers: {
              Authorization: `Bearer ${accessToken}`,
              Accept: 'application/json',
            },
          },
          VALIDATE_RETRY_OPTIONS
        )

        if (!response.ok) {
          return { valid: false, error: `Failed to access Google Drive: ${response.status}` }
        }
      }

      return { valid: true }
    } catch (error) {
      const message = error instanceof Error ? error.message : 'Failed to validate configuration'
      return { valid: false, error: message }
    }
  },

  tagDefinitions: [
    { id: 'owners', displayName: 'Owner', fieldType: 'text' },
    { id: 'fileType', displayName: 'File Type', fieldType: 'text' },
    { id: 'lastModified', displayName: 'Last Modified', fieldType: 'date' },
    { id: 'starred', displayName: 'Starred', fieldType: 'boolean' },
  ],

  mapTags: (metadata: Record<string, unknown>): Record<string, unknown> => {
    const result: Record<string, unknown> = {}

    const owners = Array.isArray(metadata.owners) ? (metadata.owners as string[]) : []
    if (owners.length > 0) result.owners = owners.join(', ')

    if (typeof metadata.originalMimeType === 'string') {
      const mimeType = metadata.originalMimeType
      if (mimeType.includes('document')) result.fileType = 'Google Doc'
      else if (mimeType.includes('spreadsheet')) result.fileType = 'Google Sheet'
      else if (mimeType.includes('presentation')) result.fileType = 'Google Slides'
      else if (mimeType.startsWith('text/')) result.fileType = 'Text File'
      else result.fileType = mimeType
    }

    if (typeof metadata.modifiedTime === 'string') {
      const date = new Date(metadata.modifiedTime)
      if (!Number.isNaN(date.getTime())) result.lastModified = date
    }

    if (typeof metadata.starred === 'boolean') {
      result.starred = metadata.starred
    }

    return result
  },
}
@@ -1 +0,0 @@
|
||||
export { googleDriveConnector } from '@/connectors/google-drive/google-drive'
|
||||
@@ -1,9 +0,0 @@
|
||||
export { CONNECTOR_REGISTRY } from '@/connectors/registry'
|
||||
export type {
|
||||
ConnectorConfig,
|
||||
ConnectorConfigField,
|
||||
ConnectorRegistry,
|
||||
ExternalDocument,
|
||||
ExternalDocumentList,
|
||||
SyncResult,
|
||||
} from '@/connectors/types'
|
||||
@@ -1 +0,0 @@
|
||||
export { jiraConnector } from '@/connectors/jira/jira'
|
||||
@@ -1,337 +0,0 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { JiraIcon } from '@/components/icons'
|
||||
import { fetchWithRetry, VALIDATE_RETRY_OPTIONS } from '@/lib/knowledge/documents/utils'
|
||||
import type { ConnectorConfig, ExternalDocument, ExternalDocumentList } from '@/connectors/types'
|
||||
import { extractAdfText, getJiraCloudId } from '@/tools/jira/utils'
|
||||
|
||||
const logger = createLogger('JiraConnector')
|
||||
|
||||
const PAGE_SIZE = 50
|
||||
|
||||
/**
|
||||
* Computes a SHA-256 hash of the given content.
|
||||
*/
|
||||
async function computeContentHash(content: string): Promise<string> {
|
||||
const data = new TextEncoder().encode(content)
|
||||
const hashBuffer = await crypto.subtle.digest('SHA-256', data)
|
||||
return Array.from(new Uint8Array(hashBuffer))
|
||||
.map((b) => b.toString(16).padStart(2, '0'))
|
||||
.join('')
|
||||
}
|
||||
|
||||
/**
|
||||
* Builds a plain-text representation of a Jira issue for knowledge base indexing.
|
||||
*/
|
||||
function buildIssueContent(fields: Record<string, unknown>): string {
|
||||
const parts: string[] = []
|
||||
|
||||
const summary = fields.summary as string | undefined
|
||||
if (summary) parts.push(summary)
|
||||
|
||||
const description = extractAdfText(fields.description)
|
||||
if (description) parts.push(description)
|
||||
|
||||
const comments = fields.comment as { comments?: Array<{ body?: unknown }> } | undefined
|
||||
if (comments?.comments) {
|
||||
for (const comment of comments.comments) {
|
||||
const text = extractAdfText(comment.body)
|
||||
if (text) parts.push(text)
|
||||
}
|
||||
}
|
||||
|
||||
return parts.join('\n\n').trim()
|
||||
}
|
||||
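
// Example (assuming extractAdfText flattens ADF nodes to the plain text shown):
// an issue with summary 'Fix login', description 'Users cannot sign in', and
// one comment 'Repro attached' produces:
//   'Fix login\n\nUsers cannot sign in\n\nRepro attached'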

/**
 * Converts a Jira issue API response to an ExternalDocument.
 */
async function issueToDocument(
  issue: Record<string, unknown>,
  domain: string
): Promise<ExternalDocument> {
  const fields = (issue.fields || {}) as Record<string, unknown>
  const content = buildIssueContent(fields)
  const contentHash = await computeContentHash(content)

  const key = issue.key as string
  const issueType = fields.issuetype as Record<string, unknown> | undefined
  const status = fields.status as Record<string, unknown> | undefined
  const priority = fields.priority as Record<string, unknown> | undefined
  const assignee = fields.assignee as Record<string, unknown> | undefined
  const reporter = fields.reporter as Record<string, unknown> | undefined
  const project = fields.project as Record<string, unknown> | undefined
  const labels = Array.isArray(fields.labels) ? (fields.labels as string[]) : []

  return {
    externalId: String(issue.id),
    title: `${key}: ${(fields.summary as string) || 'Untitled'}`,
    content,
    mimeType: 'text/plain',
    sourceUrl: `https://${domain}/browse/${key}`,
    contentHash,
    metadata: {
      key,
      issueType: issueType?.name,
      status: status?.name,
      priority: priority?.name,
      assignee: assignee?.displayName,
      reporter: reporter?.displayName,
      project: project?.key,
      labels,
      created: fields.created,
      updated: fields.updated,
    },
  }
}

export const jiraConnector: ConnectorConfig = {
  id: 'jira',
  name: 'Jira',
  description: 'Sync issues from a Jira project into your knowledge base',
  version: '1.0.0',
  icon: JiraIcon,

  oauth: {
    required: true,
    provider: 'jira',
    requiredScopes: ['read:jira-work', 'offline_access'],
  },

  configFields: [
    {
      id: 'domain',
      title: 'Jira Domain',
      type: 'short-input',
      placeholder: 'yoursite.atlassian.net',
      required: true,
    },
    {
      id: 'projectKey',
      title: 'Project Key',
      type: 'short-input',
      placeholder: 'e.g. ENG, PROJ',
      required: true,
    },
    {
      id: 'jql',
      title: 'JQL Filter',
      type: 'short-input',
      required: false,
      placeholder: 'e.g. status = "Done" AND type = Bug',
    },
    {
      id: 'maxIssues',
      title: 'Max Issues',
      type: 'short-input',
      required: false,
      placeholder: 'e.g. 500 (default: unlimited)',
    },
  ],

  listDocuments: async (
    accessToken: string,
    sourceConfig: Record<string, unknown>,
    cursor?: string
  ): Promise<ExternalDocumentList> => {
    const domain = sourceConfig.domain as string
    const projectKey = sourceConfig.projectKey as string
    const jqlFilter = (sourceConfig.jql as string) || ''
    const maxIssues = sourceConfig.maxIssues ? Number(sourceConfig.maxIssues) : 0

    const cloudId = await getJiraCloudId(domain, accessToken)

    const safeKey = projectKey.replace(/\\/g, '\\\\').replace(/"/g, '\\"')
    let jql = `project = "${safeKey}" ORDER BY updated DESC`
    if (jqlFilter.trim()) {
      jql = `project = "${safeKey}" AND (${jqlFilter.trim()}) ORDER BY updated DESC`
    }
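
    // Example: projectKey 'ENG' with jqlFilter 'status = "Done"' yields
    //   project = "ENG" AND (status = "Done") ORDER BY updated DESC
    // The safeKey escaping above means a key containing quotes or backslashes
    // cannot break out of the quoted JQL string.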

    const startAt = cursor ? Number(cursor) : 0

    const params = new URLSearchParams()
    params.append('jql', jql)
    params.append('startAt', String(startAt))
    params.append('maxResults', String(PAGE_SIZE))
    params.append(
      'fields',
      'summary,description,comment,issuetype,status,priority,assignee,reporter,project,labels,created,updated'
    )

    const url = `https://api.atlassian.com/ex/jira/${cloudId}/rest/api/3/search?${params.toString()}`

    logger.info(`Listing Jira issues for project ${projectKey}`, { startAt })

    const response = await fetchWithRetry(url, {
      method: 'GET',
      headers: {
        Accept: 'application/json',
        Authorization: `Bearer ${accessToken}`,
      },
    })

    if (!response.ok) {
      const errorText = await response.text()
      logger.error('Failed to search Jira issues', {
        status: response.status,
        error: errorText,
      })
      throw new Error(`Failed to search Jira issues: ${response.status}`)
    }

    const data = await response.json()
    const issues = (data.issues || []) as Record<string, unknown>[]
    const total = (data.total as number) ?? 0

    const documents: ExternalDocument[] = await Promise.all(
      issues.map((issue) => issueToDocument(issue, domain))
    )

    const nextStart = startAt + issues.length
    const hasMore = nextStart < total && (maxIssues <= 0 || nextStart < maxIssues)

    return {
      documents,
      nextCursor: hasMore ? String(nextStart) : undefined,
      hasMore,
    }
  },

  getDocument: async (
    accessToken: string,
    sourceConfig: Record<string, unknown>,
    externalId: string
  ): Promise<ExternalDocument | null> => {
    const domain = sourceConfig.domain as string
    const cloudId = await getJiraCloudId(domain, accessToken)

    const params = new URLSearchParams()
    params.append(
      'fields',
      'summary,description,comment,issuetype,status,priority,assignee,reporter,project,labels,created,updated'
    )

    const url = `https://api.atlassian.com/ex/jira/${cloudId}/rest/api/3/issue/${externalId}?${params.toString()}`

    const response = await fetchWithRetry(url, {
      method: 'GET',
      headers: {
        Accept: 'application/json',
        Authorization: `Bearer ${accessToken}`,
      },
    })

    if (!response.ok) {
      if (response.status === 404) return null
      throw new Error(`Failed to get Jira issue: ${response.status}`)
    }

    const issue = await response.json()
    return issueToDocument(issue, domain)
  },

  validateConfig: async (
    accessToken: string,
    sourceConfig: Record<string, unknown>
  ): Promise<{ valid: boolean; error?: string }> => {
    const domain = sourceConfig.domain as string
    const projectKey = sourceConfig.projectKey as string

    if (!domain || !projectKey) {
      return { valid: false, error: 'Domain and project key are required' }
    }

    const maxIssues = sourceConfig.maxIssues as string | undefined
    if (maxIssues && (Number.isNaN(Number(maxIssues)) || Number(maxIssues) <= 0)) {
      return { valid: false, error: 'Max issues must be a positive number' }
    }

    const jqlFilter = (sourceConfig.jql as string | undefined)?.trim() || ''

    try {
      const cloudId = await getJiraCloudId(domain, accessToken)

      const params = new URLSearchParams()
      const safeKey = projectKey.replace(/\\/g, '\\\\').replace(/"/g, '\\"')
      params.append('jql', `project = "${safeKey}"`)
      params.append('maxResults', '0')

      const url = `https://api.atlassian.com/ex/jira/${cloudId}/rest/api/3/search?${params.toString()}`
      const response = await fetchWithRetry(
        url,
        {
          method: 'GET',
          headers: {
            Accept: 'application/json',
            Authorization: `Bearer ${accessToken}`,
          },
        },
        VALIDATE_RETRY_OPTIONS
      )

      if (!response.ok) {
        const errorText = await response.text()
        if (response.status === 400) {
          return { valid: false, error: `Project "${projectKey}" not found or JQL is invalid` }
        }
        return { valid: false, error: `Failed to validate: ${response.status} - ${errorText}` }
      }

      if (jqlFilter) {
        const filterParams = new URLSearchParams()
        filterParams.append('jql', `project = "${safeKey}" AND (${jqlFilter})`)
        filterParams.append('maxResults', '0')

        const filterUrl = `https://api.atlassian.com/ex/jira/${cloudId}/rest/api/3/search?${filterParams.toString()}`
        const filterResponse = await fetchWithRetry(
          filterUrl,
          {
            method: 'GET',
            headers: {
              Accept: 'application/json',
              Authorization: `Bearer ${accessToken}`,
            },
          },
          VALIDATE_RETRY_OPTIONS
        )

        if (!filterResponse.ok) {
          return { valid: false, error: 'Invalid JQL filter. Check syntax and field names.' }
        }
      }

      return { valid: true }
    } catch (error) {
      const message = error instanceof Error ? error.message : 'Failed to validate configuration'
      return { valid: false, error: message }
    }
  },

  tagDefinitions: [
    { id: 'issueType', displayName: 'Issue Type', fieldType: 'text' },
    { id: 'status', displayName: 'Status', fieldType: 'text' },
    { id: 'priority', displayName: 'Priority', fieldType: 'text' },
    { id: 'labels', displayName: 'Labels', fieldType: 'text' },
    { id: 'assignee', displayName: 'Assignee', fieldType: 'text' },
    { id: 'updated', displayName: 'Last Updated', fieldType: 'date' },
  ],

  mapTags: (metadata: Record<string, unknown>): Record<string, unknown> => {
    const result: Record<string, unknown> = {}

    if (typeof metadata.issueType === 'string') result.issueType = metadata.issueType
    if (typeof metadata.status === 'string') result.status = metadata.status
    if (typeof metadata.priority === 'string') result.priority = metadata.priority

    const labels = Array.isArray(metadata.labels) ? (metadata.labels as string[]) : []
    if (labels.length > 0) result.labels = labels.join(', ')

    if (typeof metadata.assignee === 'string') result.assignee = metadata.assignee

    if (typeof metadata.updated === 'string') {
      const date = new Date(metadata.updated)
      if (!Number.isNaN(date.getTime())) result.updated = date
    }

    return result
  },
}
@@ -1 +0,0 @@
export { linearConnector } from '@/connectors/linear/linear'
@@ -1,416 +0,0 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { LinearIcon } from '@/components/icons'
|
||||
import { fetchWithRetry } from '@/lib/knowledge/documents/utils'
|
||||
import type { ConnectorConfig, ExternalDocument, ExternalDocumentList } from '@/connectors/types'
|
||||
|
||||
const logger = createLogger('LinearConnector')
|
||||
|
||||
const LINEAR_API = 'https://api.linear.app/graphql'
|
||||
|
||||
/**
|
||||
* Computes a SHA-256 hash of the given content.
|
||||
*/
|
||||
async function computeContentHash(content: string): Promise<string> {
|
||||
const data = new TextEncoder().encode(content)
|
||||
const hashBuffer = await crypto.subtle.digest('SHA-256', data)
|
||||
return Array.from(new Uint8Array(hashBuffer))
|
||||
.map((b) => b.toString(16).padStart(2, '0'))
|
||||
.join('')
|
||||
}
|
||||
|
||||
/**
|
||||
* Strips Markdown formatting to produce plain text.
|
||||
*/
|
||||
function markdownToPlainText(md: string): string {
|
||||
let text = md
|
||||
.replace(/!\[.*?\]\(.*?\)/g, '') // images
|
||||
.replace(/\[([^\]]*)\]\(.*?\)/g, '$1') // links
|
||||
.replace(/#{1,6}\s+/g, '') // headings
|
||||
.replace(/(\*\*|__)(.*?)\1/g, '$2') // bold
|
||||
.replace(/(\*|_)(.*?)\1/g, '$2') // italic
|
||||
.replace(/~~(.*?)~~/g, '$1') // strikethrough
|
||||
.replace(/`{3}[\s\S]*?`{3}/g, '') // code blocks
|
||||
.replace(/`([^`]*)`/g, '$1') // inline code
|
||||
.replace(/^\s*[-*+]\s+/gm, '') // list items
|
||||
.replace(/^\s*\d+\.\s+/gm, '') // ordered list items
|
||||
.replace(/^\s*>\s+/gm, '') // blockquotes
|
||||
.replace(/---+/g, '') // horizontal rules
|
||||
text = text.replace(/\s+/g, ' ').trim()
|
||||
return text
|
||||
}
|
||||
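
// Example: markdownToPlainText('## Title\n**bold** and [a link](https://example.com)')
// returns 'Title bold and a link': headings, emphasis markers, and link targets
// are stripped, then whitespace is collapsed.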

/**
 * Executes a GraphQL query against the Linear API.
 */
async function linearGraphQL(
  accessToken: string,
  query: string,
  variables?: Record<string, unknown>
): Promise<Record<string, unknown>> {
  const response = await fetchWithRetry(LINEAR_API, {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      Authorization: `Bearer ${accessToken}`,
    },
    body: JSON.stringify({ query, variables }),
  })

  if (!response.ok) {
    const errorText = await response.text()
    logger.error('Linear GraphQL request failed', { status: response.status, error: errorText })
    throw new Error(`Linear API error: ${response.status}`)
  }

  const json = (await response.json()) as { data?: Record<string, unknown>; errors?: unknown[] }
  if (json.errors) {
    logger.error('Linear GraphQL errors', { errors: json.errors })
    throw new Error(`Linear GraphQL error: ${JSON.stringify(json.errors)}`)
  }

  return json.data as Record<string, unknown>
}
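
// Example usage, mirroring validateConfig below (TEAMS_QUERY is defined later
// in this file):
//   const data = await linearGraphQL(accessToken, TEAMS_QUERY)
//   const teams = (data.teams as Record<string, unknown>).nodes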

/**
 * Builds a formatted text document from a Linear issue.
 */
function buildIssueContent(issue: Record<string, unknown>): string {
  const parts: string[] = []

  const identifier = issue.identifier as string | undefined
  const title = (issue.title as string) || 'Untitled'
  parts.push(`${identifier ? `${identifier}: ` : ''}${title}`)

  const state = issue.state as Record<string, unknown> | undefined
  if (state?.name) parts.push(`Status: ${state.name}`)

  const priority = issue.priorityLabel as string | undefined
  if (priority) parts.push(`Priority: ${priority}`)

  const assignee = issue.assignee as Record<string, unknown> | undefined
  if (assignee?.name) parts.push(`Assignee: ${assignee.name}`)

  const labelsConn = issue.labels as Record<string, unknown> | undefined
  const labelNodes = (labelsConn?.nodes || []) as Record<string, unknown>[]
  if (labelNodes.length > 0) {
    parts.push(`Labels: ${labelNodes.map((l) => l.name as string).join(', ')}`)
  }

  const description = issue.description as string | undefined
  if (description) {
    parts.push('')
    parts.push(markdownToPlainText(description))
  }

  return parts.join('\n')
}

const ISSUE_FIELDS = `
  id
  identifier
  title
  description
  priority
  priorityLabel
  url
  createdAt
  updatedAt
  state { name }
  assignee { name }
  labels { nodes { name } }
  team { name key }
  project { name }
`

const ISSUE_BY_ID_QUERY = `
  query GetIssue($id: String!) {
    issue(id: $id) {
      ${ISSUE_FIELDS}
    }
  }
`

const TEAMS_QUERY = `
  query { teams { nodes { id name key } } }
`

/**
 * Dynamically builds a GraphQL issues query with only the filter clauses
 * that have values, preventing null comparators from being sent to Linear.
 */
function buildIssuesQuery(sourceConfig: Record<string, unknown>): {
  query: string
  variables: Record<string, unknown>
} {
  const teamId = (sourceConfig.teamId as string) || ''
  const projectId = (sourceConfig.projectId as string) || ''
  const stateFilter = (sourceConfig.stateFilter as string) || ''

  const varDefs: string[] = ['$first: Int!', '$after: String']
  const filterClauses: string[] = []
  const variables: Record<string, unknown> = {}

  if (teamId) {
    varDefs.push('$teamId: String!')
    filterClauses.push('team: { id: { eq: $teamId } }')
    variables.teamId = teamId
  }

  if (projectId) {
    varDefs.push('$projectId: String!')
    filterClauses.push('project: { id: { eq: $projectId } }')
    variables.projectId = projectId
  }

  if (stateFilter) {
    const states = stateFilter
      .split(',')
      .map((s) => s.trim())
      .filter(Boolean)
    if (states.length > 0) {
      varDefs.push('$stateFilter: [String!]!')
      filterClauses.push('state: { name: { in: $stateFilter } }')
      variables.stateFilter = states
    }
  }

  const filterArg = filterClauses.length > 0 ? `, filter: { ${filterClauses.join(', ')} }` : ''

  const query = `
    query ListIssues(${varDefs.join(', ')}) {
      issues(first: $first, after: $after${filterArg}) {
        nodes {
          ${ISSUE_FIELDS}
        }
        pageInfo {
          hasNextPage
          endCursor
        }
      }
    }
  `

  return { query, variables }
}
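
// Example: buildIssuesQuery({ teamId: 'abc123' }) produces variables
// { teamId: 'abc123' } and a query shaped roughly like:
//   query ListIssues($first: Int!, $after: String, $teamId: String!) {
//     issues(first: $first, after: $after, filter: { team: { id: { eq: $teamId } } }) { ... }
//   }
// ('abc123' is a placeholder team ID.)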

export const linearConnector: ConnectorConfig = {
  id: 'linear',
  name: 'Linear',
  description: 'Sync issues from Linear into your knowledge base',
  version: '1.0.0',
  icon: LinearIcon,

  oauth: {
    required: true,
    provider: 'linear',
    requiredScopes: ['read'],
  },

  configFields: [
    {
      id: 'teamId',
      title: 'Team ID',
      type: 'short-input',
      placeholder: 'e.g. abc123 (leave empty for all teams)',
      required: false,
    },
    {
      id: 'projectId',
      title: 'Project ID',
      type: 'short-input',
      placeholder: 'e.g. def456 (leave empty for all projects)',
      required: false,
    },
    {
      id: 'stateFilter',
      title: 'State Filter',
      type: 'short-input',
      placeholder: 'e.g. In Progress, Todo',
      required: false,
    },
    {
      id: 'maxIssues',
      title: 'Max Issues',
      type: 'short-input',
      required: false,
      placeholder: 'e.g. 500 (default: unlimited)',
    },
  ],

  listDocuments: async (
    accessToken: string,
    sourceConfig: Record<string, unknown>,
    cursor?: string
  ): Promise<ExternalDocumentList> => {
    const maxIssues = sourceConfig.maxIssues ? Number(sourceConfig.maxIssues) : 0
    const pageSize = maxIssues > 0 ? Math.min(maxIssues, 50) : 50

    const { query, variables } = buildIssuesQuery(sourceConfig)
    const allVars = { ...variables, first: pageSize, after: cursor || undefined }

    logger.info('Listing Linear issues', {
      cursor,
      pageSize,
      hasTeamFilter: Boolean(sourceConfig.teamId),
      hasProjectFilter: Boolean(sourceConfig.projectId),
    })

    const data = await linearGraphQL(accessToken, query, allVars)
    const issuesConn = data.issues as Record<string, unknown>
    const nodes = (issuesConn.nodes || []) as Record<string, unknown>[]
    const pageInfo = issuesConn.pageInfo as Record<string, unknown>

    const documents: ExternalDocument[] = await Promise.all(
      nodes.map(async (issue) => {
        const content = buildIssueContent(issue)
        const contentHash = await computeContentHash(content)

        const labelNodes = ((issue.labels as Record<string, unknown>)?.nodes || []) as Record<
          string,
          unknown
        >[]

        return {
          externalId: issue.id as string,
          title: `${(issue.identifier as string) || ''}: ${(issue.title as string) || 'Untitled'}`,
          content,
          mimeType: 'text/plain' as const,
          sourceUrl: (issue.url as string) || undefined,
          contentHash,
          metadata: {
            identifier: issue.identifier,
            state: (issue.state as Record<string, unknown>)?.name,
            priority: issue.priorityLabel,
            assignee: (issue.assignee as Record<string, unknown>)?.name,
            labels: labelNodes.map((l) => l.name as string),
            team: (issue.team as Record<string, unknown>)?.name,
            project: (issue.project as Record<string, unknown>)?.name,
            lastModified: issue.updatedAt,
          },
        }
      })
    )

    const hasNextPage = Boolean(pageInfo.hasNextPage)
    const endCursor = (pageInfo.endCursor as string) || undefined

    return {
      documents,
      nextCursor: hasNextPage ? endCursor : undefined,
      hasMore: hasNextPage,
    }
  },

  getDocument: async (
    accessToken: string,
    sourceConfig: Record<string, unknown>,
    externalId: string
  ): Promise<ExternalDocument | null> => {
    try {
      const data = await linearGraphQL(accessToken, ISSUE_BY_ID_QUERY, { id: externalId })
      const issue = data.issue as Record<string, unknown> | null

      if (!issue) return null

      const content = buildIssueContent(issue)
      const contentHash = await computeContentHash(content)

      const labelNodes = ((issue.labels as Record<string, unknown>)?.nodes || []) as Record<
        string,
        unknown
      >[]

      return {
        externalId: issue.id as string,
        title: `${(issue.identifier as string) || ''}: ${(issue.title as string) || 'Untitled'}`,
        content,
        mimeType: 'text/plain',
        sourceUrl: (issue.url as string) || undefined,
        contentHash,
        metadata: {
          identifier: issue.identifier,
          state: (issue.state as Record<string, unknown>)?.name,
          priority: issue.priorityLabel,
          assignee: (issue.assignee as Record<string, unknown>)?.name,
          labels: labelNodes.map((l) => l.name as string),
          team: (issue.team as Record<string, unknown>)?.name,
          project: (issue.project as Record<string, unknown>)?.name,
          lastModified: issue.updatedAt,
        },
      }
    } catch (error) {
      logger.error('Failed to get Linear issue', {
        externalId,
        error: error instanceof Error ? error.message : String(error),
      })
      return null
    }
  },

  validateConfig: async (
    accessToken: string,
    sourceConfig: Record<string, unknown>
  ): Promise<{ valid: boolean; error?: string }> => {
    const maxIssues = sourceConfig.maxIssues as string | undefined
    if (maxIssues && (Number.isNaN(Number(maxIssues)) || Number(maxIssues) <= 0)) {
      return { valid: false, error: 'Max issues must be a positive number' }
    }

    try {
      // Verify the token works by fetching teams
      const data = await linearGraphQL(accessToken, TEAMS_QUERY)
      const teamsConn = data.teams as Record<string, unknown>
      const teams = (teamsConn.nodes || []) as Record<string, unknown>[]

      if (teams.length === 0) {
        return {
          valid: false,
          error: 'No teams found — check that the OAuth token has read access',
        }
      }

      // If teamId specified, verify it exists
      const teamId = sourceConfig.teamId as string | undefined
      if (teamId) {
        const found = teams.some((t) => t.id === teamId)
        if (!found) {
          return {
            valid: false,
            error: `Team ID "${teamId}" not found. Available teams: ${teams.map((t) => `${t.name} (${t.id})`).join(', ')}`,
          }
        }
      }

      return { valid: true }
    } catch (error) {
      const message = error instanceof Error ? error.message : 'Failed to validate configuration'
      return { valid: false, error: message }
    }
  },

  tagDefinitions: [
    { id: 'labels', displayName: 'Labels', fieldType: 'text' },
    { id: 'state', displayName: 'State', fieldType: 'text' },
    { id: 'priority', displayName: 'Priority', fieldType: 'text' },
    { id: 'assignee', displayName: 'Assignee', fieldType: 'text' },
    { id: 'lastModified', displayName: 'Last Modified', fieldType: 'date' },
  ],

  mapTags: (metadata: Record<string, unknown>): Record<string, unknown> => {
    const result: Record<string, unknown> = {}

    const labels = Array.isArray(metadata.labels) ? (metadata.labels as string[]) : []
    if (labels.length > 0) result.labels = labels.join(', ')

    if (typeof metadata.state === 'string') result.state = metadata.state
    if (typeof metadata.priority === 'string') result.priority = metadata.priority
    if (typeof metadata.assignee === 'string') result.assignee = metadata.assignee

    if (typeof metadata.lastModified === 'string') {
      const date = new Date(metadata.lastModified)
      if (!Number.isNaN(date.getTime())) result.lastModified = date
    }

    return result
  },
}
@@ -1,393 +0,0 @@
|
||||
/**
|
||||
* @vitest-environment node
|
||||
*/
|
||||
import { describe, expect, it, vi } from 'vitest'
|
||||
|
||||
vi.mock('@/components/icons', () => ({
|
||||
JiraIcon: () => null,
|
||||
ConfluenceIcon: () => null,
|
||||
GithubIcon: () => null,
|
||||
LinearIcon: () => null,
|
||||
NotionIcon: () => null,
|
||||
GoogleDriveIcon: () => null,
|
||||
AirtableIcon: () => null,
|
||||
}))
|
||||
vi.mock('@sim/logger', () => ({
|
||||
createLogger: () => ({ info: vi.fn(), warn: vi.fn(), error: vi.fn(), debug: vi.fn() }),
|
||||
}))
|
||||
vi.mock('@/lib/knowledge/documents/utils', () => ({
|
||||
fetchWithRetry: vi.fn(),
|
||||
VALIDATE_RETRY_OPTIONS: {},
|
||||
}))
|
||||
vi.mock('@/tools/jira/utils', () => ({ extractAdfText: vi.fn(), getJiraCloudId: vi.fn() }))
|
||||
vi.mock('@/tools/confluence/utils', () => ({ getConfluenceCloudId: vi.fn() }))
|
||||
|
||||
import { airtableConnector } from '@/connectors/airtable/airtable'
|
||||
import { confluenceConnector } from '@/connectors/confluence/confluence'
|
||||
import { githubConnector } from '@/connectors/github/github'
|
||||
import { googleDriveConnector } from '@/connectors/google-drive/google-drive'
|
||||
import { jiraConnector } from '@/connectors/jira/jira'
|
||||
import { linearConnector } from '@/connectors/linear/linear'
|
||||
import { notionConnector } from '@/connectors/notion/notion'
|
||||
|
||||
const ISO_DATE = '2025-06-15T10:30:00.000Z'
|
||||
|
||||
describe('Jira mapTags', () => {
|
||||
const mapTags = jiraConnector.mapTags!
|
||||
|
||||
it.concurrent('maps all fields when present', () => {
|
||||
const result = mapTags({
|
||||
issueType: 'Bug',
|
||||
status: 'In Progress',
|
||||
priority: 'High',
|
||||
labels: ['frontend', 'urgent'],
|
||||
assignee: 'Alice',
|
||||
updated: ISO_DATE,
|
||||
})
|
||||
|
||||
expect(result).toEqual({
|
||||
issueType: 'Bug',
|
||||
status: 'In Progress',
|
||||
priority: 'High',
|
||||
labels: 'frontend, urgent',
|
||||
assignee: 'Alice',
|
||||
updated: new Date(ISO_DATE),
|
||||
})
|
||||
})
|
||||
|
||||
it.concurrent('returns empty object for empty metadata', () => {
|
||||
expect(mapTags({})).toEqual({})
|
||||
})
|
||||
|
||||
it.concurrent('skips fields with wrong types', () => {
|
||||
const result = mapTags({
|
||||
issueType: 123,
|
||||
status: null,
|
||||
priority: undefined,
|
||||
assignee: true,
|
||||
})
|
||||
expect(result).toEqual({})
|
||||
})
|
||||
|
||||
it.concurrent('skips updated when date string is invalid', () => {
|
||||
const result = mapTags({ updated: 'not-a-date' })
|
||||
expect(result).toEqual({})
|
||||
})
|
||||
|
||||
it.concurrent('skips labels when not an array', () => {
|
||||
const result = mapTags({ labels: 'not-an-array' })
|
||||
expect(result).toEqual({})
|
||||
})
|
||||
|
||||
it.concurrent('skips labels when array is empty', () => {
|
||||
const result = mapTags({ labels: [] })
|
||||
expect(result).toEqual({})
|
||||
})
|
||||
|
||||
it.concurrent('skips updated when value is not a string', () => {
|
||||
const result = mapTags({ updated: 12345 })
|
||||
expect(result).toEqual({})
|
||||
})
|
||||
})
|
||||
|
||||
describe('Confluence mapTags', () => {
|
||||
const mapTags = confluenceConnector.mapTags!
|
||||
|
||||
it.concurrent('maps all fields when present', () => {
|
||||
const result = mapTags({
|
||||
labels: ['docs', 'published'],
|
||||
version: 5,
|
||||
lastModified: ISO_DATE,
|
||||
})
|
||||
|
||||
expect(result).toEqual({
|
||||
labels: 'docs, published',
|
||||
version: 5,
|
||||
lastModified: new Date(ISO_DATE),
|
||||
})
|
||||
})
|
||||
|
||||
it.concurrent('returns empty object for empty metadata', () => {
|
||||
expect(mapTags({})).toEqual({})
|
||||
})
|
||||
|
||||
it.concurrent('skips labels when not an array', () => {
|
||||
const result = mapTags({ labels: 'single-label' })
|
||||
expect(result).toEqual({})
|
||||
})
|
||||
|
||||
it.concurrent('skips version when NaN', () => {
|
||||
const result = mapTags({ version: 'abc' })
|
||||
expect(result).toEqual({})
|
||||
})
|
||||
|
||||
it.concurrent('converts string version to number', () => {
|
||||
const result = mapTags({ version: '3' })
|
||||
expect(result).toEqual({ version: 3 })
|
||||
})
|
||||
|
||||
it.concurrent('skips lastModified when date is invalid', () => {
|
||||
const result = mapTags({ lastModified: 'garbage' })
|
||||
expect(result).toEqual({})
|
||||
})
|
||||
|
||||
it.concurrent('skips lastModified when not a string', () => {
|
||||
const result = mapTags({ lastModified: 12345 })
|
||||
expect(result).toEqual({})
|
||||
})
|
||||
})
|
||||
|
||||
describe('GitHub mapTags', () => {
|
||||
const mapTags = githubConnector.mapTags!
|
||||
|
||||
it.concurrent('maps all fields when present', () => {
|
||||
const result = mapTags({
|
||||
path: 'src/index.ts',
|
||||
repository: 'owner/repo',
|
||||
branch: 'main',
|
||||
size: 1024,
|
||||
})
|
||||
|
||||
expect(result).toEqual({
|
||||
path: 'src/index.ts',
|
||||
repository: 'owner/repo',
|
||||
branch: 'main',
|
||||
size: 1024,
|
||||
})
|
||||
})
|
||||
|
||||
it.concurrent('returns empty object for empty metadata', () => {
|
||||
expect(mapTags({})).toEqual({})
|
||||
})
|
||||
|
||||
it.concurrent('skips string fields with wrong types', () => {
|
||||
const result = mapTags({
|
||||
path: 42,
|
||||
repository: null,
|
||||
branch: true,
|
||||
})
|
||||
expect(result).toEqual({})
|
||||
})
|
||||
|
||||
it.concurrent('skips size when NaN', () => {
|
||||
const result = mapTags({ size: 'not-a-number' })
|
||||
expect(result).toEqual({})
|
||||
})
|
||||
|
||||
it.concurrent('converts string size to number', () => {
|
||||
const result = mapTags({ size: '512' })
|
||||
expect(result).toEqual({ size: 512 })
|
||||
})
|
||||
|
||||
it.concurrent('maps size of zero', () => {
|
||||
const result = mapTags({ size: 0 })
|
||||
expect(result).toEqual({ size: 0 })
|
||||
})
|
||||
})
|
||||
|
||||
describe('Linear mapTags', () => {
|
||||
const mapTags = linearConnector.mapTags!
|
||||
|
||||
it.concurrent('maps all fields when present', () => {
|
||||
const result = mapTags({
|
||||
labels: ['bug', 'p0'],
|
||||
state: 'In Progress',
|
||||
priority: 'Urgent',
|
||||
assignee: 'Bob',
|
||||
lastModified: ISO_DATE,
|
||||
})
|
||||
|
||||
expect(result).toEqual({
|
||||
labels: 'bug, p0',
|
||||
state: 'In Progress',
|
||||
priority: 'Urgent',
|
||||
assignee: 'Bob',
|
||||
lastModified: new Date(ISO_DATE),
|
||||
})
|
||||
})
|
||||
|
||||
it.concurrent('returns empty object for empty metadata', () => {
|
||||
expect(mapTags({})).toEqual({})
|
||||
})
|
||||
|
||||
it.concurrent('skips string fields with wrong types', () => {
|
||||
const result = mapTags({
|
||||
state: 123,
|
||||
priority: false,
|
||||
assignee: [],
|
||||
})
|
||||
expect(result).toEqual({})
|
||||
})
|
||||
|
||||
it.concurrent('skips labels when not an array', () => {
|
||||
const result = mapTags({ labels: 'not-array' })
|
||||
expect(result).toEqual({})
|
||||
})
|
||||
|
||||
it.concurrent('skips lastModified when date is invalid', () => {
|
||||
const result = mapTags({ lastModified: 'invalid-date' })
|
||||
expect(result).toEqual({})
|
||||
})
|
||||
|
||||
it.concurrent('skips lastModified when not a string', () => {
|
||||
const result = mapTags({ lastModified: 99999 })
|
||||
expect(result).toEqual({})
|
||||
})
|
||||
})
|
||||
|
||||
describe('Notion mapTags', () => {
|
||||
const mapTags = notionConnector.mapTags!
|
||||
|
||||
it.concurrent('maps all fields when present', () => {
|
||||
const result = mapTags({
|
||||
tags: ['engineering', 'docs'],
|
||||
lastModified: ISO_DATE,
|
||||
createdTime: '2025-01-01T00:00:00.000Z',
|
||||
})
|
||||
|
||||
expect(result).toEqual({
|
||||
tags: 'engineering, docs',
|
||||
lastModified: new Date(ISO_DATE),
|
||||
created: new Date('2025-01-01T00:00:00.000Z'),
|
||||
})
|
||||
})
|
||||
|
||||
it.concurrent('returns empty object for empty metadata', () => {
|
||||
expect(mapTags({})).toEqual({})
|
||||
})
|
||||
|
||||
it.concurrent('skips tags when not an array', () => {
|
||||
const result = mapTags({ tags: 'single' })
|
||||
expect(result).toEqual({})
|
||||
})
|
||||
|
||||
it.concurrent('skips lastModified when date is invalid', () => {
|
||||
const result = mapTags({ lastModified: 'bad-date' })
|
||||
expect(result).toEqual({})
|
||||
})
|
||||
|
||||
it.concurrent('skips createdTime when date is invalid', () => {
|
||||
const result = mapTags({ createdTime: 'bad-date' })
|
||||
expect(result).toEqual({})
|
||||
})
|
||||
|
||||
it.concurrent('skips date fields when not strings', () => {
|
||||
const result = mapTags({ lastModified: 12345, createdTime: true })
|
||||
expect(result).toEqual({})
|
||||
})
|
||||
|
||||
it.concurrent('maps createdTime to created key', () => {
|
||||
const result = mapTags({ createdTime: ISO_DATE })
|
||||
expect(result).toEqual({ created: new Date(ISO_DATE) })
|
||||
expect(result).not.toHaveProperty('createdTime')
|
||||
})
|
||||
})
|
||||
|
||||
describe('Google Drive mapTags', () => {
|
||||
const mapTags = googleDriveConnector.mapTags!
|
||||
|
||||
it.concurrent('maps all fields when present', () => {
|
||||
const result = mapTags({
|
||||
owners: ['Alice', 'Bob'],
|
||||
originalMimeType: 'application/vnd.google-apps.document',
|
||||
modifiedTime: ISO_DATE,
|
||||
starred: true,
|
||||
})
|
||||
|
||||
expect(result).toEqual({
|
||||
owners: 'Alice, Bob',
|
||||
fileType: 'Google Doc',
|
||||
lastModified: new Date(ISO_DATE),
|
||||
starred: true,
|
||||
})
|
||||
})
|
||||
|
||||
it.concurrent('returns empty object for empty metadata', () => {
|
||||
expect(mapTags({})).toEqual({})
|
||||
})
|
||||
|
||||
it.concurrent('maps spreadsheet mime type', () => {
|
||||
const result = mapTags({ originalMimeType: 'application/vnd.google-apps.spreadsheet' })
|
||||
expect(result).toEqual({ fileType: 'Google Sheet' })
|
||||
})
|
||||
|
||||
it.concurrent('maps presentation mime type', () => {
|
||||
const result = mapTags({ originalMimeType: 'application/vnd.google-apps.presentation' })
|
||||
expect(result).toEqual({ fileType: 'Google Slides' })
|
||||
})
|
||||
|
||||
it.concurrent('maps text/ mime types to Text File', () => {
|
||||
const result = mapTags({ originalMimeType: 'text/plain' })
|
||||
expect(result).toEqual({ fileType: 'Text File' })
|
||||
})
|
||||
|
||||
it.concurrent('falls back to raw mime type for unknown types', () => {
|
||||
const result = mapTags({ originalMimeType: 'application/pdf' })
|
||||
expect(result).toEqual({ fileType: 'application/pdf' })
|
||||
})
|
||||
|
||||
it.concurrent('skips owners when not an array', () => {
|
||||
const result = mapTags({ owners: 'not-an-array' })
|
||||
expect(result).toEqual({})
|
||||
})
|
||||
|
||||
it.concurrent('skips modifiedTime when date is invalid', () => {
|
||||
const result = mapTags({ modifiedTime: 'garbage' })
|
||||
expect(result).toEqual({})
|
||||
})
|
||||
|
||||
it.concurrent('skips modifiedTime when not a string', () => {
|
||||
const result = mapTags({ modifiedTime: 99999 })
|
||||
expect(result).toEqual({})
|
||||
})
|
||||
|
||||
it.concurrent('maps starred false', () => {
|
||||
const result = mapTags({ starred: false })
|
||||
expect(result).toEqual({ starred: false })
|
||||
})
|
||||
|
||||
it.concurrent('skips starred when not a boolean', () => {
|
||||
const result = mapTags({ starred: 'yes' })
|
||||
expect(result).toEqual({})
|
||||
})
|
||||
|
||||
it.concurrent('maps modifiedTime to lastModified key', () => {
|
||||
const result = mapTags({ modifiedTime: ISO_DATE })
|
||||
expect(result).toEqual({ lastModified: new Date(ISO_DATE) })
|
||||
expect(result).not.toHaveProperty('modifiedTime')
|
||||
})
|
||||
|
||||
it.concurrent('maps originalMimeType to fileType key', () => {
|
||||
const result = mapTags({ originalMimeType: 'application/vnd.google-apps.document' })
|
||||
expect(result).toEqual({ fileType: 'Google Doc' })
|
||||
expect(result).not.toHaveProperty('originalMimeType')
|
||||
})
|
||||
})
|
||||
|
||||
describe('Airtable mapTags', () => {
|
||||
const mapTags = airtableConnector.mapTags!
|
||||
|
||||
it.concurrent('maps createdTime when present', () => {
|
||||
const result = mapTags({ createdTime: ISO_DATE })
|
||||
expect(result).toEqual({ createdTime: new Date(ISO_DATE) })
|
||||
})
|
||||
|
||||
it.concurrent('returns empty object for empty metadata', () => {
|
||||
expect(mapTags({})).toEqual({})
|
||||
})
|
||||
|
||||
it.concurrent('skips createdTime when date is invalid', () => {
|
||||
const result = mapTags({ createdTime: 'not-a-date' })
|
||||
expect(result).toEqual({})
|
||||
})
|
||||
|
||||
it.concurrent('skips createdTime when not a string', () => {
|
||||
const result = mapTags({ createdTime: 12345 })
|
||||
expect(result).toEqual({})
|
||||
})
|
||||
|
||||
it.concurrent('ignores unrelated metadata fields', () => {
|
||||
const result = mapTags({ foo: 'bar', count: 42, createdTime: ISO_DATE })
|
||||
expect(result).toEqual({ createdTime: new Date(ISO_DATE) })
|
||||
})
|
||||
})
|
||||
@@ -1 +0,0 @@
|
||||
export { notionConnector } from '@/connectors/notion/notion'
|
||||
@@ -1,600 +0,0 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { NotionIcon } from '@/components/icons'
|
||||
import { fetchWithRetry, VALIDATE_RETRY_OPTIONS } from '@/lib/knowledge/documents/utils'
|
||||
import type { ConnectorConfig, ExternalDocument, ExternalDocumentList } from '@/connectors/types'
|
||||
|
||||
const logger = createLogger('NotionConnector')
|
||||
|
||||
const NOTION_API_VERSION = '2022-06-28'
|
||||
const NOTION_BASE_URL = 'https://api.notion.com/v1'
|
||||
|
||||
/**
|
||||
* Computes a SHA-256 hash of the given content.
|
||||
*/
|
||||
async function computeContentHash(content: string): Promise<string> {
|
||||
const data = new TextEncoder().encode(content)
|
||||
const hashBuffer = await crypto.subtle.digest('SHA-256', data)
|
||||
return Array.from(new Uint8Array(hashBuffer))
|
||||
.map((b) => b.toString(16).padStart(2, '0'))
|
||||
.join('')
|
||||
}
|
||||
|
||||
/**
|
||||
* Extracts the title from a Notion page's properties.
|
||||
*/
|
||||
function extractTitle(properties: Record<string, unknown>): string {
|
||||
for (const value of Object.values(properties)) {
|
||||
const prop = value as Record<string, unknown>
|
||||
if (prop.type === 'title' && Array.isArray(prop.title) && prop.title.length > 0) {
|
||||
return prop.title.map((t: Record<string, unknown>) => (t.plain_text as string) || '').join('')
|
||||
}
|
||||
}
|
||||
return 'Untitled'
|
||||
}
|
||||
|
||||
/**
|
||||
* Extracts plain text from a rich_text array.
|
||||
*/
|
||||
function richTextToPlain(richText: Record<string, unknown>[]): string {
|
||||
return richText.map((t) => (t.plain_text as string) || '').join('')
|
||||
}
|
||||
|
||||
/**
|
||||
* Extracts plain text content from Notion blocks.
|
||||
*/
|
||||
function blocksToPlainText(blocks: Record<string, unknown>[]): string {
|
||||
return blocks
|
||||
.map((block) => {
|
||||
const type = block.type as string
|
||||
const blockData = block[type] as Record<string, unknown> | undefined
|
||||
if (!blockData) return ''
|
||||
|
||||
const richText = blockData.rich_text as Record<string, unknown>[] | undefined
|
||||
if (!richText) return ''
|
||||
|
||||
const text = richTextToPlain(richText)
|
||||
|
||||
switch (type) {
|
||||
case 'heading_1':
|
||||
return `# ${text}`
|
||||
case 'heading_2':
|
||||
return `## ${text}`
|
||||
case 'heading_3':
|
||||
return `### ${text}`
|
||||
case 'bulleted_list_item':
|
||||
return `- ${text}`
|
||||
case 'numbered_list_item':
|
||||
return `1. ${text}`
|
||||
case 'to_do': {
|
||||
const checked = (blockData.checked as boolean) ? '[x]' : '[ ]'
|
||||
return `${checked} ${text}`
|
||||
}
|
||||
case 'quote':
|
||||
return `> ${text}`
|
||||
case 'callout':
|
||||
return text
|
||||
case 'toggle':
|
||||
return text
|
||||
default:
|
||||
return text
|
||||
}
|
||||
})
|
||||
.filter(Boolean)
|
||||
.join('\n\n')
|
||||
}
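For a sense of the output shape, here is a small fixture run through `blocksToPlainText` (the block objects are hand-built to mimic the Notion blocks API shape consumed above; they are not real API responses):

```typescript
// Hand-built fixtures in the Notion block shape consumed above.
const sampleBlocks = [
  { type: 'heading_1', heading_1: { rich_text: [{ plain_text: 'Release Notes' }] } },
  { type: 'bulleted_list_item', bulleted_list_item: { rich_text: [{ plain_text: 'Fix sync' }] } },
  { type: 'to_do', to_do: { rich_text: [{ plain_text: 'Ship it' }], checked: true } },
] as Record<string, unknown>[]

// Produces: "# Release Notes\n\n- Fix sync\n\n[x] Ship it"
blocksToPlainText(sampleBlocks)
```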
/**
 * Fetches all block children for a page, handling pagination.
 */
async function fetchAllBlocks(
  accessToken: string,
  pageId: string
): Promise<Record<string, unknown>[]> {
  const allBlocks: Record<string, unknown>[] = []
  let cursor: string | undefined
  let hasMore = true

  while (hasMore) {
    const params = new URLSearchParams({ page_size: '100' })
    if (cursor) params.append('start_cursor', cursor)

    const response = await fetchWithRetry(
      `${NOTION_BASE_URL}/blocks/${pageId}/children?${params.toString()}`,
      {
        method: 'GET',
        headers: {
          Authorization: `Bearer ${accessToken}`,
          'Notion-Version': NOTION_API_VERSION,
        },
      }
    )

    if (!response.ok) {
      logger.warn(`Failed to fetch blocks for page ${pageId}`, { status: response.status })
      break
    }

    const data = await response.json()
    allBlocks.push(...(data.results || []))
    cursor = data.next_cursor ?? undefined
    hasMore = data.has_more === true
  }

  return allBlocks
}

/**
 * Extracts multi_select and select tags from page properties.
 */
function extractTags(properties: Record<string, unknown>): string[] {
  const tags: string[] = []
  for (const value of Object.values(properties)) {
    const prop = value as Record<string, unknown>
    if (prop.type === 'multi_select' && Array.isArray(prop.multi_select)) {
      for (const item of prop.multi_select) {
        const name = (item as Record<string, unknown>).name as string
        if (name) tags.push(name)
      }
    }
    if (prop.type === 'select' && prop.select) {
      const name = (prop.select as Record<string, unknown>).name as string
      if (name) tags.push(name)
    }
  }
  return tags
}

/**
 * Converts a Notion page to an ExternalDocument by fetching its block content.
 */
async function pageToExternalDocument(
  accessToken: string,
  page: Record<string, unknown>
): Promise<ExternalDocument> {
  const pageId = page.id as string
  const properties = (page.properties || {}) as Record<string, unknown>
  const title = extractTitle(properties)
  const url = page.url as string

  // Fetch page content
  const blocks = await fetchAllBlocks(accessToken, pageId)
  const plainText = blocksToPlainText(blocks)
  const contentHash = await computeContentHash(plainText)

  // Extract tags from multi_select/select properties
  const tags = extractTags(properties)

  return {
    externalId: pageId,
    title: title || 'Untitled',
    content: plainText,
    mimeType: 'text/plain',
    sourceUrl: url,
    contentHash,
    metadata: {
      tags,
      lastModified: page.last_edited_time as string,
      createdTime: page.created_time as string,
      parentType: (page.parent as Record<string, unknown>)?.type,
    },
  }
}

export const notionConnector: ConnectorConfig = {
  id: 'notion',
  name: 'Notion',
  description: 'Sync pages from a Notion workspace into your knowledge base',
  version: '1.0.0',
  icon: NotionIcon,

  oauth: {
    required: true,
    provider: 'notion',
    requiredScopes: [],
  },

  configFields: [
    {
      id: 'scope',
      title: 'Sync Scope',
      type: 'dropdown',
      required: false,
      options: [
        { label: 'Entire workspace', id: 'workspace' },
        { label: 'Specific database', id: 'database' },
        { label: 'Specific page (and children)', id: 'page' },
      ],
    },
    {
      id: 'databaseId',
      title: 'Database ID',
      type: 'short-input',
      required: false,
      placeholder: 'e.g. 8a3b5f6e-1234-5678-abcd-ef0123456789',
    },
    {
      id: 'rootPageId',
      title: 'Page ID',
      type: 'short-input',
      required: false,
      placeholder: 'e.g. 8a3b5f6e-1234-5678-abcd-ef0123456789',
    },
    {
      id: 'searchQuery',
      title: 'Search Filter',
      type: 'short-input',
      required: false,
      placeholder: 'e.g. meeting notes, project plan',
    },
    {
      id: 'maxPages',
      title: 'Max Pages',
      type: 'short-input',
      required: false,
      placeholder: 'e.g. 500 (default: unlimited)',
    },
  ],

  listDocuments: async (
    accessToken: string,
    sourceConfig: Record<string, unknown>,
    cursor?: string
  ): Promise<ExternalDocumentList> => {
    const scope = (sourceConfig.scope as string) || 'workspace'
    const databaseId = (sourceConfig.databaseId as string)?.trim()
    const rootPageId = (sourceConfig.rootPageId as string)?.trim()
    const maxPages = sourceConfig.maxPages ? Number(sourceConfig.maxPages) : 0

    if (scope === 'database' && databaseId) {
      return listFromDatabase(accessToken, databaseId, maxPages, cursor)
    }

    if (scope === 'page' && rootPageId) {
      return listFromParentPage(accessToken, rootPageId, maxPages, cursor)
    }

    // Default: workspace-wide search
    const searchQuery = (sourceConfig.searchQuery as string) || ''
    return listFromWorkspace(accessToken, searchQuery, maxPages, cursor)
  },

  getDocument: async (
    accessToken: string,
    _sourceConfig: Record<string, unknown>,
    externalId: string
  ): Promise<ExternalDocument | null> => {
    const response = await fetchWithRetry(`${NOTION_BASE_URL}/pages/${externalId}`, {
      method: 'GET',
      headers: {
        Authorization: `Bearer ${accessToken}`,
        'Notion-Version': NOTION_API_VERSION,
      },
    })

    if (!response.ok) {
      if (response.status === 404) return null
      throw new Error(`Failed to get Notion page: ${response.status}`)
    }

    const page = await response.json()
    return pageToExternalDocument(accessToken, page)
  },

  validateConfig: async (
    accessToken: string,
    sourceConfig: Record<string, unknown>
  ): Promise<{ valid: boolean; error?: string }> => {
    const scope = (sourceConfig.scope as string) || 'workspace'
    const databaseId = (sourceConfig.databaseId as string)?.trim()
    const rootPageId = (sourceConfig.rootPageId as string)?.trim()
    const maxPages = sourceConfig.maxPages as string | undefined

    if (maxPages && (Number.isNaN(Number(maxPages)) || Number(maxPages) <= 0)) {
      return { valid: false, error: 'Max pages must be a positive number' }
    }

    if (scope === 'database' && !databaseId) {
      return { valid: false, error: 'Database ID is required when scope is "Specific database"' }
    }

    if (scope === 'page' && !rootPageId) {
      return { valid: false, error: 'Page ID is required when scope is "Specific page"' }
    }

    try {
      // Verify the token works
      if (scope === 'database' && databaseId) {
        // Verify database is accessible
        const response = await fetchWithRetry(
          `${NOTION_BASE_URL}/databases/${databaseId}`,
          {
            method: 'GET',
            headers: {
              Authorization: `Bearer ${accessToken}`,
              'Notion-Version': NOTION_API_VERSION,
            },
          },
          VALIDATE_RETRY_OPTIONS
        )
        if (!response.ok) {
          return { valid: false, error: `Cannot access database: ${response.status}` }
        }
      } else if (scope === 'page' && rootPageId) {
        // Verify page is accessible
        const response = await fetchWithRetry(
          `${NOTION_BASE_URL}/pages/${rootPageId}`,
          {
            method: 'GET',
            headers: {
              Authorization: `Bearer ${accessToken}`,
              'Notion-Version': NOTION_API_VERSION,
            },
          },
          VALIDATE_RETRY_OPTIONS
        )
        if (!response.ok) {
          return { valid: false, error: `Cannot access page: ${response.status}` }
        }
      } else {
        // Workspace scope — just verify token works
        const response = await fetchWithRetry(
          `${NOTION_BASE_URL}/search`,
          {
            method: 'POST',
            headers: {
              Authorization: `Bearer ${accessToken}`,
              'Notion-Version': NOTION_API_VERSION,
              'Content-Type': 'application/json',
            },
            body: JSON.stringify({ page_size: 1 }),
          },
          VALIDATE_RETRY_OPTIONS
        )
        if (!response.ok) {
          const errorText = await response.text()
          return { valid: false, error: `Cannot access Notion workspace: ${errorText}` }
        }
      }

      return { valid: true }
    } catch (error) {
      const message = error instanceof Error ? error.message : 'Failed to validate configuration'
      return { valid: false, error: message }
    }
  },

  tagDefinitions: [
    { id: 'tags', displayName: 'Tags', fieldType: 'text' },
    { id: 'lastModified', displayName: 'Last Modified', fieldType: 'date' },
    { id: 'created', displayName: 'Created', fieldType: 'date' },
  ],

  mapTags: (metadata: Record<string, unknown>): Record<string, unknown> => {
    const result: Record<string, unknown> = {}

    const tags = Array.isArray(metadata.tags) ? (metadata.tags as string[]) : []
    if (tags.length > 0) result.tags = tags.join(', ')

    if (typeof metadata.lastModified === 'string') {
      const date = new Date(metadata.lastModified)
      if (!Number.isNaN(date.getTime())) result.lastModified = date
    }

    if (typeof metadata.createdTime === 'string') {
      const date = new Date(metadata.createdTime)
      if (!Number.isNaN(date.getTime())) result.created = date
    }

    return result
  },
}

/**
 * Lists pages from the entire workspace using the search API.
 */
async function listFromWorkspace(
  accessToken: string,
  searchQuery: string,
  maxPages: number,
  cursor?: string
): Promise<ExternalDocumentList> {
  const body: Record<string, unknown> = {
    page_size: 20,
    filter: { value: 'page', property: 'object' },
    sort: { direction: 'descending', timestamp: 'last_edited_time' },
  }

  if (searchQuery.trim()) {
    body.query = searchQuery.trim()
  }

  if (cursor) {
    body.start_cursor = cursor
  }

  logger.info('Listing Notion pages from workspace', { searchQuery, cursor })

  const response = await fetchWithRetry(`${NOTION_BASE_URL}/search`, {
    method: 'POST',
    headers: {
      Authorization: `Bearer ${accessToken}`,
      'Notion-Version': NOTION_API_VERSION,
      'Content-Type': 'application/json',
    },
    body: JSON.stringify(body),
  })

  if (!response.ok) {
    const errorText = await response.text()
    logger.error('Failed to search Notion', { status: response.status, error: errorText })
    throw new Error(`Failed to search Notion: ${response.status}`)
  }

  const data = await response.json()
  const results = (data.results || []) as Record<string, unknown>[]
  const pages = results.filter((r) => r.object === 'page' && !(r.archived as boolean))

  const documents = await processPages(accessToken, pages)
  const nextCursor = (data.next_cursor as string) ?? undefined

  return {
    documents,
    nextCursor,
    hasMore: data.has_more === true && (maxPages <= 0 || documents.length < maxPages),
  }
}

/**
 * Lists pages from a specific Notion database.
 */
async function listFromDatabase(
  accessToken: string,
  databaseId: string,
  maxPages: number,
  cursor?: string
): Promise<ExternalDocumentList> {
  const body: Record<string, unknown> = {
    page_size: 20,
  }

  if (cursor) {
    body.start_cursor = cursor
  }

  logger.info('Querying Notion database', { databaseId, cursor })

  const response = await fetchWithRetry(`${NOTION_BASE_URL}/databases/${databaseId}/query`, {
    method: 'POST',
    headers: {
      Authorization: `Bearer ${accessToken}`,
      'Notion-Version': NOTION_API_VERSION,
      'Content-Type': 'application/json',
    },
    body: JSON.stringify(body),
  })

  if (!response.ok) {
    const errorText = await response.text()
    logger.error('Failed to query Notion database', { status: response.status, error: errorText })
    throw new Error(`Failed to query Notion database: ${response.status}`)
  }

  const data = await response.json()
  const results = (data.results || []) as Record<string, unknown>[]
  const pages = results.filter((r) => r.object === 'page' && !(r.archived as boolean))

  const documents = await processPages(accessToken, pages)
  const nextCursor = (data.next_cursor as string) ?? undefined

  return {
    documents,
    nextCursor,
    hasMore: data.has_more === true && (maxPages <= 0 || documents.length < maxPages),
  }
}

/**
 * Lists child pages under a specific parent page.
 *
 * Uses the blocks children endpoint to find child_page blocks,
 * then fetches each page's content.
 */
async function listFromParentPage(
  accessToken: string,
  rootPageId: string,
  maxPages: number,
  cursor?: string
): Promise<ExternalDocumentList> {
  const params = new URLSearchParams({ page_size: '100' })
  if (cursor) params.append('start_cursor', cursor)

  logger.info('Listing child pages under root page', { rootPageId, cursor })

  const response = await fetchWithRetry(
    `${NOTION_BASE_URL}/blocks/${rootPageId}/children?${params.toString()}`,
    {
      method: 'GET',
      headers: {
        Authorization: `Bearer ${accessToken}`,
        'Notion-Version': NOTION_API_VERSION,
      },
    }
  )

  if (!response.ok) {
    const errorText = await response.text()
    logger.error('Failed to list child blocks', { status: response.status, error: errorText })
    throw new Error(`Failed to list child blocks: ${response.status}`)
  }

  const data = await response.json()
  const blocks = (data.results || []) as Record<string, unknown>[]

  // Filter to child_page and child_database blocks
  const childPageIds = blocks
    .filter((b) => b.type === 'child_page' || b.type === 'child_database')
    .map((b) => b.id as string)

  // Also include the root page itself on the first call (no cursor)
  const pageIdsToFetch = !cursor ? [rootPageId, ...childPageIds] : childPageIds

  // Fetch each child page
  const documents: ExternalDocument[] = []
  for (const pageId of pageIdsToFetch) {
    if (maxPages > 0 && documents.length >= maxPages) break

    try {
      const pageResponse = await fetchWithRetry(`${NOTION_BASE_URL}/pages/${pageId}`, {
        method: 'GET',
        headers: {
          Authorization: `Bearer ${accessToken}`,
          'Notion-Version': NOTION_API_VERSION,
        },
      })

      if (!pageResponse.ok) {
        logger.warn(`Failed to fetch child page ${pageId}`, { status: pageResponse.status })
        continue
      }

      const page = await pageResponse.json()
      if (page.archived) continue

      const doc = await pageToExternalDocument(accessToken, page)
      documents.push(doc)
    } catch (error) {
      logger.warn(`Failed to process child page ${pageId}`, {
        error: error instanceof Error ? error.message : String(error),
      })
    }
  }

  const nextCursor = (data.next_cursor as string) ?? undefined

  return {
    documents,
    nextCursor,
    hasMore: data.has_more === true && (maxPages <= 0 || documents.length < maxPages),
  }
}

/**
 * Converts an array of Notion page objects to ExternalDocuments.
 */
async function processPages(
  accessToken: string,
  pages: Record<string, unknown>[]
): Promise<ExternalDocument[]> {
  const documents: ExternalDocument[] = []
  for (const page of pages) {
    try {
      const doc = await pageToExternalDocument(accessToken, page)
      documents.push(doc)
    } catch (error) {
      logger.warn(`Failed to process Notion page ${page.id}`, {
        error: error instanceof Error ? error.message : String(error),
      })
    }
  }
  return documents
}
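Together these helpers satisfy the `listDocuments` contract: each call returns one page of documents plus a cursor. A minimal sketch of a caller draining the pagination, assuming only the connector API shown above (`collectAllPages` is an illustrative name, not part of this diff):

```typescript
// Illustrative driver: accumulate every page from a connector listing.
async function collectAllPages(
  accessToken: string,
  sourceConfig: Record<string, unknown>
): Promise<ExternalDocument[]> {
  const documents: ExternalDocument[] = []
  let cursor: string | undefined
  let hasMore = true

  while (hasMore) {
    const page = await notionConnector.listDocuments(accessToken, sourceConfig, cursor)
    documents.push(...page.documents)
    cursor = page.nextCursor
    // Stop when the source reports no more pages or omits the cursor.
    hasMore = page.hasMore && Boolean(page.nextCursor)
  }

  return documents
}
```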
@@ -1,18 +0,0 @@
import { airtableConnector } from '@/connectors/airtable'
import { confluenceConnector } from '@/connectors/confluence'
import { githubConnector } from '@/connectors/github'
import { googleDriveConnector } from '@/connectors/google-drive'
import { jiraConnector } from '@/connectors/jira'
import { linearConnector } from '@/connectors/linear'
import { notionConnector } from '@/connectors/notion'
import type { ConnectorRegistry } from '@/connectors/types'

export const CONNECTOR_REGISTRY: ConnectorRegistry = {
  airtable: airtableConnector,
  confluence: confluenceConnector,
  github: githubConnector,
  google_drive: googleDriveConnector,
  jira: jiraConnector,
  linear: linearConnector,
  notion: notionConnector,
}
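Since the registry is a plain object keyed by connector ID, resolving a stored `connectorType` is a property access; a hedged sketch (`getConnector` is illustrative, not part of this diff):

```typescript
// Illustrative lookup: resolve a connector config or fail loudly.
function getConnector(connectorType: string): ConnectorConfig {
  const connector = CONNECTOR_REGISTRY[connectorType]
  if (!connector) {
    throw new Error(`Unknown connector type: ${connectorType}`)
  }
  return connector
}
```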
@@ -1,165 +0,0 @@
import type { OAuthService } from '@/lib/oauth/types'

/**
 * A single document fetched from an external source.
 */
export interface ExternalDocument {
  /** Source-specific unique ID (page ID, file ID) */
  externalId: string
  /** Document title / filename */
  title: string
  /** Extracted text content */
  content: string
  /** MIME type of the content */
  mimeType: string
  /** Link back to the original document */
  sourceUrl?: string
  /** SHA-256 of content for change detection */
  contentHash: string
  /** Additional source-specific metadata */
  metadata?: Record<string, unknown>
}

/**
 * Paginated result from listing documents in an external source.
 */
export interface ExternalDocumentList {
  documents: ExternalDocument[]
  nextCursor?: string
  hasMore: boolean
}

/**
 * Result of a sync operation.
 */
export interface SyncResult {
  docsAdded: number
  docsUpdated: number
  docsDeleted: number
  docsUnchanged: number
  error?: string
}

/**
 * Config field for source-specific settings (rendered in the add-connector UI).
 */
export interface ConnectorConfigField {
  id: string
  title: string
  type: 'short-input' | 'dropdown'
  placeholder?: string
  required?: boolean
  description?: string
  options?: { label: string; id: string }[]
}

/**
 * Declarative config for a knowledge source connector.
 *
 * Mirrors ToolConfig/TriggerConfig pattern:
 * - Purely declarative metadata (id, name, icon, oauth, configFields)
 * - Runtime functions for data fetching (listDocuments, getDocument, validateConfig)
 *
 * Adding a new connector = creating one of these + registering it.
 */
export interface ConnectorConfig {
  /** Unique connector identifier, e.g. 'confluence', 'google_drive', 'notion' */
  id: string
  /** Human-readable name, e.g. 'Confluence', 'Google Drive' */
  name: string
  /** Short description of the connector */
  description: string
  /** Semver version */
  version: string
  /** Icon component for the connector */
  icon: React.ComponentType<{ className?: string }>

  /** OAuth configuration (same pattern as ToolConfig.oauth) */
  oauth: {
    required: true
    provider: OAuthService
    requiredScopes?: string[]
  }

  /** Source configuration fields rendered in the add-connector UI */
  configFields: ConnectorConfigField[]

  /**
   * List all documents from the configured source (handles pagination via cursor).
   * syncContext is a mutable object shared across all pages of a single sync run —
   * connectors can use it to cache expensive lookups (e.g. schema fetches) without
   * leaking state into module-level globals.
   */
  listDocuments: (
    accessToken: string,
    sourceConfig: Record<string, unknown>,
    cursor?: string,
    syncContext?: Record<string, unknown>
  ) => Promise<ExternalDocumentList>

  /** Fetch a single document by its external ID */
  getDocument: (
    accessToken: string,
    sourceConfig: Record<string, unknown>,
    externalId: string
  ) => Promise<ExternalDocument | null>

  /** Validate that sourceConfig is correct and accessible (called on save) */
  validateConfig: (
    accessToken: string,
    sourceConfig: Record<string, unknown>
  ) => Promise<{ valid: boolean; error?: string }>

  /** Map source metadata to semantic tag keys (translated to slots by the sync engine) */
  mapTags?: (metadata: Record<string, unknown>) => Record<string, unknown>

  /**
   * Tag definitions this connector populates. Shown in the add-connector modal
   * as opt-out checkboxes. On connector creation, tag definitions are auto-created
   * on the KB for enabled slots, and mapTags output is filtered to only include them.
   */
  tagDefinitions?: ConnectorTagDefinition[]
}
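The `syncContext` parameter exists so per-run state stays out of module scope; a minimal sketch of the caching pattern the comment describes, assuming a hypothetical `fetchSchema` helper (not part of this diff):

```typescript
// Illustrative listDocuments showing the syncContext caching pattern.
const listDocuments: ConnectorConfig['listDocuments'] = async (
  accessToken,
  sourceConfig,
  cursor,
  syncContext
) => {
  // fetchSchema is hypothetical: any expensive lookup that should run
  // once per sync run rather than once per page.
  if (syncContext && !syncContext.schema) {
    syncContext.schema = await fetchSchema(accessToken, sourceConfig)
  }
  // ...build one page of documents using syncContext?.schema...
  return { documents: [], hasMore: false }
}
```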
/**
 * A tag that a connector populates, with a semantic ID and human-readable name.
 * Slots are dynamically assigned on connector creation via getNextAvailableSlot.
 */
export interface ConnectorTagDefinition {
  /** Semantic ID matching a key returned by mapTags (e.g. 'labels', 'version') */
  id: string
  /** Human-readable name shown in UI (e.g. 'Labels', 'Last Modified') */
  displayName: string
  /** Field type determines which slot pool to draw from */
  fieldType: 'text' | 'number' | 'date' | 'boolean'
}

/**
 * Tag slots available on the document table for connector metadata mapping.
 */
export interface DocumentTags {
  tag1?: string
  tag2?: string
  tag3?: string
  tag4?: string
  tag5?: string
  tag6?: string
  tag7?: string
  number1?: number
  number2?: number
  number3?: number
  number4?: number
  number5?: number
  date1?: Date
  date2?: Date
  boolean1?: boolean
  boolean2?: boolean
  boolean3?: boolean
}
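Because slots are fixed columns, assigning a `ConnectorTagDefinition` means picking the first free slot in the pool for its `fieldType`; a minimal sketch of `getNextAvailableSlot`-style logic (the real implementation lives in the sync engine and is not shown in this diff):

```typescript
// Illustrative slot pools mirroring the DocumentTags columns above.
const SLOT_POOLS: Record<ConnectorTagDefinition['fieldType'], (keyof DocumentTags)[]> = {
  text: ['tag1', 'tag2', 'tag3', 'tag4', 'tag5', 'tag6', 'tag7'],
  number: ['number1', 'number2', 'number3', 'number4', 'number5'],
  date: ['date1', 'date2'],
  boolean: ['boolean1', 'boolean2', 'boolean3'],
}

function nextAvailableSlot(
  fieldType: ConnectorTagDefinition['fieldType'],
  used: Set<keyof DocumentTags>
): keyof DocumentTags | null {
  return SLOT_POOLS[fieldType].find((slot) => !used.has(slot)) ?? null
}
```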
/**
 * Registry mapping connector IDs to their configs.
 */
export interface ConnectorRegistry {
  [connectorId: string]: ConnectorConfig
}
@@ -3,7 +3,7 @@
import { useCallback } from 'react'
import { useQueryClient } from '@tanstack/react-query'
import type { AllTagSlot } from '@/lib/knowledge/constants'
import { knowledgeKeys, useTagDefinitionsQuery } from '@/hooks/queries/kb/knowledge'
import { knowledgeKeys, useTagDefinitionsQuery } from '@/hooks/queries/knowledge'

export interface TagDefinition {
  id: string

@@ -2,7 +2,6 @@ import { useCallback, useMemo } from 'react'
import { useQueryClient } from '@tanstack/react-query'
import type { ChunkData, DocumentData, KnowledgeBaseData } from '@/lib/knowledge/types'
import {
  type DocumentTagFilter,
  type KnowledgeChunksResponse,
  type KnowledgeDocumentsResponse,
  knowledgeKeys,
@@ -13,7 +12,7 @@ import {
  useKnowledgeBasesQuery,
  useKnowledgeChunksQuery,
  useKnowledgeDocumentsQuery,
} from '@/hooks/queries/kb/knowledge'
} from '@/hooks/queries/knowledge'

const DEFAULT_PAGE_SIZE = 50

@@ -73,14 +72,12 @@ export function useKnowledgeBaseDocuments(
      | false
      | ((data: KnowledgeDocumentsResponse | undefined) => number | false)
    enabledFilter?: 'all' | 'enabled' | 'disabled'
    tagFilters?: DocumentTagFilter[]
  }
) {
  const queryClient = useQueryClient()
  const requestLimit = options?.limit ?? DEFAULT_PAGE_SIZE
  const requestOffset = options?.offset ?? 0
  const enabledFilter = options?.enabledFilter ?? 'all'
  const tagFilters = options?.tagFilters
  const paramsKey = serializeDocumentParams({
    knowledgeBaseId,
    limit: requestLimit,
@@ -89,7 +86,6 @@ export function useKnowledgeBaseDocuments(
    sortBy: options?.sortBy,
    sortOrder: options?.sortOrder,
    enabledFilter,
    tagFilters,
  })

  const refetchIntervalFn = useMemo(() => {
@@ -111,7 +107,6 @@ export function useKnowledgeBaseDocuments(
      sortBy: options?.sortBy,
      sortOrder: options?.sortOrder,
      enabledFilter,
      tagFilters,
    },
    {
      enabled: (options?.enabled ?? true) && Boolean(knowledgeBaseId),
@@ -232,8 +227,7 @@ export function useDocumentChunks(
  knowledgeBaseId: string,
  documentId: string,
  page = 1,
  search = '',
  enabledFilter: 'all' | 'enabled' | 'disabled' = 'all'
  search = ''
) {
  const queryClient = useQueryClient()

@@ -247,7 +241,6 @@ export function useDocumentChunks(
      limit: DEFAULT_PAGE_SIZE,
      offset,
      search: search || undefined,
      enabledFilter,
    },
    {
      enabled: Boolean(knowledgeBaseId && documentId),
@@ -279,12 +272,11 @@ export function useDocumentChunks(
      limit: DEFAULT_PAGE_SIZE,
      offset,
      search: search || undefined,
      enabledFilter,
    })
    await queryClient.invalidateQueries({
      queryKey: knowledgeKeys.chunks(knowledgeBaseId, documentId, paramsKey),
    })
  }, [knowledgeBaseId, documentId, offset, search, enabledFilter, queryClient])
  }, [knowledgeBaseId, documentId, offset, search, queryClient])

  const updateChunk = useCallback(
    (chunkId: string, updates: Partial<ChunkData>) => {
@@ -294,7 +286,6 @@ export function useDocumentChunks(
        limit: DEFAULT_PAGE_SIZE,
        offset,
        search: search || undefined,
        enabledFilter,
      })
      queryClient.setQueryData<KnowledgeChunksResponse>(
        knowledgeKeys.chunks(knowledgeBaseId, documentId, paramsKey),
@@ -309,7 +300,7 @@ export function useDocumentChunks(
        }
      )
    },
    [knowledgeBaseId, documentId, offset, search, enabledFilter, queryClient]
    [knowledgeBaseId, documentId, offset, search, queryClient]
  )

  return {

@@ -9,7 +9,7 @@ import {
  useDeleteDocumentTagDefinitions,
  useDocumentTagDefinitionsQuery,
  useSaveDocumentTagDefinitions,
} from '@/hooks/queries/kb/knowledge'
} from '@/hooks/queries/knowledge'

export interface TagDefinition {
  id: string

@@ -1,426 +0,0 @@
import { createLogger } from '@sim/logger'
import { keepPreviousData, useMutation, useQuery, useQueryClient } from '@tanstack/react-query'
import { knowledgeKeys } from '@/hooks/queries/kb/knowledge'

const logger = createLogger('KnowledgeConnectorQueries')

export interface ConnectorData {
  id: string
  knowledgeBaseId: string
  connectorType: string
  credentialId: string
  sourceConfig: Record<string, unknown>
  syncMode: string
  syncIntervalMinutes: number
  status: 'active' | 'paused' | 'syncing' | 'error'
  lastSyncAt: string | null
  lastSyncError: string | null
  lastSyncDocCount: number | null
  nextSyncAt: string | null
  consecutiveFailures: number
  createdAt: string
  updatedAt: string
}

export interface SyncLogData {
  id: string
  connectorId: string
  status: string
  startedAt: string
  completedAt: string | null
  docsAdded: number
  docsUpdated: number
  docsDeleted: number
  docsUnchanged: number
  errorMessage: string | null
}

export interface ConnectorDetailData extends ConnectorData {
  syncLogs: SyncLogData[]
}

export const connectorKeys = {
  all: (knowledgeBaseId: string) =>
    [...knowledgeKeys.detail(knowledgeBaseId), 'connectors'] as const,
  list: (knowledgeBaseId?: string) =>
    [...knowledgeKeys.detail(knowledgeBaseId), 'connectors', 'list'] as const,
  detail: (knowledgeBaseId?: string, connectorId?: string) =>
    [...knowledgeKeys.detail(knowledgeBaseId), 'connectors', 'detail', connectorId ?? ''] as const,
}
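Every connector key extends `knowledgeKeys.detail(knowledgeBaseId)`, so with TanStack Query's default prefix matching, invalidating the knowledge-base detail key also invalidates these connector queries:

```typescript
// Invalidating the KB detail key cascades to connector list/detail queries,
// since their keys share knowledgeKeys.detail(knowledgeBaseId) as a prefix.
queryClient.invalidateQueries({ queryKey: knowledgeKeys.detail(knowledgeBaseId) })
```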
async function fetchConnectors(knowledgeBaseId: string): Promise<ConnectorData[]> {
  const response = await fetch(`/api/knowledge/${knowledgeBaseId}/connectors`)

  if (!response.ok) {
    throw new Error(`Failed to fetch connectors: ${response.status}`)
  }

  const result = await response.json()
  if (!result?.success) {
    throw new Error(result?.error || 'Failed to fetch connectors')
  }

  return Array.isArray(result.data) ? result.data : []
}

async function fetchConnectorDetail(
  knowledgeBaseId: string,
  connectorId: string
): Promise<ConnectorDetailData> {
  const response = await fetch(`/api/knowledge/${knowledgeBaseId}/connectors/${connectorId}`)

  if (!response.ok) {
    throw new Error(`Failed to fetch connector: ${response.status}`)
  }

  const result = await response.json()
  if (!result?.success || !result?.data) {
    throw new Error(result?.error || 'Failed to fetch connector')
  }

  return result.data
}

export function useConnectorList(knowledgeBaseId?: string) {
  return useQuery({
    queryKey: connectorKeys.list(knowledgeBaseId),
    queryFn: () => fetchConnectors(knowledgeBaseId as string),
    enabled: Boolean(knowledgeBaseId),
    staleTime: 30 * 1000,
    placeholderData: keepPreviousData,
    refetchInterval: (query) => {
      const connectors = query.state.data
      const hasSyncing = connectors?.some((c) => c.status === 'syncing')
      return hasSyncing ? 3000 : false
    },
  })
}

export function useConnectorDetail(knowledgeBaseId?: string, connectorId?: string) {
  return useQuery({
    queryKey: connectorKeys.detail(knowledgeBaseId, connectorId),
    queryFn: () => fetchConnectorDetail(knowledgeBaseId as string, connectorId as string),
    enabled: Boolean(knowledgeBaseId && connectorId),
    staleTime: 30 * 1000,
    placeholderData: keepPreviousData,
  })
}

export interface CreateConnectorParams {
  knowledgeBaseId: string
  connectorType: string
  credentialId: string
  sourceConfig: Record<string, unknown>
  syncIntervalMinutes?: number
}

async function createConnector({
  knowledgeBaseId,
  ...body
}: CreateConnectorParams): Promise<ConnectorData> {
  const response = await fetch(`/api/knowledge/${knowledgeBaseId}/connectors`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(body),
  })

  if (!response.ok) {
    const result = await response.json()
    throw new Error(result.error || 'Failed to create connector')
  }

  const result = await response.json()
  if (!result?.success || !result?.data) {
    throw new Error(result?.error || 'Failed to create connector')
  }

  return result.data
}

export function useCreateConnector() {
  const queryClient = useQueryClient()

  return useMutation({
    mutationFn: createConnector,
    onSuccess: (_, { knowledgeBaseId }) => {
      queryClient.invalidateQueries({
        queryKey: connectorKeys.list(knowledgeBaseId),
      })
    },
  })
}

export interface UpdateConnectorParams {
  knowledgeBaseId: string
  connectorId: string
  updates: {
    sourceConfig?: Record<string, unknown>
    syncIntervalMinutes?: number
    status?: 'active' | 'paused'
  }
}

async function updateConnector({
  knowledgeBaseId,
  connectorId,
  updates,
}: UpdateConnectorParams): Promise<ConnectorData> {
  const response = await fetch(`/api/knowledge/${knowledgeBaseId}/connectors/${connectorId}`, {
    method: 'PATCH',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(updates),
  })

  if (!response.ok) {
    const result = await response.json()
    throw new Error(result.error || 'Failed to update connector')
  }

  const result = await response.json()
  if (!result?.success) {
    throw new Error(result?.error || 'Failed to update connector')
  }

  return result.data
}

export function useUpdateConnector() {
  const queryClient = useQueryClient()

  return useMutation({
    mutationFn: updateConnector,
    onSuccess: (_, { knowledgeBaseId, connectorId }) => {
      queryClient.invalidateQueries({
        queryKey: connectorKeys.list(knowledgeBaseId),
      })
      queryClient.invalidateQueries({
        queryKey: connectorKeys.detail(knowledgeBaseId, connectorId),
      })
    },
  })
}

export interface DeleteConnectorParams {
  knowledgeBaseId: string
  connectorId: string
}

async function deleteConnector({
  knowledgeBaseId,
  connectorId,
}: DeleteConnectorParams): Promise<void> {
  const response = await fetch(`/api/knowledge/${knowledgeBaseId}/connectors/${connectorId}`, {
    method: 'DELETE',
  })

  if (!response.ok) {
    const result = await response.json()
    throw new Error(result.error || 'Failed to delete connector')
  }

  const result = await response.json()
  if (!result?.success) {
    throw new Error(result?.error || 'Failed to delete connector')
  }
}

export function useDeleteConnector() {
  const queryClient = useQueryClient()

  return useMutation({
    mutationFn: deleteConnector,
    onSuccess: (_, { knowledgeBaseId }) => {
      queryClient.invalidateQueries({
        queryKey: connectorKeys.list(knowledgeBaseId),
      })
      queryClient.invalidateQueries({
        queryKey: knowledgeKeys.detail(knowledgeBaseId),
      })
    },
  })
}

export interface TriggerSyncParams {
  knowledgeBaseId: string
  connectorId: string
}

async function triggerSync({ knowledgeBaseId, connectorId }: TriggerSyncParams): Promise<void> {
  const response = await fetch(`/api/knowledge/${knowledgeBaseId}/connectors/${connectorId}/sync`, {
    method: 'POST',
  })

  if (!response.ok) {
    const result = await response.json()
    throw new Error(result.error || 'Failed to trigger sync')
  }
}

export function useTriggerSync() {
  const queryClient = useQueryClient()

  return useMutation({
    mutationFn: triggerSync,
    onSuccess: (_, { knowledgeBaseId, connectorId }) => {
      queryClient.invalidateQueries({
        queryKey: connectorKeys.list(knowledgeBaseId),
      })
      queryClient.invalidateQueries({
        queryKey: connectorKeys.detail(knowledgeBaseId, connectorId),
      })
      queryClient.invalidateQueries({
        queryKey: knowledgeKeys.detail(knowledgeBaseId),
      })
    },
  })
}

export interface ConnectorDocumentData {
  id: string
  filename: string
  externalId: string | null
  sourceUrl: string | null
  enabled: boolean
  deletedAt: string | null
  userExcluded: boolean
  uploadedAt: string
  processingStatus: string
}

export interface ConnectorDocumentsResponse {
  documents: ConnectorDocumentData[]
  counts: { active: number; excluded: number }
}

export const connectorDocumentKeys = {
  list: (knowledgeBaseId?: string, connectorId?: string) =>
    [...connectorKeys.detail(knowledgeBaseId, connectorId), 'documents'] as const,
}

async function fetchConnectorDocuments(
  knowledgeBaseId: string,
  connectorId: string,
  includeExcluded: boolean
): Promise<ConnectorDocumentsResponse> {
  const params = includeExcluded ? '?includeExcluded=true' : ''
  const response = await fetch(
    `/api/knowledge/${knowledgeBaseId}/connectors/${connectorId}/documents${params}`
  )

  if (!response.ok) {
    throw new Error(`Failed to fetch connector documents: ${response.status}`)
  }

  const result = await response.json()
  if (!result?.success) {
    throw new Error(result?.error || 'Failed to fetch connector documents')
  }

  return result.data
}

export function useConnectorDocuments(
  knowledgeBaseId?: string,
  connectorId?: string,
  options?: { includeExcluded?: boolean }
) {
  return useQuery({
    queryKey: [
      ...connectorDocumentKeys.list(knowledgeBaseId, connectorId),
      options?.includeExcluded ?? false,
    ],
    queryFn: () =>
      fetchConnectorDocuments(
        knowledgeBaseId as string,
        connectorId as string,
        options?.includeExcluded ?? false
      ),
    enabled: Boolean(knowledgeBaseId && connectorId),
    staleTime: 30 * 1000,
    placeholderData: keepPreviousData,
  })
}

interface ConnectorDocumentMutationParams {
  knowledgeBaseId: string
  connectorId: string
  documentIds: string[]
}

async function excludeConnectorDocuments({
  knowledgeBaseId,
  connectorId,
  documentIds,
}: ConnectorDocumentMutationParams): Promise<{ excludedCount: number }> {
  const response = await fetch(
    `/api/knowledge/${knowledgeBaseId}/connectors/${connectorId}/documents`,
    {
      method: 'PATCH',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({ operation: 'exclude', documentIds }),
    }
  )

  if (!response.ok) {
    const result = await response.json()
    throw new Error(result.error || 'Failed to exclude documents')
  }

  const result = await response.json()
  return result.data
}

export function useExcludeConnectorDocument() {
  const queryClient = useQueryClient()

  return useMutation({
    mutationFn: excludeConnectorDocuments,
    onSuccess: (_, { knowledgeBaseId, connectorId }) => {
      queryClient.invalidateQueries({
        queryKey: connectorDocumentKeys.list(knowledgeBaseId, connectorId),
      })
      queryClient.invalidateQueries({
        queryKey: knowledgeKeys.detail(knowledgeBaseId),
      })
    },
  })
}

async function restoreConnectorDocuments({
  knowledgeBaseId,
  connectorId,
  documentIds,
}: ConnectorDocumentMutationParams): Promise<{ restoredCount: number }> {
  const response = await fetch(
    `/api/knowledge/${knowledgeBaseId}/connectors/${connectorId}/documents`,
    {
      method: 'PATCH',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({ operation: 'restore', documentIds }),
    }
  )

  if (!response.ok) {
    const result = await response.json()
    throw new Error(result.error || 'Failed to restore documents')
  }

  const result = await response.json()
  return result.data
}

export function useRestoreConnectorDocument() {
  const queryClient = useQueryClient()

  return useMutation({
    mutationFn: restoreConnectorDocuments,
    onSuccess: (_, { knowledgeBaseId, connectorId }) => {
      queryClient.invalidateQueries({
        queryKey: connectorDocumentKeys.list(knowledgeBaseId, connectorId),
      })
      queryClient.invalidateQueries({
        queryKey: knowledgeKeys.detail(knowledgeBaseId),
      })
    },
  })
}
@@ -79,14 +79,6 @@ export async function fetchDocument(
  return result.data
}

export interface DocumentTagFilter {
  tagSlot: string
  fieldType: 'text' | 'number' | 'date' | 'boolean'
  operator: string
  value: string
  valueTo?: string
}

export interface KnowledgeDocumentsParams {
  knowledgeBaseId: string
  search?: string
@@ -95,7 +87,6 @@ export interface KnowledgeDocumentsParams {
  sortBy?: string
  sortOrder?: string
  enabledFilter?: 'all' | 'enabled' | 'disabled'
  tagFilters?: DocumentTagFilter[]
}

export interface KnowledgeDocumentsResponse {
@@ -111,7 +102,6 @@ export async function fetchKnowledgeDocuments({
  sortBy,
  sortOrder,
  enabledFilter,
  tagFilters,
}: KnowledgeDocumentsParams): Promise<KnowledgeDocumentsResponse> {
  const params = new URLSearchParams()
  if (search) params.set('search', search)
@@ -120,7 +110,6 @@ export async function fetchKnowledgeDocuments({
  params.set('limit', limit.toString())
  params.set('offset', offset.toString())
  if (enabledFilter) params.set('enabledFilter', enabledFilter)
  if (tagFilters && tagFilters.length > 0) params.set('tagFilters', JSON.stringify(tagFilters))

  const url = `/api/knowledge/${knowledgeBaseId}/documents${params.toString() ? `?${params.toString()}` : ''}`
  const response = await fetch(url)
@@ -158,7 +147,6 @@ export interface KnowledgeChunksParams {
  knowledgeBaseId: string
  documentId: string
  search?: string
  enabledFilter?: 'all' | 'enabled' | 'disabled'
  limit?: number
  offset?: number
}
@@ -172,15 +160,11 @@ export async function fetchKnowledgeChunks({
  knowledgeBaseId,
  documentId,
  search,
  enabledFilter,
  limit = 50,
  offset = 0,
}: KnowledgeChunksParams): Promise<KnowledgeChunksResponse> {
  const params = new URLSearchParams()
  if (search) params.set('search', search)
  if (enabledFilter && enabledFilter !== 'all') {
    params.set('enabled', enabledFilter === 'enabled' ? 'true' : 'false')
  }
  if (limit) params.set('limit', limit.toString())
  if (offset) params.set('offset', offset.toString())

@@ -250,7 +234,6 @@ export const serializeDocumentParams = (params: KnowledgeDocumentsParams) =>
    sortBy: params.sortBy ?? '',
    sortOrder: params.sortOrder ?? '',
    enabledFilter: params.enabledFilter ?? 'all',
    tagFilters: params.tagFilters ?? [],
  })

export function useKnowledgeDocumentsQuery(
@@ -277,7 +260,6 @@ export function useKnowledgeDocumentsQuery(
export const serializeChunkParams = (params: KnowledgeChunksParams) =>
  JSON.stringify({
    search: params.search ?? '',
    enabledFilter: params.enabledFilter ?? 'all',
    limit: params.limit ?? 50,
    offset: params.offset ?? 0,
  })
Some files were not shown because too many files have changed in this diff